diff options
-rw-r--r-- | media/libstagefright/AwesomePlayer.cpp | 195 | ||||
-rw-r--r-- | media/libstagefright/QCMediaDefs.cpp | 4 | ||||
-rw-r--r-- | media/libstagefright/TunnelPlayer.cpp | 174 | ||||
-rw-r--r-- | media/libstagefright/include/AwesomePlayer.h | 22 | ||||
-rw-r--r-- | services/audioflinger/AudioFlinger.cpp | 3 |
5 files changed, 351 insertions, 47 deletions
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 9c0e799..83c480d 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -1,8 +1,6 @@ /* * Copyright (C) 2009 The Android Open Source Project - * Copyright (c) 2012, The Linux Foundation. All rights reserved. - * - * Copyright (c) 2011-2012, The Linux Foundation. All rights reserved. + * Copyright (c) 2011-2013, The Linux Foundation. All rights reserved. * Not a Contribution, Apache license notifications and license are retained * for attribution purposes only. @@ -68,6 +66,8 @@ #define USE_SURFACE_ALLOC 1 #define FRAME_DROP_FREQ 0 +#define LPA_MIN_DURATION_USEC_ALLOWED 30000000 +#define LPA_MIN_DURATION_USEC_DEFAULT 60000000 namespace android { @@ -607,6 +607,18 @@ void AwesomePlayer::reset_l() { mStats.mVideoHeight = -1; mStats.mFlags = 0; mStats.mTracks.clear(); + mStats.mConsecutiveFramesDropped = 0; + mStats.mCatchupTimeStart = 0; + mStats.mNumTimesSyncLoss = 0; + mStats.mMaxEarlyDelta = 0; + mStats.mMaxLateDelta = 0; + mStats.mMaxTimeSyncLoss = 0; + mStats.mTotalFrames = 0; + mStats.mLastFrameUs = 0; + mStats.mTotalTimeUs = 0; + mStats.mLastPausedTimeMs = 0; + mStats.mLastSeekToTimeMs = 0; + mStats.mFirstFrameLatencyUs = 0; } mWatchForAudioSeekComplete = false; @@ -975,13 +987,21 @@ status_t AwesomePlayer::play_l() { ALOGV("nchannels %d;LPA will be skipped if nchannels is > 2 or nchannels == 0",nchannels); } } - char lpaDecode[128]; + char lpaDecode[PROPERTY_VALUE_MAX]; + uint32_t minDurationForLPA = LPA_MIN_DURATION_USEC_DEFAULT; + char minUserDefDuration[PROPERTY_VALUE_MAX]; property_get("lpa.decode",lpaDecode,"0"); + property_get("lpa.min_duration",minUserDefDuration,"60000000"); + minDurationForLPA = atoi(minUserDefDuration); + if(minDurationForLPA < LPA_MIN_DURATION_USEC_ALLOWED) { + ALOGE("LPAPlayer::Clip duration setting of less than 30sec not supported, defaulting to 60sec"); + 
minDurationForLPA = LPA_MIN_DURATION_USEC_DEFAULT; + } if((strcmp("true",lpaDecode) == 0) && (mAudioPlayer == NULL) && (tunnelObjectsAlive==0) && (nchannels && (nchannels <= 2))) { ALOGV("LPAPlayer::getObjectsAlive() %d",LPAPlayer::objectsAlive); - if ( mDurationUs > 60000000 + if ( mDurationUs > minDurationForLPA && (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG) || !strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_AAC)) && LPAPlayer::objectsAlive == 0 && mVideoSource == NULL) { ALOGD("LPAPlayer created, LPA MODE detected mime %s duration %lld", mime, mDurationUs); @@ -1046,6 +1066,12 @@ status_t AwesomePlayer::play_l() { mTimeSource = &mSystemTimeSource; } + { + Mutex::Autolock autoLock(mStatsLock); + mStats.mFirstFrameLatencyStartUs = getTimeOfDayUs(); + mStats.mVeryFirstFrame = true; + } + if (mVideoSource != NULL) { // Kick off video playback postVideoEvent_l(); @@ -1269,6 +1295,11 @@ status_t AwesomePlayer::pause_l(bool at_eos) { Playback::PAUSE, 0); } + if(!(mFlags & AT_EOS)){ + Mutex::Autolock autoLock(mStatsLock); + mStats.mLastPausedTimeMs = mVideoTimeUs/1000; + } + uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder; if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) { params |= IMediaPlayerService::kBatteryDataTrackAudio; @@ -1429,6 +1460,12 @@ status_t AwesomePlayer::seekTo_l(int64_t timeUs) { } mSeeking = SEEK; + + { + Mutex::Autolock autoLock(mStatsLock); + mStats.mFirstFrameLatencyStartUs = getTimeOfDayUs(); + mStats.mVeryFirstFrame = true; + } mSeekNotificationSent = false; mSeekTimeUs = timeUs; modifyFlags((AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS), CLEAR); @@ -1496,7 +1533,12 @@ status_t AwesomePlayer::initAudioDecoder() { CHECK(meta->findCString(kKeyMIMEType, &mime)); #ifdef QCOM_ENHANCED_AUDIO int32_t nchannels = 0; + int32_t isADTS = 0; meta->findInt32( kKeyChannelCount, &nchannels ); + meta->findInt32(kKeyIsADTS, &isADTS); + if(isADTS == 1){ + ALOGV("Widevine content\n"); + } ALOGV("nchannels %d;LPA will be skipped if nchannels is > 2 or 
nchannels == 0", nchannels); @@ -1510,7 +1552,8 @@ status_t AwesomePlayer::initAudioDecoder() { mime, (TunnelPlayer::mTunnelObjectsAlive), mTunnelAliveAP); if(((strcmp("true",tunnelDecode) == 0)||(atoi(tunnelDecode))) && (TunnelPlayer::mTunnelObjectsAlive == 0) && - mTunnelAliveAP == 0 && + //widevine will fallback to software decoder + mTunnelAliveAP == 0 && (isADTS == 0) && mAudioSink->realtime() && ((!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) || (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) || (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS)) || @@ -1532,6 +1575,12 @@ else ALOGD("Normal Audio Playback"); #endif + + if (isStreamingHTTP()) { + ALOGV("Streaming, force disable tunnel mode playback"); + mIsTunnelAudio = false; + } + if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW) || (mIsTunnelAudio && (mTunnelAliveAP == 0))) { ALOGD("Set Audio Track as Audio Source"); @@ -1549,14 +1598,22 @@ int64_t durationUs; uint32_t flags = 0; char lpaDecode[128]; + uint32_t minDurationForLPA = LPA_MIN_DURATION_USEC_DEFAULT; + char minUserDefDuration[PROPERTY_VALUE_MAX]; property_get("lpa.decode",lpaDecode,"0"); + property_get("lpa.min_duration",minUserDefDuration,"60000000"); + minDurationForLPA = atoi(minUserDefDuration); + if(minDurationForLPA < LPA_MIN_DURATION_USEC_ALLOWED) { + ALOGE("LPAPlayer::Clip duration setting of less than 30sec not supported, defaulting to 60sec"); + minDurationForLPA = LPA_MIN_DURATION_USEC_DEFAULT; + } if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) { Mutex::Autolock autoLock(mMiscStateLock); if (mDurationUs < 0 || durationUs > mDurationUs) { mDurationUs = durationUs; } } - if ( mDurationUs > 60000000 + if ( mDurationUs > minDurationForLPA && (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG) || !strcasecmp(mime,MEDIA_MIMETYPE_AUDIO_AAC)) && LPAPlayer::objectsAlive == 0 && mVideoSource == NULL && 
(strcmp("true",lpaDecode) == 0) && (nchannels && (nchannels <= 2)) ) { @@ -1771,6 +1828,12 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) { mDrmManagerClient->setPlaybackStatus(mDecryptHandle, Playback::START, videoTimeUs / 1000); } + + { + Mutex::Autolock autoLock(mStatsLock); + mStats.mLastSeekToTimeMs = mSeekTimeUs/1000; + logFirstFrame(); + } } void AwesomePlayer::onVideoEvent() { @@ -1783,6 +1846,14 @@ void AwesomePlayer::onVideoEvent() { } mVideoEventPending = false; + { + Mutex::Autolock autoLock(mStatsLock); + if(!mStats.mVeryFirstFrame && mSeeking == NO_SEEK){ + mStats.mTotalTimeUs += getTimeOfDayUs() - mStats.mLastFrameUs; + } + mStats.mLastFrameUs = getTimeOfDayUs(); + } + if (mSeeking != NO_SEEK) { if (mVideoBuffer) { mVideoBuffer->release(); @@ -1916,18 +1987,26 @@ void AwesomePlayer::onVideoEvent() { modifyFlags(FIRST_FRAME, CLEAR); mSinceLastDropped = 0; mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs; + + { + Mutex::Autolock autoLock(mStatsLock); + if(mStats.mVeryFirstFrame){ + logFirstFrame(); + mStats.mLastFrameUs = getTimeOfDayUs(); + } + } } - int64_t realTimeUs, mediaTimeUs; + int64_t realTimeUs, mediaTimeUs, nowUs = 0, latenessUs = 0; if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) { mTimeSourceDeltaUs = realTimeUs - mediaTimeUs; } if (wasSeeking == SEEK_VIDEO_ONLY) { - int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs; + nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs; - int64_t latenessUs = nowUs - timeUs; + latenessUs = nowUs - timeUs; ATRACE_INT("Video Lateness (ms)", latenessUs / 1E3); @@ -1939,9 +2018,9 @@ void AwesomePlayer::onVideoEvent() { if (wasSeeking == NO_SEEK) { // Let's display the first frame after seeking right away. 
- int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs; + nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs; - int64_t latenessUs = nowUs - timeUs; + latenessUs = nowUs - timeUs; ATRACE_INT("Video Lateness (ms)", latenessUs / 1E3); @@ -1988,6 +2067,11 @@ void AwesomePlayer::onVideoEvent() { { Mutex::Autolock autoLock(mStatsLock); ++mStats.mNumVideoFramesDropped; + mStats.mConsecutiveFramesDropped++; + if (mStats.mConsecutiveFramesDropped == 1){ + mStats.mCatchupTimeStart = mTimeSource->getRealTimeUs(); + } + if(!(mFlags & AT_EOS)) logLate(timeUs,nowUs,latenessUs); } postVideoEvent_l(); @@ -1997,6 +2081,11 @@ void AwesomePlayer::onVideoEvent() { if (latenessUs < -10000) { // We're more than 10ms early. + logOnTime(timeUs,nowUs,latenessUs); + { + Mutex::Autolock autoLock(mStatsLock); + mStats.mConsecutiveFramesDropped = 0; + } postVideoEvent_l(10000); return; } @@ -2017,6 +2106,12 @@ void AwesomePlayer::onVideoEvent() { notifyListener_l(MEDIA_INFO, MEDIA_INFO_RENDERING_START); } + { + Mutex::Autolock autoLock(mStatsLock); + logOnTime(timeUs,nowUs,latenessUs); + mStats.mTotalFrames++; + mStats.mConsecutiveFramesDropped = 0; + } } mVideoBuffer->release(); @@ -2814,13 +2909,37 @@ status_t AwesomePlayer::dump(int fd, const Vector<String16> &args) const { if ((ssize_t)i == mStats.mVideoTrackIndex) { fprintf(out, - " videoDimensions(%d x %d), " - "numVideoFramesDecoded(%lld), " - "numVideoFramesDropped(%lld)\n", + " videoDimensions(%d x %d)\n" + " Total Video Frames Decoded(%lld)\n" + " Total Video Frames Rendered(%lld)\n" + " Total Playback Duration(%lld ms)\n" + " numVideoFramesDropped(%lld)\n" + " Average Frames Per Second(%.4f)\n" + " Last Seek To Time(%lld ms)\n" + " Last Paused Time(%lld ms)\n" + " First Frame Latency (%lld ms)\n" + " Number of times AV Sync Lost(%u)\n" + " Max Video Ahead Time Delta(%u)\n" + " Max Video Behind Time Delta(%u)\n" + " Max Time Sync Loss(%u)\n" + " EOS(%d)\n" + " PLAYING(%d)\n", mStats.mVideoWidth, mStats.mVideoHeight, 
mStats.mNumVideoFramesDecoded, - mStats.mNumVideoFramesDropped); + mStats.mTotalFrames, + mStats.mTotalTimeUs/1000, + mStats.mNumVideoFramesDropped, + ((double)(mStats.mTotalFrames)*1E6)/((double)mStats.mTotalTimeUs), + mStats.mLastSeekToTimeMs, + mStats.mLastPausedTimeMs, + mStats.mFirstFrameLatencyUs/1000, + mStats.mNumTimesSyncLoss, + -mStats.mMaxEarlyDelta/1000, + mStats.mMaxLateDelta/1000, + mStats.mMaxTimeSyncLoss/1000, + (mFlags & AT_EOS) > 0, + (mFlags & PLAYING) > 0); } } @@ -2854,4 +2973,48 @@ void AwesomePlayer::modifyFlags(unsigned value, FlagMode mode) { } } +inline void AwesomePlayer::logFirstFrame() { + mStats.mFirstFrameLatencyUs = getTimeOfDayUs()-mStats.mFirstFrameLatencyStartUs; + mStats.mVeryFirstFrame = false; +} + +inline void AwesomePlayer::logCatchUp(int64_t ts, int64_t clock, int64_t delta) +{ + if (mStats.mConsecutiveFramesDropped > 0) { + mStats.mNumTimesSyncLoss++; + if (mStats.mMaxTimeSyncLoss < (clock - mStats.mCatchupTimeStart) && clock > 0 && ts > 0) { + mStats.mMaxTimeSyncLoss = clock - mStats.mCatchupTimeStart; + } + } +} + +inline void AwesomePlayer::logLate(int64_t ts, int64_t clock, int64_t delta) +{ + if (mStats.mMaxLateDelta < delta && clock > 0 && ts > 0) { + mStats.mMaxLateDelta = delta; + } +} + +inline void AwesomePlayer::logOnTime(int64_t ts, int64_t clock, int64_t delta) +{ + bool needLogLate = false; + logCatchUp(ts, clock, delta); + if (delta <= 0) { + if ((-delta) > (-mStats.mMaxEarlyDelta) && clock > 0 && ts > 0) { + mStats.mMaxEarlyDelta = delta; + } + } + else { + needLogLate = true; + } + + if(needLogLate) logLate(ts, clock, delta); +} + +inline int64_t AwesomePlayer::getTimeOfDayUs() { + struct timeval tv; + gettimeofday(&tv, NULL); + + return (int64_t)tv.tv_sec * 1000000 + tv.tv_usec; +} } // namespace android diff --git a/media/libstagefright/QCMediaDefs.cpp b/media/libstagefright/QCMediaDefs.cpp index ec2d04e..5e8b84f 100644 --- a/media/libstagefright/QCMediaDefs.cpp +++ b/media/libstagefright/QCMediaDefs.cpp 
@@ -1,4 +1,4 @@ -/*Copyright (c) 2012, The Linux Foundation. All rights reserved. +/*Copyright (c) 2012 - 2013, The Linux Foundation. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are @@ -50,6 +50,6 @@ const char *MEDIA_MIMETYPE_AUDIO_DTS = "audio/dts"; const char *MEDIA_MIMETYPE_AUDIO_DTS_LBR = "audio/dts-lbr"; const char *MEDIA_MIMETYPE_AUDIO_EAC3 = "audio/eac3"; const char *MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS = "audio/amr-wb-plus"; - +const char *MEDIA_MIMETYPE_CONTAINER_QCMPEG2TS = "video/qc-mp2ts"; } // namespace android diff --git a/media/libstagefright/TunnelPlayer.cpp b/media/libstagefright/TunnelPlayer.cpp index fa2dc06..475cf56 100644 --- a/media/libstagefright/TunnelPlayer.cpp +++ b/media/libstagefright/TunnelPlayer.cpp @@ -1,6 +1,6 @@ /* * Copyright (C) 2009 The Android Open Source Project - * Copyright (c) 2009-2012, The Linux Foundation. All rights reserved. + * Copyright (c) 2009-2013, The Linux Foundation. All rights reserved. * Not a Contribution, Apache license notifications and license are retained * for attribution purposes only. 
* @@ -50,8 +50,10 @@ static const char mName[] = "TunnelPlayer"; #define MEM_METADATA_SIZE 64 -#define MEM_BUFFER_SIZE (600*1024 - MEM_METADATA_SIZE) +#define MEM_PADDING 64 +#define MEM_BUFFER_SIZE (256*1024 - MEM_METADATA_SIZE) #define MEM_BUFFER_COUNT 4 +#define TUNNEL_BUFFER_TIME 1500000 namespace android { int TunnelPlayer::mTunnelObjectsAlive = 0; @@ -100,6 +102,7 @@ mObserver(observer) { //mAudioFlinger->registerClient(mAudioFlingerClient); mSeekTimeUs = 0; + mIsAudioRouted = false; mHasVideo = hasVideo; initCheck = true; @@ -265,7 +268,7 @@ status_t TunnelPlayer::start(bool sourceAlreadyStarted) { CHECK(!mStarted); CHECK(mSource != NULL); - ALOGD("start: sourceAlreadyStarted %d", sourceAlreadyStarted); + ALOGV("start: sourceAlreadyStarted %d", sourceAlreadyStarted); //Check if the source is started, start it status_t err; if (!sourceAlreadyStarted) { @@ -363,7 +366,7 @@ status_t TunnelPlayer::start(bool sourceAlreadyStarted) { mIsAudioRouted = true; mStarted = true; mAudioSink->start(); - ALOGV("Waking up decoder thread"); + ALOGV("Waking up extractor thread"); pthread_cond_signal(&extractor_cv); return OK; @@ -372,16 +375,44 @@ status_t TunnelPlayer::start(bool sourceAlreadyStarted) { status_t TunnelPlayer::seekTo(int64_t time_us) { ALOGV("seekTo: time_us %lld", time_us); - if ( mReachedEOS ) { - mReachedEOS = false; - mReachedOutputEOS = false; + + if (mPositionTimeRealUs != 0) { + //check for return conditions only if seektime + // is set + if (time_us > mPositionTimeRealUs){ + if((time_us - mPositionTimeRealUs) < TUNNEL_BUFFER_TIME){ + ALOGV("In seekTo(), ignoring time_us %lld mSeekTimeUs %lld", time_us, mSeekTimeUs); + mObserver->postAudioSeekComplete(); + return OK; + } + } else { + if((mPositionTimeRealUs - time_us) < TUNNEL_BUFFER_TIME){ + ALOGV("In seekTo(), ignoring time_us %lld mSeekTimeUs %lld", time_us, mSeekTimeUs); + mObserver->postAudioSeekComplete(); + return OK; + } + } } + mSeeking = true; mSeekTimeUs = time_us; + mPauseTime = 
mSeekTimeUs; ALOGV("In seekTo(), mSeekTimeUs %lld",mSeekTimeUs); - mAudioSink->flush(); - pthread_cond_signal(&extractor_cv); - //TODO: Update the mPauseTime + + if (mIsAudioRouted) { + mAudioSink->flush(); + } + + if (mReachedEOS) { + mReachedEOS = false; + mReachedOutputEOS = false; + if(mPaused == false) { + ALOGV("Going to signal extractor thread since playback is already going on "); + pthread_cond_signal(&extractor_cv); + ALOGV("Signalled extractor thread."); + } + } + ALOGV("seek done."); return OK; } void TunnelPlayer::pause(bool playPendingSamples) { @@ -407,6 +438,7 @@ void TunnelPlayer::pause(bool playPendingSamples) { } void TunnelPlayer::resume() { + Mutex::Autolock autoLock(mLock); ALOGV("resume: mPaused %d",mPaused); if ( mPaused) { CHECK(mStarted); @@ -435,8 +467,11 @@ void TunnelPlayer::resume() { mIsAudioRouted = true; } mPaused = false; + ALOGV("Audio sink open succeeded."); mAudioSink->start(); + ALOGV("Audio sink start succeeded."); pthread_cond_signal(&extractor_cv); + ALOGV("Audio signalling extractor thread."); } } @@ -446,15 +481,20 @@ size_t TunnelPlayer::AudioSinkCallback( void *buffer, size_t size, void *cookie) { if (buffer == NULL && size == AudioTrack::EVENT_UNDERRUN) { TunnelPlayer *me = (TunnelPlayer *)cookie; - me->mReachedEOS = true; - me->mReachedOutputEOS = true; - ALOGV("postAudioEOS"); - me->mObserver->postAudioEOS(0); + if(me->mReachedEOS == true) { + //in the case of seek all these flags will be reset + me->mReachedOutputEOS = true; + ALOGV("postAudioEOS mSeeking %d", me->mSeeking); + me->mObserver->postAudioEOS(0); + }else { + ALOGV("postAudioEOS ignored since %d", me->mSeeking); + } } return 1; } void TunnelPlayer::reset() { + ALOGV("Reset"); mReachedEOS = true; @@ -482,7 +522,8 @@ void TunnelPlayer::reset() { mInputBuffer = NULL; } - mSource->stop(); + if(mStarted) + mSource->stop(); // The following hack is necessary to ensure that the OMX // component is completely released by the time we may try @@ -529,7 +570,7 @@ 
void TunnelPlayer::extractorThreadEntry() { pid_t tid = gettid(); androidSetThreadPriority(tid, ANDROID_PRIORITY_AUDIO); - prctl(PR_SET_NAME, (unsigned long)"Tunnel DecodeThread", 0, 0, 0); + prctl(PR_SET_NAME, (unsigned long)"Extractor Thread", 0, 0, 0); ALOGV("extractorThreadEntry wait for signal \n"); if (!mStarted) { @@ -545,26 +586,79 @@ void TunnelPlayer::extractorThreadEntry() { const char *mime; bool success = format->findCString(kKeyMIMEType, &mime); } - void* local_buf = malloc(BufferSizeToUse); + void* local_buf = malloc(BufferSizeToUse + MEM_PADDING); + int *lptr = ((int*)local_buf); int bytesWritten = 0; + bool lSeeking = false; + bool lPaused = false; while (!killExtractorThread) { if (mReachedEOS || mPaused || !mIsAudioRouted) { + ALOGV("Going to sleep before write since " + "mReachedEOS %d, mPaused %d, mIsAudioRouted %d", + mReachedEOS, mPaused, mIsAudioRouted); pthread_mutex_lock(&extractor_mutex); pthread_cond_wait(&extractor_cv, &extractor_mutex); pthread_mutex_unlock(&extractor_mutex); + ALOGV("Woke up from sleep before write since " + "mReachedEOS %d, mPaused %d, mIsAudioRouted %d", + mReachedEOS, mPaused, mIsAudioRouted); continue; } if (!mIsA2DPEnabled) { - ALOGW("FillBuffer: MemBuffer size %d", BufferSizeToUse); + ALOGV("FillBuffer: MemBuffer size %d", BufferSizeToUse); ALOGV("Fillbuffer started"); bytesWritten = fillBuffer(local_buf, BufferSizeToUse); ALOGV("FillBuffer completed bytesToWrite %d", bytesWritten); if(!killExtractorThread) { - mAudioSink->write(local_buf, bytesWritten); - if(mReachedEOS && bytesWritten) - mAudioSink->write(local_buf, 0); + mLock.lock(); + lPaused = mPaused; + mLock.unlock(); + + if(lPaused == true) { + //write only if player is not in paused state. 
Sleep on lock + // resume is called + ALOGV("Going to sleep in decodethreadiwrite since sink is paused"); + pthread_mutex_lock(&extractor_mutex); + pthread_cond_wait(&extractor_cv, &extractor_mutex); + ALOGV("Going to unlock n decodethreadwrite since sink " + "resumed mPaused %d, mIsAudioRouted %d, mReachedEOS %d", + mPaused, mIsAudioRouted, mReachedEOS); + pthread_mutex_unlock(&extractor_mutex); + } + mLock.lock(); + lSeeking = mSeeking||mInternalSeeking; + mLock.unlock(); + + if(lSeeking == false && (killExtractorThread == false)){ + //if we are seeking, ignore write, otherwise write + ALOGV("Fillbuffer before write %d and seek flag %d", mSeeking, + lptr[MEM_BUFFER_SIZE/sizeof(int)]); + int lWrittenBytes = mAudioSink->write(local_buf, bytesWritten); + ALOGV("Fillbuffer after write, written bytes %d and seek flag %d", lWrittenBytes, mSeeking); + if(lWrittenBytes > 0) { + //send EOS only if write was successful, if is_buffer_available + // is flushed out (which returns 0 do not SEND EOS + ALOGV("Fillbuffer after write and seek flag %d", mSeeking); + mLock.lock(); + lSeeking = mSeeking||mInternalSeeking; + mLock.unlock(); + //ignore posting zero length buffer is seeking is set + if(mReachedEOS && bytesWritten && !lSeeking && (killExtractorThread == false)) { + ALOGV("Fillbuffer after write sent EOS flag %d", lSeeking); + mAudioSink->write(local_buf, 0); + } else { + ALOGV("Not sending EOS buffer sent since seeking %d, " + "kill %d and mReachedEOS %d", \ + lSeeking, killExtractorThread, mReachedEOS); + } + } else { + ALOGV("write exited because of flush %d", mSeeking); + } + } else { + ALOGV("Fillbuffer ignored since we seeked after fillBuffer was set %d", mSeeking); + } } } } @@ -588,7 +682,7 @@ void TunnelPlayer::createThreads() { extractorThreadAlive = true; - ALOGV("Creating decoder Thread"); + ALOGV("Creating Extractor Thread"); pthread_create(&extractorThread, &attr, extractorThreadWrapper, this); pthread_attr_destroy(&attr); @@ -603,6 +697,10 @@ size_t 
TunnelPlayer::fillBuffer(void *data, size_t size) { size_t size_done = 0; size_t size_remaining = size; + int *ldataptr = (int*) data; + //clear the flag since we dont know whether we are seeking or not, yet + ldataptr[(MEM_BUFFER_SIZE/sizeof(int))] = 0; + ALOGV("fillBuffer: Clearing seek flag in fill buffer"); while (size_remaining > 0) { MediaSource::ReadOptions options; @@ -635,10 +733,13 @@ size_t TunnelPlayer::fillBuffer(void *data, size_t size) { mSeeking = false; if (mObserver && !mInternalSeeking) { - ALOGD("fillBuffer: Posting audio seek complete event"); + ALOGV("fillBuffer: Posting audio seek complete event"); postSeekComplete = true; } mInternalSeeking = false; + ALOGV("fillBuffer: Setting seek flag in fill buffer"); + //set the flag since we know that this buffer is the new positions buffer + ldataptr[(MEM_BUFFER_SIZE/sizeof(int))] = 1; } } if (mInputBuffer == NULL) { @@ -708,6 +809,7 @@ int64_t TunnelPlayer::getRealTimeUs() { } void TunnelPlayer::getPlayedTimeFromDSP_l(int64_t* timeStamp ) { + ALOGV("going to query timestamp"); mAudioSink->getTimeStamp((uint64_t*)timeStamp); ALOGV("timestamp returned from DSP %lld ", (*timeStamp)); return; @@ -744,15 +846,25 @@ void TunnelPlayer::requestAndWaitForExtractorThreadExit() { if (!extractorThreadAlive) return; - if (mIsAudioRouted) - mAudioSink->flush(); + killExtractorThread = true; + + ALOGV("requestAndWaitForExtractorThreadExit +0"); + if (mIsAudioRouted && !mReachedOutputEOS) { + mAudioSink->flush(); + } + + ALOGV("requestAndWaitForExtractorThreadExit +1"); pthread_cond_signal(&extractor_cv); + ALOGV("requestAndWaitForExtractorThreadExit +2"); pthread_join(extractorThread,NULL); - ALOGD("Extractor thread killed"); + ALOGV("requestAndWaitForExtractorThreadExit +3"); + + ALOGV("Extractor thread killed"); } void TunnelPlayer::onPauseTimeOut() { + Mutex::Autolock autoLock(mLock); int64_t playedTime = 0; ALOGV("onPauseTimeOut"); if (!mPauseEventPending) { @@ -765,17 +877,23 @@ void 
TunnelPlayer::onPauseTimeOut() { mReachedOutputEOS = false; if(mSeeking == false) { + ALOGV("onPauseTimeOut +2"); mInternalSeeking = true; - mLock.lock(); + ALOGV("onPauseTimeOut +3"); getPlayedTimeFromDSP_l(&playedTime); - mLock.unlock(); mSeekTimeUs += playedTime; } else { ALOGV("Do not update seek time if it was seeked before onpause timeout"); } // 2.) Close routing Session + ALOGV("onPauseTimeOut +4"); + mAudioSink->flush(); + ALOGV("onPauseTimeOut +5"); + mAudioSink->stop(); + ALOGV("onPauseTimeOut +6"); mAudioSink->close(); + ALOGV("onPauseTimeOut +7"); mIsAudioRouted = false; // 3.) Release Wake Lock diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 107c5da..a5586dd 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -1,5 +1,6 @@ /* * Copyright (C) 2009 The Android Open Source Project + * Copyright (c) 2013, The Linux Foundation. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
@@ -303,6 +304,12 @@ private: ASSIGN }; void modifyFlags(unsigned value, FlagMode mode); + void logFirstFrame(); + void logCatchUp(int64_t ts, int64_t clock, int64_t delta); + void logLate(int64_t ts, int64_t clock, int64_t delta); + void logOnTime(int64_t ts, int64_t clock, int64_t delta); + int64_t getTimeOfDayUs(); + bool mStatistics; struct TrackStat { String8 mMIME; @@ -328,6 +335,21 @@ private: int32_t mVideoHeight; uint32_t mFlags; Vector<TrackStat> mTracks; + + int64_t mConsecutiveFramesDropped; + uint32_t mCatchupTimeStart; + uint32_t mNumTimesSyncLoss; + uint32_t mMaxEarlyDelta; + uint32_t mMaxLateDelta; + uint32_t mMaxTimeSyncLoss; + uint64_t mTotalFrames; + int64_t mFirstFrameLatencyStartUs; //first frame latency start + int64_t mFirstFrameLatencyUs; + int64_t mLastFrameUs; + bool mVeryFirstFrame; + int64_t mTotalTimeUs; + int64_t mLastPausedTimeMs; + int64_t mLastSeekToTimeMs; } mStats; status_t setVideoScalingMode(int32_t mode); diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 724b1e3..12cfe9d 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -6166,12 +6166,13 @@ AudioFlinger::DirectAudioTrack::~DirectAudioTrack() { mAudioFlinger->deleteEffectSession(); deallocateBufPool(); } + AudioSystem::releaseOutput(mOutput); releaseWakeLock(); + if (mPowerManager != 0) { sp<IBinder> binder = mPowerManager->asBinder(); binder->unlinkToDeath(mDeathRecipient); } - AudioSystem::releaseOutput(mOutput); } status_t AudioFlinger::DirectAudioTrack::start() { |