path: root/media/libstagefright/CameraSource.cpp
author    James Dong <jdong@google.com>    2010-06-19 09:04:18 -0700
committer James Dong <jdong@google.com>    2010-06-21 17:34:01 -0700
commit    f60cafe0e6aad8f9ce54660fa88b651ae4e749e6 (patch)
tree      eabfca8c6c979a1000f49efca5c33ab9039245ba /media/libstagefright/CameraSource.cpp
parent    e6de2667d6bf4bb7b926da6784cc7eb886b93e83 (diff)
download  frameworks_av-f60cafe0e6aad8f9ce54660fa88b651ae4e749e6.zip
          frameworks_av-f60cafe0e6aad8f9ce54660fa88b651ae4e749e6.tar.gz
          frameworks_av-f60cafe0e6aad8f9ce54660fa88b651ae4e749e6.tar.bz2
Audio/video sync during recording (second part)
Change-Id: Iba0b35f57fdeac7ee1da16899406bf4b957a2c8c
Diffstat (limited to 'media/libstagefright/CameraSource.cpp')
-rw-r--r--  media/libstagefright/CameraSource.cpp  56
1 file changed, 46 insertions, 10 deletions
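
For context, the patch offsets each video frame's timestamp by the recording start time passed in through MetaData, so the video track lines up with the audio track. Below is a minimal standalone sketch of that timestamp math; TimestampState and computeFrameTimeUs() are illustrative names, not part of the patch.

// Illustrative only: mirrors the timestamp handling added in dataCallbackTimestamp().
#include <cstdint>
#include <cstdio>

struct TimestampState {
    int64_t startTimeUs;       // requested start time; becomes the initial delay
    int64_t firstFrameTimeUs;  // timestamp of the first accepted frame
    bool haveFirstFrame;
};

// Returns the adjusted media timestamp for a camera frame, or -1 if the frame
// arrived before recording started and should be dropped.
static int64_t computeFrameTimeUs(TimestampState &s, int64_t timestampUs) {
    if (!s.haveFirstFrame) {
        if (s.startTimeUs > 0 && timestampUs < s.startTimeUs) {
            return -1;  // captured before the start of recording: drop it
        }
        s.firstFrameTimeUs = timestampUs;
        if (s.startTimeUs > 0) {
            // Keep the gap between the requested start time and the first
            // frame as an initial delay, so audio and video stay aligned.
            s.startTimeUs = timestampUs - s.startTimeUs;
        }
        s.haveFirstFrame = true;
    }
    return s.startTimeUs + (timestampUs - s.firstFrameTimeUs);
}

int main() {
    TimestampState s = { 100000, 0, false };                      // recording requested at 100 ms
    printf("%lld\n", (long long) computeFrameTimeUs(s, 130000));  // first frame at 130 ms -> 30000
    printf("%lld\n", (long long) computeFrameTimeUs(s, 163000));  // next frame -> 63000
    return 0;
}

With a requested start of 100 ms and a first frame at 130 ms, the 30 ms gap is preserved as the initial delay and every later frame is stamped relative to it, matching the mStartTimeUs / mFirstFrameTimeUs logic in the diff below.
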
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 0ab76b3..6f4c980 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -123,6 +123,8 @@ CameraSource::CameraSource(const sp<Camera> &camera)
mNumFramesReceived(0),
mNumFramesEncoded(0),
mNumFramesDropped(0),
+ mNumGlitches(0),
+ mGlitchDurationThresholdUs(200000),
mCollectStats(false),
mStarted(false) {
@@ -136,6 +138,13 @@ CameraSource::CameraSource(const sp<Camera> &camera)
CameraParameters params(s);
params.getPreviewSize(&width, &height);
+ // Calculate glitch duration threshold based on frame rate
+ int32_t frameRate = params.getPreviewFrameRate();
+ int64_t glitchDurationUs = (1000000LL / frameRate);
+ if (glitchDurationUs > mGlitchDurationThresholdUs) {
+ mGlitchDurationThresholdUs = glitchDurationUs;
+ }
+
const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
CHECK(colorFormatStr != NULL);
int32_t colorFormat = getColorFormat(colorFormatStr);
@@ -161,8 +170,7 @@ CameraSource::~CameraSource() {
}
}
-status_t CameraSource::start(MetaData *) {
- LOGV("start");
+status_t CameraSource::start(MetaData *meta) {
CHECK(!mStarted);
char value[PROPERTY_VALUE_MAX];
@@ -171,6 +179,12 @@ status_t CameraSource::start(MetaData *) {
mCollectStats = true;
}
+ mStartTimeUs = 0;
+ int64_t startTimeUs;
+ if (meta && meta->findInt64(kKeyTime, &startTimeUs)) {
+ mStartTimeUs = startTimeUs;
+ }
+
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(new CameraSourceListener(this));
CHECK_EQ(OK, mCamera->startRecording());
@@ -222,16 +236,19 @@ sp<MetaData> CameraSource::getFormat() {
return mMeta;
}
+void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ mCamera->releaseRecordingFrame(frame);
+ IPCThreadState::self()->restoreCallingIdentity(token);
+}
+
void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
LOGV("signalBufferReturned: %p", buffer->data());
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame((*it));
- IPCThreadState::self()->restoreCallingIdentity(token);
-
+ releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
buffer->setObserver(0);
@@ -285,22 +302,41 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
LOGV("dataCallbackTimestamp: timestamp %lld us", timestampUs);
Mutex::Autolock autoLock(mLock);
if (!mStarted) {
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(data);
- IPCThreadState::self()->restoreCallingIdentity(token);
+ releaseOneRecordingFrame(data);
++mNumFramesReceived;
++mNumFramesDropped;
return;
}
+ if (mNumFramesReceived > 0 &&
+ timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) {
+ if (mNumGlitches % 10 == 0) { // Don't spam the log
+ LOGW("Long delay detected in video recording");
+ }
+ ++mNumGlitches;
+ }
+
mLastFrameTimestampUs = timestampUs;
if (mNumFramesReceived == 0) {
mFirstFrameTimeUs = timestampUs;
+ // Initial delay
+ if (mStartTimeUs > 0) {
+ if (timestampUs < mStartTimeUs) {
+ // Frame was captured before recording was started
+ // Drop it without updating the statistical data.
+ releaseOneRecordingFrame(data);
+ return;
+ }
+ mStartTimeUs = timestampUs - mStartTimeUs;
+ }
}
++mNumFramesReceived;
mFramesReceived.push_back(data);
- mFrameTimes.push_back(timestampUs - mFirstFrameTimeUs);
+ int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
+ mFrameTimes.push_back(timeUs);
+ LOGV("initial delay: %lld, current time stamp: %lld",
+ mStartTimeUs, timeUs);
mFrameAvailableCondition.signal();
}
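
The other half of the change is glitch bookkeeping: the inter-frame gap is compared against a threshold of at least 200 ms (or one frame interval, if that is longer), and only every tenth glitch is logged to avoid spamming the log. Below is a hedged sketch of that logic; GlitchDetector, initGlitchDetector() and onFrame() are illustrative names, not part of the patch.

// Illustrative only: mirrors the glitch bookkeeping added in this patch.
#include <cstdint>
#include <cstdio>
#include <algorithm>

struct GlitchDetector {
    int64_t thresholdUs;
    int64_t lastFrameTimestampUs;
    int64_t numFramesReceived;
    int64_t numGlitches;
};

// Threshold is at least 200 ms, or one frame interval if that is longer
// (which only happens for frame rates below 5 fps).
static void initGlitchDetector(GlitchDetector &g, int32_t frameRate) {
    g.thresholdUs = std::max<int64_t>(200000, 1000000LL / frameRate);
    g.lastFrameTimestampUs = 0;
    g.numFramesReceived = 0;
    g.numGlitches = 0;
}

static void onFrame(GlitchDetector &g, int64_t timestampUs) {
    if (g.numFramesReceived > 0 &&
        timestampUs - g.lastFrameTimestampUs > g.thresholdUs) {
        if (g.numGlitches % 10 == 0) {  // don't spam the log
            printf("Long delay detected in video recording\n");
        }
        ++g.numGlitches;
    }
    g.lastFrameTimestampUs = timestampUs;
    ++g.numFramesReceived;
}

int main() {
    GlitchDetector g;
    initGlitchDetector(g, 30);   // 30 fps -> threshold stays at 200 ms
    onFrame(g, 0);
    onFrame(g, 33000);           // normal 33 ms gap, no glitch
    onFrame(g, 300000);          // 267 ms gap, counted as a glitch
    printf("glitches: %lld\n", (long long) g.numGlitches);
    return 0;
}
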