Diffstat (limited to 'media/libstagefright/CameraSource.cpp')
-rw-r--r-- media/libstagefright/CameraSource.cpp | 70
1 file changed, 67 insertions(+), 3 deletions(-)
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 66280da..e2ad924 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -33,6 +33,8 @@
#include <utils/String8.h>
#include <cutils/properties.h>
+#include <stagefright/AVExtensions.h>
+
#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
@@ -98,6 +100,11 @@ void CameraSourceListener::postDataTimestamp(
}
static int32_t getColorFormat(const char* colorFormat) {
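+ // Guard against a NULL format string (e.g. a key missing from the
+ // CameraParameters) before the strcmp() calls below dereference it.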
+ if (!colorFormat) {
+ ALOGE("Invalid color format");
+ return -1;
+ }
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
return OMX_COLOR_FormatYUV420Planar;
}
@@ -107,9 +114,21 @@ static int32_t getColorFormat(const char* colorFormat) {
}
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
+#ifdef USE_SAMSUNG_COLORFORMAT
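+ // Vendor-specific value in the OMX extension range (above
+ // OMX_COLOR_FormatVendorStartUnused), matching Samsung's OMX headers.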
+ static const int OMX_SEC_COLOR_FormatNV12LPhysicalAddress = 0x7F000002;
+ return OMX_SEC_COLOR_FormatNV12LPhysicalAddress;
+#else
return OMX_COLOR_FormatYUV420SemiPlanar;
+#endif
}
+#ifdef USE_SAMSUNG_CAMERAFORMAT_NV21
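+ // Samsung HALs report NV21 recording data under a separate
+ // pixel-format key; map it to the matching vendor OMX format.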
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP_NV21)) {
+ static const int OMX_SEC_COLOR_FormatNV21Linear = 0x7F000011;
+ return OMX_SEC_COLOR_FormatNV21Linear;
+ }
+#endif /* USE_SAMSUNG_CAMERAFORMAT_NV21 */
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
return OMX_COLOR_FormatYCbYCr;
}
@@ -126,6 +145,10 @@ static int32_t getColorFormat(const char* colorFormat) {
return OMX_COLOR_FormatAndroidOpaque;
}
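+ // QCOM variant with swapped chroma order; some HALs report it as the
+ // raw string "YVU420SemiPlanar" rather than a CameraParameters constant.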
+ if (!strcmp(colorFormat, "YVU420SemiPlanar")) {
+ return OMX_QCOM_COLOR_FormatYVU420SemiPlanar;
+ }
+
ALOGE("Uknown color format (%s), please add it to "
"CameraSource::getColorFormat", colorFormat);
@@ -185,7 +208,11 @@ CameraSource::CameraSource(
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false) {
+ mCollectStats(false),
+ mPauseAdjTimeUs(0),
+ mPauseStartTimeUs(0),
+ mPauseEndTimeUs(0),
+ mRecPause(false) {
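+ // mRecPause and the mPause*TimeUs members track recording
+ // pause/resume state.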
mVideoSize.width = -1;
mVideoSize.height = -1;
@@ -575,6 +602,8 @@ status_t CameraSource::initWithCameraAccess(
mMeta->setInt32(kKeyStride, mVideoSize.width);
mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
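+ // Let the AVExtensions layer copy any vendor-specific camera keys
+ // from the parameters into the track metadata.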
+ AVUtils::get()->extractCustomCameraKeys(params, mMeta);
+
return OK;
}
@@ -641,6 +670,14 @@ status_t CameraSource::startCameraRecording() {
status_t CameraSource::start(MetaData *meta) {
ALOGV("start");
+ if (mRecPause) {
+ mRecPause = false;
+ mPauseAdjTimeUs = mPauseEndTimeUs - mPauseStartTimeUs;
+ ALOGV("resume : mPause Adj / End / Start : %" PRId64 " / %" PRId64 " / %" PRId64" us",
+ mPauseAdjTimeUs, mPauseEndTimeUs, mPauseStartTimeUs);
+ return OK;
+ }
+
CHECK(!mStarted);
if (mInitCheck != OK) {
ALOGE("CameraSource is not initialized yet");
@@ -654,6 +691,10 @@ status_t CameraSource::start(MetaData *meta) {
}
mStartTimeUs = 0;
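+ // Fresh start (not a resume): clear all pause bookkeeping.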
+ mRecPause = false;
+ mPauseAdjTimeUs = 0;
+ mPauseStartTimeUs = 0;
+ mPauseEndTimeUs = 0;
mNumInputBuffers = 0;
mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
mEncoderDataSpace = HAL_DATASPACE_BT709;
@@ -687,6 +728,16 @@ status_t CameraSource::start(MetaData *meta) {
return err;
}
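+// pause() only flags the state: frames arriving while paused are dropped in
+// dataCallbackTimestamp(), and a subsequent start() performs the resume.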
+status_t CameraSource::pause() {
+ mRecPause = true;
+ mPauseStartTimeUs = mLastFrameTimestampUs;
+ // Record the end time here as well; otherwise, if no frame arrives while
+ // paused, mPauseEndTimeUs could remain 0.
+ mPauseEndTimeUs = mLastFrameTimestampUs;
+ ALOGV("pause : mPauseStart %" PRId64 " us, #Queued Frames : %zd",
+ mPauseStartTimeUs, mFramesReceived.size());
+ return OK;
+}
+
void CameraSource::stopCameraRecording() {
ALOGV("stopCameraRecording");
if (mCameraFlags & FLAGS_HOT_CAMERA) {
@@ -888,10 +939,23 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
return;
}
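+ // While paused, drop everything: flush frames already queued for the
+ // encoder, release the current frame back to the camera, and keep
+ // extending mPauseEndTimeUs until recording resumes.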
+ if (mRecPause) {
+ if (!mFramesReceived.empty()) {
+ ALOGV("releaseQueuedFrames - #Queued Frames : %zu", mFramesReceived.size());
+ releaseQueuedFrames();
+ }
+ ALOGV("release One Video Frame for Pause : %" PRId64 "us", timestampUs);
+ releaseOneRecordingFrame(data);
+ mPauseEndTimeUs = timestampUs;
+ return;
+ }
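+ // Shift the timestamp back by the total time spent paused so the
+ // recorded stream stays continuous.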
+ timestampUs -= mPauseAdjTimeUs;
+ ALOGV("dataCallbackTimestamp: AdjTimestamp %" PRId64 "us", timestampUs);
+
if (mNumFramesReceived > 0) {
if (timestampUs <= mLastFrameTimestampUs) {
- ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
- (long long)timestampUs, (long long)mLastFrameTimestampUs);
+ ALOGW("Dropping frame with backward timestamp %" PRId64 " (last %" PRId64 ")",
+ timestampUs, mLastFrameTimestampUs);
releaseOneRecordingFrame(data);
return;
}