Diffstat (limited to 'media/libstagefright/CameraSource.cpp')
 media/libstagefright/CameraSource.cpp | 107 ++++++++++++++++++++++++++++++-
 1 file changed, 103 insertions(+), 4 deletions(-)
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 66280da..f6b4741 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,12 +27,16 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
+#include <media/hardware/HardwareAPI.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <camera/ICameraRecordingProxy.h>
#include <gui/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
+#include <stagefright/AVExtensions.h>
+
#if LOG_NDEBUG
#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
#else
@@ -98,6 +102,11 @@ void CameraSourceListener::postDataTimestamp(
}
static int32_t getColorFormat(const char* colorFormat) {
+ if (!colorFormat) {
+ ALOGE("Invalid color format");
+ return -1;
+ }
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
return OMX_COLOR_FormatYUV420Planar;
}
@@ -107,8 +116,20 @@ static int32_t getColorFormat(const char* colorFormat) {
}
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP)) {
+#ifdef USE_SAMSUNG_COLORFORMAT
+ static const int OMX_SEC_COLOR_FormatNV12LPhysicalAddress = 0x7F000002;
+ return OMX_SEC_COLOR_FormatNV12LPhysicalAddress;
+#else
return OMX_COLOR_FormatYUV420SemiPlanar;
+#endif
+ }
+
+#ifdef USE_SAMSUNG_CAMERAFORMAT_NV21
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420SP_NV21)) {
+ static const int OMX_SEC_COLOR_FormatNV21Linear = 0x7F000011;
+ return OMX_SEC_COLOR_FormatNV21Linear;
}
+#endif /* USE_SAMSUNG_CAMERAFORMAT_NV21 */
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422I)) {
return OMX_COLOR_FormatYCbYCr;
@@ -126,6 +147,10 @@ static int32_t getColorFormat(const char* colorFormat) {
return OMX_COLOR_FormatAndroidOpaque;
}
+ if (!strcmp(colorFormat, "YVU420SemiPlanar")) {
+ return OMX_QCOM_COLOR_FormatYVU420SemiPlanar;
+ }
+
ALOGE("Uknown color format (%s), please add it to "
"CameraSource::getColorFormat", colorFormat);
@@ -185,7 +210,11 @@ CameraSource::CameraSource(
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false) {
+ mCollectStats(false),
+ mPauseAdjTimeUs(0),
+ mPauseStartTimeUs(0),
+ mPauseEndTimeUs(0),
+ mRecPause(false) {
mVideoSize.width = -1;
mVideoSize.height = -1;
@@ -575,6 +604,8 @@ status_t CameraSource::initWithCameraAccess(
mMeta->setInt32(kKeyStride, mVideoSize.width);
mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
+ AVUtils::get()->extractCustomCameraKeys(params, mMeta);
+
return OK;
}
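
The AVUtils::get()->extractCustomCameraKeys() call added above is a vendor-extension hook: a process-wide singleton whose default methods are no-ops, so generic code can call it unconditionally while a device build links in an override that copies extra CameraParameters into the track metadata. A rough sketch of that pattern, assuming only what the hunk shows (everything except the method name is illustrative):

    #include <camera/CameraParameters.h>
    #include <media/stagefright/MetaData.h>
    #include <utils/RefBase.h>

    using android::CameraParameters;
    using android::MetaData;
    using android::sp;

    // Hook base class: safe no-op defaults, overridable by a vendor subclass.
    struct AVUtilsSketch {
        virtual ~AVUtilsSketch() {}
        virtual void extractCustomCameraKeys(
                const CameraParameters& /*params*/, sp<MetaData>& /*meta*/) {
            // Default: no vendor keys to copy.
        }
    };

    // Stand-in for AVUtils::get(): one process-wide instance.
    static AVUtilsSketch* avUtilsSketch() {
        static AVUtilsSketch sDefault;  // a vendor build would swap this out
        return &sDefault;
    }
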
@@ -641,6 +672,14 @@ status_t CameraSource::startCameraRecording() {
status_t CameraSource::start(MetaData *meta) {
ALOGV("start");
+ if (mRecPause) {
+ mRecPause = false;
+ mPauseAdjTimeUs = mPauseEndTimeUs - mPauseStartTimeUs;
+ ALOGV("resume : mPause Adj / End / Start : %" PRId64 " / %" PRId64 " / %" PRId64 " us",
+ mPauseAdjTimeUs, mPauseEndTimeUs, mPauseStartTimeUs);
+ return OK;
+ }
+
CHECK(!mStarted);
if (mInitCheck != OK) {
ALOGE("CameraSource is not initialized yet");
@@ -654,13 +693,23 @@ status_t CameraSource::start(MetaData *meta) {
}
mStartTimeUs = 0;
+ mRecPause = false;
+ mPauseAdjTimeUs = 0;
+ mPauseStartTimeUs = 0;
+ mPauseEndTimeUs = 0;
mNumInputBuffers = 0;
mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
mEncoderDataSpace = HAL_DATASPACE_BT709;
if (meta) {
int64_t startTimeUs;
- if (meta->findInt64(kKeyTime, &startTimeUs)) {
+
+ auto key = kKeyTime;
+ if (!property_get_bool("media.camera.ts.monotonic", true)) {
+ key = kKeyTimeBoot;
+ }
+
+ if (meta->findInt64(key, &startTimeUs)) {
mStartTimeUs = startTimeUs;
}
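
The start() hunk above selects which metadata key carries the recording start time: kKeyTime by default, or kKeyTimeBoot (presumably a boottime-clock variant defined in this tree's extensions) when the media.camera.ts.monotonic property is set to false. A condensed sketch of that selection, using only the calls shown in the hunk:

    #include <cutils/properties.h>          // property_get_bool()
    #include <media/stagefright/MetaData.h> // kKeyTime (kKeyTimeBoot comes
                                            // from this tree's extensions)

    // Mirrors the key selection above: timestamps default to the monotonic
    // clock; setting media.camera.ts.monotonic=false switches the start-time
    // lookup to the boottime-based key.
    static uint32_t startTimeKey() {
        return property_get_bool("media.camera.ts.monotonic", true)
                ? kKeyTime       // monotonic-clock timestamps (default)
                : kKeyTimeBoot;  // boottime-clock timestamps
    }
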
@@ -687,6 +736,16 @@ status_t CameraSource::start(MetaData *meta) {
return err;
}
+status_t CameraSource::pause() {
+ mRecPause = true;
+ mPauseStartTimeUs = mLastFrameTimestampUs;
+ // Record the end time as well; otherwise it could remain 0 if no frame arrives while paused.
+ mPauseEndTimeUs = mLastFrameTimestampUs;
+ ALOGV("pause : mPauseStart %" PRId64 " us, #Queued Frames : %zu",
+ mPauseStartTimeUs, mFramesReceived.size());
+ return OK;
+}
+
void CameraSource::stopCameraRecording() {
ALOGV("stopCameraRecording");
if (mCameraFlags & FLAGS_HOT_CAMERA) {
@@ -792,6 +851,8 @@ void CameraSource::releaseQueuedFrames() {
List<sp<IMemory> >::iterator it;
while (!mFramesReceived.empty()) {
it = mFramesReceived.begin();
+ // b/28466701
+ adjustOutgoingANWBuffer(it->get());
releaseRecordingFrame(*it);
mFramesReceived.erase(it);
++mNumFramesDropped;
@@ -812,6 +873,9 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
+ // b/28466701
+ adjustOutgoingANWBuffer(it->get());
+
releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
@@ -888,10 +952,23 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
return;
}
+ if (mRecPause) {
+ if (!mFramesReceived.empty()) {
+ ALOGV("releaseQueuedFrames - #Queued Frames : %zu", mFramesReceived.size());
+ releaseQueuedFrames();
+ }
+ ALOGV("release one video frame for pause : %" PRId64 " us", timestampUs);
+ releaseOneRecordingFrame(data);
+ mPauseEndTimeUs = timestampUs;
+ return;
+ }
+ timestampUs -= mPauseAdjTimeUs;
+ ALOGV("dataCallbackTimestamp: AdjTimestamp %" PRId64 " us", timestampUs);
+
if (mNumFramesReceived > 0) {
if (timestampUs <= mLastFrameTimestampUs) {
- ALOGW("Dropping frame with backward timestamp %lld (last %lld)",
- (long long)timestampUs, (long long)mLastFrameTimestampUs);
+ ALOGW("Dropping frame with backward timestamp %" PRId64 " (last %" PRId64 ")",
+ timestampUs, mLastFrameTimestampUs);
releaseOneRecordingFrame(data);
return;
}
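
Taken together, pause() and the dataCallbackTimestamp() hunk above implement gapless pause/resume: frames arriving while paused are released unrecorded while mPauseEndTimeUs tracks the newest one, and after resume every timestamp is shifted back by the paused interval. A compact model of that bookkeeping with illustrative numbers (a sketch, not the patch's code):

    #include <cassert>
    #include <cstdint>

    // Compact model of the pause/resume timestamp adjustment; microseconds.
    struct PauseModel {
        int64_t startUs = 0, endUs = 0, adjUs = 0;
        bool paused = false;

        void pause(int64_t lastFrameUs) {
            paused = true;
            // Seed end with start so a resume with no dropped frames adjusts by 0.
            startUs = endUs = lastFrameUs;
        }
        void droppedWhilePaused(int64_t tsUs) { endUs = tsUs; }   // frame released
        void resume() { paused = false; adjUs = endUs - startUs; }
        int64_t adjust(int64_t tsUs) const { return tsUs - adjUs; }
    };

    int main() {
        PauseModel m;
        m.pause(5000000);                      // last recorded frame at t = 5 s
        m.droppedWhilePaused(7000000);         // camera kept running until t = 7 s
        m.resume();                            // adjustment becomes 2 s
        assert(m.adjust(7033333) == 5033333);  // next frame lands right after 5 s
        return 0;
    }
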
@@ -917,6 +994,10 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
++mNumFramesReceived;
CHECK(data != NULL && data->size() > 0);
+
+ // b/28466701
+ adjustIncomingANWBuffer(data.get());
+
mFramesReceived.push_back(data);
int64_t timeUs = mStartTimeUs + (timestampUs - mFirstFrameTimeUs);
mFrameTimes.push_back(timeUs);
@@ -930,6 +1011,24 @@ bool CameraSource::isMetaDataStoredInVideoBuffers() const {
return mIsMetaDataStoredInVideoBuffers;
}
+void CameraSource::adjustIncomingANWBuffer(IMemory* data) {
+ VideoNativeMetadata *payload =
+ reinterpret_cast<VideoNativeMetadata*>(data->pointer());
+ if (payload->eType == kMetadataBufferTypeANWBuffer) {
+ payload->pBuffer = (ANativeWindowBuffer*)(((uint8_t*)payload->pBuffer) +
+ ICameraRecordingProxy::getCommonBaseAddress());
+ }
+}
+
+void CameraSource::adjustOutgoingANWBuffer(IMemory* data) {
+ VideoNativeMetadata *payload =
+ reinterpret_cast<VideoNativeMetadata*>(data->pointer());
+ if (payload->eType == kMetadataBufferTypeANWBuffer) {
+ payload->pBuffer = (ANativeWindowBuffer*)(((uint8_t*)payload->pBuffer) -
+ ICameraRecordingProxy::getCommonBaseAddress());
+ }
+}
+
CameraSource::ProxyListener::ProxyListener(const sp<CameraSource>& source) {
mSource = source;
}
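
adjustIncomingANWBuffer()/adjustOutgoingANWBuffer() address b/28466701: VideoNativeMetadata blobs travel between the camera process and the recorder in shared memory (IMemory), so a raw ANativeWindowBuffer pointer would leak one process's heap layout to the other. Instead, the pointer is rebased against a value both sides obtain from ICameraRecordingProxy::getCommonBaseAddress() on every hop. The idea in isolation (a sketch, not the patch's code):

    #include <cstdint>

    // A pointer becomes a base-relative offset before it crosses the process
    // boundary and is restored after, so only the offset is ever visible in
    // shared memory. commonBase stands in for the value returned by
    // ICameraRecordingProxy::getCommonBaseAddress().
    static uintptr_t toWire(uintptr_t pBuffer, uintptr_t commonBase) {
        return pBuffer - commonBase;   // what adjustOutgoingANWBuffer() does
    }

    static uintptr_t fromWire(uintptr_t wire, uintptr_t commonBase) {
        return wire + commonBase;      // what adjustIncomingANWBuffer() does
    }
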