19 files changed, 166 insertions, 64 deletions
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h index 96dfd7e..069e897 100644 --- a/include/media/stagefright/CameraSource.h +++ b/include/media/stagefright/CameraSource.h @@ -83,7 +83,7 @@ public:                                            Size videoSize,                                            int32_t frameRate,                                            const sp<IGraphicBufferProducer>& surface, -                                          bool storeMetaDataInVideoBuffers = false); +                                          bool storeMetaDataInVideoBuffers = true);      virtual ~CameraSource(); @@ -149,6 +149,8 @@ protected:      int32_t  mNumInputBuffers;      int32_t  mVideoFrameRate;      int32_t  mColorFormat; +    int32_t  mEncoderFormat; +    int32_t  mEncoderDataSpace;      status_t mInitCheck;      sp<Camera>   mCamera; diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index e16a4b5..98abe9c 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -1194,8 +1194,7 @@ void StagefrightRecorder::clipVideoFrameWidth() {      }  } -status_t StagefrightRecorder::checkVideoEncoderCapabilities( -        bool *supportsCameraSourceMetaDataMode) { +status_t StagefrightRecorder::checkVideoEncoderCapabilities() {      /* hardware codecs must support camera source meta data mode */      Vector<CodecCapabilities> codecs;      OMXClient client; @@ -1207,9 +1206,6 @@ status_t StagefrightRecorder::checkVideoEncoderCapabilities(               mVideoEncoder == VIDEO_ENCODER_VP8 ? MEDIA_MIMETYPE_VIDEO_VP8 :               mVideoEncoder == VIDEO_ENCODER_H264 ? MEDIA_MIMETYPE_VIDEO_AVC : ""),              false /* decoder */, true /* hwCodec */, &codecs); -    *supportsCameraSourceMetaDataMode = codecs.size() > 0; -    ALOGV("encoder %s camera source meta-data mode", -            *supportsCameraSourceMetaDataMode ? 
"supports" : "DOES NOT SUPPORT");      if (!mCaptureTimeLapse) {          // Dont clip for time lapse capture as encoder will have enough @@ -1418,9 +1414,7 @@ status_t StagefrightRecorder::setupMediaSource(  status_t StagefrightRecorder::setupCameraSource(          sp<CameraSource> *cameraSource) {      status_t err = OK; -    bool encoderSupportsCameraSourceMetaDataMode; -    if ((err = checkVideoEncoderCapabilities( -                &encoderSupportsCameraSourceMetaDataMode)) != OK) { +    if ((err = checkVideoEncoderCapabilities()) != OK) {          return err;      }      Size videoSize; @@ -1436,14 +1430,13 @@ status_t StagefrightRecorder::setupCameraSource(          mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(                  mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,                  videoSize, mFrameRate, mPreviewSurface, -                mTimeBetweenTimeLapseFrameCaptureUs, -                encoderSupportsCameraSourceMetaDataMode); +                mTimeBetweenTimeLapseFrameCaptureUs);          *cameraSource = mCameraSourceTimeLapse;      } else {          *cameraSource = CameraSource::CreateFromCamera(                  mCamera, mCameraProxy, mCameraId, mClientName, mClientUid,                  videoSize, mFrameRate, -                mPreviewSurface, encoderSupportsCameraSourceMetaDataMode); +                mPreviewSurface);      }      mCamera.clear();      mCameraProxy.clear(); diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index 7473f42..8af9278 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -141,8 +141,7 @@ private:      status_t setupRTPRecording();      status_t setupMPEG2TSRecording();      sp<MediaSource> createAudioSource(); -    status_t checkVideoEncoderCapabilities( -            bool *supportsCameraSourceMetaDataMode); +    status_t checkVideoEncoderCapabilities();      status_t checkAudioEncoderCapabilities();      // Generic MediaSource set-up. 
Returns the appropriate      // source (CameraSource or SurfaceMediaSource) diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index 1b788f3..2606e44 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -608,6 +608,16 @@ status_t CameraSource::startCameraRecording() {          }      } +    err = mCamera->sendCommand( +        CAMERA_CMD_SET_VIDEO_FORMAT, mEncoderFormat, mEncoderDataSpace); + +    // This could happen for CameraHAL1 clients; thus the failure is +    // not a fatal error +    if (err != OK) { +        ALOGW("Failed to set video encoder format/dataspace to %d, %d due to %d", +                mEncoderFormat, mEncoderDataSpace, err); +    } +      err = OK;      if (mCameraFlags & FLAGS_HOT_CAMERA) {          mCamera->unlock(); @@ -645,6 +655,9 @@ status_t CameraSource::start(MetaData *meta) {      mStartTimeUs = 0;      mNumInputBuffers = 0; +    mEncoderFormat = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; +    mEncoderDataSpace = HAL_DATASPACE_BT709; +      if (meta) {          int64_t startTimeUs;          if (meta->findInt64(kKeyTime, &startTimeUs)) { @@ -656,6 +669,10 @@ status_t CameraSource::start(MetaData *meta) {              CHECK_GT(nBuffers, 0);              mNumInputBuffers = nBuffers;          } + +        // TODO: Read in format/dataspace from somewhere +        // Uncomment to test SW encoders until TODO is resolved +        // mEncoderFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;      }      status_t err; diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index cbead32..e8ef24e 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -73,6 +73,7 @@ LOCAL_SHARED_LIBRARIES:= \  LOCAL_C_INCLUDES += \      system/media/camera/include \      system/media/private/camera/include \ +    frameworks/native/include/media/openmax \      external/jpeg diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index f2d6ab2..ca14cdb 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -1559,6 +1559,9 @@ status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {              return commandPingL();          case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT:              return commandSetVideoBufferCountL(arg1); +        case CAMERA_CMD_SET_VIDEO_FORMAT: +            return commandSetVideoFormatL(arg1, +                    static_cast<android_dataspace>(arg2));          default:              ALOGE("%s: Unknown command %d (arguments %d, %d)",                      __FUNCTION__, cmd, arg1, arg2); @@ -1710,6 +1713,17 @@ status_t Camera2Client::commandSetVideoBufferCountL(size_t count) {      return mStreamingProcessor->setRecordingBufferCount(count);  } +status_t Camera2Client::commandSetVideoFormatL(int format, +        android_dataspace dataspace) { +    if (recordingEnabledL()) { +        ALOGE("%s: Camera %d: Error setting video format after " +                "recording was started", __FUNCTION__, mCameraId); +        return INVALID_OPERATION; +    } + +    return mStreamingProcessor->setRecordingFormat(format, dataspace); +} +  void Camera2Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,          const CaptureResultExtras& resultExtras) {      int32_t err = CAMERA_ERROR_UNKNOWN; diff --git 
a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h index 3784aab..c6df228 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.h +++ b/services/camera/libcameraservice/api1/Camera2Client.h @@ -167,6 +167,7 @@ private:      status_t commandEnableFocusMoveMsgL(bool enable);      status_t commandPingL();      status_t commandSetVideoBufferCountL(size_t count); +    status_t commandSetVideoFormatL(int format, android_dataspace dataSpace);      // Current camera device configuration      camera2::SharedParameters mParameters; diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp index 88c5811..143cc61 100644 --- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp @@ -123,7 +123,7 @@ status_t CallbackProcessor::updateStream(const Parameters &params) {          // Check if stream parameters have to change          uint32_t currentWidth, currentHeight, currentFormat;          res = device->getStreamInfo(mCallbackStreamId, -                &currentWidth, &currentHeight, &currentFormat); +                &currentWidth, &currentHeight, &currentFormat, 0);          if (res != OK) {              ALOGE("%s: Camera %d: Error querying callback output stream info: "                      "%s (%d)", __FUNCTION__, mId, diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp index 34798bf..88987f9 100644 --- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp @@ -115,7 +115,7 @@ status_t JpegProcessor::updateStream(const Parameters &params) {          // Check if stream parameters have to change          uint32_t currentWidth, currentHeight;          res = device->getStreamInfo(mCaptureStreamId, -                &currentWidth, &currentHeight, 0); +                &currentWidth, &currentHeight, 0, 0);          if (res != OK) {              ALOGE("%s: Camera %d: Error querying capture output stream info: "                      "%s (%d)", __FUNCTION__, diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp index b6071f6..36d143b 100644 --- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp @@ -25,11 +25,12 @@  #define ALOGVV(...) 
((void)0)  #endif +#include <cutils/properties.h>  #include <utils/Log.h>  #include <utils/Trace.h>  #include <gui/BufferItem.h>  #include <gui/Surface.h> -#include <media/hardware/MetadataBufferType.h> +#include <media/hardware/HardwareAPI.h>  #include "common/CameraDeviceBase.h"  #include "api1/Camera2Client.h" @@ -51,7 +52,10 @@ StreamingProcessor::StreamingProcessor(sp<Camera2Client> client):          mRecordingStreamId(NO_STREAM),          mRecordingFrameAvailable(false),          mRecordingHeapCount(kDefaultRecordingHeapCount), -        mRecordingHeapFree(kDefaultRecordingHeapCount) +        mRecordingHeapFree(kDefaultRecordingHeapCount), +        mRecordingFormat(kDefaultRecordingFormat), +        mRecordingDataSpace(kDefaultRecordingDataSpace), +        mRecordingGrallocUsage(kDefaultRecordingGrallocUsage)  {  } @@ -151,7 +155,7 @@ status_t StreamingProcessor::updatePreviewStream(const Parameters &params) {          // Check if stream parameters have to change          uint32_t currentWidth, currentHeight;          res = device->getStreamInfo(mPreviewStreamId, -                &currentWidth, &currentHeight, 0); +                &currentWidth, &currentHeight, 0, 0);          if (res != OK) {              ALOGE("%s: Camera %d: Error querying preview stream info: "                      "%s (%d)", __FUNCTION__, mId, strerror(-res), res); @@ -280,6 +284,46 @@ status_t StreamingProcessor::setRecordingBufferCount(size_t count) {      return OK;  } +status_t StreamingProcessor::setRecordingFormat(int format, +        android_dataspace dataSpace) { +    ATRACE_CALL(); + +    Mutex::Autolock m(mMutex); + +    ALOGV("%s: Camera %d: New recording format/dataspace from encoder: %X, %X", +            __FUNCTION__, mId, format, dataSpace); + +    mRecordingFormat = format; +    mRecordingDataSpace = dataSpace; +    int prevGrallocUsage = mRecordingGrallocUsage; +    if (mRecordingFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) { +        mRecordingGrallocUsage = GRALLOC_USAGE_HW_VIDEO_ENCODER; +    } else { +        mRecordingGrallocUsage = GRALLOC_USAGE_SW_READ_OFTEN; +    } + +    ALOGV("%s: Camera %d: New recording gralloc usage: %08X", __FUNCTION__, mId, +            mRecordingGrallocUsage); + +    if (prevGrallocUsage != mRecordingGrallocUsage) { +        ALOGV("%s: Camera %d: Resetting recording consumer for new usage", +            __FUNCTION__, mId); + +        if (isStreamActive(mActiveStreamIds, mRecordingStreamId)) { +            ALOGE("%s: Camera %d: Changing recording format when " +                    "recording stream is already active!", __FUNCTION__, +                    mId); +            return INVALID_OPERATION; +        } + +        releaseAllRecordingFramesLocked(); + +        mRecordingConsumer.clear(); +    } + +    return OK; +} +  status_t StreamingProcessor::updateRecordingRequest(const Parameters &params) {      ATRACE_CALL();      status_t res; @@ -340,9 +384,10 @@ status_t StreamingProcessor::recordingStreamNeedsUpdate(          return INVALID_OPERATION;      } -    uint32_t currentWidth, currentHeight; +    uint32_t currentWidth, currentHeight, currentFormat; +    android_dataspace currentDataSpace;      res = device->getStreamInfo(mRecordingStreamId, -            &currentWidth, &currentHeight, 0); +            &currentWidth, &currentHeight, &currentFormat, &currentDataSpace);      if (res != OK) {          ALOGE("%s: Camera %d: Error querying recording output stream info: "                  "%s (%d)", __FUNCTION__, mId, @@ -350,8 +395,11 @@ status_t StreamingProcessor::recordingStreamNeedsUpdate(          return res;      } -    
if (mRecordingConsumer == 0 || currentWidth != (uint32_t)params.videoWidth || -            currentHeight != (uint32_t)params.videoHeight) { +    if (mRecordingConsumer == 0 || +            currentWidth != (uint32_t)params.videoWidth || +            currentHeight != (uint32_t)params.videoHeight || +            currentFormat != (uint32_t)mRecordingFormat || +            currentDataSpace != mRecordingDataSpace) {          *needsUpdate = true;      }      *needsUpdate = false; @@ -380,7 +428,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters &params) {          sp<IGraphicBufferConsumer> consumer;          BufferQueue::createBufferQueue(&producer, &consumer);          mRecordingConsumer = new BufferItemConsumer(consumer, -                GRALLOC_USAGE_HW_VIDEO_ENCODER, +                mRecordingGrallocUsage,                  mRecordingHeapCount + 1);          mRecordingConsumer->setFrameAvailableListener(this);          mRecordingConsumer->setName(String8("Camera2-RecordingConsumer")); @@ -392,8 +440,11 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters &params) {      if (mRecordingStreamId != NO_STREAM) {          // Check if stream parameters have to change          uint32_t currentWidth, currentHeight; +        uint32_t currentFormat; +        android_dataspace currentDataSpace;          res = device->getStreamInfo(mRecordingStreamId, -                &currentWidth, &currentHeight, 0); +                &currentWidth, &currentHeight, +                &currentFormat, &currentDataSpace);          if (res != OK) {              ALOGE("%s: Camera %d: Error querying recording output stream info: "                      "%s (%d)", __FUNCTION__, mId, @@ -401,7 +452,10 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters &params) {              return res;          }          if (currentWidth != (uint32_t)params.videoWidth || -                currentHeight != (uint32_t)params.videoHeight || newConsumer) { +                currentHeight != (uint32_t)params.videoHeight || +                currentFormat != (uint32_t)mRecordingFormat || +                currentDataSpace != mRecordingDataSpace || +                newConsumer) {              // TODO: Should wait to be sure previous recording has finished              res = device->deleteStream(mRecordingStreamId); @@ -422,11 +476,9 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters &params) {      if (mRecordingStreamId == NO_STREAM) {          mRecordingFrameCount = 0; -        // Selecting BT.709 colorspace by default -        // TODO: Wire this in from encoder side          res = device->createStream(mRecordingWindow,                  params.videoWidth, params.videoHeight, -                CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_BT709, +                mRecordingFormat, mRecordingDataSpace,                  CAMERA3_STREAM_ROTATION_0, &mRecordingStreamId);          if (res != OK) {              ALOGE("%s: Camera %d: Can't create output stream for recording: " @@ -722,12 +774,12 @@ status_t StreamingProcessor::processRecordingFrame() {          }          if (mRecordingHeap == 0) { -            const size_t bufferSize = 4 + sizeof(buffer_handle_t); +            size_t payloadSize = sizeof(VideoNativeMetadata);              ALOGV("%s: Camera %d: Creating recording heap with %zu buffers of "                      "size %zu bytes", __FUNCTION__, mId, -                    mRecordingHeapCount, bufferSize); +                    mRecordingHeapCount, payloadSize); -            mRecordingHeap = new Camera2Heap(bufferSize, 
mRecordingHeapCount, +            mRecordingHeap = new Camera2Heap(payloadSize, mRecordingHeapCount,                      "Camera2Client::RecordingHeap");              if (mRecordingHeap->mHeap->getSize() == 0) {                  ALOGE("%s: Camera %d: Unable to allocate memory for recording", @@ -750,7 +802,7 @@ status_t StreamingProcessor::processRecordingFrame() {              mRecordingHeapFree = mRecordingHeapCount;          } -        if ( mRecordingHeapFree == 0) { +        if (mRecordingHeapFree == 0) {              ALOGE("%s: Camera %d: No free recording buffers, dropping frame",                      __FUNCTION__, mId);              mRecordingConsumer->releaseBuffer(imgBuffer); @@ -770,13 +822,15 @@ status_t StreamingProcessor::processRecordingFrame() {                  mRecordingHeap->mBuffers[heapIdx]->getMemory(&offset,                          &size); -        uint8_t *data = (uint8_t*)heap->getBase() + offset; -        uint32_t type = kMetadataBufferTypeGrallocSource; -        *((uint32_t*)data) = type; -        *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle; -        ALOGVV("%s: Camera %d: Sending out buffer_handle_t %p", -                __FUNCTION__, mId, -                imgBuffer.mGraphicBuffer->handle); +        VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>( +            (uint8_t*)heap->getBase() + offset); +        payload->eType = kMetadataBufferTypeANWBuffer; +        payload->pBuffer = imgBuffer.mGraphicBuffer->getNativeBuffer(); +        payload->nFenceFd = -1; + +        ALOGVV("%s: Camera %d: Sending out ANWBuffer %p", +                __FUNCTION__, mId, payload->pBuffer); +          mRecordingBuffers.replaceAt(imgBuffer, heapIdx);          recordingHeap = mRecordingHeap;      } @@ -809,42 +863,42 @@ void StreamingProcessor::releaseRecordingFrame(const sp<IMemory>& mem) {                  heap->getHeapID(), mRecordingHeap->mHeap->getHeapID());          return;      } -    uint8_t *data = (uint8_t*)heap->getBase() + offset; -    uint32_t type = *(uint32_t*)data; -    if (type != kMetadataBufferTypeGrallocSource) { + +    VideoNativeMetadata *payload = reinterpret_cast<VideoNativeMetadata*>( +        (uint8_t*)heap->getBase() + offset); + +    if (payload->eType != kMetadataBufferTypeANWBuffer) {          ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)", -                __FUNCTION__, mId, type, -                kMetadataBufferTypeGrallocSource); +                __FUNCTION__, mId, payload->eType, +                kMetadataBufferTypeANWBuffer);          return;      }      // Release the buffer back to the recording queue - -    buffer_handle_t imgHandle = *(buffer_handle_t*)(data + 4); -      size_t itemIndex;      for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) {          const BufferItem item = mRecordingBuffers[itemIndex];          if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT && -                item.mGraphicBuffer->handle == imgHandle) { -            break; +                item.mGraphicBuffer->getNativeBuffer() == payload->pBuffer) { +                break;          }      } +      if (itemIndex == mRecordingBuffers.size()) { -        ALOGE("%s: Camera %d: Can't find buffer_handle_t %p in list of " +        ALOGE("%s: Camera %d: Can't find returned ANW Buffer %p in list of "                  "outstanding buffers", __FUNCTION__, mId, -                imgHandle); +                payload->pBuffer);          return;      } -    ALOGVV("%s: Camera %d: Freeing 
buffer_handle_t %p", __FUNCTION__, -            mId, imgHandle); +    ALOGVV("%s: Camera %d: Freeing returned ANW buffer %p index %d", __FUNCTION__, +            mId, payload->pBuffer, itemIndex);      res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]);      if (res != OK) {          ALOGE("%s: Camera %d: Unable to free recording frame " -                "(buffer_handle_t: %p): %s (%d)", __FUNCTION__, -                mId, imgHandle, strerror(-res), res); +                "(Returned ANW buffer: %p): %s (%d)", __FUNCTION__, +                mId, payload->pBuffer, strerror(-res), res);          return;      }      mRecordingBuffers.replaceAt(itemIndex); diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h index 2474062..42e9e7a 100644 --- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h @@ -53,6 +53,8 @@ class StreamingProcessor:      int getPreviewStreamId() const;      status_t setRecordingBufferCount(size_t count); +    status_t setRecordingFormat(int format, android_dataspace_t dataspace); +      status_t updateRecordingRequest(const Parameters &params);      // If needsUpdate is set to true, a updateRecordingStream call with params will recreate      // recording stream @@ -127,6 +129,18 @@ class StreamingProcessor:      Vector<BufferItem> mRecordingBuffers;      size_t mRecordingHeapHead, mRecordingHeapFree; +    static const int kDefaultRecordingFormat = +            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; +    int mRecordingFormat; + +    static const android_dataspace kDefaultRecordingDataSpace = +            HAL_DATASPACE_BT709; +    android_dataspace mRecordingDataSpace; + +    static const int kDefaultRecordingGrallocUsage = +            GRALLOC_USAGE_HW_VIDEO_ENCODER; +    int mRecordingGrallocUsage; +      virtual bool threadLoop();      status_t processRecordingFrame(); diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp index a03f9c7..d8500df 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp @@ -147,7 +147,7 @@ status_t ZslProcessor::updateStream(const Parameters &params) {          // Check if stream parameters have to change          uint32_t currentWidth, currentHeight;          res = device->getStreamInfo(mZslStreamId, -                &currentWidth, &currentHeight, 0); +                &currentWidth, &currentHeight, 0, 0);          if (res != OK) {              ALOGE("%s: Camera %d: Error querying capture output stream info: "                      "%s (%d)", __FUNCTION__, diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp index 470a6d6..69620ac 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp @@ -150,7 +150,7 @@ status_t ZslProcessor3::updateStream(const Parameters &params) {          // Check if stream parameters have to change          uint32_t currentWidth, currentHeight;          res = device->getStreamInfo(mZslStreamId, -                &currentWidth, &currentHeight, 0); +                &currentWidth, &currentHeight, 0, 0);          if (res != OK) {              ALOGE("%s: Camera %d: Error querying capture output stream info: "                      "%s 
(%d)", __FUNCTION__, diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h index 64236c5..27c33a3 100644 --- a/services/camera/libcameraservice/common/CameraDeviceBase.h +++ b/services/camera/libcameraservice/common/CameraDeviceBase.h @@ -128,7 +128,8 @@ class CameraDeviceBase : public virtual RefBase {       * Get information about a given stream.       */      virtual status_t getStreamInfo(int id, -            uint32_t *width, uint32_t *height, uint32_t *format) = 0; +            uint32_t *width, uint32_t *height, +            uint32_t *format, android_dataspace *dataSpace) = 0;      /**       * Set stream gralloc buffer transform diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp index 7aba0ee..88f555b 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.cpp +++ b/services/camera/libcameraservice/device2/Camera2Device.cpp @@ -315,7 +315,8 @@ status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) {  status_t Camera2Device::getStreamInfo(int id, -        uint32_t *width, uint32_t *height, uint32_t *format) { +        uint32_t *width, uint32_t *height, +        uint32_t *format, android_dataspace *dataSpace) {      ATRACE_CALL();      ALOGV("%s: E", __FUNCTION__);      bool found = false; @@ -336,6 +337,7 @@ status_t Camera2Device::getStreamInfo(int id,      if (width) *width = (*streamI)->getWidth();      if (height) *height = (*streamI)->getHeight();      if (format) *format = (*streamI)->getFormat(); +    if (dataSpace) *dataSpace = HAL_DATASPACE_UNKNOWN;      return OK;  } diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h index a9affa2..a001a91 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.h +++ b/services/camera/libcameraservice/device2/Camera2Device.h @@ -63,7 +63,8 @@ class Camera2Device: public CameraDeviceBase {              uint32_t width, uint32_t height, int format, int *id);      virtual status_t createReprocessStreamFromStream(int outputId, int *id);      virtual status_t getStreamInfo(int id, -            uint32_t *width, uint32_t *height, uint32_t *format); +            uint32_t *width, uint32_t *height, +            uint32_t *format, android_dataspace *dataSpace);      virtual status_t setStreamTransform(int id, int transform);      virtual status_t deleteStream(int id);      virtual status_t deleteReprocessStream(int id); diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 731f74c..852751c 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -904,7 +904,8 @@ status_t Camera3Device::createReprocessStreamFromStream(int outputId, int *id) {  status_t Camera3Device::getStreamInfo(int id, -        uint32_t *width, uint32_t *height, uint32_t *format) { +        uint32_t *width, uint32_t *height, +        uint32_t *format, android_dataspace *dataSpace) {      ATRACE_CALL();      Mutex::Autolock il(mInterfaceLock);      Mutex::Autolock l(mLock); @@ -935,7 +936,7 @@ status_t Camera3Device::getStreamInfo(int id,      if (width) *width  = mOutputStreams[idx]->getWidth();      if (height) *height = mOutputStreams[idx]->getHeight();      if (format) *format = mOutputStreams[idx]->getFormat(); - +    if (dataSpace) 
*dataSpace = mOutputStreams[idx]->getDataSpace();      return OK;  } diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index b3db2e9..180b1f8 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -109,7 +109,8 @@ class Camera3Device :      virtual status_t createReprocessStreamFromStream(int outputId, int *id);      virtual status_t getStreamInfo(int id, -            uint32_t *width, uint32_t *height, uint32_t *format); +            uint32_t *width, uint32_t *height, +            uint32_t *format, android_dataspace *dataSpace);      virtual status_t setStreamTransform(int id, int transform);      virtual status_t deleteStream(int id); diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h index d177b57..6c87a45 100644 --- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h +++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h @@ -45,6 +45,7 @@ class Camera3StreamInterface : public virtual RefBase {      virtual uint32_t getWidth() const = 0;      virtual uint32_t getHeight() const = 0;      virtual int      getFormat() const = 0; +    virtual android_dataspace getDataSpace() const = 0;      /**       * Start the stream configuration process. Returns a handle to the stream's
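
For reference, a minimal sketch (not part of the patch) of how the recording start path is expected to use the new CAMERA_CMD_SET_VIDEO_FORMAT command, as wired up in CameraSource.cpp and Camera2Client.cpp above. The helper name sendEncoderFormat and the standalone framing are illustrative only; the format/dataspace values mirror the defaults CameraSource::start() now assigns to mEncoderFormat and mEncoderDataSpace.

#define LOG_TAG "VideoFormatSketch"
#include <utils/Log.h>
#include <camera/Camera.h>     // android::Camera::sendCommand()
#include <system/camera.h>     // CAMERA_CMD_SET_VIDEO_FORMAT
#include <system/graphics.h>   // HAL_PIXEL_FORMAT_*, HAL_DATASPACE_*

using namespace android;

// Hypothetical helper; mirrors what CameraSource::startCameraRecording()
// now does before recording begins.
static void sendEncoderFormat(const sp<Camera>& camera) {
    // Defaults set in CameraSource::start(): opaque implementation-defined
    // pixel format with the BT.709 dataspace.
    int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
    int32_t dataSpace = HAL_DATASPACE_BT709;

    status_t err = camera->sendCommand(CAMERA_CMD_SET_VIDEO_FORMAT,
                                       format, dataSpace);
    if (err != OK) {
        // Not fatal: CameraHAL1 clients may ignore the new command,
        // matching the ALOGW path added in CameraSource.cpp above.
        ALOGW("Setting video format/dataspace failed: %d", err);
    }
}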

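A second sketch, also illustrative rather than part of the patch, of the per-frame payload change in StreamingProcessor::processRecordingFrame(): the old 4-byte type tag plus buffer_handle_t layout is replaced by the VideoNativeMetadata struct from media/hardware/HardwareAPI.h. The function and the "slot" parameter are hypothetical stand-ins for the heap->getBase() + offset pointer used in the diff.

#include <media/hardware/HardwareAPI.h>         // VideoNativeMetadata
#include <media/hardware/MetadataBufferType.h>  // kMetadataBufferTypeANWBuffer
#include <ui/GraphicBuffer.h>

using namespace android;

// Packs one recording frame's metadata into the IMemory heap slot that is
// handed to the video encoder.
static void packRecordingMeta(void* slot, const sp<GraphicBuffer>& buffer) {
    VideoNativeMetadata* payload = static_cast<VideoNativeMetadata*>(slot);
    payload->eType = kMetadataBufferTypeANWBuffer;  // was kMetadataBufferTypeGrallocSource
    payload->pBuffer = buffer->getNativeBuffer();   // full ANativeWindowBuffer, not just the handle
    payload->nFenceFd = -1;                         // no acquire fence for camera frames
}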