author     Lajos Molnar <lajos@google.com>    2014-10-10 17:15:17 -0700
committer  Lajos Molnar <lajos@google.com>    2014-10-13 23:16:20 -0700
commit     2edda09a2ad1d112c52acd37d323f63f0a492d67 (patch)
tree       ef7d3fbbf888e258fb12b24ddf23a04dac3e54c5 /media
parent     512e979284de984427e5b2f73b9054ae1b5e2b0a (diff)
stagefright: fix surface input handling of software encoders
- added SoftVideoEncoderOMXComponent for the common color conversion and extension-handling logic
- fixed YUV420 SemiPlanar handling: the interleaved chroma should be treated as NV12 (Cb,Cr), not NV21 (Cr,Cb)

Bug: 17935149
Change-Id: I9b8d05678b1862dd37bf349ea83d67bdf1bb5560
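
For context on the second item: OMX_COLOR_FormatYUV420SemiPlanar carries its chroma plane as interleaved Cb,Cr pairs (NV12). The per-encoder helpers removed below read each pair as Cr,Cb (NV21) and swapped the samples when writing the planar output; the shared helper added in SoftVideoEncoderOMXComponent keeps the Cb,Cr order. A minimal scalar sketch of the intended conversion (illustrative only; Nv12ToI420 is not a function in this patch, and the real helper works a 32-bit word at a time):

    #include <stdint.h>
    #include <string.h>

    // NV12 (Y plane followed by interleaved Cb,Cr) -> I420 (Y, Cb, Cr planes).
    // Assumes even width/height and no row padding.
    static void Nv12ToI420(const uint8_t *in, uint8_t *out,
                           int32_t width, int32_t height) {
        const int32_t ySize = width * height;
        memcpy(out, in, ySize);                    // Y plane is copied unchanged

        const uint8_t *srcUV = in + ySize;         // interleaved Cb,Cr pairs
        uint8_t *dstCb = out + ySize;              // I420: Cb plane first ...
        uint8_t *dstCr = dstCb + (ySize >> 2);     // ... then Cr plane
        for (int32_t i = ySize >> 2; i > 0; --i) {
            *dstCb++ = *srcUV++;                   // first byte of each pair is Cb (NV12)
            *dstCr++ = *srcUV++;                   // second byte is Cr
        }
    }
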
Diffstat (limited to 'media')
-rw-r--r--  media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp         | 116
-rw-r--r--  media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h           |  12
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp  | 122
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h    |  12
-rw-r--r--  media/libstagefright/codecs/on2/enc/Android.mk                 |   4
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp         | 147
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h           |   8
-rw-r--r--  media/libstagefright/include/SoftVideoEncoderOMXComponent.h    |  67
-rw-r--r--  media/libstagefright/omx/Android.mk                            |   6
-rw-r--r--  media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp      | 311
10 files changed, 456 insertions, 349 deletions
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index 0f4a00d..ed3dca0 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -111,36 +111,6 @@ static status_t ConvertAvcSpecLevelToOmxAvcLevel(
return BAD_VALUE;
}
-inline static void ConvertYUV420SemiPlanarToYUV420Planar(
- uint8_t *inyuv, uint8_t* outyuv,
- int32_t width, int32_t height) {
-
- int32_t outYsize = width * height;
- uint32_t *outy = (uint32_t *) outyuv;
- uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
- uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
- /* Y copying */
- memcpy(outy, inyuv, outYsize);
-
- /* U & V copying */
- uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
- for (int32_t i = height >> 1; i > 0; --i) {
- for (int32_t j = width >> 2; j > 0; --j) {
- uint32_t temp = *inyuv_4++;
- uint32_t tempU = temp & 0xFF;
- tempU = tempU | ((temp >> 8) & 0xFF00);
-
- uint32_t tempV = (temp >> 8) & 0xFF;
- tempV = tempV | ((temp >> 16) & 0xFF00);
-
- // Flip U and V
- *outcb++ = tempV;
- *outcr++ = tempU;
- }
- }
-}
-
static void* MallocWrapper(
void * /* userData */, int32_t size, int32_t /* attrs */) {
void *ptr = malloc(size);
@@ -178,7 +148,7 @@ SoftAVCEncoder::SoftAVCEncoder(
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
mVideoWidth(176),
mVideoHeight(144),
mVideoFrameRate(30),
@@ -260,9 +230,10 @@ OMX_ERRORTYPE SoftAVCEncoder::initEncParams() {
mEncParams->use_overrun_buffer = AVC_OFF;
- if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
+ || mStoreMetaDataInBuffers) {
// Color conversion is needed.
- CHECK(mInputFrameData == NULL);
+ free(mInputFrameData);
mInputFrameData =
(uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
CHECK(mInputFrameData != NULL);
@@ -348,10 +319,10 @@ OMX_ERRORTYPE SoftAVCEncoder::releaseEncoder() {
PVAVCCleanUpEncoder(mHandle);
releaseOutputBuffers();
- delete mInputFrameData;
+ free(mInputFrameData);
mInputFrameData = NULL;
- delete mSliceGroup;
+ free(mSliceGroup);
mSliceGroup = NULL;
delete mEncParams;
@@ -713,11 +684,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
mStoreMetaDataInBuffers ? " true" : "false");
if (mStoreMetaDataInBuffers) {
- mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
- if (mInputFrameData == NULL) {
- mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
- }
+ mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
}
return OMX_ErrorNone;
@@ -801,8 +768,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
}
}
- buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
-
// Get next input video frame
if (mReadyForNextFrame) {
// Save the input buffer info so that it can be
@@ -823,7 +788,7 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
videoInput.height = ((mVideoHeight + 15) >> 4) << 4;
videoInput.pitch = ((mVideoWidth + 15) >> 4) << 4;
videoInput.coding_timestamp = (inHeader->nTimeStamp + 500) / 1000; // in ms
- uint8_t *inputData = NULL;
+ const uint8_t *inputData = NULL;
if (mStoreMetaDataInBuffers) {
if (inHeader->nFilledLen != 8) {
ALOGE("MetaData buffer is wrong size! "
@@ -833,8 +798,10 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
return;
}
inputData =
- extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
- &srcBuffer);
+ extractGraphicBuffer(
+ mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+ inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
+ mVideoWidth, mVideoHeight);
if (inputData == NULL) {
ALOGE("Unable to extract gralloc buffer in metadata mode");
mSignalledError = true;
@@ -843,16 +810,16 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
}
// TODO: Verify/convert pixel format enum
} else {
- inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ ConvertYUV420SemiPlanarToYUV420Planar(
+ inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+ inputData = mInputFrameData;
+ }
}
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
- ConvertYUV420SemiPlanarToYUV420Planar(
- inputData, mInputFrameData, mVideoWidth, mVideoHeight);
- inputData = mInputFrameData;
- }
CHECK(inputData != NULL);
- videoInput.YCbCr[0] = inputData;
+ videoInput.YCbCr[0] = (uint8_t *)inputData;
videoInput.YCbCr[1] = videoInput.YCbCr[0] + videoInput.height * videoInput.pitch;
videoInput.YCbCr[2] = videoInput.YCbCr[1] +
((videoInput.height * videoInput.pitch) >> 2);
@@ -869,14 +836,12 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (encoderStatus < AVCENC_SUCCESS) {
ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
mSignalledError = true;
- releaseGrallocData(srcBuffer);
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
return;
} else {
ALOGV("encoderStatus = %d at line %d", encoderStatus, __LINE__);
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
- releaseGrallocData(srcBuffer);
notifyEmptyBufferDone(inHeader);
return;
}
@@ -916,7 +881,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (encoderStatus < AVCENC_SUCCESS) {
ALOGE("encoderStatus = %d at line %d", encoderStatus, __LINE__);
mSignalledError = true;
- releaseGrallocData(srcBuffer);
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
return;
}
@@ -926,7 +890,6 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
- releaseGrallocData(srcBuffer);
notifyEmptyBufferDone(inHeader);
outQueue.erase(outQueue.begin());
@@ -974,47 +937,6 @@ void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
ALOGV("signalBufferReturned: %p", buffer);
}
-OMX_ERRORTYPE SoftAVCEncoder::getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index) {
- if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *(int32_t*)index = kStoreMetaDataExtensionIndex;
- return OMX_ErrorNone;
- }
- return OMX_ErrorUndefined;
-}
-
-uint8_t *SoftAVCEncoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
- OMX_U32 type = *(OMX_U32*)data;
- status_t res;
- if (type != kMetadataBufferTypeGrallocSource) {
- ALOGE("Data passed in with metadata mode does not have type "
- "kMetadataBufferTypeGrallocSource (%d), has type %d instead",
- kMetadataBufferTypeGrallocSource, type);
- return NULL;
- }
- buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
-
- const Rect rect(mVideoWidth, mVideoHeight);
- uint8_t *img;
- res = GraphicBufferMapper::get().lock(imgBuffer,
- GRALLOC_USAGE_HW_VIDEO_ENCODER,
- rect, (void**)&img);
- if (res != OK) {
- ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
- imgBuffer);
- return NULL;
- }
-
- *buffer = imgBuffer;
- return img;
-}
-
-void SoftAVCEncoder::releaseGrallocData(buffer_handle_t buffer) {
- if (mStoreMetaDataInBuffers) {
- GraphicBufferMapper::get().unlock(buffer);
- }
-}
-
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
index cfa9ca5..130593f 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
@@ -22,14 +22,14 @@
#include <utils/Vector.h>
#include "avcenc_api.h"
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
namespace android {
struct MediaBuffer;
struct SoftAVCEncoder : public MediaBufferObserver,
- public SimpleSoftOMXComponent {
+ public SoftVideoEncoderOMXComponent {
SoftAVCEncoder(
const char *name,
const OMX_CALLBACKTYPE *callbacks,
@@ -45,11 +45,6 @@ struct SoftAVCEncoder : public MediaBufferObserver,
virtual void onQueueFilled(OMX_U32 portIndex);
- // Override SoftOMXComponent methods
-
- virtual OMX_ERRORTYPE getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index);
-
// Implement MediaBufferObserver
virtual void signalBufferReturned(MediaBuffer *buffer);
@@ -105,9 +100,6 @@ private:
OMX_ERRORTYPE releaseEncoder();
void releaseOutputBuffers();
- uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
- void releaseGrallocData(buffer_handle_t buffer);
-
DISALLOW_EVIL_CONSTRUCTORS(SoftAVCEncoder);
};
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index 42c9956..c87d19c 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -46,42 +46,12 @@ static void InitOMXParams(T *params) {
params->nVersion.s.nStep = 0;
}
-inline static void ConvertYUV420SemiPlanarToYUV420Planar(
- uint8_t *inyuv, uint8_t* outyuv,
- int32_t width, int32_t height) {
-
- int32_t outYsize = width * height;
- uint32_t *outy = (uint32_t *) outyuv;
- uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
- uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
- /* Y copying */
- memcpy(outy, inyuv, outYsize);
-
- /* U & V copying */
- uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
- for (int32_t i = height >> 1; i > 0; --i) {
- for (int32_t j = width >> 2; j > 0; --j) {
- uint32_t temp = *inyuv_4++;
- uint32_t tempU = temp & 0xFF;
- tempU = tempU | ((temp >> 8) & 0xFF00);
-
- uint32_t tempV = (temp >> 8) & 0xFF;
- tempV = tempV | ((temp >> 16) & 0xFF00);
-
- // Flip U and V
- *outcb++ = tempV;
- *outcr++ = tempU;
- }
- }
-}
-
SoftMPEG4Encoder::SoftMPEG4Encoder(
const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
mEncodeMode(COMBINE_MODE_WITH_ERR_RES),
mVideoWidth(176),
mVideoHeight(144),
@@ -149,9 +119,10 @@ OMX_ERRORTYPE SoftMPEG4Encoder::initEncParams() {
mEncParams->quantType[0] = 0;
mEncParams->noFrameSkipped = PV_OFF;
- if (mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar
+ || mStoreMetaDataInBuffers) {
// Color conversion is needed.
- CHECK(mInputFrameData == NULL);
+ free(mInputFrameData);
mInputFrameData =
(uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
CHECK(mInputFrameData != NULL);
@@ -216,7 +187,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::releaseEncoder() {
PVCleanUpVideoEncoder(mHandle);
- delete mInputFrameData;
+ free(mInputFrameData);
mInputFrameData = NULL;
delete mEncParams;
@@ -486,6 +457,17 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
mVideoHeight = def->format.video.nFrameHeight;
mVideoFrameRate = def->format.video.xFramerate >> 16;
mVideoColorFormat = def->format.video.eColorFormat;
+
+ OMX_PARAM_PORTDEFINITIONTYPE *portDef =
+ &editPortInfo(0)->mDef;
+ portDef->format.video.nFrameWidth = mVideoWidth;
+ portDef->format.video.nFrameHeight = mVideoHeight;
+ portDef->format.video.xFramerate = def->format.video.xFramerate;
+ portDef->format.video.eColorFormat =
+ (OMX_COLOR_FORMATTYPE) mVideoColorFormat;
+ portDef = &editPortInfo(1)->mDef;
+ portDef->format.video.nFrameWidth = mVideoWidth;
+ portDef->format.video.nFrameHeight = mVideoHeight;
} else {
mVideoBitRate = def->format.video.nBitrate;
}
@@ -607,11 +589,7 @@ OMX_ERRORTYPE SoftMPEG4Encoder::internalSetParameter(
mStoreMetaDataInBuffers ? " true" : "false");
if (mStoreMetaDataInBuffers) {
- mVideoColorFormat == OMX_COLOR_FormatYUV420SemiPlanar;
- if (mInputFrameData == NULL) {
- mInputFrameData =
- (uint8_t *) malloc((mVideoWidth * mVideoHeight * 3 ) >> 1);
- }
+ mVideoColorFormat = OMX_COLOR_FormatAndroidOpaque;
}
return OMX_ErrorNone;
@@ -679,9 +657,8 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
mSawInputEOS = true;
}
- buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
if (inHeader->nFilledLen > 0) {
- uint8_t *inputData = NULL;
+ const uint8_t *inputData = NULL;
if (mStoreMetaDataInBuffers) {
if (inHeader->nFilledLen != 8) {
ALOGE("MetaData buffer is wrong size! "
@@ -691,24 +668,25 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
return;
}
inputData =
- extractGrallocData(inHeader->pBuffer + inHeader->nOffset,
- &srcBuffer);
+ extractGraphicBuffer(
+ mInputFrameData, (mVideoWidth * mVideoHeight * 3) >> 1,
+ inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
+ mVideoWidth, mVideoHeight);
if (inputData == NULL) {
ALOGE("Unable to extract gralloc buffer in metadata mode");
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
return;
}
- // TODO: Verify/convert pixel format enum
} else {
- inputData = (uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ inputData = (const uint8_t *)inHeader->pBuffer + inHeader->nOffset;
+ if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
+ ConvertYUV420SemiPlanarToYUV420Planar(
+ inputData, mInputFrameData, mVideoWidth, mVideoHeight);
+ inputData = mInputFrameData;
+ }
}
- if (mVideoColorFormat != OMX_COLOR_FormatYUV420Planar) {
- ConvertYUV420SemiPlanarToYUV420Planar(
- inputData, mInputFrameData, mVideoWidth, mVideoHeight);
- inputData = mInputFrameData;
- }
CHECK(inputData != NULL);
VideoEncFrameIO vin, vout;
@@ -717,7 +695,7 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
vin.height = ((mVideoHeight + 15) >> 4) << 4;
vin.pitch = ((mVideoWidth + 15) >> 4) << 4;
vin.timestamp = (inHeader->nTimeStamp + 500) / 1000; // in ms
- vin.yChan = inputData;
+ vin.yChan = (uint8_t *)inputData;
vin.uChan = vin.yChan + vin.height * vin.pitch;
vin.vChan = vin.uChan + ((vin.height * vin.pitch) >> 2);
@@ -744,7 +722,6 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
- releaseGrallocData(srcBuffer);
notifyEmptyBufferDone(inHeader);
outQueue.erase(outQueue.begin());
@@ -759,47 +736,6 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
}
}
-OMX_ERRORTYPE SoftMPEG4Encoder::getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index) {
- if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *(int32_t*)index = kStoreMetaDataExtensionIndex;
- return OMX_ErrorNone;
- }
- return OMX_ErrorUndefined;
-}
-
-uint8_t *SoftMPEG4Encoder::extractGrallocData(void *data, buffer_handle_t *buffer) {
- OMX_U32 type = *(OMX_U32*)data;
- status_t res;
- if (type != kMetadataBufferTypeGrallocSource) {
- ALOGE("Data passed in with metadata mode does not have type "
- "kMetadataBufferTypeGrallocSource (%d), has type %d instead",
- kMetadataBufferTypeGrallocSource, type);
- return NULL;
- }
- buffer_handle_t imgBuffer = *(buffer_handle_t*)((uint8_t*)data + 4);
-
- const Rect rect(mVideoWidth, mVideoHeight);
- uint8_t *img;
- res = GraphicBufferMapper::get().lock(imgBuffer,
- GRALLOC_USAGE_HW_VIDEO_ENCODER,
- rect, (void**)&img);
- if (res != OK) {
- ALOGE("%s: Unable to lock image buffer %p for access", __FUNCTION__,
- imgBuffer);
- return NULL;
- }
-
- *buffer = imgBuffer;
- return img;
-}
-
-void SoftMPEG4Encoder::releaseGrallocData(buffer_handle_t buffer) {
- if (mStoreMetaDataInBuffers) {
- GraphicBufferMapper::get().unlock(buffer);
- }
-}
-
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index c59a1b9..b0605b4 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -19,7 +19,7 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/foundation/ABase.h>
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
#include "mp4enc_api.h"
@@ -27,7 +27,7 @@ namespace android {
struct MediaBuffer;
-struct SoftMPEG4Encoder : public SimpleSoftOMXComponent {
+struct SoftMPEG4Encoder : public SoftVideoEncoderOMXComponent {
SoftMPEG4Encoder(
const char *name,
const OMX_CALLBACKTYPE *callbacks,
@@ -43,11 +43,6 @@ struct SoftMPEG4Encoder : public SimpleSoftOMXComponent {
virtual void onQueueFilled(OMX_U32 portIndex);
- // Override SoftOMXComponent methods
-
- virtual OMX_ERRORTYPE getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index);
-
protected:
virtual ~SoftMPEG4Encoder();
@@ -86,9 +81,6 @@ private:
OMX_ERRORTYPE initEncoder();
OMX_ERRORTYPE releaseEncoder();
- uint8_t* extractGrallocData(void *data, buffer_handle_t *buffer);
- void releaseGrallocData(buffer_handle_t buffer);
-
DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG4Encoder);
};
diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk
index 4060a0a..e265104 100644
--- a/media/libstagefright/codecs/on2/enc/Android.mk
+++ b/media/libstagefright/codecs/on2/enc/Android.mk
@@ -12,10 +12,6 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/include \
frameworks/native/include/media/openmax \
-ifeq ($(TARGET_DEVICE), manta)
- LOCAL_CFLAGS += -DSURFACE_IS_BGR32
-endif
-
LOCAL_STATIC_LIBRARIES := \
libvpx
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index cabd6bd..eb621d5 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -50,90 +50,11 @@ static int GetCPUCoreCount() {
return cpuCoreCount;
}
-
-// This color conversion utility is copied from SoftMPEG4Encoder.cpp
-inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv,
- uint8_t* outyuv,
- int32_t width,
- int32_t height) {
- int32_t outYsize = width * height;
- uint32_t *outy = (uint32_t *) outyuv;
- uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
- uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
-
- /* Y copying */
- memcpy(outy, inyuv, outYsize);
-
- /* U & V copying */
- uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
- for (int32_t i = height >> 1; i > 0; --i) {
- for (int32_t j = width >> 2; j > 0; --j) {
- uint32_t temp = *inyuv_4++;
- uint32_t tempU = temp & 0xFF;
- tempU = tempU | ((temp >> 8) & 0xFF00);
-
- uint32_t tempV = (temp >> 8) & 0xFF;
- tempV = tempV | ((temp >> 16) & 0xFF00);
-
- // Flip U and V
- *outcb++ = tempV;
- *outcr++ = tempU;
- }
- }
-}
-
-static void ConvertRGB32ToPlanar(
- const uint8_t *src, uint8_t *dstY, int32_t width, int32_t height) {
- CHECK((width & 1) == 0);
- CHECK((height & 1) == 0);
-
- uint8_t *dstU = dstY + width * height;
- uint8_t *dstV = dstU + (width / 2) * (height / 2);
-
- for (int32_t y = 0; y < height; ++y) {
- for (int32_t x = 0; x < width; ++x) {
-#ifdef SURFACE_IS_BGR32
- unsigned blue = src[4 * x];
- unsigned green = src[4 * x + 1];
- unsigned red= src[4 * x + 2];
-#else
- unsigned red= src[4 * x];
- unsigned green = src[4 * x + 1];
- unsigned blue = src[4 * x + 2];
-#endif
-
- unsigned luma =
- ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
-
- dstY[x] = luma;
-
- if ((x & 1) == 0 && (y & 1) == 0) {
- unsigned U =
- ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
-
- unsigned V =
- ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
-
- dstU[x / 2] = U;
- dstV[x / 2] = V;
- }
- }
-
- if ((y & 1) == 0) {
- dstU += width / 2;
- dstV += width / 2;
- }
-
- src += 4 * width;
- dstY += width;
- }
-}
-
SoftVPXEncoder::SoftVPXEncoder(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ : SoftVideoEncoderOMXComponent(name, callbacks, appData, component),
mCodecContext(NULL),
mCodecConfiguration(NULL),
mCodecInterface(NULL),
@@ -157,7 +78,6 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name,
mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
mConversionBuffer(NULL),
mInputDataIsMeta(false),
- mGrallocModule(NULL),
mKeyFrameRequested(false) {
memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
mTemporalLayerBitrateRatio[0] = 100;
@@ -447,13 +367,12 @@ status_t SoftVPXEncoder::initEncoder() {
}
}
- if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || mInputDataIsMeta) {
+ if (mColorFormat != OMX_COLOR_FormatYUV420Planar || mInputDataIsMeta) {
+ free(mConversionBuffer);
+ mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
if (mConversionBuffer == NULL) {
- mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
- if (mConversionBuffer == NULL) {
- ALOGE("Allocating conversion buffer failed.");
- return UNKNOWN_ERROR;
- }
+ ALOGE("Allocating conversion buffer failed.");
+ return UNKNOWN_ERROR;
}
}
return OK;
@@ -473,7 +392,7 @@ status_t SoftVPXEncoder::releaseEncoder() {
}
if (mConversionBuffer != NULL) {
- delete mConversionBuffer;
+ free(mConversionBuffer);
mConversionBuffer = NULL;
}
@@ -1035,49 +954,28 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
return;
}
- uint8_t *source =
+ const uint8_t *source =
inputBufferHeader->pBuffer + inputBufferHeader->nOffset;
if (mInputDataIsMeta) {
- CHECK_GE(inputBufferHeader->nFilledLen,
- 4 + sizeof(buffer_handle_t));
-
- uint32_t bufferType = *(uint32_t *)source;
- CHECK_EQ(bufferType, kMetadataBufferTypeGrallocSource);
-
- if (mGrallocModule == NULL) {
- CHECK_EQ(0, hw_get_module(
- GRALLOC_HARDWARE_MODULE_ID, &mGrallocModule));
+ source = extractGraphicBuffer(
+ mConversionBuffer, mWidth * mHeight * 3 / 2,
+ source, inputBufferHeader->nFilledLen,
+ mWidth, mHeight);
+ if (source == NULL) {
+ ALOGE("Unable to extract gralloc buffer in metadata mode");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, 0);
+ return;
}
-
- const gralloc_module_t *grmodule =
- (const gralloc_module_t *)mGrallocModule;
-
- buffer_handle_t handle = *(buffer_handle_t *)(source + 4);
-
- void *bits;
- CHECK_EQ(0,
- grmodule->lock(
- grmodule, handle,
- GRALLOC_USAGE_SW_READ_OFTEN
- | GRALLOC_USAGE_SW_WRITE_NEVER,
- 0, 0, mWidth, mHeight, &bits));
-
- ConvertRGB32ToPlanar(
- (const uint8_t *)bits, mConversionBuffer, mWidth, mHeight);
-
- source = mConversionBuffer;
-
- CHECK_EQ(0, grmodule->unlock(grmodule, handle));
} else if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
- ConvertSemiPlanarToPlanar(
+ ConvertYUV420SemiPlanarToYUV420Planar(
source, mConversionBuffer, mWidth, mHeight);
source = mConversionBuffer;
}
vpx_image_t raw_frame;
vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight,
- kInputBufferAlignment, source);
+ kInputBufferAlignment, (uint8_t *)source);
vpx_enc_frame_flags_t flags = 0;
if (mTemporalPatternLength > 0) {
@@ -1153,15 +1051,6 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
-OMX_ERRORTYPE SoftVPXEncoder::getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index) {
- if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *(int32_t*)index = kStoreMetaDataExtensionIndex;
- return OMX_ErrorNone;
- }
- return SimpleSoftOMXComponent::getExtensionIndex(name, index);
-}
-
} // namespace android
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 5b4c954..f4c1564 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -18,7 +18,7 @@
#define SOFT_VPX_ENCODER_H_
-#include "SimpleSoftOMXComponent.h"
+#include "SoftVideoEncoderOMXComponent.h"
#include <OMX_VideoExt.h>
#include <OMX_IndexExt.h>
@@ -59,7 +59,7 @@ namespace android {
// - OMX timestamps are in microseconds, therefore
// encoder timebase is fixed to 1/1000000
-struct SoftVPXEncoder : public SimpleSoftOMXComponent {
+struct SoftVPXEncoder : public SoftVideoEncoderOMXComponent {
SoftVPXEncoder(const char *name,
const OMX_CALLBACKTYPE *callbacks,
OMX_PTR appData,
@@ -87,9 +87,6 @@ protected:
// encoding of the frame
virtual void onQueueFilled(OMX_U32 portIndex);
- virtual OMX_ERRORTYPE getExtensionIndex(
- const char *name, OMX_INDEXTYPE *index);
-
private:
enum TemporalReferences {
// For 1 layer case: reference all (last, golden, and alt ref), but only
@@ -233,7 +230,6 @@ private:
uint8_t* mConversionBuffer;
bool mInputDataIsMeta;
- const hw_module_t *mGrallocModule;
bool mKeyFrameRequested;
diff --git a/media/libstagefright/include/SoftVideoEncoderOMXComponent.h b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
new file mode 100644
index 0000000..b3b810d
--- /dev/null
+++ b/media/libstagefright/include/SoftVideoEncoderOMXComponent.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
+
+#define SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
+
+#include "SimpleSoftOMXComponent.h"
+#include <system/window.h>
+
+struct hw_module_t;
+
+namespace android {
+
+struct SoftVideoEncoderOMXComponent : public SimpleSoftOMXComponent {
+ SoftVideoEncoderOMXComponent(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ static void ConvertFlexYUVToPlanar(
+ uint8_t *dst, size_t dstStride, size_t dstVStride,
+ struct android_ycbcr *ycbcr, int32_t width, int32_t height);
+
+ static void ConvertYUV420SemiPlanarToYUV420Planar(
+ const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height);
+
+ static void ConvertRGB32ToPlanar(
+ uint8_t *dstY, size_t dstStride, size_t dstVStride,
+ const uint8_t *src, size_t width, size_t height, size_t srcStride,
+ bool bgr);
+
+ const uint8_t *extractGraphicBuffer(
+ uint8_t *dst, size_t dstSize, const uint8_t *src, size_t srcSize,
+ size_t width, size_t height) const;
+
+ virtual OMX_ERRORTYPE getExtensionIndex(const char *name, OMX_INDEXTYPE *index);
+
+ enum {
+ kInputPortIndex = 0,
+ kOutputPortIndex = 1,
+ };
+
+private:
+ mutable const hw_module_t *mGrallocModule;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftVideoEncoderOMXComponent);
+};
+
+} // namespace android
+
+#endif // SOFT_VIDEO_ENCODER_OMX_COMPONENT_H_
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index cd912e7..aaa8334 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -1,6 +1,10 @@
LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
+ifeq ($(TARGET_DEVICE), manta)
+ LOCAL_CFLAGS += -DSURFACE_IS_BGR32
+endif
+
LOCAL_SRC_FILES:= \
GraphicBufferSource.cpp \
OMX.cpp \
@@ -10,6 +14,7 @@ LOCAL_SRC_FILES:= \
SoftOMXComponent.cpp \
SoftOMXPlugin.cpp \
SoftVideoDecoderOMXComponent.cpp \
+ SoftVideoEncoderOMXComponent.cpp \
LOCAL_C_INCLUDES += \
$(TOP)/frameworks/av/media/libstagefright \
@@ -18,6 +23,7 @@ LOCAL_C_INCLUDES += \
LOCAL_SHARED_LIBRARIES := \
libbinder \
+ libhardware \
libmedia \
libutils \
liblog \
diff --git a/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
new file mode 100644
index 0000000..8bff142
--- /dev/null
+++ b/media/libstagefright/omx/SoftVideoEncoderOMXComponent.cpp
@@ -0,0 +1,311 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftVideoEncoderOMXComponent"
+#include <utils/Log.h>
+
+#include "include/SoftVideoEncoderOMXComponent.h"
+
+#include <hardware/gralloc.h>
+#include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
+
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+namespace android {
+
+SoftVideoEncoderOMXComponent::SoftVideoEncoderOMXComponent(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mGrallocModule(NULL) {
+}
+
+// static
+void SoftVideoEncoderOMXComponent::ConvertFlexYUVToPlanar(
+ uint8_t *dst, size_t dstStride, size_t dstVStride,
+ struct android_ycbcr *ycbcr, int32_t width, int32_t height) {
+ const uint8_t *src = (const uint8_t *)ycbcr->y;
+ const uint8_t *srcU = (const uint8_t *)ycbcr->cb;
+ const uint8_t *srcV = (const uint8_t *)ycbcr->cr;
+ uint8_t *dstU = dst + dstVStride * dstStride;
+ uint8_t *dstV = dstU + (dstVStride >> 1) * (dstStride >> 1);
+
+ for (size_t y = height; y > 0; --y) {
+ memcpy(dst, src, width);
+ dst += dstStride;
+ src += ycbcr->ystride;
+ }
+ if (ycbcr->cstride == ycbcr->ystride >> 1 && ycbcr->chroma_step == 1) {
+ // planar
+ for (size_t y = height >> 1; y > 0; --y) {
+ memcpy(dstU, srcU, width >> 1);
+ dstU += dstStride >> 1;
+ srcU += ycbcr->cstride;
+ memcpy(dstV, srcV, width >> 1);
+ dstV += dstStride >> 1;
+ srcV += ycbcr->cstride;
+ }
+ } else {
+ // arbitrary
+ for (size_t y = height >> 1; y > 0; --y) {
+ for (size_t x = width >> 1; x > 0; --x) {
+ *dstU++ = *srcU;
+ *dstV++ = *srcV;
+ srcU += ycbcr->chroma_step;
+ srcV += ycbcr->chroma_step;
+ }
+ dstU += (dstStride >> 1) - (width >> 1);
+ dstV += (dstStride >> 1) - (width >> 1);
+ srcU += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
+ srcV += ycbcr->cstride - (width >> 1) * ycbcr->chroma_step;
+ }
+ }
+}
+
+// static
+void SoftVideoEncoderOMXComponent::ConvertYUV420SemiPlanarToYUV420Planar(
+ const uint8_t *inYVU, uint8_t* outYUV, int32_t width, int32_t height) {
+ // TODO: add support for stride
+ int32_t outYsize = width * height;
+ uint32_t *outY = (uint32_t *) outYUV;
+ uint16_t *outCb = (uint16_t *) (outYUV + outYsize);
+ uint16_t *outCr = (uint16_t *) (outYUV + outYsize + (outYsize >> 2));
+
+ /* Y copying */
+ memcpy(outY, inYVU, outYsize);
+
+ /* U & V copying */
+ // FIXME this only works if width is multiple of 4
+ uint32_t *inYVU_4 = (uint32_t *) (inYVU + outYsize);
+ for (int32_t i = height >> 1; i > 0; --i) {
+ for (int32_t j = width >> 2; j > 0; --j) {
+ uint32_t temp = *inYVU_4++;
+ uint32_t tempU = temp & 0xFF;
+ tempU = tempU | ((temp >> 8) & 0xFF00);
+
+ uint32_t tempV = (temp >> 8) & 0xFF;
+ tempV = tempV | ((temp >> 16) & 0xFF00);
+
+ *outCb++ = tempU;
+ *outCr++ = tempV;
+ }
+ }
+}
+
+// static
+void SoftVideoEncoderOMXComponent::ConvertRGB32ToPlanar(
+ uint8_t *dstY, size_t dstStride, size_t dstVStride,
+ const uint8_t *src, size_t width, size_t height, size_t srcStride,
+ bool bgr) {
+ CHECK((width & 1) == 0);
+ CHECK((height & 1) == 0);
+
+ uint8_t *dstU = dstY + dstStride * dstVStride;
+ uint8_t *dstV = dstU + (dstStride >> 1) * (dstVStride >> 1);
+
+#ifdef SURFACE_IS_BGR32
+ bgr = !bgr;
+#endif
+
+ const size_t redOffset = bgr ? 2 : 0;
+ const size_t greenOffset = 1;
+ const size_t blueOffset = bgr ? 0 : 2;
+
+ for (size_t y = 0; y < height; ++y) {
+ for (size_t x = 0; x < width; ++x) {
+ unsigned red = src[redOffset];
+ unsigned green = src[greenOffset];
+ unsigned blue = src[blueOffset];
+
+ // using ITU-R BT.601 conversion matrix
+ unsigned luma =
+ ((red * 66 + green * 129 + blue * 25) >> 8) + 16;
+
+ dstY[x] = luma;
+
+ if ((x & 1) == 0 && (y & 1) == 0) {
+ unsigned U =
+ ((-red * 38 - green * 74 + blue * 112) >> 8) + 128;
+
+ unsigned V =
+ ((red * 112 - green * 94 - blue * 18) >> 8) + 128;
+
+ dstU[x >> 1] = U;
+ dstV[x >> 1] = V;
+ }
+ src += 4;
+ }
+
+ if ((y & 1) == 0) {
+ dstU += dstStride >> 1;
+ dstV += dstStride >> 1;
+ }
+
+ src += srcStride - 4 * width;
+ dstY += dstStride;
+ }
+}
+
+const uint8_t *SoftVideoEncoderOMXComponent::extractGraphicBuffer(
+ uint8_t *dst, size_t dstSize,
+ const uint8_t *src, size_t srcSize,
+ size_t width, size_t height) const {
+ size_t dstStride = width;
+ size_t dstVStride = height;
+
+ MetadataBufferType bufferType = *(MetadataBufferType *)src;
+ bool usingGraphicBuffer = bufferType == kMetadataBufferTypeGraphicBuffer;
+ if (!usingGraphicBuffer && bufferType != kMetadataBufferTypeGrallocSource) {
+ ALOGE("Unsupported metadata type (%d)", bufferType);
+ return NULL;
+ }
+
+ if (mGrallocModule == NULL) {
+ CHECK_EQ(0, hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &mGrallocModule));
+ }
+
+ const gralloc_module_t *grmodule =
+ (const gralloc_module_t *)mGrallocModule;
+
+ buffer_handle_t handle;
+ int format;
+ size_t srcStride;
+ size_t srcVStride;
+ if (usingGraphicBuffer) {
+ if (srcSize < 4 + sizeof(GraphicBuffer *)) {
+ ALOGE("Metadata is too small (%zu vs %zu)", srcSize, 4 + sizeof(GraphicBuffer *));
+ return NULL;
+ }
+
+ GraphicBuffer *buffer = *(GraphicBuffer **)(src + 4);
+ handle = buffer->handle;
+ format = buffer->format;
+ srcStride = buffer->stride;
+ srcVStride = buffer->height;
+ // convert stride from pixels to bytes
+ if (format != HAL_PIXEL_FORMAT_YV12 &&
+ format != HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ // TODO do we need to support other formats?
+ srcStride *= 4;
+ }
+ } else {
+ // TODO: remove this part. Check if anyone uses this.
+
+ if (srcSize < 4 + sizeof(buffer_handle_t)) {
+ ALOGE("Metadata is too small (%zu vs %zu)", srcSize, 4 + sizeof(buffer_handle_t));
+ return NULL;
+ }
+
+ handle = *(buffer_handle_t *)(src + 4);
+ // assume HAL_PIXEL_FORMAT_RGBA_8888
+ // there is no way to get the src stride without the graphic buffer
+ format = HAL_PIXEL_FORMAT_RGBA_8888;
+ srcStride = width * 4;
+ srcVStride = height;
+ }
+
+ size_t neededSize =
+ dstStride * dstVStride + (width >> 1)
+ + (dstStride >> 1) * ((dstVStride >> 1) + (height >> 1) - 1);
+ if (dstSize < neededSize) {
+ ALOGE("destination buffer is too small (%zu vs %zu)", dstSize, neededSize);
+ return NULL;
+ }
+
+ void *bits = NULL;
+ struct android_ycbcr ycbcr;
+ status_t res;
+ if (format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+ res = grmodule->lock_ycbcr(
+ grmodule, handle,
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
+ 0, 0, width, height, &ycbcr);
+ } else {
+ res = grmodule->lock(
+ grmodule, handle,
+ GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_NEVER,
+ 0, 0, width, height, &bits);
+ }
+ if (res != OK) {
+ ALOGE("Unable to lock image buffer %p for access", handle);
+ return NULL;
+ }
+
+ switch (format) {
+ case HAL_PIXEL_FORMAT_YV12: // YCrCb / YVU planar
+ // convert to flex YUV
+ ycbcr.y = bits;
+ ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
+ ycbcr.cb = (uint8_t *)ycbcr.cr + (srcStride >> 1) * (srcVStride >> 1);
+ ycbcr.chroma_step = 1;
+ ycbcr.cstride = srcVStride >> 1;
+ ycbcr.ystride = srcVStride;
+ ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+ break;
+ case HAL_PIXEL_FORMAT_YCrCb_420_SP: // YCrCb / YVU semiplanar, NV21
+ // convert to flex YUV
+ ycbcr.y = bits;
+ ycbcr.cr = (uint8_t *)bits + srcStride * srcVStride;
+ ycbcr.cb = (uint8_t *)ycbcr.cr + 1;
+ ycbcr.chroma_step = 2;
+ ycbcr.cstride = srcVStride;
+ ycbcr.ystride = srcVStride;
+ ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+ break;
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ ConvertFlexYUVToPlanar(dst, dstStride, dstVStride, &ycbcr, width, height);
+ break;
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ ConvertRGB32ToPlanar(
+ dst, dstStride, dstVStride,
+ (const uint8_t *)bits, width, height, srcStride,
+ format == HAL_PIXEL_FORMAT_BGRA_8888);
+ break;
+ default:
+ ALOGE("Unsupported pixel format %#x", format);
+ dst = NULL;
+ break;
+ }
+
+ if (grmodule->unlock(grmodule, handle) != OK) {
+ ALOGE("Unable to unlock image buffer %p for access", handle);
+ }
+
+ return dst;
+}
+
+OMX_ERRORTYPE SoftVideoEncoderOMXComponent::getExtensionIndex(
+ const char *name, OMX_INDEXTYPE *index) {
+ if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers") ||
+ !strcmp(name, "OMX.google.android.index.storeGraphicBufferInMetaData")) {
+ *(int32_t*)index = kStoreMetaDataExtensionIndex;
+ return OMX_ErrorNone;
+ }
+ return SimpleSoftOMXComponent::getExtensionIndex(name, index);
+}
+
+} // namespace android
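
For reference, the integer coefficients used by ConvertRGB32ToPlanar above are the usual fixed-point BT.601 video-range ones, and they can be sanity-checked on single pixels. The sketch below is not part of the patch (Rgb2Yuv601 is a hypothetical helper); it repeats the same math for pixels whose intermediate sums stay non-negative, so it avoids right-shifting negative values:

    #include <stdio.h>

    // Same fixed-point math as ConvertRGB32ToPlanar: Y in [16, 235], Cb/Cr centered on 128.
    static void Rgb2Yuv601(int r, int g, int b, int *y, int *cb, int *cr) {
        *y  = ((r * 66 + g * 129 + b * 25) >> 8) + 16;
        *cb = ((-r * 38 - g * 74 + b * 112) >> 8) + 128;
        *cr = ((r * 112 - g * 94 - b * 18) >> 8) + 128;
    }

    int main() {
        int y, cb, cr;
        Rgb2Yuv601(255, 255, 255, &y, &cb, &cr);
        printf("white -> Y=%d Cb=%d Cr=%d\n", y, cb, cr);   // 235 128 128
        Rgb2Yuv601(0, 0, 0, &y, &cb, &cr);
        printf("black -> Y=%d Cb=%d Cr=%d\n", y, cb, cr);   // 16 128 128
        return 0;
    }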