author     James Dong <jdong@google.com>  2011-05-31 18:53:46 -0700
committer  James Dong <jdong@google.com>  2011-06-02 12:32:46 -0700
commit     0c1bc742181ded4930842b46e9507372f0b1b963
tree       c952bfcb03ff7cce5e0f91ad7d25c67a2fdd39cb
parent     92a746c3b18d035189f596ce32847bf26247aaca
Initial-checkin for ON2 Software AVC/H264 decoder
o When NEON is present, the performance gain of the On2 AVC software decoder
  over the PV software decoder is more than 30%.
o In addition, it fixes some known PV software decoder issues, such as missing
  output frames.
o Allow both the PV and On2 software AVC decoders to be available for easy
  comparison.
o Change the number of output frames from 8 to 16.

Change-Id: I567ad1842025ead7092f0c47e3513d6d9ca232dd
Diffstat (limited to 'media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp')
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 515
1 file changed, 515 insertions(+), 0 deletions(-)
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
new file mode 100644
index 0000000..259fbc9
--- /dev/null
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -0,0 +1,515 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftAVC"
+#include <utils/Log.h>
+
+#include "SoftAVC.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftAVC::SoftAVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mHandle(NULL),
+ mInputBufferCount(0),
+ mWidth(320),
+ mHeight(240),
+ mPictureSize(mWidth * mHeight * 3 / 2),
+ mCropLeft(0),
+ mCropTop(0),
+ mFirstPicture(NULL),
+ mFirstPictureId(-1),
+ mPicId(0),
+ mHeadersDecoded(false),
+ mEOSStatus(INPUT_DATA_AVAILABLE),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftAVC::~SoftAVC() {
+ H264SwDecRelease(mHandle);
+ mHandle = NULL;
+
+ while (mPicToHeaderMap.size() != 0) {
+ OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.editValueAt(0);
+ mPicToHeaderMap.removeItemsAt(0);
+ delete header;
+ header = NULL;
+ }
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+ List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
+ CHECK(outQueue.empty());
+ CHECK(inQueue.empty());
+
+ delete[] mFirstPicture;
+}
+
+void SoftAVC::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = kInputPortIndex;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumInputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 8192;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_AVC);
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
+ def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+ def.format.video.pNativeWindow = NULL;
+
+ addPort(def);
+
+ def.nPortIndex = kOutputPortIndex;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumOutputBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainVideo;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
+ def.format.video.pNativeRender = NULL;
+ def.format.video.nFrameWidth = mWidth;
+ def.format.video.nFrameHeight = mHeight;
+ def.format.video.nStride = def.format.video.nFrameWidth;
+ def.format.video.nSliceHeight = def.format.video.nFrameHeight;
+ def.format.video.nBitrate = 0;
+ def.format.video.xFramerate = 0;
+ def.format.video.bFlagErrorConcealment = OMX_FALSE;
+ def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ def.format.video.pNativeWindow = NULL;
+
+ def.nBufferSize =
+ (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;
+
+ addPort(def);
+}
+
+status_t SoftAVC::initDecoder() {
+ if (H264SwDecInit(&mHandle, 1) == H264SWDEC_OK) {
+ return OK;
+ }
+ return UNKNOWN_ERROR;
+}
+
+OMX_ERRORTYPE SoftAVC::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > kOutputPortIndex) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ if (formatParams->nPortIndex == kInputPortIndex) {
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
+ formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+ formatParams->xFramerate = 0;
+ } else {
+ CHECK(formatParams->nPortIndex == kOutputPortIndex);
+
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ formatParams->xFramerate = 0;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAVC::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "video_decoder.avc",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamVideoPortFormat:
+ {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
+
+ if (formatParams->nPortIndex > kOutputPortIndex) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (formatParams->nIndex != 0) {
+ return OMX_ErrorNoMore;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftAVC::getConfig(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexConfigCommonOutputCrop:
+ {
+ OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;
+
+ if (rectParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ rectParams->nLeft = mCropLeft;
+ rectParams->nTop = mCropTop;
+ rectParams->nWidth = mWidth;
+ rectParams->nHeight = mHeight;
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return OMX_ErrorUnsupportedIndex;
+ }
+}
+
+void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+ H264SwDecRet ret = H264SWDEC_PIC_RDY;
+ status_t err = OK;
+ bool portSettingsChanged = false;
+ while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
+ && outQueue.size() == kNumOutputBuffers) {
+
+ if (mEOSStatus == INPUT_EOS_SEEN) {
+ drainAllOutputBuffers();
+ return;
+ }
+
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ ++mPicId;
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ mEOSStatus = INPUT_EOS_SEEN;
+ continue;
+ }
+
+ OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
+ memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
+ header->nTimeStamp = inHeader->nTimeStamp;
+ header->nFlags = inHeader->nFlags;
+ mPicToHeaderMap.add(mPicId, header);
+ inQueue.erase(inQueue.begin());
+
+ H264SwDecInput inPicture;
+ H264SwDecOutput outPicture;
+ memset(&inPicture, 0, sizeof(inPicture));
+ inPicture.dataLen = inHeader->nFilledLen;
+ inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;
+ inPicture.picId = mPicId;
+ inPicture.intraConcealmentMethod = 1;
+ H264SwDecPicture decodedPicture;
+
+ while (inPicture.dataLen > 0) {
+ ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);
+ if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
+ ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
+ inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
+ inPicture.pStream = outPicture.pStrmCurrPos;
+ if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY) {
+ mHeadersDecoded = true;
+ H264SwDecInfo decoderInfo;
+ CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);
+
+ if (handlePortSettingChangeEvent(&decoderInfo)) {
+ portSettingsChanged = true;
+ }
+
+ if (decoderInfo.croppingFlag &&
+ handleCropRectEvent(&decoderInfo.cropParams)) {
+ portSettingsChanged = true;
+ }
+ }
+ } else {
+ if (portSettingsChanged) {
+ if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
+ == H264SWDEC_PIC_RDY) {
+
+ // Save this output buffer; otherwise, it will be
+ // lost during dynamic port reconfiguration because
+ // OpenMAX client will delete _all_ output buffers
+ // in the process.
+ saveFirstOutputBuffer(
+ decodedPicture.picId,
+ (uint8_t *)decodedPicture.pOutputPicture);
+ }
+ }
+ inPicture.dataLen = 0;
+ if (ret < 0) {
+ LOGE("Decoder failed: %d", ret);
+ err = ERROR_MALFORMED;
+ }
+ }
+ }
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ if (portSettingsChanged) {
+ portSettingsChanged = false;
+ return;
+ }
+
+ if (mFirstPicture && !outQueue.empty()) {
+ drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
+ delete[] mFirstPicture;
+ mFirstPicture = NULL;
+ mFirstPictureId = -1;
+ }
+
+ while (!outQueue.empty() &&
+ mHeadersDecoded &&
+ H264SwDecNextPicture(mHandle, &decodedPicture, 0)
+ == H264SWDEC_PIC_RDY) {
+
+ int32_t picId = decodedPicture.picId;
+ uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
+ drainOneOutputBuffer(picId, data);
+ }
+
+ if (err != OK) {
+ notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
+ }
+ }
+}
+
+bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) {
+ if (mWidth != info->picWidth || mHeight != info->picHeight) {
+ mWidth = info->picWidth;
+ mHeight = info->picHeight;
+ mPictureSize = mWidth * mHeight * 3 / 2;
+ updatePortDefinitions();
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return true;
+ }
+
+ return false;
+}
+
+bool SoftAVC::handleCropRectEvent(const CropParams *crop) {
+ if (mCropLeft != crop->cropLeftOffset ||
+ mCropTop != crop->cropTopOffset ||
+ mWidth != crop->cropOutWidth ||
+ mHeight != crop->cropOutHeight) {
+
+ mCropLeft = crop->cropLeftOffset;
+ mCropTop = crop->cropTopOffset;
+ mWidth = crop->cropOutWidth;
+ mHeight = crop->cropOutHeight;
+ mPictureSize = mWidth * mHeight * 3 / 2;
+
+ notify(OMX_EventPortSettingsChanged, 1,
+ OMX_IndexConfigCommonOutputCrop, NULL);
+
+ return true;
+ }
+ return false;
+}
+
+void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
+ CHECK(mFirstPicture == NULL);
+ mFirstPictureId = picId;
+
+ mFirstPicture = new uint8_t[mPictureSize];
+ memcpy(mFirstPicture, data, mPictureSize);
+}
+
+void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+ BufferInfo *outInfo = *outQueue.begin();
+ outQueue.erase(outQueue.begin());
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+ OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
+ outHeader->nTimeStamp = header->nTimeStamp;
+ outHeader->nFlags = header->nFlags;
+ outHeader->nFilledLen = mPictureSize;
+ memcpy(outHeader->pBuffer + outHeader->nOffset,
+ data, mPictureSize);
+ mPicToHeaderMap.removeItem(picId);
+ delete header;
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+}
+
+bool SoftAVC::drainAllOutputBuffers() {
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+ H264SwDecPicture decodedPicture;
+
+ while (!outQueue.empty()) {
+ BufferInfo *outInfo = *outQueue.begin();
+ outQueue.erase(outQueue.begin());
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+ if (mHeadersDecoded &&
+ H264SWDEC_PIC_RDY ==
+ H264SwDecNextPicture(mHandle, &decodedPicture, 1 /* flush */)) {
+
+ int32_t picId = decodedPicture.picId;
+ CHECK(mPicToHeaderMap.indexOfKey(picId) >= 0);
+
+ memcpy(outHeader->pBuffer + outHeader->nOffset,
+ decodedPicture.pOutputPicture,
+ mPictureSize);
+
+ OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
+ outHeader->nTimeStamp = header->nTimeStamp;
+ outHeader->nFlags = header->nFlags;
+ outHeader->nFilledLen = mPictureSize;
+ mPicToHeaderMap.removeItem(picId);
+ delete header;
+ } else {
+ outHeader->nTimeStamp = 0;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mEOSStatus = OUTPUT_FRAMES_FLUSHED;
+ }
+
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ }
+
+ return true;
+}
+
+void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == kInputPortIndex) {
+ mEOSStatus = INPUT_DATA_AVAILABLE;
+ }
+}
+
+void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+void SoftAVC::updatePortDefinitions() {
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
+ def = &editPortInfo(1)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nStride = def->format.video.nFrameWidth;
+ def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+
+ def->nBufferSize =
+ (def->format.video.nFrameWidth
+ * def->format.video.nFrameHeight * 3) / 2;
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftAVC(name, callbacks, appData, component);
+}
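
Note on the exported factory: createSoftOMXComponent, defined at the end of the file above, is the entry point through which a host process obtains a SoftAVC instance. The sketch below is illustrative only and is not part of this commit; the shared-library name, the component name string, and the assumption that the symbol is exported with C linkage are placeholders chosen for demonstration, not something this diff establishes.

// Illustrative sketch only -- not part of this commit. It shows how a host
// could resolve the createSoftOMXComponent factory from a shared library at
// runtime. Library name, component name, and C linkage of the symbol are
// assumptions made for this example.

#include <dlfcn.h>
#include <stdio.h>

#include <OMX_Component.h>  // OMX_COMPONENTTYPE, OMX_CALLBACKTYPE, OMX_PTR

namespace android { struct SoftOMXComponent; }  // opaque to this sketch

// Matches the factory signature at the end of SoftAVC.cpp.
typedef android::SoftOMXComponent *(*CreateSoftOMXComponentFunc)(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component);

android::SoftOMXComponent *loadSoftAVC(
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component) {
    // Hypothetical library name; the real build target is determined by the
    // decoder's makefile, which is not part of this file.
    void *libHandle = dlopen("libstagefright_soft_h264dec.so", RTLD_NOW);
    if (libHandle == NULL) {
        fprintf(stderr, "dlopen failed: %s\n", dlerror());
        return NULL;
    }

    CreateSoftOMXComponentFunc createFunc =
        (CreateSoftOMXComponentFunc)dlsym(libHandle, "createSoftOMXComponent");
    if (createFunc == NULL) {
        fprintf(stderr, "dlsym failed: %s\n", dlerror());
        dlclose(libHandle);
        return NULL;
    }

    // The name string is simply forwarded to SoftAVC's constructor; the value
    // used here is a placeholder.
    return (*createFunc)(
            "OMX.google.h264.decoder", callbacks, appData, component);
}

In practice, component registration and loading live in the OMX plugin code, which is outside the scope of this file.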