Diffstat (limited to 'media/libstagefright/codecs')
-rw-r--r--  media/libstagefright/codecs/aacdec/Android.mk | 5
-rw-r--r--  media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp | 372
-rw-r--r--  media/libstagefright/codecs/aacdec/DrcPresModeWrap.h | 62
-rw-r--r--  media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 740
-rw-r--r--  media/libstagefright/codecs/aacdec/SoftAAC2.h | 29
-rw-r--r--  media/libstagefright/codecs/aacenc/Android.mk | 6
-rw-r--r--  media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp | 79
-rw-r--r--  media/libstagefright/codecs/aacenc/SoftAACEncoder2.h | 2
-rw-r--r--  media/libstagefright/codecs/aacenc/basic_op/oper_32b.c | 4
-rw-r--r--  media/libstagefright/codecs/aacenc/src/aacenc.c | 8
-rw-r--r--  media/libstagefright/codecs/aacenc/src/bitenc.c | 3
-rw-r--r--  media/libstagefright/codecs/aacenc/src/psy_main.c | 6
-rw-r--r--  media/libstagefright/codecs/aacenc/src/qc_main.c | 8
-rw-r--r--  media/libstagefright/codecs/aacenc/src/tns.c | 4
-rw-r--r--  media/libstagefright/codecs/amrnb/common/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/amrnb/dec/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/amrnb/enc/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/amrwb/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/amrwbenc/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/amrwbenc/src/autocorr.c | 4
-rw-r--r--  media/libstagefright/codecs/amrwbenc/src/convolve.c | 4
-rw-r--r--  media/libstagefright/codecs/amrwbenc/src/pitch_f4.c | 3
-rw-r--r--  media/libstagefright/codecs/amrwbenc/src/syn_filt.c | 4
-rw-r--r--  media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c | 4
-rw-r--r--  media/libstagefright/codecs/avc/common/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/avc/enc/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp | 15
-rw-r--r--  media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h | 4
-rw-r--r--  media/libstagefright/codecs/common/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/flac/enc/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp | 25
-rw-r--r--  media/libstagefright/codecs/g711/dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/gsm/dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/hevcdec/Android.mk | 30
-rw-r--r--  media/libstagefright/codecs/hevcdec/SoftHEVC.cpp | 720
-rw-r--r--  media/libstagefright/codecs/hevcdec/SoftHEVC.h | 125
-rw-r--r--  media/libstagefright/codecs/m4v_h263/dec/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp | 2
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h | 4
-rw-r--r--  media/libstagefright/codecs/mp3dec/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/mp3dec/SoftMP3.cpp | 20
-rw-r--r--  media/libstagefright/codecs/on2/dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 84
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.h | 6
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp | 333
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 81
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 81
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/SoftAVC.h | 5
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c | 3
-rwxr-xr-x  media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c | 5
-rwxr-xr-x  media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c | 3
-rw-r--r--  media/libstagefright/codecs/opus/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/opus/dec/Android.mk | 19
-rw-r--r--  media/libstagefright/codecs/opus/dec/SoftOpus.cpp | 540
-rw-r--r--  media/libstagefright/codecs/opus/dec/SoftOpus.h | 94
-rw-r--r--  media/libstagefright/codecs/raw/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/vorbis/dec/Android.mk | 2
58 files changed, 3248 insertions, 350 deletions
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index ffa64f9..afb00aa 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -3,7 +3,8 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
- SoftAAC2.cpp
+ SoftAAC2.cpp \
+ DrcPresModeWrap.cpp
LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/include \
@@ -17,6 +18,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS :=
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
new file mode 100644
index 0000000..129ad65
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "DrcPresModeWrap.h"
+
+#include <assert.h>
+
+#define LOG_TAG "SoftAAC2_DrcWrapper"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+//#define DRC_PRES_MODE_WRAP_DEBUG
+
+#define GPM_ENCODER_TARGET_LEVEL 64
+#define MAX_TARGET_LEVEL 64
+
+CDrcPresModeWrapper::CDrcPresModeWrapper()
+{
+ mDataUpdate = true;
+
+ /* Data from streamInfo. */
+ /* Initialized to the same values as in the aac decoder */
+ mStreamPRL = -1;
+ mStreamDRCPresMode = -1;
+ mStreamNrAACChan = 0;
+ mStreamNrOutChan = 0;
+
+ /* Desired values (set by user). */
+ /* Initialized to the same values as in the aac decoder */
+ mDesTarget = -1;
+ mDesAttFactor = 0;
+ mDesBoostFactor = 0;
+ mDesHeavy = 0;
+
+ mEncoderTarget = -1;
+
+ /* Values from last time. */
+ /* Initialized to the same values as the desired values */
+ mLastTarget = -1;
+ mLastAttFactor = 0;
+ mLastBoostFactor = 0;
+ mLastHeavy = 0;
+}
+
+CDrcPresModeWrapper::~CDrcPresModeWrapper()
+{
+}
+
+void
+CDrcPresModeWrapper::setDecoderHandle(const HANDLE_AACDECODER handle)
+{
+ mHandleDecoder = handle;
+}
+
+void
+CDrcPresModeWrapper::submitStreamData(CStreamInfo* pStreamInfo)
+{
+ assert(pStreamInfo);
+
+ if (mStreamPRL != pStreamInfo->drcProgRefLev) {
+ mStreamPRL = pStreamInfo->drcProgRefLev;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcProgRefLev is %d\n", mStreamPRL);
+#endif
+ }
+
+ if (mStreamDRCPresMode != pStreamInfo->drcPresMode) {
+ mStreamDRCPresMode = pStreamInfo->drcPresMode;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcPresMode is %d\n", mStreamDRCPresMode);
+#endif
+ }
+
+ if (mStreamNrAACChan != pStreamInfo->aacNumChannels) {
+ mStreamNrAACChan = pStreamInfo->aacNumChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: aacNumChannels is %d\n", mStreamNrAACChan);
+#endif
+ }
+
+ if (mStreamNrOutChan != pStreamInfo->numChannels) {
+ mStreamNrOutChan = pStreamInfo->numChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: numChannels is %d\n", mStreamNrOutChan);
+#endif
+ }
+
+
+
+ if (mStreamNrOutChan<mStreamNrAACChan) {
+ mIsDownmix = true;
+ } else {
+ mIsDownmix = false;
+ }
+
+ if (mIsDownmix && (mStreamNrOutChan == 1)) {
+ mIsMonoDownmix = true;
+ } else {
+ mIsMonoDownmix = false;
+ }
+
+ if (mIsDownmix && mStreamNrOutChan == 2){
+ mIsStereoDownmix = true;
+ } else {
+ mIsStereoDownmix = false;
+ }
+
+}
+
+void
+CDrcPresModeWrapper::setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value)
+{
+ switch (param) {
+ case DRC_PRES_MODE_WRAP_DESIRED_TARGET:
+ mDesTarget = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR:
+ mDesAttFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR:
+ mDesBoostFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_HEAVY:
+ mDesHeavy = value;
+ break;
+ case DRC_PRES_MODE_WRAP_ENCODER_TARGET:
+ mEncoderTarget = value;
+ break;
+ default:
+ break;
+ }
+ mDataUpdate = true;
+}
+
+void
+CDrcPresModeWrapper::update()
+{
+ // Get Data from Decoder
+ int progRefLevel = mStreamPRL;
+ int drcPresMode = mStreamDRCPresMode;
+
+ // by default, do as desired
+ int newTarget = mDesTarget;
+ int newAttFactor = mDesAttFactor;
+ int newBoostFactor = mDesBoostFactor;
+ int newHeavy = mDesHeavy;
+
+ if (mDataUpdate) {
+ // sanity check
+ if (mDesTarget < MAX_TARGET_LEVEL){
+ mDesTarget = MAX_TARGET_LEVEL; // limit target level to -16 dB or below
+ newTarget = MAX_TARGET_LEVEL;
+ }
+
+ if (mEncoderTarget != -1) {
+ if (mDesTarget<124) { // if target level > -31 dB
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+ // no stereo or mono downmixing, calculated scaling of light DRC
+ /* use as little compression as possible */
+ newAttFactor = 0;
+ newBoostFactor = 0;
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ if (mEncoderTarget < mDesTarget) { // if mEncoderTarget > target level
+ // mEncoderTarget > target level > PRL
+ int calcFactor;
+ float calcFactor_norm;
+ // 0.0f < calcFactor_norm < 1.0f
+ calcFactor_norm = (float)(mDesTarget - progRefLevel) /
+ (float)(mEncoderTarget - progRefLevel);
+ calcFactor = (int)(calcFactor_norm*127.0f); // 0 <= calcFactor < 127
+ // calcFactor is the lower limit
+ newAttFactor = (calcFactor>newAttFactor) ? calcFactor : newAttFactor;
+ // newAttFactor will always equal calcFactor, as it was set to 0 above.
+ newBoostFactor = newAttFactor;
+ } else {
+ /* target level > mEncoderTarget > PRL */
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ newBoostFactor = 127;
+ }
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget<92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else {
+ // we perform a downmix, so we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB: light DRC only needed if we perform downmixing
+ if (mIsDownmix) { // we do downmixing
+ newAttFactor = 127;
+ }
+ }
+ }
+ else { // handle other used encoder target levels
+
+ // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+ if (mStreamNrAACChan > 6) {
+ drcPresMode = 0;
+ }
+
+ switch (drcPresMode) {
+ case 0:
+ default: // presentation mode not indicated
+ {
+
+ if (mDesTarget<124) { // if target level > -31 dB
+ // no stereo or mono downmixing
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127; // at least, use light compression
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget < 92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else{
+ // we perform a downmix, so we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ // Presentation mode 1 and 2 according to ETSI TS 101 154:
+ // Digital Video Broadcasting (DVB); Specification for the use of Video and Audio Coding
+ // in Broadcasting Applications based on the MPEG-2 Transport Stream,
+ // section C.5.4., "Decoding", and Table C.33
+ // ISO DRC -> newHeavy = 0 (Use light compression, MPEG-style)
+ // Compression_value -> newHeavy = 1 (Use heavy compression, DVB-style)
+ // scaling restricted -> newAttFactor = 127
+
+ case 1: // presentation mode 1, Light:-31/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ newHeavy = 1;
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ case 2: // presentation mode 2, Light:-23/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ if (mIsMonoDownmix) { // if mono downmix
+ newHeavy = 1;
+ } else {
+ newHeavy = 0;
+ newAttFactor = 127;
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ newHeavy = 0;
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ } // switch()
+ } // else of if (mEncoderTarget != -1)
+
+ // sanity again
+ if (newHeavy == 1) {
+ newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
+ newAttFactor = 127;
+ }
+
+ // update the decoder
+ if (newTarget != mLastTarget) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_REFERENCE_LEVEL, newTarget);
+ mLastTarget = newTarget;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newTarget != mDesTarget)
+ ALOGV("DRC presentation mode wrapper: forced target level to %d (from %d)\n", newTarget, mDesTarget);
+ else
+ ALOGV("DRC presentation mode wrapper: set target level to %d\n", newTarget);
+#endif
+ }
+
+ if (newAttFactor != mLastAttFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_ATTENUATION_FACTOR, newAttFactor);
+ mLastAttFactor = newAttFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newAttFactor != mDesAttFactor)
+ ALOGV("DRC presentation mode wrapper: forced attenuation factor to %d (from %d)\n", newAttFactor, mDesAttFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set attenuation factor to %d\n", newAttFactor);
+#endif
+ }
+
+ if (newBoostFactor != mLastBoostFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_BOOST_FACTOR, newBoostFactor);
+ mLastBoostFactor = newBoostFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newBoostFactor != mDesBoostFactor)
+ ALOGV("DRC presentation mode wrapper: forced boost factor to %d (from %d)\n",
+ newBoostFactor, mDesBoostFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set boost factor to %d\n", newBoostFactor);
+#endif
+ }
+
+ if (newHeavy != mLastHeavy) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_HEAVY_COMPRESSION, newHeavy);
+ mLastHeavy = newHeavy;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newHeavy != mDesHeavy)
+ ALOGV("DRC presentation mode wrapper: forced heavy compression to %d (from %d)\n",
+ newHeavy, mDesHeavy);
+ else
+ ALOGV("DRC presentation mode wrapper: set heavy compression to %d\n", newHeavy);
+#endif
+ }
+
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC config: tgt_lev: %3d, cut: %3d, boost: %3d, heavy: %d\n", newTarget,
+ newAttFactor, newBoostFactor, newHeavy);
+#endif
+ mDataUpdate = false;
+
+ } // if (mDataUpdate)
+}
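
Illustrative note (not part of the patch): the values compared against mDesTarget in update() are FDK target-level steps of -0.25 dB each, so 64, 92 and 124 correspond to -16 dB, -23 dB and -31 dB respectively. A minimal sketch, assuming only that convention:

    static inline float drcStepsToDb(int steps) {
        // 64 -> -16.0 dB, 92 -> -23.0 dB, 124 -> -31.0 dB
        return -0.25f * static_cast<float>(steps);
    }
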
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
new file mode 100644
index 0000000..f0b6cf2
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+#include "aacdecoder_lib.h"
+
+typedef enum
+{
+ DRC_PRES_MODE_WRAP_DESIRED_TARGET = 0x0000,
+ DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR = 0x0001,
+ DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR = 0x0002,
+ DRC_PRES_MODE_WRAP_DESIRED_HEAVY = 0x0003,
+ DRC_PRES_MODE_WRAP_ENCODER_TARGET = 0x0004
+} DRC_PRES_MODE_WRAP_PARAM;
+
+
+class CDrcPresModeWrapper {
+public:
+ CDrcPresModeWrapper();
+ ~CDrcPresModeWrapper();
+ void setDecoderHandle(const HANDLE_AACDECODER handle);
+ void setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value);
+ void submitStreamData(CStreamInfo*);
+ void update();
+
+protected:
+ HANDLE_AACDECODER mHandleDecoder;
+ int mDesTarget;
+ int mDesAttFactor;
+ int mDesBoostFactor;
+ int mDesHeavy;
+
+ int mEncoderTarget;
+
+ int mLastTarget;
+ int mLastAttFactor;
+ int mLastBoostFactor;
+ int mLastHeavy;
+
+ SCHAR mStreamPRL;
+ SCHAR mStreamDRCPresMode;
+ INT mStreamNrAACChan;
+ INT mStreamNrOutChan;
+
+ bool mIsDownmix;
+ bool mIsMonoDownmix;
+ bool mIsStereoDownmix;
+
+ bool mDataUpdate;
+};
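
Illustrative sketch (not part of the patch) of the call sequence SoftAAC2.cpp below uses with this wrapper; the aacDecoder and streamInfo variables are assumed to come from the FDK calls aacDecoder_Open() and aacDecoder_GetStreamInfo():

    CDrcPresModeWrapper drcWrap;
    drcWrap.setDecoderHandle(aacDecoder);                          // HANDLE_AACDECODER from aacDecoder_Open()
    drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, 64);       // -16 dB default mobile target
    drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, 127);
    drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, 127);
    drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, 1);
    drcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, -1);       // encoder target unknown
    // once per decoded frame:
    drcWrap.submitStreamData(streamInfo);   // CStreamInfo* from aacDecoder_GetStreamInfo()
    drcWrap.update();                       // forwards changed values via aacDecoder_SetParam()
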
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index d4b0de7..8b4dd6f 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -19,22 +19,30 @@
#include <utils/Log.h>
#include "SoftAAC2.h"
+#include <OMX_AudioExt.h>
+#include <OMX_IndexExt.h>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaErrors.h>
+#include <math.h>
+
#define FILEREAD_MAX_LAYERS 2
#define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
-#define MAX_CHANNEL_COUNT 6 /* maximum number of audio channels that can be decoded */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1 /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL -1 /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
// names of properties that can be used to override the default DRC settings
#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut"
#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
namespace android {
@@ -57,11 +65,10 @@ SoftAAC2::SoftAAC2(
mStreamInfo(NULL),
mIsADTS(false),
mInputBufferCount(0),
+ mOutputBufferCount(0),
mSignalledError(false),
- mSawInputEos(false),
- mSignalledOutputEos(false),
- mAnchorTimeUs(0),
- mNumSamplesOutput(0),
+ mLastInHeader(NULL),
+ mCurrentInputTime(0),
mOutputPortSettingsChange(NONE) {
initPorts();
CHECK_EQ(initDecoder(), (status_t)OK);
@@ -69,6 +76,7 @@ SoftAAC2::SoftAAC2(
SoftAAC2::~SoftAAC2() {
aacDecoder_Close(mAACDecoder);
+ delete mOutputDelayRingBuffer;
}
void SoftAAC2::initPorts() {
@@ -113,6 +121,7 @@ void SoftAAC2::initPorts() {
}
status_t SoftAAC2::initDecoder() {
+ ALOGV("initDecoder()");
status_t status = UNKNOWN_ERROR;
mAACDecoder = aacDecoder_Open(TT_MP4_ADIF, /* num layers */ 1);
if (mAACDecoder != NULL) {
@@ -121,36 +130,72 @@ status_t SoftAAC2::initDecoder() {
status = OK;
}
}
- mDecoderHasData = false;
- // for streams that contain metadata, use the mobile profile DRC settings unless overridden
- // by platform properties:
+ mEndOfInput = false;
+ mEndOfOutput = false;
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
+ mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize];
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+
+ if (mAACDecoder == NULL) {
+ ALOGE("AAC decoder is null. TODO: Can not call aacDecoder_SetParam in the following code");
+ }
+
+ //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
+
+ //init DRC wrapper
+ mDrcWrap.setDecoderHandle(mAACDecoder);
+ mDrcWrap.submitStreamData(mStreamInfo);
+
+ // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
+ // TODO: change the DRC settings depending on audio output device type (HDMI, loudspeaker, headphone)
char value[PROPERTY_VALUE_MAX];
- // * AAC_DRC_REFERENCE_LEVEL
+ // DRC_PRES_MODE_WRAP_DESIRED_TARGET
if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL)) {
unsigned refLevel = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_REFERENCE_LEVEL of %d instead of %d",
- refLevel, DRC_DEFAULT_MOBILE_REF_LEVEL);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_REFERENCE_LEVEL, refLevel);
+ ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d", refLevel,
+ DRC_DEFAULT_MOBILE_REF_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, refLevel);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_REFERENCE_LEVEL, DRC_DEFAULT_MOBILE_REF_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, DRC_DEFAULT_MOBILE_REF_LEVEL);
}
- // * AAC_DRC_ATTENUATION_FACTOR
+ // DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL)) {
unsigned cut = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_ATTENUATION_FACTOR of %d instead of %d",
- cut, DRC_DEFAULT_MOBILE_DRC_CUT);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_ATTENUATION_FACTOR, cut);
+ ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", cut,
+ DRC_DEFAULT_MOBILE_DRC_CUT);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, cut);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_ATTENUATION_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
}
- // * AAC_DRC_BOOST_FACTOR (note: no default, using cut)
+ // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL)) {
unsigned boost = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_BOOST_FACTOR of %d", boost);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, boost);
+ ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", boost,
+ DRC_DEFAULT_MOBILE_DRC_BOOST);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, boost);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+ }
+ // DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+ if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL)) {
+ unsigned heavy = atoi(value);
+ ALOGV("AAC decoder using desried DRC heavy compression switch of %d instead of %d", heavy,
+ DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, heavy);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ }
+ // DRC_PRES_MODE_WRAP_ENCODER_TARGET
+ if (property_get(PROP_DRC_OVERRIDE_ENC_LEVEL, value, NULL)) {
+ unsigned encoderRefLevel = atoi(value);
+ ALOGV("AAC decoder using encoder-side DRC reference level of %d instead of %d",
+ encoderRefLevel, DRC_DEFAULT_MOBILE_ENC_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, encoderRefLevel);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, DRC_DEFAULT_MOBILE_ENC_LEVEL);
}
return status;
@@ -233,7 +278,7 @@ OMX_ERRORTYPE SoftAAC2::internalGetParameter(
OMX_ERRORTYPE SoftAAC2::internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR params) {
- switch (index) {
+ switch ((int)index) {
case OMX_IndexParamStandardComponentRole:
{
const OMX_PARAM_COMPONENTROLETYPE *roleParams =
@@ -269,6 +314,67 @@ OMX_ERRORTYPE SoftAAC2::internalSetParameter(
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioAndroidAacPresentation:
+ {
+ const OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE *aacPresParams =
+ (const OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE *)params;
+ // for the following parameters of the OMX_AUDIO_PARAM_AACPROFILETYPE structure,
+ // a value of -1 implies the parameter is not set by the application:
+ // nMaxOutputChannels uses default platform properties, see configureDownmix()
+ // nDrcCut uses default platform properties, see initDecoder()
+ // nDrcBoost idem
+ // nHeavyCompression idem
+ // nTargetReferenceLevel idem
+ // nEncodedTargetLevel idem
+ if (aacPresParams->nMaxOutputChannels >= 0) {
+ int max;
+ if (aacPresParams->nMaxOutputChannels >= 8) { max = 8; }
+ else if (aacPresParams->nMaxOutputChannels >= 6) { max = 6; }
+ else if (aacPresParams->nMaxOutputChannels >= 2) { max = 2; }
+ else {
+ // -1 or 0: disable downmix, 1: mono
+ max = aacPresParams->nMaxOutputChannels;
+ }
+ ALOGV("set nMaxOutputChannels=%d", max);
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, max);
+ }
+ bool updateDrcWrapper = false;
+ if (aacPresParams->nDrcBoost >= 0) {
+ ALOGV("set nDrcBoost=%d", aacPresParams->nDrcBoost);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR,
+ aacPresParams->nDrcBoost);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nDrcCut >= 0) {
+ ALOGV("set nDrcCut=%d", aacPresParams->nDrcCut);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, aacPresParams->nDrcCut);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nHeavyCompression >= 0) {
+ ALOGV("set nHeavyCompression=%d", aacPresParams->nHeavyCompression);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY,
+ aacPresParams->nHeavyCompression);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nTargetReferenceLevel >= 0) {
+ ALOGV("set nTargetReferenceLevel=%d", aacPresParams->nTargetReferenceLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET,
+ aacPresParams->nTargetReferenceLevel);
+ updateDrcWrapper = true;
+ }
+ if (aacPresParams->nEncodedTargetLevel >= 0) {
+ ALOGV("set nEncodedTargetLevel=%d", aacPresParams->nEncodedTargetLevel);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET,
+ aacPresParams->nEncodedTargetLevel);
+ updateDrcWrapper = true;
+ }
+ if (updateDrcWrapper) {
+ mDrcWrap.update();
+ }
+
+ return OMX_ErrorNone;
+ }
+
case OMX_IndexParamAudioPcm:
{
const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
@@ -290,19 +396,105 @@ bool SoftAAC2::isConfigured() const {
return mInputBufferCount > 0;
}
-void SoftAAC2::maybeConfigureDownmix() const {
- if (mStreamInfo->numChannels > 2) {
- char value[PROPERTY_VALUE_MAX];
- if (!(property_get("media.aac_51_output_enabled", value, NULL) &&
- (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
- ALOGI("Downmixing multichannel AAC to stereo");
- aacDecoder_SetParam(mAACDecoder, AAC_PCM_OUTPUT_CHANNELS, 2);
- mStreamInfo->numChannels = 2;
+void SoftAAC2::configureDownmix() const {
+ char value[PROPERTY_VALUE_MAX];
+ if (!(property_get("media.aac_51_output_enabled", value, NULL)
+ && (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
+ ALOGI("limiting to stereo output");
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, 2);
+ // By default, the decoder creates a 5.1 channel downmix signal
+ // for seven and eight channel input streams. To enable 6.1 and 7.1 channel output
+ // use aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1)
+ }
+}
+
+bool SoftAAC2::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
+ if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
+ || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
+ }
+
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ if (mOutputDelayRingBufferWritePos == mOutputDelayRingBufferReadPos) {
+ ALOGE("RING BUFFER OVERFLOW");
+ return false;
+ }
+ } else {
+ ALOGV("slow SoftAAC2::outputDelayRingBufferPutSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
+ mOutputDelayRingBufferWritePos++;
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ if (mOutputDelayRingBufferWritePos == mOutputDelayRingBufferReadPos) {
+ ALOGE("RING BUFFER OVERFLOW");
+ return false;
+ }
}
}
+ return true;
}
-void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
+int32_t SoftAAC2::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
+ if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
+ || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ if (samples != 0) {
+ for (int32_t i = 0; i < numSamples; i++) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
+ }
+ } else {
+ mOutputDelayRingBufferReadPos += numSamples;
+ }
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow SoftAAC2::outputDelayRingBufferGetSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ if (mOutputDelayRingBufferWritePos == mOutputDelayRingBufferReadPos) {
+ ALOGE("RING BUFFER UNDERRUN");
+ return -1;
+ }
+ if (samples != 0) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
+ }
+ mOutputDelayRingBufferReadPos++;
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ }
+ }
+ return numSamples;
+}
+
+int32_t SoftAAC2::outputDelayRingBufferSamplesAvailable() {
+ int32_t available = mOutputDelayRingBufferWritePos - mOutputDelayRingBufferReadPos;
+ if (available < 0) {
+ available += mOutputDelayRingBufferSize;
+ }
+ if (available < 0) {
+ ALOGE("FATAL RING BUFFER ERROR");
+ return 0;
+ }
+ return available;
+}
+
+int32_t SoftAAC2::outputDelayRingBufferSamplesLeft() {
+ return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
+}
+
+
+void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mOutputPortSettingsChange != NONE) {
return;
}
@@ -314,64 +506,64 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
- if (portIndex == 0 && mInputBufferCount == 0) {
- ++mInputBufferCount;
- BufferInfo *info = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *header = info->mHeader;
-
- inBuffer[0] = header->pBuffer + header->nOffset;
- inBufferLength[0] = header->nFilledLen;
-
- AAC_DECODER_ERROR decoderErr =
- aacDecoder_ConfigRaw(mAACDecoder,
- inBuffer,
- inBufferLength);
+ while ((!inQueue.empty() || mEndOfInput) && !outQueue.empty()) {
+ if (!inQueue.empty()) {
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
- if (decoderErr != AAC_DEC_OK) {
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
- return;
- }
+ mEndOfInput = (inHeader->nFlags & OMX_BUFFERFLAG_EOS) != 0;
+ if ((inHeader->nFlags & OMX_BUFFERFLAG_CODECCONFIG) != 0) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
- inQueue.erase(inQueue.begin());
- info->mOwnedByUs = false;
- notifyEmptyBufferDone(header);
+ inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
+ inBufferLength[0] = inHeader->nFilledLen;
- // Only send out port settings changed event if both sample rate
- // and numChannels are valid.
- if (mStreamInfo->sampleRate && mStreamInfo->numChannels) {
- maybeConfigureDownmix();
- ALOGI("Initially configuring decoder: %d Hz, %d channels",
- mStreamInfo->sampleRate,
- mStreamInfo->numChannels);
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_ConfigRaw(mAACDecoder,
+ inBuffer,
+ inBufferLength);
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- }
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr);
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
- return;
- }
+ mInputBufferCount++;
+ mOutputBufferCount++; // fake increase of outputBufferCount to keep the counters aligned
- while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
- BufferInfo *inInfo = NULL;
- OMX_BUFFERHEADERTYPE *inHeader = NULL;
- if (!inQueue.empty()) {
- inInfo = *inQueue.begin();
- inHeader = inInfo->mHeader;
- }
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
- BufferInfo *outInfo = *outQueue.begin();
- OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- outHeader->nFlags = 0;
+ configureDownmix();
+ // Only send out port settings changed event if both sample rate
+ // and numChannels are valid.
+ if (mStreamInfo->sampleRate && mStreamInfo->numChannels) {
+ ALOGI("Initially configuring decoder: %d Hz, %d channels",
+ mStreamInfo->sampleRate,
+ mStreamInfo->numChannels);
- if (inHeader) {
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- mSawInputEos = true;
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+ return;
}
- if (inHeader->nOffset == 0 && inHeader->nFilledLen) {
- mAnchorTimeUs = inHeader->nTimeStamp;
- mNumSamplesOutput = 0;
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ continue;
}
if (mIsADTS) {
@@ -384,7 +576,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
bool signalError = false;
if (inHeader->nFilledLen < 7) {
ALOGE("Audio data too short to contain even the ADTS header. "
- "Got %d bytes.", inHeader->nFilledLen);
+ "Got %d bytes.", inHeader->nFilledLen);
hexdump(adtsHeader, inHeader->nFilledLen);
signalError = true;
} else {
@@ -397,9 +589,9 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
if (inHeader->nFilledLen < aac_frame_length) {
ALOGE("Not enough audio data for the complete frame. "
- "Got %d bytes, frame size according to the ADTS "
- "header is %u bytes.",
- inHeader->nFilledLen, aac_frame_length);
+ "Got %d bytes, frame size according to the ADTS "
+ "header is %u bytes.",
+ inHeader->nFilledLen, aac_frame_length);
hexdump(adtsHeader, inHeader->nFilledLen);
signalError = true;
} else {
@@ -415,76 +607,89 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
if (signalError) {
mSignalledError = true;
-
- notify(OMX_EventError,
- OMX_ErrorStreamCorrupt,
- ERROR_MALFORMED,
- NULL);
-
+ notify(OMX_EventError, OMX_ErrorStreamCorrupt, ERROR_MALFORMED, NULL);
return;
}
} else {
inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
inBufferLength[0] = inHeader->nFilledLen;
}
- } else {
- inBufferLength[0] = 0;
- }
-
- // Fill and decode
- INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(
- outHeader->pBuffer + outHeader->nOffset);
- bytesValid[0] = inBufferLength[0];
+ // Fill and decode
+ bytesValid[0] = inBufferLength[0];
- int prevSampleRate = mStreamInfo->sampleRate;
- int prevNumChannels = mStreamInfo->numChannels;
+ INT prevSampleRate = mStreamInfo->sampleRate;
+ INT prevNumChannels = mStreamInfo->numChannels;
- AAC_DECODER_ERROR decoderErr = AAC_DEC_NOT_ENOUGH_BITS;
- while ((bytesValid[0] > 0 || mSawInputEos) && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
- mDecoderHasData |= (bytesValid[0] > 0);
+ if (inHeader != mLastInHeader) {
+ mLastInHeader = inHeader;
+ mCurrentInputTime = inHeader->nTimeStamp;
+ } else {
+ if (mStreamInfo->sampleRate) {
+ mCurrentInputTime += mStreamInfo->aacSamplesPerFrame *
+ 1000000ll / mStreamInfo->sampleRate;
+ } else {
+ ALOGW("no sample rate yet");
+ }
+ }
+ mAnchorTimes.add(mCurrentInputTime);
aacDecoder_Fill(mAACDecoder,
inBuffer,
inBufferLength,
bytesValid);
- decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
- outBuffer,
- outHeader->nAllocLen,
- 0 /* flags */);
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ 0 /* flags */);
+
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
- if (mSawInputEos && bytesValid[0] <= 0) {
- if (mDecoderHasData) {
- // flush out the decoder's delayed data by calling DecodeFrame
- // one more time, with the AACDEC_FLUSH flag set
- decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
- outBuffer,
- outHeader->nAllocLen,
- AACDEC_FLUSH);
- mDecoderHasData = false;
- }
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
- mSignalledOutputEos = true;
- break;
- } else {
- ALOGW("Not enough bits, bytesValid %d", bytesValid[0]);
- }
+ ALOGE("AAC_DEC_NOT_ENOUGH_BITS should never happen");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ if (bytesValid[0] != 0) {
+ ALOGE("bytesValid[0] != 0 should never happen");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
}
- }
- size_t numOutBytes =
- mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+ size_t numOutBytes =
+ mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
- if (inHeader) {
if (decoderErr == AAC_DEC_OK) {
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
inHeader->nFilledLen -= inBufferUsedLength;
inHeader->nOffset += inBufferUsedLength;
} else {
- ALOGW("AAC decoder returned error %d, substituting silence",
- decoderErr);
+ ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr);
- memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
+ memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow
+
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
// Discard input buffer.
inHeader->nFilledLen = 0;
@@ -494,60 +699,147 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
// fall through
}
+ /*
+ * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
+ * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
+ * rate system and the sampling rate in the final output is actually
+ * doubled compared with the core AAC decoder sampling rate.
+ *
+ * Explicit signalling is done by explicitly defining SBR audio object
+ * type in the bitstream. Implicit signalling is done by embedding
+ * SBR content in AAC extension payload specific to SBR, and hence
+ * requires an AAC decoder to perform pre-checks on actual audio frames.
+ *
+ * Thus, we could not say for sure whether a stream is
+ * AAC+/eAAC+ until the first data frame is decoded.
+ */
+ if (mInputBufferCount <= 2 || mOutputBufferCount > 1) { // TODO: <= 1
+ if (mStreamInfo->sampleRate != prevSampleRate ||
+ mStreamInfo->numChannels != prevNumChannels) {
+ ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+ prevSampleRate, mStreamInfo->sampleRate,
+ prevNumChannels, mStreamInfo->numChannels);
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ mInputBufferCount++;
+ inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ return;
+ }
+ } else if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
+ ALOGW("Invalid AAC stream");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
if (inHeader->nFilledLen == 0) {
inInfo->mOwnedByUs = false;
+ mInputBufferCount++;
inQueue.erase(inQueue.begin());
+ mLastInHeader = NULL;
inInfo = NULL;
notifyEmptyBufferDone(inHeader);
inHeader = NULL;
+ } else {
+ ALOGV("inHeader->nFilledLen = %d", inHeader->nFilledLen);
}
}
- /*
- * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
- * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
- * rate system and the sampling rate in the final output is actually
- * doubled compared with the core AAC decoder sampling rate.
- *
- * Explicit signalling is done by explicitly defining SBR audio object
- * type in the bitstream. Implicit signalling is done by embedding
- * SBR content in AAC extension payload specific to SBR, and hence
- * requires an AAC decoder to perform pre-checks on actual audio frames.
- *
- * Thus, we could not say for sure whether a stream is
- * AAC+/eAAC+ until the first data frame is decoded.
- */
- if (mInputBufferCount <= 2) {
- if (mStreamInfo->sampleRate != prevSampleRate ||
- mStreamInfo->numChannels != prevNumChannels) {
- maybeConfigureDownmix();
- ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
- prevSampleRate, mStreamInfo->sampleRate,
- prevNumChannels, mStreamInfo->numChannels);
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- return;
+ int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+ if (!mEndOfInput && mOutputDelayCompensated < outputDelay) {
+ // discard outputDelay at the beginning
+ int32_t toCompensate = outputDelay - mOutputDelayCompensated;
+ int32_t discard = outputDelayRingBufferSamplesAvailable();
+ if (discard > toCompensate) {
+ discard = toCompensate;
}
- } else if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
- ALOGW("Invalid AAC stream");
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
- return;
+ int32_t discarded = outputDelayRingBufferGetSamples(0, discard);
+ mOutputDelayCompensated += discarded;
+ continue;
}
- if (decoderErr == AAC_DEC_OK || mNumSamplesOutput > 0) {
- // We'll only output data if we successfully decoded it or
- // we've previously decoded valid data, in the latter case
- // (decode failed) we'll output a silent frame.
- outHeader->nFilledLen = numOutBytes;
+ if (mEndOfInput) {
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+ mOutputDelayCompensated -= tmpOutBufferSamples;
+ }
+ }
+
+ while (!outQueue.empty()
+ && outputDelayRingBufferSamplesAvailable()
+ >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (outHeader->nOffset != 0) {
+ ALOGE("outHeader->nOffset != 0 is not handled");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ INT_PCM *outBuffer =
+ reinterpret_cast<INT_PCM *>(outHeader->pBuffer + outHeader->nOffset);
+ if (outHeader->nOffset
+ + mStreamInfo->frameSize * mStreamInfo->numChannels * sizeof(int16_t)
+ > outHeader->nAllocLen) {
+ ALOGE("buffer overflow");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+
+ }
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels); // TODO: check for overflow
+ if (ns != mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ ALOGE("not a complete frame of samples available");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
- outHeader->nTimeStamp =
- mAnchorTimeUs
- + (mNumSamplesOutput * 1000000ll) / mStreamInfo->sampleRate;
+ outHeader->nFilledLen = mStreamInfo->frameSize * mStreamInfo->numChannels
+ * sizeof(int16_t);
+ if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mEndOfOutput = true;
+ } else {
+ outHeader->nFlags = 0;
+ }
- mNumSamplesOutput += mStreamInfo->frameSize;
+ outHeader->nTimeStamp = mAnchorTimes.isEmpty() ? 0 : mAnchorTimes.itemAt(0);
+ mAnchorTimes.removeAt(0);
+ mOutputBufferCount++;
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
outInfo = NULL;
@@ -555,8 +847,48 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
outHeader = NULL;
}
- if (decoderErr == AAC_DEC_OK) {
- ++mInputBufferCount;
+ if (mEndOfInput) {
+ if (outputDelayRingBufferSamplesAvailable() > 0
+ && outputDelayRingBufferSamplesAvailable()
+ < mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ ALOGE("not a complete frame of samples available");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
+ if (!mEndOfOutput) {
+ // send empty block signaling EOS
+ mEndOfOutput = true;
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (outHeader->nOffset != 0) {
+ ALOGE("outHeader->nOffset != 0 is not handled");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(outHeader->pBuffer
+ + outHeader->nOffset);
+ int32_t ns = 0;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outHeader->nTimeStamp = mAnchorTimes.itemAt(0);
+ mAnchorTimes.removeAt(0);
+
+ mOutputBufferCount++;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+ break; // if outQueue not empty but no more output
+ }
}
}
}
@@ -567,38 +899,70 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
// depend on fragments from the last one decoded.
// drain all existing data
drainDecoder();
- // force decoder loop to drop the first decoded buffer by resetting these state variables,
- // but only if initialization has already happened.
- if (mInputBufferCount != 0) {
- mInputBufferCount = 1;
- mStreamInfo->sampleRate = 0;
+ mAnchorTimes.clear();
+ mLastInHeader = NULL;
+ } else {
+ while (outputDelayRingBufferSamplesAvailable() > 0) {
+ int32_t ns = outputDelayRingBufferGetSamples(0,
+ mStreamInfo->frameSize * mStreamInfo->numChannels);
+ if (ns != mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ ALOGE("not a complete frame of samples available");
+ }
+ mOutputBufferCount++;
}
+ mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
}
}
void SoftAAC2::drainDecoder() {
- // a buffer big enough for 6 channels of decoded HE-AAC
- short buf [2048*6];
- aacDecoder_DecodeFrame(mAACDecoder,
- buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
- aacDecoder_DecodeFrame(mAACDecoder,
- buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
- mDecoderHasData = false;
+ int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+ // flush decoder until outputDelay is compensated
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+
+ mOutputDelayCompensated -= tmpOutBufferSamples;
+ }
}
void SoftAAC2::onReset() {
drainDecoder();
// reset the "configured" state
mInputBufferCount = 0;
- mNumSamplesOutput = 0;
+ mOutputBufferCount = 0;
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mEndOfInput = false;
+ mEndOfOutput = false;
+ mAnchorTimes.clear();
+ mLastInHeader = NULL;
+
// To make the codec behave the same before and after a reset, we need to invalidate the
// streaminfo struct. This does that:
- mStreamInfo->sampleRate = 0;
+ mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
mSignalledError = false;
- mSawInputEos = false;
- mSignalledOutputEos = false;
mOutputPortSettingsChange = NONE;
}
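
Illustrative sketch (not part of the patch): the new output path decouples decoding from output through the delay ring buffer; samplesAvailable() below mirrors the wrap-around arithmetic of outputDelayRingBufferSamplesAvailable(), and onQueueFilled() only emits an output buffer once a full frameSize * numChannels block can be read back.

    static int32_t samplesAvailable(int32_t writePos, int32_t readPos, int32_t size) {
        int32_t avail = writePos - readPos;       // readPos may be ahead after wrap-around
        return (avail < 0) ? avail + size : avail;
    }
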
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index a7ea1e2..865bd15 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -20,6 +20,7 @@
#include "SimpleSoftOMXComponent.h"
#include "aacdecoder_lib.h"
+#include "DrcPresModeWrap.h"
namespace android {
@@ -47,18 +48,21 @@ private:
enum {
kNumInputBuffers = 4,
kNumOutputBuffers = 4,
+ kNumDelayBlocksMax = 8,
};
HANDLE_AACDECODER mAACDecoder;
CStreamInfo *mStreamInfo;
bool mIsADTS;
- bool mDecoderHasData;
+ bool mIsFirst;
size_t mInputBufferCount;
+ size_t mOutputBufferCount;
bool mSignalledError;
- bool mSawInputEos;
- bool mSignalledOutputEos;
- int64_t mAnchorTimeUs;
- int64_t mNumSamplesOutput;
+ OMX_BUFFERHEADERTYPE *mLastInHeader;
+ int64_t mCurrentInputTime;
+ Vector<int64_t> mAnchorTimes;
+
+ CDrcPresModeWrapper mDrcWrap;
enum {
NONE,
@@ -69,9 +73,22 @@ private:
void initPorts();
status_t initDecoder();
bool isConfigured() const;
- void maybeConfigureDownmix() const;
+ void configureDownmix() const;
void drainDecoder();
+// delay compensation
+ bool mEndOfInput;
+ bool mEndOfOutput;
+ int32_t mOutputDelayCompensated;
+ int32_t mOutputDelayRingBufferSize;
+ short *mOutputDelayRingBuffer;
+ int32_t mOutputDelayRingBufferWritePos;
+ int32_t mOutputDelayRingBufferReadPos;
+ bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferSamplesAvailable();
+ int32_t outputDelayRingBufferSamplesLeft();
+
DISALLOW_EVIL_CONSTRUCTORS(SoftAAC2);
};
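
Illustrative check (not part of the patch): with the constants used in this change, the delay ring buffer allocated in initDecoder() holds 2048 * MAX_CHANNEL_COUNT(8) * kNumDelayBlocksMax(8) samples of 16-bit PCM:

    constexpr int32_t kRingBufferSamples = 2048 * 8 * 8;                       // 131072 samples
    constexpr size_t  kRingBufferBytes   = kRingBufferSamples * sizeof(short); // 262144 bytes (256 KiB)
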
diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk
index 057c69b..58ec3ba 100644
--- a/media/libstagefright/codecs/aacenc/Android.mk
+++ b/media/libstagefright/codecs/aacenc/Android.mk
@@ -82,6 +82,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7
endif
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -106,6 +108,8 @@ ifeq ($(AAC_LIBRARY), fraunhofer)
LOCAL_CFLAGS :=
+ LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
@@ -128,6 +132,8 @@ else # visualon
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+ LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_aacenc
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
index 6093621..35aa883 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
@@ -19,6 +19,7 @@
#include <utils/Log.h>
#include "SoftAACEncoder2.h"
+#include <OMX_AudioExt.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/hexdump.h>
@@ -44,6 +45,8 @@ SoftAACEncoder2::SoftAACEncoder2(
mNumChannels(1),
mSampleRate(44100),
mBitRate(0),
+ mSBRMode(-1),
+ mSBRRatio(0),
mAACProfile(OMX_AUDIO_AACObjectLC),
mSentCodecSpecificData(false),
mInputSize(0),
@@ -156,6 +159,41 @@ OMX_ERRORTYPE SoftAACEncoder2::internalGetParameter(
aacParams->nSampleRate = mSampleRate;
aacParams->nFrameLength = 0;
+ switch (mSBRMode) {
+ case 1: // sbr on
+ switch (mSBRRatio) {
+ case 0:
+ // set both OMX AAC tool flags
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 1:
+ // set single-rate SBR active
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ case 2:
+ // set dual-rate SBR active
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ default:
+ ALOGE("invalid SBR ratio %d", mSBRRatio);
+ TRESPASS();
+ }
+ break;
+ case 0: // sbr off
+ case -1: // sbr undefined
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
+ aacParams->nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
+ break;
+ default:
+ ALOGE("invalid SBR mode %d", mSBRMode);
+ TRESPASS();
+ }
+
+
+
return OMX_ErrorNone;
}
@@ -243,6 +281,23 @@ OMX_ERRORTYPE SoftAACEncoder2::internalSetParameter(
mAACProfile = aacParams->eAACProfile;
}
+ if (!(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
+ && !(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
+ mSBRMode = 0;
+ mSBRRatio = 0;
+ } else if ((aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
+ && !(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
+ mSBRMode = 1;
+ mSBRRatio = 1;
+ } else if (!(aacParams->nAACtools & OMX_AUDIO_AACToolAndroidSSBR)
+ && (aacParams->nAACtools & OMX_AUDIO_AACToolAndroidDSBR)) {
+ mSBRMode = 1;
+ mSBRRatio = 2;
+ } else {
+ mSBRMode = -1; // codec default sbr mode
+ mSBRRatio = 0;
+ }
+
if (setAudioParams() != OK) {
return OMX_ErrorUndefined;
}
@@ -305,11 +360,11 @@ static AUDIO_OBJECT_TYPE getAOTFromProfile(OMX_U32 profile) {
}
status_t SoftAACEncoder2::setAudioParams() {
- // We call this whenever sample rate, number of channels or bitrate change
+ // We call this whenever sample rate, number of channels, bitrate or SBR mode change
// in response to setParameter calls.
- ALOGV("setAudioParams: %u Hz, %u channels, %u bps",
- mSampleRate, mNumChannels, mBitRate);
+ ALOGV("setAudioParams: %u Hz, %u channels, %u bps, %i sbr mode, %i sbr ratio",
+ mSampleRate, mNumChannels, mBitRate, mSBRMode, mSBRRatio);
if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_AOT,
getAOTFromProfile(mAACProfile))) {
@@ -335,6 +390,24 @@ status_t SoftAACEncoder2::setAudioParams() {
return UNKNOWN_ERROR;
}
+ if (mSBRMode != -1 && mAACProfile == OMX_AUDIO_AACObjectELD) {
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_MODE, mSBRMode)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* SBR ratio parameter configurations:
+ 0: Default configuration wherein SBR ratio is configured depending on audio object type by
+ the FDK.
+ 1: Downsampled SBR (default for ELD)
+ 2: Dualrate SBR (default for HE-AAC)
+ */
+ if (AACENC_OK != aacEncoder_SetParam(mAACEncoder, AACENC_SBR_RATIO, mSBRRatio)) {
+ ALOGE("Failed to set AAC encoder parameters");
+ return UNKNOWN_ERROR;
+ }
+
return OK;
}
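
Illustrative condensation (not part of the patch) of how internalSetParameter() above maps the two Android SBR tool flags onto the encoder's SBR mode and ratio:

    static void sbrToolsToMode(OMX_U32 tools, OMX_S32 *mode, OMX_S32 *ratio) {
        const bool ssbr = (tools & OMX_AUDIO_AACToolAndroidSSBR) != 0;
        const bool dsbr = (tools & OMX_AUDIO_AACToolAndroidDSBR) != 0;
        if (!ssbr && !dsbr)      { *mode = 0;  *ratio = 0; }   // SBR off
        else if (ssbr && !dsbr)  { *mode = 1;  *ratio = 1; }   // single-rate (downsampled) SBR
        else if (!ssbr && dsbr)  { *mode = 1;  *ratio = 2; }   // dual-rate SBR
        else                     { *mode = -1; *ratio = 0; }   // both flags set: codec default
    }
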
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
index 2603f4f..bce9c24 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.h
@@ -53,6 +53,8 @@ private:
OMX_U32 mNumChannels;
OMX_U32 mSampleRate;
OMX_U32 mBitRate;
+ OMX_S32 mSBRMode;
+ OMX_S32 mSBRRatio;
OMX_U32 mAACProfile;
bool mSentCodecSpecificData;
diff --git a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
index cc01927..1d029fc 100644
--- a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
+++ b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
@@ -24,6 +24,8 @@
#include "basic_op.h"
#include "oper_32b.h"
+#define UNUSED(x) (void)(x)
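+/* Casting a parameter to void marks it as intentionally unused, silencing
+ * unused-parameter warnings from the compiler. */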
+
/*****************************************************************************
* *
* Function L_Extract() *
@@ -243,6 +245,8 @@ Word16 iLog4(Word32 value)
Word32 rsqrt(Word32 value, /*!< Operand to square root (0.0 ... 1) */
Word32 accuracy) /*!< Number of valid bits that will be calculated */
{
+ UNUSED(accuracy);
+
Word32 root = 0;
Word32 scale;
diff --git a/media/libstagefright/codecs/aacenc/src/aacenc.c b/media/libstagefright/codecs/aacenc/src/aacenc.c
index d1c8621..40db92c 100644
--- a/media/libstagefright/codecs/aacenc/src/aacenc.c
+++ b/media/libstagefright/codecs/aacenc/src/aacenc.c
@@ -27,6 +27,8 @@
#include "cmnMemory.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
+
/**
* Init the audio codec module and return codec handle
* \param phCodec [OUT] Return the video codec handle
@@ -46,6 +48,8 @@ VO_U32 VO_API voAACEncInit(VO_HANDLE * phCodec,VO_AUDIO_CODINGTYPE vType, VO_COD
VO_MEM_OPERATOR *pMemOP;
int interMem;
+ UNUSED(vType);
+
interMem = 0;
error = 0;
@@ -471,6 +475,10 @@ VO_U32 VO_API voAACEncSetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData)
*/
VO_U32 VO_API voAACEncGetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData)
{
+ UNUSED(hCodec);
+ UNUSED(uParamID);
+ UNUSED(pData);
+
return VO_ERR_NONE;
}
diff --git a/media/libstagefright/codecs/aacenc/src/bitenc.c b/media/libstagefright/codecs/aacenc/src/bitenc.c
index fcc12dd..d1fd647 100644
--- a/media/libstagefright/codecs/aacenc/src/bitenc.c
+++ b/media/libstagefright/codecs/aacenc/src/bitenc.c
@@ -26,6 +26,7 @@
#include "qc_data.h"
#include "interface.h"
+#define UNUSED(x) (void)(x)
static const Word16 globalGainOffset = 100;
static const Word16 icsReservedBit = 0;
@@ -585,6 +586,8 @@ Word16 WriteBitstream (HANDLE_BIT_BUF hBitStream,
Word16 elementUsedBits;
Word16 frameBits=0;
+ UNUSED(ancBytes);
+
/* struct bitbuffer bsWriteCopy; */
bitMarkUp = GetBitsAvail(hBitStream);
if(qcOut->qcElement.adtsUsed) /* write adts header*/
diff --git a/media/libstagefright/codecs/aacenc/src/psy_main.c b/media/libstagefright/codecs/aacenc/src/psy_main.c
index 4e9218c..6f0679c 100644
--- a/media/libstagefright/codecs/aacenc/src/psy_main.c
+++ b/media/libstagefright/codecs/aacenc/src/psy_main.c
@@ -38,6 +38,8 @@
#include "tns_func.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
+
/* long start short stop */
static Word16 blockType2windowShape[] = {KBD_WINDOW,SINE_WINDOW,SINE_WINDOW,KBD_WINDOW};
@@ -170,7 +172,9 @@ Word16 PsyOutNew(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP)
*****************************************************************************/
Word16 PsyOutDelete(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP)
{
- hPsyOut=NULL;
+ UNUSED(hPsyOut);
+ UNUSED(pMemOP);
+
return 0;
}
diff --git a/media/libstagefright/codecs/aacenc/src/qc_main.c b/media/libstagefright/codecs/aacenc/src/qc_main.c
index 48ff300..e5d78aa 100644
--- a/media/libstagefright/codecs/aacenc/src/qc_main.c
+++ b/media/libstagefright/codecs/aacenc/src/qc_main.c
@@ -33,6 +33,7 @@
#include "channel_map.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
typedef enum{
FRAME_LEN_BYTES_MODULO = 1,
@@ -204,11 +205,8 @@ Word16 QCNew(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP)
**********************************************************************************/
void QCDelete(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP)
{
-
- /*
- nothing to do
- */
- hQC=NULL;
+ UNUSED(hQC);
+ UNUSED(pMemOP);
}
/*********************************************************************************
diff --git a/media/libstagefright/codecs/aacenc/src/tns.c b/media/libstagefright/codecs/aacenc/src/tns.c
index 455a864..5172612 100644
--- a/media/libstagefright/codecs/aacenc/src/tns.c
+++ b/media/libstagefright/codecs/aacenc/src/tns.c
@@ -30,6 +30,8 @@
#include "psy_configuration.h"
#include "tns_func.h"
+#define UNUSED(x) (void)(x)
+
#define TNS_MODIFY_BEGIN 2600 /* Hz */
#define RATIO_PATCH_LOWER_BORDER 380 /* Hz */
#define TNS_GAIN_THRESH 141 /* 1.41*100 */
@@ -643,6 +645,8 @@ static Word16 CalcTnsFilter(const Word16 *signal,
Word32 i;
Word32 tnsOrderPlus1 = tnsOrder + 1;
+ UNUSED(window);
+
assert(tnsOrder <= TNS_MAX_ORDER); /* remove asserts later? (btg) */
for(i=0;i<tnsOrder;i++) {
diff --git a/media/libstagefright/codecs/amrnb/common/Android.mk b/media/libstagefright/codecs/amrnb/common/Android.mk
index 30ce29c..a2b3c8f 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.mk
+++ b/media/libstagefright/codecs/amrnb/common/Android.mk
@@ -69,6 +69,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrnb_common
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk
index bc54c5d..4aa8c17 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.mk
+++ b/media/libstagefright/codecs/amrnb/dec/Android.mk
@@ -47,6 +47,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrnbdec
include $(BUILD_STATIC_LIBRARY)
@@ -68,6 +70,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbdec libstagefright_amrwbdec
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk
index f4e467a..afc0b89 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.mk
+++ b/media/libstagefright/codecs/amrnb/enc/Android.mk
@@ -69,6 +69,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrnbenc
include $(BUILD_STATIC_LIBRARY)
@@ -88,6 +90,8 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../common/include \
$(LOCAL_PATH)/../common
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbenc
diff --git a/media/libstagefright/codecs/amrwb/Android.mk b/media/libstagefright/codecs/amrwb/Android.mk
index 677107f..efdf988 100644
--- a/media/libstagefright/codecs/amrwb/Android.mk
+++ b/media/libstagefright/codecs/amrwb/Android.mk
@@ -50,6 +50,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrwbdec
include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index c5b8e0c..64fe8d1 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -112,6 +112,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7
endif
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -126,6 +128,8 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/codecs/common/include \
frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrwbenc
diff --git a/media/libstagefright/codecs/amrwbenc/src/autocorr.c b/media/libstagefright/codecs/amrwbenc/src/autocorr.c
index 8c477ca..0b2ea89 100644
--- a/media/libstagefright/codecs/amrwbenc/src/autocorr.c
+++ b/media/libstagefright/codecs/amrwbenc/src/autocorr.c
@@ -28,6 +28,8 @@
#include "acelp.h"
#include "ham_wind.tab"
+#define UNUSED(x) (void)(x)
+
void Autocorr(
Word16 x[], /* (i) : Input signal */
Word16 m, /* (i) : LPC order */
@@ -40,6 +42,8 @@ void Autocorr(
Word32 L_sum, L_sum1, L_tmp, F_LEN;
Word16 *p1,*p2,*p3;
const Word16 *p4;
+ UNUSED(m);
+
/* Windowing of signal */
p1 = x;
p4 = vo_window;
diff --git a/media/libstagefright/codecs/amrwbenc/src/convolve.c b/media/libstagefright/codecs/amrwbenc/src/convolve.c
index acba532..4c1f7d4 100644
--- a/media/libstagefright/codecs/amrwbenc/src/convolve.c
+++ b/media/libstagefright/codecs/amrwbenc/src/convolve.c
@@ -25,6 +25,8 @@
#include "typedef.h"
#include "basic_op.h"
+#define UNUSED(x) (void)(x)
+
void Convolve (
Word16 x[], /* (i) : input vector */
Word16 h[], /* (i) : impulse response */
@@ -35,6 +37,8 @@ void Convolve (
Word32 i, n;
Word16 *tmpH,*tmpX;
Word32 s;
+ UNUSED(L);
+
for (n = 0; n < 64;)
{
tmpH = h+n;
diff --git a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
index 0d66c31..b66b55e 100644
--- a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
+++ b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
@@ -31,6 +31,8 @@
#define UP_SAMP 4
#define L_INTERPOL1 4
+#define UNUSED(x) (void)(x)
+
/* Local functions */
#ifdef ASM_OPT
@@ -171,6 +173,7 @@ static void Norm_Corr(
Word32 corr, exp_corr, norm, exp, scale;
Word16 exp_norm, excf[L_SUBFR], tmp;
Word32 L_tmp, L_tmp1, L_tmp2;
+ UNUSED(L_subfr);
/* compute the filtered excitation for the first delay t_min */
k = -t_min;
diff --git a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
index 1bda05a..961aadc 100644
--- a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
+++ b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
@@ -26,6 +26,8 @@
#include "math_op.h"
#include "cnst.h"
+#define UNUSED(x) (void)(x)
+
void Syn_filt(
Word16 a[], /* (i) Q12 : a[m+1] prediction coefficients */
Word16 x[], /* (i) : input signal */
@@ -95,6 +97,8 @@ void Syn_filt_32(
Word32 i,a0;
Word32 L_tmp, L_tmp1;
Word16 *p1, *p2, *p3;
+ UNUSED(m);
+
a0 = a[0] >> (4 + Qnew); /* input / 16 and >>Qnew */
/* Do the filtering. */
for (i = 0; i < lg; i++)
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
index ea9da52..df7b9b3 100644
--- a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
+++ b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
@@ -39,6 +39,8 @@
#include "mem_align.h"
#include "cmnMemory.h"
+#define UNUSED(x) (void)(x)
+
#ifdef __cplusplus
extern "C" {
#endif
@@ -1602,6 +1604,8 @@ VO_U32 VO_API voAMRWB_Init(VO_HANDLE * phCodec, /* o: the audi
VO_MEM_OPERATOR voMemoprator;
#endif
VO_MEM_OPERATOR *pMemOP;
+ UNUSED(vType);
+
int interMem = 0;
if(pUserData == NULL || pUserData->memflag != VO_IMF_USERMEMOPERATOR || pUserData->memData == NULL )
diff --git a/media/libstagefright/codecs/avc/common/Android.mk b/media/libstagefright/codecs/avc/common/Android.mk
index 22dee15..844ef0a 100644
--- a/media/libstagefright/codecs/avc/common/Android.mk
+++ b/media/libstagefright/codecs/avc/common/Android.mk
@@ -16,4 +16,6 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk
index 7d17c2a..537ba42 100644
--- a/media/libstagefright/codecs/avc/enc/Android.mk
+++ b/media/libstagefright/codecs/avc/enc/Android.mk
@@ -30,6 +30,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -69,4 +71,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_h264enc
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index 27c63c3..8aff5b6 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -34,6 +34,12 @@
#include "SoftAVCEncoder.h"
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
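+// When verbose logging is compiled out (LOG_NDEBUG is non-zero), ALOGV expands
+// to nothing, so parameters used only for logging must be explicitly voided to
+// avoid unused-parameter warnings.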
+
namespace android {
template<class T>
@@ -136,14 +142,14 @@ inline static void ConvertYUV420SemiPlanarToYUV420Planar(
}
static void* MallocWrapper(
- void *userData, int32_t size, int32_t attrs) {
+ void * /* userData */, int32_t size, int32_t /* attrs */) {
void *ptr = malloc(size);
if (ptr)
memset(ptr, 0, size);
return ptr;
}
-static void FreeWrapper(void *userData, void* ptr) {
+static void FreeWrapper(void * /* userData */, void* ptr) {
free(ptr);
}
@@ -722,7 +728,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
}
}
-void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mSawInputEOS) {
return;
}
@@ -795,7 +801,7 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
- buffer_handle_t srcBuffer; // for MetaDataMode only
+ buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
// Get next input video frame
if (mReadyForNextFrame) {
@@ -965,6 +971,7 @@ int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
}
void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
+ UNUSED_UNLESS_VERBOSE(buffer);
ALOGV("signalBufferReturned: %p", buffer);
}
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
index 23d5ff1..cfa9ca5 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.h
@@ -67,10 +67,6 @@ private:
kNumBuffers = 2,
};
- enum {
- kStoreMetaDataExtensionIndex = OMX_IndexVendorStartUnused + 1
- };
-
// OMX input buffer's timestamp and flags
typedef struct {
int64_t mTimeUs;
diff --git a/media/libstagefright/codecs/common/Android.mk b/media/libstagefright/codecs/common/Android.mk
index a33cb92..b0010ff 100644
--- a/media/libstagefright/codecs/common/Android.mk
+++ b/media/libstagefright/codecs/common/Android.mk
@@ -14,6 +14,8 @@ LOCAL_STATIC_LIBRARIES :=
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk
index f01d605..59a11de 100644
--- a/media/libstagefright/codecs/flac/enc/Android.mk
+++ b/media/libstagefright/codecs/flac/enc/Android.mk
@@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax \
external/flac/include
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index d797197..1301060 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -27,6 +27,12 @@
#define FLAC_COMPRESSION_LEVEL_DEFAULT 5
#define FLAC_COMPRESSION_LEVEL_MAX 8
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
namespace android {
template<class T>
@@ -257,7 +263,7 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter(
}
void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
- //UNUSED_UNLESS_VERBOSE(portIndex);
+ UNUSED_UNLESS_VERBOSE(portIndex);
ALOGV("SoftFlacEncoder::onQueueFilled(portIndex=%d)", portIndex);
if (mSignalledError) {
@@ -343,16 +349,17 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
-
FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable(
const FLAC__byte buffer[],
- size_t bytes, unsigned samples, unsigned current_frame) {
- ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%d, samples=%d, curr_frame=%d)",
+ size_t bytes, unsigned samples,
+ unsigned current_frame) {
+ UNUSED_UNLESS_VERBOSE(current_frame);
+ ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%zu, samples=%u, curr_frame=%u)",
bytes, samples, current_frame);
#ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER
if (samples == 0) {
- ALOGI(" saving %d bytes of header", bytes);
+ ALOGI(" saving %zu bytes of header", bytes);
memcpy(mHeader + mHeaderOffset, buffer, bytes);
mHeaderOffset += bytes;// will contain header size when finished receiving header
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
@@ -444,8 +451,12 @@ return_result:
// static
FLAC__StreamEncoderWriteStatus SoftFlacEncoder::flacEncoderWriteCallback(
- const FLAC__StreamEncoder *encoder, const FLAC__byte buffer[],
- size_t bytes, unsigned samples, unsigned current_frame, void *client_data) {
+ const FLAC__StreamEncoder * /* encoder */,
+ const FLAC__byte buffer[],
+ size_t bytes,
+ unsigned samples,
+ unsigned current_frame,
+ void *client_data) {
return ((SoftFlacEncoder*) client_data)->onEncodedFlacAvailable(
buffer, bytes, samples, current_frame);
}
diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk
index 4c80da6..a0112e1 100644
--- a/media/libstagefright/codecs/g711/dec/Android.mk
+++ b/media/libstagefright/codecs/g711/dec/Android.mk
@@ -14,4 +14,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_g711dec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk
index 71613d2..30868d5 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.mk
+++ b/media/libstagefright/codecs/gsm/dec/Android.mk
@@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax \
external/libgsm/inc
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/hevcdec/Android.mk b/media/libstagefright/codecs/hevcdec/Android.mk
new file mode 100644
index 0000000..c0c694e
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/Android.mk
@@ -0,0 +1,30 @@
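+# Build the software HEVC decoder only when the external/libhevc sources are
+# present in the tree.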
+ifeq ($(if $(wildcard external/libhevc),1,0),1)
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libstagefright_soft_hevcdec
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := libhevcdec
+LOCAL_SRC_FILES := SoftHEVC.cpp
+
+LOCAL_C_INCLUDES := $(TOP)/external/libhevc/decoder
+LOCAL_C_INCLUDES += $(TOP)/external/libhevc/common
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES += libstagefright_omx
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation
+LOCAL_SHARED_LIBRARIES += libutils
+LOCAL_SHARED_LIBRARIES += liblog
+
+# We need this because the current asm generates the following link error:
+# requires unsupported dynamic reloc R_ARM_REL32; recompile with -fPIC
+# Bug: 16853291
+LOCAL_LDFLAGS := -Wl,-Bsymbolic
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
new file mode 100644
index 0000000..b0d0827
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -0,0 +1,720 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftHEVC"
+#include <utils/Log.h>
+
+#include "ihevc_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+#include "ithread.h"
+#include "ihevcd_cxa.h"
+#include "SoftHEVC.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <OMX_VideoExt.h>
+
+namespace android {
+
+#define componentName "video_decoder.hevc"
+#define codingType OMX_VIDEO_CodingHEVC
+#define CODEC_MIME_TYPE MEDIA_MIMETYPE_VIDEO_HEVC
+
+/** Function and structure definitions to keep code similar for each codec */
+#define ivdec_api_function ihevcd_cxa_api_function
+#define ivdext_init_ip_t ihevcd_cxa_init_ip_t
+#define ivdext_init_op_t ihevcd_cxa_init_op_t
+#define ivdext_fill_mem_rec_ip_t ihevcd_cxa_fill_mem_rec_ip_t
+#define ivdext_fill_mem_rec_op_t ihevcd_cxa_fill_mem_rec_op_t
+#define ivdext_ctl_set_num_cores_ip_t ihevcd_cxa_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t
+
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
+
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel1 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel2 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel21 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel3 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel31 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel4 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel41 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel5 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel51 },
+};
+
+SoftHEVC::SoftHEVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVideoDecoderOMXComponent(name, componentName, codingType,
+ kProfileLevels, ARRAY_SIZE(kProfileLevels),
+ 320 /* width */, 240 /* height */, callbacks,
+ appData, component) {
+ initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers,
+ CODEC_MIME_TYPE);
+
+ mOmxColorFormat = OMX_COLOR_FormatYUV420Planar;
+ mStride = mWidth;
+
+ if (OMX_COLOR_FormatYUV420Planar == mOmxColorFormat) {
+ mIvColorFormat = IV_YUV_420P;
+ } else if (OMX_COLOR_FormatYUV420SemiPlanar == mOmxColorFormat) {
+ mIvColorFormat = IV_YUV_420SP_UV;
+ }
+
+ mInitWidth = mWidth;
+ mInitHeight = mHeight;
+
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftHEVC::~SoftHEVC() {
+ ALOGD("In SoftHEVC::~SoftHEVC");
+ CHECK_EQ(deInitDecoder(), (status_t)OK);
+}
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGD("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+void SoftHEVC::logVersion() {
+ ivd_ctl_getversioninfo_ip_t s_ctl_ip;
+ ivd_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ s_ctl_ip.pv_version_buffer = au1_buf;
+ s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version number: 0x%x",
+ s_ctl_op.u4_error_code);
+ } else {
+ ALOGD("Ittiam decoder version number: %s",
+ (char *)s_ctl_ip.pv_version_buffer);
+ }
+ return;
+}
+
+status_t SoftHEVC::setParams(size_t stride) {
+ ivd_ctl_set_config_ip_t s_ctl_ip;
+ ivd_ctl_set_config_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+ s_ctl_ip.u4_disp_wd = (UWORD32)stride;
+ s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+
+ s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME;
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+
+ ALOGD("Set the run-time (dynamic) parameters");
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the run-time parameters: 0x%x",
+ s_ctl_op.u4_error_code);
+
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftHEVC::resetPlugin() {
+ mIsInFlush = false;
+ mReceivedEOS = false;
+ memset(mTimeStamps, 0, sizeof(mTimeStamps));
+ memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
+
+ /* Initialize both start and end times */
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+
+ return OK;
+}
+
+status_t SoftHEVC::resetDecoder() {
+ ivd_ctl_reset_ip_t s_ctl_ip;
+ ivd_ctl_reset_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ /* Set the run-time (dynamic) parameters */
+ setParams(0);
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ return OK;
+}
+
+status_t SoftHEVC::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_cores_op;
+ IV_API_CALL_STATUS_T status;
+ s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
+ s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ ALOGD("Set number of cores to %u", s_set_cores_ip.u4_num_cores);
+ status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip,
+ (void *)&s_set_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in setting number of cores: 0x%x",
+ s_set_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftHEVC::setFlushMode() {
+ IV_API_CALL_STATUS_T status;
+ ivd_ctl_flush_ip_t s_video_flush_ip;
+ ivd_ctl_flush_op_t s_video_flush_op;
+
+ s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+ ALOGD("Set the decoder in flush mode ");
+
+ /* Set the decoder in Flush mode, subsequent decode() calls will flush */
+ status = ivdec_api_function(mCodecCtx, (void *)&s_video_flush_ip,
+ (void *)&s_video_flush_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
+ s_video_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mIsInFlush = true;
+ return OK;
+}
+
+status_t SoftHEVC::initDecoder() {
+ IV_API_CALL_STATUS_T status;
+
+ UWORD32 u4_num_reorder_frames;
+ UWORD32 u4_num_ref_frames;
+ UWORD32 u4_share_disp_buf;
+ WORD32 i4_level;
+
+ mNumCores = GetCPUCoreCount();
+ mMemRecords = NULL;
+ mFlushOutBuffer = NULL;
+
+ /* Initialize number of ref and reorder modes (for HEVC) */
+ u4_num_reorder_frames = 16;
+ u4_num_ref_frames = 16;
+ u4_share_disp_buf = 0;
+
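+ /* Pick a decoder level from the configured resolution; the value appears
+ * to be ten times the HEVC level number (e.g. 50 for level 5.0) */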
+ if ((mWidth * mHeight) > (1920 * 1088)) {
+ i4_level = 50;
+ } else if ((mWidth * mHeight) > (1280 * 720)) {
+ i4_level = 40;
+ } else if ((mWidth * mHeight) > (960 * 540)) {
+ i4_level = 31;
+ } else if ((mWidth * mHeight) > (640 * 360)) {
+ i4_level = 30;
+ } else if ((mWidth * mHeight) > (352 * 288)) {
+ i4_level = 21;
+ } else {
+ i4_level = 20;
+ }
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
+ s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ ALOGV("Get number of mem records");
+ status = ivdec_api_function(mCodecCtx, (void*)&s_num_mem_rec_ip,
+ (void*)&s_num_mem_rec_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in getting mem records: 0x%x",
+ s_num_mem_rec_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ mMemRecords = (iv_mem_rec_t*)ivd_aligned_malloc(
+ 128, mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (mMemRecords == NULL) {
+ ALOGE("Allocation failure");
+ return NO_MEMORY;
+ }
+
+ memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
+
+ {
+ size_t i;
+ ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
+ ivdext_fill_mem_rec_op_t s_fill_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_ip_t);
+ s_fill_mem_ip.i4_level = i4_level;
+ s_fill_mem_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_fill_mem_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_fill_mem_ip.u4_num_extra_disp_buf = 0;
+ s_fill_mem_ip.e_output_format = mIvColorFormat;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = mWidth;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = mHeight;
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_op_t);
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++)
+ ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_fill_mem_ip,
+ (void *)&s_fill_mem_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in filling mem records: 0x%x",
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mNumMemRecords =
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
+
+ ps_mem_rec = mMemRecords;
+
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ivd_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == NULL) {
+ ALOGE("Allocation failure for memory record #%zu of size %u",
+ i, ps_mem_rec->u4_mem_size);
+ status = IV_FAIL;
+ return NO_MEMORY;
+ }
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Initialize the decoder */
+ {
+ ivdext_init_ip_t s_init_ip;
+ ivdext_init_op_t s_init_op;
+
+ void *dec_fxns = (void *)ivdec_api_function;
+
+ s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
+ s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
+ s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = mWidth;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
+
+ s_init_ip.i4_level = i4_level;
+ s_init_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_init_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_init_ip.u4_num_extra_disp_buf = 0;
+
+ s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
+
+ s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
+
+ mCodecCtx = (iv_obj_t*)mMemRecords[0].pv_base;
+ mCodecCtx->pv_fxns = dec_fxns;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+
+ ALOGD("Initializing decoder");
+ status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip,
+ (void *)&s_init_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in init: 0x%x",
+ s_init_op.s_ivd_init_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* Reset the plugin state */
+ resetPlugin();
+
+ /* Set the run time (dynamic) parameters */
+ setParams(0);
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ /* Get codec version */
+ logVersion();
+
+ /* Allocate an internal picture buffer large enough for one YUV 4:2:0 frame;
+ * it is used to drain the decoder during flush */
+ mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, mStride * mHeight * 3 / 2);
+ if (NULL == mFlushOutBuffer) {
+ ALOGE("Could not allocate flushOutputBuffer of size %zu", mStride * mHeight * 3 / 2);
+ return NO_MEMORY;
+ }
+
+ return OK;
+}
+
+status_t SoftHEVC::deInitDecoder() {
+ size_t i;
+
+ if (mMemRecords) {
+ iv_mem_rec_t *ps_mem_rec;
+
+ ps_mem_rec = mMemRecords;
+ ALOGD("Freeing codec memory");
+ for (i = 0; i < mNumMemRecords; i++) {
+ if(ps_mem_rec->pv_base) {
+ ivd_aligned_free(ps_mem_rec->pv_base);
+ }
+ ps_mem_rec++;
+ }
+ ivd_aligned_free(mMemRecords);
+ }
+
+ if(mFlushOutBuffer) {
+ ivd_aligned_free(mFlushOutBuffer);
+ }
+ return OK;
+}
+
+status_t SoftHEVC::reInitDecoder() {
+ status_t ret;
+
+ deInitDecoder();
+
+ ret = initDecoder();
+ if (OK != ret) {
+ ALOGE("Create failure");
+ deInitDecoder();
+ return NO_MEMORY;
+ }
+ return OK;
+}
+void SoftHEVC::onReset() {
+ ALOGD("onReset called");
+ SoftVideoDecoderOMXComponent::onReset();
+
+ resetDecoder();
+ resetPlugin();
+}
+
+void SoftHEVC::setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t sizeY,
+ size_t timeStampIx) {
+ size_t sizeUV;
+ uint8_t *pBuf;
+
+ ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t);
+ ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t);
+
+ ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE;
+
+ /* When in flush mode and after EOS with a zero-byte input,
+ * inHeader is NULL. Hence check for non-NULL before use */
+ if (inHeader) {
+ ps_dec_ip->u4_ts = timeStampIx;
+ ps_dec_ip->pv_stream_buffer = inHeader->pBuffer
+ + inHeader->nOffset;
+ ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen;
+ } else {
+ ps_dec_ip->u4_ts = 0;
+ ps_dec_ip->pv_stream_buffer = NULL;
+ ps_dec_ip->u4_num_Bytes = 0;
+ }
+
+ if (outHeader) {
+ pBuf = outHeader->pBuffer;
+ } else {
+ pBuf = mFlushOutBuffer;
+ }
+
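+ /* The output is planar YUV 4:2:0: the U and V planes are each a quarter
+ * of the luma plane, laid out Y, then U, then V in the buffer */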
+ sizeUV = sizeY / 4;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
+ ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
+
+ ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf;
+ ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY;
+ ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV;
+ ps_dec_ip->s_out_buffer.u4_num_bufs = 3;
+ return;
+}
+void SoftHEVC::onPortFlushCompleted(OMX_U32 portIndex) {
+ ALOGD("onPortFlushCompleted on port %d", portIndex);
+
+ /* Once the output buffers are flushed, ignore any buffers that are held in the decoder */
+ if (kOutputPortIndex == portIndex) {
+ setFlushMode();
+
+ while (true) {
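+ // In flush mode each decode call returns one picture held by the
+ // decoder; stop once no more output is produced.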
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ IV_API_CALL_STATUS_T status;
+ size_t sizeY, sizeUV;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, mStride * mHeight, 0);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
+ (void *)&s_dec_op);
+ if (0 == s_dec_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+ }
+}
+
+void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
+ IV_API_CALL_STATUS_T status;
+
+ UNUSED(portIndex);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+
+ /* If input EOS is seen and the decoder is not in flush mode,
+ * set the decoder in flush mode.
+ * There can be a case where EOS is sent along with the last picture data.
+ * In that case, the decoder has to be put in flush mode only after that
+ * input data has been decoded. This case is handled here */
+
+ if (mReceivedEOS && !mIsInFlush) {
+ setFlushMode();
+ }
+
+ while (outQueue.size() == kNumBuffers) {
+ BufferInfo *inInfo;
+ OMX_BUFFERHEADERTYPE *inHeader;
+
+ BufferInfo *outInfo;
+ OMX_BUFFERHEADERTYPE *outHeader;
+ size_t timeStampIx;
+
+ inInfo = NULL;
+ inHeader = NULL;
+
+ if (!mIsInFlush) {
+ if (!inQueue.empty()) {
+ inInfo = *inQueue.begin();
+ inHeader = inInfo->mHeader;
+ } else {
+ break;
+ }
+ }
+
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = 0;
+ outHeader->nOffset = 0;
+
+ if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
+ ALOGD("EOS seen on input");
+ mReceivedEOS = true;
+ if (inHeader->nFilledLen == 0) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ setFlushMode();
+ }
+ }
+
+ /* Get a free slot in timestamp array to hold input timestamp */
+ {
+ size_t i;
+ timeStampIx = 0;
+ for (i = 0; i < MAX_TIME_STAMPS; i++) {
+ if (!mTimeStampsValid[i]) {
+ timeStampIx = i;
+ break;
+ }
+ }
+ if (inHeader != NULL) {
+ mTimeStampsValid[timeStampIx] = true;
+ mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
+ }
+ }
+
+ {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ WORD32 timeDelay, timeTaken;
+ size_t sizeY, sizeUV;
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader,
+ mStride * mHeight, timeStampIx);
+
+ GETTIME(&mTimeStart, NULL);
+ /* Compute time elapsed between end of previous decode()
+ * to start of current decode() */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
+ (void *)&s_dec_op);
+
+ GETTIME(&mTimeEnd, NULL);
+ /* Compute time taken for decode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGD("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_dec_op.u4_num_bytes_consumed);
+
+ /* If the decoded width and height are greater than the dimensions
+ * used during codec creation, then delete the current instance and
+ * recreate an instance with the new dimensions */
+
+ if(IHEVCD_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code) {
+ mInitWidth = s_dec_op.u4_pic_wd;
+ mInitHeight = s_dec_op.u4_pic_ht;
+ mStride = mInitWidth;
+ CHECK_EQ(reInitDecoder(), (status_t)OK);
+
+ setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader,
+ mStride * mHeight, timeStampIx);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
+ (void *)&s_dec_op);
+ }
+ if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
+ /* If the input did not contain picture data, then ignore
+ * the associated timestamp */
+ mTimeStampsValid[timeStampIx] = false;
+ }
+
+ /* If valid height and width are decoded,
+ * then look at change in resolution */
+ if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
+ uint32_t width = s_dec_op.u4_pic_wd;
+ uint32_t height = s_dec_op.u4_pic_ht;
+
+ if ((width != mWidth) || (height != mHeight)) {
+ mWidth = width;
+ mHeight = height;
+ mStride = mWidth;
+
+ updatePortDefinitions();
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return;
+ }
+ }
+
+ if (s_dec_op.u4_output_present) {
+ outHeader->nFilledLen = (mStride * mHeight * 3) / 2;
+
+ outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
+ mTimeStampsValid[s_dec_op.u4_ts] = false;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ } else {
+ /* If in flush mode and no output is returned by the codec,
+ * then come out of flush mode */
+ mIsInFlush = false;
+
+ /* If EOS was received on the input port and there is no output
+ * from the codec, then signal EOS on the output port */
+ if (mReceivedEOS) {
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ resetPlugin();
+ }
+ }
+ }
+
+ // TODO: Handle input buffers that contain more than one picture's worth of data
+ if (inHeader != NULL) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(const char *name,
+ const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
+ OMX_COMPONENTTYPE **component) {
+ return new android::SoftHEVC(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
new file mode 100644
index 0000000..233db0c
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_HEVC_H_
+
+#define SOFT_HEVC_H_
+
+#include "SoftVideoDecoderOMXComponent.h"
+#include <sys/time.h>
+
+namespace android {
+
+#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ivd_aligned_free(buf) free(buf)
+
+/** Number of entries in the time-stamp array */
+#define MAX_TIME_STAMPS 64
+
+/** Maximum number of cores supported by the codec */
+#define CODEC_MAX_NUM_CORES 4
+
+#define CODEC_MAX_WIDTH 1920
+
+#define CODEC_MAX_HEIGHT 1088
+
+/** Input buffer size */
+#define INPUT_BUF_SIZE (1024 * 1024)
+
+#define MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute the difference between start and end, in microseconds */
+#define TIME_DIFF(start, end, diff) \
+ diff = ((end.tv_sec - start.tv_sec) * 1000000) + \
+ (end.tv_usec - start.tv_usec);
+
+struct SoftHEVC: public SoftVideoDecoderOMXComponent {
+ SoftHEVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftHEVC();
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
+private:
+ // Number of input and output buffers
+ enum {
+ kNumBuffers = 8
+ };
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by the codec
+
+ uint32_t mNewWidth; // New width after change in resolution
+ uint32_t mNewHeight; // New height after change in resolution
+ uint32_t mInitWidth; // Width used during codec creation
+ uint32_t mInitHeight; // Height used during codec creation
+ size_t mStride; // Stride to be used for display buffers
+
+ size_t mNumCores; // Number of cores to be used by the codec
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+ // Internal buffer to be used to flush out the buffers from decoder
+ uint8_t *mFlushOutBuffer;
+
+ // Status of entries in the timestamp array
+ bool mTimeStampsValid[MAX_TIME_STAMPS];
+
+ // Timestamp array - since the codec does not take 64-bit timestamps,
+ // they are maintained in the plugin
+ OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
+
+ OMX_COLOR_FORMATTYPE mOmxColorFormat; // OMX Color format
+ IV_COLOR_FORMAT_T mIvColorFormat; // Ittiam Color format
+
+ bool mIsInFlush; // codec is in flush mode
+ bool mReceivedEOS; // EOS was received on the input port
+ bool mIsAdapting; // plugin in middle of change in resolution
+
+ status_t initDecoder();
+ status_t deInitDecoder();
+ status_t setFlushMode();
+ status_t setParams(size_t stride);
+ void logVersion();
+ status_t setNumCores();
+ status_t resetDecoder();
+ status_t resetPlugin();
+ status_t reInitDecoder();
+
+ void setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
+ ivd_video_decode_op_t *ps_dec_op,
+ OMX_BUFFERHEADERTYPE *inHeader,
+ OMX_BUFFERHEADERTYPE *outHeader,
+ size_t sizeY,
+ size_t timeStampIx);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftHEVC);
+};
+
+} // namespace android
+
+#endif // SOFT_HEVC_H_
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
index a3d5779..1d232c6 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
@@ -46,6 +46,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -72,4 +74,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_mpeg4dec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
index 83a2dd2..c9006d9 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
@@ -33,6 +33,8 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -72,4 +74,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_mpeg4enc
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index bf77f4a..42c9956 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -679,7 +679,7 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
mSawInputEOS = true;
}
- buffer_handle_t srcBuffer; // for MetaDataMode only
+ buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
if (inHeader->nFilledLen > 0) {
uint8_t *inputData = NULL;
if (mStoreMetaDataInBuffers) {
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
index cc4ea8f..c59a1b9 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.h
@@ -56,10 +56,6 @@ private:
kNumBuffers = 2,
};
- enum {
- kStoreMetaDataExtensionIndex = OMX_IndexVendorStartUnused + 1
- };
-
// OMX input buffer's timestamp and flags
typedef struct {
int64_t mTimeUs;
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index 135c715..8284490 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -50,6 +50,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_mp3dec
LOCAL_ARM_MODE := arm
@@ -69,6 +71,8 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index 4d864df..5396022 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -146,6 +146,23 @@ OMX_ERRORTYPE SoftMP3::internalGetParameter(
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioMp3:
+ {
+ OMX_AUDIO_PARAM_MP3TYPE *mp3Params =
+ (OMX_AUDIO_PARAM_MP3TYPE *)params;
+
+ if (mp3Params->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ mp3Params->nChannels = mNumChannels;
+ mp3Params->nBitRate = 0 /* unknown */;
+ mp3Params->nSampleRate = mSamplingRate;
+ // other fields are encoder-only
+
+ return OMX_ErrorNone;
+ }
+
default:
return SimpleSoftOMXComponent::internalGetParameter(index, params);
}
@@ -335,6 +352,9 @@ void SoftMP3::onPortFlushCompleted(OMX_U32 portIndex) {
// depend on fragments from the last one decoded.
pvmp3_InitDecoder(mConfig, mDecoderBuf);
mIsFirst = true;
+ mSignalledError = false;
+ mSawInputEos = false;
+ mSignalledOutputEos = false;
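+ // Clear any latched error and EOS state so that decoding can resume
+ // after the flush (e.g. on a seek).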
}
}
diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk
index 7f2c46d..93ff64c 100644
--- a/media/libstagefright/codecs/on2/dec/Android.mk
+++ b/media/libstagefright/codecs/on2/dec/Android.mk
@@ -20,4 +20,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_vpxdec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 423a057..828577a 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -23,9 +23,6 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
-#include "vpx/vpx_decoder.h"
-#include "vpx/vpx_codec.h"
-#include "vpx/vp8dx.h"
namespace android {
@@ -41,7 +38,8 @@ SoftVPX::SoftVPX(
NULL /* profileLevels */, 0 /* numProfileLevels */,
320 /* width */, 240 /* height */, callbacks, appData, component),
mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
- mCtx(NULL) {
+ mCtx(NULL),
+ mImg(NULL) {
initPorts(kNumBuffers, 768 * 1024 /* inputBufferSize */,
kNumBuffers,
codingType == OMX_VIDEO_CodingVP8 ? MEDIA_MIMETYPE_VIDEO_VP8 : MEDIA_MIMETYPE_VIDEO_VP9);
@@ -118,35 +116,30 @@ void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
}
}
- if (vpx_codec_decode(
- (vpx_codec_ctx_t *)mCtx,
- inHeader->pBuffer + inHeader->nOffset,
- inHeader->nFilledLen,
- NULL,
- 0)) {
- ALOGE("on2 decoder failed to decode frame.");
+ if (mImg == NULL) {
+ if (vpx_codec_decode(
+ (vpx_codec_ctx_t *)mCtx,
+ inHeader->pBuffer + inHeader->nOffset,
+ inHeader->nFilledLen,
+ NULL,
+ 0)) {
+ ALOGE("on2 decoder failed to decode frame.");
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ vpx_codec_iter_t iter = NULL;
+ mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
}
- vpx_codec_iter_t iter = NULL;
- vpx_image_t *img = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
-
- if (img != NULL) {
- CHECK_EQ(img->fmt, IMG_FMT_I420);
-
- uint32_t width = img->d_w;
- uint32_t height = img->d_h;
+ if (mImg != NULL) {
+ CHECK_EQ(mImg->fmt, IMG_FMT_I420);
- if (width != mWidth || height != mHeight) {
- mWidth = width;
- mHeight = height;
-
- updatePortDefinitions();
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
+ uint32_t width = mImg->d_w;
+ uint32_t height = mImg->d_h;
+ bool portWillReset = false;
+ handlePortSettingsChange(&portWillReset, width, height);
+ if (portWillReset) {
return;
}
@@ -155,31 +148,16 @@ void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
outHeader->nFlags = EOSseen ? OMX_BUFFERFLAG_EOS : 0;
outHeader->nTimeStamp = inHeader->nTimeStamp;
- const uint8_t *srcLine = (const uint8_t *)img->planes[PLANE_Y];
uint8_t *dst = outHeader->pBuffer;
- for (size_t i = 0; i < img->d_h; ++i) {
- memcpy(dst, srcLine, img->d_w);
-
- srcLine += img->stride[PLANE_Y];
- dst += img->d_w;
- }
-
- srcLine = (const uint8_t *)img->planes[PLANE_U];
- for (size_t i = 0; i < img->d_h / 2; ++i) {
- memcpy(dst, srcLine, img->d_w / 2);
-
- srcLine += img->stride[PLANE_U];
- dst += img->d_w / 2;
- }
-
- srcLine = (const uint8_t *)img->planes[PLANE_V];
- for (size_t i = 0; i < img->d_h / 2; ++i) {
- memcpy(dst, srcLine, img->d_w / 2);
-
- srcLine += img->stride[PLANE_V];
- dst += img->d_w / 2;
- }
-
+ const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y];
+ const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U];
+ const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V];
+ size_t srcYStride = mImg->stride[PLANE_Y];
+ size_t srcUStride = mImg->stride[PLANE_U];
+ size_t srcVStride = mImg->stride[PLANE_V];
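+ // Copy the decoded frame into the output buffer, honoring the
+ // source plane strides.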
+ copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
+
+ mImg = NULL;
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
outInfo = NULL;
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index cd5eb28..8f68693 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -20,6 +20,10 @@
#include "SoftVideoDecoderOMXComponent.h"
+#include "vpx/vpx_decoder.h"
+#include "vpx/vpx_codec.h"
+#include "vpx/vp8dx.h"
+
namespace android {
struct SoftVPX : public SoftVideoDecoderOMXComponent {
@@ -47,6 +51,8 @@ private:
void *mCtx;
+ vpx_image_t *mImg;
+
status_t initDecoder();
DISALLOW_EVIL_CONSTRUCTORS(SoftVPX);
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 5efe022..cabd6bd 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -27,7 +27,6 @@
namespace android {
-
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -141,17 +140,27 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name,
mWidth(176),
mHeight(144),
mBitrate(192000), // in bps
+ mFramerate(30 << 16), // in Q16 format
mBitrateUpdated(false),
mBitrateControlMode(VPX_VBR), // variable bitrate
- mFrameDurationUs(33333), // Defaults to 30 fps
mDCTPartitions(0),
mErrorResilience(OMX_FALSE),
mColorFormat(OMX_COLOR_FormatYUV420Planar),
mLevel(OMX_VIDEO_VP8Level_Version0),
+ mKeyFrameInterval(0),
+ mMinQuantizer(0),
+ mMaxQuantizer(0),
+ mTemporalLayers(0),
+ mTemporalPatternType(OMX_VIDEO_VPXTemporalLayerPatternNone),
+ mTemporalPatternLength(0),
+ mTemporalPatternIdx(0),
+ mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
mConversionBuffer(NULL),
mInputDataIsMeta(false),
mGrallocModule(NULL),
mKeyFrameRequested(false) {
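+ // Default the base (first) temporal layer to 100% of the target bitrate.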
+ memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
+ mTemporalLayerBitrateRatio[0] = 100;
initPorts();
}
@@ -180,9 +189,8 @@ void SoftVPXEncoder::initPorts() {
inputPort.format.video.nStride = inputPort.format.video.nFrameWidth;
inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight;
inputPort.format.video.nBitrate = 0;
- // frameRate is reciprocal of frameDuration, which is
- // in microseconds. It is also in Q16 format.
- inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16;
+ // frameRate is in Q16 format.
+ inputPort.format.video.xFramerate = mFramerate;
inputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
inputPort.nPortIndex = kInputPortIndex;
inputPort.eDir = OMX_DirInput;
@@ -220,7 +228,7 @@ void SoftVPXEncoder::initPorts() {
outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVP8;
outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused;
outputPort.format.video.pNativeWindow = NULL;
- outputPort.nBufferSize = 256 * 1024; // arbitrary
+ outputPort.nBufferSize = 1024 * 1024; // arbitrary
addPort(outputPort);
}
@@ -236,7 +244,9 @@ status_t SoftVPXEncoder::initEncoder() {
if (mCodecInterface == NULL) {
return UNKNOWN_ERROR;
}
-
+ ALOGD("VP8: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
+ mMinQuantizer, mMaxQuantizer);
codec_return = vpx_codec_enc_config_default(mCodecInterface,
mCodecConfiguration,
0); // Codec specific flags
@@ -277,8 +287,120 @@ status_t SoftVPXEncoder::initEncoder() {
mCodecConfiguration->g_timebase.num = 1;
mCodecConfiguration->g_timebase.den = 1000000;
// rc_target_bitrate is in kbps, mBitrate in bps
- mCodecConfiguration->rc_target_bitrate = mBitrate/1000;
+ mCodecConfiguration->rc_target_bitrate = (mBitrate + 500) / 1000;
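+ // Round to the nearest kbps rather than truncating.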
mCodecConfiguration->rc_end_usage = mBitrateControlMode;
+ // Disable frame drop - not allowed in MediaCodec now.
+ mCodecConfiguration->rc_dropframe_thresh = 0;
+ if (mBitrateControlMode == VPX_CBR) {
+ // Disable spatial resizing.
+ mCodecConfiguration->rc_resize_allowed = 0;
+ // Single-pass mode.
+ mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
+ // Maximum amount of bits that can be subtracted from the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_undershoot_pct = 100;
+ // Maximum amount of bits that can be added to the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_overshoot_pct = 15;
+ // Initial value of the buffer level in ms.
+ mCodecConfiguration->rc_buf_initial_sz = 500;
+ // Amount of data that the encoder should try to maintain in ms.
+ mCodecConfiguration->rc_buf_optimal_sz = 600;
+ // The amount of data that may be buffered by the decoding
+ // application in ms.
+ mCodecConfiguration->rc_buf_sz = 1000;
+ // Enable error resilience - needed for packet loss.
+ mCodecConfiguration->g_error_resilient = 1;
+ // Disable lagged encoding.
+ mCodecConfiguration->g_lag_in_frames = 0;
+ // Maximum key frame interval - for CBR, boost it to 3000 frames.
+ mCodecConfiguration->kf_max_dist = 3000;
+ // Encoder determines optimal key frame placement automatically.
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+
+ // Frame temporal pattern - for now only the WebRTC-like pattern is supported.
+ switch (mTemporalLayers) {
+ case 0:
+ {
+ mTemporalPatternLength = 0;
+ break;
+ }
+ case 1:
+ {
+ mCodecConfiguration->ts_number_layers = 1;
+ mCodecConfiguration->ts_rate_decimator[0] = 1;
+ mCodecConfiguration->ts_periodicity = 1;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mTemporalPattern[0] = kTemporalUpdateLastRefAll;
+ mTemporalPatternLength = 1;
+ break;
+ }
+ case 2:
+ {
+ mCodecConfiguration->ts_number_layers = 2;
+ mCodecConfiguration->ts_rate_decimator[0] = 2;
+ mCodecConfiguration->ts_rate_decimator[1] = 1;
+ mCodecConfiguration->ts_periodicity = 2;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 1;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ }
+ case 3:
+ {
+ mCodecConfiguration->ts_number_layers = 3;
+ mCodecConfiguration->ts_rate_decimator[0] = 4;
+ mCodecConfiguration->ts_rate_decimator[1] = 2;
+ mCodecConfiguration->ts_rate_decimator[2] = 1;
+ mCodecConfiguration->ts_periodicity = 4;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 2;
+ mCodecConfiguration->ts_layer_id[2] = 1;
+ mCodecConfiguration->ts_layer_id[3] = 2;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateNone;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateNone;
+ mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ }
+ default:
+ {
+ ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Set bitrate values for each layer
+ for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
+ mCodecConfiguration->ts_target_bitrate[i] =
+ mCodecConfiguration->rc_target_bitrate *
+ mTemporalLayerBitrateRatio[i] / 100;
+ }
+ if (mKeyFrameInterval > 0) {
+ mCodecConfiguration->kf_max_dist = mKeyFrameInterval;
+ mCodecConfiguration->kf_min_dist = mKeyFrameInterval;
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+ if (mMinQuantizer > 0) {
+ mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
+ }
+ if (mMaxQuantizer > 0) {
+ mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+ }
codec_return = vpx_codec_enc_init(mCodecContext,
mCodecInterface,
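For reference, the 3-layer WebRTC-style configuration above assigns frames to layers in a fixed cycle, and ts_target_bitrate is treated by libvpx as a cumulative per-layer target. A worked example, assuming a 240 kbps target and client-supplied cumulative ratios of {40, 60, 100} (illustrative values, not defaults of this code):

    // Frame-to-layer assignment with ts_periodicity == 4, ts_layer_id == {0, 2, 1, 2}:
    //   frame index : 0  1  2  3  4  5  6  7 ...
    //   layer id    : 0  2  1  2  0  2  1  2 ...
    // i.e. the base layer runs at 1/4 of the input rate (decimator 4), the
    // middle layer at 1/2 (decimator 2), and the top layer at full rate.
    //
    // Per-layer cumulative bitrates with rc_target_bitrate == 240 kbps:
    //   ts_target_bitrate == {240*40/100, 240*60/100, 240*100/100}
    //                     == {96, 144, 240}  // kbps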
@@ -298,6 +420,33 @@ status_t SoftVPXEncoder::initEncoder() {
return UNKNOWN_ERROR;
}
+ // Extra CBR settings
+ if (mBitrateControlMode == VPX_CBR) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_STATIC_THRESHOLD,
+ 1);
+ if (codec_return == VPX_CODEC_OK) {
+ uint32_t rc_max_intra_target =
+ mCodecConfiguration->rc_buf_optimal_sz * (mFramerate >> 17) / 10;
+ // Don't go below 3 times the average per-frame bandwidth.
+ if (rc_max_intra_target < 300) {
+ rc_max_intra_target = 300;
+ }
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_MAX_INTRA_BITRATE_PCT,
+ rc_max_intra_target);
+ }
+ if (codec_return == VPX_CODEC_OK) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_CPUUSED,
+ -8);
+ }
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error setting cbr parameters for vpx encoder.");
+ return UNKNOWN_ERROR;
+ }
+ }
+
if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || mInputDataIsMeta) {
if (mConversionBuffer == NULL) {
mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
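The VP8E_SET_MAX_INTRA_BITRATE_PCT value set above caps key-frame size as a percentage of the average per-frame bit budget; mFramerate >> 17 is half the frame rate because mFramerate is in Q16. A worked example, assuming 30 fps input:

    // mFramerate          == 30 << 16 == 1966080 (Q16)
    // mFramerate >> 17    == 15                  (half the frame rate)
    // rc_buf_optimal_sz   == 600 (ms)
    // rc_max_intra_target == 600 * 15 / 10 == 900
    //   -> a key frame may spend up to 900% of the average per-frame bandwidth;
    //      values below 300 are clamped to the 300% floor above.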
@@ -361,9 +510,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
}
formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- // Converting from microseconds
- // Also converting to Q16 format
- formatParams->xFramerate = (1000000/mFrameDurationUs) << 16;
+ formatParams->xFramerate = mFramerate;
return OMX_ErrorNone;
} else if (formatParams->nPortIndex == kOutputPortIndex) {
formatParams->eCompressionFormat = OMX_VIDEO_CodingVP8;
@@ -411,6 +558,24 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
return OMX_ErrorNone;
}
+ case OMX_IndexParamVideoAndroidVp8Encoder: {
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vp8AndroidParams =
+ (OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param;
+
+ if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ vp8AndroidParams->nKeyFrameInterval = mKeyFrameInterval;
+ vp8AndroidParams->eTemporalPattern = mTemporalPatternType;
+ vp8AndroidParams->nTemporalLayerCount = mTemporalLayers;
+ vp8AndroidParams->nMinQuantizer = mMinQuantizer;
+ vp8AndroidParams->nMaxQuantizer = mMaxQuantizer;
+ memcpy(vp8AndroidParams->nTemporalLayerBitrateRatio,
+ mTemporalLayerBitrateRatio, sizeof(mTemporalLayerBitrateRatio));
+ return OMX_ErrorNone;
+ }
+
case OMX_IndexParamVideoProfileLevelQuerySupported: {
OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
(OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
@@ -497,11 +662,15 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
return internalSetVp8Params(
(const OMX_VIDEO_PARAM_VP8TYPE *)param);
+ case OMX_IndexParamVideoAndroidVp8Encoder:
+ return internalSetAndroidVp8Params(
+ (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
+
case OMX_IndexParamVideoProfileLevelCurrent:
return internalSetProfileLevel(
(const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param);
- case OMX_IndexVendorStartUnused:
+ case kStoreMetaDataExtensionIndex:
{
// storeMetaDataInBuffers
const StoreMetaDataInBuffersParams *storeParam =
@@ -610,6 +779,50 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
return OMX_ErrorNone;
}
+OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVp8Params(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams) {
+ if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+ if (vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternNone &&
+ vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vp8AndroidParams->nTemporalLayerCount > OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vp8AndroidParams->nMinQuantizer > vp8AndroidParams->nMaxQuantizer) {
+ return OMX_ErrorBadParameter;
+ }
+
+ mTemporalPatternType = vp8AndroidParams->eTemporalPattern;
+ if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ mTemporalLayers = vp8AndroidParams->nTemporalLayerCount;
+ } else if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternNone) {
+ mTemporalLayers = 0;
+ }
+ // Check that the bitrate distribution between layers is in increasing order.
+ if (mTemporalLayers > 1) {
+ for (size_t i = 0; i < mTemporalLayers - 1; i++) {
+ if (vp8AndroidParams->nTemporalLayerBitrateRatio[i + 1] <=
+ vp8AndroidParams->nTemporalLayerBitrateRatio[i]) {
+ ALOGE("Wrong bitrate ratio - should be in increasing order.");
+ return OMX_ErrorBadParameter;
+ }
+ }
+ }
+ mKeyFrameInterval = vp8AndroidParams->nKeyFrameInterval;
+ mMinQuantizer = vp8AndroidParams->nMinQuantizer;
+ mMaxQuantizer = vp8AndroidParams->nMaxQuantizer;
+ memcpy(mTemporalLayerBitrateRatio, vp8AndroidParams->nTemporalLayerBitrateRatio,
+ sizeof(mTemporalLayerBitrateRatio));
+ ALOGD("VP8: internalSetAndroidVp8Params. BRMode: %u. TS: %zu. KF: %u."
+ " QP: %u - %u BR0: %u. BR1: %u. BR2: %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
+ mMinQuantizer, mMaxQuantizer, mTemporalLayerBitrateRatio[0],
+ mTemporalLayerBitrateRatio[1], mTemporalLayerBitrateRatio[2]);
+ return OMX_ErrorNone;
+}
OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams(
const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) {
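A hypothetical caller-side sketch (not part of this change) of a parameter block that internalSetAndroidVp8Params above would accept: three WebRTC-style layers with strictly increasing cumulative bitrate ratios. InitOMXParams and the concrete values are illustrative assumptions; the struct and index come from OMX_VideoExt.h.

    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE params;
    InitOMXParams(&params);                      // assumed helper, as used elsewhere in libstagefright
    params.nPortIndex = kOutputPortIndex;
    params.eTemporalPattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
    params.nTemporalLayerCount = 3;
    params.nTemporalLayerBitrateRatio[0] = 40;   // base layer: 40% of the target bitrate
    params.nTemporalLayerBitrateRatio[1] = 60;   // base + middle layer
    params.nTemporalLayerBitrateRatio[2] = 100;  // all three layers together
    params.nKeyFrameInterval = 3000;             // frames
    params.nMinQuantizer = 2;
    params.nMaxQuantizer = 56;
    // then: OMX_SetParameter(handle, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, &params);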
@@ -660,9 +873,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
mHeight = port->format.video.nFrameHeight;
// xFramerate comes in Q16 format, in frames per second unit
- const uint32_t framerate = port->format.video.xFramerate >> 16;
- // frame duration is in microseconds
- mFrameDurationUs = (1000000/framerate);
+ mFramerate = port->format.video.xFramerate;
if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
@@ -675,7 +886,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
def->format.video.nFrameWidth = mWidth;
def->format.video.nFrameHeight = mHeight;
- def->format.video.xFramerate = port->format.video.xFramerate;
+ def->format.video.xFramerate = mFramerate;
def->format.video.eColorFormat = mColorFormat;
def = &editPortInfo(kOutputPortIndex)->mDef;
def->format.video.nFrameWidth = mWidth;
@@ -684,6 +895,13 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
return OMX_ErrorNone;
} else if (port->nPortIndex == kOutputPortIndex) {
mBitrate = port->format.video.nBitrate;
+ mWidth = port->format.video.nFrameWidth;
+ mHeight = port->format.video.nFrameHeight;
+
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nBitrate = mBitrate;
return OMX_ErrorNone;
} else {
return OMX_ErrorBadPortIndex;
@@ -710,6 +928,74 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
return OMX_ErrorNone;
}
+vpx_enc_frame_flags_t SoftVPXEncoder::getEncodeFlags() {
+ vpx_enc_frame_flags_t flags = 0;
+ int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
+ mTemporalPatternIdx++;
+ switch (mTemporalPattern[patternIdx]) {
+ case kTemporalUpdateLast:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ break;
+ case kTemporalUpdateGoldenWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateGolden:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateAltrefWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateAltref:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateNoneNoRefAltref:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ // Deliberately no break here.
+ case kTemporalUpdateNone:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateNoneNoRefGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastAndGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateLastRefAll:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ break;
+ }
+ return flags;
+}
void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
// Initialize encoder if not already
@@ -794,6 +1080,9 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
kInputBufferAlignment, source);
vpx_enc_frame_flags_t flags = 0;
+ if (mTemporalPatternLength > 0) {
+ flags = getEncodeFlags();
+ }
if (mKeyFrameRequested) {
flags |= VPX_EFLAG_FORCE_KF;
mKeyFrameRequested = false;
@@ -814,11 +1103,18 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
mBitrateUpdated = false;
}
+ uint32_t frameDuration;
+ if (inputBufferHeader->nTimeStamp > mLastTimestamp) {
+ frameDuration = (uint32_t)(inputBufferHeader->nTimeStamp - mLastTimestamp);
+ } else {
+ frameDuration = (uint32_t)(((uint64_t)1000000 << 16) / mFramerate);
+ }
+ mLastTimestamp = inputBufferHeader->nTimeStamp;
codec_return = vpx_codec_encode(
mCodecContext,
&raw_frame,
inputBufferHeader->nTimeStamp, // in timebase units
- mFrameDurationUs, // frame duration in timebase units
+ frameDuration, // frame duration in timebase units
flags, // frame flags
VPX_DL_REALTIME); // encoding deadline
if (codec_return != VPX_CODEC_OK) {
@@ -860,10 +1156,9 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
OMX_ERRORTYPE SoftVPXEncoder::getExtensionIndex(
const char *name, OMX_INDEXTYPE *index) {
if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *index = OMX_IndexVendorStartUnused;
+ *(int32_t*)index = kStoreMetaDataExtensionIndex;
return OMX_ErrorNone;
}
-
return SimpleSoftOMXComponent::getExtensionIndex(name, index);
}
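The onQueueFilled change above derives each frame's duration from consecutive input timestamps and only falls back to the nominal 1/framerate duration when timestamps do not increase. A worked example of the fallback, assuming 30 fps:

    // mFramerate              == 30 << 16 == 1966080        (Q16)
    // (uint64_t)1000000 << 16 == 65536000000
    // 65536000000 / 1966080   == 33333 microseconds (~1/30 s per frame)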
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 076830f..5b4c954 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -91,6 +91,43 @@ protected:
const char *name, OMX_INDEXTYPE *index);
private:
+ enum TemporalReferences {
+ // For 1 layer case: reference all (last, golden, and alt ref), but only
+ // update last.
+ kTemporalUpdateLastRefAll = 12,
+ // First base layer frame for 3 temporal layers, which updates last and
+ // golden with alt ref dependency.
+ kTemporalUpdateLastAndGoldenRefAltRef = 11,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenRefAltRef = 10,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9,
+ // Base layer with alt ref dependency.
+ kTemporalUpdateLastRefAltRef = 8,
+ // Highest enhancement layer without dependency on golden with alt ref
+ // dependency.
+ kTemporalUpdateNoneNoRefGoldenRefAltRef = 7,
+ // Second layer and last frame in cycle, for 2 layers.
+ kTemporalUpdateNoneNoRefAltref = 6,
+ // Highest enhancement layer.
+ kTemporalUpdateNone = 5,
+ // Second enhancement layer.
+ kTemporalUpdateAltref = 4,
+ // Second enhancement layer without dependency on previous frames in
+ // the second enhancement layer.
+ kTemporalUpdateAltrefWithoutDependency = 3,
+ // First enhancement layer.
+ kTemporalUpdateGolden = 2,
+ // First enhancement layer without dependency on previous frames in
+ // the first enhancement layer.
+ kTemporalUpdateGoldenWithoutDependency = 1,
+ // Base layer.
+ kTemporalUpdateLast = 0,
+ };
+ enum {
+ kMaxTemporalPattern = 8
+ };
+
// number of buffers allocated per port
static const uint32_t kNumBuffers = 4;
@@ -130,16 +167,15 @@ private:
// Target bitrate set for the encoder, in bits per second.
uint32_t mBitrate;
+ // Target framerate set for the encoder.
+ uint32_t mFramerate;
+
// If a request for a change in bitrate has been received.
bool mBitrateUpdated;
// Bitrate control mode, either constant or variable
vpx_rc_mode mBitrateControlMode;
- // Frame duration is the reciprocal of framerate, denoted
- // in microseconds
- uint64_t mFrameDurationUs;
-
// vp8 specific configuration parameter
// that enables token partitioning of
// the stream into substreams
@@ -160,6 +196,36 @@ private:
// something else.
OMX_VIDEO_VP8LEVELTYPE mLevel;
+ // Key frame interval in frames
+ uint32_t mKeyFrameInterval;
+
+ // Minimum (best quality) quantizer
+ uint32_t mMinQuantizer;
+
+ // Maximum (worst quality) quantizer
+ uint32_t mMaxQuantizer;
+
+ // Number of coding temporal layers to be used.
+ size_t mTemporalLayers;
+
+ // Temporal layer bitrate ratio in percentage
+ uint32_t mTemporalLayerBitrateRatio[OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS];
+
+ // Temporal pattern type
+ OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE mTemporalPatternType;
+
+ // Temporal pattern length
+ size_t mTemporalPatternLength;
+
+ // Temporal pattern current index
+ size_t mTemporalPatternIdx;
+
+ // Frame type temporal pattern
+ TemporalReferences mTemporalPattern[kMaxTemporalPattern];
+
+ // Last input buffer timestamp
+ OMX_TICKS mLastTimestamp;
+
// Conversion buffer is needed to convert semi
// planar yuv420 to planar format
// It is only allocated if input format is
@@ -185,6 +251,9 @@ private:
// dtor.
status_t releaseEncoder();
+ // Get current encode flags
+ vpx_enc_frame_flags_t getEncodeFlags();
+
// Handles port changes with respect to color formats
OMX_ERRORTYPE internalSetFormatParams(
const OMX_VIDEO_PARAM_PORTFORMATTYPE* format);
@@ -206,6 +275,10 @@ private:
OMX_ERRORTYPE internalSetVp8Params(
const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
+ // Handles Android vp8 specific parameters.
+ OMX_ERRORTYPE internalSetAndroidVp8Params(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams);
+
// Updates encoder profile
OMX_ERRORTYPE internalSetProfileLevel(
const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel);
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index a7bde97..cf3c3e3 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -58,7 +58,6 @@ SoftAVC::SoftAVC(
320 /* width */, 240 /* height */, callbacks, appData, component),
mHandle(NULL),
mInputBufferCount(0),
- mPictureSize(mWidth * mHeight * 3 / 2),
mFirstPicture(NULL),
mFirstPictureId(-1),
mPicId(0),
@@ -118,7 +117,7 @@ void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
}
H264SwDecRet ret = H264SWDEC_PIC_RDY;
- bool portSettingsChanged = false;
+ bool portWillReset = false;
while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
&& outQueue.size() == kNumOutputBuffers) {
@@ -161,17 +160,13 @@ void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
H264SwDecInfo decoderInfo;
CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);
- if (handlePortSettingChangeEvent(&decoderInfo)) {
- portSettingsChanged = true;
- }
-
- if (decoderInfo.croppingFlag &&
- handleCropRectEvent(&decoderInfo.cropParams)) {
- portSettingsChanged = true;
- }
+ bool cropChanged = handleCropChange(decoderInfo);
+ handlePortSettingsChange(
+ &portWillReset, decoderInfo.picWidth, decoderInfo.picHeight,
+ cropChanged);
}
} else {
- if (portSettingsChanged) {
+ if (portWillReset) {
if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
== H264SWDEC_PIC_RDY) {
@@ -199,8 +194,7 @@ void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
inInfo->mOwnedByUs = false;
notifyEmptyBufferDone(inHeader);
- if (portSettingsChanged) {
- portSettingsChanged = false;
+ if (portWillReset) {
return;
}
@@ -215,44 +209,33 @@ void SoftAVC::onQueueFilled(OMX_U32 /* portIndex */) {
}
}
-bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) {
- if (mWidth != info->picWidth || mHeight != info->picHeight) {
- mWidth = info->picWidth;
- mHeight = info->picHeight;
- mPictureSize = mWidth * mHeight * 3 / 2;
- updatePortDefinitions();
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- return true;
+bool SoftAVC::handleCropChange(const H264SwDecInfo& decInfo) {
+ if (!decInfo.croppingFlag) {
+ return false;
}
- return false;
-}
-
-bool SoftAVC::handleCropRectEvent(const CropParams *crop) {
- if (mCropLeft != crop->cropLeftOffset ||
- mCropTop != crop->cropTopOffset ||
- mCropWidth != crop->cropOutWidth ||
- mCropHeight != crop->cropOutHeight) {
- mCropLeft = crop->cropLeftOffset;
- mCropTop = crop->cropTopOffset;
- mCropWidth = crop->cropOutWidth;
- mCropHeight = crop->cropOutHeight;
-
- notify(OMX_EventPortSettingsChanged, 1,
- OMX_IndexConfigCommonOutputCrop, NULL);
-
- return true;
+ const CropParams& crop = decInfo.cropParams;
+ if (mCropLeft == crop.cropLeftOffset &&
+ mCropTop == crop.cropTopOffset &&
+ mCropWidth == crop.cropOutWidth &&
+ mCropHeight == crop.cropOutHeight) {
+ return false;
}
- return false;
+
+ mCropLeft = crop.cropLeftOffset;
+ mCropTop = crop.cropTopOffset;
+ mCropWidth = crop.cropOutWidth;
+ mCropHeight = crop.cropOutHeight;
+ return true;
}
void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) {
CHECK(mFirstPicture == NULL);
mFirstPictureId = picId;
- mFirstPicture = new uint8_t[mPictureSize];
- memcpy(mFirstPicture, data, mPictureSize);
+ uint32_t pictureSize = mWidth * mHeight * 3 / 2;
+ mFirstPicture = new uint8_t[pictureSize];
+ memcpy(mFirstPicture, data, pictureSize);
}
void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
@@ -263,9 +246,17 @@ void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) {
OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId);
outHeader->nTimeStamp = header->nTimeStamp;
outHeader->nFlags = header->nFlags;
- outHeader->nFilledLen = mPictureSize;
- memcpy(outHeader->pBuffer + outHeader->nOffset,
- data, mPictureSize);
+ outHeader->nFilledLen = mWidth * mHeight * 3 / 2;
+
+ uint8_t *dst = outHeader->pBuffer + outHeader->nOffset;
+ const uint8_t *srcY = data;
+ const uint8_t *srcU = srcY + mWidth * mHeight;
+ const uint8_t *srcV = srcU + mWidth * mHeight / 4;
+ size_t srcYStride = mWidth;
+ size_t srcUStride = mWidth / 2;
+ size_t srcVStride = srcUStride;
+ copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
+
mPicToHeaderMap.removeItem(picId);
delete header;
outInfo->mOwnedByUs = false;
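drainOneOutputBuffer now copies the decoded picture plane by plane through copyYV12FrameToOutputBuffer, a base-class helper not shown in this diff. A minimal sketch of that kind of copy, assuming a tightly packed planar YUV420 destination of mWidth * mHeight * 3 / 2 bytes (an illustration, not the actual helper):

    #include <string.h>  // memcpy

    static void copyPlanesPacked(uint8_t *dst,
            const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
            size_t yStride, size_t uStride, size_t vStride,
            uint32_t width, uint32_t height) {
        for (uint32_t i = 0; i < height; ++i, srcY += yStride, dst += width) {
            memcpy(dst, srcY, width);             // luma rows
        }
        for (uint32_t i = 0; i < height / 2; ++i, srcU += uStride, dst += width / 2) {
            memcpy(dst, srcU, width / 2);         // Cb rows
        }
        for (uint32_t i = 0; i < height / 2; ++i, srcV += vStride, dst += width / 2) {
            memcpy(dst, srcV, width / 2);         // Cr rows
        }
    }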
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
index ee69926..253a406 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.h
@@ -55,8 +55,6 @@ private:
size_t mInputBufferCount;
- uint32_t mPictureSize;
-
uint8_t *mFirstPicture;
int32_t mFirstPictureId;
@@ -75,8 +73,7 @@ private:
void drainAllOutputBuffers(bool eos);
void drainOneOutputBuffer(int32_t picId, uint8_t *data);
void saveFirstOutputBuffer(int32_t pidId, uint8_t *data);
- bool handleCropRectEvent(const CropParams* crop);
- bool handlePortSettingChangeEvent(const H264SwDecInfo *info);
+ bool handleCropChange(const H264SwDecInfo& decInfo);
DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
};
diff --git a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
index 2bb4c4d..524a3f0 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
@@ -42,6 +42,8 @@
#include "h264bsd_decoder.h"
#include "h264bsd_util.h"
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
Version Information
------------------------------------------------------------------------------*/
@@ -73,6 +75,7 @@ H264DEC_EVALUATION Compile evaluation version, restricts number of frames
#endif
void H264SwDecTrace(char *string) {
+ UNUSED(string);
}
void* H264SwDecMalloc(u32 size) {
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
index c948776..b409a06 100755
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
@@ -42,6 +42,8 @@
#include "armVC.h"
#endif /* H264DEC_OMXDL */
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
2. External compiler flags
--------------------------------------------------------------------------------
@@ -2136,7 +2138,8 @@ static void FillRow1(
i32 center,
i32 right)
{
-
+ UNUSED(left);
+ UNUSED(right);
ASSERT(ref);
ASSERT(fill);
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
index a7c6f64..23401c6 100755
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
@@ -47,6 +47,8 @@
#include "h264bsd_nal_unit.h"
#include "h264bsd_dpb.h"
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
2. External compiler flags
--------------------------------------------------------------------------------
@@ -1407,6 +1409,7 @@ u32 h264bsdCheckPriorPicsFlag(u32 * noOutputOfPriorPicsFlag,
u32 tmp, value, i;
i32 ivalue;
strmData_t tmpStrmData[1];
+ UNUSED(nalUnitType);
/* Code */
diff --git a/media/libstagefright/codecs/opus/Android.mk b/media/libstagefright/codecs/opus/Android.mk
new file mode 100644
index 0000000..365b179
--- /dev/null
+++ b/media/libstagefright/codecs/opus/Android.mk
@@ -0,0 +1,4 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
\ No newline at end of file
diff --git a/media/libstagefright/codecs/opus/dec/Android.mk b/media/libstagefright/codecs/opus/dec/Android.mk
new file mode 100644
index 0000000..2379c5f
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/Android.mk
@@ -0,0 +1,19 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftOpus.cpp
+
+LOCAL_C_INCLUDES := \
+ external/libopus/include \
+ frameworks/av/media/libstagefright/include \
+ frameworks/native/include/media/openmax \
+
+LOCAL_SHARED_LIBRARIES := \
+ libopus libstagefright libstagefright_omx \
+ libstagefright_foundation libutils liblog
+
+LOCAL_MODULE := libstagefright_soft_opusdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
new file mode 100644
index 0000000..b8084ae
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -0,0 +1,540 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftOpus"
+#include <utils/Log.h>
+
+#include "SoftOpus.h"
+#include <OMX_AudioExt.h>
+#include <OMX_IndexExt.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+extern "C" {
+ #include <opus.h>
+ #include <opus_multistream.h>
+}
+
+namespace android {
+
+static const int kRate = 48000;
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftOpus::SoftOpus(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mInputBufferCount(0),
+ mDecoder(NULL),
+ mHeader(NULL),
+ mCodecDelay(0),
+ mSeekPreRoll(0),
+ mAnchorTimeUs(0),
+ mNumFramesOutput(0),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftOpus::~SoftOpus() {
+ if (mDecoder != NULL) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = NULL;
+ }
+ if (mHeader != NULL) {
+ delete mHeader;
+ mHeader = NULL;
+ }
+}
+
+void SoftOpus::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 960 * 6;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_AUDIO_OPUS);
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding =
+ (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidOPUS;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t);
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+status_t SoftOpus::initDecoder() {
+ return OK;
+}
+
+OMX_ERRORTYPE SoftOpus::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch ((int)index) {
+ case OMX_IndexParamAudioAndroidOpus:
+ {
+ OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
+ (OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
+
+ if (opusParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ opusParams->nAudioBandWidth = 0;
+ opusParams->nSampleRate = kRate;
+ opusParams->nBitRate = 0;
+
+ if (!isConfigured()) {
+ opusParams->nChannels = 1;
+ } else {
+ opusParams->nChannels = mHeader->channels;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+ pcmParams->nSamplingRate = kRate;
+
+ if (!isConfigured()) {
+ pcmParams->nChannels = 1;
+ } else {
+ pcmParams->nChannels = mHeader->channels;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftOpus::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch ((int)index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.opus",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioAndroidOpus:
+ {
+ const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
+ (const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
+
+ if (opusParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+bool SoftOpus::isConfigured() const {
+ return mInputBufferCount >= 1;
+}
+
+static uint16_t ReadLE16(const uint8_t *data, size_t data_size,
+ uint32_t read_offset) {
+ if (read_offset + 1 >= data_size)
+ return 0;
+ uint16_t val;
+ val = data[read_offset];
+ val |= data[read_offset + 1] << 8;
+ return val;
+}
+
+// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
+// mappings for up to 8 channels. This information is part of the Vorbis I
+// Specification:
+// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html
+static const int kMaxChannels = 8;
+
+// Maximum packet size used in Xiph's opusdec.
+static const int kMaxOpusOutputPacketSizeSamples = 960 * 6;
+
+// Default audio output channel layout. Used to initialize |stream_map| in
+// OpusHeader, and passed to opus_multistream_decoder_create() when the header
+// does not contain mapping information. The values are valid only for mono and
+// stereo output: Opus streams with more than 2 channels require a stream map.
+static const int kMaxChannelsWithDefaultLayout = 2;
+static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 };
+
+// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header
+static bool ParseOpusHeader(const uint8_t *data, size_t data_size,
+ OpusHeader* header) {
+ // Size of the Opus header excluding optional mapping information.
+ const size_t kOpusHeaderSize = 19;
+
+ // Offset to the channel count byte in the Opus header.
+ const size_t kOpusHeaderChannelsOffset = 9;
+
+ // Offset to the pre-skip value in the Opus header.
+ const size_t kOpusHeaderSkipSamplesOffset = 10;
+
+ // Offset to the gain value in the Opus header.
+ const size_t kOpusHeaderGainOffset = 16;
+
+ // Offset to the channel mapping byte in the Opus header.
+ const size_t kOpusHeaderChannelMappingOffset = 18;
+
+ // Opus Header contains a stream map. The mapping values are in the header
+ // beyond the always present |kOpusHeaderSize| bytes of data. The mapping
+ // data contains stream count, coupling information, and per channel mapping
+ // values:
+ // - Byte 0: Number of streams.
+// - Byte 1: Number of coupled streams.
+ // - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping
+ // values.
+ const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize;
+ const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1;
+ const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2;
+
+ if (data_size < kOpusHeaderSize) {
+ ALOGV("Header size is too small.");
+ return false;
+ }
+ header->channels = *(data + kOpusHeaderChannelsOffset);
+
+ if (header->channels <= 0 || header->channels > kMaxChannels) {
+ ALOGV("Invalid Header, wrong channel count: %d", header->channels);
+ return false;
+ }
+ header->skip_samples = ReadLE16(data, data_size,
+ kOpusHeaderSkipSamplesOffset);
+ header->gain_db = static_cast<int16_t>(
+ ReadLE16(data, data_size,
+ kOpusHeaderGainOffset));
+ header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset);
+ if (!header->channel_mapping) {
+ if (header->channels > kMaxChannelsWithDefaultLayout) {
+ ALOGV("Invalid Header, missing stream map.");
+ return false;
+ }
+ header->num_streams = 1;
+ header->num_coupled = header->channels > 1;
+ header->stream_map[0] = 0;
+ header->stream_map[1] = 1;
+ return true;
+ }
+ if (data_size < kOpusHeaderStreamMapOffset + header->channels) {
+ ALOGV("Invalid stream map; insufficient data for current channel "
+ "count: %d", header->channels);
+ return false;
+ }
+ header->num_streams = *(data + kOpusHeaderNumStreamsOffset);
+ header->num_coupled = *(data + kOpusHeaderNumCoupledOffset);
+ if (header->num_streams + header->num_coupled != header->channels) {
+ ALOGV("Inconsistent channel mapping.");
+ return false;
+ }
+ for (int i = 0; i < header->channels; ++i)
+ header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i);
+ return true;
+}
+
+// Convert nanoseconds to number of samples.
+static uint64_t ns_to_samples(uint64_t ns, int kRate) {
+ return static_cast<double>(ns) * kRate / 1000000000;
+}
+
+void SoftOpus::onQueueFilled(OMX_U32 portIndex) {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ if (portIndex == 0 && mInputBufferCount < 3) {
+ BufferInfo *info = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *header = info->mHeader;
+
+ const uint8_t *data = header->pBuffer + header->nOffset;
+ size_t size = header->nFilledLen;
+
+ if (mInputBufferCount == 0) {
+ CHECK(mHeader == NULL);
+ mHeader = new OpusHeader();
+ memset(mHeader, 0, sizeof(*mHeader));
+ if (!ParseOpusHeader(data, size, mHeader)) {
+ ALOGV("Parsing Opus Header failed.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ uint8_t channel_mapping[kMaxChannels] = {0};
+ memcpy(&channel_mapping,
+ kDefaultOpusChannelLayout,
+ kMaxChannelsWithDefaultLayout);
+
+ int status = OPUS_INVALID_STATE;
+ mDecoder = opus_multistream_decoder_create(kRate,
+ mHeader->channels,
+ mHeader->num_streams,
+ mHeader->num_coupled,
+ channel_mapping,
+ &status);
+ if (!mDecoder || status != OPUS_OK) {
+ ALOGV("opus_multistream_decoder_create failed status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ status =
+ opus_multistream_decoder_ctl(mDecoder,
+ OPUS_SET_GAIN(mHeader->gain_db));
+ if (status != OPUS_OK) {
+ ALOGV("Failed to set OPUS header gain; status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ } else if (mInputBufferCount == 1) {
+ mCodecDelay = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(header->pBuffer +
+ header->nOffset)),
+ kRate);
+ mSamplesToDiscard = mCodecDelay;
+ } else {
+ mSeekPreRoll = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(header->pBuffer +
+ header->nOffset)),
+ kRate);
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+
+ inQueue.erase(inQueue.begin());
+ info->mOwnedByUs = false;
+ notifyEmptyBufferDone(header);
+ ++mInputBufferCount;
+ return;
+ }
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumFramesOutput = 0;
+ }
+
+ // When seeking to zero, |mCodecDelay| samples have to be discarded
+ // instead of |mSeekPreRoll| samples (as we would when seeking to any
+ // other timestamp).
+ if (inHeader->nTimeStamp == 0) {
+ mSamplesToDiscard = mCodecDelay;
+ }
+
+ const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+ const uint32_t size = inHeader->nFilledLen;
+
+ int numFrames = opus_multistream_decode(mDecoder,
+ data,
+ size,
+ (int16_t *)outHeader->pBuffer,
+ kMaxOpusOutputPacketSizeSamples,
+ 0);
+ if (numFrames < 0) {
+ ALOGE("opus_multistream_decode returned %d", numFrames);
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ outHeader->nOffset = 0;
+ if (mSamplesToDiscard > 0) {
+ if (mSamplesToDiscard > numFrames) {
+ mSamplesToDiscard -= numFrames;
+ numFrames = 0;
+ } else {
+ numFrames -= mSamplesToDiscard;
+ outHeader->nOffset = mSamplesToDiscard * sizeof(int16_t) *
+ mHeader->channels;
+ mSamplesToDiscard = 0;
+ }
+ }
+
+ outHeader->nFilledLen = numFrames * sizeof(int16_t) * mHeader->channels;
+ outHeader->nFlags = 0;
+
+ outHeader->nTimeStamp = mAnchorTimeUs +
+ (mNumFramesOutput * 1000000ll) /
+ kRate;
+
+ mNumFramesOutput += numFrames;
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+
+ ++mInputBufferCount;
+ }
+}
+
+void SoftOpus::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0 && mDecoder != NULL) {
+ // Make sure that the next buffer output does not still
+ // depend on fragments from the last one decoded.
+ mNumFramesOutput = 0;
+ opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE);
+ mAnchorTimeUs = 0;
+ mSamplesToDiscard = mSeekPreRoll;
+ }
+}
+
+void SoftOpus::onReset() {
+ mInputBufferCount = 0;
+ mNumFramesOutput = 0;
+ if (mDecoder != NULL) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = NULL;
+ }
+ if (mHeader != NULL) {
+ delete mHeader;
+ mHeader = NULL;
+ }
+
+ mOutputPortSettingsChange = NONE;
+}
+
+void SoftOpus::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftOpus(name, callbacks, appData, component);
+}
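The second and third codec-config buffers consumed in onQueueFilled carry the codec delay and seek pre-roll in nanoseconds; ns_to_samples converts them to 48 kHz sample counts that are later discarded from the decoder output. A worked example, assuming a typical 6.5 ms Opus codec delay:

    // ns_to_samples(6500000 /* ns */, 48000) == 6500000 * 48000 / 1e9 == 312 samples
    // After a flush, mSamplesToDiscard is reset to mSeekPreRoll instead
    // (see onPortFlushCompleted above).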
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
new file mode 100644
index 0000000..97f6561
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * The Opus specification is part of IETF RFC 6716:
+ * http://tools.ietf.org/html/rfc6716
+ */
+
+#ifndef SOFT_OPUS_H_
+
+#define SOFT_OPUS_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct OpusMSDecoder;
+
+namespace android {
+
+struct OpusHeader {
+ int channels;
+ int skip_samples;
+ int channel_mapping;
+ int num_streams;
+ int num_coupled;
+ int16_t gain_db;
+ uint8_t stream_map[8];
+};
+
+struct SoftOpus : public SimpleSoftOMXComponent {
+ SoftOpus(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftOpus();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+ virtual void onReset();
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kMaxNumSamplesPerBuffer = 960 * 6
+ };
+
+ size_t mInputBufferCount;
+
+ OpusMSDecoder *mDecoder;
+ OpusHeader *mHeader;
+
+ int64_t mCodecDelay;
+ int64_t mSeekPreRoll;
+ int64_t mSamplesToDiscard;
+ int64_t mAnchorTimeUs;
+ int64_t mNumFramesOutput;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftOpus);
+};
+
+} // namespace android
+
+#endif // SOFT_OPUS_H_
diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk
index fe90a03..87080e7 100644
--- a/media/libstagefright/codecs/raw/Android.mk
+++ b/media/libstagefright/codecs/raw/Android.mk
@@ -8,6 +8,8 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/include \
frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk
index 2232353..217a6d2 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.mk
+++ b/media/libstagefright/codecs/vorbis/dec/Android.mk
@@ -16,4 +16,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_vorbisdec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)