author     Lajos Molnar <lajos@google.com>                               2015-04-16 03:07:16 +0000
committer  Android Git Automerger <android-git-automerger@android.com>  2015-04-16 03:07:16 +0000
commit     655084f979b3067cb5a6fd2d4de148b411fbec23 (patch)
tree       604fb047a4647ee7acb240cc88df2f6ed1b27fe6
parent     5280631330b6e06c2b9f3af5f2afa7d82a022618 (diff)
parent     e7a53499125586272adb2c7592db5c4038dabe3b (diff)
download   frameworks_av-655084f979b3067cb5a6fd2d4de148b411fbec23.zip
           frameworks_av-655084f979b3067cb5a6fd2d4de148b411fbec23.tar.gz
           frameworks_av-655084f979b3067cb5a6fd2d4de148b411fbec23.tar.bz2
am e7a53499: am 0a45cda6: am 187b196e: Merge changes Ic51e92e9,Ie808aa82,I74f5f4a1
* commit 'e7a53499125586272adb2c7592db5c4038dabe3b':
stagefright: add SoftAVCEnc using libavc
stagefright: add SoftAVCDec using libavc
stagefright: add SoftMPEG2 decoder using libmpeg2
-rw-r--r--               media/libstagefright/codecs/avcdec/Android.mk                 27
-rw-r--r--               media/libstagefright/codecs/avcdec/SoftAVCDec.cpp            808
-rw-r--r--               media/libstagefright/codecs/avcdec/SoftAVCDec.h              177
-rw-r--r--               media/libstagefright/codecs/avcenc/Android.mk                 30
-rw-r--r--               media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp           1335
-rw-r--r--               media/libstagefright/codecs/avcenc/SoftAVCEnc.h              309
-rw-r--r--               media/libstagefright/codecs/mpeg2dec/Android.mk               27
-rw-r--r--               media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp           756
-rw-r--r--               media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h             178
-rwxr-xr-x [-rw-r--r--]  media/libstagefright/data/media_codecs_google_video.xml      27
-rwxr-xr-x [-rw-r--r--]  media/libstagefright/omx/SoftOMXPlugin.cpp                     5
11 files changed, 3668 insertions, 11 deletions
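
Each new codec module builds as its own shared library (e.g. libstagefright_soft_avcdec per the Android.mk below) and exports a createSoftOMXComponent factory; the five-line SoftOMXPlugin.cpp change in the diffstat presumably just adds the new component names to the plugin's table so it can load these libraries and resolve that symbol. For orientation, this is the AVC decoder's entry point, copied from the SoftAVCDec.cpp hunk further down:

    // Exported factory: SoftOMXPlugin resolves this symbol from
    // libstagefright_soft_avcdec.so and calls it to instantiate the component.
    android::SoftOMXComponent *createSoftOMXComponent(
            const char *name, const OMX_CALLBACKTYPE *callbacks,
            OMX_PTR appData, OMX_COMPONENTTYPE **component) {
        return new android::SoftAVC(name, callbacks, appData, component);
    }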
diff --git a/media/libstagefright/codecs/avcdec/Android.mk b/media/libstagefright/codecs/avcdec/Android.mk
new file mode 100644
index 0000000..902ab57
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/Android.mk
@@ -0,0 +1,27 @@
+#ifeq ($(if $(wildcard external/libh264),1,0),1)
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libstagefright_soft_avcdec
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := libavcdec
+LOCAL_SRC_FILES := SoftAVCDec.cpp
+
+LOCAL_C_INCLUDES := $(TOP)/external/libavc/decoder
+LOCAL_C_INCLUDES += $(TOP)/external/libavc/common
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES += libstagefright_omx
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation
+LOCAL_SHARED_LIBRARIES += libutils
+LOCAL_SHARED_LIBRARIES += liblog
+
+LOCAL_LDFLAGS := -Wl,-Bsymbolic
+
+include $(BUILD_SHARED_LIBRARY)
+
+#endif
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
new file mode 100644
index 0000000..8388472
--- /dev/null
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -0,0 +1,808 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SoftAVCDec" +#include <utils/Log.h> + +#include "ih264_typedefs.h" +#include "iv.h" +#include "ivd.h" +#include "ithread.h" +#include "ih264d.h" +#include "SoftAVCDec.h" + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaDefs.h> +#include <OMX_VideoExt.h> + +namespace android { + +#define PRINT_TIME ALOGV + +#define componentName "video_decoder.avc" +#define codingType OMX_VIDEO_CodingAVC +#define CODEC_MIME_TYPE MEDIA_MIMETYPE_VIDEO_AVC + +/** Function and structure definitions to keep code similar for each codec */ +#define ivdec_api_function ih264d_api_function +#define ivdext_init_ip_t ih264d_init_ip_t +#define ivdext_init_op_t ih264d_init_op_t +#define ivdext_fill_mem_rec_ip_t ih264d_fill_mem_rec_ip_t +#define ivdext_fill_mem_rec_op_t ih264d_fill_mem_rec_op_t +#define ivdext_ctl_set_num_cores_ip_t ih264d_ctl_set_num_cores_ip_t +#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t + +#define IVDEXT_CMD_CTL_SET_NUM_CORES \ + (IVD_CONTROL_API_COMMAND_TYPE_T)IH264D_CMD_CTL_SET_NUM_CORES + +static const CodecProfileLevel kProfileLevels[] = { + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel42 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel5 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel51 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel52 }, + + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel1b }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel11 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel12 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel13 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel2 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel21 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel22 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel3 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel31 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel32 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel4 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel41 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel42 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel5 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel51 }, + { OMX_VIDEO_AVCProfileMain, OMX_VIDEO_AVCLevel52 }, + + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel1 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel1b }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel11 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel12 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel13 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel2 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel21 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel22 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel3 }, + { 
OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel31 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel32 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel4 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel41 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel42 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel5 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel51 }, + { OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCLevel52 }, +}; + +SoftAVC::SoftAVC( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SoftVideoDecoderOMXComponent( + name, componentName, codingType, + kProfileLevels, ARRAY_SIZE(kProfileLevels), + 320 /* width */, 240 /* height */, callbacks, + appData, component), + mMemRecords(NULL), + mFlushOutBuffer(NULL), + mOmxColorFormat(OMX_COLOR_FormatYUV420Planar), + mIvColorFormat(IV_YUV_420P), + mNewWidth(mWidth), + mNewHeight(mHeight), + mChangingResolution(false) { + initPorts( + kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE); + + GETTIME(&mTimeStart, NULL); + + // If input dump is enabled, then open create an empty file + GENERATE_FILE_NAMES(); + CREATE_DUMP_FILE(mInFile); + + CHECK_EQ(initDecoder(), (status_t)OK); +} + +SoftAVC::~SoftAVC() { + CHECK_EQ(deInitDecoder(), (status_t)OK); +} + +static size_t GetCPUCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... + cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGD("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} + +void SoftAVC::logVersion() { + ivd_ctl_getversioninfo_ip_t s_ctl_ip; + ivd_ctl_getversioninfo_op_t s_ctl_op; + UWORD8 au1_buf[512]; + IV_API_CALL_STATUS_T status; + + s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION; + s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t); + s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t); + s_ctl_ip.pv_version_buffer = au1_buf; + s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf); + + status = + ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in getting version number: 0x%x", + s_ctl_op.u4_error_code); + } else { + ALOGV("Ittiam decoder version number: %s", + (char *)s_ctl_ip.pv_version_buffer); + } + return; +} + +status_t SoftAVC::setParams(size_t stride) { + ivd_ctl_set_config_ip_t s_ctl_ip; + ivd_ctl_set_config_op_t s_ctl_op; + IV_API_CALL_STATUS_T status; + s_ctl_ip.u4_disp_wd = (UWORD32)stride; + s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE; + + s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT; + s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME; + s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS; + s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t); + s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t); + + ALOGV("Set the run-time (dynamic) parameters stride = %u", stride); + status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in setting the run-time parameters: 0x%x", + s_ctl_op.u4_error_code); + + return UNKNOWN_ERROR; + } + return OK; +} + +status_t SoftAVC::resetPlugin() { + mIsInFlush = false; + mReceivedEOS = false; + memset(mTimeStamps, 0, sizeof(mTimeStamps)); + memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid)); + + /* Initialize both start and end times */ + gettimeofday(&mTimeStart, NULL); + gettimeofday(&mTimeEnd, NULL); + 
+ return OK; +} + +status_t SoftAVC::resetDecoder() { + ivd_ctl_reset_ip_t s_ctl_ip; + ivd_ctl_reset_op_t s_ctl_op; + IV_API_CALL_STATUS_T status; + + s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET; + s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t); + s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t); + + status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op); + if (IV_SUCCESS != status) { + ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code); + return UNKNOWN_ERROR; + } + + /* Set the run-time (dynamic) parameters */ + setParams(outputBufferWidth()); + + /* Set number of cores/threads to be used by the codec */ + setNumCores(); + + return OK; +} + +status_t SoftAVC::setNumCores() { + ivdext_ctl_set_num_cores_ip_t s_set_cores_ip; + ivdext_ctl_set_num_cores_op_t s_set_cores_op; + IV_API_CALL_STATUS_T status; + s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES; + s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES); + s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t); + s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t); + status = ivdec_api_function( + mCodecCtx, (void *)&s_set_cores_ip, (void *)&s_set_cores_op); + if (IV_SUCCESS != status) { + ALOGE("Error in setting number of cores: 0x%x", + s_set_cores_op.u4_error_code); + return UNKNOWN_ERROR; + } + return OK; +} + +status_t SoftAVC::setFlushMode() { + IV_API_CALL_STATUS_T status; + ivd_ctl_flush_ip_t s_video_flush_ip; + ivd_ctl_flush_op_t s_video_flush_op; + + s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH; + s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t); + s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t); + + /* Set the decoder in Flush mode, subsequent decode() calls will flush */ + status = ivdec_api_function( + mCodecCtx, (void *)&s_video_flush_ip, (void *)&s_video_flush_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status, + s_video_flush_op.u4_error_code); + return UNKNOWN_ERROR; + } + + mIsInFlush = true; + return OK; +} + +status_t SoftAVC::initDecoder() { + IV_API_CALL_STATUS_T status; + + UWORD32 u4_num_reorder_frames; + UWORD32 u4_num_ref_frames; + UWORD32 u4_share_disp_buf; + WORD32 i4_level; + + mNumCores = GetCPUCoreCount(); + + /* Initialize number of ref and reorder modes (for H264) */ + u4_num_reorder_frames = 16; + u4_num_ref_frames = 16; + u4_share_disp_buf = 0; + + uint32_t displayStride = outputBufferWidth(); + uint32_t displayHeight = outputBufferHeight(); + uint32_t displaySizeY = displayStride * displayHeight; + + if (displaySizeY > (1920 * 1088)) { + i4_level = 50; + } else if (displaySizeY > (1280 * 720)) { + i4_level = 40; + } else if (displaySizeY > (720 * 576)) { + i4_level = 31; + } else if (displaySizeY > (624 * 320)) { + i4_level = 30; + } else if (displaySizeY > (352 * 288)) { + i4_level = 21; + } else { + i4_level = 20; + } + + { + iv_num_mem_rec_ip_t s_num_mem_rec_ip; + iv_num_mem_rec_op_t s_num_mem_rec_op; + + s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip); + s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op); + s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC; + + ALOGV("Get number of mem records"); + status = ivdec_api_function( + mCodecCtx, (void *)&s_num_mem_rec_ip, (void *)&s_num_mem_rec_op); + if (IV_SUCCESS != status) { + ALOGE("Error in getting mem records: 0x%x", + s_num_mem_rec_op.u4_error_code); + return UNKNOWN_ERROR; + } + + 
mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec; + } + + mMemRecords = (iv_mem_rec_t *)ivd_aligned_malloc( + 128, mNumMemRecords * sizeof(iv_mem_rec_t)); + if (mMemRecords == NULL) { + ALOGE("Allocation failure"); + return NO_MEMORY; + } + + memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t)); + + { + size_t i; + ivdext_fill_mem_rec_ip_t s_fill_mem_ip; + ivdext_fill_mem_rec_op_t s_fill_mem_op; + iv_mem_rec_t *ps_mem_rec; + + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = + sizeof(ivdext_fill_mem_rec_ip_t); + s_fill_mem_ip.i4_level = i4_level; + s_fill_mem_ip.u4_num_reorder_frames = u4_num_reorder_frames; + s_fill_mem_ip.u4_num_ref_frames = u4_num_ref_frames; + s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf; + s_fill_mem_ip.u4_num_extra_disp_buf = 0; + s_fill_mem_ip.e_output_format = mIvColorFormat; + + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight; + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size = + sizeof(ivdext_fill_mem_rec_op_t); + + ps_mem_rec = mMemRecords; + for (i = 0; i < mNumMemRecords; i++) { + ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t); + } + + status = ivdec_api_function( + mCodecCtx, (void *)&s_fill_mem_ip, (void *)&s_fill_mem_op); + + if (IV_SUCCESS != status) { + ALOGE("Error in filling mem records: 0x%x", + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + mNumMemRecords = + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled; + + ps_mem_rec = mMemRecords; + + for (i = 0; i < mNumMemRecords; i++) { + ps_mem_rec->pv_base = ivd_aligned_malloc( + ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size); + if (ps_mem_rec->pv_base == NULL) { + ALOGE("Allocation failure for memory record #%zu of size %u", + i, ps_mem_rec->u4_mem_size); + status = IV_FAIL; + return NO_MEMORY; + } + + ps_mem_rec++; + } + } + + /* Initialize the decoder */ + { + ivdext_init_ip_t s_init_ip; + ivdext_init_op_t s_init_op; + + void *dec_fxns = (void *)ivdec_api_function; + + s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t); + s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT; + s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords; + s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride; + s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight; + + s_init_ip.i4_level = i4_level; + s_init_ip.u4_num_reorder_frames = u4_num_reorder_frames; + s_init_ip.u4_num_ref_frames = u4_num_ref_frames; + s_init_ip.u4_share_disp_buf = u4_share_disp_buf; + s_init_ip.u4_num_extra_disp_buf = 0; + + s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op); + + s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords; + s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat; + + mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base; + mCodecCtx->pv_fxns = dec_fxns; + mCodecCtx->u4_size = sizeof(iv_obj_t); + + status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op); + if (status != IV_SUCCESS) { + ALOGE("Error in init: 0x%x", + s_init_op.s_ivd_init_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + } + + /* Reset the plugin state */ + resetPlugin(); + + /* Set the run time (dynamic) parameters */ + setParams(displayStride); + + /* Set number of cores/threads to be used by the codec */ + setNumCores(); + + /* Get codec version */ + logVersion(); + + /* Allocate internal 
picture buffer */ + uint32_t bufferSize = displaySizeY * 3 / 2; + mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize); + if (NULL == mFlushOutBuffer) { + ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize); + return NO_MEMORY; + } + + mInitNeeded = false; + mFlushNeeded = false; + return OK; +} + +status_t SoftAVC::deInitDecoder() { + size_t i; + + if (mMemRecords) { + iv_mem_rec_t *ps_mem_rec; + + ps_mem_rec = mMemRecords; + for (i = 0; i < mNumMemRecords; i++) { + if (ps_mem_rec->pv_base) { + ivd_aligned_free(ps_mem_rec->pv_base); + } + ps_mem_rec++; + } + ivd_aligned_free(mMemRecords); + mMemRecords = NULL; + } + + if (mFlushOutBuffer) { + ivd_aligned_free(mFlushOutBuffer); + mFlushOutBuffer = NULL; + } + + mInitNeeded = true; + mChangingResolution = false; + + return OK; +} + +status_t SoftAVC::reInitDecoder() { + status_t ret; + + deInitDecoder(); + + ret = initDecoder(); + if (OK != ret) { + ALOGE("Create failure"); + deInitDecoder(); + return NO_MEMORY; + } + return OK; +} + +void SoftAVC::onReset() { + SoftVideoDecoderOMXComponent::onReset(); + + resetDecoder(); + resetPlugin(); +} + +OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) { + const uint32_t oldWidth = mWidth; + const uint32_t oldHeight = mHeight; + OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params); + if (mWidth != oldWidth || mHeight != oldHeight) { + reInitDecoder(); + } + return ret; +} + +void SoftAVC::setDecodeArgs( + ivd_video_decode_ip_t *ps_dec_ip, + ivd_video_decode_op_t *ps_dec_op, + OMX_BUFFERHEADERTYPE *inHeader, + OMX_BUFFERHEADERTYPE *outHeader, + size_t timeStampIx) { + size_t sizeY = outputBufferWidth() * outputBufferHeight(); + size_t sizeUV; + uint8_t *pBuf; + + ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t); + ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t); + + ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE; + + /* When in flush and after EOS with zero byte input, + * inHeader is set to zero. 
Hence check for non-null */ + if (inHeader) { + ps_dec_ip->u4_ts = timeStampIx; + ps_dec_ip->pv_stream_buffer = + inHeader->pBuffer + inHeader->nOffset; + ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen; + } else { + ps_dec_ip->u4_ts = 0; + ps_dec_ip->pv_stream_buffer = NULL; + ps_dec_ip->u4_num_Bytes = 0; + } + + if (outHeader) { + pBuf = outHeader->pBuffer; + } else { + pBuf = mFlushOutBuffer; + } + + sizeUV = sizeY / 4; + ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY; + ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV; + ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV; + + ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf; + ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY; + ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV; + ps_dec_ip->s_out_buffer.u4_num_bufs = 3; + return; +} +void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) { + /* Once the output buffers are flushed, ignore any buffers that are held in decoder */ + if (kOutputPortIndex == portIndex) { + setFlushMode(); + + while (true) { + ivd_video_decode_ip_t s_dec_ip; + ivd_video_decode_op_t s_dec_op; + IV_API_CALL_STATUS_T status; + size_t sizeY, sizeUV; + + setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0); + + status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op); + if (0 == s_dec_op.u4_output_present) { + resetPlugin(); + break; + } + } + } +} + +void SoftAVC::onQueueFilled(OMX_U32 portIndex) { + UNUSED(portIndex); + + if (mOutputPortSettingsChange != NONE) { + return; + } + + List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex); + List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex); + + /* If input EOS is seen and decoder is not in flush mode, + * set the decoder in flush mode. + * There can be a case where EOS is sent along with last picture data + * In that case, only after decoding that input data, decoder has to be + * put in flush. This case is handled here */ + + if (mReceivedEOS && !mIsInFlush) { + setFlushMode(); + } + + while (!outQueue.empty()) { + BufferInfo *inInfo; + OMX_BUFFERHEADERTYPE *inHeader; + + BufferInfo *outInfo; + OMX_BUFFERHEADERTYPE *outHeader; + size_t timeStampIx; + + inInfo = NULL; + inHeader = NULL; + + if (!mIsInFlush) { + if (!inQueue.empty()) { + inInfo = *inQueue.begin(); + inHeader = inInfo->mHeader; + } else { + break; + } + } + + outInfo = *outQueue.begin(); + outHeader = outInfo->mHeader; + outHeader->nFlags = 0; + outHeader->nTimeStamp = 0; + outHeader->nOffset = 0; + + if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) { + mReceivedEOS = true; + if (inHeader->nFilledLen == 0) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + setFlushMode(); + } + } + + // When there is an init required and the decoder is not in flush mode, + // update output port's definition and reinitialize decoder. 
+ if (mInitNeeded && !mIsInFlush) { + bool portWillReset = false; + handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight); + + CHECK_EQ(reInitDecoder(), (status_t)OK); + return; + } + + /* Get a free slot in timestamp array to hold input timestamp */ + { + size_t i; + timeStampIx = 0; + for (i = 0; i < MAX_TIME_STAMPS; i++) { + if (!mTimeStampsValid[i]) { + timeStampIx = i; + break; + } + } + if (inHeader != NULL) { + mTimeStampsValid[timeStampIx] = true; + mTimeStamps[timeStampIx] = inHeader->nTimeStamp; + } + } + + { + ivd_video_decode_ip_t s_dec_ip; + ivd_video_decode_op_t s_dec_op; + WORD32 timeDelay, timeTaken; + size_t sizeY, sizeUV; + + setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx); + // If input dump is enabled, then write to file + DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes); + + GETTIME(&mTimeStart, NULL); + /* Compute time elapsed between end of previous decode() + * to start of current decode() */ + TIME_DIFF(mTimeEnd, mTimeStart, timeDelay); + + IV_API_CALL_STATUS_T status; + status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op); + + bool unsupportedDimensions = + (IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF)); + bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF)); + + GETTIME(&mTimeEnd, NULL); + /* Compute time taken for decode() */ + TIME_DIFF(mTimeStart, mTimeEnd, timeTaken); + + PRINT_TIME("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay, + s_dec_op.u4_num_bytes_consumed); + if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) { + mFlushNeeded = true; + } + + if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) { + /* If the input did not contain picture data, then ignore + * the associated timestamp */ + mTimeStampsValid[timeStampIx] = false; + } + + // This is needed to handle CTS DecoderTest testCodecResetsH264WithoutSurface, + // which is not sending SPS/PPS after port reconfiguration and flush to the codec. + if (unsupportedDimensions && !mFlushNeeded) { + bool portWillReset = false; + handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht); + + CHECK_EQ(reInitDecoder(), (status_t)OK); + + setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx); + + ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op); + return; + } + + // If the decoder is in the changing resolution mode and there is no output present, + // that means the switching is done and it's ready to reset the decoder and the plugin. 
+ if (mChangingResolution && !s_dec_op.u4_output_present) { + mChangingResolution = false; + resetDecoder(); + resetPlugin(); + continue; + } + + if (unsupportedDimensions || resChanged) { + mChangingResolution = true; + if (mFlushNeeded) { + setFlushMode(); + } + + if (unsupportedDimensions) { + mNewWidth = s_dec_op.u4_pic_wd; + mNewHeight = s_dec_op.u4_pic_ht; + mInitNeeded = true; + } + continue; + } + + if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) { + uint32_t width = s_dec_op.u4_pic_wd; + uint32_t height = s_dec_op.u4_pic_ht; + bool portWillReset = false; + handlePortSettingsChange(&portWillReset, width, height); + + if (portWillReset) { + resetDecoder(); + return; + } + } + + if (s_dec_op.u4_output_present) { + outHeader->nFilledLen = (mWidth * mHeight * 3) / 2; + + outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts]; + mTimeStampsValid[s_dec_op.u4_ts] = false; + + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + } else { + /* If in flush mode and no output is returned by the codec, + * then come out of flush mode */ + mIsInFlush = false; + + /* If EOS was recieved on input port and there is no output + * from the codec, then signal EOS on output port */ + if (mReceivedEOS) { + outHeader->nFilledLen = 0; + outHeader->nFlags |= OMX_BUFFERFLAG_EOS; + + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + resetPlugin(); + } + } + } + + if (inHeader != NULL) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } + } +} + +} // namespace android + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, + OMX_COMPONENTTYPE **component) { + return new android::SoftAVC(name, callbacks, appData, component); +} diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h new file mode 100644 index 0000000..191a71d --- /dev/null +++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h @@ -0,0 +1,177 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SOFT_H264_DEC_H_ + +#define SOFT_H264_DEC_H_ + +#include "SoftVideoDecoderOMXComponent.h" +#include <sys/time.h> + +namespace android { + +#define ivd_aligned_malloc(alignment, size) memalign(alignment, size) +#define ivd_aligned_free(buf) free(buf) + +/** Number of entries in the time-stamp array */ +#define MAX_TIME_STAMPS 64 + +/** Maximum number of cores supported by the codec */ +#define CODEC_MAX_NUM_CORES 4 + +#define CODEC_MAX_WIDTH 1920 + +#define CODEC_MAX_HEIGHT 1088 + +/** Input buffer size */ +#define INPUT_BUF_SIZE (1024 * 1024) + +#define MIN(a, b) ((a) < (b)) ? 
(a) : (b) + +/** Used to remove warnings about unused parameters */ +#define UNUSED(x) ((void)(x)) + +/** Get time */ +#define GETTIME(a, b) gettimeofday(a, b); + +/** Compute difference between start and end */ +#define TIME_DIFF(start, end, diff) \ + diff = ((end.tv_sec - start.tv_sec) * 1000000) + \ + (end.tv_usec - start.tv_usec); + +struct SoftAVC : public SoftVideoDecoderOMXComponent { + SoftAVC(const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component); + +protected: + virtual ~SoftAVC(); + + virtual void onQueueFilled(OMX_U32 portIndex); + virtual void onPortFlushCompleted(OMX_U32 portIndex); + virtual void onReset(); + virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params); +private: + // Number of input and output buffers + enum { + kNumBuffers = 8 + }; + + iv_obj_t *mCodecCtx; // Codec context + iv_mem_rec_t *mMemRecords; // Memory records requested by the codec + size_t mNumMemRecords; // Number of memory records requested by the codec + + size_t mNumCores; // Number of cores to be uesd by the codec + + struct timeval mTimeStart; // Time at the start of decode() + struct timeval mTimeEnd; // Time at the end of decode() + + // Internal buffer to be used to flush out the buffers from decoder + uint8_t *mFlushOutBuffer; + + // Status of entries in the timestamp array + bool mTimeStampsValid[MAX_TIME_STAMPS]; + + // Timestamp array - Since codec does not take 64 bit timestamps, + // they are maintained in the plugin + OMX_S64 mTimeStamps[MAX_TIME_STAMPS]; + +#ifdef FILE_DUMP_ENABLE + char mInFile[200]; +#endif /* FILE_DUMP_ENABLE */ + + OMX_COLOR_FORMATTYPE mOmxColorFormat; // OMX Color format + IV_COLOR_FORMAT_T mIvColorFormat; // Ittiam Color format + + bool mIsInFlush; // codec is flush mode + bool mReceivedEOS; // EOS is receieved on input port + bool mInitNeeded; + uint32_t mNewWidth; + uint32_t mNewHeight; + // The input stream has changed to a different resolution, which is still supported by the + // codec. So the codec is switching to decode the new resolution. 
+ bool mChangingResolution; + bool mFlushNeeded; + + status_t initDecoder(); + status_t deInitDecoder(); + status_t setFlushMode(); + status_t setParams(size_t stride); + void logVersion(); + status_t setNumCores(); + status_t resetDecoder(); + status_t resetPlugin(); + status_t reInitDecoder(); + + void setDecodeArgs( + ivd_video_decode_ip_t *ps_dec_ip, + ivd_video_decode_op_t *ps_dec_op, + OMX_BUFFERHEADERTYPE *inHeader, + OMX_BUFFERHEADERTYPE *outHeader, + size_t timeStampIx); + + DISALLOW_EVIL_CONSTRUCTORS(SoftAVC); +}; + +#ifdef FILE_DUMP_ENABLE + +#define INPUT_DUMP_PATH "/sdcard/media/avcd_input" +#define INPUT_DUMP_EXT "h264" + +#define GENERATE_FILE_NAMES() { \ + GETTIME(&mTimeStart, NULL); \ + strcpy(mInFile, ""); \ + sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + INPUT_DUMP_EXT); \ +} + +#define CREATE_DUMP_FILE(m_filename) { \ + FILE *fp = fopen(m_filename, "wb"); \ + if (fp != NULL) { \ + fclose(fp); \ + } else { \ + ALOGD("Could not open file %s", m_filename); \ + } \ +} +#define DUMP_TO_FILE(m_filename, m_buf, m_size) \ +{ \ + FILE *fp = fopen(m_filename, "ab"); \ + if (fp != NULL && m_buf != NULL) { \ + int i; \ + i = fwrite(m_buf, 1, m_size, fp); \ + ALOGD("fwrite ret %d to write %d", i, m_size); \ + if (i != (int) m_size) { \ + ALOGD("Error in fwrite, returned %d", i); \ + perror("Error in write to file"); \ + } \ + fclose(fp); \ + } else { \ + ALOGD("Could not write to file %s", m_filename);\ + } \ +} +#else /* FILE_DUMP_ENABLE */ +#define INPUT_DUMP_PATH +#define INPUT_DUMP_EXT +#define OUTPUT_DUMP_PATH +#define OUTPUT_DUMP_EXT +#define GENERATE_FILE_NAMES() +#define CREATE_DUMP_FILE(m_filename) +#define DUMP_TO_FILE(m_filename, m_buf, m_size) +#endif /* FILE_DUMP_ENABLE */ + +} // namespace android + +#endif // SOFT_H264_DEC_H_ diff --git a/media/libstagefright/codecs/avcenc/Android.mk b/media/libstagefright/codecs/avcenc/Android.mk new file mode 100644 index 0000000..24a4db9 --- /dev/null +++ b/media/libstagefright/codecs/avcenc/Android.mk @@ -0,0 +1,30 @@ +#ifeq ($(if $(wildcard external/libh264),1,0),1) + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_MODULE := libstagefright_soft_avcenc +LOCAL_MODULE_TAGS := optional + +LOCAL_STATIC_LIBRARIES := libavcenc +LOCAL_SRC_FILES := SoftAVCEnc.cpp + +LOCAL_C_INCLUDES := $(TOP)/external/libavc/encoder +LOCAL_C_INCLUDES += $(TOP)/external/libavc/common +LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include +LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax +LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include +LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/hardware +LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax + +LOCAL_SHARED_LIBRARIES := libstagefright +LOCAL_SHARED_LIBRARIES += libstagefright_omx +LOCAL_SHARED_LIBRARIES += libstagefright_foundation +LOCAL_SHARED_LIBRARIES += libutils +LOCAL_SHARED_LIBRARIES += liblog + +LOCAL_LDFLAGS := -Wl,-Bsymbolic + +include $(BUILD_SHARED_LIBRARY) + +#endif diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp new file mode 100644 index 0000000..bf5e353 --- /dev/null +++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp @@ -0,0 +1,1335 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SoftAVCEnc" +#include <utils/Log.h> +#include <utils/misc.h> + +#include "OMX_Video.h" + +#include <HardwareAPI.h> +#include <MetadataBufferType.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/Utils.h> +#include <ui/Rect.h> + +#include "ih264_typedefs.h" +#include "iv2.h" +#include "ive2.h" +#include "ih264e.h" +#include "SoftAVCEnc.h" + +namespace android { + + #define ive_api_function ih264e_api_function + +template<class T> +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 0; + params->nVersion.s.nRevision = 0; + params->nVersion.s.nStep = 0; +} + +typedef struct LevelConversion { + OMX_VIDEO_AVCLEVELTYPE omxLevel; + WORD32 avcLevel; +} LevelConcersion; + +static LevelConversion ConversionTable[] = { + { OMX_VIDEO_AVCLevel1, 10 }, + { OMX_VIDEO_AVCLevel1b, 9 }, + { OMX_VIDEO_AVCLevel11, 11 }, + { OMX_VIDEO_AVCLevel12, 12 }, + { OMX_VIDEO_AVCLevel13, 13 }, + { OMX_VIDEO_AVCLevel2, 20 }, + { OMX_VIDEO_AVCLevel21, 21 }, + { OMX_VIDEO_AVCLevel22, 22 }, + { OMX_VIDEO_AVCLevel3, 30 }, + { OMX_VIDEO_AVCLevel31, 31 }, + { OMX_VIDEO_AVCLevel32, 32 }, + { OMX_VIDEO_AVCLevel4, 40 }, + { OMX_VIDEO_AVCLevel41, 41 }, + { OMX_VIDEO_AVCLevel42, 42 }, + { OMX_VIDEO_AVCLevel5, 50 }, + { OMX_VIDEO_AVCLevel51, 51 }, +}; + +static const CodecProfileLevel kProfileLevels[] = { + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel13 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel2 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel21 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel22 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel3 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel31 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel32 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel4 }, + { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel41 }, +}; + + +static size_t GetCPUCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... 
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGD("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} + +static status_t ConvertOmxAvcLevelToAvcSpecLevel( + OMX_VIDEO_AVCLEVELTYPE omxLevel, WORD32 *avcLevel) { + for (size_t i = 0; i < NELEM(ConversionTable); ++i) { + if (omxLevel == ConversionTable[i].omxLevel) { + *avcLevel = ConversionTable[i].avcLevel; + return OK; + } + } + + ALOGE("ConvertOmxAvcLevelToAvcSpecLevel: %d level not supported", + (int32_t)omxLevel); + + return BAD_VALUE; +} + +static status_t ConvertAvcSpecLevelToOmxAvcLevel( + WORD32 avcLevel, OMX_VIDEO_AVCLEVELTYPE *omxLevel) { + for (size_t i = 0; i < NELEM(ConversionTable); ++i) { + if (avcLevel == ConversionTable[i].avcLevel) { + *omxLevel = ConversionTable[i].omxLevel; + return OK; + } + } + + ALOGE("ConvertAvcSpecLevelToOmxAvcLevel: %d level not supported", + (int32_t)avcLevel); + + return BAD_VALUE; +} + + +SoftAVC::SoftAVC( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SoftVideoEncoderOMXComponent( + name, "video_encoder.avc", OMX_VIDEO_CodingAVC, + kProfileLevels, NELEM(kProfileLevels), + 176 /* width */, 144 /* height */, + callbacks, appData, component), + mIvVideoColorFormat(IV_YUV_420P), + mIDRFrameRefreshIntervalInSec(1), + mAVCEncProfile(IV_PROFILE_BASE), + mAVCEncLevel(31), + mPrevTimestampUs(-1), + mStarted(false), + mSawInputEOS(false), + mSignalledError(false), + mConversionBuffer(NULL), + mCodecCtx(NULL) { + + initPorts(kNumBuffers, kNumBuffers, ((mWidth * mHeight * 3) >> 1), + MEDIA_MIMETYPE_VIDEO_AVC, 2); + + // If dump is enabled, then open create an empty file + GENERATE_FILE_NAMES(); + CREATE_DUMP_FILE(mInFile); + CREATE_DUMP_FILE(mOutFile); + +} + +SoftAVC::~SoftAVC() { + releaseEncoder(); + List<BufferInfo *> &outQueue = getPortQueue(1); + List<BufferInfo *> &inQueue = getPortQueue(0); + CHECK(outQueue.empty()); + CHECK(inQueue.empty()); +} + +OMX_ERRORTYPE SoftAVC::initEncParams() { + mCodecCtx = NULL; + mMemRecords = NULL; + mNumMemRecords = DEFAULT_MEM_REC_CNT; + mHeaderGenerated = 0; + mNumCores = GetCPUCoreCount(); + mArch = DEFAULT_ARCH; + mSliceMode = DEFAULT_SLICE_MODE; + mSliceParam = DEFAULT_SLICE_PARAM; + mHalfPelEnable = DEFAULT_HPEL; + mIInterval = DEFAULT_I_INTERVAL; + mIDRInterval = DEFAULT_IDR_INTERVAL; + mDisableDeblkLevel = DEFAULT_DISABLE_DEBLK_LEVEL; + mFrameRate = DEFAULT_SRC_FRAME_RATE; + mEnableFastSad = DEFAULT_ENABLE_FAST_SAD; + mEnableAltRef = DEFAULT_ENABLE_ALT_REF; + mEncSpeed = DEFAULT_ENC_SPEED; + mIntra4x4 = DEFAULT_INTRA4x4; + mAIRMode = DEFAULT_AIR; + mAIRRefreshPeriod = DEFAULT_AIR_REFRESH_PERIOD; + mPSNREnable = DEFAULT_PSNR_ENABLE; + mReconEnable = DEFAULT_RECON_ENABLE; + + gettimeofday(&mTimeStart, NULL); + gettimeofday(&mTimeEnd, NULL); + + return OMX_ErrorNone; +} + + +OMX_ERRORTYPE SoftAVC::setDimensions() { + ive_ctl_set_dimensions_ip_t s_dimensions_ip; + ive_ctl_set_dimensions_op_t s_dimensions_op; + IV_STATUS_T status; + + s_dimensions_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_dimensions_ip.e_sub_cmd = IVE_CMD_CTL_SET_DIMENSIONS; + s_dimensions_ip.u4_ht = mHeight; + s_dimensions_ip.u4_wd = mWidth; + s_dimensions_ip.u4_strd = mStride; + + s_dimensions_ip.u4_timestamp_high = -1; + s_dimensions_ip.u4_timestamp_low = -1; + + s_dimensions_ip.u4_size = sizeof(ive_ctl_set_dimensions_ip_t); + s_dimensions_op.u4_size = sizeof(ive_ctl_set_dimensions_op_t); + + status = ive_api_function(mCodecCtx, &s_dimensions_ip, &s_dimensions_op); + if (status 
!= IV_SUCCESS) { + ALOGE("Unable to set frame dimensions = 0x%x\n", + s_dimensions_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setNumCores() { + IV_STATUS_T status; + ive_ctl_set_num_cores_ip_t s_num_cores_ip; + ive_ctl_set_num_cores_op_t s_num_cores_op; + s_num_cores_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_num_cores_ip.e_sub_cmd = IVE_CMD_CTL_SET_NUM_CORES; + s_num_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_CORES); + s_num_cores_ip.u4_timestamp_high = -1; + s_num_cores_ip.u4_timestamp_low = -1; + s_num_cores_ip.u4_size = sizeof(ive_ctl_set_num_cores_ip_t); + + s_num_cores_op.u4_size = sizeof(ive_ctl_set_num_cores_op_t); + + status = ive_api_function( + mCodecCtx, (void *) &s_num_cores_ip, (void *) &s_num_cores_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set processor params = 0x%x\n", + s_num_cores_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setFrameRate() { + ive_ctl_set_frame_rate_ip_t s_frame_rate_ip; + ive_ctl_set_frame_rate_op_t s_frame_rate_op; + IV_STATUS_T status; + + s_frame_rate_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_frame_rate_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMERATE; + + s_frame_rate_ip.u4_src_frame_rate = mFrameRate; + s_frame_rate_ip.u4_tgt_frame_rate = mFrameRate; + + s_frame_rate_ip.u4_timestamp_high = -1; + s_frame_rate_ip.u4_timestamp_low = -1; + + s_frame_rate_ip.u4_size = sizeof(ive_ctl_set_frame_rate_ip_t); + s_frame_rate_op.u4_size = sizeof(ive_ctl_set_frame_rate_op_t); + + status = ive_api_function(mCodecCtx, &s_frame_rate_ip, &s_frame_rate_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set frame rate = 0x%x\n", + s_frame_rate_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setIpeParams() { + ive_ctl_set_ipe_params_ip_t s_ipe_params_ip; + ive_ctl_set_ipe_params_op_t s_ipe_params_op; + IV_STATUS_T status; + + s_ipe_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_ipe_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_IPE_PARAMS; + + s_ipe_params_ip.u4_enable_intra_4x4 = mIntra4x4; + s_ipe_params_ip.u4_enc_speed_preset = mEncSpeed; + + s_ipe_params_ip.u4_timestamp_high = -1; + s_ipe_params_ip.u4_timestamp_low = -1; + + s_ipe_params_ip.u4_size = sizeof(ive_ctl_set_ipe_params_ip_t); + s_ipe_params_op.u4_size = sizeof(ive_ctl_set_ipe_params_op_t); + + status = ive_api_function(mCodecCtx, &s_ipe_params_ip, &s_ipe_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set ipe params = 0x%x\n", + s_ipe_params_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setBitRate() { + ive_ctl_set_bitrate_ip_t s_bitrate_ip; + ive_ctl_set_bitrate_op_t s_bitrate_op; + IV_STATUS_T status; + + s_bitrate_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_bitrate_ip.e_sub_cmd = IVE_CMD_CTL_SET_BITRATE; + + s_bitrate_ip.u4_target_bitrate = mBitrate; + + s_bitrate_ip.u4_timestamp_high = -1; + s_bitrate_ip.u4_timestamp_low = -1; + + s_bitrate_ip.u4_size = sizeof(ive_ctl_set_bitrate_ip_t); + s_bitrate_op.u4_size = sizeof(ive_ctl_set_bitrate_op_t); + + status = ive_api_function(mCodecCtx, &s_bitrate_ip, &s_bitrate_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set bit rate = 0x%x\n", s_bitrate_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type) { + ive_ctl_set_frame_type_ip_t s_frame_type_ip; + ive_ctl_set_frame_type_op_t s_frame_type_op; + IV_STATUS_T status; + + 
s_frame_type_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_frame_type_ip.e_sub_cmd = IVE_CMD_CTL_SET_FRAMETYPE; + + s_frame_type_ip.e_frame_type = e_frame_type; + + s_frame_type_ip.u4_timestamp_high = -1; + s_frame_type_ip.u4_timestamp_low = -1; + + s_frame_type_ip.u4_size = sizeof(ive_ctl_set_frame_type_ip_t); + s_frame_type_op.u4_size = sizeof(ive_ctl_set_frame_type_op_t); + + status = ive_api_function(mCodecCtx, &s_frame_type_ip, &s_frame_type_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set frame type = 0x%x\n", + s_frame_type_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setQp() { + ive_ctl_set_qp_ip_t s_qp_ip; + ive_ctl_set_qp_op_t s_qp_op; + IV_STATUS_T status; + + s_qp_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_qp_ip.e_sub_cmd = IVE_CMD_CTL_SET_QP; + + s_qp_ip.u4_i_qp = DEFAULT_I_QP; + s_qp_ip.u4_i_qp_max = DEFAULT_QP_MAX; + s_qp_ip.u4_i_qp_min = DEFAULT_QP_MIN; + + s_qp_ip.u4_p_qp = DEFAULT_P_QP; + s_qp_ip.u4_p_qp_max = DEFAULT_QP_MAX; + s_qp_ip.u4_p_qp_min = DEFAULT_QP_MIN; + + s_qp_ip.u4_b_qp = DEFAULT_P_QP; + s_qp_ip.u4_b_qp_max = DEFAULT_QP_MAX; + s_qp_ip.u4_b_qp_min = DEFAULT_QP_MIN; + + s_qp_ip.u4_timestamp_high = -1; + s_qp_ip.u4_timestamp_low = -1; + + s_qp_ip.u4_size = sizeof(ive_ctl_set_qp_ip_t); + s_qp_op.u4_size = sizeof(ive_ctl_set_qp_op_t); + + status = ive_api_function(mCodecCtx, &s_qp_ip, &s_qp_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set qp 0x%x\n", s_qp_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setEncMode(IVE_ENC_MODE_T e_enc_mode) { + IV_STATUS_T status; + ive_ctl_set_enc_mode_ip_t s_enc_mode_ip; + ive_ctl_set_enc_mode_op_t s_enc_mode_op; + + s_enc_mode_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_enc_mode_ip.e_sub_cmd = IVE_CMD_CTL_SET_ENC_MODE; + + s_enc_mode_ip.e_enc_mode = e_enc_mode; + + s_enc_mode_ip.u4_timestamp_high = -1; + s_enc_mode_ip.u4_timestamp_low = -1; + + s_enc_mode_ip.u4_size = sizeof(ive_ctl_set_enc_mode_ip_t); + s_enc_mode_op.u4_size = sizeof(ive_ctl_set_enc_mode_op_t); + + status = ive_api_function(mCodecCtx, &s_enc_mode_ip, &s_enc_mode_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set in header encode mode = 0x%x\n", + s_enc_mode_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setVbvParams() { + ive_ctl_set_vbv_params_ip_t s_vbv_ip; + ive_ctl_set_vbv_params_op_t s_vbv_op; + IV_STATUS_T status; + + s_vbv_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_vbv_ip.e_sub_cmd = IVE_CMD_CTL_SET_VBV_PARAMS; + + s_vbv_ip.u4_vbv_buf_size = 0; + s_vbv_ip.u4_vbv_buffer_delay = 1000; + + s_vbv_ip.u4_timestamp_high = -1; + s_vbv_ip.u4_timestamp_low = -1; + + s_vbv_ip.u4_size = sizeof(ive_ctl_set_vbv_params_ip_t); + s_vbv_op.u4_size = sizeof(ive_ctl_set_vbv_params_op_t); + + status = ive_api_function(mCodecCtx, &s_vbv_ip, &s_vbv_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set VBC params = 0x%x\n", s_vbv_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setAirParams() { + ive_ctl_set_air_params_ip_t s_air_ip; + ive_ctl_set_air_params_op_t s_air_op; + IV_STATUS_T status; + + s_air_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_air_ip.e_sub_cmd = IVE_CMD_CTL_SET_AIR_PARAMS; + + s_air_ip.e_air_mode = mAIRMode; + s_air_ip.u4_air_refresh_period = mAIRRefreshPeriod; + + s_air_ip.u4_timestamp_high = -1; + s_air_ip.u4_timestamp_low = -1; + + s_air_ip.u4_size = sizeof(ive_ctl_set_air_params_ip_t); + s_air_op.u4_size = sizeof(ive_ctl_set_air_params_op_t); + 
+ status = ive_api_function(mCodecCtx, &s_air_ip, &s_air_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set air params = 0x%x\n", s_air_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setMeParams() { + IV_STATUS_T status; + ive_ctl_set_me_params_ip_t s_me_params_ip; + ive_ctl_set_me_params_op_t s_me_params_op; + + s_me_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_me_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_ME_PARAMS; + + s_me_params_ip.u4_enable_fast_sad = mEnableFastSad; + s_me_params_ip.u4_enable_alt_ref = mEnableAltRef; + + s_me_params_ip.u4_enable_hpel = mHalfPelEnable; + s_me_params_ip.u4_enable_qpel = DEFAULT_QPEL; + s_me_params_ip.u4_me_speed_preset = DEFAULT_ME_SPEED; + s_me_params_ip.u4_srch_rng_x = DEFAULT_SRCH_RNG_X; + s_me_params_ip.u4_srch_rng_y = DEFAULT_SRCH_RNG_Y; + + s_me_params_ip.u4_timestamp_high = -1; + s_me_params_ip.u4_timestamp_low = -1; + + s_me_params_ip.u4_size = sizeof(ive_ctl_set_me_params_ip_t); + s_me_params_op.u4_size = sizeof(ive_ctl_set_me_params_op_t); + + status = ive_api_function(mCodecCtx, &s_me_params_ip, &s_me_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set me params = 0x%x\n", s_me_params_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setGopParams() { + IV_STATUS_T status; + ive_ctl_set_gop_params_ip_t s_gop_params_ip; + ive_ctl_set_gop_params_op_t s_gop_params_op; + + s_gop_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_gop_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_GOP_PARAMS; + + s_gop_params_ip.u4_i_frm_interval = mIInterval; + s_gop_params_ip.u4_idr_frm_interval = mIDRInterval; + s_gop_params_ip.u4_num_b_frames = DEFAULT_B_FRAMES; + + s_gop_params_ip.u4_timestamp_high = -1; + s_gop_params_ip.u4_timestamp_low = -1; + + s_gop_params_ip.u4_size = sizeof(ive_ctl_set_gop_params_ip_t); + s_gop_params_op.u4_size = sizeof(ive_ctl_set_gop_params_op_t); + + status = ive_api_function(mCodecCtx, &s_gop_params_ip, &s_gop_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set ME params = 0x%x\n", + s_gop_params_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setProfileParams() { + IV_STATUS_T status; + ive_ctl_set_profile_params_ip_t s_profile_params_ip; + ive_ctl_set_profile_params_op_t s_profile_params_op; + + s_profile_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_profile_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_PROFILE_PARAMS; + + s_profile_params_ip.e_profile = DEFAULT_EPROFILE; + + s_profile_params_ip.u4_timestamp_high = -1; + s_profile_params_ip.u4_timestamp_low = -1; + + s_profile_params_ip.u4_size = sizeof(ive_ctl_set_profile_params_ip_t); + s_profile_params_op.u4_size = sizeof(ive_ctl_set_profile_params_op_t); + + status = ive_api_function(mCodecCtx, &s_profile_params_ip, &s_profile_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to set profile params = 0x%x\n", + s_profile_params_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setDeblockParams() { + IV_STATUS_T status; + ive_ctl_set_deblock_params_ip_t s_deblock_params_ip; + ive_ctl_set_deblock_params_op_t s_deblock_params_op; + + s_deblock_params_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_deblock_params_ip.e_sub_cmd = IVE_CMD_CTL_SET_DEBLOCK_PARAMS; + + s_deblock_params_ip.u4_disable_deblock_level = mDisableDeblkLevel; + + s_deblock_params_ip.u4_timestamp_high = -1; + s_deblock_params_ip.u4_timestamp_low = -1; + + s_deblock_params_ip.u4_size = 
sizeof(ive_ctl_set_deblock_params_ip_t); + s_deblock_params_op.u4_size = sizeof(ive_ctl_set_deblock_params_op_t); + + status = ive_api_function(mCodecCtx, &s_deblock_params_ip, &s_deblock_params_op); + if (status != IV_SUCCESS) { + ALOGE("Unable to enable/disable deblock params = 0x%x\n", + s_deblock_params_op.u4_error_code); + return OMX_ErrorUndefined; + } + return OMX_ErrorNone; +} + +void SoftAVC::logVersion() { + ive_ctl_getversioninfo_ip_t s_ctl_ip; + ive_ctl_getversioninfo_op_t s_ctl_op; + UWORD8 au1_buf[512]; + IV_STATUS_T status; + + s_ctl_ip.e_cmd = IVE_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVE_CMD_CTL_GETVERSION; + s_ctl_ip.u4_size = sizeof(ive_ctl_getversioninfo_ip_t); + s_ctl_op.u4_size = sizeof(ive_ctl_getversioninfo_op_t); + s_ctl_ip.pu1_version = au1_buf; + s_ctl_ip.u4_version_bufsize = sizeof(au1_buf); + + status = ive_api_function(mCodecCtx, (void *) &s_ctl_ip, (void *) &s_ctl_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in getting version: 0x%x", s_ctl_op.u4_error_code); + } else { + ALOGV("Ittiam encoder version: %s", (char *)s_ctl_ip.pu1_version); + } + return; +} + +OMX_ERRORTYPE SoftAVC::initEncoder() { + IV_STATUS_T status; + size_t i; + WORD32 level; + uint32_t displaySizeY; + CHECK(!mStarted); + + OMX_ERRORTYPE errType = OMX_ErrorNone; + + displaySizeY = mWidth * mHeight; + if (displaySizeY > (1920 * 1088)) { + level = 50; + } else if (displaySizeY > (1280 * 720)) { + level = 40; + } else if (displaySizeY > (720 * 576)) { + level = 31; + } else if (displaySizeY > (624 * 320)) { + level = 30; + } else if (displaySizeY > (352 * 288)) { + level = 21; + } else { + level = 20; + } + mAVCEncLevel = MAX(level, mAVCEncLevel); + + if (OMX_ErrorNone != (errType = initEncParams())) { + ALOGE("Failed to initialize encoder params"); + mSignalledError = true; + notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return errType; + } + + mStride = ALIGN16(mWidth); + + if (mInputDataIsMeta) { + if (mConversionBuffer) { + free(mConversionBuffer); + mConversionBuffer = NULL; + } + + if (mConversionBuffer == NULL) { + mConversionBuffer = (uint8_t *)malloc(mStride * mHeight * 3 / 2); + if (mConversionBuffer == NULL) { + ALOGE("Allocating conversion buffer failed."); + return OMX_ErrorUndefined; + } + } + } + + switch (mColorFormat) { + case OMX_COLOR_FormatYUV420SemiPlanar: + mIvVideoColorFormat = IV_YUV_420SP_UV; + ALOGV("colorFormat YUV_420SP"); + break; + default: + case OMX_COLOR_FormatYUV420Planar: + mIvVideoColorFormat = IV_YUV_420P; + ALOGV("colorFormat YUV_420P"); + break; + } + + ALOGV("Params width %d height %d level %d colorFormat %d", mWidth, + mHeight, mAVCEncLevel, mIvVideoColorFormat); + + /* Getting Number of MemRecords */ + { + iv_num_mem_rec_ip_t s_num_mem_rec_ip; + iv_num_mem_rec_op_t s_num_mem_rec_op; + + s_num_mem_rec_ip.u4_size = sizeof(iv_num_mem_rec_ip_t); + s_num_mem_rec_op.u4_size = sizeof(iv_num_mem_rec_op_t); + + s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC; + + status = ive_api_function(0, &s_num_mem_rec_ip, &s_num_mem_rec_op); + + if (status != IV_SUCCESS) { + ALOGE("Get number of memory records failed = 0x%x\n", + s_num_mem_rec_op.u4_error_code); + return OMX_ErrorUndefined; + } + + mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec; + } + + /* Allocate array to hold memory records */ + mMemRecords = (iv_mem_rec_t *)malloc(mNumMemRecords * sizeof(iv_mem_rec_t)); + if (NULL == mMemRecords) { + ALOGE("Unable to allocate memory for hold memory records: Size %d", + mNumMemRecords * sizeof(iv_mem_rec_t)); + mSignalledError = true; + 
notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return OMX_ErrorUndefined; + } + + { + iv_mem_rec_t *ps_mem_rec; + ps_mem_rec = mMemRecords; + for (i = 0; i < mNumMemRecords; i++) { + ps_mem_rec->u4_size = sizeof(iv_mem_rec_t); + ps_mem_rec->pv_base = NULL; + ps_mem_rec->u4_mem_size = 0; + ps_mem_rec->u4_mem_alignment = 0; + ps_mem_rec->e_mem_type = IV_NA_MEM_TYPE; + + ps_mem_rec++; + } + } + + /* Getting MemRecords Attributes */ + { + iv_fill_mem_rec_ip_t s_fill_mem_rec_ip; + iv_fill_mem_rec_op_t s_fill_mem_rec_op; + + s_fill_mem_rec_ip.u4_size = sizeof(iv_fill_mem_rec_ip_t); + s_fill_mem_rec_op.u4_size = sizeof(iv_fill_mem_rec_op_t); + + s_fill_mem_rec_ip.e_cmd = IV_CMD_FILL_NUM_MEM_REC; + s_fill_mem_rec_ip.ps_mem_rec = mMemRecords; + s_fill_mem_rec_ip.u4_num_mem_rec = mNumMemRecords; + s_fill_mem_rec_ip.u4_max_wd = mWidth; + s_fill_mem_rec_ip.u4_max_ht = mHeight; + s_fill_mem_rec_ip.u4_max_level = mAVCEncLevel; + s_fill_mem_rec_ip.e_color_format = DEFAULT_INP_COLOR_FORMAT; + s_fill_mem_rec_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM; + s_fill_mem_rec_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM; + s_fill_mem_rec_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X; + s_fill_mem_rec_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y; + + status = ive_api_function(0, &s_fill_mem_rec_ip, &s_fill_mem_rec_op); + + if (status != IV_SUCCESS) { + ALOGE("Fill memory records failed = 0x%x\n", + s_fill_mem_rec_op.u4_error_code); + mSignalledError = true; + notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return OMX_ErrorUndefined; + } + } + + /* Allocating Memory for Mem Records */ + { + WORD32 total_size; + iv_mem_rec_t *ps_mem_rec; + total_size = 0; + + ps_mem_rec = mMemRecords; + for (i = 0; i < mNumMemRecords; i++) { + ps_mem_rec->pv_base = ive_aligned_malloc( + ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size); + if (ps_mem_rec->pv_base == NULL) { + ALOGE("Allocation failure for mem record id %d size %d\n", i, + ps_mem_rec->u4_mem_size); + mSignalledError = true; + notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return OMX_ErrorUndefined; + + } + total_size += ps_mem_rec->u4_mem_size; + + ps_mem_rec++; + } + printf("\nTotal memory for codec %d\n", total_size); + } + + /* Codec Instance Creation */ + { + ive_init_ip_t s_init_ip; + ive_init_op_t s_init_op; + + mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base; + mCodecCtx->u4_size = sizeof(iv_obj_t); + mCodecCtx->pv_fxns = (void *)ive_api_function; + + s_init_ip.u4_size = sizeof(ive_init_ip_t); + s_init_op.u4_size = sizeof(ive_init_op_t); + + s_init_ip.e_cmd = IV_CMD_INIT; + s_init_ip.u4_num_mem_rec = mNumMemRecords; + s_init_ip.ps_mem_rec = mMemRecords; + s_init_ip.u4_max_wd = mWidth; + s_init_ip.u4_max_ht = mHeight; + s_init_ip.u4_max_ref_cnt = DEFAULT_MAX_REF_FRM; + s_init_ip.u4_max_reorder_cnt = DEFAULT_MAX_REORDER_FRM; + s_init_ip.u4_max_level = mAVCEncLevel; + s_init_ip.e_inp_color_fmt = mIvVideoColorFormat; + + if (mReconEnable || mPSNREnable) { + s_init_ip.u4_enable_recon = 1; + } else { + s_init_ip.u4_enable_recon = 0; + } + s_init_ip.e_recon_color_fmt = DEFAULT_RECON_COLOR_FORMAT; + s_init_ip.e_rc_mode = DEFAULT_RC_MODE; + s_init_ip.u4_max_framerate = DEFAULT_MAX_FRAMERATE; + s_init_ip.u4_max_bitrate = DEFAULT_MAX_BITRATE; + s_init_ip.u4_max_num_bframes = DEFAULT_B_FRAMES; + s_init_ip.e_content_type = IV_PROGRESSIVE; + s_init_ip.u4_max_srch_rng_x = DEFAULT_MAX_SRCH_RANGE_X; + s_init_ip.u4_max_srch_rng_y = DEFAULT_MAX_SRCH_RANGE_Y; + s_init_ip.e_slice_mode = mSliceMode; + s_init_ip.u4_slice_param = mSliceParam; + s_init_ip.e_arch = 
mArch; + s_init_ip.e_soc = DEFAULT_SOC; + + status = ive_api_function(mCodecCtx, &s_init_ip, &s_init_op); + + if (status != IV_SUCCESS) { + ALOGE("Init memory records failed = 0x%x\n", + s_init_op.u4_error_code); + mSignalledError = true; + notify(OMX_EventError, OMX_ErrorUndefined, 0 /* arg2 */, NULL /* data */); + return OMX_ErrorUndefined; + } + } + + /* Get Codec Version */ + logVersion(); + + /* set processor details */ + setNumCores(); + + /* Video control Set Frame dimensions */ + setDimensions(); + + /* Video control Set Frame rates */ + setFrameRate(); + + /* Video control Set IPE Params */ + setIpeParams(); + + /* Video control Set Bitrate */ + setBitRate(); + + /* Video control Set QP */ + setQp(); + + /* Video control Set AIR params */ + setAirParams(); + + /* Video control Set VBV params */ + setVbvParams(); + + /* Video control Set Motion estimation params */ + setMeParams(); + + /* Video control Set GOP params */ + setGopParams(); + + /* Video control Set Deblock params */ + setDeblockParams(); + + /* Video control Set Profile params */ + setProfileParams(); + + /* Video control Set in Encode header mode */ + setEncMode(IVE_ENC_MODE_HEADER); + + ALOGV("init_codec successfull"); + + mSpsPpsHeaderReceived = false; + mStarted = true; + + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::releaseEncoder() { + IV_STATUS_T status = IV_SUCCESS; + iv_retrieve_mem_rec_ip_t s_retrieve_mem_ip; + iv_retrieve_mem_rec_op_t s_retrieve_mem_op; + iv_mem_rec_t *ps_mem_rec; + UWORD32 i; + + if (!mStarted) { + return OMX_ErrorNone; + } + + s_retrieve_mem_ip.u4_size = sizeof(iv_retrieve_mem_rec_ip_t); + s_retrieve_mem_op.u4_size = sizeof(iv_retrieve_mem_rec_op_t); + s_retrieve_mem_ip.e_cmd = IV_CMD_RETRIEVE_MEMREC; + s_retrieve_mem_ip.ps_mem_rec = mMemRecords; + + status = ive_api_function(mCodecCtx, &s_retrieve_mem_ip, &s_retrieve_mem_op); + + if (status != IV_SUCCESS) { + ALOGE("Unable to retrieve memory records = 0x%x\n", + s_retrieve_mem_op.u4_error_code); + return OMX_ErrorUndefined; + } + + /* Free memory records */ + ps_mem_rec = mMemRecords; + for (i = 0; i < s_retrieve_mem_op.u4_num_mem_rec_filled; i++) { + ive_aligned_free(ps_mem_rec->pv_base); + ps_mem_rec++; + } + + free(mMemRecords); + + if (mConversionBuffer != NULL) { + free(mConversionBuffer); + mConversionBuffer = NULL; + } + + mStarted = false; + + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::internalGetParameter(OMX_INDEXTYPE index, OMX_PTR params) { + switch (index) { + case OMX_IndexParamVideoBitrate: + { + OMX_VIDEO_PARAM_BITRATETYPE *bitRate = + (OMX_VIDEO_PARAM_BITRATETYPE *)params; + + if (bitRate->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + bitRate->eControlRate = OMX_Video_ControlRateVariable; + bitRate->nTargetBitrate = mBitrate; + return OMX_ErrorNone; + } + + case OMX_IndexParamVideoAvc: + { + OMX_VIDEO_PARAM_AVCTYPE *avcParams = (OMX_VIDEO_PARAM_AVCTYPE *)params; + + if (avcParams->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + avcParams->eProfile = OMX_VIDEO_AVCProfileBaseline; + OMX_VIDEO_AVCLEVELTYPE omxLevel = OMX_VIDEO_AVCLevel31; + if (OMX_ErrorNone + != ConvertAvcSpecLevelToOmxAvcLevel(mAVCEncLevel, &omxLevel)) { + return OMX_ErrorUndefined; + } + + avcParams->eLevel = omxLevel; + avcParams->nRefFrames = 1; + avcParams->nBFrames = 0; + avcParams->bUseHadamard = OMX_TRUE; + avcParams->nAllowedPictureTypes = (OMX_VIDEO_PictureTypeI + | OMX_VIDEO_PictureTypeP); + avcParams->nRefIdx10ActiveMinus1 = 0; + avcParams->nRefIdx11ActiveMinus1 = 0; + avcParams->bWeightedPPrediction = 
OMX_FALSE; + avcParams->bEntropyCodingCABAC = OMX_FALSE; + avcParams->bconstIpred = OMX_FALSE; + avcParams->bDirect8x8Inference = OMX_FALSE; + avcParams->bDirectSpatialTemporal = OMX_FALSE; + avcParams->nCabacInitIdc = 0; + return OMX_ErrorNone; + } + + default: + return SoftVideoEncoderOMXComponent::internalGetParameter(index, params); + } +} + +OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) { + int32_t indexFull = index; + + switch (indexFull) { + case OMX_IndexParamVideoBitrate: + { + return internalSetBitrateParams( + (const OMX_VIDEO_PARAM_BITRATETYPE *)params); + } + + case OMX_IndexParamVideoAvc: + { + OMX_VIDEO_PARAM_AVCTYPE *avcType = (OMX_VIDEO_PARAM_AVCTYPE *)params; + + if (avcType->nPortIndex != 1) { + return OMX_ErrorUndefined; + } + + if (avcType->eProfile != OMX_VIDEO_AVCProfileBaseline + || avcType->nRefFrames != 1 || avcType->nBFrames != 0 + || avcType->bUseHadamard != OMX_TRUE + || (avcType->nAllowedPictureTypes & OMX_VIDEO_PictureTypeB) != 0 + || avcType->nRefIdx10ActiveMinus1 != 0 + || avcType->nRefIdx11ActiveMinus1 != 0 + || avcType->bWeightedPPrediction != OMX_FALSE + || avcType->bEntropyCodingCABAC != OMX_FALSE + || avcType->bconstIpred != OMX_FALSE + || avcType->bDirect8x8Inference != OMX_FALSE + || avcType->bDirectSpatialTemporal != OMX_FALSE + || avcType->nCabacInitIdc != 0) { + return OMX_ErrorUndefined; + } + + if (OK != ConvertOmxAvcLevelToAvcSpecLevel(avcType->eLevel, &mAVCEncLevel)) { + return OMX_ErrorUndefined; + } + + return OMX_ErrorNone; + } + + default: + return SoftVideoEncoderOMXComponent::internalSetParameter(index, params); + } +} + +OMX_ERRORTYPE SoftAVC::setConfig( + OMX_INDEXTYPE index, const OMX_PTR _params) { + switch (index) { + case OMX_IndexConfigVideoIntraVOPRefresh: + { + OMX_CONFIG_INTRAREFRESHVOPTYPE *params = + (OMX_CONFIG_INTRAREFRESHVOPTYPE *)_params; + + if (params->nPortIndex != kOutputPortIndex) { + return OMX_ErrorBadPortIndex; + } + + mKeyFrameRequested = params->IntraRefreshVOP; + return OMX_ErrorNone; + } + + case OMX_IndexConfigVideoBitrate: + { + OMX_VIDEO_CONFIG_BITRATETYPE *params = + (OMX_VIDEO_CONFIG_BITRATETYPE *)_params; + + if (params->nPortIndex != kOutputPortIndex) { + return OMX_ErrorBadPortIndex; + } + + if (mBitrate != params->nEncodeBitrate) { + mBitrate = params->nEncodeBitrate; + mBitrateUpdated = true; + } + return OMX_ErrorNone; + } + + default: + return SimpleSoftOMXComponent::setConfig(index, _params); + } +} + +OMX_ERRORTYPE SoftAVC::internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE *bitrate) { + if (bitrate->nPortIndex != kOutputPortIndex) { + return OMX_ErrorUnsupportedIndex; + } + + mBitrate = bitrate->nTargetBitrate; + mBitrateUpdated = true; + + return OMX_ErrorNone; +} + +OMX_ERRORTYPE SoftAVC::setEncodeArgs( + ive_video_encode_ip_t *ps_encode_ip, + ive_video_encode_op_t *ps_encode_op, + OMX_BUFFERHEADERTYPE *inputBufferHeader, + OMX_BUFFERHEADERTYPE *outputBufferHeader) { + iv_raw_buf_t *ps_inp_raw_buf; + const uint8_t *source; + UWORD8 *pu1_buf; + + ps_inp_raw_buf = &ps_encode_ip->s_inp_buf; + ps_encode_ip->s_out_buf.pv_buf = outputBufferHeader->pBuffer; + ps_encode_ip->s_out_buf.u4_bytes = 0; + ps_encode_ip->s_out_buf.u4_bufsize = outputBufferHeader->nAllocLen; + ps_encode_ip->u4_size = sizeof(ive_video_encode_ip_t); + ps_encode_op->u4_size = sizeof(ive_video_encode_op_t); + + ps_encode_ip->e_cmd = IVE_CMD_VIDEO_ENCODE; + ps_encode_ip->pv_bufs = NULL; + ps_encode_ip->pv_mb_info = NULL; + ps_encode_ip->pv_pic_info = NULL; + 
ps_encode_ip->u4_mb_info_type = 0; + ps_encode_ip->u4_pic_info_type = 0; + ps_encode_op->s_out_buf.pv_buf = NULL; + + /* Initialize color formats */ + ps_inp_raw_buf->e_color_fmt = mIvVideoColorFormat; + + source = NULL; + if (inputBufferHeader) { + source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset; + + if (mInputDataIsMeta) { + source = extractGraphicBuffer( + mConversionBuffer, (mWidth * mHeight * 3 / 2), source, + inputBufferHeader->nFilledLen, mWidth, mHeight); + + if (source == NULL) { + ALOGE("Error in extractGraphicBuffer"); + notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return OMX_ErrorUndefined; + } + } + } + + pu1_buf = (UWORD8 *)source; + switch (mIvVideoColorFormat) { + case IV_YUV_420P: + { + ps_inp_raw_buf->apv_bufs[0] = pu1_buf; + pu1_buf += (mStride) * mHeight; + ps_inp_raw_buf->apv_bufs[1] = pu1_buf; + pu1_buf += (mStride / 2) * mHeight / 2; + ps_inp_raw_buf->apv_bufs[2] = pu1_buf; + + ps_inp_raw_buf->au4_wd[0] = mWidth; + ps_inp_raw_buf->au4_wd[1] = mWidth / 2; + ps_inp_raw_buf->au4_wd[2] = mWidth / 2; + + ps_inp_raw_buf->au4_ht[0] = mHeight; + ps_inp_raw_buf->au4_ht[1] = mHeight / 2; + ps_inp_raw_buf->au4_ht[2] = mHeight / 2; + + ps_inp_raw_buf->au4_strd[0] = mStride; + ps_inp_raw_buf->au4_strd[1] = (mStride / 2); + ps_inp_raw_buf->au4_strd[2] = (mStride / 2); + break; + } + + case IV_YUV_422ILE: + { + ps_inp_raw_buf->apv_bufs[0] = pu1_buf; + ps_inp_raw_buf->au4_wd[0] = mWidth * 2; + ps_inp_raw_buf->au4_ht[0] = mHeight; + ps_inp_raw_buf->au4_strd[0] = mStride * 2; + break; + } + + case IV_YUV_420SP_UV: + case IV_YUV_420SP_VU: + default: + { + ps_inp_raw_buf->apv_bufs[0] = pu1_buf; + pu1_buf += (mStride) * mHeight; + ps_inp_raw_buf->apv_bufs[1] = pu1_buf; + + ps_inp_raw_buf->au4_wd[0] = mWidth; + ps_inp_raw_buf->au4_wd[1] = mWidth; + + ps_inp_raw_buf->au4_ht[0] = mHeight; + ps_inp_raw_buf->au4_ht[1] = mHeight / 2; + + ps_inp_raw_buf->au4_strd[0] = mStride; + ps_inp_raw_buf->au4_strd[1] = mStride; + break; + } + } + + ps_encode_ip->u4_is_last = 0; + + if (inputBufferHeader) { + ps_encode_ip->u4_timestamp_high = (inputBufferHeader->nTimeStamp) >> 32; + ps_encode_ip->u4_timestamp_low = (inputBufferHeader->nTimeStamp) & 0xFFFFFFFF; + } + + return OMX_ErrorNone; +} + +void SoftAVC::onQueueFilled(OMX_U32 portIndex) { + IV_STATUS_T status; + WORD32 timeDelay, timeTaken; + + UNUSED(portIndex); + + // Initialize encoder if not already initialized + if (mCodecCtx == NULL) { + if (OMX_ErrorNone != initEncoder()) { + ALOGE("Failed to initialize encoder"); + notify(OMX_EventError, OMX_ErrorUndefined, 0 /* arg2 */, NULL /* data */); + return; + } + } + if (mSignalledError || mSawInputEOS) { + return; + } + + List<BufferInfo *> &inQueue = getPortQueue(0); + List<BufferInfo *> &outQueue = getPortQueue(1); + + while (!mSawInputEOS && !inQueue.empty() && !outQueue.empty()) { + OMX_ERRORTYPE error; + ive_video_encode_ip_t s_encode_ip; + ive_video_encode_op_t s_encode_op; + + BufferInfo *inputBufferInfo = *inQueue.begin(); + OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader; + + BufferInfo *outputBufferInfo = *outQueue.begin(); + OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader; + + if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { + inQueue.erase(inQueue.begin()); + inputBufferInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inputBufferHeader); + + outputBufferHeader->nFilledLen = 0; + outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS; + + outQueue.erase(outQueue.begin()); + outputBufferInfo->mOwnedByUs = false; + 
notifyFillBufferDone(outputBufferHeader); + return; + } + + outputBufferHeader->nTimeStamp = 0; + outputBufferHeader->nFlags = 0; + outputBufferHeader->nOffset = 0; + outputBufferHeader->nFilledLen = 0; + outputBufferHeader->nOffset = 0; + + uint8_t *outPtr = (uint8_t *)outputBufferHeader->pBuffer; + + if (!mSpsPpsHeaderReceived) { + error = setEncodeArgs(&s_encode_ip, &s_encode_op, NULL, outputBufferHeader); + if (error != OMX_ErrorNone) { + mSignalledError = true; + notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return; + } + status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op); + + if (IV_SUCCESS != status) { + ALOGE("Encode Frame failed = 0x%x\n", + s_encode_op.u4_error_code); + } else { + ALOGV("Bytes Generated in header %d\n", + s_encode_op.s_out_buf.u4_bytes); + } + + mSpsPpsHeaderReceived = true; + + outputBufferHeader->nFlags = OMX_BUFFERFLAG_CODECCONFIG; + outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes; + outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp; + + outQueue.erase(outQueue.begin()); + outputBufferInfo->mOwnedByUs = false; + DUMP_TO_FILE( + mOutFile, outputBufferHeader->pBuffer, + outputBufferHeader->nFilledLen); + notifyFillBufferDone(outputBufferHeader); + + setEncMode(IVE_ENC_MODE_PICTURE); + return; + } + + if (mBitrateUpdated) { + setBitRate(); + } + + if (mKeyFrameRequested) { + setFrameType(IV_IDR_FRAME); + } + + mPrevTimestampUs = inputBufferHeader->nTimeStamp; + + if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) { + mSawInputEOS = true; + } + + error = setEncodeArgs( + &s_encode_ip, &s_encode_op, inputBufferHeader, outputBufferHeader); + if (error != OMX_ErrorNone) { + mSignalledError = true; + notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return; + } + + DUMP_TO_FILE( + mInFile, s_encode_ip.s_inp_buf.apv_bufs[0], + (mHeight * mStride * 3 / 2)); + //DUMP_TO_FILE(mInFile, inputBufferHeader->pBuffer + inputBufferHeader->nOffset, + // inputBufferHeader->nFilledLen); + + GETTIME(&mTimeStart, NULL); + /* Compute time elapsed between end of previous decode() + * to start of current decode() */ + TIME_DIFF(mTimeEnd, mTimeStart, timeDelay); + + status = ive_api_function(mCodecCtx, &s_encode_ip, &s_encode_op); + + if (IV_SUCCESS != status) { + ALOGE("Encode Frame failed = 0x%x\n", + s_encode_op.u4_error_code); + mSignalledError = true; + notify(OMX_EventError, OMX_ErrorUndefined, 0, 0); + return; + } + + GETTIME(&mTimeEnd, NULL); + /* Compute time taken for decode() */ + TIME_DIFF(mTimeStart, mTimeEnd, timeTaken); + + ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay, + s_encode_op.s_out_buf.u4_bytes); + + + outputBufferHeader->nFlags = inputBufferHeader->nFlags; + outputBufferHeader->nFilledLen = s_encode_op.s_out_buf.u4_bytes; + outputBufferHeader->nTimeStamp = inputBufferHeader->nTimeStamp; + + if (IV_IDR_FRAME + == s_encode_op.u4_encoded_frame_type) { + outputBufferHeader->nFlags |= OMX_BUFFERFLAG_SYNCFRAME; + } + + inQueue.erase(inQueue.begin()); + inputBufferInfo->mOwnedByUs = false; + + notifyEmptyBufferDone(inputBufferHeader); + + if (mSawInputEOS) { + outputBufferHeader->nFlags |= OMX_BUFFERFLAG_EOS; + } + + outputBufferInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + + DUMP_TO_FILE( + mOutFile, outputBufferHeader->pBuffer, + outputBufferHeader->nFilledLen); + notifyFillBufferDone(outputBufferHeader); + + } + return; +} + + +} // namespace android + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, 
OMX_COMPONENTTYPE **component) { + return new android::SoftAVC(name, callbacks, appData, component); +} diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.h b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h new file mode 100644 index 0000000..c4e26a9 --- /dev/null +++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.h @@ -0,0 +1,309 @@ +/* + * Copyright 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __SOFT_AVC_ENC_H__ +#define __SOFT_AVC_ENC_H__ + +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/foundation/ABase.h> +#include <utils/Vector.h> + +#include "SoftVideoEncoderOMXComponent.h" + +namespace android { + +struct MediaBuffer; + +#define CODEC_MAX_CORES 4 +#define LEN_STATUS_BUFFER (10 * 1024) +#define MAX_VBV_BUFF_SIZE (120 * 16384) +#define MAX_NUM_IO_BUFS 3 + +#define DEFAULT_MAX_REF_FRM 1 +#define DEFAULT_MAX_REORDER_FRM 0 +#define DEFAULT_QP_MIN 10 +#define DEFAULT_QP_MAX 40 +#define DEFAULT_MAX_BITRATE 20000000 +#define DEFAULT_MAX_SRCH_RANGE_X 256 +#define DEFAULT_MAX_SRCH_RANGE_Y 256 +#define DEFAULT_MAX_FRAMERATE 120000 +#define DEFAULT_NUM_CORES 1 +#define DEFAULT_NUM_CORES_PRE_ENC 0 +#define DEFAULT_FPS 30 +#define DEFAULT_ENC_SPEED IVE_NORMAL + +#define DEFAULT_MEM_REC_CNT 0 +#define DEFAULT_RECON_ENABLE 0 +#define DEFAULT_CHKSUM_ENABLE 0 +#define DEFAULT_START_FRM 0 +#define DEFAULT_NUM_FRMS 0xFFFFFFFF +#define DEFAULT_INP_COLOR_FORMAT IV_YUV_420SP_VU +#define DEFAULT_RECON_COLOR_FORMAT IV_YUV_420P +#define DEFAULT_LOOPBACK 0 +#define DEFAULT_SRC_FRAME_RATE 30 +#define DEFAULT_TGT_FRAME_RATE 30 +#define DEFAULT_MAX_WD 1920 +#define DEFAULT_MAX_HT 1920 +#define DEFAULT_MAX_LEVEL 40 +#define DEFAULT_STRIDE 0 +#define DEFAULT_WD 1280 +#define DEFAULT_HT 720 +#define DEFAULT_PSNR_ENABLE 0 +#define DEFAULT_ME_SPEED 100 +#define DEFAULT_ENABLE_FAST_SAD 0 +#define DEFAULT_ENABLE_ALT_REF 0 +#define DEFAULT_RC_MODE IVE_RC_STORAGE +#define DEFAULT_BITRATE 6000000 +#define DEFAULT_I_QP 22 +#define DEFAULT_I_QP_MAX DEFAULT_QP_MAX +#define DEFAULT_I_QP_MIN DEFAULT_QP_MIN +#define DEFAULT_P_QP 28 +#define DEFAULT_P_QP_MAX DEFAULT_QP_MAX +#define DEFAULT_P_QP_MIN DEFAULT_QP_MIN +#define DEFAULT_B_QP 22 +#define DEFAULT_B_QP_MAX DEFAULT_QP_MAX +#define DEFAULT_B_QP_MIN DEFAULT_QP_MIN +#define DEFAULT_AIR IVE_AIR_MODE_NONE +#define DEFAULT_AIR_REFRESH_PERIOD 30 +#define DEFAULT_SRCH_RNG_X 64 +#define DEFAULT_SRCH_RNG_Y 48 +#define DEFAULT_I_INTERVAL 30 +#define DEFAULT_IDR_INTERVAL 1000 +#define DEFAULT_B_FRAMES 0 +#define DEFAULT_DISABLE_DEBLK_LEVEL 0 +#define DEFAULT_HPEL 1 +#define DEFAULT_QPEL 1 +#define DEFAULT_I4 1 +#define DEFAULT_EPROFILE IV_PROFILE_BASE +#define DEFAULT_SLICE_MODE IVE_SLICE_MODE_NONE +#define DEFAULT_SLICE_PARAM 256 +#define DEFAULT_ARCH ARCH_ARM_A9Q +#define DEFAULT_SOC SOC_GENERIC +#define DEFAULT_INTRA4x4 0 +#define STRLENGTH 500 + + + +#define MIN(a, b) ((a) < (b))? (a) : (b) +#define MAX(a, b) ((a) > (b))? 
(a) : (b) +#define ALIGN16(x) ((((x) + 15) >> 4) << 4) +#define ALIGN128(x) ((((x) + 127) >> 7) << 7) +#define ALIGN4096(x) ((((x) + 4095) >> 12) << 12) + +/** Used to remove warnings about unused parameters */ +#define UNUSED(x) ((void)(x)) + +/** Get time */ +#define GETTIME(a, b) gettimeofday(a, b); + +/** Compute difference between start and end */ +#define TIME_DIFF(start, end, diff) \ + diff = ((end.tv_sec - start.tv_sec) * 1000000) + \ + (end.tv_usec - start.tv_usec); + +#define ive_aligned_malloc(alignment, size) memalign(alignment, size) +#define ive_aligned_free(buf) free(buf) + +struct SoftAVC : public SoftVideoEncoderOMXComponent { + SoftAVC( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component); + + // Override SimpleSoftOMXComponent methods + virtual OMX_ERRORTYPE internalGetParameter( + OMX_INDEXTYPE index, OMX_PTR params); + + virtual OMX_ERRORTYPE internalSetParameter( + OMX_INDEXTYPE index, const OMX_PTR params); + + virtual void onQueueFilled(OMX_U32 portIndex); + +protected: + virtual ~SoftAVC(); + +private: + enum { + kNumBuffers = 2, + }; + + // OMX input buffer's timestamp and flags + typedef struct { + int64_t mTimeUs; + int32_t mFlags; + } InputBufferInfo; + + int32_t mStride; + + uint32_t mFrameRate; + + struct timeval mTimeStart; // Time at the start of decode() + struct timeval mTimeEnd; // Time at the end of decode() + + + // If a request for a change it bitrate has been received. + bool mBitrateUpdated; + + bool mKeyFrameRequested; + +#ifdef FILE_DUMP_ENABLE + char mInFile[200]; + char mOutFile[200]; +#endif /* FILE_DUMP_ENABLE */ + + IV_COLOR_FORMAT_T mIvVideoColorFormat; + + int32_t mIDRFrameRefreshIntervalInSec; + IV_PROFILE_T mAVCEncProfile; + WORD32 mAVCEncLevel; + int64_t mNumInputFrames; + int64_t mPrevTimestampUs; + bool mStarted; + bool mSpsPpsHeaderReceived; + + bool mSawInputEOS; + bool mSignalledError; + bool mIntra4x4; + bool mEnableFastSad; + bool mEnableAltRef; + bool mReconEnable; + bool mPSNREnable; + IVE_SPEED_CONFIG mEncSpeed; + + uint8_t *mConversionBuffer; + + iv_obj_t *mCodecCtx; // Codec context + iv_mem_rec_t *mMemRecords; // Memory records requested by the codec + size_t mNumMemRecords; // Number of memory records requested by codec + size_t mNumCores; // Number of cores used by the codec + + UWORD32 mHeaderGenerated; + + IV_ARCH_T mArch; + IVE_SLICE_MODE_T mSliceMode; + UWORD32 mSliceParam; + bool mHalfPelEnable; + UWORD32 mIInterval; + UWORD32 mIDRInterval; + UWORD32 mDisableDeblkLevel; + IVE_AIR_MODE_T mAIRMode; + UWORD32 mAIRRefreshPeriod; + + OMX_ERRORTYPE initEncParams(); + OMX_ERRORTYPE initEncoder(); + OMX_ERRORTYPE releaseEncoder(); + + // Verifies the component role tried to be set to this OMX component is + // strictly video_encoder.avc + OMX_ERRORTYPE internalSetRoleParams( + const OMX_PARAM_COMPONENTROLETYPE *role); + + // Updates bitrate to reflect port settings. + OMX_ERRORTYPE internalSetBitrateParams( + const OMX_VIDEO_PARAM_BITRATETYPE *bitrate); + + OMX_ERRORTYPE setConfig( + OMX_INDEXTYPE index, const OMX_PTR _params); + + // Handles port definition changes. 
+ OMX_ERRORTYPE internalSetPortParams( + const OMX_PARAM_PORTDEFINITIONTYPE *port); + + OMX_ERRORTYPE internalSetFormatParams( + const OMX_VIDEO_PARAM_PORTFORMATTYPE *format); + + OMX_ERRORTYPE setFrameType(IV_PICTURE_CODING_TYPE_T e_frame_type); + OMX_ERRORTYPE setQp(); + OMX_ERRORTYPE setEncMode(IVE_ENC_MODE_T e_enc_mode); + OMX_ERRORTYPE setDimensions(); + OMX_ERRORTYPE setNumCores(); + OMX_ERRORTYPE setFrameRate(); + OMX_ERRORTYPE setIpeParams(); + OMX_ERRORTYPE setBitRate(); + OMX_ERRORTYPE setAirParams(); + OMX_ERRORTYPE setMeParams(); + OMX_ERRORTYPE setGopParams(); + OMX_ERRORTYPE setProfileParams(); + OMX_ERRORTYPE setDeblockParams(); + OMX_ERRORTYPE setVbvParams(); + void logVersion(); + OMX_ERRORTYPE setEncodeArgs( + ive_video_encode_ip_t *ps_encode_ip, + ive_video_encode_op_t *ps_encode_op, + OMX_BUFFERHEADERTYPE *inputBufferHeader, + OMX_BUFFERHEADERTYPE *outputBufferHeader); + + DISALLOW_EVIL_CONSTRUCTORS(SoftAVC); +}; + +#ifdef FILE_DUMP_ENABLE + +#define INPUT_DUMP_PATH "/sdcard/media/avce_input" +#define INPUT_DUMP_EXT "yuv" +#define OUTPUT_DUMP_PATH "/sdcard/media/avce_output" +#define OUTPUT_DUMP_EXT "h264" + +#define GENERATE_FILE_NAMES() { \ + GETTIME(&mTimeStart, NULL); \ + strcpy(mInFile, ""); \ + sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + INPUT_DUMP_EXT); \ + strcpy(mOutFile, ""); \ + sprintf(mOutFile, "%s_%ld.%ld.%s", OUTPUT_DUMP_PATH,\ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + OUTPUT_DUMP_EXT); \ +} + +#define CREATE_DUMP_FILE(m_filename) { \ + FILE *fp = fopen(m_filename, "wb"); \ + if (fp != NULL) { \ + ALOGD("Opened file %s", m_filename); \ + fclose(fp); \ + } else { \ + ALOGD("Could not open file %s", m_filename); \ + } \ +} +#define DUMP_TO_FILE(m_filename, m_buf, m_size) \ +{ \ + FILE *fp = fopen(m_filename, "ab"); \ + if (fp != NULL && m_buf != NULL) { \ + int i; \ + i = fwrite(m_buf, 1, m_size, fp); \ + ALOGD("fwrite ret %d to write %d", i, m_size); \ + if (i != (int)m_size) { \ + ALOGD("Error in fwrite, returned %d", i); \ + perror("Error in write to file"); \ + } \ + fclose(fp); \ + } else { \ + ALOGD("Could not write to file %s", m_filename);\ + } \ +} +#else /* FILE_DUMP_ENABLE */ +#define INPUT_DUMP_PATH +#define INPUT_DUMP_EXT +#define OUTPUT_DUMP_PATH +#define OUTPUT_DUMP_EXT +#define GENERATE_FILE_NAMES() +#define CREATE_DUMP_FILE(m_filename) +#define DUMP_TO_FILE(m_filename, m_buf, m_size) +#endif /* FILE_DUMP_ENABLE */ + +} // namespace android + +#endif // __SOFT_AVC_ENC_H__ diff --git a/media/libstagefright/codecs/mpeg2dec/Android.mk b/media/libstagefright/codecs/mpeg2dec/Android.mk new file mode 100644 index 0000000..23b126d --- /dev/null +++ b/media/libstagefright/codecs/mpeg2dec/Android.mk @@ -0,0 +1,27 @@ +ifeq ($(if $(wildcard external/libmpeg2),1,0),1) + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_MODULE := libstagefright_soft_mpeg2dec +LOCAL_MODULE_TAGS := optional + +LOCAL_STATIC_LIBRARIES := libmpeg2dec +LOCAL_SRC_FILES := SoftMPEG2.cpp + +LOCAL_C_INCLUDES := $(TOP)/external/libmpeg2/decoder +LOCAL_C_INCLUDES += $(TOP)/external/libmpeg2/common +LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include +LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax + +LOCAL_SHARED_LIBRARIES := libstagefright +LOCAL_SHARED_LIBRARIES += libstagefright_omx +LOCAL_SHARED_LIBRARIES += libstagefright_foundation +LOCAL_SHARED_LIBRARIES += libutils +LOCAL_SHARED_LIBRARIES += liblog + +LOCAL_LDFLAGS := -Wl,-Bsymbolic + +include $(BUILD_SHARED_LIBRARY) + 
+endif diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp new file mode 100644 index 0000000..fb7394b --- /dev/null +++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp @@ -0,0 +1,756 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SoftMPEG2" +#include <utils/Log.h> + +#include "iv_datatypedef.h" +#include "iv.h" +#include "ivd.h" +#include "ithread.h" +#include "impeg2d.h" +#include "SoftMPEG2.h" + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MediaDefs.h> +#include <OMX_VideoExt.h> + +namespace android { + +#define componentName "video_decoder.mpeg2" +#define codingType OMX_VIDEO_CodingMPEG2 +#define CODEC_MIME_TYPE MEDIA_MIMETYPE_VIDEO_MPEG2 + +/** Function and structure definitions to keep code similar for each codec */ +#define ivdec_api_function impeg2d_api_function +#define ivdext_init_ip_t impeg2d_init_ip_t +#define ivdext_init_op_t impeg2d_init_op_t +#define ivdext_fill_mem_rec_ip_t impeg2d_fill_mem_rec_ip_t +#define ivdext_fill_mem_rec_op_t impeg2d_fill_mem_rec_op_t +#define ivdext_ctl_set_num_cores_ip_t impeg2d_ctl_set_num_cores_ip_t +#define ivdext_ctl_set_num_cores_op_t impeg2d_ctl_set_num_cores_op_t + +#define IVDEXT_CMD_CTL_SET_NUM_CORES \ + (IVD_CONTROL_API_COMMAND_TYPE_T)IMPEG2D_CMD_CTL_SET_NUM_CORES + +static const CodecProfileLevel kProfileLevels[] = { + { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelLL }, + { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelML }, + { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelH14 }, + { OMX_VIDEO_MPEG2ProfileSimple, OMX_VIDEO_MPEG2LevelHL }, + + { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelLL }, + { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelML }, + { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelH14 }, + { OMX_VIDEO_MPEG2ProfileMain , OMX_VIDEO_MPEG2LevelHL }, +}; + +SoftMPEG2::SoftMPEG2( + const char *name, + const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, + OMX_COMPONENTTYPE **component) + : SoftVideoDecoderOMXComponent( + name, componentName, codingType, + kProfileLevels, ARRAY_SIZE(kProfileLevels), + 320 /* width */, 240 /* height */, callbacks, + appData, component), + mMemRecords(NULL), + mFlushOutBuffer(NULL), + mOmxColorFormat(OMX_COLOR_FormatYUV420Planar), + mIvColorFormat(IV_YUV_420P), + mNewWidth(mWidth), + mNewHeight(mHeight), + mChangingResolution(false) { + initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE); + + // If input dump is enabled, then open create an empty file + GENERATE_FILE_NAMES(); + CREATE_DUMP_FILE(mInFile); + + CHECK_EQ(initDecoder(), (status_t)OK); +} + +SoftMPEG2::~SoftMPEG2() { + CHECK_EQ(deInitDecoder(), (status_t)OK); +} + + +static size_t getMinTimestampIdx(OMX_S64 *pNTimeStamp, bool *pIsTimeStampValid) { + OMX_S64 minTimeStamp = LLONG_MAX; + int idx = -1; + for (size_t i = 0; i < MAX_TIME_STAMPS; i++) { + if (pIsTimeStampValid[i]) { 
+ if (pNTimeStamp[i] < minTimeStamp) { + minTimeStamp = pNTimeStamp[i]; + idx = i; + } + } + } + return idx; +} + +static size_t GetCPUCoreCount() { + long cpuCoreCount = 1; +#if defined(_SC_NPROCESSORS_ONLN) + cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN); +#else + // _SC_NPROC_ONLN must be defined... + cpuCoreCount = sysconf(_SC_NPROC_ONLN); +#endif + CHECK(cpuCoreCount >= 1); + ALOGV("Number of CPU cores: %ld", cpuCoreCount); + return (size_t)cpuCoreCount; +} + +void SoftMPEG2::logVersion() { + ivd_ctl_getversioninfo_ip_t s_ctl_ip; + ivd_ctl_getversioninfo_op_t s_ctl_op; + UWORD8 au1_buf[512]; + IV_API_CALL_STATUS_T status; + + s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION; + s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t); + s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t); + s_ctl_ip.pv_version_buffer = au1_buf; + s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf); + + status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in getting version number: 0x%x", + s_ctl_op.u4_error_code); + } else { + ALOGV("Ittiam decoder version number: %s", + (char *)s_ctl_ip.pv_version_buffer); + } + return; +} + +status_t SoftMPEG2::setParams(size_t stride) { + ivd_ctl_set_config_ip_t s_ctl_ip; + ivd_ctl_set_config_op_t s_ctl_op; + IV_API_CALL_STATUS_T status; + s_ctl_ip.u4_disp_wd = (UWORD32)stride; + s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE; + + s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT; + s_ctl_ip.e_vid_dec_mode = IVD_DECODE_FRAME; + s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS; + s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t); + s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t); + + ALOGV("Set the run-time (dynamic) parameters stride = %u", stride); + status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in setting the run-time parameters: 0x%x", + s_ctl_op.u4_error_code); + + return UNKNOWN_ERROR; + } + return OK; +} + +status_t SoftMPEG2::resetPlugin() { + mIsInFlush = false; + mReceivedEOS = false; + memset(mTimeStamps, 0, sizeof(mTimeStamps)); + memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid)); + + /* Initialize both start and end times */ + gettimeofday(&mTimeStart, NULL); + gettimeofday(&mTimeEnd, NULL); + + return OK; +} + +status_t SoftMPEG2::resetDecoder() { + ivd_ctl_reset_ip_t s_ctl_ip; + ivd_ctl_reset_op_t s_ctl_op; + IV_API_CALL_STATUS_T status; + + s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET; + s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t); + s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t); + + status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip, (void *)&s_ctl_op); + if (IV_SUCCESS != status) { + ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code); + return UNKNOWN_ERROR; + } + + /* Set the run-time (dynamic) parameters */ + setParams(outputBufferWidth()); + + /* Set number of cores/threads to be used by the codec */ + setNumCores(); + + return OK; +} + +status_t SoftMPEG2::setNumCores() { + ivdext_ctl_set_num_cores_ip_t s_set_cores_ip; + ivdext_ctl_set_num_cores_op_t s_set_cores_op; + IV_API_CALL_STATUS_T status; + s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES; + s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES); + s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t); + s_set_cores_op.u4_size = 
sizeof(ivdext_ctl_set_num_cores_op_t); + + status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip, (void *)&s_set_cores_op); + if (IV_SUCCESS != status) { + ALOGE("Error in setting number of cores: 0x%x", + s_set_cores_op.u4_error_code); + return UNKNOWN_ERROR; + } + return OK; +} + +status_t SoftMPEG2::setFlushMode() { + IV_API_CALL_STATUS_T status; + ivd_ctl_flush_ip_t s_video_flush_ip; + ivd_ctl_flush_op_t s_video_flush_op; + + s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL; + s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH; + s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t); + s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t); + + /* Set the decoder in Flush mode, subsequent decode() calls will flush */ + status = ivdec_api_function( + mCodecCtx, (void *)&s_video_flush_ip, (void *)&s_video_flush_op); + + if (status != IV_SUCCESS) { + ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status, + s_video_flush_op.u4_error_code); + return UNKNOWN_ERROR; + } + + mIsInFlush = true; + return OK; +} + +status_t SoftMPEG2::initDecoder() { + IV_API_CALL_STATUS_T status; + + UWORD32 u4_num_reorder_frames; + UWORD32 u4_num_ref_frames; + UWORD32 u4_share_disp_buf; + + mNumCores = GetCPUCoreCount(); + + /* Initialize number of ref and reorder modes (for MPEG2) */ + u4_num_reorder_frames = 16; + u4_num_ref_frames = 16; + u4_share_disp_buf = 0; + + uint32_t displayStride = outputBufferWidth(); + uint32_t displayHeight = outputBufferHeight(); + uint32_t displaySizeY = displayStride * displayHeight; + + { + iv_num_mem_rec_ip_t s_num_mem_rec_ip; + iv_num_mem_rec_op_t s_num_mem_rec_op; + + s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip); + s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op); + s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC; + + status = ivdec_api_function( + mCodecCtx, (void *)&s_num_mem_rec_ip, (void *)&s_num_mem_rec_op); + if (IV_SUCCESS != status) { + ALOGE("Error in getting mem records: 0x%x", + s_num_mem_rec_op.u4_error_code); + return UNKNOWN_ERROR; + } + + mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec; + } + + mMemRecords = (iv_mem_rec_t *)ivd_aligned_malloc( + 128, mNumMemRecords * sizeof(iv_mem_rec_t)); + if (mMemRecords == NULL) { + ALOGE("Allocation failure"); + return NO_MEMORY; + } + + memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t)); + + { + size_t i; + ivdext_fill_mem_rec_ip_t s_fill_mem_ip; + ivdext_fill_mem_rec_op_t s_fill_mem_op; + iv_mem_rec_t *ps_mem_rec; + + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = + sizeof(ivdext_fill_mem_rec_ip_t); + + s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf; + s_fill_mem_ip.e_output_format = mIvColorFormat; + + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride; + s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight; + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size = + sizeof(ivdext_fill_mem_rec_op_t); + + ps_mem_rec = mMemRecords; + for (i = 0; i < mNumMemRecords; i++) { + ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t); + } + + status = ivdec_api_function( + mCodecCtx, (void *)&s_fill_mem_ip, (void *)&s_fill_mem_op); + + if (IV_SUCCESS != status) { + ALOGE("Error in filling mem records: 0x%x", + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + mNumMemRecords = + s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled; + + ps_mem_rec = mMemRecords; + + for (i = 0; i < 
mNumMemRecords; i++) { + ps_mem_rec->pv_base = ivd_aligned_malloc( + ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size); + if (ps_mem_rec->pv_base == NULL) { + ALOGE("Allocation failure for memory record #%zu of size %u", + i, ps_mem_rec->u4_mem_size); + status = IV_FAIL; + return NO_MEMORY; + } + + ps_mem_rec++; + } + } + + /* Initialize the decoder */ + { + ivdext_init_ip_t s_init_ip; + ivdext_init_op_t s_init_op; + + void *dec_fxns = (void *)ivdec_api_function; + + s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t); + s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT; + s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords; + s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride; + s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight; + + s_init_ip.u4_share_disp_buf = u4_share_disp_buf; + + s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op); + + s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords; + s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat; + + mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base; + mCodecCtx->pv_fxns = dec_fxns; + mCodecCtx->u4_size = sizeof(iv_obj_t); + + status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op); + if (status != IV_SUCCESS) { + ALOGE("Error in init: 0x%x", + s_init_op.s_ivd_init_op_t.u4_error_code); + return UNKNOWN_ERROR; + } + } + + /* Reset the plugin state */ + resetPlugin(); + + /* Set the run time (dynamic) parameters */ + setParams(displayStride); + + /* Set number of cores/threads to be used by the codec */ + setNumCores(); + + /* Get codec version */ + logVersion(); + + /* Allocate internal picture buffer */ + uint32_t bufferSize = displaySizeY * 3 / 2; + mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize); + if (NULL == mFlushOutBuffer) { + ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize); + return NO_MEMORY; + } + + mInitNeeded = false; + mFlushNeeded = false; + return OK; +} + +status_t SoftMPEG2::deInitDecoder() { + size_t i; + + if (mMemRecords) { + iv_mem_rec_t *ps_mem_rec; + + ps_mem_rec = mMemRecords; + for (i = 0; i < mNumMemRecords; i++) { + if (ps_mem_rec->pv_base) { + ivd_aligned_free(ps_mem_rec->pv_base); + } + ps_mem_rec++; + } + ivd_aligned_free(mMemRecords); + mMemRecords = NULL; + } + + if (mFlushOutBuffer) { + ivd_aligned_free(mFlushOutBuffer); + mFlushOutBuffer = NULL; + } + + mInitNeeded = true; + mChangingResolution = false; + + return OK; +} + +status_t SoftMPEG2::reInitDecoder() { + status_t ret; + + deInitDecoder(); + + ret = initDecoder(); + if (OK != ret) { + ALOGE("Create failure"); + deInitDecoder(); + return NO_MEMORY; + } + return OK; +} + +void SoftMPEG2::onReset() { + SoftVideoDecoderOMXComponent::onReset(); + + resetDecoder(); + resetPlugin(); +} + +OMX_ERRORTYPE SoftMPEG2::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) { + const uint32_t oldWidth = mWidth; + const uint32_t oldHeight = mHeight; + OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params); + if (mWidth != oldWidth || mHeight != oldHeight) { + reInitDecoder(); + } + return ret; +} + +void SoftMPEG2::setDecodeArgs( + ivd_video_decode_ip_t *ps_dec_ip, + ivd_video_decode_op_t *ps_dec_op, + OMX_BUFFERHEADERTYPE *inHeader, + OMX_BUFFERHEADERTYPE *outHeader, + size_t timeStampIx) { + size_t sizeY = outputBufferWidth() * outputBufferHeight(); + size_t sizeUV; + uint8_t *pBuf; + + ps_dec_ip->u4_size = sizeof(ivd_video_decode_ip_t); + ps_dec_op->u4_size = sizeof(ivd_video_decode_op_t); + + 
ps_dec_ip->e_cmd = IVD_CMD_VIDEO_DECODE; + + /* When in flush and after EOS with zero byte input, + * inHeader is set to zero. Hence check for non-null */ + if (inHeader) { + ps_dec_ip->u4_ts = timeStampIx; + ps_dec_ip->pv_stream_buffer = inHeader->pBuffer + + inHeader->nOffset; + ps_dec_ip->u4_num_Bytes = inHeader->nFilledLen; + } else { + ps_dec_ip->u4_ts = 0; + ps_dec_ip->pv_stream_buffer = NULL; + ps_dec_ip->u4_num_Bytes = 0; + } + + if (outHeader) { + pBuf = outHeader->pBuffer; + } else { + pBuf = mFlushOutBuffer; + } + + sizeUV = sizeY / 4; + ps_dec_ip->s_out_buffer.u4_min_out_buf_size[0] = sizeY; + ps_dec_ip->s_out_buffer.u4_min_out_buf_size[1] = sizeUV; + ps_dec_ip->s_out_buffer.u4_min_out_buf_size[2] = sizeUV; + + ps_dec_ip->s_out_buffer.pu1_bufs[0] = pBuf; + ps_dec_ip->s_out_buffer.pu1_bufs[1] = pBuf + sizeY; + ps_dec_ip->s_out_buffer.pu1_bufs[2] = pBuf + sizeY + sizeUV; + ps_dec_ip->s_out_buffer.u4_num_bufs = 3; + return; +} +void SoftMPEG2::onPortFlushCompleted(OMX_U32 portIndex) { + /* Once the output buffers are flushed, ignore any buffers that are held in decoder */ + if (kOutputPortIndex == portIndex) { + setFlushMode(); + + while (true) { + ivd_video_decode_ip_t s_dec_ip; + ivd_video_decode_op_t s_dec_op; + IV_API_CALL_STATUS_T status; + size_t sizeY, sizeUV; + + setDecodeArgs(&s_dec_ip, &s_dec_op, NULL, NULL, 0); + + status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op); + if (0 == s_dec_op.u4_output_present) { + resetPlugin(); + break; + } + } + } +} + +void SoftMPEG2::onQueueFilled(OMX_U32 portIndex) { + UNUSED(portIndex); + + if (mOutputPortSettingsChange != NONE) { + return; + } + + List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex); + List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex); + + /* If input EOS is seen and decoder is not in flush mode, + * set the decoder in flush mode. + * There can be a case where EOS is sent along with last picture data + * In that case, only after decoding that input data, decoder has to be + * put in flush. This case is handled here */ + + if (mReceivedEOS && !mIsInFlush) { + setFlushMode(); + } + + while (!outQueue.empty()) { + BufferInfo *inInfo; + OMX_BUFFERHEADERTYPE *inHeader; + + BufferInfo *outInfo; + OMX_BUFFERHEADERTYPE *outHeader; + size_t timeStampIx; + + inInfo = NULL; + inHeader = NULL; + + if (!mIsInFlush) { + if (!inQueue.empty()) { + inInfo = *inQueue.begin(); + inHeader = inInfo->mHeader; + } else { + break; + } + } + + outInfo = *outQueue.begin(); + outHeader = outInfo->mHeader; + outHeader->nFlags = 0; + outHeader->nTimeStamp = 0; + outHeader->nOffset = 0; + + if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) { + mReceivedEOS = true; + if (inHeader->nFilledLen == 0) { + inQueue.erase(inQueue.begin()); + inInfo->mOwnedByUs = false; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + setFlushMode(); + } + } + + // When there is an init required and the decoder is not in flush mode, + // update output port's definition and reinitialize decoder. 
+ if (mInitNeeded && !mIsInFlush) { + bool portWillReset = false; + handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight); + + CHECK_EQ(reInitDecoder(), (status_t)OK); + return; + } + + /* Get a free slot in timestamp array to hold input timestamp */ + { + size_t i; + timeStampIx = 0; + for (i = 0; i < MAX_TIME_STAMPS; i++) { + if (!mTimeStampsValid[i]) { + timeStampIx = i; + break; + } + } + if (inHeader != NULL) { + mTimeStampsValid[timeStampIx] = true; + mTimeStamps[timeStampIx] = inHeader->nTimeStamp; + } + } + + { + ivd_video_decode_ip_t s_dec_ip; + ivd_video_decode_op_t s_dec_op; + WORD32 timeDelay, timeTaken; + size_t sizeY, sizeUV; + + setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx); + // If input dump is enabled, then write to file + DUMP_TO_FILE(mInFile, s_dec_ip.pv_stream_buffer, s_dec_ip.u4_num_Bytes); + + if (s_dec_ip.u4_num_Bytes > 0) { + char *ptr = (char *)s_dec_ip.pv_stream_buffer; + } + + GETTIME(&mTimeStart, NULL); + /* Compute time elapsed between end of previous decode() + * to start of current decode() */ + TIME_DIFF(mTimeEnd, mTimeStart, timeDelay); + + IV_API_CALL_STATUS_T status; + status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op); + + bool unsupportedDimensions = (IMPEG2D_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code); + bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF)); + + GETTIME(&mTimeEnd, NULL); + /* Compute time taken for decode() */ + TIME_DIFF(mTimeStart, mTimeEnd, timeTaken); + + ALOGV("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay, + s_dec_op.u4_num_bytes_consumed); + if (s_dec_op.u4_frame_decoded_flag && !mFlushNeeded) { + mFlushNeeded = true; + } + + if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) { + /* If the input did not contain picture data, then ignore + * the associated timestamp */ + mTimeStampsValid[timeStampIx] = false; + } + + // This is needed to handle CTS DecoderTest testCodecResetsMPEG2WithoutSurface, + // which is not sending SPS/PPS after port reconfiguration and flush to the codec. + if (unsupportedDimensions && !mFlushNeeded) { + bool portWillReset = false; + handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht); + + CHECK_EQ(reInitDecoder(), (status_t)OK); + + setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx); + + ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op); + return; + } + + // If the decoder is in the changing resolution mode and there is no output present, + // that means the switching is done and it's ready to reset the decoder and the plugin. 
+ if (mChangingResolution && !s_dec_op.u4_output_present) { + mChangingResolution = false; + resetDecoder(); + resetPlugin(); + continue; + } + + if (unsupportedDimensions || resChanged) { + mChangingResolution = true; + if (mFlushNeeded) { + setFlushMode(); + } + + if (unsupportedDimensions) { + mNewWidth = s_dec_op.u4_pic_wd; + mNewHeight = s_dec_op.u4_pic_ht; + mInitNeeded = true; + } + continue; + } + + if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) { + uint32_t width = s_dec_op.u4_pic_wd; + uint32_t height = s_dec_op.u4_pic_ht; + bool portWillReset = false; + handlePortSettingsChange(&portWillReset, width, height); + + if (portWillReset) { + resetDecoder(); + return; + } + } + + if (s_dec_op.u4_output_present) { + size_t timeStampIdx; + outHeader->nFilledLen = (mWidth * mHeight * 3) / 2; + + timeStampIdx = getMinTimestampIdx(mTimeStamps, mTimeStampsValid); + outHeader->nTimeStamp = mTimeStamps[timeStampIdx]; + mTimeStampsValid[timeStampIdx] = false; + + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + } else { + /* If in flush mode and no output is returned by the codec, + * then come out of flush mode */ + mIsInFlush = false; + + /* If EOS was recieved on input port and there is no output + * from the codec, then signal EOS on output port */ + if (mReceivedEOS) { + outHeader->nFilledLen = 0; + outHeader->nFlags |= OMX_BUFFERFLAG_EOS; + + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + resetPlugin(); + } + } + } + + // TODO: Handle more than one picture data + if (inHeader != NULL) { + inInfo->mOwnedByUs = false; + inQueue.erase(inQueue.begin()); + inInfo = NULL; + notifyEmptyBufferDone(inHeader); + inHeader = NULL; + } + } +} + +} // namespace android + +android::SoftOMXComponent *createSoftOMXComponent( + const char *name, const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData, + OMX_COMPONENTTYPE **component) { + return new android::SoftMPEG2(name, callbacks, appData, component); +} diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h new file mode 100644 index 0000000..f7b1961 --- /dev/null +++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.h @@ -0,0 +1,178 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SOFT_MPEG2_H_ + +#define SOFT_MPEG2_H_ + +#include "SoftVideoDecoderOMXComponent.h" +#include <sys/time.h> + +namespace android { + +#define ivd_aligned_malloc(alignment, size) memalign(alignment, size) +#define ivd_aligned_free(buf) free(buf) + +/** Number of entries in the time-stamp array */ +#define MAX_TIME_STAMPS 64 + +/** Maximum number of cores supported by the codec */ +#define CODEC_MAX_NUM_CORES 4 + +#define CODEC_MAX_WIDTH 1920 + +#define CODEC_MAX_HEIGHT 1088 + +/** Input buffer size */ +#define INPUT_BUF_SIZE (1024 * 1024) + +#define MIN(a, b) ((a) < (b)) ? (a) : (b) + +/** Used to remove warnings about unused parameters */ +#define UNUSED(x) ((void)(x)) + +/** Get time */ +#define GETTIME(a, b) gettimeofday(a, b); + +/** Compute difference between start and end */ +#define TIME_DIFF(start, end, diff) \ + diff = ((end.tv_sec - start.tv_sec) * 1000000) + \ + (end.tv_usec - start.tv_usec); + +struct SoftMPEG2 : public SoftVideoDecoderOMXComponent { + SoftMPEG2( + const char *name, const OMX_CALLBACKTYPE *callbacks, + OMX_PTR appData, OMX_COMPONENTTYPE **component); + +protected: + virtual ~SoftMPEG2(); + + virtual void onQueueFilled(OMX_U32 portIndex); + virtual void onPortFlushCompleted(OMX_U32 portIndex); + virtual void onReset(); + virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params); +private: + // Number of input and output buffers + enum { + kNumBuffers = 8 + }; + + iv_obj_t *mCodecCtx; // Codec context + iv_mem_rec_t *mMemRecords; // Memory records requested by the codec + size_t mNumMemRecords; // Number of memory records requested by the codec + + size_t mNumCores; // Number of cores to be uesd by the codec + + struct timeval mTimeStart; // Time at the start of decode() + struct timeval mTimeEnd; // Time at the end of decode() + + // Internal buffer to be used to flush out the buffers from decoder + uint8_t *mFlushOutBuffer; + + // Status of entries in the timestamp array + bool mTimeStampsValid[MAX_TIME_STAMPS]; + + // Timestamp array - Since codec does not take 64 bit timestamps, + // they are maintained in the plugin + OMX_S64 mTimeStamps[MAX_TIME_STAMPS]; + +#ifdef FILE_DUMP_ENABLE + char mInFile[200]; +#endif /* FILE_DUMP_ENABLE */ + + OMX_COLOR_FORMATTYPE mOmxColorFormat; // OMX Color format + IV_COLOR_FORMAT_T mIvColorFormat; // Ittiam Color format + + bool mIsInFlush; // codec is flush mode + bool mReceivedEOS; // EOS is receieved on input port + bool mInitNeeded; + uint32_t mNewWidth; + uint32_t mNewHeight; + // The input stream has changed to a different resolution, which is still supported by the + // codec. So the codec is switching to decode the new resolution. 
+ bool mChangingResolution; + bool mFlushNeeded; + + status_t initDecoder(); + status_t deInitDecoder(); + status_t setFlushMode(); + status_t setParams(size_t stride); + void logVersion(); + status_t setNumCores(); + status_t resetDecoder(); + status_t resetPlugin(); + status_t reInitDecoder(); + + void setDecodeArgs( + ivd_video_decode_ip_t *ps_dec_ip, + ivd_video_decode_op_t *ps_dec_op, + OMX_BUFFERHEADERTYPE *inHeader, + OMX_BUFFERHEADERTYPE *outHeader, + size_t timeStampIx); + + DISALLOW_EVIL_CONSTRUCTORS(SoftMPEG2); +}; + +#ifdef FILE_DUMP_ENABLE + +#define INPUT_DUMP_PATH "/sdcard/media/mpeg2d_input" +#define INPUT_DUMP_EXT "m2v" + +#define GENERATE_FILE_NAMES() { \ + GETTIME(&mTimeStart, NULL); \ + strcpy(mInFile, ""); \ + sprintf(mInFile, "%s_%ld.%ld.%s", INPUT_DUMP_PATH, \ + mTimeStart.tv_sec, mTimeStart.tv_usec, \ + INPUT_DUMP_EXT); \ +} + +#define CREATE_DUMP_FILE(m_filename) { \ + FILE *fp = fopen(m_filename, "wb"); \ + if (fp != NULL) { \ + fclose(fp); \ + } else { \ + ALOGD("Could not open file %s", m_filename); \ + } \ +} +#define DUMP_TO_FILE(m_filename, m_buf, m_size) \ +{ \ + FILE *fp = fopen(m_filename, "ab"); \ + if (fp != NULL && m_buf != NULL) { \ + int i; \ + i = fwrite(m_buf, 1, m_size, fp); \ + ALOGD("fwrite ret %d to write %d", i, m_size); \ + if (i != (int)m_size) { \ + ALOGD("Error in fwrite, returned %d", i); \ + perror("Error in write to file"); \ + } \ + fclose(fp); \ + } else { \ + ALOGD("Could not write to file %s", m_filename);\ + } \ +} +#else /* FILE_DUMP_ENABLE */ +#define INPUT_DUMP_PATH +#define INPUT_DUMP_EXT +#define OUTPUT_DUMP_PATH +#define OUTPUT_DUMP_EXT +#define GENERATE_FILE_NAMES() +#define CREATE_DUMP_FILE(m_filename) +#define DUMP_TO_FILE(m_filename, m_buf, m_size) +#endif /* FILE_DUMP_ENABLE */ + +} // namespace android + +#endif // SOFT_MPEG2_H_ diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml index 7e9fa18..740f96b 100644..100755 --- a/media/libstagefright/data/media_codecs_google_video.xml +++ b/media/libstagefright/data/media_codecs_google_video.xml @@ -16,6 +16,15 @@ <Included> <Decoders> + <MediaCodec name="OMX.google.mpeg2.decoder" type="video/mpeg2"> + <!-- profiles and levels: ProfileMain : LevelHL --> + <Limit name="size" min="16x16" max="1920x1088" /> + <Limit name="alignment" value="2x2" /> + <Limit name="block-size" value="16x16" /> + <Limit name="blocks-per-second" range="1-244800" /> + <Limit name="bitrate" range="1-20000000" /> + <Feature name="adaptive-playback" /> + </MediaCodec> <MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es"> <!-- profiles and levels: ProfileSimple : Level3 --> <Limit name="size" min="2x2" max="352x288" /> @@ -34,12 +43,12 @@ <Feature name="adaptive-playback" /> </MediaCodec> <MediaCodec name="OMX.google.h264.decoder" type="video/avc"> - <!-- profiles and levels: ProfileBaseline : Level51 --> - <Limit name="size" min="2x2" max="2048x2048" /> + <!-- profiles and levels: ProfileHigh : Level41 --> + <Limit name="size" min="16x16" max="1920x1088" /> <Limit name="alignment" value="2x2" /> <Limit name="block-size" value="16x16" /> - <Limit name="blocks-per-second" range="1-983040" /> - <Limit name="bitrate" range="1-40000000" /> + <Limit name="blocks-per-second" range="1-244800" /> + <Limit name="bitrate" range="1-12000000" /> <Feature name="adaptive-playback" /> </MediaCodec> <MediaCodec name="OMX.google.hevc.decoder" type="video/hevc"> @@ -78,12 +87,12 @@ <Limit name="bitrate" range="1-128000" /> </MediaCodec> 
<MediaCodec name="OMX.google.h264.encoder" type="video/avc"> - <!-- profiles and levels: ProfileBaseline : Level2 --> - <Limit name="size" min="16x16" max="896x896" /> - <Limit name="alignment" value="16x16" /> + <!-- profiles and levels: ProfileBaseline : Level41 --> + <Limit name="size" min="16x16" max="1920x1088" /> + <Limit name="alignment" value="2x2" /> <Limit name="block-size" value="16x16" /> - <Limit name="blocks-per-second" range="1-11880" /> - <Limit name="bitrate" range="1-2000000" /> + <Limit name="blocks-per-second" range="1-244800" /> + <Limit name="bitrate" range="1-12000000" /> </MediaCodec> <MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es"> <!-- profiles and levels: ProfileCore : Level2 --> diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp index 3ab241a..0f9c00c 100644..100755 --- a/media/libstagefright/omx/SoftOMXPlugin.cpp +++ b/media/libstagefright/omx/SoftOMXPlugin.cpp @@ -40,11 +40,12 @@ static const struct { { "OMX.google.amrnb.encoder", "amrnbenc", "audio_encoder.amrnb" }, { "OMX.google.amrwb.decoder", "amrdec", "audio_decoder.amrwb" }, { "OMX.google.amrwb.encoder", "amrwbenc", "audio_encoder.amrwb" }, - { "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" }, - { "OMX.google.h264.encoder", "h264enc", "video_encoder.avc" }, + { "OMX.google.h264.decoder", "avcdec", "video_decoder.avc" }, + { "OMX.google.h264.encoder", "avcenc", "video_encoder.avc" }, { "OMX.google.hevc.decoder", "hevcdec", "video_decoder.hevc" }, { "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" }, { "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" }, + { "OMX.google.mpeg2.decoder", "mpeg2dec", "video_decoder.mpeg2" }, { "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" }, { "OMX.google.h263.encoder", "mpeg4enc", "video_encoder.h263" }, { "OMX.google.mpeg4.decoder", "mpeg4dec", "video_decoder.mpeg4" }, |