Diffstat (limited to 'libvideoeditor/vss')
19 files changed, 2799 insertions, 976 deletions
diff --git a/libvideoeditor/vss/common/inc/M4DA_Types.h b/libvideoeditor/vss/common/inc/M4DA_Types.h
index 7f182e0..42637e1 100755
--- a/libvideoeditor/vss/common/inc/M4DA_Types.h
+++ b/libvideoeditor/vss/common/inc/M4DA_Types.h
@@ -67,8 +67,9 @@ typedef enum
     M4DA_StreamTypeAudioBpc         = 25,   /**< BPC audio */

     /* ADPCM */
-    M4DA_StreamTypeAudioADPcm       = 26    /**< ADPCM */
+    M4DA_StreamTypeAudioADPcm       = 26,   /**< ADPCM */
+    M4DA_StreamTypeVideoARGB8888    = 27

 } M4_StreamType;

 /**
diff --git a/libvideoeditor/vss/common/inc/M4DECODER_Common.h b/libvideoeditor/vss/common/inc/M4DECODER_Common.h
index 1ccde6d..4608af1 100755
--- a/libvideoeditor/vss/common/inc/M4DECODER_Common.h
+++ b/libvideoeditor/vss/common/inc/M4DECODER_Common.h
@@ -113,6 +113,22 @@ typedef enum
     M4DECODER_kOptionID_NextRenderedFrameCTS =
         M4OSA_OPTION_ID_CREATE(M4_READ, M4DECODER_COMMON,\
             0x05),
+    /**
+    Set the YUV data to the dummy video decoder
+    */
+    M4DECODER_kOptionID_DecYuvData =
+        M4OSA_OPTION_ID_CREATE(M4_READ, M4DECODER_COMMON, 0x06),
+    /**
+    Set the YUV data with color effect applied to the dummy video decoder
+    */
+    M4DECODER_kOptionID_YuvWithEffectNonContiguous =
+        M4OSA_OPTION_ID_CREATE(M4_READ, M4DECODER_COMMON, 0x07),
+
+    M4DECODER_kOptionID_YuvWithEffectContiguous =
+        M4OSA_OPTION_ID_CREATE(M4_READ, M4DECODER_COMMON, 0x08),
+
+    M4DECODER_kOptionID_EnableYuvWithEffect =
+        M4OSA_OPTION_ID_CREATE(M4_READ, M4DECODER_COMMON, 0x09),

     /* common to MPEG4 decoders */
     /**
@@ -201,6 +217,7 @@ typedef enum
     M4DECODER_kVideoTypeAVC,
     M4DECODER_kVideoTypeWMV,
     M4DECODER_kVideoTypeREAL,
+    M4DECODER_kVideoTypeYUV420P,

     M4DECODER_kVideoType_NB  /* number of decoders, keep it as last enum entry */
diff --git a/libvideoeditor/vss/common/inc/M4DECODER_Null.h b/libvideoeditor/vss/common/inc/M4DECODER_Null.h
new file mode 100644
index 0000000..047d857
--- /dev/null
+++ b/libvideoeditor/vss/common/inc/M4DECODER_Null.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+*************************************************************************
+ * @file   M4DECODER_Null.h
+ * @brief  Implementation of a "null" video decoder, i.e. a decoder
+ *         that does not do actual decoding.
+ * @note   This file defines the getInterface function.
+*************************************************************************
+*/
+#ifndef __M4DECODER_NULL_H__
+#define __M4DECODER_NULL_H__
+
+#include "M4DECODER_Common.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/**
+ ************************************************************************
+ * @brief   Retrieves the interface implemented by the decoder
+ * @param   pDecoderType      : Pointer to a M4DECODER_VideoType
+ *                              (allocated by the caller)
+ *                              that will be filled with the decoder type
+ * @param   pDecoderInterface : Address of a pointer that will be set to
+ *                              the interface implemented by this decoder.
+ *                              The interface is a structure allocated by
+ *                              this function and must be freed by the caller.
+ *
+ * @returns : M4NO_ERROR  if OK
+ *            M4ERR_ALLOC if allocation failed
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_getInterface( M4DECODER_VideoType *pDecoderType,
+        M4DECODER_VideoInterface **pDecoderInterface);
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+
+#endif /*__M4DECODER_NULL_H__*/
+
diff --git a/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h b/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h
index 027affa..a9f6412 100755
--- a/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h
+++ b/libvideoeditor/vss/common/inc/M4_VideoEditingCommon.h
@@ -369,6 +369,7 @@ typedef struct {
     M4OSA_UInt32 uiStillPicWidth;       /**< Image width */
     M4OSA_UInt32 uiStillPicHeight;      /**< Image height */
     M4OSA_UInt32 uiClipAudioVolumePercentage;
+    M4OSA_Bool   bSetImageData;

 } M4VIDEOEDITING_ClipProperties;
diff --git a/libvideoeditor/vss/common/inc/NXPSW_CompilerSwitches_MCS.h b/libvideoeditor/vss/common/inc/NXPSW_CompilerSwitches_MCS.h
index 86a325f..e1b62e1 100755
--- a/libvideoeditor/vss/common/inc/NXPSW_CompilerSwitches_MCS.h
+++ b/libvideoeditor/vss/common/inc/NXPSW_CompilerSwitches_MCS.h
@@ -43,6 +43,7 @@
 /* ----- AAC decoder support ----- */
 #define M4VSS_SUPPORT_AUDEC_AAC /**< [default] Support AAC, AAC+ and eAAC+ streams */
+#define M4VSS_SUPPORT_VIDEC_NULL

 /* ----- MP4/H263 video decoder support ----- */
 #define M4VSS_SUPPORT_VIDEC_3GP /**< [default] Support mpeg4 and H263 decoders */
diff --git a/libvideoeditor/vss/inc/M4VSS3GPP_API.h b/libvideoeditor/vss/inc/M4VSS3GPP_API.h
index 0356805..0bb7141 100755
--- a/libvideoeditor/vss/inc/M4VSS3GPP_API.h
+++ b/libvideoeditor/vss/inc/M4VSS3GPP_API.h
@@ -366,6 +366,7 @@ typedef struct
                                        with M4VSS3GPP_editAnalyseClip */
     M4OSA_UInt32 uiBeginCutTime;   /**< Begin cut time, in milliseconds */
     M4OSA_UInt32 uiEndCutTime;     /**< End cut time, in milliseconds */
+    M4OSA_Bool   bTranscodingRequired;

 #ifdef M4VSS_SUPPORT_EXTENDED_FEATURES
     M4xVSS_ClipSettings xVSS;
diff --git a/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h b/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h
index d621a1b..2dd892a 100755
--- a/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h
+++ b/libvideoeditor/vss/inc/M4VSS3GPP_InternalTypes.h
@@ -374,6 +374,11 @@ typedef struct
     * Interfaces of the used modules */
     /**< Filesystem and shell reader, decoder functions */
     M4VSS3GPP_MediaAndCodecCtxt ShellAPI;
+    M4VIFI_ImagePlane *pPlaneYuv;   /* YUV420 image plane, converted from ARGB888 */
+    M4VIFI_ImagePlane* m_pPreResizeFrame;  /* The decoded image before resize
+                                              (allocated only if resize needed) */
+    M4VIFI_ImagePlane *pPlaneYuvWithEffect; /* YUV420 image plane, with color effect */
+    M4OSA_Bool bGetYuvDataFromDecoder;  /* Boolean used to get YUV data from dummy video decoder only for first time */

 } M4VSS3GPP_ClipContext;
@@ -601,6 +606,8 @@ typedef struct
                                        external effect is active */
     M4OSA_Int32 iInOutTimeOffset;
     M4OSA_Bool bEncodeTillEoF;
+    M4xVSS_EditSettings xVSS;
+    M4OSA_Context m_air_context;

 } M4VSS3GPP_InternalEditContext;
diff --git a/libvideoeditor/vss/inc/M4xVSS_Internal.h b/libvideoeditor/vss/inc/M4xVSS_Internal.h
index e8013f4..8811beb 100755
--- a/libvideoeditor/vss/inc/M4xVSS_Internal.h
+++ b/libvideoeditor/vss/inc/M4xVSS_Internal.h
@@ -347,6 +347,7 @@ typedef struct {
     M4MCS_MediaRendering MediaRendering;   /**< FB: to crop, resize, or render black borders*/
+    M4OSA_UInt32 videoclipnumber;

 } M4xVSS_MCS_params;
diff --git a/libvideoeditor/vss/mcs/src/M4MCS_API.c b/libvideoeditor/vss/mcs/src/M4MCS_API.c
index 7781f68..19053e9 100755
--- a/libvideoeditor/vss/mcs/src/M4MCS_API.c
+++ b/libvideoeditor/vss/mcs/src/M4MCS_API.c
@@ -1690,7 +1690,10 @@ M4OSA_ERR H264MCS_ProcessSPS_PPS( NSWAVC_MCS_t *instance, M4OSA_UInt8 *inbuff,
     p_bs->Buffer = (M4OSA_UInt8 *)(lClipDSI + 1);
     DecBitStreamReset_MCS(p_bs, lSize - 1);

-    DecSPSMCS(p_bs, &instance->clip_sps);
+    err = DecSPSMCS(p_bs, &instance->clip_sps);
+    if(err != M4NO_ERROR) {
+        return M4ERR_PARAMETER;
+    }

     //Clip_SPSID[cnt] = H264MCS_DecVLCReadExpGolombCode(p_bs);
     //Clip_UsedSPSID[Clip_SPSID[cnt]] = 1;
@@ -1703,7 +1706,10 @@ M4OSA_ERR H264MCS_ProcessSPS_PPS( NSWAVC_MCS_t *instance, M4OSA_UInt8 *inbuff,
     /* Decode encoder SPS */
     p_bs->Buffer = (M4OSA_UInt8 *)(instance->m_pEncoderSPS + 1);
     DecBitStreamReset_MCS(p_bs, instance->m_encoderSPSSize - 1);

-    DecSPSMCS(p_bs, &instance->encoder_sps);
+    err = DecSPSMCS(p_bs, &instance->encoder_sps);
+    if(err != M4NO_ERROR) {
+        return M4ERR_PARAMETER;
+    }

     if( instance->encoder_sps.num_ref_frames
         > instance->clip_sps.num_ref_frames )
@@ -8843,7 +8849,7 @@ static M4OSA_ERR M4MCS_intVideoNullEncoding( M4MCS_InternalContext *pC )
     }

-    if( ( pC->bH264Trim == M4OSA_TRUE)
+    if( (pC->EncodingVideoFormat == M4ENCODER_kNULL)
         && (pC->bLastDecodedFrameCTS == M4OSA_FALSE)
         && (pC->uiBeginCutTime > 0) )
     {
diff --git a/libvideoeditor/vss/src/Android.mk b/libvideoeditor/vss/src/Android.mk
index 55d78d3..11483fa 100755
--- a/libvideoeditor/vss/src/Android.mk
+++ b/libvideoeditor/vss/src/Android.mk
@@ -46,7 +46,8 @@ LOCAL_SRC_FILES:= \
     M4AMRR_CoreReader.c \
     M4READER_Amr.c \
     M4VD_Tools.c \
-    VideoEditorResampler.cpp
+    VideoEditorResampler.cpp \
+    M4DECODER_Null.c

 LOCAL_MODULE_TAGS := optional
diff --git a/libvideoeditor/vss/src/M4DECODER_Null.c b/libvideoeditor/vss/src/M4DECODER_Null.c
new file mode 100755
index 0000000..8a54f3d
--- /dev/null
+++ b/libvideoeditor/vss/src/M4DECODER_Null.c
@@ -0,0 +1,434 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+*************************************************************************
+ * @file M4DECODER_Null.c
+ * @brief Implementation of the Null decoder public interface
+ * @note This file implements a "null" video decoder, i.e. a decoder
+ * that does nothing
+*************************************************************************
+*/
+#include "NXPSW_CompilerSwitches.h"
+
+#include "M4OSA_Types.h"
+#include "M4OSA_Debug.h"
+#include "M4TOOL_VersionInfo.h"
+#include "M4DA_Types.h"
+#include "M4DECODER_Common.h"
+#include "M4DECODER_Null.h"
+
+/**
+ ************************************************************************
+ * NULL Video Decoder version information
+ ************************************************************************
+*/
+/* CHANGE_VERSION_HERE */
+#define M4DECODER_NULL_MAJOR 1
+#define M4DECODER_NULL_MINOR 0
+#define M4DECODER_NULL_REVISION 0
+
+/**
+ ************************************************************************
+ * structure M4_VideoHandler_Context
+ * @brief Defines the internal context of a video decoder instance
+ * @note The context is allocated and freed by the video decoder
+ ************************************************************************
+*/
+typedef struct {
+ void* m_pLibrary; // Core library identifier
+ M4OSA_Int32 m_DecoderId; // Core decoder identifier
+ M4OSA_Int32 m_RendererId; // Core renderer identifier
+ M4_VideoStreamHandler* m_pVideoStreamhandler; // Video stream description
+ M4_AccessUnit* m_pNextAccessUnitToDecode; // Access unit used to
+ // read and decode one frame
+ void* m_pUserData; // Pointer to any user data
+ M4READER_DataInterface* m_pReader; // Reader data interface
+ M4OSA_Bool m_bDoRendering; // Decides if render required
+ M4OSA_Int32 m_structSize; // Size of the structure
+
+ M4DECODER_OutputFilter* m_pVideoFilter; // Color conversion filter
+ M4VIFI_ImagePlane *pDecYuvData; // Pointer to Yuv data plane
+ M4VIFI_ImagePlane *pDecYuvWithEffect; // Pointer to Yuv plane with color effect
+ M4OSA_Bool bYuvWithEffectSet; // Render the YUV with color effect instead of the original YUV data
+
+} M4_VideoHandler_Context;
+
+/***********************************************************************/
+/************** M4DECODER_VideoInterface implementation ****************/
+/***********************************************************************/
+
+/**
+ ************************************************************************
+ * @brief Creates an instance of the decoder
+ * @note Allocates the context
+ *
+ * @param pContext: (OUT) Context of the decoder
+ * @param pStreamHandler: (IN) Pointer to a video stream description
+ * @param pSrcInterface: (IN) Pointer to the M4READER_DataInterface
+ * structure that must be used by the
+ * decoder to read data from the stream
+ * @param pAccessUnit (IN) Pointer to an access unit
+ * (allocated by the caller) where decoded data
+ * are stored
+ *
+ * @return M4NO_ERROR There is no error
+ * @return M4ERR_STATE State automaton is not applied
+ * @return M4ERR_ALLOC A memory allocation has failed
+ * @return M4ERR_PARAMETER At least one input parameter is not proper
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_create(M4OSA_Context *pContext,
+ M4_StreamHandler *pStreamHandler,
+ M4READER_DataInterface *pReaderDataInterface,
+ M4_AccessUnit* pAccessUnit,
+ M4OSA_Void* pUserData) {
+
+ M4_VideoHandler_Context* pStreamContext = M4OSA_NULL;
+
+ *pContext = M4OSA_NULL;
+ pStreamContext = (M4_VideoHandler_Context*)M4OSA_32bitAlignedMalloc (
+ sizeof(M4_VideoHandler_Context), M4DECODER_MPEG4,
+ (M4OSA_Char *)"M4_VideoHandler_Context");
+ if (pStreamContext == 0) {
+ return M4ERR_ALLOC;
+ }
+
+ pStreamContext->m_structSize = sizeof(M4_VideoHandler_Context);
+ pStreamContext->m_pNextAccessUnitToDecode = M4OSA_NULL;
+ pStreamContext->m_pLibrary = M4OSA_NULL;
+ pStreamContext->m_pVideoStreamhandler = M4OSA_NULL;
+ pStreamContext->m_DecoderId = -1;
+ pStreamContext->m_RendererId = -1;
+
+ pStreamContext->m_pUserData = M4OSA_NULL;
+ pStreamContext->m_bDoRendering = M4OSA_TRUE;
+ pStreamContext->m_pVideoFilter = M4OSA_NULL;
+ pStreamContext->bYuvWithEffectSet = M4OSA_FALSE;
+
+ *pContext=pStreamContext;
+ return M4NO_ERROR;
+}
+
+/**
+ ************************************************************************
+ * @brief Destroy the instance of the decoder
+ * @note After this call the context is invalid
+ *
+ * @param context: (IN) Context of the decoder
+ *
+ * @return M4NO_ERROR There is no error
+ * @return M4ERR_PARAMETER The context is invalid
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_destroy(M4OSA_Context pContext) {
+
+ M4_VideoHandler_Context* pStreamContext = (M4_VideoHandler_Context*)pContext;
+
+ M4OSA_DEBUG_IF1((M4OSA_NULL == pStreamContext),
+ M4ERR_PARAMETER, "M4DECODER_NULL_destroy: invalid context pointer");
+
+ free(pStreamContext);
+
+ return M4NO_ERROR;
+}
+
+/**
+ ************************************************************************
+ * @brief Get an option value from the decoder
+ * @note This function allows the caller to retrieve a property value:
+ *
+ * @param context: (IN) Context of the decoder
+ * @param optionId: (IN) Indicates the option to get
+ * @param pValue: (IN/OUT) Pointer to structure or value where
+ * option is stored
+ *
+ * @return M4NO_ERROR There is no error
+ * @return M4ERR_PARAMETER The context is invalid (in DEBUG only)
+ * @return M4ERR_BAD_OPTION_ID When the option ID is not a valid one
+ * @return M4ERR_STATE State automaton is not applied
+ * @return M4ERR_NOT_IMPLEMENTED Function not implemented
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_getOption(M4OSA_Context context,
+ M4OSA_OptionID optionId,
+ M4OSA_DataOption pValue) {
+
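+ // The null decoder exposes no readable options; every query is rejected.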
+ return M4ERR_NOT_IMPLEMENTED;
+}
+
+/**
+ ************************************************************************
+ * @brief Set an option value of the decoder
+ * @note Allows the caller to set a property value:
+ *
+ * @param context: (IN) Context of the decoder
+ * @param optionId: (IN) Identifier indicating the option to set
+ * @param pValue: (IN) Pointer to structure or value
+ * where option is stored
+ *
+ * @return M4NO_ERROR There is no error
+ * @return M4ERR_BAD_OPTION_ID The option ID is not a valid one
+ * @return M4ERR_STATE State automaton is not applied
+ * @return M4ERR_PARAMETER The option parameter is invalid
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_setOption(M4OSA_Context context,
+ M4OSA_OptionID optionId,
+ M4OSA_DataOption pValue) {
+
+ M4DECODER_OutputFilter *pFilterOption;
+
+ M4_VideoHandler_Context *pStreamContext =
+ (M4_VideoHandler_Context*)context;
+
+ M4OSA_ERR err = M4NO_ERROR;
+ M4OSA_UInt32 height = 0;
+ M4OSA_UInt8 *p_src,*p_des;
+ M4VIFI_ImagePlane* pTempDecYuvData = M4OSA_NULL;
+
+ switch (optionId) {
+ case M4DECODER_kOptionID_DecYuvData:
+ pStreamContext->pDecYuvData = (M4VIFI_ImagePlane *)pValue;
+ break;
+
+ case M4DECODER_kOptionID_YuvWithEffectContiguous:
+ pStreamContext->pDecYuvWithEffect = (M4VIFI_ImagePlane *)pValue;
+ break;
+
+ case M4DECODER_kOptionID_EnableYuvWithEffect:
+ pStreamContext->bYuvWithEffectSet = (M4OSA_Bool)pValue;
+ break;
+
+ case M4DECODER_kOptionID_YuvWithEffectNonContiguous:
+ pTempDecYuvData = (M4VIFI_ImagePlane *)pValue;
+
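+ // Copy the Y, U and V planes row by row (stride-aware) from the temporary
+ // non-contiguous buffer into the contiguous effect buffer set earlier.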
+ p_des = pStreamContext->pDecYuvWithEffect[0].pac_data +
+ pStreamContext->pDecYuvWithEffect[0].u_topleft;
+ p_src = pTempDecYuvData[0].pac_data +
+ pTempDecYuvData[0].u_topleft;
+
+ for (height = 0; height<pStreamContext->pDecYuvWithEffect[0].u_height;
+ height++) {
+ memcpy((void *)p_des, (void *)p_src,
+ pStreamContext->pDecYuvWithEffect[0].u_width);
+
+ p_des += pStreamContext->pDecYuvWithEffect[0].u_stride;
+ p_src += pTempDecYuvData[0].u_stride;
+ }
+
+ p_des = pStreamContext->pDecYuvWithEffect[1].pac_data +
+ pStreamContext->pDecYuvWithEffect[1].u_topleft;
+ p_src = pTempDecYuvData[1].pac_data +
+ pTempDecYuvData[1].u_topleft;
+
+ for (height = 0; height<pStreamContext->pDecYuvWithEffect[1].u_height;
+ height++) {
+ memcpy((void *)p_des, (void *)p_src,
+ pStreamContext->pDecYuvWithEffect[1].u_width);
+
+ p_des += pStreamContext->pDecYuvWithEffect[1].u_stride;
+ p_src += pTempDecYuvData[1].u_stride;
+ }
+
+ p_des = pStreamContext->pDecYuvWithEffect[2].pac_data +
+ pStreamContext->pDecYuvWithEffect[2].u_topleft;
+ p_src = pTempDecYuvData[2].pac_data +
+ pTempDecYuvData[2].u_topleft;
+
+ for (height = 0; height<pStreamContext->pDecYuvWithEffect[2].u_height;
+ height++) {
+ memcpy((void *)p_des, (void *)p_src,
+ pStreamContext->pDecYuvWithEffect[2].u_width);
+
+ p_des += pStreamContext->pDecYuvWithEffect[2].u_stride;
+ p_src += pTempDecYuvData[2].u_stride;
+ }
+ break;
+
+ case M4DECODER_kOptionID_OutputFilter:
+ pFilterOption = (M4DECODER_OutputFilter*)pValue;
+ break;
+
+ case M4DECODER_kOptionID_DeblockingFilter:
+ err = M4ERR_BAD_OPTION_ID;
+ break;
+
+ default:
+ err = M4ERR_BAD_OPTION_ID;
+ break;
+ }
+ return err;
+}
+
+/**
+ ************************************************************************
+ * @brief Decode video Access Units up to a target time
+ * @note Parse and decode the video until it can output a decoded image
+ * for which the composition time is equal to or greater than the
+ * target time passed in.
+ * The data are read from the reader data interface passed to
+ * M4DECODER_NULL_create.
+ *
+ * @param context: (IN) Context of the decoder
+ * @param pTime: (IN/OUT) IN: Time to decode up to (in msec)
+ * OUT:Time of the last decoded frame (in msec)
+ * @param bJump: (IN) 0 if no jump occurred just before this call
+ * 1 if a jump has just been made
+ * @return M4NO_ERROR there is no error
+ * @return M4ERR_PARAMETER at least one parameter is not properly set
+ * @return M4WAR_NO_MORE_AU there is no more access unit to decode (EOS)
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_decode(M4OSA_Context context,
+ M4_MediaTime* pTime, M4OSA_Bool bJump) {
+
+ // Do nothing; the input time stamp is returned unchanged
+ return M4NO_ERROR;
+}
+
+/**
+ ************************************************************************
+ * @brief Renders the video at the specified time.
+ * @note
+ * @param context: (IN) Context of the decoder
+ * @param pTime: (IN/OUT) IN: Time to render to (in msecs)
+ * OUT:Time of the rendered frame (in ms)
+ * @param pOutputPlane:(OUT) Output plane filled with decoded data
+ * @param bForceRender:(IN) 1 if the image must be rendered even if it
+ * has been rendered already
+ * 0 if not
+ *
+ * @return M4NO_ERROR There is no error
+ * @return M4ERR_PARAMETER At least one parameter is not properly set
+ * @return M4ERR_STATE State automaton is not applied
+ * @return M4ERR_ALLOC There is no more available memory
+ * @return M4WAR_VIDEORENDERER_NO_NEW_FRAME If the frame has already been rendered
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_render(M4OSA_Context context, M4_MediaTime* pTime,
+ M4VIFI_ImagePlane* pOutputPlane,
+ M4OSA_Bool bForceRender) {
+
+ M4OSA_ERR err = M4NO_ERROR;
+ M4OSA_UInt32 height;
+ M4OSA_UInt8 *p_src,*p_des;
+ M4_VideoHandler_Context* pStreamContext =
+ (M4_VideoHandler_Context*)context;
+
+ if (pStreamContext->bYuvWithEffectSet == M4OSA_TRUE) {
+
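+ // Color effect enabled: copy the effect-applied YUV planes into the output planes.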
+ p_des = pOutputPlane[0].pac_data + pOutputPlane[0].u_topleft;
+ p_src = pStreamContext->pDecYuvWithEffect[0].pac_data +
+ pStreamContext->pDecYuvWithEffect[0].u_topleft;
+
+ for (height = 0; height<pOutputPlane[0].u_height; height++) {
+ memcpy((void *)p_des, (void *)p_src, pOutputPlane[0].u_width);
+ p_des += pOutputPlane[0].u_stride;
+ p_src += pStreamContext->pDecYuvWithEffect[0].u_stride;
+ }
+
+ p_des = pOutputPlane[1].pac_data + pOutputPlane[1].u_topleft;
+ p_src = pStreamContext->pDecYuvWithEffect[1].pac_data +
+ pStreamContext->pDecYuvWithEffect[1].u_topleft;
+
+ for (height = 0; height<pOutputPlane[1].u_height; height++) {
+ memcpy((void *)p_des, (void *)p_src, pOutputPlane[1].u_width);
+ p_des += pOutputPlane[1].u_stride;
+ p_src += pStreamContext->pDecYuvWithEffect[1].u_stride;
+ }
+
+ p_des = pOutputPlane[2].pac_data + pOutputPlane[2].u_topleft;
+ p_src = pStreamContext->pDecYuvWithEffect[2].pac_data +
+ pStreamContext->pDecYuvWithEffect[2].u_topleft;
+
+ for (height = 0; height<pOutputPlane[2].u_height; height++) {
+ memcpy((void *)p_des, (void *)p_src, pOutputPlane[2].u_width);
+ p_des += pOutputPlane[2].u_stride;
+ p_src += pStreamContext->pDecYuvWithEffect[2].u_stride;
+ }
+ } else {
+
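+ // No color effect: copy the stored YUV data planes into the output planes.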
+ p_des = pOutputPlane[0].pac_data + pOutputPlane[0].u_topleft;
+ p_src = pStreamContext->pDecYuvData[0].pac_data +
+ pStreamContext->pDecYuvData[0].u_topleft;
+
+ for (height = 0; height<pOutputPlane[0].u_height; height++) {
+ memcpy((void *)p_des, (void *)p_src, pOutputPlane[0].u_width);
+ p_des += pOutputPlane[0].u_stride;
+ p_src += pStreamContext->pDecYuvData[0].u_stride;
+ }
+
+ p_des = pOutputPlane[1].pac_data + pOutputPlane[1].u_topleft;
+ p_src = pStreamContext->pDecYuvData[1].pac_data +
+ pStreamContext->pDecYuvData[1].u_topleft;
+
+ for (height = 0; height<pOutputPlane[1].u_height; height++) {
+ memcpy((void *)p_des, (void *)p_src, pOutputPlane[1].u_width);
+ p_des += pOutputPlane[1].u_stride;
+ p_src += pStreamContext->pDecYuvData[1].u_stride;
+ }
+
+ p_des = pOutputPlane[2].pac_data + pOutputPlane[2].u_topleft;
+ p_src = pStreamContext->pDecYuvData[2].pac_data +
+ pStreamContext->pDecYuvData[2].u_topleft;
+
+ for (height = 0; height<pOutputPlane[2].u_height; height++) {
+ memcpy((void *)p_des,(void *)p_src,pOutputPlane[2].u_width);
+ p_des += pOutputPlane[2].u_stride;
+ p_src += pStreamContext->pDecYuvData[2].u_stride;
+ }
+ }
+ return err;
+}
+
+/**
+ ************************************************************************
+ * @brief Retrieves the interface implemented by the decoder
+ * @param pDecoderType : Pointer to a M4DECODER_VideoType
+ * (allocated by the caller)
+ * that will be filled with the decoder type
+ * @param pDecoderInterface : Address of a pointer that will be set to
+ * the interface implemented by this decoder.
+ * The interface is a structure allocated by
+ * this function and must be freed by the caller.
+ *
+ * @returns : M4NO_ERROR if OK
+ * M4ERR_ALLOC if allocation failed
+ ************************************************************************
+*/
+M4OSA_ERR M4DECODER_NULL_getInterface (M4DECODER_VideoType *pDecoderType,
+ M4DECODER_VideoInterface **pDecoderInterface) {
+
+ *pDecoderInterface =
+ (M4DECODER_VideoInterface*)M4OSA_32bitAlignedMalloc(
+ sizeof(M4DECODER_VideoInterface),
+ M4DECODER_MPEG4, (M4OSA_Char *)"M4DECODER_VideoInterface");
+
+ if (M4OSA_NULL == *pDecoderInterface) {
+ return M4ERR_ALLOC;
+ }
+
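+ // Advertise the YUV420P decoder type; the VSS shell maps ARGB8888 image
+ // streams to this type (see M4VSS3GPP_setCurrentVideoDecoder).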
+ *pDecoderType = M4DECODER_kVideoTypeYUV420P;
+
+ (*pDecoderInterface)->m_pFctCreate = M4DECODER_NULL_create;
+ (*pDecoderInterface)->m_pFctDestroy = M4DECODER_NULL_destroy;
+ (*pDecoderInterface)->m_pFctGetOption = M4DECODER_NULL_getOption;
+ (*pDecoderInterface)->m_pFctSetOption = M4DECODER_NULL_setOption;
+ (*pDecoderInterface)->m_pFctDecode = M4DECODER_NULL_decode;
+ (*pDecoderInterface)->m_pFctRender = M4DECODER_NULL_render;
+
+ return M4NO_ERROR;
+}
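For context, a minimal sketch (not part of this patch) of how a shell component might drive the null decoder through the generic M4DECODER_VideoInterface. The helper name and the pYuvPlanes/pOutputPlanes plane triplets are hypothetical; the unused create() parameters are passed as NULL since this implementation ignores them.

#include "M4DECODER_Null.h"

/* Hypothetical helper, not part of the patch: pushes pre-converted YUV420
   planes (e.g. from an ARGB8888 still image) through the null decoder and
   copies them into the caller's output planes. */
static M4OSA_ERR renderStillImageSketch(M4VIFI_ImagePlane *pYuvPlanes,
                                        M4VIFI_ImagePlane *pOutputPlanes) {
    M4DECODER_VideoType type;
    M4DECODER_VideoInterface *pDecoder = M4OSA_NULL;
    M4OSA_Context pDecCtxt = M4OSA_NULL;
    M4_MediaTime cts = 0;
    M4OSA_ERR err;

    /* The interface structure is allocated by getInterface and freed by the caller. */
    err = M4DECODER_NULL_getInterface(&type, &pDecoder);
    if (M4NO_ERROR != err) return err;

    /* No real bitstream: stream handler, reader interface, AU and user data are unused here. */
    err = pDecoder->m_pFctCreate(&pDecCtxt, M4OSA_NULL, M4OSA_NULL, M4OSA_NULL, M4OSA_NULL);
    if (M4NO_ERROR != err) { free(pDecoder); return err; }

    /* Hand the YUV planes to the decoder, "decode" (a no-op), then render:
       render() copies the stored planes into pOutputPlanes. */
    pDecoder->m_pFctSetOption(pDecCtxt, M4DECODER_kOptionID_DecYuvData,
                              (M4OSA_DataOption)pYuvPlanes);
    pDecoder->m_pFctDecode(pDecCtxt, &cts, M4OSA_FALSE);
    err = pDecoder->m_pFctRender(pDecCtxt, &cts, pOutputPlanes, M4OSA_TRUE);

    pDecoder->m_pFctDestroy(pDecCtxt);
    free(pDecoder);
    return err;
}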
diff --git a/libvideoeditor/vss/src/M4VSS3GPP_Clip.c b/libvideoeditor/vss/src/M4VSS3GPP_Clip.c index 91ba966..0303877 100755 --- a/libvideoeditor/vss/src/M4VSS3GPP_Clip.c +++ b/libvideoeditor/vss/src/M4VSS3GPP_Clip.c @@ -119,6 +119,8 @@ M4OSA_ERR M4VSS3GPP_intClipInit( M4VSS3GPP_ClipContext ** hClipCtxt, /** * Init the clip context */ + pClipCtxt->iVoffset = 0; + pClipCtxt->iAoffset = 0; pClipCtxt->Vstatus = M4VSS3GPP_kClipStatus_READ; pClipCtxt->Astatus = M4VSS3GPP_kClipStatus_READ; @@ -129,6 +131,11 @@ M4OSA_ERR M4VSS3GPP_intClipInit( M4VSS3GPP_ClipContext ** hClipCtxt, pClipCtxt->AudioAU.m_dataAddress = M4OSA_NULL; pClipCtxt->pViDecCtxt = M4OSA_NULL; + pClipCtxt->iVideoDecCts = 0; + pClipCtxt->iVideoRenderCts = 0; + pClipCtxt->lastDecodedPlane = M4OSA_NULL; + pClipCtxt->iActualVideoBeginCut = 0; + pClipCtxt->iActualAudioBeginCut = 0; pClipCtxt->bVideoAuAvailable = M4OSA_FALSE; pClipCtxt->bFirstAuWritten = M4OSA_FALSE; @@ -136,9 +143,16 @@ M4OSA_ERR M4VSS3GPP_intClipInit( M4VSS3GPP_ClipContext ** hClipCtxt, pClipCtxt->bAudioFrameAvailable = M4OSA_FALSE; pClipCtxt->pAudioFramePtr = M4OSA_NULL; + pClipCtxt->iAudioFrameCts = 0; + pClipCtxt->pAudioDecCtxt = 0; + pClipCtxt->AudioDecBufferOut.m_bufferSize = 0; pClipCtxt->AudioDecBufferOut.m_dataAddress = M4OSA_NULL; pClipCtxt->pFileReadPtrFct = pFileReadPtrFct; + pClipCtxt->pPlaneYuv = M4OSA_NULL; + pClipCtxt->pPlaneYuvWithEffect = M4OSA_NULL; + pClipCtxt->m_pPreResizeFrame = M4OSA_NULL; + pClipCtxt->bGetYuvDataFromDecoder = M4OSA_TRUE; /* * Reset pointers for media and codecs interfaces */ @@ -161,6 +175,7 @@ M4OSA_ERR M4VSS3GPP_intClipOpen( M4VSS3GPP_ClipContext *pClipCtxt, M4OSA_ERR err; M4READER_MediaFamily mediaFamily; M4_StreamHandler *pStreamHandler; + M4_StreamHandler dummyStreamHandler; M4OSA_Int32 iDuration; M4OSA_Void *decoderUserData; #ifdef M4VSS_ENABLE_EXTERNAL_DECODERS @@ -185,6 +200,29 @@ M4OSA_ERR M4VSS3GPP_intClipOpen( M4VSS3GPP_ClipContext *pClipCtxt, /** * Keep a pointer to the clip settings. Remember that we don't possess it! 
*/ pClipCtxt->pSettings = pClipSettings; + if(M4VIDEOEDITING_kFileType_ARGB8888 == pClipCtxt->pSettings->FileType) { + M4OSA_TRACE3_0("M4VSS3GPP_intClipOpen: Image stream; set current vid dec"); + err = M4VSS3GPP_setCurrentVideoDecoder( + &pClipCtxt->ShellAPI, M4DA_StreamTypeVideoARGB8888); + M4ERR_CHECK_RETURN(err); + + decoderUserData = M4OSA_NULL; + + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctCreate( + &pClipCtxt->pViDecCtxt, &dummyStreamHandler, + pClipCtxt->ShellAPI.m_pReaderDataIt, &pClipCtxt->VideoAU, + decoderUserData); + + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intClipOpen: \ + m_pVideoDecoder->m_pFctCreate returns 0x%x", err); + return err; + } + M4OSA_TRACE3_1("M4VSS3GPP_intClipOpen: \ + Vid dec started; pViDecCtxt=0x%x", pClipCtxt->pViDecCtxt); + + return M4NO_ERROR; + } /** * Get the correct reader interface */ @@ -755,45 +793,46 @@ M4OSA_ERR M4VSS3GPP_intClipDecodeVideoUpToCts( M4VSS3GPP_ClipContext *pClipCtxt, { /** * Jump to the previous RAP in the clip (first get the time, then jump) */ - iRapCts = iClipCts; + if(M4VIDEOEDITING_kFileType_ARGB8888 != pClipCtxt->pSettings->FileType) { + iRapCts = iClipCts; - err = pClipCtxt->ShellAPI.m_pReader->m_pFctGetPrevRapTime( - pClipCtxt->pReaderContext, - (M4_StreamHandler *)pClipCtxt->pVideoStream, &iRapCts); - - if( M4WAR_READER_INFORMATION_NOT_PRESENT == err ) - { - /* No RAP table, jump backward and predecode */ - iRapCts = iClipCts - M4VSS3GPP_NO_STSS_JUMP_POINT; + err = pClipCtxt->ShellAPI.m_pReader->m_pFctGetPrevRapTime( + pClipCtxt->pReaderContext, + (M4_StreamHandler *)pClipCtxt->pVideoStream, &iRapCts); - if( iRapCts < 0 ) - iRapCts = 0; - } - else if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_intClipDecodeVideoUpToCts: m_pFctGetPrevRapTime returns 0x%x!", - err); - return err; - } + if( M4WAR_READER_INFORMATION_NOT_PRESENT == err ) + { + /* No RAP table, jump backward and predecode */ + iRapCts = iClipCts - M4VSS3GPP_NO_STSS_JUMP_POINT; - err = - pClipCtxt->ShellAPI.m_pReader->m_pFctJump(pClipCtxt->pReaderContext, - (M4_StreamHandler *)pClipCtxt->pVideoStream, &iRapCts); + if( iRapCts < 0 ) + iRapCts = 0; + } + else if( M4NO_ERROR != err ) + { + M4OSA_TRACE1_1( + "M4VSS3GPP_intClipDecodeVideoUpToCts: m_pFctGetPrevRapTime returns 0x%x!", + err); + return err; + } - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_intClipDecodeVideoUpToCts: m_pFctJump returns 0x%x!", - err); - return err; - } + err = + pClipCtxt->ShellAPI.m_pReader->m_pFctJump(pClipCtxt->pReaderContext, + (M4_StreamHandler *)pClipCtxt->pVideoStream, &iRapCts); - /** - * The decoder must be told that we jumped */ - bClipJump = M4OSA_TRUE; - pClipCtxt->iVideoDecCts = iRapCts; + if( M4NO_ERROR != err ) + { + M4OSA_TRACE1_1( + "M4VSS3GPP_intClipDecodeVideoUpToCts: m_pFctJump returns 0x%x!", + err); + return err; + } + /** + * The decoder must be told that we jumped */ + bClipJump = M4OSA_TRUE; + pClipCtxt->iVideoDecCts = iRapCts; + } /** * Remember the clip reading state */ pClipCtxt->Vstatus = M4VSS3GPP_kClipStatus_DECODE_UP_TO; @@ -1826,6 +1865,23 @@ M4OSA_ERR M4VSS3GPP_intClipCleanUp( M4VSS3GPP_ClipContext *pClipCtxt ) pClipCtxt->pReaderContext = M4OSA_NULL; } + if(pClipCtxt->pPlaneYuv != M4OSA_NULL) { + if(pClipCtxt->pPlaneYuv[0].pac_data != M4OSA_NULL) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + pClipCtxt->pPlaneYuv[0].pac_data = M4OSA_NULL; + } + free(pClipCtxt->pPlaneYuv); + pClipCtxt->pPlaneYuv = M4OSA_NULL; + } + + if(pClipCtxt->pPlaneYuvWithEffect != M4OSA_NULL) { + 
if(pClipCtxt->pPlaneYuvWithEffect[0].pac_data != M4OSA_NULL) { + free(pClipCtxt->pPlaneYuvWithEffect[0].pac_data); + pClipCtxt->pPlaneYuvWithEffect[0].pac_data = M4OSA_NULL; + } + free(pClipCtxt->pPlaneYuvWithEffect); + pClipCtxt->pPlaneYuvWithEffect = M4OSA_NULL; + } /** * Free the shells interfaces */ M4VSS3GPP_unRegisterAllWriters(&pClipCtxt->ShellAPI); diff --git a/libvideoeditor/vss/src/M4VSS3GPP_ClipAnalysis.c b/libvideoeditor/vss/src/M4VSS3GPP_ClipAnalysis.c index 248ab61..723c0c1 100755 --- a/libvideoeditor/vss/src/M4VSS3GPP_ClipAnalysis.c +++ b/libvideoeditor/vss/src/M4VSS3GPP_ClipAnalysis.c @@ -143,20 +143,21 @@ M4OSA_ERR M4VSS3GPP_editAnalyseClip( M4OSA_Void *pClip, /** * Analyse the clip */ - err = M4VSS3GPP_intBuildAnalysis(pClipContext, pClipProperties); + if(M4VIDEOEDITING_kFileType_ARGB8888 != pClipContext->pSettings->FileType) { + err = M4VSS3GPP_intBuildAnalysis(pClipContext, pClipProperties); - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_editAnalyseClip: M4VSS3GPP_intBuildAnalysis() returns 0x%x!", - err); + if( M4NO_ERROR != err ) + { + M4OSA_TRACE1_1( + "M4VSS3GPP_editAnalyseClip: M4VSS3GPP_intBuildAnalysis() returns 0x%x!", + err); - /** - * Free the clip */ - M4VSS3GPP_intClipCleanUp(pClipContext); - return err; + /** + * Free the clip */ + M4VSS3GPP_intClipCleanUp(pClipContext); + return err; + } } - /** * Free the clip */ err = M4VSS3GPP_intClipClose(pClipContext); @@ -174,17 +175,18 @@ M4OSA_ERR M4VSS3GPP_editAnalyseClip( M4OSA_Void *pClip, /** * Check the clip is compatible with VSS editing */ - err = M4VSS3GPP_intCheckClipCompatibleWithVssEditing(pClipProperties); + if(M4VIDEOEDITING_kFileType_ARGB8888 != ClipSettings.FileType) { + err = M4VSS3GPP_intCheckClipCompatibleWithVssEditing(pClipProperties); - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_editAnalyseClip:\ - M4VSS3GPP_intCheckClipCompatibleWithVssEditing() returns 0x%x!", - err); - return err; + if( M4NO_ERROR != err ) + { + M4OSA_TRACE1_1( + "M4VSS3GPP_editAnalyseClip:\ + M4VSS3GPP_intCheckClipCompatibleWithVssEditing() returns 0x%x!", + err); + return err; + } } - /** * Return with no error */ M4OSA_TRACE3_0("M4VSS3GPP_editAnalyseClip(): returning M4NO_ERROR"); @@ -230,34 +232,6 @@ M4OSA_ERR M4VSS3GPP_editCheckClipCompatibility( M4VIDEOEDITING_ClipProperties *p M4OSA_DEBUG_IF2((M4OSA_NULL == pClip2Properties), M4ERR_PARAMETER, "M4VSS3GPP_editCheckClipCompatibility: pClip2Properties is M4OSA_NULL"); - /** - * Check if the two clips are, alone, comptible with VSS 3GPP. - * - * Note: if a clip is not compatible with VSS3GPP, M4VSS3GPP_editAnalyseClip() - * did return an error to the integrator. So he should not call - * M4VSS3GPP_editCheckClipCompatibility - * with the ClipAnalysis... - * Still, I think it is good to redo the test here, to be sure. 
- * M4VSS3GPP_intCheckClipCompatibleWithVssEditing is not a long function to execute.*/ - err = M4VSS3GPP_intCheckClipCompatibleWithVssEditing(pClip1Properties); - - if( err != M4NO_ERROR ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_editCheckClipCompatibility: Clip1 not compatible with VSS3GPP,\ - returning 0x%x", err); - return err; - } - err = M4VSS3GPP_intCheckClipCompatibleWithVssEditing(pClip2Properties); - - if( err != M4NO_ERROR ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_editCheckClipCompatibility: Clip2 not compatible with VSS3GPP,\ - returning 0x%x", err); - return err; - } - if( ( M4VIDEOEDITING_kFileType_MP3 == pClip1Properties->FileType) || (M4VIDEOEDITING_kFileType_AMR == pClip1Properties->FileType) ) { @@ -275,81 +249,6 @@ M4OSA_ERR M4VSS3GPP_editCheckClipCompatibility( M4VIDEOEDITING_ClipProperties *p } } - /********** Video ************/ - - /** - * Check both clips have same video stream type */ - if( pClip1Properties->VideoStreamType != pClip2Properties->VideoStreamType ) - { - M4OSA_TRACE1_0( - "M4VSS3GPP_editCheckClipCompatibility: Clips don't have the same video format"); - video_err = M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_FORMAT; - goto audio_analysis; - } - - /** - * Check both clips have the same video frame size */ - if( ( pClip1Properties->uiVideoWidth != pClip2Properties->uiVideoWidth) - || (pClip1Properties->uiVideoHeight - != pClip2Properties->uiVideoHeight) ) - { - M4OSA_TRACE1_0( - "M4VSS3GPP_editCheckClipCompatibility: Clips don't have the same video frame size"); - video_err = M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_FRAME_SIZE; - goto audio_analysis; - } - - switch( pClip1Properties->VideoStreamType ) - { - case M4VIDEOEDITING_kH263: - case M4VIDEOEDITING_kH264: - /**< nothing to check here */ - break; - - case M4VIDEOEDITING_kMPEG4_EMP: - case M4VIDEOEDITING_kMPEG4: - /** - * Check both streams have the same time scale */ - if( pClip1Properties->uiVideoTimeScale - != pClip2Properties->uiVideoTimeScale ) - { - M4OSA_TRACE1_2( - "M4VSS3GPP_editCheckClipCompatibility: Clips don't have the same video time\ - scale (%d != %d), returning M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_TIME_SCALE", - pClip1Properties->uiVideoTimeScale, - pClip2Properties->uiVideoTimeScale); - video_err = M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_TIME_SCALE; - goto audio_analysis; - } - /** - * Check both streams have the same use of data partitioning */ - if( pClip1Properties->bMPEG4dataPartition - != pClip2Properties->bMPEG4dataPartition ) - { - M4OSA_TRACE1_2( - "M4VSS3GPP_editCheckClipCompatibility:\ - Clips don't have the same use of data partitioning (%d != %d),\ - returning M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING", - pClip1Properties->bMPEG4dataPartition, - pClip2Properties->bMPEG4dataPartition); - video_err = M4VSS3GPP_ERR_INCOMPATIBLE_VIDEO_DATA_PARTITIONING; - goto audio_analysis; - } - break; - - default: - M4OSA_TRACE1_1( - "M4VSS3GPP_editCheckClipCompatibility: unknown video stream type (0x%x),\ - returning M4VSS3GPP_ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT", - pClip1Properties->VideoStreamType); - video_err = - M4VSS3GPP_ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT; /**< this error should never happen, - it's here for code safety only... 
*/ - goto audio_analysis; - } - - pClip2Properties->bVideoIsCompatibleWithMasterClip = M4OSA_TRUE; - /********** Audio ************/ audio_analysis: @@ -995,20 +894,6 @@ M4OSA_ERR M4VSS3GPP_intCheckClipCompatibleWithVssEditing( M4OSA_UInt32 uiNbOfValidStreams = 0; M4OSA_ERR video_err = M4NO_ERROR; M4OSA_ERR audio_err = M4NO_ERROR; - - /** - * Check that analysis has been generated by this version of the VSS3GPP library */ - if( ( pClipProperties->Version[0] != M4VIDEOEDITING_VERSION_MAJOR) - || (pClipProperties->Version[1] != M4VIDEOEDITING_VERSION_MINOR) - || (pClipProperties->Version[2] - != M4VIDEOEDITING_VERSION_REVISION) ) - { - M4OSA_TRACE1_0( - "M4VSS3GPP_intCheckClipCompatibleWithVssEditing: The clip analysis has been generated\ - by another version, returning M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_VERSION"); - return M4VSS3GPP_ERR_INVALID_CLIP_ANALYSIS_VERSION; - } - /********* file type *********/ if( M4VIDEOEDITING_kFileType_AMR == pClipProperties->FileType ) diff --git a/libvideoeditor/vss/src/M4VSS3GPP_Codecs.c b/libvideoeditor/vss/src/M4VSS3GPP_Codecs.c index 10cb76e..1ced937 100755 --- a/libvideoeditor/vss/src/M4VSS3GPP_Codecs.c +++ b/libvideoeditor/vss/src/M4VSS3GPP_Codecs.c @@ -943,7 +943,9 @@ M4OSA_ERR M4VSS3GPP_setCurrentVideoDecoder( M4VSS3GPP_MediaAndCodecCtxt *pC, case M4DA_StreamTypeVideoMpeg4Avc: decoderType = M4DECODER_kVideoTypeAVC; break; - + case M4DA_StreamTypeVideoARGB8888: + decoderType = M4DECODER_kVideoTypeYUV420P; + break; default: M4OSA_DEBUG_IF1(M4OSA_TRUE, M4VSS3GPP_ERR_UNSUPPORTED_INPUT_VIDEO_FORMAT, diff --git a/libvideoeditor/vss/src/M4VSS3GPP_Edit.c b/libvideoeditor/vss/src/M4VSS3GPP_Edit.c index f8d4fb6..5bde983 100755 --- a/libvideoeditor/vss/src/M4VSS3GPP_Edit.c +++ b/libvideoeditor/vss/src/M4VSS3GPP_Edit.c @@ -46,6 +46,9 @@ with M4OSA_String...) */ #endif /* WIN32 */ +#ifdef M4VSS_ENABLE_EXTERNAL_DECODERS +#include "M4VD_EXTERNAL_Interface.h" +#endif /************************************************************************/ /* Static local functions */ @@ -148,11 +151,13 @@ M4OSA_ERR M4VSS3GPP_editInit( M4VSS3GPP_EditContext *pContext, /* Init the context. 
*/ + pC->uiClipNumber = 0; pC->pClipList = M4OSA_NULL; pC->pTransitionList = M4OSA_NULL; pC->pEffectsList = M4OSA_NULL; pC->pActiveEffectsList = M4OSA_NULL; pC->pActiveEffectsList1 = M4OSA_NULL; + pC->uiCurrentClip = 0; pC->pC1 = M4OSA_NULL; pC->pC2 = M4OSA_NULL; pC->yuv1[0].pac_data = pC->yuv1[1].pac_data = pC-> @@ -164,26 +169,49 @@ M4OSA_ERR M4VSS3GPP_editInit( M4VSS3GPP_EditContext *pContext, pC->yuv4[0].pac_data = pC->yuv4[1].pac_data = pC-> yuv4[2].pac_data = M4OSA_NULL; pC->bClip1AtBeginCut = M4OSA_FALSE; + pC->iClip1ActiveEffect = 0; + pC->iClip2ActiveEffect = 0; pC->bTransitionEffect = M4OSA_FALSE; pC->bSupportSilence = M4OSA_FALSE; /** * Init PC->ewc members */ // Decorrelate input and output encoding timestamp to handle encoder prefetch + pC->ewc.dInputVidCts = 0.0; + pC->ewc.dOutputVidCts = 0.0; + pC->ewc.dATo = 0.0; + pC->ewc.iOutputDuration = 0; pC->ewc.VideoStreamType = M4SYS_kVideoUnknown; + pC->ewc.uiVideoBitrate = 0; + pC->ewc.uiVideoWidth = 0; + pC->ewc.uiVideoHeight = 0; + pC->ewc.uiVideoTimeScale = 0; pC->ewc.bVideoDataPartitioning = M4OSA_FALSE; pC->ewc.pVideoOutputDsi = M4OSA_NULL; + pC->ewc.uiVideoOutputDsiSize = 0; pC->ewc.bActivateEmp = M4OSA_FALSE; pC->ewc.AudioStreamType = M4SYS_kAudioUnknown; pC->ewc.uiNbChannels = 1; + pC->ewc.uiAudioBitrate = 0; + pC->ewc.uiSamplingFrequency = 0; pC->ewc.pAudioOutputDsi = M4OSA_NULL; + pC->ewc.uiAudioOutputDsiSize = 0; pC->ewc.pAudioEncCtxt = M4OSA_NULL; + pC->ewc.pAudioEncDSI.infoSize = 0; pC->ewc.pAudioEncDSI.pInfo = M4OSA_NULL; + pC->ewc.uiSilencePcmSize = 0; pC->ewc.pSilenceFrameData = M4OSA_NULL; + pC->ewc.uiSilenceFrameSize = 0; + pC->ewc.iSilenceFrameDuration = 0; + pC->ewc.scale_audio = 0.0; pC->ewc.pEncContext = M4OSA_NULL; pC->ewc.pDummyAuBuffer = M4OSA_NULL; + pC->ewc.iMpeg4GovOffset = 0; + pC->ewc.VppError = 0; pC->ewc.encoderState = M4VSS3GPP_kNoEncoder; pC->ewc.p3gpWriterContext = M4OSA_NULL; + pC->ewc.uiVideoMaxAuSize = 0; + pC->ewc.uiAudioMaxAuSize = 0; /** * Keep the OSAL file functions pointer set in our context */ pC->pOsaFileReadPtr = pFileReadPtrFct; @@ -212,7 +240,10 @@ M4OSA_ERR M4VSS3GPP_editInit( M4VSS3GPP_EditContext *pContext, pC->iInOutTimeOffset = 0; pC->bEncodeTillEoF = M4OSA_FALSE; - + pC->nbActiveEffects = 0; + pC->nbActiveEffects1 = 0; + pC->bIssecondClip = M4OSA_FALSE; + pC->m_air_context = M4OSA_NULL; /** * Return with no error */ M4OSA_TRACE3_0("M4VSS3GPP_editInit(): returning M4NO_ERROR"); @@ -289,6 +320,7 @@ M4VSS3GPP_editCreateClipSettings( M4VSS3GPP_ClipSettings *pClipSettings, pClipSettings->uiBeginCutTime = 0; /**< no begin cut */ pClipSettings->uiEndCutTime = 0; /**< no end cut */ + pClipSettings->ClipProperties.bSetImageData = M4OSA_FALSE; /** * Reset video characteristics */ @@ -598,37 +630,45 @@ M4OSA_ERR M4VSS3GPP_editOpen( M4VSS3GPP_EditContext pContext, * Check clip compatibility */ for ( i = 0; i < pC->uiClipNumber; i++ ) { - /** - * Check all the clips are compatible with VSS 3GPP */ - err = M4VSS3GPP_intCheckClipCompatibleWithVssEditing( - &pC->pClipList[i].ClipProperties); + if (pC->pClipList[i].FileType !=M4VIDEOEDITING_kFileType_ARGB8888) { + /** + * Check all the clips are compatible with VSS 3GPP */ + err = M4VSS3GPP_intCheckClipCompatibleWithVssEditing( + &pC->pClipList[i].ClipProperties); - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_2( - "M4VSS3GPP_editOpen:\ - M4VSS3GPP_intCheckClipCompatibleWithVssEditing(%d) returns 0x%x!", - i, err); - return err; + if( M4NO_ERROR != err ) + { + M4OSA_TRACE1_2( + "M4VSS3GPP_editOpen:\ + 
M4VSS3GPP_intCheckClipCompatibleWithVssEditing(%d) returns 0x%x!", + i, err); + return err; + } } /** * Check the master clip versus all the other ones. (including master clip with itself, else variables for master clip are not properly setted) */ - err = M4VSS3GPP_editCheckClipCompatibility( - &pC->pClipList[pSettings->uiMasterClip].ClipProperties, - &pC->pClipList[i].ClipProperties); - /* in case of warning regarding audio incompatibility, editing continues */ - if( M4OSA_ERR_IS_ERROR(err) ) - { - M4OSA_TRACE1_2( - "M4VSS3GPP_editOpen: M4VSS3GPP_editCheckClipCompatibility(%d) returns 0x%x!", - i, err); - return err; + if(pC->pClipList[i].FileType != M4VIDEOEDITING_kFileType_ARGB8888) { + + err = M4VSS3GPP_editCheckClipCompatibility( + &pC->pClipList[pSettings->uiMasterClip].ClipProperties, + &pC->pClipList[i].ClipProperties); + /* in case of warning regarding audio incompatibility, + editing continues */ + if( M4OSA_ERR_IS_ERROR(err) ) + { + M4OSA_TRACE1_2( + "M4VSS3GPP_editOpen: M4VSS3GPP_editCheckClipCompatibility \ + (%d) returns 0x%x!", i, err); + return err; + } + } else { + pC->pClipList[i].ClipProperties.bAudioIsCompatibleWithMasterClip = + M4OSA_FALSE; } } - /* Search audio tracks that cannot be edited : * - delete all audio effects for the clip * - if master clip is editable let the transition @@ -691,19 +731,49 @@ M4OSA_ERR M4VSS3GPP_editOpen( M4VSS3GPP_EditContext pContext, * Avoid weird clip settings */ for ( i = 0; i < pSettings->uiClipNumber; i++ ) { - err = M4VSS3GPP_intClipSettingsSanityCheck(&pC->pClipList[i]); + if (pC->pClipList[i].FileType !=M4VIDEOEDITING_kFileType_ARGB8888) { + err = M4VSS3GPP_intClipSettingsSanityCheck(&pC->pClipList[i]); - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_editOpen: M4VSS3GPP_intClipSettingsSanityCheck returns 0x%x!", - err); - return err; + if( M4NO_ERROR != err ) + { + M4OSA_TRACE1_1( + "M4VSS3GPP_editOpen: M4VSS3GPP_intClipSettingsSanityCheck returns 0x%x!", + err); + return err; + } } } for ( i = 0; i < (pSettings->uiClipNumber - 1); i++ ) { + if (pC->pTransitionList[i].uiTransitionDuration != 0) { + if (pC->pClipList[i].FileType == M4VIDEOEDITING_kFileType_ARGB8888) { + pC->pClipList[i].uiBeginCutTime = 0; + pC->pClipList[i].uiEndCutTime = + pC->pTransitionList[i].uiTransitionDuration; + } + + if (pC->pClipList[i+1].FileType == M4VIDEOEDITING_kFileType_ARGB8888) { + pC->pClipList[i+1].uiBeginCutTime = 0; + pC->pClipList[i+1].uiEndCutTime = + pC->pTransitionList[i].uiTransitionDuration; + } + } else { + + if (pC->pClipList[i].FileType == M4VIDEOEDITING_kFileType_ARGB8888) { + pC->pClipList[i].uiEndCutTime = + pC->pClipList[i].uiEndCutTime - pC->pClipList[i].uiBeginCutTime; + pC->pClipList[i].uiBeginCutTime = 0; + } + + if (pC->pClipList[i+1].FileType == M4VIDEOEDITING_kFileType_ARGB8888) { + pC->pClipList[i+1].uiEndCutTime = + pC->pClipList[i+1].uiEndCutTime - pC->pClipList[i+1].uiBeginCutTime; + pC->pClipList[i+1].uiBeginCutTime = 0; + } + + } + /** * Maximum transition duration between clip n and clip n+1 is the duration * of the shortest clip */ @@ -809,34 +879,89 @@ M4OSA_ERR M4VSS3GPP_editOpen( M4VSS3GPP_EditContext pContext, pC->ewc.iOutputDuration -= pC->pTransitionList[i].uiTransitionDuration; } - /** - * Copy the video properties of the master clip to the output properties */ - pC->ewc.uiVideoWidth = - pC->pClipList[pSettings->uiMasterClip].ClipProperties.uiVideoWidth; - pC->ewc.uiVideoHeight = - pC->pClipList[pSettings->uiMasterClip].ClipProperties.uiVideoHeight; - pC->ewc.uiVideoTimeScale = - 
pC->pClipList[pSettings->uiMasterClip].ClipProperties.uiVideoTimeScale; - pC->ewc.bVideoDataPartitioning = pC->pClipList[pSettings-> - uiMasterClip].ClipProperties.bMPEG4dataPartition; + /* Get video properties from output properties */ - switch( pC->pClipList[pSettings->uiMasterClip].ClipProperties.VideoStreamType ) - { - case M4VIDEOEDITING_kH263: - pC->ewc.VideoStreamType = M4SYS_kH263; + /* Get output width and height */ + switch(pC->xVSS.outputVideoSize) { + case M4VIDEOEDITING_kSQCIF: + pC->ewc.uiVideoWidth = 128; + pC->ewc.uiVideoHeight = 96; + break; + case M4VIDEOEDITING_kQQVGA: + pC->ewc.uiVideoWidth = 160; + pC->ewc.uiVideoHeight = 120; + break; + case M4VIDEOEDITING_kQCIF: + pC->ewc.uiVideoWidth = 176; + pC->ewc.uiVideoHeight = 144; + break; + case M4VIDEOEDITING_kQVGA: + pC->ewc.uiVideoWidth = 320; + pC->ewc.uiVideoHeight = 240; + break; + case M4VIDEOEDITING_kCIF: + pC->ewc.uiVideoWidth = 352; + pC->ewc.uiVideoHeight = 288; + break; + case M4VIDEOEDITING_kVGA: + pC->ewc.uiVideoWidth = 640; + pC->ewc.uiVideoHeight = 480; + break; + /* +PR LV5807 */ + case M4VIDEOEDITING_kWVGA: + pC->ewc.uiVideoWidth = 800; + pC->ewc.uiVideoHeight = 480; + break; + case M4VIDEOEDITING_kNTSC: + pC->ewc.uiVideoWidth = 720; + pC->ewc.uiVideoHeight = 480; + break; + /* -PR LV5807 */ + /* +CR Google */ + case M4VIDEOEDITING_k640_360: + pC->ewc.uiVideoWidth = 640; + pC->ewc.uiVideoHeight = 360; + break; + + case M4VIDEOEDITING_k854_480: + pC->ewc.uiVideoWidth = M4ENCODER_854_480_Width; + pC->ewc.uiVideoHeight = 480; + break; + + case M4VIDEOEDITING_k1280_720: + pC->ewc.uiVideoWidth = 1280; + pC->ewc.uiVideoHeight = 720; + break; + case M4VIDEOEDITING_k1080_720: + pC->ewc.uiVideoWidth = M4ENCODER_1080_720_Width; + + pC->ewc.uiVideoHeight = 720; + break; + case M4VIDEOEDITING_k960_720: + pC->ewc.uiVideoWidth = 960; + pC->ewc.uiVideoHeight = 720; + break; + + default: /* If output video size is not given, we take QCIF size */ + pC->ewc.uiVideoWidth = 176; + pC->ewc.uiVideoHeight = 144; + pC->xVSS.outputVideoSize = M4VIDEOEDITING_kQCIF; break; + } - case M4VIDEOEDITING_kMPEG4_EMP: - pC->ewc.bActivateEmp = M4OSA_TRUE; /* no break */ + pC->ewc.uiVideoTimeScale = 30; + pC->ewc.bVideoDataPartitioning = 0; + switch(pC->xVSS.outputVideoFormat) { + case M4VIDEOEDITING_kH263: + pC->ewc.VideoStreamType = M4SYS_kH263; + break; case M4VIDEOEDITING_kMPEG4: pC->ewc.VideoStreamType = M4SYS_kMPEG_4; break; - case M4VIDEOEDITING_kH264: pC->ewc.VideoStreamType = M4SYS_kH264; break; - default: pC->ewc.VideoStreamType = M4SYS_kVideoUnknown; break; @@ -913,6 +1038,22 @@ M4OSA_ERR M4VSS3GPP_editOpen( M4VSS3GPP_EditContext pContext, break; } + for (i=0; i<pC->uiClipNumber; i++) { + if ((pC->pClipList[i].ClipProperties.VideoStreamType != + pC->xVSS.outputVideoFormat)|| + (pC->pClipList[i].ClipProperties.uiVideoWidth != + pC->ewc.uiVideoWidth) || + (pC->pClipList[i].ClipProperties.uiVideoHeight != + pC->ewc.uiVideoHeight) || + (pC->pClipList[i].ClipProperties.VideoStreamType == + M4VIDEOEDITING_kH264) || + (pC->pClipList[i].ClipProperties.VideoStreamType == + M4VIDEOEDITING_kMPEG4 && + pC->pClipList[i].ClipProperties.uiVideoTimeScale != + pC->ewc.uiVideoTimeScale)) { + pC->pClipList[i].bTranscodingRequired = M4OSA_TRUE; + } + } /** * We produce a 3gpp file, unless it is mp3 */ if( M4VIDEOEDITING_kMP3 == pC-> @@ -1108,8 +1249,6 @@ M4OSA_ERR M4VSS3GPP_editOpen( M4VSS3GPP_EditContext pContext, * Create the 3GPP output file */ else if( M4VIDEOEDITING_kFileType_3GPP == outputFileType ) { - /* Compute an average bitrate from mixed 
bitrates of the input clips */ - M4VSS3GPP_intComputeOutputAverageVideoBitrate(pC); pC->ewc.uiVideoBitrate = pSettings->xVSS.outputVideoBitrate; /** @@ -1526,6 +1665,10 @@ M4OSA_ERR M4VSS3GPP_editClose( M4VSS3GPP_EditContext pContext ) free(pC->pActiveEffectsList1); pC->pActiveEffectsList1 = M4OSA_NULL; } + if(pC->m_air_context != M4OSA_NULL) { + free(pC->m_air_context); + pC->m_air_context = M4OSA_NULL; + } /** * Update state automaton */ pC->State = M4VSS3GPP_kEditState_CLOSED; @@ -2474,6 +2617,9 @@ M4VSS3GPP_intComputeOutputVideoAndAudioDsi( M4VSS3GPP_InternalEditContext *pC, M4VSS3GPP_ClipContext *pClip; M4OSA_ERR err; M4OSA_UInt32 i; + M4DECODER_MPEG4_DecoderConfigInfo DecConfigInfo; + M4DECODER_VideoSize dummySize; + M4OSA_Bool bGetDSiFromEncoder = M4OSA_FALSE; M4ENCODER_Header *encHeader; M4SYS_StreamIDmemAddr streamHeader; @@ -2519,11 +2665,11 @@ M4VSS3GPP_intComputeOutputVideoAndAudioDsi( M4VSS3GPP_InternalEditContext *pC, for ( i = 0; i < pC->uiClipNumber; i++ ) { - uiNewLevel = pC->pClipList[i].ClipProperties.uiH263level; - - if( uiNewLevel > uiCurrentLevel ) - { - uiCurrentLevel = uiNewLevel; + if(pC->pClipList[i].bTranscodingRequired == M4OSA_FALSE) { + uiNewLevel = pC->pClipList[i].ClipProperties.uiH263level; + if (uiNewLevel > uiCurrentLevel) { + uiCurrentLevel = uiNewLevel; + } } } @@ -2560,68 +2706,100 @@ M4VSS3GPP_intComputeOutputVideoAndAudioDsi( M4VSS3GPP_InternalEditContext *pC, /** * Start with profile of the first clip */ - uiCurrentProf = pC->pClipList[0].ClipProperties.uiVideoProfile; + M4OSA_TRACE1_0("M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + get DSI for Mpeg4 stream"); + if(M4OSA_NULL == pC->ewc.pEncContext) { + M4OSA_TRACE1_0("M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + pC->ewc.pEncContext is NULL"); + err = M4VSS3GPP_intCreateVideoEncoder(pC); + if(M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + M4VSS3GPP_intCreateVideoEncoder returned error 0x%x", err); + } + } + if(M4OSA_NULL != pC->ewc.pEncContext) { + err = pC->ShellAPI.pVideoEncoderGlobalFcts->pFctGetOption( + pC->ewc.pEncContext, M4ENCODER_kOptionID_EncoderHeader, + (M4OSA_DataOption)&encHeader); + if ( (M4NO_ERROR != err) || (M4OSA_NULL == encHeader->pBuf)) { + M4OSA_TRACE1_1("M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + failed to get the encoder header (err 0x%x)", err); + M4OSA_TRACE1_2("M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + encHeader->pBuf=0x%x, size=0x%x", encHeader->pBuf, + encHeader->Size); + } + } + err = M4DECODER_EXTERNAL_ParseVideoDSI((M4OSA_UInt8 *)encHeader->pBuf, + encHeader->Size, &DecConfigInfo, &dummySize); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intBuildAnalysis(): \ + M4DECODER_EXTERNAL_ParseVideoDSI returns 0x%08X", err); + return err; + } + + uiCurrentProf = DecConfigInfo.uiProfile; /** * Combine current profile with the one of the next clip */ - for ( i = 1; i < pC->uiClipNumber; i++ ) - { - uiNewProf = pC->pClipList[i].ClipProperties.uiVideoProfile; - - switch( uiNewProf ) - { - case 8: - /**< 8 + x --> x */ - /**< uiCurrentProf is not updated */ - break; + for (i=0; i<pC->uiClipNumber; i++) { + if(pC->pClipList[i].bTranscodingRequired == M4OSA_FALSE) { + uiNewProf = pC->pClipList[i].ClipProperties.uiVideoProfile; - case 1: - case 2: - case 3: - switch( uiCurrentProf ) - { - case 1: - case 2: - case 3: - case 4: - case 5: - /**< 1, 2, 3, 4 or 5 -> max */ - uiCurrentProf = (uiCurrentProf > uiNewProf) - ? 
uiCurrentProf : uiNewProf; - break; - - case 8: /**< 8 + x -> x */ - uiCurrentProf = uiNewProf; - break; - - case 9: - /**< 9 and 1 -> 2 */ - /**< 9 and 2 -> 2 */ - /**< 9 and 3 -> 3 */ - /**< 9 and 4 -> 4 */ - /**< 9 and 5 -> 5 */ - uiCurrentProf = (uiNewProf > 2) ? uiNewProf : 2; - break; + switch( uiNewProf ) + { + case 8: + /**< 8 + x --> x */ + /**< uiCurrentProf is not updated */ + break; + + case 1: + case 2: + case 3: + switch( uiCurrentProf ) + { + case 1: + case 2: + case 3: + case 4: + case 5: + /**< 1, 2, 3, 4 or 5 -> max */ + uiCurrentProf = (uiCurrentProf > uiNewProf) + ? uiCurrentProf : uiNewProf; + break; + + case 8: /**< 8 + x -> x */ + uiCurrentProf = uiNewProf; + break; + + case 9: + /**< 9 and 1 -> 2 */ + /**< 9 and 2 -> 2 */ + /**< 9 and 3 -> 3 */ + /**< 9 and 4 -> 4 */ + /**< 9 and 5 -> 5 */ + uiCurrentProf = (uiNewProf > 2) ? uiNewProf : 2; + break; + } + break; + + case 9: + switch( uiCurrentProf ) + { + case 1: + case 2: + case 3: + /**< 9 and 1 -> 2 */ + /**< 9 and 2 -> 2 */ + /**< 9 and 3 -> 3 */ + uiCurrentProf = + (uiCurrentProf > 2) ? uiCurrentProf : 2; + break; + + case 9: /**< 9 + x -> x */ + case 8: /**< 8 + x -> x */ + uiCurrentProf = uiNewProf; + break; } - break; - - case 9: - switch( uiCurrentProf ) - { - case 1: - case 2: - case 3: - /**< 9 and 1 -> 2 */ - /**< 9 and 2 -> 2 */ - /**< 9 and 3 -> 3 */ - uiCurrentProf = - (uiCurrentProf > 2) ? uiCurrentProf : 2; - break; - - case 9: /**< 9 + x -> x */ - case 8: /**< 8 + x -> x */ - uiCurrentProf = uiNewProf; - break; } } } @@ -2634,9 +2812,9 @@ M4VSS3GPP_intComputeOutputVideoAndAudioDsi( M4VSS3GPP_InternalEditContext *pC, while( i < pC->uiClipNumber ) { - if( M4OSA_TRUE - == pC->pClipList[i].ClipProperties.bMPEG4resynchMarker ) - { + if ((M4OSA_TRUE == + pC->pClipList[i].ClipProperties.bMPEG4resynchMarker) && + (pC->pClipList[i].bTranscodingRequired == M4OSA_FALSE)) { iResynchMarkerDsiIndex = i; break; /**< we found it, get out the while loop */ } @@ -2648,7 +2826,62 @@ M4VSS3GPP_intComputeOutputVideoAndAudioDsi( M4VSS3GPP_InternalEditContext *pC, * Else we must open it (and later close it...) 
*/ if( 0 == iResynchMarkerDsiIndex ) { - pStreamForDsi = &(pC->pC1->pVideoStream->m_basicProperties); + for (i=0; i<pC->uiClipNumber; i++) { + if(pC->pClipList[i].bTranscodingRequired == M4OSA_FALSE) { + /** + * We can use the fast open mode and the skip audio mode + to get the DSI */ + err = M4VSS3GPP_intClipInit(&pClip, pC->pOsaFileReadPtr); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1( + "M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + M4VSS3GPP_intClipInit() returns 0x%x!", err); + if (M4OSA_NULL != pClip) { + M4VSS3GPP_intClipCleanUp(pClip); + } + return err; + } + err = M4VSS3GPP_intClipOpen( + pClip, &pC->pClipList[i], + M4OSA_TRUE, M4OSA_TRUE, + M4OSA_TRUE); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1( + "M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + M4VSS3GPP_intClipOpen() returns 0x%x!", err); + M4VSS3GPP_intClipCleanUp(pClip); + return err; + } + pStreamForDsi = &(pClip->pVideoStream->m_basicProperties); + /*got the DSI */ + + bGetDSiFromEncoder = M4OSA_TRUE; + + break; + } + } + if(bGetDSiFromEncoder == M4OSA_FALSE) { + /** + * Allocate and copy the new DSI */ + pC->ewc.pVideoOutputDsi = (M4OSA_MemAddr8)M4OSA_32bitAlignedMalloc( + encHeader->Size, M4VSS3GPP, (M4OSA_Char *)"ewc dsi (MPEG4)"); + if (M4OSA_NULL == pC->ewc.pVideoOutputDsi) { + M4OSA_TRACE1_0( + "M4VSS3GPP_intComputeOutputVideoAndAudioDsi(): \ + unable to allocate pVideoOutputDsi (MPEG4)"); + return M4ERR_ALLOC; + } + pC->ewc.uiVideoOutputDsiSize = (M4OSA_UInt16)encHeader->Size; + memcpy((void *)pC->ewc.pVideoOutputDsi, + (void *)encHeader->pBuf, pC->ewc.uiVideoOutputDsiSize); + + } + + err = M4VSS3GPP_intDestroyVideoEncoder(pC); + if(M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intComputeOutputVideoAndAudioDsi: \ + M4VSS3GPP_intDestroyVideoEncoder error 0x%x", err); + } } else { @@ -2687,24 +2920,27 @@ M4VSS3GPP_intComputeOutputVideoAndAudioDsi( M4VSS3GPP_InternalEditContext *pC, pStreamForDsi = &(pClip->pVideoStream->m_basicProperties); } - /** - * Allocate and copy the new DSI */ - pC->ewc.pVideoOutputDsi = (M4OSA_MemAddr8)M4OSA_32bitAlignedMalloc( - pStreamForDsi->m_decoderSpecificInfoSize, - M4VSS3GPP, (M4OSA_Char *)"pC->ewc.pVideoOutputDsi (MPEG4)"); + if(pC->ewc.pVideoOutputDsi == M4OSA_NULL) { - if( M4OSA_NULL == pC->ewc.pVideoOutputDsi ) - { - M4OSA_TRACE1_0( - "M4VSS3GPP_intComputeOutputVideoAndAudioDsi():\ - unable to allocate pVideoOutputDsi (MPEG4), returning M4ERR_ALLOC"); - return M4ERR_ALLOC; + /** + * Allocate and copy the new DSI */ + pC->ewc.pVideoOutputDsi = (M4OSA_MemAddr8)M4OSA_32bitAlignedMalloc( + pStreamForDsi->m_decoderSpecificInfoSize, + M4VSS3GPP, (M4OSA_Char *)"pC->ewc.pVideoOutputDsi (MPEG4)"); + + if( M4OSA_NULL == pC->ewc.pVideoOutputDsi ) + { + M4OSA_TRACE1_0( + "M4VSS3GPP_intComputeOutputVideoAndAudioDsi():\ + unable to allocate pVideoOutputDsi (MPEG4), returning M4ERR_ALLOC"); + return M4ERR_ALLOC; + } + pC->ewc.uiVideoOutputDsiSize = + (M4OSA_UInt16)pStreamForDsi->m_decoderSpecificInfoSize; + memcpy((void *)pC->ewc.pVideoOutputDsi, + (void *)pStreamForDsi->m_pDecoderSpecificInfo, + pC->ewc.uiVideoOutputDsiSize); } - pC->ewc.uiVideoOutputDsiSize = - (M4OSA_UInt16)pStreamForDsi->m_decoderSpecificInfoSize; - memcpy((void *)pC->ewc.pVideoOutputDsi, - (void *)pStreamForDsi->m_pDecoderSpecificInfo, - pC->ewc.uiVideoOutputDsiSize); /** * We rewrite the profile in the output DSI because it may not be the good one @@ -2914,6 +3150,22 @@ static M4OSA_ERR M4VSS3GPP_intSwitchToNextClip( if( M4OSA_NULL != pC->pC1 ) { + if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) { + if (M4OSA_NULL != 
pC->pC1->m_pPreResizeFrame[0].pac_data) { + free(pC->pC1->m_pPreResizeFrame[0].pac_data); + pC->pC1->m_pPreResizeFrame[0].pac_data = M4OSA_NULL; + } + if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame[1].pac_data) { + free(pC->pC1->m_pPreResizeFrame[1].pac_data); + pC->pC1->m_pPreResizeFrame[1].pac_data = M4OSA_NULL; + } + if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame[2].pac_data) { + free(pC->pC1->m_pPreResizeFrame[2].pac_data); + pC->pC1->m_pPreResizeFrame[2].pac_data = M4OSA_NULL; + } + free(pC->pC1->m_pPreResizeFrame); + pC->pC1->m_pPreResizeFrame = M4OSA_NULL; + } /** * Close the current first clip */ err = M4VSS3GPP_intClipCleanUp(pC->pC1); @@ -2949,6 +3201,9 @@ static M4OSA_ERR M4VSS3GPP_intSwitchToNextClip( if( M4OSA_NULL != pC->pC2 ) { pC->pC1 = pC->pC2; + if(M4OSA_NULL != pC->pC2->m_pPreResizeFrame) { + pC->pC1->m_pPreResizeFrame = pC->pC2->m_pPreResizeFrame; + } pC->pC2 = M4OSA_NULL; } /** @@ -3145,7 +3400,7 @@ M4OSA_ERR M4VSS3GPP_intOpenClip( M4VSS3GPP_InternalEditContext *pC, { M4OSA_ERR err; M4VSS3GPP_ClipContext *pClip; /**< shortcut */ - M4VIDEOEDITING_ClipProperties *pClipProperties; + M4VIDEOEDITING_ClipProperties *pClipProperties = M4OSA_NULL; M4OSA_Int32 iCts; M4OSA_UInt32 i; @@ -3171,20 +3426,25 @@ M4OSA_ERR M4VSS3GPP_intOpenClip( M4VSS3GPP_InternalEditContext *pC, * Set shortcut */ pClip = *hClip; - err = M4VSS3GPP_intClipOpen(pClip, pClipSettings, M4OSA_FALSE, M4OSA_FALSE, - M4OSA_FALSE); + if (pClipSettings->FileType == M4VIDEOEDITING_kFileType_ARGB8888 ) { + pClipProperties = &pClipSettings->ClipProperties; + pClip->pSettings = pClipSettings; + pClip->iEndTime = pClipSettings->uiEndCutTime; + } - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_intOpenClip: M4VSS3GPP_intClipOpen() returns 0x%x!", - err); + err = M4VSS3GPP_intClipOpen(pClip, pClipSettings, + M4OSA_FALSE, M4OSA_FALSE, M4OSA_FALSE); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intOpenClip: \ + M4VSS3GPP_intClipOpen() returns 0x%x!", err); M4VSS3GPP_intClipCleanUp(pClip); *hClip = M4OSA_NULL; return err; } - pClipProperties = &pClip->pSettings->ClipProperties; + if (pClipSettings->FileType != M4VIDEOEDITING_kFileType_ARGB8888 ) { + pClipProperties = &pClip->pSettings->ClipProperties; + } /** * Copy common 'silence frame stuff' to ClipContext */ @@ -3213,8 +3473,7 @@ M4OSA_ERR M4VSS3GPP_intOpenClip( M4VSS3GPP_InternalEditContext *pC, pClip->iActualVideoBeginCut = 0; pClip->iActualAudioBeginCut = 0; } - else - { + else if(pClipSettings->FileType != M4VIDEOEDITING_kFileType_ARGB8888) { if( M4SYS_kVideoUnknown != pC->ewc.VideoStreamType ) { /** @@ -3274,25 +3533,128 @@ M4OSA_ERR M4VSS3GPP_intOpenClip( M4VSS3GPP_InternalEditContext *pC, if( M4SYS_kVideoUnknown != pC->ewc.VideoStreamType ) { - /** - * Read the first Video AU of the clip */ - err = pClip->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu( - pClip->pReaderContext, - (M4_StreamHandler *)pClip->pVideoStream, &pClip->VideoAU); + if ((pClipSettings->FileType != M4VIDEOEDITING_kFileType_ARGB8888 )) { - if( M4WAR_NO_MORE_AU == err ) - { /** - * If we (already!) reach the end of the clip, we filter the error. - * It will be correctly managed at the first step. */ - err = M4NO_ERROR; + * Read the first Video AU of the clip */ + err = pClip->ShellAPI.m_pReaderDataIt->m_pFctGetNextAu( + pClip->pReaderContext, + (M4_StreamHandler *)pClip->pVideoStream, &pClip->VideoAU); + + if( M4WAR_NO_MORE_AU == err ) + { + /** + * If we (already!) reach the end of the clip, we filter the error. + * It will be correctly managed at the first step. 
*/ + err = M4NO_ERROR; + } + else if( M4NO_ERROR != err ) + { + M4OSA_TRACE1_1("M4VSS3GPP_intOpenClip: \ + m_pReaderDataIt->m_pFctGetNextAu() returns 0x%x!", err); + return err; + } + } else { + pClipProperties->uiVideoWidth = pClipProperties->uiStillPicWidth; + pClipProperties->uiVideoHeight = pClipProperties->uiStillPicHeight; } - else if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_intOpenClip: m_pReaderDataIt->m_pFctGetNextAu() returns 0x%x!", - err); - return err; + /* state check not to allocate buffer during save start */ + + + /******************************/ + /* Video resize management */ + /******************************/ + /** + * Compare input video size with output video size + to check if resize needed */ + if (((M4OSA_UInt32)pC->ewc.uiVideoWidth != + pClipProperties->uiVideoWidth) || + ((M4OSA_UInt32)pC->ewc.uiVideoHeight != + pClipProperties->uiVideoHeight)) { + if(pClip->m_pPreResizeFrame == M4OSA_NULL) { + /** + * Allocate the intermediate video plane that will + receive the decoded image before resizing */ + pClip->m_pPreResizeFrame = + (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( + 3*sizeof(M4VIFI_ImagePlane), M4VSS3GPP, + (M4OSA_Char *)"pPreResizeFrame"); + if (M4OSA_NULL == pClip->m_pPreResizeFrame) { + M4OSA_TRACE1_0("M4MCS_intPrepareVideoEncoder(): \ + unable to allocate m_pPreResizeFrame"); + return M4ERR_ALLOC; + } + + pClip->m_pPreResizeFrame[0].pac_data = M4OSA_NULL; + pClip->m_pPreResizeFrame[1].pac_data = M4OSA_NULL; + pClip->m_pPreResizeFrame[2].pac_data = M4OSA_NULL; + + /** + * Allocate the Y plane */ + pClip->m_pPreResizeFrame[0].u_topleft = 0; + pClip->m_pPreResizeFrame[0].u_width = + pClipProperties->uiVideoWidth; + pClip->m_pPreResizeFrame[0].u_height = + pClipProperties->uiVideoHeight; + pClip->m_pPreResizeFrame[0].u_stride = + pClip->m_pPreResizeFrame[0].u_width; + + pClip->m_pPreResizeFrame[0].pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc ( + pClip->m_pPreResizeFrame[0].u_stride * pClip->m_pPreResizeFrame[0].u_height, + M4MCS, (M4OSA_Char *)"m_pPreResizeFrame[0].pac_data"); + if (M4OSA_NULL == pClip->m_pPreResizeFrame[0].pac_data) { + M4OSA_TRACE1_0("M4MCS_intPrepareVideoEncoder(): \ + unable to allocate m_pPreResizeFrame[0].pac_data"); + free(pClip->m_pPreResizeFrame); + return M4ERR_ALLOC; + } + + /** + * Allocate the U plane */ + pClip->m_pPreResizeFrame[1].u_topleft = 0; + pClip->m_pPreResizeFrame[1].u_width = + pClip->m_pPreResizeFrame[0].u_width >> 1; + pClip->m_pPreResizeFrame[1].u_height = + pClip->m_pPreResizeFrame[0].u_height >> 1; + pClip->m_pPreResizeFrame[1].u_stride = + pClip->m_pPreResizeFrame[1].u_width; + + pClip->m_pPreResizeFrame[1].pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc ( + pClip->m_pPreResizeFrame[1].u_stride * pClip->m_pPreResizeFrame[1].u_height, + M4MCS, (M4OSA_Char *)"m_pPreResizeFrame[1].pac_data"); + if (M4OSA_NULL == pClip->m_pPreResizeFrame[1].pac_data) { + M4OSA_TRACE1_0("M4MCS_intPrepareVideoEncoder(): \ + unable to allocate m_pPreResizeFrame[1].pac_data"); + free(pClip->m_pPreResizeFrame[0].pac_data); + free(pClip->m_pPreResizeFrame); + return M4ERR_ALLOC; + } + + /** + * Allocate the V plane */ + pClip->m_pPreResizeFrame[2].u_topleft = 0; + pClip->m_pPreResizeFrame[2].u_width = + pClip->m_pPreResizeFrame[1].u_width; + pClip->m_pPreResizeFrame[2].u_height = + pClip->m_pPreResizeFrame[1].u_height; + pClip->m_pPreResizeFrame[2].u_stride = + pClip->m_pPreResizeFrame[2].u_width; + + pClip->m_pPreResizeFrame[2].pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc ( + 
pClip->m_pPreResizeFrame[2].u_stride * pClip->m_pPreResizeFrame[2].u_height, + M4MCS, (M4OSA_Char *)"m_pPreResizeFrame[2].pac_data"); + if (M4OSA_NULL == pClip->m_pPreResizeFrame[2].pac_data) { + M4OSA_TRACE1_0("M4MCS_intPrepareVideoEncoder(): \ + unable to allocate m_pPreResizeFrame[2].pac_data"); + free(pClip->m_pPreResizeFrame[0].pac_data); + free(pClip->m_pPreResizeFrame[1].pac_data); + free(pClip->m_pPreResizeFrame); + return M4ERR_ALLOC; + } + } } /** diff --git a/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c b/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c index f73487f..e6271ea 100755 --- a/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c +++ b/libvideoeditor/vss/src/M4VSS3GPP_EditVideo.c @@ -47,6 +47,13 @@ /*for transition behaviour*/ #include <math.h> +#include "M4AIR_API.h" +#include "M4VSS3GPP_Extended_API.h" +/** Determine absolute value of a. */ +#define M4xVSS_ABS(a) ( ( (a) < (0) ) ? (-(a)) : (a) ) +#define Y_PLANE_BORDER_VALUE 0x00 +#define U_PLANE_BORDER_VALUE 0x80 +#define V_PLANE_BORDER_VALUE 0x80 /************************************************************************/ /* Static local functions */ @@ -75,6 +82,26 @@ static M4OSA_Void M4VSS3GPP_intGetMPEG4Gov( M4OSA_MemAddr8 pAuDataBuffer, M4OSA_UInt32 *pCtsSec ); static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, M4OSA_UInt32 uiWidth, M4OSA_UInt32 uiHeight ); +static M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420( + M4OSA_Void* pFileIn, M4OSA_FileReadPointer* pFileReadPtr, + M4VIFI_ImagePlane* pImagePlanes, + M4OSA_UInt32 width,M4OSA_UInt32 height); +static M4OSA_ERR M4VSS3GPP_intApplyRenderingMode( + M4VSS3GPP_InternalEditContext *pC, + M4xVSS_MediaRendering renderingMode, + M4VIFI_ImagePlane* pInplane, + M4VIFI_ImagePlane* pOutplane); + +static M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 ( + M4VSS3GPP_InternalEditContext *pC, + M4VSS3GPP_ClipContext* pClipCtxt); +static M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect( + M4VSS3GPP_InternalEditContext *pC, + M4VSS3GPP_ClipContext* pClipCtxt, + M4OSA_Bool bIsClip1, + M4VIFI_ImagePlane *pResizePlane, + M4VIFI_ImagePlane *pPlaneNoResize, + M4VIFI_ImagePlane *pPlaneOut); /** ****************************************************************************** @@ -381,6 +408,19 @@ M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC ) M4OSA_TRACE3_0( "M4VSS3GPP_intEditStepVideo DECODE_ENCODE / BEGIN_CUT"); + if ((pC->pC1->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) && + (M4OSA_FALSE == + pC->pC1->pSettings->ClipProperties.bSetImageData)) { + + err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1); + if( M4NO_ERROR != err ) { + M4OSA_TRACE1_1( + "M4VSS3GPP_intEditStepVideo: DECODE_ENCODE:\ + M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err); + return err; + } + } /** * Decode the video up to the target time (will jump to the previous RAP if needed ) */ @@ -496,6 +536,19 @@ M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC ) /** * Decode the clip1 video up to the target time (will jump to the previous RAP if needed */ + if ((pC->pC1->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) && + (M4OSA_FALSE == + pC->pC1->pSettings->ClipProperties.bSetImageData)) { + + err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC1); + if( M4NO_ERROR != err ) { + M4OSA_TRACE1_1( + "M4VSS3GPP_intEditStepVideo: TRANSITION:\ + M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err); + return err; + } + } // Decorrelate input and output encoding timestamp to handle encoder prefetch err = 
M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC1, (M4OSA_Int32)pC->ewc.dInputVidCts); @@ -522,6 +575,20 @@ M4OSA_ERR M4VSS3GPP_intEditStepVideo( M4VSS3GPP_InternalEditContext *pC ) /** * Decode the clip2 video up to the target time (will jump to the previous RAP if needed) */ + if ((pC->pC2->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) && + (M4OSA_FALSE == + pC->pC2->pSettings->ClipProperties.bSetImageData)) { + + err = M4VSS3GPP_intSetYuv420PlaneFromARGB888(pC, pC->pC2); + if( M4NO_ERROR != err ) { + M4OSA_TRACE1_1( + "M4VSS3GPP_intEditStepVideo: TRANSITION:\ + M4VSS3GPP_intSetYuv420PlaneFromARGB888 err=%x", err); + return err; + } + } + // Decorrelate input and output encoding timestamp to handle encoder prefetch err = M4VSS3GPP_intClipDecodeVideoUpToCts(pC->pC2, (M4OSA_Int32)pC->ewc.dInputVidCts); @@ -670,6 +737,8 @@ static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( * Open second clip for transition, if not yet opened */ if( M4OSA_NULL == pC->pC2 ) { + pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE; + err = M4VSS3GPP_intOpenClip(pC, &pC->pC2, &pC->pClipList[pC->uiCurrentClip + 1]); @@ -720,8 +789,10 @@ static M4OSA_ERR M4VSS3GPP_intCheckVideoMode( pC->bTransitionEffect = M4OSA_FALSE; /* If there is an effect we go to decode/encode mode */ - if ((pC->nbActiveEffects > 0) ||(pC->nbActiveEffects1 > 0)) - { + if((pC->nbActiveEffects > 0) || (pC->nbActiveEffects1 > 0) || + (pC->pC1->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) || + (pC->pC1->pSettings->bTranscodingRequired == M4OSA_TRUE)) { pC->Vstate = M4VSS3GPP_kEditVideoState_DECODE_ENCODE; } /* We do a begin cut, except if already done (time is not progressing because we want @@ -976,16 +1047,25 @@ M4OSA_ERR M4VSS3GPP_intProcessAU( M4WRITER_Context pContext, M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut ) { - M4OSA_ERR err; - M4_MediaTime t; + M4OSA_ERR err = M4NO_ERROR; + M4_MediaTime ts; M4VIFI_ImagePlane *pTmp = M4OSA_NULL; + M4VIFI_ImagePlane *pLastDecodedFrame = M4OSA_NULL ; + M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL; M4VIFI_ImagePlane pTemp1[3],pTemp2[3]; - M4OSA_UInt32 i =0; + M4VIFI_ImagePlane pTempPlaneClip1[3],pTempPlaneClip2[3]; + M4OSA_UInt32 i = 0; + /** * VPP context is actually the VSS3GPP context */ M4VSS3GPP_InternalEditContext *pC = (M4VSS3GPP_InternalEditContext *)pContext; - pTemp1[0].pac_data = pTemp2[0].pac_data = M4OSA_NULL; + + memset((void *)pTemp1, 0, 3*sizeof(M4VIFI_ImagePlane)); + memset((void *)pTemp2, 0, 3*sizeof(M4VIFI_ImagePlane)); + memset((void *)pTempPlaneClip1, 0, 3*sizeof(M4VIFI_ImagePlane)); + memset((void *)pTempPlaneClip2, 0, 3*sizeof(M4VIFI_ImagePlane)); + /** * Reset VPP error remembered in context */ pC->ewc.VppError = M4NO_ERROR; @@ -1098,134 +1178,84 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn, } /** - * Compute the time in the clip1 base: t = to - Offset */ + * Compute the time in the clip1 base: ts = to - Offset */ // Decorrelate input and output encoding timestamp to handle encoder prefetch - t = pC->ewc.dInputVidCts - pC->pC1->iVoffset; + ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset; /** * Render Clip1 */ if( pC->pC1->isRenderDup == M4OSA_FALSE ) { - if(pC->nbActiveEffects > 0) - { - err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt, - &t, pTemp1, - M4OSA_TRUE); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \ - returning M4NO_ERROR", err); - pC->ewc.VppError = err; - 
return M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) */ - } - pC->bIssecondClip = M4OSA_FALSE; - err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp1 ,pC->yuv1 ); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x, \ - returning M4NO_ERROR", err); - pC->ewc.VppError = err; - return M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) */ - } - pC->pC1->lastDecodedPlane = pTemp1; - } - else - { - err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC1->pViDecCtxt, - &t, pC->yuv1, - M4OSA_TRUE); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C1) returns 0x%x, \ - returning M4NO_ERROR", err); - pC->ewc.VppError = err; - return M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) */ - } - pC->pC1->lastDecodedPlane = pC->yuv1; + pC->bIssecondClip = M4OSA_FALSE; + + err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC1, M4OSA_TRUE, + pTempPlaneClip1, pTemp1, + pPlaneOut); + if ((M4NO_ERROR != err) && + (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) { + M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ + M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err); + pC->ewc.VppError = err; + /** Return no error to the encoder core + * else it may leak in some situations.*/ + return M4NO_ERROR; } - pC->pC1->iVideoRenderCts = (M4OSA_Int32)t; } - else - { - /* Copy last decoded plane to output plane */ - memcpy((void *)pTmp[0].pac_data, - (void *)pC->pC1->lastDecodedPlane[0].pac_data, - (pTmp[0].u_height * pTmp[0].u_width)); - memcpy((void *)pTmp[1].pac_data, - (void *)pC->pC1->lastDecodedPlane[1].pac_data, - (pTmp[1].u_height * pTmp[1].u_width)); - memcpy((void *)pTmp[2].pac_data, - (void *)pC->pC1->lastDecodedPlane[2].pac_data, - (pTmp[2].u_height * pTmp[2].u_width)); + if ((pC->pC1->isRenderDup == M4OSA_TRUE) || + (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) { + pTmp = pC->yuv1; + if (pC->pC1->lastDecodedPlane != M4NO_ERROR) { + /* Copy last decoded plane to output plane */ + memcpy((void *)pTmp[0].pac_data, + (void *)pC->pC1->lastDecodedPlane[0].pac_data, + (pTmp[0].u_height * pTmp[0].u_width)); + memcpy((void *)pTmp[1].pac_data, + (void *)pC->pC1->lastDecodedPlane[1].pac_data, + (pTmp[1].u_height * pTmp[1].u_width)); + memcpy((void *)pTmp[2].pac_data, + (void *)pC->pC1->lastDecodedPlane[2].pac_data, + (pTmp[2].u_height * pTmp[2].u_width)); + } pC->pC1->lastDecodedPlane = pTmp; } /** - * Compute the time in the clip2 base: t = to - Offset */ + * Compute the time in the clip2 base: ts = to - Offset */ // Decorrelate input and output encoding timestamp to handle encoder prefetch - t = pC->ewc.dInputVidCts - pC->pC2->iVoffset; + ts = pC->ewc.dInputVidCts - pC->pC2->iVoffset; /** * Render Clip2 */ if( pC->pC2->isRenderDup == M4OSA_FALSE ) { - if(pC->nbActiveEffects1 > 0) - { - err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt, - &t, pTemp2, - M4OSA_TRUE); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \ - returning M4NO_ERROR", err); - pC->ewc.VppError = err; - return M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) 
*/ - } - pC->bIssecondClip = M4OSA_TRUE; - err = M4VSS3GPP_intApplyVideoEffect(pC, pTemp2 ,pC->yuv2); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x, \ - returning M4NO_ERROR", err); - pC->ewc.VppError = err; - return M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) */ - } - pC->pC2->lastDecodedPlane = pTemp2; - } - else - { - err = pC->pC2->ShellAPI.m_pVideoDecoder->m_pFctRender(pC->pC2->pViDecCtxt, - &t, pC->yuv2, - M4OSA_TRUE); - if (M4NO_ERROR != err) - { - M4OSA_TRACE1_1("M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender(C2) returns 0x%x, \ - returning M4NO_ERROR", err); - pC->ewc.VppError = err; - return M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) */ - } - pC->pC2->lastDecodedPlane = pC->yuv2; + err = M4VSS3GPP_intRenderFrameWithEffect(pC, pC->pC2, M4OSA_FALSE, + pTempPlaneClip2, pTemp2, + pPlaneOut); + if ((M4NO_ERROR != err) && + (M4WAR_VIDEORENDERER_NO_NEW_FRAME != err)) { + M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ + M4VSS3GPP_intRenderFrameWithEffect returns 0x%x", err); + pC->ewc.VppError = err; + /** Return no error to the encoder core + * else it may leak in some situations.*/ + return M4NO_ERROR; } - pC->pC2->iVideoRenderCts = (M4OSA_Int32)t; } - else - { - /* Copy last decoded plane to output plane */ - memcpy((void *)pTmp[0].pac_data, - (void *)pC->pC2->lastDecodedPlane[0].pac_data, - (pTmp[0].u_height * pTmp[0].u_width)); - memcpy((void *)pTmp[1].pac_data, - (void *)pC->pC2->lastDecodedPlane[1].pac_data, - (pTmp[1].u_height * pTmp[1].u_width)); - memcpy((void *)pTmp[2].pac_data, - (void *)pC->pC2->lastDecodedPlane[2].pac_data, - (pTmp[2].u_height * pTmp[2].u_width)); + if ((pC->pC2->isRenderDup == M4OSA_TRUE) || + (M4WAR_VIDEORENDERER_NO_NEW_FRAME == err)) { + pTmp = pC->yuv2; + if (pC->pC2->lastDecodedPlane != M4NO_ERROR) { + /* Copy last decoded plane to output plane */ + memcpy((void *)pTmp[0].pac_data, + (void *)pC->pC2->lastDecodedPlane[0].pac_data, + (pTmp[0].u_height * pTmp[0].u_width)); + memcpy((void *)pTmp[1].pac_data, + (void *)pC->pC2->lastDecodedPlane[1].pac_data, + (pTmp[1].u_height * pTmp[1].u_width)); + memcpy((void *)pTmp[2].pac_data, + (void *)pC->pC2->lastDecodedPlane[2].pac_data, + (pTmp[2].u_height * pTmp[2].u_width)); + } pC->pC2->lastDecodedPlane = pTmp; } @@ -1245,117 +1275,278 @@ M4OSA_ERR M4VSS3GPP_intVPP( M4VPP_Context pContext, M4VIFI_ImagePlane *pPlaneIn, } for (i=0; i < 3; i++) { - if (pTemp2[i].pac_data != M4OSA_NULL) - { + if(pTempPlaneClip2[i].pac_data != M4OSA_NULL) { + free(pTempPlaneClip2[i].pac_data); + pTempPlaneClip2[i].pac_data = M4OSA_NULL; + } + + if(pTempPlaneClip1[i].pac_data != M4OSA_NULL) { + free(pTempPlaneClip1[i].pac_data); + pTempPlaneClip1[i].pac_data = M4OSA_NULL; + } + + if (pTemp2[i].pac_data != M4OSA_NULL) { free(pTemp2[i].pac_data); pTemp2[i].pac_data = M4OSA_NULL; } - - if (pTemp1[i].pac_data != M4OSA_NULL) - { - free(pTemp1[i].pac_data); - pTemp1[i].pac_data = M4OSA_NULL; - } + if (pTemp1[i].pac_data != M4OSA_NULL) { + free(pTemp1[i].pac_data); + pTemp1[i].pac_data = M4OSA_NULL; } + } } /** **************** No Transition case ****************/ else { + M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO transition case"); /** - * Check if there is a filter */ - if( pC->nbActiveEffects > 0 ) - { + * Compute the time in the clip base: ts = to - Offset */ + ts = pC->ewc.dInputVidCts - pC->pC1->iVoffset; + /** + * Render */ + if (pC->pC1->isRenderDup == M4OSA_FALSE) { + 
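/*
 * Illustrative aside (not part of this change, names are hypothetical):
 * the isRenderDup == M4OSA_TRUE paths in this function repeat the previous
 * frame by copying the three planes of lastDecodedPlane one by one.
 * Assuming tightly packed YUV 4:2:0 planes (stride == width), a minimal
 * standalone equivalent of that copy is:
 *
 *   #include <string.h>
 *
 *   typedef struct { unsigned char *data; unsigned w, h; } plane_t;
 *
 *   // Y is w x h, U and V are (w/2) x (h/2); copy each plane verbatim.
 *   static void yuv420_copy(plane_t dst[3], const plane_t src[3])
 *   {
 *       for (int i = 0; i < 3; i++)
 *           memcpy(dst[i].data, src[i].data, (size_t)src[i].w * src[i].h);
 *   }
 */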
M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup false"); /** - * If we do modify the image, we need an intermediate image plane */ - if( M4OSA_NULL == pC->yuv1[0].pac_data ) + * Check if resizing is needed */ + if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) { + if ((pC->pC1->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) && + (pC->nbActiveEffects == 0) && + (pC->pC1->bGetYuvDataFromDecoder == M4OSA_FALSE)) { + err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pC->pC1->pViDecCtxt, + M4DECODER_kOptionID_EnableYuvWithEffect, + (M4OSA_DataOption)M4OSA_TRUE); + if (M4NO_ERROR == err ) { + err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( + pC->pC1->pViDecCtxt, &ts, + pPlaneOut, M4OSA_TRUE); + } + } else { + if (pC->pC1->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) { + err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pC->pC1->pViDecCtxt, + M4DECODER_kOptionID_EnableYuvWithEffect, + (M4OSA_DataOption)M4OSA_FALSE); + } + if (M4NO_ERROR == err) { + err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( + pC->pC1->pViDecCtxt, &ts, + pC->pC1->m_pPreResizeFrame, M4OSA_TRUE); + } + } + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ + m_pFctRender() returns error 0x%x", err); + pC->ewc.VppError = err; + return M4NO_ERROR; + } + + if (pC->nbActiveEffects > 0) { + pC->pC1->bGetYuvDataFromDecoder = M4OSA_TRUE; + /** + * If we do modify the image, we need an intermediate + * image plane */ + if (M4OSA_NULL == pTemp1[0].pac_data) { + err = M4VSS3GPP_intAllocateYUV420(pTemp1, + pC->pC1->m_pPreResizeFrame[0].u_width, + pC->pC1->m_pPreResizeFrame[0].u_height); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ + M4VSS3GPP_intAllocateYUV420 error 0x%x", err); + pC->ewc.VppError = err; + return M4NO_ERROR; + } + } + err = M4VSS3GPP_intApplyVideoEffect(pC, + pC->pC1->m_pPreResizeFrame,pTemp1); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ + M4VSS3GPP_intApplyVideoEffect() error 0x%x", err); + pC->ewc.VppError = err; + return M4NO_ERROR; + } + pDecoderRenderFrame= pTemp1; + + } else { + pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame; + } + + pTmp = pPlaneOut; + if ((pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE) || + (pC->pC1->pSettings->FileType != + M4VIDEOEDITING_kFileType_ARGB8888)) { + + err = M4VSS3GPP_intApplyRenderingMode(pC, + pC->pC1->pSettings->xVSS.MediaRendering, + pDecoderRenderFrame, pTmp); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intVPP: \ + M4VSS3GPP_intApplyRenderingMode) error 0x%x ", err); + pC->ewc.VppError = err; + return M4NO_ERROR; + } + } + + if ((pC->pC1->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) && + (pC->nbActiveEffects == 0) && + (pC->pC1->bGetYuvDataFromDecoder == M4OSA_TRUE)) { + + err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pC->pC1->pViDecCtxt, + M4DECODER_kOptionID_YuvWithEffectNonContiguous, + (M4OSA_DataOption)pTmp); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } + pC->pC1->bGetYuvDataFromDecoder = M4OSA_FALSE; + } + } + else { - err = - M4VSS3GPP_intAllocateYUV420(pC->yuv1, pC->ewc.uiVideoWidth, - pC->ewc.uiVideoHeight); + M4OSA_TRACE3_0("M4VSS3GPP_intVPP: NO resize required"); + if (pC->nbActiveEffects > 0) { + /** If we do modify the image, we need an + * intermediate image plane */ + if (M4OSA_NULL == pTemp1[0].pac_data) { + err = M4VSS3GPP_intAllocateYUV420(pTemp1, + pC->ewc.uiVideoWidth, + pC->ewc.uiVideoHeight); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } + } + 
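/*
 * pDecoderRenderFrame points at whichever buffer now holds the frame that
 * M4VSS3GPP_intApplyRenderingMode will resize or letterbox below: the
 * effect output (pTemp1) when at least one effect ran, or
 * m_pPreResizeFrame when the decoded frame is used unchanged.
 * (Descriptive note inferred from the surrounding code.)
 */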
pDecoderRenderFrame = pTemp1; + } + else { + pDecoderRenderFrame = pPlaneOut; + } - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_intVPP: M4VSS3GPP_intAllocateYUV420 returns 0x%x,\ - returning M4NO_ERROR", - err); + pTmp = pPlaneOut; + err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( + pC->pC1->pViDecCtxt, &ts, + pDecoderRenderFrame, M4OSA_TRUE); + if (M4NO_ERROR != err) { pC->ewc.VppError = err; - return - M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) */ + return M4NO_ERROR; + } + + if (pC->nbActiveEffects > 0) { + err = M4VSS3GPP_intApplyVideoEffect(pC, + pDecoderRenderFrame,pPlaneOut); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } } } - /** - * The image is rendered in the intermediate image plane */ - pTmp = pC->yuv1; - } - else - { - /** - * No filter, the image is directly rendered in pPlaneOut */ - pTmp = pPlaneOut; - } + pC->pC1->lastDecodedPlane = pTmp; + pC->pC1->iVideoRenderCts = (M4OSA_Int32)ts; - /** - * Compute the time in the clip base: t = to - Offset */ - // Decorrelate input and output encoding timestamp to handle encoder prefetch - t = pC->ewc.dInputVidCts - pC->pC1->iVoffset; + } else { + M4OSA_TRACE3_0("M4VSS3GPP_intVPP: renderdup true"); - if( pC->pC1->isRenderDup == M4OSA_FALSE ) - { - err = pC->pC1->ShellAPI.m_pVideoDecoder->m_pFctRender( - pC->pC1->pViDecCtxt, &t, pTmp, M4OSA_TRUE); + if (M4OSA_NULL != pC->pC1->m_pPreResizeFrame) { + /** + * Copy last decoded plane to output plane */ + memcpy((void *)pC->pC1->m_pPreResizeFrame[0].pac_data, + (void *)pC->pC1->lastDecodedPlane[0].pac_data, + (pC->pC1->m_pPreResizeFrame[0].u_height * pC->pC1->m_pPreResizeFrame[0].u_width)); - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_intVPP: m_pVideoDecoder->m_pFctRender returns 0x%x,\ - returning M4NO_ERROR", - err); - pC->ewc.VppError = err; - return - M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) 
*/ + memcpy((void *)pC->pC1->m_pPreResizeFrame[1].pac_data, + (void *)pC->pC1->lastDecodedPlane[1].pac_data, + (pC->pC1->m_pPreResizeFrame[1].u_height * pC->pC1->m_pPreResizeFrame[1].u_width)); + + memcpy((void *)pC->pC1->m_pPreResizeFrame[2].pac_data, + (void *)pC->pC1->lastDecodedPlane[2].pac_data, + (pC->pC1->m_pPreResizeFrame[2].u_height * pC->pC1->m_pPreResizeFrame[2].u_width)); + + if(pC->nbActiveEffects > 0) { + /** + * If we do modify the image, we need an + * intermediate image plane */ + if (M4OSA_NULL == pTemp1[0].pac_data) { + err = M4VSS3GPP_intAllocateYUV420(pTemp1, + pC->pC1->m_pPreResizeFrame[0].u_width, + pC->pC1->m_pPreResizeFrame[0].u_height); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } + } + + err = M4VSS3GPP_intApplyVideoEffect(pC, + pC->pC1->m_pPreResizeFrame,pTemp1); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } + pDecoderRenderFrame= pTemp1; + + } else { + pDecoderRenderFrame = pC->pC1->m_pPreResizeFrame; + } + + pTmp = pPlaneOut; + err = M4VSS3GPP_intApplyRenderingMode(pC, + pC->pC1->pSettings->xVSS.MediaRendering, + pDecoderRenderFrame, pTmp); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } + } else { + + if (M4OSA_NULL == pTemp1[0].pac_data) { + err = M4VSS3GPP_intAllocateYUV420(pTemp1, + pC->ewc.uiVideoWidth, + pC->ewc.uiVideoHeight); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } + } + /** + * Copy last decoded plane to output plane */ + memcpy((void *)pLastDecodedFrame[0].pac_data, + (void *)pC->pC1->lastDecodedPlane[0].pac_data, + (pLastDecodedFrame[0].u_height * pLastDecodedFrame[0].u_width)); + + memcpy((void *)pLastDecodedFrame[1].pac_data, + (void *)pC->pC1->lastDecodedPlane[1].pac_data, + (pLastDecodedFrame[1].u_height * pLastDecodedFrame[1].u_width)); + + memcpy((void *)pLastDecodedFrame[2].pac_data, + (void *)pC->pC1->lastDecodedPlane[2].pac_data, + (pLastDecodedFrame[2].u_height * pLastDecodedFrame[2].u_width)); + + pTmp = pPlaneOut; + /** + * Check if there is a filter */ + if(pC->nbActiveEffects > 0) { + err = M4VSS3GPP_intApplyVideoEffect(pC, + pLastDecodedFrame, pTmp); + if (M4NO_ERROR != err) { + pC->ewc.VppError = err; + return M4NO_ERROR; + } + } } pC->pC1->lastDecodedPlane = pTmp; - pC->pC1->iVideoRenderCts = (M4OSA_Int32)t; - } - else - { - /* Copy last decoded plane to output plane */ - memcpy((void *)pTmp[0].pac_data, - (void *)pC->pC1->lastDecodedPlane[0].pac_data, - (pTmp[0].u_height * pTmp[0].u_width)); - memcpy((void *)pTmp[1].pac_data, - (void *)pC->pC1->lastDecodedPlane[1].pac_data, - (pTmp[1].u_height * pTmp[1].u_width)); - memcpy((void *)pTmp[2].pac_data, - (void *)pC->pC1->lastDecodedPlane[2].pac_data, - (pTmp[2].u_height * pTmp[2].u_width)); - pC->pC1->lastDecodedPlane = pTmp; } - M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", t); - - /** - * Apply the clip1 effect */ - // if (pC->iClip1ActiveEffect >= 0) - if( pC->nbActiveEffects > 0 ) - { - err = M4VSS3GPP_intApplyVideoEffect(pC,/*1,*/ pC->yuv1, pPlaneOut); + M4OSA_TRACE3_1("M4VSS3GPP_intVPP: Rendered at CTS %.3f", ts); - if( M4NO_ERROR != err ) - { - M4OSA_TRACE1_1( - "M4VSS3GPP_intVPP: M4VSS3GPP_intApplyVideoEffect(1) returns 0x%x,\ - returning M4NO_ERROR", - err); - pC->ewc.VppError = err; - return - M4NO_ERROR; /**< Return no error to the encoder core - (else it may leak in some situations...) 
*/ + for(i=0;i<3;i++) { + if(pTemp1[i].pac_data != M4OSA_NULL) { + free(pTemp1[i].pac_data); + pTemp1[i].pac_data = M4OSA_NULL; } } } @@ -1394,6 +1585,7 @@ M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC, M4VIFI_ImagePlane *pPlaneTempIn; M4VIFI_ImagePlane *pPlaneTempOut; + M4VIFI_ImagePlane pTempYuvPlane[3]; M4OSA_UInt8 i; M4OSA_UInt8 NumActiveEffects =0; @@ -1408,12 +1600,14 @@ M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC, NumActiveEffects = pC->nbActiveEffects; } + memset((void *)pTempYuvPlane, 0, 3*sizeof(M4VIFI_ImagePlane)); + /** * Allocate temporary plane if needed RC */ - if (M4OSA_NULL == pC->yuv4[0].pac_data && NumActiveEffects > 1) + if (M4OSA_NULL == pTempYuvPlane[0].pac_data && NumActiveEffects > 1) { - err = M4VSS3GPP_intAllocateYUV420(pC->yuv4, pC->ewc.uiVideoWidth, - pC->ewc.uiVideoHeight); + err = M4VSS3GPP_intAllocateYUV420(pTempYuvPlane, pPlaneOut->u_width, + pPlaneOut->u_height); if( M4NO_ERROR != err ) { @@ -1431,7 +1625,7 @@ M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC, if (NumActiveEffects % 2 == 0) { pPlaneTempIn = pPlaneIn; - pPlaneTempOut = pC->yuv4; + pPlaneTempOut = pTempYuvPlane; } else { @@ -1574,13 +1768,20 @@ M4VSS3GPP_intApplyVideoEffect( M4VSS3GPP_InternalEditContext *pC, if (((i % 2 == 0) && (NumActiveEffects % 2 == 0)) || ((i % 2 != 0) && (NumActiveEffects % 2 != 0))) { - pPlaneTempIn = pC->yuv4; + pPlaneTempIn = pTempYuvPlane; pPlaneTempOut = pPlaneOut; } else { pPlaneTempIn = pPlaneOut; - pPlaneTempOut = pC->yuv4; + pPlaneTempOut = pTempYuvPlane; + } + } + + for(i=0; i<3; i++) { + if(pTempYuvPlane[i].pac_data != M4OSA_NULL) { + free(pTempYuvPlane[i].pac_data); + pTempYuvPlane[i].pac_data = M4OSA_NULL; } } @@ -1908,7 +2109,11 @@ M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, M4OSA_Int32 t = (M4OSA_Int32)pC->ewc.dInputVidCts; uiClipIndex = pC->uiCurrentClip; - pClip = pC->pC1; + if (uiClipNumber == 1) { + pClip = pC->pC1; + } else { + pClip = pC->pC2; + } /** * Shortcuts for code readability */ Off = pClip->iVoffset; @@ -1948,13 +2153,8 @@ M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, { pC->m_bClipExternalHasStarted = M4OSA_TRUE; } - - /** - * The third effect has the highest priority, then the - * second one, then the first one. Hence, as soon as we - * found an active effect, we can get out of this loop. 
- */ } + } else { @@ -1983,9 +2183,14 @@ M4VSS3GPP_intCheckVideoEffects( M4VSS3GPP_InternalEditContext *pC, * Hence, as soon as we found an active effect, we can get out of this loop */ } } + if (M4VIDEOEDITING_kH264 != + pC->pC1->pSettings->ClipProperties.VideoStreamType) { + + // For Mpeg4 and H263 clips, full decode encode not required + pC->m_bClipExternalHasStarted = M4OSA_FALSE; + } } } - if(1==uiClipNumber) { /** @@ -2160,15 +2365,8 @@ M4OSA_ERR M4VSS3GPP_intCreateVideoEncoder( M4VSS3GPP_InternalEditContext *pC ) if( pC->bIsMMS == M4OSA_FALSE ) { - /* Compute max bitrate depending on input files bitrates and transitions */ - if( pC->Vstate == M4VSS3GPP_kEditVideoState_TRANSITION ) - { - EncParams.Bitrate = pC->ewc.uiVideoBitrate; - } - else - { - EncParams.Bitrate = pC->ewc.uiVideoBitrate; - } + EncParams.Bitrate = pC->xVSS.outputVideoBitrate; + } else { @@ -2497,6 +2695,8 @@ static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, M4OSA_TRACE1_0( "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[1].pac_data,\ returning M4ERR_ALLOC"); + free((void *)pPlanes[0].pac_data); + pPlanes[0].pac_data = M4OSA_NULL; return M4ERR_ALLOC; } @@ -2512,11 +2712,774 @@ static M4OSA_ERR M4VSS3GPP_intAllocateYUV420( M4VIFI_ImagePlane *pPlanes, M4OSA_TRACE1_0( "M4VSS3GPP_intAllocateYUV420: unable to allocate pPlanes[2].pac_data,\ returning M4ERR_ALLOC"); + free((void *)pPlanes[0].pac_data); + free((void *)pPlanes[1].pac_data); + pPlanes[0].pac_data = M4OSA_NULL; + pPlanes[1].pac_data = M4OSA_NULL; return M4ERR_ALLOC; } + memset((void *)pPlanes[0].pac_data, 0, pPlanes[0].u_stride*pPlanes[0].u_height); + memset((void *)pPlanes[1].pac_data, 0, pPlanes[1].u_stride*pPlanes[1].u_height); + memset((void *)pPlanes[2].pac_data, 0, pPlanes[2].u_stride*pPlanes[2].u_height); /** * Return */ M4OSA_TRACE3_0("M4VSS3GPP_intAllocateYUV420: returning M4NO_ERROR"); return M4NO_ERROR; } + +/** +****************************************************************************** +* M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, +* M4OSA_FileReadPointer* pFileReadPtr, +* M4VIFI_ImagePlane* pImagePlanes, +* M4OSA_UInt32 width, +* M4OSA_UInt32 height); +* @brief It Coverts and resizes a ARGB8888 image to YUV420 +* @note +* @param pFileIn (IN) The ARGB888 input file +* @param pFileReadPtr (IN) Pointer on filesystem functions +* @param pImagePlanes (IN/OUT) Pointer on YUV420 output planes allocated by the user. 
+* ARGB8888 image will be converted and resized to output +* YUV420 plane size +* @param width (IN) width of the ARGB8888 +* @param height (IN) height of the ARGB8888 +* @return M4NO_ERROR: No error +* @return M4ERR_ALLOC: memory error +* @return M4ERR_PARAMETER: At least one of the function parameters is null +****************************************************************************** +*/ + +M4OSA_ERR M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420(M4OSA_Void* pFileIn, + M4OSA_FileReadPointer* pFileReadPtr, + M4VIFI_ImagePlane* pImagePlanes, + M4OSA_UInt32 width,M4OSA_UInt32 height) { + M4OSA_Context pARGBIn; + M4VIFI_ImagePlane rgbPlane1 ,rgbPlane2; + M4OSA_UInt32 frameSize_argb = width * height * 4; + M4OSA_UInt32 frameSize_rgb888 = width * height * 3; + M4OSA_UInt32 i = 0,j= 0; + M4OSA_ERR err = M4NO_ERROR; + + M4OSA_UInt8 *pArgbPlane = + (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, + M4VS, (M4OSA_Char*)"argb data"); + if (pArgbPlane == M4OSA_NULL) { + M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420: \ + Failed to allocate memory for ARGB plane"); + return M4ERR_ALLOC; + } + + /* Get file size */ + err = pFileReadPtr->openRead(&pARGBIn, pFileIn, M4OSA_kFileRead); + if (err != M4NO_ERROR) { + M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 : \ + Can not open input ARGB8888 file %s, error: 0x%x\n",pFileIn, err); + free(pArgbPlane); + pArgbPlane = M4OSA_NULL; + goto cleanup; + } + + err = pFileReadPtr->readData(pARGBIn,(M4OSA_MemAddr8)pArgbPlane, + &frameSize_argb); + if (err != M4NO_ERROR) { + M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ + Can not read ARGB8888 file %s, error: 0x%x\n",pFileIn, err); + pFileReadPtr->closeRead(pARGBIn); + free(pArgbPlane); + pArgbPlane = M4OSA_NULL; + goto cleanup; + } + + err = pFileReadPtr->closeRead(pARGBIn); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_2("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ + Can not close ARGB8888 file %s, error: 0x%x\n",pFileIn, err); + free(pArgbPlane); + pArgbPlane = M4OSA_NULL; + goto cleanup; + } + + rgbPlane1.pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, + M4VS, (M4OSA_Char*)"RGB888 plane1"); + if(rgbPlane1.pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 \ + Failed to allocate memory for rgb plane1"); + free(pArgbPlane); + return M4ERR_ALLOC; + } + + rgbPlane1.u_height = height; + rgbPlane1.u_width = width; + rgbPlane1.u_stride = width*3; + rgbPlane1.u_topleft = 0; + + + /** Remove the alpha channel */ + for (i=0, j = 0; i < frameSize_argb; i++) { + if ((i % 4) == 0) continue; + rgbPlane1.pac_data[j] = pArgbPlane[i]; + j++; + } + free(pArgbPlane); + + /** + * Check if resizing is required with color conversion */ + if(width != pImagePlanes->u_width || height != pImagePlanes->u_height) { + + frameSize_rgb888 = pImagePlanes->u_width * pImagePlanes->u_height * 3; + rgbPlane2.pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize_rgb888, M4VS, + (M4OSA_Char*)"rgb Plane2"); + if(rgbPlane2.pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("Failed to allocate memory for rgb plane2"); + free(rgbPlane1.pac_data); + return M4ERR_ALLOC; + } + rgbPlane2.u_height = pImagePlanes->u_height; + rgbPlane2.u_width = pImagePlanes->u_width; + rgbPlane2.u_stride = pImagePlanes->u_width*3; + rgbPlane2.u_topleft = 0; + + /* Resizing */ + err = M4VIFI_ResizeBilinearRGB888toRGB888(M4OSA_NULL, + &rgbPlane1, &rgbPlane2); + free(rgbPlane1.pac_data); + if(err != M4NO_ERROR) { + 
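/*
 * Note: rgbPlane1.pac_data has already been released above (the resize
 * consumes it whether or not it succeeds), so only rgbPlane2 remains to be
 * freed on this error path. As an aside, the "Remove the alpha channel"
 * loop earlier in this function is equivalent to the following per-pixel
 * form (illustrative sketch, hypothetical names, not part of this change):
 *
 *   #include <stddef.h>
 *
 *   // Drop the leading alpha byte of each ARGB8888 pixel, keeping R, G, B.
 *   static void argb8888_to_rgb888(const unsigned char *argb,
 *                                  unsigned char *rgb, size_t pixel_count)
 *   {
 *       for (size_t i = 0; i < pixel_count; i++) {
 *           rgb[3 * i + 0] = argb[4 * i + 1];   // R
 *           rgb[3 * i + 1] = argb[4 * i + 2];   // G
 *           rgb[3 * i + 2] = argb[4 * i + 3];   // B
 *       }
 *   }
 */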
M4OSA_TRACE1_1("error resizing RGB888 to RGB888: 0x%x\n", err); + free(rgbPlane2.pac_data); + return err; + } + + /*Converting Resized RGB888 to YUV420 */ + err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane2, pImagePlanes); + free(rgbPlane2.pac_data); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_1("error converting from RGB888 to YUV: 0x%x\n", err); + return err; + } + } else { + err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane1, pImagePlanes); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_1("error when converting from RGB to YUV: 0x%x\n", err); + } + free(rgbPlane1.pac_data); + } +cleanup: + M4OSA_TRACE3_0("M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 exit"); + return err; +} + +M4OSA_ERR M4VSS3GPP_intApplyRenderingMode(M4VSS3GPP_InternalEditContext *pC, + M4xVSS_MediaRendering renderingMode, + M4VIFI_ImagePlane* pInplane, + M4VIFI_ImagePlane* pOutplane) { + + M4OSA_ERR err = M4NO_ERROR; + M4AIR_Params airParams; + M4VIFI_ImagePlane pImagePlanesTemp[3]; + M4OSA_UInt32 i = 0; + + if (renderingMode == M4xVSS_kBlackBorders) { + memset((void *)pOutplane[0].pac_data, Y_PLANE_BORDER_VALUE, + (pOutplane[0].u_height*pOutplane[0].u_stride)); + memset((void *)pOutplane[1].pac_data, U_PLANE_BORDER_VALUE, + (pOutplane[1].u_height*pOutplane[1].u_stride)); + memset((void *)pOutplane[2].pac_data, V_PLANE_BORDER_VALUE, + (pOutplane[2].u_height*pOutplane[2].u_stride)); + } + + if (renderingMode == M4xVSS_kResizing) { + /** + * Call the resize filter. + * From the intermediate frame to the encoder image plane */ + err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, + pInplane, pOutplane); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ + M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err); + return err; + } + } else { + M4VIFI_ImagePlane* pPlaneTemp = M4OSA_NULL; + M4OSA_UInt8* pOutPlaneY = + pOutplane[0].pac_data + pOutplane[0].u_topleft; + M4OSA_UInt8* pOutPlaneU = + pOutplane[1].pac_data + pOutplane[1].u_topleft; + M4OSA_UInt8* pOutPlaneV = + pOutplane[2].pac_data + pOutplane[2].u_topleft; + M4OSA_UInt8* pInPlaneY = M4OSA_NULL; + M4OSA_UInt8* pInPlaneU = M4OSA_NULL; + M4OSA_UInt8* pInPlaneV = M4OSA_NULL; + + /* To keep media aspect ratio*/ + /* Initialize AIR Params*/ + airParams.m_inputCoord.m_x = 0; + airParams.m_inputCoord.m_y = 0; + airParams.m_inputSize.m_height = pInplane->u_height; + airParams.m_inputSize.m_width = pInplane->u_width; + airParams.m_outputSize.m_width = pOutplane->u_width; + airParams.m_outputSize.m_height = pOutplane->u_height; + airParams.m_bOutputStripe = M4OSA_FALSE; + airParams.m_outputOrientation = M4COMMON_kOrientationTopLeft; + + /** + Media rendering: Black borders*/ + if (renderingMode == M4xVSS_kBlackBorders) { + pImagePlanesTemp[0].u_width = pOutplane[0].u_width; + pImagePlanesTemp[0].u_height = pOutplane[0].u_height; + pImagePlanesTemp[0].u_stride = pOutplane[0].u_width; + pImagePlanesTemp[0].u_topleft = 0; + + pImagePlanesTemp[1].u_width = pOutplane[1].u_width; + pImagePlanesTemp[1].u_height = pOutplane[1].u_height; + pImagePlanesTemp[1].u_stride = pOutplane[1].u_width; + pImagePlanesTemp[1].u_topleft = 0; + + pImagePlanesTemp[2].u_width = pOutplane[2].u_width; + pImagePlanesTemp[2].u_height = pOutplane[2].u_height; + pImagePlanesTemp[2].u_stride = pOutplane[2].u_width; + pImagePlanesTemp[2].u_topleft = 0; + + /** + * Allocates plan in local image plane structure */ + pImagePlanesTemp[0].pac_data = + (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, + M4VS, (M4OSA_Char 
*)"pImagePlaneTemp Y") ; + if (pImagePlanesTemp[0].pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); + return M4ERR_ALLOC; + } + pImagePlanesTemp[1].pac_data = + (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, + M4VS, (M4OSA_Char *)"pImagePlaneTemp U") ; + if (pImagePlanesTemp[1].pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); + free(pImagePlanesTemp[0].pac_data); + return M4ERR_ALLOC; + } + pImagePlanesTemp[2].pac_data = + (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc( + pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height, + M4VS, (M4OSA_Char *)"pImagePlaneTemp V") ; + if (pImagePlanesTemp[2].pac_data == M4OSA_NULL) { + M4OSA_TRACE1_0("M4VSS3GPP_intApplyRenderingMode: Alloc Error"); + free(pImagePlanesTemp[0].pac_data); + free(pImagePlanesTemp[1].pac_data); + return M4ERR_ALLOC; + } + + pInPlaneY = pImagePlanesTemp[0].pac_data ; + pInPlaneU = pImagePlanesTemp[1].pac_data ; + pInPlaneV = pImagePlanesTemp[2].pac_data ; + + memset((void *)pImagePlanesTemp[0].pac_data, Y_PLANE_BORDER_VALUE, + (pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride)); + memset((void *)pImagePlanesTemp[1].pac_data, U_PLANE_BORDER_VALUE, + (pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride)); + memset((void *)pImagePlanesTemp[2].pac_data, V_PLANE_BORDER_VALUE, + (pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride)); + + M4OSA_UInt32 height = + (pInplane->u_height * pOutplane->u_width) /pInplane->u_width; + + if (height <= pOutplane->u_height) { + /** + * Black borders will be on the top and the bottom side */ + airParams.m_outputSize.m_width = pOutplane->u_width; + airParams.m_outputSize.m_height = height; + /** + * Number of lines at the top */ + pImagePlanesTemp[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height - + airParams.m_outputSize.m_height)>>1)) * + pImagePlanesTemp[0].u_stride; + pImagePlanesTemp[0].u_height = airParams.m_outputSize.m_height; + pImagePlanesTemp[1].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height - + (airParams.m_outputSize.m_height>>1)))>>1) * + pImagePlanesTemp[1].u_stride; + pImagePlanesTemp[1].u_height = + airParams.m_outputSize.m_height>>1; + pImagePlanesTemp[2].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height - + (airParams.m_outputSize.m_height>>1)))>>1) * + pImagePlanesTemp[2].u_stride; + pImagePlanesTemp[2].u_height = + airParams.m_outputSize.m_height>>1; + } else { + /** + * Black borders will be on the left and right side */ + airParams.m_outputSize.m_height = pOutplane->u_height; + airParams.m_outputSize.m_width = + (M4OSA_UInt32)((pInplane->u_width * pOutplane->u_height)/pInplane->u_height); + + pImagePlanesTemp[0].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width - + airParams.m_outputSize.m_width)>>1)); + pImagePlanesTemp[0].u_width = airParams.m_outputSize.m_width; + pImagePlanesTemp[1].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width - + (airParams.m_outputSize.m_width>>1)))>>1); + pImagePlanesTemp[1].u_width = airParams.m_outputSize.m_width>>1; + pImagePlanesTemp[2].u_topleft = + (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width - + (airParams.m_outputSize.m_width>>1)))>>1); + pImagePlanesTemp[2].u_width = airParams.m_outputSize.m_width>>1; + } + + /** + * Width and height have to be even */ + airParams.m_outputSize.m_width = + (airParams.m_outputSize.m_width>>1)<<1; + airParams.m_outputSize.m_height = + 
(airParams.m_outputSize.m_height>>1)<<1; + airParams.m_inputSize.m_width = + (airParams.m_inputSize.m_width>>1)<<1; + airParams.m_inputSize.m_height = + (airParams.m_inputSize.m_height>>1)<<1; + pImagePlanesTemp[0].u_width = + (pImagePlanesTemp[0].u_width>>1)<<1; + pImagePlanesTemp[1].u_width = + (pImagePlanesTemp[1].u_width>>1)<<1; + pImagePlanesTemp[2].u_width = + (pImagePlanesTemp[2].u_width>>1)<<1; + pImagePlanesTemp[0].u_height = + (pImagePlanesTemp[0].u_height>>1)<<1; + pImagePlanesTemp[1].u_height = + (pImagePlanesTemp[1].u_height>>1)<<1; + pImagePlanesTemp[2].u_height = + (pImagePlanesTemp[2].u_height>>1)<<1; + + /** + * Check that values are coherent */ + if (airParams.m_inputSize.m_height == + airParams.m_outputSize.m_height) { + airParams.m_inputSize.m_width = + airParams.m_outputSize.m_width; + } else if (airParams.m_inputSize.m_width == + airParams.m_outputSize.m_width) { + airParams.m_inputSize.m_height = + airParams.m_outputSize.m_height; + } + pPlaneTemp = pImagePlanesTemp; + } + + /** + * Media rendering: Cropping*/ + if (renderingMode == M4xVSS_kCropping) { + airParams.m_outputSize.m_height = pOutplane->u_height; + airParams.m_outputSize.m_width = pOutplane->u_width; + if ((airParams.m_outputSize.m_height * + airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width < + airParams.m_inputSize.m_height) { + /* Height will be cropped */ + airParams.m_inputSize.m_height = + (M4OSA_UInt32)((airParams.m_outputSize.m_height * + airParams.m_inputSize.m_width)/airParams.m_outputSize.m_width); + airParams.m_inputSize.m_height = + (airParams.m_inputSize.m_height>>1)<<1; + airParams.m_inputCoord.m_y = + (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_height - + airParams.m_inputSize.m_height))>>1); + } else { + /* Width will be cropped */ + airParams.m_inputSize.m_width = + (M4OSA_UInt32)((airParams.m_outputSize.m_width * + airParams.m_inputSize.m_height)/airParams.m_outputSize.m_height); + airParams.m_inputSize.m_width = + (airParams.m_inputSize.m_width>>1)<<1; + airParams.m_inputCoord.m_x = + (M4OSA_Int32)((M4OSA_Int32)((pInplane->u_width - + airParams.m_inputSize.m_width))>>1); + } + pPlaneTemp = pOutplane; + } + /** + * Call AIR functions */ + if (M4OSA_NULL == pC->m_air_context) { + err = M4AIR_create(&pC->m_air_context, M4AIR_kYUV420P); + if(err != M4NO_ERROR) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ + M4AIR_create returned error 0x%x", err); + goto cleanUp; + } + } + + err = M4AIR_configure(pC->m_air_context, &airParams); + if (err != M4NO_ERROR) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ + Error when configuring AIR: 0x%x", err); + M4AIR_cleanUp(pC->m_air_context); + goto cleanUp; + } + + err = M4AIR_get(pC->m_air_context, pInplane, pPlaneTemp); + if (err != M4NO_ERROR) { + M4OSA_TRACE1_1("M4VSS3GPP_intApplyRenderingMode: \ + Error when getting AIR plane: 0x%x", err); + M4AIR_cleanUp(pC->m_air_context); + goto cleanUp; + } + + if (renderingMode == M4xVSS_kBlackBorders) { + for (i=0; i<pOutplane[0].u_height; i++) { + memcpy((void *)pOutPlaneY, (void *)pInPlaneY, + pOutplane[0].u_width); + pInPlaneY += pOutplane[0].u_width; + pOutPlaneY += pOutplane[0].u_stride; + } + for (i=0; i<pOutplane[1].u_height; i++) { + memcpy((void *)pOutPlaneU, (void *)pInPlaneU, + pOutplane[1].u_width); + pInPlaneU += pOutplane[1].u_width; + pOutPlaneU += pOutplane[1].u_stride; + } + for (i=0; i<pOutplane[2].u_height; i++) { + memcpy((void *)pOutPlaneV, (void *)pInPlaneV, + pOutplane[2].u_width); + pInPlaneV += pOutplane[2].u_width; + pOutPlaneV += pOutplane[2].u_stride; + } + 
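/*
 * The M4xVSS_kBlackBorders path above scales the source to fit the output
 * while keeping its aspect ratio, centres it, and fills the remaining rows
 * or columns with Y = 0x00, U = V = 0x80 (black). A simplified standalone
 * sketch of the placement math (assumed equivalent in spirit, not a
 * line-for-line transcription of the code above; names are hypothetical):
 *
 *   // Fit (in_w x in_h) into (out_w x out_h), preserving aspect ratio.
 *   // Returns the scaled picture size and its top-left offset inside the
 *   // bordered output; sizes are kept even for 4:2:0 chroma subsampling.
 *   static void fit_black_borders(unsigned in_w, unsigned in_h,
 *                                 unsigned out_w, unsigned out_h,
 *                                 unsigned *scaled_w, unsigned *scaled_h,
 *                                 unsigned *off_x, unsigned *off_y)
 *   {
 *       unsigned h = (in_h * out_w) / in_w;     // scale to full output width
 *       if (h <= out_h) {                       // borders on top and bottom
 *           *scaled_w = out_w;
 *           *scaled_h = h & ~1u;
 *           *off_x = 0;
 *           *off_y = ((out_h - *scaled_h) / 2) & ~1u;
 *       } else {                                // borders on left and right
 *           unsigned w = (in_w * out_h) / in_h; // scale to full output height
 *           *scaled_w = w & ~1u;
 *           *scaled_h = out_h;
 *           *off_x = ((out_w - *scaled_w) / 2) & ~1u;
 *           *off_y = 0;
 *       }
 *   }
 */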
} + } +cleanUp: + if (renderingMode == M4xVSS_kBlackBorders) { + for (i=0; i<3; i++) { + if (pImagePlanesTemp[i].pac_data != M4OSA_NULL) { + free(pImagePlanesTemp[i].pac_data); + pImagePlanesTemp[i].pac_data = M4OSA_NULL; + } + } + } + return err; +} + +M4OSA_ERR M4VSS3GPP_intSetYuv420PlaneFromARGB888 ( + M4VSS3GPP_InternalEditContext *pC, + M4VSS3GPP_ClipContext* pClipCtxt) { + + M4OSA_ERR err= M4NO_ERROR; + + // Allocate memory for YUV plane + pClipCtxt->pPlaneYuv = + (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( + 3*sizeof(M4VIFI_ImagePlane), M4VS, + (M4OSA_Char*)"pPlaneYuv"); + + if (pClipCtxt->pPlaneYuv == M4OSA_NULL) { + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuv[0].u_height = + pClipCtxt->pSettings->ClipProperties.uiStillPicHeight; + pClipCtxt->pPlaneYuv[0].u_width = + pClipCtxt->pSettings->ClipProperties.uiStillPicWidth; + pClipCtxt->pPlaneYuv[0].u_stride = pClipCtxt->pPlaneYuv[0].u_width; + pClipCtxt->pPlaneYuv[0].u_topleft = 0; + + pClipCtxt->pPlaneYuv[0].pac_data = + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( + pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width * 1.5, + M4VS, (M4OSA_Char*)"imageClip YUV data"); + if (pClipCtxt->pPlaneYuv[0].pac_data == M4OSA_NULL) { + free(pClipCtxt->pPlaneYuv); + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuv[1].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; + pClipCtxt->pPlaneYuv[1].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1; + pClipCtxt->pPlaneYuv[1].u_stride = pClipCtxt->pPlaneYuv[1].u_width; + pClipCtxt->pPlaneYuv[1].u_topleft = 0; + pClipCtxt->pPlaneYuv[1].pac_data = (M4VIFI_UInt8*)( + pClipCtxt->pPlaneYuv[0].pac_data + + pClipCtxt->pPlaneYuv[0].u_height * pClipCtxt->pPlaneYuv[0].u_width); + + pClipCtxt->pPlaneYuv[2].u_height = pClipCtxt->pPlaneYuv[0].u_height >>1; + pClipCtxt->pPlaneYuv[2].u_width = pClipCtxt->pPlaneYuv[0].u_width >> 1; + pClipCtxt->pPlaneYuv[2].u_stride = pClipCtxt->pPlaneYuv[2].u_width; + pClipCtxt->pPlaneYuv[2].u_topleft = 0; + pClipCtxt->pPlaneYuv[2].pac_data = (M4VIFI_UInt8*)( + pClipCtxt->pPlaneYuv[1].pac_data + + pClipCtxt->pPlaneYuv[1].u_height * pClipCtxt->pPlaneYuv[1].u_width); + + err = M4VSS3GPP_internalConvertAndResizeARGB8888toYUV420 ( + pClipCtxt->pSettings->pFile, + pC->pOsaFileReadPtr, + pClipCtxt->pPlaneYuv, + pClipCtxt->pSettings->ClipProperties.uiStillPicWidth, + pClipCtxt->pSettings->ClipProperties.uiStillPicHeight); + if (M4NO_ERROR != err) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + return err; + } + + // Set the YUV data to the decoder using setoption + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption ( + pClipCtxt->pViDecCtxt, + M4DECODER_kOptionID_DecYuvData, + (M4OSA_DataOption)pClipCtxt->pPlaneYuv); + if (M4NO_ERROR != err) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + return err; + } + + pClipCtxt->pSettings->ClipProperties.bSetImageData = M4OSA_TRUE; + + // Allocate Yuv plane with effect + pClipCtxt->pPlaneYuvWithEffect = + (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc( + 3*sizeof(M4VIFI_ImagePlane), M4VS, + (M4OSA_Char*)"pPlaneYuvWithEffect"); + if (pClipCtxt->pPlaneYuvWithEffect == M4OSA_NULL) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuvWithEffect[0].u_height = pC->ewc.uiVideoHeight; + pClipCtxt->pPlaneYuvWithEffect[0].u_width = pC->ewc.uiVideoWidth; + pClipCtxt->pPlaneYuvWithEffect[0].u_stride = pC->ewc.uiVideoWidth; + pClipCtxt->pPlaneYuvWithEffect[0].u_topleft = 0; + + pClipCtxt->pPlaneYuvWithEffect[0].pac_data 
= + (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc( + pC->ewc.uiVideoHeight * pC->ewc.uiVideoWidth * 1.5, + M4VS, (M4OSA_Char*)"imageClip YUV data"); + if (pClipCtxt->pPlaneYuvWithEffect[0].pac_data == M4OSA_NULL) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + free(pClipCtxt->pPlaneYuvWithEffect); + return M4ERR_ALLOC; + } + + pClipCtxt->pPlaneYuvWithEffect[1].u_height = + pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1; + pClipCtxt->pPlaneYuvWithEffect[1].u_width = + pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1; + pClipCtxt->pPlaneYuvWithEffect[1].u_stride = + pClipCtxt->pPlaneYuvWithEffect[1].u_width; + pClipCtxt->pPlaneYuvWithEffect[1].u_topleft = 0; + pClipCtxt->pPlaneYuvWithEffect[1].pac_data = (M4VIFI_UInt8*)( + pClipCtxt->pPlaneYuvWithEffect[0].pac_data + + pClipCtxt->pPlaneYuvWithEffect[0].u_height * pClipCtxt->pPlaneYuvWithEffect[0].u_width); + + pClipCtxt->pPlaneYuvWithEffect[2].u_height = + pClipCtxt->pPlaneYuvWithEffect[0].u_height >>1; + pClipCtxt->pPlaneYuvWithEffect[2].u_width = + pClipCtxt->pPlaneYuvWithEffect[0].u_width >> 1; + pClipCtxt->pPlaneYuvWithEffect[2].u_stride = + pClipCtxt->pPlaneYuvWithEffect[2].u_width; + pClipCtxt->pPlaneYuvWithEffect[2].u_topleft = 0; + pClipCtxt->pPlaneYuvWithEffect[2].pac_data = (M4VIFI_UInt8*)( + pClipCtxt->pPlaneYuvWithEffect[1].pac_data + + pClipCtxt->pPlaneYuvWithEffect[1].u_height * pClipCtxt->pPlaneYuvWithEffect[1].u_width); + + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pClipCtxt->pViDecCtxt, M4DECODER_kOptionID_YuvWithEffectContiguous, + (M4OSA_DataOption)pClipCtxt->pPlaneYuvWithEffect); + if (M4NO_ERROR != err) { + free(pClipCtxt->pPlaneYuv[0].pac_data); + free(pClipCtxt->pPlaneYuv); + free(pClipCtxt->pPlaneYuvWithEffect); + return err; + } + + return M4NO_ERROR; +} + +M4OSA_ERR M4VSS3GPP_intRenderFrameWithEffect(M4VSS3GPP_InternalEditContext *pC, + M4VSS3GPP_ClipContext* pClipCtxt, + M4OSA_Bool bIsClip1, + M4VIFI_ImagePlane *pResizePlane, + M4VIFI_ImagePlane *pPlaneNoResize, + M4VIFI_ImagePlane *pPlaneOut) { + + M4OSA_ERR err = M4NO_ERROR; + M4OSA_UInt8 numEffects = 0; + M4_MediaTime ts; + M4VIFI_ImagePlane *pDecoderRenderFrame = M4OSA_NULL; + + /** + Check if resizing is needed */ + if (M4OSA_NULL != pClipCtxt->m_pPreResizeFrame) { + /** + * If we do modify the image, we need an intermediate image plane */ + if (M4OSA_NULL == pResizePlane[0].pac_data) { + err = M4VSS3GPP_intAllocateYUV420(pResizePlane, + pClipCtxt->m_pPreResizeFrame[0].u_width, + pClipCtxt->m_pPreResizeFrame[0].u_height); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + M4VSS3GPP_intAllocateYUV420 returns 0x%x", err); + return err; + } + } + + if ((pClipCtxt->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) && + (pC->nbActiveEffects == 0) && + (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_FALSE)) { + + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pClipCtxt->pViDecCtxt, + M4DECODER_kOptionID_EnableYuvWithEffect, + (M4OSA_DataOption)M4OSA_TRUE); + if (M4NO_ERROR == err) { + pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( + pClipCtxt->pViDecCtxt, &ts, + pClipCtxt->pPlaneYuvWithEffect, M4OSA_TRUE); + } + + } else { + if (pClipCtxt->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) { + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pClipCtxt->pViDecCtxt, + M4DECODER_kOptionID_EnableYuvWithEffect, + (M4OSA_DataOption)M4OSA_FALSE); + } + if (M4NO_ERROR == err) { + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( + 
pClipCtxt->pViDecCtxt, &ts, + pClipCtxt->m_pPreResizeFrame, M4OSA_TRUE); + } + + } + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + returns error 0x%x", err); + return err; + } + + if (bIsClip1 == M4OSA_TRUE) { + numEffects = pC->nbActiveEffects; + } else { + numEffects = pC->nbActiveEffects1; + } + + if ( numEffects > 0) { + pClipCtxt->bGetYuvDataFromDecoder = M4OSA_TRUE; + err = M4VSS3GPP_intApplyVideoEffect(pC, + pClipCtxt->m_pPreResizeFrame, pResizePlane); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + M4VSS3GPP_intApplyVideoEffect() err 0x%x", err); + return err; + } + + pDecoderRenderFrame= pResizePlane; + + } else { + pDecoderRenderFrame = pClipCtxt->m_pPreResizeFrame; + } + + if ((pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE) || + (pClipCtxt->pSettings->FileType != + M4VIDEOEDITING_kFileType_ARGB8888)) { + if (bIsClip1 == M4OSA_TRUE) { + err = M4VSS3GPP_intApplyRenderingMode (pC, + pClipCtxt->pSettings->xVSS.MediaRendering, + pDecoderRenderFrame,pC->yuv1); + } else { + err = M4VSS3GPP_intApplyRenderingMode (pC, + pClipCtxt->pSettings->xVSS.MediaRendering, + pDecoderRenderFrame,pC->yuv2); + } + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + M4VSS3GPP_intApplyRenderingMode error 0x%x ", err); + return err; + } + + if (bIsClip1 == M4OSA_TRUE) { + pClipCtxt->lastDecodedPlane = pC->yuv1; + } else { + pClipCtxt->lastDecodedPlane = pC->yuv2; + } + + } else { + pClipCtxt->lastDecodedPlane = pClipCtxt->pPlaneYuvWithEffect; + } + + if ((pClipCtxt->pSettings->FileType == + M4VIDEOEDITING_kFileType_ARGB8888) && + (pC->nbActiveEffects == 0) && + (pClipCtxt->bGetYuvDataFromDecoder == M4OSA_TRUE)) { + if (bIsClip1 == M4OSA_TRUE) { + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pClipCtxt->pViDecCtxt, + M4DECODER_kOptionID_YuvWithEffectNonContiguous, + (M4OSA_DataOption)pC->yuv1); + } else { + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctSetOption( + pClipCtxt->pViDecCtxt, + M4DECODER_kOptionID_YuvWithEffectNonContiguous, + (M4OSA_DataOption)pPlaneOut); + } + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + null decoder setOption error 0x%x ", err); + return err; + } + pClipCtxt->bGetYuvDataFromDecoder = M4OSA_FALSE; + } + + } else { + if (bIsClip1 == M4OSA_TRUE) { + numEffects = pC->nbActiveEffects; + } else { + numEffects = pC->nbActiveEffects1; + } + + if(numEffects > 0) { + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( + pClipCtxt->pViDecCtxt, &ts, pPlaneNoResize, M4OSA_TRUE); + + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + Render returns error 0x%x", err); + return err; + } + if (bIsClip1 == M4OSA_TRUE) { + pC->bIssecondClip = M4OSA_FALSE; + err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize ,pC->yuv1); + pClipCtxt->lastDecodedPlane = pC->yuv1; + } else { + pC->bIssecondClip = M4OSA_TRUE; + err = M4VSS3GPP_intApplyVideoEffect(pC, pPlaneNoResize ,pC->yuv2); + pClipCtxt->lastDecodedPlane = pC->yuv2; + } + + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + M4VSS3GPP_intApplyVideoEffect error 0x%x", err); + return err; + } + + } else { + if (bIsClip1 == M4OSA_TRUE) { + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( + pClipCtxt->pViDecCtxt, &ts, pC->yuv1, M4OSA_TRUE); + pClipCtxt->lastDecodedPlane = pC->yuv1; + } else { + err = pClipCtxt->ShellAPI.m_pVideoDecoder->m_pFctRender( + pClipCtxt->pViDecCtxt, &ts, pC->yuv2, M4OSA_TRUE); + 
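/*
 * lastDecodedPlane is pointed at the frame that was just rendered (pC->yuv1
 * for clip 1, pC->yuv2 for clip 2) so that the render-dup path in
 * M4VSS3GPP_intVPP can repeat the previous frame by copying it, without
 * decoding it again. (Descriptive note inferred from the surrounding code.)
 */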
pClipCtxt->lastDecodedPlane = pC->yuv2; + } + if (M4NO_ERROR != err) { + M4OSA_TRACE1_1("M4VSS3GPP_intRenderFrameWithEffect: \ + Render returns error 0x%x,", err); + return err; + } + } + pClipCtxt->iVideoRenderCts = (M4OSA_Int32)ts; + } + + return err; +} diff --git a/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c b/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c index ea13c9e..f30f705 100755 --- a/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c +++ b/libvideoeditor/vss/src/M4VSS3GPP_MediaAndCodecSubscription.c @@ -79,6 +79,7 @@ Seriously, I'd love to know." */ #include "VideoEditorAudioDecoder.h" #include "VideoEditorVideoDecoder.h" +#include "M4DECODER_Null.h" #ifdef M4VSS_SUPPORT_AUDEC_NULL #include "M4AD_Null.h" #endif @@ -259,6 +260,18 @@ M4OSA_ERR M4VSS3GPP_subscribeMediaAndCodec(M4VSS3GPP_MediaAndCodecCtxt *pContext "M4VSS3GPP_subscribeMediaAndCodec: can't register H264 decoder"); #endif /* M4VSS_SUPPORT_VIDEC_3GP */ +#ifdef M4VSS_SUPPORT_VIDEC_NULL + err = M4DECODER_NULL_getInterface( + &videoDecoderType, &pVideoDecoderInterface); + if (M4NO_ERROR != err) { + M4OSA_TRACE1_0("M4VD NULL Decoder interface allocation error"); + return err; + } + err = M4VSS3GPP_registerVideoDecoder( + pContext, videoDecoderType, pVideoDecoderInterface); + M4OSA_DEBUG_IF1((err != M4NO_ERROR), err, "M4VSS3GPP_subscribeMediaAndCodec: \ + can't register video NULL decoder"); +#endif /* ______________________________ */ /*| |*/ /*| audio decoder subscription |*/ diff --git a/libvideoeditor/vss/src/M4xVSS_API.c b/libvideoeditor/vss/src/M4xVSS_API.c index d09b145..6bf8c81 100755 --- a/libvideoeditor/vss/src/M4xVSS_API.c +++ b/libvideoeditor/vss/src/M4xVSS_API.c @@ -1196,6 +1196,9 @@ M4OSA_ERR M4xVSS_SendCommand( M4OSA_Context pContext, return err; } + xVSS_context->pSettings->pClipList[i]->bTranscodingRequired = + M4OSA_FALSE; + /* Because there is 1 less transition than clip number */ if( i < xVSS_context->pSettings->uiClipNumber - 1 ) { @@ -1648,476 +1651,479 @@ M4OSA_ERR M4xVSS_SendCommand( M4OSA_Context pContext, if( xVSS_context->pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_ARGB8888 ) { - M4OSA_Char out_img[M4XVSS_MAX_PATH_LEN]; - M4OSA_Char out_img_tmp[M4XVSS_MAX_PATH_LEN]; - M4xVSS_Pto3GPP_params *pParams = M4OSA_NULL; - M4OSA_Context pARGBFileIn; - /*UTF conversion support*/ - M4OSA_Void *pDecodedPath = pSettings->pClipList[i]->pFile; - - /* Parse Pto3GPP params chained list to know if input file has already been - converted */ - if( xVSS_context->pPTo3GPPparamsList != M4OSA_NULL ) - { - M4OSA_UInt32 pCmpResult = 0; - - pParams = xVSS_context->pPTo3GPPparamsList; - /* We parse all Pto3gpp Param chained list */ - while( pParams != M4OSA_NULL ) + if(M4OSA_TRUE == + xVSS_context->pSettings->pClipList[i]->xVSS.isPanZoom) { + M4OSA_Char out_img[M4XVSS_MAX_PATH_LEN]; + M4OSA_Char out_img_tmp[M4XVSS_MAX_PATH_LEN]; + M4xVSS_Pto3GPP_params *pParams = M4OSA_NULL; + M4OSA_Context pARGBFileIn; + /*UTF conversion support*/ + M4OSA_Void *pDecodedPath = pSettings->pClipList[i]->pFile; + + /* Parse Pto3GPP params chained list to know if input file has already been + converted */ + if( xVSS_context->pPTo3GPPparamsList != M4OSA_NULL ) { - pCmpResult = strcmp((const char *)pSettings->pClipList[i]->pFile, - (const char *)pParams->pFileIn); + M4OSA_UInt32 pCmpResult = 0; - if( pCmpResult == 0 - && (pSettings->pClipList[i]->uiEndCutTime - == pParams->duration - || pSettings->pClipList[i]->xVSS.uiDuration - == pParams->duration) - && 
pSettings->pClipList[i]->xVSS.MediaRendering - == pParams->MediaRendering ) + pParams = xVSS_context->pPTo3GPPparamsList; + /* We parse all Pto3gpp Param chained list */ + while( pParams != M4OSA_NULL ) + { + pCmpResult = strcmp((const char *)pSettings->pClipList[i]->pFile, + (const char *)pParams->pFileIn); + if( pCmpResult == 0 + && (pSettings->pClipList[i]->uiEndCutTime + == pParams->duration + || pSettings->pClipList[i]->xVSS.uiDuration + == pParams->duration) + && pSettings->pClipList[i]->xVSS.MediaRendering + == pParams->MediaRendering ) - { - /* Replace JPG filename with existing 3GP filename */ - goto replaceARGB_3GP; + + { + /* Replace JPG filename with existing 3GP filename */ + goto replaceARGB_3GP; + } + /* We need to update this variable, in case some pictures have been + added between two */ + /* calls to M4xVSS_sendCommand */ + pPto3GPP_last = pParams; + pParams = pParams->pNext; } - /* We need to update this variable, in case some pictures have been - added between two */ - /* calls to M4xVSS_sendCommand */ - pPto3GPP_last = pParams; - pParams = pParams->pNext; } - } - - /* Construct output temporary 3GP filename */ - err = M4OSA_chrSPrintf(out_img, M4XVSS_MAX_PATH_LEN - 1, (M4OSA_Char *)"%simg%d.3gp", - xVSS_context->pTempPath, xVSS_context->tempFileIndex); - if( err != M4NO_ERROR ) - { - M4OSA_TRACE1_1("Error in M4OSA_chrSPrintf: 0x%x", err); - /*FB: to avoid leaks when there is an error in the send command*/ - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - /**/ - return err; - } + /* Construct output temporary 3GP filename */ + err = M4OSA_chrSPrintf(out_img, M4XVSS_MAX_PATH_LEN - 1, (M4OSA_Char *)"%simg%d.3gp", + xVSS_context->pTempPath, xVSS_context->tempFileIndex); -#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE + if( err != M4NO_ERROR ) + { + M4OSA_TRACE1_1("Error in M4OSA_chrSPrintf: 0x%x", err); + /*FB: to avoid leaks when there is an error in the send command*/ + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + /**/ + return err; + } - err = M4OSA_chrSPrintf(out_img_tmp, M4XVSS_MAX_PATH_LEN - 1, "%simg%d.tmp", - xVSS_context->pTempPath, xVSS_context->tempFileIndex); + #ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE - if( err != M4NO_ERROR ) - { - M4OSA_TRACE1_1("Error in M4OSA_chrSPrintf: 0x%x", err); - /*FB: to avoid leaks when there is an error in the send command*/ - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - /**/ - return err; - } + err = M4OSA_chrSPrintf(out_img_tmp, M4XVSS_MAX_PATH_LEN - 1, "%simg%d.tmp", + xVSS_context->pTempPath, xVSS_context->tempFileIndex); -#endif /*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ + if( err != M4NO_ERROR ) + { + M4OSA_TRACE1_1("Error in M4OSA_chrSPrintf: 0x%x", err); + /*FB: to avoid leaks when there is an error in the send command*/ + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + /**/ + return err; + } - xVSS_context->tempFileIndex++; + #endif /*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ - /* Allocate last element Pto3GPP params structure */ - pParams = (M4xVSS_Pto3GPP_params - *)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_Pto3GPP_params), - M4VS, (M4OSA_Char *)"Element of Pto3GPP Params"); + xVSS_context->tempFileIndex++; - if( pParams == M4OSA_NULL ) - { - M4OSA_TRACE1_0( - "M4xVSS_sendCommand: Problem when allocating one element Pto3GPP Params"); - /*FB: to avoid leaks when there is an error in the send command*/ - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - /**/ - return M4ERR_ALLOC; - } + /* Allocate last element Pto3GPP params structure */ + pParams = (M4xVSS_Pto3GPP_params + 
*)M4OSA_32bitAlignedMalloc(sizeof(M4xVSS_Pto3GPP_params), + M4VS, (M4OSA_Char *)"Element of Pto3GPP Params"); - /* Initializes pfilexxx members of pParams to be able to free them correctly */ - pParams->pFileIn = M4OSA_NULL; - pParams->pFileOut = M4OSA_NULL; - pParams->pFileTemp = M4OSA_NULL; - pParams->pNext = M4OSA_NULL; - pParams->MediaRendering = M4xVSS_kResizing; - - /*To support ARGB8888 :get the width and height */ - pParams->height = pSettings->pClipList[ - i]->ClipProperties.uiStillPicHeight; //ARGB_Height; - pParams->width = pSettings->pClipList[ - i]->ClipProperties.uiStillPicWidth; //ARGB_Width; - M4OSA_TRACE3_1("CLIP M4xVSS_SendCommand ARGB8888 H = %d", pParams->height); - M4OSA_TRACE3_1("CLIP M4xVSS_SendCommand ARGB8888 W = %d", pParams->width); - - if( xVSS_context->pPTo3GPPparamsList - == M4OSA_NULL ) /* Means it is the first element of the list */ - { - /* Initialize the xVSS context with the first element of the list */ - xVSS_context->pPTo3GPPparamsList = pParams; - - /* Save this element in case of other file to convert */ - pPto3GPP_last = pParams; - } - else - { - /* Update next pointer of the previous last element of the chain */ - pPto3GPP_last->pNext = pParams; + if( pParams == M4OSA_NULL ) + { + M4OSA_TRACE1_0( + "M4xVSS_sendCommand: Problem when allocating one element Pto3GPP Params"); + /*FB: to avoid leaks when there is an error in the send command*/ + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + /**/ + return M4ERR_ALLOC; + } - /* Update save of last element of the chain */ - pPto3GPP_last = pParams; - } + /* Initializes pfilexxx members of pParams to be able to free them correctly */ + pParams->pFileIn = M4OSA_NULL; + pParams->pFileOut = M4OSA_NULL; + pParams->pFileTemp = M4OSA_NULL; + pParams->pNext = M4OSA_NULL; + pParams->MediaRendering = M4xVSS_kResizing; - /* Fill the last M4xVSS_Pto3GPP_params element */ - pParams->duration = - xVSS_context->pSettings->pClipList[i]->uiEndCutTime; - /* If duration is filled, let's use it instead of EndCutTime */ - if( xVSS_context->pSettings->pClipList[i]->xVSS.uiDuration != 0 ) - { - pParams->duration = - xVSS_context->pSettings->pClipList[i]->xVSS.uiDuration; - } + /*To support ARGB8888 :get the width and height */ + pParams->height = pSettings->pClipList[ + i]->ClipProperties.uiStillPicHeight; //ARGB_Height; + pParams->width = pSettings->pClipList[ + i]->ClipProperties.uiStillPicWidth; //ARGB_Width; + M4OSA_TRACE3_1("CLIP M4xVSS_SendCommand ARGB8888 H = %d", pParams->height); + M4OSA_TRACE3_1("CLIP M4xVSS_SendCommand ARGB8888 W = %d", pParams->width); - pParams->InputFileType = M4VIDEOEDITING_kFileType_ARGB8888; + if( xVSS_context->pPTo3GPPparamsList + == M4OSA_NULL ) /* Means it is the first element of the list */ + { + /* Initialize the xVSS context with the first element of the list */ + xVSS_context->pPTo3GPPparamsList = pParams; - /** - * UTF conversion: convert into the customer format, before being used*/ - pDecodedPath = xVSS_context->pSettings->pClipList[i]->pFile; - length = strlen(pDecodedPath); + /* Save this element in case of other file to convert */ + pPto3GPP_last = pParams; + } + else + { + /* Update next pointer of the previous last element of the chain */ + pPto3GPP_last->pNext = pParams; - /** - * UTF conversion: convert into the customer format, before being used*/ - if( xVSS_context->UTFConversionContext.pConvFromUTF8Fct - != M4OSA_NULL && xVSS_context-> - UTFConversionContext.pTempOutConversionBuffer - != M4OSA_NULL ) - { - err = M4xVSS_internalConvertFromUTF8(xVSS_context, 
(M4OSA_Void - *)xVSS_context->pSettings->pClipList[i]->pFile, - (M4OSA_Void *)xVSS_context-> - UTFConversionContext.pTempOutConversionBuffer, - &length); + /* Update save of last element of the chain */ + pPto3GPP_last = pParams; + } - if( err != M4NO_ERROR ) + /* Fill the last M4xVSS_Pto3GPP_params element */ + pParams->duration = + xVSS_context->pSettings->pClipList[i]->uiEndCutTime; + /* If duration is filled, let's use it instead of EndCutTime */ + if( xVSS_context->pSettings->pClipList[i]->xVSS.uiDuration != 0 ) { - M4OSA_TRACE1_1( - "M4xVSS_SendCommand: pConvFromUTF8Fct returns err: 0x%x", - err); - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - return err; + pParams->duration = + xVSS_context->pSettings->pClipList[i]->xVSS.uiDuration; } - pDecodedPath = - xVSS_context->UTFConversionContext.pTempOutConversionBuffer; - } - /** - * End of the UTF conversion, use the converted file path*/ - pParams->pFileIn = (M4OSA_Void *)M4OSA_32bitAlignedMalloc(length + 1, M4VS, - (M4OSA_Char *)"Pto3GPP Params: file in"); + pParams->InputFileType = M4VIDEOEDITING_kFileType_ARGB8888; - if( pParams->pFileIn == M4OSA_NULL ) - { - M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); - /*FB: to avoid leaks when there is an error in the send command*/ - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - /**/ - return M4ERR_ALLOC; - } - memcpy((void *)pParams->pFileIn, (void *)pDecodedPath, - (length + 1)); /* Copy input file path */ + /** + * UTF conversion: convert into the customer format, before being used*/ + pDecodedPath = xVSS_context->pSettings->pClipList[i]->pFile; + length = strlen(pDecodedPath); - /* Check that JPG file is present on the FS (P4ME00002974) by just opening - and closing it */ - err = - xVSS_context->pFileReadPtr->openRead(&pARGBFileIn, pDecodedPath, - M4OSA_kFileRead); + /** + * UTF conversion: convert into the customer format, before being used*/ + if( xVSS_context->UTFConversionContext.pConvFromUTF8Fct + != M4OSA_NULL && xVSS_context-> + UTFConversionContext.pTempOutConversionBuffer + != M4OSA_NULL ) + { + err = M4xVSS_internalConvertFromUTF8(xVSS_context, (M4OSA_Void + *)xVSS_context->pSettings->pClipList[i]->pFile, + (M4OSA_Void *)xVSS_context-> + UTFConversionContext.pTempOutConversionBuffer, + &length); - if( err != M4NO_ERROR ) - { - M4OSA_TRACE1_2("Can't open input jpg file %s, error: 0x%x\n", - pDecodedPath, err); - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - return err; - } - err = xVSS_context->pFileReadPtr->closeRead(pARGBFileIn); + if( err != M4NO_ERROR ) + { + M4OSA_TRACE1_1( + "M4xVSS_SendCommand: pConvFromUTF8Fct returns err: 0x%x", + err); + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + return err; + } + pDecodedPath = + xVSS_context->UTFConversionContext.pTempOutConversionBuffer; + } - if( err != M4NO_ERROR ) - { - M4OSA_TRACE1_2("Can't close input jpg file %s, error: 0x%x\n", - pDecodedPath, err); - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - return err; - } + /** + * End of the UTF conversion, use the converted file path*/ + pParams->pFileIn = (M4OSA_Void *)M4OSA_32bitAlignedMalloc(length + 1, M4VS, + (M4OSA_Char *)"Pto3GPP Params: file in"); - /** - * UTF conversion: convert into the customer format, before being used*/ - pDecodedPath = out_img; - length = strlen(pDecodedPath); + if( pParams->pFileIn == M4OSA_NULL ) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); + /*FB: to avoid leaks when there is an error in the send command*/ + /* Free Send command */ + 
M4xVSS_freeCommand(xVSS_context); + /**/ + return M4ERR_ALLOC; + } + memcpy((void *)pParams->pFileIn, (void *)pDecodedPath, + (length + 1)); /* Copy input file path */ - if( xVSS_context->UTFConversionContext.pConvFromUTF8Fct - != M4OSA_NULL && xVSS_context-> - UTFConversionContext.pTempOutConversionBuffer - != M4OSA_NULL ) - { - err = M4xVSS_internalConvertFromUTF8(xVSS_context, - (M4OSA_Void *)out_img, (M4OSA_Void *)xVSS_context-> - UTFConversionContext.pTempOutConversionBuffer, &length); + /* Check that JPG file is present on the FS (P4ME00002974) by just opening + and closing it */ + err = + xVSS_context->pFileReadPtr->openRead(&pARGBFileIn, pDecodedPath, + M4OSA_kFileRead); if( err != M4NO_ERROR ) { - M4OSA_TRACE1_1( - "M4xVSS_SendCommand: pConvFromUTF8Fct returns err: 0x%x", - err); + M4OSA_TRACE1_2("Can't open input jpg file %s, error: 0x%x\n", + pDecodedPath, err); /* Free Send command */ M4xVSS_freeCommand(xVSS_context); return err; } - pDecodedPath = - xVSS_context->UTFConversionContext.pTempOutConversionBuffer; - } + err = xVSS_context->pFileReadPtr->closeRead(pARGBFileIn); - /** - * End of the UTF conversion, use the converted file path*/ - pParams->pFileOut = (M4OSA_Void *)M4OSA_32bitAlignedMalloc((length + 1), M4VS, - (M4OSA_Char *)"Pto3GPP Params: file out"); + if( err != M4NO_ERROR ) + { + M4OSA_TRACE1_2("Can't close input jpg file %s, error: 0x%x\n", + pDecodedPath, err); + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + return err; + } - if( pParams->pFileOut == M4OSA_NULL ) - { - M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); - /*FB: to avoid leaks when there is an error in the send command*/ - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - /**/ - return M4ERR_ALLOC; - } - memcpy((void *)pParams->pFileOut, (void *)pDecodedPath, - (length + 1)); /* Copy output file path */ + /** + * UTF conversion: convert into the customer format, before being used*/ + pDecodedPath = out_img; + length = strlen(pDecodedPath); -#ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE - /** - * UTF conversion: convert into the customer format, before being used*/ + if( xVSS_context->UTFConversionContext.pConvFromUTF8Fct + != M4OSA_NULL && xVSS_context-> + UTFConversionContext.pTempOutConversionBuffer + != M4OSA_NULL ) + { + err = M4xVSS_internalConvertFromUTF8(xVSS_context, + (M4OSA_Void *)out_img, (M4OSA_Void *)xVSS_context-> + UTFConversionContext.pTempOutConversionBuffer, &length); - pDecodedPath = out_img_tmp; - length = strlen(pDecodedPath); + if( err != M4NO_ERROR ) + { + M4OSA_TRACE1_1( + "M4xVSS_SendCommand: pConvFromUTF8Fct returns err: 0x%x", + err); + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + return err; + } + pDecodedPath = + xVSS_context->UTFConversionContext.pTempOutConversionBuffer; + } - if( xVSS_context->UTFConversionContext.pConvFromUTF8Fct - != M4OSA_NULL && xVSS_context-> - UTFConversionContext.pTempOutConversionBuffer - != M4OSA_NULL ) - { - err = M4xVSS_internalConvertFromUTF8(xVSS_context, - (M4OSA_Void *)out_img_tmp, (M4OSA_Void *)xVSS_context-> - UTFConversionContext.pTempOutConversionBuffer, &length); + /** + * End of the UTF conversion, use the converted file path*/ + pParams->pFileOut = (M4OSA_Void *)M4OSA_32bitAlignedMalloc((length + 1), M4VS, + (M4OSA_Char *)"Pto3GPP Params: file out"); - if( err != M4NO_ERROR ) + if( pParams->pFileOut == M4OSA_NULL ) { - M4OSA_TRACE1_1("M4xVSS_SendCommand: M4xVSS_internalConvertFromUTF8\ - returns err: 0x%x", - err); + M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); + /*FB: 
to avoid leaks when there is an error in the send command*/ /* Free Send command */ M4xVSS_freeCommand(xVSS_context); - return err; + /**/ + return M4ERR_ALLOC; } - pDecodedPath = - xVSS_context->UTFConversionContext.pTempOutConversionBuffer; - } + memcpy((void *)pParams->pFileOut, (void *)pDecodedPath, + (length + 1)); /* Copy output file path */ - /** - * End of the UTF conversion, use the converted file path*/ - pParams->pFileTemp = (M4OSA_Void *)M4OSA_32bitAlignedMalloc((length + 1), M4VS, - (M4OSA_Char *)"Pto3GPP Params: file temp"); + #ifdef M4xVSS_RESERVED_MOOV_DISK_SPACE + /** + * UTF conversion: convert into the customer format, before being used*/ - if( pParams->pFileTemp == M4OSA_NULL ) - { - M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); - /*FB: to avoid leaks when there is an error in the send command*/ - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - /**/ - return M4ERR_ALLOC; - } - memcpy((void *)pParams->pFileTemp, (void *)pDecodedPath, - (length + 1)); /* Copy temporary file path */ + pDecodedPath = out_img_tmp; + length = strlen(pDecodedPath); -#endif /*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ + if( xVSS_context->UTFConversionContext.pConvFromUTF8Fct + != M4OSA_NULL && xVSS_context-> + UTFConversionContext.pTempOutConversionBuffer + != M4OSA_NULL ) + { + err = M4xVSS_internalConvertFromUTF8(xVSS_context, + (M4OSA_Void *)out_img_tmp, (M4OSA_Void *)xVSS_context-> + UTFConversionContext.pTempOutConversionBuffer, &length); - /* Fill PanAndZoom settings if needed */ + if( err != M4NO_ERROR ) + { + M4OSA_TRACE1_1("M4xVSS_SendCommand: M4xVSS_internalConvertFromUTF8\ + returns err: 0x%x", + err); + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + return err; + } + pDecodedPath = + xVSS_context->UTFConversionContext.pTempOutConversionBuffer; + } - if( M4OSA_TRUE - == xVSS_context->pSettings->pClipList[i]->xVSS.isPanZoom ) - { - pParams->isPanZoom = - xVSS_context->pSettings->pClipList[i]->xVSS.isPanZoom; - /* Check that Pan & Zoom parameters are corrects */ - if( xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXa > 1000 - || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXa - <= 0 || xVSS_context->pSettings->pClipList[i]->xVSS. - PanZoomTopleftXa > 1000 - || xVSS_context->pSettings->pClipList[i]->xVSS. - PanZoomTopleftYa > 1000 - || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb - > 1000 - || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb - <= 0 || xVSS_context->pSettings->pClipList[i]->xVSS. - PanZoomTopleftXb > 1000 - || xVSS_context->pSettings->pClipList[i]->xVSS. 
- PanZoomTopleftYb > 1000) + /** + * End of the UTF conversion, use the converted file path*/ + pParams->pFileTemp = (M4OSA_Void *)M4OSA_32bitAlignedMalloc((length + 1), M4VS, + (M4OSA_Char *)"Pto3GPP Params: file temp"); + + if( pParams->pFileTemp == M4OSA_NULL ) { M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); + /*FB: to avoid leaks when there is an error in the send command*/ + /* Free Send command */ M4xVSS_freeCommand(xVSS_context); - return M4ERR_PARAMETER; + /**/ + return M4ERR_ALLOC; } + memcpy((void *)pParams->pFileTemp, (void *)pDecodedPath, + (length + 1)); /* Copy temporary file path */ - pParams->PanZoomXa = - xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXa; - pParams->PanZoomTopleftXa = - xVSS_context->pSettings-> - pClipList[i]->xVSS.PanZoomTopleftXa; - pParams->PanZoomTopleftYa = - xVSS_context->pSettings-> - pClipList[i]->xVSS.PanZoomTopleftYa; - pParams->PanZoomXb = - xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb; - pParams->PanZoomTopleftXb = - xVSS_context->pSettings-> - pClipList[i]->xVSS.PanZoomTopleftXb; - pParams->PanZoomTopleftYb = - xVSS_context->pSettings-> - pClipList[i]->xVSS.PanZoomTopleftYb; - } - else - { - pParams->isPanZoom = M4OSA_FALSE; - } - /*+ PR No: blrnxpsw#223*/ - /*Intializing the Video Frame Rate as it may not be intialized*/ - /*Other changes made is @ M4xVSS_Internal.c @ line 1518 in - M4xVSS_internalStartConvertPictureTo3gp*/ - switch( xVSS_context->pSettings->videoFrameRate ) - { - case M4VIDEOEDITING_k30_FPS: - pParams->framerate = 33; - break; + #endif /*M4xVSS_RESERVED_MOOV_DISK_SPACE*/ - case M4VIDEOEDITING_k25_FPS: - pParams->framerate = 40; - break; + /* Fill PanAndZoom settings if needed */ - case M4VIDEOEDITING_k20_FPS: - pParams->framerate = 50; - break; + if( M4OSA_TRUE + == xVSS_context->pSettings->pClipList[i]->xVSS.isPanZoom ) + { + pParams->isPanZoom = + xVSS_context->pSettings->pClipList[i]->xVSS.isPanZoom; + /* Check that Pan & Zoom parameters are corrects */ + if( xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXa > 1000 + || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXa + <= 0 || xVSS_context->pSettings->pClipList[i]->xVSS. + PanZoomTopleftXa > 1000 + || xVSS_context->pSettings->pClipList[i]->xVSS. + PanZoomTopleftYa > 1000 + || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb + > 1000 + || xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb + <= 0 || xVSS_context->pSettings->pClipList[i]->xVSS. + PanZoomTopleftXb > 1000 + || xVSS_context->pSettings->pClipList[i]->xVSS. 
+ PanZoomTopleftYb > 1000) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); + M4xVSS_freeCommand(xVSS_context); + return M4ERR_PARAMETER; + } - case M4VIDEOEDITING_k15_FPS: - pParams->framerate = 66; - break; + pParams->PanZoomXa = + xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXa; + pParams->PanZoomTopleftXa = + xVSS_context->pSettings-> + pClipList[i]->xVSS.PanZoomTopleftXa; + pParams->PanZoomTopleftYa = + xVSS_context->pSettings-> + pClipList[i]->xVSS.PanZoomTopleftYa; + pParams->PanZoomXb = + xVSS_context->pSettings->pClipList[i]->xVSS.PanZoomXb; + pParams->PanZoomTopleftXb = + xVSS_context->pSettings-> + pClipList[i]->xVSS.PanZoomTopleftXb; + pParams->PanZoomTopleftYb = + xVSS_context->pSettings-> + pClipList[i]->xVSS.PanZoomTopleftYb; + } + else + { + pParams->isPanZoom = M4OSA_FALSE; + } + /*+ PR No: blrnxpsw#223*/ + /*Intializing the Video Frame Rate as it may not be intialized*/ + /*Other changes made is @ M4xVSS_Internal.c @ line 1518 in + M4xVSS_internalStartConvertPictureTo3gp*/ + switch( xVSS_context->pSettings->videoFrameRate ) + { + case M4VIDEOEDITING_k30_FPS: + pParams->framerate = 33; + break; - case M4VIDEOEDITING_k12_5_FPS: - pParams->framerate = 80; - break; + case M4VIDEOEDITING_k25_FPS: + pParams->framerate = 40; + break; - case M4VIDEOEDITING_k10_FPS: - pParams->framerate = 100; - break; + case M4VIDEOEDITING_k20_FPS: + pParams->framerate = 50; + break; - case M4VIDEOEDITING_k7_5_FPS: - pParams->framerate = 133; - break; + case M4VIDEOEDITING_k15_FPS: + pParams->framerate = 66; + break; - case M4VIDEOEDITING_k5_FPS: - pParams->framerate = 200; - break; + case M4VIDEOEDITING_k12_5_FPS: + pParams->framerate = 80; + break; - default: - /*Making Default Frame Rate @ 15 FPS*/ - pParams->framerate = 66; - break; - } - /*-PR No: blrnxpsw#223*/ - if( xVSS_context->pSettings->pClipList[i]->xVSS.MediaRendering - == M4xVSS_kCropping - || xVSS_context->pSettings->pClipList[i]->xVSS. - MediaRendering == M4xVSS_kBlackBorders - || xVSS_context->pSettings->pClipList[i]->xVSS. - MediaRendering == M4xVSS_kResizing ) - { - pParams->MediaRendering = - xVSS_context->pSettings->pClipList[i]->xVSS.MediaRendering; - } + case M4VIDEOEDITING_k10_FPS: + pParams->framerate = 100; + break; - pParams->pNext = M4OSA_NULL; - pParams->isCreated = M4OSA_FALSE; - xVSS_context->nbStepTotal++; + case M4VIDEOEDITING_k7_5_FPS: + pParams->framerate = 133; + break; -replaceARGB_3GP: - /* Update total duration */ - totalDuration += pParams->duration; + case M4VIDEOEDITING_k5_FPS: + pParams->framerate = 200; + break; - /* Replacing in VSS structure the JPG file by the 3gp file */ - xVSS_context->pSettings->pClipList[i]->FileType = - M4VIDEOEDITING_kFileType_3GPP; + default: + /*Making Default Frame Rate @ 15 FPS*/ + pParams->framerate = 66; + break; + } + /*-PR No: blrnxpsw#223*/ + if( xVSS_context->pSettings->pClipList[i]->xVSS.MediaRendering + == M4xVSS_kCropping + || xVSS_context->pSettings->pClipList[i]->xVSS. + MediaRendering == M4xVSS_kBlackBorders + || xVSS_context->pSettings->pClipList[i]->xVSS. 
+ MediaRendering == M4xVSS_kResizing ) + { + pParams->MediaRendering = + xVSS_context->pSettings->pClipList[i]->xVSS.MediaRendering; + } - if( xVSS_context->pSettings->pClipList[i]->pFile != M4OSA_NULL ) - { - free(xVSS_context->pSettings->pClipList[i]->pFile); - xVSS_context->pSettings->pClipList[i]->pFile = M4OSA_NULL; - } + pParams->pNext = M4OSA_NULL; + pParams->isCreated = M4OSA_FALSE; + xVSS_context->nbStepTotal++; - /** - * UTF conversion: convert into UTF8, before being used*/ - pDecodedPath = pParams->pFileOut; + replaceARGB_3GP: + /* Update total duration */ + totalDuration += pParams->duration; - if( xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL - && xVSS_context->UTFConversionContext.pTempOutConversionBuffer - != M4OSA_NULL ) - { - err = M4xVSS_internalConvertToUTF8(xVSS_context, - (M4OSA_Void *)pParams->pFileOut, - (M4OSA_Void *)xVSS_context-> - UTFConversionContext.pTempOutConversionBuffer, - &length); + /* Replacing in VSS structure the JPG file by the 3gp file */ + xVSS_context->pSettings->pClipList[i]->FileType = + M4VIDEOEDITING_kFileType_3GPP; - if( err != M4NO_ERROR ) + if( xVSS_context->pSettings->pClipList[i]->pFile != M4OSA_NULL ) { - M4OSA_TRACE1_1( - "M4xVSS_SendCommand: M4xVSS_internalConvertToUTF8 returns err: \ - 0x%x",err); + free(xVSS_context->pSettings->pClipList[i]->pFile); + xVSS_context->pSettings->pClipList[i]->pFile = M4OSA_NULL; + } + + /** + * UTF conversion: convert into UTF8, before being used*/ + pDecodedPath = pParams->pFileOut; + + if( xVSS_context->UTFConversionContext.pConvToUTF8Fct != M4OSA_NULL + && xVSS_context->UTFConversionContext.pTempOutConversionBuffer + != M4OSA_NULL ) + { + err = M4xVSS_internalConvertToUTF8(xVSS_context, + (M4OSA_Void *)pParams->pFileOut, + (M4OSA_Void *)xVSS_context-> + UTFConversionContext.pTempOutConversionBuffer, + &length); + + if( err != M4NO_ERROR ) + { + M4OSA_TRACE1_1( + "M4xVSS_SendCommand: M4xVSS_internalConvertToUTF8 returns err: \ + 0x%x",err); + /* Free Send command */ + M4xVSS_freeCommand(xVSS_context); + return err; + } + pDecodedPath = + xVSS_context->UTFConversionContext.pTempOutConversionBuffer; + } + else + { + length = strlen(pDecodedPath); + } + /** + * End of the UTF conversion, use the converted file path*/ + xVSS_context->pSettings->pClipList[i]->pFile = M4OSA_32bitAlignedMalloc((length + + 1), M4VS, (M4OSA_Char *)"xVSS file path of ARGB to 3gp"); + + if( xVSS_context->pSettings->pClipList[i]->pFile == M4OSA_NULL ) + { + M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); + /*FB: to avoid leaks when there is an error in the send command*/ /* Free Send command */ M4xVSS_freeCommand(xVSS_context); - return err; + /**/ + return M4ERR_ALLOC; } - pDecodedPath = - xVSS_context->UTFConversionContext.pTempOutConversionBuffer; - } - else - { - length = strlen(pDecodedPath); - } - /** - * End of the UTF conversion, use the converted file path*/ - xVSS_context->pSettings->pClipList[i]->pFile = M4OSA_32bitAlignedMalloc((length - + 1), M4VS, (M4OSA_Char *)"xVSS file path of ARGB to 3gp"); - - if( xVSS_context->pSettings->pClipList[i]->pFile == M4OSA_NULL ) - { - M4OSA_TRACE1_0("Allocation error in M4xVSS_SendCommand"); - /*FB: to avoid leaks when there is an error in the send command*/ - /* Free Send command */ - M4xVSS_freeCommand(xVSS_context); - /**/ - return M4ERR_ALLOC; - } - memcpy((void *)xVSS_context->pSettings->pClipList[i]->pFile, - (void *)pDecodedPath, (length + 1)); - /*FB: add file path size because of UTF16 conversion*/ - 
xVSS_context->pSettings->pClipList[i]->filePathSize = length+1; + memcpy((void *)xVSS_context->pSettings->pClipList[i]->pFile, + (void *)pDecodedPath, (length + 1)); + /*FB: add file path size because of UTF16 conversion*/ + xVSS_context->pSettings->pClipList[i]->filePathSize = length+1; + } } /************************ 3GP input file type case @@ -2380,22 +2386,6 @@ replaceARGB_3GP: i); } - /* If the output video format/size is not the same as provided video, - let's transcode it */ - - if( fileProperties.VideoStreamType - != xVSS_context->pSettings->xVSS.outputVideoFormat - || fileProperties.uiVideoWidth != width - || fileProperties.uiVideoHeight != height - || (fileProperties.VideoStreamType == M4VIDEOEDITING_kMPEG4 - && fileProperties.uiVideoTimeScale - != xVSS_context->targetedTimescale) ) - { - videoIsDifferent = M4OSA_TRUE; - } - /*temp solution for fixng issue for H.264 compressed domain */ - videoIsDifferent = M4OSA_TRUE; - if( fileProperties.uiNbChannels == 1 ) { bAudioMono = M4OSA_TRUE; @@ -2479,6 +2469,7 @@ replaceARGB_3GP: return M4ERR_ALLOC; } pParams->MediaRendering = M4xVSS_kResizing; + pParams->videoclipnumber = i; // Indicates video clip index if( xVSS_context->pMCSparamsList == M4OSA_NULL ) /* Means it is the first element of the list */ @@ -2893,11 +2884,6 @@ replace3GP_3GP: xVSS_context->pSettings->pClipList[i]->uiEndCutTime - xVSS_context->pSettings->pClipList[i]->uiBeginCutTime; - /*the cuts are done in the MCS, so we need to replace the beginCutTime - and endCutTime to keep the entire video*/ - xVSS_context->pSettings->pClipList[i]->uiBeginCutTime = 0; - xVSS_context->pSettings->pClipList[i]->uiEndCutTime = 0; - /* Replacing in VSS structure the original 3GP file by the transcoded 3GP file */ xVSS_context->pSettings->pClipList[i]->FileType = M4VIDEOEDITING_kFileType_3GPP; @@ -4924,7 +4910,9 @@ M4OSA_ERR M4xVSS_SaveStart( M4OSA_Context pContext, M4OSA_Void *pFilePath, || (xVSS_context->pSettings->pClipList[i]->FileType == M4VIDEOEDITING_kFileType_MP4) || (xVSS_context->pSettings->pClipList[i]->FileType - == M4VIDEOEDITING_kFileType_M4V) ) + == M4VIDEOEDITING_kFileType_M4V) + || (xVSS_context->pSettings->pClipList[i]->FileType + == M4VIDEOEDITING_kFileType_ARGB8888)) { /* Copy data from given structure to our saving structure */ @@ -5800,10 +5788,21 @@ M4OSA_ERR M4xVSS_Step( M4OSA_Context pContext, M4OSA_UInt8 *pProgress ) code ? 
*/ return err; } - M4OSA_TRACE1_1("M4xVSS_Step: M4xVSS_internalStartTranscoding returned\ + int32_t index = xVSS_context->pMCScurrentParams->videoclipnumber; + if(xVSS_context->pSettings->pClipList[index]->bTranscodingRequired + == M4OSA_FALSE) { + /*the cuts are done in the MCS, so we need to replace + the beginCutTime and endCutTime to keep the entire video*/ + xVSS_context->pSettings->pClipList[index]->uiBeginCutTime = 0; + xVSS_context->pSettings->pClipList[index]->uiEndCutTime = 0; + } + + M4OSA_TRACE1_1("M4xVSS_Step: \ + M4xVSS_internalStartTranscoding returned \ success; MCS context: 0x%x", - xVSS_context->pMCS_Ctxt)xVSS_context->analyseStep = - M4xVSS_kMicroStateTranscodeMCS; + xVSS_context->pMCS_Ctxt); + xVSS_context->analyseStep = + M4xVSS_kMicroStateTranscodeMCS; } } else if( xVSS_context->analyseStep diff --git a/libvideoeditor/vss/src/M4xVSS_internal.c b/libvideoeditor/vss/src/M4xVSS_internal.c index 3827720..66e3b80 100755 --- a/libvideoeditor/vss/src/M4xVSS_internal.c +++ b/libvideoeditor/vss/src/M4xVSS_internal.c @@ -2354,6 +2354,22 @@ M4OSA_ERR M4xVSS_internalGenerateEditedFile(M4OSA_Context pContext) return err; } + M4VSS3GPP_InternalEditContext* pVSSContext = + (M4VSS3GPP_InternalEditContext*)pVssCtxt; + pVSSContext->xVSS.outputVideoFormat = + xVSS_context->pSettings->xVSS.outputVideoFormat; + pVSSContext->xVSS.outputVideoSize = + xVSS_context->pSettings->xVSS.outputVideoSize ; + pVSSContext->xVSS.outputAudioFormat = + xVSS_context->pSettings->xVSS.outputAudioFormat; + pVSSContext->xVSS.outputAudioSamplFreq = + xVSS_context->pSettings->xVSS.outputAudioSamplFreq; + pVSSContext->xVSS.outputVideoBitrate = + xVSS_context->pSettings->xVSS.outputVideoBitrate ; + pVSSContext->xVSS.outputAudioBitrate = + xVSS_context->pSettings->xVSS.outputAudioBitrate ; + pVSSContext->xVSS.bAudioMono = + xVSS_context->pSettings->xVSS.bAudioMono; /* In case of MMS use case, we fill directly into the VSS context the targeted bitrate */ if(xVSS_context->targetedBitrate != 0) { |
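
For reference, the hunk in M4VSS3GPP_EditVideo.c above builds pPlaneYuvWithEffect as three YUV420 planes sharing one contiguous buffer (Y at full resolution, U and V at quarter size placed immediately after it) before handing the array to the null decoder via M4DECODER_kOptionID_YuvWithEffectContiguous. The sketch below illustrates that plane layout only; it is not code from the patch, and the names ImagePlane and setup_yuv420_planes are hypothetical stand-ins for M4VIFI_ImagePlane and the in-line setup code in the hunk.

/*
 * Illustrative sketch only (hypothetical names): lay out the three planes of
 * a YUV420 planar image in one contiguous buffer, mirroring how the patch
 * fills pPlaneYuvWithEffect[0..2] before passing it to the null decoder.
 */
#include <stdint.h>
#include <stdlib.h>

typedef struct {
    uint32_t u_width;    /* plane width in pixels                  */
    uint32_t u_height;   /* plane height in pixels                 */
    uint32_t u_topleft;  /* offset of the first active pixel       */
    uint32_t u_stride;   /* line stride in bytes (== width here)   */
    uint8_t *pac_data;   /* pointer to the start of the plane data */
} ImagePlane;

/* Allocate width*height*3/2 bytes and point Y, U and V into it: Y is full
 * resolution, U and V are half width/height and each chroma plane follows
 * the previous plane contiguously (the same pac_data arithmetic as in the
 * patch). Returns 0 on success, -1 on allocation failure. */
static int setup_yuv420_planes(ImagePlane plane[3],
                               uint32_t width, uint32_t height)
{
    uint8_t *buf = (uint8_t *)malloc((size_t)width * height * 3 / 2);
    if (buf == NULL)
        return -1;

    plane[0].u_width   = width;
    plane[0].u_height  = height;
    plane[0].u_stride  = width;
    plane[0].u_topleft = 0;
    plane[0].pac_data  = buf;

    for (int i = 1; i < 3; i++) {
        plane[i].u_width   = width  >> 1;
        plane[i].u_height  = height >> 1;
        plane[i].u_stride  = plane[i].u_width;
        plane[i].u_topleft = 0;
        /* each chroma plane starts right after the previous plane */
        plane[i].pac_data  = plane[i - 1].pac_data +
                             plane[i - 1].u_stride * plane[i - 1].u_height;
    }
    return 0;
}

On the M4xVSS_API.c side, the framerate values stored in the Pto3GPP params (33 for 30 fps, 40 for 25 fps, ..., 200 for 5 fps, default 66) read as approximate inter-frame durations in milliseconds, i.e. roughly 1000/fps.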