summaryrefslogtreecommitdiffstats
path: root/libvideoeditor/lvpp
diff options
context:
space:
mode:
Diffstat (limited to 'libvideoeditor/lvpp')
-rwxr-xr-xlibvideoeditor/lvpp/Android.mk100
-rwxr-xr-xlibvideoeditor/lvpp/DummyAudioSource.cpp163
-rwxr-xr-xlibvideoeditor/lvpp/DummyAudioSource.h73
-rwxr-xr-xlibvideoeditor/lvpp/DummyVideoSource.cpp172
-rwxr-xr-xlibvideoeditor/lvpp/DummyVideoSource.h74
-rwxr-xr-xlibvideoeditor/lvpp/I420ColorConverter.cpp55
-rwxr-xr-xlibvideoeditor/lvpp/I420ColorConverter.h35
-rw-r--r--libvideoeditor/lvpp/MODULE_LICENSE_APACHE20
-rw-r--r--libvideoeditor/lvpp/NOTICE190
-rwxr-xr-xlibvideoeditor/lvpp/NativeWindowRenderer.cpp620
-rwxr-xr-xlibvideoeditor/lvpp/NativeWindowRenderer.h182
-rwxr-xr-xlibvideoeditor/lvpp/PreviewPlayer.cpp2082
-rwxr-xr-xlibvideoeditor/lvpp/PreviewPlayer.h298
-rwxr-xr-xlibvideoeditor/lvpp/PreviewRenderer.cpp140
-rwxr-xr-xlibvideoeditor/lvpp/PreviewRenderer.h66
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorAudioPlayer.cpp900
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorAudioPlayer.h142
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp295
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorBGAudioProcessing.h110
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPlayer.cpp595
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPlayer.h164
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPreviewController.cpp1467
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPreviewController.h157
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorSRC.cpp334
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorSRC.h87
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorTools.cpp3883
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorTools.h153
27 files changed, 0 insertions, 12537 deletions
diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk
deleted file mode 100755
index a84ddad..0000000
--- a/libvideoeditor/lvpp/Android.mk
+++ /dev/null
@@ -1,100 +0,0 @@
-#
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-LOCAL_PATH:= $(call my-dir)
-
-#
-# libvideoeditorplayer
-#
-
-include $(CLEAR_VARS)
-
-LOCAL_MODULE:= libvideoeditorplayer
-
-LOCAL_SRC_FILES:= \
- VideoEditorTools.cpp \
- VideoEditorPlayer.cpp \
- PreviewPlayer.cpp \
- VideoEditorAudioPlayer.cpp \
- VideoEditorPreviewController.cpp \
- VideoEditorSRC.cpp \
- DummyAudioSource.cpp \
- DummyVideoSource.cpp \
- VideoEditorBGAudioProcessing.cpp \
- PreviewRenderer.cpp \
- I420ColorConverter.cpp \
- NativeWindowRenderer.cpp
-
-LOCAL_MODULE_TAGS := optional
-
-LOCAL_STATIC_LIBRARIES := \
- libstagefright_color_conversion
-
-
-
-LOCAL_SHARED_LIBRARIES := \
- libaudioresampler \
- libaudioutils \
- libbinder \
- libcutils \
- liblog \
- libEGL \
- libGLESv2 \
- libgui \
- libmedia \
- libdrmframework \
- libstagefright \
- libstagefright_foundation \
- libstagefright_omx \
- libsync \
- libui \
- libutils \
- libvideoeditor_osal \
-
-
-LOCAL_C_INCLUDES += \
- $(TOP)/system/media/audio_utils/include \
- $(TOP)/frameworks/av/media/libmediaplayerservice \
- $(TOP)/frameworks/av/media/libstagefright \
- $(TOP)/frameworks/av/media/libstagefright/include \
- $(TOP)/frameworks/av/media/libstagefright/rtsp \
- $(call include-path-for, corecg graphics) \
- $(TOP)/frameworks/av/libvideoeditor/osal/inc \
- $(TOP)/frameworks/av/libvideoeditor/vss/common/inc \
- $(TOP)/frameworks/av/libvideoeditor/vss/mcs/inc \
- $(TOP)/frameworks/av/libvideoeditor/vss/inc \
- $(TOP)/frameworks/av/libvideoeditor/vss/stagefrightshells/inc \
- $(TOP)/frameworks/av/libvideoeditor/lvpp \
- $(TOP)/frameworks/av/services/audioflinger \
- $(TOP)/frameworks/native/include/media/editor \
- $(TOP)/frameworks/native/include/media/openmax \
-
-
-LOCAL_SHARED_LIBRARIES += libdl
-
-LOCAL_CFLAGS += -Wno-multichar \
- -DM4_ENABLE_RENDERINGMODE \
- -DUSE_STAGEFRIGHT_CODECS \
- -DUSE_STAGEFRIGHT_AUDIODEC \
- -DUSE_STAGEFRIGHT_VIDEODEC \
- -DUSE_STAGEFRIGHT_AUDIOENC \
- -DUSE_STAGEFRIGHT_VIDEOENC \
- -DUSE_STAGEFRIGHT_READERS \
- -DUSE_STAGEFRIGHT_3GPP_READER
-
-include $(BUILD_SHARED_LIBRARY)
-
-#include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/libvideoeditor/lvpp/DummyAudioSource.cpp b/libvideoeditor/lvpp/DummyAudioSource.cpp
deleted file mode 100755
index dbcab68..0000000
--- a/libvideoeditor/lvpp/DummyAudioSource.cpp
+++ /dev/null
@@ -1,163 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "DummyAudioSource"
-#include <utils/Log.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MetaData.h>
-#include "DummyAudioSource.h"
-
-
-namespace android {
-
-//static
-sp<DummyAudioSource> DummyAudioSource::Create(
- int32_t samplingRate, int32_t channelCount,
- int64_t frameDurationUs, int64_t audioDurationUs) {
-
- ALOGV("Create ");
- return new DummyAudioSource(samplingRate,
- channelCount,
- frameDurationUs,
- audioDurationUs);
-
-}
-
-DummyAudioSource::DummyAudioSource(
- int32_t samplingRate, int32_t channelCount,
- int64_t frameDurationUs, int64_t audioDurationUs)
- : mSamplingRate(samplingRate),
- mChannelCount(channelCount),
- mFrameDurationUs(frameDurationUs),
- mNumberOfSamplePerFrame(0),
- mAudioDurationUs(audioDurationUs),
- mTimeStampUs(0),
- mBufferGroup(NULL) {
-
- mNumberOfSamplePerFrame = (int32_t)
- ((1L * mSamplingRate * mFrameDurationUs)/1000000);
- mNumberOfSamplePerFrame = mNumberOfSamplePerFrame * mChannelCount;
-
- ALOGV("Constructor: E");
- ALOGV("samplingRate = %d", samplingRate);
- ALOGV("channelCount = %d", channelCount);
- ALOGV("frameDurationUs = %lld", frameDurationUs);
- ALOGV("audioDurationUs = %lld", audioDurationUs);
- ALOGV("mNumberOfSamplePerFrame = %d", mNumberOfSamplePerFrame);
- ALOGV("Constructor: X");
-}
-
-DummyAudioSource::~DummyAudioSource() {
- /* Do nothing here? */
- ALOGV("~DummyAudioSource");
-}
-
-void DummyAudioSource::setDuration(int64_t audioDurationUs) {
- ALOGV("setDuration: %lld us added to %lld us",
- audioDurationUs, mAudioDurationUs);
-
- Mutex::Autolock autoLock(mLock);
- mAudioDurationUs += audioDurationUs;
-}
-
-status_t DummyAudioSource::start(MetaData *params) {
- ALOGV("start: E");
- status_t err = OK;
-
- mTimeStampUs = 0;
-
- mBufferGroup = new MediaBufferGroup;
- mBufferGroup->add_buffer(
- new MediaBuffer(mNumberOfSamplePerFrame * sizeof(int16_t)));
-
- ALOGV("start: X");
-
- return err;
-}
-
-status_t DummyAudioSource::stop() {
- ALOGV("stop");
-
- delete mBufferGroup;
- mBufferGroup = NULL;
-
- return OK;
-}
-
-
-sp<MetaData> DummyAudioSource::getFormat() {
- ALOGV("getFormat");
-
- sp<MetaData> meta = new MetaData;
- meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
- meta->setInt32(kKeyChannelCount, mChannelCount);
- meta->setInt32(kKeySampleRate, mSamplingRate);
- meta->setInt64(kKeyDuration, mFrameDurationUs);
- meta->setCString(kKeyDecoderComponent, "DummyAudioSource");
-
- return meta;
-}
-
-status_t DummyAudioSource::read(
- MediaBuffer **out, const MediaSource::ReadOptions *options) {
-
- ALOGV("read: E");
-
- int64_t seekTimeUs;
- ReadOptions::SeekMode mode;
-
- if (options && options->getSeekTo(&seekTimeUs, &mode)) {
- CHECK(seekTimeUs >= 0);
- mTimeStampUs = seekTimeUs;
- }
-
- {
- Mutex::Autolock autoLock(mLock);
- if (mTimeStampUs >= mAudioDurationUs) {
- ALOGI("read: EOS reached %lld > %lld",
- mTimeStampUs, mAudioDurationUs);
-
- *out = NULL;
- return ERROR_END_OF_STREAM;
- }
- }
-
- MediaBuffer *buffer;
- status_t err = mBufferGroup->acquire_buffer(&buffer);
- if (err != OK) {
- ALOGE("Failed to acquire buffer from mBufferGroup: %d", err);
- return err;
- }
-
- memset((uint8_t *) buffer->data() + buffer->range_offset(),
- 0, mNumberOfSamplePerFrame << 1);
- buffer->set_range(buffer->range_offset(), (mNumberOfSamplePerFrame << 1));
- buffer->meta_data()->setInt64(kKeyTime, mTimeStampUs);
-
- ALOGV("read: offset = %d, size = %d, mTimeStampUs = %lld",
- buffer->range_offset(), buffer->size(), mTimeStampUs);
-
- mTimeStampUs = mTimeStampUs + mFrameDurationUs;
- *out = buffer;
-
- return OK;
-}
-
-}// namespace android
diff --git a/libvideoeditor/lvpp/DummyAudioSource.h b/libvideoeditor/lvpp/DummyAudioSource.h
deleted file mode 100755
index 5f25a8c..0000000
--- a/libvideoeditor/lvpp/DummyAudioSource.h
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DUMMY_AUDIOSOURCE_H_
-#define DUMMY_AUDIOSOURCE_H_
-
-#include <media/stagefright/MediaSource.h>
-
-
-namespace android {
-
-class MetaData;
-struct MediaBufferGroup;
-
-struct DummyAudioSource : public MediaSource {
-
-public:
- static sp<DummyAudioSource> Create(
- int32_t samplingRate, int32_t channelCount,
- int64_t frameDurationUs, int64_t audioDurationUs);
-
- virtual status_t start(MetaData *params = NULL);
- virtual status_t stop();
- virtual sp<MetaData> getFormat();
-
- virtual status_t read(
- MediaBuffer **buffer,
- const MediaSource::ReadOptions *options = NULL);
-
- void setDuration(int64_t audioDurationUs);
-
-protected:
- virtual ~DummyAudioSource();
-
-private:
- int32_t mSamplingRate;
- int32_t mChannelCount;
- int64_t mFrameDurationUs;
- int32_t mNumberOfSamplePerFrame;
- int64_t mAudioDurationUs;
- int64_t mTimeStampUs;
- Mutex mLock;
-
- MediaBufferGroup *mBufferGroup;
-
- DummyAudioSource(
- int32_t samplingRate, int32_t channelCount,
- int64_t frameDurationUs, int64_t audioDurationUs);
-
- // Don't call me
- DummyAudioSource(const DummyAudioSource &);
- DummyAudioSource &operator=(const DummyAudioSource &);
-
-};
-
-}//namespace android
-
-
-#endif //DUMMY_AUDIOSOURCE_H_
-
diff --git a/libvideoeditor/lvpp/DummyVideoSource.cpp b/libvideoeditor/lvpp/DummyVideoSource.cpp
deleted file mode 100755
index 6dbcf2a..0000000
--- a/libvideoeditor/lvpp/DummyVideoSource.cpp
+++ /dev/null
@@ -1,172 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "DummyVideoSource"
-#include <inttypes.h>
-#include <stdlib.h>
-#include <utils/Log.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MetaData.h>
-#include "VideoEditorTools.h"
-#include "DummyVideoSource.h"
-
-
-namespace android {
-
-sp<DummyVideoSource> DummyVideoSource::Create(
- uint32_t width, uint32_t height,
- uint64_t clipDuration, const char *imageUri) {
-
- ALOGV("Create");
- return new DummyVideoSource(
- width, height, clipDuration, imageUri);
-
-}
-
-
-DummyVideoSource::DummyVideoSource(
- uint32_t width, uint32_t height,
- uint64_t clipDuration, const char *imageUri) {
-
- ALOGV("Constructor: E");
-
- mFrameWidth = width;
- mFrameHeight = height;
- mImageClipDuration = clipDuration;
- mUri = imageUri;
- mImageBuffer = NULL;
-
- ALOGV("%s", mUri);
- ALOGV("Constructor: X");
-}
-
-
-DummyVideoSource::~DummyVideoSource() {
- /* Do nothing here? */
- ALOGV("~DummyVideoSource");
-}
-
-
-
-status_t DummyVideoSource::start(MetaData *params) {
- ALOGV("start: E");
-
- // Get the frame buffer from the rgb file, mUri,
- // and store its content into a MediaBuffer
- status_t err = LvGetImageThumbNail(
- (const char *)mUri,
- mFrameHeight, mFrameWidth,
- (M4OSA_Void **) &mImageBuffer);
- if (err != OK) {
- ALOGE("LvGetImageThumbNail failed: %d", err);
- return err;
- }
-
- mIsFirstImageFrame = true;
- mImageSeekTime = 0;
- mImagePlayStartTime = 0;
- mFrameTimeUs = 0;
-
- ALOGV("start: X");
- return OK;
-}
-
-
-status_t DummyVideoSource::stop() {
- ALOGV("stop");
- status_t err = OK;
-
- if (mImageBuffer != NULL) {
- free(mImageBuffer);
- mImageBuffer = NULL;
- }
-
- return err;
-}
-
-
-sp<MetaData> DummyVideoSource::getFormat() {
- ALOGV("getFormat");
-
- sp<MetaData> meta = new MetaData;
- meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
- meta->setInt32(kKeyWidth, mFrameWidth);
- meta->setInt32(kKeyHeight, mFrameHeight);
- meta->setInt64(kKeyDuration, mImageClipDuration);
- meta->setCString(kKeyDecoderComponent, "DummyVideoSource");
-
- return meta;
-}
-
-status_t DummyVideoSource::read(
- MediaBuffer **out,
- const MediaSource::ReadOptions *options) {
-
- ALOGV("read: E");
-
- const int32_t kTimeScale = 1000; /* time scale in ms */
- bool seeking = false;
- int64_t seekTimeUs;
- ReadOptions::SeekMode seekMode;
- if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
- seeking = true;
- mImageSeekTime = seekTimeUs;
- M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
- }
-
- if ((mImageSeekTime == mImageClipDuration) ||
- (mFrameTimeUs == (int64_t)mImageClipDuration)) {
- ALOGV("read: EOS reached");
- *out = NULL;
- return ERROR_END_OF_STREAM;
- }
-
- status_t err = OK;
- MediaBuffer *buffer = new MediaBuffer(
- mImageBuffer, (mFrameWidth * mFrameHeight * 1.5));
-
- // Set timestamp of buffer
- if (mIsFirstImageFrame) {
- M4OSA_clockGetTime(&mImagePlayStartTime, kTimeScale);
- mFrameTimeUs = (mImageSeekTime + 1);
- ALOGV("read: jpg 1st frame timeUs = %lld, begin cut time = %" PRIu32,
- mFrameTimeUs, mImageSeekTime);
-
- mIsFirstImageFrame = false;
- } else {
- M4OSA_Time currentTimeMs;
- M4OSA_clockGetTime(&currentTimeMs, kTimeScale);
-
- mFrameTimeUs = mImageSeekTime +
- (currentTimeMs - mImagePlayStartTime) * 1000LL;
-
- ALOGV("read: jpg frame timeUs = %lld", mFrameTimeUs);
- }
-
- buffer->meta_data()->setInt64(kKeyTime, mFrameTimeUs);
- buffer->set_range(buffer->range_offset(),
- mFrameWidth * mFrameHeight * 1.5);
-
- *out = buffer;
- return err;
-}
-
-}// namespace android
diff --git a/libvideoeditor/lvpp/DummyVideoSource.h b/libvideoeditor/lvpp/DummyVideoSource.h
deleted file mode 100755
index 16514f2..0000000
--- a/libvideoeditor/lvpp/DummyVideoSource.h
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef DUMMY_VIDEOSOURCE_H_
-#define DUMMY_VIDEOSOURCE_H_
-
-#include <media/stagefright/MediaSource.h>
-#include "M4OSA_Clock.h"
-#include "M4OSA_Time.h"
-#include "M4OSA_Types.h"
-
-namespace android {
-
-class MediaBuffer;
-class MetaData;
-
-struct DummyVideoSource : public MediaSource {
-
-public:
- static sp<DummyVideoSource> Create(
- uint32_t width, uint32_t height,
- uint64_t clipDuration, const char *imageUri);
-
- virtual status_t start(MetaData *params = NULL);
- virtual status_t stop();
- virtual sp<MetaData> getFormat();
-
- virtual status_t read(
- MediaBuffer **buffer,
- const MediaSource::ReadOptions *options = NULL);
-
-protected:
- virtual ~DummyVideoSource();
-
-private:
- uint32_t mFrameWidth;
- uint32_t mFrameHeight;
- uint64_t mImageClipDuration;
- const char *mUri;
- int64_t mFrameTimeUs;
- bool mIsFirstImageFrame;
- void *mImageBuffer;
- M4OSA_Time mImagePlayStartTime;
- uint32_t mImageSeekTime;
-
- DummyVideoSource(
- uint32_t width, uint32_t height,
- uint64_t clipDuration, const char *imageUri);
-
- // Don't call me
- DummyVideoSource(const DummyVideoSource &);
- DummyVideoSource &operator=(const DummyVideoSource &);
-
-};
-
-
-}//namespace android
-
-
-#endif //DUMMY_VIDEOSOURCE_H_
-
diff --git a/libvideoeditor/lvpp/I420ColorConverter.cpp b/libvideoeditor/lvpp/I420ColorConverter.cpp
deleted file mode 100755
index 321d3fe..0000000
--- a/libvideoeditor/lvpp/I420ColorConverter.cpp
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <I420ColorConverter.h>
-#include <cutils/log.h>
-#include <dlfcn.h>
-
-I420ColorConverter::I420ColorConverter() {
- // Open the shared library
- mHandle = dlopen("libI420colorconvert.so", RTLD_NOW);
-
- if (mHandle == NULL) {
- ALOGW("I420ColorConverter: cannot load libI420colorconvert.so");
- return;
- }
-
- // Find the entry point
- void (*getI420ColorConverter)(I420ColorConverter *converter) =
- (void (*)(I420ColorConverter*)) dlsym(mHandle, "getI420ColorConverter");
-
- if (getI420ColorConverter == NULL) {
- ALOGW("I420ColorConverter: cannot load getI420ColorConverter");
- dlclose(mHandle);
- mHandle = NULL;
- return;
- }
-
- // Fill the function pointers.
- getI420ColorConverter(this);
-
- ALOGI("I420ColorConverter: libI420colorconvert.so loaded");
-}
-
-bool I420ColorConverter::isLoaded() {
- return mHandle != NULL;
-}
-
-I420ColorConverter::~I420ColorConverter() {
- if (mHandle) {
- dlclose(mHandle);
- }
-}
diff --git a/libvideoeditor/lvpp/I420ColorConverter.h b/libvideoeditor/lvpp/I420ColorConverter.h
deleted file mode 100755
index 8d48e44..0000000
--- a/libvideoeditor/lvpp/I420ColorConverter.h
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef I420_COLOR_CONVERTER_H
-#define I420_COLOR_CONVERTER_H
-
-#include <II420ColorConverter.h>
-
-// This is a wrapper around the I420 color converter functions in
-// II420ColorConverter, which is loaded from a shared library.
-class I420ColorConverter: public II420ColorConverter {
-public:
- I420ColorConverter();
- ~I420ColorConverter();
-
- // Returns true if the converter functions are successfully loaded.
- bool isLoaded();
-private:
- void* mHandle;
-};
-
-#endif /* I420_COLOR_CONVERTER_H */
diff --git a/libvideoeditor/lvpp/MODULE_LICENSE_APACHE2 b/libvideoeditor/lvpp/MODULE_LICENSE_APACHE2
deleted file mode 100644
index e69de29..0000000
--- a/libvideoeditor/lvpp/MODULE_LICENSE_APACHE2
+++ /dev/null
diff --git a/libvideoeditor/lvpp/NOTICE b/libvideoeditor/lvpp/NOTICE
deleted file mode 100644
index c5b1efa..0000000
--- a/libvideoeditor/lvpp/NOTICE
+++ /dev/null
@@ -1,190 +0,0 @@
-
- Copyright (c) 2005-2008, The Android Open Source Project
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
-
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
deleted file mode 100755
index 8b362ef..0000000
--- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp
+++ /dev/null
@@ -1,620 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "NativeWindowRenderer"
-#include "NativeWindowRenderer.h"
-
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-#include <cutils/log.h>
-#include <gui/GLConsumer.h>
-#include <gui/Surface.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include "VideoEditorTools.h"
-
-#define CHECK_EGL_ERROR CHECK(EGL_SUCCESS == eglGetError())
-#define CHECK_GL_ERROR CHECK(GLenum(GL_NO_ERROR) == glGetError())
-
-//
-// Vertex and fragment programs
-//
-
-// The matrix is derived from
-// frameworks/base/media/libstagefright/colorconversion/ColorConverter.cpp
-//
-// R * 255 = 1.164 * (Y - 16) + 1.596 * (V - 128)
-// G * 255 = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
-// B * 255 = 1.164 * (Y - 16) + 2.018 * (U - 128)
-//
-// Here we assume YUV are in the range of [0,255], RGB are in the range of
-// [0, 1]
-#define RGB2YUV_MATRIX \
-"const mat4 rgb2yuv = mat4("\
-" 65.52255, -37.79398, 111.98732, 0.00000,"\
-" 128.62729, -74.19334, -93.81088, 0.00000,"\
-" 24.92233, 111.98732, -18.17644, 0.00000,"\
-" 16.00000, 128.00000, 128.00000, 1.00000);\n"
-
-#define YUV2RGB_MATRIX \
-"const mat4 yuv2rgb = mat4("\
-" 0.00456, 0.00456, 0.00456, 0.00000,"\
-" 0.00000, -0.00153, 0.00791, 0.00000,"\
-" 0.00626, -0.00319, 0.00000, 0.00000,"\
-" -0.87416, 0.53133, -1.08599, 1.00000);\n"
-
-static const char vSrcNormal[] =
- "attribute vec4 vPosition;\n"
- "attribute vec2 vTexPos;\n"
- "uniform mat4 texMatrix;\n"
- "varying vec2 texCoords;\n"
- "varying float topDown;\n"
- "void main() {\n"
- " gl_Position = vPosition;\n"
- " texCoords = (texMatrix * vec4(vTexPos, 0.0, 1.0)).xy;\n"
- " topDown = vTexPos.y;\n"
- "}\n";
-
-static const char fSrcNormal[] =
- "#extension GL_OES_EGL_image_external : require\n"
- "precision mediump float;\n"
- "uniform samplerExternalOES texSampler;\n"
- "varying vec2 texCoords;\n"
- "void main() {\n"
- " gl_FragColor = texture2D(texSampler, texCoords);\n"
- "}\n";
-
-static const char fSrcSepia[] =
- "#extension GL_OES_EGL_image_external : require\n"
- "precision mediump float;\n"
- "uniform samplerExternalOES texSampler;\n"
- "varying vec2 texCoords;\n"
- RGB2YUV_MATRIX
- YUV2RGB_MATRIX
- "void main() {\n"
- " vec4 rgb = texture2D(texSampler, texCoords);\n"
- " vec4 yuv = rgb2yuv * rgb;\n"
- " yuv = vec4(yuv.x, 117.0, 139.0, 1.0);\n"
- " gl_FragColor = yuv2rgb * yuv;\n"
- "}\n";
-
-static const char fSrcNegative[] =
- "#extension GL_OES_EGL_image_external : require\n"
- "precision mediump float;\n"
- "uniform samplerExternalOES texSampler;\n"
- "varying vec2 texCoords;\n"
- RGB2YUV_MATRIX
- YUV2RGB_MATRIX
- "void main() {\n"
- " vec4 rgb = texture2D(texSampler, texCoords);\n"
- " vec4 yuv = rgb2yuv * rgb;\n"
- " yuv = vec4(255.0 - yuv.x, yuv.y, yuv.z, 1.0);\n"
- " gl_FragColor = yuv2rgb * yuv;\n"
- "}\n";
-
-static const char fSrcGradient[] =
- "#extension GL_OES_EGL_image_external : require\n"
- "precision mediump float;\n"
- "uniform samplerExternalOES texSampler;\n"
- "varying vec2 texCoords;\n"
- "varying float topDown;\n"
- RGB2YUV_MATRIX
- YUV2RGB_MATRIX
- "void main() {\n"
- " vec4 rgb = texture2D(texSampler, texCoords);\n"
- " vec4 yuv = rgb2yuv * rgb;\n"
- " vec4 mixin = vec4(15.0/31.0, 59.0/63.0, 31.0/31.0, 1.0);\n"
- " vec4 yuv2 = rgb2yuv * vec4((mixin.xyz * topDown), 1);\n"
- " yuv = vec4(yuv.x, yuv2.y, yuv2.z, 1);\n"
- " gl_FragColor = yuv2rgb * yuv;\n"
- "}\n";
-
-namespace android {
-
-NativeWindowRenderer::NativeWindowRenderer(sp<ANativeWindow> nativeWindow,
- int width, int height)
- : mNativeWindow(nativeWindow)
- , mDstWidth(width)
- , mDstHeight(height)
- , mLastVideoEffect(-1)
- , mNextTextureId(100)
- , mActiveInputs(0)
- , mThreadCmd(CMD_IDLE) {
- createThread(threadStart, this);
-}
-
-// The functions below run in the GL thread.
-//
-// All GL-related work is done in this thread, and other threads send
-// requests to this thread using a command code. We expect most of the
-// time there will only be one thread sending in requests, so we let
-// other threads wait until the request is finished by GL thread.
-
-int NativeWindowRenderer::threadStart(void* self) {
- ALOGD("create thread");
- ((NativeWindowRenderer*)self)->glThread();
- return 0;
-}
-
-void NativeWindowRenderer::glThread() {
- initializeEGL();
- createPrograms();
-
- Mutex::Autolock autoLock(mLock);
- bool quit = false;
- while (!quit) {
- switch (mThreadCmd) {
- case CMD_IDLE:
- mCond.wait(mLock);
- continue;
- case CMD_RENDER_INPUT:
- render(mThreadRenderInput);
- break;
- case CMD_RESERVE_TEXTURE:
- glBindTexture(GL_TEXTURE_EXTERNAL_OES, mThreadTextureId);
- CHECK_GL_ERROR;
- break;
- case CMD_DELETE_TEXTURE:
- glDeleteTextures(1, &mThreadTextureId);
- break;
- case CMD_QUIT:
- terminateEGL();
- quit = true;
- break;
- }
- // Tell the requester that the command is finished.
- mThreadCmd = CMD_IDLE;
- mCond.broadcast();
- }
- ALOGD("quit");
-}
-
-void NativeWindowRenderer::initializeEGL() {
- mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
- CHECK_EGL_ERROR;
-
- EGLint majorVersion;
- EGLint minorVersion;
- eglInitialize(mEglDisplay, &majorVersion, &minorVersion);
- CHECK_EGL_ERROR;
-
- EGLConfig config;
- EGLint numConfigs = -1;
- EGLint configAttribs[] = {
- EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
- EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
- EGL_RED_SIZE, 8,
- EGL_GREEN_SIZE, 8,
- EGL_BLUE_SIZE, 8,
- EGL_NONE
- };
- eglChooseConfig(mEglDisplay, configAttribs, &config, 1, &numConfigs);
- CHECK_EGL_ERROR;
-
- mEglSurface = eglCreateWindowSurface(mEglDisplay, config,
- mNativeWindow.get(), NULL);
- CHECK_EGL_ERROR;
-
- EGLint contextAttribs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
- mEglContext = eglCreateContext(mEglDisplay, config, EGL_NO_CONTEXT,
- contextAttribs);
- CHECK_EGL_ERROR;
-
- eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext);
- CHECK_EGL_ERROR;
-}
-
-void NativeWindowRenderer::terminateEGL() {
- eglDestroyContext(mEglDisplay, mEglContext);
- eglDestroySurface(mEglDisplay, mEglSurface);
- eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
- eglTerminate(mEglDisplay);
-}
-
-void NativeWindowRenderer::createPrograms() {
- GLuint vShader;
- loadShader(GL_VERTEX_SHADER, vSrcNormal, &vShader);
-
- const char* fSrc[NUMBER_OF_EFFECTS] = {
- fSrcNormal, fSrcSepia, fSrcNegative, fSrcGradient
- };
-
- for (int i = 0; i < NUMBER_OF_EFFECTS; i++) {
- GLuint fShader;
- loadShader(GL_FRAGMENT_SHADER, fSrc[i], &fShader);
- createProgram(vShader, fShader, &mProgram[i]);
- glDeleteShader(fShader);
- CHECK_GL_ERROR;
- }
-
- glDeleteShader(vShader);
- CHECK_GL_ERROR;
-}
-
-void NativeWindowRenderer::createProgram(
- GLuint vertexShader, GLuint fragmentShader, GLuint* outPgm) {
-
- GLuint program = glCreateProgram();
- CHECK_GL_ERROR;
-
- glAttachShader(program, vertexShader);
- CHECK_GL_ERROR;
-
- glAttachShader(program, fragmentShader);
- CHECK_GL_ERROR;
-
- glLinkProgram(program);
- CHECK_GL_ERROR;
-
- GLint linkStatus = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
- if (linkStatus != GL_TRUE) {
- GLint infoLen = 0;
- glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLen);
- if (infoLen) {
- char* buf = (char*) malloc(infoLen);
- if (buf) {
- glGetProgramInfoLog(program, infoLen, NULL, buf);
- ALOGE("Program link log:\n%s\n", buf);
- free(buf);
- }
- }
- glDeleteProgram(program);
- program = 0;
- }
-
- *outPgm = program;
-}
-
-void NativeWindowRenderer::loadShader(GLenum shaderType, const char* pSource,
- GLuint* outShader) {
- GLuint shader = glCreateShader(shaderType);
- CHECK_GL_ERROR;
-
- glShaderSource(shader, 1, &pSource, NULL);
- CHECK_GL_ERROR;
-
- glCompileShader(shader);
- CHECK_GL_ERROR;
-
- GLint compiled = 0;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
- if (!compiled) {
- GLint infoLen = 0;
- glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
- char* buf = (char*) malloc(infoLen);
- if (buf) {
- glGetShaderInfoLog(shader, infoLen, NULL, buf);
- ALOGE("Shader compile log:\n%s\n", buf);
- free(buf);
- }
- glDeleteShader(shader);
- shader = 0;
- }
- *outShader = shader;
-}
-
-NativeWindowRenderer::~NativeWindowRenderer() {
- CHECK(mActiveInputs == 0);
- startRequest(CMD_QUIT);
- sendRequest();
-}
-
-void NativeWindowRenderer::render(RenderInput* input) {
- sp<GLConsumer> ST = input->mST;
- sp<Surface> STC = input->mSTC;
-
- if (input->mIsExternalBuffer) {
- queueExternalBuffer(STC.get(), input->mBuffer,
- input->mWidth, input->mHeight);
- } else {
- queueInternalBuffer(STC.get(), input->mBuffer);
- }
-
- ST->updateTexImage();
- glClearColor(0, 0, 0, 0);
- glClear(GL_COLOR_BUFFER_BIT);
-
- calculatePositionCoordinates(input->mRenderingMode,
- input->mWidth, input->mHeight);
-
- const GLfloat textureCoordinates[] = {
- 0.0f, 1.0f,
- 0.0f, 0.0f,
- 1.0f, 0.0f,
- 1.0f, 1.0f,
- };
-
- updateProgramAndHandle(input->mVideoEffect);
-
- glVertexAttribPointer(mPositionHandle, 2, GL_FLOAT, GL_FALSE, 0,
- mPositionCoordinates);
- CHECK_GL_ERROR;
-
- glEnableVertexAttribArray(mPositionHandle);
- CHECK_GL_ERROR;
-
- glVertexAttribPointer(mTexPosHandle, 2, GL_FLOAT, GL_FALSE, 0,
- textureCoordinates);
- CHECK_GL_ERROR;
-
- glEnableVertexAttribArray(mTexPosHandle);
- CHECK_GL_ERROR;
-
- GLfloat texMatrix[16];
- ST->getTransformMatrix(texMatrix);
- glUniformMatrix4fv(mTexMatrixHandle, 1, GL_FALSE, texMatrix);
- CHECK_GL_ERROR;
-
- glBindTexture(GL_TEXTURE_EXTERNAL_OES, input->mTextureId);
- CHECK_GL_ERROR;
-
- glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- glTexParameteri(
- GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(
- GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
- CHECK_GL_ERROR;
-
- glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
- CHECK_GL_ERROR;
-
- eglSwapBuffers(mEglDisplay, mEglSurface);
-}
-
-void NativeWindowRenderer::queueInternalBuffer(ANativeWindow *anw,
- MediaBuffer* buffer) {
- int64_t timeUs;
- CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
- native_window_set_buffers_timestamp(anw, timeUs * 1000);
- status_t err = anw->queueBuffer(anw, buffer->graphicBuffer().get(), -1);
- if (err != 0) {
- ALOGE("queueBuffer failed with error %s (%d)", strerror(-err), -err);
- return;
- }
-
- sp<MetaData> metaData = buffer->meta_data();
- metaData->setInt32(kKeyRendered, 1);
-}
-
-void NativeWindowRenderer::queueExternalBuffer(ANativeWindow* anw,
- MediaBuffer* buffer, int width, int height) {
- native_window_set_buffers_geometry(anw, width, height,
- HAL_PIXEL_FORMAT_YV12);
- native_window_set_usage(anw, GRALLOC_USAGE_SW_WRITE_OFTEN);
-
- ANativeWindowBuffer* anb;
- CHECK(NO_ERROR == native_window_dequeue_buffer_and_wait(anw, &anb));
- CHECK(anb != NULL);
-
- // Copy the buffer
- uint8_t* img = NULL;
- sp<GraphicBuffer> buf(new GraphicBuffer(anb, false));
- buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
- copyI420Buffer(buffer, img, width, height, buf->getStride());
- buf->unlock();
- CHECK(NO_ERROR == anw->queueBuffer(anw, buf->getNativeBuffer(), -1));
-}
-
-void NativeWindowRenderer::copyI420Buffer(MediaBuffer* src, uint8_t* dst,
- int srcWidth, int srcHeight, int stride) {
- int strideUV = (stride / 2 + 0xf) & ~0xf;
- uint8_t* p = (uint8_t*)src->data() + src->range_offset();
- // Y
- for (int i = srcHeight; i > 0; i--) {
- memcpy(dst, p, srcWidth);
- dst += stride;
- p += srcWidth;
- }
- // The src is I420, the dst is YV12.
- // U
- p += srcWidth * srcHeight / 4;
- for (int i = srcHeight / 2; i > 0; i--) {
- memcpy(dst, p, srcWidth / 2);
- dst += strideUV;
- p += srcWidth / 2;
- }
- // V
- p -= srcWidth * srcHeight / 2;
- for (int i = srcHeight / 2; i > 0; i--) {
- memcpy(dst, p, srcWidth / 2);
- dst += strideUV;
- p += srcWidth / 2;
- }
-}
-
-void NativeWindowRenderer::updateProgramAndHandle(uint32_t videoEffect) {
- if (mLastVideoEffect == videoEffect) {
- return;
- }
-
- mLastVideoEffect = videoEffect;
- int i;
- switch (mLastVideoEffect) {
- case VIDEO_EFFECT_NONE:
- i = 0;
- break;
- case VIDEO_EFFECT_SEPIA:
- i = 1;
- break;
- case VIDEO_EFFECT_NEGATIVE:
- i = 2;
- break;
- case VIDEO_EFFECT_GRADIENT:
- i = 3;
- break;
- default:
- i = 0;
- break;
- }
- glUseProgram(mProgram[i]);
- CHECK_GL_ERROR;
-
- mPositionHandle = glGetAttribLocation(mProgram[i], "vPosition");
- mTexPosHandle = glGetAttribLocation(mProgram[i], "vTexPos");
- mTexMatrixHandle = glGetUniformLocation(mProgram[i], "texMatrix");
- CHECK_GL_ERROR;
-}
-
-void NativeWindowRenderer::calculatePositionCoordinates(
- M4xVSS_MediaRendering renderingMode, int srcWidth, int srcHeight) {
- float x, y;
- switch (renderingMode) {
- case M4xVSS_kResizing:
- default:
- x = 1;
- y = 1;
- break;
- case M4xVSS_kCropping:
- x = float(srcWidth) / mDstWidth;
- y = float(srcHeight) / mDstHeight;
- // Make the smaller side 1
- if (x > y) {
- x /= y;
- y = 1;
- } else {
- y /= x;
- x = 1;
- }
- break;
- case M4xVSS_kBlackBorders:
- x = float(srcWidth) / mDstWidth;
- y = float(srcHeight) / mDstHeight;
- // Make the larger side 1
- if (x > y) {
- y /= x;
- x = 1;
- } else {
- x /= y;
- y = 1;
- }
- break;
- }
-
- mPositionCoordinates[0] = -x;
- mPositionCoordinates[1] = y;
- mPositionCoordinates[2] = -x;
- mPositionCoordinates[3] = -y;
- mPositionCoordinates[4] = x;
- mPositionCoordinates[5] = -y;
- mPositionCoordinates[6] = x;
- mPositionCoordinates[7] = y;
-}
-
-//
-// The functions below run in other threads.
-//
-
-void NativeWindowRenderer::startRequest(int cmd) {
- mLock.lock();
- while (mThreadCmd != CMD_IDLE) {
- mCond.wait(mLock);
- }
- mThreadCmd = cmd;
-}
-
-void NativeWindowRenderer::sendRequest() {
- mCond.broadcast();
- while (mThreadCmd != CMD_IDLE) {
- mCond.wait(mLock);
- }
- mLock.unlock();
-}
-
-RenderInput* NativeWindowRenderer::createRenderInput() {
- ALOGD("new render input %d", mNextTextureId);
- RenderInput* input = new RenderInput(this, mNextTextureId);
-
- startRequest(CMD_RESERVE_TEXTURE);
- mThreadTextureId = mNextTextureId;
- sendRequest();
-
- mNextTextureId++;
- mActiveInputs++;
- return input;
-}
-
-void NativeWindowRenderer::destroyRenderInput(RenderInput* input) {
- ALOGD("destroy render input %d", input->mTextureId);
- GLuint textureId = input->mTextureId;
- delete input;
-
- startRequest(CMD_DELETE_TEXTURE);
- mThreadTextureId = textureId;
- sendRequest();
-
- mActiveInputs--;
-}
-
-//
-// RenderInput
-//
-
-RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId)
- : mRenderer(renderer)
- , mTextureId(textureId) {
- sp<BufferQueue> bq = new BufferQueue();
- mST = new GLConsumer(bq, mTextureId);
- mSTC = new Surface(bq);
- native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA);
-}
-
-RenderInput::~RenderInput() {
-}
-
-ANativeWindow* RenderInput::getTargetWindow() {
- return mSTC.get();
-}
-
-void RenderInput::updateVideoSize(sp<MetaData> meta) {
- CHECK(meta->findInt32(kKeyWidth, &mWidth));
- CHECK(meta->findInt32(kKeyHeight, &mHeight));
-
- int left, top, right, bottom;
- if (meta->findRect(kKeyCropRect, &left, &top, &right, &bottom)) {
- mWidth = right - left + 1;
- mHeight = bottom - top + 1;
- }
-
- // If rotation degrees is 90 or 270, swap width and height
- // (mWidth and mHeight are the _rotated_ source rectangle).
- int32_t rotationDegrees;
- if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
- rotationDegrees = 0;
- }
-
- if (rotationDegrees == 90 || rotationDegrees == 270) {
- int tmp = mWidth;
- mWidth = mHeight;
- mHeight = tmp;
- }
-}
-
-void RenderInput::render(MediaBuffer* buffer, uint32_t videoEffect,
- M4xVSS_MediaRendering renderingMode, bool isExternalBuffer) {
- mVideoEffect = videoEffect;
- mRenderingMode = renderingMode;
- mIsExternalBuffer = isExternalBuffer;
- mBuffer = buffer;
-
- mRenderer->startRequest(NativeWindowRenderer::CMD_RENDER_INPUT);
- mRenderer->mThreadRenderInput = this;
- mRenderer->sendRequest();
-}
-
-} // namespace android
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.h b/libvideoeditor/lvpp/NativeWindowRenderer.h
deleted file mode 100755
index 26b4cba..0000000
--- a/libvideoeditor/lvpp/NativeWindowRenderer.h
+++ /dev/null
@@ -1,182 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef NATIVE_WINDOW_RENDERER_H_
-#define NATIVE_WINDOW_RENDERER_H_
-
-#include <EGL/egl.h>
-#include <GLES2/gl2.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MetaData.h>
-#include <utils/RefBase.h>
-#include <utils/threads.h>
-
-#include "M4xVSS_API.h"
-
-// The NativeWindowRenderer draws video frames stored in MediaBuffers to
-// an ANativeWindow. It can apply "rendering mode" and color effects to
-// the frames. "Rendering mode" is the option to do resizing, cropping,
-// or black-bordering when the source and destination aspect ratio are
-// different. Color effects include sepia, negative, and gradient.
-//
-// The input to NativeWindowRenderer is provided by the RenderInput class,
-// and there can be multiple active RenderInput at the same time. Although
-// we only expect that happens briefly when one clip is about to finish
-// and the next clip is about to start.
-//
-// We allocate a Surface for each RenderInput and the user can use
-// the getTargetWindow() function to get the corresponding ANativeWindow
-// for that Surface. The intention is that the user can pass that
-// ANativeWindow to OMXCodec::Create() so the codec can decode directly
-// to buffers provided by the texture.
-
-namespace android {
-
-class GLConsumer;
-class Surface;
-class RenderInput;
-
-class NativeWindowRenderer {
-public:
- NativeWindowRenderer(sp<ANativeWindow> nativeWindow, int width, int height);
- ~NativeWindowRenderer();
-
- RenderInput* createRenderInput();
- void destroyRenderInput(RenderInput* input);
-
-private:
- // No copy constructor and assignment
- NativeWindowRenderer(const NativeWindowRenderer &);
- NativeWindowRenderer &operator=(const NativeWindowRenderer &);
-
- // Initialization and finialization
- void initializeEGL();
- void terminateEGL();
- void createPrograms();
- void createProgram(
- GLuint vertexShader, GLuint fragmentShader, GLuint* outPgm);
- void loadShader(
- GLenum shaderType, const char* pSource, GLuint* outShader);
-
- // These functions are executed every frame.
- void render(RenderInput* input);
- void queueInternalBuffer(ANativeWindow* anw, MediaBuffer* buffer);
- void queueExternalBuffer(ANativeWindow* anw, MediaBuffer* buffer,
- int width, int height);
- void copyI420Buffer(MediaBuffer* src, uint8_t* dst,
- int srcWidth, int srcHeight, int stride);
- void updateProgramAndHandle(uint32_t videoEffect);
- void calculatePositionCoordinates(M4xVSS_MediaRendering renderingMode,
- int srcWidth, int srcHeight);
-
- // These variables are initialized once and doesn't change afterwards.
- sp<ANativeWindow> mNativeWindow;
- int mDstWidth, mDstHeight;
- EGLDisplay mEglDisplay;
- EGLSurface mEglSurface;
- EGLContext mEglContext;
- enum {
- EFFECT_NORMAL,
- EFFECT_SEPIA,
- EFFECT_NEGATIVE,
- EFFECT_GRADIENT,
- NUMBER_OF_EFFECTS
- };
- GLuint mProgram[NUMBER_OF_EFFECTS];
-
- // We use one shader program for each effect. mLastVideoEffect remembers
- // the program used for the last frame. when the effect used changes,
- // we change the program used and update the handles.
- uint32_t mLastVideoEffect;
- GLint mPositionHandle;
- GLint mTexPosHandle;
- GLint mTexMatrixHandle;
-
- // This is the vertex coordinates used for the frame texture.
- // It's calculated according the the rendering mode and the source and
- // destination aspect ratio.
- GLfloat mPositionCoordinates[8];
-
- // We use a different GL id for each Surface.
- GLuint mNextTextureId;
-
- // Number of existing RenderInputs, just for debugging.
- int mActiveInputs;
-
- // The GL thread functions
- static int threadStart(void* self);
- void glThread();
-
- // These variables are used to communicate between the GL thread and
- // other threads.
- Mutex mLock;
- Condition mCond;
- enum {
- CMD_IDLE,
- CMD_RENDER_INPUT,
- CMD_RESERVE_TEXTURE,
- CMD_DELETE_TEXTURE,
- CMD_QUIT,
- };
- int mThreadCmd;
- RenderInput* mThreadRenderInput;
- GLuint mThreadTextureId;
-
- // These functions are used to send commands to the GL thread.
- // sendRequest() also waits for the GL thread acknowledges the
- // command is finished.
- void startRequest(int cmd);
- void sendRequest();
-
- friend class RenderInput;
-};
-
-class RenderInput {
-public:
- // Returns the ANativeWindow corresponds to the Surface.
- ANativeWindow* getTargetWindow();
-
- // Updates video frame size from the MediaSource's metadata. Specifically
- // we look for kKeyWidth, kKeyHeight, and (optionally) kKeyCropRect.
- void updateVideoSize(sp<MetaData> meta);
-
- // Renders the buffer with the given video effect and rending mode.
- // The video effets are defined in VideoEditorTools.h
- // Set isExternalBuffer to true only when the buffer given is not
- // provided by the Surface.
- void render(MediaBuffer *buffer, uint32_t videoEffect,
- M4xVSS_MediaRendering renderingMode, bool isExternalBuffer);
-private:
- RenderInput(NativeWindowRenderer* renderer, GLuint textureId);
- ~RenderInput();
- NativeWindowRenderer* mRenderer;
- GLuint mTextureId;
- sp<GLConsumer> mST;
- sp<Surface> mSTC;
- int mWidth, mHeight;
-
- // These are only valid during render() calls
- uint32_t mVideoEffect;
- M4xVSS_MediaRendering mRenderingMode;
- bool mIsExternalBuffer;
- MediaBuffer* mBuffer;
-
- friend class NativeWindowRenderer;
-};
-
-} // namespace android
-
-#endif // NATIVE_WINDOW_RENDERER_H_
diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp
deleted file mode 100755
index 2bd9f84..0000000
--- a/libvideoeditor/lvpp/PreviewPlayer.cpp
+++ /dev/null
@@ -1,2082 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "PreviewPlayer"
-#include <utils/Log.h>
-
-#include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
-#include <media/IMediaPlayerService.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractor.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/OMXCodec.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <gui/Surface.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/Surface.h>
-
-#include "VideoEditorPreviewController.h"
-#include "DummyAudioSource.h"
-#include "DummyVideoSource.h"
-#include "VideoEditorSRC.h"
-#include "PreviewPlayer.h"
-
-namespace android {
-
-
-void addBatteryData(uint32_t params) {
- sp<IBinder> binder =
- defaultServiceManager()->getService(String16("media.player"));
- sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
- CHECK(service.get() != NULL);
-
- service->addBatteryData(params);
-}
-
-struct PreviewPlayerEvent : public TimedEventQueue::Event {
- PreviewPlayerEvent(
- PreviewPlayer *player,
- void (PreviewPlayer::*method)())
- : mPlayer(player),
- mMethod(method) {
- }
-
-protected:
- virtual ~PreviewPlayerEvent() {}
-
- virtual void fire(TimedEventQueue *queue, int64_t /* now_us */) {
- (mPlayer->*mMethod)();
- }
-
-private:
- PreviewPlayer *mPlayer;
- void (PreviewPlayer::*mMethod)();
-
- PreviewPlayerEvent(const PreviewPlayerEvent &);
- PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
-};
-
-PreviewPlayer::PreviewPlayer(NativeWindowRenderer* renderer)
- : mQueueStarted(false),
- mTimeSource(NULL),
- mVideoRendererIsPreview(false),
- mAudioPlayer(NULL),
- mDisplayWidth(0),
- mDisplayHeight(0),
- mFlags(0),
- mExtractorFlags(0),
- mVideoBuffer(NULL),
- mLastVideoTimeUs(-1),
- mNativeWindowRenderer(renderer),
- mCurrFramingEffectIndex(0),
- mFrameRGBBuffer(NULL),
- mFrameYUVBuffer(NULL) {
-
- CHECK_EQ(mClient.connect(), (status_t)OK);
- DataSource::RegisterDefaultSniffers();
-
-
- mVideoRenderer = NULL;
- mEffectsSettings = NULL;
- mAudioPlayer = NULL;
- mAudioMixStoryBoardTS = 0;
- mCurrentMediaBeginCutTime = 0;
- mCurrentMediaVolumeValue = 0;
- mNumberEffects = 0;
- mDecodedVideoTs = 0;
- mDecVideoTsStoryBoard = 0;
- mCurrentVideoEffect = VIDEO_EFFECT_NONE;
- mProgressCbInterval = 0;
- mNumberDecVideoFrames = 0;
- mOverlayUpdateEventPosted = false;
- mIsChangeSourceRequired = true;
-
- mVideoEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoEvent);
- mVideoEventPending = false;
- mVideoLagEvent = new PreviewPlayerEvent(this, &PreviewPlayer::onVideoLagUpdate);
- mVideoEventPending = false;
- mCheckAudioStatusEvent = new PreviewPlayerEvent(
- this, &PreviewPlayer::onCheckAudioStatus);
- mAudioStatusEventPending = false;
- mStreamDoneEvent = new PreviewPlayerEvent(
- this, &PreviewPlayer::onStreamDone);
- mStreamDoneEventPending = false;
- mProgressCbEvent = new PreviewPlayerEvent(this,
- &PreviewPlayer::onProgressCbEvent);
-
- mOverlayUpdateEvent = new PreviewPlayerEvent(this,
- &PreviewPlayer::onUpdateOverlayEvent);
- mProgressCbEventPending = false;
-
- mOverlayUpdateEventPending = false;
- mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
- mIsFiftiesEffectStarted = false;
- reset();
-}
-
-PreviewPlayer::~PreviewPlayer() {
-
- if (mQueueStarted) {
- mQueue.stop();
- }
-
- reset();
-
- if (mVideoRenderer) {
- mNativeWindowRenderer->destroyRenderInput(mVideoRenderer);
- }
-
- Mutex::Autolock lock(mLock);
- clear_l();
- mClient.disconnect();
-}
-
-void PreviewPlayer::cancelPlayerEvents_l(bool updateProgressCb) {
- mQueue.cancelEvent(mVideoEvent->eventID());
- mVideoEventPending = false;
- mQueue.cancelEvent(mStreamDoneEvent->eventID());
- mStreamDoneEventPending = false;
- mQueue.cancelEvent(mCheckAudioStatusEvent->eventID());
- mAudioStatusEventPending = false;
- mQueue.cancelEvent(mVideoLagEvent->eventID());
- mVideoLagEventPending = false;
- if (updateProgressCb) {
- mQueue.cancelEvent(mProgressCbEvent->eventID());
- mProgressCbEventPending = false;
- }
-}
-
-status_t PreviewPlayer::setDataSource(const char *path) {
- Mutex::Autolock autoLock(mLock);
- return setDataSource_l(path);
-}
-
-status_t PreviewPlayer::setDataSource_l(const char *path) {
- reset_l();
-
- mUri = path;
-
- // The actual work will be done during preparation in the call to
- // ::finishSetDataSource_l to avoid blocking the calling thread in
- // setDataSource for any significant time.
- return OK;
-}
-
-status_t PreviewPlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
- bool haveAudio = false;
- bool haveVideo = false;
- for (size_t i = 0; i < extractor->countTracks(); ++i) {
- sp<MetaData> meta = extractor->getTrackMetaData(i);
-
- const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- if (!haveVideo && !strncasecmp(mime, "video/", 6)) {
- setVideoSource(extractor->getTrack(i));
- haveVideo = true;
- } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) {
- setAudioSource(extractor->getTrack(i));
- haveAudio = true;
-
- if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
- // Only do this for vorbis audio, none of the other audio
- // formats even support this ringtone specific hack and
- // retrieving the metadata on some extractors may turn out
- // to be very expensive.
- sp<MetaData> fileMeta = extractor->getMetaData();
- int32_t loop;
- if (fileMeta != NULL
- && fileMeta->findInt32(kKeyAutoLoop, &loop)
- && loop != 0) {
- mFlags |= AUTO_LOOPING;
- }
- }
- }
-
- if (haveAudio && haveVideo) {
- break;
- }
- }
-
- /* Add the support for Dummy audio*/
- if( !haveAudio ){
- mAudioTrack = DummyAudioSource::Create(32000, 2, 20000,
- ((mPlayEndTimeMsec)*1000LL));
- if(mAudioTrack != NULL) {
- haveAudio = true;
- }
- }
-
- if (!haveAudio && !haveVideo) {
- return UNKNOWN_ERROR;
- }
-
- mExtractorFlags = extractor->flags();
- return OK;
-}
-
-status_t PreviewPlayer::setDataSource_l_jpg() {
- ALOGV("setDataSource_l_jpg");
-
- M4OSA_ERR err = M4NO_ERROR;
-
- mAudioSource = DummyAudioSource::Create(32000, 2, 20000,
- ((mPlayEndTimeMsec)*1000LL));
- if(mAudioSource != NULL) {
- setAudioSource(mAudioSource);
- }
- status_t error = mAudioSource->start();
- if (error != OK) {
- ALOGE("Error starting dummy audio source");
- mAudioSource.clear();
- return err;
- }
-
- mDurationUs = (mPlayEndTimeMsec - mPlayBeginTimeMsec)*1000LL;
-
- mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
- mDurationUs, mUri);
-
- updateSizeToRender(mVideoSource->getFormat());
- setVideoSource(mVideoSource);
- status_t err1 = mVideoSource->start();
- if (err1 != OK) {
- mVideoSource.clear();
- return err;
- }
-
- mIsVideoSourceJpg = true;
- return OK;
-}
-
-void PreviewPlayer::reset_l() {
-
- if (mFlags & PREPARING) {
- mFlags |= PREPARE_CANCELLED;
- }
-
- while (mFlags & PREPARING) {
- mPreparedCondition.wait(mLock);
- }
-
- cancelPlayerEvents_l();
- mAudioTrack.clear();
- mVideoTrack.clear();
-
- // Shutdown audio first, so that the respone to the reset request
- // appears to happen instantaneously as far as the user is concerned
- // If we did this later, audio would continue playing while we
- // shutdown the video-related resources and the player appear to
- // not be as responsive to a reset request.
- if (mAudioPlayer == NULL && mAudioSource != NULL) {
- // If we had an audio player, it would have effectively
- // taken possession of the audio source and stopped it when
- // _it_ is stopped. Otherwise this is still our responsibility.
- mAudioSource->stop();
- }
- mAudioSource.clear();
-
- mTimeSource = NULL;
-
- //Single audio player instance used
- //So donot delete it here
- //It is deleted from PreviewController class
- //delete mAudioPlayer;
- mAudioPlayer = NULL;
-
- if (mVideoBuffer) {
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- }
-
- if (mVideoSource != NULL) {
- mVideoSource->stop();
-
- // The following hack is necessary to ensure that the OMX
- // component is completely released by the time we may try
- // to instantiate it again.
- wp<MediaSource> tmp = mVideoSource;
- mVideoSource.clear();
- while (tmp.promote() != NULL) {
- usleep(1000);
- }
- IPCThreadState::self()->flushCommands();
- }
-
- mDurationUs = -1;
- mFlags = 0;
- mExtractorFlags = 0;
- mVideoWidth = mVideoHeight = -1;
- mTimeSourceDeltaUs = 0;
- mVideoTimeUs = 0;
-
- mSeeking = NO_SEEK;
- mSeekNotificationSent = false;
- mSeekTimeUs = 0;
-
- mUri.setTo("");
-
- mCurrentVideoEffect = VIDEO_EFFECT_NONE;
- mIsVideoSourceJpg = false;
- mFrameRGBBuffer = NULL;
- if(mFrameYUVBuffer != NULL) {
- free(mFrameYUVBuffer);
- mFrameYUVBuffer = NULL;
- }
-}
-
-status_t PreviewPlayer::play() {
- ALOGV("play");
- Mutex::Autolock autoLock(mLock);
-
- mFlags &= ~CACHE_UNDERRUN;
- mFlags &= ~INFORMED_AV_EOS;
- return play_l();
-}
-
-status_t PreviewPlayer::startAudioPlayer_l() {
- ALOGV("startAudioPlayer_l");
- CHECK(!(mFlags & AUDIO_RUNNING));
-
- if (mAudioSource == NULL || mAudioPlayer == NULL) {
- return OK;
- }
-
- if (!(mFlags & AUDIOPLAYER_STARTED)) {
- mFlags |= AUDIOPLAYER_STARTED;
-
- // We've already started the MediaSource in order to enable
- // the prefetcher to read its data.
- status_t err = mAudioPlayer->start(
- true /* sourceAlreadyStarted */);
-
- if (err != OK) {
- notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
- return err;
- }
- } else {
- mAudioPlayer->resume();
- }
-
- mFlags |= AUDIO_RUNNING;
-
- mWatchForAudioEOS = true;
-
- return OK;
-}
-
-status_t PreviewPlayer::setAudioPlayer(VideoEditorAudioPlayer *audioPlayer) {
- ALOGV("setAudioPlayer");
- Mutex::Autolock autoLock(mLock);
- CHECK(!(mFlags & PLAYING));
- mAudioPlayer = audioPlayer;
-
- ALOGV("SetAudioPlayer");
- mIsChangeSourceRequired = true;
-
- // check if the new and old source are dummy
- sp<MediaSource> anAudioSource = mAudioPlayer->getSource();
- if (anAudioSource == NULL) {
- // Audio player does not have any source set.
- ALOGV("setAudioPlayer: Audio player does not have any source set");
- return OK;
- }
-
- // If new video source is not dummy, then always change source
- // Else audio player continues using old audio source and there are
- // frame drops to maintain AV sync
- sp<MetaData> meta;
- if (mVideoSource != NULL) {
- meta = mVideoSource->getFormat();
- const char *pVidSrcType;
- if (meta->findCString(kKeyDecoderComponent, &pVidSrcType)) {
- if (strcmp(pVidSrcType, "DummyVideoSource") != 0) {
- ALOGV(" Video clip with silent audio; need to change source");
- return OK;
- }
- }
- }
-
- const char *pSrcType1;
- const char *pSrcType2;
- meta = anAudioSource->getFormat();
-
- if (meta->findCString(kKeyDecoderComponent, &pSrcType1)) {
- if (strcmp(pSrcType1, "DummyAudioSource") == 0) {
- meta = mAudioSource->getFormat();
- if (meta->findCString(kKeyDecoderComponent, &pSrcType2)) {
- if (strcmp(pSrcType2, "DummyAudioSource") == 0) {
- mIsChangeSourceRequired = false;
- // Just set the new play duration for the existing source
- MediaSource *pMediaSrc = anAudioSource.get();
- DummyAudioSource *pDummyAudioSource = (DummyAudioSource*)pMediaSrc;
- //Increment the duration of audio source
- pDummyAudioSource->setDuration(
- (int64_t)((mPlayEndTimeMsec)*1000LL));
-
- // Stop the new audio source
- // since we continue using old source
- ALOGV("setAudioPlayer: stop new audio source");
- mAudioSource->stop();
- }
- }
- }
- }
-
- return OK;
-}
-
-void PreviewPlayer::onStreamDone() {
- ALOGV("onStreamDone");
- // Posted whenever any stream finishes playing.
-
- Mutex::Autolock autoLock(mLock);
- if (!mStreamDoneEventPending) {
- return;
- }
- mStreamDoneEventPending = false;
-
- if (mStreamDoneStatus != ERROR_END_OF_STREAM) {
- ALOGV("MEDIA_ERROR %d", mStreamDoneStatus);
-
- notifyListener_l(
- MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, mStreamDoneStatus);
-
- pause_l(true /* at eos */);
-
- mFlags |= AT_EOS;
- return;
- }
-
- const bool allDone =
- (mVideoSource == NULL || (mFlags & VIDEO_AT_EOS))
- && (mAudioSource == NULL || (mFlags & AUDIO_AT_EOS));
-
- if (!allDone) {
- return;
- }
-
- if (mFlags & (LOOPING | AUTO_LOOPING)) {
- seekTo_l(0);
-
- if (mVideoSource != NULL) {
- postVideoEvent_l();
- }
- } else {
- ALOGV("MEDIA_PLAYBACK_COMPLETE");
- //pause before sending event
- pause_l(true /* at eos */);
-
- //This lock is used to syncronize onStreamDone() in PreviewPlayer and
- //stopPreview() in PreviewController
- Mutex::Autolock autoLock(mLockControl);
- /* Make sure PreviewPlayer only notifies MEDIA_PLAYBACK_COMPLETE once for each clip!
- * It happens twice in following scenario.
- * To make the clips in preview storyboard are played and switched smoothly,
- * PreviewController uses two PreviewPlayer instances and one AudioPlayer.
- * The two PreviewPlayer use the same AudioPlayer to play the audio,
- * and change the audio source of the AudioPlayer.
- * If the audio source of current playing clip and next clip are dummy
- * audio source(image or video without audio), it will not change the audio source
- * to avoid the "audio glitch", and keep using the current audio source.
- * When the video of current clip reached the EOS, PreviewPlayer will set EOS flag
- * for video and audio, and it will notify MEDIA_PLAYBACK_COMPLETE.
- * But the audio(dummy audio source) is still playing(for next clip),
- * and when it reached the EOS, and video reached EOS,
- * PreviewPlayer will notify MEDIA_PLAYBACK_COMPLETE again. */
- if (!(mFlags & INFORMED_AV_EOS)) {
- notifyListener_l(MEDIA_PLAYBACK_COMPLETE);
- mFlags |= INFORMED_AV_EOS;
- }
- mFlags |= AT_EOS;
- ALOGV("onStreamDone end");
- return;
- }
-}
-
-
-status_t PreviewPlayer::play_l() {
- ALOGV("play_l");
-
- mFlags &= ~SEEK_PREVIEW;
-
- if (mFlags & PLAYING) {
- return OK;
- }
- mStartNextPlayer = false;
-
- if (!(mFlags & PREPARED)) {
- status_t err = prepare_l();
-
- if (err != OK) {
- return err;
- }
- }
-
- mFlags |= PLAYING;
- mFlags |= FIRST_FRAME;
-
- bool deferredAudioSeek = false;
-
- if (mAudioSource != NULL) {
- if (mAudioPlayer == NULL) {
- if (mAudioSink != NULL) {
-
- mAudioPlayer = new VideoEditorAudioPlayer(mAudioSink, this);
- mAudioPlayer->setSource(mAudioSource);
-
- mAudioPlayer->setAudioMixSettings(
- mPreviewPlayerAudioMixSettings);
-
- mAudioPlayer->setAudioMixPCMFileHandle(
- mAudioMixPCMFileHandle);
-
- mAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
- mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
- mCurrentMediaVolumeValue);
-
- mFlags |= AUDIOPLAYER_STARTED;
- // We've already started the MediaSource in order to enable
- // the prefetcher to read its data.
- status_t err = mAudioPlayer->start(
- true /* sourceAlreadyStarted */);
-
- if (err != OK) {
- //delete mAudioPlayer;
- mAudioPlayer = NULL;
-
- mFlags &= ~(PLAYING | FIRST_FRAME);
- return err;
- }
-
- mTimeSource = mAudioPlayer;
- mFlags |= AUDIO_RUNNING;
- deferredAudioSeek = true;
- mWatchForAudioSeekComplete = false;
- mWatchForAudioEOS = true;
- }
- } else {
- bool isAudioPlayerStarted = mAudioPlayer->isStarted();
-
- if (mIsChangeSourceRequired == true) {
- ALOGV("play_l: Change audio source required");
-
- if (isAudioPlayerStarted == true) {
- mAudioPlayer->pause();
- }
-
- mAudioPlayer->setSource(mAudioSource);
- mAudioPlayer->setObserver(this);
-
- mAudioPlayer->setAudioMixSettings(
- mPreviewPlayerAudioMixSettings);
-
- mAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
- mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
- mCurrentMediaVolumeValue);
-
- if (isAudioPlayerStarted == true) {
- mAudioPlayer->resume();
- } else {
- status_t err = OK;
- err = mAudioPlayer->start(true);
- if (err != OK) {
- mAudioPlayer = NULL;
- mAudioPlayer = NULL;
-
- mFlags &= ~(PLAYING | FIRST_FRAME);
- return err;
- }
- }
- } else {
- ALOGV("play_l: No Source change required");
- mAudioPlayer->setAudioMixStoryBoardSkimTimeStamp(
- mAudioMixStoryBoardTS, mCurrentMediaBeginCutTime,
- mCurrentMediaVolumeValue);
-
- mAudioPlayer->resume();
- }
-
- mFlags |= AUDIOPLAYER_STARTED;
- mFlags |= AUDIO_RUNNING;
- mTimeSource = mAudioPlayer;
- deferredAudioSeek = true;
- mWatchForAudioSeekComplete = false;
- mWatchForAudioEOS = true;
- }
- }
-
- if (mTimeSource == NULL && mAudioPlayer == NULL) {
- mTimeSource = &mSystemTimeSource;
- }
-
- // Set the seek option for Image source files and read.
- // This resets the timestamping for image play
- if (mIsVideoSourceJpg) {
- MediaSource::ReadOptions options;
- MediaBuffer *aLocalBuffer;
- options.setSeekTo(mSeekTimeUs);
- mVideoSource->read(&aLocalBuffer, &options);
- aLocalBuffer->release();
- }
-
- if (mVideoSource != NULL) {
- // Kick off video playback
- postVideoEvent_l();
- }
-
- if (deferredAudioSeek) {
- // If there was a seek request while we were paused
- // and we're just starting up again, honor the request now.
- seekAudioIfNecessary_l();
- }
-
- if (mFlags & AT_EOS) {
- // Legacy behaviour, if a stream finishes playing and then
- // is started again, we play from the start...
- seekTo_l(0);
- }
-
- return OK;
-}
-
-
-status_t PreviewPlayer::initRenderer_l() {
- if (mSurface != NULL) {
- if(mVideoRenderer == NULL) {
- mVideoRenderer = mNativeWindowRenderer->createRenderInput();
- if (mVideoSource != NULL) {
- updateSizeToRender(mVideoSource->getFormat());
- }
- }
- }
- return OK;
-}
-
-
-status_t PreviewPlayer::seekTo(int64_t timeUs) {
- Mutex::Autolock autoLock(mLock);
- if ((mExtractorFlags & MediaExtractor::CAN_SEEK) || (mIsVideoSourceJpg)) {
- return seekTo_l(timeUs);
- }
-
- return OK;
-}
-
-
-status_t PreviewPlayer::getVideoDimensions(
- int32_t *width, int32_t *height) const {
- Mutex::Autolock autoLock(mLock);
-
- if (mVideoWidth < 0 || mVideoHeight < 0) {
- return UNKNOWN_ERROR;
- }
-
- *width = mVideoWidth;
- *height = mVideoHeight;
-
- return OK;
-}
-
-
-status_t PreviewPlayer::initAudioDecoder_l() {
- sp<MetaData> meta = mAudioTrack->getFormat();
- const char *mime;
- CHECK(meta->findCString(kKeyMIMEType, &mime));
-
- if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
- mAudioSource = mAudioTrack;
- } else {
- sp<MediaSource> aRawSource;
- aRawSource = OMXCodec::Create(
- mClient.interface(), mAudioTrack->getFormat(),
- false, // createEncoder
- mAudioTrack);
-
- if(aRawSource != NULL) {
- mAudioSource = new VideoEditorSRC(aRawSource);
- }
- }
-
- if (mAudioSource != NULL) {
- int64_t durationUs;
- if (mAudioTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
- setDuration_l(durationUs);
- }
- status_t err = mAudioSource->start();
-
- if (err != OK) {
- mAudioSource.clear();
- return err;
- }
- } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_QCELP)) {
- // For legacy reasons we're simply going to ignore the absence
- // of an audio decoder for QCELP instead of aborting playback
- // altogether.
- return OK;
- }
-
- return mAudioSource != NULL ? OK : UNKNOWN_ERROR;
-}
-
-status_t PreviewPlayer::initVideoDecoder_l(uint32_t flags) {
- initRenderer_l();
-
- if (mVideoRenderer == NULL) {
- ALOGE("Cannot create renderer");
- return UNKNOWN_ERROR;
- }
-
- mVideoSource = OMXCodec::Create(
- mClient.interface(), mVideoTrack->getFormat(),
- false,
- mVideoTrack,
- NULL, flags, mVideoRenderer->getTargetWindow());
-
- if (mVideoSource != NULL) {
- int64_t durationUs;
- if (mVideoTrack->getFormat()->findInt64(kKeyDuration, &durationUs)) {
- setDuration_l(durationUs);
- }
-
- updateSizeToRender(mVideoTrack->getFormat());
-
- status_t err = mVideoSource->start();
-
- if (err != OK) {
- mVideoSource.clear();
- return err;
- }
- }
-
- return mVideoSource != NULL ? OK : UNKNOWN_ERROR;
-}
-
-
-void PreviewPlayer::onVideoEvent() {
- uint32_t i=0;
- M4OSA_ERR err1 = M4NO_ERROR;
- int64_t imageFrameTimeUs = 0;
-
- Mutex::Autolock autoLock(mLock);
- if (!mVideoEventPending) {
- // The event has been cancelled in reset_l() but had already
- // been scheduled for execution at that time.
- return;
- }
- mVideoEventPending = false;
-
- if (mFlags & SEEK_PREVIEW) {
- mFlags &= ~SEEK_PREVIEW;
- return;
- }
-
- TimeSource *ts_st = &mSystemTimeSource;
- int64_t timeStartUs = ts_st->getRealTimeUs();
-
- if (mSeeking != NO_SEEK) {
-
- if(mAudioSource != NULL) {
-
- // We're going to seek the video source first, followed by
- // the audio source.
- // In order to avoid jumps in the DataSource offset caused by
- // the audio codec prefetching data from the old locations
- // while the video codec is already reading data from the new
- // locations, we'll "pause" the audio source, causing it to
- // stop reading input data until a subsequent seek.
-
- if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
- mAudioPlayer->pause();
- mFlags &= ~AUDIO_RUNNING;
- }
- mAudioSource->pause();
- }
- }
-
- if (!mVideoBuffer) {
- MediaSource::ReadOptions options;
- if (mSeeking != NO_SEEK) {
- ALOGV("LV PLAYER seeking to %lld us (%.2f secs)", mSeekTimeUs,
- mSeekTimeUs / 1E6);
-
- options.setSeekTo(
- mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
- }
- for (;;) {
- status_t err = mVideoSource->read(&mVideoBuffer, &options);
- options.clearSeekTo();
-
- if (err != OK) {
- CHECK(!mVideoBuffer);
-
- if (err == INFO_FORMAT_CHANGED) {
- ALOGV("LV PLAYER VideoSource signalled format change");
- notifyVideoSize_l();
-
- if (mVideoRenderer != NULL) {
- mVideoRendererIsPreview = false;
- err = initRenderer_l();
- if (err != OK) {
- postStreamDoneEvent_l(err);
- }
-
- }
-
- updateSizeToRender(mVideoSource->getFormat());
- continue;
- }
- // So video playback is complete, but we may still have
- // a seek request pending that needs to be applied to the audio track
- if (mSeeking != NO_SEEK) {
- ALOGV("video stream ended while seeking!");
- }
- finishSeekIfNecessary(-1);
- ALOGV("PreviewPlayer: onVideoEvent EOS reached.");
- mFlags |= VIDEO_AT_EOS;
- mFlags |= AUDIO_AT_EOS;
- mOverlayUpdateEventPosted = false;
- postStreamDoneEvent_l(err);
- // Set the last decoded timestamp to duration
- mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
- return;
- }
-
- if (mVideoBuffer->range_length() == 0) {
- // Some decoders, notably the PV AVC software decoder
- // return spurious empty buffers that we just want to ignore.
-
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- continue;
- }
-
- int64_t videoTimeUs;
- CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
-
- if (mSeeking != NO_SEEK) {
- if (videoTimeUs < mSeekTimeUs) {
- // buffers are before seek time
- // ignore them
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- continue;
- }
- } else {
- if((videoTimeUs/1000) < mPlayBeginTimeMsec) {
- // Frames are before begin cut time
- // Donot render
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- continue;
- }
- }
- break;
- }
- }
-
- mNumberDecVideoFrames++;
-
- int64_t timeUs;
- CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
- setPosition_l(timeUs);
-
- if (!mStartNextPlayer) {
- int64_t playbackTimeRemaining = (mPlayEndTimeMsec * 1000LL) - timeUs;
- if (playbackTimeRemaining <= 1500000) {
- //When less than 1.5 sec of playback left
- // send notification to start next player
-
- mStartNextPlayer = true;
- notifyListener_l(0xAAAAAAAA);
- }
- }
-
- SeekType wasSeeking = mSeeking;
- finishSeekIfNecessary(timeUs);
- if (mAudioPlayer != NULL && !(mFlags & (AUDIO_RUNNING))) {
- status_t err = startAudioPlayer_l();
- if (err != OK) {
- ALOGE("Starting the audio player failed w/ err %d", err);
- return;
- }
- }
-
- TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
-
- if(ts == NULL) {
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- return;
- }
-
- if(!mIsVideoSourceJpg) {
- if (mFlags & FIRST_FRAME) {
- mFlags &= ~FIRST_FRAME;
-
- mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs;
- }
-
- int64_t realTimeUs, mediaTimeUs;
- if (!(mFlags & AUDIO_AT_EOS) && mAudioPlayer != NULL
- && mAudioPlayer->getMediaTimeMapping(&realTimeUs, &mediaTimeUs)) {
- mTimeSourceDeltaUs = realTimeUs - mediaTimeUs;
- }
-
- int64_t nowUs = ts->getRealTimeUs() - mTimeSourceDeltaUs;
-
- int64_t latenessUs = nowUs - timeUs;
-
- if (wasSeeking != NO_SEEK) {
- // Let's display the first frame after seeking right away.
- latenessUs = 0;
- }
- ALOGV("Audio time stamp = %lld and video time stamp = %lld",
- ts->getRealTimeUs(),timeUs);
- if (latenessUs > 40000) {
- // We're more than 40ms late.
-
- ALOGV("LV PLAYER we're late by %lld us (%.2f secs)",
- latenessUs, latenessUs / 1E6);
-
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- postVideoEvent_l(0);
- return;
- }
-
- if (latenessUs < -25000) {
- // We're more than 25ms early.
- ALOGV("We're more than 25ms early, lateness %lld", latenessUs);
-
- postVideoEvent_l(25000);
- return;
- }
- }
-
- if (mVideoRendererIsPreview || mVideoRenderer == NULL) {
- mVideoRendererIsPreview = false;
-
- status_t err = initRenderer_l();
- if (err != OK) {
- postStreamDoneEvent_l(err);
- }
- }
-
- // If timestamp exceeds endCutTime of clip, donot render
- if((timeUs/1000) > mPlayEndTimeMsec) {
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- mFlags |= VIDEO_AT_EOS;
- mFlags |= AUDIO_AT_EOS;
- ALOGV("PreviewPlayer: onVideoEvent timeUs > mPlayEndTime; send EOS..");
- mOverlayUpdateEventPosted = false;
- // Set the last decoded timestamp to duration
- mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
- postStreamDoneEvent_l(ERROR_END_OF_STREAM);
- return;
- }
- // Capture the frame timestamp to be rendered
- mDecodedVideoTs = timeUs;
-
- // Post processing to apply video effects
- for(i=0;i<mNumberEffects;i++) {
- // First check if effect starttime matches the clip being previewed
- if((mEffectsSettings[i].uiStartTime < (mDecVideoTsStoryBoard/1000)) ||
- (mEffectsSettings[i].uiStartTime >=
- ((mDecVideoTsStoryBoard/1000) + mPlayEndTimeMsec - mPlayBeginTimeMsec)))
- {
- // This effect doesn't belong to this clip, check next one
- continue;
- }
- // Check if effect applies to this particular frame timestamp
- if((mEffectsSettings[i].uiStartTime <=
- (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec)) &&
- ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
- (((timeUs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec))
- && (mEffectsSettings[i].uiDuration != 0)) {
- setVideoPostProcessingNode(
- mEffectsSettings[i].VideoEffectType, TRUE);
- }
- else {
- setVideoPostProcessingNode(
- mEffectsSettings[i].VideoEffectType, FALSE);
- }
- }
-
- //Provide the overlay Update indication when there is an overlay effect
- if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
- mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
- if (!mOverlayUpdateEventPosted) {
- // Find the effect in effectSettings array
- M4OSA_UInt32 index;
- for (index = 0; index < mNumberEffects; index++) {
- M4OSA_UInt32 timeMs = mDecodedVideoTs/1000;
- M4OSA_UInt32 timeOffset = mDecVideoTsStoryBoard/1000;
- if(mEffectsSettings[index].VideoEffectType ==
- (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
- if (((mEffectsSettings[index].uiStartTime + 1) <=
- timeMs + timeOffset - mPlayBeginTimeMsec) &&
- ((mEffectsSettings[index].uiStartTime - 1 +
- mEffectsSettings[index].uiDuration) >=
- timeMs + timeOffset - mPlayBeginTimeMsec))
- {
- break;
- }
- }
- }
- if (index < mNumberEffects) {
- mCurrFramingEffectIndex = index;
- mOverlayUpdateEventPosted = true;
- postOverlayUpdateEvent_l();
- ALOGV("Framing index = %ld", mCurrFramingEffectIndex);
- } else {
- ALOGV("No framing effects found");
- }
- }
-
- } else if (mOverlayUpdateEventPosted) {
- //Post the event when the overlay is no more valid
- ALOGV("Overlay is Done");
- mOverlayUpdateEventPosted = false;
- postOverlayUpdateEvent_l();
- }
-
- if (mVideoRenderer != NULL) {
- mVideoRenderer->render(mVideoBuffer, mCurrentVideoEffect,
- mRenderingMode, mIsVideoSourceJpg);
- }
-
- mVideoBuffer->release();
- mVideoBuffer = NULL;
-
- // Post progress callback based on callback interval set
- if(mNumberDecVideoFrames >= mProgressCbInterval) {
- postProgressCallbackEvent_l();
- mNumberDecVideoFrames = 0; // reset counter
- }
-
- // if reached EndCutTime of clip, post EOS event
- if((timeUs/1000) >= mPlayEndTimeMsec) {
- ALOGV("PreviewPlayer: onVideoEvent EOS.");
- mFlags |= VIDEO_AT_EOS;
- mFlags |= AUDIO_AT_EOS;
- mOverlayUpdateEventPosted = false;
- // Set the last decoded timestamp to duration
- mDecodedVideoTs = (mPlayEndTimeMsec*1000LL);
- postStreamDoneEvent_l(ERROR_END_OF_STREAM);
- }
- else {
- if ((wasSeeking != NO_SEEK) && (mFlags & SEEK_PREVIEW)) {
- mFlags &= ~SEEK_PREVIEW;
- return;
- }
-
- if(!mIsVideoSourceJpg) {
- postVideoEvent_l(0);
- }
- else {
- postVideoEvent_l(33000);
- }
- }
-}
-
-status_t PreviewPlayer::prepare() {
- ALOGV("prepare");
- Mutex::Autolock autoLock(mLock);
- return prepare_l();
-}
-
-status_t PreviewPlayer::prepare_l() {
- ALOGV("prepare_l");
- if (mFlags & PREPARED) {
- return OK;
- }
-
- if (mFlags & PREPARING) {
- return UNKNOWN_ERROR;
- }
-
- mIsAsyncPrepare = false;
- status_t err = prepareAsync_l();
-
- if (err != OK) {
- return err;
- }
-
- while (mFlags & PREPARING) {
- mPreparedCondition.wait(mLock);
- }
-
- return mPrepareResult;
-}
-
-status_t PreviewPlayer::prepareAsync() {
- ALOGV("prepareAsync");
- Mutex::Autolock autoLock(mLock);
- return prepareAsync_l();
-}
-
-status_t PreviewPlayer::prepareAsync_l() {
- ALOGV("prepareAsync_l");
- if (mFlags & PREPARING) {
- return UNKNOWN_ERROR; // async prepare already pending
- }
-
- if (!mQueueStarted) {
- mQueue.start();
- mQueueStarted = true;
- }
-
- mFlags |= PREPARING;
- mAsyncPrepareEvent = new PreviewPlayerEvent(
- this, &PreviewPlayer::onPrepareAsyncEvent);
-
- mQueue.postEvent(mAsyncPrepareEvent);
-
- return OK;
-}
-
-status_t PreviewPlayer::finishSetDataSource_l() {
- sp<DataSource> dataSource;
- sp<MediaExtractor> extractor;
-
- dataSource = DataSource::CreateFromURI(mUri.string(), NULL);
-
- if (dataSource == NULL) {
- return UNKNOWN_ERROR;
- }
-
- //If file type is .rgb, then no need to check for Extractor
- int uriLen = strlen(mUri);
- int startOffset = uriLen - 4;
- if(!strncasecmp(mUri+startOffset, ".rgb", 4)) {
- extractor = NULL;
- }
- else {
- extractor = MediaExtractor::Create(dataSource,
- MEDIA_MIMETYPE_CONTAINER_MPEG4);
- }
-
- if (extractor == NULL) {
- ALOGV("finishSetDataSource_l: failed to create extractor");
- return setDataSource_l_jpg();
- }
-
- return setDataSource_l(extractor);
-}
-
-void PreviewPlayer::onPrepareAsyncEvent() {
- Mutex::Autolock autoLock(mLock);
- ALOGV("onPrepareAsyncEvent");
-
- if (mFlags & PREPARE_CANCELLED) {
- ALOGV("prepare was cancelled before doing anything");
- abortPrepare(UNKNOWN_ERROR);
- return;
- }
-
- if (mUri.size() > 0) {
- status_t err = finishSetDataSource_l();
-
- if (err != OK) {
- abortPrepare(err);
- return;
- }
- }
-
- if (mVideoTrack != NULL && mVideoSource == NULL) {
- status_t err = initVideoDecoder_l(OMXCodec::kHardwareCodecsOnly);
-
- if (err != OK) {
- abortPrepare(err);
- return;
- }
- }
-
- if (mAudioTrack != NULL && mAudioSource == NULL) {
- status_t err = initAudioDecoder_l();
-
- if (err != OK) {
- abortPrepare(err);
- return;
- }
- }
- finishAsyncPrepare_l();
-
-}
-
-void PreviewPlayer::finishAsyncPrepare_l() {
- ALOGV("finishAsyncPrepare_l");
- if (mIsAsyncPrepare) {
- if (mVideoSource == NULL) {
- notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
- } else {
- notifyVideoSize_l();
- }
- notifyListener_l(MEDIA_PREPARED);
- }
-
- mPrepareResult = OK;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
- mFlags |= PREPARED;
- mAsyncPrepareEvent = NULL;
- mPreparedCondition.broadcast();
-}
-
-void PreviewPlayer::acquireLock() {
- ALOGV("acquireLock");
- mLockControl.lock();
-}
-
-void PreviewPlayer::releaseLock() {
- ALOGV("releaseLock");
- mLockControl.unlock();
-}
-
-status_t PreviewPlayer::loadEffectsSettings(
- M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
-
- ALOGV("loadEffectsSettings");
- mNumberEffects = nEffects;
- mEffectsSettings = pEffectSettings;
- return OK;
-}
-
-status_t PreviewPlayer::loadAudioMixSettings(
- M4xVSS_AudioMixingSettings* pAudioMixSettings) {
-
- ALOGV("loadAudioMixSettings");
- mPreviewPlayerAudioMixSettings = pAudioMixSettings;
- return OK;
-}
-
-status_t PreviewPlayer::setAudioMixPCMFileHandle(
- M4OSA_Context pAudioMixPCMFileHandle) {
-
- ALOGV("setAudioMixPCMFileHandle");
- mAudioMixPCMFileHandle = pAudioMixPCMFileHandle;
- return OK;
-}
-
-status_t PreviewPlayer::setAudioMixStoryBoardParam(
- M4OSA_UInt32 audioMixStoryBoardTS,
- M4OSA_UInt32 currentMediaBeginCutTime,
- M4OSA_UInt32 primaryTrackVolValue ) {
-
- ALOGV("setAudioMixStoryBoardParam");
- mAudioMixStoryBoardTS = audioMixStoryBoardTS;
- mCurrentMediaBeginCutTime = currentMediaBeginCutTime;
- mCurrentMediaVolumeValue = primaryTrackVolValue;
- return OK;
-}
-
-status_t PreviewPlayer::setPlaybackBeginTime(uint32_t msec) {
-
- mPlayBeginTimeMsec = msec;
- return OK;
-}
-
-status_t PreviewPlayer::setPlaybackEndTime(uint32_t msec) {
-
- mPlayEndTimeMsec = msec;
- return OK;
-}
-
-status_t PreviewPlayer::setStoryboardStartTime(uint32_t msec) {
-
- mStoryboardStartTimeMsec = msec;
- mDecVideoTsStoryBoard = mStoryboardStartTimeMsec * 1000LL;
- return OK;
-}
-
-status_t PreviewPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
-
- mProgressCbInterval = cbInterval;
- return OK;
-}
-
-
-status_t PreviewPlayer::setMediaRenderingMode(
- M4xVSS_MediaRendering mode,
- M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
-
- mRenderingMode = mode;
-
- /* get the video width and height by resolution */
- return getVideoSizeByResolution(
- outputVideoSize,
- &mOutputVideoWidth, &mOutputVideoHeight);
-
-}
-
-status_t PreviewPlayer::resetJniCallbackTimeStamp() {
-
- mDecVideoTsStoryBoard = mStoryboardStartTimeMsec * 1000LL;
- return OK;
-}
-
-void PreviewPlayer::postProgressCallbackEvent_l() {
- if (mProgressCbEventPending) {
- return;
- }
- mProgressCbEventPending = true;
-
- mQueue.postEvent(mProgressCbEvent);
-}
-
-
-void PreviewPlayer::onProgressCbEvent() {
- Mutex::Autolock autoLock(mLock);
- if (!mProgressCbEventPending) {
- return;
- }
- mProgressCbEventPending = false;
- // If playback starts from previous I-frame,
- // then send frame storyboard duration
- if ((mDecodedVideoTs/1000) < mPlayBeginTimeMsec) {
- notifyListener_l(MEDIA_INFO, 0, mDecVideoTsStoryBoard/1000);
- } else {
- notifyListener_l(MEDIA_INFO, 0,
- (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec));
- }
-}
-
-void PreviewPlayer::postOverlayUpdateEvent_l() {
- if (mOverlayUpdateEventPending) {
- return;
- }
- mOverlayUpdateEventPending = true;
- mQueue.postEvent(mOverlayUpdateEvent);
-}
-
-void PreviewPlayer::onUpdateOverlayEvent() {
- Mutex::Autolock autoLock(mLock);
-
- if (!mOverlayUpdateEventPending) {
- return;
- }
- mOverlayUpdateEventPending = false;
-
- int updateState = mOverlayUpdateEventPosted? 1: 0;
- notifyListener_l(0xBBBBBBBB, updateState, mCurrFramingEffectIndex);
-}
-
-
-void PreviewPlayer::setVideoPostProcessingNode(
- M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
-
- uint32_t effect = VIDEO_EFFECT_NONE;
-
- //Map M4VSS3GPP_VideoEffectType to local enum
- switch(type) {
- case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
- effect = VIDEO_EFFECT_FADEFROMBLACK;
- break;
-
- case M4VSS3GPP_kVideoEffectType_FadeToBlack:
- effect = VIDEO_EFFECT_FADETOBLACK;
- break;
-
- case M4xVSS_kVideoEffectType_BlackAndWhite:
- effect = VIDEO_EFFECT_BLACKANDWHITE;
- break;
-
- case M4xVSS_kVideoEffectType_Pink:
- effect = VIDEO_EFFECT_PINK;
- break;
-
- case M4xVSS_kVideoEffectType_Green:
- effect = VIDEO_EFFECT_GREEN;
- break;
-
- case M4xVSS_kVideoEffectType_Sepia:
- effect = VIDEO_EFFECT_SEPIA;
- break;
-
- case M4xVSS_kVideoEffectType_Negative:
- effect = VIDEO_EFFECT_NEGATIVE;
- break;
-
- case M4xVSS_kVideoEffectType_Framing:
- effect = VIDEO_EFFECT_FRAMING;
- break;
-
- case M4xVSS_kVideoEffectType_Fifties:
- effect = VIDEO_EFFECT_FIFTIES;
- break;
-
- case M4xVSS_kVideoEffectType_ColorRGB16:
- effect = VIDEO_EFFECT_COLOR_RGB16;
- break;
-
- case M4xVSS_kVideoEffectType_Gradient:
- effect = VIDEO_EFFECT_GRADIENT;
- break;
-
- default:
- effect = VIDEO_EFFECT_NONE;
- break;
- }
-
- if (enable == M4OSA_TRUE) {
- //If already set, then no need to set again
- if (!(mCurrentVideoEffect & effect)) {
- mCurrentVideoEffect |= effect;
- if (effect == VIDEO_EFFECT_FIFTIES) {
- mIsFiftiesEffectStarted = true;
- }
- }
- } else {
- //Reset only if already set
- if (mCurrentVideoEffect & effect) {
- mCurrentVideoEffect &= ~effect;
- }
- }
-}
-
-status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
- mVideoWidth = width;
- mVideoHeight = height;
- return OK;
-}
-
-status_t PreviewPlayer::readFirstVideoFrame() {
- ALOGV("readFirstVideoFrame");
-
- if (!mVideoBuffer) {
- MediaSource::ReadOptions options;
- if (mSeeking != NO_SEEK) {
- ALOGV("seeking to %lld us (%.2f secs)", mSeekTimeUs,
- mSeekTimeUs / 1E6);
-
- options.setSeekTo(
- mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
- }
- for (;;) {
- status_t err = mVideoSource->read(&mVideoBuffer, &options);
- options.clearSeekTo();
-
- if (err != OK) {
- CHECK(!mVideoBuffer);
-
- if (err == INFO_FORMAT_CHANGED) {
- ALOGV("VideoSource signalled format change");
- notifyVideoSize_l();
-
- if (mVideoRenderer != NULL) {
- mVideoRendererIsPreview = false;
- err = initRenderer_l();
- if (err != OK) {
- postStreamDoneEvent_l(err);
- }
- }
-
- updateSizeToRender(mVideoSource->getFormat());
- continue;
- }
- ALOGV("EOS reached.");
- mFlags |= VIDEO_AT_EOS;
- mFlags |= AUDIO_AT_EOS;
- postStreamDoneEvent_l(err);
- return OK;
- }
-
- if (mVideoBuffer->range_length() == 0) {
- // Some decoders, notably the PV AVC software decoder
- // return spurious empty buffers that we just want to ignore.
-
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- continue;
- }
-
- int64_t videoTimeUs;
- CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &videoTimeUs));
- if (mSeeking != NO_SEEK) {
- if (videoTimeUs < mSeekTimeUs) {
- // buffers are before seek time
- // ignore them
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- continue;
- }
- } else {
- if ((videoTimeUs/1000) < mPlayBeginTimeMsec) {
- // buffers are before begin cut time
- // ignore them
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- continue;
- }
- }
- break;
- }
- }
-
- int64_t timeUs;
- CHECK(mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
- setPosition_l(timeUs);
-
- mDecodedVideoTs = timeUs;
-
- return OK;
-
-}
-
-status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
- *lastRenderedTimeMs = (((mDecodedVideoTs+mDecVideoTsStoryBoard)/1000)-mPlayBeginTimeMsec);
- return OK;
-}
-
-void PreviewPlayer::updateSizeToRender(sp<MetaData> meta) {
- if (mVideoRenderer) {
- mVideoRenderer->updateVideoSize(meta);
- }
-}
-
-void PreviewPlayer::setListener(const wp<MediaPlayerBase> &listener) {
- Mutex::Autolock autoLock(mLock);
- mListener = listener;
-}
-
-status_t PreviewPlayer::setDataSource(const sp<IStreamSource> &source) {
- return INVALID_OPERATION;
-}
-
-void PreviewPlayer::reset() {
- Mutex::Autolock autoLock(mLock);
- reset_l();
-}
-
-void PreviewPlayer::clear_l() {
- mDisplayWidth = 0;
- mDisplayHeight = 0;
-
- if (mFlags & PLAYING) {
- updateBatteryUsage_l();
- }
-
- if (mFlags & PREPARING) {
- mFlags |= PREPARE_CANCELLED;
-
- if (mFlags & PREPARING_CONNECTED) {
- // We are basically done preparing, we're just buffering
- // enough data to start playback, we can safely interrupt that.
- finishAsyncPrepare_l();
- }
- }
-
- while (mFlags & PREPARING) {
- mPreparedCondition.wait(mLock);
- }
-
- cancelPlayerEvents_l(true);
-
- mAudioTrack.clear();
- mVideoTrack.clear();
-
- // Shutdown audio first, so that the respone to the reset request
- // appears to happen instantaneously as far as the user is concerned
- // If we did this later, audio would continue playing while we
- // shutdown the video-related resources and the player appear to
- // not be as responsive to a reset request.
- if (mAudioPlayer == NULL && mAudioSource != NULL) {
- // If we had an audio player, it would have effectively
- // taken possession of the audio source and stopped it when
- // _it_ is stopped. Otherwise this is still our responsibility.
- mAudioSource->stop();
- }
- mAudioSource.clear();
-
- mTimeSource = NULL;
-
- delete mAudioPlayer;
- mAudioPlayer = NULL;
-
- if (mVideoSource != NULL) {
- shutdownVideoDecoder_l();
- }
-
- mDurationUs = -1;
- mFlags = 0;
- mExtractorFlags = 0;
- mTimeSourceDeltaUs = 0;
- mVideoTimeUs = 0;
-
- mSeeking = NO_SEEK;
- mSeekNotificationSent = false;
- mSeekTimeUs = 0;
-
- mUri.setTo("");
-
- mBitrate = -1;
- mLastVideoTimeUs = -1;
-}
-
-void PreviewPlayer::notifyListener_l(int msg, int ext1, int ext2) {
- if (mListener != NULL) {
- sp<MediaPlayerBase> listener = mListener.promote();
-
- if (listener != NULL) {
- listener->sendEvent(msg, ext1, ext2);
- }
- }
-}
-
-void PreviewPlayer::onVideoLagUpdate() {
- Mutex::Autolock autoLock(mLock);
- if (!mVideoLagEventPending) {
- return;
- }
- mVideoLagEventPending = false;
-
- int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs();
- int64_t videoLateByUs = audioTimeUs - mVideoTimeUs;
-
- if (!(mFlags & VIDEO_AT_EOS) && videoLateByUs > 300000ll) {
- ALOGV("video late by %lld ms.", videoLateByUs / 1000ll);
-
- notifyListener_l(
- MEDIA_INFO,
- MEDIA_INFO_VIDEO_TRACK_LAGGING,
- videoLateByUs / 1000ll);
- }
-
- postVideoLagEvent_l();
-}
-
-void PreviewPlayer::notifyVideoSize_l() {
- sp<MetaData> meta = mVideoSource->getFormat();
-
- int32_t vWidth, vHeight;
- int32_t cropLeft, cropTop, cropRight, cropBottom;
-
- CHECK(meta->findInt32(kKeyWidth, &vWidth));
- CHECK(meta->findInt32(kKeyHeight, &vHeight));
-
- mGivenWidth = vWidth;
- mGivenHeight = vHeight;
-
- if (!meta->findRect(
- kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
-
- cropLeft = cropTop = 0;
- cropRight = vWidth - 1;
- cropBottom = vHeight - 1;
-
- ALOGD("got dimensions only %d x %d", vWidth, vHeight);
- } else {
- ALOGD("got crop rect %d, %d, %d, %d",
- cropLeft, cropTop, cropRight, cropBottom);
- }
-
- mCropRect.left = cropLeft;
- mCropRect.right = cropRight;
- mCropRect.top = cropTop;
- mCropRect.bottom = cropBottom;
-
- int32_t displayWidth;
- if (meta->findInt32(kKeyDisplayWidth, &displayWidth)) {
- ALOGV("Display width changed (%d=>%d)", mDisplayWidth, displayWidth);
- mDisplayWidth = displayWidth;
- }
- int32_t displayHeight;
- if (meta->findInt32(kKeyDisplayHeight, &displayHeight)) {
- ALOGV("Display height changed (%d=>%d)", mDisplayHeight, displayHeight);
- mDisplayHeight = displayHeight;
- }
-
- int32_t usableWidth = cropRight - cropLeft + 1;
- int32_t usableHeight = cropBottom - cropTop + 1;
- if (mDisplayWidth != 0) {
- usableWidth = mDisplayWidth;
- }
- if (mDisplayHeight != 0) {
- usableHeight = mDisplayHeight;
- }
-
- int32_t rotationDegrees;
- if (!mVideoTrack->getFormat()->findInt32(
- kKeyRotation, &rotationDegrees)) {
- rotationDegrees = 0;
- }
-
- if (rotationDegrees == 90 || rotationDegrees == 270) {
- notifyListener_l(
- MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth);
- } else {
- notifyListener_l(
- MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight);
- }
-}
-
-status_t PreviewPlayer::pause() {
- Mutex::Autolock autoLock(mLock);
-
- mFlags &= ~CACHE_UNDERRUN;
-
- return pause_l();
-}
-
-status_t PreviewPlayer::pause_l(bool at_eos) {
- if (!(mFlags & PLAYING)) {
- return OK;
- }
-
- cancelPlayerEvents_l();
-
- if (mAudioPlayer != NULL && (mFlags & AUDIO_RUNNING)) {
- if (at_eos) {
- // If we played the audio stream to completion we
- // want to make sure that all samples remaining in the audio
- // track's queue are played out.
- mAudioPlayer->pause(true /* playPendingSamples */);
- } else {
- mAudioPlayer->pause();
- }
-
- mFlags &= ~AUDIO_RUNNING;
- }
-
- mFlags &= ~PLAYING;
- updateBatteryUsage_l();
-
- return OK;
-}
-
-bool PreviewPlayer::isPlaying() const {
- return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
-}
-
-void PreviewPlayer::setSurface(const sp<Surface> &surface) {
- Mutex::Autolock autoLock(mLock);
-
- mSurface = surface;
- setNativeWindow_l(surface);
-}
-
-void PreviewPlayer::setSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer) {
- Mutex::Autolock autoLock(mLock);
-
- mSurface.clear();
- if (bufferProducer != NULL) {
- setNativeWindow_l(new Surface(bufferProducer));
- }
-}
-
-void PreviewPlayer::shutdownVideoDecoder_l() {
- if (mVideoBuffer) {
- mVideoBuffer->release();
- mVideoBuffer = NULL;
- }
-
- mVideoSource->stop();
-
- // The following hack is necessary to ensure that the OMX
- // component is completely released by the time we may try
- // to instantiate it again.
- wp<MediaSource> tmp = mVideoSource;
- mVideoSource.clear();
- while (tmp.promote() != NULL) {
- usleep(1000);
- }
- IPCThreadState::self()->flushCommands();
-}
-
-void PreviewPlayer::setNativeWindow_l(const sp<ANativeWindow> &native) {
- mNativeWindow = native;
-
- if (mVideoSource == NULL) {
- return;
- }
-
- ALOGI("attempting to reconfigure to use new surface");
-
- bool wasPlaying = (mFlags & PLAYING) != 0;
-
- pause_l();
-
- shutdownVideoDecoder_l();
-
- CHECK_EQ(initVideoDecoder_l(), (status_t)OK);
-
- if (mLastVideoTimeUs >= 0) {
- mSeeking = SEEK;
- mSeekNotificationSent = true;
- mSeekTimeUs = mLastVideoTimeUs;
- mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
- }
-
- if (wasPlaying) {
- play_l();
- }
-}
-
-void PreviewPlayer::setAudioSink(
- const sp<MediaPlayerBase::AudioSink> &audioSink) {
- Mutex::Autolock autoLock(mLock);
-
- mAudioSink = audioSink;
-}
-
-status_t PreviewPlayer::setLooping(bool shouldLoop) {
- Mutex::Autolock autoLock(mLock);
-
- mFlags = mFlags & ~LOOPING;
-
- if (shouldLoop) {
- mFlags |= LOOPING;
- }
-
- return OK;
-}
-
-void PreviewPlayer::setDuration_l(int64_t durationUs) {
- if (mDurationUs < 0 || durationUs > mDurationUs) {
- mDurationUs = durationUs;
- }
-}
-
-status_t PreviewPlayer::getDuration(int64_t *durationUs) {
- Mutex::Autolock autoLock(mLock);
- if (mDurationUs < 0) {
- return UNKNOWN_ERROR;
- }
-
- *durationUs = mDurationUs;
- return OK;
-}
-
-status_t PreviewPlayer::getPosition(int64_t *positionUs) {
- Mutex::Autolock autoLock(mLock);
-
- if (mSeeking != NO_SEEK) {
- *positionUs = mSeekTimeUs;
- } else if (mVideoSource != NULL
- && (mAudioPlayer == NULL || !(mFlags & VIDEO_AT_EOS))) {
- *positionUs = mVideoTimeUs;
- } else if (mAudioPlayer != NULL) {
- *positionUs = mAudioPlayer->getMediaTimeUs();
- } else {
- *positionUs = 0;
- }
-
- return OK;
-}
-
-void PreviewPlayer::setPosition_l(int64_t timeUs) {
- mVideoTimeUs = timeUs;
-}
-
-status_t PreviewPlayer::seekTo_l(int64_t timeUs) {
- ALOGV("seekTo_l");
- if (mFlags & CACHE_UNDERRUN) {
- mFlags &= ~CACHE_UNDERRUN;
- play_l();
- }
-
- if ((mFlags & PLAYING) && mVideoSource != NULL && (mFlags & VIDEO_AT_EOS)) {
- // Video playback completed before, there's no pending
- // video event right now. In order for this new seek
- // to be honored, we need to post one.
-
- postVideoEvent_l();
- }
-
- mSeeking = SEEK;
- mSeekNotificationSent = false;
- mSeekTimeUs = timeUs;
- mFlags &= ~(AT_EOS | AUDIO_AT_EOS | VIDEO_AT_EOS);
-
- seekAudioIfNecessary_l();
-
- if (!(mFlags & PLAYING)) {
- ALOGV("seeking while paused, sending SEEK_COMPLETE notification"
- " immediately.");
-
- notifyListener_l(MEDIA_SEEK_COMPLETE);
- mSeekNotificationSent = true;
-
- if ((mFlags & PREPARED) && mVideoSource != NULL) {
- mFlags |= SEEK_PREVIEW;
- postVideoEvent_l();
- }
- }
-
- return OK;
-}
-
-void PreviewPlayer::seekAudioIfNecessary_l() {
- if (mSeeking != NO_SEEK && mVideoSource == NULL && mAudioPlayer != NULL) {
- mAudioPlayer->seekTo(mSeekTimeUs);
-
- mWatchForAudioSeekComplete = true;
- mWatchForAudioEOS = true;
- }
-}
-
-void PreviewPlayer::setAudioSource(const sp<MediaSource>& source) {
- CHECK(source != NULL);
- mAudioTrack = source;
-}
-
-void PreviewPlayer::setVideoSource(const sp<MediaSource>& source) {
- CHECK(source != NULL);
- mVideoTrack = source;
-}
-
-void PreviewPlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
- if (mSeeking == SEEK_VIDEO_ONLY) {
- mSeeking = NO_SEEK;
- return;
- }
-
- if (mSeeking == NO_SEEK || (mFlags & SEEK_PREVIEW)) {
- return;
- }
-
- if (mAudioPlayer != NULL) {
- ALOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
-
- // If we don't have a video time, seek audio to the originally
- // requested seek time instead.
-
- mAudioPlayer->seekTo(videoTimeUs < 0 ? mSeekTimeUs : videoTimeUs);
- mWatchForAudioSeekComplete = true;
- mWatchForAudioEOS = true;
- } else if (!mSeekNotificationSent) {
- // If we're playing video only, report seek complete now,
- // otherwise audio player will notify us later.
- notifyListener_l(MEDIA_SEEK_COMPLETE);
- mSeekNotificationSent = true;
- }
-
- mFlags |= FIRST_FRAME;
- mSeeking = NO_SEEK;
-}
-
-void PreviewPlayer::onCheckAudioStatus() {
- Mutex::Autolock autoLock(mLock);
- if (!mAudioStatusEventPending) {
- // Event was dispatched and while we were blocking on the mutex,
- // has already been cancelled.
- return;
- }
-
- mAudioStatusEventPending = false;
-
- if (mWatchForAudioSeekComplete && !mAudioPlayer->isSeeking()) {
- mWatchForAudioSeekComplete = false;
-
- if (!mSeekNotificationSent) {
- notifyListener_l(MEDIA_SEEK_COMPLETE);
- mSeekNotificationSent = true;
- }
-
- mSeeking = NO_SEEK;
- }
-
- status_t finalStatus;
- if (mWatchForAudioEOS && mAudioPlayer->reachedEOS(&finalStatus)) {
- mWatchForAudioEOS = false;
- mFlags |= AUDIO_AT_EOS;
- mFlags |= FIRST_FRAME;
- postStreamDoneEvent_l(finalStatus);
- }
-}
-
-void PreviewPlayer::postVideoEvent_l(int64_t delayUs) {
- if (mVideoEventPending) {
- return;
- }
-
- mVideoEventPending = true;
- mQueue.postEventWithDelay(mVideoEvent, delayUs < 0 ? 10000 : delayUs);
-}
-
-void PreviewPlayer::postStreamDoneEvent_l(status_t status) {
- if (mStreamDoneEventPending) {
- return;
- }
- mStreamDoneEventPending = true;
-
- mStreamDoneStatus = status;
- mQueue.postEvent(mStreamDoneEvent);
-}
-
-void PreviewPlayer::postVideoLagEvent_l() {
- if (mVideoLagEventPending) {
- return;
- }
- mVideoLagEventPending = true;
- mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll);
-}
-
-void PreviewPlayer::postCheckAudioStatusEvent_l(int64_t delayUs) {
- if (mAudioStatusEventPending) {
- return;
- }
- mAudioStatusEventPending = true;
- mQueue.postEventWithDelay(mCheckAudioStatusEvent, delayUs);
-}
-
-void PreviewPlayer::abortPrepare(status_t err) {
- CHECK(err != OK);
-
- if (mIsAsyncPrepare) {
- notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
- }
-
- mPrepareResult = err;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
- mAsyncPrepareEvent = NULL;
- mPreparedCondition.broadcast();
-}
-
-uint32_t PreviewPlayer::getSourceSeekFlags() const {
- Mutex::Autolock lock(mLock);
- return mExtractorFlags;
-}
-
-void PreviewPlayer::postAudioEOS(int64_t delayUs) {
- Mutex::Autolock autoLock(mLock);
- postCheckAudioStatusEvent_l(delayUs);
-}
-
-void PreviewPlayer::postAudioSeekComplete() {
- Mutex::Autolock autoLock(mLock);
- postCheckAudioStatusEvent_l(0 /* delayUs */);
-}
-
-void PreviewPlayer::updateBatteryUsage_l() {
- uint32_t params = IMediaPlayerService::kBatteryDataTrackDecoder;
- if ((mAudioSource != NULL) && (mAudioSource != mAudioTrack)) {
- params |= IMediaPlayerService::kBatteryDataTrackAudio;
- }
- if (mVideoSource != NULL) {
- params |= IMediaPlayerService::kBatteryDataTrackVideo;
- }
- addBatteryData(params);
-}
-
-} // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayer.h b/libvideoeditor/lvpp/PreviewPlayer.h
deleted file mode 100755
index 5a13b58..0000000
--- a/libvideoeditor/lvpp/PreviewPlayer.h
+++ /dev/null
@@ -1,298 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PREVIEW_PLAYER_H_
-
-#define PREVIEW_PLAYER_H_
-
-#include "TimedEventQueue.h"
-#include "VideoEditorAudioPlayer.h"
-
-#include <media/MediaPlayerInterface.h>
-#include <media/stagefright/OMXClient.h>
-#include <media/stagefright/TimeSource.h>
-#include <utils/threads.h>
-#include "NativeWindowRenderer.h"
-
-namespace android {
-
-struct VideoEditorAudioPlayer;
-struct MediaExtractor;
-
-struct PreviewPlayer {
- PreviewPlayer(NativeWindowRenderer* renderer);
- ~PreviewPlayer();
-
- void setListener(const wp<MediaPlayerBase> &listener);
- void reset();
-
- status_t play();
- status_t pause();
-
- bool isPlaying() const;
- void setSurface(const sp<Surface> &surface);
- void setSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer);
- status_t seekTo(int64_t timeUs);
-
- status_t getVideoDimensions(int32_t *width, int32_t *height) const;
-
-
- // FIXME: Sync between ...
- void acquireLock();
- void releaseLock();
-
- status_t prepare();
- status_t prepareAsync();
- status_t setDataSource(const char *path);
- status_t setDataSource(const sp<IStreamSource> &source);
-
- void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
- status_t setLooping(bool shouldLoop);
- status_t getDuration(int64_t *durationUs);
- status_t getPosition(int64_t *positionUs);
-
- uint32_t getSourceSeekFlags() const;
-
- void postAudioEOS(int64_t delayUs = 0ll);
- void postAudioSeekComplete();
-
- status_t loadEffectsSettings(M4VSS3GPP_EffectSettings* pEffectSettings,
- int nEffects);
- status_t loadAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);
- status_t setAudioMixPCMFileHandle(M4OSA_Context pAudioMixPCMFileHandle);
- status_t setAudioMixStoryBoardParam(M4OSA_UInt32 audioMixStoryBoardTS,
- M4OSA_UInt32 currentMediaBeginCutTime,
- M4OSA_UInt32 currentMediaVolumeVol);
-
- status_t setPlaybackBeginTime(uint32_t msec);
- status_t setPlaybackEndTime(uint32_t msec);
- status_t setStoryboardStartTime(uint32_t msec);
- status_t setProgressCallbackInterval(uint32_t cbInterval);
- status_t setMediaRenderingMode(M4xVSS_MediaRendering mode,
- M4VIDEOEDITING_VideoFrameSize outputVideoSize);
-
- status_t resetJniCallbackTimeStamp();
- status_t setImageClipProperties(uint32_t width, uint32_t height);
- status_t readFirstVideoFrame();
- status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);
- status_t setAudioPlayer(VideoEditorAudioPlayer *audioPlayer);
-
-private:
- enum {
- PLAYING = 1,
- LOOPING = 2,
- FIRST_FRAME = 4,
- PREPARING = 8,
- PREPARED = 16,
- AT_EOS = 32,
- PREPARE_CANCELLED = 64,
- CACHE_UNDERRUN = 128,
- AUDIO_AT_EOS = 256,
- VIDEO_AT_EOS = 512,
- AUTO_LOOPING = 1024,
- INFORMED_AV_EOS = 2048,
-
- // We are basically done preparing but are currently buffering
- // sufficient data to begin playback and finish the preparation phase
- // for good.
- PREPARING_CONNECTED = 2048,
-
- // We're triggering a single video event to display the first frame
- // after the seekpoint.
- SEEK_PREVIEW = 4096,
-
- AUDIO_RUNNING = 8192,
- AUDIOPLAYER_STARTED = 16384,
-
- INCOGNITO = 32768,
- };
-
- mutable Mutex mLock;
-
- OMXClient mClient;
- TimedEventQueue mQueue;
- bool mQueueStarted;
- wp<MediaPlayerBase> mListener;
-
- sp<Surface> mSurface;
- sp<ANativeWindow> mNativeWindow;
- sp<MediaPlayerBase::AudioSink> mAudioSink;
-
- SystemTimeSource mSystemTimeSource;
- TimeSource *mTimeSource;
-
- String8 mUri;
-
- sp<MediaSource> mVideoTrack;
- sp<MediaSource> mVideoSource;
- bool mVideoRendererIsPreview;
-
- sp<MediaSource> mAudioTrack;
- sp<MediaSource> mAudioSource;
- VideoEditorAudioPlayer *mAudioPlayer;
- int64_t mDurationUs;
-
- int32_t mDisplayWidth;
- int32_t mDisplayHeight;
-
- uint32_t mFlags;
- uint32_t mExtractorFlags;
-
- int64_t mTimeSourceDeltaUs;
- int64_t mVideoTimeUs;
-
- enum SeekType {
- NO_SEEK,
- SEEK,
- SEEK_VIDEO_ONLY
- };
- SeekType mSeeking;
-
- bool mSeekNotificationSent;
- int64_t mSeekTimeUs;
-
- int64_t mBitrate; // total bitrate of the file (in bps) or -1 if unknown.
-
- bool mWatchForAudioSeekComplete;
- bool mWatchForAudioEOS;
-
- sp<TimedEventQueue::Event> mVideoEvent;
- bool mVideoEventPending;
- sp<TimedEventQueue::Event> mStreamDoneEvent;
- bool mStreamDoneEventPending;
- sp<TimedEventQueue::Event> mCheckAudioStatusEvent;
- bool mAudioStatusEventPending;
- sp<TimedEventQueue::Event> mVideoLagEvent;
- bool mVideoLagEventPending;
-
- sp<TimedEventQueue::Event> mAsyncPrepareEvent;
- Condition mPreparedCondition;
- bool mIsAsyncPrepare;
- status_t mPrepareResult;
- status_t mStreamDoneStatus;
-
- MediaBuffer *mVideoBuffer;
- int64_t mLastVideoTimeUs;
- ARect mCropRect;
- int32_t mGivenWidth, mGivenHeight;
-
-
- bool mIsChangeSourceRequired;
-
- NativeWindowRenderer *mNativeWindowRenderer;
- RenderInput *mVideoRenderer;
-
- int32_t mVideoWidth, mVideoHeight;
-
- //Data structures used for audio and video effects
- M4VSS3GPP_EffectSettings* mEffectsSettings;
- M4xVSS_AudioMixingSettings* mPreviewPlayerAudioMixSettings;
- M4OSA_Context mAudioMixPCMFileHandle;
- M4OSA_UInt32 mAudioMixStoryBoardTS;
- M4OSA_UInt32 mCurrentMediaBeginCutTime;
- M4OSA_UInt32 mCurrentMediaVolumeValue;
- M4OSA_UInt32 mCurrFramingEffectIndex;
-
- uint32_t mNumberEffects;
- uint32_t mPlayBeginTimeMsec;
- uint32_t mPlayEndTimeMsec;
- uint64_t mDecodedVideoTs; // timestamp of current decoded video frame buffer
- uint64_t mDecVideoTsStoryBoard; // timestamp of frame relative to storyboard
- uint32_t mCurrentVideoEffect;
- uint32_t mProgressCbInterval;
- uint32_t mNumberDecVideoFrames; // Counter of number of video frames decoded
- sp<TimedEventQueue::Event> mProgressCbEvent;
- bool mProgressCbEventPending;
- sp<TimedEventQueue::Event> mOverlayUpdateEvent;
- bool mOverlayUpdateEventPending;
- bool mOverlayUpdateEventPosted;
-
- M4xVSS_MediaRendering mRenderingMode;
- uint32_t mOutputVideoWidth;
- uint32_t mOutputVideoHeight;
-
- uint32_t mStoryboardStartTimeMsec;
-
- bool mIsVideoSourceJpg;
- bool mIsFiftiesEffectStarted;
- int64_t mImageFrameTimeUs;
- bool mStartNextPlayer;
- mutable Mutex mLockControl;
-
- M4VIFI_UInt8* mFrameRGBBuffer;
- M4VIFI_UInt8* mFrameYUVBuffer;
-
- void cancelPlayerEvents_l(bool updateProgressCb = false);
- status_t setDataSource_l(const sp<MediaExtractor> &extractor);
- status_t setDataSource_l(const char *path);
- void setNativeWindow_l(const sp<ANativeWindow> &native);
- void reset_l();
- void clear_l();
- status_t play_l();
- status_t pause_l(bool at_eos = false);
- status_t initRenderer_l();
- status_t initAudioDecoder_l();
- status_t initVideoDecoder_l(uint32_t flags = 0);
- void notifyVideoSize_l();
- void notifyListener_l(int msg, int ext1 = 0, int ext2 = 0);
- void onVideoEvent();
- void onVideoLagUpdate();
- void onStreamDone();
- void onCheckAudioStatus();
- void onPrepareAsyncEvent();
-
- void finishAsyncPrepare_l();
- void abortPrepare(status_t err);
-
- status_t startAudioPlayer_l();
- void setVideoSource(const sp<MediaSource>& source);
- status_t finishSetDataSource_l();
- void setAudioSource(const sp<MediaSource>& source);
-
- status_t seekTo_l(int64_t timeUs);
- void seekAudioIfNecessary_l();
- void finishSeekIfNecessary(int64_t videoTimeUs);
-
- void postCheckAudioStatusEvent_l(int64_t delayUs);
- void postVideoLagEvent_l();
- void postStreamDoneEvent_l(status_t status);
- void postVideoEvent_l(int64_t delayUs = -1);
- void setVideoPostProcessingNode(
- M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);
- void postProgressCallbackEvent_l();
- void shutdownVideoDecoder_l();
- void onProgressCbEvent();
-
- void postOverlayUpdateEvent_l();
- void onUpdateOverlayEvent();
-
- status_t setDataSource_l_jpg();
- status_t prepare_l();
- status_t prepareAsync_l();
- void updateBatteryUsage_l();
- void updateSizeToRender(sp<MetaData> meta);
-
- void setDuration_l(int64_t durationUs);
- void setPosition_l(int64_t timeUs);
-
- PreviewPlayer(const PreviewPlayer &);
- PreviewPlayer &operator=(const PreviewPlayer &);
-};
-
-} // namespace android
-
-#endif // PREVIEW_PLAYER_H_
-
diff --git a/libvideoeditor/lvpp/PreviewRenderer.cpp b/libvideoeditor/lvpp/PreviewRenderer.cpp
deleted file mode 100755
index b1cfc8e..0000000
--- a/libvideoeditor/lvpp/PreviewRenderer.cpp
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#define LOG_NDEBUG 1
-#define LOG_TAG "PreviewRenderer"
-#include <utils/Log.h>
-
-#include "PreviewRenderer.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <gui/Surface.h>
-
-namespace android {
-
-PreviewRenderer* PreviewRenderer::CreatePreviewRenderer (
- const sp<Surface> &surface, size_t width, size_t height) {
-
- PreviewRenderer* renderer = new PreviewRenderer(surface, width, height);
-
- if (renderer->init() != 0) {
- delete renderer;
- return NULL;
- }
-
- return renderer;
-}
-
-PreviewRenderer::PreviewRenderer(
- const sp<Surface> &surface,
- size_t width, size_t height)
- : mSurface(surface),
- mWidth(width),
- mHeight(height) {
-}
-
-int PreviewRenderer::init() {
- int err = 0;
- ANativeWindow* anw = mSurface.get();
-
- err = native_window_api_connect(anw, NATIVE_WINDOW_API_CPU);
- if (err) goto fail;
-
- err = native_window_set_usage(
- anw, GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN);
- if (err) goto fail;
-
- err = native_window_set_buffer_count(anw, 3);
- if (err) goto fail;
-
- err = native_window_set_scaling_mode(
- anw, NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
- if (err) goto fail;
-
- err = native_window_set_buffers_geometry(
- anw, mWidth, mHeight, HAL_PIXEL_FORMAT_YV12);
- if (err) goto fail;
-
- err = native_window_set_buffers_transform(anw, 0);
- if (err) goto fail;
-
-fail:
- return err;
-}
-
-PreviewRenderer::~PreviewRenderer() {
- native_window_api_disconnect(mSurface.get(), NATIVE_WINDOW_API_CPU);
-}
-
-
-//
-// Provides a buffer and associated stride
-// This buffer is allocated by the SurfaceFlinger
-//
-// For optimal display performances, you should :
-// 1) call getBufferYV12()
-// 2) fill the buffer with your data
-// 3) call renderYV12() to take these changes into account
-//
-// For each call to getBufferYV12(), you must also call renderYV12()
-// Expected format in the buffer is YV12 formats (similar to YUV420 planar fromat)
-// for more details on this YV12 cf hardware/libhardware/include/hardware/hardware.h
-//
-void PreviewRenderer::getBufferYV12(uint8_t **data, size_t *stride) {
- int err = OK;
-
- if ((err = native_window_dequeue_buffer_and_wait(mSurface.get(),
- &mBuf)) != 0) {
- ALOGW("native_window_dequeue_buffer_and_wait returned error %d", err);
- return;
- }
-
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
-
- Rect bounds(mWidth, mHeight);
-
- void *dst;
- CHECK_EQ(0, mapper.lock(mBuf->handle,
- GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN,
- bounds, &dst));
-
- *data = (uint8_t*)dst;
- *stride = mBuf->stride;
-}
-
-
-//
-// Display the content of the buffer provided by last call to getBufferYV12()
-//
-// See getBufferYV12() for details.
-//
-void PreviewRenderer::renderYV12() {
- int err = OK;
-
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
-
- if (mBuf!= NULL) {
- CHECK_EQ(0, mapper.unlock(mBuf->handle));
-
- if ((err = mSurface->ANativeWindow::queueBuffer(mSurface.get(), mBuf, -1)) != 0) {
- ALOGW("Surface::queueBuffer returned error %d", err);
- }
- }
- mBuf = NULL;
-}
-
-} // namespace android
diff --git a/libvideoeditor/lvpp/PreviewRenderer.h b/libvideoeditor/lvpp/PreviewRenderer.h
deleted file mode 100755
index ce28276..0000000
--- a/libvideoeditor/lvpp/PreviewRenderer.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PREVIEW_RENDERER_H_
-
-#define PREVIEW_RENDERER_H_
-
-#include <media/stagefright/ColorConverter.h>
-#include <utils/RefBase.h>
-#include <system/window.h>
-#include <ui/GraphicBufferMapper.h>
-
-
-namespace android {
-
-class Surface;
-
-class PreviewRenderer {
-public:
-
-static PreviewRenderer* CreatePreviewRenderer (
- const sp<Surface> &surface,
- size_t width, size_t height);
-
- ~PreviewRenderer();
-
- void getBufferYV12(uint8_t **data, size_t *stride);
-
- void renderYV12();
-
- static size_t ALIGN(size_t x, size_t alignment) {
- return (x + alignment - 1) & ~(alignment - 1);
- }
-
-private:
- PreviewRenderer(
- const sp<Surface> &surface,
- size_t width, size_t height);
-
- int init();
-
- sp<Surface> mSurface;
- size_t mWidth, mHeight;
-
- ANativeWindowBuffer *mBuf;
-
- PreviewRenderer(const PreviewRenderer &);
- PreviewRenderer &operator=(const PreviewRenderer &);
-};
-
-} // namespace android
-
-#endif // PREVIEW_RENDERER_H_
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp b/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
deleted file mode 100755
index 91dc590..0000000
--- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.cpp
+++ /dev/null
@@ -1,900 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <inttypes.h>
-
-#define LOG_NDEBUG 1
-#define LOG_TAG "VideoEditorAudioPlayer"
-#include <utils/Log.h>
-
-#include <binder/IPCThreadState.h>
-#include <media/AudioTrack.h>
-#include <VideoEditorAudioPlayer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-
-#include <system/audio.h>
-
-#include "PreviewPlayer.h"
-namespace android {
-
-VideoEditorAudioPlayer::VideoEditorAudioPlayer(
- const sp<MediaPlayerBase::AudioSink> &audioSink,
- PreviewPlayer *observer)
- : mInputBuffer(NULL),
- mSampleRate(0),
- mLatencyUs(0),
- mFrameSize(0),
- mNumFramesPlayed(0),
- mPositionTimeMediaUs(-1),
- mPositionTimeRealUs(-1),
- mSeeking(false),
- mReachedEOS(false),
- mFinalStatus(OK),
- mStarted(false),
- mIsFirstBuffer(false),
- mFirstBufferResult(OK),
- mFirstBuffer(NULL),
- mAudioSink(audioSink),
- mObserver(observer) {
-
- ALOGV("Constructor");
- mBGAudioPCMFileHandle = NULL;
- mAudioProcess = NULL;
- mBGAudioPCMFileLength = 0;
- mBGAudioPCMFileTrimmedLength = 0;
- mBGAudioPCMFileDuration = 0;
- mBGAudioPCMFileSeekPoint = 0;
- mBGAudioPCMFileOriginalSeekPoint = 0;
- mBGAudioStoryBoardSkimTimeStamp = 0;
- mBGAudioStoryBoardCurrentMediaBeginCutTS = 0;
- mBGAudioStoryBoardCurrentMediaVolumeVal = 0;
- mSeekTimeUs = 0;
- mSource = NULL;
-}
-
-VideoEditorAudioPlayer::~VideoEditorAudioPlayer() {
-
- ALOGV("Destructor");
- if (mStarted) {
- reset();
- }
- if (mAudioProcess != NULL) {
- delete mAudioProcess;
- mAudioProcess = NULL;
- }
-}
-
-void VideoEditorAudioPlayer::pause(bool playPendingSamples) {
- ALOGV("pause: playPendingSamples=%d", playPendingSamples);
- CHECK(mStarted);
-
- if (playPendingSamples) {
- if (mAudioSink.get() != NULL) {
- mAudioSink->stop();
- } else {
- mAudioTrack->stop();
- }
- } else {
- if (mAudioSink.get() != NULL) {
- mAudioSink->pause();
- } else {
- mAudioTrack->pause();
- }
- }
-}
-
-void VideoEditorAudioPlayer::clear() {
- ALOGV("clear");
- if (!mStarted) {
- return;
- }
-
- if (mAudioSink.get() != NULL) {
- mAudioSink->stop();
- mAudioSink->close();
- } else {
- mAudioTrack->stop();
-
- mAudioTrack.clear();
- }
-
- // Make sure to release any buffer we hold onto so that the
- // source is able to stop().
-
- if (mFirstBuffer != NULL) {
- mFirstBuffer->release();
- mFirstBuffer = NULL;
- }
-
- if (mInputBuffer != NULL) {
- ALOGV("AudioPlayerBase releasing input buffer.");
-
- mInputBuffer->release();
- mInputBuffer = NULL;
- }
-
- mSource->stop();
-
- // The following hack is necessary to ensure that the OMX
- // component is completely released by the time we may try
- // to instantiate it again.
- wp<MediaSource> tmp = mSource;
- mSource.clear();
- while (tmp.promote() != NULL) {
- usleep(1000);
- }
- IPCThreadState::self()->flushCommands();
-
- mNumFramesPlayed = 0;
- mPositionTimeMediaUs = -1;
- mPositionTimeRealUs = -1;
- mSeeking = false;
- mReachedEOS = false;
- mFinalStatus = OK;
- mStarted = false;
-}
-
-status_t VideoEditorAudioPlayer::resume() {
- ALOGV("resume");
-
- AudioMixSettings audioMixSettings;
-
- // Single audio player is used;
- // Pass on the audio ducking parameters
- // which might have changed with new audio source
- audioMixSettings.lvInDucking_threshold =
- mAudioMixSettings->uiInDucking_threshold;
- audioMixSettings.lvInDucking_lowVolume =
- ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
- audioMixSettings.lvInDucking_enable =
- mAudioMixSettings->bInDucking_enable;
- audioMixSettings.lvPTVolLevel =
- ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
- audioMixSettings.lvBTVolLevel =
- ((M4OSA_Float)mAudioMixSettings->uiAddVolume) / 100.0;
- audioMixSettings.lvBTChannelCount = mAudioMixSettings->uiBTChannelCount;
- audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
-
- // Call to Audio mix param setting
- mAudioProcess->setMixParams(audioMixSettings);
-
- CHECK(mStarted);
-
- if (mAudioSink.get() != NULL) {
- mAudioSink->start();
- } else {
- mAudioTrack->start();
- }
- return OK;
-}
-
-status_t VideoEditorAudioPlayer::seekTo(int64_t time_us) {
- ALOGV("seekTo: %lld", time_us);
- Mutex::Autolock autoLock(mLock);
-
- mSeeking = true;
- mPositionTimeRealUs = mPositionTimeMediaUs = -1;
- mReachedEOS = false;
- mSeekTimeUs = time_us;
-
- if (mAudioSink != NULL) {
- mAudioSink->flush();
- } else {
- mAudioTrack->flush();
- }
-
- return OK;
-}
-
-bool VideoEditorAudioPlayer::isSeeking() {
- Mutex::Autolock lock(mLock);
- ALOGV("isSeeking: mSeeking=%d", mSeeking);
- return mSeeking;
-}
-
-bool VideoEditorAudioPlayer::reachedEOS(status_t *finalStatus) {
- ALOGV("reachedEOS: status=%d", mFinalStatus);
- *finalStatus = OK;
-
- Mutex::Autolock autoLock(mLock);
- *finalStatus = mFinalStatus;
- return mReachedEOS;
-}
-
-int64_t VideoEditorAudioPlayer::getRealTimeUs() {
- Mutex::Autolock autoLock(mLock);
- return getRealTimeUs_l();
-}
-
-int64_t VideoEditorAudioPlayer::getRealTimeUs_l() {
- return -mLatencyUs + (mNumFramesPlayed * 1000000) / mSampleRate;
-}
-
-int64_t VideoEditorAudioPlayer::getMediaTimeUs() {
- ALOGV("getMediaTimeUs");
- Mutex::Autolock autoLock(mLock);
-
- if (mPositionTimeMediaUs < 0 || mPositionTimeRealUs < 0) {
- if (mSeeking) {
- return mSeekTimeUs;
- }
-
- return 0;
- }
-
- int64_t realTimeOffset = getRealTimeUs_l() - mPositionTimeRealUs;
- if (realTimeOffset < 0) {
- realTimeOffset = 0;
- }
-
- return mPositionTimeMediaUs + realTimeOffset;
-}
-
-bool VideoEditorAudioPlayer::getMediaTimeMapping(
- int64_t *realtime_us, int64_t *mediatime_us) {
- ALOGV("getMediaTimeMapping");
- Mutex::Autolock autoLock(mLock);
-
- *realtime_us = mPositionTimeRealUs;
- *mediatime_us = mPositionTimeMediaUs;
-
- return mPositionTimeRealUs != -1 && mPositionTimeMediaUs != -1;
-}
-
-void VideoEditorAudioPlayer::setSource(const sp<MediaSource> &source) {
- Mutex::Autolock autoLock(mLock);
-
- // Before setting source, stop any existing source.
- // Make sure to release any buffer we hold onto so that the
- // source is able to stop().
-
- if (mFirstBuffer != NULL) {
- mFirstBuffer->release();
- mFirstBuffer = NULL;
- }
-
- if (mInputBuffer != NULL) {
- ALOGV("VideoEditorAudioPlayer releasing input buffer.");
-
- mInputBuffer->release();
- mInputBuffer = NULL;
- }
-
- if (mSource != NULL) {
- mSource->stop();
- mSource.clear();
- }
-
- mSource = source;
- mReachedEOS = false;
-}
-
-sp<MediaSource> VideoEditorAudioPlayer::getSource() {
- Mutex::Autolock autoLock(mLock);
- return mSource;
-}
-
-void VideoEditorAudioPlayer::setObserver(PreviewPlayer *observer) {
- ALOGV("setObserver");
- //CHECK(!mStarted);
- mObserver = observer;
-}
-
-bool VideoEditorAudioPlayer::isStarted() {
- return mStarted;
-}
-
-// static
-void VideoEditorAudioPlayer::AudioCallback(int event, void *user, void *info) {
- static_cast<VideoEditorAudioPlayer *>(user)->AudioCallback(event, info);
-}
-
-
-void VideoEditorAudioPlayer::AudioCallback(int event, void *info) {
- if (event != AudioTrack::EVENT_MORE_DATA) {
- return;
- }
-
- AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
- size_t numBytesWritten = fillBuffer(buffer->raw, buffer->size);
-
- buffer->size = numBytesWritten;
-}
-
-status_t VideoEditorAudioPlayer::start(bool sourceAlreadyStarted) {
- Mutex::Autolock autoLock(mLock);
- CHECK(!mStarted);
- CHECK(mSource != NULL);
- ALOGV("Start");
- status_t err;
- M4OSA_ERR result = M4NO_ERROR;
- M4OSA_UInt32 startTime = 0;
- M4OSA_UInt32 seekTimeStamp = 0;
- M4OSA_Bool bStoryBoardTSBeyondBTEndCutTime = M4OSA_FALSE;
-
- if (!sourceAlreadyStarted) {
- err = mSource->start();
- if (err != OK) {
- return err;
- }
- }
-
- // Create the BG Audio handler
- mAudioProcess = new VideoEditorBGAudioProcessing();
- AudioMixSettings audioMixSettings;
-
- // Pass on the audio ducking parameters
- audioMixSettings.lvInDucking_threshold =
- mAudioMixSettings->uiInDucking_threshold;
- audioMixSettings.lvInDucking_lowVolume =
- ((M4OSA_Float)mAudioMixSettings->uiInDucking_lowVolume) / 100.0;
- audioMixSettings.lvInDucking_enable =
- mAudioMixSettings->bInDucking_enable;
- audioMixSettings.lvPTVolLevel =
- ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal) / 100.0;
- audioMixSettings.lvBTVolLevel =
- ((M4OSA_Float)mAudioMixSettings->uiAddVolume) / 100.0;
- audioMixSettings.lvBTChannelCount = mAudioMixSettings->uiBTChannelCount;
- audioMixSettings.lvPTChannelCount = mAudioMixSettings->uiNbChannels;
-
- // Call to Audio mix param setting
- mAudioProcess->setMixParams(audioMixSettings);
-
- // Get the BG Audio PCM file details
- if ( mBGAudioPCMFileHandle ) {
-
- // TODO : 32bits required for OSAL, to be updated once OSAL is updated
- M4OSA_UInt32 tmp32 = 0;
- result = M4OSA_fileReadGetOption(mBGAudioPCMFileHandle,
- M4OSA_kFileReadGetFileSize,
- (M4OSA_Void**)&tmp32);
- mBGAudioPCMFileLength = tmp32;
- mBGAudioPCMFileTrimmedLength = mBGAudioPCMFileLength;
-
-
- ALOGV("VideoEditorAudioPlayer::start M4OSA_kFileReadGetFileSize = %lld",
- mBGAudioPCMFileLength);
-
- // Get the duration in time of the audio BT
- if ( result == M4NO_ERROR ) {
- ALOGV("VEAP: channels = %" PRIu32 " freq = %" PRIu32,
- mAudioMixSettings->uiNbChannels, mAudioMixSettings->uiSamplingFrequency);
-
- // No trim
- mBGAudioPCMFileDuration = ((
- (int64_t)(mBGAudioPCMFileLength/sizeof(M4OSA_UInt16)/
- mAudioMixSettings->uiNbChannels))*1000 ) /
- mAudioMixSettings->uiSamplingFrequency;
-
- ALOGV("VideoEditorAudioPlayer:: beginCutMs %d , endCutMs %d",
- (unsigned int) mAudioMixSettings->beginCutMs,
- (unsigned int) mAudioMixSettings->endCutMs);
-
- // Remove the trim part
- if ((mAudioMixSettings->beginCutMs == 0) &&
- (mAudioMixSettings->endCutMs != 0)) {
- // End time itself the file duration
- mBGAudioPCMFileDuration = mAudioMixSettings->endCutMs;
- // Limit the file length also
- mBGAudioPCMFileTrimmedLength = ((
- (int64_t)(mBGAudioPCMFileDuration *
- mAudioMixSettings->uiSamplingFrequency) *
- mAudioMixSettings->uiNbChannels) *
- sizeof(M4OSA_UInt16)) / 1000;
- }
- else if ((mAudioMixSettings->beginCutMs != 0) &&
- (mAudioMixSettings->endCutMs == mBGAudioPCMFileDuration)) {
- // End time itself the file duration
- mBGAudioPCMFileDuration = mBGAudioPCMFileDuration -
- mAudioMixSettings->beginCutMs;
- // Limit the file length also
- mBGAudioPCMFileTrimmedLength = ((
- (int64_t)(mBGAudioPCMFileDuration *
- mAudioMixSettings->uiSamplingFrequency) *
- mAudioMixSettings->uiNbChannels) *
- sizeof(M4OSA_UInt16)) / 1000;
- }
- else if ((mAudioMixSettings->beginCutMs != 0) &&
- (mAudioMixSettings->endCutMs != 0)) {
- // End time itself the file duration
- mBGAudioPCMFileDuration = mAudioMixSettings->endCutMs -
- mAudioMixSettings->beginCutMs;
- // Limit the file length also
- mBGAudioPCMFileTrimmedLength = ((
- (int64_t)(mBGAudioPCMFileDuration *
- mAudioMixSettings->uiSamplingFrequency) *
- mAudioMixSettings->uiNbChannels) *
- sizeof(M4OSA_UInt16)) / 1000; /*make to sec from ms*/
- }
-
- ALOGV("VideoEditorAudioPlayer: file duration recorded : %lld",
- mBGAudioPCMFileDuration);
- }
-
- // Last played location to be seeked at for next media item
- if ( result == M4NO_ERROR ) {
- ALOGV("VideoEditorAudioPlayer::mBGAudioStoryBoardSkimTimeStamp %lld",
- mBGAudioStoryBoardSkimTimeStamp);
- ALOGV("VideoEditorAudioPlayer::uiAddCts %d",
- mAudioMixSettings->uiAddCts);
- if (mBGAudioStoryBoardSkimTimeStamp >= mAudioMixSettings->uiAddCts) {
- startTime = (mBGAudioStoryBoardSkimTimeStamp -
- mAudioMixSettings->uiAddCts);
- }
- else {
- // do nothing
- }
-
- ALOGV("VideoEditorAudioPlayer::startTime %" PRIu32, startTime);
- seekTimeStamp = 0;
- if (startTime) {
- if (startTime >= mBGAudioPCMFileDuration) {
- // The BG track should be looped and started again
- if (mAudioMixSettings->bLoop) {
- // Add begin cut time to the mod value
- seekTimeStamp = ((startTime%mBGAudioPCMFileDuration) +
- mAudioMixSettings->beginCutMs);
- }else {
- // Looping disabled, donot do BT Mix , set to file end
- seekTimeStamp = (mBGAudioPCMFileDuration +
- mAudioMixSettings->beginCutMs);
- }
- }else {
- // BT still present , just seek to story board time
- seekTimeStamp = startTime + mAudioMixSettings->beginCutMs;
- }
- }
- else {
- seekTimeStamp = mAudioMixSettings->beginCutMs;
- }
-
- // Convert the seekTimeStamp to file location
- mBGAudioPCMFileOriginalSeekPoint = (
- (int64_t)(mAudioMixSettings->beginCutMs)
- * mAudioMixSettings->uiSamplingFrequency
- * mAudioMixSettings->uiNbChannels
- * sizeof(M4OSA_UInt16))/ 1000 ; /*make to sec from ms*/
-
- mBGAudioPCMFileSeekPoint = ((int64_t)(seekTimeStamp)
- * mAudioMixSettings->uiSamplingFrequency
- * mAudioMixSettings->uiNbChannels
- * sizeof(M4OSA_UInt16))/ 1000 ;
- }
- }
-
- // We allow an optional INFO_FORMAT_CHANGED at the very beginning
- // of playback, if there is one, getFormat below will retrieve the
- // updated format, if there isn't, we'll stash away the valid buffer
- // of data to be used on the first audio callback.
-
- CHECK(mFirstBuffer == NULL);
-
- mFirstBufferResult = mSource->read(&mFirstBuffer);
- if (mFirstBufferResult == INFO_FORMAT_CHANGED) {
- ALOGV("INFO_FORMAT_CHANGED!!!");
-
- CHECK(mFirstBuffer == NULL);
- mFirstBufferResult = OK;
- mIsFirstBuffer = false;
- } else {
- mIsFirstBuffer = true;
- }
-
- sp<MetaData> format = mSource->getFormat();
- const char *mime;
- bool success = format->findCString(kKeyMIMEType, &mime);
- CHECK(success);
- CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
-
- success = format->findInt32(kKeySampleRate, &mSampleRate);
- CHECK(success);
-
- int32_t numChannels;
- success = format->findInt32(kKeyChannelCount, &numChannels);
- CHECK(success);
-
- if (mAudioSink.get() != NULL) {
- status_t err = mAudioSink->open(
- mSampleRate, numChannels, CHANNEL_MASK_USE_CHANNEL_ORDER, AUDIO_FORMAT_PCM_16_BIT,
- DEFAULT_AUDIOSINK_BUFFERCOUNT,
- &VideoEditorAudioPlayer::AudioSinkCallback, this);
- if (err != OK) {
- if (mFirstBuffer != NULL) {
- mFirstBuffer->release();
- mFirstBuffer = NULL;
- }
-
- if (!sourceAlreadyStarted) {
- mSource->stop();
- }
-
- return err;
- }
-
- mLatencyUs = (int64_t)mAudioSink->latency() * 1000;
- mFrameSize = mAudioSink->frameSize();
-
- mAudioSink->start();
- } else {
- mAudioTrack = new AudioTrack(
- AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT,
- audio_channel_out_mask_from_count(numChannels),
- 0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0);
-
- if ((err = mAudioTrack->initCheck()) != OK) {
- mAudioTrack.clear();
-
- if (mFirstBuffer != NULL) {
- mFirstBuffer->release();
- mFirstBuffer = NULL;
- }
-
- if (!sourceAlreadyStarted) {
- mSource->stop();
- }
-
- return err;
- }
-
- mLatencyUs = (int64_t)mAudioTrack->latency() * 1000;
- mFrameSize = mAudioTrack->frameSize();
-
- mAudioTrack->start();
- }
-
- mStarted = true;
-
- return OK;
-}
-
-
-void VideoEditorAudioPlayer::reset() {
-
- ALOGV("reset");
- clear();
-
- // Capture the current seek point
- mBGAudioPCMFileSeekPoint = 0;
- mBGAudioStoryBoardSkimTimeStamp =0;
- mBGAudioStoryBoardCurrentMediaBeginCutTS=0;
-}
-
-size_t VideoEditorAudioPlayer::AudioSinkCallback(
- MediaPlayerBase::AudioSink *audioSink,
- void *buffer, size_t size, void *cookie,
- MediaPlayerBase::AudioSink::cb_event_t event) {
- VideoEditorAudioPlayer *me = (VideoEditorAudioPlayer *)cookie;
-
- if (event == MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER ) {
- return me->fillBuffer(buffer, size);
- } else {
- return 0;
- }
-}
-
-
-size_t VideoEditorAudioPlayer::fillBuffer(void *data, size_t size) {
-
- if (mReachedEOS) {
- return 0;
- }
-
- size_t size_done = 0;
- size_t size_remaining = size;
-
- M4OSA_ERR err = M4NO_ERROR;
- M4AM_Buffer16 bgFrame = {NULL, 0};
- M4AM_Buffer16 mixFrame = {NULL, 0};
- M4AM_Buffer16 ptFrame = {NULL, 0};
- int64_t currentSteamTS = 0;
- int64_t startTimeForBT = 0;
- M4OSA_Float fPTVolLevel =
- ((M4OSA_Float)mBGAudioStoryBoardCurrentMediaVolumeVal)/100;
- M4OSA_Int16 *pPTMdata=NULL;
- M4OSA_UInt32 uiPCMsize = 0;
-
- bool postSeekComplete = false;
- bool postEOS = false;
-
- while ((size_remaining > 0)&&(err==M4NO_ERROR)) {
- MediaSource::ReadOptions options;
-
- {
- Mutex::Autolock autoLock(mLock);
- if (mSeeking) {
- if (mIsFirstBuffer) {
- if (mFirstBuffer != NULL) {
- mFirstBuffer->release();
- mFirstBuffer = NULL;
- }
- mIsFirstBuffer = false;
- }
-
- options.setSeekTo(mSeekTimeUs);
-
- if (mInputBuffer != NULL) {
- mInputBuffer->release();
- mInputBuffer = NULL;
- }
-
- mSeeking = false;
-
- if (mObserver) {
- postSeekComplete = true;
- }
- }
- }
-
- if (mInputBuffer == NULL) {
- status_t status = OK;
-
- if (mIsFirstBuffer) {
- mInputBuffer = mFirstBuffer;
- mFirstBuffer = NULL;
- status = mFirstBufferResult;
-
- mIsFirstBuffer = false;
- } else {
-
- {
- Mutex::Autolock autoLock(mLock);
- status = mSource->read(&mInputBuffer, &options);
- }
- // Data is Primary Track, mix with background track
- // after reading same size from Background track PCM file
- if (status == OK)
- {
- // Mix only when skim point is after startTime of BT
- if (((mBGAudioStoryBoardSkimTimeStamp* 1000) +
- (mPositionTimeMediaUs - mSeekTimeUs)) >=
- (int64_t)(mAudioMixSettings->uiAddCts * 1000)) {
-
- ALOGV("VideoEditorAudioPlayer::INSIDE MIXING");
- ALOGV("Checking %lld <= %lld",
- mBGAudioPCMFileSeekPoint-mBGAudioPCMFileOriginalSeekPoint,
- mBGAudioPCMFileTrimmedLength);
-
-
- M4OSA_Void* ptr;
- ptr = reinterpret_cast<M4OSA_Void*>(
- reinterpret_cast<uintptr_t>(mInputBuffer->data()) +
- mInputBuffer->range_offset());
-
- M4OSA_UInt32 len = mInputBuffer->range_length();
- M4OSA_Context fp = M4OSA_NULL;
-
- uiPCMsize = (mInputBuffer->range_length())/2;
- pPTMdata = (M4OSA_Int16*) ((uint8_t*) mInputBuffer->data()
- + mInputBuffer->range_offset());
-
- ALOGV("mix with background malloc to do len %d", len);
-
- bgFrame.m_dataAddress = (M4OSA_UInt16*)M4OSA_32bitAlignedMalloc( len, 1,
- (M4OSA_Char*)"bgFrame");
- bgFrame.m_bufferSize = len;
-
- mixFrame.m_dataAddress = (M4OSA_UInt16*)M4OSA_32bitAlignedMalloc(len, 1,
- (M4OSA_Char*)"mixFrame");
- mixFrame.m_bufferSize = len;
-
- ALOGV("mix with bgm with size %lld", mBGAudioPCMFileLength);
-
- CHECK(mInputBuffer->meta_data()->findInt64(kKeyTime,
- &mPositionTimeMediaUs));
-
- if (mBGAudioPCMFileSeekPoint -
- mBGAudioPCMFileOriginalSeekPoint <=
- (mBGAudioPCMFileTrimmedLength - len)) {
-
- ALOGV("Checking mBGAudioPCMFileHandle %p",
- mBGAudioPCMFileHandle);
-
- if (mBGAudioPCMFileHandle != M4OSA_NULL) {
- ALOGV("fillBuffer seeking file to %lld",
- mBGAudioPCMFileSeekPoint);
-
- // TODO : 32bits required for OSAL
- M4OSA_UInt32 tmp32 =
- (M4OSA_UInt32)mBGAudioPCMFileSeekPoint;
- err = M4OSA_fileReadSeek(mBGAudioPCMFileHandle,
- M4OSA_kFileSeekBeginning,
- (M4OSA_FilePosition*)&tmp32);
-
- mBGAudioPCMFileSeekPoint = tmp32;
-
- if (err != M4NO_ERROR){
- ALOGE("M4OSA_fileReadSeek err %d",(int)err);
- }
-
- err = M4OSA_fileReadData(mBGAudioPCMFileHandle,
- (M4OSA_Int8*)bgFrame.m_dataAddress,
- (M4OSA_UInt32*)&len);
- if (err == M4WAR_NO_DATA_YET ) {
-
- ALOGV("fillBuffer End of file reached");
- err = M4NO_ERROR;
-
- // We reached the end of file
- // move to begin cut time equal value
- if (mAudioMixSettings->bLoop) {
- mBGAudioPCMFileSeekPoint =
- (((int64_t)(mAudioMixSettings->beginCutMs) *
- mAudioMixSettings->uiSamplingFrequency) *
- mAudioMixSettings->uiNbChannels *
- sizeof(M4OSA_UInt16)) / 1000;
- ALOGV("fillBuffer Looping \
- to mBGAudioPCMFileSeekPoint %lld",
- mBGAudioPCMFileSeekPoint);
- }
- else {
- // No mixing;
- // take care of volume of primary track
- if (fPTVolLevel < 1.0) {
- setPrimaryTrackVolume(pPTMdata,
- uiPCMsize, fPTVolLevel);
- }
- }
- } else if (err != M4NO_ERROR ) {
- ALOGV("fileReadData for audio err %d", err);
- } else {
- mBGAudioPCMFileSeekPoint += len;
- ALOGV("fillBuffer mBGAudioPCMFileSeekPoint \
- %lld", mBGAudioPCMFileSeekPoint);
-
- // Assign the ptr data to primary track
- ptFrame.m_dataAddress = (M4OSA_UInt16*)ptr;
- ptFrame.m_bufferSize = len;
-
- // Call to mix and duck
- mAudioProcess->mixAndDuck(
- &ptFrame, &bgFrame, &mixFrame);
-
- // Overwrite the decoded buffer
- memcpy((void *)ptr,
- (void *)mixFrame.m_dataAddress, len);
- }
- }
- } else if (mAudioMixSettings->bLoop){
- // Move to begin cut time equal value
- mBGAudioPCMFileSeekPoint =
- mBGAudioPCMFileOriginalSeekPoint;
- } else {
- // No mixing;
- // take care of volume level of primary track
- if(fPTVolLevel < 1.0) {
- setPrimaryTrackVolume(
- pPTMdata, uiPCMsize, fPTVolLevel);
- }
- }
- if (bgFrame.m_dataAddress) {
- free(bgFrame.m_dataAddress);
- }
- if (mixFrame.m_dataAddress) {
- free(mixFrame.m_dataAddress);
- }
- } else {
- // No mixing;
- // take care of volume level of primary track
- if(fPTVolLevel < 1.0) {
- setPrimaryTrackVolume(pPTMdata, uiPCMsize,
- fPTVolLevel);
- }
- }
- }
- }
-
- CHECK((status == OK && mInputBuffer != NULL)
- || (status != OK && mInputBuffer == NULL));
-
- Mutex::Autolock autoLock(mLock);
-
- if (status != OK) {
- ALOGV("fillBuffer: mSource->read returned err %d", status);
- if (mObserver && !mReachedEOS) {
- postEOS = true;
- }
-
- mReachedEOS = true;
- mFinalStatus = status;
- break;
- }
-
- CHECK(mInputBuffer->meta_data()->findInt64(
- kKeyTime, &mPositionTimeMediaUs));
-
- mPositionTimeRealUs =
- ((mNumFramesPlayed + size_done / mFrameSize) * 1000000)
- / mSampleRate;
-
- ALOGV("buffer->size() = %d, "
- "mPositionTimeMediaUs=%.2f mPositionTimeRealUs=%.2f",
- mInputBuffer->range_length(),
- mPositionTimeMediaUs / 1E6, mPositionTimeRealUs / 1E6);
- }
-
- if (mInputBuffer->range_length() == 0) {
- mInputBuffer->release();
- mInputBuffer = NULL;
-
- continue;
- }
-
- size_t copy = size_remaining;
- if (copy > mInputBuffer->range_length()) {
- copy = mInputBuffer->range_length();
- }
-
- memcpy((char *)data + size_done,
- (const char *)mInputBuffer->data() + mInputBuffer->range_offset(),
- copy);
-
- mInputBuffer->set_range(mInputBuffer->range_offset() + copy,
- mInputBuffer->range_length() - copy);
-
- size_done += copy;
- size_remaining -= copy;
- }
-
- {
- Mutex::Autolock autoLock(mLock);
- mNumFramesPlayed += size_done / mFrameSize;
- }
-
- if (postEOS) {
- mObserver->postAudioEOS();
- }
-
- if (postSeekComplete) {
- mObserver->postAudioSeekComplete();
- }
-
- return size_done;
-}
-
-void VideoEditorAudioPlayer::setAudioMixSettings(
- M4xVSS_AudioMixingSettings* pAudioMixSettings) {
- mAudioMixSettings = pAudioMixSettings;
-}
-
-void VideoEditorAudioPlayer::setAudioMixPCMFileHandle(
- M4OSA_Context pBGAudioPCMFileHandle){
- mBGAudioPCMFileHandle = pBGAudioPCMFileHandle;
-}
-
-void VideoEditorAudioPlayer::setAudioMixStoryBoardSkimTimeStamp(
- M4OSA_UInt32 pBGAudioStoryBoardSkimTimeStamp,
- M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,
- M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal) {
-
- mBGAudioStoryBoardSkimTimeStamp = pBGAudioStoryBoardSkimTimeStamp;
- mBGAudioStoryBoardCurrentMediaBeginCutTS = pBGAudioCurrentMediaBeginCutTS;
- mBGAudioStoryBoardCurrentMediaVolumeVal = pBGAudioCurrentMediaVolumeVal;
-}
-
-void VideoEditorAudioPlayer::setPrimaryTrackVolume(
- M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel) {
-
- while(size-- > 0) {
- *data = (M4OSA_Int16)((*data)*volLevel);
- data++;
- }
-}
-
-}
diff --git a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h b/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
deleted file mode 100755
index 2caf5e8..0000000
--- a/libvideoeditor/lvpp/VideoEditorAudioPlayer.h
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef VE_AUDIO_PLAYER_H_
-#define VE_AUDIO_PLAYER_H_
-
-#include <media/MediaPlayerInterface.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/TimeSource.h>
-#include <utils/threads.h>
-
-#include "M4xVSS_API.h"
-#include "VideoEditorMain.h"
-#include "M4OSA_FileReader.h"
-#include "VideoEditorBGAudioProcessing.h"
-
-
-namespace android {
-
-class MediaSource;
-class AudioTrack;
-class PreviewPlayer;
-
-class VideoEditorAudioPlayer : public TimeSource {
-public:
- enum {
- REACHED_EOS,
- SEEK_COMPLETE
- };
-
- VideoEditorAudioPlayer(const sp<MediaPlayerBase::AudioSink> &audioSink,
- PreviewPlayer *audioObserver = NULL);
-
- ~VideoEditorAudioPlayer();
-
- // Return time in us.
- int64_t getRealTimeUs();
-
- // Returns the timestamp of the last buffer played (in us).
- int64_t getMediaTimeUs();
-
- // Returns true iff a mapping is established, i.e. the AudioPlayerBase
- // has played at least one frame of audio.
- bool getMediaTimeMapping(int64_t *realtime_us, int64_t *mediatime_us);
-
- status_t start(bool sourceAlreadyStarted = false);
- void pause(bool playPendingSamples = false);
- status_t resume();
- status_t seekTo(int64_t time_us);
- bool isSeeking();
- bool reachedEOS(status_t *finalStatus);
-
- void setAudioMixSettings(M4xVSS_AudioMixingSettings* pAudioMixSettings);
- void setAudioMixPCMFileHandle(M4OSA_Context pBGAudioPCMFileHandle);
- void setAudioMixStoryBoardSkimTimeStamp(
- M4OSA_UInt32 pBGAudioStoryBoardSkimTimeStamp,
- M4OSA_UInt32 pBGAudioCurrentMediaBeginCutTS,
- M4OSA_UInt32 pBGAudioCurrentMediaVolumeVal);
-
- void setObserver(PreviewPlayer *observer);
- void setSource(const sp<MediaSource> &source);
- sp<MediaSource> getSource();
-
- bool isStarted();
-private:
-
- M4xVSS_AudioMixingSettings *mAudioMixSettings;
- VideoEditorBGAudioProcessing *mAudioProcess;
-
- M4OSA_Context mBGAudioPCMFileHandle;
- int64_t mBGAudioPCMFileLength;
- int64_t mBGAudioPCMFileTrimmedLength;
- int64_t mBGAudioPCMFileDuration;
- int64_t mBGAudioPCMFileSeekPoint;
- int64_t mBGAudioPCMFileOriginalSeekPoint;
- int64_t mBGAudioStoryBoardSkimTimeStamp;
- int64_t mBGAudioStoryBoardCurrentMediaBeginCutTS;
- int64_t mBGAudioStoryBoardCurrentMediaVolumeVal;
-
- sp<MediaSource> mSource;
- sp<AudioTrack> mAudioTrack;
-
- MediaBuffer *mInputBuffer;
-
- int mSampleRate;
- int64_t mLatencyUs;
- size_t mFrameSize;
-
- Mutex mLock;
- int64_t mNumFramesPlayed;
-
- int64_t mPositionTimeMediaUs;
- int64_t mPositionTimeRealUs;
-
- bool mSeeking;
- bool mReachedEOS;
- status_t mFinalStatus;
- int64_t mSeekTimeUs;
-
- bool mStarted;
-
- bool mIsFirstBuffer;
- status_t mFirstBufferResult;
- MediaBuffer *mFirstBuffer;
-
- sp<MediaPlayerBase::AudioSink> mAudioSink;
- PreviewPlayer *mObserver;
-
- static void AudioCallback(int event, void *user, void *info);
- void AudioCallback(int event, void *info);
- size_t fillBuffer(void *data, size_t size);
- static size_t AudioSinkCallback(
- MediaPlayerBase::AudioSink *audioSink,
- void *data, size_t size, void *me,
- MediaPlayerBase::AudioSink::cb_event_t event);
-
- void reset();
- void clear();
- int64_t getRealTimeUs_l();
- void setPrimaryTrackVolume(
- M4OSA_Int16 *data, M4OSA_UInt32 size, M4OSA_Float volLevel);
-
- VideoEditorAudioPlayer(const VideoEditorAudioPlayer &);
- VideoEditorAudioPlayer &operator=(const VideoEditorAudioPlayer &);
-};
-
-} // namespace android
-
-#endif // VE_AUDIO_PLAYER_H_
diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
deleted file mode 100755
index 0c12aac..0000000
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.cpp
+++ /dev/null
@@ -1,295 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <inttypes.h>
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "VideoEditorBGAudioProcessing"
-#include <utils/Log.h>
-#include "VideoEditorBGAudioProcessing.h"
-
-namespace android {
-
-VideoEditorBGAudioProcessing::VideoEditorBGAudioProcessing() {
- ALOGV("Constructor");
-
- mAudVolArrIndex = 0;
- mDoDucking = 0;
- mDucking_enable = 0;
- mDucking_lowVolume = 0;
- mDucking_threshold = 0;
- mDuckingFactor = 0;
-
- mBTVolLevel = 0;
- mPTVolLevel = 0;
-
- mIsSSRCneeded = 0;
- mChannelConversion = 0;
-
- mBTFormat = MONO_16_BIT;
-
- mInSampleRate = 8000;
- mOutSampleRate = 16000;
- mPTChannelCount = 2;
- mBTChannelCount = 1;
-}
-
-M4OSA_Int32 VideoEditorBGAudioProcessing::mixAndDuck(
- void *primaryTrackBuffer,
- void *backgroundTrackBuffer,
- void *outBuffer) {
-
- ALOGV("mixAndDuck: track buffers (primary: %p and background: %p) "
- "and out buffer %p",
- primaryTrackBuffer, backgroundTrackBuffer, outBuffer);
-
- M4AM_Buffer16* pPrimaryTrack = (M4AM_Buffer16*)primaryTrackBuffer;
- M4AM_Buffer16* pBackgroundTrack = (M4AM_Buffer16*)backgroundTrackBuffer;
- M4AM_Buffer16* pMixedOutBuffer = (M4AM_Buffer16*)outBuffer;
-
- // Output size if same as PT size
- pMixedOutBuffer->m_bufferSize = pPrimaryTrack->m_bufferSize;
-
- // Before mixing, we need to have only PT as out buffer
- memcpy((void *)pMixedOutBuffer->m_dataAddress,
- (void *)pPrimaryTrack->m_dataAddress, pMixedOutBuffer->m_bufferSize);
-
- // Initialize ducking variables
- // Initially contains the input primary track
- M4OSA_Int16 *pPTMdata2 = (M4OSA_Int16*)pMixedOutBuffer->m_dataAddress;
-
- // Contains BG track processed data(like channel conversion etc..
- M4OSA_Int16 *pBTMdata1 = (M4OSA_Int16*) pBackgroundTrack->m_dataAddress;
-
- // Since we need to give sample count and not buffer size
- M4OSA_UInt32 uiPCMsize = pMixedOutBuffer->m_bufferSize / 2 ;
-
- if ((mDucking_enable) && (mPTVolLevel != 0.0)) {
- M4OSA_Int32 peakDbValue = 0;
- M4OSA_Int32 previousDbValue = 0;
- M4OSA_Int16 *pPCM16Sample = (M4OSA_Int16*)pPrimaryTrack->m_dataAddress;
- const size_t n = pPrimaryTrack->m_bufferSize / sizeof(M4OSA_Int16);
-
- for (size_t loopIndex = 0; loopIndex < n; ++loopIndex) {
- if (pPCM16Sample[loopIndex] >= 0) {
- peakDbValue = previousDbValue > pPCM16Sample[loopIndex] ?
- previousDbValue : pPCM16Sample[loopIndex];
- previousDbValue = peakDbValue;
- } else {
- peakDbValue = previousDbValue > -pPCM16Sample[loopIndex] ?
- previousDbValue: -pPCM16Sample[loopIndex];
- previousDbValue = peakDbValue;
- }
- }
-
- mAudioVolumeArray[mAudVolArrIndex] = getDecibelSound(peakDbValue);
-
- // Check for threshold is done after kProcessingWindowSize cycles
- if (mAudVolArrIndex >= kProcessingWindowSize - 1) {
- mDoDucking = isThresholdBreached(
- mAudioVolumeArray, mAudVolArrIndex, mDucking_threshold);
-
- mAudVolArrIndex = 0;
- } else {
- mAudVolArrIndex++;
- }
-
- //
- // Below logic controls the mixing weightage
- // for Background and Primary Tracks
- // for the duration of window under analysis,
- // to give fade-out for Background and fade-in for primary
- // Current fading factor is distributed in equal range over
- // the defined window size.
- // For a window size = 25
- // (500 ms (window under analysis) / 20 ms (sample duration))
- //
-
- if (mDoDucking) {
- if (mDuckingFactor > mDucking_lowVolume) {
- // FADE OUT BG Track
- // Increment ducking factor in total steps in factor
- // of low volume steps to reach low volume level
- mDuckingFactor -= mDucking_lowVolume;
- } else {
- mDuckingFactor = mDucking_lowVolume;
- }
- } else {
- if (mDuckingFactor < 1.0 ) {
- // FADE IN BG Track
- // Increment ducking factor in total steps of
- // low volume factor to reach orig.volume level
- mDuckingFactor += mDucking_lowVolume;
- } else {
- mDuckingFactor = 1.0;
- }
- }
- } // end if - mDucking_enable
-
-
- // Mixing logic
- ALOGV("Out of Ducking analysis uiPCMsize %d %f %f",
- mDoDucking, mDuckingFactor, mBTVolLevel);
- while (uiPCMsize-- > 0) {
-
- // Set vol factor for BT and PT
- *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1*mBTVolLevel);
- *pPTMdata2 = (M4OSA_Int16)(*pPTMdata2*mPTVolLevel);
-
- // Mix the two samples
- if (mDoDucking) {
-
- // Duck the BG track to ducking factor value before mixing
- *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(mDuckingFactor));
-
- // mix as normal case
- *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);
- } else {
-
- *pBTMdata1 = (M4OSA_Int16)((*pBTMdata1)*(mDuckingFactor));
- *pBTMdata1 = (M4OSA_Int16)(*pBTMdata1 /2 + *pPTMdata2 /2);
- }
-
- M4OSA_Int32 temp;
- if (*pBTMdata1 < 0) {
- temp = -(*pBTMdata1) * 2; // bring to original Amplitude level
-
- if (temp > 32767) {
- *pBTMdata1 = -32766; // less then max allowed value
- } else {
- *pBTMdata1 = (M4OSA_Int16)(-temp);
- }
- } else {
- temp = (*pBTMdata1) * 2; // bring to original Amplitude level
- if ( temp > 32768) {
- *pBTMdata1 = 32767; // less than max allowed value
- } else {
- *pBTMdata1 = (M4OSA_Int16)temp;
- }
- }
-
- pBTMdata1++;
- pPTMdata2++;
- }
-
- memcpy((void *)pMixedOutBuffer->m_dataAddress,
- (void *)pBackgroundTrack->m_dataAddress,
- pBackgroundTrack->m_bufferSize);
-
- ALOGV("mixAndDuck: X");
- return M4NO_ERROR;
-}
-
-M4OSA_Int32 VideoEditorBGAudioProcessing::calculateOutResampleBufSize() {
-
- // This already takes care of channel count in mBTBuffer.m_bufferSize
- return (mOutSampleRate / mInSampleRate) * mBTBuffer.m_bufferSize;
-}
-
-void VideoEditorBGAudioProcessing::setMixParams(
- const AudioMixSettings& setting) {
- ALOGV("setMixParams");
-
- mDucking_enable = setting.lvInDucking_enable;
- mDucking_lowVolume = setting.lvInDucking_lowVolume;
- mDucking_threshold = setting.lvInDucking_threshold;
- mPTVolLevel = setting.lvPTVolLevel;
- mBTVolLevel = setting.lvBTVolLevel ;
- mBTChannelCount = setting.lvBTChannelCount;
- mPTChannelCount = setting.lvPTChannelCount;
- mBTFormat = setting.lvBTFormat;
- mInSampleRate = setting.lvInSampleRate;
- mOutSampleRate = setting.lvOutSampleRate;
-
- // Reset the following params to default values
- mAudVolArrIndex = 0;
- mDoDucking = 0;
- mDuckingFactor = 1.0;
-
- ALOGV("ducking enable 0x%x lowVolume %f threshold %" PRIu32 " "
- "fPTVolLevel %f BTVolLevel %f",
- mDucking_enable, mDucking_lowVolume, mDucking_threshold,
- mPTVolLevel, mPTVolLevel);
-
- // Decides if SSRC support is needed for this mixing
- mIsSSRCneeded = (setting.lvInSampleRate != setting.lvOutSampleRate);
- if (setting.lvBTChannelCount != setting.lvPTChannelCount){
- if (setting.lvBTChannelCount == 2){
- mChannelConversion = 1; // convert to MONO
- } else {
- mChannelConversion = 2; // Convert to STEREO
- }
- } else {
- mChannelConversion = 0;
- }
-}
-
-// Fast way to compute 10 * log(value)
-M4OSA_Int32 VideoEditorBGAudioProcessing::getDecibelSound(M4OSA_UInt32 value) {
- ALOGV("getDecibelSound: %ld", value);
-
- if (value <= 0 || value > 0x8000) {
- return 0;
- } else if (value > 0x4000) { // 32768
- return 90;
- } else if (value > 0x2000) { // 16384
- return 84;
- } else if (value > 0x1000) { // 8192
- return 78;
- } else if (value > 0x0800) { // 4028
- return 72;
- } else if (value > 0x0400) { // 2048
- return 66;
- } else if (value > 0x0200) { // 1024
- return 60;
- } else if (value > 0x0100) { // 512
- return 54;
- } else if (value > 0x0080) { // 256
- return 48;
- } else if (value > 0x0040) { // 128
- return 42;
- } else if (value > 0x0020) { // 64
- return 36;
- } else if (value > 0x0010) { // 32
- return 30;
- } else if (value > 0x0008) { // 16
- return 24;
- } else if (value > 0x0007) { // 8
- return 24;
- } else if (value > 0x0003) { // 4
- return 18;
- } else if (value > 0x0001) { // 2
- return 12;
- } else { // 1
- return 6;
- }
-}
-
-M4OSA_Bool VideoEditorBGAudioProcessing::isThresholdBreached(
- M4OSA_Int32* averageValue,
- M4OSA_Int32 storeCount,
- M4OSA_Int32 thresholdValue) {
-
- ALOGV("isThresholdBreached");
-
- int totalValue = 0;
- for (int i = 0; i < storeCount; ++i) {
- totalValue += averageValue[i];
- }
- return (totalValue / storeCount > thresholdValue);
-}
-
-}//namespace android
diff --git a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h b/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
deleted file mode 100755
index cb7a69f..0000000
--- a/libvideoeditor/lvpp/VideoEditorBGAudioProcessing.h
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef VE_BACKGROUND_AUDIO_PROC_H
-#define VE_BACKGROUND_AUDIO_PROC_H
-
-#include "M4OSA_Error.h"
-#include "M4OSA_Types.h"
-#include "M4OSA_Memory.h"
-#include "M4OSA_Export.h"
-#include "M4OSA_CoreID.h"
-
-
-namespace android {
-
-typedef struct {
- M4OSA_UInt16* m_dataAddress; // Android SRC needs a Int16 pointer
- M4OSA_UInt32 m_bufferSize;
-} M4AM_Buffer16; // Structure contains Int16_t pointer
-
-enum AudioFormat {
- MONO_16_BIT,
- STEREO_16_BIT
-};
-
-// Following struct will be used by app to supply the PT and BT properties
-// along with ducking values
-typedef struct {
- M4OSA_Int32 lvInSampleRate; // Sampling audio freq (8000,16000 or more )
- M4OSA_Int32 lvOutSampleRate; //Sampling audio freq (8000,16000 or more )
- AudioFormat lvBTFormat;
-
- M4OSA_Int32 lvInDucking_threshold;
- M4OSA_Float lvInDucking_lowVolume;
- M4OSA_Bool lvInDucking_enable;
- M4OSA_Float lvPTVolLevel;
- M4OSA_Float lvBTVolLevel;
- M4OSA_Int32 lvBTChannelCount;
- M4OSA_Int32 lvPTChannelCount;
-} AudioMixSettings;
-
-// This class is defined to get SF SRC access
-class VideoEditorBGAudioProcessing {
-public:
- VideoEditorBGAudioProcessing();
- ~VideoEditorBGAudioProcessing() {}
-
- void setMixParams(const AudioMixSettings& params);
-
- M4OSA_Int32 mixAndDuck(
- void* primaryTrackBuffer,
- void* backgroundTrackBuffer,
- void* mixedOutputBuffer);
-
-private:
- enum {
- kProcessingWindowSize = 10,
- };
-
- M4OSA_Int32 mInSampleRate;
- M4OSA_Int32 mOutSampleRate;
- AudioFormat mBTFormat;
-
- M4OSA_Bool mIsSSRCneeded;
- M4OSA_Int32 mBTChannelCount;
- M4OSA_Int32 mPTChannelCount;
- M4OSA_UInt8 mChannelConversion;
-
- M4OSA_UInt32 mDucking_threshold;
- M4OSA_Float mDucking_lowVolume;
- M4OSA_Float mDuckingFactor ;
- M4OSA_Bool mDucking_enable;
- M4OSA_Int32 mAudioVolumeArray[kProcessingWindowSize];
- M4OSA_Int32 mAudVolArrIndex;
- M4OSA_Bool mDoDucking;
- M4OSA_Float mPTVolLevel;
- M4OSA_Float mBTVolLevel;
-
- M4AM_Buffer16 mBTBuffer;
-
- M4OSA_Int32 getDecibelSound(M4OSA_UInt32 value);
- M4OSA_Bool isThresholdBreached(M4OSA_Int32* averageValue,
- M4OSA_Int32 storeCount, M4OSA_Int32 thresholdValue);
-
- // This returns the size of buffer which needs to allocated
- // before resampling is called
- M4OSA_Int32 calculateOutResampleBufSize();
-
- // Don't call me.
- VideoEditorBGAudioProcessing(const VideoEditorBGAudioProcessing&);
- VideoEditorBGAudioProcessing& operator=(
- const VideoEditorBGAudioProcessing&);
-};
-
-} // namespace android
-
-#endif // VE_BACKGROUND_AUDIO_PROC_H
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
deleted file mode 100755
index 8d656c4..0000000
--- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp
+++ /dev/null
@@ -1,595 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_NDEBUG 1
-#define LOG_TAG "VideoEditorPlayer"
-#include <utils/Log.h>
-
-#include "VideoEditorPlayer.h"
-#include "PreviewPlayer.h"
-
-#include <media/Metadata.h>
-#include <media/stagefright/MediaExtractor.h>
-
-#include <system/audio.h>
-
-namespace android {
-
-VideoEditorPlayer::VideoEditorPlayer(NativeWindowRenderer* renderer)
- : mPlayer(new PreviewPlayer(renderer)) {
-
- ALOGV("VideoEditorPlayer");
- mPlayer->setListener(this);
-}
-
-VideoEditorPlayer::~VideoEditorPlayer() {
- ALOGV("~VideoEditorPlayer");
-
- reset();
- mVeAudioSink.clear();
-
- delete mPlayer;
- mPlayer = NULL;
-}
-
-status_t VideoEditorPlayer::initCheck() {
- ALOGV("initCheck");
- return OK;
-}
-
-
-status_t VideoEditorPlayer::setAudioPlayer(VideoEditorAudioPlayer *audioPlayer) {
- return mPlayer->setAudioPlayer(audioPlayer);
-}
-
-
-status_t VideoEditorPlayer::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
- ALOGI("setDataSource('%s')", url);
- if (headers != NULL) {
- ALOGE("Headers parameter is not supported");
- return INVALID_OPERATION;
- }
-
- return mPlayer->setDataSource(url);
-}
-
-//We donot use this in preview, dummy implimentation as this is pure virtual
-status_t VideoEditorPlayer::setDataSource(int fd, int64_t offset,
- int64_t length) {
- ALOGE("setDataSource(%d, %lld, %lld) Not supported", fd, offset, length);
- return (!OK);
-}
-
-status_t VideoEditorPlayer::setVideoSurface(const sp<Surface> &surface) {
- ALOGV("setVideoSurface");
-
- mPlayer->setSurface(surface);
- return OK;
-}
-
-status_t VideoEditorPlayer::setVideoSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer) {
- ALOGV("setVideoSurfaceTexture");
-
- mPlayer->setSurfaceTexture(bufferProducer);
- return OK;
-}
-
-status_t VideoEditorPlayer::prepare() {
- ALOGV("prepare");
- return mPlayer->prepare();
-}
-
-status_t VideoEditorPlayer::prepareAsync() {
- return mPlayer->prepareAsync();
-}
-
-status_t VideoEditorPlayer::start() {
- ALOGV("start");
- return mPlayer->play();
-}
-
-status_t VideoEditorPlayer::stop() {
- ALOGV("stop");
- return pause();
-}
-
-status_t VideoEditorPlayer::pause() {
- ALOGV("pause");
- return mPlayer->pause();
-}
-
-bool VideoEditorPlayer::isPlaying() {
- ALOGV("isPlaying");
- return mPlayer->isPlaying();
-}
-
-status_t VideoEditorPlayer::seekTo(int msec) {
- ALOGV("seekTo");
- status_t err = mPlayer->seekTo((int64_t)msec * 1000);
- return err;
-}
-
-status_t VideoEditorPlayer::getCurrentPosition(int *msec) {
- ALOGV("getCurrentPosition");
- int64_t positionUs;
- status_t err = mPlayer->getPosition(&positionUs);
-
- if (err != OK) {
- return err;
- }
-
- *msec = (positionUs + 500) / 1000;
- return OK;
-}
-
-status_t VideoEditorPlayer::getDuration(int *msec) {
- ALOGV("getDuration");
-
- int64_t durationUs;
- status_t err = mPlayer->getDuration(&durationUs);
-
- if (err != OK) {
- *msec = 0;
- return OK;
- }
-
- *msec = (durationUs + 500) / 1000;
- return OK;
-}
-
-status_t VideoEditorPlayer::reset() {
- ALOGV("reset");
- mPlayer->reset();
- return OK;
-}
-
-status_t VideoEditorPlayer::setLooping(int loop) {
- ALOGV("setLooping");
- return mPlayer->setLooping(loop);
-}
-
-status_t VideoEditorPlayer::setParameter(int key, const Parcel &request) {
- ALOGE("setParameter not implemented");
- return INVALID_OPERATION;
-}
-
-status_t VideoEditorPlayer::getParameter(int key, Parcel *reply) {
- ALOGE("getParameter not implemented");
- return INVALID_OPERATION;
-}
-
-player_type VideoEditorPlayer::playerType() {
- ALOGV("playerType");
- return STAGEFRIGHT_PLAYER;
-}
-
-void VideoEditorPlayer::acquireLock() {
- ALOGV("acquireLock");
- mPlayer->acquireLock();
-}
-
-void VideoEditorPlayer::releaseLock() {
- ALOGV("releaseLock");
- mPlayer->releaseLock();
-}
-
-status_t VideoEditorPlayer::invoke(const Parcel &request, Parcel *reply) {
- return INVALID_OPERATION;
-}
-
-void VideoEditorPlayer::setAudioSink(const sp<AudioSink> &audioSink) {
- MediaPlayerInterface::setAudioSink(audioSink);
-
- mPlayer->setAudioSink(audioSink);
-}
-
-status_t VideoEditorPlayer::getMetadata(
- const media::Metadata::Filter& ids, Parcel *records) {
- using media::Metadata;
-
- uint32_t flags = mPlayer->getSourceSeekFlags();
-
- Metadata metadata(records);
-
- metadata.appendBool(
- Metadata::kPauseAvailable,
- flags & MediaExtractor::CAN_PAUSE);
-
- metadata.appendBool(
- Metadata::kSeekBackwardAvailable,
- flags & MediaExtractor::CAN_SEEK_BACKWARD);
-
- metadata.appendBool(
- Metadata::kSeekForwardAvailable,
- flags & MediaExtractor::CAN_SEEK_FORWARD);
-
- metadata.appendBool(
- Metadata::kSeekAvailable,
- flags & MediaExtractor::CAN_SEEK);
-
- return OK;
-}
-
-status_t VideoEditorPlayer::loadEffectsSettings(
- M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects) {
- ALOGV("loadEffectsSettings");
- return mPlayer->loadEffectsSettings(pEffectSettings, nEffects);
-}
-
-status_t VideoEditorPlayer::loadAudioMixSettings(
- M4xVSS_AudioMixingSettings* pAudioMixSettings) {
- ALOGV("VideoEditorPlayer: loadAudioMixSettings");
- return mPlayer->loadAudioMixSettings(pAudioMixSettings);
-}
-
-status_t VideoEditorPlayer::setAudioMixPCMFileHandle(
- M4OSA_Context pAudioMixPCMFileHandle) {
-
- ALOGV("VideoEditorPlayer: loadAudioMixSettings");
- return mPlayer->setAudioMixPCMFileHandle(pAudioMixPCMFileHandle);
-}
-
-status_t VideoEditorPlayer::setAudioMixStoryBoardParam(
- M4OSA_UInt32 audioMixStoryBoardTS,
- M4OSA_UInt32 currentMediaBeginCutTime,
- M4OSA_UInt32 primaryTrackVolValue) {
-
- ALOGV("VideoEditorPlayer: loadAudioMixSettings");
- return mPlayer->setAudioMixStoryBoardParam(audioMixStoryBoardTS,
- currentMediaBeginCutTime, primaryTrackVolValue);
-}
-
-status_t VideoEditorPlayer::setPlaybackBeginTime(uint32_t msec) {
- ALOGV("setPlaybackBeginTime");
- return mPlayer->setPlaybackBeginTime(msec);
-}
-
-status_t VideoEditorPlayer::setPlaybackEndTime(uint32_t msec) {
- ALOGV("setPlaybackEndTime");
- return mPlayer->setPlaybackEndTime(msec);
-}
-
-status_t VideoEditorPlayer::setStoryboardStartTime(uint32_t msec) {
- ALOGV("setStoryboardStartTime");
- return mPlayer->setStoryboardStartTime(msec);
-}
-
-status_t VideoEditorPlayer::setProgressCallbackInterval(uint32_t cbInterval) {
- ALOGV("setProgressCallbackInterval");
- return mPlayer->setProgressCallbackInterval(cbInterval);
-}
-
-status_t VideoEditorPlayer::setMediaRenderingMode(
- M4xVSS_MediaRendering mode,
- M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
-
- ALOGV("setMediaRenderingMode");
- return mPlayer->setMediaRenderingMode(mode, outputVideoSize);
-}
-
-status_t VideoEditorPlayer::resetJniCallbackTimeStamp() {
- ALOGV("resetJniCallbackTimeStamp");
- return mPlayer->resetJniCallbackTimeStamp();
-}
-
-status_t VideoEditorPlayer::setImageClipProperties(
- uint32_t width, uint32_t height) {
- return mPlayer->setImageClipProperties(width, height);
-}
-
-status_t VideoEditorPlayer::readFirstVideoFrame() {
- return mPlayer->readFirstVideoFrame();
-}
-
-status_t VideoEditorPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
- mPlayer->getLastRenderedTimeMs(lastRenderedTimeMs);
- return NO_ERROR;
-}
-
-/* Implementation of AudioSink interface */
-#undef LOG_TAG
-#define LOG_TAG "VeAudioSink"
-
-int VideoEditorPlayer::VeAudioOutput::mMinBufferCount = 4;
-bool VideoEditorPlayer::VeAudioOutput::mIsOnEmulator = false;
-
-VideoEditorPlayer::VeAudioOutput::VeAudioOutput()
- : mCallback(NULL),
- mCallbackCookie(NULL) {
- mStreamType = AUDIO_STREAM_MUSIC;
- mLeftVolume = 1.0;
- mRightVolume = 1.0;
- mLatency = 0;
- mMsecsPerFrame = 0;
- mNumFramesWritten = 0;
- setMinBufferCount();
-}
-
-VideoEditorPlayer::VeAudioOutput::~VeAudioOutput() {
- close();
-}
-
-void VideoEditorPlayer::VeAudioOutput::setMinBufferCount() {
-
- mIsOnEmulator = false;
- mMinBufferCount = 4;
-}
-
-bool VideoEditorPlayer::VeAudioOutput::isOnEmulator() {
-
- setMinBufferCount();
- return mIsOnEmulator;
-}
-
-int VideoEditorPlayer::VeAudioOutput::getMinBufferCount() {
-
- setMinBufferCount();
- return mMinBufferCount;
-}
-
-ssize_t VideoEditorPlayer::VeAudioOutput::bufferSize() const {
-
- if (mTrack == 0) return NO_INIT;
- return mTrack->frameCount() * frameSize();
-}
-
-ssize_t VideoEditorPlayer::VeAudioOutput::frameCount() const {
-
- if (mTrack == 0) return NO_INIT;
- return mTrack->frameCount();
-}
-
-ssize_t VideoEditorPlayer::VeAudioOutput::channelCount() const
-{
- if (mTrack == 0) return NO_INIT;
- return mTrack->channelCount();
-}
-
-ssize_t VideoEditorPlayer::VeAudioOutput::frameSize() const
-{
- if (mTrack == 0) return NO_INIT;
- return mTrack->frameSize();
-}
-
-uint32_t VideoEditorPlayer::VeAudioOutput::latency () const
-{
- return mLatency;
-}
-
-float VideoEditorPlayer::VeAudioOutput::msecsPerFrame() const
-{
- return mMsecsPerFrame;
-}
-
-status_t VideoEditorPlayer::VeAudioOutput::getPosition(uint32_t *position) const {
-
- if (mTrack == 0) return NO_INIT;
- return mTrack->getPosition(position);
-}
-
-status_t VideoEditorPlayer::VeAudioOutput::getFramesWritten(uint32_t *written) const {
-
- if (mTrack == 0) return NO_INIT;
- *written = mNumFramesWritten;
- return OK;
-}
-
-status_t VideoEditorPlayer::VeAudioOutput::open(
- uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
- audio_format_t format, int bufferCount,
- AudioCallback cb, void *cookie, audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo) {
-
- mCallback = cb;
- mCallbackCookie = cookie;
-
- // Check argument "bufferCount" against the mininum buffer count
- if (bufferCount < mMinBufferCount) {
- ALOGV("bufferCount (%d) is too small and increased to %d",
- bufferCount, mMinBufferCount);
- bufferCount = mMinBufferCount;
-
- }
- ALOGV("open(%u, %d, %d, %d)", sampleRate, channelCount, format, bufferCount);
- if (mTrack != 0) close();
- uint32_t afSampleRate;
- size_t afFrameCount;
- int frameCount;
-
- if (AudioSystem::getOutputFrameCount(&afFrameCount, mStreamType) !=
- NO_ERROR) {
- return NO_INIT;
- }
- if (AudioSystem::getOutputSamplingRate(&afSampleRate, mStreamType) !=
- NO_ERROR) {
- return NO_INIT;
- }
-
- frameCount = (sampleRate*afFrameCount*bufferCount)/afSampleRate;
-
- if (channelMask == CHANNEL_MASK_USE_CHANNEL_ORDER) {
- switch(channelCount) {
- case 1:
- channelMask = AUDIO_CHANNEL_OUT_MONO;
- break;
- case 2:
- channelMask = AUDIO_CHANNEL_OUT_STEREO;
- break;
- default:
- return NO_INIT;
- }
- }
-
- sp<AudioTrack> t;
- if (mCallback != NULL) {
- t = new AudioTrack(
- mStreamType,
- sampleRate,
- format,
- channelMask,
- frameCount,
- flags,
- CallbackWrapper,
- this);
- } else {
- t = new AudioTrack(
- mStreamType,
- sampleRate,
- format,
- channelMask,
- frameCount,
- flags);
- }
-
- if ((t == 0) || (t->initCheck() != NO_ERROR)) {
- ALOGE("Unable to create audio track");
- return NO_INIT;
- }
-
- ALOGV("setVolume");
- t->setVolume(mLeftVolume, mRightVolume);
- mMsecsPerFrame = 1.e3 / (float) sampleRate;
- mLatency = t->latency();
- mTrack = t;
- return NO_ERROR;
-}
-
-status_t VideoEditorPlayer::VeAudioOutput::start() {
-
- ALOGV("start");
- if (mTrack != 0) {
- mTrack->setVolume(mLeftVolume, mRightVolume);
- status_t status = mTrack->start();
- if (status == NO_ERROR) {
- mTrack->getPosition(&mNumFramesWritten);
- }
- return status;
- }
- return NO_INIT;
-}
-
-void VideoEditorPlayer::VeAudioOutput::snoopWrite(
- const void* buffer, size_t size) {
- // Visualization buffers not supported
- return;
-
-}
-
-ssize_t VideoEditorPlayer::VeAudioOutput::write(
- const void* buffer, size_t size) {
-
- LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
-
- //ALOGV("write(%p, %u)", buffer, size);
- if (mTrack != 0) {
- snoopWrite(buffer, size);
- ssize_t ret = mTrack->write(buffer, size);
- mNumFramesWritten += ret / 4; // assume 16 bit stereo
- return ret;
- }
- return NO_INIT;
-}
-
-void VideoEditorPlayer::VeAudioOutput::stop() {
-
- ALOGV("stop");
- if (mTrack != 0) mTrack->stop();
-}
-
-void VideoEditorPlayer::VeAudioOutput::flush() {
-
- ALOGV("flush");
- if (mTrack != 0) mTrack->flush();
-}
-
-void VideoEditorPlayer::VeAudioOutput::pause() {
-
- ALOGV("VeAudioOutput::pause");
- if (mTrack != 0) mTrack->pause();
-}
-
-void VideoEditorPlayer::VeAudioOutput::close() {
-
- ALOGV("close");
- mTrack.clear();
-}
-
-void VideoEditorPlayer::VeAudioOutput::setVolume(float left, float right) {
-
- ALOGV("setVolume(%f, %f)", left, right);
- mLeftVolume = left;
- mRightVolume = right;
- if (mTrack != 0) {
- mTrack->setVolume(left, right);
- }
-}
-
-// static
-void VideoEditorPlayer::VeAudioOutput::CallbackWrapper(
- int event, void *cookie, void *info) {
- //ALOGV("VeAudioOutput::callbackwrapper");
- if (event != AudioTrack::EVENT_MORE_DATA) {
- return;
- }
-
- VeAudioOutput *me = (VeAudioOutput *)cookie;
- AudioTrack::Buffer *buffer = (AudioTrack::Buffer *)info;
-
- size_t actualSize = (*me->mCallback)(
- me, buffer->raw, buffer->size, me->mCallbackCookie,
- MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER);
-
- buffer->size = actualSize;
-
- if (actualSize > 0) {
- me->snoopWrite(buffer->raw, actualSize);
- }
-}
-
-status_t VideoEditorPlayer::VeAudioOutput::dump(int fd, const Vector<String16>& args) const
-{
- const size_t SIZE = 256;
- char buffer[SIZE];
- String8 result;
-
- result.append(" VeAudioOutput\n");
- snprintf(buffer, SIZE-1, " stream type(%d), left - right volume(%f, %f)\n",
- mStreamType, mLeftVolume, mRightVolume);
- result.append(buffer);
- snprintf(buffer, SIZE-1, " msec per frame(%f), latency (%d)\n",
- mMsecsPerFrame, mLatency);
- result.append(buffer);
- ::write(fd, result.string(), result.size());
- if (mTrack != 0) {
- mTrack->dump(fd, args);
- }
- return NO_ERROR;
-}
-
-int VideoEditorPlayer::VeAudioOutput::getSessionId() const {
-
- return mSessionId;
-}
-
-uint32_t VideoEditorPlayer::VeAudioOutput::getSampleRate() const {
- if (mMsecsPerFrame == 0) {
- return 0;
- }
- return (uint32_t)(1.e3 / mMsecsPerFrame);
-}
-
-} // namespace android
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
deleted file mode 100755
index b8c1254..0000000
--- a/libvideoeditor/lvpp/VideoEditorPlayer.h
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_VIDEOEDITOR_PLAYER_H
-#define ANDROID_VIDEOEDITOR_PLAYER_H
-
-#include <media/MediaPlayerInterface.h>
-#include <media/AudioTrack.h>
-#include "M4xVSS_API.h"
-#include "VideoEditorMain.h"
-#include "VideoEditorTools.h"
-#include "VideoEditorAudioPlayer.h"
-#include "NativeWindowRenderer.h"
-
-namespace android {
-
-struct PreviewPlayer;
-
-class VideoEditorPlayer : public MediaPlayerInterface {
- public:
- class VeAudioOutput: public MediaPlayerBase::AudioSink
- {
- public:
- VeAudioOutput();
- virtual ~VeAudioOutput();
-
- virtual bool ready() const { return mTrack != NULL; }
- virtual bool realtime() const { return true; }
- virtual ssize_t bufferSize() const;
- virtual ssize_t frameCount() const;
- virtual ssize_t channelCount() const;
- virtual ssize_t frameSize() const;
- virtual uint32_t latency() const;
- virtual float msecsPerFrame() const;
- virtual status_t getPosition(uint32_t *position) const;
- virtual status_t getFramesWritten(uint32_t*) const;
- virtual int getSessionId() const;
- virtual uint32_t getSampleRate() const;
-
- virtual status_t open(
- uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
- audio_format_t format, int bufferCount,
- AudioCallback cb, void *cookie, audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo);
-
- virtual status_t start();
- virtual ssize_t write(const void* buffer, size_t size);
- virtual void stop();
- virtual void flush();
- virtual void pause();
- virtual void close();
- void setAudioStreamType(audio_stream_type_t streamType) { mStreamType = streamType; }
- virtual audio_stream_type_t getAudioStreamType() const { return mStreamType; }
- void setVolume(float left, float right);
- virtual status_t dump(int fd,const Vector<String16>& args) const;
-
- static bool isOnEmulator();
- static int getMinBufferCount();
- private:
- static void setMinBufferCount();
- static void CallbackWrapper(
- int event, void *me, void *info);
-
- sp<AudioTrack> mTrack;
- AudioCallback mCallback;
- void * mCallbackCookie;
- audio_stream_type_t mStreamType;
- float mLeftVolume;
- float mRightVolume;
- float mMsecsPerFrame;
- uint32_t mLatency;
- int mSessionId;
- static bool mIsOnEmulator;
- static int mMinBufferCount; // 12 for emulator; otherwise 4
-
- public:
- uint32_t mNumFramesWritten;
- void snoopWrite(const void*, size_t);
- };
-
-public:
- VideoEditorPlayer(NativeWindowRenderer* renderer);
- virtual ~VideoEditorPlayer();
-
- virtual status_t initCheck();
-
- virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
-
- virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<Surface> &surface);
- virtual status_t setVideoSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer);
- virtual status_t prepare();
- virtual status_t prepareAsync();
- virtual status_t start();
- virtual status_t stop();
- virtual status_t pause();
- virtual bool isPlaying();
- virtual status_t seekTo(int msec);
- virtual status_t getCurrentPosition(int *msec);
- virtual status_t getDuration(int *msec);
- virtual status_t reset();
- virtual status_t setLooping(int loop);
- virtual player_type playerType();
- virtual status_t invoke(const Parcel &request, Parcel *reply);
- virtual void setAudioSink(const sp<AudioSink> &audioSink);
- virtual void acquireLock();
- virtual void releaseLock();
- virtual status_t setParameter(int key, const Parcel &request);
- virtual status_t getParameter(int key, Parcel *reply);
-
- virtual status_t getMetadata(
- const media::Metadata::Filter& ids, Parcel *records);
-
- virtual status_t loadEffectsSettings(
- M4VSS3GPP_EffectSettings* pEffectSettings, int nEffects);
-
- virtual status_t loadAudioMixSettings(
- M4xVSS_AudioMixingSettings* pAudioMixSettings);
-
- virtual status_t setAudioMixPCMFileHandle(
- M4OSA_Context pAudioMixPCMFileHandle);
-
- virtual status_t setAudioMixStoryBoardParam(
- M4OSA_UInt32 x, M4OSA_UInt32 y, M4OSA_UInt32 z);
-
- virtual status_t setPlaybackBeginTime(uint32_t msec);
- virtual status_t setPlaybackEndTime(uint32_t msec);
- virtual status_t setStoryboardStartTime(uint32_t msec);
- virtual status_t setProgressCallbackInterval(uint32_t cbInterval);
-
- virtual status_t setMediaRenderingMode(M4xVSS_MediaRendering mode,
- M4VIDEOEDITING_VideoFrameSize outputVideoSize);
-
- virtual status_t resetJniCallbackTimeStamp();
- virtual status_t setImageClipProperties(uint32_t width, uint32_t height);
- virtual status_t readFirstVideoFrame();
- virtual status_t getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs);
-
- status_t setAudioPlayer(VideoEditorAudioPlayer *audioPlayer);
-private:
- PreviewPlayer *mPlayer;
- sp<VeAudioOutput> mVeAudioSink;
-
- VideoEditorPlayer(const VideoEditorPlayer &);
- VideoEditorPlayer &operator=(const VideoEditorPlayer &);
-};
-
-} // namespace android
-
-#endif // ANDROID_VIDEOEDITOR_PLAYER_H
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
deleted file mode 100755
index c3cd3d0..0000000
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
+++ /dev/null
@@ -1,1467 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "PreviewController"
-#include <utils/Log.h>
-
-#include <gui/Surface.h>
-
-#include "VideoEditorAudioPlayer.h"
-#include "PreviewRenderer.h"
-#include "M4OSA_Semaphore.h"
-#include "M4OSA_Thread.h"
-#include "VideoEditorPreviewController.h"
-
-namespace android {
-
-
-VideoEditorPreviewController::VideoEditorPreviewController()
- : mCurrentPlayer(0),
- mThreadContext(NULL),
- mPlayerState(VePlayerIdle),
- mPrepareReqest(M4OSA_FALSE),
- mClipList(NULL),
- mNumberClipsInStoryBoard(0),
- mNumberClipsToPreview(0),
- mStartingClipIndex(0),
- mPreviewLooping(M4OSA_FALSE),
- mCallBackAfterFrameCnt(0),
- mEffectsSettings(NULL),
- mNumberEffects(0),
- mCurrentClipNumber(-1),
- mClipTotalDuration(0),
- mCurrentVideoEffect(VIDEO_EFFECT_NONE),
- mBackgroundAudioSetting(NULL),
- mAudioMixPCMFileHandle(NULL),
- mTarget(NULL),
- mJniCookie(NULL),
- mJniCallback(NULL),
- mCurrentPlayedDuration(0),
- mCurrentClipDuration(0),
- mVideoStoryBoardTimeMsUptoFirstPreviewClip(0),
- mOverlayState(OVERLAY_CLEAR),
- mActivePlayerIndex(0),
- mOutputVideoWidth(0),
- mOutputVideoHeight(0),
- bStopThreadInProgress(false),
- mSemThreadWait(NULL) {
- ALOGV("VideoEditorPreviewController");
- mRenderingMode = M4xVSS_kBlackBorders;
- mIsFiftiesEffectStarted = false;
-
- for (int i = 0; i < kTotalNumPlayerInstances; ++i) {
- mVePlayer[i] = NULL;
- }
-}
-
-VideoEditorPreviewController::~VideoEditorPreviewController() {
- ALOGV("~VideoEditorPreviewController");
- M4OSA_UInt32 i = 0;
- M4OSA_ERR err = M4NO_ERROR;
-
- // Stop the thread if its still running
- if(mThreadContext != NULL) {
- err = M4OSA_threadSyncStop(mThreadContext);
- if(err != M4NO_ERROR) {
- ALOGV("~VideoEditorPreviewController: error 0x%x \
- in trying to stop thread", err);
- // Continue even if error
- }
-
- err = M4OSA_threadSyncClose(mThreadContext);
- if(err != M4NO_ERROR) {
- ALOGE("~VideoEditorPreviewController: error 0x%x \
- in trying to close thread", (unsigned int) err);
- // Continue even if error
- }
-
- mThreadContext = NULL;
- }
-
- for (int playerInst=0; playerInst<kTotalNumPlayerInstances;
- playerInst++) {
- if(mVePlayer[playerInst] != NULL) {
- ALOGV("clearing mVePlayer %d", playerInst);
- mVePlayer[playerInst].clear();
- }
- }
-
- if(mClipList != NULL) {
- // Clean up
- for(i=0;i<mNumberClipsInStoryBoard;i++)
- {
- if(mClipList[i]->pFile != NULL) {
- free(mClipList[i]->pFile);
- mClipList[i]->pFile = NULL;
- }
-
- free(mClipList[i]);
- }
- free(mClipList);
- mClipList = NULL;
- }
-
- if(mEffectsSettings) {
- for(i=0;i<mNumberEffects;i++) {
- if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {
- free(mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);
-
- free(mEffectsSettings[i].xVSS.pFramingBuffer);
-
- mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
- }
- }
- free(mEffectsSettings);
- mEffectsSettings = NULL;
- }
-
- if (mAudioMixPCMFileHandle) {
- err = M4OSA_fileReadClose (mAudioMixPCMFileHandle);
- mAudioMixPCMFileHandle = M4OSA_NULL;
- }
-
- if (mBackgroundAudioSetting != NULL) {
- free(mBackgroundAudioSetting);
- mBackgroundAudioSetting = NULL;
- }
-
- if(mTarget != NULL) {
- delete mTarget;
- mTarget = NULL;
- }
-
- mOverlayState = OVERLAY_CLEAR;
-
- ALOGV("~VideoEditorPreviewController returns");
-}
-
-M4OSA_ERR VideoEditorPreviewController::loadEditSettings(
- M4VSS3GPP_EditSettings* pSettings,M4xVSS_AudioMixingSettings* bgmSettings) {
-
- M4OSA_UInt32 i = 0, iClipDuration = 0, rgbSize = 0;
- M4VIFI_UInt8 *tmp = NULL;
- M4OSA_ERR err = M4NO_ERROR;
-
- ALOGV("loadEditSettings");
- ALOGV("loadEditSettings Channels = %d, sampling Freq %d",
- bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency );
- bgmSettings->uiSamplingFrequency = 32000;
-
- ALOGV("loadEditSettings Channels = %d, sampling Freq %d",
- bgmSettings->uiNbChannels, bgmSettings->uiSamplingFrequency );
- Mutex::Autolock autoLock(mLock);
-
- // Clean up any previous Edit settings before loading new ones
- mCurrentVideoEffect = VIDEO_EFFECT_NONE;
-
- if(mAudioMixPCMFileHandle) {
- err = M4OSA_fileReadClose (mAudioMixPCMFileHandle);
- mAudioMixPCMFileHandle = M4OSA_NULL;
- }
-
- if(mBackgroundAudioSetting != NULL) {
- free(mBackgroundAudioSetting);
- mBackgroundAudioSetting = NULL;
- }
-
- if(mClipList != NULL) {
- // Clean up
- for(i=0;i<mNumberClipsInStoryBoard;i++)
- {
- if(mClipList[i]->pFile != NULL) {
- free(mClipList[i]->pFile);
- mClipList[i]->pFile = NULL;
- }
-
- free(mClipList[i]);
- }
- free(mClipList);
- mClipList = NULL;
- }
-
- if(mEffectsSettings) {
- for(i=0;i<mNumberEffects;i++) {
- if(mEffectsSettings[i].xVSS.pFramingBuffer != NULL) {
- free(mEffectsSettings[i].xVSS.pFramingBuffer->pac_data);
-
- free(mEffectsSettings[i].xVSS.pFramingBuffer);
-
- mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
- }
- }
- free(mEffectsSettings);
- mEffectsSettings = NULL;
- }
-
- if(mClipList == NULL) {
- mNumberClipsInStoryBoard = pSettings->uiClipNumber;
- ALOGV("loadEditSettings: # of Clips = %d", mNumberClipsInStoryBoard);
-
- mClipList = (M4VSS3GPP_ClipSettings**)M4OSA_32bitAlignedMalloc(
- sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber, M4VS,
- (M4OSA_Char*)"LvPP, copy of pClipList");
-
- if(NULL == mClipList) {
- ALOGE("loadEditSettings: Malloc error");
- return M4ERR_ALLOC;
- }
- memset((void *)mClipList,0,
- sizeof(M4VSS3GPP_ClipSettings*)*pSettings->uiClipNumber);
-
- for(i=0;i<pSettings->uiClipNumber;i++) {
-
- // Allocate current clip
- mClipList[i] =
- (M4VSS3GPP_ClipSettings*)M4OSA_32bitAlignedMalloc(
- sizeof(M4VSS3GPP_ClipSettings),M4VS,(M4OSA_Char*)"clip settings");
-
- if(mClipList[i] == NULL) {
-
- ALOGE("loadEditSettings: Allocation error for mClipList[%d]", (int)i);
- return M4ERR_ALLOC;
- }
- // Copy plain structure
- memcpy((void *)mClipList[i],
- (void *)pSettings->pClipList[i],
- sizeof(M4VSS3GPP_ClipSettings));
-
- if(NULL != pSettings->pClipList[i]->pFile) {
- mClipList[i]->pFile = (M4OSA_Char*)M4OSA_32bitAlignedMalloc(
- pSettings->pClipList[i]->filePathSize, M4VS,
- (M4OSA_Char*)"pClipSettingsDest->pFile");
-
- if(NULL == mClipList[i]->pFile)
- {
- ALOGE("loadEditSettings : ERROR allocating filename");
- return M4ERR_ALLOC;
- }
-
- memcpy((void *)mClipList[i]->pFile,
- (void *)pSettings->pClipList[i]->pFile,
- pSettings->pClipList[i]->filePathSize);
- }
- else {
- ALOGE("NULL file path");
- return M4ERR_PARAMETER;
- }
-
- // Calculate total duration of all clips
- iClipDuration = pSettings->pClipList[i]->uiEndCutTime -
- pSettings->pClipList[i]->uiBeginCutTime;
-
- mClipTotalDuration = mClipTotalDuration+iClipDuration;
- }
- }
-
- if(mEffectsSettings == NULL) {
- mNumberEffects = pSettings->nbEffects;
- ALOGV("loadEditSettings: mNumberEffects = %d", mNumberEffects);
-
- if(mNumberEffects != 0) {
- mEffectsSettings = (M4VSS3GPP_EffectSettings*)M4OSA_32bitAlignedMalloc(
- mNumberEffects*sizeof(M4VSS3GPP_EffectSettings),
- M4VS, (M4OSA_Char*)"effects settings");
-
- if(mEffectsSettings == NULL) {
- ALOGE("loadEffectsSettings: Allocation error");
- return M4ERR_ALLOC;
- }
-
- memset((void *)mEffectsSettings,0,
- mNumberEffects*sizeof(M4VSS3GPP_EffectSettings));
-
- for(i=0;i<mNumberEffects;i++) {
-
- mEffectsSettings[i].xVSS.pFramingFilePath = NULL;
- mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
- mEffectsSettings[i].xVSS.pTextBuffer = NULL;
-
- memcpy((void *)&(mEffectsSettings[i]),
- (void *)&(pSettings->Effects[i]),
- sizeof(M4VSS3GPP_EffectSettings));
-
- if(pSettings->Effects[i].VideoEffectType ==
- (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
- // Allocate the pFraming RGB buffer
- mEffectsSettings[i].xVSS.pFramingBuffer =
- (M4VIFI_ImagePlane *)M4OSA_32bitAlignedMalloc(sizeof(M4VIFI_ImagePlane),
- M4VS, (M4OSA_Char*)"lvpp framing buffer");
-
- if(mEffectsSettings[i].xVSS.pFramingBuffer == NULL) {
- ALOGE("loadEffectsSettings:Alloc error for pFramingBuf");
- free(mEffectsSettings);
- mEffectsSettings = NULL;
- return M4ERR_ALLOC;
- }
-
- // Allocate the pac_data (RGB)
- if(pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB565){
- rgbSize =
- pSettings->Effects[i].xVSS.pFramingBuffer->u_width *
- pSettings->Effects[i].xVSS.pFramingBuffer->u_height*2;
- }
- else if(
- pSettings->Effects[i].xVSS.rgbType == M4VSS3GPP_kRGB888) {
- rgbSize =
- pSettings->Effects[i].xVSS.pFramingBuffer->u_width *
- pSettings->Effects[i].xVSS.pFramingBuffer->u_height*3;
- }
- else {
- ALOGE("loadEffectsSettings: wrong RGB type");
- free(mEffectsSettings);
- mEffectsSettings = NULL;
- return M4ERR_PARAMETER;
- }
-
- tmp = (M4VIFI_UInt8 *)M4OSA_32bitAlignedMalloc(rgbSize, M4VS,
- (M4OSA_Char*)"framing buffer pac_data");
-
- if(tmp == NULL) {
- ALOGE("loadEffectsSettings:Alloc error pFramingBuf pac");
- free(mEffectsSettings);
- mEffectsSettings = NULL;
- free(mEffectsSettings[i].xVSS.pFramingBuffer);
-
- mEffectsSettings[i].xVSS.pFramingBuffer = NULL;
- return M4ERR_ALLOC;
- }
- /* Initialize the pFramingBuffer*/
- mEffectsSettings[i].xVSS.pFramingBuffer->pac_data = tmp;
- mEffectsSettings[i].xVSS.pFramingBuffer->u_height =
- pSettings->Effects[i].xVSS.pFramingBuffer->u_height;
-
- mEffectsSettings[i].xVSS.pFramingBuffer->u_width =
- pSettings->Effects[i].xVSS.pFramingBuffer->u_width;
-
- mEffectsSettings[i].xVSS.pFramingBuffer->u_stride =
- pSettings->Effects[i].xVSS.pFramingBuffer->u_stride;
-
- mEffectsSettings[i].xVSS.pFramingBuffer->u_topleft =
- pSettings->Effects[i].xVSS.pFramingBuffer->u_topleft;
-
- mEffectsSettings[i].xVSS.uialphaBlendingStart =
- pSettings->Effects[i].xVSS.uialphaBlendingStart;
-
- mEffectsSettings[i].xVSS.uialphaBlendingMiddle =
- pSettings->Effects[i].xVSS.uialphaBlendingMiddle;
-
- mEffectsSettings[i].xVSS.uialphaBlendingEnd =
- pSettings->Effects[i].xVSS.uialphaBlendingEnd;
-
- mEffectsSettings[i].xVSS.uialphaBlendingFadeInTime =
- pSettings->Effects[i].xVSS.uialphaBlendingFadeInTime;
- mEffectsSettings[i].xVSS.uialphaBlendingFadeOutTime =
- pSettings->Effects[i].xVSS.uialphaBlendingFadeOutTime;
-
- // Copy the pFraming data
- memcpy((void *)
- mEffectsSettings[i].xVSS.pFramingBuffer->pac_data,
- (void *)pSettings->Effects[i].xVSS.pFramingBuffer->pac_data,
- rgbSize);
-
- mEffectsSettings[i].xVSS.rgbType =
- pSettings->Effects[i].xVSS.rgbType;
- }
- }
- }
- }
-
- if (mBackgroundAudioSetting == NULL) {
-
- mBackgroundAudioSetting = (M4xVSS_AudioMixingSettings*)M4OSA_32bitAlignedMalloc(
- sizeof(M4xVSS_AudioMixingSettings), M4VS,
- (M4OSA_Char*)"LvPP, copy of bgmSettings");
-
- if(NULL == mBackgroundAudioSetting) {
- ALOGE("loadEditSettings: mBackgroundAudioSetting Malloc failed");
- return M4ERR_ALLOC;
- }
-
- memset((void *)mBackgroundAudioSetting, 0,sizeof(M4xVSS_AudioMixingSettings*));
- memcpy((void *)mBackgroundAudioSetting, (void *)bgmSettings, sizeof(M4xVSS_AudioMixingSettings));
-
- if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {
-
- mBackgroundAudioSetting->pFile = (M4OSA_Void*) bgmSettings->pPCMFilePath;
- mBackgroundAudioSetting->uiNbChannels = 2;
- mBackgroundAudioSetting->uiSamplingFrequency = 32000;
- }
-
- // Open the BG file
- if ( mBackgroundAudioSetting->pFile != M4OSA_NULL ) {
- err = M4OSA_fileReadOpen(&mAudioMixPCMFileHandle,
- mBackgroundAudioSetting->pFile, M4OSA_kFileRead);
-
- if (err != M4NO_ERROR) {
- ALOGE("loadEditSettings: mBackgroundAudio PCM File open failed");
- return M4ERR_PARAMETER;
- }
- }
- }
-
- mOutputVideoSize = pSettings->xVSS.outputVideoSize;
- mFrameStr.pBuffer = M4OSA_NULL;
- return M4NO_ERROR;
-}
-
-M4OSA_ERR VideoEditorPreviewController::setSurface(const sp<Surface> &surface) {
- ALOGV("setSurface");
- Mutex::Autolock autoLock(mLock);
-
- mSurface = surface;
- return M4NO_ERROR;
-}
-
-M4OSA_ERR VideoEditorPreviewController::startPreview(
- M4OSA_UInt32 fromMS, M4OSA_Int32 toMs, M4OSA_UInt16 callBackAfterFrameCount,
- M4OSA_Bool loop) {
-
- M4OSA_ERR err = M4NO_ERROR;
- M4OSA_UInt32 i = 0, iIncrementedDuration = 0;
- ALOGV("startPreview");
-
- if(fromMS > (M4OSA_UInt32)toMs) {
- ALOGE("startPreview: fromMS > toMs");
- return M4ERR_PARAMETER;
- }
-
- if(toMs == 0) {
- ALOGE("startPreview: toMs is 0");
- return M4ERR_PARAMETER;
- }
-
- // If already started, then stop preview first
- for(int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
- if(mVePlayer[playerInst] != NULL) {
- ALOGV("startPreview: stopping previously started preview playback");
- stopPreview();
- break;
- }
- }
-
- // If renderPreview was called previously, then delete Renderer object first
- if(mTarget != NULL) {
- ALOGV("startPreview: delete previous PreviewRenderer");
- delete mTarget;
- mTarget = NULL;
- }
-
- // Create Audio player to be used for entire
- // storyboard duration
- mVEAudioSink = new VideoEditorPlayer::VeAudioOutput();
- mVEAudioPlayer = new VideoEditorAudioPlayer(mVEAudioSink);
- mVEAudioPlayer->setAudioMixSettings(mBackgroundAudioSetting);
- mVEAudioPlayer->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);
-
- // Create Video Renderer to be used for the entire storyboard duration.
- uint32_t width, height;
- getVideoSizeByResolution(mOutputVideoSize, &width, &height);
- mNativeWindowRenderer = new NativeWindowRenderer(mSurface, width, height);
-
- ALOGV("startPreview: loop = %d", loop);
- mPreviewLooping = loop;
-
- ALOGV("startPreview: callBackAfterFrameCount = %d", callBackAfterFrameCount);
- mCallBackAfterFrameCnt = callBackAfterFrameCount;
-
- for (int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
- mVePlayer[playerInst] = new VideoEditorPlayer(mNativeWindowRenderer);
- if(mVePlayer[playerInst] == NULL) {
- ALOGE("startPreview:Error creating VideoEditorPlayer %d",playerInst);
- return M4ERR_ALLOC;
- }
- ALOGV("startPreview: object created");
-
- mVePlayer[playerInst]->setNotifyCallback(this,(notify_callback_f)notify);
- ALOGV("startPreview: notify callback set");
-
- mVePlayer[playerInst]->loadEffectsSettings(mEffectsSettings,
- mNumberEffects);
- ALOGV("startPreview: effects settings loaded");
-
- mVePlayer[playerInst]->loadAudioMixSettings(mBackgroundAudioSetting);
- ALOGV("startPreview: AudioMixSettings settings loaded");
-
- mVePlayer[playerInst]->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);
- ALOGV("startPreview: AudioMixPCMFileHandle set");
-
- mVePlayer[playerInst]->setProgressCallbackInterval(
- mCallBackAfterFrameCnt);
- ALOGV("startPreview: setProgressCallBackInterval");
- }
-
- mPlayerState = VePlayerIdle;
- mPrepareReqest = M4OSA_FALSE;
-
- if(fromMS == 0) {
- mCurrentClipNumber = -1;
- // Save original value
- mFirstPreviewClipBeginTime = mClipList[0]->uiBeginCutTime;
- mVideoStoryBoardTimeMsUptoFirstPreviewClip = 0;
- }
- else {
- ALOGV("startPreview: fromMS=%d", fromMS);
- if(fromMS >= mClipTotalDuration) {
- ALOGE("startPreview: fromMS >= mClipTotalDuration");
- return M4ERR_PARAMETER;
- }
- for(i=0;i<mNumberClipsInStoryBoard;i++) {
- if(fromMS < (iIncrementedDuration + (mClipList[i]->uiEndCutTime -
- mClipList[i]->uiBeginCutTime))) {
- // Set to 1 index below,
- // as threadProcess first increments the clip index
- // and then processes clip in thread loop
- mCurrentClipNumber = i-1;
- ALOGD("startPreview:mCurrentClipNumber = %d fromMS=%d",i,fromMS);
-
- // Save original value
- mFirstPreviewClipBeginTime = mClipList[i]->uiBeginCutTime;
-
- // Set correct begin time to start playback
- if((fromMS+mClipList[i]->uiBeginCutTime) >
- (iIncrementedDuration+mClipList[i]->uiBeginCutTime)) {
-
- mClipList[i]->uiBeginCutTime =
- mClipList[i]->uiBeginCutTime +
- (fromMS - iIncrementedDuration);
- }
- break;
- }
- else {
- iIncrementedDuration = iIncrementedDuration +
- (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);
- }
- }
- mVideoStoryBoardTimeMsUptoFirstPreviewClip = iIncrementedDuration;
- }
-
- for (int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
- mVePlayer[playerInst]->setAudioMixStoryBoardParam(fromMS,
- mFirstPreviewClipBeginTime,
- mClipList[i]->ClipProperties.uiClipAudioVolumePercentage);
-
- ALOGV("startPreview:setAudioMixStoryBoardSkimTimeStamp set %d cuttime \
- %d", fromMS, mFirstPreviewClipBeginTime);
- }
-
- mStartingClipIndex = mCurrentClipNumber+1;
-
- // Start playing with player instance 0
- mCurrentPlayer = 0;
- mActivePlayerIndex = 0;
-
- if(toMs == -1) {
- ALOGV("startPreview: Preview till end of storyboard");
- mNumberClipsToPreview = mNumberClipsInStoryBoard;
- // Save original value
- mLastPreviewClipEndTime =
- mClipList[mNumberClipsToPreview-1]->uiEndCutTime;
- }
- else {
- ALOGV("startPreview: toMs=%d", toMs);
- if((M4OSA_UInt32)toMs > mClipTotalDuration) {
- ALOGE("startPreview: toMs > mClipTotalDuration");
- return M4ERR_PARAMETER;
- }
-
- iIncrementedDuration = 0;
-
- for(i=0;i<mNumberClipsInStoryBoard;i++) {
- if((M4OSA_UInt32)toMs <= (iIncrementedDuration +
- (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime))) {
- // Save original value
- mLastPreviewClipEndTime = mClipList[i]->uiEndCutTime;
- // Set the end cut time of clip index i to toMs
- mClipList[i]->uiEndCutTime = toMs;
-
- // Number of clips to be previewed is from index 0 to i
- // increment by 1 as i starts from 0
- mNumberClipsToPreview = i+1;
- break;
- }
- else {
- iIncrementedDuration = iIncrementedDuration +
- (mClipList[i]->uiEndCutTime - mClipList[i]->uiBeginCutTime);
- }
- }
- }
-
- // Open the thread semaphore
- M4OSA_semaphoreOpen(&mSemThreadWait, 1);
-
- // Open the preview process thread
- err = M4OSA_threadSyncOpen(&mThreadContext, (M4OSA_ThreadDoIt)threadProc);
- if (M4NO_ERROR != err) {
- ALOGE("VideoEditorPreviewController:M4OSA_threadSyncOpen error %d", (int) err);
- return err;
- }
-
- // Set the stacksize
- err = M4OSA_threadSyncSetOption(mThreadContext, M4OSA_ThreadStackSize,
- (M4OSA_DataOption) kPreviewThreadStackSize);
-
- if (M4NO_ERROR != err) {
- ALOGE("VideoEditorPreviewController: threadSyncSetOption error %d", (int) err);
- M4OSA_threadSyncClose(mThreadContext);
- mThreadContext = NULL;
- return err;
- }
-
- // Start the thread
- err = M4OSA_threadSyncStart(mThreadContext, (M4OSA_Void*)this);
- if (M4NO_ERROR != err) {
- ALOGE("VideoEditorPreviewController: threadSyncStart error %d", (int) err);
- M4OSA_threadSyncClose(mThreadContext);
- mThreadContext = NULL;
- return err;
- }
- bStopThreadInProgress = false;
-
- ALOGV("startPreview: process thread started");
- return M4NO_ERROR;
-}
-
-M4OSA_UInt32 VideoEditorPreviewController::stopPreview() {
- M4OSA_ERR err = M4NO_ERROR;
- uint32_t lastRenderedFrameTimeMs = 0;
- ALOGV("stopPreview");
-
- // Stop the thread
- if(mThreadContext != NULL) {
- bStopThreadInProgress = true;
- {
- Mutex::Autolock autoLock(mLockSem);
- if (mSemThreadWait != NULL) {
- err = M4OSA_semaphorePost(mSemThreadWait);
- }
- }
-
- err = M4OSA_threadSyncStop(mThreadContext);
- if(err != M4NO_ERROR) {
- ALOGV("stopPreview: error 0x%x in trying to stop thread", err);
- // Continue even if error
- }
-
- err = M4OSA_threadSyncClose(mThreadContext);
- if(err != M4NO_ERROR) {
- ALOGE("stopPreview: error 0x%x in trying to close thread", (unsigned int)err);
- // Continue even if error
- }
-
- mThreadContext = NULL;
- }
-
- // Close the semaphore first
- {
- Mutex::Autolock autoLock(mLockSem);
- if(mSemThreadWait != NULL) {
- err = M4OSA_semaphoreClose(mSemThreadWait);
- ALOGV("stopPreview: close semaphore returns 0x%x", err);
- mSemThreadWait = NULL;
- }
- }
-
- for (int playerInst=0; playerInst<kTotalNumPlayerInstances; playerInst++) {
- if(mVePlayer[playerInst] != NULL) {
- if(mVePlayer[playerInst]->isPlaying()) {
- ALOGV("stop the player first");
- mVePlayer[playerInst]->stop();
- }
- if (playerInst == mActivePlayerIndex) {
- // Return the last rendered frame time stamp
- mVePlayer[mActivePlayerIndex]->getLastRenderedTimeMs(&lastRenderedFrameTimeMs);
- }
-
- //This is used to syncronize onStreamDone() in PreviewPlayer and
- //stopPreview() in PreviewController
- sp<VideoEditorPlayer> temp = mVePlayer[playerInst];
- temp->acquireLock();
- ALOGV("stopPreview: clearing mVePlayer");
- mVePlayer[playerInst].clear();
- mVePlayer[playerInst] = NULL;
- temp->releaseLock();
- }
- }
- ALOGV("stopPreview: clear audioSink and audioPlayer");
- mVEAudioSink.clear();
- if (mVEAudioPlayer) {
- delete mVEAudioPlayer;
- mVEAudioPlayer = NULL;
- }
-
- delete mNativeWindowRenderer;
- mNativeWindowRenderer = NULL;
-
- // If image file playing, then free the buffer pointer
- if(mFrameStr.pBuffer != M4OSA_NULL) {
- free(mFrameStr.pBuffer);
- mFrameStr.pBuffer = M4OSA_NULL;
- }
-
- // Reset original begin cuttime of first previewed clip*/
- mClipList[mStartingClipIndex]->uiBeginCutTime = mFirstPreviewClipBeginTime;
- // Reset original end cuttime of last previewed clip*/
- mClipList[mNumberClipsToPreview-1]->uiEndCutTime = mLastPreviewClipEndTime;
-
- mPlayerState = VePlayerIdle;
- mPrepareReqest = M4OSA_FALSE;
-
- mCurrentPlayedDuration = 0;
- mCurrentClipDuration = 0;
- mRenderingMode = M4xVSS_kBlackBorders;
- mOutputVideoWidth = 0;
- mOutputVideoHeight = 0;
-
- ALOGV("stopPreview() lastRenderedFrameTimeMs %ld", lastRenderedFrameTimeMs);
- return lastRenderedFrameTimeMs;
-}
-
-M4OSA_ERR VideoEditorPreviewController::clearSurface(
- const sp<Surface> &surface, VideoEditor_renderPreviewFrameStr* pFrameInfo) {
-
- M4OSA_ERR err = M4NO_ERROR;
- VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;
- M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;
- M4VIFI_ImagePlane planeOut[3];
- ALOGV("Inside preview clear frame");
-
- Mutex::Autolock autoLock(mLock);
-
- // Delete previous renderer instance
- if(mTarget != NULL) {
- delete mTarget;
- mTarget = NULL;
- }
-
- outputBufferWidth = pFrameStr->uiFrameWidth;
- outputBufferHeight = pFrameStr->uiFrameHeight;
-
- // Initialize the renderer
- if(mTarget == NULL) {
-
- mTarget = PreviewRenderer::CreatePreviewRenderer(
- surface,
- outputBufferWidth, outputBufferHeight);
-
- if(mTarget == NULL) {
- ALOGE("renderPreviewFrame: cannot create PreviewRenderer");
- return M4ERR_ALLOC;
- }
- }
-
- // Out plane
- uint8_t* outBuffer;
- size_t outBufferStride = 0;
-
- ALOGV("doMediaRendering CALL getBuffer()");
- mTarget->getBufferYV12(&outBuffer, &outBufferStride);
-
- // Set the output YUV420 plane to be compatible with YV12 format
- //In YV12 format, sizes must be even
- M4OSA_UInt32 yv12PlaneWidth = ((outputBufferWidth +1)>>1)<<1;
- M4OSA_UInt32 yv12PlaneHeight = ((outputBufferHeight+1)>>1)<<1;
-
- prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
- (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
-
- /* Fill the surface with black frame */
- memset((void *)planeOut[0].pac_data,0x00,planeOut[0].u_width *
- planeOut[0].u_height * 1.5);
- memset((void *)planeOut[1].pac_data,128,planeOut[1].u_width *
- planeOut[1].u_height);
- memset((void *)planeOut[2].pac_data,128,planeOut[2].u_width *
- planeOut[2].u_height);
-
- mTarget->renderYV12();
- return err;
-}
-
-M4OSA_ERR VideoEditorPreviewController::renderPreviewFrame(
- const sp<Surface> &surface,
- VideoEditor_renderPreviewFrameStr* pFrameInfo,
- VideoEditorCurretEditInfo *pCurrEditInfo) {
-
- M4OSA_ERR err = M4NO_ERROR;
- M4OSA_UInt32 i = 0, iIncrementedDuration = 0, tnTimeMs=0, framesize =0;
- VideoEditor_renderPreviewFrameStr* pFrameStr = pFrameInfo;
- M4VIFI_UInt8 *pixelArray = NULL;
- Mutex::Autolock autoLock(mLock);
-
- if (pCurrEditInfo != NULL) {
- pCurrEditInfo->overlaySettingsIndex = -1;
- }
- // Delete previous renderer instance
- if(mTarget != NULL) {
- delete mTarget;
- mTarget = NULL;
- }
-
- if(mOutputVideoWidth == 0) {
- mOutputVideoWidth = pFrameStr->uiFrameWidth;
- }
-
- if(mOutputVideoHeight == 0) {
- mOutputVideoHeight = pFrameStr->uiFrameHeight;
- }
-
- // Initialize the renderer
- if(mTarget == NULL) {
- mTarget = PreviewRenderer::CreatePreviewRenderer(
- surface,
- mOutputVideoWidth, mOutputVideoHeight);
-
- if(mTarget == NULL) {
- ALOGE("renderPreviewFrame: cannot create PreviewRenderer");
- return M4ERR_ALLOC;
- }
- }
-
- pixelArray = NULL;
-
- // Apply rotation if required
- if (pFrameStr->videoRotationDegree != 0) {
- err = applyVideoRotation((M4OSA_Void *)pFrameStr->pBuffer,
- pFrameStr->uiFrameWidth, pFrameStr->uiFrameHeight,
- pFrameStr->videoRotationDegree);
- if (M4NO_ERROR != err) {
- ALOGE("renderPreviewFrame: cannot rotate video, err 0x%x", (unsigned int)err);
- delete mTarget;
- mTarget = NULL;
- return err;
- } else {
- // Video rotation done.
- // Swap width and height if 90 or 270 degrees
- if (pFrameStr->videoRotationDegree != 180) {
- int32_t temp = pFrameStr->uiFrameWidth;
- pFrameStr->uiFrameWidth = pFrameStr->uiFrameHeight;
- pFrameStr->uiFrameHeight = temp;
- }
- }
- }
- // Postprocessing (apply video effect)
- if(pFrameStr->bApplyEffect == M4OSA_TRUE) {
-
- for(i=0;i<mNumberEffects;i++) {
- // First check if effect starttime matches the clip being previewed
- if((mEffectsSettings[i].uiStartTime < pFrameStr->clipBeginCutTime)
- ||(mEffectsSettings[i].uiStartTime >= pFrameStr->clipEndCutTime)) {
- // This effect doesn't belong to this clip, check next one
- continue;
- }
- if((mEffectsSettings[i].uiStartTime <= pFrameStr->timeMs) &&
- ((mEffectsSettings[i].uiStartTime+mEffectsSettings[i].uiDuration) >=
- pFrameStr->timeMs) && (mEffectsSettings[i].uiDuration != 0)) {
- setVideoEffectType(mEffectsSettings[i].VideoEffectType, TRUE);
- }
- else {
- setVideoEffectType(mEffectsSettings[i].VideoEffectType, FALSE);
- }
- }
-
- //Provide the overlay Update indication when there is an overlay effect
- if (mCurrentVideoEffect & VIDEO_EFFECT_FRAMING) {
- M4OSA_UInt32 index;
- mCurrentVideoEffect &= ~VIDEO_EFFECT_FRAMING; //never apply framing here.
-
- // Find the effect in effectSettings array
- for (index = 0; index < mNumberEffects; index++) {
- if(mEffectsSettings[index].VideoEffectType ==
- (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
-
- if((mEffectsSettings[index].uiStartTime <= pFrameInfo->timeMs) &&
- ((mEffectsSettings[index].uiStartTime+
- mEffectsSettings[index].uiDuration) >= pFrameInfo->timeMs))
- {
- break;
- }
- }
- }
- if ((index < mNumberEffects) && (pCurrEditInfo != NULL)) {
- pCurrEditInfo->overlaySettingsIndex = index;
- ALOGV("Framing index = %d", index);
- } else {
- ALOGV("No framing effects found");
- }
- }
-
- if(mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
- err = applyVideoEffect((M4OSA_Void *)pFrameStr->pBuffer,
- OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,
- pFrameStr->uiFrameHeight, pFrameStr->timeMs,
- (M4OSA_Void *)pixelArray);
-
- if(err != M4NO_ERROR) {
- ALOGE("renderPreviewFrame: applyVideoEffect error 0x%x", (unsigned int)err);
- delete mTarget;
- mTarget = NULL;
- free(pixelArray);
- pixelArray = NULL;
- return err;
- }
- mCurrentVideoEffect = VIDEO_EFFECT_NONE;
- }
- else {
- // Apply the rendering mode
- err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,
- OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,
- pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);
-
- if(err != M4NO_ERROR) {
- ALOGE("renderPreviewFrame:doImageRenderingMode error 0x%x", (unsigned int)err);
- delete mTarget;
- mTarget = NULL;
- free(pixelArray);
- pixelArray = NULL;
- return err;
- }
- }
- }
- else {
- // Apply the rendering mode
- err = doImageRenderingMode((M4OSA_Void *)pFrameStr->pBuffer,
- OMX_COLOR_FormatYUV420Planar, pFrameStr->uiFrameWidth,
- pFrameStr->uiFrameHeight, (M4OSA_Void *)pixelArray);
-
- if(err != M4NO_ERROR) {
- ALOGE("renderPreviewFrame: doImageRenderingMode error 0x%x", (unsigned int)err);
- delete mTarget;
- mTarget = NULL;
- free(pixelArray);
- pixelArray = NULL;
- return err;
- }
- }
-
- mTarget->renderYV12();
- return err;
-}
-
-M4OSA_Void VideoEditorPreviewController::setJniCallback(void* cookie,
- jni_progress_callback_fct callbackFct) {
- //ALOGV("setJniCallback");
- mJniCookie = cookie;
- mJniCallback = callbackFct;
-}
-
-M4OSA_ERR VideoEditorPreviewController::preparePlayer(
- void* param, int playerInstance, int index) {
-
- M4OSA_ERR err = M4NO_ERROR;
- VideoEditorPreviewController *pController =
- (VideoEditorPreviewController *)param;
-
- ALOGV("preparePlayer: instance %d file %d", playerInstance, index);
-
- const char* fileName = (const char*) pController->mClipList[index]->pFile;
- pController->mVePlayer[playerInstance]->setDataSource(fileName, NULL);
-
- ALOGV("preparePlayer: setDataSource instance %s",
- (const char *)pController->mClipList[index]->pFile);
-
- pController->mVePlayer[playerInstance]->setVideoSurface(
- pController->mSurface);
- ALOGV("preparePlayer: setVideoSurface");
-
- pController->mVePlayer[playerInstance]->setMediaRenderingMode(
- pController->mClipList[index]->xVSS.MediaRendering,
- pController->mOutputVideoSize);
- ALOGV("preparePlayer: setMediaRenderingMode");
-
- if((M4OSA_UInt32)index == pController->mStartingClipIndex) {
- pController->mVePlayer[playerInstance]->setPlaybackBeginTime(
- pController->mFirstPreviewClipBeginTime);
- }
- else {
- pController->mVePlayer[playerInstance]->setPlaybackBeginTime(
- pController->mClipList[index]->uiBeginCutTime);
- }
- ALOGV("preparePlayer: setPlaybackBeginTime(%d)",
- pController->mClipList[index]->uiBeginCutTime);
-
- pController->mVePlayer[playerInstance]->setPlaybackEndTime(
- pController->mClipList[index]->uiEndCutTime);
- ALOGV("preparePlayer: setPlaybackEndTime(%d)",
- pController->mClipList[index]->uiEndCutTime);
-
- if(pController->mClipList[index]->FileType == M4VIDEOEDITING_kFileType_ARGB8888) {
- pController->mVePlayer[playerInstance]->setImageClipProperties(
- pController->mClipList[index]->ClipProperties.uiVideoWidth,
- pController->mClipList[index]->ClipProperties.uiVideoHeight);
- ALOGV("preparePlayer: setImageClipProperties");
- }
-
- pController->mVePlayer[playerInstance]->prepare();
- ALOGV("preparePlayer: prepared");
-
- if(pController->mClipList[index]->uiBeginCutTime > 0) {
- pController->mVePlayer[playerInstance]->seekTo(
- pController->mClipList[index]->uiBeginCutTime);
-
- ALOGV("preparePlayer: seekTo(%d)",
- pController->mClipList[index]->uiBeginCutTime);
- }
- pController->mVePlayer[pController->mCurrentPlayer]->setAudioPlayer(pController->mVEAudioPlayer);
-
- pController->mVePlayer[playerInstance]->readFirstVideoFrame();
- ALOGV("preparePlayer: readFirstVideoFrame of clip");
-
- return err;
-}
-
-M4OSA_ERR VideoEditorPreviewController::threadProc(M4OSA_Void* param) {
- M4OSA_ERR err = M4NO_ERROR;
- M4OSA_Int32 index = 0;
- VideoEditorPreviewController *pController =
- (VideoEditorPreviewController *)param;
-
- ALOGV("inside threadProc");
- if(pController->mPlayerState == VePlayerIdle) {
- (pController->mCurrentClipNumber)++;
-
- ALOGD("threadProc: playing file index %d total clips %d",
- pController->mCurrentClipNumber, pController->mNumberClipsToPreview);
-
- if((M4OSA_UInt32)pController->mCurrentClipNumber >=
- pController->mNumberClipsToPreview) {
-
- ALOGD("All clips previewed");
-
- pController->mCurrentPlayedDuration = 0;
- pController->mCurrentClipDuration = 0;
- pController->mCurrentPlayer = 0;
-
- if(pController->mPreviewLooping == M4OSA_TRUE) {
- pController->mCurrentClipNumber =
- pController->mStartingClipIndex;
-
- ALOGD("Preview looping TRUE, restarting from clip index %d",
- pController->mCurrentClipNumber);
-
- // Reset the story board timestamp inside the player
- for (int playerInst=0; playerInst<kTotalNumPlayerInstances;
- playerInst++) {
- pController->mVePlayer[playerInst]->resetJniCallbackTimeStamp();
- }
- }
- else {
- M4OSA_UInt32 endArgs = 0;
- if(pController->mJniCallback != NULL) {
- pController->mJniCallback(
- pController->mJniCookie, MSG_TYPE_PREVIEW_END, &endArgs);
- }
- pController->mPlayerState = VePlayerAutoStop;
-
- // Reset original begin cuttime of first previewed clip
- pController->mClipList[pController->mStartingClipIndex]->uiBeginCutTime =
- pController->mFirstPreviewClipBeginTime;
- // Reset original end cuttime of last previewed clip
- pController->mClipList[pController->mNumberClipsToPreview-1]->uiEndCutTime =
- pController->mLastPreviewClipEndTime;
-
- // Return a warning to M4OSA thread handler
- // so that thread is moved from executing state to open state
- return M4WAR_NO_MORE_STREAM;
- }
- }
-
- index=pController->mCurrentClipNumber;
- if((M4OSA_UInt32)pController->mCurrentClipNumber == pController->mStartingClipIndex) {
- pController->mCurrentPlayedDuration +=
- pController->mVideoStoryBoardTimeMsUptoFirstPreviewClip;
-
- pController->mCurrentClipDuration =
- pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime
- - pController->mFirstPreviewClipBeginTime;
-
- preparePlayer((void*)pController, pController->mCurrentPlayer, index);
- }
- else {
- pController->mCurrentPlayedDuration +=
- pController->mCurrentClipDuration;
-
- pController->mCurrentClipDuration =
- pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime -
- pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;
- }
-
- pController->mVePlayer[pController->mCurrentPlayer]->setStoryboardStartTime(
- pController->mCurrentPlayedDuration);
- ALOGV("threadProc: setStoryboardStartTime");
-
- // Set the next clip duration for Audio mix here
- if((M4OSA_UInt32)pController->mCurrentClipNumber != pController->mStartingClipIndex) {
-
- pController->mVePlayer[pController->mCurrentPlayer]->setAudioMixStoryBoardParam(
- pController->mCurrentPlayedDuration,
- pController->mClipList[index]->uiBeginCutTime,
- pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);
-
- ALOGV("threadProc: setAudioMixStoryBoardParam fromMS %d \
- ClipBeginTime %d", pController->mCurrentPlayedDuration +
- pController->mClipList[index]->uiBeginCutTime,
- pController->mClipList[index]->uiBeginCutTime,
- pController->mClipList[index]->ClipProperties.uiClipAudioVolumePercentage);
- }
- // Capture the active player being used
- pController->mActivePlayerIndex = pController->mCurrentPlayer;
-
- pController->mVePlayer[pController->mCurrentPlayer]->start();
- ALOGV("threadProc: started");
-
- pController->mPlayerState = VePlayerBusy;
-
- } else if(pController->mPlayerState == VePlayerAutoStop) {
- ALOGV("Preview completed..auto stop the player");
- } else if ((pController->mPlayerState == VePlayerBusy) && (pController->mPrepareReqest)) {
- // Prepare the player here
- pController->mPrepareReqest = M4OSA_FALSE;
- preparePlayer((void*)pController, pController->mCurrentPlayer,
- pController->mCurrentClipNumber+1);
- if (pController->mSemThreadWait != NULL) {
- err = M4OSA_semaphoreWait(pController->mSemThreadWait,
- M4OSA_WAIT_FOREVER);
- }
- } else {
- if (!pController->bStopThreadInProgress) {
- ALOGV("threadProc: state busy...wait for sem");
- if (pController->mSemThreadWait != NULL) {
- err = M4OSA_semaphoreWait(pController->mSemThreadWait,
- M4OSA_WAIT_FOREVER);
- }
- }
- ALOGV("threadProc: sem wait returned err = 0x%x", err);
- }
-
- //Always return M4NO_ERROR to ensure the thread keeps running
- return M4NO_ERROR;
-}
-
-void VideoEditorPreviewController::notify(
- void* cookie, int msg, int ext1, int ext2)
-{
- VideoEditorPreviewController *pController =
- (VideoEditorPreviewController *)cookie;
-
- M4OSA_ERR err = M4NO_ERROR;
- uint32_t clipDuration = 0;
- switch (msg) {
- case MEDIA_NOP: // interface test message
- ALOGV("MEDIA_NOP");
- break;
- case MEDIA_PREPARED:
- ALOGV("MEDIA_PREPARED");
- break;
- case MEDIA_PLAYBACK_COMPLETE:
- {
- ALOGD("notify:MEDIA_PLAYBACK_COMPLETE, mCurrentClipNumber = %d",
- pController->mCurrentClipNumber);
- pController->mPlayerState = VePlayerIdle;
-
- //send progress callback with last frame timestamp
- if((M4OSA_UInt32)pController->mCurrentClipNumber ==
- pController->mStartingClipIndex) {
- clipDuration =
- pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime
- - pController->mFirstPreviewClipBeginTime;
- }
- else {
- clipDuration =
- pController->mClipList[pController->mCurrentClipNumber]->uiEndCutTime
- - pController->mClipList[pController->mCurrentClipNumber]->uiBeginCutTime;
- }
-
- M4OSA_UInt32 playedDuration = clipDuration+pController->mCurrentPlayedDuration;
- pController->mJniCallback(
- pController->mJniCookie, MSG_TYPE_PROGRESS_INDICATION,
- &playedDuration);
-
- if ((pController->mOverlayState == OVERLAY_UPDATE) &&
- ((M4OSA_UInt32)pController->mCurrentClipNumber !=
- (pController->mNumberClipsToPreview-1))) {
- VideoEditorCurretEditInfo *pEditInfo =
- (VideoEditorCurretEditInfo*)M4OSA_32bitAlignedMalloc(sizeof(VideoEditorCurretEditInfo),
- M4VS, (M4OSA_Char*)"Current Edit info");
- pEditInfo->overlaySettingsIndex = ext2;
- pEditInfo->clipIndex = pController->mCurrentClipNumber;
- pController->mOverlayState == OVERLAY_CLEAR;
- if (pController->mJniCallback != NULL) {
- pController->mJniCallback(pController->mJniCookie,
- MSG_TYPE_OVERLAY_CLEAR, pEditInfo);
- }
- free(pEditInfo);
- }
- {
- Mutex::Autolock autoLock(pController->mLockSem);
- if (pController->mSemThreadWait != NULL) {
- M4OSA_semaphorePost(pController->mSemThreadWait);
- return;
- }
- }
-
- break;
- }
- case MEDIA_ERROR:
- {
- int err_val = ext1;
- // Always log errors.
- // ext1: Media framework error code.
- // ext2: Implementation dependant error code.
- ALOGE("MEDIA_ERROR; error (%d, %d)", ext1, ext2);
- if(pController->mJniCallback != NULL) {
- pController->mJniCallback(pController->mJniCookie,
- MSG_TYPE_PLAYER_ERROR, &err_val);
- }
- break;
- }
- case MEDIA_INFO:
- {
- int info_val = ext2;
- // ext1: Media framework error code.
- // ext2: Implementation dependant error code.
- //ALOGW("MEDIA_INFO; info/warning (%d, %d)", ext1, ext2);
- if(pController->mJniCallback != NULL) {
- pController->mJniCallback(pController->mJniCookie,
- MSG_TYPE_PROGRESS_INDICATION, &info_val);
- }
- break;
- }
- case MEDIA_SEEK_COMPLETE:
- ALOGV("MEDIA_SEEK_COMPLETE; Received seek complete");
- break;
- case MEDIA_BUFFERING_UPDATE:
- ALOGV("MEDIA_BUFFERING_UPDATE; buffering %d", ext1);
- break;
- case MEDIA_SET_VIDEO_SIZE:
- ALOGV("MEDIA_SET_VIDEO_SIZE; New video size %d x %d", ext1, ext2);
- break;
- case static_cast<int>(0xAAAAAAAA):
- ALOGV("VIDEO PLAYBACK ALMOST over, prepare next player");
- // Select next player and prepare it
- // If there is a clip after this one
- if ((M4OSA_UInt32)(pController->mCurrentClipNumber+1) <
- pController->mNumberClipsToPreview) {
- pController->mPrepareReqest = M4OSA_TRUE;
- pController->mCurrentPlayer++;
- if (pController->mCurrentPlayer >= kTotalNumPlayerInstances) {
- pController->mCurrentPlayer = 0;
- }
- // Prepare the first clip to be played
- {
- Mutex::Autolock autoLock(pController->mLockSem);
- if (pController->mSemThreadWait != NULL) {
- M4OSA_semaphorePost(pController->mSemThreadWait);
- }
- }
- }
- break;
- case static_cast<int>(0xBBBBBBBB):
- {
- ALOGV("VIDEO PLAYBACK, Update Overlay");
- int overlayIndex = ext2;
- VideoEditorCurretEditInfo *pEditInfo =
- (VideoEditorCurretEditInfo*)M4OSA_32bitAlignedMalloc(sizeof(VideoEditorCurretEditInfo),
- M4VS, (M4OSA_Char*)"Current Edit info");
- //ext1 = 1; start the overlay display
- // = 2; Clear the overlay.
- pEditInfo->overlaySettingsIndex = ext2;
- pEditInfo->clipIndex = pController->mCurrentClipNumber;
- ALOGV("pController->mCurrentClipNumber = %d",pController->mCurrentClipNumber);
- if (pController->mJniCallback != NULL) {
- if (ext1 == 1) {
- pController->mOverlayState = OVERLAY_UPDATE;
- pController->mJniCallback(pController->mJniCookie,
- MSG_TYPE_OVERLAY_UPDATE, pEditInfo);
- } else {
- pController->mOverlayState = OVERLAY_CLEAR;
- pController->mJniCallback(pController->mJniCookie,
- MSG_TYPE_OVERLAY_CLEAR, pEditInfo);
- }
- }
- free(pEditInfo);
- break;
- }
- default:
- ALOGV("unrecognized message: (%d, %d, %d)", msg, ext1, ext2);
- break;
- }
-}
-
-void VideoEditorPreviewController::setVideoEffectType(
- M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable) {
-
- M4OSA_UInt32 effect = VIDEO_EFFECT_NONE;
-
- // map M4VSS3GPP_VideoEffectType to local enum
- switch(type) {
- case M4VSS3GPP_kVideoEffectType_FadeFromBlack:
- effect = VIDEO_EFFECT_FADEFROMBLACK;
- break;
-
- case M4VSS3GPP_kVideoEffectType_FadeToBlack:
- effect = VIDEO_EFFECT_FADETOBLACK;
- break;
-
- case M4xVSS_kVideoEffectType_BlackAndWhite:
- effect = VIDEO_EFFECT_BLACKANDWHITE;
- break;
-
- case M4xVSS_kVideoEffectType_Pink:
- effect = VIDEO_EFFECT_PINK;
- break;
-
- case M4xVSS_kVideoEffectType_Green:
- effect = VIDEO_EFFECT_GREEN;
- break;
-
- case M4xVSS_kVideoEffectType_Sepia:
- effect = VIDEO_EFFECT_SEPIA;
- break;
-
- case M4xVSS_kVideoEffectType_Negative:
- effect = VIDEO_EFFECT_NEGATIVE;
- break;
-
- case M4xVSS_kVideoEffectType_Framing:
- effect = VIDEO_EFFECT_FRAMING;
- break;
-
- case M4xVSS_kVideoEffectType_Fifties:
- effect = VIDEO_EFFECT_FIFTIES;
- break;
-
- case M4xVSS_kVideoEffectType_ColorRGB16:
- effect = VIDEO_EFFECT_COLOR_RGB16;
- break;
-
- case M4xVSS_kVideoEffectType_Gradient:
- effect = VIDEO_EFFECT_GRADIENT;
- break;
-
- default:
- effect = VIDEO_EFFECT_NONE;
- break;
- }
-
- if(enable == M4OSA_TRUE) {
- // If already set, then no need to set again
- if(!(mCurrentVideoEffect & effect))
- mCurrentVideoEffect |= effect;
- if(effect == VIDEO_EFFECT_FIFTIES) {
- mIsFiftiesEffectStarted = true;
- }
- }
- else {
- // Reset only if already set
- if(mCurrentVideoEffect & effect)
- mCurrentVideoEffect &= ~effect;
- }
-
- return;
-}
-
-
-M4OSA_ERR VideoEditorPreviewController::applyVideoEffect(
- M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
- M4OSA_UInt32 videoHeight, M4OSA_UInt32 timeMs, M4OSA_Void* outPtr) {
-
- M4OSA_ERR err = M4NO_ERROR;
- vePostProcessParams postProcessParams;
-
- postProcessParams.vidBuffer = (M4VIFI_UInt8*)dataPtr;
- postProcessParams.videoWidth = videoWidth;
- postProcessParams.videoHeight = videoHeight;
- postProcessParams.timeMs = timeMs;
- postProcessParams.timeOffset = 0; //Since timeMS already takes care of offset in this case
- postProcessParams.effectsSettings = mEffectsSettings;
- postProcessParams.numberEffects = mNumberEffects;
- postProcessParams.outVideoWidth = mOutputVideoWidth;
- postProcessParams.outVideoHeight = mOutputVideoHeight;
- postProcessParams.currentVideoEffect = mCurrentVideoEffect;
- postProcessParams.renderingMode = mRenderingMode;
- if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
- postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
- mIsFiftiesEffectStarted = M4OSA_FALSE;
- }
- else {
- postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
- }
- //postProcessParams.renderer = mTarget;
- postProcessParams.overlayFrameRGBBuffer = NULL;
- postProcessParams.overlayFrameYUVBuffer = NULL;
-
- mTarget->getBufferYV12(&(postProcessParams.pOutBuffer), &(postProcessParams.outBufferStride));
-
- err = applyEffectsAndRenderingMode(&postProcessParams, videoWidth, videoHeight);
- return err;
-}
-
-status_t VideoEditorPreviewController::setPreviewFrameRenderingMode(
- M4xVSS_MediaRendering mode, M4VIDEOEDITING_VideoFrameSize outputVideoSize) {
-
- ALOGV("setMediaRenderingMode: outputVideoSize = %d", outputVideoSize);
- mRenderingMode = mode;
-
- status_t err = OK;
- /* get the video width and height by resolution */
- err = getVideoSizeByResolution(outputVideoSize,
- &mOutputVideoWidth, &mOutputVideoHeight);
-
- return err;
-}
-
-M4OSA_ERR VideoEditorPreviewController::doImageRenderingMode(
- M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
- M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr) {
-
- M4OSA_ERR err = M4NO_ERROR;
- M4VIFI_ImagePlane planeIn[3], planeOut[3];
- M4VIFI_UInt8 *inBuffer = M4OSA_NULL;
- M4OSA_UInt32 outputBufferWidth =0, outputBufferHeight=0;
-
- //frameSize = (videoWidth*videoHeight*3) >> 1;
- inBuffer = (M4OSA_UInt8 *)dataPtr;
-
- // In plane
- prepareYUV420ImagePlane(planeIn, videoWidth,
- videoHeight, (M4VIFI_UInt8 *)inBuffer, videoWidth, videoHeight);
-
- outputBufferWidth = mOutputVideoWidth;
- outputBufferHeight = mOutputVideoHeight;
-
- // Out plane
- uint8_t* outBuffer;
- size_t outBufferStride = 0;
-
- ALOGV("doMediaRendering CALL getBuffer()");
- mTarget->getBufferYV12(&outBuffer, &outBufferStride);
-
- // Set the output YUV420 plane to be compatible with YV12 format
- //In YV12 format, sizes must be even
- M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
- M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
-
- prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
- (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
-
- err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
- if(err != M4NO_ERROR) {
- ALOGE("doImageRenderingMode: applyRenderingMode returned err=0x%x", (unsigned int)err);
- }
- return err;
-}
-
-} //namespace android
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.h b/libvideoeditor/lvpp/VideoEditorPreviewController.h
deleted file mode 100755
index 1756f32..0000000
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_VE_PREVIEWCONTROLLER_H
-#define ANDROID_VE_PREVIEWCONTROLLER_H
-
-#include "VideoEditorPlayer.h"
-#include "VideoEditorTools.h"
-
-namespace android {
-
-// Callback mechanism from PreviewController to Jni */
-typedef void (*jni_progress_callback_fct)(void* cookie, M4OSA_UInt32 msgType, void *argc);
-
-struct Surface;
-struct PreviewRenderer;
-
-class VideoEditorPreviewController {
-
-public:
- VideoEditorPreviewController();
- ~VideoEditorPreviewController();
-
- M4OSA_ERR loadEditSettings(
- M4VSS3GPP_EditSettings* pSettings,
- M4xVSS_AudioMixingSettings* bgmSettings);
-
- M4OSA_ERR setSurface(const sp<Surface>& surface);
-
- M4OSA_ERR startPreview(
- M4OSA_UInt32 fromMS, M4OSA_Int32 toMs,
- M4OSA_UInt16 callBackAfterFrameCount,
- M4OSA_Bool loop) ;
-
- M4OSA_UInt32 stopPreview();
-
- M4OSA_ERR renderPreviewFrame(
- const sp<Surface>& surface,
- VideoEditor_renderPreviewFrameStr* pFrameInfo,
- VideoEditorCurretEditInfo *pCurrEditInfo);
-
- M4OSA_ERR clearSurface(
- const sp<Surface>& surface,
- VideoEditor_renderPreviewFrameStr* pFrameInfo);
-
- M4OSA_Void setJniCallback(
- void* cookie,
- jni_progress_callback_fct callbackFct);
-
- status_t setPreviewFrameRenderingMode(
- M4xVSS_MediaRendering mode,
- M4VIDEOEDITING_VideoFrameSize outputVideoSize);
-
-private:
- enum {
- kTotalNumPlayerInstances = 2,
- kPreviewThreadStackSize = 65536,
- };
-
- typedef enum {
- VePlayerIdle = 0,
- VePlayerBusy,
- VePlayerAutoStop
- } PlayerState;
-
- typedef enum {
- OVERLAY_UPDATE = 0,
- OVERLAY_CLEAR
- } OverlayState;
-
- sp<VideoEditorPlayer> mVePlayer[kTotalNumPlayerInstances];
- int mCurrentPlayer; // player instance currently being used
- sp<Surface> mSurface;
- mutable Mutex mLock;
- M4OSA_Context mThreadContext;
- PlayerState mPlayerState;
- M4OSA_Bool mPrepareReqest;
- M4VSS3GPP_ClipSettings **mClipList;
- M4OSA_UInt32 mNumberClipsInStoryBoard;
- M4OSA_UInt32 mNumberClipsToPreview;
- M4OSA_UInt32 mStartingClipIndex;
- M4OSA_Bool mPreviewLooping;
- M4OSA_UInt32 mCallBackAfterFrameCnt;
- M4VSS3GPP_EffectSettings* mEffectsSettings;
- M4OSA_UInt32 mNumberEffects;
- M4OSA_Int32 mCurrentClipNumber;
- M4OSA_UInt32 mClipTotalDuration;
- M4OSA_UInt32 mCurrentVideoEffect;
- M4xVSS_AudioMixingSettings* mBackgroundAudioSetting;
- M4OSA_Context mAudioMixPCMFileHandle;
- PreviewRenderer *mTarget;
- M4OSA_Context mJniCookie;
- jni_progress_callback_fct mJniCallback;
- VideoEditor_renderPreviewFrameStr mFrameStr;
- M4OSA_UInt32 mCurrentPlayedDuration;
- M4OSA_UInt32 mCurrentClipDuration;
- M4VIDEOEDITING_VideoFrameSize mOutputVideoSize;
- M4OSA_UInt32 mFirstPreviewClipBeginTime;
- M4OSA_UInt32 mLastPreviewClipEndTime;
- M4OSA_UInt32 mVideoStoryBoardTimeMsUptoFirstPreviewClip;
- OverlayState mOverlayState;
- int mActivePlayerIndex;
-
- M4xVSS_MediaRendering mRenderingMode;
- uint32_t mOutputVideoWidth;
- uint32_t mOutputVideoHeight;
- bool bStopThreadInProgress;
- M4OSA_Context mSemThreadWait;
- bool mIsFiftiesEffectStarted;
-
- sp<VideoEditorPlayer::VeAudioOutput> mVEAudioSink;
- VideoEditorAudioPlayer *mVEAudioPlayer;
- NativeWindowRenderer* mNativeWindowRenderer;
-
- M4VIFI_UInt8* mFrameRGBBuffer;
- M4VIFI_UInt8* mFrameYUVBuffer;
- mutable Mutex mLockSem;
-
-
- static M4OSA_ERR preparePlayer(void* param, int playerInstance, int index);
- static M4OSA_ERR threadProc(M4OSA_Void* param);
- static void notify(void* cookie, int msg, int ext1, int ext2);
-
- void setVideoEffectType(M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);
-
- M4OSA_ERR applyVideoEffect(
- M4OSA_Void * dataPtr, M4OSA_UInt32 colorFormat,
- M4OSA_UInt32 videoWidth, M4OSA_UInt32 videoHeight,
- M4OSA_UInt32 timeMs, M4OSA_Void* outPtr);
-
- M4OSA_ERR doImageRenderingMode(
- M4OSA_Void * dataPtr,
- M4OSA_UInt32 colorFormat, M4OSA_UInt32 videoWidth,
- M4OSA_UInt32 videoHeight, M4OSA_Void* outPtr);
-
- // Don't call me!
- VideoEditorPreviewController(const VideoEditorPreviewController &);
- VideoEditorPreviewController &operator=(
- const VideoEditorPreviewController &);
-};
-
-}
-
-#endif // ANDROID_VE_PREVIEWCONTROLLER_H
diff --git a/libvideoeditor/lvpp/VideoEditorSRC.cpp b/libvideoeditor/lvpp/VideoEditorSRC.cpp
deleted file mode 100755
index 6beabfa..0000000
--- a/libvideoeditor/lvpp/VideoEditorSRC.cpp
+++ /dev/null
@@ -1,334 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "VideoEditorSRC"
-
-#include <stdlib.h>
-#include <utils/Log.h>
-#include <audio_utils/primitives.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDefs.h>
-#include "VideoEditorSRC.h"
-
-
-namespace android {
-
-VideoEditorSRC::VideoEditorSRC(const sp<MediaSource> &source) {
- ALOGV("VideoEditorSRC %p(%p)", this, source.get());
- static const int32_t kDefaultSamplingFreqencyHz = kFreq32000Hz;
- mSource = source;
- mResampler = NULL;
- mChannelCnt = 0;
- mSampleRate = 0;
- mOutputSampleRate = kDefaultSamplingFreqencyHz;
- mStarted = false;
- mInitialTimeStampUs = -1;
- mAccuOutBufferSize = 0;
- mSeekTimeUs = -1;
- mBuffer = NULL;
- mLeftover = 0;
- mFormatChanged = false;
- mStopPending = false;
- mSeekMode = ReadOptions::SEEK_PREVIOUS_SYNC;
-
- // Input Source validation
- sp<MetaData> format = mSource->getFormat();
- const char *mime;
- CHECK(format->findCString(kKeyMIMEType, &mime));
- CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
-
- // Set the metadata of the output after resampling.
- mOutputFormat = new MetaData;
- mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
- mOutputFormat->setInt32(kKeySampleRate, kDefaultSamplingFreqencyHz);
- mOutputFormat->setInt32(kKeyChannelCount, 2); // always stereo
-}
-
-VideoEditorSRC::~VideoEditorSRC() {
- ALOGV("~VideoEditorSRC %p(%p)", this, mSource.get());
- stop();
-}
-
-status_t VideoEditorSRC::start(MetaData *params) {
- ALOGV("start %p(%p)", this, mSource.get());
- CHECK(!mStarted);
-
- // Set resampler if required
- checkAndSetResampler();
-
- mSeekTimeUs = -1;
- mSeekMode = ReadOptions::SEEK_PREVIOUS_SYNC;
- mStarted = true;
- mSource->start();
-
- return OK;
-}
-
-status_t VideoEditorSRC::stop() {
- ALOGV("stop %p(%p)", this, mSource.get());
- if (!mStarted) {
- return OK;
- }
-
- if (mBuffer) {
- mBuffer->release();
- mBuffer = NULL;
- }
- mSource->stop();
- if (mResampler != NULL) {
- delete mResampler;
- mResampler = NULL;
- }
-
- mStarted = false;
- mInitialTimeStampUs = -1;
- mAccuOutBufferSize = 0;
- mLeftover = 0;
-
- return OK;
-}
-
-sp<MetaData> VideoEditorSRC::getFormat() {
- ALOGV("getFormat");
- return mOutputFormat;
-}
-
-status_t VideoEditorSRC::read(
- MediaBuffer **buffer_out, const ReadOptions *options) {
- ALOGV("read %p(%p)", this, mSource.get());
- *buffer_out = NULL;
-
- if (!mStarted) {
- return ERROR_END_OF_STREAM;
- }
-
- if (mResampler) {
- // Store the seek parameters
- int64_t seekTimeUs;
- ReadOptions::SeekMode mode = ReadOptions::SEEK_PREVIOUS_SYNC;
- if (options && options->getSeekTo(&seekTimeUs, &mode)) {
- ALOGV("read Seek %lld", seekTimeUs);
- mSeekTimeUs = seekTimeUs;
- mSeekMode = mode;
- }
-
- // We ask for 1024 frames in output
- // resampler output is always 2 channels and 32 bits
- const size_t kOutputFrameCount = 1024;
- const size_t kBytes = kOutputFrameCount * 2 * sizeof(int32_t);
- int32_t *pTmpBuffer = (int32_t *)calloc(1, kBytes);
- if (!pTmpBuffer) {
- ALOGE("calloc failed to allocate memory: %d bytes", kBytes);
- return NO_MEMORY;
- }
-
- // Resample to target quality
- mResampler->resample(pTmpBuffer, kOutputFrameCount, this);
-
- if (mStopPending) {
- stop();
- mStopPending = false;
- }
-
- // Change resampler and retry if format change happened
- if (mFormatChanged) {
- mFormatChanged = false;
- checkAndSetResampler();
- free(pTmpBuffer);
- return read(buffer_out, NULL);
- }
-
- // Create a new MediaBuffer
- int32_t outBufferSize = kOutputFrameCount * 2 * sizeof(int16_t);
- MediaBuffer* outBuffer = new MediaBuffer(outBufferSize);
-
- // Convert back to 2 channels and 16 bits
- ditherAndClamp(
- (int32_t *)((uint8_t*)outBuffer->data() + outBuffer->range_offset()),
- pTmpBuffer, kOutputFrameCount);
- free(pTmpBuffer);
-
- // Compute and set the new timestamp
- sp<MetaData> to = outBuffer->meta_data();
- int64_t totalOutDurationUs = (mAccuOutBufferSize * 1000000) / (mOutputSampleRate * 2 * 2);
- int64_t timeUs = mInitialTimeStampUs + totalOutDurationUs;
- to->setInt64(kKeyTime, timeUs);
-
- // update the accumulate size
- mAccuOutBufferSize += outBufferSize;
- *buffer_out = outBuffer;
- } else {
- // Resampling not required. Read and pass-through.
- MediaBuffer *aBuffer;
- status_t err = mSource->read(&aBuffer, options);
- if (err != OK) {
- ALOGV("read returns err = %d", err);
- }
-
- if (err == INFO_FORMAT_CHANGED) {
- checkAndSetResampler();
- return read(buffer_out, NULL);
- }
-
- // EOS or some other error
- if(err != OK) {
- stop();
- *buffer_out = NULL;
- return err;
- }
- *buffer_out = aBuffer;
- }
-
- return OK;
-}
-
-status_t VideoEditorSRC::getNextBuffer(AudioBufferProvider::Buffer *pBuffer, int64_t pts) {
- ALOGV("getNextBuffer %d, chan = %d", pBuffer->frameCount, mChannelCnt);
- uint32_t done = 0;
- uint32_t want = pBuffer->frameCount * mChannelCnt * 2;
- pBuffer->raw = malloc(want);
-
- while (mStarted && want > 0) {
- // If we don't have any data left, read a new buffer.
- if (!mBuffer) {
- // if we seek, reset the initial time stamp and accumulated time
- ReadOptions options;
- if (mSeekTimeUs >= 0) {
- ALOGV("%p cacheMore_l Seek requested = %lld", this, mSeekTimeUs);
- ReadOptions::SeekMode mode = mSeekMode;
- options.setSeekTo(mSeekTimeUs, mode);
- mSeekTimeUs = -1;
- mInitialTimeStampUs = -1;
- mAccuOutBufferSize = 0;
- }
-
- status_t err = mSource->read(&mBuffer, &options);
-
- if (err != OK) {
- free(pBuffer->raw);
- pBuffer->raw = NULL;
- pBuffer->frameCount = 0;
- }
-
- if (err == INFO_FORMAT_CHANGED) {
- ALOGV("getNextBuffer: source read returned INFO_FORMAT_CHANGED");
- // At this point we cannot switch to a new AudioResampler because
- // we are in a callback called by the AudioResampler itself. So
- // just remember the fact that the format has changed, and let
- // read() handles this.
- mFormatChanged = true;
- return err;
- }
-
- // EOS or some other error
- if (err != OK) {
- ALOGV("EOS or some err: %d", err);
- // We cannot call stop() here because stop() will release the
- // AudioResampler, and we are in a callback of the AudioResampler.
- // So just remember the fact and let read() call stop().
- mStopPending = true;
- return err;
- }
-
- CHECK(mBuffer);
- mLeftover = mBuffer->range_length();
- if (mInitialTimeStampUs == -1) {
- int64_t curTS;
- sp<MetaData> from = mBuffer->meta_data();
- from->findInt64(kKeyTime, &curTS);
- ALOGV("setting mInitialTimeStampUs to %lld", mInitialTimeStampUs);
- mInitialTimeStampUs = curTS;
- }
- }
-
- // Now copy data to the destination
- uint32_t todo = mLeftover;
- if (todo > want) {
- todo = want;
- }
-
- uint8_t* end = (uint8_t*)mBuffer->data() + mBuffer->range_offset()
- + mBuffer->range_length();
- memcpy((uint8_t*)pBuffer->raw + done, end - mLeftover, todo);
- done += todo;
- want -= todo;
- mLeftover -= todo;
-
- // Release MediaBuffer as soon as possible.
- if (mLeftover == 0) {
- mBuffer->release();
- mBuffer = NULL;
- }
- }
-
- pBuffer->frameCount = done / (mChannelCnt * 2);
- ALOGV("getNextBuffer done %d", pBuffer->frameCount);
- return OK;
-}
-
-
-void VideoEditorSRC::releaseBuffer(AudioBufferProvider::Buffer *pBuffer) {
- ALOGV("releaseBuffer: %p", pBuffer);
- free(pBuffer->raw);
- pBuffer->raw = NULL;
- pBuffer->frameCount = 0;
-}
-
-void VideoEditorSRC::checkAndSetResampler() {
- ALOGV("checkAndSetResampler");
-
- static const uint16_t kUnityGain = 0x1000;
- sp<MetaData> format = mSource->getFormat();
- const char *mime;
- CHECK(format->findCString(kKeyMIMEType, &mime));
- CHECK(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW));
-
- CHECK(format->findInt32(kKeySampleRate, &mSampleRate));
- CHECK(format->findInt32(kKeyChannelCount, &mChannelCnt));
-
- // If a resampler exists, delete it first
- if (mResampler != NULL) {
- delete mResampler;
- mResampler = NULL;
- }
-
- // Clear previous buffer
- if (mBuffer) {
- mBuffer->release();
- mBuffer = NULL;
- }
-
- if (mSampleRate != mOutputSampleRate || mChannelCnt != 2) {
- ALOGV("Resampling required (%d => %d Hz, # channels = %d)",
- mSampleRate, mOutputSampleRate, mChannelCnt);
-
- mResampler = AudioResampler::create(
- 16 /* bit depth */,
- mChannelCnt,
- mOutputSampleRate);
- CHECK(mResampler);
- mResampler->setSampleRate(mSampleRate);
- mResampler->setVolume(kUnityGain, kUnityGain);
- } else {
- ALOGV("Resampling not required (%d => %d Hz, # channels = %d)",
- mSampleRate, mOutputSampleRate, mChannelCnt);
- }
-}
-
-} //namespce android
diff --git a/libvideoeditor/lvpp/VideoEditorSRC.h b/libvideoeditor/lvpp/VideoEditorSRC.h
deleted file mode 100755
index 1707d4d..0000000
--- a/libvideoeditor/lvpp/VideoEditorSRC.h
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#include <stdint.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/AudioBufferProvider.h>
-#include "AudioResampler.h"
-
-namespace android {
-
-struct MediaBuffer;
-
-class VideoEditorSRC : public MediaSource , public AudioBufferProvider {
-
-public:
- VideoEditorSRC(const sp<MediaSource> &source);
-
- virtual status_t start (MetaData *params = NULL);
- virtual status_t stop();
- virtual sp<MetaData> getFormat();
- virtual status_t read (
- MediaBuffer **buffer, const ReadOptions *options = NULL);
-
- virtual status_t getNextBuffer(Buffer* buffer, int64_t pts);
- virtual void releaseBuffer(Buffer* buffer);
-
- // Sampling freqencies
- enum {
- kFreq8000Hz = 8000,
- kFreq11025Hz = 11025,
- kFreq12000Hz = 12000,
- kFreq16000Hz = 16000,
- kFreq22050Hz = 22050,
- kFreq24000Hz = 24000,
- kFreq32000Hz = 32000,
- kFreq44100Hz = 44100,
- kFreq48000Hz = 48000,
- };
-
-protected :
- virtual ~VideoEditorSRC();
-
-private:
- AudioResampler *mResampler;
- sp<MediaSource> mSource;
- int mChannelCnt;
- int mSampleRate;
- int32_t mOutputSampleRate;
- bool mStarted;
- sp<MetaData> mOutputFormat;
-
- MediaBuffer* mBuffer;
- int32_t mLeftover;
- bool mFormatChanged;
- bool mStopPending;
-
- int64_t mInitialTimeStampUs;
- int64_t mAccuOutBufferSize;
-
- int64_t mSeekTimeUs;
- ReadOptions::SeekMode mSeekMode;
-
- VideoEditorSRC();
- void checkAndSetResampler();
-
- // Don't call me
- VideoEditorSRC(const VideoEditorSRC&);
- VideoEditorSRC &operator=(const VideoEditorSRC &);
-
-};
-
-} //namespce android
-
diff --git a/libvideoeditor/lvpp/VideoEditorTools.cpp b/libvideoeditor/lvpp/VideoEditorTools.cpp
deleted file mode 100755
index 2b9fd60..0000000
--- a/libvideoeditor/lvpp/VideoEditorTools.cpp
+++ /dev/null
@@ -1,3883 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "VideoEditorTools.h"
-#include "PreviewRenderer.h"
-/*+ Handle the image files here */
-#include <utils/Log.h>
-/*- Handle the image files here */
-
-const M4VIFI_UInt8 M4VIFI_ClipTable[1256]
-= {
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
-0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03,
-0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b,
-0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13,
-0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b,
-0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23,
-0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b,
-0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33,
-0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b,
-0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x41, 0x42, 0x43,
-0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b,
-0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53,
-0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b,
-0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x63,
-0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b,
-0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x73,
-0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x7b,
-0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83,
-0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b,
-0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93,
-0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b,
-0x9c, 0x9d, 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3,
-0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab,
-0xac, 0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3,
-0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb,
-0xbc, 0xbd, 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3,
-0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb,
-0xcc, 0xcd, 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3,
-0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb,
-0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3,
-0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb,
-0xec, 0xed, 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3,
-0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb,
-0xfc, 0xfd, 0xfe, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
-0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff
-};
-
-/* Division table for ( 65535/x ); x = 0 to 512 */
-const M4VIFI_UInt16 M4VIFI_DivTable[512]
-= {
-0, 65535, 32768, 21845, 16384, 13107, 10922, 9362,
-8192, 7281, 6553, 5957, 5461, 5041, 4681, 4369,
-4096, 3855, 3640, 3449, 3276, 3120, 2978, 2849,
-2730, 2621, 2520, 2427, 2340, 2259, 2184, 2114,
-2048, 1985, 1927, 1872, 1820, 1771, 1724, 1680,
-1638, 1598, 1560, 1524, 1489, 1456, 1424, 1394,
-1365, 1337, 1310, 1285, 1260, 1236, 1213, 1191,
-1170, 1149, 1129, 1110, 1092, 1074, 1057, 1040,
-1024, 1008, 992, 978, 963, 949, 936, 923,
-910, 897, 885, 873, 862, 851, 840, 829,
-819, 809, 799, 789, 780, 771, 762, 753,
-744, 736, 728, 720, 712, 704, 697, 689,
-682, 675, 668, 661, 655, 648, 642, 636,
-630, 624, 618, 612, 606, 601, 595, 590,
-585, 579, 574, 569, 564, 560, 555, 550,
-546, 541, 537, 532, 528, 524, 520, 516,
-512, 508, 504, 500, 496, 492, 489, 485,
-481, 478, 474, 471, 468, 464, 461, 458,
-455, 451, 448, 445, 442, 439, 436, 434,
-431, 428, 425, 422, 420, 417, 414, 412,
-409, 407, 404, 402, 399, 397, 394, 392,
-390, 387, 385, 383, 381, 378, 376, 374,
-372, 370, 368, 366, 364, 362, 360, 358,
-356, 354, 352, 350, 348, 346, 344, 343,
-341, 339, 337, 336, 334, 332, 330, 329,
-327, 326, 324, 322, 321, 319, 318, 316,
-315, 313, 312, 310, 309, 307, 306, 304,
-303, 302, 300, 299, 297, 296, 295, 293,
-292, 291, 289, 288, 287, 286, 284, 283,
-282, 281, 280, 278, 277, 276, 275, 274,
-273, 271, 270, 269, 268, 267, 266, 265,
-264, 263, 262, 261, 260, 259, 258, 257,
-256, 255, 254, 253, 252, 251, 250, 249,
-248, 247, 246, 245, 244, 243, 242, 241,
-240, 240, 239, 238, 237, 236, 235, 234,
-234, 233, 232, 231, 230, 229, 229, 228,
-227, 226, 225, 225, 224, 223, 222, 222,
-221, 220, 219, 219, 218, 217, 217, 216,
-215, 214, 214, 213, 212, 212, 211, 210,
-210, 209, 208, 208, 207, 206, 206, 205,
-204, 204, 203, 202, 202, 201, 201, 200,
-199, 199, 198, 197, 197, 196, 196, 195,
-195, 194, 193, 193, 192, 192, 191, 191,
-190, 189, 189, 188, 188, 187, 187, 186,
-186, 185, 185, 184, 184, 183, 183, 182,
-182, 181, 181, 180, 180, 179, 179, 178,
-178, 177, 177, 176, 176, 175, 175, 174,
-174, 173, 173, 172, 172, 172, 171, 171,
-170, 170, 169, 169, 168, 168, 168, 167,
-167, 166, 166, 165, 165, 165, 164, 164,
-163, 163, 163, 162, 162, 161, 161, 161,
-160, 160, 159, 159, 159, 158, 158, 157,
-157, 157, 156, 156, 156, 155, 155, 154,
-154, 154, 153, 153, 153, 152, 152, 152,
-151, 151, 151, 150, 150, 149, 149, 149,
-148, 148, 148, 147, 147, 147, 146, 146,
-146, 145, 145, 145, 144, 144, 144, 144,
-143, 143, 143, 142, 142, 142, 141, 141,
-141, 140, 140, 140, 140, 139, 139, 139,
-138, 138, 138, 137, 137, 137, 137, 136,
-136, 136, 135, 135, 135, 135, 134, 134,
-134, 134, 133, 133, 133, 132, 132, 132,
-132, 131, 131, 131, 131, 130, 130, 130,
-130, 129, 129, 129, 129, 128, 128, 128
-};
-
-const M4VIFI_Int32 const_storage1[8]
-= {
-0x00002568, 0x00003343,0x00000649,0x00000d0f, 0x0000D86C, 0x0000D83B, 0x00010000, 0x00010000
-};
-
-const M4VIFI_Int32 const_storage[8]
-= {
-0x00002568, 0x00003343, 0x1BF800, 0x00000649, 0x00000d0f, 0x110180, 0x40cf, 0x22BE00
-};
-
-
-const M4VIFI_UInt16 *M4VIFI_DivTable_zero
- = &M4VIFI_DivTable[0];
-
-const M4VIFI_UInt8 *M4VIFI_ClipTable_zero
- = &M4VIFI_ClipTable[500];
-
-M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data,
- M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {
-
- M4VIFI_UInt32 i;
- M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;
- M4VIFI_UInt8 return_code = M4VIFI_OK;
-
- /* the filter is implemented with the assumption that the width is equal to stride */
- if(PlaneIn[0].u_width != PlaneIn[0].u_stride)
- return M4VIFI_INVALID_PARAM;
-
- /* The input Y Plane is the same as the output Y Plane */
- p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);
- p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);
- memcpy((void *)p_buf_dest,(void *)p_buf_src ,
- PlaneOut[0].u_width * PlaneOut[0].u_height);
-
- /* The U and V components are planar. The need to be made interleaved */
- p_buf_src_u = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);
- p_buf_src_v = &(PlaneIn[2].pac_data[PlaneIn[2].u_topleft]);
- p_buf_dest = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);
-
- for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)
- {
- *p_buf_dest++ = *p_buf_src_u++;
- *p_buf_dest++ = *p_buf_src_v++;
- }
- return return_code;
-}
-
-M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data,
- M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut ) {
-
- M4VIFI_UInt32 i;
- M4VIFI_UInt8 *p_buf_src, *p_buf_dest, *p_buf_src_u, *p_buf_src_v;
- M4VIFI_UInt8 *p_buf_dest_u,*p_buf_dest_v,*p_buf_src_uv;
- M4VIFI_UInt8 return_code = M4VIFI_OK;
-
- /* the filter is implemented with the assumption that the width is equal to stride */
- if(PlaneIn[0].u_width != PlaneIn[0].u_stride)
- return M4VIFI_INVALID_PARAM;
-
- /* The input Y Plane is the same as the output Y Plane */
- p_buf_src = &(PlaneIn[0].pac_data[PlaneIn[0].u_topleft]);
- p_buf_dest = &(PlaneOut[0].pac_data[PlaneOut[0].u_topleft]);
- memcpy((void *)p_buf_dest,(void *)p_buf_src ,
- PlaneOut[0].u_width * PlaneOut[0].u_height);
-
- /* The U and V components are planar. The need to be made interleaved */
- p_buf_src_uv = &(PlaneIn[1].pac_data[PlaneIn[1].u_topleft]);
- p_buf_dest_u = &(PlaneOut[1].pac_data[PlaneOut[1].u_topleft]);
- p_buf_dest_v = &(PlaneOut[2].pac_data[PlaneOut[2].u_topleft]);
-
- for(i = 0; i < PlaneOut[1].u_width*PlaneOut[1].u_height; i++)
- {
- *p_buf_dest_u++ = *p_buf_src_uv++;
- *p_buf_dest_v++ = *p_buf_src_uv++;
- }
- return return_code;
-}
-
-
-/**
- ******************************************************************************
- * prototype M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
- * M4VIFI_ImagePlane *PlaneIn,
- * M4VIFI_ImagePlane *PlaneOut,
- * M4VSS3GPP_ExternalProgress *pProgress,
- * M4OSA_UInt32 uiEffectKind)
- *
- * @brief This function apply a color effect on an input YUV420 planar frame
- * @note
- * @param pFunctionContext(IN) Contains which color to apply (not very clean ...)
- * @param PlaneIn (IN) Input YUV420 planar
- * @param PlaneOut (IN/OUT) Output YUV420 planar
- * @param pProgress (IN/OUT) Progress indication (0-100)
- * @param uiEffectKind (IN) Unused
- *
- * @return M4VIFI_OK: No error
- ******************************************************************************
-*/
-M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext,
- M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut,
- M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind) {
-
- M4VIFI_Int32 plane_number;
- M4VIFI_UInt32 i,j;
- M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
- M4xVSS_ColorStruct* ColorContext = (M4xVSS_ColorStruct*)pFunctionContext;
-
- for (plane_number = 0; plane_number < 3; plane_number++)
- {
- p_buf_src =
- &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
-
- p_buf_dest =
- &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
- for (i = 0; i < PlaneOut[plane_number].u_height; i++)
- {
- /**
- * Chrominance */
- if(plane_number==1 || plane_number==2)
- {
- //switch ((M4OSA_UInt32)pFunctionContext) // commented because a structure for the effects context exist
- switch (ColorContext->colorEffectType)
- {
- case M4xVSS_kVideoEffectType_BlackAndWhite:
- memset((void *)p_buf_dest,128,
- PlaneIn[plane_number].u_width);
- break;
- case M4xVSS_kVideoEffectType_Pink:
- memset((void *)p_buf_dest,255,
- PlaneIn[plane_number].u_width);
- break;
- case M4xVSS_kVideoEffectType_Green:
- memset((void *)p_buf_dest,0,
- PlaneIn[plane_number].u_width);
- break;
- case M4xVSS_kVideoEffectType_Sepia:
- if(plane_number==1)
- {
- memset((void *)p_buf_dest,117,
- PlaneIn[plane_number].u_width);
- }
- else
- {
- memset((void *)p_buf_dest,139,
- PlaneIn[plane_number].u_width);
- }
- break;
- case M4xVSS_kVideoEffectType_Negative:
- memcpy((void *)p_buf_dest,
- (void *)p_buf_src ,PlaneOut[plane_number].u_width);
- break;
-
- case M4xVSS_kVideoEffectType_ColorRGB16:
- {
- M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
-
- /*first get the r, g, b*/
- b = (ColorContext->rgb16ColorData & 0x001f);
- g = (ColorContext->rgb16ColorData & 0x07e0)>>5;
- r = (ColorContext->rgb16ColorData & 0xf800)>>11;
-
- /*keep y, but replace u and v*/
- if(plane_number==1)
- {
- /*then convert to u*/
- u = U16(r, g, b);
- memset((void *)p_buf_dest,(M4OSA_UInt8)u,
- PlaneIn[plane_number].u_width);
- }
- if(plane_number==2)
- {
- /*then convert to v*/
- v = V16(r, g, b);
- memset((void *)p_buf_dest,(M4OSA_UInt8)v,
- PlaneIn[plane_number].u_width);
- }
- }
- break;
- case M4xVSS_kVideoEffectType_Gradient:
- {
- M4OSA_UInt16 r = 0,g = 0,b = 0,y = 0,u = 0,v = 0;
-
- /*first get the r, g, b*/
- b = (ColorContext->rgb16ColorData & 0x001f);
- g = (ColorContext->rgb16ColorData & 0x07e0)>>5;
- r = (ColorContext->rgb16ColorData & 0xf800)>>11;
-
- /*for color gradation*/
- b = (M4OSA_UInt16)( b - ((b*i)/PlaneIn[plane_number].u_height));
- g = (M4OSA_UInt16)(g - ((g*i)/PlaneIn[plane_number].u_height));
- r = (M4OSA_UInt16)(r - ((r*i)/PlaneIn[plane_number].u_height));
-
- /*keep y, but replace u and v*/
- if(plane_number==1)
- {
- /*then convert to u*/
- u = U16(r, g, b);
- memset((void *)p_buf_dest,(M4OSA_UInt8)u,
- PlaneIn[plane_number].u_width);
- }
- if(plane_number==2)
- {
- /*then convert to v*/
- v = V16(r, g, b);
- memset((void *)p_buf_dest,(M4OSA_UInt8)v,
- PlaneIn[plane_number].u_width);
- }
- }
- break;
- default:
- return M4VIFI_INVALID_PARAM;
- }
- }
- /**
- * Luminance */
- else
- {
- //switch ((M4OSA_UInt32)pFunctionContext)// commented because a structure for the effects context exist
- switch (ColorContext->colorEffectType)
- {
- case M4xVSS_kVideoEffectType_Negative:
- for(j=0;j<PlaneOut[plane_number].u_width;j++)
- {
- p_buf_dest[j] = 255 - p_buf_src[j];
- }
- break;
- default:
- memcpy((void *)p_buf_dest,
- (void *)p_buf_src ,PlaneOut[plane_number].u_width);
- break;
- }
- }
- p_buf_src += PlaneIn[plane_number].u_stride;
- p_buf_dest += PlaneOut[plane_number].u_stride;
- }
- }
-
- return M4VIFI_OK;
-}
-
-/**
- ******************************************************************************
- * prototype M4VSS3GPP_externalVideoEffectFraming(M4OSA_Void *pFunctionContext,
- * M4VIFI_ImagePlane *PlaneIn,
- * M4VIFI_ImagePlane *PlaneOut,
- * M4VSS3GPP_ExternalProgress *pProgress,
- * M4OSA_UInt32 uiEffectKind)
- *
- * @brief This function add a fixed or animated image on an input YUV420 planar frame
- * @note
- * @param pFunctionContext(IN) Contains which color to apply (not very clean ...)
- * @param PlaneIn (IN) Input YUV420 planar
- * @param PlaneOut (IN/OUT) Output YUV420 planar
- * @param pProgress (IN/OUT) Progress indication (0-100)
- * @param uiEffectKind (IN) Unused
- *
- * @return M4VIFI_OK: No error
- ******************************************************************************
-*/
-M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming(
- M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3],
- M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress,
- M4OSA_UInt32 uiEffectKind ) {
-
- M4VIFI_UInt32 x,y;
-
- M4VIFI_UInt8 *p_in_Y = PlaneIn[0].pac_data;
- M4VIFI_UInt8 *p_in_U = PlaneIn[1].pac_data;
- M4VIFI_UInt8 *p_in_V = PlaneIn[2].pac_data;
-
- M4xVSS_FramingStruct* Framing = M4OSA_NULL;
- M4xVSS_FramingStruct* currentFraming = M4OSA_NULL;
- M4VIFI_UInt8 *FramingRGB = M4OSA_NULL;
-
- M4VIFI_UInt8 *p_out0;
- M4VIFI_UInt8 *p_out1;
- M4VIFI_UInt8 *p_out2;
-
- M4VIFI_UInt32 topleft[2];
-
- M4OSA_UInt8 transparent1 =
- (M4OSA_UInt8)((TRANSPARENT_COLOR & 0xFF00)>>8);
- M4OSA_UInt8 transparent2 = (M4OSA_UInt8)TRANSPARENT_COLOR;
-
-#ifndef DECODE_GIF_ON_SAVING
- Framing = (M4xVSS_FramingStruct *)userData;
- currentFraming = (M4xVSS_FramingStruct *)Framing->pCurrent;
- FramingRGB = Framing->FramingRgb->pac_data;
-#endif /*DECODE_GIF_ON_SAVING*/
-
-#ifdef DECODE_GIF_ON_SAVING
- M4OSA_ERR err;
- Framing =
- (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
- if(Framing == M4OSA_NULL)
- {
- ((M4xVSS_FramingContext*)userData)->clipTime = pProgress->uiOutputTime;
- err = M4xVSS_internalDecodeGIF(userData);
- if(M4NO_ERROR != err)
- {
- M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: \
- Error in M4xVSS_internalDecodeGIF: 0x%x", err);
- return err;
- }
- Framing =
- (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
- /* Initializes first GIF time */
- ((M4xVSS_FramingContext*)userData)->current_gif_time =
- pProgress->uiOutputTime;
- }
- currentFraming = (M4xVSS_FramingStruct *)Framing;
- FramingRGB = Framing->FramingRgb->pac_data;
-#endif /*DECODE_GIF_ON_SAVING*/
-
- /**
- * Initialize input / output plane pointers */
- p_in_Y += PlaneIn[0].u_topleft;
- p_in_U += PlaneIn[1].u_topleft;
- p_in_V += PlaneIn[2].u_topleft;
-
- p_out0 = PlaneOut[0].pac_data;
- p_out1 = PlaneOut[1].pac_data;
- p_out2 = PlaneOut[2].pac_data;
-
- /**
- * Depending on time, initialize Framing frame to use */
- if(Framing->previousClipTime == -1)
- {
- Framing->previousClipTime = pProgress->uiOutputTime;
- }
-
- /**
- * If the current clip time has reach the duration of one frame of the framing picture
- * we need to step to next framing picture */
-#ifdef DECODE_GIF_ON_SAVING
- if(((M4xVSS_FramingContext*)userData)->b_animated == M4OSA_TRUE)
- {
- while((((M4xVSS_FramingContext*)userData)->current_gif_time + currentFraming->duration) < pProgress->uiOutputTime)
- {
- ((M4xVSS_FramingContext*)userData)->clipTime =
- pProgress->uiOutputTime;
-
- err = M4xVSS_internalDecodeGIF(userData);
- if(M4NO_ERROR != err)
- {
- M4OSA_TRACE1_1("M4VSS3GPP_externalVideoEffectFraming: Error in M4xVSS_internalDecodeGIF: 0x%x", err);
- return err;
- }
- if(currentFraming->duration != 0)
- {
- ((M4xVSS_FramingContext*)userData)->current_gif_time += currentFraming->duration;
- }
- else
- {
- ((M4xVSS_FramingContext*)userData)->current_gif_time +=
- pProgress->uiOutputTime - Framing->previousClipTime;
- }
- Framing = (M4xVSS_FramingStruct *)((M4xVSS_FramingContext*)userData)->aFramingCtx;
- currentFraming = (M4xVSS_FramingStruct *)Framing;
- FramingRGB = Framing->FramingRgb->pac_data;
- }
- }
-#else
- Framing->pCurrent = currentFraming->pNext;
- currentFraming = (M4xVSS_FramingStruct*)Framing->pCurrent;
-#endif /*DECODE_GIF_ON_SAVING*/
-
- Framing->previousClipTime = pProgress->uiOutputTime;
- FramingRGB = currentFraming->FramingRgb->pac_data;
- topleft[0] = currentFraming->topleft_x;
- topleft[1] = currentFraming->topleft_y;
-
- for( x=0 ;x < PlaneIn[0].u_height ; x++)
- {
- for( y=0 ;y < PlaneIn[0].u_width ; y++)
- {
- /**
- * To handle framing with input size != output size
- * Framing is applyed if coordinates matches between framing/topleft and input plane */
- if( y < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
- y >= topleft[0] &&
- x < (topleft[1] + currentFraming->FramingYuv[0].u_height) &&
- x >= topleft[1])
- {
-
- /*Alpha blending support*/
- M4OSA_Float alphaBlending = 1;
-#ifdef DECODE_GIF_ON_SAVING
- M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =
- (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingContext*)userData)->alphaBlendingStruct;
-#else
- M4xVSS_internalEffectsAlphaBlending* alphaBlendingStruct =
- (M4xVSS_internalEffectsAlphaBlending*)((M4xVSS_FramingStruct*)userData)->alphaBlendingStruct;
-#endif //#ifdef DECODE_GIF_ON_SAVING
-
- if(alphaBlendingStruct != M4OSA_NULL)
- {
- if(pProgress->uiProgress < (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10))
- {
- alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_start)*pProgress->uiProgress/(alphaBlendingStruct->m_fadeInTime*10));
- alphaBlending += alphaBlendingStruct->m_start;
- alphaBlending /= 100;
- }
- else if(pProgress->uiProgress >= (M4OSA_UInt32)(alphaBlendingStruct->m_fadeInTime*10) && pProgress->uiProgress < 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
- {
- alphaBlending = (M4OSA_Float)((M4OSA_Float)alphaBlendingStruct->m_middle/100);
- }
- else if(pProgress->uiProgress >= 1000 - (M4OSA_UInt32)(alphaBlendingStruct->m_fadeOutTime*10))
- {
- alphaBlending = ((M4OSA_Float)(alphaBlendingStruct->m_middle - alphaBlendingStruct->m_end))*(1000 - pProgress->uiProgress)/(alphaBlendingStruct->m_fadeOutTime*10);
- alphaBlending += alphaBlendingStruct->m_end;
- alphaBlending /= 100;
- }
- }
-
- /**/
-
- if((*(FramingRGB)==transparent1) && (*(FramingRGB+1)==transparent2))
- {
- *( p_out0+y+x*PlaneOut[0].u_stride)=(*(p_in_Y+y+x*PlaneIn[0].u_stride));
- *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride));
- *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride));
- }
- else
- {
- *( p_out0+y+x*PlaneOut[0].u_stride)=(*(currentFraming->FramingYuv[0].pac_data+(y-topleft[0])+(x-topleft[1])*currentFraming->FramingYuv[0].u_stride))*alphaBlending;
- *( p_out0+y+x*PlaneOut[0].u_stride)+=(*(p_in_Y+y+x*PlaneIn[0].u_stride))*(1-alphaBlending);
- *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=(*(currentFraming->FramingYuv[1].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[1].u_stride))*alphaBlending;
- *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)+=(*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride))*(1-alphaBlending);
- *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=(*(currentFraming->FramingYuv[2].pac_data+((y-topleft[0])>>1)+((x-topleft[1])>>1)*currentFraming->FramingYuv[2].u_stride))*alphaBlending;
- *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)+=(*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride))*(1-alphaBlending);
- }
- if( PlaneIn[0].u_width < (topleft[0] + currentFraming->FramingYuv[0].u_width) &&
- y == PlaneIn[0].u_width-1)
- {
- FramingRGB = FramingRGB + 2 * (topleft[0] + currentFraming->FramingYuv[0].u_width - PlaneIn[0].u_width + 1);
- }
- else
- {
- FramingRGB = FramingRGB + 2;
- }
- }
- /**
- * Just copy input plane to output plane */
- else
- {
- *( p_out0+y+x*PlaneOut[0].u_stride)=*(p_in_Y+y+x*PlaneIn[0].u_stride);
- *( p_out1+(y>>1)+(x>>1)*PlaneOut[1].u_stride)=*(p_in_U+(y>>1)+(x>>1)*PlaneIn[1].u_stride);
- *( p_out2+(y>>1)+(x>>1)*PlaneOut[2].u_stride)=*(p_in_V+(y>>1)+(x>>1)*PlaneIn[2].u_stride);
- }
- }
- }
-
-#ifdef DECODE_GIF_ON_SAVING
- if(pProgress->bIsLast == M4OSA_TRUE
- && (M4OSA_Bool)((M4xVSS_FramingContext*)userData)->b_IsFileGif == M4OSA_TRUE)
- {
- M4xVSS_internalDecodeGIF_Cleaning((M4xVSS_FramingContext*)userData);
- }
-#endif /*DECODE_GIF_ON_SAVING*/
- return M4VIFI_OK;
-}
-
-
-/**
- ******************************************************************************
- * prototype M4VSS3GPP_externalVideoEffectFifties(M4OSA_Void *pFunctionContext,
- * M4VIFI_ImagePlane *PlaneIn,
- * M4VIFI_ImagePlane *PlaneOut,
- * M4VSS3GPP_ExternalProgress *pProgress,
- * M4OSA_UInt32 uiEffectKind)
- *
- * @brief This function make a video look as if it was taken in the fifties
- * @note
- * @param pUserData (IN) Context
- * @param pPlaneIn (IN) Input YUV420 planar
- * @param pPlaneOut (IN/OUT) Output YUV420 planar
- * @param pProgress (IN/OUT) Progress indication (0-100)
- * @param uiEffectKind (IN) Unused
- *
- * @return M4VIFI_OK: No error
- * @return M4ERR_PARAMETER: pFiftiesData, pPlaneOut or pProgress are NULL (DEBUG only)
- ******************************************************************************
-*/
-M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties(
- M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
- M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress,
- M4OSA_UInt32 uiEffectKind )
-{
- M4VIFI_UInt32 x, y, xShift;
- M4VIFI_UInt8 *pInY = pPlaneIn[0].pac_data;
- M4VIFI_UInt8 *pOutY, *pInYbegin;
- M4VIFI_UInt8 *pInCr,* pOutCr;
- M4VIFI_Int32 plane_number;
-
- /* Internal context*/
- M4xVSS_FiftiesStruct* p_FiftiesData = (M4xVSS_FiftiesStruct *)pUserData;
-
- /* Initialize input / output plane pointers */
- pInY += pPlaneIn[0].u_topleft;
- pOutY = pPlaneOut[0].pac_data;
- pInYbegin = pInY;
-
- /* Initialize the random */
- if(p_FiftiesData->previousClipTime < 0)
- {
- M4OSA_randInit();
- M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
- M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
- p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
- }
-
- /* Choose random values if we have reached the duration of a partial effect */
- else if( (pProgress->uiOutputTime - p_FiftiesData->previousClipTime) > p_FiftiesData->fiftiesEffectDuration)
- {
- M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->shiftRandomValue), (pPlaneIn[0].u_height) >> 4);
- M4OSA_rand((M4OSA_Int32*)&(p_FiftiesData->stripeRandomValue), (pPlaneIn[0].u_width)<< 2);
- p_FiftiesData->previousClipTime = pProgress->uiOutputTime;
- }
-
- /* Put in Sepia the chrominance */
- for (plane_number = 1; plane_number < 3; plane_number++)
- {
- pInCr = pPlaneIn[plane_number].pac_data + pPlaneIn[plane_number].u_topleft;
- pOutCr = pPlaneOut[plane_number].pac_data + pPlaneOut[plane_number].u_topleft;
-
- for (x = 0; x < pPlaneOut[plane_number].u_height; x++)
- {
- if (1 == plane_number)
- memset((void *)pOutCr, 117,pPlaneIn[plane_number].u_width); /* U value */
- else
- memset((void *)pOutCr, 139,pPlaneIn[plane_number].u_width); /* V value */
-
- pInCr += pPlaneIn[plane_number].u_stride;
- pOutCr += pPlaneOut[plane_number].u_stride;
- }
- }
-
- /* Compute the new pixels values */
- for( x = 0 ; x < pPlaneIn[0].u_height ; x++)
- {
- M4VIFI_UInt8 *p_outYtmp, *p_inYtmp;
-
- /* Compute the xShift (random value) */
- if (0 == (p_FiftiesData->shiftRandomValue % 5 ))
- xShift = (x + p_FiftiesData->shiftRandomValue ) % (pPlaneIn[0].u_height - 1);
- else
- xShift = (x + (pPlaneIn[0].u_height - p_FiftiesData->shiftRandomValue) ) % (pPlaneIn[0].u_height - 1);
-
- /* Initialize the pointers */
- p_outYtmp = pOutY + 1; /* yShift of 1 pixel */
- p_inYtmp = pInYbegin + (xShift * pPlaneIn[0].u_stride); /* Apply the xShift */
-
- for( y = 0 ; y < pPlaneIn[0].u_width ; y++)
- {
- /* Set Y value */
- if (xShift > (pPlaneIn[0].u_height - 4))
- *p_outYtmp = 40; /* Add some horizontal black lines between the two parts of the image */
- else if ( y == p_FiftiesData->stripeRandomValue)
- *p_outYtmp = 90; /* Add a random vertical line for the bulk */
- else
- *p_outYtmp = *p_inYtmp;
-
-
- /* Go to the next pixel */
- p_outYtmp++;
- p_inYtmp++;
-
- /* Restart at the beginning of the line for the last pixel*/
- if (y == (pPlaneIn[0].u_width - 2))
- p_outYtmp = pOutY;
- }
-
- /* Go to the next line */
- pOutY += pPlaneOut[0].u_stride;
- }
-
- return M4VIFI_OK;
-}
-
-unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in,
- M4ViComImagePlane *plane_out,
- unsigned long lum_factor,
- void *user_data)
-{
- unsigned short *p_src, *p_dest, *p_src_line, *p_dest_line;
- unsigned char *p_csrc, *p_cdest, *p_csrc_line, *p_cdest_line;
- unsigned long pix_src;
- unsigned long u_outpx, u_outpx2;
- unsigned long u_width, u_stride, u_stride_out,u_height, pix;
- long i, j;
-
- /* copy or filter chroma */
- u_width = plane_in[1].u_width;
- u_height = plane_in[1].u_height;
- u_stride = plane_in[1].u_stride;
- u_stride_out = plane_out[1].u_stride;
- p_cdest_line = (unsigned char *) &plane_out[1].pac_data[plane_out[1].u_topleft];
- p_csrc_line = (unsigned char *) &plane_in[1].pac_data[plane_in[1].u_topleft];
-
- if (lum_factor > 256)
- {
- p_cdest = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];
- p_csrc = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];
- /* copy chroma */
- for (j = u_height; j != 0; j--)
- {
- for (i = u_width; i != 0; i--)
- {
- memcpy((void *)p_cdest_line, (void *)p_csrc_line, u_width);
- memcpy((void *)p_cdest, (void *)p_csrc, u_width);
- }
- p_cdest_line += u_stride_out;
- p_cdest += u_stride_out;
- p_csrc_line += u_stride;
- p_csrc += u_stride;
- }
- }
- else
- {
- /* filter chroma */
- pix = (1024 - lum_factor) << 7;
- for (j = u_height; j != 0; j--)
- {
- p_cdest = p_cdest_line;
- p_csrc = p_csrc_line;
- for (i = u_width; i != 0; i--)
- {
- *p_cdest++ = ((pix + (*p_csrc++ & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);
- }
- p_cdest_line += u_stride_out;
- p_csrc_line += u_stride;
- }
- p_cdest_line = (unsigned char *) &plane_out[2].pac_data[plane_out[2].u_topleft];
- p_csrc_line = (unsigned char *) &plane_in[2].pac_data[plane_in[2].u_topleft];
- for (j = u_height; j != 0; j--)
- {
- p_cdest = p_cdest_line;
- p_csrc = p_csrc_line;
- for (i = u_width; i != 0; i--)
- {
- *p_cdest++ = ((pix + (*p_csrc & 0xFF) * lum_factor) >> LUM_FACTOR_MAX);
- }
- p_cdest_line += u_stride_out;
- p_csrc_line += u_stride;
- }
- }
- /* apply luma factor */
- u_width = plane_in[0].u_width;
- u_height = plane_in[0].u_height;
- u_stride = (plane_in[0].u_stride >> 1);
- u_stride_out = (plane_out[0].u_stride >> 1);
- p_dest = (unsigned short *) &plane_out[0].pac_data[plane_out[0].u_topleft];
- p_src = (unsigned short *) &plane_in[0].pac_data[plane_in[0].u_topleft];
- p_dest_line = p_dest;
- p_src_line = p_src;
-
- for (j = u_height; j != 0; j--)
- {
- p_dest = p_dest_line;
- p_src = p_src_line;
- for (i = (u_width >> 1); i != 0; i--)
- {
- pix_src = (unsigned long) *p_src++;
- pix = pix_src & 0xFF;
- u_outpx = ((pix * lum_factor) >> LUM_FACTOR_MAX);
- pix = ((pix_src & 0xFF00) >> 8);
- u_outpx2 = (((pix * lum_factor) >> LUM_FACTOR_MAX)<< 8) ;
- *p_dest++ = (unsigned short) (u_outpx2 | u_outpx);
- }
- p_dest_line += u_stride_out;
- p_src_line += u_stride;
- }
-
- return 0;
-}
-
-/******************************************************************************
- * prototype M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
- * @brief This function converts an RGB565 plane to YUV420 planar
- * @note It is used only for framing effect
- * It allocates output YUV planes
- * @param framingCtx (IN) The framing struct containing input RGB565 plane
- *
- * @return M4NO_ERROR: No error
- * @return M4ERR_PARAMETER: At least one of the function parameters is null
- * @return M4ERR_ALLOC: Allocation error (no more memory)
- ******************************************************************************
-*/
-M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx)
-{
- M4OSA_ERR err;
-
- /**
- * Allocate output YUV planes */
- framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
- if(framingCtx->FramingYuv == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
- return M4ERR_ALLOC;
- }
- framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
- framingCtx->FramingYuv[0].u_topleft = 0;
- framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Convertion output YUV");;
- if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
- return M4ERR_ALLOC;
- }
- framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
- framingCtx->FramingYuv[1].u_topleft = 0;
- framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
- framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
- framingCtx->FramingYuv[2].u_topleft = 0;
- framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
-
- /**
- * Convert input RGB 565 to YUV 420 to be able to merge it with output video in framing effect */
- err = M4VIFI_xVSS_RGB565toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
- if(err != M4NO_ERROR)
- {
- M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);
- }
-
- framingCtx->duration = 0;
- framingCtx->previousClipTime = -1;
- framingCtx->previewOffsetClipTime = -1;
-
- /**
- * Only one element in the chained list (no animated image with RGB buffer...) */
- framingCtx->pCurrent = framingCtx;
- framingCtx->pNext = framingCtx;
-
- return M4NO_ERROR;
-}
-
-/******************************************************************************
- * prototype M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)
- * @brief This function converts an RGB888 plane to YUV420 planar
- * @note It is used only for framing effect
- * It allocates output YUV planes
- * @param framingCtx (IN) The framing struct containing input RGB888 plane
- *
- * @return M4NO_ERROR: No error
- * @return M4ERR_PARAMETER: At least one of the function parameters is null
- * @return M4ERR_ALLOC: Allocation error (no more memory)
- ******************************************************************************
-*/
-M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx)
-{
- M4OSA_ERR err;
-
- /**
- * Allocate output YUV planes */
- framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
- if(framingCtx->FramingYuv == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
- return M4ERR_ALLOC;
- }
- framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
- framingCtx->FramingYuv[0].u_topleft = 0;
- framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc((framingCtx->FramingYuv[0].u_width*framingCtx->FramingYuv[0].u_height*3)>>1, M4VS, (M4OSA_Char*)"Alloc for the Convertion output YUV");;
- if(framingCtx->FramingYuv[0].pac_data == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Allocation error in M4xVSS_internalConvertRGBtoYUV");
- return M4ERR_ALLOC;
- }
- framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
- framingCtx->FramingYuv[1].u_topleft = 0;
- framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data + framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
- framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
- framingCtx->FramingYuv[2].u_topleft = 0;
- framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data + framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
-
- /**
- * Convert input RGB888 to YUV 420 to be able to merge it with output video in framing effect */
- err = M4VIFI_RGB888toYUV420(M4OSA_NULL, framingCtx->FramingRgb, framingCtx->FramingYuv);
- if(err != M4NO_ERROR)
- {
- M4OSA_TRACE1_1("M4xVSS_internalConvertRGBtoYUV: error when converting from RGB to YUV: 0x%x\n", err);
- }
-
- framingCtx->duration = 0;
- framingCtx->previousClipTime = -1;
- framingCtx->previewOffsetClipTime = -1;
-
- /**
- * Only one element in the chained list (no animated image with RGB buffer...) */
- framingCtx->pCurrent = framingCtx;
- framingCtx->pNext = framingCtx;
-
- return M4NO_ERROR;
-}
-
-/**
- ******************************************************************************
- * M4VIFI_UInt8 M4VIFI_RGB565toYUV420 (void *pUserData,
- * M4VIFI_ImagePlane *pPlaneIn,
- * M4VIFI_ImagePlane *pPlaneOut)
- * @author Patrice Martinez / Philips Digital Networks - MP4Net
- * @brief transform RGB565 image to a YUV420 image.
- * @note Convert RGB565 to YUV420,
- * Loop on each row ( 2 rows by 2 rows )
- * Loop on each column ( 2 col by 2 col )
- * Get 4 RGB samples from input data and build 4 output Y samples
- * and each single U & V data
- * end loop on col
- * end loop on row
- * @param pUserData: (IN) User Specific Data
- * @param pPlaneIn: (IN) Pointer to RGB565 Plane
- * @param pPlaneOut: (OUT) Pointer to YUV420 buffer Plane
- * @return M4VIFI_OK: there is no error
- * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: YUV Plane height is ODD
- * @return M4VIFI_ILLEGAL_FRAME_WIDTH: YUV Plane width is ODD
- ******************************************************************************
-*/
-M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
- M4VIFI_ImagePlane *pPlaneOut)
-{
- M4VIFI_UInt32 u32_width, u32_height;
- M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V;
- M4VIFI_UInt32 u32_stride_rgb, u32_stride_2rgb;
- M4VIFI_UInt32 u32_col, u32_row;
-
- M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11;
- M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11;
- M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11;
- M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11;
- M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11;
- M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11;
- M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;
- M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data;
- M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn;
- M4VIFI_UInt16 u16_pix1, u16_pix2, u16_pix3, u16_pix4;
- M4VIFI_UInt8 count_null=0;
-
- /* Check planes height are appropriate */
- if( (pPlaneIn->u_height != pPlaneOut[0].u_height) ||
- (pPlaneOut[0].u_height != (pPlaneOut[1].u_height<<1)) ||
- (pPlaneOut[0].u_height != (pPlaneOut[2].u_height<<1)))
- {
- return M4VIFI_ILLEGAL_FRAME_HEIGHT;
- }
-
- /* Check planes width are appropriate */
- if( (pPlaneIn->u_width != pPlaneOut[0].u_width) ||
- (pPlaneOut[0].u_width != (pPlaneOut[1].u_width<<1)) ||
- (pPlaneOut[0].u_width != (pPlaneOut[2].u_width<<1)))
- {
- return M4VIFI_ILLEGAL_FRAME_WIDTH;
- }
-
- /* Set the pointer to the beginning of the output data buffers */
- pu8_y_data = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;
- pu8_u_data = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;
- pu8_v_data = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;
-
- /* Set the pointer to the beginning of the input data buffers */
- pu8_rgbn_data = pPlaneIn->pac_data + pPlaneIn->u_topleft;
-
- /* Get the size of the output image */
- u32_width = pPlaneOut[0].u_width;
- u32_height = pPlaneOut[0].u_height;
-
- /* Set the size of the memory jumps corresponding to row jump in each output plane */
- u32_stride_Y = pPlaneOut[0].u_stride;
- u32_stride2_Y = u32_stride_Y << 1;
- u32_stride_U = pPlaneOut[1].u_stride;
- u32_stride_V = pPlaneOut[2].u_stride;
-
- /* Set the size of the memory jumps corresponding to row jump in input plane */
- u32_stride_rgb = pPlaneIn->u_stride;
- u32_stride_2rgb = u32_stride_rgb << 1;
-
-
- /* Loop on each row of the output image, input coordinates are estimated from output ones */
- /* Two YUV rows are computed at each pass */
- for (u32_row = u32_height ;u32_row != 0; u32_row -=2)
- {
- /* Current Y plane row pointers */
- pu8_yn = pu8_y_data;
- /* Next Y plane row pointers */
- pu8_ys = pu8_yn + u32_stride_Y;
- /* Current U plane row pointer */
- pu8_u = pu8_u_data;
- /* Current V plane row pointer */
- pu8_v = pu8_v_data;
-
- pu8_rgbn = pu8_rgbn_data;
-
- /* Loop on each column of the output image */
- for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)
- {
- /* Get four RGB 565 samples from input data */
- u16_pix1 = *( (M4VIFI_UInt16 *) pu8_rgbn);
- u16_pix2 = *( (M4VIFI_UInt16 *) (pu8_rgbn + CST_RGB_16_SIZE));
- u16_pix3 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb));
- u16_pix4 = *( (M4VIFI_UInt16 *) (pu8_rgbn + u32_stride_rgb + CST_RGB_16_SIZE));
-
- /* Unpack RGB565 to 8bit R, G, B */
- /* (x,y) */
- GET_RGB565(i32_b00,i32_g00,i32_r00,u16_pix1);
- /* (x+1,y) */
- GET_RGB565(i32_b10,i32_g10,i32_r10,u16_pix2);
- /* (x,y+1) */
- GET_RGB565(i32_b01,i32_g01,i32_r01,u16_pix3);
- /* (x+1,y+1) */
- GET_RGB565(i32_b11,i32_g11,i32_r11,u16_pix4);
- /* If RGB is transparent color (0, 63, 0), we transform it to white (31,63,31) */
- if(i32_b00 == 0 && i32_g00 == 63 && i32_r00 == 0)
- {
- i32_b00 = 31;
- i32_r00 = 31;
- }
- if(i32_b10 == 0 && i32_g10 == 63 && i32_r10 == 0)
- {
- i32_b10 = 31;
- i32_r10 = 31;
- }
- if(i32_b01 == 0 && i32_g01 == 63 && i32_r01 == 0)
- {
- i32_b01 = 31;
- i32_r01 = 31;
- }
- if(i32_b11 == 0 && i32_g11 == 63 && i32_r11 == 0)
- {
- i32_b11 = 31;
- i32_r11 = 31;
- }
- /* Convert RGB value to YUV */
- i32_u00 = U16(i32_r00, i32_g00, i32_b00);
- i32_v00 = V16(i32_r00, i32_g00, i32_b00);
- /* luminance value */
- i32_y00 = Y16(i32_r00, i32_g00, i32_b00);
-
- i32_u10 = U16(i32_r10, i32_g10, i32_b10);
- i32_v10 = V16(i32_r10, i32_g10, i32_b10);
- /* luminance value */
- i32_y10 = Y16(i32_r10, i32_g10, i32_b10);
-
- i32_u01 = U16(i32_r01, i32_g01, i32_b01);
- i32_v01 = V16(i32_r01, i32_g01, i32_b01);
- /* luminance value */
- i32_y01 = Y16(i32_r01, i32_g01, i32_b01);
-
- i32_u11 = U16(i32_r11, i32_g11, i32_b11);
- i32_v11 = V16(i32_r11, i32_g11, i32_b11);
- /* luminance value */
- i32_y11 = Y16(i32_r11, i32_g11, i32_b11);
-
- /* Store luminance data */
- pu8_yn[0] = (M4VIFI_UInt8)i32_y00;
- pu8_yn[1] = (M4VIFI_UInt8)i32_y10;
- pu8_ys[0] = (M4VIFI_UInt8)i32_y01;
- pu8_ys[1] = (M4VIFI_UInt8)i32_y11;
- *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);
- *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);
- /* Prepare for next column */
- pu8_rgbn += (CST_RGB_16_SIZE<<1);
- /* Update current Y plane line pointer*/
- pu8_yn += 2;
- /* Update next Y plane line pointer*/
- pu8_ys += 2;
- /* Update U plane line pointer*/
- pu8_u ++;
- /* Update V plane line pointer*/
- pu8_v ++;
- } /* End of horizontal scanning */
-
- /* Prepare pointers for the next row */
- pu8_y_data += u32_stride2_Y;
- pu8_u_data += u32_stride_U;
- pu8_v_data += u32_stride_V;
- pu8_rgbn_data += u32_stride_2rgb;
-
-
- } /* End of vertical scanning */
-
- return M4VIFI_OK;
-}
-
-/***************************************************************************
-Proto:
-M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);
-Author: Patrice Martinez / Philips Digital Networks - MP4Net
-Purpose: filling of the YUV420 plane from a BGR24 plane
-Abstract: Loop on each row ( 2 rows by 2 rows )
- Loop on each column ( 2 col by 2 col )
- Get 4 BGR samples from input data and build 4 output Y samples and each single U & V data
- end loop on col
- end loop on row
-
-In: RGB24 plane
-InOut: none
-Out: array of 3 M4VIFI_ImagePlane structures
-Modified: ML: RGB function modified to BGR.
-***************************************************************************/
-M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3])
-{
-
- M4VIFI_UInt32 u32_width, u32_height;
- M4VIFI_UInt32 u32_stride_Y, u32_stride2_Y, u32_stride_U, u32_stride_V, u32_stride_rgb, u32_stride_2rgb;
- M4VIFI_UInt32 u32_col, u32_row;
-
- M4VIFI_Int32 i32_r00, i32_r01, i32_r10, i32_r11;
- M4VIFI_Int32 i32_g00, i32_g01, i32_g10, i32_g11;
- M4VIFI_Int32 i32_b00, i32_b01, i32_b10, i32_b11;
- M4VIFI_Int32 i32_y00, i32_y01, i32_y10, i32_y11;
- M4VIFI_Int32 i32_u00, i32_u01, i32_u10, i32_u11;
- M4VIFI_Int32 i32_v00, i32_v01, i32_v10, i32_v11;
- M4VIFI_UInt8 *pu8_yn, *pu8_ys, *pu8_u, *pu8_v;
- M4VIFI_UInt8 *pu8_y_data, *pu8_u_data, *pu8_v_data;
- M4VIFI_UInt8 *pu8_rgbn_data, *pu8_rgbn;
-
- /* check sizes */
- if( (PlaneIn->u_height != PlaneOut[0].u_height) ||
- (PlaneOut[0].u_height != (PlaneOut[1].u_height<<1)) ||
- (PlaneOut[0].u_height != (PlaneOut[2].u_height<<1)))
- return M4VIFI_ILLEGAL_FRAME_HEIGHT;
-
- if( (PlaneIn->u_width != PlaneOut[0].u_width) ||
- (PlaneOut[0].u_width != (PlaneOut[1].u_width<<1)) ||
- (PlaneOut[0].u_width != (PlaneOut[2].u_width<<1)))
- return M4VIFI_ILLEGAL_FRAME_WIDTH;
-
-
- /* set the pointer to the beginning of the output data buffers */
- pu8_y_data = PlaneOut[0].pac_data + PlaneOut[0].u_topleft;
- pu8_u_data = PlaneOut[1].pac_data + PlaneOut[1].u_topleft;
- pu8_v_data = PlaneOut[2].pac_data + PlaneOut[2].u_topleft;
-
- /* idem for input buffer */
- pu8_rgbn_data = PlaneIn->pac_data + PlaneIn->u_topleft;
-
- /* get the size of the output image */
- u32_width = PlaneOut[0].u_width;
- u32_height = PlaneOut[0].u_height;
-
- /* set the size of the memory jumps corresponding to row jump in each output plane */
- u32_stride_Y = PlaneOut[0].u_stride;
- u32_stride2_Y= u32_stride_Y << 1;
- u32_stride_U = PlaneOut[1].u_stride;
- u32_stride_V = PlaneOut[2].u_stride;
-
- /* idem for input plane */
- u32_stride_rgb = PlaneIn->u_stride;
- u32_stride_2rgb = u32_stride_rgb << 1;
-
- /* loop on each row of the output image, input coordinates are estimated from output ones */
- /* two YUV rows are computed at each pass */
- for (u32_row = u32_height ;u32_row != 0; u32_row -=2)
- {
- /* update working pointers */
- pu8_yn = pu8_y_data;
- pu8_ys = pu8_yn + u32_stride_Y;
-
- pu8_u = pu8_u_data;
- pu8_v = pu8_v_data;
-
- pu8_rgbn= pu8_rgbn_data;
-
- /* loop on each column of the output image*/
- for (u32_col = u32_width; u32_col != 0 ; u32_col -=2)
- {
- /* get RGB samples of 4 pixels */
- GET_RGB24(i32_r00, i32_g00, i32_b00, pu8_rgbn, 0);
- GET_RGB24(i32_r10, i32_g10, i32_b10, pu8_rgbn, CST_RGB_24_SIZE);
- GET_RGB24(i32_r01, i32_g01, i32_b01, pu8_rgbn, u32_stride_rgb);
- GET_RGB24(i32_r11, i32_g11, i32_b11, pu8_rgbn, u32_stride_rgb + CST_RGB_24_SIZE);
-
- i32_u00 = U24(i32_r00, i32_g00, i32_b00);
- i32_v00 = V24(i32_r00, i32_g00, i32_b00);
- i32_y00 = Y24(i32_r00, i32_g00, i32_b00); /* matrix luminance */
- pu8_yn[0]= (M4VIFI_UInt8)i32_y00;
-
- i32_u10 = U24(i32_r10, i32_g10, i32_b10);
- i32_v10 = V24(i32_r10, i32_g10, i32_b10);
- i32_y10 = Y24(i32_r10, i32_g10, i32_b10);
- pu8_yn[1]= (M4VIFI_UInt8)i32_y10;
-
- i32_u01 = U24(i32_r01, i32_g01, i32_b01);
- i32_v01 = V24(i32_r01, i32_g01, i32_b01);
- i32_y01 = Y24(i32_r01, i32_g01, i32_b01);
- pu8_ys[0]= (M4VIFI_UInt8)i32_y01;
-
- i32_u11 = U24(i32_r11, i32_g11, i32_b11);
- i32_v11 = V24(i32_r11, i32_g11, i32_b11);
- i32_y11 = Y24(i32_r11, i32_g11, i32_b11);
- pu8_ys[1] = (M4VIFI_UInt8)i32_y11;
-
- *pu8_u = (M4VIFI_UInt8)((i32_u00 + i32_u01 + i32_u10 + i32_u11 + 2) >> 2);
- *pu8_v = (M4VIFI_UInt8)((i32_v00 + i32_v01 + i32_v10 + i32_v11 + 2) >> 2);
-
- pu8_rgbn += (CST_RGB_24_SIZE<<1);
- pu8_yn += 2;
- pu8_ys += 2;
-
- pu8_u ++;
- pu8_v ++;
- } /* end of horizontal scanning */
-
- pu8_y_data += u32_stride2_Y;
- pu8_u_data += u32_stride_U;
- pu8_v_data += u32_stride_V;
- pu8_rgbn_data += u32_stride_2rgb;
-
-
- } /* End of vertical scanning */
-
- return M4VIFI_OK;
-}
-
-/** YUV420 to YUV420 */
-/**
- *******************************************************************************************
- * M4VIFI_UInt8 M4VIFI_YUV420toYUV420 (void *pUserData,
- * M4VIFI_ImagePlane *pPlaneIn,
- * M4VIFI_ImagePlane *pPlaneOut)
- * @brief Transform YUV420 image to a YUV420 image.
- * @param pUserData: (IN) User Specific Data (Unused - could be NULL)
- * @param pPlaneIn: (IN) Pointer to YUV plane buffer
- * @param pPlaneOut: (OUT) Pointer to YUV Plane
- * @return M4VIFI_OK: there is no error
- * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in plane height
- * @return M4VIFI_ILLEGAL_FRAME_WIDTH: Error in plane width
- *******************************************************************************************
- */
-
-M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut )
-{
- M4VIFI_Int32 plane_number;
- M4VIFI_UInt32 i;
- M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
-
- for (plane_number = 0; plane_number < 3; plane_number++)
- {
- p_buf_src = &(PlaneIn[plane_number].pac_data[PlaneIn[plane_number].u_topleft]);
- p_buf_dest = &(PlaneOut[plane_number].pac_data[PlaneOut[plane_number].u_topleft]);
- for (i = 0; i < PlaneOut[plane_number].u_height; i++)
- {
- memcpy((void *)p_buf_dest, (void *)p_buf_src ,PlaneOut[plane_number].u_width);
- p_buf_src += PlaneIn[plane_number].u_stride;
- p_buf_dest += PlaneOut[plane_number].u_stride;
- }
- }
- return M4VIFI_OK;
-}
-
-/**
- ***********************************************************************************************
- * M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
- * M4VIFI_ImagePlane *pPlaneOut)
- * @author David Dana (PHILIPS Software)
- * @brief Resizes YUV420 Planar plane.
- * @note Basic structure of the function
- * Loop on each row (step 2)
- * Loop on each column (step 2)
- * Get four Y samples and 1 U & V sample
- * Resize the Y with corresponing U and V samples
- * Place the YUV in the ouput plane
- * end loop column
- * end loop row
- * For resizing bilinear interpolation linearly interpolates along
- * each row, and then uses that result in a linear interpolation down each column.
- * Each estimated pixel in the output image is a weighted
- * combination of its four neighbours. The ratio of compression
- * or dilatation is estimated using input and output sizes.
- * @param pUserData: (IN) User Data
- * @param pPlaneIn: (IN) Pointer to YUV420 (Planar) plane buffer
- * @param pPlaneOut: (OUT) Pointer to YUV420 (Planar) plane
- * @return M4VIFI_OK: there is no error
- * @return M4VIFI_ILLEGAL_FRAME_HEIGHT: Error in height
- * @return M4VIFI_ILLEGAL_FRAME_WIDTH: Error in width
- ***********************************************************************************************
-*/
-M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,
- M4VIFI_ImagePlane *pPlaneIn,
- M4VIFI_ImagePlane *pPlaneOut)
-{
- M4VIFI_UInt8 *pu8_data_in, *pu8_data_out, *pu8dum;
- M4VIFI_UInt32 u32_plane;
- M4VIFI_UInt32 u32_width_in, u32_width_out, u32_height_in, u32_height_out;
- M4VIFI_UInt32 u32_stride_in, u32_stride_out;
- M4VIFI_UInt32 u32_x_inc, u32_y_inc;
- M4VIFI_UInt32 u32_x_accum, u32_y_accum, u32_x_accum_start;
- M4VIFI_UInt32 u32_width, u32_height;
- M4VIFI_UInt32 u32_y_frac;
- M4VIFI_UInt32 u32_x_frac;
- M4VIFI_UInt32 u32_temp_value;
- M4VIFI_UInt8 *pu8_src_top;
- M4VIFI_UInt8 *pu8_src_bottom;
-
- M4VIFI_UInt8 u8Wflag = 0;
- M4VIFI_UInt8 u8Hflag = 0;
- M4VIFI_UInt32 loop = 0;
-
-
- /*
- If input width is equal to output width and input height equal to
- output height then M4VIFI_YUV420toYUV420 is called.
- */
- if ((pPlaneIn[0].u_height == pPlaneOut[0].u_height) &&
- (pPlaneIn[0].u_width == pPlaneOut[0].u_width))
- {
- return M4VIFI_YUV420toYUV420(pUserData, pPlaneIn, pPlaneOut);
- }
-
- /* Check for the YUV width and height are even */
- if ((IS_EVEN(pPlaneIn[0].u_height) == FALSE) ||
- (IS_EVEN(pPlaneOut[0].u_height) == FALSE))
- {
- return M4VIFI_ILLEGAL_FRAME_HEIGHT;
- }
-
- if ((IS_EVEN(pPlaneIn[0].u_width) == FALSE) ||
- (IS_EVEN(pPlaneOut[0].u_width) == FALSE))
- {
- return M4VIFI_ILLEGAL_FRAME_WIDTH;
- }
-
- /* Loop on planes */
- for(u32_plane = 0;u32_plane < PLANES;u32_plane++)
- {
- /* Set the working pointers at the beginning of the input/output data field */
- pu8_data_in = pPlaneIn[u32_plane].pac_data + pPlaneIn[u32_plane].u_topleft;
- pu8_data_out = pPlaneOut[u32_plane].pac_data + pPlaneOut[u32_plane].u_topleft;
-
- /* Get the memory jump corresponding to a row jump */
- u32_stride_in = pPlaneIn[u32_plane].u_stride;
- u32_stride_out = pPlaneOut[u32_plane].u_stride;
-
- /* Set the bounds of the active image */
- u32_width_in = pPlaneIn[u32_plane].u_width;
- u32_height_in = pPlaneIn[u32_plane].u_height;
-
- u32_width_out = pPlaneOut[u32_plane].u_width;
- u32_height_out = pPlaneOut[u32_plane].u_height;
-
- /*
- For the case , width_out = width_in , set the flag to avoid
- accessing one column beyond the input width.In this case the last
- column is replicated for processing
- */
- if (u32_width_out == u32_width_in) {
- u32_width_out = u32_width_out-1;
- u8Wflag = 1;
- }
-
- /* Compute horizontal ratio between src and destination width.*/
- if (u32_width_out >= u32_width_in)
- {
- u32_x_inc = ((u32_width_in-1) * MAX_SHORT) / (u32_width_out-1);
- }
- else
- {
- u32_x_inc = (u32_width_in * MAX_SHORT) / (u32_width_out);
- }
-
- /*
- For the case , height_out = height_in , set the flag to avoid
- accessing one row beyond the input height.In this case the last
- row is replicated for processing
- */
- if (u32_height_out == u32_height_in) {
- u32_height_out = u32_height_out-1;
- u8Hflag = 1;
- }
-
- /* Compute vertical ratio between src and destination height.*/
- if (u32_height_out >= u32_height_in)
- {
- u32_y_inc = ((u32_height_in - 1) * MAX_SHORT) / (u32_height_out-1);
- }
- else
- {
- u32_y_inc = (u32_height_in * MAX_SHORT) / (u32_height_out);
- }
-
- /*
- Calculate initial accumulator value : u32_y_accum_start.
- u32_y_accum_start is coded on 15 bits, and represents a value
- between 0 and 0.5
- */
- if (u32_y_inc >= MAX_SHORT)
- {
- /*
- Keep the fractionnal part, assimung that integer part is coded
- on the 16 high bits and the fractional on the 15 low bits
- */
- u32_y_accum = u32_y_inc & 0xffff;
-
- if (!u32_y_accum)
- {
- u32_y_accum = MAX_SHORT;
- }
-
- u32_y_accum >>= 1;
- }
- else
- {
- u32_y_accum = 0;
- }
-
-
- /*
- Calculate initial accumulator value : u32_x_accum_start.
- u32_x_accum_start is coded on 15 bits, and represents a value
- between 0 and 0.5
- */
- if (u32_x_inc >= MAX_SHORT)
- {
- u32_x_accum_start = u32_x_inc & 0xffff;
-
- if (!u32_x_accum_start)
- {
- u32_x_accum_start = MAX_SHORT;
- }
-
- u32_x_accum_start >>= 1;
- }
- else
- {
- u32_x_accum_start = 0;
- }
-
- u32_height = u32_height_out;
-
- /*
- Bilinear interpolation linearly interpolates along each row, and
- then uses that result in a linear interpolation donw each column.
- Each estimated pixel in the output image is a weighted combination
- of its four neighbours according to the formula:
- F(p',q')=f(p,q)R(-a)R(b)+f(p,q-1)R(-a)R(b-1)+f(p+1,q)R(1-a)R(b)+
- f(p+&,q+1)R(1-a)R(b-1) with R(x) = / x+1 -1 =< x =< 0 \ 1-x
- 0 =< x =< 1 and a (resp. b)weighting coefficient is the distance
- from the nearest neighbor in the p (resp. q) direction
- */
-
- do { /* Scan all the row */
-
- /* Vertical weight factor */
- u32_y_frac = (u32_y_accum>>12)&15;
-
- /* Reinit accumulator */
- u32_x_accum = u32_x_accum_start;
-
- u32_width = u32_width_out;
-
- do { /* Scan along each row */
- pu8_src_top = pu8_data_in + (u32_x_accum >> 16);
- pu8_src_bottom = pu8_src_top + u32_stride_in;
- u32_x_frac = (u32_x_accum >> 12)&15; /* Horizontal weight factor */
-
- /* Weighted combination */
- u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
- pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
- (pu8_src_bottom[0]*(16-u32_x_frac) +
- pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);
-
- *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;
-
- /* Update horizontal accumulator */
- u32_x_accum += u32_x_inc;
- } while(--u32_width);
-
- /*
- This u8Wflag flag gets in to effect if input and output
- width is same, and height may be different. So previous
- pixel is replicated here
- */
- if (u8Wflag) {
- *pu8_data_out = (M4VIFI_UInt8)u32_temp_value;
- }
-
- pu8dum = (pu8_data_out-u32_width_out);
- pu8_data_out = pu8_data_out + u32_stride_out - u32_width_out;
-
- /* Update vertical accumulator */
- u32_y_accum += u32_y_inc;
- if (u32_y_accum>>16) {
- pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * u32_stride_in;
- u32_y_accum &= 0xffff;
- }
- } while(--u32_height);
-
- /*
- This u8Hflag flag gets in to effect if input and output height
- is same, and width may be different. So previous pixel row is
- replicated here
- */
- if (u8Hflag) {
- for(loop =0; loop < (u32_width_out+u8Wflag); loop++) {
- *pu8_data_out++ = (M4VIFI_UInt8)*pu8dum++;
- }
- }
- }
-
- return M4VIFI_OK;
-}
-
-M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering)
-{
- M4OSA_ERR err = M4NO_ERROR;
-
- if(mediaRendering == M4xVSS_kResizing)
- {
- /**
- * Call the resize filter. From the intermediate frame to the encoder image plane */
- err = M4VIFI_ResizeBilinearYUV420toYUV420(M4OSA_NULL, pPlaneIn, pPlaneOut);
- if (M4NO_ERROR != err)
- {
- M4OSA_TRACE1_1("applyRenderingMode: M4ViFilResizeBilinearYUV420toYUV420 returns 0x%x!", err);
- return err;
- }
- }
- else
- {
- M4AIR_Params Params;
- M4OSA_Context m_air_context;
- M4VIFI_ImagePlane pImagePlanesTemp[3];
- M4VIFI_ImagePlane* pPlaneTemp;
- M4OSA_UInt8* pOutPlaneY = pPlaneOut[0].pac_data + pPlaneOut[0].u_topleft;
- M4OSA_UInt8* pOutPlaneU = pPlaneOut[1].pac_data + pPlaneOut[1].u_topleft;
- M4OSA_UInt8* pOutPlaneV = pPlaneOut[2].pac_data + pPlaneOut[2].u_topleft;
- M4OSA_UInt8* pInPlaneY = NULL;
- M4OSA_UInt8* pInPlaneU = NULL;
- M4OSA_UInt8* pInPlaneV = NULL;
- M4OSA_UInt32 i;
-
- /*to keep media aspect ratio*/
- /*Initialize AIR Params*/
- Params.m_inputCoord.m_x = 0;
- Params.m_inputCoord.m_y = 0;
- Params.m_inputSize.m_height = pPlaneIn->u_height;
- Params.m_inputSize.m_width = pPlaneIn->u_width;
- Params.m_outputSize.m_width = pPlaneOut->u_width;
- Params.m_outputSize.m_height = pPlaneOut->u_height;
- Params.m_bOutputStripe = M4OSA_FALSE;
- Params.m_outputOrientation = M4COMMON_kOrientationTopLeft;
-
- /**
- Media rendering: Black borders*/
- if(mediaRendering == M4xVSS_kBlackBorders)
- {
- memset((void *)pPlaneOut[0].pac_data,Y_PLANE_BORDER_VALUE,(pPlaneOut[0].u_height*pPlaneOut[0].u_stride));
- memset((void *)pPlaneOut[1].pac_data,U_PLANE_BORDER_VALUE,(pPlaneOut[1].u_height*pPlaneOut[1].u_stride));
- memset((void *)pPlaneOut[2].pac_data,V_PLANE_BORDER_VALUE,(pPlaneOut[2].u_height*pPlaneOut[2].u_stride));
-
- pImagePlanesTemp[0].u_width = pPlaneOut[0].u_width;
- pImagePlanesTemp[0].u_height = pPlaneOut[0].u_height;
- pImagePlanesTemp[0].u_stride = pPlaneOut[0].u_width;
- pImagePlanesTemp[0].u_topleft = 0;
- pImagePlanesTemp[0].pac_data = M4OSA_NULL;
-
- pImagePlanesTemp[1].u_width = pPlaneOut[1].u_width;
- pImagePlanesTemp[1].u_height = pPlaneOut[1].u_height;
- pImagePlanesTemp[1].u_stride = pPlaneOut[1].u_width;
- pImagePlanesTemp[1].u_topleft = 0;
- pImagePlanesTemp[1].pac_data = M4OSA_NULL;
-
- pImagePlanesTemp[2].u_width = pPlaneOut[2].u_width;
- pImagePlanesTemp[2].u_height = pPlaneOut[2].u_height;
- pImagePlanesTemp[2].u_stride = pPlaneOut[2].u_width;
- pImagePlanesTemp[2].u_topleft = 0;
- pImagePlanesTemp[2].pac_data = M4OSA_NULL;
-
- /* Allocates plan in local image plane structure */
- pImagePlanesTemp[0].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[0].u_width * pImagePlanesTemp[0].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferY") ;
- if(pImagePlanesTemp[0].pac_data == M4OSA_NULL)
- {
- M4OSA_TRACE1_0("Error alloc in applyRenderingMode");
- return M4ERR_ALLOC;
- }
- pImagePlanesTemp[1].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[1].u_width * pImagePlanesTemp[1].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferU") ;
- if(pImagePlanesTemp[1].pac_data == M4OSA_NULL)
- {
-
- M4OSA_TRACE1_0("Error alloc in applyRenderingMode");
- return M4ERR_ALLOC;
- }
- pImagePlanesTemp[2].pac_data = (M4OSA_UInt8*)M4OSA_32bitAlignedMalloc(pImagePlanesTemp[2].u_width * pImagePlanesTemp[2].u_height, M4VS, (M4OSA_Char*)"applyRenderingMode: temporary plane bufferV") ;
- if(pImagePlanesTemp[2].pac_data == M4OSA_NULL)
- {
-
- M4OSA_TRACE1_0("Error alloc in applyRenderingMode");
- return M4ERR_ALLOC;
- }
-
- pInPlaneY = pImagePlanesTemp[0].pac_data ;
- pInPlaneU = pImagePlanesTemp[1].pac_data ;
- pInPlaneV = pImagePlanesTemp[2].pac_data ;
-
- memset((void *)pImagePlanesTemp[0].pac_data,Y_PLANE_BORDER_VALUE,(pImagePlanesTemp[0].u_height*pImagePlanesTemp[0].u_stride));
- memset((void *)pImagePlanesTemp[1].pac_data,U_PLANE_BORDER_VALUE,(pImagePlanesTemp[1].u_height*pImagePlanesTemp[1].u_stride));
- memset((void *)pImagePlanesTemp[2].pac_data,V_PLANE_BORDER_VALUE,(pImagePlanesTemp[2].u_height*pImagePlanesTemp[2].u_stride));
-
- if((M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) /pPlaneIn->u_width) <= pPlaneOut->u_height)//Params.m_inputSize.m_height < Params.m_inputSize.m_width)
- {
- /*it is height so black borders will be on the top and on the bottom side*/
- Params.m_outputSize.m_width = pPlaneOut->u_width;
- Params.m_outputSize.m_height = (M4OSA_UInt32)((pPlaneIn->u_height * pPlaneOut->u_width) /pPlaneIn->u_width);
- /*number of lines at the top*/
- pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_height-Params.m_outputSize.m_height)>>1))*pImagePlanesTemp[0].u_stride;
- pImagePlanesTemp[0].u_height = Params.m_outputSize.m_height;
- pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[1].u_stride;
- pImagePlanesTemp[1].u_height = Params.m_outputSize.m_height>>1;
- pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_height-(Params.m_outputSize.m_height>>1)))>>1)*pImagePlanesTemp[2].u_stride;
- pImagePlanesTemp[2].u_height = Params.m_outputSize.m_height>>1;
- }
- else
- {
- /*it is width so black borders will be on the left and right side*/
- Params.m_outputSize.m_height = pPlaneOut->u_height;
- Params.m_outputSize.m_width = (M4OSA_UInt32)((pPlaneIn->u_width * pPlaneOut->u_height) /pPlaneIn->u_height);
-
- pImagePlanesTemp[0].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[0].u_width-Params.m_outputSize.m_width)>>1));
- pImagePlanesTemp[0].u_width = Params.m_outputSize.m_width;
- pImagePlanesTemp[1].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[1].u_width-(Params.m_outputSize.m_width>>1)))>>1);
- pImagePlanesTemp[1].u_width = Params.m_outputSize.m_width>>1;
- pImagePlanesTemp[2].u_topleft = (M4xVSS_ABS((M4OSA_Int32)(pImagePlanesTemp[2].u_width-(Params.m_outputSize.m_width>>1)))>>1);
- pImagePlanesTemp[2].u_width = Params.m_outputSize.m_width>>1;
- }
-
- /*Width and height have to be even*/
- Params.m_outputSize.m_width = (Params.m_outputSize.m_width>>1)<<1;
- Params.m_outputSize.m_height = (Params.m_outputSize.m_height>>1)<<1;
- Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
- Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
- pImagePlanesTemp[0].u_width = (pImagePlanesTemp[0].u_width>>1)<<1;
- pImagePlanesTemp[1].u_width = (pImagePlanesTemp[1].u_width>>1)<<1;
- pImagePlanesTemp[2].u_width = (pImagePlanesTemp[2].u_width>>1)<<1;
- pImagePlanesTemp[0].u_height = (pImagePlanesTemp[0].u_height>>1)<<1;
- pImagePlanesTemp[1].u_height = (pImagePlanesTemp[1].u_height>>1)<<1;
- pImagePlanesTemp[2].u_height = (pImagePlanesTemp[2].u_height>>1)<<1;
-
- /*Check that values are coherent*/
- if(Params.m_inputSize.m_height == Params.m_outputSize.m_height)
- {
- Params.m_inputSize.m_width = Params.m_outputSize.m_width;
- }
- else if(Params.m_inputSize.m_width == Params.m_outputSize.m_width)
- {
- Params.m_inputSize.m_height = Params.m_outputSize.m_height;
- }
- pPlaneTemp = pImagePlanesTemp;
-
-
- }
-
- /**
- Media rendering: Cropping*/
- if(mediaRendering == M4xVSS_kCropping)
- {
- Params.m_outputSize.m_height = pPlaneOut->u_height;
- Params.m_outputSize.m_width = pPlaneOut->u_width;
- if((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width<Params.m_inputSize.m_height)
- {
- /*height will be cropped*/
- Params.m_inputSize.m_height = (M4OSA_UInt32)((Params.m_outputSize.m_height * Params.m_inputSize.m_width) /Params.m_outputSize.m_width);
- Params.m_inputSize.m_height = (Params.m_inputSize.m_height>>1)<<1;
- Params.m_inputCoord.m_y = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_height - Params.m_inputSize.m_height))>>1);
- }
- else
- {
- /*width will be cropped*/
- Params.m_inputSize.m_width = (M4OSA_UInt32)((Params.m_outputSize.m_width * Params.m_inputSize.m_height) /Params.m_outputSize.m_height);
- Params.m_inputSize.m_width = (Params.m_inputSize.m_width>>1)<<1;
- Params.m_inputCoord.m_x = (M4OSA_Int32)((M4OSA_Int32)((pPlaneIn->u_width - Params.m_inputSize.m_width))>>1);
- }
- pPlaneTemp = pPlaneOut;
- }
-
- /**
- * Call AIR functions */
- err = M4AIR_create(&m_air_context, M4AIR_kYUV420P);
- if(err != M4NO_ERROR)
- {
-
- M4OSA_TRACE1_1("applyRenderingMode: Error when initializing AIR: 0x%x", err);
- for(i=0; i<3; i++)
- {
- if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
- {
- free(pImagePlanesTemp[i].pac_data);
- pImagePlanesTemp[i].pac_data = M4OSA_NULL;
- }
- }
- return err;
- }
-
-
- err = M4AIR_configure(m_air_context, &Params);
- if(err != M4NO_ERROR)
- {
-
- M4OSA_TRACE1_1("applyRenderingMode: Error when configuring AIR: 0x%x", err);
- M4AIR_cleanUp(m_air_context);
- for(i=0; i<3; i++)
- {
- if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
- {
- free(pImagePlanesTemp[i].pac_data);
- pImagePlanesTemp[i].pac_data = M4OSA_NULL;
- }
- }
- return err;
- }
-
- err = M4AIR_get(m_air_context, pPlaneIn, pPlaneTemp);
- if(err != M4NO_ERROR)
- {
- M4OSA_TRACE1_1("applyRenderingMode: Error when getting AIR plane: 0x%x", err);
- M4AIR_cleanUp(m_air_context);
- for(i=0; i<3; i++)
- {
- if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
- {
- free(pImagePlanesTemp[i].pac_data);
- pImagePlanesTemp[i].pac_data = M4OSA_NULL;
- }
- }
- return err;
- }
-
- if(mediaRendering == M4xVSS_kBlackBorders)
- {
- for(i=0; i<pPlaneOut[0].u_height; i++)
- {
- memcpy((void *)pOutPlaneY, (void *)pInPlaneY, pPlaneOut[0].u_width);
- pInPlaneY += pPlaneOut[0].u_width;
- pOutPlaneY += pPlaneOut[0].u_stride;
- }
- for(i=0; i<pPlaneOut[1].u_height; i++)
- {
- memcpy((void *)pOutPlaneU, (void *)pInPlaneU, pPlaneOut[1].u_width);
- pInPlaneU += pPlaneOut[1].u_width;
- pOutPlaneU += pPlaneOut[1].u_stride;
- }
- for(i=0; i<pPlaneOut[2].u_height; i++)
- {
- memcpy((void *)pOutPlaneV, (void *)pInPlaneV, pPlaneOut[2].u_width);
- pInPlaneV += pPlaneOut[2].u_width;
- pOutPlaneV += pPlaneOut[2].u_stride;
- }
-
- for(i=0; i<3; i++)
- {
- if(pImagePlanesTemp[i].pac_data != M4OSA_NULL)
- {
- free(pImagePlanesTemp[i].pac_data);
- pImagePlanesTemp[i].pac_data = M4OSA_NULL;
- }
- }
- }
-
- if (m_air_context != M4OSA_NULL) {
- M4AIR_cleanUp(m_air_context);
- m_air_context = M4OSA_NULL;
- }
- }
-
- return err;
-}
-
-//TODO: remove this code after link with videoartist lib
-/* M4AIR code*/
-#define M4AIR_YUV420_FORMAT_SUPPORTED
-#define M4AIR_YUV420A_FORMAT_SUPPORTED
-
-/************************* COMPILATION CHECKS ***************************/
-#ifndef M4AIR_YUV420_FORMAT_SUPPORTED
-#ifndef M4AIR_BGR565_FORMAT_SUPPORTED
-#ifndef M4AIR_RGB565_FORMAT_SUPPORTED
-#ifndef M4AIR_BGR888_FORMAT_SUPPORTED
-#ifndef M4AIR_RGB888_FORMAT_SUPPORTED
-#ifndef M4AIR_JPG_FORMAT_SUPPORTED
-
-#error "Please define at least one input format for the AIR component"
-
-#endif
-#endif
-#endif
-#endif
-#endif
-#endif
-
-/************************ M4AIR INTERNAL TYPES DEFINITIONS ***********************/
-
-/**
- ******************************************************************************
- * enum M4AIR_States
- * @brief The following enumeration defines the internal states of the AIR.
- ******************************************************************************
-*/
-typedef enum
-{
- M4AIR_kCreated, /**< State after M4AIR_create has been called */
- M4AIR_kConfigured /**< State after M4AIR_configure has been called */
-}M4AIR_States;
-
-
-/**
- ******************************************************************************
- * struct M4AIR_InternalContext
- * @brief The following structure is the internal context of the AIR.
- ******************************************************************************
-*/
-typedef struct
-{
- M4AIR_States m_state; /**< Internal state */
- M4AIR_InputFormatType m_inputFormat; /**< Input format like YUV420Planar, RGB565, JPG, etc ... */
- M4AIR_Params m_params; /**< Current input Parameter of the processing */
- M4OSA_UInt32 u32_x_inc[4]; /**< ratio between input and ouput width for YUV */
- M4OSA_UInt32 u32_y_inc[4]; /**< ratio between input and ouput height for YUV */
- M4OSA_UInt32 u32_x_accum_start[4]; /**< horizontal initial accumulator value */
- M4OSA_UInt32 u32_y_accum_start[4]; /**< Vertical initial accumulator value */
- M4OSA_UInt32 u32_x_accum[4]; /**< save of horizontal accumulator value */
- M4OSA_UInt32 u32_y_accum[4]; /**< save of vertical accumulator value */
- M4OSA_UInt8* pu8_data_in[4]; /**< Save of input plane pointers in case of stripe mode */
- M4OSA_UInt32 m_procRows; /**< Number of processed rows, used in stripe mode only */
- M4OSA_Bool m_bOnlyCopy; /**< Flag to know if we just perform a copy or a bilinear interpolation */
- M4OSA_Bool m_bFlipX; /**< Depend on output orientation, used during processing to revert processing order in X coordinates */
- M4OSA_Bool m_bFlipY; /**< Depend on output orientation, used during processing to revert processing order in Y coordinates */
- M4OSA_Bool m_bRevertXY; /**< Depend on output orientation, used during processing to revert X and Y processing order (+-90° rotation) */
-}M4AIR_InternalContext;
-
-/********************************* MACROS *******************************/
-#define M4ERR_CHECK_NULL_RETURN_VALUE(retval, pointer) if ((pointer) == M4OSA_NULL) return ((M4OSA_ERR)(retval));
-
-
-/********************** M4AIR PUBLIC API IMPLEMENTATION ********************/
-/**
- ******************************************************************************
- * M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)
- * @author Arnaud Collard
- * @brief This function initialize an instance of the AIR.
- * @param pContext: (IN/OUT) Address of the context to create
- * @param inputFormat: (IN) input format type.
- * @return M4NO_ERROR: there is no error
- * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only). Invalid formatType
- * @return M4ERR_ALLOC: No more memory is available
- ******************************************************************************
-*/
-M4OSA_ERR M4AIR_create(M4OSA_Context* pContext,M4AIR_InputFormatType inputFormat)
-{
- M4OSA_ERR err = M4NO_ERROR ;
- M4AIR_InternalContext* pC = M4OSA_NULL ;
- /* Check that the address on the context is not NULL */
- M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
-
- *pContext = M4OSA_NULL ;
-
- /* Internal Context creation */
- pC = (M4AIR_InternalContext*)M4OSA_32bitAlignedMalloc(sizeof(M4AIR_InternalContext), M4AIR, (M4OSA_Char*)"AIR internal context") ;
- M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_ALLOC, pC) ;
-
-
- /* Check if the input format is supported */
- switch(inputFormat)
- {
-#ifdef M4AIR_YUV420_FORMAT_SUPPORTED
- case M4AIR_kYUV420P:
- break ;
-#endif
-#ifdef M4AIR_YUV420A_FORMAT_SUPPORTED
- case M4AIR_kYUV420AP:
- break ;
-#endif
- default:
- err = M4ERR_AIR_FORMAT_NOT_SUPPORTED;
- goto M4AIR_create_cleanup ;
- }
-
- /**< Save input format and update state */
- pC->m_inputFormat = inputFormat;
- pC->m_state = M4AIR_kCreated;
-
- /* Return the context to the caller */
- *pContext = pC ;
-
- return M4NO_ERROR ;
-
-M4AIR_create_cleanup:
- /* Error management : we destroy the context if needed */
- if(M4OSA_NULL != pC)
- {
- free(pC) ;
- }
-
- *pContext = M4OSA_NULL ;
-
- return err ;
-}
-
-
-
-/**
- ******************************************************************************
- * M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)
- * @author Arnaud Collard
- * @brief This function destroys an instance of the AIR component
- * @param pContext: (IN) Context identifying the instance to destroy
- * @return M4NO_ERROR: there is no error
- * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
- * @return M4ERR_STATE: Internal state is incompatible with this function call.
-******************************************************************************
-*/
-M4OSA_ERR M4AIR_cleanUp(M4OSA_Context pContext)
-{
- M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
-
- M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
-
- /**< Check state */
- if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))
- {
- return M4ERR_STATE;
- }
- free(pC) ;
-
- return M4NO_ERROR ;
-
-}
-
-
-/**
- ******************************************************************************
- * M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)
- * @brief This function will configure the AIR.
- * @note It will set the input and output coordinates and sizes,
- * and indicates if we will proceed in stripe or not.
- * In case a M4AIR_get in stripe mode was on going, it will cancel this previous processing
- * and reset the get process.
- * @param pContext: (IN) Context identifying the instance
- * @param pParams->m_bOutputStripe:(IN) Stripe mode.
- * @param pParams->m_inputCoord: (IN) X,Y coordinates of the first valid pixel in input.
- * @param pParams->m_inputSize: (IN) input ROI size.
- * @param pParams->m_outputSize: (IN) output size.
- * @return M4NO_ERROR: there is no error
- * @return M4ERR_ALLOC: No more memory space to add a new effect.
- * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
- * @return M4ERR_AIR_FORMAT_NOT_SUPPORTED: the requested input format is not supported.
- ******************************************************************************
-*/
-M4OSA_ERR M4AIR_configure(M4OSA_Context pContext, M4AIR_Params* pParams)
-{
- M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
- M4OSA_UInt32 i,u32_width_in, u32_width_out, u32_height_in, u32_height_out;
- M4OSA_UInt32 nb_planes;
-
- M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;
-
- if(M4AIR_kYUV420AP == pC->m_inputFormat)
- {
- nb_planes = 4;
- }
- else
- {
- nb_planes = 3;
- }
-
- /**< Check state */
- if((M4AIR_kCreated != pC->m_state)&&(M4AIR_kConfigured != pC->m_state))
- {
- return M4ERR_STATE;
- }
-
- /** Save parameters */
- pC->m_params = *pParams;
-
- /* Check for the input&output width and height are even */
- if( ((pC->m_params.m_inputSize.m_height)&0x1) ||
- ((pC->m_params.m_inputSize.m_height)&0x1))
- {
- return M4ERR_AIR_ILLEGAL_FRAME_SIZE;
- }
-
- if( ((pC->m_params.m_inputSize.m_width)&0x1) ||
- ((pC->m_params.m_inputSize.m_width)&0x1))
- {
- return M4ERR_AIR_ILLEGAL_FRAME_SIZE;
- }
- if(((pC->m_params.m_inputSize.m_width) == (pC->m_params.m_outputSize.m_width))
- &&((pC->m_params.m_inputSize.m_height) == (pC->m_params.m_outputSize.m_height)))
- {
- /**< No resize in this case, we will just copy input in output */
- pC->m_bOnlyCopy = M4OSA_TRUE;
- }
- else
- {
- pC->m_bOnlyCopy = M4OSA_FALSE;
-
- /**< Initialize internal variables used for resize filter */
- for(i=0;i<nb_planes;i++)
- {
-
- u32_width_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_width:(pC->m_params.m_inputSize.m_width+1)>>1;
- u32_height_in = ((i==0)||(i==3))?pC->m_params.m_inputSize.m_height:(pC->m_params.m_inputSize.m_height+1)>>1;
- u32_width_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_width:(pC->m_params.m_outputSize.m_width+1)>>1;
- u32_height_out = ((i==0)||(i==3))?pC->m_params.m_outputSize.m_height:(pC->m_params.m_outputSize.m_height+1)>>1;
-
- /* Compute horizontal ratio between src and destination width.*/
- if (u32_width_out >= u32_width_in)
- {
- pC->u32_x_inc[i] = ((u32_width_in-1) * 0x10000) / (u32_width_out-1);
- }
- else
- {
- pC->u32_x_inc[i] = (u32_width_in * 0x10000) / (u32_width_out);
- }
-
- /* Compute vertical ratio between src and destination height.*/
- if (u32_height_out >= u32_height_in)
- {
- pC->u32_y_inc[i] = ((u32_height_in - 1) * 0x10000) / (u32_height_out-1);
- }
- else
- {
- pC->u32_y_inc[i] = (u32_height_in * 0x10000) / (u32_height_out);
- }
-
- /*
- Calculate initial accumulator value : u32_y_accum_start.
- u32_y_accum_start is coded on 15 bits, and represents a value between 0 and 0.5
- */
- if (pC->u32_y_inc[i] >= 0x10000)
- {
- /*
- Keep the fractionnal part, assimung that integer part is coded
- on the 16 high bits and the fractionnal on the 15 low bits
- */
- pC->u32_y_accum_start[i] = pC->u32_y_inc[i] & 0xffff;
-
- if (!pC->u32_y_accum_start[i])
- {
- pC->u32_y_accum_start[i] = 0x10000;
- }
-
- pC->u32_y_accum_start[i] >>= 1;
- }
- else
- {
- pC->u32_y_accum_start[i] = 0;
- }
- /**< Take into account that Y coordinate can be odd
- in this case we have to put a 0.5 offset
- for U and V plane as there a 2 times sub-sampled vs Y*/
- if((pC->m_params.m_inputCoord.m_y&0x1)&&((i==1)||(i==2)))
- {
- pC->u32_y_accum_start[i] += 0x8000;
- }
-
- /*
- Calculate initial accumulator value : u32_x_accum_start.
- u32_x_accum_start is coded on 15 bits, and represents a value between 0 and 0.5
- */
-
- if (pC->u32_x_inc[i] >= 0x10000)
- {
- pC->u32_x_accum_start[i] = pC->u32_x_inc[i] & 0xffff;
-
- if (!pC->u32_x_accum_start[i])
- {
- pC->u32_x_accum_start[i] = 0x10000;
- }
-
- pC->u32_x_accum_start[i] >>= 1;
- }
- else
- {
- pC->u32_x_accum_start[i] = 0;
- }
- /**< Take into account that X coordinate can be odd
- in this case we have to put a 0.5 offset
- for U and V plane as there a 2 times sub-sampled vs Y*/
- if((pC->m_params.m_inputCoord.m_x&0x1)&&((i==1)||(i==2)))
- {
- pC->u32_x_accum_start[i] += 0x8000;
- }
- }
- }
-
- /**< Reset variable used for stripe mode */
- pC->m_procRows = 0;
-
- /**< Initialize var for X/Y processing order according to orientation */
- pC->m_bFlipX = M4OSA_FALSE;
- pC->m_bFlipY = M4OSA_FALSE;
- pC->m_bRevertXY = M4OSA_FALSE;
- switch(pParams->m_outputOrientation)
- {
- case M4COMMON_kOrientationTopLeft:
- break;
- case M4COMMON_kOrientationTopRight:
- pC->m_bFlipX = M4OSA_TRUE;
- break;
- case M4COMMON_kOrientationBottomRight:
- pC->m_bFlipX = M4OSA_TRUE;
- pC->m_bFlipY = M4OSA_TRUE;
- break;
- case M4COMMON_kOrientationBottomLeft:
- pC->m_bFlipY = M4OSA_TRUE;
- break;
- case M4COMMON_kOrientationLeftTop:
- pC->m_bRevertXY = M4OSA_TRUE;
- break;
- case M4COMMON_kOrientationRightTop:
- pC->m_bRevertXY = M4OSA_TRUE;
- pC->m_bFlipY = M4OSA_TRUE;
- break;
- case M4COMMON_kOrientationRightBottom:
- pC->m_bRevertXY = M4OSA_TRUE;
- pC->m_bFlipX = M4OSA_TRUE;
- pC->m_bFlipY = M4OSA_TRUE;
- break;
- case M4COMMON_kOrientationLeftBottom:
- pC->m_bRevertXY = M4OSA_TRUE;
- pC->m_bFlipX = M4OSA_TRUE;
- break;
- default:
- return M4ERR_PARAMETER;
- }
- /**< Update state */
- pC->m_state = M4AIR_kConfigured;
-
- return M4NO_ERROR ;
-}
-
-
/**
 ******************************************************************************
 * M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)
 * @brief This function will provide the requested resized area of interest according to settings
 * provided in M4AIR_configure.
 * @note In case the input format type is JPEG, input plane(s)
 * in pIn is not used. In normal mode, dimension specified in output plane(s) structure must be the
 * same than the one specified in M4AIR_configure. In stripe mode, only the width will be the same,
 * height will be taken as the stripe height (typically 16).
 * In normal mode, this function is call once to get the full output picture. In stripe mode, it is called
 * for each stripe till the whole picture has been retrieved,and the position of the output stripe in the output picture
 * is internally incremented at each step.
 * Any call to M4AIR_configure during stripe process will reset this one to the beginning of the output picture.
 * @param pContext: (IN) Context identifying the instance
 * @param pIn: (IN) Plane structure containing input Plane(s).
 * @param pOut: (IN/OUT) Plane structure containing output Plane(s).
 * @return M4NO_ERROR: there is no error
 * @return M4ERR_ALLOC: No more memory space to add a new effect.
 * @return M4ERR_PARAMETER: pContext is M4OSA_NULL (debug only).
 ******************************************************************************
*/
M4OSA_ERR M4AIR_get(M4OSA_Context pContext, M4VIFI_ImagePlane* pIn, M4VIFI_ImagePlane* pOut)
{
    M4AIR_InternalContext* pC = (M4AIR_InternalContext*)pContext ;
    M4OSA_UInt32 i,j,k,u32_x_frac,u32_y_frac,u32_x_accum,u32_y_accum,u32_shift;
    M4OSA_UInt8 *pu8_data_in, *pu8_data_in_org, *pu8_data_in_tmp, *pu8_data_out;
    M4OSA_UInt8 *pu8_src_top;
    M4OSA_UInt8 *pu8_src_bottom;
    M4OSA_UInt32 u32_temp_value;
    M4OSA_Int32 i32_tmp_offset;
    M4OSA_UInt32 nb_planes;

    M4ERR_CHECK_NULL_RETURN_VALUE(M4ERR_PARAMETER, pContext) ;

    /**< Check state: M4AIR_configure must have been called first */
    if(M4AIR_kConfigured != pC->m_state)
    {
        return M4ERR_STATE;
    }

    /* YUV420AP carries an extra alpha plane (index 3) */
    if(M4AIR_kYUV420AP == pC->m_inputFormat)
    {
        nb_planes = 4;
    }
    else
    {
        nb_planes = 3;
    }

    /**< Loop on each plane: 0=Y, 1=U, 2=V, (3=alpha) */
    for(i=0;i<nb_planes;i++)
    {

        /* Set the working pointers at the beginning of the input/output data field */

        u32_shift = ((i==0)||(i==3))?0:1; /**< 0 for full-size Y/alpha, 1 for 2x-subsampled chroma */

        if((M4OSA_FALSE == pC->m_params.m_bOutputStripe)||((M4OSA_TRUE == pC->m_params.m_bOutputStripe)&&(0 == pC->m_procRows)))
        {
            /**< For input, take care about ROI */
            pu8_data_in = pIn[i].pac_data + pIn[i].u_topleft + (pC->m_params.m_inputCoord.m_x>>u32_shift)
                + (pC->m_params.m_inputCoord.m_y >> u32_shift) * pIn[i].u_stride;

            /** Go at end of line/column in case X/Y scanning is flipped */
            if(M4OSA_TRUE == pC->m_bFlipX)
            {
                pu8_data_in += ((pC->m_params.m_inputSize.m_width)>>u32_shift) -1 ;
            }
            if(M4OSA_TRUE == pC->m_bFlipY)
            {
                pu8_data_in += ((pC->m_params.m_inputSize.m_height>>u32_shift) -1) * pIn[i].u_stride;
            }

            /**< Initialize accumulators in case we are using them (bilinear interpolation) */
            if( M4OSA_FALSE == pC->m_bOnlyCopy)
            {
                pC->u32_x_accum[i] = pC->u32_x_accum_start[i];
                pC->u32_y_accum[i] = pC->u32_y_accum_start[i];
            }

        }
        else
        {
            /**< In case of stripe mode for other than first stripe, we need to recover input pointer from internal context */
            pu8_data_in = pC->pu8_data_in[i];
        }

        /**< In every mode, output data are at the beginning of the output plane */
        pu8_data_out = pOut[i].pac_data + pOut[i].u_topleft;

        /**< Initialize input offset applied after each pixel:
             a negative stride walks the input bottom-up when Y is flipped */
        if(M4OSA_FALSE == pC->m_bFlipY)
        {
            i32_tmp_offset = pIn[i].u_stride;
        }
        else
        {
            i32_tmp_offset = -pIn[i].u_stride;
        }

        /**< In this case, no bilinear interpolation is needed as input and output dimensions are the same */
        if( M4OSA_TRUE == pC->m_bOnlyCopy)
        {
            /**< No +-90 degree rotation */
            if(M4OSA_FALSE == pC->m_bRevertXY)
            {
                /**< No flip on X abscissa: whole rows can be copied at once */
                if(M4OSA_FALSE == pC->m_bFlipX)
                {
                    M4OSA_UInt32 loc_height = pOut[i].u_height;
                    M4OSA_UInt32 loc_width = pOut[i].u_width;
                    M4OSA_UInt32 loc_stride = pIn[i].u_stride;
                    /**< Loop on each row */
                    for (j=0; j<loc_height; j++)
                    {
                        /**< Copy one whole line */
                        memcpy((void *)pu8_data_out, (void *)pu8_data_in, loc_width);

                        /**< Update pointers */
                        pu8_data_out += pOut[i].u_stride;
                        if(M4OSA_FALSE == pC->m_bFlipY)
                        {
                            pu8_data_in += loc_stride;
                        }
                        else
                        {
                            pu8_data_in -= loc_stride;
                        }
                    }
                }
                else
                {
                    /**< X flip: copy pixel by pixel, reading the row backwards */
                    /**< Loop on each row */
                    for(j=0;j<pOut[i].u_height;j++)
                    {
                        /**< Loop on each pixel of 1 row */
                        for(k=0;k<pOut[i].u_width;k++)
                        {
                            *pu8_data_out++ = *pu8_data_in--;
                        }

                        /**< Update pointers */
                        pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);

                        /* Undo the row walk-back, then step one input row */
                        pu8_data_in += pOut[i].u_width + i32_tmp_offset;

                    }
                }
            }
            /**< Here we have a +-90 degree rotation: output rows map to input columns */
            else
            {

                /**< Loop on each row */
                for(j=0;j<pOut[i].u_height;j++)
                {
                    pu8_data_in_tmp = pu8_data_in;

                    /**< Loop on each pixel of 1 row */
                    for(k=0;k<pOut[i].u_width;k++)
                    {
                        *pu8_data_out++ = *pu8_data_in_tmp;

                        /**< Update input pointer in order to go to next/previous line */
                        pu8_data_in_tmp += i32_tmp_offset;
                    }

                    /**< Update pointers */
                    pu8_data_out += (pOut[i].u_stride - pOut[i].u_width);
                    if(M4OSA_FALSE == pC->m_bFlipX)
                    {
                        pu8_data_in ++;
                    }
                    else
                    {
                        pu8_data_in --;
                    }
                }
            }
        }
        /**< Bilinear interpolation (resize path). Accumulators are 16.16 fixed
             point; the top 4 bits of the fraction (bits 12..15) give a 0..15
             blend weight for the 2x2 neighborhood. */
        else
        {

            if(3 != i) /**< other than alpha plane */
            {
                /**< No +-90 degree rotation */
                if(M4OSA_FALSE == pC->m_bRevertXY)
                {

                    /**< Loop on each row */
                    for(j=0;j<pOut[i].u_height;j++)
                    {
                        /* Vertical weight factor */
                        u32_y_frac = (pC->u32_y_accum[i]>>12)&15;

                        /* Reinit horizontal weight factor */
                        u32_x_accum = pC->u32_x_accum_start[i];



                        if(M4OSA_TRUE == pC->m_bFlipX)
                        {

                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {

                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of horizontal weight factor */

                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination of the 2x2 neighborhood
                                   (indices [1]/[0] swapped vs the non-flipped
                                   case because we scan right-to-left) */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update horizontal accumulator */
                                u32_x_accum += pC->u32_x_inc[i];
                            }
                        }

                        else
                        {
                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {
                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of horizontal weight factor */

                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update horizontal accumulator */
                                u32_x_accum += pC->u32_x_inc[i];
                            }

                        }

                        pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

                        /* Update vertical accumulator; when its integer part
                           advances, step that many input rows */
                        pC->u32_y_accum[i] += pC->u32_y_inc[i];
                        if (pC->u32_y_accum[i]>>16)
                        {
                            pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;
                            pC->u32_y_accum[i] &= 0xffff;
                        }
                    }
                }
                /** +-90 degree rotation: X and Y roles are swapped */
                else
                {
                    pu8_data_in_org = pu8_data_in;

                    /**< Loop on each output row */
                    for(j=0;j<pOut[i].u_height;j++)
                    {
                        /* Horizontal weight factor */
                        u32_x_frac = (pC->u32_x_accum[i]>>12)&15;

                        /* Reinit accumulator */
                        u32_y_accum = pC->u32_y_accum_start[i];

                        if(M4OSA_TRUE == pC->m_bFlipX)
                        {

                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {

                                u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */


                                pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update vertical accumulator */
                                u32_y_accum += pC->u32_y_inc[i];
                                if (u32_y_accum>>16)
                                {
                                    pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
                                    u32_y_accum &= 0xffff;
                                }

                            }
                        }
                        else
                        {
                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {

                                u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

                                pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update vertical accumulator */
                                u32_y_accum += pC->u32_y_inc[i];
                                if (u32_y_accum>>16)
                                {
                                    pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
                                    u32_y_accum &= 0xffff;
                                }
                            }
                        }
                        pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

                        /* Update horizontal accumulator */
                        pC->u32_x_accum[i] += pC->u32_x_inc[i];

                        pu8_data_in = pu8_data_in_org;
                    }

                }
            }/** 3 != i */
            /**< Alpha plane: same scanning as above, but the interpolated value
                 is binarized ((v >> 7) * 0xff maps 0..127 -> 0, 128..255 -> 255)
                 so resizing never produces semi-transparent alpha. */
            else
            {
                /**< No +-90 degree rotation */
                if(M4OSA_FALSE == pC->m_bRevertXY)
                {

                    /**< Loop on each row */
                    for(j=0;j<pOut[i].u_height;j++)
                    {
                        /* Vertical weight factor */
                        u32_y_frac = (pC->u32_y_accum[i]>>12)&15;

                        /* Reinit horizontal weight factor */
                        u32_x_accum = pC->u32_x_accum_start[i];



                        if(M4OSA_TRUE == pC->m_bFlipX)
                        {

                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {

                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of horizontal weight factor */

                                pu8_src_top = (pu8_data_in - (u32_x_accum >> 16)) -1 ;

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

                                u32_temp_value= (u32_temp_value >> 7)*0xff;

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update horizontal accumulator */
                                u32_x_accum += pC->u32_x_inc[i];
                            }
                        }

                        else
                        {
                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {
                                u32_x_frac = (u32_x_accum >> 12)&15; /* Fraction of horizontal weight factor */

                                pu8_src_top = pu8_data_in + (u32_x_accum >> 16);

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

                                u32_temp_value= (u32_temp_value >> 7)*0xff;

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update horizontal accumulator */
                                u32_x_accum += pC->u32_x_inc[i];
                            }

                        }

                        pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

                        /* Update vertical accumulator */
                        pC->u32_y_accum[i] += pC->u32_y_inc[i];
                        if (pC->u32_y_accum[i]>>16)
                        {
                            pu8_data_in = pu8_data_in + (pC->u32_y_accum[i] >> 16) * i32_tmp_offset;
                            pC->u32_y_accum[i] &= 0xffff;
                        }
                    }

                } /**< M4OSA_FALSE == pC->m_bRevertXY */
                /** +-90 degree rotation */
                else
                {
                    pu8_data_in_org = pu8_data_in;

                    /**< Loop on each output row */
                    for(j=0;j<pOut[i].u_height;j++)
                    {
                        /* Horizontal weight factor */
                        u32_x_frac = (pC->u32_x_accum[i]>>12)&15;

                        /* Reinit accumulator */
                        u32_y_accum = pC->u32_y_accum_start[i];

                        if(M4OSA_TRUE == pC->m_bFlipX)
                        {

                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {

                                u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */


                                pu8_src_top = (pu8_data_in - (pC->u32_x_accum[i] >> 16)) - 1;

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[1]*(16-u32_x_frac) +
                                                                 pu8_src_top[0]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[1]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[0]*u32_x_frac)*u32_y_frac )>>8);

                                u32_temp_value= (u32_temp_value >> 7)*0xff;

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update vertical accumulator */
                                u32_y_accum += pC->u32_y_inc[i];
                                if (u32_y_accum>>16)
                                {
                                    pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
                                    u32_y_accum &= 0xffff;
                                }

                            }
                        }
                        else
                        {
                            /**< Loop on each output pixel in a row */
                            for(k=0;k<pOut[i].u_width;k++)
                            {

                                u32_y_frac = (u32_y_accum >> 12)&15; /* Vertical weight factor */

                                pu8_src_top = pu8_data_in + (pC->u32_x_accum[i] >> 16);

                                pu8_src_bottom = pu8_src_top + i32_tmp_offset;

                                /* Weighted combination */
                                u32_temp_value = (M4VIFI_UInt8)(((pu8_src_top[0]*(16-u32_x_frac) +
                                                                 pu8_src_top[1]*u32_x_frac)*(16-u32_y_frac) +
                                                                (pu8_src_bottom[0]*(16-u32_x_frac) +
                                                                 pu8_src_bottom[1]*u32_x_frac)*u32_y_frac )>>8);

                                u32_temp_value= (u32_temp_value >> 7)*0xff;

                                *pu8_data_out++ = (M4VIFI_UInt8)u32_temp_value;

                                /* Update vertical accumulator */
                                u32_y_accum += pC->u32_y_inc[i];
                                if (u32_y_accum>>16)
                                {
                                    pu8_data_in = pu8_data_in + (u32_y_accum >> 16) * i32_tmp_offset;
                                    u32_y_accum &= 0xffff;
                                }
                            }
                        }
                        pu8_data_out += pOut[i].u_stride - pOut[i].u_width;

                        /* Update horizontal accumulator */
                        pC->u32_x_accum[i] += pC->u32_x_inc[i];

                        pu8_data_in = pu8_data_in_org;

                    }
                } /**< M4OSA_TRUE == pC->m_bRevertXY */
            }/** 3 == i */
        }
        /**< In case of stripe mode, save current input pointer */
        if(M4OSA_TRUE == pC->m_params.m_bOutputStripe)
        {
            pC->pu8_data_in[i] = pu8_data_in;
        }
    }

    /**< Update number of processed rows, reset it if we have finished with the whole processing */
    pC->m_procRows += pOut[0].u_height;
    if(M4OSA_FALSE == pC->m_bRevertXY)
    {
        if(pC->m_params.m_outputSize.m_height <= pC->m_procRows) pC->m_procRows = 0;
    }
    else
    {
        if(pC->m_params.m_outputSize.m_width <= pC->m_procRows) pC->m_procRows = 0;
    }

    return M4NO_ERROR ;

}
-/*+ Handle the image files here */
-
-/**
- ******************************************************************************
- * M4OSA_ERR LvGetImageThumbNail(M4OSA_UChar *fileName, M4OSA_Void **pBuffer)
- * @brief This function gives YUV420 buffer of a given image file (in argb888 format)
- * @Note: The caller of the function is responsible to free the yuv buffer allocated
- * @param fileName: (IN) Path to the filename of the image argb data
- * @param height: (IN) Height of the image
- * @param width: (OUT) pBuffer pointer to the address where the yuv data address needs to be returned.
- * @return M4NO_ERROR: there is no error
- * @return M4ERR_ALLOC: No more memory space to add a new effect.
- * @return M4ERR_FILE_NOT_FOUND: if the file passed does not exists.
- ******************************************************************************
-*/
-M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer) {
-
- M4VIFI_ImagePlane rgbPlane, *yuvPlane;
- M4OSA_UInt32 frameSize_argb = (width * height * 4); // argb data
- M4OSA_Context lImageFileFp = M4OSA_NULL;
- M4OSA_ERR err = M4NO_ERROR;
-
- M4OSA_UInt8 *pTmpData = (M4OSA_UInt8*) M4OSA_32bitAlignedMalloc(frameSize_argb, M4VS, (M4OSA_Char*)"Image argb data");
- if(pTmpData == M4OSA_NULL) {
- ALOGE("Failed to allocate memory for Image clip");
- return M4ERR_ALLOC;
- }
-
- /** Read the argb data from the passed file. */
- M4OSA_ERR lerr = M4OSA_fileReadOpen(&lImageFileFp, (M4OSA_Void *) fileName, M4OSA_kFileRead);
-
- if((lerr != M4NO_ERROR) || (lImageFileFp == M4OSA_NULL))
- {
- ALOGE("LVPreviewController: Can not open the file ");
- free(pTmpData);
- return M4ERR_FILE_NOT_FOUND;
- }
- lerr = M4OSA_fileReadData(lImageFileFp, (M4OSA_MemAddr8)pTmpData, &frameSize_argb);
- if(lerr != M4NO_ERROR)
- {
- ALOGE("LVPreviewController: can not read the data ");
- M4OSA_fileReadClose(lImageFileFp);
- free(pTmpData);
- return lerr;
- }
- M4OSA_fileReadClose(lImageFileFp);
-
- M4OSA_UInt32 frameSize = (width * height * 3); //Size of YUV420 data.
- rgbPlane.pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS, (M4OSA_Char*)"Image clip RGB888 data");
- if(rgbPlane.pac_data == M4OSA_NULL)
- {
- ALOGE("Failed to allocate memory for Image clip");
- free(pTmpData);
- return M4ERR_ALLOC;
- }
-
- /** Remove the alpha channel */
- for (M4OSA_UInt32 i=0, j = 0; i < frameSize_argb; i++) {
- if ((i % 4) == 0) continue;
- rgbPlane.pac_data[j] = pTmpData[i];
- j++;
- }
- free(pTmpData);
-
-#ifdef FILE_DUMP
- FILE *fp = fopen("/sdcard/Input/test_rgb.raw", "wb");
- if(fp == NULL)
- ALOGE("Errors file can not be created");
- else {
- fwrite(rgbPlane.pac_data, frameSize, 1, fp);
- fclose(fp);
- }
-#endif
- rgbPlane.u_height = height;
- rgbPlane.u_width = width;
- rgbPlane.u_stride = width*3;
- rgbPlane.u_topleft = 0;
-
- yuvPlane = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(3*sizeof(M4VIFI_ImagePlane),
- M4VS, (M4OSA_Char*)"M4xVSS_internalConvertRGBtoYUV: Output plane YUV");
- yuvPlane[0].u_height = height;
- yuvPlane[0].u_width = width;
- yuvPlane[0].u_stride = width;
- yuvPlane[0].u_topleft = 0;
- yuvPlane[0].pac_data = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, M4VS, (M4OSA_Char*)"imageClip YUV data");
-
- yuvPlane[1].u_height = yuvPlane[0].u_height >>1;
- yuvPlane[1].u_width = yuvPlane[0].u_width >> 1;
- yuvPlane[1].u_stride = yuvPlane[1].u_width;
- yuvPlane[1].u_topleft = 0;
- yuvPlane[1].pac_data = (M4VIFI_UInt8*)(yuvPlane[0].pac_data + yuvPlane[0].u_height * yuvPlane[0].u_width);
-
- yuvPlane[2].u_height = yuvPlane[0].u_height >>1;
- yuvPlane[2].u_width = yuvPlane[0].u_width >> 1;
- yuvPlane[2].u_stride = yuvPlane[2].u_width;
- yuvPlane[2].u_topleft = 0;
- yuvPlane[2].pac_data = (M4VIFI_UInt8*)(yuvPlane[1].pac_data + yuvPlane[1].u_height * yuvPlane[1].u_width);
-
-
- err = M4VIFI_RGB888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);
- //err = M4VIFI_BGR888toYUV420(M4OSA_NULL, &rgbPlane, yuvPlane);
- if(err != M4NO_ERROR)
- {
- ALOGE("error when converting from RGB to YUV: 0x%x\n", (unsigned int)err);
- }
- free(rgbPlane.pac_data);
-
- //ALOGE("RGB to YUV done");
-#ifdef FILE_DUMP
- FILE *fp1 = fopen("/sdcard/Input/test_yuv.raw", "wb");
- if(fp1 == NULL)
- ALOGE("Errors file can not be created");
- else {
- fwrite(yuvPlane[0].pac_data, yuvPlane[0].u_height * yuvPlane[0].u_width * 1.5, 1, fp1);
- fclose(fp1);
- }
-#endif
- *pBuffer = yuvPlane[0].pac_data;
- free(yuvPlane);
- return M4NO_ERROR;
-
-}
-M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,
- M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer,
- M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight) {
-
- //Y plane
- plane[0].u_width = width;
- plane[0].u_height = height;
- plane[0].u_stride = reportedWidth;
- plane[0].u_topleft = 0;
- plane[0].pac_data = buffer;
-
- // U plane
- plane[1].u_width = width/2;
- plane[1].u_height = height/2;
- plane[1].u_stride = reportedWidth >> 1;
- plane[1].u_topleft = 0;
- plane[1].pac_data = buffer+(reportedWidth*reportedHeight);
-
- // V Plane
- plane[2].u_width = width/2;
- plane[2].u_height = height/2;
- plane[2].u_stride = reportedWidth >> 1;
- plane[2].u_topleft = 0;
- plane[2].pac_data = plane[1].pac_data + ((reportedWidth/2)*(reportedHeight/2));
-}
-
/**
 * Fills the three plane descriptors for a YV12 buffer (Y, then V, then U,
 * with chroma strides aligned to 16 bytes per the Android YV12 definition).
 * Note plane[1] is U and plane[2] is V here, matching the M4VIFI convention.
 */
M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,
    M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride,
    M4VIFI_UInt8 *buffer) {

    //Y plane
    plane[0].u_width = width;
    plane[0].u_height = height;
    plane[0].u_stride = stride;
    plane[0].u_topleft = 0;
    plane[0].pac_data = buffer;

    // U plane: placed after Y and after the V plane (YV12 order is Y, V, U)
    plane[1].u_width = width/2;
    plane[1].u_height = height/2;
    plane[1].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);
    plane[1].u_topleft = 0;
    plane[1].pac_data = (buffer
        + plane[0].u_height * plane[0].u_stride
        + (plane[0].u_height/2) * android::PreviewRenderer::ALIGN((
            plane[0].u_stride / 2), 16));

    // V plane: immediately after the Y plane.
    // NOTE(review): the V offset uses ALIGN(stride, 16) while the U offset
    // above uses the raw stride for the Y-plane term — these agree only when
    // 'stride' is already a multiple of 16 (which the YV12 contract requires);
    // confirm callers always pass a 16-aligned stride.
    plane[2].u_width = width/2;
    plane[2].u_height = height/2;
    plane[2].u_stride = android::PreviewRenderer::ALIGN(plane[0].u_stride/2, 16);
    plane[2].u_topleft = 0;
    plane[2].pac_data = (buffer +
        plane[0].u_height * android::PreviewRenderer::ALIGN(plane[0].u_stride, 16));


}
-
-M4OSA_Void swapImagePlanes(
- M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
- M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2) {
-
- planeIn[0].u_height = planeOut[0].u_height;
- planeIn[0].u_width = planeOut[0].u_width;
- planeIn[0].u_stride = planeOut[0].u_stride;
- planeIn[0].u_topleft = planeOut[0].u_topleft;
- planeIn[0].pac_data = planeOut[0].pac_data;
-
- /**
- * U plane */
- planeIn[1].u_width = planeOut[1].u_width;
- planeIn[1].u_height = planeOut[1].u_height;
- planeIn[1].u_stride = planeOut[1].u_stride;
- planeIn[1].u_topleft = planeOut[1].u_topleft;
- planeIn[1].pac_data = planeOut[1].pac_data;
- /**
- * V Plane */
- planeIn[2].u_width = planeOut[2].u_width;
- planeIn[2].u_height = planeOut[2].u_height;
- planeIn[2].u_stride = planeOut[2].u_stride;
- planeIn[2].u_topleft = planeOut[2].u_topleft;
- planeIn[2].pac_data = planeOut[2].pac_data;
-
- if(planeOut[0].pac_data == (M4VIFI_UInt8*)buffer1)
- {
- planeOut[0].pac_data = (M4VIFI_UInt8*)buffer2;
- planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer2 +
- planeOut[0].u_width*planeOut[0].u_height);
-
- planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer2 +
- planeOut[0].u_width*planeOut[0].u_height +
- planeOut[1].u_width*planeOut[1].u_height);
- }
- else
- {
- planeOut[0].pac_data = (M4VIFI_UInt8*)buffer1;
- planeOut[1].pac_data = (M4VIFI_UInt8*)(buffer1 +
- planeOut[0].u_width*planeOut[0].u_height);
-
- planeOut[2].pac_data = (M4VIFI_UInt8*)(buffer1 +
- planeOut[0].u_width*planeOut[0].u_height +
- planeOut[1].u_width*planeOut[1].u_height);
- }
-
-}
-
-M4OSA_Void computePercentageDone(
- M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
- M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone) {
-
- M4OSA_Double videoEffectTime =0;
-
- // Compute how far from the beginning of the effect we are, in clip-base time.
- videoEffectTime =
- (M4OSA_Int32)(ctsMs+ 0.5) - effectStartTimeMs;
-
- // To calculate %, substract timeIncrement
- // because effect should finish on the last frame
- // which is from CTS = (eof-timeIncrement) till CTS = eof
- *percentageDone =
- videoEffectTime / ((M4OSA_Float)effectDuration);
-
- if(*percentageDone < 0.0) *percentageDone = 0.0;
- if(*percentageDone > 1.0) *percentageDone = 1.0;
-
-}
-
-
-M4OSA_Void computeProgressForVideoEffect(
- M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
- M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress) {
-
- M4OSA_Double percentageDone =0;
-
- computePercentageDone(ctsMs, effectStartTimeMs, effectDuration, &percentageDone);
-
- extProgress->uiProgress = (M4OSA_UInt32)( percentageDone * 1000 );
- extProgress->uiOutputTime = (M4OSA_UInt32)(ctsMs + 0.5);
- extProgress->uiClipTime = extProgress->uiOutputTime;
- extProgress->bIsLast = M4OSA_FALSE;
-}
-
-M4OSA_ERR prepareFramingStructure(
- M4xVSS_FramingStruct* framingCtx,
- M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,
- M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV) {
-
- M4OSA_ERR err = M4NO_ERROR;
-
- // Force input RGB buffer to even size to avoid errors in YUV conversion
- framingCtx->FramingRgb = effectsSettings[index].xVSS.pFramingBuffer;
- framingCtx->FramingRgb->u_width = framingCtx->FramingRgb->u_width & ~1;
- framingCtx->FramingRgb->u_height = framingCtx->FramingRgb->u_height & ~1;
- framingCtx->FramingYuv = NULL;
-
- framingCtx->duration = effectsSettings[index].uiDuration;
- framingCtx->topleft_x = effectsSettings[index].xVSS.topleft_x;
- framingCtx->topleft_y = effectsSettings[index].xVSS.topleft_y;
- framingCtx->pCurrent = framingCtx;
- framingCtx->pNext = framingCtx;
- framingCtx->previousClipTime = -1;
-
- framingCtx->alphaBlendingStruct =
- (M4xVSS_internalEffectsAlphaBlending*)M4OSA_32bitAlignedMalloc(
- sizeof(M4xVSS_internalEffectsAlphaBlending), M4VS,
- (M4OSA_Char*)"alpha blending struct");
-
- framingCtx->alphaBlendingStruct->m_fadeInTime =
- effectsSettings[index].xVSS.uialphaBlendingFadeInTime;
-
- framingCtx->alphaBlendingStruct->m_fadeOutTime =
- effectsSettings[index].xVSS.uialphaBlendingFadeOutTime;
-
- framingCtx->alphaBlendingStruct->m_end =
- effectsSettings[index].xVSS.uialphaBlendingEnd;
-
- framingCtx->alphaBlendingStruct->m_middle =
- effectsSettings[index].xVSS.uialphaBlendingMiddle;
-
- framingCtx->alphaBlendingStruct->m_start =
- effectsSettings[index].xVSS.uialphaBlendingStart;
-
- // If new Overlay buffer, convert from RGB to YUV
- if((overlayRGB != framingCtx->FramingRgb->pac_data) || (overlayYUV == NULL) ) {
-
- // If YUV buffer exists, delete it
- if(overlayYUV != NULL) {
- free(overlayYUV);
- overlayYUV = NULL;
- }
- if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB565) {
- // Input RGB565 plane is provided,
- // let's convert it to YUV420, and update framing structure
- err = M4xVSS_internalConvertRGBtoYUV(framingCtx);
- }
- else if(effectsSettings[index].xVSS.rgbType == M4VSS3GPP_kRGB888) {
- // Input RGB888 plane is provided,
- // let's convert it to YUV420, and update framing structure
- err = M4xVSS_internalConvertRGB888toYUV(framingCtx);
- }
- else {
- err = M4ERR_PARAMETER;
- }
- overlayYUV = framingCtx->FramingYuv[0].pac_data;
- overlayRGB = framingCtx->FramingRgb->pac_data;
-
- }
- else {
- ALOGV(" YUV buffer reuse");
- framingCtx->FramingYuv = (M4VIFI_ImagePlane*)M4OSA_32bitAlignedMalloc(
- 3*sizeof(M4VIFI_ImagePlane), M4VS, (M4OSA_Char*)"YUV");
-
- if(framingCtx->FramingYuv == M4OSA_NULL) {
- return M4ERR_ALLOC;
- }
-
- framingCtx->FramingYuv[0].u_width = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].u_height = framingCtx->FramingRgb->u_height;
- framingCtx->FramingYuv[0].u_topleft = 0;
- framingCtx->FramingYuv[0].u_stride = framingCtx->FramingRgb->u_width;
- framingCtx->FramingYuv[0].pac_data = (M4VIFI_UInt8*)overlayYUV;
-
- framingCtx->FramingYuv[1].u_width = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[1].u_height = (framingCtx->FramingRgb->u_height)>>1;
- framingCtx->FramingYuv[1].u_topleft = 0;
- framingCtx->FramingYuv[1].u_stride = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[1].pac_data = framingCtx->FramingYuv[0].pac_data +
- framingCtx->FramingYuv[0].u_width * framingCtx->FramingYuv[0].u_height;
-
- framingCtx->FramingYuv[2].u_width = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[2].u_height = (framingCtx->FramingRgb->u_height)>>1;
- framingCtx->FramingYuv[2].u_topleft = 0;
- framingCtx->FramingYuv[2].u_stride = (framingCtx->FramingRgb->u_width)>>1;
- framingCtx->FramingYuv[2].pac_data = framingCtx->FramingYuv[1].pac_data +
- framingCtx->FramingYuv[1].u_width * framingCtx->FramingYuv[1].u_height;
-
- framingCtx->duration = 0;
- framingCtx->previousClipTime = -1;
- framingCtx->previewOffsetClipTime = -1;
-
- }
- return err;
-}
-
-M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,
- M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
- M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData) {
-
- M4xVSS_ColorStruct colorContext;
- M4OSA_ERR err = M4NO_ERROR;
-
- colorContext.colorEffectType = colorEffect;
- colorContext.rgb16ColorData = rgbColorData;
-
- err = M4VSS3GPP_externalVideoEffectColor(
- (M4OSA_Void *)&colorContext, planeIn, planeOut, NULL,
- colorEffect);
-
- if(err != M4NO_ERROR) {
- ALOGV("M4VSS3GPP_externalVideoEffectColor(%d) error %d",
- colorEffect, err);
-
- if(NULL != buffer1) {
- free(buffer1);
- buffer1 = NULL;
- }
- if(NULL != buffer2) {
- free(buffer2);
- buffer2 = NULL;
- }
- return err;
- }
-
- // The out plane now becomes the in plane for adding other effects
- swapImagePlanes(planeIn, planeOut, buffer1, buffer2);
-
- return err;
-}
-
-M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,
- M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
- M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor) {
-
- M4OSA_ERR err = M4NO_ERROR;
-
- err = M4VFL_modifyLumaWithScale(
- (M4ViComImagePlane*)planeIn,(M4ViComImagePlane*)planeOut,
- lum_factor, NULL);
-
- if(err != M4NO_ERROR) {
- ALOGE("M4VFL_modifyLumaWithScale(%d) error %d", videoEffect, (int)err);
-
- if(NULL != buffer1) {
- free(buffer1);
- buffer1= NULL;
- }
- if(NULL != buffer2) {
- free(buffer2);
- buffer2= NULL;
- }
- return err;
- }
-
- // The out plane now becomes the in plane for adding other effects
- swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)buffer1,
- (M4VIFI_UInt8 *)buffer2);
-
- return err;
-}
-
-M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams *params,
- M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight) {
-
- M4OSA_ERR err = M4NO_ERROR;
- M4VIFI_ImagePlane planeIn[3], planeOut[3];
- M4VIFI_UInt8 *finalOutputBuffer = NULL, *tempOutputBuffer= NULL;
- M4OSA_Double percentageDone =0;
- M4OSA_Int32 lum_factor;
- M4VSS3GPP_ExternalProgress extProgress;
- M4xVSS_FiftiesStruct fiftiesCtx;
- M4OSA_UInt32 frameSize = 0, i=0;
-
- frameSize = (params->videoWidth*params->videoHeight*3) >> 1;
-
- finalOutputBuffer = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(frameSize, M4VS,
- (M4OSA_Char*)("lvpp finalOutputBuffer"));
-
- if(finalOutputBuffer == NULL) {
- ALOGE("applyEffectsAndRenderingMode: malloc error");
- return M4ERR_ALLOC;
- }
-
- // allocate the tempOutputBuffer
- tempOutputBuffer = (M4VIFI_UInt8*)M4OSA_32bitAlignedMalloc(
- ((params->videoHeight*params->videoWidth*3)>>1), M4VS, (M4OSA_Char*)("lvpp colorBuffer"));
-
- if(tempOutputBuffer == NULL) {
- ALOGE("applyEffectsAndRenderingMode: malloc error tempOutputBuffer");
- if(NULL != finalOutputBuffer) {
- free(finalOutputBuffer);
- finalOutputBuffer = NULL;
- }
- return M4ERR_ALLOC;
- }
-
- // Initialize the In plane
- prepareYUV420ImagePlane(planeIn, params->videoWidth, params->videoHeight,
- params->vidBuffer, reportedWidth, reportedHeight);
-
- // Initialize the Out plane
- prepareYUV420ImagePlane(planeOut, params->videoWidth, params->videoHeight,
- (M4VIFI_UInt8 *)tempOutputBuffer, params->videoWidth, params->videoHeight);
-
- // The planeIn contains the YUV420 input data to postprocessing node
- // and planeOut will contain the YUV420 data with effect
- // In each successive if condition, apply filter to successive
- // output YUV frame so that concurrent effects are both applied
-
- if(params->currentVideoEffect & VIDEO_EFFECT_BLACKANDWHITE) {
- err = applyColorEffect(M4xVSS_kVideoEffectType_BlackAndWhite,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer, 0);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_PINK) {
- err = applyColorEffect(M4xVSS_kVideoEffectType_Pink,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer, 0);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_GREEN) {
- err = applyColorEffect(M4xVSS_kVideoEffectType_Green,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer, 0);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_SEPIA) {
- err = applyColorEffect(M4xVSS_kVideoEffectType_Sepia,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer, 0);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_NEGATIVE) {
- err = applyColorEffect(M4xVSS_kVideoEffectType_Negative,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer, 0);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_GRADIENT) {
- // find the effect in effectSettings array
- for(i=0;i<params->numberEffects;i++) {
- if(params->effectsSettings[i].VideoEffectType ==
- (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Gradient)
- break;
- }
- err = applyColorEffect(M4xVSS_kVideoEffectType_Gradient,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer,
- params->effectsSettings[i].xVSS.uiRgb16InputColor);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_COLOR_RGB16) {
- // Find the effect in effectSettings array
- for(i=0;i<params->numberEffects;i++) {
- if(params->effectsSettings[i].VideoEffectType ==
- (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_ColorRGB16)
- break;
- }
- err = applyColorEffect(M4xVSS_kVideoEffectType_ColorRGB16,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer,
- params->effectsSettings[i].xVSS.uiRgb16InputColor);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_FIFTIES) {
- // Find the effect in effectSettings array
- for(i=0;i<params->numberEffects;i++) {
- if(params->effectsSettings[i].VideoEffectType ==
- (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Fifties)
- break;
- }
- if(i < params->numberEffects) {
- computeProgressForVideoEffect(params->timeMs,
- params->effectsSettings[i].uiStartTime,
- params->effectsSettings[i].uiDuration, &extProgress);
-
- if(params->isFiftiesEffectStarted) {
- fiftiesCtx.previousClipTime = -1;
- }
- fiftiesCtx.fiftiesEffectDuration =
- 1000/params->effectsSettings[i].xVSS.uiFiftiesOutFrameRate;
-
- fiftiesCtx.shiftRandomValue = 0;
- fiftiesCtx.stripeRandomValue = 0;
-
- err = M4VSS3GPP_externalVideoEffectFifties(
- (M4OSA_Void *)&fiftiesCtx, planeIn, planeOut, &extProgress,
- M4xVSS_kVideoEffectType_Fifties);
-
- if(err != M4NO_ERROR) {
- ALOGE("M4VSS3GPP_externalVideoEffectFifties error 0x%x", (unsigned int)err);
-
- if(NULL != finalOutputBuffer) {
- free(finalOutputBuffer);
- finalOutputBuffer = NULL;
- }
- if(NULL != tempOutputBuffer) {
- free(tempOutputBuffer);
- tempOutputBuffer = NULL;
- }
- return err;
- }
-
- // The out plane now becomes the in plane for adding other effects
- swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer);
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_FRAMING) {
-
- M4xVSS_FramingStruct framingCtx;
- // Find the effect in effectSettings array
- for(i=0;i<params->numberEffects;i++) {
- if(params->effectsSettings[i].VideoEffectType ==
- (M4VSS3GPP_VideoEffectType)M4xVSS_kVideoEffectType_Framing) {
- if((params->effectsSettings[i].uiStartTime <= params->timeMs + params->timeOffset) &&
- ((params->effectsSettings[i].uiStartTime+
- params->effectsSettings[i].uiDuration) >= params->timeMs + params->timeOffset))
- {
- break;
- }
- }
- }
- if(i < params->numberEffects) {
- computeProgressForVideoEffect(params->timeMs,
- params->effectsSettings[i].uiStartTime,
- params->effectsSettings[i].uiDuration, &extProgress);
-
- err = prepareFramingStructure(&framingCtx,
- params->effectsSettings, i, params->overlayFrameRGBBuffer,
- params->overlayFrameYUVBuffer);
-
- if(err == M4NO_ERROR) {
- err = M4VSS3GPP_externalVideoEffectFraming(
- (M4OSA_Void *)&framingCtx, planeIn, planeOut, &extProgress,
- M4xVSS_kVideoEffectType_Framing);
- }
-
- free(framingCtx.alphaBlendingStruct);
-
- if(framingCtx.FramingYuv != NULL) {
- free(framingCtx.FramingYuv);
- framingCtx.FramingYuv = NULL;
- }
- //If prepareFramingStructure / M4VSS3GPP_externalVideoEffectFraming
- // returned error, then return from function
- if(err != M4NO_ERROR) {
-
- if(NULL != finalOutputBuffer) {
- free(finalOutputBuffer);
- finalOutputBuffer = NULL;
- }
- if(NULL != tempOutputBuffer) {
- free(tempOutputBuffer);
- tempOutputBuffer = NULL;
- }
- return err;
- }
-
- // The out plane now becomes the in plane for adding other effects
- swapImagePlanes(planeIn, planeOut,(M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer);
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_FADEFROMBLACK) {
- /* find the effect in effectSettings array*/
- for(i=0;i<params->numberEffects;i++) {
- if(params->effectsSettings[i].VideoEffectType ==
- M4VSS3GPP_kVideoEffectType_FadeFromBlack)
- break;
- }
-
- if(i < params->numberEffects) {
- computePercentageDone(params->timeMs,
- params->effectsSettings[i].uiStartTime,
- params->effectsSettings[i].uiDuration, &percentageDone);
-
- // Compute where we are in the effect (scale is 0->1024)
- lum_factor = (M4OSA_Int32)( percentageDone * 1024 );
- // Apply the darkening effect
- err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeFromBlack,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
- }
-
- if(params->currentVideoEffect & VIDEO_EFFECT_FADETOBLACK) {
- // Find the effect in effectSettings array
- for(i=0;i<params->numberEffects;i++) {
- if(params->effectsSettings[i].VideoEffectType ==
- M4VSS3GPP_kVideoEffectType_FadeToBlack)
- break;
- }
- if(i < params->numberEffects) {
- computePercentageDone(params->timeMs,
- params->effectsSettings[i].uiStartTime,
- params->effectsSettings[i].uiDuration, &percentageDone);
-
- // Compute where we are in the effect (scale is 0->1024)
- lum_factor = (M4OSA_Int32)( (1.0-percentageDone) * 1024 );
- // Apply the darkening effect
- err = applyLumaEffect(M4VSS3GPP_kVideoEffectType_FadeToBlack,
- planeIn, planeOut, (M4VIFI_UInt8 *)finalOutputBuffer,
- (M4VIFI_UInt8 *)tempOutputBuffer, lum_factor);
- if(err != M4NO_ERROR) {
- return err;
- }
- }
- }
-
- ALOGV("doMediaRendering CALL getBuffer()");
- // Set the output YUV420 plane to be compatible with YV12 format
- // W & H even
- // YVU instead of YUV
- // align buffers on 32 bits
-
- // Y plane
- //in YV12 format, sizes must be even
- M4OSA_UInt32 yv12PlaneWidth = ((params->outVideoWidth +1)>>1)<<1;
- M4OSA_UInt32 yv12PlaneHeight = ((params->outVideoHeight+1)>>1)<<1;
-
- prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
- (M4OSA_UInt32)params->outBufferStride, (M4VIFI_UInt8 *)params->pOutBuffer);
-
- err = applyRenderingMode(planeIn, planeOut, params->renderingMode);
-
- if(M4OSA_NULL != finalOutputBuffer) {
- free(finalOutputBuffer);
- finalOutputBuffer= M4OSA_NULL;
- }
- if(M4OSA_NULL != tempOutputBuffer) {
- free(tempOutputBuffer);
- tempOutputBuffer = M4OSA_NULL;
- }
- if(err != M4NO_ERROR) {
- ALOGV("doVideoPostProcessing: applyRenderingMode returned err=%d",err);
- return err;
- }
- return M4NO_ERROR;
-}
-
-android::status_t getVideoSizeByResolution(
- M4VIDEOEDITING_VideoFrameSize resolution,
- uint32_t *pWidth, uint32_t *pHeight) {
-
- uint32_t frameWidth, frameHeight;
-
- if (pWidth == NULL) {
- ALOGE("getVideoFrameSizeByResolution invalid pointer for pWidth");
- return android::BAD_VALUE;
- }
- if (pHeight == NULL) {
- ALOGE("getVideoFrameSizeByResolution invalid pointer for pHeight");
- return android::BAD_VALUE;
- }
-
- switch (resolution) {
- case M4VIDEOEDITING_kSQCIF:
- frameWidth = 128;
- frameHeight = 96;
- break;
-
- case M4VIDEOEDITING_kQQVGA:
- frameWidth = 160;
- frameHeight = 120;
- break;
-
- case M4VIDEOEDITING_kQCIF:
- frameWidth = 176;
- frameHeight = 144;
- break;
-
- case M4VIDEOEDITING_kQVGA:
- frameWidth = 320;
- frameHeight = 240;
- break;
-
- case M4VIDEOEDITING_kCIF:
- frameWidth = 352;
- frameHeight = 288;
- break;
-
- case M4VIDEOEDITING_kVGA:
- frameWidth = 640;
- frameHeight = 480;
- break;
-
- case M4VIDEOEDITING_kWVGA:
- frameWidth = 800;
- frameHeight = 480;
- break;
-
- case M4VIDEOEDITING_kNTSC:
- frameWidth = 720;
- frameHeight = 480;
- break;
-
- case M4VIDEOEDITING_k640_360:
- frameWidth = 640;
- frameHeight = 360;
- break;
-
- case M4VIDEOEDITING_k854_480:
- frameWidth = 854;
- frameHeight = 480;
- break;
-
- case M4VIDEOEDITING_k1280_720:
- frameWidth = 1280;
- frameHeight = 720;
- break;
-
- case M4VIDEOEDITING_k1080_720:
- frameWidth = 1080;
- frameHeight = 720;
- break;
-
- case M4VIDEOEDITING_k960_720:
- frameWidth = 960;
- frameHeight = 720;
- break;
-
- case M4VIDEOEDITING_k1920_1080:
- frameWidth = 1920;
- frameHeight = 1080;
- break;
-
- default:
- ALOGE("Unsupported video resolution %d.", resolution);
- return android::BAD_VALUE;
- }
-
- *pWidth = frameWidth;
- *pHeight = frameHeight;
-
- return android::OK;
-}
-
-M4VIFI_UInt8 M4VIFI_Rotate90LeftYUV420toYUV420(void* pUserData,
- M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) {
-
- M4VIFI_Int32 plane_number;
- M4VIFI_UInt32 i,j, u_stride;
- M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
-
- /**< Loop on Y,U and V planes */
- for (plane_number = 0; plane_number < 3; plane_number++) {
- /**< Get adresses of first valid pixel in input and output buffer */
- /**< As we have a -90° rotation, first needed pixel is the upper-right one */
- p_buf_src =
- &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) +
- pPlaneOut[plane_number].u_height - 1 ;
- p_buf_dest =
- &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]);
- u_stride = pPlaneIn[plane_number].u_stride;
- /**< Loop on output rows */
- for (i = 0; i < pPlaneOut[plane_number].u_height; i++) {
- /**< Loop on all output pixels in a row */
- for (j = 0; j < pPlaneOut[plane_number].u_width; j++) {
- *p_buf_dest++= *p_buf_src;
- p_buf_src += u_stride; /**< Go to the next row */
- }
-
- /**< Go on next row of the output frame */
- p_buf_dest +=
- pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width;
- /**< Go to next pixel in the last row of the input frame*/
- p_buf_src -=
- pPlaneIn[plane_number].u_stride * pPlaneOut[plane_number].u_width + 1 ;
- }
- }
-
- return M4VIFI_OK;
-}
-
-M4VIFI_UInt8 M4VIFI_Rotate90RightYUV420toYUV420(void* pUserData,
- M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) {
-
- M4VIFI_Int32 plane_number;
- M4VIFI_UInt32 i,j, u_stride;
- M4VIFI_UInt8 *p_buf_src, *p_buf_dest;
-
- /**< Loop on Y,U and V planes */
- for (plane_number = 0; plane_number < 3; plane_number++) {
- /**< Get adresses of first valid pixel in input and output buffer */
- /**< As we have a +90° rotation, first needed pixel is the left-down one */
- p_buf_src =
- &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]) +
- (pPlaneIn[plane_number].u_stride * (pPlaneOut[plane_number].u_width - 1));
- p_buf_dest =
- &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]);
- u_stride = pPlaneIn[plane_number].u_stride;
- /**< Loop on output rows */
- for (i = 0; i < pPlaneOut[plane_number].u_height; i++) {
- /**< Loop on all output pixels in a row */
- for (j = 0; j < pPlaneOut[plane_number].u_width; j++) {
- *p_buf_dest++= *p_buf_src;
- p_buf_src -= u_stride; /**< Go to the previous row */
- }
-
- /**< Go on next row of the output frame */
- p_buf_dest +=
- pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width;
- /**< Go to next pixel in the last row of the input frame*/
- p_buf_src +=
- pPlaneIn[plane_number].u_stride * pPlaneOut[plane_number].u_width +1 ;
- }
- }
-
- return M4VIFI_OK;
-}
-
-M4VIFI_UInt8 M4VIFI_Rotate180YUV420toYUV420(void* pUserData,
- M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut) {
- M4VIFI_Int32 plane_number;
- M4VIFI_UInt32 i,j;
- M4VIFI_UInt8 *p_buf_src, *p_buf_dest, temp_pix1;
-
- /**< Loop on Y,U and V planes */
- for (plane_number = 0; plane_number < 3; plane_number++) {
- /**< Get adresses of first valid pixel in input and output buffer */
- p_buf_src =
- &(pPlaneIn[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]);
- p_buf_dest =
- &(pPlaneOut[plane_number].pac_data[pPlaneOut[plane_number].u_topleft]);
-
- /**< If pPlaneIn = pPlaneOut, the algorithm will be different */
- if (p_buf_src == p_buf_dest) {
- /**< Get Address of last pixel in the last row of the frame */
- p_buf_dest +=
- pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) +
- pPlaneOut[plane_number].u_width - 1;
-
- /**< We loop (height/2) times on the rows.
- * In case u_height is odd, the row at the middle of the frame
- * has to be processed as must be mirrored */
- for (i = 0; i < ((pPlaneOut[plane_number].u_height)>>1); i++) {
- for (j = 0; j < pPlaneOut[plane_number].u_width; j++) {
- temp_pix1= *p_buf_dest;
- *p_buf_dest--= *p_buf_src;
- *p_buf_src++ = temp_pix1;
- }
- /**< Go on next row in top of frame */
- p_buf_src +=
- pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width;
- /**< Go to the last pixel in previous row in bottom of frame*/
- p_buf_dest -=
- pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width;
- }
-
- /**< Mirror middle row in case height is odd */
- if ((pPlaneOut[plane_number].u_height%2)!= 0) {
- p_buf_src =
- &(pPlaneOut[plane_number].pac_data[pPlaneIn[plane_number].u_topleft]);
- p_buf_src +=
- pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height>>1);
- p_buf_dest =
- p_buf_src + pPlaneOut[plane_number].u_width;
-
- /**< We loop u_width/2 times on this row.
- * In case u_width is odd, the pixel at the middle of this row
- * remains unchanged */
- for (j = 0; j < (pPlaneOut[plane_number].u_width>>1); j++) {
- temp_pix1= *p_buf_dest;
- *p_buf_dest--= *p_buf_src;
- *p_buf_src++ = temp_pix1;
- }
- }
- } else {
- /**< Get Address of last pixel in the last row of the output frame */
- p_buf_dest +=
- pPlaneOut[plane_number].u_stride*(pPlaneOut[plane_number].u_height-1) +
- pPlaneIn[plane_number].u_width - 1;
-
- /**< Loop on rows */
- for (i = 0; i < pPlaneOut[plane_number].u_height; i++) {
- for (j = 0; j < pPlaneOut[plane_number].u_width; j++) {
- *p_buf_dest--= *p_buf_src++;
- }
-
- /**< Go on next row in top of input frame */
- p_buf_src +=
- pPlaneIn[plane_number].u_stride - pPlaneOut[plane_number].u_width;
- /**< Go to last pixel of previous row in bottom of input frame*/
- p_buf_dest -=
- pPlaneOut[plane_number].u_stride - pPlaneOut[plane_number].u_width;
- }
- }
- }
-
- return M4VIFI_OK;
-}
-
-M4OSA_ERR applyVideoRotation(M4OSA_Void* pBuffer, M4OSA_UInt32 width,
- M4OSA_UInt32 height, M4OSA_UInt32 rotation) {
-
- M4OSA_ERR err = M4NO_ERROR;
- M4VIFI_ImagePlane planeIn[3], planeOut[3];
-
- if (pBuffer == M4OSA_NULL) {
- ALOGE("applyVideoRotation: NULL input frame");
- return M4ERR_PARAMETER;
- }
- M4OSA_UInt8* outPtr = (M4OSA_UInt8 *)M4OSA_32bitAlignedMalloc(
- (width*height*1.5), M4VS, (M4OSA_Char*)("rotation out ptr"));
- if (outPtr == M4OSA_NULL) {
- return M4ERR_ALLOC;
- }
-
- // In plane
- prepareYUV420ImagePlane(planeIn, width,
- height, (M4VIFI_UInt8 *)pBuffer, width, height);
-
- // Out plane
- if (rotation != 180) {
- prepareYUV420ImagePlane(planeOut, height,
- width, outPtr, height, width);
- }
-
- switch(rotation) {
- case 90:
- M4VIFI_Rotate90RightYUV420toYUV420(M4OSA_NULL, planeIn, planeOut);
- memcpy(pBuffer, (void *)outPtr, (width*height*1.5));
- break;
-
- case 180:
- // In plane rotation, so planeOut = planeIn
- M4VIFI_Rotate180YUV420toYUV420(M4OSA_NULL, planeIn, planeIn);
- break;
-
- case 270:
- M4VIFI_Rotate90LeftYUV420toYUV420(M4OSA_NULL, planeIn, planeOut);
- memcpy(pBuffer, (void *)outPtr, (width*height*1.5));
- break;
-
- default:
- ALOGE("invalid rotation param %d", (int)rotation);
- err = M4ERR_PARAMETER;
- break;
- }
-
- free((void *)outPtr);
- return err;
-
-}
-
diff --git a/libvideoeditor/lvpp/VideoEditorTools.h b/libvideoeditor/lvpp/VideoEditorTools.h
deleted file mode 100755
index 9b464da..0000000
--- a/libvideoeditor/lvpp/VideoEditorTools.h
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_VE_TOOLS_H
-#define ANDROID_VE_TOOLS_H
-
-#include "M4OSA_Types.h"
-#include "M4OSA_Memory.h"
-#include "M4OSA_Debug.h"
-#include "M4VIFI_FiltersAPI.h"
-/* Macro definitions */
-#include "M4VIFI_Defines.h"
-/* Clip table declaration */
-#include "M4VIFI_Clip.h"
-#include "M4VFL_transition.h"
-#include "M4VSS3GPP_API.h"
-#include "M4xVSS_API.h"
-#include "M4xVSS_Internal.h"
-#include "M4AIR_API.h"
-#include "PreviewRenderer.h"
-
-#define MEDIA_RENDERING_INVALID 255
-#define TRANSPARENT_COLOR 0x7E0
-#define LUM_FACTOR_MAX 10
-enum {
- VIDEO_EFFECT_NONE = 0,
- VIDEO_EFFECT_BLACKANDWHITE = 1,
- VIDEO_EFFECT_PINK = 2,
- VIDEO_EFFECT_GREEN = 4,
- VIDEO_EFFECT_SEPIA = 8,
- VIDEO_EFFECT_NEGATIVE = 16,
- VIDEO_EFFECT_FRAMING = 32,
- VIDEO_EFFECT_FIFTIES = 64,
- VIDEO_EFFECT_COLOR_RGB16 = 128,
- VIDEO_EFFECT_GRADIENT = 256,
- VIDEO_EFFECT_FADEFROMBLACK = 512,
- VIDEO_EFFECT_FADETOBLACK = 2048,
-};
-
-typedef struct {
- M4VIFI_UInt8 *vidBuffer;
- M4OSA_UInt32 videoWidth;
- M4OSA_UInt32 videoHeight;
- M4OSA_UInt32 timeMs;
- M4OSA_UInt32 timeOffset; //has the duration of clips played.
- //The flag shall be used for Framing.
- M4VSS3GPP_EffectSettings* effectsSettings;
- M4OSA_UInt32 numberEffects;
- M4OSA_UInt32 outVideoWidth;
- M4OSA_UInt32 outVideoHeight;
- M4OSA_UInt32 currentVideoEffect;
- M4OSA_Bool isFiftiesEffectStarted;
- M4xVSS_MediaRendering renderingMode;
- uint8_t *pOutBuffer;
- size_t outBufferStride;
- M4VIFI_UInt8* overlayFrameRGBBuffer;
- M4VIFI_UInt8* overlayFrameYUVBuffer;
-} vePostProcessParams;
-
-M4VIFI_UInt8 M4VIFI_YUV420PlanarToYUV420Semiplanar(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );
-M4VIFI_UInt8 M4VIFI_SemiplanarYUV420toYUV420(void *user_data, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane *PlaneOut );
-
-M4OSA_ERR M4VSS3GPP_externalVideoEffectColor(M4OSA_Void *pFunctionContext, M4VIFI_ImagePlane *PlaneIn,
- M4VIFI_ImagePlane *PlaneOut,M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind);
-
-M4OSA_ERR M4VSS3GPP_externalVideoEffectFraming( M4OSA_Void *userData, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );
-
-M4OSA_ERR M4VSS3GPP_externalVideoEffectFifties( M4OSA_Void *pUserData, M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut, M4VSS3GPP_ExternalProgress *pProgress, M4OSA_UInt32 uiEffectKind );
-
-unsigned char M4VFL_modifyLumaWithScale(M4ViComImagePlane *plane_in, M4ViComImagePlane *plane_out, unsigned long lum_factor, void *user_data);
-
-M4OSA_ERR M4xVSS_internalConvertRGBtoYUV(M4xVSS_FramingStruct* framingCtx);
-M4VIFI_UInt8 M4VIFI_xVSS_RGB565toYUV420(void *pUserData, M4VIFI_ImagePlane *pPlaneIn,
- M4VIFI_ImagePlane *pPlaneOut);
-
-M4OSA_ERR M4xVSS_internalConvertRGB888toYUV(M4xVSS_FramingStruct* framingCtx);
-M4VIFI_UInt8 M4VIFI_RGB888toYUV420(void *pUserData, M4VIFI_ImagePlane *PlaneIn, M4VIFI_ImagePlane PlaneOut[3]);
-
-/*+ Handle the image files here */
-M4OSA_ERR LvGetImageThumbNail(const char *fileName, M4OSA_UInt32 height, M4OSA_UInt32 width, M4OSA_Void **pBuffer);
-/*- Handle the image files here */
-
-M4OSA_ERR applyRenderingMode(M4VIFI_ImagePlane* pPlaneIn, M4VIFI_ImagePlane* pPlaneOut, M4xVSS_MediaRendering mediaRendering);
-
-
-M4VIFI_UInt8 M4VIFI_YUV420toYUV420(void *user_data, M4VIFI_ImagePlane PlaneIn[3], M4VIFI_ImagePlane *PlaneOut );
-M4VIFI_UInt8 M4VIFI_ResizeBilinearYUV420toYUV420(void *pUserData,
- M4VIFI_ImagePlane *pPlaneIn,
- M4VIFI_ImagePlane *pPlaneOut);
-
-M4OSA_Void prepareYUV420ImagePlane(M4VIFI_ImagePlane *plane,
- M4OSA_UInt32 width, M4OSA_UInt32 height, M4VIFI_UInt8 *buffer,
- M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight);
-
-M4OSA_Void prepareYV12ImagePlane(M4VIFI_ImagePlane *plane,
- M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 stride, M4VIFI_UInt8 *buffer);
-
-M4OSA_Void swapImagePlanes(
- M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
- M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2);
-
-M4OSA_Void computePercentageDone(
- M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
- M4OSA_UInt32 effectDuration, M4OSA_Double *percentageDone);
-
-M4OSA_Void computeProgressForVideoEffect(
- M4OSA_UInt32 ctsMs, M4OSA_UInt32 effectStartTimeMs,
- M4OSA_UInt32 effectDuration, M4VSS3GPP_ExternalProgress* extProgress);
-
-M4OSA_ERR prepareFramingStructure(
- M4xVSS_FramingStruct* framingCtx,
- M4VSS3GPP_EffectSettings* effectsSettings, M4OSA_UInt32 index,
- M4VIFI_UInt8* overlayRGB, M4VIFI_UInt8* overlayYUV);
-
-M4OSA_ERR applyColorEffect(M4xVSS_VideoEffectType colorEffect,
- M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
- M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_UInt16 rgbColorData);
-
-M4OSA_ERR applyLumaEffect(M4VSS3GPP_VideoEffectType videoEffect,
- M4VIFI_ImagePlane *planeIn, M4VIFI_ImagePlane *planeOut,
- M4VIFI_UInt8 *buffer1, M4VIFI_UInt8 *buffer2, M4OSA_Int32 lum_factor);
-
-M4OSA_ERR applyEffectsAndRenderingMode(vePostProcessParams *params,
- M4OSA_UInt32 reportedWidth, M4OSA_UInt32 reportedHeight);
-
-android::status_t getVideoSizeByResolution(M4VIDEOEDITING_VideoFrameSize resolution,
- uint32_t *pWidth, uint32_t *pHeight);
-
-M4VIFI_UInt8 M4VIFI_Rotate90LeftYUV420toYUV420(void* pUserData,
- M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut);
-
-M4VIFI_UInt8 M4VIFI_Rotate90RightYUV420toYUV420(void* pUserData,
- M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut);
-
-M4VIFI_UInt8 M4VIFI_Rotate180YUV420toYUV420(void* pUserData,
- M4VIFI_ImagePlane *pPlaneIn, M4VIFI_ImagePlane *pPlaneOut);
-
-M4OSA_ERR applyVideoRotation(M4OSA_Void* pBuffer,
- M4OSA_UInt32 width, M4OSA_UInt32 height, M4OSA_UInt32 rotation);
-#endif // ANDROID_VE_TOOLS_H