author    Chih-Chung Chang <chihchung@google.com>    2011-08-02 16:17:39 +0800
committer Chih-Chung Chang <chihchung@google.com>    2011-08-08 18:11:00 +0800
commit    43fcc396614a587851e2b7c4cea2876ec58b8648 (patch)
tree      22a07a3df45003665acbb9c0a510f0ff02037f78
parent    b9410c5dacdb44f2f19bc2487dd9ef84445e1f32 (diff)
Use GL to render preview.
To speed up the preview, we direct the decoder output to a SurfaceTexture and then draw that texture to the surface. The media rendering modes (cropping, black borders) are implemented with different vertex coordinates, and the color effects with fragment shaders. Currently only three color effects are implemented, but those are all the application uses.

Change-Id: If84439fee572ed37ea077749ef9f2bd4f78703e1
-rwxr-xr-xlibvideoeditor/lvpp/Android.mk7
-rwxr-xr-xlibvideoeditor/lvpp/NativeWindowRenderer.cpp621
-rwxr-xr-xlibvideoeditor/lvpp/NativeWindowRenderer.h182
-rwxr-xr-xlibvideoeditor/lvpp/PreviewPlayer.cpp193
-rwxr-xr-xlibvideoeditor/lvpp/PreviewPlayer.h15
-rw-r--r--libvideoeditor/lvpp/PreviewPlayerBase.cpp65
-rw-r--r--libvideoeditor/lvpp/PreviewPlayerBase.h7
-rwxr-xr-xlibvideoeditor/lvpp/PreviewRenderer.cpp94
-rwxr-xr-xlibvideoeditor/lvpp/PreviewRenderer.h3
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPlayer.cpp4
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPlayer.h3
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPreviewController.cpp10
-rwxr-xr-xlibvideoeditor/lvpp/VideoEditorPreviewController.h1
13 files changed, 862 insertions, 343 deletions
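
The intended call flow, pieced together from NativeWindowRenderer.h and the
PreviewPlayer changes below, looks roughly like the sketch that follows. It is
illustrative only (not part of the patch); the variables around the
NativeWindowRenderer/RenderInput calls are placeholders and error handling is
omitted.

    // Create one renderer for the preview surface, sized to the output video.
    NativeWindowRenderer renderer(previewSurface, outputWidth, outputHeight);

    // One RenderInput per clip; its ANativeWindow is handed to the decoder so
    // decoded frames land directly in the RenderInput's SurfaceTexture.
    RenderInput* input = renderer.createRenderInput();
    sp<MediaSource> decoder = OMXCodec::Create(
            client.interface(), videoTrack->getFormat(), false /* encoder */,
            videoTrack, NULL, flags, input->getTargetWindow());
    input->updateVideoSize(decoder->getFormat());

    // Draw each decoded buffer with the requested effect and rendering mode.
    // isExternalBuffer is false because the buffer came from the SurfaceTexture.
    MediaBuffer* buffer;
    while (decoder->read(&buffer) == OK) {
        input->render(buffer, VIDEO_EFFECT_NONE, M4xVSS_kBlackBorders, false);
        buffer->release();
    }
    renderer.destroyRenderInput(input);
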
diff --git a/libvideoeditor/lvpp/Android.mk b/libvideoeditor/lvpp/Android.mk
index 64e7c73..7478ba5 100755
--- a/libvideoeditor/lvpp/Android.mk
+++ b/libvideoeditor/lvpp/Android.mk
@@ -37,7 +37,8 @@ LOCAL_SRC_FILES:= \
AudioPlayerBase.cpp \
PreviewPlayerBase.cpp \
PreviewRenderer.cpp \
- YV12ColorConverter.cpp
+ YV12ColorConverter.cpp \
+ NativeWindowRenderer.cpp
LOCAL_MODULE_TAGS := optional
@@ -58,7 +59,9 @@ LOCAL_SHARED_LIBRARIES := \
libstagefright_foundation \
libgui \
libaudioflinger \
- libui
+ libui \
+ libEGL \
+ libGLESv2
LOCAL_C_INCLUDES += \
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
new file mode 100755
index 0000000..cde8b89
--- /dev/null
+++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
@@ -0,0 +1,621 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "NativeWindowRenderer"
+#include "NativeWindowRenderer.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+#include <cutils/log.h>
+#include <gui/SurfaceTexture.h>
+#include <gui/SurfaceTextureClient.h>
+#include <stagefright/MediaBuffer.h>
+#include <stagefright/MediaDebug.h>
+#include <stagefright/MetaData.h>
+#include <surfaceflinger/Surface.h>
+#include "VideoEditorTools.h"
+
+#define CHECK_EGL_ERROR CHECK(EGL_SUCCESS == eglGetError())
+#define CHECK_GL_ERROR CHECK(GLenum(GL_NO_ERROR) == glGetError())
+
+//
+// Vertex and fragment programs
+//
+
+// The matrix is derived from
+// frameworks/base/media/libstagefright/colorconversion/ColorConverter.cpp
+//
+// R * 255 = 1.164 * (Y - 16) + 1.596 * (V - 128)
+// G * 255 = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128)
+// B * 255 = 1.164 * (Y - 16) + 2.018 * (U - 128)
+//
+// Here we assume the YUV values are in the range [0, 255] and the RGB
+// values are in the range [0, 1].
+#define RGB2YUV_MATRIX \
+"const mat4 rgb2yuv = mat4("\
+" 65.52255, -37.79398, 111.98732, 0.00000,"\
+" 128.62729, -74.19334, -93.81088, 0.00000,"\
+" 24.92233, 111.98732, -18.17644, 0.00000,"\
+" 16.00000, 128.00000, 128.00000, 1.00000);\n"
+
+#define YUV2RGB_MATRIX \
+"const mat4 yuv2rgb = mat4("\
+" 0.00456, 0.00456, 0.00456, 0.00000,"\
+" 0.00000, -0.00153, 0.00791, 0.00000,"\
+" 0.00626, -0.00319, 0.00000, 0.00000,"\
+" -0.87416, 0.53133, -1.08599, 1.00000);\n"
+
+static const char vSrcNormal[] =
+ "attribute vec4 vPosition;\n"
+ "attribute vec2 vTexPos;\n"
+ "uniform mat4 texMatrix;\n"
+ "varying vec2 texCoords;\n"
+ "varying float topDown;\n"
+ "void main() {\n"
+ " gl_Position = vPosition;\n"
+ " texCoords = (texMatrix * vec4(vTexPos, 0.0, 1.0)).xy;\n"
+ " topDown = vTexPos.y;\n"
+ "}\n";
+
+static const char fSrcNormal[] =
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES texSampler;\n"
+ "varying vec2 texCoords;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(texSampler, texCoords);\n"
+ "}\n";
+
+static const char fSrcSepia[] =
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES texSampler;\n"
+ "varying vec2 texCoords;\n"
+ RGB2YUV_MATRIX
+ YUV2RGB_MATRIX
+ "void main() {\n"
+ " vec4 rgb = texture2D(texSampler, texCoords);\n"
+ " vec4 yuv = rgb2yuv * rgb;\n"
+ " yuv = vec4(yuv.x, 117.0, 139.0, 1.0);\n"
+ " gl_FragColor = yuv2rgb * yuv;\n"
+ "}\n";
+
+static const char fSrcNegative[] =
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES texSampler;\n"
+ "varying vec2 texCoords;\n"
+ RGB2YUV_MATRIX
+ YUV2RGB_MATRIX
+ "void main() {\n"
+ " vec4 rgb = texture2D(texSampler, texCoords);\n"
+ " vec4 yuv = rgb2yuv * rgb;\n"
+ " yuv = vec4(255.0 - yuv.x, yuv.y, yuv.z, 1.0);\n"
+ " gl_FragColor = yuv2rgb * yuv;\n"
+ "}\n";
+
+static const char fSrcGradient[] =
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES texSampler;\n"
+ "varying vec2 texCoords;\n"
+ "varying float topDown;\n"
+ RGB2YUV_MATRIX
+ YUV2RGB_MATRIX
+ "void main() {\n"
+ " vec4 rgb = texture2D(texSampler, texCoords);\n"
+ " vec4 yuv = rgb2yuv * rgb;\n"
+ " vec4 mixin = vec4(15.0/31.0, 59.0/63.0, 31.0/31.0, 1.0);\n"
+ " vec4 yuv2 = rgb2yuv * vec4((mixin.xyz * topDown), 1);\n"
+ " yuv = vec4(yuv.x, yuv2.y, yuv2.z, 1);\n"
+ " gl_FragColor = yuv2rgb * yuv;\n"
+ "}\n";
+
+namespace android {
+
+NativeWindowRenderer::NativeWindowRenderer(sp<ANativeWindow> nativeWindow,
+ int width, int height)
+ : mNativeWindow(nativeWindow)
+ , mDstWidth(width)
+ , mDstHeight(height)
+ , mLastVideoEffect(-1)
+ , mNextTextureId(100)
+ , mActiveInputs(0)
+ , mThreadCmd(CMD_IDLE) {
+ createThread(threadStart, this);
+}
+
+// The functions below run in the GL thread.
+//
+// All GL-related work is done in this thread, and other threads send
+// requests to it using a command code. We expect that most of the time
+// only one thread will be sending requests, so we simply make the
+// requesting thread wait until the GL thread finishes the request.
+
+int NativeWindowRenderer::threadStart(void* self) {
+ LOGD("create thread");
+ ((NativeWindowRenderer*)self)->glThread();
+ return 0;
+}
+
+void NativeWindowRenderer::glThread() {
+ initializeEGL();
+ createPrograms();
+
+ Mutex::Autolock autoLock(mLock);
+ bool quit = false;
+ while (!quit) {
+ switch (mThreadCmd) {
+ case CMD_IDLE:
+ mCond.wait(mLock);
+ continue;
+ case CMD_RENDER_INPUT:
+ render(mThreadRenderInput);
+ break;
+ case CMD_RESERVE_TEXTURE:
+ glBindTexture(GL_TEXTURE_EXTERNAL_OES, mThreadTextureId);
+ CHECK_GL_ERROR;
+ break;
+ case CMD_DELETE_TEXTURE:
+ glDeleteTextures(1, &mThreadTextureId);
+ break;
+ case CMD_QUIT:
+ terminateEGL();
+ quit = true;
+ break;
+ }
+ // Tell the requester that the command is finished.
+ mThreadCmd = CMD_IDLE;
+ mCond.broadcast();
+ }
+ LOGD("quit");
+}
+
+void NativeWindowRenderer::initializeEGL() {
+ mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+ CHECK_EGL_ERROR;
+
+ EGLint majorVersion;
+ EGLint minorVersion;
+ eglInitialize(mEglDisplay, &majorVersion, &minorVersion);
+ CHECK_EGL_ERROR;
+
+ EGLConfig config;
+ EGLint numConfigs = -1;
+ EGLint configAttribs[] = {
+ EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
+ EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RED_SIZE, 8,
+ EGL_GREEN_SIZE, 8,
+ EGL_BLUE_SIZE, 8,
+ EGL_NONE
+ };
+ eglChooseConfig(mEglDisplay, configAttribs, &config, 1, &numConfigs);
+ CHECK_EGL_ERROR;
+
+ mEglSurface = eglCreateWindowSurface(mEglDisplay, config,
+ mNativeWindow.get(), NULL);
+ CHECK_EGL_ERROR;
+
+ EGLint contextAttribs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
+ mEglContext = eglCreateContext(mEglDisplay, config, EGL_NO_CONTEXT,
+ contextAttribs);
+ CHECK_EGL_ERROR;
+
+ eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext);
+ CHECK_EGL_ERROR;
+}
+
+void NativeWindowRenderer::terminateEGL() {
+ eglDestroyContext(mEglDisplay, mEglContext);
+ eglDestroySurface(mEglDisplay, mEglSurface);
+ eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+ eglTerminate(mEglDisplay);
+}
+
+void NativeWindowRenderer::createPrograms() {
+ GLuint vShader;
+ loadShader(GL_VERTEX_SHADER, vSrcNormal, &vShader);
+
+ const char* fSrc[NUMBER_OF_EFFECTS] = {
+ fSrcNormal, fSrcSepia, fSrcNegative, fSrcGradient
+ };
+
+ for (int i = 0; i < NUMBER_OF_EFFECTS; i++) {
+ GLuint fShader;
+ loadShader(GL_FRAGMENT_SHADER, fSrc[i], &fShader);
+ createProgram(vShader, fShader, &mProgram[i]);
+ glDeleteShader(fShader);
+ CHECK_GL_ERROR;
+ }
+
+ glDeleteShader(vShader);
+ CHECK_GL_ERROR;
+}
+
+void NativeWindowRenderer::createProgram(
+ GLuint vertexShader, GLuint fragmentShader, GLuint* outPgm) {
+
+ GLuint program = glCreateProgram();
+ CHECK_GL_ERROR;
+
+ glAttachShader(program, vertexShader);
+ CHECK_GL_ERROR;
+
+ glAttachShader(program, fragmentShader);
+ CHECK_GL_ERROR;
+
+ glLinkProgram(program);
+ CHECK_GL_ERROR;
+
+ GLint linkStatus = GL_FALSE;
+ glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+ if (linkStatus != GL_TRUE) {
+ GLint infoLen = 0;
+ glGetProgramiv(program, GL_INFO_LOG_LENGTH, &infoLen);
+ if (infoLen) {
+ char* buf = (char*) malloc(infoLen);
+ if (buf) {
+ glGetProgramInfoLog(program, infoLen, NULL, buf);
+ LOGE("Program link log:\n%s\n", buf);
+ free(buf);
+ }
+ }
+ glDeleteProgram(program);
+ program = 0;
+ }
+
+ *outPgm = program;
+}
+
+void NativeWindowRenderer::loadShader(GLenum shaderType, const char* pSource,
+ GLuint* outShader) {
+ GLuint shader = glCreateShader(shaderType);
+ CHECK_GL_ERROR;
+
+ glShaderSource(shader, 1, &pSource, NULL);
+ CHECK_GL_ERROR;
+
+ glCompileShader(shader);
+ CHECK_GL_ERROR;
+
+ GLint compiled = 0;
+ glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+ if (!compiled) {
+ GLint infoLen = 0;
+ glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
+ char* buf = (char*) malloc(infoLen);
+ if (buf) {
+ glGetShaderInfoLog(shader, infoLen, NULL, buf);
+ LOGE("Shader compile log:\n%s\n", buf);
+ free(buf);
+ }
+ glDeleteShader(shader);
+ shader = 0;
+ }
+ *outShader = shader;
+}
+
+NativeWindowRenderer::~NativeWindowRenderer() {
+ CHECK(mActiveInputs == 0);
+ startRequest(CMD_QUIT);
+ sendRequest();
+}
+
+void NativeWindowRenderer::render(RenderInput* input) {
+ sp<SurfaceTexture> ST = input->mST;
+ sp<SurfaceTextureClient> STC = input->mSTC;
+
+ if (input->mIsExternalBuffer) {
+ queueExternalBuffer(STC.get(), input->mBuffer,
+ input->mWidth, input->mHeight);
+ } else {
+ queueInternalBuffer(STC.get(), input->mBuffer);
+ }
+
+ ST->updateTexImage();
+ glClearColor(0, 0, 0, 0);
+ glClear(GL_COLOR_BUFFER_BIT);
+
+ calculatePositionCoordinates(input->mRenderingMode,
+ input->mWidth, input->mHeight);
+
+ const GLfloat textureCoordinates[] = {
+ 0.0f, 1.0f,
+ 0.0f, 0.0f,
+ 1.0f, 0.0f,
+ 1.0f, 1.0f,
+ };
+
+ updateProgramAndHandle(input->mVideoEffect);
+
+ glVertexAttribPointer(mPositionHandle, 2, GL_FLOAT, GL_FALSE, 0,
+ mPositionCoordinates);
+ CHECK_GL_ERROR;
+
+ glEnableVertexAttribArray(mPositionHandle);
+ CHECK_GL_ERROR;
+
+ glVertexAttribPointer(mTexPosHandle, 2, GL_FLOAT, GL_FALSE, 0,
+ textureCoordinates);
+ CHECK_GL_ERROR;
+
+ glEnableVertexAttribArray(mTexPosHandle);
+ CHECK_GL_ERROR;
+
+ GLfloat texMatrix[16];
+ ST->getTransformMatrix(texMatrix);
+ glUniformMatrix4fv(mTexMatrixHandle, 1, GL_FALSE, texMatrix);
+ CHECK_GL_ERROR;
+
+ glBindTexture(GL_TEXTURE_EXTERNAL_OES, input->mTextureId);
+ CHECK_GL_ERROR;
+
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+ glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+ glTexParameteri(
+ GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+ glTexParameteri(
+ GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+ CHECK_GL_ERROR;
+
+ glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
+ CHECK_GL_ERROR;
+
+ eglSwapBuffers(mEglDisplay, mEglSurface);
+}
+
+void NativeWindowRenderer::queueInternalBuffer(ANativeWindow *anw,
+ MediaBuffer* buffer) {
+ int64_t timeUs;
+ CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
+ native_window_set_buffers_timestamp(anw, timeUs * 1000);
+ status_t err = anw->queueBuffer(anw, buffer->graphicBuffer().get());
+ if (err != 0) {
+ LOGE("queueBuffer failed with error %s (%d)", strerror(-err), -err);
+ return;
+ }
+
+ sp<MetaData> metaData = buffer->meta_data();
+ metaData->setInt32(kKeyRendered, 1);
+}
+
+void NativeWindowRenderer::queueExternalBuffer(ANativeWindow* anw,
+ MediaBuffer* buffer, int width, int height) {
+ native_window_set_buffers_geometry(anw, width, height,
+ HAL_PIXEL_FORMAT_YV12);
+ native_window_set_usage(anw, GRALLOC_USAGE_SW_WRITE_OFTEN);
+
+ ANativeWindowBuffer* anb;
+ anw->dequeueBuffer(anw, &anb);
+ CHECK(anb != NULL);
+
+ sp<GraphicBuffer> buf(new GraphicBuffer(anb, false));
+ CHECK(NO_ERROR == anw->lockBuffer(anw, buf->getNativeBuffer()));
+
+ // Copy the buffer
+ uint8_t* img = NULL;
+ buf->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&img));
+ copyYV12Buffer(buffer, img, width, height, buf->getStride());
+ buf->unlock();
+ CHECK(NO_ERROR == anw->queueBuffer(anw, buf->getNativeBuffer()));
+}
+
+void NativeWindowRenderer::copyYV12Buffer(MediaBuffer* src, uint8_t* dst,
+ int srcWidth, int srcHeight, int stride) {
+ int strideUV = (stride / 2 + 0xf) & ~0xf;
+ uint8_t* p = (uint8_t*)src->data() + src->range_offset();
+ // Y
+ for (int i = srcHeight; i > 0; i--) {
+ memcpy(dst, p, srcWidth);
+ dst += stride;
+ p += srcWidth;
+ }
+ // The src is I420, the dst is YV12.
+ // U
+ p += srcWidth * srcHeight / 4;
+ for (int i = srcHeight / 2; i > 0; i--) {
+ memcpy(dst, p, srcWidth / 2);
+ dst += strideUV;
+ p += srcWidth / 2;
+ }
+ // V
+ p -= srcWidth * srcHeight / 2;
+ for (int i = srcHeight / 2; i > 0; i--) {
+ memcpy(dst, p, srcWidth / 2);
+ dst += strideUV;
+ p += srcWidth / 2;
+ }
+}
+
+void NativeWindowRenderer::updateProgramAndHandle(uint32_t videoEffect) {
+ if (mLastVideoEffect == videoEffect) {
+ return;
+ }
+
+ mLastVideoEffect = videoEffect;
+ int i;
+ switch (mLastVideoEffect) {
+ case VIDEO_EFFECT_NONE:
+ i = 0;
+ break;
+ case VIDEO_EFFECT_SEPIA:
+ i = 1;
+ break;
+ case VIDEO_EFFECT_NEGATIVE:
+ i = 2;
+ break;
+ case VIDEO_EFFECT_GRADIENT:
+ i = 3;
+ break;
+ default:
+ i = 0;
+ break;
+ }
+ glUseProgram(mProgram[i]);
+ CHECK_GL_ERROR;
+
+ mPositionHandle = glGetAttribLocation(mProgram[i], "vPosition");
+ mTexPosHandle = glGetAttribLocation(mProgram[i], "vTexPos");
+ mTexMatrixHandle = glGetUniformLocation(mProgram[i], "texMatrix");
+ CHECK_GL_ERROR;
+}
+
+void NativeWindowRenderer::calculatePositionCoordinates(
+ M4xVSS_MediaRendering renderingMode, int srcWidth, int srcHeight) {
+ float x, y;
+ switch (renderingMode) {
+ case M4xVSS_kResizing:
+ default:
+ x = 1;
+ y = 1;
+ break;
+ case M4xVSS_kCropping:
+ x = float(srcWidth) / mDstWidth;
+ y = float(srcHeight) / mDstHeight;
+ // Make the smaller side 1
+ if (x > y) {
+ x /= y;
+ y = 1;
+ } else {
+ y /= x;
+ x = 1;
+ }
+ break;
+ case M4xVSS_kBlackBorders:
+ x = float(srcWidth) / mDstWidth;
+ y = float(srcHeight) / mDstHeight;
+ // Make the larger side 1
+ if (x > y) {
+ y /= x;
+ x = 1;
+ } else {
+ x /= y;
+ y = 1;
+ }
+ break;
+ }
+
+ mPositionCoordinates[0] = -x;
+ mPositionCoordinates[1] = y;
+ mPositionCoordinates[2] = -x;
+ mPositionCoordinates[3] = -y;
+ mPositionCoordinates[4] = x;
+ mPositionCoordinates[5] = -y;
+ mPositionCoordinates[6] = x;
+ mPositionCoordinates[7] = y;
+}
+
+//
+// The functions below run in other threads.
+//
+
+void NativeWindowRenderer::startRequest(int cmd) {
+ mLock.lock();
+ while (mThreadCmd != CMD_IDLE) {
+ mCond.wait(mLock);
+ }
+ mThreadCmd = cmd;
+}
+
+void NativeWindowRenderer::sendRequest() {
+ mCond.broadcast();
+ while (mThreadCmd != CMD_IDLE) {
+ mCond.wait(mLock);
+ }
+ mLock.unlock();
+}
+
+RenderInput* NativeWindowRenderer::createRenderInput() {
+ LOGD("new render input %d", mNextTextureId);
+ RenderInput* input = new RenderInput(this, mNextTextureId);
+
+ startRequest(CMD_RESERVE_TEXTURE);
+ mThreadTextureId = mNextTextureId;
+ sendRequest();
+
+ mNextTextureId++;
+ mActiveInputs++;
+ return input;
+}
+
+void NativeWindowRenderer::destroyRenderInput(RenderInput* input) {
+ LOGD("destroy render input %d", input->mTextureId);
+ GLuint textureId = input->mTextureId;
+ delete input;
+
+ startRequest(CMD_DELETE_TEXTURE);
+ mThreadTextureId = textureId;
+ sendRequest();
+
+ mActiveInputs--;
+}
+
+//
+// RenderInput
+//
+
+RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId)
+ : mRenderer(renderer)
+ , mTextureId(textureId) {
+ mST = new SurfaceTexture(mTextureId);
+ mSTC = new SurfaceTextureClient(mST);
+}
+
+RenderInput::~RenderInput() {
+}
+
+ANativeWindow* RenderInput::getTargetWindow() {
+ return mSTC.get();
+}
+
+void RenderInput::updateVideoSize(sp<MetaData> meta) {
+ CHECK(meta->findInt32(kKeyWidth, &mWidth));
+ CHECK(meta->findInt32(kKeyHeight, &mHeight));
+
+ int left, top, right, bottom;
+ if (meta->findRect(kKeyCropRect, &left, &top, &right, &bottom)) {
+ mWidth = right - left + 1;
+ mHeight = bottom - top + 1;
+ }
+
+ // If the rotation is 90 or 270 degrees, swap width and height
+ // (mWidth and mHeight describe the _rotated_ source rectangle).
+ int32_t rotationDegrees;
+ if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
+ rotationDegrees = 0;
+ }
+
+ if (rotationDegrees == 90 || rotationDegrees == 270) {
+ int tmp = mWidth;
+ mWidth = mHeight;
+ mHeight = tmp;
+ }
+}
+
+void RenderInput::render(MediaBuffer* buffer, uint32_t videoEffect,
+ M4xVSS_MediaRendering renderingMode, bool isExternalBuffer) {
+ mVideoEffect = videoEffect;
+ mRenderingMode = renderingMode;
+ mIsExternalBuffer = isExternalBuffer;
+ mBuffer = buffer;
+
+ mRenderer->startRequest(NativeWindowRenderer::CMD_RENDER_INPUT);
+ mRenderer->mThreadRenderInput = this;
+ mRenderer->sendRequest();
+}
+
+} // namespace android
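
A quick note on the RGB2YUV_MATRIX/YUV2RGB_MATRIX constants above: GLSL's
mat4() constructor is column-major, so each group of four numbers in the
string literals is one column, with the last column holding the constant
offsets. The standalone sketch below (not part of the patch) recomputes the
yuv2rgb entries from the ColorConverter formulas quoted in the comment,
assuming YUV in [0, 255] and RGB in [0, 1] (hence the division by 255).

    #include <cstdio>

    int main() {
        const double s = 1.0 / 255.0;
        // R = 1.164*(Y-16) + 1.596*(V-128)
        // G = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128)
        // B = 1.164*(Y-16) + 2.018*(U-128)
        std::printf("Y coefficient       : %.5f\n", 1.164 * s);     // 0.00456
        std::printf("U coefficients (G,B): %.5f %.5f\n",
                -0.391 * s, 2.018 * s);                             // -0.00153 0.00791
        std::printf("V coefficients (R,G): %.5f %.5f\n",
                1.596 * s, -0.813 * s);                             // 0.00626 -0.00319
        std::printf("constant offsets    : %.5f %.5f %.5f\n",
                -(1.164 * 16 + 1.596 * 128) * s,                    // -0.87416
                (-1.164 * 16 + 0.813 * 128 + 0.391 * 128) * s,      //  0.53133
                -(1.164 * 16 + 2.018 * 128) * s);                   // -1.08599
        return 0;
    }
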
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.h b/libvideoeditor/lvpp/NativeWindowRenderer.h
new file mode 100755
index 0000000..2af19a3
--- /dev/null
+++ b/libvideoeditor/lvpp/NativeWindowRenderer.h
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef NATIVE_WINDOW_RENDERER_H_
+#define NATIVE_WINDOW_RENDERER_H_
+
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+#include <stagefright/MediaBuffer.h>
+#include <stagefright/MetaData.h>
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+#include "M4xVSS_API.h"
+
+// The NativeWindowRenderer draws video frames stored in MediaBuffers to
+// an ANativeWindow. It can apply "rendering mode" and color effects to
+// the frames. "Rendering mode" is the option to do resizing, cropping,
+// or black-bordering when the source and destination aspect ratios are
+// different. Color effects include sepia, negative, and gradient.
+//
+// The input to NativeWindowRenderer is provided by the RenderInput class,
+// and there can be multiple active RenderInputs at the same time, although
+// we only expect that to happen briefly, when one clip is about to finish
+// and the next clip is about to start.
+//
+// We allocate a SurfaceTexture for each RenderInput and the user can use
+// the getTargetWindow() function to get the corresponding ANativeWindow
+// for that SurfaceTexture. The intention is that the user can pass that
+// ANativeWindow to OMXCodec::Create() so the codec can decode directly
+// to buffers provided by the texture.
+
+namespace android {
+
+class SurfaceTexture;
+class SurfaceTextureClient;
+class RenderInput;
+
+class NativeWindowRenderer {
+public:
+ NativeWindowRenderer(sp<ANativeWindow> nativeWindow, int width, int height);
+ ~NativeWindowRenderer();
+
+ RenderInput* createRenderInput();
+ void destroyRenderInput(RenderInput* input);
+
+private:
+ // No copy constructor and assignment
+ NativeWindowRenderer(const NativeWindowRenderer &);
+ NativeWindowRenderer &operator=(const NativeWindowRenderer &);
+
+ // Initialization and finalization
+ void initializeEGL();
+ void terminateEGL();
+ void createPrograms();
+ void createProgram(
+ GLuint vertexShader, GLuint fragmentShader, GLuint* outPgm);
+ void loadShader(
+ GLenum shaderType, const char* pSource, GLuint* outShader);
+
+ // These functions are executed every frame.
+ void render(RenderInput* input);
+ void queueInternalBuffer(ANativeWindow* anw, MediaBuffer* buffer);
+ void queueExternalBuffer(ANativeWindow* anw, MediaBuffer* buffer,
+ int width, int height);
+ void copyYV12Buffer(MediaBuffer* src, uint8_t* dst,
+ int srcWidth, int srcHeight, int stride);
+ void updateProgramAndHandle(uint32_t videoEffect);
+ void calculatePositionCoordinates(M4xVSS_MediaRendering renderingMode,
+ int srcWidth, int srcHeight);
+
+ // These variables are initialized once and don't change afterwards.
+ sp<ANativeWindow> mNativeWindow;
+ int mDstWidth, mDstHeight;
+ EGLDisplay mEglDisplay;
+ EGLSurface mEglSurface;
+ EGLContext mEglContext;
+ enum {
+ EFFECT_NORMAL,
+ EFFECT_SEPIA,
+ EFFECT_NEGATIVE,
+ EFFECT_GRADIENT,
+ NUMBER_OF_EFFECTS
+ };
+ GLuint mProgram[NUMBER_OF_EFFECTS];
+
+ // We use one shader program for each effect. mLastVideoEffect remembers
+ // the program used for the last frame. When the effect changes, we
+ // switch to the corresponding program and update the handles.
+ uint32_t mLastVideoEffect;
+ GLint mPositionHandle;
+ GLint mTexPosHandle;
+ GLint mTexMatrixHandle;
+
+ // These are the vertex coordinates used for the frame texture. They are
+ // calculated according to the rendering mode and the source and
+ // destination aspect ratios.
+ GLfloat mPositionCoordinates[8];
+
+ // We use a different GL id for each SurfaceTexture.
+ GLuint mNextTextureId;
+
+ // Number of existing RenderInputs, just for debugging.
+ int mActiveInputs;
+
+ // The GL thread functions
+ static int threadStart(void* self);
+ void glThread();
+
+ // These variables are used to communicate between the GL thread and
+ // other threads.
+ Mutex mLock;
+ Condition mCond;
+ enum {
+ CMD_IDLE,
+ CMD_RENDER_INPUT,
+ CMD_RESERVE_TEXTURE,
+ CMD_DELETE_TEXTURE,
+ CMD_QUIT,
+ };
+ int mThreadCmd;
+ RenderInput* mThreadRenderInput;
+ GLuint mThreadTextureId;
+
+ // These functions are used to send commands to the GL thread.
+ // sendRequest() also waits until the GL thread acknowledges that the
+ // command is finished.
+ void startRequest(int cmd);
+ void sendRequest();
+
+ friend class RenderInput;
+};
+
+class RenderInput {
+public:
+ // Returns the ANativeWindow corresponding to the SurfaceTexture.
+ ANativeWindow* getTargetWindow();
+
+ // Updates video frame size from the MediaSource's metadata. Specifically
+ // we look for kKeyWidth, kKeyHeight, and (optionally) kKeyCropRect.
+ void updateVideoSize(sp<MetaData> meta);
+
+ // Renders the buffer with the given video effect and rendering mode.
+ // The video effects are defined in VideoEditorTools.h.
+ // Set isExternalBuffer to true only when the buffer given is not
+ // provided by the SurfaceTexture.
+ void render(MediaBuffer *buffer, uint32_t videoEffect,
+ M4xVSS_MediaRendering renderingMode, bool isExternalBuffer);
+private:
+ RenderInput(NativeWindowRenderer* renderer, GLuint textureId);
+ ~RenderInput();
+ NativeWindowRenderer* mRenderer;
+ GLuint mTextureId;
+ sp<SurfaceTexture> mST;
+ sp<SurfaceTextureClient> mSTC;
+ int mWidth, mHeight;
+
+ // These are only valid during render() calls
+ uint32_t mVideoEffect;
+ M4xVSS_MediaRendering mRenderingMode;
+ bool mIsExternalBuffer;
+ MediaBuffer* mBuffer;
+
+ friend class NativeWindowRenderer;
+};
+
+} // namespace android
+
+#endif // NATIVE_WINDOW_RENDERER_H_
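
The mLock/mCond/mThreadCmd members above implement a small one-command-at-a-time
handshake: a caller waits until the command slot is idle, writes its command,
wakes the GL thread, and then waits again until the GL thread marks the slot
idle to signal completion. Below is a minimal standalone sketch of the same
pattern (using std::thread primitives rather than Android's Mutex/Condition,
and not part of the patch).

    #include <condition_variable>
    #include <cstdio>
    #include <mutex>
    #include <thread>

    enum Cmd { CMD_IDLE, CMD_WORK, CMD_QUIT };

    static std::mutex gLock;
    static std::condition_variable gCond;
    static int gCmd = CMD_IDLE;

    static void workerThread() {                       // plays the role of glThread()
        std::unique_lock<std::mutex> lock(gLock);
        for (bool quit = false; !quit; ) {
            while (gCmd == CMD_IDLE) gCond.wait(lock); // wait for a request
            if (gCmd == CMD_WORK) std::printf("rendering one frame\n");
            else if (gCmd == CMD_QUIT) quit = true;
            gCmd = CMD_IDLE;                           // tell the requester we are done
            gCond.notify_all();
        }
    }

    static void sendCommand(int cmd) {                 // startRequest() + sendRequest()
        std::unique_lock<std::mutex> lock(gLock);
        while (gCmd != CMD_IDLE) gCond.wait(lock);     // wait for the slot to be free
        gCmd = cmd;
        gCond.notify_all();                            // wake the worker
        while (gCmd != CMD_IDLE) gCond.wait(lock);     // wait for completion
    }

    int main() {
        std::thread worker(workerThread);
        sendCommand(CMD_WORK);
        sendCommand(CMD_QUIT);
        worker.join();
        return 0;
    }

Because the command slot doubles as the busy flag, at most one request can be
outstanding, which matches the expectation stated in NativeWindowRenderer.cpp
that normally only one thread sends requests at a time.
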
diff --git a/libvideoeditor/lvpp/PreviewPlayer.cpp b/libvideoeditor/lvpp/PreviewPlayer.cpp
index 78ca1cd..b63d0d2 100755
--- a/libvideoeditor/lvpp/PreviewPlayer.cpp
+++ b/libvideoeditor/lvpp/PreviewPlayer.cpp
@@ -30,8 +30,6 @@
#include "include/ThrottledSource.h"
-#include "PreviewRenderer.h"
-
#include <binder/IPCThreadState.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
@@ -72,13 +70,12 @@ private:
PreviewPlayerEvent &operator=(const PreviewPlayerEvent &);
};
-PreviewPlayer::PreviewPlayer()
+PreviewPlayer::PreviewPlayer(NativeWindowRenderer* renderer)
: PreviewPlayerBase(),
- mCurrFramingEffectIndex(0) ,
- mReportedWidth(0),
- mReportedHeight(0),
+ mNativeWindowRenderer(renderer),
+ mCurrFramingEffectIndex(0),
mFrameRGBBuffer(NULL),
- mFrameYUVBuffer(NULL){
+ mFrameYUVBuffer(NULL) {
mVideoRenderer = NULL;
mEffectsSettings = NULL;
@@ -115,8 +112,6 @@ PreviewPlayer::PreviewPlayer()
mProgressCbEventPending = false;
mOverlayUpdateEventPending = false;
- mResizedVideoBuffer = NULL;
- mVideoResizedOrCropped = false;
mRenderingMode = (M4xVSS_MediaRendering)MEDIA_RENDERING_INVALID;
mIsFiftiesEffectStarted = false;
reset();
@@ -130,12 +125,9 @@ PreviewPlayer::~PreviewPlayer() {
reset();
- if(mResizedVideoBuffer != NULL) {
- free((mResizedVideoBuffer->data()));
- mResizedVideoBuffer = NULL;
+ if (mVideoRenderer) {
+ mNativeWindowRenderer->destroyRenderInput(mVideoRenderer);
}
-
- delete mVideoRenderer;
}
void PreviewPlayer::cancelPlayerEvents(bool keepBufferingGoing) {
@@ -249,9 +241,8 @@ status_t PreviewPlayer::setDataSource_l_jpg() {
mVideoSource = DummyVideoSource::Create(mVideoWidth, mVideoHeight,
mDurationUs, mUri);
- mReportedWidth = mVideoWidth;
- mReportedHeight = mVideoHeight;
+ updateSizeToRender(mVideoSource->getFormat());
setVideoSource(mVideoSource);
status_t err1 = mVideoSource->start();
if (err1 != OK) {
@@ -650,33 +641,11 @@ status_t PreviewPlayer::play_l() {
status_t PreviewPlayer::initRenderer_l() {
if (mSurface != NULL) {
- sp<MetaData> meta = mVideoSource->getFormat();
-
- const char *component;
- CHECK(meta->findCString(kKeyDecoderComponent, &component));
-
- // Must ensure that mVideoRenderer's destructor is actually executed
- // before creating a new one.
- IPCThreadState::self()->flushCommands();
-
- // always use localrenderer since decoded buffers are modified
- // by postprocessing module
- // Other decoders are instantiated locally and as a consequence
- // allocate their buffers in local address space.
if(mVideoRenderer == NULL) {
-
- mVideoRenderer = PreviewRenderer::CreatePreviewRenderer(
- OMX_COLOR_FormatYUV420Planar,
- mSurface,
- mOutputVideoWidth, mOutputVideoHeight,
- mOutputVideoWidth, mOutputVideoHeight,
- 0);
-
- if ( mVideoRenderer == NULL )
- {
- return UNKNOWN_ERROR;
+ mVideoRenderer = mNativeWindowRenderer->createRenderInput();
+ if (mVideoSource != NULL) {
+ updateSizeToRender(mVideoSource->getFormat());
}
- return OK;
}
}
return OK;
@@ -756,11 +725,18 @@ status_t PreviewPlayer::initAudioDecoder() {
status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
+ initRenderer_l();
+
+ if (mVideoRenderer == NULL) {
+ LOGE("Cannot create renderer");
+ return UNKNOWN_ERROR;
+ }
+
mVideoSource = OMXCodec::Create(
mClient.interface(), mVideoTrack->getFormat(),
false,
mVideoTrack,
- NULL, flags);
+ NULL, flags, mVideoRenderer->getTargetWindow());
if (mVideoSource != NULL) {
int64_t durationUs;
@@ -771,9 +747,7 @@ status_t PreviewPlayer::initVideoDecoder(uint32_t flags) {
}
}
- getVideoBufferSize(mVideoTrack->getFormat(), &mVideoWidth, &mVideoHeight);
- mReportedWidth = mVideoWidth;
- mReportedHeight = mVideoHeight;
+ updateSizeToRender(mVideoTrack->getFormat());
status_t err = mVideoSource->start();
@@ -838,7 +812,7 @@ void PreviewPlayer::onVideoEvent() {
mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
}
for (;;) {
- status_t err = readYV12Buffer(mVideoSource, &mVideoBuffer, &options);
+ status_t err = mVideoSource->read(&mVideoBuffer, &options);
options.clearSeekTo();
if (err != OK) {
@@ -847,8 +821,6 @@ void PreviewPlayer::onVideoEvent() {
if (err == INFO_FORMAT_CHANGED) {
LOGV("LV PLAYER VideoSource signalled format change");
notifyVideoSize_l();
- sp<MetaData> meta = mVideoSource->getFormat();
- getVideoBufferSize(meta, &mReportedWidth, &mReportedHeight);
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
@@ -858,6 +830,8 @@ void PreviewPlayer::onVideoEvent() {
}
}
+
+ updateSizeToRender(mVideoSource->getFormat());
continue;
}
// So video playback is complete, but we may still have
@@ -1081,28 +1055,9 @@ void PreviewPlayer::onVideoEvent() {
postOverlayUpdateEvent_l();
}
-
- if (mCurrentVideoEffect != VIDEO_EFFECT_NONE) {
- err1 = doVideoPostProcessing();
- if(err1 != M4NO_ERROR) {
- LOGE("doVideoPostProcessing returned err");
- }
- }
- else {
- if(mRenderingMode != MEDIA_RENDERING_INVALID) {
- // No effects to be applied, but media rendering to be done
- err1 = doMediaRendering();
- if(err1 != M4NO_ERROR) {
- LOGE("doMediaRendering returned err");
- //Use original mVideoBuffer for rendering
- mVideoResizedOrCropped = false;
- }
- }
- }
-
if (mVideoRenderer != NULL) {
- LOGV("mVideoRenderer CALL render()");
- mVideoRenderer->renderYV12();
+ mVideoRenderer->render(mVideoBuffer, mCurrentVideoEffect,
+ mRenderingMode, mIsVideoSourceJpg);
}
mVideoBuffer->release();
@@ -1362,9 +1317,6 @@ status_t PreviewPlayer::setMediaRenderingMode(
mRenderingMode = mode;
- /* reset boolean for each clip*/
- mVideoResizedOrCropped = false;
-
status_t err = OK;
/* get the video width and height by resolution */
err = getVideoSizeByResolution(outputVideoSize,
@@ -1373,56 +1325,6 @@ status_t PreviewPlayer::setMediaRenderingMode(
return err;
}
-M4OSA_ERR PreviewPlayer::doMediaRendering() {
- M4OSA_ERR err = M4NO_ERROR;
- M4VIFI_ImagePlane planeIn[3], planeOut[3];
- M4VIFI_UInt8 *inBuffer = M4OSA_NULL, *finalOutputBuffer = M4OSA_NULL;
- M4VIFI_UInt8 *tempOutputBuffer= M4OSA_NULL;
- size_t videoBufferSize = 0;
- M4OSA_UInt32 frameSize = 0, i=0, index =0, nFrameCount =0, bufferOffset =0;
-
- videoBufferSize = mVideoBuffer->size();
- frameSize = (mVideoWidth*mVideoHeight*3) >> 1;
-
- uint8_t* outBuffer;
- size_t outBufferStride = 0;
-
- mVideoRenderer->getBufferYV12(&outBuffer, &outBufferStride);
-
- bufferOffset = index*frameSize;
- inBuffer = (M4OSA_UInt8 *)mVideoBuffer->data()+
- mVideoBuffer->range_offset()+bufferOffset;
-
-
- /* In plane*/
- prepareYUV420ImagePlane(planeIn, mVideoWidth,
- mVideoHeight, (M4VIFI_UInt8 *)inBuffer, mReportedWidth, mReportedHeight);
-
- // Set the output YUV420 plane to be compatible with YV12 format
- // W & H even
- // YVU instead of YUV
- // align buffers on 32 bits
-
- //In YV12 format, sizes must be even
- M4OSA_UInt32 yv12PlaneWidth = ((mOutputVideoWidth +1)>>1)<<1;
- M4OSA_UInt32 yv12PlaneHeight = ((mOutputVideoHeight+1)>>1)<<1;
-
- prepareYV12ImagePlane(planeOut, yv12PlaneWidth, yv12PlaneHeight,
- (M4OSA_UInt32)outBufferStride, (M4VIFI_UInt8 *)outBuffer);
-
-
- err = applyRenderingMode(planeIn, planeOut, mRenderingMode);
-
- if(err != M4NO_ERROR)
- {
- LOGE("doMediaRendering: applyRenderingMode returned err=0x%x", (int)err);
- return err;
- }
- mVideoResizedOrCropped = true;
-
- return err;
-}
-
status_t PreviewPlayer::resetJniCallbackTimeStamp() {
mDecVideoTsStoryBoard = mStoryboardStartTimeMsec*1000LL;
@@ -1561,41 +1463,6 @@ status_t PreviewPlayer::setImageClipProperties(uint32_t width,uint32_t height) {
return OK;
}
-
-M4OSA_ERR PreviewPlayer::doVideoPostProcessing() {
- M4OSA_ERR err = M4NO_ERROR;
- vePostProcessParams postProcessParams;
-
- postProcessParams.vidBuffer = (M4VIFI_UInt8*)mVideoBuffer->data()
- + mVideoBuffer->range_offset();
-
- postProcessParams.videoWidth = mVideoWidth;
- postProcessParams.videoHeight = mVideoHeight;
- postProcessParams.timeMs = mDecodedVideoTs/1000;
- postProcessParams.timeOffset = mDecVideoTsStoryBoard/1000;
- postProcessParams.effectsSettings = mEffectsSettings;
- postProcessParams.numberEffects = mNumberEffects;
- postProcessParams.outVideoWidth = mOutputVideoWidth;
- postProcessParams.outVideoHeight = mOutputVideoHeight;
- postProcessParams.currentVideoEffect = mCurrentVideoEffect;
- postProcessParams.renderingMode = mRenderingMode;
- if(mIsFiftiesEffectStarted == M4OSA_TRUE) {
- postProcessParams.isFiftiesEffectStarted = M4OSA_TRUE;
- mIsFiftiesEffectStarted = M4OSA_FALSE;
- }
- else {
- postProcessParams.isFiftiesEffectStarted = M4OSA_FALSE;
- }
-
- postProcessParams.overlayFrameRGBBuffer = mFrameRGBBuffer;
- postProcessParams.overlayFrameYUVBuffer = mFrameYUVBuffer;
- mVideoRenderer->getBufferYV12(&(postProcessParams.pOutBuffer),
- &(postProcessParams.outBufferStride));
- err = applyEffectsAndRenderingMode(&postProcessParams, mReportedWidth, mReportedHeight);
-
- return err;
-}
-
status_t PreviewPlayer::readFirstVideoFrame() {
LOGV("PreviewPlayer::readFirstVideoFrame");
@@ -1609,7 +1476,7 @@ status_t PreviewPlayer::readFirstVideoFrame() {
mSeekTimeUs, MediaSource::ReadOptions::SEEK_CLOSEST);
}
for (;;) {
- status_t err = readYV12Buffer(mVideoSource, &mVideoBuffer, &options);
+ status_t err = mVideoSource->read(&mVideoBuffer, &options);
options.clearSeekTo();
if (err != OK) {
@@ -1618,8 +1485,6 @@ status_t PreviewPlayer::readFirstVideoFrame() {
if (err == INFO_FORMAT_CHANGED) {
LOGV("LV PLAYER VideoSource signalled format change");
notifyVideoSize_l();
- sp<MetaData> meta = mVideoSource->getFormat();
- getVideoBufferSize(meta, &mReportedWidth, &mReportedHeight);
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
@@ -1628,6 +1493,8 @@ status_t PreviewPlayer::readFirstVideoFrame() {
postStreamDoneEvent_l(err);
}
}
+
+ updateSizeToRender(mVideoSource->getFormat());
continue;
}
LOGV("PreviewPlayer: onVideoEvent EOS reached.");
@@ -1688,4 +1555,10 @@ status_t PreviewPlayer::getLastRenderedTimeMs(uint32_t *lastRenderedTimeMs) {
return OK;
}
+void PreviewPlayer::updateSizeToRender(sp<MetaData> meta) {
+ if (mVideoRenderer) {
+ mVideoRenderer->updateVideoSize(meta);
+ }
+}
+
} // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayer.h b/libvideoeditor/lvpp/PreviewPlayer.h
index fd2e6d2..c6fb62b 100755
--- a/libvideoeditor/lvpp/PreviewPlayer.h
+++ b/libvideoeditor/lvpp/PreviewPlayer.h
@@ -28,6 +28,7 @@
#include <utils/threads.h>
#include "PreviewPlayerBase.h"
#include "VideoEditorPreviewController.h"
+#include "NativeWindowRenderer.h"
namespace android {
@@ -38,7 +39,7 @@ struct MediaExtractor;
struct MediaSource;
struct PreviewPlayer : public PreviewPlayerBase {
- PreviewPlayer();
+ PreviewPlayer(NativeWindowRenderer* renderer);
~PreviewPlayer();
//Override baseclass methods
@@ -114,7 +115,8 @@ private:
status_t startAudioPlayer_l();
bool mIsChangeSourceRequired;
- PreviewRenderer *mVideoRenderer;
+ NativeWindowRenderer *mNativeWindowRenderer;
+ RenderInput *mVideoRenderer;
int32_t mVideoWidth, mVideoHeight;
@@ -141,15 +143,10 @@ private:
bool mOverlayUpdateEventPending;
bool mOverlayUpdateEventPosted;
- MediaBuffer *mResizedVideoBuffer;
- bool mVideoResizedOrCropped;
M4xVSS_MediaRendering mRenderingMode;
uint32_t mOutputVideoWidth;
uint32_t mOutputVideoHeight;
- int32_t mReportedWidth; //docoder reported width
- int32_t mReportedHeight; //docoder reported height
-
uint32_t mStoryboardStartTimeMsec;
bool mIsVideoSourceJpg;
@@ -163,8 +160,6 @@ private:
void setVideoPostProcessingNode(
M4VSS3GPP_VideoEffectType type, M4OSA_Bool enable);
- M4OSA_ERR doVideoPostProcessing();
- M4OSA_ERR doMediaRendering();
void postProgressCallbackEvent_l();
void onProgressCbEvent();
@@ -176,6 +171,8 @@ private:
status_t prepare_l();
status_t prepareAsync_l();
+ void updateSizeToRender(sp<MetaData> meta);
+
VideoEditorAudioPlayer *mVeAudioPlayer;
PreviewPlayer(const PreviewPlayer &);
diff --git a/libvideoeditor/lvpp/PreviewPlayerBase.cpp b/libvideoeditor/lvpp/PreviewPlayerBase.cpp
index e890423..3f75fb9 100644
--- a/libvideoeditor/lvpp/PreviewPlayerBase.cpp
+++ b/libvideoeditor/lvpp/PreviewPlayerBase.cpp
@@ -204,13 +204,6 @@ PreviewPlayerBase::PreviewPlayerBase()
mAudioStatusEventPending = false;
- mYV12ColorConverter = new YV12ColorConverter();
- if (!mYV12ColorConverter->isLoaded() ||
- mYV12ColorConverter->getDecoderOutputFormat() == OMX_COLOR_FormatYUV420Planar) {
- delete mYV12ColorConverter;
- mYV12ColorConverter = NULL;
- }
-
reset();
}
@@ -219,8 +212,6 @@ PreviewPlayerBase::~PreviewPlayerBase() {
mQueue.stop();
}
- delete mYV12ColorConverter;
-
reset();
mClient.disconnect();
@@ -1411,7 +1402,7 @@ void PreviewPlayerBase::onVideoEvent() {
: MediaSource::ReadOptions::SEEK_CLOSEST_SYNC);
}
for (;;) {
- status_t err = readYV12Buffer(mVideoSource, &mVideoBuffer, &options);
+ status_t err = mVideoSource->read(&mVideoBuffer, &options);
options.clearSeekTo();
if (err != OK) {
@@ -1929,58 +1920,4 @@ status_t PreviewPlayerBase::getParameter(int key, Parcel *reply) {
return OK;
}
-status_t PreviewPlayerBase::readYV12Buffer(sp<MediaSource> source, MediaBuffer **buffer,
- const MediaSource::ReadOptions *options) {
- status_t result = source->read(buffer, options);
- if (mYV12ColorConverter == NULL || *buffer == NULL) {
- return result;
- }
-
- int width = mCropRect.right - mCropRect.left + 1;
- int height = mCropRect.bottom - mCropRect.top + 1;
-
- MediaBuffer *origBuffer = *buffer;
- MediaBuffer *newBuffer = new MediaBuffer(width * height * 3 / 2);
-
- LOGV("convertDecoderOutputToYV12: mGivenWidth = %d, mGivenHeight = %d",
- mGivenWidth, mGivenHeight);
- LOGV("width = %d, height = %d", width, height);
-
- if (mYV12ColorConverter->convertDecoderOutputToYV12(
- (uint8_t *)origBuffer->data(), // ?? + origBuffer->range_offset(), // decoderBits
- mGivenWidth, // decoderWidth
- mGivenHeight, // decoderHeight
- mCropRect, // decoderRect
- (uint8_t *)newBuffer->data() + newBuffer->range_offset() /* dstBits */) < 0) {
- LOGE("convertDecoderOutputToYV12 failed");
- }
-
- // Copy the timestamp
- int64_t timeUs;
- CHECK(origBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
- newBuffer->meta_data()->setInt64(kKeyTime, timeUs);
-
- origBuffer->release();
- *buffer = newBuffer;
-
- return result;
-}
-
-void PreviewPlayerBase::getVideoBufferSize(sp<MetaData> meta, int* width, int* height) {
- if (mYV12ColorConverter) {
- int32_t cropLeft, cropTop, cropRight, cropBottom;
- if (meta->findRect(
- kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) {
- *width = cropRight - cropLeft + 1;
- *height = cropBottom - cropTop + 1;
- } else {
- CHECK(meta->findInt32(kKeyWidth, width));
- CHECK(meta->findInt32(kKeyHeight, height));
- }
- } else {
- CHECK(meta->findInt32(kKeyWidth, width));
- CHECK(meta->findInt32(kKeyHeight, height));
- }
-}
-
} // namespace android
diff --git a/libvideoeditor/lvpp/PreviewPlayerBase.h b/libvideoeditor/lvpp/PreviewPlayerBase.h
index c094e25..954d8fc 100644
--- a/libvideoeditor/lvpp/PreviewPlayerBase.h
+++ b/libvideoeditor/lvpp/PreviewPlayerBase.h
@@ -28,7 +28,6 @@
#include <media/stagefright/TimeSource.h>
#include <utils/threads.h>
#include <drm/DrmManagerClient.h>
-#include <YV12ColorConverter.h>
namespace android {
@@ -101,11 +100,6 @@ struct PreviewPlayerBase {
void postAudioEOS(int64_t delayUs = 0ll);
void postAudioSeekComplete();
-protected:
- status_t readYV12Buffer(sp<MediaSource> source, MediaBuffer **buffer,
- const MediaSource::ReadOptions *options);
- void getVideoBufferSize(sp<MetaData> meta, int* width, int* height);
-
private:
friend struct AwesomeEvent;
friend struct PreviewPlayer;
@@ -232,7 +226,6 @@ private:
ARect mCropRect;
int32_t mGivenWidth, mGivenHeight;
- YV12ColorConverter *mYV12ColorConverter;
status_t setDataSource_l(
const char *uri,
diff --git a/libvideoeditor/lvpp/PreviewRenderer.cpp b/libvideoeditor/lvpp/PreviewRenderer.cpp
index eadba57..9ca5c7a 100755
--- a/libvideoeditor/lvpp/PreviewRenderer.cpp
+++ b/libvideoeditor/lvpp/PreviewRenderer.cpp
@@ -200,98 +200,4 @@ void PreviewRenderer::renderYV12() {
LOGV("renderYV12() END");
}
-
-
-//
-// Display the given data buffer
-// platformPrivate is not used (kept for backwrad compatibility)
-// Please rather use getbuffer() and the other render()functions (with no params)
-// for optimal display
-//
-void PreviewRenderer::render(
- const void *data, size_t size, void *platformPrivate) {
- ANativeWindowBuffer *buf;
- int err;
-
- if ((err = mSurface->ANativeWindow::dequeueBuffer(mSurface.get(), &buf)) != 0) {
- LOGW("Surface::dequeueBuffer returned error %d", err);
- return;
- }
-
- CHECK_EQ(0, mSurface->ANativeWindow::lockBuffer(mSurface.get(), buf));
-
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
-
- Rect bounds(mDecodedWidth, mDecodedHeight);
-
- void *dst;
- CHECK_EQ(0, mapper.lock(
- buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
- LOGV("Buffer locked");
-
- if (mConverter) {
- LOGV("Convert to RGB565");
- mConverter->convert(data,
- mDecodedWidth, mDecodedHeight,
- 0,0,mDecodedWidth, mDecodedHeight,
- dst, mDecodedWidth, mDecodedHeight,
- 0,0,mDecodedWidth, mDecodedHeight);
- } else if (mYUVMode == None) {
- // Input and output are both YUV420sp, but the alignment requirements
- // are different.
- LOGV("mYUVMode == None %d x %d", mDecodedWidth, mDecodedHeight);
- size_t srcYStride = mDecodedWidth;
- const uint8_t *srcY = (const uint8_t *)data;
- uint8_t *dstY = (uint8_t *)dst;
- LOGV("srcY = %p dstY = %p", srcY, dstY);
- LOGV("srcYStride = %d dstYstride = %d", srcYStride, buf->stride);
- for (size_t i = 0; i < mDecodedHeight; ++i) {
- memcpy(dstY, srcY, mDecodedWidth);
- srcY += srcYStride;
- dstY += buf->stride;
- }
-
- size_t srcUVStride = (mDecodedWidth + 1) / 2;
- size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32);
- LOGV("srcUVStride = %d dstUVStride = %d", srcUVStride, dstUVStride);
-
- // Copy V
- // Source buffer is YUV, skip U
- const uint8_t *srcV = (const uint8_t *)data
- + mDecodedHeight * mDecodedWidth + (mDecodedHeight * mDecodedWidth)/4;
- // Destination buffer is YVU
- uint8_t *dstUV = (uint8_t *)dst
- + buf->stride*mDecodedHeight;
- LOGV("srcV = %p dstUV = %p", srcV, dstUV);
- for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {
- memcpy(dstUV, srcV, mDecodedWidth/2);
- srcV += srcUVStride;
- dstUV += dstUVStride;
- }
-
-
- // Copy V
- // Source buffer is YUV, go back to end of Y
- const uint8_t *srcU = (const uint8_t *)data
- + mDecodedHeight * mDecodedWidth ;
- // Destination buffer is YVU
- // Keep writing after V buffer has been filled, U follows immediately
- LOGV("srcU = %p dstUV = %p", srcU, dstUV);
- for (size_t i = 0; i < (mDecodedHeight+1)/2; ++i) {
- memcpy(dstUV, srcU, mDecodedWidth/2);
- srcU += srcUVStride;
- dstUV += dstUVStride;
- }
- } else {
- memcpy(dst, data, size);
- }
-
- CHECK_EQ(0, mapper.unlock(buf->handle));
-
- if ((err = mSurface->ANativeWindow::queueBuffer(mSurface.get(), buf)) != 0) {
- LOGW("Surface::queueBuffer returned error %d", err);
- }
- buf = NULL;
-}
-
} // namespace android
diff --git a/libvideoeditor/lvpp/PreviewRenderer.h b/libvideoeditor/lvpp/PreviewRenderer.h
index b215f6d..430fd9f 100755
--- a/libvideoeditor/lvpp/PreviewRenderer.h
+++ b/libvideoeditor/lvpp/PreviewRenderer.h
@@ -40,9 +40,6 @@ static PreviewRenderer* CreatePreviewRenderer (OMX_COLOR_FORMATTYPE colorFormat,
~PreviewRenderer();
- void render(
- const void *data, size_t size, void *platformPrivate);
-
void getBufferYV12(uint8_t **data, size_t *stride);
void renderYV12();
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.cpp b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
index 1a298c6..90a5821 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.cpp
@@ -28,8 +28,8 @@
namespace android {
-VideoEditorPlayer::VideoEditorPlayer()
- : mPlayer(new PreviewPlayer) {
+VideoEditorPlayer::VideoEditorPlayer(NativeWindowRenderer* renderer)
+ : mPlayer(new PreviewPlayer(renderer)) {
LOGV("VideoEditorPlayer");
mPlayer->setListener(this);
diff --git a/libvideoeditor/lvpp/VideoEditorPlayer.h b/libvideoeditor/lvpp/VideoEditorPlayer.h
index d1df0e0..6b10b36 100755
--- a/libvideoeditor/lvpp/VideoEditorPlayer.h
+++ b/libvideoeditor/lvpp/VideoEditorPlayer.h
@@ -23,6 +23,7 @@
#include "VideoEditorMain.h"
#include "VideoEditorTools.h"
#include "VideoEditorAudioPlayer.h"
+#include "NativeWindowRenderer.h"
namespace android {
@@ -87,7 +88,7 @@ class VideoEditorPlayer : public MediaPlayerInterface {
};
public:
- VideoEditorPlayer();
+ VideoEditorPlayer(NativeWindowRenderer* renderer);
virtual ~VideoEditorPlayer();
virtual status_t initCheck();
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
index f99ca79..a17015e 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.cpp
@@ -460,6 +460,11 @@ M4OSA_ERR VideoEditorPreviewController::startPreview(
mVEAudioPlayer->setAudioMixSettings(mBackgroundAudioSetting);
mVEAudioPlayer->setAudioMixPCMFileHandle(mAudioMixPCMFileHandle);
+ // Create Video Renderer to be used for the entire storyboard duration.
+ uint32_t width, height;
+ getVideoSizeByResolution(mOutputVideoSize, &width, &height);
+ mNativeWindowRenderer = new NativeWindowRenderer(mSurface, width, height);
+
LOGV("startPreview: loop = %d", loop);
mPreviewLooping = loop;
@@ -467,7 +472,7 @@ M4OSA_ERR VideoEditorPreviewController::startPreview(
mCallBackAfterFrameCnt = callBackAfterFrameCount;
for (int playerInst=0; playerInst<NBPLAYER_INSTANCES; playerInst++) {
- mVePlayer[playerInst] = new VideoEditorPlayer();
+ mVePlayer[playerInst] = new VideoEditorPlayer(mNativeWindowRenderer);
if(mVePlayer[playerInst] == NULL) {
LOGE("startPreview:Error creating VideoEditorPlayer %d",playerInst);
return M4ERR_ALLOC;
@@ -691,6 +696,9 @@ M4OSA_UInt32 VideoEditorPreviewController::stopPreview() {
mVEAudioPlayer = NULL;
}
+ delete mNativeWindowRenderer;
+ mNativeWindowRenderer = NULL;
+
// If image file playing, then free the buffer pointer
if(mFrameStr.pBuffer != M4OSA_NULL) {
free(mFrameStr.pBuffer);
diff --git a/libvideoeditor/lvpp/VideoEditorPreviewController.h b/libvideoeditor/lvpp/VideoEditorPreviewController.h
index 01fc65b..b4537ec 100755
--- a/libvideoeditor/lvpp/VideoEditorPreviewController.h
+++ b/libvideoeditor/lvpp/VideoEditorPreviewController.h
@@ -132,6 +132,7 @@ private:
sp<VideoEditorPlayer::VeAudioOutput> mVEAudioSink;
VideoEditorAudioPlayer *mVEAudioPlayer;
+ NativeWindowRenderer* mNativeWindowRenderer;
M4VIFI_UInt8* mFrameRGBBuffer;
M4VIFI_UInt8* mFrameYUVBuffer;