author     Huahui Wu <hwu@google.com>  2011-05-18 15:02:16 -0700
committer  Huahui Wu <hwu@google.com>  2011-05-23 15:34:29 -0700
commit     ea0bad0574451212591841ba84f477ecc216003a (patch)
tree       36475017926bf2941afab8a871f56223fa789e26 /media/tests/MediaDump/src/com/android
parent     560e97f8e0cb63a0fa6e88db6badc142a99517d2 (diff)
b/4452171 Dumping video playback to files.

MediaDump: a tool app that dumps video playback into raw files, plus a viewer to display the dumped files.

Change-Id: I7bf116e38bb1f9e85d5a1680ae92b5b72bc10ea8
Diffstat (limited to 'media/tests/MediaDump/src/com/android')
-rw-r--r--  media/tests/MediaDump/src/com/android/mediadump/MediaDump.java  60
-rw-r--r--  media/tests/MediaDump/src/com/android/mediadump/RgbPlayerActivity.java  249
-rw-r--r--  media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java  88
-rw-r--r--  media/tests/MediaDump/src/com/android/mediadump/VideoDumpView.java  650
4 files changed, 1047 insertions, 0 deletions
diff --git a/media/tests/MediaDump/src/com/android/mediadump/MediaDump.java b/media/tests/MediaDump/src/com/android/mediadump/MediaDump.java
new file mode 100644
index 0000000..2d95f03
--- /dev/null
+++ b/media/tests/MediaDump/src/com/android/mediadump/MediaDump.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediadump;
+
+import android.app.TabActivity;
+import android.content.Intent;
+import android.content.SharedPreferences;
+import android.os.Bundle;
+import android.widget.TabHost;
+
+/**
+ * A media tool to play a video and dump the screen display
+ * into raw RGB files. Check VideoDumpView for tech details.
+ */
+public class MediaDump extends TabActivity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ // TODO: Read/Write the settings.
+
+ setContentView(R.layout.main);
+
+ TabHost tab = getTabHost();
+
+ // Setup video dumping tab
+ TabHost.TabSpec videoDumpTab = tab.newTabSpec("VideoDump");
+ videoDumpTab.setIndicator("VideoDump");
+
+ Intent videoDumpIntent = new Intent(this, VideoDumpActivity.class);
+ videoDumpTab.setContent(videoDumpIntent);
+
+ tab.addTab(videoDumpTab);
+
+ // Setup rgb player tab
+ TabHost.TabSpec rgbPlayerTab = tab.newTabSpec("RgbPlayer");
+ rgbPlayerTab.setIndicator("RgbPlayer");
+
+ Intent rgbPlayerIntent = new Intent(this, RgbPlayerActivity.class);
+ rgbPlayerTab.setContent(rgbPlayerIntent);
+
+ tab.addTab(rgbPlayerTab);
+ }
+}
+
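Note: the onCreate() above leaves a TODO for reading/writing settings. Below is a minimal sketch of how that could be wired up with the already-imported SharedPreferences; the preference file name, keys, and helper methods are hypothetical and not part of this change.

    // Hypothetical sketch only, not part of this commit: persisting a setting
    // (e.g. the video URI) using the SharedPreferences import already present above.
    private static final String PREFS_NAME = "mediadump_settings"; // hypothetical name

    private String loadVideoUri() {
        SharedPreferences prefs = getSharedPreferences(PREFS_NAME, MODE_PRIVATE);
        return prefs.getString("videoUri", "/sdcard/mediadump/sample.mp4");
    }

    private void saveVideoUri(String uri) {
        SharedPreferences prefs = getSharedPreferences(PREFS_NAME, MODE_PRIVATE);
        prefs.edit().putString("videoUri", uri).commit();
    }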
diff --git a/media/tests/MediaDump/src/com/android/mediadump/RgbPlayerActivity.java b/media/tests/MediaDump/src/com/android/mediadump/RgbPlayerActivity.java
new file mode 100644
index 0000000..fbbc570
--- /dev/null
+++ b/media/tests/MediaDump/src/com/android/mediadump/RgbPlayerActivity.java
@@ -0,0 +1,249 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediadump;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileReader;
+import java.io.FilenameFilter;
+import java.lang.Integer;
+import java.nio.ByteBuffer;
+import java.nio.ShortBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Properties;
+import java.util.Random;
+import java.util.Timer;
+import java.util.TimerTask;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.content.SharedPreferences;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Canvas;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Message;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.MotionEvent;
+import android.view.View;
+import android.widget.ImageView;
+import android.widget.LinearLayout;
+import android.widget.MediaController;
+import android.widget.MediaController.MediaPlayerControl;
+
+
+/**
+ * A simple player that displays the raw RGB files generated by the
+ * VideoDumpView class. It reads "/sdcard/mediadump/prop.xml" to get
+ * metadata such as width, height, frame rate, and bytes per pixel.
+ */
+public class RgbPlayerActivity extends Activity {
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ setContentView(new RgbView(this));
+ }
+
+ private static class RgbView extends View implements MediaPlayerControl {
+ private static final String TAG = "RgbView";
+ private Bitmap mBitmap;
+ private int mStartX = 0;
+ private int mStartY = 0;
+ private int mWidth = 0;
+ private int mHeight = 0;
+ private int mBytesPerPixel = 0;
+ private int mBytesPerLine = 0;
+ private int mBytesPerImage = 0;
+ private byte[] mImageBytes;
+ private ByteBuffer mFlipBuf;
+
+ private int mFrameRate = 0;
+
+ private MediaController mMediaController;
+ private boolean mMediaControllerAttached;
+ private boolean mIsPlaying = false;
+ private int mImageIndex = 0;
+ private List<String> mImageList;
+ private Timer mTimer;
+ private TimerTask mImageTask = new TimerTask() {
+ @Override
+ public void run() {
+ if (mIsPlaying) {
+ mImageIndex++;
+ LoadImage();
+ }
+ }
+ };
+ private Handler mHandler = new Handler() {
+ @Override
+ public void handleMessage(Message msg) {
+ super.handleMessage(msg);
+ invalidate();
+ }
+ };
+
+
+ public RgbView(Context context) {
+ super(context);
+
+ // read properties
+ Properties prop = new Properties();
+ try {
+ prop.loadFromXML(new FileInputStream("/sdcard/mediadump/prop.xml"));
+ } catch (java.io.IOException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+
+ try {
+ mStartX = Integer.parseInt(prop.getProperty("startX"));
+ mStartY = Integer.parseInt(prop.getProperty("startY"));
+ mWidth = Integer.parseInt(prop.getProperty("width"));
+ mHeight = Integer.parseInt(prop.getProperty("height"));
+ mBytesPerPixel = Integer.parseInt(prop.getProperty("bytesPerPixel"));
+ mFrameRate = Integer.parseInt(prop.getProperty("frameRate"));
+ } catch (java.lang.NumberFormatException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+
+ mBytesPerLine = mWidth * mBytesPerPixel;
+ mBytesPerImage = mHeight * mBytesPerLine;
+ mFlipBuf = ByteBuffer.allocate(mBytesPerImage);
+ mBitmap = Bitmap.createBitmap(mWidth, mHeight,
+ mBytesPerPixel == 2
+ ? Bitmap.Config.RGB_565
+ : Bitmap.Config.ARGB_8888);
+
+ mImageList = new ArrayList<String>();
+ try {
+ BufferedReader reader = new BufferedReader(
+ new FileReader("/sdcard/mediadump/images.lst"));
+ String line;
+ while ((line = reader.readLine()) != null) {
+ mImageList.add(line);
+ }
+ reader.close();
+ } catch (java.io.IOException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+
+ mMediaController = new MediaController(context);
+ mTimer = new Timer();
+ LoadImage();
+ }
+
+ private void attachMediaController() {
+ if (mMediaController != null) {
+ if (!mMediaControllerAttached) {
+ mMediaController.setMediaPlayer(this);
+ View anchorView = this.getParent() instanceof View ?
+ (View)this.getParent() : this;
+ mMediaController.setAnchorView(anchorView);
+ mMediaController.setEnabled(true);
+ mMediaControllerAttached = true;
+ mTimer.scheduleAtFixedRate(mImageTask, 0, 1000 / mFrameRate);
+ }
+ mMediaController.show();
+ }
+ }
+
+ @Override
+ public boolean onTouchEvent(MotionEvent event) {
+ attachMediaController();
+ return true;
+ }
+
+ private void LoadImage() {
+ try {
+ if (mImageIndex < 0 || mImageIndex >= mImageList.size()) {
+ mImageIndex = 0;
+ mIsPlaying = false;
+ }
+
+ String filename = mImageList.get(mImageIndex);
+
+                FileInputStream in = new FileInputStream(filename);
+                mImageBytes = new byte[mBytesPerImage];
+                in.read(mImageBytes);
+                // Close the stream so we don't leak a file descriptor per frame.
+                in.close();
+            } catch (Exception e) {
+                Log.e(TAG, "Error reading file", e);
+            }
+
+ // Flip the image vertically since the image from MediaDump is
+ // upside down.
+ for (int i = mHeight - 1; i >= 0; i--) {
+ mFlipBuf.put(mImageBytes, i * mBytesPerLine, mBytesPerLine);
+ }
+ mFlipBuf.rewind();
+ mBitmap.copyPixelsFromBuffer(mFlipBuf);
+ mFlipBuf.rewind();
+ mHandler.sendEmptyMessage(0);
+ }
+
+ @Override
+ protected void onDraw(Canvas canvas) {
+ canvas.drawBitmap(mBitmap, mStartX, mStartY, null);
+ }
+
+ public boolean canPause() {
+ return true;
+ }
+
+ public boolean canSeekBackward() {
+ return true;
+ }
+
+ public boolean canSeekForward() {
+ return true;
+ }
+
+ public int getBufferPercentage() {
+ return 1;
+ }
+
+ public int getCurrentPosition() {
+ return mImageIndex * 1000 / mFrameRate;
+ }
+
+ public int getDuration() {
+ return mImageList.size() * 1000 / mFrameRate;
+ }
+
+ public boolean isPlaying() {
+ return mIsPlaying;
+ }
+
+ public void pause() {
+ mIsPlaying = false;
+ }
+
+ public void seekTo(int pos) {
+ mImageIndex = pos * mFrameRate / 1000;
+ }
+
+ public void start() {
+ mIsPlaying = true;
+ }
+ }
+
+}
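For reference, the prop.xml that RgbPlayerActivity parses above is written by VideoDumpRenderer (in VideoDumpView.java, further down in this change) via java.util.Properties.storeToXML(). A sketch of the key/value pairs it contains, with illustrative values for a 720p clip dumped with the default 256x256 RGB565 block (XML prolog and DOCTYPE header omitted):

    <properties>
        <entry key="width">256</entry>
        <entry key="height">256</entry>
        <entry key="startX">512</entry>
        <entry key="startY">256</entry>
        <entry key="bytesPerPixel">2</entry>
        <entry key="frameRate">25</entry>
    </properties>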
diff --git a/media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java b/media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java
new file mode 100644
index 0000000..46cb64e
--- /dev/null
+++ b/media/tests/MediaDump/src/com/android/mediadump/VideoDumpActivity.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediadump;
+
+import android.app.Activity;
+import android.app.AlertDialog;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.Intent;
+import android.content.SharedPreferences;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.View;
+import android.widget.EditText;
+import android.widget.FrameLayout;
+import android.widget.LinearLayout;
+import android.widget.MediaController;
+
+/**
+ * A media tool to play a video and dump the screen display
+ * into raw RGB files. Check VideoDumpView for tech details.
+ */
+public class VideoDumpActivity extends Activity {
+
+ private Context context;
+
+ private View mainView;
+ private VideoDumpView mVideoView;
+
+ @Override
+ protected void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+
+ context = this;
+
+ mainView = createView();
+ setContentView(mainView);
+ }
+
+ @Override
+ protected void onPause() {
+ super.onPause();
+ mVideoView.onPause();
+ }
+
+ @Override
+ protected void onResume() {
+ super.onResume();
+ mVideoView.onResume();
+ }
+
+ private View createView() {
+ mVideoView = new VideoDumpView(this);
+ mVideoView.setMediaController(new MediaController(context));
+
+ LinearLayout mainLayout = new LinearLayout(this);
+ mainLayout.addView(mVideoView, new LinearLayout.LayoutParams(
+ LinearLayout.LayoutParams.MATCH_PARENT,
+ LinearLayout.LayoutParams.MATCH_PARENT));
+
+ return mainLayout;
+ }
+
+    @Override
+    protected void onStop() {
+ if (mVideoView != null) {
+ if (mVideoView.isPlaying()) {
+ mVideoView.stopPlayback();
+ }
+ }
+ super.onStop();
+ }
+}
+
diff --git a/media/tests/MediaDump/src/com/android/mediadump/VideoDumpView.java b/media/tests/MediaDump/src/com/android/mediadump/VideoDumpView.java
new file mode 100644
index 0000000..809ee82
--- /dev/null
+++ b/media/tests/MediaDump/src/com/android/mediadump/VideoDumpView.java
@@ -0,0 +1,650 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.mediadump;
+
+import java.io.IOException;
+import java.io.BufferedOutputStream;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.FilenameFilter;
+import java.io.FileOutputStream;
+
+import java.lang.Integer;
+import java.lang.Math;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.IntBuffer;
+import java.util.Properties;
+
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.app.Activity;
+import android.content.Context;
+import android.content.pm.ActivityInfo;
+import android.graphics.SurfaceTexture;
+import android.media.MediaPlayer;
+import android.opengl.GLES20;
+import android.opengl.GLSurfaceView;
+import android.opengl.GLUtils;
+import android.opengl.Matrix;
+import android.os.Bundle;
+import android.util.Log;
+import android.view.MotionEvent;
+import android.view.SurfaceHolder;
+import android.view.View;
+import android.widget.MediaController;
+import android.widget.MediaController.MediaPlayerControl;
+
+/**
+ * A view that plays a video, specified by VideoDumpConfig.VIDEO_URI, and dumps the
+ * screen into raw RGB files.
+ * It uses a renderer to draw each video frame onto a surface texture, reads the
+ * pixels back, and writes them into an RGB file on the sdcard.
+ * Those raw RGB files are used to measure quality distortion against the
+ * original video. They can be viewed with the RgbPlayer app for debugging.
+ */
+class VideoDumpView extends GLSurfaceView implements MediaPlayerControl {
+ private static final String TAG = "VideoDumpView";
+ VideoDumpRenderer mRenderer;
+ private MediaController mMediaController;
+ private boolean mMediaControllerAttached = false;
+ private MediaPlayer mMediaPlayer = null;
+ private BufferedWriter mImageListWriter = null;
+
+    // A set of configuration constants.
+ class VideoDumpConfig {
+ // Currently we are running with a local copy of the video.
+ // It should work with a "http://" sort of streaming url as well.
+ public static final String VIDEO_URI = "/sdcard/mediadump/sample.mp4";
+ public static final String ROOT_DIR = "/sdcard/mediadump/";
+ public static final String IMAGES_LIST = "images.lst";
+ public static final String IMAGE_PREFIX = "img";
+ public static final String IMAGE_SUFFIX = ".rgb";
+ public static final String PROPERTY_FILE = "prop.xml";
+
+ // So far, glReadPixels only supports two (format, type) combinations
+ // GL_RGB GL_UNSIGNED_SHORT_5_6_5 16 bits per pixel (default)
+ // GL_RGBA GL_UNSIGNED_BYTE 32 bits per pixel
+ public static final int PIXEL_FORMAT = GLES20.GL_RGB;
+ public static final int PIXEL_TYPE = PIXEL_FORMAT == GLES20.GL_RGBA
+ ? GLES20.GL_UNSIGNED_BYTE : GLES20.GL_UNSIGNED_SHORT_5_6_5;
+ public static final int BYTES_PER_PIXEL =
+ PIXEL_FORMAT == GLES20.GL_RGBA ? 4 : 2;
+        public static final boolean SET_CHOOSER = PIXEL_FORMAT == GLES20.GL_RGBA;
+
+ // On Motorola Xoom, it takes 100ms to read pixels and 180ms to write to a file
+ // to dump a complete 720p(1280*720) video frame. It's much slower than the frame
+ // playback interval (40ms). So we only dump a center block and it should be able
+ // to catch all the e2e distortion. A reasonable size of the block is 256x256,
+ // which takes 4ms to read pixels and 25 ms to write to a file.
+ public static final int MAX_DUMP_WIDTH = 256;
+ public static final int MAX_DUMP_HEIGHT = 256;
+
+        // TODO: MediaPlayer doesn't report the video frame rate; we'd need to figure
+        // it out by dividing the total number of frames by the duration.
+ public static final int FRAME_RATE = 25;
+ }
+
+ public VideoDumpView(Context context) {
+ super(context);
+ setEGLContextClientVersion(2);
+ // GLSurfaceView uses RGB_5_6_5 by default.
+ if (VideoDumpConfig.SET_CHOOSER) {
+ setEGLConfigChooser(8, 8, 8, 8, 8, 8);
+ }
+ mRenderer = new VideoDumpRenderer(context);
+ setRenderer(mRenderer);
+ }
+
+ @Override
+ public void onPause() {
+ stopPlayback();
+ super.onPause();
+ }
+
+ @Override
+ public void onResume() {
+ Log.d(TAG, "onResume");
+
+ mMediaPlayer = new MediaPlayer();
+ try {
+ mMediaPlayer.setDataSource(VideoDumpConfig.VIDEO_URI);
+
+ class RGBFilter implements FilenameFilter {
+ public boolean accept(File dir, String name) {
+ return (name.endsWith(VideoDumpConfig.IMAGE_SUFFIX));
+ }
+ }
+ File dump_dir = new File(VideoDumpConfig.ROOT_DIR);
+ File[] dump_files = dump_dir.listFiles(new RGBFilter());
+            for (File dump_file : dump_files) {
+ dump_file.delete();
+ }
+
+ File image_list = new File(VideoDumpConfig.ROOT_DIR
+ + VideoDumpConfig.IMAGES_LIST);
+ image_list.delete();
+ mImageListWriter = new BufferedWriter(new FileWriter(image_list));
+ } catch (java.io.IOException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+
+ queueEvent(new Runnable(){
+ public void run() {
+ mRenderer.setMediaPlayer(mMediaPlayer);
+ mRenderer.setImageListWriter(mImageListWriter);
+ }});
+
+ super.onResume();
+ }
+
+ public void start() {
+ mMediaPlayer.start();
+ }
+
+ public void pause() {
+ mMediaPlayer.pause();
+ try {
+ mImageListWriter.flush();
+ } catch (java.io.IOException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+ }
+
+ public void stopPlayback() {
+ Log.d(TAG, "stopPlayback");
+
+ if (mMediaPlayer != null) {
+ mMediaPlayer.stop();
+ mMediaPlayer.release();
+ mMediaPlayer = null;
+ }
+ if (mImageListWriter != null) {
+ try {
+ mImageListWriter.flush();
+ mImageListWriter.close();
+ } catch (java.io.IOException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+ } else {
+ Log.d(TAG, "image list file was not written successfully.");
+ }
+ }
+
+ public void setMediaController(MediaController controller) {
+ if (mMediaController != null) {
+ mMediaController.hide();
+ }
+ mMediaController = controller;
+ }
+
+ private void attachMediaController() {
+ if (mMediaPlayer != null && mMediaController != null) {
+ if (!mMediaControllerAttached) {
+ mMediaController.setMediaPlayer(this);
+ View anchorView = this.getParent() instanceof View ?
+ (View)this.getParent() : this;
+ mMediaController.setAnchorView(anchorView);
+ mMediaController.setEnabled(true);
+ mMediaControllerAttached = true;
+ }
+ mMediaController.show();
+ }
+ }
+
+ private boolean isInPlaybackState() {
+ return (mMediaPlayer != null && mMediaPlayer.isPlaying());
+ }
+
+ public boolean canPause () {
+ return true;
+ }
+
+ public boolean canSeekBackward () {
+ return true;
+ }
+
+ public boolean canSeekForward () {
+ return true;
+ }
+
+ public int getBufferPercentage () {
+ return 1;
+ }
+
+ public int getCurrentPosition () {
+ if (isInPlaybackState()) {
+ return mMediaPlayer.getCurrentPosition();
+ }
+ return 0;
+ }
+
+ public int getDuration () {
+ return mMediaPlayer.getDuration();
+ }
+
+ public boolean isPlaying () {
+ return isInPlaybackState() && mMediaPlayer.isPlaying();
+ }
+
+ public void seekTo (int pos) {
+ mMediaPlayer.seekTo(pos);
+ }
+
+ @Override
+ public boolean onTouchEvent(MotionEvent ev) {
+ attachMediaController();
+ return true;
+ }
+
+ /**
+     * A renderer that reads each video frame from the media player, draws it onto a
+     * surface texture, dumps the on-screen pixels into a buffer, and writes them into
+     * an RGB file on the sdcard.
+ */
+ private static class VideoDumpRenderer
+ implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
+ private static String TAG = "VideoDumpRenderer";
+
+ /* All GL related fields from
+ * http://developer.android.com/resources/samples/ApiDemos/src/com/example
+ * /android/apis/graphics/GLES20TriangleRenderer.html
+ */
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+ private final float[] mTriangleVerticesData = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 0.f,
+ 1.0f, -1.0f, 0, 1.f, 0.f,
+ -1.0f, 1.0f, 0, 0.f, 1.f,
+ 1.0f, 1.0f, 0, 1.f, 1.f,
+ };
+
+ private FloatBuffer mTriangleVertices;
+
+ private final String mVertexShader =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+
+ private final String mFragmentShader =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+
+ private float[] mMVPMatrix = new float[16];
+ private float[] mSTMatrix = new float[16];
+
+ private int mProgram;
+ private int mTextureID;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+
+ private SurfaceTexture mSurface;
+ private boolean updateSurface = false;
+
+        // GL_TEXTURE_EXTERNAL_OES is not exposed by the GLES20 class, so define it here.
+ private static int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
+
+
+ /**
+         * Fields used to read the video source and dump it to a file.
+ */
+ // The media player that loads and decodes the video.
+ // Not owned by this class.
+ private MediaPlayer mMediaPlayer;
+ // The frame number from media player.
+ private int mFrameNumber = 0;
+ // The frame number that is drawing on screen.
+ private int mDrawNumber = 0;
+ // The width and height of dumping block.
+ private int mWidth = 0;
+ private int mHeight = 0;
+ // The offset of the dumping block.
+ private int mStartX = 0;
+ private int mStartY = 0;
+ // A buffer to hold the dumping pixels.
+ private ByteBuffer mBuffer = null;
+ // A file writer to write the filenames of images.
+ private BufferedWriter mImageListWriter;
+
+ public VideoDumpRenderer(Context context) {
+ mTriangleVertices = ByteBuffer.allocateDirect(
+ mTriangleVerticesData.length * FLOAT_SIZE_BYTES)
+ .order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mTriangleVertices.put(mTriangleVerticesData).position(0);
+
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+
+ public void setMediaPlayer(MediaPlayer player) {
+ mMediaPlayer = player;
+ }
+
+ public void setImageListWriter(BufferedWriter imageListWriter) {
+ mImageListWriter = imageListWriter;
+ }
+
+ /**
+         * Called to draw the current frame. It renders the latest video frame onto
+         * the surface and, when the frame is new, dumps the displayed pixels to a file.
+ */
+ public void onDrawFrame(GL10 glUnused) {
+ boolean isNewFrame = false;
+ int frameNumber = 0;
+
+ synchronized(this) {
+ if (updateSurface) {
+ isNewFrame = true;
+ frameNumber = mFrameNumber;
+ mSurface.updateTexImage();
+ mSurface.getTransformMatrix(mSTMatrix);
+ updateSurface = false;
+ }
+ }
+
+ // Initial clear.
+ GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
+ GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+
+            // Load the program, which contains the basic rules to draw the vertices and textures.
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+
+ // Activate the texture.
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+            // Load the vertex coordinates. Simple here since it only draws a rectangle
+            // that fits the whole screen.
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+            // Load the texture coordinates, which essentially form a rectangle that
+            // fits the whole video frame.
+ mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
+ TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+ // Set up the GL matrices.
+ Matrix.setIdentityM(mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
+
+ // Draw a rectangle and render the video frame as a texture on it.
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
+ checkGlError("glDrawArrays");
+ GLES20.glFinish();
+
+ if (isNewFrame) { // avoid duplicates.
+ Log.d(TAG, mDrawNumber + "/" + frameNumber + " before dumping "
+ + System.currentTimeMillis());
+ DumpToFile(frameNumber);
+ Log.d(TAG, mDrawNumber + "/" + frameNumber + " after dumping "
+ + System.currentTimeMillis());
+
+ mDrawNumber++;
+ }
+ }
+
+ // Call the GL function that dumps the screen into a buffer, then write to a file.
+ private void DumpToFile(int frameNumber) {
+ GLES20.glReadPixels(mStartX, mStartY, mWidth, mHeight,
+ VideoDumpConfig.PIXEL_FORMAT,
+ VideoDumpConfig.PIXEL_TYPE,
+ mBuffer);
+ checkGlError("glReadPixels");
+
+ Log.d(TAG, mDrawNumber + "/" + frameNumber + " after glReadPixels "
+ + System.currentTimeMillis());
+
+ String filename = VideoDumpConfig.ROOT_DIR + VideoDumpConfig.IMAGE_PREFIX
+ + frameNumber + VideoDumpConfig.IMAGE_SUFFIX;
+ try {
+ mImageListWriter.write(filename);
+ mImageListWriter.newLine();
+ FileOutputStream fos = new FileOutputStream(filename);
+ fos.write(mBuffer.array());
+ fos.close();
+ } catch (java.io.IOException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+ }
+
+ /**
+ * Called when the surface changed size.
+ * Called after the surface is created and whenever the OpenGL surface size changes.
+ */
+ public void onSurfaceChanged(GL10 glUnused, int width, int height) {
+ Log.d(TAG, "Surface size: " + width + "x" + height);
+
+ int video_width = mMediaPlayer.getVideoWidth();
+ int video_height = mMediaPlayer.getVideoHeight();
+ Log.d(TAG, "Video size: " + video_width
+ + "x" + video_height);
+
+ // TODO: adjust video_width and video_height with the surface size.
+ GLES20.glViewport(0, 0, video_width, video_height);
+
+ mWidth = Math.min(VideoDumpConfig.MAX_DUMP_WIDTH, video_width);
+ mHeight = Math.min(VideoDumpConfig.MAX_DUMP_HEIGHT, video_height);
+            // Align the dump block to a multiple of its own size, which roughly
+            // centers it in the video frame.
+            mStartX = video_width / mWidth / 2 * mWidth;
+            mStartY = video_height / mHeight / 2 * mHeight;
+
+ Log.d(TAG, "dumping block start at (" + mStartX + "," + mStartY + ") "
+ + "size " + mWidth + "x" + mHeight);
+
+ int image_size = mWidth * mHeight * VideoDumpConfig.BYTES_PER_PIXEL;
+ mBuffer = ByteBuffer.allocate(image_size);
+
+ int bpp[] = new int[3];
+ GLES20.glGetIntegerv(GLES20.GL_RED_BITS, bpp, 0);
+ GLES20.glGetIntegerv(GLES20.GL_GREEN_BITS, bpp, 1);
+ GLES20.glGetIntegerv(GLES20.GL_BLUE_BITS, bpp, 2);
+ Log.d(TAG, "rgb bits: " + bpp[0] + "-" + bpp[1] + "-" + bpp[2]);
+
+            // Save the properties into an XML file
+ // so the RgbPlayer can understand the output format.
+ Properties prop = new Properties();
+ prop.setProperty("width", Integer.toString(mWidth));
+ prop.setProperty("height", Integer.toString(mHeight));
+ prop.setProperty("startX", Integer.toString(mStartX));
+ prop.setProperty("startY", Integer.toString(mStartY));
+ prop.setProperty("bytesPerPixel",
+ Integer.toString(VideoDumpConfig.BYTES_PER_PIXEL));
+ prop.setProperty("frameRate", Integer.toString(VideoDumpConfig.FRAME_RATE));
+ try {
+ prop.storeToXML(new FileOutputStream(VideoDumpConfig.ROOT_DIR
+ + VideoDumpConfig.PROPERTY_FILE), "");
+ } catch (java.io.IOException e) {
+ Log.e(TAG, e.getMessage(), e);
+ }
+ }
+
+ /**
+ * Called when the surface is created or recreated.
+ * Called when the rendering thread starts and whenever the EGL context is lost.
+ * A place to put code to create resources that need to be created when the rendering
+ * starts, and that need to be recreated when the EGL context is lost e.g. texture.
+ * Note that when the EGL context is lost, all OpenGL resources associated with
+ * that context will be automatically deleted.
+ */
+ public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
+ Log.d(TAG, "onSurfaceCreated");
+
+ /* Set up shaders and handles to their variables */
+ mProgram = createProgram(mVertexShader, mFragmentShader);
+ if (mProgram == 0) {
+ return;
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for aTextureCoord");
+ }
+
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uMVPMatrix");
+ }
+
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new RuntimeException("Could not get attrib location for uSTMatrix");
+ }
+
+
+ // Create our texture. This has to be done each time the surface is created.
+ int[] textures = new int[1];
+ GLES20.glGenTextures(1, textures, 0);
+
+ mTextureID = textures[0];
+ GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ checkGlError("glBindTexture mTextureID");
+
+ // Can't do mipmapping with mediaplayer source
+ GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ // Clamp to edge is the only option
+ GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameteri mTextureID");
+
+ /*
+ * Create the SurfaceTexture that will feed this textureID,
+ * and pass it to the MediaPlayer
+ */
+ mSurface = new SurfaceTexture(mTextureID);
+ mSurface.setOnFrameAvailableListener(this);
+
+ mMediaPlayer.setTexture(mSurface);
+
+ try {
+ mMediaPlayer.prepare();
+ } catch (IOException t) {
+                Log.e(TAG, "media player prepare failed", t);
+ }
+
+ synchronized(this) {
+ updateSurface = false;
+ }
+ }
+
+ synchronized public void onFrameAvailable(SurfaceTexture surface) {
+ /* For simplicity, SurfaceTexture calls here when it has new
+ * data available. Call may come in from some random thread,
+ * so let's be safe and use synchronize. No OpenGL calls can be done here.
+ */
+ mFrameNumber++;
+ updateSurface = true;
+ }
+
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ if (shader != 0) {
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ shader = 0;
+ }
+ }
+ return shader;
+ }
+
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+
+ int program = GLES20.glCreateProgram();
+ if (program != 0) {
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ program = 0;
+ }
+ }
+ return program;
+ }
+
+ private void checkGlError(String op) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ Log.e(TAG, op + ": glError " + error);
+ throw new RuntimeException(op + ": glError " + error);
+ }
+ }
+
+    } // End of class VideoDumpRenderer.
+
+} // End of class VideoDumpView.
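A small worked example of the dump-block arithmetic in onSurfaceChanged() above, assuming the 720p video and default 256x256 RGB565 block mentioned in the VideoDumpConfig comments (the numbers are illustrative, not produced by this change):

    // Illustrative only: dump-block placement and per-frame size for a
    // 1280x720 video with VideoDumpConfig's defaults (256x256 block, RGB565).
    public class DumpBlockMathExample {
        public static void main(String[] args) {
            int videoWidth = 1280, videoHeight = 720;
            int width  = Math.min(256, videoWidth);          // 256
            int height = Math.min(256, videoHeight);         // 256
            // Integer division aligns the block to a multiple of its own size,
            // roughly centering it in the frame.
            int startX = videoWidth  / width  / 2 * width;   // 1280/256 = 5 -> 5/2 = 2 -> 2*256 = 512
            int startY = videoHeight / height / 2 * height;  // 720/256  = 2 -> 2/2 = 1 -> 1*256 = 256
            int bytesPerFrame = width * height * 2;          // RGB565: 131072 bytes per dumped frame
            System.out.println("dump block at (" + startX + "," + startY + "), "
                    + bytesPerFrame + " bytes per frame");
        }
    }

At the assumed 25 fps this works out to roughly 3.3 MB of raw dump data per second of video, which is why the config limits the dump to a center block rather than the full 720p frame.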