Diffstat (limited to 'cmds')
-rw-r--r--  cmds/screenrecord/Android.mk          |   1
-rw-r--r--  cmds/screenrecord/EglWindow.cpp       |  68
-rw-r--r--  cmds/screenrecord/EglWindow.h         |   5
-rw-r--r--  cmds/screenrecord/FrameOutput.cpp     | 210
-rw-r--r--  cmds/screenrecord/FrameOutput.h       |  99
-rw-r--r--  cmds/screenrecord/Overlay.cpp         |   9
-rw-r--r--  cmds/screenrecord/Overlay.h           |  10
-rw-r--r--  cmds/screenrecord/Program.cpp         |  12
-rw-r--r--  cmds/screenrecord/Program.h           |   8
-rw-r--r--  cmds/screenrecord/screenrecord.cpp    | 311
-rw-r--r--  cmds/screenrecord/screenrecord.h      |   2
-rw-r--r--  cmds/stagefright/SimplePlayer.cpp     |   4
-rw-r--r--  cmds/stagefright/SineSource.cpp       |   4
-rw-r--r--  cmds/stagefright/codec.cpp            |   3
-rw-r--r--  cmds/stagefright/muxer.cpp            |   3
-rw-r--r--  cmds/stagefright/record.cpp           |   2
-rw-r--r--  cmds/stagefright/sf2.cpp              |  65
-rw-r--r--  cmds/stagefright/stagefright.cpp      |  12
-rw-r--r--  cmds/stagefright/stream.cpp           |   7
19 files changed, 691 insertions, 144 deletions
diff --git a/cmds/screenrecord/Android.mk b/cmds/screenrecord/Android.mk
index 57a2234..6ee2884 100644
--- a/cmds/screenrecord/Android.mk
+++ b/cmds/screenrecord/Android.mk
@@ -19,6 +19,7 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
screenrecord.cpp \
EglWindow.cpp \
+ FrameOutput.cpp \
TextRenderer.cpp \
Overlay.cpp \
Program.cpp
diff --git a/cmds/screenrecord/EglWindow.cpp b/cmds/screenrecord/EglWindow.cpp
index aa0517f..c16f2ad 100644
--- a/cmds/screenrecord/EglWindow.cpp
+++ b/cmds/screenrecord/EglWindow.cpp
@@ -35,11 +35,16 @@ using namespace android;
status_t EglWindow::createWindow(const sp<IGraphicBufferProducer>& surface) {
- status_t err = eglSetupContext();
+ if (mEglSurface != EGL_NO_SURFACE) {
+ ALOGE("surface already created");
+ return UNKNOWN_ERROR;
+ }
+ status_t err = eglSetupContext(false);
if (err != NO_ERROR) {
return err;
}
+ // Cache the current dimensions. We're not expecting these to change.
surface->query(NATIVE_WINDOW_WIDTH, &mWidth);
surface->query(NATIVE_WINDOW_HEIGHT, &mHeight);
@@ -56,6 +61,34 @@ status_t EglWindow::createWindow(const sp<IGraphicBufferProducer>& surface) {
return NO_ERROR;
}
+status_t EglWindow::createPbuffer(int width, int height) {
+ if (mEglSurface != EGL_NO_SURFACE) {
+ ALOGE("surface already created");
+ return UNKNOWN_ERROR;
+ }
+ status_t err = eglSetupContext(true);
+ if (err != NO_ERROR) {
+ return err;
+ }
+
+ mWidth = width;
+ mHeight = height;
+
+ EGLint pbufferAttribs[] = {
+ EGL_WIDTH, width,
+ EGL_HEIGHT, height,
+ EGL_NONE
+ };
+ mEglSurface = eglCreatePbufferSurface(mEglDisplay, mEglConfig, pbufferAttribs);
+ if (mEglSurface == EGL_NO_SURFACE) {
+ ALOGE("eglCreatePbufferSurface error: %#x", eglGetError());
+ eglRelease();
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+}
+
status_t EglWindow::makeCurrent() const {
if (!eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
ALOGE("eglMakeCurrent failed: %#x", eglGetError());
@@ -64,7 +97,7 @@ status_t EglWindow::makeCurrent() const {
return NO_ERROR;
}
-status_t EglWindow::eglSetupContext() {
+status_t EglWindow::eglSetupContext(bool forPbuffer) {
EGLBoolean result;
mEglDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
@@ -82,17 +115,28 @@ status_t EglWindow::eglSetupContext() {
ALOGV("Initialized EGL v%d.%d", majorVersion, minorVersion);
EGLint numConfigs = 0;
- EGLint configAttribs[] = {
- EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
- EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
- EGL_RECORDABLE_ANDROID, 1,
- EGL_RED_SIZE, 8,
- EGL_GREEN_SIZE, 8,
- EGL_BLUE_SIZE, 8,
- EGL_NONE
+ EGLint windowConfigAttribs[] = {
+ EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
+ EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL_RED_SIZE, 8,
+ EGL_GREEN_SIZE, 8,
+ EGL_BLUE_SIZE, 8,
+ // no alpha
+ EGL_NONE
+ };
+ EGLint pbufferConfigAttribs[] = {
+ EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
+ EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL_RED_SIZE, 8,
+ EGL_GREEN_SIZE, 8,
+ EGL_BLUE_SIZE, 8,
+ EGL_ALPHA_SIZE, 8,
+ EGL_NONE
};
- result = eglChooseConfig(mEglDisplay, configAttribs, &mEglConfig, 1,
- &numConfigs);
+ result = eglChooseConfig(mEglDisplay,
+ forPbuffer ? pbufferConfigAttribs : windowConfigAttribs,
+ &mEglConfig, 1, &numConfigs);
if (result != EGL_TRUE) {
ALOGE("eglChooseConfig error: %#x", eglGetError());
return UNKNOWN_ERROR;
diff --git a/cmds/screenrecord/EglWindow.h b/cmds/screenrecord/EglWindow.h
index 02a2efc..69d0c31 100644
--- a/cmds/screenrecord/EglWindow.h
+++ b/cmds/screenrecord/EglWindow.h
@@ -44,6 +44,9 @@ public:
// Creates an EGL window for the supplied surface.
status_t createWindow(const sp<IGraphicBufferProducer>& surface);
+ // Creates an EGL pbuffer surface.
+ status_t createPbuffer(int width, int height);
+
// Return width and height values (obtained from IGBP).
int getWidth() const { return mWidth; }
int getHeight() const { return mHeight; }
@@ -65,7 +68,7 @@ private:
EglWindow& operator=(const EglWindow&);
// Init display, create config and context.
- status_t eglSetupContext();
+ status_t eglSetupContext(bool forPbuffer);
void eglRelease();
// Basic EGL goodies.
diff --git a/cmds/screenrecord/FrameOutput.cpp b/cmds/screenrecord/FrameOutput.cpp
new file mode 100644
index 0000000..06b1f70
--- /dev/null
+++ b/cmds/screenrecord/FrameOutput.cpp
@@ -0,0 +1,210 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "ScreenRecord"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+#include "FrameOutput.h"
+
+using namespace android;
+
+static const bool kShowTiming = false; // set to "true" for debugging
+static const int kGlBytesPerPixel = 4; // GL_RGBA
+static const int kOutBytesPerPixel = 3; // RGB only
+
+inline void FrameOutput::setValueLE(uint8_t* buf, uint32_t value) {
+ // Since we're running on an Android device, we're (almost) guaranteed
+ // to be little-endian, and (almost) guaranteed that unaligned 32-bit
+ // writes will work without any performance penalty... but do it
+ // byte-by-byte anyway.
+ buf[0] = (uint8_t) value;
+ buf[1] = (uint8_t) (value >> 8);
+ buf[2] = (uint8_t) (value >> 16);
+ buf[3] = (uint8_t) (value >> 24);
+}
+
+status_t FrameOutput::createInputSurface(int width, int height,
+ sp<IGraphicBufferProducer>* pBufferProducer) {
+ status_t err;
+
+ err = mEglWindow.createPbuffer(width, height);
+ if (err != NO_ERROR) {
+ return err;
+ }
+ mEglWindow.makeCurrent();
+
+ glViewport(0, 0, width, height);
+ glDisable(GL_DEPTH_TEST);
+ glDisable(GL_CULL_FACE);
+
+ // Shader for rendering the external texture.
+ err = mExtTexProgram.setup(Program::PROGRAM_EXTERNAL_TEXTURE);
+ if (err != NO_ERROR) {
+ return err;
+ }
+
+ // Input side (buffers from virtual display).
+ glGenTextures(1, &mExtTextureName);
+ if (mExtTextureName == 0) {
+ ALOGE("glGenTextures failed: %#x", glGetError());
+ return UNKNOWN_ERROR;
+ }
+
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mGlConsumer = new GLConsumer(consumer, mExtTextureName,
+ GL_TEXTURE_EXTERNAL_OES);
+ mGlConsumer->setName(String8("virtual display"));
+ mGlConsumer->setDefaultBufferSize(width, height);
+ mGlConsumer->setDefaultMaxBufferCount(5);
+ mGlConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_TEXTURE);
+
+ mGlConsumer->setFrameAvailableListener(this);
+
+ mPixelBuf = new uint8_t[width * height * kGlBytesPerPixel];
+
+ *pBufferProducer = producer;
+
+ ALOGD("FrameOutput::createInputSurface OK");
+ return NO_ERROR;
+}
+
+status_t FrameOutput::copyFrame(FILE* fp, long timeoutUsec) {
+ Mutex::Autolock _l(mMutex);
+ ALOGV("copyFrame %ld\n", timeoutUsec);
+
+ if (!mFrameAvailable) {
+ nsecs_t timeoutNsec = (nsecs_t)timeoutUsec * 1000;
+ int cc = mEventCond.waitRelative(mMutex, timeoutNsec);
+ if (cc == -ETIMEDOUT) {
+ ALOGV("cond wait timed out");
+ return ETIMEDOUT;
+ } else if (cc != 0) {
+ ALOGW("cond wait returned error %d", cc);
+ return cc;
+ }
+ }
+ if (!mFrameAvailable) {
+ // This happens when Ctrl-C is hit. Apparently POSIX says that the
+ // pthread wait call doesn't return EINTR, treating this instead as
+ // an instance of a "spurious wakeup". We didn't get a frame, so
+ // we just treat it as a timeout.
+ return ETIMEDOUT;
+ }
+
+ // A frame is available. Clear the flag for the next round.
+ mFrameAvailable = false;
+
+ float texMatrix[16];
+ mGlConsumer->updateTexImage();
+ mGlConsumer->getTransformMatrix(texMatrix);
+
+ // The data is in an external texture, so we need to render it to the
+ // pbuffer to get access to RGB pixel data. We also want to flip it
+ // upside-down for easy conversion to a bitmap.
+ int width = mEglWindow.getWidth();
+ int height = mEglWindow.getHeight();
+ status_t err = mExtTexProgram.blit(mExtTextureName, texMatrix, 0, 0,
+ width, height, true);
+ if (err != NO_ERROR) {
+ return err;
+ }
+
+ // GLES only guarantees that glReadPixels() will work with GL_RGBA, so we
+ // need to get 4 bytes/pixel and reduce it. Depending on the size of the
+ // screen and the device capabilities, this can take a while.
+ int64_t startWhenNsec, pixWhenNsec, endWhenNsec;
+ if (kShowTiming) {
+ startWhenNsec = systemTime(CLOCK_MONOTONIC);
+ }
+ GLenum glErr;
+ glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, mPixelBuf);
+ if ((glErr = glGetError()) != GL_NO_ERROR) {
+ ALOGE("glReadPixels failed: %#x", glErr);
+ return UNKNOWN_ERROR;
+ }
+ if (kShowTiming) {
+ pixWhenNsec = systemTime(CLOCK_MONOTONIC);
+ }
+ reduceRgbaToRgb(mPixelBuf, width * height);
+ if (kShowTiming) {
+ endWhenNsec = systemTime(CLOCK_MONOTONIC);
+ ALOGD("got pixels (get=%.3f ms, reduce=%.3fms)",
+ (pixWhenNsec - startWhenNsec) / 1000000.0,
+ (endWhenNsec - pixWhenNsec) / 1000000.0);
+ }
+
+ // Fill out the header.
+ size_t headerLen = sizeof(uint32_t) * 5;
+ size_t rgbDataLen = width * height * kOutBytesPerPixel;
+ size_t packetLen = headerLen - sizeof(uint32_t) + rgbDataLen;
+ uint8_t header[headerLen];
+ setValueLE(&header[0], packetLen);
+ setValueLE(&header[4], width);
+ setValueLE(&header[8], height);
+ setValueLE(&header[12], width * kOutBytesPerPixel);
+ setValueLE(&header[16], HAL_PIXEL_FORMAT_RGB_888);
+
+ // Currently using buffered I/O rather than writev(). Not expecting it
+ // to make much of a difference, but it might be worth a test for larger
+ // frame sizes.
+ if (kShowTiming) {
+ startWhenNsec = systemTime(CLOCK_MONOTONIC);
+ }
+ fwrite(header, 1, headerLen, fp);
+ fwrite(mPixelBuf, 1, rgbDataLen, fp);
+ fflush(fp);
+ if (kShowTiming) {
+ endWhenNsec = systemTime(CLOCK_MONOTONIC);
+ ALOGD("wrote pixels (%.3f ms)",
+ (endWhenNsec - startWhenNsec) / 1000000.0);
+ }
+
+ if (ferror(fp)) {
+ // errno may not be useful; log it anyway
+ ALOGE("write failed (errno=%d)", errno);
+ return UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+}
+
+void FrameOutput::reduceRgbaToRgb(uint8_t* buf, unsigned int pixelCount) {
+ // Convert RGBA to RGB.
+ //
+ // Unaligned 32-bit accesses are allowed on ARM, so we could do this
+ // with 32-bit copies advancing at different rates (taking care at the
+ // end to not go one byte over).
+ const uint8_t* readPtr = buf;
+ for (unsigned int i = 0; i < pixelCount; i++) {
+ *buf++ = *readPtr++;
+ *buf++ = *readPtr++;
+ *buf++ = *readPtr++;
+ readPtr++;
+ }
+}
+
+// Callback; executes on arbitrary thread.
+void FrameOutput::onFrameAvailable() {
+ Mutex::Autolock _l(mMutex);
+ mFrameAvailable = true;
+ mEventCond.signal();
+}
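
Note: each packet that copyFrame() writes is a 20-byte header of five little-endian uint32 values -- packet length (counting everything after the length field itself), width, height, stride (width * 3), and HAL_PIXEL_FORMAT_RGB_888 -- followed by the packed RGB data. A minimal reader sketch in C++; the names and the stdin source here are illustrative, not part of this change:

#include <cstdio>
#include <cstdint>
#include <vector>

// Read a 32-bit little-endian value, mirroring FrameOutput::setValueLE().
static bool readValueLE(FILE* fp, uint32_t* out) {
    uint8_t b[4];
    if (fread(b, 1, 4, fp) != 4) return false;
    *out = (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
           ((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
    return true;
}

int main() {
    uint32_t packetLen, width, height, stride, format;
    // Each packet: length (excluding the length field), width, height, stride, format, RGB data.
    while (readValueLE(stdin, &packetLen)) {
        if (!readValueLE(stdin, &width) || !readValueLE(stdin, &height) ||
            !readValueLE(stdin, &stride) || !readValueLE(stdin, &format)) {
            break;
        }
        size_t rgbLen = packetLen - 4 * sizeof(uint32_t);  // payload after the 16 header bytes read above
        std::vector<uint8_t> rgb(rgbLen);
        if (fread(rgb.data(), 1, rgbLen, stdin) != rgbLen) break;
        fprintf(stderr, "frame %ux%u stride=%u format=%u (%zu bytes)\n",
                width, height, stride, format, rgbLen);
        // ... hand the RGB data to a display or file writer here ...
    }
    return 0;
}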
diff --git a/cmds/screenrecord/FrameOutput.h b/cmds/screenrecord/FrameOutput.h
new file mode 100644
index 0000000..c1148d0
--- /dev/null
+++ b/cmds/screenrecord/FrameOutput.h
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SCREENRECORD_FRAMEOUTPUT_H
+#define SCREENRECORD_FRAMEOUTPUT_H
+
+#include "Program.h"
+#include "EglWindow.h"
+
+#include <gui/BufferQueue.h>
+#include <gui/GLConsumer.h>
+
+namespace android {
+
+/*
+ * Support for "frames" output format.
+ */
+class FrameOutput : public GLConsumer::FrameAvailableListener {
+public:
+ FrameOutput() : mFrameAvailable(false),
+ mExtTextureName(0),
+ mPixelBuf(NULL)
+ {}
+
+ // Create an "input surface", similar in purpose to a MediaCodec input
+ // surface, that the virtual display can send buffers to. Also configures
+ // EGL with a pbuffer surface on the current thread.
+ status_t createInputSurface(int width, int height,
+ sp<IGraphicBufferProducer>* pBufferProducer);
+
+ // Copy one frame from input to output. If no frame is available, this will wait up to the
+ // specified number of microseconds.
+ //
+ // Returns ETIMEDOUT if the timeout expired before we found a frame.
+ status_t copyFrame(FILE* fp, long timeoutUsec);
+
+ // Prepare to copy frames. Makes the EGL context used by this object current.
+ void prepareToCopy() {
+ mEglWindow.makeCurrent();
+ }
+
+private:
+ FrameOutput(const FrameOutput&);
+ FrameOutput& operator=(const FrameOutput&);
+
+ // Destruction via RefBase.
+ virtual ~FrameOutput() {
+ delete[] mPixelBuf;
+ }
+
+ // (overrides GLConsumer::FrameAvailableListener method)
+ virtual void onFrameAvailable();
+
+ // Reduces RGBA to RGB, in place.
+ static void reduceRgbaToRgb(uint8_t* buf, unsigned int pixelCount);
+
+ // Put a 32-bit value into a buffer, in little-endian byte order.
+ static void setValueLE(uint8_t* buf, uint32_t value);
+
+ // Used to wait for the FrameAvailableListener callback.
+ Mutex mMutex;
+ Condition mEventCond;
+
+ // Set by the FrameAvailableListener callback.
+ bool mFrameAvailable;
+
+ // This receives frames from the virtual display and makes them available
+ // as an external texture.
+ sp<GLConsumer> mGlConsumer;
+
+ // EGL display / context / surface.
+ EglWindow mEglWindow;
+
+ // GL rendering support.
+ Program mExtTexProgram;
+
+ // External texture, updated by GLConsumer.
+ GLuint mExtTextureName;
+
+ // Pixel data buffer.
+ uint8_t* mPixelBuf;
+};
+
+}; // namespace android
+
+#endif /*SCREENRECORD_FRAMEOUTPUT_H*/
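
A condensed usage sketch, mirroring the FORMAT_FRAMES path added to screenrecord.cpp further down (error handling and the virtual-display plumbing are elided; width, height, rawFp and stopRequested are placeholders):

sp<FrameOutput> frameOutput = new FrameOutput();
sp<IGraphicBufferProducer> inputSurface;
status_t err = frameOutput->createInputSurface(width, height, &inputSurface);
// ... hand inputSurface to the virtual display (see prepareVirtualDisplay) ...

frameOutput->prepareToCopy();              // make the pbuffer EGL context current
while (!stopRequested) {
    err = frameOutput->copyFrame(rawFp, 250000 /* usec */);
    if (err == ETIMEDOUT) continue;        // no frame arrived; poll again
    if (err != NO_ERROR) break;            // real failure
}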
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index 2e98874..94f560d 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -84,7 +84,7 @@ status_t Overlay::start(const sp<IGraphicBufferProducer>& outputSurface,
assert(mState == RUNNING);
ALOGV("Overlay::start successful");
- *pBufferProducer = mBufferQueue;
+ *pBufferProducer = mProducer;
return NO_ERROR;
}
@@ -169,8 +169,9 @@ status_t Overlay::setup_l() {
return UNKNOWN_ERROR;
}
- mBufferQueue = new BufferQueue(/*new GraphicBufferAlloc()*/);
- mGlConsumer = new GLConsumer(mBufferQueue, mExtTextureName,
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&mProducer, &consumer);
+ mGlConsumer = new GLConsumer(consumer, mExtTextureName,
GL_TEXTURE_EXTERNAL_OES);
mGlConsumer->setName(String8("virtual display"));
mGlConsumer->setDefaultBufferSize(width, height);
@@ -187,7 +188,7 @@ void Overlay::release_l() {
ALOGV("Overlay::release_l");
mOutputSurface.clear();
mGlConsumer.clear();
- mBufferQueue.clear();
+ mProducer.clear();
mTexProgram.release();
mExtTexProgram.release();
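
The BufferQueue change above (also applied in FrameOutput.cpp and stagefright.cpp) follows one pattern: the single BufferQueue object that used to serve as both ends of the queue is replaced by an explicit producer/consumer pair from BufferQueue::createBufferQueue(). A before/after sketch, with texName standing in for whatever texture is bound:

// Before: one object was both queue ends.
sp<BufferQueue> bq = new BufferQueue();
sp<GLConsumer> glc = new GLConsumer(bq, texName, GL_TEXTURE_EXTERNAL_OES);
// ... bq was also handed to the producer side (virtual display, Surface, ...) ...

// After: distinct producer and consumer interfaces.
sp<IGraphicBufferProducer> producer;
sp<IGraphicBufferConsumer> consumer;
BufferQueue::createBufferQueue(&producer, &consumer);
sp<GLConsumer> glConsumer = new GLConsumer(consumer, texName, GL_TEXTURE_EXTERNAL_OES);
// ... only the producer is handed out ...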
diff --git a/cmds/screenrecord/Overlay.h b/cmds/screenrecord/Overlay.h
index b8473b4..b1b5c29 100644
--- a/cmds/screenrecord/Overlay.h
+++ b/cmds/screenrecord/Overlay.h
@@ -47,7 +47,6 @@ public:
mLastFrameNumber(-1),
mTotalDroppedFrames(0)
{}
- virtual ~Overlay() { assert(mState == UNINITIALIZED || mState == STOPPED); }
// Creates a thread that performs the overlay. Pass in the surface that
// output will be sent to.
@@ -71,6 +70,9 @@ private:
Overlay(const Overlay&);
Overlay& operator=(const Overlay&);
+ // Destruction via RefBase.
+ virtual ~Overlay() { assert(mState == UNINITIALIZED || mState == STOPPED); }
+
// Draw the initial info screen.
static void doDrawInfoPage(const EglWindow& window,
const Program& texRender, TextRenderer& textRenderer);
@@ -120,9 +122,9 @@ private:
// surface.
sp<IGraphicBufferProducer> mOutputSurface;
- // Our queue. The producer side is passed to the virtual display, the
- // consumer side feeds into our GLConsumer.
- sp<BufferQueue> mBufferQueue;
+ // Producer side of queue, passed into the virtual display.
+ // The consumer end feeds into our GLConsumer.
+ sp<IGraphicBufferProducer> mProducer;
// This receives frames from the virtual display and makes them available
// as an external texture.
diff --git a/cmds/screenrecord/Program.cpp b/cmds/screenrecord/Program.cpp
index a198204..73cae6e 100644
--- a/cmds/screenrecord/Program.cpp
+++ b/cmds/screenrecord/Program.cpp
@@ -201,7 +201,7 @@ status_t Program::linkShaderProgram(GLuint vs, GLuint fs, GLuint* outPgm) {
status_t Program::blit(GLuint texName, const float* texMatrix,
- int32_t x, int32_t y, int32_t w, int32_t h) const {
+ int32_t x, int32_t y, int32_t w, int32_t h, bool invert) const {
ALOGV("Program::blit %d xy=%d,%d wh=%d,%d", texName, x, y, w, h);
const float pos[] = {
@@ -218,7 +218,7 @@ status_t Program::blit(GLuint texName, const float* texMatrix,
};
status_t err;
- err = beforeDraw(texName, texMatrix, pos, uv);
+ err = beforeDraw(texName, texMatrix, pos, uv, invert);
if (err == NO_ERROR) {
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
err = afterDraw();
@@ -232,7 +232,7 @@ status_t Program::drawTriangles(GLuint texName, const float* texMatrix,
status_t err;
- err = beforeDraw(texName, texMatrix, vertices, texes);
+ err = beforeDraw(texName, texMatrix, vertices, texes, false);
if (err == NO_ERROR) {
glDrawArrays(GL_TRIANGLES, 0, count);
err = afterDraw();
@@ -241,7 +241,7 @@ status_t Program::drawTriangles(GLuint texName, const float* texMatrix,
}
status_t Program::beforeDraw(GLuint texName, const float* texMatrix,
- const float* vertices, const float* texes) const {
+ const float* vertices, const float* texes, bool invert) const {
// Create an orthographic projection matrix based on viewport size.
GLint vp[4];
glGetIntegerv(GL_VIEWPORT, vp);
@@ -251,6 +251,10 @@ status_t Program::beforeDraw(GLuint texName, const float* texMatrix,
0.0f, 0.0f, 1.0f, 0.0f,
-1.0f, 1.0f, 0.0f, 1.0f,
};
+ if (invert) {
+ screenToNdc[5] = -screenToNdc[5];
+ screenToNdc[13] = -screenToNdc[13];
+ }
glUseProgram(mProgram);
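
To make the new "invert" flag concrete: screenToNdc is a column-major screen-to-NDC transform, so elements [5] and [13] are its Y scale and Y translation (the visible last row supplies the -1, +1 translation terms). Assuming the usual orthographic scale terms in the rows elided from this hunk, a w x h viewport maps as

    x_ndc = (2/w) * x - 1
    y_ndc = 1 - (2/h) * y        // invert == false
    y_ndc = (2/h) * y - 1        // invert == true

Negating the two entries flips the output vertically; FrameOutput passes invert=true for exactly this reason (see its "flip it upside-down" comment above), since glReadPixels() returns rows bottom-to-top.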
diff --git a/cmds/screenrecord/Program.h b/cmds/screenrecord/Program.h
index e47bc0d..558be8d 100644
--- a/cmds/screenrecord/Program.h
+++ b/cmds/screenrecord/Program.h
@@ -51,9 +51,11 @@ public:
// Release the program and associated resources.
void release();
- // Blit the specified texture to { x, y, x+w, y+h }.
+ // Blit the specified texture to { x, y, x+w, y+h }. Inverts the
+ // content if "invert" is set.
status_t blit(GLuint texName, const float* texMatrix,
- int32_t x, int32_t y, int32_t w, int32_t h) const;
+ int32_t x, int32_t y, int32_t w, int32_t h,
+ bool invert = false) const;
// Draw a number of triangles.
status_t drawTriangles(GLuint texName, const float* texMatrix,
@@ -67,7 +69,7 @@ private:
// Common code for draw functions.
status_t beforeDraw(GLuint texName, const float* texMatrix,
- const float* vertices, const float* texes) const;
+ const float* vertices, const float* texes, bool invert) const;
status_t afterDraw() const;
// GLES 2 shader utilities.
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 61f83e3..a17fc51 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -45,10 +45,12 @@
#include <signal.h>
#include <getopt.h>
#include <sys/wait.h>
+#include <termios.h>
#include <assert.h>
#include "screenrecord.h"
#include "Overlay.h"
+#include "FrameOutput.h"
using namespace android;
@@ -57,10 +59,14 @@ static const uint32_t kMaxBitRate = 200 * 1000000; // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180; // 3 minutes
static const uint32_t kFallbackWidth = 1280; // 720p
static const uint32_t kFallbackHeight = 720;
+static const char* kMimeTypeAvc = "video/avc";
// Command-line parameters.
static bool gVerbose = false; // chatty on stdout
static bool gRotate = false; // rotate 90 degrees
+static enum {
+ FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES
+} gOutputFormat = FORMAT_MP4; // data format for output
static bool gSizeSpecified = false; // was size explicitly requested?
static bool gWantInfoScreen = false; // do we want initial info screen?
static bool gWantFrameTime = false; // do we want times on each frame?
@@ -140,14 +146,14 @@ static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
status_t err;
if (gVerbose) {
- printf("Configuring recorder for %dx%d video at %.2fMbps\n",
- gVideoWidth, gVideoHeight, gBitRate / 1000000.0);
+ printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
+ gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
}
sp<AMessage> format = new AMessage;
format->setInt32("width", gVideoWidth);
format->setInt32("height", gVideoHeight);
- format->setString("mime", "video/avc");
+ format->setString("mime", kMimeTypeAvc);
format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
format->setInt32("bitrate", gBitRate);
format->setFloat("frame-rate", displayFps);
@@ -157,16 +163,18 @@ static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
looper->setName("screenrecord_looper");
looper->start();
ALOGV("Creating codec");
- sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", true);
+ sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
if (codec == NULL) {
- fprintf(stderr, "ERROR: unable to create video/avc codec instance\n");
+ fprintf(stderr, "ERROR: unable to create %s codec instance\n",
+ kMimeTypeAvc);
return UNKNOWN_ERROR;
}
err = codec->configure(format, NULL, NULL,
MediaCodec::CONFIGURE_FLAG_ENCODE);
if (err != NO_ERROR) {
- fprintf(stderr, "ERROR: unable to configure codec (err=%d)\n", err);
+ fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
+ kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
codec->release();
return err;
}
@@ -298,10 +306,12 @@ static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
* input frames are coming from the virtual display as fast as SurfaceFlinger
* wants to send them.
*
+ * Exactly one of muxer or rawFp must be non-null.
+ *
* The muxer must *not* have been started before calling.
*/
static status_t runEncoder(const sp<MediaCodec>& encoder,
- const sp<MediaMuxer>& muxer, const sp<IBinder>& mainDpy,
+ const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
const sp<IBinder>& virtualDpy, uint8_t orientation) {
static int kTimeout = 250000; // be responsive on signal
status_t err;
@@ -311,6 +321,8 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
DisplayInfo mainDpyInfo;
+ assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
+
Vector<sp<ABuffer> > buffers;
err = encoder->getOutputBuffers(&buffers);
if (err != NO_ERROR) {
@@ -342,15 +354,16 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
case NO_ERROR:
// got a buffer
if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
- // ignore this -- we passed the CSD into MediaMuxer when
- // we got the format change notification
- ALOGV("Got codec config buffer (%u bytes); ignoring", size);
- size = 0;
+ ALOGV("Got codec config buffer (%u bytes)", size);
+ if (muxer != NULL) {
+ // ignore this -- we passed the CSD into MediaMuxer when
+ // we got the format change notification
+ size = 0;
+ }
}
if (size != 0) {
ALOGV("Got data in buffer %d, size=%d, pts=%lld",
bufIndex, size, ptsUsec);
- assert(trackIdx != -1);
{ // scope
ATRACE_NAME("orientation");
@@ -379,14 +392,23 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
}
- // The MediaMuxer docs are unclear, but it appears that we
- // need to pass either the full set of BufferInfo flags, or
- // (flags & BUFFER_FLAG_SYNCFRAME).
- //
- // If this blocks for too long we could drop frames. We may
- // want to queue these up and do them on a different thread.
- { // scope
+ if (muxer == NULL) {
+ fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
+ // Flush the data immediately in case we're streaming.
+ // We don't want to do this if all we've written is
+ // the SPS/PPS data because mplayer gets confused.
+ if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
+ fflush(rawFp);
+ }
+ } else {
+ // The MediaMuxer docs are unclear, but it appears that we
+ // need to pass either the full set of BufferInfo flags, or
+ // (flags & BUFFER_FLAG_SYNCFRAME).
+ //
+ // If this blocks for too long we could drop frames. We may
+ // want to queue these up and do them on a different thread.
ATRACE_NAME("write sample");
+ assert(trackIdx != -1);
err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
ptsUsec, flags);
if (err != NO_ERROR) {
@@ -418,12 +440,14 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
ALOGV("Encoder format changed");
sp<AMessage> newFormat;
encoder->getOutputFormat(&newFormat);
- trackIdx = muxer->addTrack(newFormat);
- ALOGV("Starting muxer");
- err = muxer->start();
- if (err != NO_ERROR) {
- fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
- return err;
+ if (muxer != NULL) {
+ trackIdx = muxer->addTrack(newFormat);
+ ALOGV("Starting muxer");
+ err = muxer->start();
+ if (err != NO_ERROR) {
+ fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
+ return err;
+ }
}
}
break;
@@ -457,7 +481,45 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
}
/*
- * Main "do work" method.
+ * Raw H.264 byte stream output requested. Send the output to stdout
+ * if desired. If the output is a tty, reconfigure it to avoid the
+ * CRLF line termination that we see with "adb shell" commands.
+ */
+static FILE* prepareRawOutput(const char* fileName) {
+ FILE* rawFp = NULL;
+
+ if (strcmp(fileName, "-") == 0) {
+ if (gVerbose) {
+ fprintf(stderr, "ERROR: verbose output and '-' not compatible");
+ return NULL;
+ }
+ rawFp = stdout;
+ } else {
+ rawFp = fopen(fileName, "w");
+ if (rawFp == NULL) {
+ fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
+ return NULL;
+ }
+ }
+
+ int fd = fileno(rawFp);
+ if (isatty(fd)) {
+ // best effort -- reconfigure tty for "raw"
+ ALOGD("raw video output to tty (fd=%d)", fd);
+ struct termios term;
+ if (tcgetattr(fd, &term) == 0) {
+ cfmakeraw(&term);
+ if (tcsetattr(fd, TCSANOW, &term) == 0) {
+ ALOGD("tty successfully configured for raw");
+ }
+ }
+ }
+
+ return rawFp;
+}
+
+/*
+ * Main "do work" start point.
*
* Configures codec, muxer, and virtual display, then starts moving bits
* around.
@@ -499,30 +561,40 @@ static status_t recordScreen(const char* fileName) {
// Configure and start the encoder.
sp<MediaCodec> encoder;
+ sp<FrameOutput> frameOutput;
sp<IGraphicBufferProducer> encoderInputSurface;
- err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);
-
- if (err != NO_ERROR && !gSizeSpecified) {
- // fallback is defined for landscape; swap if we're in portrait
- bool needSwap = gVideoWidth < gVideoHeight;
- uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
- uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
- if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
- ALOGV("Retrying with 720p");
- fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
- gVideoWidth, gVideoHeight, newWidth, newHeight);
- gVideoWidth = newWidth;
- gVideoHeight = newHeight;
- err = prepareEncoder(mainDpyInfo.fps, &encoder,
- &encoderInputSurface);
+ if (gOutputFormat != FORMAT_FRAMES) {
+ err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);
+
+ if (err != NO_ERROR && !gSizeSpecified) {
+ // fallback is defined for landscape; swap if we're in portrait
+ bool needSwap = gVideoWidth < gVideoHeight;
+ uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
+ uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
+ if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
+ ALOGV("Retrying with 720p");
+ fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
+ gVideoWidth, gVideoHeight, newWidth, newHeight);
+ gVideoWidth = newWidth;
+ gVideoHeight = newHeight;
+ err = prepareEncoder(mainDpyInfo.fps, &encoder,
+ &encoderInputSurface);
+ }
}
- }
- if (err != NO_ERROR) return err;
-
- // From here on, we must explicitly release() the encoder before it goes
- // out of scope, or we will get an assertion failure from stagefright
- // later on in a different thread.
+ if (err != NO_ERROR) return err;
+ // From here on, we must explicitly release() the encoder before it goes
+ // out of scope, or we will get an assertion failure from stagefright
+ // later on in a different thread.
+ } else {
+ // We're not using an encoder at all. The "encoder input surface" we hand to
+ // SurfaceFlinger will just feed directly to us.
+ frameOutput = new FrameOutput();
+ err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
+ if (err != NO_ERROR) {
+ return err;
+ }
+ }
// Draw the "info" page by rendering a frame with GLES and sending
// it directly to the encoder.
@@ -539,7 +611,7 @@ static status_t recordScreen(const char* fileName) {
overlay = new Overlay();
err = overlay->start(encoderInputSurface, &bufferProducer);
if (err != NO_ERROR) {
- encoder->release();
+ if (encoder != NULL) encoder->release();
return err;
}
if (gVerbose) {
@@ -554,40 +626,91 @@ static status_t recordScreen(const char* fileName) {
sp<IBinder> dpy;
err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
if (err != NO_ERROR) {
- encoder->release();
+ if (encoder != NULL) encoder->release();
return err;
}
- // Configure muxer. We have to wait for the CSD blob from the encoder
- // before we can start it.
- sp<MediaMuxer> muxer = new MediaMuxer(fileName,
- MediaMuxer::OUTPUT_FORMAT_MPEG_4);
- if (gRotate) {
- muxer->setOrientationHint(90); // TODO: does this do anything?
- }
-
- // Main encoder loop.
- err = runEncoder(encoder, muxer, mainDpy, dpy, mainDpyInfo.orientation);
- if (err != NO_ERROR) {
- fprintf(stderr, "Encoder failed (err=%d)\n", err);
- // fall through to cleanup
- }
+ sp<MediaMuxer> muxer = NULL;
+ FILE* rawFp = NULL;
+ switch (gOutputFormat) {
+ case FORMAT_MP4: {
+ // Configure muxer. We have to wait for the CSD blob from the encoder
+ // before we can start it.
+ muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+ if (gRotate) {
+ muxer->setOrientationHint(90); // TODO: does this do anything?
+ }
+ break;
+ }
+ case FORMAT_H264:
+ case FORMAT_FRAMES: {
+ rawFp = prepareRawOutput(fileName);
+ if (rawFp == NULL) {
+ if (encoder != NULL) encoder->release();
+ return -1;
+ }
+ break;
+ }
+ default:
+ fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
+ abort();
+ }
+
+ if (gOutputFormat == FORMAT_FRAMES) {
+ // TODO: if we want to make this a proper feature, we should output
+ // an outer header with version info. Right now we never change
+ // the frame size or format, so we could conceivably just send
+ // the current frame header once and then follow it with an
+ // unbroken stream of data.
+
+ // Make the EGL context current again. This gets unhooked if we're
+ // using "--bugreport" mode.
+ // TODO: figure out if we can eliminate this
+ frameOutput->prepareToCopy();
+
+ while (!gStopRequested) {
+ // Poll for frames, the same way we do for MediaCodec. We do
+ // all of the work on the main thread.
+ //
+ // Ideally we'd sleep indefinitely and wake when the
+ // stop was requested, but this will do for now. (It almost
+ // works because wait() wakes when a signal hits, but we
+ // need to handle the edge cases.)
+ err = frameOutput->copyFrame(rawFp, 250000);
+ if (err == ETIMEDOUT) {
+ err = NO_ERROR;
+ } else if (err != NO_ERROR) {
+ ALOGE("Got error %d from copyFrame()", err);
+ break;
+ }
+ }
+ } else {
+ // Main encoder loop.
+ err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
+ mainDpyInfo.orientation);
+ if (err != NO_ERROR) {
+ fprintf(stderr, "Encoder failed (err=%d)\n", err);
+ // fall through to cleanup
+ }
- if (gVerbose) {
- printf("Stopping encoder and muxer\n");
+ if (gVerbose) {
+ printf("Stopping encoder and muxer\n");
+ }
}
// Shut everything down, starting with the producer side.
encoderInputSurface = NULL;
SurfaceComposerClient::destroyDisplay(dpy);
- if (overlay != NULL) {
- overlay->stop();
+ if (overlay != NULL) overlay->stop();
+ if (encoder != NULL) encoder->stop();
+ if (muxer != NULL) {
+ // If we don't stop muxer explicitly, i.e. let the destructor run,
+ // it may hang (b/11050628).
+ muxer->stop();
+ } else if (rawFp != stdout) {
+ fclose(rawFp);
}
- encoder->stop();
- // If we don't stop muxer explicitly, i.e. let the destructor run,
- // it may hang (b/11050628).
- muxer->stop();
- encoder->release();
+ if (encoder != NULL) encoder->release();
return err;
}
@@ -749,10 +872,12 @@ int main(int argc, char* const argv[]) {
{ "size", required_argument, NULL, 's' },
{ "bit-rate", required_argument, NULL, 'b' },
{ "time-limit", required_argument, NULL, 't' },
+ { "bugreport", no_argument, NULL, 'u' },
+ // "unofficial" options
{ "show-device-info", no_argument, NULL, 'i' },
{ "show-frame-time", no_argument, NULL, 'f' },
- { "bugreport", no_argument, NULL, 'u' },
{ "rotate", no_argument, NULL, 'r' },
+ { "output-format", required_argument, NULL, 'o' },
{ NULL, 0, NULL, 0 }
};
@@ -804,20 +929,32 @@ int main(int argc, char* const argv[]) {
return 2;
}
break;
- case 'i':
+ case 'u':
gWantInfoScreen = true;
- break;
- case 'f':
gWantFrameTime = true;
break;
- case 'u':
+ case 'i':
gWantInfoScreen = true;
+ break;
+ case 'f':
gWantFrameTime = true;
break;
case 'r':
// experimental feature
gRotate = true;
break;
+ case 'o':
+ if (strcmp(optarg, "mp4") == 0) {
+ gOutputFormat = FORMAT_MP4;
+ } else if (strcmp(optarg, "h264") == 0) {
+ gOutputFormat = FORMAT_H264;
+ } else if (strcmp(optarg, "frames") == 0) {
+ gOutputFormat = FORMAT_FRAMES;
+ } else {
+ fprintf(stderr, "Unknown format '%s'\n", optarg);
+ return 2;
+ }
+ break;
default:
if (ic != '?') {
fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
@@ -831,17 +968,19 @@ int main(int argc, char* const argv[]) {
return 2;
}
- // MediaMuxer tries to create the file in the constructor, but we don't
- // learn about the failure until muxer.start(), which returns a generic
- // error code without logging anything. We attempt to create the file
- // now for better diagnostics.
const char* fileName = argv[optind];
- int fd = open(fileName, O_CREAT | O_RDWR, 0644);
- if (fd < 0) {
- fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
- return 1;
+ if (gOutputFormat == FORMAT_MP4) {
+ // MediaMuxer tries to create the file in the constructor, but we don't
+ // learn about the failure until muxer.start(), which returns a generic
+ // error code without logging anything. We attempt to create the file
+ // now for better diagnostics.
+ int fd = open(fileName, O_CREAT | O_RDWR, 0644);
+ if (fd < 0) {
+ fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
+ return 1;
+ }
+ close(fd);
}
- close(fd);
status_t err = recordScreen(fileName);
if (err == NO_ERROR) {
diff --git a/cmds/screenrecord/screenrecord.h b/cmds/screenrecord/screenrecord.h
index 95e8a68..9b058c2 100644
--- a/cmds/screenrecord/screenrecord.h
+++ b/cmds/screenrecord/screenrecord.h
@@ -18,6 +18,6 @@
#define SCREENRECORD_SCREENRECORD_H
#define kVersionMajor 1
-#define kVersionMinor 1
+#define kVersionMinor 2
#endif /*SCREENRECORD_SCREENRECORD_H*/
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
index 5d2d721..1b2f792 100644
--- a/cmds/stagefright/SimplePlayer.cpp
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -23,6 +23,7 @@
#include <gui/Surface.h>
#include <media/AudioTrack.h>
#include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -275,7 +276,8 @@ status_t SimplePlayer::onPrepare() {
mExtractor = new NuMediaExtractor;
- status_t err = mExtractor->setDataSource(mPath.c_str());
+ status_t err = mExtractor->setDataSource(
+ NULL /* httpService */, mPath.c_str());
if (err != OK) {
mExtractor.clear();
diff --git a/cmds/stagefright/SineSource.cpp b/cmds/stagefright/SineSource.cpp
index 14b4306..587077a 100644
--- a/cmds/stagefright/SineSource.cpp
+++ b/cmds/stagefright/SineSource.cpp
@@ -24,7 +24,7 @@ SineSource::~SineSource() {
}
}
-status_t SineSource::start(MetaData *params) {
+status_t SineSource::start(MetaData * /* params */) {
CHECK(!mStarted);
mGroup = new MediaBufferGroup;
@@ -58,7 +58,7 @@ sp<MetaData> SineSource::getFormat() {
}
status_t SineSource::read(
- MediaBuffer **out, const ReadOptions *options) {
+ MediaBuffer **out, const ReadOptions * /* options */) {
*out = NULL;
MediaBuffer *buffer;
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index d125ad1..fd02bcc 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -24,6 +24,7 @@
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -76,7 +77,7 @@ static int decode(
static int64_t kTimeout = 500ll;
sp<NuMediaExtractor> extractor = new NuMediaExtractor;
- if (extractor->setDataSource(path) != OK) {
+ if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
fprintf(stderr, "unable to instantiate extractor.\n");
return 1;
}
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index 90daea2..f4a33e8 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -20,6 +20,7 @@
#include <utils/Log.h>
#include <binder/ProcessState.h>
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -59,7 +60,7 @@ static int muxing(
int trimEndTimeMs,
int rotationDegrees) {
sp<NuMediaExtractor> extractor = new NuMediaExtractor;
- if (extractor->setDataSource(path) != OK) {
+ if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
fprintf(stderr, "unable to instantiate extractor. %s\n", path);
return 1;
}
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
index b7a40c2..fdc352e 100644
--- a/cmds/stagefright/record.cpp
+++ b/cmds/stagefright/record.cpp
@@ -296,7 +296,7 @@ int main(int argc, char **argv) {
}
#else
-int main(int argc, char **argv) {
+int main(int /* argc */, char ** /* argv */) {
android::ProcessState::self()->startThreadPool();
OMXClient client;
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
index b2b9ce5..3c0c7ec 100644
--- a/cmds/stagefright/sf2.cpp
+++ b/cmds/stagefright/sf2.cpp
@@ -19,8 +19,12 @@
#include <inttypes.h>
#include <utils/Log.h>
+#include <signal.h>
+
#include <binder/ProcessState.h>
+#include <media/IMediaHTTPService.h>
+
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -43,6 +47,18 @@
using namespace android;
+volatile static bool ctrlc = false;
+
+static sighandler_t oldhandler = NULL;
+
+static void mysighandler(int signum) {
+ if (signum == SIGINT) {
+ ctrlc = true;
+ return;
+ }
+ oldhandler(signum);
+}
+
struct Controller : public AHandler {
Controller(const char *uri, bool decodeAudio,
const sp<Surface> &surface, bool renderToSurface)
@@ -63,7 +79,30 @@ protected:
virtual ~Controller() {
}
+ virtual void printStatistics() {
+ int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs;
+
+ if (mDecodeAudio) {
+ printf("%" PRId64 " bytes received. %.2f KB/sec\n",
+ mTotalBytesReceived,
+ mTotalBytesReceived * 1E6 / 1024 / delayUs);
+ } else {
+ printf("%d frames decoded, %.2f fps. %" PRId64 " bytes "
+ "received. %.2f KB/sec\n",
+ mNumOutputBuffersReceived,
+ mNumOutputBuffersReceived * 1E6 / delayUs,
+ mTotalBytesReceived,
+ mTotalBytesReceived * 1E6 / 1024 / delayUs);
+ }
+ }
+
virtual void onMessageReceived(const sp<AMessage> &msg) {
+ if (ctrlc) {
+ printf("\n");
+ printStatistics();
+ (new AMessage(kWhatStop, id()))->post();
+ ctrlc = false;
+ }
switch (msg->what()) {
case kWhatStart:
{
@@ -76,7 +115,8 @@ protected:
#endif
sp<DataSource> dataSource =
- DataSource::CreateFromURI(mURI.c_str());
+ DataSource::CreateFromURI(
+ NULL /* httpService */, mURI.c_str());
sp<MediaExtractor> extractor =
MediaExtractor::Create(dataSource);
@@ -99,7 +139,10 @@ protected:
break;
}
}
- CHECK(mSource != NULL);
+ if (mSource == NULL) {
+ printf("no %s track found\n", mDecodeAudio ? "audio" : "video");
+ exit (1);
+ }
CHECK_EQ(mSource->start(), (status_t)OK);
@@ -181,21 +224,7 @@ protected:
|| what == ACodec::kWhatError) {
printf((what == ACodec::kWhatEOS) ? "$\n" : "E\n");
- int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs;
-
- if (mDecodeAudio) {
- printf("%" PRId64 " bytes received. %.2f KB/sec\n",
- mTotalBytesReceived,
- mTotalBytesReceived * 1E6 / 1024 / delayUs);
- } else {
- printf("%d frames decoded, %.2f fps. %" PRId64 " bytes "
- "received. %.2f KB/sec\n",
- mNumOutputBuffersReceived,
- mNumOutputBuffersReceived * 1E6 / delayUs,
- mTotalBytesReceived,
- mTotalBytesReceived * 1E6 / 1024 / delayUs);
- }
-
+ printStatistics();
(new AMessage(kWhatStop, id()))->post();
} else if (what == ACodec::kWhatFlushCompleted) {
mSeekState = SEEK_FLUSH_COMPLETED;
@@ -639,6 +668,8 @@ int main(int argc, char **argv) {
looper->registerHandler(controller);
+ signal(SIGINT, mysighandler);
+
controller->startAsync();
CHECK_EQ(looper->start(true /* runOnCallingThread */), (status_t)OK);
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index ab2c54b..b70afe6 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -29,6 +29,7 @@
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ALooper.h>
#include "include/NuCachedSource2.h"
@@ -938,9 +939,11 @@ int main(int argc, char **argv) {
} else {
CHECK(useSurfaceTexAlloc);
- sp<BufferQueue> bq = new BufferQueue();
- sp<GLConsumer> texture = new GLConsumer(bq, 0 /* tex */);
- gSurface = new Surface(bq);
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ sp<GLConsumer> texture = new GLConsumer(consumer, 0 /* tex */);
+ gSurface = new Surface(producer);
}
CHECK_EQ((status_t)OK,
@@ -958,7 +961,8 @@ int main(int argc, char **argv) {
const char *filename = argv[k];
- sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
+ sp<DataSource> dataSource =
+ DataSource::CreateFromURI(NULL /* httpService */, filename);
if (strncasecmp(filename, "sine:", 5) && dataSource == NULL) {
fprintf(stderr, "Unable to create data source.\n");
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index dba67a9..0566d14 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -21,6 +21,7 @@
#include <binder/ProcessState.h>
#include <cutils/properties.h> // for property_get
+#include <media/IMediaHTTPService.h>
#include <media/IStreamSource.h>
#include <media/mediaplayer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -159,7 +160,9 @@ private:
MyConvertingStreamSource::MyConvertingStreamSource(const char *filename)
: mCurrentBufferIndex(-1),
mCurrentBufferOffset(0) {
- sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
+ sp<DataSource> dataSource =
+ DataSource::CreateFromURI(NULL /* httpService */, filename);
+
CHECK(dataSource != NULL);
sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
@@ -371,7 +374,7 @@ int main(int argc, char **argv) {
}
sp<IMediaPlayer> player =
- service->create(client, 0);
+ service->create(client, AUDIO_SESSION_ALLOCATE);
if (player != NULL && player->setDataSource(source) == NO_ERROR) {
player->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());