author     Anatol Pomozov <anatol.pomozov@gmail.com>    2012-03-28 09:12:55 -0700
committer  Anatol Pomozov <anatol.pomozov@gmail.com>    2012-03-28 12:02:47 -0700
commit     b0b2b4d890cf3bfb274797a759642b4e733343d7 (patch)
tree       12ad21cbad346f02d542aa4d672ffd76407d58a9 /cmds
parent     51f8eec23a2bcc2cc190373cdd1195972d9b8804 (diff)
parent     5a5491c17d74bd2c80cf451c6ddbba22d5d5f08a (diff)
Merge media files with history from frameworks/base.git
Diffstat (limited to 'cmds')
-rw-r--r--  cmds/stagefright/Android.mk        167
-rw-r--r--  cmds/stagefright/SimplePlayer.cpp  645
-rw-r--r--  cmds/stagefright/SimplePlayer.h    109
-rw-r--r--  cmds/stagefright/SineSource.cpp    102
-rw-r--r--  cmds/stagefright/SineSource.h       39
-rw-r--r--  cmds/stagefright/WaveWriter.h       71
-rw-r--r--  cmds/stagefright/audioloop.cpp     101
-rw-r--r--  cmds/stagefright/codec.cpp         495
-rw-r--r--  cmds/stagefright/jpeg.cpp           91
-rw-r--r--  cmds/stagefright/jpeg.h              6
-rw-r--r--  cmds/stagefright/record.cpp        354
-rw-r--r--  cmds/stagefright/recordvideo.cpp   302
-rw-r--r--  cmds/stagefright/sf2.cpp           658
-rw-r--r--  cmds/stagefright/stagefright.cpp  1108
-rw-r--r--  cmds/stagefright/stream.cpp        375
15 files changed, 4623 insertions(+), 0 deletions(-)
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
new file mode 100644
index 0000000..52a9293
--- /dev/null
+++ b/cmds/stagefright/Android.mk
@@ -0,0 +1,167 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ stagefright.cpp \
+ jpeg.cpp \
+ SineSource.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright libmedia libmedia_native libutils libbinder libstagefright_foundation \
+ libjpeg libgui
+
+LOCAL_C_INCLUDES:= \
+ frameworks/base/media/libstagefright \
+ frameworks/base/media/libstagefright/include \
+ $(TOP)/frameworks/native/include/media/openmax \
+ external/jpeg \
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= stagefright
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ SineSource.cpp \
+ record.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation
+
+LOCAL_C_INCLUDES:= \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= record
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ SineSource.cpp \
+ recordvideo.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation
+
+LOCAL_C_INCLUDES:= \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= recordvideo
+
+include $(BUILD_EXECUTABLE)
+
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ SineSource.cpp \
+ audioloop.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation
+
+LOCAL_C_INCLUDES:= \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= audioloop
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ stream.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libgui \
+ libstagefright_foundation libmedia libmedia_native
+
+LOCAL_C_INCLUDES:= \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= stream
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ sf2.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation \
+ libmedia libmedia_native libgui libcutils libui
+
+LOCAL_C_INCLUDES:= \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= sf2
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ codec.cpp \
+ SimplePlayer.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation \
+ libmedia libmedia_native libgui libcutils libui
+
+LOCAL_C_INCLUDES:= \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= codec
+
+include $(BUILD_EXECUTABLE)
+
diff --git a/cmds/stagefright/SimplePlayer.cpp b/cmds/stagefright/SimplePlayer.cpp
new file mode 100644
index 0000000..0cfeb3e
--- /dev/null
+++ b/cmds/stagefright/SimplePlayer.cpp
@@ -0,0 +1,645 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SimplePlayer"
+#include <utils/Log.h>
+
+#include "SimplePlayer.h"
+
+#include <gui/SurfaceTextureClient.h>
+#include <media/AudioTrack.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/NativeWindowWrapper.h>
+#include <media/stagefright/NuMediaExtractor.h>
+
+namespace android {
+
+SimplePlayer::SimplePlayer()
+ : mState(UNINITIALIZED),
+ mDoMoreStuffGeneration(0),
+ mStartTimeRealUs(-1ll) {
+}
+
+SimplePlayer::~SimplePlayer() {
+}
+
+// static
+status_t PostAndAwaitResponse(
+ const sp<AMessage> &msg, sp<AMessage> *response) {
+ status_t err = msg->postAndAwaitResponse(response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!(*response)->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+status_t SimplePlayer::setDataSource(const char *path) {
+ sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
+ msg->setString("path", path);
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::setSurface(const sp<ISurfaceTexture> &surfaceTexture) {
+ sp<AMessage> msg = new AMessage(kWhatSetSurface, id());
+
+ sp<SurfaceTextureClient> surfaceTextureClient;
+ if (surfaceTexture != NULL) {
+ surfaceTextureClient = new SurfaceTextureClient(surfaceTexture);
+ }
+
+ msg->setObject(
+ "native-window", new NativeWindowWrapper(surfaceTextureClient));
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::prepare() {
+ sp<AMessage> msg = new AMessage(kWhatPrepare, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::start() {
+ sp<AMessage> msg = new AMessage(kWhatStart, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::stop() {
+ sp<AMessage> msg = new AMessage(kWhatStop, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t SimplePlayer::reset() {
+ sp<AMessage> msg = new AMessage(kWhatReset, id());
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+void SimplePlayer::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatSetDataSource:
+ {
+ status_t err;
+ if (mState != UNINITIALIZED) {
+ err = INVALID_OPERATION;
+ } else {
+ CHECK(msg->findString("path", &mPath));
+ mState = UNPREPARED;
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatSetSurface:
+ {
+ status_t err;
+ if (mState != UNPREPARED) {
+ err = INVALID_OPERATION;
+ } else {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("native-window", &obj));
+
+ mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
+
+ err = OK;
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatPrepare:
+ {
+ status_t err;
+ if (mState != UNPREPARED) {
+ err = INVALID_OPERATION;
+ } else {
+ err = onPrepare();
+
+ if (err == OK) {
+ mState = STOPPED;
+ }
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStart:
+ {
+ status_t err = OK;
+
+ if (mState == UNPREPARED) {
+ err = onPrepare();
+
+ if (err == OK) {
+ mState = STOPPED;
+ }
+ }
+
+ if (err == OK) {
+ if (mState != STOPPED) {
+ err = INVALID_OPERATION;
+ } else {
+ err = onStart();
+
+ if (err == OK) {
+ mState = STARTED;
+ }
+ }
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStop:
+ {
+ status_t err;
+
+ if (mState != STARTED) {
+ err = INVALID_OPERATION;
+ } else {
+ err = onStop();
+
+ if (err == OK) {
+ mState = STOPPED;
+ }
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatReset:
+ {
+ status_t err = OK;
+
+ if (mState == STARTED) {
+ CHECK_EQ(onStop(), (status_t)OK);
+ mState = STOPPED;
+ }
+
+ if (mState == STOPPED) {
+ err = onReset();
+ mState = UNINITIALIZED;
+ }
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatDoMoreStuff:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mDoMoreStuffGeneration) {
+ break;
+ }
+
+ status_t err = onDoMoreStuff();
+
+ if (err == OK) {
+ msg->post(10000ll);
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+status_t SimplePlayer::onPrepare() {
+ CHECK_EQ(mState, UNPREPARED);
+
+ mExtractor = new NuMediaExtractor;
+
+ status_t err = mExtractor->setDataSource(mPath.c_str());
+
+ if (err != OK) {
+ mExtractor.clear();
+ return err;
+ }
+
+ if (mCodecLooper == NULL) {
+ mCodecLooper = new ALooper;
+ mCodecLooper->start();
+ }
+
+ bool haveAudio = false;
+ bool haveVideo = false;
+ for (size_t i = 0; i < mExtractor->countTracks(); ++i) {
+ sp<AMessage> format;
+ status_t err = mExtractor->getTrackFormat(i, &format);
+ CHECK_EQ(err, (status_t)OK);
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ if (!haveAudio && !strncasecmp(mime.c_str(), "audio/", 6)) {
+ haveAudio = true;
+ } else if (!haveVideo && !strncasecmp(mime.c_str(), "video/", 6)) {
+ haveVideo = true;
+ } else {
+ continue;
+ }
+
+ err = mExtractor->selectTrack(i);
+ CHECK_EQ(err, (status_t)OK);
+
+ CodecState *state =
+ &mStateByTrackIndex.editValueAt(
+ mStateByTrackIndex.add(i, CodecState()));
+
+ state->mNumFramesWritten = 0;
+ state->mCodec = MediaCodec::CreateByType(
+ mCodecLooper, mime.c_str(), false /* encoder */);
+
+ CHECK(state->mCodec != NULL);
+
+ err = state->mCodec->configure(
+ format, mNativeWindow->getSurfaceTextureClient(),
+ 0 /* flags */);
+
+ CHECK_EQ(err, (status_t)OK);
+
+ size_t j = 0;
+ sp<ABuffer> buffer;
+ while (format->findBuffer(StringPrintf("csd-%d", j).c_str(), &buffer)) {
+ state->mCSD.push_back(buffer);
+
+ ++j;
+ }
+ }
+
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ status_t err = state->mCodec->start();
+ CHECK_EQ(err, (status_t)OK);
+
+ err = state->mCodec->getInputBuffers(&state->mBuffers[0]);
+ CHECK_EQ(err, (status_t)OK);
+
+ err = state->mCodec->getOutputBuffers(&state->mBuffers[1]);
+ CHECK_EQ(err, (status_t)OK);
+
+ for (size_t j = 0; j < state->mCSD.size(); ++j) {
+ const sp<ABuffer> &srcBuffer = state->mCSD.itemAt(j);
+
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index, -1ll);
+ CHECK_EQ(err, (status_t)OK);
+
+ const sp<ABuffer> &dstBuffer = state->mBuffers[0].itemAt(index);
+
+ CHECK_LE(srcBuffer->size(), dstBuffer->capacity());
+ dstBuffer->setRange(0, srcBuffer->size());
+ memcpy(dstBuffer->data(), srcBuffer->data(), srcBuffer->size());
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ 0,
+ dstBuffer->size(),
+ 0ll,
+ MediaCodec::BUFFER_FLAG_CODECCONFIG);
+ CHECK_EQ(err, (status_t)OK);
+ }
+ }
+
+ return OK;
+}
+
+status_t SimplePlayer::onStart() {
+ CHECK_EQ(mState, STOPPED);
+
+ mStartTimeRealUs = -1ll;
+
+ sp<AMessage> msg = new AMessage(kWhatDoMoreStuff, id());
+ msg->setInt32("generation", ++mDoMoreStuffGeneration);
+ msg->post();
+
+ return OK;
+}
+
+status_t SimplePlayer::onStop() {
+ CHECK_EQ(mState, STARTED);
+
+ ++mDoMoreStuffGeneration;
+
+ return OK;
+}
+
+status_t SimplePlayer::onReset() {
+ CHECK_EQ(mState, STOPPED);
+
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ CHECK_EQ(state->mCodec->release(), (status_t)OK);
+ }
+
+ mStartTimeRealUs = -1ll;
+
+ mStateByTrackIndex.clear();
+ mCodecLooper.clear();
+ mExtractor.clear();
+ mNativeWindow.clear();
+ mPath.clear();
+
+ return OK;
+}
+
+status_t SimplePlayer::onDoMoreStuff() {
+ ALOGV("onDoMoreStuff");
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ status_t err;
+ do {
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index);
+
+ if (err == OK) {
+ ALOGV("dequeued input buffer on track %d",
+ mStateByTrackIndex.keyAt(i));
+
+ state->mAvailInputBufferIndices.push_back(index);
+ } else {
+ ALOGV("dequeueInputBuffer on track %d returned %d",
+ mStateByTrackIndex.keyAt(i), err);
+ }
+ } while (err == OK);
+
+ do {
+ BufferInfo info;
+ err = state->mCodec->dequeueOutputBuffer(
+ &info.mIndex,
+ &info.mOffset,
+ &info.mSize,
+ &info.mPresentationTimeUs,
+ &info.mFlags);
+
+ if (err == OK) {
+ ALOGV("dequeued output buffer on track %d",
+ mStateByTrackIndex.keyAt(i));
+
+ state->mAvailOutputBufferInfos.push_back(info);
+ } else if (err == INFO_FORMAT_CHANGED) {
+ err = onOutputFormatChanged(mStateByTrackIndex.keyAt(i), state);
+ CHECK_EQ(err, (status_t)OK);
+ } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+ err = state->mCodec->getOutputBuffers(&state->mBuffers[1]);
+ CHECK_EQ(err, (status_t)OK);
+ } else {
+ ALOGV("dequeueOutputBuffer on track %d returned %d",
+ mStateByTrackIndex.keyAt(i), err);
+ }
+ } while (err == OK
+ || err == INFO_FORMAT_CHANGED
+ || err == INFO_OUTPUT_BUFFERS_CHANGED);
+ }
+
+ for (;;) {
+ size_t trackIndex;
+ status_t err = mExtractor->getSampleTrackIndex(&trackIndex);
+
+ if (err != OK) {
+ ALOGI("encountered input EOS.");
+ break;
+ } else {
+ CodecState *state = &mStateByTrackIndex.editValueFor(trackIndex);
+
+ if (state->mAvailInputBufferIndices.empty()) {
+ break;
+ }
+
+ size_t index = *state->mAvailInputBufferIndices.begin();
+ state->mAvailInputBufferIndices.erase(
+ state->mAvailInputBufferIndices.begin());
+
+ const sp<ABuffer> &dstBuffer =
+ state->mBuffers[0].itemAt(index);
+
+ err = mExtractor->readSampleData(dstBuffer);
+ CHECK_EQ(err, (status_t)OK);
+
+ int64_t timeUs;
+ CHECK_EQ(mExtractor->getSampleTime(&timeUs), (status_t)OK);
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ dstBuffer->offset(),
+ dstBuffer->size(),
+ timeUs,
+ 0);
+ CHECK_EQ(err, (status_t)OK);
+
+ ALOGV("enqueued input data on track %d", trackIndex);
+
+ err = mExtractor->advance();
+ CHECK_EQ(err, (status_t)OK);
+ }
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+
+ if (mStartTimeRealUs < 0ll) {
+ mStartTimeRealUs = nowUs + 1000000ll;
+ }
+
+ for (size_t i = 0; i < mStateByTrackIndex.size(); ++i) {
+ CodecState *state = &mStateByTrackIndex.editValueAt(i);
+
+ while (!state->mAvailOutputBufferInfos.empty()) {
+ BufferInfo *info = &*state->mAvailOutputBufferInfos.begin();
+
+ int64_t whenRealUs = info->mPresentationTimeUs + mStartTimeRealUs;
+ int64_t lateByUs = nowUs - whenRealUs;
+
+ if (lateByUs > -10000ll) {
+ bool release = true;
+
+ if (lateByUs > 30000ll) {
+ ALOGI("track %d buffer late by %lld us, dropping.",
+ mStateByTrackIndex.keyAt(i), lateByUs);
+ state->mCodec->releaseOutputBuffer(info->mIndex);
+ } else {
+ if (state->mAudioTrack != NULL) {
+ const sp<ABuffer> &srcBuffer =
+ state->mBuffers[1].itemAt(info->mIndex);
+
+ renderAudio(state, info, srcBuffer);
+
+ if (info->mSize > 0) {
+ release = false;
+ }
+ }
+
+ if (release) {
+ state->mCodec->renderOutputBufferAndRelease(
+ info->mIndex);
+ }
+ }
+
+ if (release) {
+ state->mAvailOutputBufferInfos.erase(
+ state->mAvailOutputBufferInfos.begin());
+
+ info = NULL;
+ } else {
+ break;
+ }
+ } else {
+ ALOGV("track %d buffer early by %lld us.",
+ mStateByTrackIndex.keyAt(i), -lateByUs);
+ break;
+ }
+ }
+ }
+
+ return OK;
+}
+
+status_t SimplePlayer::onOutputFormatChanged(
+ size_t trackIndex, CodecState *state) {
+ sp<AMessage> format;
+ status_t err = state->mCodec->getOutputFormat(&format);
+
+ if (err != OK) {
+ return err;
+ }
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ if (!strncasecmp(mime.c_str(), "audio/", 6)) {
+ int32_t channelCount;
+ int32_t sampleRate;
+ CHECK(format->findInt32("channel-count", &channelCount));
+ CHECK(format->findInt32("sample-rate", &sampleRate));
+
+ state->mAudioTrack = new AudioTrack(
+ AUDIO_STREAM_MUSIC,
+ sampleRate,
+ AUDIO_FORMAT_PCM_16_BIT,
+ audio_channel_out_mask_from_count(channelCount),
+ 0);
+
+ state->mNumFramesWritten = 0;
+ }
+
+ return OK;
+}
+
+void SimplePlayer::renderAudio(
+ CodecState *state, BufferInfo *info, const sp<ABuffer> &buffer) {
+ CHECK(state->mAudioTrack != NULL);
+
+ if (state->mAudioTrack->stopped()) {
+ state->mAudioTrack->start();
+ }
+
+ uint32_t numFramesPlayed;
+ CHECK_EQ(state->mAudioTrack->getPosition(&numFramesPlayed), (status_t)OK);
+
+ uint32_t numFramesAvailableToWrite =
+ state->mAudioTrack->frameCount()
+ - (state->mNumFramesWritten - numFramesPlayed);
+
+ size_t numBytesAvailableToWrite =
+ numFramesAvailableToWrite * state->mAudioTrack->frameSize();
+
+ size_t copy = info->mSize;
+ if (copy > numBytesAvailableToWrite) {
+ copy = numBytesAvailableToWrite;
+ }
+
+ if (copy == 0) {
+ return;
+ }
+
+ int64_t startTimeUs = ALooper::GetNowUs();
+
+ ssize_t nbytes = state->mAudioTrack->write(
+ buffer->base() + info->mOffset, copy);
+
+ CHECK_EQ(nbytes, (ssize_t)copy);
+
+ int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+
+ uint32_t numFramesWritten = nbytes / state->mAudioTrack->frameSize();
+
+ if (delayUs > 2000ll) {
+ ALOGW("AudioTrack::write took %lld us, numFramesAvailableToWrite=%u, "
+ "numFramesWritten=%u",
+ delayUs, numFramesAvailableToWrite, numFramesWritten);
+ }
+
+ info->mOffset += nbytes;
+ info->mSize -= nbytes;
+
+ state->mNumFramesWritten += numFramesWritten;
+}
+
+} // namespace android
diff --git a/cmds/stagefright/SimplePlayer.h b/cmds/stagefright/SimplePlayer.h
new file mode 100644
index 0000000..2548252
--- /dev/null
+++ b/cmds/stagefright/SimplePlayer.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AString.h>
+#include <utils/KeyedVector.h>
+
+namespace android {
+
+struct ABuffer;
+struct ALooper;
+struct AudioTrack;
+struct ISurfaceTexture;
+struct MediaCodec;
+struct NativeWindowWrapper;
+struct NuMediaExtractor;
+
+struct SimplePlayer : public AHandler {
+ SimplePlayer();
+
+ status_t setDataSource(const char *path);
+ status_t setSurface(const sp<ISurfaceTexture> &surfaceTexture);
+ status_t prepare();
+ status_t start();
+ status_t stop();
+ status_t reset();
+
+protected:
+ virtual ~SimplePlayer();
+
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum State {
+ UNINITIALIZED,
+ UNPREPARED,
+ STOPPED,
+ STARTED
+ };
+
+ enum {
+ kWhatSetDataSource,
+ kWhatSetSurface,
+ kWhatPrepare,
+ kWhatStart,
+ kWhatStop,
+ kWhatReset,
+ kWhatDoMoreStuff,
+ };
+
+ struct BufferInfo {
+ size_t mIndex;
+ size_t mOffset;
+ size_t mSize;
+ int64_t mPresentationTimeUs;
+ uint32_t mFlags;
+ };
+
+ struct CodecState
+ {
+ sp<MediaCodec> mCodec;
+ Vector<sp<ABuffer> > mCSD;
+ Vector<sp<ABuffer> > mBuffers[2];
+
+ List<size_t> mAvailInputBufferIndices;
+ List<BufferInfo> mAvailOutputBufferInfos;
+
+ sp<AudioTrack> mAudioTrack;
+ uint32_t mNumFramesWritten;
+ };
+
+ State mState;
+ AString mPath;
+ sp<NativeWindowWrapper> mNativeWindow;
+
+ sp<NuMediaExtractor> mExtractor;
+ sp<ALooper> mCodecLooper;
+ KeyedVector<size_t, CodecState> mStateByTrackIndex;
+ int32_t mDoMoreStuffGeneration;
+
+ int64_t mStartTimeRealUs;
+
+ status_t onPrepare();
+ status_t onStart();
+ status_t onStop();
+ status_t onReset();
+ status_t onDoMoreStuff();
+ status_t onOutputFormatChanged(size_t trackIndex, CodecState *state);
+
+ void renderAudio(
+ CodecState *state, BufferInfo *info, const sp<ABuffer> &buffer);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SimplePlayer);
+};
+
+} // namespace android
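
For reference, a minimal sketch of how the SimplePlayer interface above is meant to be driven — not part of the commit, assuming a started ALooper and an ISurfaceTexture obtained elsewhere, with a placeholder path; it mirrors the playback branch of codec.cpp further down:

    // Hypothetical driver for SimplePlayer; error handling elided.
    sp<ALooper> looper = new ALooper;
    looper->start();

    sp<SimplePlayer> player = new SimplePlayer;
    looper->registerHandler(player);

    player->setDataSource("/sdcard/clip.mp4");  // placeholder path
    player->setSurface(surfaceTexture);         // assumed sp<ISurfaceTexture>
    player->start();                            // prepares lazily if still UNPREPARED
    sleep(10);
    player->stop();
    player->reset();
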
diff --git a/cmds/stagefright/SineSource.cpp b/cmds/stagefright/SineSource.cpp
new file mode 100644
index 0000000..14b4306
--- /dev/null
+++ b/cmds/stagefright/SineSource.cpp
@@ -0,0 +1,102 @@
+#include "SineSource.h"
+
+#include <math.h>
+
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+SineSource::SineSource(int32_t sampleRate, int32_t numChannels)
+ : mStarted(false),
+ mSampleRate(sampleRate),
+ mNumChannels(numChannels),
+ mPhase(0),
+ mGroup(NULL) {
+ CHECK(numChannels == 1 || numChannels == 2);
+}
+
+SineSource::~SineSource() {
+ if (mStarted) {
+ stop();
+ }
+}
+
+status_t SineSource::start(MetaData *params) {
+ CHECK(!mStarted);
+
+ mGroup = new MediaBufferGroup;
+ mGroup->add_buffer(new MediaBuffer(kBufferSize));
+
+ mPhase = 0;
+ mStarted = true;
+
+ return OK;
+}
+
+status_t SineSource::stop() {
+ CHECK(mStarted);
+
+ delete mGroup;
+ mGroup = NULL;
+
+ mStarted = false;
+
+ return OK;
+}
+
+sp<MetaData> SineSource::getFormat() {
+ sp<MetaData> meta = new MetaData;
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
+ meta->setInt32(kKeyChannelCount, mNumChannels);
+ meta->setInt32(kKeySampleRate, mSampleRate);
+ meta->setInt32(kKeyMaxInputSize, kBufferSize);
+
+ return meta;
+}
+
+status_t SineSource::read(
+ MediaBuffer **out, const ReadOptions *options) {
+ *out = NULL;
+
+ MediaBuffer *buffer;
+ status_t err = mGroup->acquire_buffer(&buffer);
+
+ if (err != OK) {
+ return err;
+ }
+
+ size_t frameSize = mNumChannels * sizeof(int16_t);
+ size_t numFramesPerBuffer = buffer->size() / frameSize;
+
+ int16_t *ptr = (int16_t *)buffer->data();
+
+ const double k = kFrequency / mSampleRate * (2.0 * M_PI);
+
+ double x = mPhase * k;
+ for (size_t i = 0; i < numFramesPerBuffer; ++i) {
+ int16_t amplitude = (int16_t)(32767.0 * sin(x));
+
+ *ptr++ = amplitude;
+ if (mNumChannels == 2) {
+ *ptr++ = amplitude;
+ }
+
+ x += k;
+ }
+
+ buffer->meta_data()->setInt64(
+ kKeyTime, ((int64_t)mPhase * 1000000) / mSampleRate);
+
+ mPhase += numFramesPerBuffer;
+
+ buffer->set_range(0, numFramesPerBuffer * frameSize);
+
+ *out = buffer;
+
+ return OK;
+}
+
+} // namespace android
diff --git a/cmds/stagefright/SineSource.h b/cmds/stagefright/SineSource.h
new file mode 100644
index 0000000..76ab669
--- /dev/null
+++ b/cmds/stagefright/SineSource.h
@@ -0,0 +1,39 @@
+#ifndef SINE_SOURCE_H_
+
+#define SINE_SOURCE_H_
+
+#include <media/stagefright/MediaSource.h>
+
+namespace android {
+
+struct MediaBufferGroup;
+
+struct SineSource : public MediaSource {
+ SineSource(int32_t sampleRate, int32_t numChannels);
+
+ virtual status_t start(MetaData *params);
+ virtual status_t stop();
+
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **out, const ReadOptions *options = NULL);
+
+protected:
+ virtual ~SineSource();
+
+private:
+ enum { kBufferSize = 8192 };
+ static const double kFrequency = 500.0;
+
+ bool mStarted;
+ int32_t mSampleRate;
+ int32_t mNumChannels;
+ size_t mPhase;
+
+ MediaBufferGroup *mGroup;
+};
+
+} // namespace android
+
+#endif // SINE_SOURCE_H_
diff --git a/cmds/stagefright/WaveWriter.h b/cmds/stagefright/WaveWriter.h
new file mode 100644
index 0000000..a0eb66e
--- /dev/null
+++ b/cmds/stagefright/WaveWriter.h
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_WAVEWRITER_H_
+
+#define ANDROID_WAVEWRITER_H_
+
+namespace android {
+
+class WaveWriter {
+public:
+ WaveWriter(const char *filename,
+ uint16_t num_channels, uint32_t sampling_rate)
+ : mFile(fopen(filename, "wb")),
+ mTotalBytes(0) {
+ fwrite("RIFFxxxxWAVEfmt \x10\x00\x00\x00\x01\x00", 1, 22, mFile);
+ write_u16(num_channels);
+ write_u32(sampling_rate);
+ write_u32(sampling_rate * num_channels * 2);
+ write_u16(num_channels * 2);
+ write_u16(16);
+ fwrite("dataxxxx", 1, 8, mFile);
+ }
+
+ ~WaveWriter() {
+ fseek(mFile, 40, SEEK_SET);
+ write_u32(mTotalBytes);
+
+ fseek(mFile, 4, SEEK_SET);
+ write_u32(36 + mTotalBytes);
+
+ fclose(mFile);
+ mFile = NULL;
+ }
+
+ void Append(const void *data, size_t size) {
+ fwrite(data, 1, size, mFile);
+ mTotalBytes += size;
+ }
+
+private:
+ void write_u16(uint16_t x) {
+ fputc(x & 0xff, mFile);
+ fputc(x >> 8, mFile);
+ }
+
+ void write_u32(uint32_t x) {
+ write_u16(x & 0xffff);
+ write_u16(x >> 16);
+ }
+
+ FILE *mFile;
+ size_t mTotalBytes;
+};
+
+} // namespace android
+
+#endif // ANDROID_WAVEWRITER_H_
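
To show how the two helpers above compose, a short sketch — assumed, not in the commit — that writes a bounded number of sine-tone buffers into a PCM .wav file (the output path is a placeholder):

    // SineSource produces interleaved 16-bit PCM; WaveWriter's destructor
    // patches the RIFF and data chunk sizes when it goes out of scope.
    sp<MediaSource> source = new SineSource(8000 /* Hz */, 1 /* channel */);
    CHECK_EQ(source->start(), (status_t)OK);

    WaveWriter writer("/sdcard/sine.wav", 1 /* channels */, 8000 /* Hz */);

    MediaBuffer *buffer;
    for (int i = 0; i < 100 && source->read(&buffer) == OK; ++i) {
        writer.Append(buffer->data(), buffer->range_length());
        buffer->release();
        buffer = NULL;
    }

    CHECK_EQ(source->stop(), (status_t)OK);
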
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
new file mode 100644
index 0000000..ed7d6cb
--- /dev/null
+++ b/cmds/stagefright/audioloop.cpp
@@ -0,0 +1,101 @@
+#include "SineSource.h"
+
+#include <binder/ProcessState.h>
+#include <media/mediarecorder.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/AMRWriter.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/AudioSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+
+#include <system/audio.h>
+
+using namespace android;
+
+int main() {
+ // We only have an AMR-WB encoder on sholes...
+ static bool outputWBAMR = false;
+ static const int32_t kSampleRate = outputWBAMR ? 16000 : 8000;
+ static const int32_t kNumChannels = 1;
+
+ android::ProcessState::self()->startThreadPool();
+
+ OMXClient client;
+ CHECK_EQ(client.connect(), (status_t)OK);
+
+#if 0
+ sp<MediaSource> source = new SineSource(kSampleRate, kNumChannels);
+#else
+ sp<MediaSource> source = new AudioSource(
+ AUDIO_SOURCE_DEFAULT,
+ kSampleRate,
+ audio_channel_in_mask_from_count(kNumChannels));
+#endif
+
+ sp<MetaData> meta = new MetaData;
+
+ meta->setCString(
+ kKeyMIMEType,
+ outputWBAMR ? MEDIA_MIMETYPE_AUDIO_AMR_WB
+ : MEDIA_MIMETYPE_AUDIO_AMR_NB);
+
+ meta->setInt32(kKeyChannelCount, kNumChannels);
+ meta->setInt32(kKeySampleRate, kSampleRate);
+
+ int32_t maxInputSize;
+ if (source->getFormat()->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+ meta->setInt32(kKeyMaxInputSize, maxInputSize);
+ }
+
+ sp<MediaSource> encoder = OMXCodec::Create(
+ client.interface(),
+ meta, true /* createEncoder */,
+ source);
+
+#if 1
+ sp<AMRWriter> writer = new AMRWriter("/sdcard/out.amr");
+ writer->addSource(encoder);
+ writer->start();
+ sleep(10);
+ writer->stop();
+#else
+ sp<MediaSource> decoder = OMXCodec::Create(
+ client.interface(),
+ meta, false /* createEncoder */,
+ encoder);
+
+#if 0
+ AudioPlayer *player = new AudioPlayer(NULL);
+ player->setSource(decoder);
+
+ player->start();
+
+ sleep(10);
+
+ player->stop();
+
+ delete player;
+ player = NULL;
+#elif 0
+ CHECK_EQ(decoder->start(), (status_t)OK);
+
+ MediaBuffer *buffer;
+ while (decoder->read(&buffer) == OK) {
+ // do something with buffer
+
+ putchar('.');
+ fflush(stdout);
+
+ buffer->release();
+ buffer = NULL;
+ }
+
+ CHECK_EQ(decoder->stop(), (status_t)OK);
+#endif
+#endif
+
+ return 0;
+}
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
new file mode 100644
index 0000000..cf2909e
--- /dev/null
+++ b/cmds/stagefright/codec.cpp
@@ -0,0 +1,495 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "codec"
+#include <utils/Log.h>
+
+#include "SimplePlayer.h"
+
+#include <binder/ProcessState.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <gui/SurfaceComposerClient.h>
+
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s [-a] use audio\n"
+ "\t\t[-v] use video\n"
+ "\t\t[-p] playback\n"
+ "\t\t[-S] allocate buffers from a surface\n"
+ "\t\t[-D] decrypt input buffers\n",
+ me);
+
+ exit(1);
+}
+
+namespace android {
+
+struct CodecState {
+ sp<MediaCodec> mCodec;
+ Vector<sp<ABuffer> > mCSD;
+ size_t mCSDIndex;
+ Vector<sp<ABuffer> > mInBuffers;
+ Vector<sp<ABuffer> > mOutBuffers;
+ bool mSignalledInputEOS;
+ bool mSawOutputEOS;
+ int64_t mNumBuffersDecoded;
+ int64_t mNumBytesDecoded;
+ bool mIsAudio;
+};
+
+} // namespace android
+
+static int decode(
+ const android::sp<android::ALooper> &looper,
+ const char *path,
+ bool useAudio,
+ bool useVideo,
+ const android::sp<android::Surface> &surface,
+ bool decryptInputBuffers) {
+ using namespace android;
+
+ static int64_t kTimeout = 500ll;
+
+ sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+ if (extractor->setDataSource(path) != OK) {
+ fprintf(stderr, "unable to instantiate extractor.\n");
+ return 1;
+ }
+
+ KeyedVector<size_t, CodecState> stateByTrack;
+
+ bool haveAudio = false;
+ bool haveVideo = false;
+ for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ sp<AMessage> format;
+ status_t err = extractor->getTrackFormat(i, &format);
+ CHECK_EQ(err, (status_t)OK);
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
+ bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
+
+ if (useAudio && !haveAudio && isAudio) {
+ haveAudio = true;
+ } else if (useVideo && !haveVideo && isVideo) {
+ haveVideo = true;
+ } else {
+ continue;
+ }
+
+ ALOGV("selecting track %d", i);
+
+ err = extractor->selectTrack(i);
+ CHECK_EQ(err, (status_t)OK);
+
+ CodecState *state =
+ &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));
+
+ state->mNumBytesDecoded = 0;
+ state->mNumBuffersDecoded = 0;
+ state->mIsAudio = isAudio;
+
+ if (decryptInputBuffers && !isAudio) {
+ static const MediaCodecList *list = MediaCodecList::getInstance();
+
+ ssize_t index =
+ list->findCodecByType(mime.c_str(), false /* encoder */);
+
+ CHECK_GE(index, 0);
+
+ const char *componentName = list->getCodecName(index);
+
+ AString fullName = componentName;
+ fullName.append(".secure");
+
+ state->mCodec = MediaCodec::CreateByComponentName(
+ looper, fullName.c_str());
+ } else {
+ state->mCodec = MediaCodec::CreateByType(
+ looper, mime.c_str(), false /* encoder */);
+ }
+
+ CHECK(state->mCodec != NULL);
+
+ err = state->mCodec->configure(
+ format, isVideo ? surface : NULL,
+ decryptInputBuffers ? MediaCodec::CONFIGURE_FLAG_SECURE : 0);
+
+ CHECK_EQ(err, (status_t)OK);
+
+ size_t j = 0;
+ sp<ABuffer> buffer;
+ while (format->findBuffer(StringPrintf("csd-%d", j).c_str(), &buffer)) {
+ state->mCSD.push_back(buffer);
+
+ ++j;
+ }
+
+ state->mCSDIndex = 0;
+ state->mSignalledInputEOS = false;
+ state->mSawOutputEOS = false;
+
+ ALOGV("got %d pieces of codec specific data.", state->mCSD.size());
+ }
+
+ CHECK(!stateByTrack.isEmpty());
+
+ int64_t startTimeUs = ALooper::GetNowUs();
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ sp<MediaCodec> codec = state->mCodec;
+
+ CHECK_EQ((status_t)OK, codec->start());
+
+ CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
+ CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
+
+ ALOGV("got %d input and %d output buffers",
+ state->mInBuffers.size(), state->mOutBuffers.size());
+
+ while (state->mCSDIndex < state->mCSD.size()) {
+ size_t index;
+ status_t err = codec->dequeueInputBuffer(&index, -1ll);
+ CHECK_EQ(err, (status_t)OK);
+
+ const sp<ABuffer> &srcBuffer =
+ state->mCSD.itemAt(state->mCSDIndex++);
+
+ const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+
+ memcpy(buffer->data(), srcBuffer->data(), srcBuffer->size());
+
+ err = codec->queueInputBuffer(
+ index,
+ 0 /* offset */,
+ srcBuffer->size(),
+ 0ll /* timeUs */,
+ MediaCodec::BUFFER_FLAG_CODECCONFIG);
+
+ CHECK_EQ(err, (status_t)OK);
+ }
+ }
+
+ bool sawInputEOS = false;
+
+ for (;;) {
+ if (!sawInputEOS) {
+ size_t trackIndex;
+ status_t err = extractor->getSampleTrackIndex(&trackIndex);
+
+ if (err != OK) {
+ ALOGV("saw input eos");
+ sawInputEOS = true;
+ } else {
+ CodecState *state = &stateByTrack.editValueFor(trackIndex);
+
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
+
+ if (err == OK) {
+ ALOGV("filling input buffer %d", index);
+
+ const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+
+ err = extractor->readSampleData(buffer);
+ CHECK_EQ(err, (status_t)OK);
+
+ int64_t timeUs;
+ err = extractor->getSampleTime(&timeUs);
+ CHECK_EQ(err, (status_t)OK);
+
+ uint32_t bufferFlags = 0;
+
+ uint32_t sampleFlags;
+ err = extractor->getSampleFlags(&sampleFlags);
+ CHECK_EQ(err, (status_t)OK);
+
+ if (sampleFlags & NuMediaExtractor::SAMPLE_FLAG_ENCRYPTED) {
+ CHECK(decryptInputBuffers);
+
+ bufferFlags |= MediaCodec::BUFFER_FLAG_ENCRYPTED;
+ }
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ 0 /* offset */,
+ buffer->size(),
+ timeUs,
+ bufferFlags);
+
+ CHECK_EQ(err, (status_t)OK);
+
+ extractor->advance();
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+ }
+ } else {
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ if (!state->mSignalledInputEOS) {
+ size_t index;
+ status_t err =
+ state->mCodec->dequeueInputBuffer(&index, kTimeout);
+
+ if (err == OK) {
+ ALOGV("signalling input EOS on track %d", i);
+
+ err = state->mCodec->queueInputBuffer(
+ index,
+ 0 /* offset */,
+ 0 /* size */,
+ 0ll /* timeUs */,
+ MediaCodec::BUFFER_FLAG_EOS);
+
+ CHECK_EQ(err, (status_t)OK);
+
+ state->mSignalledInputEOS = true;
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+ }
+ }
+ }
+
+ bool sawOutputEOSOnAllTracks = true;
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+ if (!state->mSawOutputEOS) {
+ sawOutputEOSOnAllTracks = false;
+ break;
+ }
+ }
+
+ if (sawOutputEOSOnAllTracks) {
+ break;
+ }
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ if (state->mSawOutputEOS) {
+ continue;
+ }
+
+ size_t index;
+ size_t offset;
+ size_t size;
+ int64_t presentationTimeUs;
+ uint32_t flags;
+ status_t err = state->mCodec->dequeueOutputBuffer(
+ &index, &offset, &size, &presentationTimeUs, &flags,
+ kTimeout);
+
+ if (err == OK) {
+ ALOGV("draining output buffer %d, time = %lld us",
+ index, presentationTimeUs);
+
+ ++state->mNumBuffersDecoded;
+ state->mNumBytesDecoded += size;
+
+ err = state->mCodec->releaseOutputBuffer(index);
+ CHECK_EQ(err, (status_t)OK);
+
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ ALOGV("reached EOS on output.");
+
+ state->mSawOutputEOS = true;
+ }
+ } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+ ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
+ CHECK_EQ((status_t)OK,
+ state->mCodec->getOutputBuffers(&state->mOutBuffers));
+
+ ALOGV("got %d output buffers", state->mOutBuffers.size());
+ } else if (err == INFO_FORMAT_CHANGED) {
+ sp<AMessage> format;
+ CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
+
+ ALOGV("INFO_FORMAT_CHANGED: %s", format->debugString().c_str());
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+ }
+ }
+
+ int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs;
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ CHECK_EQ((status_t)OK, state->mCodec->release());
+
+ if (state->mIsAudio) {
+ printf("track %d: %lld bytes received. %.2f KB/sec\n",
+ i,
+ state->mNumBytesDecoded,
+ state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
+ } else {
+ printf("track %d: %lld frames decoded, %.2f fps. %lld bytes "
+ "received. %.2f KB/sec\n",
+ i,
+ state->mNumBuffersDecoded,
+ state->mNumBuffersDecoded * 1E6 / elapsedTimeUs,
+ state->mNumBytesDecoded,
+ state->mNumBytesDecoded * 1E6 / 1024 / elapsedTimeUs);
+ }
+ }
+
+ return 0;
+}
+
+int main(int argc, char **argv) {
+ using namespace android;
+
+ const char *me = argv[0];
+
+ bool useAudio = false;
+ bool useVideo = false;
+ bool playback = false;
+ bool useSurface = false;
+ bool decryptInputBuffers = false;
+
+ int res;
+ while ((res = getopt(argc, argv, "havpSD")) >= 0) {
+ switch (res) {
+ case 'a':
+ {
+ useAudio = true;
+ break;
+ }
+
+ case 'v':
+ {
+ useVideo = true;
+ break;
+ }
+
+ case 'p':
+ {
+ playback = true;
+ break;
+ }
+
+ case 'S':
+ {
+ useSurface = true;
+ break;
+ }
+
+ case 'D':
+ {
+ decryptInputBuffers = true;
+ break;
+ }
+
+ case '?':
+ case 'h':
+ default:
+ {
+ usage(me);
+ }
+ }
+ }
+
+ argc -= optind;
+ argv += optind;
+
+ if (argc != 1) {
+ usage(me);
+ }
+
+ if (!useAudio && !useVideo) {
+ useAudio = useVideo = true;
+ }
+
+ ProcessState::self()->startThreadPool();
+
+ DataSource::RegisterDefaultSniffers();
+
+ sp<ALooper> looper = new ALooper;
+ looper->start();
+
+ sp<SurfaceComposerClient> composerClient;
+ sp<SurfaceControl> control;
+ sp<Surface> surface;
+
+ if (playback || (useSurface && useVideo)) {
+ composerClient = new SurfaceComposerClient;
+ CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+ ssize_t displayWidth = composerClient->getDisplayWidth(0);
+ ssize_t displayHeight = composerClient->getDisplayHeight(0);
+
+ ALOGV("display is %ld x %ld\n", displayWidth, displayHeight);
+
+ control = composerClient->createSurface(
+ String8("A Surface"),
+ 0,
+ displayWidth,
+ displayHeight,
+ PIXEL_FORMAT_RGB_565,
+ 0);
+
+ CHECK(control != NULL);
+ CHECK(control->isValid());
+
+ SurfaceComposerClient::openGlobalTransaction();
+ CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
+ CHECK_EQ(control->show(), (status_t)OK);
+ SurfaceComposerClient::closeGlobalTransaction();
+
+ surface = control->getSurface();
+ CHECK(surface != NULL);
+ }
+
+ if (playback) {
+ sp<SimplePlayer> player = new SimplePlayer;
+ looper->registerHandler(player);
+
+ player->setDataSource(argv[0]);
+ player->setSurface(surface->getSurfaceTexture());
+ player->start();
+ sleep(60);
+ player->stop();
+ player->reset();
+ } else {
+ decode(looper, argv[0],
+ useAudio, useVideo, surface, decryptInputBuffers);
+ }
+
+ if (playback || (useSurface && useVideo)) {
+ composerClient->dispose();
+ }
+
+ looper->stop();
+
+ return 0;
+}
diff --git a/cmds/stagefright/jpeg.cpp b/cmds/stagefright/jpeg.cpp
new file mode 100644
index 0000000..7e859c3
--- /dev/null
+++ b/cmds/stagefright/jpeg.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <errno.h>
+#include <setjmp.h>
+#include <stdio.h>
+
+extern "C" {
+#include "jpeglib.h"
+}
+
+static inline uint8_t from565to8(uint16_t p, int start, int bits) {
+ uint8_t c = (p >> start) & ((1 << bits) - 1);
+ return (c << (8 - bits)) | (c >> (bits - (8 - bits)));
+}
+
+struct sf_jpeg_error_mgr {
+ struct jpeg_error_mgr jerr;
+ jmp_buf longjmp_buffer;
+};
+
+void sf_jpeg_error_exit(j_common_ptr cinfo) {
+ struct sf_jpeg_error_mgr *sf_err = (struct sf_jpeg_error_mgr *)cinfo->err;
+ longjmp(sf_err->longjmp_buffer, 0);
+}
+
+int writeJpegFile(const char *filename, uint8_t *frame, int width, int height) {
+ struct sf_jpeg_error_mgr sf_err;
+ struct jpeg_compress_struct cinfo;
+ uint8_t row_data[width * 3];
+ JSAMPROW row_pointer = row_data;
+ FILE *f;
+
+ f = fopen(filename, "w");
+ if (!f) {
+ return -errno;
+ }
+
+ cinfo.err = jpeg_std_error(&sf_err.jerr);
+ sf_err.jerr.error_exit = sf_jpeg_error_exit;
+ if (setjmp(sf_err.longjmp_buffer)) {
+ jpeg_destroy_compress(&cinfo);
+ fclose(f);
+ return -1;
+ }
+
+ jpeg_create_compress(&cinfo);
+ jpeg_stdio_dest(&cinfo, f);
+
+ cinfo.image_width = width;
+ cinfo.image_height = height;
+ cinfo.input_components = 3;
+ cinfo.in_color_space = JCS_RGB;
+
+ jpeg_set_defaults(&cinfo);
+ jpeg_set_quality(&cinfo, 80, TRUE);
+
+ jpeg_start_compress(&cinfo, TRUE);
+
+ for (int row = 0; row < height; row++) {
+ uint16_t *src = (uint16_t *)(frame + row * width * 2);
+ uint8_t *dst = row_data;
+ for (int col = 0; col < width; col++) {
+ dst[0] = from565to8(*src, 11, 5);
+ dst[1] = from565to8(*src, 5, 6);
+ dst[2] = from565to8(*src, 0, 5);
+ dst += 3;
+ src++;
+ }
+ jpeg_write_scanlines(&cinfo, &row_pointer, 1);
+ }
+
+ jpeg_finish_compress(&cinfo);
+ jpeg_destroy_compress(&cinfo);
+
+ fclose(f);
+ return 0;
+}
diff --git a/cmds/stagefright/jpeg.h b/cmds/stagefright/jpeg.h
new file mode 100644
index 0000000..ce86cf2
--- /dev/null
+++ b/cmds/stagefright/jpeg.h
@@ -0,0 +1,6 @@
+#ifndef _STAGEFRIGHT_JPEG_H_
+#define _STAGEFRIGHT_JPEG_H_
+
+int writeJpegFile(const char *filename, uint8_t *frame, int width, int height);
+
+#endif
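
A small usage sketch for the helper above — assumed, not part of the commit; the frame pointer, width and height are placeholders, and the input is expected to be RGB565 to match the conversion in jpeg.cpp:

    // frame: width * height RGB565 pixels (2 bytes per pixel), e.g. a
    // decoded video frame; returns 0 on success, -errno or -1 on failure.
    int err = writeJpegFile("/sdcard/frame.jpg", frame, width, height);
    if (err != 0) {
        fprintf(stderr, "writeJpegFile failed: %d\n", err);
    }
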
diff --git a/cmds/stagefright/record.cpp b/cmds/stagefright/record.cpp
new file mode 100644
index 0000000..45c3f7b
--- /dev/null
+++ b/cmds/stagefright/record.cpp
@@ -0,0 +1,354 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SineSource.h"
+
+#include <binder/ProcessState.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaPlayerInterface.h>
+
+using namespace android;
+
+static const int32_t kFramerate = 24; // fps
+static const int32_t kIFramesIntervalSec = 1;
+static const int32_t kVideoBitRate = 512 * 1024;
+static const int32_t kAudioBitRate = 12200;
+static const int64_t kDurationUs = 10000000LL; // 10 seconds
+
+#if 0
+class DummySource : public MediaSource {
+
+public:
+ DummySource(int width, int height, int colorFormat)
+ : mWidth(width),
+ mHeight(height),
+ mColorFormat(colorFormat),
+ mSize((width * height * 3) / 2) {
+ mGroup.add_buffer(new MediaBuffer(mSize));
+
+ // Check the color format to make sure
+ // that the buffer size mSize it set correctly above.
+ CHECK(colorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+ colorFormat == OMX_COLOR_FormatYUV420Planar);
+ }
+
+ virtual sp<MetaData> getFormat() {
+ sp<MetaData> meta = new MetaData;
+ meta->setInt32(kKeyWidth, mWidth);
+ meta->setInt32(kKeyHeight, mHeight);
+ meta->setInt32(kKeyColorFormat, mColorFormat);
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+
+ return meta;
+ }
+
+ virtual status_t start(MetaData *params) {
+ mNumFramesOutput = 0;
+ return OK;
+ }
+
+ virtual status_t stop() {
+ return OK;
+ }
+
+ virtual status_t read(
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+ if (mNumFramesOutput == kFramerate * 10) {
+ // Stop returning data after 10 secs.
+ return ERROR_END_OF_STREAM;
+ }
+
+ // printf("DummySource::read\n");
+ status_t err = mGroup.acquire_buffer(buffer);
+ if (err != OK) {
+ return err;
+ }
+
+ char x = (char)((double)rand() / RAND_MAX * 255);
+ memset((*buffer)->data(), x, mSize);
+ (*buffer)->set_range(0, mSize);
+ (*buffer)->meta_data()->clear();
+ (*buffer)->meta_data()->setInt64(
+ kKeyTime, (mNumFramesOutput * 1000000) / kFramerate);
+ ++mNumFramesOutput;
+
+ // printf("DummySource::read - returning buffer\n");
+ // ALOGI("DummySource::read - returning buffer");
+ return OK;
+ }
+
+protected:
+ virtual ~DummySource() {}
+
+private:
+ MediaBufferGroup mGroup;
+ int mWidth, mHeight;
+ int mColorFormat;
+ size_t mSize;
+ int64_t mNumFramesOutput;;
+
+ DummySource(const DummySource &);
+ DummySource &operator=(const DummySource &);
+};
+
+sp<MediaSource> createSource(const char *filename) {
+ sp<MediaSource> source;
+
+ sp<MediaExtractor> extractor =
+ MediaExtractor::Create(new FileSource(filename));
+ if (extractor == NULL) {
+ return NULL;
+ }
+
+ size_t num_tracks = extractor->countTracks();
+
+ sp<MetaData> meta;
+ for (size_t i = 0; i < num_tracks; ++i) {
+ meta = extractor->getTrackMetaData(i);
+ CHECK(meta.get() != NULL);
+
+ const char *mime;
+ if (!meta->findCString(kKeyMIMEType, &mime)) {
+ continue;
+ }
+
+ if (strncasecmp(mime, "video/", 6)) {
+ continue;
+ }
+
+ source = extractor->getTrack(i);
+ break;
+ }
+
+ return source;
+}
+
+enum {
+ kYUV420SP = 0,
+ kYUV420P = 1,
+};
+
+// returns -1 if mapping of the given color is unsuccessful
+// returns an omx color enum value otherwise
+static int translateColorToOmxEnumValue(int color) {
+ switch (color) {
+ case kYUV420SP:
+ return OMX_COLOR_FormatYUV420SemiPlanar;
+ case kYUV420P:
+ return OMX_COLOR_FormatYUV420Planar;
+ default:
+ fprintf(stderr, "Unsupported color: %d\n", color);
+ return -1;
+ }
+}
+
+int main(int argc, char **argv) {
+ android::ProcessState::self()->startThreadPool();
+
+ DataSource::RegisterDefaultSniffers();
+
+#if 1
+ if (argc != 3) {
+ fprintf(stderr, "usage: %s <filename> <input_color_format>\n", argv[0]);
+ fprintf(stderr, " <input_color_format>: 0 (YUV420SP) or 1 (YUV420P)\n");
+ return 1;
+ }
+
+ int colorFormat = translateColorToOmxEnumValue(atoi(argv[2]));
+ if (colorFormat == -1) {
+ fprintf(stderr, "input color format must be 0 (YUV420SP) or 1 (YUV420P)\n");
+ return 1;
+ }
+ OMXClient client;
+ CHECK_EQ(client.connect(), (status_t)OK);
+
+ status_t err = OK;
+
+#if 0
+ sp<MediaSource> source = createSource(argv[1]);
+
+ if (source == NULL) {
+ fprintf(stderr, "Unable to find a suitable video track.\n");
+ return 1;
+ }
+
+ sp<MetaData> meta = source->getFormat();
+
+ sp<MediaSource> decoder = OMXCodec::Create(
+ client.interface(), meta, false /* createEncoder */, source);
+
+ int width, height;
+ bool success = meta->findInt32(kKeyWidth, &width);
+ success = success && meta->findInt32(kKeyHeight, &height);
+ CHECK(success);
+#else
+ int width = 720;
+ int height = 480;
+ sp<MediaSource> decoder = new DummySource(width, height, colorFormat);
+#endif
+
+ sp<MetaData> enc_meta = new MetaData;
+ // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ // enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ enc_meta->setInt32(kKeyWidth, width);
+ enc_meta->setInt32(kKeyHeight, height);
+ enc_meta->setInt32(kKeySampleRate, kFramerate);
+ enc_meta->setInt32(kKeyBitRate, kVideoBitRate);
+ enc_meta->setInt32(kKeyStride, width);
+ enc_meta->setInt32(kKeySliceHeight, height);
+ enc_meta->setInt32(kKeyIFramesInterval, kIFramesIntervalSec);
+ enc_meta->setInt32(kKeyColorFormat, colorFormat);
+
+ sp<MediaSource> encoder =
+ OMXCodec::Create(
+ client.interface(), enc_meta, true /* createEncoder */, decoder);
+
+#if 1
+ sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/output.mp4");
+ writer->addSource(encoder);
+ writer->setMaxFileDuration(kDurationUs);
+ CHECK_EQ((status_t)OK, writer->start());
+ while (!writer->reachedEOS()) {
+ fprintf(stderr, ".");
+ usleep(100000);
+ }
+ err = writer->stop();
+#else
+ CHECK_EQ((status_t)OK, encoder->start());
+
+ MediaBuffer *buffer;
+ while (encoder->read(&buffer) == OK) {
+ printf(".");
+ fflush(stdout);
+ int32_t isSync;
+ if (!buffer->meta_data()->findInt32(kKeyIsSyncFrame, &isSync)) {
+ isSync = false;
+ }
+
+ printf("got an output frame of size %d%s\n", buffer->range_length(),
+ isSync ? " (SYNC)" : "");
+
+ buffer->release();
+ buffer = NULL;
+ }
+
+ err = encoder->stop();
+#endif
+
+ printf("$\n");
+ client.disconnect();
+#endif
+
+#if 0
+ CameraSource *source = CameraSource::Create();
+ source->start();
+
+ printf("source = %p\n", source);
+
+ for (int i = 0; i < 100; ++i) {
+ MediaBuffer *buffer;
+ status_t err = source->read(&buffer);
+ CHECK_EQ(err, (status_t)OK);
+
+ printf("got a frame, data=%p, size=%d\n",
+ buffer->data(), buffer->range_length());
+
+ buffer->release();
+ buffer = NULL;
+ }
+
+ err = source->stop();
+
+ delete source;
+ source = NULL;
+#endif
+
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ fprintf(stderr, "record failed: %d\n", err);
+ return 1;
+ }
+ return 0;
+}
+#else
+
+int main(int argc, char **argv) {
+ android::ProcessState::self()->startThreadPool();
+
+ OMXClient client;
+ CHECK_EQ(client.connect(), (status_t)OK);
+
+ const int32_t kSampleRate = 22050;
+ const int32_t kNumChannels = 2;
+ sp<MediaSource> audioSource = new SineSource(kSampleRate, kNumChannels);
+
+#if 0
+ sp<MediaPlayerBase::AudioSink> audioSink;
+ AudioPlayer *player = new AudioPlayer(audioSink);
+ player->setSource(audioSource);
+ player->start();
+
+ sleep(10);
+
+ player->stop();
+#endif
+
+ sp<MetaData> encMeta = new MetaData;
+ encMeta->setCString(kKeyMIMEType,
+ 0 ? MEDIA_MIMETYPE_AUDIO_AMR_WB : MEDIA_MIMETYPE_AUDIO_AAC);
+ encMeta->setInt32(kKeySampleRate, kSampleRate);
+ encMeta->setInt32(kKeyChannelCount, kNumChannels);
+ encMeta->setInt32(kKeyMaxInputSize, 8192);
+ encMeta->setInt32(kKeyBitRate, kAudioBitRate);
+
+ sp<MediaSource> encoder =
+ OMXCodec::Create(client.interface(), encMeta, true, audioSource);
+
+ encoder->start();
+
+ int32_t n = 0;
+ status_t err;
+ MediaBuffer *buffer;
+ while ((err = encoder->read(&buffer)) == OK) {
+ printf(".");
+ fflush(stdout);
+
+ buffer->release();
+ buffer = NULL;
+
+ if (++n == 100) {
+ break;
+ }
+ }
+ printf("$\n");
+
+ encoder->stop();
+
+ client.disconnect();
+
+ return 0;
+}
+#endif
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
new file mode 100644
index 0000000..3bd1fe2
--- /dev/null
+++ b/cmds/stagefright/recordvideo.cpp
@@ -0,0 +1,302 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SineSource.h"
+
+#include <binder/ProcessState.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaPlayerInterface.h>
+
+using namespace android;
+
+// Print usage showing how to use this utility to record videos
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s\n", me);
+ fprintf(stderr, " -h(elp)\n");
+ fprintf(stderr, " -b bit rate in bits per second (default: 300000)\n");
+ fprintf(stderr, " -c YUV420 color format: [0] semi planar or [1] planar or other omx YUV420 color format (default: 1)\n");
+ fprintf(stderr, " -f frame rate in frames per second (default: 30)\n");
+ fprintf(stderr, " -i I frame interval in seconds (default: 1)\n");
+ fprintf(stderr, " -n number of frames to be recorded (default: 300)\n");
+ fprintf(stderr, " -w width in pixels (default: 176)\n");
+ fprintf(stderr, " -t height in pixels (default: 144)\n");
+ fprintf(stderr, " -l encoder level. see omx il header (default: encoder specific)\n");
+ fprintf(stderr, " -p encoder profile. see omx il header (default: encoder specific)\n");
+ fprintf(stderr, " -v video codec: [0] AVC [1] M4V [2] H263 (default: 0)\n");
+ fprintf(stderr, "The output file is /sdcard/output.mp4\n");
+ exit(1);
+}
+
+class DummySource : public MediaSource {
+
+public:
+ DummySource(int width, int height, int nFrames, int fps, int colorFormat)
+ : mWidth(width),
+ mHeight(height),
+ mMaxNumFrames(nFrames),
+ mFrameRate(fps),
+ mColorFormat(colorFormat),
+ mSize((width * height * 3) / 2) {
+
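+ // A YUV420 frame occupies width * height * 3 / 2 bytes, and frames are
+ // produced one at a time, so a single reusable buffer of that size suffices.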
+ mGroup.add_buffer(new MediaBuffer(mSize));
+ }
+
+ virtual sp<MetaData> getFormat() {
+ sp<MetaData> meta = new MetaData;
+ meta->setInt32(kKeyWidth, mWidth);
+ meta->setInt32(kKeyHeight, mHeight);
+ meta->setInt32(kKeyColorFormat, mColorFormat);
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+
+ return meta;
+ }
+
+ virtual status_t start(MetaData *params) {
+ mNumFramesOutput = 0;
+ return OK;
+ }
+
+ virtual status_t stop() {
+ return OK;
+ }
+
+ virtual status_t read(
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+
+ if (mNumFramesOutput % 10 == 0) {
+ fprintf(stderr, ".");
+ }
+ if (mNumFramesOutput == mMaxNumFrames) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ status_t err = mGroup.acquire_buffer(buffer);
+ if (err != OK) {
+ return err;
+ }
+
+ // We don't care about the contents; we are only exercising the video
+ // encoder. Skipping content generation also lets read() return much faster.
+ //char x = (char)((double)rand() / RAND_MAX * 255);
+ //memset((*buffer)->data(), x, mSize);
+ (*buffer)->set_range(0, mSize);
+ (*buffer)->meta_data()->clear();
+ (*buffer)->meta_data()->setInt64(
+ kKeyTime, (mNumFramesOutput * 1000000) / mFrameRate);
+ ++mNumFramesOutput;
+
+ return OK;
+ }
+
+protected:
+ virtual ~DummySource() {}
+
+private:
+ MediaBufferGroup mGroup;
+ int mWidth, mHeight;
+ int mMaxNumFrames;
+ int mFrameRate;
+ int mColorFormat;
+ size_t mSize;
+ int64_t mNumFramesOutput;
+
+ DummySource(const DummySource &);
+ DummySource &operator=(const DummySource &);
+};
+
+enum {
+ kYUV420SP = 0,
+ kYUV420P = 1,
+};
+
+// Returns -1 if the given color cannot be mapped to an OMX value,
+// or the corresponding OMX color enum value otherwise.
+static int translateColorToOmxEnumValue(int color) {
+ switch (color) {
+ case kYUV420SP:
+ return OMX_COLOR_FormatYUV420SemiPlanar;
+ case kYUV420P:
+ return OMX_COLOR_FormatYUV420Planar;
+ default:
+ fprintf(stderr, "Custom OMX color format: %d\n", color);
+ if (color == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar ||
+ color == OMX_QCOM_COLOR_FormatYVU420SemiPlanar) {
+ return color;
+ }
+ }
+ return -1;
+}
+
+int main(int argc, char **argv) {
+
+ // Default values, used unless overridden on the command line
+ int frameRateFps = 30;
+ int width = 176;
+ int height = 144;
+ int bitRateBps = 300000;
+ int iFramesIntervalSeconds = 1;
+ int colorFormat = OMX_COLOR_FormatYUV420Planar;
+ int nFrames = 300;
+ int level = -1; // Encoder specific default
+ int profile = -1; // Encoder specific default
+ int codec = 0;
+ const char *fileName = "/sdcard/output.mp4";
+
+ android::ProcessState::self()->startThreadPool();
+ int res;
+ while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) {
+ switch (res) {
+ case 'b':
+ {
+ bitRateBps = atoi(optarg);
+ break;
+ }
+
+ case 'c':
+ {
+ colorFormat = translateColorToOmxEnumValue(atoi(optarg));
+ if (colorFormat == -1) {
+ usage(argv[0]);
+ }
+ break;
+ }
+
+ case 'f':
+ {
+ frameRateFps = atoi(optarg);
+ break;
+ }
+
+ case 'i':
+ {
+ iFramesIntervalSeconds = atoi(optarg);
+ break;
+ }
+
+ case 'n':
+ {
+ nFrames = atoi(optarg);
+ break;
+ }
+
+ case 'w':
+ {
+ width = atoi(optarg);
+ break;
+ }
+
+ case 't':
+ {
+ height = atoi(optarg);
+ break;
+ }
+
+ case 'l':
+ {
+ level = atoi(optarg);
+ break;
+ }
+
+ case 'p':
+ {
+ profile = atoi(optarg);
+ break;
+ }
+
+ case 'v':
+ {
+ codec = atoi(optarg);
+ if (codec < 0 || codec > 2) {
+ usage(argv[0]);
+ }
+ break;
+ }
+
+ case 'h':
+ default:
+ {
+ usage(argv[0]);
+ break;
+ }
+ }
+ }
+
+ OMXClient client;
+ CHECK_EQ(client.connect(), (status_t)OK);
+
+ status_t err = OK;
+ sp<MediaSource> source =
+ new DummySource(width, height, nFrames, frameRateFps, colorFormat);
+
+ sp<MetaData> enc_meta = new MetaData;
+ switch (codec) {
+ case 1:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ break;
+ case 2:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ break;
+ default:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ break;
+ }
+ enc_meta->setInt32(kKeyWidth, width);
+ enc_meta->setInt32(kKeyHeight, height);
+ enc_meta->setInt32(kKeyFrameRate, frameRateFps);
+ enc_meta->setInt32(kKeyBitRate, bitRateBps);
+ enc_meta->setInt32(kKeyStride, width);
+ enc_meta->setInt32(kKeySliceHeight, height);
+ enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
+ enc_meta->setInt32(kKeyColorFormat, colorFormat);
+ if (level != -1) {
+ enc_meta->setInt32(kKeyVideoLevel, level);
+ }
+ if (profile != -1) {
+ enc_meta->setInt32(kKeyVideoProfile, profile);
+ }
+
+ sp<MediaSource> encoder =
+ OMXCodec::Create(
+ client.interface(), enc_meta, true /* createEncoder */, source);
+
+ sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+ writer->addSource(encoder);
+ int64_t start = systemTime();
+ CHECK_EQ((status_t)OK, writer->start());
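+ // Busy-wait until the writer reaches EOS; DummySource returns
+ // ERROR_END_OF_STREAM once nFrames frames have been produced.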
+ while (!writer->reachedEOS()) {
+ }
+ err = writer->stop();
+ int64_t end = systemTime();
+
+ fprintf(stderr, "$\n");
+ client.disconnect();
+
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ fprintf(stderr, "record failed: %d\n", err);
+ return 1;
+ }
+ fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
+ fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
+ return 0;
+}
diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp
new file mode 100644
index 0000000..3bbfbdc
--- /dev/null
+++ b/cmds/stagefright/sf2.cpp
@@ -0,0 +1,658 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "sf2"
+#include <utils/Log.h>
+
+#include <binder/ProcessState.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/stagefright/ACodec.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/NativeWindowWrapper.h>
+#include <media/stagefright/Utils.h>
+
+#include <gui/SurfaceComposerClient.h>
+
+#include "include/ESDS.h"
+
+using namespace android;
+
+struct Controller : public AHandler {
+ Controller(const char *uri, bool decodeAudio,
+ const sp<Surface> &surface, bool renderToSurface)
+ : mURI(uri),
+ mDecodeAudio(decodeAudio),
+ mSurface(surface),
+ mRenderToSurface(renderToSurface),
+ mCodec(new ACodec),
+ mIsVorbis(false) {
+ CHECK(!mDecodeAudio || mSurface == NULL);
+ }
+
+ void startAsync() {
+ (new AMessage(kWhatStart, id()))->post();
+ }
+
+protected:
+ virtual ~Controller() {
+ }
+
+ virtual void onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatStart:
+ {
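+ // Decode on this handler's looper; flip the #if below to 0 to give
+ // the codec its own dedicated looper thread instead.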
+#if 1
+ mDecodeLooper = looper();
+#else
+ mDecodeLooper = new ALooper;
+ mDecodeLooper->setName("sf2 decode looper");
+ mDecodeLooper->start();
+#endif
+
+ sp<DataSource> dataSource =
+ DataSource::CreateFromURI(mURI.c_str());
+
+ sp<MediaExtractor> extractor =
+ MediaExtractor::Create(dataSource);
+
+ for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ sp<MetaData> meta = extractor->getTrackMetaData(i);
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ if (!strncasecmp(mDecodeAudio ? "audio/" : "video/",
+ mime, 6)) {
+ mSource = extractor->getTrack(i);
+
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_VORBIS)) {
+ mIsVorbis = true;
+ } else {
+ mIsVorbis = false;
+ }
+ break;
+ }
+ }
+ CHECK(mSource != NULL);
+
+ CHECK_EQ(mSource->start(), (status_t)OK);
+
+ mDecodeLooper->registerHandler(mCodec);
+
+ mCodec->setNotificationMessage(
+ new AMessage(kWhatCodecNotify, id()));
+
+ sp<AMessage> format = makeFormat(mSource->getFormat());
+
+ if (mSurface != NULL) {
+ format->setObject(
+ "native-window", new NativeWindowWrapper(mSurface));
+ }
+
+ mCodec->initiateSetup(format);
+
+ mCSDIndex = 0;
+ mStartTimeUs = ALooper::GetNowUs();
+ mNumOutputBuffersReceived = 0;
+ mTotalBytesReceived = 0;
+ mLeftOverBuffer = NULL;
+ mFinalResult = OK;
+ mSeekState = SEEK_NONE;
+
+ // (new AMessage(kWhatSeek, id()))->post(5000000ll);
+ break;
+ }
+
+ case kWhatSeek:
+ {
+ printf("+");
+ fflush(stdout);
+
+ CHECK(mSeekState == SEEK_NONE
+ || mSeekState == SEEK_FLUSH_COMPLETED);
+
+ if (mLeftOverBuffer != NULL) {
+ mLeftOverBuffer->release();
+ mLeftOverBuffer = NULL;
+ }
+
+ mSeekState = SEEK_FLUSHING;
+ mSeekTimeUs = 30000000ll;
+
+ mCodec->signalFlush();
+ break;
+ }
+
+ case kWhatStop:
+ {
+ if (mLeftOverBuffer != NULL) {
+ mLeftOverBuffer->release();
+ mLeftOverBuffer = NULL;
+ }
+
+ CHECK_EQ(mSource->stop(), (status_t)OK);
+ mSource.clear();
+
+ mCodec->initiateShutdown();
+ break;
+ }
+
+ case kWhatCodecNotify:
+ {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ if (what == ACodec::kWhatFillThisBuffer) {
+ onFillThisBuffer(msg);
+ } else if (what == ACodec::kWhatDrainThisBuffer) {
+ if ((mNumOutputBuffersReceived++ % 16) == 0) {
+ printf(".");
+ fflush(stdout);
+ }
+
+ onDrainThisBuffer(msg);
+ } else if (what == ACodec::kWhatEOS
+ || what == ACodec::kWhatError) {
+ printf((what == ACodec::kWhatEOS) ? "$\n" : "E\n");
+
+ int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs;
+
+ if (mDecodeAudio) {
+ printf("%lld bytes received. %.2f KB/sec\n",
+ mTotalBytesReceived,
+ mTotalBytesReceived * 1E6 / 1024 / delayUs);
+ } else {
+ printf("%d frames decoded, %.2f fps. %lld bytes "
+ "received. %.2f KB/sec\n",
+ mNumOutputBuffersReceived,
+ mNumOutputBuffersReceived * 1E6 / delayUs,
+ mTotalBytesReceived,
+ mTotalBytesReceived * 1E6 / 1024 / delayUs);
+ }
+
+ (new AMessage(kWhatStop, id()))->post();
+ } else if (what == ACodec::kWhatFlushCompleted) {
+ mSeekState = SEEK_FLUSH_COMPLETED;
+ mCodec->signalResume();
+
+ (new AMessage(kWhatSeek, id()))->post(5000000ll);
+ } else if (what == ACodec::kWhatOutputFormatChanged) {
+ } else if (what == ACodec::kWhatShutdownCompleted) {
+ mDecodeLooper->unregisterHandler(mCodec->id());
+
+ if (mDecodeLooper != looper()) {
+ mDecodeLooper->stop();
+ }
+
+ looper()->stop();
+ } else if (what == ACodec::kWhatError) {
+ ALOGE("something went wrong, codec reported an error.");
+
+ printf("E\n");
+
+ (new AMessage(kWhatStop, id()))->post();
+ }
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+ }
+
+private:
+ enum {
+ kWhatStart = 'strt',
+ kWhatStop = 'stop',
+ kWhatCodecNotify = 'noti',
+ kWhatSeek = 'seek',
+ };
+
+ sp<ALooper> mDecodeLooper;
+
+ AString mURI;
+ bool mDecodeAudio;
+ sp<Surface> mSurface;
+ bool mRenderToSurface;
+ sp<ACodec> mCodec;
+ sp<MediaSource> mSource;
+ bool mIsVorbis;
+
+ Vector<sp<ABuffer> > mCSD;
+ size_t mCSDIndex;
+
+ MediaBuffer *mLeftOverBuffer;
+ status_t mFinalResult;
+
+ int64_t mStartTimeUs;
+ int32_t mNumOutputBuffersReceived;
+ int64_t mTotalBytesReceived;
+
+ enum SeekState {
+ SEEK_NONE,
+ SEEK_FLUSHING,
+ SEEK_FLUSH_COMPLETED,
+ };
+ SeekState mSeekState;
+ int64_t mSeekTimeUs;
+
+ sp<AMessage> makeFormat(const sp<MetaData> &meta) {
+ CHECK(mCSD.isEmpty());
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ sp<AMessage> msg = new AMessage;
+ msg->setString("mime", mime);
+
+ if (!strncasecmp("video/", mime, 6)) {
+ int32_t width, height;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+
+ msg->setInt32("width", width);
+ msg->setInt32("height", height);
+ } else {
+ CHECK(!strncasecmp("audio/", mime, 6));
+
+ int32_t numChannels, sampleRate;
+ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+ CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
+
+ msg->setInt32("channel-count", numChannels);
+ msg->setInt32("sample-rate", sampleRate);
+
+ int32_t isADTS;
+ if (meta->findInt32(kKeyIsADTS, &isADTS) && isADTS != 0) {
+ msg->setInt32("is-adts", true);
+ }
+ }
+
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+ // Parse the AVCDecoderConfigurationRecord
+
+ const uint8_t *ptr = (const uint8_t *)data;
+
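+ // avcC layout: configurationVersion, AVCProfileIndication,
+ // profile_compatibility, AVCLevelIndication, lengthSizeMinusOne in the
+ // low 2 bits of byte 4, numOfSequenceParameterSets in the low 5 bits of
+ // byte 5, followed by the length-prefixed SPS and PPS NAL units.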
+ CHECK(size >= 7);
+ CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+ uint8_t profile = ptr[1];
+ uint8_t level = ptr[3];
+
+ // There is decodable content out there that fails the following
+ // assertion, let's be lenient for now...
+ // CHECK((ptr[4] >> 2) == 0x3f); // reserved
+
+ size_t lengthSize = 1 + (ptr[4] & 3);
+
+ // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
+ // violates it...
+ // CHECK((ptr[5] >> 5) == 7); // reserved
+
+ size_t numSeqParameterSets = ptr[5] & 31;
+
+ ptr += 6;
+ size -= 6;
+
+ sp<ABuffer> buffer = new ABuffer(1024);
+ buffer->setRange(0, 0);
+
+ for (size_t i = 0; i < numSeqParameterSets; ++i) {
+ CHECK(size >= 2);
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ CHECK(size >= length);
+
+ memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+ memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+ buffer->setRange(0, buffer->size() + 4 + length);
+
+ ptr += length;
+ size -= length;
+ }
+
+ buffer->meta()->setInt32("csd", true);
+ mCSD.push(buffer);
+
+ buffer = new ABuffer(1024);
+ buffer->setRange(0, 0);
+
+ CHECK(size >= 1);
+ size_t numPictureParameterSets = *ptr;
+ ++ptr;
+ --size;
+
+ for (size_t i = 0; i < numPictureParameterSets; ++i) {
+ CHECK(size >= 2);
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ CHECK(size >= length);
+
+ memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+ memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+ buffer->setRange(0, buffer->size() + 4 + length);
+
+ ptr += length;
+ size -= length;
+ }
+
+ buffer->meta()->setInt32("csd", true);
+ mCSD.push(buffer);
+
+ msg->setBuffer("csd", buffer);
+ } else if (meta->findData(kKeyESDS, &type, &data, &size)) {
+ ESDS esds((const char *)data, size);
+ CHECK_EQ(esds.InitCheck(), (status_t)OK);
+
+ const void *codec_specific_data;
+ size_t codec_specific_data_size;
+ esds.getCodecSpecificInfo(
+ &codec_specific_data, &codec_specific_data_size);
+
+ sp<ABuffer> buffer = new ABuffer(codec_specific_data_size);
+
+ memcpy(buffer->data(), codec_specific_data,
+ codec_specific_data_size);
+
+ buffer->meta()->setInt32("csd", true);
+ mCSD.push(buffer);
+ } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
+ sp<ABuffer> buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ mCSD.push(buffer);
+
+ CHECK(meta->findData(kKeyVorbisBooks, &type, &data, &size));
+
+ buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ mCSD.push(buffer);
+ }
+
+ int32_t maxInputSize;
+ if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
+ msg->setInt32("max-input-size", maxInputSize);
+ }
+
+ return msg;
+ }
+
+ void onFillThisBuffer(const sp<AMessage> &msg) {
+ sp<AMessage> reply;
+ CHECK(msg->findMessage("reply", &reply));
+
+ if (mSource == NULL || mSeekState == SEEK_FLUSHING) {
+ reply->setInt32("err", ERROR_END_OF_STREAM);
+ reply->post();
+ return;
+ }
+
+ sp<ABuffer> outBuffer;
+ CHECK(msg->findBuffer("buffer", &outBuffer));
+
+ if (mCSDIndex < mCSD.size()) {
+ outBuffer = mCSD.editItemAt(mCSDIndex++);
+ outBuffer->meta()->setInt64("timeUs", 0);
+ } else {
+ size_t sizeLeft = outBuffer->capacity();
+ outBuffer->setRange(0, 0);
+
+ int32_t n = 0;
+
+ for (;;) {
+ MediaBuffer *inBuffer;
+
+ if (mLeftOverBuffer != NULL) {
+ inBuffer = mLeftOverBuffer;
+ mLeftOverBuffer = NULL;
+ } else if (mFinalResult != OK) {
+ break;
+ } else {
+ MediaSource::ReadOptions options;
+ if (mSeekState == SEEK_FLUSH_COMPLETED) {
+ options.setSeekTo(mSeekTimeUs);
+ mSeekState = SEEK_NONE;
+ }
+ status_t err = mSource->read(&inBuffer, &options);
+
+ if (err != OK) {
+ mFinalResult = err;
+ break;
+ }
+ }
+
+ size_t sizeNeeded = inBuffer->range_length();
+ if (mIsVorbis) {
+ // Vorbis data is suffixed with the number of
+ // valid samples on the page.
+ sizeNeeded += sizeof(int32_t);
+ }
+
+ if (sizeNeeded > sizeLeft) {
+ if (outBuffer->size() == 0) {
+ ALOGE("Unable to fit even a single input buffer of size %d.",
+ sizeNeeded);
+ }
+ CHECK_GT(outBuffer->size(), 0u);
+
+ mLeftOverBuffer = inBuffer;
+ break;
+ }
+
+ ++n;
+
+ if (outBuffer->size() == 0) {
+ int64_t timeUs;
+ CHECK(inBuffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
+ outBuffer->meta()->setInt64("timeUs", timeUs);
+ }
+
+ memcpy(outBuffer->data() + outBuffer->size(),
+ (const uint8_t *)inBuffer->data()
+ + inBuffer->range_offset(),
+ inBuffer->range_length());
+
+ if (mIsVorbis) {
+ int32_t numPageSamples;
+ if (!inBuffer->meta_data()->findInt32(
+ kKeyValidSamples, &numPageSamples)) {
+ numPageSamples = -1;
+ }
+
+ memcpy(outBuffer->data()
+ + outBuffer->size() + inBuffer->range_length(),
+ &numPageSamples, sizeof(numPageSamples));
+ }
+
+ outBuffer->setRange(
+ 0, outBuffer->size() + sizeNeeded);
+
+ sizeLeft -= sizeNeeded;
+
+ inBuffer->release();
+ inBuffer = NULL;
+
+ break; // Don't coalesce
+ }
+
+ ALOGV("coalesced %d input buffers", n);
+
+ if (outBuffer->size() == 0) {
+ CHECK_NE(mFinalResult, (status_t)OK);
+
+ reply->setInt32("err", mFinalResult);
+ reply->post();
+ return;
+ }
+ }
+
+ reply->setBuffer("buffer", outBuffer);
+ reply->post();
+ }
+
+ void onDrainThisBuffer(const sp<AMessage> &msg) {
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
+
+ mTotalBytesReceived += buffer->size();
+
+ sp<AMessage> reply;
+ CHECK(msg->findMessage("reply", &reply));
+
+ if (mRenderToSurface) {
+ reply->setInt32("render", 1);
+ }
+
+ reply->post();
+ }
+
+ DISALLOW_EVIL_CONSTRUCTORS(Controller);
+};
+
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s\n", me);
+ fprintf(stderr, " -h(elp)\n");
+ fprintf(stderr, " -a(udio)\n");
+
+ fprintf(stderr,
+ " -S(urface) Allocate output buffers on a surface.\n"
+ " -R(ender) Render surface-allocated buffers.\n");
+}
+
+int main(int argc, char **argv) {
+ android::ProcessState::self()->startThreadPool();
+
+ bool decodeAudio = false;
+ bool useSurface = false;
+ bool renderToSurface = false;
+
+ int res;
+ while ((res = getopt(argc, argv, "haSR")) >= 0) {
+ switch (res) {
+ case 'a':
+ decodeAudio = true;
+ break;
+
+ case 'S':
+ useSurface = true;
+ break;
+
+ case 'R':
+ renderToSurface = true;
+ break;
+
+ case '?':
+ case 'h':
+ default:
+ {
+ usage(argv[0]);
+ return 1;
+ }
+ }
+ }
+
+ argc -= optind;
+ argv += optind;
+
+ if (argc != 1) {
+ usage(argv[-optind]);
+ return 1;
+ }
+
+ DataSource::RegisterDefaultSniffers();
+
+ sp<ALooper> looper = new ALooper;
+ looper->setName("sf2");
+
+ sp<SurfaceComposerClient> composerClient;
+ sp<SurfaceControl> control;
+ sp<Surface> surface;
+
+ if (!decodeAudio && useSurface) {
+ composerClient = new SurfaceComposerClient;
+ CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+ control = composerClient->createSurface(
+ String8("A Surface"),
+ 0,
+ 1280,
+ 800,
+ PIXEL_FORMAT_RGB_565,
+ 0);
+
+ CHECK(control != NULL);
+ CHECK(control->isValid());
+
+ SurfaceComposerClient::openGlobalTransaction();
+ CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
+ CHECK_EQ(control->show(), (status_t)OK);
+ SurfaceComposerClient::closeGlobalTransaction();
+
+ surface = control->getSurface();
+ CHECK(surface != NULL);
+
+ CHECK_EQ((status_t)OK,
+ native_window_api_connect(
+ surface.get(), NATIVE_WINDOW_API_MEDIA));
+ }
+
+ sp<Controller> controller =
+ new Controller(argv[0], decodeAudio, surface, renderToSurface);
+
+ looper->registerHandler(controller);
+
+ controller->startAsync();
+
+ CHECK_EQ(looper->start(true /* runOnCallingThread */), (status_t)OK);
+
+ looper->unregisterHandler(controller->id());
+
+ if (!decodeAudio && useSurface) {
+ CHECK_EQ((status_t)OK,
+ native_window_api_disconnect(
+ surface.get(), NATIVE_WINDOW_API_MEDIA));
+
+ composerClient->dispose();
+ }
+
+ return 0;
+}
+
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
new file mode 100644
index 0000000..d70c862
--- /dev/null
+++ b/cmds/stagefright/stagefright.cpp
@@ -0,0 +1,1108 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "stagefright"
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <sys/time.h>
+
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+#include "jpeg.h"
+#include "SineSource.h"
+
+#include <binder/IServiceManager.h>
+#include <binder/ProcessState.h>
+#include <media/IMediaPlayerService.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include "include/LiveSession.h"
+#include "include/NuCachedSource2.h"
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/JPEGSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/mediametadataretriever.h>
+
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MPEG4Writer.h>
+
+#include <private/media/VideoFrame.h>
+
+#include <fcntl.h>
+
+#include <gui/SurfaceTextureClient.h>
+#include <gui/SurfaceComposerClient.h>
+
+using namespace android;
+
+static long gNumRepetitions;
+static long gMaxNumFrames; // 0 means decode all available.
+static long gReproduceBug; // bug scenario to reproduce, or -1 for none.
+static bool gPreferSoftwareCodec;
+static bool gForceToUseHardwareCodec;
+static bool gPlaybackAudio;
+static bool gWriteMP4;
+static bool gDisplayHistogram;
+static String8 gWriteMP4Filename;
+
+static sp<ANativeWindow> gSurface;
+
+static int64_t getNowUs() {
+ struct timeval tv;
+ gettimeofday(&tv, NULL);
+
+ return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll;
+}
+
+static int CompareIncreasing(const int64_t *a, const int64_t *b) {
+ return (*a) < (*b) ? -1 : (*a) > (*b) ? 1 : 0;
+}
+
+static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) {
+ printf("decode times:\n");
+
+ decodeTimesUs->sort(CompareIncreasing);
+
+ size_t n = decodeTimesUs->size();
+ int64_t minUs = decodeTimesUs->itemAt(0);
+ int64_t maxUs = decodeTimesUs->itemAt(n - 1);
+
+ printf("min decode time %lld us (%.2f secs)\n", minUs, minUs / 1E6);
+ printf("max decode time %lld us (%.2f secs)\n", maxUs, maxUs / 1E6);
+
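+ // Bucket the decode times into 100 equal-width bins spanning [minUs, maxUs].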
+ size_t counts[100];
+ for (size_t i = 0; i < 100; ++i) {
+ counts[i] = 0;
+ }
+
+ for (size_t i = 0; i < n; ++i) {
+ int64_t x = decodeTimesUs->itemAt(i);
+
+ size_t slot = ((x - minUs) * 100) / (maxUs - minUs);
+ if (slot == 100) { slot = 99; }
+
+ ++counts[slot];
+ }
+
+ for (size_t i = 0; i < 100; ++i) {
+ int64_t slotUs = minUs + (i * (maxUs - minUs) / 100);
+
+ double fps = 1E6 / slotUs;
+ printf("[%.2f fps]: %d\n", fps, counts[i]);
+ }
+}
+
+static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (meta->findData(kKeyAVCC, &type, &data, &size)) {
+ const uint8_t *ptr = (const uint8_t *)data;
+ CHECK(size >= 7);
+ CHECK(ptr[0] == 1); // configurationVersion == 1
+ uint8_t profile = ptr[1];
+ uint8_t level = ptr[3];
+ fprintf(stderr, "AVC video profile %d and level %d\n", profile, level);
+ }
+}
+
+static void dumpSource(const sp<MediaSource> &source, const String8 &filename) {
+ FILE *out = fopen(filename.string(), "wb");
+
+ CHECK_EQ((status_t)OK, source->start());
+
+ status_t err;
+ for (;;) {
+ MediaBuffer *mbuf;
+ err = source->read(&mbuf);
+
+ if (err == INFO_FORMAT_CHANGED) {
+ continue;
+ } else if (err != OK) {
+ break;
+ }
+
+ CHECK_EQ(
+ fwrite((const uint8_t *)mbuf->data() + mbuf->range_offset(),
+ 1,
+ mbuf->range_length(),
+ out),
+ (ssize_t)mbuf->range_length());
+
+ mbuf->release();
+ mbuf = NULL;
+ }
+
+ CHECK_EQ((status_t)OK, source->stop());
+
+ fclose(out);
+ out = NULL;
+}
+
+static void playSource(OMXClient *client, sp<MediaSource> &source) {
+ sp<MetaData> meta = source->getFormat();
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ sp<MediaSource> rawSource;
+ if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mime)) {
+ rawSource = source;
+ } else {
+ int flags = 0;
+ if (gPreferSoftwareCodec) {
+ flags |= OMXCodec::kPreferSoftwareCodecs;
+ }
+ if (gForceToUseHardwareCodec) {
+ CHECK(!gPreferSoftwareCodec);
+ flags |= OMXCodec::kHardwareCodecsOnly;
+ }
+ rawSource = OMXCodec::Create(
+ client->interface(), meta, false /* createEncoder */, source,
+ NULL /* matchComponentName */,
+ flags,
+ gSurface);
+
+ if (rawSource == NULL) {
+ fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime);
+ return;
+ }
+ displayAVCProfileLevelIfPossible(meta);
+ }
+
+ source.clear();
+
+ status_t err = rawSource->start();
+
+ if (err != OK) {
+ fprintf(stderr, "rawSource returned error %d (0x%08x)\n", err, err);
+ return;
+ }
+
+ if (gPlaybackAudio) {
+ AudioPlayer *player = new AudioPlayer(NULL);
+ player->setSource(rawSource);
+ rawSource.clear();
+
+ player->start(true /* sourceAlreadyStarted */);
+
+ status_t finalStatus;
+ while (!player->reachedEOS(&finalStatus)) {
+ usleep(100000ll);
+ }
+
+ delete player;
+ player = NULL;
+
+ return;
+ } else if (gReproduceBug >= 3 && gReproduceBug <= 5) {
+ int64_t durationUs;
+ CHECK(meta->findInt64(kKeyDuration, &durationUs));
+
+ status_t err;
+ MediaBuffer *buffer;
+ MediaSource::ReadOptions options;
+ int64_t seekTimeUs = -1;
+ for (;;) {
+ err = rawSource->read(&buffer, &options);
+ options.clearSeekTo();
+
+ bool shouldSeek = false;
+ if (err == INFO_FORMAT_CHANGED) {
+ CHECK(buffer == NULL);
+
+ printf("format changed.\n");
+ continue;
+ } else if (err != OK) {
+ printf("reached EOF.\n");
+
+ shouldSeek = true;
+ } else {
+ int64_t timestampUs;
+ CHECK(buffer->meta_data()->findInt64(kKeyTime, &timestampUs));
+
+ bool failed = false;
+
+ if (seekTimeUs >= 0) {
+ int64_t diff = timestampUs - seekTimeUs;
+
+ if (diff < 0) {
+ diff = -diff;
+ }
+
+ if ((gReproduceBug == 4 && diff > 500000)
+ || (gReproduceBug == 5 && timestampUs < 0)) {
+ printf("wanted: %.2f secs, got: %.2f secs\n",
+ seekTimeUs / 1E6, timestampUs / 1E6);
+
+ printf("ERROR: ");
+ failed = true;
+ }
+ }
+
+ printf("buffer has timestamp %lld us (%.2f secs)\n",
+ timestampUs, timestampUs / 1E6);
+
+ buffer->release();
+ buffer = NULL;
+
+ if (failed) {
+ break;
+ }
+
+ shouldSeek = ((double)rand() / RAND_MAX) < 0.1;
+
+ if (gReproduceBug == 3) {
+ shouldSeek = false;
+ }
+ }
+
+ seekTimeUs = -1;
+
+ if (shouldSeek) {
+ seekTimeUs = (rand() * (float)durationUs) / RAND_MAX;
+ options.setSeekTo(seekTimeUs);
+
+ printf("seeking to %lld us (%.2f secs)\n",
+ seekTimeUs, seekTimeUs / 1E6);
+ }
+ }
+
+ rawSource->stop();
+
+ return;
+ }
+
+ int n = 0;
+ int64_t startTime = getNowUs();
+
+ long numIterationsLeft = gNumRepetitions;
+ MediaSource::ReadOptions options;
+
+ int64_t sumDecodeUs = 0;
+ int64_t totalBytes = 0;
+
+ Vector<int64_t> decodeTimesUs;
+
+ while (numIterationsLeft-- > 0) {
+ long numFrames = 0;
+
+ MediaBuffer *buffer;
+
+ for (;;) {
+ int64_t startDecodeUs = getNowUs();
+ status_t err = rawSource->read(&buffer, &options);
+ int64_t delayDecodeUs = getNowUs() - startDecodeUs;
+
+ options.clearSeekTo();
+
+ if (err != OK) {
+ CHECK(buffer == NULL);
+
+ if (err == INFO_FORMAT_CHANGED) {
+ printf("format changed.\n");
+ continue;
+ }
+
+ break;
+ }
+
+ if (buffer->range_length() > 0) {
+ if (gDisplayHistogram && n > 0) {
+ // Ignore the first time since it includes some setup
+ // cost.
+ decodeTimesUs.push(delayDecodeUs);
+ }
+
+ if ((n++ % 16) == 0) {
+ printf(".");
+ fflush(stdout);
+ }
+ }
+
+ sumDecodeUs += delayDecodeUs;
+ totalBytes += buffer->range_length();
+
+ buffer->release();
+ buffer = NULL;
+
+ ++numFrames;
+ if (gMaxNumFrames > 0 && numFrames == gMaxNumFrames) {
+ break;
+ }
+
+ if (gReproduceBug == 1 && numFrames == 40) {
+ printf("seeking past the end now.");
+ options.setSeekTo(0x7fffffffL);
+ } else if (gReproduceBug == 2 && numFrames == 40) {
+ printf("seeking to 5 secs.");
+ options.setSeekTo(5000000);
+ }
+ }
+
+ printf("$");
+ fflush(stdout);
+
+ options.setSeekTo(0);
+ }
+
+ rawSource->stop();
+ printf("\n");
+
+ int64_t delay = getNowUs() - startTime;
+ if (!strncasecmp("video/", mime, 6)) {
+ printf("avg. %.2f fps\n", n * 1E6 / delay);
+
+ printf("avg. time to decode one buffer %.2f usecs\n",
+ (double)sumDecodeUs / n);
+
+ printf("decoded a total of %d frame(s).\n", n);
+
+ if (gDisplayHistogram) {
+ displayDecodeHistogram(&decodeTimesUs);
+ }
+ } else if (!strncasecmp("audio/", mime, 6)) {
+ // Frame count makes less sense for audio, as the output buffer
+ // sizes may be different across decoders.
+ printf("avg. %.2f KB/sec\n", totalBytes / 1024 * 1E6 / delay);
+
+ printf("decoded a total of %lld bytes\n", totalBytes);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct DetectSyncSource : public MediaSource {
+ DetectSyncSource(const sp<MediaSource> &source);
+
+ virtual status_t start(MetaData *params = NULL);
+ virtual status_t stop();
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options);
+
+private:
+ enum StreamType {
+ AVC,
+ MPEG4,
+ H263,
+ OTHER,
+ };
+
+ sp<MediaSource> mSource;
+ StreamType mStreamType;
+ bool mSawFirstIDRFrame;
+
+ DISALLOW_EVIL_CONSTRUCTORS(DetectSyncSource);
+};
+
+DetectSyncSource::DetectSyncSource(const sp<MediaSource> &source)
+ : mSource(source),
+ mStreamType(OTHER),
+ mSawFirstIDRFrame(false) {
+ const char *mime;
+ CHECK(mSource->getFormat()->findCString(kKeyMIMEType, &mime));
+
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+ mStreamType = AVC;
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)) {
+ mStreamType = MPEG4;
+ CHECK(!"sync frame detection not implemented yet for MPEG4");
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_H263)) {
+ mStreamType = H263;
+ CHECK(!"sync frame detection not implemented yet for H.263");
+ }
+}
+
+status_t DetectSyncSource::start(MetaData *params) {
+ mSawFirstIDRFrame = false;
+
+ return mSource->start(params);
+}
+
+status_t DetectSyncSource::stop() {
+ return mSource->stop();
+}
+
+sp<MetaData> DetectSyncSource::getFormat() {
+ return mSource->getFormat();
+}
+
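+// Scan the buffer for Annex B start codes (00 00 01) and report whether any
+// NAL unit has type 5, i.e. an IDR picture that can serve as a sync frame.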
+static bool isIDRFrame(MediaBuffer *buffer) {
+ const uint8_t *data =
+ (const uint8_t *)buffer->data() + buffer->range_offset();
+ size_t size = buffer->range_length();
+ for (size_t i = 0; i + 3 < size; ++i) {
+ if (!memcmp("\x00\x00\x01", &data[i], 3)) {
+ uint8_t nalType = data[i + 3] & 0x1f;
+ if (nalType == 5) {
+ return true;
+ }
+ }
+ }
+
+ return false;
+}
+
+status_t DetectSyncSource::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ for (;;) {
+ status_t err = mSource->read(buffer, options);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (mStreamType == AVC) {
+ bool isIDR = isIDRFrame(*buffer);
+ (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, isIDR);
+ if (isIDR) {
+ mSawFirstIDRFrame = true;
+ }
+ } else {
+ (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, true);
+ }
+
+ if (mStreamType != AVC || mSawFirstIDRFrame) {
+ break;
+ }
+
+ // Ignore everything up to the first IDR frame.
+ (*buffer)->release();
+ *buffer = NULL;
+ }
+
+ return OK;
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+static void writeSourcesToMP4(
+ Vector<sp<MediaSource> > &sources, bool syncInfoPresent) {
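+ // Despite the function name, the #if 0 below disables MPEG4Writer and the
+ // active branch writes an MPEG-2 transport stream; flip it to 1 to produce
+ // an actual .mp4 file.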
+#if 0
+ sp<MPEG4Writer> writer =
+ new MPEG4Writer(gWriteMP4Filename.string());
+#else
+ sp<MPEG2TSWriter> writer =
+ new MPEG2TSWriter(gWriteMP4Filename.string());
+#endif
+
+ // at most one minute.
+ writer->setMaxFileDuration(60000000ll);
+
+ for (size_t i = 0; i < sources.size(); ++i) {
+ sp<MediaSource> source = sources.editItemAt(i);
+
+ CHECK_EQ(writer->addSource(
+ syncInfoPresent ? source : new DetectSyncSource(source)),
+ (status_t)OK);
+ }
+
+ sp<MetaData> params = new MetaData;
+ params->setInt32(kKeyNotRealTime, true);
+ CHECK_EQ(writer->start(params.get()), (status_t)OK);
+
+ while (!writer->reachedEOS()) {
+ usleep(100000);
+ }
+ writer->stop();
+}
+
+static void performSeekTest(const sp<MediaSource> &source) {
+ CHECK_EQ((status_t)OK, source->start());
+
+ int64_t durationUs;
+ CHECK(source->getFormat()->findInt64(kKeyDuration, &durationUs));
+
+ for (int64_t seekTimeUs = 0; seekTimeUs <= durationUs;
+ seekTimeUs += 60000ll) {
+ MediaSource::ReadOptions options;
+ options.setSeekTo(
+ seekTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+
+ MediaBuffer *buffer;
+ status_t err;
+ for (;;) {
+ err = source->read(&buffer, &options);
+
+ options.clearSeekTo();
+
+ if (err == INFO_FORMAT_CHANGED) {
+ CHECK(buffer == NULL);
+ continue;
+ }
+
+ if (err != OK) {
+ CHECK(buffer == NULL);
+ break;
+ }
+
+ if (buffer->range_length() > 0) {
+ break;
+ }
+
+ CHECK(buffer != NULL);
+
+ buffer->release();
+ buffer = NULL;
+ }
+
+ if (err == OK) {
+ int64_t timeUs;
+ CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
+ printf("%lld\t%lld\t%lld\n", seekTimeUs, timeUs, seekTimeUs - timeUs);
+
+ buffer->release();
+ buffer = NULL;
+ } else {
+ printf("ERROR\n");
+ break;
+ }
+ }
+
+ CHECK_EQ((status_t)OK, source->stop());
+}
+
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s\n", me);
+ fprintf(stderr, " -h(elp)\n");
+ fprintf(stderr, " -a(udio)\n");
+ fprintf(stderr, " -n repetitions\n");
+ fprintf(stderr, " -l(ist) components\n");
+ fprintf(stderr, " -m max-number-of-frames-to-decode in each pass\n");
+ fprintf(stderr, " -b bug to reproduce\n");
+ fprintf(stderr, " -p(rofiles) dump decoder profiles supported\n");
+ fprintf(stderr, " -t(humbnail) extract video thumbnail or album art\n");
+ fprintf(stderr, " -s(oftware) prefer software codec\n");
+ fprintf(stderr, " -r(hardware) force the use of a hardware codec\n");
+ fprintf(stderr, " -o playback audio\n");
+ fprintf(stderr, " -w(rite) filename (write to .mp4 file)\n");
+ fprintf(stderr, " -k seek test\n");
+ fprintf(stderr, " -x display a histogram of decoding times/fps "
+ "(video only)\n");
+ fprintf(stderr, " -S allocate buffers from a surface\n");
+ fprintf(stderr, " -T allocate buffers from a surface texture\n");
+ fprintf(stderr, " -d(ump) filename (raw stream data to a file)\n");
+}
+
+int main(int argc, char **argv) {
+ android::ProcessState::self()->startThreadPool();
+
+ bool audioOnly = false;
+ bool listComponents = false;
+ bool dumpProfiles = false;
+ bool extractThumbnail = false;
+ bool seekTest = false;
+ bool useSurfaceAlloc = false;
+ bool useSurfaceTexAlloc = false;
+ bool dumpStream = false;
+ String8 dumpStreamFilename;
+ gNumRepetitions = 1;
+ gMaxNumFrames = 0;
+ gReproduceBug = -1;
+ gPreferSoftwareCodec = false;
+ gForceToUseHardwareCodec = false;
+ gPlaybackAudio = false;
+ gWriteMP4 = false;
+ gDisplayHistogram = false;
+
+ sp<ALooper> looper;
+ sp<LiveSession> liveSession;
+
+ int res;
+ while ((res = getopt(argc, argv, "han:lm:b:ptsrow:kxSTd:")) >= 0) {
+ switch (res) {
+ case 'a':
+ {
+ audioOnly = true;
+ break;
+ }
+
+ case 'd':
+ {
+ dumpStream = true;
+ dumpStreamFilename.setTo(optarg);
+ break;
+ }
+
+ case 'l':
+ {
+ listComponents = true;
+ break;
+ }
+
+ case 'm':
+ case 'n':
+ case 'b':
+ {
+ char *end;
+ long x = strtol(optarg, &end, 10);
+
+ if (*end != '\0' || end == optarg || x <= 0) {
+ x = 1;
+ }
+
+ if (res == 'n') {
+ gNumRepetitions = x;
+ } else if (res == 'm') {
+ gMaxNumFrames = x;
+ } else {
+ CHECK_EQ(res, 'b');
+ gReproduceBug = x;
+ }
+ break;
+ }
+
+ case 'w':
+ {
+ gWriteMP4 = true;
+ gWriteMP4Filename.setTo(optarg);
+ break;
+ }
+
+ case 'p':
+ {
+ dumpProfiles = true;
+ break;
+ }
+
+ case 't':
+ {
+ extractThumbnail = true;
+ break;
+ }
+
+ case 's':
+ {
+ gPreferSoftwareCodec = true;
+ break;
+ }
+
+ case 'r':
+ {
+ gForceToUseHardwareCodec = true;
+ break;
+ }
+
+ case 'o':
+ {
+ gPlaybackAudio = true;
+ break;
+ }
+
+ case 'k':
+ {
+ seekTest = true;
+ break;
+ }
+
+ case 'x':
+ {
+ gDisplayHistogram = true;
+ break;
+ }
+
+ case 'S':
+ {
+ useSurfaceAlloc = true;
+ break;
+ }
+
+ case 'T':
+ {
+ useSurfaceTexAlloc = true;
+ break;
+ }
+
+ case '?':
+ case 'h':
+ default:
+ {
+ usage(argv[0]);
+ exit(1);
+ break;
+ }
+ }
+ }
+
+ if (gPlaybackAudio && !audioOnly) {
+ // This doesn't make any sense if we're decoding the video track.
+ gPlaybackAudio = false;
+ }
+
+ argc -= optind;
+ argv += optind;
+
+ if (extractThumbnail) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.player"));
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
+ CHECK(service.get() != NULL);
+
+ sp<IMediaMetadataRetriever> retriever =
+ service->createMetadataRetriever(getpid());
+
+ CHECK(retriever != NULL);
+
+ for (int k = 0; k < argc; ++k) {
+ const char *filename = argv[k];
+
+ bool failed = true;
+ CHECK_EQ(retriever->setDataSource(filename), (status_t)OK);
+ sp<IMemory> mem =
+ retriever->getFrameAtTime(-1,
+ MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
+
+ if (mem != NULL) {
+ failed = false;
+ printf("getFrameAtTime(%s) => OK\n", filename);
+
+ VideoFrame *frame = (VideoFrame *)mem->pointer();
+
+ CHECK_EQ(writeJpegFile("/sdcard/out.jpg",
+ (uint8_t *)frame + sizeof(VideoFrame),
+ frame->mWidth, frame->mHeight), 0);
+ }
+
+ {
+ mem = retriever->extractAlbumArt();
+
+ if (mem != NULL) {
+ failed = false;
+ printf("extractAlbumArt(%s) => OK\n", filename);
+ }
+ }
+
+ if (failed) {
+ printf("both getFrameAtTime and extractAlbumArt "
+ "failed on file '%s'.\n", filename);
+ }
+ }
+
+ return 0;
+ }
+
+ if (dumpProfiles) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.player"));
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
+ CHECK(service.get() != NULL);
+
+ sp<IOMX> omx = service->getOMX();
+ CHECK(omx.get() != NULL);
+
+ const char *kMimeTypes[] = {
+ MEDIA_MIMETYPE_VIDEO_AVC, MEDIA_MIMETYPE_VIDEO_MPEG4,
+ MEDIA_MIMETYPE_VIDEO_H263, MEDIA_MIMETYPE_AUDIO_AAC,
+ MEDIA_MIMETYPE_AUDIO_AMR_NB, MEDIA_MIMETYPE_AUDIO_AMR_WB,
+ MEDIA_MIMETYPE_AUDIO_MPEG, MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+ MEDIA_MIMETYPE_AUDIO_G711_ALAW, MEDIA_MIMETYPE_AUDIO_VORBIS,
+ MEDIA_MIMETYPE_VIDEO_VPX
+ };
+
+ for (size_t k = 0; k < sizeof(kMimeTypes) / sizeof(kMimeTypes[0]);
+ ++k) {
+ printf("type '%s':\n", kMimeTypes[k]);
+
+ Vector<CodecCapabilities> results;
+ // will retrieve hardware and software codecs
+ CHECK_EQ(QueryCodecs(omx, kMimeTypes[k],
+ true, // queryDecoders
+ &results), (status_t)OK);
+
+ for (size_t i = 0; i < results.size(); ++i) {
+ printf(" decoder '%s' supports ",
+ results[i].mComponentName.string());
+
+ if (results[i].mProfileLevels.size() == 0) {
+ printf("NOTHING.\n");
+ continue;
+ }
+
+ for (size_t j = 0; j < results[i].mProfileLevels.size(); ++j) {
+ const CodecProfileLevel &profileLevel =
+ results[i].mProfileLevels[j];
+
+ printf("%s%ld/%ld", j > 0 ? ", " : "",
+ profileLevel.mProfile, profileLevel.mLevel);
+ }
+
+ printf("\n");
+ }
+ }
+ }
+
+ if (listComponents) {
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.player"));
+ sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+
+ CHECK(service.get() != NULL);
+
+ sp<IOMX> omx = service->getOMX();
+ CHECK(omx.get() != NULL);
+
+ List<IOMX::ComponentInfo> list;
+ omx->listNodes(&list);
+
+ for (List<IOMX::ComponentInfo>::iterator it = list.begin();
+ it != list.end(); ++it) {
+ printf("%s\t Roles: ", (*it).mName.string());
+ for (List<String8>::iterator itRoles = (*it).mRoles.begin() ;
+ itRoles != (*it).mRoles.end() ; ++itRoles) {
+ printf("%s\t", (*itRoles).string());
+ }
+ printf("\n");
+ }
+ }
+
+ sp<SurfaceComposerClient> composerClient;
+ sp<SurfaceControl> control;
+
+ if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
+ if (useSurfaceAlloc) {
+ composerClient = new SurfaceComposerClient;
+ CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+ control = composerClient->createSurface(
+ String8("A Surface"),
+ 0,
+ 1280,
+ 800,
+ PIXEL_FORMAT_RGB_565,
+ 0);
+
+ CHECK(control != NULL);
+ CHECK(control->isValid());
+
+ SurfaceComposerClient::openGlobalTransaction();
+ CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
+ CHECK_EQ(control->show(), (status_t)OK);
+ SurfaceComposerClient::closeGlobalTransaction();
+
+ gSurface = control->getSurface();
+ CHECK(gSurface != NULL);
+ } else {
+ CHECK(useSurfaceTexAlloc);
+
+ sp<SurfaceTexture> texture = new SurfaceTexture(0 /* tex */);
+ gSurface = new SurfaceTextureClient(texture);
+ }
+
+ CHECK_EQ((status_t)OK,
+ native_window_api_connect(
+ gSurface.get(), NATIVE_WINDOW_API_MEDIA));
+ }
+
+ DataSource::RegisterDefaultSniffers();
+
+ OMXClient client;
+ status_t err = client.connect();
+
+ for (int k = 0; k < argc; ++k) {
+ bool syncInfoPresent = true;
+
+ const char *filename = argv[k];
+
+ sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
+
+ if (strncasecmp(filename, "sine:", 5)
+ && strncasecmp(filename, "httplive://", 11)
+ && dataSource == NULL) {
+ fprintf(stderr, "Unable to create data source.\n");
+ return 1;
+ }
+
+ bool isJPEG = false;
+
+ size_t len = strlen(filename);
+ if (len >= 4 && !strcasecmp(filename + len - 4, ".jpg")) {
+ isJPEG = true;
+ }
+
+ Vector<sp<MediaSource> > mediaSources;
+ sp<MediaSource> mediaSource;
+
+ if (isJPEG) {
+ mediaSource = new JPEGSource(dataSource);
+ if (gWriteMP4) {
+ mediaSources.push(mediaSource);
+ }
+ } else if (!strncasecmp("sine:", filename, 5)) {
+ char *end;
+ long sampleRate = strtol(filename + 5, &end, 10);
+
+ if (end == filename + 5) {
+ sampleRate = 44100;
+ }
+ mediaSource = new SineSource(sampleRate, 1);
+ if (gWriteMP4) {
+ mediaSources.push(mediaSource);
+ }
+ } else {
+ sp<MediaExtractor> extractor;
+
+ if (!strncasecmp("httplive://", filename, 11)) {
+ String8 uri("http://");
+ uri.append(filename + 11);
+
+ if (looper == NULL) {
+ looper = new ALooper;
+ looper->start();
+ }
+ liveSession = new LiveSession;
+ looper->registerHandler(liveSession);
+
+ liveSession->connect(uri.string());
+ dataSource = liveSession->getDataSource();
+
+ extractor =
+ MediaExtractor::Create(
+ dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS);
+
+ syncInfoPresent = false;
+ } else {
+ extractor = MediaExtractor::Create(dataSource);
+
+ if (extractor == NULL) {
+ fprintf(stderr, "could not create extractor.\n");
+ return -1;
+ }
+
+ sp<MetaData> meta = extractor->getMetaData();
+
+ if (meta != NULL) {
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
+ syncInfoPresent = false;
+ }
+ }
+ }
+
+ size_t numTracks = extractor->countTracks();
+
+ if (gWriteMP4) {
+ bool haveAudio = false;
+ bool haveVideo = false;
+ for (size_t i = 0; i < numTracks; ++i) {
+ sp<MediaSource> source = extractor->getTrack(i);
+
+ const char *mime;
+ CHECK(source->getFormat()->findCString(
+ kKeyMIMEType, &mime));
+
+ bool useTrack = false;
+ if (!haveAudio && !strncasecmp("audio/", mime, 6)) {
+ haveAudio = true;
+ useTrack = true;
+ } else if (!haveVideo && !strncasecmp("video/", mime, 6)) {
+ haveVideo = true;
+ useTrack = true;
+ }
+
+ if (useTrack) {
+ mediaSources.push(source);
+
+ if (haveAudio && haveVideo) {
+ break;
+ }
+ }
+ }
+ } else {
+ sp<MetaData> meta;
+ size_t i;
+ for (i = 0; i < numTracks; ++i) {
+ meta = extractor->getTrackMetaData(
+ i, MediaExtractor::kIncludeExtensiveMetaData);
+
+ const char *mime;
+ meta->findCString(kKeyMIMEType, &mime);
+
+ if (audioOnly && !strncasecmp(mime, "audio/", 6)) {
+ break;
+ }
+
+ if (!audioOnly && !strncasecmp(mime, "video/", 6)) {
+ break;
+ }
+
+ meta = NULL;
+ }
+
+ if (meta == NULL) {
+ fprintf(stderr,
+ "No suitable %s track found. The '-a' option targets "
+ "audio tracks only; the default is to target "
+ "video tracks only.\n",
+ audioOnly ? "audio" : "video");
+ return -1;
+ }
+
+ int64_t thumbTimeUs;
+ if (meta->findInt64(kKeyThumbnailTime, &thumbTimeUs)) {
+ printf("thumbnailTime: %lld us (%.2f secs)\n",
+ thumbTimeUs, thumbTimeUs / 1E6);
+ }
+
+ mediaSource = extractor->getTrack(i);
+ }
+ }
+
+ if (gWriteMP4) {
+ writeSourcesToMP4(mediaSources, syncInfoPresent);
+ } else if (dumpStream) {
+ dumpSource(mediaSource, dumpStreamFilename);
+ } else if (seekTest) {
+ performSeekTest(mediaSource);
+ } else {
+ playSource(&client, mediaSource);
+ }
+ }
+
+ if ((useSurfaceAlloc || useSurfaceTexAlloc) && !audioOnly) {
+ CHECK_EQ((status_t)OK,
+ native_window_api_disconnect(
+ gSurface.get(), NATIVE_WINDOW_API_MEDIA));
+
+ gSurface.clear();
+
+ if (useSurfaceAlloc) {
+ composerClient->dispose();
+ }
+ }
+
+ client.disconnect();
+
+ return 0;
+}
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
new file mode 100644
index 0000000..efa1445
--- /dev/null
+++ b/cmds/stagefright/stream.cpp
@@ -0,0 +1,375 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "stream"
+#include "utils/Log.h"
+
+#include <binder/ProcessState.h>
+
+#include <media/IStreamSource.h>
+#include <media/mediaplayer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <gui/SurfaceComposerClient.h>
+
+#include <fcntl.h>
+
+using namespace android;
+
+struct MyStreamSource : public BnStreamSource {
+ // Object assumes ownership of fd.
+ MyStreamSource(int fd);
+
+ virtual void setListener(const sp<IStreamListener> &listener);
+ virtual void setBuffers(const Vector<sp<IMemory> > &buffers);
+
+ virtual void onBufferAvailable(size_t index);
+
+protected:
+ virtual ~MyStreamSource();
+
+private:
+ int mFd;
+ off64_t mFileSize;
+ uint64_t mNumPacketsSent;
+
+ sp<IStreamListener> mListener;
+ Vector<sp<IMemory> > mBuffers;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MyStreamSource);
+};
+
+MyStreamSource::MyStreamSource(int fd)
+ : mFd(fd),
+ mFileSize(0),
+ mNumPacketsSent(0) {
+ CHECK_GE(fd, 0);
+
+ mFileSize = lseek64(fd, 0, SEEK_END);
+ lseek64(fd, 0, SEEK_SET);
+}
+
+MyStreamSource::~MyStreamSource() {
+ close(mFd);
+ mFd = -1;
+}
+
+void MyStreamSource::setListener(const sp<IStreamListener> &listener) {
+ mListener = listener;
+}
+
+void MyStreamSource::setBuffers(const Vector<sp<IMemory> > &buffers) {
+ mBuffers = buffers;
+}
+
+void MyStreamSource::onBufferAvailable(size_t index) {
+ CHECK_LT(index, mBuffers.size());
+
+#if 0
+ if (mNumPacketsSent >= 20000) {
+ ALOGI("signalling discontinuity now");
+
+ off64_t offset = 0;
+ CHECK((offset % 188) == 0);
+
+ lseek(mFd, offset, SEEK_SET);
+
+ sp<AMessage> extra = new AMessage;
+ extra->setInt32(IStreamListener::kKeyFormatChange, 0);
+
+ mListener->issueCommand(
+ IStreamListener::DISCONTINUITY, false /* synchronous */, extra);
+
+ mNumPacketsSent = 0;
+ }
+#endif
+
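+ // Fill the buffer the player just handed back directly from the .ts file;
+ // MPEG-2 transport packets are 188 bytes, so progress is tracked in packets.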
+ sp<IMemory> mem = mBuffers.itemAt(index);
+
+ ssize_t n = read(mFd, mem->pointer(), mem->size());
+ if (n <= 0) {
+ mListener->issueCommand(IStreamListener::EOS, false /* synchronous */);
+ } else {
+ mListener->queueBuffer(index, n);
+
+ mNumPacketsSent += n / 188;
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct MyConvertingStreamSource : public BnStreamSource {
+ MyConvertingStreamSource(const char *filename);
+
+ virtual void setListener(const sp<IStreamListener> &listener);
+ virtual void setBuffers(const Vector<sp<IMemory> > &buffers);
+
+ virtual void onBufferAvailable(size_t index);
+
+protected:
+ virtual ~MyConvertingStreamSource();
+
+private:
+ Mutex mLock;
+ Condition mCondition;
+
+ sp<IStreamListener> mListener;
+ Vector<sp<IMemory> > mBuffers;
+
+ sp<MPEG2TSWriter> mWriter;
+
+ ssize_t mCurrentBufferIndex;
+ size_t mCurrentBufferOffset;
+
+ List<size_t> mBufferQueue;
+
+ static ssize_t WriteDataWrapper(void *me, const void *data, size_t size);
+ ssize_t writeData(const void *data, size_t size);
+
+ DISALLOW_EVIL_CONSTRUCTORS(MyConvertingStreamSource);
+};
+
+////////////////////////////////////////////////////////////////////////////////
+
+MyConvertingStreamSource::MyConvertingStreamSource(const char *filename)
+ : mCurrentBufferIndex(-1),
+ mCurrentBufferOffset(0) {
+ sp<DataSource> dataSource = DataSource::CreateFromURI(filename);
+ CHECK(dataSource != NULL);
+
+ sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource);
+ CHECK(extractor != NULL);
+
+ mWriter = new MPEG2TSWriter(
+ this, &MyConvertingStreamSource::WriteDataWrapper);
+
+ for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ const sp<MetaData> &meta = extractor->getTrackMetaData(i);
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ if (strncasecmp("video/", mime, 6) && strncasecmp("audio/", mime, 6)) {
+ continue;
+ }
+
+ CHECK_EQ(mWriter->addSource(extractor->getTrack(i)), (status_t)OK);
+ }
+
+ CHECK_EQ(mWriter->start(), (status_t)OK);
+}
+
+MyConvertingStreamSource::~MyConvertingStreamSource() {
+}
+
+void MyConvertingStreamSource::setListener(
+ const sp<IStreamListener> &listener) {
+ mListener = listener;
+}
+
+void MyConvertingStreamSource::setBuffers(
+ const Vector<sp<IMemory> > &buffers) {
+ mBuffers = buffers;
+}
+
+ssize_t MyConvertingStreamSource::WriteDataWrapper(
+ void *me, const void *data, size_t size) {
+ return static_cast<MyConvertingStreamSource *>(me)->writeData(data, size);
+}
+
+ssize_t MyConvertingStreamSource::writeData(const void *data, size_t size) {
+ size_t totalWritten = 0;
+
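+ // Block until the player returns a buffer, copy as much of the TS writer's
+ // output as fits, and queue each buffer back to the player once it is full.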
+ while (size > 0) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mCurrentBufferIndex < 0) {
+ while (mBufferQueue.empty()) {
+ mCondition.wait(mLock);
+ }
+
+ mCurrentBufferIndex = *mBufferQueue.begin();
+ mCurrentBufferOffset = 0;
+
+ mBufferQueue.erase(mBufferQueue.begin());
+ }
+
+ sp<IMemory> mem = mBuffers.itemAt(mCurrentBufferIndex);
+
+ size_t copy = size;
+ if (copy + mCurrentBufferOffset > mem->size()) {
+ copy = mem->size() - mCurrentBufferOffset;
+ }
+
+ memcpy((uint8_t *)mem->pointer() + mCurrentBufferOffset, data, copy);
+ mCurrentBufferOffset += copy;
+
+ if (mCurrentBufferOffset == mem->size()) {
+ mListener->queueBuffer(mCurrentBufferIndex, mCurrentBufferOffset);
+ mCurrentBufferIndex = -1;
+ }
+
+ data = (const uint8_t *)data + copy;
+ size -= copy;
+
+ totalWritten += copy;
+ }
+
+ return (ssize_t)totalWritten;
+}
+
+void MyConvertingStreamSource::onBufferAvailable(size_t index) {
+ Mutex::Autolock autoLock(mLock);
+
+ mBufferQueue.push_back(index);
+ mCondition.signal();
+
+ if (mWriter->reachedEOS()) {
+ if (mCurrentBufferIndex >= 0) {
+ mListener->queueBuffer(mCurrentBufferIndex, mCurrentBufferOffset);
+ mCurrentBufferIndex = -1;
+ }
+
+ mListener->issueCommand(IStreamListener::EOS, false /* synchronous */);
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+
+struct MyClient : public BnMediaPlayerClient {
+ MyClient()
+ : mEOS(false) {
+ }
+
+ virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (msg == MEDIA_ERROR || msg == MEDIA_PLAYBACK_COMPLETE) {
+ mEOS = true;
+ mCondition.signal();
+ }
+ }
+
+ void waitForEOS() {
+ Mutex::Autolock autoLock(mLock);
+ while (!mEOS) {
+ mCondition.wait(mLock);
+ }
+ }
+
+protected:
+ virtual ~MyClient() {
+ }
+
+private:
+ Mutex mLock;
+ Condition mCondition;
+
+ bool mEOS;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MyClient);
+};
+
+int main(int argc, char **argv) {
+ android::ProcessState::self()->startThreadPool();
+
+ DataSource::RegisterDefaultSniffers();
+
+ if (argc != 2) {
+ fprintf(stderr, "Usage: %s filename\n", argv[0]);
+ return 1;
+ }
+
+ sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
+ CHECK_EQ(composerClient->initCheck(), (status_t)OK);
+
+ ssize_t displayWidth = composerClient->getDisplayWidth(0);
+ ssize_t displayHeight = composerClient->getDisplayHeight(0);
+
+ ALOGV("display is %d x %d\n", displayWidth, displayHeight);
+
+ sp<SurfaceControl> control =
+ composerClient->createSurface(
+ String8("A Surface"),
+ 0,
+ displayWidth,
+ displayHeight,
+ PIXEL_FORMAT_RGB_565,
+ 0);
+
+ CHECK(control != NULL);
+ CHECK(control->isValid());
+
+ SurfaceComposerClient::openGlobalTransaction();
+ CHECK_EQ(control->setLayer(INT_MAX), (status_t)OK);
+ CHECK_EQ(control->show(), (status_t)OK);
+ SurfaceComposerClient::closeGlobalTransaction();
+
+ sp<Surface> surface = control->getSurface();
+ CHECK(surface != NULL);
+
+ sp<IServiceManager> sm = defaultServiceManager();
+ sp<IBinder> binder = sm->getService(String16("media.player"));
+ sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+
+ CHECK(service.get() != NULL);
+
+ sp<MyClient> client = new MyClient;
+
+ sp<IStreamSource> source;
+
+ size_t len = strlen(argv[1]);
+ if (len >= 3 && !strcasecmp(".ts", &argv[1][len - 3])) {
+ int fd = open(argv[1], O_RDONLY);
+
+ if (fd < 0) {
+ fprintf(stderr, "Failed to open file '%s'.\n", argv[1]);
+ return 1;
+ }
+
+ source = new MyStreamSource(fd);
+ } else {
+ printf("Converting file to transport stream for streaming...\n");
+
+ source = new MyConvertingStreamSource(argv[1]);
+ }
+
+ sp<IMediaPlayer> player =
+ service->create(getpid(), client, 0);
+
+ if (player != NULL && player->setDataSource(source) == NO_ERROR) {
+ player->setVideoSurfaceTexture(surface->getSurfaceTexture());
+ player->start();
+
+ client->waitForEOS();
+
+ player->stop();
+ } else {
+ fprintf(stderr, "failed to instantiate player.\n");
+ }
+
+ composerClient->dispose();
+
+ return 0;
+}