Diffstat (limited to 'cmds')
 cmds/screenrecord/screenrecord.cpp      |  11
 cmds/stagefright/Android.mk             |  42
 cmds/stagefright/audioloop.cpp          |  11
 cmds/stagefright/codec.cpp              |  49
 cmds/stagefright/filters/argbtorgba.rs  |  26
 cmds/stagefright/filters/nightvision.rs |  38
 cmds/stagefright/filters/saturation.rs  |  40
 cmds/stagefright/mediafilter.cpp        | 785
 cmds/stagefright/muxer.cpp              |  12
 cmds/stagefright/recordvideo.cpp        |  12
 cmds/stagefright/stagefright.cpp        |   9
 11 files changed, 1020 insertions(+), 15 deletions(-)
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 02df1d2..36a7e73 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -23,7 +23,10 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
#include <sys/wait.h>
+
#include <termios.h>
#include <unistd.h>
@@ -637,7 +640,13 @@ static status_t recordScreen(const char* fileName) {
case FORMAT_MP4: {
// Configure muxer. We have to wait for the CSD blob from the encoder
// before we can start it.
- muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+ int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ if (fd < 0) {
+ fprintf(stderr, "ERROR: couldn't open file\n");
+ abort();
+ }
+ muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+ close(fd);
if (gRotate) {
muxer->setOrientationHint(90); // TODO: does this do anything?
}
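
This open/construct/close sequence is the pattern applied throughout this change (audioloop, muxer, recordvideo and stagefright below get the same treatment): the command opens the output file itself and hands the descriptor to the writer, and the immediate close(fd) implies the muxer keeps its own reference to the descriptor rather than borrowing the caller's. A minimal sketch of the pattern outside screenrecord, assuming only the standard Android media headers:

#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>

#include <media/stagefright/MediaMuxer.h>
#include <utils/StrongPointer.h>

// Open an output file and wrap it in an MP4 muxer; returns NULL on failure.
static android::sp<android::MediaMuxer> openMp4Muxer(const char *fileName) {
    int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR,
            S_IRUSR | S_IWUSR);
    if (fd < 0) {
        return NULL;
    }
    android::sp<android::MediaMuxer> muxer = new android::MediaMuxer(
            fd, android::MediaMuxer::OUTPUT_FORMAT_MPEG_4);
    close(fd);  // safe: the muxer holds its own copy of the descriptor
    return muxer;
}
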
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 561ce02..0e3bc68 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -169,6 +169,48 @@ include $(BUILD_EXECUTABLE)
include $(CLEAR_VARS)
+LOCAL_SRC_FILES:= \
+ filters/argbtorgba.rs \
+ filters/nightvision.rs \
+ filters/saturation.rs \
+ mediafilter.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright \
+ liblog \
+ libutils \
+ libbinder \
+ libstagefright_foundation \
+ libmedia \
+ libgui \
+ libcutils \
+ libui \
+ libRScpp \
+
+LOCAL_C_INCLUDES:= \
+ $(TOP)/frameworks/av/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/rs/cpp \
+ $(TOP)/frameworks/rs \
+
+intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,)
+LOCAL_C_INCLUDES += $(intermediates)
+
+LOCAL_STATIC_LIBRARIES:= \
+ libstagefright_mediafilter
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_MODULE:= mediafilter
+
+include $(BUILD_EXECUTABLE)
+
+################################################################################
+
+include $(CLEAR_VARS)
+
LOCAL_SRC_FILES:= \
muxer.cpp \
diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp
index 96073f1..7b0de24 100644
--- a/cmds/stagefright/audioloop.cpp
+++ b/cmds/stagefright/audioloop.cpp
@@ -14,6 +14,10 @@
* limitations under the License.
*/
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
#include <binder/ProcessState.h>
#include <media/mediarecorder.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -109,7 +113,12 @@ int main(int argc, char* argv[])
if (fileOut != NULL) {
// target file specified, write encoded AMR output
- sp<AMRWriter> writer = new AMRWriter(fileOut);
+ int fd = open(fileOut, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ if (fd < 0) {
+ return 1;
+ }
+ sp<AMRWriter> writer = new AMRWriter(fd);
+ close(fd);
writer->addSource(encoder);
writer->start();
sleep(duration);
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index fd02bcc..d987250 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -45,9 +45,10 @@ static void usage(const char *me) {
fprintf(stderr, "usage: %s [-a] use audio\n"
"\t\t[-v] use video\n"
"\t\t[-p] playback\n"
- "\t\t[-S] allocate buffers from a surface\n",
+ "\t\t[-S] allocate buffers from a surface\n"
+ "\t\t[-R] render output to surface (enables -S)\n"
+ "\t\t[-T] use render timestamps (enables -R)\n",
me);
-
exit(1);
}
@@ -71,7 +72,9 @@ static int decode(
const char *path,
bool useAudio,
bool useVideo,
- const android::sp<android::Surface> &surface) {
+ const android::sp<android::Surface> &surface,
+ bool renderSurface,
+ bool useTimestamp) {
using namespace android;
static int64_t kTimeout = 500ll;
@@ -136,6 +139,7 @@ static int decode(
CHECK(!stateByTrack.isEmpty());
int64_t startTimeUs = ALooper::GetNowUs();
+ int64_t startTimeRender = -1;
for (size_t i = 0; i < stateByTrack.size(); ++i) {
CodecState *state = &stateByTrack.editValueAt(i);
@@ -260,7 +264,23 @@ static int decode(
++state->mNumBuffersDecoded;
state->mNumBytesDecoded += size;
- err = state->mCodec->releaseOutputBuffer(index);
+ if (surface == NULL || !renderSurface) {
+ err = state->mCodec->releaseOutputBuffer(index);
+ } else if (useTimestamp) {
+ if (startTimeRender == -1) {
+ // begin rendering 2 vsyncs (~33ms) after first decode
+ startTimeRender =
+ systemTime(SYSTEM_TIME_MONOTONIC) + 33000000
+ - (presentationTimeUs * 1000);
+ }
+ presentationTimeUs =
+ (presentationTimeUs * 1000) + startTimeRender;
+ err = state->mCodec->renderOutputBufferAndRelease(
+ index, presentationTimeUs);
+ } else {
+ err = state->mCodec->renderOutputBufferAndRelease(index);
+ }
+
CHECK_EQ(err, (status_t)OK);
if (flags & MediaCodec::BUFFER_FLAG_EOS) {
@@ -320,34 +340,42 @@ int main(int argc, char **argv) {
bool useVideo = false;
bool playback = false;
bool useSurface = false;
+ bool renderSurface = false;
+ bool useTimestamp = false;
int res;
- while ((res = getopt(argc, argv, "havpSD")) >= 0) {
+ while ((res = getopt(argc, argv, "havpSDRT")) >= 0) {
switch (res) {
case 'a':
{
useAudio = true;
break;
}
-
case 'v':
{
useVideo = true;
break;
}
-
case 'p':
{
playback = true;
break;
}
-
+ case 'T':
+ {
+ useTimestamp = true;
+ }
+ // fall through
+ case 'R':
+ {
+ renderSurface = true;
+ }
+ // fall through
case 'S':
{
useSurface = true;
break;
}
-
case '?':
case 'h':
default:
@@ -422,7 +450,8 @@ int main(int argc, char **argv) {
player->stop();
player->reset();
} else {
- decode(looper, argv[0], useAudio, useVideo, surface);
+ decode(looper, argv[0], useAudio, useVideo, surface, renderSurface,
+ useTimestamp);
}
if (playback || (useSurface && useVideo)) {
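
The -T path converts the extractor's microsecond presentation timestamps into the absolute nanosecond render times that the two-argument renderOutputBufferAndRelease() expects on the CLOCK_MONOTONIC timeline (the same clock the code samples via systemTime(SYSTEM_TIME_MONOTONIC)). A worked sketch of that mapping, with illustrative names that are not part of the patch:

#include <stdint.h>

// Map a stream PTS (microseconds) to an absolute render time (nanoseconds).
// The anchor is chosen once so the first frame renders ~2 vsyncs (33 ms)
// after "now"; every later frame keeps its original spacing.
static int64_t toRenderTimeNs(int64_t ptsUs, int64_t nowNs, int64_t *anchorNs) {
    if (*anchorNs == -1) {
        *anchorNs = nowNs + 33000000 - ptsUs * 1000;
    }
    return ptsUs * 1000 + *anchorNs;
}

// Example: with a first PTS of 0 us at monotonic time T, the frame renders at
// T + 33 ms; a frame with PTS 100000 us maps to T + 133 ms.
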
diff --git a/cmds/stagefright/filters/argbtorgba.rs b/cmds/stagefright/filters/argbtorgba.rs
new file mode 100644
index 0000000..229ff8c
--- /dev/null
+++ b/cmds/stagefright/filters/argbtorgba.rs
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+void root(const uchar4 *v_in, uchar4 *v_out) {
+ v_out->x = v_in->y;
+ v_out->y = v_in->z;
+ v_out->z = v_in->w;
+ v_out->w = v_in->x;
+}
\ No newline at end of file
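
Inside these kernels the input uchar4 is treated as (A, R, G, B) in its .x/.y/.z/.w components (the night-vision kernel's "don't modify A" comment below relies on the same convention), and this kernel rotates it into (R, G, B, A). A scalar sketch of the same swizzle, with the components written as a plain array purely for illustration:

#include <stdint.h>

// in[0..3] = A, R, G, B  ->  out[0..3] = R, G, B, A
static inline void argbToRgba(const uint8_t in[4], uint8_t out[4]) {
    out[0] = in[1];  // R
    out[1] = in[2];  // G
    out[2] = in[3];  // B
    out[3] = in[0];  // A
}
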
diff --git a/cmds/stagefright/filters/nightvision.rs b/cmds/stagefright/filters/nightvision.rs
new file mode 100644
index 0000000..f61413c
--- /dev/null
+++ b/cmds/stagefright/filters/nightvision.rs
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+const static float3 gNightVisionMult = {0.5f, 1.f, 0.5f};
+
+// calculates luminance of pixel, then biases color balance toward green
+void root(const uchar4 *v_in, uchar4 *v_out) {
+ v_out->x = v_in->x; // don't modify A
+
+ // get RGB, scale 0-255 uchar to 0-1.0 float
+ float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
+ v_in->w * 0.003921569f};
+
+ // apply filter
+ float3 result = dot(rgb, gMonoMult) * gNightVisionMult;
+
+ v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+ v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+ v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
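
The kernel computes Rec. 601 luminance (0.299 R + 0.587 G + 0.114 B) and then scales that gray value by {0.5, 1.0, 0.5}, so every pixel becomes a green-tinted shade of gray. A standalone scalar check of the arithmetic for one pixel, with input values chosen purely for illustration:

#include <stdio.h>

int main() {
    float r = 200 / 255.f, g = 100 / 255.f, b = 50 / 255.f;
    float luma = 0.299f * r + 0.587f * g + 0.114f * b;   // ~0.487
    // bias toward green: output = luma * {0.5, 1.0, 0.5}
    printf("out = (%d, %d, %d)\n",
            (int)(luma * 0.5f * 255.f + 0.5f),
            (int)(luma * 255.f + 0.5f),
            (int)(luma * 0.5f * 255.f + 0.5f));   // prints (62, 124, 62)
    return 0;
}
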
diff --git a/cmds/stagefright/filters/saturation.rs b/cmds/stagefright/filters/saturation.rs
new file mode 100644
index 0000000..1de9dd8
--- /dev/null
+++ b/cmds/stagefright/filters/saturation.rs
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+// global variables (parameters accessible to application code)
+float gSaturation = 1.0f;
+
+void root(const uchar4 *v_in, uchar4 *v_out) {
+ v_out->x = v_in->x; // don't modify A
+
+ // get RGB, scale 0-255 uchar to 0-1.0 float
+ float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
+ v_in->w * 0.003921569f};
+
+ // apply saturation filter
+ float3 result = dot(rgb, gMonoMult);
+ result = mix(result, rgb, gSaturation);
+
+ v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+ v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+ v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
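
Since mix(a, b, t) = a + (b - a) * t, the kernel interpolates (or extrapolates) each pixel between its luminance and its original color: gSaturation = 0 produces grayscale, 1.0 reproduces the input, and the 3.0 that mediafilter.cpp sets below pushes each channel three times as far from gray, i.e. oversaturates. The per-channel form, written out only for illustration:

// out = luma + (in - luma) * saturation, applied to R, G and B independently
static inline float saturateChannel(float in, float luma, float saturation) {
    return luma + (in - luma) * saturation;
}
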
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
new file mode 100644
index 0000000..f77b38b
--- /dev/null
+++ b/cmds/stagefright/mediafilter.cpp
@@ -0,0 +1,785 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "mediafilterTest"
+
+#include <inttypes.h>
+
+#include <binder/ProcessState.h>
+#include <filters/ColorConvert.h>
+#include <gui/ISurfaceComposer.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/Surface.h>
+#include <media/ICrypto.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <media/stagefright/RenderScriptWrapper.h>
+#include <OMX_IVCommon.h>
+#include <ui/DisplayInfo.h>
+
+#include "RenderScript.h"
+#include "ScriptC_argbtorgba.h"
+#include "ScriptC_nightvision.h"
+#include "ScriptC_saturation.h"
+
+// test parameters
+static const bool kTestFlush = true; // Note: true will drop 1 out of
+static const int kFlushAfterFrames = 25; // kFlushAfterFrames output frames
+static const int64_t kTimeout = 500ll;
+
+// built-in filter parameters
+static const int32_t kInvert = false; // ZeroFilter param
+static const float kBlurRadius = 15.0f; // IntrinsicBlurFilter param
+static const float kSaturation = 0.0f; // SaturationFilter param
+
+static void usage(const char *me) {
+ fprintf(stderr, "usage: [flags] %s\n"
+ "\t[-b] use IntrinsicBlurFilter\n"
+ "\t[-c] use argb to rgba conversion RSFilter\n"
+ "\t[-n] use night vision RSFilter\n"
+ "\t[-r] use saturation RSFilter\n"
+ "\t[-s] use SaturationFilter\n"
+ "\t[-z] use ZeroFilter (copy filter)\n"
+ "\t[-R] render output to surface (enables -S)\n"
+ "\t[-S] allocate buffers from a surface\n"
+ "\t[-T] use render timestamps (enables -R)\n",
+ me);
+ exit(1);
+}
+
+namespace android {
+
+struct SaturationRSFilter : RenderScriptWrapper::RSFilterCallback {
+ void init(RSC::sp<RSC::RS> context) {
+ mScript = new ScriptC_saturation(context);
+ mScript->set_gSaturation(3.f);
+ }
+
+ virtual status_t processBuffers(
+ RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
+ mScript->forEach_root(inBuffer, outBuffer);
+
+ return OK;
+ }
+
+ status_t handleSetParameters(const sp<AMessage> &msg) {
+ return OK;
+ }
+
+private:
+ RSC::sp<ScriptC_saturation> mScript;
+};
+
+struct NightVisionRSFilter : RenderScriptWrapper::RSFilterCallback {
+ void init(RSC::sp<RSC::RS> context) {
+ mScript = new ScriptC_nightvision(context);
+ }
+
+ virtual status_t processBuffers(
+ RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
+ mScript->forEach_root(inBuffer, outBuffer);
+
+ return OK;
+ }
+
+ status_t handleSetParameters(const sp<AMessage> &msg) {
+ return OK;
+ }
+
+private:
+ RSC::sp<ScriptC_nightvision> mScript;
+};
+
+struct ARGBToRGBARSFilter : RenderScriptWrapper::RSFilterCallback {
+ void init(RSC::sp<RSC::RS> context) {
+ mScript = new ScriptC_argbtorgba(context);
+ }
+
+ virtual status_t processBuffers(
+ RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
+ mScript->forEach_root(inBuffer, outBuffer);
+
+ return OK;
+ }
+
+ status_t handleSetParameters(const sp<AMessage> &msg) {
+ return OK;
+ }
+
+private:
+ RSC::sp<ScriptC_argbtorgba> mScript;
+};
+
+struct CodecState {
+ sp<MediaCodec> mCodec;
+ Vector<sp<ABuffer> > mInBuffers;
+ Vector<sp<ABuffer> > mOutBuffers;
+ bool mSignalledInputEOS;
+ bool mSawOutputEOS;
+ int64_t mNumBuffersDecoded;
+};
+
+struct DecodedFrame {
+ size_t index;
+ size_t offset;
+ size_t size;
+ int64_t presentationTimeUs;
+ uint32_t flags;
+};
+
+enum FilterType {
+ FILTERTYPE_ZERO,
+ FILTERTYPE_INTRINSIC_BLUR,
+ FILTERTYPE_SATURATION,
+ FILTERTYPE_RS_SATURATION,
+ FILTERTYPE_RS_NIGHT_VISION,
+ FILTERTYPE_RS_ARGB_TO_RGBA,
+};
+
+size_t inputFramesSinceFlush = 0;
+void tryCopyDecodedBuffer(
+ List<DecodedFrame> *decodedFrameIndices,
+ CodecState *filterState,
+ CodecState *vidState) {
+ if (decodedFrameIndices->empty()) {
+ return;
+ }
+
+ size_t filterIndex;
+ status_t err = filterState->mCodec->dequeueInputBuffer(
+ &filterIndex, kTimeout);
+ if (err != OK) {
+ return;
+ }
+
+ ++inputFramesSinceFlush;
+
+ DecodedFrame frame = *decodedFrameIndices->begin();
+
+ // only consume a buffer if we are not going to flush, since we expect
+ // the dequeue -> flush -> queue operation to cause an error and
+ // not produce an output frame
+ if (!kTestFlush || inputFramesSinceFlush < kFlushAfterFrames) {
+ decodedFrameIndices->erase(decodedFrameIndices->begin());
+ }
+ size_t outIndex = frame.index;
+
+ const sp<ABuffer> &srcBuffer =
+ vidState->mOutBuffers.itemAt(outIndex);
+ const sp<ABuffer> &destBuffer =
+ filterState->mInBuffers.itemAt(filterIndex);
+
+ sp<AMessage> srcFormat, destFormat;
+ vidState->mCodec->getOutputFormat(&srcFormat);
+ filterState->mCodec->getInputFormat(&destFormat);
+
+ int32_t srcWidth, srcHeight, srcStride, srcSliceHeight;
+ int32_t srcColorFormat, destColorFormat;
+ int32_t destWidth, destHeight, destStride, destSliceHeight;
+ CHECK(srcFormat->findInt32("stride", &srcStride)
+ && srcFormat->findInt32("slice-height", &srcSliceHeight)
+ && srcFormat->findInt32("width", &srcWidth)
+ && srcFormat->findInt32("height", & srcHeight)
+ && srcFormat->findInt32("color-format", &srcColorFormat));
+ CHECK(destFormat->findInt32("stride", &destStride)
+ && destFormat->findInt32("slice-height", &destSliceHeight)
+ && destFormat->findInt32("width", &destWidth)
+ && destFormat->findInt32("height", & destHeight)
+ && destFormat->findInt32("color-format", &destColorFormat));
+
+ CHECK(srcWidth <= destStride && srcHeight <= destSliceHeight);
+
+ convertYUV420spToARGB(
+ srcBuffer->data(),
+ srcBuffer->data() + srcStride * srcSliceHeight,
+ srcWidth,
+ srcHeight,
+ destBuffer->data());
+
+ // copy timestamp
+ int64_t timeUs;
+ CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs));
+ destBuffer->meta()->setInt64("timeUs", timeUs);
+
+ if (kTestFlush && inputFramesSinceFlush >= kFlushAfterFrames) {
+ inputFramesSinceFlush = 0;
+
+ // check that queueing a buffer that was dequeued before flush
+ // fails with expected error EACCES
+ filterState->mCodec->flush();
+
+ err = filterState->mCodec->queueInputBuffer(
+ filterIndex, 0 /* offset */, destBuffer->size(),
+ timeUs, frame.flags);
+
+ if (err == OK) {
+ ALOGE("FAIL: queue after flush returned OK");
+ } else if (err != -EACCES) {
+ ALOGE("queueInputBuffer after flush returned %d, "
+ "expected -EACCES (-13)", err);
+ }
+ } else {
+ err = filterState->mCodec->queueInputBuffer(
+ filterIndex, 0 /* offset */, destBuffer->size(),
+ timeUs, frame.flags);
+ CHECK(err == OK);
+
+ err = vidState->mCodec->releaseOutputBuffer(outIndex);
+ CHECK(err == OK);
+ }
+}
+
+size_t outputFramesSinceFlush = 0;
+void tryDrainOutputBuffer(
+ CodecState *filterState,
+ const sp<Surface> &surface, bool renderSurface,
+ bool useTimestamp, int64_t *startTimeRender) {
+ size_t index;
+ size_t offset;
+ size_t size;
+ int64_t presentationTimeUs;
+ uint32_t flags;
+ status_t err = filterState->mCodec->dequeueOutputBuffer(
+ &index, &offset, &size, &presentationTimeUs, &flags,
+ kTimeout);
+
+ if (err != OK) {
+ return;
+ }
+
+ ++outputFramesSinceFlush;
+
+ if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
+ filterState->mCodec->flush();
+ }
+
+ if (surface == NULL || !renderSurface) {
+ err = filterState->mCodec->releaseOutputBuffer(index);
+ } else if (useTimestamp) {
+ if (*startTimeRender == -1) {
+ // begin rendering 2 vsyncs after first decode
+ *startTimeRender = systemTime(SYSTEM_TIME_MONOTONIC)
+ + 33000000 - (presentationTimeUs * 1000);
+ }
+ presentationTimeUs =
+ (presentationTimeUs * 1000) + *startTimeRender;
+ err = filterState->mCodec->renderOutputBufferAndRelease(
+ index, presentationTimeUs);
+ } else {
+ err = filterState->mCodec->renderOutputBufferAndRelease(index);
+ }
+
+ if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) {
+ outputFramesSinceFlush = 0;
+
+ // releasing the buffer dequeued before flush should cause an error
+ // if so, the frame will also be skipped in output stream
+ if (err == OK) {
+ ALOGE("FAIL: release after flush returned OK");
+ } else if (err != -EACCES) {
+ ALOGE("releaseOutputBuffer after flush returned %d, "
+ "expected -EACCES (-13)", err);
+ }
+ } else {
+ CHECK(err == OK);
+ }
+
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ ALOGV("reached EOS on output.");
+ filterState->mSawOutputEOS = true;
+ }
+}
+
+static int decode(
+ const sp<ALooper> &looper,
+ const char *path,
+ const sp<Surface> &surface,
+ bool renderSurface,
+ bool useTimestamp,
+ FilterType filterType) {
+
+ static int64_t kTimeout = 500ll;
+
+ sp<NuMediaExtractor> extractor = new NuMediaExtractor;
+ if (extractor->setDataSource(NULL /* httpService */, path) != OK) {
+ fprintf(stderr, "unable to instantiate extractor.\n");
+ return 1;
+ }
+
+ KeyedVector<size_t, CodecState> stateByTrack;
+
+ CodecState *vidState = NULL;
+ for (size_t i = 0; i < extractor->countTracks(); ++i) {
+ sp<AMessage> format;
+ status_t err = extractor->getTrackFormat(i, &format);
+ CHECK(err == OK);
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+ bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
+ if (!isVideo) {
+ continue;
+ }
+
+ ALOGV("selecting track %zu", i);
+
+ err = extractor->selectTrack(i);
+ CHECK(err == OK);
+
+ CodecState *state =
+ &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));
+
+ vidState = state;
+
+ state->mNumBuffersDecoded = 0;
+
+ state->mCodec = MediaCodec::CreateByType(
+ looper, mime.c_str(), false /* encoder */);
+
+ CHECK(state->mCodec != NULL);
+
+ err = state->mCodec->configure(
+ format, NULL /* surface */, NULL /* crypto */, 0 /* flags */);
+
+ CHECK(err == OK);
+
+ state->mSignalledInputEOS = false;
+ state->mSawOutputEOS = false;
+
+ break;
+ }
+ CHECK(!stateByTrack.isEmpty());
+ CHECK(vidState != NULL);
+ sp<AMessage> vidFormat;
+ vidState->mCodec->getOutputFormat(&vidFormat);
+
+ // set filter to use ARGB8888
+ vidFormat->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
+ // set app cache directory path
+ vidFormat->setString("cacheDir", "/system/bin");
+
+ // create RenderScript context for RSFilters
+ RSC::sp<RSC::RS> context = new RSC::RS();
+ context->init("/system/bin");
+
+ sp<RenderScriptWrapper::RSFilterCallback> rsFilter;
+
+ // create renderscript wrapper for RSFilters
+ sp<RenderScriptWrapper> rsWrapper = new RenderScriptWrapper;
+ rsWrapper->mContext = context.get();
+
+ CodecState *filterState = new CodecState();
+ filterState->mNumBuffersDecoded = 0;
+
+ sp<AMessage> params = new AMessage();
+
+ switch (filterType) {
+ case FILTERTYPE_ZERO:
+ {
+ filterState->mCodec = MediaCodec::CreateByComponentName(
+ looper, "android.filter.zerofilter");
+ params->setInt32("invert", kInvert);
+ break;
+ }
+ case FILTERTYPE_INTRINSIC_BLUR:
+ {
+ filterState->mCodec = MediaCodec::CreateByComponentName(
+ looper, "android.filter.intrinsicblur");
+ params->setFloat("blur-radius", kBlurRadius);
+ break;
+ }
+ case FILTERTYPE_SATURATION:
+ {
+ filterState->mCodec = MediaCodec::CreateByComponentName(
+ looper, "android.filter.saturation");
+ params->setFloat("saturation", kSaturation);
+ break;
+ }
+ case FILTERTYPE_RS_SATURATION:
+ {
+ SaturationRSFilter *satFilter = new SaturationRSFilter;
+ satFilter->init(context);
+ rsFilter = satFilter;
+ rsWrapper->mCallback = rsFilter;
+ vidFormat->setObject("rs-wrapper", rsWrapper);
+
+ filterState->mCodec = MediaCodec::CreateByComponentName(
+ looper, "android.filter.RenderScript");
+ break;
+ }
+ case FILTERTYPE_RS_NIGHT_VISION:
+ {
+ NightVisionRSFilter *nightVisionFilter = new NightVisionRSFilter;
+ nightVisionFilter->init(context);
+ rsFilter = nightVisionFilter;
+ rsWrapper->mCallback = rsFilter;
+ vidFormat->setObject("rs-wrapper", rsWrapper);
+
+ filterState->mCodec = MediaCodec::CreateByComponentName(
+ looper, "android.filter.RenderScript");
+ break;
+ }
+ case FILTERTYPE_RS_ARGB_TO_RGBA:
+ {
+ ARGBToRGBARSFilter *argbToRgbaFilter = new ARGBToRGBARSFilter;
+ argbToRgbaFilter->init(context);
+ rsFilter = argbToRgbaFilter;
+ rsWrapper->mCallback = rsFilter;
+ vidFormat->setObject("rs-wrapper", rsWrapper);
+
+ filterState->mCodec = MediaCodec::CreateByComponentName(
+ looper, "android.filter.RenderScript");
+ break;
+ }
+ default:
+ {
+ LOG_ALWAYS_FATAL("mediacodec.cpp error: unrecognized FilterType");
+ break;
+ }
+ }
+ CHECK(filterState->mCodec != NULL);
+
+ status_t err = filterState->mCodec->configure(
+ vidFormat /* format */, surface, NULL /* crypto */, 0 /* flags */);
+ CHECK(err == OK);
+
+ filterState->mSignalledInputEOS = false;
+ filterState->mSawOutputEOS = false;
+
+ int64_t startTimeUs = ALooper::GetNowUs();
+ int64_t startTimeRender = -1;
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ sp<MediaCodec> codec = state->mCodec;
+
+ CHECK_EQ((status_t)OK, codec->start());
+
+ CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
+ CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
+
+ ALOGV("got %zu input and %zu output buffers",
+ state->mInBuffers.size(), state->mOutBuffers.size());
+ }
+
+ CHECK_EQ((status_t)OK, filterState->mCodec->setParameters(params));
+
+ if (kTestFlush) {
+ status_t flushErr = filterState->mCodec->flush();
+ if (flushErr == OK) {
+ ALOGE("FAIL: Flush before start returned OK");
+ } else {
+ ALOGV("Flush before start returned status %d, usually ENOSYS (-38)",
+ flushErr);
+ }
+ }
+
+ CHECK_EQ((status_t)OK, filterState->mCodec->start());
+ CHECK_EQ((status_t)OK, filterState->mCodec->getInputBuffers(
+ &filterState->mInBuffers));
+ CHECK_EQ((status_t)OK, filterState->mCodec->getOutputBuffers(
+ &filterState->mOutBuffers));
+
+ if (kTestFlush) {
+ status_t flushErr = filterState->mCodec->flush();
+ if (flushErr != OK) {
+ ALOGE("FAIL: Flush after start returned %d, expect OK (0)",
+ flushErr);
+ } else {
+ ALOGV("Flush immediately after start OK");
+ }
+ }
+
+ List<DecodedFrame> decodedFrameIndices;
+
+ // loop until decoder reaches EOS
+ bool sawInputEOS = false;
+ bool sawOutputEOSOnAllTracks = false;
+ while (!sawOutputEOSOnAllTracks) {
+ if (!sawInputEOS) {
+ size_t trackIndex;
+ status_t err = extractor->getSampleTrackIndex(&trackIndex);
+
+ if (err != OK) {
+ ALOGV("saw input eos");
+ sawInputEOS = true;
+ } else {
+ CodecState *state = &stateByTrack.editValueFor(trackIndex);
+
+ size_t index;
+ err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
+
+ if (err == OK) {
+ ALOGV("filling input buffer %zu", index);
+
+ const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index);
+
+ err = extractor->readSampleData(buffer);
+ CHECK(err == OK);
+
+ int64_t timeUs;
+ err = extractor->getSampleTime(&timeUs);
+ CHECK(err == OK);
+
+ uint32_t bufferFlags = 0;
+
+ err = state->mCodec->queueInputBuffer(
+ index, 0 /* offset */, buffer->size(),
+ timeUs, bufferFlags);
+
+ CHECK(err == OK);
+
+ extractor->advance();
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+ }
+ } else {
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ if (!state->mSignalledInputEOS) {
+ size_t index;
+ status_t err =
+ state->mCodec->dequeueInputBuffer(&index, kTimeout);
+
+ if (err == OK) {
+ ALOGV("signalling input EOS on track %zu", i);
+
+ err = state->mCodec->queueInputBuffer(
+ index, 0 /* offset */, 0 /* size */,
+ 0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS);
+
+ CHECK(err == OK);
+
+ state->mSignalledInputEOS = true;
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+ }
+ }
+ }
+
+ sawOutputEOSOnAllTracks = true;
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ if (state->mSawOutputEOS) {
+ continue;
+ } else {
+ sawOutputEOSOnAllTracks = false;
+ }
+
+ DecodedFrame frame;
+ status_t err = state->mCodec->dequeueOutputBuffer(
+ &frame.index, &frame.offset, &frame.size,
+ &frame.presentationTimeUs, &frame.flags, kTimeout);
+
+ if (err == OK) {
+ ALOGV("draining decoded buffer %zu, time = %lld us",
+ frame.index, frame.presentationTimeUs);
+
+ ++(state->mNumBuffersDecoded);
+
+ decodedFrameIndices.push_back(frame);
+
+ if (frame.flags & MediaCodec::BUFFER_FLAG_EOS) {
+ ALOGV("reached EOS on decoder output.");
+ state->mSawOutputEOS = true;
+ }
+
+ } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+ ALOGV("INFO_OUTPUT_BUFFERS_CHANGED");
+ CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers(
+ &state->mOutBuffers));
+
+ ALOGV("got %zu output buffers", state->mOutBuffers.size());
+ } else if (err == INFO_FORMAT_CHANGED) {
+ sp<AMessage> format;
+ CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format));
+
+ ALOGV("INFO_FORMAT_CHANGED: %s",
+ format->debugString().c_str());
+ } else {
+ CHECK_EQ(err, -EAGAIN);
+ }
+
+ tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
+
+ tryDrainOutputBuffer(
+ filterState, surface, renderSurface,
+ useTimestamp, &startTimeRender);
+ }
+ }
+
+ // after EOS on decoder, let filter reach EOS
+ while (!filterState->mSawOutputEOS) {
+ tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState);
+
+ tryDrainOutputBuffer(
+ filterState, surface, renderSurface,
+ useTimestamp, &startTimeRender);
+ }
+
+ int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs;
+
+ for (size_t i = 0; i < stateByTrack.size(); ++i) {
+ CodecState *state = &stateByTrack.editValueAt(i);
+
+ CHECK_EQ((status_t)OK, state->mCodec->release());
+
+ printf("track %zu: %" PRId64 " frames decoded and filtered, "
+ "%.2f fps.\n", i, state->mNumBuffersDecoded,
+ state->mNumBuffersDecoded * 1E6 / elapsedTimeUs);
+ }
+
+ return 0;
+}
+
+} // namespace android
+
+int main(int argc, char **argv) {
+ using namespace android;
+
+ const char *me = argv[0];
+
+ bool useSurface = false;
+ bool renderSurface = false;
+ bool useTimestamp = false;
+ FilterType filterType = FILTERTYPE_ZERO;
+
+ int res;
+ while ((res = getopt(argc, argv, "bcnrszTRSh")) >= 0) {
+ switch (res) {
+ case 'b':
+ {
+ filterType = FILTERTYPE_INTRINSIC_BLUR;
+ break;
+ }
+ case 'c':
+ {
+ filterType = FILTERTYPE_RS_ARGB_TO_RGBA;
+ break;
+ }
+ case 'n':
+ {
+ filterType = FILTERTYPE_RS_NIGHT_VISION;
+ break;
+ }
+ case 'r':
+ {
+ filterType = FILTERTYPE_RS_SATURATION;
+ break;
+ }
+ case 's':
+ {
+ filterType = FILTERTYPE_SATURATION;
+ break;
+ }
+ case 'z':
+ {
+ filterType = FILTERTYPE_ZERO;
+ break;
+ }
+ case 'T':
+ {
+ useTimestamp = true;
+ }
+ // fall through
+ case 'R':
+ {
+ renderSurface = true;
+ }
+ // fall through
+ case 'S':
+ {
+ useSurface = true;
+ break;
+ }
+ case '?':
+ case 'h':
+ default:
+ {
+ usage(me);
+ break;
+ }
+ }
+ }
+
+ argc -= optind;
+ argv += optind;
+
+ if (argc != 1) {
+ usage(me);
+ }
+
+ ProcessState::self()->startThreadPool();
+
+ DataSource::RegisterDefaultSniffers();
+
+ android::sp<ALooper> looper = new ALooper;
+ looper->start();
+
+ android::sp<SurfaceComposerClient> composerClient;
+ android::sp<SurfaceControl> control;
+ android::sp<Surface> surface;
+
+ if (useSurface) {
+ composerClient = new SurfaceComposerClient;
+ CHECK_EQ((status_t)OK, composerClient->initCheck());
+
+ android::sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
+ ISurfaceComposer::eDisplayIdMain));
+ DisplayInfo info;
+ SurfaceComposerClient::getDisplayInfo(display, &info);
+ ssize_t displayWidth = info.w;
+ ssize_t displayHeight = info.h;
+
+ ALOGV("display is %zd x %zd", displayWidth, displayHeight);
+
+ control = composerClient->createSurface(
+ String8("A Surface"), displayWidth, displayHeight,
+ PIXEL_FORMAT_RGBA_8888, 0);
+
+ CHECK(control != NULL);
+ CHECK(control->isValid());
+
+ SurfaceComposerClient::openGlobalTransaction();
+ CHECK_EQ((status_t)OK, control->setLayer(INT_MAX));
+ CHECK_EQ((status_t)OK, control->show());
+ SurfaceComposerClient::closeGlobalTransaction();
+
+ surface = control->getSurface();
+ CHECK(surface != NULL);
+ }
+
+ decode(looper, argv[0], surface, renderSurface, useTimestamp, filterType);
+
+ if (useSurface) {
+ composerClient->dispose();
+ }
+
+ looper->stop();
+
+ return 0;
+}
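
The three RSFilter callbacks at the top of this file show the pattern for plugging a custom RenderScript kernel into the android.filter.RenderScript component: implement RenderScriptWrapper::RSFilterCallback, run the reflected ScriptC_* kernel in processBuffers(), and attach the wrapper to the filter's input format under the "rs-wrapper" key before configure(). A sketch of one more filter following the same shape; ScriptC_invert and its invert.rs source are hypothetical, only the wiring mirrors the code above:

struct InvertRSFilter : RenderScriptWrapper::RSFilterCallback {
    void init(RSC::sp<RSC::RS> context) {
        mScript = new ScriptC_invert(context);  // reflected from invert.rs
    }

    virtual status_t processBuffers(
            RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) {
        mScript->forEach_root(inBuffer, outBuffer);
        return OK;
    }

    status_t handleSetParameters(const sp<AMessage> &msg) {
        return OK;
    }

private:
    RSC::sp<ScriptC_invert> mScript;
};

// Wiring, as in decode() above:
//   InvertRSFilter *filter = new InvertRSFilter;
//   filter->init(context);
//   sp<RenderScriptWrapper> wrapper = new RenderScriptWrapper;
//   wrapper->mContext = context.get();
//   wrapper->mCallback = filter;
//   vidFormat->setObject("rs-wrapper", wrapper);
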
diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp
index f4a33e8..461b56c 100644
--- a/cmds/stagefright/muxer.cpp
+++ b/cmds/stagefright/muxer.cpp
@@ -17,6 +17,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "muxer"
#include <inttypes.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
#include <utils/Log.h>
#include <binder/ProcessState.h>
@@ -72,8 +75,15 @@ static int muxing(
ALOGV("input file %s, output file %s", path, outputFileName);
ALOGV("useAudio %d, useVideo %d", useAudio, useVideo);
- sp<MediaMuxer> muxer = new MediaMuxer(outputFileName,
+ int fd = open(outputFileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+
+ if (fd < 0) {
+ ALOGE("couldn't open file");
+ return fd;
+ }
+ sp<MediaMuxer> muxer = new MediaMuxer(fd,
MediaMuxer::OUTPUT_FORMAT_MPEG_4);
+ close(fd);
size_t trackCount = extractor->countTracks();
// Map the extractor's track index to the muxer's track index.
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
index 9f547c7..2ad40bd 100644
--- a/cmds/stagefright/recordvideo.cpp
+++ b/cmds/stagefright/recordvideo.cpp
@@ -17,6 +17,10 @@
#include "SineSource.h"
#include <inttypes.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
#include <binder/ProcessState.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/AudioPlayer.h>
@@ -300,7 +304,13 @@ int main(int argc, char **argv) {
client.interface(), enc_meta, true /* createEncoder */, source,
0, preferSoftwareCodec ? OMXCodec::kPreferSoftwareCodecs : 0);
- sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+ int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ if (fd < 0) {
+ fprintf(stderr, "couldn't open file");
+ return 1;
+ }
+ sp<MPEG4Writer> writer = new MPEG4Writer(fd);
+ close(fd);
writer->addSource(encoder);
int64_t start = systemTime();
CHECK_EQ((status_t)OK, writer->start());
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 81edcb4..318b56d 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -19,6 +19,8 @@
#include <stdlib.h>
#include <string.h>
#include <sys/time.h>
+#include <sys/types.h>
+#include <sys/stat.h>
//#define LOG_NDEBUG 0
#define LOG_TAG "stagefright"
@@ -506,8 +508,13 @@ static void writeSourcesToMP4(
sp<MPEG4Writer> writer =
new MPEG4Writer(gWriteMP4Filename.string());
#else
+ int fd = open(gWriteMP4Filename.string(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ if (fd < 0) {
+ fprintf(stderr, "couldn't open file");
+ return;
+ }
sp<MPEG2TSWriter> writer =
- new MPEG2TSWriter(gWriteMP4Filename.string());
+ new MPEG2TSWriter(fd);
#endif
// at most one minute.