path: root/media
Diffstat (limited to 'media')
-rw-r--r--  media/libeffects/factory/EffectsFactory.c | 131
-rw-r--r--  media/libmedia/Android.mk | 13
-rw-r--r--  media/libmedia/AudioSystem.cpp | 18
-rw-r--r--  media/libmedia/AudioTrack.cpp | 218
-rw-r--r--  media/libmedia/AudioTrackShared.cpp | 227
-rw-r--r--  media/libmedia/IAudioPolicyService.cpp | 30
-rw-r--r--  media/libmedia/IMediaRecorder.cpp | 24
-rw-r--r--  media/libmedia/JetPlayer.cpp | 6
-rw-r--r--  media/libmedia/MediaProfiles.cpp | 26
-rw-r--r--  media/libmedia/SingleStateQueue.cpp | 106
-rw-r--r--  media/libmedia/SingleStateQueueInstantiations.cpp | 28
-rw-r--r--  media/libmedia/StringArray.cpp | 2
-rw-r--r--  media/libmedia/docs/Makefile | 2
-rw-r--r--  media/libmedia/docs/paused.dot | 85
-rw-r--r--  media/libmedia/mediaplayer.cpp | 11
-rw-r--r--  media/libmedia/mediarecorder.cpp | 26
-rw-r--r--  media/libmediaplayerservice/MediaRecorderClient.cpp | 11
-rw-r--r--  media/libmediaplayerservice/MediaRecorderClient.h | 1
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.cpp | 8
-rw-r--r--  media/libmediaplayerservice/StagefrightRecorder.h | 1
-rw-r--r--  media/libmediaplayerservice/nuplayer/Android.mk | 1
-rw-r--r--  media/libmediaplayerservice/nuplayer/MediaClock.cpp | 135
-rw-r--r--  media/libmediaplayerservice/nuplayer/MediaClock.h | 68
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 8
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 438
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h | 49
-rw-r--r--  media/libnbaio/Android.mk | 4
-rw-r--r--  media/libnbaio/MonoPipe.cpp | 2
-rw-r--r--  media/libnbaio/MonoPipeReader.cpp | 2
-rw-r--r--  media/libnbaio/Pipe.cpp | 2
-rw-r--r--  media/libnbaio/roundup.c | 32
-rw-r--r--  media/libstagefright/AACWriter.cpp | 24
-rw-r--r--  media/libstagefright/AMRWriter.cpp | 13
-rw-r--r--  media/libstagefright/Android.mk | 6
-rw-r--r--  media/libstagefright/FileSource.cpp | 4
-rw-r--r--  media/libstagefright/MPEG2TSWriter.cpp | 13
-rw-r--r--  media/libstagefright/MPEG4Writer.cpp | 25
-rw-r--r--  media/libstagefright/MediaCodec.cpp | 12
-rw-r--r--  media/libstagefright/MediaMuxer.cpp | 15
-rw-r--r--  media/libstagefright/Utils.cpp | 22
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 231
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.h | 14
-rw-r--r--  media/libstagefright/codecs/opus/dec/SoftOpus.cpp | 12
-rw-r--r--  media/libstagefright/colorconversion/SoftwareRenderer.cpp | 103
-rw-r--r--  media/libstagefright/filters/Android.mk | 27
-rw-r--r--  media/libstagefright/filters/ColorConvert.cpp | 111
-rw-r--r--  media/libstagefright/filters/ColorConvert.h | 43
-rw-r--r--  media/libstagefright/filters/GraphicBufferListener.cpp | 154
-rw-r--r--  media/libstagefright/filters/GraphicBufferListener.h | 70
-rw-r--r--  media/libstagefright/filters/IntrinsicBlurFilter.cpp | 99
-rw-r--r--  media/libstagefright/filters/IntrinsicBlurFilter.h | 50
-rw-r--r--  media/libstagefright/filters/MediaFilter.cpp | 816
-rw-r--r--  media/libstagefright/filters/RSFilter.cpp | 96
-rw-r--r--  media/libstagefright/filters/RSFilter.h | 53
-rw-r--r--  media/libstagefright/filters/SaturationFilter.cpp | 99
-rw-r--r--  media/libstagefright/filters/SaturationFilter.h | 52
-rw-r--r--  media/libstagefright/filters/SimpleFilter.cpp | 39
-rw-r--r--  media/libstagefright/filters/SimpleFilter.h | 52
-rw-r--r--  media/libstagefright/filters/ZeroFilter.cpp | 57
-rw-r--r--  media/libstagefright/filters/ZeroFilter.h | 43
-rw-r--r--  media/libstagefright/filters/saturation.rs | 40
-rw-r--r--  media/libstagefright/filters/saturationARGB.rs | 40
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.cpp | 46
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.h | 1
-rw-r--r--  media/libstagefright/webm/WebmWriter.cpp | 32
-rw-r--r--  media/libstagefright/webm/WebmWriter.h | 1
-rw-r--r--  media/mediaserver/Android.mk | 2
-rw-r--r--  media/ndk/NdkMediaCodec.cpp | 3
-rw-r--r--  media/ndk/NdkMediaExtractor.cpp | 3
69 files changed, 3212 insertions(+), 1026 deletions(-)
diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c
index 6d30d64..c310fe2 100644
--- a/media/libeffects/factory/EffectsFactory.c
+++ b/media/libeffects/factory/EffectsFactory.c
@@ -28,6 +28,7 @@
static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects
static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries
+static list_elem_t *gSkippedEffects; // list of effects skipped because of duplicate uuid
// list of effect_descriptor and list of sub effects : all currently loaded
// It does not contain effects without sub effects.
static list_sub_elem_t *gSubEffectList;
@@ -63,10 +64,10 @@ static int findEffect(const effect_uuid_t *type,
lib_entry_t **lib,
effect_descriptor_t **desc);
// To search a subeffect in the gSubEffectList
-int findSubEffect(const effect_uuid_t *uuid,
+static int findSubEffect(const effect_uuid_t *uuid,
lib_entry_t **lib,
effect_descriptor_t **desc);
-static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len);
+static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent);
static int stringToUuid(const char *str, effect_uuid_t *uuid);
static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen);
@@ -237,8 +238,8 @@ int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor)
}
#if (LOG_NDEBUG == 0)
- char str[256];
- dumpEffectDescriptor(pDescriptor, str, 256);
+ char str[512];
+ dumpEffectDescriptor(pDescriptor, str, sizeof(str), 0 /* indent */);
ALOGV("EffectQueryEffect() desc:%s", str);
#endif
pthread_mutex_unlock(&gLibLock);
@@ -503,15 +504,31 @@ int loadLibrary(cnode *root, const char *name)
audio_effect_library_t *desc;
list_elem_t *e;
lib_entry_t *l;
+ char path[PATH_MAX];
+ char *str;
+ size_t len;
node = config_find(root, PATH_TAG);
if (node == NULL) {
return -EINVAL;
}
+ // audio_effects.conf always specifies 32 bit lib path: convert to 64 bit path if needed
+ strlcpy(path, node->value, PATH_MAX);
+#ifdef __LP64__
+ str = strstr(path, "/lib/");
+ if (str == NULL)
+ return -EINVAL;
+ len = str - path;
+ path[len] = '\0';
+ strlcat(path, "/lib64/", PATH_MAX);
+ strlcat(path, node->value + len + strlen("/lib/"), PATH_MAX);
+#endif
+ if (strlen(path) >= PATH_MAX - 1)
+ return -EINVAL;
- hdl = dlopen(node->value, RTLD_NOW);
+ hdl = dlopen(path, RTLD_NOW);
if (hdl == NULL) {
- ALOGW("loadLibrary() failed to open %s", node->value);
+ ALOGW("loadLibrary() failed to open %s", path);
goto error;
}
@@ -535,7 +552,7 @@ int loadLibrary(cnode *root, const char *name)
// add entry for library in gLibraryList
l = malloc(sizeof(lib_entry_t));
l->name = strndup(name, PATH_MAX);
- l->path = strndup(node->value, PATH_MAX);
+ l->path = strndup(path, PATH_MAX);
l->handle = hdl;
l->desc = desc;
l->effects = NULL;
@@ -547,7 +564,7 @@ int loadLibrary(cnode *root, const char *name)
e->next = gLibraryList;
gLibraryList = e;
pthread_mutex_unlock(&gLibLock);
- ALOGV("getLibrary() linked library %p for path %s", l, node->value);
+ ALOGV("getLibrary() linked library %p for path %s", l, path);
return 0;
@@ -595,8 +612,8 @@ int addSubEffect(cnode *root)
return -EINVAL;
}
#if (LOG_NDEBUG==0)
- char s[256];
- dumpEffectDescriptor(d, s, 256);
+ char s[512];
+ dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);
ALOGV("addSubEffect() read descriptor %p:%s",d, s);
#endif
if (EFFECT_API_VERSION_MAJOR(d->apiVersion) !=
@@ -660,6 +677,13 @@ int loadEffect(cnode *root)
ALOGW("loadEffect() invalid uuid %s", node->value);
return -EINVAL;
}
+ lib_entry_t *tmp;
+ bool skip = false;
+ if (findEffect(NULL, &uuid, &tmp, NULL) == 0) {
+ ALOGW("skipping duplicate uuid %s %s", node->value,
+ node->next ? "and its sub-effects" : "");
+ skip = true;
+ }
d = malloc(sizeof(effect_descriptor_t));
if (l->desc->get_descriptor(&uuid, d) != 0) {
@@ -670,8 +694,8 @@ int loadEffect(cnode *root)
return -EINVAL;
}
#if (LOG_NDEBUG==0)
- char s[256];
- dumpEffectDescriptor(d, s, 256);
+ char s[512];
+ dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */);
ALOGV("loadEffect() read descriptor %p:%s",d, s);
#endif
if (EFFECT_API_VERSION_MAJOR(d->apiVersion) !=
@@ -682,8 +706,14 @@ int loadEffect(cnode *root)
}
e = malloc(sizeof(list_elem_t));
e->object = d;
- e->next = l->effects;
- l->effects = e;
+ if (skip) {
+ e->next = gSkippedEffects;
+ gSkippedEffects = e;
+ return -EINVAL;
+ } else {
+ e->next = l->effects;
+ l->effects = e;
+ }
// After the UUID node in the config_tree, if node->next is valid,
// that would be sub effect node.
@@ -876,22 +906,30 @@ int findEffect(const effect_uuid_t *type,
return ret;
}
-void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len) {
+void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent) {
char s[256];
+ char ss[256];
+ char idt[indent + 1];
- snprintf(str, len, "\nEffect Descriptor %p:\n", desc);
- strncat(str, "- TYPE: ", len);
- uuidToString(&desc->uuid, s, 256);
- snprintf(str, len, "- UUID: %s\n", s);
- uuidToString(&desc->type, s, 256);
- snprintf(str, len, "- TYPE: %s\n", s);
- sprintf(s, "- apiVersion: %08X\n- flags: %08X\n",
- desc->apiVersion, desc->flags);
- strncat(str, s, len);
- sprintf(s, "- name: %s\n", desc->name);
- strncat(str, s, len);
- sprintf(s, "- implementor: %s\n", desc->implementor);
- strncat(str, s, len);
+ memset(idt, ' ', indent);
+ idt[indent] = 0;
+
+ str[0] = 0;
+
+ snprintf(s, sizeof(s), "%s%s / %s\n", idt, desc->name, desc->implementor);
+ strlcat(str, s, len);
+
+ uuidToString(&desc->uuid, s, sizeof(s));
+ snprintf(ss, sizeof(ss), "%s UUID: %s\n", idt, s);
+ strlcat(str, ss, len);
+
+ uuidToString(&desc->type, s, sizeof(s));
+ snprintf(ss, sizeof(ss), "%s TYPE: %s\n", idt, s);
+ strlcat(str, ss, len);
+
+ sprintf(s, "%s apiVersion: %08X\n%s flags: %08X\n", idt,
+ desc->apiVersion, idt, desc->flags);
+ strlcat(str, s, len);
}
int stringToUuid(const char *str, effect_uuid_t *uuid)
@@ -934,3 +972,40 @@ int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen)
return 0;
}
+int EffectDumpEffects(int fd) {
+ char s[512];
+ list_elem_t *e = gLibraryList;
+ lib_entry_t *l = NULL;
+ effect_descriptor_t *d = NULL;
+ int found = 0;
+ int ret = 0;
+
+ while (e) {
+ l = (lib_entry_t *)e->object;
+ list_elem_t *efx = l->effects;
+ dprintf(fd, "Library %s\n", l->name);
+ if (!efx) {
+ dprintf(fd, " (no effects)\n");
+ }
+ while (efx) {
+ d = (effect_descriptor_t *)efx->object;
+ dumpEffectDescriptor(d, s, sizeof(s), 2);
+ dprintf(fd, "%s", s);
+ efx = efx->next;
+ }
+ e = e->next;
+ }
+
+ e = gSkippedEffects;
+ if (e) {
+ dprintf(fd, "Skipped effects\n");
+ while(e) {
+ d = (effect_descriptor_t *)e->object;
+ dumpEffectDescriptor(d, s, sizeof(s), 2 /* indent */);
+ dprintf(fd, "%s", s);
+ e = e->next;
+ }
+ }
+ return ret;
+}
+
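For reference, a minimal standalone sketch (not part of the commit) of the 32-bit to
64-bit library path rewrite that loadLibrary() now performs on __LP64__ builds. The
helper name and the snprintf-based formulation are illustrative only; the change above
uses strlcpy/strlcat into a PATH_MAX buffer.

    #include <stdio.h>
    #include <string.h>

    /* Hypothetical helper: rewrite ".../lib/..." to ".../lib64/...".
     * Returns 0 on success, -1 if the input has no "/lib/" component or the
     * result would not fit in the output buffer. */
    static int to_lib64_path(const char *in, char *out, size_t outLen) {
        const char *str = strstr(in, "/lib/");
        if (str == NULL) {
            return -1;
        }
        int n = snprintf(out, outLen, "%.*s/lib64/%s",
                         (int)(str - in), in, str + strlen("/lib/"));
        return (n < 0 || (size_t)n >= outLen) ? -1 : 0;
    }

    /* e.g. "/system/lib/soundfx/libbundlewrapper.so"
     *   -> "/system/lib64/soundfx/libbundlewrapper.so" */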
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 6c585fb..5378bf2 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -61,15 +61,11 @@ LOCAL_SRC_FILES:= \
StringArray.cpp \
AudioPolicy.cpp
-LOCAL_SRC_FILES += ../libnbaio/roundup.c
-
LOCAL_SHARED_LIBRARIES := \
libui liblog libcutils libutils libbinder libsonivox libicuuc libicui18n libexpat \
libcamera_client libstagefright_foundation \
libgui libdl libaudioutils libnbaio
-LOCAL_STATIC_LIBRARIES += libinstantssq
-
LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper
LOCAL_MODULE:= libmedia
@@ -85,12 +81,3 @@ LOCAL_C_INCLUDES := \
include $(BUILD_SHARED_LIBRARY)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES += SingleStateQueue.cpp
-LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"'
-
-LOCAL_MODULE := libinstantssq
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 9cae21c..f5a5712 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -499,8 +499,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
OutputDescriptor *outputDesc = new OutputDescriptor(*desc);
gOutputs.add(ioHandle, outputDesc);
- ALOGV("ioConfigChanged() new output samplingRate %u, format %#x channel mask %#x frameCount %zu "
- "latency %d",
+ ALOGV("ioConfigChanged() new output samplingRate %u, format %#x channel mask %#x "
+ "frameCount %zu latency %d",
outputDesc->samplingRate, outputDesc->format, outputDesc->channelMask,
outputDesc->frameCount, outputDesc->latency);
} break;
@@ -523,8 +523,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
if (param2 == NULL) break;
desc = (const OutputDescriptor *)param2;
- ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x channel mask %#x "
- "frameCount %zu latency %d",
+ ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x "
+ "channel mask %#x frameCount %zu latency %d",
ioHandle, desc->samplingRate, desc->format,
desc->channelMask, desc->frameCount, desc->latency);
OutputDescriptor *outputDesc = gOutputs.valueAt(index);
@@ -590,18 +590,22 @@ const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service()
status_t AudioSystem::setDeviceConnectionState(audio_devices_t device,
audio_policy_dev_state_t state,
- const char *device_address)
+ const char *device_address,
+ const char *device_name)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
const char *address = "";
+ const char *name = "";
if (aps == 0) return PERMISSION_DENIED;
if (device_address != NULL) {
address = device_address;
}
-
- return aps->setDeviceConnectionState(device, state, address);
+ if (device_name != NULL) {
+ name = device_name;
+ }
+ return aps->setDeviceConnectionState(device, state, address, name);
}
audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device,
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 735db5c..d4bacc0 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -33,11 +33,16 @@
#define WAIT_PERIOD_MS 10
#define WAIT_STREAM_END_TIMEOUT_SEC 120
-
+static const int kMaxLoopCountNotifications = 32;
namespace android {
// ---------------------------------------------------------------------------
+template <typename T>
+const T &min(const T &x, const T &y) {
+ return x < y ? x : y;
+}
+
static int64_t convertTimespecToUs(const struct timespec &tv)
{
return tv.tv_sec * 1000000ll + tv.tv_nsec / 1000;
@@ -317,12 +322,6 @@ status_t AudioTrack::set(
uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
mChannelCount = channelCount;
- // AudioFlinger does not currently support 8-bit data in shared memory
- if (format == AUDIO_FORMAT_PCM_8_BIT && sharedBuffer != 0) {
- ALOGE("8-bit data in shared memory is not supported");
- return BAD_VALUE;
- }
-
// force direct flag if format is not linear PCM
// or offload was requested
if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)
@@ -346,12 +345,9 @@ status_t AudioTrack::set(
} else {
mFrameSize = sizeof(uint8_t);
}
- mFrameSizeAF = mFrameSize;
} else {
ALOG_ASSERT(audio_is_linear_pcm(format));
mFrameSize = channelCount * audio_bytes_per_sample(format);
- mFrameSizeAF = channelCount * audio_bytes_per_sample(
- format == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : format);
// createTrack will return an error if PCM format is not supported by server,
// so no need to check for specific PCM formats here
}
@@ -420,7 +416,10 @@ status_t AudioTrack::set(
mStatus = NO_ERROR;
mState = STATE_STOPPED;
mUserData = user;
- mLoopPeriod = 0;
+ mLoopCount = 0;
+ mLoopStart = 0;
+ mLoopEnd = 0;
+ mLoopCountNotified = 0;
mMarkerPosition = 0;
mMarkerReached = false;
mNewPosition = 0;
@@ -531,14 +530,12 @@ void AudioTrack::stop()
// the playback head position will reset to 0, so if a marker is set, we need
// to activate it again
mMarkerReached = false;
-#if 0
- // Force flush if a shared buffer is used otherwise audioflinger
- // will not stop before end of buffer is reached.
- // It may be needed to make sure that we stop playback, likely in case looping is on.
+
if (mSharedBuffer != 0) {
- flush_l();
+ // clear buffer position and loop count.
+ mStaticProxy->setBufferPositionAndLoop(0 /* position */,
+ 0 /* loopStart */, 0 /* loopEnd */, 0 /* loopCount */);
}
-#endif
sp<AudioTrackThread> t = mAudioTrackThread;
if (t != 0) {
@@ -740,10 +737,15 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount
void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount)
{
- // Setting the loop will reset next notification update period (like setPosition).
- mNewPosition = updateAndGetPosition_l() + mUpdatePeriod;
- mLoopPeriod = loopCount != 0 ? loopEnd - loopStart : 0;
+ // We do not update the periodic notification point.
+ // mNewPosition = updateAndGetPosition_l() + mUpdatePeriod;
+ mLoopCount = loopCount;
+ mLoopEnd = loopEnd;
+ mLoopStart = loopStart;
+ mLoopCountNotified = loopCount;
mStaticProxy->setLoop(loopStart, loopEnd, loopCount);
+
+ // Waking the AudioTrackThread is not needed as this cannot be called when active.
}
status_t AudioTrack::setMarkerPosition(uint32_t marker)
@@ -757,6 +759,10 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker)
mMarkerPosition = marker;
mMarkerReached = false;
+ sp<AudioTrackThread> t = mAudioTrackThread;
+ if (t != 0) {
+ t->wake();
+ }
return NO_ERROR;
}
@@ -786,6 +792,10 @@ status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod)
mNewPosition = updateAndGetPosition_l() + updatePeriod;
mUpdatePeriod = updatePeriod;
+ sp<AudioTrackThread> t = mAudioTrackThread;
+ if (t != 0) {
+ t->wake();
+ }
return NO_ERROR;
}
@@ -823,12 +833,11 @@ status_t AudioTrack::setPosition(uint32_t position)
if (mState == STATE_ACTIVE) {
return INVALID_OPERATION;
}
+ // After setting the position, use full update period before notification.
mNewPosition = updateAndGetPosition_l() + mUpdatePeriod;
- mLoopPeriod = 0;
- // FIXME Check whether loops and setting position are incompatible in old code.
- // If we use setLoop for both purposes we lose the capability to set the position while looping.
- mStaticProxy->setLoop(position, mFrameCount, 0);
+ mStaticProxy->setBufferPosition(position);
+ // Waking the AudioTrackThread is not needed as this cannot be called when active.
return NO_ERROR;
}
@@ -893,10 +902,18 @@ status_t AudioTrack::reload()
return INVALID_OPERATION;
}
mNewPosition = mUpdatePeriod;
- mLoopPeriod = 0;
- // FIXME The new code cannot reload while keeping a loop specified.
- // Need to check how the old code handled this, and whether it's a significant change.
- mStaticProxy->setLoop(0, mFrameCount, 0);
+ (void) updateAndGetPosition_l();
+ mPosition = 0;
+#if 0
+ // The documentation is not clear on the behavior of reload() and the restoration
+ // of loop count. Historically we have not restored loop count, start, end,
+ // but it makes sense if one desires to repeat playing a particular sound.
+ if (mLoopCount != 0) {
+ mLoopCountNotified = mLoopCount;
+ mStaticProxy->setLoop(mLoopStart, mLoopEnd, mLoopCount);
+ }
+#endif
+ mStaticProxy->setBufferPosition(0);
return NO_ERROR;
}
@@ -1019,12 +1036,12 @@ status_t AudioTrack::createTrack_l()
mNotificationFramesAct = frameCount;
}
} else if (mSharedBuffer != 0) {
-
- // Ensure that buffer alignment matches channel count
- // 8-bit data in shared memory is not currently supported by AudioFlinger
- size_t alignment = audio_bytes_per_sample(
- mFormat == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : mFormat);
+ // FIXME: Ensure client side memory buffers need
+ // not have additional alignment beyond sample
+ // (e.g. 16 bit stereo accessed as 32 bit frame).
+ size_t alignment = audio_bytes_per_sample(mFormat);
if (alignment & 1) {
+ // for AUDIO_FORMAT_PCM_24_BIT_PACKED (not exposed through Java).
alignment = 1;
}
if (mChannelCount > 1) {
@@ -1042,7 +1059,7 @@ status_t AudioTrack::createTrack_l()
// there's no frameCount parameter.
// But when initializing a shared buffer AudioTrack via set(),
// there _is_ a frameCount parameter. We silently ignore it.
- frameCount = mSharedBuffer->size() / mFrameSizeAF;
+ frameCount = mSharedBuffer->size() / mFrameSize;
} else if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
@@ -1103,10 +1120,7 @@ status_t AudioTrack::createTrack_l()
// but we will still need the original value also
sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
mSampleRate,
- // AudioFlinger only sees 16-bit PCM
- mFormat == AUDIO_FORMAT_PCM_8_BIT &&
- !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT) ?
- AUDIO_FORMAT_PCM_16_BIT : mFormat,
+ mFormat,
mChannelMask,
&temp,
&trackFlags,
@@ -1230,9 +1244,9 @@ status_t AudioTrack::createTrack_l()
// update proxy
if (mSharedBuffer == 0) {
mStaticProxy.clear();
- mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
+ mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);
} else {
- mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
+ mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize);
mProxy = mStaticProxy;
}
@@ -1352,7 +1366,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *re
} while ((status == DEAD_OBJECT) && (tryCounter-- > 0));
audioBuffer->frameCount = buffer.mFrameCount;
- audioBuffer->size = buffer.mFrameCount * mFrameSizeAF;
+ audioBuffer->size = buffer.mFrameCount * mFrameSize;
audioBuffer->raw = buffer.mRaw;
if (nonContig != NULL) {
*nonContig = buffer.mNonContig;
@@ -1366,7 +1380,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer)
return;
}
- size_t stepCount = audioBuffer->size / mFrameSizeAF;
+ size_t stepCount = audioBuffer->size / mFrameSize;
if (stepCount == 0) {
return;
}
@@ -1432,14 +1446,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize, bool blocking)
}
size_t toWrite;
- if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) {
- // Divide capacity by 2 to take expansion into account
- toWrite = audioBuffer.size >> 1;
- memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) buffer, toWrite);
- } else {
- toWrite = audioBuffer.size;
- memcpy(audioBuffer.i8, buffer, toWrite);
- }
+ toWrite = audioBuffer.size;
+ memcpy(audioBuffer.i8, buffer, toWrite);
buffer = ((const char *) buffer) + toWrite;
userSize -= toWrite;
written += toWrite;
@@ -1559,9 +1567,8 @@ nsecs_t AudioTrack::processAudioBuffer()
// that the upper layers can recreate the track
if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) {
status_t status = restoreTrack_l("processAudioBuffer");
- mLock.unlock();
- // Run again immediately, but with a new IAudioTrack
- return 0;
+ // after restoration, continue below to make sure that the loop and buffer events
+ // are notified because they have been cleared from mCblk->mFlags above.
}
}
@@ -1610,7 +1617,6 @@ nsecs_t AudioTrack::processAudioBuffer()
}
// Cache other fields that will be needed soon
- uint32_t loopPeriod = mLoopPeriod;
uint32_t sampleRate = mSampleRate;
uint32_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
@@ -1622,8 +1628,30 @@ nsecs_t AudioTrack::processAudioBuffer()
uint32_t sequence = mSequence;
sp<AudioTrackClientProxy> proxy = mProxy;
+ // Determine the number of new loop callback(s) that will be needed, while locked.
+ int loopCountNotifications = 0;
+ uint32_t loopPeriod = 0; // time in frames for next EVENT_LOOP_END or EVENT_BUFFER_END
+
+ if (mLoopCount > 0) {
+ int loopCount;
+ size_t bufferPosition;
+ mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount);
+ loopPeriod = ((loopCount > 0) ? mLoopEnd : mFrameCount) - bufferPosition;
+ loopCountNotifications = min(mLoopCountNotified - loopCount, kMaxLoopCountNotifications);
+ mLoopCountNotified = loopCount; // discard any excess notifications
+ } else if (mLoopCount < 0) {
+ // FIXME: We're not accurate with notification count and position with infinite looping
+ // since loopCount from server side will always return -1 (we could decrement it).
+ size_t bufferPosition = mStaticProxy->getBufferPosition();
+ loopCountNotifications = int((flags & (CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL)) != 0);
+ loopPeriod = mLoopEnd - bufferPosition;
+ } else if (/* mLoopCount == 0 && */ mSharedBuffer != 0) {
+ size_t bufferPosition = mStaticProxy->getBufferPosition();
+ loopPeriod = mFrameCount - bufferPosition;
+ }
+
// These fields don't need to be cached, because they are assigned only by set():
- // mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFrameSizeAF, mFlags
+ // mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFlags
// mFlags is also assigned by createTrack_l(), but not the bit we care about.
mLock.unlock();
@@ -1662,10 +1690,9 @@ nsecs_t AudioTrack::processAudioBuffer()
if (newUnderrun) {
mCbf(EVENT_UNDERRUN, mUserData, NULL);
}
- // FIXME we will miss loops if loop cycle was signaled several times since last call
- // to processAudioBuffer()
- if (flags & (CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL)) {
+ while (loopCountNotifications > 0) {
mCbf(EVENT_LOOP_END, mUserData, NULL);
+ --loopCountNotifications;
}
if (flags & CBLK_BUFFER_END) {
mCbf(EVENT_BUFFER_END, mUserData, NULL);
@@ -1701,10 +1728,11 @@ nsecs_t AudioTrack::processAudioBuffer()
minFrames = markerPosition - position;
}
if (loopPeriod > 0 && loopPeriod < minFrames) {
+ // loopPeriod is already adjusted for actual position.
minFrames = loopPeriod;
}
- if (updatePeriod > 0 && updatePeriod < minFrames) {
- minFrames = updatePeriod;
+ if (updatePeriod > 0) {
+ minFrames = min(minFrames, uint32_t(newPosition - position));
}
// If > 0, poll periodically to recover from a stuck server. A good value is 2.
@@ -1767,13 +1795,6 @@ nsecs_t AudioTrack::processAudioBuffer()
}
}
- // Divide buffer size by 2 to take into account the expansion
- // due to 8 to 16 bit conversion: the callback must fill only half
- // of the destination buffer
- if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) {
- audioBuffer.size >>= 1;
- }
-
size_t reqSize = audioBuffer.size;
mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
size_t writtenSize = audioBuffer.size;
@@ -1793,13 +1814,7 @@ nsecs_t AudioTrack::processAudioBuffer()
return WAIT_PERIOD_MS * 1000000LL;
}
- if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) {
- // 8 to 16 bit conversion, note that source and destination are the same address
- memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) audioBuffer.i8, writtenSize);
- audioBuffer.size <<= 1;
- }
-
- size_t releasedFrames = audioBuffer.size / mFrameSizeAF;
+ size_t releasedFrames = audioBuffer.size / mFrameSize;
audioBuffer.frameCount = releasedFrames;
mRemainingFrames -= releasedFrames;
if (misalignment >= releasedFrames) {
@@ -1856,7 +1871,11 @@ status_t AudioTrack::restoreTrack_l(const char *from)
}
// save the old static buffer position
- size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0;
+ size_t bufferPosition = 0;
+ int loopCount = 0;
+ if (mStaticProxy != 0) {
+ mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount);
+ }
// If a new IAudioTrack is successfully created, createTrack_l() will modify the
// following member variables: mAudioTrack, mCblkMemory and mCblk.
@@ -1865,30 +1884,26 @@ status_t AudioTrack::restoreTrack_l(const char *from)
result = createTrack_l();
// take the frames that will be lost by track recreation into account in saved position
+ // For streaming tracks, this is the amount we obtained from the user/client
+ // (not the number actually consumed at the server - those are already lost).
(void) updateAndGetPosition_l();
- mPosition = mReleased;
+ if (mStaticProxy != 0) {
+ mPosition = mReleased;
+ }
if (result == NO_ERROR) {
- // continue playback from last known position, but
- // don't attempt to restore loop after invalidation; it's difficult and not worthwhile
- if (mStaticProxy != NULL) {
- mLoopPeriod = 0;
- mStaticProxy->setLoop(bufferPosition, mFrameCount, 0);
- }
- // FIXME How do we simulate the fact that all frames present in the buffer at the time of
- // track destruction have been played? This is critical for SoundPool implementation
- // This must be broken, and needs to be tested/debugged.
-#if 0
- // restore write index and set other indexes to reflect empty buffer status
- if (!strcmp(from, "start")) {
- // Make sure that a client relying on callback events indicating underrun or
- // the actual amount of audio frames played (e.g SoundPool) receives them.
- if (mSharedBuffer == 0) {
- // restart playback even if buffer is not completely filled.
- android_atomic_or(CBLK_FORCEREADY, &mCblk->mFlags);
+ // Continue playback from last known position and restore loop.
+ if (mStaticProxy != 0) {
+ if (loopCount != 0) {
+ mStaticProxy->setBufferPositionAndLoop(bufferPosition,
+ mLoopStart, mLoopEnd, loopCount);
+ } else {
+ mStaticProxy->setBufferPosition(bufferPosition);
+ if (bufferPosition == mFrameCount) {
+ ALOGD("restoring track at end of static buffer");
+ }
}
}
-#endif
if (mState == STATE_ACTIVE) {
result = mAudioTrack->start();
}
@@ -2148,8 +2163,8 @@ bool AudioTrack::AudioTrackThread::threadLoop()
case NS_NEVER:
return false;
case NS_WHENEVER:
- // FIXME increase poll interval, or make event-driven
- ns = 1000000000LL;
+ // Event driven: call wake() when callback notifications conditions change.
+ ns = INT64_MAX;
// fall through
default:
LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
@@ -2182,6 +2197,17 @@ void AudioTrack::AudioTrackThread::resume()
}
}
+void AudioTrack::AudioTrackThread::wake()
+{
+ AutoMutex _l(mMyLock);
+ if (!mPaused && mPausedInt && mPausedNs > 0) {
+ // audio track is active and internally paused with timeout.
+ mIgnoreNextPausedInt = true;
+ mPausedInt = false;
+ mMyCond.signal();
+ }
+}
+
void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns)
{
AutoMutex _l(mMyLock);
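A small model (not part of the commit; the helper name is hypothetical) of the
loop-notification bookkeeping that processAudioBuffer() now performs: the number of
EVENT_LOOP_END callbacks is derived from how far the server-side loop count has dropped
since the last pass, capped at kMaxLoopCountNotifications, rather than from a single
CBLK_LOOP_CYCLE flag.

    #include <algorithm>
    #include <cassert>

    static const int kMaxLoopCountNotifications = 32;      // same cap as above

    // Returns how many EVENT_LOOP_END callbacks to deliver now, and records the
    // server-side loop count so already-delivered loops are not re-notified.
    static int pendingLoopNotifications(int &loopCountNotified, int serverLoopCount) {
        int n = std::min(loopCountNotified - serverLoopCount, kMaxLoopCountNotifications);
        loopCountNotified = serverLoopCount;                // discard any excess notifications
        return n;
    }

    int main() {
        int notified = 10;                                   // client requested 10 loops
        assert(pendingLoopNotifications(notified, 7) == 3);  // server looped 3 times since last pass
        assert(pendingLoopNotifications(notified, 7) == 0);  // nothing new: no further callbacks
        return 0;
    }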
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index ff24475..08241e2 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -28,7 +28,21 @@ namespace android {
// used to clamp a value to size_t. TODO: move to another file.
template <typename T>
size_t clampToSize(T x) {
- return x > SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
+ return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x;
+}
+
+// incrementSequence is used to determine the next sequence value
+// for the loop and position sequence counters. It should return
+// a value between "other" + 1 and "other" + INT32_MAX, the choice of
+// which needs to be the "least recently used" sequence value for "self".
+// In general, this means (new_self) returned is max(self, other) + 1.
+
+static uint32_t incrementSequence(uint32_t self, uint32_t other) {
+ int32_t diff = self - other;
+ if (diff >= 0 && diff < INT32_MAX) {
+ return self + 1; // we're already ahead of other.
+ }
+ return other + 1; // we're behind, so move just ahead of other.
}
audio_track_cblk_t::audio_track_cblk_t()
@@ -485,8 +499,11 @@ end:
StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers,
size_t frameCount, size_t frameSize)
: AudioTrackClientProxy(cblk, buffers, frameCount, frameSize),
- mMutator(&cblk->u.mStatic.mSingleStateQueue), mBufferPosition(0)
+ mMutator(&cblk->u.mStatic.mSingleStateQueue),
+ mPosLoopObserver(&cblk->u.mStatic.mPosLoopQueue)
{
+ memset(&mState, 0, sizeof(mState));
+ memset(&mPosLoop, 0, sizeof(mPosLoop));
}
void StaticAudioTrackClientProxy::flush()
@@ -501,30 +518,72 @@ void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int
// FIXME Should return an error status
return;
}
- StaticAudioTrackState newState;
- newState.mLoopStart = (uint32_t) loopStart;
- newState.mLoopEnd = (uint32_t) loopEnd;
- newState.mLoopCount = loopCount;
- size_t bufferPosition;
- if (loopCount == 0 || (bufferPosition = getBufferPosition()) >= loopEnd) {
- bufferPosition = loopStart;
+ mState.mLoopStart = (uint32_t) loopStart;
+ mState.mLoopEnd = (uint32_t) loopEnd;
+ mState.mLoopCount = loopCount;
+ mState.mLoopSequence = incrementSequence(mState.mLoopSequence, mState.mPositionSequence);
+ // set patch-up variables until the mState is acknowledged by the ServerProxy.
+ // observed buffer position and loop count will freeze until then to give the
+ // illusion of a synchronous change.
+ getBufferPositionAndLoopCount(NULL, NULL);
+ // preserve behavior to restart at mState.mLoopStart if position exceeds mState.mLoopEnd.
+ if (mState.mLoopCount != 0 && mPosLoop.mBufferPosition >= mState.mLoopEnd) {
+ mPosLoop.mBufferPosition = mState.mLoopStart;
}
- mBufferPosition = bufferPosition; // snapshot buffer position until loop is acknowledged.
- (void) mMutator.push(newState);
+ mPosLoop.mLoopCount = mState.mLoopCount;
+ (void) mMutator.push(mState);
+}
+
+void StaticAudioTrackClientProxy::setBufferPosition(size_t position)
+{
+ // This can only happen on a 64-bit client
+ if (position > UINT32_MAX) {
+ // FIXME Should return an error status
+ return;
+ }
+ mState.mPosition = (uint32_t) position;
+ mState.mPositionSequence = incrementSequence(mState.mPositionSequence, mState.mLoopSequence);
+ // set patch-up variables until the mState is acknowledged by the ServerProxy.
+ // observed buffer position and loop count will freeze until then to give the
+ // illusion of a synchronous change.
+ if (mState.mLoopCount > 0) { // only check if loop count is changing
+ getBufferPositionAndLoopCount(NULL, NULL); // get last position
+ }
+ mPosLoop.mBufferPosition = position;
+ if (position >= mState.mLoopEnd) {
+ // no ongoing loop is possible if position is greater than loopEnd.
+ mPosLoop.mLoopCount = 0;
+ }
+ (void) mMutator.push(mState);
+}
+
+void StaticAudioTrackClientProxy::setBufferPositionAndLoop(size_t position, size_t loopStart,
+ size_t loopEnd, int loopCount)
+{
+ setLoop(loopStart, loopEnd, loopCount);
+ setBufferPosition(position);
}
size_t StaticAudioTrackClientProxy::getBufferPosition()
{
- size_t bufferPosition;
- if (mMutator.ack()) {
- bufferPosition = (size_t) mCblk->u.mStatic.mBufferPosition;
- if (bufferPosition > mFrameCount) {
- bufferPosition = mFrameCount;
- }
- } else {
- bufferPosition = mBufferPosition;
+ getBufferPositionAndLoopCount(NULL, NULL);
+ return mPosLoop.mBufferPosition;
+}
+
+void StaticAudioTrackClientProxy::getBufferPositionAndLoopCount(
+ size_t *position, int *loopCount)
+{
+ if (mMutator.ack() == StaticAudioTrackSingleStateQueue::SSQ_DONE) {
+ if (mPosLoopObserver.poll(mPosLoop)) {
+ ; // a valid mPosLoop should be available if ackDone is true.
+ }
+ }
+ if (position != NULL) {
+ *position = mPosLoop.mBufferPosition;
+ }
+ if (loopCount != NULL) {
+ *loopCount = mPosLoop.mLoopCount;
}
- return bufferPosition;
}
// ---------------------------------------------------------------------------
@@ -560,7 +619,8 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
ssize_t filled = rear - newFront;
// Rather than shutting down on a corrupt flush, just treat it as a full flush
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, filled %d=%#x",
+ ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, "
+ "filled %d=%#x",
mFlush, flush, front, rear, mask, newFront, filled, filled);
newFront = rear;
}
@@ -739,13 +799,12 @@ void AudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers,
size_t frameCount, size_t frameSize)
: AudioTrackServerProxy(cblk, buffers, frameCount, frameSize),
- mObserver(&cblk->u.mStatic.mSingleStateQueue), mPosition(0),
+ mObserver(&cblk->u.mStatic.mSingleStateQueue),
+ mPosLoopMutator(&cblk->u.mStatic.mPosLoopQueue),
mFramesReadySafe(frameCount), mFramesReady(frameCount),
mFramesReadyIsCalledByMultipleThreads(false)
{
- mState.mLoopStart = 0;
- mState.mLoopEnd = 0;
- mState.mLoopCount = 0;
+ memset(&mState, 0, sizeof(mState));
}
void StaticAudioTrackServerProxy::framesReadyIsCalledByMultipleThreads()
@@ -762,55 +821,97 @@ size_t StaticAudioTrackServerProxy::framesReady()
return mFramesReadySafe;
}
-ssize_t StaticAudioTrackServerProxy::pollPosition()
+status_t StaticAudioTrackServerProxy::updateStateWithLoop(
+ StaticAudioTrackState *localState, const StaticAudioTrackState &update) const
{
- size_t position = mPosition;
- StaticAudioTrackState state;
- if (mObserver.poll(state)) {
+ if (localState->mLoopSequence != update.mLoopSequence) {
bool valid = false;
- size_t loopStart = state.mLoopStart;
- size_t loopEnd = state.mLoopEnd;
- if (state.mLoopCount == 0) {
- if (loopStart > mFrameCount) {
- loopStart = mFrameCount;
- }
- // ignore loopEnd
- mPosition = position = loopStart;
- mFramesReady = mFrameCount - mPosition;
- mState.mLoopCount = 0;
+ const size_t loopStart = update.mLoopStart;
+ const size_t loopEnd = update.mLoopEnd;
+ size_t position = localState->mPosition;
+ if (update.mLoopCount == 0) {
valid = true;
- } else if (state.mLoopCount >= -1) {
+ } else if (update.mLoopCount >= -1) {
if (loopStart < loopEnd && loopEnd <= mFrameCount &&
loopEnd - loopStart >= MIN_LOOP) {
// If the current position is greater than the end of the loop
// we "wrap" to the loop start. This might cause an audible pop.
if (position >= loopEnd) {
- mPosition = position = loopStart;
- }
- if (state.mLoopCount == -1) {
- mFramesReady = INT64_MAX;
- } else {
- // mFramesReady is 64 bits to handle the effective number of frames
- // that the static audio track contains, including loops.
- // TODO: Later consider fixing overflow, but does not seem needed now
- // as will not overflow if loopStart and loopEnd are Java "ints".
- mFramesReady = int64_t(state.mLoopCount) * (loopEnd - loopStart)
- + mFrameCount - mPosition;
+ position = loopStart;
}
- mState = state;
valid = true;
}
}
- if (!valid || mPosition > mFrameCount) {
+ if (!valid || position > mFrameCount) {
+ return NO_INIT;
+ }
+ localState->mPosition = position;
+ localState->mLoopCount = update.mLoopCount;
+ localState->mLoopEnd = loopEnd;
+ localState->mLoopStart = loopStart;
+ localState->mLoopSequence = update.mLoopSequence;
+ }
+ return OK;
+}
+
+status_t StaticAudioTrackServerProxy::updateStateWithPosition(
+ StaticAudioTrackState *localState, const StaticAudioTrackState &update) const
+{
+ if (localState->mPositionSequence != update.mPositionSequence) {
+ if (update.mPosition > mFrameCount) {
+ return NO_INIT;
+ } else if (localState->mLoopCount != 0 && update.mPosition >= localState->mLoopEnd) {
+ localState->mLoopCount = 0; // disable loop count if position is beyond loop end.
+ }
+ localState->mPosition = update.mPosition;
+ localState->mPositionSequence = update.mPositionSequence;
+ }
+ return OK;
+}
+
+ssize_t StaticAudioTrackServerProxy::pollPosition()
+{
+ StaticAudioTrackState state;
+ if (mObserver.poll(state)) {
+ StaticAudioTrackState trystate = mState;
+ bool result;
+ const int32_t diffSeq = state.mLoopSequence - state.mPositionSequence;
+
+ if (diffSeq < 0) {
+ result = updateStateWithLoop(&trystate, state) == OK &&
+ updateStateWithPosition(&trystate, state) == OK;
+ } else {
+ result = updateStateWithPosition(&trystate, state) == OK &&
+ updateStateWithLoop(&trystate, state) == OK;
+ }
+ if (!result) {
+ mObserver.done();
+ // caution: no update occurs so server state will be inconsistent with client state.
ALOGE("%s client pushed an invalid state, shutting down", __func__);
mIsShutdown = true;
return (ssize_t) NO_INIT;
}
+ mState = trystate;
+ if (mState.mLoopCount == -1) {
+ mFramesReady = INT64_MAX;
+ } else if (mState.mLoopCount == 0) {
+ mFramesReady = mFrameCount - mState.mPosition;
+ } else if (mState.mLoopCount > 0) {
+ // TODO: Later consider fixing overflow, but does not seem needed now
+ // as will not overflow if loopStart and loopEnd are Java "ints".
+ mFramesReady = int64_t(mState.mLoopCount) * (mState.mLoopEnd - mState.mLoopStart)
+ + mFrameCount - mState.mPosition;
+ }
mFramesReadySafe = clampToSize(mFramesReady);
// This may overflow, but client is not supposed to rely on it
- mCblk->u.mStatic.mBufferPosition = (uint32_t) position;
+ StaticAudioTrackPosLoop posLoop;
+
+ posLoop.mLoopCount = (int32_t) mState.mLoopCount;
+ posLoop.mBufferPosition = (uint32_t) mState.mPosition;
+ mPosLoopMutator.push(posLoop);
+ mObserver.done(); // safe to read mStatic variables.
}
- return (ssize_t) position;
+ return (ssize_t) mState.mPosition;
}
status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush __unused)
@@ -849,7 +950,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
}
// As mFramesReady is the total remaining frames in the static audio track,
// it is always larger or equal to avail.
- LOG_ALWAYS_FATAL_IF(mFramesReady < avail);
+ LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail);
buffer->mNonContig = mFramesReady == INT64_MAX ? SIZE_MAX : clampToSize(mFramesReady - avail);
mUnreleased = avail;
return NO_ERROR;
@@ -858,7 +959,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
{
size_t stepCount = buffer->mFrameCount;
- LOG_ALWAYS_FATAL_IF(!(stepCount <= mFramesReady));
+ LOG_ALWAYS_FATAL_IF(!((int64_t) stepCount <= mFramesReady));
LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased));
if (stepCount == 0) {
// prevent accidental re-use of buffer
@@ -868,11 +969,12 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
}
mUnreleased -= stepCount;
audio_track_cblk_t* cblk = mCblk;
- size_t position = mPosition;
+ size_t position = mState.mPosition;
size_t newPosition = position + stepCount;
int32_t setFlags = 0;
if (!(position <= newPosition && newPosition <= mFrameCount)) {
- ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, mFrameCount);
+ ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position,
+ mFrameCount);
newPosition = mFrameCount;
} else if (mState.mLoopCount != 0 && newPosition == mState.mLoopEnd) {
newPosition = mState.mLoopStart;
@@ -885,7 +987,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
if (newPosition == mFrameCount) {
setFlags |= CBLK_BUFFER_END;
}
- mPosition = newPosition;
+ mState.mPosition = newPosition;
if (mFramesReady != INT64_MAX) {
mFramesReady -= stepCount;
}
@@ -893,7 +995,10 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
cblk->mServer += stepCount;
// This may overflow, but client is not supposed to rely on it
- cblk->u.mStatic.mBufferPosition = (uint32_t) newPosition;
+ StaticAudioTrackPosLoop posLoop;
+ posLoop.mBufferPosition = mState.mPosition;
+ posLoop.mLoopCount = mState.mLoopCount;
+ mPosLoopMutator.push(posLoop);
if (setFlags != 0) {
(void) android_atomic_or(setFlags, &cblk->mFlags);
// this would be a good place to wake a futex
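As a standalone illustration (not part of the commit) of how the new incrementSequence()
keeps the loop and position sequence counters ordered: the larger counter always
identifies the more recent of setLoop() / setBufferPosition(), even across unsigned
wrap-around, which is what pollPosition() relies on when deciding which update to apply
first.

    #include <cassert>
    #include <cstdint>

    // Same logic as incrementSequence() above: return a value just ahead of
    // max(self, other), using the signed 32-bit difference to survive wrap-around.
    static uint32_t incrementSequence(uint32_t self, uint32_t other) {
        int32_t diff = (int32_t)(self - other);
        if (diff >= 0 && diff < INT32_MAX) {
            return self + 1;   // already at or ahead of the other counter
        }
        return other + 1;      // behind: move just ahead of the other counter
    }

    int main() {
        uint32_t loopSeq = 0, posSeq = 0;
        loopSeq = incrementSequence(loopSeq, posSeq);   // setLoop()           -> 1
        posSeq  = incrementSequence(posSeq, loopSeq);   // setBufferPosition() -> 2
        loopSeq = incrementSequence(loopSeq, posSeq);   // setLoop() again     -> 3
        assert(posSeq < loopSeq);  // server applies the position update, then the loop
        return 0;
    }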
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index dbc7a9e..f2ff27b 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -73,6 +73,8 @@ enum {
REGISTER_POLICY_MIXES,
};
+#define MAX_ITEMS_PER_LIST 1024
+
class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
{
public:
@@ -84,13 +86,15 @@ public:
virtual status_t setDeviceConnectionState(
audio_devices_t device,
audio_policy_dev_state_t state,
- const char *device_address)
+ const char *device_address,
+ const char *device_name)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.writeInt32(static_cast <uint32_t>(device));
data.writeInt32(static_cast <uint32_t>(state));
data.writeCString(device_address);
+ data.writeCString(device_name);
remote()->transact(SET_DEVICE_CONNECTION_STATE, data, &reply);
return static_cast <status_t> (reply.readInt32());
}
@@ -726,9 +730,11 @@ status_t BnAudioPolicyService::onTransact(
audio_policy_dev_state_t state =
static_cast <audio_policy_dev_state_t>(data.readInt32());
const char *device_address = data.readCString();
+ const char *device_name = data.readCString();
reply->writeInt32(static_cast<uint32_t> (setDeviceConnectionState(device,
state,
- device_address)));
+ device_address,
+ device_name)));
return NO_ERROR;
} break;
@@ -1054,10 +1060,18 @@ status_t BnAudioPolicyService::onTransact(
audio_port_role_t role = (audio_port_role_t)data.readInt32();
audio_port_type_t type = (audio_port_type_t)data.readInt32();
unsigned int numPortsReq = data.readInt32();
+ if (numPortsReq > MAX_ITEMS_PER_LIST) {
+ numPortsReq = MAX_ITEMS_PER_LIST;
+ }
unsigned int numPorts = numPortsReq;
- unsigned int generation;
struct audio_port *ports =
(struct audio_port *)calloc(numPortsReq, sizeof(struct audio_port));
+ if (ports == NULL) {
+ reply->writeInt32(NO_MEMORY);
+ reply->writeInt32(0);
+ return NO_ERROR;
+ }
+ unsigned int generation;
status_t status = listAudioPorts(role, type, &numPorts, ports, &generation);
reply->writeInt32(status);
reply->writeInt32(numPorts);
@@ -1111,11 +1125,19 @@ status_t BnAudioPolicyService::onTransact(
case LIST_AUDIO_PATCHES: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
unsigned int numPatchesReq = data.readInt32();
+ if (numPatchesReq > MAX_ITEMS_PER_LIST) {
+ numPatchesReq = MAX_ITEMS_PER_LIST;
+ }
unsigned int numPatches = numPatchesReq;
- unsigned int generation;
struct audio_patch *patches =
(struct audio_patch *)calloc(numPatchesReq,
sizeof(struct audio_patch));
+ if (patches == NULL) {
+ reply->writeInt32(NO_MEMORY);
+ reply->writeInt32(0);
+ return NO_ERROR;
+ }
+ unsigned int generation;
status_t status = listAudioPatches(&numPatches, patches, &generation);
reply->writeInt32(status);
reply->writeInt32(numPatches);
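The LIST_AUDIO_PORTS and LIST_AUDIO_PATCHES handlers above share one defensive pattern:
clamp the client-supplied element count before allocating, and return NO_MEMORY instead
of writing through a NULL calloc result. Reduced to a sketch (names here are
hypothetical, not AOSP API):

    #include <cstdlib>

    static const unsigned int kMaxItemsPerList = 1024;   // mirrors MAX_ITEMS_PER_LIST

    // Allocate zeroed storage for an untrusted, parcel-supplied element count.
    // The count is clamped in place; the caller must still check for NULL.
    template <typename T>
    static T *allocFromUntrustedCount(unsigned int &numReq) {
        if (numReq > kMaxItemsPerList) {
            numReq = kMaxItemsPerList;                    // never trust the parcel's size field
        }
        return static_cast<T *>(calloc(numReq, sizeof(T)));
    }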
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index a733b68..9181f86 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -46,7 +46,6 @@ enum {
SET_OUTPUT_FORMAT,
SET_VIDEO_ENCODER,
SET_AUDIO_ENCODER,
- SET_OUTPUT_FILE_PATH,
SET_OUTPUT_FILE_FD,
SET_VIDEO_SIZE,
SET_VIDEO_FRAMERATE,
@@ -158,16 +157,6 @@ public:
return reply.readInt32();
}
- status_t setOutputFile(const char* path)
- {
- ALOGV("setOutputFile(%s)", path);
- Parcel data, reply;
- data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeCString(path);
- remote()->transact(SET_OUTPUT_FILE_PATH, data, &reply);
- return reply.readInt32();
- }
-
status_t setOutputFile(int fd, int64_t offset, int64_t length) {
ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
Parcel data, reply;
@@ -300,7 +289,8 @@ IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder");
// ----------------------------------------------------------------------
status_t BnMediaRecorder::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+ uint32_t code, const Parcel& data, Parcel* reply,
+ uint32_t flags)
{
switch (code) {
case RELEASE: {
@@ -390,13 +380,6 @@ status_t BnMediaRecorder::onTransact(
return NO_ERROR;
} break;
- case SET_OUTPUT_FILE_PATH: {
- ALOGV("SET_OUTPUT_FILE_PATH");
- CHECK_INTERFACE(IMediaRecorder, data, reply);
- const char* path = data.readCString();
- reply->writeInt32(setOutputFile(path));
- return NO_ERROR;
- } break;
case SET_OUTPUT_FILE_FD: {
ALOGV("SET_OUTPUT_FILE_FD");
CHECK_INTERFACE(IMediaRecorder, data, reply);
@@ -445,7 +428,8 @@ status_t BnMediaRecorder::onTransact(
case SET_PREVIEW_SURFACE: {
ALOGV("SET_PREVIEW_SURFACE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
- sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
+ sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>(
+ data.readStrongBinder());
reply->writeInt32(setPreviewSurface(surface));
return NO_ERROR;
} break;
diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp
index 721d8d7..271be0c 100644
--- a/media/libmedia/JetPlayer.cpp
+++ b/media/libmedia/JetPlayer.cpp
@@ -408,7 +408,8 @@ int JetPlayer::queueSegment(int segmentNum, int libNum, int repeatCount, int tra
ALOGV("JetPlayer::queueSegment segmentNum=%d, libNum=%d, repeatCount=%d, transpose=%d",
segmentNum, libNum, repeatCount, transpose);
Mutex::Autolock lock(mMutex);
- return JET_QueueSegment(mEasData, segmentNum, libNum, repeatCount, transpose, muteFlags, userID);
+ return JET_QueueSegment(mEasData, segmentNum, libNum, repeatCount, transpose, muteFlags,
+ userID);
}
//-------------------------------------------------------------------------------------------------
@@ -449,7 +450,8 @@ void JetPlayer::dump()
void JetPlayer::dumpJetStatus(S_JET_STATUS* pJetStatus)
{
if (pJetStatus!=NULL)
- ALOGV(">> current JET player status: userID=%d segmentRepeatCount=%d numQueuedSegments=%d paused=%d",
+ ALOGV(">> current JET player status: userID=%d segmentRepeatCount=%d numQueuedSegments=%d "
+ "paused=%d",
pJetStatus->currentUserID, pJetStatus->segmentRepeatCount,
pJetStatus->numQueuedSegments, pJetStatus->paused);
else
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index e2e6042..47f9258 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -163,7 +163,8 @@ MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap UNUSED
}
/*static*/ int
-MediaProfiles::findTagForName(const MediaProfiles::NameToTagMap *map, size_t nMappings, const char *name)
+MediaProfiles::findTagForName(const MediaProfiles::NameToTagMap *map, size_t nMappings,
+ const char *name)
{
int tag = -1;
for (size_t i = 0; i < nMappings; ++i) {
@@ -295,9 +296,8 @@ MediaProfiles::createAudioEncoderCap(const char **atts)
CHECK(codec != -1);
MediaProfiles::AudioEncoderCap *cap =
- new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]), atoi(atts[7]),
- atoi(atts[9]), atoi(atts[11]), atoi(atts[13]),
- atoi(atts[15]));
+ new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]),
+ atoi(atts[7]), atoi(atts[9]), atoi(atts[11]), atoi(atts[13]), atoi(atts[15]));
logAudioEncoderCap(*cap);
return cap;
}
@@ -330,7 +330,8 @@ MediaProfiles::createCamcorderProfile(int cameraId, const char **atts, Vector<in
!strcmp("fileFormat", atts[2]) &&
!strcmp("duration", atts[4]));
- const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/sizeof(sCamcorderQualityNameMap[0]);
+ const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/
+ sizeof(sCamcorderQualityNameMap[0]);
const int quality = findTagForName(sCamcorderQualityNameMap, nProfileMappings, atts[1]);
CHECK(quality != -1);
@@ -722,16 +723,20 @@ MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality qual
MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles(
MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) {
- *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW);
- *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF);
+ *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(
+ CAMCORDER_QUALITY_TIME_LAPSE_LOW);
+ *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(
+ CAMCORDER_QUALITY_TIME_LAPSE_QCIF);
}
/*static*/ void
MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles(
MediaProfiles::CamcorderProfile **highTimeLapseProfile,
MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) {
- *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH);
- *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P);
+ *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(
+ CAMCORDER_QUALITY_TIME_LAPSE_HIGH);
+ *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(
+ CAMCORDER_QUALITY_TIME_LAPSE_480P);
}
/*static*/ MediaProfiles::CamcorderProfile*
@@ -809,7 +814,8 @@ MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles)
// high camcorder time lapse profiles.
MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile;
- createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile);
+ createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile,
+ &highSpecificTimeLapseProfile);
profiles->mCamcorderProfiles.add(highTimeLapseProfile);
profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile);
diff --git a/media/libmedia/SingleStateQueue.cpp b/media/libmedia/SingleStateQueue.cpp
deleted file mode 100644
index c241184..0000000
--- a/media/libmedia/SingleStateQueue.cpp
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <new>
-#include <cutils/atomic.h>
-#include <media/SingleStateQueue.h>
-
-namespace android {
-
-template<typename T> SingleStateQueue<T>::Mutator::Mutator(Shared *shared)
- : mSequence(0), mShared((Shared *) shared)
-{
- // exactly one of Mutator and Observer must initialize, currently it is Observer
- //shared->init();
-}
-
-template<typename T> int32_t SingleStateQueue<T>::Mutator::push(const T& value)
-{
- Shared *shared = mShared;
- int32_t sequence = mSequence;
- sequence++;
- android_atomic_acquire_store(sequence, &shared->mSequence);
- shared->mValue = value;
- sequence++;
- android_atomic_release_store(sequence, &shared->mSequence);
- mSequence = sequence;
- // consider signalling a futex here, if we know that observer is waiting
- return sequence;
-}
-
-template<typename T> bool SingleStateQueue<T>::Mutator::ack()
-{
- return mShared->mAck - mSequence == 0;
-}
-
-template<typename T> bool SingleStateQueue<T>::Mutator::ack(int32_t sequence)
-{
- // this relies on 2's complement rollover to detect an ancient sequence number
- return mShared->mAck - sequence >= 0;
-}
-
-template<typename T> SingleStateQueue<T>::Observer::Observer(Shared *shared)
- : mSequence(0), mSeed(1), mShared((Shared *) shared)
-{
- // exactly one of Mutator and Observer must initialize, currently it is Observer
- shared->init();
-}
-
-template<typename T> bool SingleStateQueue<T>::Observer::poll(T& value)
-{
- Shared *shared = mShared;
- int32_t before = shared->mSequence;
- if (before == mSequence) {
- return false;
- }
- for (int tries = 0; ; ) {
- const int MAX_TRIES = 5;
- if (before & 1) {
- if (++tries >= MAX_TRIES) {
- return false;
- }
- before = shared->mSequence;
- } else {
- android_memory_barrier();
- T temp = shared->mValue;
- int32_t after = android_atomic_release_load(&shared->mSequence);
- if (after == before) {
- value = temp;
- shared->mAck = before;
- mSequence = before;
- return true;
- }
- if (++tries >= MAX_TRIES) {
- return false;
- }
- before = after;
- }
- }
-}
-
-#if 0
-template<typename T> SingleStateQueue<T>::SingleStateQueue(void /*Shared*/ *shared)
-{
- ((Shared *) shared)->init();
-}
-#endif
-
-} // namespace android
-
-// hack for gcc
-#ifdef SINGLE_STATE_QUEUE_INSTANTIATIONS
-#include SINGLE_STATE_QUEUE_INSTANTIATIONS
-#endif
diff --git a/media/libmedia/SingleStateQueueInstantiations.cpp b/media/libmedia/SingleStateQueueInstantiations.cpp
deleted file mode 100644
index 0265c8c..0000000
--- a/media/libmedia/SingleStateQueueInstantiations.cpp
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/SingleStateQueue.h>
-#include <private/media/StaticAudioTrackState.h>
-#include <media/AudioTimestamp.h>
-
-// FIXME hack for gcc
-
-namespace android {
-
-template class SingleStateQueue<StaticAudioTrackState>; // typedef StaticAudioTrackSingleStateQueue
-template class SingleStateQueue<AudioTimestamp>; // typedef AudioTimestampSingleStateQueue
-
-}
diff --git a/media/libmedia/StringArray.cpp b/media/libmedia/StringArray.cpp
index 5f5b57a..477e3fd 100644
--- a/media/libmedia/StringArray.cpp
+++ b/media/libmedia/StringArray.cpp
@@ -16,7 +16,7 @@
//
// Sortable array of strings. STL-ish, but STL-free.
-//
+//
#include <stdlib.h>
#include <string.h>
diff --git a/media/libmedia/docs/Makefile b/media/libmedia/docs/Makefile
new file mode 100644
index 0000000..bddbc9b
--- /dev/null
+++ b/media/libmedia/docs/Makefile
@@ -0,0 +1,2 @@
+paused.png : paused.dot
+ dot -Tpng < $< > $@
diff --git a/media/libmedia/docs/paused.dot b/media/libmedia/docs/paused.dot
new file mode 100644
index 0000000..11e1777
--- /dev/null
+++ b/media/libmedia/docs/paused.dot
@@ -0,0 +1,85 @@
+digraph paused {
+initial [label="INITIAL\n\
+mIgnoreNextPausedInt = false\n\
+mPaused = false\n\
+mPausedInt = false"];
+
+resume_body [label="mIgnoreNextPausedInt = true\nif (mPaused || mPausedInt)"];
+resume_paused [label="mPaused = false\nmPausedInt = false\nsignal()"];
+resume_paused -> resume_merged;
+resume_merged [label="return"];
+
+Application -> ATstop;
+ATstop [label="AudioTrack::stop()"];
+ATstop -> pause;
+Application -> ATpause;
+ATpause [label="AudioTrack::pause()"];
+ATpause -> pause;
+ATstart -> resume;
+ATstart [label="AudioTrack::start()"];
+destructor [label="~AudioTrack()"];
+destructor -> requestExit;
+requestExit [label="AudioTrackThread::requestExit()"];
+requestExit -> resume;
+Application -> ATsetMarkerPosition
+ATsetMarkerPosition [label="AudioTrack::setMarkerPosition()\n[sets marker variables]"];
+ATsetMarkerPosition -> ATTwake
+Application -> ATsetPositionUpdatePeriod
+ATsetPositionUpdatePeriod [label="AudioTrack::setPositionUpdatePeriod()\n[sets update period variables]"];
+ATsetPositionUpdatePeriod -> ATTwake
+Application -> ATstart;
+
+resume [label="AudioTrackThread::resume()"];
+resume -> resume_body;
+
+resume_body -> resume_paused [label="true"];
+resume_body -> resume_merged [label="false"];
+
+ATTwake [label="AudioTrackThread::wake()\nif (!mPaused && mPausedInt && mPausedNs > 0)"];
+ATTwake-> ATTWake_wakeable [label="true"];
+ATTWake_wakeable [label="mIgnoreNextPausedInt = true\nmPausedInt = false\nsignal()"];
+ATTwake-> ATTWake_cannotwake [label="false"]
+ATTWake_cannotwake [label="ignore"];
+
+pause [label="mPaused = true"];
+pause -> return;
+
+threadLoop [label="AudioTrackThread::threadLoop()\nENTRY"];
+threadLoop -> threadLoop_1;
+threadLoop_1 [label="if (mPaused)"];
+threadLoop_1 -> threadLoop_1_true [label="true"];
+threadLoop_1 -> threadLoop_2 [label="false"];
+threadLoop_1_true [label="wait()\nreturn true"];
+threadLoop_2 [label="if (mIgnoreNextPausedInt)"];
+threadLoop_2 -> threadLoop_2_true [label="true"];
+threadLoop_2 -> threadLoop_3 [label="false"];
+threadLoop_2_true [label="mIgnoreNextPausedInt = false\nmPausedInt = false"];
+threadLoop_2_true -> threadLoop_3;
+threadLoop_3 [label="if (mPausedInt)"];
+threadLoop_3 -> threadLoop_3_true [label="true"];
+threadLoop_3 -> threadLoop_4 [label="false"];
+threadLoop_3_true [label="wait()\nmPausedInt = false\nreturn true"];
+threadLoop_4 [label="if (exitPending)"];
+threadLoop_4 -> threadLoop_4_true [label="true"];
+threadLoop_4 -> threadLoop_5 [label="false"];
+threadLoop_4_true [label="return false"];
+threadLoop_5 [label="ns = processAudioBuffer()"];
+threadLoop_5 -> threadLoop_6;
+threadLoop_6 [label="case ns"];
+threadLoop_6 -> threadLoop_6_0 [label="0"];
+threadLoop_6 -> threadLoop_6_NS_INACTIVE [label="NS_INACTIVE"];
+threadLoop_6 -> threadLoop_6_NS_NEVER [label="NS_NEVER"];
+threadLoop_6 -> threadLoop_6_NS_WHENEVER [label="NS_WHENEVER"];
+threadLoop_6 -> threadLoop_6_default [label="default"];
+threadLoop_6_default [label="if (ns < 0)"];
+threadLoop_6_default -> threadLoop_6_default_true [label="true"];
+threadLoop_6_default -> threadLoop_6_default_false [label="false"];
+threadLoop_6_default_true [label="FATAL"];
+threadLoop_6_default_false [label="pauseInternal(ns) [wake()-able]\nmPausedInt = true\nmPausedNs = ns\nreturn true"];
+threadLoop_6_0 [label="return true"];
+threadLoop_6_NS_INACTIVE [label="pauseInternal()\nmPausedInt = true\nmPausedNs = 0\nreturn true"];
+threadLoop_6_NS_NEVER [label="return false"];
+threadLoop_6_NS_WHENEVER [label="ns = 1s"];
+threadLoop_6_NS_WHENEVER -> threadLoop_6_default_false;
+
+}
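
The graph above documents how the externally requested pause (mPaused, set by AudioTrack::stop()/pause()), the internal timed pause requested by processAudioBuffer() (mPausedInt/mPausedNs), and mIgnoreNextPausedInt interact with AudioTrackThread::threadLoop(), resume() and wake(). With Graphviz installed, the Makefile above renders it to paused.png.
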
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 05c89ed..432ecda 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -240,7 +240,7 @@ status_t MediaPlayer::setVideoSurfaceTexture(
// must call with lock held
status_t MediaPlayer::prepareAsync_l()
{
- if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) {
+ if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) {
mPlayer->setAudioStreamType(mStreamType);
if (mAudioAttributesParcel != NULL) {
mPlayer->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel);
@@ -414,7 +414,8 @@ status_t MediaPlayer::getCurrentPosition(int *msec)
status_t MediaPlayer::getDuration_l(int *msec)
{
ALOGV("getDuration_l");
- bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE));
+ bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED |
+ MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE));
if (mPlayer != 0 && isValidState) {
int durationMs;
status_t ret = mPlayer->getDuration(&durationMs);
@@ -443,7 +444,8 @@ status_t MediaPlayer::getDuration(int *msec)
status_t MediaPlayer::seekTo_l(int msec)
{
ALOGV("seekTo %d", msec);
- if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE) ) ) {
+ if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PREPARED |
+ MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE) ) ) {
if ( msec < 0 ) {
ALOGW("Attempt to seek to invalid position: %d", msec);
msec = 0;
@@ -477,7 +479,8 @@ status_t MediaPlayer::seekTo_l(int msec)
return NO_ERROR;
}
}
- ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(), mCurrentState);
+ ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(),
+ mCurrentState);
return INVALID_OPERATION;
}
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 1952b86..973e156 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -264,32 +264,6 @@ status_t MediaRecorder::setAudioEncoder(int ae)
return ret;
}
-status_t MediaRecorder::setOutputFile(const char* path)
-{
- ALOGV("setOutputFile(%s)", path);
- if (mMediaRecorder == NULL) {
- ALOGE("media recorder is not initialized yet");
- return INVALID_OPERATION;
- }
- if (mIsOutputFileSet) {
- ALOGE("output file has already been set");
- return INVALID_OPERATION;
- }
- if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) {
- ALOGE("setOutputFile called in an invalid state(%d)", mCurrentState);
- return INVALID_OPERATION;
- }
-
- status_t ret = mMediaRecorder->setOutputFile(path);
- if (OK != ret) {
- ALOGV("setOutputFile failed: %d", ret);
- mCurrentState = MEDIA_RECORDER_ERROR;
- return ret;
- }
- mIsOutputFileSet = true;
- return ret;
-}
-
status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)
{
ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 194abbb..4d4de9b 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -154,17 +154,6 @@ status_t MediaRecorderClient::setAudioEncoder(int ae)
return mRecorder->setAudioEncoder((audio_encoder)ae);
}
-status_t MediaRecorderClient::setOutputFile(const char* path)
-{
- ALOGV("setOutputFile(%s)", path);
- Mutex::Autolock lock(mLock);
- if (mRecorder == NULL) {
- ALOGE("recorder is not initialized");
- return NO_INIT;
- }
- return mRecorder->setOutputFile(path);
-}
-
status_t MediaRecorderClient::setOutputFile(int fd, int64_t offset, int64_t length)
{
ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index a65ec9f..a444b6c 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -38,7 +38,6 @@ public:
virtual status_t setOutputFormat(int of);
virtual status_t setVideoEncoder(int ve);
virtual status_t setAudioEncoder(int ae);
- virtual status_t setOutputFile(const char* path);
virtual status_t setOutputFile(int fd, int64_t offset,
int64_t length);
virtual status_t setVideoSize(int width, int height);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 86639cb..5c16920 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -241,14 +241,6 @@ status_t StagefrightRecorder::setPreviewSurface(const sp<IGraphicBufferProducer>
return OK;
}
-status_t StagefrightRecorder::setOutputFile(const char * /* path */) {
- ALOGE("setOutputFile(const char*) must not be called");
- // We don't actually support this at all, as the media_server process
- // no longer has permissions to create files.
-
- return -EPERM;
-}
-
status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t length) {
ALOGV("setOutputFile: %d, %lld, %lld", fd, offset, length);
// These don't make any sense, do they?
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 54c38d3..a6eba39 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -53,7 +53,6 @@ struct StagefrightRecorder : public MediaRecorderBase {
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy);
virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface);
- virtual status_t setOutputFile(const char *path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
virtual status_t setParameters(const String8& params);
virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
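
Together with the mediarecorder.cpp and MediaRecorderClient changes above, this removes the file-path variant of setOutputFile() from the recording stack; as the deleted StagefrightRecorder comment notes, mediaserver no longer has permission to create files, so callers must open the output themselves and pass a file descriptor. The same cleanup removes the path-based constructors from the writers further down (AACWriter, AMRWriter, MPEG2TSWriter, MPEG4Writer, MediaMuxer).
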
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index 6609874..e2c72ed 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -4,6 +4,7 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
GenericSource.cpp \
HTTPLiveSource.cpp \
+ MediaClock.cpp \
NuPlayer.cpp \
NuPlayerCCDecoder.cpp \
NuPlayerDecoder.cpp \
diff --git a/media/libmediaplayerservice/nuplayer/MediaClock.cpp b/media/libmediaplayerservice/nuplayer/MediaClock.cpp
new file mode 100644
index 0000000..7bfff13
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/MediaClock.cpp
@@ -0,0 +1,135 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaClock"
+#include <utils/Log.h>
+
+#include "MediaClock.h"
+
+#include <media/stagefright/foundation/ALooper.h>
+
+namespace android {
+
+// Maximum time change between two updates.
+static const int64_t kMaxAnchorFluctuationUs = 1000ll;
+
+MediaClock::MediaClock()
+ : mAnchorTimeMediaUs(-1),
+ mAnchorTimeRealUs(-1),
+ mMaxTimeMediaUs(INT64_MAX),
+ mStartingTimeMediaUs(-1),
+ mPaused(false) {
+}
+
+MediaClock::~MediaClock() {
+}
+
+void MediaClock::setStartingTimeMedia(int64_t startingTimeMediaUs) {
+ Mutex::Autolock autoLock(mLock);
+ mStartingTimeMediaUs = startingTimeMediaUs;
+}
+
+void MediaClock::clearAnchor() {
+ Mutex::Autolock autoLock(mLock);
+ mAnchorTimeMediaUs = -1;
+ mAnchorTimeRealUs = -1;
+}
+
+void MediaClock::updateAnchor(
+ int64_t anchorTimeMediaUs,
+ int64_t anchorTimeRealUs,
+ int64_t maxTimeMediaUs) {
+ if (anchorTimeMediaUs < 0 || anchorTimeRealUs < 0) {
+ ALOGW("reject anchor time since it is negative.");
+ return;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t nowMediaUs = anchorTimeMediaUs + nowUs - anchorTimeRealUs;
+ if (nowMediaUs < 0) {
+ ALOGW("reject anchor time since it leads to negative media time.");
+ return;
+ }
+
+ Mutex::Autolock autoLock(mLock);
+ mAnchorTimeRealUs = nowUs;
+ mAnchorTimeMediaUs = nowMediaUs;
+ mMaxTimeMediaUs = maxTimeMediaUs;
+}
+
+void MediaClock::updateMaxTimeMedia(int64_t maxTimeMediaUs) {
+ Mutex::Autolock autoLock(mLock);
+ mMaxTimeMediaUs = maxTimeMediaUs;
+}
+
+void MediaClock::pause() {
+ Mutex::Autolock autoLock(mLock);
+ if (mPaused) {
+ return;
+ }
+
+ mPaused = true;
+ if (mAnchorTimeRealUs == -1) {
+ return;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+ mAnchorTimeMediaUs += nowUs - mAnchorTimeRealUs;
+ if (mAnchorTimeMediaUs < 0) {
+ ALOGW("anchor time should not be negative, set to 0.");
+ mAnchorTimeMediaUs = 0;
+ }
+ mAnchorTimeRealUs = nowUs;
+}
+
+void MediaClock::resume() {
+ Mutex::Autolock autoLock(mLock);
+ if (!mPaused) {
+ return;
+ }
+
+ mPaused = false;
+ if (mAnchorTimeRealUs == -1) {
+ return;
+ }
+
+ mAnchorTimeRealUs = ALooper::GetNowUs();
+}
+
+int64_t MediaClock::getTimeMedia(int64_t realUs, bool allowPastMaxTime) {
+ Mutex::Autolock autoLock(mLock);
+ if (mAnchorTimeRealUs == -1) {
+ return -1ll;
+ }
+
+ if (mPaused) {
+ realUs = mAnchorTimeRealUs;
+ }
+ int64_t currentMediaUs = mAnchorTimeMediaUs + realUs - mAnchorTimeRealUs;
+ if (currentMediaUs > mMaxTimeMediaUs && !allowPastMaxTime) {
+ currentMediaUs = mMaxTimeMediaUs;
+ }
+ if (currentMediaUs < mStartingTimeMediaUs) {
+ currentMediaUs = mStartingTimeMediaUs;
+ }
+ if (currentMediaUs < 0) {
+ currentMediaUs = 0;
+ }
+ return currentMediaUs;
+}
+
+} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/MediaClock.h b/media/libmediaplayerservice/nuplayer/MediaClock.h
new file mode 100644
index 0000000..d005993
--- /dev/null
+++ b/media/libmediaplayerservice/nuplayer/MediaClock.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_CLOCK_H_
+
+#define MEDIA_CLOCK_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Mutex.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct AMessage;
+
+struct MediaClock : public RefBase {
+ MediaClock();
+
+ void setStartingTimeMedia(int64_t startingTimeMediaUs);
+
+ void clearAnchor();
+ // It's highly recommended to use timestamp of just rendered frame as
+ // anchor time, especially in paused state. Such restriction will be
+ // required when dynamic playback rate is supported in the future.
+ void updateAnchor(
+ int64_t anchorTimeMediaUs,
+ int64_t anchorTimeRealUs,
+ int64_t maxTimeMediaUs = INT64_MAX);
+
+ void updateMaxTimeMedia(int64_t maxTimeMediaUs);
+
+ void pause();
+ void resume();
+
+ int64_t getTimeMedia(int64_t realUs, bool allowPastMaxTime = false);
+
+protected:
+ virtual ~MediaClock();
+
+private:
+ Mutex mLock;
+
+ int64_t mAnchorTimeMediaUs;
+ int64_t mAnchorTimeRealUs;
+ int64_t mMaxTimeMediaUs;
+ int64_t mStartingTimeMediaUs;
+
+ bool mPaused;
+
+ DISALLOW_EVIL_CONSTRUCTORS(MediaClock);
+};
+
+} // namespace android
+
+#endif // MEDIA_CLOCK_H_
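
A rough usage sketch of the new class, mirroring the calls NuPlayer::Renderer makes further down; the standalone helpers and their names are illustrative, not part of the patch.

    #include <media/stagefright/foundation/ALooper.h>
    #include "MediaClock.h"

    using namespace android;

    // Anchor media time to real time when a new audio timestamp is learned:
    // mediaTimeUs will be heard pendingPlayoutUs from now, and position queries
    // are capped at the last queued media time.
    static void anchorAudioClock(const sp<MediaClock> &clock,
                                 int64_t mediaTimeUs, int64_t pendingPlayoutUs) {
        int64_t nowUs = ALooper::GetNowUs();
        clock->updateAnchor(mediaTimeUs, nowUs + pendingPlayoutUs, mediaTimeUs);
    }

    // Interpolate the current position from the anchor; returns -1 until the first
    // anchor is set, and stands still while the clock is paused.
    static int64_t queryPositionUs(const sp<MediaClock> &clock) {
        return clock->getTimeMedia(ALooper::GetNowUs());
    }
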
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index bc79fdb..abfa4d3 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -351,6 +351,14 @@ status_t NuPlayerDriver::seekTo(int msec) {
case STATE_PREPARED:
case STATE_STOPPED_AND_PREPARED:
{
+ int curpos = 0;
+ if (mPositionUs > 0) {
+ curpos = (mPositionUs + 500ll) / 1000;
+ }
+ if (curpos == msec) {
+ // nothing to do, and doing something anyway could result in deadlock (b/15323063)
+ break;
+ }
mStartupSeekTimeUs = seekTimeUs;
// pretend that the seek completed. It will actually happen when starting playback.
// TODO: actually perform the seek here, so the player is ready to go at the new
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 04e52b8..83fc9e9 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,6 +20,8 @@
#include "NuPlayerRenderer.h"
+#include "MediaClock.h"
+
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -62,22 +64,18 @@ NuPlayer::Renderer::Renderer(
mDrainVideoQueuePending(false),
mAudioQueueGeneration(0),
mVideoQueueGeneration(0),
+ mAudioDrainGeneration(0),
+ mVideoDrainGeneration(0),
mAudioFirstAnchorTimeMediaUs(-1),
mAnchorTimeMediaUs(-1),
- mAnchorTimeRealUs(-1),
mAnchorNumFramesWritten(-1),
- mAnchorMaxMediaUs(-1),
mVideoLateByUs(0ll),
mHasAudio(false),
mHasVideo(false),
- mPauseStartedTimeRealUs(-1),
- mFlushingAudio(false),
- mFlushingVideo(false),
mNotifyCompleteAudio(false),
mNotifyCompleteVideo(false),
mSyncQueues(false),
mPaused(false),
- mPausePositionMediaTimeUs(-1),
mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
@@ -88,6 +86,7 @@ NuPlayer::Renderer::Renderer(
mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
mTotalBuffersQueued(0),
mLastAudioBufferDrained(0) {
+ mMediaClock = new MediaClock;
}
NuPlayer::Renderer::~Renderer() {
@@ -103,6 +102,7 @@ void NuPlayer::Renderer::queueBuffer(
const sp<ABuffer> &buffer,
const sp<AMessage> &notifyConsumed) {
sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
+ msg->setInt32("queueGeneration", getQueueGeneration(audio));
msg->setInt32("audio", static_cast<int32_t>(audio));
msg->setBuffer("buffer", buffer);
msg->setMessage("notifyConsumed", notifyConsumed);
@@ -113,6 +113,7 @@ void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
CHECK_NE(finalResult, (status_t)OK);
sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
+ msg->setInt32("queueGeneration", getQueueGeneration(audio));
msg->setInt32("audio", static_cast<int32_t>(audio));
msg->setInt32("finalResult", finalResult);
msg->post();
@@ -120,20 +121,21 @@ void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
{
- Mutex::Autolock autoLock(mFlushLock);
+ Mutex::Autolock autoLock(mLock);
if (audio) {
mNotifyCompleteAudio |= notifyComplete;
- if (mFlushingAudio) {
- return;
- }
- mFlushingAudio = true;
+ ++mAudioQueueGeneration;
+ ++mAudioDrainGeneration;
} else {
mNotifyCompleteVideo |= notifyComplete;
- if (mFlushingVideo) {
- return;
- }
- mFlushingVideo = true;
+ ++mVideoQueueGeneration;
+ ++mVideoDrainGeneration;
}
+
+ clearAnchorTime_l();
+ clearAudioFirstAnchorTime_l();
+ mVideoLateByUs = 0;
+ mSyncQueues = false;
}
sp<AMessage> msg = new AMessage(kWhatFlush, id());
@@ -142,17 +144,6 @@ void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
}
void NuPlayer::Renderer::signalTimeDiscontinuity() {
- Mutex::Autolock autoLock(mLock);
- // CHECK(mAudioQueue.empty());
- // CHECK(mVideoQueue.empty());
- setAudioFirstAnchorTime(-1);
- setAnchorTime(-1, -1);
- setVideoLateByUs(0);
- mSyncQueues = false;
-}
-
-void NuPlayer::Renderer::signalAudioSinkChanged() {
- (new AMessage(kWhatAudioSinkChanged, id()))->post();
}
void NuPlayer::Renderer::signalDisableOffloadAudio() {
@@ -177,127 +168,44 @@ void NuPlayer::Renderer::setVideoFrameRate(float fps) {
msg->post();
}
-// Called on any threads, except renderer's thread.
-status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
- {
- Mutex::Autolock autoLock(mLock);
- int64_t currentPositionUs;
- if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
- *mediaUs = currentPositionUs;
- return OK;
- }
- }
- return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs());
-}
-
-// Called on only renderer's thread.
-status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) {
- return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs());
-}
-
-// Called on only renderer's thread.
-// Since mPaused and mPausePositionMediaTimeUs are changed only on renderer's
-// thread, no need to acquire mLock.
-status_t NuPlayer::Renderer::getCurrentPositionOnLooper(
- int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
- int64_t currentPositionUs;
- if (getCurrentPositionIfPaused_l(&currentPositionUs)) {
- *mediaUs = currentPositionUs;
- return OK;
- }
- return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo);
-}
-
-// Called either with mLock acquired or on renderer's thread.
-bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) {
- if (!mPaused || mPausePositionMediaTimeUs < 0ll) {
- return false;
- }
- *mediaUs = mPausePositionMediaTimeUs;
- return true;
-}
-
// Called on any threads.
-status_t NuPlayer::Renderer::getCurrentPositionFromAnchor(
- int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) {
- Mutex::Autolock autoLock(mTimeLock);
- if (!mHasAudio && !mHasVideo) {
- return NO_INIT;
- }
-
- if (mAnchorTimeMediaUs < 0) {
+status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
+ int64_t currentTimeUs = mMediaClock->getTimeMedia(ALooper::GetNowUs());
+ if (currentTimeUs == -1) {
return NO_INIT;
}
-
- int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;
-
- if (mPauseStartedTimeRealUs != -1) {
- positionUs -= (nowUs - mPauseStartedTimeRealUs);
- }
-
- // limit position to the last queued media time (for video only stream
- // position will be discrete as we don't know how long each frame lasts)
- if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) {
- if (positionUs > mAnchorMaxMediaUs) {
- positionUs = mAnchorMaxMediaUs;
- }
- }
-
- if (positionUs < mAudioFirstAnchorTimeMediaUs) {
- positionUs = mAudioFirstAnchorTimeMediaUs;
- }
-
- *mediaUs = (positionUs <= 0) ? 0 : positionUs;
+ *mediaUs = currentTimeUs;
return OK;
}
-void NuPlayer::Renderer::setHasMedia(bool audio) {
- Mutex::Autolock autoLock(mTimeLock);
- if (audio) {
- mHasAudio = true;
- } else {
- mHasVideo = true;
- }
-}
-
-void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) {
- Mutex::Autolock autoLock(mTimeLock);
- mAudioFirstAnchorTimeMediaUs = mediaUs;
+void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
+ mAudioFirstAnchorTimeMediaUs = -1;
+ mMediaClock->setStartingTimeMedia(-1);
}
-void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) {
- Mutex::Autolock autoLock(mTimeLock);
+void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
if (mAudioFirstAnchorTimeMediaUs == -1) {
mAudioFirstAnchorTimeMediaUs = mediaUs;
+ mMediaClock->setStartingTimeMedia(mediaUs);
}
}
-void NuPlayer::Renderer::setAnchorTime(
- int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) {
- Mutex::Autolock autoLock(mTimeLock);
- mAnchorTimeMediaUs = mediaUs;
- mAnchorTimeRealUs = realUs;
- mAnchorNumFramesWritten = numFramesWritten;
- if (resume) {
- mPauseStartedTimeRealUs = -1;
- }
+void NuPlayer::Renderer::clearAnchorTime_l() {
+ mMediaClock->clearAnchor();
+ mAnchorTimeMediaUs = -1;
+ mAnchorNumFramesWritten = -1;
}
void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
- Mutex::Autolock autoLock(mTimeLock);
+ Mutex::Autolock autoLock(mLock);
mVideoLateByUs = lateUs;
}
int64_t NuPlayer::Renderer::getVideoLateByUs() {
- Mutex::Autolock autoLock(mTimeLock);
+ Mutex::Autolock autoLock(mLock);
return mVideoLateByUs;
}
-void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) {
- Mutex::Autolock autoLock(mTimeLock);
- mPauseStartedTimeRealUs = realUs;
-}
-
status_t NuPlayer::Renderer::openAudioSink(
const sp<AMessage> &format,
bool offloadOnly,
@@ -381,8 +289,8 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatDrainAudioQueue:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
- if (generation != mAudioQueueGeneration) {
+ CHECK(msg->findInt32("drainGeneration", &generation));
+ if (generation != getDrainGeneration(true /* audio */)) {
break;
}
@@ -404,9 +312,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
// Let's give it more data after about half that time
// has elapsed.
- // kWhatDrainAudioQueue is used for non-offloading mode,
- // and mLock is used only for offloading mode. Therefore,
- // no need to acquire mLock here.
+ Mutex::Autolock autoLock(mLock);
postDrainAudioQueue_l(delayUs / 2);
}
break;
@@ -415,8 +321,8 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatDrainVideoQueue:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
- if (generation != mVideoQueueGeneration) {
+ CHECK(msg->findInt32("drainGeneration", &generation));
+ if (generation != getDrainGeneration(false /* audio */)) {
break;
}
@@ -424,22 +330,20 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
onDrainVideoQueue();
- Mutex::Autolock autoLock(mLock);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
break;
}
case kWhatPostDrainVideoQueue:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
- if (generation != mVideoQueueGeneration) {
+ CHECK(msg->findInt32("drainGeneration", &generation));
+ if (generation != getDrainGeneration(false /* audio */)) {
break;
}
mDrainVideoQueuePending = false;
- Mutex::Autolock autoLock(mLock);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
break;
}
@@ -461,12 +365,6 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatAudioSinkChanged:
- {
- onAudioSinkChanged();
- break;
- }
-
case kWhatDisableOffloadAudio:
{
onDisableOffloadAudio();
@@ -508,7 +406,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
case kWhatAudioOffloadPauseTimeout:
{
int32_t generation;
- CHECK(msg->findInt32("generation", &generation));
+ CHECK(msg->findInt32("drainGeneration", &generation));
if (generation != mAudioOffloadPauseTimeoutGeneration) {
break;
}
@@ -535,18 +433,18 @@ void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
mDrainAudioQueuePending = true;
sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
- msg->setInt32("generation", mAudioQueueGeneration);
+ msg->setInt32("drainGeneration", mAudioDrainGeneration);
msg->post(delayUs);
}
-void NuPlayer::Renderer::prepareForMediaRenderingStart() {
- mAudioRenderingStartGeneration = mAudioQueueGeneration;
- mVideoRenderingStartGeneration = mVideoQueueGeneration;
+void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
+ mAudioRenderingStartGeneration = mAudioDrainGeneration;
+ mVideoRenderingStartGeneration = mVideoDrainGeneration;
}
-void NuPlayer::Renderer::notifyIfMediaRenderingStarted() {
- if (mVideoRenderingStartGeneration == mVideoQueueGeneration &&
- mAudioRenderingStartGeneration == mAudioQueueGeneration) {
+void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
+ if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
+ mAudioRenderingStartGeneration == mAudioDrainGeneration) {
mVideoRenderingStartGeneration = -1;
mAudioRenderingStartGeneration = -1;
@@ -614,7 +512,7 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
int64_t mediaTimeUs;
CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
- setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+ setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
}
size_t copy = entry->mBuffer->size() - entry->mOffset;
@@ -634,17 +532,18 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
entry = NULL;
}
sizeCopied += copy;
- notifyIfMediaRenderingStarted();
+
+ notifyIfMediaRenderingStarted_l();
}
if (mAudioFirstAnchorTimeMediaUs >= 0) {
int64_t nowUs = ALooper::GetNowUs();
- setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs));
+ // we don't know how much data we are queueing for offloaded tracks.
+ mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs,
+ nowUs - getPlayedOutAudioDurationUs(nowUs),
+ INT64_MAX);
}
- // we don't know how much data we are queueing for offloaded tracks
- mAnchorMaxMediaUs = -1;
-
if (hasEOS) {
(new AMessage(kWhatStopAudioSink, id()))->post();
}
@@ -729,7 +628,10 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
size_t copiedFrames = written / mAudioSink->frameSize();
mNumFramesWritten += copiedFrames;
- notifyIfMediaRenderingStarted();
+ {
+ Mutex::Autolock autoLock(mLock);
+ notifyIfMediaRenderingStarted_l();
+ }
if (written != (ssize_t)copy) {
// A short count was received from AudioSink::write()
@@ -752,10 +654,15 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
break;
}
}
- mAnchorMaxMediaUs =
- mAnchorTimeMediaUs +
- (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
- * 1000LL * mAudioSink->msecsPerFrame());
+ int64_t maxTimeMedia;
+ {
+ Mutex::Autolock autoLock(mLock);
+ maxTimeMedia =
+ mAnchorTimeMediaUs +
+ (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+ * 1000LL * mAudioSink->msecsPerFrame());
+ }
+ mMediaClock->updateMaxTimeMedia(maxTimeMedia);
return !mAudioQueue.empty();
}
@@ -767,31 +674,35 @@ int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
}
int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
- int64_t currentPositionUs;
- if (mPaused || getCurrentPositionOnLooper(
- &currentPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) {
- // If failed to get current position, e.g. due to audio clock is not ready, then just
- // play out video immediately without delay.
+ int64_t currentPositionUs =
+ mMediaClock->getTimeMedia(nowUs, true /* allowPastMaxTime */);
+ if (currentPositionUs == -1) {
+ // If failed to get current position, e.g. due to audio clock is
+ // not ready, then just play out video immediately without delay.
return nowUs;
}
return (mediaTimeUs - currentPositionUs) + nowUs;
}
void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
+ Mutex::Autolock autoLock(mLock);
// TRICKY: vorbis decoder generates multiple frames with the same
// timestamp, so only update on the first frame with a given timestamp
if (mediaTimeUs == mAnchorTimeMediaUs) {
return;
}
- setAudioFirstAnchorTimeIfNeeded(mediaTimeUs);
+ setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
int64_t nowUs = ALooper::GetNowUs();
- setAnchorTime(
- mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten);
+ mMediaClock->updateAnchor(mediaTimeUs,
+ nowUs + getPendingAudioPlayoutDurationUs(nowUs),
+ mediaTimeUs);
+ mAnchorTimeMediaUs = mediaTimeUs;
}
-void NuPlayer::Renderer::postDrainVideoQueue_l() {
+// Called without mLock acquired.
+void NuPlayer::Renderer::postDrainVideoQueue() {
if (mDrainVideoQueuePending
- || mSyncQueues
+ || getSyncQueues()
|| (mPaused && mVideoSampleReceived)) {
return;
}
@@ -803,7 +714,7 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() {
QueueEntry &entry = *mVideoQueue.begin();
sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
- msg->setInt32("generation", mVideoQueueGeneration);
+ msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
if (entry.mBuffer == NULL) {
// EOS doesn't carry a timestamp.
@@ -823,16 +734,19 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() {
int64_t mediaTimeUs;
CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
- if (mAnchorTimeMediaUs < 0) {
- setAnchorTime(mediaTimeUs, nowUs);
- mPausePositionMediaTimeUs = mediaTimeUs;
- mAnchorMaxMediaUs = mediaTimeUs;
- realTimeUs = nowUs;
- } else {
- realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mAnchorTimeMediaUs < 0) {
+ mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
+ mAnchorTimeMediaUs = mediaTimeUs;
+ realTimeUs = nowUs;
+ } else {
+ realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
+ }
}
if (!mHasAudio) {
- mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps
+ // smooth out videos >= 10fps
+ mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000);
}
// Heuristics to handle situation when media time changed without a
@@ -913,14 +827,15 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
} else {
ALOGV("rendering video at media time %.2f secs",
(mFlags & FLAG_REAL_TIME ? realTimeUs :
- (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
+ mMediaClock->getTimeMedia(realTimeUs)) / 1E6);
}
} else {
setVideoLateByUs(0);
if (!mVideoSampleReceived && !mHasAudio) {
// This will ensure that the first frame after a flush won't be used as anchor
// when renderer is in paused state, because resume can happen any time after seek.
- setAnchorTime(-1, -1);
+ Mutex::Autolock autoLock(mLock);
+ clearAnchorTime_l();
}
}
@@ -937,7 +852,8 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
mVideoRenderingStarted = true;
notifyVideoRenderingStart();
}
- notifyIfMediaRenderingStarted();
+ Mutex::Autolock autoLock(mLock);
+ notifyIfMediaRenderingStarted_l();
}
}
@@ -963,7 +879,15 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
- setHasMedia(audio);
+ if (dropBufferIfStale(audio, msg)) {
+ return;
+ }
+
+ if (audio) {
+ mHasAudio = true;
+ } else {
+ mHasVideo = true;
+ }
if (mHasVideo) {
if (mVideoScheduler == NULL) {
@@ -972,10 +896,6 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
}
}
- if (dropBufferWhileFlushing(audio, msg)) {
- return;
- }
-
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
@@ -989,15 +909,16 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
entry.mFinalResult = OK;
entry.mBufferOrdinal = ++mTotalBuffersQueued;
- Mutex::Autolock autoLock(mLock);
if (audio) {
+ Mutex::Autolock autoLock(mLock);
mAudioQueue.push_back(entry);
postDrainAudioQueue_l();
} else {
mVideoQueue.push_back(entry);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
}
+ Mutex::Autolock autoLock(mLock);
if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
return;
}
@@ -1046,7 +967,9 @@ void NuPlayer::Renderer::syncQueuesDone_l() {
}
if (!mVideoQueue.empty()) {
- postDrainVideoQueue_l();
+ mLock.unlock();
+ postDrainVideoQueue();
+ mLock.lock();
}
}
@@ -1054,7 +977,7 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
- if (dropBufferWhileFlushing(audio, msg)) {
+ if (dropBufferIfStale(audio, msg)) {
return;
}
@@ -1065,19 +988,20 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
entry.mOffset = 0;
entry.mFinalResult = finalResult;
- Mutex::Autolock autoLock(mLock);
if (audio) {
+ Mutex::Autolock autoLock(mLock);
if (mAudioQueue.empty() && mSyncQueues) {
syncQueuesDone_l();
}
mAudioQueue.push_back(entry);
postDrainAudioQueue_l();
} else {
- if (mVideoQueue.empty() && mSyncQueues) {
+ if (mVideoQueue.empty() && getSyncQueues()) {
+ Mutex::Autolock autoLock(mLock);
syncQueuesDone_l();
}
mVideoQueue.push_back(entry);
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
}
}
@@ -1086,31 +1010,25 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
CHECK(msg->findInt32("audio", &audio));
{
- Mutex::Autolock autoLock(mFlushLock);
+ Mutex::Autolock autoLock(mLock);
if (audio) {
- mFlushingAudio = false;
notifyComplete = mNotifyCompleteAudio;
mNotifyCompleteAudio = false;
} else {
- mFlushingVideo = false;
notifyComplete = mNotifyCompleteVideo;
mNotifyCompleteVideo = false;
}
- }
- // If we're currently syncing the queues, i.e. dropping audio while
- // aligning the first audio/video buffer times and only one of the
- // two queues has data, we may starve that queue by not requesting
- // more buffers from the decoder. If the other source then encounters
- // a discontinuity that leads to flushing, we'll never find the
- // corresponding discontinuity on the other queue.
- // Therefore we'll stop syncing the queues if at least one of them
- // is flushed.
- {
- Mutex::Autolock autoLock(mLock);
- syncQueuesDone_l();
- setPauseStartedTimeRealUs(-1);
- setAnchorTime(-1, -1);
+ // If we're currently syncing the queues, i.e. dropping audio while
+ // aligning the first audio/video buffer times and only one of the
+ // two queues has data, we may starve that queue by not requesting
+ // more buffers from the decoder. If the other source then encounters
+ // a discontinuity that leads to flushing, we'll never find the
+ // corresponding discontinuity on the other queue.
+ // Therefore we'll stop syncing the queues if at least one of them
+ // is flushed.
+ syncQueuesDone_l();
+ clearAnchorTime_l();
}
ALOGV("flushing %s", audio ? "audio" : "video");
@@ -1119,11 +1037,11 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
Mutex::Autolock autoLock(mLock);
flushQueue(&mAudioQueue);
- ++mAudioQueueGeneration;
- prepareForMediaRenderingStart();
+ ++mAudioDrainGeneration;
+ prepareForMediaRenderingStart_l();
if (offloadingAudio()) {
- setAudioFirstAnchorTime(-1);
+ clearAudioFirstAnchorTime_l();
}
}
@@ -1138,13 +1056,14 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
flushQueue(&mVideoQueue);
mDrainVideoQueuePending = false;
- ++mVideoQueueGeneration;
if (mVideoScheduler != NULL) {
mVideoScheduler->restart();
}
- prepareForMediaRenderingStart();
+ Mutex::Autolock autoLock(mLock);
+ ++mVideoDrainGeneration;
+ prepareForMediaRenderingStart_l();
}
mVideoSampleReceived = false;
@@ -1174,20 +1093,12 @@ void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
notify->post();
}
-bool NuPlayer::Renderer::dropBufferWhileFlushing(
+bool NuPlayer::Renderer::dropBufferIfStale(
bool audio, const sp<AMessage> &msg) {
- bool flushing = false;
-
- {
- Mutex::Autolock autoLock(mFlushLock);
- if (audio) {
- flushing = mFlushingAudio;
- } else {
- flushing = mFlushingVideo;
- }
- }
+ int32_t queueGeneration;
+ CHECK(msg->findInt32("queueGeneration", &queueGeneration));
- if (!flushing) {
+ if (queueGeneration == getQueueGeneration(audio)) {
return false;
}
@@ -1205,7 +1116,10 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
}
CHECK(!mDrainAudioQueuePending);
mNumFramesWritten = 0;
- mAnchorNumFramesWritten = -1;
+ {
+ Mutex::Autolock autoLock(mLock);
+ mAnchorNumFramesWritten = -1;
+ }
uint32_t written;
if (mAudioSink->getFramesWritten(&written) == OK) {
mNumFramesWritten = written;
@@ -1215,13 +1129,13 @@ void NuPlayer::Renderer::onAudioSinkChanged() {
void NuPlayer::Renderer::onDisableOffloadAudio() {
Mutex::Autolock autoLock(mLock);
mFlags &= ~FLAG_OFFLOAD_AUDIO;
- ++mAudioQueueGeneration;
+ ++mAudioDrainGeneration;
}
void NuPlayer::Renderer::onEnableOffloadAudio() {
Mutex::Autolock autoLock(mLock);
mFlags |= FLAG_OFFLOAD_AUDIO;
- ++mAudioQueueGeneration;
+ ++mAudioDrainGeneration;
}
void NuPlayer::Renderer::onPause() {
@@ -1230,25 +1144,13 @@ void NuPlayer::Renderer::onPause() {
return;
}
int64_t currentPositionUs;
- int64_t pausePositionMediaTimeUs;
- if (getCurrentPositionFromAnchor(
- &currentPositionUs, ALooper::GetNowUs()) == OK) {
- pausePositionMediaTimeUs = currentPositionUs;
- } else {
- // Set paused position to -1 (unavailabe) if we don't have anchor time
- // This could happen if client does a seekTo() immediately followed by
- // pause(). Renderer will be flushed with anchor time cleared. We don't
- // want to leave stale value in mPausePositionMediaTimeUs.
- pausePositionMediaTimeUs = -1;
- }
{
Mutex::Autolock autoLock(mLock);
- mPausePositionMediaTimeUs = pausePositionMediaTimeUs;
- ++mAudioQueueGeneration;
- ++mVideoQueueGeneration;
- prepareForMediaRenderingStart();
+ ++mAudioDrainGeneration;
+ ++mVideoDrainGeneration;
+ prepareForMediaRenderingStart_l();
mPaused = true;
- setPauseStartedTimeRealUs(ALooper::GetNowUs());
+ mMediaClock->pause();
}
mDrainAudioQueuePending = false;
@@ -1273,21 +1175,18 @@ void NuPlayer::Renderer::onResume() {
mAudioSink->start();
}
- Mutex::Autolock autoLock(mLock);
- mPaused = false;
- if (mPauseStartedTimeRealUs != -1) {
- int64_t newAnchorRealUs =
- mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs;
- setAnchorTime(
- mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */);
- }
+ {
+ Mutex::Autolock autoLock(mLock);
+ mPaused = false;
+ mMediaClock->resume();
- if (!mAudioQueue.empty()) {
- postDrainAudioQueue_l();
+ if (!mAudioQueue.empty()) {
+ postDrainAudioQueue_l();
+ }
}
if (!mVideoQueue.empty()) {
- postDrainVideoQueue_l();
+ postDrainVideoQueue();
}
}
@@ -1298,6 +1197,21 @@ void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
mVideoScheduler->init(fps);
}
+int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) {
+ Mutex::Autolock autoLock(mLock);
+ return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
+}
+
+int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) {
+ Mutex::Autolock autoLock(mLock);
+ return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
+}
+
+bool NuPlayer::Renderer::getSyncQueues() {
+ Mutex::Autolock autoLock(mLock);
+ return mSyncQueues;
+}
+
// TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs()
// as it acquires locks and may query the audio driver.
//
@@ -1369,7 +1283,7 @@ void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reaso
mAudioOffloadTornDown = true;
int64_t currentPositionUs;
- if (getCurrentPositionOnLooper(&currentPositionUs) != OK) {
+ if (getCurrentPosition(&currentPositionUs) != OK) {
currentPositionUs = 0;
}
@@ -1386,7 +1300,7 @@ void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reaso
void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
if (offloadingAudio()) {
sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id());
- msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration);
+ msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
msg->post(kOffloadPauseMaxUs);
}
}
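
The net effect of the renderer changes above is that the per-stream "flushing" flags are replaced by generation counters: queueBuffer()/queueEOS() stamp each message with the queue generation current at send time, drain messages carry a separate drain generation, flush() and pause bump the counters under mLock, and stale messages are simply dropped on receipt (dropBufferIfStale(), getDrainGeneration()). A condensed sketch of the pattern, with illustrative names rather than the actual renderer class:

    #include <media/stagefright/foundation/ADebug.h>    // CHECK
    #include <media/stagefright/foundation/AMessage.h>
    #include <utils/Mutex.h>

    using namespace android;

    struct GenerationGate {
        GenerationGate() : mQueueGeneration(0) {}

        // Sender: stamp the message with the generation current at send time.
        void stamp(const sp<AMessage> &msg) {
            Mutex::Autolock autoLock(mLock);
            msg->setInt32("queueGeneration", mQueueGeneration);
        }

        // Flush: bump the generation, implicitly invalidating in-flight messages.
        void flush() {
            Mutex::Autolock autoLock(mLock);
            ++mQueueGeneration;
        }

        // Receiver: drop anything stamped with a stale generation.
        bool isStale(const sp<AMessage> &msg) {
            int32_t generation;
            CHECK(msg->findInt32("queueGeneration", &generation));
            Mutex::Autolock autoLock(mLock);
            return generation != mQueueGeneration;
        }

        Mutex mLock;
        int32_t mQueueGeneration;
    };
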
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 98fc44d..b35c86a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -23,6 +23,7 @@
namespace android {
struct ABuffer;
+struct MediaClock;
struct VideoFrameScheduler;
struct NuPlayer::Renderer : public AHandler {
@@ -60,16 +61,8 @@ struct NuPlayer::Renderer : public AHandler {
void setVideoFrameRate(float fps);
- // Following setters and getters are protected by mTimeLock.
status_t getCurrentPosition(int64_t *mediaUs);
- void setHasMedia(bool audio);
- void setAudioFirstAnchorTime(int64_t mediaUs);
- void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs);
- void setAnchorTime(
- int64_t mediaUs, int64_t realUs, int64_t numFramesWritten = -1, bool resume = false);
- void setVideoLateByUs(int64_t lateUs);
int64_t getVideoLateByUs();
- void setPauseStartedTimeRealUs(int64_t realUs);
status_t openAudioSink(
const sp<AMessage> &format,
@@ -107,7 +100,6 @@ private:
kWhatQueueBuffer = 'queB',
kWhatQueueEOS = 'qEOS',
kWhatFlush = 'flus',
- kWhatAudioSinkChanged = 'auSC',
kWhatPause = 'paus',
kWhatResume = 'resm',
kWhatOpenAudioSink = 'opnA',
@@ -141,26 +133,17 @@ private:
bool mDrainVideoQueuePending;
int32_t mAudioQueueGeneration;
int32_t mVideoQueueGeneration;
+ int32_t mAudioDrainGeneration;
+ int32_t mVideoDrainGeneration;
- Mutex mTimeLock;
- // |mTimeLock| protects the following 7 member vars that are related to time.
- // Note: those members are only written on Renderer thread, so reading on Renderer thread
- // doesn't need to be protected. Otherwise accessing those members must be protected by
- // |mTimeLock|.
- // TODO: move those members to a seperated media clock class.
+ sp<MediaClock> mMediaClock;
int64_t mAudioFirstAnchorTimeMediaUs;
int64_t mAnchorTimeMediaUs;
- int64_t mAnchorTimeRealUs;
int64_t mAnchorNumFramesWritten;
- int64_t mAnchorMaxMediaUs;
int64_t mVideoLateByUs;
bool mHasAudio;
bool mHasVideo;
- int64_t mPauseStartedTimeRealUs;
- Mutex mFlushLock; // protects the following 2 member vars.
- bool mFlushingAudio;
- bool mFlushingVideo;
bool mNotifyCompleteAudio;
bool mNotifyCompleteVideo;
@@ -168,7 +151,6 @@ private:
// modified on only renderer's thread.
bool mPaused;
- int64_t mPausePositionMediaTimeUs;
bool mVideoSampleReceived;
bool mVideoRenderingStarted;
@@ -194,13 +176,6 @@ private:
int32_t mTotalBuffersQueued;
int32_t mLastAudioBufferDrained;
- status_t getCurrentPositionOnLooper(int64_t *mediaUs);
- status_t getCurrentPositionOnLooper(
- int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
- bool getCurrentPositionIfPaused_l(int64_t *mediaUs);
- status_t getCurrentPositionFromAnchor(
- int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo = false);
-
size_t fillAudioBuffer(void *buffer, size_t size);
bool onDrainAudioQueue();
@@ -208,14 +183,19 @@ private:
int64_t getPlayedOutAudioDurationUs(int64_t nowUs);
void postDrainAudioQueue_l(int64_t delayUs = 0);
+ void clearAnchorTime_l();
+ void clearAudioFirstAnchorTime_l();
+ void setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs);
+ void setVideoLateByUs(int64_t lateUs);
+
void onNewAudioMediaTime(int64_t mediaTimeUs);
int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs);
void onDrainVideoQueue();
- void postDrainVideoQueue_l();
+ void postDrainVideoQueue();
- void prepareForMediaRenderingStart();
- void notifyIfMediaRenderingStarted();
+ void prepareForMediaRenderingStart_l();
+ void notifyIfMediaRenderingStarted_l();
void onQueueBuffer(const sp<AMessage> &msg);
void onQueueEOS(const sp<AMessage> &msg);
@@ -226,6 +206,9 @@ private:
void onPause();
void onResume();
void onSetVideoFrameRate(float fps);
+ int32_t getQueueGeneration(bool audio);
+ int32_t getDrainGeneration(bool audio);
+ bool getSyncQueues();
void onAudioOffloadTearDown(AudioOffloadTearDownReason reason);
status_t onOpenAudioSink(
const sp<AMessage> &format,
@@ -242,7 +225,7 @@ private:
void notifyAudioOffloadTearDown();
void flushQueue(List<QueueEntry> *queue);
- bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg);
+ bool dropBufferIfStale(bool audio, const sp<AMessage> &msg);
void syncQueuesDone_l();
bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; }
diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk
index 9707c4a..1353f28 100644
--- a/media/libnbaio/Android.mk
+++ b/media/libnbaio/Android.mk
@@ -11,7 +11,6 @@ LOCAL_SRC_FILES := \
MonoPipeReader.cpp \
Pipe.cpp \
PipeReader.cpp \
- roundup.c \
SourceAudioBufferProvider.cpp
LOCAL_SRC_FILES += NBLog.cpp
@@ -27,12 +26,13 @@ LOCAL_SRC_FILES += NBLog.cpp
LOCAL_MODULE := libnbaio
LOCAL_SHARED_LIBRARIES := \
+ libaudioutils \
libbinder \
libcommon_time_client \
libcutils \
libutils \
liblog
-LOCAL_STATIC_LIBRARIES += libinstantssq
+LOCAL_C_INCLUDES := $(call include-path-for, audio-utils)
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp
index 0b65861..129e9ef 100644
--- a/media/libnbaio/MonoPipe.cpp
+++ b/media/libnbaio/MonoPipe.cpp
@@ -27,7 +27,7 @@
#include <utils/Trace.h>
#include <media/AudioBufferProvider.h>
#include <media/nbaio/MonoPipe.h>
-#include <media/nbaio/roundup.h>
+#include <audio_utils/roundup.h>
namespace android {
diff --git a/media/libnbaio/MonoPipeReader.cpp b/media/libnbaio/MonoPipeReader.cpp
index de82229..e4d3ed8 100644
--- a/media/libnbaio/MonoPipeReader.cpp
+++ b/media/libnbaio/MonoPipeReader.cpp
@@ -39,7 +39,7 @@ ssize_t MonoPipeReader::availableToRead()
return NEGOTIATE;
}
ssize_t ret = android_atomic_acquire_load(&mPipe->mRear) - mPipe->mFront;
- ALOG_ASSERT((0 <= ret) && (ret <= mMaxFrames));
+ ALOG_ASSERT((0 <= ret) && ((size_t) ret <= mPipe->mMaxFrames));
return ret;
}
diff --git a/media/libnbaio/Pipe.cpp b/media/libnbaio/Pipe.cpp
index 6e0ec8c..13f211d 100644
--- a/media/libnbaio/Pipe.cpp
+++ b/media/libnbaio/Pipe.cpp
@@ -21,7 +21,7 @@
#include <cutils/compiler.h>
#include <utils/Log.h>
#include <media/nbaio/Pipe.h>
-#include <media/nbaio/roundup.h>
+#include <audio_utils/roundup.h>
namespace android {
diff --git a/media/libnbaio/roundup.c b/media/libnbaio/roundup.c
deleted file mode 100644
index 1d552d1..0000000
--- a/media/libnbaio/roundup.c
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <media/nbaio/roundup.h>
-
-unsigned roundup(unsigned v)
-{
- // __builtin_clz is undefined for zero input
- if (v == 0) {
- v = 1;
- }
- int lz = __builtin_clz((int) v);
- unsigned rounded = ((unsigned) 0x80000000) >> lz;
- // 0x80000001 and higher are actually rounded _down_ to prevent overflow
- if (v > rounded && lz > 0) {
- rounded <<= 1;
- }
- return rounded;
-}
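
roundup() rounds its argument up to the next power of two via the leading-zero count: roundup(0) and roundup(1) give 1, roundup(1000) gives 1024, roundup(1024) stays 1024, and values above 0x80000000 are clamped down to 0x80000000 rather than overflowing. The function is not gone; per the Android.mk and #include changes above, MonoPipe and Pipe now take it from libaudioutils' audio_utils/roundup.h instead of this local copy.
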
diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp
index 2e41d80..9d90dbd 100644
--- a/media/libstagefright/AACWriter.cpp
+++ b/media/libstagefright/AACWriter.cpp
@@ -36,33 +36,19 @@
namespace android {
-AACWriter::AACWriter(const char *filename)
- : mFd(-1),
- mInitCheck(NO_INIT),
- mStarted(false),
- mPaused(false),
- mResumed(false),
- mChannelCount(-1),
- mSampleRate(-1),
- mAACProfile(OMX_AUDIO_AACObjectLC) {
-
- ALOGV("AACWriter Constructor");
-
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- mInitCheck = OK;
- }
-}
-
AACWriter::AACWriter(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0? NO_INIT: OK),
mStarted(false),
mPaused(false),
mResumed(false),
+ mThread(0),
+ mEstimatedSizeBytes(0),
+ mEstimatedDurationUs(0),
mChannelCount(-1),
mSampleRate(-1),
- mAACProfile(OMX_AUDIO_AACObjectLC) {
+ mAACProfile(OMX_AUDIO_AACObjectLC),
+ mFrameDurationUs(0) {
}
AACWriter::~AACWriter() {
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index 9aa7d95..f53d7f0 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -31,19 +31,6 @@
namespace android {
-AMRWriter::AMRWriter(const char *filename)
- : mFd(-1),
- mInitCheck(NO_INIT),
- mStarted(false),
- mPaused(false),
- mResumed(false) {
-
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- mInitCheck = OK;
- }
-}
-
AMRWriter::AMRWriter(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0? NO_INIT: OK),
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 2629afc..6d9bbae 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -101,6 +101,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_color_conversion \
libstagefright_aacenc \
libstagefright_matroska \
+ libstagefright_mediafilter \
libstagefright_webm \
libstagefright_timedtext \
libvpx \
@@ -108,13 +109,14 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_mpeg2ts \
libstagefright_id3 \
libFLAC \
- libmedia_helper
+ libmedia_helper \
LOCAL_SHARED_LIBRARIES += \
libstagefright_enc_common \
libstagefright_avc_common \
libstagefright_foundation \
- libdl
+ libdl \
+ libRScpp \
LOCAL_CFLAGS += -Wno-multichar
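
These build changes pull in the new filter code: libstagefright now links the libstagefright_mediafilter static library and the libRScpp shared library, the RenderScript C++ runtime used by the RenderScript-based filters listed in the diffstat (RSFilter, IntrinsicBlurFilter, SaturationFilter).
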
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index a7ca3da..f0db76b 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -14,6 +14,10 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "FileSource"
+#include <utils/Log.h>
+
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/FileSource.h>
#include <sys/types.h>
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 9856f92..4359fb9 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -480,19 +480,6 @@ MPEG2TSWriter::MPEG2TSWriter(int fd)
init();
}
-MPEG2TSWriter::MPEG2TSWriter(const char *filename)
- : mFile(fopen(filename, "wb")),
- mWriteCookie(NULL),
- mWriteFunc(NULL),
- mStarted(false),
- mNumSourcesDone(0),
- mNumTSPacketsWritten(0),
- mNumTSPacketsBeforeMeta(0),
- mPATContinuityCounter(0),
- mPMTContinuityCounter(0) {
- init();
-}
-
MPEG2TSWriter::MPEG2TSWriter(
void *cookie,
ssize_t (*write)(void *cookie, const void *data, size_t size))
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 9f20b1d..beb6f20 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -345,31 +345,6 @@ private:
Track &operator=(const Track &);
};
-MPEG4Writer::MPEG4Writer(const char *filename)
- : mFd(-1),
- mInitCheck(NO_INIT),
- mIsRealTimeRecording(true),
- mUse4ByteNalLength(true),
- mUse32BitOffset(true),
- mIsFileSizeLimitExplicitlyRequested(false),
- mPaused(false),
- mStarted(false),
- mWriterThreadStarted(false),
- mOffset(0),
- mMdatOffset(0),
- mEstimatedMoovBoxSize(0),
- mInterleaveDurationUs(1000000),
- mLatitudex10000(0),
- mLongitudex10000(0),
- mAreGeoTagsAvailable(false),
- mStartTimeOffsetMs(-1) {
-
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- mInitCheck = OK;
- }
-}
-
MPEG4Writer::MPEG4Writer(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0? NO_INIT: OK),
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 6ca123a..50e6bd0 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -36,6 +36,7 @@
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaFilter.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/NativeWindowWrapper.h>
#include <private/android_filesystem_config.h>
@@ -189,7 +190,16 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) {
// quickly, violating the OpenMAX specs, until that is remedied
// we need to invest in an extra looper to free the main event
// queue.
- mCodec = new ACodec;
+
+ if (nameIsType || !strncasecmp(name.c_str(), "omx.", 4)) {
+ mCodec = new ACodec;
+ } else if (!nameIsType
+ && !strncasecmp(name.c_str(), "android.filter.", 15)) {
+ mCodec = new MediaFilter;
+ } else {
+ return NAME_NOT_FOUND;
+ }
+
bool needDedicatedLooper = false;
if (nameIsType && !strncasecmp(name.c_str(), "video/", 6)) {
needDedicatedLooper = true;
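
MediaCodec::init() now dispatches on the component name: codecs created by MIME type and names starting with "omx." still get an ACodec, names starting with "android.filter." get the new MediaFilter back end (see media/libstagefright/filters/ in the diffstat), and any other name fails with NAME_NOT_FOUND instead of silently falling back to ACodec.
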
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index c7c6f34..b13877d 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -38,21 +38,6 @@
namespace android {
-MediaMuxer::MediaMuxer(const char *path, OutputFormat format)
- : mFormat(format),
- mState(UNINITIALIZED) {
- if (format == OUTPUT_FORMAT_MPEG_4) {
- mWriter = new MPEG4Writer(path);
- } else if (format == OUTPUT_FORMAT_WEBM) {
- mWriter = new WebmWriter(path);
- }
-
- if (mWriter != NULL) {
- mFileMeta = new MetaData;
- mState = INITIALIZED;
- }
-}
-
MediaMuxer::MediaMuxer(int fd, OutputFormat format)
: mFormat(format),
mState(UNINITIALIZED) {
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index b3a79a0..c0be136 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -344,6 +344,28 @@ status_t convertMetaDataToMessage(
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-0", buffer);
+
+ if (!meta->findData(kKeyOpusCodecDelay, &type, &data, &size)) {
+ return -EINVAL;
+ }
+
+ buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-1", buffer);
+
+ if (!meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size)) {
+ return -EINVAL;
+ }
+
+ buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-2", buffer);
}
*format = msg;
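
Note: the two buffers added above carry the Opus codec delay and seek pre-roll next to the identification header in csd-0. A consumer receiving the resulting format message could read them back roughly as below; the assumption that each value is a single 64-bit nanosecond count mirrors how the Opus metadata keys are used elsewhere and is not spelled out in this hunk:

    sp<ABuffer> csd1, csd2;
    if (msg->findBuffer("csd-1", &csd1) && csd1->size() == sizeof(int64_t)) {
        int64_t codecDelayNs = *(int64_t *)csd1->data();   // decoder output to drop at start
    }
    if (msg->findBuffer("csd-2", &csd2) && csd2->size() == sizeof(int64_t)) {
        int64_t seekPreRollNs = *(int64_t *)csd2->data();  // decode-but-discard window after a seek
    }
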
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index 8a95643..6e6a78a 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -38,7 +38,10 @@ SoftVPX::SoftVPX(
NULL /* profileLevels */, 0 /* numProfileLevels */,
320 /* width */, 240 /* height */, callbacks, appData, component),
mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9),
+ mEOSStatus(INPUT_DATA_AVAILABLE),
mCtx(NULL),
+ mFrameParallelMode(false),
+ mTimeStampIdx(0),
mImg(NULL) {
// arbitrary from avc/hevc as vpx does not specify a min compression ratio
const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4;
@@ -51,9 +54,7 @@ SoftVPX::SoftVPX(
}
SoftVPX::~SoftVPX() {
- vpx_codec_destroy((vpx_codec_ctx_t *)mCtx);
- delete (vpx_codec_ctx_t *)mCtx;
- mCtx = NULL;
+ destroyDecoder();
}
static int GetCPUCoreCount() {
@@ -73,12 +74,19 @@ status_t SoftVPX::initDecoder() {
mCtx = new vpx_codec_ctx_t;
vpx_codec_err_t vpx_err;
vpx_codec_dec_cfg_t cfg;
+ vpx_codec_flags_t flags;
memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t));
+ memset(&flags, 0, sizeof(vpx_codec_flags_t));
cfg.threads = GetCPUCoreCount();
+
+ if (mFrameParallelMode) {
+ flags |= VPX_CODEC_USE_FRAME_THREADING;
+ }
+
if ((vpx_err = vpx_codec_dec_init(
(vpx_codec_ctx_t *)mCtx,
mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo,
- &cfg, 0))) {
+ &cfg, flags))) {
ALOGE("on2 decoder failed to initialize. (%d)", vpx_err);
return UNKNOWN_ERROR;
}
@@ -86,86 +94,155 @@ status_t SoftVPX::initDecoder() {
return OK;
}
+status_t SoftVPX::destroyDecoder() {
+ vpx_codec_destroy((vpx_codec_ctx_t *)mCtx);
+ delete (vpx_codec_ctx_t *)mCtx;
+ mCtx = NULL;
+ return OK;
+}
+
+bool SoftVPX::outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset) {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+ BufferInfo *outInfo = NULL;
+ OMX_BUFFERHEADERTYPE *outHeader = NULL;
+ vpx_codec_iter_t iter = NULL;
+
+ if (flushDecoder && mFrameParallelMode) {
+ // Flush decoder by passing NULL data ptr and 0 size.
+ // Ideally, this should never fail.
+ if (vpx_codec_decode((vpx_codec_ctx_t *)mCtx, NULL, 0, NULL, 0)) {
+ ALOGE("Failed to flush on2 decoder.");
+ return false;
+ }
+ }
+
+ if (!display) {
+ if (!flushDecoder) {
+ ALOGE("Invalid operation.");
+ return false;
+ }
+ // Drop all the decoded frames in decoder.
+ while ((mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter))) {
+ }
+ return true;
+ }
+
+ while (!outQueue.empty()) {
+ if (mImg == NULL) {
+ mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
+ if (mImg == NULL) {
+ break;
+ }
+ }
+ uint32_t width = mImg->d_w;
+ uint32_t height = mImg->d_h;
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+ CHECK_EQ(mImg->fmt, IMG_FMT_I420);
+ handlePortSettingsChange(portWillReset, width, height);
+ if (*portWillReset) {
+ return true;
+ }
+
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = (width * height * 3) / 2;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv;
+
+ uint8_t *dst = outHeader->pBuffer;
+ const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y];
+ const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U];
+ const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V];
+ size_t srcYStride = mImg->stride[PLANE_Y];
+ size_t srcUStride = mImg->stride[PLANE_U];
+ size_t srcVStride = mImg->stride[PLANE_V];
+ copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
+
+ mImg = NULL;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+
+ if (!eos) {
+ return true;
+ }
+
+ if (!outQueue.empty()) {
+ outInfo = *outQueue.begin();
+ outQueue.erase(outQueue.begin());
+ outHeader = outInfo->mHeader;
+ outHeader->nTimeStamp = 0;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ mEOSStatus = OUTPUT_FRAMES_FLUSHED;
+ }
+ return true;
+}
+
void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
- if (mOutputPortSettingsChange != NONE) {
+ if (mOutputPortSettingsChange != NONE || mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
return;
}
List<BufferInfo *> &inQueue = getPortQueue(0);
List<BufferInfo *> &outQueue = getPortQueue(1);
bool EOSseen = false;
+ vpx_codec_err_t err;
+ bool portWillReset = false;
+
+ while ((mEOSStatus == INPUT_EOS_SEEN || !inQueue.empty())
+ && !outQueue.empty()) {
+        // Output any pending frames left over from the last port reset or decoder flush.
+ if (mEOSStatus == INPUT_EOS_SEEN || mImg != NULL) {
+ if (!outputBuffers(
+ mEOSStatus == INPUT_EOS_SEEN, true /* display */,
+ mEOSStatus == INPUT_EOS_SEEN, &portWillReset)) {
+ ALOGE("on2 decoder failed to output frame.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ if (portWillReset || mEOSStatus == OUTPUT_FRAMES_FLUSHED ||
+ mEOSStatus == INPUT_EOS_SEEN) {
+ return;
+ }
+ }
- while (!inQueue.empty() && !outQueue.empty()) {
BufferInfo *inInfo = *inQueue.begin();
OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+ mTimeStamps[mTimeStampIdx] = inHeader->nTimeStamp;
BufferInfo *outInfo = *outQueue.begin();
OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-
if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ mEOSStatus = INPUT_EOS_SEEN;
EOSseen = true;
- if (inHeader->nFilledLen == 0) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
-
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
-
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
- return;
- }
}
- if (mImg == NULL) {
- if (vpx_codec_decode(
- (vpx_codec_ctx_t *)mCtx,
- inHeader->pBuffer + inHeader->nOffset,
- inHeader->nFilledLen,
- NULL,
- 0)) {
- ALOGE("on2 decoder failed to decode frame.");
-
- notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
- return;
- }
- vpx_codec_iter_t iter = NULL;
- mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
+ if (inHeader->nFilledLen > 0 &&
+ vpx_codec_decode((vpx_codec_ctx_t *)mCtx,
+ inHeader->pBuffer + inHeader->nOffset,
+ inHeader->nFilledLen,
+ &mTimeStamps[mTimeStampIdx], 0)) {
+ ALOGE("on2 decoder failed to decode frame.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
}
+ mTimeStampIdx = (mTimeStampIdx + 1) % kNumBuffers;
- if (mImg != NULL) {
- CHECK_EQ(mImg->fmt, IMG_FMT_I420);
-
- uint32_t width = mImg->d_w;
- uint32_t height = mImg->d_h;
- bool portWillReset = false;
- handlePortSettingsChange(&portWillReset, width, height);
- if (portWillReset) {
- return;
- }
-
- outHeader->nOffset = 0;
- outHeader->nFilledLen = (width * height * 3) / 2;
- outHeader->nFlags = EOSseen ? OMX_BUFFERFLAG_EOS : 0;
- outHeader->nTimeStamp = inHeader->nTimeStamp;
-
- uint8_t *dst = outHeader->pBuffer;
- const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y];
- const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U];
- const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V];
- size_t srcYStride = mImg->stride[PLANE_Y];
- size_t srcUStride = mImg->stride[PLANE_U];
- size_t srcVStride = mImg->stride[PLANE_V];
- copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride);
-
- mImg = NULL;
- outInfo->mOwnedByUs = false;
- outQueue.erase(outQueue.begin());
- outInfo = NULL;
- notifyFillBufferDone(outHeader);
- outHeader = NULL;
+ if (!outputBuffers(
+ EOSseen /* flushDecoder */, true /* display */, EOSseen, &portWillReset)) {
+ ALOGE("on2 decoder failed to output frame.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ if (portWillReset) {
+ return;
}
inInfo->mOwnedByUs = false;
@@ -176,6 +253,30 @@ void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) {
}
}
+void SoftVPX::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == kInputPortIndex) {
+ bool portWillReset = false;
+ if (!outputBuffers(
+ true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) {
+ ALOGE("Failed to flush decoder.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ mEOSStatus = INPUT_DATA_AVAILABLE;
+ }
+}
+
+void SoftVPX::onReset() {
+ bool portWillReset = false;
+ if (!outputBuffers(
+ true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) {
+ ALOGW("Failed to flush decoder. Try to hard reset decoder");
+ destroyDecoder();
+ initDecoder();
+ }
+ mEOSStatus = INPUT_DATA_AVAILABLE;
+}
+
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h
index 8f68693..8ccbae2 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.h
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h
@@ -38,6 +38,8 @@ protected:
virtual ~SoftVPX();
virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
private:
enum {
@@ -49,11 +51,21 @@ private:
MODE_VP9
} mMode;
- void *mCtx;
+ enum {
+ INPUT_DATA_AVAILABLE, // VPX component is ready to decode data.
+ INPUT_EOS_SEEN, // VPX component saw EOS and is flushing On2 decoder.
+ OUTPUT_FRAMES_FLUSHED // VPX component finished flushing On2 decoder.
+ } mEOSStatus;
+ void *mCtx;
+ bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder.
+ OMX_TICKS mTimeStamps[kNumBuffers];
+ uint8_t mTimeStampIdx;
vpx_image_t *mImg;
status_t initDecoder();
+ status_t destroyDecoder();
+ bool outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset);
DISALLOW_EVIL_CONSTRUCTORS(SoftVPX);
};
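
Note: the new mTimeStamps/mTimeStampIdx pair forms a small timestamp pass-through ring. The address of a slot is handed to libvpx as the user_priv argument of vpx_codec_decode() and comes back attached to the decoded image, so output frames keep their presentation time even when the frame-parallel VP9 decoder delays or reorders output. Schematically (a fragment distilled from the hunks above, not a standalone program):

    // input side (onQueueFilled):
    mTimeStamps[mTimeStampIdx] = inHeader->nTimeStamp;
    vpx_codec_decode((vpx_codec_ctx_t *)mCtx,
            inHeader->pBuffer + inHeader->nOffset, inHeader->nFilledLen,
            &mTimeStamps[mTimeStampIdx], 0 /* deadline */);
    mTimeStampIdx = (mTimeStampIdx + 1) % kNumBuffers;

    // output side (outputBuffers):
    mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter);
    outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv;
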
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
index b8084ae..6322dc2 100644
--- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -345,9 +345,15 @@ void SoftOpus::onQueueFilled(OMX_U32 portIndex) {
}
uint8_t channel_mapping[kMaxChannels] = {0};
- memcpy(&channel_mapping,
- kDefaultOpusChannelLayout,
- kMaxChannelsWithDefaultLayout);
+ if (mHeader->channels <= kMaxChannelsWithDefaultLayout) {
+ memcpy(&channel_mapping,
+ kDefaultOpusChannelLayout,
+ kMaxChannelsWithDefaultLayout);
+ } else {
+ memcpy(&channel_mapping,
+ mHeader->stream_map,
+ mHeader->channels);
+ }
int status = OPUS_INVALID_STATE;
mDecoder = opus_multistream_decoder_create(kRate,
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index a78465e..21da707 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -98,34 +98,49 @@ void SoftwareRenderer::resetFormatIfChanged(const sp<AMessage> &format) {
mCropWidth = mCropRight - mCropLeft + 1;
mCropHeight = mCropBottom - mCropTop + 1;
- int halFormat;
- size_t bufWidth, bufHeight;
-
- switch ((int)mColorFormat) {
- case OMX_COLOR_FormatYUV420Planar:
- case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
- case OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar:
- case OMX_COLOR_FormatYUV420SemiPlanar:
- {
- if (!runningInEmulator()) {
+ // by default convert everything to RGB565
+ int halFormat = HAL_PIXEL_FORMAT_RGB_565;
+ size_t bufWidth = mCropWidth;
+ size_t bufHeight = mCropHeight;
+
+    // hardware has YV12 and RGBA8888 support, so convert known formats
+ if (!runningInEmulator()) {
+ switch (mColorFormat) {
+ case OMX_COLOR_FormatYUV420Planar:
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
+ {
halFormat = HAL_PIXEL_FORMAT_YV12;
bufWidth = (mCropWidth + 1) & ~1;
bufHeight = (mCropHeight + 1) & ~1;
break;
}
-
- // fall through.
+ case OMX_COLOR_Format24bitRGB888:
+ {
+ halFormat = HAL_PIXEL_FORMAT_RGB_888;
+ bufWidth = (mCropWidth + 1) & ~1;
+ bufHeight = (mCropHeight + 1) & ~1;
+ break;
+ }
+ case OMX_COLOR_Format32bitARGB8888:
+ case OMX_COLOR_Format32BitRGBA8888:
+ {
+ halFormat = HAL_PIXEL_FORMAT_RGBA_8888;
+ bufWidth = (mCropWidth + 1) & ~1;
+ bufHeight = (mCropHeight + 1) & ~1;
+ break;
+ }
+ default:
+ {
+ break;
+ }
}
+ }
- default:
- halFormat = HAL_PIXEL_FORMAT_RGB_565;
- bufWidth = mCropWidth;
- bufHeight = mCropHeight;
-
- mConverter = new ColorConverter(
- mColorFormat, OMX_COLOR_Format16bitRGB565);
- CHECK(mConverter->isValid());
- break;
+ if (halFormat == HAL_PIXEL_FORMAT_RGB_565) {
+ mConverter = new ColorConverter(
+ mColorFormat, OMX_COLOR_Format16bitRGB565);
+ CHECK(mConverter->isValid());
}
CHECK(mNativeWindow != NULL);
@@ -202,6 +217,8 @@ void SoftwareRenderer::render(
CHECK_EQ(0, mapper.lock(
buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+ // TODO move the other conversions also into ColorConverter, and
+ // fix cropping issues (when mCropLeft/Top != 0 or mWidth != mCropWidth)
if (mConverter) {
mConverter->convert(
data,
@@ -212,7 +229,8 @@ void SoftwareRenderer::render(
0, 0, mCropWidth - 1, mCropHeight - 1);
} else if (mColorFormat == OMX_COLOR_FormatYUV420Planar) {
const uint8_t *src_y = (const uint8_t *)data;
- const uint8_t *src_u = (const uint8_t *)data + mWidth * mHeight;
+ const uint8_t *src_u =
+ (const uint8_t *)data + mWidth * mHeight;
const uint8_t *src_v = src_u + (mWidth / 2 * mHeight / 2);
uint8_t *dst_y = (uint8_t *)dst;
@@ -239,13 +257,10 @@ void SoftwareRenderer::render(
dst_v += dst_c_stride;
}
} else if (mColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
- || mColorFormat == OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar
|| mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
- const uint8_t *src_y =
- (const uint8_t *)data;
-
- const uint8_t *src_uv =
- (const uint8_t *)data + mWidth * (mHeight - mCropTop / 2);
+ const uint8_t *src_y = (const uint8_t *)data;
+ const uint8_t *src_uv = (const uint8_t *)data
+ + mWidth * (mHeight - mCropTop / 2);
uint8_t *dst_y = (uint8_t *)dst;
@@ -273,6 +288,38 @@ void SoftwareRenderer::render(
dst_u += dst_c_stride;
dst_v += dst_c_stride;
}
+ } else if (mColorFormat == OMX_COLOR_Format24bitRGB888) {
+ uint8_t* srcPtr = (uint8_t*)data;
+ uint8_t* dstPtr = (uint8_t*)dst;
+
+ for (size_t y = 0; y < (size_t)mCropHeight; ++y) {
+ memcpy(dstPtr, srcPtr, mCropWidth * 3);
+ srcPtr += mWidth * 3;
+ dstPtr += buf->stride * 3;
+ }
+ } else if (mColorFormat == OMX_COLOR_Format32bitARGB8888) {
+ uint8_t *srcPtr, *dstPtr;
+
+ for (size_t y = 0; y < (size_t)mCropHeight; ++y) {
+ srcPtr = (uint8_t*)data + mWidth * 4 * y;
+ dstPtr = (uint8_t*)dst + buf->stride * 4 * y;
+ for (size_t x = 0; x < (size_t)mCropWidth; ++x) {
+ uint8_t a = *srcPtr++;
+ for (size_t i = 0; i < 3; ++i) { // copy RGB
+ *dstPtr++ = *srcPtr++;
+ }
+ *dstPtr++ = a; // alpha last (ARGB to RGBA)
+ }
+ }
+ } else if (mColorFormat == OMX_COLOR_Format32BitRGBA8888) {
+ uint8_t* srcPtr = (uint8_t*)data;
+ uint8_t* dstPtr = (uint8_t*)dst;
+
+ for (size_t y = 0; y < (size_t)mCropHeight; ++y) {
+ memcpy(dstPtr, srcPtr, mCropWidth * 4);
+ srcPtr += mWidth * 4;
+ dstPtr += buf->stride * 4;
+ }
} else {
LOG_ALWAYS_FATAL("bad color format %#x", mColorFormat);
}
diff --git a/media/libstagefright/filters/Android.mk b/media/libstagefright/filters/Android.mk
new file mode 100644
index 0000000..36ab444
--- /dev/null
+++ b/media/libstagefright/filters/Android.mk
@@ -0,0 +1,27 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ ColorConvert.cpp \
+ GraphicBufferListener.cpp \
+ IntrinsicBlurFilter.cpp \
+ MediaFilter.cpp \
+ RSFilter.cpp \
+ SaturationFilter.cpp \
+ saturationARGB.rs \
+ SimpleFilter.cpp \
+ ZeroFilter.cpp
+
+LOCAL_C_INCLUDES := \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/rs/cpp \
+ $(TOP)/frameworks/rs \
+
+intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,)
+LOCAL_C_INCLUDES += $(intermediates)
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE:= libstagefright_mediafilter
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/filters/ColorConvert.cpp b/media/libstagefright/filters/ColorConvert.cpp
new file mode 100644
index 0000000..a5039f9
--- /dev/null
+++ b/media/libstagefright/filters/ColorConvert.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ColorConvert.h"
+
+#ifndef max
+#define max(a,b) ((a) > (b) ? (a) : (b))
+#endif
+#ifndef min
+#define min(a,b) ((a) < (b) ? (a) : (b))
+#endif
+
+namespace android {
+
+void YUVToRGB(
+ int32_t y, int32_t u, int32_t v,
+ int32_t* r, int32_t* g, int32_t* b) {
+ y -= 16;
+ u -= 128;
+ v -= 128;
+
+ *b = 1192 * y + 2066 * u;
+ *g = 1192 * y - 833 * v - 400 * u;
+ *r = 1192 * y + 1634 * v;
+
+ *r = min(262143, max(0, *r));
+ *g = min(262143, max(0, *g));
+ *b = min(262143, max(0, *b));
+
+ *r >>= 10;
+ *g >>= 10;
+ *b >>= 10;
+}
+
+void convertYUV420spToARGB(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest) {
+ const int32_t bytes_per_pixel = 2;
+
+ for (int32_t i = 0; i < height; i++) {
+ for (int32_t j = 0; j < width; j++) {
+ int32_t y = *(pY + i * width + j);
+ int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
+ int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
+
+ int32_t r, g, b;
+ YUVToRGB(y, u, v, &r, &g, &b);
+
+ *dest++ = 0xFF;
+ *dest++ = r;
+ *dest++ = g;
+ *dest++ = b;
+ }
+ }
+}
+
+void convertYUV420spToRGB888(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest) {
+ const int32_t bytes_per_pixel = 2;
+
+ for (int32_t i = 0; i < height; i++) {
+ for (int32_t j = 0; j < width; j++) {
+ int32_t y = *(pY + i * width + j);
+ int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
+ int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
+
+ int32_t r, g, b;
+ YUVToRGB(y, u, v, &r, &g, &b);
+
+ *dest++ = r;
+ *dest++ = g;
+ *dest++ = b;
+ }
+ }
+}
+
+// HACK - not even slightly optimized
+// TODO: remove when RGBA support is added to SoftwareRenderer
+void convertRGBAToARGB(
+ uint8_t *src, int32_t width, int32_t height, uint32_t stride,
+ uint8_t *dest) {
+    for (int32_t i = 0; i < height; ++i) {
+        for (int32_t j = 0; j < width; ++j) {
+ uint8_t r = *src++;
+ uint8_t g = *src++;
+ uint8_t b = *src++;
+ uint8_t a = *src++;
+ *dest++ = a;
+ *dest++ = r;
+ *dest++ = g;
+ *dest++ = b;
+ }
+ src += (stride - width) * 4;
+ }
+}
+
+} // namespace android
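
Note: the constants in YUVToRGB() are the usual BT.601 limited-range coefficients scaled into 10-bit fixed point (1.164 -> 1192, 1.596 -> 1634, 0.813 -> 833, 0.391 -> 400, 2.018 -> 2066), with results clamped to 2^18 - 1 before the final >> 10. A standalone sanity check (not part of the patch) that mid-grey YUV maps back to roughly mid-grey RGB:

    #include <stdint.h>
    #include <stdio.h>

    int main() {
        int32_t y = 128 - 16, u = 128 - 128, v = 128 - 128;  // centered, as in YUVToRGB()
        int32_t r = (1192 * y + 1634 * v) >> 10;             // 130
        int32_t g = (1192 * y - 833 * v - 400 * u) >> 10;    // 130
        int32_t b = (1192 * y + 2066 * u) >> 10;             // 130
        printf("r=%d g=%d b=%d\n", r, g, b);
        return 0;
    }
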
diff --git a/media/libstagefright/filters/ColorConvert.h b/media/libstagefright/filters/ColorConvert.h
new file mode 100644
index 0000000..13faa02
--- /dev/null
+++ b/media/libstagefright/filters/ColorConvert.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef COLOR_CONVERT_H_
+#define COLOR_CONVERT_H_
+
+#include <inttypes.h>
+
+namespace android {
+
+void YUVToRGB(
+ int32_t y, int32_t u, int32_t v,
+ int32_t* r, int32_t* g, int32_t* b);
+
+void convertYUV420spToARGB(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest);
+
+void convertYUV420spToRGB888(
+ uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height,
+ uint8_t *dest);
+
+// TODO: remove when RGBA support is added to SoftwareRenderer
+void convertRGBAToARGB(
+ uint8_t *src, int32_t width, int32_t height, uint32_t stride,
+ uint8_t *dest);
+
+} // namespace android
+
+#endif // COLOR_CONVERT_H_
diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp
new file mode 100644
index 0000000..fa38192
--- /dev/null
+++ b/media/libstagefright/filters/GraphicBufferListener.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "GraphicBufferListener"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include "GraphicBufferListener.h"
+
+namespace android {
+
+status_t GraphicBufferListener::init(
+ const sp<AMessage> &notify,
+ size_t bufferWidth, size_t bufferHeight, size_t bufferCount) {
+ mNotify = notify;
+
+ String8 name("GraphicBufferListener");
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setConsumerName(name);
+ mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mConsumer->setConsumerUsageBits(GRALLOC_USAGE_SW_READ_OFTEN);
+
+ status_t err = mConsumer->setMaxAcquiredBufferCount(bufferCount);
+ if (err != NO_ERROR) {
+ ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
+ bufferCount, err);
+ return err;
+ }
+
+ wp<BufferQueue::ConsumerListener> listener =
+ static_cast<BufferQueue::ConsumerListener*>(this);
+ sp<BufferQueue::ProxyConsumerListener> proxy =
+ new BufferQueue::ProxyConsumerListener(listener);
+
+ err = mConsumer->consumerConnect(proxy, false);
+ if (err != NO_ERROR) {
+ ALOGE("Error connecting to BufferQueue: %s (%d)",
+ strerror(-err), err);
+ return err;
+ }
+
+ ALOGV("init() successful.");
+
+ return OK;
+}
+
+void GraphicBufferListener::onFrameAvailable(const BufferItem& /* item */) {
+ ALOGV("onFrameAvailable() called");
+
+ {
+ Mutex::Autolock autoLock(mMutex);
+ mNumFramesAvailable++;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+    notify->setWhat(kWhatFrameAvailable);
+    notify->post();
+}
+
+void GraphicBufferListener::onBuffersReleased() {
+ ALOGV("onBuffersReleased() called");
+ // nothing to do
+}
+
+void GraphicBufferListener::onSidebandStreamChanged() {
+ ALOGW("GraphicBufferListener cannot consume sideband streams.");
+ // nothing to do
+}
+
+BufferQueue::BufferItem GraphicBufferListener::getBufferItem() {
+ BufferQueue::BufferItem item;
+
+ {
+ Mutex::Autolock autoLock(mMutex);
+ if (mNumFramesAvailable <= 0) {
+            ALOGE("getBufferItem() called with no frames available");
+ return item;
+ }
+ mNumFramesAvailable--;
+ }
+
+ status_t err = mConsumer->acquireBuffer(&item, 0);
+ if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
+ // shouldn't happen, since we track num frames available
+ ALOGE("frame was not available");
+ item.mBuf = -1;
+ return item;
+ } else if (err != OK) {
+ ALOGE("acquireBuffer returned err=%d", err);
+ item.mBuf = -1;
+ return item;
+ }
+
+ // Wait for it to become available.
+ err = item.mFence->waitForever("GraphicBufferListener::getBufferItem");
+ if (err != OK) {
+ ALOGW("failed to wait for buffer fence: %d", err);
+ // keep going
+ }
+
+ // If this is the first time we're seeing this buffer, add it to our
+ // slot table.
+ if (item.mGraphicBuffer != NULL) {
+ ALOGV("setting mBufferSlot %d", item.mBuf);
+ mBufferSlot[item.mBuf] = item.mGraphicBuffer;
+ }
+
+ return item;
+}
+
+sp<GraphicBuffer> GraphicBufferListener::getBuffer(
+ BufferQueue::BufferItem item) {
+ sp<GraphicBuffer> buf;
+ if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) {
+ ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf);
+ return buf;
+ }
+
+ buf = mBufferSlot[item.mBuf];
+ CHECK(buf.get() != NULL);
+
+ return buf;
+}
+
+status_t GraphicBufferListener::releaseBuffer(
+ BufferQueue::BufferItem item) {
+ if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) {
+ ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf);
+ return ERROR_OUT_OF_RANGE;
+ }
+
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+
+ return OK;
+}
+
+} // namespace android
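
Note: the intended call pattern, mirroring how MediaFilter uses this class later in this patch (schematic, not compilable on its own; the handler id, dimensions and buffer count are placeholders):

    sp<GraphicBufferListener> listener = new GraphicBufferListener;
    sp<AMessage> notify = new AMessage();
    notify->setTarget(handlerId);                 // handler that receives kWhatFrameAvailable
    listener->init(notify, width, height, 4 /* bufferCount */);

    // The producer end is handed to whoever renders into the input surface.
    sp<IGraphicBufferProducer> producer = listener->getIGraphicBufferProducer();

    // On each kWhatFrameAvailable message:
    BufferQueue::BufferItem item = listener->getBufferItem();
    sp<GraphicBuffer> buf = listener->getBuffer(item);
    // ... read pixels via buf->lock(GraphicBuffer::USAGE_SW_READ_OFTEN, &ptr) ...
    listener->releaseBuffer(item);
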
diff --git a/media/libstagefright/filters/GraphicBufferListener.h b/media/libstagefright/filters/GraphicBufferListener.h
new file mode 100644
index 0000000..b3e0ee3
--- /dev/null
+++ b/media/libstagefright/filters/GraphicBufferListener.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GRAPHIC_BUFFER_LISTENER_H_
+#define GRAPHIC_BUFFER_LISTENER_H_
+
+#include <gui/BufferQueue.h>
+
+namespace android {
+
+struct AMessage;
+
+struct GraphicBufferListener : public BufferQueue::ConsumerListener {
+public:
+    GraphicBufferListener() : mNumFramesAvailable(0) {}
+
+ status_t init(
+ const sp<AMessage> &notify,
+ size_t bufferWidth, size_t bufferHeight, size_t bufferCount);
+
+ virtual void onFrameAvailable(const BufferItem& item);
+ virtual void onBuffersReleased();
+ virtual void onSidebandStreamChanged();
+
+ // Returns the handle to the producer side of the BufferQueue. Buffers
+ // queued on this will be received by GraphicBufferListener.
+ sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
+ return mProducer;
+ }
+
+ BufferQueue::BufferItem getBufferItem();
+ sp<GraphicBuffer> getBuffer(BufferQueue::BufferItem item);
+ status_t releaseBuffer(BufferQueue::BufferItem item);
+
+ enum {
+ kWhatFrameAvailable = 'frav',
+ };
+
+private:
+ sp<AMessage> mNotify;
+ size_t mNumFramesAvailable;
+
+ mutable Mutex mMutex;
+
+ // Our BufferQueue interfaces. mProducer is passed to the producer through
+ // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
+ // the buffers queued by the producer.
+ sp<IGraphicBufferProducer> mProducer;
+ sp<IGraphicBufferConsumer> mConsumer;
+
+ // Cache of GraphicBuffers from the buffer queue.
+ sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS];
+};
+
+} // namespace android
+
+#endif // GRAPHIC_BUFFER_LISTENER_H_
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.cpp b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
new file mode 100644
index 0000000..cbcf699
--- /dev/null
+++ b/media/libstagefright/filters/IntrinsicBlurFilter.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IntrinsicBlurFilter"
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "IntrinsicBlurFilter.h"
+
+namespace android {
+
+status_t IntrinsicBlurFilter::configure(const sp<AMessage> &msg) {
+ status_t err = SimpleFilter::configure(msg);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!msg->findString("cacheDir", &mCacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ return OK;
+}
+
+status_t IntrinsicBlurFilter::start() {
+ // TODO: use a single RS context object for entire application
+ mRS = new RSC::RS();
+
+ if (!mRS->init(mCacheDir.c_str())) {
+ ALOGE("Failed to initialize RenderScript context.");
+ return NO_INIT;
+ }
+
+ // 32-bit elements for ARGB8888
+ RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
+
+ RSC::Type::Builder tb(mRS, e);
+ tb.setX(mWidth);
+ tb.setY(mHeight);
+ RSC::sp<const RSC::Type> t = tb.create();
+
+ mAllocIn = RSC::Allocation::createTyped(mRS, t);
+ mAllocOut = RSC::Allocation::createTyped(mRS, t);
+
+ mBlur = RSC::ScriptIntrinsicBlur::create(mRS, e);
+ mBlur->setRadius(mBlurRadius);
+ mBlur->setInput(mAllocIn);
+
+ return OK;
+}
+
+void IntrinsicBlurFilter::reset() {
+ mBlur.clear();
+ mAllocOut.clear();
+ mAllocIn.clear();
+ mRS.clear();
+}
+
+status_t IntrinsicBlurFilter::setParameters(const sp<AMessage> &msg) {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ float blurRadius;
+ if (params->findFloat("blur-radius", &blurRadius)) {
+ mBlurRadius = blurRadius;
+ }
+
+ return OK;
+}
+
+status_t IntrinsicBlurFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
+ mBlur->forEach(mAllocOut);
+ mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
+
+ return OK;
+}
+
+} // namespace android
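
Note: setParameters() above looks for a float "blur-radius" inside the "params" sub-message that MediaFilter attaches. From the MediaCodec client side that would look roughly like the sketch below (hypothetical values; note that MediaFilter::onSetParameters later in this patch only accepts parameters while the component is not started):

    sp<AMessage> params = new AMessage;
    params->setFloat("blur-radius", 15.0f);       // hypothetical radius
    filter->setParameters(params);                // 'filter' is the MediaCodec created
                                                  // for "android.filter.intrinsicblur"
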
diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/IntrinsicBlurFilter.h
new file mode 100644
index 0000000..4707ab7
--- /dev/null
+++ b/media/libstagefright/filters/IntrinsicBlurFilter.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef INTRINSIC_BLUR_FILTER_H_
+#define INTRINSIC_BLUR_FILTER_H_
+
+#include "RenderScript.h"
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct IntrinsicBlurFilter : public SimpleFilter {
+public:
+ IntrinsicBlurFilter() : mBlurRadius(1.f) {};
+
+ virtual status_t configure(const sp<AMessage> &msg);
+ virtual status_t start();
+ virtual void reset();
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~IntrinsicBlurFilter() {};
+
+private:
+ AString mCacheDir;
+ RSC::sp<RSC::RS> mRS;
+ RSC::sp<RSC::Allocation> mAllocIn;
+ RSC::sp<RSC::Allocation> mAllocOut;
+ RSC::sp<RSC::ScriptIntrinsicBlur> mBlur;
+ float mBlurRadius;
+};
+
+} // namespace android
+
+#endif // INTRINSIC_BLUR_FILTER_H_
diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp
new file mode 100644
index 0000000..c5289b6
--- /dev/null
+++ b/media/libstagefright/filters/MediaFilter.cpp
@@ -0,0 +1,816 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaFilter"
+
+#include <inttypes.h>
+#include <utils/Trace.h>
+
+#include <binder/MemoryDealer.h>
+
+#include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaFilter.h>
+
+#include "ColorConvert.h"
+#include "GraphicBufferListener.h"
+#include "IntrinsicBlurFilter.h"
+#include "RSFilter.h"
+#include "SaturationFilter.h"
+#include "ZeroFilter.h"
+
+namespace android {
+
+// parameter: number of input and output buffers
+static const size_t kBufferCountActual = 4;
+
+MediaFilter::MediaFilter()
+ : mState(UNINITIALIZED),
+ mGeneration(0),
+ mGraphicBufferListener(NULL) {
+}
+
+MediaFilter::~MediaFilter() {
+}
+
+//////////////////// PUBLIC FUNCTIONS //////////////////////////////////////////
+
+void MediaFilter::setNotificationMessage(const sp<AMessage> &msg) {
+ mNotify = msg;
+}
+
+void MediaFilter::initiateAllocateComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatAllocateComponent);
+ msg->setTarget(id());
+ msg->post();
+}
+
+void MediaFilter::initiateConfigureComponent(const sp<AMessage> &msg) {
+ msg->setWhat(kWhatConfigureComponent);
+ msg->setTarget(id());
+ msg->post();
+}
+
+void MediaFilter::initiateCreateInputSurface() {
+ (new AMessage(kWhatCreateInputSurface, id()))->post();
+}
+
+void MediaFilter::initiateStart() {
+ (new AMessage(kWhatStart, id()))->post();
+}
+
+void MediaFilter::initiateShutdown(bool keepComponentAllocated) {
+ sp<AMessage> msg = new AMessage(kWhatShutdown, id());
+ msg->setInt32("keepComponentAllocated", keepComponentAllocated);
+ msg->post();
+}
+
+void MediaFilter::signalFlush() {
+ (new AMessage(kWhatFlush, id()))->post();
+}
+
+void MediaFilter::signalResume() {
+ (new AMessage(kWhatResume, id()))->post();
+}
+
+// nothing to do
+void MediaFilter::signalRequestIDRFrame() {
+ return;
+}
+
+void MediaFilter::signalSetParameters(const sp<AMessage> &params) {
+ sp<AMessage> msg = new AMessage(kWhatSetParameters, id());
+ msg->setMessage("params", params);
+ msg->post();
+}
+
+void MediaFilter::signalEndOfInputStream() {
+ (new AMessage(kWhatSignalEndOfInputStream, id()))->post();
+}
+
+void MediaFilter::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatAllocateComponent:
+ {
+ onAllocateComponent(msg);
+ break;
+ }
+ case kWhatConfigureComponent:
+ {
+ onConfigureComponent(msg);
+ break;
+ }
+ case kWhatStart:
+ {
+ onStart();
+ break;
+ }
+ case kWhatProcessBuffers:
+ {
+ processBuffers();
+ break;
+ }
+ case kWhatInputBufferFilled:
+ {
+ onInputBufferFilled(msg);
+ break;
+ }
+ case kWhatOutputBufferDrained:
+ {
+ onOutputBufferDrained(msg);
+ break;
+ }
+ case kWhatShutdown:
+ {
+ onShutdown(msg);
+ break;
+ }
+ case kWhatFlush:
+ {
+ onFlush();
+ break;
+ }
+ case kWhatResume:
+ {
+ // nothing to do
+ break;
+ }
+ case kWhatSetParameters:
+ {
+ onSetParameters(msg);
+ break;
+ }
+ case kWhatCreateInputSurface:
+ {
+ onCreateInputSurface();
+ break;
+ }
+ case GraphicBufferListener::kWhatFrameAvailable:
+ {
+ onInputFrameAvailable();
+ break;
+ }
+ case kWhatSignalEndOfInputStream:
+ {
+ onSignalEndOfInputStream();
+ break;
+ }
+ default:
+ {
+ ALOGE("Message not handled:\n%s", msg->debugString().c_str());
+ break;
+ }
+ }
+}
+
+//////////////////// PORT DESCRIPTION //////////////////////////////////////////
+
+MediaFilter::PortDescription::PortDescription() {
+}
+
+void MediaFilter::PortDescription::addBuffer(
+ IOMX::buffer_id id, const sp<ABuffer> &buffer) {
+ mBufferIDs.push_back(id);
+ mBuffers.push_back(buffer);
+}
+
+size_t MediaFilter::PortDescription::countBuffers() {
+ return mBufferIDs.size();
+}
+
+IOMX::buffer_id MediaFilter::PortDescription::bufferIDAt(size_t index) const {
+ return mBufferIDs.itemAt(index);
+}
+
+sp<ABuffer> MediaFilter::PortDescription::bufferAt(size_t index) const {
+ return mBuffers.itemAt(index);
+}
+
+//////////////////// HELPER FUNCTIONS //////////////////////////////////////////
+
+void MediaFilter::signalProcessBuffers() {
+ (new AMessage(kWhatProcessBuffers, id()))->post();
+}
+
+void MediaFilter::signalError(status_t error) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatError);
+ notify->setInt32("err", error);
+ notify->post();
+}
+
+status_t MediaFilter::allocateBuffersOnPort(OMX_U32 portIndex) {
+ CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
+ const bool isInput = portIndex == kPortIndexInput;
+ const size_t bufferSize = isInput ? mMaxInputSize : mMaxOutputSize;
+
+ CHECK(mDealer[portIndex] == NULL);
+ CHECK(mBuffers[portIndex].isEmpty());
+
+ ALOGV("Allocating %zu buffers of size %zu on %s port",
+ kBufferCountActual, bufferSize,
+ isInput ? "input" : "output");
+
+ size_t totalSize = kBufferCountActual * bufferSize;
+
+ mDealer[portIndex] = new MemoryDealer(totalSize, "MediaFilter");
+
+ for (size_t i = 0; i < kBufferCountActual; ++i) {
+ sp<IMemory> mem = mDealer[portIndex]->allocate(bufferSize);
+ CHECK(mem.get() != NULL);
+
+ BufferInfo info;
+ info.mStatus = BufferInfo::OWNED_BY_US;
+ info.mBufferID = i;
+ info.mGeneration = mGeneration;
+ info.mOutputFlags = 0;
+ info.mData = new ABuffer(mem->pointer(), bufferSize);
+ info.mData->meta()->setInt64("timeUs", 0);
+
+ mBuffers[portIndex].push_back(info);
+
+ if (!isInput) {
+ mAvailableOutputBuffers.push(
+ &mBuffers[portIndex].editItemAt(i));
+ }
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatBuffersAllocated);
+
+ notify->setInt32("portIndex", portIndex);
+
+ sp<PortDescription> desc = new PortDescription;
+
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ const BufferInfo &info = mBuffers[portIndex][i];
+
+ desc->addBuffer(info.mBufferID, info.mData);
+ }
+
+ notify->setObject("portDesc", desc);
+ notify->post();
+
+ return OK;
+}
+
+MediaFilter::BufferInfo* MediaFilter::findBufferByID(
+ uint32_t portIndex, IOMX::buffer_id bufferID,
+ ssize_t *index) {
+ for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
+ BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
+
+ if (info->mBufferID == bufferID) {
+ if (index != NULL) {
+ *index = i;
+ }
+ return info;
+ }
+ }
+
+ TRESPASS();
+
+ return NULL;
+}
+
+void MediaFilter::postFillThisBuffer(BufferInfo *info) {
+ ALOGV("postFillThisBuffer on buffer %d", info->mBufferID);
+ if (mPortEOS[kPortIndexInput]) {
+ return;
+ }
+
+ CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+ info->mGeneration = mGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
+
+ info->mData->meta()->clear();
+ notify->setBuffer("buffer", info->mData);
+
+ sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
+ reply->setInt32("buffer-id", info->mBufferID);
+
+ notify->setMessage("reply", reply);
+
+ info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
+ notify->post();
+}
+
+void MediaFilter::postDrainThisBuffer(BufferInfo *info) {
+ CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
+
+ info->mGeneration = mGeneration;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
+ notify->setInt32("buffer-id", info->mBufferID);
+ notify->setInt32("flags", info->mOutputFlags);
+ notify->setBuffer("buffer", info->mData);
+
+ sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, id());
+ reply->setInt32("buffer-id", info->mBufferID);
+
+ notify->setMessage("reply", reply);
+
+ notify->post();
+
+ info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
+}
+
+void MediaFilter::postEOS() {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatEOS);
+ notify->setInt32("err", ERROR_END_OF_STREAM);
+ notify->post();
+
+ ALOGV("Sent kWhatEOS.");
+}
+
+void MediaFilter::sendFormatChange() {
+ sp<AMessage> notify = mNotify->dup();
+
+ notify->setInt32("what", kWhatOutputFormatChanged);
+
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+ notify->setString("mime", mime.c_str());
+
+ notify->setInt32("stride", mStride);
+ notify->setInt32("slice-height", mSliceHeight);
+ notify->setInt32("color-format", mColorFormatOut);
+ notify->setRect("crop", 0, 0, mStride - 1, mSliceHeight - 1);
+ notify->setInt32("width", mWidth);
+ notify->setInt32("height", mHeight);
+
+ notify->post();
+}
+
+void MediaFilter::requestFillEmptyInput() {
+ if (mPortEOS[kPortIndexInput]) {
+ return;
+ }
+
+ for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
+ BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
+
+ if (info->mStatus == BufferInfo::OWNED_BY_US) {
+ postFillThisBuffer(info);
+ }
+ }
+}
+
+void MediaFilter::processBuffers() {
+ if (mAvailableInputBuffers.empty() || mAvailableOutputBuffers.empty()) {
+ ALOGV("Skipping process (buffers unavailable)");
+ return;
+ }
+
+ if (mPortEOS[kPortIndexOutput]) {
+ // TODO notify caller of queueInput error when it is supported
+ // in MediaCodec
+ ALOGW("Tried to process a buffer after EOS.");
+ return;
+ }
+
+ BufferInfo *inputInfo = mAvailableInputBuffers[0];
+ mAvailableInputBuffers.removeAt(0);
+ BufferInfo *outputInfo = mAvailableOutputBuffers[0];
+ mAvailableOutputBuffers.removeAt(0);
+
+ status_t err;
+ err = mFilter->processBuffers(inputInfo->mData, outputInfo->mData);
+ if (err != (status_t)OK) {
+ outputInfo->mData->meta()->setInt32("err", err);
+ }
+
+ int64_t timeUs;
+ CHECK(inputInfo->mData->meta()->findInt64("timeUs", &timeUs));
+ outputInfo->mData->meta()->setInt64("timeUs", timeUs);
+ outputInfo->mOutputFlags = 0;
+ int32_t eos = 0;
+ if (inputInfo->mData->meta()->findInt32("eos", &eos) && eos != 0) {
+ outputInfo->mOutputFlags |= OMX_BUFFERFLAG_EOS;
+ mPortEOS[kPortIndexOutput] = true;
+ outputInfo->mData->meta()->setInt32("eos", eos);
+ postEOS();
+ ALOGV("Output stream saw EOS.");
+ }
+
+ ALOGV("Processed input buffer %u [%zu], output buffer %u [%zu]",
+ inputInfo->mBufferID, inputInfo->mData->size(),
+ outputInfo->mBufferID, outputInfo->mData->size());
+
+ if (mGraphicBufferListener != NULL) {
+ delete inputInfo;
+ } else {
+ postFillThisBuffer(inputInfo);
+ }
+ postDrainThisBuffer(outputInfo);
+
+ // prevent any corner case where buffers could get stuck in queue
+ signalProcessBuffers();
+}
+
+void MediaFilter::onAllocateComponent(const sp<AMessage> &msg) {
+ CHECK_EQ(mState, UNINITIALIZED);
+
+ CHECK(msg->findString("componentName", &mComponentName));
+ const char* name = mComponentName.c_str();
+ if (!strcasecmp(name, "android.filter.zerofilter")) {
+ mFilter = new ZeroFilter;
+ } else if (!strcasecmp(name, "android.filter.saturation")) {
+ mFilter = new SaturationFilter;
+ } else if (!strcasecmp(name, "android.filter.intrinsicblur")) {
+ mFilter = new IntrinsicBlurFilter;
+ } else if (!strcasecmp(name, "android.filter.RenderScript")) {
+ mFilter = new RSFilter;
+ } else {
+ ALOGE("Unrecognized filter name: %s", name);
+ signalError(NAME_NOT_FOUND);
+ return;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatComponentAllocated);
+ // HACK - need "OMX.google" to use MediaCodec's software renderer
+ notify->setString("componentName", "OMX.google.MediaFilter");
+ notify->post();
+ mState = INITIALIZED;
+ ALOGV("Handled kWhatAllocateComponent.");
+}
+
+void MediaFilter::onConfigureComponent(const sp<AMessage> &msg) {
+ // TODO: generalize to allow audio filters as well as video
+
+ CHECK_EQ(mState, INITIALIZED);
+
+ // get params - at least mime, width & height
+ AString mime;
+ CHECK(msg->findString("mime", &mime));
+ if (strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) {
+ ALOGE("Bad mime: %s", mime.c_str());
+ signalError(BAD_VALUE);
+ return;
+ }
+
+ CHECK(msg->findInt32("width", &mWidth));
+ CHECK(msg->findInt32("height", &mHeight));
+ if (!msg->findInt32("stride", &mStride)) {
+ mStride = mWidth;
+ }
+ if (!msg->findInt32("slice-height", &mSliceHeight)) {
+ mSliceHeight = mHeight;
+ }
+
+ mMaxInputSize = mWidth * mHeight * 4; // room for ARGB8888
+ int32_t maxInputSize;
+ if (msg->findInt32("max-input-size", &maxInputSize)
+ && (size_t)maxInputSize > mMaxInputSize) {
+ mMaxInputSize = maxInputSize;
+ }
+
+ if (!msg->findInt32("color-format", &mColorFormatIn)) {
+ // default to OMX_COLOR_Format32bitARGB8888
+ mColorFormatIn = OMX_COLOR_Format32bitARGB8888;
+ msg->setInt32("color-format", mColorFormatIn);
+ }
+ mColorFormatOut = mColorFormatIn;
+
+ mMaxOutputSize = mWidth * mHeight * 4; // room for ARGB8888
+
+ AString cacheDir;
+ if (!msg->findString("cacheDir", &cacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ signalError(NAME_NOT_FOUND);
+ return;
+ }
+
+ status_t err;
+ err = mFilter->configure(msg);
+ if (err != (status_t)OK) {
+ ALOGE("Failed to configure filter component, err %d", err);
+ signalError(err);
+ return;
+ }
+
+ mInputFormat = new AMessage();
+ mInputFormat->setString("mime", mime.c_str());
+ mInputFormat->setInt32("stride", mStride);
+ mInputFormat->setInt32("slice-height", mSliceHeight);
+ mInputFormat->setInt32("color-format", mColorFormatIn);
+ mInputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
+ mInputFormat->setInt32("width", mWidth);
+ mInputFormat->setInt32("height", mHeight);
+
+ mOutputFormat = new AMessage();
+ mOutputFormat->setString("mime", mime.c_str());
+ mOutputFormat->setInt32("stride", mStride);
+ mOutputFormat->setInt32("slice-height", mSliceHeight);
+ mOutputFormat->setInt32("color-format", mColorFormatOut);
+ mOutputFormat->setRect("crop", 0, 0, mStride, mSliceHeight);
+ mOutputFormat->setInt32("width", mWidth);
+ mOutputFormat->setInt32("height", mHeight);
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatComponentConfigured);
+ notify->setString("componentName", "MediaFilter");
+ notify->setMessage("input-format", mInputFormat);
+ notify->setMessage("output-format", mOutputFormat);
+ notify->post();
+ mState = CONFIGURED;
+ ALOGV("Handled kWhatConfigureComponent.");
+
+ sendFormatChange();
+}
+
+void MediaFilter::onStart() {
+ CHECK_EQ(mState, CONFIGURED);
+
+ allocateBuffersOnPort(kPortIndexInput);
+
+ allocateBuffersOnPort(kPortIndexOutput);
+
+ status_t err = mFilter->start();
+ if (err != (status_t)OK) {
+ ALOGE("Failed to start filter component, err %d", err);
+ signalError(err);
+ return;
+ }
+
+ mPortEOS[kPortIndexInput] = false;
+ mPortEOS[kPortIndexOutput] = false;
+ mInputEOSResult = OK;
+ mState = STARTED;
+
+ requestFillEmptyInput();
+ ALOGV("Handled kWhatStart.");
+}
+
+void MediaFilter::onInputBufferFilled(const sp<AMessage> &msg) {
+ IOMX::buffer_id bufferID;
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ BufferInfo *info = findBufferByID(kPortIndexInput, bufferID);
+
+ if (mState != STARTED) {
+ // we're not running, so we'll just keep that buffer...
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ return;
+ }
+
+ if (info->mGeneration != mGeneration) {
+ ALOGV("Caught a stale input buffer [ID %d]", bufferID);
+ // buffer is stale (taken before a flush/shutdown) - repost it
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
+ postFillThisBuffer(info);
+ return;
+ }
+
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+
+ sp<ABuffer> buffer;
+ int32_t err = OK;
+ bool eos = false;
+
+ if (!msg->findBuffer("buffer", &buffer)) {
+ // these are unfilled buffers returned by client
+ CHECK(msg->findInt32("err", &err));
+
+ if (err == OK) {
+ // buffers with no errors are returned on MediaCodec.flush
+ ALOGV("saw unfilled buffer (MediaCodec.flush)");
+ postFillThisBuffer(info);
+ return;
+ } else {
+ ALOGV("saw error %d instead of an input buffer", err);
+ eos = true;
+ }
+
+ buffer.clear();
+ }
+
+ int32_t isCSD;
+ if (buffer != NULL && buffer->meta()->findInt32("csd", &isCSD)
+ && isCSD != 0) {
+ // ignore codec-specific data buffers
+ ALOGW("MediaFilter received a codec-specific data buffer");
+ postFillThisBuffer(info);
+ return;
+ }
+
+ int32_t tmp;
+ if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
+ eos = true;
+ err = ERROR_END_OF_STREAM;
+ }
+
+ mAvailableInputBuffers.push_back(info);
+ processBuffers();
+
+ if (eos) {
+ mPortEOS[kPortIndexInput] = true;
+ mInputEOSResult = err;
+ }
+
+ ALOGV("Handled kWhatInputBufferFilled. [ID %u]", bufferID);
+}
+
+void MediaFilter::onOutputBufferDrained(const sp<AMessage> &msg) {
+ IOMX::buffer_id bufferID;
+ CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
+ BufferInfo *info = findBufferByID(kPortIndexOutput, bufferID);
+
+ if (mState != STARTED) {
+ // we're not running, so we'll just keep that buffer...
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ return;
+ }
+
+ if (info->mGeneration != mGeneration) {
+ ALOGV("Caught a stale output buffer [ID %d]", bufferID);
+ // buffer is stale (taken before a flush/shutdown) - keep it
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US);
+ return;
+ }
+
+ CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+
+ mAvailableOutputBuffers.push_back(info);
+
+ processBuffers();
+
+ ALOGV("Handled kWhatOutputBufferDrained. [ID %u]",
+ bufferID);
+}
+
+void MediaFilter::onShutdown(const sp<AMessage> &msg) {
+ mGeneration++;
+
+ if (mState != UNINITIALIZED) {
+ mFilter->reset();
+ }
+
+ int32_t keepComponentAllocated;
+ CHECK(msg->findInt32("keepComponentAllocated", &keepComponentAllocated));
+ if (!keepComponentAllocated || mState == UNINITIALIZED) {
+ mState = UNINITIALIZED;
+ } else {
+ mState = INITIALIZED;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
+ notify->post();
+}
+
+void MediaFilter::onFlush() {
+ mGeneration++;
+
+ mAvailableInputBuffers.clear();
+ for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) {
+ BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ }
+ mAvailableOutputBuffers.clear();
+ for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
+ BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
+ info->mStatus = BufferInfo::OWNED_BY_US;
+ mAvailableOutputBuffers.push_back(info);
+ }
+
+ mPortEOS[kPortIndexInput] = false;
+ mPortEOS[kPortIndexOutput] = false;
+ mInputEOSResult = OK;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatFlushCompleted);
+ notify->post();
+ ALOGV("Posted kWhatFlushCompleted");
+
+ // MediaCodec returns all input buffers after flush, so in
+ // onInputBufferFilled we call postFillThisBuffer on them
+}
+
+void MediaFilter::onSetParameters(const sp<AMessage> &msg) {
+ CHECK(mState != STARTED);
+
+ status_t err = mFilter->setParameters(msg);
+ if (err != (status_t)OK) {
+ ALOGE("setParameters returned err %d", err);
+ }
+}
+
+void MediaFilter::onCreateInputSurface() {
+ CHECK(mState == CONFIGURED);
+
+ mGraphicBufferListener = new GraphicBufferListener;
+
+ sp<AMessage> notify = new AMessage();
+ notify->setTarget(id());
+ status_t err = mGraphicBufferListener->init(
+ notify, mStride, mSliceHeight, kBufferCountActual);
+
+ if (err != OK) {
+ ALOGE("Failed to init mGraphicBufferListener: %d", err);
+ signalError(err);
+ return;
+ }
+
+ sp<AMessage> reply = mNotify->dup();
+ reply->setInt32("what", CodecBase::kWhatInputSurfaceCreated);
+ reply->setObject(
+ "input-surface",
+ new BufferProducerWrapper(
+ mGraphicBufferListener->getIGraphicBufferProducer()));
+ reply->post();
+}
+
+void MediaFilter::onInputFrameAvailable() {
+ BufferQueue::BufferItem item = mGraphicBufferListener->getBufferItem();
+ sp<GraphicBuffer> buf = mGraphicBufferListener->getBuffer(item);
+
+ // get pointer to graphic buffer
+ void* bufPtr;
+ buf->lock(GraphicBuffer::USAGE_SW_READ_OFTEN, &bufPtr);
+
+ // HACK - there is no OMX_COLOR_FORMATTYPE value for RGBA, so the format
+ // conversion is hardcoded until we add this.
+ // TODO: check input format and convert only if necessary
+ // copy RGBA graphic buffer into temporary ARGB input buffer
+ BufferInfo *inputInfo = new BufferInfo;
+ inputInfo->mData = new ABuffer(buf->getWidth() * buf->getHeight() * 4);
+ ALOGV("Copying surface data into temp buffer.");
+ convertRGBAToARGB(
+ (uint8_t*)bufPtr, buf->getWidth(), buf->getHeight(),
+ buf->getStride(), inputInfo->mData->data());
+ inputInfo->mBufferID = item.mBuf;
+ inputInfo->mGeneration = mGeneration;
+ inputInfo->mOutputFlags = 0;
+ inputInfo->mStatus = BufferInfo::OWNED_BY_US;
+ inputInfo->mData->meta()->setInt64("timeUs", item.mTimestamp / 1000);
+
+ mAvailableInputBuffers.push_back(inputInfo);
+
+ mGraphicBufferListener->releaseBuffer(item);
+
+ signalProcessBuffers();
+}
+
+void MediaFilter::onSignalEndOfInputStream() {
+ // if using input surface, need to send an EOS output buffer
+ if (mGraphicBufferListener != NULL) {
+ Vector<BufferInfo> *outputBufs = &mBuffers[kPortIndexOutput];
+ BufferInfo* eosBuf;
+ bool foundBuf = false;
+ for (size_t i = 0; i < kBufferCountActual; i++) {
+ eosBuf = &outputBufs->editItemAt(i);
+ if (eosBuf->mStatus == BufferInfo::OWNED_BY_US) {
+ foundBuf = true;
+ break;
+ }
+ }
+
+ if (!foundBuf) {
+ ALOGE("onSignalEndOfInputStream failed to find an output buffer");
+ return;
+ }
+
+ eosBuf->mOutputFlags = OMX_BUFFERFLAG_EOS;
+ eosBuf->mGeneration = mGeneration;
+ eosBuf->mData->setRange(0, 0);
+ postDrainThisBuffer(eosBuf);
+ ALOGV("Posted EOS on output buffer %zu", eosBuf->mBufferID);
+ }
+
+ mPortEOS[kPortIndexOutput] = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);
+ notify->post();
+
+ ALOGV("Output stream saw EOS.");
+}
+
+} // namespace android
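
Note: MediaFilter itself only moves buffers; the pixel work is delegated to a SimpleFilter implementation. A hypothetical minimal filter, written against the interface the filters above implement (start()/reset()/setParameters()/processBuffers(), with mWidth/mHeight inherited from SimpleFilter and frames in ARGB8888 as set up in onConfigureComponent); it relies on the base-class configure() and would additionally need a name mapping added in MediaFilter::onAllocateComponent:

    struct InvertFilter : public SimpleFilter {
        virtual status_t start() { return OK; }
        virtual void reset() {}
        virtual status_t setParameters(const sp<AMessage> & /* msg */) { return OK; }
        virtual status_t processBuffers(
                const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
            const uint8_t *src = srcBuffer->data();
            uint8_t *dst = outBuffer->data();
            for (size_t i = 0; i < (size_t)mWidth * mHeight * 4; i += 4) {
                dst[i] = src[i];                  // byte 0 is alpha (ARGB)
                dst[i + 1] = 255 - src[i + 1];    // invert R
                dst[i + 2] = 255 - src[i + 2];    // invert G
                dst[i + 3] = 255 - src[i + 3];    // invert B
            }
            return OK;
        }
    protected:
        virtual ~InvertFilter() {}
    };
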
diff --git a/media/libstagefright/filters/RSFilter.cpp b/media/libstagefright/filters/RSFilter.cpp
new file mode 100644
index 0000000..b569945
--- /dev/null
+++ b/media/libstagefright/filters/RSFilter.cpp
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RSFilter"
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "RSFilter.h"
+
+namespace android {
+
+RSFilter::RSFilter() {
+
+}
+
+RSFilter::~RSFilter() {
+
+}
+
+status_t RSFilter::configure(const sp<AMessage> &msg) {
+ status_t err = SimpleFilter::configure(msg);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!msg->findString("cacheDir", &mCacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ sp<RenderScriptWrapper> wrapper;
+ if (!msg->findObject("rs-wrapper", (sp<RefBase>*)&wrapper)) {
+ ALOGE("Failed to find RenderScriptWrapper in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ mRS = wrapper->mContext;
+ mCallback = wrapper->mCallback;
+
+ return OK;
+}
+
+status_t RSFilter::start() {
+ // 32-bit elements for ARGB8888
+ RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
+
+ RSC::Type::Builder tb(mRS, e);
+ tb.setX(mWidth);
+ tb.setY(mHeight);
+ RSC::sp<const RSC::Type> t = tb.create();
+
+ mAllocIn = RSC::Allocation::createTyped(mRS, t);
+ mAllocOut = RSC::Allocation::createTyped(mRS, t);
+
+ return OK;
+}
+
+void RSFilter::reset() {
+ mCallback.clear();
+ mAllocOut.clear();
+ mAllocIn.clear();
+ mRS.clear();
+}
+
+status_t RSFilter::setParameters(const sp<AMessage> &msg) {
+ return mCallback->handleSetParameters(msg);
+}
+
+status_t RSFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
+ mCallback->processBuffers(mAllocIn.get(), mAllocOut.get());
+ mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
+
+ return OK;
+}
+
+} // namespace android
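For context, a client driving RSFilter supplies both the RenderScript context and a per-frame callback through the "rs-wrapper" object looked up in configure() above. A minimal configuration sketch, where MyRSCallback, rsContext and cacheDir are hypothetical placeholders for an application-defined RenderScriptWrapper::RSFilterCallback, an already-initialized RS context, and a cache path, and the dimensions and color format are illustrative:

    sp<RenderScriptWrapper> wrapper = new RenderScriptWrapper;
    wrapper->mContext = rsContext;          // RSC::sp<RSC::RS>, init()'ed by the app
    wrapper->mCallback = new MyRSCallback;  // implements processBuffers()/handleSetParameters()

    sp<AMessage> config = new AMessage;
    config->setInt32("width", 1280);
    config->setInt32("height", 720);
    config->setInt32("color-format", OMX_COLOR_Format32bitARGB8888);
    config->setString("cacheDir", cacheDir.c_str());
    config->setObject("rs-wrapper", wrapper);

    sp<RSFilter> filter = new RSFilter;
    CHECK_EQ(filter->configure(config), (status_t)OK);
    CHECK_EQ(filter->start(), (status_t)OK);

The width, height and color-format keys are required by SimpleFilter::configure(); cacheDir and rs-wrapper are the RSFilter-specific keys checked above.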
diff --git a/media/libstagefright/filters/RSFilter.h b/media/libstagefright/filters/RSFilter.h
new file mode 100644
index 0000000..c5b5074
--- /dev/null
+++ b/media/libstagefright/filters/RSFilter.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RS_FILTER_H_
+#define RS_FILTER_H_
+
+#include <media/stagefright/RenderScriptWrapper.h>
+#include <RenderScript.h>
+
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct AString;
+
+struct RSFilter : public SimpleFilter {
+public:
+ RSFilter();
+
+ virtual status_t configure(const sp<AMessage> &msg);
+ virtual status_t start();
+ virtual void reset();
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~RSFilter();
+
+private:
+ AString mCacheDir;
+ sp<RenderScriptWrapper::RSFilterCallback> mCallback;
+ RSC::sp<RSC::RS> mRS;
+ RSC::sp<RSC::Allocation> mAllocIn;
+ RSC::sp<RSC::Allocation> mAllocOut;
+};
+
+} // namespace android
+
+#endif // RS_FILTER_H_
diff --git a/media/libstagefright/filters/SaturationFilter.cpp b/media/libstagefright/filters/SaturationFilter.cpp
new file mode 100644
index 0000000..ba5f75a
--- /dev/null
+++ b/media/libstagefright/filters/SaturationFilter.cpp
@@ -0,0 +1,99 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SaturationFilter"
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "SaturationFilter.h"
+
+namespace android {
+
+status_t SaturationFilter::configure(const sp<AMessage> &msg) {
+ status_t err = SimpleFilter::configure(msg);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!msg->findString("cacheDir", &mCacheDir)) {
+ ALOGE("Failed to find cache directory in config message.");
+ return NAME_NOT_FOUND;
+ }
+
+ return OK;
+}
+
+status_t SaturationFilter::start() {
+ // TODO: use a single RS context object for entire application
+ mRS = new RSC::RS();
+
+ if (!mRS->init(mCacheDir.c_str())) {
+ ALOGE("Failed to initialize RenderScript context.");
+ return NO_INIT;
+ }
+
+ // 32-bit elements for ARGB8888
+ RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS);
+
+ RSC::Type::Builder tb(mRS, e);
+ tb.setX(mWidth);
+ tb.setY(mHeight);
+ RSC::sp<const RSC::Type> t = tb.create();
+
+ mAllocIn = RSC::Allocation::createTyped(mRS, t);
+ mAllocOut = RSC::Allocation::createTyped(mRS, t);
+
+ mScript = new ScriptC_saturationARGB(mRS);
+
+ mScript->set_gSaturation(mSaturation);
+
+ return OK;
+}
+
+void SaturationFilter::reset() {
+ mScript.clear();
+ mAllocOut.clear();
+ mAllocIn.clear();
+ mRS.clear();
+}
+
+status_t SaturationFilter::setParameters(const sp<AMessage> &msg) {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ float saturation;
+ if (params->findFloat("saturation", &saturation)) {
+ mSaturation = saturation;
+ }
+
+ return OK;
+}
+
+status_t SaturationFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data());
+ mScript->forEach_root(mAllocIn, mAllocOut);
+ mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data());
+
+ return OK;
+}
+
+} // namespace android
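At runtime the saturation level travels inside a nested "params" message, matching the findMessage()/findFloat() lookups in setParameters() above. A minimal sketch with an illustrative value:

    sp<AMessage> params = new AMessage;
    params->setFloat("saturation", 0.25f);  // 0.0 = grayscale, 1.0 = unchanged

    sp<AMessage> msg = new AMessage;
    msg->setMessage("params", params);
    filter->setParameters(msg);             // stores the value in mSaturation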
diff --git a/media/libstagefright/filters/SaturationFilter.h b/media/libstagefright/filters/SaturationFilter.h
new file mode 100644
index 0000000..0545021
--- /dev/null
+++ b/media/libstagefright/filters/SaturationFilter.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SATURATION_FILTER_H_
+#define SATURATION_FILTER_H_
+
+#include <RenderScript.h>
+
+#include "ScriptC_saturationARGB.h"
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct SaturationFilter : public SimpleFilter {
+public:
+ SaturationFilter() : mSaturation(1.f) {};
+
+ virtual status_t configure(const sp<AMessage> &msg);
+ virtual status_t start();
+ virtual void reset();
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~SaturationFilter() {};
+
+private:
+ AString mCacheDir;
+ RSC::sp<RSC::RS> mRS;
+ RSC::sp<RSC::Allocation> mAllocIn;
+ RSC::sp<RSC::Allocation> mAllocOut;
+ RSC::sp<ScriptC_saturationARGB> mScript;
+ float mSaturation;
+};
+
+} // namespace android
+
+#endif // SATURATION_FILTER_H_
diff --git a/media/libstagefright/filters/SimpleFilter.cpp b/media/libstagefright/filters/SimpleFilter.cpp
new file mode 100644
index 0000000..6c1ca2c
--- /dev/null
+++ b/media/libstagefright/filters/SimpleFilter.cpp
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "SimpleFilter.h"
+
+namespace android {
+
+status_t SimpleFilter::configure(const sp<AMessage> &msg) {
+ CHECK(msg->findInt32("width", &mWidth));
+ CHECK(msg->findInt32("height", &mHeight));
+ if (!msg->findInt32("stride", &mStride)) {
+ mStride = mWidth;
+ }
+ if (!msg->findInt32("slice-height", &mSliceHeight)) {
+ mSliceHeight = mHeight;
+ }
+ CHECK(msg->findInt32("color-format", &mColorFormatIn));
+ mColorFormatOut = mColorFormatIn;
+
+ return OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/SimpleFilter.h b/media/libstagefright/filters/SimpleFilter.h
new file mode 100644
index 0000000..4cd37ef
--- /dev/null
+++ b/media/libstagefright/filters/SimpleFilter.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SIMPLE_FILTER_H_
+#define SIMPLE_FILTER_H_
+
+#include <stdint.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+struct ABuffer;
+struct AMessage;
+
+namespace android {
+
+struct SimpleFilter : public RefBase {
+public:
+ SimpleFilter() : mWidth(0), mHeight(0), mStride(0), mSliceHeight(0),
+ mColorFormatIn(0), mColorFormatOut(0) {};
+
+ virtual status_t configure(const sp<AMessage> &msg);
+
+ virtual status_t start() = 0;
+ virtual void reset() = 0;
+ virtual status_t setParameters(const sp<AMessage> &msg) = 0;
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) = 0;
+
+protected:
+ int32_t mWidth, mHeight;
+ int32_t mStride, mSliceHeight;
+ int32_t mColorFormatIn, mColorFormatOut;
+
+ virtual ~SimpleFilter() {};
+};
+
+} // namespace android
+
+#endif // SIMPLE_FILTER_H_
diff --git a/media/libstagefright/filters/ZeroFilter.cpp b/media/libstagefright/filters/ZeroFilter.cpp
new file mode 100644
index 0000000..3f1243c
--- /dev/null
+++ b/media/libstagefright/filters/ZeroFilter.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ZeroFilter"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include "ZeroFilter.h"
+
+namespace android {
+
+status_t ZeroFilter::setParameters(const sp<AMessage> &msg) {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ int32_t invert;
+ if (params->findInt32("invert", &invert)) {
+ mInvertData = (invert != 0);
+ }
+
+ return OK;
+}
+
+status_t ZeroFilter::processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) {
+ // assuming identical input & output buffers, since we're a copy filter
+ if (mInvertData) {
+ uint32_t* src = (uint32_t*)srcBuffer->data();
+ uint32_t* dest = (uint32_t*)outBuffer->data();
+ for (size_t i = 0; i < srcBuffer->size() / 4; ++i) {
+ *(dest++) = *(src++) ^ 0xFFFFFFFF;
+ }
+ } else {
+ memcpy(outBuffer->data(), srcBuffer->data(), srcBuffer->size());
+ }
+ outBuffer->setRange(0, srcBuffer->size());
+
+ return OK;
+}
+
+} // namespace android
diff --git a/media/libstagefright/filters/ZeroFilter.h b/media/libstagefright/filters/ZeroFilter.h
new file mode 100644
index 0000000..bd34dfb
--- /dev/null
+++ b/media/libstagefright/filters/ZeroFilter.h
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ZERO_FILTER_H_
+#define ZERO_FILTER_H_
+
+#include "SimpleFilter.h"
+
+namespace android {
+
+struct ZeroFilter : public SimpleFilter {
+public:
+ ZeroFilter() : mInvertData(false) {};
+
+ virtual status_t start() { return OK; };
+ virtual void reset() {};
+ virtual status_t setParameters(const sp<AMessage> &msg);
+ virtual status_t processBuffers(
+ const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer);
+
+protected:
+ virtual ~ZeroFilter() {};
+
+private:
+ bool mInvertData;
+};
+
+} // namespace android
+
+#endif // ZERO_FILTER_H_
diff --git a/media/libstagefright/filters/saturation.rs b/media/libstagefright/filters/saturation.rs
new file mode 100644
index 0000000..2c867ac
--- /dev/null
+++ b/media/libstagefright/filters/saturation.rs
@@ -0,0 +1,40 @@
+// Sample script for RGB888 support (compare to saturationARGB.rs)
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+// global variables (parameters accessible to application code)
+float gSaturation = 1.0f;
+
+void root(const uchar3 *v_in, uchar3 *v_out) {
+ // scale 0-255 uchar to 0-1.0 float
+ float3 in = {v_in->r * 0.003921569f, v_in->g * 0.003921569f,
+ v_in->b * 0.003921569f};
+
+ // apply saturation filter
+ float3 result = dot(in, gMonoMult);
+ result = mix(result, in, gSaturation);
+
+ // convert to uchar, copied from rsPackColorTo8888
+ v_out->x = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+ v_out->y = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+ v_out->z = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
diff --git a/media/libstagefright/filters/saturationARGB.rs b/media/libstagefright/filters/saturationARGB.rs
new file mode 100644
index 0000000..1de9dd8
--- /dev/null
+++ b/media/libstagefright/filters/saturationARGB.rs
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma version(1)
+#pragma rs java_package_name(com.android.rs.cppbasic)
+#pragma rs_fp_relaxed
+
+const static float3 gMonoMult = {0.299f, 0.587f, 0.114f};
+
+// global variables (parameters accessible to application code)
+float gSaturation = 1.0f;
+
+void root(const uchar4 *v_in, uchar4 *v_out) {
+ v_out->x = v_in->x; // don't modify A
+
+ // get RGB, scale 0-255 uchar to 0-1.0 float
+ float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f,
+ v_in->w * 0.003921569f};
+
+ // apply saturation filter
+ float3 result = dot(rgb, gMonoMult);
+ result = mix(result, rgb, gSaturation);
+
+ v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f);
+ v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f);
+ v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f);
+}
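Both scripts implement the same luma-based saturation blend: result = mix(dot(rgb, gMonoMult), rgb, gSaturation), where gMonoMult holds the BT.601 luma weights. As a worked example with assumed inputs gSaturation = 0.5 and rgb = (1.0, 0.0, 0.0) (pure red on the 0-1 scale): luma = 0.299*1.0 + 0.587*0.0 + 0.114*0.0 = 0.299, so result = mix(0.299, rgb, 0.5) = (0.6495, 0.1495, 0.1495); each channel moves halfway from its grayscale value toward the original before being clamped back to a 0-255 uchar on output.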
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index a605595..4cf3819 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -617,8 +617,6 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
// having to interpolate.
// The final AAC frame may well extend into the next RangeInfo but
// that's ok.
- // TODO: the logic commented above is skipped because codec cannot take
- // arbitrary sized input buffers;
size_t offset = 0;
while (offset < info.mLength) {
if (offset + 7 > mBuffer->size()) {
@@ -683,12 +681,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
size_t headerSize __unused = protection_absent ? 7 : 9;
offset += aac_frame_length;
- // TODO: move back to concatenation when codec can support arbitrary input buffers.
- // For now only queue a single buffer
- break;
}
- int64_t timeUs = fetchTimestampAAC(offset);
+ int64_t timeUs = fetchTimestamp(offset);
sp<ABuffer> accessUnit = new ABuffer(offset);
memcpy(accessUnit->data(), mBuffer->data(), offset);
@@ -735,45 +730,6 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {
return timeUs;
}
-// TODO: avoid interpolating timestamps once codec supports arbitrary sized input buffers
-int64_t ElementaryStreamQueue::fetchTimestampAAC(size_t size) {
- int64_t timeUs = -1;
- bool first = true;
-
- size_t samplesize = size;
- while (size > 0) {
- CHECK(!mRangeInfos.empty());
-
- RangeInfo *info = &*mRangeInfos.begin();
-
- if (first) {
- timeUs = info->mTimestampUs;
- first = false;
- }
-
- if (info->mLength > size) {
- int32_t sampleRate;
- CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate));
- info->mLength -= size;
- size_t numSamples = 1024 * size / samplesize;
- info->mTimestampUs += numSamples * 1000000ll / sampleRate;
- size = 0;
- } else {
- size -= info->mLength;
-
- mRangeInfos.erase(mRangeInfos.begin());
- info = NULL;
- }
-
- }
-
- if (timeUs == 0ll) {
- ALOGV("Returning 0 timestamp");
- }
-
- return timeUs;
-}
-
struct NALPosition {
size_t nalOffset;
size_t nalSize;
diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h
index eb4b1c9..45b4624 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.h
+++ b/media/libstagefright/mpeg2ts/ESQueue.h
@@ -77,7 +77,6 @@ private:
// consume a logical (compressed) access unit of size "size",
// returns its timestamp in us (or -1 if no time information).
int64_t fetchTimestamp(size_t size);
- int64_t fetchTimestampAAC(size_t size);
DISALLOW_EVIL_CONSTRUCTORS(ElementaryStreamQueue);
};
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index 069961b..737f144 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -80,38 +80,6 @@ WebmWriter::WebmWriter(int fd)
mCuePoints);
}
-WebmWriter::WebmWriter(const char *filename)
- : mInitCheck(NO_INIT),
- mTimeCodeScale(1000000),
- mStartTimestampUs(0),
- mStartTimeOffsetMs(0),
- mSegmentOffset(0),
- mSegmentDataStart(0),
- mInfoOffset(0),
- mInfoSize(0),
- mTracksOffset(0),
- mCuesOffset(0),
- mPaused(false),
- mStarted(false),
- mIsFileSizeLimitExplicitlyRequested(false),
- mIsRealTimeRecording(false),
- mStreamableFile(true),
- mEstimatedCuesSize(0) {
- mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
- if (mFd >= 0) {
- ALOGV("fd %d; flags: %o", mFd, fcntl(mFd, F_GETFL, 0));
- mInitCheck = OK;
- }
- mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
- mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
- mSinkThread = new WebmFrameSinkThread(
- mFd,
- mSegmentDataStart,
- mStreams[kVideoIndex].mSink,
- mStreams[kAudioIndex].mSink,
- mCuePoints);
-}
-
// static
sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {
int32_t width, height;
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
index 36b6965..4ad770e 100644
--- a/media/libstagefright/webm/WebmWriter.h
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -37,7 +37,6 @@ namespace android {
class WebmWriter : public MediaWriter {
public:
WebmWriter(int fd);
- WebmWriter(const char *filename);
~WebmWriter() { reset(); }
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index 3a280f0..f1b84ad 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -11,7 +11,7 @@ endif
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- main_mediaserver.cpp
+ main_mediaserver.cpp
LOCAL_SHARED_LIBRARIES := \
libaudioflinger \
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
index ed00b72..3124e4a 100644
--- a/media/ndk/NdkMediaCodec.cpp
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -352,7 +352,8 @@ media_status_t AMediaCodec_releaseOutputBufferAtTime(
}
//EXPORT
-media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback, void *userdata) {
+media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback,
+ void *userdata) {
mData->mCallback = callback;
mData->mCallbackUserData = userdata;
return AMEDIA_OK;
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
index db57d0b..0ecd64f 100644
--- a/media/ndk/NdkMediaExtractor.cpp
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -70,7 +70,8 @@ media_status_t AMediaExtractor_delete(AMediaExtractor *mData) {
}
EXPORT
-media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset, off64_t length) {
+media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset,
+ off64_t length) {
ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
return translate_error(mData->mImpl->setDataSource(fd, offset, length));
}