Diffstat (limited to 'media/jni')
-rw-r--r--  media/jni/Android.mk | 13
-rw-r--r--  media/jni/android_media_ImageReader.cpp | 459
-rw-r--r--  media/jni/android_media_ImageWriter.cpp | 1083
-rw-r--r--  media/jni/android_media_MediaCodec.cpp | 237
-rw-r--r--  media/jni/android_media_MediaCodec.h | 5
-rw-r--r--  media/jni/android_media_MediaCodecList.cpp | 25
-rw-r--r--  media/jni/android_media_MediaCrypto.cpp | 43
-rw-r--r--  media/jni/android_media_MediaDataSource.cpp | 148
-rw-r--r--  media/jni/android_media_MediaDataSource.h | 73
-rw-r--r--  media/jni/android_media_MediaDrm.cpp | 78
-rw-r--r--  media/jni/android_media_MediaExtractor.cpp | 74
-rw-r--r--  media/jni/android_media_MediaHTTPConnection.cpp | 1
-rw-r--r--  media/jni/android_media_MediaMetadataRetriever.cpp | 37
-rw-r--r--  media/jni/android_media_MediaPlayer.cpp | 202
-rw-r--r--  media/jni/android_media_MediaRecorder.cpp | 33
-rw-r--r--  media/jni/android_media_MediaSync.cpp | 551
-rw-r--r--  media/jni/android_media_MediaSync.h | 65
-rw-r--r--  media/jni/android_media_PlaybackSettings.h | 120
-rw-r--r--  media/jni/android_media_SyncSettings.cpp | 91
-rw-r--r--  media/jni/android_media_SyncSettings.h | 65
-rw-r--r--  media/jni/audioeffect/android_media_AudioEffect.cpp | 10
-rw-r--r--  media/jni/audioeffect/android_media_Visualizer.cpp | 11
-rw-r--r--  media/jni/soundpool/Android.mk | 2
-rw-r--r--  media/jni/soundpool/SoundPool.cpp | 106
-rw-r--r--  media/jni/soundpool/SoundPool.h | 8
-rw-r--r--  media/jni/soundpool/android_media_SoundPool.cpp (renamed from media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp) | 90
26 files changed, 3341 insertions, 289 deletions
diff --git a/media/jni/Android.mk b/media/jni/Android.mk
index 4ebbe26..dbb53b4 100644
--- a/media/jni/Android.mk
+++ b/media/jni/Android.mk
@@ -2,21 +2,25 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
+ android_media_AmrInputStream.cpp \
+ android_media_ImageWriter.cpp \
android_media_ImageReader.cpp \
android_media_MediaCrypto.cpp \
android_media_MediaCodec.cpp \
android_media_MediaCodecList.cpp \
+ android_media_MediaDataSource.cpp \
android_media_MediaDrm.cpp \
android_media_MediaExtractor.cpp \
android_media_MediaHTTPConnection.cpp \
+ android_media_MediaMetadataRetriever.cpp \
android_media_MediaMuxer.cpp \
android_media_MediaPlayer.cpp \
+ android_media_MediaProfiles.cpp \
android_media_MediaRecorder.cpp \
android_media_MediaScanner.cpp \
- android_media_MediaMetadataRetriever.cpp \
+ android_media_MediaSync.cpp \
android_media_ResampleInputStream.cpp \
- android_media_MediaProfiles.cpp \
- android_media_AmrInputStream.cpp \
+ android_media_SyncSettings.cpp \
android_media_Utils.cpp \
android_mtp_MtpDatabase.cpp \
android_mtp_MtpDevice.cpp \
@@ -40,7 +44,7 @@ LOCAL_SHARED_LIBRARIES := \
libusbhost \
libjhead \
libexif \
- libstagefright_amrnb_common \
+ libstagefright_amrnb_common
LOCAL_REQUIRED_MODULES := \
libjhead_jni
@@ -52,6 +56,7 @@ LOCAL_C_INCLUDES += \
external/libexif/ \
external/tremor/Tremor \
frameworks/base/core/jni \
+ frameworks/base/libs/hwui \
frameworks/av/media/libmedia \
frameworks/av/media/libstagefright \
frameworks/av/media/libstagefright/codecs/amrnb/enc/src \
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index 5406130..043e20b 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -24,6 +24,7 @@
#include <cstdio>
#include <gui/CpuConsumer.h>
+#include <gui/BufferItemConsumer.h>
#include <gui/Surface.h>
#include <camera3.h>
@@ -39,7 +40,7 @@
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
#define ANDROID_MEDIA_IMAGEREADER_CTX_JNI_ID "mNativeContext"
-#define ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID "mLockedBuffer"
+#define ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID "mNativeBuffer"
#define ANDROID_MEDIA_SURFACEIMAGE_TS_JNI_ID "mTimestamp"
// ----------------------------------------------------------------------------
@@ -62,7 +63,7 @@ static struct {
} gImageReaderClassInfo;
static struct {
- jfieldID mLockedBuffer;
+ jfieldID mNativeBuffer;
jfieldID mTimestamp;
} gSurfaceImageClassInfo;
@@ -73,7 +74,7 @@ static struct {
// ----------------------------------------------------------------------------
-class JNIImageReaderContext : public CpuConsumer::FrameAvailableListener
+class JNIImageReaderContext : public ConsumerBase::FrameAvailableListener
{
public:
JNIImageReaderContext(JNIEnv* env, jobject weakThiz, jclass clazz, int maxImages);
@@ -83,18 +84,28 @@ public:
virtual void onFrameAvailable(const BufferItem& item);
CpuConsumer::LockedBuffer* getLockedBuffer();
-
void returnLockedBuffer(CpuConsumer::LockedBuffer* buffer);
+ BufferItem* getOpaqueBuffer();
+ void returnOpaqueBuffer(BufferItem* buffer);
+
void setCpuConsumer(const sp<CpuConsumer>& consumer) { mConsumer = consumer; }
CpuConsumer* getCpuConsumer() { return mConsumer.get(); }
+ void setOpaqueConsumer(const sp<BufferItemConsumer>& consumer) { mOpaqueConsumer = consumer; }
+ BufferItemConsumer* getOpaqueConsumer() { return mOpaqueConsumer.get(); }
+ // This is the only opaque format exposed in the ImageFormat public API.
+ bool isOpaque() { return mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; }
+
void setProducer(const sp<IGraphicBufferProducer>& producer) { mProducer = producer; }
IGraphicBufferProducer* getProducer() { return mProducer.get(); }
void setBufferFormat(int format) { mFormat = format; }
int getBufferFormat() { return mFormat; }
+ void setBufferDataspace(android_dataspace dataSpace) { mDataSpace = dataSpace; }
+ android_dataspace getBufferDataspace() { return mDataSpace; }
+
void setBufferWidth(int width) { mWidth = width; }
int getBufferWidth() { return mWidth; }
@@ -106,11 +117,14 @@ private:
static void detachJNI();
List<CpuConsumer::LockedBuffer*> mBuffers;
+ List<BufferItem*> mOpaqueBuffers;
sp<CpuConsumer> mConsumer;
+ sp<BufferItemConsumer> mOpaqueConsumer;
sp<IGraphicBufferProducer> mProducer;
jobject mWeakThiz;
jclass mClazz;
int mFormat;
+ android_dataspace mDataSpace;
int mWidth;
int mHeight;
};
@@ -121,7 +135,9 @@ JNIImageReaderContext::JNIImageReaderContext(JNIEnv* env,
mClazz((jclass)env->NewGlobalRef(clazz)) {
for (int i = 0; i < maxImages; i++) {
CpuConsumer::LockedBuffer *buffer = new CpuConsumer::LockedBuffer;
+ BufferItem* opaqueBuffer = new BufferItem;
mBuffers.push_back(buffer);
+ mOpaqueBuffers.push_back(opaqueBuffer);
}
}
@@ -165,6 +181,21 @@ void JNIImageReaderContext::returnLockedBuffer(CpuConsumer::LockedBuffer* buffer
mBuffers.push_back(buffer);
}
+BufferItem* JNIImageReaderContext::getOpaqueBuffer() {
+ if (mOpaqueBuffers.empty()) {
+ return NULL;
+ }
+ // Return an opaque buffer pointer and remove it from the list
+ List<BufferItem*>::iterator it = mOpaqueBuffers.begin();
+ BufferItem* buffer = *it;
+ mOpaqueBuffers.erase(it);
+ return buffer;
+}
+
+void JNIImageReaderContext::returnOpaqueBuffer(BufferItem* buffer) {
+ mOpaqueBuffers.push_back(buffer);
+}
+
JNIImageReaderContext::~JNIImageReaderContext() {
bool needsDetach = false;
JNIEnv* env = getJNIEnv(&needsDetach);
@@ -183,8 +214,20 @@ JNIImageReaderContext::~JNIImageReaderContext() {
it != mBuffers.end(); it++) {
delete *it;
}
+
+ // Delete opaque buffers
+ for (List<BufferItem *>::iterator it = mOpaqueBuffers.begin();
+ it != mOpaqueBuffers.end(); it++) {
+ delete *it;
+ }
+
mBuffers.clear();
- mConsumer.clear();
+ if (mConsumer != 0) {
+ mConsumer.clear();
+ }
+ if (mOpaqueConsumer != 0) {
+ mOpaqueConsumer.clear();
+ }
}
void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
@@ -206,6 +249,11 @@ void JNIImageReaderContext::onFrameAvailable(const BufferItem& /*item*/)
extern "C" {
+static bool isFormatOpaque(int format) {
+ // Only treat IMPLEMENTATION_DEFINED as an opaque format for now.
+ return format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+}
+
static JNIImageReaderContext* ImageReader_getContext(JNIEnv* env, jobject thiz)
{
JNIImageReaderContext *ctx;
@@ -222,6 +270,13 @@ static CpuConsumer* ImageReader_getCpuConsumer(JNIEnv* env, jobject thiz)
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return NULL;
}
+
+ if (ctx->isOpaque()) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque ImageReader doesn't support this method");
+ return NULL;
+ }
+
return ctx->getCpuConsumer();
}
@@ -233,6 +288,7 @@ static IGraphicBufferProducer* ImageReader_getProducer(JNIEnv* env, jobject thiz
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return NULL;
}
+
return ctx->getProducer();
}
@@ -254,36 +310,19 @@ static void ImageReader_setNativeContext(JNIEnv* env,
static CpuConsumer::LockedBuffer* Image_getLockedBuffer(JNIEnv* env, jobject image)
{
return reinterpret_cast<CpuConsumer::LockedBuffer*>(
- env->GetLongField(image, gSurfaceImageClassInfo.mLockedBuffer));
+ env->GetLongField(image, gSurfaceImageClassInfo.mNativeBuffer));
}
static void Image_setBuffer(JNIEnv* env, jobject thiz,
const CpuConsumer::LockedBuffer* buffer)
{
- env->SetLongField(thiz, gSurfaceImageClassInfo.mLockedBuffer, reinterpret_cast<jlong>(buffer));
+ env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer, reinterpret_cast<jlong>(buffer));
}
-// Some formats like JPEG defined with different values between android.graphics.ImageFormat and
-// graphics.h, need convert to the one defined in graphics.h here.
-static int Image_getPixelFormat(JNIEnv* env, int format)
+static void Image_setOpaqueBuffer(JNIEnv* env, jobject thiz,
+ const BufferItem* buffer)
{
- int jpegFormat;
- jfieldID fid;
-
- ALOGV("%s: format = 0x%x", __FUNCTION__, format);
-
- jclass imageFormatClazz = env->FindClass("android/graphics/ImageFormat");
- ALOG_ASSERT(imageFormatClazz != NULL);
-
- fid = env->GetStaticFieldID(imageFormatClazz, "JPEG", "I");
- jpegFormat = env->GetStaticIntField(imageFormatClazz, fid);
-
- // Translate the JPEG to BLOB for camera purpose.
- if (format == jpegFormat) {
- format = HAL_PIXEL_FORMAT_BLOB;
- }
-
- return format;
+ env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer, reinterpret_cast<jlong>(buffer));
}
static uint32_t Image_getJpegSize(CpuConsumer::LockedBuffer* buffer, bool usingRGBAOverride)
@@ -430,7 +469,7 @@ static void Image_getLockedBufferInfo(JNIEnv* env, CpuConsumer::LockedBuffer* bu
pData = buffer->data;
dataSize = Image_getJpegSize(buffer, usingRGBAOverride);
break;
- case HAL_PIXEL_FORMAT_RAW_SENSOR:
+ case HAL_PIXEL_FORMAT_RAW16:
// Single plane 16bpp bayer data.
bytesPerPixel = 2;
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
@@ -450,6 +489,19 @@ static void Image_getLockedBufferInfo(JNIEnv* env, CpuConsumer::LockedBuffer* bu
pData = buffer->data;
dataSize = buffer->stride * buffer->height;
break;
+ case HAL_PIXEL_FORMAT_RAW12:
+ // Single plane 12bpp bayer data.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ LOG_ALWAYS_FATAL_IF(buffer->width % 4,
+ "Width is not multiple of 4 %d", buffer->width);
+ LOG_ALWAYS_FATAL_IF(buffer->height % 2,
+ "Height is not even %d", buffer->height);
+ LOG_ALWAYS_FATAL_IF(buffer->stride < (buffer->width * 12 / 8),
+ "stride (%d) should be at least %d",
+ buffer->stride, buffer->width * 12 / 8);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height;
+ break;
case HAL_PIXEL_FORMAT_RGBA_8888:
case HAL_PIXEL_FORMAT_RGBX_8888:
// Single plane, 32bpp.
@@ -483,7 +535,7 @@ static void Image_getLockedBufferInfo(JNIEnv* env, CpuConsumer::LockedBuffer* bu
}
static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* buffer, int idx,
- int32_t readerFormat)
+ int32_t halReaderFormat)
{
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
ALOG_ASSERT((idx < IMAGE_READER_MAX_NUM_PLANES) && (idx >= 0), "Index is out of range:%d", idx);
@@ -493,7 +545,7 @@ static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* bu
int32_t fmt = buffer->flexFormat;
- fmt = applyFormatOverrides(fmt, readerFormat);
+ fmt = applyFormatOverrides(fmt, halReaderFormat);
switch (fmt) {
case HAL_PIXEL_FORMAT_YCbCr_420_888:
@@ -511,13 +563,15 @@ static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* bu
break;
case HAL_PIXEL_FORMAT_BLOB:
case HAL_PIXEL_FORMAT_RAW10:
- // Blob is used for JPEG data, RAW10 is used for 10-bit raw data, they are
- // single plane, row and pixel strides are 0.
+ case HAL_PIXEL_FORMAT_RAW12:
+ // Blob is used for JPEG data; RAW10 and RAW12 are used for 10-bit and 12-bit raw data.
+ // These are single-plane formats with pixel stride 0, since they don't have a
+ // well-defined pixel stride.
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
pixelStride = 0;
break;
case HAL_PIXEL_FORMAT_Y16:
- case HAL_PIXEL_FORMAT_RAW_SENSOR:
+ case HAL_PIXEL_FORMAT_RAW16:
case HAL_PIXEL_FORMAT_RGB_565:
// Single plane 16bpp data.
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
@@ -543,7 +597,7 @@ static jint Image_imageGetPixelStride(JNIEnv* env, CpuConsumer::LockedBuffer* bu
}
static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buffer, int idx,
- int32_t readerFormat)
+ int32_t halReaderFormat)
{
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
ALOG_ASSERT((idx < IMAGE_READER_MAX_NUM_PLANES) && (idx >= 0));
@@ -553,7 +607,7 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
int32_t fmt = buffer->flexFormat;
- fmt = applyFormatOverrides(fmt, readerFormat);
+ fmt = applyFormatOverrides(fmt, halReaderFormat);
switch (fmt) {
case HAL_PIXEL_FORMAT_YCbCr_420_888:
@@ -568,12 +622,14 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
rowStride = (idx == 0) ? buffer->stride : ALIGN(buffer->stride / 2, 16);
break;
case HAL_PIXEL_FORMAT_BLOB:
- // Blob is used for JPEG data, RAW10 is used for 10-bit raw data, they are
- // single plane, row and pixel strides are 0.
+ // Blob is used for JPEG data. It is single plane and has 0 row stride and
+ // 0 pixel stride
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
rowStride = 0;
break;
case HAL_PIXEL_FORMAT_RAW10:
+ case HAL_PIXEL_FORMAT_RAW12:
+ // RAW10 and RAW12 are used for 10-bit and 12-bit raw data; they are single plane.
ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
rowStride = buffer->stride;
break;
@@ -584,7 +640,7 @@ static jint Image_imageGetRowStride(JNIEnv* env, CpuConsumer::LockedBuffer* buff
rowStride = buffer->stride;
break;
case HAL_PIXEL_FORMAT_Y16:
- case HAL_PIXEL_FORMAT_RAW_SENSOR:
+ case HAL_PIXEL_FORMAT_RAW16:
// In native side, strides are specified in pixels, not in bytes.
// Single plane 16bpp bayer data. even width/height,
// row stride multiple of 16 pixels (32 bytes)
@@ -635,6 +691,52 @@ static int Image_getBufferHeight(CpuConsumer::LockedBuffer* buffer) {
return buffer->height;
}
+// --------------------------Methods for opaque Image and ImageReader----------
+
+static BufferItemConsumer* ImageReader_getOpaqueConsumer(JNIEnv* env, jobject thiz)
+{
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* const ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
+ return NULL;
+ }
+
+ if (!ctx->isOpaque()) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Non-opaque ImageReader doesn't support this method");
+ }
+
+ return ctx->getOpaqueConsumer();
+}
+
+static BufferItem* Image_getOpaqueBuffer(JNIEnv* env, jobject image)
+{
+ return reinterpret_cast<BufferItem*>(
+ env->GetLongField(image, gSurfaceImageClassInfo.mNativeBuffer));
+}
+
+static int Image_getOpaqueBufferWidth(BufferItem* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->mCrop.isEmpty()) {
+ return buffer->mCrop.getWidth();
+ }
+ return buffer->mGraphicBuffer->getWidth();
+}
+
+static int Image_getOpaqueBufferHeight(BufferItem* buffer) {
+ if (buffer == NULL) return -1;
+
+ if (!buffer->mCrop.isEmpty()) {
+ return buffer->mCrop.getHeight();
+ }
+
+ return buffer->mGraphicBuffer->getHeight();
+}
+
+
+
// ----------------------------------------------------------------------------
static void ImageReader_classInit(JNIEnv* env, jclass clazz)
@@ -644,9 +746,9 @@ static void ImageReader_classInit(JNIEnv* env, jclass clazz)
jclass imageClazz = env->FindClass("android/media/ImageReader$SurfaceImage");
LOG_ALWAYS_FATAL_IF(imageClazz == NULL,
"can't find android/graphics/ImageReader$SurfaceImage");
- gSurfaceImageClassInfo.mLockedBuffer = env->GetFieldID(
+ gSurfaceImageClassInfo.mNativeBuffer = env->GetFieldID(
imageClazz, ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID, "J");
- LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mLockedBuffer == NULL,
+ LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeBuffer == NULL,
"can't find android/graphics/ImageReader.%s",
ANDROID_MEDIA_SURFACEIMAGE_BUFFER_JNI_ID);
@@ -682,22 +784,16 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
{
status_t res;
int nativeFormat;
+ android_dataspace nativeDataspace;
ALOGV("%s: width:%d, height: %d, format: 0x%x, maxImages:%d",
__FUNCTION__, width, height, format, maxImages);
- nativeFormat = Image_getPixelFormat(env, format);
-
- sp<IGraphicBufferProducer> gbProducer;
- sp<IGraphicBufferConsumer> gbConsumer;
- BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
- sp<CpuConsumer> consumer = new CpuConsumer(gbConsumer, maxImages,
- /*controlledByApp*/true);
- // TODO: throw dvm exOutOfMemoryError?
- if (consumer == NULL) {
- jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
- return;
- }
+ PublicFormat publicFormat = static_cast<PublicFormat>(format);
+ nativeFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ publicFormat);
+ nativeDataspace = android_view_Surface_mapPublicFormatToHalDataspace(
+ publicFormat);
jclass clazz = env->GetObjectClass(thiz);
if (clazz == NULL) {
@@ -705,25 +801,80 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz,
return;
}
sp<JNIImageReaderContext> ctx(new JNIImageReaderContext(env, weakThiz, clazz, maxImages));
- ctx->setCpuConsumer(consumer);
+
+ sp<IGraphicBufferProducer> gbProducer;
+ sp<IGraphicBufferConsumer> gbConsumer;
+ BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+ sp<ConsumerBase> consumer;
+ sp<CpuConsumer> cpuConsumer;
+ sp<BufferItemConsumer> opaqueConsumer;
+ if (isFormatOpaque(nativeFormat)) {
+ // Use the SW_READ_NEVER usage to tell producer that this format is not for preview or video
+ // encoding. The only possibility will be ZSL output.
+ opaqueConsumer =
+ new BufferItemConsumer(gbConsumer, GRALLOC_USAGE_SW_READ_NEVER, maxImages,
+ /*controlledByApp*/true);
+ if (opaqueConsumer == NULL) {
+ jniThrowRuntimeException(env, "Failed to allocate native opaque consumer");
+ return;
+ }
+ ctx->setOpaqueConsumer(opaqueConsumer);
+ consumer = opaqueConsumer;
+ } else {
+ cpuConsumer = new CpuConsumer(gbConsumer, maxImages, /*controlledByApp*/true);
+ // TODO: throw dvm exOutOfMemoryError?
+ if (cpuConsumer == NULL) {
+ jniThrowRuntimeException(env, "Failed to allocate native CpuConsumer");
+ return;
+ }
+ ctx->setCpuConsumer(cpuConsumer);
+ consumer = cpuConsumer;
+ }
+
ctx->setProducer(gbProducer);
consumer->setFrameAvailableListener(ctx);
ImageReader_setNativeContext(env, thiz, ctx);
ctx->setBufferFormat(nativeFormat);
+ ctx->setBufferDataspace(nativeDataspace);
ctx->setBufferWidth(width);
ctx->setBufferHeight(height);
- // Set the width/height/format to the CpuConsumer
- res = consumer->setDefaultBufferSize(width, height);
- if (res != OK) {
- jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to set CpuConsumer buffer size");
- return;
- }
- res = consumer->setDefaultBufferFormat(nativeFormat);
- if (res != OK) {
- jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to set CpuConsumer buffer format");
+ // Set the width/height/format/dataspace to the CpuConsumer
+ // TODO: below code can be simplified once b/19977701 is fixed.
+ if (isFormatOpaque(nativeFormat)) {
+ res = opaqueConsumer->setDefaultBufferSize(width, height);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer size");
+ return;
+ }
+ res = opaqueConsumer->setDefaultBufferFormat(nativeFormat);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer format");
+ }
+ res = opaqueConsumer->setDefaultBufferDataSpace(nativeDataspace);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set opaque consumer buffer dataSpace");
+ }
+ } else {
+ res = cpuConsumer->setDefaultBufferSize(width, height);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer size");
+ return;
+ }
+ res = cpuConsumer->setDefaultBufferFormat(nativeFormat);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer format");
+ }
+ res = cpuConsumer->setDefaultBufferDataSpace(nativeDataspace);
+ if (res != OK) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to set CpuConsumer buffer dataSpace");
+ }
}
}
@@ -737,7 +888,13 @@ static void ImageReader_close(JNIEnv* env, jobject thiz)
return;
}
- CpuConsumer* consumer = ImageReader_getCpuConsumer(env, thiz);
+ ConsumerBase* consumer = NULL;
+ if (ctx->isOpaque()) {
+ consumer = ImageReader_getOpaqueConsumer(env, thiz);
+ } else {
+ consumer = ImageReader_getCpuConsumer(env, thiz);
+ }
+
if (consumer != NULL) {
consumer->abandon();
consumer->setFrameAvailableListener(NULL);
@@ -754,27 +911,66 @@ static void ImageReader_imageRelease(JNIEnv* env, jobject thiz, jobject image)
return;
}
- CpuConsumer* consumer = ctx->getCpuConsumer();
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, image);
- if (!buffer) {
- ALOGW("Image already released!!!");
- return;
+ if (ctx->isOpaque()) {
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, image);
+ opaqueConsumer->releaseBuffer(*opaqueBuffer); // Not using fence for now.
+ Image_setOpaqueBuffer(env, image, NULL);
+ ctx->returnOpaqueBuffer(opaqueBuffer);
+ ALOGV("%s: Opaque Image has been released", __FUNCTION__);
+ } else {
+ CpuConsumer* consumer = ctx->getCpuConsumer();
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, image);
+ if (!buffer) {
+ ALOGW("Image already released!!!");
+ return;
+ }
+ consumer->unlockBuffer(*buffer);
+ Image_setBuffer(env, image, NULL);
+ ctx->returnLockedBuffer(buffer);
+ ALOGV("%s: Image (format: 0x%x) has been released", __FUNCTION__, ctx->getBufferFormat());
}
- consumer->unlockBuffer(*buffer);
- Image_setBuffer(env, image, NULL);
- ctx->returnLockedBuffer(buffer);
}
-static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
- jobject image)
-{
+static jint ImageReader_opaqueImageSetup(JNIEnv* env, JNIImageReaderContext* ctx, jobject image) {
ALOGV("%s:", __FUNCTION__);
- JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
- if (ctx == NULL) {
+ if (ctx == NULL || !ctx->isOpaque()) {
jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
return -1;
}
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* buffer = ctx->getOpaqueBuffer();
+ if (buffer == NULL) {
+ ALOGW("Unable to acquire a buffer item, very likely client tried to acquire more than"
+ " maxImages buffers");
+ return ACQUIRE_MAX_IMAGES;
+ }
+
+ status_t res = opaqueConsumer->acquireBuffer(buffer, 0);
+ if (res != OK) {
+ ctx->returnOpaqueBuffer(buffer);
+ if (res == INVALID_OPERATION) {
+ // Max number of images were already acquired.
+ ALOGE("%s: Max number of buffers allowed are already acquired : %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return ACQUIRE_MAX_IMAGES;
+ } else {
+ ALOGE("%s: Acquire image failed with error: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return ACQUIRE_NO_BUFFERS;
+ }
+ }
+
+ // Set SurfaceImage instance member variables
+ Image_setOpaqueBuffer(env, image, buffer);
+ env->SetLongField(image, gSurfaceImageClassInfo.mTimestamp,
+ static_cast<jlong>(buffer->mTimestamp));
+
+ return ACQUIRE_SUCCESS;
+}
+
+static jint ImageReader_lockedImageSetup(JNIEnv* env, JNIImageReaderContext* ctx, jobject image) {
CpuConsumer* consumer = ctx->getCpuConsumer();
CpuConsumer::LockedBuffer* buffer = ctx->getLockedBuffer();
if (buffer == NULL) {
@@ -867,6 +1063,57 @@ static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz,
return ACQUIRE_SUCCESS;
}
+static jint ImageReader_imageSetup(JNIEnv* env, jobject thiz, jobject image) {
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowRuntimeException(env, "ImageReaderContext is not initialized");
+ return -1;
+ }
+
+ if (ctx->isOpaque()) {
+ return ImageReader_opaqueImageSetup(env, ctx, image);
+ } else {
+ return ImageReader_lockedImageSetup(env, ctx, image);
+ }
+}
+
+static jint ImageReader_detachImage(JNIEnv* env, jobject thiz, jobject image) {
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageReaderContext* ctx = ImageReader_getContext(env, thiz);
+ if (ctx == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", "ImageReader was already closed");
+ return -1;
+ }
+
+ status_t res = OK;
+ if (!ctx->isOpaque()) {
+ // TODO: Non-Opaque format detach is not implemented yet.
+ jniThrowRuntimeException(env,
+ "nativeDetachImage is not implemented yet for non-opaque format !!!");
+ return -1;
+ }
+
+ BufferItemConsumer* opaqueConsumer = ctx->getOpaqueConsumer();
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, image);
+ if (!opaqueBuffer) {
+ ALOGE(
+ "Opaque Image already released and can not be detached from ImageReader!!!");
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque Image detach from ImageReader failed: buffer was already released");
+ return -1;
+ }
+
+ res = opaqueConsumer->detachBuffer(opaqueBuffer->mSlot);
+ if (res != OK) {
+ ALOGE("Opaque Image detach failed: %s (%d)!!!", strerror(-res), res);
+ jniThrowRuntimeException(env,
+ "nativeDetachImage failed for opaque image!!!");
+ return res;
+ }
+ return OK;
+}
+
static jobject ImageReader_getSurface(JNIEnv* env, jobject thiz)
{
ALOGV("%s: ", __FUNCTION__);
@@ -884,7 +1131,16 @@ static jobject ImageReader_getSurface(JNIEnv* env, jobject thiz)
static jobject Image_createSurfacePlane(JNIEnv* env, jobject thiz, int idx, int readerFormat)
{
int rowStride, pixelStride;
+ PublicFormat publicReaderFormat = static_cast<PublicFormat>(readerFormat);
+ int halReaderFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ publicReaderFormat);
+
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
+ if (isFormatOpaque(halReaderFormat)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque images from Opaque ImageReader do not have any planes");
+ return NULL;
+ }
CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
@@ -893,10 +1149,8 @@ static jobject Image_createSurfacePlane(JNIEnv* env, jobject thiz, int idx, int
jniThrowException(env, "java/lang/IllegalStateException", "Image was released");
}
- readerFormat = Image_getPixelFormat(env, readerFormat);
-
- rowStride = Image_imageGetRowStride(env, buffer, idx, readerFormat);
- pixelStride = Image_imageGetPixelStride(env, buffer, idx, readerFormat);
+ rowStride = Image_imageGetRowStride(env, buffer, idx, halReaderFormat);
+ pixelStride = Image_imageGetPixelStride(env, buffer, idx, halReaderFormat);
jobject surfPlaneObj = env->NewObject(gSurfacePlaneClassInfo.clazz,
gSurfacePlaneClassInfo.ctor, thiz, idx, rowStride, pixelStride);
@@ -909,19 +1163,26 @@ static jobject Image_getByteBuffer(JNIEnv* env, jobject thiz, int idx, int reade
uint8_t *base = NULL;
uint32_t size = 0;
jobject byteBuffer;
+ PublicFormat readerPublicFormat = static_cast<PublicFormat>(readerFormat);
+ int readerHalFormat = android_view_Surface_mapPublicFormatToHalFormat(
+ readerPublicFormat);
ALOGV("%s: buffer index: %d", __FUNCTION__, idx);
+ if (isFormatOpaque(readerHalFormat)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Opaque images from Opaque ImageReader do not have any plane");
+ return NULL;
+ }
+
CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
if (buffer == NULL) {
jniThrowException(env, "java/lang/IllegalStateException", "Image was released");
}
- readerFormat = Image_getPixelFormat(env, readerFormat);
-
// Create byteBuffer from native buffer
- Image_getLockedBufferInfo(env, buffer, idx, &base, &size, readerFormat);
+ Image_getLockedBufferInfo(env, buffer, idx, &base, &size, readerHalFormat);
if (size > static_cast<uint32_t>(INT32_MAX)) {
// Byte buffer have 'int capacity', so check the range
@@ -939,19 +1200,28 @@ static jobject Image_getByteBuffer(JNIEnv* env, jobject thiz, int idx, int reade
return byteBuffer;
}
-static jint Image_getWidth(JNIEnv* env, jobject thiz)
+static jint Image_getWidth(JNIEnv* env, jobject thiz, jint format)
{
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
- return Image_getBufferWidth(buffer);
+ if (isFormatOpaque(format)) {
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, thiz);
+ return Image_getOpaqueBufferWidth(opaqueBuffer);
+ } else {
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferWidth(buffer);
+ }
}
-static jint Image_getHeight(JNIEnv* env, jobject thiz)
+static jint Image_getHeight(JNIEnv* env, jobject thiz, jint format)
{
- CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
- return Image_getBufferHeight(buffer);
+ if (isFormatOpaque(format)) {
+ BufferItem* opaqueBuffer = Image_getOpaqueBuffer(env, thiz);
+ return Image_getOpaqueBufferHeight(opaqueBuffer);
+ } else {
+ CpuConsumer::LockedBuffer* buffer = Image_getLockedBuffer(env, thiz);
+ return Image_getBufferHeight(buffer);
+ }
}
-
} // extern "C"
// ----------------------------------------------------------------------------
@@ -963,14 +1233,15 @@ static JNINativeMethod gImageReaderMethods[] = {
{"nativeReleaseImage", "(Landroid/media/Image;)V", (void*)ImageReader_imageRelease },
{"nativeImageSetup", "(Landroid/media/Image;)I", (void*)ImageReader_imageSetup },
{"nativeGetSurface", "()Landroid/view/Surface;", (void*)ImageReader_getSurface },
+ {"nativeDetachImage", "(Landroid/media/Image;)I", (void*)ImageReader_detachImage },
};
static JNINativeMethod gImageMethods[] = {
{"nativeImageGetBuffer", "(II)Ljava/nio/ByteBuffer;", (void*)Image_getByteBuffer },
{"nativeCreatePlane", "(II)Landroid/media/ImageReader$SurfaceImage$SurfacePlane;",
(void*)Image_createSurfacePlane },
- {"nativeGetWidth", "()I", (void*)Image_getWidth },
- {"nativeGetHeight", "()I", (void*)Image_getHeight },
+ {"nativeGetWidth", "(I)I", (void*)Image_getWidth },
+ {"nativeGetHeight", "(I)I", (void*)Image_getHeight },
};
int register_android_media_ImageReader(JNIEnv *env) {
diff --git a/media/jni/android_media_ImageWriter.cpp b/media/jni/android_media_ImageWriter.cpp
new file mode 100644
index 0000000..294cd84
--- /dev/null
+++ b/media/jni/android_media_ImageWriter.cpp
@@ -0,0 +1,1083 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ImageWriter_JNI"
+#include <utils/Log.h>
+#include <utils/String8.h>
+
+#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
+#include <gui/CpuConsumer.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_runtime/android_view_Surface.h>
+#include <camera3.h>
+
+#include <jni.h>
+#include <JNIHelp.h>
+
+#include <stdint.h>
+#include <inttypes.h>
+
+#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
+
+#define IMAGE_BUFFER_JNI_ID "mNativeBuffer"
+
+// ----------------------------------------------------------------------------
+
+using namespace android;
+
+enum {
+ IMAGE_WRITER_MAX_NUM_PLANES = 3,
+};
+
+static struct {
+ jmethodID postEventFromNative;
+ jfieldID mWriterFormat;
+} gImageWriterClassInfo;
+
+static struct {
+ jfieldID mNativeBuffer;
+ jfieldID mNativeFenceFd;
+ jfieldID mPlanes;
+} gSurfaceImageClassInfo;
+
+static struct {
+ jclass clazz;
+ jmethodID ctor;
+} gSurfacePlaneClassInfo;
+
+typedef CpuConsumer::LockedBuffer LockedImage;
+
+// ----------------------------------------------------------------------------
+
+class JNIImageWriterContext : public BnProducerListener {
+public:
+ JNIImageWriterContext(JNIEnv* env, jobject weakThiz, jclass clazz);
+
+ virtual ~JNIImageWriterContext();
+
+ // Implementation of IProducerListener, used to notify the ImageWriter that the consumer
+ // has returned a buffer and it is ready for ImageWriter to dequeue.
+ virtual void onBufferReleased();
+
+ void setProducer(const sp<Surface>& producer) { mProducer = producer; }
+ Surface* getProducer() { return mProducer.get(); }
+
+ void setBufferFormat(int format) { mFormat = format; }
+ int getBufferFormat() { return mFormat; }
+
+ void setBufferWidth(int width) { mWidth = width; }
+ int getBufferWidth() { return mWidth; }
+
+ void setBufferHeight(int height) { mHeight = height; }
+ int getBufferHeight() { return mHeight; }
+
+private:
+ static JNIEnv* getJNIEnv(bool* needsDetach);
+ static void detachJNI();
+
+ sp<Surface> mProducer;
+ jobject mWeakThiz;
+ jclass mClazz;
+ int mFormat;
+ int mWidth;
+ int mHeight;
+};
+
+JNIImageWriterContext::JNIImageWriterContext(JNIEnv* env, jobject weakThiz, jclass clazz) :
+ mWeakThiz(env->NewGlobalRef(weakThiz)),
+ mClazz((jclass)env->NewGlobalRef(clazz)),
+ mFormat(0),
+ mWidth(-1),
+ mHeight(-1) {
+}
+
+JNIImageWriterContext::~JNIImageWriterContext() {
+ ALOGV("%s", __FUNCTION__);
+ bool needsDetach = false;
+ JNIEnv* env = getJNIEnv(&needsDetach);
+ if (env != NULL) {
+ env->DeleteGlobalRef(mWeakThiz);
+ env->DeleteGlobalRef(mClazz);
+ } else {
+ ALOGW("leaking JNI object references");
+ }
+ if (needsDetach) {
+ detachJNI();
+ }
+
+ mProducer.clear();
+}
+
+JNIEnv* JNIImageWriterContext::getJNIEnv(bool* needsDetach) {
+ ALOGV("%s", __FUNCTION__);
+ LOG_ALWAYS_FATAL_IF(needsDetach == NULL, "needsDetach is null!!!");
+ *needsDetach = false;
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ if (env == NULL) {
+ JavaVMAttachArgs args = {JNI_VERSION_1_4, NULL, NULL};
+ JavaVM* vm = AndroidRuntime::getJavaVM();
+ int result = vm->AttachCurrentThread(&env, (void*) &args);
+ if (result != JNI_OK) {
+ ALOGE("thread attach failed: %#x", result);
+ return NULL;
+ }
+ *needsDetach = true;
+ }
+ return env;
+}
+
+void JNIImageWriterContext::detachJNI() {
+ ALOGV("%s", __FUNCTION__);
+ JavaVM* vm = AndroidRuntime::getJavaVM();
+ int result = vm->DetachCurrentThread();
+ if (result != JNI_OK) {
+ ALOGE("thread detach failed: %#x", result);
+ }
+}
+
+void JNIImageWriterContext::onBufferReleased() {
+ ALOGV("%s: buffer released", __FUNCTION__);
+ bool needsDetach = false;
+ JNIEnv* env = getJNIEnv(&needsDetach);
+ if (env != NULL) {
+ // Detach the buffer every time a buffer consumption is done. Ideally this callback
+ // should provide a BufferItem so we only detach buffers that were attached to this
+ // Writer. For now, do the detach unconditionally for the opaque format. See b/19977520.
+ if (mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ sp<Fence> fence;
+ sp<GraphicBuffer> buffer;
+ ALOGV("%s: One buffer is detached", __FUNCTION__);
+ mProducer->detachNextBuffer(&buffer, &fence);
+ }
+
+ env->CallStaticVoidMethod(mClazz, gImageWriterClassInfo.postEventFromNative, mWeakThiz);
+ } else {
+ ALOGW("onBufferReleased event will not posted");
+ }
+
+ if (needsDetach) {
+ detachJNI();
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+extern "C" {
+
+// -------------------------------Private method declarations--------------
+
+static bool isPossiblyYUV(PixelFormat format);
+static void Image_setNativeContext(JNIEnv* env, jobject thiz,
+ sp<GraphicBuffer> buffer, int fenceFd);
+static void Image_getNativeContext(JNIEnv* env, jobject thiz,
+ GraphicBuffer** buffer, int* fenceFd);
+static void Image_unlockIfLocked(JNIEnv* env, jobject thiz);
+static bool isFormatOpaque(int format);
+
+// --------------------------ImageWriter methods---------------------------------------
+
+static void ImageWriter_classInit(JNIEnv* env, jclass clazz) {
+ ALOGV("%s:", __FUNCTION__);
+ jclass imageClazz = env->FindClass("android/media/ImageWriter$WriterSurfaceImage");
+ LOG_ALWAYS_FATAL_IF(imageClazz == NULL,
+ "can't find android/media/ImageWriter$WriterSurfaceImage");
+ gSurfaceImageClassInfo.mNativeBuffer = env->GetFieldID(
+ imageClazz, IMAGE_BUFFER_JNI_ID, "J");
+ LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeBuffer == NULL,
+ "can't find android/media/ImageWriter$WriterSurfaceImage.%s", IMAGE_BUFFER_JNI_ID);
+
+ gSurfaceImageClassInfo.mNativeFenceFd = env->GetFieldID(
+ imageClazz, "mNativeFenceFd", "I");
+ LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mNativeFenceFd == NULL,
+ "can't find android/media/ImageWriter$WriterSurfaceImage.mNativeFenceFd");
+
+ gSurfaceImageClassInfo.mPlanes = env->GetFieldID(
+ imageClazz, "mPlanes", "[Landroid/media/ImageWriter$WriterSurfaceImage$SurfacePlane;");
+ LOG_ALWAYS_FATAL_IF(gSurfaceImageClassInfo.mPlanes == NULL,
+ "can't find android/media/ImageWriter$WriterSurfaceImage.mPlanes");
+
+ gImageWriterClassInfo.postEventFromNative = env->GetStaticMethodID(
+ clazz, "postEventFromNative", "(Ljava/lang/Object;)V");
+ LOG_ALWAYS_FATAL_IF(gImageWriterClassInfo.postEventFromNative == NULL,
+ "can't find android/media/ImageWriter.postEventFromNative");
+
+ gImageWriterClassInfo.mWriterFormat = env->GetFieldID(
+ clazz, "mWriterFormat", "I");
+ LOG_ALWAYS_FATAL_IF(gImageWriterClassInfo.mWriterFormat == NULL,
+ "can't find android/media/ImageWriter.mWriterFormat");
+
+ jclass planeClazz = env->FindClass("android/media/ImageWriter$WriterSurfaceImage$SurfacePlane");
+ LOG_ALWAYS_FATAL_IF(planeClazz == NULL, "Can not find SurfacePlane class");
+ // FindClass only gives a local reference of jclass object.
+ gSurfacePlaneClassInfo.clazz = (jclass) env->NewGlobalRef(planeClazz);
+ gSurfacePlaneClassInfo.ctor = env->GetMethodID(gSurfacePlaneClassInfo.clazz, "<init>",
+ "(Landroid/media/ImageWriter$WriterSurfaceImage;IILjava/nio/ByteBuffer;)V");
+ LOG_ALWAYS_FATAL_IF(gSurfacePlaneClassInfo.ctor == NULL,
+ "Can not find SurfacePlane constructor");
+}
+
+static jlong ImageWriter_init(JNIEnv* env, jobject thiz, jobject weakThiz, jobject jsurface,
+ jint maxImages) {
+ status_t res;
+
+ ALOGV("%s: maxImages:%d", __FUNCTION__, maxImages);
+
+ sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+ if (surface == NULL) {
+ jniThrowException(env,
+ "java/lang/IllegalArgumentException",
+ "The surface has been released");
+ return 0;
+ }
+ sp<IGraphicBufferProducer> bufferProducer = surface->getIGraphicBufferProducer();
+
+ jclass clazz = env->GetObjectClass(thiz);
+ if (clazz == NULL) {
+ jniThrowRuntimeException(env, "Can't find android/graphics/ImageWriter");
+ return 0;
+ }
+ sp<JNIImageWriterContext> ctx(new JNIImageWriterContext(env, weakThiz, clazz));
+
+ sp<Surface> producer = new Surface(bufferProducer, /*controlledByApp*/false);
+ ctx->setProducer(producer);
+ /**
+ * NATIVE_WINDOW_API_CPU isn't a good choice here, as it makes the bufferQueue not connectable
+ * after disconnect. MEDIA or CAMERA are treated the same internally. The producer listener
+ * will be cleared after disconnect call.
+ */
+ producer->connect(/*api*/NATIVE_WINDOW_API_CAMERA, /*listener*/ctx);
+ jlong nativeCtx = reinterpret_cast<jlong>(ctx.get());
+
+ // Get the dimension and format of the producer.
+ sp<ANativeWindow> anw = producer;
+ int32_t width, height, format;
+ if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) {
+ ALOGE("%s: Query Surface width failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ jniThrowRuntimeException(env, "Failed to query Surface width");
+ return 0;
+ }
+ ctx->setBufferWidth(width);
+
+ if ((res = anw->query(anw.get(), NATIVE_WINDOW_HEIGHT, &height)) != OK) {
+ ALOGE("%s: Query Surface height failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ jniThrowRuntimeException(env, "Failed to query Surface height");
+ return 0;
+ }
+ ctx->setBufferHeight(height);
+
+ if ((res = anw->query(anw.get(), NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ ALOGE("%s: Query Surface format failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ jniThrowRuntimeException(env, "Failed to query Surface format");
+ return 0;
+ }
+ ctx->setBufferFormat(format);
+ env->SetIntField(thiz, gImageWriterClassInfo.mWriterFormat, reinterpret_cast<jint>(format));
+
+
+ if (!isFormatOpaque(format)) {
+ res = native_window_set_usage(anw.get(), GRALLOC_USAGE_SW_WRITE_OFTEN);
+ if (res != OK) {
+ ALOGE("%s: Configure usage %08x for format %08x failed: %s (%d)",
+ __FUNCTION__, GRALLOC_USAGE_SW_WRITE_OFTEN, format, strerror(-res), res);
+ jniThrowRuntimeException(env, "Failed to SW_WRITE_OFTEN configure usage");
+ return 0;
+ }
+ }
+
+ int minUndequeuedBufferCount = 0;
+ res = anw->query(anw.get(),
+ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufferCount);
+ if (res != OK) {
+ ALOGE("%s: Query producer undequeued buffer count failed: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ jniThrowRuntimeException(env, "Query producer undequeued buffer count failed");
+ return 0;
+ }
+
+ size_t totalBufferCount = maxImages + minUndequeuedBufferCount;
+ res = native_window_set_buffer_count(anw.get(), totalBufferCount);
+ if (res != OK) {
+ ALOGE("%s: Set buffer count failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ jniThrowRuntimeException(env, "Set buffer count failed");
+ return 0;
+ }
+
+ if (ctx != 0) {
+ ctx->incStrong((void*)ImageWriter_init);
+ }
+ return nativeCtx;
+}
+
+static void ImageWriter_dequeueImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image) {
+ ALOGV("%s", __FUNCTION__);
+ JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+ if (ctx == NULL || thiz == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "ImageWriterContext is not initialized");
+ return;
+ }
+
+ sp<ANativeWindow> anw = ctx->getProducer();
+ android_native_buffer_t *anb = NULL;
+ int fenceFd = -1;
+ status_t res = anw->dequeueBuffer(anw.get(), &anb, &fenceFd);
+ if (res != OK) {
+ // TODO: handle different error cases here.
+ ALOGE("%s: Dequeue buffer failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ jniThrowRuntimeException(env, "dequeue buffer failed");
+ return;
+ }
+ // New GraphicBuffer object doesn't own the handle, thus the native buffer
+ // won't be freed when this object is destroyed.
+ sp<GraphicBuffer> buffer(new GraphicBuffer(anb, /*keepOwnership*/false));
+
+ // Note that:
+ // 1. No need to lock buffer now, will only lock it when the first getPlanes() is called.
+ // 2. Fence will be saved to mNativeFenceFd, and will be consumed by lock/queue/cancel buffer
+ // later.
+ // 3. Need to use lockAsync here, as it will handle the dequeued fence for us automatically.
+
+ // Finally, set the native info into image object.
+ Image_setNativeContext(env, image, buffer, fenceFd);
+}
+
+static void ImageWriter_close(JNIEnv* env, jobject thiz, jlong nativeCtx) {
+ ALOGV("%s:", __FUNCTION__);
+ JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+ if (ctx == NULL || thiz == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "ImageWriterContext is not initialized");
+ return;
+ }
+
+ ANativeWindow* producer = ctx->getProducer();
+ if (producer != NULL) {
+ /**
+ * NATIVE_WINDOW_API_CPU isn't a good choice here, as it makes the bufferQueue not
+ * connectable after disconnect. MEDIA or CAMERA are treated the same internally.
+ * The producer listener will be cleared after disconnect call.
+ */
+ status_t res = native_window_api_disconnect(producer, /*api*/NATIVE_WINDOW_API_CAMERA);
+ /**
+ * This is not an error. If the calling client process dies, the window will
+ * also die and all calls to it will return DEAD_OBJECT, thus it's already
+ * "disconnected".
+ */
+ if (res == DEAD_OBJECT) {
+ ALOGW("%s: While disconnecting ImageWriter from native window, the"
+ " native window died already", __FUNCTION__);
+ } else if (res != OK) {
+ ALOGE("%s: native window disconnect failed: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ jniThrowRuntimeException(env, "Native window disconnect failed");
+ return;
+ }
+ }
+
+ ctx->decStrong((void*)ImageWriter_init);
+}
+
+static void ImageWriter_cancelImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image) {
+ ALOGV("%s", __FUNCTION__);
+ JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+ if (ctx == NULL || thiz == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "ImageWriterContext is not initialized");
+ return;
+ }
+
+ sp<ANativeWindow> anw = ctx->getProducer();
+
+ GraphicBuffer *buffer = NULL;
+ int fenceFd = -1;
+ Image_getNativeContext(env, image, &buffer, &fenceFd);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized");
+ return;
+ }
+
+ // Unlock the image if it was locked
+ Image_unlockIfLocked(env, image);
+
+ anw->cancelBuffer(anw.get(), buffer, fenceFd);
+
+ Image_setNativeContext(env, image, NULL, -1);
+}
+
+static void ImageWriter_queueImage(JNIEnv* env, jobject thiz, jlong nativeCtx, jobject image,
+ jlong timestampNs, jint left, jint top, jint right, jint bottom) {
+ ALOGV("%s", __FUNCTION__);
+ JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+ if (ctx == NULL || thiz == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "ImageWriterContext is not initialized");
+ return;
+ }
+
+ status_t res = OK;
+ sp<ANativeWindow> anw = ctx->getProducer();
+
+ GraphicBuffer *buffer = NULL;
+ int fenceFd = -1;
+ Image_getNativeContext(env, image, &buffer, &fenceFd);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized");
+ return;
+ }
+
+ // Unlock image if it was locked.
+ Image_unlockIfLocked(env, image);
+
+ // Set timestamp
+ ALOGV("timestamp to be queued: %" PRId64, timestampNs);
+ res = native_window_set_buffers_timestamp(anw.get(), timestampNs);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "Set timestamp failed");
+ return;
+ }
+
+ // Set crop
+ android_native_rect_t cropRect;
+ cropRect.left = left;
+ cropRect.top = top;
+ cropRect.right = right;
+ cropRect.bottom = bottom;
+ res = native_window_set_crop(anw.get(), &cropRect);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "Set crop rect failed");
+ return;
+ }
+
+ // Finally, queue input buffer
+ res = anw->queueBuffer(anw.get(), buffer, fenceFd);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "Queue input buffer failed");
+ return;
+ }
+
+ // Clear the image native context: end of this image's lifecycle in public API.
+ Image_setNativeContext(env, image, NULL, -1);
+}
+
+static jint ImageWriter_attachAndQueueImage(JNIEnv* env, jobject thiz, jlong nativeCtx,
+ jlong nativeBuffer, jint imageFormat, jlong timestampNs, jint left, jint top,
+ jint right, jint bottom) {
+ ALOGV("%s", __FUNCTION__);
+ JNIImageWriterContext* const ctx = reinterpret_cast<JNIImageWriterContext *>(nativeCtx);
+ if (ctx == NULL || thiz == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "ImageWriterContext is not initialized");
+ return -1;
+ }
+
+ sp<Surface> surface = ctx->getProducer();
+ status_t res = OK;
+ if (!isFormatOpaque(imageFormat)) {
+ // TODO: need implement, see b/19962027
+ jniThrowRuntimeException(env,
+ "nativeAttachImage for non-opaque image is not implement yet!!!");
+ return -1;
+ }
+
+ if (!isFormatOpaque(ctx->getBufferFormat())) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Trying to attach an opaque image into a non-opaque ImageWriter");
+ return -1;
+ }
+
+ // Image is guaranteed to be from ImageReader at this point, so it is safe to
+ // cast to BufferItem pointer.
+ BufferItem* opaqueBuffer = reinterpret_cast<BufferItem*>(nativeBuffer);
+ if (opaqueBuffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized or already closed");
+ return -1;
+ }
+
+ // Step 1. Attach Image
+ res = surface->attachBuffer(opaqueBuffer->mGraphicBuffer.get());
+ if (res != OK) {
+ // TODO: handle different error case separately.
+ ALOGE("Attach image failed: %s (%d)", strerror(-res), res);
+ jniThrowRuntimeException(env, "nativeAttachImage failed!!!");
+ return res;
+ }
+ sp < ANativeWindow > anw = surface;
+
+ // Step 2. Set timestamp and crop. Note that we do not need to unlock the image because
+ // it was not locked.
+ ALOGV("timestamp to be queued: %" PRId64, timestampNs);
+ res = native_window_set_buffers_timestamp(anw.get(), timestampNs);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "Set timestamp failed");
+ return res;
+ }
+
+ android_native_rect_t cropRect;
+ cropRect.left = left;
+ cropRect.top = top;
+ cropRect.right = right;
+ cropRect.bottom = bottom;
+ res = native_window_set_crop(anw.get(), &cropRect);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "Set crop rect failed");
+ return res;
+ }
+
+ // Step 3. Queue Image.
+ res = anw->queueBuffer(anw.get(), opaqueBuffer->mGraphicBuffer.get(), /*fenceFd*/
+ -1);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "Queue input buffer failed");
+ return res;
+ }
+
+ // Do not set the image native context. Since it would overwrite the existing native context
+ // of the image that is from ImageReader, the subsequent image close will run into issues.
+
+ return res;
+}
+
+// --------------------------Image methods---------------------------------------
+
+static void Image_getNativeContext(JNIEnv* env, jobject thiz,
+ GraphicBuffer** buffer, int* fenceFd) {
+ ALOGV("%s", __FUNCTION__);
+ if (buffer != NULL) {
+ GraphicBuffer *gb = reinterpret_cast<GraphicBuffer *>
+ (env->GetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer));
+ *buffer = gb;
+ }
+
+ if (fenceFd != NULL) {
+ *fenceFd = reinterpret_cast<jint>(env->GetIntField(
+ thiz, gSurfaceImageClassInfo.mNativeFenceFd));
+ }
+}
+
+static void Image_setNativeContext(JNIEnv* env, jobject thiz,
+ sp<GraphicBuffer> buffer, int fenceFd) {
+ ALOGV("%s:", __FUNCTION__);
+ GraphicBuffer* p = NULL;
+ Image_getNativeContext(env, thiz, &p, /*fenceFd*/NULL);
+ if (buffer != 0) {
+ buffer->incStrong((void*)Image_setNativeContext);
+ }
+ if (p) {
+ p->decStrong((void*)Image_setNativeContext);
+ }
+ env->SetLongField(thiz, gSurfaceImageClassInfo.mNativeBuffer,
+ reinterpret_cast<jlong>(buffer.get()));
+
+ env->SetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd, reinterpret_cast<jint>(fenceFd));
+}
+
+static void Image_unlockIfLocked(JNIEnv* env, jobject thiz) {
+ ALOGV("%s", __FUNCTION__);
+ GraphicBuffer* buffer;
+ Image_getNativeContext(env, thiz, &buffer, NULL);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized");
+ return;
+ }
+
+ // Is locked?
+ bool isLocked = false;
+ jobject planes = NULL;
+ if (!isFormatOpaque(buffer->getPixelFormat())) {
+ planes = env->GetObjectField(thiz, gSurfaceImageClassInfo.mPlanes);
+ }
+ isLocked = (planes != NULL);
+ if (isLocked) {
+ // No need to use the fence here, as it will be consumed by either cancel or queue buffer.
+ status_t res = buffer->unlock();
+ if (res != OK) {
+ jniThrowRuntimeException(env, "unlock buffer failed");
+ }
+ ALOGV("Successfully unlocked the image");
+ }
+}
+
+static jint Image_getWidth(JNIEnv* env, jobject thiz) {
+ ALOGV("%s", __FUNCTION__);
+ GraphicBuffer* buffer;
+ Image_getNativeContext(env, thiz, &buffer, NULL);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized");
+ return -1;
+ }
+
+ return buffer->getWidth();
+}
+
+static jint Image_getHeight(JNIEnv* env, jobject thiz) {
+ ALOGV("%s", __FUNCTION__);
+ GraphicBuffer* buffer;
+ Image_getNativeContext(env, thiz, &buffer, NULL);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized");
+ return -1;
+ }
+
+ return buffer->getHeight();
+}
+
+// Some formats like JPEG are defined with different values in android.graphics.ImageFormat and
+// graphics.h; convert them to the value defined in graphics.h here.
+static int Image_getPixelFormat(JNIEnv* env, int format) {
+ int jpegFormat;
+ jfieldID fid;
+
+ ALOGV("%s: format = 0x%x", __FUNCTION__, format);
+
+ jclass imageFormatClazz = env->FindClass("android/graphics/ImageFormat");
+ ALOG_ASSERT(imageFormatClazz != NULL);
+
+ fid = env->GetStaticFieldID(imageFormatClazz, "JPEG", "I");
+ jpegFormat = env->GetStaticIntField(imageFormatClazz, fid);
+
+ // Translate the JPEG to BLOB for camera purpose.
+ if (format == jpegFormat) {
+ format = HAL_PIXEL_FORMAT_BLOB;
+ }
+
+ return format;
+}
+
+static jint Image_getFormat(JNIEnv* env, jobject thiz) {
+ ALOGV("%s", __FUNCTION__);
+ GraphicBuffer* buffer;
+ Image_getNativeContext(env, thiz, &buffer, NULL);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized");
+ return 0;
+ }
+
+ return Image_getPixelFormat(env, buffer->getPixelFormat());
+}
+
+static void Image_setFenceFd(JNIEnv* env, jobject thiz, int fenceFd) {
+ ALOGV("%s:", __FUNCTION__);
+ env->SetIntField(thiz, gSurfaceImageClassInfo.mNativeFenceFd, reinterpret_cast<jint>(fenceFd));
+}
+
+static void Image_getLockedImage(JNIEnv* env, jobject thiz, LockedImage *image) {
+ ALOGV("%s", __FUNCTION__);
+ GraphicBuffer* buffer;
+ int fenceFd = -1;
+ Image_getNativeContext(env, thiz, &buffer, &fenceFd);
+ if (buffer == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Image is not initialized");
+ return;
+ }
+
+ void* pData = NULL;
+ android_ycbcr ycbcr = android_ycbcr();
+ status_t res;
+ int format = Image_getFormat(env, thiz);
+ int flexFormat = format;
+ if (isPossiblyYUV(format)) {
+ // ImageWriter doesn't use crop by itself; the app sets it, so use the no-crop version.
+ res = buffer->lockAsyncYCbCr(GRALLOC_USAGE_SW_WRITE_OFTEN, &ycbcr, fenceFd);
+ // Clear the fenceFd as it is already consumed by lock call.
+ Image_setFenceFd(env, thiz, /*fenceFd*/-1);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "lockAsyncYCbCr failed for YUV buffer");
+ return;
+ }
+ pData = ycbcr.y;
+ flexFormat = HAL_PIXEL_FORMAT_YCbCr_420_888;
+ }
+
+ // lockAsyncYCbCr for YUV was not applicable or unsuccessful; fall back to lockAsync.
+ if (pData == NULL) {
+ res = buffer->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &pData, fenceFd);
+ if (res != OK) {
+ jniThrowRuntimeException(env, "lockAsync failed");
+ return;
+ }
+ }
+
+ image->data = reinterpret_cast<uint8_t*>(pData);
+ image->width = buffer->getWidth();
+ image->height = buffer->getHeight();
+ image->format = format;
+ image->flexFormat = flexFormat;
+ image->stride = (ycbcr.y != NULL) ? static_cast<uint32_t>(ycbcr.ystride) : buffer->getStride();
+
+ image->dataCb = reinterpret_cast<uint8_t*>(ycbcr.cb);
+ image->dataCr = reinterpret_cast<uint8_t*>(ycbcr.cr);
+ image->chromaStride = static_cast<uint32_t>(ycbcr.cstride);
+ image->chromaStep = static_cast<uint32_t>(ycbcr.chroma_step);
+ ALOGV("Successfully locked the image");
+ // crop, transform, scalingMode, timestamp, and frameNumber should be set by producer,
+ // and we don't set them here.
+}
+
+static bool usingRGBAToJpegOverride(int32_t bufferFormat, int32_t writerCtxFormat) {
+ return writerCtxFormat == HAL_PIXEL_FORMAT_BLOB && bufferFormat == HAL_PIXEL_FORMAT_RGBA_8888;
+}
+
+static int32_t applyFormatOverrides(int32_t bufferFormat, int32_t writerCtxFormat)
+{
+ // Using HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers containing JPEGs to get around SW
+ // write limitations for some platforms (b/17379185).
+ if (usingRGBAToJpegOverride(bufferFormat, writerCtxFormat)) {
+ return HAL_PIXEL_FORMAT_BLOB;
+ }
+ return bufferFormat;
+}
+
+static uint32_t Image_getJpegSize(LockedImage* buffer, bool usingRGBAOverride) {
+ ALOGV("%s", __FUNCTION__);
+ ALOG_ASSERT(buffer != NULL, "Input buffer is NULL!!!");
+ uint32_t size = 0;
+ uint32_t width = buffer->width;
+ uint8_t* jpegBuffer = buffer->data;
+
+ if (usingRGBAOverride) {
+ width = (buffer->width + buffer->stride * (buffer->height - 1)) * 4;
+ }
+
+ // First check for JPEG transport header at the end of the buffer
+ uint8_t* header = jpegBuffer + (width - sizeof(struct camera3_jpeg_blob));
+ struct camera3_jpeg_blob *blob = (struct camera3_jpeg_blob*)(header);
+ if (blob->jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) {
+ size = blob->jpeg_size;
+ ALOGV("%s: Jpeg size = %d", __FUNCTION__, size);
+ }
+
+ // failed to find size, default to whole buffer
+ if (size == 0) {
+ /*
+ * This is a problem because not including the JPEG header
+ * means that in certain rare situations a regular JPEG blob
+ * will be misidentified as having a header, in which case
+ * we will get a garbage size value.
+ */
+ ALOGW("%s: No JPEG header detected, defaulting to size=width=%d",
+ __FUNCTION__, width);
+ size = width;
+ }
+
+ return size;
+}
+
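A minimal standalone sketch of the trailer lookup performed by Image_getJpegSize() above (not part of the patch; JpegBlob is a stand-in assumed to mirror camera3_jpeg_blob, and 0x00FF is the assumed CAMERA3_JPEG_BLOB_ID value):

#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Stand-in for camera3_jpeg_blob from camera3.h (layout assumed here).
struct JpegBlob {
    uint16_t jpeg_blob_id;
    uint32_t jpeg_size;
};

int main() {
    const uint16_t kBlobId = 0x00FF;      // assumed CAMERA3_JPEG_BLOB_ID
    std::vector<uint8_t> buf(1024);       // BLOB buffer: height == 1, width == byte size
    JpegBlob blob = {kBlobId, 300};       // pretend a 300-byte JPEG was written
    std::memcpy(buf.data() + buf.size() - sizeof(JpegBlob), &blob, sizeof(JpegBlob));

    // Same arithmetic as Image_getJpegSize(): the transport header sits at the very end.
    JpegBlob tail;
    std::memcpy(&tail, buf.data() + buf.size() - sizeof(JpegBlob), sizeof(JpegBlob));
    uint32_t size = (tail.jpeg_blob_id == kBlobId) ? tail.jpeg_size
                                                   : (uint32_t)buf.size();  // fall back to whole buffer
    std::printf("jpeg size = %u\n", size);
    return 0;
}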
+static void Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
+ int32_t writerFormat, uint8_t **base, uint32_t *size, int *pixelStride, int *rowStride) {
+ ALOGV("%s", __FUNCTION__);
+ ALOG_ASSERT(buffer != NULL, "Input buffer is NULL!!!");
+ ALOG_ASSERT(base != NULL, "base is NULL!!!");
+ ALOG_ASSERT(size != NULL, "size is NULL!!!");
+ ALOG_ASSERT(pixelStride != NULL, "pixelStride is NULL!!!");
+ ALOG_ASSERT(rowStride != NULL, "rowStride is NULL!!!");
+ ALOG_ASSERT((idx < IMAGE_WRITER_MAX_NUM_PLANES) && (idx >= 0));
+
+ ALOGV("%s: buffer: %p", __FUNCTION__, buffer);
+
+ uint32_t dataSize, ySize, cSize, cStride;
+ uint32_t pStride = 0, rStride = 0;
+ uint8_t *cb, *cr;
+ uint8_t *pData = NULL;
+ int bytesPerPixel = 0;
+
+ dataSize = ySize = cSize = cStride = 0;
+ int32_t fmt = buffer->flexFormat;
+
+ bool usingRGBAOverride = usingRGBAToJpegOverride(fmt, writerFormat);
+ fmt = applyFormatOverrides(fmt, writerFormat);
+ switch (fmt) {
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ pData =
+ (idx == 0) ?
+ buffer->data :
+ (idx == 1) ?
+ buffer->dataCb :
+ buffer->dataCr;
+ // only map until last pixel
+ if (idx == 0) {
+ pStride = 1;
+ rStride = buffer->stride;
+ dataSize = buffer->stride * (buffer->height - 1) + buffer->width;
+ } else {
+ pStride = buffer->chromaStep;
+ rStride = buffer->chromaStride;
+ dataSize = buffer->chromaStride * (buffer->height / 2 - 1) +
+ buffer->chromaStep * (buffer->width / 2 - 1) + 1;
+ }
+ break;
+ // NV21
+ case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+ cr = buffer->data + (buffer->stride * buffer->height);
+ cb = cr + 1;
+ // only map until last pixel
+ ySize = buffer->width * (buffer->height - 1) + buffer->width;
+ cSize = buffer->width * (buffer->height / 2 - 1) + buffer->width - 1;
+
+ pData =
+ (idx == 0) ?
+ buffer->data :
+ (idx == 1) ?
+ cb:
+ cr;
+
+ dataSize = (idx == 0) ? ySize : cSize;
+ pStride = (idx == 0) ? 1 : 2;
+ rStride = buffer->width;
+ break;
+ case HAL_PIXEL_FORMAT_YV12:
+ // Y and C stride need to be 16 pixel aligned.
+ LOG_ALWAYS_FATAL_IF(buffer->stride % 16,
+ "Stride is not 16 pixel aligned %d", buffer->stride);
+
+ ySize = buffer->stride * buffer->height;
+ cStride = ALIGN(buffer->stride / 2, 16);
+ cr = buffer->data + ySize;
+ cSize = cStride * buffer->height / 2;
+ cb = cr + cSize;
+
+ pData =
+ (idx == 0) ?
+ buffer->data :
+ (idx == 1) ?
+ cb :
+ cr;
+ dataSize = (idx == 0) ? ySize : cSize;
+ pStride = 1;
+ rStride = (idx == 0) ? buffer->stride : ALIGN(buffer->stride / 2, 16);
+ break;
+ case HAL_PIXEL_FORMAT_Y8:
+ // Single plane, 8bpp.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height;
+ pStride = 1;
+ rStride = buffer->stride;
+ break;
+ case HAL_PIXEL_FORMAT_Y16:
+ bytesPerPixel = 2;
+ // Single plane, 16bpp, strides are specified in pixels, not in bytes
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 2;
+ break;
+ case HAL_PIXEL_FORMAT_BLOB:
+ // Used for JPEG data, height must be 1, width == size, single plane.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ ALOG_ASSERT(buffer->height == 1, "JPEG buffer should have height 1, got %d", buffer->height);
+
+ pData = buffer->data;
+ dataSize = Image_getJpegSize(buffer, usingRGBAOverride);
+ pStride = bytesPerPixel;
+ rStride = 0;
+ break;
+ case HAL_PIXEL_FORMAT_RAW16:
+ // Single plane 16bpp bayer data.
+ bytesPerPixel = 2;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 2;
+ break;
+ case HAL_PIXEL_FORMAT_RAW10:
+ // Single plane 10bpp bayer data.
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ LOG_ALWAYS_FATAL_IF(buffer->width % 4,
+ "Width is not multiple of 4 %d", buffer->width);
+ LOG_ALWAYS_FATAL_IF(buffer->height % 2,
+ "Height is not even %d", buffer->height);
+ LOG_ALWAYS_FATAL_IF(buffer->stride < (buffer->width * 10 / 8),
+ "stride (%d) should be at least %d",
+ buffer->stride, buffer->width * 10 / 8);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height;
+ pStride = 0;
+ rStride = buffer->stride;
+ break;
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ // Single plane, 32bpp.
+ bytesPerPixel = 4;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 4;
+ break;
+ case HAL_PIXEL_FORMAT_RGB_565:
+ // Single plane, 16bpp.
+ bytesPerPixel = 2;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 2;
+ break;
+ case HAL_PIXEL_FORMAT_RGB_888:
+ // Single plane, 24bpp.
+ bytesPerPixel = 3;
+ ALOG_ASSERT(idx == 0, "Wrong index: %d", idx);
+ pData = buffer->data;
+ dataSize = buffer->stride * buffer->height * bytesPerPixel;
+ pStride = bytesPerPixel;
+ rStride = buffer->stride * 3;
+ break;
+ default:
+ jniThrowExceptionFmt(env, "java/lang/UnsupportedOperationException",
+ "Pixel format: 0x%x is unsupported", fmt);
+ break;
+ }
+
+ *base = pData;
+ *size = dataSize;
+ *pixelStride = pStride;
+ *rowStride = rStride;
+}
+
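The "only map until last pixel" sizing used above can be sanity-checked with plain arithmetic. A small standalone sketch (not part of the patch) using made-up 640x480 flexible-YUV geometry with interleaved chroma (step 2):

#include <cstdint>
#include <cstdio>

int main() {
    // Hypothetical YCbCr_420_888 geometry; strides and chroma step are example values only.
    const uint32_t width = 640, height = 480;
    const uint32_t yStride = 640, chromaStride = 640, chromaStep = 2;

    // Same formulas as the YCbCr_420_888 case above: the mapping ends at the last
    // addressable pixel of each plane instead of covering stride * rows.
    uint32_t ySize = yStride * (height - 1) + width;
    uint32_t cSize = chromaStride * (height / 2 - 1) + chromaStep * (width / 2 - 1) + 1;

    std::printf("y plane: %u bytes, chroma plane: %u bytes\n", ySize, cSize);  // 307200 / 153599
    return 0;
}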
+static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
+ int numPlanes, int writerFormat) {
+ ALOGV("%s: create SurfacePlane array with size %d", __FUNCTION__, numPlanes);
+ int rowStride, pixelStride;
+ uint8_t *pData;
+ uint32_t dataSize;
+ jobject byteBuffer;
+
+ int format = Image_getFormat(env, thiz);
+ if (isFormatOpaque(format) && numPlanes > 0) {
+ String8 msg;
+ msg.appendFormat("Format 0x%x is opaque, thus not writable, the number of planes (%d)"
+ " must be 0", format, numPlanes);
+ jniThrowException(env, "java/lang/IllegalArgumentException", msg.string());
+ return NULL;
+ }
+
+ jobjectArray surfacePlanes = env->NewObjectArray(numPlanes, gSurfacePlaneClassInfo.clazz,
+ /*initial_element*/NULL);
+ if (surfacePlanes == NULL) {
+ jniThrowRuntimeException(env, "Failed to create SurfacePlane arrays,"
+ " probably out of memory");
+ return NULL;
+ }
+ if (isFormatOpaque(format)) {
+ return surfacePlanes;
+ }
+
+ // Build up buffer info: rowStride, pixelStride and byteBuffers.
+ LockedImage lockedImg = LockedImage();
+ Image_getLockedImage(env, thiz, &lockedImg);
+
+ // Create all SurfacePlanes
+ writerFormat = Image_getPixelFormat(env, writerFormat);
+ for (int i = 0; i < numPlanes; i++) {
+ Image_getLockedImageInfo(env, &lockedImg, i, writerFormat,
+ &pData, &dataSize, &pixelStride, &rowStride);
+ byteBuffer = env->NewDirectByteBuffer(pData, dataSize);
+ if ((byteBuffer == NULL) && (env->ExceptionCheck() == false)) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to allocate ByteBuffer");
+ return NULL;
+ }
+
+ // Finally, create this SurfacePlane.
+ jobject surfacePlane = env->NewObject(gSurfacePlaneClassInfo.clazz,
+ gSurfacePlaneClassInfo.ctor, thiz, rowStride, pixelStride, byteBuffer);
+ env->SetObjectArrayElement(surfacePlanes, i, surfacePlane);
+ }
+
+ return surfacePlanes;
+}
+
+// -------------------------------Private convenience methods--------------------
+
+static bool isFormatOpaque(int format) {
+ // Only treat IMPLEMENTATION_DEFINED as an opaque format for now.
+ return format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+}
+
+static bool isPossiblyYUV(PixelFormat format) {
+ switch (static_cast<int>(format)) {
+ case HAL_PIXEL_FORMAT_RGBA_8888:
+ case HAL_PIXEL_FORMAT_RGBX_8888:
+ case HAL_PIXEL_FORMAT_RGB_888:
+ case HAL_PIXEL_FORMAT_RGB_565:
+ case HAL_PIXEL_FORMAT_BGRA_8888:
+ case HAL_PIXEL_FORMAT_Y8:
+ case HAL_PIXEL_FORMAT_Y16:
+ case HAL_PIXEL_FORMAT_RAW16:
+ case HAL_PIXEL_FORMAT_RAW10:
+ case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+ case HAL_PIXEL_FORMAT_BLOB:
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ return false;
+
+ case HAL_PIXEL_FORMAT_YV12:
+ case HAL_PIXEL_FORMAT_YCbCr_420_888:
+ case HAL_PIXEL_FORMAT_YCbCr_422_SP:
+ case HAL_PIXEL_FORMAT_YCrCb_420_SP:
+ case HAL_PIXEL_FORMAT_YCbCr_422_I:
+ default:
+ return true;
+ }
+}
+
+} // extern "C"
+
+// ----------------------------------------------------------------------------
+
+static JNINativeMethod gImageWriterMethods[] = {
+ {"nativeClassInit", "()V", (void*)ImageWriter_classInit },
+ {"nativeInit", "(Ljava/lang/Object;Landroid/view/Surface;I)J",
+ (void*)ImageWriter_init },
+ {"nativeClose", "(J)V", (void*)ImageWriter_close },
+ {"nativeAttachAndQueueImage", "(JJIJIIII)I", (void*)ImageWriter_attachAndQueueImage },
+ {"nativeDequeueInputImage", "(JLandroid/media/Image;)V", (void*)ImageWriter_dequeueImage },
+ {"nativeQueueInputImage", "(JLandroid/media/Image;JIIII)V", (void*)ImageWriter_queueImage },
+ {"cancelImage", "(JLandroid/media/Image;)V", (void*)ImageWriter_cancelImage },
+};
+
+static JNINativeMethod gImageMethods[] = {
+ {"nativeCreatePlanes", "(II)[Landroid/media/ImageWriter$WriterSurfaceImage$SurfacePlane;",
+ (void*)Image_createSurfacePlanes },
+ {"nativeGetWidth", "()I", (void*)Image_getWidth },
+ {"nativeGetHeight", "()I", (void*)Image_getHeight },
+ {"nativeGetFormat", "()I", (void*)Image_getFormat },
+};
+
+int register_android_media_ImageWriter(JNIEnv *env) {
+
+ int ret1 = AndroidRuntime::registerNativeMethods(env,
+ "android/media/ImageWriter", gImageWriterMethods, NELEM(gImageWriterMethods));
+
+ int ret2 = AndroidRuntime::registerNativeMethods(env,
+ "android/media/ImageWriter$WriterSurfaceImage", gImageMethods, NELEM(gImageMethods));
+
+ return (ret1 || ret2);
+}
+
diff --git a/media/jni/android_media_MediaCodec.cpp b/media/jni/android_media_MediaCodec.cpp
index 1cf589d..f808c0d 100644
--- a/media/jni/android_media_MediaCodec.cpp
+++ b/media/jni/android_media_MediaCodec.cpp
@@ -39,7 +39,7 @@
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/MediaErrors.h>
-
+#include <media/stagefright/PersistentSurface.h>
#include <nativehelper/ScopedLocalRef.h>
#include <system/window.h>
@@ -70,6 +70,19 @@ static struct CodecActionCodes {
jint codecActionRecoverable;
} gCodecActionCodes;
+static struct ExceptionReason {
+ jint reasonHardware;
+ jint reasonReclaimed;
+} gExceptionReason;
+
+static struct {
+ jclass clazz;
+ jfieldID mLock;
+ jfieldID mPersistentObject;
+ jmethodID ctor;
+ jmethodID setNativeObjectLocked;
+} gPersistentSurfaceClassInfo;
+
struct fields_t {
jfieldID context;
jmethodID postEventFromNativeID;
@@ -82,6 +95,7 @@ struct fields_t {
};
static fields_t gFields;
+static const void *sRefBaseOwner;
////////////////////////////////////////////////////////////////////////////////
@@ -215,7 +229,7 @@ void JMediaCodec::deleteJavaObjects(JNIEnv *env) {
status_t JMediaCodec::setCallback(jobject cb) {
if (cb != NULL) {
if (mCallbackNotification == NULL) {
- mCallbackNotification = new AMessage(kWhatCallbackNotify, id());
+ mCallbackNotification = new AMessage(kWhatCallbackNotify, this);
}
} else {
mCallbackNotification.clear();
@@ -240,11 +254,29 @@ status_t JMediaCodec::configure(
return mCodec->configure(format, mSurfaceTextureClient, crypto, flags);
}
+status_t JMediaCodec::setSurface(
+ const sp<IGraphicBufferProducer> &bufferProducer) {
+ sp<Surface> client;
+ if (bufferProducer != NULL) {
+ client = new Surface(bufferProducer, true /* controlledByApp */);
+ }
+ status_t err = mCodec->setSurface(client);
+ if (err == OK) {
+ mSurfaceTextureClient = client;
+ }
+ return err;
+}
+
status_t JMediaCodec::createInputSurface(
sp<IGraphicBufferProducer>* bufferProducer) {
return mCodec->createInputSurface(bufferProducer);
}
+status_t JMediaCodec::usePersistentInputSurface(
+ const sp<PersistentSurface> &surface) {
+ return mCodec->usePersistentInputSurface(surface);
+}
+
status_t JMediaCodec::start() {
return mCodec->start();
}
@@ -568,7 +600,7 @@ static jthrowable createCodecException(
env, env->FindClass("android/media/MediaCodec$CodecException"));
CHECK(clazz.get() != NULL);
- const jmethodID ctor = env->GetMethodID(clazz.get(), "<init>", "(IILjava/lang/String;)V");
+ const jmethodID ctor = env->GetMethodID(clazz.get(), "<init>", "(IILjava/lang/String;I)V");
CHECK(ctor != NULL);
ScopedLocalRef<jstring> msgObj(
@@ -587,7 +619,9 @@ static jthrowable createCodecException(
break;
}
- return (jthrowable)env->NewObject(clazz.get(), ctor, err, actionCode, msgObj.get());
+ int reason =
+ (err == DEAD_OBJECT) ? gExceptionReason.reasonReclaimed : gExceptionReason.reasonHardware;
+ return (jthrowable)env->NewObject(clazz.get(), ctor, err, actionCode, msgObj.get(), reason);
}
void JMediaCodec::handleCallback(const sp<AMessage> &msg) {
@@ -790,6 +824,10 @@ static jint throwExceptionAsNecessary(
jniThrowException(env, "java/lang/IllegalStateException", msg);
return 0;
+ case BAD_VALUE:
+ jniThrowException(env, "java/lang/IllegalArgumentException", msg);
+ return 0;
+
default:
if (isCryptoError(err)) {
throwCryptoException(env, err, msg);
@@ -862,6 +900,149 @@ static void android_media_MediaCodec_native_configure(
throwExceptionAsNecessary(env, err);
}
+static void android_media_MediaCodec_native_setSurface(
+ JNIEnv *env,
+ jobject thiz,
+ jobject jsurface) {
+ sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+ if (codec == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ sp<IGraphicBufferProducer> bufferProducer;
+ if (jsurface != NULL) {
+ sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+ if (surface != NULL) {
+ bufferProducer = surface->getIGraphicBufferProducer();
+ } else {
+ jniThrowException(
+ env,
+ "java/lang/IllegalArgumentException",
+ "The surface has been released");
+ return;
+ }
+ }
+
+ status_t err = codec->setSurface(bufferProducer);
+ throwExceptionAsNecessary(env, err);
+}
+
+sp<PersistentSurface> android_media_MediaCodec_getPersistentInputSurface(
+ JNIEnv* env, jobject object) {
+ sp<PersistentSurface> persistentSurface;
+
+ jobject lock = env->GetObjectField(
+ object, gPersistentSurfaceClassInfo.mLock);
+ if (env->MonitorEnter(lock) == JNI_OK) {
+ persistentSurface = reinterpret_cast<PersistentSurface *>(
+ env->GetLongField(object,
+ gPersistentSurfaceClassInfo.mPersistentObject));
+ env->MonitorExit(lock);
+ }
+ env->DeleteLocalRef(lock);
+
+ return persistentSurface;
+}
+
+static jobject android_media_MediaCodec_createPersistentInputSurface(
+ JNIEnv* env, jclass /* clazz */) {
+ ALOGV("android_media_MediaCodec_createPersistentInputSurface");
+ sp<PersistentSurface> persistentSurface =
+ MediaCodec::CreatePersistentInputSurface();
+
+ if (persistentSurface == NULL) {
+ return NULL;
+ }
+
+ sp<Surface> surface = new Surface(
+ persistentSurface->getBufferProducer(), true);
+ if (surface == NULL) {
+ return NULL;
+ }
+
+ jobject object = env->NewObject(
+ gPersistentSurfaceClassInfo.clazz,
+ gPersistentSurfaceClassInfo.ctor);
+
+ if (object == NULL) {
+ if (env->ExceptionCheck()) {
+ ALOGE("Could not create PersistentSurface.");
+ env->ExceptionClear();
+ }
+ return NULL;
+ }
+
+ jobject lock = env->GetObjectField(
+ object, gPersistentSurfaceClassInfo.mLock);
+ if (env->MonitorEnter(lock) == JNI_OK) {
+ env->CallVoidMethod(
+ object,
+ gPersistentSurfaceClassInfo.setNativeObjectLocked,
+ (jlong)surface.get());
+ env->SetLongField(
+ object,
+ gPersistentSurfaceClassInfo.mPersistentObject,
+ (jlong)persistentSurface.get());
+ env->MonitorExit(lock);
+ } else {
+ env->DeleteLocalRef(object);
+ object = NULL;
+ }
+ env->DeleteLocalRef(lock);
+
+ if (object != NULL) {
+ surface->incStrong(&sRefBaseOwner);
+ persistentSurface->incStrong(&sRefBaseOwner);
+ }
+
+ return object;
+}
+
+static void android_media_MediaCodec_releasePersistentInputSurface(
+ JNIEnv* env, jclass /* clazz */, jobject object) {
+ sp<PersistentSurface> persistentSurface;
+
+ jobject lock = env->GetObjectField(
+ object, gPersistentSurfaceClassInfo.mLock);
+ if (env->MonitorEnter(lock) == JNI_OK) {
+ persistentSurface = reinterpret_cast<PersistentSurface *>(
+ env->GetLongField(
+ object, gPersistentSurfaceClassInfo.mPersistentObject));
+ env->SetLongField(
+ object,
+ gPersistentSurfaceClassInfo.mPersistentObject,
+ (jlong)0);
+ env->MonitorExit(lock);
+ }
+ env->DeleteLocalRef(lock);
+
+ if (persistentSurface != NULL) {
+ persistentSurface->decStrong(&sRefBaseOwner);
+ }
+ // No need to release the surface here; it will be released by the Surface's own JNI code.
+}
+
+static void android_media_MediaCodec_usePersistentInputSurface(
+ JNIEnv* env, jobject thiz, jobject object) {
+ ALOGV("android_media_MediaCodec_usePersistentInputSurface");
+
+ sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+ if (codec == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ sp<PersistentSurface> persistentSurface =
+ android_media_MediaCodec_getPersistentInputSurface(env, object);
+
+ status_t err = codec->usePersistentInputSurface(persistentSurface);
+ if (err != NO_ERROR) {
+ throwExceptionAsNecessary(env, err);
+ }
+}
+
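Each persistent-surface helper above guards mPersistentObject with the Java-side mLock through a raw MonitorEnter()/MonitorExit() pair. A hypothetical RAII wrapper (a sketch only; not part of the patch or of any existing AOSP header) would express the same pattern:

#include <jni.h>

// Enters the Java monitor in the constructor and exits it in the destructor,
// mirroring the manual MonitorEnter/MonitorExit calls used above.
class ScopedJniMonitor {
public:
    ScopedJniMonitor(JNIEnv* env, jobject lock)
        : mEnv(env), mLock(lock), mLocked(env->MonitorEnter(lock) == JNI_OK) {}
    ~ScopedJniMonitor() {
        if (mLocked) {
            mEnv->MonitorExit(mLock);
        }
    }
    bool locked() const { return mLocked; }
private:
    JNIEnv* mEnv;
    jobject mLock;
    bool mLocked;
};

With such a helper, the lock/read/unlock bodies of the three functions above would each collapse into a single scoped block.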
static jobject android_media_MediaCodec_createInputSurface(JNIEnv* env,
jobject thiz) {
ALOGV("android_media_MediaCodec_createInputSurface");
@@ -1454,6 +1635,39 @@ static void android_media_MediaCodec_native_init(JNIEnv *env) {
CHECK(field != NULL);
gCodecActionCodes.codecActionRecoverable =
env->GetStaticIntField(clazz.get(), field);
+
+ field = env->GetStaticFieldID(clazz.get(), "REASON_HARDWARE", "I");
+ CHECK(field != NULL);
+ gExceptionReason.reasonHardware =
+ env->GetStaticIntField(clazz.get(), field);
+
+ field = env->GetStaticFieldID(clazz.get(), "REASON_RECLAIMED", "I");
+ CHECK(field != NULL);
+ gExceptionReason.reasonReclaimed =
+ env->GetStaticIntField(clazz.get(), field);
+
+ clazz.reset(env->FindClass("android/view/Surface"));
+ CHECK(clazz.get() != NULL);
+
+ field = env->GetFieldID(clazz.get(), "mLock", "Ljava/lang/Object;");
+ CHECK(field != NULL);
+ gPersistentSurfaceClassInfo.mLock = field;
+
+ jmethodID method = env->GetMethodID(clazz.get(), "setNativeObjectLocked", "(J)V");
+ CHECK(method != NULL);
+ gPersistentSurfaceClassInfo.setNativeObjectLocked = method;
+
+ clazz.reset(env->FindClass("android/media/MediaCodec$PersistentSurface"));
+ CHECK(clazz.get() != NULL);
+ gPersistentSurfaceClassInfo.clazz = (jclass)env->NewGlobalRef(clazz.get());
+
+ method = env->GetMethodID(clazz.get(), "<init>", "()V");
+ CHECK(method != NULL);
+ gPersistentSurfaceClassInfo.ctor = method;
+
+ field = env->GetFieldID(clazz.get(), "mPersistentObject", "J");
+ CHECK(field != NULL);
+ gPersistentSurfaceClassInfo.mPersistentObject = field;
}
static void android_media_MediaCodec_native_setup(
@@ -1504,6 +1718,17 @@ static JNINativeMethod gMethods[] = {
{ "native_reset", "()V", (void *)android_media_MediaCodec_reset },
+ { "native_releasePersistentInputSurface",
+ "(Landroid/view/Surface;)V",
+ (void *)android_media_MediaCodec_releasePersistentInputSurface},
+
+ { "native_createPersistentInputSurface",
+ "()Landroid/media/MediaCodec$PersistentSurface;",
+ (void *)android_media_MediaCodec_createPersistentInputSurface },
+
+ { "native_usePersistentInputSurface", "(Landroid/view/Surface;)V",
+ (void *)android_media_MediaCodec_usePersistentInputSurface },
+
{ "native_setCallback",
"(Landroid/media/MediaCodec$Callback;)V",
(void *)android_media_MediaCodec_native_setCallback },
@@ -1513,6 +1738,10 @@ static JNINativeMethod gMethods[] = {
"Landroid/media/MediaCrypto;I)V",
(void *)android_media_MediaCodec_native_configure },
+ { "native_setSurface",
+ "(Landroid/view/Surface;)V",
+ (void *)android_media_MediaCodec_native_setSurface },
+
{ "createInputSurface", "()Landroid/view/Surface;",
(void *)android_media_MediaCodec_createInputSurface },
diff --git a/media/jni/android_media_MediaCodec.h b/media/jni/android_media_MediaCodec.h
index 9f2785a..bf61f42 100644
--- a/media/jni/android_media_MediaCodec.h
+++ b/media/jni/android_media_MediaCodec.h
@@ -33,6 +33,7 @@ struct AString;
struct ICrypto;
struct IGraphicBufferProducer;
struct MediaCodec;
+struct PersistentSurface;
class Surface;
struct JMediaCodec : public AHandler {
@@ -53,7 +54,11 @@ struct JMediaCodec : public AHandler {
const sp<ICrypto> &crypto,
int flags);
+ status_t setSurface(
+ const sp<IGraphicBufferProducer> &surface);
+
status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
+ status_t usePersistentInputSurface(const sp<PersistentSurface> &surface);
status_t start();
status_t stop();
diff --git a/media/jni/android_media_MediaCodecList.cpp b/media/jni/android_media_MediaCodecList.cpp
index f8c349b..82dd48d 100644
--- a/media/jni/android_media_MediaCodecList.cpp
+++ b/media/jni/android_media_MediaCodecList.cpp
@@ -262,6 +262,27 @@ static jobject android_media_MediaCodecList_getCodecCapabilities(
return caps;
}
+static jobject android_media_MediaCodecList_getGlobalSettings(JNIEnv *env, jobject /* thiz */) {
+ sp<IMediaCodecList> mcl = getCodecList(env);
+ if (mcl == NULL) {
+ // Runtime exception already pending.
+ return NULL;
+ }
+
+ const sp<AMessage> settings = mcl->getGlobalSettings();
+ if (settings == NULL) {
+ jniThrowException(env, "java/lang/RuntimeException", "cannot get global settings");
+ return NULL;
+ }
+
+ jobject settingsObj = NULL;
+ if (ConvertMessageToMap(env, settings, &settingsObj)) {
+ return NULL;
+ }
+
+ return settingsObj;
+}
+
static void android_media_MediaCodecList_native_init(JNIEnv* /* env */) {
}
@@ -277,6 +298,10 @@ static JNINativeMethod gMethods[] = {
"(ILjava/lang/String;)Landroid/media/MediaCodecInfo$CodecCapabilities;",
(void *)android_media_MediaCodecList_getCodecCapabilities },
+ { "native_getGlobalSettings",
+ "()Ljava/util/Map;",
+ (void *)android_media_MediaCodecList_getGlobalSettings },
+
{ "findCodecByName", "(Ljava/lang/String;)I",
(void *)android_media_MediaCodecList_findCodecByName },
diff --git a/media/jni/android_media_MediaCrypto.cpp b/media/jni/android_media_MediaCrypto.cpp
index d2216fb..a9accb0 100644
--- a/media/jni/android_media_MediaCrypto.cpp
+++ b/media/jni/android_media_MediaCrypto.cpp
@@ -140,6 +140,15 @@ sp<ICrypto> JCrypto::GetCrypto(JNIEnv *env, jobject obj) {
return jcrypto->mCrypto;
}
+// JNI conversion utilities
+static Vector<uint8_t> JByteArrayToVector(JNIEnv *env, jbyteArray const &byteArray) {
+ Vector<uint8_t> vector;
+ size_t length = env->GetArrayLength(byteArray);
+ vector.insertAt((size_t)0, length);
+ env->GetByteArrayRegion(byteArray, 0, length, (jbyte *)vector.editArray());
+ return vector;
+}
+
} // namespace android
using namespace android;
@@ -274,6 +283,37 @@ static jboolean android_media_MediaCrypto_requiresSecureDecoderComponent(
return result ? JNI_TRUE : JNI_FALSE;
}
+static void android_media_MediaCrypto_setMediaDrmSession(
+ JNIEnv *env, jobject thiz, jbyteArray jsessionId) {
+ if (jsessionId == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+
+ sp<ICrypto> crypto = JCrypto::GetCrypto(env, thiz);
+
+ if (crypto == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+
+ Vector<uint8_t> sessionId(JByteArrayToVector(env, jsessionId));
+
+ status_t err = crypto->setMediaDrmSession(sessionId);
+
+ String8 msg("setMediaDrmSession failed");
+ if (err == ERROR_DRM_SESSION_NOT_OPENED) {
+ msg += ": session not opened";
+ } else if (err == ERROR_UNSUPPORTED) {
+ msg += ": not supported by this crypto scheme";
+ } else if (err == NO_INIT) {
+ msg += ": crypto plugin not initialized";
+ } else if (err != OK) {
+ msg.appendFormat(": general failure (%d)", err);
+ }
+ jniThrowException(env, "android/media/MediaCryptoException", msg.string());
+}
+
static JNINativeMethod gMethods[] = {
{ "release", "()V", (void *)android_media_MediaCrypto_release },
{ "native_init", "()V", (void *)android_media_MediaCrypto_native_init },
@@ -289,6 +329,9 @@ static JNINativeMethod gMethods[] = {
{ "requiresSecureDecoderComponent", "(Ljava/lang/String;)Z",
(void *)android_media_MediaCrypto_requiresSecureDecoderComponent },
+
+ { "setMediaDrmSession", "([B)V",
+ (void *)android_media_MediaCrypto_setMediaDrmSession },
};
int register_android_media_Crypto(JNIEnv *env) {
diff --git a/media/jni/android_media_MediaDataSource.cpp b/media/jni/android_media_MediaDataSource.cpp
new file mode 100644
index 0000000..1e6d2af
--- /dev/null
+++ b/media/jni/android_media_MediaDataSource.cpp
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "JMediaDataSource-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaDataSource.h"
+
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/Log.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <binder/MemoryDealer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <nativehelper/ScopedLocalRef.h>
+
+namespace android {
+
+JMediaDataSource::JMediaDataSource(JNIEnv* env, jobject source)
+ : mJavaObjStatus(OK), mSizeIsCached(false), mCachedSize(0), mMemory(NULL) {
+ mMediaDataSourceObj = env->NewGlobalRef(source);
+ CHECK(mMediaDataSourceObj != NULL);
+
+ ScopedLocalRef<jclass> mediaDataSourceClass(env, env->GetObjectClass(mMediaDataSourceObj));
+ CHECK(mediaDataSourceClass.get() != NULL);
+
+ mReadMethod = env->GetMethodID(mediaDataSourceClass.get(), "readAt", "(J[BI)I");
+ CHECK(mReadMethod != NULL);
+ mGetSizeMethod = env->GetMethodID(mediaDataSourceClass.get(), "getSize", "()J");
+ CHECK(mGetSizeMethod != NULL);
+ mCloseMethod = env->GetMethodID(mediaDataSourceClass.get(), "close", "()V");
+ CHECK(mCloseMethod != NULL);
+
+ ScopedLocalRef<jbyteArray> tmp(env, env->NewByteArray(kBufferSize));
+ mByteArrayObj = (jbyteArray)env->NewGlobalRef(tmp.get());
+ CHECK(mByteArrayObj != NULL);
+
+ sp<MemoryDealer> memoryDealer = new MemoryDealer(kBufferSize, "JMediaDataSource");
+ mMemory = memoryDealer->allocate(kBufferSize);
+ if (mMemory == NULL) {
+ ALOGE("Failed to allocate memory!");
+ }
+}
+
+JMediaDataSource::~JMediaDataSource() {
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ env->DeleteGlobalRef(mMediaDataSourceObj);
+ env->DeleteGlobalRef(mByteArrayObj);
+}
+
+sp<IMemory> JMediaDataSource::getIMemory() {
+ Mutex::Autolock lock(mLock);
+ return mMemory;
+}
+
+ssize_t JMediaDataSource::readAt(off64_t offset, size_t size) {
+ Mutex::Autolock lock(mLock);
+
+ if (mJavaObjStatus != OK || mMemory == NULL) {
+ return -1;
+ }
+ if (size > kBufferSize) {
+ size = kBufferSize;
+ }
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ jint numread = env->CallIntMethod(mMediaDataSourceObj, mReadMethod,
+ (jlong)offset, mByteArrayObj, (jint)size);
+ if (env->ExceptionCheck()) {
+ ALOGW("An exception occurred in readAt()");
+ LOGW_EX(env);
+ env->ExceptionClear();
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return -1;
+ }
+ if (numread < 0) {
+ ALOGW("An error occurred in readAt()");
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return -1;
+ }
+ if ((size_t)numread > size) {
+ ALOGE("readAt read too many bytes.");
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return -1;
+ }
+
+ ALOGV("readAt %lld / %zu => %d.", (long long)offset, size, numread);
+ env->GetByteArrayRegion(mByteArrayObj, 0, numread, (jbyte*)mMemory->pointer());
+ return numread;
+}
+
+status_t JMediaDataSource::getSize(off64_t* size) {
+ Mutex::Autolock lock(mLock);
+
+ if (mJavaObjStatus != OK) {
+ return UNKNOWN_ERROR;
+ }
+ if (mSizeIsCached) {
+ *size = mCachedSize;
+ return OK;
+ }
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ *size = env->CallLongMethod(mMediaDataSourceObj, mGetSizeMethod);
+ if (env->ExceptionCheck()) {
+ ALOGW("An exception occurred in getSize()");
+ LOGW_EX(env);
+ env->ExceptionClear();
+ // After returning an error, size shouldn't be used by callers.
+ *size = UNKNOWN_ERROR;
+ mJavaObjStatus = UNKNOWN_ERROR;
+ return UNKNOWN_ERROR;
+ }
+
+ // Clamp negative sizes to -1, which indicates an unknown size.
+ if (*size < 0) {
+ *size = -1;
+ }
+
+ mCachedSize = *size;
+ mSizeIsCached = true;
+ return OK;
+}
+
+void JMediaDataSource::close() {
+ Mutex::Autolock lock(mLock);
+
+ JNIEnv* env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(mMediaDataSourceObj, mCloseMethod);
+ // The closed state is effectively the same as an error state.
+ mJavaObjStatus = UNKNOWN_ERROR;
+}
+
+} // namespace android
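readAt() above intentionally returns only a byte count: the payload itself travels through the shared IMemory region allocated in the constructor. A consumer-side sketch (not part of the patch; copyOut is a hypothetical helper name and error handling is trimmed) of how a remote caller would use the readAt()/getIMemory() pair declared in the android_media_MediaDataSource.h header that follows:

#include <string.h>
#include <sys/types.h>

#include <binder/IMemory.h>
#include <media/IDataSource.h>
#include <utils/StrongPointer.h>

using android::IDataSource;
using android::IMemory;
using android::sp;

// Reads up to `size` bytes at `offset` into `dst` via the shared-memory protocol.
static ssize_t copyOut(const sp<IDataSource>& source, off64_t offset, void* dst, size_t size) {
    sp<IMemory> mem = source->getIMemory();          // shared buffer, kBufferSize bytes long
    if (mem == NULL) {
        return -1;
    }
    ssize_t numRead = source->readAt(offset, size);  // the Java readAt() fills the shared buffer
    if (numRead <= 0) {
        return numRead;
    }
    memcpy(dst, mem->pointer(), numRead);            // copy out of the shared region
    return numRead;
}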
diff --git a/media/jni/android_media_MediaDataSource.h b/media/jni/android_media_MediaDataSource.h
new file mode 100644
index 0000000..2bc237e
--- /dev/null
+++ b/media/jni/android_media_MediaDataSource.h
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIADATASOURCE_H_
+#define _ANDROID_MEDIA_MEDIADATASOURCE_H_
+
+#include "jni.h"
+
+#include <media/IDataSource.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/Errors.h>
+#include <utils/Mutex.h>
+
+namespace android {
+
+// The native counterpart to a Java android.media.MediaDataSource. It implements
+// IDataSource (via BnDataSource) so that it can be accessed remotely over binder.
+//
+// If the Java MediaDataSource returns an error or throws an exception, it is
+// considered to be in a broken state, and the only further call this wrapper
+// will make to it is close().
+class JMediaDataSource : public BnDataSource {
+public:
+ enum {
+ kBufferSize = 64 * 1024,
+ };
+
+ JMediaDataSource(JNIEnv *env, jobject source);
+ virtual ~JMediaDataSource();
+
+ virtual sp<IMemory> getIMemory();
+ virtual ssize_t readAt(off64_t offset, size_t size);
+ virtual status_t getSize(off64_t* size);
+ virtual void close();
+
+private:
+ // Protect all member variables with mLock because this object will be
+ // accessed on different binder worker threads.
+ Mutex mLock;
+
+ // The status of the java DataSource. Set to OK unless an error occurred or
+ // close() was called.
+ status_t mJavaObjStatus;
+ // Only call the java getSize() once so the app can't change the size on us.
+ bool mSizeIsCached;
+ off64_t mCachedSize;
+ sp<IMemory> mMemory;
+
+ jobject mMediaDataSourceObj;
+ jmethodID mReadMethod;
+ jmethodID mGetSizeMethod;
+ jmethodID mCloseMethod;
+ jbyteArray mByteArrayObj;
+
+ DISALLOW_EVIL_CONSTRUCTORS(JMediaDataSource);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_MEDIADATASOURCE_H_
diff --git a/media/jni/android_media_MediaDrm.cpp b/media/jni/android_media_MediaDrm.cpp
index 5578416..f8146a7 100644
--- a/media/jni/android_media_MediaDrm.cpp
+++ b/media/jni/android_media_MediaDrm.cpp
@@ -59,6 +59,7 @@ namespace android {
struct RequestFields {
jfieldID data;
jfieldID defaultUrl;
+ jfieldID requestType;
};
struct ArrayListFields {
@@ -92,14 +93,27 @@ struct EventTypes {
jint kEventKeyRequired;
jint kEventKeyExpired;
jint kEventVendorDefined;
+ jint kEventSessionReclaimed;
} gEventTypes;
+struct EventWhat {
+ jint kWhatDrmEvent;
+ jint kWhatExpirationUpdate;
+ jint kWhatKeysChange;
+} gEventWhat;
+
struct KeyTypes {
jint kKeyTypeStreaming;
jint kKeyTypeOffline;
jint kKeyTypeRelease;
} gKeyTypes;
+struct KeyRequestTypes {
+ jint kKeyRequestTypeInitial;
+ jint kKeyRequestTypeRenewal;
+ jint kKeyRequestTypeRelease;
+} gKeyRequestTypes;
+
struct CertificateTypes {
jint kCertificateTypeNone;
jint kCertificateTypeX509;
@@ -178,22 +192,37 @@ JNIDrmListener::~JNIDrmListener()
void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
const Parcel *obj)
{
- jint jeventType;
+ jint jwhat;
+ jint jeventType = 0;
// translate DrmPlugin event types into their java equivalents
- switch(eventType) {
+ switch (eventType) {
case DrmPlugin::kDrmPluginEventProvisionRequired:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventProvisionRequired;
break;
case DrmPlugin::kDrmPluginEventKeyNeeded:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventKeyRequired;
break;
case DrmPlugin::kDrmPluginEventKeyExpired:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventKeyExpired;
break;
case DrmPlugin::kDrmPluginEventVendorDefined:
+ jwhat = gEventWhat.kWhatDrmEvent;
jeventType = gEventTypes.kEventVendorDefined;
break;
+ case DrmPlugin::kDrmPluginEventSessionReclaimed:
+ jwhat = gEventWhat.kWhatDrmEvent;
+ jeventType = gEventTypes.kEventSessionReclaimed;
+ break;
+ case DrmPlugin::kDrmPluginEventExpirationUpdate:
+ jwhat = gEventWhat.kWhatExpirationUpdate;
+ break;
+ case DrmPlugin::kDrmPluginEventKeysChange:
+ jwhat = gEventWhat.kWhatKeysChange;
+ break;
default:
ALOGE("Invalid event DrmPlugin::EventType %d, ignored", (int)eventType);
return;
@@ -206,7 +235,7 @@ void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
Parcel* nativeParcel = parcelForJavaObject(env, jParcel);
nativeParcel->setData(obj->data(), obj->dataSize());
env->CallStaticVoidMethod(mClass, gFields.post_event, mObject,
- jeventType, extra, jParcel);
+ jwhat, jeventType, extra, jParcel);
env->DeleteLocalRef(jParcel);
}
}
@@ -232,7 +261,7 @@ static bool throwExceptionAsNecessary(
const char *drmMessage = NULL;
- switch(err) {
+ switch (err) {
case ERROR_DRM_UNKNOWN:
drmMessage = "General DRM error";
break;
@@ -562,7 +591,7 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
FIND_CLASS(clazz, "android/media/MediaDrm");
GET_FIELD_ID(gFields.context, clazz, "mNativeContext", "J");
GET_STATIC_METHOD_ID(gFields.post_event, clazz, "postEventFromNative",
- "(Ljava/lang/Object;IILjava/lang/Object;)V");
+ "(Ljava/lang/Object;IIILjava/lang/Object;)V");
jfieldID field;
GET_STATIC_FIELD_ID(field, clazz, "EVENT_PROVISION_REQUIRED", "I");
@@ -573,6 +602,15 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
gEventTypes.kEventKeyExpired = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "EVENT_VENDOR_DEFINED", "I");
gEventTypes.kEventVendorDefined = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "EVENT_SESSION_RECLAIMED", "I");
+ gEventTypes.kEventSessionReclaimed = env->GetStaticIntField(clazz, field);
+
+ GET_STATIC_FIELD_ID(field, clazz, "DRM_EVENT", "I");
+ gEventWhat.kWhatDrmEvent = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "EXPIRATION_UPDATE", "I");
+ gEventWhat.kWhatExpirationUpdate = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "KEYS_CHANGE", "I");
+ gEventWhat.kWhatKeysChange = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "KEY_TYPE_STREAMING", "I");
gKeyTypes.kKeyTypeStreaming = env->GetStaticIntField(clazz, field);
@@ -581,6 +619,13 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
GET_STATIC_FIELD_ID(field, clazz, "KEY_TYPE_RELEASE", "I");
gKeyTypes.kKeyTypeRelease = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_INITIAL", "I");
+ gKeyRequestTypes.kKeyRequestTypeInitial = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_RENEWAL", "I");
+ gKeyRequestTypes.kKeyRequestTypeRenewal = env->GetStaticIntField(clazz, field);
+ GET_STATIC_FIELD_ID(field, clazz, "REQUEST_TYPE_RELEASE", "I");
+ gKeyRequestTypes.kKeyRequestTypeRelease = env->GetStaticIntField(clazz, field);
+
GET_STATIC_FIELD_ID(field, clazz, "CERTIFICATE_TYPE_NONE", "I");
gCertificateTypes.kCertificateTypeNone = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "CERTIFICATE_TYPE_X509", "I");
@@ -589,6 +634,7 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
FIND_CLASS(clazz, "android/media/MediaDrm$KeyRequest");
GET_FIELD_ID(gFields.keyRequest.data, clazz, "mData", "[B");
GET_FIELD_ID(gFields.keyRequest.defaultUrl, clazz, "mDefaultUrl", "Ljava/lang/String;");
+ GET_FIELD_ID(gFields.keyRequest.requestType, clazz, "mRequestType", "I");
FIND_CLASS(clazz, "android/media/MediaDrm$ProvisionRequest");
GET_FIELD_ID(gFields.provisionRequest.data, clazz, "mData", "[B");
@@ -780,9 +826,10 @@ static jobject android_media_MediaDrm_getKeyRequest(
Vector<uint8_t> request;
String8 defaultUrl;
+ DrmPlugin::KeyRequestType keyRequestType;
status_t err = drm->getKeyRequest(sessionId, initData, mimeType,
- keyType, optParams, request, defaultUrl);
+ keyType, optParams, request, defaultUrl, &keyRequestType);
if (throwExceptionAsNecessary(env, err, "Failed to get key request")) {
return NULL;
@@ -801,6 +848,25 @@ static jobject android_media_MediaDrm_getKeyRequest(
jstring jdefaultUrl = env->NewStringUTF(defaultUrl.string());
env->SetObjectField(keyObj, gFields.keyRequest.defaultUrl, jdefaultUrl);
+
+ switch (keyRequestType) {
+ case DrmPlugin::kKeyRequestType_Initial:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeInitial);
+ break;
+ case DrmPlugin::kKeyRequestType_Renewal:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeRenewal);
+ break;
+ case DrmPlugin::kKeyRequestType_Release:
+ env->SetIntField(keyObj, gFields.keyRequest.requestType,
+ gKeyRequestTypes.kKeyRequestTypeRelease);
+ break;
+ default:
+ throwStateException(env, "DRM plugin failure: unknown key request type",
+ ERROR_DRM_UNKNOWN);
+ break;
+ }
}
return keyObj;
diff --git a/media/jni/android_media_MediaExtractor.cpp b/media/jni/android_media_MediaExtractor.cpp
index c0795b6..b6b7a80 100644
--- a/media/jni/android_media_MediaExtractor.cpp
+++ b/media/jni/android_media_MediaExtractor.cpp
@@ -25,6 +25,7 @@
#include "android_runtime/Log.h"
#include "jni.h"
#include "JNIHelp.h"
+#include "android_media_MediaDataSource.h"
#include <media/IMediaHTTPService.h>
#include <media/hardware/CryptoAPI.h>
@@ -50,74 +51,6 @@ struct fields_t {
static fields_t gFields;
-class JavaDataSourceBridge : public DataSource {
- jmethodID mReadMethod;
- jmethodID mGetSizeMethod;
- jmethodID mCloseMethod;
- jobject mDataSource;
- public:
- JavaDataSourceBridge(JNIEnv *env, jobject source) {
- mDataSource = env->NewGlobalRef(source);
-
- jclass datasourceclass = env->GetObjectClass(mDataSource);
- CHECK(datasourceclass != NULL);
-
- mReadMethod = env->GetMethodID(datasourceclass, "readAt", "(J[BI)I");
- CHECK(mReadMethod != NULL);
-
- mGetSizeMethod = env->GetMethodID(datasourceclass, "getSize", "()J");
- CHECK(mGetSizeMethod != NULL);
-
- mCloseMethod = env->GetMethodID(datasourceclass, "close", "()V");
- CHECK(mCloseMethod != NULL);
- }
-
- ~JavaDataSourceBridge() {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
- env->CallVoidMethod(mDataSource, mCloseMethod);
- env->DeleteGlobalRef(mDataSource);
- }
-
- virtual status_t initCheck() const {
- return OK;
- }
-
- virtual ssize_t readAt(off64_t offset, void* buffer, size_t size) {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
-
- // XXX could optimize this by reusing the same array
- jbyteArray byteArrayObj = env->NewByteArray(size);
- env->DeleteLocalRef(env->GetObjectClass(mDataSource));
- env->DeleteLocalRef(env->GetObjectClass(byteArrayObj));
- ssize_t numread = env->CallIntMethod(mDataSource, mReadMethod, offset, byteArrayObj, (jint)size);
- env->GetByteArrayRegion(byteArrayObj, 0, size, (jbyte*) buffer);
- env->DeleteLocalRef(byteArrayObj);
- if (env->ExceptionCheck()) {
- ALOGW("Exception occurred while reading %zu at %lld", size, (long long)offset);
- LOGW_EX(env);
- env->ExceptionClear();
- return -1;
- }
- return numread;
- }
-
- virtual status_t getSize(off64_t *size) {
- JNIEnv *env = AndroidRuntime::getJNIEnv();
-
- CHECK(size != NULL);
-
- int64_t len = env->CallLongMethod(mDataSource, mGetSizeMethod);
- if (len < 0) {
- *size = ERROR_UNSUPPORTED;
- } else {
- *size = len;
- }
- return OK;
- }
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
JMediaExtractor::JMediaExtractor(JNIEnv *env, jobject thiz)
: mClass(NULL),
mObject(NULL) {
@@ -777,7 +710,8 @@ static void android_media_MediaExtractor_setDataSourceCallback(
return;
}
- sp<JavaDataSourceBridge> bridge = new JavaDataSourceBridge(env, callbackObj);
+ sp<DataSource> bridge =
+ DataSource::CreateFromIDataSource(new JMediaDataSource(env, callbackObj));
status_t err = extractor->setDataSource(bridge);
if (err != OK) {
@@ -881,7 +815,7 @@ static JNINativeMethod gMethods[] = {
{ "setDataSource", "(Ljava/io/FileDescriptor;JJ)V",
(void *)android_media_MediaExtractor_setDataSourceFd },
- { "setDataSource", "(Landroid/media/DataSource;)V",
+ { "setDataSource", "(Landroid/media/MediaDataSource;)V",
(void *)android_media_MediaExtractor_setDataSourceCallback },
{ "getCachedDuration", "()J",
diff --git a/media/jni/android_media_MediaHTTPConnection.cpp b/media/jni/android_media_MediaHTTPConnection.cpp
index 7226ef5..393003d 100644
--- a/media/jni/android_media_MediaHTTPConnection.cpp
+++ b/media/jni/android_media_MediaHTTPConnection.cpp
@@ -134,7 +134,6 @@ static jobject android_media_MediaHTTPConnection_native_getIMemory(
static jint android_media_MediaHTTPConnection_native_readAt(
JNIEnv *env, jobject thiz, jlong offset, jint size) {
sp<JMediaHTTPConnection> conn = getObject(env, thiz);
-
if (size > JMediaHTTPConnection::kBufferSize) {
size = JMediaHTTPConnection::kBufferSize;
}
diff --git a/media/jni/android_media_MediaMetadataRetriever.cpp b/media/jni/android_media_MediaMetadataRetriever.cpp
index fc7931e..59fb6d6 100644
--- a/media/jni/android_media_MediaMetadataRetriever.cpp
+++ b/media/jni/android_media_MediaMetadataRetriever.cpp
@@ -30,16 +30,17 @@
#include "jni.h"
#include "JNIHelp.h"
#include "android_runtime/AndroidRuntime.h"
+#include "android_media_MediaDataSource.h"
#include "android_media_Utils.h"
#include "android_util_Binder.h"
+#include "android/graphics/GraphicsJNI.h"
using namespace android;
struct fields_t {
jfieldID context;
jclass bitmapClazz; // Must be a global ref
- jfieldID nativeBitmap;
jmethodID createBitmapMethod;
jmethodID createScaledBitmapMethod;
jclass configClazz; // Must be a global ref
@@ -171,6 +172,23 @@ static void android_media_MediaMetadataRetriever_setDataSourceFD(JNIEnv *env, jo
process_media_retriever_call(env, retriever->setDataSource(fd, offset, length), "java/lang/RuntimeException", "setDataSource failed");
}
+static void android_media_MediaMetadataRetriever_setDataSourceCallback(JNIEnv *env, jobject thiz, jobject dataSource)
+{
+ ALOGV("setDataSourceCallback");
+ MediaMetadataRetriever* retriever = getRetriever(env, thiz);
+ if (retriever == 0) {
+ jniThrowException(env, "java/lang/IllegalStateException", "No retriever available");
+ return;
+ }
+ if (dataSource == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+
+ sp<IDataSource> callbackDataSource = new JMediaDataSource(env, dataSource);
+ process_media_retriever_call(env, retriever->setDataSource(callbackDataSource), "java/lang/RuntimeException", "setDataSourceCallback failed");
+}
+
template<typename T>
static void rotate0(T* dst, const T* src, size_t width, size_t height)
{
@@ -254,7 +272,7 @@ static jobject android_media_MediaMetadataRetriever_getFrameAtTime(JNIEnv *env,
jobject config = env->CallStaticObjectMethod(
fields.configClazz,
fields.createConfigMethod,
- SkBitmap::kRGB_565_Config);
+ GraphicsJNI::colorTypeToLegacyBitmapConfig(kRGB_565_SkColorType));
uint32_t width, height;
bool swapWidthAndHeight = false;
@@ -281,16 +299,16 @@ static jobject android_media_MediaMetadataRetriever_getFrameAtTime(JNIEnv *env,
return NULL;
}
- SkBitmap *bitmap =
- (SkBitmap *) env->GetLongField(jBitmap, fields.nativeBitmap);
+ SkBitmap bitmap;
+ GraphicsJNI::getSkBitmap(env, jBitmap, &bitmap);
- bitmap->lockPixels();
- rotate((uint16_t*)bitmap->getPixels(),
+ bitmap.lockPixels();
+ rotate((uint16_t*)bitmap.getPixels(),
(uint16_t*)((char*)videoFrame + sizeof(VideoFrame)),
videoFrame->mWidth,
videoFrame->mHeight,
videoFrame->mRotationAngle);
- bitmap->unlockPixels();
+ bitmap.unlockPixels();
if (videoFrame->mDisplayWidth != videoFrame->mWidth ||
videoFrame->mDisplayHeight != videoFrame->mHeight) {
@@ -420,10 +438,6 @@ static void android_media_MediaMetadataRetriever_native_init(JNIEnv *env)
if (fields.createScaledBitmapMethod == NULL) {
return;
}
- fields.nativeBitmap = env->GetFieldID(fields.bitmapClazz, "mNativeBitmap", "J");
- if (fields.nativeBitmap == NULL) {
- return;
- }
jclass configClazz = env->FindClass("android/graphics/Bitmap$Config");
if (configClazz == NULL) {
@@ -461,6 +475,7 @@ static JNINativeMethod nativeMethods[] = {
},
{"setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaMetadataRetriever_setDataSourceFD},
+ {"_setDataSource", "(Landroid/media/MediaDataSource;)V", (void *)android_media_MediaMetadataRetriever_setDataSourceCallback},
{"_getFrameAtTime", "(JI)Landroid/graphics/Bitmap;", (void *)android_media_MediaMetadataRetriever_getFrameAtTime},
{"extractMetadata", "(I)Ljava/lang/String;", (void *)android_media_MediaMetadataRetriever_extractMetadata},
{"getEmbeddedPicture", "(I)[B", (void *)android_media_MediaMetadataRetriever_getEmbeddedPicture},
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 820de5b..5b55a61 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -20,6 +20,7 @@
#include "utils/Log.h"
#include <media/mediaplayer.h>
+#include <media/AudioResamplerPublic.h>
#include <media/IMediaHTTPService.h>
#include <media/MediaPlayerInterface.h>
#include <stdio.h>
@@ -36,6 +37,9 @@
#include "utils/Errors.h" // for status_t
#include "utils/KeyedVector.h"
#include "utils/String8.h"
+#include "android_media_MediaDataSource.h"
+#include "android_media_PlaybackSettings.h"
+#include "android_media_SyncSettings.h"
#include "android_media_Utils.h"
#include "android_os_Parcel.h"
@@ -65,6 +69,9 @@ struct fields_t {
};
static fields_t fields;
+static PlaybackSettings::fields_t gPlaybackSettingsFields;
+static SyncSettings::fields_t gSyncSettingsFields;
+
static Mutex sLock;
// ----------------------------------------------------------------------------
@@ -167,6 +174,8 @@ static void process_media_player_call(JNIEnv *env, jobject thiz, status_t opStat
} else { // Throw exception!
if ( opStatus == (status_t) INVALID_OPERATION ) {
jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ } else if ( opStatus == (status_t) BAD_VALUE ) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
} else if ( opStatus == (status_t) PERMISSION_DENIED ) {
jniThrowException(env, "java/lang/SecurityException", NULL);
} else if ( opStatus != (status_t) OK ) {
@@ -251,6 +260,23 @@ android_media_MediaPlayer_setDataSourceFD(JNIEnv *env, jobject thiz, jobject fil
process_media_player_call( env, thiz, mp->setDataSource(fd, offset, length), "java/io/IOException", "setDataSourceFD failed." );
}
+static void
+android_media_MediaPlayer_setDataSourceCallback(JNIEnv *env, jobject thiz, jobject dataSource)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL ) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+
+ if (dataSource == NULL) {
+ jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
+ return;
+ }
+ sp<IDataSource> callbackDataSource = new JMediaDataSource(env, dataSource);
+ process_media_player_call(env, thiz, mp->setDataSource(callbackDataSource), "java/lang/RuntimeException", "setDataSourceCallback failed." );
+}
+
static sp<IGraphicBufferProducer>
getVideoSurfaceTexture(JNIEnv* env, jobject thiz) {
IGraphicBufferProducer * const p = (IGraphicBufferProducer*)env->GetLongField(thiz, fields.surface_texture);
@@ -402,6 +428,155 @@ android_media_MediaPlayer_isPlaying(JNIEnv *env, jobject thiz)
}
static void
+android_media_MediaPlayer_setPlaybackSettings(JNIEnv *env, jobject thiz, jobject settings)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+
+ PlaybackSettings pbs;
+ pbs.fillFromJobject(env, gPlaybackSettingsFields, settings);
+ ALOGV("setPlaybackSettings: %d:%f %d:%f %d:%u %d:%u",
+ pbs.speedSet, pbs.audioRate.mSpeed,
+ pbs.pitchSet, pbs.audioRate.mPitch,
+ pbs.audioFallbackModeSet, pbs.audioRate.mFallbackMode,
+ pbs.audioStretchModeSet, pbs.audioRate.mStretchMode);
+
+ AudioPlaybackRate rate;
+ status_t err = mp->getPlaybackSettings(&rate);
+ if (err == OK) {
+ bool updatedRate = false;
+ if (pbs.speedSet) {
+ rate.mSpeed = pbs.audioRate.mSpeed;
+ updatedRate = true;
+ }
+ if (pbs.pitchSet) {
+ rate.mPitch = pbs.audioRate.mPitch;
+ updatedRate = true;
+ }
+ if (pbs.audioFallbackModeSet) {
+ rate.mFallbackMode = pbs.audioRate.mFallbackMode;
+ updatedRate = true;
+ }
+ if (pbs.audioStretchModeSet) {
+ rate.mStretchMode = pbs.audioRate.mStretchMode;
+ updatedRate = true;
+ }
+ if (updatedRate) {
+ err = mp->setPlaybackSettings(rate);
+ }
+ }
+ process_media_player_call(
+ env, thiz, err,
+ "java/lang/IllegalStateException", "unexpected error");
+}
+
+static jobject
+android_media_MediaPlayer_getPlaybackSettings(JNIEnv *env, jobject thiz)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return NULL;
+ }
+
+ PlaybackSettings pbs;
+ AudioPlaybackRate &audioRate = pbs.audioRate;
+ process_media_player_call(
+ env, thiz, mp->getPlaybackSettings(&audioRate),
+ "java/lang/IllegalStateException", "unexpected error");
+ ALOGV("getPlaybackSettings: %f %f %d %d",
+ audioRate.mSpeed, audioRate.mPitch, audioRate.mFallbackMode, audioRate.mStretchMode);
+
+ pbs.speedSet = true;
+ pbs.pitchSet = true;
+ pbs.audioFallbackModeSet = true;
+ pbs.audioStretchModeSet = true;
+
+ return pbs.asJobject(env, gPlaybackSettingsFields);
+}
+
+static void
+android_media_MediaPlayer_setSyncSettings(JNIEnv *env, jobject thiz, jobject settings)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+
+ SyncSettings scs;
+ scs.fillFromJobject(env, gSyncSettingsFields, settings);
+ ALOGV("setSyncSettings: %d:%d %d:%d %d:%f %d:%f",
+ scs.syncSourceSet, scs.sync.mSource,
+ scs.audioAdjustModeSet, scs.sync.mAudioAdjustMode,
+ scs.toleranceSet, scs.sync.mTolerance,
+ scs.frameRateSet, scs.frameRate);
+
+ AVSyncSettings avsync;
+ float videoFrameRate;
+ status_t err = mp->getSyncSettings(&avsync, &videoFrameRate);
+ if (err == OK) {
+ bool updatedSync = scs.frameRateSet;
+ if (scs.syncSourceSet) {
+ avsync.mSource = scs.sync.mSource;
+ updatedSync = true;
+ }
+ if (scs.audioAdjustModeSet) {
+ avsync.mAudioAdjustMode = scs.sync.mAudioAdjustMode;
+ updatedSync = true;
+ }
+ if (scs.toleranceSet) {
+ avsync.mTolerance = scs.sync.mTolerance;
+ updatedSync = true;
+ }
+ if (updatedSync) {
+ err = mp->setSyncSettings(avsync, scs.frameRateSet ? scs.frameRate : -1.f);
+ }
+ }
+ process_media_player_call(
+ env, thiz, err,
+ "java/lang/IllegalStateException", "unexpected error");
+}
+
+static jobject
+android_media_MediaPlayer_getSyncSettings(JNIEnv *env, jobject thiz)
+{
+ sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
+ if (mp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return NULL;
+ }
+
+ SyncSettings scs;
+ scs.frameRate = -1.f;
+ process_media_player_call(
+ env, thiz, mp->getSyncSettings(&scs.sync, &scs.frameRate),
+ "java/lang/IllegalStateException", "unexpected error");
+
+ ALOGV("getSyncSettings: %d %d %f %f",
+ scs.sync.mSource, scs.sync.mAudioAdjustMode, scs.sync.mTolerance, scs.frameRate);
+
+ // sanity check settings
+ if (scs.sync.mSource >= AVSYNC_SOURCE_MAX
+ || scs.sync.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX
+ || scs.sync.mTolerance < 0.f
+ || scs.sync.mTolerance >= AVSYNC_TOLERANCE_MAX) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return NULL;
+ }
+
+ scs.syncSourceSet = true;
+ scs.audioAdjustModeSet = true;
+ scs.toleranceSet = true;
+ scs.frameRateSet = scs.frameRate >= 0.f;
+
+ return scs.asJobject(env, gSyncSettingsFields);
+}
+
+static void
android_media_MediaPlayer_seekTo(JNIEnv *env, jobject thiz, jint msec)
{
sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
@@ -667,6 +842,8 @@ android_media_MediaPlayer_native_init(JNIEnv *env)
return;
}
+ env->DeleteLocalRef(clazz);
+
clazz = env->FindClass("android/net/ProxyInfo");
if (clazz == NULL) {
return;
@@ -680,6 +857,11 @@ android_media_MediaPlayer_native_init(JNIEnv *env)
fields.proxyConfigGetExclusionList =
env->GetMethodID(clazz, "getExclusionListAsString", "()Ljava/lang/String;");
+
+ env->DeleteLocalRef(clazz);
+
+ gPlaybackSettingsFields.init(env);
+ gSyncSettingsFields.init(env);
}
static void
@@ -859,7 +1041,8 @@ static JNINativeMethod gMethods[] = {
(void *)android_media_MediaPlayer_setDataSourceAndHeaders
},
- {"_setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaPlayer_setDataSourceFD},
+ {"_setDataSource", "(Ljava/io/FileDescriptor;JJ)V", (void *)android_media_MediaPlayer_setDataSourceFD},
+ {"_setDataSource", "(Landroid/media/MediaDataSource;)V",(void *)android_media_MediaPlayer_setDataSourceCallback },
{"_setVideoSurface", "(Landroid/view/Surface;)V", (void *)android_media_MediaPlayer_setVideoSurface},
{"_prepare", "()V", (void *)android_media_MediaPlayer_prepare},
{"prepareAsync", "()V", (void *)android_media_MediaPlayer_prepareAsync},
@@ -867,6 +1050,10 @@ static JNINativeMethod gMethods[] = {
{"_stop", "()V", (void *)android_media_MediaPlayer_stop},
{"getVideoWidth", "()I", (void *)android_media_MediaPlayer_getVideoWidth},
{"getVideoHeight", "()I", (void *)android_media_MediaPlayer_getVideoHeight},
+ {"setPlaybackSettings", "(Landroid/media/PlaybackSettings;)V", (void *)android_media_MediaPlayer_setPlaybackSettings},
+ {"getPlaybackSettings", "()Landroid/media/PlaybackSettings;", (void *)android_media_MediaPlayer_getPlaybackSettings},
+ {"setSyncSettings", "(Landroid/media/SyncSettings;)V", (void *)android_media_MediaPlayer_setSyncSettings},
+ {"getSyncSettings", "()Landroid/media/SyncSettings;", (void *)android_media_MediaPlayer_getSyncSettings},
{"seekTo", "(I)V", (void *)android_media_MediaPlayer_seekTo},
{"_pause", "()V", (void *)android_media_MediaPlayer_pause},
{"isPlaying", "()Z", (void *)android_media_MediaPlayer_isPlaying},
@@ -901,8 +1088,8 @@ static int register_android_media_MediaPlayer(JNIEnv *env)
return AndroidRuntime::registerNativeMethods(env,
"android/media/MediaPlayer", gMethods, NELEM(gMethods));
}
-
extern int register_android_media_ImageReader(JNIEnv *env);
+extern int register_android_media_ImageWriter(JNIEnv *env);
extern int register_android_media_Crypto(JNIEnv *env);
extern int register_android_media_Drm(JNIEnv *env);
extern int register_android_media_MediaCodec(JNIEnv *env);
@@ -913,6 +1100,7 @@ extern int register_android_media_MediaMetadataRetriever(JNIEnv *env);
extern int register_android_media_MediaMuxer(JNIEnv *env);
extern int register_android_media_MediaRecorder(JNIEnv *env);
extern int register_android_media_MediaScanner(JNIEnv *env);
+extern int register_android_media_MediaSync(JNIEnv *env);
extern int register_android_media_ResampleInputStream(JNIEnv *env);
extern int register_android_media_MediaProfiles(JNIEnv *env);
extern int register_android_media_AmrInputStream(JNIEnv *env);
@@ -931,6 +1119,11 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
}
assert(env != NULL);
+ if (register_android_media_ImageWriter(env) != JNI_OK) {
+ ALOGE("ERROR: ImageWriter native registration failed");
+ goto bail;
+ }
+
if (register_android_media_ImageReader(env) < 0) {
ALOGE("ERROR: ImageReader native registration failed");
goto bail;
@@ -991,6 +1184,11 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
goto bail;
}
+ if (register_android_media_MediaSync(env) < 0) {
+ ALOGE("ERROR: MediaSync native registration failed");
+ goto bail;
+ }
+
if (register_android_media_MediaExtractor(env) < 0) {
ALOGE("ERROR: MediaCodec native registration failed");
goto bail;
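
[Editor's note, illustrative only — not part of the patch: the JNI_OnLoad hunks above follow the usual "register each native table, bail on failure" pattern. Below is a minimal, self-contained sketch of that pattern using plain JNI, with a placeholder class "com/example/Foo" and a placeholder native method; the real code goes through AndroidRuntime::registerNativeMethods instead.]

    #include <jni.h>

    static void Foo_ping(JNIEnv*, jobject) { /* no-op placeholder */ }

    static const JNINativeMethod kMethods[] = {
        { "ping", "()V", (void*)Foo_ping },
    };

    jint JNI_OnLoad(JavaVM* vm, void* /* reserved */) {
        JNIEnv* env = nullptr;
        if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_6) != JNI_OK) {
            return JNI_ERR;                     // cannot get an env: bail out
        }
        jclass clazz = env->FindClass("com/example/Foo");
        if (clazz == nullptr) {
            return JNI_ERR;                     // class not found: registration failed
        }
        if (env->RegisterNatives(clazz, kMethods,
                                 sizeof(kMethods) / sizeof(kMethods[0])) < 0) {
            env->DeleteLocalRef(clazz);
            return JNI_ERR;
        }
        env->DeleteLocalRef(clazz);
        return JNI_VERSION_1_6;                 // success: report the supported JNI version
    }
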
diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp
index 8b7d40d..0044bed 100644
--- a/media/jni/android_media_MediaRecorder.cpp
+++ b/media/jni/android_media_MediaRecorder.cpp
@@ -29,8 +29,11 @@
#include <camera/ICameraService.h>
#include <camera/Camera.h>
#include <media/mediarecorder.h>
+#include <media/stagefright/PersistentSurface.h>
#include <utils/threads.h>
+#include <ScopedUtfChars.h>
+
#include "jni.h"
#include "JNIHelp.h"
#include "android_runtime/AndroidRuntime.h"
@@ -46,6 +49,8 @@ using namespace android;
// helper function to extract a native Camera object from a Camera Java object
extern sp<Camera> get_native_camera(JNIEnv *env, jobject thiz, struct JNICameraContext** context);
+extern sp<PersistentSurface>
+android_media_MediaCodec_getPersistentInputSurface(JNIEnv* env, jobject object);
struct fields_t {
jfieldID context;
@@ -113,6 +118,12 @@ static sp<Surface> get_surface(JNIEnv* env, jobject clazz)
return android_view_Surface_getSurface(env, clazz);
}
+static sp<PersistentSurface> get_persistentSurface(JNIEnv* env, jobject object)
+{
+ ALOGV("get_persistentSurface");
+ return android_media_MediaCodec_getPersistentInputSurface(env, object);
+}
+
// Returns true if it throws an exception.
static bool process_media_recorder_call(JNIEnv *env, status_t opStatus, const char* exception, const char* message)
{
@@ -444,11 +455,13 @@ android_media_MediaRecorder_native_init(JNIEnv *env)
static void
android_media_MediaRecorder_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
- jstring packageName)
+ jstring packageName, jstring opPackageName)
{
ALOGV("setup");
- sp<MediaRecorder> mr = new MediaRecorder();
+ ScopedUtfChars opPackageNameStr(env, opPackageName);
+
+ sp<MediaRecorder> mr = new MediaRecorder(String16(opPackageNameStr.c_str()));
if (mr == NULL) {
jniThrowException(env, "java/lang/RuntimeException", "Out of memory");
return;
@@ -483,6 +496,18 @@ android_media_MediaRecorder_native_finalize(JNIEnv *env, jobject thiz)
android_media_MediaRecorder_release(env, thiz);
}
+void android_media_MediaRecorder_usePersistentSurface(
+ JNIEnv* env, jobject thiz, jobject object) {
+ ALOGV("android_media_MediaRecorder_usePersistentSurface");
+
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
+
+ sp<PersistentSurface> persistentSurface = get_persistentSurface(env, object);
+
+ process_media_recorder_call(env, mr->usePersistentSurface(persistentSurface),
+ "java/lang/IllegalArgumentException", "native_usePersistentSurface failed.");
+}
+
// ----------------------------------------------------------------------------
static JNINativeMethod gMethods[] = {
@@ -506,8 +531,10 @@ static JNINativeMethod gMethods[] = {
{"native_reset", "()V", (void *)android_media_MediaRecorder_native_reset},
{"release", "()V", (void *)android_media_MediaRecorder_release},
{"native_init", "()V", (void *)android_media_MediaRecorder_native_init},
- {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;)V", (void *)android_media_MediaRecorder_native_setup},
+ {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;)V",
+ (void *)android_media_MediaRecorder_native_setup},
{"native_finalize", "()V", (void *)android_media_MediaRecorder_native_finalize},
+ {"native_usePersistentSurface", "(Landroid/view/Surface;)V", (void *)android_media_MediaRecorder_usePersistentSurface },
};
// This function only registers the native methods, and is called from
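
[Editor's note, illustrative only — not part of the patch: the MediaRecorder change pulls a natively owned object (the persistent input surface) out of another JNI translation unit through an extern accessor. A minimal sketch of that idea follows, with placeholder types; std::shared_ptr stands in for android::sp and both "files" are shown together so the snippet compiles as one unit.]

    // owner.cpp ---------------------------------------------------------
    #include <memory>
    struct NativeSurface { int id; };           // stand-in for PersistentSurface
    static std::shared_ptr<NativeSurface> gSurface =
            std::make_shared<NativeSurface>(NativeSurface{42});
    std::shared_ptr<NativeSurface> get_shared_surface() { return gSurface; }

    // consumer.cpp ------------------------------------------------------
    // Only the extern declaration is needed; no header on the owner's internals.
    extern std::shared_ptr<NativeSurface> get_shared_surface();

    int use_shared_surface() {
        std::shared_ptr<NativeSurface> surface = get_shared_surface();
        return surface ? surface->id : -1;      // -1 if the owner has nothing to share
    }
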
diff --git a/media/jni/android_media_MediaSync.cpp b/media/jni/android_media_MediaSync.cpp
new file mode 100644
index 0000000..5c18901
--- /dev/null
+++ b/media/jni/android_media_MediaSync.cpp
@@ -0,0 +1,551 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSync-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaSync.h"
+
+#include "android_media_AudioTrack.h"
+#include "android_media_PlaybackSettings.h"
+#include "android_media_SyncSettings.h"
+#include "android_runtime/AndroidRuntime.h"
+#include "android_runtime/android_view_Surface.h"
+#include "jni.h"
+#include "JNIHelp.h"
+
+#include <gui/Surface.h>
+
+#include <media/AudioResamplerPublic.h>
+#include <media/AudioTrack.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaSync.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AString.h>
+
+#include <nativehelper/ScopedLocalRef.h>
+
+namespace android {
+
+struct fields_t {
+ jfieldID context;
+ jfieldID mediaTimestampMediaTimeUsID;
+ jfieldID mediaTimestampNanoTimeID;
+ jfieldID mediaTimestampClockRateID;
+};
+
+static fields_t gFields;
+static PlaybackSettings::fields_t gPlaybackSettingsFields;
+static SyncSettings::fields_t gSyncSettingsFields;
+
+////////////////////////////////////////////////////////////////////////////////
+
+JMediaSync::JMediaSync() {
+ mSync = MediaSync::create();
+}
+
+JMediaSync::~JMediaSync() {
+}
+
+status_t JMediaSync::setSurface(const sp<IGraphicBufferProducer> &bufferProducer) {
+ return mSync->setSurface(bufferProducer);
+}
+
+status_t JMediaSync::setAudioTrack(const sp<AudioTrack> &audioTrack) {
+ return mSync->setAudioTrack(audioTrack);
+}
+
+status_t JMediaSync::createInputSurface(
+ sp<IGraphicBufferProducer>* bufferProducer) {
+ return mSync->createInputSurface(bufferProducer);
+}
+
+sp<const MediaClock> JMediaSync::getMediaClock() {
+ return mSync->getMediaClock();
+}
+
+status_t JMediaSync::setPlaybackSettings(const AudioPlaybackRate& rate) {
+ return mSync->setPlaybackSettings(rate);
+}
+
+void JMediaSync::getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */) {
+ mSync->getPlaybackSettings(rate);
+}
+
+status_t JMediaSync::setSyncSettings(const AVSyncSettings& syncSettings) {
+ return mSync->setSyncSettings(syncSettings);
+}
+
+void JMediaSync::getSyncSettings(AVSyncSettings* syncSettings /* nonnull */) {
+ mSync->getSyncSettings(syncSettings);
+}
+
+status_t JMediaSync::setVideoFrameRateHint(float rate) {
+ return mSync->setVideoFrameRateHint(rate);
+}
+
+float JMediaSync::getVideoFrameRate() {
+ return mSync->getVideoFrameRate();
+}
+
+status_t JMediaSync::updateQueuedAudioData(
+ int sizeInBytes, int64_t presentationTimeUs) {
+ return mSync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
+}
+
+status_t JMediaSync::getPlayTimeForPendingAudioFrames(int64_t *outTimeUs) {
+ return mSync->getPlayTimeForPendingAudioFrames(outTimeUs);
+}
+
+} // namespace android
+
+////////////////////////////////////////////////////////////////////////////////
+
+using namespace android;
+
+static sp<JMediaSync> setMediaSync(JNIEnv *env, jobject thiz, const sp<JMediaSync> &sync) {
+ sp<JMediaSync> old = (JMediaSync *)env->GetLongField(thiz, gFields.context);
+ if (sync != NULL) {
+ sync->incStrong(thiz);
+ }
+ if (old != NULL) {
+ old->decStrong(thiz);
+ }
+
+ env->SetLongField(thiz, gFields.context, (jlong)sync.get());
+
+ return old;
+}
+
+static sp<JMediaSync> getMediaSync(JNIEnv *env, jobject thiz) {
+ return (JMediaSync *)env->GetLongField(thiz, gFields.context);
+}
+
+static void android_media_MediaSync_release(JNIEnv *env, jobject thiz) {
+ setMediaSync(env, thiz, NULL);
+}
+
+static void throwExceptionAsNecessary(
+ JNIEnv *env, status_t err, const char *msg = NULL) {
+ switch (err) {
+ case NO_ERROR:
+ break;
+
+ case BAD_VALUE:
+ jniThrowException(env, "java/lang/IllegalArgumentException", msg);
+ break;
+
+ case NO_INIT:
+ case INVALID_OPERATION:
+ default:
+ if (err > 0) {
+ break;
+ }
+ AString msgWithErrorCode(msg);
+ msgWithErrorCode.append(" error:");
+ msgWithErrorCode.append(err);
+ jniThrowException(env, "java/lang/IllegalStateException", msgWithErrorCode.c_str());
+ break;
+ }
+}
+
+static void android_media_MediaSync_native_setSurface(
+ JNIEnv *env, jobject thiz, jobject jsurface) {
+ ALOGV("android_media_MediaSync_setSurface");
+
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ sp<IGraphicBufferProducer> bufferProducer;
+ if (jsurface != NULL) {
+ sp<Surface> surface(android_view_Surface_getSurface(env, jsurface));
+ if (surface != NULL) {
+ bufferProducer = surface->getIGraphicBufferProducer();
+ } else {
+ throwExceptionAsNecessary(env, BAD_VALUE, "The surface has been released");
+ return;
+ }
+ }
+
+ status_t err = sync->setSurface(bufferProducer);
+
+ if (err == INVALID_OPERATION) {
+ throwExceptionAsNecessary(
+ env, INVALID_OPERATION, "Surface has already been configured");
+ } else if (err != NO_ERROR) {
+ AString msg("Failed to connect to surface with error ");
+ msg.append(err);
+ throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
+ }
+}
+
+static void android_media_MediaSync_native_setAudioTrack(
+ JNIEnv *env, jobject thiz, jobject jaudioTrack) {
+ ALOGV("android_media_MediaSync_setAudioTrack");
+
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ sp<AudioTrack> audioTrack;
+ if (jaudioTrack != NULL) {
+ audioTrack = android_media_AudioTrack_getAudioTrack(env, jaudioTrack);
+ if (audioTrack == NULL) {
+ throwExceptionAsNecessary(env, BAD_VALUE, "The audio track has been released");
+ return;
+ }
+ }
+
+ status_t err = sync->setAudioTrack(audioTrack);
+
+ if (err == INVALID_OPERATION) {
+ throwExceptionAsNecessary(
+ env, INVALID_OPERATION, "Audio track has already been configured");
+ } else if (err != NO_ERROR) {
+ AString msg("Failed to configure audio track with error ");
+ msg.append(err);
+ throwExceptionAsNecessary(env, BAD_VALUE, msg.c_str());
+ }
+}
+
+static jobject android_media_MediaSync_createInputSurface(
+ JNIEnv* env, jobject thiz) {
+ ALOGV("android_media_MediaSync_createInputSurface");
+
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return NULL;
+ }
+
+ // Tell the MediaSync that we want to use a Surface as input.
+ sp<IGraphicBufferProducer> bufferProducer;
+ status_t err = sync->createInputSurface(&bufferProducer);
+ if (err != NO_ERROR) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return NULL;
+ }
+
+ // Wrap the IGBP in a Java-language Surface.
+ return android_view_Surface_createFromIGraphicBufferProducer(env,
+ bufferProducer);
+}
+
+static void android_media_MediaSync_native_updateQueuedAudioData(
+ JNIEnv *env, jobject thiz, jint sizeInBytes, jlong presentationTimeUs) {
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ status_t err = sync->updateQueuedAudioData(sizeInBytes, presentationTimeUs);
+ if (err != NO_ERROR) {
+ throwExceptionAsNecessary(env, err);
+ return;
+ }
+}
+
+static jboolean android_media_MediaSync_native_getTimestamp(
+ JNIEnv *env, jobject thiz, jobject timestamp) {
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return JNI_FALSE;
+ }
+
+ sp<const MediaClock> mediaClock = sync->getMediaClock();
+ if (mediaClock == NULL) {
+ return JNI_FALSE;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t mediaUs = 0;
+ if (mediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
+ return JNI_FALSE;
+ }
+
+ env->SetLongField(timestamp, gFields.mediaTimestampMediaTimeUsID,
+ (jlong)mediaUs);
+ env->SetLongField(timestamp, gFields.mediaTimestampNanoTimeID,
+ (jlong)(nowUs * 1000));
+ env->SetFloatField(timestamp, gFields.mediaTimestampClockRateID,
+ (jfloat)mediaClock->getPlaybackRate());
+ return JNI_TRUE;
+}
+
+static jlong android_media_MediaSync_native_getPlayTimeForPendingAudioFrames(
+ JNIEnv *env, jobject thiz) {
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return 0;
+ }
+
+ int64_t playTimeUs = 0;
+ status_t err = sync->getPlayTimeForPendingAudioFrames(&playTimeUs);
+ if (err != NO_ERROR) {
+ throwExceptionAsNecessary(env, err);
+ }
+ return (jlong)playTimeUs;
+}
+
+static jfloat android_media_MediaSync_setPlaybackSettings(
+ JNIEnv *env, jobject thiz, jobject settings) {
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return (jfloat)0.f;
+ }
+
+ PlaybackSettings pbs;
+ pbs.fillFromJobject(env, gPlaybackSettingsFields, settings);
+ ALOGV("setPlaybackSettings: %d:%f %d:%f %d:%u %d:%u",
+ pbs.speedSet, pbs.audioRate.mSpeed,
+ pbs.pitchSet, pbs.audioRate.mPitch,
+ pbs.audioFallbackModeSet, pbs.audioRate.mFallbackMode,
+ pbs.audioStretchModeSet, pbs.audioRate.mStretchMode);
+
+ AudioPlaybackRate rate;
+ sync->getPlaybackSettings(&rate);
+ bool updatedRate = false;
+ if (pbs.speedSet) {
+ rate.mSpeed = pbs.audioRate.mSpeed;
+ updatedRate = true;
+ }
+ if (pbs.pitchSet) {
+ rate.mPitch = pbs.audioRate.mPitch;
+ updatedRate = true;
+ }
+ if (pbs.audioFallbackModeSet) {
+ rate.mFallbackMode = pbs.audioRate.mFallbackMode;
+ updatedRate = true;
+ }
+ if (pbs.audioStretchModeSet) {
+ rate.mStretchMode = pbs.audioRate.mStretchMode;
+ updatedRate = true;
+ }
+ if (updatedRate) {
+ status_t err = sync->setPlaybackSettings(rate);
+ if (err != OK) {
+ throwExceptionAsNecessary(env, err);
+ return (jfloat)0.f;
+ }
+ }
+
+ sp<const MediaClock> mediaClock = sync->getMediaClock();
+ if (mediaClock == NULL) {
+ return (jfloat)0.f;
+ }
+
+ return (jfloat)mediaClock->getPlaybackRate();
+}
+
+static jobject android_media_MediaSync_getPlaybackSettings(
+ JNIEnv *env, jobject thiz) {
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return NULL;
+ }
+
+ PlaybackSettings pbs;
+ AudioPlaybackRate &audioRate = pbs.audioRate;
+ sync->getPlaybackSettings(&audioRate);
+ ALOGV("getPlaybackSettings: %f %f %d %d",
+ audioRate.mSpeed, audioRate.mPitch, audioRate.mFallbackMode, audioRate.mStretchMode);
+
+ pbs.speedSet = true;
+ pbs.pitchSet = true;
+ pbs.audioFallbackModeSet = true;
+ pbs.audioStretchModeSet = true;
+
+ return pbs.asJobject(env, gPlaybackSettingsFields);
+}
+
+static jfloat android_media_MediaSync_setSyncSettings(
+ JNIEnv *env, jobject thiz, jobject settings) {
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return (jfloat)0.f;
+ }
+
+ SyncSettings scs;
+ scs.fillFromJobject(env, gSyncSettingsFields, settings);
+ ALOGV("setSyncSettings: %d:%d %d:%d %d:%f %d:%f",
+ scs.syncSourceSet, scs.sync.mSource,
+ scs.audioAdjustModeSet, scs.sync.mAudioAdjustMode,
+ scs.toleranceSet, scs.sync.mTolerance,
+ scs.frameRateSet, scs.frameRate);
+
+ AVSyncSettings avsync;
+ sync->getSyncSettings(&avsync);
+ bool updatedSync = false;
+ status_t err = OK;
+ if (scs.syncSourceSet) {
+ avsync.mSource = scs.sync.mSource;
+ updatedSync = true;
+ }
+ if (scs.audioAdjustModeSet) {
+ avsync.mAudioAdjustMode = scs.sync.mAudioAdjustMode;
+ updatedSync = true;
+ }
+ if (scs.toleranceSet) {
+ avsync.mTolerance = scs.sync.mTolerance;
+ updatedSync = true;
+ }
+ if (updatedSync) {
+ err = sync->setSyncSettings(avsync);
+ }
+
+ if (scs.frameRateSet && err == OK) {
+ err = sync->setVideoFrameRateHint(scs.frameRate);
+ }
+ if (err != OK) {
+ throwExceptionAsNecessary(env, err);
+ return (jfloat)0.f;
+ }
+
+ sp<const MediaClock> mediaClock = sync->getMediaClock();
+ if (mediaClock == NULL) {
+ return (jfloat)0.f;
+ }
+
+ return (jfloat)mediaClock->getPlaybackRate();
+}
+
+static jobject android_media_MediaSync_getSyncSettings(JNIEnv *env, jobject thiz) {
+ sp<JMediaSync> sync = getMediaSync(env, thiz);
+ if (sync == NULL) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return NULL;
+ }
+
+ SyncSettings scs;
+ sync->getSyncSettings(&scs.sync);
+ scs.frameRate = sync->getVideoFrameRate();
+
+ ALOGV("getSyncSettings: %d %d %f %f",
+ scs.sync.mSource, scs.sync.mAudioAdjustMode, scs.sync.mTolerance, scs.frameRate);
+
+ // sanity check settings
+ if (scs.sync.mSource >= AVSYNC_SOURCE_MAX
+ || scs.sync.mAudioAdjustMode >= AVSYNC_AUDIO_ADJUST_MODE_MAX
+ || scs.sync.mTolerance < 0.f
+ || scs.sync.mTolerance >= AVSYNC_TOLERANCE_MAX) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return NULL;
+ }
+
+ scs.syncSourceSet = true;
+ scs.audioAdjustModeSet = true;
+ scs.toleranceSet = true;
+ scs.frameRateSet = scs.frameRate >= 0.f;
+
+ return scs.asJobject(env, gSyncSettingsFields);
+}
+
+static void android_media_MediaSync_native_init(JNIEnv *env) {
+ ScopedLocalRef<jclass> clazz(env, env->FindClass("android/media/MediaSync"));
+ CHECK(clazz.get() != NULL);
+
+ gFields.context = env->GetFieldID(clazz.get(), "mNativeContext", "J");
+ CHECK(gFields.context != NULL);
+
+ clazz.reset(env->FindClass("android/media/MediaTimestamp"));
+ CHECK(clazz.get() != NULL);
+
+ gFields.mediaTimestampMediaTimeUsID =
+ env->GetFieldID(clazz.get(), "mediaTimeUs", "J");
+ CHECK(gFields.mediaTimestampMediaTimeUsID != NULL);
+
+ gFields.mediaTimestampNanoTimeID =
+ env->GetFieldID(clazz.get(), "nanoTime", "J");
+ CHECK(gFields.mediaTimestampNanoTimeID != NULL);
+
+ gFields.mediaTimestampClockRateID =
+ env->GetFieldID(clazz.get(), "clockRate", "F");
+ CHECK(gFields.mediaTimestampClockRateID != NULL);
+
+ gSyncSettingsFields.init(env);
+ gPlaybackSettingsFields.init(env);
+}
+
+static void android_media_MediaSync_native_setup(JNIEnv *env, jobject thiz) {
+ sp<JMediaSync> sync = new JMediaSync();
+
+ setMediaSync(env, thiz, sync);
+}
+
+static void android_media_MediaSync_native_finalize(JNIEnv *env, jobject thiz) {
+ android_media_MediaSync_release(env, thiz);
+}
+
+static JNINativeMethod gMethods[] = {
+ { "native_setSurface",
+ "(Landroid/view/Surface;)V",
+ (void *)android_media_MediaSync_native_setSurface },
+
+ { "native_setAudioTrack",
+ "(Landroid/media/AudioTrack;)V",
+ (void *)android_media_MediaSync_native_setAudioTrack },
+
+ { "createInputSurface", "()Landroid/view/Surface;",
+ (void *)android_media_MediaSync_createInputSurface },
+
+ { "native_updateQueuedAudioData",
+ "(IJ)V",
+ (void *)android_media_MediaSync_native_updateQueuedAudioData },
+
+ { "native_getTimestamp",
+ "(Landroid/media/MediaTimestamp;)Z",
+ (void *)android_media_MediaSync_native_getTimestamp },
+
+ { "native_getPlayTimeForPendingAudioFrames",
+ "()J",
+ (void *)android_media_MediaSync_native_getPlayTimeForPendingAudioFrames },
+
+ { "native_init", "()V", (void *)android_media_MediaSync_native_init },
+
+ { "native_setup", "()V", (void *)android_media_MediaSync_native_setup },
+
+ { "native_release", "()V", (void *)android_media_MediaSync_release },
+
+ { "native_setPlaybackSettings", "(Landroid/media/PlaybackSettings;)F",
+ (void *)android_media_MediaSync_setPlaybackSettings },
+
+ { "getPlaybackSettings", "()Landroid/media/PlaybackSettings;",
+ (void *)android_media_MediaSync_getPlaybackSettings },
+
+ { "native_setSyncSettings", "(Landroid/media/SyncSettings;)F",
+ (void *)android_media_MediaSync_setSyncSettings },
+
+ { "getSyncSettings", "()Landroid/media/SyncSettings;",
+ (void *)android_media_MediaSync_getSyncSettings },
+
+ { "native_finalize", "()V", (void *)android_media_MediaSync_native_finalize },
+};
+
+int register_android_media_MediaSync(JNIEnv *env) {
+ return AndroidRuntime::registerNativeMethods(
+ env, "android/media/MediaSync", gMethods, NELEM(gMethods));
+}
diff --git a/media/jni/android_media_MediaSync.h b/media/jni/android_media_MediaSync.h
new file mode 100644
index 0000000..fa5e5e0
--- /dev/null
+++ b/media/jni/android_media_MediaSync.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIASYNC_H_
+#define _ANDROID_MEDIA_MEDIASYNC_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/MediaSync.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct AudioPlaybackRate;
+class AudioTrack;
+struct IGraphicBufferProducer;
+struct MediaClock;
+class MediaSync;
+
+struct JMediaSync : public RefBase {
+ JMediaSync();
+
+ status_t setSurface(const sp<IGraphicBufferProducer> &bufferProducer);
+ status_t setAudioTrack(const sp<AudioTrack> &audioTrack);
+
+ status_t createInputSurface(sp<IGraphicBufferProducer>* bufferProducer);
+
+ status_t updateQueuedAudioData(int sizeInBytes, int64_t presentationTimeUs);
+
+ status_t getPlayTimeForPendingAudioFrames(int64_t *outTimeUs);
+
+ status_t setPlaybackSettings(const AudioPlaybackRate& rate);
+ void getPlaybackSettings(AudioPlaybackRate* rate /* nonnull */);
+ status_t setSyncSettings(const AVSyncSettings& syncSettings);
+ void getSyncSettings(AVSyncSettings* syncSettings /* nonnull */);
+ status_t setVideoFrameRateHint(float rate);
+ float getVideoFrameRate();
+
+ sp<const MediaClock> getMediaClock();
+
+protected:
+ virtual ~JMediaSync();
+
+private:
+ sp<MediaSync> mSync;
+
+ DISALLOW_EVIL_CONSTRUCTORS(JMediaSync);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_MEDIASYNC_H_
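
[Editor's note, illustrative only — not part of the patch: JMediaSync is kept alive for the Java object by storing its address in a long field and manually bumping the reference count on swap (incStrong/decStrong in setMediaSync above). A reduced sketch of that handle idiom, with std::shared_ptr standing in for android::sp and a plain int64_t standing in for the Java field:]

    #include <cstdint>
    #include <memory>

    struct Engine { /* placeholder for JMediaSync */ };

    static int64_t gNativeContext = 0;      // the "mNativeContext" long field

    // Swap in a new engine; release the reference held on behalf of Java.
    void setEngine(std::shared_ptr<Engine> engine) {
        auto* holder = engine
                ? new std::shared_ptr<Engine>(std::move(engine))   // like incStrong
                : nullptr;
        auto* old = reinterpret_cast<std::shared_ptr<Engine>*>(gNativeContext);
        gNativeContext = reinterpret_cast<int64_t>(holder);
        delete old;                         // like decStrong on the previous object
    }

    std::shared_ptr<Engine> getEngine() {
        auto* holder = reinterpret_cast<std::shared_ptr<Engine>*>(gNativeContext);
        return holder ? *holder : nullptr;
    }
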
diff --git a/media/jni/android_media_PlaybackSettings.h b/media/jni/android_media_PlaybackSettings.h
new file mode 100644
index 0000000..1f4f256
--- /dev/null
+++ b/media/jni/android_media_PlaybackSettings.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_PLAYBACK_SETTINGS_H_
+#define _ANDROID_MEDIA_PLAYBACK_SETTINGS_H_
+
+#include <media/AudioResamplerPublic.h>
+
+namespace android {
+
+// This entire class is inline as it is used from both core and media
+struct PlaybackSettings {
+ AudioPlaybackRate audioRate;
+ bool speedSet;
+ bool pitchSet;
+ bool audioFallbackModeSet;
+ bool audioStretchModeSet;
+
+ struct fields_t {
+ jclass clazz;
+ jmethodID constructID;
+
+ jfieldID speed;
+ jfieldID pitch;
+ jfieldID audio_fallback_mode;
+ jfieldID audio_stretch_mode;
+ jfieldID set;
+ jint set_speed;
+ jint set_pitch;
+ jint set_audio_fallback_mode;
+ jint set_audio_stretch_mode;
+
+ void init(JNIEnv *env) {
+ jclass lclazz = env->FindClass("android/media/PlaybackSettings");
+ if (lclazz == NULL) {
+ return;
+ }
+
+ clazz = (jclass)env->NewGlobalRef(lclazz);
+ if (clazz == NULL) {
+ return;
+ }
+
+ constructID = env->GetMethodID(clazz, "<init>", "()V");
+
+ speed = env->GetFieldID(clazz, "mSpeed", "F");
+ pitch = env->GetFieldID(clazz, "mPitch", "F");
+ audio_fallback_mode = env->GetFieldID(clazz, "mAudioFallbackMode", "I");
+ audio_stretch_mode = env->GetFieldID(clazz, "mAudioStretchMode", "I");
+ set = env->GetFieldID(clazz, "mSet", "I");
+
+ set_speed =
+ env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_SPEED", "I"));
+ set_pitch =
+ env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_PITCH", "I"));
+ set_audio_fallback_mode = env->GetStaticIntField(
+ clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_FALLBACK_MODE", "I"));
+ set_audio_stretch_mode = env->GetStaticIntField(
+ clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_STRETCH_MODE", "I"));
+
+ env->DeleteLocalRef(lclazz);
+ }
+
+ void exit(JNIEnv *env) {
+ env->DeleteGlobalRef(clazz);
+ clazz = NULL;
+ }
+ };
+
+ void fillFromJobject(JNIEnv *env, const fields_t& fields, jobject settings) {
+ audioRate.mSpeed = env->GetFloatField(settings, fields.speed);
+ audioRate.mPitch = env->GetFloatField(settings, fields.pitch);
+ audioRate.mFallbackMode =
+ (AudioTimestretchFallbackMode)env->GetIntField(settings, fields.audio_fallback_mode);
+ audioRate.mStretchMode =
+ (AudioTimestretchStretchMode)env->GetIntField(settings, fields.audio_stretch_mode);
+ int set = env->GetIntField(settings, fields.set);
+
+ speedSet = set & fields.set_speed;
+ pitchSet = set & fields.set_pitch;
+ audioFallbackModeSet = set & fields.set_audio_fallback_mode;
+ audioStretchModeSet = set & fields.set_audio_stretch_mode;
+ }
+
+ jobject asJobject(JNIEnv *env, const fields_t& fields) {
+ jobject settings = env->NewObject(fields.clazz, fields.constructID);
+ if (settings == NULL) {
+ return NULL;
+ }
+ env->SetFloatField(settings, fields.speed, (jfloat)audioRate.mSpeed);
+ env->SetFloatField(settings, fields.pitch, (jfloat)audioRate.mPitch);
+ env->SetIntField(settings, fields.audio_fallback_mode, (jint)audioRate.mFallbackMode);
+ env->SetIntField(settings, fields.audio_stretch_mode, (jint)audioRate.mStretchMode);
+ env->SetIntField(
+ settings, fields.set,
+ (speedSet ? fields.set_speed : 0)
+ | (pitchSet ? fields.set_pitch : 0)
+ | (audioFallbackModeSet ? fields.set_audio_fallback_mode : 0)
+ | (audioStretchModeSet ? fields.set_audio_stretch_mode : 0));
+
+ return settings;
+ }
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_PLAYBACK_SETTINGS_H_
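
[Editor's note, illustrative only — not part of the patch: PlaybackSettings round-trips which fields were explicitly set through the mSet bitmask. The sketch below shows that encode/decode in isolation; the flag values here are placeholders, whereas the real code reads SET_SPEED etc. from the Java class at init time.]

    #include <cassert>

    enum : int {
        SET_SPEED = 1 << 0,
        SET_PITCH = 1 << 1,
    };

    struct Flags {
        bool speedSet = false;
        bool pitchSet = false;
    };

    int encode(const Flags& f) {
        return (f.speedSet ? SET_SPEED : 0) | (f.pitchSet ? SET_PITCH : 0);
    }

    Flags decode(int set) {
        Flags f;
        f.speedSet = (set & SET_SPEED) != 0;
        f.pitchSet = (set & SET_PITCH) != 0;
        return f;
    }

    int main() {
        Flags f;
        f.pitchSet = true;
        Flags back = decode(encode(f));
        assert(back.pitchSet && !back.speedSet);    // only the set bit survives
        return 0;
    }
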
diff --git a/media/jni/android_media_SyncSettings.cpp b/media/jni/android_media_SyncSettings.cpp
new file mode 100644
index 0000000..5da35e7
--- /dev/null
+++ b/media/jni/android_media_SyncSettings.cpp
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "android_media_SyncSettings.h"
+
+#include "JNIHelp.h"
+
+namespace android {
+
+void SyncSettings::fields_t::init(JNIEnv *env) {
+ jclass lclazz = env->FindClass("android/media/SyncSettings");
+ if (lclazz == NULL) {
+ return;
+ }
+
+ clazz = (jclass)env->NewGlobalRef(lclazz);
+ if (clazz == NULL) {
+ return;
+ }
+
+ constructID = env->GetMethodID(clazz, "<init>", "()V");
+
+ sync_source = env->GetFieldID(clazz, "mSyncSource", "I");
+ audio_adjust_mode = env->GetFieldID(clazz, "mAudioAdjustMode", "I");
+ tolerance = env->GetFieldID(clazz, "mTolerance", "F");
+ frame_rate = env->GetFieldID(clazz, "mFrameRate", "F");
+ set = env->GetFieldID(clazz, "mSet", "I");
+
+ set_sync_source =
+ env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_SYNC_SOURCE", "I"));
+ set_audio_adjust_mode = env->GetStaticIntField(
+ clazz, env->GetStaticFieldID(clazz, "SET_AUDIO_ADJUST_MODE", "I"));
+ set_tolerance =
+ env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_TOLERANCE", "I"));
+ set_frame_rate =
+ env->GetStaticIntField(clazz, env->GetStaticFieldID(clazz, "SET_FRAME_RATE", "I"));
+
+ env->DeleteLocalRef(lclazz);
+}
+
+void SyncSettings::fields_t::exit(JNIEnv *env) {
+ env->DeleteGlobalRef(clazz);
+ clazz = NULL;
+}
+
+void SyncSettings::fillFromJobject(JNIEnv *env, const fields_t& fields, jobject settings) {
+ sync.mSource = (AVSyncSource)env->GetIntField(settings, fields.sync_source);
+ sync.mAudioAdjustMode = (AVSyncAudioAdjustMode)env->GetIntField(settings, fields.audio_adjust_mode);
+ sync.mTolerance = env->GetFloatField(settings, fields.tolerance);
+ frameRate = env->GetFloatField(settings, fields.frame_rate);
+ int set = env->GetIntField(settings, fields.set);
+
+ syncSourceSet = set & fields.set_sync_source;
+ audioAdjustModeSet = set & fields.set_audio_adjust_mode;
+ toleranceSet = set & fields.set_tolerance;
+ frameRateSet = set & fields.set_frame_rate;
+}
+
+jobject SyncSettings::asJobject(JNIEnv *env, const fields_t& fields) {
+ jobject settings = env->NewObject(fields.clazz, fields.constructID);
+ if (settings == NULL) {
+ return NULL;
+ }
+ env->SetIntField(settings, fields.sync_source, (jint)sync.mSource);
+ env->SetIntField(settings, fields.audio_adjust_mode, (jint)sync.mAudioAdjustMode);
+ env->SetFloatField(settings, fields.tolerance, (jfloat)sync.mTolerance);
+ env->SetFloatField(settings, fields.frame_rate, (jfloat)frameRate);
+ env->SetIntField(
+ settings, fields.set,
+ (syncSourceSet ? fields.set_sync_source : 0)
+ | (audioAdjustModeSet ? fields.set_audio_adjust_mode : 0)
+ | (toleranceSet ? fields.set_tolerance : 0)
+ | (frameRateSet ? fields.set_frame_rate : 0));
+
+ return settings;
+}
+
+} // namespace android
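
[Editor's note, illustrative only — not part of the patch: fields_t::init() above promotes the FindClass result to a global reference, caches the field IDs, and drops the local reference. A minimal standalone sketch of that caching pattern, with a placeholder class "com/example/Settings" and field "mValue":]

    #include <jni.h>

    struct CachedFields {
        jclass   clazz = nullptr;   // global ref, safe to keep across JNI calls
        jfieldID value = nullptr;   // field IDs stay valid while the class does

        void init(JNIEnv* env) {
            jclass local = env->FindClass("com/example/Settings");
            if (local == nullptr) {
                return;                         // exception already pending
            }
            clazz = static_cast<jclass>(env->NewGlobalRef(local));
            env->DeleteLocalRef(local);         // local ref no longer needed
            if (clazz == nullptr) {
                return;
            }
            value = env->GetFieldID(clazz, "mValue", "I");
        }

        void exit(JNIEnv* env) {
            if (clazz != nullptr) {
                env->DeleteGlobalRef(clazz);
                clazz = nullptr;
            }
        }
    };
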
diff --git a/media/jni/android_media_SyncSettings.h b/media/jni/android_media_SyncSettings.h
new file mode 100644
index 0000000..23530db
--- /dev/null
+++ b/media/jni/android_media_SyncSettings.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2015, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_SYNC_SETTINGS_H_
+#define _ANDROID_MEDIA_SYNC_SETTINGS_H_
+
+#include "jni.h"
+
+#include <media/stagefright/MediaSync.h>
+
+namespace android {
+
+struct SyncSettings {
+ AVSyncSettings sync;
+ float frameRate;
+
+ bool syncSourceSet;
+ bool audioAdjustModeSet;
+ bool toleranceSet;
+ bool frameRateSet;
+
+ struct fields_t {
+ jclass clazz;
+ jmethodID constructID;
+
+ jfieldID sync_source;
+ jfieldID audio_adjust_mode;
+ jfieldID tolerance;
+ jfieldID frame_rate;
+ jfieldID set;
+ jint set_sync_source;
+ jint set_audio_adjust_mode;
+ jint set_tolerance;
+ jint set_frame_rate;
+
+ // initializes fields
+ void init(JNIEnv *env);
+
+ // releases global references held
+ void exit(JNIEnv *env);
+ };
+
+ // fills this from an android.media.SyncSettings object
+ void fillFromJobject(JNIEnv *env, const fields_t& fields, jobject settings);
+
+ // returns this as an android.media.SyncSettings object
+ jobject asJobject(JNIEnv *env, const fields_t& fields);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_SYNC_SETTINGS_H_
diff --git a/media/jni/audioeffect/android_media_AudioEffect.cpp b/media/jni/audioeffect/android_media_AudioEffect.cpp
index c364d46..96b72a2 100644
--- a/media/jni/audioeffect/android_media_AudioEffect.cpp
+++ b/media/jni/audioeffect/android_media_AudioEffect.cpp
@@ -25,6 +25,8 @@
#include <android_runtime/AndroidRuntime.h>
#include "media/AudioEffect.h"
+#include <ScopedUtfChars.h>
+
using namespace android;
#define AUDIOEFFECT_SUCCESS 0
@@ -249,7 +251,8 @@ android_media_AudioEffect_native_init(JNIEnv *env)
static jint
android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
- jstring type, jstring uuid, jint priority, jint sessionId, jintArray jId, jobjectArray javadesc)
+ jstring type, jstring uuid, jint priority, jint sessionId, jintArray jId,
+ jobjectArray javadesc, jstring opPackageName)
{
ALOGV("android_media_AudioEffect_native_setup");
AudioEffectJniStorage* lpJniStorage = NULL;
@@ -267,6 +270,8 @@ android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
jstring jdescName;
jstring jdescImplementor;
+ ScopedUtfChars opPackageNameStr(env, opPackageName);
+
if (type != NULL) {
typeStr = env->GetStringUTFChars(type, NULL);
if (typeStr == NULL) { // Out of memory
@@ -312,6 +317,7 @@ android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
// create the native AudioEffect object
lpAudioEffect = new AudioEffect(typeStr,
+ String16(opPackageNameStr.c_str()),
uuidStr,
priority,
effectCallback,
@@ -868,7 +874,7 @@ android_media_AudioEffect_native_queryPreProcessings(JNIEnv *env, jclass clazz _
// Dalvik VM type signatures
static JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_AudioEffect_native_init},
- {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;II[I[Ljava/lang/Object;)I",
+ {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;II[I[Ljava/lang/Object;Ljava/lang/String;)I",
(void *)android_media_AudioEffect_native_setup},
{"native_finalize", "()V", (void *)android_media_AudioEffect_native_finalize},
{"native_release", "()V", (void *)android_media_AudioEffect_native_release},
diff --git a/media/jni/audioeffect/android_media_Visualizer.cpp b/media/jni/audioeffect/android_media_Visualizer.cpp
index 460277f..abc681e 100644
--- a/media/jni/audioeffect/android_media_Visualizer.cpp
+++ b/media/jni/audioeffect/android_media_Visualizer.cpp
@@ -26,6 +26,8 @@
#include <utils/threads.h>
#include "media/Visualizer.h"
+#include <ScopedUtfChars.h>
+
using namespace android;
#define VISUALIZER_SUCCESS 0
@@ -331,7 +333,7 @@ static void android_media_visualizer_effect_callback(int32_t event,
static jint
android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
- jint sessionId, jintArray jId)
+ jint sessionId, jintArray jId, jstring opPackageName)
{
ALOGV("android_media_visualizer_native_setup");
visualizerJniStorage* lpJniStorage = NULL;
@@ -339,6 +341,8 @@ android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_th
Visualizer* lpVisualizer = NULL;
jint* nId = NULL;
+ ScopedUtfChars opPackageNameStr(env, opPackageName);
+
lpJniStorage = new visualizerJniStorage();
if (lpJniStorage == NULL) {
ALOGE("setup: Error creating JNI Storage");
@@ -362,7 +366,8 @@ android_media_visualizer_native_setup(JNIEnv *env, jobject thiz, jobject weak_th
}
// create the native Visualizer object
- lpVisualizer = new Visualizer(0,
+ lpVisualizer = new Visualizer(String16(opPackageNameStr.c_str()),
+ 0,
android_media_visualizer_effect_callback,
lpJniStorage,
sessionId);
@@ -662,7 +667,7 @@ android_media_setPeriodicCapture(JNIEnv *env, jobject thiz, jint rate, jboolean
// Dalvik VM type signatures
static JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_visualizer_native_init},
- {"native_setup", "(Ljava/lang/Object;I[I)I",
+ {"native_setup", "(Ljava/lang/Object;I[ILjava/lang/String;)I",
(void *)android_media_visualizer_native_setup},
{"native_finalize", "()V", (void *)android_media_visualizer_native_finalize},
{"native_release", "()V", (void *)android_media_visualizer_native_release},
diff --git a/media/jni/soundpool/Android.mk b/media/jni/soundpool/Android.mk
index 71ab013..2476056 100644
--- a/media/jni/soundpool/Android.mk
+++ b/media/jni/soundpool/Android.mk
@@ -2,7 +2,7 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
- android_media_SoundPool_SoundPoolImpl.cpp \
+ android_media_SoundPool.cpp \
SoundPool.cpp \
SoundPoolThread.cpp
diff --git a/media/jni/soundpool/SoundPool.cpp b/media/jni/soundpool/SoundPool.cpp
index a73209b..84ae3b4 100644
--- a/media/jni/soundpool/SoundPool.cpp
+++ b/media/jni/soundpool/SoundPool.cpp
@@ -256,7 +256,7 @@ int SoundPool::play(int sampleID, float leftVolume, float rightVolume,
dump();
// allocate a channel
- channel = allocateChannel_l(priority);
+ channel = allocateChannel_l(priority, sampleID);
// no channel allocated - return 0
if (!channel) {
@@ -271,13 +271,25 @@ int SoundPool::play(int sampleID, float leftVolume, float rightVolume,
return channelID;
}
-SoundChannel* SoundPool::allocateChannel_l(int priority)
+SoundChannel* SoundPool::allocateChannel_l(int priority, int sampleID)
{
List<SoundChannel*>::iterator iter;
SoundChannel* channel = NULL;
- // allocate a channel
+ // check whether an idle channel that previously played this sampleID is available
if (!mChannels.empty()) {
+ for (iter = mChannels.begin(); iter != mChannels.end(); ++iter) {
+ if (sampleID == (*iter)->getPrevSampleID() && (*iter)->state() == SoundChannel::IDLE) {
+ channel = *iter;
+ mChannels.erase(iter);
+ ALOGV("Allocated recycled channel for same sampleID");
+ break;
+ }
+ }
+ }
+
+ // allocate any channel
+ if (!channel && !mChannels.empty()) {
iter = mChannels.begin();
if (priority >= (*iter)->priority()) {
channel = *iter;
@@ -502,10 +514,11 @@ static status_t decode(int fd, int64_t offset, int64_t length,
if (strncmp(mime, "audio/", 6) == 0) {
AMediaCodec *codec = AMediaCodec_createDecoderByType(mime);
- if (AMediaCodec_configure(codec, format,
- NULL /* window */, NULL /* drm */, 0 /* flags */) != AMEDIA_OK
- || AMediaCodec_start(codec) != AMEDIA_OK
- || AMediaExtractor_selectTrack(ex, i) != AMEDIA_OK) {
+ if (codec == NULL
+ || AMediaCodec_configure(codec, format,
+ NULL /* window */, NULL /* drm */, 0 /* flags */) != AMEDIA_OK
+ || AMediaCodec_start(codec) != AMEDIA_OK
+ || AMediaExtractor_selectTrack(ex, i) != AMEDIA_OK) {
AMediaExtractor_delete(ex);
AMediaCodec_delete(codec);
AMediaFormat_delete(format);
@@ -626,7 +639,7 @@ status_t Sample::doLoad()
goto error;
}
- if ((numChannels < 1) || (numChannels > 2)) {
+ if ((numChannels < 1) || (numChannels > 8)) {
ALOGE("Sample channel count (%d) out of range", numChannels);
status = BAD_VALUE;
goto error;
@@ -648,6 +661,7 @@ error:
void SoundChannel::init(SoundPool* soundPool)
{
mSoundPool = soundPool;
+ mPrevSampleID = -1;
}
// call with sound pool lock held
@@ -656,7 +670,7 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
{
sp<AudioTrack> oldTrack;
sp<AudioTrack> newTrack;
- status_t status;
+ status_t status = NO_ERROR;
{ // scope for the lock
Mutex::Autolock lock(&mLock);
@@ -689,8 +703,10 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
size_t frameCount = 0;
if (loop) {
- frameCount = sample->size()/numChannels/
- ((sample->format() == AUDIO_FORMAT_PCM_16_BIT) ? sizeof(int16_t) : sizeof(uint8_t));
+ const audio_format_t format = sample->format();
+ const size_t frameSize = audio_is_linear_pcm(format)
+ ? numChannels * audio_bytes_per_sample(format) : 1;
+ frameCount = sample->size() / frameSize;
}
#ifndef USE_SHARED_MEM_BUFFER
@@ -701,38 +717,47 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
}
#endif
- // mToggle toggles each time a track is started on a given channel.
- // The toggle is concatenated with the SoundChannel address and passed to AudioTrack
- // as callback user data. This enables the detection of callbacks received from the old
- // audio track while the new one is being started and avoids processing them with
- // wrong audio audio buffer size (mAudioBufferSize)
- unsigned long toggle = mToggle ^ 1;
- void *userData = (void *)((unsigned long)this | toggle);
- audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(numChannels);
-
- // do not create a new audio track if current track is compatible with sample parameters
-#ifdef USE_SHARED_MEM_BUFFER
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channelMask, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
-#else
- uint32_t bufferFrames = (totalFrames + (kDefaultBufferCount - 1)) / kDefaultBufferCount;
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channelMask, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
- bufferFrames);
-#endif
- oldTrack = mAudioTrack;
- status = newTrack->initCheck();
- if (status != NO_ERROR) {
- ALOGE("Error creating AudioTrack");
- goto exit;
+ // check if the existing track has the same sample id.
+ if (mAudioTrack != 0 && mPrevSampleID == sample->sampleID()) {
+ // the sample rate may fail to change if the audio track is a fast track.
+ if (mAudioTrack->setSampleRate(sampleRate) == NO_ERROR) {
+ newTrack = mAudioTrack;
+ ALOGV("reusing track %p for sample %d", mAudioTrack.get(), sample->sampleID());
+ }
+ }
+ if (newTrack == 0) {
+ // mToggle toggles each time a track is started on a given channel.
+ // The toggle is concatenated with the SoundChannel address and passed to AudioTrack
+ // as callback user data. This enables the detection of callbacks received from the old
+ // audio track while the new one is being started and avoids processing them with
+ // wrong audio buffer size (mAudioBufferSize)
+ unsigned long toggle = mToggle ^ 1;
+ void *userData = (void *)((unsigned long)this | toggle);
+ audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(numChannels);
+
+ // do not create a new audio track if current track is compatible with sample parameters
+ #ifdef USE_SHARED_MEM_BUFFER
+ newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
+ channelMask, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
+ #else
+ uint32_t bufferFrames = (totalFrames + (kDefaultBufferCount - 1)) / kDefaultBufferCount;
+ newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
+ channelMask, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
+ bufferFrames);
+ #endif
+ oldTrack = mAudioTrack;
+ status = newTrack->initCheck();
+ if (status != NO_ERROR) {
+ ALOGE("Error creating AudioTrack");
+ goto exit;
+ }
+ // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
+ mToggle = toggle;
+ mAudioTrack = newTrack;
+ ALOGV("using new track %p for sample %d", newTrack.get(), sample->sampleID());
}
- ALOGV("setVolume %p", newTrack.get());
newTrack->setVolume(leftVolume, rightVolume);
newTrack->setLoop(0, frameCount, loop);
-
- // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
- mToggle = toggle;
- mAudioTrack = newTrack;
mPos = 0;
mSample = sample;
mChannelID = nextChannelID;
@@ -875,6 +900,7 @@ bool SoundChannel::doStop_l()
setVolume_l(0, 0);
ALOGV("stop");
mAudioTrack->stop();
+ mPrevSampleID = mSample->sampleID();
mSample.clear();
mState = IDLE;
mPriority = IDLE_PRIORITY;
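
[Editor's note, illustrative only — not part of the patch: the loop frame-count hunk above replaces the 8/16-bit special case with frameSize = channels * bytes-per-sample for linear PCM. A runnable sketch of that arithmetic; the bytes-per-sample argument is a placeholder for what audio_bytes_per_sample() reports for a given format.]

    #include <cstddef>
    #include <cstdio>

    size_t loopFrameCount(size_t bufferBytes, size_t numChannels,
                          size_t bytesPerSample, bool isLinearPcm) {
        // Non-PCM data is counted in bytes (frame size 1), matching the patch.
        const size_t frameSize = isLinearPcm ? numChannels * bytesPerSample : 1;
        return frameSize ? bufferBytes / frameSize : 0;
    }

    int main() {
        // 16-bit stereo: 4 bytes per frame, so 192000 bytes -> 48000 frames.
        std::printf("%zu frames\n", loopFrameCount(192000, 2, 2, true));
        return 0;
    }
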
diff --git a/media/jni/soundpool/SoundPool.h b/media/jni/soundpool/SoundPool.h
index 9d9cbdf..4aacf53 100644
--- a/media/jni/soundpool/SoundPool.h
+++ b/media/jni/soundpool/SoundPool.h
@@ -72,8 +72,8 @@ private:
volatile int32_t mRefCount;
uint16_t mSampleID;
uint16_t mSampleRate;
- uint8_t mState : 3;
- uint8_t mNumChannels : 2;
+ uint8_t mState;
+ uint8_t mNumChannels;
audio_format_t mFormat;
int mFd;
int64_t mOffset;
@@ -136,6 +136,7 @@ public:
void nextEvent();
int nextChannelID() { return mNextEvent.channelID(); }
void dump();
+ int getPrevSampleID(void) { return mPrevSampleID; }
private:
static void callback(int event, void* user, void *info);
@@ -152,6 +153,7 @@ private:
int mAudioBufferSize;
unsigned long mToggle;
bool mAutoPaused;
+ int mPrevSampleID;
};
// application object for managing a pool of sounds
@@ -193,7 +195,7 @@ private:
sp<Sample> findSample(int sampleID) { return mSamples.valueFor(sampleID); }
SoundChannel* findChannel (int channelID);
SoundChannel* findNextChannel (int channelID);
- SoundChannel* allocateChannel_l(int priority);
+ SoundChannel* allocateChannel_l(int priority, int sampleID);
void moveToFront_l(SoundChannel* channel);
void notify(SoundPoolEvent event);
void dump();
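
[Editor's note, illustrative only — not part of the patch: the new allocateChannel_l(priority, sampleID) declaration above supports recycling a channel whose previous sample matches, so its AudioTrack can be reused. A reduced sketch over placeholder channel objects:]

    #include <list>

    struct Channel {
        int  prevSampleID = -1;
        int  priority     = 0;
        bool idle         = true;
    };

    // Prefer an idle channel that last played this sample; otherwise take the
    // head of the free list if the request outranks it.
    Channel* allocateChannel(std::list<Channel*>& channels, int priority, int sampleID) {
        for (auto it = channels.begin(); it != channels.end(); ++it) {
            if ((*it)->idle && (*it)->prevSampleID == sampleID) {
                Channel* channel = *it;
                channels.erase(it);
                return channel;
            }
        }
        if (!channels.empty() && priority >= channels.front()->priority) {
            Channel* channel = channels.front();
            channels.pop_front();
            return channel;
        }
        return nullptr;     // caller treats this as "no channel available"
    }
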
diff --git a/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp b/media/jni/soundpool/android_media_SoundPool.cpp
index b2333f8..fc4cf05 100644
--- a/media/jni/soundpool/android_media_SoundPool_SoundPoolImpl.cpp
+++ b/media/jni/soundpool/android_media_SoundPool.cpp
@@ -47,10 +47,10 @@ static audio_attributes_fields_t javaAudioAttrFields;
// ----------------------------------------------------------------------------
static jint
-android_media_SoundPool_SoundPoolImpl_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
+android_media_SoundPool_load_FD(JNIEnv *env, jobject thiz, jobject fileDescriptor,
jlong offset, jlong length, jint priority)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_load_FD");
+ ALOGV("android_media_SoundPool_load_FD");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return (jint) ap->load(jniGetFDFromFileDescriptor(env, fileDescriptor),
@@ -58,104 +58,104 @@ android_media_SoundPool_SoundPoolImpl_load_FD(JNIEnv *env, jobject thiz, jobject
}
static jboolean
-android_media_SoundPool_SoundPoolImpl_unload(JNIEnv *env, jobject thiz, jint sampleID) {
- ALOGV("android_media_SoundPool_SoundPoolImpl_unload\n");
+android_media_SoundPool_unload(JNIEnv *env, jobject thiz, jint sampleID) {
+ ALOGV("android_media_SoundPool_unload\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return JNI_FALSE;
return ap->unload(sampleID) ? JNI_TRUE : JNI_FALSE;
}
static jint
-android_media_SoundPool_SoundPoolImpl_play(JNIEnv *env, jobject thiz, jint sampleID,
+android_media_SoundPool_play(JNIEnv *env, jobject thiz, jint sampleID,
jfloat leftVolume, jfloat rightVolume, jint priority, jint loop,
jfloat rate)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_play\n");
+ ALOGV("android_media_SoundPool_play\n");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return 0;
return (jint) ap->play(sampleID, leftVolume, rightVolume, priority, loop, rate);
}
static void
-android_media_SoundPool_SoundPoolImpl_pause(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_pause(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_pause");
+ ALOGV("android_media_SoundPool_pause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->pause(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_resume(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_resume(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_resume");
+ ALOGV("android_media_SoundPool_resume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->resume(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_autoPause(JNIEnv *env, jobject thiz)
+android_media_SoundPool_autoPause(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_autoPause");
+ ALOGV("android_media_SoundPool_autoPause");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoPause();
}
static void
-android_media_SoundPool_SoundPoolImpl_autoResume(JNIEnv *env, jobject thiz)
+android_media_SoundPool_autoResume(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_autoResume");
+ ALOGV("android_media_SoundPool_autoResume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->autoResume();
}
static void
-android_media_SoundPool_SoundPoolImpl_stop(JNIEnv *env, jobject thiz, jint channelID)
+android_media_SoundPool_stop(JNIEnv *env, jobject thiz, jint channelID)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_stop");
+ ALOGV("android_media_SoundPool_stop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->stop(channelID);
}
static void
-android_media_SoundPool_SoundPoolImpl_setVolume(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setVolume(JNIEnv *env, jobject thiz, jint channelID,
jfloat leftVolume, jfloat rightVolume)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setVolume");
+ ALOGV("android_media_SoundPool_setVolume");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setVolume(channelID, (float) leftVolume, (float) rightVolume);
}
static void
-android_media_SoundPool_SoundPoolImpl_setPriority(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setPriority(JNIEnv *env, jobject thiz, jint channelID,
jint priority)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setPriority");
+ ALOGV("android_media_SoundPool_setPriority");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setPriority(channelID, (int) priority);
}
static void
-android_media_SoundPool_SoundPoolImpl_setLoop(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setLoop(JNIEnv *env, jobject thiz, jint channelID,
int loop)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setLoop");
+ ALOGV("android_media_SoundPool_setLoop");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setLoop(channelID, loop);
}
static void
-android_media_SoundPool_SoundPoolImpl_setRate(JNIEnv *env, jobject thiz, jint channelID,
+android_media_SoundPool_setRate(JNIEnv *env, jobject thiz, jint channelID,
jfloat rate)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_setRate");
+ ALOGV("android_media_SoundPool_setRate");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap == NULL) return;
ap->setRate(channelID, (float) rate);
@@ -169,7 +169,7 @@ static void android_media_callback(SoundPoolEvent event, SoundPool* soundPool, v
}
static jint
-android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
+android_media_SoundPool_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
jint maxChannels, jobject jaa)
{
if (jaa == 0) {
@@ -191,7 +191,7 @@ android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jo
(audio_content_type_t) env->GetIntField(jaa, javaAudioAttrFields.fieldContentType);
paa->flags = env->GetIntField(jaa, javaAudioAttrFields.fieldFlags);
- ALOGV("android_media_SoundPool_SoundPoolImpl_native_setup");
+ ALOGV("android_media_SoundPool_native_setup");
SoundPool *ap = new SoundPool(maxChannels, paa);
if (ap == NULL) {
return -1;
@@ -211,9 +211,9 @@ android_media_SoundPool_SoundPoolImpl_native_setup(JNIEnv *env, jobject thiz, jo
}
static void
-android_media_SoundPool_SoundPoolImpl_release(JNIEnv *env, jobject thiz)
+android_media_SoundPool_release(JNIEnv *env, jobject thiz)
{
- ALOGV("android_media_SoundPool_SoundPoolImpl_release");
+ ALOGV("android_media_SoundPool_release");
SoundPool *ap = MusterSoundPool(env, thiz);
if (ap != NULL) {
@@ -236,63 +236,63 @@ android_media_SoundPool_SoundPoolImpl_release(JNIEnv *env, jobject thiz)
static JNINativeMethod gMethods[] = {
{ "_load",
"(Ljava/io/FileDescriptor;JJI)I",
- (void *)android_media_SoundPool_SoundPoolImpl_load_FD
+ (void *)android_media_SoundPool_load_FD
},
{ "unload",
"(I)Z",
- (void *)android_media_SoundPool_SoundPoolImpl_unload
+ (void *)android_media_SoundPool_unload
},
{ "_play",
"(IFFIIF)I",
- (void *)android_media_SoundPool_SoundPoolImpl_play
+ (void *)android_media_SoundPool_play
},
{ "pause",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_pause
+ (void *)android_media_SoundPool_pause
},
{ "resume",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_resume
+ (void *)android_media_SoundPool_resume
},
{ "autoPause",
"()V",
- (void *)android_media_SoundPool_SoundPoolImpl_autoPause
+ (void *)android_media_SoundPool_autoPause
},
{ "autoResume",
"()V",
- (void *)android_media_SoundPool_SoundPoolImpl_autoResume
+ (void *)android_media_SoundPool_autoResume
},
{ "stop",
"(I)V",
- (void *)android_media_SoundPool_SoundPoolImpl_stop
+ (void *)android_media_SoundPool_stop
},
{ "_setVolume",
"(IFF)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setVolume
+ (void *)android_media_SoundPool_setVolume
},
{ "setPriority",
"(II)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setPriority
+ (void *)android_media_SoundPool_setPriority
},
{ "setLoop",
"(II)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setLoop
+ (void *)android_media_SoundPool_setLoop
},
{ "setRate",
"(IF)V",
- (void *)android_media_SoundPool_SoundPoolImpl_setRate
+ (void *)android_media_SoundPool_setRate
},
{ "native_setup",
"(Ljava/lang/Object;ILjava/lang/Object;)I",
- (void*)android_media_SoundPool_SoundPoolImpl_native_setup
+ (void*)android_media_SoundPool_native_setup
},
{ "release",
"()V",
- (void*)android_media_SoundPool_SoundPoolImpl_release
+ (void*)android_media_SoundPool_release
}
};
-static const char* const kClassPathName = "android/media/SoundPool$SoundPoolImpl";
+static const char* const kClassPathName = "android/media/SoundPool";
jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
{
@@ -314,14 +314,14 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
fields.mNativeContext = env->GetFieldID(clazz, "mNativeContext", "J");
if (fields.mNativeContext == NULL) {
- ALOGE("Can't find SoundPoolImpl.mNativeContext");
+ ALOGE("Can't find SoundPool.mNativeContext");
return result;
}
fields.mPostEvent = env->GetStaticMethodID(clazz, "postEventFromNative",
"(Ljava/lang/Object;IIILjava/lang/Object;)V");
if (fields.mPostEvent == NULL) {
- ALOGE("Can't find android/media/SoundPoolImpl.postEventFromNative");
+ ALOGE("Can't find android/media/SoundPool.postEventFromNative");
return result;
}
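
[Editor's note, illustrative only — not part of the patch: the cached postEventFromNative static method ID looked up above is what native code later invokes to deliver load-complete events back to Java. A minimal sketch of such a call site with placeholder arguments; the real path also goes through a weak reference to the Java SoundPool instance.]

    #include <jni.h>

    void postEvent(JNIEnv* env, jclass soundPoolClass, jmethodID postEventFromNative,
                   jobject weakRef, jint what, jint arg1, jint arg2) {
        // Signature assumed: (Ljava/lang/Object;IIILjava/lang/Object;)V
        env->CallStaticVoidMethod(soundPoolClass, postEventFromNative,
                                  weakRef, what, arg1, arg2, (jobject)nullptr);
        if (env->ExceptionCheck()) {
            env->ExceptionClear();   // don't let a Java exception leak into native code
        }
    }
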