-rw-r--r--  core/jni/Android.mk                             |   5
-rw-r--r--  core/jni/AndroidRuntime.cpp                     |   2
-rw-r--r--  core/jni/android/graphics/YuvToJpegEncoder.cpp  | 252
-rw-r--r--  core/jni/android/graphics/YuvToJpegEncoder.h    |  74
-rw-r--r--  graphics/java/android/graphics/YuvImage.java    | 171
5 files changed, 503 insertions(+), 1 deletion(-)
diff --git a/core/jni/Android.mk b/core/jni/Android.mk
index 67a0bda..7fd58e8 100644
--- a/core/jni/Android.mk
+++ b/core/jni/Android.mk
@@ -99,6 +99,7 @@ LOCAL_SRC_FILES:= \
android/graphics/Shader.cpp \
android/graphics/Typeface.cpp \
android/graphics/Xfermode.cpp \
+ android/graphics/YuvToJpegEncoder.cpp \
android_media_AudioRecord.cpp \
android_media_AudioSystem.cpp \
android_media_AudioTrack.cpp \
@@ -148,6 +149,7 @@ LOCAL_C_INCLUDES += \
external/tremor/Tremor \
external/icu4c/i18n \
external/icu4c/common \
+ external/jpeg \
frameworks/opt/emoji
LOCAL_SHARED_LIBRARIES := \
@@ -175,7 +177,8 @@ LOCAL_SHARED_LIBRARIES := \
libicui18n \
libicudata \
libmedia \
- libwpa_client
+ libwpa_client \
+ libjpeg
ifeq ($(BOARD_HAVE_BLUETOOTH),true)
LOCAL_C_INCLUDES += \
diff --git a/core/jni/AndroidRuntime.cpp b/core/jni/AndroidRuntime.cpp
index 8364838..fa1ee0d 100644
--- a/core/jni/AndroidRuntime.cpp
+++ b/core/jni/AndroidRuntime.cpp
@@ -64,6 +64,7 @@ extern int register_android_graphics_PathEffect(JNIEnv* env);
extern int register_android_graphics_Region(JNIEnv* env);
extern int register_android_graphics_Shader(JNIEnv* env);
extern int register_android_graphics_Typeface(JNIEnv* env);
+extern int register_android_graphics_YuvImage(JNIEnv* env);
extern int register_com_google_android_gles_jni_EGLImpl(JNIEnv* env);
extern int register_com_google_android_gles_jni_GLImpl(JNIEnv* env);
@@ -1215,6 +1216,7 @@ static const RegJNIRec gRegJNI[] = {
REG_JNI(register_android_graphics_Shader),
REG_JNI(register_android_graphics_Typeface),
REG_JNI(register_android_graphics_Xfermode),
+ REG_JNI(register_android_graphics_YuvImage),
REG_JNI(register_com_android_internal_graphics_NativeUtils),
REG_JNI(register_android_database_CursorWindow),
diff --git a/core/jni/android/graphics/YuvToJpegEncoder.cpp b/core/jni/android/graphics/YuvToJpegEncoder.cpp
new file mode 100644
index 0000000..ef5c9ae
--- /dev/null
+++ b/core/jni/android/graphics/YuvToJpegEncoder.cpp
@@ -0,0 +1,252 @@
+#include "CreateJavaOutputStreamAdaptor.h"
+#include "SkJpegUtility.h"
+#include "YuvToJpegEncoder.h"
+#include "ui/PixelFormat.h"
+
+#include <jni.h>
+
+YuvToJpegEncoder* YuvToJpegEncoder::create(int format, int* strides) {
+    // Only PIXEL_FORMAT_YCbCr_420_SP and PIXEL_FORMAT_YCbCr_422_I are supported
+ // for now.
+ if (format == android::PIXEL_FORMAT_YCbCr_420_SP) {
+ return new Yuv420SpToJpegEncoder(strides);
+ } else if (format == android::PIXEL_FORMAT_YCbCr_422_I) {
+ return new Yuv422IToJpegEncoder(strides);
+ } else {
+ return NULL;
+ }
+}
+
+YuvToJpegEncoder::YuvToJpegEncoder(int* strides) : fStrides(strides) {
+}
+
+bool YuvToJpegEncoder::encode(SkWStream* stream, void* inYuv, int width,
+ int height, int* offsets, int jpegQuality) {
+ jpeg_compress_struct cinfo;
+ skjpeg_error_mgr sk_err;
+ skjpeg_destination_mgr sk_wstream(stream);
+
+ cinfo.err = jpeg_std_error(&sk_err);
+ sk_err.error_exit = skjpeg_error_exit;
+ if (setjmp(sk_err.fJmpBuf)) {
+ return false;
+ }
+ jpeg_create_compress(&cinfo);
+
+ cinfo.dest = &sk_wstream;
+
+ setJpegCompressStruct(&cinfo, width, height, jpegQuality);
+
+ jpeg_start_compress(&cinfo, TRUE);
+
+ compress(&cinfo, (uint8_t*) inYuv, offsets);
+
+ jpeg_finish_compress(&cinfo);
+
+ return true;
+}
+
+void YuvToJpegEncoder::setJpegCompressStruct(jpeg_compress_struct* cinfo,
+ int width, int height, int quality) {
+ jpeg_set_quality(cinfo, quality, TRUE);
+
+ cinfo->image_width = width;
+ cinfo->image_height = height;
+
+ cinfo->input_components = 3;
+ cinfo->in_color_space = JCS_YCbCr;
+ jpeg_set_defaults(cinfo);
+ jpeg_set_colorspace(cinfo, JCS_YCbCr);
+ cinfo->raw_data_in = TRUE;
+
+ cinfo->dct_method = JDCT_IFAST;
+
+ configSamplingFactors(cinfo);
+}
+
+///////////////////////////////////////////////////////////////////
+Yuv420SpToJpegEncoder::Yuv420SpToJpegEncoder(int* strides) :
+ YuvToJpegEncoder(strides) {
+ fNumPlanes = 2;
+}
+
+void Yuv420SpToJpegEncoder::compress(jpeg_compress_struct* cinfo,
+ uint8_t* yuv, int* offsets) {
+ SkDebugf("onFlyCompress");
+ JSAMPROW y[16];
+ JSAMPROW cb[8];
+ JSAMPROW cr[8];
+ JSAMPARRAY planes[3];
+ planes[0] = y;
+ planes[1] = cb;
+ planes[2] = cr;
+
+ int width = cinfo->image_width;
+ int height = cinfo->image_height;
+ uint8_t* yPlanar = yuv + offsets[0];
+    uint8_t* vuPlanar = yuv + offsets[1];  // for NV21, typically yuv + width * height
+ uint8_t* uRows = new uint8_t [8 * (width >> 1)];
+ uint8_t* vRows = new uint8_t [8 * (width >> 1)];
+
+
+ // process 16 lines of Y and 8 lines of U/V each time.
+ while (cinfo->next_scanline < cinfo->image_height) {
+        // deinterleave u and v
+ deinterleave(vuPlanar, uRows, vRows, cinfo->next_scanline, width);
+
+ for (int i = 0; i < 16; i++) {
+ // y row
+ y[i] = yPlanar + (cinfo->next_scanline + i) * fStrides[0];
+
+ // construct u row and v row
+ if ((i & 1) == 0) {
+ // height and width are both halved because of downsampling
+ int offset = (i >> 1) * (width >> 1);
+ cb[i/2] = uRows + offset;
+ cr[i/2] = vRows + offset;
+ }
+ }
+ jpeg_write_raw_data(cinfo, planes, 16);
+ }
+ delete [] uRows;
+ delete [] vRows;
+
+}
+
+void Yuv420SpToJpegEncoder::deinterleave(uint8_t* vuPlanar, uint8_t* uRows,
+ uint8_t* vRows, int rowIndex, int width) {
+ for (int row = 0; row < 8; ++row) {
+ int offset = ((rowIndex >> 1) + row) * fStrides[1];
+ uint8_t* vu = vuPlanar + offset;
+ for (int i = 0; i < (width >> 1); ++i) {
+ int index = row * (width >> 1) + i;
+ uRows[index] = vu[1];
+ vRows[index] = vu[0];
+ vu += 2;
+ }
+ }
+}
+
+void Yuv420SpToJpegEncoder::configSamplingFactors(jpeg_compress_struct* cinfo) {
+ // cb and cr are horizontally downsampled and vertically downsampled as well.
+ cinfo->comp_info[0].h_samp_factor = 2;
+ cinfo->comp_info[0].v_samp_factor = 2;
+ cinfo->comp_info[1].h_samp_factor = 1;
+ cinfo->comp_info[1].v_samp_factor = 1;
+ cinfo->comp_info[2].h_samp_factor = 1;
+ cinfo->comp_info[2].v_samp_factor = 1;
+}
+
+///////////////////////////////////////////////////////////////////////////////
+Yuv422IToJpegEncoder::Yuv422IToJpegEncoder(int* strides) :
+ YuvToJpegEncoder(strides) {
+ fNumPlanes = 1;
+}
+
+void Yuv422IToJpegEncoder::compress(jpeg_compress_struct* cinfo,
+ uint8_t* yuv, int* offsets) {
+ SkDebugf("onFlyCompress_422");
+ JSAMPROW y[16];
+ JSAMPROW cb[16];
+ JSAMPROW cr[16];
+ JSAMPARRAY planes[3];
+ planes[0] = y;
+ planes[1] = cb;
+ planes[2] = cr;
+
+ int width = cinfo->image_width;
+ int height = cinfo->image_height;
+ uint8_t* yRows = new uint8_t [16 * width];
+ uint8_t* uRows = new uint8_t [16 * (width >> 1)];
+ uint8_t* vRows = new uint8_t [16 * (width >> 1)];
+
+ uint8_t* yuvOffset = yuv + offsets[0];
+
+ // process 16 lines of Y and 16 lines of U/V each time.
+ while (cinfo->next_scanline < cinfo->image_height) {
+ deinterleave(yuvOffset, yRows, uRows, vRows, cinfo->next_scanline, width, height);
+
+ for (int i = 0; i < 16; i++) {
+ // y row
+ y[i] = yRows + i * width;
+
+ // construct u row and v row
+ // width is halved because of downsampling
+ int offset = i * (width >> 1);
+ cb[i] = uRows + offset;
+ cr[i] = vRows + offset;
+ }
+
+ jpeg_write_raw_data(cinfo, planes, 16);
+ }
+ delete [] yRows;
+ delete [] uRows;
+ delete [] vRows;
+}
+
+
+void Yuv422IToJpegEncoder::deinterleave(uint8_t* yuv, uint8_t* yRows, uint8_t* uRows,
+ uint8_t* vRows, int rowIndex, int width, int height) {
+ for (int row = 0; row < 16; ++row) {
+ uint8_t* yuvSeg = yuv + (rowIndex + row) * fStrides[0];
+ for (int i = 0; i < (width >> 1); ++i) {
+ int indexY = row * width + (i << 1);
+ int indexU = row * (width >> 1) + i;
+ yRows[indexY] = yuvSeg[0];
+ yRows[indexY + 1] = yuvSeg[2];
+ uRows[indexU] = yuvSeg[1];
+ vRows[indexU] = yuvSeg[3];
+ yuvSeg += 4;
+ }
+ }
+}
+
+void Yuv422IToJpegEncoder::configSamplingFactors(jpeg_compress_struct* cinfo) {
+    // For 4:2:2, cb and cr are downsampled horizontally but not vertically.
+ cinfo->comp_info[0].h_samp_factor = 2;
+ cinfo->comp_info[0].v_samp_factor = 2;
+ cinfo->comp_info[1].h_samp_factor = 1;
+ cinfo->comp_info[1].v_samp_factor = 2;
+ cinfo->comp_info[2].h_samp_factor = 1;
+ cinfo->comp_info[2].v_samp_factor = 2;
+}
+///////////////////////////////////////////////////////////////////////////////
+
+static jboolean YuvImage_compressToJpeg(JNIEnv* env, jobject, jbyteArray inYuv,
+ int format, int width, int height, jintArray offsets,
+ jintArray strides, int jpegQuality, jobject jstream,
+ jbyteArray jstorage) {
+ jbyte* yuv = env->GetByteArrayElements(inYuv, NULL);
+ SkWStream* strm = CreateJavaOutputStreamAdaptor(env, jstream, jstorage);
+
+ jint* imgOffsets = env->GetIntArrayElements(offsets, NULL);
+ jint* imgStrides = env->GetIntArrayElements(strides, NULL);
+ YuvToJpegEncoder* encoder = YuvToJpegEncoder::create(format, imgStrides);
+ if (encoder == NULL) {
+ return false;
+ }
+    bool result = encoder->encode(strm, yuv, width, height, imgOffsets, jpegQuality);
+
+ delete encoder;
+ env->ReleaseByteArrayElements(inYuv, yuv, 0);
+ env->ReleaseIntArrayElements(offsets, imgOffsets, 0);
+ env->ReleaseIntArrayElements(strides, imgStrides, 0);
+    return result;
+}
+///////////////////////////////////////////////////////////////////////////////
+
+#include <android_runtime/AndroidRuntime.h>
+
+static JNINativeMethod gYuvImageMethods[] = {
+ { "nativeCompressToJpeg", "([BIII[I[IILjava/io/OutputStream;[B)Z",
+ (void*)YuvImage_compressToJpeg }
+};
+
+#define kClassPathName "android/graphics/YuvImage"
+
+int register_android_graphics_YuvImage(JNIEnv* env);
+int register_android_graphics_YuvImage(JNIEnv* env)
+{
+ return android::AndroidRuntime::registerNativeMethods(env, kClassPathName,
+ gYuvImageMethods, SK_ARRAY_COUNT(gYuvImageMethods));
+}
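
For reference, a minimal sketch of how native code could drive this encoder directly, without going through the JNI entry point above. It assumes Skia's SkDynamicMemoryWStream is available alongside the android::PIXEL_FORMAT_YCbCr_420_SP constant already used by the factory, and an NV21 buffer layout (a full-resolution Y plane followed by an interleaved VU plane); the helper name is illustrative and not part of this change.

#include "YuvToJpegEncoder.h"
#include "SkStream.h"
#include "ui/PixelFormat.h"

// Hypothetical helper: compress an NV21 (YCbCr_420_SP) frame into an in-memory JPEG.
static bool compressNv21ToJpeg(uint8_t* nv21, int width, int height,
                               int quality, SkDynamicMemoryWStream* out) {
    // NV21: width*height bytes of Y, followed by an interleaved VU plane with
    // the same row stride as the Y plane.
    int strides[2] = { width, width };
    int offsets[2] = { 0, width * height };

    YuvToJpegEncoder* encoder =
            YuvToJpegEncoder::create(android::PIXEL_FORMAT_YCbCr_420_SP, strides);
    if (encoder == NULL) {
        return false;
    }
    bool ok = encoder->encode(out, nv21, width, height, offsets, quality);
    delete encoder;
    return ok;
}

The strides and offsets mirror what the Java layer passes down for a full YCbCr_420_SP frame; the encoder never takes ownership of the input buffer.
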
diff --git a/core/jni/android/graphics/YuvToJpegEncoder.h b/core/jni/android/graphics/YuvToJpegEncoder.h
new file mode 100644
index 0000000..97106ce
--- /dev/null
+++ b/core/jni/android/graphics/YuvToJpegEncoder.h
@@ -0,0 +1,74 @@
+#ifndef YuvToJpegEncoder_DEFINED
+#define YuvToJpegEncoder_DEFINED
+
+#include "SkTypes.h"
+#include "SkStream.h"
+extern "C" {
+ #include "jpeglib.h"
+ #include "jerror.h"
+}
+
+class YuvToJpegEncoder {
+public:
+ /** Create an encoder based on the YUV format.
+ *
+ * @param pixelFormat The yuv pixel format as defined in ui/PixelFormat.h.
+ * @param strides The number of row bytes in each image plane.
+ * @return an encoder based on the pixelFormat.
+ */
+ static YuvToJpegEncoder* create(int pixelFormat, int* strides);
+
+ YuvToJpegEncoder(int* strides);
+
+ /** Encode YUV data to jpeg, which is output to a stream.
+ *
+ * @param stream The jpeg output stream.
+ * @param inYuv The input yuv data.
+     * @param width Width of the Yuv data in pixels.
+     * @param height Height of the Yuv data in pixels.
+ * @param offsets The offsets in each image plane with respect to inYuv.
+ * @param jpegQuality Picture quality in [0, 100].
+     * @return true if the YUV data was successfully compressed to the stream.
+ */
+ bool encode(SkWStream* stream, void* inYuv, int width,
+ int height, int* offsets, int jpegQuality);
+
+ virtual ~YuvToJpegEncoder() {}
+
+protected:
+ int fNumPlanes;
+ int* fStrides;
+ void setJpegCompressStruct(jpeg_compress_struct* cinfo, int width,
+ int height, int quality);
+ virtual void configSamplingFactors(jpeg_compress_struct* cinfo) = 0;
+ virtual void compress(jpeg_compress_struct* cinfo,
+ uint8_t* yuv, int* offsets) = 0;
+};
+
+class Yuv420SpToJpegEncoder : public YuvToJpegEncoder {
+public:
+ Yuv420SpToJpegEncoder(int* strides);
+ virtual ~Yuv420SpToJpegEncoder() {}
+
+private:
+ void configSamplingFactors(jpeg_compress_struct* cinfo);
+ void deinterleaveYuv(uint8_t* yuv, int width, int height,
+ uint8_t*& yPlanar, uint8_t*& uPlanar, uint8_t*& vPlanar);
+ void deinterleave(uint8_t* vuPlanar, uint8_t* uRows, uint8_t* vRows,
+ int rowIndex, int width);
+ void compress(jpeg_compress_struct* cinfo, uint8_t* yuv, int* offsets);
+};
+
+class Yuv422IToJpegEncoder : public YuvToJpegEncoder {
+public:
+ Yuv422IToJpegEncoder(int* strides);
+ virtual ~Yuv422IToJpegEncoder() {}
+
+private:
+ void configSamplingFactors(jpeg_compress_struct* cinfo);
+ void compress(jpeg_compress_struct* cinfo, uint8_t* yuv, int* offsets);
+ void deinterleave(uint8_t* yuv, uint8_t* yRows, uint8_t* uRows,
+ uint8_t* vRows, int rowIndex, int width, int height);
+};
+
+#endif
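
The two pure virtuals, configSamplingFactors() and compress(), are the extension points for supporting additional YUV layouts. As an illustration only (not part of this change), a hypothetical subclass for a fully planar 4:2:0 layout (I420-style: separate Y, U and V planes, so three strides and three offsets) could look like this:

// Hypothetical encoder for planar YUV 4:2:0 (I420). Illustrative sketch only;
// the factory in YuvToJpegEncoder::create() knows nothing about this class.
class Yuv420PToJpegEncoder : public YuvToJpegEncoder {
public:
    Yuv420PToJpegEncoder(int* strides) : YuvToJpegEncoder(strides) {
        fNumPlanes = 3;
    }
    virtual ~Yuv420PToJpegEncoder() {}

private:
    virtual void configSamplingFactors(jpeg_compress_struct* cinfo) {
        // Same 4:2:0 sampling as the semi-planar case: chroma halved both
        // horizontally and vertically relative to luma.
        cinfo->comp_info[0].h_samp_factor = 2;
        cinfo->comp_info[0].v_samp_factor = 2;
        cinfo->comp_info[1].h_samp_factor = 1;
        cinfo->comp_info[1].v_samp_factor = 1;
        cinfo->comp_info[2].h_samp_factor = 1;
        cinfo->comp_info[2].v_samp_factor = 1;
    }

    virtual void compress(jpeg_compress_struct* cinfo, uint8_t* yuv, int* offsets) {
        JSAMPROW y[16];
        JSAMPROW cb[8];
        JSAMPROW cr[8];
        JSAMPARRAY planes[3] = { y, cb, cr };

        uint8_t* yPlane = yuv + offsets[0];
        uint8_t* uPlane = yuv + offsets[1];
        uint8_t* vPlane = yuv + offsets[2];

        // Raw-data input for 4:2:0 consumes 16 luma rows and 8 chroma rows per
        // call (v_samp_factor * DCTSIZE), just like Yuv420SpToJpegEncoder, but
        // the planes are already separate so no deinterleaving is needed.
        while (cinfo->next_scanline < cinfo->image_height) {
            for (int i = 0; i < 16; i++) {
                y[i] = yPlane + (cinfo->next_scanline + i) * fStrides[0];
                if ((i & 1) == 0) {
                    int chromaRow = (cinfo->next_scanline + i) >> 1;
                    cb[i >> 1] = uPlane + chromaRow * fStrides[1];
                    cr[i >> 1] = vPlane + chromaRow * fStrides[2];
                }
            }
            jpeg_write_raw_data(cinfo, planes, 16);
        }
    }
};

Like the in-tree encoders, this sketch assumes the image height is a multiple of 16 rows.
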
diff --git a/graphics/java/android/graphics/YuvImage.java b/graphics/java/android/graphics/YuvImage.java
new file mode 100644
index 0000000..4a3bd47
--- /dev/null
+++ b/graphics/java/android/graphics/YuvImage.java
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.graphics;
+
+import java.io.OutputStream;
+
+/**
+ * @hide pending API council approval
+ *
+ * YuvImage contains YUV data and provides a method that compresses a region
+ * of the YUV data to a JPEG. The YUV data should be provided as a single byte
+ * array, irrespective of the number of image planes in it. The stride of each
+ * image plane should be provided as well.
+ *
+ * To compress a rectangular region of the YUV data, callers specify the
+ * region by its width, height and offsets, where each image plane has a
+ * corresponding offset. All offsets are measured as a displacement in bytes
+ * from yuv[0], the beginning of the YUV data.
+ */
+public class YuvImage {
+
+ /**
+ * Number of bytes of temp storage we use for communicating between the
+ * native compressor and the java OutputStream.
+ */
+ private final static int WORKING_COMPRESS_STORAGE = 4096;
+
+ /**
+ * The YUV format as defined in {@link PixelFormat}.
+ */
+ private int mFormat;
+
+ /**
+ * The raw YUV data.
+ * In the case of more than one image plane, the image planes must be
+ * concatenated into a single byte array.
+ */
+ private byte[] mData;
+
+ /**
+ * The number of row bytes in each image plane.
+ */
+ private int[] mStrides;
+
+ /**
+     * Construct a YuvImage.
+ *
+ * @param yuv The YUV data. In the case of more than one image plane, all the planes must be
+ * concatenated into a single byte array.
+ * @param format The YUV data format as defined in {@link PixelFormat}.
+ * @param strides Row bytes of each image plane.
+ */
+ public YuvImage(byte[] yuv, int format, int[] strides) {
+ if ((yuv == null) || (strides == null)) {
+ throw new IllegalArgumentException(
+ "yuv or strides cannot be null");
+ }
+ mData = yuv;
+ mFormat = format;
+ mStrides = strides;
+ }
+
+ /**
+     * Compress a rectangular region of the YuvImage to a JPEG.
+     * Only PixelFormat.YCbCr_420_SP and PixelFormat.YCbCr_422_I
+     * are supported for now.
+ *
+ * @param width The width of the rectangle region.
+ * @param height The height of the rectangle region.
+ * @param offsets The offsets of the rectangle region in each image plane.
+ * The offsets are measured as a displacement in bytes from
+ * yuv[0], where yuv[0] is the beginning of the yuv data.
+ * @param quality Hint to the compressor, 0-100. 0 meaning compress for
+ * small size, 100 meaning compress for max quality.
+     * @param stream The OutputStream to which the compressed data is written.
+ *
+ * @return true if successfully compressed to the specified stream.
+ *
+ */
+ public boolean compressToJpeg(int width, int height, int[] offsets, int quality,
+ OutputStream stream) {
+ if (!validate(mFormat, width, height, offsets)) {
+ return false;
+ }
+
+ if (quality < 0 || quality > 100) {
+ throw new IllegalArgumentException("quality must be 0..100");
+ }
+
+ if (stream == null) {
+ throw new NullPointerException();
+ }
+
+ return nativeCompressToJpeg(mData, mFormat, width, height, offsets,
+ mStrides, quality, stream, new byte[WORKING_COMPRESS_STORAGE]);
+ }
+
+ /**
+ * @return the YUV data.
+ */
+ public byte[] getYuvData() {
+ return mData;
+ }
+
+ /**
+ * @return the YUV format as defined in {@link PixelFormat}.
+ */
+ public int getYuvFormat() {
+ return mFormat;
+ }
+
+ /**
+ * @return the number of row bytes in each image plane.
+ */
+ public int[] getStrides() {
+ return mStrides;
+ }
+
+ protected boolean validate(int format, int width, int height, int[] offsets) {
+ if (format != PixelFormat.YCbCr_420_SP &&
+ format != PixelFormat.YCbCr_422_I) {
+ throw new IllegalArgumentException(
+                    "only PixelFormat.YCbCr_420_SP " +
+                    "and PixelFormat.YCbCr_422_I are supported for now");
+ }
+
+ if (offsets.length != mStrides.length) {
+ throw new IllegalArgumentException(
+                    "the number of image planes is mismatched");
+ }
+
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException(
+                    "width and height must be larger than 0");
+ }
+
+ int requiredSize;
+ if (format == PixelFormat.YCbCr_420_SP) {
+            requiredSize = height * mStrides[0] + (height >> 1) * mStrides[1];
+ } else {
+ requiredSize = height * mStrides[0];
+ }
+
+ if (requiredSize > mData.length) {
+ throw new IllegalArgumentException(
+                    "width and/or height is larger than the yuv data");
+ }
+
+ return true;
+ }
+
+ //////////// native methods
+
+ private static native boolean nativeCompressToJpeg(byte[] oriYuv,
+ int format, int width, int height, int[] offsets, int[] strides,
+ int quality, OutputStream stream, byte[] tempStorage);
+}
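
On the Java side, the expected producer is the camera preview callback, which delivers NV21 (PixelFormat.YCbCr_420_SP) frames. A minimal usage sketch, assuming a full-frame NV21 buffer whose Y plane is followed by an interleaved VU plane and whose row stride equals the frame width; the class and method names are illustrative, not part of this change.

import android.graphics.PixelFormat;
import android.graphics.YuvImage;

import java.io.ByteArrayOutputStream;

public class Nv21JpegCompressor {
    /** Hypothetical helper: compress a full NV21 preview frame to a JPEG byte array. */
    public static byte[] compress(byte[] nv21, int width, int height, int quality) {
        // NV21 layout: width*height bytes of Y, then an interleaved VU plane.
        int[] strides = new int[] { width, width };
        int[] offsets = new int[] { 0, width * height };

        YuvImage image = new YuvImage(nv21, PixelFormat.YCbCr_420_SP, strides);
        ByteArrayOutputStream jpeg = new ByteArrayOutputStream();
        if (!image.compressToJpeg(width, height, offsets, quality, jpeg)) {
            return null;
        }
        return jpeg.toByteArray();
    }
}

The offsets above select the whole frame; to compress a sub-region, pass the region's width and height together with offsets pointing at the region's top-left byte in each plane. Since YuvImage is still @hide in this change, such a caller would have to live inside the framework until the API is approved.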