-rw-r--r--  api/current.txt | 104
-rw-r--r--  core/java/android/bluetooth/IBluetoothGatt.aidl | 2
-rw-r--r--  core/java/android/hardware/camera2/CameraCharacteristics.java | 186
-rw-r--r--  core/java/android/hardware/camera2/CameraDevice.java | 23
-rw-r--r--  core/java/android/hardware/camera2/CameraMetadata.java | 14
-rw-r--r--  core/java/android/hardware/camera2/CaptureRequest.java | 22
-rw-r--r--  core/java/android/hardware/camera2/CaptureResult.java | 51
-rw-r--r--  core/java/android/hardware/camera2/ICameraDeviceUser.aidl | 2
-rw-r--r--  core/java/android/hardware/camera2/StreamConfigurationMap.java | 508
-rw-r--r--  core/java/android/hardware/camera2/impl/CameraDevice.java | 2
-rw-r--r--  core/java/android/hardware/camera2/impl/CameraMetadataNative.java | 108
-rw-r--r--  core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java | 8
-rw-r--r--  core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java | 2
-rw-r--r--  core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java | 2
-rw-r--r--  core/java/android/hardware/camera2/params/ReprocessFormatsMap.java (renamed from core/java/android/hardware/camera2/ReprocessFormatsMap.java) | 24
-rw-r--r--  core/java/android/hardware/camera2/params/StreamConfiguration.java (renamed from core/java/android/hardware/camera2/StreamConfiguration.java) | 10
-rw-r--r--  core/java/android/hardware/camera2/params/StreamConfigurationDuration.java (renamed from core/java/android/hardware/camera2/StreamConfigurationDuration.java) | 9
-rw-r--r--  core/java/android/hardware/camera2/params/StreamConfigurationMap.java | 949
-rw-r--r--  core/java/android/hardware/camera2/utils/LongParcelable.aidl (renamed from core/java/android/hardware/camera2/LongParcelable.aidl) | 4
-rw-r--r--  core/java/android/hardware/camera2/utils/LongParcelable.java (renamed from core/java/android/hardware/camera2/LongParcelable.java) | 2
-rw-r--r--  core/java/android/view/RenderNodeAnimator.java | 24
-rw-r--r--  core/java/android/view/View.java | 3
-rw-r--r--  core/java/android/view/accessibility/AccessibilityNodeInfo.java | 639
-rw-r--r--  core/jni/Android.mk | 6
-rw-r--r--  core/jni/AndroidRuntime.cpp | 2
-rw-r--r--  core/jni/android/graphics/Matrix.cpp | 8
-rw-r--r--  core/jni/android/graphics/pdf/PdfRenderer.cpp | 279
-rw-r--r--  core/jni/android_view_GLES20Canvas.cpp | 4
-rw-r--r--  core/jni/android_view_RenderNode.cpp | 32
-rw-r--r--  core/jni/android_view_RenderNodeAnimator.cpp | 38
-rw-r--r--  core/jni/android_view_ThreadedRenderer.cpp | 6
-rw-r--r--  core/res/res/values-watch/config.xml | 3
-rw-r--r--  graphics/java/android/graphics/ImageFormat.java | 1
-rw-r--r--  graphics/java/android/graphics/LayerRasterizer.java | 3
-rw-r--r--  graphics/java/android/graphics/Matrix.java | 11
-rw-r--r--  graphics/java/android/graphics/Paint.java | 10
-rw-r--r--  graphics/java/android/graphics/Rasterizer.java | 1
-rw-r--r--  graphics/java/android/graphics/pdf/PdfDocument.java | 4
-rw-r--r--  graphics/java/android/graphics/pdf/PdfRenderer.java | 391
-rw-r--r--  libs/hwui/Animator.cpp | 145
-rw-r--r--  libs/hwui/Animator.h | 102
-rw-r--r--  libs/hwui/RenderNode.cpp | 34
-rw-r--r--  libs/hwui/RenderNode.h | 28
-rw-r--r--  libs/hwui/TreeInfo.h | 4
-rw-r--r--  media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java | 375
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/ic_qs_location_off.png | bin 1729 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable-hdpi/ic_qs_location_on.png | bin 1354 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/ic_qs_location_off.png | bin 1158 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable-mdpi/ic_qs_location_on.png | bin 937 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable-xhdpi/ic_qs_location_off.png | bin 2443 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable-xhdpi/ic_qs_location_on.png | bin 1768 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_off.png | bin 3202 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_on.png | bin 2441 -> 0 bytes
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_01.xml | 28
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_02.xml | 28
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_03.xml | 28
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_04.xml | 28
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_05.xml | 28
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_06.xml | 33
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_07.xml | 30
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_08.xml | 30
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_09.xml | 30
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_10.xml | 30
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_24_11.xml | 30
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_off_anim.xml | 31
-rw-r--r--  packages/SystemUI/res/drawable/ic_location_on_anim.xml | 31
-rw-r--r--  packages/SystemUI/src/com/android/systemui/qs/tiles/LocationTile.java | 22
-rw-r--r--  policy/src/com/android/internal/policy/impl/GlobalActions.java | 19
-rw-r--r--  services/core/java/com/android/server/notification/ValidateNotificationPeople.java | 112
-rw-r--r--  services/core/java/com/android/server/wm/ViewServer.java | 2
-rw-r--r--  tests/HwAccelerationTest/src/com/android/test/hwui/CirclePropActivity.java | 11
-rw-r--r--  tests/RenderThreadTest/src/com/example/renderthread/MainActivity.java | 9
-rw-r--r--  tools/aapt/Command.cpp | 45
73 files changed, 3648 insertions, 1142 deletions
diff --git a/api/current.txt b/api/current.txt
index 7efafb8..b7fc562 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -10092,7 +10092,7 @@ package android.graphics {
enum_constant public static final android.graphics.Interpolator.Result NORMAL;
}
- public class LayerRasterizer extends android.graphics.Rasterizer {
+ public deprecated class LayerRasterizer extends android.graphics.Rasterizer {
ctor public LayerRasterizer();
method public void addLayer(android.graphics.Paint, float, float);
method public void addLayer(android.graphics.Paint);
@@ -10120,6 +10120,7 @@ package android.graphics {
ctor public Matrix(android.graphics.Matrix);
method public void getValues(float[]);
method public boolean invert(android.graphics.Matrix);
+ method public boolean isAffine();
method public boolean isIdentity();
method public void mapPoints(float[], int, float[], int, int);
method public void mapPoints(float[], float[]);
@@ -10251,7 +10252,7 @@ package android.graphics {
method public int getHinting();
method public android.graphics.MaskFilter getMaskFilter();
method public android.graphics.PathEffect getPathEffect();
- method public android.graphics.Rasterizer getRasterizer();
+ method public deprecated android.graphics.Rasterizer getRasterizer();
method public android.graphics.Shader getShader();
method public android.graphics.Paint.Cap getStrokeCap();
method public android.graphics.Paint.Join getStrokeJoin();
@@ -10302,7 +10303,7 @@ package android.graphics {
method public void setLinearText(boolean);
method public android.graphics.MaskFilter setMaskFilter(android.graphics.MaskFilter);
method public android.graphics.PathEffect setPathEffect(android.graphics.PathEffect);
- method public android.graphics.Rasterizer setRasterizer(android.graphics.Rasterizer);
+ method public deprecated android.graphics.Rasterizer setRasterizer(android.graphics.Rasterizer);
method public android.graphics.Shader setShader(android.graphics.Shader);
method public void setShadowLayer(float, float, float, int);
method public void setStrikeThruText(boolean);
@@ -10611,7 +10612,7 @@ package android.graphics {
ctor public RadialGradient(float, float, float, int, int, android.graphics.Shader.TileMode);
}
- public class Rasterizer {
+ public deprecated class Rasterizer {
ctor public Rasterizer();
}
@@ -11359,6 +11360,24 @@ package android.graphics.pdf {
method public android.graphics.pdf.PdfDocument.PageInfo.Builder setContentRect(android.graphics.Rect);
}
+ public final class PdfRenderer implements java.lang.AutoCloseable {
+ ctor public PdfRenderer(android.os.ParcelFileDescriptor) throws java.io.IOException;
+ method public void close();
+ method public void closePage(android.graphics.pdf.PdfRenderer.Page);
+ method public int getPageCount();
+ method public android.graphics.pdf.PdfRenderer.Page openPage(int);
+ method public boolean shouldScaleForPrinting();
+ }
+
+ public final class PdfRenderer.Page {
+ method public int getHeight();
+ method public int getIndex();
+ method public int getWidth();
+ method public void render(android.graphics.Bitmap, android.graphics.Rect, android.graphics.Matrix, int);
+ field public static final int RENDER_MODE_FOR_DISPLAY = 1; // 0x1
+ field public static final int RENDER_MODE_FOR_PRINT = 2; // 0x2
+ }
+
}
package android.hardware {
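
A minimal usage sketch of the new PdfRenderer API listed above (not part of this patch; the file handling and the null clip/transform arguments passed to render() are assumptions):

import android.graphics.Bitmap;
import android.graphics.pdf.PdfRenderer;
import android.os.ParcelFileDescriptor;

import java.io.File;
import java.io.IOException;

public class PdfFirstPage {
    public static Bitmap render(File pdfFile) throws IOException {
        ParcelFileDescriptor fd = ParcelFileDescriptor.open(
                pdfFile, ParcelFileDescriptor.MODE_READ_ONLY);
        PdfRenderer renderer = new PdfRenderer(fd);
        try {
            PdfRenderer.Page page = renderer.openPage(0);
            Bitmap bitmap = Bitmap.createBitmap(
                    page.getWidth(), page.getHeight(), Bitmap.Config.ARGB_8888);
            // Passing null for the destination clip and transform is assumed to
            // render the whole page at its reported size.
            page.render(bitmap, null, null, PdfRenderer.Page.RENDER_MODE_FOR_DISPLAY);
            renderer.closePage(page);
            return bitmap;
        } finally {
            renderer.close();
        }
    }
}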
@@ -11862,16 +11881,8 @@ package android.hardware.camera2 {
field public static final android.hardware.camera2.CameraMetadata.Key REQUEST_MAX_NUM_OUTPUT_STREAMS;
field public static final android.hardware.camera2.CameraMetadata.Key REQUEST_PARTIAL_RESULT_COUNT;
field public static final android.hardware.camera2.CameraMetadata.Key REQUEST_PIPELINE_MAX_DEPTH;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_FORMATS;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_JPEG_MIN_DURATIONS;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_JPEG_SIZES;
field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_MAX_DIGITAL_ZOOM;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_PROCESSED_SIZES;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_STALL_DURATIONS;
- field public static final android.hardware.camera2.CameraMetadata.Key SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+ field public static final android.hardware.camera2.CameraMetadata.Key SCALER_STREAM_CONFIGURATION_MAP;
field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_AVAILABLE_TEST_PATTERN_MODES;
field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_BASE_GAIN_FACTOR;
field public static final android.hardware.camera2.CameraMetadata.Key SENSOR_BLACK_LEVEL_PATTERN;
@@ -12081,8 +12092,6 @@ package android.hardware.camera2 {
field public static final int REQUEST_AVAILABLE_CAPABILITIES_DNG = 5; // 0x5
field public static final int REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR = 2; // 0x2
field public static final int REQUEST_AVAILABLE_CAPABILITIES_ZSL = 4; // 0x4
- field public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT = 1; // 0x1
- field public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0; // 0x0
field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3; // 0x3
field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG = 2; // 0x2
field public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG = 1; // 0x1
@@ -12363,6 +12372,22 @@ package android.hardware.camera2 {
method public final int getWidth();
}
+ public final class TonemapCurve {
+ method public void copyColorCurve(int, float[], int);
+ method public android.graphics.PointF getPoint(int, int);
+ method public int getPointCount(int);
+ field public static final int CHANNEL_BLUE = 2; // 0x2
+ field public static final int CHANNEL_GREEN = 1; // 0x1
+ field public static final int CHANNEL_RED = 0; // 0x0
+ field public static final float LEVEL_BLACK = 0.0f;
+ field public static final float LEVEL_WHITE = 1.0f;
+ field public static final int POINT_SIZE = 2; // 0x2
+ }
+
+}
+
+package android.hardware.camera2.params {
+
public final class StreamConfigurationMap {
method public final int[] getOutputFormats();
method public long getOutputMinFrameDuration(int, android.util.Size);
@@ -12376,18 +12401,6 @@ package android.hardware.camera2 {
method public boolean isOutputSupportedFor(android.view.Surface);
}
- public final class TonemapCurve {
- method public void copyColorCurve(int, float[], int);
- method public android.graphics.PointF getPoint(int, int);
- method public int getPointCount(int);
- field public static final int CHANNEL_BLUE = 2; // 0x2
- field public static final int CHANNEL_GREEN = 1; // 0x1
- field public static final int CHANNEL_RED = 0; // 0x0
- field public static final float LEVEL_BLACK = 0.0f;
- field public static final float LEVEL_WHITE = 1.0f;
- field public static final int POINT_SIZE = 2; // 0x2
- }
-
}
package android.hardware.display {
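
The relocated android.hardware.camera2.params.StreamConfigurationMap above keeps the format/surface query surface. A small sketch, assuming the int overload of isOutputSupportedFor carries over from the old class shown at the end of this diff; the YUV format choice is arbitrary:

import android.graphics.ImageFormat;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.view.Surface;

public class FormatChecks {
    public static boolean supportsYuvAndSurface(StreamConfigurationMap map, Surface surface) {
        // Both checks correspond to methods listed in the api diff above.
        return map.isOutputSupportedFor(ImageFormat.YUV_420_888)
                && map.isOutputSupportedFor(surface);
    }
}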
@@ -32970,7 +32983,8 @@ package android.view.accessibility {
}
public class AccessibilityNodeInfo implements android.os.Parcelable {
- method public void addAction(int);
+ method public void addAction(android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction);
+ method public deprecated void addAction(int);
method public void addChild(android.view.View);
method public void addChild(android.view.View, int);
method public boolean canOpenPopup();
@@ -32979,7 +32993,8 @@ package android.view.accessibility {
method public java.util.List<android.view.accessibility.AccessibilityNodeInfo> findAccessibilityNodeInfosByViewId(java.lang.String);
method public android.view.accessibility.AccessibilityNodeInfo findFocus(int);
method public android.view.accessibility.AccessibilityNodeInfo focusSearch(int);
- method public int getActions();
+ method public java.util.List<android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction> getActionList();
+ method public deprecated int getActions();
method public void getBoundsInParent(android.graphics.Rect);
method public void getBoundsInScreen(android.graphics.Rect);
method public android.view.accessibility.AccessibilityNodeInfo getChild(int);
@@ -33027,7 +33042,8 @@ package android.view.accessibility {
method public boolean performAction(int, android.os.Bundle);
method public void recycle();
method public boolean refresh();
- method public void removeAction(int);
+ method public deprecated void removeAction(int);
+ method public boolean removeAction(android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction);
method public boolean removeChild(android.view.View);
method public boolean removeChild(android.view.View, int);
method public void setAccessibilityFocused(boolean);
@@ -33108,6 +33124,34 @@ package android.view.accessibility {
field public static final int MOVEMENT_GRANULARITY_WORD = 2; // 0x2
}
+ public static final class AccessibilityNodeInfo.AccessibilityAction {
+ ctor public AccessibilityNodeInfo.AccessibilityAction(int, java.lang.CharSequence);
+ method public int getId();
+ method public java.lang.CharSequence getLabel();
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_ACCESSIBILITY_FOCUS;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_CLEAR_ACCESSIBILITY_FOCUS;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_CLEAR_FOCUS;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_CLEAR_SELECTION;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_CLICK;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_COLLAPSE;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_COPY;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_CUT;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_DISMISS;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_EXPAND;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_FOCUS;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_LONG_CLICK;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_NEXT_AT_MOVEMENT_GRANULARITY;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_NEXT_HTML_ELEMENT;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_PASTE;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_PREVIOUS_HTML_ELEMENT;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_SCROLL_BACKWARD;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_SCROLL_FORWARD;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_SELECT;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_SET_SELECTION;
+ field public static final android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction ACTION_SET_TEXT;
+ }
+
public static final class AccessibilityNodeInfo.CollectionInfo {
method public int getColumnCount();
method public int getRowCount();
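
A hedged sketch of the new typed accessibility actions listed above; the custom label string is a placeholder:

import android.view.accessibility.AccessibilityNodeInfo;
import android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction;

public class NodeActionExample {
    public static void describeNode(AccessibilityNodeInfo info) {
        // Framework-defined action constant added by this change.
        info.addAction(AccessibilityAction.ACTION_EXPAND);
        // Wraps a legacy int action with a human-readable label, replacing the
        // now-deprecated addAction(int); the label text here is illustrative.
        info.addAction(new AccessibilityAction(
                AccessibilityNodeInfo.ACTION_CLICK, "Open details"));
    }
}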
diff --git a/core/java/android/bluetooth/IBluetoothGatt.aidl b/core/java/android/bluetooth/IBluetoothGatt.aidl
index 49b156d..3dd7094 100644
--- a/core/java/android/bluetooth/IBluetoothGatt.aidl
+++ b/core/java/android/bluetooth/IBluetoothGatt.aidl
@@ -31,6 +31,8 @@ interface IBluetoothGatt {
void startScan(in int appIf, in boolean isServer);
void startScanWithUuids(in int appIf, in boolean isServer, in ParcelUuid[] ids);
+ void startScanWithUuidsScanParam(in int appIf, in boolean isServer,
+ in ParcelUuid[] ids, int scanWindow, int scanInterval);
void stopScan(in int appIf, in boolean isServer);
void registerClient(in ParcelUuid appId, in IBluetoothGattCallback callback);
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index 5f2af8c..b1c1005 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -307,16 +307,14 @@ public final class CameraCharacteristics extends CameraMetadata {
* <li>The sizes will be sorted by increasing pixel area (width x height).
* If several resolutions have the same area, they will be sorted by increasing width.</li>
* <li>The aspect ratio of the largest thumbnail size will be same as the
- * aspect ratio of largest JPEG output size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.
+ * aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations.
* The largest size is defined as the size that has the largest pixel area
* in a given size list.</li>
- * <li>Each output JPEG size in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations} will have at least
+ * <li>Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
* one corresponding size that has the same aspect ratio in availableThumbnailSizes,
* and vice versa.</li>
* <li>All non (0, 0) sizes will have non-zero widths and heights.</li>
* </ul>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
*/
public static final Key<android.hardware.camera2.Size[]> JPEG_AVAILABLE_THUMBNAIL_SIZES =
new Key<android.hardware.camera2.Size[]>("android.jpeg.availableThumbnailSizes", android.hardware.camera2.Size[].class);
@@ -445,8 +443,10 @@ public final class CameraCharacteristics extends CameraMetadata {
* working at that point; DO NOT USE without careful
* consideration of future support.</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
* @hide
*/
+ @Deprecated
public static final Key<Byte> QUIRKS_USE_PARTIAL_RESULT =
new Key<Byte>("android.quirks.usePartialResult", byte.class);
@@ -461,8 +461,8 @@ public final class CameraCharacteristics extends CameraMetadata {
* <p>This lists the upper bound of the number of output streams supported by
* the camera device. Using more streams simultaneously may require more hardware and
* CPU resources that will consume more power. The image format for an output stream can
- * be any supported format provided by {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.
- * The formats defined in {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations} can be catergorized
+ * be any supported format provided by android.scaler.availableStreamConfigurations.
+ * The formats defined in android.scaler.availableStreamConfigurations can be categorized
* into the 3 stream types as below:</p>
* <ul>
* <li>Processed (but stalling): any non-RAW format with a stallDurations &gt; 0.
@@ -471,8 +471,6 @@ public final class CameraCharacteristics extends CameraMetadata {
* <li>Processed (but not-stalling): any non-RAW format without a stall duration.
* Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li>
* </ul>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
*/
public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
new Key<int[]>("android.request.maxNumOutputStreams", int[].class);
@@ -483,14 +481,12 @@ public final class CameraCharacteristics extends CameraMetadata {
* <p>When set to 0, it means no input stream is supported.</p>
* <p>The image format for an input stream can be any supported
* format provided by
- * {@link CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP android.scaler.availableInputOutputFormatsMap}. When using an
+ * android.scaler.availableInputOutputFormatsMap. When using an
* input stream, there must be at least one output stream
* configured to receive the reprocessed images.</p>
* <p>For example, for Zero Shutter Lag (ZSL) still capture use case, the input
* stream image format will be RAW_OPAQUE, the associated output stream image format
* should be JPEG.</p>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP
*/
public static final Key<Integer> REQUEST_MAX_NUM_INPUT_STREAMS =
new Key<Integer>("android.request.maxNumInputStreams", int.class);
@@ -629,22 +625,26 @@ public final class CameraCharacteristics extends CameraMetadata {
* camera device for output streams.</p>
* <p>All camera devices will support JPEG and YUV_420_888 formats.</p>
* <p>When set to YUV_420_888, application can access the YUV420 data directly.</p>
+ * @deprecated
+ * @hide
*/
+ @Deprecated
public static final Key<int[]> SCALER_AVAILABLE_FORMATS =
new Key<int[]>("android.scaler.availableFormats", int[].class);
/**
* <p>The minimum frame duration that is supported
- * for each resolution in {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES android.scaler.availableJpegSizes}.</p>
+ * for each resolution in android.scaler.availableJpegSizes.</p>
* <p>This corresponds to the minimum steady-state frame duration when only
* that JPEG stream is active and captured in a burst, with all
* processing (typically in android.*.mode) set to FAST.</p>
* <p>When multiple streams are configured, the minimum
* frame duration will be &gt;= max(individual stream min
* durations)</p>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES
+ * @deprecated
+ * @hide
*/
+ @Deprecated
public static final Key<long[]> SCALER_AVAILABLE_JPEG_MIN_DURATIONS =
new Key<long[]>("android.scaler.availableJpegMinDurations", long[].class);
@@ -654,7 +654,10 @@ public final class CameraCharacteristics extends CameraMetadata {
* sensor maximum resolution (defined by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}).</p>
*
* @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @deprecated
+ * @hide
*/
+ @Deprecated
public static final Key<android.hardware.camera2.Size[]> SCALER_AVAILABLE_JPEG_SIZES =
new Key<android.hardware.camera2.Size[]>("android.scaler.availableJpegSizes", android.hardware.camera2.Size[].class);
@@ -669,16 +672,17 @@ public final class CameraCharacteristics extends CameraMetadata {
/**
* <p>For each available processed output size (defined in
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES android.scaler.availableProcessedSizes}), this property lists the
+ * android.scaler.availableProcessedSizes), this property lists the
* minimum supportable frame duration for that size.</p>
* <p>This should correspond to the frame duration when only that processed
* stream is active, with all processing (typically in android.*.mode)
* set to FAST.</p>
* <p>When multiple streams are configured, the minimum frame duration will
* be &gt;= max(individual stream min durations).</p>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES
+ * @deprecated
+ * @hide
*/
+ @Deprecated
public static final Key<long[]> SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS =
new Key<long[]>("android.scaler.availableProcessedMinDurations", long[].class);
@@ -696,7 +700,10 @@ public final class CameraCharacteristics extends CameraMetadata {
* can provide.</p>
* <p>Please reference the documentation for the image data destination to
* check if it limits the maximum size for image data.</p>
+ * @deprecated
+ * @hide
*/
+ @Deprecated
public static final Key<android.hardware.camera2.Size[]> SCALER_AVAILABLE_PROCESSED_SIZES =
new Key<android.hardware.camera2.Size[]>("android.scaler.availableProcessedSizes", android.hardware.camera2.Size[].class);
@@ -746,13 +753,14 @@ public final class CameraCharacteristics extends CameraMetadata {
* </table>
* <p>For ZSL-capable camera devices, using the RAW_OPAQUE format
* as either input or output will never hurt maximum frame rate (i.e.
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} will not have RAW_OPAQUE).</p>
+ * StreamConfigurationMap#getOutputStallDuration(int,Size)
+ * for a <code>format =</code> RAW_OPAQUE is always 0).</p>
* <p>Attempting to configure an input stream with output streams not
* listed as available in this map is not valid.</p>
- * <p>TODO: Add java type mapping for this property.</p>
+ * <p>TODO: typedef to ReprocessFormatMap</p>
*
* @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS
- * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+ * @hide
*/
public static final Key<int[]> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
new Key<int[]>("android.scaler.availableInputOutputFormatsMap", int[].class);
@@ -775,7 +783,7 @@ public final class CameraCharacteristics extends CameraMetadata {
* check if it limits the maximum size for image data.</p>
* <p>Not all output formats may be supported in a configuration with
* an input stream of a particular format. For more details, see
- * {@link CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP android.scaler.availableInputOutputFormatsMap}.</p>
+ * android.scaler.availableInputOutputFormatsMap.</p>
* <p>The following table describes the minimum required output stream
* configurations based on the hardware level
* ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
@@ -844,13 +852,11 @@ public final class CameraCharacteristics extends CameraMetadata {
*
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
- * @see CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP
* @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
- * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
- * @see #SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT
+ * @hide
*/
- public static final Key<int[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
- new Key<int[]>("android.scaler.availableStreamConfigurations", int[].class);
+ public static final Key<android.hardware.camera2.params.StreamConfiguration[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.scaler.availableStreamConfigurations", android.hardware.camera2.params.StreamConfiguration[].class);
/**
* <p>This lists the minimum frame duration for each
@@ -863,14 +869,16 @@ public final class CameraCharacteristics extends CameraMetadata {
* <p>The minimum frame duration of a stream (of a particular format, size)
* is the same regardless of whether the stream is input or output.</p>
* <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations} for more details about
+ * android.scaler.availableStallDurations for more details about
* calculating the max frame rate.</p>
+ * <p>(Keep in sync with
+ * StreamConfigurationMap#getOutputMinFrameDuration)</p>
*
- * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
* @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @hide
*/
- public static final Key<long[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
- new Key<long[]>("android.scaler.availableMinFrameDurations", long[].class);
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableMinFrameDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
/**
* <p>This lists the maximum stall duration for each
@@ -929,12 +937,105 @@ public final class CameraCharacteristics extends CameraMetadata {
* for more details.</p>
* <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about
* calculating the max frame rate (absent stalls).</p>
+ * <p>(Keep up to date with
+ * StreamConfigurationMap#getOutputStallDuration(int, Size) )</p>
*
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
* @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @hide
*/
- public static final Key<long[]> SCALER_AVAILABLE_STALL_DURATIONS =
- new Key<long[]>("android.scaler.availableStallDurations", long[].class);
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_STALL_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableStallDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
+
+ /**
+ * <p>The available stream configurations that this
+ * camera device supports; also includes the minimum frame durations
+ * and the stall durations for each format/size combination.</p>
+ * <p>All camera devices will support sensor maximum resolution (defined by
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}) for the JPEG format.</p>
+ * <p>For a given use case, the actual maximum supported resolution
+ * may be lower than what is listed here, depending on the destination
+ * Surface for the image data. For example, for recording video,
+ * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ * can provide.</p>
+ * <p>Please reference the documentation for the image data destination to
+ * check if it limits the maximum size for image data.</p>
+ * <p>Not all output formats may be supported in a configuration with
+ * an input stream of a particular format. For more details, see
+ * android.scaler.availableInputOutputFormatsMap.</p>
+ * <p>The following table describes the minimum required output stream
+ * configurations based on the hardware level
+ * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">Format</th>
+ * <th align="center">Size</th>
+ * <th align="center">Hardware Level</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1920x1080 (1080p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 1080p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1280x720 (720p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 720p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">640x480 (480p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 480p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">320x240 (240p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 240p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG</td>
+ * <td align="center">FULL</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
+ * <td align="center">LIMITED</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">IMPLEMENTATION_DEFINED</td>
+ * <td align="center">same as YUV_420_888</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional
+ * mandatory stream configurations on a per-capability basis.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfigurationMap> SCALER_STREAM_CONFIGURATION_MAP =
+ new Key<android.hardware.camera2.params.StreamConfigurationMap>("android.scaler.streamConfigurationMap", android.hardware.camera2.params.StreamConfigurationMap.class);
/**
* <p>Area of raw data which corresponds to only
@@ -982,13 +1083,9 @@ public final class CameraCharacteristics extends CameraMetadata {
* being clipped to the maximum. See that control
* for a full definition of frame durations.</p>
* <p>Refer to
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS android.scaler.availableProcessedMinDurations},
- * {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS android.scaler.availableJpegMinDurations}, and
- * android.scaler.availableRawMinDurations for the minimum
- * frame duration values.</p>
+ * StreamConfigurationMap#getOutputMinFrameDuration(int,Size)
+ * for the minimum frame duration values.</p>
*
- * @see CameraCharacteristics#SCALER_AVAILABLE_JPEG_MIN_DURATIONS
- * @see CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS
* @see CaptureRequest#SENSOR_FRAME_DURATION
*/
public static final Key<Long> SENSOR_INFO_MAX_FRAME_DURATION =
@@ -1007,9 +1104,7 @@ public final class CameraCharacteristics extends CameraMetadata {
* including black calibration pixels.</p>
* <p>Maximum output resolution for raw format must
* match this in
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS android.scaler.availableStreamConfigurations}.</p>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ * android.scaler.availableStreamConfigurations.</p>
*/
public static final Key<android.hardware.camera2.Size> SENSOR_INFO_PIXEL_ARRAY_SIZE =
new Key<android.hardware.camera2.Size>("android.sensor.info.pixelArraySize", android.hardware.camera2.Size.class);
@@ -1420,4 +1515,13 @@ public final class CameraCharacteristics extends CameraMetadata {
/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* End generated code
*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/
+
+
+
+
+
+
+
+
+
}
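
With the individual SCALER_AVAILABLE_* keys hidden above, lookups are expected to go through the single SCALER_STREAM_CONFIGURATION_MAP key. A sketch, assuming getOutputSizes(int) returns android.util.Size[] as the javadoc links above suggest; the JPEG format choice and the printing are purely illustrative:

import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;

public class StreamConfigDump {
    public static void dumpJpeg(CameraCharacteristics characteristics) {
        // One key now carries formats, sizes, min frame durations and stall durations.
        StreamConfigurationMap map =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        for (Size size : map.getOutputSizes(ImageFormat.JPEG)) {
            long minFrameDurationNs =
                    map.getOutputMinFrameDuration(ImageFormat.JPEG, size);
            System.out.println(size + ": min frame duration " + minFrameDurationNs + " ns");
        }
    }
}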
diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java
index 9d0e0e1..ca03dae 100644
--- a/core/java/android/hardware/camera2/CameraDevice.java
+++ b/core/java/android/hardware/camera2/CameraDevice.java
@@ -16,6 +16,8 @@
package android.hardware.camera2;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.graphics.ImageFormat;
import android.os.Handler;
import android.view.Surface;
@@ -147,7 +149,7 @@ public interface CameraDevice extends AutoCloseable {
* the size of the Surface with
* {@link android.view.SurfaceHolder#setFixedSize} to be one of the
* supported
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}
+ * {@link StreamConfigurationMap#getOutputSizes(Class) processed sizes}
* before calling {@link android.view.SurfaceHolder#getSurface}.</li>
*
* <li>For accessing through an OpenGL texture via a
@@ -155,14 +157,14 @@ public interface CameraDevice extends AutoCloseable {
* the SurfaceTexture with
* {@link android.graphics.SurfaceTexture#setDefaultBufferSize} to be one
* of the supported
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}
+ * {@link StreamConfigurationMap#getOutputSizes(Class) processed sizes}
* before creating a Surface from the SurfaceTexture with
* {@link Surface#Surface}.</li>
*
* <li>For recording with {@link android.media.MediaCodec}: Call
* {@link android.media.MediaCodec#createInputSurface} after configuring
* the media codec to use one of the
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}
+ * {@link StreamConfigurationMap#getOutputSizes(Class) processed sizes}
* </li>
*
* <li>For recording with {@link android.media.MediaRecorder}: TODO</li>
@@ -171,18 +173,15 @@ public interface CameraDevice extends AutoCloseable {
* Create a RenderScript
* {@link android.renderscript.Allocation Allocation} with a supported YUV
* type, the IO_INPUT flag, and one of the supported
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed sizes}. Then
+ * {@link StreamConfigurationMap#getOutputSizes(int) processed sizes}. Then
* obtain the Surface with
* {@link android.renderscript.Allocation#getSurface}.</li>
*
- * <li>For access to uncompressed or JPEG data in the application: Create a
- * {@link android.media.ImageReader} object with the desired
- * {@link CameraCharacteristics#SCALER_AVAILABLE_FORMATS image format}, and a
- * size from the matching
- * {@link CameraCharacteristics#SCALER_AVAILABLE_PROCESSED_SIZES processed},
- * {@link CameraCharacteristics#SCALER_AVAILABLE_JPEG_SIZES jpeg}. Then obtain
- * a Surface from it.</li>
- *
+ * <li>For access to uncompressed or {@link ImageFormat#JPEG JPEG} data in the application:
+ * Create a {@link android.media.ImageReader} object with the desired
+ * {@link StreamConfigurationMap#getOutputFormats() image format}, and a size from the matching
+ * {@link StreamConfigurationMap#getOutputSizes(int) processed size} and {@code format}.
+ * Then obtain a {@link Surface} from it.</li>
* </ul>
*
* </p>
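
The ImageReader bullet above maps onto code roughly as follows; choosing the largest JPEG size and maxImages = 2 are illustrative assumptions, not API requirements:

import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.util.Size;

public class JpegReaderFactory {
    public static ImageReader create(CameraCharacteristics characteristics) {
        StreamConfigurationMap map =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
        Size largest = jpegSizes[0];
        for (Size candidate : jpegSizes) {
            if ((long) candidate.getWidth() * candidate.getHeight()
                    > (long) largest.getWidth() * largest.getHeight()) {
                largest = candidate;
            }
        }
        // The reader's Surface is what would then be handed to configureOutputs().
        return ImageReader.newInstance(
                largest.getWidth(), largest.getHeight(), ImageFormat.JPEG, /*maxImages*/ 2);
    }
}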
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index 6659278..a11390d 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -446,20 +446,6 @@ public abstract class CameraMetadata {
public static final int REQUEST_AVAILABLE_CAPABILITIES_DNG = 5;
//
- // Enumeration values for CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
- //
-
- /**
- * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
- */
- public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT = 0;
-
- /**
- * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
- */
- public static final int SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT = 1;
-
- //
// Enumeration values for CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
//
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index 0ca9161..8ae21f3 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -1185,7 +1185,8 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
* cannot process more than 1 capture at a time.</li>
* </ul>
* <p>The necessary information for the application, given the model above,
- * is provided via the {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} field.
+ * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field
+ * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size).
* These are used to determine the maximum frame rate / minimum frame
* duration that is possible for a given stream configuration.</p>
* <p>Specifically, the application can use the following rules to
@@ -1195,7 +1196,8 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
* <li>Let the set of currently configured input/output streams
* be called <code>S</code>.</li>
* <li>Find the minimum frame durations for each stream in <code>S</code>, by
- * looking it up in {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} (with
+ * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using
+ * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with
* its respective size/format). Let this set of frame durations be called
* <code>F</code>.</li>
* <li>For any given request <code>R</code>, the minimum frame duration allowed
@@ -1203,7 +1205,8 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
* used in <code>R</code> be called <code>S_r</code>.</li>
* </ol>
* <p>If none of the streams in <code>S_r</code> have a stall time (listed in
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}), then the frame duration in
+ * StreamConfigurationMap#getOutputStallDuration(int,Size) using its
+ * respective size/format), then the frame duration in
* <code>F</code> determines the steady state frame rate that the application will
* get if it uses <code>R</code> as a repeating request. Let this special kind
* of request be called <code>Rsimple</code>.</p>
@@ -1214,10 +1217,9 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
* if all buffers from the previous <code>Rstall</code> have already been
* delivered.</p>
* <p>For more details about stalling, see
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}.</p>
+ * StreamConfigurationMap#getOutputStallDuration(int,Size).</p>
*
- * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
- * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
*/
public static final Key<Long> SENSOR_FRAME_DURATION =
new Key<Long>("android.sensor.frameDuration", long.class);
@@ -1516,4 +1518,12 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
/*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* End generated code
*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/
+
+
+
+
+
+
+
+
}
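
The frame-duration rules in the javadoc above reduce to a max over the configured streams; a sketch, where the Pair-based representation of the stream set S_r is an assumption:

import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Pair;
import android.util.Size;

import java.util.List;

public class FrameDurationMath {
    /**
     * Lower bound on the frame duration of a request whose target streams are
     * the given (format, size) pairs, per the rules described above.
     */
    public static long minFrameDurationNs(
            StreamConfigurationMap map, List<Pair<Integer, Size>> streams) {
        long result = 0;
        for (Pair<Integer, Size> stream : streams) {
            result = Math.max(result,
                    map.getOutputMinFrameDuration(stream.first, stream.second));
        }
        return result;
    }
}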
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 42a3de8..0160622 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -216,18 +216,6 @@ public final class CaptureResult extends CameraMetadata {
new Key<float[]>("android.colorCorrection.gains", float[].class);
/**
- * <p>The ID sent with the latest
- * CAMERA2_TRIGGER_PRECAPTURE_METERING call</p>
- * <p>Must be 0 if no
- * CAMERA2_TRIGGER_PRECAPTURE_METERING trigger received yet
- * by HAL. Always updated even if AE algorithm ignores the
- * trigger</p>
- * @hide
- */
- public static final Key<Integer> CONTROL_AE_PRECAPTURE_ID =
- new Key<Integer>("android.control.aePrecaptureId", int.class);
-
- /**
* <p>The desired setting for the camera device's auto-exposure
* algorithm's antibanding compensation.</p>
* <p>Some kinds of lighting fixtures, such as some fluorescent
@@ -1068,17 +1056,6 @@ public final class CaptureResult extends CameraMetadata {
new Key<Integer>("android.control.afState", int.class);
/**
- * <p>The ID sent with the latest
- * CAMERA2_TRIGGER_AUTOFOCUS call</p>
- * <p>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger
- * received yet by HAL. Always updated even if AF algorithm
- * ignores the trigger</p>
- * @hide
- */
- public static final Key<Integer> CONTROL_AF_TRIGGER_ID =
- new Key<Integer>("android.control.afTriggerId", int.class);
-
- /**
* <p>Whether AWB is currently locked to its
* latest calculated values.</p>
* <p>Note that AWB lock is only meaningful for AUTO
@@ -1713,8 +1690,10 @@ public final class CaptureResult extends CameraMetadata {
* capture must arrive before the FINAL buffer for that capture. This entry may
* only be used by the camera device if quirks.usePartialResult is set to 1.</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
* @hide
*/
+ @Deprecated
public static final Key<Boolean> QUIRKS_PARTIAL_RESULT =
new Key<Boolean>("android.quirks.partialResult", boolean.class);
@@ -1834,7 +1813,8 @@ public final class CaptureResult extends CameraMetadata {
* cannot process more than 1 capture at a time.</li>
* </ul>
* <p>The necessary information for the application, given the model above,
- * is provided via the {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} field.
+ * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field
+ * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size).
* These are used to determine the maximum frame rate / minimum frame
* duration that is possible for a given stream configuration.</p>
* <p>Specifically, the application can use the following rules to
@@ -1844,7 +1824,8 @@ public final class CaptureResult extends CameraMetadata {
* <li>Let the set of currently configured input/output streams
* be called <code>S</code>.</li>
* <li>Find the minimum frame durations for each stream in <code>S</code>, by
- * looking it up in {@link CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS android.scaler.availableMinFrameDurations} (with
+ * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using
+ * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with
* its respective size/format). Let this set of frame durations be called
* <code>F</code>.</li>
* <li>For any given request <code>R</code>, the minimum frame duration allowed
@@ -1852,7 +1833,8 @@ public final class CaptureResult extends CameraMetadata {
* used in <code>R</code> be called <code>S_r</code>.</li>
* </ol>
* <p>If none of the streams in <code>S_r</code> have a stall time (listed in
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}), then the frame duration in
+ * StreamConfigurationMap#getOutputStallDuration(int,Size) using its
+ * respective size/format), then the frame duration in
* <code>F</code> determines the steady state frame rate that the application will
* get if it uses <code>R</code> as a repeating request. Let this special kind
* of request be called <code>Rsimple</code>.</p>
@@ -1863,10 +1845,9 @@ public final class CaptureResult extends CameraMetadata {
* if all buffers from the previous <code>Rstall</code> have already been
* delivered.</p>
* <p>For more details about stalling, see
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS android.scaler.availableStallDurations}.</p>
+ * StreamConfigurationMap#getOutputStallDuration(int,Size).</p>
*
- * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
- * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
*/
public static final Key<Long> SENSOR_FRAME_DURATION =
new Key<Long>("android.sensor.frameDuration", long.class);
@@ -2141,8 +2122,10 @@ public final class CaptureResult extends CameraMetadata {
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
*
* @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @deprecated
* @hide
*/
+ @Deprecated
public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
new Key<float[]>("android.statistics.predictedColorGains", float[].class);
@@ -2163,8 +2146,10 @@ public final class CaptureResult extends CameraMetadata {
* <p>This value should always be calculated by the AWB block,
* regardless of the android.control.* current values.</p>
* <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
* @hide
*/
+ @Deprecated
public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);
@@ -2441,6 +2426,14 @@ public final class CaptureResult extends CameraMetadata {
* End generated code
*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/
+
+
+
+
+
+
+
+
/**
* <p>
* List of the {@link Face Faces} detected through camera face detection
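
As in CaptureRequest, stall information now comes from the map rather than a standalone key; a one-method helper, assuming getOutputStallDuration(int, Size) behaves as the javadoc references above describe:

import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;

public class StallCheck {
    /** True if an output of this format/size contributes a stall (the Rstall case above). */
    public static boolean isStalling(StreamConfigurationMap map, int format, Size size) {
        return map.getOutputStallDuration(format, size) > 0;
    }
}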
diff --git a/core/java/android/hardware/camera2/ICameraDeviceUser.aidl b/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
index d77f3d1..0815170 100644
--- a/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
+++ b/core/java/android/hardware/camera2/ICameraDeviceUser.aidl
@@ -20,7 +20,7 @@ import android.view.Surface;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.CaptureRequest;
-import android.hardware.camera2.LongParcelable;
+import android.hardware.camera2.utils.LongParcelable;
/** @hide */
interface ICameraDeviceUser
diff --git a/core/java/android/hardware/camera2/StreamConfigurationMap.java b/core/java/android/hardware/camera2/StreamConfigurationMap.java
deleted file mode 100644
index 5ddd7d6..0000000
--- a/core/java/android/hardware/camera2/StreamConfigurationMap.java
+++ /dev/null
@@ -1,508 +0,0 @@
-/*
- * Copyright (C) 2014 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware.camera2;
-
-import android.graphics.ImageFormat;
-import android.graphics.PixelFormat;
-import android.hardware.camera2.utils.HashCodeHelpers;
-import android.view.Surface;
-import android.util.Size;
-
-import java.util.Arrays;
-
-import static com.android.internal.util.Preconditions.*;
-
-/**
- * Immutable class to store the available stream
- * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS configurations} to be used
- * when configuring streams with {@link CameraDevice#configureOutputs}.
- * <!-- TODO: link to input stream configuration -->
- *
- * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
- * for that format) that are supported by a camera device.</p>
- *
- * <p>This also contains the minimum frame durations and stall durations for each format/size
- * combination that can be used to calculate effective frame rate when submitting multiple captures.
- * </p>
- *
- * <p>An instance of this object is available from {@link CameraCharacteristics} using
- * the {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS} key and the
- * {@link CameraCharacteristics#get} method.</p.
- *
- * <pre>{@code
- * CameraCharacteristics characteristics = ...;
- * StreamConfigurationMap configs = characteristics.get(
- * CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
- * }</pre>
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
- * @see CameraDevice#configureOutputs
- */
-public final class StreamConfigurationMap {
-
- /**
- * Create a new {@link StreamConfigurationMap}.
- *
- * <p>The array parameters ownership is passed to this object after creation; do not
- * write to them after this constructor is invoked.</p>
- *
- * @param configurations a non-{@code null} array of {@link StreamConfiguration}
- * @param durations a non-{@code null} array of {@link StreamConfigurationDuration}
- *
- * @throws NullPointerException if any of the arguments or subelements were {@code null}
- *
- * @hide
- */
- public StreamConfigurationMap(
- StreamConfiguration[] configurations,
- StreamConfigurationDuration[] durations) {
- // TODO: format check against ImageFormat/PixelFormat ?
-
- mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
- mDurations = checkArrayElementsNotNull(durations, "durations");
-
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get the image {@code format} output formats in this stream configuration.
- *
- * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
- * or in {@link PixelFormat} (and there is no possibility of collision).</p>
- *
- * <p>Formats listed in this array are guaranteed to return true if queried with
- * {@link #isOutputSupportedFor(int).</p>
- *
- * @return an array of integer format
- *
- * @see ImageFormat
- * @see PixelFormat
- */
- public final int[] getOutputFormats() {
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get the image {@code format} input formats in this stream configuration.
- *
- * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
- * or in {@link PixelFormat} (and there is no possibility of collision).</p>
- *
- * @return an array of integer format
- *
- * @see ImageFormat
- * @see PixelFormat
- *
- * @hide
- */
- public final int[] getInputFormats() {
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get the supported input sizes for this input format.
- *
- * <p>The format must have come from {@link #getInputFormats}; otherwise
- * {@code null} is returned.</p>
- *
- * @param format a format from {@link #getInputFormats}
- * @return a non-empty array of sizes, or {@code null} if the format was not available.
- *
- * @hide
- */
- public Size[] getInputSizes(final int format) {
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Determine whether or not output streams can be
- * {@link CameraDevice#configureOutputs configured} with a particular user-defined format.
- *
- * <p>This method determines that the output {@code format} is supported by the camera device;
- * each output {@code surface} target may or may not itself support that {@code format}.
- * Refer to the class which provides the surface for additional documentation.</p>
- *
- * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
- * returned by {@link #getOutputSizes}.</p>
- *
- * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
- * @return
- * {@code true} iff using a {@code surface} with this {@code format} will be
- * supported with {@link CameraDevice#configureOutputs}
- *
- * @throws IllegalArgumentException
- * if the image format was not a defined named constant
- * from either {@link ImageFormat} or {@link PixelFormat}
- *
- * @see ImageFormat
- * @see PixelFormat
- * @see CameraDevice#configureOutputs
- */
- public boolean isOutputSupportedFor(int format) {
- checkArgumentFormat(format);
-
- final int[] formats = getOutputFormats();
- for (int i = 0; i < formats.length; ++i) {
- if (format == formats[i]) {
- return true;
- }
- }
-
- return false;
- }
-
- /**
- * Determine whether or not output streams can be configured with a particular class
- * as a consumer.
- *
- * <p>The following list is generally usable for outputs:
- * <ul>
- * <li>{@link android.media.ImageReader} -
- * Recommended for image processing or streaming to external resources (such as a file or
- * network)
- * <li>{@link android.media.MediaRecorder} -
- * Recommended for recording video (simple to use)
- * <li>{@link android.media.MediaCodec} -
- * Recommended for recording video (more complicated to use, with more flexibility)
- * <li>{@link android.renderscript.Allocation} -
- * Recommended for image processing with {@link android.renderscript RenderScript}
- * <li>{@link android.view.SurfaceHolder} -
- * Recommended for low-power camera preview with {@link android.view.SurfaceView}
- * <li>{@link android.graphics.SurfaceTexture} -
- * Recommended for OpenGL-accelerated preview processing or compositing with
- * {@link android.view.TextureView}
- * </ul>
- * </p>
- *
- * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
- * provide a producer endpoint that is suitable to be used with
- * {@link CameraDevice#configureOutputs}.</p>
- *
- * <p>Since not all of the above classes support output of all format and size combinations,
- * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
- *
- * @param klass a non-{@code null} {@link Class} object reference
- * @return {@code true} if this class is supported as an output, {@code false} otherwise
- *
- * @throws NullPointerException if {@code klass} was {@code null}
- *
- * @see CameraDevice#configureOutputs
- * @see #isOutputSupportedFor(Surface)
- */
- public static <T> boolean isOutputSupportedFor(final Class<T> klass) {
- checkNotNull(klass, "klass must not be null");
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Determine whether or not the {@code surface} in its current state is suitable to be
- * {@link CameraDevice#configureOutputs configured} as an output.
- *
- * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
- * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
- * compatible with the {@link CameraDevice} in general
- * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
- * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
- *
- * <p>Reasons for a {@code surface} being specifically incompatible might be:
- * <ul>
- * <li>Using a format that's not listed by {@link #getOutputFormats}
- * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
- * <li>The {@code surface} itself is not in a state where it can service a new producer.</li>
- * </ul>
- *
- * <p>This is not an exhaustive list; see the particular class's documentation for further
- * possible reasons of incompatibility.</p>
- *
- * @param surface a non-{@code null} {@link Surface} object reference
- * @return {@code true} if this is supported, {@code false} otherwise
- *
- * @throws NullPointerException if {@code surface} was {@code null}
- *
- * @see CameraDevice#configureOutputs
- * @see #isOutputSupportedFor(Class)
- */
- public boolean isOutputSupportedFor(final Surface surface) {
- checkNotNull(surface, "surface must not be null");
-
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get a list of sizes compatible with {@code klass} to use as an output.
- *
- * <p>Some of the supported classes may support additional formats beyond
- * an opaque/implementation-defined (under-the-hood) format; this function only returns
- * sizes for the implementation-defined format.</p>
- *
- * <p>Some classes such as {@link android.media.ImageReader} may only support user-defined
- * formats; in particular {@link #isOutputSupportedFor(Class)} will return {@code true} for
- * that class and this method will return an empty array (but not {@code null}).</p>
- *
- * <p>If a well-defined format such as {@code NV21} is required, use
- * {@link #getOutputSizes(int)} instead.</p>
- *
- * <p>The {@code klass} should be a supported output, i.e. querying
- * {@link #isOutputSupportedFor(Class)} should return {@code true}.</p>
- *
- * @param klass
- * a non-{@code null} {@link Class} object reference
- * @return
- * an array of supported sizes for implementation-defined formats,
- * or {@code null} iff the {@code klass} is not a supported output
- *
- * @throws NullPointerException if {@code klass} was {@code null}
- *
- * @see #isOutputSupportedFor(Class)
- */
- public <T> Size[] getOutputSizes(final Class<T> klass) {
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get a list of sizes compatible with the requested image {@code format}.
- *
- * <p>The {@code format} should be a supported format (one of the formats returned by
- * {@link #getOutputFormats}).</p>
- *
- * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
- * @return
- * an array of supported sizes,
- * or {@code null} if the {@code format} is not a supported output
- *
- * @see ImageFormat
- * @see PixelFormat
- * @see #getOutputFormats
- */
- public Size[] getOutputSizes(final int format) {
- try {
- checkArgumentFormatSupported(format, /*output*/true);
- } catch (IllegalArgumentException e) {
- return null;
- }
-
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
- * for the format/size combination (in nanoseconds).
- *
- * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
- * <p>{@code size} should be one of the ones returned by
- * {@link #getOutputSizes(int)}.</p>
- *
- * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
- * @param size an output-compatible size
- * @return a minimum frame duration {@code >=} 0 in nanoseconds
- *
- * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
- * @throws NullPointerException if {@code size} was {@code null}
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
- * @see CaptureRequest#SENSOR_FRAME_DURATION
- * @see ImageFormat
- * @see PixelFormat
- */
- public long getOutputMinFrameDuration(final int format, final Size size) {
- checkArgumentFormatSupported(format, /*output*/true);
-
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
- * for the class/size combination (in nanoseconds).
- *
- * <p>This assumes that the {@code klass} is set up to use an implementation-defined format.
- * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
- *
- * <p>{@code klass} should be one of the ones which is supported by
- * {@link #isOutputSupportedFor(Class)}.</p>
- *
- * <p>{@code size} should be one of the ones returned by
- * {@link #getOutputSizes(int)}.</p>
- *
- * @param klass
- * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
- * non-empty array returned by {@link #getOutputSizes(Class)}
- * @param size an output-compatible size
- * @return a minimum frame duration {@code >=} 0 in nanoseconds
- *
- * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
- * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
- * @see CaptureRequest#SENSOR_FRAME_DURATION
- * @see ImageFormat
- * @see PixelFormat
- */
- public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get the {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS stall duration}
- * for the format/size combination (in nanoseconds).
- *
- * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
- * <p>{@code size} should be one of the ones returned by
- * {@link #getOutputSizes(int)}.</p>
- *
- * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
- * @param size an output-compatible size
- * @return a stall duration {@code >=} 0 in nanoseconds
- *
- * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
- * @throws NullPointerException if {@code size} was {@code null}
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
- * @see ImageFormat
- * @see PixelFormat
- */
- public long getOutputStallDuration(final int format, final Size size) {
- checkArgumentFormatSupported(format, /*output*/true);
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Get the {@link CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS stall duration}
- * for the class/size combination (in nanoseconds).
- *
- * <p>This assumes that the {@code klass} is set up to use an implementation-defined format.
- * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
- *
- * <p>{@code klass} should be one of the ones with a non-empty array returned by
- * {@link #getOutputSizes(Class)}.</p>
- *
- * <p>{@code size} should be one of the ones returned by
- * {@link #getOutputSizes(Class)}.</p>
- *
- * @param klass
- * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
- * non-empty array returned by {@link #getOutputSizes(Class)}
- * @param size an output-compatible size
- * @return a stall duration {@code >=} 0 in nanoseconds
- *
- * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
- * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
- *
- * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
- * @see CaptureRequest#SENSOR_FRAME_DURATION
- * @see ImageFormat
- * @see PixelFormat
- */
- public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
- throw new UnsupportedOperationException("Not implemented yet");
- }
-
- /**
- * Check if this {@link StreamConfigurationMap} is equal to another
- * {@link StreamConfigurationMap}.
- *
- * <p>Two maps are equal if and only if each of their respective elements is equal.</p>
- *
- * @return {@code true} if the objects were equal, {@code false} otherwise
- */
- @Override
- public boolean equals(final Object obj) {
- if (obj == null) {
- return false;
- }
- if (this == obj) {
- return true;
- }
- if (obj instanceof StreamConfigurationMap) {
- final StreamConfigurationMap other = (StreamConfigurationMap) obj;
- // TODO: do we care about order?
- return Arrays.equals(mConfigurations, other.mConfigurations) &&
- Arrays.equals(mDurations, other.mDurations);
- }
- return false;
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- public int hashCode() {
- // TODO: do we care about order?
- return HashCodeHelpers.hashCode(mConfigurations) ^ HashCodeHelpers.hashCode(mDurations);
- }
-
- // Check that the argument is supported by #getOutputFormats or #getInputFormats
- private int checkArgumentFormatSupported(int format, boolean output) {
- checkArgumentFormat(format);
-
- int[] formats = output ? getOutputFormats() : getInputFormats();
- for (int i = 0; i < formats.length; ++i) {
- if (format == formats[i]) {
- return format;
- }
- }
-
- throw new IllegalArgumentException(String.format(
- "format %x is not supported by this stream configuration map", format));
- }
-
- /**
- * Ensures that the format is either user-defined or implementation defined.
- *
- * <p>Any invalid/undefined formats will raise an exception.</p>
- *
- * @param format image format
- * @return the format
- *
- * @throws IllegalArgumentException if the format was invalid
- */
- static int checkArgumentFormatInternal(int format) {
- if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
- return format;
- }
-
- return checkArgumentFormat(format);
- }
-
- /**
- * Ensures that the format is user-defined in either ImageFormat or PixelFormat.
- *
- * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
- * </p>
- *
- * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
- *
- * @param format image format
- * @return the format
- *
- * @throws IllegalArgumentException if the format was not user-defined
- */
- static int checkArgumentFormat(int format) {
- if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
- throw new IllegalArgumentException(String.format(
- "format %x was not defined in either ImageFormat or PixelFormat", format));
- }
-
- return format;
- }
-
- private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
-
- private final StreamConfiguration[] mConfigurations;
- private final StreamConfigurationDuration[] mDurations;
-
-}
diff --git a/core/java/android/hardware/camera2/impl/CameraDevice.java b/core/java/android/hardware/camera2/impl/CameraDevice.java
index 988f8f9..628d1c3 100644
--- a/core/java/android/hardware/camera2/impl/CameraDevice.java
+++ b/core/java/android/hardware/camera2/impl/CameraDevice.java
@@ -24,9 +24,9 @@ import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.CaptureResultExtras;
import android.hardware.camera2.ICameraDeviceCallbacks;
import android.hardware.camera2.ICameraDeviceUser;
-import android.hardware.camera2.LongParcelable;
import android.hardware.camera2.utils.CameraBinderDecorator;
import android.hardware.camera2.utils.CameraRuntimeException;
+import android.hardware.camera2.utils.LongParcelable;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index 9a06e97..d28f7bd 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -43,6 +43,9 @@ import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF;
import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration;
import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration;
import android.hardware.camera2.marshal.impl.MarshalQueryableString;
+import android.hardware.camera2.params.StreamConfiguration;
+import android.hardware.camera2.params.StreamConfigurationDuration;
+import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Parcelable;
import android.os.Parcel;
import android.util.Log;
@@ -207,10 +210,8 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
return (T) getFaces();
} else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
return (T) getFaceRectangles();
- } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS)) {
- return (T) getAvailableStreamConfigurations();
- } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS)) {
- return (T) getAvailableMinFrameDurations();
+ } else if (key.equals(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)) {
+ return (T) getStreamConfigurationMap();
}
// For other keys, get() falls back to getBase()
@@ -231,50 +232,6 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
return availableFormats;
}
- private int[] getAvailableStreamConfigurations() {
- final int NUM_ELEMENTS_IN_CONFIG = 4;
- int[] availableConfigs =
- getBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
- if (availableConfigs != null) {
- if (availableConfigs.length % NUM_ELEMENTS_IN_CONFIG != 0) {
- Log.w(TAG, "availableStreamConfigurations is malformed, length must be multiple"
- + " of " + NUM_ELEMENTS_IN_CONFIG);
- return availableConfigs;
- }
-
- for (int i = 0; i < availableConfigs.length; i += NUM_ELEMENTS_IN_CONFIG) {
- // JPEG has different value between native and managed side, need override.
- if (availableConfigs[i] == NATIVE_JPEG_FORMAT) {
- availableConfigs[i] = ImageFormat.JPEG;
- }
- }
- }
-
- return availableConfigs;
- }
-
- private long[] getAvailableMinFrameDurations() {
- final int NUM_ELEMENTS_IN_DURATION = 4;
- long[] availableMinDurations =
- getBase(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
- if (availableMinDurations != null) {
- if (availableMinDurations.length % NUM_ELEMENTS_IN_DURATION != 0) {
- Log.w(TAG, "availableStreamConfigurations is malformed, length must be multiple"
- + " of " + NUM_ELEMENTS_IN_DURATION);
- return availableMinDurations;
- }
-
- for (int i = 0; i < availableMinDurations.length; i += NUM_ELEMENTS_IN_DURATION) {
- // JPEG has different value between native and managed side, need override.
- if (availableMinDurations[i] == NATIVE_JPEG_FORMAT) {
- availableMinDurations[i] = ImageFormat.JPEG;
- }
- }
- }
-
- return availableMinDurations;
- }
-
private Face[] getFaces() {
final int FACE_LANDMARK_SIZE = 6;
@@ -374,6 +331,17 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
return fixedFaceRectangles;
}
+ private StreamConfigurationMap getStreamConfigurationMap() {
+ StreamConfiguration[] configurations = getBase(
+ CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ StreamConfigurationDuration[] minFrameDurations = getBase(
+ CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+ StreamConfigurationDuration[] stallDurations = getBase(
+ CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS);
+
+ return new StreamConfigurationMap(configurations, minFrameDurations, stallDurations);
+ }
+
private <T> void setBase(Key<T> key, T value) {
int tag = key.getTag();
@@ -401,56 +369,12 @@ public class CameraMetadataNative extends CameraMetadata implements Parcelable {
return setAvailableFormats((int[]) value);
} else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
return setFaceRectangles((Rect[]) value);
- } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS)) {
- return setAvailableStreamConfigurations((int[])value);
- } else if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS)) {
- return setAvailableMinFrameDurations((long[])value);
}
// For other keys, set() falls back to setBase().
return false;
}
- private boolean setAvailableStreamConfigurations(int[] value) {
- final int NUM_ELEMENTS_IN_CONFIG = 4;
- int[] availableConfigs = value;
- if (value == null) {
- // Let setBase() to handle the null value case.
- return false;
- }
-
- int[] newValues = new int[availableConfigs.length];
- for (int i = 0; i < availableConfigs.length; i++) {
- newValues[i] = availableConfigs[i];
- if (i % NUM_ELEMENTS_IN_CONFIG == 0 && availableConfigs[i] == ImageFormat.JPEG) {
- newValues[i] = NATIVE_JPEG_FORMAT;
- }
- }
-
- setBase(CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS, newValues);
- return true;
- }
-
- private boolean setAvailableMinFrameDurations(long[] value) {
- final int NUM_ELEMENTS_IN_DURATION = 4;
- long[] availableDurations = value;
- if (value == null) {
- // Let setBase() to handle the null value case.
- return false;
- }
-
- long[] newValues = new long[availableDurations.length];
- for (int i = 0; i < availableDurations.length; i++) {
- newValues[i] = availableDurations[i];
- if (i % NUM_ELEMENTS_IN_DURATION == 0 && availableDurations[i] == ImageFormat.JPEG) {
- newValues[i] = NATIVE_JPEG_FORMAT;
- }
- }
-
- setBase(CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS, newValues);
- return true;
- }
-
private boolean setAvailableFormats(int[] value) {
int[] availableFormat = value;
if (value == null) {
diff --git a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
index 3025cb4..98a7ad7 100644
--- a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
+++ b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
@@ -15,9 +15,10 @@
*/
package android.hardware.camera2.marshal.impl;
-import android.hardware.camera2.ReprocessFormatsMap;
import android.hardware.camera2.marshal.Marshaler;
import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.ReprocessFormatsMap;
+import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.utils.TypeReference;
import static android.hardware.camera2.impl.CameraMetadataNative.*;
@@ -50,12 +51,13 @@ public class MarshalQueryableReprocessFormatsMap
* INPUT_FORMAT, OUTPUT_FORMAT_COUNT, [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
* };
*/
- int[] inputs = value.getInputs();
+ int[] inputs = StreamConfigurationMap.imageFormatToInternal(value.getInputs());
for (int input : inputs) {
// INPUT_FORMAT
buffer.putInt(input);
- int[] outputs = value.getOutputs(input);
+ int[] outputs =
+ StreamConfigurationMap.imageFormatToInternal(value.getOutputs(input));
// OUTPUT_FORMAT_COUNT
buffer.putInt(outputs.length);
diff --git a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
index 6a4e821..62ace31 100644
--- a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
+++ b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
@@ -15,9 +15,9 @@
*/
package android.hardware.camera2.marshal.impl;
-import android.hardware.camera2.StreamConfiguration;
import android.hardware.camera2.marshal.Marshaler;
import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.StreamConfiguration;
import android.hardware.camera2.utils.TypeReference;
import static android.hardware.camera2.impl.CameraMetadataNative.*;
diff --git a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
index c3d564e..fd3dfac 100644
--- a/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
+++ b/core/java/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
@@ -15,9 +15,9 @@
*/
package android.hardware.camera2.marshal.impl;
-import android.hardware.camera2.StreamConfigurationDuration;
import android.hardware.camera2.marshal.Marshaler;
import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.StreamConfigurationDuration;
import android.hardware.camera2.utils.TypeReference;
import static android.hardware.camera2.impl.CameraMetadataNative.*;
diff --git a/core/java/android/hardware/camera2/ReprocessFormatsMap.java b/core/java/android/hardware/camera2/params/ReprocessFormatsMap.java
index 894a499..d3f5bc3 100644
--- a/core/java/android/hardware/camera2/ReprocessFormatsMap.java
+++ b/core/java/android/hardware/camera2/params/ReprocessFormatsMap.java
@@ -14,10 +14,11 @@
* limitations under the License.
*/
-package android.hardware.camera2;
+package android.hardware.camera2.params;
import static com.android.internal.util.Preconditions.*;
+import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.utils.HashCodeHelpers;
import java.util.Arrays;
@@ -61,9 +62,12 @@ public final class ReprocessFormatsMap {
* @throws IllegalArgumentException
* if the data was poorly formatted
* (missing output format length or too few output formats)
+ * or if any of the input/output formats were not valid
* @throws NullPointerException
* if entry was null
*
+ * @see StreamConfigurationMap#checkArgumentFormatInternal
+ *
* @hide
*/
public ReprocessFormatsMap(final int[] entry) {
@@ -72,26 +76,31 @@ public final class ReprocessFormatsMap {
int numInputs = 0;
int left = entry.length;
for (int i = 0; i < entry.length; ) {
- final int format = entry[i];
+ int inputFormat = StreamConfigurationMap.checkArgumentFormatInternal(entry[i]);
left--;
i++;
if (left < 1) {
throw new IllegalArgumentException(
- String.format("Input %x had no output format length listed", format));
+ String.format("Input %x had no output format length listed", inputFormat));
}
final int length = entry[i];
left--;
i++;
+ for (int j = 0; j < length; ++j) {
+ int outputFormat = entry[i + j];
+ StreamConfigurationMap.checkArgumentFormatInternal(outputFormat);
+ }
+
if (length > 0) {
if (left < length) {
throw new IllegalArgumentException(
String.format(
"Input %x had too few output formats listed (actual: %d, " +
- "expected: %d)", format, left, length));
+ "expected: %d)", inputFormat, left, length));
}
i += length;
@@ -131,7 +140,6 @@ public final class ReprocessFormatsMap {
throw new AssertionError(
String.format("Input %x had no output format length listed", format));
}
- // TODO: check format is a valid input format
final int length = mEntry[i];
left--;
@@ -149,12 +157,10 @@ public final class ReprocessFormatsMap {
left -= length;
}
- // TODO: check output format is a valid output format
-
inputs[j] = format;
}
- return inputs;
+ return StreamConfigurationMap.imageFormatToPublic(inputs);
}
/**
@@ -204,7 +210,7 @@ public final class ReprocessFormatsMap {
outputs[k] = mEntry[i + k];
}
- return outputs;
+ return StreamConfigurationMap.imageFormatToPublic(outputs);
}
i += length;
diff --git a/core/java/android/hardware/camera2/StreamConfiguration.java b/core/java/android/hardware/camera2/params/StreamConfiguration.java
index a514034..1c6b6e9 100644
--- a/core/java/android/hardware/camera2/StreamConfiguration.java
+++ b/core/java/android/hardware/camera2/params/StreamConfiguration.java
@@ -14,13 +14,16 @@
* limitations under the License.
*/
-package android.hardware.camera2;
+package android.hardware.camera2.params;
import static com.android.internal.util.Preconditions.*;
-import static android.hardware.camera2.StreamConfigurationMap.checkArgumentFormatInternal;
+import static android.hardware.camera2.params.StreamConfigurationMap.checkArgumentFormatInternal;
import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.utils.HashCodeHelpers;
+import android.graphics.PixelFormat;
import android.util.Size;
/**
@@ -62,11 +65,12 @@ public final class StreamConfiguration {
}
/**
- * Get the image {@code format} in this stream configuration.
+ * Get the internal image {@code format} in this stream configuration.
*
* @return an integer format
*
* @see ImageFormat
+ * @see PixelFormat
*/
public final int getFormat() {
return mFormat;
diff --git a/core/java/android/hardware/camera2/StreamConfigurationDuration.java b/core/java/android/hardware/camera2/params/StreamConfigurationDuration.java
index 6a31156..217059d 100644
--- a/core/java/android/hardware/camera2/StreamConfigurationDuration.java
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationDuration.java
@@ -14,13 +14,15 @@
* limitations under the License.
*/
-package android.hardware.camera2;
+package android.hardware.camera2.params;
import static com.android.internal.util.Preconditions.*;
-import static android.hardware.camera2.StreamConfigurationMap.checkArgumentFormatInternal;
+import static android.hardware.camera2.params.StreamConfigurationMap.checkArgumentFormatInternal;
import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.utils.HashCodeHelpers;
+import android.graphics.PixelFormat;
import android.util.Size;
/**
@@ -59,11 +61,12 @@ public final class StreamConfigurationDuration {
}
/**
- * Get the image {@code format} in this stream configuration duration
+ * Get the internal image {@code format} in this stream configuration duration
*
* @return an integer format
*
* @see ImageFormat
+ * @see PixelFormat
*/
public final int getFormat() {
return mFormat;
diff --git a/core/java/android/hardware/camera2/params/StreamConfigurationMap.java b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
new file mode 100644
index 0000000..4cd6d15
--- /dev/null
+++ b/core/java/android/hardware/camera2/params/StreamConfigurationMap.java
@@ -0,0 +1,949 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.view.Surface;
+import android.util.Log;
+import android.util.Size;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Objects;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Immutable class to store the available stream
+ * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to be used
+ * when configuring streams with {@link CameraDevice#configureOutputs}.
+ * <!-- TODO: link to input stream configuration -->
+ *
+ * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
+ * for that format) that are supported by a camera device.</p>
+ *
+ * <p>This also contains the minimum frame durations and stall durations for each format/size
+ * combination that can be used to calculate effective frame rate when submitting multiple captures.
+ * </p>
+ *
+ * <p>An instance of this object is available from {@link CameraCharacteristics} using
+ * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
+ * {@link CameraCharacteristics#get} method.</p>
+ *
+ * <pre><code>{@code
+ * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
+ * StreamConfigurationMap configs = characteristics.get(
+ * CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ * }</code></pre>
+ *
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ * @see CameraDevice#configureOutputs
+ */
+public final class StreamConfigurationMap {
+
+ private static final String TAG = "StreamConfigurationMap";
+ /**
+ * Create a new {@link StreamConfigurationMap}.
+ *
+ * <p>The array parameters ownership is passed to this object after creation; do not
+ * write to them after this constructor is invoked.</p>
+ *
+ * @param configurations a non-{@code null} array of {@link StreamConfiguration}
+ * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+ * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+ *
+ * @throws NullPointerException if any of the arguments or subelements were {@code null}
+ *
+ * @hide
+ */
+ public StreamConfigurationMap(
+ StreamConfiguration[] configurations,
+ StreamConfigurationDuration[] minFrameDurations,
+ StreamConfigurationDuration[] stallDurations) {
+
+ mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
+ mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
+ mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
+
+ // For each format, track how many sizes there are available to configure
+ for (StreamConfiguration config : configurations) {
+ HashMap<Integer, Integer> map = config.isOutput() ? mOutputFormats : mInputFormats;
+
+ Integer count = map.get(config.getFormat());
+
+ if (count == null) {
+ count = 0;
+ }
+ count = count + 1;
+
+ map.put(config.getFormat(), count);
+ }
+
+ if (!mOutputFormats.containsKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
+ throw new AssertionError(
+ "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
+ }
+ }
+
+ /**
+ * Get the image {@code format} output formats in this stream configuration.
+ *
+ * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
+ * or in {@link PixelFormat} (and there is no possibility of collision).</p>
+ *
+ * <p>Formats listed in this array are guaranteed to return true if queried with
+ * {@link #isOutputSupportedFor(int)}.</p>
+ *
+ * @return an array of integer format
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public final int[] getOutputFormats() {
+ return getPublicFormats(/*output*/true);
+ }
+
+ /**
+ * Get the image {@code format} input formats in this stream configuration.
+ *
+ * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
+ * or in {@link PixelFormat} (and there is no possibility of collision).</p>
+ *
+ * @return an array of integer format
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ *
+ * @hide
+ */
+ public final int[] getInputFormats() {
+ return getPublicFormats(/*output*/false);
+ }
+
+ /**
+ * Get the supported input sizes for this input format.
+ *
+ * <p>The format must have come from {@link #getInputFormats}; otherwise
+ * {@code null} is returned.</p>
+ *
+ * @param format a format from {@link #getInputFormats}
+ * @return a non-empty array of sizes, or {@code null} if the format was not available.
+ *
+ * @hide
+ */
+ public Size[] getInputSizes(final int format) {
+ return getPublicFormatSizes(format, /*output*/false);
+ }
+
+ /**
+ * Determine whether or not output streams can be
+ * {@link CameraDevice#configureOutputs configured} with a particular user-defined format.
+ *
+ * <p>This method determines whether the output {@code format} is supported by the camera device;
+ * each output {@code surface} target may or may not itself support that {@code format}.
+ * Refer to the class which provides the surface for additional documentation.</p>
+ *
+ * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
+ * returned by {@link #getOutputSizes}.</p>
+ *
+ * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
+ * @return
+ * {@code true} iff using a {@code surface} with this {@code format} will be
+ * supported with {@link CameraDevice#configureOutputs}
+ *
+ * @throws IllegalArgumentException
+ * if the image format was not a defined named constant
+ * from either {@link ImageFormat} or {@link PixelFormat}
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ * @see CameraDevice#configureOutputs
+ */
+ public boolean isOutputSupportedFor(int format) {
+ checkArgumentFormat(format);
+
+ format = imageFormatToInternal(format);
+ return getFormatsMap(/*output*/true).containsKey(format);
+ }
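// Usage sketch (illustrative only; assumes a StreamConfigurationMap `configs` obtained via
// CameraCharacteristics#get as in the class javadoc): combining isOutputSupportedFor(int)
// with getOutputSizes(int).
static Size[] yuvOutputSizes(StreamConfigurationMap configs) {
    // Only formats defined in ImageFormat/PixelFormat are accepted; anything else throws
    // IllegalArgumentException via checkArgumentFormat.
    if (configs.isOutputSupportedFor(ImageFormat.YUV_420_888)) {
        // Guaranteed non-null for any format for which the check above returned true.
        return configs.getOutputSizes(ImageFormat.YUV_420_888);
    }
    return new Size[0];
}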
+
+ /**
+ * Determine whether or not output streams can be configured with a particular class
+ * as a consumer.
+ *
+ * <p>The following list is generally usable for outputs:
+ * <ul>
+ * <li>{@link android.media.ImageReader} -
+ * Recommended for image processing or streaming to external resources (such as a file or
+ * network)
+ * <li>{@link android.media.MediaRecorder} -
+ * Recommended for recording video (simple to use)
+ * <li>{@link android.media.MediaCodec} -
+ * Recommended for recording video (more complicated to use, with more flexibility)
+ * <li>{@link android.renderscript.Allocation} -
+ * Recommended for image processing with {@link android.renderscript RenderScript}
+ * <li>{@link android.view.SurfaceHolder} -
+ * Recommended for low-power camera preview with {@link android.view.SurfaceView}
+ * <li>{@link android.graphics.SurfaceTexture} -
+ * Recommended for OpenGL-accelerated preview processing or compositing with
+ * {@link android.view.TextureView}
+ * </ul>
+ * </p>
+ *
+ * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
+ * provide a producer endpoint that is suitable to be used with
+ * {@link CameraDevice#configureOutputs}.</p>
+ *
+ * <p>Since not all of the above classes support output of all format and size combinations,
+ * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
+ *
+ * @param klass a non-{@code null} {@link Class} object reference
+ * @return {@code true} if this class is supported as an output, {@code false} otherwise
+ *
+ * @throws NullPointerException if {@code klass} was {@code null}
+ *
+ * @see CameraDevice#configureOutputs
+ * @see #isOutputSupportedFor(Surface)
+ */
+ public static <T> boolean isOutputSupportedFor(Class<T> klass) {
+ checkNotNull(klass, "klass must not be null");
+
+ if (klass == android.media.ImageReader.class) {
+ return true;
+ } else if (klass == android.media.MediaRecorder.class) {
+ return true;
+ } else if (klass == android.media.MediaCodec.class) {
+ return true;
+ } else if (klass == android.renderscript.Allocation.class) {
+ return true;
+ } else if (klass == android.view.SurfaceHolder.class) {
+ return true;
+ } else if (klass == android.graphics.SurfaceTexture.class) {
+ return true;
+ }
+
+ return false;
+ }
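// Usage sketch (illustrative only; the helper name is hypothetical): the class-level check only
// says that the class can produce a Surface compatible with the camera device in general; the
// concrete Surface must still be validated with isOutputSupportedFor(Surface).
static boolean previewAndProcessingClassesSupported() {
    return StreamConfigurationMap.isOutputSupportedFor(android.graphics.SurfaceTexture.class)
            && StreamConfigurationMap.isOutputSupportedFor(android.media.ImageReader.class);
}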
+
+ /**
+ * Determine whether or not the {@code surface} in its current state is suitable to be
+ * {@link CameraDevice#configureOutputs configured} as an output.
+ *
+ * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
+ * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
+ * compatible with the {@link CameraDevice} in general
+ * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
+ * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
+ *
+ * <p>Reasons for a {@code surface} being specifically incompatible might be:
+ * <ul>
+ * <li>Using a format that's not listed by {@link #getOutputFormats}
+ * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
+ * <li>The {@code surface} itself is not in a state where it can service a new producer.</li>
+ * </ul>
+ *
+ * <p>This is not an exhaustive list; see the particular class's documentation for further
+ * possible reasons of incompatibility.</p>
+ *
+ * @param surface a non-{@code null} {@link Surface} object reference
+ * @return {@code true} if this is supported, {@code false} otherwise
+ *
+ * @throws NullPointerException if {@code surface} was {@code null}
+ *
+ * @see CameraDevice#configureOutputs
+ * @see #isOutputSupportedFor(Class)
+ */
+ public boolean isOutputSupportedFor(Surface surface) {
+ checkNotNull(surface, "surface must not be null");
+
+ throw new UnsupportedOperationException("Not implemented yet");
+
+ // TODO: JNI function that checks the Surface's IGraphicBufferProducer state
+ }
+
+ /**
+ * Get a list of sizes compatible with {@code klass} to use as an output.
+ *
+ * <p>Some of the supported classes may support additional formats beyond
+ * an opaque/implementation-defined (under-the-hood) format; this function only returns
+ * sizes for the implementation-defined format.</p>
+ *
+ * <p>Some classes such as {@link android.media.ImageReader} may only support user-defined
+ * formats; in particular {@link #isOutputSupportedFor(Class)} will return {@code true} for
+ * that class and this method will return an empty array (but not {@code null}).</p>
+ *
+ * <p>If a well-defined format such as {@code NV21} is required, use
+ * {@link #getOutputSizes(int)} instead.</p>
+ *
+ * <p>The {@code klass} should be a supported output, i.e. querying
+ * {@link #isOutputSupportedFor(Class)} should return {@code true}.</p>
+ *
+ * @param klass
+ * a non-{@code null} {@link Class} object reference
+ * @return
+ * an array of supported sizes for implementation-defined formats,
+ * or {@code null} iff the {@code klass} is not a supported output
+ *
+ * @throws NullPointerException if {@code klass} was {@code null}
+ *
+ * @see #isOutputSupportedFor(Class)
+ */
+ public <T> Size[] getOutputSizes(Class<T> klass) {
+ if (isOutputSupportedFor(klass) == false) {
+ return null;
+ }
+
+ return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, /*output*/true);
+ }
+
+ /**
+ * Get a list of sizes compatible with the requested image {@code format}.
+ *
+ * <p>The {@code format} should be a supported format (one of the formats returned by
+ * {@link #getOutputFormats}).</p>
+ *
+ * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+ * @return
+ * an array of supported sizes,
+ * or {@code null} if the {@code format} is not a supported output
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ * @see #getOutputFormats
+ */
+ public Size[] getOutputSizes(int format) {
+ return getPublicFormatSizes(format, /*output*/true);
+ }
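// Usage sketch (illustrative only; assumes a StreamConfigurationMap `configs`): picking the
// largest available JPEG output size, e.g. when configuring a still-capture stream.
static Size largestJpegOutputSize(StreamConfigurationMap configs) {
    Size[] jpegSizes = configs.getOutputSizes(ImageFormat.JPEG); // null if JPEG is unsupported
    if (jpegSizes == null || jpegSizes.length == 0) {
        return null;
    }
    Size largest = jpegSizes[0];
    for (Size candidate : jpegSizes) {
        long candidateArea = (long) candidate.getWidth() * candidate.getHeight();
        long largestArea = (long) largest.getWidth() * largest.getHeight();
        if (candidateArea > largestArea) {
            largest = candidate;
        }
    }
    return largest;
}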
+
+ /**
+ * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
+ * for the format/size combination (in nanoseconds).
+ *
+ * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(int)}.</p>
+ *
+ * <p>This should correspond to the frame duration when only that stream is active, with all
+ * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
+ * </p>
+ *
+ * <p>When multiple streams are used in a request, the minimum frame duration will be
+ * {@code max(individual stream min durations)}.</p>
+ *
+ * <!--
+ * TODO: uncomment after adding input stream support
+ * <p>The minimum frame duration of a stream (of a particular format, size) is the same
+ * regardless of whether the stream is input or output.</p>
+ * -->
+ *
+ * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+ * @param size an output-compatible size
+ * @return a minimum frame duration {@code >=} 0 in nanoseconds
+ *
+ * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see #getOutputStallDuration(int, Size)
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public long getOutputMinFrameDuration(int format, Size size) {
+ checkNotNull(size, "size must not be null");
+ checkArgumentFormatSupported(format, /*output*/true);
+
+ return getInternalFormatDuration(imageFormatToInternal(format), size, DURATION_MIN_FRAME);
+ }
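// Sketch (illustrative only; assumes `configs` and an output-compatible `size`): converting a
// single stream's minimum frame duration into an upper bound on its frame rate. For a request
// with several streams, the rule above applies: take the max of the individual minimum durations.
static double maxSingleStreamFps(StreamConfigurationMap configs, int format, Size size) {
    long minFrameDurationNs = configs.getOutputMinFrameDuration(format, size);
    if (minFrameDurationNs <= 0) {
        // Defensive guard; the exact meaning of a 0 duration is not spelled out here.
        return Double.POSITIVE_INFINITY;
    }
    return 1e9 / minFrameDurationNs;
}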
+
+ /**
+ * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
+ * for the class/size combination (in nanoseconds).
+ *
+ * <p>This assumes that the {@code klass} is set up to use an implementation-defined format.
+ * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
+ *
+ * <p>{@code klass} should be one of the ones which is supported by
+ * {@link #isOutputSupportedFor(Class)}.</p>
+ *
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(int)}.</p>
+ *
+ * <p>This should correspond to the frame duration when only that stream is active, with all
+ * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
+ * </p>
+ *
+ * <p>When multiple streams are used in a request, the minimum frame duration will be
+ * {@code max(individual stream min durations)}.</p>
+ *
+ * <!--
+ * TODO: uncomment after adding input stream support
+ * <p>The minimum frame duration of a stream (of a particular format, size) is the same
+ * regardless of whether the stream is input or output.</p>
+ * -->
+ *
+ * @param klass
+ * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
+ * non-empty array returned by {@link #getOutputSizes(Class)}
+ * @param size an output-compatible size
+ * @return a minimum frame duration {@code >=} 0 in nanoseconds
+ *
+ * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
+ if (!isOutputSupportedFor(klass)) {
+ throw new IllegalArgumentException("klass was not supported");
+ }
+
+ return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ size, DURATION_MIN_FRAME);
+ }
+
+ /**
+ * Get the stall duration for the format/size combination (in nanoseconds).
+ *
+ * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(int)}.</p>
+ *
+ * <p>
+ * A stall duration is how much extra time would get added to the normal minimum frame duration
+ * for a repeating request that has streams with non-zero stall.
+ *
+ * <p>For example, consider JPEG captures which have the following characteristics:
+ *
+ * <ul>
+ * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
+ * in requests in which they are directly referenced, they act as JPEG streams.
+ * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
+ * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
+ * requests that actually reference a JPEG stream.
+ * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
+ * process more than 1 capture at a time.
+ * </ul>
+ *
+ * <p>In other words, using a repeating YUV request would result in a steady frame rate
+ * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
+ * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
+ * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
+ * 30 FPS.</p>
+ *
+ * <p>In general, submitting a new request with a non-0 stall time stream will <em>not</em> cause a
+ * frame rate drop unless there are still outstanding buffers for that stream from previous
+ * requests.</p>
+ *
+ * <p>Submitting a repeating request that references a set of streams (call this {@code S}) is the
+ * same as setting the minimum frame duration to the normal minimum frame duration corresponding
+ * to {@code S}, plus the maximum stall duration for {@code S}.</p>
+ *
+ * <p>If interleaving requests with and without a stall duration, a request will stall by the
+ * maximum of the remaining times for each can-stall stream with outstanding buffers.</p>
+ *
+ * <p>This means that a stalling request will not have an exposure start until the stall has
+ * completed.</p>
+ *
+ * <p>This should correspond to the stall duration when only that stream is active, with all
+ * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
+ * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
+ * indeterminate stall duration for all streams in a request (the regular stall calculation
+ * rules are ignored).</p>
+ *
+ * <p>The following formats may always have a stall duration:
+ * <ul>
+ * <li>{@link ImageFormat#JPEG JPEG}
+ * <li>{@link ImageFormat#RAW_SENSOR RAW16}
+ * </ul>
+ * </p>
+ *
+ * <p>The following formats will never have a stall duration:
+ * <ul>
+ * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
+ * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
+ * </ul></p>
+ *
+ * <p>All other formats may or may not have an allowed stall duration on a per-capability basis;
+ * refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * android.request.availableCapabilities} for more details.</p>
+ *
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
+ * for more information about calculating the max frame rate (absent stalls).</p>
+ *
+ * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+ * @param size an output-compatible size
+ * @return a stall duration {@code >=} 0 in nanoseconds
+ *
+ * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public long getOutputStallDuration(int format, Size size) {
+ checkArgumentFormatSupported(format, /*output*/true);
+
+ return getInternalFormatDuration(imageFormatToInternal(format),
+ size, DURATION_STALL);
+ }
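// Worked sketch of the rule above (illustrative only; assumes `configs` plus YUV/JPEG sizes taken
// from getOutputSizes): for a repeating YUV + JPEG request, the effective frame duration is the
// maximum of the per-stream minimum durations plus the JPEG stall, since YUV_420_888 never stalls.
static long repeatingYuvPlusJpegFrameDurationNs(StreamConfigurationMap configs,
        Size yuvSize, Size jpegSize) {
    long yuvMinNs = configs.getOutputMinFrameDuration(ImageFormat.YUV_420_888, yuvSize);
    long jpegMinNs = configs.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize);
    long jpegStallNs = configs.getOutputStallDuration(ImageFormat.JPEG, jpegSize);
    return Math.max(yuvMinNs, jpegMinNs) + jpegStallNs;
}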
+
+ /**
+ * Get the stall duration for the class/size combination (in nanoseconds).
+ *
+ * <p>This assumes that the {@code klass} is set up to use an implementation-defined format.
+ * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
+ *
+ * <p>{@code klass} should be one of the ones with a non-empty array returned by
+ * {@link #getOutputSizes(Class)}.</p>
+ *
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(Class)}.</p>
+ *
+ * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
+ * <em>stall duration</em>.</p>
+ *
+ * @param klass
+ * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
+ * non-empty array returned by {@link #getOutputSizes(Class)}
+ * @param size an output-compatible size
+ * @return a stall duration {@code >=} 0 in nanoseconds
+ *
+ * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
+ if (!isOutputSupportedFor(klass)) {
+ throw new IllegalArgumentException("klass was not supported");
+ }
+
+ return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ size, DURATION_STALL);
+ }
+
+ /**
+ * Check if this {@link StreamConfigurationMap} is equal to another
+ * {@link StreamConfigurationMap}.
+ *
+ * <p>Two maps are equal if and only if each of their respective elements is equal.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof StreamConfigurationMap) {
+ final StreamConfigurationMap other = (StreamConfigurationMap) obj;
+ // XX: do we care about order?
+ return Arrays.equals(mConfigurations, other.mConfigurations) &&
+ Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
+ Arrays.equals(mStallDurations, other.mStallDurations);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ // XX: do we care about order?
+ return HashCodeHelpers.hashCode(mConfigurations, mMinFrameDurations, mStallDurations);
+ }
+
+ // Check that the argument is supported by #getOutputFormats or #getInputFormats
+ private int checkArgumentFormatSupported(int format, boolean output) {
+ checkArgumentFormat(format);
+
+ int[] formats = output ? getOutputFormats() : getInputFormats();
+ for (int i = 0; i < formats.length; ++i) {
+ if (format == formats[i]) {
+ return format;
+ }
+ }
+
+ throw new IllegalArgumentException(String.format(
+ "format %x is not supported by this stream configuration map", format));
+ }
+
+ /**
+ * Ensures that the format is either user-defined or implementation defined.
+ *
+ * <p>If a format has a different internal representation than the public representation,
+ * passing in the public representation here will fail.</p>
+ *
+ * <p>For example, {@link ImageFormat#JPEG} has a different internal representation
+ * ({@code HAL_PIXEL_FORMAT_BLOB}), so passing {@code ImageFormat.JPEG} to this check
+ * will fail.</p>
+ *
+ * <p>Any invalid/undefined formats will raise an exception.</p>
+ *
+ * @param format image format
+ * @return the format
+ *
+ * @throws IllegalArgumentException if the format was invalid
+ */
+ static int checkArgumentFormatInternal(int format) {
+ switch (format) {
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ case HAL_PIXEL_FORMAT_BLOB:
+ return format;
+ case ImageFormat.JPEG:
+ throw new IllegalArgumentException(
+ "ImageFormat.JPEG is an unknown internal format");
+ default:
+ return checkArgumentFormat(format);
+ }
+ }
+
+ /**
+ * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
+ *
+ * <p>If a format has a different public representation than the internal representation,
+ * passing in the internal representation here will fail.</p>
+ *
+ * <p>For example, {@code HAL_PIXEL_FORMAT_BLOB} is an internal representation whose public
+ * equivalent is {@link ImageFormat#JPEG}, so passing {@code HAL_PIXEL_FORMAT_BLOB} to this
+ * check will fail.</p>
+ *
+ * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
+ * </p>
+ *
+ * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
+ *
+ * @param format image format
+ * @return the format
+ *
+ * @throws IllegalArgumentException if the format was not user-defined
+ */
+ static int checkArgumentFormat(int format) {
+ // TODO: remove this hack; CTS shouldn't have been using internal constants
+ if (format == HAL_PIXEL_FORMAT_RAW_OPAQUE) {
+ Log.w(TAG, "RAW_OPAQUE is not yet a published format; allowing it anyway");
+ return format;
+ }
+
+ if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
+ throw new IllegalArgumentException(String.format(
+ "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
+ }
+
+ return format;
+ }
+
+ /**
+ * Convert an internal format compatible with {@code graphics.h} into a publicly-visible
+ * {@code ImageFormat}/{@code PixelFormat} constant.
+ *
+ * <p>In particular these formats are converted:
+ * <ul>
+ * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG
+ * </ul>
+ * </p>
+ *
+ * <p>Passing in an implementation-defined format which has no public equivalent will fail;
+ * as will passing in a public format which has a different internal format equivalent.
+ * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+ *
+ * <p>All other formats are returned as-is, no further invalid check is performed.</p>
+ *
+ * <p>This function is the dual of {@link #imageFormatToInternal}.</p>
+ *
+ * @param format an internal image format compatible with {@code graphics.h}
+ * @return the converted public image format
+ *
+ * @throws IllegalArgumentException
+ * if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
+ * {@link ImageFormat#JPEG}
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ * @see #checkArgumentFormat
+ */
+ static int imageFormatToPublic(int format) {
+ switch (format) {
+ case HAL_PIXEL_FORMAT_BLOB:
+ return ImageFormat.JPEG;
+ case ImageFormat.JPEG:
+ throw new IllegalArgumentException(
+ "ImageFormat.JPEG is an unknown internal format");
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ throw new IllegalArgumentException(
+ "IMPLEMENTATION_DEFINED must not leak to public API");
+ default:
+ return format;
+ }
+ }
+
+ /**
+ * Convert image formats from internal to public formats (in-place).
+ *
+ * @param formats an array of image formats
+ * @return {@code formats}
+ *
+ * @see #imageFormatToPublic
+ */
+ static int[] imageFormatToPublic(int[] formats) {
+ if (formats == null) {
+ return null;
+ }
+
+ for (int i = 0; i < formats.length; ++i) {
+ formats[i] = imageFormatToPublic(formats[i]);
+ }
+
+ return formats;
+ }
+
+ /**
+ * Convert a public format compatible with {@code ImageFormat} to an internal format
+ * from {@code graphics.h}.
+ *
+ * <p>In particular these formats are converted:
+ * <ul>
+ * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
+ * </ul>
+ * </p>
+ *
+ * <p>Passing in an implementation-defined format here will fail (it's not a public format);
+ * as will passing in an internal format which has a different public format equivalent.
+ * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+ *
+ * <p>All other formats are returned as-is; no validity check is performed.</p>
+ *
+ * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
+ *
+ * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
+ * @return the converted internal image format
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ *
+ * @throws IllegalArgumentException
+ * if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
+ */
+ static int imageFormatToInternal(int format) {
+ switch (format) {
+ case ImageFormat.JPEG:
+ return HAL_PIXEL_FORMAT_BLOB;
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ throw new IllegalArgumentException(
+ "IMPLEMENTATION_DEFINED is not allowed via public API");
+ default:
+ return format;
+ }
+ }
+
+ /**
+ * Convert image formats from public to internal formats (in-place).
+ *
+ * @param formats an array of image formats
+ * @return {@code formats}
+ *
+ * @see #imageFormatToInternal
+ *
+ * @hide
+ */
+ public static int[] imageFormatToInternal(int[] formats) {
+ if (formats == null) {
+ return null;
+ }
+
+ for (int i = 0; i < formats.length; ++i) {
+ formats[i] = imageFormatToInternal(formats[i]);
+ }
+
+ return formats;
+ }
+
+ private Size[] getPublicFormatSizes(int format, boolean output) {
+ try {
+ checkArgumentFormatSupported(format, output);
+ } catch (IllegalArgumentException e) {
+ return null;
+ }
+
+ format = imageFormatToInternal(format);
+
+ return getInternalFormatSizes(format, output);
+ }
+
+ private Size[] getInternalFormatSizes(int format, boolean output) {
+ HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
+
+ Integer sizesCount = formatsMap.get(format);
+ if (sizesCount == null) {
+ throw new IllegalArgumentException("format not available");
+ }
+
+ int len = sizesCount;
+ Size[] sizes = new Size[len];
+ int sizeIndex = 0;
+
+ for (StreamConfiguration config : mConfigurations) {
+ if (config.getFormat() == format && config.isOutput() == output) {
+ sizes[sizeIndex++] = config.getSize();
+ }
+ }
+
+ if (sizeIndex != len) {
+ throw new AssertionError(
+ "Too few sizes (expected " + len + ", actual " + sizeIndex + ")");
+ }
+
+ return sizes;
+ }
+
+ /** Get the list of publicly visible output formats; does not include IMPL_DEFINED */
+ private int[] getPublicFormats(boolean output) {
+ int[] formats = new int[getPublicFormatCount(output)];
+
+ int i = 0;
+
+ for (int format : getFormatsMap(output).keySet()) {
+ if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ formats[i++] = format;
+ }
+ }
+
+ if (formats.length != i) {
+ throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
+ }
+
+ return imageFormatToPublic(formats);
+ }
+
+ /** Get the format -> size count map for either output or input formats */
+ private HashMap<Integer, Integer> getFormatsMap(boolean output) {
+ return output ? mOutputFormats : mInputFormats;
+ }
+
+ private long getInternalFormatDuration(int format, Size size, int duration) {
+ // assume format is already checked, since it's internal
+
+ if (!arrayContains(getInternalFormatSizes(format, /*output*/true), size)) {
+ throw new IllegalArgumentException("size was not supported");
+ }
+
+ StreamConfigurationDuration[] durations = getDurations(duration);
+
+ for (StreamConfigurationDuration configurationDuration : durations) {
+ if (configurationDuration.getFormat() == format &&
+ configurationDuration.getWidth() == size.getWidth() &&
+ configurationDuration.getHeight() == size.getHeight()) {
+ return configurationDuration.getDuration();
+ }
+ }
+
+ return getDurationDefault(duration);
+ }
+
+ /**
+ * Get the durations array for the given kind of duration.
+ *
+ * @see #DURATION_MIN_FRAME
+ * @see #DURATION_STALL
+ */
+ private StreamConfigurationDuration[] getDurations(int duration) {
+ switch (duration) {
+ case DURATION_MIN_FRAME:
+ return mMinFrameDurations;
+ case DURATION_STALL:
+ return mStallDurations;
+ default:
+ throw new IllegalArgumentException("duration was invalid");
+ }
+ }
+
+ private long getDurationDefault(int duration) {
+ switch (duration) {
+ case DURATION_MIN_FRAME:
+ throw new AssertionError("Minimum frame durations are required to be listed");
+ case DURATION_STALL:
+ return 0L; // OK. A lack of a stall duration implies a 0 stall duration
+ default:
+ throw new IllegalArgumentException("duration was invalid");
+ }
+ }
+
+ /** Count the number of publicly-visible output or input formats */
+ private int getPublicFormatCount(boolean output) {
+ HashMap<Integer, Integer> formatsMap = getFormatsMap(output);
+
+ int size = formatsMap.size();
+ if (formatsMap.containsKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
+ size -= 1;
+ }
+ return size;
+ }
+
+ private static <T> boolean arrayContains(T[] array, T element) {
+ if (array == null) {
+ return false;
+ }
+
+ for (T el : array) {
+ if (Objects.equals(el, element)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ // from system/core/include/system/graphics.h
+ private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
+ private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
+ private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
+
+ /**
+ * @see #getDurations(int)
+ * @see #getDurationDefault(int)
+ */
+ private static final int DURATION_MIN_FRAME = 0;
+ private static final int DURATION_STALL = 1;
+
+ private final StreamConfiguration[] mConfigurations;
+ private final StreamConfigurationDuration[] mMinFrameDurations;
+ private final StreamConfigurationDuration[] mStallDurations;
+
+ /** ImageFormat -> num output sizes mapping */
+ private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mOutputFormats =
+ new HashMap<Integer, Integer>();
+ /** ImageFormat -> num input sizes mapping */
+ private final HashMap</*ImageFormat*/Integer, /*Count*/Integer> mInputFormats =
+ new HashMap<Integer, Integer>();
+
+}
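
A minimal sketch of the internal/public format mapping that the helpers above implement, assuming JPEG/BLOB remains the only remapped pair; the stand-in methods below are illustrative and are not the package-private helpers themselves.

import android.graphics.ImageFormat;

public final class FormatRoundTripSketch {
    // Mirror of the graphics.h constant used in the patch above.
    private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;

    public static void main(String[] args) {
        // Public -> internal: JPEG maps to the BLOB container format.
        int internal = toInternal(ImageFormat.JPEG);   // 0x21
        // Internal -> public: BLOB maps back to JPEG.
        int publicFmt = toPublic(internal);            // ImageFormat.JPEG
        System.out.printf("0x%x -> 0x%x%n", internal, publicFmt);
    }

    // Stand-ins for the package-private conversion helpers.
    static int toInternal(int format) {
        return (format == ImageFormat.JPEG) ? HAL_PIXEL_FORMAT_BLOB : format;
    }

    static int toPublic(int format) {
        return (format == HAL_PIXEL_FORMAT_BLOB) ? ImageFormat.JPEG : format;
    }
}
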
diff --git a/core/java/android/hardware/camera2/LongParcelable.aidl b/core/java/android/hardware/camera2/utils/LongParcelable.aidl
index 7d7e51b..98ad1b2 100644
--- a/core/java/android/hardware/camera2/LongParcelable.aidl
+++ b/core/java/android/hardware/camera2/utils/LongParcelable.aidl
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package android.hardware.camera2;
+package android.hardware.camera2.utils;
/** @hide */
-parcelable LongParcelable; \ No newline at end of file
+parcelable LongParcelable;
diff --git a/core/java/android/hardware/camera2/LongParcelable.java b/core/java/android/hardware/camera2/utils/LongParcelable.java
index 97b0631..c89b339 100644
--- a/core/java/android/hardware/camera2/LongParcelable.java
+++ b/core/java/android/hardware/camera2/utils/LongParcelable.java
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package android.hardware.camera2;
+package android.hardware.camera2.utils;
import android.os.Parcel;
import android.os.Parcelable;
diff --git a/core/java/android/view/RenderNodeAnimator.java b/core/java/android/view/RenderNodeAnimator.java
index ec4d560..f14e73f 100644
--- a/core/java/android/view/RenderNodeAnimator.java
+++ b/core/java/android/view/RenderNodeAnimator.java
@@ -21,7 +21,6 @@ import android.graphics.Canvas;
import android.graphics.CanvasProperty;
import android.graphics.Paint;
import android.util.SparseIntArray;
-import android.util.TimeUtils;
import com.android.internal.util.VirtualRefBasePtr;
import com.android.internal.view.animation.FallbackLUTInterpolator;
@@ -72,10 +71,6 @@ public final class RenderNodeAnimator {
put(ViewPropertyAnimator.ALPHA, ALPHA);
}};
- // Keep in sync DeltaValueType in Animator.h
- public static final int DELTA_TYPE_ABSOLUTE = 0;
- public static final int DELTA_TYPE_DELTA = 1;
-
private VirtualRefBasePtr mNativePtr;
private RenderNode mTarget;
@@ -86,22 +81,21 @@ public final class RenderNodeAnimator {
return sViewPropertyAnimatorMap.get(viewProperty);
}
- public RenderNodeAnimator(int property, int deltaType, float deltaValue) {
+ public RenderNodeAnimator(int property, float finalValue) {
init(nCreateAnimator(new WeakReference<RenderNodeAnimator>(this),
- property, deltaType, deltaValue));
+ property, finalValue));
}
- public RenderNodeAnimator(CanvasProperty<Float> property, int deltaType, float deltaValue) {
+ public RenderNodeAnimator(CanvasProperty<Float> property, float finalValue) {
init(nCreateCanvasPropertyFloatAnimator(
new WeakReference<RenderNodeAnimator>(this),
- property.getNativeContainer(), deltaType, deltaValue));
+ property.getNativeContainer(), finalValue));
}
- public RenderNodeAnimator(CanvasProperty<Paint> property, int paintField,
- int deltaType, float deltaValue) {
+ public RenderNodeAnimator(CanvasProperty<Paint> property, int paintField, float finalValue) {
init(nCreateCanvasPropertyPaintAnimator(
new WeakReference<RenderNodeAnimator>(this),
- property.getNativeContainer(), paintField, deltaType, deltaValue));
+ property.getNativeContainer(), paintField, finalValue));
}
private void init(long ptr) {
@@ -182,11 +176,11 @@ public final class RenderNodeAnimator {
}
private static native long nCreateAnimator(WeakReference<RenderNodeAnimator> weakThis,
- int property, int deltaValueType, float deltaValue);
+ int property, float deltaValue);
private static native long nCreateCanvasPropertyFloatAnimator(WeakReference<RenderNodeAnimator> weakThis,
- long canvasProperty, int deltaValueType, float deltaValue);
+ long canvasProperty, float deltaValue);
private static native long nCreateCanvasPropertyPaintAnimator(WeakReference<RenderNodeAnimator> weakThis,
- long canvasProperty, int paintField, int deltaValueType, float deltaValue);
+ long canvasProperty, int paintField, float deltaValue);
private static native void nSetDuration(long nativePtr, int duration);
private static native int nGetDuration(long nativePtr);
private static native void nSetInterpolator(long animPtr, long interpolatorPtr);
diff --git a/core/java/android/view/View.java b/core/java/android/view/View.java
index 2008f9e..dc9a340 100644
--- a/core/java/android/view/View.java
+++ b/core/java/android/view/View.java
@@ -7154,6 +7154,9 @@ public class View implements Drawable.Callback, KeyEvent.Callback,
if (viewRootImpl != null) {
viewRootImpl.setAccessibilityFocus(this, null);
}
+ Rect rect = (mAttachInfo != null) ? mAttachInfo.mTmpInvalRect : new Rect();
+ getDrawingRect(rect);
+ requestRectangleOnScreen(rect, false);
invalidate();
sendAccessibilityEvent(AccessibilityEvent.TYPE_VIEW_ACCESSIBILITY_FOCUSED);
return true;
diff --git a/core/java/android/view/accessibility/AccessibilityNodeInfo.java b/core/java/android/view/accessibility/AccessibilityNodeInfo.java
index 9d10930..34967df 100644
--- a/core/java/android/view/accessibility/AccessibilityNodeInfo.java
+++ b/core/java/android/view/accessibility/AccessibilityNodeInfo.java
@@ -17,15 +17,19 @@
package android.view.accessibility;
import android.accessibilityservice.AccessibilityServiceInfo;
+import android.annotation.Nullable;
import android.graphics.Rect;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.InputType;
+import android.text.TextUtils;
+import android.util.ArraySet;
import android.util.LongArray;
import android.util.Pools.SynchronizedPool;
import android.view.View;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
@@ -112,7 +116,7 @@ public class AccessibilityNodeInfo implements Parcelable {
public static final int ACTION_SELECT = 0x00000004;
/**
- * Action that unselects the node.
+ * Action that deselects the node.
*/
public static final int ACTION_CLEAR_SELECTION = 0x00000008;
@@ -307,6 +311,18 @@ public class AccessibilityNodeInfo implements Parcelable {
*/
public static final int ACTION_SET_TEXT = 0x00200000;
+ private static final int LAST_LEGACY_STANDARD_ACTION = ACTION_SET_TEXT;
+
+ /**
+ * Mask to see if the value is larger than the largest ACTION_ constant
+ */
+ private static final int ACTION_TYPE_MASK = 0xFF000000;
+
+ /**
+ * Mask that identifies standard, non-legacy actions.
+ */
+ private static final int STANDARD_NON_LEGACY_ACTION_MASK = 0x01000000;
+
// Action arguments
/**
@@ -548,7 +564,7 @@ public class AccessibilityNodeInfo implements Parcelable {
private String mViewIdResourceName;
private LongArray mChildNodeIds;
- private int mActions;
+ private ArrayList<AccessibilityAction> mActions;
private int mMovementGranularities;
@@ -875,6 +891,17 @@ public class AccessibilityNodeInfo implements Parcelable {
/**
* Gets the actions that can be performed on the node.
+ */
+ public List<AccessibilityAction> getActionList() {
+ if (mActions == null) {
+ return Collections.emptyList();
+ }
+
+ return mActions;
+ }
+
+ /**
+ * Gets the actions that can be performed on the node.
*
* @return The bit mask of with actions.
*
@@ -892,9 +919,61 @@ public class AccessibilityNodeInfo implements Parcelable {
* @see AccessibilityNodeInfo#ACTION_PREVIOUS_HTML_ELEMENT
* @see AccessibilityNodeInfo#ACTION_SCROLL_FORWARD
* @see AccessibilityNodeInfo#ACTION_SCROLL_BACKWARD
+ *
+ * @deprecated Use {@link #getActionList()}.
*/
+ @Deprecated
public int getActions() {
- return mActions;
+ int returnValue = 0;
+
+ if (mActions == null) {
+ return returnValue;
+ }
+
+ final int actionSize = mActions.size();
+ for (int i = 0; i < actionSize; i++) {
+ int actionId = mActions.get(i).getId();
+ if (actionId <= LAST_LEGACY_STANDARD_ACTION) {
+ returnValue |= actionId;
+ }
+ }
+
+ return returnValue;
+ }
+
+ /**
+ * Adds an action that can be performed on the node.
+ * <p>
+ * To add a standard action use the static constants on {@link AccessibilityAction}.
+ * To add a custom action create a new {@link AccessibilityAction} by passing in a
+ * resource id from your application as the action id and an optional label that
+ * describes the action. To override one of the standard actions, use the action
+ * id of that standard action, such as {@link #ACTION_CLICK}, and an optional label
+ * that describes the purpose of the action.
+ * </p>
+ * <p>
+ * <strong>Note:</strong> Cannot be called from an
+ * {@link android.accessibilityservice.AccessibilityService}.
+ * This class is made immutable before being delivered to an AccessibilityService.
+ * </p>
+ *
+ * @param action The action.
+ *
+ * @throws IllegalStateException If called from an AccessibilityService.
+ */
+ public void addAction(AccessibilityAction action) {
+ enforceNotSealed();
+
+ if (action == null) {
+ return;
+ }
+
+ if (mActions == null) {
+ mActions = new ArrayList<AccessibilityAction>();
+ }
+
+ mActions.remove(action);
+ mActions.add(action);
}
/**
@@ -908,10 +987,21 @@ public class AccessibilityNodeInfo implements Parcelable {
* @param action The action.
*
* @throws IllegalStateException If called from an AccessibilityService.
+ * @throws IllegalArgumentException If the argument is not one of the standard actions.
+ *
+ * @deprecated Use {@link #addAction(AccessibilityAction)} instead.
*/
+ @Deprecated
public void addAction(int action) {
enforceNotSealed();
- mActions |= action;
+
+ AccessibilityAction newAction = getActionSingleton(action);
+ if (newAction == null) {
+ // This means it is not one of the standard actions
+ throw new IllegalArgumentException("Argument is not one of the standard actions");
+ }
+
+ addAction(newAction);
}
/**
@@ -923,13 +1013,40 @@ public class AccessibilityNodeInfo implements Parcelable {
* This class is made immutable before being delivered to an AccessibilityService.
* </p>
*
- * @param action The action.
+ * @param action The action to be removed.
*
* @throws IllegalStateException If called from an AccessibilityService.
+ * @deprecated Use {@link #removeAction(AccessibilityAction)}
*/
+ @Deprecated
public void removeAction(int action) {
enforceNotSealed();
- mActions &= ~action;
+
+ removeAction(getActionSingleton(action));
+ }
+
+ /**
+ * Removes an action that can be performed on the node. If the action was
+ * not already added to the node, calling this method has no effect.
+ * <p>
+ * <strong>Note:</strong> Cannot be called from an
+ * {@link android.accessibilityservice.AccessibilityService}.
+ * This class is made immutable before being delivered to an AccessibilityService.
+ * </p>
+ *
+ * @param action The action to be removed.
+ * @return Whether the action was present and removed from the list of actions.
+ *
+ * @throws IllegalStateException If called from an AccessibilityService.
+ */
+ public boolean removeAction(AccessibilityAction action) {
+ enforceNotSealed();
+
+ if (mActions == null || action == null) {
+ return false;
+ }
+
+ return mActions.remove(action);
}
/**
@@ -2307,7 +2424,29 @@ public class AccessibilityNodeInfo implements Parcelable {
parcel.writeInt(mBoundsInScreen.left);
parcel.writeInt(mBoundsInScreen.right);
- parcel.writeInt(mActions);
+ if (mActions != null && !mActions.isEmpty()) {
+ final int actionCount = mActions.size();
+ parcel.writeInt(actionCount);
+
+ int defaultLegacyStandardActions = 0;
+ for (int i = 0; i < actionCount; i++) {
+ AccessibilityAction action = mActions.get(i);
+ if (isDefaultLegacyStandardAction(action)) {
+ defaultLegacyStandardActions |= action.getId();
+ }
+ }
+ parcel.writeInt(defaultLegacyStandardActions);
+
+ for (int i = 0; i < actionCount; i++) {
+ AccessibilityAction action = mActions.get(i);
+ if (!isDefaultLegacyStandardAction(action)) {
+ parcel.writeInt(action.getId());
+ parcel.writeCharSequence(action.getLabel());
+ }
+ }
+ } else {
+ parcel.writeInt(0);
+ }
parcel.writeInt(mMovementGranularities);
@@ -2388,7 +2527,17 @@ public class AccessibilityNodeInfo implements Parcelable {
mText = other.mText;
mContentDescription = other.mContentDescription;
mViewIdResourceName = other.mViewIdResourceName;
- mActions= other.mActions;
+
+ final ArrayList<AccessibilityAction> otherActions = other.mActions;
+ if (otherActions != null && otherActions.size() > 0) {
+ if (mActions == null) {
+ mActions = new ArrayList<AccessibilityAction>(otherActions);
+ } else {
+ mActions.clear();
+ mActions.addAll(other.mActions);
+ }
+ }
+
mBooleanProperties = other.mBooleanProperties;
mMovementGranularities = other.mMovementGranularities;
@@ -2452,7 +2601,17 @@ public class AccessibilityNodeInfo implements Parcelable {
mBoundsInScreen.left = parcel.readInt();
mBoundsInScreen.right = parcel.readInt();
- mActions = parcel.readInt();
+ final int actionCount = parcel.readInt();
+ if (actionCount > 0) {
+ final int legacyStandardActions = parcel.readInt();
+ addLegacyStandardActions(legacyStandardActions);
+ final int nonLegacyActionCount = actionCount - Integer.bitCount(legacyStandardActions);
+ for (int i = 0; i < nonLegacyActionCount; i++) {
+ AccessibilityAction action = new AccessibilityAction(
+ parcel.readInt(), parcel.readCharSequence());
+ addAction(action);
+ }
+ }
mMovementGranularities = parcel.readInt();
@@ -2524,7 +2683,9 @@ public class AccessibilityNodeInfo implements Parcelable {
mText = null;
mContentDescription = null;
mViewIdResourceName = null;
- mActions = 0;
+ if (mActions != null) {
+ mActions.clear();
+ }
mTextSelectionStart = UNDEFINED_SELECTION_INDEX;
mTextSelectionEnd = UNDEFINED_SELECTION_INDEX;
mInputType = InputType.TYPE_NULL;
@@ -2546,6 +2707,33 @@ public class AccessibilityNodeInfo implements Parcelable {
}
}
+ private static boolean isDefaultLegacyStandardAction(AccessibilityAction action) {
+ return (action.getId() <= LAST_LEGACY_STANDARD_ACTION
+ && TextUtils.isEmpty(action.getLabel()));
+ }
+
+ private static AccessibilityAction getActionSingleton(int actionId) {
+ final int actions = AccessibilityAction.sStandardActions.size();
+ for (int i = 0; i < actions; i++) {
+ AccessibilityAction currentAction = AccessibilityAction.sStandardActions.valueAt(i);
+ if (actionId == currentAction.getId()) {
+ return currentAction;
+ }
+ }
+
+ return null;
+ }
+
+ private void addLegacyStandardActions(int actionMask) {
+ int remainingIds = actionMask;
+ while (remainingIds > 0) {
+ final int id = 1 << Integer.numberOfTrailingZeros(remainingIds);
+ remainingIds &= ~id;
+ AccessibilityAction action = getActionSingleton(id);
+ addAction(action);
+ }
+ }
+
/**
* Gets the human readable action symbolic name.
*
@@ -2709,20 +2897,429 @@ public class AccessibilityNodeInfo implements Parcelable {
builder.append("; longClickable: ").append(isLongClickable());
builder.append("; enabled: ").append(isEnabled());
builder.append("; password: ").append(isPassword());
- builder.append("; scrollable: " + isScrollable());
-
- builder.append("; [");
- for (int actionBits = mActions; actionBits != 0;) {
- final int action = 1 << Integer.numberOfTrailingZeros(actionBits);
- actionBits &= ~action;
- builder.append(getActionSymbolicName(action));
- if (actionBits != 0) {
- builder.append(", ");
+ builder.append("; scrollable: ").append(isScrollable());
+ builder.append("; actions: ").append(mActions);
+
+ return builder.toString();
+ }
+
+ /**
+ * A class defining an action that can be performed on an {@link AccessibilityNodeInfo}.
+ * Each action has a mandatory, unique id and optional data.
+ * <p>
+ * There are three categories of actions:
+ * <ul>
+ * <li><strong>Standard actions</strong> - These are actions that are reported and
+ * handled by the standard UI widgets in the platform. For each standard action
+ * there is a static constant defined in this class, e.g. {@link #ACTION_FOCUS}.
+ * </li>
+ * <li><strong>Custom actions</strong> - These are actions that are reported
+ * and handled by custom widgets, i.e. ones that are not part of the UI toolkit. For
+ * example, an application may define a custom action for clearing the user history.
+ * </li>
+ * <li><strong>Overridden standard actions</strong> - These are actions that override
+ * standard actions to customize them. For example, an app may add a label to the
+ * standard click action to announce that this action clears browsing history.
+ * </ul>
+ * </p>
+ */
+ public static final class AccessibilityAction {
+
+ /**
+ * Action that gives input focus to the node.
+ */
+ public static final AccessibilityAction ACTION_FOCUS =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_FOCUS, null);
+
+ /**
+ * Action that clears input focus of the node.
+ */
+ public static final AccessibilityAction ACTION_CLEAR_FOCUS =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_CLEAR_FOCUS, null);
+
+ /**
+ * Action that selects the node.
+ */
+ public static final AccessibilityAction ACTION_SELECT =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_SELECT, null);
+
+ /**
+ * Action that deselects the node.
+ */
+ public static final AccessibilityAction ACTION_CLEAR_SELECTION =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_CLEAR_SELECTION, null);
+
+ /**
+ * Action that clicks on the node info.
+ */
+ public static final AccessibilityAction ACTION_CLICK =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_CLICK, null);
+
+ /**
+ * Action that long clicks on the node.
+ */
+ public static final AccessibilityAction ACTION_LONG_CLICK =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_LONG_CLICK, null);
+
+ /**
+ * Action that gives accessibility focus to the node.
+ */
+ public static final AccessibilityAction ACTION_ACCESSIBILITY_FOCUS =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_ACCESSIBILITY_FOCUS, null);
+
+ /**
+ * Action that clears accessibility focus of the node.
+ */
+ public static final AccessibilityAction ACTION_CLEAR_ACCESSIBILITY_FOCUS =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_CLEAR_ACCESSIBILITY_FOCUS, null);
+
+ /**
+ * Action that requests to go to the next entity in this node's text
+ * at a given movement granularity. For example, move to the next character,
+ * word, etc.
+ * <p>
+ * <strong>Arguments:</strong>
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT},
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN}<br>
+ * <strong>Example:</strong> Move to the next character and do not extend selection.
+ * <code><pre><p>
+ * Bundle arguments = new Bundle();
+ * arguments.putInt(AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT,
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER);
+ * arguments.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN,
+ * false);
+ * info.performAction(AccessibilityAction.ACTION_NEXT_AT_MOVEMENT_GRANULARITY.getId(),
+ * arguments);
+ * </code></pre></p>
+ * </p>
+ *
+ * @see AccessibilityNodeInfo#ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT
+ * @see AccessibilityNodeInfo#ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN
+ *
+ * @see AccessibilityNodeInfo#setMovementGranularities(int)
+ * AccessibilityNodeInfo.setMovementGranularities(int)
+ * @see AccessibilityNodeInfo#getMovementGranularities()
+ * AccessibilityNodeInfo.getMovementGranularities()
+ *
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_CHARACTER
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_WORD
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_WORD
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_LINE
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_LINE
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_PARAGRAPH
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_PARAGRAPH
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_PAGE
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_PAGE
+ */
+ public static final AccessibilityAction ACTION_NEXT_AT_MOVEMENT_GRANULARITY =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_NEXT_AT_MOVEMENT_GRANULARITY, null);
+
+ /**
+ * Action that requests to go to the previous entity in this node's text
+ * at a given movement granularity. For example, move to the previous character,
+ * word, etc.
+ * <p>
+ * <strong>Arguments:</strong>
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT},
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN}<br>
+ * <strong>Example:</strong> Move to the previous character and do not extend selection.
+ * <code><pre><p>
+ * Bundle arguments = new Bundle();
+ * arguments.putInt(AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT,
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER);
+ * arguments.putBoolean(AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN,
+ * false);
+ * info.performAction(AccessibilityAction.ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY.getId(),
+ * arguments);
+ * </code></pre></p>
+ * </p>
+ *
+ * @see AccessibilityNodeInfo#ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_MOVEMENT_GRANULARITY_INT
+ * @see AccessibilityNodeInfo#ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_EXTEND_SELECTION_BOOLEAN
+ *
+ * @see AccessibilityNodeInfo#setMovementGranularities(int)
+ * AccessibilityNodeInfo.setMovementGranularities(int)
+ * @see AccessibilityNodeInfo#getMovementGranularities()
+ * AccessibilityNodeInfo.getMovementGranularities()
+ *
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_CHARACTER
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_CHARACTER
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_WORD
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_WORD
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_LINE
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_LINE
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_PARAGRAPH
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_PARAGRAPH
+ * @see AccessibilityNodeInfo#MOVEMENT_GRANULARITY_PAGE
+ * AccessibilityNodeInfo.MOVEMENT_GRANULARITY_PAGE
+ */
+ public static final AccessibilityAction ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY, null);
+
+ /**
+ * Action to move to the next HTML element of a given type. For example, move
+ * to the BUTTON, INPUT, TABLE, etc.
+ * <p>
+ * <strong>Arguments:</strong>
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_HTML_ELEMENT_STRING
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_HTML_ELEMENT_STRING}<br>
+ * <strong>Example:</strong>
+ * <code><pre><p>
+ * Bundle arguments = new Bundle();
+ * arguments.putString(AccessibilityNodeInfo.ACTION_ARGUMENT_HTML_ELEMENT_STRING, "BUTTON");
+ * info.performAction(AccessibilityAction.ACTION_NEXT_HTML_ELEMENT.getId(), arguments);
+ * </code></pre></p>
+ * </p>
+ */
+ public static final AccessibilityAction ACTION_NEXT_HTML_ELEMENT =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_NEXT_HTML_ELEMENT, null);
+
+ /**
+ * Action to move to the previous HTML element of a given type. For example, move
+ * to the BUTTON, INPUT, TABLE, etc.
+ * <p>
+ * <strong>Arguments:</strong>
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_HTML_ELEMENT_STRING
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_HTML_ELEMENT_STRING}<br>
+ * <strong>Example:</strong>
+ * <code><pre><p>
+ * Bundle arguments = new Bundle();
+ * arguments.putString(AccessibilityNodeInfo.ACTION_ARGUMENT_HTML_ELEMENT_STRING, "BUTTON");
+ * info.performAction(AccessibilityAction.ACTION_PREVIOUS_HTML_ELEMENT.getId(), arguments);
+ * </code></pre></p>
+ * </p>
+ */
+ public static final AccessibilityAction ACTION_PREVIOUS_HTML_ELEMENT =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_PREVIOUS_HTML_ELEMENT, null);
+
+ /**
+ * Action to scroll the node content forward.
+ */
+ public static final AccessibilityAction ACTION_SCROLL_FORWARD =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_SCROLL_FORWARD, null);
+
+ /**
+ * Action to scroll the node content backward.
+ */
+ public static final AccessibilityAction ACTION_SCROLL_BACKWARD =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_SCROLL_BACKWARD, null);
+
+ /**
+ * Action to copy the current selection to the clipboard.
+ */
+ public static final AccessibilityAction ACTION_COPY =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_COPY, null);
+
+ /**
+ * Action to paste the current clipboard content.
+ */
+ public static final AccessibilityAction ACTION_PASTE =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_PASTE, null);
+
+ /**
+ * Action to cut the current selection and place it to the clipboard.
+ */
+ public static final AccessibilityAction ACTION_CUT =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_CUT, null);
+
+ /**
+ * Action to set the selection. Performing this action with no arguments
+ * clears the selection.
+ * <p>
+ * <strong>Arguments:</strong>
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_SELECTION_START_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_START_INT},
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_SELECTION_END_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_END_INT}<br>
+ * <strong>Example:</strong>
+ * <code><pre><p>
+ * Bundle arguments = new Bundle();
+ * arguments.putInt(AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_START_INT, 1);
+ * arguments.putInt(AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_END_INT, 2);
+ * info.performAction(AccessibilityAction.ACTION_SET_SELECTION.getId(), arguments);
+ * </code></pre></p>
+ * </p>
+ *
+ * @see AccessibilityNodeInfo#ACTION_ARGUMENT_SELECTION_START_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_START_INT
+ * @see AccessibilityNodeInfo#ACTION_ARGUMENT_SELECTION_END_INT
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_SELECTION_END_INT
+ */
+ public static final AccessibilityAction ACTION_SET_SELECTION =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_SET_SELECTION, null);
+
+ /**
+ * Action to expand an expandable node.
+ */
+ public static final AccessibilityAction ACTION_EXPAND =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_EXPAND, null);
+
+ /**
+ * Action to collapse an expandable node.
+ */
+ public static final AccessibilityAction ACTION_COLLAPSE =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_COLLAPSE, null);
+
+ /**
+ * Action to dismiss a dismissable node.
+ */
+ public static final AccessibilityAction ACTION_DISMISS =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_DISMISS, null);
+
+ /**
+ * Action that sets the text of the node. Performing the action without an argument,
+ * or with <code>null</code> or an empty {@link CharSequence}, will clear the text. This
+ * action will also put the cursor at the end of the text.
+ * <p>
+ * <strong>Arguments:</strong>
+ * {@link AccessibilityNodeInfo#ACTION_ARGUMENT_SET_TEXT_CHARSEQUENCE
+ * AccessibilityNodeInfo.ACTION_ARGUMENT_SET_TEXT_CHARSEQUENCE}<br>
+ * <strong>Example:</strong>
+ * <code><pre><p>
+ * Bundle arguments = new Bundle();
+ * arguments.putCharSequence(AccessibilityNodeInfo.ACTION_ARGUMENT_SET_TEXT_CHARSEQUENCE,
+ * "android");
+ * info.performAction(AccessibilityAction.ACTION_SET_TEXT.getId(), arguments);
+ * </code></pre></p>
+ */
+ public static final AccessibilityAction ACTION_SET_TEXT =
+ new AccessibilityAction(
+ AccessibilityNodeInfo.ACTION_SET_TEXT, null);
+
+ private static final ArraySet<AccessibilityAction> sStandardActions = new ArraySet<AccessibilityAction>();
+ static {
+ sStandardActions.add(ACTION_FOCUS);
+ sStandardActions.add(ACTION_CLEAR_FOCUS);
+ sStandardActions.add(ACTION_SELECT);
+ sStandardActions.add(ACTION_CLEAR_SELECTION);
+ sStandardActions.add(ACTION_CLICK);
+ sStandardActions.add(ACTION_LONG_CLICK);
+ sStandardActions.add(ACTION_ACCESSIBILITY_FOCUS);
+ sStandardActions.add(ACTION_CLEAR_ACCESSIBILITY_FOCUS);
+ sStandardActions.add(ACTION_NEXT_AT_MOVEMENT_GRANULARITY);
+ sStandardActions.add(ACTION_PREVIOUS_AT_MOVEMENT_GRANULARITY);
+ sStandardActions.add(ACTION_NEXT_HTML_ELEMENT);
+ sStandardActions.add(ACTION_PREVIOUS_HTML_ELEMENT);
+ sStandardActions.add(ACTION_SCROLL_FORWARD);
+ sStandardActions.add(ACTION_SCROLL_BACKWARD);
+ sStandardActions.add(ACTION_COPY);
+ sStandardActions.add(ACTION_PASTE);
+ sStandardActions.add(ACTION_CUT);
+ sStandardActions.add(ACTION_SET_SELECTION);
+ sStandardActions.add(ACTION_EXPAND);
+ sStandardActions.add(ACTION_COLLAPSE);
+ sStandardActions.add(ACTION_DISMISS);
+ sStandardActions.add(ACTION_SET_TEXT);
+ }
+
+ private final int mActionId;
+ private final CharSequence mLabel;
+
+ /**
+ * Creates a new AccessibilityAction. For adding a standard action without a specific label,
+ * use the static constants.
+ *
+ * You can also override the description for one of the standard actions. Below is an
+ * example of how to override the standard click action by adding a custom label:
+ * <pre>
+ * AccessibilityAction action = new AccessibilityAction(
+ * AccessibilityNodeInfo.ACTION_CLICK, getLocalizedLabel());
+ * node.addAction(action);
+ * </pre>
+ *
+ * @param actionId The id for this action. This should either be one of the
+ * standard action ids or a custom action id specific to your app; in the latter
+ * case a resource identifier is required.
+ * @param label The label for the new AccessibilityAction.
+ */
+ public AccessibilityAction(int actionId, @Nullable CharSequence label) {
+ if ((actionId & ACTION_TYPE_MASK) == 0 && Integer.bitCount(actionId) > 1) {
+ throw new IllegalArgumentException("Invalid standard action id");
+ }
+
+ if ((actionId & STANDARD_NON_LEGACY_ACTION_MASK) != 0) {
+ throw new IllegalArgumentException("action id not a resource id");
}
+
+ mActionId = actionId;
+ mLabel = label;
}
- builder.append("]");
- return builder.toString();
+ /**
+ * Gets the id for this action.
+ *
+ * @return The action id.
+ */
+ public int getId() {
+ return mActionId;
+ }
+
+ /**
+ * Gets the label for this action. Its purpose is to describe the
+ * action to the user.
+ *
+ * @return The label.
+ */
+ public CharSequence getLabel() {
+ return mLabel;
+ }
+
+ @Override
+ public int hashCode() {
+ return mActionId;
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (other == null) {
+ return false;
+ }
+
+ if (other == this) {
+ return true;
+ }
+
+ if (getClass() != other.getClass()) {
+ return false;
+ }
+
+ return mActionId == ((AccessibilityAction)other).mActionId;
+ }
+
+ @Override
+ public String toString() {
+ return "AccessibilityAction: " + getActionSymbolicName(mActionId) + " - " + mLabel;
+ }
}
/**
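
A minimal usage sketch for the AccessibilityAction APIs added above; the helper names here are hypothetical, and the custom-label override relies on addAction(AccessibilityAction) replacing any existing action with the same id, as the patch does.

import android.util.Log;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.accessibility.AccessibilityNodeInfo.AccessibilityAction;

class ActionSketch {
    // Populating a node, e.g. from View#onInitializeAccessibilityNodeInfo(info).
    // (addAction cannot be called once the node is sealed and delivered to a service.)
    static void describeNode(AccessibilityNodeInfo info, CharSequence clickLabel) {
        // Standard action via its singleton constant (no label).
        info.addAction(AccessibilityAction.ACTION_LONG_CLICK);
        // Overridden standard action: same id as ACTION_CLICK, but with a custom label.
        info.addAction(new AccessibilityAction(AccessibilityNodeInfo.ACTION_CLICK, clickLabel));
    }

    // Consuming the actions, e.g. from an AccessibilityService.
    static void dumpActions(AccessibilityNodeInfo info) {
        for (AccessibilityAction action : info.getActionList()) {
            Log.d("A11ySketch", action.getId() + " -> " + action.getLabel());
        }
    }
}
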
diff --git a/core/jni/Android.mk b/core/jni/Android.mk
index 726b2f7..0ad2ab2 100644
--- a/core/jni/Android.mk
+++ b/core/jni/Android.mk
@@ -5,6 +5,7 @@ LOCAL_CFLAGS += -DHAVE_CONFIG_H -DKHTML_NO_EXCEPTIONS -DGKWQ_NO_JAVA
LOCAL_CFLAGS += -DNO_SUPPORT_JS_BINDING -DQT_NO_WHEELEVENT -DKHTML_NO_XBL
LOCAL_CFLAGS += -U__APPLE__
LOCAL_CFLAGS += -Wno-unused-parameter -Wno-int-to-pointer-cast
+LOCAL_CFLAGS += -Wno-non-virtual-dtor
LOCAL_CFLAGS += -Wno-maybe-uninitialized -Wno-parentheses
LOCAL_CPPFLAGS += -Wno-conversion-null
@@ -125,6 +126,7 @@ LOCAL_SRC_FILES:= \
android/graphics/Xfermode.cpp \
android/graphics/YuvToJpegEncoder.cpp \
android/graphics/pdf/PdfDocument.cpp \
+ android/graphics/pdf/PdfRenderer.cpp \
android_media_AudioRecord.cpp \
android_media_AudioSystem.cpp \
android_media_AudioTrack.cpp \
@@ -169,6 +171,9 @@ LOCAL_C_INCLUDES += \
$(call include-path-for, libhardware_legacy)/hardware_legacy \
$(TOP)/frameworks/av/include \
$(TOP)/system/media/camera/include \
+ external/pdfrenderer/core/include/fpdfapi \
+ external/pdfrenderer/core/include/fpdfdoc \
+ external/pdfrenderer/fpdfsdk/include \
external/skia/src/core \
external/skia/src/effects \
external/skia/src/images \
@@ -222,6 +227,7 @@ LOCAL_SHARED_LIBRARIES := \
libharfbuzz_ng \
libz \
libaudioutils \
+ libpdfrenderer \
ifeq ($(USE_OPENGL_RENDERER),true)
LOCAL_SHARED_LIBRARIES += libhwui
diff --git a/core/jni/AndroidRuntime.cpp b/core/jni/AndroidRuntime.cpp
index f41af04..a4dc824 100644
--- a/core/jni/AndroidRuntime.cpp
+++ b/core/jni/AndroidRuntime.cpp
@@ -120,6 +120,7 @@ extern int register_android_graphics_Region(JNIEnv* env);
extern int register_android_graphics_SurfaceTexture(JNIEnv* env);
extern int register_android_graphics_Xfermode(JNIEnv* env);
extern int register_android_graphics_pdf_PdfDocument(JNIEnv* env);
+extern int register_android_graphics_pdf_PdfRenderer(JNIEnv* env);
extern int register_android_view_DisplayEventReceiver(JNIEnv* env);
extern int register_android_view_RenderNode(JNIEnv* env);
extern int register_android_view_RenderNodeAnimator(JNIEnv* env);
@@ -1258,6 +1259,7 @@ static const RegJNIRec gRegJNI[] = {
REG_JNI(register_android_graphics_Xfermode),
REG_JNI(register_android_graphics_YuvImage),
REG_JNI(register_android_graphics_pdf_PdfDocument),
+ REG_JNI(register_android_graphics_pdf_PdfRenderer),
REG_JNI(register_android_database_CursorWindow),
REG_JNI(register_android_database_SQLiteConnection),
diff --git a/core/jni/android/graphics/Matrix.cpp b/core/jni/android/graphics/Matrix.cpp
index 23af860..cbd20e9 100644
--- a/core/jni/android/graphics/Matrix.cpp
+++ b/core/jni/android/graphics/Matrix.cpp
@@ -50,10 +50,17 @@ public:
SkMatrix* obj = reinterpret_cast<SkMatrix*>(objHandle);
return obj->isIdentity() ? JNI_TRUE : JNI_FALSE;
}
+
+ static jboolean isAffine(JNIEnv* env, jobject clazz, jlong objHandle) {
+ SkMatrix* obj = reinterpret_cast<SkMatrix*>(objHandle);
+ return obj->asAffine(NULL) ? JNI_TRUE : JNI_FALSE;
+ }
+
static jboolean rectStaysRect(JNIEnv* env, jobject clazz, jlong objHandle) {
SkMatrix* obj = reinterpret_cast<SkMatrix*>(objHandle);
return obj->rectStaysRect() ? JNI_TRUE : JNI_FALSE;
}
+
static void reset(JNIEnv* env, jobject clazz, jlong objHandle) {
SkMatrix* obj = reinterpret_cast<SkMatrix*>(objHandle);
obj->reset();
@@ -302,6 +309,7 @@ static JNINativeMethod methods[] = {
{"finalizer", "(J)V", (void*) SkMatrixGlue::finalizer},
{"native_create","(J)J", (void*) SkMatrixGlue::create},
{"native_isIdentity","(J)Z", (void*) SkMatrixGlue::isIdentity},
+ {"native_isAffine","(J)Z", (void*) SkMatrixGlue::isAffine},
{"native_rectStaysRect","(J)Z", (void*) SkMatrixGlue::rectStaysRect},
{"native_reset","(J)V", (void*) SkMatrixGlue::reset},
{"native_set","(JJ)V", (void*) SkMatrixGlue::set},
diff --git a/core/jni/android/graphics/pdf/PdfRenderer.cpp b/core/jni/android/graphics/pdf/PdfRenderer.cpp
new file mode 100644
index 0000000..15de24a
--- /dev/null
+++ b/core/jni/android/graphics/pdf/PdfRenderer.cpp
@@ -0,0 +1,279 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "jni.h"
+#include "JNIHelp.h"
+#include "GraphicsJNI.h"
+#include "SkBitmap.h"
+#include "SkMatrix.h"
+#include "fpdfview.h"
+#include "fsdk_rendercontext.h"
+
+#include <android_runtime/AndroidRuntime.h>
+#include <vector>
+#include <utils/Log.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+namespace android {
+
+static const int RENDER_MODE_FOR_DISPLAY = 1;
+static const int RENDER_MODE_FOR_PRINT = 2;
+
+static struct {
+ jfieldID x;
+ jfieldID y;
+} gPointClassInfo;
+
+static Mutex sLock;
+
+static int sUnmatchedInitRequestCount = 0;
+
+static void initializeLibraryIfNeeded() {
+ Mutex::Autolock _l(sLock);
+ if (sUnmatchedInitRequestCount == 0) {
+ FPDF_InitLibrary(NULL);
+ }
+ sUnmatchedInitRequestCount++;
+}
+
+static void destroyLibraryIfNeeded() {
+ Mutex::Autolock _l(sLock);
+ sUnmatchedInitRequestCount--;
+ if (sUnmatchedInitRequestCount == 0) {
+ FPDF_DestroyLibrary();
+ }
+}
+
+static int getBlock(void* param, unsigned long position, unsigned char* outBuffer,
+ unsigned long size) {
+ const int fd = reinterpret_cast<intptr_t>(param);
+ const int readCount = pread(fd, outBuffer, size, position);
+ if (readCount < 0) {
+ ALOGE("Cannot read from file descriptor. Error:%d", errno);
+ return 0;
+ }
+ return 1;
+}
+
+static jlong nativeCreate(JNIEnv* env, jclass thiz, jint fd, jlong size) {
+ initializeLibraryIfNeeded();
+
+ FPDF_FILEACCESS loader;
+ loader.m_FileLen = size;
+ loader.m_Param = reinterpret_cast<void*>(intptr_t(fd));
+ loader.m_GetBlock = &getBlock;
+
+ FPDF_DOCUMENT document = FPDF_LoadCustomDocument(&loader, NULL);
+
+ if (!document) {
+ const long error = FPDF_GetLastError();
+ jniThrowExceptionFmt(env, "java/io/IOException",
+ "cannot create document. Error: %ld", error);
+ destroyLibraryIfNeeded();
+ return -1;
+ }
+
+ return reinterpret_cast<jlong>(document);
+}
+
+static jlong nativeOpenPageAndGetSize(JNIEnv* env, jclass thiz, jlong documentPtr,
+ jint pageIndex, jobject outSize) {
+ FPDF_DOCUMENT document = reinterpret_cast<FPDF_DOCUMENT>(documentPtr);
+
+ FPDF_PAGE page = FPDF_LoadPage(document, pageIndex);
+
+ if (!page) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "cannot load page");
+ return -1;
+ }
+
+ double width = 0;
+ double height = 0;
+
+ const int result = FPDF_GetPageSizeByIndex(document, pageIndex, &width, &height);
+
+ if (!result) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "cannot get page size");
+ return -1;
+ }
+
+ env->SetIntField(outSize, gPointClassInfo.x, width);
+ env->SetIntField(outSize, gPointClassInfo.y, height);
+
+ return reinterpret_cast<jlong>(page);
+}
+
+static void nativeClosePage(JNIEnv* env, jclass thiz, jlong pagePtr) {
+ FPDF_PAGE page = reinterpret_cast<FPDF_PAGE>(pagePtr);
+ FPDF_ClosePage(page);
+}
+
+static void nativeClose(JNIEnv* env, jclass thiz, jlong documentPtr) {
+ FPDF_DOCUMENT document = reinterpret_cast<FPDF_DOCUMENT>(documentPtr);
+ FPDF_CloseDocument(document);
+ destroyLibraryIfNeeded();
+}
+
+static jint nativeGetPageCount(JNIEnv* env, jclass thiz, jlong documentPtr) {
+ FPDF_DOCUMENT document = reinterpret_cast<FPDF_DOCUMENT>(documentPtr);
+ return FPDF_GetPageCount(document);
+}
+
+static jboolean nativeScaleForPrinting(JNIEnv* env, jclass thiz, jlong documentPtr) {
+ FPDF_DOCUMENT document = reinterpret_cast<FPDF_DOCUMENT>(documentPtr);
+ return FPDF_VIEWERREF_GetPrintScaling(document);
+}
+
+static void DropContext(void* data) {
+ delete (CRenderContext*) data;
+}
+
+static void renderPageBitmap(FPDF_BITMAP bitmap, FPDF_PAGE page, int destLeft, int destTop,
+ int destRight, int destBottom, SkMatrix* transform, int flags) {
+ // Note: this code ignores the currently unused RENDER_NO_NATIVETEXT,
+ // FPDF_RENDER_LIMITEDIMAGECACHE, FPDF_RENDER_FORCEHALFTONE, FPDF_GRAYSCALE,
+ // and FPDF_ANNOT flags. To add support for them, refer to
+ // FPDF_RenderPage_Retail in fpdfview.cpp.
+
+ CRenderContext* pContext = FX_NEW CRenderContext;
+
+ CPDF_Page* pPage = (CPDF_Page*) page;
+ pPage->SetPrivateData((void*) 1, pContext, DropContext);
+
+ CFX_FxgeDevice* fxgeDevice = FX_NEW CFX_FxgeDevice;
+ pContext->m_pDevice = fxgeDevice;
+
+ // Reverse the bytes (last argument TRUE) since the Android
+ // format is ARGB while the renderer uses BGRA internally.
+ fxgeDevice->Attach((CFX_DIBitmap*) bitmap, 0, TRUE);
+
+ CPDF_RenderOptions* renderOptions = pContext->m_pOptions;
+
+ if (!renderOptions) {
+ renderOptions = FX_NEW CPDF_RenderOptions;
+ pContext->m_pOptions = renderOptions;
+ }
+
+ if (flags & FPDF_LCD_TEXT) {
+ renderOptions->m_Flags |= RENDER_CLEARTYPE;
+ } else {
+ renderOptions->m_Flags &= ~RENDER_CLEARTYPE;
+ }
+
+ const CPDF_OCContext::UsageType usage = (flags & FPDF_PRINTING)
+ ? CPDF_OCContext::Print : CPDF_OCContext::View;
+
+ renderOptions->m_AddFlags = flags >> 8;
+ renderOptions->m_pOCContext = new CPDF_OCContext(pPage->m_pDocument, usage);
+
+ fxgeDevice->SaveState();
+
+ FX_RECT clip;
+ clip.left = destLeft;
+ clip.right = destRight;
+ clip.top = destTop;
+ clip.bottom = destBottom;
+ fxgeDevice->SetClip_Rect(&clip);
+
+ CPDF_RenderContext* pageContext = FX_NEW CPDF_RenderContext;
+ pContext->m_pContext = pageContext;
+ pageContext->Create(pPage);
+
+ CFX_AffineMatrix matrix;
+ if (!transform) {
+ pPage->GetDisplayMatrix(matrix, destLeft, destTop, destRight - destLeft,
+ destBottom - destTop, 0);
+ } else {
+ // PDF's coordinate system origin is left-bottom while
+ // in graphics it is the top-left, so remap the origin.
+ matrix.Set(1, 0, 0, -1, 0, pPage->GetPageHeight());
+ matrix.Scale(transform->getScaleX(), transform->getScaleY());
+ matrix.Rotate(transform->getSkewX(), transform->getSkewY());
+ matrix.Translate(transform->getTranslateX(), transform->getTranslateY());
+ }
+ pageContext->AppendObjectList(pPage, &matrix);
+
+ pContext->m_pRenderer = FX_NEW CPDF_ProgressiveRenderer;
+ pContext->m_pRenderer->Start(pageContext, fxgeDevice, renderOptions, NULL);
+
+ fxgeDevice->RestoreState();
+
+ pPage->RemovePrivateData((void*) 1);
+
+ delete pContext;
+}
+
+static void nativeRenderPage(JNIEnv* env, jclass thiz, jlong documentPtr, jlong pagePtr,
+ jlong bitmapPtr, jint destLeft, jint destTop, jint destRight, jint destBottom,
+ jlong matrixPtr, jint renderMode) {
+
+ FPDF_DOCUMENT document = reinterpret_cast<FPDF_DOCUMENT>(documentPtr);
+ FPDF_PAGE page = reinterpret_cast<FPDF_PAGE>(pagePtr);
+ SkBitmap* skBitmap = reinterpret_cast<SkBitmap*>(bitmapPtr);
+ SkMatrix* skMatrix = reinterpret_cast<SkMatrix*>(matrixPtr);
+
+ skBitmap->lockPixels();
+
+ const int stride = skBitmap->width() * 4;
+
+ FPDF_BITMAP bitmap = FPDFBitmap_CreateEx(skBitmap->width(), skBitmap->height(),
+ FPDFBitmap_BGRA, skBitmap->getPixels(), stride);
+
+ if (!bitmap) {
+ ALOGE("Erorr creating bitmap");
+ return;
+ }
+
+ int renderFlags = 0;
+ if (renderMode == RENDER_MODE_FOR_DISPLAY) {
+ renderFlags |= FPDF_LCD_TEXT;
+ } else if (renderMode == RENDER_MODE_FOR_PRINT) {
+ renderFlags |= FPDF_PRINTING;
+ }
+
+ renderPageBitmap(bitmap, page, destLeft, destTop, destRight,
+ destBottom, skMatrix, renderFlags);
+
+ skBitmap->unlockPixels();
+}
+
+static JNINativeMethod gPdfRenderer_Methods[] = {
+ {"nativeCreate", "(IJ)J", (void*) nativeCreate},
+ {"nativeClose", "(J)V", (void*) nativeClose},
+ {"nativeGetPageCount", "(J)I", (void*) nativeGetPageCount},
+ {"nativeScaleForPrinting", "(J)Z", (void*) nativeScaleForPrinting},
+ {"nativeRenderPage", "(JJJIIIIJI)V", (void*) nativeRenderPage},
+ {"nativeOpenPageAndGetSize", "(JILandroid/graphics/Point;)J", (void*) nativeOpenPageAndGetSize},
+ {"nativeClosePage", "(J)V", (void*) nativeClosePage}
+};
+
+int register_android_graphics_pdf_PdfRenderer(JNIEnv* env) {
+ int result = android::AndroidRuntime::registerNativeMethods(
+ env, "android/graphics/pdf/PdfRenderer", gPdfRenderer_Methods,
+ NELEM(gPdfRenderer_Methods));
+
+ jclass clazz = env->FindClass("android/graphics/Point");
+ gPointClassInfo.x = env->GetFieldID(clazz, "x", "I");
+ gPointClassInfo.y = env->GetFieldID(clazz, "y", "I");
+
+ return result;
+};
+
+};
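
A minimal sketch of how these native entry points are expected to be exercised from the Java side, assuming the android.graphics.pdf.PdfRenderer API shape: open from a seekable ParcelFileDescriptor, open a page by index, and render into an ARGB_8888 bitmap.

import android.graphics.Bitmap;
import android.graphics.pdf.PdfRenderer;
import android.os.ParcelFileDescriptor;
import java.io.File;
import java.io.IOException;

class PdfThumbnailSketch {
    static Bitmap renderFirstPage(File file) throws IOException {
        // The descriptor must be seekable: the renderer reads blocks via pread() above.
        ParcelFileDescriptor fd =
                ParcelFileDescriptor.open(file, ParcelFileDescriptor.MODE_READ_ONLY);
        PdfRenderer renderer = new PdfRenderer(fd);
        try {
            PdfRenderer.Page page = renderer.openPage(0);
            try {
                Bitmap bitmap = Bitmap.createBitmap(page.getWidth(), page.getHeight(),
                        Bitmap.Config.ARGB_8888);
                // RENDER_MODE_FOR_DISPLAY maps to FPDF_LCD_TEXT in the native code.
                page.render(bitmap, null /*destClip*/, null /*transform*/,
                        PdfRenderer.Page.RENDER_MODE_FOR_DISPLAY);
                return bitmap;
            } finally {
                page.close();
            }
        } finally {
            renderer.close();
        }
    }
}
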
diff --git a/core/jni/android_view_GLES20Canvas.cpp b/core/jni/android_view_GLES20Canvas.cpp
index 50eb869..27d3f39 100644
--- a/core/jni/android_view_GLES20Canvas.cpp
+++ b/core/jni/android_view_GLES20Canvas.cpp
@@ -686,8 +686,8 @@ static void renderTextLayout(OpenGLRenderer* renderer, Layout* layout,
MinikinFontSkia* mfs = static_cast<MinikinFontSkia *>(layout->getFont(i));
skFace = mfs->GetSkTypeface();
glyphs[i] = layout->getGlyphId(i);
- pos[2 * i] = SkFloatToScalar(layout->getX(i));
- pos[2 * i + 1] = SkFloatToScalar(layout->getY(i));
+ pos[2 * i] = layout->getX(i);
+ pos[2 * i + 1] = layout->getY(i);
if (i > 0 && skFace != lastFace) {
paint->setTypeface(lastFace);
size_t glyphsCount = i - start;
diff --git a/core/jni/android_view_RenderNode.cpp b/core/jni/android_view_RenderNode.cpp
index e45a901..b0defdb 100644
--- a/core/jni/android_view_RenderNode.cpp
+++ b/core/jni/android_view_RenderNode.cpp
@@ -80,6 +80,7 @@ static void android_view_RenderNode_setCaching(JNIEnv* env,
jobject clazz, jlong renderNodePtr, jboolean caching) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setCaching(caching);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setStaticMatrix(JNIEnv* env,
@@ -87,6 +88,7 @@ static void android_view_RenderNode_setStaticMatrix(JNIEnv* env,
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
SkMatrix* matrix = reinterpret_cast<SkMatrix*>(matrixPtr);
renderNode->mutateStagingProperties().setStaticMatrix(matrix);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setAnimationMatrix(JNIEnv* env,
@@ -94,24 +96,28 @@ static void android_view_RenderNode_setAnimationMatrix(JNIEnv* env,
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
SkMatrix* matrix = reinterpret_cast<SkMatrix*>(matrixPtr);
renderNode->mutateStagingProperties().setAnimationMatrix(matrix);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setClipToBounds(JNIEnv* env,
jobject clazz, jlong renderNodePtr, jboolean clipToBounds) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setClipToBounds(clipToBounds);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setProjectBackwards(JNIEnv* env,
jobject clazz, jlong renderNodePtr, jboolean shouldProject) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setProjectBackwards(shouldProject);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setProjectionReceiver(JNIEnv* env,
jobject clazz, jlong renderNodePtr, jboolean shouldRecieve) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setProjectionReceiver(shouldRecieve);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setOutlineRoundRect(JNIEnv* env,
@@ -119,6 +125,7 @@ static void android_view_RenderNode_setOutlineRoundRect(JNIEnv* env,
jint right, jint bottom, jfloat radius) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().mutableOutline().setRoundRect(left, top, right, bottom, radius);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setOutlineConvexPath(JNIEnv* env,
@@ -126,18 +133,21 @@ static void android_view_RenderNode_setOutlineConvexPath(JNIEnv* env,
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
SkPath* outlinePath = reinterpret_cast<SkPath*>(outlinePathPtr);
renderNode->mutateStagingProperties().mutableOutline().setConvexPath(outlinePath);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setOutlineEmpty(JNIEnv* env,
jobject clazz, jlong renderNodePtr) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().mutableOutline().setEmpty();
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setClipToOutline(JNIEnv* env,
jobject clazz, jlong renderNodePtr, jboolean clipToOutline) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().mutableOutline().setShouldClip(clipToOutline);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setRevealClip(JNIEnv* env,
@@ -146,114 +156,133 @@ static void android_view_RenderNode_setRevealClip(JNIEnv* env,
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().mutableRevealClip().set(
shouldClip, inverseClip, x, y, radius);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setAlpha(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float alpha) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setAlpha(alpha);
+ renderNode->setPropertyFieldsDirty(RenderNode::ALPHA);
}
static void android_view_RenderNode_setHasOverlappingRendering(JNIEnv* env,
jobject clazz, jlong renderNodePtr, bool hasOverlappingRendering) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setHasOverlappingRendering(hasOverlappingRendering);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setElevation(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float elevation) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setElevation(elevation);
+ renderNode->setPropertyFieldsDirty(RenderNode::Z);
}
static void android_view_RenderNode_setTranslationX(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float tx) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setTranslationX(tx);
+ renderNode->setPropertyFieldsDirty(RenderNode::TRANSLATION_X | RenderNode::X);
}
static void android_view_RenderNode_setTranslationY(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float ty) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setTranslationY(ty);
+ renderNode->setPropertyFieldsDirty(RenderNode::TRANSLATION_Y | RenderNode::Y);
}
static void android_view_RenderNode_setTranslationZ(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float tz) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setTranslationZ(tz);
+ renderNode->setPropertyFieldsDirty(RenderNode::TRANSLATION_Z | RenderNode::Z);
}
static void android_view_RenderNode_setRotation(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float rotation) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setRotation(rotation);
+ renderNode->setPropertyFieldsDirty(RenderNode::ROTATION);
}
static void android_view_RenderNode_setRotationX(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float rx) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setRotationX(rx);
+ renderNode->setPropertyFieldsDirty(RenderNode::ROTATION_X);
}
static void android_view_RenderNode_setRotationY(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float ry) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setRotationY(ry);
+ renderNode->setPropertyFieldsDirty(RenderNode::ROTATION_Y);
}
static void android_view_RenderNode_setScaleX(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float sx) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setScaleX(sx);
+ renderNode->setPropertyFieldsDirty(RenderNode::SCALE_X);
}
static void android_view_RenderNode_setScaleY(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float sy) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setScaleY(sy);
+ renderNode->setPropertyFieldsDirty(RenderNode::SCALE_Y);
}
static void android_view_RenderNode_setPivotX(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float px) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setPivotX(px);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setPivotY(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float py) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setPivotY(py);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setCameraDistance(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float distance) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setCameraDistance(distance);
+ renderNode->setPropertyFieldsDirty(RenderNode::GENERIC);
}
static void android_view_RenderNode_setLeft(JNIEnv* env,
jobject clazz, jlong renderNodePtr, int left) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setLeft(left);
+ renderNode->setPropertyFieldsDirty(RenderNode::X);
}
static void android_view_RenderNode_setTop(JNIEnv* env,
jobject clazz, jlong renderNodePtr, int top) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setTop(top);
+ renderNode->setPropertyFieldsDirty(RenderNode::Y);
}
static void android_view_RenderNode_setRight(JNIEnv* env,
jobject clazz, jlong renderNodePtr, int right) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setRight(right);
+ renderNode->setPropertyFieldsDirty(RenderNode::X);
}
static void android_view_RenderNode_setBottom(JNIEnv* env,
jobject clazz, jlong renderNodePtr, int bottom) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setBottom(bottom);
+ renderNode->setPropertyFieldsDirty(RenderNode::Y);
}
static void android_view_RenderNode_setLeftTopRightBottom(JNIEnv* env,
@@ -261,18 +290,21 @@ static void android_view_RenderNode_setLeftTopRightBottom(JNIEnv* env,
int right, int bottom) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().setLeftTopRightBottom(left, top, right, bottom);
+ renderNode->setPropertyFieldsDirty(RenderNode::X | RenderNode::Y);
}
static void android_view_RenderNode_offsetLeftAndRight(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float offset) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().offsetLeftRight(offset);
+ renderNode->setPropertyFieldsDirty(RenderNode::X);
}
static void android_view_RenderNode_offsetTopAndBottom(JNIEnv* env,
jobject clazz, jlong renderNodePtr, float offset) {
RenderNode* renderNode = reinterpret_cast<RenderNode*>(renderNodePtr);
renderNode->mutateStagingProperties().offsetTopBottom(offset);
+ renderNode->setPropertyFieldsDirty(RenderNode::Y);
}
// ----------------------------------------------------------------------------
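A hedged sketch of the Java-side counterpart to the setters above, assuming a wrapper class with nSet* bindings (names are assumptions; the actual android.view.RenderNode changes are not part of this hunk). The per-field dirty mask replaces the old all-or-nothing properties-sync flag so that animators can tell which properties the UI thread has explicitly touched:

    public final class RenderNodeSketch {
        private final long mNativeRenderNode;   // passed as renderNodePtr above

        RenderNodeSketch(long nativePtr) {
            mNativeRenderNode = nativePtr;
        }

        public void setTranslationX(float translationX) {
            // The native setter mutates the staging properties and marks
            // TRANSLATION_X | X dirty for the next staging push.
            nSetTranslationX(mNativeRenderNode, translationX);
        }

        public void setAlpha(float alpha) {
            nSetAlpha(mNativeRenderNode, alpha); // marks ALPHA dirty
        }

        private static native void nSetTranslationX(long renderNode, float translationX);
        private static native void nSetAlpha(long renderNode, float alpha);
    }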
diff --git a/core/jni/android_view_RenderNodeAnimator.cpp b/core/jni/android_view_RenderNodeAnimator.cpp
index 5733f60..ea2f96e 100644
--- a/core/jni/android_view_RenderNodeAnimator.cpp
+++ b/core/jni/android_view_RenderNodeAnimator.cpp
@@ -62,7 +62,7 @@ public:
mWeakThis = NULL;
}
- virtual void onAnimationFinished(BaseAnimator*) {
+ virtual void onAnimationFinished(BaseRenderNodeAnimator*) {
JNIEnv* env = getEnv(mJvm);
env->CallStaticVoidMethod(
gRenderNodeAnimatorClassInfo.clazz,
@@ -81,13 +81,6 @@ static inline RenderPropertyAnimator::RenderProperty toRenderProperty(jint prope
return static_cast<RenderPropertyAnimator::RenderProperty>(property);
}
-static inline RenderPropertyAnimator::DeltaValueType toDeltaType(jint deltaType) {
- LOG_ALWAYS_FATAL_IF(deltaType != RenderPropertyAnimator::DELTA
- && deltaType != RenderPropertyAnimator::ABSOLUTE,
- "Invalid delta type %d", deltaType);
- return static_cast<RenderPropertyAnimator::DeltaValueType>(deltaType);
-}
-
static inline CanvasPropertyPaintAnimator::PaintField toPaintField(jint field) {
LOG_ALWAYS_FATAL_IF(field < 0
|| field > CanvasPropertyPaintAnimator::ALPHA,
@@ -96,49 +89,46 @@ static inline CanvasPropertyPaintAnimator::PaintField toPaintField(jint field) {
}
static jlong createAnimator(JNIEnv* env, jobject clazz, jobject weakThis,
- jint propertyRaw, jint deltaTypeRaw, jfloat deltaValue) {
+ jint propertyRaw, jfloat finalValue) {
RenderPropertyAnimator::RenderProperty property = toRenderProperty(propertyRaw);
- RenderPropertyAnimator::DeltaValueType deltaType = toDeltaType(deltaTypeRaw);
- BaseAnimator* animator = new RenderPropertyAnimator(property, deltaType, deltaValue);
+ BaseRenderNodeAnimator* animator = new RenderPropertyAnimator(property, finalValue);
animator->setListener(new AnimationListenerBridge(env, weakThis));
return reinterpret_cast<jlong>( animator );
}
static jlong createCanvasPropertyFloatAnimator(JNIEnv* env, jobject clazz,
- jobject weakThis, jlong canvasPropertyPtr, jint deltaTypeRaw, jfloat deltaValue) {
- RenderPropertyAnimator::DeltaValueType deltaType = toDeltaType(deltaTypeRaw);
+ jobject weakThis, jlong canvasPropertyPtr, jfloat finalValue) {
CanvasPropertyPrimitive* canvasProperty = reinterpret_cast<CanvasPropertyPrimitive*>(canvasPropertyPtr);
- BaseAnimator* animator = new CanvasPropertyPrimitiveAnimator(canvasProperty, deltaType, deltaValue);
+ BaseRenderNodeAnimator* animator = new CanvasPropertyPrimitiveAnimator(canvasProperty, finalValue);
animator->setListener(new AnimationListenerBridge(env, weakThis));
return reinterpret_cast<jlong>( animator );
}
static jlong createCanvasPropertyPaintAnimator(JNIEnv* env, jobject clazz,
jobject weakThis, jlong canvasPropertyPtr, jint paintFieldRaw,
- jint deltaTypeRaw, jfloat deltaValue) {
- RenderPropertyAnimator::DeltaValueType deltaType = toDeltaType(deltaTypeRaw);
+ jfloat finalValue) {
CanvasPropertyPaint* canvasProperty = reinterpret_cast<CanvasPropertyPaint*>(canvasPropertyPtr);
CanvasPropertyPaintAnimator::PaintField paintField = toPaintField(paintFieldRaw);
- BaseAnimator* animator = new CanvasPropertyPaintAnimator(
- canvasProperty, paintField, deltaType, deltaValue);
+ BaseRenderNodeAnimator* animator = new CanvasPropertyPaintAnimator(
+ canvasProperty, paintField, finalValue);
animator->setListener(new AnimationListenerBridge(env, weakThis));
return reinterpret_cast<jlong>( animator );
}
static void setDuration(JNIEnv* env, jobject clazz, jlong animatorPtr, jint duration) {
LOG_ALWAYS_FATAL_IF(duration < 0, "Duration cannot be negative");
- BaseAnimator* animator = reinterpret_cast<BaseAnimator*>(animatorPtr);
+ BaseRenderNodeAnimator* animator = reinterpret_cast<BaseRenderNodeAnimator*>(animatorPtr);
animator->setDuration(duration);
}
static jint getDuration(JNIEnv* env, jobject clazz, jlong animatorPtr) {
- BaseAnimator* animator = reinterpret_cast<BaseAnimator*>(animatorPtr);
+ BaseRenderNodeAnimator* animator = reinterpret_cast<BaseRenderNodeAnimator*>(animatorPtr);
return static_cast<jint>(animator->duration());
}
static void setInterpolator(JNIEnv* env, jobject clazz, jlong animatorPtr, jlong interpolatorPtr) {
- BaseAnimator* animator = reinterpret_cast<BaseAnimator*>(animatorPtr);
+ BaseRenderNodeAnimator* animator = reinterpret_cast<BaseRenderNodeAnimator*>(animatorPtr);
Interpolator* interpolator = reinterpret_cast<Interpolator*>(interpolatorPtr);
animator->setInterpolator(interpolator);
}
@@ -153,9 +143,9 @@ const char* const kClassPathName = "android/view/RenderNodeAnimator";
static JNINativeMethod gMethods[] = {
#ifdef USE_OPENGL_RENDERER
- { "nCreateAnimator", "(Ljava/lang/ref/WeakReference;IIF)J", (void*) createAnimator },
- { "nCreateCanvasPropertyFloatAnimator", "(Ljava/lang/ref/WeakReference;JIF)J", (void*) createCanvasPropertyFloatAnimator },
- { "nCreateCanvasPropertyPaintAnimator", "(Ljava/lang/ref/WeakReference;JIIF)J", (void*) createCanvasPropertyPaintAnimator },
+ { "nCreateAnimator", "(Ljava/lang/ref/WeakReference;IF)J", (void*) createAnimator },
+ { "nCreateCanvasPropertyFloatAnimator", "(Ljava/lang/ref/WeakReference;JF)J", (void*) createCanvasPropertyFloatAnimator },
+ { "nCreateCanvasPropertyPaintAnimator", "(Ljava/lang/ref/WeakReference;JIF)J", (void*) createCanvasPropertyPaintAnimator },
{ "nSetDuration", "(JI)V", (void*) setDuration },
{ "nGetDuration", "(J)I", (void*) getDuration },
{ "nSetInterpolator", "(JJ)V", (void*) setInterpolator },
diff --git a/core/jni/android_view_ThreadedRenderer.cpp b/core/jni/android_view_ThreadedRenderer.cpp
index d130a6d..2c10212 100644
--- a/core/jni/android_view_ThreadedRenderer.cpp
+++ b/core/jni/android_view_ThreadedRenderer.cpp
@@ -92,9 +92,9 @@ private:
class OnFinishedEvent {
public:
- OnFinishedEvent(BaseAnimator* animator, AnimationListener* listener)
+ OnFinishedEvent(BaseRenderNodeAnimator* animator, AnimationListener* listener)
: animator(animator), listener(listener) {}
- sp<BaseAnimator> animator;
+ sp<BaseRenderNodeAnimator> animator;
sp<AnimationListener> listener;
};
@@ -127,7 +127,7 @@ public:
virtual ~RootRenderNode() {}
- virtual void callOnFinished(BaseAnimator* animator, AnimationListener* listener) {
+ virtual void callOnFinished(BaseRenderNodeAnimator* animator, AnimationListener* listener) {
OnFinishedEvent event(animator, listener);
mOnFinishedEvents.push_back(event);
}
diff --git a/core/res/res/values-watch/config.xml b/core/res/res/values-watch/config.xml
index 44e258d..8d82a17 100644
--- a/core/res/res/values-watch/config.xml
+++ b/core/res/res/values-watch/config.xml
@@ -21,9 +21,8 @@
for watch products. Do not translate. -->
<resources xmlns:xliff="urn:oasis:names:tc:xliff:document:1.2">
- <!-- Only show power and settings items due to smaller real estate. -->
+ <!-- Only show settings item due to smaller real estate. -->
<string-array translatable="false" name="config_globalActionsList">
- <item>power</item>
<item>settings</item>
</string-array>
diff --git a/graphics/java/android/graphics/ImageFormat.java b/graphics/java/android/graphics/ImageFormat.java
index 062acaf..fe53a17 100644
--- a/graphics/java/android/graphics/ImageFormat.java
+++ b/graphics/java/android/graphics/ImageFormat.java
@@ -272,6 +272,7 @@ public class ImageFormat {
case NV16:
case YUY2:
case YV12:
+ case JPEG:
case NV21:
case YUV_420_888:
case RAW_SENSOR:
diff --git a/graphics/java/android/graphics/LayerRasterizer.java b/graphics/java/android/graphics/LayerRasterizer.java
index 5b35608..e7a24a4 100644
--- a/graphics/java/android/graphics/LayerRasterizer.java
+++ b/graphics/java/android/graphics/LayerRasterizer.java
@@ -16,11 +16,12 @@
package android.graphics;
+@Deprecated
public class LayerRasterizer extends Rasterizer {
public LayerRasterizer() {
native_instance = nativeConstructor();
}
-
+
/** Add a new layer (above any previous layers) to the rasterizer.
The layer will extract those fields that affect the mask from
the specified paint, but will not retain a reference to the paint
diff --git a/graphics/java/android/graphics/Matrix.java b/graphics/java/android/graphics/Matrix.java
index 66bf75c..b4e6bab 100644
--- a/graphics/java/android/graphics/Matrix.java
+++ b/graphics/java/android/graphics/Matrix.java
@@ -245,6 +245,16 @@ public class Matrix {
}
/**
+ * Gets whether this matrix is affine. An affine matrix preserves
+ * straight lines and has no perspective.
+ *
+ * @return Whether the matrix is affine.
+ */
+ public boolean isAffine() {
+ return native_isAffine(native_instance);
+ }
+
+ /**
* Returns true if will map a rectangle to another rectangle. This can be
* true if the matrix is identity, scale-only, or rotates a multiple of 90
* degrees.
@@ -828,6 +838,7 @@ public class Matrix {
private static native long native_create(long native_src_or_zero);
private static native boolean native_isIdentity(long native_object);
+ private static native boolean native_isAffine(long native_object);
private static native boolean native_rectStaysRect(long native_object);
private static native void native_reset(long native_object);
private static native void native_set(long native_object,
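A small usage sketch for the new isAffine() query; the values are illustrative only. Any non-zero perspective component makes the matrix non-affine:

    Matrix m = new Matrix();
    m.setRotate(45f);
    m.postScale(2f, 2f);
    boolean affine = m.isAffine();       // true: rotation + scale preserve straight lines

    float[] values = {
            1f, 0f, 0f,
            0f, 1f, 0f,
            0.001f, 0f, 1f               // non-zero MPERSP_0 entry
    };
    m.setValues(values);
    boolean stillAffine = m.isAffine();  // false: perspective transform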
diff --git a/graphics/java/android/graphics/Paint.java b/graphics/java/android/graphics/Paint.java
index bdef404..92cfd6b 100644
--- a/graphics/java/android/graphics/Paint.java
+++ b/graphics/java/android/graphics/Paint.java
@@ -1064,14 +1064,17 @@ public class Paint {
mNativeTypeface = typefaceNative;
return typeface;
}
-
+
/**
* Get the paint's rasterizer (or null).
* <p />
* The raster controls/modifies how paths/text are turned into alpha masks.
*
* @return the paint's rasterizer (or null)
+ *
+ * @deprecated Rasterizer is not supported by either the HW or PDF backends.
*/
+ @Deprecated
public Rasterizer getRasterizer() {
return mRasterizer;
}
@@ -1085,7 +1088,10 @@ public class Paint {
* @param rasterizer May be null. The new rasterizer to be installed in
* the paint.
* @return rasterizer
+ *
+ * @deprecated Rasterizer is not supported by either the HW or PDF backends.
*/
+ @Deprecated
public Rasterizer setRasterizer(Rasterizer rasterizer) {
long rasterizerNative = 0;
if (rasterizer != null) {
@@ -1095,7 +1101,7 @@ public class Paint {
mRasterizer = rasterizer;
return rasterizer;
}
-
+
/**
* This draws a shadow layer below the main layer, with the specified
* offset and color, and blur radius. If radius is 0, then the shadow
diff --git a/graphics/java/android/graphics/Rasterizer.java b/graphics/java/android/graphics/Rasterizer.java
index 817814c..c351d94e 100644
--- a/graphics/java/android/graphics/Rasterizer.java
+++ b/graphics/java/android/graphics/Rasterizer.java
@@ -21,6 +21,7 @@
package android.graphics;
+@Deprecated
public class Rasterizer {
protected void finalize() throws Throwable {
diff --git a/graphics/java/android/graphics/pdf/PdfDocument.java b/graphics/java/android/graphics/pdf/PdfDocument.java
index f5b07c1..d603436 100644
--- a/graphics/java/android/graphics/pdf/PdfDocument.java
+++ b/graphics/java/android/graphics/pdf/PdfDocument.java
@@ -32,7 +32,7 @@ import java.util.List;
/**
* <p>
* This class enables generating a PDF document from native Android content. You
- * open a new document and then for every page you want to add you start a page,
+ * create a new document and then for every page you want to add you start a page,
* write content to the page, and finish the page. After you are done with all
* pages, you write the document to an output stream and close the document.
* After a document is closed you should not use it anymore. Note that pages are
@@ -64,7 +64,7 @@ import java.util.List;
* // write the document content
* document.writeTo(getOutputStream());
*
- * //close the document
+ * // close the document
* document.close();
* </pre>
*/
diff --git a/graphics/java/android/graphics/pdf/PdfRenderer.java b/graphics/java/android/graphics/pdf/PdfRenderer.java
new file mode 100644
index 0000000..3fa3b9f
--- /dev/null
+++ b/graphics/java/android/graphics/pdf/PdfRenderer.java
@@ -0,0 +1,391 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.graphics.pdf;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.graphics.Bitmap;
+import android.graphics.Bitmap.Config;
+import android.graphics.Matrix;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.os.ParcelFileDescriptor;
+import android.system.ErrnoException;
+import android.system.OsConstants;
+import dalvik.system.CloseGuard;
+import libcore.io.Libcore;
+
+import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * <p>
+ * This class enables rendering a PDF document. This class is not thread safe.
+ * </p>
+ * <p>
+ * If you want to render a PDF, you create a renderer and for every page you want
+ * to render, you open the page, render it, and close the page. After you are done
+ * with rendering, you close the renderer. After the renderer is closed it should not
+ * be used anymore. Note that the pages are rendered one by one, i.e. you can have
+ * only a single page opened at any given time.
+ * </p>
+ * <p>
+ * A typical use of the APIs to render a PDF looks like this:
+ * </p>
+ * <pre>
+ * // create a new renderer
+ * PdfRenderer renderer = new PdfRenderer(getSeekableFileDescriptor());
+ *
+ * // let us just render all pages
+ * final int pageCount = renderer.getPageCount();
+ * for (int i = 0; i < pageCount; i++) {
+ * Page page = renderer.openPage(i);
+ * Bitmap bitmap = getBitmapReuseIfPossible(page);
+ *
+ * // say we render for showing on the screen
+ * page.render(bitmap, getContentBoundsInBitmap(),
+ * getDesiredTransformation(), Page.RENDER_MODE_FOR_DISPLAY);
+ *
+ * // do stuff with the bitmap
+ *
+ * renderer.closePage(page);
+ * }
+ *
+ * // close the renderer
+ * renderer.close();
+ * </pre>
+ *
+ * @see #close()
+ */
+public final class PdfRenderer implements AutoCloseable {
+ private final CloseGuard mCloseGuard = CloseGuard.get();
+
+ private final Point mTempPoint = new Point();
+
+ private final long mNativeDocument;
+
+ private final int mPageCount;
+
+ private ParcelFileDescriptor mInput;
+
+ private Page mCurrentPage;
+
+ /** @hide */
+ @IntDef({
+ Page.RENDER_MODE_FOR_DISPLAY,
+ Page.RENDER_MODE_FOR_PRINT
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface RenderMode {}
+
+ /**
+ * Creates a new instance.
+ * <p>
+ * <strong>Note:</strong> The provided file descriptor must be <strong>seekable</strong>,
+ * i.e. its data may be randomly accessed, e.g. it points to a file.
+ * </p>
+ * <p>
+ * <strong>Note:</strong> This class takes ownership of the passed in file descriptor
+ * and is responsible for closing it when the renderer is closed.
+ * </p>
+ *
+ * @param input Seekable file descriptor to read from.
+ */
+ public PdfRenderer(@NonNull ParcelFileDescriptor input) throws IOException {
+ if (input == null) {
+ throw new NullPointerException("input cannot be null");
+ }
+
+ final long size;
+ try {
+ Libcore.os.lseek(input.getFileDescriptor(), 0, OsConstants.SEEK_SET);
+ size = Libcore.os.fstat(input.getFileDescriptor()).st_size;
+ } catch (ErrnoException ee) {
+ throw new IllegalArgumentException("file descriptor not seekable");
+ }
+
+ mInput = input;
+ mNativeDocument = nativeCreate(mInput.getFd(), size);
+ mPageCount = nativeGetPageCount(mNativeDocument);
+ mCloseGuard.open("close");
+ }
+
+ /**
+ * Closes this renderer. You should not use this instance
+ * after this method is called.
+ */
+ public void close() {
+ throwIfClosed();
+ throwIfPageOpened();
+ doClose();
+ }
+
+ /**
+ * Gets the number of pages in the document.
+ *
+ * @return The page count.
+ */
+ public int getPageCount() {
+ throwIfClosed();
+ return mPageCount;
+ }
+
+ /**
+ * Gets whether the document prefers to be scaled for printing.
+ * You should take this into account if the document is rendered
+ * for printing and the target media size differs from the page
+ * size.
+ *
+ * @return Whether to scale the document.
+ */
+ public boolean shouldScaleForPrinting() {
+ throwIfClosed();
+ return nativeScaleForPrinting(mNativeDocument);
+ }
+
+ /**
+ * Opens a page for rendering.
+ *
+ * @param index The page index.
+ * @return A page that can be rendered.
+ *
+ * @see #closePage(PdfRenderer.Page)
+ */
+ public Page openPage(int index) {
+ throwIfClosed();
+ throwIfPageOpened();
+ mCurrentPage = new Page(index);
+ return mCurrentPage;
+ }
+
+ /**
+ * Closes a page opened for rendering.
+ *
+ * @param page The page to close.
+ *
+ * @see #openPage(int)
+ */
+ public void closePage(@NonNull Page page) {
+ throwIfClosed();
+ throwIfNotCurrentPage(page);
+ throwIfCurrentPageClosed();
+ mCurrentPage.close();
+ mCurrentPage = null;
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ mCloseGuard.warnIfOpen();
+ if (mInput != null) {
+ doClose();
+ }
+ } finally {
+ super.finalize();
+ }
+ }
+
+ private void doClose() {
+ if (mCurrentPage != null) {
+ mCurrentPage.close();
+ mCurrentPage = null;
+ }
+ nativeClose(mNativeDocument);
+ try {
+ mInput.close();
+ } catch (IOException ioe) {
+ /* ignore - best effort */
+ }
+ mInput = null;
+ mCloseGuard.close();
+ }
+
+ private void throwIfClosed() {
+ if (mInput == null) {
+ throw new IllegalStateException("Already closed");
+ }
+ }
+
+ private void throwIfPageOpened() {
+ if (mCurrentPage != null) {
+ throw new IllegalStateException("Current page not closed");
+ }
+ }
+
+ private void throwIfCurrentPageClosed() {
+ if (mCurrentPage == null) {
+ throw new IllegalStateException("Already closed");
+ }
+ }
+
+ private void throwIfNotCurrentPage(Page page) {
+ if (page != mCurrentPage) {
+ throw new IllegalArgumentException("Page not from document");
+ }
+ }
+
+ /**
+ * This class represents a PDF document page for rendering.
+ */
+ public final class Page {
+
+ /**
+ * Mode to render the content for display on a screen.
+ */
+ public static final int RENDER_MODE_FOR_DISPLAY = 1;
+
+ /**
+ * Mode to render the content for printing.
+ */
+ public static final int RENDER_MODE_FOR_PRINT = 2;
+
+ private final int mIndex;
+ private final int mWidth;
+ private final int mHeight;
+
+ private long mNativePage;
+
+ private Page(int index) {
+ Point size = mTempPoint;
+ mNativePage = nativeOpenPageAndGetSize(mNativeDocument, index, size);
+ mIndex = index;
+ mWidth = size.x;
+ mHeight = size.y;
+ }
+
+ /**
+ * Gets the page index.
+ *
+ * @return The index.
+ */
+ public int getIndex() {
+ return mIndex;
+ }
+
+ /**
+ * Gets the page width in points (1/72").
+ *
+ * @return The width in points.
+ */
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /**
+ * Gets the page height in points (1/72").
+ *
+ * @return The height in points.
+ */
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * Renders a page to a bitmap.
+ * <p>
+ * You may optionally specify a rectangular clip in the bitmap bounds. No rendering
+ * outside the clip will be performed, hence it is your responsibility to initialize
+ * the bitmap outside the clip.
+ * </p>
+ * <p>
+ * You may optionally specify a matrix to transform the content from page coordinates
+ * which are in points (1/72") to bitmap coordinates which are in pixels. If this
+ * matrix is not provided this method will apply a transformation that will fit the
+ * whole page to the destination clip if provided or the destination bitmap if no
+ * clip is provided.
+ * </p>
+ * <p>
+ * The clip and transformation are useful for implementing tile rendering where the
+ * destination bitmap contains a portion of the image, for example when zooming.
+ * Another useful application is printing, where a bitmap holding the whole
+ * page would be too large, so a client can render the page in stripes.
+ * </p>
+ * <p>
+ * <strong>Note: </strong> The destination bitmap format must be
+ * {@link Config#ARGB_8888 ARGB}.
+ * </p>
+ * <p>
+ * <strong>Note: </strong> The optional transformation matrix must be affine as per
+ * {@link android.graphics.Matrix#isAffine()}. Hence, you can specify rotation, scaling,
+ * translation but not a perspective transformation.
+ * </p>
+ *
+ * @param destination Destination bitmap to which to render.
+ * @param destClip Optional clip in the bitmap bounds.
+ * @param transform Optional transformation to apply when rendering.
+ * @param renderMode The render mode.
+ *
+ * @see #RENDER_MODE_FOR_DISPLAY
+ * @see #RENDER_MODE_FOR_PRINT
+ */
+ public void render(@NonNull Bitmap destination, @Nullable Rect destClip,
+ @Nullable Matrix transform, @RenderMode int renderMode) {
+ if (destination.getConfig() != Config.ARGB_8888) {
+ throw new IllegalArgumentException("Unsupported pixel format");
+ }
+
+ if (destClip != null) {
+ if (destClip.left < 0 || destClip.top < 0
+ || destClip.right > destination.getWidth()
+ || destClip.bottom > destination.getHeight()) {
+ throw new IllegalArgumentException("destBounds not in destination");
+ }
+ }
+
+ if (transform != null && !transform.isAffine()) {
+ throw new IllegalArgumentException("transform not affine");
+ }
+
+ if (renderMode == (RENDER_MODE_FOR_PRINT | RENDER_MODE_FOR_DISPLAY)) {
+ throw new IllegalArgumentException("Only single render mode supported");
+ }
+
+ if (renderMode != RENDER_MODE_FOR_PRINT && renderMode != RENDER_MODE_FOR_DISPLAY) {
+ throw new IllegalArgumentException("Unsupported render mode");
+ }
+
+ final int contentLeft = (destClip != null) ? destClip.left : 0;
+ final int contentTop = (destClip != null) ? destClip.top : 0;
+ final int contentRight = (destClip != null) ? destClip.right
+ : destination.getWidth();
+ final int contentBottom = (destClip != null) ? destClip.bottom
+ : destination.getHeight();
+
+ final long transformPtr = (transform != null) ? transform.native_instance : 0;
+
+ nativeRenderPage(mNativeDocument, mNativePage, destination.mNativeBitmap, contentLeft,
+ contentTop, contentRight, contentBottom, transformPtr, renderMode);
+ }
+
+ void close() {
+ nativeClosePage(mNativePage);
+ mNativePage = 0;
+ }
+ }
+
+ private static native long nativeCreate(int fd, long size);
+ private static native void nativeClose(long documentPtr);
+ private static native int nativeGetPageCount(long documentPtr);
+ private static native boolean nativeScaleForPrinting(long documentPtr);
+ private static native void nativeRenderPage(long documentPtr, long pagePtr, long destPtr,
+ int destLeft, int destTop, int destRight, int destBottom, long matrixPtr, int renderMode);
+ private static native long nativeOpenPageAndGetSize(long documentPtr, int pageIndex,
+ Point outSize);
+ private static native void nativeClosePage(long pagePtr);
+}
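A short end-to-end sketch of the API added above, under the assumption that a readable PDF file is available (class and method names are hypothetical). ParcelFileDescriptor.open() yields the required seekable descriptor, and the clip/transform illustrate the tile-rendering case described in Page#render():

    import android.graphics.Bitmap;
    import android.graphics.Color;
    import android.graphics.Matrix;
    import android.graphics.Rect;
    import android.graphics.pdf.PdfRenderer;
    import android.os.ParcelFileDescriptor;
    import java.io.File;
    import java.io.IOException;

    final class PdfRendererSample {
        static Bitmap renderFirstPageTile(File pdfFile) throws IOException {
            ParcelFileDescriptor fd = ParcelFileDescriptor.open(
                    pdfFile, ParcelFileDescriptor.MODE_READ_ONLY); // seekable: plain file
            PdfRenderer renderer = new PdfRenderer(fd);
            try {
                PdfRenderer.Page page = renderer.openPage(0);

                // Render into a 1024x1024 ARGB_8888 tile, scaled so that half the
                // page width fills the tile (the zoom / tile-rendering use case).
                Bitmap tile = Bitmap.createBitmap(1024, 1024, Bitmap.Config.ARGB_8888);
                tile.eraseColor(Color.WHITE);         // initialize the destination
                Matrix transform = new Matrix();
                float scale = 1024f / (page.getWidth() / 2f);
                transform.setScale(scale, scale);     // affine, as render() requires

                page.render(tile, new Rect(0, 0, 1024, 1024), transform,
                        PdfRenderer.Page.RENDER_MODE_FOR_DISPLAY);

                renderer.closePage(page);
                return tile;
            } finally {
                renderer.close();
            }
        }
    }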
diff --git a/libs/hwui/Animator.cpp b/libs/hwui/Animator.cpp
index a033f86..83eedfb 100644
--- a/libs/hwui/Animator.cpp
+++ b/libs/hwui/Animator.cpp
@@ -27,31 +27,48 @@ namespace android {
namespace uirenderer {
/************************************************************
- * Base animator
+ * BaseRenderNodeAnimator
************************************************************/
-BaseAnimator::BaseAnimator()
- : mInterpolator(0)
- , mPlayState(PENDING)
+BaseRenderNodeAnimator::BaseRenderNodeAnimator(float finalValue)
+ : mFinalValue(finalValue)
+ , mDeltaValue(0)
+ , mFromValue(0)
+ , mInterpolator(0)
+ , mPlayState(NEEDS_START)
, mStartTime(0)
- , mDuration(300) {
-
+ , mDuration(300) {
}
-BaseAnimator::~BaseAnimator() {
+BaseRenderNodeAnimator::~BaseRenderNodeAnimator() {
setInterpolator(NULL);
}
-void BaseAnimator::setInterpolator(Interpolator* interpolator) {
+void BaseRenderNodeAnimator::setInterpolator(Interpolator* interpolator) {
delete mInterpolator;
mInterpolator = interpolator;
}
-void BaseAnimator::setDuration(nsecs_t duration) {
+void BaseRenderNodeAnimator::setDuration(nsecs_t duration) {
mDuration = duration;
}
-bool BaseAnimator::animateFrame(TreeInfo& info) {
+void BaseRenderNodeAnimator::setStartValue(float value) {
+ LOG_ALWAYS_FATAL_IF(mPlayState != NEEDS_START,
+ "Cannot set the start value after the animator has started!");
+ mFromValue = value;
+ mDeltaValue = (mFinalValue - mFromValue);
+ mPlayState = PENDING;
+}
+
+void BaseRenderNodeAnimator::setupStartValueIfNecessary(RenderNode* target, TreeInfo& info) {
+ if (mPlayState == NEEDS_START) {
+ setStartValue(getValue(target));
+ mPlayState = PENDING;
+ }
+}
+
+bool BaseRenderNodeAnimator::animate(RenderNode* target, TreeInfo& info) {
if (mPlayState == PENDING) {
mPlayState = RUNNING;
mStartTime = info.frameTimeMs;
@@ -59,7 +76,6 @@ bool BaseAnimator::animateFrame(TreeInfo& info) {
if (!mInterpolator) {
setInterpolator(Interpolator::createDefaultInterpolator());
}
- onAnimationStarted();
}
float fraction = 1.0f;
@@ -71,17 +87,16 @@ bool BaseAnimator::animateFrame(TreeInfo& info) {
}
}
fraction = mInterpolator->interpolate(fraction);
- onAnimationUpdated(fraction);
+ setValue(target, mFromValue + (mDeltaValue * fraction));
if (mPlayState == FINISHED) {
- onAnimationFinished();
callOnFinishedListener(info);
return true;
}
return false;
}
-void BaseAnimator::callOnFinishedListener(TreeInfo& info) {
+void BaseRenderNodeAnimator::callOnFinishedListener(TreeInfo& info) {
if (mListener.get()) {
if (!info.animationHook) {
mListener->onAnimationFinished(this);
@@ -92,70 +107,49 @@ void BaseAnimator::callOnFinishedListener(TreeInfo& info) {
}
/************************************************************
- * BaseRenderNodeAnimator
- ************************************************************/
-
-BaseRenderNodeAnimator::BaseRenderNodeAnimator(
- BaseRenderNodeAnimator::DeltaValueType deltaType, float delta)
- : mTarget(0)
- , mDeltaValueType(deltaType)
- , mDeltaValue(delta)
- , mFromValue(-1) {
-}
-
-bool BaseRenderNodeAnimator::animate(RenderNode* target, TreeInfo& info) {
- mTarget = target;
- bool finished = animateFrame(info);
- mTarget = NULL;
- return finished;
-}
-
-void BaseRenderNodeAnimator::onAnimationStarted() {
- mFromValue = getValue();
-
- if (mDeltaValueType == BaseRenderNodeAnimator::ABSOLUTE) {
- mDeltaValue = (mDeltaValue - mFromValue);
- mDeltaValueType = BaseRenderNodeAnimator::DELTA;
- }
-}
-
-void BaseRenderNodeAnimator::onAnimationUpdated(float fraction) {
- float value = mFromValue + (mDeltaValue * fraction);
- setValue(value);
-}
-
-/************************************************************
* RenderPropertyAnimator
************************************************************/
+struct RenderPropertyAnimator::PropertyAccessors {
+ RenderNode::DirtyPropertyMask dirtyMask;
+ GetFloatProperty getter;
+ SetFloatProperty setter;
+};
+
// Maps RenderProperty enum to accessors
const RenderPropertyAnimator::PropertyAccessors RenderPropertyAnimator::PROPERTY_ACCESSOR_LUT[] = {
- {&RenderProperties::getTranslationX, &RenderProperties::setTranslationX },
- {&RenderProperties::getTranslationY, &RenderProperties::setTranslationY },
- {&RenderProperties::getTranslationZ, &RenderProperties::setTranslationZ },
- {&RenderProperties::getScaleX, &RenderProperties::setScaleX },
- {&RenderProperties::getScaleY, &RenderProperties::setScaleY },
- {&RenderProperties::getRotation, &RenderProperties::setRotation },
- {&RenderProperties::getRotationX, &RenderProperties::setRotationX },
- {&RenderProperties::getRotationY, &RenderProperties::setRotationY },
- {&RenderProperties::getX, &RenderProperties::setX },
- {&RenderProperties::getY, &RenderProperties::setY },
- {&RenderProperties::getZ, &RenderProperties::setZ },
- {&RenderProperties::getAlpha, &RenderProperties::setAlpha },
+ {RenderNode::TRANSLATION_X, &RenderProperties::getTranslationX, &RenderProperties::setTranslationX },
+ {RenderNode::TRANSLATION_Y, &RenderProperties::getTranslationY, &RenderProperties::setTranslationY },
+ {RenderNode::TRANSLATION_Z, &RenderProperties::getTranslationZ, &RenderProperties::setTranslationZ },
+ {RenderNode::SCALE_X, &RenderProperties::getScaleX, &RenderProperties::setScaleX },
+ {RenderNode::SCALE_Y, &RenderProperties::getScaleY, &RenderProperties::setScaleY },
+ {RenderNode::ROTATION, &RenderProperties::getRotation, &RenderProperties::setRotation },
+ {RenderNode::ROTATION_X, &RenderProperties::getRotationX, &RenderProperties::setRotationX },
+ {RenderNode::ROTATION_Y, &RenderProperties::getRotationY, &RenderProperties::setRotationY },
+ {RenderNode::X, &RenderProperties::getX, &RenderProperties::setX },
+ {RenderNode::Y, &RenderProperties::getY, &RenderProperties::setY },
+ {RenderNode::Z, &RenderProperties::getZ, &RenderProperties::setZ },
+ {RenderNode::ALPHA, &RenderProperties::getAlpha, &RenderProperties::setAlpha },
};
-RenderPropertyAnimator::RenderPropertyAnimator(RenderProperty property,
- DeltaValueType deltaType, float deltaValue)
- : BaseRenderNodeAnimator(deltaType, deltaValue)
- , mPropertyAccess(PROPERTY_ACCESSOR_LUT[property]) {
+RenderPropertyAnimator::RenderPropertyAnimator(RenderProperty property, float finalValue)
+ : BaseRenderNodeAnimator(finalValue)
+ , mPropertyAccess(&(PROPERTY_ACCESSOR_LUT[property])) {
+}
+
+void RenderPropertyAnimator::onAttached(RenderNode* target) {
+ if (target->isPropertyFieldDirty(mPropertyAccess->dirtyMask)) {
+ setStartValue((target->stagingProperties().*mPropertyAccess->getter)());
+ }
+ (target->mutateStagingProperties().*mPropertyAccess->setter)(finalValue());
}
-float RenderPropertyAnimator::getValue() const {
- return (target()->animatorProperties().*mPropertyAccess.getter)();
+float RenderPropertyAnimator::getValue(RenderNode* target) const {
+ return (target->properties().*mPropertyAccess->getter)();
}
-void RenderPropertyAnimator::setValue(float value) {
- (target()->animatorProperties().*mPropertyAccess.setter)(value);
+void RenderPropertyAnimator::setValue(RenderNode* target, float value) {
+ (target->animatorProperties().*mPropertyAccess->setter)(value);
}
/************************************************************
@@ -163,16 +157,16 @@ void RenderPropertyAnimator::setValue(float value) {
************************************************************/
CanvasPropertyPrimitiveAnimator::CanvasPropertyPrimitiveAnimator(
- CanvasPropertyPrimitive* property, DeltaValueType deltaType, float deltaValue)
- : BaseRenderNodeAnimator(deltaType, deltaValue)
+ CanvasPropertyPrimitive* property, float finalValue)
+ : BaseRenderNodeAnimator(finalValue)
, mProperty(property) {
}
-float CanvasPropertyPrimitiveAnimator::getValue() const {
+float CanvasPropertyPrimitiveAnimator::getValue(RenderNode* target) const {
return mProperty->value;
}
-void CanvasPropertyPrimitiveAnimator::setValue(float value) {
+void CanvasPropertyPrimitiveAnimator::setValue(RenderNode* target, float value) {
mProperty->value = value;
}
@@ -181,14 +175,13 @@ void CanvasPropertyPrimitiveAnimator::setValue(float value) {
************************************************************/
CanvasPropertyPaintAnimator::CanvasPropertyPaintAnimator(
- CanvasPropertyPaint* property, PaintField field,
- DeltaValueType deltaType, float deltaValue)
- : BaseRenderNodeAnimator(deltaType, deltaValue)
+ CanvasPropertyPaint* property, PaintField field, float finalValue)
+ : BaseRenderNodeAnimator(finalValue)
, mProperty(property)
, mField(field) {
}
-float CanvasPropertyPaintAnimator::getValue() const {
+float CanvasPropertyPaintAnimator::getValue(RenderNode* target) const {
switch (mField) {
case STROKE_WIDTH:
return mProperty->value.getStrokeWidth();
@@ -204,7 +197,7 @@ static uint8_t to_uint8(float value) {
return static_cast<uint8_t>( c < 0 ? 0 : c > 255 ? 255 : c );
}
-void CanvasPropertyPaintAnimator::setValue(float value) {
+void CanvasPropertyPaintAnimator::setValue(RenderNode* target, float value) {
switch (mField) {
case STROKE_WIDTH:
mProperty->value.setStrokeWidth(value);
diff --git a/libs/hwui/Animator.h b/libs/hwui/Animator.h
index 52a1807..fe88cbf 100644
--- a/libs/hwui/Animator.h
+++ b/libs/hwui/Animator.h
@@ -33,16 +33,14 @@ class RenderProperties;
class AnimationListener : public VirtualLightRefBase {
public:
- ANDROID_API virtual void onAnimationFinished(BaseAnimator*) = 0;
+ ANDROID_API virtual void onAnimationFinished(BaseRenderNodeAnimator*) = 0;
protected:
ANDROID_API virtual ~AnimationListener() {}
};
-// Helper class to contain generic animator helpers
-class BaseAnimator : public VirtualLightRefBase {
- PREVENT_COPY_AND_ASSIGN(BaseAnimator);
+class BaseRenderNodeAnimator : public VirtualLightRefBase {
+ PREVENT_COPY_AND_ASSIGN(BaseRenderNodeAnimator);
public:
-
ANDROID_API void setInterpolator(Interpolator* interpolator);
ANDROID_API void setDuration(nsecs_t durationInMs);
ANDROID_API nsecs_t duration() { return mDuration; }
@@ -50,31 +48,38 @@ public:
mListener = listener;
}
+ ANDROID_API virtual void onAttached(RenderNode* target) {}
+
+ // Guaranteed to happen before the staging push
+ void setupStartValueIfNecessary(RenderNode* target, TreeInfo& info);
+
+ bool animate(RenderNode* target, TreeInfo& info);
+
bool isFinished() { return mPlayState == FINISHED; }
+ float finalValue() { return mFinalValue; }
protected:
- BaseAnimator();
- virtual ~BaseAnimator();
-
- // This is the main animation entrypoint that subclasses should call
- // to generate the onAnimation* lifecycle events
- // Returns true if the animation has finished, false otherwise
- bool animateFrame(TreeInfo& info);
+ BaseRenderNodeAnimator(float finalValue);
+ virtual ~BaseRenderNodeAnimator();
- // Called when PlayState switches from PENDING to RUNNING
- virtual void onAnimationStarted() {}
- virtual void onAnimationUpdated(float fraction) = 0;
- virtual void onAnimationFinished() {}
+ void setStartValue(float value);
+ virtual float getValue(RenderNode* target) const = 0;
+ virtual void setValue(RenderNode* target, float value) = 0;
private:
void callOnFinishedListener(TreeInfo& info);
enum PlayState {
+ NEEDS_START,
PENDING,
RUNNING,
FINISHED,
};
+ float mFinalValue;
+ float mDeltaValue;
+ float mFromValue;
+
Interpolator* mInterpolator;
PlayState mPlayState;
long mStartTime;
@@ -83,42 +88,6 @@ private:
sp<AnimationListener> mListener;
};
-class BaseRenderNodeAnimator : public BaseAnimator {
-public:
- // Since the UI thread doesn't necessarily know what the current values
- // actually are and thus can't do the calculations, this is used to inform
- // the animator how to lazy-resolve the input value
- enum DeltaValueType {
- // The delta value represents an absolute value endpoint
- // mDeltaValue needs to be recalculated to be mDelta = (mDelta - fromValue)
- // in onAnimationStarted()
- ABSOLUTE = 0,
- // The final value represents an offset from the current value
- // No recalculation is needed
- DELTA,
- };
-
- bool animate(RenderNode* target, TreeInfo& info);
-
-protected:
- BaseRenderNodeAnimator(DeltaValueType deltaType, float deltaValue);
-
- RenderNode* target() const { return mTarget; }
- virtual float getValue() const = 0;
- virtual void setValue(float value) = 0;
-
-private:
- virtual void onAnimationStarted();
- virtual void onAnimationUpdated(float fraction);
-
- // mTarget is only valid inside animate()
- RenderNode* mTarget;
-
- BaseRenderNodeAnimator::DeltaValueType mDeltaValueType;
- float mDeltaValue;
- float mFromValue;
-};
-
class RenderPropertyAnimator : public BaseRenderNodeAnimator {
public:
enum RenderProperty {
@@ -136,23 +105,20 @@ public:
ALPHA,
};
- ANDROID_API RenderPropertyAnimator(RenderProperty property,
- DeltaValueType deltaType, float deltaValue);
+ ANDROID_API RenderPropertyAnimator(RenderProperty property, float finalValue);
+
+ ANDROID_API virtual void onAttached(RenderNode* target);
protected:
- ANDROID_API virtual float getValue() const;
- ANDROID_API virtual void setValue(float value);
+ virtual float getValue(RenderNode* target) const;
+ virtual void setValue(RenderNode* target, float value);
private:
typedef void (RenderProperties::*SetFloatProperty)(float value);
typedef float (RenderProperties::*GetFloatProperty)() const;
- struct PropertyAccessors {
- GetFloatProperty getter;
- SetFloatProperty setter;
- };
-
- PropertyAccessors mPropertyAccess;
+ struct PropertyAccessors;
+ const PropertyAccessors* mPropertyAccess;
static const PropertyAccessors PROPERTY_ACCESSOR_LUT[];
};
@@ -160,10 +126,10 @@ private:
class CanvasPropertyPrimitiveAnimator : public BaseRenderNodeAnimator {
public:
ANDROID_API CanvasPropertyPrimitiveAnimator(CanvasPropertyPrimitive* property,
- DeltaValueType deltaType, float deltaValue);
+ float finalValue);
protected:
- ANDROID_API virtual float getValue() const;
- ANDROID_API virtual void setValue(float value);
+ virtual float getValue(RenderNode* target) const;
+ virtual void setValue(RenderNode* target, float value);
private:
sp<CanvasPropertyPrimitive> mProperty;
};
@@ -176,10 +142,10 @@ public:
};
ANDROID_API CanvasPropertyPaintAnimator(CanvasPropertyPaint* property,
- PaintField field, DeltaValueType deltaType, float deltaValue);
+ PaintField field, float finalValue);
protected:
- ANDROID_API virtual float getValue() const;
- ANDROID_API virtual void setValue(float value);
+ virtual float getValue(RenderNode* target) const;
+ virtual void setValue(RenderNode* target, float value);
private:
sp<CanvasPropertyPaint> mProperty;
PaintField mField;
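Conceptually, the reworked hierarchy above keeps only a final value and resolves the start value lazily from the target on the first opportunity (NEEDS_START -> PENDING). A standalone plain-Java illustration of that model, not hwui code:

    final class FinalValueAnimatorSketch {
        private final float finalValue;
        private Float fromValue;               // null plays the role of NEEDS_START

        FinalValueAnimatorSketch(float finalValue) {
            this.finalValue = finalValue;
        }

        /** currentValue is read from the target, mirroring getValue(target). */
        float animate(float currentValue, float interpolatedFraction) {
            if (fromValue == null) {
                fromValue = currentValue;      // captured once, like setStartValue()
            }
            float delta = finalValue - fromValue;
            return fromValue + delta * interpolatedFraction;
        }
    }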
diff --git a/libs/hwui/RenderNode.cpp b/libs/hwui/RenderNode.cpp
index fba482d..d4ff4a3 100644
--- a/libs/hwui/RenderNode.cpp
+++ b/libs/hwui/RenderNode.cpp
@@ -53,7 +53,7 @@ void RenderNode::outputLogBuffer(int fd) {
}
RenderNode::RenderNode()
- : mNeedsPropertiesSync(false)
+ : mDirtyPropertyFields(0)
, mNeedsDisplayListDataSync(false)
, mDisplayListData(0)
, mStagingDisplayListData(0)
@@ -109,23 +109,37 @@ void RenderNode::prepareTreeImpl(TreeInfo& info) {
prepareSubTree(info, mDisplayListData);
}
-static bool is_finished(const sp<BaseRenderNodeAnimator>& animator) {
- return animator->isFinished();
-}
+class PushAnimatorsFunctor {
+public:
+ PushAnimatorsFunctor(RenderNode* target, TreeInfo& info)
+ : mTarget(target), mInfo(info) {}
-void RenderNode::pushStagingChanges(TreeInfo& info) {
- if (mNeedsPropertiesSync) {
- mNeedsPropertiesSync = false;
- mProperties = mStagingProperties;
+ bool operator() (const sp<BaseRenderNodeAnimator>& animator) {
+ animator->setupStartValueIfNecessary(mTarget, mInfo);
+ return animator->isFinished();
}
+private:
+ RenderNode* mTarget;
+ TreeInfo& mInfo;
+};
+
+void RenderNode::pushStagingChanges(TreeInfo& info) {
+ // Push the animators first so that setupStartValueIfNecessary() is called
+ // before properties() is trampled by stagingProperties(), as they are
+ // required by some animators.
if (mNeedsAnimatorsSync) {
mAnimators.resize(mStagingAnimators.size());
std::vector< sp<BaseRenderNodeAnimator> >::iterator it;
+ PushAnimatorsFunctor functor(this, info);
// hint: this means copy_if_not()
it = std::remove_copy_if(mStagingAnimators.begin(), mStagingAnimators.end(),
- mAnimators.begin(), is_finished);
+ mAnimators.begin(), functor);
mAnimators.resize(std::distance(mAnimators.begin(), it));
}
+ if (mDirtyPropertyFields) {
+ mDirtyPropertyFields = 0;
+ mProperties = mStagingProperties;
+ }
if (mNeedsDisplayListDataSync) {
mNeedsDisplayListDataSync = false;
// Do a push pass on the old tree to handle freeing DisplayListData
@@ -144,7 +158,7 @@ public:
AnimateFunctor(RenderNode* target, TreeInfo& info)
: mTarget(target), mInfo(info) {}
- bool operator() (sp<BaseRenderNodeAnimator>& animator) {
+ bool operator() (const sp<BaseRenderNodeAnimator>& animator) {
return animator->animate(mTarget, mInfo);
}
private:
diff --git a/libs/hwui/RenderNode.h b/libs/hwui/RenderNode.h
index bc62ee1..1811a7b 100644
--- a/libs/hwui/RenderNode.h
+++ b/libs/hwui/RenderNode.h
@@ -82,6 +82,22 @@ class DrawDisplayListOp;
*/
class RenderNode : public VirtualLightRefBase {
public:
+ enum DirtyPropertyMask {
+ GENERIC = 1 << 1,
+ TRANSLATION_X = 1 << 2,
+ TRANSLATION_Y = 1 << 3,
+ TRANSLATION_Z = 1 << 4,
+ SCALE_X = 1 << 5,
+ SCALE_Y = 1 << 6,
+ ROTATION = 1 << 7,
+ ROTATION_X = 1 << 8,
+ ROTATION_Y = 1 << 9,
+ X = 1 << 10,
+ Y = 1 << 11,
+ Z = 1 << 12,
+ ALPHA = 1 << 13,
+ };
+
ANDROID_API RenderNode();
ANDROID_API virtual ~RenderNode();
@@ -123,6 +139,14 @@ public:
}
}
+ bool isPropertyFieldDirty(DirtyPropertyMask field) const {
+ return mDirtyPropertyFields & field;
+ }
+
+ void setPropertyFieldsDirty(uint32_t fields) {
+ mDirtyPropertyFields |= fields;
+ }
+
const RenderProperties& properties() {
return mProperties;
}
@@ -136,7 +160,6 @@ public:
}
RenderProperties& mutateStagingProperties() {
- mNeedsPropertiesSync = true;
return mStagingProperties;
}
@@ -152,6 +175,7 @@ public:
// UI thread only!
ANDROID_API void addAnimator(const sp<BaseRenderNodeAnimator>& animator) {
+ animator->onAttached(this);
mStagingAnimators.insert(animator);
mNeedsAnimatorsSync = true;
}
@@ -227,7 +251,7 @@ private:
String8 mName;
- bool mNeedsPropertiesSync;
+ uint32_t mDirtyPropertyFields;
RenderProperties mProperties;
RenderProperties mStagingProperties;
diff --git a/libs/hwui/TreeInfo.h b/libs/hwui/TreeInfo.h
index fc5994c..d4a23b8 100644
--- a/libs/hwui/TreeInfo.h
+++ b/libs/hwui/TreeInfo.h
@@ -21,12 +21,12 @@
namespace android {
namespace uirenderer {
-class BaseAnimator;
+class BaseRenderNodeAnimator;
class AnimationListener;
class AnimationHook {
public:
- virtual void callOnFinished(BaseAnimator* animator, AnimationListener* listener) = 0;
+ virtual void callOnFinished(BaseRenderNodeAnimator* animator, AnimationListener* listener) = 0;
protected:
~AnimationHook() {}
};
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
index b28733a..5ab586f 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
@@ -23,19 +23,21 @@ import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.PointF;
import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.ColorSpaceTransform;
import android.hardware.camera2.Face;
import android.hardware.camera2.MeteringRectangle;
import android.hardware.camera2.Rational;
-import android.hardware.camera2.ReprocessFormatsMap;
import android.hardware.camera2.RggbChannelVector;
import android.hardware.camera2.Size;
-import android.hardware.camera2.StreamConfiguration;
-import android.hardware.camera2.StreamConfigurationDuration;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.marshal.impl.MarshalQueryableEnum;
+import android.hardware.camera2.params.ReprocessFormatsMap;
+import android.hardware.camera2.params.StreamConfiguration;
+import android.hardware.camera2.params.StreamConfigurationDuration;
+import android.hardware.camera2.params.StreamConfigurationMap;
import android.hardware.camera2.utils.TypeReference;
import static android.hardware.camera2.impl.CameraMetadataNative.*;
@@ -72,6 +74,9 @@ public class CameraMetadataTest extends junit.framework.TestCase {
static final int ANDROID_CONTROL_AE_ANTIBANDING_MODE = ANDROID_CONTROL_START;
static final int ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION = ANDROID_CONTROL_START + 1;
+ // From graphics.h
+ private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
+
@Override
public void setUp() {
mMetadata = new CameraMetadataNative();
@@ -293,6 +298,28 @@ public class CameraMetadataTest extends junit.framework.TestCase {
}
}
+ private static <T, T2> void assertArrayContains(T needle, T2 array) {
+ if (!array.getClass().isArray()) {
+ throw new IllegalArgumentException("actual must be array");
+ }
+
+ int len = Array.getLength(array);
+ for (int i = 0; i < len; ++i) {
+
+ Object actualElement = Array.get(array, i);
+
+ if (needle.equals(actualElement)) {
+ return;
+ }
+ }
+
+ fail(String.format(
+ "could not find element in array (needle %s). "
+ + "Array was: %s.",
+ needle,
+ formatArray(array, len)));
+ }
+
private <T> void checkKeyGetAndSet(String keyStr, TypeReference<T> typeToken, T expected,
boolean reuse) {
Key<T> key = new Key<T>(keyStr, typeToken);
@@ -804,18 +831,48 @@ public class CameraMetadataTest extends junit.framework.TestCase {
@SmallTest
public void testReadWriteReprocessFormatsMap() {
- final int RAW_OPAQUE = 0x24;
+ // final int RAW_OPAQUE = 0x24; // TODO: add RAW_OPAQUE to ImageFormat
final int RAW16 = ImageFormat.RAW_SENSOR;
final int YUV_420_888 = ImageFormat.YUV_420_888;
final int BLOB = 0x21;
+ // TODO: also test HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED as an output
int[] contents = new int[] {
- RAW_OPAQUE, 3, RAW16, YUV_420_888, BLOB,
+ YUV_420_888, 3, YUV_420_888, ImageFormat.NV21, BLOB,
RAW16, 2, YUV_420_888, BLOB,
+
};
// int32 x n
- checkKeyMarshal("android.scaler.availableInputOutputFormatsMap",
+ Key<ReprocessFormatsMap> key = new Key<ReprocessFormatsMap>(
+ "android.scaler.availableInputOutputFormatsMap", ReprocessFormatsMap.class);
+ mMetadata.writeValues(key.getTag(), toByteArray(contents));
+
+ ReprocessFormatsMap map = mMetadata.get(key);
+
+ /*
+ * Make sure the inputs/outputs were what we expected.
+ * - Use public image format constants here.
+ */
+
+ int[] expectedInputs = new int[] {
+ YUV_420_888, RAW16
+ };
+ assertArrayEquals(expectedInputs, map.getInputs());
+
+ int[] expectedYuvOutputs = new int[] {
+ YUV_420_888, ImageFormat.NV21, ImageFormat.JPEG,
+ };
+ assertArrayEquals(expectedYuvOutputs, map.getOutputs(ImageFormat.YUV_420_888));
+
+ int[] expectedRaw16Outputs = new int[] {
+ YUV_420_888, ImageFormat.JPEG,
+ };
+ assertArrayEquals(expectedRaw16Outputs, map.getOutputs(ImageFormat.RAW_SENSOR));
+
+ // Finally, do a round-trip check as a sanity check
+ checkKeyMarshal(
+ "android.scaler.availableInputOutputFormatsMap",
new ReprocessFormatsMap(contents),
toByteArray(contents)
);
@@ -889,68 +946,6 @@ public class CameraMetadataTest extends junit.framework.TestCase {
expectedIntValues, availableFormatTag);
//
- // android.scaler.availableStreamConfigurations (int x n x 4 array)
- //
- final int OUTPUT = CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
- int[] availableStreamConfigs = new int[] {
- 0x20, 3280, 2464, OUTPUT, // RAW16
- 0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
- 0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
- 0x100, 3264, 2448, OUTPUT, // ImageFormat.JPEG
- 0x100, 3200, 2400, OUTPUT, // ImageFormat.JPEG
- 0x100, 2592, 1944, OUTPUT, // ImageFormat.JPEG
- 0x100, 2048, 1536, OUTPUT, // ImageFormat.JPEG
- 0x100, 1920, 1080, OUTPUT // ImageFormat.JPEG
- };
- int[] expectedAvailableStreamConfigs = new int[] {
- 0x20, 3280, 2464, OUTPUT, // RAW16
- 0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
- 0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
- 0x21, 3264, 2448, OUTPUT, // BLOB
- 0x21, 3200, 2400, OUTPUT, // BLOB
- 0x21, 2592, 1944, OUTPUT, // BLOB
- 0x21, 2048, 1536, OUTPUT, // BLOB
- 0x21, 1920, 1080, OUTPUT // BLOB
- };
- int availableStreamConfigTag =
- CameraMetadataNative.getTag("android.scaler.availableStreamConfigurations");
-
- Key<int[]> configKey = CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
- validateArrayMetadataReadWriteOverride(configKey, availableStreamConfigs,
- expectedAvailableStreamConfigs, availableStreamConfigTag);
-
- //
- // android.scaler.availableMinFrameDurations (int x n x 4 array)
-
- //
- long[] availableMinDurations = new long[] {
- 0x20, 3280, 2464, 33333336, // RAW16
- 0x23, 3264, 2448, 33333336, // YCbCr_420_888
- 0x23, 3200, 2400, 33333336, // YCbCr_420_888
- 0x100, 3264, 2448, 33333336, // ImageFormat.JPEG
- 0x100, 3200, 2400, 33333336, // ImageFormat.JPEG
- 0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
- 0x100, 2048, 1536, 33333336, // ImageFormat.JPEG
- 0x100, 1920, 1080, 33333336 // ImageFormat.JPEG
- };
- long[] expectedAvailableMinDurations = new long[] {
- 0x20, 3280, 2464, 33333336, // RAW16
- 0x23, 3264, 2448, 33333336, // YCbCr_420_888
- 0x23, 3200, 2400, 33333336, // YCbCr_420_888
- 0x21, 3264, 2448, 33333336, // BLOB
- 0x21, 3200, 2400, 33333336, // BLOB
- 0x21, 2592, 1944, 33333336, // BLOB
- 0x21, 2048, 1536, 33333336, // BLOB
- 0x21, 1920, 1080, 33333336 // BLOB
- };
- int availableMinDurationsTag =
- CameraMetadataNative.getTag("android.scaler.availableMinFrameDurations");
-
- Key<long[]> durationKey = CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
- validateArrayMetadataReadWriteOverride(durationKey, availableMinDurations,
- expectedAvailableMinDurations, availableMinDurationsTag);
-
- //
// android.statistics.faces (Face x n array)
//
int[] expectedFaceIds = new int[] {1, 2, 3, 4, 5};
@@ -1015,14 +1010,238 @@ public class CameraMetadataTest extends junit.framework.TestCase {
}
/**
+ * Set the raw native value of the available stream configurations; ensure that
+ * the read-out managed value is consistent with what we write in.
+ */
+ @SmallTest
+ public void testOverrideStreamConfigurationMap() {
+
+ /*
+ * First, write all the raw values:
+ * - availableStreamConfigurations
+ * - availableMinFrameDurations
+ * - availableStallDurations
+ *
+ * Then, read this out as a synthetic multi-key 'streamConfigurationMap'
+ *
+ * Finally, validate that the map was unmarshaled correctly
+ * and is converting the internal formats to public formats properly.
+ */
+
+ //
+ // android.scaler.availableStreamConfigurations (int x n x 4 array)
+ //
+ final int OUTPUT = 0;
+ final int INPUT = 1;
+ int[] rawAvailableStreamConfigs = new int[] {
+ 0x20, 3280, 2464, OUTPUT, // RAW16
+ 0x23, 3264, 2448, OUTPUT, // YCbCr_420_888
+ 0x23, 3200, 2400, OUTPUT, // YCbCr_420_888
+ 0x21, 3264, 2448, OUTPUT, // BLOB
+ 0x21, 3200, 2400, OUTPUT, // BLOB
+ 0x21, 2592, 1944, OUTPUT, // BLOB
+ 0x21, 2048, 1536, OUTPUT, // BLOB
+ 0x21, 1920, 1080, OUTPUT, // BLOB
+ 0x22, 640, 480, OUTPUT, // IMPLEMENTATION_DEFINED
+ 0x20, 320, 240, INPUT, // RAW16
+ };
+ Key<StreamConfiguration[]> configKey =
+ CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+ mMetadata.writeValues(configKey.getTag(),
+ toByteArray(rawAvailableStreamConfigs));
+
+ //
+ // android.scaler.availableMinFrameDurations (int x n x 4 array)
+ //
+ long[] expectedAvailableMinDurations = new long[] {
+ 0x20, 3280, 2464, 33333331, // RAW16
+ 0x23, 3264, 2448, 33333332, // YCbCr_420_888
+ 0x23, 3200, 2400, 33333333, // YCbCr_420_888
+ 0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
+ 0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
+ 0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
+ 0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
+ 0x100, 1920, 1080, 33333338 // ImageFormat.JPEG
+ };
+ long[] rawAvailableMinDurations = new long[] {
+ 0x20, 3280, 2464, 33333331, // RAW16
+ 0x23, 3264, 2448, 33333332, // YCbCr_420_888
+ 0x23, 3200, 2400, 33333333, // YCbCr_420_888
+ 0x21, 3264, 2448, 33333334, // BLOB
+ 0x21, 3200, 2400, 33333335, // BLOB
+ 0x21, 2592, 1944, 33333336, // BLOB
+ 0x21, 2048, 1536, 33333337, // BLOB
+ 0x21, 1920, 1080, 33333338 // BLOB
+ };
+ Key<StreamConfigurationDuration[]> durationKey =
+ CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+ mMetadata.writeValues(durationKey.getTag(),
+ toByteArray(rawAvailableMinDurations));
+
+ //
+ // android.scaler.availableStallDurations (int x n x 4 array)
+ //
+ long[] expectedAvailableStallDurations = new long[] {
+ 0x20, 3280, 2464, 0, // RAW16
+ 0x23, 3264, 2448, 0, // YCbCr_420_888
+ 0x23, 3200, 2400, 0, // YCbCr_420_888
+ 0x100, 3264, 2448, 33333334, // ImageFormat.JPEG
+ 0x100, 3200, 2400, 33333335, // ImageFormat.JPEG
+ 0x100, 2592, 1944, 33333336, // ImageFormat.JPEG
+ 0x100, 2048, 1536, 33333337, // ImageFormat.JPEG
+ 0x100, 1920, 1080, 33333338 // ImageFormat.JPEG
+ };
+ // Note: RAW16 and YUV_420_888 omitted intentionally; omitted values should default to 0
+ long[] rawAvailableStallDurations = new long[] {
+ 0x21, 3264, 2448, 33333334, // BLOB
+ 0x21, 3200, 2400, 33333335, // BLOB
+ 0x21, 2592, 1944, 33333336, // BLOB
+ 0x21, 2048, 1536, 33333337, // BLOB
+ 0x21, 1920, 1080, 33333338 // BLOB
+ };
+ Key<StreamConfigurationDuration[]> stallDurationKey =
+ CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS;
+ mMetadata.writeValues(stallDurationKey.getTag(),
+ toByteArray(rawAvailableStallDurations));
+
+ //
+ // android.scaler.streamConfigurationMap (synthetic as StreamConfigurationMap)
+ //
+ StreamConfigurationMap streamConfigMap = mMetadata.get(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ // Inputs
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.RAW_SENSOR, 320, 240, /*output*/false);
+
+ // Outputs
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, /*output*/true);
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.JPEG, 1920, 1080, /*output*/true);
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.JPEG, 2048, 1536, /*output*/true);
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.JPEG, 2592, 1944, /*output*/true);
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.JPEG, 3200, 2400, /*output*/true);
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.YUV_420_888, 3200, 2400, /*output*/true);
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.YUV_420_888, 3264, 2448, /*output*/true);
+ checkStreamConfigurationMapByFormatSize(
+ streamConfigMap, ImageFormat.RAW_SENSOR, 3280, 2464, /*output*/true);
+
+ // Min Frame Durations
+
+ final int DURATION_TUPLE_SIZE = 4;
+ for (int i = 0; i < expectedAvailableMinDurations.length; i += DURATION_TUPLE_SIZE) {
+ checkStreamConfigurationMapDurationByFormatSize(
+ streamConfigMap,
+ (int)expectedAvailableMinDurations[i],
+ (int)expectedAvailableMinDurations[i+1],
+ (int)expectedAvailableMinDurations[i+2],
+ Duration.MinFrame,
+ expectedAvailableMinDurations[i+3]);
+ }
+
+ // Stall Frame Durations
+
+ for (int i = 0; i < expectedAvailableStallDurations.length; i += DURATION_TUPLE_SIZE) {
+ checkStreamConfigurationMapDurationByFormatSize(
+ streamConfigMap,
+ (int)expectedAvailableStallDurations[i],
+ (int)expectedAvailableStallDurations[i+1],
+ (int)expectedAvailableStallDurations[i+2],
+ Duration.Stall,
+ expectedAvailableStallDurations[i+3]);
+ }
+ }
+
+ private static void checkStreamConfigurationMapByFormatSize(StreamConfigurationMap configMap,
+ int format, int width, int height,
+ boolean output) {
+
+ /** arbitrary class for which StreamConfigurationMap#isOutputSupportedFor(Class) is true */
+ final Class<?> IMPLEMENTATION_DEFINED_OUTPUT_CLASS = SurfaceTexture.class;
+
+ android.util.Size[] sizes;
+ int[] formats;
+
+ if (output) {
+ if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ sizes = configMap.getOutputSizes(IMPLEMENTATION_DEFINED_OUTPUT_CLASS);
+ // in this case the 'is output format supported' check is vacuously true
+ formats = new int[] { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED };
+ } else {
+ sizes = configMap.getOutputSizes(format);
+ formats = configMap.getOutputFormats();
+ assertTrue("Format must be supported by stream configuration map",
+ configMap.isOutputSupportedFor(format));
+ }
+ } else {
+ // NOTE: There is no class-based lookup for input sizes, so an IMPLEMENTATION_DEFINED input would simply fail here
+ sizes = configMap.getInputSizes(format);
+ formats = configMap.getInputFormats();
+ }
+
+ android.util.Size expectedSize = new android.util.Size(width, height);
+
+ assertArrayContains(format, formats);
+ assertArrayContains(expectedSize, sizes);
+ }
+
+ private enum Duration {
+ MinFrame,
+ Stall
+ }
+
+ private static void checkStreamConfigurationMapDurationByFormatSize(
+ StreamConfigurationMap configMap,
+ int format, int width, int height, Duration durationKind, long expectedDuration) {
+
+ /** arbitrary class for which StreamConfigurationMap#isOutputSupportedFor(Class) is true */
+ final Class<?> IMPLEMENTATION_DEFINED_OUTPUT_CLASS = SurfaceTexture.class;
+
+ long actualDuration;
+
+ android.util.Size size = new android.util.Size(width, height);
+ switch (durationKind) {
+ case MinFrame:
+ if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ actualDuration = configMap.getOutputMinFrameDuration(
+ IMPLEMENTATION_DEFINED_OUTPUT_CLASS, size);
+ } else {
+ actualDuration = configMap.getOutputMinFrameDuration(format, size);
+ }
+
+ break;
+ case Stall:
+ if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ actualDuration = configMap.getOutputStallDuration(
+ IMPLEMENTATION_DEFINED_OUTPUT_CLASS, size);
+ } else {
+ actualDuration = configMap.getOutputStallDuration(format, size);
+ }
+
+ break;
+ default:
+ throw new AssertionError();
+ }
+
+ assertEquals("Expected " + durationKind + " to match actual value", expectedDuration,
+ actualDuration);
+ }
+
+ /**
* Validate metadata array tag read/write override.
*
* <p>Only long and int arrays are supported for now; this can easily be extended to
* support other primitive arrays.</p>
*/
- private <T> void validateArrayMetadataReadWriteOverride(Key<T> key, T writeValues,
- T readValues, int tag) {
- Class<?> type = writeValues.getClass();
+ private <T> void validateArrayMetadataReadWriteOverride(Key<T> key, T expectedWriteValues,
+ T expectedReadValues, int tag) {
+ Class<?> type = expectedWriteValues.getClass();
if (!type.isArray()) {
throw new IllegalArgumentException("This function expects a key with array type");
} else if (type != int[].class && type != long[].class) {
@@ -1030,13 +1249,13 @@ public class CameraMetadataTest extends junit.framework.TestCase {
}
// Write
- mMetadata.set(key, writeValues);
+ mMetadata.set(key, expectedWriteValues);
byte[] readOutValues = mMetadata.readValues(tag);
ByteBuffer bf = ByteBuffer.wrap(readOutValues).order(ByteOrder.nativeOrder());
- int readValuesLength = Array.getLength(readValues);
+ int readValuesLength = Array.getLength(expectedReadValues);
int readValuesNumBytes = readValuesLength * 4;
if (type == long[].class) {
readValuesNumBytes = readValuesLength * 8;
@@ -1045,9 +1264,9 @@ public class CameraMetadataTest extends junit.framework.TestCase {
assertEquals(readValuesNumBytes, readOutValues.length);
for (int i = 0; i < readValuesLength; ++i) {
if (type == int[].class) {
- assertEquals(Array.getInt(readValues, i), bf.getInt());
+ assertEquals(Array.getInt(expectedReadValues, i), bf.getInt());
} else if (type == long[].class) {
- assertEquals(Array.getLong(readValues, i), bf.getLong());
+ assertEquals(Array.getLong(expectedReadValues, i), bf.getLong());
}
}
@@ -1057,16 +1276,16 @@ public class CameraMetadataTest extends junit.framework.TestCase {
ByteBuffer.wrap(readOutValuesAsByteArray).order(ByteOrder.nativeOrder());
for (int i = 0; i < readValuesLength; ++i) {
if (type == int[].class) {
- readOutValuesByteBuffer.putInt(Array.getInt(readValues, i));
+ readOutValuesByteBuffer.putInt(Array.getInt(expectedReadValues, i));
} else if (type == long[].class) {
- readOutValuesByteBuffer.putLong(Array.getLong(readValues, i));
+ readOutValuesByteBuffer.putLong(Array.getLong(expectedReadValues, i));
}
}
mMetadata.writeValues(tag, readOutValuesAsByteArray);
T result = mMetadata.get(key);
assertNotNull(key.getName() + " result shouldn't be null", result);
- assertArrayEquals(writeValues, result);
+ assertArrayEquals(expectedWriteValues, result);
}
// TODO: move somewhere else
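For reference, a minimal usage sketch of the public API the test above exercises. It assumes a CameraCharacteristics instance obtained elsewhere (e.g. from CameraManager), uses only the StreamConfigurationMap methods that appear in this change, and the params import path is the one introduced by this patch; it is an illustration, not part of the commit.

import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Size;

class StreamConfigurationMapExample {
    // Sketch only: enumerate output configurations and their per-size durations.
    static void dumpOutputs(CameraCharacteristics characteristics) {
        StreamConfigurationMap map =
                characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        if (map == null) return; // key not published by this device
        for (int format : map.getOutputFormats()) {          // public formats only
            for (Size size : map.getOutputSizes(format)) {
                long minFrameNs = map.getOutputMinFrameDuration(format, size);
                long stallNs = map.getOutputStallDuration(format, size);
                System.out.println(format + " " + size
                        + " minFrame=" + minFrameNs + "ns stall=" + stallNs + "ns");
            }
        }
    }
}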
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_qs_location_off.png b/packages/SystemUI/res/drawable-hdpi/ic_qs_location_off.png
deleted file mode 100644
index 189f27b..0000000
--- a/packages/SystemUI/res/drawable-hdpi/ic_qs_location_off.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable-hdpi/ic_qs_location_on.png b/packages/SystemUI/res/drawable-hdpi/ic_qs_location_on.png
deleted file mode 100644
index b03d30c..0000000
--- a/packages/SystemUI/res/drawable-hdpi/ic_qs_location_on.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_qs_location_off.png b/packages/SystemUI/res/drawable-mdpi/ic_qs_location_off.png
deleted file mode 100644
index b692107..0000000
--- a/packages/SystemUI/res/drawable-mdpi/ic_qs_location_off.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable-mdpi/ic_qs_location_on.png b/packages/SystemUI/res/drawable-mdpi/ic_qs_location_on.png
deleted file mode 100644
index 867c57d..0000000
--- a/packages/SystemUI/res/drawable-mdpi/ic_qs_location_on.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/ic_qs_location_off.png b/packages/SystemUI/res/drawable-xhdpi/ic_qs_location_off.png
deleted file mode 100644
index 7ce8f83..0000000
--- a/packages/SystemUI/res/drawable-xhdpi/ic_qs_location_off.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xhdpi/ic_qs_location_on.png b/packages/SystemUI/res/drawable-xhdpi/ic_qs_location_on.png
deleted file mode 100644
index 6300bdc..0000000
--- a/packages/SystemUI/res/drawable-xhdpi/ic_qs_location_on.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_off.png b/packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_off.png
deleted file mode 100644
index c14c1bb..0000000
--- a/packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_off.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_on.png b/packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_on.png
deleted file mode 100644
index d6d4c70..0000000
--- a/packages/SystemUI/res/drawable-xxhdpi/ic_qs_location_on.png
+++ /dev/null
Binary files differ
diff --git a/packages/SystemUI/res/drawable/ic_location_24_01.xml b/packages/SystemUI/res/drawable/ic_location_24_01.xml
new file mode 100644
index 0000000..ff37d9a
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_01.xml
@@ -0,0 +1,28 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:fill="#FFFFFFFF"
+ android:pathData="M12.0,2.0C8.13,2.0 5.0,5.13 5.0,9.0c0.0,5.25 7.0,13.0 7.0,13.0s7.0,-7.75 7.0,-13.0C19.0,5.13 15.87,2.0 12.0,2.0zM12.0,11.5c-1.38,0.0 -2.5,-1.12 -2.5,-2.5s1.12,-2.5 2.5,-2.5c1.38,0.0 2.5,1.12 2.5,2.5S13.38,11.5 12.0,11.5z"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_02.xml b/packages/SystemUI/res/drawable/ic_location_24_02.xml
new file mode 100644
index 0000000..bb4465f
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_02.xml
@@ -0,0 +1,28 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:fill="#FFFFFFFF"
+ android:pathData="M12.0,4.0c-3.48,0.0 -6.3,2.82 -6.3,6.3C5.7,15.02 12.0,22.0 12.0,22.0s6.3,-6.98 6.3,-11.7C18.3,6.82 15.48,4.0 12.0,4.0zM12.0,12.55c-1.24,0.0 -2.25,-1.01 -2.25,-2.25S10.76,8.05 12.0,8.05c1.24,0.0 2.25,1.01 2.25,2.25S13.24,12.55 12.0,12.55z"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_03.xml b/packages/SystemUI/res/drawable/ic_location_24_03.xml
new file mode 100644
index 0000000..956a8c3
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_03.xml
@@ -0,0 +1,28 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:fill="#FFFFFFFF"
+ android:pathData="M12.0,7.0c-2.9,0.0 -5.25,2.35 -5.25,5.25C6.75,16.19 12.0,22.0 12.0,22.0s5.25,-5.81 5.25,-9.75C17.25,9.35 14.9,7.0 12.0,7.0zM12.0,14.12c-1.04,0.0 -1.88,-0.84 -1.88,-1.88s0.84,-1.88 1.88,-1.88c1.04,0.0 1.87,0.84 1.87,1.88S13.04,14.12 12.0,14.12z"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_04.xml b/packages/SystemUI/res/drawable/ic_location_24_04.xml
new file mode 100644
index 0000000..0c0fb3b
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_04.xml
@@ -0,0 +1,28 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:fill="#FFFFFFFF"
+ android:pathData="M12.0,10.0c-2.32,0.0 -4.2,1.88 -4.2,4.2C7.8,17.35 12.0,22.0 12.0,22.0s4.2,-4.65 4.2,-7.8C16.2,11.88 14.32,10.0 12.0,10.0zM12.0,15.7c-0.83,0.0 -1.5,-0.67 -1.5,-1.5s0.67,-1.5 1.5,-1.5c0.83,0.0 1.5,0.67 1.5,1.5S12.83,15.7 12.0,15.7z"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_05.xml b/packages/SystemUI/res/drawable/ic_location_24_05.xml
new file mode 100644
index 0000000..1a21e2f
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_05.xml
@@ -0,0 +1,28 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:fill="#FFFFFFFF"
+ android:pathData="M12.0,13.0c-1.74,0.0 -3.15,1.41 -3.15,3.15C8.85,18.51 12.0,22.0 12.0,22.0s3.15,-3.49 3.15,-5.85C15.15,14.41 13.74,13.0 12.0,13.0zM12.0,17.27c-0.62,0.0 -1.13,-0.5 -1.13,-1.12c0.0,-0.62 0.5,-1.12 1.13,-1.12c0.62,0.0 1.12,0.5 1.12,1.12C13.12,16.77 12.62,17.27 12.0,17.27z"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_06.xml b/packages/SystemUI/res/drawable/ic_location_24_06.xml
new file mode 100644
index 0000000..25c9ae5
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_06.xml
@@ -0,0 +1,33 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:fill="#FFFFFFFF"
+ android:pathData="M12.0,16.0c-1.16,0.0 -2.1,0.94 -2.1,2.1C9.9,19.67 12.0,22.0 12.0,22.0s2.1,-2.33 2.1,-3.9C14.1,16.94 13.16,16.0 12.0,16.0zM12.0,18.85c-0.41,0.0 -0.75,-0.34 -0.75,-0.75s0.34,-0.75 0.75,-0.75c0.41,0.0 0.75,0.34 0.75,0.75S12.41,18.85 12.0,18.85z"/>
+ <path
+ android:pathData="M11.99,15c-1.35,0,-2.45,1.1,-2.45,2.45 c0,1.84,2.45,4.55,2.45,4.55s2.45,-2.71,2.45,-4.55C14.44,16.1,13.34,15,11.99,15z M11.99,18.33c-0.48,0,-0.88,-0.39,-0.88,-0.88 s0.39,-0.88,0.88,-0.88c0.48,0,0.87,0.39,0.87,0.88S12.47,18.33,11.99,18.33z"
+ android:strokeWidth=".35"
+ android:fill="#00000000"
+ android:stroke="#CCCCCC"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_07.xml b/packages/SystemUI/res/drawable/ic_location_24_07.xml
new file mode 100644
index 0000000..a69c3a2
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_07.xml
@@ -0,0 +1,30 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:pathData="M12,9c-2.51,0,-4.55,2.04,-4.55,4.55 C7.45,16.96,12,22,12,22s4.55,-5.04,4.55,-8.45C16.55,11.04,14.51,9,12,9z M12,15.18c-0.9,0,-1.63,-0.73,-1.63,-1.62 s0.73,-1.62,1.63,-1.62c0.9,0,1.62,0.73,1.62,1.62S12.9,15.18,12,15.18z"
+ android:strokeWidth="0.65"
+ android:fill="#00000000"
+ android:stroke="#CCCCCC"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_08.xml b/packages/SystemUI/res/drawable/ic_location_24_08.xml
new file mode 100644
index 0000000..c89c047
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_08.xml
@@ -0,0 +1,30 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:pathData="M12,6c-3.09,0,-5.6,2.51,-5.6,5.6 C6.4,15.8,12,22,12,22s5.6,-6.2,5.6,-10.4C17.6,8.51,15.09,6,12,6z M12,13.6c-1.1,0,-2,-0.9,-2,-2s0.9,-2,2,-2c1.1,0,2,0.9,2,2 S13.1,13.6,12,13.6z"
+ android:strokeWidth="0.8"
+ android:fill="#00000000"
+ android:stroke="#CCCCCC"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_09.xml b/packages/SystemUI/res/drawable/ic_location_24_09.xml
new file mode 100644
index 0000000..96bb6ce
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_09.xml
@@ -0,0 +1,30 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:pathData="M12,4c-3.48,0,-6.3,2.82,-6.3,6.3 C5.7,15.02,12,22,12,22s6.3,-6.98,6.3,-11.7C18.3,6.82,15.48,4,12,4z M12,12.55c-1.24,0,-2.25,-1.01,-2.25,-2.25S10.76,8.05,12,8.05 c1.24,0,2.25,1.01,2.25,2.25S13.24,12.55,12,12.55z"
+ android:strokeWidth="0.9"
+ android:fill="#00000000"
+ android:stroke="#CCCCCC"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_10.xml b/packages/SystemUI/res/drawable/ic_location_24_10.xml
new file mode 100644
index 0000000..aced4bd
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_10.xml
@@ -0,0 +1,30 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:pathData="M12,3C8.33,3,5.35,5.98,5.35,9.65 C5.35,14.64,12,22,12,22s6.65,-7.36,6.65,-12.35C18.65,5.98,15.67,3,12,3z M12,12.02c-1.31,0,-2.38,-1.06,-2.38,-2.38 S10.69,7.28,12,7.28c1.31,0,2.37,1.06,2.37,2.37S13.31,12.02,12,12.02z"
+ android:strokeWidth="0.95"
+ android:fill="#00000000"
+ android:stroke="#CCCCCC"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_24_11.xml b/packages/SystemUI/res/drawable/ic_location_24_11.xml
new file mode 100644
index 0000000..578308e
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_24_11.xml
@@ -0,0 +1,30 @@
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<vector xmlns:android="http://schemas.android.com/apk/res/android" >
+ <size
+ android:width="64dp"
+ android:height="64dp"/>
+
+ <viewport
+ android:viewportWidth="24.0"
+ android:viewportHeight="24.0"/>
+
+ <path
+ android:pathData="M12,2C8.13,2,5,5.13,5,9c0,5.25,7,13,7,13s7,-7.75,7,-13 C19,5.13,15.87,2,12,2z M12,11.5c-1.38,0,-2.5,-1.12,-2.5,-2.5s1.12,-2.5,2.5,-2.5c1.38,0,2.5,1.12,2.5,2.5S13.38,11.5,12,11.5z"
+ android:fill="#00000000"
+ android:stroke="#CCCCCC"
+ android:strokeWidth="1.0"/>
+</vector>
diff --git a/packages/SystemUI/res/drawable/ic_location_off_anim.xml b/packages/SystemUI/res/drawable/ic_location_off_anim.xml
new file mode 100644
index 0000000..864eda1
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_off_anim.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<animation-list
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:oneshot="true">
+ <item android:drawable="@drawable/ic_location_24_01" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_02" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_03" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_04" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_05" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_06" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_07" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_08" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_09" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_10" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_11" android:duration="16" />
+</animation-list>
diff --git a/packages/SystemUI/res/drawable/ic_location_on_anim.xml b/packages/SystemUI/res/drawable/ic_location_on_anim.xml
new file mode 100644
index 0000000..65a8afe
--- /dev/null
+++ b/packages/SystemUI/res/drawable/ic_location_on_anim.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<animation-list
+ xmlns:android="http://schemas.android.com/apk/res/android"
+ android:oneshot="true">
+ <item android:drawable="@drawable/ic_location_24_11" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_10" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_09" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_08" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_07" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_06" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_05" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_04" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_03" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_02" android:duration="16" />
+ <item android:drawable="@drawable/ic_location_24_01" android:duration="16" />
+</animation-list>
diff --git a/packages/SystemUI/src/com/android/systemui/qs/tiles/LocationTile.java b/packages/SystemUI/src/com/android/systemui/qs/tiles/LocationTile.java
index d32f98f..176e05c 100644
--- a/packages/SystemUI/src/com/android/systemui/qs/tiles/LocationTile.java
+++ b/packages/SystemUI/src/com/android/systemui/qs/tiles/LocationTile.java
@@ -16,6 +16,8 @@
package com.android.systemui.qs.tiles;
+import android.graphics.drawable.AnimationDrawable;
+
import com.android.systemui.R;
import com.android.systemui.qs.QSTile;
import com.android.systemui.statusbar.policy.LocationController;
@@ -56,16 +58,28 @@ public class LocationTile extends QSTile<QSTile.BooleanState> {
protected void handleUpdateState(BooleanState state, Object arg) {
final boolean locationEnabled = mController.isLocationEnabled();
state.visible = true;
- state.value = locationEnabled;
- state.icon = mHost.getVectorDrawable(R.drawable.ic_qs_location);
+ if (state.value != locationEnabled) {
+ state.value = locationEnabled;
+ final AnimationDrawable d = (AnimationDrawable) mContext.getDrawable(locationEnabled
+ ? R.drawable.ic_location_on_anim
+ : R.drawable.ic_location_off_anim);
+ state.icon = d;
+ mUiHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ d.start();
+ }
+ });
+ }
+ //state.icon = mHost.getVectorDrawable(R.drawable.ic_qs_location);
if (locationEnabled) {
- state.iconId = R.drawable.ic_qs_location_on;
+ if (state.icon == null) state.iconId = R.drawable.ic_location_24_01;
state.label = mContext.getString(R.string.quick_settings_location_label);
state.contentDescription = mContext.getString(
R.string.accessibility_quick_settings_location,
mContext.getString(R.string.accessibility_desc_on));
} else {
- state.iconId = R.drawable.ic_qs_location_off;
+ if (state.icon == null) state.iconId = R.drawable.ic_location_24_11;
state.label = mContext.getString(R.string.quick_settings_location_off_label);
state.contentDescription = mContext.getString(
R.string.accessibility_quick_settings_location,
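A minimal sketch of the frame-animation pattern the tile now uses, mirroring the hunk above (Context.getDrawable plus AnimationDrawable.start posted to the UI thread). The standalone class, the Handler construction, and the method name are illustrative assumptions, not part of this change.

import android.content.Context;
import android.graphics.drawable.AnimationDrawable;
import android.os.Handler;
import android.os.Looper;

import com.android.systemui.R;

class LocationIconAnimator {
    // Sketch only: resolve the 11-frame animation-list (11 x 16ms frames) for the new
    // state and start it on the UI thread, as LocationTile does via mUiHandler.post(...).
    static AnimationDrawable startLocationAnimation(Context context, boolean locationEnabled) {
        final AnimationDrawable d = (AnimationDrawable) context.getDrawable(locationEnabled
                ? R.drawable.ic_location_on_anim
                : R.drawable.ic_location_off_anim);
        new Handler(Looper.getMainLooper()).post(new Runnable() {
            @Override
            public void run() {
                d.start();
            }
        });
        return d;
    }
}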
diff --git a/policy/src/com/android/internal/policy/impl/GlobalActions.java b/policy/src/com/android/internal/policy/impl/GlobalActions.java
index fec9dda..673ce0b 100644
--- a/policy/src/com/android/internal/policy/impl/GlobalActions.java
+++ b/policy/src/com/android/internal/policy/impl/GlobalActions.java
@@ -183,11 +183,17 @@ class GlobalActions implements DialogInterface.OnDismissListener, DialogInterfac
mDialog = createDialog();
prepareDialog();
- WindowManager.LayoutParams attrs = mDialog.getWindow().getAttributes();
- attrs.setTitle("GlobalActions");
- mDialog.getWindow().setAttributes(attrs);
- mDialog.show();
- mDialog.getWindow().getDecorView().setSystemUiVisibility(View.STATUS_BAR_DISABLE_EXPAND);
+ // If we only have 1 item and it's a simple press action, just do this action.
+ if (mAdapter.getCount() == 1
+ && mAdapter.getItem(0) instanceof SinglePressAction) {
+ ((SinglePressAction) mAdapter.getItem(0)).onPress();
+ } else {
+ WindowManager.LayoutParams attrs = mDialog.getWindow().getAttributes();
+ attrs.setTitle("GlobalActions");
+ mDialog.getWindow().setAttributes(attrs);
+ mDialog.show();
+ mDialog.getWindow().getDecorView().setSystemUiVisibility(View.STATUS_BAR_DISABLE_EXPAND);
+ }
}
/**
@@ -398,7 +404,7 @@ class GlobalActions implements DialogInterface.OnDismissListener, DialogInterfac
@Override
public boolean showBeforeProvisioning() {
- return false;
+ return true;
}
};
}
@@ -1024,6 +1030,7 @@ class GlobalActions implements DialogInterface.OnDismissListener, DialogInterfac
mEnableAccessibilityController = null;
super.setCanceledOnTouchOutside(true);
}
+
super.onStart();
}
diff --git a/services/core/java/com/android/server/notification/ValidateNotificationPeople.java b/services/core/java/com/android/server/notification/ValidateNotificationPeople.java
index 8cd2f9b2..b5c2730 100644
--- a/services/core/java/com/android/server/notification/ValidateNotificationPeople.java
+++ b/services/core/java/com/android/server/notification/ValidateNotificationPeople.java
@@ -45,13 +45,13 @@ public class ValidateNotificationPeople implements NotificationSignalExtractor {
private static final boolean ENABLE_PEOPLE_VALIDATOR = true;
private static final String SETTING_ENABLE_PEOPLE_VALIDATOR =
"validate_notification_people_enabled";
- private static final String[] LOOKUP_PROJECTION = { Contacts._ID };
+ private static final String[] LOOKUP_PROJECTION = { Contacts._ID, Contacts.STARRED };
private static final int MAX_PEOPLE = 10;
private static final int PEOPLE_CACHE_SIZE = 200;
private static final float NONE = 0f;
private static final float VALID_CONTACT = 0.5f;
- // TODO private static final float STARRED_CONTACT = 1f;
+ private static final float STARRED_CONTACT = 1f;
protected boolean mEnabled;
private Context mContext;
@@ -104,23 +104,29 @@ public class ValidateNotificationPeople implements NotificationSignalExtractor {
public void work() {
if (INFO) Slog.i(TAG, "Executing: validation for: " + mRecord.sbn.getKey());
float affinity = NONE;
- LookupResult lookupResult = null;
for (final String handle: pendingLookups) {
+ LookupResult lookupResult = null;
final Uri uri = Uri.parse(handle);
if ("tel".equals(uri.getScheme())) {
if (DEBUG) Slog.d(TAG, "checking telephone URI: " + handle);
- lookupResult = resolvePhoneContact(handle, uri.getSchemeSpecificPart());
+ lookupResult = resolvePhoneContact(uri.getSchemeSpecificPart());
+ } else if ("mailto".equals(uri.getScheme())) {
+ if (DEBUG) Slog.d(TAG, "checking mailto URI: " + handle);
+ lookupResult = resolveEmailContact(uri.getSchemeSpecificPart());
} else if (handle.startsWith(Contacts.CONTENT_LOOKUP_URI.toString())) {
if (DEBUG) Slog.d(TAG, "checking lookup URI: " + handle);
- lookupResult = resolveContactsUri(handle, uri);
+ lookupResult = searchContacts(uri);
} else {
+ lookupResult = new LookupResult(); // invalid person for the cache
Slog.w(TAG, "unsupported URI " + handle);
}
+ if (lookupResult != null) {
+ synchronized (mPeopleCache) {
+ mPeopleCache.put(handle, lookupResult);
+ }
+ affinity = Math.max(affinity, lookupResult.getAffinity());
+ }
}
- if (lookupResult != null) {
- affinity = Math.max(affinity, lookupResult.getAffinity());
- }
-
float affinityBound = mRecord.getContactAffinity();
affinity = Math.max(affinity, affinityBound);
mRecord.setContactAffinity(affinity);
@@ -183,47 +189,27 @@ public class ValidateNotificationPeople implements NotificationSignalExtractor {
return null;
}
- private LookupResult resolvePhoneContact(final String handle, final String number) {
- LookupResult lookupResult = null;
- Cursor c = null;
- try {
- Uri numberUri = Uri.withAppendedPath(ContactsContract.PhoneLookup.CONTENT_FILTER_URI,
- Uri.encode(number));
- c = mContext.getContentResolver().query(numberUri, LOOKUP_PROJECTION, null, null, null);
- if (c != null && c.getCount() > 0) {
- c.moveToFirst();
- final int idIdx = c.getColumnIndex(Contacts._ID);
- final int id = c.getInt(idIdx);
- if (DEBUG) Slog.d(TAG, "is valid: " + id);
- lookupResult = new LookupResult(id);
- }
- } catch(Throwable t) {
- Slog.w(TAG, "Problem getting content resolver or performing contacts query.", t);
- } finally {
- if (c != null) {
- c.close();
- }
- }
- if (lookupResult == null) {
- lookupResult = new LookupResult(LookupResult.INVALID_ID);
- }
- synchronized (mPeopleCache) {
- mPeopleCache.put(handle, lookupResult);
- }
- return lookupResult;
+ private LookupResult resolvePhoneContact(final String number) {
+ Uri phoneUri = Uri.withAppendedPath(ContactsContract.PhoneLookup.CONTENT_FILTER_URI,
+ Uri.encode(number));
+ return searchContacts(phoneUri);
+ }
+
+ private LookupResult resolveEmailContact(final String email) {
+ Uri numberUri = Uri.withAppendedPath(
+ ContactsContract.CommonDataKinds.Email.CONTENT_LOOKUP_URI,
+ Uri.encode(email));
+ return searchContacts(numberUri);
}
- private LookupResult resolveContactsUri(String handle, final Uri personUri) {
- LookupResult lookupResult = null;
+ private LookupResult searchContacts(Uri lookupUri) {
+ LookupResult lookupResult = new LookupResult();
Cursor c = null;
try {
- c = mContext.getContentResolver().query(personUri, LOOKUP_PROJECTION, null, null, null);
+ c = mContext.getContentResolver().query(lookupUri, LOOKUP_PROJECTION, null, null, null);
if (c != null && c.getCount() > 0) {
c.moveToFirst();
- final int idIdx = c.getColumnIndex(Contacts._ID);
- final int id = c.getInt(idIdx);
- if (DEBUG) Slog.d(TAG, "is valid: " + id);
- lookupResult = new LookupResult(id);
+ lookupResult.readContact(c);
}
} catch(Throwable t) {
Slog.w(TAG, "Problem getting content resolver or performing contacts query.", t);
@@ -232,12 +218,6 @@ public class ValidateNotificationPeople implements NotificationSignalExtractor {
c.close();
}
}
- if (lookupResult == null) {
- lookupResult = new LookupResult(LookupResult.INVALID_ID);
- }
- synchronized (mPeopleCache) {
- mPeopleCache.put(handle, lookupResult);
- }
return lookupResult;
}
@@ -267,12 +247,31 @@ public class ValidateNotificationPeople implements NotificationSignalExtractor {
private final long mExpireMillis;
private int mId;
+ private boolean mStarred;
- public LookupResult(int id) {
- mId = id;
+ public LookupResult() {
+ mId = INVALID_ID;
+ mStarred = false;
mExpireMillis = System.currentTimeMillis() + CONTACT_REFRESH_MILLIS;
}
+ public void readContact(Cursor cursor) {
+ final int idIdx = cursor.getColumnIndex(Contacts._ID);
+ if (idIdx >= 0) {
+ mId = cursor.getInt(idIdx);
+ if (DEBUG) Slog.d(TAG, "contact _ID is: " + mId);
+ } else {
+ if (DEBUG) Slog.d(TAG, "invalid cursor: no _ID");
+ }
+ final int starIdx = cursor.getColumnIndex(Contacts.STARRED);
+ if (starIdx >= 0) {
+ mStarred = cursor.getInt(starIdx) != 0;
+ if (DEBUG) Slog.d(TAG, "contact STARRED is: " + mStarred);
+ } else {
+ if (DEBUG) Slog.d(TAG, "invalid cursor: no STARRED");
+ }
+ }
+
public boolean isExpired() {
return mExpireMillis < System.currentTimeMillis();
}
@@ -284,11 +283,18 @@ public class ValidateNotificationPeople implements NotificationSignalExtractor {
public float getAffinity() {
if (isInvalid()) {
return NONE;
+ } else if (mStarred) {
+ return STARRED_CONTACT;
} else {
- return VALID_CONTACT; // TODO: finer grained result: stars
+ return VALID_CONTACT;
}
}
+ public LookupResult setStarred(boolean starred) {
+ mStarred = starred;
+ return this;
+ }
+
public LookupResult setId(int id) {
mId = id;
return this;
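A condensed sketch of the affinity ranking after this change, using the constants defined earlier in the file (NONE = 0f, VALID_CONTACT = 0.5f, STARRED_CONTACT = 1f); the free-standing class and method are illustrative only.

class ContactAffinityExample {
    // Sketch only: how a resolved contact now maps to an affinity score.
    static float affinityFor(boolean isValidContact, boolean isStarred) {
        if (!isValidContact) {
            return 0f;                  // NONE
        }
        return isStarred ? 1f : 0.5f;   // STARRED_CONTACT vs. VALID_CONTACT
    }
}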
diff --git a/services/core/java/com/android/server/wm/ViewServer.java b/services/core/java/com/android/server/wm/ViewServer.java
index a763e2c..741cee3 100644
--- a/services/core/java/com/android/server/wm/ViewServer.java
+++ b/services/core/java/com/android/server/wm/ViewServer.java
@@ -314,7 +314,7 @@ class ViewServer implements Runnable {
out.flush();
}
if (needFocusedWindowUpdate) {
- out.write("FOCUS UPDATE\n");
+ out.write("ACTION_FOCUS UPDATE\n");
out.flush();
}
}
diff --git a/tests/HwAccelerationTest/src/com/android/test/hwui/CirclePropActivity.java b/tests/HwAccelerationTest/src/com/android/test/hwui/CirclePropActivity.java
index 1d0a806..e4ea936 100644
--- a/tests/HwAccelerationTest/src/com/android/test/hwui/CirclePropActivity.java
+++ b/tests/HwAccelerationTest/src/com/android/test/hwui/CirclePropActivity.java
@@ -109,26 +109,27 @@ public class CirclePropActivity extends Activity {
mToggle = !mToggle;
mRunningAnimations.add(new RenderNodeAnimator(
- mX, RenderNodeAnimator.DELTA_TYPE_ABSOLUTE, mToggle ? 400.0f : 200.0f));
+ mX, mToggle ? 400.0f : 200.0f));
mRunningAnimations.add(new RenderNodeAnimator(
- mY, RenderNodeAnimator.DELTA_TYPE_ABSOLUTE, mToggle ? 600.0f : 200.0f));
+ mY, mToggle ? 600.0f : 200.0f));
mRunningAnimations.add(new RenderNodeAnimator(
- mRadius, RenderNodeAnimator.DELTA_TYPE_ABSOLUTE, mToggle ? 250.0f : 150.0f));
+ mRadius, mToggle ? 250.0f : 150.0f));
mRunningAnimations.add(new RenderNodeAnimator(
mPaint, RenderNodeAnimator.PAINT_ALPHA,
- RenderNodeAnimator.DELTA_TYPE_ABSOLUTE, mToggle ? 64.0f : 255.0f));
+ mToggle ? 64.0f : 255.0f));
mRunningAnimations.add(new RenderNodeAnimator(
mPaint, RenderNodeAnimator.PAINT_STROKE_WIDTH,
- RenderNodeAnimator.DELTA_TYPE_ABSOLUTE, mToggle ? 5.0f : 60.0f));
+ mToggle ? 5.0f : 60.0f));
TimeInterpolator interp = new OvershootInterpolator(3.0f);
for (int i = 0; i < mRunningAnimations.size(); i++) {
RenderNodeAnimator anim = mRunningAnimations.get(i);
anim.setInterpolator(interp);
+ anim.setDuration(1000);
anim.start(this);
}
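A minimal sketch of the updated RenderNodeAnimator call pattern shown in this hunk: the two-argument constructor replaces the DELTA_TYPE_* variants and the duration is now set explicitly. The wrapper class and parameter names are assumptions for illustration.

import android.view.RenderNodeAnimator;
import android.view.View;
import android.view.animation.OvershootInterpolator;

class RenderNodeAnimatorExample {
    // Sketch only: animate a view property with the new constructor signature.
    static void animateTranslationY(View view, float finalValue) {
        RenderNodeAnimator anim =
                new RenderNodeAnimator(RenderNodeAnimator.TRANSLATION_Y, finalValue);
        anim.setInterpolator(new OvershootInterpolator(3.0f));
        anim.setDuration(1000);
        anim.start(view);
    }
}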
diff --git a/tests/RenderThreadTest/src/com/example/renderthread/MainActivity.java b/tests/RenderThreadTest/src/com/example/renderthread/MainActivity.java
index 8f9cf58..b5b12d8 100644
--- a/tests/RenderThreadTest/src/com/example/renderthread/MainActivity.java
+++ b/tests/RenderThreadTest/src/com/example/renderthread/MainActivity.java
@@ -5,6 +5,7 @@ import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
+import android.util.Log;
import android.view.HardwareRenderer;
import android.view.RenderNodeAnimator;
import android.view.View;
@@ -73,14 +74,20 @@ public class MainActivity extends Activity implements OnItemClickListener {
float delta = (pos - clickedPosition) * 1.1f;
if (delta == 0) delta = -1;
RenderNodeAnimator animator = new RenderNodeAnimator(
- RenderNodeAnimator.TRANSLATION_Y, RenderNodeAnimator.DELTA_TYPE_DELTA, dy * delta);
+ RenderNodeAnimator.TRANSLATION_Y, dy * delta);
animator.setDuration(DURATION);
+ if (child == clickedView) logTranslationY(clickedView);
animator.start(child);
+ if (child == clickedView) logTranslationY(clickedView);
}
//mHandler.postDelayed(mLaunchActivity, (long) (DURATION * .4));
mLaunchActivity.run();
}
+ private void logTranslationY(View v) {
+ Log.d("RTTest", "View has translationY: " + v.getTranslationY());
+ }
+
private Runnable mLaunchActivity = new Runnable() {
@Override
diff --git a/tools/aapt/Command.cpp b/tools/aapt/Command.cpp
index dca25e5..24a94a7 100644
--- a/tools/aapt/Command.cpp
+++ b/tools/aapt/Command.cpp
@@ -453,7 +453,7 @@ String8 getComponentName(String8 &pkgName, String8 &componentName) {
return retStr;
}
-static void printCompatibleScreens(ResXMLTree& tree) {
+static void printCompatibleScreens(ResXMLTree& tree, String8* outError) {
size_t len;
ResXMLTree::event_code_t code;
int depth = 0;
@@ -471,7 +471,12 @@ static void printCompatibleScreens(ResXMLTree& tree) {
continue;
}
depth++;
- String8 tag(tree.getElementName(&len));
+ const char16_t* ctag16 = tree.getElementName(&len);
+ if (ctag16 == NULL) {
+ *outError = "failed to get XML element name (bad string pool)";
+ return;
+ }
+ String8 tag(ctag16);
if (tag == "screen") {
int32_t screenSize = getIntegerAttribute(tree,
SCREEN_SIZE_ATTR, NULL, -1);
@@ -536,7 +541,12 @@ Vector<String8> getNfcAidCategories(AssetManager& assets, String8 xmlPath, bool
while ((code=tree.next()) != ResXMLTree::END_DOCUMENT && code != ResXMLTree::BAD_DOCUMENT) {
if (code == ResXMLTree::END_TAG) {
depth--;
- String8 tag(tree.getElementName(&len));
+ const char16_t* ctag16 = tree.getElementName(&len);
+ if (ctag16 == NULL) {
+ *outError = "failed to get XML element name (bad string pool)";
+ return Vector<String8>();
+ }
+ String8 tag(ctag16);
if (depth == 0 && tag == serviceTagName) {
withinApduService = false;
@@ -544,7 +554,12 @@ Vector<String8> getNfcAidCategories(AssetManager& assets, String8 xmlPath, bool
} else if (code == ResXMLTree::START_TAG) {
depth++;
- String8 tag(tree.getElementName(&len));
+ const char16_t* ctag16 = tree.getElementName(&len);
+ if (ctag16 == NULL) {
+ *outError = "failed to get XML element name (bad string pool)";
+ return Vector<String8>();
+ }
+ String8 tag(ctag16);
if (depth == 1) {
if (tag == serviceTagName) {
@@ -711,7 +726,12 @@ int doDump(Bundle* bundle)
continue;
}
depth++;
- String8 tag(tree.getElementName(&len));
+ const char16_t* ctag16 = tree.getElementName(&len);
+ if (ctag16 == NULL) {
+ fprintf(stderr, "ERROR: failed to get XML element name (bad string pool)\n");
+ goto bail;
+ }
+ String8 tag(ctag16);
//printf("Depth %d tag %s\n", depth, tag.string());
if (depth == 1) {
if (tag != "manifest") {
@@ -970,7 +990,13 @@ int doDump(Bundle* bundle)
continue;
}
depth++;
- String8 tag(tree.getElementName(&len));
+
+ const char16_t* ctag16 = tree.getElementName(&len);
+ if (ctag16 == NULL) {
+ fprintf(stderr, "ERROR: failed to get XML element name (bad string pool)\n");
+ goto bail;
+ }
+ String8 tag(ctag16);
//printf("Depth %d, %s\n", depth, tag.string());
if (depth == 1) {
if (tag != "manifest") {
@@ -1297,7 +1323,12 @@ int doDump(Bundle* bundle)
goto bail;
}
} else if (tag == "compatible-screens") {
- printCompatibleScreens(tree);
+ printCompatibleScreens(tree, &error);
+ if (error != "") {
+ fprintf(stderr, "ERROR getting compatible screens: %s\n",
+ error.string());
+ goto bail;
+ }
depth--;
} else if (tag == "package-verifier") {
String8 name = getAttribute(tree, NAME_ATTR, &error);