Diffstat (limited to 'core')
-rw-r--r--  core/java/android/app/ActivityView.java | 17
-rw-r--r--  core/java/android/app/VoiceInteractor.java | 2
-rw-r--r--  core/java/android/hardware/camera2/CameraCharacteristics.java | 58
-rw-r--r--  core/java/android/hardware/camera2/CameraMetadata.java | 514
-rw-r--r--  core/java/android/hardware/camera2/CaptureRequest.java | 88
-rw-r--r--  core/java/android/hardware/camera2/CaptureResult.java | 148
-rw-r--r--  core/java/android/hardware/camera2/params/StreamConfiguration.java | 3
-rw-r--r--  core/java/android/net/RouteInfo.java | 2
-rw-r--r--  core/java/android/provider/Settings.java | 1
-rw-r--r--  core/java/android/service/notification/NotificationListenerService.java | 220
-rw-r--r--  core/java/android/service/voice/DspInfo.java | 56
-rw-r--r--  core/java/android/service/voice/KeyphraseEnrollmentInfo.java | 246
-rw-r--r--  core/java/android/service/voice/KeyphraseInfo.java | 27
-rw-r--r--  core/java/android/service/voice/SoundTriggerManager.java | 73
-rw-r--r--  core/java/android/service/voice/VoiceInteractionService.java | 59
-rw-r--r--  core/java/android/speech/tts/RequestConfig.java | 15
-rw-r--r--  core/java/android/speech/tts/RequestConfigHelper.java | 15
-rw-r--r--  core/java/android/speech/tts/SynthesisRequestV2.java | 15
-rw-r--r--  core/java/android/speech/tts/VoiceInfo.java | 15
-rw-r--r--  core/jni/Android.mk | 1
-rw-r--r--  core/jni/AndroidRuntime.cpp | 8
-rw-r--r--  core/jni/android/graphics/AndroidPicture.cpp | 112
-rw-r--r--  core/jni/android/graphics/AndroidPicture.h | 63
-rw-r--r--  core/jni/android/graphics/Bitmap.cpp | 40
-rw-r--r--  core/jni/android/graphics/Canvas.cpp | 209
-rw-r--r--  core/jni/android/graphics/Graphics.cpp | 5
-rw-r--r--  core/jni/android/graphics/GraphicsJNI.h | 4
-rw-r--r--  core/jni/android/graphics/Picture.cpp | 38
-rw-r--r--  core/jni/android/graphics/pdf/PdfDocument.cpp | 29
-rw-r--r--  core/jni/android_media_AudioSystem.cpp | 7
-rw-r--r--  core/jni/android_view_Surface.cpp | 1
-rw-r--r--  core/res/AndroidManifest.xml | 11
-rw-r--r--  core/res/res/layout/notification_template_material_big_base.xml | 2
-rw-r--r--  core/res/res/layout/notification_template_material_big_media.xml | 3
-rw-r--r--  core/res/res/layout/notification_template_material_big_text.xml | 2
-rw-r--r--  core/res/res/values-watch/config.xml | 4
-rw-r--r--  core/res/res/values/attrs.xml | 10
-rw-r--r--  core/res/res/values/colors.xml | 1
-rw-r--r--  core/res/res/values/config.xml | 10
-rw-r--r--  core/res/res/values/public.xml | 3
-rw-r--r--  core/res/res/values/strings.xml | 6
-rw-r--r--  core/res/res/values/styles_material.xml | 3
-rw-r--r--  core/res/res/values/symbols.xml | 4
43 files changed, 1686 insertions, 464 deletions
diff --git a/core/java/android/app/ActivityView.java b/core/java/android/app/ActivityView.java
index c29d75e..94ea2c5 100644
--- a/core/java/android/app/ActivityView.java
+++ b/core/java/android/app/ActivityView.java
@@ -354,9 +354,11 @@ public class ActivityView extends ViewGroup {
private static class ActivityContainerWrapper {
private final IActivityContainer mIActivityContainer;
private final CloseGuard mGuard = CloseGuard.get();
+ boolean mOpened; // Protected by mGuard.
ActivityContainerWrapper(IActivityContainer container) {
mIActivityContainer = container;
+ mOpened = true;
mGuard.open("release");
}
@@ -424,11 +426,16 @@ public class ActivityView extends ViewGroup {
}
void release() {
- if (DEBUG) Log.v(TAG, "ActivityContainerWrapper: release called");
- try {
- mIActivityContainer.release();
- mGuard.close();
- } catch (RemoteException e) {
+ synchronized (mGuard) {
+ if (mOpened) {
+ if (DEBUG) Log.v(TAG, "ActivityContainerWrapper: release called");
+ try {
+ mIActivityContainer.release();
+ mGuard.close();
+ } catch (RemoteException e) {
+ }
+ mOpened = false;
+ }
}
}
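
The release() change above makes teardown idempotent by pairing CloseGuard with an mOpened flag that is protected by the same lock. A minimal standalone sketch of that pattern, assuming the hidden dalvik.system.CloseGuard API; the wrapper class name and its resource are hypothetical:

    import dalvik.system.CloseGuard;

    class ReleasableWrapper {
        private final CloseGuard mGuard = CloseGuard.get();
        private boolean mOpened; // Protected by mGuard, as in ActivityContainerWrapper.

        ReleasableWrapper() {
            mOpened = true;
            mGuard.open("release");
        }

        void release() {
            synchronized (mGuard) {
                if (!mOpened) {
                    return; // Second and later calls are no-ops.
                }
                // Release the underlying resource here.
                mGuard.close();
                mOpened = false;
            }
        }

        @Override
        protected void finalize() throws Throwable {
            try {
                mGuard.warnIfOpen(); // Logs a leak warning if release() was never called.
                release();
            } finally {
                super.finalize();
            }
        }
    }
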
diff --git a/core/java/android/app/VoiceInteractor.java b/core/java/android/app/VoiceInteractor.java
index 85e970c..0d94721 100644
--- a/core/java/android/app/VoiceInteractor.java
+++ b/core/java/android/app/VoiceInteractor.java
@@ -293,7 +293,7 @@ public class VoiceInteractor {
IVoiceInteractorRequest submit(IVoiceInteractor interactor, String packageName,
IVoiceInteractorCallback callback) throws RemoteException {
- return interactor.startConfirmation(packageName, callback, mCommand, mArgs);
+ return interactor.startCommand(packageName, callback, mCommand, mArgs);
}
}
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index ff56720..9eea545 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -30,7 +30,7 @@ import java.util.List;
*
* <p>These properties are fixed for a given CameraDevice, and can be queried
* through the {@link CameraManager CameraManager}
- * interface in addition to through the CameraDevice interface.</p>
+ * interface with {@link CameraManager#getCameraCharacteristics}.</p>
*
* <p>{@link CameraCharacteristics} objects are immutable.</p>
*
@@ -555,7 +555,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* <p>List containing a subset of the optical image
* stabilization (OIS) modes specified in
* {@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}.</p>
- * <p>If OIS is not implemented for a given camera device, this should
+ * <p>If OIS is not implemented for a given camera device, this will
* contain only OFF.</p>
*
* @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
@@ -612,7 +612,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
/**
* <p>Direction the camera faces relative to
- * device screen</p>
+ * device screen.</p>
* @see #LENS_FACING_FRONT
* @see #LENS_FACING_BACK
*/
@@ -622,7 +622,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
/**
* <p>The set of noise reduction modes supported by this camera device.</p>
* <p>This tag lists the valid modes for {@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}.</p>
- * <p>Full-capability camera devices must laways support OFF and FAST.</p>
+ * <p>Full-capability camera devices must always support OFF and FAST.</p>
*
* @see CaptureRequest#NOISE_REDUCTION_MODE
*/
@@ -778,18 +778,20 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
new Key<Byte>("android.request.pipelineMaxDepth", byte.class);
/**
- * <p>Optional. Defaults to 1. Defines how many sub-components
+ * <p>Defines how many sub-components
* a result will be composed of.</p>
* <p>In order to combat the pipeline latency, partial results
* may be delivered to the application layer from the camera device as
* soon as they are available.</p>
- * <p>A value of 1 means that partial results are not supported.</p>
+ * <p>Optional; defaults to 1. A value of 1 means that partial
+ * results are not supported, and only the final TotalCaptureResult will
+ * be produced by the camera device.</p>
* <p>A typical use case for this might be: after requesting an
* auto-focus (AF) lock the new AF state might be available 50%
* of the way through the pipeline. The camera device could
* then immediately dispatch this state via a partial result to
- * the framework/application layer, and the rest of the
- * metadata via later partial results.</p>
+ * the application, and the rest of the metadata via later
+ * partial results.</p>
*/
public static final Key<Integer> REQUEST_PARTIAL_RESULT_COUNT =
new Key<Integer>("android.request.partialResultCount", int.class);
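
The partial-result mechanism this key describes is surfaced to applications through CameraCaptureSession.CaptureCallback. A minimal sketch, assuming the usual android.hardware.camera2 imports and a CameraCharacteristics object named characteristics:

    Integer partialCount =
            characteristics.get(CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT); // may be null
    CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureProgressed(CameraCaptureSession session,
                CaptureRequest request, CaptureResult partialResult) {
            // Early metadata (for example the AF state) may arrive here before the
            // final result, if partialResultCount > 1.
            Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
        }

        @Override
        public void onCaptureCompleted(CameraCaptureSession session,
                CaptureRequest request, TotalCaptureResult result) {
            // The complete, final metadata for the capture.
        }
    };
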
@@ -806,8 +808,6 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* to do this query each of android.request.availableRequestKeys,
* android.request.availableResultKeys,
* android.request.availableCharacteristicsKeys.</p>
- * <p>XX: Maybe these should go into {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}
- * as a table instead?</p>
* <p>The following capabilities are guaranteed to be available on
* {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> FULL devices:</p>
* <ul>
@@ -815,14 +815,11 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* <li>MANUAL_POST_PROCESSING</li>
* </ul>
* <p>Other capabilities may be available on either FULL or LIMITED
- * devices, but the app. should query this field to be sure.</p>
+ * devices, but the application should query this field to be sure.</p>
*
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
- * @see #REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE
- * @see #REQUEST_AVAILABLE_CAPABILITIES_OPTIONAL
* @see #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR
* @see #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING
- * @see #REQUEST_AVAILABLE_CAPABILITIES_ZSL
* @see #REQUEST_AVAILABLE_CAPABILITIES_DNG
*/
public static final Key<int[]> REQUEST_AVAILABLE_CAPABILITIES =
@@ -838,7 +835,6 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* at a more granular level than capabilities. This is especially
* important for optional keys that are not listed under any capability
* in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
- * <p>TODO: This should be used by #getAvailableCaptureRequestKeys.</p>
*
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
* @hide
@@ -863,7 +859,6 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* at a more granular level than capabilities. This is especially
* important for optional keys that are not listed under any capability
* in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
- * <p>TODO: This should be used by #getAvailableCaptureResultKeys.</p>
*
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
* @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
@@ -879,7 +874,6 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* android.request.availableResultKeys (except that it applies for
* CameraCharacteristics instead of CaptureResult). See above for more
* details.</p>
- * <p>TODO: This should be used by CameraCharacteristics#getKeys.</p>
* @hide
*/
public static final Key<int[]> REQUEST_AVAILABLE_CHARACTERISTICS_KEYS =
@@ -927,10 +921,15 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
new Key<android.util.Size[]>("android.scaler.availableJpegSizes", android.util.Size[].class);
/**
- * <p>The maximum ratio between active area width
- * and crop region width, or between active area height and
- * crop region height, if the crop region height is larger
- * than width</p>
+ * <p>The maximum ratio between both active area width
+ * and crop region width, and active area height and
+ * crop region height.</p>
+ * <p>This represents the maximum amount of zooming possible by
+ * the camera device, or equivalently, the minimum cropping
+ * window size.</p>
+ * <p>Crop regions that have a width or height that is smaller
+ * than this ratio allows will be rounded up to the minimum
+ * allowed size by the camera device.</p>
*/
public static final Key<Float> SCALER_AVAILABLE_MAX_DIGITAL_ZOOM =
new Key<Float>("android.scaler.availableMaxDigitalZoom", float.class);
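
The ratio above bounds digital zoom; a hypothetical helper that derives a centered SCALER_CROP_REGION for a requested zoom factor, clamped to this maximum (assumes android.graphics.Rect and android.hardware.camera2.CameraCharacteristics imports):

    static Rect cropRegionForZoom(CameraCharacteristics characteristics, float requestedZoom) {
        Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        float maxZoom = characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
        float zoom = Math.max(1f, Math.min(requestedZoom, maxZoom)); // clamp to the advertised range
        int cropWidth = Math.round(active.width() / zoom);
        int cropHeight = Math.round(active.height() / zoom);
        int left = active.left + (active.width() - cropWidth) / 2;
        int top = active.top + (active.height() - cropHeight) / 2;
        return new Rect(left, top, left + cropWidth, top + cropHeight);
    }
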
@@ -1339,9 +1338,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
new Key<android.util.Range<Integer>>("android.sensor.info.sensitivityRange", new TypeReference<android.util.Range<Integer>>() {{ }});
/**
- * <p>Arrangement of color filters on sensor;
+ * <p>The arrangement of color filters on sensor;
* represents the colors in the top-left 2x2 section of
- * the sensor, in reading order</p>
+ * the sensor, in reading order.</p>
* @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB
* @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG
* @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG
@@ -1666,10 +1665,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
new Key<int[]>("android.sensor.availableTestPatternModes", int[].class);
/**
- * <p>Which face detection modes are available,
- * if any.</p>
- * <p>OFF means face detection is disabled, it must
- * be included in the list.</p>
+ * <p>The face detection modes that are available
+ * for this camera device.</p>
+ * <p>OFF is always supported.</p>
* <p>SIMPLE means the device supports the
* android.statistics.faceRectangles and
* android.statistics.faceScores outputs.</p>
@@ -1681,8 +1679,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
new Key<int[]>("android.statistics.info.availableFaceDetectModes", int[].class);
/**
- * <p>Maximum number of simultaneously detectable
- * faces</p>
+ * <p>The maximum number of simultaneously detectable
+ * faces.</p>
*/
public static final Key<Integer> STATISTICS_INFO_MAX_FACE_COUNT =
new Key<Integer>("android.statistics.info.maxFaceCount", int.class);
@@ -1691,7 +1689,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
* <p>The set of hot pixel map output modes supported by this camera device.</p>
* <p>This tag lists valid output modes for {@link CaptureRequest#STATISTICS_HOT_PIXEL_MAP_MODE android.statistics.hotPixelMapMode}.</p>
* <p>If no hotpixel map is available for this camera device, this will contain
- * only OFF. If the hotpixel map is available, this should include both
+ * only OFF. If the hotpixel map is available, this will include both
* the ON and OFF options.</p>
*
* @see CaptureRequest#STATISTICS_HOT_PIXEL_MAP_MODE
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index 90e5e4e..33e1915 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -157,8 +157,8 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>The lens focus distance is not accurate, and the units used for
- * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} do not correspond to any physical units.
- * Setting the lens to the same focus distance on separate occasions may
+ * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} do not correspond to any physical units.</p>
+ * <p>Setting the lens to the same focus distance on separate occasions may
* result in a different real focus distance, depending on factors such
* as the orientation of the device, the age of the focusing mechanism,
* and the device temperature. The focus distance value will still be
@@ -172,20 +172,24 @@ public abstract class CameraMetadata<TKey> {
public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED = 0;
/**
- * <p>The lens focus distance is measured in diopters. However, setting the lens
- * to the same focus distance on separate occasions may result in a
- * different real focus distance, depending on factors such as the
- * orientation of the device, the age of the focusing mechanism, and
- * the device temperature.</p>
+ * <p>The lens focus distance is measured in diopters.</p>
+ * <p>However, setting the lens to the same focus distance
+ * on separate occasions may result in a different real
+ * focus distance, depending on factors such as the
+ * orientation of the device, the age of the focusing
+ * mechanism, and the device temperature.</p>
* @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
*/
public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE = 1;
/**
- * <p>The lens focus distance is measured in diopters. The lens mechanism is
- * calibrated so that setting the same focus distance is repeatable on
- * multiple occasions with good accuracy, and the focus distance corresponds
- * to the real physical distance to the plane of best focus.</p>
+ * <p>The lens focus distance is measured in diopters, and
+ * is calibrated.</p>
+ * <p>The lens mechanism is calibrated so that setting the
+ * same focus distance is repeatable on multiple
+ * occasions with good accuracy, and the focus distance
+ * corresponds to the real physical distance to the plane
+ * of best focus.</p>
* @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
*/
public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED = 2;
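
Since android.lens.focusDistance is expressed in diopters (1 / distance in meters) on APPROXIMATE and CALIBRATED devices, a 50 cm focus target would be requested roughly as follows; builder is an assumed CaptureRequest.Builder and the values are illustrative only:

    float focusDistanceDiopters = 1.0f / 0.5f; // 0.5 m target => 2.0 diopters
    builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_OFF); // manual focus
    builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, focusDistanceDiopters);
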
@@ -195,11 +199,13 @@ public abstract class CameraMetadata<TKey> {
//
/**
+ * <p>The camera device faces the same direction as the device's screen.</p>
* @see CameraCharacteristics#LENS_FACING
*/
public static final int LENS_FACING_FRONT = 0;
/**
+ * <p>The camera device faces the opposite direction as the device's screen.</p>
* @see CameraCharacteristics#LENS_FACING
*/
public static final int LENS_FACING_BACK = 1;
@@ -215,11 +221,10 @@ public abstract class CameraMetadata<TKey> {
* <p>The full set of features supported by this capability makes
* the camera2 api backwards compatible with the camera1
* (android.hardware.Camera) API.</p>
- * <p>TODO: @hide this. Doesn't really mean anything except
- * act as a catch-all for all the 'base' functionality.</p>
*
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @hide
*/
public static final int REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE = 0;
@@ -228,15 +233,14 @@ public abstract class CameraMetadata<TKey> {
* tags or functionality not encapsulated by one of the other
* capabilities.</p>
* <p>A typical example is all tags marked 'optional'.</p>
- * <p>TODO: @hide. We may not need this if we @hide all the optional
- * tags not belonging to a capability.</p>
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @hide
*/
public static final int REQUEST_AVAILABLE_CAPABILITIES_OPTIONAL = 1;
/**
* <p>The camera device can be manually controlled (3A algorithms such
- * as auto exposure, and auto focus can be bypassed).
+ * as auto-exposure, and auto-focus can be bypassed).
* The camera device supports basic manual control of the sensor image
* acquisition related stages. This means the following controls are
* guaranteed to be supported:</p>
@@ -257,11 +261,11 @@ public abstract class CameraMetadata<TKey> {
* <li>{@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}</li>
* </ul>
* </li>
- * <li>Manual lens control<ul>
+ * <li>Manual lens control (if the lens is adjustable)<ul>
* <li>android.lens.*</li>
* </ul>
* </li>
- * <li>Manual flash control<ul>
+ * <li>Manual flash control (if a flash unit is present)<ul>
* <li>android.flash.*</li>
* </ul>
* </li>
@@ -312,8 +316,6 @@ public abstract class CameraMetadata<TKey> {
* </ul>
* <p>If auto white balance is enabled, then the camera device
* will accurately report the values applied by AWB in the result.</p>
- * <p>The camera device will also support everything in MANUAL_SENSOR
- * except manual lens control and manual flash control.</p>
* <p>A given camera device may also support additional post-processing
* controls, but this capability only covers the above list of controls.</p>
*
@@ -340,8 +342,8 @@ public abstract class CameraMetadata<TKey> {
* (both input/output) will match the maximum available
* resolution of JPEG streams.</li>
* </ul>
- * <p>@hide this, TODO: remove it when input related APIs are ready.</p>
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @hide
*/
public static final int REQUEST_AVAILABLE_CAPABILITIES_ZSL = 4;
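
Whether the MANUAL_SENSOR controls listed above may be relied on can be checked against this key. A minimal sketch, assuming a CameraCharacteristics object named characteristics:

    int[] capabilities = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
    boolean hasManualSensor = false;
    if (capabilities != null) {
        for (int capability : capabilities) {
            if (capability == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
                hasManualSensor = true;
                break;
            }
        }
    }
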
@@ -355,7 +357,7 @@ public abstract class CameraMetadata<TKey> {
* <li>RAW16 is reprocessable into both YUV_420_888 and JPEG
* formats.</li>
* <li>The maximum available resolution for RAW16 streams (both
- * input/output) will match the either value in
+ * input/output) will match either the value in
* {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize} or
* {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</li>
* <li>All DNG-related optional metadata entries are provided
@@ -373,13 +375,13 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>The camera device will only support centered crop regions.</p>
+ * <p>The camera device only supports centered crop regions.</p>
* @see CameraCharacteristics#SCALER_CROPPING_TYPE
*/
public static final int SCALER_CROPPING_TYPE_CENTER_ONLY = 0;
/**
- * <p>The camera device will support arbitrarily chosen crop regions.</p>
+ * <p>The camera device supports arbitrarily chosen crop regions.</p>
* @see CameraCharacteristics#SCALER_CROPPING_TYPE
*/
public static final int SCALER_CROPPING_TYPE_FREEFORM = 1;
@@ -525,7 +527,7 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>android.led.transmit control is used</p>
+ * <p>android.led.transmit control is used.</p>
* @see CameraCharacteristics#LED_AVAILABLE_LEDS
* @hide
*/
@@ -536,11 +538,14 @@ public abstract class CameraMetadata<TKey> {
//
/**
+ * <p>This camera device has only limited capabilities.</p>
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
*/
public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0;
/**
+ * <p>This camera device is capable of supporting advanced imaging
+ * applications.</p>
* @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
*/
public static final int INFO_SUPPORTED_HARDWARE_LEVEL_FULL = 1;
@@ -550,9 +555,9 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>Every frame has the requests immediately applied.
- * (and furthermore for all results,
- * <code>android.sync.frameNumber == android.request.frameCount</code>)</p>
+ * <p>Every frame has the requests immediately applied.</p>
+ * <p>Furthermore, for all results,
+ * <code>android.sync.frameNumber == android.request.frameCount</code>.</p>
* <p>Changing controls over multiple requests one after another will
* produce results that have those controls applied atomically
* each frame.</p>
@@ -592,8 +597,8 @@ public abstract class CameraMetadata<TKey> {
public static final int COLOR_CORRECTION_MODE_TRANSFORM_MATRIX = 0;
/**
- * <p>Must not slow down capture rate relative to sensor raw
- * output.</p>
+ * <p>Color correction processing must not slow down
+ * capture rate relative to sensor raw output.</p>
* <p>Advanced white balance adjustments above and beyond
* the specified white balance pipeline may be applied.</p>
* <p>If AWB is enabled with <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF</code>, then
@@ -606,8 +611,9 @@ public abstract class CameraMetadata<TKey> {
public static final int COLOR_CORRECTION_MODE_FAST = 1;
/**
- * <p>Capture rate (relative to sensor raw output)
- * may be reduced by high quality.</p>
+ * <p>Color correction processing operates at improved
+ * quality but reduced capture rate (relative to sensor raw
+ * output).</p>
* <p>Advanced white balance adjustments above and beyond
* the specified white balance pipeline may be applied.</p>
* <p>If AWB is enabled with <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF</code>, then
@@ -658,8 +664,8 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>The camera device's autoexposure routine is disabled;
- * the application-selected {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
+ * <p>The camera device's autoexposure routine is disabled.</p>
+ * <p>The application-selected {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
* {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} and
* {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} are used by the camera
* device, along with android.flash.* fields, if there's
@@ -674,7 +680,8 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>The camera device's autoexposure routine is active,
- * with no flash control. The application's values for
+ * with no flash control.</p>
+ * <p>The application's values for
* {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
* {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
* {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} are ignored. The
@@ -691,10 +698,10 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>Like ON, except that the camera device also controls
* the camera's flash unit, firing it in low-light
- * conditions. The flash may be fired during a
- * precapture sequence (triggered by
- * {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and may be fired
- * for captures for which the
+ * conditions.</p>
+ * <p>The flash may be fired during a precapture sequence
+ * (triggered by {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and
+ * may be fired for captures for which the
* {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} field is set to
* STILL_CAPTURE</p>
*
@@ -707,10 +714,10 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>Like ON, except that the camera device also controls
* the camera's flash unit, always firing it for still
- * captures. The flash may be fired during a precapture
- * sequence (triggered by
- * {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and will always
- * be fired for captures for which the
+ * captures.</p>
+ * <p>The flash may be fired during a precapture sequence
+ * (triggered by {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and
+ * will always be fired for captures for which the
* {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} field is set to
* STILL_CAPTURE</p>
*
@@ -722,9 +729,10 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>Like ON_AUTO_FLASH, but with automatic red eye
- * reduction. If deemed necessary by the camera device,
- * a red eye reduction flash will fire during the
- * precapture sequence.</p>
+ * reduction.</p>
+ * <p>If deemed necessary by the camera device, a red eye
+ * reduction flash will fire during the precapture
+ * sequence.</p>
* @see CaptureRequest#CONTROL_AE_MODE
*/
public static final int CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE = 4;
@@ -741,8 +749,9 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>The precapture metering sequence will be started
- * by the camera device. The exact effect of the precapture
- * trigger depends on the current AE mode and state.</p>
+ * by the camera device.</p>
+ * <p>The exact effect of the precapture trigger depends on
+ * the current AE mode and state.</p>
* @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
*/
public static final int CONTROL_AE_PRECAPTURE_TRIGGER_START = 1;
@@ -754,7 +763,7 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>The auto-focus routine does not control the lens;
* {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} is controlled by the
- * application</p>
+ * application.</p>
*
* @see CaptureRequest#LENS_FOCUS_DISTANCE
* @see CaptureRequest#CONTROL_AF_MODE
@@ -839,8 +848,11 @@ public abstract class CameraMetadata<TKey> {
public static final int CONTROL_AF_MODE_CONTINUOUS_PICTURE = 4;
/**
- * <p>Extended depth of field (digital focus). AF
- * trigger is ignored, AF state should always be
+ * <p>Extended depth of field (digital focus) mode.</p>
+ * <p>The camera device will produce images with an extended
+ * depth of field automatically; no special focusing
+ * operations need to be done before taking a picture.</p>
+ * <p>AF triggers are ignored, and the AF state will always be
* INACTIVE.</p>
* @see CaptureRequest#CONTROL_AF_MODE
*/
@@ -874,8 +886,8 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>The camera device's auto white balance routine is disabled;
- * the application-selected color transform matrix
+ * <p>The camera device's auto-white balance routine is disabled.</p>
+ * <p>The application-selected color transform matrix
* ({@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}) and gains
* ({@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}) are used by the camera
* device for manual white balance control.</p>
@@ -887,9 +899,12 @@ public abstract class CameraMetadata<TKey> {
public static final int CONTROL_AWB_MODE_OFF = 0;
/**
- * <p>The camera device's auto white balance routine is active;
- * the application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
- * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.</p>
+ * <p>The camera device's auto-white balance routine is active.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
*
* @see CaptureRequest#COLOR_CORRECTION_GAINS
* @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
@@ -898,65 +913,125 @@ public abstract class CameraMetadata<TKey> {
public static final int CONTROL_AWB_MODE_AUTO = 1;
/**
- * <p>The camera device's auto white balance routine is disabled;
+ * <p>The camera device's auto-white balance routine is disabled;
* the camera device uses incandescent light as the assumed scene
- * illumination for white balance. While the exact white balance
- * transforms are up to the camera device, they will approximately
- * match the CIE standard illuminant A.</p>
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant A.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
* @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final int CONTROL_AWB_MODE_INCANDESCENT = 2;
/**
- * <p>The camera device's auto white balance routine is disabled;
+ * <p>The camera device's auto-white balance routine is disabled;
* the camera device uses fluorescent light as the assumed scene
- * illumination for white balance. While the exact white balance
- * transforms are up to the camera device, they will approximately
- * match the CIE standard illuminant F2.</p>
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F2.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
* @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final int CONTROL_AWB_MODE_FLUORESCENT = 3;
/**
- * <p>The camera device's auto white balance routine is disabled;
+ * <p>The camera device's auto-white balance routine is disabled;
* the camera device uses warm fluorescent light as the assumed scene
- * illumination for white balance. While the exact white balance
- * transforms are up to the camera device, they will approximately
- * match the CIE standard illuminant F4.</p>
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F4.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
* @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final int CONTROL_AWB_MODE_WARM_FLUORESCENT = 4;
/**
- * <p>The camera device's auto white balance routine is disabled;
+ * <p>The camera device's auto-white balance routine is disabled;
* the camera device uses daylight light as the assumed scene
- * illumination for white balance. While the exact white balance
- * transforms are up to the camera device, they will approximately
- * match the CIE standard illuminant D65.</p>
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant D65.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
* @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final int CONTROL_AWB_MODE_DAYLIGHT = 5;
/**
- * <p>The camera device's auto white balance routine is disabled;
+ * <p>The camera device's auto-white balance routine is disabled;
* the camera device uses cloudy daylight light as the assumed scene
* illumination for white balance.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
* @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final int CONTROL_AWB_MODE_CLOUDY_DAYLIGHT = 6;
/**
- * <p>The camera device's auto white balance routine is disabled;
+ * <p>The camera device's auto-white balance routine is disabled;
* the camera device uses twilight light as the assumed scene
* illumination for white balance.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
* @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final int CONTROL_AWB_MODE_TWILIGHT = 7;
/**
- * <p>The camera device's auto white balance routine is disabled;
+ * <p>The camera device's auto-white balance routine is disabled;
* the camera device uses shade light as the assumed scene
* illumination for white balance.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
* @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final int CONTROL_AWB_MODE_SHADE = 8;
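
As the AWB mode documentation above repeats, preset modes ignore the application's transform and gains but, on MANUAL_POST_PROCESSING devices, report the values actually used in the result. A sketch, assuming a CaptureRequest.Builder named builder and a TotalCaptureResult named result delivered to onCaptureCompleted (RggbChannelVector and ColorSpaceTransform come from android.hardware.camera2.params):

    builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT);
    // Later, in onCaptureCompleted, read back what the device actually applied:
    RggbChannelVector appliedGains = result.get(CaptureResult.COLOR_CORRECTION_GAINS);
    ColorSpaceTransform appliedTransform = result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM);
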
@@ -966,38 +1041,43 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>This request doesn't fall into the other
- * categories. Default to preview-like
+ * <p>The goal of this request doesn't fall into the other
+ * categories. The camera device will default to preview-like
* behavior.</p>
* @see CaptureRequest#CONTROL_CAPTURE_INTENT
*/
public static final int CONTROL_CAPTURE_INTENT_CUSTOM = 0;
/**
- * <p>This request is for a preview-like usecase. The
- * precapture trigger may be used to start off a metering
- * w/flash sequence</p>
+ * <p>This request is for a preview-like use case.</p>
+ * <p>The precapture trigger may be used to start off a metering
+ * w/flash sequence.</p>
* @see CaptureRequest#CONTROL_CAPTURE_INTENT
*/
public static final int CONTROL_CAPTURE_INTENT_PREVIEW = 1;
/**
* <p>This request is for a still capture-type
- * usecase.</p>
+ * use case.</p>
+ * <p>If the flash unit is under automatic control, it may fire as needed.</p>
* @see CaptureRequest#CONTROL_CAPTURE_INTENT
*/
public static final int CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2;
/**
* <p>This request is for a video recording
- * usecase.</p>
+ * use case.</p>
* @see CaptureRequest#CONTROL_CAPTURE_INTENT
*/
public static final int CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3;
/**
* <p>This request is for a video snapshot (still
- * image while recording video) usecase</p>
+ * image while recording video) use case.</p>
+ * <p>The camera device should take the highest-quality image
+ * possible (given the other settings) without disrupting the
+ * frame rate of video recording.</p>
* @see CaptureRequest#CONTROL_CAPTURE_INTENT
*/
public static final int CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4;
@@ -1006,15 +1086,16 @@ public abstract class CameraMetadata<TKey> {
* <p>This request is for a ZSL usecase; the
* application will stream full-resolution images and
* reprocess one or several later for a final
- * capture</p>
+ * capture.</p>
* @see CaptureRequest#CONTROL_CAPTURE_INTENT
*/
public static final int CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5;
/**
* <p>This request is for manual capture use case where
- * the applications want to directly control the capture parameters
- * (e.g. {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} etc.).</p>
+ * the applications want to directly control the capture parameters.</p>
+ * <p>For example, the application may wish to manually control
+ * {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, etc.</p>
*
* @see CaptureRequest#SENSOR_EXPOSURE_TIME
* @see CaptureRequest#SENSOR_SENSITIVITY
@@ -1034,7 +1115,8 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>A "monocolor" effect where the image is mapped into
- * a single color. This will typically be grayscale.</p>
+ * a single color.</p>
+ * <p>This will typically be grayscale.</p>
* @see CaptureRequest#CONTROL_EFFECT_MODE
*/
public static final int CONTROL_EFFECT_MODE_MONO = 1;
@@ -1094,31 +1176,42 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>Full application control of pipeline. All 3A
- * routines are disabled, no other settings in
- * android.control.* have any effect</p>
+ * <p>Full application control of pipeline.</p>
+ * <p>All control by the device's metering and focusing (3A)
+ * routines is disabled, and no other settings in
+ * android.control.* have any effect, except that
+ * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} may be used by the camera
+ * device to select post-processing values for processing
+ * blocks that do not allow for manual control, or are not
+ * exposed by the camera API.</p>
+ * <p>However, the camera device's 3A routines may continue to
+ * collect statistics and update their internal state so that
+ * when control is switched to AUTO mode, good control values
+ * can be immediately applied.</p>
+ *
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
* @see CaptureRequest#CONTROL_MODE
*/
public static final int CONTROL_MODE_OFF = 0;
/**
- * <p>Use settings for each individual 3A routine.
- * Manual control of capture parameters is disabled. All
+ * <p>Use settings for each individual 3A routine.</p>
+ * <p>Manual control of capture parameters is disabled. All
* controls in android.control.* besides sceneMode take
- * effect</p>
+ * effect.</p>
* @see CaptureRequest#CONTROL_MODE
*/
public static final int CONTROL_MODE_AUTO = 1;
/**
- * <p>Use specific scene mode. Enabling this disables
- * control.aeMode, control.awbMode and control.afMode
- * controls; the camera device will ignore those settings while
- * USE_SCENE_MODE is active (except for FACE_PRIORITY
- * scene mode). Other control entries are still active.
- * This setting can only be used if scene mode is supported
- * (i.e. {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes} contain some modes
- * other than DISABLED).</p>
+ * <p>Use a specific scene mode.</p>
+ * <p>Enabling this disables control.aeMode, control.awbMode and
+ * control.afMode controls; the camera device will ignore
+ * those settings while USE_SCENE_MODE is active (except for
+ * FACE_PRIORITY scene mode). Other control entries are still
+ * active. This setting can only be used if scene mode is
+ * supported (i.e. {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}
+ * contains some modes other than DISABLED).</p>
*
* @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
* @see CaptureRequest#CONTROL_MODE
@@ -1128,7 +1221,12 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>Same as OFF mode, except that this capture will not be
* used by camera device background auto-exposure, auto-white balance and
- * auto-focus algorithms to update their statistics.</p>
+ * auto-focus algorithms (3A) to update their statistics.</p>
+ * <p>Specifically, the 3A routines are locked to the last
+ * values set from a request with AUTO, OFF, or
+ * USE_SCENE_MODE, and any statistics or state updates
+ * collected from manual captures with OFF_KEEP_STATE will be
+ * discarded by the camera device.</p>
* @see CaptureRequest#CONTROL_MODE
*/
public static final int CONTROL_MODE_OFF_KEEP_STATE = 3;
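
For the fully manual path described under CONTROL_MODE_OFF, a hedged example of the corresponding request settings; builder is an assumed CaptureRequest.Builder on a MANUAL_SENSOR-capable device and the values are illustrative only:

    builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF);
    builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L);   // 10 ms, in nanoseconds
    builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);             // ISO 400
    builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L);  // roughly 30 fps
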
@@ -1146,8 +1244,9 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>If face detection support exists, use face
* detection data for auto-focus, auto-white balance, and
- * auto-exposure routines. If face detection statistics are
- * disabled (i.e. {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} is set to OFF),
+ * auto-exposure routines.</p>
+ * <p>If face detection statistics are disabled
+ * (i.e. {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} is set to OFF),
* this should still operate correctly (but will not return
* face detection statistics to the framework).</p>
* <p>Unlike the other scene modes, {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode},
@@ -1163,8 +1262,8 @@ public abstract class CameraMetadata<TKey> {
public static final int CONTROL_SCENE_MODE_FACE_PRIORITY = 1;
/**
- * <p>Optimized for photos of quickly moving objects.
- * Similar to SPORTS.</p>
+ * <p>Optimized for photos of quickly moving objects.</p>
+ * <p>Similar to SPORTS.</p>
* @see CaptureRequest#CONTROL_SCENE_MODE
*/
public static final int CONTROL_SCENE_MODE_ACTION = 2;
@@ -1233,8 +1332,8 @@ public abstract class CameraMetadata<TKey> {
public static final int CONTROL_SCENE_MODE_FIREWORKS = 12;
/**
- * <p>Optimized for photos of quickly moving people.
- * Similar to ACTION.</p>
+ * <p>Optimized for photos of quickly moving people.</p>
+ * <p>Similar to ACTION.</p>
* @see CaptureRequest#CONTROL_SCENE_MODE
*/
public static final int CONTROL_SCENE_MODE_SPORTS = 13;
@@ -1266,11 +1365,13 @@ public abstract class CameraMetadata<TKey> {
//
/**
+ * <p>Video stabilization is disabled.</p>
* @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
*/
public static final int CONTROL_VIDEO_STABILIZATION_MODE_OFF = 0;
/**
+ * <p>Video stabilization is enabled.</p>
* @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
*/
public static final int CONTROL_VIDEO_STABILIZATION_MODE_ON = 1;
@@ -1280,21 +1381,20 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>No edge enhancement is applied</p>
+ * <p>No edge enhancement is applied.</p>
* @see CaptureRequest#EDGE_MODE
*/
public static final int EDGE_MODE_OFF = 0;
/**
- * <p>Must not slow down frame rate relative to sensor
+ * <p>Apply edge enhancement at a quality level that does not slow down frame rate relative to sensor
* output</p>
* @see CaptureRequest#EDGE_MODE
*/
public static final int EDGE_MODE_FAST = 1;
/**
- * <p>Frame rate may be reduced by high
- * quality</p>
+ * <p>Apply high-quality edge enhancement, at a cost of reducing output frame rate.</p>
* @see CaptureRequest#EDGE_MODE
*/
public static final int EDGE_MODE_HIGH_QUALITY = 2;
@@ -1327,10 +1427,10 @@ public abstract class CameraMetadata<TKey> {
//
/**
+ * <p>No hot pixel correction is applied.</p>
* <p>The frame rate must not be reduced relative to sensor raw output
* for this option.</p>
- * <p>No hot pixel correction is applied.
- * The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
+ * <p>The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
*
* @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
* @see CaptureRequest#HOT_PIXEL_MODE
@@ -1338,10 +1438,9 @@ public abstract class CameraMetadata<TKey> {
public static final int HOT_PIXEL_MODE_OFF = 0;
/**
- * <p>The frame rate must not be reduced relative to sensor raw output
- * for this option.</p>
- * <p>Hot pixel correction is applied.
- * The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
+ * <p>Hot pixel correction is applied, without reducing frame
+ * rate relative to sensor raw output.</p>
+ * <p>The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
*
* @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
* @see CaptureRequest#HOT_PIXEL_MODE
@@ -1349,10 +1448,9 @@ public abstract class CameraMetadata<TKey> {
public static final int HOT_PIXEL_MODE_FAST = 1;
/**
- * <p>The frame rate may be reduced relative to sensor raw output
- * for this option.</p>
- * <p>A high-quality hot pixel correction is applied.
- * The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
+ * <p>High-quality hot pixel correction is applied, at a cost
+ * of reducing frame rate relative to sensor raw output.</p>
+ * <p>The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
*
* @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
* @see CaptureRequest#HOT_PIXEL_MODE
@@ -1380,21 +1478,21 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>No noise reduction is applied</p>
+ * <p>No noise reduction is applied.</p>
* @see CaptureRequest#NOISE_REDUCTION_MODE
*/
public static final int NOISE_REDUCTION_MODE_OFF = 0;
/**
- * <p>Must not slow down frame rate relative to sensor
- * output</p>
+ * <p>Noise reduction is applied without reducing frame rate relative to sensor
+ * output.</p>
* @see CaptureRequest#NOISE_REDUCTION_MODE
*/
public static final int NOISE_REDUCTION_MODE_FAST = 1;
/**
- * <p>May slow down frame rate to provide highest
- * quality</p>
+ * <p>High-quality noise reduction is applied, at the cost of reducing frame rate
+ * relative to sensor output.</p>
* @see CaptureRequest#NOISE_REDUCTION_MODE
*/
public static final int NOISE_REDUCTION_MODE_HIGH_QUALITY = 2;
@@ -1404,8 +1502,9 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>Default. No test pattern mode is used, and the camera
+ * <p>No test pattern mode is used, and the camera
* device returns captures from the image sensor.</p>
+ * <p>This is the default if the key is not set.</p>
* @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
*/
public static final int SENSOR_TEST_PATTERN_MODE_OFF = 0;
@@ -1509,19 +1608,21 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>No lens shading correction is applied</p>
+ * <p>No lens shading correction is applied.</p>
* @see CaptureRequest#SHADING_MODE
*/
public static final int SHADING_MODE_OFF = 0;
/**
- * <p>Must not slow down frame rate relative to sensor raw output</p>
+ * <p>Apply lens shading correction, without slowing
+ * frame rate relative to sensor raw output.</p>
* @see CaptureRequest#SHADING_MODE
*/
public static final int SHADING_MODE_FAST = 1;
/**
- * <p>Frame rate may be reduced by high quality</p>
+ * <p>Apply high-quality lens shading correction, at the
+ * cost of reduced frame rate.</p>
* @see CaptureRequest#SHADING_MODE
*/
public static final int SHADING_MODE_HIGH_QUALITY = 2;
@@ -1531,20 +1632,28 @@ public abstract class CameraMetadata<TKey> {
//
/**
+ * <p>Do not include face detection statistics in capture
+ * results.</p>
* @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
*/
public static final int STATISTICS_FACE_DETECT_MODE_OFF = 0;
/**
- * <p>Optional Return rectangle and confidence
- * only</p>
+ * <p>Return face rectangle and confidence values only.</p>
+ * <p>In this mode, only android.statistics.faceRectangles and
+ * android.statistics.faceScores outputs are valid.</p>
* @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
*/
public static final int STATISTICS_FACE_DETECT_MODE_SIMPLE = 1;
/**
- * <p>Optional Return all face
- * metadata</p>
+ * <p>Return all face
+ * metadata.</p>
+ * <p>In this mode,
+ * android.statistics.faceRectangles,
+ * android.statistics.faceScores,
+ * android.statistics.faceIds, and
+ * android.statistics.faceLandmarks outputs are valid.</p>
* @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
*/
public static final int STATISTICS_FACE_DETECT_MODE_FULL = 2;
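
A short sketch of requesting SIMPLE face detection and reading the per-face statistics named above; builder and result are assumed, and Face is android.hardware.camera2.params.Face:

    builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE,
            CameraMetadata.STATISTICS_FACE_DETECT_MODE_SIMPLE);
    // In onCaptureCompleted:
    Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
    if (faces != null) {
        for (Face face : faces) {
            Rect bounds = face.getBounds(); // android.statistics.faceRectangles
            int score = face.getScore();    // android.statistics.faceScores
        }
    }
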
@@ -1554,11 +1663,13 @@ public abstract class CameraMetadata<TKey> {
//
/**
+ * <p>Do not include a lens shading map in the capture result.</p>
* @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
*/
public static final int STATISTICS_LENS_SHADING_MAP_MODE_OFF = 0;
/**
+ * <p>Include a lens shading map in the capture result.</p>
* @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
*/
public static final int STATISTICS_LENS_SHADING_MAP_MODE_ON = 1;
@@ -1582,15 +1693,15 @@ public abstract class CameraMetadata<TKey> {
public static final int TONEMAP_MODE_CONTRAST_CURVE = 0;
/**
- * <p>Advanced gamma mapping and color enhancement may be applied.</p>
- * <p>Should not slow down frame rate relative to raw sensor output.</p>
+ * <p>Advanced gamma mapping and color enhancement may be applied, without
+ * reducing frame rate compared to raw sensor output.</p>
* @see CaptureRequest#TONEMAP_MODE
*/
public static final int TONEMAP_MODE_FAST = 1;
/**
- * <p>Advanced gamma mapping and color enhancement may be applied.</p>
- * <p>May slow down frame rate relative to raw sensor output.</p>
+ * <p>High-quality gamma mapping and color enhancement will be applied, at
+ * the cost of reduced frame rate compared to raw sensor output.</p>
* @see CaptureRequest#TONEMAP_MODE
*/
public static final int TONEMAP_MODE_HIGH_QUALITY = 2;
@@ -1600,7 +1711,8 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>AE is off or recently reset. When a camera device is opened, it starts in
+ * <p>AE is off or recently reset.</p>
+ * <p>When a camera device is opened, it starts in
* this state. This is a transient state, the camera device may skip reporting
* this state in capture result.</p>
* @see CaptureResult#CONTROL_AE_STATE
@@ -1609,7 +1721,8 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>AE doesn't yet have a good set of control values
- * for the current scene. This is a transient state, the camera device may skip
+ * for the current scene.</p>
+ * <p>This is a transient state, the camera device may skip
* reporting this state in capture result.</p>
* @see CaptureResult#CONTROL_AE_STATE
*/
@@ -1638,11 +1751,13 @@ public abstract class CameraMetadata<TKey> {
/**
* <p>AE has been asked to do a precapture sequence
- * (through the {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} START),
- * and is currently executing it. Once PRECAPTURE
- * completes, AE will transition to CONVERGED or
- * FLASH_REQUIRED as appropriate. This is a transient state, the
- * camera device may skip reporting this state in capture result.</p>
+ * and is currently executing it.</p>
+ * <p>Precapture can be triggered through setting
+ * {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} to START.</p>
+ * <p>Once PRECAPTURE completes, AE will transition to CONVERGED
+ * or FLASH_REQUIRED as appropriate. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
*
* @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
* @see CaptureResult#CONTROL_AE_STATE
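
A sketch of starting the precapture sequence described above and of the states to wait for afterwards; builder, session, callback and handler are assumed, and session.capture may throw CameraAccessException:

    builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
            CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
    session.capture(builder.build(), callback, handler);
    // In the callback, issue the still capture once CONTROL_AE_STATE leaves PRECAPTURE
    // and reports CONVERGED or FLASH_REQUIRED.
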
@@ -1654,61 +1769,78 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>AF off or has not yet tried to scan/been asked
- * to scan. When a camera device is opened, it starts in
- * this state. This is a transient state, the camera device may
- * skip reporting this state in capture result.</p>
+ * <p>AF is off or has not yet tried to scan/been asked
+ * to scan.</p>
+ * <p>When a camera device is opened, it starts in this
+ * state. This is a transient state, the camera device may
+ * skip reporting this state in capture
+ * result.</p>
* @see CaptureResult#CONTROL_AF_STATE
*/
public static final int CONTROL_AF_STATE_INACTIVE = 0;
/**
- * <p>if CONTINUOUS_* modes are supported. AF is
- * currently doing an AF scan initiated by a continuous
- * autofocus mode. This is a transient state, the camera device may
- * skip reporting this state in capture result.</p>
+ * <p>AF is currently performing an AF scan initiated by the
+ * camera device in a continuous autofocus mode.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
* @see CaptureResult#CONTROL_AF_STATE
*/
public static final int CONTROL_AF_STATE_PASSIVE_SCAN = 1;
/**
- * <p>if CONTINUOUS_* modes are supported. AF currently
- * believes it is in focus, but may restart scanning at
- * any time. This is a transient state, the camera device may skip
- * reporting this state in capture result.</p>
+ * <p>AF currently believes it is in focus, but may
+ * restart scanning at any time.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
* @see CaptureResult#CONTROL_AF_STATE
*/
public static final int CONTROL_AF_STATE_PASSIVE_FOCUSED = 2;
/**
- * <p>if AUTO or MACRO modes are supported. AF is doing
- * an AF scan because it was triggered by AF trigger. This is a
- * transient state, the camera device may skip reporting
- * this state in capture result.</p>
+ * <p>AF is performing an AF scan because it was
+ * triggered by AF trigger.</p>
+ * <p>Only used by AUTO or MACRO AF modes. This is a transient
+ * state, the camera device may skip reporting this state in
+ * capture result.</p>
* @see CaptureResult#CONTROL_AF_STATE
*/
public static final int CONTROL_AF_STATE_ACTIVE_SCAN = 3;
/**
- * <p>if any AF mode besides OFF is supported. AF
- * believes it is focused correctly and is
- * locked.</p>
+ * <p>AF believes it is focused correctly and has locked
+ * focus.</p>
+ * <p>This state is reached only after an explicit START AF trigger has been
+ * sent ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}), when good focus has been obtained.</p>
+ * <p>The lens will remain stationary until the AF mode ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) is changed or
+ * a new AF trigger is sent to the camera device ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}).</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
* @see CaptureResult#CONTROL_AF_STATE
*/
public static final int CONTROL_AF_STATE_FOCUSED_LOCKED = 4;
/**
- * <p>if any AF mode besides OFF is supported. AF has
- * failed to focus successfully and is
- * locked.</p>
+ * <p>AF has failed to focus successfully and has locked
+ * focus.</p>
+ * <p>This state is reached only after an explicit START AF trigger has been
+ * sent ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}), when good focus cannot be obtained.</p>
+ * <p>The lens will remain stationary until the AF mode ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) is changed or
+ * a new AF trigger is sent to the camera device ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}).</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
* @see CaptureResult#CONTROL_AF_STATE
*/
public static final int CONTROL_AF_STATE_NOT_FOCUSED_LOCKED = 5;
/**
- * <p>if CONTINUOUS_* modes are supported. AF finished a
- * passive scan without finding focus, and may restart
- * scanning at any time. This is a transient state, the camera
+ * <p>AF finished a passive scan without finding focus,
+ * and may restart scanning at any time.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient state, the camera
* device may skip reporting this state in capture result.</p>
* @see CaptureResult#CONTROL_AF_STATE
*/
@@ -1719,16 +1851,19 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>AWB is not in auto mode. When a camera device is opened, it
- * starts in this state. This is a transient state, the camera device may
- * skip reporting this state in capture result.</p>
+ * <p>AWB is not in auto mode, or has not yet started metering.</p>
+ * <p>When a camera device is opened, it starts in this
+ * state. This is a transient state, the camera device may
+ * skip reporting this state in capture
+ * result.</p>
* @see CaptureResult#CONTROL_AWB_STATE
*/
public static final int CONTROL_AWB_STATE_INACTIVE = 0;
/**
* <p>AWB doesn't yet have a good set of control
- * values for the current scene. This is a transient state, the camera device
+ * values for the current scene.</p>
+ * <p>This is a transient state, the camera device
* may skip reporting this state in capture result.</p>
* @see CaptureResult#CONTROL_AWB_STATE
*/
@@ -1776,8 +1911,9 @@ public abstract class CameraMetadata<TKey> {
public static final int FLASH_STATE_FIRED = 3;
/**
- * <p>Flash partially illuminated this frame. This is usually due to the next
- * or previous frame having the flash fire, and the flash spilling into this capture
+ * <p>Flash partially illuminated this frame.</p>
+ * <p>This is usually due to the next or previous frame having
+ * the flash fire, and the flash spilling into this capture
* due to hardware limitations.</p>
* @see CaptureResult#FLASH_STATE
*/
@@ -1800,8 +1936,10 @@ public abstract class CameraMetadata<TKey> {
public static final int LENS_STATE_STATIONARY = 0;
/**
- * <p>Any of the lens parameters ({@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
- * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} or {@link CaptureRequest#LENS_APERTURE android.lens.aperture}) is changing.</p>
+ * <p>One or more of the lens parameters
+ * ({@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
+ * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} or {@link CaptureRequest#LENS_APERTURE android.lens.aperture}) is
+ * currently changing.</p>
*
* @see CaptureRequest#LENS_APERTURE
* @see CaptureRequest#LENS_FILTER_DENSITY
@@ -1816,16 +1954,22 @@ public abstract class CameraMetadata<TKey> {
//
/**
+ * <p>The camera device does not detect any flickering illumination
+ * in the current scene.</p>
* @see CaptureResult#STATISTICS_SCENE_FLICKER
*/
public static final int STATISTICS_SCENE_FLICKER_NONE = 0;
/**
+ * <p>The camera device detects illumination flickering at 50Hz
+ * in the current scene.</p>
* @see CaptureResult#STATISTICS_SCENE_FLICKER
*/
public static final int STATISTICS_SCENE_FLICKER_50HZ = 1;
/**
+ * <p>The camera device detects illumination flickering at 60Hz
+ * in the current scene.</p>
* @see CaptureResult#STATISTICS_SCENE_FLICKER
*/
public static final int STATISTICS_SCENE_FLICKER_60HZ = 2;
@@ -1835,8 +1979,8 @@ public abstract class CameraMetadata<TKey> {
//
/**
- * <p>The current result is not yet fully synchronized to any request.
- * Synchronization is in progress, and reading metadata from this
+ * <p>The current result is not yet fully synchronized to any request.</p>
+ * <p>Synchronization is in progress, and reading metadata from this
* result may include a mix of data that have taken effect since the
* last synchronization time.</p>
* <p>In some future result, within {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} frames,
@@ -1851,10 +1995,10 @@ public abstract class CameraMetadata<TKey> {
public static final int SYNC_FRAME_NUMBER_CONVERGING = -1;
/**
- * <p>The current result's synchronization status is unknown. The
- * result may have already converged, or it may be in progress.
- * Reading from this result may include some mix of settings from
- * past requests.</p>
+ * <p>The current result's synchronization status is unknown.</p>
+ * <p>The result may have already converged, or it may be in
+ * progress. Reading from this result may include some mix
+ * of settings from past requests.</p>
* <p>After a settings change, the new settings will eventually all
* take effect for the output buffers and results. However, this
* value will not change when that happens. Altering settings
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index 0ff8cce..bf7bd37 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -733,8 +733,16 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* included at all in the request settings. When included and
* set to START, the camera device will trigger the autoexposure
* precapture metering sequence.</p>
- * <p>The effect of auto-exposure (AE) precapture trigger depends
- * on the current AE mode and state; see
+ * <p>The precapture sequence should be triggered before starting a
+ * high-quality still capture for final metering decisions to
+ * be made, and for firing pre-capture flash pulses to estimate
+ * scene brightness and required final capture flash power, when
+ * the flash is enabled.</p>
+ * <p>Normally, this entry should be set to START for only a
+ * single request, and the application should wait until the
+ * sequence completes before starting a new one.</p>
+ * <p>The exact effect of auto-exposure (AE) precapture trigger
+ * depends on the current AE mode and state; see
* {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture state transition
* details.</p>
*
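
The expanded CONTROL_AE_PRECAPTURE_TRIGGER documentation above boils down to a simple usage pattern: send START in a single request, then watch the AE state until the sequence finishes. A minimal sketch of that pattern follows; the CameraCaptureSession, preview CaptureRequest.Builder, result callback and Handler are assumed placeholders set up elsewhere.

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CaptureRequest;
    import android.os.Handler;

    class PrecaptureSketch {
        /** Fire the AE precapture trigger exactly once, as the documentation recommends. */
        static void startPrecapture(CameraCaptureSession session,
                CaptureRequest.Builder previewBuilder,
                CameraCaptureSession.CaptureCallback resultCallback,
                Handler handler) throws CameraAccessException {
            previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
            session.capture(previewBuilder.build(), resultCallback, handler);
            // Return the trigger to IDLE so a reused repeating request does not
            // restart the precapture sequence on every frame.
            previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                    CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
            // In resultCallback, wait for CaptureResult.CONTROL_AE_STATE to leave
            // PRECAPTURE (CONVERGED or FLASH_REQUIRED) before issuing the still capture.
        }
    }
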
@@ -800,7 +808,11 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
* <p>When set to CANCEL, the camera device will cancel any active trigger,
* and return to its initial AF state.</p>
- * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what that means for each AF mode.</p>
+ * <p>Generally, applications should set this entry to START or CANCEL for only a
+ * single capture, and then return it to IDLE (or not set at all). Specifying
+ * START for multiple captures in a row means restarting the AF operation over
+ * and over again.</p>
+ * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what the trigger means for each AF mode.</p>
*
* @see CaptureResult#CONTROL_AF_STATE
* @see #CONTROL_AF_TRIGGER_IDLE
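
The note above about using START or CANCEL for only a single capture corresponds to the usual lock-focus pattern. A hedged sketch is shown below, with the same placeholder session/builder/callback/handler assumptions as before.

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CaptureRequest;
    import android.os.Handler;

    class AfTriggerSketch {
        /** Send AF_TRIGGER_START in one request, then return the trigger to IDLE. */
        static void lockFocus(CameraCaptureSession session,
                CaptureRequest.Builder builder,
                CameraCaptureSession.CaptureCallback resultCallback,
                Handler handler) throws CameraAccessException {
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CaptureRequest.CONTROL_AF_TRIGGER_START);
            session.capture(builder.build(), resultCallback, handler);
            builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
                    CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
            // Watch CaptureResult.CONTROL_AF_STATE in resultCallback for FOCUSED_LOCKED or
            // NOT_FOCUSED_LOCKED; send CONTROL_AF_TRIGGER_CANCEL (again in a single request)
            // to unlock and return AF to its initial state.
        }
    }
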
@@ -813,9 +825,11 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Whether auto-white balance (AWB) is currently locked to its
* latest calculated values.</p>
- * <p>Note that AWB lock is only meaningful for AUTO
- * mode; in other modes, AWB is already fixed to a specific
- * setting.</p>
+ * <p>Note that AWB lock is only meaningful when
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is in the AUTO mode; in other modes,
+ * AWB is already fixed to a specific setting.</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final Key<Boolean> CONTROL_AWB_LOCK =
new Key<Boolean>("android.control.awbLock", boolean.class);
@@ -825,17 +839,21 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* transform fields, and what its illumination target
* is.</p>
* <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
- * <p>When set to the ON mode, the camera device's auto white balance
+ * <p>When set to the ON mode, the camera device's auto-white balance
* routine is enabled, overriding the application's selected
* {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
- * <p>When set to the OFF mode, the camera device's auto white balance
+ * <p>When set to the OFF mode, the camera device's auto-white balance
* routine is disabled. The application manually controls the white
* balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
* and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
- * <p>When set to any other modes, the camera device's auto white balance
- * routine is disabled. The camera device uses each particular illumination
- * target for white balance adjustment.</p>
+ * <p>When set to any other modes, the camera device's auto-white
+ * balance routine is disabled. The camera device uses each
+ * particular illumination target for white balance
+ * adjustment. The application's values for
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform},
+ * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} are ignored.</p>
*
* @see CaptureRequest#COLOR_CORRECTION_GAINS
* @see CaptureRequest#COLOR_CORRECTION_MODE
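
When AWB is disabled as described above, the application supplies the color correction values itself. The sketch below is illustrative only: the unity gains and identity transform are placeholder values, and manual color correction generally also requires that the device advertise the corresponding capabilities.

    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.params.ColorSpaceTransform;
    import android.hardware.camera2.params.RggbChannelVector;

    class ManualAwbSketch {
        /** Disable AWB and supply explicit gains/transform, as the OFF-mode docs describe. */
        static void applyManualWhiteBalance(CaptureRequest.Builder builder) {
            builder.set(CaptureRequest.CONTROL_AWB_MODE, CaptureRequest.CONTROL_AWB_MODE_OFF);
            builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
                    CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
            // Placeholder values: unity per-channel gains and an identity 3x3 transform
            // (numerator/denominator pairs, row-major).
            builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
                    new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f));
            builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, new ColorSpaceTransform(
                    new int[] {1, 1, 0, 1, 0, 1,  0, 1, 1, 1, 0, 1,  0, 1, 0, 1, 1, 1}));
        }
    }
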
@@ -886,8 +904,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* strategy.</p>
* <p>This control (except for MANUAL) is only effective if
* <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p>
- * <p>ZERO_SHUTTER_LAG must be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
- * contains ZSL. MANUAL must be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
+ * <p>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
+ * contains ZSL. MANUAL will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
* contains MANUAL_SENSOR.</p>
*
* @see CaptureRequest#CONTROL_MODE
@@ -962,7 +980,9 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* <p>This is the mode that is active when
* <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY,
* these modes will disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode},
- * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.</p>
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.
+ * The scene modes available for a given camera device are listed in
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}.</p>
* <p>The interpretation and implementation of these scene modes is left
* to the implementor of the camera device. Their behavior will not be
* consistent across all devices, and any given device may only implement
@@ -970,6 +990,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
*
* @see CaptureRequest#CONTROL_AE_MODE
* @see CaptureRequest#CONTROL_AF_MODE
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
* @see CaptureRequest#CONTROL_AWB_MODE
* @see CaptureRequest#CONTROL_MODE
* @see #CONTROL_SCENE_MODE_DISABLED
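
Since the available scene modes are now documented as being listed in android.control.availableSceneModes, a defensive sketch for enabling one might look like the following; the CameraCharacteristics and builder are assumed to come from the usual CameraManager/CameraDevice setup.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CaptureRequest;

    class SceneModeSketch {
        /** Enable a scene mode only if the device lists it as available. */
        static void setSceneModeIfAvailable(CameraCharacteristics chars,
                CaptureRequest.Builder builder, int sceneMode) {
            int[] available = chars.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
            if (available == null) return;
            for (int mode : available) {
                if (mode == sceneMode) {
                    builder.set(CaptureRequest.CONTROL_MODE,
                            CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
                    builder.set(CaptureRequest.CONTROL_SCENE_MODE, sceneMode);
                    return;
                }
            }
            // Scene mode not supported on this device; leave the 3A controls as they are.
        }
    }
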
@@ -996,6 +1017,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Whether video stabilization is
* active.</p>
+ * <p>Video stabilization automatically translates and scales images from the camera
+ * in order to stabilize motion between consecutive frames.</p>
* <p>If enabled, video stabilization can modify the
* {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream
* stabilized</p>
@@ -1110,14 +1133,14 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Compression quality of the final JPEG
* image.</p>
- * <p>85-95 is typical usage range</p>
+ * <p>85-95 is typical usage range.</p>
*/
public static final Key<Byte> JPEG_QUALITY =
new Key<Byte>("android.jpeg.quality", byte.class);
/**
* <p>Compression quality of JPEG
- * thumbnail</p>
+ * thumbnail.</p>
*/
public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
@@ -1229,12 +1252,18 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Sets whether the camera device uses optical image stabilization (OIS)
* when capturing images.</p>
- * <p>OIS is used to compensate for motion blur due to small movements of
- * the camera during capture. Unlike digital image stabilization, OIS makes
- * use of mechanical elements to stabilize the camera sensor, and thus
- * allows for longer exposure times before camera shake becomes
- * apparent.</p>
- * <p>This is not expected to be supported on most devices.</p>
+ * <p>OIS is used to compensate for motion blur due to small
+ * movements of the camera during capture. Unlike digital image
+ * stabilization ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), OIS
+ * makes use of mechanical elements to stabilize the camera
+ * sensor, and thus allows for longer exposure times before
+ * camera shake becomes apparent.</p>
+ * <p>Not all devices will support OIS; see
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization} for
+ * available controls.</p>
+ *
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
* @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
* @see #LENS_OPTICAL_STABILIZATION_MODE_ON
*/
@@ -1242,16 +1271,15 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
new Key<Integer>("android.lens.opticalStabilizationMode", int.class);
/**
- * <p>Mode of operation for the noise reduction.
- * algorithm</p>
+ * <p>Mode of operation for the noise reduction algorithm.</p>
* <p>Noise filtering control. OFF means no noise reduction
* will be applied by the camera device.</p>
- * <p>This must be set to a valid mode in
+ * <p>This must be set to a valid mode from
* {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes}.</p>
* <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
* will be applied. HIGH_QUALITY mode indicates that the camera device
* will use the highest-quality noise filtering algorithms,
- * even if it slows down capture rate. FAST means the camera device should not
+ * even if it slows down capture rate. FAST means the camera device will not
* slow down capture rate when applying noise filtering.</p>
*
* @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
@@ -1435,7 +1463,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* <p>When enabled, the sensor sends a test pattern instead of
* doing a real exposure from the camera.</p>
* <p>When a test pattern is enabled, all manual sensor controls specified
- * by android.sensor.* should be ignored. All other controls should
+ * by android.sensor.* will be ignored. All other controls should
* work as normal.</p>
* <p>For example, if manual flash is enabled, flash firing should still
* occur (and that the test pattern remain unmodified, since the flash
@@ -1490,7 +1518,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
new Key<Integer>("android.shading.mode", int.class);
/**
- * <p>State of the face detector
+ * <p>Control for the face detector
* unit.</p>
* <p>Whether face detection is enabled, and whether it
* should output just the basic fields or the full set of
@@ -1508,7 +1536,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
/**
* <p>Operating mode for hotpixel map generation.</p>
* <p>If set to ON, a hotpixel map is returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.
- * If set to OFF, no hotpixel map should be returned.</p>
+ * If set to OFF, no hotpixel map will be returned.</p>
* <p>This must be set to a valid mode from {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES android.statistics.info.availableHotPixelMapModes}.</p>
*
* @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
@@ -1521,7 +1549,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
* <p>Whether the camera device will output the lens
* shading map in output result metadata.</p>
* <p>When set to ON,
- * android.statistics.lensShadingMap must be provided in
+ * android.statistics.lensShadingMap will be provided in
* the output result metadata.</p>
* @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
* @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
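
Requesting the lens shading map output described above is a single request setting; the sketch assumes a placeholder CaptureRequest.Builder.

    import android.hardware.camera2.CaptureRequest;

    class ShadingMapSketch {
        /** Ask the camera device to include the lens shading map in each capture result. */
        static void requestLensShadingMap(CaptureRequest.Builder builder) {
            builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
                    CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
            // With this set to ON, android.statistics.lensShadingMap is provided in the
            // output result metadata for the corresponding captures.
        }
    }
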
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index ce3de1d..3d17ed3 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -579,8 +579,16 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* included at all in the request settings. When included and
* set to START, the camera device will trigger the autoexposure
* precapture metering sequence.</p>
- * <p>The effect of auto-exposure (AE) precapture trigger depends
- * on the current AE mode and state; see
+ * <p>The precapture sequence should be triggered before starting a
+ * high-quality still capture for final metering decisions to
+ * be made, and for firing pre-capture flash pulses to estimate
+ * scene brightness and required final capture flash power, when
+ * the flash is enabled.</p>
+ * <p>Normally, this entry should be set to START for only a
+ * single request, and the application should wait until the
+ * sequence completes before starting a new one.</p>
+ * <p>The exact effect of auto-exposure (AE) precapture trigger
+ * depends on the current AE mode and state; see
* {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture state transition
* details.</p>
*
@@ -592,7 +600,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
new Key<Integer>("android.control.aePrecaptureTrigger", int.class);
/**
- * <p>Current state of auto-exposure (AE) algorithm.</p>
+ * <p>Current state of the auto-exposure (AE) algorithm.</p>
* <p>Switching between or enabling AE modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}) always
* resets the AE state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
* or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
@@ -844,7 +852,11 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
* <p>When set to CANCEL, the camera device will cancel any active trigger,
* and return to its initial AF state.</p>
- * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what that means for each AF mode.</p>
+ * <p>Generally, applications should set this entry to START or CANCEL for only a
+ * single capture, and then return it to IDLE (or not set at all). Specifying
+ * START for multiple captures in a row means restarting the AF operation over
+ * and over again.</p>
+ * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what the trigger means for each AF mode.</p>
*
* @see CaptureResult#CONTROL_AF_STATE
* @see #CONTROL_AF_TRIGGER_IDLE
@@ -1034,13 +1046,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* <td align="center">PASSIVE_SCAN</td>
* <td align="center">AF_TRIGGER</td>
* <td align="center">FOCUSED_LOCKED</td>
- * <td align="center">Immediate trans. If focus is good, Lens now locked</td>
+ * <td align="center">Immediate transition if focus is good. Lens now locked</td>
* </tr>
* <tr>
* <td align="center">PASSIVE_SCAN</td>
* <td align="center">AF_TRIGGER</td>
* <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition if focus is bad. Lens now locked</td>
+ * <td align="center">Immediate transition, if focus is bad. Lens now locked</td>
* </tr>
* <tr>
* <td align="center">PASSIVE_SCAN</td>
@@ -1064,13 +1076,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* <td align="center">PASSIVE_FOCUSED</td>
* <td align="center">AF_TRIGGER</td>
* <td align="center">FOCUSED_LOCKED</td>
- * <td align="center">Immediate trans. Lens now locked</td>
+ * <td align="center">Immediate transition, lens now locked</td>
* </tr>
* <tr>
* <td align="center">PASSIVE_UNFOCUSED</td>
* <td align="center">AF_TRIGGER</td>
* <td align="center">NOT_FOCUSED_LOCKED</td>
- * <td align="center">Immediate trans. Lens now locked</td>
+ * <td align="center">Immediate transition, lens now locked</td>
* </tr>
* <tr>
* <td align="center">FOCUSED_LOCKED</td>
@@ -1137,13 +1149,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* <td align="center">PASSIVE_SCAN</td>
* <td align="center">AF_TRIGGER</td>
* <td align="center">FOCUSED_LOCKED</td>
- * <td align="center">Eventual trans. once focus good, Lens now locked</td>
+ * <td align="center">Eventual transition once the focus is good. Lens now locked</td>
* </tr>
* <tr>
* <td align="center">PASSIVE_SCAN</td>
* <td align="center">AF_TRIGGER</td>
* <td align="center">NOT_FOCUSED_LOCKED</td>
- * <td align="center">Eventual trans. if cannot focus, Lens now locked</td>
+ * <td align="center">Eventual transition if focus cannot be found. Lens now locked</td>
* </tr>
* <tr>
* <td align="center">PASSIVE_SCAN</td>
@@ -1254,9 +1266,11 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Whether auto-white balance (AWB) is currently locked to its
* latest calculated values.</p>
- * <p>Note that AWB lock is only meaningful for AUTO
- * mode; in other modes, AWB is already fixed to a specific
- * setting.</p>
+ * <p>Note that AWB lock is only meaningful when
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is in the AUTO mode; in other modes,
+ * AWB is already fixed to a specific setting.</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_MODE
*/
public static final Key<Boolean> CONTROL_AWB_LOCK =
new Key<Boolean>("android.control.awbLock", boolean.class);
@@ -1266,17 +1280,21 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* transform fields, and what its illumination target
* is.</p>
* <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
- * <p>When set to the ON mode, the camera device's auto white balance
+ * <p>When set to the ON mode, the camera device's auto-white balance
* routine is enabled, overriding the application's selected
* {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
* {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
- * <p>When set to the OFF mode, the camera device's auto white balance
+ * <p>When set to the OFF mode, the camera device's auto-white balance
* routine is disabled. The application manually controls the white
* balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
* and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
- * <p>When set to any other modes, the camera device's auto white balance
- * routine is disabled. The camera device uses each particular illumination
- * target for white balance adjustment.</p>
+ * <p>When set to any other modes, the camera device's auto-white
+ * balance routine is disabled. The camera device uses each
+ * particular illumination target for white balance
+ * adjustment. The application's values for
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform},
+ * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} are ignored.</p>
*
* @see CaptureRequest#COLOR_CORRECTION_GAINS
* @see CaptureRequest#COLOR_CORRECTION_MODE
@@ -1327,8 +1345,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* strategy.</p>
* <p>This control (except for MANUAL) is only effective if
* <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p>
- * <p>ZERO_SHUTTER_LAG must be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
- * contains ZSL. MANUAL must be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
+ * <p>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
+ * contains ZSL. MANUAL will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
* contains MANUAL_SENSOR.</p>
*
* @see CaptureRequest#CONTROL_MODE
@@ -1533,7 +1551,9 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* <p>This is the mode that is active when
* <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY,
* these modes will disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode},
- * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.</p>
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} while in use.
+ * The scene modes available for a given camera device are listed in
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}.</p>
* <p>The interpretation and implementation of these scene modes is left
* to the implementor of the camera device. Their behavior will not be
* consistent across all devices, and any given device may only implement
@@ -1541,6 +1561,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
*
* @see CaptureRequest#CONTROL_AE_MODE
* @see CaptureRequest#CONTROL_AF_MODE
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
* @see CaptureRequest#CONTROL_AWB_MODE
* @see CaptureRequest#CONTROL_MODE
* @see #CONTROL_SCENE_MODE_DISABLED
@@ -1567,6 +1588,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Whether video stabilization is
* active.</p>
+ * <p>Video stabilization automatically translates and scales images from the camera
+ * in order to stabilize motion between consecutive frames.</p>
* <p>If enabled, video stabilization can modify the
* {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream
* stabilized</p>
@@ -1698,14 +1721,14 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Compression quality of the final JPEG
* image.</p>
- * <p>85-95 is typical usage range</p>
+ * <p>85-95 is typical usage range.</p>
*/
public static final Key<Byte> JPEG_QUALITY =
new Key<Byte>("android.jpeg.quality", byte.class);
/**
* <p>Compression quality of JPEG
- * thumbnail</p>
+ * thumbnail.</p>
*/
public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
@@ -1817,12 +1840,18 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Sets whether the camera device uses optical image stabilization (OIS)
* when capturing images.</p>
- * <p>OIS is used to compensate for motion blur due to small movements of
- * the camera during capture. Unlike digital image stabilization, OIS makes
- * use of mechanical elements to stabilize the camera sensor, and thus
- * allows for longer exposure times before camera shake becomes
- * apparent.</p>
- * <p>This is not expected to be supported on most devices.</p>
+ * <p>OIS is used to compensate for motion blur due to small
+ * movements of the camera during capture. Unlike digital image
+ * stabilization ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), OIS
+ * makes use of mechanical elements to stabilize the camera
+ * sensor, and thus allows for longer exposure times before
+ * camera shake becomes apparent.</p>
+ * <p>Not all devices will support OIS; see
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization} for
+ * available controls.</p>
+ *
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
* @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
* @see #LENS_OPTICAL_STABILIZATION_MODE_ON
*/
@@ -1866,16 +1895,15 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
new Key<Integer>("android.lens.state", int.class);
/**
- * <p>Mode of operation for the noise reduction.
- * algorithm</p>
+ * <p>Mode of operation for the noise reduction algorithm.</p>
* <p>Noise filtering control. OFF means no noise reduction
* will be applied by the camera device.</p>
- * <p>This must be set to a valid mode in
+ * <p>This must be set to a valid mode from
* {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes}.</p>
* <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
* will be applied. HIGH_QUALITY mode indicates that the camera device
* will use the highest-quality noise filtering algorithms,
- * even if it slows down capture rate. FAST means the camera device should not
+ * even if it slows down capture rate. FAST means the camera device will not
* slow down capture rate when applying noise filtering.</p>
*
* @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
@@ -2170,7 +2198,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* <p>When enabled, the sensor sends a test pattern instead of
* doing a real exposure from the camera.</p>
* <p>When a test pattern is enabled, all manual sensor controls specified
- * by android.sensor.* should be ignored. All other controls should
+ * by android.sensor.* will be ignored. All other controls should
* work as normal.</p>
* <p>For example, if manual flash is enabled, flash firing should still
* occur (and that the test pattern remain unmodified, since the flash
@@ -2225,7 +2253,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
new Key<Integer>("android.shading.mode", int.class);
/**
- * <p>State of the face detector
+ * <p>Control for the face detector
* unit.</p>
* <p>Whether face detection is enabled, and whether it
* should output just the basic fields or the full set of
@@ -2241,9 +2269,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
new Key<Integer>("android.statistics.faceDetectMode", int.class);
/**
- * <p>List of unique IDs for detected
- * faces</p>
- * <p>Only available if faceDetectMode == FULL</p>
+ * <p>List of unique IDs for detected faces.</p>
+ * <p>Each detected face is given a unique ID that is valid for as long as the face is visible
+ * to the camera device. A face that leaves the field of view and later returns may be
+ * assigned a new ID.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL</p>
+ *
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
* @hide
*/
public static final Key<int[]> STATISTICS_FACE_IDS =
@@ -2251,8 +2283,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>List of landmarks for detected
- * faces</p>
- * <p>Only available if faceDetectMode == FULL</p>
+ * faces.</p>
+ * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
+ * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} == FULL</p>
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
* @hide
*/
public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
@@ -2260,8 +2297,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>List of the bounding rectangles for detected
- * faces</p>
- * <p>Only available if faceDetectMode != OFF</p>
+ * faces.</p>
+ * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
+ * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF</p>
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
* @hide
*/
public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
@@ -2270,8 +2312,9 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>List of the face confidence scores for
* detected faces</p>
- * <p>Only available if faceDetectMode != OFF. The value should be
- * meaningful (for example, setting 100 at all times is illegal).</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.</p>
+ *
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
* @hide
*/
public static final Key<byte[]> STATISTICS_FACE_SCORES =
@@ -2435,12 +2478,13 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* The camera device uses this entry to tell the application what the scene
* illuminant frequency is.</p>
* <p>When manual exposure control is enabled
- * (<code>{@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} == OFF</code> or <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == OFF</code>),
- * the {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} doesn't do the antibanding, and the
- * application can ensure it selects exposure times that do not cause banding
- * issues by looking into this metadata field. See {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode}
- * for more details.</p>
- * <p>Report NONE if there doesn't appear to be flickering illumination.</p>
+ * (<code>{@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} == OFF</code> or <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} ==
+ * OFF</code>), the {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} doesn't perform
+ * antibanding, and the application can ensure it selects
+ * exposure times that do not cause banding issues by looking
+ * into this metadata field. See
+ * {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} for more details.</p>
+ * <p>Reports NONE if there doesn't appear to be flickering illumination.</p>
*
* @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
* @see CaptureRequest#CONTROL_AE_MODE
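
One simple way to act on this field under manual exposure control, as suggested above, is to snap exposure times to a multiple of the illumination flicker period (1/100 s under 50Hz mains, about 1/120 s under 60Hz). This is an illustrative strategy, not something prescribed by the documentation.

    import android.hardware.camera2.CaptureResult;

    class FlickerSketch {
        /**
         * Round an exposure time (in nanoseconds) down to a multiple of the detected
         * flicker period so manual exposures avoid banding. Returns the input unchanged
         * when no flicker is reported or the exposure is shorter than one period.
         */
        static long antibandedExposureNs(CaptureResult result, long desiredExposureNs) {
            Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
            if (flicker == null) return desiredExposureNs;
            long periodNs;
            if (flicker == CaptureResult.STATISTICS_SCENE_FLICKER_50HZ) {
                periodNs = 10_000_000L; // 1/100 s: light flickers at twice the 50Hz mains rate
            } else if (flicker == CaptureResult.STATISTICS_SCENE_FLICKER_60HZ) {
                periodNs = 8_333_333L;  // ~1/120 s
            } else {
                return desiredExposureNs;
            }
            long multiples = desiredExposureNs / periodNs;
            return multiples > 0 ? multiples * periodNs : desiredExposureNs;
        }
    }
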
@@ -2455,7 +2499,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
/**
* <p>Operating mode for hotpixel map generation.</p>
* <p>If set to ON, a hotpixel map is returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.
- * If set to OFF, no hotpixel map should be returned.</p>
+ * If set to OFF, no hotpixel map will be returned.</p>
* <p>This must be set to a valid mode from {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES android.statistics.info.availableHotPixelMapModes}.</p>
*
* @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
@@ -2483,7 +2527,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
* <p>Whether the camera device will output the lens
* shading map in output result metadata.</p>
* <p>When set to ON,
- * android.statistics.lensShadingMap must be provided in
+ * android.statistics.lensShadingMap will be provided in
* the output result metadata.</p>
* @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
* @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
diff --git a/core/java/android/hardware/camera2/params/StreamConfiguration.java b/core/java/android/hardware/camera2/params/StreamConfiguration.java
index dd862b5..a6fc10f 100644
--- a/core/java/android/hardware/camera2/params/StreamConfiguration.java
+++ b/core/java/android/hardware/camera2/params/StreamConfiguration.java
@@ -30,7 +30,8 @@ import android.util.Size;
* Immutable class to store the available stream
* {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS configurations} to set up
* {@link android.view.Surface Surfaces} for creating a {@link CameraCaptureSession capture session}
- * with {@link CameraDevice#createCaptureSession}. <!-- TODO: link to input stream configuration -->
+ * with {@link CameraDevice#createCaptureSession}.
+ * <!-- TODO: link to input stream configuration -->
*
* <p>This is the authoritative list for all input/output formats (and sizes respectively
* for that format) that are supported by a camera device.</p>
diff --git a/core/java/android/net/RouteInfo.java b/core/java/android/net/RouteInfo.java
index af27e1d..8b42bcd 100644
--- a/core/java/android/net/RouteInfo.java
+++ b/core/java/android/net/RouteInfo.java
@@ -361,7 +361,7 @@ public class RouteInfo implements Parcelable {
RouteInfo target = (RouteInfo) obj;
- return Objects.equals(mDestination, target.getDestination()) &&
+ return Objects.equals(mDestination, target.getDestinationLinkAddress()) &&
Objects.equals(mGateway, target.getGateway()) &&
Objects.equals(mInterface, target.getInterface());
}
diff --git a/core/java/android/provider/Settings.java b/core/java/android/provider/Settings.java
index ae536c1..1001677 100644
--- a/core/java/android/provider/Settings.java
+++ b/core/java/android/provider/Settings.java
@@ -6035,6 +6035,7 @@ public final class Settings {
/**
* Battery level [1-99] at which low power mode automatically turns on.
+ * If 0, it will not automatically turn on.
* @hide
*/
public static final String LOW_POWER_MODE_TRIGGER_LEVEL = "low_power_trigger_level";
diff --git a/core/java/android/service/notification/NotificationListenerService.java b/core/java/android/service/notification/NotificationListenerService.java
index fd475cd..8bd0f4d 100644
--- a/core/java/android/service/notification/NotificationListenerService.java
+++ b/core/java/android/service/notification/NotificationListenerService.java
@@ -29,6 +29,7 @@ import android.os.Parcel;
import android.os.Parcelable;
import android.os.RemoteException;
import android.os.ServiceManager;
+import android.util.ArrayMap;
import android.util.Log;
import java.util.List;
@@ -54,7 +55,7 @@ public abstract class NotificationListenerService extends Service {
+ "[" + getClass().getSimpleName() + "]";
private INotificationListenerWrapper mWrapper = null;
- private Ranking mRanking;
+ private RankingMap mRankingMap;
private INotificationManager mNoMan;
@@ -75,7 +76,43 @@ public abstract class NotificationListenerService extends Service {
* object as well as its identifying information (tag and id) and source
* (package name).
*/
- public abstract void onNotificationPosted(StatusBarNotification sbn);
+ public void onNotificationPosted(StatusBarNotification sbn) {
+ // optional
+ }
+
+ /**
+ * Implement this method to learn about new notifications as they are posted by apps.
+ *
+ * @param sbn A data structure encapsulating the original {@link android.app.Notification}
+ * object as well as its identifying information (tag and id) and source
+ * (package name).
+ * @param rankingMap The current ranking map that can be used to retrieve ranking information
+ * for active notifications, including the newly posted one.
+ */
+ public void onNotificationPosted(StatusBarNotification sbn, RankingMap rankingMap) {
+ onNotificationPosted(sbn);
+ }
+
+ /**
+ * Implement this method to learn when notifications are removed.
+ * <P>
+ * This might occur because the user has dismissed the notification using system UI (or another
+ * notification listener) or because the app has withdrawn the notification.
+ * <P>
+ * NOTE: The {@link StatusBarNotification} object you receive will be "light"; that is, the
+ * result from {@link StatusBarNotification#getNotification} may be missing some heavyweight
+ * fields such as {@link android.app.Notification#contentView} and
+ * {@link android.app.Notification#largeIcon}. However, all other fields on
+ * {@link StatusBarNotification}, sufficient to match this call with a prior call to
+ * {@link #onNotificationPosted(StatusBarNotification)}, will be intact.
+ *
+ * @param sbn A data structure encapsulating at least the original information (tag and id)
+ * and source (package name) used to post the {@link android.app.Notification} that
+ * was just removed.
+ */
+ public void onNotificationRemoved(StatusBarNotification sbn) {
+ // optional
+ }
/**
* Implement this method to learn when notifications are removed.
@@ -93,8 +130,13 @@ public abstract class NotificationListenerService extends Service {
* @param sbn A data structure encapsulating at least the original information (tag and id)
* and source (package name) used to post the {@link android.app.Notification} that
* was just removed.
+ * @param rankingMap The current ranking map that can be used to retrieve ranking information
+ * for active notifications.
+ *
*/
- public abstract void onNotificationRemoved(StatusBarNotification sbn);
+ public void onNotificationRemoved(StatusBarNotification sbn, RankingMap rankingMap) {
+ onNotificationRemoved(sbn);
+ }
/**
* Implement this method to learn about when the listener is enabled and connected to
@@ -107,10 +149,11 @@ public abstract class NotificationListenerService extends Service {
/**
* Implement this method to be notified when the notification ranking changes.
- * <P>
- * Call {@link #getCurrentRanking()} to retrieve the new ranking.
+ *
+ * @param rankingMap The current ranking map that can be used to retrieve ranking information
+ * for active notifications.
*/
- public void onNotificationRankingUpdate() {
+ public void onNotificationRankingUpdate(RankingMap rankingMap) {
// optional
}
@@ -241,16 +284,19 @@ public abstract class NotificationListenerService extends Service {
*
* <p>
* The returned object represents the current ranking snapshot and only
- * applies for currently active notifications. Hence you must retrieve a
- * new Ranking after each notification event such as
- * {@link #onNotificationPosted(StatusBarNotification)},
- * {@link #onNotificationRemoved(StatusBarNotification)}, etc.
+ * applies for currently active notifications.
+ * <p>
+ * Generally you should use the RankingMap that is passed with events such
+ * as {@link #onNotificationPosted(StatusBarNotification, RankingMap)},
+ * {@link #onNotificationRemoved(StatusBarNotification, RankingMap)}, and
+ * so on. This method should only be used when needing access outside of
+ * such events, for example to retrieve the RankingMap right after
+ * initialization.
*
- * @return A {@link NotificationListenerService.Ranking} object providing
- * access to ranking information
+ * @return A {@link RankingMap} object providing access to ranking information
*/
- public Ranking getCurrentRanking() {
- return mRanking;
+ public RankingMap getCurrentRanking() {
+ return mRankingMap;
}
@Override
@@ -313,7 +359,7 @@ public abstract class NotificationListenerService extends Service {
synchronized (mWrapper) {
applyUpdate(update);
try {
- NotificationListenerService.this.onNotificationPosted(sbn);
+ NotificationListenerService.this.onNotificationPosted(sbn, mRankingMap);
} catch (Throwable t) {
Log.w(TAG, "Error running onNotificationPosted", t);
}
@@ -326,7 +372,7 @@ public abstract class NotificationListenerService extends Service {
synchronized (mWrapper) {
applyUpdate(update);
try {
- NotificationListenerService.this.onNotificationRemoved(sbn);
+ NotificationListenerService.this.onNotificationRemoved(sbn, mRankingMap);
} catch (Throwable t) {
Log.w(TAG, "Error running onNotificationRemoved", t);
}
@@ -351,7 +397,7 @@ public abstract class NotificationListenerService extends Service {
synchronized (mWrapper) {
applyUpdate(update);
try {
- NotificationListenerService.this.onNotificationRankingUpdate();
+ NotificationListenerService.this.onNotificationRankingUpdate(mRankingMap);
} catch (Throwable t) {
Log.w(TAG, "Error running onNotificationRankingUpdate", t);
}
@@ -360,7 +406,65 @@ public abstract class NotificationListenerService extends Service {
}
private void applyUpdate(NotificationRankingUpdate update) {
- mRanking = new Ranking(update);
+ mRankingMap = new RankingMap(update);
+ }
+
+ /**
+ * Provides access to ranking information on a currently active
+ * notification.
+ *
+ * <p>
+ * Note that this object is not updated on notification events (such as
+ * {@link #onNotificationPosted(StatusBarNotification, RankingMap)},
+ * {@link #onNotificationRemoved(StatusBarNotification)}, etc.). Make sure
+ * to retrieve a new Ranking from the current {@link RankingMap} whenever
+ * a notification event occurs.
+ */
+ public static class Ranking {
+ private final String mKey;
+ private final int mRank;
+ private final boolean mIsAmbient;
+ private final boolean mIsInterceptedByDnd;
+
+ private Ranking(String key, int rank, boolean isAmbient, boolean isInterceptedByDnd) {
+ mKey = key;
+ mRank = rank;
+ mIsAmbient = isAmbient;
+ mIsInterceptedByDnd = isInterceptedByDnd;
+ }
+
+ /**
+ * Returns the key of the notification this Ranking applies to.
+ */
+ public String getKey() {
+ return mKey;
+ }
+
+ /**
+ * Returns the rank of the notification.
+ *
+ * @return the rank of the notification, that is the 0-based index in
+ * the list of active notifications.
+ */
+ public int getRank() {
+ return mRank;
+ }
+
+ /**
+ * Returns whether the notification is an ambient notification, that is
+ * a notification that doesn't require the user's immediate attention.
+ */
+ public boolean isAmbient() {
+ return mIsAmbient;
+ }
+
+ /**
+ * Returns whether the notification was intercepted by
+ * &quot;Do not disturb&quot;.
+ */
+ public boolean isInterceptedByDoNotDisturb() {
+ return mIsInterceptedByDnd;
+ }
}
/**
@@ -371,11 +475,14 @@ public abstract class NotificationListenerService extends Service {
* Note that this object represents a ranking snapshot that only applies to
* notifications active at the time of retrieval.
*/
- public static class Ranking implements Parcelable {
+ public static class RankingMap implements Parcelable {
private final NotificationRankingUpdate mRankingUpdate;
+ private final ArrayMap<String, Ranking> mRankingCache;
+ private boolean mRankingCacheInitialized;
- private Ranking(NotificationRankingUpdate rankingUpdate) {
+ private RankingMap(NotificationRankingUpdate rankingUpdate) {
mRankingUpdate = rankingUpdate;
+ mRankingCache = new ArrayMap<>(rankingUpdate.getOrderedKeys().length);
}
/**
@@ -389,56 +496,37 @@ public abstract class NotificationListenerService extends Service {
}
/**
- * Returns the rank of the notification with the given key, that is the
- * index of <code>key</code> in the array of keys returned by
- * {@link #getOrderedKeys()}.
+ * Returns the Ranking for the notification with the given key.
*
- * @return The rank of the notification with the given key; -1 when the
- * given key is unknown.
+ * @return the Ranking of the notification with the given key;
+ * <code>null</code> when the key is unknown.
*/
- public int getRank(String key) {
- // TODO: Optimize.
- String[] orderedKeys = mRankingUpdate.getOrderedKeys();
- for (int i = 0; i < orderedKeys.length; i++) {
- if (orderedKeys[i].equals(key)) {
- return i;
+ public Ranking getRanking(String key) {
+ synchronized (mRankingCache) {
+ if (!mRankingCacheInitialized) {
+ initializeRankingCache();
+ mRankingCacheInitialized = true;
}
}
- return -1;
+ return mRankingCache.get(key);
}
- /**
- * Returns whether the notification with the given key was intercepted
- * by &quot;Do not disturb&quot;.
- */
- public boolean isInterceptedByDoNotDisturb(String key) {
- // TODO: Optimize.
- for (String interceptedKey : mRankingUpdate.getDndInterceptedKeys()) {
- if (interceptedKey.equals(key)) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Returns whether the notification with the given key is an ambient
- * notification, that is a notification that doesn't require the user's
- * immediate attention.
- */
- public boolean isAmbient(String key) {
- // TODO: Optimize.
- int firstAmbientIndex = mRankingUpdate.getFirstAmbientIndex();
- if (firstAmbientIndex < 0) {
- return false;
- }
+ private void initializeRankingCache() {
String[] orderedKeys = mRankingUpdate.getOrderedKeys();
- for (int i = firstAmbientIndex; i < orderedKeys.length; i++) {
- if (orderedKeys[i].equals(key)) {
- return true;
+ int firstAmbientIndex = mRankingUpdate.getFirstAmbientIndex();
+ for (int i = 0; i < orderedKeys.length; i++) {
+ String key = orderedKeys[i];
+ boolean isAmbient = firstAmbientIndex > -1 && firstAmbientIndex <= i;
+ boolean isInterceptedByDnd = false;
+ // TODO: Optimize.
+ for (String s : mRankingUpdate.getDndInterceptedKeys()) {
+ if (s.equals(key)) {
+ isInterceptedByDnd = true;
+ break;
+ }
}
+ mRankingCache.put(key, new Ranking(key, i, isAmbient, isInterceptedByDnd));
}
- return false;
}
// ----------- Parcelable
@@ -453,16 +541,16 @@ public abstract class NotificationListenerService extends Service {
dest.writeParcelable(mRankingUpdate, flags);
}
- public static final Creator<Ranking> CREATOR = new Creator<Ranking>() {
+ public static final Creator<RankingMap> CREATOR = new Creator<RankingMap>() {
@Override
- public Ranking createFromParcel(Parcel source) {
+ public RankingMap createFromParcel(Parcel source) {
NotificationRankingUpdate rankingUpdate = source.readParcelable(null);
- return new Ranking(rankingUpdate);
+ return new RankingMap(rankingUpdate);
}
@Override
- public Ranking[] newArray(int size) {
- return new Ranking[size];
+ public RankingMap[] newArray(int size) {
+ return new RankingMap[size];
}
};
}
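
A listener written against the reworked API above can use the RankingMap handed to each callback instead of calling getCurrentRanking(). The sketch below uses only the methods introduced in this change and omits the manifest declaration and bind permission that a real listener also needs.

    import android.service.notification.NotificationListenerService;
    import android.service.notification.StatusBarNotification;
    import android.util.Log;

    public class RankingAwareListener extends NotificationListenerService {
        private static final String TAG = "RankingAwareListener";

        @Override
        public void onNotificationPosted(StatusBarNotification sbn, RankingMap rankingMap) {
            // Look up the ranking for the freshly posted notification by its key.
            Ranking ranking = rankingMap.getRanking(sbn.getKey());
            if (ranking == null) return;
            Log.d(TAG, sbn.getKey() + " rank=" + ranking.getRank()
                    + " ambient=" + ranking.isAmbient()
                    + " dndIntercepted=" + ranking.isInterceptedByDoNotDisturb());
        }
    }
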
diff --git a/core/java/android/service/voice/DspInfo.java b/core/java/android/service/voice/DspInfo.java
new file mode 100644
index 0000000..0862309
--- /dev/null
+++ b/core/java/android/service/voice/DspInfo.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.service.voice;
+
+import java.util.UUID;
+
+/**
+ * Properties of the DSP hardware on the device.
+ * @hide
+ */
+public class DspInfo {
+ /**
+ * Unique voice engine Id (changes with each version).
+ */
+ public final UUID voiceEngineId;
+
+ /**
+ * Human readable voice detection engine implementor.
+ */
+ public final String voiceEngineImplementor;
+ /**
+ * Human readable voice detection engine description.
+ */
+ public final String voiceEngineDescription;
+ /**
+ * Human readable voice detection engine version.
+ */
+ public final int voiceEngineVersion;
+ /**
+ * Rated power consumption when detection is active.
+ */
+ public final int powerConsumptionMw;
+
+ public DspInfo(UUID voiceEngineId, String voiceEngineImplementor,
+ String voiceEngineDescription, int version, int powerConsumptionMw) {
+ this.voiceEngineId = voiceEngineId;
+ this.voiceEngineImplementor = voiceEngineImplementor;
+ this.voiceEngineDescription = voiceEngineDescription;
+ this.voiceEngineVersion = version;
+ this.powerConsumptionMw = powerConsumptionMw;
+ }
+}
diff --git a/core/java/android/service/voice/KeyphraseEnrollmentInfo.java b/core/java/android/service/voice/KeyphraseEnrollmentInfo.java
new file mode 100644
index 0000000..ebe41ce
--- /dev/null
+++ b/core/java/android/service/voice/KeyphraseEnrollmentInfo.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.service.voice;
+
+import android.Manifest;
+import android.content.Intent;
+import android.content.pm.ApplicationInfo;
+import android.content.pm.PackageManager;
+import android.content.pm.ResolveInfo;
+import android.content.res.Resources;
+import android.content.res.TypedArray;
+import android.content.res.XmlResourceParser;
+import android.util.AttributeSet;
+import android.util.Slog;
+import android.util.Xml;
+
+import org.xmlpull.v1.XmlPullParser;
+import org.xmlpull.v1.XmlPullParserException;
+
+import java.io.IOException;
+import java.util.List;
+
+/** @hide */
+public class KeyphraseEnrollmentInfo {
+ private static final String TAG = "KeyphraseEnrollmentInfo";
+ /**
+ * Name under which a Hotword enrollment component publishes information about itself.
+ * This meta-data should reference an XML resource containing a
+ * <code>&lt;{@link
+ * android.R.styleable#VoiceEnrollmentApplication
+ * voice-enrollment-application}&gt;</code> tag.
+ */
+ private static final String VOICE_KEYPHRASE_META_DATA = "android.voice_enrollment";
+ /**
+ * Activity Action: Show activity for managing the keyphrases for hotword detection.
+ * This needs to be defined by an activity that supports enrolling users for hotword/keyphrase
+ * detection.
+ */
+ public static final String ACTION_MANAGE_VOICE_KEYPHRASES =
+ "com.android.intent.action.MANAGE_VOICE_KEYPHRASES";
+ /**
+ * Intent extra: The intent extra for un-enrolling a user for a particular keyphrase.
+ */
+ public static final String EXTRA_VOICE_KEYPHRASE_UNENROLL =
+ "com.android.intent.extra.VOICE_KEYPHRASE_UNENROLL";
+ /**
+ * Intent extra: The hint text to be shown on the voice keyphrase management UI.
+ */
+ public static final String EXTRA_VOICE_KEYPHRASE_HINT_TEXT =
+ "com.android.intent.extra.VOICE_KEYPHRASE_HINT_TEXT";
+ /**
+ * Intent extra: The voice locale to use while managing the keyphrase.
+ */
+ public static final String EXTRA_VOICE_KEYPHRASE_LOCALE =
+ "com.android.intent.extra.VOICE_KEYPHRASE_LOCALE";
+
+ private KeyphraseInfo[] mKeyphrases;
+ private String mEnrollmentPackage;
+ private String mParseError;
+
+ public KeyphraseEnrollmentInfo(PackageManager pm) {
+        // Find the apps that support enrollment for hotword keyphrases,
+        // pick a privileged app, and obtain the information about the supported keyphrases
+ // from its metadata.
+ List<ResolveInfo> ris = pm.queryIntentActivities(
+ new Intent(ACTION_MANAGE_VOICE_KEYPHRASES), PackageManager.MATCH_DEFAULT_ONLY);
+ if (ris == null || ris.isEmpty()) {
+ // No application capable of enrolling for voice keyphrases is present.
+ mParseError = "No enrollment application found";
+ return;
+ }
+
+ boolean found = false;
+ ApplicationInfo ai = null;
+ for (ResolveInfo ri : ris) {
+ try {
+ ai = pm.getApplicationInfo(
+ ri.activityInfo.packageName, PackageManager.GET_META_DATA);
+ if ((ai.flags & ApplicationInfo.FLAG_PRIVILEGED) == 0) {
+ // The application isn't privileged (/system/priv-app).
+ // The enrollment application needs to be a privileged system app.
+                    Slog.w(TAG, ai.packageName + " is not a privileged system app");
+ continue;
+ }
+ if (!Manifest.permission.MANAGE_VOICE_KEYPHRASES.equals(ai.permission)) {
+ // The application trying to manage keyphrases doesn't
+ // require the MANAGE_VOICE_KEYPHRASES permission.
+ Slog.w(TAG, ai.packageName + " does not require MANAGE_VOICE_KEYPHRASES");
+ continue;
+ }
+ mEnrollmentPackage = ai.packageName;
+ found = true;
+ break;
+ } catch (PackageManager.NameNotFoundException e) {
+ Slog.w(TAG, "error parsing voice enrollment meta-data", e);
+ }
+ }
+
+ if (!found) {
+ mKeyphrases = null;
+ mParseError = "No suitable enrollment application found";
+ return;
+ }
+
+ XmlResourceParser parser = null;
+ try {
+ parser = ai.loadXmlMetaData(pm, VOICE_KEYPHRASE_META_DATA);
+ if (parser == null) {
+ mParseError = "No " + VOICE_KEYPHRASE_META_DATA + " meta-data for "
+ + ai.packageName;
+ return;
+ }
+
+ Resources res = pm.getResourcesForApplication(ai);
+ AttributeSet attrs = Xml.asAttributeSet(parser);
+
+ int type;
+ while ((type=parser.next()) != XmlPullParser.END_DOCUMENT
+ && type != XmlPullParser.START_TAG) {
+ }
+
+ String nodeName = parser.getName();
+ if (!"voice-enrollment-application".equals(nodeName)) {
+ mParseError = "Meta-data does not start with voice-enrollment-application tag";
+ return;
+ }
+
+ TypedArray array = res.obtainAttributes(attrs,
+ com.android.internal.R.styleable.VoiceEnrollmentApplication);
+ int searchKeyphraseId = array.getInt(
+ com.android.internal.R.styleable.VoiceEnrollmentApplication_searchKeyphraseId,
+ -1);
+ if (searchKeyphraseId != -1) {
+ String searchKeyphrase = array.getString(com.android.internal.R.styleable
+ .VoiceEnrollmentApplication_searchKeyphrase);
+ String searchKeyphraseSupportedLocales =
+ array.getString(com.android.internal.R.styleable
+ .VoiceEnrollmentApplication_searchKeyphraseSupportedLocales);
+ String[] supportedLocales = new String[0];
+                // Get all the supported locales from the comma-delimited string.
+ if (searchKeyphraseSupportedLocales != null
+ && !searchKeyphraseSupportedLocales.isEmpty()) {
+ supportedLocales = searchKeyphraseSupportedLocales.split(",");
+ }
+ mKeyphrases = new KeyphraseInfo[1];
+ mKeyphrases[0] = new KeyphraseInfo(
+ searchKeyphraseId, searchKeyphrase, supportedLocales);
+ } else {
+ mParseError = "searchKeyphraseId not specified in meta-data";
+ return;
+ }
+ } catch (XmlPullParserException e) {
+ mParseError = "Error parsing keyphrase enrollment meta-data: " + e;
+ Slog.w(TAG, "error parsing keyphrase enrollment meta-data", e);
+ return;
+ } catch (IOException e) {
+ mParseError = "Error parsing keyphrase enrollment meta-data: " + e;
+ Slog.w(TAG, "error parsing keyphrase enrollment meta-data", e);
+ return;
+ } catch (PackageManager.NameNotFoundException e) {
+ mParseError = "Error parsing keyphrase enrollment meta-data: " + e;
+ Slog.w(TAG, "error parsing keyphrase enrollment meta-data", e);
+ return;
+ } finally {
+ if (parser != null) parser.close();
+ }
+ }
+
+ public String getParseError() {
+ return mParseError;
+ }
+
+ /**
+ * @return An array of available keyphrases that can be enrolled on the system.
+ * It may be null if no keyphrases can be enrolled.
+ */
+ public KeyphraseInfo[] getKeyphrases() {
+ return mKeyphrases;
+ }
+
+ /**
+ * Returns an intent to launch an activity that manages the given keyphrase
+ * for the locale.
+ *
+ * @param enroll Indicates if the intent should enroll the user or un-enroll them.
+     * @param keyphrase The keyphrase for which the user needs to be enrolled.
+ * @param locale The locale for which the enrollment needs to be performed.
+ * @return An {@link Intent} to manage the keyphrase. This can be null if managing the
+ * given keyphrase/locale combination isn't possible.
+ */
+ public Intent getManageKeyphraseIntent(boolean enroll, String keyphrase, String locale) {
+ if (mEnrollmentPackage == null || mEnrollmentPackage.isEmpty()) {
+ Slog.w(TAG, "No enrollment application exists");
+ return null;
+ }
+
+ if (isKeyphraseEnrollmentSupported(keyphrase, locale)) {
+ Intent intent = new Intent(ACTION_MANAGE_VOICE_KEYPHRASES)
+ .setPackage(mEnrollmentPackage)
+ .putExtra(EXTRA_VOICE_KEYPHRASE_HINT_TEXT, keyphrase)
+ .putExtra(EXTRA_VOICE_KEYPHRASE_LOCALE, locale);
+ if (!enroll) intent.putExtra(EXTRA_VOICE_KEYPHRASE_UNENROLL, true);
+ return intent;
+ }
+ return null;
+ }
+
+ /**
+     * Indicates if enrollment is supported for the given keyphrase and locale.
+ *
+     * @param keyphrase The keyphrase for which the user needs to be enrolled.
+ * @param locale The locale for which the enrollment needs to be performed.
+ * @return true, if an enrollment client supports the given keyphrase and the given locale.
+ */
+ public boolean isKeyphraseEnrollmentSupported(String keyphrase, String locale) {
+ if (mKeyphrases == null || mKeyphrases.length == 0) {
+ Slog.w(TAG, "Enrollment application doesn't support keyphrases");
+ return false;
+ }
+ for (KeyphraseInfo keyphraseInfo : mKeyphrases) {
+ // Check if the given keyphrase is supported in the locale provided by
+ // the enrollment application.
+ String supportedKeyphrase = keyphraseInfo.keyphrase;
+ if (supportedKeyphrase.equalsIgnoreCase(keyphrase)
+ && keyphraseInfo.supportedLocales.contains(locale)) {
+ return true;
+ }
+ }
+ Slog.w(TAG, "Enrollment application doesn't support the given keyphrase");
+ return false;
+ }
+}
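
As an aside for reviewers, the sketch below shows one way a privileged platform component could drive the API introduced above (check for a parse error, then launch the enrollment activity). It is illustrative only and not part of this change; the EnrollmentLauncher class, its Context wiring, and the startActivity() call are assumptions.

import android.content.Context;
import android.content.Intent;
import android.service.voice.KeyphraseEnrollmentInfo;

class EnrollmentLauncher {
    private final Context mContext;
    private final KeyphraseEnrollmentInfo mInfo;

    EnrollmentLauncher(Context context) {
        mContext = context;
        // Scans for a privileged enrollment app and parses its android.voice_enrollment meta-data.
        mInfo = new KeyphraseEnrollmentInfo(context.getPackageManager());
    }

    void enrollIfPossible(String keyphrase, String locale) {
        if (mInfo.getParseError() != null) {
            return; // No usable enrollment application on this device.
        }
        // Builds an ACTION_MANAGE_VOICE_KEYPHRASES intent targeted at the enrollment package,
        // or returns null if the keyphrase/locale pair isn't supported.
        Intent intent = mInfo.getManageKeyphraseIntent(true /* enroll */, keyphrase, locale);
        if (intent != null) {
            mContext.startActivity(intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK));
        }
    }
}
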
diff --git a/core/java/android/service/voice/KeyphraseInfo.java b/core/java/android/service/voice/KeyphraseInfo.java
new file mode 100644
index 0000000..d266e1a
--- /dev/null
+++ b/core/java/android/service/voice/KeyphraseInfo.java
@@ -0,0 +1,27 @@
+package android.service.voice;
+
+import android.util.ArraySet;
+
+/**
+ * A Voice Keyphrase.
+ * @hide
+ */
+public class KeyphraseInfo {
+ public final int id;
+ public final String keyphrase;
+ public final ArraySet<String> supportedLocales;
+
+ public KeyphraseInfo(int id, String keyphrase, String[] supportedLocales) {
+ this.id = id;
+ this.keyphrase = keyphrase;
+ this.supportedLocales = new ArraySet<String>(supportedLocales.length);
+ for (String locale : supportedLocales) {
+ this.supportedLocales.add(locale);
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "id=" + id + ", keyphrase=" + keyphrase + ", supported-locales=" + supportedLocales;
+ }
+}
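
A small illustrative example (not part of this change) of how the comma-delimited locale list parsed by KeyphraseEnrollmentInfo maps onto this class; the keyphrase id, text, and locales here are made up.

import android.service.voice.KeyphraseInfo;

class KeyphraseInfoExample {
    static boolean isUsEnglishSupported() {
        // Mirrors KeyphraseEnrollmentInfo, which splits the meta-data value on commas.
        String[] locales = "en-US,en-GB,fr-FR".split(",");
        KeyphraseInfo info = new KeyphraseInfo(1, "Hello Android", locales);
        return info.supportedLocales.contains("en-US"); // true
    }
}
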
diff --git a/core/java/android/service/voice/SoundTriggerManager.java b/core/java/android/service/voice/SoundTriggerManager.java
new file mode 100644
index 0000000..2d049b9
--- /dev/null
+++ b/core/java/android/service/voice/SoundTriggerManager.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.service.voice;
+
+import android.hardware.soundtrigger.SoundTrigger;
+import android.hardware.soundtrigger.SoundTrigger.ModuleProperties;
+
+import java.util.ArrayList;
+
+/**
+ * Manager for {@link SoundTrigger} APIs.
+ * Currently this just acts as an abstraction over all SoundTrigger API calls.
+ * @hide
+ */
+public class SoundTriggerManager {
+ /** The {@link DspInfo} for the system, or null if none exists. */
+ public DspInfo dspInfo;
+
+ public SoundTriggerManager() {
+        ArrayList<ModuleProperties> modules = new ArrayList<>();
+ int status = SoundTrigger.listModules(modules);
+ if (status != SoundTrigger.STATUS_OK || modules.size() == 0) {
+ // TODO(sansid, elaurent): Figure out how to handle errors in listing the modules here.
+ dspInfo = null;
+ } else {
+ // TODO(sansid, elaurent): Figure out how to determine which module corresponds to the
+ // DSP hardware.
+ ModuleProperties properties = modules.get(0);
+ dspInfo = new DspInfo(properties.uuid, properties.implementor, properties.description,
+ properties.version, properties.powerConsumptionMw);
+ }
+ }
+
+ /**
+ * @return True, if the keyphrase is supported on DSP for the given locale.
+ */
+ public boolean isKeyphraseSupported(String keyphrase, String locale) {
+        // TODO(sansid): We also need to look into a SoundTrigger API that lets us
+ // query this. For now just return supported if there's a DSP available.
+ return dspInfo != null;
+ }
+
+ /**
+     * @return True, if the keyphrase has been enrolled for the given locale.
+ */
+ public boolean isKeyphraseEnrolled(String keyphrase, String locale) {
+        // TODO(sansid, elaurent): Query SoundTrigger for the currently loaded sound models;
+        // those are the keyphrases that have been enrolled.
+ return false;
+ }
+
+ /**
+ * @return True, if a recognition for the keyphrase is active for the given locale.
+ */
+ public boolean isKeyphraseActive(String keyphrase, String locale) {
+ // TODO(sansid, elaurent): Check if the recognition for the keyphrase is currently active.
+ return false;
+ }
+}
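
For illustration only (not part of this change), a caller might gate hotword features on the DSP information this manager exposes, roughly as sketched below; VoiceInteractionService in the next file does essentially this internally.

import android.service.voice.SoundTriggerManager;

class DspAvailabilityCheck {
    static boolean canListenFor(String keyphrase, String locale) {
        SoundTriggerManager manager = new SoundTriggerManager();
        if (manager.dspInfo == null) {
            return false; // SoundTrigger.listModules() reported no usable DSP module.
        }
        // Currently true whenever a DSP module exists; see the TODO in isKeyphraseSupported().
        return manager.isKeyphraseSupported(keyphrase, locale);
    }
}
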
diff --git a/core/java/android/service/voice/VoiceInteractionService.java b/core/java/android/service/voice/VoiceInteractionService.java
index e15489b..e0329f8 100644
--- a/core/java/android/service/voice/VoiceInteractionService.java
+++ b/core/java/android/service/voice/VoiceInteractionService.java
@@ -17,7 +17,6 @@
package android.service.voice;
import android.annotation.SdkConstant;
-import android.app.Instrumentation;
import android.app.Service;
import android.content.Context;
import android.content.Intent;
@@ -25,8 +24,11 @@ import android.os.Bundle;
import android.os.IBinder;
import android.os.RemoteException;
import android.os.ServiceManager;
+
+import com.android.internal.annotations.VisibleForTesting;
import com.android.internal.app.IVoiceInteractionManagerService;
+
/**
* Top-level service of the current global voice interactor, which is providing
* support for hotwording, the back-end of a {@link android.app.VoiceInteractor}, etc.
@@ -51,6 +53,16 @@ public class VoiceInteractionService extends Service {
public static final String SERVICE_INTERFACE =
"android.service.voice.VoiceInteractionService";
+ // TODO(sansid): Unhide these.
+ /** @hide */
+ public static final int KEYPHRASE_UNAVAILABLE = 0;
+ /** @hide */
+ public static final int KEYPHRASE_UNENROLLED = 1;
+ /** @hide */
+ public static final int KEYPHRASE_ENROLLED = 2;
+ /** @hide */
+ public static final int KEYPHRASE_ACTIVE = 3;
+
/**
* Name under which a VoiceInteractionService component publishes information about itself.
* This meta-data should reference an XML resource containing a
@@ -64,6 +76,9 @@ public class VoiceInteractionService extends Service {
IVoiceInteractionManagerService mSystemService;
+ private SoundTriggerManager mSoundTriggerManager;
+ private KeyphraseEnrollmentInfo mKeyphraseEnrollmentInfo;
+
public void startSession(Bundle args) {
try {
mSystemService.startSession(mInterface, args);
@@ -76,6 +91,8 @@ public class VoiceInteractionService extends Service {
super.onCreate();
mSystemService = IVoiceInteractionManagerService.Stub.asInterface(
ServiceManager.getService(Context.VOICE_INTERACTION_MANAGER_SERVICE));
+ mKeyphraseEnrollmentInfo = new KeyphraseEnrollmentInfo(getPackageManager());
+ mSoundTriggerManager = new SoundTriggerManager();
}
@Override
@@ -85,4 +102,44 @@ public class VoiceInteractionService extends Service {
}
return null;
}
+
+ /**
+ * Gets the state of always-on hotword detection for the given keyphrase and locale
+ * on this system.
+     * Availability indicates whether the hardware on this system is capable of listening
+     * for the given keyphrase.
+     * The return code is one of {@link #KEYPHRASE_UNAVAILABLE}, {@link #KEYPHRASE_UNENROLLED},
+     * {@link #KEYPHRASE_ENROLLED} or {@link #KEYPHRASE_ACTIVE}.
+ *
+ * @param keyphrase The keyphrase whose availability is being checked.
+ * @param locale The locale for which the availability is being checked.
+     * @return The state of always-on hotword detection for the given keyphrase and locale.
+ * TODO(sansid): Unhide this.
+ * @hide
+ */
+ public final int getAlwaysOnKeyphraseAvailability(String keyphrase, String locale) {
+        // The set of available keyphrases is a combination of DSP availability and
+        // the keyphrases that have an enrollment application for them.
+ if (!mSoundTriggerManager.isKeyphraseSupported(keyphrase, locale)
+ || !mKeyphraseEnrollmentInfo.isKeyphraseEnrollmentSupported(keyphrase, locale)) {
+ return KEYPHRASE_UNAVAILABLE;
+ }
+ if (!mSoundTriggerManager.isKeyphraseEnrolled(keyphrase, locale)) {
+ return KEYPHRASE_UNENROLLED;
+ }
+ if (!mSoundTriggerManager.isKeyphraseActive(keyphrase, locale)) {
+ return KEYPHRASE_ENROLLED;
+ } else {
+ return KEYPHRASE_ACTIVE;
+ }
+ }
+
+ /**
+ * @return Details of keyphrases available for enrollment.
+ * @hide
+ */
+ @VisibleForTesting
+ protected final KeyphraseEnrollmentInfo getKeyphraseEnrollmentInfo() {
+ return mKeyphraseEnrollmentInfo;
+ }
}
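
To make the new (still hidden) availability API concrete, here is a minimal sketch of a VoiceInteractionService subclass reacting to the four states; it is illustrative only, the keyphrase and locale literals are made up, and it assumes platform-level access since the method and constants are @hide at this stage.

import android.service.voice.VoiceInteractionService;

public class MyInteractionService extends VoiceInteractionService {
    private static final String KEYPHRASE = "Hello Android"; // hypothetical keyphrase
    private static final String LOCALE = "en-US";

    @Override
    public void onCreate() {
        super.onCreate(); // sets up KeyphraseEnrollmentInfo and SoundTriggerManager
        switch (getAlwaysOnKeyphraseAvailability(KEYPHRASE, LOCALE)) {
            case KEYPHRASE_UNAVAILABLE:
                // No DSP support or no enrollment app; fall back to software hotwording.
                break;
            case KEYPHRASE_UNENROLLED:
                // Could launch the enrollment flow via KeyphraseEnrollmentInfo here.
                break;
            case KEYPHRASE_ENROLLED:
            case KEYPHRASE_ACTIVE:
                // Hardware hotword detection is usable (or already running).
                break;
        }
    }
}
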
diff --git a/core/java/android/speech/tts/RequestConfig.java b/core/java/android/speech/tts/RequestConfig.java
index 4b5385f..84880c0 100644
--- a/core/java/android/speech/tts/RequestConfig.java
+++ b/core/java/android/speech/tts/RequestConfig.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
package android.speech.tts;
import android.media.AudioManager;
diff --git a/core/java/android/speech/tts/RequestConfigHelper.java b/core/java/android/speech/tts/RequestConfigHelper.java
index b25c985..3b5490b 100644
--- a/core/java/android/speech/tts/RequestConfigHelper.java
+++ b/core/java/android/speech/tts/RequestConfigHelper.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
package android.speech.tts;
import android.speech.tts.TextToSpeechClient.EngineStatus;
diff --git a/core/java/android/speech/tts/SynthesisRequestV2.java b/core/java/android/speech/tts/SynthesisRequestV2.java
index 130e3f9..a42aa16 100644
--- a/core/java/android/speech/tts/SynthesisRequestV2.java
+++ b/core/java/android/speech/tts/SynthesisRequestV2.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
package android.speech.tts;
import android.os.Bundle;
diff --git a/core/java/android/speech/tts/VoiceInfo.java b/core/java/android/speech/tts/VoiceInfo.java
index 16b9a97..71629dc 100644
--- a/core/java/android/speech/tts/VoiceInfo.java
+++ b/core/java/android/speech/tts/VoiceInfo.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
package android.speech.tts;
import android.os.Bundle;
diff --git a/core/jni/Android.mk b/core/jni/Android.mk
index de46804..15dfed1 100644
--- a/core/jni/Android.mk
+++ b/core/jni/Android.mk
@@ -89,6 +89,7 @@ LOCAL_SRC_FILES:= \
android_util_Process.cpp \
android_util_StringBlock.cpp \
android_util_XmlBlock.cpp \
+ android/graphics/AndroidPicture.cpp \
android/graphics/AutoDecodeCancel.cpp \
android/graphics/Bitmap.cpp \
android/graphics/BitmapFactory.cpp \
diff --git a/core/jni/AndroidRuntime.cpp b/core/jni/AndroidRuntime.cpp
index f8e6bc3..f2b9bac 100644
--- a/core/jni/AndroidRuntime.cpp
+++ b/core/jni/AndroidRuntime.cpp
@@ -244,9 +244,6 @@ AndroidRuntime::AndroidRuntime(char* argBlockStart, const size_t argBlockLength)
mArgBlockLength(argBlockLength)
{
SkGraphics::Init();
- // this sets our preference for 16bit images during decode
- // in case the src is opaque and 24bit
- SkImageDecoder::SetDeviceConfig(SkBitmap::kRGB_565_Config);
// There is also a global font cache, but its budget is specified in code
// see SkFontHost_android.cpp
@@ -825,8 +822,9 @@ int AndroidRuntime::startVm(JavaVM** pJavaVM, JNIEnv** pEnv)
mOptions.add(opt);
}
- // Whether the profile should start upon app startup or be delayed by some random offset.
- property_get("dalvik.vm.profile.start-immediately", propBuf, "0");
+ // Whether the profile should start upon app startup or be delayed by some random offset
+    // (in seconds) that is bounded between 0 and a fixed value.
+ property_get("dalvik.vm.profile.start-immed", propBuf, "0");
if (propBuf[0] == '1') {
opt.optionString = "-Xprofile-start-immediately";
mOptions.add(opt);
diff --git a/core/jni/android/graphics/AndroidPicture.cpp b/core/jni/android/graphics/AndroidPicture.cpp
new file mode 100644
index 0000000..5977ab2
--- /dev/null
+++ b/core/jni/android/graphics/AndroidPicture.cpp
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AndroidPicture.h"
+#include "SkCanvas.h"
+#include "SkStream.h"
+
+AndroidPicture::AndroidPicture(const AndroidPicture* src) {
+ if (NULL != src) {
+ mWidth = src->width();
+ mHeight = src->height();
+ if (NULL != src->mPicture.get()) {
+ mPicture.reset(SkRef(src->mPicture.get()));
+        } else if (NULL != src->mRecorder.get()) {
+ mPicture.reset(src->makePartialCopy());
+ }
+ } else {
+ mWidth = 0;
+ mHeight = 0;
+ }
+}
+
+SkCanvas* AndroidPicture::beginRecording(int width, int height) {
+ mPicture.reset(NULL);
+ mRecorder.reset(new SkPictureRecorder);
+ mWidth = width;
+ mHeight = height;
+ return mRecorder->beginRecording(width, height, NULL, 0);
+}
+
+void AndroidPicture::endRecording() {
+ if (NULL != mRecorder.get()) {
+ mPicture.reset(mRecorder->endRecording());
+ mRecorder.reset(NULL);
+ }
+}
+
+int AndroidPicture::width() const {
+ if (NULL != mPicture.get()) {
+ SkASSERT(mPicture->width() == mWidth);
+ SkASSERT(mPicture->height() == mHeight);
+ }
+
+ return mWidth;
+}
+
+int AndroidPicture::height() const {
+ if (NULL != mPicture.get()) {
+ SkASSERT(mPicture->width() == mWidth);
+ SkASSERT(mPicture->height() == mHeight);
+ }
+
+ return mHeight;
+}
+
+AndroidPicture* AndroidPicture::CreateFromStream(SkStream* stream) {
+ AndroidPicture* newPict = new AndroidPicture;
+
+ newPict->mPicture.reset(SkPicture::CreateFromStream(stream));
+ if (NULL != newPict->mPicture.get()) {
+ newPict->mWidth = newPict->mPicture->width();
+ newPict->mHeight = newPict->mPicture->height();
+ }
+
+ return newPict;
+}
+
+void AndroidPicture::serialize(SkWStream* stream) const {
+ if (NULL != mRecorder.get()) {
+ SkAutoTDelete<SkPicture> tempPict(this->makePartialCopy());
+ tempPict->serialize(stream);
+ } else if (NULL != mPicture.get()) {
+ mPicture->serialize(stream);
+ } else {
+ SkPicture empty;
+ empty.serialize(stream);
+ }
+}
+
+void AndroidPicture::draw(SkCanvas* canvas) {
+ if (NULL != mRecorder.get()) {
+ this->endRecording();
+ SkASSERT(NULL != mPicture.get());
+ }
+ if (NULL != mPicture.get()) {
+ // TODO: remove this const_cast once pictures are immutable
+ const_cast<SkPicture*>(mPicture.get())->draw(canvas);
+ }
+}
+
+SkPicture* AndroidPicture::makePartialCopy() const {
+ SkASSERT(NULL != mRecorder.get());
+
+ SkPictureRecorder reRecorder;
+
+ SkCanvas* canvas = reRecorder.beginRecording(mWidth, mHeight, NULL, 0);
+ mRecorder->partialReplay(canvas);
+ return reRecorder.endRecording();
+}
diff --git a/core/jni/android/graphics/AndroidPicture.h b/core/jni/android/graphics/AndroidPicture.h
new file mode 100644
index 0000000..f434941
--- /dev/null
+++ b/core/jni/android/graphics/AndroidPicture.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_PICTURE_H
+#define ANDROID_PICTURE_H
+
+#include "SkPicture.h"
+#include "SkPictureRecorder.h"
+#include "SkRefCnt.h"
+#include "SkTemplates.h"
+
+class SkCanvas;
+class SkPicture;
+class SkPictureRecorder;
+class SkStream;
+class SkWStream;
+
+// Skia's SkPicture class has been split into an SkPictureRecorder
+// and an SkPicture. AndroidPicture recreates the functionality
+// of the old SkPicture interface by flip-flopping between the two
+// new classes.
+class AndroidPicture {
+public:
+ explicit AndroidPicture(const AndroidPicture* src = NULL);
+
+ SkCanvas* beginRecording(int width, int height);
+
+ void endRecording();
+
+ int width() const;
+
+ int height() const;
+
+ static AndroidPicture* CreateFromStream(SkStream* stream);
+
+ void serialize(SkWStream* stream) const;
+
+ void draw(SkCanvas* canvas);
+
+private:
+ int mWidth;
+ int mHeight;
+ SkAutoTUnref<const SkPicture> mPicture;
+ SkAutoTDelete<SkPictureRecorder> mRecorder;
+
+ // Make a copy of a picture that is in the midst of being recorded. The
+ // resulting picture will have balanced saves and restores.
+ SkPicture* makePartialCopy() const;
+};
+#endif // ANDROID_PICTURE_H
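
For orientation: this native class backs the public android.graphics.Picture API (via the JNI glue in Picture.cpp below), whose observable behavior is unchanged by the refactor. A minimal Java-level usage sketch, illustrative only:

import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Picture;

class PictureExample {
    // Record some drawing commands, then replay them onto another canvas.
    static Picture recordCircle() {
        Picture picture = new Picture();
        // Native side: creates the SkPictureRecorder and hands back its recording canvas.
        Canvas recordingCanvas = picture.beginRecording(100, 100);
        Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
        recordingCanvas.drawCircle(50, 50, 40, paint);
        // Native side: endRecording() swaps the recorder for an immutable SkPicture.
        picture.endRecording();
        return picture;
    }

    static void replay(Canvas target) {
        // Native side: AndroidPicture::draw() replays the recorded SkPicture.
        target.drawPicture(recordCircle());
    }
}
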
diff --git a/core/jni/android/graphics/Bitmap.cpp b/core/jni/android/graphics/Bitmap.cpp
index 0328517..9998995 100644
--- a/core/jni/android/graphics/Bitmap.cpp
+++ b/core/jni/android/graphics/Bitmap.cpp
@@ -361,24 +361,50 @@ static jboolean Bitmap_recycle(JNIEnv* env, jobject, jlong bitmapHandle) {
}
static void Bitmap_reconfigure(JNIEnv* env, jobject clazz, jlong bitmapHandle,
- jint width, jint height, jint configHandle, jint allocSize) {
+ jint width, jint height, jint configHandle, jint allocSize,
+ jboolean requestPremul) {
SkBitmap* bitmap = reinterpret_cast<SkBitmap*>(bitmapHandle);
SkBitmap::Config config = static_cast<SkBitmap::Config>(configHandle);
- if (width * height * SkBitmap::ComputeBytesPerPixel(config) > allocSize) {
+ SkColorType colorType = SkBitmapConfigToColorType(config);
+
+ // ARGB_4444 is a deprecated format, convert automatically to 8888
+ if (colorType == kARGB_4444_SkColorType) {
+ colorType = kN32_SkColorType;
+ }
+
+ if (width * height * SkColorTypeBytesPerPixel(colorType) > allocSize) {
// done in native as there's no way to get BytesPerPixel in Java
doThrowIAE(env, "Bitmap not large enough to support new configuration");
return;
}
SkPixelRef* ref = bitmap->pixelRef();
- SkSafeRef(ref);
- bitmap->setConfig(config, width, height);
+ ref->ref();
+ SkAlphaType alphaType;
+ if (bitmap->colorType() != kRGB_565_SkColorType
+ && bitmap->alphaType() == kOpaque_SkAlphaType) {
+ // If the original bitmap was set to opaque, keep that setting, unless it
+ // was 565, which is required to be opaque.
+ alphaType = kOpaque_SkAlphaType;
+ } else {
+ // Otherwise respect the premultiplied request.
+ alphaType = requestPremul ? kPremul_SkAlphaType : kUnpremul_SkAlphaType;
+ }
+ bitmap->setInfo(SkImageInfo::Make(width, height, colorType, alphaType));
+ // FIXME: Skia thinks of an SkPixelRef as having a constant SkImageInfo (except for
+ // its alphatype), so it would make more sense from Skia's perspective to create a
+ // new SkPixelRef. That said, libhwui uses the pointer to the SkPixelRef as a key
+ // for its cache, so it won't realize this is the same Java Bitmap.
+ SkImageInfo& info = const_cast<SkImageInfo&>(ref->info());
+    // Use the updated info from the SkBitmap, which may have corrected an invalid alphatype.
+ // (e.g. 565 non-opaque)
+ info = bitmap->info();
bitmap->setPixelRef(ref);
// notifyPixelsChanged will increment the generation ID even though the actual pixel data
// hasn't been touched. This signals the renderer that the bitmap (including width, height,
- // and config) has changed.
+ // colortype and alphatype) has changed.
ref->notifyPixelsChanged();
- SkSafeUnref(ref);
+ ref->unref();
}
// These must match the int values in Bitmap.java
@@ -799,7 +825,7 @@ static JNINativeMethod gBitmapMethods[] = {
(void*)Bitmap_copy },
{ "nativeDestructor", "(J)V", (void*)Bitmap_destructor },
{ "nativeRecycle", "(J)Z", (void*)Bitmap_recycle },
- { "nativeReconfigure", "(JIIII)V", (void*)Bitmap_reconfigure },
+ { "nativeReconfigure", "(JIIIIZ)V", (void*)Bitmap_reconfigure },
{ "nativeCompress", "(JIILjava/io/OutputStream;[B)Z",
(void*)Bitmap_compress },
{ "nativeErase", "(JI)V", (void*)Bitmap_erase },
diff --git a/core/jni/android/graphics/Canvas.cpp b/core/jni/android/graphics/Canvas.cpp
index 3b44f97..6de3b9e 100644
--- a/core/jni/android/graphics/Canvas.cpp
+++ b/core/jni/android/graphics/Canvas.cpp
@@ -19,11 +19,14 @@
#include <android_runtime/AndroidRuntime.h>
#include "SkCanvas.h"
+#include "SkClipStack.h"
#include "SkDevice.h"
+#include "SkDeque.h"
#include "SkDrawFilter.h"
#include "SkGraphics.h"
#include "SkPorterDuff.h"
#include "SkShader.h"
+#include "SkTArray.h"
#include "SkTemplates.h"
#ifdef USE_MINIKIN
@@ -43,11 +46,42 @@
namespace android {
+class ClipCopier : public SkCanvas::ClipVisitor {
+public:
+ ClipCopier(SkCanvas* dstCanvas) : m_dstCanvas(dstCanvas) {}
+
+ virtual void clipRect(const SkRect& rect, SkRegion::Op op, bool antialias) {
+ m_dstCanvas->clipRect(rect, op, antialias);
+ }
+ virtual void clipRRect(const SkRRect& rrect, SkRegion::Op op, bool antialias) {
+ m_dstCanvas->clipRRect(rrect, op, antialias);
+ }
+ virtual void clipPath(const SkPath& path, SkRegion::Op op, bool antialias) {
+ m_dstCanvas->clipPath(path, op, antialias);
+ }
+
+private:
+ SkCanvas* m_dstCanvas;
+};
+
// Holds an SkCanvas reference plus additional native data.
class NativeCanvasWrapper {
+private:
+ struct SaveRec {
+ int saveCount;
+ SkCanvas::SaveFlags saveFlags;
+ };
+
public:
NativeCanvasWrapper(SkCanvas* canvas)
- : mCanvas(canvas) { }
+ : mCanvas(canvas)
+ , mSaveStack(NULL) {
+ SkASSERT(canvas);
+ }
+
+ ~NativeCanvasWrapper() {
+ delete mSaveStack;
+ }
SkCanvas* getCanvas() const {
return mCanvas.get();
@@ -56,28 +90,127 @@ public:
void setCanvas(SkCanvas* canvas) {
SkASSERT(canvas);
mCanvas.reset(canvas);
- }
-private:
- SkAutoTUnref<SkCanvas> mCanvas;
-};
+ delete mSaveStack;
+ mSaveStack = NULL;
+ }
-class ClipCopier : public SkCanvas::ClipVisitor {
-public:
- ClipCopier(SkCanvas* dstCanvas) : m_dstCanvas(dstCanvas) {}
+ int save(SkCanvas::SaveFlags flags) {
+ int count = mCanvas->save();
+ recordPartialSave(flags);
+ return count;
+ }
- virtual void clipRect(const SkRect& rect, SkRegion::Op op, bool antialias) {
- m_dstCanvas->clipRect(rect, op, antialias);
+ int saveLayer(const SkRect* bounds, const SkPaint* paint,
+ SkCanvas::SaveFlags flags) {
+ int count = mCanvas->saveLayer(bounds, paint,
+ static_cast<SkCanvas::SaveFlags>(flags | SkCanvas::kMatrixClip_SaveFlag));
+ recordPartialSave(flags);
+ return count;
}
- virtual void clipRRect(const SkRRect& rrect, SkRegion::Op op, bool antialias) {
- m_dstCanvas->clipRRect(rrect, op, antialias);
+
+ int saveLayerAlpha(const SkRect* bounds, U8CPU alpha,
+ SkCanvas::SaveFlags flags) {
+ int count = mCanvas->saveLayerAlpha(bounds, alpha,
+ static_cast<SkCanvas::SaveFlags>(flags | SkCanvas::kMatrixClip_SaveFlag));
+ recordPartialSave(flags);
+ return count;
}
- virtual void clipPath(const SkPath& path, SkRegion::Op op, bool antialias) {
- m_dstCanvas->clipPath(path, op, antialias);
+
+ void restore() {
+ const SaveRec* rec = (NULL == mSaveStack)
+ ? NULL
+ : static_cast<SaveRec*>(mSaveStack->back());
+ int currentSaveCount = mCanvas->getSaveCount() - 1;
+ SkASSERT(NULL == rec || currentSaveCount >= rec->saveCount);
+
+ if (NULL == rec || rec->saveCount != currentSaveCount) {
+ // Fast path - no record for this frame.
+ mCanvas->restore();
+ return;
+ }
+
+ bool preserveMatrix = !(rec->saveFlags & SkCanvas::kMatrix_SaveFlag);
+ bool preserveClip = !(rec->saveFlags & SkCanvas::kClip_SaveFlag);
+
+ SkMatrix savedMatrix;
+ if (preserveMatrix) {
+ savedMatrix = mCanvas->getTotalMatrix();
+ }
+
+ SkTArray<SkClipStack::Element> savedClips;
+ if (preserveClip) {
+ saveClipsForFrame(savedClips, currentSaveCount);
+ }
+
+ mCanvas->restore();
+
+ if (preserveMatrix) {
+ mCanvas->setMatrix(savedMatrix);
+ }
+
+ if (preserveClip && !savedClips.empty()) {
+ applyClips(savedClips);
+ }
+
+ mSaveStack->pop_back();
}
private:
- SkCanvas* m_dstCanvas;
+ void recordPartialSave(SkCanvas::SaveFlags flags) {
+        // A partial save is a save operation which doesn't capture the full canvas state
+        // (either kMatrix_SaveFlag or kClip_SaveFlag is missing).
+
+        // Mask out non-canvas-state bits.
+ flags = static_cast<SkCanvas::SaveFlags>(flags & SkCanvas::kMatrixClip_SaveFlag);
+
+ if (SkCanvas::kMatrixClip_SaveFlag == flags) {
+ // not a partial save.
+ return;
+ }
+
+ if (NULL == mSaveStack) {
+ mSaveStack = new SkDeque(sizeof(struct SaveRec), 8);
+ }
+
+ SaveRec* rec = static_cast<SaveRec*>(mSaveStack->push_back());
+ // Store the save counter in the SkClipStack domain.
+ // (0-based, equal to the number of save ops on the stack).
+ rec->saveCount = mCanvas->getSaveCount() - 1;
+ rec->saveFlags = flags;
+ }
+
+ void saveClipsForFrame(SkTArray<SkClipStack::Element>& clips,
+ int frameSaveCount) {
+ SkClipStack::Iter clipIterator(*mCanvas->getClipStack(),
+ SkClipStack::Iter::kTop_IterStart);
+ while (const SkClipStack::Element* elem = clipIterator.next()) {
+ if (elem->getSaveCount() < frameSaveCount) {
+ // done with the current frame.
+ break;
+ }
+ SkASSERT(elem->getSaveCount() == frameSaveCount);
+ clips.push_back(*elem);
+ }
+ }
+
+ void applyClips(const SkTArray<SkClipStack::Element>& clips) {
+ ClipCopier clipCopier(mCanvas);
+
+ // The clip stack stores clips in device space.
+ SkMatrix origMatrix = mCanvas->getTotalMatrix();
+ mCanvas->resetMatrix();
+
+ // We pushed the clips in reverse order.
+ for (int i = clips.count() - 1; i >= 0; --i) {
+ clips[i].replay(&clipCopier);
+ }
+
+ mCanvas->setMatrix(origMatrix);
+ }
+
+ SkAutoTUnref<SkCanvas> mCanvas;
+ SkDeque* mSaveStack; // lazily allocated, tracks partial saves.
};
// Returns true if the SkCanvas's clip is non-empty.
@@ -88,11 +221,15 @@ static jboolean hasNonEmptyClip(const SkCanvas& canvas) {
class SkCanvasGlue {
public:
+ // Get the native wrapper for a given handle.
+ static inline NativeCanvasWrapper* getNativeWrapper(jlong nativeHandle) {
+ SkASSERT(nativeHandle);
+ return reinterpret_cast<NativeCanvasWrapper*>(nativeHandle);
+ }
// Get the SkCanvas for a given native handle.
static inline SkCanvas* getNativeCanvas(jlong nativeHandle) {
- SkASSERT(nativeHandle);
- NativeCanvasWrapper* wrapper = reinterpret_cast<NativeCanvasWrapper*>(nativeHandle);
+ NativeCanvasWrapper* wrapper = getNativeWrapper(nativeHandle);
SkCanvas* canvas = wrapper->getCanvas();
SkASSERT(canvas);
@@ -186,56 +323,56 @@ public:
}
static jint save(JNIEnv*, jobject, jlong canvasHandle, jint flagsHandle) {
- SkCanvas* canvas = getNativeCanvas(canvasHandle);
+ NativeCanvasWrapper* wrapper = getNativeWrapper(canvasHandle);
SkCanvas::SaveFlags flags = static_cast<SkCanvas::SaveFlags>(flagsHandle);
- return static_cast<jint>(canvas->save(flags));
+ return static_cast<jint>(wrapper->save(flags));
}
static jint saveLayer(JNIEnv* env, jobject, jlong canvasHandle,
jfloat l, jfloat t, jfloat r, jfloat b,
- jlong paintHandle, jint flags) {
- SkCanvas* canvas = getNativeCanvas(canvasHandle);
+ jlong paintHandle, jint flagsHandle) {
+ NativeCanvasWrapper* wrapper = getNativeWrapper(canvasHandle);
SkPaint* paint = reinterpret_cast<SkPaint*>(paintHandle);
+ SkCanvas::SaveFlags flags = static_cast<SkCanvas::SaveFlags>(flagsHandle);
SkRect bounds;
bounds.set(l, t, r, b);
- int result = canvas->saveLayer(&bounds, paint,
- static_cast<SkCanvas::SaveFlags>(flags));
- return static_cast<jint>(result);
+ return static_cast<jint>(wrapper->saveLayer(&bounds, paint, flags));
}
static jint saveLayerAlpha(JNIEnv* env, jobject, jlong canvasHandle,
jfloat l, jfloat t, jfloat r, jfloat b,
- jint alpha, jint flags) {
- SkCanvas* canvas = getNativeCanvas(canvasHandle);
+ jint alpha, jint flagsHandle) {
+ NativeCanvasWrapper* wrapper = getNativeWrapper(canvasHandle);
+ SkCanvas::SaveFlags flags = static_cast<SkCanvas::SaveFlags>(flagsHandle);
SkRect bounds;
bounds.set(l, t, r, b);
- int result = canvas->saveLayerAlpha(&bounds, alpha,
- static_cast<SkCanvas::SaveFlags>(flags));
- return static_cast<jint>(result);
+ return static_cast<jint>(wrapper->saveLayerAlpha(&bounds, alpha, flags));
}
static void restore(JNIEnv* env, jobject, jlong canvasHandle) {
- SkCanvas* canvas = getNativeCanvas(canvasHandle);
- if (canvas->getSaveCount() <= 1) { // cannot restore anymore
+ NativeCanvasWrapper* wrapper = getNativeWrapper(canvasHandle);
+ if (wrapper->getCanvas()->getSaveCount() <= 1) { // cannot restore anymore
doThrowISE(env, "Underflow in restore");
return;
}
- canvas->restore();
+ wrapper->restore();
}
static jint getSaveCount(JNIEnv*, jobject, jlong canvasHandle) {
- SkCanvas* canvas = getNativeCanvas(canvasHandle);
- return static_cast<jint>(canvas->getSaveCount());
+ return static_cast<jint>(getNativeCanvas(canvasHandle)->getSaveCount());
}
static void restoreToCount(JNIEnv* env, jobject, jlong canvasHandle,
jint restoreCount) {
- SkCanvas* canvas = getNativeCanvas(canvasHandle);
+ NativeCanvasWrapper* wrapper = getNativeWrapper(canvasHandle);
if (restoreCount < 1) {
doThrowIAE(env, "Underflow in restoreToCount");
return;
}
- canvas->restoreToCount(restoreCount);
+
+ while (wrapper->getCanvas()->getSaveCount() > restoreCount) {
+ wrapper->restore();
+ }
}
static void translate(JNIEnv*, jobject, jlong canvasHandle,
diff --git a/core/jni/android/graphics/Graphics.cpp b/core/jni/android/graphics/Graphics.cpp
index 64ad223..a4337cc 100644
--- a/core/jni/android/graphics/Graphics.cpp
+++ b/core/jni/android/graphics/Graphics.cpp
@@ -3,6 +3,7 @@
#include "jni.h"
#include "JNIHelp.h"
#include "GraphicsJNI.h"
+#include "AndroidPicture.h"
#include "SkCanvas.h"
#include "SkDevice.h"
@@ -345,13 +346,13 @@ android::TypefaceImpl* GraphicsJNI::getNativeTypeface(JNIEnv* env, jobject paint
return p;
}
-SkPicture* GraphicsJNI::getNativePicture(JNIEnv* env, jobject picture)
+AndroidPicture* GraphicsJNI::getNativePicture(JNIEnv* env, jobject picture)
{
SkASSERT(env);
SkASSERT(picture);
SkASSERT(env->IsInstanceOf(picture, gPicture_class));
jlong pictureHandle = env->GetLongField(picture, gPicture_nativeInstanceID);
- SkPicture* p = reinterpret_cast<SkPicture*>(pictureHandle);
+ AndroidPicture* p = reinterpret_cast<AndroidPicture*>(pictureHandle);
SkASSERT(p);
return p;
}
diff --git a/core/jni/android/graphics/GraphicsJNI.h b/core/jni/android/graphics/GraphicsJNI.h
index 73dd11b..2e2f920 100644
--- a/core/jni/android/graphics/GraphicsJNI.h
+++ b/core/jni/android/graphics/GraphicsJNI.h
@@ -14,7 +14,7 @@
class SkBitmapRegionDecoder;
class SkCanvas;
class SkPaint;
-class SkPicture;
+class AndroidPicture;
class GraphicsJNI {
public:
@@ -50,7 +50,7 @@ public:
static SkPaint* getNativePaint(JNIEnv*, jobject paint);
static android::TypefaceImpl* getNativeTypeface(JNIEnv*, jobject paint);
static SkBitmap* getNativeBitmap(JNIEnv*, jobject bitmap);
- static SkPicture* getNativePicture(JNIEnv*, jobject picture);
+ static AndroidPicture* getNativePicture(JNIEnv*, jobject picture);
static SkRegion* getNativeRegion(JNIEnv*, jobject region);
// Given the 'native' long held by the Rasterizer.java object, return a
diff --git a/core/jni/android/graphics/Picture.cpp b/core/jni/android/graphics/Picture.cpp
index a8a3dae..0683f73 100644
--- a/core/jni/android/graphics/Picture.cpp
+++ b/core/jni/android/graphics/Picture.cpp
@@ -17,9 +17,9 @@
#include "jni.h"
#include "GraphicsJNI.h"
#include <android_runtime/AndroidRuntime.h>
+#include "AndroidPicture.h"
#include "SkCanvas.h"
-#include "SkPicture.h"
#include "SkStream.h"
#include "SkTemplates.h"
#include "CreateJavaOutputStreamAdaptor.h"
@@ -29,45 +29,41 @@ namespace android {
class SkPictureGlue {
public:
static jlong newPicture(JNIEnv* env, jobject, jlong srcHandle) {
- const SkPicture* src = reinterpret_cast<SkPicture*>(srcHandle);
- if (src) {
- return reinterpret_cast<jlong>(new SkPicture(*src));
- } else {
- return reinterpret_cast<jlong>(new SkPicture);
- }
+ const AndroidPicture* src = reinterpret_cast<AndroidPicture*>(srcHandle);
+ return reinterpret_cast<jlong>(new AndroidPicture(src));
}
static jlong deserialize(JNIEnv* env, jobject, jobject jstream,
jbyteArray jstorage) {
- SkPicture* picture = NULL;
+ AndroidPicture* picture = NULL;
SkStream* strm = CreateJavaInputStreamAdaptor(env, jstream, jstorage);
if (strm) {
- picture = SkPicture::CreateFromStream(strm);
+ picture = AndroidPicture::CreateFromStream(strm);
delete strm;
}
return reinterpret_cast<jlong>(picture);
}
static void killPicture(JNIEnv* env, jobject, jlong pictureHandle) {
- SkPicture* picture = reinterpret_cast<SkPicture*>(pictureHandle);
+ AndroidPicture* picture = reinterpret_cast<AndroidPicture*>(pictureHandle);
SkASSERT(picture);
- picture->unref();
+ delete picture;
}
static void draw(JNIEnv* env, jobject, jlong canvasHandle,
jlong pictureHandle) {
SkCanvas* canvas = GraphicsJNI::getNativeCanvas(canvasHandle);
- SkPicture* picture = reinterpret_cast<SkPicture*>(pictureHandle);
+ AndroidPicture* picture = reinterpret_cast<AndroidPicture*>(pictureHandle);
SkASSERT(canvas);
SkASSERT(picture);
picture->draw(canvas);
}
static jboolean serialize(JNIEnv* env, jobject, jlong pictureHandle,
- jobject jstream, jbyteArray jstorage) {
- SkPicture* picture = reinterpret_cast<SkPicture*>(pictureHandle);
+ jobject jstream, jbyteArray jstorage) {
+ AndroidPicture* picture = reinterpret_cast<AndroidPicture*>(pictureHandle);
SkWStream* strm = CreateJavaOutputStreamAdaptor(env, jstream, jstorage);
-
+
if (NULL != strm) {
picture->serialize(strm);
delete strm;
@@ -78,19 +74,21 @@ public:
static jint getWidth(JNIEnv* env, jobject jpic) {
NPE_CHECK_RETURN_ZERO(env, jpic);
- int width = GraphicsJNI::getNativePicture(env, jpic)->width();
+ AndroidPicture* pict = GraphicsJNI::getNativePicture(env, jpic);
+ int width = pict->width();
return static_cast<jint>(width);
}
static jint getHeight(JNIEnv* env, jobject jpic) {
NPE_CHECK_RETURN_ZERO(env, jpic);
- int height = GraphicsJNI::getNativePicture(env, jpic)->height();
+ AndroidPicture* pict = GraphicsJNI::getNativePicture(env, jpic);
+ int height = pict->height();
return static_cast<jint>(height);
}
static jlong beginRecording(JNIEnv* env, jobject, jlong pictHandle,
- jint w, jint h) {
- SkPicture* pict = reinterpret_cast<SkPicture*>(pictHandle);
+ jint w, jint h) {
+ AndroidPicture* pict = reinterpret_cast<AndroidPicture*>(pictHandle);
// beginRecording does not ref its return value, it just returns it.
SkCanvas* canvas = pict->beginRecording(w, h);
// the java side will wrap this guy in a Canvas.java, which will call
@@ -101,7 +99,7 @@ public:
}
static void endRecording(JNIEnv* env, jobject, jlong pictHandle) {
- SkPicture* pict = reinterpret_cast<SkPicture*>(pictHandle);
+ AndroidPicture* pict = reinterpret_cast<AndroidPicture*>(pictHandle);
pict->endRecording();
}
};
diff --git a/core/jni/android/graphics/pdf/PdfDocument.cpp b/core/jni/android/graphics/pdf/PdfDocument.cpp
index d54aaa8..3812c27 100644
--- a/core/jni/android/graphics/pdf/PdfDocument.cpp
+++ b/core/jni/android/graphics/pdf/PdfDocument.cpp
@@ -24,6 +24,7 @@
#include "SkCanvas.h"
#include "SkDocument.h"
#include "SkPicture.h"
+#include "SkPictureRecorder.h"
#include "SkStream.h"
#include "SkRect.h"
@@ -32,15 +33,22 @@ namespace android {
struct PageRecord {
PageRecord(int width, int height, const SkRect& contentRect)
- : mPicture(new SkPicture()), mWidth(width), mHeight(height) {
+ : mPictureRecorder(new SkPictureRecorder())
+ , mPicture(NULL)
+ , mWidth(width)
+ , mHeight(height) {
mContentRect = contentRect;
}
~PageRecord() {
- mPicture->unref();
+ delete mPictureRecorder;
+ if (NULL != mPicture) {
+ mPicture->unref();
+ }
}
- SkPicture* const mPicture;
+ SkPictureRecorder* mPictureRecorder;
+ SkPicture* mPicture;
const int mWidth;
const int mHeight;
SkRect mContentRect;
@@ -62,8 +70,8 @@ public:
mPages.push_back(page);
mCurrentPage = page;
- SkCanvas* canvas = page->mPicture->beginRecording(
- contentRect.width(), contentRect.height(), 0);
+ SkCanvas* canvas = page->mPictureRecorder->beginRecording(
+ contentRect.width(), contentRect.height(), NULL, 0);
// We pass this canvas to Java where it is used to construct
// a Java Canvas object which dereferences the pointer when it
@@ -75,7 +83,11 @@ public:
void finishPage() {
assert(mCurrentPage != NULL);
- mCurrentPage->mPicture->endRecording();
+ assert(mCurrentPage->mPictureRecorder != NULL);
+ assert(mCurrentPage->mPicture == NULL);
+ mCurrentPage->mPicture = mCurrentPage->mPictureRecorder->endRecording();
+ delete mCurrentPage->mPictureRecorder;
+ mCurrentPage->mPictureRecorder = NULL;
mCurrentPage = NULL;
}
@@ -89,7 +101,7 @@ public:
canvas->clipRect(page->mContentRect);
canvas->translate(page->mContentRect.left(), page->mContentRect.top());
- canvas->drawPicture(*page->mPicture);
+ canvas->drawPicture(page->mPicture);
document->endPage();
}
@@ -97,11 +109,10 @@ public:
}
void close() {
+ assert(NULL == mCurrentPage);
for (unsigned i = 0; i < mPages.size(); i++) {
delete mPages[i];
}
- delete mCurrentPage;
- mCurrentPage = NULL;
}
private:
diff --git a/core/jni/android_media_AudioSystem.cpp b/core/jni/android_media_AudioSystem.cpp
index bf47dd3..ee4c619 100644
--- a/core/jni/android_media_AudioSystem.cpp
+++ b/core/jni/android_media_AudioSystem.cpp
@@ -244,6 +244,12 @@ android_media_AudioSystem_isSourceActive(JNIEnv *env, jobject thiz, jint source)
}
static jint
+android_media_AudioSystem_newAudioSessionId(JNIEnv *env, jobject thiz)
+{
+ return AudioSystem::newAudioSessionId();
+}
+
+static jint
android_media_AudioSystem_setParameters(JNIEnv *env, jobject thiz, jstring keyValuePairs)
{
const jchar* c_keyValuePairs = env->GetStringCritical(keyValuePairs, 0);
@@ -1295,6 +1301,7 @@ static JNINativeMethod gMethods[] = {
{"isStreamActive", "(II)Z", (void *)android_media_AudioSystem_isStreamActive},
{"isStreamActiveRemotely","(II)Z", (void *)android_media_AudioSystem_isStreamActiveRemotely},
{"isSourceActive", "(I)Z", (void *)android_media_AudioSystem_isSourceActive},
+ {"newAudioSessionId", "()I", (void *)android_media_AudioSystem_newAudioSessionId},
{"setDeviceConnectionState", "(IILjava/lang/String;)I", (void *)android_media_AudioSystem_setDeviceConnectionState},
{"getDeviceConnectionState", "(ILjava/lang/String;)I", (void *)android_media_AudioSystem_getDeviceConnectionState},
{"setPhoneState", "(I)I", (void *)android_media_AudioSystem_setPhoneState},
diff --git a/core/jni/android_view_Surface.cpp b/core/jni/android_view_Surface.cpp
index 11f87cc..3d14aaf 100644
--- a/core/jni/android_view_Surface.cpp
+++ b/core/jni/android_view_Surface.cpp
@@ -95,6 +95,7 @@ sp<Surface> android_view_Surface_getSurface(JNIEnv* env, jobject surfaceObj) {
env->GetLongField(surfaceObj, gSurfaceClassInfo.mNativeObject));
env->MonitorExit(lock);
}
+ env->DeleteLocalRef(lock);
return sur;
}
diff --git a/core/res/AndroidManifest.xml b/core/res/AndroidManifest.xml
index f3b5ccf..3067cdd0 100644
--- a/core/res/AndroidManifest.xml
+++ b/core/res/AndroidManifest.xml
@@ -2071,6 +2071,14 @@
android:description="@string/permdesc_bindVoiceInteraction"
android:protectionLevel="signature" />
+ <!-- Must be required by hotword enrollment application,
+ to ensure that only the system can interact with it.
+ @hide <p>Not for use by third-party applications.</p> -->
+ <permission android:name="android.permission.MANAGE_VOICE_KEYPHRASES"
+ android:label="@string/permlab_manageVoiceKeyphrases"
+ android:description="@string/permdesc_manageVoiceKeyphrases"
+ android:protectionLevel="signature|system" />
+
<!-- Must be required by a {@link com.android.media.remotedisplay.RemoteDisplayProvider},
to ensure that only the system can bind to it.
@hide -->
@@ -2699,7 +2707,8 @@
android:theme="@style/Theme.Holo.Dialog.Alert"
android:finishOnCloseSystemDialogs="true"
android:excludeFromRecents="true"
- android:multiprocess="true">
+ android:multiprocess="true"
+ android:documentLaunchMode="never">
<intent-filter>
<action android:name="android.intent.action.CHOOSER" />
<category android:name="android.intent.category.DEFAULT" />
diff --git a/core/res/res/layout/notification_template_material_big_base.xml b/core/res/res/layout/notification_template_material_big_base.xml
index bdf27c8..0564a8f 100644
--- a/core/res/res/layout/notification_template_material_big_base.xml
+++ b/core/res/res/layout/notification_template_material_big_base.xml
@@ -145,7 +145,7 @@
android:layout_marginStart="8dp"
android:layout_marginEnd="8dp"
android:visibility="gone"
- style="@style/Widget.Material.Light.ProgressBar.Horizontal"
+ style="@style/Widget.StatusBar.Material.ProgressBar"
/>
</LinearLayout>
<ImageView
diff --git a/core/res/res/layout/notification_template_material_big_media.xml b/core/res/res/layout/notification_template_material_big_media.xml
index c89b9f9..f8e1986 100644
--- a/core/res/res/layout/notification_template_material_big_media.xml
+++ b/core/res/res/layout/notification_template_material_big_media.xml
@@ -38,7 +38,6 @@
android:minHeight="@dimen/notification_large_icon_height"
android:paddingTop="2dp"
android:orientation="vertical"
- android:background="@color/notification_media_info_bg"
>
<LinearLayout
android:id="@+id/line1"
@@ -147,7 +146,7 @@
android:layout_height="6dp"
android:layout_gravity="top"
android:visibility="gone"
- style="@style/Widget.StatusBar.Material.ProgressBar"
+ style="@style/Widget.StatusBar.Material.ProgressBar.Media"
/>
</FrameLayout>
</LinearLayout>
diff --git a/core/res/res/layout/notification_template_material_big_text.xml b/core/res/res/layout/notification_template_material_big_text.xml
index 6f8c3a9..1de5add 100644
--- a/core/res/res/layout/notification_template_material_big_text.xml
+++ b/core/res/res/layout/notification_template_material_big_text.xml
@@ -99,7 +99,7 @@
android:layout_marginEnd="8dp"
android:visibility="gone"
android:layout_weight="0"
- style="@style/Widget.Material.Light.ProgressBar.Horizontal"
+ style="@style/Widget.StatusBar.Material.ProgressBar"
/>
<TextView android:id="@+id/big_text"
android:textAppearance="@style/TextAppearance.StatusBar.Material.EventContent"
diff --git a/core/res/res/values-watch/config.xml b/core/res/res/values-watch/config.xml
index 8d82a17..6052fb0 100644
--- a/core/res/res/values-watch/config.xml
+++ b/core/res/res/values-watch/config.xml
@@ -36,4 +36,8 @@
<!-- Maximum velocity to initiate a fling, as measured in dips per second. -->
<dimen name="config_viewMaxFlingVelocity">8000dp</dimen>
+ <!-- Number of notifications to keep in the notification service historical archive.
+         Reduced intentionally for watches to keep the memory footprint minimal. -->
+ <integer name="config_notificationServiceArchiveSize">1</integer>
+
</resources>
diff --git a/core/res/res/values/attrs.xml b/core/res/res/values/attrs.xml
index 5b362fc..2fea91e 100644
--- a/core/res/res/values/attrs.xml
+++ b/core/res/res/values/attrs.xml
@@ -6390,6 +6390,16 @@
<attr name="settingsActivity" />
</declare-styleable>
+ <!-- Use <code>voice-enrollment-application</code>
+         as the root tag of the XML resource that describes the keyphrases (hotwords)
+         supported by the enrollment application.
+ Described here are the attributes that can be included in that tag. -->
+ <declare-styleable name="VoiceEnrollmentApplication">
+ <attr name="searchKeyphraseId" format="integer" />
+ <attr name="searchKeyphrase" format="string" />
+ <attr name="searchKeyphraseSupportedLocales" format="string" />
+ </declare-styleable>
+
<!-- Attributes used to style the Action Bar. -->
<declare-styleable name="ActionBar">
<!-- The type of navigation to use. -->
diff --git a/core/res/res/values/colors.xml b/core/res/res/values/colors.xml
index 9bf2ce8..9f6c7ad 100644
--- a/core/res/res/values/colors.xml
+++ b/core/res/res/values/colors.xml
@@ -135,7 +135,6 @@
<color name="notification_action_legacy_color_filter">#ff555555</color>
<color name="notification_media_action_bg">#00000000</color>
- <color name="notification_media_info_bg">#40FFFFFF</color>
<color name="notification_media_progress">#FFFFFFFF</color>
<!-- Keyguard colors -->
diff --git a/core/res/res/values/config.xml b/core/res/res/values/config.xml
index 9716f0c..9ff9b11 100644
--- a/core/res/res/values/config.xml
+++ b/core/res/res/values/config.xml
@@ -595,10 +595,11 @@
<integer name="config_shutdownBatteryTemperature">680</integer>
<!-- Display low battery warning when battery level dips to this value -->
- <integer name="config_lowBatteryWarningLevel">20</integer>
+ <integer name="config_lowBatteryWarningLevel">15</integer>
- <!-- Close low battery warning when battery level reaches this value -->
- <integer name="config_lowBatteryCloseWarningLevel">25</integer>
+ <!-- Close low battery warning when battery level reaches the lowBatteryWarningLevel
+ plus this -->
+ <integer name="config_lowBatteryCloseWarningBump">5</integer>
<!-- Default color for notification LED. -->
<color name="config_defaultNotificationColor">#ffffffff</color>
@@ -627,6 +628,9 @@
<!-- Default value for LED off time when the battery is low on charge in miliseconds -->
<integer name="config_notificationsBatteryLedOff">2875</integer>
+ <!-- Number of notifications to keep in the notification service historical archive -->
+ <integer name="config_notificationServiceArchiveSize">250</integer>
+
<!-- Allow the menu hard key to be disabled in LockScreen on some devices -->
<bool name="config_disableMenuKeyInLockScreen">false</bool>
diff --git a/core/res/res/values/public.xml b/core/res/res/values/public.xml
index 8f3ee60..2792c93 100644
--- a/core/res/res/values/public.xml
+++ b/core/res/res/values/public.xml
@@ -2185,6 +2185,9 @@
<public type="attr" name="translateY" />
<public type="attr" name="selectableItemBackgroundBorderless" />
<public type="attr" name="elegantTextHeight" />
+ <public type="attr" name="searchKeyphraseId" />
+ <public type="attr" name="searchKeyphrase" />
+ <public type="attr" name="searchKeyphraseSupportedLocales" />
<public-padding type="dimen" name="l_resource_pad" end="0x01050010" />
diff --git a/core/res/res/values/strings.xml b/core/res/res/values/strings.xml
index 2903ac2..f1d9dc3 100644
--- a/core/res/res/values/strings.xml
+++ b/core/res/res/values/strings.xml
@@ -1122,6 +1122,12 @@
interface of a voice interaction service. Should never be needed for normal apps.</string>
<!-- Title of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permlab_manageVoiceKeyphrases">manage voice keyphrases</string>
+ <!-- Description of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
+ <string name="permdesc_manageVoiceKeyphrases">Allows the holder to manage the keyphrases for voice hotword detection.
+ Should never be needed for normal apps.</string>
+
+ <!-- Title of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
<string name="permlab_bindRemoteDisplay">bind to a remote display</string>
<!-- Description of an application permission, listed so the user can choose whether they want to allow the application to do this. -->
<string name="permdesc_bindRemoteDisplay">Allows the holder to bind to the top-level
diff --git a/core/res/res/values/styles_material.xml b/core/res/res/values/styles_material.xml
index 92cce25..a40835c 100644
--- a/core/res/res/values/styles_material.xml
+++ b/core/res/res/values/styles_material.xml
@@ -345,6 +345,9 @@ please see styles_device_defaults.xml.
</style>
<style name="Widget.StatusBar.Material.ProgressBar" parent="Widget.Material.Light.ProgressBar.Horizontal">
+ </style>
+
+ <style name="Widget.StatusBar.Material.ProgressBar.Media">
<item name="android:progressDrawable">@drawable/notification_material_media_progress</item>
</style>
diff --git a/core/res/res/values/symbols.xml b/core/res/res/values/symbols.xml
index 2ea7421..f2550ab 100644
--- a/core/res/res/values/symbols.xml
+++ b/core/res/res/values/symbols.xml
@@ -1509,7 +1509,7 @@
<java-symbol type="integer" name="config_defaultNotificationLedOn" />
<java-symbol type="integer" name="config_deskDockKeepsScreenOn" />
<java-symbol type="integer" name="config_lightSensorWarmupTime" />
- <java-symbol type="integer" name="config_lowBatteryCloseWarningLevel" />
+ <java-symbol type="integer" name="config_lowBatteryCloseWarningBump" />
<java-symbol type="integer" name="config_lowBatteryWarningLevel" />
<java-symbol type="integer" name="config_networkPolicyDefaultWarning" />
<java-symbol type="integer" name="config_networkTransitionTimeout" />
@@ -1518,6 +1518,7 @@
<java-symbol type="integer" name="config_notificationsBatteryLedOn" />
<java-symbol type="integer" name="config_notificationsBatteryLowARGB" />
<java-symbol type="integer" name="config_notificationsBatteryMediumARGB" />
+ <java-symbol type="integer" name="config_notificationServiceArchiveSize" />
<java-symbol type="integer" name="config_radioScanningTimeout" />
<java-symbol type="integer" name="config_screenBrightnessSettingMinimum" />
<java-symbol type="integer" name="config_screenBrightnessSettingMaximum" />
@@ -1678,7 +1679,6 @@
<java-symbol type="drawable" name="notification_material_bg" />
<java-symbol type="drawable" name="notification_material_media_progress" />
<java-symbol type="color" name="notification_media_action_bg" />
- <java-symbol type="color" name="notification_media_info_bg" />
<java-symbol type="color" name="notification_media_progress" />
<java-symbol type="id" name="media_action_area" />