Diffstat (limited to 'core/java')
 core/java/android/hardware/camera2/CameraCharacteristics.java | 161
 core/java/android/hardware/camera2/CameraDevice.java          |  22
 core/java/android/hardware/camera2/CameraMetadata.java        |  86
 core/java/android/hardware/camera2/CaptureRequest.java        |  35
 core/java/android/hardware/camera2/CaptureResult.java         |  46
 core/java/android/transition/TransitionInflater.java          |   2
 core/java/android/view/DisplayListCanvas.java                 |  18
 core/java/android/view/PhoneWindow.java                       |   2
 core/java/android/view/View.java                              |  97
 core/java/android/widget/Editor.java                          |  59
 core/java/android/widget/Switch.java                          |   2
 core/java/android/widget/TextView.java                        |   4
 core/java/com/android/internal/app/ChooserActivity.java       |   6
 core/java/com/android/internal/midi/MidiFramer.java           |  51
 core/java/com/android/internal/util/ImageUtils.java           |   2
 core/java/com/android/internal/widget/LockPatternUtils.java   | 237
16 files changed, 399 insertions, 431 deletions
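The camera2 portion of this change documents the new REPROCESS_MAX_CAPTURE_STALL key and clarifies how stall durations and reprocessing capabilities are queried. The sketch below is not part of the patch; it is a minimal, hedged illustration of how an application might read these values through the released SDK, assuming a valid Context and camera id. The class and method names (ReprocessStallInspector, dumpStallInfo) are invented for the example, and it checks the YUV_REPROCESSING capability constant from the shipped API rather than the OPAQUE_REPROCESSING name used in this snapshot's generated docs.

import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.util.Log;
import android.util.Size;

public final class ReprocessStallInspector {
    private static final String TAG = "ReprocessStallInspector";

    /** Logs reprocessing support and per-size JPEG stall durations for one camera. */
    public static void dumpStallInfo(Context context, String cameraId)
            throws CameraAccessException {
        CameraManager manager =
                (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);

        // REPROCESS_MAX_CAPTURE_STALL (added by this patch) is null on devices
        // that do not support reprocessing.
        Integer maxCaptureStall =
                chars.get(CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL);

        boolean yuvReprocessing = false;
        int[] caps = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
        if (caps != null) {
            for (int c : caps) {
                if (c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING) {
                    yuvReprocessing = true;
                }
            }
        }
        Log.i(TAG, "YUV reprocessing: " + yuvReprocessing
                + ", max capture stall (frames): " + maxCaptureStall);

        // JPEG is the typical "processed but stalling" format; per the updated docs,
        // YUV_420_888 and RAW10 never stall.
        StreamConfigurationMap map =
                chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        for (Size size : map.getOutputSizes(ImageFormat.JPEG)) {
            long stallNs = map.getOutputStallDuration(ImageFormat.JPEG, size);
            long minFrameNs = map.getOutputMinFrameDuration(ImageFormat.JPEG, size);
            Log.i(TAG, "JPEG " + size + ": minFrameDuration=" + minFrameNs
                    + "ns, stall=" + stallNs + "ns");
        }
    }
}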
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java index 87a1ca9..19e821c 100644 --- a/core/java/android/hardware/camera2/CameraCharacteristics.java +++ b/core/java/android/hardware/camera2/CameraCharacteristics.java @@ -603,10 +603,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>List of available high speed video size and fps range configurations * supported by the camera device, in the format of (width, height, fps_min, fps_max).</p> - * <p>When HIGH_SPEED_VIDEO is supported in {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}, - * this metadata will list the supported high speed video size and fps range - * configurations. All the sizes listed in this configuration will be a subset - * of the sizes reported by StreamConfigurationMap#getOutputSizes for processed + * <p>When HIGH_SPEED_VIDEO is supported in {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}, this metadata + * will list the supported high speed video size and fps range configurations. All the sizes + * listed in this configuration will be a subset of the sizes reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } for processed * non-stalling formats.</p> * <p>For the high speed video use case, where the application will set * {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} to HIGH_SPEED_VIDEO in capture requests, the application must @@ -1116,11 +1115,12 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * into the 3 stream types as below:</p> * <ul> * <li>Processed (but stalling): any non-RAW format with a stallDurations > 0. - * Typically JPEG format (ImageFormat#JPEG).</li> - * <li>Raw formats: ImageFormat#RAW_SENSOR, ImageFormat#RAW10, ImageFormat#RAW12, - * and ImageFormat#RAW_OPAQUE.</li> + * Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.</li> + * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12 RAW12}.</li> * <li>Processed (but not-stalling): any non-RAW format without a stall duration. 
- * Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li> + * Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}, + * {@link android.graphics.ImageFormat#NV21 NV21}, or + * {@link android.graphics.ImageFormat#YV12 YV12}.</li> * </ul> * <p><b>Range of valid values:</b><br></p> * <p>For processed (and stalling) format streams, >= 1.</p> @@ -1148,10 +1148,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * be any <code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p> * <p>In particular, a <code>RAW</code> format is typically one of:</p> * <ul> - * <li>ImageFormat#RAW_SENSOR</li> - * <li>ImageFormat#RAW10</li> - * <li>ImageFormat#RAW12</li> - * <li>Opaque <code>RAW</code></li> + * <li>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</li> + * <li>{@link android.graphics.ImageFormat#RAW10 RAW10}</li> + * <li>{@link android.graphics.ImageFormat#RAW12 RAW12}</li> * </ul> * <p>LEGACY mode devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> LEGACY) * never support raw streams.</p> @@ -1180,13 +1179,13 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration. * Typically:</p> * <ul> - * <li>ImageFormat#YUV_420_888</li> - * <li>ImageFormat#NV21</li> - * <li>ImageFormat#YV12</li> - * <li>Implementation-defined formats, i.e. StreamConfiguration#isOutputSupportedFor(Class)</li> + * <li>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</li> + * <li>{@link android.graphics.ImageFormat#NV21 NV21}</li> + * <li>{@link android.graphics.ImageFormat#YV12 YV12}</li> + * <li>Implementation-defined formats, i.e. {@link android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class) }</li> * </ul> - * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with - * a processed format -- it will return 0 for a non-stalling stream.</p> + * <p>For full guarantees, query {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } with a + * processed format -- it will return 0 for a non-stalling stream.</p> * <p>LEGACY devices will support at least 2 processing/non-stalling streams.</p> * <p><b>Range of valid values:</b><br></p> * <p>>= 3 @@ -1212,10 +1211,11 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * the camera device. Using more streams simultaneously may require more hardware and * CPU resources that will consume more power. The image format for this kind of an output stream can * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p> - * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations > 0. - * Typically only the <code>JPEG</code> format (ImageFormat#JPEG) is a stalling format.</p> - * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with - * a processed format -- it will return a non-0 value for a stalling stream.</p> + * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations + * > 0. 
Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a + * stalling format.</p> + * <p>For full guarantees, query {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } with a + * processed format -- it will return a non-0 value for a stalling stream.</p> * <p>LEGACY devices will support up to 1 processing/stalling stream.</p> * <p><b>Range of valid values:</b><br></p> * <p>>= 1</p> @@ -1232,10 +1232,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>The maximum numbers of any type of input streams * that can be configured and used simultaneously by a camera device.</p> * <p>When set to 0, it means no input stream is supported.</p> - * <p>The image format for a input stream can be any supported - * format returned by StreamConfigurationMap#getInputFormats. When using an - * input stream, there must be at least one output stream - * configured to to receive the reprocessed images.</p> + * <p>The image format for a input stream can be any supported format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }. When using an + * input stream, there must be at least one output stream configured to to receive the + * reprocessed images.</p> * <p>When an input stream and some output streams are used in a reprocessing request, * only the input buffer will be used to produce these output stream buffers, and a * new sensor image will not be captured.</p> @@ -1352,7 +1351,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>A list of all keys that the camera device has available - * to use with CaptureRequest.</p> + * to use with {@link android.hardware.camera2.CaptureRequest }.</p> * <p>Attempting to set a key into a CaptureRequest that is not * listed here will result in an invalid request and will be rejected * by the camera device.</p> @@ -1370,7 +1369,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>A list of all keys that the camera device has available - * to use with CaptureResult.</p> + * to use with {@link android.hardware.camera2.CaptureResult }.</p> * <p>Attempting to get a key from a CaptureResult that is not * listed here will always return a <code>null</code> value. Getting a key from * a CaptureResult that is listed here will generally never return a <code>null</code> @@ -1396,7 +1395,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>A list of all keys that the camera device has available - * to use with CameraCharacteristics.</p> + * to use with {@link android.hardware.camera2.CameraCharacteristics }.</p> * <p>This entry follows the same rules as * android.request.availableResultKeys (except that it applies for * CameraCharacteristics instead of CaptureResult). 
See above for more @@ -1535,34 +1534,31 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * </thead> * <tbody> * <tr> - * <td align="left">PRIVATE (ImageFormat#PRIVATE)</td> - * <td align="left">JPEG</td> + * <td align="left">{@link android.graphics.ImageFormat#PRIVATE }</td> + * <td align="left">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="left">OPAQUE_REPROCESSING</td> * </tr> * <tr> - * <td align="left">PRIVATE</td> - * <td align="left">YUV_420_888</td> + * <td align="left">{@link android.graphics.ImageFormat#PRIVATE }</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="left">OPAQUE_REPROCESSING</td> * </tr> * <tr> - * <td align="left">YUV_420_888</td> - * <td align="left">JPEG</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> + * <td align="left">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="left">YUV_REPROCESSING</td> * </tr> * <tr> - * <td align="left">YUV_420_888</td> - * <td align="left">YUV_420_888</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="left">YUV_REPROCESSING</td> * </tr> * </tbody> * </table> - * <p>PRIVATE refers to a device-internal format that is not directly application-visible. - * A PRIVATE input surface can be acquired by - * ImageReader.newOpaqueInstance(width, height, maxImages). - * For a OPAQUE_REPROCESSING-capable camera device, using the PRIVATE format - * as either input or output will never hurt maximum frame rate (i.e. - * StreamConfigurationMap#getOutputStallDuration(format, size) is always 0), - * where format is ImageFormat#PRIVATE.</p> + * <p>PRIVATE refers to a device-internal format that is not directly application-visible. A + * PRIVATE input surface can be acquired by {@link android.media.ImageReader#newOpaqueInstance }.</p> + * <p>For a OPAQUE_REPROCESSING-capable camera device, using the PRIVATE format as either input + * or output will never hurt maximum frame rate (i.e. 
{@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0),</p> * <p>Attempting to configure an input stream with output streams not * listed as available in this map is not valid.</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> @@ -1680,7 +1676,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * android.scaler.availableStallDurations for more details about * calculating the max frame rate.</p> * <p>(Keep in sync with - * StreamConfigurationMap#getOutputMinFrameDuration)</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p> * <p><b>Units</b>: (format, width, height, ns) x n</p> * <p>This key is available on all devices.</p> * @@ -1692,7 +1688,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>This lists the maximum stall duration for each - * format/size combination.</p> + * output format/size combination.</p> * <p>A stall duration is how much extra time would get added * to the normal minimum frame duration for a repeating request * that has streams with non-zero stall.</p> @@ -1734,12 +1730,13 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * ignored).</p> * <p>The following formats may always have a stall duration:</p> * <ul> - * <li>ImageFormat#JPEG</li> - * <li>ImageFormat#RAW_SENSOR</li> + * <li>{@link android.graphics.ImageFormat#JPEG }</li> + * <li>{@link android.graphics.ImageFormat#RAW_SENSOR }</li> * </ul> * <p>The following formats will never have a stall duration:</p> * <ul> - * <li>ImageFormat#YUV_420_888</li> + * <li>{@link android.graphics.ImageFormat#YUV_420_888 }</li> + * <li>{@link android.graphics.ImageFormat#RAW10 }</li> * </ul> * <p>All other formats may or may not have an allowed stall duration on * a per-capability basis; refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} @@ -1747,7 +1744,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about * calculating the max frame rate (absent stalls).</p> * <p>(Keep up to date with - * StreamConfigurationMap#getOutputStallDuration(int, Size) )</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } )</p> * <p><b>Units</b>: (format, width, height, ns) x n</p> * <p>This key is available on all devices.</p> * @@ -1786,57 +1783,57 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * </thead> * <tbody> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td> * <td align="center">Any</td> * <td align="center"></td> * </tr> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">1920x1080 (1080p)</td> * <td align="center">Any</td> * <td align="center">if 1080p <= activeArraySize</td> * </tr> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">1280x720 (720)</td> * <td align="center">Any</td> * <td align="center">if 720p <= activeArraySize</td> * </tr> * <tr> - * 
<td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">640x480 (480p)</td> * <td align="center">Any</td> * <td align="center">if 480p <= activeArraySize</td> * </tr> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">320x240 (240p)</td> * <td align="center">Any</td> * <td align="center">if 240p <= activeArraySize</td> * </tr> * <tr> - * <td align="center">YUV_420_888</td> + * <td align="center">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="center">all output sizes available for JPEG</td> * <td align="center">FULL</td> * <td align="center"></td> * </tr> * <tr> - * <td align="center">YUV_420_888</td> + * <td align="center">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="center">all output sizes available for JPEG, up to the maximum video size</td> * <td align="center">LIMITED</td> * <td align="center"></td> * </tr> * <tr> - * <td align="center">IMPLEMENTATION_DEFINED</td> + * <td align="center">{@link android.graphics.ImageFormat#PRIVATE }</td> * <td align="center">same as YUV_420_888</td> * <td align="center">Any</td> * <td align="center"></td> * </tr> * </tbody> * </table> - * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional - * mandatory stream configurations on a per-capability basis.</p> + * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} and {@link android.hardware.camera2.CameraDevice#createCaptureSession } for additional mandatory + * stream configurations on a per-capability basis.</p> * <p>This key is available on all devices.</p> * * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL @@ -1973,8 +1970,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>Attempting to use frame durations beyond the maximum will result in the frame * duration being clipped to the maximum. See that control for a full definition of frame * durations.</p> - * <p>Refer to StreamConfigurationMap#getOutputMinFrameDuration(int,Size) for the minimum - * frame duration values.</p> + * <p>Refer to {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration } + * for the minimum frame duration values.</p> * <p><b>Units</b>: Nanoseconds</p> * <p><b>Range of valid values:</b><br> * For FULL capability devices @@ -2634,6 +2631,41 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri new Key<Integer>("android.sync.maxLatency", int.class); /** + * <p>The maximal camera capture pipeline stall (in unit of frame count) introduced by a + * reprocess capture request.</p> + * <p>The key describes the maximal interference that one reprocess (input) request + * can introduce to the camera simultaneous streaming of regular (output) capture + * requests, including repeating requests.</p> + * <p>When a reprocessing capture request is submitted while a camera output repeating request + * (e.g. preview) is being served by the camera device, it may preempt the camera capture + * pipeline for at least one frame duration so that the camera device is unable to process + * the following capture request in time for the next sensor start of exposure boundary. 
+ * When this happens, the application may observe a capture time gap (longer than one frame + * duration) between adjacent capture output frames, which usually exhibits as preview + * glitch if the repeating request output targets include a preview surface. This key gives + * the worst-case number of frame stall introduced by one reprocess request with any kind of + * formats/sizes combination.</p> + * <p>If this key reports 0, it means a reprocess request doesn't introduce any glitch to the + * ongoing camera repeating request outputs, as if this reprocess request is never issued.</p> + * <p>This key is supported if the camera device supports OPAQUE or YUV reprocessing ( + * i.e. {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains OPAQUE_REPROCESSING or + * YUV_REPROCESSING).</p> + * <p><b>Units</b>: Number of frames.</p> + * <p><b>Range of valid values:</b><br> + * <= 4</p> + * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> + * <p><b>Limited capability</b> - + * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the + * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p> + * + * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL + * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES + */ + @PublicKey + public static final Key<Integer> REPROCESS_MAX_CAPTURE_STALL = + new Key<Integer>("android.reprocess.maxCaptureStall", int.class); + + /** * <p>The available depth dataspace stream * configurations that this camera device supports * (i.e. format, width, height, output/input stream).</p> @@ -2672,8 +2704,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and * android.scaler.availableStallDurations for more details about * calculating the max frame rate.</p> - * <p>(Keep in sync with - * StreamConfigurationMap#getOutputMinFrameDuration)</p> + * <p>(Keep in sync with {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p> * <p><b>Units</b>: (format, width, height, ns) x n</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> * <p><b>Limited capability</b> - @@ -2689,7 +2720,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>This lists the maximum stall duration for each - * format/size combination for depth streams.</p> + * output format/size combination for depth streams.</p> * <p>A stall duration is how much extra time would get added * to the normal minimum frame duration for a repeating request * that has streams with non-zero stall.</p> diff --git a/core/java/android/hardware/camera2/CameraDevice.java b/core/java/android/hardware/camera2/CameraDevice.java index 51b326b..f6791a4 100644 --- a/core/java/android/hardware/camera2/CameraDevice.java +++ b/core/java/android/hardware/camera2/CameraDevice.java @@ -54,6 +54,7 @@ public abstract class CameraDevice implements AutoCloseable { * means that high frame rate is given priority over the highest-quality * post-processing. These requests would normally be used with the * {@link CameraCaptureSession#setRepeatingRequest} method. + * This template is guaranteed to be supported on all camera devices. 
* * @see #createCaptureRequest */ @@ -63,6 +64,7 @@ public abstract class CameraDevice implements AutoCloseable { * Create a request suitable for still image capture. Specifically, this * means prioritizing image quality over frame rate. These requests would * commonly be used with the {@link CameraCaptureSession#capture} method. + * This template is guaranteed to be supported on all camera devices. * * @see #createCaptureRequest */ @@ -73,6 +75,7 @@ public abstract class CameraDevice implements AutoCloseable { * that a stable frame rate is used, and post-processing is set for * recording quality. These requests would commonly be used with the * {@link CameraCaptureSession#setRepeatingRequest} method. + * This template is guaranteed to be supported on all camera devices. * * @see #createCaptureRequest */ @@ -84,6 +87,9 @@ public abstract class CameraDevice implements AutoCloseable { * disrupting the ongoing recording. These requests would commonly be used * with the {@link CameraCaptureSession#capture} method while a request based on * {@link #TEMPLATE_RECORD} is is in use with {@link CameraCaptureSession#setRepeatingRequest}. + * This template is guaranteed to be supported on all camera devices except + * legacy devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL} + * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}) * * @see #createCaptureRequest */ @@ -93,6 +99,11 @@ public abstract class CameraDevice implements AutoCloseable { * Create a request suitable for zero shutter lag still capture. This means * means maximizing image quality without compromising preview frame rate. * AE/AWB/AF should be on auto mode. + * This template is guaranteed to be supported on camera devices that support the + * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING OPAQUE_REPROCESSING} + * capability or the + * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING YUV_REPROCESSING} + * capability. * * @see #createCaptureRequest */ @@ -105,6 +116,9 @@ public abstract class CameraDevice implements AutoCloseable { * quality. The manual capture parameters (exposure, sensitivity, and so on) * are set to reasonable defaults, but should be overriden by the * application depending on the intended use case. + * This template is guaranteed to be supported on camera devices that support the + * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR MANUAL_SENSOR} + * capability. * * @see #createCaptureRequest */ @@ -473,12 +487,14 @@ public abstract class CameraDevice implements AutoCloseable { * settings as desired, instead.</p> * * @param templateType An enumeration selecting the use case for this - * request; one of the CameraDevice.TEMPLATE_ values. + * request; one of the CameraDevice.TEMPLATE_ values. Not all template + * types are supported on every device. See the documentation for each + * template type for details. * @return a builder for a capture request, initialized with default * settings for that template, and no output streams * - * @throws IllegalArgumentException if the templateType is not in the list - * of supported templates. + * @throws IllegalArgumentException if the templateType is not supported by + * this device. 
* @throws CameraAccessException if the camera device is no longer connected or has * encountered a fatal error * @throws IllegalStateException if the camera device has been closed diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java index e3f1d73..ca9439b 100644 --- a/core/java/android/hardware/camera2/CameraMetadata.java +++ b/core/java/android/hardware/camera2/CameraMetadata.java @@ -461,22 +461,21 @@ public abstract class CameraMetadata<TKey> { * <p>The camera device supports the Zero Shutter Lag reprocessing use case.</p> * <ul> * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li> - * <li>ImageFormat#PRIVATE is supported as an output/input format, that is, - * ImageFormat#PRIVATE is included in the lists of formats returned by - * StreamConfigurationMap#getInputFormats and - * StreamConfigurationMap#getOutputFormats.</li> - * <li>StreamConfigurationMap#getValidOutputFormatsForInput returns non empty int[] for - * each supported input format returned by StreamConfigurationMap#getInputFormats.</li> - * <li>Each size returned by StreamConfigurationMap#getInputSizes(ImageFormat#PRIVATE) - * is also included in StreamConfigurationMap#getOutputSizes(ImageFormat#PRIVATE)</li> - * <li>Using ImageFormat#PRIVATE does not cause a frame rate drop - * relative to the sensor's maximum capture rate (at that - * resolution).</li> - * <li>ImageFormat#PRIVATE will be reprocessable into both YUV_420_888 - * and JPEG formats.</li> + * <li>{@link android.graphics.ImageFormat#PRIVATE } is supported as an output/input format, + * that is, {@link android.graphics.ImageFormat#PRIVATE } is included in the lists of + * formats returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li> + * <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput } + * returns non empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.</li> + * <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(ImageFormat.PRIVATE)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(ImageFormat.PRIVATE)}</li> + * <li>Using {@link android.graphics.ImageFormat#PRIVATE } does not cause a frame rate drop + * relative to the sensor's maximum capture rate (at that resolution).</li> + * <li>{@link android.graphics.ImageFormat#PRIVATE } will be reprocessable into both + * {@link android.graphics.ImageFormat#YUV_420_888 } and + * {@link android.graphics.ImageFormat#JPEG } formats.</li> * <li>The maximum available resolution for OPAQUE streams * (both input/output) will match the maximum available * resolution of JPEG streams.</li> + * <li>Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.</li> * <li>Only below controls are effective for reprocessing requests and * will be present in capture results, other controls in reprocess * requests will be ignored by the camera device.<ul> @@ -489,6 +488,7 @@ public abstract class CameraMetadata<TKey> { * * @see CaptureRequest#EDGE_MODE * @see CaptureRequest#NOISE_REDUCTION_MODE + * @see 
CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ @@ -569,25 +569,25 @@ public abstract class CameraMetadata<TKey> { * following:</p> * <ul> * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li> - * <li>YUV_420_888 is supported as an output/input format, that is, + * <li>{@link android.graphics.ImageFormat#YUV_420_888 } is supported as an output/input format, that is, * YUV_420_888 is included in the lists of formats returned by - * StreamConfigurationMap#getInputFormats and - * StreamConfigurationMap#getOutputFormats.</li> - * <li>StreamConfigurationMap#getValidOutputFormatsForInput returns non empty int[] for - * each supported input format returned by StreamConfigurationMap#getInputFormats.</li> - * <li>Each size returned by StreamConfigurationMap#getInputSizes(YUV_420_888) - * is also included in StreamConfigurationMap#getOutputSizes(YUV_420_888)</li> - * <li>Using YUV_420_888 does not cause a frame rate drop + * {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li> + * <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput } + * returns non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.</li> + * <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(YUV_420_888)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(YUV_420_888)}</li> + * <li>Using {@link android.graphics.ImageFormat#YUV_420_888 } does not cause a frame rate drop * relative to the sensor's maximum capture rate (at that resolution).</li> - * <li>YUV_420_888 will be reprocessable into both YUV_420_888 - * and JPEG formats.</li> - * <li>The maximum available resolution for YUV_420_888 streams - * (both input/output) will match the maximum available - * resolution of JPEG streams.</li> - * <li>Only the below controls are effective for reprocessing requests and will be - * present in capture results. The reprocess requests are from the original capture - * results that are assocaited with the intermidate YUV_420_888 output buffers. - * All other controls in the reprocess requests will be ignored by the camera device.<ul> + * <li>{@link android.graphics.ImageFormat#YUV_420_888 } will be reprocessable into both + * {@link android.graphics.ImageFormat#YUV_420_888 } and {@link android.graphics.ImageFormat#JPEG } formats.</li> + * <li>The maximum available resolution for {@link android.graphics.ImageFormat#YUV_420_888 } streams (both input/output) will match the + * maximum available resolution of {@link android.graphics.ImageFormat#JPEG } streams.</li> + * <li>Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.</li> + * <li>Only the below controls are effective for reprocessing requests and will be present + * in capture results. The reprocess requests are from the original capture results that + * are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888 } + * output buffers. 
All other controls in the reprocess requests will be ignored by the + * camera device.<ul> * <li>android.jpeg.*</li> * <li>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}</li> * <li>{@link CaptureRequest#EDGE_MODE android.edge.mode}</li> @@ -599,6 +599,7 @@ public abstract class CameraMetadata<TKey> { * @see CaptureRequest#EDGE_MODE * @see CaptureRequest#NOISE_REDUCTION_MODE * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR + * @see CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ @@ -608,11 +609,13 @@ public abstract class CameraMetadata<TKey> { * <p>The camera device can produce depth measurements from its field of view.</p> * <p>This capability requires the camera device to support the following:</p> * <ul> - * <li>DEPTH16 is supported as an output format.</li> - * <li>DEPTH_POINT_CLOUD is optionally supported as an output format.</li> - * <li>This camera device, and all camera devices with the same android.lens.info.facing, - * will list the following calibration entries in both CameraCharacteristics and - * CaptureResults:<ul> + * <li>{@link android.graphics.ImageFormat#DEPTH16 } is supported as an output format.</li> + * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD } is optionally supported as an + * output format.</li> + * <li>This camera device, and all camera devices with the same {@link CameraCharacteristics#LENS_FACING android.lens.facing}, + * will list the following calibration entries in both + * {@link android.hardware.camera2.CameraCharacteristics } and + * {@link android.hardware.camera2.CaptureResult }:<ul> * <li>{@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}</li> * <li>{@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}</li> * <li>android.lens.intrinsicCalibration</li> @@ -627,13 +630,14 @@ public abstract class CameraMetadata<TKey> { * <p>Generally, depth output operates at a slower frame rate than standard color capture, * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that * should be accounted for (see - * android.hardware.camera2.StreamConfigurationMap#getOutputStallDuration). On a device - * that supports both depth and color-based output, to enable smooth preview, using a - * repeating burst is recommended, where a depth-output target is only included once - * every N frames, where N is the ratio between preview output rate and depth output + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }). 
+ * On a device that supports both depth and color-based output, to enable smooth preview, + * using a repeating burst is recommended, where a depth-output target is only included + * once every N frames, where N is the ratio between preview output rate and depth output * rate, including depth stall time.</p> * * @see CameraCharacteristics#DEPTH_DEPTH_IS_EXCLUSIVE + * @see CameraCharacteristics#LENS_FACING * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES @@ -707,7 +711,7 @@ public abstract class CameraMetadata<TKey> { /** * <p>Timestamps from {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} are in the same timebase as - * android.os.SystemClock#elapsedRealtimeNanos(), + * {@link android.os.SystemClock#elapsedRealtimeNanos }, * and they can be compared to other timestamps using that base.</p> * * @see CaptureResult#SENSOR_TIMESTAMP @@ -866,7 +870,7 @@ public abstract class CameraMetadata<TKey> { /** * <p>Every frame has the requests immediately applied.</p> * <p>Furthermore for all results, - * <code>android.sync.frameNumber == CaptureResult#getFrameNumber()</code></p> + * <code>android.sync.frameNumber == {@link android.hardware.camera2.CaptureResult#getFrameNumber }</code></p> * <p>Changing controls over multiple requests one after another will * produce results that have those controls applied atomically * each frame.</p> diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java index 19d17b1..ab6ce91 100644 --- a/core/java/android/hardware/camera2/CaptureRequest.java +++ b/core/java/android/hardware/camera2/CaptureRequest.java @@ -1275,8 +1275,9 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * <p>This control (except for MANUAL) is only effective if * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p> * <p>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} - * contains OPAQUE_REPROCESSING. MANUAL will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} - * contains MANUAL_SENSOR. Other intent values are always supported.</p> + * contains OPAQUE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if + * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR. Other intent values are + * always supported.</p> * <p><b>Possible values:</b> * <ul> * <li>{@link #CONTROL_CAPTURE_INTENT_CUSTOM CUSTOM}</li> @@ -2039,8 +2040,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * cannot process more than 1 capture at a time.</li> * </ul> * <p>The necessary information for the application, given the model above, - * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field - * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size). + * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }. 
* These are used to determine the maximum frame rate / minimum frame * duration that is possible for a given stream configuration.</p> * <p>Specifically, the application can use the following rules to @@ -2049,21 +2050,19 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * <ol> * <li>Let the set of currently configured input/output streams * be called <code>S</code>.</li> - * <li>Find the minimum frame durations for each stream in <code>S</code>, by - * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using - * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with - * its respective size/format). Let this set of frame durations be called - * <code>F</code>.</li> + * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking + * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration } + * (with its respective size/format). Let this set of frame durations be + * called <code>F</code>.</li> * <li>For any given request <code>R</code>, the minimum frame duration allowed * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams * used in <code>R</code> be called <code>S_r</code>.</li> * </ol> - * <p>If none of the streams in <code>S_r</code> have a stall time (listed in - * StreamConfigurationMap#getOutputStallDuration(int,Size) using its - * respective size/format), then the frame duration in - * <code>F</code> determines the steady state frame rate that the application will - * get if it uses <code>R</code> as a repeating request. Let this special kind - * of request be called <code>Rsimple</code>.</p> + * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } + * using its respective size/format), then the frame duration in <code>F</code> + * determines the steady state frame rate that the application will get + * if it uses <code>R</code> as a repeating request. 
Let this special kind of + * request be called <code>Rsimple</code>.</p> * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved * by a single capture of a new request <code>Rstall</code> (which has at least * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the @@ -2071,7 +2070,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * if all buffers from the previous <code>Rstall</code> have already been * delivered.</p> * <p>For more details about stalling, see - * StreamConfigurationMap#getOutputStallDuration(int,Size).</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p> * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.</p> * <p><b>Units</b>: Nanoseconds</p> @@ -2647,8 +2646,12 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * <p><b>Range of valid values:</b><br> * >= 1.0</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> + * <p><b>Limited capability</b> - + * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the + * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p> * * @see CaptureRequest#EDGE_MODE + * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CaptureRequest#NOISE_REDUCTION_MODE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java index ef5d75c..3dc8970 100644 --- a/core/java/android/hardware/camera2/CaptureResult.java +++ b/core/java/android/hardware/camera2/CaptureResult.java @@ -1698,8 +1698,9 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * <p>This control (except for MANUAL) is only effective if * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p> * <p>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} - * contains OPAQUE_REPROCESSING. MANUAL will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} - * contains MANUAL_SENSOR. Other intent values are always supported.</p> + * contains OPAQUE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if + * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR. Other intent values are + * always supported.</p> * <p><b>Possible values:</b> * <ul> * <li>{@link #CONTROL_CAPTURE_INTENT_CUSTOM CUSTOM}</li> @@ -2885,8 +2886,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * cannot process more than 1 capture at a time.</li> * </ul> * <p>The necessary information for the application, given the model above, - * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field - * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size). 
+ * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }. * These are used to determine the maximum frame rate / minimum frame * duration that is possible for a given stream configuration.</p> * <p>Specifically, the application can use the following rules to @@ -2895,21 +2896,19 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * <ol> * <li>Let the set of currently configured input/output streams * be called <code>S</code>.</li> - * <li>Find the minimum frame durations for each stream in <code>S</code>, by - * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using - * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with - * its respective size/format). Let this set of frame durations be called - * <code>F</code>.</li> + * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking + * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration } + * (with its respective size/format). Let this set of frame durations be + * called <code>F</code>.</li> * <li>For any given request <code>R</code>, the minimum frame duration allowed * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams * used in <code>R</code> be called <code>S_r</code>.</li> * </ol> - * <p>If none of the streams in <code>S_r</code> have a stall time (listed in - * StreamConfigurationMap#getOutputStallDuration(int,Size) using its - * respective size/format), then the frame duration in - * <code>F</code> determines the steady state frame rate that the application will - * get if it uses <code>R</code> as a repeating request. Let this special kind - * of request be called <code>Rsimple</code>.</p> + * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } + * using its respective size/format), then the frame duration in <code>F</code> + * determines the steady state frame rate that the application will get + * if it uses <code>R</code> as a repeating request. 
Let this special kind of + * request be called <code>Rsimple</code>.</p> * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved * by a single capture of a new request <code>Rstall</code> (which has at least * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the @@ -2917,7 +2916,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * if all buffers from the previous <code>Rstall</code> have already been * delivered.</p> * <p>For more details about stalling, see - * StreamConfigurationMap#getOutputStallDuration(int,Size).</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p> * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.</p> * <p><b>Units</b>: Nanoseconds</p> @@ -2979,11 +2978,10 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * and are monotonically increasing. They can be compared with the * timestamps for other captures from the same camera device, but are * not guaranteed to be comparable to any other time source.</p> - * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME, - * the timestamps measure time in the same timebase as - * android.os.SystemClock#elapsedRealtimeNanos(), and they can be - * compared to other timestamps from other subsystems that are using - * that base.</p> + * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME, the + * timestamps measure time in the same timebase as {@link android.os.SystemClock#elapsedRealtimeNanos }, and they can + * be compared to other timestamps from other subsystems that + * are using that base.</p> * <p><b>Units</b>: Nanoseconds</p> * <p><b>Range of valid values:</b><br> * > 0</p> @@ -3141,7 +3139,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * <p><b>Units</b>: Nanoseconds</p> * <p><b>Range of valid values:</b><br> * >= 0 and < - * StreamConfigurationMap#getOutputMinFrameDuration(int, Size).</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> * <p><b>Limited capability</b> - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the @@ -3966,8 +3964,12 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * <p><b>Range of valid values:</b><br> * >= 1.0</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> + * <p><b>Limited capability</b> - + * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the + * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p> * * @see CaptureRequest#EDGE_MODE + * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CaptureRequest#NOISE_REDUCTION_MODE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ diff --git a/core/java/android/transition/TransitionInflater.java b/core/java/android/transition/TransitionInflater.java index 
a7d9503..ca37d49 100644 --- a/core/java/android/transition/TransitionInflater.java +++ b/core/java/android/transition/TransitionInflater.java @@ -214,7 +214,7 @@ public class TransitionInflater { sConstructors.put(className, constructor); } } - + constructor.setAccessible(true); return constructor.newInstance(mContext, attrs); } } catch (InstantiationException e) { diff --git a/core/java/android/view/DisplayListCanvas.java b/core/java/android/view/DisplayListCanvas.java index eedbc70..46dd857 100644 --- a/core/java/android/view/DisplayListCanvas.java +++ b/core/java/android/view/DisplayListCanvas.java @@ -234,25 +234,13 @@ public class DisplayListCanvas extends Canvas { * Draws the specified display list onto this canvas. The display list can only * be drawn if {@link android.view.RenderNode#isValid()} returns true. * - * @param renderNode The RenderNode to replay. + * @param renderNode The RenderNode to draw. */ public void drawRenderNode(RenderNode renderNode) { - drawRenderNode(renderNode, RenderNode.FLAG_CLIP_CHILDREN); + nDrawRenderNode(mNativeCanvasWrapper, renderNode.getNativeDisplayList()); } - /** - * Draws the specified display list onto this canvas. - * - * @param renderNode The RenderNode to replay. - * @param flags Optional flags about drawing, see {@link RenderNode} for - * the possible flags. - */ - public void drawRenderNode(RenderNode renderNode, int flags) { - nDrawRenderNode(mNativeCanvasWrapper, renderNode.getNativeDisplayList(), flags); - } - - private static native void nDrawRenderNode(long renderer, long renderNode, - int flags); + private static native void nDrawRenderNode(long renderer, long renderNode); /////////////////////////////////////////////////////////////////////////// // Hardware layer diff --git a/core/java/android/view/PhoneWindow.java b/core/java/android/view/PhoneWindow.java index 794c8e7..a3e7a10 100644 --- a/core/java/android/view/PhoneWindow.java +++ b/core/java/android/view/PhoneWindow.java @@ -3599,7 +3599,7 @@ public class PhoneWindow extends Window implements MenuBuilder.Callback { if (!mForcedNavigationBarColor) { mNavigationBarColor = a.getColor(R.styleable.Window_navigationBarColor, 0xFF000000); } - if (a.getBoolean(R.styleable.Window_windowHasLightStatusBar, false)) { + if (a.getBoolean(R.styleable.Window_windowLightStatusBar, false)) { decor.setSystemUiVisibility( decor.getSystemUiVisibility() | View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR); } diff --git a/core/java/android/view/View.java b/core/java/android/view/View.java index 7da2da4..fa74797 100644 --- a/core/java/android/view/View.java +++ b/core/java/android/view/View.java @@ -30,6 +30,7 @@ import android.annotation.Nullable; import android.annotation.Size; import android.content.ClipData; import android.content.Context; +import android.content.ContextWrapper; import android.content.Intent; import android.content.res.ColorStateList; import android.content.res.Configuration; @@ -2591,7 +2592,7 @@ public class View implements Drawable.Callback, KeyEvent.Callback, * {@link android.view.WindowManager.LayoutParams#FLAG_TRANSLUCENT_STATUS * FLAG_TRANSLUCENT_STATUS}. 
* - * @see android.R.attr#windowHasLightStatusBar + * @see android.R.attr#windowLightStatusBar */ public static final int SYSTEM_UI_FLAG_LIGHT_STATUS_BAR = 0x00002000; @@ -4016,37 +4017,7 @@ public class View implements Drawable.Callback, KeyEvent.Callback, final String handlerName = a.getString(attr); if (handlerName != null) { - setOnClickListener(new OnClickListener() { - private Method mHandler; - - public void onClick(View v) { - if (mHandler == null) { - try { - mHandler = getContext().getClass().getMethod(handlerName, - View.class); - } catch (NoSuchMethodException e) { - int id = getId(); - String idText = id == NO_ID ? "" : " with id '" - + getContext().getResources().getResourceEntryName( - id) + "'"; - throw new IllegalStateException("Could not find a method " + - handlerName + "(View) in the activity " - + getContext().getClass() + " for onClick handler" - + " on view " + View.this.getClass() + idText, e); - } - } - - try { - mHandler.invoke(getContext(), View.this); - } catch (IllegalAccessException e) { - throw new IllegalStateException("Could not execute non " - + "public method of the activity", e); - } catch (InvocationTargetException e) { - throw new IllegalStateException("Could not execute " - + "method of the activity", e); - } - } - }); + setOnClickListener(new DeclaredOnClickListener(this, handlerName)); } break; case R.styleable.View_overScrollMode: @@ -4238,6 +4209,66 @@ public class View implements Drawable.Callback, KeyEvent.Callback, } /** + * An implementation of OnClickListener that attempts to lazily load a + * named click handling method from a parent or ancestor context. + */ + private static class DeclaredOnClickListener implements OnClickListener { + private final View mHostView; + private final String mMethodName; + + private Method mMethod; + + public DeclaredOnClickListener(@NonNull View hostView, @NonNull String methodName) { + mHostView = hostView; + mMethodName = methodName; + } + + @Override + public void onClick(@NonNull View v) { + if (mMethod == null) { + mMethod = resolveMethod(mHostView.getContext(), mMethodName); + } + + try { + mMethod.invoke(mHostView.getContext(), v); + } catch (IllegalAccessException e) { + throw new IllegalStateException( + "Could not execute non-public method for android:onClick", e); + } catch (InvocationTargetException e) { + throw new IllegalStateException( + "Could not execute method for android:onClick", e); + } + } + + @NonNull + private Method resolveMethod(@Nullable Context context, @NonNull String name) { + while (context != null) { + try { + if (!context.isRestricted()) { + return context.getClass().getMethod(mMethodName, View.class); + } + } catch (NoSuchMethodException e) { + // Failed to find method, keep searching up the hierarchy. + } + + if (context instanceof ContextWrapper) { + context = ((ContextWrapper) context).getBaseContext(); + } else { + // Can't search up the hierarchy, null out and fail. + context = null; + } + } + + final int id = mHostView.getId(); + final String idText = id == NO_ID ? 
"" : " with id '" + + mHostView.getContext().getResources().getResourceEntryName(id) + "'"; + throw new IllegalStateException("Could not find method " + mMethodName + + "(View) in a parent or ancestor Context for android:onClick " + + "attribute defined on view " + mHostView.getClass() + idText); + } + } + + /** * Non-public constructor for use in testing */ View() { @@ -15509,7 +15540,7 @@ public class View implements Drawable.Callback, KeyEvent.Callback, if (!drawingWithDrawingCache) { if (drawingWithRenderNode) { mPrivateFlags &= ~PFLAG_DIRTY_MASK; - ((DisplayListCanvas) canvas).drawRenderNode(renderNode, parentFlags); + ((DisplayListCanvas) canvas).drawRenderNode(renderNode); } else { // Fast path for layouts with no backgrounds if ((mPrivateFlags & PFLAG_SKIP_DRAW) == PFLAG_SKIP_DRAW) { diff --git a/core/java/android/widget/Editor.java b/core/java/android/widget/Editor.java index 39b9907..089074a 100644 --- a/core/java/android/widget/Editor.java +++ b/core/java/android/widget/Editor.java @@ -145,16 +145,16 @@ public class Editor { InputContentType mInputContentType; InputMethodState mInputMethodState; - private static class TextDisplayList { - RenderNode displayList; + private static class TextRenderNode { + RenderNode renderNode; boolean isDirty; - public TextDisplayList(String name) { + public TextRenderNode(String name) { isDirty = true; - displayList = RenderNode.create(name, null); + renderNode = RenderNode.create(name, null); } - boolean needsRecord() { return isDirty || !displayList.isValid(); } + boolean needsRecord() { return isDirty || !renderNode.isValid(); } } - TextDisplayList[] mTextDisplayLists; + TextRenderNode[] mTextRenderNodes; boolean mFrozenWithFocus; boolean mSelectionMoved; @@ -360,10 +360,10 @@ public class Editor { } private void destroyDisplayListsData() { - if (mTextDisplayLists != null) { - for (int i = 0; i < mTextDisplayLists.length; i++) { - RenderNode displayList = mTextDisplayLists[i] != null - ? mTextDisplayLists[i].displayList : null; + if (mTextRenderNodes != null) { + for (int i = 0; i < mTextRenderNodes.length; i++) { + RenderNode displayList = mTextRenderNodes[i] != null + ? 
mTextRenderNodes[i].renderNode : null; if (displayList != null && displayList.isValid()) { displayList.destroyDisplayListData(); } @@ -1467,8 +1467,8 @@ public class Editor { firstLine, lastLine); if (layout instanceof DynamicLayout) { - if (mTextDisplayLists == null) { - mTextDisplayLists = ArrayUtils.emptyArray(TextDisplayList.class); + if (mTextRenderNodes == null) { + mTextRenderNodes = ArrayUtils.emptyArray(TextRenderNode.class); } DynamicLayout dynamicLayout = (DynamicLayout) layout; @@ -1489,19 +1489,19 @@ public class Editor { searchStartIndex); // Note how dynamic layout's internal block indices get updated from Editor blockIndices[i] = blockIndex; - if (mTextDisplayLists[blockIndex] != null) { - mTextDisplayLists[blockIndex].isDirty = true; + if (mTextRenderNodes[blockIndex] != null) { + mTextRenderNodes[blockIndex].isDirty = true; } searchStartIndex = blockIndex + 1; } - if (mTextDisplayLists[blockIndex] == null) { - mTextDisplayLists[blockIndex] = - new TextDisplayList("Text " + blockIndex); + if (mTextRenderNodes[blockIndex] == null) { + mTextRenderNodes[blockIndex] = + new TextRenderNode("Text " + blockIndex); } - final boolean blockDisplayListIsInvalid = mTextDisplayLists[blockIndex].needsRecord(); - RenderNode blockDisplayList = mTextDisplayLists[blockIndex].displayList; + final boolean blockDisplayListIsInvalid = mTextRenderNodes[blockIndex].needsRecord(); + RenderNode blockDisplayList = mTextRenderNodes[blockIndex].renderNode; if (i >= indexFirstChangedBlock || blockDisplayListIsInvalid) { final int blockBeginLine = endOfPreviousBlock + 1; final int top = layout.getLineTop(blockBeginLine); @@ -1528,7 +1528,7 @@ public class Editor { // brings this range of text back to the top left corner of the viewport displayListCanvas.translate(-left, -top); layout.drawText(displayListCanvas, blockBeginLine, blockEndLine); - mTextDisplayLists[blockIndex].isDirty = false; + mTextRenderNodes[blockIndex].isDirty = false; // No need to untranslate, previous context is popped after // drawDisplayList } finally { @@ -1543,8 +1543,7 @@ public class Editor { blockDisplayList.setLeftTopRightBottom(left, top, right, bottom); } - ((DisplayListCanvas) canvas).drawRenderNode(blockDisplayList, - 0 /* no child clipping, our TextView parent enforces it */); + ((DisplayListCanvas) canvas).drawRenderNode(blockDisplayList); endOfPreviousBlock = blockEndLine; } @@ -1558,7 +1557,7 @@ public class Editor { private int getAvailableDisplayListIndex(int[] blockIndices, int numberOfBlocks, int searchStartIndex) { - int length = mTextDisplayLists.length; + int length = mTextRenderNodes.length; for (int i = searchStartIndex; i < length; i++) { boolean blockIndexFound = false; for (int j = 0; j < numberOfBlocks; j++) { @@ -1572,7 +1571,7 @@ public class Editor { } // No available index found, the pool has to grow - mTextDisplayLists = GrowingArrayUtils.append(mTextDisplayLists, length, null); + mTextRenderNodes = GrowingArrayUtils.append(mTextRenderNodes, length, null); return length; } @@ -1589,7 +1588,7 @@ public class Editor { * Invalidates all the sub-display lists that overlap the specified character range */ void invalidateTextDisplayList(Layout layout, int start, int end) { - if (mTextDisplayLists != null && layout instanceof DynamicLayout) { + if (mTextRenderNodes != null && layout instanceof DynamicLayout) { final int firstLine = layout.getLineForOffset(start); final int lastLine = layout.getLineForOffset(end); @@ -1609,7 +1608,7 @@ public class Editor { while (i < numberOfBlocks) { final int blockIndex 
= blockIndices[i]; if (blockIndex != DynamicLayout.INVALID_BLOCK_INDEX) { - mTextDisplayLists[blockIndex].isDirty = true; + mTextRenderNodes[blockIndex].isDirty = true; } if (blockEndLines[i] >= lastLine) break; i++; @@ -1618,9 +1617,9 @@ public class Editor { } void invalidateTextDisplayList() { - if (mTextDisplayLists != null) { - for (int i = 0; i < mTextDisplayLists.length; i++) { - if (mTextDisplayLists[i] != null) mTextDisplayLists[i].isDirty = true; + if (mTextRenderNodes != null) { + for (int i = 0; i < mTextRenderNodes.length; i++) { + if (mTextRenderNodes[i] != null) mTextRenderNodes[i].isDirty = true; } } } @@ -4491,7 +4490,7 @@ public class Editor { private class CorrectionHighlighter { private final Path mPath = new Path(); - private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private final Paint mPaint = new Paint(); private int mStart, mEnd; private long mFadingStartTime; private RectF mTempRectF; diff --git a/core/java/android/widget/Switch.java b/core/java/android/widget/Switch.java index ae779fe..f94f97c 100644 --- a/core/java/android/widget/Switch.java +++ b/core/java/android/widget/Switch.java @@ -216,7 +216,7 @@ public class Switch extends CompoundButton { public Switch(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); - mTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); + mTextPaint = new TextPaint(); final Resources res = getResources(); mTextPaint.density = res.getDisplayMetrics().density; diff --git a/core/java/android/widget/TextView.java b/core/java/android/widget/TextView.java index 3e8df08..8ce5f08 100644 --- a/core/java/android/widget/TextView.java +++ b/core/java/android/widget/TextView.java @@ -668,11 +668,11 @@ public class TextView extends View implements ViewTreeObserver.OnPreDrawListener final Resources res = getResources(); final CompatibilityInfo compat = res.getCompatibilityInfo(); - mTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); + mTextPaint = new TextPaint(); mTextPaint.density = res.getDisplayMetrics().density; mTextPaint.setCompatibilityScaling(compat.applicationScale); - mHighlightPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + mHighlightPaint = new Paint(); mHighlightPaint.setCompatibilityScaling(compat.applicationScale); mMovement = getDefaultMovementMethod(); diff --git a/core/java/com/android/internal/app/ChooserActivity.java b/core/java/com/android/internal/app/ChooserActivity.java index 2b77b2c..e347faa 100644 --- a/core/java/com/android/internal/app/ChooserActivity.java +++ b/core/java/com/android/internal/app/ChooserActivity.java @@ -405,8 +405,10 @@ public class ChooserActivity extends ResolverActivity { int launchedFromUid, boolean filterLastUsed, ChooserTarget[] callerChooserTargets) { super(context, initialIntents, rList, launchedFromUid, filterLastUsed); - for (ChooserTarget target : callerChooserTargets) { - mCallerTargets.add(new ChooserTargetInfo(target)); + if (callerChooserTargets != null) { + for (ChooserTarget target : callerChooserTargets) { + mCallerTargets.add(new ChooserTargetInfo(target)); + } } } diff --git a/core/java/com/android/internal/midi/MidiFramer.java b/core/java/com/android/internal/midi/MidiFramer.java index 53d71bb..1a7fa0b 100644 --- a/core/java/com/android/internal/midi/MidiFramer.java +++ b/core/java/com/android/internal/midi/MidiFramer.java @@ -17,6 +17,7 @@ package com.android.internal.midi; import android.media.midi.MidiReceiver; +import android.util.Log; import java.io.IOException; @@ -38,6 +39,7 @@ public class 
MidiFramer extends MidiReceiver { private int mCount; private int mRunningStatus; private int mNeeded; + private boolean mInSysEx; public MidiFramer(MidiReceiver receiver) { mReceiver = receiver; @@ -52,12 +54,14 @@ public class MidiFramer extends MidiReceiver { } /* - * @see android.midi.MidiReceiver#onPost(byte[], int, int, long) + * @see android.midi.MidiReceiver#onReceive(byte[], int, int, long) */ @Override public void onReceive(byte[] data, int offset, int count, long timestamp) throws IOException { // Log.i(TAG, formatMidiData(data, offset, count)); + int sysExStartOffset = (mInSysEx ? offset : -1); + for (int i = 0; i < count; i++) { int b = data[offset] & 0xFF; if (b >= 0x80) { // status byte? @@ -66,27 +70,50 @@ public class MidiFramer extends MidiReceiver { mCount = 1; mNeeded = MidiConstants.getBytesPerMessage(b) - 1; } else if (b < 0xF8) { // system common? - mBuffer[0] = (byte) b; - mRunningStatus = 0; - mCount = 1; - mNeeded = MidiConstants.getBytesPerMessage(b) - 1; + if (b == 0xF0 /* SysEx Start */) { + // Log.i(TAG, "SysEx Start"); + mInSysEx = true; + sysExStartOffset = offset; + } else if (b == 0xF7 /* SysEx End */) { + // Log.i(TAG, "SysEx End"); + if (mInSysEx) { + mReceiver.sendWithTimestamp(data, sysExStartOffset, + offset - sysExStartOffset, timestamp); + mInSysEx = false; + sysExStartOffset = -1; + } + } else { + mBuffer[0] = (byte) b; + mRunningStatus = 0; + mCount = 1; + mNeeded = MidiConstants.getBytesPerMessage(b) - 1; + } } else { // real-time? // Single byte message interleaved with other data. mReceiver.sendWithTimestamp(data, offset, 1, timestamp); } } else { // data byte - mBuffer[mCount++] = (byte) b; - if (--mNeeded == 0) { - if (mRunningStatus != 0) { - mBuffer[0] = (byte) mRunningStatus; + // Save SysEx data for SysEx End marker or end of buffer. 
+ if (!mInSysEx) { + mBuffer[mCount++] = (byte) b; + if (--mNeeded == 0) { + if (mRunningStatus != 0) { + mBuffer[0] = (byte) mRunningStatus; + } + mReceiver.sendWithTimestamp(mBuffer, 0, mCount, timestamp); + mNeeded = MidiConstants.getBytesPerMessage(mBuffer[0]) - 1; + mCount = 1; } - mReceiver.sendWithTimestamp(mBuffer, 0, mCount, timestamp); - mNeeded = MidiConstants.getBytesPerMessage(mBuffer[0]) - 1; - mCount = 1; } } ++offset; } + + // send any accumulatedSysEx data + if (sysExStartOffset >= 0) { + mReceiver.sendWithTimestamp(data, sysExStartOffset, + offset - sysExStartOffset, timestamp); + } } } diff --git a/core/java/com/android/internal/util/ImageUtils.java b/core/java/com/android/internal/util/ImageUtils.java index 7d56e9e..a0d0b20 100644 --- a/core/java/com/android/internal/util/ImageUtils.java +++ b/core/java/com/android/internal/util/ImageUtils.java @@ -66,7 +66,7 @@ public class ImageUtils { COMPACT_BITMAP_SIZE, COMPACT_BITMAP_SIZE, Bitmap.Config.ARGB_8888 ); mTempCompactBitmapCanvas = new Canvas(mTempCompactBitmap); - mTempCompactBitmapPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + mTempCompactBitmapPaint = new Paint(); mTempCompactBitmapPaint.setFilterBitmap(true); } mTempMatrix.reset(); diff --git a/core/java/com/android/internal/widget/LockPatternUtils.java b/core/java/com/android/internal/widget/LockPatternUtils.java index 1096e34..3d23986 100644 --- a/core/java/com/android/internal/widget/LockPatternUtils.java +++ b/core/java/com/android/internal/widget/LockPatternUtils.java @@ -17,9 +17,11 @@ package com.android.internal.widget; import android.Manifest; +import android.app.ActivityManager; import android.app.ActivityManagerNative; import android.app.admin.DevicePolicyManager; import android.app.trust.TrustManager; +import android.bluetooth.BluetoothClass; import android.content.ComponentName; import android.content.ContentResolver; import android.content.Context; @@ -142,11 +144,6 @@ public class LockPatternUtils { private DevicePolicyManager mDevicePolicyManager; private ILockSettings mLockSettingsService; - private final boolean mMultiUserMode; - - // The current user is set by KeyguardViewMediator and shared by all LockPatternUtils. - private static volatile int sCurrentUserId = UserHandle.USER_NULL; - public DevicePolicyManager getDevicePolicyManager() { if (mDevicePolicyManager == null) { mDevicePolicyManager = @@ -171,12 +168,6 @@ public class LockPatternUtils { public LockPatternUtils(Context context) { mContext = context; mContentResolver = context.getContentResolver(); - - // If this is being called by the system or by an application like keyguard that - // has permision INTERACT_ACROSS_USERS, then LockPatternUtils will operate in multi-user - // mode where calls are for the current user rather than the user of the calling process. - mMultiUserMode = context.checkCallingOrSelfPermission( - Manifest.permission.INTERACT_ACROSS_USERS_FULL) == PackageManager.PERMISSION_GRANTED; } private ILockSettings getLockSettings() { @@ -188,93 +179,55 @@ public class LockPatternUtils { return mLockSettingsService; } - public int getRequestedMinimumPasswordLength() { - return getDevicePolicyManager().getPasswordMinimumLength(null, getCurrentOrCallingUserId()); + public int getRequestedMinimumPasswordLength(int userId) { + return getDevicePolicyManager().getPasswordMinimumLength(null, userId); } /** * Gets the device policy password mode. If the mode is non-specific, returns * MODE_PATTERN which allows the user to choose anything. 
*/ - public int getRequestedPasswordQuality() { - return getDevicePolicyManager().getPasswordQuality(null, getCurrentOrCallingUserId()); - } - - public int getRequestedPasswordHistoryLength() { - return getRequestedPasswordHistoryLength(getCurrentOrCallingUserId()); + public int getRequestedPasswordQuality(int userId) { + return getDevicePolicyManager().getPasswordQuality(null, userId); } private int getRequestedPasswordHistoryLength(int userId) { return getDevicePolicyManager().getPasswordHistoryLength(null, userId); } - public int getRequestedPasswordMinimumLetters() { - return getDevicePolicyManager().getPasswordMinimumLetters(null, - getCurrentOrCallingUserId()); + public int getRequestedPasswordMinimumLetters(int userId) { + return getDevicePolicyManager().getPasswordMinimumLetters(null, userId); } - public int getRequestedPasswordMinimumUpperCase() { - return getDevicePolicyManager().getPasswordMinimumUpperCase(null, - getCurrentOrCallingUserId()); + public int getRequestedPasswordMinimumUpperCase(int userId) { + return getDevicePolicyManager().getPasswordMinimumUpperCase(null, userId); } - public int getRequestedPasswordMinimumLowerCase() { - return getDevicePolicyManager().getPasswordMinimumLowerCase(null, - getCurrentOrCallingUserId()); + public int getRequestedPasswordMinimumLowerCase(int userId) { + return getDevicePolicyManager().getPasswordMinimumLowerCase(null, userId); } - public int getRequestedPasswordMinimumNumeric() { - return getDevicePolicyManager().getPasswordMinimumNumeric(null, - getCurrentOrCallingUserId()); + public int getRequestedPasswordMinimumNumeric(int userId) { + return getDevicePolicyManager().getPasswordMinimumNumeric(null, userId); } - public int getRequestedPasswordMinimumSymbols() { - return getDevicePolicyManager().getPasswordMinimumSymbols(null, - getCurrentOrCallingUserId()); + public int getRequestedPasswordMinimumSymbols(int userId) { + return getDevicePolicyManager().getPasswordMinimumSymbols(null, userId); } - public int getRequestedPasswordMinimumNonLetter() { - return getDevicePolicyManager().getPasswordMinimumNonLetter(null, - getCurrentOrCallingUserId()); + public int getRequestedPasswordMinimumNonLetter(int userId) { + return getDevicePolicyManager().getPasswordMinimumNonLetter(null, userId); } - public void reportFailedPasswordAttempt() { - int userId = getCurrentOrCallingUserId(); + public void reportFailedPasswordAttempt(int userId) { getDevicePolicyManager().reportFailedPasswordAttempt(userId); getTrustManager().reportUnlockAttempt(false /* authenticated */, userId); getTrustManager().reportRequireCredentialEntry(userId); } - public void reportSuccessfulPasswordAttempt() { - getDevicePolicyManager().reportSuccessfulPasswordAttempt(getCurrentOrCallingUserId()); - getTrustManager().reportUnlockAttempt(true /* authenticated */, - getCurrentOrCallingUserId()); - } - - public void setCurrentUser(int userId) { - sCurrentUserId = userId; - } - - public int getCurrentUser() { - if (sCurrentUserId != UserHandle.USER_NULL) { - // Someone is regularly updating using setCurrentUser() use that value. - return sCurrentUserId; - } - try { - return ActivityManagerNative.getDefault().getCurrentUser().id; - } catch (RemoteException re) { - return UserHandle.USER_OWNER; - } - } - - private int getCurrentOrCallingUserId() { - if (mMultiUserMode) { - // TODO: This is a little inefficient. See if all users of this are able to - // handle USER_CURRENT and pass that instead. 
- return getCurrentUser(); - } else { - return UserHandle.getCallingUserId(); - } + public void reportSuccessfulPasswordAttempt(int userId) { + getDevicePolicyManager().reportSuccessfulPasswordAttempt(userId); + getTrustManager().reportUnlockAttempt(true /* authenticated */, userId); } /** @@ -286,8 +239,7 @@ public class LockPatternUtils { * @param challenge The challenge to verify against the pattern * @return the attestation that the challenge was verified, or null. */ - public byte[] verifyPattern(List<LockPatternView.Cell> pattern, long challenge) { - final int userId = getCurrentOrCallingUserId(); + public byte[] verifyPattern(List<LockPatternView.Cell> pattern, long challenge, int userId) { try { return getLockSettings().verifyPattern(patternToString(pattern), challenge, userId); } catch (RemoteException re) { @@ -301,8 +253,7 @@ public class LockPatternUtils { * @param pattern The pattern to check. * @return Whether the pattern matches the stored one. */ - public boolean checkPattern(List<LockPatternView.Cell> pattern) { - final int userId = getCurrentOrCallingUserId(); + public boolean checkPattern(List<LockPatternView.Cell> pattern, int userId) { try { return getLockSettings().checkPattern(patternToString(pattern), userId); } catch (RemoteException re) { @@ -319,8 +270,7 @@ public class LockPatternUtils { * @param challenge The challenge to verify against the password * @return the attestation that the challenge was verified, or null. */ - public byte[] verifyPassword(String password, long challenge) { - final int userId = getCurrentOrCallingUserId(); + public byte[] verifyPassword(String password, long challenge, int userId) { try { return getLockSettings().verifyPassword(password, challenge, userId); } catch (RemoteException re) { @@ -334,8 +284,7 @@ public class LockPatternUtils { * @param password The password to check. * @return Whether the password matches the stored one. */ - public boolean checkPassword(String password) { - final int userId = getCurrentOrCallingUserId(); + public boolean checkPassword(String password, int userId) { try { return getLockSettings().checkPassword(password, userId); } catch (RemoteException re) { @@ -348,8 +297,7 @@ public class LockPatternUtils { * Note that this also clears vold's copy of the password. * @return Whether the vold password matches or not. */ - public boolean checkVoldPassword() { - final int userId = getCurrentOrCallingUserId(); + public boolean checkVoldPassword(int userId) { try { return getLockSettings().checkVoldPassword(userId); } catch (RemoteException re) { @@ -364,8 +312,7 @@ public class LockPatternUtils { * @param password The password to check. * @return Whether the password matches any in the history. */ - public boolean checkPasswordHistory(String password) { - int userId = getCurrentOrCallingUserId(); + public boolean checkPasswordHistory(String password, int userId) { String passwordHashString = new String( passwordToHash(password, userId), StandardCharsets.UTF_8); String passwordHistory = getString(PASSWORD_HISTORY_KEY, userId); @@ -374,7 +321,7 @@ public class LockPatternUtils { } // Password History may be too long... int passwordHashLength = passwordHashString.length(); - int passwordHistoryLength = getRequestedPasswordHistoryLength(); + int passwordHistoryLength = getRequestedPasswordHistoryLength(userId); if(passwordHistoryLength == 0) { return false; } @@ -416,16 +363,8 @@ public class LockPatternUtils { * * @return True if the user has ever chosen a pattern. 
*/ - public boolean isPatternEverChosen() { - return getBoolean(PATTERN_EVER_CHOSEN_KEY, false, getCurrentOrCallingUserId()); - } - - /** - * Used by device policy manager to validate the current password - * information it has. - */ - public int getActivePasswordQuality() { - return getActivePasswordQuality(getCurrentOrCallingUserId()); + public boolean isPatternEverChosen(int userId) { + return getBoolean(PATTERN_EVER_CHOSEN_KEY, false, userId); } /** @@ -448,10 +387,6 @@ public class LockPatternUtils { return DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED; } - public void clearLock() { - clearLock(getCurrentOrCallingUserId()); - } - /** * Clear any lock pattern or password. */ @@ -479,16 +414,6 @@ public class LockPatternUtils { } /** - * Disable showing lock screen at all when the DevicePolicyManager allows it. - * This is only meaningful if pattern, pin or password are not set. - * - * @param disable Disables lock screen when true - */ - public void setLockScreenDisabled(boolean disable) { - setLockScreenDisabled(disable, getCurrentOrCallingUserId()); - } - - /** * Disable showing lock screen at all for a given user. * This is only meaningful if pattern, pin or password are not set. * @@ -505,21 +430,16 @@ public class LockPatternUtils { * * @return true if lock screen is disabled */ - public boolean isLockScreenDisabled() { - return !isSecure() && - getBoolean(DISABLE_LOCKSCREEN_KEY, false, getCurrentOrCallingUserId()); + public boolean isLockScreenDisabled(int userId) { + return !isSecure(userId) && + getBoolean(DISABLE_LOCKSCREEN_KEY, false, userId); } /** * Save a lock pattern. * @param pattern The new pattern to save. - * @param savedPattern The previously saved pattern, or null if none + * @param userId the user whose pattern is to be saved. */ - public void saveLockPattern(List<LockPatternView.Cell> pattern, - String savedPattern) { - this.saveLockPattern(pattern, savedPattern, getCurrentOrCallingUserId()); - } - public void saveLockPattern(List<LockPatternView.Cell> pattern, int userId) { this.saveLockPattern(pattern, null, userId); } @@ -588,8 +508,7 @@ public class LockPatternUtils { updateCryptoUserInfo(userId); } - public void setOwnerInfoEnabled(boolean enabled) { - int userId = getCurrentOrCallingUserId(); + public void setOwnerInfoEnabled(boolean enabled, int userId) { setBoolean(LOCK_SCREEN_OWNER_INFO_ENABLED, enabled, userId); updateCryptoUserInfo(userId); } @@ -598,11 +517,7 @@ public class LockPatternUtils { return getString(LOCK_SCREEN_OWNER_INFO, userId); } - public boolean isOwnerInfoEnabled() { - return isOwnerInfoEnabled(getCurrentOrCallingUserId()); - } - - private boolean isOwnerInfoEnabled(int userId) { + public boolean isOwnerInfoEnabled(int userId) { return getBoolean(LOCK_SCREEN_OWNER_INFO_ENABLED, false, userId); } @@ -724,21 +639,10 @@ public class LockPatternUtils { /** * Save a lock password. Does not ensure that the password is as good * as the requested mode, but will adjust the mode to be as good as the - * pattern. + * password. * @param password The password to save * @param savedPassword The previously saved lock password, or null if none * @param quality {@see DevicePolicyManager#getPasswordQuality(android.content.ComponentName)} - */ - public void saveLockPassword(String password, String savedPassword, int quality) { - saveLockPassword(password, savedPassword, quality, getCurrentOrCallingUserId()); - } - - /** - * Save a lock password. 
Does not ensure that the password is as good - * as the requested mode, but will adjust the mode to be as good as the - * pattern. - * @param password The password to save - * @param quality {@see DevicePolicyManager#getPasswordQuality(android.content.ComponentName)} * @param userHandle The userId of the user to change the password for */ public void saveLockPassword(String password, String savedPassword, int quality, @@ -865,16 +769,6 @@ public class LockPatternUtils { } /** - * Retrieves the quality mode we're in. - * {@see DevicePolicyManager#getPasswordQuality(android.content.ComponentName)} - * - * @return stored password quality - */ - public int getKeyguardStoredPasswordQuality() { - return getKeyguardStoredPasswordQuality(getCurrentOrCallingUserId()); - } - - /** * Retrieves the quality mode for {@param userHandle}. * {@see DevicePolicyManager#getPasswordQuality(android.content.ComponentName)} * @@ -997,13 +891,6 @@ public class LockPatternUtils { } /** - * @return Whether the lock screen is secured. - */ - public boolean isSecure() { - return isSecure(getCurrentOrCallingUserId()); - } - - /** * @param userId the user for which to report the value * @return Whether the lock screen is secured. */ @@ -1012,13 +899,6 @@ public class LockPatternUtils { return isLockPatternEnabled(mode, userId) || isLockPasswordEnabled(mode, userId); } - /** - * @return Whether the lock password is enabled - */ - public boolean isLockPasswordEnabled() { - return isLockPasswordEnabled(getCurrentOrCallingUserId()); - } - public boolean isLockPasswordEnabled(int userId) { return isLockPasswordEnabled(getKeyguardStoredPasswordQuality(userId), userId); } @@ -1035,10 +915,6 @@ public class LockPatternUtils { /** * @return Whether the lock pattern is enabled */ - public boolean isLockPatternEnabled() { - return isLockPatternEnabled(getCurrentOrCallingUserId()); - } - public boolean isLockPatternEnabled(int userId) { return isLockPatternEnabled(getKeyguardStoredPasswordQuality(userId), userId); } @@ -1051,16 +927,14 @@ public class LockPatternUtils { /** * @return Whether the visible pattern is enabled. */ - public boolean isVisiblePatternEnabled() { - return getBoolean(Settings.Secure.LOCK_PATTERN_VISIBLE, false, getCurrentOrCallingUserId()); + public boolean isVisiblePatternEnabled(int userId) { + return getBoolean(Settings.Secure.LOCK_PATTERN_VISIBLE, false, userId); } /** * Set whether the visible pattern is enabled. */ - public void setVisiblePatternEnabled(boolean enabled) { - int userId = getCurrentOrCallingUserId(); - + public void setVisiblePatternEnabled(boolean enabled, int userId) { setBoolean(Settings.Secure.LOCK_PATTERN_VISIBLE, enabled, userId); // Update for crypto if owner @@ -1095,9 +969,9 @@ public class LockPatternUtils { * pattern until the deadline has passed. * @return the chosen deadline. */ - public long setLockoutAttemptDeadline() { + public long setLockoutAttemptDeadline(int userId) { final long deadline = SystemClock.elapsedRealtime() + FAILED_ATTEMPT_TIMEOUT_MS; - setLong(LOCKOUT_ATTEMPT_DEADLINE, deadline, getCurrentOrCallingUserId()); + setLong(LOCKOUT_ATTEMPT_DEADLINE, deadline, userId); return deadline; } @@ -1106,8 +980,8 @@ public class LockPatternUtils { * attempt to enter his/her lock pattern, or 0 if the user is welcome to * enter a pattern. 
*/ - public long getLockoutAttemptDeadline() { - final long deadline = getLong(LOCKOUT_ATTEMPT_DEADLINE, 0L, getCurrentOrCallingUserId()); + public long getLockoutAttemptDeadline(int userId) { + final long deadline = getLong(LOCKOUT_ATTEMPT_DEADLINE, 0L, userId); final long now = SystemClock.elapsedRealtime(); if (deadline < now || deadline > (now + FAILED_ATTEMPT_TIMEOUT_MS)) { return 0L; @@ -1166,21 +1040,12 @@ public class LockPatternUtils { } } - public void setPowerButtonInstantlyLocks(boolean enabled) { - setBoolean(LOCKSCREEN_POWER_BUTTON_INSTANTLY_LOCKS, enabled, getCurrentOrCallingUserId()); - } - - public boolean getPowerButtonInstantlyLocks() { - return getBoolean(LOCKSCREEN_POWER_BUTTON_INSTANTLY_LOCKS, true, - getCurrentOrCallingUserId()); - } - - public void setEnabledTrustAgents(Collection<ComponentName> activeTrustAgents) { - setEnabledTrustAgents(activeTrustAgents, getCurrentOrCallingUserId()); + public void setPowerButtonInstantlyLocks(boolean enabled, int userId) { + setBoolean(LOCKSCREEN_POWER_BUTTON_INSTANTLY_LOCKS, enabled, userId); } - public List<ComponentName> getEnabledTrustAgents() { - return getEnabledTrustAgents(getCurrentOrCallingUserId()); + public boolean getPowerButtonInstantlyLocks(int userId) { + return getBoolean(LOCKSCREEN_POWER_BUTTON_INSTANTLY_LOCKS, true, userId); } public void setEnabledTrustAgents(Collection<ComponentName> activeTrustAgents, int userId) { @@ -1192,7 +1057,7 @@ public class LockPatternUtils { sb.append(cn.flattenToShortString()); } setString(ENABLED_TRUST_AGENTS, sb.toString(), userId); - getTrustManager().reportEnabledTrustAgentsChanged(getCurrentOrCallingUserId()); + getTrustManager().reportEnabledTrustAgentsChanged(userId); } public List<ComponentName> getEnabledTrustAgents(int userId) { @@ -1228,7 +1093,7 @@ public class LockPatternUtils { } public void setCredentialRequiredToDecrypt(boolean required) { - if (getCurrentUser() != UserHandle.USER_OWNER) { + if (ActivityManager.getCurrentUser() != UserHandle.USER_OWNER) { Log.w(TAG, "Only device owner may call setCredentialRequiredForDecrypt()"); return; } |
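
The View.java hunk above replaces the old anonymous android:onClick listener with DeclaredOnClickListener, which lazily resolves the named handler by walking from the view's Context through ContextWrapper.getBaseContext() until some level declares a public method with the expected signature, then caches and invokes it. Below is a minimal plain-Java sketch of that resolution strategy only; the types Ctx, Wrapper and HostActivity are hypothetical stand-ins so the sketch compiles and runs outside the framework, and it is not the framework code itself.

import java.lang.reflect.Method;

public class DeclaredHandlerSketch {

    interface Ctx { Ctx getBase(); }

    static class Wrapper implements Ctx {
        private final Ctx base;
        Wrapper(Ctx base) { this.base = base; }
        @Override public Ctx getBase() { return base; }
    }

    static class HostActivity implements Ctx {
        @Override public Ctx getBase() { return null; }
        // The method named by android:onClick would live on the Activity.
        public void onBuyClicked(String viewId) { System.out.println("clicked " + viewId); }
    }

    // Mirrors DeclaredOnClickListener#resolveMethod: try each level's class for a public
    // method with the expected signature, fall back to the wrapped ("base") context, and
    // fail with a descriptive error if nothing in the chain matches.
    static void invokeDeclared(Ctx ctx, String methodName, String arg) throws Exception {
        for (Ctx c = ctx; c != null; c = c.getBase()) {
            try {
                Method m = c.getClass().getMethod(methodName, String.class);
                m.invoke(c, arg);
                return;
            } catch (NoSuchMethodException ignored) {
                // Not declared at this level; keep searching toward the base context.
            }
        }
        throw new IllegalStateException(
                "Could not find " + methodName + "(String) in any wrapped context");
    }

    public static void main(String[] args) throws Exception {
        Ctx themed = new Wrapper(new HostActivity());      // e.g. a theme wrapper around an Activity
        invokeDeclared(themed, "onBuyClicked", "buy_button");   // prints "clicked buy_button"
    }
}

Resolving on the first click rather than at inflation keeps view creation cheap, and the new IllegalStateException text in the hunk names both the handler and the offending view, which makes misconfigured android:onClick attributes much easier to track down.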
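The Editor.java hunk renames TextDisplayList to TextRenderNode but keeps the underlying scheme: a growable pool of per-block recordings, each with an isDirty flag, re-recorded only when dirty or when the backing display list is no longer valid, and marked dirty again when overlapping text changes. The following is a simplified plain-Java sketch of that dirty-flag pooling pattern; BlockRecording and TextBlockCacheSketch are hypothetical names standing in for the framework classes, and a String stands in for the recorded display list.

import java.util.ArrayList;
import java.util.List;

public class TextBlockCacheSketch {

    static class BlockRecording {
        final String name;
        boolean isDirty = true;     // newly created entries always need recording
        String recorded;            // stands in for the RenderNode display-list contents

        BlockRecording(String name) { this.name = name; }

        boolean needsRecord() {
            // Mirrors TextRenderNode.needsRecord(): dirty, or the backing data was released.
            return isDirty || recorded == null;
        }
    }

    private final List<BlockRecording> pool = new ArrayList<>();

    String drawBlock(int blockIndex, String blockText) {
        while (pool.size() <= blockIndex) {
            pool.add(null);                          // grow the pool on demand
        }
        BlockRecording node = pool.get(blockIndex);
        if (node == null) {
            node = new BlockRecording("Text " + blockIndex);
            pool.set(blockIndex, node);
        }
        if (node.needsRecord()) {
            node.recorded = "[" + blockText + "]";   // "record" the block
            node.isDirty = false;
        }
        return node.recorded;                        // "draw" the cached recording
    }

    void invalidateBlock(int blockIndex) {
        BlockRecording node = blockIndex < pool.size() ? pool.get(blockIndex) : null;
        if (node != null) {
            node.isDirty = true;                     // edited text forces a re-record
        }
    }

    public static void main(String[] args) {
        TextBlockCacheSketch cache = new TextBlockCacheSketch();
        System.out.println(cache.drawBlock(0, "Hello"));    // records, then draws "[Hello]"
        System.out.println(cache.drawBlock(0, "Hello!"));   // not dirty: still draws "[Hello]"
        cache.invalidateBlock(0);
        System.out.println(cache.drawBlock(0, "Hello!"));   // re-records: draws "[Hello!]"
    }
}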
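The MidiFramer hunk adds SysEx handling to the framer: bytes between a 0xF0 start and a 0xF7 end are forwarded as raw offset/length ranges over the incoming buffer (with any unterminated tail flushed at the end of the call) rather than being accumulated in mBuffer, while real-time bytes (0xF8 and above) still pass through immediately even when interleaved mid-message. Below is a simplified, self-contained illustration of those framing rules in plain Java; it is not the internal class, it only handles a single buffer rather than SysEx spanning calls, and system-common message sizes are deliberately omitted.

import java.util.Arrays;

public class MidiFramingSketch {

    interface Sink { void deliver(byte[] data, int offset, int count); }

    static int bytesPerMessage(int status) {
        // Program change (0xC0) and channel pressure (0xD0) carry one data byte; the other
        // channel messages carry two. System-common sizes are omitted in this sketch.
        int high = status & 0xF0;
        return (high == 0xC0 || high == 0xD0) ? 2 : 3;
    }

    static void frame(byte[] data, Sink sink) {
        byte[] buffer = new byte[3];
        int count = 0, needed = 0, runningStatus = 0;
        boolean inSysEx = false;
        int sysExStart = -1;

        for (int i = 0; i < data.length; i++) {
            int b = data[i] & 0xFF;
            if (b >= 0xF8) {                        // real-time byte: forward immediately
                sink.deliver(data, i, 1);
            } else if (b == 0xF0) {                 // SysEx start: begin raw pass-through
                inSysEx = true;
                sysExStart = i;
            } else if (b == 0xF7) {                 // SysEx end: flush the raw range
                if (inSysEx) {
                    sink.deliver(data, sysExStart, i + 1 - sysExStart);
                    inSysEx = false;
                }
            } else if (inSysEx) {
                // SysEx payload: nothing to do until 0xF7 or the end of the buffer.
            } else if (b >= 0x80) {                 // channel status byte
                buffer[0] = (byte) b;
                runningStatus = b;
                count = 1;
                needed = bytesPerMessage(b) - 1;
            } else if (runningStatus != 0) {        // data byte under running status
                buffer[count++] = (byte) b;
                if (--needed == 0) {
                    sink.deliver(Arrays.copyOf(buffer, count), 0, count);
                    count = 1;                      // keep running status for the next message
                    needed = bytesPerMessage(runningStatus) - 1;
                }
            }
        }
        if (inSysEx) {                              // buffer ended mid-SysEx: flush what we have
            sink.deliver(data, sysExStart, data.length - sysExStart);
        }
    }

    public static void main(String[] args) {
        // Note-on, an interleaved real-time clock byte, then a short SysEx block.
        byte[] stream = { (byte) 0x90, 0x40, (byte) 0xF8, 0x64,
                          (byte) 0xF0, 0x7E, 0x01, (byte) 0xF7 };
        frame(stream, (d, off, len) ->
                System.out.println("message of " + len + " byte(s) at offset " + off));
    }
}

Forwarding SysEx as ranges over the caller's buffer, as the hunk does, avoids copying payloads of arbitrary size into the small fixed message buffer used for channel messages.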
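The LockPatternUtils hunks drop the shared sCurrentUserId / "current or calling user" plumbing and make every query and mutation take an explicit userId. The sketch below shows the caller-side shape that implies, assuming a hypothetical KeyguardSecurityHelper that lives inside the framework tree where the internal LockPatternUtils and the hidden ActivityManager.getCurrentUser() are visible; it illustrates the new call pattern and is not code from this change.

import android.app.ActivityManager;
import android.content.Context;

import com.android.internal.widget.LockPatternUtils;

class KeyguardSecurityHelper {
    private final LockPatternUtils mLockPatternUtils;

    KeyguardSecurityHelper(Context context) {
        mLockPatternUtils = new LockPatternUtils(context);
    }

    boolean shouldShowKeyguard() {
        // Before this change these calls took no argument and resolved the user internally;
        // now the caller resolves the user once and passes it to every query.
        final int userId = ActivityManager.getCurrentUser();
        return !mLockPatternUtils.isLockScreenDisabled(userId);
    }

    void onPasswordAttempt(String password, int userId) {
        // Check and report against the same explicit user, so lockout state, DPM policy and
        // trust-agent reporting all land on the user whose credential was actually entered.
        if (mLockPatternUtils.checkPassword(password, userId)) {
            mLockPatternUtils.reportSuccessfulPasswordAttempt(userId);
        } else {
            mLockPatternUtils.reportFailedPasswordAttempt(userId);
        }
    }
}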
