Diffstat (limited to 'core/java/android')
17 files changed, 298 insertions, 225 deletions
diff --git a/core/java/android/app/admin/DevicePolicyManager.java b/core/java/android/app/admin/DevicePolicyManager.java index ed814c3..cf9813f 100644 --- a/core/java/android/app/admin/DevicePolicyManager.java +++ b/core/java/android/app/admin/DevicePolicyManager.java @@ -807,6 +807,24 @@ public class DevicePolicyManager { public static final String ACTION_SYSTEM_UPDATE_POLICY_CHANGED = "android.app.action.SYSTEM_UPDATE_POLICY_CHANGED"; + /** + * Permission policy to prompt user for new permission requests for runtime permissions. + * Already granted or denied permissions are not affected by this. + */ + public static final int PERMISSION_POLICY_PROMPT = 0; + + /** + * Permission policy to always grant new permission requests for runtime permissions. + * Already granted or denied permissions are not affected by this. + */ + public static final int PERMISSION_POLICY_AUTO_GRANT = 1; + + /** + * Permission policy to always deny new permission requests for runtime permissions. + * Already granted or denied permissions are not affected by this. + */ + public static final int PERMISSION_POLICY_AUTO_DENY = 2; + /** * Return true if the given administrator component is currently @@ -4342,4 +4360,58 @@ public class DevicePolicyManager { Log.w(TAG, "Failed talking with device policy service", re); } } + + /** + * Called by profile or device owners to set the default response for future runtime permission + * requests by applications. The policy can allow for normal operation which prompts the + * user to grant a permission, or can allow automatic granting or denying of runtime + * permission requests by an application. This also applies to new permissions declared by app + * updates. + * @param admin Which profile or device owner this request is associated with. + * @param policy One of the policy constants {@link #PERMISSION_POLICY_PROMPT}, + * {@link #PERMISSION_POLICY_AUTO_GRANT} and {@link #PERMISSION_POLICY_AUTO_DENY}. + */ + public void setPermissionPolicy(ComponentName admin, int policy) { + try { + mService.setPermissionPolicy(admin, policy); + } catch (RemoteException re) { + Log.w(TAG, "Failed talking with device policy service", re); + } + } + + /** + * Returns the current runtime permission policy set by the device or profile owner. The + * default is {@link #PERMISSION_POLICY_PROMPT}. + * @param admin Which profile or device owner this request is associated with. + * @return the current policy for future permission requests. + */ + public int getPermissionPolicy(ComponentName admin) { + try { + return mService.getPermissionPolicy(admin); + } catch (RemoteException re) { + return PERMISSION_POLICY_PROMPT; + } + } + + /** + * Grants or revokes a runtime permission to a specific application so that the user + * does not have to be prompted. This might affect all permissions in a group that the + * runtime permission belongs to. This method can only be called by a profile or device + * owner. + * @param admin Which profile or device owner this request is associated with. + * @param packageName The application to grant or revoke a permission to. + * @param permission The permission to grant or revoke. + * @param granted Whether or not to grant the permission. If false, all permissions in the + * associated permission group will be denied. 
+ * @return whether the permission was successfully granted or revoked + */ + public boolean setPermissionGranted(ComponentName admin, String packageName, + String permission, boolean granted) { + try { + return mService.setPermissionGranted(admin, packageName, permission, granted); + } catch (RemoteException re) { + Log.w(TAG, "Failed talking with device policy service", re); + return false; + } + } } diff --git a/core/java/android/app/admin/IDevicePolicyManager.aidl b/core/java/android/app/admin/IDevicePolicyManager.aidl index a678c51..833bc00 100644 --- a/core/java/android/app/admin/IDevicePolicyManager.aidl +++ b/core/java/android/app/admin/IDevicePolicyManager.aidl @@ -229,4 +229,9 @@ interface IDevicePolicyManager { boolean getDoNotAskCredentialsOnBoot(); void notifyPendingSystemUpdate(in long updateReceivedTime); + + void setPermissionPolicy(in ComponentName admin, int policy); + int getPermissionPolicy(in ComponentName admin); + boolean setPermissionGranted(in ComponentName admin, String packageName, String permission, + boolean granted); } diff --git a/core/java/android/content/pm/PackageManager.java b/core/java/android/content/pm/PackageManager.java index e1c271d..f01ca09 100644 --- a/core/java/android/content/pm/PackageManager.java +++ b/core/java/android/content/pm/PackageManager.java @@ -209,7 +209,14 @@ public abstract class PackageManager { * matching. This is a synonym for including the CATEGORY_DEFAULT in your * supplied Intent. */ - public static final int MATCH_DEFAULT_ONLY = 0x00010000; + public static final int MATCH_DEFAULT_ONLY = 0x00010000; + + /** + * Querying flag: if set and if the platform is doing any filtering of the results, then + * the filtering will not happen. This is a synonym for saying that all results should + * be returned. + */ + public static final int MATCH_ALL = 0x00020000; /** * Flag for {@link addCrossProfileIntentFilter}: if this flag is set: @@ -2637,6 +2644,8 @@ public abstract class PackageManager { * {@link #MATCH_DEFAULT_ONLY}, to limit the resolution to only * those activities that support the {@link android.content.Intent#CATEGORY_DEFAULT}. * + * You can also set {@link #MATCH_ALL} for preventing the filtering of the results. + * * @return A List<ResolveInfo> containing one entry for each matching * Activity. These are ordered from best to worst match -- that * is, the first item in the list is what is returned by @@ -2658,6 +2667,8 @@ public abstract class PackageManager { * {@link #MATCH_DEFAULT_ONLY}, to limit the resolution to only * those activities that support the {@link android.content.Intent#CATEGORY_DEFAULT}. * + * You can also set {@link #MATCH_ALL} for preventing the filtering of the results. + * * @return A List<ResolveInfo> containing one entry for each matching * Activity. 
These are ordered from best to worst match -- that * is, the first item in the list is what is returned by diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java index 341fb18..19e821c 100644 --- a/core/java/android/hardware/camera2/CameraCharacteristics.java +++ b/core/java/android/hardware/camera2/CameraCharacteristics.java @@ -603,10 +603,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>List of available high speed video size and fps range configurations * supported by the camera device, in the format of (width, height, fps_min, fps_max).</p> - * <p>When HIGH_SPEED_VIDEO is supported in {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}, - * this metadata will list the supported high speed video size and fps range - * configurations. All the sizes listed in this configuration will be a subset - * of the sizes reported by StreamConfigurationMap#getOutputSizes for processed + * <p>When HIGH_SPEED_VIDEO is supported in {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}, this metadata + * will list the supported high speed video size and fps range configurations. All the sizes + * listed in this configuration will be a subset of the sizes reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } for processed * non-stalling formats.</p> * <p>For the high speed video use case, where the application will set * {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} to HIGH_SPEED_VIDEO in capture requests, the application must @@ -1116,11 +1115,12 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * into the 3 stream types as below:</p> * <ul> * <li>Processed (but stalling): any non-RAW format with a stallDurations > 0. - * Typically JPEG format (ImageFormat#JPEG).</li> - * <li>Raw formats: ImageFormat#RAW_SENSOR, ImageFormat#RAW10, ImageFormat#RAW12, - * and ImageFormat#RAW_OPAQUE.</li> + * Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.</li> + * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12 RAW12}.</li> * <li>Processed (but not-stalling): any non-RAW format without a stall duration. 
- * Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li> + * Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}, + * {@link android.graphics.ImageFormat#NV21 NV21}, or + * {@link android.graphics.ImageFormat#YV12 YV12}.</li> * </ul> * <p><b>Range of valid values:</b><br></p> * <p>For processed (and stalling) format streams, >= 1.</p> @@ -1148,10 +1148,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * be any <code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p> * <p>In particular, a <code>RAW</code> format is typically one of:</p> * <ul> - * <li>ImageFormat#RAW_SENSOR</li> - * <li>ImageFormat#RAW10</li> - * <li>ImageFormat#RAW12</li> - * <li>Opaque <code>RAW</code></li> + * <li>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</li> + * <li>{@link android.graphics.ImageFormat#RAW10 RAW10}</li> + * <li>{@link android.graphics.ImageFormat#RAW12 RAW12}</li> * </ul> * <p>LEGACY mode devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> LEGACY) * never support raw streams.</p> @@ -1180,13 +1179,13 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration. * Typically:</p> * <ul> - * <li>ImageFormat#YUV_420_888</li> - * <li>ImageFormat#NV21</li> - * <li>ImageFormat#YV12</li> - * <li>Implementation-defined formats, i.e. StreamConfiguration#isOutputSupportedFor(Class)</li> + * <li>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</li> + * <li>{@link android.graphics.ImageFormat#NV21 NV21}</li> + * <li>{@link android.graphics.ImageFormat#YV12 YV12}</li> + * <li>Implementation-defined formats, i.e. {@link android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class) }</li> * </ul> - * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with - * a processed format -- it will return 0 for a non-stalling stream.</p> + * <p>For full guarantees, query {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } with a + * processed format -- it will return 0 for a non-stalling stream.</p> * <p>LEGACY devices will support at least 2 processing/non-stalling streams.</p> * <p><b>Range of valid values:</b><br></p> * <p>>= 3 @@ -1212,10 +1211,11 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * the camera device. Using more streams simultaneously may require more hardware and * CPU resources that will consume more power. The image format for this kind of an output stream can * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p> - * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations > 0. - * Typically only the <code>JPEG</code> format (ImageFormat#JPEG) is a stalling format.</p> - * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with - * a processed format -- it will return a non-0 value for a stalling stream.</p> + * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations + * > 0. 
Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a + * stalling format.</p> + * <p>For full guarantees, query {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } with a + * processed format -- it will return a non-0 value for a stalling stream.</p> * <p>LEGACY devices will support up to 1 processing/stalling stream.</p> * <p><b>Range of valid values:</b><br></p> * <p>>= 1</p> @@ -1232,10 +1232,9 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>The maximum numbers of any type of input streams * that can be configured and used simultaneously by a camera device.</p> * <p>When set to 0, it means no input stream is supported.</p> - * <p>The image format for a input stream can be any supported - * format returned by StreamConfigurationMap#getInputFormats. When using an - * input stream, there must be at least one output stream - * configured to to receive the reprocessed images.</p> + * <p>The image format for a input stream can be any supported format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }. When using an + * input stream, there must be at least one output stream configured to to receive the + * reprocessed images.</p> * <p>When an input stream and some output streams are used in a reprocessing request, * only the input buffer will be used to produce these output stream buffers, and a * new sensor image will not be captured.</p> @@ -1352,7 +1351,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>A list of all keys that the camera device has available - * to use with CaptureRequest.</p> + * to use with {@link android.hardware.camera2.CaptureRequest }.</p> * <p>Attempting to set a key into a CaptureRequest that is not * listed here will result in an invalid request and will be rejected * by the camera device.</p> @@ -1370,7 +1369,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>A list of all keys that the camera device has available - * to use with CaptureResult.</p> + * to use with {@link android.hardware.camera2.CaptureResult }.</p> * <p>Attempting to get a key from a CaptureResult that is not * listed here will always return a <code>null</code> value. Getting a key from * a CaptureResult that is listed here will generally never return a <code>null</code> @@ -1396,7 +1395,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri /** * <p>A list of all keys that the camera device has available - * to use with CameraCharacteristics.</p> + * to use with {@link android.hardware.camera2.CameraCharacteristics }.</p> * <p>This entry follows the same rules as * android.request.availableResultKeys (except that it applies for * CameraCharacteristics instead of CaptureResult). 
See above for more @@ -1535,34 +1534,31 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * </thead> * <tbody> * <tr> - * <td align="left">PRIVATE (ImageFormat#PRIVATE)</td> - * <td align="left">JPEG</td> + * <td align="left">{@link android.graphics.ImageFormat#PRIVATE }</td> + * <td align="left">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="left">OPAQUE_REPROCESSING</td> * </tr> * <tr> - * <td align="left">PRIVATE</td> - * <td align="left">YUV_420_888</td> + * <td align="left">{@link android.graphics.ImageFormat#PRIVATE }</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="left">OPAQUE_REPROCESSING</td> * </tr> * <tr> - * <td align="left">YUV_420_888</td> - * <td align="left">JPEG</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> + * <td align="left">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="left">YUV_REPROCESSING</td> * </tr> * <tr> - * <td align="left">YUV_420_888</td> - * <td align="left">YUV_420_888</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> + * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="left">YUV_REPROCESSING</td> * </tr> * </tbody> * </table> - * <p>PRIVATE refers to a device-internal format that is not directly application-visible. - * A PRIVATE input surface can be acquired by - * ImageReader.newOpaqueInstance(width, height, maxImages). - * For a OPAQUE_REPROCESSING-capable camera device, using the PRIVATE format - * as either input or output will never hurt maximum frame rate (i.e. - * StreamConfigurationMap#getOutputStallDuration(format, size) is always 0), - * where format is ImageFormat#PRIVATE.</p> + * <p>PRIVATE refers to a device-internal format that is not directly application-visible. A + * PRIVATE input surface can be acquired by {@link android.media.ImageReader#newOpaqueInstance }.</p> + * <p>For a OPAQUE_REPROCESSING-capable camera device, using the PRIVATE format as either input + * or output will never hurt maximum frame rate (i.e. 
{@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0),</p> * <p>Attempting to configure an input stream with output streams not * listed as available in this map is not valid.</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> @@ -1680,7 +1676,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * android.scaler.availableStallDurations for more details about * calculating the max frame rate.</p> * <p>(Keep in sync with - * StreamConfigurationMap#getOutputMinFrameDuration)</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p> * <p><b>Units</b>: (format, width, height, ns) x n</p> * <p>This key is available on all devices.</p> * @@ -1734,12 +1730,13 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * ignored).</p> * <p>The following formats may always have a stall duration:</p> * <ul> - * <li>ImageFormat#JPEG</li> - * <li>ImageFormat#RAW_SENSOR</li> + * <li>{@link android.graphics.ImageFormat#JPEG }</li> + * <li>{@link android.graphics.ImageFormat#RAW_SENSOR }</li> * </ul> * <p>The following formats will never have a stall duration:</p> * <ul> - * <li>ImageFormat#YUV_420_888</li> + * <li>{@link android.graphics.ImageFormat#YUV_420_888 }</li> + * <li>{@link android.graphics.ImageFormat#RAW10 }</li> * </ul> * <p>All other formats may or may not have an allowed stall duration on * a per-capability basis; refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} @@ -1747,7 +1744,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about * calculating the max frame rate (absent stalls).</p> * <p>(Keep up to date with - * StreamConfigurationMap#getOutputStallDuration(int, Size) )</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } )</p> * <p><b>Units</b>: (format, width, height, ns) x n</p> * <p>This key is available on all devices.</p> * @@ -1786,57 +1783,57 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * </thead> * <tbody> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td> * <td align="center">Any</td> * <td align="center"></td> * </tr> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">1920x1080 (1080p)</td> * <td align="center">Any</td> * <td align="center">if 1080p <= activeArraySize</td> * </tr> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">1280x720 (720)</td> * <td align="center">Any</td> * <td align="center">if 720p <= activeArraySize</td> * </tr> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">640x480 (480p)</td> * <td align="center">Any</td> * <td align="center">if 480p <= activeArraySize</td> * </tr> * <tr> - * <td align="center">JPEG</td> + * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td> * <td align="center">320x240 (240p)</td> * <td 
align="center">Any</td> * <td align="center">if 240p <= activeArraySize</td> * </tr> * <tr> - * <td align="center">YUV_420_888</td> + * <td align="center">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="center">all output sizes available for JPEG</td> * <td align="center">FULL</td> * <td align="center"></td> * </tr> * <tr> - * <td align="center">YUV_420_888</td> + * <td align="center">{@link android.graphics.ImageFormat#YUV_420_888 }</td> * <td align="center">all output sizes available for JPEG, up to the maximum video size</td> * <td align="center">LIMITED</td> * <td align="center"></td> * </tr> * <tr> - * <td align="center">IMPLEMENTATION_DEFINED</td> + * <td align="center">{@link android.graphics.ImageFormat#PRIVATE }</td> * <td align="center">same as YUV_420_888</td> * <td align="center">Any</td> * <td align="center"></td> * </tr> * </tbody> * </table> - * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional - * mandatory stream configurations on a per-capability basis.</p> + * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} and {@link android.hardware.camera2.CameraDevice#createCaptureSession } for additional mandatory + * stream configurations on a per-capability basis.</p> * <p>This key is available on all devices.</p> * * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL @@ -1973,8 +1970,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>Attempting to use frame durations beyond the maximum will result in the frame * duration being clipped to the maximum. See that control for a full definition of frame * durations.</p> - * <p>Refer to StreamConfigurationMap#getOutputMinFrameDuration(int,Size) for the minimum - * frame duration values.</p> + * <p>Refer to {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration } + * for the minimum frame duration values.</p> * <p><b>Units</b>: Nanoseconds</p> * <p><b>Range of valid values:</b><br> * For FULL capability devices @@ -2707,8 +2704,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and * android.scaler.availableStallDurations for more details about * calculating the max frame rate.</p> - * <p>(Keep in sync with - * StreamConfigurationMap#getOutputMinFrameDuration)</p> + * <p>(Keep in sync with {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p> * <p><b>Units</b>: (format, width, height, ns) x n</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> * <p><b>Limited capability</b> - diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java index 2d9f61d..ca9439b 100644 --- a/core/java/android/hardware/camera2/CameraMetadata.java +++ b/core/java/android/hardware/camera2/CameraMetadata.java @@ -461,19 +461,17 @@ public abstract class CameraMetadata<TKey> { * <p>The camera device supports the Zero Shutter Lag reprocessing use case.</p> * <ul> * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li> - * <li>ImageFormat#PRIVATE is supported as an output/input format, that is, - * ImageFormat#PRIVATE is included in the lists of formats returned by - * 
StreamConfigurationMap#getInputFormats and - * StreamConfigurationMap#getOutputFormats.</li> - * <li>StreamConfigurationMap#getValidOutputFormatsForInput returns non empty int[] for - * each supported input format returned by StreamConfigurationMap#getInputFormats.</li> - * <li>Each size returned by StreamConfigurationMap#getInputSizes(ImageFormat#PRIVATE) - * is also included in StreamConfigurationMap#getOutputSizes(ImageFormat#PRIVATE)</li> - * <li>Using ImageFormat#PRIVATE does not cause a frame rate drop - * relative to the sensor's maximum capture rate (at that - * resolution).</li> - * <li>ImageFormat#PRIVATE will be reprocessable into both YUV_420_888 - * and JPEG formats.</li> + * <li>{@link android.graphics.ImageFormat#PRIVATE } is supported as an output/input format, + * that is, {@link android.graphics.ImageFormat#PRIVATE } is included in the lists of + * formats returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li> + * <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput } + * returns non empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.</li> + * <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(ImageFormat.PRIVATE)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(ImageFormat.PRIVATE)}</li> + * <li>Using {@link android.graphics.ImageFormat#PRIVATE } does not cause a frame rate drop + * relative to the sensor's maximum capture rate (at that resolution).</li> + * <li>{@link android.graphics.ImageFormat#PRIVATE } will be reprocessable into both + * {@link android.graphics.ImageFormat#YUV_420_888 } and + * {@link android.graphics.ImageFormat#JPEG } formats.</li> * <li>The maximum available resolution for OPAQUE streams * (both input/output) will match the maximum available * resolution of JPEG streams.</li> @@ -571,26 +569,25 @@ public abstract class CameraMetadata<TKey> { * following:</p> * <ul> * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li> - * <li>YUV_420_888 is supported as an output/input format, that is, + * <li>{@link android.graphics.ImageFormat#YUV_420_888 } is supported as an output/input format, that is, * YUV_420_888 is included in the lists of formats returned by - * StreamConfigurationMap#getInputFormats and - * StreamConfigurationMap#getOutputFormats.</li> - * <li>StreamConfigurationMap#getValidOutputFormatsForInput returns non empty int[] for - * each supported input format returned by StreamConfigurationMap#getInputFormats.</li> - * <li>Each size returned by StreamConfigurationMap#getInputSizes(YUV_420_888) - * is also included in StreamConfigurationMap#getOutputSizes(YUV_420_888)</li> - * <li>Using YUV_420_888 does not cause a frame rate drop + * {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li> + * <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput } + * returns non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats 
}.</li> + * <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(YUV_420_888)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(YUV_420_888)}</li> + * <li>Using {@link android.graphics.ImageFormat#YUV_420_888 } does not cause a frame rate drop * relative to the sensor's maximum capture rate (at that resolution).</li> - * <li>YUV_420_888 will be reprocessable into both YUV_420_888 - * and JPEG formats.</li> - * <li>The maximum available resolution for YUV_420_888 streams - * (both input/output) will match the maximum available - * resolution of JPEG streams.</li> + * <li>{@link android.graphics.ImageFormat#YUV_420_888 } will be reprocessable into both + * {@link android.graphics.ImageFormat#YUV_420_888 } and {@link android.graphics.ImageFormat#JPEG } formats.</li> + * <li>The maximum available resolution for {@link android.graphics.ImageFormat#YUV_420_888 } streams (both input/output) will match the + * maximum available resolution of {@link android.graphics.ImageFormat#JPEG } streams.</li> * <li>Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.</li> - * <li>Only the below controls are effective for reprocessing requests and will be - * present in capture results. The reprocess requests are from the original capture - * results that are assocaited with the intermidate YUV_420_888 output buffers. - * All other controls in the reprocess requests will be ignored by the camera device.<ul> + * <li>Only the below controls are effective for reprocessing requests and will be present + * in capture results. The reprocess requests are from the original capture results that + * are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888 } + * output buffers. 
All other controls in the reprocess requests will be ignored by the + * camera device.<ul> * <li>android.jpeg.*</li> * <li>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}</li> * <li>{@link CaptureRequest#EDGE_MODE android.edge.mode}</li> @@ -612,11 +609,13 @@ public abstract class CameraMetadata<TKey> { * <p>The camera device can produce depth measurements from its field of view.</p> * <p>This capability requires the camera device to support the following:</p> * <ul> - * <li>DEPTH16 is supported as an output format.</li> - * <li>DEPTH_POINT_CLOUD is optionally supported as an output format.</li> - * <li>This camera device, and all camera devices with the same android.lens.info.facing, - * will list the following calibration entries in both CameraCharacteristics and - * CaptureResults:<ul> + * <li>{@link android.graphics.ImageFormat#DEPTH16 } is supported as an output format.</li> + * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD } is optionally supported as an + * output format.</li> + * <li>This camera device, and all camera devices with the same {@link CameraCharacteristics#LENS_FACING android.lens.facing}, + * will list the following calibration entries in both + * {@link android.hardware.camera2.CameraCharacteristics } and + * {@link android.hardware.camera2.CaptureResult }:<ul> * <li>{@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}</li> * <li>{@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}</li> * <li>android.lens.intrinsicCalibration</li> @@ -631,13 +630,14 @@ public abstract class CameraMetadata<TKey> { * <p>Generally, depth output operates at a slower frame rate than standard color capture, * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that * should be accounted for (see - * android.hardware.camera2.StreamConfigurationMap#getOutputStallDuration). On a device - * that supports both depth and color-based output, to enable smooth preview, using a - * repeating burst is recommended, where a depth-output target is only included once - * every N frames, where N is the ratio between preview output rate and depth output + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }). 
+ * On a device that supports both depth and color-based output, to enable smooth preview, + * using a repeating burst is recommended, where a depth-output target is only included + * once every N frames, where N is the ratio between preview output rate and depth output * rate, including depth stall time.</p> * * @see CameraCharacteristics#DEPTH_DEPTH_IS_EXCLUSIVE + * @see CameraCharacteristics#LENS_FACING * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES @@ -711,7 +711,7 @@ public abstract class CameraMetadata<TKey> { /** * <p>Timestamps from {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} are in the same timebase as - * android.os.SystemClock#elapsedRealtimeNanos(), + * {@link android.os.SystemClock#elapsedRealtimeNanos }, * and they can be compared to other timestamps using that base.</p> * * @see CaptureResult#SENSOR_TIMESTAMP @@ -870,7 +870,7 @@ public abstract class CameraMetadata<TKey> { /** * <p>Every frame has the requests immediately applied.</p> * <p>Furthermore for all results, - * <code>android.sync.frameNumber == CaptureResult#getFrameNumber()</code></p> + * <code>android.sync.frameNumber == {@link android.hardware.camera2.CaptureResult#getFrameNumber }</code></p> * <p>Changing controls over multiple requests one after another will * produce results that have those controls applied atomically * each frame.</p> diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java index 9106060..ab6ce91 100644 --- a/core/java/android/hardware/camera2/CaptureRequest.java +++ b/core/java/android/hardware/camera2/CaptureRequest.java @@ -2040,8 +2040,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * cannot process more than 1 capture at a time.</li> * </ul> * <p>The necessary information for the application, given the model above, - * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field - * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size). + * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }. * These are used to determine the maximum frame rate / minimum frame * duration that is possible for a given stream configuration.</p> * <p>Specifically, the application can use the following rules to @@ -2050,21 +2050,19 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * <ol> * <li>Let the set of currently configured input/output streams * be called <code>S</code>.</li> - * <li>Find the minimum frame durations for each stream in <code>S</code>, by - * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using - * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with - * its respective size/format). Let this set of frame durations be called - * <code>F</code>.</li> + * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking + * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration } + * (with its respective size/format). 
Let this set of frame durations be + * called <code>F</code>.</li> * <li>For any given request <code>R</code>, the minimum frame duration allowed * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams * used in <code>R</code> be called <code>S_r</code>.</li> * </ol> - * <p>If none of the streams in <code>S_r</code> have a stall time (listed in - * StreamConfigurationMap#getOutputStallDuration(int,Size) using its - * respective size/format), then the frame duration in - * <code>F</code> determines the steady state frame rate that the application will - * get if it uses <code>R</code> as a repeating request. Let this special kind - * of request be called <code>Rsimple</code>.</p> + * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } + * using its respective size/format), then the frame duration in <code>F</code> + * determines the steady state frame rate that the application will get + * if it uses <code>R</code> as a repeating request. Let this special kind of + * request be called <code>Rsimple</code>.</p> * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved * by a single capture of a new request <code>Rstall</code> (which has at least * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the @@ -2072,7 +2070,7 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>> * if all buffers from the previous <code>Rstall</code> have already been * delivered.</p> * <p>For more details about stalling, see - * StreamConfigurationMap#getOutputStallDuration(int,Size).</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p> * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.</p> * <p><b>Units</b>: Nanoseconds</p> diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java index 8c8f0dc..3dc8970 100644 --- a/core/java/android/hardware/camera2/CaptureResult.java +++ b/core/java/android/hardware/camera2/CaptureResult.java @@ -2886,8 +2886,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * cannot process more than 1 capture at a time.</li> * </ul> * <p>The necessary information for the application, given the model above, - * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field - * using StreamConfigurationMap#getOutputMinFrameDuration(int, Size). + * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }. 
* These are used to determine the maximum frame rate / minimum frame * duration that is possible for a given stream configuration.</p> * <p>Specifically, the application can use the following rules to @@ -2896,21 +2896,19 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * <ol> * <li>Let the set of currently configured input/output streams * be called <code>S</code>.</li> - * <li>Find the minimum frame durations for each stream in <code>S</code>, by - * looking it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using - * StreamConfigurationMap#getOutputMinFrameDuration(int, Size) (with - * its respective size/format). Let this set of frame durations be called - * <code>F</code>.</li> + * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking + * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration } + * (with its respective size/format). Let this set of frame durations be + * called <code>F</code>.</li> * <li>For any given request <code>R</code>, the minimum frame duration allowed * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams * used in <code>R</code> be called <code>S_r</code>.</li> * </ol> - * <p>If none of the streams in <code>S_r</code> have a stall time (listed in - * StreamConfigurationMap#getOutputStallDuration(int,Size) using its - * respective size/format), then the frame duration in - * <code>F</code> determines the steady state frame rate that the application will - * get if it uses <code>R</code> as a repeating request. Let this special kind - * of request be called <code>Rsimple</code>.</p> + * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } + * using its respective size/format), then the frame duration in <code>F</code> + * determines the steady state frame rate that the application will get + * if it uses <code>R</code> as a repeating request. Let this special kind of + * request be called <code>Rsimple</code>.</p> * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved * by a single capture of a new request <code>Rstall</code> (which has at least * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the @@ -2918,7 +2916,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * if all buffers from the previous <code>Rstall</code> have already been * delivered.</p> * <p>For more details about stalling, see - * StreamConfigurationMap#getOutputStallDuration(int,Size).</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p> * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to * OFF; otherwise the auto-exposure algorithm will override this value.</p> * <p><b>Units</b>: Nanoseconds</p> @@ -2980,11 +2978,10 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * and are monotonically increasing. 
They can be compared with the * timestamps for other captures from the same camera device, but are * not guaranteed to be comparable to any other time source.</p> - * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME, - * the timestamps measure time in the same timebase as - * android.os.SystemClock#elapsedRealtimeNanos(), and they can be - * compared to other timestamps from other subsystems that are using - * that base.</p> + * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME, the + * timestamps measure time in the same timebase as {@link android.os.SystemClock#elapsedRealtimeNanos }, and they can + * be compared to other timestamps from other subsystems that + * are using that base.</p> * <p><b>Units</b>: Nanoseconds</p> * <p><b>Range of valid values:</b><br> * > 0</p> @@ -3142,7 +3139,7 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> { * <p><b>Units</b>: Nanoseconds</p> * <p><b>Range of valid values:</b><br> * >= 0 and < - * StreamConfigurationMap#getOutputMinFrameDuration(int, Size).</p> + * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.</p> * <p><b>Optional</b> - This value may be {@code null} on some devices.</p> * <p><b>Limited capability</b> - * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the diff --git a/core/java/android/transition/TransitionInflater.java b/core/java/android/transition/TransitionInflater.java index a7d9503..ca37d49 100644 --- a/core/java/android/transition/TransitionInflater.java +++ b/core/java/android/transition/TransitionInflater.java @@ -214,7 +214,7 @@ public class TransitionInflater { sConstructors.put(className, constructor); } } - + constructor.setAccessible(true); return constructor.newInstance(mContext, attrs); } } catch (InstantiationException e) { diff --git a/core/java/android/view/DisplayListCanvas.java b/core/java/android/view/DisplayListCanvas.java index eedbc70..46dd857 100644 --- a/core/java/android/view/DisplayListCanvas.java +++ b/core/java/android/view/DisplayListCanvas.java @@ -234,25 +234,13 @@ public class DisplayListCanvas extends Canvas { * Draws the specified display list onto this canvas. The display list can only * be drawn if {@link android.view.RenderNode#isValid()} returns true. * - * @param renderNode The RenderNode to replay. + * @param renderNode The RenderNode to draw. */ public void drawRenderNode(RenderNode renderNode) { - drawRenderNode(renderNode, RenderNode.FLAG_CLIP_CHILDREN); + nDrawRenderNode(mNativeCanvasWrapper, renderNode.getNativeDisplayList()); } - /** - * Draws the specified display list onto this canvas. - * - * @param renderNode The RenderNode to replay. - * @param flags Optional flags about drawing, see {@link RenderNode} for - * the possible flags. 
- */ - public void drawRenderNode(RenderNode renderNode, int flags) { - nDrawRenderNode(mNativeCanvasWrapper, renderNode.getNativeDisplayList(), flags); - } - - private static native void nDrawRenderNode(long renderer, long renderNode, - int flags); + private static native void nDrawRenderNode(long renderer, long renderNode); /////////////////////////////////////////////////////////////////////////// // Hardware layer diff --git a/core/java/android/view/PhoneWindow.java b/core/java/android/view/PhoneWindow.java index 794c8e7..a3e7a10 100644 --- a/core/java/android/view/PhoneWindow.java +++ b/core/java/android/view/PhoneWindow.java @@ -3599,7 +3599,7 @@ public class PhoneWindow extends Window implements MenuBuilder.Callback { if (!mForcedNavigationBarColor) { mNavigationBarColor = a.getColor(R.styleable.Window_navigationBarColor, 0xFF000000); } - if (a.getBoolean(R.styleable.Window_windowHasLightStatusBar, false)) { + if (a.getBoolean(R.styleable.Window_windowLightStatusBar, false)) { decor.setSystemUiVisibility( decor.getSystemUiVisibility() | View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR); } diff --git a/core/java/android/view/View.java b/core/java/android/view/View.java index 5ecda87..fa74797 100644 --- a/core/java/android/view/View.java +++ b/core/java/android/view/View.java @@ -2592,7 +2592,7 @@ public class View implements Drawable.Callback, KeyEvent.Callback, * {@link android.view.WindowManager.LayoutParams#FLAG_TRANSLUCENT_STATUS * FLAG_TRANSLUCENT_STATUS}. * - * @see android.R.attr#windowHasLightStatusBar + * @see android.R.attr#windowLightStatusBar */ public static final int SYSTEM_UI_FLAG_LIGHT_STATUS_BAR = 0x00002000; @@ -15540,7 +15540,7 @@ public class View implements Drawable.Callback, KeyEvent.Callback, if (!drawingWithDrawingCache) { if (drawingWithRenderNode) { mPrivateFlags &= ~PFLAG_DIRTY_MASK; - ((DisplayListCanvas) canvas).drawRenderNode(renderNode, parentFlags); + ((DisplayListCanvas) canvas).drawRenderNode(renderNode); } else { // Fast path for layouts with no backgrounds if ((mPrivateFlags & PFLAG_SKIP_DRAW) == PFLAG_SKIP_DRAW) { diff --git a/core/java/android/widget/DayPickerPagerAdapter.java b/core/java/android/widget/DayPickerPagerAdapter.java index 478fa00..d271af2 100644 --- a/core/java/android/widget/DayPickerPagerAdapter.java +++ b/core/java/android/widget/DayPickerPagerAdapter.java @@ -286,14 +286,10 @@ class DayPickerPagerAdapter extends PagerAdapter { return null; } - private boolean isCalendarInRange(Calendar value) { - return value.compareTo(mMinDate) >= 0 && value.compareTo(mMaxDate) <= 0; - } - private final OnDayClickListener mOnDayClickListener = new OnDayClickListener() { @Override public void onDayClick(SimpleMonthView view, Calendar day) { - if (day != null && isCalendarInRange(day)) { + if (day != null) { setSelectedDay(day); if (mOnDaySelectedListener != null) { diff --git a/core/java/android/widget/DayPickerView.java b/core/java/android/widget/DayPickerView.java index 113e597..334afab 100644 --- a/core/java/android/widget/DayPickerView.java +++ b/core/java/android/widget/DayPickerView.java @@ -178,6 +178,13 @@ class DayPickerView extends ViewGroup { }); } + private void updateButtonVisibility(int position) { + final boolean hasPrev = position > 0; + final boolean hasNext = position < (mAdapter.getCount() - 1); + mPrevButton.setVisibility(hasPrev ? View.VISIBLE : View.INVISIBLE); + mNextButton.setVisibility(hasNext ? 
View.VISIBLE : View.INVISIBLE); + } + @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { final ViewPager viewPager = mViewPager; @@ -218,12 +225,6 @@ class DayPickerView extends ViewGroup { final int height = bottom - top; mViewPager.layout(0, 0, width, height); - if (mViewPager.getChildCount() < 1) { - leftButton.setVisibility(View.INVISIBLE); - rightButton.setVisibility(View.INVISIBLE); - return; - } - final SimpleMonthView monthView = (SimpleMonthView) mViewPager.getChildAt(0); final int monthHeight = monthView.getMonthHeight(); final int cellWidth = monthView.getCellWidth(); @@ -235,7 +236,6 @@ class DayPickerView extends ViewGroup { final int leftIconTop = monthView.getPaddingTop() + (monthHeight - leftDH) / 2; final int leftIconLeft = monthView.getPaddingLeft() + (cellWidth - leftDW) / 2; leftButton.layout(leftIconLeft, leftIconTop, leftIconLeft + leftDW, leftIconTop + leftDH); - leftButton.setVisibility(View.VISIBLE); final int rightDW = rightButton.getMeasuredWidth(); final int rightDH = rightButton.getMeasuredHeight(); @@ -243,7 +243,6 @@ class DayPickerView extends ViewGroup { final int rightIconRight = width - monthView.getPaddingRight() - (cellWidth - rightDW) / 2; rightButton.layout(rightIconRight - rightDW, rightIconTop, rightIconRight, rightIconTop + rightDH); - rightButton.setVisibility(View.VISIBLE); } public void setDayOfWeekTextAppearance(int resId) { @@ -399,10 +398,7 @@ class DayPickerView extends ViewGroup { @Override public void onPageSelected(int position) { - mPrevButton.setVisibility( - position > 0 ? View.VISIBLE : View.INVISIBLE); - mNextButton.setVisibility( - position < (mAdapter.getCount() - 1) ? View.VISIBLE : View.INVISIBLE); + updateButtonVisibility(position); } }; diff --git a/core/java/android/widget/Editor.java b/core/java/android/widget/Editor.java index 39b9907..089074a 100644 --- a/core/java/android/widget/Editor.java +++ b/core/java/android/widget/Editor.java @@ -145,16 +145,16 @@ public class Editor { InputContentType mInputContentType; InputMethodState mInputMethodState; - private static class TextDisplayList { - RenderNode displayList; + private static class TextRenderNode { + RenderNode renderNode; boolean isDirty; - public TextDisplayList(String name) { + public TextRenderNode(String name) { isDirty = true; - displayList = RenderNode.create(name, null); + renderNode = RenderNode.create(name, null); } - boolean needsRecord() { return isDirty || !displayList.isValid(); } + boolean needsRecord() { return isDirty || !renderNode.isValid(); } } - TextDisplayList[] mTextDisplayLists; + TextRenderNode[] mTextRenderNodes; boolean mFrozenWithFocus; boolean mSelectionMoved; @@ -360,10 +360,10 @@ public class Editor { } private void destroyDisplayListsData() { - if (mTextDisplayLists != null) { - for (int i = 0; i < mTextDisplayLists.length; i++) { - RenderNode displayList = mTextDisplayLists[i] != null - ? mTextDisplayLists[i].displayList : null; + if (mTextRenderNodes != null) { + for (int i = 0; i < mTextRenderNodes.length; i++) { + RenderNode displayList = mTextRenderNodes[i] != null + ? 
mTextRenderNodes[i].renderNode : null; if (displayList != null && displayList.isValid()) { displayList.destroyDisplayListData(); } @@ -1467,8 +1467,8 @@ public class Editor { firstLine, lastLine); if (layout instanceof DynamicLayout) { - if (mTextDisplayLists == null) { - mTextDisplayLists = ArrayUtils.emptyArray(TextDisplayList.class); + if (mTextRenderNodes == null) { + mTextRenderNodes = ArrayUtils.emptyArray(TextRenderNode.class); } DynamicLayout dynamicLayout = (DynamicLayout) layout; @@ -1489,19 +1489,19 @@ public class Editor { searchStartIndex); // Note how dynamic layout's internal block indices get updated from Editor blockIndices[i] = blockIndex; - if (mTextDisplayLists[blockIndex] != null) { - mTextDisplayLists[blockIndex].isDirty = true; + if (mTextRenderNodes[blockIndex] != null) { + mTextRenderNodes[blockIndex].isDirty = true; } searchStartIndex = blockIndex + 1; } - if (mTextDisplayLists[blockIndex] == null) { - mTextDisplayLists[blockIndex] = - new TextDisplayList("Text " + blockIndex); + if (mTextRenderNodes[blockIndex] == null) { + mTextRenderNodes[blockIndex] = + new TextRenderNode("Text " + blockIndex); } - final boolean blockDisplayListIsInvalid = mTextDisplayLists[blockIndex].needsRecord(); - RenderNode blockDisplayList = mTextDisplayLists[blockIndex].displayList; + final boolean blockDisplayListIsInvalid = mTextRenderNodes[blockIndex].needsRecord(); + RenderNode blockDisplayList = mTextRenderNodes[blockIndex].renderNode; if (i >= indexFirstChangedBlock || blockDisplayListIsInvalid) { final int blockBeginLine = endOfPreviousBlock + 1; final int top = layout.getLineTop(blockBeginLine); @@ -1528,7 +1528,7 @@ public class Editor { // brings this range of text back to the top left corner of the viewport displayListCanvas.translate(-left, -top); layout.drawText(displayListCanvas, blockBeginLine, blockEndLine); - mTextDisplayLists[blockIndex].isDirty = false; + mTextRenderNodes[blockIndex].isDirty = false; // No need to untranslate, previous context is popped after // drawDisplayList } finally { @@ -1543,8 +1543,7 @@ public class Editor { blockDisplayList.setLeftTopRightBottom(left, top, right, bottom); } - ((DisplayListCanvas) canvas).drawRenderNode(blockDisplayList, - 0 /* no child clipping, our TextView parent enforces it */); + ((DisplayListCanvas) canvas).drawRenderNode(blockDisplayList); endOfPreviousBlock = blockEndLine; } @@ -1558,7 +1557,7 @@ public class Editor { private int getAvailableDisplayListIndex(int[] blockIndices, int numberOfBlocks, int searchStartIndex) { - int length = mTextDisplayLists.length; + int length = mTextRenderNodes.length; for (int i = searchStartIndex; i < length; i++) { boolean blockIndexFound = false; for (int j = 0; j < numberOfBlocks; j++) { @@ -1572,7 +1571,7 @@ public class Editor { } // No available index found, the pool has to grow - mTextDisplayLists = GrowingArrayUtils.append(mTextDisplayLists, length, null); + mTextRenderNodes = GrowingArrayUtils.append(mTextRenderNodes, length, null); return length; } @@ -1589,7 +1588,7 @@ public class Editor { * Invalidates all the sub-display lists that overlap the specified character range */ void invalidateTextDisplayList(Layout layout, int start, int end) { - if (mTextDisplayLists != null && layout instanceof DynamicLayout) { + if (mTextRenderNodes != null && layout instanceof DynamicLayout) { final int firstLine = layout.getLineForOffset(start); final int lastLine = layout.getLineForOffset(end); @@ -1609,7 +1608,7 @@ public class Editor { while (i < numberOfBlocks) { final int blockIndex 
= blockIndices[i]; if (blockIndex != DynamicLayout.INVALID_BLOCK_INDEX) { - mTextDisplayLists[blockIndex].isDirty = true; + mTextRenderNodes[blockIndex].isDirty = true; } if (blockEndLines[i] >= lastLine) break; i++; @@ -1618,9 +1617,9 @@ public class Editor { } void invalidateTextDisplayList() { - if (mTextDisplayLists != null) { - for (int i = 0; i < mTextDisplayLists.length; i++) { - if (mTextDisplayLists[i] != null) mTextDisplayLists[i].isDirty = true; + if (mTextRenderNodes != null) { + for (int i = 0; i < mTextRenderNodes.length; i++) { + if (mTextRenderNodes[i] != null) mTextRenderNodes[i].isDirty = true; } } } @@ -4491,7 +4490,7 @@ public class Editor { private class CorrectionHighlighter { private final Path mPath = new Path(); - private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + private final Paint mPaint = new Paint(); private int mStart, mEnd; private long mFadingStartTime; private RectF mTempRectF; diff --git a/core/java/android/widget/SimpleMonthView.java b/core/java/android/widget/SimpleMonthView.java index 2778f0f..acf1df9 100644 --- a/core/java/android/widget/SimpleMonthView.java +++ b/core/java/android/widget/SimpleMonthView.java @@ -31,6 +31,7 @@ import android.text.TextPaint; import android.text.format.DateFormat; import android.util.AttributeSet; import android.util.IntArray; +import android.util.MathUtils; import android.util.StateSet; import android.view.MotionEvent; import android.view.View; @@ -422,7 +423,8 @@ class SimpleMonthView extends View { int stateMask = 0; - if (day >= mEnabledDayStart && day <= mEnabledDayEnd) { + final boolean isDayEnabled = isDayEnabled(day); + if (isDayEnabled) { stateMask |= StateSet.VIEW_STATE_ENABLED; } @@ -435,8 +437,11 @@ class SimpleMonthView extends View { } else if (mTouchedItem == day) { stateMask |= StateSet.VIEW_STATE_PRESSED; - // Adjust the circle to be centered on the row. - canvas.drawCircle(colCenterRtl, rowCenter, mDaySelectorRadius, mDayHighlightPaint); + if (isDayEnabled) { + // Adjust the circle to be centered on the row. + canvas.drawCircle(colCenterRtl, rowCenter, + mDaySelectorRadius, mDayHighlightPaint); + } } final boolean isDayToday = mToday == day; @@ -460,6 +465,14 @@ class SimpleMonthView extends View { } } + private boolean isDayEnabled(int day) { + return day >= mEnabledDayStart && day <= mEnabledDayEnd; + } + + private boolean isValidDayOfMonth(int day) { + return day >= 1 && day <= mDaysInMonth; + } + private static boolean isValidDayOfWeek(int day) { return day >= Calendar.SUNDAY && day <= Calendar.SATURDAY; } @@ -536,13 +549,6 @@ class SimpleMonthView extends View { mWeekStart = mCalendar.getFirstDayOfWeek(); } - if (enabledDayStart > 0 && enabledDayEnd < 32) { - mEnabledDayStart = enabledDayStart; - } - if (enabledDayEnd > 0 && enabledDayEnd < 32 && enabledDayEnd >= enabledDayStart) { - mEnabledDayEnd = enabledDayEnd; - } - // Figure out what day today is. final Calendar today = Calendar.getInstance(); mToday = -1; @@ -554,6 +560,9 @@ class SimpleMonthView extends View { } } + mEnabledDayStart = MathUtils.constrain(enabledDayStart, 1, mDaysInMonth); + mEnabledDayEnd = MathUtils.constrain(enabledDayEnd, mEnabledDayStart, mDaysInMonth); + // Invalidate the old title. 
mTitle = null; @@ -694,7 +703,7 @@ class SimpleMonthView extends View { final int col = (paddedXRtl * DAYS_IN_WEEK) / mPaddedWidth; final int index = col + row * DAYS_IN_WEEK; final int day = index + 1 - findDayOffset(); - if (day < 1 || day > mDaysInMonth) { + if (!isValidDayOfMonth(day)) { return -1; } @@ -708,7 +717,7 @@ class SimpleMonthView extends View { * @param outBounds the rect to populate with bounds */ private boolean getBoundsForDay(int id, Rect outBounds) { - if (id < 1 || id > mDaysInMonth) { + if (!isValidDayOfMonth(id)) { return false; } @@ -742,7 +751,7 @@ class SimpleMonthView extends View { * @param day the day that was clicked */ private boolean onDayClicked(int day) { - if (day < 0 || day > mDaysInMonth) { + if (!isValidDayOfMonth(day) || !isDayEnabled(day)) { return false; } @@ -774,7 +783,7 @@ class SimpleMonthView extends View { @Override protected int getVirtualViewAt(float x, float y) { final int day = getDayAtLocation((int) (x + 0.5f), (int) (y + 0.5f)); - if (day >= 0) { + if (day != -1) { return day; } return ExploreByTouchHelper.INVALID_ID; @@ -808,7 +817,13 @@ class SimpleMonthView extends View { node.setText(getDayText(virtualViewId)); node.setContentDescription(getDayDescription(virtualViewId)); node.setBoundsInParent(mTempRect); - node.addAction(AccessibilityAction.ACTION_CLICK); + + final boolean isDayEnabled = isDayEnabled(virtualViewId); + if (isDayEnabled) { + node.addAction(AccessibilityAction.ACTION_CLICK); + } + + node.setEnabled(isDayEnabled); if (virtualViewId == mActivatedDay) { // TODO: This should use activated once that's supported. @@ -835,7 +850,7 @@ class SimpleMonthView extends View { * @return a description of the virtual view */ private CharSequence getDayDescription(int id) { - if (id >= 1 && id <= mDaysInMonth) { + if (isValidDayOfMonth(id)) { mTempCalendar.set(mYear, mMonth, id); return DateFormat.format(DATE_FORMAT, mTempCalendar.getTimeInMillis()); } @@ -850,7 +865,7 @@ class SimpleMonthView extends View { * @return the visible text of the virtual view */ private CharSequence getDayText(int id) { - if (id >= 1 && id <= mDaysInMonth) { + if (isValidDayOfMonth(id)) { return Integer.toString(id); } diff --git a/core/java/android/widget/Switch.java b/core/java/android/widget/Switch.java index ae779fe..f94f97c 100644 --- a/core/java/android/widget/Switch.java +++ b/core/java/android/widget/Switch.java @@ -216,7 +216,7 @@ public class Switch extends CompoundButton { public Switch(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); - mTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); + mTextPaint = new TextPaint(); final Resources res = getResources(); mTextPaint.density = res.getDisplayMetrics().density; diff --git a/core/java/android/widget/TextView.java b/core/java/android/widget/TextView.java index 3e8df08..8ce5f08 100644 --- a/core/java/android/widget/TextView.java +++ b/core/java/android/widget/TextView.java @@ -668,11 +668,11 @@ public class TextView extends View implements ViewTreeObserver.OnPreDrawListener final Resources res = getResources(); final CompatibilityInfo compat = res.getCompatibilityInfo(); - mTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG); + mTextPaint = new TextPaint(); mTextPaint.density = res.getDisplayMetrics().density; mTextPaint.setCompatibilityScaling(compat.applicationScale); - mHighlightPaint = new Paint(Paint.ANTI_ALIAS_FLAG); + mHighlightPaint = new Paint(); mHighlightPaint.setCompatibilityScaling(compat.applicationScale); mMovement 
= getDefaultMovementMethod();
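The DevicePolicyManager additions at the top of this change are easiest to read from the caller's side. Below is a minimal sketch of a profile or device owner applying the new runtime-permission policy and pre-granting one permission, using the methods exactly as declared in this change; the admin component, package name, and permission string are placeholders, not part of the diff.

    import android.app.admin.DevicePolicyManager;
    import android.content.ComponentName;
    import android.content.Context;

    public class PermissionPolicyHelper {
        // Hypothetical admin receiver; any active profile/device owner component works here.
        private static final ComponentName ADMIN =
                new ComponentName("com.example.mdm", "com.example.mdm.AdminReceiver");

        /** Auto-grant future runtime permission requests and pre-grant one permission. */
        public static void configure(Context context) {
            DevicePolicyManager dpm =
                    (DevicePolicyManager) context.getSystemService(Context.DEVICE_POLICY_SERVICE);

            // Future permission requests by apps are granted without prompting the user.
            dpm.setPermissionPolicy(ADMIN, DevicePolicyManager.PERMISSION_POLICY_AUTO_GRANT);

            // The policy does not touch permissions that were already requested,
            // so grant one explicitly for a specific package.
            boolean granted = dpm.setPermissionGranted(ADMIN, "com.example.app",
                    "android.permission.CAMERA", true /* granted */);

            // Reads back PERMISSION_POLICY_PROMPT if no policy was ever set.
            int policy = dpm.getPermissionPolicy(ADMIN);
        }
    }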
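The new PackageManager.MATCH_ALL flag only matters when the platform would otherwise filter the resolution results (for example, down to the user's chosen default handler). A short sketch of querying with and without it; the ACTION_VIEW intent and URL are purely illustrative.

    import android.content.Context;
    import android.content.Intent;
    import android.content.pm.PackageManager;
    import android.content.pm.ResolveInfo;
    import android.net.Uri;
    import java.util.List;

    public class ResolverQuery {
        public static void listHandlers(Context context) {
            PackageManager pm = context.getPackageManager();
            Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("http://example.com"));

            // Default-only matching: the platform may filter the list.
            List<ResolveInfo> filtered =
                    pm.queryIntentActivities(intent, PackageManager.MATCH_DEFAULT_ONLY);

            // MATCH_ALL: skip that filtering and return every matching activity.
            List<ResolveInfo> everything =
                    pm.queryIntentActivities(intent, PackageManager.MATCH_ALL);
        }
    }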
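The frame-duration rules restated in the CaptureRequest/CaptureResult javadoc boil down to: take the maximum of the per-stream minimum frame durations, then check stall durations. A sketch of that computation against StreamConfigurationMap, assuming a camera id and a JPEG + YUV output configuration chosen only for illustration.

    import android.content.Context;
    import android.graphics.ImageFormat;
    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraManager;
    import android.hardware.camera2.params.StreamConfigurationMap;
    import android.util.Size;

    public class FrameDurationCheck {
        /** Returns the steady-state minimum frame duration (ns) for a YUV + JPEG session. */
        public static long minFrameDurationNs(Context context, String cameraId)
                throws CameraAccessException {
            CameraManager manager =
                    (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
            CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
            StreamConfigurationMap map =
                    chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);

            // Pick one supported size per stream (the first reported one here).
            Size yuvSize = map.getOutputSizes(ImageFormat.YUV_420_888)[0];
            Size jpegSize = map.getOutputSizes(ImageFormat.JPEG)[0];

            // Rule: a request's minimum frame duration is the max over its configured streams.
            long yuvMin = map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, yuvSize);
            long jpegMin = map.getOutputMinFrameDuration(ImageFormat.JPEG, jpegSize);
            long floorNs = Math.max(yuvMin, jpegMin);

            // A non-zero stall duration (typical for JPEG) means a repeating request using
            // that stream will not sustain this rate; 0 means the stream never stalls.
            long jpegStall = map.getOutputStallDuration(ImageFormat.JPEG, jpegSize);

            return floorNs;
        }
    }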
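For the reprocessing capabilities described in the CameraMetadata hunks, the contract is that the input format appears in both getInputFormats() and getOutputFormats(), and that getValidOutputFormatsForInput() is non-empty for it. A hedged sketch of checking PRIVATE-format (opaque) reprocessing into JPEG; it only inspects the StreamConfigurationMap and creates no streams.

    import android.graphics.ImageFormat;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.params.StreamConfigurationMap;

    public class ReprocessCheck {
        /** True if PRIVATE input buffers can be reprocessed into JPEG on this device. */
        public static boolean supportsOpaqueToJpeg(CameraCharacteristics chars) {
            StreamConfigurationMap map =
                    chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
            if (map == null) {
                return false;
            }

            boolean privateInput = false;
            for (int format : map.getInputFormats()) {
                if (format == ImageFormat.PRIVATE) {
                    privateInput = true;
                }
            }
            if (!privateInput) {
                return false;
            }

            // The valid-output list for an input format says what reprocessing can produce.
            for (int format : map.getValidOutputFormatsForInput(ImageFormat.PRIVATE)) {
                if (format == ImageFormat.JPEG) {
                    return true;
                }
            }
            return false;
        }
    }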
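The timestamp documentation now links SystemClock.elapsedRealtimeNanos(); whether SENSOR_TIMESTAMP is actually comparable to that clock still depends on SENSOR_INFO_TIMESTAMP_SOURCE. A small sketch of that check, suitable for use inside a capture callback.

    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.CaptureResult;
    import android.os.SystemClock;

    public class TimestampCheck {
        /** Latency (ns) from sensor capture to now, or -1 if the timebases are not comparable. */
        public static long captureLatencyNs(CameraCharacteristics chars, CaptureResult result) {
            Integer source = chars.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
            Long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP);
            if (timestamp == null || source == null
                    || source != CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
                // UNKNOWN source: timestamps are only comparable to other camera timestamps.
                return -1;
            }
            return SystemClock.elapsedRealtimeNanos() - timestamp;
        }
    }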
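The PhoneWindow and View hunks settle the attribute name as windowLightStatusBar, with SYSTEM_UI_FLAG_LIGHT_STATUS_BAR as the runtime equivalent. A sketch of setting the flag from an Activity; the same effect comes from the android:windowLightStatusBar theme attribute that PhoneWindow reads in this change.

    import android.app.Activity;
    import android.os.Bundle;
    import android.view.View;

    public class LightStatusBarActivity extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            // Request dark status-bar icons; honored only on a light, non-translucent status bar.
            View decor = getWindow().getDecorView();
            decor.setSystemUiVisibility(
                    decor.getSystemUiVisibility() | View.SYSTEM_UI_FLAG_LIGHT_STATUS_BAR);
        }
    }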
