author | Igor Murashkin <iam@google.com> | 2014-05-27 18:32:18 -0700
---|---|---
committer | Eino-Ville Talvala <etalvala@google.com> | 2014-05-28 01:36:53 -0700
commit | 78712a8bd7275542013ba099f33536eead0167fb |
tree | 6aea769b59dbe05e6f8dc661081f2afebf53d455 |
parent | 817f8933ee1c11130ad2ca45b5ac8ce8b729f125 |
camera2: Add new metadata keys, change types for existing range keys
Also adds the following keys:
* android.control.maxRegionsAe
* android.control.maxRegionsAwb
* android.control.maxRegionsAf
* android.request.maxNumOutputRaw
* android.request.maxNumOutputProc
* android.request.maxNumOutputProcStalling
Changes the types of the following keys:
* android.control.aeTargetFpsRange (Range<Integer>)
* android.control.aeAvailableTargetFpsRanges (Range<Integer>[])
* android.control.aeCompensationRange (Range<Integer>)
* android.lens.focusRange (Range<Float>)
Bug: 14628001
Change-Id: I141847dffc4b0d89cea37c19a54d6d5faf24a9bb
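For context, a minimal usage sketch (not part of the commit) of how client code would read the split-out keys added here. The class name, `context`, and `cameraId` parameters are illustrative; error handling is omitted:

```java
import android.content.Context;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;

public final class MaxRegionsExample {
    // Reads the per-use-case keys introduced by this change for one camera.
    public static void printLimits(Context context, String cameraId)
            throws CameraAccessException {
        CameraManager manager =
                (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);

        // Per-algorithm metering-region limits (previously packed into a single
        // int[3] under CONTROL_MAX_REGIONS, ordered AE, AWB, AF).
        Integer maxAeRegions = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
        Integer maxAwbRegions = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
        Integer maxAfRegions = chars.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);

        // Per-format output-stream limits (previously packed into a single int[3]
        // under REQUEST_MAX_NUM_OUTPUT_STREAMS, ordered raw, proc, proc+stalling).
        Integer maxRaw = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
        Integer maxProc = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
        Integer maxProcStalling =
                chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);

        System.out.println("AE/AWB/AF metering regions: "
                + maxAeRegions + "/" + maxAwbRegions + "/" + maxAfRegions);
        System.out.println("RAW/PROC/PROC_STALLING outputs: "
                + maxRaw + "/" + maxProc + "/" + maxProcStalling);
    }
}
```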
6 files changed, 234 insertions, 23 deletions
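The type changes above replace raw `int[]`/`float[]` values with `android.util.Range`. A minimal sketch of what typed access looks like after this change, assuming the `CameraCharacteristics`, `CaptureRequest.Builder`, and `CaptureResult` objects come from the usual camera2 plumbing (names here are illustrative):

```java
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.util.Range;

public final class TypedRangeExample {
    public static void useTypedRanges(CameraCharacteristics chars,
                                      CaptureRequest.Builder builder,
                                      CaptureResult result) {
        // Previously Key<int[]> with flattened (min, max) pairs; now a typed array of ranges.
        Range<Integer>[] fpsRanges =
                chars.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);

        // Previously a two-element int[]; now a single Range<Integer>.
        Range<Integer> compensationRange =
                chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);

        // Request side: the target FPS range is now set as a Range<Integer>.
        if (fpsRanges != null && fpsRanges.length > 0) {
            builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRanges[0]);
        }

        // Result side: android.lens.focusRange is typed as Range<Float> in this change.
        Range<Float> focusRange = result.get(CaptureResult.LENS_FOCUS_RANGE);
    }
}
```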
```diff
diff --git a/api/current.txt b/api/current.txt
index db18714..4fbcd8d 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -12149,7 +12149,9 @@ package android.hardware.camera2 {
     field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AVAILABLE_SCENE_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_AWB_AVAILABLE_MODES;
-    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AE;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AF;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key CONTROL_MAX_REGIONS_AWB;
     field public static final android.hardware.camera2.CameraCharacteristics.Key EDGE_AVAILABLE_EDGE_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key FLASH_INFO_AVAILABLE;
     field public static final android.hardware.camera2.CameraCharacteristics.Key HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES;
@@ -12167,7 +12169,9 @@ package android.hardware.camera2 {
     field public static final android.hardware.camera2.CameraCharacteristics.Key NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_AVAILABLE_CAPABILITIES;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_INPUT_STREAMS;
-    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_STREAMS;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_PROC;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_PROC_STALLING;
+    field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_MAX_NUM_OUTPUT_RAW;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_PARTIAL_RESULT_COUNT;
     field public static final android.hardware.camera2.CameraCharacteristics.Key REQUEST_PIPELINE_MAX_DEPTH;
     field public static final android.hardware.camera2.CameraCharacteristics.Key SCALER_AVAILABLE_MAX_DIGITAL_ZOOM;
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index c08424a..f1d419c 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -322,8 +322,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * <p>List of frame rate ranges supported by the
      * AE algorithm/hardware</p>
      */
-    public static final Key<int[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
-            new Key<int[]>("android.control.aeAvailableTargetFpsRanges", int[].class);
+    public static final Key<android.util.Range<Integer>[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
+            new Key<android.util.Range<Integer>[]>("android.control.aeAvailableTargetFpsRanges", new TypeReference<android.util.Range<Integer>[]>() {{ }});
 
     /**
      * <p>Maximum and minimum exposure compensation
@@ -332,8 +332,8 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      *
      * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
      */
-    public static final Key<int[]> CONTROL_AE_COMPENSATION_RANGE =
-            new Key<int[]>("android.control.aeCompensationRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_COMPENSATION_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeCompensationRange", new TypeReference<android.util.Range<Integer>>() {{ }});
 
     /**
      * <p>Smallest step by which exposure compensation
@@ -427,11 +427,45 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * @see CaptureRequest#CONTROL_AE_REGIONS
      * @see CaptureRequest#CONTROL_AF_REGIONS
      * @see CaptureRequest#CONTROL_AWB_REGIONS
+     * @hide
      */
     public static final Key<int[]> CONTROL_MAX_REGIONS =
             new Key<int[]>("android.control.maxRegions", int[].class);
 
     /**
+     * <p>List of the maximum number of regions that can be used for metering in
+     * auto-exposure (AE);
+     * this corresponds to the the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AE_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AE =
+            new Key<Integer>("android.control.maxRegionsAe", int.class);
+
+    /**
+     * <p>List of the maximum number of regions that can be used for metering in
+     * auto-white balance (AWB);
+     * this corresponds to the the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AWB_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AWB =
+            new Key<Integer>("android.control.maxRegionsAwb", int.class);
+
+    /**
+     * <p>List of the maximum number of regions that can be used for metering in
+     * auto-focus (AF);
+     * this corresponds to the the maximum number of elements in
+     * {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
+     *
+     * @see CaptureRequest#CONTROL_AF_REGIONS
+     */
+    public static final Key<Integer> CONTROL_MAX_REGIONS_AF =
+            new Key<Integer>("android.control.maxRegionsAf", int.class);
+
+    /**
      * <p>The set of edge enhancement modes supported by this camera device.</p>
      * <p>This tag lists the valid modes for {@link CaptureRequest#EDGE_MODE android.edge.mode}.</p>
      * <p>Full-capability camera devices must always support OFF and FAST.</p>
@@ -621,7 +655,7 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * number is 3, and max JPEG stream number is 2, then this tuple should be <code>(1, 3, 2)</code>.</p>
      * <p>This lists the upper bound of the number of output streams supported by
      * the camera device. Using more streams simultaneously may require more hardware and
-     * CPU resources that will consume more power. The image format for a output stream can
+     * CPU resources that will consume more power. The image format for an output stream can
      * be any supported format provided by android.scaler.availableStreamConfigurations.
      * The formats defined in android.scaler.availableStreamConfigurations can be catergorized
      * into the 3 stream types as below:</p>
@@ -632,11 +666,79 @@ public final class CameraCharacteristics extends CameraMetadata<CameraCharacteri
      * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
      *   Typically ImageFormat#YUV_420_888, ImageFormat#NV21, ImageFormat#YV12.</li>
      * </ul>
+     * @hide
      */
     public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
             new Key<int[]>("android.request.maxNumOutputStreams", int[].class);
 
     /**
+     * <p>The maximum numbers of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any <code>RAW</code> formats.</p>
+     * <p>This value contains the max number of output simultaneous
+     * streams from the raw sensor.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of an output stream can
+     * be any <code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>In particular, a <code>RAW</code> format is typically one of:</p>
+     * <ul>
+     * <li>ImageFormat#RAW_SENSOR</li>
+     * <li>Opaque <code>RAW</code></li>
+     * </ul>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_RAW =
+            new Key<Integer>("android.request.maxNumOutputRaw", int.class);
+
+    /**
+     * <p>The maximum numbers of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any processed (but not-stalling) formats.</p>
+     * <p>This value contains the max number of output simultaneous
+     * streams for any processed (but not-stalling) formats.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of an output stream can
+     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
+     * Typically:</p>
+     * <ul>
+     * <li>ImageFormat#YUV_420_888</li>
+     * <li>ImageFormat#NV21</li>
+     * <li>ImageFormat#YV12</li>
+     * <li>Implementation-defined formats, i.e. StreamConfiguration#isOutputSupportedFor(Class)</li>
+     * </ul>
+     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
+     * a processed format -- it will return 0 for a non-stalling stream.</p>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC =
+            new Key<Integer>("android.request.maxNumOutputProc", int.class);
+
+    /**
+     * <p>The maximum numbers of different types of output streams
+     * that can be configured and used simultaneously by a camera device
+     * for any processed (and stalling) formats.</p>
+     * <p>This value contains the max number of output simultaneous
+     * streams for any processed (but not-stalling) formats.</p>
+     * <p>This lists the upper bound of the number of output streams supported by
+     * the camera device. Using more streams simultaneously may require more hardware and
+     * CPU resources that will consume more power. The image format for this kind of an output stream can
+     * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+     * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations > 0.
+     * Typically only the <code>JPEG</code> format (ImageFormat#JPEG)</p>
+     * <p>For full guarantees, query StreamConfigurationMap#getOutputStallDuration with
+     * a processed format -- it will return a non-0 value for a stalling stream.</p>
+     *
+     * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+     */
+    public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC_STALLING =
+            new Key<Integer>("android.request.maxNumOutputProcStalling", int.class);
+
+    /**
      * <p>The maximum numbers of any type of input streams
      * that can be configured and used simultaneously by a camera device.</p>
      * <p>When set to 0, it means no input stream is supported.</p>
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index a133312..01d731d 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -719,8 +719,8 @@ public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
      *
      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
      */
-    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
-            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
 
     /**
      * <p>Whether the camera device will trigger a precapture
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index 694aae3..3c324b4 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -565,8 +565,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      *
      * @see CaptureRequest#SENSOR_EXPOSURE_TIME
      */
-    public static final Key<int[]> CONTROL_AE_TARGET_FPS_RANGE =
-            new Key<int[]>("android.control.aeTargetFpsRange", int[].class);
+    public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+            new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
 
     /**
      * <p>Whether the camera device will trigger a precapture
@@ -1783,8 +1783,8 @@ public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
      * <p>If variable focus not supported, can still report
      * fixed depth of field range</p>
      */
-    public static final Key<float[]> LENS_FOCUS_RANGE =
-            new Key<float[]>("android.lens.focusRange", float[].class);
+    public static final Key<android.util.Range<Float>> LENS_FOCUS_RANGE =
+            new Key<android.util.Range<Float>>("android.lens.focusRange", new TypeReference<android.util.Range<Float>>() {{ }});
 
     /**
      * <p>Sets whether the camera device uses optical image stabilization (OIS)
diff --git a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
index ab2c49a..45e7944 100644
--- a/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
+++ b/core/java/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -50,6 +50,7 @@
 import android.hardware.camera2.utils.TypeReference;
 import android.os.Parcelable;
 import android.os.Parcel;
 import android.util.Log;
+import android.util.Pair;
 
 import com.android.internal.util.Preconditions;
@@ -297,9 +298,9 @@ public class CameraMetadataNative implements Parcelable {
     public <T> T get(Key<T> key) {
         Preconditions.checkNotNull(key, "key must not be null");
 
-        T value = getOverride(key);
-        if (value != null) {
-            return value;
+        Pair<T, Boolean> override = getOverride(key);
+        if (override.second) {
+            return override.first;
         }
 
         return getBase(key);
@@ -413,19 +414,35 @@ public class CameraMetadataNative implements Parcelable {
     // Need overwrite some metadata that has different definitions between native
     // and managed sides.
     @SuppressWarnings("unchecked")
-    private <T> T getOverride(Key<T> key) {
+    private <T> Pair<T, Boolean> getOverride(Key<T> key) {
+        T value = null;
+        boolean override = true;
+
         if (key.equals(CameraCharacteristics.SCALER_AVAILABLE_FORMATS)) {
-            return (T) getAvailableFormats();
+            value = (T) getAvailableFormats();
         } else if (key.equals(CaptureResult.STATISTICS_FACES)) {
-            return (T) getFaces();
+            value = (T) getFaces();
        } else if (key.equals(CaptureResult.STATISTICS_FACE_RECTANGLES)) {
-            return (T) getFaceRectangles();
+            value = (T) getFaceRectangles();
         } else if (key.equals(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)) {
-            return (T) getStreamConfigurationMap();
+            value = (T) getStreamConfigurationMap();
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
+            value = (T) getMaxRegions(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
+            value = (T) getMaxNumOutputs(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
+            value = (T) getMaxNumOutputs(key);
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
+            value = (T) getMaxNumOutputs(key);
+        } else {
+            override = false;
         }
 
-        // For other keys, get() falls back to getBase()
-        return null;
+        return Pair.create(value, override);
     }
 
     private int[] getAvailableFormats() {
@@ -552,6 +569,52 @@ public class CameraMetadataNative implements Parcelable {
         return new StreamConfigurationMap(configurations, minFrameDurations, stallDurations);
     }
 
+    private <T> Integer getMaxRegions(Key<T> key) {
+        final int AE = 0;
+        final int AWB = 1;
+        final int AF = 2;
+
+        // The order of the elements is: (AE, AWB, AF)
+        int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);
+
+        if (maxRegions == null) {
+            return null;
+        }
+
+        if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
+            return maxRegions[AE];
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
+            return maxRegions[AWB];
+        } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
+            return maxRegions[AF];
+        } else {
+            throw new AssertionError("Invalid key " + key);
+        }
+    }
+
+    private <T> Integer getMaxNumOutputs(Key<T> key) {
+        final int RAW = 0;
+        final int PROC = 1;
+        final int PROC_STALLING = 2;
+
+        // The order of the elements is: (raw, proc+nonstalling, proc+stalling)
+        int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);
+
+        if (maxNumOutputs == null) {
+            return null;
+        }
+
+        if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
+            return maxNumOutputs[RAW];
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
+            return maxNumOutputs[PROC];
+        } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
+            return maxNumOutputs[PROC_STALLING];
+        } else {
+            throw new AssertionError("Invalid key " + key);
+        }
+    }
+
     private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
         setBase(key.getNativeKey(), value);
     }
diff --git a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
index a3caba2..cc64f00 100644
--- a/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
+++ b/media/tests/MediaFrameworkTest/src/com/android/mediaframeworktest/unit/CameraMetadataTest.java
@@ -1166,6 +1166,48 @@ public class CameraMetadataTest extends junit.framework.TestCase {
         }
     }
 
+    private <T> void assertKeyValueEquals(T expected, CameraCharacteristics.Key<T> key) {
+        assertKeyValueEquals(expected, key.getNativeKey());
+    }
+
+    private <T> void assertKeyValueEquals(T expected, Key<T> key) {
+        T actual = mMetadata.get(key);
+
+        assertEquals("Expected value for key " + key + " to match", expected, actual);
+    }
+
+    @SmallTest
+    public void testOverrideMaxRegions() {
+        // All keys are null before doing any writes.
+        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
+        assertKeyValueEquals(null, CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+
+        mMetadata.set(CameraCharacteristics.CONTROL_MAX_REGIONS,
+                new int[] { /*AE*/1, /*AWB*/2, /*AF*/3 });
+
+        // All keys are the expected value after doing a write
+        assertKeyValueEquals(1, CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+        assertKeyValueEquals(2, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
+        assertKeyValueEquals(3, CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+    }
+
+    @SmallTest
+    public void testOverrideMaxNumOutputStreams() {
+        // All keys are null before doing any writes.
+        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
+        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
+        assertKeyValueEquals(null, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
+
+        mMetadata.set(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS,
+                new int[] { /*AE*/1, /*AWB*/2, /*AF*/3 });
+
+        // All keys are the expected value after doing a write
+        assertKeyValueEquals(1, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW);
+        assertKeyValueEquals(2, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC);
+        assertKeyValueEquals(3, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING);
+    }
+
     @SmallTest
     public void testCaptureResult() {
         mMetadata.set(CaptureRequest.CONTROL_AE_MODE,
```