| | | |
|---|---|---|
| author | Igor Murashkin <iam@google.com> | 2014-01-18 02:15:25 +0000 |
| committer | Android (Google) Code Review <android-gerrit@google.com> | 2014-01-18 02:15:25 +0000 |
| commit | 1e2d27a97f2f9c2621db953e8342c5e9a938011a (patch) | |
| tree | a2d92ccfd2e7123d4ae8062fc5062522242b72b2 /core | |
| parent | 509cc13b705f8c488774e7097ab17471c3dacd2e (diff) | |
| parent | c127f05292919ef1646b08b16dca1fe7c324afd4 (diff) | |
| download | frameworks_base-1e2d27a97f2f9c2621db953e8342c5e9a938011a.zip frameworks_base-1e2d27a97f2f9c2621db953e8342c5e9a938011a.tar.gz frameworks_base-1e2d27a97f2f9c2621db953e8342c5e9a938011a.tar.bz2 | |
Merge "camera2: Add test patterns and pipeline depth information"
Diffstat (limited to 'core')
4 files changed, 204 insertions, 0 deletions
diff --git a/core/java/android/hardware/camera2/CameraCharacteristics.java b/core/java/android/hardware/camera2/CameraCharacteristics.java
index 7f6dd4a..0beeaff 100644
--- a/core/java/android/hardware/camera2/CameraCharacteristics.java
+++ b/core/java/android/hardware/camera2/CameraCharacteristics.java
@@ -396,6 +396,27 @@ public final class CameraCharacteristics extends CameraMetadata {
             new Key<int[]>("android.request.maxNumOutputStreams", int[].class);
 
     /**
+     * <p>Specifies the maximum number of pipeline stages a frame
+     * has to go through from when it's exposed to when it's available
+     * to the framework.</p>
+     * <p>A typical minimum value for this is 2 (one stage to expose,
+     * one stage to readout) from the sensor. The ISP then usually adds
+     * its own stages to do custom HW processing. Further stages may be
+     * added by SW processing.</p>
+     * <p>Depending on what settings are used (e.g. YUV, JPEG) and what
+     * processing is enabled (e.g. face detection), the actual pipeline
+     * depth (specified by {@link CaptureResult#REQUEST_PIPELINE_DEPTH android.request.pipelineDepth}) may be less than
+     * the max pipeline depth.</p>
+     * <p>A pipeline depth of X stages is equivalent to a pipeline latency of
+     * X frame intervals.</p>
+     * <p>This value will be 8 or less.</p>
+     *
+     * @see CaptureResult#REQUEST_PIPELINE_DEPTH
+     */
+    public static final Key<Byte> REQUEST_PIPELINE_MAX_DEPTH =
+            new Key<Byte>("android.request.pipelineMaxDepth", byte.class);
+
+    /**
      * <p>The list of image formats that are supported by this
      * camera device.</p>
      * <p>All camera devices will support JPEG and YUV_420_888 formats.</p>
@@ -554,6 +575,14 @@ public final class CameraCharacteristics extends CameraMetadata {
             new Key<Integer>("android.sensor.orientation", int.class);
 
     /**
+     * <p>Optional. Defaults to [OFF]. Lists the supported test
+     * pattern modes for android.sensor.testPatternMode.</p>
+     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+     */
+    public static final Key<Byte> SENSOR_AVAILABLE_TEST_PATTERN_MODES =
+            new Key<Byte>("android.sensor.availableTestPatternModes", byte.class);
+
+    /**
      * <p>Which face detection modes are available,
      * if any</p>
      * <p>OFF means face detection is disabled, it must
diff --git a/core/java/android/hardware/camera2/CameraMetadata.java b/core/java/android/hardware/camera2/CameraMetadata.java
index 92375fd..d2beba6 100644
--- a/core/java/android/hardware/camera2/CameraMetadata.java
+++ b/core/java/android/hardware/camera2/CameraMetadata.java
@@ -990,6 +990,111 @@ public abstract class CameraMetadata {
     public static final int NOISE_REDUCTION_MODE_HIGH_QUALITY = 2;
 
     //
+    // Enumeration values for CaptureRequest#SENSOR_TEST_PATTERN_MODE
+    //
+
+    /**
+     * <p>Default. No test pattern mode is used, and the camera
+     * device returns captures from the image sensor.</p>
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+     */
+    public static final int SENSOR_TEST_PATTERN_MODE_OFF = 0;
+
+    /**
+     * <p>Each pixel in <code>[R, G_even, G_odd, B]</code> is replaced by its
+     * respective color channel provided in
+     * {@link CaptureRequest#SENSOR_TEST_PATTERN_DATA android.sensor.testPatternData}.</p>
+     * <p>For example:</p>
+     * <pre><code>android.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
+     * </code></pre>
+     * <p>All green pixels are 100% green. All red/blue pixels are black.</p>
+     * <pre><code>android.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
+     * </code></pre>
+     * <p>All red pixels are 100% red. Only the odd green pixels
+     * are 100% green. All blue pixels are 100% black.</p>
+     *
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_DATA
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+     */
+    public static final int SENSOR_TEST_PATTERN_MODE_SOLID_COLOR = 1;
+
+    /**
+     * <p>All pixel data is replaced with an 8-bar color pattern.</p>
+     * <p>The vertical bars (left-to-right) are as follows:</p>
+     * <ul>
+     * <li>100% white</li>
+     * <li>yellow</li>
+     * <li>cyan</li>
+     * <li>green</li>
+     * <li>magenta</li>
+     * <li>red</li>
+     * <li>blue</li>
+     * <li>black</li>
+     * </ul>
+     * <p>In general the image would look like the following:</p>
+     * <pre><code>W Y C G M R B K
+     * W Y C G M R B K
+     * W Y C G M R B K
+     * W Y C G M R B K
+     * W Y C G M R B K
+     * . . . . . . . .
+     * . . . . . . . .
+     * . . . . . . . .
+     *
+     * (B = Blue, K = Black)
+     * </code></pre>
+     * <p>Each bar should take up 1/8 of the sensor pixel array width.
+     * When this is not possible, the bar size should be rounded
+     * down to the nearest integer and the pattern can repeat
+     * on the right side.</p>
+     * <p>Each bar's height must always take up the full sensor
+     * pixel array height.</p>
+     * <p>Each pixel in this test pattern must be set to either
+     * 0% intensity or 100% intensity.</p>
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+     */
+    public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS = 2;
+
+    /**
+     * <p>The test pattern is similar to COLOR_BARS, except that
+     * each bar should start at its specified color at the top,
+     * and fade to gray at the bottom.</p>
+     * <p>Furthermore, each bar is subdivided into a left and
+     * right half. The left half should have a smooth gradient,
+     * and the right half should have a quantized gradient.</p>
+     * <p>In particular, the right half should consist of blocks of the
+     * same color for 1/16th of the active sensor pixel array width.</p>
+     * <p>The least significant bits in the quantized gradient should
+     * be copied from the most significant bits of the smooth gradient.</p>
+     * <p>The height of each bar should always be a multiple of 128.
+     * When this is not the case, the pattern should repeat at the bottom
+     * of the image.</p>
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+     */
+    public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY = 3;
+
+    /**
+     * <p>All pixel data is replaced by a pseudo-random sequence
+     * generated from a PN9 512-bit sequence (typically implemented
+     * in hardware with a linear feedback shift register).</p>
+     * <p>The generator should be reset at the beginning of each frame,
+     * and thus each subsequent raw frame with this test pattern should
+     * be exactly the same as the last.</p>
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+     */
+    public static final int SENSOR_TEST_PATTERN_MODE_PN9 = 4;
+
+    /**
+     * <p>The first custom test pattern. All custom pattern modes that are
+     * available only on this camera device use values at least this
+     * large.</p>
+     * <p>All of the custom test patterns will be static
+     * (that is, the raw image must not vary from frame to frame).</p>
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+     */
+    public static final int SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256;
+
+    //
     // Enumeration values for CaptureRequest#SHADING_MODE
     //
 
diff --git a/core/java/android/hardware/camera2/CaptureRequest.java b/core/java/android/hardware/camera2/CaptureRequest.java
index f2f7806..26a6182 100644
--- a/core/java/android/hardware/camera2/CaptureRequest.java
+++ b/core/java/android/hardware/camera2/CaptureRequest.java
@@ -1159,6 +1159,42 @@ public final class CaptureRequest extends CameraMetadata implements Parcelable {
             new Key<Integer>("android.sensor.sensitivity", int.class);
 
     /**
+     * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
+     * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p>
+     * <p>Each color channel is treated as an unsigned 32-bit integer.
+     * The camera device then uses the X most significant bits,
+     * where X is the number of bits in its Bayer raw sensor
+     * output.</p>
+     * <p>For example, a sensor with RAW10 Bayer output would use the
+     * 10 most significant bits from each color channel.</p>
+     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+     *
+     * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+     */
+    public static final Key<int[]> SENSOR_TEST_PATTERN_DATA =
+            new Key<int[]>("android.sensor.testPatternData", int[].class);
+
+    /**
+     * <p>When enabled, the sensor sends a test pattern instead of
+     * doing a real exposure from the camera.</p>
+     * <p>When a test pattern is enabled, all manual sensor controls specified
+     * by android.sensor.* should be ignored. All other controls should
+     * work as normal.</p>
+     * <p>For example, if manual flash is enabled, flash firing should still
+     * occur (and the test pattern should remain unmodified, since the
+     * flash would not actually affect it).</p>
+     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+     * @see #SENSOR_TEST_PATTERN_MODE_OFF
+     * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
+     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
+     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
+     * @see #SENSOR_TEST_PATTERN_MODE_PN9
+     * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
+     */
+    public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
+            new Key<Integer>("android.sensor.testPatternMode", int.class);
+
+    /**
      * <p>Quality of lens shading correction applied
      * to the image data.</p>
      * <p>When set to OFF mode, no lens shading correction will be applied by the
diff --git a/core/java/android/hardware/camera2/CaptureResult.java b/core/java/android/hardware/camera2/CaptureResult.java
index ca6c8cd..07191e5 100644
--- a/core/java/android/hardware/camera2/CaptureResult.java
+++ b/core/java/android/hardware/camera2/CaptureResult.java
@@ -1206,6 +1206,20 @@ public final class CaptureResult extends CameraMetadata {
             new Key<Integer>("android.request.id", int.class);
 
     /**
+     * <p>Specifies the number of pipeline stages the frame went
+     * through from when it was exposed to when the final completed result
+     * was available to the framework.</p>
+     * <p>Depending on what settings are used in the request, and
+     * what streams are configured, the data may undergo less processing,
+     * and some pipeline stages may be skipped.</p>
+     * <p>See {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} for more details.</p>
+     *
+     * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH
+     */
+    public static final Key<Byte> REQUEST_PIPELINE_DEPTH =
+            new Key<Byte>("android.request.pipelineDepth", byte.class);
+
+    /**
      * <p>(x, y, width, height).</p>
      * <p>A rectangle with the top-level corner of (x,y) and size
      * (width, height). The region of the sensor that is used for
@@ -1378,6 +1392,26 @@ public final class CaptureResult extends CameraMetadata {
             new Key<Float>("android.sensor.temperature", float.class);
 
     /**
+     * <p>When enabled, the sensor sends a test pattern instead of
+     * doing a real exposure from the camera.</p>
+     * <p>When a test pattern is enabled, all manual sensor controls specified
+     * by android.sensor.* should be ignored. All other controls should
+     * work as normal.</p>
+     * <p>For example, if manual flash is enabled, flash firing should still
+     * occur (and the test pattern should remain unmodified, since the
+     * flash would not actually affect it).</p>
+     * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+     * @see #SENSOR_TEST_PATTERN_MODE_OFF
+     * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
+     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
+     * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
+     * @see #SENSOR_TEST_PATTERN_MODE_PN9
+     * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
+     */
+    public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
+            new Key<Integer>("android.sensor.testPatternMode", int.class);
+
+    /**
      * <p>Quality of lens shading correction applied
      * to the image data.</p>
      * <p>When set to OFF mode, no lens shading correction will be applied by the
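Usage sketch (editorial, not part of this change): the snippet below illustrates how an application might exercise the keys added above, requesting a SOLID_COLOR test pattern through a CaptureRequest and reading the per-frame pipeline depth from the CaptureResult. It assumes an already-opened CameraDevice, a configured CameraCaptureSession with a valid target Surface, and a Handler for callbacks; the TestPatternHelper class and method names are hypothetical, and the code is written against the camera2 API shape that later shipped (CameraCaptureSession, TotalCaptureResult), which may differ from the framework state at the time of this commit.

```java
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.os.Handler;
import android.util.Log;
import android.view.Surface;

/** Hypothetical helper; names are illustrative only. */
final class TestPatternHelper {
    private static final String TAG = "TestPatternHelper";

    /**
     * Requests one frame with a solid-color test pattern (100% green,
     * red/blue channels at zero) and logs the pipeline depth reported
     * for that frame.
     */
    static void captureSolidColorTestPattern(CameraCharacteristics characteristics,
            CameraDevice camera, CameraCaptureSession session, Surface target,
            Handler handler) throws CameraAccessException {
        // Upper bound on android.request.pipelineDepth for this device (8 or less).
        Byte maxDepth = characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
        Log.i(TAG, "android.request.pipelineMaxDepth = " + maxDepth);

        CaptureRequest.Builder builder =
                camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
        builder.addTarget(target);
        // Replace the real exposure with a solid-color test pattern.
        builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
                CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
        // [R, G_even, G_odd, B]: both green channels at full scale, red/blue at zero.
        builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
                new int[] {0, 0xFFFFFFFF, 0xFFFFFFFF, 0});

        session.capture(builder.build(), new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession completedSession,
                    CaptureRequest request, TotalCaptureResult result) {
                // Number of stages this particular frame went through (<= maxDepth).
                Byte depth = result.get(CaptureResult.REQUEST_PIPELINE_DEPTH);
                Log.i(TAG, "android.request.pipelineDepth = " + depth);
            }
        }, handler);
    }
}
```

Since android.sensor.testPatternMode and android.sensor.testPatternData are marked optional, a production implementation would first consult SENSOR_AVAILABLE_TEST_PATTERN_MODES and handle null results before issuing such a request.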