path: root/include/hardware/camera3.h
author     Zhijun He <zhijunhe@google.com>  2014-01-23 14:42:54 -0800
committer  Zhijun He <zhijunhe@google.com>  2014-02-04 17:05:56 -0800
commit     bc35868a6f6483a212d65bc909f452348f19731e (patch)
tree       c3e8d9d79d7c7cff3d6d1d4e4750553cd8aef3e0 /include/hardware/camera3.h
parent     093e8e8b9b8d28880b0edacd10ad982fab353115 (diff)
camera3: update BIDIRECTIONAL and INPUT stream specs
Also add the manual template to the version history.

Change-Id: I0a516ccbbd3600d69217502e19b267e2d3aa60e8
Diffstat (limited to 'include/hardware/camera3.h')
-rw-r--r--  include/hardware/camera3.h  112
1 file changed, 90 insertions, 22 deletions
diff --git a/include/hardware/camera3.h b/include/hardware/camera3.h
index 1cff2cb..dd102fd 100644
--- a/include/hardware/camera3.h
+++ b/include/hardware/camera3.h
@@ -51,6 +51,7 @@
* S5. Cropping
* S6. Error management
* S7. Key Performance Indicator (KPI) glossary
+ * S8. Sample Use Cases
*/
/**
@@ -107,6 +108,12 @@
* - add partial result support. process_capture_result may be called
* multiple times with a subset of the available result before the full
* result is available.
+ *
+ * - add manual template to camera3_request_template. Applications may use
+ *   this template to control the capture settings directly.
+ *
+ * - Rework the bidirectional and input stream specifications.
+ *
*/
/**
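The manual template gives the application direct control over the capture settings. As an illustration only (this sketch is not part of the patch or the header), the C snippet below shows how a framework-side caller might obtain the manual template via construct_default_request_settings() and override the exposure controls. The helper name build_manual_settings, the exposure/sensitivity values, and the omission of error handling are assumptions made for the example.

    #include <hardware/camera3.h>
    #include <system/camera_metadata.h>

    /* Hypothetical helper: clone the manual template and set explicit
     * exposure controls. Assumes `dev` is an already-opened camera3_device_t. */
    static camera_metadata_t *build_manual_settings(camera3_device_t *dev) {
        const camera_metadata_t *tmpl =
            dev->ops->construct_default_request_settings(dev, CAMERA3_TEMPLATE_MANUAL);
        camera_metadata_t *settings = clone_camera_metadata(tmpl);

        int64_t exposure_ns = 33333333;   /* ~33 ms, illustrative value */
        int32_t sensitivity = 400;        /* ISO 400, illustrative value */
        camera_metadata_entry_t e;

        if (find_camera_metadata_entry(settings, ANDROID_SENSOR_EXPOSURE_TIME, &e) == 0)
            update_camera_metadata_entry(settings, e.index, &exposure_ns, 1, NULL);
        else
            add_camera_metadata_entry(settings, ANDROID_SENSOR_EXPOSURE_TIME, &exposure_ns, 1);

        if (find_camera_metadata_entry(settings, ANDROID_SENSOR_SENSITIVITY, &e) == 0)
            update_camera_metadata_entry(settings, e.index, &sensitivity, 1, NULL);
        else
            add_camera_metadata_entry(settings, ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);

        return settings;   /* used as camera3_capture_request_t.settings */
    }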
@@ -1063,6 +1070,73 @@
*
*/
+/**
+ * S8. Sample Use Cases:
+ *
+ * This section describes some typical use cases that the camera HAL may support.
+ *
+ * S8.1 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_INPUT stream.
+ *
+ * When Zero Shutter Lag (ZSL) is supported by the camera device, the INPUT stream
+ * can be used by the application/framework to implement the ZSL use case. This kind
+ * of stream will be used by the framework as follows:
+ *
+ * 1. Framework configures an opaque raw format output stream that is used to
+ * produce the ZSL output buffers. The stream pixel format will be
+ * HAL_PIXEL_FORMAT_RAW_OPAQUE.
+ *
+ * 2. Framework configures an opaque raw format input stream that is used to
+ * send the reprocess ZSL buffers to the HAL. The stream pixel format will
+ * also be HAL_PIXEL_FORMAT_RAW_OPAQUE.
+ *
+ * 3. Framework configures a YUV/JPEG output stream that is used to receive the
+ * reprocessed data. The stream pixel format will be YCbCr_420/HAL_PIXEL_FORMAT_BLOB.
+ *
+ * 4. Framework picks a ZSL buffer from the output stream when a ZSL capture is
+ *    issued by the application, and sends it back to the HAL as the input buffer
+ *    of a reprocessing request.
+ *
+ * 5. The HAL sends back the output JPEG result to the framework.
+ *
+ * The HAL can select the actual raw buffer format and configure the ISP pipeline
+ * appropriately based on the HAL_PIXEL_FORMAT_RAW_OPAQUE format. See this format
+ * definition for more details.
+ *
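A minimal, framework-side sketch of the S8.1 sequence follows; it is illustrative only and not part of this patch. The stream size (4096x3072), the helper names configure_zsl_streams and submit_reprocess_request, and the omission of error handling and buffer management are assumptions for the example.

    #include <hardware/camera3.h>
    #include <system/graphics.h>

    /* Streams for steps 1-3. */
    static camera3_stream_t zsl_output = {
        .stream_type = CAMERA3_STREAM_OUTPUT,
        .width = 4096, .height = 3072,
        .format = HAL_PIXEL_FORMAT_RAW_OPAQUE,    /* step 1: ZSL output */
    };
    static camera3_stream_t zsl_input = {
        .stream_type = CAMERA3_STREAM_INPUT,
        .width = 4096, .height = 3072,
        .format = HAL_PIXEL_FORMAT_RAW_OPAQUE,    /* step 2: reprocess input */
    };
    static camera3_stream_t jpeg_output = {
        .stream_type = CAMERA3_STREAM_OUTPUT,
        .width = 4096, .height = 3072,
        .format = HAL_PIXEL_FORMAT_BLOB,          /* step 3: JPEG output */
    };

    static void configure_zsl_streams(camera3_device_t *dev) {
        camera3_stream_t *streams[] = { &zsl_output, &zsl_input, &jpeg_output };
        camera3_stream_configuration_t config = {
            .num_streams = 3,
            .streams = streams,
        };
        dev->ops->configure_streams(dev, &config);
    }

    /* Steps 4-5: send a filled ZSL buffer back as the input buffer of a
     * reprocessing request that targets the JPEG output stream. */
    static void submit_reprocess_request(camera3_device_t *dev,
                                         camera3_stream_buffer_t *zsl_buffer,
                                         camera3_stream_buffer_t *jpeg_buffer,
                                         const camera_metadata_t *settings,
                                         uint32_t frame_number) {
        camera3_capture_request_t request = {
            .frame_number = frame_number,
            .settings = settings,
            .input_buffer = zsl_buffer,
            .num_output_buffers = 1,
            .output_buffers = jpeg_buffer,
        };
        dev->ops->process_capture_request(dev, &request);
    }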
+ * S8.2 Zero Shutter Lag (ZSL) with CAMERA3_STREAM_BIDIRECTIONAL stream.
+ *
+ * For this use case, the bidirectional stream will be used by the framework as follows:
+ *
+ * 1. The framework includes a buffer from this stream as output buffer in a
+ * request as normal.
+ *
+ * 2. Once the HAL device returns a filled output buffer to the framework,
+ * the framework may do one of two things with the filled buffer:
+ *
+ * 2. a. The framework uses the filled data, and returns the now-used buffer
+ * to the stream queue for reuse. This behavior exactly matches the
+ * OUTPUT type of stream.
+ *
+ * 2. b. The framework wants to reprocess the filled data, and uses the
+ * buffer as an input buffer for a request. Once the HAL device has
+ * used the reprocessing buffer, it then returns it to the
+ * framework. The framework then returns the now-used buffer to the
+ * stream queue for reuse.
+ *
+ * 3. The HAL device will be given the buffer again as an output buffer for
+ * a request at some future point.
+ *
+ * For the ZSL use case, the pixel format of the bidirectional stream will be
+ * HAL_PIXEL_FORMAT_RAW_OPAQUE if it is listed in android.scaler.availableInputFormats.
+ * A stream configuration that uses a BIDIRECTIONAL stream as input will usually
+ * also have a distinct OUTPUT stream to receive the reprocessed data. For example,
+ * for the ZSL use case, the stream list might be configured with the following:
+ *
+ * - A HAL_PIXEL_FORMAT_RAW_OPAQUE bidirectional stream, used as both the ZSL
+ *   output and the reprocessing input.
+ * - A HAL_PIXEL_FORMAT_BLOB (JPEG) output stream.
+ *
+ */
+
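A comparable sketch for the BIDIRECTIONAL variant of S8.2 follows, again illustrative only and not part of this patch, under the same assumptions (hypothetical names, illustrative sizes, no error handling). The single bidirectional camera3_stream_t serves both as the source of ZSL output buffers and as the stream of the reprocessing input buffer.

    #include <hardware/camera3.h>
    #include <system/graphics.h>

    /* One bidirectional RAW_OPAQUE stream plus a JPEG output stream. */
    static camera3_stream_t zsl_bidir = {
        .stream_type = CAMERA3_STREAM_BIDIRECTIONAL,
        .width = 4096, .height = 3072,
        .format = HAL_PIXEL_FORMAT_RAW_OPAQUE,
    };
    static camera3_stream_t jpeg_out = {
        .stream_type = CAMERA3_STREAM_OUTPUT,
        .width = 4096, .height = 3072,
        .format = HAL_PIXEL_FORMAT_BLOB,
    };

    /* Step 2.b: reuse a filled buffer from the bidirectional stream as the
     * input of a reprocessing request targeting the JPEG output stream. */
    static void reprocess_from_bidirectional(camera3_device_t *dev,
                                             buffer_handle_t *filled_handle,
                                             camera3_stream_buffer_t *jpeg_buffer,
                                             const camera_metadata_t *settings,
                                             uint32_t frame_number) {
        camera3_stream_buffer_t input = {
            .stream = &zsl_bidir,
            .buffer = filled_handle,
            .status = CAMERA3_BUFFER_STATUS_OK,
            .acquire_fence = -1,
            .release_fence = -1,
        };
        camera3_capture_request_t request = {
            .frame_number = frame_number,
            .settings = settings,
            .input_buffer = &input,
            .num_output_buffers = 1,
            .output_buffers = jpeg_buffer,
        };
        dev->ops->process_capture_request(dev, &request);
    }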
__BEGIN_DECLS
struct camera3_device;
@@ -1097,6 +1171,20 @@ typedef enum camera3_stream_type {
* for reading buffers from this stream and sending them through the camera
* processing pipeline, as if the buffer was a newly captured image from the
* imager.
+ *
+ * The pixel format for an input stream can be any format reported by
+ * android.scaler.availableInputFormats. The pixel format of the output stream
+ * that is used to produce the reprocessing data may be any format reported by
+ * android.scaler.availableFormats. The supported input/output stream combinations
+ * depend on the camera device capabilities; see android.scaler.availableInputFormats
+ * for stream map details.
+ *
+ * This kind of stream is generally used to reprocess data into higher quality images
+ * (which would otherwise cause a frame rate performance loss), or to do off-line
+ * reprocessing.
+ *
+ * A typical use case is Zero Shutter Lag (ZSL), see S8.1 for more details.
+ *
*/
CAMERA3_STREAM_INPUT = 1,
@@ -1105,29 +1193,9 @@ typedef enum camera3_stream_type {
* used as an output stream, but occasionally one already-filled buffer may
* be sent back to the HAL device for reprocessing.
*
- * This kind of stream is meant generally for zero-shutter-lag features,
+ * This kind of stream is meant generally for Zero Shutter Lag (ZSL) features,
* where copying the captured image from the output buffer to the
- * reprocessing input buffer would be expensive. The stream will be used by
- * the framework as follows:
- *
- * 1. The framework includes a buffer from this stream as output buffer in a
- * request as normal.
- *
- * 2. Once the HAL device returns a filled output buffer to the framework,
- * the framework may do one of two things with the filled buffer:
- *
- * 2. a. The framework uses the filled data, and returns the now-used buffer
- * to the stream queue for reuse. This behavior exactly matches the
- * OUTPUT type of stream.
- *
- * 2. b. The framework wants to reprocess the filled data, and uses the
- * buffer as an input buffer for a request. Once the HAL device has
- * used the reprocessing buffer, it then returns it to the
- * framework. The framework then returns the now-used buffer to the
- * stream queue for reuse.
- *
- * 3. The HAL device will be given the buffer again as an output buffer for
- * a request at some future point.
+ * reprocessing input buffer would be expensive. See S8.2 for more details.
*
* Note that the HAL will always be reprocessing data it produced.
*