Diffstat (limited to 'include/hardware/camera2.h')
-rw-r--r--    include/hardware/camera2.h    763
1 file changed, 630 insertions, 133 deletions
diff --git a/include/hardware/camera2.h b/include/hardware/camera2.h
index 36f2a9e..5d45325 100644
--- a/include/hardware/camera2.h
+++ b/include/hardware/camera2.h
@@ -18,6 +18,7 @@
#define ANDROID_INCLUDE_CAMERA2_H
#include "camera_common.h"
+#include "system/camera_metadata.h"
/**
* Camera device HAL 2.0 [ CAMERA_DEVICE_API_VERSION_2_0 ]
@@ -35,7 +36,7 @@
* version 2.0 of the camera module interface (as defined by
* camera_module_t.common.module_api_version).
*
- * See camera_common.h for more details.
+ * See camera_common.h for more versioning details.
*
*/
@@ -43,105 +44,250 @@ __BEGIN_DECLS
struct camera2_device;
-/**
- * Output image stream queue management
+/**********************************************************************
+ *
+ * Input/output stream buffer queue interface definitions
+ *
*/
+/**
+ * Output image stream queue interface. A set of these methods is provided to
+ * the HAL device in allocate_stream(), and are used to interact with the
+ * gralloc buffer queue for that stream. They may not be called until after
+ * allocate_stream returns.
+ */
typedef struct camera2_stream_ops {
- int (*dequeue_buffer)(struct camera2_stream_ops* w,
- buffer_handle_t** buffer, int *stride);
- int (*enqueue_buffer)(struct camera2_stream_ops* w,
- buffer_handle_t* buffer);
- int (*cancel_buffer)(struct camera2_stream_ops* w,
- buffer_handle_t* buffer);
- int (*set_buffer_count)(struct camera2_stream_ops* w, int count);
- int (*set_buffers_geometry)(struct camera2_stream_ops* pw,
- int w, int h, int format);
- int (*set_crop)(struct camera2_stream_ops *w,
- int left, int top, int right, int bottom);
- // Timestamps are measured in nanoseconds, and must be comparable
- // and monotonically increasing between two frames in the same
- // preview stream. They do not need to be comparable between
- // consecutive or parallel preview streams, cameras, or app runs.
- // The timestamp must be the time at the start of image exposure.
- int (*set_timestamp)(struct camera2_stream_ops *w, int64_t timestamp);
- int (*set_usage)(struct camera2_stream_ops* w, int usage);
- int (*get_min_undequeued_buffer_count)(const struct camera2_stream_ops *w,
- int *count);
- int (*lock_buffer)(struct camera2_stream_ops* w,
- buffer_handle_t* buffer);
+ /**
+ * Get a buffer to fill from the queue. The size and format of the buffer
+ * are fixed for a given stream (defined in allocate_stream), and the stride
+ * should be queried from the platform gralloc module. The gralloc buffer
+ * will have been allocated based on the usage flags provided by
+ * allocate_stream, and will be locked for use.
+ */
+ int (*dequeue_buffer)(const struct camera2_stream_ops* w,
+ buffer_handle_t** buffer);
+
+ /**
+ * Push a filled buffer to the stream to be used by the consumer.
+ *
+ * The timestamp represents the time at start of exposure of the first row
+ * of the image; it must be from a monotonic clock, and is measured in
+ * nanoseconds. The timestamps do not need to be comparable between
+ * different cameras, or consecutive instances of the same camera. However,
+ * they must be comparable between streams from the same camera. If one
+ * capture produces buffers for multiple streams, each stream must have the
+ * same timestamp for that buffer, and that timestamp must match the
+ * timestamp in the output frame metadata.
+ */
+ int (*enqueue_buffer)(const struct camera2_stream_ops* w,
+ int64_t timestamp,
+ buffer_handle_t* buffer);
+ /**
+ * Return a buffer to the queue without marking it as filled.
+ */
+ int (*cancel_buffer)(const struct camera2_stream_ops* w,
+ buffer_handle_t* buffer);
+ /**
+ * Set the crop window for subsequently enqueued buffers. The parameters are
+ * measured in pixels relative to the buffer width and height.
+ */
+ int (*set_crop)(const struct camera2_stream_ops *w,
+ int left, int top, int right, int bottom);
+
} camera2_stream_ops_t;
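
A minimal sketch of how a HAL implementation might drive this interface for one
output stream; fill_image() is a hypothetical HAL-internal helper that renders
into the locked gralloc buffer and reports the start-of-exposure time from a
monotonic clock.

#include <hardware/camera2.h>

/* Hypothetical HAL-internal helper: fills the locked buffer and reports the
 * start-of-exposure time (nanoseconds, monotonic clock). */
extern int fill_image(buffer_handle_t *buf, int64_t *timestamp_ns);

static int produce_one_frame(const camera2_stream_ops_t *ops) {
    buffer_handle_t *buf = NULL;
    int res = ops->dequeue_buffer(ops, &buf);   /* buffer arrives locked */
    if (res != 0 || buf == NULL)
        return res;

    int64_t timestamp_ns = 0;
    if (fill_image(buf, &timestamp_ns) != 0) {
        ops->cancel_buffer(ops, buf);           /* return it unfilled */
        return -1;
    }
    return ops->enqueue_buffer(ops, timestamp_ns, buf);
}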
/**
+ * Temporary definition during transition.
+ *
+ * These formats will be removed and replaced with
+ * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED. To maximize forward compatibility,
+ * HAL implementations are strongly recommended to treat FORMAT_OPAQUE and
+ * FORMAT_ZSL as equivalent to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, and
+ * return HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED in the format_actual output
+ * parameter of allocate_stream, allowing the gralloc module to select the
+ * specific format based on the usage flags from the camera and the stream
+ * consumer.
+ */
+enum {
+ CAMERA2_HAL_PIXEL_FORMAT_OPAQUE = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ CAMERA2_HAL_PIXEL_FORMAT_ZSL = -1
+};
+
+/**
+ * Transport header for compressed JPEG buffers in output streams.
+ *
+ * To capture JPEG images, a stream is created using the pixel format
+ * HAL_PIXEL_FORMAT_BLOB, and the static metadata field android.jpeg.maxSize is
+ * used as the buffer size. Since compressed JPEG images are of variable size,
+ * the HAL needs to include the final size of the compressed image using this
+ * structure inside the output stream buffer. The JPEG blob ID field must be set
+ * to CAMERA2_JPEG_BLOB_ID.
+ *
+ * The transport header should be at the end of the JPEG output stream buffer. That
+ * means the jpeg_blob_id must start at byte[android.jpeg.maxSize -
+ * sizeof(camera2_jpeg_blob)]. Any HAL using this transport header must
+ * account for it in android.jpeg.maxSize. The JPEG data itself starts at
+ * byte[0] and should be jpeg_size bytes long.
+ */
+typedef struct camera2_jpeg_blob {
+ uint16_t jpeg_blob_id;
+ uint32_t jpeg_size;
+} camera2_jpeg_blob_t;
+
+enum {
+ CAMERA2_JPEG_BLOB_ID = 0x00FF
+};
+
+/**
+ * Input reprocess stream queue management. A set of these methods is provided
+ * to the HAL device in allocate_reprocess_stream(); they are used to interact
+ * with the reprocess stream's input gralloc buffer queue.
+ */
+typedef struct camera2_stream_in_ops {
+ /**
+ * Get the next buffer of image data to reprocess. The width, height, and
+ * format of the buffer is fixed in allocate_reprocess_stream(), and the
+ * stride and other details should be queried from the platform gralloc
+ * module as needed. The buffer will already be locked for use.
+ */
+ int (*acquire_buffer)(const struct camera2_stream_in_ops *w,
+ buffer_handle_t** buffer);
+ /**
+ * Return a used buffer to the buffer queue for reuse.
+ */
+ int (*release_buffer)(const struct camera2_stream_in_ops *w,
+ buffer_handle_t* buffer);
+
+} camera2_stream_in_ops_t;
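
A brief sketch of the acquire/release cycle on a reprocess input stream;
reprocess_buffer() is a hypothetical HAL routine.

#include <hardware/camera2.h>

extern int reprocess_buffer(buffer_handle_t *buf);    /* hypothetical */

static int reprocess_one(const camera2_stream_in_ops_t *in_ops) {
    buffer_handle_t *buf = NULL;
    int res = in_ops->acquire_buffer(in_ops, &buf);    /* already locked */
    if (res != 0 || buf == NULL)
        return res;
    res = reprocess_buffer(buf);
    in_ops->release_buffer(in_ops, buf);               /* return for reuse */
    return res;
}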
+
+/**********************************************************************
+ *
* Metadata queue management, used for requests sent to HAL module, and for
* frames produced by the HAL.
*
- * Queue protocol:
+ */
+
+enum {
+ CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS = -1
+};
+
+/**
+ * Request input queue protocol:
*
- * The source holds the queue and its contents. At start, the queue is empty.
+ * The framework holds the queue and its contents. At start, the queue is empty.
*
- * 1. When the first metadata buffer is placed into the queue, the source must
- * signal the destination by calling notify_queue_not_empty().
+ * 1. When the first metadata buffer is placed into the queue, the framework
+ * signals the device by calling notify_request_queue_not_empty().
*
- * 2. After receiving notify_queue_not_empty, the destination must call
+ * 2. After receiving notify_request_queue_not_empty, the device must call
* dequeue() once it's ready to handle the next buffer.
*
- * 3. Once the destination has processed a buffer, it should try to dequeue
- * another buffer. If there are no more buffers available, dequeue() will
- * return NULL. In this case, when a buffer becomes available, the source
- * must call notify_queue_not_empty() again. If the destination receives a
- * NULL return from dequeue, it does not need to query the queue again until
- * a notify_queue_not_empty() call is received from the source.
- *
- * 4. If the destination calls buffer_count() and receives 0, this does not mean
- * that the source will provide a notify_queue_not_empty() call. The source
- * must only provide such a call after the destination has received a NULL
- * from dequeue, or on initial startup.
- *
- * 5. The dequeue() call in response to notify_queue_not_empty() may be on the
- * same thread as the notify_queue_not_empty() call. The source must not
- * deadlock in that case.
+ * 3. Once the device has processed a buffer, and is ready for the next buffer,
+ * it must call dequeue() again instead of waiting for a notification. If
+ * there are no more buffers available, dequeue() will return NULL. After
+ * this point, when a buffer becomes available, the framework must call
+ * notify_request_queue_not_empty() again. If the device receives a NULL
+ * return from dequeue, it does not need to query the queue again until a
+ * notify_request_queue_not_empty() call is received from the framework.
+ *
+ * 4. If the device calls buffer_count() and receives 0, this does not mean that
+ * the framework will provide a notify_request_queue_not_empty() call. The
+ * framework will only provide such a notification after the device has
+ * received a NULL from dequeue, or on initial startup.
+ *
+ * 5. The dequeue() call in response to notify_request_queue_not_empty() may be
+ * on the same thread as the notify_request_queue_not_empty() call, and may
+ * be performed from within the notify call.
+ *
+ * 6. All dequeued request buffers must be returned to the framework by calling
+ * free_request, including when errors occur, a device flush is requested, or
+ * when the device is shutting down.
*/
-
-typedef struct camera2_metadata_queue_src_ops {
+typedef struct camera2_request_queue_src_ops {
/**
- * Get count of buffers in queue
+ * Get the count of request buffers pending in the queue. May return
+ * CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS if a repeating request (stream
+ * request) is currently configured. Calling this method has no effect on
+ * whether the notify_request_queue_not_empty() method will be called by the
+ * framework.
*/
- int (*buffer_count)(camera2_metadata_queue_src_ops *q);
+ int (*request_count)(const struct camera2_request_queue_src_ops *q);
/**
- * Get a metadata buffer from the source. Returns OK if a request is
- * available, placing a pointer to it in next_request.
+ * Get a metadata buffer from the framework. Returns OK if there is no
+ * error. If the queue is empty, returns NULL in buffer. In that case, the
+ * device must wait for a notify_request_queue_not_empty() message before
+ * attempting to dequeue again. Buffers obtained in this way must be
+ * returned to the framework with free_request().
*/
- int (*dequeue)(camera2_metadata_queue_src_ops *q,
+ int (*dequeue_request)(const struct camera2_request_queue_src_ops *q,
camera_metadata_t **buffer);
/**
- * Return a metadata buffer to the source once it has been used
+ * Return a metadata buffer to the framework once it has been used, or if
+ * an error or shutdown occurs.
*/
- int (*free)(camera2_metadata_queue_src_ops *q,
+ int (*free_request)(const struct camera2_request_queue_src_ops *q,
camera_metadata_t *old_buffer);
-} camera2_metadata_queue_src_ops_t;
+} camera2_request_queue_src_ops_t;
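
A sketch of the device-side half of this protocol: after a
notify_request_queue_not_empty() call, keep dequeuing until the framework
returns NULL, and hand every dequeued buffer back with free_request();
process_request() is a hypothetical HAL routine.

#include <hardware/camera2.h>

extern void process_request(const camera_metadata_t *request);  /* hypothetical */

static void drain_request_queue(const camera2_request_queue_src_ops_t *q) {
    for (;;) {
        camera_metadata_t *request = NULL;
        if (q->dequeue_request(q, &request) != 0)
            break;                       /* error */
        if (request == NULL)
            break;                       /* empty: wait for the next notify */
        process_request(request);
        q->free_request(q, request);     /* every dequeued buffer goes back */
    }
}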
+
+/**
+ * Frame output queue protocol:
+ *
+ * The framework holds the queue and its contents. At start, the queue is empty.
+ *
+ * 1. When the device is ready to fill an output metadata frame, it must dequeue
+ * a metadata buffer of the required size.
+ *
+ * 2. It should then fill the metadata buffer, and place it on the frame queue
+ * using enqueue_frame. The framework takes ownership of the frame.
+ *
+ * 3. In case of an error, a request to flush the pipeline, or shutdown, the
+ * device must return any affected dequeued frames to the framework by
+ * calling cancel_frame.
+ */
+typedef struct camera2_frame_queue_dst_ops {
+ /**
+ * Get an empty metadata buffer to fill from the framework. The new metadata
+ * buffer will have room for "entries" metadata entries, plus "data_bytes"
+ * worth of extra storage. Frames dequeued here must be returned
+ * to the framework with either cancel_frame or enqueue_frame.
+ */
+ int (*dequeue_frame)(const struct camera2_frame_queue_dst_ops *q,
+ size_t entries, size_t data_bytes,
+ camera_metadata_t **buffer);
+
+ /**
+ * Return a dequeued metadata buffer to the framework for reuse; do not
+ * mark it as filled. Use when encountering errors, or flushing the
+ * internal request queue.
+ */
+ int (*cancel_frame)(const struct camera2_frame_queue_dst_ops *q,
+ camera_metadata_t *buffer);
-typedef struct camera2_metadata_queue_dst_ops {
/**
- * Notify destination that the queue is no longer empty
+ * Place a completed metadata frame on the frame output queue.
*/
- int (*notify_queue_not_empty)(struct camera2_metadata_queue_dst_ops *);
+ int (*enqueue_frame)(const struct camera2_frame_queue_dst_ops *q,
+ camera_metadata_t *buffer);
-} camera2_metadata_queue_dst_ops_t;
+} camera2_frame_queue_dst_ops_t;
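
A sketch of emitting one output frame through this interface, assuming the
entry helpers from system/camera_metadata.h; the entry/data sizes and the
single ANDROID_SENSOR_TIMESTAMP entry are placeholders for the full result
metadata of a request.

#include <hardware/camera2.h>

static int emit_frame(const camera2_frame_queue_dst_ops_t *q, int64_t timestamp) {
    camera_metadata_t *frame = NULL;
    if (q->dequeue_frame(q, /*entries*/ 32, /*data_bytes*/ 256, &frame) != 0 ||
        frame == NULL)
        return -1;

    if (add_camera_metadata_entry(frame, ANDROID_SENSOR_TIMESTAMP,
                                  &timestamp, 1) != 0) {
        q->cancel_frame(q, frame);       /* return unfilled on error */
        return -1;
    }
    return q->enqueue_frame(q, frame);   /* framework takes ownership */
}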
-/* Defined in camera_metadata.h */
-typedef struct vendor_tag_query_ops vendor_tag_query_ops_t;
+/**********************************************************************
+ *
+ * Notification callback and message definition, and trigger definitions
+ *
+ */
/**
* Asynchronous notification callback from the HAL, fired for various
* reasons. Only for information independent of frame capture, or that require
- * specific timing.
+ * specific timing. The user pointer must be the same one that was passed to the
+ * device in set_notify_callback().
*/
typedef void (*camera2_notify_callback)(int32_t msg_type,
int32_t ext1,
int32_t ext2,
+ int32_t ext3,
void *user);
/**
@@ -149,15 +295,42 @@ typedef void (*camera2_notify_callback)(int32_t msg_type,
*/
enum {
/**
- * A serious error has occurred. Argument ext1 contains the error code, and
- * ext2 and user contain any error-specific information.
+ * An error has occurred. Argument ext1 contains the error code, and
+ * ext2 and ext3 contain any error-specific information.
*/
CAMERA2_MSG_ERROR = 0x0001,
/**
* The exposure of a given request has begun. Argument ext1 contains the
- * request id.
+ * frame number, and ext2 and ext3 contain the low-order and high-order
+ * 32 bits of the timestamp for when exposure began.
+ * (timestamp = (ext3 << 32 | ext2))
+ */
+ CAMERA2_MSG_SHUTTER = 0x0010,
+ /**
+ * The autofocus routine has changed state. Argument ext1 contains the new
+ * state; the values are the same as those for the metadata field
+ * android.control.afState. Ext2 contains the latest trigger ID passed to
+ * trigger_action(CAMERA2_TRIGGER_AUTOFOCUS) or
+ * trigger_action(CAMERA2_TRIGGER_CANCEL_AUTOFOCUS), or 0 if trigger has not
+ * been called with either of those actions.
+ */
+ CAMERA2_MSG_AUTOFOCUS = 0x0020,
+ /**
+ * The autoexposure routine has changed state. Argument ext1 contains the
+ * new state; the values are the same as those for the metadata field
+ * android.control.aeState. Ext2 contains the latest trigger ID value passed to
+ * trigger_action(CAMERA2_TRIGGER_PRECAPTURE_METERING), or 0 if that method
+ * has not been called.
*/
- CAMERA2_MSG_SHUTTER = 0x0002
+ CAMERA2_MSG_AUTOEXPOSURE = 0x0021,
+ /**
+ * The auto-whitebalance routine has changed state. Argument ext1 contains
+ * the new state; the values are the same as those for the metadata field
+ * android.control.awbState. Ext2 contains the latest trigger ID passed to
+ * trigger_action(CAMERA2_TRIGGER_PRECAPTURE_METERING), or 0 if that method
+ * has not been called.
+ */
+ CAMERA2_MSG_AUTOWB = 0x0022
};
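
A sketch of a framework-side notify handler, showing how the shutter timestamp
is reassembled from ext2/ext3; the body of each case is elided.

#include <hardware/camera2.h>

static void on_notify(int32_t msg_type, int32_t ext1, int32_t ext2,
                      int32_t ext3, void *user) {
    (void) user;
    switch (msg_type) {
    case CAMERA2_MSG_SHUTTER: {
        int32_t frame_number = ext1;
        int64_t timestamp = ((int64_t) ext3 << 32) | (uint32_t) ext2;
        (void) frame_number; (void) timestamp;
        /* ... match against the enqueue_buffer timestamps for this frame ... */
        break;
    }
    case CAMERA2_MSG_ERROR:
        /* ext1 = error code, ext2/ext3 = error-specific information */
        break;
    case CAMERA2_MSG_AUTOFOCUS:
    case CAMERA2_MSG_AUTOEXPOSURE:
    case CAMERA2_MSG_AUTOWB:
        /* ext1 = new 3A state, ext2 = latest trigger ID */
        break;
    }
}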
/**
@@ -169,133 +342,457 @@ enum {
* no further frames or buffer streams will be produced by the
* device. Device should be treated as closed.
*/
- CAMERA2_MSG_ERROR_HARDWARE_FAULT = 0x0001,
+ CAMERA2_MSG_ERROR_HARDWARE = 0x0001,
/**
* A serious failure occurred. No further frames or buffer streams will be
* produced by the device. Device should be treated as closed. The client
* must reopen the device to use it again.
*/
- CAMERA2_MSG_ERROR_DEVICE_FAULT = 0x0002,
+ CAMERA2_MSG_ERROR_DEVICE,
/**
- * The camera service has failed. Device should be treated as released. The client
- * must reopen the device to use it again.
+ * An error has occurred in processing a request. No output (metadata or
+ * buffers) will be produced for this request. ext2 contains the frame
+ * number of the request. Subsequent requests are unaffected, and the device
+ * remains operational.
+ */
+ CAMERA2_MSG_ERROR_REQUEST,
+ /**
+ * An error has occurred in producing an output frame metadata buffer for a
+ * request, but image buffers for it will still be available. Subsequent
+ * requests are unaffected, and the device remains operational. ext2
+ * contains the frame number of the request.
+ */
+ CAMERA2_MSG_ERROR_FRAME,
+ /**
+ * An error has occurred in placing an output buffer into a stream for a
+ * request. The frame metadata and other buffers may still be
+ * available. Subsequent requests are unaffected, and the device remains
+ * operational. ext2 contains the frame number of the request, and ext3
+ * contains the stream id.
+ */
+ CAMERA2_MSG_ERROR_STREAM,
+ /**
+ * Number of error types
*/
- CAMERA2_MSG_ERROR_SERVER_FAULT = 0x0003
+ CAMERA2_MSG_NUM_ERRORS
};
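
A short sketch of how a HAL might report a per-request failure without taking
the device down, assuming notify_cb and user are the values saved from
set_notify_callback().

#include <hardware/camera2.h>

static void report_request_error(camera2_notify_callback notify_cb, void *user,
                                 int32_t frame_number) {
    /* ext1 = error code, ext2 = frame number of the failed request */
    notify_cb(CAMERA2_MSG_ERROR, CAMERA2_MSG_ERROR_REQUEST, frame_number, 0, user);
}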
-typedef struct camera2_device_ops {
+/**
+ * Possible trigger ids for trigger_action()
+ */
+enum {
/**
- * Input request queue methods
+ * Trigger an autofocus cycle. The effect of the trigger depends on the
+ * autofocus mode in effect when the trigger is received, which is the mode
+ * listed in the latest capture request to be dequeued by the HAL. If the
+ * mode is OFF, EDOF, or FIXED, the trigger has no effect. In AUTO, MACRO,
+ * or CONTINUOUS_* modes, see below for the expected behavior. The state of
+ * the autofocus cycle can be tracked in android.control.afState and the
+ * corresponding notifications.
+ *
+ **
+ * In AUTO or MACRO mode, the AF state transitions (and notifications)
+ * when calling with trigger ID = N with the previous ID being K are:
+ *
+ * Initial state Transitions
+ * INACTIVE (K) -> ACTIVE_SCAN (N) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * AF_FOCUSED (K) -> ACTIVE_SCAN (N) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * AF_NOT_FOCUSED (K) -> ACTIVE_SCAN (N) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * ACTIVE_SCAN (K) -> AF_FOCUSED(N) or AF_NOT_FOCUSED(N)
+ * PASSIVE_SCAN (K) Not used in AUTO/MACRO mode
+ * PASSIVE_FOCUSED (K) Not used in AUTO/MACRO mode
+ *
+ **
+ * In CONTINUOUS_PICTURE mode, triggering AF must lock the AF to the current
+ * lens position and transition the AF state to either AF_FOCUSED or
+ * AF_NOT_FOCUSED. If a passive scan is underway, that scan must complete and
+ * then lock the lens position and change AF state. TRIGGER_CANCEL_AUTOFOCUS
+ * will allow the AF to restart its operation.
+ *
+ * Initial state Transitions
+ * INACTIVE (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * PASSIVE_FOCUSED (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * PASSIVE_SCAN (K) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * AF_FOCUSED (K) no effect except to change next notification ID to N
+ * AF_NOT_FOCUSED (K) no effect except to change next notification ID to N
+ *
+ **
+ * In CONTINUOUS_VIDEO mode, triggering AF must lock the AF to the current
+ * lens position and transition the AF state to either AF_FOCUSED or
+ * AF_NOT_FOCUSED. If a passive scan is underway, it must immediately halt, in
+ * contrast with CONTINUOUS_PICTURE mode. TRIGGER_CANCEL_AUTOFOCUS will
+ * allow the AF to restart its operation.
+ *
+ * Initial state Transitions
+ * INACTIVE (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * PASSIVE_FOCUSED (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * PASSIVE_SCAN (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N)
+ * AF_FOCUSED (K) no effect except to change next notification ID to N
+ * AF_NOT_FOCUSED (K) no effect except to change next notification ID to N
+ *
+ * Ext1 is an ID that must be returned in subsequent auto-focus state change
+ * notifications through camera2_notify_callback() and stored in
+ * android.control.afTriggerId.
*/
- int (*set_request_queue_src_ops)(struct camera2_device *,
- camera2_metadata_queue_src_ops *queue_src_ops);
-
- int (*get_request_queue_dst_ops)(struct camera2_device *,
- camera2_metadata_queue_dst_ops **queue_dst_ops);
+ CAMERA2_TRIGGER_AUTOFOCUS = 0x0001,
+ /**
+ * Send a cancel message to the autofocus algorithm. The effect of the
+ * cancellation depends on the autofocus mode in effect when the trigger is
+ * received, which is the mode listed in the latest capture request to be
+ * dequeued by the HAL. If the AF mode is OFF or EDOF, the cancel has no
+ * effect. For other modes, the lens should return to its default position,
+ * any current autofocus scan must be canceled, and the AF state should be
+ * set to INACTIVE.
+ *
+ * The state of the autofocus cycle can be tracked in android.control.afState
+ * and the corresponding notification. Continuous autofocus modes may resume
+ * focusing operations thereafter exactly as if the camera had just been set
+ * to a continuous AF mode.
+ *
+ * Ext1 is an ID that must be returned in subsequent auto-focus state change
+ * notifications through camera2_notify_callback() and stored in
+ * android.control.afTriggerId.
+ */
+ CAMERA2_TRIGGER_CANCEL_AUTOFOCUS,
+ /**
+ * Trigger a pre-capture metering cycle, which may include firing the flash
+ * to determine proper capture parameters. Typically, this trigger would be
+ * fired for a half-depress of a camera shutter key, or before a snapshot
+ * capture in general. The state of the metering cycle can be tracked in
+ * android.control.aeState and the corresponding notification. If the
+ * auto-exposure mode is OFF, the trigger does nothing.
+ *
+ * Ext1 is an ID that must be returned in subsequent
+ * auto-exposure/auto-white balance state change notifications through
+ * camera2_notify_callback() and stored in android.control.aePrecaptureId.
+ */
+ CAMERA2_TRIGGER_PRECAPTURE_METERING
+};
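
A sketch of framework-side trigger usage, assuming the usual camera2_device
layout with an ops pointer: each trigger carries a fresh ID in ext1, which the
HAL echoes back in the matching notification and in android.control.afTriggerId
or android.control.aePrecaptureId.

#include <hardware/camera2.h>

static int32_t next_trigger_id = 1;

static int start_autofocus(const struct camera2_device *dev) {
    return dev->ops->trigger_action(dev, CAMERA2_TRIGGER_AUTOFOCUS,
                                    next_trigger_id++, 0);
}

static int start_precapture_metering(const struct camera2_device *dev) {
    return dev->ops->trigger_action(dev, CAMERA2_TRIGGER_PRECAPTURE_METERING,
                                    next_trigger_id++, 0);
}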
+/**
+ * Possible template types for construct_default_request()
+ */
+enum {
+ /**
+ * Standard camera preview operation with 3A on auto.
+ */
+ CAMERA2_TEMPLATE_PREVIEW = 1,
+ /**
+ * Standard camera high-quality still capture with 3A and flash on auto.
+ */
+ CAMERA2_TEMPLATE_STILL_CAPTURE,
/**
- * Input reprocessing queue methods
+ * Standard video recording plus preview with 3A on auto, torch off.
*/
- int (*set_reprocess_queue_ops)(struct camera2_device *,
- camera2_metadata_queue_src_ops *queue_src_ops);
+ CAMERA2_TEMPLATE_VIDEO_RECORD,
+ /**
+ * High-quality still capture while recording video. Application will
+ * include preview, video record, and full-resolution YUV or JPEG streams in
+ * request. Must not cause stuttering on video stream. 3A on auto.
+ */
+ CAMERA2_TEMPLATE_VIDEO_SNAPSHOT,
+ /**
+ * Zero-shutter-lag mode. Application will request preview and
+ * full-resolution data for each frame, and reprocess it to JPEG when a
+ * still image is requested by user. Settings should provide highest-quality
+ * full-resolution images without compromising preview frame rate. 3A on
+ * auto.
+ */
+ CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG,
+
+ /* Total number of templates */
+ CAMERA2_TEMPLATE_COUNT
+};
- int (*get_reprocess_queue_dst_ops)(struct camera2_device *,
- camera2_metadata_queue_dst_ops **queue_dst_ops);
+
+/**********************************************************************
+ *
+ * Camera device operations
+ *
+ */
+typedef struct camera2_device_ops {
+
+ /**********************************************************************
+ * Request and frame queue setup and management methods
+ */
/**
- * Output frame queue methods
+ * Pass in input request queue interface methods.
*/
- int (*set_frame_queue_dst_ops)(struct camera2_device *,
- camera2_metadata_queue_dst_ops *queue_dst_ops);
+ int (*set_request_queue_src_ops)(const struct camera2_device *,
+ const camera2_request_queue_src_ops_t *request_src_ops);
- int (*get_frame_queue_src_ops)(struct camera2_device *,
- camera2_metadata_queue_src_ops **queue_dst_ops);
+ /**
+ * Notify device that the request queue is no longer empty. Must only be
+ * called when the first buffer is added to a new queue, or after the framework
+ * has returned NULL in response to a dequeue call.
+ */
+ int (*notify_request_queue_not_empty)(const struct camera2_device *);
/**
- * Pass in notification methods
+ * Pass in output frame queue interface methods
*/
- int (*set_notify_callback)(struct camera2_device *,
- camera2_notify_callback notify_cb);
+ int (*set_frame_queue_dst_ops)(const struct camera2_device *,
+ const camera2_frame_queue_dst_ops_t *frame_dst_ops);
/**
- * Number of camera frames being processed by the device
- * at the moment (frames that have had their request dequeued,
- * but have not yet been enqueued onto output pipeline(s) )
+ * Number of camera requests being processed by the device at the moment
+ * (captures/reprocesses that have had their request dequeued, but have not
+ * yet been enqueued onto output pipeline(s) ). No streams may be released
+ * by the framework until the in-progress count is 0.
*/
- int (*get_in_progress_count)(struct camera2_device *);
+ int (*get_in_progress_count)(const struct camera2_device *);
/**
* Flush all in-progress captures. This includes all dequeued requests
* (regular or reprocessing) that have not yet placed any outputs into a
* stream or the frame queue. Partially completed captures must be completed
- * normally. No new requests may be dequeued from the request or
- * reprocessing queues until the flush completes.
+ * normally. No new requests may be dequeued from the request queue until
+ * the flush completes.
*/
- int (*flush_captures_in_progress)(struct camera2_device *);
+ int (*flush_captures_in_progress)(const struct camera2_device *);
/**
- * Camera stream management
+ * Create a filled-in default request for standard camera use cases.
+ *
+ * The device must return a complete request that is configured to meet the
+ * requested use case, which must be one of the CAMERA2_TEMPLATE_*
+ * enums. All request control fields must be included, except for
+ * android.request.outputStreams.
+ *
+ * The metadata buffer returned must be allocated with
+ * allocate_camera_metadata. The framework takes ownership of the buffer.
+ */
+ int (*construct_default_request)(const struct camera2_device *,
+ int request_template,
+ camera_metadata_t **request);
+
+ /**********************************************************************
+ * Stream management
*/
/**
- * Operations on the input reprocessing stream
+ * allocate_stream:
+ *
+ * Allocate a new output stream for use, defined by the output buffer width,
+ * height, target, and possibly the pixel format. Returns the new stream's
+ * ID, gralloc usage flags, minimum queue buffer count, and possibly the
+ * pixel format, on success. Error conditions:
+ *
+ * - Requesting a width/height/format combination not listed as
+ * supported by the sensor's static characteristics
+ *
+ * - Asking for too many streams of a given format type (2 bayer raw
+ * streams, for example).
+ *
+ * Input parameters:
+ *
+ * - width, height, format: Specification for the buffers to be sent through
+ * this stream. Format is a value from the HAL_PIXEL_FORMAT_* list. If
+ * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform
+ * gralloc module will select a format based on the usage flags provided
+ * by the camera HAL and the consumer of the stream. The camera HAL should
+ * inspect the buffers handed to it in the register_stream_buffers call to
+ * obtain the implementation-specific format if necessary.
+ *
+ * - stream_ops: A structure of function pointers for obtaining and queuing
+ * up buffers for this stream. The underlying stream will be configured
+ * based on the usage and max_buffers outputs. The methods in this
+ * structure may not be called until after allocate_stream returns.
+ *
+ * Output parameters:
+ *
+ * - stream_id: An unsigned integer identifying this stream. This value is
+ * used in incoming requests to identify the stream, and in releasing the
+ * stream.
+ *
+ * - usage: The gralloc usage mask needed by the HAL device for producing
+ * the requested type of data. This is used in allocating new gralloc
+ * buffers for the stream buffer queue.
+ *
+ * - max_buffers: The maximum number of buffers the HAL device may need to
+ * have dequeued at the same time. The device may not dequeue more buffers
+ * than this value at the same time.
+ *
*/
- int (*get_reprocess_stream_ops)(struct camera2_device *,
- camera2_stream_ops_t **stream_ops);
+ int (*allocate_stream)(
+ const struct camera2_device *,
+ // inputs
+ uint32_t width,
+ uint32_t height,
+ int format,
+ const camera2_stream_ops_t *stream_ops,
+ // outputs
+ uint32_t *stream_id,
+ uint32_t *format_actual, // IGNORED, will be removed
+ uint32_t *usage,
+ uint32_t *max_buffers);
+
+ /**
+ * Register buffers for a given stream. This is called after a successful
+ * allocate_stream call, and before the first request referencing the stream
+ * is enqueued. This method is intended to allow the HAL device to map or
+ * otherwise prepare the buffers for later use. num_buffers is guaranteed to
+ * be at least max_buffers (from allocate_stream), but may be larger. The
+ * buffers will already be locked for use. At the end of the call, all the
+ * buffers must be ready to be returned to the queue. If the stream format
+ * was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL should
+ * inspect the passed-in buffers here to determine any platform-private
+ * pixel format information.
+ */
+ int (*register_stream_buffers)(
+ const struct camera2_device *,
+ uint32_t stream_id,
+ int num_buffers,
+ buffer_handle_t *buffers);
+
+ /**
+ * Release a stream. Returns an error if called when get_in_progress_count
+ * is non-zero, or if the stream id is invalid.
+ */
+ int (*release_stream)(
+ const struct camera2_device *,
+ uint32_t stream_id);
/**
- * Get the number of streams that can be simultaneously allocated.
- * A request may include any allocated pipeline for its output, without
- * causing a substantial delay in frame production.
+ * allocate_reprocess_stream:
+ *
+ * Allocate a new input stream for use, defined by the output buffer width,
+ * height, and the pixel format. Returns the new stream's ID, gralloc usage
+ * flags, and required simultaneously acquirable buffer count, on
+ * success. Error conditions:
+ *
+ * - Requesting a width/height/format combination not listed as
+ * supported by the sensor's static characteristics
+ *
+ * - Asking for too many reprocessing streams to be configured at once.
+ *
+ * Input parameters:
+ *
+ * - width, height, format: Specification for the buffers to be sent through
+ * this stream. Format must be a value from the HAL_PIXEL_FORMAT_* list.
+ *
+ * - reprocess_stream_ops: A structure of function pointers for acquiring
+ * and releasing buffers for this stream. The underlying stream will be
+ * configured based on the usage and max_buffers outputs.
+ *
+ * Output parameters:
+ *
+ * - stream_id: An unsigned integer identifying this stream. This value is
+ * used in incoming requests to identify the stream, and in releasing the
+ * stream. These ids are numbered separately from the input stream ids.
+ *
+ * - consumer_usage: The gralloc usage mask needed by the HAL device for
+ * consuming the requested type of data. This is used in allocating new
+ * gralloc buffers for the stream buffer queue.
+ *
+ * - max_buffers: The maximum number of buffers the HAL device may need to
+ * have acquired at the same time. The device may not have more buffers
+ * acquired at the same time than this value.
+ *
*/
- int (*get_stream_slot_count)(struct camera2_device *);
+ int (*allocate_reprocess_stream)(const struct camera2_device *,
+ uint32_t width,
+ uint32_t height,
+ uint32_t format,
+ const camera2_stream_in_ops_t *reprocess_stream_ops,
+ // outputs
+ uint32_t *stream_id,
+ uint32_t *consumer_usage,
+ uint32_t *max_buffers);
/**
- * Allocate a new stream for use. Requires specifying which pipeline slot
- * to use. Specifies the buffer width, height, and format.
+ * allocate_reprocess_stream_from_stream:
+ *
+ * Allocate a new input stream for use, which will use the buffers allocated
+ * for an existing output stream. That is, after the HAL enqueues a buffer
+ * onto the output stream, it may see that same buffer handed to it from
+ * this input reprocessing stream. After the HAL releases the buffer back to
+ * the reprocessing stream, it will be returned to the output queue for
+ * reuse.
+ *
* Error conditions:
- * - Allocating an already-allocated slot without first releasing it
- * - Requesting a width/height/format combination not listed as supported
- * - Requesting a pipeline slot >= pipeline slot count.
+ *
+ * - Using an output stream of unsuitable size/format for the basis of the
+ * reprocessing stream.
+ *
+ * - Attempting to allocate too many reprocessing streams at once.
+ *
+ * Input parameters:
+ *
+ * - output_stream_id: The ID of an existing output stream which has
+ * a size and format suitable for reprocessing.
+ *
+ * - reprocess_stream_ops: A structure of function pointers for acquiring
+ * and releasing buffers for this stream. The underlying stream will use
+ * the same graphics buffer handles as the output stream uses.
+ *
+ * Output parameters:
+ *
+ * - stream_id: An unsigned integer identifying this stream. This value is
+ * used in incoming requests to identify the stream, and in releasing the
+ * stream. These ids are numbered separately from the input stream ids.
+ *
+ * The HAL client must always release the reprocessing stream before it
+ * releases the output stream it is based on.
+ *
*/
- int (*allocate_stream)(
- struct camera2_device *,
- uint32_t stream_slot,
- uint32_t width,
- uint32_t height,
- uint32_t format,
- camera2_stream_ops_t *camera2_stream_ops);
+ int (*allocate_reprocess_stream_from_stream)(const struct camera2_device *,
+ uint32_t output_stream_id,
+ const camera2_stream_in_ops_t *reprocess_stream_ops,
+ // outputs
+ uint32_t *stream_id);
/**
- * Release a stream. Returns an error if called when
- * get_in_progress_count is non-zero, or if the pipeline slot is not
- * allocated.
+ * Release a reprocessing stream. Returns an error if called when
+ * get_in_progress_count is non-zero, or if the stream id is not
+ * valid.
+ */
+ int (*release_reprocess_stream)(
+ const struct camera2_device *,
+ uint32_t stream_id);
+
+ /**********************************************************************
+ * Miscellaneous methods
*/
- int (*release_stream)(
- struct camera2_device *,
- uint32_t stream_slot);
/**
- * Get methods to query for vendor extension metadata tag infomation. May
- * set ops to NULL if no vendor extension tags are defined.
+ * Trigger asynchronous activity. This is used for triggering special
+ * behaviors of the camera 3A routines when they are in use. See the
+ * documentation for CAMERA2_TRIGGER_* above for details of the trigger ids
+ * and their arguments.
*/
- int (*get_metadata_vendor_tag_ops)(struct camera2_device*,
- vendor_tag_query_ops_t **ops);
+ int (*trigger_action)(const struct camera2_device *,
+ uint32_t trigger_id,
+ int32_t ext1,
+ int32_t ext2);
+
+ /**
+ * Notification callback setup
+ */
+ int (*set_notify_callback)(const struct camera2_device *,
+ camera2_notify_callback notify_cb,
+ void *user);
/**
- * Release the camera hardware. Requests that are in flight will be
- * canceled. No further buffers will be pushed into any allocated pipelines
- * once this call returns.
+ * Get methods to query for vendor extension metadata tag information. May
+ * set ops to NULL if no vendor extension tags are defined.
*/
- void (*release)(struct camera2_device *);
+ int (*get_metadata_vendor_tag_ops)(const struct camera2_device*,
+ vendor_tag_query_ops_t **ops);
/**
* Dump state of the camera hardware
*/
- int (*dump)(struct camera2_device *, int fd);
+ int (*dump)(const struct camera2_device *, int fd);
} camera2_device_ops_t;
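
A sketch of the framework-side bring-up for one output stream using the methods
above, assuming the usual camera2_device layout with an ops pointer; gralloc
allocation of buffers[] and the queueing of the preview request are elided.

#include <hardware/camera2.h>

static int setup_preview_stream(const struct camera2_device *dev,
                                const camera2_stream_ops_t *stream_ops,
                                buffer_handle_t *buffers, int num_buffers) {
    uint32_t stream_id, format_actual, usage, max_buffers;
    int res = dev->ops->allocate_stream(dev, 1280, 720,
                                        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
                                        stream_ops,
                                        &stream_id, &format_actual,
                                        &usage, &max_buffers);
    if (res != 0)
        return res;

    /* num_buffers must be at least max_buffers; buffers arrive locked. */
    res = dev->ops->register_stream_buffers(dev, stream_id,
                                            num_buffers, buffers);
    if (res != 0)
        return res;

    /* Build a preview request from the template; the framework owns it. */
    camera_metadata_t *request = NULL;
    res = dev->ops->construct_default_request(dev, CAMERA2_TEMPLATE_PREVIEW,
                                              &request);
    /* ... add android.request.outputStreams = { stream_id } and enqueue ... */
    return res;
}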
+/**********************************************************************
+ *
+ * Camera device definition
+ *
+ */
typedef struct camera2_device {
/**
* common.version must equal CAMERA_DEVICE_API_VERSION_2_0 to identify