 include/hardware/camera2.h      | 607
 tests/camera2/Android.mk        |   8
 tests/camera2/camera2.cpp       | 513
 tests/camera2/camera2_utils.cpp | 583
 tests/camera2/camera2_utils.h   | 235
 5 files changed, 1791 insertions(+), 155 deletions(-)
diff --git a/include/hardware/camera2.h b/include/hardware/camera2.h index 36f2a9e..7f06c52 100644 --- a/include/hardware/camera2.h +++ b/include/hardware/camera2.h @@ -18,6 +18,7 @@ #define ANDROID_INCLUDE_CAMERA2_H #include "camera_common.h" +#include "system/camera_metadata.h" /** * Camera device HAL 2.0 [ CAMERA_DEVICE_API_VERSION_2_0 ] @@ -35,7 +36,7 @@ * version 2.0 of the camera module interface (as defined by * camera_module_t.common.module_api_version). * - * See camera_common.h for more details. + * See camera_common.h for more versioning details. * */ @@ -43,105 +44,219 @@ __BEGIN_DECLS struct camera2_device; -/** - * Output image stream queue management +/********************************************************************** + * + * Input/output stream buffer queue interface definitions + * */ +/** + * Output image stream queue interface. A set of these methods is provided to + * the HAL device in allocate_stream(), and is used to interact with the + * gralloc buffer queue for that stream. They may not be called until after + * allocate_stream returns. + */ typedef struct camera2_stream_ops { + /** + * Get a buffer to fill from the queue. The size and format of the buffer + * are fixed for a given stream (defined in allocate_stream), and the stride + * should be queried from the platform gralloc module. The gralloc buffer + * will have been allocated based on the usage flags provided by + * allocate_stream, and will be locked for use. + */ int (*dequeue_buffer)(struct camera2_stream_ops* w, - buffer_handle_t** buffer, int *stride); + buffer_handle_t** buffer); + + /** + * Push a filled buffer to the stream to be used by the consumer. + * + * The timestamp represents the time at start of exposure of the first row + * of the image; it must be from a monotonic clock, and is measured in + * nanoseconds. The timestamps do not need to be comparable between + * different cameras, or consecutive instances of the same camera. However, + * they must be comparable between streams from the same camera. If one + * capture produces buffers for multiple streams, each stream must have the + * same timestamp for that buffer, and that timestamp must match the + * timestamp in the output frame metadata. + */ int (*enqueue_buffer)(struct camera2_stream_ops* w, - buffer_handle_t* buffer); + int64_t timestamp, + buffer_handle_t* buffer); + /** + * Return a buffer to the queue without marking it as filled. + */ int (*cancel_buffer)(struct camera2_stream_ops* w, - buffer_handle_t* buffer); - int (*set_buffer_count)(struct camera2_stream_ops* w, int count); - int (*set_buffers_geometry)(struct camera2_stream_ops* pw, - int w, int h, int format); + buffer_handle_t* buffer); + /** + * Set the crop window for subsequently enqueued buffers. The parameters are + * measured in pixels relative to the buffer width and height. + */ int (*set_crop)(struct camera2_stream_ops *w, - int left, int top, int right, int bottom); - // Timestamps are measured in nanoseconds, and must be comparable - // and monotonically increasing between two frames in the same - // preview stream. They do not need to be comparable between - // consecutive or parallel preview streams, cameras, or app runs. - // The timestamp must be the time at the start of image exposure.
- int (*set_timestamp)(struct camera2_stream_ops *w, int64_t timestamp); - int (*set_usage)(struct camera2_stream_ops* w, int usage); - int (*get_min_undequeued_buffer_count)(const struct camera2_stream_ops *w, - int *count); - int (*lock_buffer)(struct camera2_stream_ops* w, - buffer_handle_t* buffer); + int left, int top, int right, int bottom); + } camera2_stream_ops_t; /** + * Special pixel format value used to indicate that the framework does not care + * what exact pixel format is to be used for an output stream. The device HAL is + * free to select any pixel format, platform-specific and otherwise, and this + * opaque value will be passed on to the platform gralloc module when buffers + * need to be allocated for the stream. + */ +enum { + CAMERA2_HAL_PIXEL_FORMAT_OPAQUE = -1 +}; + +/** + * Input reprocess stream queue management. A set of these methods is provided + * to the HAL device in allocate_reprocess_stream(); they are used to interact with the + * reprocess stream's input gralloc buffer queue. + */ +typedef struct camera2_stream_in_ops { + /** + * Get the next buffer of image data to reprocess. The width, height, and + * format of the buffer are fixed in allocate_reprocess_stream(), and the + * stride and other details should be queried from the platform gralloc + * module as needed. The buffer will already be locked for use. + */ + int (*acquire_buffer)(struct camera2_stream_in_ops *w, + buffer_handle_t** buffer); + /** + * Return a used buffer to the buffer queue for reuse. + */ + int (*release_buffer)(struct camera2_stream_in_ops *w, + buffer_handle_t* buffer); + +} camera2_stream_in_ops_t; + +/********************************************************************** + * + * Metadata queue management, used for requests sent to HAL module, and for * frames produced by the HAL. * - * Queue protocol: + */ + +enum { + CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS = -1 +}; + +/** + * Request input queue protocol: * - * The source holds the queue and its contents. At start, the queue is empty. + * The framework holds the queue and its contents. At start, the queue is empty. * - * 1. When the first metadata buffer is placed into the queue, the source must - * signal the destination by calling notify_queue_not_empty(). + * 1. When the first metadata buffer is placed into the queue, the framework + * signals the device by calling notify_request_queue_not_empty(). * - * 2. After receiving notify_queue_not_empty, the destination must call + * 2. After receiving notify_request_queue_not_empty, the device must call * dequeue() once it's ready to handle the next buffer. * - * 3. Once the destination has processed a buffer, it should try to dequeue - * another buffer. If there are no more buffers available, dequeue() will - * return NULL. In this case, when a buffer becomes available, the source - * must call notify_queue_not_empty() again. If the destination receives a - * NULL return from dequeue, it does not need to query the queue again until - * a notify_queue_not_empty() call is received from the source. + * 3. Once the device has processed a buffer, and is ready for the next buffer, + * it must call dequeue() again instead of waiting for a notification. If + * there are no more buffers available, dequeue() will return NULL. After + * this point, when a buffer becomes available, the framework must call + * notify_request_queue_not_empty() again.
If the device receives a NULL + * return from dequeue, it does not need to query the queue again until a + * notify_request_queue_not_empty() call is received from the framework. + * + * 4. If the device calls buffer_count() and receives 0, this does not mean that + * the framework will provide a notify_request_queue_not_empty() call. The + * framework will only provide such a notification after the device has + * received a NULL from dequeue, or on initial startup. * - * 4. If the destination calls buffer_count() and receives 0, this does not mean - * that the source will provide a notify_queue_not_empty() call. The source - * must only provide such a call after the destination has received a NULL - * from dequeue, or on initial startup. + * 5. The dequeue() call in response to notify_request_queue_not_empty() may be + * on the same thread as the notify_request_queue_not_empty() call, and may + * be performed from within the notify call. * - * 5. The dequeue() call in response to notify_queue_not_empty() may be on the - * same thread as the notify_queue_not_empty() call. The source must not - * deadlock in that case. + * 6. All dequeued request buffers must be returned to the framework by calling + * free_request, including when errors occur, a device flush is requested, or + * when the device is shutting down. */ - -typedef struct camera2_metadata_queue_src_ops { +typedef struct camera2_request_queue_src_ops { /** - * Get count of buffers in queue + * Get the count of request buffers pending in the queue. May return + * CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS if a repeating request (stream + * request) is currently configured. Calling this method has no effect on + * whether the notify_request_queue_not_empty() method will be called by the + * framework. */ - int (*buffer_count)(camera2_metadata_queue_src_ops *q); + int (*request_count)(struct camera2_request_queue_src_ops *q); /** - * Get a metadata buffer from the source. Returns OK if a request is - * available, placing a pointer to it in next_request. + * Get a metadata buffer from the framework. Returns OK if there is no + * error. If the queue is empty, returns NULL in buffer. In that case, the + * device must wait for a notify_request_queue_not_empty() message before + * attempting to dequeue again. Buffers obtained in this way must be + * returned to the framework with free_request(). */ - int (*dequeue)(camera2_metadata_queue_src_ops *q, + int (*dequeue_request)(struct camera2_request_queue_src_ops *q, camera_metadata_t **buffer); /** - * Return a metadata buffer to the source once it has been used + * Return a metadata buffer to the framework once it has been used, or if + * an error or shutdown occurs. */ - int (*free)(camera2_metadata_queue_src_ops *q, + int (*free_request)(struct camera2_request_queue_src_ops *q, camera_metadata_t *old_buffer); -} camera2_metadata_queue_src_ops_t; +} camera2_request_queue_src_ops_t; -typedef struct camera2_metadata_queue_dst_ops { +/** + * Frame output queue protocol: + * + * The framework holds the queue and its contents. At start, the queue is empty. + * + * 1. When the device is ready to fill an output metadata frame, it must dequeue + * a metadata buffer of the required size. + * + * 2. It should then fill the metadata buffer, and place it on the frame queue + * using enqueue_frame. The framework takes ownership of the frame. + * + * 3.
In case of an error, a request to flush the pipeline, or shutdown, the + * device must return any affected dequeued frames to the framework by + * calling cancel_frame. + */ +typedef struct camera2_frame_queue_dst_ops { /** - * Notify destination that the queue is no longer empty + * Get an empty metadata buffer to fill from the framework. The new metadata + * buffer will have room for entries number of metadata entries, plus + * data_bytes worth of extra storage. Frames dequeued here must be returned + * to the framework with either cancel_frame or enqueue_frame. */ - int (*notify_queue_not_empty)(struct camera2_metadata_queue_dst_ops *); + int (*dequeue_frame)(struct camera2_frame_queue_dst_ops *q, + size_t entries, size_t data_bytes, + camera_metadata_t **buffer); -} camera2_metadata_queue_dst_ops_t; + /** + * Return a dequeued metadata buffer to the framework for reuse; do not mark it as + * filled. Use when encountering errors, or flushing the internal request queue. + */ + int (*cancel_frame)(struct camera2_frame_queue_dst_ops *q, + camera_metadata_t *buffer); -/* Defined in camera_metadata.h */ -typedef struct vendor_tag_query_ops vendor_tag_query_ops_t; + /** + * Place a completed metadata frame on the frame output queue. + */ + int (*enqueue_frame)(struct camera2_frame_queue_dst_ops *q, + camera_metadata_t *buffer); + +} camera2_frame_queue_dst_ops_t; + +/********************************************************************** + * + * Notification callback and message definition, and trigger definitions + * + */ /** * Asynchronous notification callback from the HAL, fired for various * reasons. Only for information independent of frame capture, or that require - * specific timing. + * specific timing. The user pointer must be the same one that was passed to the + * device in set_notify_callback(). */ typedef void (*camera2_notify_callback)(int32_t msg_type, int32_t ext1, int32_t ext2, + int32_t ext3, void *user); /** @@ -149,15 +264,39 @@ typedef void (*camera2_notify_callback)(int32_t msg_type, */ enum { /** - * A serious error has occurred. Argument ext1 contains the error code, and - * ext2 and user contain any error-specific information. + * An error has occurred. Argument ext1 contains the error code, and + * ext2 and ext3 contain any error-specific information. */ CAMERA2_MSG_ERROR = 0x0001, /** * The exposure of a given request has begun. Argument ext1 contains the - * request id. + * frame number, and ext2 and ext3 contain the low-order and high-order + * 32 bits of the timestamp for when exposure began. + * (timestamp = (ext3 << 32 | ext2)) + */ + CAMERA2_MSG_SHUTTER = 0x0010, + /** + * The autofocus routine has changed state. Argument ext1 contains the new + * state; the values are the same as those for the metadata field + * android.control.afState. Ext2 contains the latest value passed to + * trigger_action(CAMERA2_TRIGGER_AUTOFOCUS), or 0 if that method has not + * been called. */ - CAMERA2_MSG_SHUTTER = 0x0002 + CAMERA2_MSG_AUTOFOCUS = 0x0020, + /** + * The autoexposure routine has changed state. Argument ext1 contains the + * new state; the values are the same as those for the metadata field + * android.control.aeState. Ext2 contains the latest value passed to + * trigger_action(CAMERA2_TRIGGER_PRECAPTURE_METERING), or 0 if that method + * has not been called. + */ + CAMERA2_MSG_AUTOEXPOSURE = 0x0021, + /** + * The auto-whitebalance routine has changed state.
Argument ext1 contains + * the new state; the values are the same as those for the metadata field + * android.control.awbState. + */ + CAMERA2_MSG_AUTOWB = 0x0022 }; /** @@ -169,58 +308,137 @@ enum { * no further frames or buffer streams will be produced by the * device. Device should be treated as closed. */ - CAMERA2_MSG_ERROR_HARDWARE_FAULT = 0x0001, + CAMERA2_MSG_ERROR_HARDWARE = 0x0001, /** * A serious failure occurred. No further frames or buffer streams will be * produced by the device. Device should be treated as closed. The client * must reopen the device to use it again. */ - CAMERA2_MSG_ERROR_DEVICE_FAULT = 0x0002, + CAMERA2_MSG_ERROR_DEVICE, /** - * The camera service has failed. Device should be treated as released. The client - * must reopen the device to use it again. + * An error has occurred in processing a request. No output (metadata or + * buffers) will be produced for this request. ext2 contains the frame + * number of the request. Subsequent requests are unaffected, and the device + * remains operational. */ - CAMERA2_MSG_ERROR_SERVER_FAULT = 0x0003 + CAMERA2_MSG_ERROR_REQUEST, + /** + * An error has occurred in producing an output frame metadata buffer for a + * request, but image buffers for it will still be available. Subsequent + * requests are unaffected, and the device remains operational. ext2 + * contains the frame number of the request. + */ + CAMERA2_MSG_ERROR_FRAME, + /** + * An error has occurred in placing an output buffer into a stream for a + * request. The frame metadata and other buffers may still be + * available. Subsequent requests are unaffected, and the device remains + * operational. ext2 contains the frame number of the request, and ext3 + * contains the stream id. + */ + CAMERA2_MSG_ERROR_STREAM, + /** + * Number of error types + */ + CAMERA2_MSG_NUM_ERRORS }; -typedef struct camera2_device_ops { +/** + * Possible trigger ids for trigger_action() + */ +enum { /** - * Input request queue methods + * Trigger an autofocus cycle. The effect of the trigger depends on the + * autofocus mode in effect when the trigger is received, which is the mode + * listed in the latest capture request to be dequeued. If the mode is off, + * the trigger has no effect. If autofocus is already scanning, the trigger + * has no effect. In AUTO, MACRO, or CONTINUOUS_* modes, the trigger + * otherwise begins an appropriate scan of the scene for focus. The state of + * the autofocus cycle can be tracked in android.control.afState and the + * corresponding notification. Ext1 is an id that must be returned in + * subsequent auto-focus state change notifications. */ - int (*set_request_queue_src_ops)(struct camera2_device *, - camera2_metadata_queue_src_ops *queue_src_ops); - - int (*get_request_queue_dst_ops)(struct camera2_device *, - camera2_metadata_queue_dst_ops **queue_dst_ops); + CAMERA2_TRIGGER_AUTOFOCUS = 0x0001, + /** + * Trigger a pre-capture metering cycle, which may include firing the flash + * to determine proper capture parameters. Typically, this trigger would be + * fired for a half-depress of a camera shutter key, or before a snapshot + * capture in general. The state of the metering cycle can be tracked in + * android.control.aeState and the corresponding notification. If the + * auto-exposure mode is OFF, the trigger does nothing. Ext1 is an id that + * must be returned in subsequent auto-exposure state change notifications.
+ */ + CAMERA2_TRIGGER_PRECAPTURE_METERING }; +/** + * Possible template types for construct_default_request() + */ +enum { /** - * Input reprocessing queue methods + * Standard camera preview operation with 3A on auto. */ - int (*set_reprocess_queue_ops)(struct camera2_device *, - camera2_metadata_queue_src_ops *queue_src_ops); + CAMERA2_TEMPLATE_PREVIEW = 1, + /** + * Standard camera high-quality still capture with 3A and flash on auto. + */ + CAMERA2_TEMPLATE_STILL_CAPTURE, + /** + * Standard video recording plus preview with 3A on auto, torch off. + */ + CAMERA2_TEMPLATE_VIDEO_RECORD, + /** + * High-quality still capture while recording video. Application will + * include preview, video record, and full-resolution YUV or JPEG streams in + * request. Must not cause stuttering on video stream. 3A on auto. + */ + CAMERA2_TEMPLATE_VIDEO_SNAPSHOT, + /** + * Zero-shutter-lag mode. Application will request preview and + * full-resolution YUV data for each frame, and reprocess it to JPEG when a + * still image is requested by user. Settings should provide highest-quality + * full-resolution images without compromising preview frame rate. 3A on + * auto. + */ + CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG }; - int (*get_reprocess_queue_dst_ops)(struct camera2_device *, - camera2_metadata_queue_dst_ops **queue_dst_ops); + +/********************************************************************** + * + * Camera device operations + * + */ +typedef struct camera2_device_ops { + + /********************************************************************** + * Request and frame queue setup and management methods + */ /** - * Output frame queue methods + * Pass in input request queue interface methods. */ - int (*set_frame_queue_dst_ops)(struct camera2_device *, - camera2_metadata_queue_dst_ops *queue_dst_ops); + int (*set_request_queue_src_ops)(struct camera2_device *, + camera2_request_queue_src_ops_t *request_src_ops); - int (*get_frame_queue_src_ops)(struct camera2_device *, - camera2_metadata_queue_src_ops **queue_dst_ops); + /** + * Notify device that the request queue is no longer empty. Must only be + * called when the first buffer is added to a new queue, or after the + * framework has returned NULL in response to a dequeue call. + */ + int (*notify_request_queue_not_empty)(struct camera2_device *); /** - * Pass in notification methods + * Pass in output frame queue interface methods */ - int (*set_notify_callback)(struct camera2_device *, - camera2_notify_callback notify_cb); + int (*set_frame_queue_dst_ops)(struct camera2_device *, + camera2_frame_queue_dst_ops_t *frame_dst_ops); /** - * Number of camera frames being processed by the device - * at the moment (frames that have had their request dequeued, - * but have not yet been enqueued onto output pipeline(s) ) + * Number of camera requests being processed by the device at the moment + * (captures/reprocesses that have had their request dequeued, but have not + * yet been enqueued onto output pipeline(s) ). No streams may be released + * by the framework until the in-progress count is 0. */ int (*get_in_progress_count)(struct camera2_device *); @@ -228,52 +446,195 @@ typedef struct camera2_device_ops { * Flush all in-progress captures. This includes all dequeued requests * (regular or reprocessing) that have not yet placed any outputs into a * stream or the frame queue. Partially completed captures must be completed - * normally.
No new requests may be dequeued from the request queue until + * the flush completes. */ int (*flush_captures_in_progress)(struct camera2_device *); /** - * Camera stream management + * Create a filled-in default request for standard camera use cases. + * + * The device must return a complete request that is configured to meet the + * requested use case, which must be one of the CAMERA2_TEMPLATE_* + * enums. All request control fields must be included, except for + * android.request.outputStreams and android.request.frameNumber. + * + * The metadata buffer returned must be allocated with + * allocate_camera_metadata. The framework takes ownership of the buffer. */ + int (*construct_default_request)(struct camera2_device *, + int request_template, + camera_metadata_t **request); - /** - * Operations on the input reprocessing stream + /********************************************************************** + * Stream management */ - int (*get_reprocess_stream_ops)(struct camera2_device *, - camera2_stream_ops_t **stream_ops); /** - * Get the number of streams that can be simultaneously allocated. - * A request may include any allocated pipeline for its output, without - * causing a substantial delay in frame production. + * allocate_stream: + * + * Allocate a new output stream for use, defined by the output buffer width, + * height, target, and possibly the pixel format. Returns the new stream's + * ID, gralloc usage flags, minimum queue buffer count, and possibly the + * pixel format, on success. Error conditions: + * + * - Requesting a width/height/format combination not listed as + * supported by the sensor's static characteristics + * + * - Asking for too many streams of a given format type (2 bayer raw + * streams, for example). + * + * Input parameters: + * + * - width, height, format: Specification for the buffers to be sent through + * this stream. Format is a value from the HAL_PIXEL_FORMAT_* list, or + * CAMERA2_HAL_PIXEL_FORMAT_OPAQUE. In the latter case, the camera device + * must select an appropriate (possibly platform-specific) HAL pixel + * format to return in format_actual. In the former case, format_actual + * must be set to match format. + * + * - stream_ops: A structure of function pointers for obtaining and queuing + * up buffers for this stream. The underlying stream will be configured + * based on the usage and max_buffers outputs. The methods in this + * structure may not be called until after allocate_stream returns. + * + * Output parameters: + * + * - stream_id: An unsigned integer identifying this stream. This value is + * used in incoming requests to identify the stream, and in releasing the + * stream. + * + * - format_actual: If the input format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, + * then the device must select the appropriate (possibly platform-specific) + * pixel format and return it in *format_actual. It will be treated as an + * opaque value by the framework, and simply passed to the gralloc module + * when new buffers need to be allocated. If the input format is one of + * the values from HAL_PIXEL_FORMAT_* list, then *format_actual must be + * set equal to format. In the latter case, format_actual may also be + * NULL, in which case it can be ignored as an output. + * + * - usage: The gralloc usage mask needed by the HAL device for producing + * the requested type of data. This is used in allocating new gralloc + * buffers for the stream buffer queue.
+ * + * - max_buffers: The maximum number of buffers the HAL device may need to + * have dequeued at the same time. The device may not dequeue more buffers + * than this value at the same time. + * */ - int (*get_stream_slot_count)(struct camera2_device *); + int (*allocate_stream)( + struct camera2_device *, + // inputs + uint32_t width, + uint32_t height, + int format, + camera2_stream_ops_t *stream_ops, + // outputs + uint32_t *stream_id, + uint32_t *format_actual, + uint32_t *usage, + uint32_t *max_buffers); /** - * Allocate a new stream for use. Requires specifying which pipeline slot - * to use. Specifies the buffer width, height, and format. - * Error conditions: - * - Allocating an already-allocated slot without first releasing it - * - Requesting a width/height/format combination not listed as supported - * - Requesting a pipeline slot >= pipeline slot count. + * Register buffers for a given stream. This is called after a successful + * allocate_stream call, and before the first request referencing the stream + * is enqueued. This method is intended to allow the HAL device to map or + * otherwise prepare the buffers for later use. num_buffers is guaranteed to + * be at least max_buffers (from allocate_stream), but may be larger. The + * buffers will already be locked for use. At the end of the call, all the + * buffers must be ready to be returned to the queue. */ - int (*allocate_stream)( - struct camera2_device *, - uint32_t stream_slot, - uint32_t width, - uint32_t height, - uint32_t format, - camera2_stream_ops_t *camera2_stream_ops); + int (*register_stream_buffers)( + struct camera2_device *, + uint32_t stream_id, + int num_buffers, + buffer_handle_t *buffers); /** - * Release a stream. Returns an error if called when - * get_in_progress_count is non-zero, or if the pipeline slot is not - * allocated. + * Release a stream. Returns an error if called when get_in_progress_count + * is non-zero, or if the stream id is invalid. */ int (*release_stream)( - struct camera2_device *, - uint32_t stream_slot); + struct camera2_device *, + uint32_t stream_id); + + /** + * allocate_reprocess_stream: + * + * Allocate a new input stream for use, defined by the output buffer width, + * height, and the pixel format. Returns the new stream's ID, gralloc usage + * flags, and required simultaneously acquirable buffer count, on + * success. Error conditions: + * + * - Requesting a width/height/format combination not listed as + * supported by the sensor's static characteristics + * + * - Asking for too many reprocessing streams to be configured at once. + * + * Input parameters: + * + * - width, height, format: Specification for the buffers to be sent through + * this stream. Format must be a value from the HAL_PIXEL_FORMAT_* list. + * + * - reprocess_stream_ops: A structure of function pointers for acquiring + * and releasing buffers for this stream. The underlying stream will be + * configured based on the usage and max_buffers outputs. + * + * Output parameters: + * + * - stream_id: An unsigned integer identifying this stream. This value is + * used in incoming requests to identify the stream, and in releasing the + * stream. These ids are numbered separately from the output stream ids. + * + * - consumer_usage: The gralloc usage mask needed by the HAL device for + * consuming the requested type of data. This is used in allocating new + * gralloc buffers for the stream buffer queue.
+ * + * - max_buffers: The maximum number of buffers the HAL device may need to + * have acquired at the same time. The device may not have more buffers + * acquired at the same time than this value. + * + */ + int (*allocate_reprocess_stream)(struct camera2_device *, + uint32_t width, + uint32_t height, + uint32_t format, + camera2_stream_in_ops_t *reprocess_stream_ops, + // outputs + uint32_t *stream_id, + uint32_t *consumer_usage, + uint32_t *max_buffers); + + /** + * Release a reprocessing stream. Returns an error if called when + * get_in_progress_count is non-zero, or if the stream id is not + * valid. + */ + int (*release_reprocess_stream)( + struct camera2_device *, + uint32_t stream_id); + + /********************************************************************** + * Miscellaneous methods + */ + + /** + * Trigger asynchronous activity. This is used for triggering special + * behaviors of the camera 3A routines when they are in use. See the + * documentation for CAMERA2_TRIGGER_* above for details of the trigger ids + * and their arguments. + */ + int (*trigger_action)(struct camera2_device *, + uint32_t trigger_id, + int ext1, + int ext2); + + /** + * Notification callback setup + */ + int (*set_notify_callback)(struct camera2_device *, + camera2_notify_callback notify_cb, + void *user); /** * Get methods to query for vendor extension metadata tag information. May @@ -283,19 +644,17 @@ typedef struct camera2_device_ops { vendor_tag_query_ops_t **ops); /** - * Release the camera hardware. Requests that are in flight will be - * canceled. No further buffers will be pushed into any allocated pipelines - * once this call returns. - */ - void (*release)(struct camera2_device *); - - /** * Dump state of the camera hardware */ int (*dump)(struct camera2_device *, int fd); } camera2_device_ops_t; +/********************************************************************** + * + * Camera device definition + * + */ typedef struct camera2_device { /** * common.version must equal CAMERA_DEVICE_API_VERSION_2_0 to identify diff --git a/tests/camera2/Android.mk b/tests/camera2/Android.mk index 340ec30..325e82d 100644 --- a/tests/camera2/Android.mk +++ b/tests/camera2/Android.mk @@ -2,13 +2,15 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - camera2.cpp + camera2.cpp \ + camera2_utils.cpp LOCAL_SHARED_LIBRARIES := \ libutils \ libstlport \ libhardware \ - libcamera_metadata + libcamera_metadata \ + libgui LOCAL_STATIC_LIBRARIES := \ libgtest \ @@ -21,7 +23,7 @@ LOCAL_C_INCLUDES += \ external/stlport/stlport \ system/media/camera/include \ -LOCAL_MODULE:= camera2_hal_tests +LOCAL_MODULE:= camera2_test LOCAL_MODULE_TAGS := tests include $(BUILD_EXECUTABLE) diff --git a/tests/camera2/camera2.cpp b/tests/camera2/camera2.cpp index d13d7cd..50f0b06 100644 --- a/tests/camera2/camera2.cpp +++ b/tests/camera2/camera2.cpp @@ -14,10 +14,21 @@ * limitations under the License.
*/ -#include <system/camera_metadata.h> -#include <hardware/camera2.h> +#define LOG_TAG "Camera2_test" +#define LOG_NDEBUG 0 + +#include <utils/Log.h> #include <gtest/gtest.h> #include <iostream> +#include <fstream> + +#include <utils/Vector.h> +#include <gui/CpuConsumer.h> +#include <system/camera_metadata.h> + +#include "camera2_utils.h" + +namespace android { class Camera2Test: public testing::Test { public: @@ -33,12 +44,16 @@ class Camera2Test: public testing::Test { ASSERT_TRUE(NULL != module) << "No camera module was set by hw_get_module"; - std::cout << " Camera module name: " << module->name << std::endl; - std::cout << " Camera module author: " << module->author << std::endl; - std::cout << " Camera module API version: 0x" << std::hex - << module->module_api_version << std::endl; - std::cout << " Camera module HAL API version: 0x" << std::hex - << module->hal_api_version << std::endl; + IF_ALOGV() { + std::cout << " Camera module name: " + << module->name << std::endl; + std::cout << " Camera module author: " + << module->author << std::endl; + std::cout << " Camera module API version: 0x" << std::hex + << module->module_api_version << std::endl; + std::cout << " Camera module HAL API version: 0x" << std::hex + << module->hal_api_version << std::endl; + } int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0; ASSERT_EQ(version2_0, module->module_api_version) @@ -52,7 +67,10 @@ class Camera2Test: public testing::Test { sNumCameras = sCameraModule->get_number_of_cameras(); ASSERT_LT(0, sNumCameras) << "No camera devices available!"; - std::cout << " Camera device count: " << sNumCameras << std::endl; + IF_ALOGV() { + std::cout << " Camera device count: " << sNumCameras << std::endl; + } + sCameraSupportsHal2 = new bool[sNumCameras]; for (int i = 0; i < sNumCameras; i++) { @@ -60,19 +78,24 @@ class Camera2Test: public testing::Test { res = sCameraModule->get_camera_info(i, &info); ASSERT_EQ(0, res) << "Failure getting camera info for camera " << i; - std::cout << " Camera device: " << std::dec - << i << std::endl;; - std::cout << " Facing: " << std::dec - << info.facing << std::endl; - std::cout << " Orientation: " << std::dec - << info.orientation << std::endl; - std::cout << " Version: 0x" << std::hex << - info.device_version << std::endl; + IF_ALOGV() { + std::cout << " Camera device: " << std::dec + << i << std::endl;; + std::cout << " Facing: " << std::dec + << info.facing << std::endl; + std::cout << " Orientation: " << std::dec + << info.orientation << std::endl; + std::cout << " Version: 0x" << std::hex << + info.device_version << std::endl; + } if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0) { sCameraSupportsHal2[i] = true; ASSERT_TRUE(NULL != info.static_camera_characteristics); - std::cout << " Static camera metadata:" << std::endl; - dump_camera_metadata(info.static_camera_characteristics, 0, 1); + IF_ALOGV() { + std::cout << " Static camera metadata:" << std::endl; + dump_camera_metadata(info.static_camera_characteristics, + 0, 1); + } } else { sCameraSupportsHal2[i] = false; } @@ -83,13 +106,26 @@ class Camera2Test: public testing::Test { return sCameraModule; } - static const camera2_device_t *openCameraDevice(int id) { + static int getNumCameras() { + return sNumCameras; + } + + static bool isHal2Supported(int id) { + return sCameraSupportsHal2[id]; + } + + static camera2_device_t *openCameraDevice(int id) { + ALOGV("Opening camera %d", id); if (NULL == sCameraSupportsHal2) return NULL; if (id >= sNumCameras) return NULL; if (!sCameraSupportsHal2[id]) return 
NULL; hw_device_t *device = NULL; const camera_module_t *cam_module = getCameraModule(); + if (cam_module == NULL) { + return NULL; + } + char camId[10]; int res; @@ -98,7 +134,7 @@ class Camera2Test: public testing::Test { (const hw_module_t*)cam_module, camId, &device); - if (res < 0 || cam_module == NULL) { + if (res != NO_ERROR || device == NULL) { return NULL; } camera2_device_t *cam_device = @@ -106,18 +142,439 @@ class Camera2Test: public testing::Test { return cam_device; } - private: + static status_t configureCameraDevice(camera2_device_t *dev, + MetadataQueue &requestQueue, + MetadataQueue &frameQueue, + NotifierListener &listener) { + + status_t err; + + err = dev->ops->set_request_queue_src_ops(dev, + requestQueue.getToConsumerInterface()); + if (err != OK) return err; + + requestQueue.setFromConsumerInterface(dev); + + err = dev->ops->set_frame_queue_dst_ops(dev, + frameQueue.getToProducerInterface()); + if (err != OK) return err; + + err = listener.getNotificationsFrom(dev); + if (err != OK) return err; + + vendor_tag_query_ops_t *vendor_metadata_tag_ops; + err = dev->ops->get_metadata_vendor_tag_ops(dev, &vendor_metadata_tag_ops); + if (err != OK) return err; + + err = set_camera_metadata_vendor_tag_ops(vendor_metadata_tag_ops); + if (err != OK) return err; + + return OK; + } + + static status_t closeCameraDevice(camera2_device_t *cam_dev) { + int res; + ALOGV("Closing camera %p", cam_dev); + + hw_device_t *dev = reinterpret_cast<hw_device_t *>(cam_dev); + res = dev->close(dev); + return res; + } + + void setUpCamera(int id) { + ASSERT_GT(sNumCameras, id); + status_t res; + + if (mDevice != NULL) { + closeCameraDevice(mDevice); + } + mDevice = openCameraDevice(id); + ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device"; + + camera_info info; + res = sCameraModule->get_camera_info(id, &info); + ASSERT_EQ(OK, res); + mStaticInfo = info.static_camera_characteristics; + + res = configureCameraDevice(mDevice, + mRequests, + mFrames, + mNotifications); + ASSERT_EQ(OK, res) << "Failure to configure camera device"; + + } + + void setUpStream(sp<ISurfaceTexture> consumer, + int width, int height, int format, int *id) { + status_t res; + + StreamAdapter* stream = new StreamAdapter(consumer); + + ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height); + res = stream->connectToDevice(mDevice, width, height, format); + ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: " + << strerror(-res); + mStreams.push_back(stream); + + *id = stream->getId(); + } + + void disconnectStream(int id) { + status_t res; + unsigned int i=0; + for (; i < mStreams.size(); i++) { + if (mStreams[i]->getId() == id) { + res = mStreams[i]->disconnect(); + ASSERT_EQ(NO_ERROR, res) << + "Failed to disconnect stream " << id; + break; + } + } + ASSERT_GT(mStreams.size(), i) << "Stream id not found:" << id; + } + + void getResolutionList(uint32_t format, + uint32_t **list, + size_t *count) { + + uint32_t *availableFormats; + size_t availableFormatsCount; + status_t res; + res = find_camera_metadata_entry(mStaticInfo, + ANDROID_SCALER_AVAILABLE_FORMATS, + NULL, + (void**)&availableFormats, + &availableFormatsCount); + ASSERT_EQ(OK, res); + + uint32_t formatIdx; + for (formatIdx=0; formatIdx < availableFormatsCount; formatIdx++) { + if (availableFormats[formatIdx] == format) break; + } + ASSERT_NE(availableFormatsCount, formatIdx) + << "No support found for format 0x" << std::hex << format; + + uint32_t *availableSizesPerFormat; + size_t availableSizesPerFormatCount; + res = 
find_camera_metadata_entry(mStaticInfo, + ANDROID_SCALER_AVAILABLE_SIZES_PER_FORMAT, + NULL, + (void**)&availableSizesPerFormat, + &availableSizesPerFormatCount); + ASSERT_EQ(OK, res); + + int size_offset = 0; + for (unsigned int i=0; i < formatIdx; i++) { + size_offset += availableSizesPerFormat[i]; + } + + uint32_t *availableSizes; + size_t availableSizesCount; + res = find_camera_metadata_entry(mStaticInfo, + ANDROID_SCALER_AVAILABLE_SIZES, + NULL, + (void**)&availableSizes, + &availableSizesCount); + ASSERT_EQ(OK, res); + + *list = availableSizes + size_offset; + *count = availableSizesPerFormat[formatIdx]; + } + + virtual void SetUp() { + const ::testing::TestInfo* const testInfo = + ::testing::UnitTest::GetInstance()->current_test_info(); + + ALOGV("*** Starting test %s in test case %s", testInfo->name(), testInfo->test_case_name()); + mDevice = NULL; + } + + virtual void TearDown() { + for (unsigned int i = 0; i < mStreams.size(); i++) { + delete mStreams[i]; + } + if (mDevice != NULL) { + closeCameraDevice(mDevice); + } + } + + camera2_device *mDevice; + camera_metadata_t *mStaticInfo; + + MetadataQueue mRequests; + MetadataQueue mFrames; + NotifierListener mNotifications; + + Vector<StreamAdapter*> mStreams; + + private: static camera_module_t *sCameraModule; - static int sNumCameras; - static bool *sCameraSupportsHal2; + static int sNumCameras; + static bool *sCameraSupportsHal2; }; camera_module_t *Camera2Test::sCameraModule = NULL; -int Camera2Test::sNumCameras = 0; -bool *Camera2Test::sCameraSupportsHal2 = NULL; +bool *Camera2Test::sCameraSupportsHal2 = NULL; +int Camera2Test::sNumCameras = 0; +static const nsecs_t USEC = 1000; +static const nsecs_t MSEC = 1000*USEC; +static const nsecs_t SEC = 1000*MSEC; -TEST_F(Camera2Test, Basic) { - ASSERT_TRUE(NULL != getCameraModule()); + +TEST_F(Camera2Test, OpenClose) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + camera2_device_t *d = openCameraDevice(id); + ASSERT_TRUE(NULL != d) << "Failed to open camera device"; + + res = closeCameraDevice(d); + ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device"; + } } + +TEST_F(Camera2Test, Capture1Raw) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + ASSERT_NO_FATAL_FAILURE(setUpCamera(id)); + + sp<CpuConsumer> rawConsumer = new CpuConsumer(1); + sp<FrameWaiter> rawWaiter = new FrameWaiter(); + rawConsumer->setFrameAvailableListener(rawWaiter); + + uint32_t *rawResolutions; + size_t rawResolutionsCount; + + int format = HAL_PIXEL_FORMAT_RAW_SENSOR; + + getResolutionList(format, + &rawResolutions, &rawResolutionsCount); + ASSERT_LT((uint32_t)0, rawResolutionsCount); + + // Pick first available raw resolution + int width = rawResolutions[0]; + int height = rawResolutions[1]; + + int streamId; + ASSERT_NO_FATAL_FAILURE( + setUpStream(rawConsumer->getProducerInterface(), + width, height, format, &streamId) ); + + camera_metadata_t *request; + request = allocate_camera_metadata(20, 2000); + + uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL; + add_camera_metadata_entry(request, + ANDROID_REQUEST_METADATA_MODE, + (void**)&metadataMode, 1); + uint32_t outputStreams = streamId; + add_camera_metadata_entry(request, + ANDROID_REQUEST_OUTPUT_STREAMS, + (void**)&outputStreams, 1); + + uint64_t exposureTime = 2*MSEC; + add_camera_metadata_entry(request, + ANDROID_SENSOR_EXPOSURE_TIME, + (void**)&exposureTime, 1); + uint64_t frameDuration = 30*MSEC; + 
add_camera_metadata_entry(request, + ANDROID_SENSOR_FRAME_DURATION, + (void**)&frameDuration, 1); + uint32_t sensitivity = 100; + add_camera_metadata_entry(request, + ANDROID_SENSOR_SENSITIVITY, + (void**)&sensitivity, 1); + + uint32_t hourOfDay = 12; + add_camera_metadata_entry(request, + 0x80000000, // EMULATOR_HOUROFDAY + &hourOfDay, 1); + + IF_ALOGV() { + std::cout << "Input request: " << std::endl; + dump_camera_metadata(request, 0, 1); + } + + res = mRequests.enqueue(request); + ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res); + + res = mFrames.waitForBuffer(exposureTime + SEC); + ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res); + + camera_metadata_t *frame; + res = mFrames.dequeue(&frame); + ASSERT_EQ(NO_ERROR, res); + ASSERT_TRUE(frame != NULL); + + IF_ALOGV() { + std::cout << "Output frame:" << std::endl; + dump_camera_metadata(frame, 0, 1); + } + + res = rawWaiter->waitForFrame(exposureTime + SEC); + ASSERT_EQ(NO_ERROR, res); + + CpuConsumer::LockedBuffer buffer; + res = rawConsumer->lockNextBuffer(&buffer); + ASSERT_EQ(NO_ERROR, res); + + IF_ALOGV() { + const char *dumpname = + "/data/local/tmp/camera2_test-capture1raw-dump.raw"; + ALOGV("Dumping raw buffer to %s", dumpname); + // Write to file + std::ofstream rawFile(dumpname); + for (unsigned int y = 0; y < buffer.height; y++) { + rawFile.write((const char *)(buffer.data + y * buffer.stride * 2), + buffer.width * 2); + } + rawFile.close(); + } + + res = rawConsumer->unlockBuffer(buffer); + ASSERT_EQ(NO_ERROR, res); + + ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId)); + + res = closeCameraDevice(mDevice); + ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device"; + + } +} + +TEST_F(Camera2Test, CaptureBurstRaw) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + ASSERT_NO_FATAL_FAILURE(setUpCamera(id)); + + sp<CpuConsumer> rawConsumer = new CpuConsumer(1); + sp<FrameWaiter> rawWaiter = new FrameWaiter(); + rawConsumer->setFrameAvailableListener(rawWaiter); + + uint32_t *rawResolutions; + size_t rawResolutionsCount; + + int format = HAL_PIXEL_FORMAT_RAW_SENSOR; + + getResolutionList(format, + &rawResolutions, &rawResolutionsCount); + ASSERT_LT((uint32_t)0, rawResolutionsCount); + + // Pick first available raw resolution + int width = rawResolutions[0]; + int height = rawResolutions[1]; + + int streamId; + ASSERT_NO_FATAL_FAILURE( + setUpStream(rawConsumer->getProducerInterface(), + width, height, format, &streamId) ); + + camera_metadata_t *request; + request = allocate_camera_metadata(20, 2000); + + uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL; + add_camera_metadata_entry(request, + ANDROID_REQUEST_METADATA_MODE, + (void**)&metadataMode, 1); + uint32_t outputStreams = streamId; + add_camera_metadata_entry(request, + ANDROID_REQUEST_OUTPUT_STREAMS, + (void**)&outputStreams, 1); + + uint64_t frameDuration = 30*MSEC; + add_camera_metadata_entry(request, + ANDROID_SENSOR_FRAME_DURATION, + (void**)&frameDuration, 1); + uint32_t sensitivity = 100; + add_camera_metadata_entry(request, + ANDROID_SENSOR_SENSITIVITY, + (void**)&sensitivity, 1); + + uint32_t hourOfDay = 12; + add_camera_metadata_entry(request, + 0x80000000, // EMULATOR_HOUROFDAY + &hourOfDay, 1); + + IF_ALOGV() { + std::cout << "Input request template: " << std::endl; + dump_camera_metadata(request, 0, 1); + } + + int numCaptures = 10; + + // Enqueue numCaptures requests with increasing exposure time + + uint64_t exposureTime = 1 * MSEC; + for (int reqCount 
= 0; reqCount < numCaptures; reqCount++ ) { + camera_metadata_t *req; + req = allocate_camera_metadata(20, 2000); + append_camera_metadata(req, request); + + add_camera_metadata_entry(req, + ANDROID_SENSOR_EXPOSURE_TIME, + (void**)&exposureTime, 1); + exposureTime *= 2; + + res = mRequests.enqueue(req); + ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " + << strerror(-res); + } + + // Get frames and image buffers one by one + for (int frameCount = 0; frameCount < 10; frameCount++) { + res = mFrames.waitForBuffer(SEC); + ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res); + + camera_metadata_t *frame; + res = mFrames.dequeue(&frame); + ASSERT_EQ(NO_ERROR, res); + ASSERT_TRUE(frame != NULL); + + uint32_t *frameNumber; + res = find_camera_metadata_entry(frame, + ANDROID_REQUEST_FRAME_COUNT, + NULL, (void**)&frameNumber, NULL); + ASSERT_EQ(NO_ERROR, res); + ASSERT_EQ(frameCount, *frameNumber); + + res = rawWaiter->waitForFrame(SEC); + ASSERT_EQ(NO_ERROR, res) << + "Never got raw data for capture " << frameCount; + + CpuConsumer::LockedBuffer buffer; + res = rawConsumer->lockNextBuffer(&buffer); + ASSERT_EQ(NO_ERROR, res); + + IF_ALOGV() { + char dumpname[60]; + snprintf(dumpname, 60, + "/data/local/tmp/camera2_test-capture1raw-dump_%d.raw", + frameCount); + ALOGV("Dumping raw buffer to %s", dumpname); + // Write to file + std::ofstream rawFile(dumpname); + for (unsigned int y = 0; y < buffer.height; y++) { + rawFile.write( + (const char *)(buffer.data + y * buffer.stride * 2), + buffer.width * 2); + } + rawFile.close(); + } + + res = rawConsumer->unlockBuffer(buffer); + ASSERT_EQ(NO_ERROR, res); + } + } +} + +} // namespace android diff --git a/tests/camera2/camera2_utils.cpp b/tests/camera2/camera2_utils.cpp new file mode 100644 index 0000000..bd56644 --- /dev/null +++ b/tests/camera2/camera2_utils.cpp @@ -0,0 +1,583 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// Utility classes for camera2 HAL testing + +#define LOG_TAG "Camera2_test_utils" +#define LOG_NDEBUG 0 + +#include "utils/Log.h" +#include "camera2_utils.h" + +namespace android { + +/** + * MetadataQueue + */ + +MetadataQueue::MetadataQueue(): + mDevice(NULL), + mFrameCount(0), + mCount(0), + mStreamSlotCount(0), + mSignalConsumer(true) +{ + camera2_request_queue_src_ops::dequeue_request = consumer_dequeue; + camera2_request_queue_src_ops::request_count = consumer_buffer_count; + camera2_request_queue_src_ops::free_request = consumer_free; + + camera2_frame_queue_dst_ops::dequeue_frame = producer_dequeue; + camera2_frame_queue_dst_ops::cancel_frame = producer_cancel; + camera2_frame_queue_dst_ops::enqueue_frame = producer_enqueue; +} + +MetadataQueue::~MetadataQueue() { + freeBuffers(mEntries.begin(), mEntries.end()); + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); +} + +// Interface to camera2 HAL as consumer (input requests/reprocessing) +camera2_request_queue_src_ops_t* MetadataQueue::getToConsumerInterface() { + return static_cast<camera2_request_queue_src_ops_t*>(this); +} + +void MetadataQueue::setFromConsumerInterface(camera2_device_t *d) { + mDevice = d; +} + +camera2_frame_queue_dst_ops_t* MetadataQueue::getToProducerInterface() { + return static_cast<camera2_frame_queue_dst_ops_t*>(this); +} + +// Real interfaces +status_t MetadataQueue::enqueue(camera_metadata_t *buf) { + Mutex::Autolock l(mMutex); + + mCount++; + mEntries.push_back(buf); + notEmpty.signal(); + + if (mSignalConsumer && mDevice != NULL) { + mSignalConsumer = false; + + mMutex.unlock(); + ALOGV("%s: Signaling consumer", __FUNCTION__); + mDevice->ops->notify_request_queue_not_empty(mDevice); + mMutex.lock(); + } + return OK; +} + +int MetadataQueue::getBufferCount() { + Mutex::Autolock l(mMutex); + if (mStreamSlotCount > 0) { + return CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS; + } + return mCount; +} + +status_t MetadataQueue::dequeue(camera_metadata_t **buf, bool incrementCount) { + Mutex::Autolock l(mMutex); + + if (mCount == 0) { + if (mStreamSlotCount == 0) { + ALOGV("%s: Empty", __FUNCTION__); + *buf = NULL; + mSignalConsumer = true; + return OK; + } + ALOGV("%s: Streaming %d frames to queue", __FUNCTION__, + mStreamSlotCount); + + for (List<camera_metadata_t*>::iterator slotEntry = mStreamSlot.begin(); + slotEntry != mStreamSlot.end(); + slotEntry++ ) { + size_t entries = get_camera_metadata_entry_count(*slotEntry); + size_t dataBytes = get_camera_metadata_data_count(*slotEntry); + + camera_metadata_t *copy = allocate_camera_metadata(entries, dataBytes); + append_camera_metadata(copy, *slotEntry); + mEntries.push_back(copy); + } + mCount = mStreamSlotCount; + } + ALOGV("MetadataQueue: deque (%d buffers)", mCount); + camera_metadata_t *b = *(mEntries.begin()); + mEntries.erase(mEntries.begin()); + + if (incrementCount) { + add_camera_metadata_entry(b, + ANDROID_REQUEST_FRAME_COUNT, + (void**)&mFrameCount, 1); + mFrameCount++; + } + + *buf = b; + mCount--; + + return OK; +} + +status_t MetadataQueue::waitForBuffer(nsecs_t timeout) { + Mutex::Autolock l(mMutex); + status_t res; + while (mCount == 0) { + res = notEmpty.waitRelative(mMutex,timeout); + if (res != OK) return res; + } + return OK; +} + +status_t MetadataQueue::setStreamSlot(camera_metadata_t *buf) { + if (buf == NULL) { + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + mStreamSlotCount = 0; + return OK; + } + if (mStreamSlotCount > 1) { + List<camera_metadata_t*>::iterator deleter = ++mStreamSlot.begin(); + 
freeBuffers(++mStreamSlot.begin(), mStreamSlot.end()); + mStreamSlotCount = 1; + } + if (mStreamSlotCount == 1) { + free_camera_metadata( *(mStreamSlot.begin()) ); + *(mStreamSlot.begin()) = buf; + } else { + mStreamSlot.push_front(buf); + mStreamSlotCount = 1; + } + return OK; +} + +status_t MetadataQueue::setStreamSlot(const List<camera_metadata_t*> &bufs) { + if (mStreamSlotCount > 0) { + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + } + mStreamSlot = bufs; + mStreamSlotCount = mStreamSlot.size(); + + return OK; +} + +status_t MetadataQueue::freeBuffers(List<camera_metadata_t*>::iterator start, + List<camera_metadata_t*>::iterator end) { + while (start != end) { + free_camera_metadata(*start); + start = mStreamSlot.erase(start); + } + return OK; +} + +int MetadataQueue::consumer_buffer_count( + camera2_request_queue_src_ops_t *q) { + MetadataQueue *queue = static_cast<MetadataQueue *>(q); + return queue->getBufferCount(); +} + +int MetadataQueue::consumer_dequeue(camera2_request_queue_src_ops_t *q, + camera_metadata_t **buffer) { + MetadataQueue *queue = static_cast<MetadataQueue *>(q); + return queue->dequeue(buffer, true); +} + +int MetadataQueue::consumer_free(camera2_request_queue_src_ops_t *q, + camera_metadata_t *old_buffer) { + MetadataQueue *queue = static_cast<MetadataQueue *>(q); + free_camera_metadata(old_buffer); + return OK; +} + +int MetadataQueue::producer_dequeue(camera2_frame_queue_dst_ops_t *q, + size_t entries, size_t bytes, + camera_metadata_t **buffer) { + camera_metadata_t *new_buffer = + allocate_camera_metadata(entries, bytes); + if (new_buffer == NULL) return NO_MEMORY; + *buffer = new_buffer; + return OK; +} + +int MetadataQueue::producer_cancel(camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *old_buffer) { + free_camera_metadata(old_buffer); + return OK; +} + +int MetadataQueue::producer_enqueue(camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *filled_buffer) { + MetadataQueue *queue = static_cast<MetadataQueue *>(q); + return queue->enqueue(filled_buffer); +} + +/** + * NotifierListener + */ + +NotifierListener::NotifierListener() { +} + +status_t NotifierListener::getNotificationsFrom(camera2_device *dev) { + if (!dev) return BAD_VALUE; + status_t err; + err = dev->ops->set_notify_callback(dev, + notify_callback_dispatch, + (void*)this); + return err; +} + +status_t NotifierListener::getNextNotification(int32_t *msg_type, + int32_t *ext1, + int32_t *ext2, + int32_t *ext3) { + Mutex::Autolock l(mMutex); + if (mNotifications.size() == 0) return BAD_VALUE; + return getNextNotificationLocked(msg_type, ext1, ext2, ext3); +} + +status_t NotifierListener::waitForNotification(int32_t *msg_type, + int32_t *ext1, + int32_t *ext2, + int32_t *ext3) { + Mutex::Autolock l(mMutex); + while (mNotifications.size() == 0) { + mNewNotification.wait(mMutex); + } + return getNextNotificationLocked(msg_type, ext1, ext2, ext3); +} + +int NotifierListener::numNotifications() { + Mutex::Autolock l(mMutex); + return mNotifications.size(); +} + +status_t NotifierListener::getNextNotificationLocked(int32_t *msg_type, + int32_t *ext1, + int32_t *ext2, + int32_t *ext3) { + *msg_type = mNotifications.begin()->msg_type; + *ext1 = mNotifications.begin()->ext1; + *ext2 = mNotifications.begin()->ext2; + *ext3 = mNotifications.begin()->ext3; + mNotifications.erase(mNotifications.begin()); + return OK; +} + +void NotifierListener::onNotify(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3) { + Mutex::Autolock l(mMutex); + 
mNotifications.push_back(Notification(msg_type, ext1, ext2, ext3)); + mNewNotification.signal(); +} + +void NotifierListener::notify_callback_dispatch(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3, + void *user) { + NotifierListener *me = reinterpret_cast<NotifierListener*>(user); + me->onNotify(msg_type, ext1, ext2, ext3); +} + +/** + * StreamAdapter + */ + +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +StreamAdapter::StreamAdapter(sp<ISurfaceTexture> consumer): + mState(UNINITIALIZED), mDevice(NULL), + mId(-1), + mWidth(0), mHeight(0), mFormatRequested(0) +{ + mConsumerInterface = new SurfaceTextureClient(consumer); + camera2_stream_ops::dequeue_buffer = dequeue_buffer; + camera2_stream_ops::enqueue_buffer = enqueue_buffer; + camera2_stream_ops::cancel_buffer = cancel_buffer; + camera2_stream_ops::set_crop = set_crop; +} + +StreamAdapter::~StreamAdapter() { + disconnect(); +} + +status_t StreamAdapter::connectToDevice(camera2_device_t *d, + uint32_t width, uint32_t height, int format) { + if (mState != UNINITIALIZED) return INVALID_OPERATION; + if (d == NULL) { + ALOGE("%s: Null device passed to stream adapter", __FUNCTION__); + return BAD_VALUE; + } + + status_t res; + + mWidth = width; + mHeight = height; + mFormatRequested = format; + + // Allocate device-side stream interface + + uint32_t id; + uint32_t formatActual; + uint32_t usage; + uint32_t maxBuffers = 2; + res = d->ops->allocate_stream(d, + mWidth, mHeight, mFormatRequested, getStreamOps(), + &id, &formatActual, &usage, &maxBuffers); + if (res != OK) { + ALOGE("%s: Device stream allocation failed: %s (%d)", + __FUNCTION__, strerror(-res), res); + mState = UNINITIALIZED; + return res; + } + mDevice = d; + + mId = id; + mFormat = formatActual; + mUsage = usage; + mMaxProducerBuffers = maxBuffers; + + // Configure consumer-side ANativeWindow interface + + res = native_window_api_connect(mConsumerInterface.get(), + NATIVE_WINDOW_API_CAMERA); + if (res != OK) { + ALOGE("%s: Unable to connect to native window for stream %d", + __FUNCTION__, mId); + mState = ALLOCATED; + return res; + } + + res = native_window_set_usage(mConsumerInterface.get(), mUsage); + if (res != OK) { + ALOGE("%s: Unable to configure usage %08x for stream %d", + __FUNCTION__, mUsage, mId); + mState = CONNECTED; + return res; + } + + res = native_window_set_buffers_geometry(mConsumerInterface.get(), + mWidth, mHeight, mFormat); + if (res != OK) { + ALOGE("%s: Unable to configure buffer geometry" + " %d x %d, format 0x%x for stream %d", + __FUNCTION__, mWidth, mHeight, mFormat, mId); + mState = CONNECTED; + return res; + } + + int maxConsumerBuffers; + res = mConsumerInterface->query(mConsumerInterface.get(), + NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); + if (res != OK) { + ALOGE("%s: Unable to query consumer undequeued" + " buffer count for stream %d", __FUNCTION__, mId); + mState = CONNECTED; + return res; + } + mMaxConsumerBuffers = maxConsumerBuffers; + + ALOGV("%s: Producer wants %d buffers, consumer wants %d", __FUNCTION__, + mMaxProducerBuffers, mMaxConsumerBuffers); + + int totalBuffers = mMaxConsumerBuffers + mMaxProducerBuffers; + + res = native_window_set_buffer_count(mConsumerInterface.get(), + totalBuffers); + if (res != OK) { + ALOGE("%s: Unable to set buffer count for stream %d", + __FUNCTION__, mId); + mState = CONNECTED; + return res; + } + + // Register allocated buffers with HAL device + buffer_handle_t *buffers = new 
+    ANativeWindowBuffer **anwBuffers = new ANativeWindowBuffer*[totalBuffers];
+    int bufferIdx = 0;
+    for (; bufferIdx < totalBuffers; bufferIdx++) {
+        res = mConsumerInterface->dequeueBuffer(mConsumerInterface.get(),
+                &anwBuffers[bufferIdx]);
+        if (res != OK) {
+            ALOGE("%s: Unable to dequeue buffer %d for initial registration"
+                    " for stream %d", __FUNCTION__, bufferIdx, mId);
+            mState = CONNECTED;
+            goto cleanUpBuffers;
+        }
+
+        res = mConsumerInterface->lockBuffer(mConsumerInterface.get(),
+                anwBuffers[bufferIdx]);
+        if (res != OK) {
+            ALOGE("%s: Unable to lock buffer %d for initial registration"
+                    " for stream %d", __FUNCTION__, bufferIdx, mId);
+            mState = CONNECTED;
+            bufferIdx++;
+            goto cleanUpBuffers;
+        }
+
+        buffers[bufferIdx] = anwBuffers[bufferIdx]->handle;
+    }
+
+    res = mDevice->ops->register_stream_buffers(mDevice,
+            mId,
+            totalBuffers,
+            buffers);
+    if (res != OK) {
+        ALOGE("%s: Unable to register buffers with HAL device for stream %d",
+                __FUNCTION__, mId);
+        mState = CONNECTED;
+    } else {
+        mState = ACTIVE;
+    }
+
+cleanUpBuffers:
+    // Return the dequeued buffers to the queue; the HAL keeps only their
+    // handles, not references to the dequeued buffers themselves
+    for (int i = 0; i < bufferIdx; i++) {
+        res = mConsumerInterface->cancelBuffer(mConsumerInterface.get(),
+                anwBuffers[i]);
+    }
+    // Allocated with new[], so they must be released with delete[]
+    delete[] anwBuffers;
+    delete[] buffers;
+
+    return res;
+}
+
+status_t StreamAdapter::disconnect() {
+    status_t res;
+    if (mState >= ALLOCATED) {
+        res = mDevice->ops->release_stream(mDevice, mId);
+        if (res != OK) {
+            ALOGE("%s: Unable to release stream %d",
+                    __FUNCTION__, mId);
+            return res;
+        }
+    }
+    if (mState >= CONNECTED) {
+        res = native_window_api_disconnect(mConsumerInterface.get(),
+                NATIVE_WINDOW_API_CAMERA);
+        if (res != OK) {
+            ALOGE("%s: Unable to disconnect stream %d from native window",
+                    __FUNCTION__, mId);
+            return res;
+        }
+    }
+    mId = -1;
+    mState = DISCONNECTED;
+    return OK;
+}
+
+int StreamAdapter::getId() {
+    return mId;
+}
+
+camera2_stream_ops *StreamAdapter::getStreamOps() {
+    return static_cast<camera2_stream_ops *>(this);
+}
+
+ANativeWindow* StreamAdapter::toANW(camera2_stream_ops_t *w) {
+    return static_cast<StreamAdapter*>(w)->mConsumerInterface.get();
+}
+
+int StreamAdapter::dequeue_buffer(camera2_stream_ops_t *w,
+        buffer_handle_t** buffer) {
+    int res;
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+
+    ANativeWindow *a = toANW(w);
+    ANativeWindowBuffer* anb;
+    res = a->dequeueBuffer(a, &anb);
+    if (res != OK) return res;
+    res = a->lockBuffer(a, anb);
+    if (res != OK) return res;
+
+    *buffer = &(anb->handle);
+
+    return res;
+}
+
+int StreamAdapter::enqueue_buffer(camera2_stream_ops_t* w,
+        int64_t timestamp,
+        buffer_handle_t* buffer) {
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+    ANativeWindow *a = toANW(w);
+    status_t err;
+    err = native_window_set_buffers_timestamp(a, timestamp);
+    if (err != OK) return err;
+    return a->queueBuffer(a,
+            container_of(buffer, ANativeWindowBuffer, handle));
+}
+
+int StreamAdapter::cancel_buffer(camera2_stream_ops_t* w,
+        buffer_handle_t* buffer) {
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+    ANativeWindow *a = toANW(w);
+    return a->cancelBuffer(a,
+            container_of(buffer, ANativeWindowBuffer, handle));
+}
+
+int StreamAdapter::set_crop(camera2_stream_ops_t* w,
+        int left, int top, int right, int bottom) {
+    int state = static_cast<StreamAdapter*>(w)->mState;
+    if (state != ACTIVE) {
+        ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
+        return INVALID_OPERATION;
+    }
+    ANativeWindow *a = toANW(w);
+    android_native_rect_t crop = { left, top, right, bottom };
+    return native_window_set_crop(a, &crop);
+}
+
+/**
+ * FrameWaiter
+ */
+
+FrameWaiter::FrameWaiter():
+        mPendingFrames(0) {
+}
+
+status_t FrameWaiter::waitForFrame(nsecs_t timeout) {
+    status_t res;
+    Mutex::Autolock lock(mMutex);
+    while (mPendingFrames == 0) {
+        res = mCondition.waitRelative(mMutex, timeout);
+        if (res != OK) return res;
+    }
+    mPendingFrames--;
+    return OK;
+}
+
+void FrameWaiter::onFrameAvailable() {
+    Mutex::Autolock lock(mMutex);
+    mPendingFrames++;
+    mCondition.signal();
+}
+
+} // namespace android
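
The stream-slot mechanism implemented above is what lets a test run continuous capture without re-submitting a request for every frame. The following is a minimal usage sketch, not part of the patch, built only from the MetadataQueue methods declared in camera2_utils.h below; the function name and the entry/data capacities (10, 100) are arbitrary placeholders, and no device is attached to the queue in this sketch.

#include "camera2_utils.h"

using namespace android;

void streamSlotSketch() {
    MetadataQueue requests;

    // One-shot request: the queue takes ownership on enqueue and hands the
    // buffer out exactly once
    camera_metadata_t *oneShot = allocate_camera_metadata(10, 100);
    requests.enqueue(oneShot);

    // Repeating request: parked in the stream slot. Whenever the queue is
    // empty on a dequeue, a copy of the slot contents is queued and then
    // dequeued, so one request can drive preview indefinitely
    camera_metadata_t *repeating = allocate_camera_metadata(10, 100);
    requests.setStreamSlot(repeating);

    camera_metadata_t *next = NULL;
    requests.dequeue(&next);    // the one-shot request; caller now owns it
    free_camera_metadata(next);
    requests.dequeue(&next);    // a copy of the repeating request
    free_camera_metadata(next);
    requests.dequeue(&next);    // another copy, and so on
    free_camera_metadata(next);
}

The device sees the same queue through getToConsumerInterface(), whose consumer_dequeue hook forwards to the dequeue() used here, so the stream-slot behavior is identical on both sides.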
diff --git a/tests/camera2/camera2_utils.h b/tests/camera2/camera2_utils.h
new file mode 100644
index 0000000..4e0b521
--- /dev/null
+++ b/tests/camera2/camera2_utils.h
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// Utility classes for camera2 HAL testing
+
+#include <system/camera_metadata.h>
+#include <hardware/camera2.h>
+
+#include <gui/SurfaceTextureClient.h>
+#include <gui/CpuConsumer.h>
+
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+
+namespace android {
+
+/**
+ * Queue class for both sending requests to a camera2 device, and for receiving
+ * frames from a camera2 device.
+ */
+class MetadataQueue: public camera2_request_queue_src_ops_t,
+                     public camera2_frame_queue_dst_ops_t {
+  public:
+    MetadataQueue();
+    ~MetadataQueue();
+
+    // Interface to camera2 HAL device, either for requests (device is
+    // consumer) or for frames (device is producer)
+    camera2_request_queue_src_ops_t* getToConsumerInterface();
+    void setFromConsumerInterface(camera2_device_t *d);
+
+    camera2_frame_queue_dst_ops_t* getToProducerInterface();
+
+    // Real interfaces. On enqueue, the queue takes ownership of the buffer
+    // pointer; on dequeue, the user takes ownership of the buffer pointer.
+    status_t enqueue(camera_metadata_t *buf);
+    status_t dequeue(camera_metadata_t **buf, bool incrementCount = true);
+    int getBufferCount();
+    status_t waitForBuffer(nsecs_t timeout);
+
+    // Set repeating buffer(s); if the queue is empty on a dequeue call, the
+    // queue copies the contents of the stream slot into the queue, and then
+    // dequeues the first new entry.
+    status_t setStreamSlot(camera_metadata_t *buf);
+    status_t setStreamSlot(const List<camera_metadata_t*> &bufs);
+
+  private:
+    status_t freeBuffers(List<camera_metadata_t*>::iterator start,
+            List<camera_metadata_t*>::iterator end);
+
+    camera2_device_t *mDevice;
+
+    Mutex mMutex;
+    Condition notEmpty;
+
+    int mFrameCount;
+
+    int mCount;
+    List<camera_metadata_t*> mEntries;
+    int mStreamSlotCount;
+    List<camera_metadata_t*> mStreamSlot;
+
+    bool mSignalConsumer;
+
+    static int consumer_buffer_count(camera2_request_queue_src_ops_t *q);
+
+    static int consumer_dequeue(camera2_request_queue_src_ops_t *q,
+            camera_metadata_t **buffer);
+
+    static int consumer_free(camera2_request_queue_src_ops_t *q,
+            camera_metadata_t *old_buffer);
+
+    static int producer_dequeue(camera2_frame_queue_dst_ops_t *q,
+            size_t entries, size_t bytes,
+            camera_metadata_t **buffer);
+
+    static int producer_cancel(camera2_frame_queue_dst_ops_t *q,
+            camera_metadata_t *old_buffer);
+
+    static int producer_enqueue(camera2_frame_queue_dst_ops_t *q,
+            camera_metadata_t *filled_buffer);
+
+};
+
+/**
+ * Basic class to receive and queue up notifications from the camera device
+ */
+
+class NotifierListener {
+  public:
+
+    NotifierListener();
+
+    status_t getNotificationsFrom(camera2_device *dev);
+
+    status_t getNextNotification(int32_t *msg_type, int32_t *ext1,
+            int32_t *ext2, int32_t *ext3);
+
+    status_t waitForNotification(int32_t *msg_type, int32_t *ext1,
+            int32_t *ext2, int32_t *ext3);
+
+    int numNotifications();
+
+  private:
+
+    status_t getNextNotificationLocked(int32_t *msg_type,
+            int32_t *ext1, int32_t *ext2, int32_t *ext3);
+
+    struct Notification {
+        Notification(int32_t type, int32_t e1, int32_t e2, int32_t e3):
+                msg_type(type),
+                ext1(e1),
+                ext2(e2),
+                ext3(e3)
+        {}
+
+        int32_t msg_type;
+        int32_t ext1;
+        int32_t ext2;
+        int32_t ext3;
+    };
+
+    List<Notification> mNotifications;
+
+    Mutex mMutex;
+    Condition mNewNotification;
+
+    void onNotify(int32_t msg_type,
+            int32_t ext1,
+            int32_t ext2,
+            int32_t ext3);
+
+    static void notify_callback_dispatch(int32_t msg_type,
+            int32_t ext1,
+            int32_t ext2,
+            int32_t ext3,
+            void *user);
+
+};
+
+/**
+ * Adapter from an ISurfaceTexture interface to camera2 device stream ops.
+ * Also takes care of allocating/deallocating the stream in the device
+ * interface.
+ */
+class StreamAdapter: public camera2_stream_ops {
+  public:
+    StreamAdapter(sp<ISurfaceTexture> consumer);
+
+    ~StreamAdapter();
+
+    status_t connectToDevice(camera2_device_t *d,
+            uint32_t width, uint32_t height, int format);
+
+    status_t disconnect();
+
+    // Get stream ID. Only valid after a successful connectToDevice call.
+    int getId();
+
+  private:
+    enum {
+        ERROR = -1,
+        DISCONNECTED = 0,
+        UNINITIALIZED,
+        ALLOCATED,
+        CONNECTED,
+        ACTIVE
+    } mState;
+
+    sp<ANativeWindow> mConsumerInterface;
+    camera2_device_t *mDevice;
+
+    uint32_t mId;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    uint32_t mFormat;
+    uint32_t mUsage;
+    uint32_t mMaxProducerBuffers;
+    uint32_t mMaxConsumerBuffers;
+
+    int mFormatRequested;
+
+    camera2_stream_ops *getStreamOps();
+
+    static ANativeWindow* toANW(camera2_stream_ops_t *w);
+
+    static int dequeue_buffer(camera2_stream_ops_t *w,
+            buffer_handle_t** buffer);
+
+    static int enqueue_buffer(camera2_stream_ops_t* w,
+            int64_t timestamp,
+            buffer_handle_t* buffer);
+
+    static int cancel_buffer(camera2_stream_ops_t* w,
+            buffer_handle_t* buffer);
+
+    static int set_crop(camera2_stream_ops_t* w,
+            int left, int top, int right, int bottom);
+
+};
+
+/**
+ * Simple class to wait on the CpuConsumer to have a frame available
+ */
+class FrameWaiter : public CpuConsumer::FrameAvailableListener {
+  public:
+    FrameWaiter();
+
+    /**
+     * Wait for max timeout nanoseconds for a new frame. Returns
+     * OK if a frame is available, TIMED_OUT if the timeout was reached.
+     */
+    status_t waitForFrame(nsecs_t timeout);
+
+    virtual void onFrameAvailable();
+
+    int mPendingFrames;
+    Mutex mMutex;
+    Condition mCondition;
+};
+
+}
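
Taken together, the utilities are meant to be wired up roughly as follows. This is a hedged sketch rather than part of the patch: the set_request_queue_src_ops and set_frame_queue_dst_ops entry points are assumed from the camera2 HAL v2.0 device ops, and the function name, the 640x480 HAL_PIXEL_FORMAT_YCrCb_420_SP stream configuration, the one-second timeout, and the caller-supplied previewRequest are all placeholder choices.

#include "camera2_utils.h"

using namespace android;

status_t previewSketch(camera2_device_t *dev,
        const sp<ISurfaceTexture> &texture,
        camera_metadata_t *previewRequest) {
    status_t res;

    // Route HAL notifications into a list the test can wait on
    NotifierListener listener;
    res = listener.getNotificationsFrom(dev);
    if (res != OK) return res;

    // Requests flow test -> device; frame metadata flows device -> test
    // (registration calls assumed from the camera2.h device ops)
    MetadataQueue requests;
    MetadataQueue frames;
    requests.setFromConsumerInterface(dev);
    res = dev->ops->set_request_queue_src_ops(dev,
            requests.getToConsumerInterface());
    if (res != OK) return res;
    res = dev->ops->set_frame_queue_dst_ops(dev,
            frames.getToProducerInterface());
    if (res != OK) return res;

    // Allocate and register a preview stream backed by the SurfaceTexture
    StreamAdapter stream(texture);
    res = stream.connectToDevice(dev, 640, 480,
            HAL_PIXEL_FORMAT_YCrCb_420_SP);
    if (res != OK) return res;

    // Arm a single repeating request; the queue takes ownership of it
    res = requests.setStreamSlot(previewRequest);
    if (res != OK) return res;

    // Wait up to one second for the first output frame's metadata
    res = frames.waitForBuffer(1000000000LL);
    if (res != OK) return res;
    camera_metadata_t *frame = NULL;
    res = frames.dequeue(&frame);
    if (res != OK) return res;
    free_camera_metadata(frame);

    return stream.disconnect();
}

A real test would also have to point previewRequest at the new stream via its output-streams entry using stream.getId() before arming it; that plumbing is omitted here. For streams consumed on the CPU rather than by a display surface, a FrameWaiter would be registered as the CpuConsumer's frame-available listener and waitForFrame() used in place of the metadata-queue wait.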