diff options
author | Steve Kondik <shade@chemlab.org> | 2012-11-18 20:46:23 -0800 |
---|---|---|
committer | Steve Kondik <shade@chemlab.org> | 2012-11-18 20:46:23 -0800 |
commit | 90b17dc3aca5efbed34a82972db73de71b724fe3 (patch) | |
tree | 9acc413d20b5520ad4f5f96786e46718d5dbb7fb | |
parent | bb992a86df645482167013634a8c1de1a3109344 (diff) | |
parent | c55d7ac337641c7ad9ff734169e65553fbb40787 (diff) | |
download | hardware_libhardware-90b17dc3aca5efbed34a82972db73de71b724fe3.zip hardware_libhardware-90b17dc3aca5efbed34a82972db73de71b724fe3.tar.gz hardware_libhardware-90b17dc3aca5efbed34a82972db73de71b724fe3.tar.bz2 |
Merge branch 'jb-mr1-release' of https://android.googlesource.com/platform/hardware/libhardware into HEAD
Conflicts:
include/hardware/fb.h
include/hardware/hwcomposer.h
modules/audio/audio_hw.c
Change-Id: Ib37d49cb600bf0cd062847f9c0eb3841bae13f74
38 files changed, 5399 insertions, 427 deletions
diff --git a/include/hardware/audio.h b/include/hardware/audio.h index 12c4a96..6e02157 100644 --- a/include/hardware/audio.h +++ b/include/hardware/audio.h @@ -55,7 +55,8 @@ __BEGIN_DECLS */ #define AUDIO_DEVICE_API_VERSION_0_0 HARDWARE_DEVICE_API_VERSION(0, 0) #define AUDIO_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION(1, 0) -#define AUDIO_DEVICE_API_VERSION_CURRENT AUDIO_DEVICE_API_VERSION_1_0 +#define AUDIO_DEVICE_API_VERSION_2_0 HARDWARE_DEVICE_API_VERSION(2, 0) +#define AUDIO_DEVICE_API_VERSION_CURRENT AUDIO_DEVICE_API_VERSION_2_0 /** * List of known audio HAL modules. This is the base name of the audio HAL @@ -67,6 +68,7 @@ __BEGIN_DECLS #define AUDIO_HARDWARE_MODULE_ID_PRIMARY "primary" #define AUDIO_HARDWARE_MODULE_ID_A2DP "a2dp" #define AUDIO_HARDWARE_MODULE_ID_USB "usb" +#define AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX "r_submix" /**************************************/ @@ -394,7 +396,7 @@ typedef struct audio_stream_in audio_stream_in_t; /** * return the frame size (number of bytes per sample). */ -static inline size_t audio_stream_frame_size(struct audio_stream *s) +static inline size_t audio_stream_frame_size(const struct audio_stream *s) { size_t chan_samp_sz; uint32_t chan_mask = s->get_channels(s); @@ -469,6 +471,12 @@ struct audio_hw_device { * each audio_hw_device implementation. * * Return value is a bitmask of 1 or more values of audio_devices_t + * + * NOTE: audio HAL implementations starting with + * AUDIO_DEVICE_API_VERSION_2_0 do not implement this function. + * All supported devices should be listed in audio_policy.conf + * file and the audio policy manager must choose the appropriate + * audio module based on information in this file. */ uint32_t (*get_supported_devices)(const struct audio_hw_device *dev); @@ -494,7 +502,7 @@ struct audio_hw_device { * master volume control. AudioFlinger will query this value from the * primary audio HAL when the service starts and use the value for setting * the initial master volume across all HALs. 
HALs which do not support - * this method should may leave it set to NULL. + * this method may leave it set to NULL. */ int (*get_master_volume)(struct audio_hw_device *dev, float *volume); #endif @@ -595,6 +603,21 @@ struct audio_hw_device { /** This method dumps the state of the audio hardware */ int (*dump)(const struct audio_hw_device *dev, int fd); + + /** + * set the audio mute status for all audio activities. If any value other + * than 0 is returned, the software mixer will emulate this capability. + */ + int (*set_master_mute)(struct audio_hw_device *dev, bool mute); + + /** + * Get the current master mute status for the HAL, if the HAL supports + * master mute control. AudioFlinger will query this value from the primary + * audio HAL when the service starts and use the value for setting the + * initial master mute across all HALs. HALs which do not support this + * method may leave it set to NULL. + */ + int (*get_master_mute)(struct audio_hw_device *dev, bool *mute); }; typedef struct audio_hw_device audio_hw_device_t; diff --git a/include/hardware/audio_effect.h b/include/hardware/audio_effect.h index 65eba36..46e323d 100644 --- a/include/hardware/audio_effect.h +++ b/include/hardware/audio_effect.h @@ -142,6 +142,10 @@ typedef struct effect_descriptor_s { // | | | 1 requires audio mode updates // | | | 2..3 reserved // +---------------------------+-----------+----------------------------------- +// | Audio source indication | 20..21 | 0 none +// | | | 1 requires audio source updates +// | | | 2..3 reserved +// +---------------------------+-----------+----------------------------------- // Insert mode #define EFFECT_FLAG_TYPE_SHIFT 0 @@ -216,6 +220,13 @@ typedef struct effect_descriptor_s { #define EFFECT_FLAG_AUDIO_MODE_IND (1 << EFFECT_FLAG_AUDIO_MODE_SHIFT) #define EFFECT_FLAG_AUDIO_MODE_NONE (0 << EFFECT_FLAG_AUDIO_MODE_SHIFT) +// Audio source indication +#define EFFECT_FLAG_AUDIO_SOURCE_SHIFT (EFFECT_FLAG_AUDIO_MODE_SHIFT + 
EFFECT_FLAG_AUDIO_MODE_SIZE) +#define EFFECT_FLAG_AUDIO_SOURCE_SIZE 2 +#define EFFECT_FLAG_AUDIO_SOURCE_MASK (((1 << EFFECT_FLAG_AUDIO_SOURCE_SIZE) -1) \ + << EFFECT_FLAG_AUDIO_SOURCE_SHIFT) +#define EFFECT_FLAG_AUDIO_SOURCE_IND (1 << EFFECT_FLAG_AUDIO_SOURCE_SHIFT) +#define EFFECT_FLAG_AUDIO_SOURCE_NONE (0 << EFFECT_FLAG_AUDIO_SOURCE_SHIFT) #define EFFECT_MAKE_API_VERSION(M, m) (((M)<<16) | ((m) & 0xFFFF)) #define EFFECT_API_VERSION_MAJOR(v) ((v)>>16) @@ -413,6 +424,7 @@ enum effect_command_e { EFFECT_CMD_GET_FEATURE_SUPPORTED_CONFIGS,// get all supported configurations for a feature. EFFECT_CMD_GET_FEATURE_CONFIG, // get current feature configuration EFFECT_CMD_SET_FEATURE_CONFIG, // set current feature configuration + EFFECT_CMD_SET_AUDIO_SOURCE, // set the audio source (see audio.h, audio_source_t) EFFECT_CMD_FIRST_PROPRIETARY = 0x10000 // first proprietary command code }; @@ -705,6 +717,20 @@ enum effect_command_e { // size: sizeof(uint32_t) // data: status //================================================================================================== +// command: EFFECT_CMD_SET_AUDIO_SOURCE +//-------------------------------------------------------------------------------------------------- +// description: +// Set the audio source the capture path is configured for (Camcorder, voice recognition...). +// See audio.h, audio_source_t for values. 
+//-------------------------------------------------------------------------------------------------- +// command format: +// size: sizeof(uint32_t) +// data: uint32_t +//-------------------------------------------------------------------------------------------------- +// reply format: +// size: 0 +// data: N/A +//================================================================================================== // command: EFFECT_CMD_FIRST_PROPRIETARY //-------------------------------------------------------------------------------------------------- // description: @@ -784,8 +810,8 @@ enum effect_feature_e { // EFFECT_FEATURE_AUX_CHANNELS feature configuration descriptor. Describe a combination // of main and auxiliary channels supported typedef struct channel_config_s { - uint32_t main_channels; // channel mask for main channels - uint32_t aux_channels; // channel mask for auxiliary channels + audio_channel_mask_t main_channels; // channel mask for main channels + audio_channel_mask_t aux_channels; // channel mask for auxiliary channels } channel_config_t; diff --git a/include/hardware/audio_policy.h b/include/hardware/audio_policy.h index 540ef6d..775573c 100644 --- a/include/hardware/audio_policy.h +++ b/include/hardware/audio_policy.h @@ -157,7 +157,7 @@ struct audio_policy { audio_stream_type_t stream, uint32_t samplingRate, audio_format_t format, - uint32_t channels, + audio_channel_mask_t channelMask, audio_output_flags_t flags); /* indicates to the audio policy manager that the output starts being used @@ -182,7 +182,7 @@ struct audio_policy { audio_io_handle_t (*get_input)(struct audio_policy *pol, audio_source_t inputSource, uint32_t samplingRate, audio_format_t format, - uint32_t channels, + audio_channel_mask_t channelMask, audio_in_acoustics_t acoustics); /* indicates to the audio policy manager that the input starts being used */ @@ -242,10 +242,10 @@ struct audio_policy { /* Audio effect management */ audio_io_handle_t 
(*get_output_for_effect)(struct audio_policy *pol, - struct effect_descriptor_s *desc); + const struct effect_descriptor_s *desc); int (*register_effect)(struct audio_policy *pol, - struct effect_descriptor_s *desc, + const struct effect_descriptor_s *desc, audio_io_handle_t output, uint32_t strategy, int session, @@ -259,6 +259,9 @@ struct audio_policy { audio_stream_type_t stream, uint32_t in_past_ms); + bool (*is_source_active)(const struct audio_policy *pol, + audio_source_t source); + /* dump state */ int (*dump)(const struct audio_policy *pol, int fd); }; @@ -330,7 +333,10 @@ struct audio_policy_service_ops { /* Audio input Control functions */ /* */ - /* opens an audio input */ + /* opens an audio input + * deprecated - new implementations should use open_input_on_module, + * and the acoustics parameter is ignored + */ audio_io_handle_t (*open_input)(void *service, audio_devices_t *pDevices, uint32_t *pSamplingRate, diff --git a/include/hardware/bluetooth.h b/include/hardware/bluetooth.h new file mode 100644 index 0000000..be7f0b1 --- /dev/null +++ b/include/hardware/bluetooth.h @@ -0,0 +1,441 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_INCLUDE_BLUETOOTH_H +#define ANDROID_INCLUDE_BLUETOOTH_H + +#include <stdint.h> +#include <sys/cdefs.h> +#include <sys/types.h> + +#include <hardware/hardware.h> + +__BEGIN_DECLS + +/** + * The Bluetooth Hardware Module ID + */ + +#define BT_HARDWARE_MODULE_ID "bluetooth" +#define BT_STACK_MODULE_ID "bluetooth" +#define BT_STACK_TEST_MODULE_ID "bluetooth_test" + + +/* Bluetooth profile interface IDs */ + +#define BT_PROFILE_HANDSFREE_ID "handsfree" +#define BT_PROFILE_ADVANCED_AUDIO_ID "a2dp" +#define BT_PROFILE_HEALTH_ID "health" +#define BT_PROFILE_SOCKETS_ID "socket" +#define BT_PROFILE_HIDHOST_ID "hidhost" +#define BT_PROFILE_PAN_ID "pan" + + +/** Bluetooth Address */ +typedef struct { + uint8_t address[6]; +} __attribute__((packed))bt_bdaddr_t; + +/** Bluetooth Device Name */ +typedef struct { + uint8_t name[248]; +} __attribute__((packed))bt_bdname_t; + +/** Bluetooth Adapter Visibility Modes*/ +typedef enum { + BT_SCAN_MODE_NONE, + BT_SCAN_MODE_CONNECTABLE, + BT_SCAN_MODE_CONNECTABLE_DISCOVERABLE +} bt_scan_mode_t; + +/** Bluetooth Adapter State */ +typedef enum { + BT_STATE_OFF, + BT_STATE_ON +} bt_state_t; + +/** Bluetooth Error Status */ +/** We need to build on this */ + +typedef enum { + BT_STATUS_SUCCESS, + BT_STATUS_FAIL, + BT_STATUS_NOT_READY, + BT_STATUS_NOMEM, + BT_STATUS_BUSY, + BT_STATUS_DONE, /* request already completed */ + BT_STATUS_UNSUPPORTED, + BT_STATUS_PARM_INVALID, + BT_STATUS_UNHANDLED, + BT_STATUS_AUTH_FAILURE, + BT_STATUS_RMT_DEV_DOWN + +} bt_status_t; + +/** Bluetooth PinKey Code */ +typedef struct { + uint8_t pin[16]; +} __attribute__((packed))bt_pin_code_t; + +/** Bluetooth Adapter Discovery state */ +typedef enum { + BT_DISCOVERY_STOPPED, + BT_DISCOVERY_STARTED +} bt_discovery_state_t; + +/** Bluetooth ACL connection state */ +typedef enum { + BT_ACL_STATE_CONNECTED, + BT_ACL_STATE_DISCONNECTED +} bt_acl_state_t; + +/** Bluetooth 128-bit UUID */ +typedef struct { + uint8_t uu[16]; +} bt_uuid_t; + +/** 
Bluetooth SDP service record */ +typedef struct +{ + bt_uuid_t uuid; + uint16_t channel; + char name[256]; // what's the maximum length +} bt_service_record_t; + +/* Bluetooth Adapter and Remote Device property types */ +typedef enum { + /* Properties common to both adapter and remote device */ + /** + * Description - Bluetooth Device Name + * Access mode - Adapter name can be GET/SET. Remote device can be GET + * Data type - bt_bdname_t + */ + BT_PROPERTY_BDNAME = 0x1, + /** + * Description - Bluetooth Device Address + * Access mode - Only GET. + * Data type - bt_bdaddr_t + */ + BT_PROPERTY_BDADDR, + /** + * Description - Bluetooth Service 128-bit UUIDs + * Access mode - Only GET. + * Data type - Array of bt_uuid_t (Array size inferred from property length). + */ + BT_PROPERTY_UUIDS, + /** + * Description - Bluetooth Class of Device as found in Assigned Numbers + * Access mode - Only GET. + * Data type - uint32_t. + */ + BT_PROPERTY_CLASS_OF_DEVICE, + /** + * Description - Device Type - BREDR, BLE or DUAL Mode + * Access mode - Only GET. + * Data type - bt_device_type_t + */ + BT_PROPERTY_TYPE_OF_DEVICE, + /** + * Description - Bluetooth Service Record + * Access mode - Only GET. + * Data type - bt_service_record_t + */ + BT_PROPERTY_SERVICE_RECORD, + + /* Properties unique to adapter */ + /** + * Description - Bluetooth Adapter scan mode + * Access mode - GET and SET + * Data type - bt_scan_mode_t. + */ + BT_PROPERTY_ADAPTER_SCAN_MODE, + /** + * Description - List of bonded devices + * Access mode - Only GET. + * Data type - Array of bt_bdaddr_t of the bonded remote devices + * (Array size inferred from property length). 
+ */ + BT_PROPERTY_ADAPTER_BONDED_DEVICES, + /** + * Description - Bluetooth Adapter Discovery timeout (in seconds) + * Access mode - GET and SET + * Data type - uint32_t + */ + BT_PROPERTY_ADAPTER_DISCOVERY_TIMEOUT, + + /* Properties unique to remote device */ + /** + * Description - User defined friendly name of the remote device + * Access mode - GET and SET + * Data type - bt_bdname_t. + */ + BT_PROPERTY_REMOTE_FRIENDLY_NAME, + /** + * Description - RSSI value of the inquired remote device + * Access mode - Only GET. + * Data type - int32_t. + */ + BT_PROPERTY_REMOTE_RSSI, + + BT_PROPERTY_REMOTE_DEVICE_TIMESTAMP = 0xFF, +} bt_property_type_t; + +/** Bluetooth Adapter Property data structure */ +typedef struct +{ + bt_property_type_t type; + int len; + void *val; +} bt_property_t; + +/** Bluetooth Device Type */ +typedef enum { + BT_DEVICE_DEVTYPE_BREDR = 0x1, + BT_DEVICE_DEVTYPE_BLE, + BT_DEVICE_DEVTYPE_DUAL +} bt_device_type_t; +/** Bluetooth Bond state */ +typedef enum { + BT_BOND_STATE_NONE, + BT_BOND_STATE_BONDING, + BT_BOND_STATE_BONDED +} bt_bond_state_t; + +/** Bluetooth SSP Bonding Variant */ +typedef enum { + BT_SSP_VARIANT_PASSKEY_CONFIRMATION, + BT_SSP_VARIANT_PASSKEY_ENTRY, + BT_SSP_VARIANT_CONSENT, + BT_SSP_VARIANT_PASSKEY_NOTIFICATION +} bt_ssp_variant_t; + +#define BT_MAX_NUM_UUIDS 32 + +/** Bluetooth Interface callbacks */ + +/** Bluetooth Enable/Disable Callback. */ +typedef void (*adapter_state_changed_callback)(bt_state_t state); + +/** GET/SET Adapter Properties callback */ +/* TODO: For the GET/SET property APIs/callbacks, we may need a session + * identifier to associate the call with the callback. This would be needed + * whenever more than one simultaneous instance of the same adapter_type + * is get/set. + * + * If this is going to be handled in the Java framework, then we do not need + * to manage sessions here. 
+ */ +typedef void (*adapter_properties_callback)(bt_status_t status, + int num_properties, + bt_property_t *properties); + +/** GET/SET Remote Device Properties callback */ +/** TODO: For remote device properties, do not see a need to get/set + * multiple properties - num_properties shall be 1 + */ +typedef void (*remote_device_properties_callback)(bt_status_t status, + bt_bdaddr_t *bd_addr, + int num_properties, + bt_property_t *properties); + +/** New device discovered callback */ +/** If EIR data is not present, then BD_NAME and RSSI shall be NULL and -1 + * respectively */ +typedef void (*device_found_callback)(int num_properties, + bt_property_t *properties); + +/** Discovery state changed callback */ +typedef void (*discovery_state_changed_callback)(bt_discovery_state_t state); + +/** Bluetooth Legacy PinKey Request callback */ +typedef void (*pin_request_callback)(bt_bdaddr_t *remote_bd_addr, + bt_bdname_t *bd_name, uint32_t cod); + +/** Bluetooth SSP Request callback - Just Works & Numeric Comparison*/ +/** pass_key - Shall be 0 for BT_SSP_PAIRING_VARIANT_CONSENT & + * BT_SSP_PAIRING_PASSKEY_ENTRY */ +/* TODO: Passkey request callback shall not be needed for devices with display + * capability. 
We still need support this in the stack for completeness */ +typedef void (*ssp_request_callback)(bt_bdaddr_t *remote_bd_addr, + bt_bdname_t *bd_name, + uint32_t cod, + bt_ssp_variant_t pairing_variant, + uint32_t pass_key); + +/** Bluetooth Bond state changed callback */ +/* Invoked in response to create_bond, cancel_bond or remove_bond */ +typedef void (*bond_state_changed_callback)(bt_status_t status, + bt_bdaddr_t *remote_bd_addr, + bt_bond_state_t state); + +/** Bluetooth ACL connection state changed callback */ +typedef void (*acl_state_changed_callback)(bt_status_t status, bt_bdaddr_t *remote_bd_addr, + bt_acl_state_t state); + +typedef enum { + ASSOCIATE_JVM, + DISASSOCIATE_JVM +} bt_cb_thread_evt; + +/** Thread Associate/Disassociate JVM Callback */ +/* Callback that is invoked by the callback thread to allow upper layer to attach/detach to/from + * the JVM */ +typedef void (*callback_thread_event)(bt_cb_thread_evt evt); + +/** Bluetooth Test Mode Callback */ +/* Receive any HCI event from controller. Must be in DUT Mode for this callback to be received */ +typedef void (*dut_mode_recv_callback)(uint16_t opcode, uint8_t *buf, uint8_t len); + +/** TODO: Add callbacks for Link Up/Down and other generic + * notifications/callbacks */ + +/** Bluetooth DM callback structure. 
*/ +typedef struct { + /** set to sizeof(bt_callbacks_t) */ + size_t size; + adapter_state_changed_callback adapter_state_changed_cb; + adapter_properties_callback adapter_properties_cb; + remote_device_properties_callback remote_device_properties_cb; + device_found_callback device_found_cb; + discovery_state_changed_callback discovery_state_changed_cb; + pin_request_callback pin_request_cb; + ssp_request_callback ssp_request_cb; + bond_state_changed_callback bond_state_changed_cb; + acl_state_changed_callback acl_state_changed_cb; + callback_thread_event thread_evt_cb; + dut_mode_recv_callback dut_mode_recv_cb; +} bt_callbacks_t; + +/** NOTE: By default, no profiles are initialized at the time of init/enable. + * Whenever the application invokes the 'init' API of a profile, then one of + * the following shall occur: + * + * 1.) If Bluetooth is not enabled, then the Bluetooth core shall mark the + * profile as enabled. Subsequently, when the application invokes the + * Bluetooth 'enable', as part of the enable sequence the profile that were + * marked shall be enabled by calling appropriate stack APIs. The + * 'adapter_properties_cb' shall return the list of UUIDs of the + * enabled profiles. + * + * 2.) If Bluetooth is enabled, then the Bluetooth core shall invoke the stack + * profile API to initialize the profile and trigger a + * 'adapter_properties_cb' with the current list of UUIDs including the + * newly added profile's UUID. + * + * The reverse shall occur whenever the profile 'cleanup' APIs are invoked + */ + +/** Represents the standard Bluetooth DM interface. */ +typedef struct { + /** set to sizeof(bt_interface_t) */ + size_t size; + /** + * Opens the interface and provides the callback routines + * to the implemenation of this interface. + */ + int (*init)(bt_callbacks_t* callbacks ); + + /** Enable Bluetooth. */ + int (*enable)(void); + + /** Disable Bluetooth. */ + int (*disable)(void); + + /** Closes the interface. 
*/ + void (*cleanup)(void); + + /** Get all Bluetooth Adapter properties at init */ + int (*get_adapter_properties)(void); + + /** Get Bluetooth Adapter property of 'type' */ + int (*get_adapter_property)(bt_property_type_t type); + + /** Set Bluetooth Adapter property of 'type' */ + /* Based on the type, val shall be one of + * bt_bdaddr_t or bt_bdname_t or bt_scanmode_t etc + */ + int (*set_adapter_property)(const bt_property_t *property); + + /** Get all Remote Device properties */ + int (*get_remote_device_properties)(bt_bdaddr_t *remote_addr); + + /** Get Remote Device property of 'type' */ + int (*get_remote_device_property)(bt_bdaddr_t *remote_addr, + bt_property_type_t type); + + /** Set Remote Device property of 'type' */ + int (*set_remote_device_property)(bt_bdaddr_t *remote_addr, + const bt_property_t *property); + + /** Get Remote Device's service record for the given UUID */ + int (*get_remote_service_record)(bt_bdaddr_t *remote_addr, + bt_uuid_t *uuid); + + /** Start SDP to get remote services */ + int (*get_remote_services)(bt_bdaddr_t *remote_addr); + + /** Start Discovery */ + int (*start_discovery)(void); + + /** Cancel Discovery */ + int (*cancel_discovery)(void); + + /** Create Bluetooth Bonding */ + int (*create_bond)(const bt_bdaddr_t *bd_addr); + + /** Remove Bond */ + int (*remove_bond)(const bt_bdaddr_t *bd_addr); + + /** Cancel Bond */ + int (*cancel_bond)(const bt_bdaddr_t *bd_addr); + + /** BT Legacy PinKey Reply */ + /** If accept==FALSE, then pin_len and pin_code shall be 0x0 */ + int (*pin_reply)(const bt_bdaddr_t *bd_addr, uint8_t accept, + uint8_t pin_len, bt_pin_code_t *pin_code); + + /** BT SSP Reply - Just Works, Numeric Comparison and Passkey + * passkey shall be zero for BT_SSP_VARIANT_PASSKEY_COMPARISON & + * BT_SSP_VARIANT_CONSENT + * For BT_SSP_VARIANT_PASSKEY_ENTRY, if accept==FALSE, then passkey + * shall be zero */ + int (*ssp_reply)(const bt_bdaddr_t *bd_addr, bt_ssp_variant_t variant, + uint8_t accept, uint32_t 
passkey); + + /** Get Bluetooth profile interface */ + const void* (*get_profile_interface) (const char *profile_id); + + /** Bluetooth Test Mode APIs - Bluetooth must be enabled for these APIs */ + /* Configure DUT Mode - Use this mode to enter/exit DUT mode */ + int (*dut_mode_configure)(uint8_t enable); + + /* Send any test HCI (vendor-specific) command to the controller. Must be in DUT Mode */ + int (*dut_mode_send)(uint16_t opcode, uint8_t *buf, uint8_t len); +} bt_interface_t; + +/** TODO: Need to add APIs for Service Discovery, Service authorization and + * connection management. Also need to add APIs for configuring + * properties of remote bonded devices such as name, UUID etc. */ + +typedef struct { + struct hw_device_t common; + const bt_interface_t* (*get_bluetooth_interface)(); +} bluetooth_device_t; + +typedef bluetooth_device_t bluetooth_module_t; +__END_DECLS + +#endif /* ANDROID_INCLUDE_BLUETOOTH_H */ diff --git a/include/hardware/bt_av.h b/include/hardware/bt_av.h new file mode 100644 index 0000000..2ec00c3 --- /dev/null +++ b/include/hardware/bt_av.h @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_INCLUDE_BT_AV_H +#define ANDROID_INCLUDE_BT_AV_H + +__BEGIN_DECLS + +/* Bluetooth AV connection states */ +typedef enum { + BTAV_CONNECTION_STATE_DISCONNECTED = 0, + BTAV_CONNECTION_STATE_CONNECTING, + BTAV_CONNECTION_STATE_CONNECTED, + BTAV_CONNECTION_STATE_DISCONNECTING +} btav_connection_state_t; + +/* Bluetooth AV datapath states */ +typedef enum { + BTAV_AUDIO_STATE_REMOTE_SUSPEND = 0, + BTAV_AUDIO_STATE_STOPPED, + BTAV_AUDIO_STATE_STARTED, +} btav_audio_state_t; + + +/** Callback for connection state change. + * state will have one of the values from btav_connection_state_t + */ +typedef void (* btav_connection_state_callback)(btav_connection_state_t state, + bt_bdaddr_t *bd_addr); + +/** Callback for audiopath state change. + * state will have one of the values from btav_audio_state_t + */ +typedef void (* btav_audio_state_callback)(btav_audio_state_t state, + bt_bdaddr_t *bd_addr); + +/** BT-AV callback structure. */ +typedef struct { + /** set to sizeof(btav_callbacks_t) */ + size_t size; + btav_connection_state_callback connection_state_cb; + btav_audio_state_callback audio_state_cb; +} btav_callbacks_t; + +/** + * NOTE: + * + * 1. AVRCP 1.0 shall be supported initially. AVRCP passthrough commands + * shall be handled internally via uinput + * + * 2. A2DP data path shall be handled via a socket pipe between the AudioFlinger + * android_audio_hw library and the Bluetooth stack. + * + */ +/** Represents the standard BT-AV interface. */ +typedef struct { + + /** set to sizeof(btav_interface_t) */ + size_t size; + /** + * Register the BtAv callbacks + */ + bt_status_t (*init)( btav_callbacks_t* callbacks ); + + /** connect to headset */ + bt_status_t (*connect)( bt_bdaddr_t *bd_addr ); + + /** dis-connect from headset */ + bt_status_t (*disconnect)( bt_bdaddr_t *bd_addr ); + + /** Closes the interface. 
*/ + void (*cleanup)( void ); +} btav_interface_t; + +__END_DECLS + +#endif /* ANDROID_INCLUDE_BT_AV_H */ diff --git a/include/hardware/bt_hf.h b/include/hardware/bt_hf.h new file mode 100644 index 0000000..6135ac4 --- /dev/null +++ b/include/hardware/bt_hf.h @@ -0,0 +1,284 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_INCLUDE_BT_HF_H +#define ANDROID_INCLUDE_BT_HF_H + +__BEGIN_DECLS + +/* AT response code - OK/Error */ +typedef enum { + BTHF_AT_RESPONSE_ERROR = 0, + BTHF_AT_RESPONSE_OK +} bthf_at_response_t; + +typedef enum { + BTHF_CONNECTION_STATE_DISCONNECTED = 0, + BTHF_CONNECTION_STATE_CONNECTING, + BTHF_CONNECTION_STATE_CONNECTED, + BTHF_CONNECTION_STATE_SLC_CONNECTED, + BTHF_CONNECTION_STATE_DISCONNECTING +} bthf_connection_state_t; + +typedef enum { + BTHF_AUDIO_STATE_DISCONNECTED = 0, + BTHF_AUDIO_STATE_CONNECTING, + BTHF_AUDIO_STATE_CONNECTED, + BTHF_AUDIO_STATE_DISCONNECTING +} bthf_audio_state_t; + +typedef enum { + BTHF_VR_STATE_STOPPED = 0, + BTHF_VR_STATE_STARTED +} bthf_vr_state_t; + +typedef enum { + BTHF_VOLUME_TYPE_SPK = 0, + BTHF_VOLUME_TYPE_MIC +} bthf_volume_type_t; + +/* Noise Reduction and Echo Cancellation */ +typedef enum +{ + BTHF_NREC_STOP, + BTHF_NREC_START +} bthf_nrec_t; + +/* CHLD - Call held handling */ +typedef enum +{ + BTHF_CHLD_TYPE_RELEASEHELD, // Terminate all held or set UDUB("busy") to a waiting call + 
BTHF_CHLD_TYPE_RELEASEACTIVE_ACCEPTHELD, // Terminate all active calls and accepts a waiting/held call + BTHF_CHLD_TYPE_HOLDACTIVE_ACCEPTHELD, // Hold all active calls and accepts a waiting/held call + BTHF_CHLD_TYPE_ADDHELDTOCONF, // Add all held calls to a conference +} bthf_chld_type_t; + +/** Callback for connection state change. + * state will have one of the values from BtHfConnectionState + */ +typedef void (* bthf_connection_state_callback)(bthf_connection_state_t state, bt_bdaddr_t *bd_addr); + +/** Callback for audio connection state change. + * state will have one of the values from BtHfAudioState + */ +typedef void (* bthf_audio_state_callback)(bthf_audio_state_t state, bt_bdaddr_t *bd_addr); + +/** Callback for VR connection state change. + * state will have one of the values from BtHfVRState + */ +typedef void (* bthf_vr_cmd_callback)(bthf_vr_state_t state); + +/** Callback for answer incoming call (ATA) + */ +typedef void (* bthf_answer_call_cmd_callback)(); + +/** Callback for disconnect call (AT+CHUP) + */ +typedef void (* bthf_hangup_call_cmd_callback)(); + +/** Callback for disconnect call (AT+CHUP) + * type will denote Speaker/Mic gain (BtHfVolumeControl). 
+ */ +typedef void (* bthf_volume_cmd_callback)(bthf_volume_type_t type, int volume); + +/** Callback for dialing an outgoing call + * If number is NULL, redial + */ +typedef void (* bthf_dial_call_cmd_callback)(char *number); + +/** Callback for sending DTMF tones + * tone contains the dtmf character to be sent + */ +typedef void (* bthf_dtmf_cmd_callback)(char tone); + +/** Callback for enabling/disabling noise reduction/echo cancellation + * value will be 1 to enable, 0 to disable + */ +typedef void (* bthf_nrec_cmd_callback)(bthf_nrec_t nrec); + +/** Callback for call hold handling (AT+CHLD) + * value will contain the call hold command (0, 1, 2, 3) + */ +typedef void (* bthf_chld_cmd_callback)(bthf_chld_type_t chld); + +/** Callback for CNUM (subscriber number) + */ +typedef void (* bthf_cnum_cmd_callback)(); + +/** Callback for indicators (CIND) + */ +typedef void (* bthf_cind_cmd_callback)(); + +/** Callback for operator selection (COPS) + */ +typedef void (* bthf_cops_cmd_callback)(); + +/** Callback for call list (AT+CLCC) + */ +typedef void (* bthf_clcc_cmd_callback) (); + +/** Callback for unknown AT command recd from HF + * at_string will contain the unparsed AT string + */ +typedef void (* bthf_unknown_at_cmd_callback)(char *at_string); + +/** Callback for keypressed (HSP) event. + */ +typedef void (* bthf_key_pressed_cmd_callback)(); + +/** BT-HF callback structure. 
*/ +typedef struct { + /** set to sizeof(BtHfCallbacks) */ + size_t size; + bthf_connection_state_callback connection_state_cb; + bthf_audio_state_callback audio_state_cb; + bthf_vr_cmd_callback vr_cmd_cb; + bthf_answer_call_cmd_callback answer_call_cmd_cb; + bthf_hangup_call_cmd_callback hangup_call_cmd_cb; + bthf_volume_cmd_callback volume_cmd_cb; + bthf_dial_call_cmd_callback dial_call_cmd_cb; + bthf_dtmf_cmd_callback dtmf_cmd_cb; + bthf_nrec_cmd_callback nrec_cmd_cb; + bthf_chld_cmd_callback chld_cmd_cb; + bthf_cnum_cmd_callback cnum_cmd_cb; + bthf_cind_cmd_callback cind_cmd_cb; + bthf_cops_cmd_callback cops_cmd_cb; + bthf_clcc_cmd_callback clcc_cmd_cb; + bthf_unknown_at_cmd_callback unknown_at_cmd_cb; + bthf_key_pressed_cmd_callback key_pressed_cmd_cb; +} bthf_callbacks_t; + +/** Network Status */ +typedef enum +{ + BTHF_NETWORK_STATE_NOT_AVAILABLE = 0, + BTHF_NETWORK_STATE_AVAILABLE +} bthf_network_state_t; + +/** Service type */ +typedef enum +{ + BTHF_SERVICE_TYPE_HOME = 0, + BTHF_SERVICE_TYPE_ROAMING +} bthf_service_type_t; + +typedef enum { + BTHF_CALL_STATE_ACTIVE = 0, + BTHF_CALL_STATE_HELD, + BTHF_CALL_STATE_DIALING, + BTHF_CALL_STATE_ALERTING, + BTHF_CALL_STATE_INCOMING, + BTHF_CALL_STATE_WAITING, + BTHF_CALL_STATE_IDLE +} bthf_call_state_t; + +typedef enum { + BTHF_CALL_DIRECTION_OUTGOING = 0, + BTHF_CALL_DIRECTION_INCOMING +} bthf_call_direction_t; + +typedef enum { + BTHF_CALL_TYPE_VOICE = 0, + BTHF_CALL_TYPE_DATA, + BTHF_CALL_TYPE_FAX +} bthf_call_mode_t; + +typedef enum { + BTHF_CALL_MPTY_TYPE_SINGLE = 0, + BTHF_CALL_MPTY_TYPE_MULTI +} bthf_call_mpty_type_t; + +typedef enum { + BTHF_CALL_ADDRTYPE_UNKNOWN = 0x81, + BTHF_CALL_ADDRTYPE_INTERNATIONAL = 0x91 +} bthf_call_addrtype_t; +/** Represents the standard BT-HF interface. 
*/ +typedef struct { + + /** set to sizeof(BtHfInterface) */ + size_t size; + /** + * Register the BtHf callbacks + */ + bt_status_t (*init)( bthf_callbacks_t* callbacks ); + + /** connect to headset */ + bt_status_t (*connect)( bt_bdaddr_t *bd_addr ); + + /** dis-connect from headset */ + bt_status_t (*disconnect)( bt_bdaddr_t *bd_addr ); + + /** create an audio connection */ + bt_status_t (*connect_audio)( bt_bdaddr_t *bd_addr ); + + /** close the audio connection */ + bt_status_t (*disconnect_audio)( bt_bdaddr_t *bd_addr ); + + /** start voice recognition */ + bt_status_t (*start_voice_recognition)(); + + /** stop voice recognition */ + bt_status_t (*stop_voice_recognition)(); + + /** volume control */ + bt_status_t (*volume_control) (bthf_volume_type_t type, int volume); + + /** Combined device status change notification */ + bt_status_t (*device_status_notification)(bthf_network_state_t ntk_state, bthf_service_type_t svc_type, int signal, + int batt_chg); + + /** Response for COPS command */ + bt_status_t (*cops_response)(const char *cops); + + /** Response for CIND command */ + bt_status_t (*cind_response)(int svc, int num_active, int num_held, bthf_call_state_t call_setup_state, + int signal, int roam, int batt_chg); + + /** Pre-formatted AT response, typically in response to unknown AT cmd */ + bt_status_t (*formatted_at_response)(const char *rsp); + + /** ok/error response + * ERROR (0) + * OK (1) + */ + bt_status_t (*at_response) (bthf_at_response_t response_code, int error_code); + + /** response for CLCC command + * Can be iteratively called for each call index + * Call index of 0 will be treated as NULL termination (Completes response) + */ + bt_status_t (*clcc_response) (int index, bthf_call_direction_t dir, + bthf_call_state_t state, bthf_call_mode_t mode, + bthf_call_mpty_type_t mpty, const char *number, + bthf_call_addrtype_t type); + + /** notify of a call state change + * Each update notifies + * 1. Number of active/held/ringing calls + * 2. 
call_state: This denotes the state change that triggered this msg + * This will take one of the values from BtHfCallState + * 3. number & type: valid only for incoming & waiting call + */ + bt_status_t (*phone_state_change) (int num_active, int num_held, bthf_call_state_t call_setup_state, + const char *number, bthf_call_addrtype_t type); + + /** Closes the interface. */ + void (*cleanup)( void ); +} bthf_interface_t; + +__END_DECLS + +#endif /* ANDROID_INCLUDE_BT_HF_H */ diff --git a/include/hardware/bt_hh.h b/include/hardware/bt_hh.h new file mode 100644 index 0000000..09f547b --- /dev/null +++ b/include/hardware/bt_hh.h @@ -0,0 +1,179 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef ANDROID_INCLUDE_BT_HH_H
+#define ANDROID_INCLUDE_BT_HH_H
+
+#include <stdint.h>
+
+__BEGIN_DECLS
+
+#define BTHH_MAX_DSC_LEN 884
+
+/* HH connection states */
+typedef enum
+{
+ BTHH_CONN_STATE_CONNECTED = 0,
+ BTHH_CONN_STATE_CONNECTING,
+ BTHH_CONN_STATE_DISCONNECTED,
+ BTHH_CONN_STATE_DISCONNECTING,
+ BTHH_CONN_STATE_FAILED_MOUSE_FROM_HOST,
+ BTHH_CONN_STATE_FAILED_KBD_FROM_HOST,
+ BTHH_CONN_STATE_FAILED_TOO_MANY_DEVICES,
+ BTHH_CONN_STATE_FAILED_NO_BTHID_DRIVER,
+ BTHH_CONN_STATE_FAILED_GENERIC,
+ BTHH_CONN_STATE_UNKNOWN
+} bthh_connection_state_t;
+
+typedef enum
+{
+ BTHH_OK = 0,
+ BTHH_HS_HID_NOT_READY, /* handshake error : device not ready */
+ BTHH_HS_INVALID_RPT_ID, /* handshake error : invalid report ID */
+ BTHH_HS_TRANS_NOT_SPT, /* handshake error : transaction not spt */
+ BTHH_HS_INVALID_PARAM, /* handshake error : invalid parameter */
+ BTHH_HS_ERROR, /* handshake error : unspecified HS error */
+ BTHH_ERR, /* general BTA HH error */
+ BTHH_ERR_SDP, /* SDP error */
+ BTHH_ERR_PROTO, /* SET_Protocol error,
+ only used in BTA_HH_OPEN_EVT callback */
+ BTHH_ERR_DB_FULL, /* device database full error, used */
+ BTHH_ERR_TOD_UNSPT, /* type of device not supported */
+ BTHH_ERR_NO_RES, /* out of system resources */
+ BTHH_ERR_AUTH_FAILED, /* authentication fail */
+ BTHH_ERR_HDL
+}bthh_status_t;
+
+/* Protocol modes */
+typedef enum {
+ BTHH_REPORT_MODE = 0x00,
+ BTHH_BOOT_MODE = 0x01,
+ BTHH_UNSUPPORTED_MODE = 0xff
+}bthh_protocol_mode_t;
+
+/* Report types */
+typedef enum {
+ BTHH_INPUT_REPORT = 1,
+ BTHH_OUTPUT_REPORT,
+ BTHH_FEATURE_REPORT
+}bthh_report_type_t;
+
+typedef struct
+{
+ int attr_mask;
+ uint8_t sub_class;
+ uint8_t app_id;
+ int vendor_id;
+ int product_id;
+ int version;
+ uint8_t ctry_code;
+ int dl_len;
+ uint8_t dsc_list[BTHH_MAX_DSC_LEN];
+} bthh_hid_info_t;
+
+/** Callback for connection state change. 
+ * state will have one of the values from bthh_connection_state_t
+ */
+typedef void (* bthh_connection_state_callback)(bt_bdaddr_t *bd_addr, bthh_connection_state_t state);
+
+/** Callback for virtual unplug api.
+ * the status of the virtual unplug
+ */
+typedef void (* bthh_virtual_unplug_callback)(bt_bdaddr_t *bd_addr, bthh_status_t hh_status);
+
+/** Callback for get hid info
+ * hid_info will contain attr_mask, sub_class, app_id, vendor_id, product_id, version, ctry_code, len
+ */
+typedef void (* bthh_hid_info_callback)(bt_bdaddr_t *bd_addr, bthh_hid_info_t hid_info);
+
+/** Callback for get/set protocol api.
+ * the protocol mode is one of the value from bthh_protocol_mode_t
+ */
+typedef void (* bthh_protocol_mode_callback)(bt_bdaddr_t *bd_addr, bthh_status_t hh_status,bthh_protocol_mode_t mode);
+
+/** Callback for get/set_idle_time api.
+ */
+typedef void (* bthh_idle_time_callback)(bt_bdaddr_t *bd_addr, bthh_status_t hh_status, int idle_rate);
+
+
+/** Callback for get report api.
+ * if status is ok rpt_data contains the report data
+ */
+typedef void (* bthh_get_report_callback)(bt_bdaddr_t *bd_addr, bthh_status_t hh_status, uint8_t* rpt_data, int rpt_size);
+
+
+/** BT-HH callback structure. */
+typedef struct {
+ /** set to sizeof(BtHhCallbacks) */
+ size_t size;
+ bthh_connection_state_callback connection_state_cb;
+ bthh_hid_info_callback hid_info_cb;
+ bthh_protocol_mode_callback protocol_mode_cb;
+ bthh_idle_time_callback idle_time_cb;
+ bthh_get_report_callback get_report_cb;
+ bthh_virtual_unplug_callback virtual_unplug_cb;
+
+} bthh_callbacks_t;
+
+
+
+/** Represents the standard BT-HH interface. 
*/ +typedef struct { + + /** set to sizeof(BtHhInterface) */ + size_t size; + + /** + * Register the BtHh callbacks + */ + bt_status_t (*init)( bthh_callbacks_t* callbacks ); + + /** connect to hid device */ + bt_status_t (*connect)( bt_bdaddr_t *bd_addr); + + /** dis-connect from hid device */ + bt_status_t (*disconnect)( bt_bdaddr_t *bd_addr ); + + /** Virtual UnPlug (VUP) the specified HID device */ + bt_status_t (*virtual_unplug)(bt_bdaddr_t *bd_addr); + + /** Set the HID device descriptor for the specified HID device. */ + bt_status_t (*set_info)(bt_bdaddr_t *bd_addr, bthh_hid_info_t hid_info ); + + /** Get the HID proto mode. */ + bt_status_t (*get_protocol) (bt_bdaddr_t *bd_addr, bthh_protocol_mode_t protocolMode); + + /** Set the HID proto mode. */ + bt_status_t (*set_protocol)(bt_bdaddr_t *bd_addr, bthh_protocol_mode_t protocolMode); + + /** Send a GET_REPORT to HID device. */ + bt_status_t (*get_report)(bt_bdaddr_t *bd_addr, bthh_report_type_t reportType, uint8_t reportId, int bufferSize); + + /** Send a SET_REPORT to HID device. */ + bt_status_t (*set_report)(bt_bdaddr_t *bd_addr, bthh_report_type_t reportType, char* report); + + /** Send data to HID device. */ + bt_status_t (*send_data)(bt_bdaddr_t *bd_addr, char* data); + + /** Closes the interface. */ + void (*cleanup)( void ); + +} bthh_interface_t; +__END_DECLS + +#endif /* ANDROID_INCLUDE_BT_HH_H */ + + diff --git a/include/hardware/bt_hl.h b/include/hardware/bt_hl.h new file mode 100644 index 0000000..bd29e3a --- /dev/null +++ b/include/hardware/bt_hl.h @@ -0,0 +1,123 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_INCLUDE_BT_HL_H +#define ANDROID_INCLUDE_BT_HL_H + +__BEGIN_DECLS + +/* HL connection states */ + +typedef enum +{ + BTHL_MDEP_ROLE_SOURCE, + BTHL_MDEP_ROLE_SINK +} bthl_mdep_role_t; + +typedef enum { + BTHL_APP_REG_STATE_REG_SUCCESS, + BTHL_APP_REG_STATE_REG_FAILED, + BTHL_APP_REG_STATE_DEREG_SUCCESS, + BTHL_APP_REG_STATE_DEREG_FAILED +} bthl_app_reg_state_t; + +typedef enum +{ + BTHL_CHANNEL_TYPE_RELIABLE, + BTHL_CHANNEL_TYPE_STREAMING, + BTHL_CHANNEL_TYPE_ANY +} bthl_channel_type_t; + + +/* HL connection states */ +typedef enum { + BTHL_CONN_STATE_CONNECTING, + BTHL_CONN_STATE_CONNECTED, + BTHL_CONN_STATE_DISCONNECTING, + BTHL_CONN_STATE_DISCONNECTED, + BTHL_CONN_STATE_DESTROYED +} bthl_channel_state_t; + +typedef struct +{ + bthl_mdep_role_t mdep_role; + int data_type; + bthl_channel_type_t channel_type; + const char *mdep_description; /* MDEP description to be used in the SDP (optional); null terminated */ +} bthl_mdep_cfg_t; + +typedef struct +{ + const char *application_name; + const char *provider_name; /* provider name to be used in the SDP (optional); null terminated */ + const char *srv_name; /* service name to be used in the SDP (optional); null terminated*/ + const char *srv_desp; /* service description to be used in the SDP (optional); null terminated */ + int number_of_mdeps; + bthl_mdep_cfg_t *mdep_cfg; /* Dynamic array */ +} bthl_reg_param_t; + +/** Callback for application registration status. 
+ * state will have one of the values from bthl_app_reg_state_t + */ +typedef void (* bthl_app_reg_state_callback)(int app_id, bthl_app_reg_state_t state); + +/** Callback for channel connection state change. + * state will have one of the values from + * bthl_connection_state_t and fd (file descriptor) + */ +typedef void (* bthl_channel_state_callback)(int app_id, bt_bdaddr_t *bd_addr, int mdep_cfg_index, int channel_id, bthl_channel_state_t state, int fd); + +/** BT-HL callback structure. */ +typedef struct { + /** set to sizeof(bthl_callbacks_t) */ + size_t size; + bthl_app_reg_state_callback app_reg_state_cb; + bthl_channel_state_callback channel_state_cb; +} bthl_callbacks_t; + + +/** Represents the standard BT-HL interface. */ +typedef struct { + + /** set to sizeof(bthl_interface_t) */ + size_t size; + + /** + * Register the Bthl callbacks + */ + bt_status_t (*init)( bthl_callbacks_t* callbacks ); + + /** Register HL application */ + bt_status_t (*register_application) ( bthl_reg_param_t *p_reg_param, int *app_id); + + /** Unregister HL application */ + bt_status_t (*unregister_application) (int app_id); + + /** connect channel */ + bt_status_t (*connect_channel)(int app_id, bt_bdaddr_t *bd_addr, int mdep_cfg_index, int *channel_id); + + /** destroy channel */ + bt_status_t (*destroy_channel)(int channel_id); + + /** Close the Bthl callback **/ + void (*cleanup)(void); + +} bthl_interface_t; +__END_DECLS + +#endif /* ANDROID_INCLUDE_BT_HL_H */ + + diff --git a/include/hardware/bt_pan.h b/include/hardware/bt_pan.h new file mode 100644 index 0000000..c8b36b4 --- /dev/null +++ b/include/hardware/bt_pan.h @@ -0,0 +1,87 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_INCLUDE_BT_PAN_H
+#define ANDROID_INCLUDE_BT_PAN_H
+
+__BEGIN_DECLS
+
+#define BTPAN_ROLE_NONE 0
+#define BTPAN_ROLE_PANNAP 1
+#define BTPAN_ROLE_PANU 2
+
+typedef enum {
+ BTPAN_STATE_CONNECTED = 0,
+ BTPAN_STATE_CONNECTING = 1,
+ BTPAN_STATE_DISCONNECTED = 2,
+ BTPAN_STATE_DISCONNECTING = 3
+} btpan_connection_state_t;
+
+typedef enum {
+ BTPAN_STATE_ENABLED = 0,
+ BTPAN_STATE_DISABLED = 1
+} btpan_control_state_t;
+
+/**
+* Callback for pan connection state
+*/
+typedef void (*btpan_connection_state_callback)(btpan_connection_state_t state, bt_status_t error,
+ const bt_bdaddr_t *bd_addr, int local_role, int remote_role);
+typedef void (*btpan_control_state_callback)(btpan_control_state_t state, bt_status_t error,
+ int local_role, const char* ifname);
+
+typedef struct {
+ size_t size;
+ btpan_control_state_callback control_state_cb;
+ btpan_connection_state_callback connection_state_cb;
+} btpan_callbacks_t;
+typedef struct {
+ /** set to size of this struct*/
+ size_t size;
+ /**
+ * Initialize the pan interface and register the btpan callbacks
+ */
+ bt_status_t (*init)(const btpan_callbacks_t* callbacks);
+ /*
+ * enable the pan service by specified role. The result state of
+ * enable will be returned by btpan_control_state_callback. 
when pan-nap is enabled, + * the state of connecting panu device will be notified by btpan_connection_state_callback + */ + bt_status_t (*enable)(int local_role); + /* + * get current pan local role + */ + int (*get_local_role)(void); + /** + * start bluetooth pan connection to the remote device by specified pan role. The result state will be + * returned by btpan_connection_state_callback + */ + bt_status_t (*connect)(const bt_bdaddr_t *bd_addr, int local_role, int remote_role); + /** + * stop bluetooth pan connection. The result state will be returned by btpan_connection_state_callback + */ + bt_status_t (*disconnect)(const bt_bdaddr_t *bd_addr); + + /** + * Cleanup the pan interface + */ + void (*cleanup)(void); + +} btpan_interface_t; + +__END_DECLS + +#endif /* ANDROID_INCLUDE_BT_PAN_H */ diff --git a/include/hardware/bt_sock.h b/include/hardware/bt_sock.h new file mode 100644 index 0000000..a4aa046 --- /dev/null +++ b/include/hardware/bt_sock.h @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_INCLUDE_BT_SOCK_H +#define ANDROID_INCLUDE_BT_SOCK_H + +__BEGIN_DECLS + +#define BTSOCK_FLAG_ENCRYPT 1 +#define BTSOCK_FLAG_AUTH (1 << 1) + +typedef enum { + BTSOCK_RFCOMM = 1, + BTSOCK_SCO = 2, + BTSOCK_L2CAP = 3 +} btsock_type_t; + +/** Represents the standard BT SOCKET interface. 
*/ +typedef struct { + short size; + bt_bdaddr_t bd_addr; + int channel; + int status; +} __attribute__((packed)) sock_connect_signal_t; + +typedef struct { + + /** set to size of this struct*/ + size_t size; + /** + * listen to a rfcomm uuid or channel. It returns the socket fd from which + * btsock_connect_signal can be read out when a remote device connected + */ + bt_status_t (*listen)(btsock_type_t type, const char* service_name, const uint8_t* service_uuid, int channel, int* sock_fd, int flags); + /* + * connect to a rfcomm uuid channel of remote device, It returns the socket fd from which + * the btsock_connect_signal and a new socket fd to be accepted can be read out when connected + */ + bt_status_t (*connect)(const bt_bdaddr_t *bd_addr, btsock_type_t type, const uint8_t* uuid, int channel, int* sock_fd, int flags); + +} btsock_interface_t; + +__END_DECLS + +#endif /* ANDROID_INCLUDE_BT_SOCK_H */ diff --git a/include/hardware/camera2.h b/include/hardware/camera2.h index 36f2a9e..5d45325 100644 --- a/include/hardware/camera2.h +++ b/include/hardware/camera2.h @@ -18,6 +18,7 @@ #define ANDROID_INCLUDE_CAMERA2_H #include "camera_common.h" +#include "system/camera_metadata.h" /** * Camera device HAL 2.0 [ CAMERA_DEVICE_API_VERSION_2_0 ] @@ -35,7 +36,7 @@ * version 2.0 of the camera module interface (as defined by * camera_module_t.common.module_api_version). * - * See camera_common.h for more details. + * See camera_common.h for more versioning details. * */ @@ -43,105 +44,250 @@ __BEGIN_DECLS struct camera2_device; -/** - * Output image stream queue management +/********************************************************************** + * + * Input/output stream buffer queue interface definitions + * */ +/** + * Output image stream queue interface. A set of these methods is provided to + * the HAL device in allocate_stream(), and are used to interact with the + * gralloc buffer queue for that stream. 
They may not be called until after + * allocate_stream returns. + */ typedef struct camera2_stream_ops { - int (*dequeue_buffer)(struct camera2_stream_ops* w, - buffer_handle_t** buffer, int *stride); - int (*enqueue_buffer)(struct camera2_stream_ops* w, - buffer_handle_t* buffer); - int (*cancel_buffer)(struct camera2_stream_ops* w, - buffer_handle_t* buffer); - int (*set_buffer_count)(struct camera2_stream_ops* w, int count); - int (*set_buffers_geometry)(struct camera2_stream_ops* pw, - int w, int h, int format); - int (*set_crop)(struct camera2_stream_ops *w, - int left, int top, int right, int bottom); - // Timestamps are measured in nanoseconds, and must be comparable - // and monotonically increasing between two frames in the same - // preview stream. They do not need to be comparable between - // consecutive or parallel preview streams, cameras, or app runs. - // The timestamp must be the time at the start of image exposure. - int (*set_timestamp)(struct camera2_stream_ops *w, int64_t timestamp); - int (*set_usage)(struct camera2_stream_ops* w, int usage); - int (*get_min_undequeued_buffer_count)(const struct camera2_stream_ops *w, - int *count); - int (*lock_buffer)(struct camera2_stream_ops* w, - buffer_handle_t* buffer); + /** + * Get a buffer to fill from the queue. The size and format of the buffer + * are fixed for a given stream (defined in allocate_stream), and the stride + * should be queried from the platform gralloc module. The gralloc buffer + * will have been allocated based on the usage flags provided by + * allocate_stream, and will be locked for use. + */ + int (*dequeue_buffer)(const struct camera2_stream_ops* w, + buffer_handle_t** buffer); + + /** + * Push a filled buffer to the stream to be used by the consumer. + * + * The timestamp represents the time at start of exposure of the first row + * of the image; it must be from a monotonic clock, and is measured in + * nanoseconds. 
The timestamps do not need to be comparable between + * different cameras, or consecutive instances of the same camera. However, + * they must be comparable between streams from the same camera. If one + * capture produces buffers for multiple streams, each stream must have the + * same timestamp for that buffer, and that timestamp must match the + * timestamp in the output frame metadata. + */ + int (*enqueue_buffer)(const struct camera2_stream_ops* w, + int64_t timestamp, + buffer_handle_t* buffer); + /** + * Return a buffer to the queue without marking it as filled. + */ + int (*cancel_buffer)(const struct camera2_stream_ops* w, + buffer_handle_t* buffer); + /** + * Set the crop window for subsequently enqueued buffers. The parameters are + * measured in pixels relative to the buffer width and height. + */ + int (*set_crop)(const struct camera2_stream_ops *w, + int left, int top, int right, int bottom); + } camera2_stream_ops_t; /** + * Temporary definition during transition. + * + * These formats will be removed and replaced with + * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED. To maximize forward compatibility, + * HAL implementations are strongly recommended to treat FORMAT_OPAQUE and + * FORMAT_ZSL as equivalent to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, and + * return HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED in the format_actual output + * parameter of allocate_stream, allowing the gralloc module to select the + * specific format based on the usage flags from the camera and the stream + * consumer. + */ +enum { + CAMERA2_HAL_PIXEL_FORMAT_OPAQUE = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, + CAMERA2_HAL_PIXEL_FORMAT_ZSL = -1 +}; + +/** + * Transport header for compressed JPEG buffers in output streams. + * + * To capture JPEG images, a stream is created using the pixel format + * HAL_PIXEL_FORMAT_BLOB, and the static metadata field android.jpeg.maxSize is + * used as the buffer size. 
Since compressed JPEG images are of variable size, + * the HAL needs to include the final size of the compressed image using this + * structure inside the output stream buffer. The JPEG blob ID field must be set + * to CAMERA2_JPEG_BLOB_ID. + * + * Transport header should be at the end of the JPEG output stream buffer. That + * means the jpeg_blob_id must start at byte[android.jpeg.maxSize - + * sizeof(camera2_jpeg_blob)]. Any HAL using this transport header must + * account for it in android.jpeg.maxSize. The JPEG data itself starts at + * byte[0] and should be jpeg_size bytes long. + */ +typedef struct camera2_jpeg_blob { + uint16_t jpeg_blob_id; + uint32_t jpeg_size; +}; + +enum { + CAMERA2_JPEG_BLOB_ID = 0x00FF +}; + +/** + * Input reprocess stream queue management. A set of these methods is provided + * to the HAL device in allocate_reprocess_stream(); they are used to interact + * with the reprocess stream's input gralloc buffer queue. + */ +typedef struct camera2_stream_in_ops { + /** + * Get the next buffer of image data to reprocess. The width, height, and + * format of the buffer is fixed in allocate_reprocess_stream(), and the + * stride and other details should be queried from the platform gralloc + * module as needed. The buffer will already be locked for use. + */ + int (*acquire_buffer)(const struct camera2_stream_in_ops *w, + buffer_handle_t** buffer); + /** + * Return a used buffer to the buffer queue for reuse. + */ + int (*release_buffer)(const struct camera2_stream_in_ops *w, + buffer_handle_t* buffer); + +} camera2_stream_in_ops_t; + +/********************************************************************** + * * Metadata queue management, used for requests sent to HAL module, and for * frames produced by the HAL. * - * Queue protocol: + */ + +enum { + CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS = -1 +}; + +/** + * Request input queue protocol: * - * The source holds the queue and its contents. At start, the queue is empty. 
+ * The framework holds the queue and its contents. At start, the queue is empty. * - * 1. When the first metadata buffer is placed into the queue, the source must - * signal the destination by calling notify_queue_not_empty(). + * 1. When the first metadata buffer is placed into the queue, the framework + * signals the device by calling notify_request_queue_not_empty(). * - * 2. After receiving notify_queue_not_empty, the destination must call + * 2. After receiving notify_request_queue_not_empty, the device must call * dequeue() once it's ready to handle the next buffer. * - * 3. Once the destination has processed a buffer, it should try to dequeue - * another buffer. If there are no more buffers available, dequeue() will - * return NULL. In this case, when a buffer becomes available, the source - * must call notify_queue_not_empty() again. If the destination receives a - * NULL return from dequeue, it does not need to query the queue again until - * a notify_queue_not_empty() call is received from the source. - * - * 4. If the destination calls buffer_count() and receives 0, this does not mean - * that the source will provide a notify_queue_not_empty() call. The source - * must only provide such a call after the destination has received a NULL - * from dequeue, or on initial startup. - * - * 5. The dequeue() call in response to notify_queue_not_empty() may be on the - * same thread as the notify_queue_not_empty() call. The source must not - * deadlock in that case. + * 3. Once the device has processed a buffer, and is ready for the next buffer, + * it must call dequeue() again instead of waiting for a notification. If + * there are no more buffers available, dequeue() will return NULL. After + * this point, when a buffer becomes available, the framework must call + * notify_request_queue_not_empty() again. 
If the device receives a NULL + * return from dequeue, it does not need to query the queue again until a + * notify_request_queue_not_empty() call is received from the source. + * + * 4. If the device calls buffer_count() and receives 0, this does not mean that + * the framework will provide a notify_request_queue_not_empty() call. The + * framework will only provide such a notification after the device has + * received a NULL from dequeue, or on initial startup. + * + * 5. The dequeue() call in response to notify_request_queue_not_empty() may be + * on the same thread as the notify_request_queue_not_empty() call, and may + * be performed from within the notify call. + * + * 6. All dequeued request buffers must be returned to the framework by calling + * free_request, including when errors occur, a device flush is requested, or + * when the device is shutting down. */ - -typedef struct camera2_metadata_queue_src_ops { +typedef struct camera2_request_queue_src_ops { /** - * Get count of buffers in queue + * Get the count of request buffers pending in the queue. May return + * CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS if a repeating request (stream + * request) is currently configured. Calling this method has no effect on + * whether the notify_request_queue_not_empty() method will be called by the + * framework. */ - int (*buffer_count)(camera2_metadata_queue_src_ops *q); + int (*request_count)(const struct camera2_request_queue_src_ops *q); /** - * Get a metadata buffer from the source. Returns OK if a request is - * available, placing a pointer to it in next_request. + * Get a metadata buffer from the framework. Returns OK if there is no + * error. If the queue is empty, returns NULL in buffer. In that case, the + * device must wait for a notify_request_queue_not_empty() message before + * attempting to dequeue again. Buffers obtained in this way must be + * returned to the framework with free_request(). 
*/ - int (*dequeue)(camera2_metadata_queue_src_ops *q, + int (*dequeue_request)(const struct camera2_request_queue_src_ops *q, camera_metadata_t **buffer); /** - * Return a metadata buffer to the source once it has been used + * Return a metadata buffer to the framework once it has been used, or if + * an error or shutdown occurs. */ - int (*free)(camera2_metadata_queue_src_ops *q, + int (*free_request)(const struct camera2_request_queue_src_ops *q, camera_metadata_t *old_buffer); -} camera2_metadata_queue_src_ops_t; +} camera2_request_queue_src_ops_t; + +/** + * Frame output queue protocol: + * + * The framework holds the queue and its contents. At start, the queue is empty. + * + * 1. When the device is ready to fill an output metadata frame, it must dequeue + * a metadata buffer of the required size. + * + * 2. It should then fill the metadata buffer, and place it on the frame queue + * using enqueue_frame. The framework takes ownership of the frame. + * + * 3. In case of an error, a request to flush the pipeline, or shutdown, the + * device must return any affected dequeued frames to the framework by + * calling cancel_frame. + */ +typedef struct camera2_frame_queue_dst_ops { + /** + * Get an empty metadata buffer to fill from the framework. The new metadata + * buffer will have room for entries number of metadata entries, plus + * data_bytes worth of extra storage. Frames dequeued here must be returned + * to the framework with either cancel_frame or enqueue_frame. + */ + int (*dequeue_frame)(const struct camera2_frame_queue_dst_ops *q, + size_t entries, size_t data_bytes, + camera_metadata_t **buffer); + + /** + * Return a dequeued metadata buffer to the framework for reuse; do not mark it as + * filled. Use when encountering errors, or flushing the internal request queue. 
+ */ + int (*cancel_frame)(const struct camera2_frame_queue_dst_ops *q, + camera_metadata_t *buffer); -typedef struct camera2_metadata_queue_dst_ops { /** - * Notify destination that the queue is no longer empty + * Place a completed metadata frame on the frame output queue. */ - int (*notify_queue_not_empty)(struct camera2_metadata_queue_dst_ops *); + int (*enqueue_frame)(const struct camera2_frame_queue_dst_ops *q, + camera_metadata_t *buffer); -} camera2_metadata_queue_dst_ops_t; +} camera2_frame_queue_dst_ops_t; -/* Defined in camera_metadata.h */ -typedef struct vendor_tag_query_ops vendor_tag_query_ops_t; +/********************************************************************** + * + * Notification callback and message definition, and trigger definitions + * + */ /** * Asynchronous notification callback from the HAL, fired for various * reasons. Only for information independent of frame capture, or that require - * specific timing. + * specific timing. The user pointer must be the same one that was passed to the + * device in set_notify_callback(). */ typedef void (*camera2_notify_callback)(int32_t msg_type, int32_t ext1, int32_t ext2, + int32_t ext3, void *user); /** @@ -149,15 +295,42 @@ typedef void (*camera2_notify_callback)(int32_t msg_type, */ enum { /** - * A serious error has occurred. Argument ext1 contains the error code, and - * ext2 and user contain any error-specific information. + * An error has occurred. Argument ext1 contains the error code, and + * ext2 and ext3 contain any error-specific information. */ CAMERA2_MSG_ERROR = 0x0001, /** * The exposure of a given request has begun. Argument ext1 contains the - * request id. + * frame number, and ext2 and ext3 contain the low-order and high-order + * bytes of the timestamp for when exposure began. + * (timestamp = (ext3 << 32 | ext2)) + */ + CAMERA2_MSG_SHUTTER = 0x0010, + /** + * The autofocus routine has changed state. 
Argument ext1 contains the new + * state; the values are the same as those for the metadata field + * android.control.afState. Ext2 contains the latest trigger ID passed to + * trigger_action(CAMERA2_TRIGGER_AUTOFOCUS) or + * trigger_action(CAMERA2_TRIGGER_CANCEL_AUTOFOCUS), or 0 if trigger has not + * been called with either of those actions. + */ + CAMERA2_MSG_AUTOFOCUS = 0x0020, + /** + * The autoexposure routine has changed state. Argument ext1 contains the + * new state; the values are the same as those for the metadata field + * android.control.aeState. Ext2 contains the latest trigger ID value passed to + * trigger_action(CAMERA2_TRIGGER_PRECAPTURE_METERING), or 0 if that method + * has not been called. */ - CAMERA2_MSG_SHUTTER = 0x0002 + CAMERA2_MSG_AUTOEXPOSURE = 0x0021, + /** + * The auto-whitebalance routine has changed state. Argument ext1 contains + * the new state; the values are the same as those for the metadata field + * android.control.awbState. Ext2 contains the latest trigger ID passed to + * trigger_action(CAMERA2_TRIGGER_PRECAPTURE_METERING), or 0 if that method + * has not been called. + */ + CAMERA2_MSG_AUTOWB = 0x0022 }; /** @@ -169,133 +342,457 @@ enum { * no further frames or buffer streams will be produced by the * device. Device should be treated as closed. */ - CAMERA2_MSG_ERROR_HARDWARE_FAULT = 0x0001, + CAMERA2_MSG_ERROR_HARDWARE = 0x0001, /** * A serious failure occured. No further frames or buffer streams will be * produced by the device. Device should be treated as closed. The client * must reopen the device to use it again. */ - CAMERA2_MSG_ERROR_DEVICE_FAULT = 0x0002, + CAMERA2_MSG_ERROR_DEVICE, /** - * The camera service has failed. Device should be treated as released. The client - * must reopen the device to use it again. + * An error has occurred in processing a request. No output (metadata or + * buffers) will be produced for this request. ext2 contains the frame + * number of the request. 
Subsequent requests are unaffected, and the device + * remains operational. + */ + CAMERA2_MSG_ERROR_REQUEST, + /** + * An error has occurred in producing an output frame metadata buffer for a + * request, but image buffers for it will still be available. Subsequent + * requests are unaffected, and the device remains operational. ext2 + * contains the frame number of the request. + */ + CAMERA2_MSG_ERROR_FRAME, + /** + * An error has occurred in placing an output buffer into a stream for a + * request. The frame metadata and other buffers may still be + * available. Subsequent requests are unaffected, and the device remains + * operational. ext2 contains the frame number of the request, and ext3 + * contains the stream id. + */ + CAMERA2_MSG_ERROR_STREAM, + /** + * Number of error types */ - CAMERA2_MSG_ERROR_SERVER_FAULT = 0x0003 + CAMERA2_MSG_NUM_ERRORS }; -typedef struct camera2_device_ops { +/** + * Possible trigger ids for trigger_action() + */ +enum { /** - * Input request queue methods + * Trigger an autofocus cycle. The effect of the trigger depends on the + * autofocus mode in effect when the trigger is received, which is the mode + * listed in the latest capture request to be dequeued by the HAL. If the + * mode is OFF, EDOF, or FIXED, the trigger has no effect. In AUTO, MACRO, + * or CONTINUOUS_* modes, see below for the expected behavior. The state of + * the autofocus cycle can be tracked in android.control.afMode and the + * corresponding notifications. 
+ * + ** + * In AUTO or MACRO mode, the AF state transitions (and notifications) + * when calling with trigger ID = N with the previous ID being K are: + * + * Initial state Transitions + * INACTIVE (K) -> ACTIVE_SCAN (N) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * AF_FOCUSED (K) -> ACTIVE_SCAN (N) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * AF_NOT_FOCUSED (K) -> ACTIVE_SCAN (N) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * ACTIVE_SCAN (K) -> AF_FOCUSED(N) or AF_NOT_FOCUSED(N) + * PASSIVE_SCAN (K) Not used in AUTO/MACRO mode + * PASSIVE_FOCUSED (K) Not used in AUTO/MACRO mode + * + ** + * In CONTINUOUS_PICTURE mode, triggering AF must lock the AF to the current + * lens position and transition the AF state to either AF_FOCUSED or + * NOT_FOCUSED. If a passive scan is underway, that scan must complete and + * then lock the lens position and change AF state. TRIGGER_CANCEL_AUTOFOCUS + * will allow the AF to restart its operation. + * + * Initial state Transitions + * INACTIVE (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * PASSIVE_FOCUSED (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * PASSIVE_SCAN (K) -> AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * AF_FOCUSED (K) no effect except to change next notification ID to N + * AF_NOT_FOCUSED (K) no effect except to change next notification ID to N + * + ** + * In CONTINUOUS_VIDEO mode, triggering AF must lock the AF to the current + * lens position and transition the AF state to either AF_FOCUSED or + * NOT_FOCUSED. If a passive scan is underway, it must immediately halt, in + * contrast with CONTINUOUS_PICTURE mode. TRIGGER_CANCEL_AUTOFOCUS will + * allow the AF to restart its operation. 
+ * + * Initial state Transitions + * INACTIVE (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * PASSIVE_FOCUSED (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * PASSIVE_SCAN (K) -> immediate AF_FOCUSED (N) or AF_NOT_FOCUSED (N) + * AF_FOCUSED (K) no effect except to change next notification ID to N + * AF_NOT_FOCUSED (K) no effect except to change next notification ID to N + * + * Ext1 is an ID that must be returned in subsequent auto-focus state change + * notifications through camera2_notify_callback() and stored in + * android.control.afTriggerId. */ - int (*set_request_queue_src_ops)(struct camera2_device *, - camera2_metadata_queue_src_ops *queue_src_ops); - - int (*get_request_queue_dst_ops)(struct camera2_device *, - camera2_metadata_queue_dst_ops **queue_dst_ops); + CAMERA2_TRIGGER_AUTOFOCUS = 0x0001, + /** + * Send a cancel message to the autofocus algorithm. The effect of the + * cancellation depends on the autofocus mode in effect when the trigger is + * received, which is the mode listed in the latest capture request to be + * dequeued by the HAL. If the AF mode is OFF or EDOF, the cancel has no + * effect. For other modes, the lens should return to its default position, + * any current autofocus scan must be canceled, and the AF state should be + * set to INACTIVE. + * + * The state of the autofocus cycle can be tracked in android.control.afMode + * and the corresponding notification. Continuous autofocus modes may resume + * focusing operations thereafter exactly as if the camera had just been set + * to a continuous AF mode. + * + * Ext1 is an ID that must be returned in subsequent auto-focus state change + * notifications through camera2_notify_callback() and stored in + * android.control.afTriggerId. + */ + CAMERA2_TRIGGER_CANCEL_AUTOFOCUS, + /** + * Trigger a pre-capture metering cycle, which may include firing the flash + * to determine proper capture parameters. 
Typically, this trigger would be + * fired for a half-depress of a camera shutter key, or before a snapshot + * capture in general. The state of the metering cycle can be tracked in + * android.control.aeMode and the corresponding notification. If the + * auto-exposure mode is OFF, the trigger does nothing. + * + * Ext1 is an ID that must be returned in subsequent + * auto-exposure/auto-white balance state change notifications through + * camera2_notify_callback() and stored in android.control.aePrecaptureId. + */ + CAMERA2_TRIGGER_PRECAPTURE_METERING +}; +/** + * Possible template types for construct_default_request() + */ +enum { + /** + * Standard camera preview operation with 3A on auto. + */ + CAMERA2_TEMPLATE_PREVIEW = 1, + /** + * Standard camera high-quality still capture with 3A and flash on auto. + */ + CAMERA2_TEMPLATE_STILL_CAPTURE, /** - * Input reprocessing queue methods + * Standard video recording plus preview with 3A on auto, torch off. */ - int (*set_reprocess_queue_ops)(struct camera2_device *, - camera2_metadata_queue_src_ops *queue_src_ops); + CAMERA2_TEMPLATE_VIDEO_RECORD, + /** + * High-quality still capture while recording video. Application will + * include preview, video record, and full-resolution YUV or JPEG streams in + * request. Must not cause stuttering on video stream. 3A on auto. + */ + CAMERA2_TEMPLATE_VIDEO_SNAPSHOT, + /** + * Zero-shutter-lag mode. Application will request preview and + * full-resolution data for each frame, and reprocess it to JPEG when a + * still image is requested by user. Settings should provide highest-quality + * full-resolution images without compromising preview frame rate. 3A on + * auto. 
+ */ + CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG, + + /* Total number of templates */ + CAMERA2_TEMPLATE_COUNT +}; - int (*get_reprocess_queue_dst_ops)(struct camera2_device *, - camera2_metadata_queue_dst_ops **queue_dst_ops); + +/********************************************************************** + * + * Camera device operations + * + */ +typedef struct camera2_device_ops { + + /********************************************************************** + * Request and frame queue setup and management methods + */ /** - * Output frame queue methods + * Pass in input request queue interface methods. */ - int (*set_frame_queue_dst_ops)(struct camera2_device *, - camera2_metadata_queue_dst_ops *queue_dst_ops); + int (*set_request_queue_src_ops)(const struct camera2_device *, + const camera2_request_queue_src_ops_t *request_src_ops); - int (*get_frame_queue_src_ops)(struct camera2_device *, - camera2_metadata_queue_src_ops **queue_dst_ops); + /** + * Notify device that the request queue is no longer empty. Must only be + * called when the first buffer is added a new queue, or after the source + * has returned NULL in response to a dequeue call. + */ + int (*notify_request_queue_not_empty)(const struct camera2_device *); /** - * Pass in notification methods + * Pass in output frame queue interface methods */ - int (*set_notify_callback)(struct camera2_device *, - camera2_notify_callback notify_cb); + int (*set_frame_queue_dst_ops)(const struct camera2_device *, + const camera2_frame_queue_dst_ops_t *frame_dst_ops); /** - * Number of camera frames being processed by the device - * at the moment (frames that have had their request dequeued, - * but have not yet been enqueued onto output pipeline(s) ) + * Number of camera requests being processed by the device at the moment + * (captures/reprocesses that have had their request dequeued, but have not + * yet been enqueued onto output pipeline(s) ). No streams may be released + * by the framework until the in-progress count is 0. 
*/ - int (*get_in_progress_count)(struct camera2_device *); + int (*get_in_progress_count)(const struct camera2_device *); /** * Flush all in-progress captures. This includes all dequeued requests * (regular or reprocessing) that have not yet placed any outputs into a * stream or the frame queue. Partially completed captures must be completed - * normally. No new requests may be dequeued from the request or - * reprocessing queues until the flush completes. + * normally. No new requests may be dequeued from the request queue until + * the flush completes. */ - int (*flush_captures_in_progress)(struct camera2_device *); + int (*flush_captures_in_progress)(const struct camera2_device *); /** - * Camera stream management + * Create a filled-in default request for standard camera use cases. + * + * The device must return a complete request that is configured to meet the + * requested use case, which must be one of the CAMERA2_TEMPLATE_* + * enums. All request control fields must be included, except for + * android.request.outputStreams. + * + * The metadata buffer returned must be allocated with + * allocate_camera_metadata. The framework takes ownership of the buffer. + */ + int (*construct_default_request)(const struct camera2_device *, + int request_template, + camera_metadata_t **request); + + /********************************************************************** + * Stream management */ /** - * Operations on the input reprocessing stream + * allocate_stream: + * + * Allocate a new output stream for use, defined by the output buffer width, + * height, target, and possibly the pixel format. Returns the new stream's + * ID, gralloc usage flags, minimum queue buffer count, and possibly the + * pixel format, on success. Error conditions: + * + * - Requesting a width/height/format combination not listed as + * supported by the sensor's static characteristics + * + * - Asking for too many streams of a given format type (2 bayer raw + * streams, for example). 
+ * + * Input parameters: + * + * - width, height, format: Specification for the buffers to be sent through + * this stream. Format is a value from the HAL_PIXEL_FORMAT_* list. If + * HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED is used, then the platform + * gralloc module will select a format based on the usage flags provided + * by the camera HAL and the consumer of the stream. The camera HAL should + * inspect the buffers handed to it in the register_stream_buffers call to + * obtain the implementation-specific format if necessary. + * + * - stream_ops: A structure of function pointers for obtaining and queuing + * up buffers for this stream. The underlying stream will be configured + * based on the usage and max_buffers outputs. The methods in this + * structure may not be called until after allocate_stream returns. + * + * Output parameters: + * + * - stream_id: An unsigned integer identifying this stream. This value is + * used in incoming requests to identify the stream, and in releasing the + * stream. + * + * - usage: The gralloc usage mask needed by the HAL device for producing + * the requested type of data. This is used in allocating new gralloc + * buffers for the stream buffer queue. + * + * - max_buffers: The maximum number of buffers the HAL device may need to + * have dequeued at the same time. The device may not dequeue more buffers + * than this value at the same time. + * */ - int (*get_reprocess_stream_ops)(struct camera2_device *, - camera2_stream_ops_t **stream_ops); + int (*allocate_stream)( + const struct camera2_device *, + // inputs + uint32_t width, + uint32_t height, + int format, + const camera2_stream_ops_t *stream_ops, + // outputs + uint32_t *stream_id, + uint32_t *format_actual, // IGNORED, will be removed + uint32_t *usage, + uint32_t *max_buffers); + + /** + * Register buffers for a given stream. This is called after a successful + * allocate_stream call, and before the first request referencing the stream + * is enqueued. 
This method is intended to allow the HAL device to map or + * otherwise prepare the buffers for later use. num_buffers is guaranteed to + * be at least max_buffers (from allocate_stream), but may be larger. The + * buffers will already be locked for use. At the end of the call, all the + * buffers must be ready to be returned to the queue. If the stream format + * was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the camera HAL should + * inspect the passed-in buffers here to determine any platform-private + * pixel format information. + */ + int (*register_stream_buffers)( + const struct camera2_device *, + uint32_t stream_id, + int num_buffers, + buffer_handle_t *buffers); + + /** + * Release a stream. Returns an error if called when get_in_progress_count + * is non-zero, or if the stream id is invalid. + */ + int (*release_stream)( + const struct camera2_device *, + uint32_t stream_id); /** - * Get the number of streams that can be simultaneously allocated. - * A request may include any allocated pipeline for its output, without - * causing a substantial delay in frame production. + * allocate_reprocess_stream: + * + * Allocate a new input stream for use, defined by the output buffer width, + * height, and the pixel format. Returns the new stream's ID, gralloc usage + * flags, and required simultaneously acquirable buffer count, on + * success. Error conditions: + * + * - Requesting a width/height/format combination not listed as + * supported by the sensor's static characteristics + * + * - Asking for too many reprocessing streams to be configured at once. + * + * Input parameters: + * + * - width, height, format: Specification for the buffers to be sent through + * this stream. Format must be a value from the HAL_PIXEL_FORMAT_* list. + * + * - reprocess_stream_ops: A structure of function pointers for acquiring + * and releasing buffers for this stream. The underlying stream will be + * configured based on the usage and max_buffers outputs. 
+ * + * Output parameters: + * + * - stream_id: An unsigned integer identifying this stream. This value is + * used in incoming requests to identify the stream, and in releasing the + * stream. These ids are numbered separately from the input stream ids. + * + * - consumer_usage: The gralloc usage mask needed by the HAL device for + * consuming the requested type of data. This is used in allocating new + * gralloc buffers for the stream buffer queue. + * + * - max_buffers: The maximum number of buffers the HAL device may need to + * have acquired at the same time. The device may not have more buffers + * acquired at the same time than this value. + * */ - int (*get_stream_slot_count)(struct camera2_device *); + int (*allocate_reprocess_stream)(const struct camera2_device *, + uint32_t width, + uint32_t height, + uint32_t format, + const camera2_stream_in_ops_t *reprocess_stream_ops, + // outputs + uint32_t *stream_id, + uint32_t *consumer_usage, + uint32_t *max_buffers); /** - * Allocate a new stream for use. Requires specifying which pipeline slot - * to use. Specifies the buffer width, height, and format. + * allocate_reprocess_stream_from_stream: + * + * Allocate a new input stream for use, which will use the buffers allocated + * for an existing output stream. That is, after the HAL enqueues a buffer + * onto the output stream, it may see that same buffer handed to it from + * this input reprocessing stream. After the HAL releases the buffer back to + * the reprocessing stream, it will be returned to the output queue for + * reuse. + * * Error conditions: - * - Allocating an already-allocated slot without first releasing it - * - Requesting a width/height/format combination not listed as supported - * - Requesting a pipeline slot >= pipeline slot count. + * + * - Using an output stream of unsuitable size/format for the basis of the + * reprocessing stream. + * + * - Attempting to allocatee too many reprocessing streams at once. 
+ * + * Input parameters: + * + * - output_stream_id: The ID of an existing output stream which has + * a size and format suitable for reprocessing. + * + * - reprocess_stream_ops: A structure of function pointers for acquiring + * and releasing buffers for this stream. The underlying stream will use + * the same graphics buffer handles as the output stream uses. + * + * Output parameters: + * + * - stream_id: An unsigned integer identifying this stream. This value is + * used in incoming requests to identify the stream, and in releasing the + * stream. These ids are numbered separately from the input stream ids. + * + * The HAL client must always release the reprocessing stream before it + * releases the output stream it is based on. + * */ - int (*allocate_stream)( - struct camera2_device *, - uint32_t stream_slot, - uint32_t width, - uint32_t height, - uint32_t format, - camera2_stream_ops_t *camera2_stream_ops); + int (*allocate_reprocess_stream_from_stream)(const struct camera2_device *, + uint32_t output_stream_id, + const camera2_stream_in_ops_t *reprocess_stream_ops, + // outputs + uint32_t *stream_id); /** - * Release a stream. Returns an error if called when - * get_in_progress_count is non-zero, or if the pipeline slot is not - * allocated. + * Release a reprocessing stream. Returns an error if called when + * get_in_progress_count is non-zero, or if the stream id is not + * valid. + */ + int (*release_reprocess_stream)( + const struct camera2_device *, + uint32_t stream_id); + + /********************************************************************** + * Miscellaneous methods */ - int (*release_stream)( - struct camera2_device *, - uint32_t stream_slot); /** - * Get methods to query for vendor extension metadata tag infomation. May - * set ops to NULL if no vendor extension tags are defined. + * Trigger asynchronous activity. This is used for triggering special + * behaviors of the camera 3A routines when they are in use. 
See the + * documentation for CAMERA2_TRIGGER_* above for details of the trigger ids + * and their arguments. */ - int (*get_metadata_vendor_tag_ops)(struct camera2_device*, - vendor_tag_query_ops_t **ops); + int (*trigger_action)(const struct camera2_device *, + uint32_t trigger_id, + int32_t ext1, + int32_t ext2); + + /** + * Notification callback setup + */ + int (*set_notify_callback)(const struct camera2_device *, + camera2_notify_callback notify_cb, + void *user); /** - * Release the camera hardware. Requests that are in flight will be - * canceled. No further buffers will be pushed into any allocated pipelines - * once this call returns. + * Get methods to query for vendor extension metadata tag infomation. May + * set ops to NULL if no vendor extension tags are defined. */ - void (*release)(struct camera2_device *); + int (*get_metadata_vendor_tag_ops)(const struct camera2_device*, + vendor_tag_query_ops_t **ops); /** * Dump state of the camera hardware */ - int (*dump)(struct camera2_device *, int fd); + int (*dump)(const struct camera2_device *, int fd); } camera2_device_ops_t; +/********************************************************************** + * + * Camera device definition + * + */ typedef struct camera2_device { /** * common.version must equal CAMERA_DEVICE_API_VERSION_2_0 to identify diff --git a/include/hardware/camera_common.h b/include/hardware/camera_common.h index 5459b6c..5697bda 100644 --- a/include/hardware/camera_common.h +++ b/include/hardware/camera_common.h @@ -135,7 +135,9 @@ struct camera_info { /** * The camera's fixed characteristics, which include all camera metadata in - * the android.*.info.* sections. + * the android.*.info.* sections. This should be a sorted metadata buffer, + * and may not be modified or freed by the caller. The pointer should remain + * valid for the lifetime of the camera module. 
* * Version information (based on camera_module_t.common.module_api_version): * @@ -150,7 +152,7 @@ struct camera_info { * otherwise. * */ - camera_metadata_t *static_camera_characteristics; + const camera_metadata_t *static_camera_characteristics; }; typedef struct camera_module { diff --git a/include/hardware/fb.h b/include/hardware/fb.h index 0f716f2..135e4aa 100644 --- a/include/hardware/fb.h +++ b/include/hardware/fb.h @@ -64,14 +64,10 @@ typedef struct framebuffer_device_t { /* max swap interval supported by this framebuffer */ const int maxSwapInterval; -#ifdef QCOM_HARDWARE /* Number of framebuffers supported*/ const int numFramebuffers; int reserved[7]; -#else - int reserved[8]; -#endif /* * requests a specific swap-interval (same definition than EGL) diff --git a/include/hardware/gralloc.h b/include/hardware/gralloc.h index 3357153..f5e1b99 100644 --- a/include/hardware/gralloc.h +++ b/include/hardware/gralloc.h @@ -76,8 +76,16 @@ enum { GRALLOC_USAGE_HW_FB = 0x00001000, /* buffer will be used with the HW video encoder */ GRALLOC_USAGE_HW_VIDEO_ENCODER = 0x00010000, + /* buffer will be written by the HW camera pipeline */ + GRALLOC_USAGE_HW_CAMERA_WRITE = 0x00020000, + /* buffer will be read by the HW camera pipeline */ + GRALLOC_USAGE_HW_CAMERA_READ = 0x00040000, + /* buffer will be used as part of zero-shutter-lag queue */ + GRALLOC_USAGE_HW_CAMERA_ZSL = 0x00060000, + /* mask for the camera access values */ + GRALLOC_USAGE_HW_CAMERA_MASK = 0x00060000, /* mask for the software usage bit-mask */ - GRALLOC_USAGE_HW_MASK = 0x00011F00, + GRALLOC_USAGE_HW_MASK = 0x00071F00, /* buffer should be displayed full-screen on an external display when * possible diff --git a/include/hardware/hardware.h b/include/hardware/hardware.h index d00cbe2..37a5a15 100644 --- a/include/hardware/hardware.h +++ b/include/hardware/hardware.h @@ -51,6 +51,12 @@ __BEGIN_DECLS #define HARDWARE_MAKE_API_VERSION(maj,min) \ ((((maj) & 0xff) << 8) | ((min) & 0xff)) +#define 
HARDWARE_MAKE_API_VERSION_2(maj,min,hdr) \ + ((((maj) & 0xff) << 24) | (((min) & 0xff) << 16) | ((hdr) & 0xffff)) +#define HARDWARE_API_VERSION_2_MAJ_MIN_MASK 0xffff0000 +#define HARDWARE_API_VERSION_2_HEADER_MASK 0x0000ffff + + /* * The current HAL API version. * @@ -74,11 +80,13 @@ __BEGIN_DECLS * Use this macro to set the hw_module_t.module_api_version field. */ #define HARDWARE_MODULE_API_VERSION(maj,min) HARDWARE_MAKE_API_VERSION(maj,min) +#define HARDWARE_MODULE_API_VERSION_2(maj,min,hdr) HARDWARE_MAKE_API_VERSION_2(maj,min,hdr) /* * Use this macro to set the hw_device_t.version field */ #define HARDWARE_DEVICE_API_VERSION(maj,min) HARDWARE_MAKE_API_VERSION(maj,min) +#define HARDWARE_DEVICE_API_VERSION_2(maj,min,hdr) HARDWARE_MAKE_API_VERSION_2(maj,min,hdr) struct hw_module_t; struct hw_module_methods_t; diff --git a/include/hardware/hwcomposer.h b/include/hardware/hwcomposer.h index e30c736..ac3aa42 100644 --- a/include/hardware/hwcomposer.h +++ b/include/hardware/hwcomposer.h @@ -30,11 +30,25 @@ __BEGIN_DECLS /*****************************************************************************/ -// for compatibility +/* for compatibility */ #define HWC_MODULE_API_VERSION HWC_MODULE_API_VERSION_0_1 #define HWC_DEVICE_API_VERSION HWC_DEVICE_API_VERSION_0_1 #define HWC_API_VERSION HWC_DEVICE_API_VERSION +/* Users of this header can define HWC_REMOVE_DEPRECATED_VERSIONS to test that + * they still work with just the current version declared, before the + * deprecated versions are actually removed. + * + * To find code that still depends on the old versions, set the #define to 1 + * here. Code that explicitly sets it to zero (rather than simply not defining + * it) will still see the old versions. 
+ */ +#if !defined(HWC_REMOVE_DEPRECATED_VERSIONS) +#define HWC_REMOVE_DEPRECATED_VERSIONS 0 +#endif + +/*****************************************************************************/ + /** * The id of this module */ @@ -45,44 +59,6 @@ __BEGIN_DECLS */ #define HWC_HARDWARE_COMPOSER "composer" - -struct hwc_composer_device; - -/* - * availability: HWC_DEVICE_API_VERSION_0_3 - * - * struct hwc_methods cannot be embedded in other structures as - * sizeof(struct hwc_methods) cannot be relied upon. - * - */ -typedef struct hwc_methods { - - /************************************************************************* - * HWC_DEVICE_API_VERSION_0_3 - *************************************************************************/ - - /* - * eventControl(..., event, value) - * Enables or disables h/w composer events. - * - * eventControl can be called from any thread and takes effect - * immediately. - * - * Supported events are: - * HWC_EVENT_VSYNC - * HWC_EVENT_ORIENTATION - * - * returns -EINVAL if the "event" parameter is not one of the value above - * or if the "value" parameter is not 0 or 1 for HWC_EVENT_VSYNC. - * and if the "value" parameter is not going to be just 0 or 1 for - * HWC_EVENT_ORIENTATION - */ - - int (*eventControl)( - struct hwc_composer_device* dev, int event, int value); - -} hwc_methods_t; - typedef struct hwc_rect { int left; int top; @@ -102,22 +78,31 @@ typedef struct hwc_color { uint8_t a; } hwc_color_t; -typedef struct hwc_layer { +typedef struct hwc_layer_1 { /* - * initially set to HWC_FRAMEBUFFER or HWC_BACKGROUND. + * Initially set to HWC_FRAMEBUFFER, HWC_BACKGROUND, or + * HWC_FRAMEBUFFER_TARGET. + * * HWC_FRAMEBUFFER - * indicates the layer will be drawn into the framebuffer - * using OpenGL ES. - * The HWC can toggle this value to HWC_OVERLAY, to indicate - * it will handle the layer. + * Indicates the layer will be drawn into the framebuffer + * using OpenGL ES. 
The HWC can toggle this value to HWC_OVERLAY to + * indicate it will handle the layer. * * HWC_BACKGROUND - * indicates this is a special "background" layer. The only valid - * field is backgroundColor. HWC_BACKGROUND can only be used with - * HWC_API_VERSION >= 0.2 - * The HWC can toggle this value to HWC_FRAMEBUFFER, to indicate - * it CANNOT handle the background color + * Indicates this is a special "background" layer. The only valid field + * is backgroundColor. The HWC can toggle this value to HWC_FRAMEBUFFER + * to indicate it CANNOT handle the background color. + * + * HWC_FRAMEBUFFER_TARGET + * Indicates this layer is the framebuffer surface used as the target of + * OpenGL ES composition. If the HWC sets all other layers to HWC_OVERLAY + * or HWC_BACKGROUND, then no OpenGL ES composition will be done, and + * this layer should be ignored during set(). * + * This flag (and the framebuffer surface layer) will only be used if the + * HWC version is HWC_DEVICE_API_VERSION_1_1 or higher. In older versions, + * the OpenGL ES target surface is communicated by the (dpy, sur) fields + * in hwc_compositor_device_1_t. */ int32_t compositionType; @@ -166,42 +151,137 @@ typedef struct hwc_layer { * The visible region INCLUDES areas overlapped by a translucent layer. */ hwc_region_t visibleRegionScreen; + + /* Sync fence object that will be signaled when the buffer's + * contents are available. May be -1 if the contents are already + * available. This field is only valid during set(), and should be + * ignored during prepare(). The set() call must not wait for the + * fence to be signaled before returning, but the HWC must wait for + * all buffers to be signaled before reading from them. + * + * HWC_FRAMEBUFFER layers will never have an acquire fence, since + * reads from them are complete before the framebuffer is ready for + * display. + * + * The HWC takes ownership of the acquireFenceFd and is responsible + * for closing it when no longer needed. 
+ */ + int acquireFenceFd; + + /* During set() the HWC must set this field to a file descriptor for + * a sync fence object that will signal after the HWC has finished + * reading from the buffer. The field is ignored by prepare(). Each + * layer should have a unique file descriptor, even if more than one + * refer to the same underlying fence object; this allows each to be + * closed independently. + * + * If buffer reads can complete at significantly different times, + * then using independent fences is preferred. For example, if the + * HWC handles some layers with a blit engine and others with + * overlays, then the blit layers can be reused immediately after + * the blit completes, but the overlay layers can't be reused until + * a subsequent frame has been displayed. + * + * Since HWC doesn't read from HWC_FRAMEBUFFER layers, it shouldn't + * produce a release fence for them. The releaseFenceFd will be -1 + * for these layers when set() is called. + * + * The HWC client taks ownership of the releaseFenceFd and is + * responsible for closing it when no longer needed. + */ + int releaseFenceFd; }; }; -} hwc_layer_t; + /* Allow for expansion w/o breaking binary compatibility. + * Pad layer to 96 bytes, assuming 32-bit pointers. + */ + int32_t reserved[24 - 18]; + +} hwc_layer_1_t; + +/* This represents a display, typically an EGLDisplay object */ +typedef void* hwc_display_t; + +/* This represents a surface, typically an EGLSurface object */ +typedef void* hwc_surface_t; /* - * hwc_layer_list_t::flags values + * hwc_display_contents_1_t::flags values */ enum { /* * HWC_GEOMETRY_CHANGED is set by SurfaceFlinger to indicate that the list - * passed to (*prepare)() has changed by more than just the buffer handles. + * passed to (*prepare)() has changed by more than just the buffer handles + * and acquire fences. */ HWC_GEOMETRY_CHANGED = 0x00000001, }; /* - * List of layers. - * The handle members of hwLayers elements must be unique. 
+ * Description of the contents to output on a display. + * + * This is the top-level structure passed to the prepare and set calls to + * negotiate and commit the composition of a display image. */ -typedef struct hwc_layer_list { - uint32_t flags; - size_t numHwLayers; - hwc_layer_t hwLayers[0]; -} hwc_layer_list_t; +typedef struct hwc_display_contents_1 { + /* File descriptor referring to a Sync HAL fence object which will signal + * when this composition is retired. For a physical display, a composition + * is retired when it has been replaced on-screen by a subsequent set. For + * a virtual display, the composition is retired when the writes to + * outputBuffer are complete and can be read. The fence object is created + * and returned by the set call; this field will be -1 on entry to prepare + * and set. SurfaceFlinger will close the returned file descriptor. + */ + int retireFenceFd; -/* This represents a display, typically an EGLDisplay object */ -typedef void* hwc_display_t; + union { + /* Fields only relevant for HWC_DEVICE_VERSION_1_0. */ + struct { + /* (dpy, sur) is the target of SurfaceFlinger's OpenGL ES + * composition for HWC_DEVICE_VERSION_1_0. They aren't relevant to + * prepare. The set call should commit this surface atomically to + * the display along with any overlay layers. + */ + hwc_display_t dpy; + hwc_surface_t sur; + }; -/* This represents a surface, typically an EGLSurface object */ -typedef void* hwc_surface_t; + /* Fields only relevant for HWC_DEVICE_VERSION_1_2 and later. */ + struct { + /* outbuf is the buffer that receives the composed image for + * virtual displays. Writes to the outbuf must wait until + * outbufAcquireFenceFd signals. A fence that will signal when + * writes to outbuf are complete should be returned in + * retireFenceFd. + * + * For physical displays, outbuf will be NULL. + */ + buffer_handle_t outbuf; + + /* File descriptor for a fence that will signal when outbuf is + * ready to be written. 
The h/w composer is responsible for closing + * this when no longer needed. + * + * Will be -1 whenever outbuf is NULL, or when the outbuf can be + * written immediately. + */ + int outbufAcquireFenceFd; + }; + }; + /* List of layers that will be composed on the display. The buffer handles + * in the list will be unique. If numHwLayers is 0, all composition will be + * performed by SurfaceFlinger. + */ + uint32_t flags; + size_t numHwLayers; + hwc_layer_1_t hwLayers[0]; + +} hwc_display_contents_1_t; /* see hwc_composer_device::registerProcs() - * Any of the callbacks can be NULL, in which case the corresponding - * functionality is not supported. + * All of the callbacks are required and non-NULL unless otherwise noted. */ typedef struct hwc_procs { /* @@ -213,13 +293,14 @@ typedef struct hwc_procs { * it is safe to call invalidate() from any of hwc_composer_device * hooks, unless noted otherwise. */ - void (*invalidate)(struct hwc_procs* procs); + void (*invalidate)(const struct hwc_procs* procs); /* * (*vsync)() is called by the h/w composer HAL when a vsync event is - * received and HWC_EVENT_VSYNC is enabled (see: hwc_event_control). + * received and HWC_EVENT_VSYNC is enabled on a display + * (see: hwc_event_control). * - * the "zero" parameter must always be 0. + * the "disp" parameter indicates which display the vsync event is for. * the "timestamp" parameter is the system monotonic clock timestamp in * nanosecond of when the vsync event happened. * @@ -233,9 +314,31 @@ typedef struct hwc_procs { * hwc_composer_device.set(..., 0, 0, 0) (screen off). The implementation * can either stop or continue to process VSYNC events, but must not * crash or cause other problems. + */ + void (*vsync)(const struct hwc_procs* procs, int disp, int64_t timestamp); + + /* + * (*hotplug)() is called by the h/w composer HAL when a display is + * connected or disconnected. The PRIMARY display is always connected and + * the hotplug callback should not be called for it. 
* + * The disp parameter indicates which display type this event is for. + * The connected parameter indicates whether the display has just been + * connected (1) or disconnected (0). + * + * The hotplug() callback may call back into the h/w composer on the same + * thread to query refresh rate and dpi for the display. Additionally, + * other threads may be calling into the h/w composer while the callback + * is in progress. + * + * The h/w composer must serialize calls to the hotplug callback; only + * one thread may call it at a time. + * + * This callback will be NULL if the h/w composer is using + * HWC_DEVICE_API_VERSION_1_0. */ - void (*vsync)(struct hwc_procs* procs, int zero, int64_t timestamp); + void (*hotplug)(const struct hwc_procs* procs, int disp, int connected); + } hwc_procs_t; @@ -245,8 +348,7 @@ typedef struct hwc_module { struct hw_module_t common; } hwc_module_t; - -typedef struct hwc_composer_device { +typedef struct hwc_composer_device_1 { struct hw_device_t common; /* @@ -255,134 +357,204 @@ typedef struct hwc_composer_device { * * (*prepare)() can be called more than once, the last call prevails. * - * The HWC responds by setting the compositionType field to either - * HWC_FRAMEBUFFER or HWC_OVERLAY. In the former case, the composition for - * this layer is handled by SurfaceFlinger with OpenGL ES, in the later - * case, the HWC will have to handle this layer's composition. + * The HWC responds by setting the compositionType field in each layer to + * either HWC_FRAMEBUFFER or HWC_OVERLAY. In the former case, the + * composition for the layer is handled by SurfaceFlinger with OpenGL ES, + * in the later case, the HWC will have to handle the layer's composition. * * (*prepare)() is called with HWC_GEOMETRY_CHANGED to indicate that the * list's geometry has changed, that is, when more than just the buffer's * handles have been updated. Typically this happens (but is not limited to) * when a window is added, removed, resized or moved. 
* - * a NULL list parameter or a numHwLayers of zero indicates that the - * entire composition will be handled by SurfaceFlinger with OpenGL ES. + * For HWC 1.0, numDisplays will always be one, and displays[0] will be + * non-NULL. + * + * For HWC 1.1, numDisplays will always be HWC_NUM_DISPLAY_TYPES. Entries + * for unsupported or disabled/disconnected display types will be NULL. + * + * For HWC 1.2 and later, numDisplays will be HWC_NUM_DISPLAY_TYPES or more. + * The extra entries correspond to enabled virtual displays, and will be + * non-NULL. In HWC 1.2, support for one virtual display is required, and + * no more than one will be used. Future HWC versions might require more. * * returns: 0 on success. An negative error code on error. If an error is * returned, SurfaceFlinger will assume that none of the layer will be * handled by the HWC. */ - int (*prepare)(struct hwc_composer_device *dev, hwc_layer_list_t* list); - + int (*prepare)(struct hwc_composer_device_1 *dev, + size_t numDisplays, hwc_display_contents_1_t** displays); /* * (*set)() is used in place of eglSwapBuffers(), and assumes the same * functionality, except it also commits the work list atomically with * the actual eglSwapBuffers(). * - * The list parameter is guaranteed to be the same as the one returned - * from the last call to (*prepare)(). + * The layer lists are guaranteed to be the same as the ones returned from + * the last call to (*prepare)(). * - * When this call returns the caller assumes that: + * When this call returns the caller assumes that the displays will be + * updated in the near future with the content of their work lists, without + * artifacts during the transition from the previous frame. * - * - the display will be updated in the near future with the content - * of the work list, without artifacts during the transition from the - * previous frame. + * A display with zero layers indicates that the entire composition has + * been handled by SurfaceFlinger with OpenGL ES. 
In this case, (*set)() + * behaves just like eglSwapBuffers(). * - * - all objects are available for immediate access or destruction, in - * particular, hwc_region_t::rects data and hwc_layer_t::layer's buffer. - * Note that this means that immediately accessing (potentially from a - * different process) a buffer used in this call will not result in - * screen corruption, the driver must apply proper synchronization or - * scheduling (eg: block the caller, such as gralloc_module_t::lock(), - * OpenGL ES, Camera, Codecs, etc..., or schedule the caller's work - * after the buffer is freed from the actual composition). + * For HWC 1.0, numDisplays will always be one, and displays[0] will be + * non-NULL. * - * a NULL list parameter or a numHwLayers of zero indicates that the - * entire composition has been handled by SurfaceFlinger with OpenGL ES. - * In this case, (*set)() behaves just like eglSwapBuffers(). + * For HWC 1.1, numDisplays will always be HWC_NUM_DISPLAY_TYPES. Entries + * for unsupported or disabled/disconnected display types will be NULL. * - * dpy, sur, and list are set to NULL to indicate that the screen is - * turning off. This happens WITHOUT prepare() being called first. - * This is a good time to free h/w resources and/or power - * the relevant h/w blocks down. + * For HWC 1.2 and later, numDisplays will be HWC_NUM_DISPLAY_TYPES or more. + * The extra entries correspond to enabled virtual displays, and will be + * non-NULL. In HWC 1.2, support for one virtual display is required, and + * no more than one will be used. Future HWC versions might require more. * - * IMPORTANT NOTE: there is an implicit layer containing opaque black - * pixels behind all the layers in the list. - * It is the responsibility of the hwcomposer module to make - * sure black pixels are output (or blended from). + * IMPORTANT NOTE: There is an implicit layer containing opaque black + * pixels behind all the layers in the list. 
It is the responsibility of + * the hwcomposer module to make sure black pixels are output (or blended + * from). * - * returns: 0 on success. An negative error code on error: - * HWC_EGL_ERROR: eglGetError() will provide the proper error code - * Another code for non EGL errors. + * IMPORTANT NOTE: In the event of an error this call *MUST* still cause + * any fences returned in the previous call to set to eventually become + * signaled. The caller may have already issued wait commands on these + * fences, and having set return without causing those fences to signal + * will likely result in a deadlock. * + * returns: 0 on success. A negative error code on error: + * HWC_EGL_ERROR: eglGetError() will provide the proper error code (only + * allowed prior to HWComposer 1.1) + * Another code for non EGL errors. */ - int (*set)(struct hwc_composer_device *dev, - hwc_display_t dpy, - hwc_surface_t sur, - hwc_layer_list_t* list); + int (*set)(struct hwc_composer_device_1 *dev, + size_t numDisplays, hwc_display_contents_1_t** displays); + /* - * This field is OPTIONAL and can be NULL. + * eventControl(..., event, enabled) + * Enables or disables h/w composer events for a display. * - * If non NULL it will be called by SurfaceFlinger on dumpsys + * eventControl can be called from any thread and takes effect + * immediately. + * + * Supported events are: + * HWC_EVENT_VSYNC + * + * returns -EINVAL if the "event" parameter is not one of the value above + * or if the "enabled" parameter is not 0 or 1. */ - void (*dump)(struct hwc_composer_device* dev, char *buff, int buff_len); + int (*eventControl)(struct hwc_composer_device_1* dev, int disp, + int event, int enabled); /* - * This field is OPTIONAL and can be NULL. + * blank(..., blank) + * Blanks or unblanks a display's screen. * - * (*registerProcs)() registers a set of callbacks the h/w composer HAL - * can later use. It is FORBIDDEN to call any of the callbacks from - * within registerProcs(). 
registerProcs() must save the hwc_procs_t pointer - * which is needed when calling a registered callback. - * Each call to registerProcs replaces the previous set of callbacks. - * registerProcs is called with NULL to unregister all callbacks. + * Turns the screen off when blank is nonzero, on when blank is zero. + * Multiple sequential calls with the same blank value must be supported. + * The screen state transition must be be complete when the function + * returns. * - * Any of the callbacks can be NULL, in which case the corresponding - * functionality is not supported. + * returns 0 on success, negative on error. */ - void (*registerProcs)(struct hwc_composer_device* dev, - hwc_procs_t const* procs); + int (*blank)(struct hwc_composer_device_1* dev, int disp, int blank); /* - * This field is OPTIONAL and can be NULL. - * availability: HWC_DEVICE_API_VERSION_0_2 - * * Used to retrieve information about the h/w composer * * Returns 0 on success or -errno on error. */ - int (*query)(struct hwc_composer_device* dev, int what, int* value); + int (*query)(struct hwc_composer_device_1* dev, int what, int* value); /* - * Reserved for future use. Must be NULL. + * (*registerProcs)() registers callbacks that the h/w composer HAL can + * later use. It will be called immediately after the composer device is + * opened with non-NULL procs. It is FORBIDDEN to call any of the callbacks + * from within registerProcs(). registerProcs() must save the hwc_procs_t + * pointer which is needed when calling a registered callback. */ - void* reserved_proc[4]; + void (*registerProcs)(struct hwc_composer_device_1* dev, + hwc_procs_t const* procs); /* * This field is OPTIONAL and can be NULL. 
- * availability: HWC_DEVICE_API_VERSION_0_3 + * + * If non NULL it will be called by SurfaceFlinger on dumpsys + */ + void (*dump)(struct hwc_composer_device_1* dev, char *buff, int buff_len); + + /* + * (*getDisplayConfigs)() returns handles for the configurations available + * on the connected display. These handles must remain valid as long as the + * display is connected. + * + * Configuration handles are written to configs. The number of entries + * allocated by the caller is passed in *numConfigs; getDisplayConfigs must + * not try to write more than this number of config handles. On return, the + * total number of configurations available for the display is returned in + * *numConfigs. If *numConfigs is zero on entry, then configs may be NULL. + * + * HWC_DEVICE_API_VERSION_1_1 does not provide a way to choose a config. + * For displays that support multiple configurations, the h/w composer + * implementation should choose one and report it as the first config in + * the list. Reporting the not-chosen configs is not required. + * + * Returns 0 on success or -errno on error. If disp is a hotpluggable + * display type and no display is connected, an error should be returned. + * + * This field is REQUIRED for HWC_DEVICE_API_VERSION_1_1 and later. + * It should be NULL for previous versions. + */ + int (*getDisplayConfigs)(struct hwc_composer_device_1* dev, int disp, + uint32_t* configs, size_t* numConfigs); + + /* + * (*getDisplayAttributes)() returns attributes for a specific config of a + * connected display. The config parameter is one of the config handles + * returned by getDisplayConfigs. + * + * The list of attributes to return is provided in the attributes + * parameter, terminated by HWC_DISPLAY_NO_ATTRIBUTE. The value for each + * requested attribute is written in order to the values array. The + * HWC_DISPLAY_NO_ATTRIBUTE attribute does not have a value, so the values + * array will have one less value than the attributes array. 
+ * + * This field is REQUIRED for HWC_DEVICE_API_VERSION_1_1 and later. + * It should be NULL for previous versions. + * + * If disp is a hotpluggable display type and no display is connected, + * or if config is not a valid configuration for the display, a negative + * value should be returned. */ - hwc_methods_t const *methods; + int (*getDisplayAttributes)(struct hwc_composer_device_1* dev, int disp, + uint32_t config, const uint32_t* attributes, int32_t* values); -} hwc_composer_device_t; + /* + * Reserved for future use. Must be NULL. + */ + void* reserved_proc[4]; +} hwc_composer_device_1_t; /** convenience API for opening and closing a device */ -static inline int hwc_open(const struct hw_module_t* module, - hwc_composer_device_t** device) { +static inline int hwc_open_1(const struct hw_module_t* module, + hwc_composer_device_1_t** device) { return module->methods->open(module, HWC_HARDWARE_COMPOSER, (struct hw_device_t**)device); } -static inline int hwc_close(hwc_composer_device_t* device) { +static inline int hwc_close_1(hwc_composer_device_1_t* device) { return device->common.close(&device->common); } - /*****************************************************************************/ +#if !HWC_REMOVE_DEPRECATED_VERSIONS +#include <hardware/hwcomposer_v0.h> +#endif + __END_DECLS #endif /* ANDROID_INCLUDE_HARDWARE_HWCOMPOSER_H */ diff --git a/include/hardware/hwcomposer_defs.h b/include/hardware/hwcomposer_defs.h index 5e2f5ff..b23f08a 100644 --- a/include/hardware/hwcomposer_defs.h +++ b/include/hardware/hwcomposer_defs.h @@ -28,12 +28,16 @@ __BEGIN_DECLS /*****************************************************************************/ -#define HWC_MODULE_API_VERSION_0_1 HARDWARE_MODULE_API_VERSION(0, 1) +#define HWC_HEADER_VERSION 1 -#define HWC_DEVICE_API_VERSION_0_1 HARDWARE_DEVICE_API_VERSION(0, 1) -#define HWC_DEVICE_API_VERSION_0_2 HARDWARE_DEVICE_API_VERSION(0, 2) -#define HWC_DEVICE_API_VERSION_0_3 HARDWARE_DEVICE_API_VERSION(0, 3) +#define 
HWC_MODULE_API_VERSION_0_1 HARDWARE_MODULE_API_VERSION(0, 1) +#define HWC_DEVICE_API_VERSION_0_1 HARDWARE_DEVICE_API_VERSION_2(0, 1, HWC_HEADER_VERSION) +#define HWC_DEVICE_API_VERSION_0_2 HARDWARE_DEVICE_API_VERSION_2(0, 2, HWC_HEADER_VERSION) +#define HWC_DEVICE_API_VERSION_0_3 HARDWARE_DEVICE_API_VERSION_2(0, 3, HWC_HEADER_VERSION) +#define HWC_DEVICE_API_VERSION_1_0 HARDWARE_DEVICE_API_VERSION_2(1, 0, HWC_HEADER_VERSION) +#define HWC_DEVICE_API_VERSION_1_1 HARDWARE_DEVICE_API_VERSION_2(1, 1, HWC_HEADER_VERSION) +#define HWC_DEVICE_API_VERSION_1_2 HARDWARE_DEVICE_API_VERSION_2(1, 2, HWC_HEADER_VERSION) enum { /* hwc_composer_device_t::set failed in EGL */ @@ -89,6 +93,10 @@ enum { /* this is the background layer. it's used to set the background color. * there is only a single background layer */ HWC_BACKGROUND = 2, + + /* this layer holds the result of compositing the HWC_FRAMEBUFFER layers. + * Added in HWC_DEVICE_API_VERSION_1_1. */ + HWC_FRAMEBUFFER_TARGET = 3, }; /* @@ -124,16 +132,49 @@ enum { /* attributes queriable with query() */ enum { /* - * availability: HWC_DEVICE_API_VERSION_0_2 - * must return 1 if the background layer is supported, 0 otherwise + * Availability: HWC_DEVICE_API_VERSION_0_2 + * Must return 1 if the background layer is supported, 0 otherwise. */ HWC_BACKGROUND_LAYER_SUPPORTED = 0, /* - * availability: HWC_DEVICE_API_VERSION_0_3 - * returns the vsync period in nanosecond + * Availability: HWC_DEVICE_API_VERSION_0_3 + * Returns the vsync period in nanoseconds. + * + * This query is not used for HWC_DEVICE_API_VERSION_1_1 and later. + * Instead, the per-display attribute HWC_DISPLAY_VSYNC_PERIOD is used. */ HWC_VSYNC_PERIOD = 1, + + /* + * Availability: HWC_DEVICE_API_VERSION_1_1 + * Returns a mask of supported display types. 
+ */ + HWC_DISPLAY_TYPES_SUPPORTED = 2, +}; + +/* display attributes returned by getDisplayAttributes() */ +enum { + /* Indicates the end of an attribute list */ + HWC_DISPLAY_NO_ATTRIBUTE = 0, + + /* The vsync period in nanoseconds */ + HWC_DISPLAY_VSYNC_PERIOD = 1, + + /* The number of pixels in the horizontal and vertical directions. */ + HWC_DISPLAY_WIDTH = 2, + HWC_DISPLAY_HEIGHT = 3, + + /* The number of pixels per thousand inches of this configuration. + * + * Scaling DPI by 1000 allows it to be stored in an int without losing + * too much precision. + * + * If the DPI for a configuration is unavailable or the HWC implementation + * considers it unreliable, it should set these attributes to zero. + */ + HWC_DISPLAY_DPI_X = 4, + HWC_DISPLAY_DPI_Y = 5, }; /* Allowed events for hwc_methods::eventControl() */ @@ -142,6 +183,18 @@ enum { HWC_EVENT_ORIENTATION // To notify HWC about the device orientation }; +/* Display types and associated mask bits. */ +enum { + HWC_DISPLAY_PRIMARY = 0, + HWC_DISPLAY_EXTERNAL = 1, // HDMI, DP, etc. + HWC_NUM_DISPLAY_TYPES +}; + +enum { + HWC_DISPLAY_PRIMARY_BIT = 1 << HWC_DISPLAY_PRIMARY, + HWC_DISPLAY_EXTERNAL_BIT = 1 << HWC_DISPLAY_EXTERNAL, +}; + /*****************************************************************************/ __END_DECLS diff --git a/include/hardware/hwcomposer_v0.h b/include/hardware/hwcomposer_v0.h new file mode 100644 index 0000000..473819b --- /dev/null +++ b/include/hardware/hwcomposer_v0.h @@ -0,0 +1,272 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* This header contains deprecated HWCv0 interface declarations. Don't include + * this header directly; it will be included by <hardware/hwcomposer.h> unless + * HWC_REMOVE_DEPRECATED_VERSIONS is defined to non-zero. + */ +#ifndef ANDROID_INCLUDE_HARDWARE_HWCOMPOSER_H +#error "This header should only be included by hardware/hwcomposer.h" +#endif + +#ifndef ANDROID_INCLUDE_HARDWARE_HWCOMPOSER_V0_H +#define ANDROID_INCLUDE_HARDWARE_HWCOMPOSER_V0_H + +struct hwc_composer_device; + +/* + * availability: HWC_DEVICE_API_VERSION_0_3 + * + * struct hwc_methods cannot be embedded in other structures as + * sizeof(struct hwc_methods) cannot be relied upon. + * + */ +typedef struct hwc_methods { + + /************************************************************************* + * HWC_DEVICE_API_VERSION_0_3 + *************************************************************************/ + + /* + * eventControl(..., event, enabled) + * Enables or disables h/w composer events. + * + * eventControl can be called from any thread and takes effect + * immediately. + * + * Supported events are: + * HWC_EVENT_VSYNC + * + * returns -EINVAL if the "event" parameter is not one of the value above + * or if the "enabled" parameter is not 0 or 1. + */ + + int (*eventControl)( + struct hwc_composer_device* dev, int event, int enabled); + +} hwc_methods_t; + +typedef struct hwc_layer { + /* + * initially set to HWC_FRAMEBUFFER or HWC_BACKGROUND. + * HWC_FRAMEBUFFER + * indicates the layer will be drawn into the framebuffer + * using OpenGL ES. 
+ * The HWC can toggle this value to HWC_OVERLAY, to indicate + * it will handle the layer. + * + * HWC_BACKGROUND + * indicates this is a special "background" layer. The only valid + * field is backgroundColor. HWC_BACKGROUND can only be used with + * HWC_API_VERSION >= 0.2 + * The HWC can toggle this value to HWC_FRAMEBUFFER, to indicate + * it CANNOT handle the background color + * + */ + int32_t compositionType; + + /* see hwc_layer_t::hints above */ + uint32_t hints; + + /* see hwc_layer_t::flags above */ + uint32_t flags; + + union { + /* color of the background. hwc_color_t.a is ignored */ + hwc_color_t backgroundColor; + + struct { + /* handle of buffer to compose. This handle is guaranteed to have been + * allocated from gralloc using the GRALLOC_USAGE_HW_COMPOSER usage flag. If + * the layer's handle is unchanged across two consecutive prepare calls and + * the HWC_GEOMETRY_CHANGED flag is not set for the second call then the + * HWComposer implementation may assume that the contents of the buffer have + * not changed. */ + buffer_handle_t handle; + + /* transformation to apply to the buffer during composition */ + uint32_t transform; + + /* blending to apply during composition */ + int32_t blending; + + /* area of the source to consider, the origin is the top-left corner of + * the buffer */ + hwc_rect_t sourceCrop; + + /* where to composite the sourceCrop onto the display. The sourceCrop + * is scaled using linear filtering to the displayFrame. The origin is the + * top-left corner of the screen. + */ + hwc_rect_t displayFrame; + + /* visible region in screen space. The origin is the + * top-left corner of the screen. + * The visible region INCLUDES areas overlapped by a translucent layer. + */ + hwc_region_t visibleRegionScreen; + }; + }; +} hwc_layer_t; + +/* + * List of layers. + * The handle members of hwLayers elements must be unique. 
+ */ +typedef struct hwc_layer_list { + uint32_t flags; + size_t numHwLayers; + hwc_layer_t hwLayers[0]; +} hwc_layer_list_t; + +/*****************************************************************************/ + +typedef struct hwc_composer_device { + struct hw_device_t common; + + /* + * (*prepare)() is called for each frame before composition and is used by + * SurfaceFlinger to determine what composition steps the HWC can handle. + * + * (*prepare)() can be called more than once, the last call prevails. + * + * The HWC responds by setting the compositionType field to either + * HWC_FRAMEBUFFER or HWC_OVERLAY. In the former case, the composition for + * this layer is handled by SurfaceFlinger with OpenGL ES, in the later + * case, the HWC will have to handle this layer's composition. + * + * (*prepare)() is called with HWC_GEOMETRY_CHANGED to indicate that the + * list's geometry has changed, that is, when more than just the buffer's + * handles have been updated. Typically this happens (but is not limited to) + * when a window is added, removed, resized or moved. + * + * a NULL list parameter or a numHwLayers of zero indicates that the + * entire composition will be handled by SurfaceFlinger with OpenGL ES. + * + * returns: 0 on success. An negative error code on error. If an error is + * returned, SurfaceFlinger will assume that none of the layer will be + * handled by the HWC. + */ + int (*prepare)(struct hwc_composer_device *dev, hwc_layer_list_t* list); + + /* + * (*set)() is used in place of eglSwapBuffers(), and assumes the same + * functionality, except it also commits the work list atomically with + * the actual eglSwapBuffers(). + * + * The list parameter is guaranteed to be the same as the one returned + * from the last call to (*prepare)(). 
+ * + * When this call returns the caller assumes that: + * + * - the display will be updated in the near future with the content + * of the work list, without artifacts during the transition from the + * previous frame. + * + * - all objects are available for immediate access or destruction, in + * particular, hwc_region_t::rects data and hwc_layer_t::layer's buffer. + * Note that this means that immediately accessing (potentially from a + * different process) a buffer used in this call will not result in + * screen corruption, the driver must apply proper synchronization or + * scheduling (eg: block the caller, such as gralloc_module_t::lock(), + * OpenGL ES, Camera, Codecs, etc..., or schedule the caller's work + * after the buffer is freed from the actual composition). + * + * a NULL list parameter or a numHwLayers of zero indicates that the + * entire composition has been handled by SurfaceFlinger with OpenGL ES. + * In this case, (*set)() behaves just like eglSwapBuffers(). + * + * dpy, sur, and list are set to NULL to indicate that the screen is + * turning off. This happens WITHOUT prepare() being called first. + * This is a good time to free h/w resources and/or power + * the relevant h/w blocks down. + * + * IMPORTANT NOTE: there is an implicit layer containing opaque black + * pixels behind all the layers in the list. + * It is the responsibility of the hwcomposer module to make + * sure black pixels are output (or blended from). + * + * returns: 0 on success. An negative error code on error: + * HWC_EGL_ERROR: eglGetError() will provide the proper error code + * Another code for non EGL errors. + * + */ + int (*set)(struct hwc_composer_device *dev, + hwc_display_t dpy, + hwc_surface_t sur, + hwc_layer_list_t* list); + + /* + * This field is OPTIONAL and can be NULL. 
+ * + * If non NULL it will be called by SurfaceFlinger on dumpsys + */ + void (*dump)(struct hwc_composer_device* dev, char *buff, int buff_len); + + /* + * This field is OPTIONAL and can be NULL. + * + * (*registerProcs)() registers a set of callbacks the h/w composer HAL + * can later use. It is FORBIDDEN to call any of the callbacks from + * within registerProcs(). registerProcs() must save the hwc_procs_t pointer + * which is needed when calling a registered callback. + * Each call to registerProcs replaces the previous set of callbacks. + * registerProcs is called with NULL to unregister all callbacks. + * + * Any of the callbacks can be NULL, in which case the corresponding + * functionality is not supported. + */ + void (*registerProcs)(struct hwc_composer_device* dev, + hwc_procs_t const* procs); + + /* + * This field is OPTIONAL and can be NULL. + * availability: HWC_DEVICE_API_VERSION_0_2 + * + * Used to retrieve information about the h/w composer + * + * Returns 0 on success or -errno on error. + */ + int (*query)(struct hwc_composer_device* dev, int what, int* value); + + /* + * Reserved for future use. Must be NULL. + */ + void* reserved_proc[4]; + + /* + * This field is OPTIONAL and can be NULL. 
+ * availability: HWC_DEVICE_API_VERSION_0_3 + */ + hwc_methods_t const *methods; + +} hwc_composer_device_t; + +/** convenience API for opening and closing a device */ + +static inline int hwc_open(const struct hw_module_t* module, + hwc_composer_device_t** device) { + return module->methods->open(module, + HWC_HARDWARE_COMPOSER, (struct hw_device_t**)device); +} + +static inline int hwc_close(hwc_composer_device_t* device) { + return device->common.close(&device->common); +} + +/*****************************************************************************/ + +#endif /* ANDROID_INCLUDE_HARDWARE_HWCOMPOSER_V0_H */ diff --git a/include/hardware/nfc.h b/include/hardware/nfc.h index 74b3cfb..09523b3 100644 --- a/include/hardware/nfc.h +++ b/include/hardware/nfc.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2011 The Android Open Source Project + * Copyright (C) 2011, 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,7 +14,6 @@ * limitations under the License. */ - #ifndef ANDROID_NFC_HAL_INTERFACE_H #define ANDROID_NFC_HAL_INTERFACE_H @@ -27,11 +26,187 @@ __BEGIN_DECLS -#define NFC_HARDWARE_MODULE_ID "nfc" + +/* NFC device HAL for NCI-based NFC controllers. + * + * This HAL allows NCI silicon vendors to make use + * of the core NCI stack in Android for their own silicon. 
+ * + * The responibilities of the NCI HAL implementation + * are as follows: + * + * - Implement the transport to the NFC controller + * - Implement each of the HAL methods specified below as applicable to their silicon + * - Pass up received NCI messages from the controller to the stack + * + * A simplified timeline of NCI HAL method calls: + * 1) Core NCI stack calls open() + * 2) Core NCI stack executes CORE_RESET and CORE_INIT through calls to write() + * 3) Core NCI stack calls core_initialized() to allow HAL to do post-init configuration + * 4) Core NCI stack calls pre_discover() to allow HAL to prepare for RF discovery + * 5) Core NCI stack starts discovery through calls to write() + * 6) Core NCI stack stops discovery through calls to write() (e.g. screen turns off) + * 7) Core NCI stack calls pre_discover() to prepare for RF discovery (e.g. screen turned back on) + * 8) Core NCI stack starts discovery through calls to write() + * ... + * ... + * 9) Core NCI stack calls close() + */ +#define NFC_NCI_HARDWARE_MODULE_ID "nfc_nci" +#define NFC_NCI_CONTROLLER "nci" + +/* + * nfc_nci_module_t should contain module-specific parameters + */ +typedef struct nfc_nci_module_t { + struct hw_module_t common; +} nfc_nci_module_t; + +/* + * HAL events that can be passed back to the stack + */ +typedef uint8_t nfc_event_t; + +enum { + HAL_NFC_OPEN_CPLT_EVT = 0x00, + HAL_NFC_CLOSE_CPLT_EVT = 0x01, + HAL_NFC_POST_INIT_CPLT_EVT = 0x02, + HAL_NFC_PRE_DISCOVER_CPLT_EVT = 0x03, + HAL_NFC_REQUEST_CONTROL_EVT = 0x04, + HAL_NFC_RELEASE_CONTROL_EVT = 0x05, + HAL_NFC_ERROR_EVT = 0x06 +}; + +/* + * Allowed status return values for each of the HAL methods + */ +typedef uint8_t nfc_status_t; + +enum { + HAL_NFC_STATUS_OK = 0x00, + HAL_NFC_STATUS_FAILED = 0x01, + HAL_NFC_STATUS_ERR_TRANSPORT = 0x02, + HAL_NFC_STATUS_ERR_CMD_TIMEOUT = 0x03, + HAL_NFC_STATUS_REFUSED = 0x04 +}; + +/* + * The callback passed in from the NFC stack that the HAL + * can use to pass events back to the stack. 
+ */ +typedef void (nfc_stack_callback_t) (nfc_event_t event, nfc_status_t event_status); + +/* + * The callback passed in from the NFC stack that the HAL + * can use to pass incomming data to the stack. + */ +typedef void (nfc_stack_data_callback_t) (uint16_t data_len, uint8_t* p_data); + +/* nfc_nci_device_t starts with a hw_device_t struct, + * followed by device-specific methods and members. + * + * All methods in the NCI HAL are asynchronous. + */ +typedef struct nfc_nci_device { + struct hw_device_t common; + /* + * (*open)() Opens the NFC controller device and performs initialization. + * This may include patch download and other vendor-specific initialization. + * + * If open completes successfully, the controller should be ready to perform + * NCI initialization - ie accept CORE_RESET and subsequent commands through + * the write() call. + * + * If open() returns 0, the NCI stack will wait for a HAL_NFC_OPEN_CPLT_EVT + * before continuing. + * + * If open() returns any other value, the NCI stack will stop. + * + */ + int (*open)(const struct nfc_nci_device *p_dev, nfc_stack_callback_t *p_cback, + nfc_stack_data_callback_t *p_data_cback); + + /* + * (*write)() Performs an NCI write. + * + * This method may queue writes and return immediately. The only + * requirement is that the writes are executed in order. + */ + int (*write)(const struct nfc_nci_device *p_dev, uint16_t data_len, const uint8_t *p_data); + + /* + * (*core_initialized)() is called after the CORE_INIT_RSP is received from the NFCC. + * At this time, the HAL can do any chip-specific configuration. + * + * If core_initialized() returns 0, the NCI stack will wait for a HAL_NFC_POST_INIT_CPLT_EVT + * before continuing. + * + * If core_initialized() returns any other value, the NCI stack will continue + * immediately. + */ + int (*core_initialized)(const struct nfc_nci_device *p_dev, uint8_t* p_core_init_rsp_params); + + /* + * (*pre_discover)() Is called every time before starting RF discovery. 
+ * It is a good place to do vendor-specific configuration that must be + * performed every time RF discovery is about to be started. + * + * If pre_discover() returns 0, the NCI stack will wait for a HAL_NFC_PRE_DISCOVER_CPLT_EVT + * before continuing. + * + * If pre_discover() returns any other value, the NCI stack will start + * RF discovery immediately. + */ + int (*pre_discover)(const struct nfc_nci_device *p_dev); + + /* + * (*close)() Closed the NFC controller. Should free all resources. + */ + int (*close)(const struct nfc_nci_device *p_dev); + + /* + * (*control_granted)() Grant HAL the exclusive control to send NCI commands. + * Called in response to HAL_REQUEST_CONTROL_EVT. + * Must only be called when there are no NCI commands pending. + * HAL_RELEASE_CONTROL_EVT will notify when HAL no longer needs exclusive control. + */ + int (*control_granted)(const struct nfc_nci_device *p_dev); + + /* + * (*power_cycle)() Restart controller by power cyle; + * HAL_OPEN_CPLT_EVT will notify when operation is complete. + */ + int (*power_cycle)(const struct nfc_nci_device *p_dev); +} nfc_nci_device_t; + +/* + * Convenience methods that the NFC stack can use to open + * and close an NCI device + */ +static inline int nfc_nci_open(const struct hw_module_t* module, + nfc_nci_device_t** dev) { + return module->methods->open(module, NFC_NCI_CONTROLLER, + (struct hw_device_t**) dev); +} + +static inline int nfc_nci_close(nfc_nci_device_t* dev) { + return dev->common.close(&dev->common); +} +/* + * End NFC NCI HAL + */ /* + * This is a limited NFC HAL for NXP PN544-based devices. + * This HAL as Android is moving to + * an NCI-based NFC stack. + * + * All NCI-based NFC controllers should use the NFC-NCI + * HAL instead. 
* Begin PN544 specific HAL */ +#define NFC_HARDWARE_MODULE_ID "nfc" + #define NFC_PN544_CONTROLLER "pn544" typedef struct nfc_module_t { diff --git a/modules/Android.mk b/modules/Android.mk index 871b984..faa8bb3 100644 --- a/modules/Android.mk +++ b/modules/Android.mk @@ -1,2 +1,2 @@ -hardware_modules := gralloc hwcomposer audio nfc local_time power usbaudio +hardware_modules := gralloc hwcomposer audio nfc nfc-nci local_time power usbaudio audio_remote_submix include $(call all-named-subdir-makefiles,$(hardware_modules)) diff --git a/modules/audio/audio_hw.c b/modules/audio/audio_hw.c index 78bed03..b051cf9 100644 --- a/modules/audio/audio_hw.c +++ b/modules/audio/audio_hw.c @@ -55,7 +55,7 @@ static size_t out_get_buffer_size(const struct audio_stream *stream) return 4096; } -static uint32_t out_get_channels(const struct audio_stream *stream) +static audio_channel_mask_t out_get_channels(const struct audio_stream *stream) { return AUDIO_CHANNEL_OUT_STEREO; } @@ -150,7 +150,7 @@ static size_t in_get_buffer_size(const struct audio_stream *stream) return 320; } -static uint32_t in_get_channels(const struct audio_stream *stream) +static audio_channel_mask_t in_get_channels(const struct audio_stream *stream) { return AUDIO_CHANNEL_IN_MONO; } @@ -292,8 +292,17 @@ static int adev_set_master_volume(struct audio_hw_device *dev, float volume) } #ifndef ICS_AUDIO_BLOB -static int adev_get_master_volume(struct audio_hw_device *dev, - float *volume) +static int adev_get_master_volume(struct audio_hw_device *dev, float *volume) +{ + return -ENOSYS; +} + +static int adev_set_master_mute(struct audio_hw_device *dev, bool muted) +{ + return -ENOSYS; +} + +static int adev_get_master_mute(struct audio_hw_device *dev, bool *muted) { return -ENOSYS; } @@ -376,29 +385,6 @@ static int adev_close(hw_device_t *device) return 0; } -static uint32_t adev_get_supported_devices(const struct audio_hw_device *dev) -{ - return (/* OUT */ - AUDIO_DEVICE_OUT_EARPIECE | - AUDIO_DEVICE_OUT_SPEAKER | 
- AUDIO_DEVICE_OUT_WIRED_HEADSET | - AUDIO_DEVICE_OUT_WIRED_HEADPHONE | - AUDIO_DEVICE_OUT_AUX_DIGITAL | - AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET | - AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET | - AUDIO_DEVICE_OUT_ALL_SCO | - AUDIO_DEVICE_OUT_DEFAULT | - /* IN */ - AUDIO_DEVICE_IN_COMMUNICATION | - AUDIO_DEVICE_IN_AMBIENT | - AUDIO_DEVICE_IN_BUILTIN_MIC | - AUDIO_DEVICE_IN_WIRED_HEADSET | - AUDIO_DEVICE_IN_AUX_DIGITAL | - AUDIO_DEVICE_IN_BACK_MIC | - AUDIO_DEVICE_IN_ALL_SCO | - AUDIO_DEVICE_IN_DEFAULT); -} - static int adev_open(const hw_module_t* module, const char* name, hw_device_t** device) { @@ -413,16 +399,17 @@ static int adev_open(const hw_module_t* module, const char* name, return -ENOMEM; adev->device.common.tag = HARDWARE_DEVICE_TAG; - adev->device.common.version = AUDIO_DEVICE_API_VERSION_1_0; + adev->device.common.version = AUDIO_DEVICE_API_VERSION_2_0; adev->device.common.module = (struct hw_module_t *) module; adev->device.common.close = adev_close; - adev->device.get_supported_devices = adev_get_supported_devices; adev->device.init_check = adev_init_check; adev->device.set_voice_volume = adev_set_voice_volume; adev->device.set_master_volume = adev_set_master_volume; #ifndef ICS_AUDIO_BLOB adev->device.get_master_volume = adev_get_master_volume; + adev->device.set_master_mute = adev_set_master_mute; + adev->device.get_master_mute = adev_get_master_mute; #endif adev->device.set_mode = adev_set_mode; adev->device.set_mic_mute = adev_set_mic_mute; diff --git a/modules/audio/audio_policy.c b/modules/audio/audio_policy.c index ce74ff8..82aa449 100644 --- a/modules/audio/audio_policy.c +++ b/modules/audio/audio_policy.c @@ -98,7 +98,7 @@ static audio_io_handle_t ap_get_output(struct audio_policy *pol, audio_stream_type_t stream, uint32_t sampling_rate, audio_format_t format, - uint32_t channels, + audio_channel_mask_t channelMask, audio_output_flags_t flags) { return 0; @@ -124,7 +124,7 @@ static void ap_release_output(struct audio_policy *pol, static 
audio_io_handle_t ap_get_input(struct audio_policy *pol, audio_source_t inputSource, uint32_t sampling_rate, audio_format_t format, - uint32_t channels, + audio_channel_mask_t channelMask, audio_in_acoustics_t acoustics) { return 0; @@ -195,13 +195,13 @@ static audio_devices_t ap_get_devices_for_stream(const struct audio_policy *pol, } static audio_io_handle_t ap_get_output_for_effect(struct audio_policy *pol, - struct effect_descriptor_s *desc) + const struct effect_descriptor_s *desc) { return 0; } static int ap_register_effect(struct audio_policy *pol, - struct effect_descriptor_s *desc, + const struct effect_descriptor_s *desc, audio_io_handle_t output, uint32_t strategy, int session, diff --git a/modules/audio_remote_submix/Android.mk b/modules/audio_remote_submix/Android.mk new file mode 100644 index 0000000..5f54902 --- /dev/null +++ b/modules/audio_remote_submix/Android.mk @@ -0,0 +1,30 @@ +# Copyright (C) 2012 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE := audio.r_submix.default +LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw +LOCAL_SRC_FILES := \ + audio_hw.cpp +LOCAL_C_INCLUDES += \ + frameworks/av/include/ \ + frameworks/native/include/ +LOCAL_SHARED_LIBRARIES := liblog libcutils libutils libnbaio +LOCAL_STATIC_LIBRARIES := libmedia_helper +LOCAL_MODULE_TAGS := optional +include $(BUILD_SHARED_LIBRARY) + diff --git a/modules/audio_remote_submix/audio_hw.cpp b/modules/audio_remote_submix/audio_hw.cpp new file mode 100755 index 0000000..3756274 --- /dev/null +++ b/modules/audio_remote_submix/audio_hw.cpp @@ -0,0 +1,839 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "r_submix" +//#define LOG_NDEBUG 0 + +#include <errno.h> +#include <pthread.h> +#include <stdint.h> +#include <sys/time.h> +#include <stdlib.h> + +#include <cutils/log.h> +#include <cutils/str_parms.h> +#include <cutils/properties.h> + +#include <hardware/hardware.h> +#include <system/audio.h> +#include <hardware/audio.h> + +#include <media/nbaio/MonoPipe.h> +#include <media/nbaio/MonoPipeReader.h> +#include <media/AudioBufferProvider.h> + +#include <utils/String8.h> +#include <media/AudioParameter.h> + +extern "C" { + +namespace android { + +#define MAX_PIPE_DEPTH_IN_FRAMES (1024*8) +// The duration of MAX_READ_ATTEMPTS * READ_ATTEMPT_SLEEP_MS must be stricly inferior to +// the duration of a record buffer at the current record sample rate (of the device, not of +// the recording itself). Here we have: +// 3 * 5ms = 15ms < 1024 frames * 1000 / 48000 = 21.333ms +#define MAX_READ_ATTEMPTS 3 +#define READ_ATTEMPT_SLEEP_MS 5 // 5ms between two read attempts when pipe is empty +#define DEFAULT_RATE_HZ 48000 // default sample rate + +struct submix_config { + audio_format_t format; + audio_channel_mask_t channel_mask; + unsigned int rate; // sample rate for the device + unsigned int period_size; // size of the audio pipe is period_size * period_count in frames + unsigned int period_count; +}; + +struct submix_audio_device { + struct audio_hw_device device; + bool output_standby; + bool input_standby; + submix_config config; + // Pipe variables: they handle the ring buffer that "pipes" audio: + // - from the submix virtual audio output == what needs to be played + // remotely, seen as an output for AudioFlinger + // - to the virtual audio source == what is captured by the component + // which "records" the submix / virtual audio source, and handles it as needed. + // A usecase example is one where the component capturing the audio is then sending it over + // Wifi for presentation on a remote Wifi Display device (e.g. 
a dongle attached to a TV, or a + // TV with Wifi Display capabilities), or to a wireless audio player. + sp<MonoPipe> rsxSink; + sp<MonoPipeReader> rsxSource; + + // device lock, also used to protect access to the audio pipe + pthread_mutex_t lock; +}; + +struct submix_stream_out { + struct audio_stream_out stream; + struct submix_audio_device *dev; +}; + +struct submix_stream_in { + struct audio_stream_in stream; + struct submix_audio_device *dev; + bool output_standby; // output standby state as seen from record thread + + // wall clock when recording starts + struct timespec record_start_time; + // how many frames have been requested to be read + int64_t read_counter_frames; +}; + + +/* audio HAL functions */ + +static uint32_t out_get_sample_rate(const struct audio_stream *stream) +{ + const struct submix_stream_out *out = + reinterpret_cast<const struct submix_stream_out *>(stream); + uint32_t out_rate = out->dev->config.rate; + //ALOGV("out_get_sample_rate() returns %u", out_rate); + return out_rate; +} + +static int out_set_sample_rate(struct audio_stream *stream, uint32_t rate) +{ + if ((rate != 44100) && (rate != 48000)) { + ALOGE("out_set_sample_rate(rate=%u) rate unsupported", rate); + return -ENOSYS; + } + struct submix_stream_out *out = reinterpret_cast<struct submix_stream_out *>(stream); + //ALOGV("out_set_sample_rate(rate=%u)", rate); + out->dev->config.rate = rate; + return 0; +} + +static size_t out_get_buffer_size(const struct audio_stream *stream) +{ + const struct submix_stream_out *out = + reinterpret_cast<const struct submix_stream_out *>(stream); + const struct submix_config& config_out = out->dev->config; + size_t buffer_size = config_out.period_size * popcount(config_out.channel_mask) + * sizeof(int16_t); // only PCM 16bit + //ALOGV("out_get_buffer_size() returns %u, period size=%u", + // buffer_size, config_out.period_size); + return buffer_size; +} + +static audio_channel_mask_t out_get_channels(const struct audio_stream *stream) +{ + 
const struct submix_stream_out *out = + reinterpret_cast<const struct submix_stream_out *>(stream); + uint32_t channels = out->dev->config.channel_mask; + //ALOGV("out_get_channels() returns %08x", channels); + return channels; +} + +static audio_format_t out_get_format(const struct audio_stream *stream) +{ + return AUDIO_FORMAT_PCM_16_BIT; +} + +static int out_set_format(struct audio_stream *stream, audio_format_t format) +{ + if (format != AUDIO_FORMAT_PCM_16_BIT) { + return -ENOSYS; + } else { + return 0; + } +} + +static int out_standby(struct audio_stream *stream) +{ + ALOGI("out_standby()"); + + const struct submix_stream_out *out = reinterpret_cast<const struct submix_stream_out *>(stream); + + pthread_mutex_lock(&out->dev->lock); + + out->dev->output_standby = true; + + pthread_mutex_unlock(&out->dev->lock); + + return 0; +} + +static int out_dump(const struct audio_stream *stream, int fd) +{ + return 0; +} + +static int out_set_parameters(struct audio_stream *stream, const char *kvpairs) +{ + int exiting = -1; + AudioParameter parms = AudioParameter(String8(kvpairs)); + // FIXME this is using hard-coded strings but in the future, this functionality will be + // converted to use audio HAL extensions required to support tunneling + if ((parms.getInt(String8("exiting"), exiting) == NO_ERROR) && (exiting > 0)) { + const struct submix_stream_out *out = + reinterpret_cast<const struct submix_stream_out *>(stream); + + pthread_mutex_lock(&out->dev->lock); + + MonoPipe* sink = out->dev->rsxSink.get(); + if (sink != NULL) { + sink->incStrong(out); + } else { + pthread_mutex_unlock(&out->dev->lock); + return 0; + } + + ALOGI("shutdown"); + sink->shutdown(true); + + sink->decStrong(out); + + pthread_mutex_unlock(&out->dev->lock); + } + + return 0; +} + +static char * out_get_parameters(const struct audio_stream *stream, const char *keys) +{ + return strdup(""); +} + +static uint32_t out_get_latency(const struct audio_stream_out *stream) +{ + const struct 
submix_stream_out *out = + reinterpret_cast<const struct submix_stream_out *>(stream); + const struct submix_config * config_out = &(out->dev->config); + uint32_t latency = (MAX_PIPE_DEPTH_IN_FRAMES * 1000) / config_out->rate; + ALOGV("out_get_latency() returns %u", latency); + return latency; +} + +static int out_set_volume(struct audio_stream_out *stream, float left, + float right) +{ + return -ENOSYS; +} + +static ssize_t out_write(struct audio_stream_out *stream, const void* buffer, + size_t bytes) +{ + //ALOGV("out_write(bytes=%d)", bytes); + ssize_t written_frames = 0; + struct submix_stream_out *out = reinterpret_cast<struct submix_stream_out *>(stream); + + const size_t frame_size = audio_stream_frame_size(&stream->common); + const size_t frames = bytes / frame_size; + + pthread_mutex_lock(&out->dev->lock); + + out->dev->output_standby = false; + + MonoPipe* sink = out->dev->rsxSink.get(); + if (sink != NULL) { + if (sink->isShutdown()) { + pthread_mutex_unlock(&out->dev->lock); + // the pipe has already been shutdown, this buffer will be lost but we must + // simulate timing so we don't drain the output faster than realtime + usleep(frames * 1000000 / out_get_sample_rate(&stream->common)); + return bytes; + } + sink->incStrong(buffer); + } else { + pthread_mutex_unlock(&out->dev->lock); + ALOGE("out_write without a pipe!"); + ALOG_ASSERT("out_write without a pipe!"); + return 0; + } + + pthread_mutex_unlock(&out->dev->lock); + + written_frames = sink->write(buffer, frames); + if (written_frames < 0) { + if (written_frames == (ssize_t)NEGOTIATE) { + ALOGE("out_write() write to pipe returned NEGOTIATE"); + + pthread_mutex_lock(&out->dev->lock); + sink->decStrong(buffer); + pthread_mutex_unlock(&out->dev->lock); + + written_frames = 0; + return 0; + } else { + // write() returned UNDERRUN or WOULD_BLOCK, retry + ALOGE("out_write() write to pipe returned unexpected %16lx", written_frames); + written_frames = sink->write(buffer, frames); + } + } + + 
pthread_mutex_lock(&out->dev->lock); + + sink->decStrong(buffer); + + pthread_mutex_unlock(&out->dev->lock); + + if (written_frames < 0) { + ALOGE("out_write() failed writing to pipe with %16lx", written_frames); + return 0; + } else { + ALOGV("out_write() wrote %lu bytes)", written_frames * frame_size); + return written_frames * frame_size; + } +} + +static int out_get_render_position(const struct audio_stream_out *stream, + uint32_t *dsp_frames) +{ + return -EINVAL; +} + +static int out_add_audio_effect(const struct audio_stream *stream, effect_handle_t effect) +{ + return 0; +} + +static int out_remove_audio_effect(const struct audio_stream *stream, effect_handle_t effect) +{ + return 0; +} + +static int out_get_next_write_timestamp(const struct audio_stream_out *stream, + int64_t *timestamp) +{ + return -EINVAL; +} + +/** audio_stream_in implementation **/ +static uint32_t in_get_sample_rate(const struct audio_stream *stream) +{ + const struct submix_stream_in *in = reinterpret_cast<const struct submix_stream_in *>(stream); + //ALOGV("in_get_sample_rate() returns %u", in->dev->config.rate); + return in->dev->config.rate; +} + +static int in_set_sample_rate(struct audio_stream *stream, uint32_t rate) +{ + return -ENOSYS; +} + +static size_t in_get_buffer_size(const struct audio_stream *stream) +{ + const struct submix_stream_in *in = reinterpret_cast<const struct submix_stream_in *>(stream); + ALOGV("in_get_buffer_size() returns %u", + in->dev->config.period_size * audio_stream_frame_size(stream)); + return in->dev->config.period_size * audio_stream_frame_size(stream); +} + +static audio_channel_mask_t in_get_channels(const struct audio_stream *stream) +{ + return AUDIO_CHANNEL_IN_STEREO; +} + +static audio_format_t in_get_format(const struct audio_stream *stream) +{ + return AUDIO_FORMAT_PCM_16_BIT; +} + +static int in_set_format(struct audio_stream *stream, audio_format_t format) +{ + if (format != AUDIO_FORMAT_PCM_16_BIT) { + return -ENOSYS; + } else { + 
return 0; + } +} + +static int in_standby(struct audio_stream *stream) +{ + ALOGI("in_standby()"); + const struct submix_stream_in *in = reinterpret_cast<const struct submix_stream_in *>(stream); + + pthread_mutex_lock(&in->dev->lock); + + in->dev->input_standby = true; + + pthread_mutex_unlock(&in->dev->lock); + + return 0; +} + +static int in_dump(const struct audio_stream *stream, int fd) +{ + return 0; +} + +static int in_set_parameters(struct audio_stream *stream, const char *kvpairs) +{ + return 0; +} + +static char * in_get_parameters(const struct audio_stream *stream, + const char *keys) +{ + return strdup(""); +} + +static int in_set_gain(struct audio_stream_in *stream, float gain) +{ + return 0; +} + +static ssize_t in_read(struct audio_stream_in *stream, void* buffer, + size_t bytes) +{ + //ALOGV("in_read bytes=%u", bytes); + ssize_t frames_read = -1977; + struct submix_stream_in *in = reinterpret_cast<struct submix_stream_in *>(stream); + const size_t frame_size = audio_stream_frame_size(&stream->common); + const size_t frames_to_read = bytes / frame_size; + + pthread_mutex_lock(&in->dev->lock); + + const bool output_standby_transition = (in->output_standby != in->dev->output_standby); + in->output_standby = in->dev->output_standby; + + if (in->dev->input_standby || output_standby_transition) { + in->dev->input_standby = false; + // keep track of when we exit input standby (== first read == start "real recording") + // or when we start recording silence, and reset projected time + int rc = clock_gettime(CLOCK_MONOTONIC, &in->record_start_time); + if (rc == 0) { + in->read_counter_frames = 0; + } + } + + in->read_counter_frames += frames_to_read; + + MonoPipeReader* source = in->dev->rsxSource.get(); + if (source != NULL) { + source->incStrong(buffer); + } else { + ALOGE("no audio pipe yet we're trying to read!"); + pthread_mutex_unlock(&in->dev->lock); + usleep((bytes / frame_size) * 1000000 / in_get_sample_rate(&stream->common)); + memset(buffer, 0, 
bytes); + return bytes; + } + + pthread_mutex_unlock(&in->dev->lock); + + // read the data from the pipe (it's non blocking) + size_t remaining_frames = frames_to_read; + int attempts = 0; + char* buff = (char*)buffer; + while ((remaining_frames > 0) && (attempts < MAX_READ_ATTEMPTS)) { + attempts++; + frames_read = source->read(buff, remaining_frames, AudioBufferProvider::kInvalidPTS); + if (frames_read > 0) { + remaining_frames -= frames_read; + buff += frames_read * frame_size; + //ALOGV(" in_read (att=%d) got %ld frames, remaining=%u", + // attempts, frames_read, remaining_frames); + } else { + //ALOGE(" in_read read returned %ld", frames_read); + usleep(READ_ATTEMPT_SLEEP_MS * 1000); + } + } + + // done using the source + pthread_mutex_lock(&in->dev->lock); + + source->decStrong(buffer); + + pthread_mutex_unlock(&in->dev->lock); + + if (remaining_frames > 0) { + ALOGV(" remaining_frames = %d", remaining_frames); + memset(((char*)buffer)+ bytes - (remaining_frames * frame_size), 0, + remaining_frames * frame_size); + } + + // compute how much we need to sleep after reading the data by comparing the wall clock with + // the projected time at which we should return. + struct timespec time_after_read;// wall clock after reading from the pipe + struct timespec record_duration;// observed record duration + int rc = clock_gettime(CLOCK_MONOTONIC, &time_after_read); + const uint32_t sample_rate = in_get_sample_rate(&stream->common); + if (rc == 0) { + // for how long have we been recording? 
+ record_duration.tv_sec = time_after_read.tv_sec - in->record_start_time.tv_sec; + record_duration.tv_nsec = time_after_read.tv_nsec - in->record_start_time.tv_nsec; + if (record_duration.tv_nsec < 0) { + record_duration.tv_sec--; + record_duration.tv_nsec += 1000000000; + } + + // read_counter_frames contains the number of frames that have been read since the beginning + // of recording (including this call): it's converted to usec and compared to how long we've + // been recording for, which gives us how long we must wait to sync the projected recording + // time, and the observed recording time + long projected_vs_observed_offset_us = + ((int64_t)(in->read_counter_frames + - (record_duration.tv_sec*sample_rate))) + * 1000000 / sample_rate + - (record_duration.tv_nsec / 1000); + + ALOGV(" record duration %5lds %3ldms, will wait: %7ldus", + record_duration.tv_sec, record_duration.tv_nsec/1000000, + projected_vs_observed_offset_us); + if (projected_vs_observed_offset_us > 0) { + usleep(projected_vs_observed_offset_us); + } + } + + + ALOGV("in_read returns %d", bytes); + return bytes; + +} + +static uint32_t in_get_input_frames_lost(struct audio_stream_in *stream) +{ + return 0; +} + +static int in_add_audio_effect(const struct audio_stream *stream, effect_handle_t effect) +{ + return 0; +} + +static int in_remove_audio_effect(const struct audio_stream *stream, effect_handle_t effect) +{ + return 0; +} + +static int adev_open_output_stream(struct audio_hw_device *dev, + audio_io_handle_t handle, + audio_devices_t devices, + audio_output_flags_t flags, + struct audio_config *config, + struct audio_stream_out **stream_out) +{ + ALOGV("adev_open_output_stream()"); + struct submix_audio_device *rsxadev = (struct submix_audio_device *)dev; + struct submix_stream_out *out; + int ret; + + out = (struct submix_stream_out *)calloc(1, sizeof(struct submix_stream_out)); + if (!out) { + ret = -ENOMEM; + goto err_open; + } + + pthread_mutex_lock(&rsxadev->lock); + + 
out->stream.common.get_sample_rate = out_get_sample_rate; + out->stream.common.set_sample_rate = out_set_sample_rate; + out->stream.common.get_buffer_size = out_get_buffer_size; + out->stream.common.get_channels = out_get_channels; + out->stream.common.get_format = out_get_format; + out->stream.common.set_format = out_set_format; + out->stream.common.standby = out_standby; + out->stream.common.dump = out_dump; + out->stream.common.set_parameters = out_set_parameters; + out->stream.common.get_parameters = out_get_parameters; + out->stream.common.add_audio_effect = out_add_audio_effect; + out->stream.common.remove_audio_effect = out_remove_audio_effect; + out->stream.get_latency = out_get_latency; + out->stream.set_volume = out_set_volume; + out->stream.write = out_write; + out->stream.get_render_position = out_get_render_position; + out->stream.get_next_write_timestamp = out_get_next_write_timestamp; + + config->channel_mask = AUDIO_CHANNEL_OUT_STEREO; + rsxadev->config.channel_mask = config->channel_mask; + + if ((config->sample_rate != 48000) || (config->sample_rate != 44100)) { + config->sample_rate = DEFAULT_RATE_HZ; + } + rsxadev->config.rate = config->sample_rate; + + config->format = AUDIO_FORMAT_PCM_16_BIT; + rsxadev->config.format = config->format; + + rsxadev->config.period_size = 1024; + rsxadev->config.period_count = 4; + out->dev = rsxadev; + + *stream_out = &out->stream; + + // initialize pipe + { + ALOGV(" initializing pipe"); + const NBAIO_Format format = + config->sample_rate == 48000 ? Format_SR48_C2_I16 : Format_SR44_1_C2_I16; + const NBAIO_Format offers[1] = {format}; + size_t numCounterOffers = 0; + // creating a MonoPipe with optional blocking set to true. 
+ MonoPipe* sink = new MonoPipe(MAX_PIPE_DEPTH_IN_FRAMES, format, true/*writeCanBlock*/); + ssize_t index = sink->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + MonoPipeReader* source = new MonoPipeReader(sink); + numCounterOffers = 0; + index = source->negotiate(offers, 1, NULL, numCounterOffers); + ALOG_ASSERT(index == 0); + rsxadev->rsxSink = sink; + rsxadev->rsxSource = source; + } + + pthread_mutex_unlock(&rsxadev->lock); + + return 0; + +err_open: + *stream_out = NULL; + return ret; +} + +static void adev_close_output_stream(struct audio_hw_device *dev, + struct audio_stream_out *stream) +{ + ALOGV("adev_close_output_stream()"); + struct submix_audio_device *rsxadev = (struct submix_audio_device *)dev; + + pthread_mutex_lock(&rsxadev->lock); + + rsxadev->rsxSink.clear(); + rsxadev->rsxSource.clear(); + free(stream); + + pthread_mutex_unlock(&rsxadev->lock); +} + +static int adev_set_parameters(struct audio_hw_device *dev, const char *kvpairs) +{ + return -ENOSYS; +} + +static char * adev_get_parameters(const struct audio_hw_device *dev, + const char *keys) +{ + return strdup("");; +} + +static int adev_init_check(const struct audio_hw_device *dev) +{ + ALOGI("adev_init_check()"); + return 0; +} + +static int adev_set_voice_volume(struct audio_hw_device *dev, float volume) +{ + return -ENOSYS; +} + +static int adev_set_master_volume(struct audio_hw_device *dev, float volume) +{ + return -ENOSYS; +} + +static int adev_get_master_volume(struct audio_hw_device *dev, float *volume) +{ + return -ENOSYS; +} + +static int adev_set_master_mute(struct audio_hw_device *dev, bool muted) +{ + return -ENOSYS; +} + +static int adev_get_master_mute(struct audio_hw_device *dev, bool *muted) +{ + return -ENOSYS; +} + +static int adev_set_mode(struct audio_hw_device *dev, audio_mode_t mode) +{ + return 0; +} + +static int adev_set_mic_mute(struct audio_hw_device *dev, bool state) +{ + return -ENOSYS; +} + +static int adev_get_mic_mute(const struct 
audio_hw_device *dev, bool *state) +{ + return -ENOSYS; +} + +static size_t adev_get_input_buffer_size(const struct audio_hw_device *dev, + const struct audio_config *config) +{ + //### TODO correlate this with pipe parameters + return 4096; +} + +static int adev_open_input_stream(struct audio_hw_device *dev, + audio_io_handle_t handle, + audio_devices_t devices, + struct audio_config *config, + struct audio_stream_in **stream_in) +{ + ALOGI("adev_open_input_stream()"); + + struct submix_audio_device *rsxadev = (struct submix_audio_device *)dev; + struct submix_stream_in *in; + int ret; + + in = (struct submix_stream_in *)calloc(1, sizeof(struct submix_stream_in)); + if (!in) { + ret = -ENOMEM; + goto err_open; + } + + pthread_mutex_lock(&rsxadev->lock); + + in->stream.common.get_sample_rate = in_get_sample_rate; + in->stream.common.set_sample_rate = in_set_sample_rate; + in->stream.common.get_buffer_size = in_get_buffer_size; + in->stream.common.get_channels = in_get_channels; + in->stream.common.get_format = in_get_format; + in->stream.common.set_format = in_set_format; + in->stream.common.standby = in_standby; + in->stream.common.dump = in_dump; + in->stream.common.set_parameters = in_set_parameters; + in->stream.common.get_parameters = in_get_parameters; + in->stream.common.add_audio_effect = in_add_audio_effect; + in->stream.common.remove_audio_effect = in_remove_audio_effect; + in->stream.set_gain = in_set_gain; + in->stream.read = in_read; + in->stream.get_input_frames_lost = in_get_input_frames_lost; + + config->channel_mask = AUDIO_CHANNEL_IN_STEREO; + rsxadev->config.channel_mask = config->channel_mask; + + if ((config->sample_rate != 48000) || (config->sample_rate != 44100)) { + config->sample_rate = DEFAULT_RATE_HZ; + } + rsxadev->config.rate = config->sample_rate; + + config->format = AUDIO_FORMAT_PCM_16_BIT; + rsxadev->config.format = config->format; + + rsxadev->config.period_size = 1024; + rsxadev->config.period_count = 4; + + *stream_in = 
&in->stream; + + in->dev = rsxadev; + + in->read_counter_frames = 0; + in->output_standby = rsxadev->output_standby; + + pthread_mutex_unlock(&rsxadev->lock); + + return 0; + +err_open: + *stream_in = NULL; + return ret; +} + +static void adev_close_input_stream(struct audio_hw_device *dev, + struct audio_stream_in *stream) +{ + ALOGV("adev_close_input_stream()"); + struct submix_audio_device *rsxadev = (struct submix_audio_device *)dev; + + pthread_mutex_lock(&rsxadev->lock); + + MonoPipe* sink = rsxadev->rsxSink.get(); + if (sink != NULL) { + ALOGI("shutdown"); + sink->shutdown(true); + } + + free(stream); + + pthread_mutex_unlock(&rsxadev->lock); +} + +static int adev_dump(const audio_hw_device_t *device, int fd) +{ + return 0; +} + +static int adev_close(hw_device_t *device) +{ + ALOGI("adev_close()"); + free(device); + return 0; +} + +static int adev_open(const hw_module_t* module, const char* name, + hw_device_t** device) +{ + ALOGI("adev_open(name=%s)", name); + struct submix_audio_device *rsxadev; + + if (strcmp(name, AUDIO_HARDWARE_INTERFACE) != 0) + return -EINVAL; + + rsxadev = (submix_audio_device*) calloc(1, sizeof(struct submix_audio_device)); + if (!rsxadev) + return -ENOMEM; + + rsxadev->device.common.tag = HARDWARE_DEVICE_TAG; + rsxadev->device.common.version = AUDIO_DEVICE_API_VERSION_2_0; + rsxadev->device.common.module = (struct hw_module_t *) module; + rsxadev->device.common.close = adev_close; + + rsxadev->device.init_check = adev_init_check; + rsxadev->device.set_voice_volume = adev_set_voice_volume; + rsxadev->device.set_master_volume = adev_set_master_volume; + rsxadev->device.get_master_volume = adev_get_master_volume; + rsxadev->device.set_master_mute = adev_set_master_mute; + rsxadev->device.get_master_mute = adev_get_master_mute; + rsxadev->device.set_mode = adev_set_mode; + rsxadev->device.set_mic_mute = adev_set_mic_mute; + rsxadev->device.get_mic_mute = adev_get_mic_mute; + rsxadev->device.set_parameters = adev_set_parameters; + 
rsxadev->device.get_parameters = adev_get_parameters; + rsxadev->device.get_input_buffer_size = adev_get_input_buffer_size; + rsxadev->device.open_output_stream = adev_open_output_stream; + rsxadev->device.close_output_stream = adev_close_output_stream; + rsxadev->device.open_input_stream = adev_open_input_stream; + rsxadev->device.close_input_stream = adev_close_input_stream; + rsxadev->device.dump = adev_dump; + + rsxadev->input_standby = true; + rsxadev->output_standby = true; + + *device = &rsxadev->device.common; + + return 0; +} + +static struct hw_module_methods_t hal_module_methods = { + /* open */ adev_open, +}; + +struct audio_module HAL_MODULE_INFO_SYM = { + /* common */ { + /* tag */ HARDWARE_MODULE_TAG, + /* module_api_version */ AUDIO_MODULE_API_VERSION_0_1, + /* hal_api_version */ HARDWARE_HAL_API_VERSION, + /* id */ AUDIO_HARDWARE_MODULE_ID, + /* name */ "Wifi Display audio HAL", + /* author */ "The Android Open Source Project", + /* methods */ &hal_module_methods, + /* dso */ NULL, + /* reserved */ { 0 }, + }, +}; + +} //namespace android + +} //extern "C" diff --git a/modules/gralloc/framebuffer.cpp b/modules/gralloc/framebuffer.cpp index a37e2c0..326f2ae 100644 --- a/modules/gralloc/framebuffer.cpp +++ b/modules/gralloc/framebuffer.cpp @@ -312,11 +312,6 @@ int fb_device_open(hw_module_t const* module, const char* name, { int status = -EINVAL; if (!strcmp(name, GRALLOC_HARDWARE_FB0)) { - alloc_device_t* gralloc_device; - status = gralloc_open(module, &gralloc_device); - if (status < 0) - return status; - /* initialize our state here */ fb_context_t *dev = (fb_context_t*)malloc(sizeof(*dev)); memset(dev, 0, sizeof(*dev)); diff --git a/modules/gralloc/gralloc.cpp b/modules/gralloc/gralloc.cpp index a6b4edd..99aeb01 100644 --- a/modules/gralloc/gralloc.cpp +++ b/modules/gralloc/gralloc.cpp @@ -219,6 +219,7 @@ static int gralloc_alloc(alloc_device_t* dev, case HAL_PIXEL_FORMAT_RGB_565: case HAL_PIXEL_FORMAT_RGBA_5551: case HAL_PIXEL_FORMAT_RGBA_4444: 
+ case HAL_PIXEL_FORMAT_RAW_SENSOR: bpp = 2; break; default: diff --git a/modules/hwcomposer/hwcomposer.cpp b/modules/hwcomposer/hwcomposer.cpp index 0e04cac..f0a5512 100644 --- a/modules/hwcomposer/hwcomposer.cpp +++ b/modules/hwcomposer/hwcomposer.cpp @@ -29,7 +29,7 @@ /*****************************************************************************/ struct hwc_context_t { - hwc_composer_device_t device; + hwc_composer_device_1_t device; /* our private state goes below here */ }; @@ -54,7 +54,7 @@ hwc_module_t HAL_MODULE_INFO_SYM = { /*****************************************************************************/ -static void dump_layer(hwc_layer_t const* l) { +static void dump_layer(hwc_layer_1_t const* l) { ALOGD("\ttype=%d, flags=%08x, handle=%p, tr=%02x, blend=%04x, {%d,%d,%d,%d}, {%d,%d,%d,%d}", l->compositionType, l->flags, l->handle, l->transform, l->blending, l->sourceCrop.left, @@ -67,26 +67,26 @@ static void dump_layer(hwc_layer_t const* l) { l->displayFrame.bottom); } -static int hwc_prepare(hwc_composer_device_t *dev, hwc_layer_list_t* list) { - if (list && (list->flags & HWC_GEOMETRY_CHANGED)) { - for (size_t i=0 ; i<list->numHwLayers ; i++) { +static int hwc_prepare(hwc_composer_device_1_t *dev, + size_t numDisplays, hwc_display_contents_1_t** displays) { + if (displays && (displays[0]->flags & HWC_GEOMETRY_CHANGED)) { + for (size_t i=0 ; i<displays[0]->numHwLayers ; i++) { //dump_layer(&list->hwLayers[i]); - list->hwLayers[i].compositionType = HWC_FRAMEBUFFER; + displays[0]->hwLayers[i].compositionType = HWC_FRAMEBUFFER; } } return 0; } -static int hwc_set(hwc_composer_device_t *dev, - hwc_display_t dpy, - hwc_surface_t sur, - hwc_layer_list_t* list) +static int hwc_set(hwc_composer_device_1_t *dev, + size_t numDisplays, hwc_display_contents_1_t** displays) { //for (size_t i=0 ; i<list->numHwLayers ; i++) { // dump_layer(&list->hwLayers[i]); //} - EGLBoolean sucess = eglSwapBuffers((EGLDisplay)dpy, (EGLSurface)sur); + EGLBoolean sucess = 
eglSwapBuffers((EGLDisplay)displays[0]->dpy, + (EGLSurface)displays[0]->sur); if (!sucess) { return HWC_EGL_ERROR; } @@ -117,7 +117,7 @@ static int hwc_device_open(const struct hw_module_t* module, const char* name, /* initialize the procs */ dev->device.common.tag = HARDWARE_DEVICE_TAG; - dev->device.common.version = 0; + dev->device.common.version = HWC_DEVICE_API_VERSION_1_0; dev->device.common.module = const_cast<hw_module_t*>(module); dev->device.common.close = hwc_device_close; diff --git a/modules/nfc-nci/Android.mk b/modules/nfc-nci/Android.mk new file mode 100644 index 0000000..97262ef --- /dev/null +++ b/modules/nfc-nci/Android.mk @@ -0,0 +1,25 @@ +# Copyright (C) 2011 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_MODULE := nfc_nci.default +LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw +LOCAL_SRC_FILES := nfc_nci_example.c +LOCAL_SHARED_LIBRARIES := liblog libcutils +LOCAL_MODULE_TAGS := optional + +include $(BUILD_SHARED_LIBRARY) diff --git a/modules/nfc-nci/nfc_nci_example.c b/modules/nfc-nci/nfc_nci_example.c new file mode 100644 index 0000000..2514225 --- /dev/null +++ b/modules/nfc-nci/nfc_nci_example.c @@ -0,0 +1,119 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include <errno.h> +#include <string.h> + +#include <cutils/log.h> +#include <hardware/hardware.h> +#include <hardware/nfc.h> + + +/* + * NCI HAL method implementations. These must be overriden + */ +static int hal_open(const struct nfc_nci_device *dev, + nfc_stack_callback_t *p_cback, nfc_stack_data_callback_t *p_data_cback) { + ALOGE("NFC-NCI HAL: %s", __FUNCTION__); + return 0; +} + +static int hal_write(const struct nfc_nci_device *dev, + uint16_t data_len, const uint8_t *p_data) { + ALOGE("NFC-NCI HAL: %s", __FUNCTION__); + return 0; +} + +static int hal_core_initialized(const struct nfc_nci_device *dev, + uint8_t* p_core_init_rsp_params) { + ALOGE("NFC-NCI HAL: %s", __FUNCTION__); + return 0; +} + +static int hal_pre_discover(const struct nfc_nci_device *dev) { + ALOGE("NFC-NCI HAL: %s", __FUNCTION__); + return 0; +} + +static int hal_close(const struct nfc_nci_device *dev) { + ALOGE("NFC-NCI HAL: %s", __FUNCTION__); + return 0; +} + +static int hal_control_granted (const struct nfc_nci_device *p_dev) +{ + ALOGE("NFC-NCI HAL: %s", __FUNCTION__); + return 0; +} + + +static int hal_power_cycle (const struct nfc_nci_device *p_dev) +{ + ALOGE("NFC-NCI HAL: %s", __FUNCTION__); + return 0; +} + +/* + * Generic device handling below - can generally be left unchanged. 
+ */ +/* Close an opened nfc device instance */ +static int nfc_close(hw_device_t *dev) { + free(dev); + return 0; +} + +static int nfc_open(const hw_module_t* module, const char* name, + hw_device_t** device) { + if (strcmp(name, NFC_NCI_CONTROLLER) == 0) { + nfc_nci_device_t *dev = calloc(1, sizeof(nfc_nci_device_t)); + + dev->common.tag = HARDWARE_DEVICE_TAG; + dev->common.version = 0x00010000; // [31:16] major, [15:0] minor + dev->common.module = (struct hw_module_t*) module; + dev->common.close = nfc_close; + + // NCI HAL method pointers + dev->open = hal_open; + dev->write = hal_write; + dev->core_initialized = hal_core_initialized; + dev->pre_discover = hal_pre_discover; + dev->close = hal_close; + dev->control_granted = hal_control_granted; + dev->power_cycle = hal_power_cycle; + + *device = (hw_device_t*) dev; + + return 0; + } else { + return -EINVAL; + } +} + + +static struct hw_module_methods_t nfc_module_methods = { + .open = nfc_open, +}; + +struct nfc_nci_module_t HAL_MODULE_INFO_SYM = { + .common = { + .tag = HARDWARE_MODULE_TAG, + .module_api_version = 0x0100, // [15:8] major, [7:0] minor (1.0) + .hal_api_version = 0x00, // 0 is only valid value + .id = NFC_NCI_HARDWARE_MODULE_ID, + .name = "Default NFC NCI HW HAL", + .author = "The Android Open Source Project", + .methods = &nfc_module_methods, + }, +}; diff --git a/modules/nfc/Android.mk b/modules/nfc/Android.mk index d541b21..429fb43 100644 --- a/modules/nfc/Android.mk +++ b/modules/nfc/Android.mk @@ -18,7 +18,7 @@ include $(CLEAR_VARS) LOCAL_MODULE := nfc.default LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw -LOCAL_SRC_FILES := nfc_hw_example.c +LOCAL_SRC_FILES := nfc_pn544_example.c LOCAL_SHARED_LIBRARIES := liblog libcutils LOCAL_MODULE_TAGS := optional diff --git a/modules/nfc/nfc_hw_example.c b/modules/nfc/nfc_pn544_example.c index 54c9c56..54c9c56 100644 --- a/modules/nfc/nfc_hw_example.c +++ b/modules/nfc/nfc_pn544_example.c diff --git a/modules/usbaudio/audio_hw.c 
b/modules/usbaudio/audio_hw.c index 9283016..f33c343 100644 --- a/modules/usbaudio/audio_hw.c +++ b/modules/usbaudio/audio_hw.c @@ -379,11 +379,6 @@ static int adev_close(hw_device_t *device) return 0; } -static uint32_t adev_get_supported_devices(const struct audio_hw_device *dev) -{ - return AUDIO_DEVICE_OUT_ALL_USB; -} - static int adev_open(const hw_module_t* module, const char* name, hw_device_t** device) { @@ -398,11 +393,10 @@ static int adev_open(const hw_module_t* module, const char* name, return -ENOMEM; adev->hw_device.common.tag = HARDWARE_DEVICE_TAG; - adev->hw_device.common.version = AUDIO_DEVICE_API_VERSION_1_0; + adev->hw_device.common.version = AUDIO_DEVICE_API_VERSION_2_0; adev->hw_device.common.module = (struct hw_module_t *) module; adev->hw_device.common.close = adev_close; - adev->hw_device.get_supported_devices = adev_get_supported_devices; adev->hw_device.init_check = adev_init_check; adev->hw_device.set_voice_volume = adev_set_voice_volume; adev->hw_device.set_master_volume = adev_set_master_volume; diff --git a/tests/camera2/Android.mk b/tests/camera2/Android.mk index 340ec30..c378e12 100644 --- a/tests/camera2/Android.mk +++ b/tests/camera2/Android.mk @@ -2,13 +2,17 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - camera2.cpp + camera2.cpp \ + camera2_utils.cpp LOCAL_SHARED_LIBRARIES := \ libutils \ libstlport \ libhardware \ - libcamera_metadata + libcamera_metadata \ + libgui \ + libsync \ + libui LOCAL_STATIC_LIBRARIES := \ libgtest \ @@ -21,7 +25,7 @@ LOCAL_C_INCLUDES += \ external/stlport/stlport \ system/media/camera/include \ -LOCAL_MODULE:= camera2_hal_tests +LOCAL_MODULE:= camera2_test LOCAL_MODULE_TAGS := tests include $(BUILD_EXECUTABLE) diff --git a/tests/camera2/camera2.cpp b/tests/camera2/camera2.cpp index d13d7cd..f43513e 100644 --- a/tests/camera2/camera2.cpp +++ b/tests/camera2/camera2.cpp @@ -14,10 +14,22 @@ * limitations under the License. 
*/ -#include <system/camera_metadata.h> -#include <hardware/camera2.h> +#define LOG_TAG "Camera2_test" +#define LOG_NDEBUG 0 + +#include <utils/Log.h> #include <gtest/gtest.h> #include <iostream> +#include <fstream> + +#include <utils/Vector.h> +#include <gui/CpuConsumer.h> +#include <ui/PixelFormat.h> +#include <system/camera_metadata.h> + +#include "camera2_utils.h" + +namespace android { class Camera2Test: public testing::Test { public: @@ -33,12 +45,16 @@ class Camera2Test: public testing::Test { ASSERT_TRUE(NULL != module) << "No camera module was set by hw_get_module"; - std::cout << " Camera module name: " << module->name << std::endl; - std::cout << " Camera module author: " << module->author << std::endl; - std::cout << " Camera module API version: 0x" << std::hex - << module->module_api_version << std::endl; - std::cout << " Camera module HAL API version: 0x" << std::hex - << module->hal_api_version << std::endl; + IF_ALOGV() { + std::cout << " Camera module name: " + << module->name << std::endl; + std::cout << " Camera module author: " + << module->author << std::endl; + std::cout << " Camera module API version: 0x" << std::hex + << module->module_api_version << std::endl; + std::cout << " Camera module HAL API version: 0x" << std::hex + << module->hal_api_version << std::endl; + } int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0; ASSERT_EQ(version2_0, module->module_api_version) @@ -52,7 +68,10 @@ class Camera2Test: public testing::Test { sNumCameras = sCameraModule->get_number_of_cameras(); ASSERT_LT(0, sNumCameras) << "No camera devices available!"; - std::cout << " Camera device count: " << sNumCameras << std::endl; + IF_ALOGV() { + std::cout << " Camera device count: " << sNumCameras << std::endl; + } + sCameraSupportsHal2 = new bool[sNumCameras]; for (int i = 0; i < sNumCameras; i++) { @@ -60,19 +79,24 @@ class Camera2Test: public testing::Test { res = sCameraModule->get_camera_info(i, &info); ASSERT_EQ(0, res) << "Failure getting camera info 
for camera " << i; - std::cout << " Camera device: " << std::dec - << i << std::endl;; - std::cout << " Facing: " << std::dec - << info.facing << std::endl; - std::cout << " Orientation: " << std::dec - << info.orientation << std::endl; - std::cout << " Version: 0x" << std::hex << - info.device_version << std::endl; + IF_ALOGV() { + std::cout << " Camera device: " << std::dec + << i << std::endl;; + std::cout << " Facing: " << std::dec + << info.facing << std::endl; + std::cout << " Orientation: " << std::dec + << info.orientation << std::endl; + std::cout << " Version: 0x" << std::hex << + info.device_version << std::endl; + } if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0) { sCameraSupportsHal2[i] = true; ASSERT_TRUE(NULL != info.static_camera_characteristics); - std::cout << " Static camera metadata:" << std::endl; - dump_camera_metadata(info.static_camera_characteristics, 0, 1); + IF_ALOGV() { + std::cout << " Static camera metadata:" << std::endl; + dump_indented_camera_metadata(info.static_camera_characteristics, + 0, 1, 6); + } } else { sCameraSupportsHal2[i] = false; } @@ -83,13 +107,26 @@ class Camera2Test: public testing::Test { return sCameraModule; } - static const camera2_device_t *openCameraDevice(int id) { + static int getNumCameras() { + return sNumCameras; + } + + static bool isHal2Supported(int id) { + return sCameraSupportsHal2[id]; + } + + static camera2_device_t *openCameraDevice(int id) { + ALOGV("Opening camera %d", id); if (NULL == sCameraSupportsHal2) return NULL; if (id >= sNumCameras) return NULL; if (!sCameraSupportsHal2[id]) return NULL; hw_device_t *device = NULL; const camera_module_t *cam_module = getCameraModule(); + if (cam_module == NULL) { + return NULL; + } + char camId[10]; int res; @@ -98,7 +135,7 @@ class Camera2Test: public testing::Test { (const hw_module_t*)cam_module, camId, &device); - if (res < 0 || cam_module == NULL) { + if (res != NO_ERROR || device == NULL) { return NULL; } camera2_device_t *cam_device = 
@@ -106,18 +143,582 @@ class Camera2Test: public testing::Test { return cam_device; } - private: + static status_t configureCameraDevice(camera2_device_t *dev, + MetadataQueue &requestQueue, + MetadataQueue &frameQueue, + NotifierListener &listener) { + + status_t err; + + err = dev->ops->set_request_queue_src_ops(dev, + requestQueue.getToConsumerInterface()); + if (err != OK) return err; + + requestQueue.setFromConsumerInterface(dev); + + err = dev->ops->set_frame_queue_dst_ops(dev, + frameQueue.getToProducerInterface()); + if (err != OK) return err; + + err = listener.getNotificationsFrom(dev); + if (err != OK) return err; + + vendor_tag_query_ops_t *vendor_metadata_tag_ops; + err = dev->ops->get_metadata_vendor_tag_ops(dev, &vendor_metadata_tag_ops); + if (err != OK) return err; + + err = set_camera_metadata_vendor_tag_ops(vendor_metadata_tag_ops); + if (err != OK) return err; + + return OK; + } + + static status_t closeCameraDevice(camera2_device_t *cam_dev) { + int res; + ALOGV("Closing camera %p", cam_dev); + + hw_device_t *dev = reinterpret_cast<hw_device_t *>(cam_dev); + res = dev->close(dev); + return res; + } + + void setUpCamera(int id) { + ASSERT_GT(sNumCameras, id); + status_t res; + + if (mDevice != NULL) { + closeCameraDevice(mDevice); + } + mDevice = openCameraDevice(id); + ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device"; + + camera_info info; + res = sCameraModule->get_camera_info(id, &info); + ASSERT_EQ(OK, res); + + mStaticInfo = info.static_camera_characteristics; + + res = configureCameraDevice(mDevice, + mRequests, + mFrames, + mNotifications); + ASSERT_EQ(OK, res) << "Failure to configure camera device"; + + } + + void setUpStream(sp<ISurfaceTexture> consumer, + int width, int height, int format, int *id) { + status_t res; + + StreamAdapter* stream = new StreamAdapter(consumer); + + ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height); + res = stream->connectToDevice(mDevice, width, height, format); + 
ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: " + << strerror(-res); + mStreams.push_back(stream); + + *id = stream->getId(); + } + + void disconnectStream(int id) { + status_t res; + unsigned int i=0; + for (; i < mStreams.size(); i++) { + if (mStreams[i]->getId() == id) { + res = mStreams[i]->disconnect(); + ASSERT_EQ(NO_ERROR, res) << + "Failed to disconnect stream " << id; + break; + } + } + ASSERT_GT(mStreams.size(), i) << "Stream id not found:" << id; + } + + void getResolutionList(int32_t format, + const int32_t **list, + size_t *count) { + ALOGV("Getting resolutions for format %x", format); + status_t res; + if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) { + camera_metadata_ro_entry_t availableFormats; + res = find_camera_metadata_ro_entry(mStaticInfo, + ANDROID_SCALER_AVAILABLE_FORMATS, + &availableFormats); + ASSERT_EQ(OK, res); + + uint32_t formatIdx; + for (formatIdx=0; formatIdx < availableFormats.count; formatIdx++) { + if (availableFormats.data.i32[formatIdx] == format) break; + } + ASSERT_NE(availableFormats.count, formatIdx) + << "No support found for format 0x" << std::hex << format; + } + + camera_metadata_ro_entry_t availableSizes; + if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) { + res = find_camera_metadata_ro_entry(mStaticInfo, + ANDROID_SCALER_AVAILABLE_RAW_SIZES, + &availableSizes); + } else if (format == HAL_PIXEL_FORMAT_BLOB) { + res = find_camera_metadata_ro_entry(mStaticInfo, + ANDROID_SCALER_AVAILABLE_JPEG_SIZES, + &availableSizes); + } else { + res = find_camera_metadata_ro_entry(mStaticInfo, + ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, + &availableSizes); + } + ASSERT_EQ(OK, res); + + *list = availableSizes.data.i32; + *count = availableSizes.count; + } + + virtual void SetUp() { + const ::testing::TestInfo* const testInfo = + ::testing::UnitTest::GetInstance()->current_test_info(); + ALOGV("*** Starting test %s in test case %s", testInfo->name(), testInfo->test_case_name()); + mDevice = NULL; + } + + virtual void 
TearDown() { + for (unsigned int i = 0; i < mStreams.size(); i++) { + delete mStreams[i]; + } + if (mDevice != NULL) { + closeCameraDevice(mDevice); + } + } + + camera2_device *mDevice; + const camera_metadata_t *mStaticInfo; + + MetadataQueue mRequests; + MetadataQueue mFrames; + NotifierListener mNotifications; + + Vector<StreamAdapter*> mStreams; + + private: static camera_module_t *sCameraModule; - static int sNumCameras; - static bool *sCameraSupportsHal2; + static int sNumCameras; + static bool *sCameraSupportsHal2; }; camera_module_t *Camera2Test::sCameraModule = NULL; -int Camera2Test::sNumCameras = 0; -bool *Camera2Test::sCameraSupportsHal2 = NULL; +bool *Camera2Test::sCameraSupportsHal2 = NULL; +int Camera2Test::sNumCameras = 0; + +static const nsecs_t USEC = 1000; +static const nsecs_t MSEC = 1000*USEC; +static const nsecs_t SEC = 1000*MSEC; -TEST_F(Camera2Test, Basic) { - ASSERT_TRUE(NULL != getCameraModule()); +TEST_F(Camera2Test, OpenClose) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + camera2_device_t *d = openCameraDevice(id); + ASSERT_TRUE(NULL != d) << "Failed to open camera device"; + + res = closeCameraDevice(d); + ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device"; + } } + +TEST_F(Camera2Test, Capture1Raw) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + ASSERT_NO_FATAL_FAILURE(setUpCamera(id)); + + sp<CpuConsumer> rawConsumer = new CpuConsumer(1); + sp<FrameWaiter> rawWaiter = new FrameWaiter(); + rawConsumer->setFrameAvailableListener(rawWaiter); + + const int32_t *rawResolutions; + size_t rawResolutionsCount; + + int format = HAL_PIXEL_FORMAT_RAW_SENSOR; + + getResolutionList(format, + &rawResolutions, &rawResolutionsCount); + ASSERT_LT((size_t)0, rawResolutionsCount); + + // Pick first available raw resolution + int width = rawResolutions[0]; + int height = rawResolutions[1]; + + int streamId; + 
ASSERT_NO_FATAL_FAILURE( + setUpStream(rawConsumer->getProducerInterface(), + width, height, format, &streamId) ); + + camera_metadata_t *request; + request = allocate_camera_metadata(20, 2000); + + uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL; + add_camera_metadata_entry(request, + ANDROID_REQUEST_METADATA_MODE, + (void**)&metadataMode, 1); + uint32_t outputStreams = streamId; + add_camera_metadata_entry(request, + ANDROID_REQUEST_OUTPUT_STREAMS, + (void**)&outputStreams, 1); + + uint64_t exposureTime = 10*MSEC; + add_camera_metadata_entry(request, + ANDROID_SENSOR_EXPOSURE_TIME, + (void**)&exposureTime, 1); + uint64_t frameDuration = 30*MSEC; + add_camera_metadata_entry(request, + ANDROID_SENSOR_FRAME_DURATION, + (void**)&frameDuration, 1); + uint32_t sensitivity = 100; + add_camera_metadata_entry(request, + ANDROID_SENSOR_SENSITIVITY, + (void**)&sensitivity, 1); + + uint32_t hourOfDay = 12; + add_camera_metadata_entry(request, + 0x80000000, // EMULATOR_HOUROFDAY + &hourOfDay, 1); + + IF_ALOGV() { + std::cout << "Input request: " << std::endl; + dump_indented_camera_metadata(request, 0, 1, 2); + } + + res = mRequests.enqueue(request); + ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res); + + res = mFrames.waitForBuffer(exposureTime + SEC); + ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res); + + camera_metadata_t *frame; + res = mFrames.dequeue(&frame); + ASSERT_EQ(NO_ERROR, res); + ASSERT_TRUE(frame != NULL); + + IF_ALOGV() { + std::cout << "Output frame:" << std::endl; + dump_indented_camera_metadata(frame, 0, 1, 2); + } + + res = rawWaiter->waitForFrame(exposureTime + SEC); + ASSERT_EQ(NO_ERROR, res); + + CpuConsumer::LockedBuffer buffer; + res = rawConsumer->lockNextBuffer(&buffer); + ASSERT_EQ(NO_ERROR, res); + + IF_ALOGV() { + const char *dumpname = + "/data/local/tmp/camera2_test-capture1raw-dump.raw"; + ALOGV("Dumping raw buffer to %s", dumpname); + // Write to file + std::ofstream rawFile(dumpname); + size_t 
bpp = 2; + for (unsigned int y = 0; y < buffer.height; y++) { + rawFile.write( + (const char *)(buffer.data + y * buffer.stride * bpp), + buffer.width * bpp); + } + rawFile.close(); + } + + res = rawConsumer->unlockBuffer(buffer); + ASSERT_EQ(NO_ERROR, res); + + ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId)); + + res = closeCameraDevice(mDevice); + ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device"; + + } +} + +TEST_F(Camera2Test, CaptureBurstRaw) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + ASSERT_NO_FATAL_FAILURE(setUpCamera(id)); + + sp<CpuConsumer> rawConsumer = new CpuConsumer(1); + sp<FrameWaiter> rawWaiter = new FrameWaiter(); + rawConsumer->setFrameAvailableListener(rawWaiter); + + const int32_t *rawResolutions; + size_t rawResolutionsCount; + + int format = HAL_PIXEL_FORMAT_RAW_SENSOR; + + getResolutionList(format, + &rawResolutions, &rawResolutionsCount); + ASSERT_LT((uint32_t)0, rawResolutionsCount); + + // Pick first available raw resolution + int width = rawResolutions[0]; + int height = rawResolutions[1]; + + int streamId; + ASSERT_NO_FATAL_FAILURE( + setUpStream(rawConsumer->getProducerInterface(), + width, height, format, &streamId) ); + + camera_metadata_t *request; + request = allocate_camera_metadata(20, 2000); + + uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL; + add_camera_metadata_entry(request, + ANDROID_REQUEST_METADATA_MODE, + (void**)&metadataMode, 1); + uint32_t outputStreams = streamId; + add_camera_metadata_entry(request, + ANDROID_REQUEST_OUTPUT_STREAMS, + (void**)&outputStreams, 1); + + uint64_t frameDuration = 30*MSEC; + add_camera_metadata_entry(request, + ANDROID_SENSOR_FRAME_DURATION, + (void**)&frameDuration, 1); + uint32_t sensitivity = 100; + add_camera_metadata_entry(request, + ANDROID_SENSOR_SENSITIVITY, + (void**)&sensitivity, 1); + + uint32_t hourOfDay = 12; + add_camera_metadata_entry(request, + 0x80000000, // EMULATOR_HOUROFDAY + 
&hourOfDay, 1); + + IF_ALOGV() { + std::cout << "Input request template: " << std::endl; + dump_indented_camera_metadata(request, 0, 1, 2); + } + + int numCaptures = 10; + + // Enqueue numCaptures requests with increasing exposure time + + uint64_t exposureTime = 100 * USEC; + for (int reqCount = 0; reqCount < numCaptures; reqCount++ ) { + camera_metadata_t *req; + req = allocate_camera_metadata(20, 2000); + append_camera_metadata(req, request); + + add_camera_metadata_entry(req, + ANDROID_SENSOR_EXPOSURE_TIME, + (void**)&exposureTime, 1); + exposureTime *= 2; + + res = mRequests.enqueue(req); + ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " + << strerror(-res); + } + + // Get frames and image buffers one by one + uint64_t expectedExposureTime = 100 * USEC; + for (int frameCount = 0; frameCount < 10; frameCount++) { + res = mFrames.waitForBuffer(SEC + expectedExposureTime); + ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res); + + camera_metadata_t *frame; + res = mFrames.dequeue(&frame); + ASSERT_EQ(NO_ERROR, res); + ASSERT_TRUE(frame != NULL); + + camera_metadata_entry_t frameNumber; + res = find_camera_metadata_entry(frame, + ANDROID_REQUEST_FRAME_COUNT, + &frameNumber); + ASSERT_EQ(NO_ERROR, res); + ASSERT_EQ(frameCount, *frameNumber.data.i32); + + res = rawWaiter->waitForFrame(SEC + expectedExposureTime); + ASSERT_EQ(NO_ERROR, res) << + "Never got raw data for capture " << frameCount; + + CpuConsumer::LockedBuffer buffer; + res = rawConsumer->lockNextBuffer(&buffer); + ASSERT_EQ(NO_ERROR, res); + + IF_ALOGV() { + char dumpname[60]; + snprintf(dumpname, 60, + "/data/local/tmp/camera2_test-" + "captureBurstRaw-dump_%d.raw", + frameCount); + ALOGV("Dumping raw buffer to %s", dumpname); + // Write to file + std::ofstream rawFile(dumpname); + for (unsigned int y = 0; y < buffer.height; y++) { + rawFile.write( + (const char *)(buffer.data + y * buffer.stride * 2), + buffer.width * 2); + } + rawFile.close(); + } + + res = 
rawConsumer->unlockBuffer(buffer); + ASSERT_EQ(NO_ERROR, res); + + expectedExposureTime *= 2; + } + } +} + +TEST_F(Camera2Test, ConstructDefaultRequests) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + ASSERT_NO_FATAL_FAILURE(setUpCamera(id)); + + for (int i = CAMERA2_TEMPLATE_PREVIEW; i < CAMERA2_TEMPLATE_COUNT; + i++) { + camera_metadata_t *request = NULL; + res = mDevice->ops->construct_default_request(mDevice, + i, + &request); + EXPECT_EQ(NO_ERROR, res) << + "Unable to construct request from template type %d", i; + EXPECT_TRUE(request != NULL); + EXPECT_LT((size_t)0, get_camera_metadata_entry_count(request)); + EXPECT_LT((size_t)0, get_camera_metadata_data_count(request)); + + IF_ALOGV() { + std::cout << " ** Template type " << i << ":"<<std::endl; + dump_indented_camera_metadata(request, 0, 2, 4); + } + + free_camera_metadata(request); + } + } +} + +TEST_F(Camera2Test, Capture1Jpeg) { + status_t res; + + for (int id = 0; id < getNumCameras(); id++) { + if (!isHal2Supported(id)) continue; + + ASSERT_NO_FATAL_FAILURE(setUpCamera(id)); + + sp<CpuConsumer> jpegConsumer = new CpuConsumer(1); + sp<FrameWaiter> jpegWaiter = new FrameWaiter(); + jpegConsumer->setFrameAvailableListener(jpegWaiter); + + const int32_t *jpegResolutions; + size_t jpegResolutionsCount; + + int format = HAL_PIXEL_FORMAT_BLOB; + + getResolutionList(format, + &jpegResolutions, &jpegResolutionsCount); + ASSERT_LT((size_t)0, jpegResolutionsCount); + + // Pick first available JPEG resolution + int width = jpegResolutions[0]; + int height = jpegResolutions[1]; + + int streamId; + ASSERT_NO_FATAL_FAILURE( + setUpStream(jpegConsumer->getProducerInterface(), + width, height, format, &streamId) ); + + camera_metadata_t *request; + request = allocate_camera_metadata(20, 2000); + + uint8_t metadataMode = ANDROID_REQUEST_METADATA_FULL; + add_camera_metadata_entry(request, + ANDROID_REQUEST_METADATA_MODE, + (void**)&metadataMode, 1); + 
uint32_t outputStreams = streamId; + add_camera_metadata_entry(request, + ANDROID_REQUEST_OUTPUT_STREAMS, + (void**)&outputStreams, 1); + + uint64_t exposureTime = 10*MSEC; + add_camera_metadata_entry(request, + ANDROID_SENSOR_EXPOSURE_TIME, + (void**)&exposureTime, 1); + uint64_t frameDuration = 30*MSEC; + add_camera_metadata_entry(request, + ANDROID_SENSOR_FRAME_DURATION, + (void**)&frameDuration, 1); + uint32_t sensitivity = 100; + add_camera_metadata_entry(request, + ANDROID_SENSOR_SENSITIVITY, + (void**)&sensitivity, 1); + + uint32_t hourOfDay = 12; + add_camera_metadata_entry(request, + 0x80000000, // EMULATOR_HOUROFDAY + &hourOfDay, 1); + + IF_ALOGV() { + std::cout << "Input request: " << std::endl; + dump_indented_camera_metadata(request, 0, 1, 4); + } + + res = mRequests.enqueue(request); + ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res); + + res = mFrames.waitForBuffer(exposureTime + SEC); + ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res); + + camera_metadata_t *frame; + res = mFrames.dequeue(&frame); + ASSERT_EQ(NO_ERROR, res); + ASSERT_TRUE(frame != NULL); + + IF_ALOGV() { + std::cout << "Output frame:" << std::endl; + dump_indented_camera_metadata(frame, 0, 1, 4); + } + + res = jpegWaiter->waitForFrame(exposureTime + SEC); + ASSERT_EQ(NO_ERROR, res); + + CpuConsumer::LockedBuffer buffer; + res = jpegConsumer->lockNextBuffer(&buffer); + ASSERT_EQ(NO_ERROR, res); + + IF_ALOGV() { + const char *dumpname = + "/data/local/tmp/camera2_test-capture1jpeg-dump.jpeg"; + ALOGV("Dumping raw buffer to %s", dumpname); + // Write to file + std::ofstream jpegFile(dumpname); + size_t bpp = 1; + for (unsigned int y = 0; y < buffer.height; y++) { + jpegFile.write( + (const char *)(buffer.data + y * buffer.stride * bpp), + buffer.width * bpp); + } + jpegFile.close(); + } + + res = jpegConsumer->unlockBuffer(buffer); + ASSERT_EQ(NO_ERROR, res); + + ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId)); + + res = 
closeCameraDevice(mDevice); + ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device"; + + } +} + + +} // namespace android diff --git a/tests/camera2/camera2_utils.cpp b/tests/camera2/camera2_utils.cpp new file mode 100644 index 0000000..cefe29a --- /dev/null +++ b/tests/camera2/camera2_utils.cpp @@ -0,0 +1,581 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Utility classes for camera2 HAL testing + +#define LOG_TAG "Camera2_test_utils" +#define LOG_NDEBUG 0 + +#include "utils/Log.h" +#include "camera2_utils.h" + +namespace android { + +/** + * MetadataQueue + */ + +MetadataQueue::MetadataQueue(): + mDevice(NULL), + mFrameCount(0), + mCount(0), + mStreamSlotCount(0), + mSignalConsumer(true) +{ + camera2_request_queue_src_ops::dequeue_request = consumer_dequeue; + camera2_request_queue_src_ops::request_count = consumer_buffer_count; + camera2_request_queue_src_ops::free_request = consumer_free; + + camera2_frame_queue_dst_ops::dequeue_frame = producer_dequeue; + camera2_frame_queue_dst_ops::cancel_frame = producer_cancel; + camera2_frame_queue_dst_ops::enqueue_frame = producer_enqueue; +} + +MetadataQueue::~MetadataQueue() { + freeBuffers(mEntries.begin(), mEntries.end()); + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); +} + +// Interface to camera2 HAL as consumer (input requests/reprocessing) +const camera2_request_queue_src_ops_t* MetadataQueue::getToConsumerInterface() { 
+ return static_cast<camera2_request_queue_src_ops_t*>(this); +} + +void MetadataQueue::setFromConsumerInterface(camera2_device_t *d) { + mDevice = d; +} + +const camera2_frame_queue_dst_ops_t* MetadataQueue::getToProducerInterface() { + return static_cast<camera2_frame_queue_dst_ops_t*>(this); +} + +// Real interfaces +status_t MetadataQueue::enqueue(camera_metadata_t *buf) { + Mutex::Autolock l(mMutex); + + mCount++; + mEntries.push_back(buf); + notEmpty.signal(); + + if (mSignalConsumer && mDevice != NULL) { + mSignalConsumer = false; + + mMutex.unlock(); + ALOGV("%s: Signaling consumer", __FUNCTION__); + mDevice->ops->notify_request_queue_not_empty(mDevice); + mMutex.lock(); + } + return OK; +} + +int MetadataQueue::getBufferCount() { + Mutex::Autolock l(mMutex); + if (mStreamSlotCount > 0) { + return CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS; + } + return mCount; +} + +status_t MetadataQueue::dequeue(camera_metadata_t **buf, bool incrementCount) { + Mutex::Autolock l(mMutex); + + if (mCount == 0) { + if (mStreamSlotCount == 0) { + ALOGV("%s: Empty", __FUNCTION__); + *buf = NULL; + mSignalConsumer = true; + return OK; + } + ALOGV("%s: Streaming %d frames to queue", __FUNCTION__, + mStreamSlotCount); + + for (List<camera_metadata_t*>::iterator slotEntry = mStreamSlot.begin(); + slotEntry != mStreamSlot.end(); + slotEntry++ ) { + size_t entries = get_camera_metadata_entry_count(*slotEntry); + size_t dataBytes = get_camera_metadata_data_count(*slotEntry); + + camera_metadata_t *copy = allocate_camera_metadata(entries, dataBytes); + append_camera_metadata(copy, *slotEntry); + mEntries.push_back(copy); + } + mCount = mStreamSlotCount; + } + ALOGV("MetadataQueue: deque (%d buffers)", mCount); + camera_metadata_t *b = *(mEntries.begin()); + mEntries.erase(mEntries.begin()); + + if (incrementCount) { + add_camera_metadata_entry(b, + ANDROID_REQUEST_FRAME_COUNT, + (void**)&mFrameCount, 1); + mFrameCount++; + } + + *buf = b; + mCount--; + + return OK; +} + +status_t 
MetadataQueue::waitForBuffer(nsecs_t timeout) { + Mutex::Autolock l(mMutex); + status_t res; + while (mCount == 0) { + res = notEmpty.waitRelative(mMutex,timeout); + if (res != OK) return res; + } + return OK; +} + +status_t MetadataQueue::setStreamSlot(camera_metadata_t *buf) { + if (buf == NULL) { + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + mStreamSlotCount = 0; + return OK; + } + if (mStreamSlotCount > 1) { + List<camera_metadata_t*>::iterator deleter = ++mStreamSlot.begin(); + freeBuffers(++mStreamSlot.begin(), mStreamSlot.end()); + mStreamSlotCount = 1; + } + if (mStreamSlotCount == 1) { + free_camera_metadata( *(mStreamSlot.begin()) ); + *(mStreamSlot.begin()) = buf; + } else { + mStreamSlot.push_front(buf); + mStreamSlotCount = 1; + } + return OK; +} + +status_t MetadataQueue::setStreamSlot(const List<camera_metadata_t*> &bufs) { + if (mStreamSlotCount > 0) { + freeBuffers(mStreamSlot.begin(), mStreamSlot.end()); + } + mStreamSlot = bufs; + mStreamSlotCount = mStreamSlot.size(); + + return OK; +} + +status_t MetadataQueue::freeBuffers(List<camera_metadata_t*>::iterator start, + List<camera_metadata_t*>::iterator end) { + while (start != end) { + free_camera_metadata(*start); + start = mStreamSlot.erase(start); + } + return OK; +} + +MetadataQueue* MetadataQueue::getInstance( + const camera2_request_queue_src_ops_t *q) { + const MetadataQueue* cmq = static_cast<const MetadataQueue*>(q); + return const_cast<MetadataQueue*>(cmq); +} + +MetadataQueue* MetadataQueue::getInstance( + const camera2_frame_queue_dst_ops_t *q) { + const MetadataQueue* cmq = static_cast<const MetadataQueue*>(q); + return const_cast<MetadataQueue*>(cmq); +} + +int MetadataQueue::consumer_buffer_count( + const camera2_request_queue_src_ops_t *q) { + MetadataQueue *queue = getInstance(q); + return queue->getBufferCount(); +} + +int MetadataQueue::consumer_dequeue(const camera2_request_queue_src_ops_t *q, + camera_metadata_t **buffer) { + MetadataQueue *queue = getInstance(q); 
+ return queue->dequeue(buffer, true); +} + +int MetadataQueue::consumer_free(const camera2_request_queue_src_ops_t *q, + camera_metadata_t *old_buffer) { + MetadataQueue *queue = getInstance(q); + free_camera_metadata(old_buffer); + return OK; +} + +int MetadataQueue::producer_dequeue(const camera2_frame_queue_dst_ops_t *q, + size_t entries, size_t bytes, + camera_metadata_t **buffer) { + camera_metadata_t *new_buffer = + allocate_camera_metadata(entries, bytes); + if (new_buffer == NULL) return NO_MEMORY; + *buffer = new_buffer; + return OK; +} + +int MetadataQueue::producer_cancel(const camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *old_buffer) { + free_camera_metadata(old_buffer); + return OK; +} + +int MetadataQueue::producer_enqueue(const camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *filled_buffer) { + MetadataQueue *queue = getInstance(q); + return queue->enqueue(filled_buffer); +} + +/** + * NotifierListener + */ + +NotifierListener::NotifierListener() { +} + +status_t NotifierListener::getNotificationsFrom(camera2_device *dev) { + if (!dev) return BAD_VALUE; + status_t err; + err = dev->ops->set_notify_callback(dev, + notify_callback_dispatch, + (void*)this); + return err; +} + +status_t NotifierListener::getNextNotification(int32_t *msg_type, + int32_t *ext1, + int32_t *ext2, + int32_t *ext3) { + Mutex::Autolock l(mMutex); + if (mNotifications.size() == 0) return BAD_VALUE; + return getNextNotificationLocked(msg_type, ext1, ext2, ext3); +} + +status_t NotifierListener::waitForNotification(int32_t *msg_type, + int32_t *ext1, + int32_t *ext2, + int32_t *ext3) { + Mutex::Autolock l(mMutex); + while (mNotifications.size() == 0) { + mNewNotification.wait(mMutex); + } + return getNextNotificationLocked(msg_type, ext1, ext2, ext3); +} + +int NotifierListener::numNotifications() { + Mutex::Autolock l(mMutex); + return mNotifications.size(); +} + +status_t NotifierListener::getNextNotificationLocked(int32_t *msg_type, + int32_t *ext1, + int32_t 
*ext2, + int32_t *ext3) { + *msg_type = mNotifications.begin()->msg_type; + *ext1 = mNotifications.begin()->ext1; + *ext2 = mNotifications.begin()->ext2; + *ext3 = mNotifications.begin()->ext3; + mNotifications.erase(mNotifications.begin()); + return OK; +} + +void NotifierListener::onNotify(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3) { + Mutex::Autolock l(mMutex); + mNotifications.push_back(Notification(msg_type, ext1, ext2, ext3)); + mNewNotification.signal(); +} + +void NotifierListener::notify_callback_dispatch(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3, + void *user) { + NotifierListener *me = reinterpret_cast<NotifierListener*>(user); + me->onNotify(msg_type, ext1, ext2, ext3); +} + +/** + * StreamAdapter + */ + +#ifndef container_of +#define container_of(ptr, type, member) \ + (type *)((char*)(ptr) - offsetof(type, member)) +#endif + +StreamAdapter::StreamAdapter(sp<ISurfaceTexture> consumer): + mState(UNINITIALIZED), mDevice(NULL), + mId(-1), + mWidth(0), mHeight(0), mFormat(0) +{ + mConsumerInterface = new SurfaceTextureClient(consumer); + camera2_stream_ops::dequeue_buffer = dequeue_buffer; + camera2_stream_ops::enqueue_buffer = enqueue_buffer; + camera2_stream_ops::cancel_buffer = cancel_buffer; + camera2_stream_ops::set_crop = set_crop; +} + +StreamAdapter::~StreamAdapter() { + disconnect(); +} + +status_t StreamAdapter::connectToDevice(camera2_device_t *d, + uint32_t width, uint32_t height, int format) { + if (mState != UNINITIALIZED) return INVALID_OPERATION; + if (d == NULL) { + ALOGE("%s: Null device passed to stream adapter", __FUNCTION__); + return BAD_VALUE; + } + + status_t res; + + mWidth = width; + mHeight = height; + mFormat = format; + + // Allocate device-side stream interface + + uint32_t id; + uint32_t formatActual; // ignored + uint32_t usage; + uint32_t maxBuffers = 2; + res = d->ops->allocate_stream(d, + mWidth, mHeight, mFormat, getStreamOps(), + &id, &formatActual, &usage, &maxBuffers); + 
if (res != OK) { + ALOGE("%s: Device stream allocation failed: %s (%d)", + __FUNCTION__, strerror(-res), res); + mState = UNINITIALIZED; + return res; + } + mDevice = d; + + mId = id; + mUsage = usage; + mMaxProducerBuffers = maxBuffers; + + // Configure consumer-side ANativeWindow interface + + res = native_window_api_connect(mConsumerInterface.get(), + NATIVE_WINDOW_API_CAMERA); + if (res != OK) { + ALOGE("%s: Unable to connect to native window for stream %d", + __FUNCTION__, mId); + mState = ALLOCATED; + return res; + } + + res = native_window_set_usage(mConsumerInterface.get(), mUsage); + if (res != OK) { + ALOGE("%s: Unable to configure usage %08x for stream %d", + __FUNCTION__, mUsage, mId); + mState = CONNECTED; + return res; + } + + res = native_window_set_buffers_geometry(mConsumerInterface.get(), + mWidth, mHeight, mFormat); + if (res != OK) { + ALOGE("%s: Unable to configure buffer geometry" + " %d x %d, format 0x%x for stream %d", + __FUNCTION__, mWidth, mHeight, mFormat, mId); + mState = CONNECTED; + return res; + } + + int maxConsumerBuffers; + res = mConsumerInterface->query(mConsumerInterface.get(), + NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); + if (res != OK) { + ALOGE("%s: Unable to query consumer undequeued" + " buffer count for stream %d", __FUNCTION__, mId); + mState = CONNECTED; + return res; + } + mMaxConsumerBuffers = maxConsumerBuffers; + + ALOGV("%s: Producer wants %d buffers, consumer wants %d", __FUNCTION__, + mMaxProducerBuffers, mMaxConsumerBuffers); + + int totalBuffers = mMaxConsumerBuffers + mMaxProducerBuffers; + + res = native_window_set_buffer_count(mConsumerInterface.get(), + totalBuffers); + if (res != OK) { + ALOGE("%s: Unable to set buffer count for stream %d", + __FUNCTION__, mId); + mState = CONNECTED; + return res; + } + + // Register allocated buffers with HAL device + buffer_handle_t *buffers = new buffer_handle_t[totalBuffers]; + ANativeWindowBuffer **anwBuffers = new ANativeWindowBuffer*[totalBuffers]; 
+ int bufferIdx = 0; + for (; bufferIdx < totalBuffers; bufferIdx++) { + res = native_window_dequeue_buffer_and_wait(mConsumerInterface.get(), + &anwBuffers[bufferIdx]); + if (res != OK) { + ALOGE("%s: Unable to dequeue buffer %d for initial registration for" + "stream %d", __FUNCTION__, bufferIdx, mId); + mState = CONNECTED; + goto cleanUpBuffers; + } + buffers[bufferIdx] = anwBuffers[bufferIdx]->handle; + } + + res = mDevice->ops->register_stream_buffers(mDevice, + mId, + totalBuffers, + buffers); + if (res != OK) { + ALOGE("%s: Unable to register buffers with HAL device for stream %d", + __FUNCTION__, mId); + mState = CONNECTED; + } else { + mState = ACTIVE; + } + +cleanUpBuffers: + for (int i = 0; i < bufferIdx; i++) { + res = mConsumerInterface->cancelBuffer(mConsumerInterface.get(), + anwBuffers[i], -1); + } + delete anwBuffers; + delete buffers; + + return res; +} + +status_t StreamAdapter::disconnect() { + status_t res; + if (mState >= ALLOCATED) { + res = mDevice->ops->release_stream(mDevice, mId); + if (res != OK) { + ALOGE("%s: Unable to release stream %d", + __FUNCTION__, mId); + return res; + } + } + if (mState >= CONNECTED) { + res = native_window_api_disconnect(mConsumerInterface.get(), + NATIVE_WINDOW_API_CAMERA); + if (res != OK) { + ALOGE("%s: Unable to disconnect stream %d from native window", + __FUNCTION__, mId); + return res; + } + } + mId = -1; + mState = DISCONNECTED; + return OK; +} + +int StreamAdapter::getId() { + return mId; +} + +const camera2_stream_ops *StreamAdapter::getStreamOps() { + return static_cast<camera2_stream_ops *>(this); +} + +ANativeWindow* StreamAdapter::toANW(const camera2_stream_ops_t *w) { + return static_cast<const StreamAdapter*>(w)->mConsumerInterface.get(); +} + +int StreamAdapter::dequeue_buffer(const camera2_stream_ops_t *w, + buffer_handle_t** buffer) { + int res; + int state = static_cast<const StreamAdapter*>(w)->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, 
state); + return INVALID_OPERATION; + } + + ANativeWindow *a = toANW(w); + ANativeWindowBuffer* anb; + res = native_window_dequeue_buffer_and_wait(a, &anb); + if (res != OK) return res; + + *buffer = &(anb->handle); + + return res; +} + +int StreamAdapter::enqueue_buffer(const camera2_stream_ops_t* w, + int64_t timestamp, + buffer_handle_t* buffer) { + int state = static_cast<const StreamAdapter*>(w)->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); + return INVALID_OPERATION; + } + ANativeWindow *a = toANW(w); + status_t err; + err = native_window_set_buffers_timestamp(a, timestamp); + if (err != OK) return err; + return a->queueBuffer(a, + container_of(buffer, ANativeWindowBuffer, handle), -1); +} + +int StreamAdapter::cancel_buffer(const camera2_stream_ops_t* w, + buffer_handle_t* buffer) { + int state = static_cast<const StreamAdapter*>(w)->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); + return INVALID_OPERATION; + } + ANativeWindow *a = toANW(w); + return a->cancelBuffer(a, + container_of(buffer, ANativeWindowBuffer, handle), -1); +} + +int StreamAdapter::set_crop(const camera2_stream_ops_t* w, + int left, int top, int right, int bottom) { + int state = static_cast<const StreamAdapter*>(w)->mState; + if (state != ACTIVE) { + ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state); + return INVALID_OPERATION; + } + ANativeWindow *a = toANW(w); + android_native_rect_t crop = { left, top, right, bottom }; + return native_window_set_crop(a, &crop); +} + +/** + * FrameWaiter + */ + +FrameWaiter::FrameWaiter(): + mPendingFrames(0) { +} + +status_t FrameWaiter::waitForFrame(nsecs_t timeout) { + status_t res; + Mutex::Autolock lock(mMutex); + while (mPendingFrames == 0) { + res = mCondition.waitRelative(mMutex, timeout); + if (res != OK) return res; + } + mPendingFrames--; + return OK; +} + +void FrameWaiter::onFrameAvailable() { + Mutex::Autolock 
lock(mMutex); + mPendingFrames++; + mCondition.signal(); +} + +} // namespace android diff --git a/tests/camera2/camera2_utils.h b/tests/camera2/camera2_utils.h new file mode 100644 index 0000000..7822f5b --- /dev/null +++ b/tests/camera2/camera2_utils.h @@ -0,0 +1,236 @@ +/* + * Copyright (C) 2012 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Utility classes for camera2 HAL testing + +#include <system/camera_metadata.h> +#include <hardware/camera2.h> + +#include <gui/SurfaceTextureClient.h> +#include <gui/CpuConsumer.h> + +#include <utils/List.h> +#include <utils/Mutex.h> +#include <utils/Condition.h> + +namespace android { + +/** + * Queue class for both sending requests to a camera2 device, and for receiving + * frames from a camera2 device. + */ +class MetadataQueue: public camera2_request_queue_src_ops_t, + public camera2_frame_queue_dst_ops_t { + public: + MetadataQueue(); + ~MetadataQueue(); + + // Interface to camera2 HAL device, either for requests (device is consumer) + // or for frames (device is producer) + const camera2_request_queue_src_ops_t* getToConsumerInterface(); + void setFromConsumerInterface(camera2_device_t *d); + + const camera2_frame_queue_dst_ops_t* getToProducerInterface(); + + // Real interfaces. On enqueue, queue takes ownership of buffer pointer + // On dequeue, user takes ownership of buffer pointer. 
+ status_t enqueue(camera_metadata_t *buf); + status_t dequeue(camera_metadata_t **buf, bool incrementCount = true); + int getBufferCount(); + status_t waitForBuffer(nsecs_t timeout); + + // Set repeating buffer(s); if the queue is empty on a dequeue call, the + // queue copies the contents of the stream slot into the queue, and then + // dequeues the first new entry. + status_t setStreamSlot(camera_metadata_t *buf); + status_t setStreamSlot(const List<camera_metadata_t*> &bufs); + + private: + status_t freeBuffers(List<camera_metadata_t*>::iterator start, + List<camera_metadata_t*>::iterator end); + + camera2_device_t *mDevice; + + Mutex mMutex; + Condition notEmpty; + + int mFrameCount; + + int mCount; + List<camera_metadata_t*> mEntries; + int mStreamSlotCount; + List<camera_metadata_t*> mStreamSlot; + + bool mSignalConsumer; + + static MetadataQueue* getInstance(const camera2_frame_queue_dst_ops_t *q); + static MetadataQueue* getInstance(const camera2_request_queue_src_ops_t *q); + + static int consumer_buffer_count(const camera2_request_queue_src_ops_t *q); + + static int consumer_dequeue(const camera2_request_queue_src_ops_t *q, + camera_metadata_t **buffer); + + static int consumer_free(const camera2_request_queue_src_ops_t *q, + camera_metadata_t *old_buffer); + + static int producer_dequeue(const camera2_frame_queue_dst_ops_t *q, + size_t entries, size_t bytes, + camera_metadata_t **buffer); + + static int producer_cancel(const camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *old_buffer); + + static int producer_enqueue(const camera2_frame_queue_dst_ops_t *q, + camera_metadata_t *filled_buffer); + +}; + +/** + * Basic class to receive and queue up notifications from the camera device + */ + +class NotifierListener { + public: + + NotifierListener(); + + status_t getNotificationsFrom(camera2_device *dev); + + status_t getNextNotification(int32_t *msg_type, int32_t *ext1, + int32_t *ext2, int32_t *ext3); + + status_t waitForNotification(int32_t 
*msg_type, int32_t *ext1, + int32_t *ext2, int32_t *ext3); + + int numNotifications(); + + private: + + status_t getNextNotificationLocked(int32_t *msg_type, + int32_t *ext1, int32_t *ext2, int32_t *ext3); + + struct Notification { + Notification(int32_t type, int32_t e1, int32_t e2, int32_t e3): + msg_type(type), + ext1(e1), + ext2(e2), + ext3(e3) + {} + + int32_t msg_type; + int32_t ext1; + int32_t ext2; + int32_t ext3; + }; + + List<Notification> mNotifications; + + Mutex mMutex; + Condition mNewNotification; + + void onNotify(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3); + + static void notify_callback_dispatch(int32_t msg_type, + int32_t ext1, + int32_t ext2, + int32_t ext3, + void *user); + +}; + +/** + * Adapter from an ISurfaceTexture interface to camera2 device stream ops. + * Also takes care of allocating/deallocating stream in device interface + */ +class StreamAdapter: public camera2_stream_ops { + public: + StreamAdapter(sp<ISurfaceTexture> consumer); + + ~StreamAdapter(); + + status_t connectToDevice(camera2_device_t *d, + uint32_t width, uint32_t height, int format); + + status_t disconnect(); + + // Get stream ID. Only valid after a successful connectToDevice call. 
+ int getId(); + + private: + enum { + ERROR = -1, + DISCONNECTED = 0, + UNINITIALIZED, + ALLOCATED, + CONNECTED, + ACTIVE + } mState; + + sp<ANativeWindow> mConsumerInterface; + camera2_device_t *mDevice; + + uint32_t mId; + uint32_t mWidth; + uint32_t mHeight; + uint32_t mFormat; + uint32_t mUsage; + uint32_t mMaxProducerBuffers; + uint32_t mMaxConsumerBuffers; + + const camera2_stream_ops *getStreamOps(); + + static ANativeWindow* toANW(const camera2_stream_ops_t *w); + + static int dequeue_buffer(const camera2_stream_ops_t *w, + buffer_handle_t** buffer); + + static int enqueue_buffer(const camera2_stream_ops_t* w, + int64_t timestamp, + buffer_handle_t* buffer); + + static int cancel_buffer(const camera2_stream_ops_t* w, + buffer_handle_t* buffer); + + static int set_crop(const camera2_stream_ops_t* w, + int left, int top, int right, int bottom); + +}; + +/** + * Simple class to wait on the CpuConsumer to have a frame available + */ +class FrameWaiter : public CpuConsumer::FrameAvailableListener { + public: + FrameWaiter(); + + /** + * Wait for max timeout nanoseconds for a new frame. Returns + * OK if a frame is available, TIMED_OUT if the timeout was reached. 
+ */ + status_t waitForFrame(nsecs_t timeout); + + virtual void onFrameAvailable(); + + int mPendingFrames; + Mutex mMutex; + Condition mCondition; +}; + +} diff --git a/tests/keymaster/Android.mk b/tests/keymaster/Android.mk index 2661211..e53e67f 100644 --- a/tests/keymaster/Android.mk +++ b/tests/keymaster/Android.mk @@ -10,11 +10,13 @@ LOCAL_SRC_FILES:= \ LOCAL_C_INCLUDES := \ bionic \ external/gtest/include \ + external/openssl/include \ external/stlport/stlport LOCAL_SHARED_LIBRARIES := \ liblog \ libutils \ + libcrypto \ libstlport \ libhardware diff --git a/tests/keymaster/keymaster_test.cpp b/tests/keymaster/keymaster_test.cpp index f4cfcd2..dd24fcb 100644 --- a/tests/keymaster/keymaster_test.cpp +++ b/tests/keymaster/keymaster_test.cpp @@ -22,6 +22,10 @@ #include <gtest/gtest.h> +#include <openssl/bn.h> +#include <openssl/evp.h> +#include <openssl/x509.h> + #include <fstream> #include <iostream> @@ -93,6 +97,34 @@ private: keymaster_device_t** mDevice; }; +struct BIGNUM_Delete { + void operator()(BIGNUM* p) const { + BN_free(p); + } +}; +typedef UniquePtr<BIGNUM, BIGNUM_Delete> Unique_BIGNUM; + +struct EVP_PKEY_Delete { + void operator()(EVP_PKEY* p) const { + EVP_PKEY_free(p); + } +}; +typedef UniquePtr<EVP_PKEY, EVP_PKEY_Delete> Unique_EVP_PKEY; + +struct PKCS8_PRIV_KEY_INFO_Delete { + void operator()(PKCS8_PRIV_KEY_INFO* p) const { + PKCS8_PRIV_KEY_INFO_free(p); + } +}; +typedef UniquePtr<PKCS8_PRIV_KEY_INFO, PKCS8_PRIV_KEY_INFO_Delete> Unique_PKCS8_PRIV_KEY_INFO; + +struct RSA_Delete { + void operator()(RSA* p) const { + RSA_free(p); + } +}; +typedef UniquePtr<RSA, RSA_Delete> Unique_RSA; + /* * DER-encoded PKCS#8 format RSA key. 
Generated using: * @@ -209,8 +241,8 @@ static uint8_t TEST_KEY_1[] = { static unsigned char BOGUS_KEY_1[] = { 0xFF, 0xFF, 0xFF, 0xFF }; -class KeymasterTest : public testing::Test { -protected: +class KeymasterBaseTest : public ::testing::Test { +public: static void SetUpTestCase() { const hw_module_t* mod; ASSERT_EQ(0, hw_get_module_by_class(KEYSTORE_HARDWARE_MODULE_ID, NULL, &mod)) @@ -241,22 +273,24 @@ protected: ASSERT_EQ(0, keymaster_close(sDevice)); } - virtual void SetUp() { - } +protected: + static keymaster_device_t* sDevice; +}; - virtual void TearDown() { - } +keymaster_device_t* KeymasterBaseTest::sDevice = NULL; - static keymaster_device_t* sDevice; +class KeymasterTest : public KeymasterBaseTest { }; -keymaster_device_t* KeymasterTest::sDevice = NULL; +class KeymasterGenerateTest : public KeymasterBaseTest, + public ::testing::WithParamInterface<uint32_t> { +}; -TEST_F(KeymasterTest, GenerateKeyPair_RSA_512_Success) { +TEST_P(KeymasterGenerateTest, GenerateKeyPair_RSA_Success) { keymaster_keypair_t key_type = TYPE_RSA; keymaster_rsa_keygen_params_t params = { - modulus_size: 512, - public_exponent: 0x10001L, + modulus_size: GetParam(), + public_exponent: RSA_F4, }; uint8_t* key_blob; @@ -266,40 +300,39 @@ TEST_F(KeymasterTest, GenerateKeyPair_RSA_512_Success) { sDevice->generate_keypair(sDevice, key_type, ¶ms, &key_blob, &key_blob_length)) << "Should generate an RSA key with 512 bit modulus size"; UniqueKey key(&sDevice, key_blob, key_blob_length); -} -TEST_F(KeymasterTest, GenerateKeyPair_RSA_1024_Success) { - keymaster_keypair_t key_type = TYPE_RSA; - keymaster_rsa_keygen_params_t params = { - modulus_size: 1024, - public_exponent: 0x3L, - }; + uint8_t* x509_data = NULL; + size_t x509_data_length; + EXPECT_EQ(0, + sDevice->get_keypair_public(sDevice, key_blob, key_blob_length, + &x509_data, &x509_data_length)) + << "Should be able to retrieve RSA public key successfully"; + UniqueBlob x509_blob(x509_data, x509_data_length); + 
ASSERT_FALSE(x509_blob.get() == NULL) + << "X509 data should be allocated"; - uint8_t* key_blob; - size_t key_blob_length; + const unsigned char *tmp = static_cast<const unsigned char*>(x509_blob.get()); + Unique_EVP_PKEY actual(d2i_PUBKEY((EVP_PKEY**) NULL, &tmp, + static_cast<long>(x509_blob.length()))); - EXPECT_EQ(0, - sDevice->generate_keypair(sDevice, key_type, ¶ms, &key_blob, &key_blob_length)) - << "Should generate an RSA key with 2048 bit modulus size"; - UniqueKey key(&sDevice, key_blob, key_blob_length); -} + ASSERT_EQ(EVP_PKEY_RSA, EVP_PKEY_type(actual.get()->type)) + << "Generated key type should be of type RSA"; -TEST_F(KeymasterTest, GenerateKeyPair_RSA_2048_Success) { - keymaster_keypair_t key_type = TYPE_RSA; - keymaster_rsa_keygen_params_t params = { - modulus_size: 2048, - public_exponent: 0x3L, - }; + Unique_RSA rsa(EVP_PKEY_get1_RSA(actual.get())); + ASSERT_FALSE(rsa.get() == NULL) + << "Should be able to extract RSA key from EVP_PKEY"; - uint8_t* key_blob; - size_t key_blob_length; + EXPECT_EQ(static_cast<unsigned long>(RSA_F4), BN_get_word(rsa.get()->e)) + << "Exponent should be RSA_F4"; - EXPECT_EQ(0, - sDevice->generate_keypair(sDevice, key_type, ¶ms, &key_blob, &key_blob_length)) - << "Should generate an RSA key with 2048 bit modulus size"; - UniqueKey key(&sDevice, key_blob, key_blob_length); + EXPECT_EQ(GetParam() / 8, static_cast<uint32_t>(RSA_size(rsa.get()))) + << "Modulus size should be the specified parameter"; } +INSTANTIATE_TEST_CASE_P(RSA, + KeymasterGenerateTest, + ::testing::Values(512, 1024, 2048)); + TEST_F(KeymasterTest, GenerateKeyPair_RSA_NullParams_Failure) { keymaster_keypair_t key_type = TYPE_RSA; @@ -331,6 +364,31 @@ TEST_F(KeymasterTest, ImportKeyPair_RSA_Success) { &key_blob, &key_blob_length)) << "Should successfully import an RSA key"; UniqueKey key(&sDevice, key_blob, key_blob_length); + + uint8_t* x509_data; + size_t x509_data_length; + EXPECT_EQ(0, + sDevice->get_keypair_public(sDevice, key_blob, 
key_blob_length, + &x509_data, &x509_data_length)) + << "Should be able to retrieve RSA public key successfully"; + UniqueBlob x509_blob(x509_data, x509_data_length); + + const unsigned char *tmp = static_cast<const unsigned char*>(x509_blob.get()); + Unique_EVP_PKEY actual(d2i_PUBKEY((EVP_PKEY**) NULL, &tmp, + static_cast<long>(x509_blob.length()))); + + EXPECT_EQ(EVP_PKEY_type(actual.get()->type), EVP_PKEY_RSA) + << "Generated key type should be of type RSA"; + + const unsigned char *expectedTmp = static_cast<const unsigned char*>(TEST_KEY_1); + Unique_PKCS8_PRIV_KEY_INFO expectedPkcs8( + d2i_PKCS8_PRIV_KEY_INFO((PKCS8_PRIV_KEY_INFO**) NULL, &expectedTmp, + sizeof(TEST_KEY_1))); + + Unique_EVP_PKEY expected(EVP_PKCS82PKEY(expectedPkcs8.get())); + + EXPECT_EQ(1, EVP_PKEY_cmp(expected.get(), actual.get())) + << "Expected and actual keys should match"; } TEST_F(KeymasterTest, ImportKeyPair_BogusKey_Failure) { |