summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--include/hardware/audio.h8
-rw-r--r--include/hardware/bt_gatt_server.h4
-rw-r--r--include/hardware/gps.h45
-rw-r--r--include/hardware/power.h20
-rw-r--r--include/hardware/tv_input.h49
-rw-r--r--modules/audio_remote_submix/audio_hw.cpp436
-rw-r--r--modules/sensors/multihal.cpp58
-rw-r--r--modules/usbaudio/alsa_device_profile.c42
-rw-r--r--modules/usbaudio/audio_hw.c49
-rw-r--r--tests/camera2/CameraBurstTests.cpp57
-rw-r--r--tests/camera2/CameraMetadataTests.cpp45
-rw-r--r--tests/camera2/CameraMultiStreamTests.cpp71
-rw-r--r--tests/camera2/CameraStreamFixture.h91
-rw-r--r--tests/camera2/camera2.cpp112
-rw-r--r--tests/camera2/camera2_utils.cpp2
-rw-r--r--tests/camera2/camera2_utils.h2
-rw-r--r--tests/camera3/camera3test_fixtures.h2
17 files changed, 779 insertions, 314 deletions
diff --git a/include/hardware/audio.h b/include/hardware/audio.h
index 763ca58..8d5b2f0 100644
--- a/include/hardware/audio.h
+++ b/include/hardware/audio.h
@@ -112,6 +112,10 @@ __BEGIN_DECLS
/* Bluetooth SCO wideband */
#define AUDIO_PARAMETER_KEY_BT_SCO_WB "bt_wbs"
+/* Get a new HW synchronization source identifier.
+ * Return a valid source (positive integer) or AUDIO_HW_SYNC_INVALID if an error occurs
+ * or no HW sync is available. */
+#define AUDIO_PARAMETER_HW_AV_SYNC "hw_av_sync"
/**
* audio stream parameters
@@ -136,9 +140,7 @@ __BEGIN_DECLS
* "sup_sampling_rates=44100|48000" */
#define AUDIO_PARAMETER_STREAM_SUP_SAMPLING_RATES "sup_sampling_rates"
-/* Get the HW synchronization source used for an output stream.
- * Return a valid source (positive integer) or AUDIO_HW_SYNC_INVALID if an error occurs
- * or no HW sync source is used. */
+/* Set the HW synchronization source for an output stream. */
#define AUDIO_PARAMETER_STREAM_HW_AV_SYNC "hw_av_sync"
/**
diff --git a/include/hardware/bt_gatt_server.h b/include/hardware/bt_gatt_server.h
index 2b1de27..0d6cc1e 100644
--- a/include/hardware/bt_gatt_server.h
+++ b/include/hardware/bt_gatt_server.h
@@ -117,6 +117,9 @@ typedef void (*indication_sent_callback)(int conn_id, int status);
*/
typedef void (*congestion_callback)(int conn_id, bool congested);
+/** Callback invoked when the MTU for a given connection changes */
+typedef void (*mtu_changed_callback)(int conn_id, int mtu);
+
typedef struct {
register_server_callback register_server_cb;
connection_callback connection_cb;
@@ -133,6 +136,7 @@ typedef struct {
response_confirmation_callback response_confirmation_cb;
indication_sent_callback indication_sent_cb;
congestion_callback congestion_cb;
+ mtu_changed_callback mtu_changed_cb;
} btgatt_server_callbacks_t;
/** Represents the standard BT-GATT server interface. */
diff --git a/include/hardware/gps.h b/include/hardware/gps.h
index fff6402..e264cf5 100644
--- a/include/hardware/gps.h
+++ b/include/hardware/gps.h
@@ -318,6 +318,12 @@ typedef uint8_t GpsMultipathIndicator;
/**
* Flags indicating the GPS measurement state.
 * The expected behavior here is for GPS HAL to set all the flags that apply. For
+ * example, if the state for a satellite is only C/A code locked and bit synchronized,
+ * and there is still millisecond ambiguity, the state should be set as:
+ * GPS_MEASUREMENT_STATE_CODE_LOCK|GPS_MEASUREMENT_STATE_BIT_SYNC|GPS_MEASUREMENT_STATE_MSEC_AMBIGUOUS
+ * If GPS is still searching for a satellite, the corresponding state should be set to
+ * GPS_MEASUREMENT_STATE_UNKNOWN(0).
*/
typedef uint16_t GpsMeasurementState;
#define GPS_MEASUREMENT_STATE_UNKNOWN 0
@@ -325,6 +331,7 @@ typedef uint16_t GpsMeasurementState;
#define GPS_MEASUREMENT_STATE_BIT_SYNC (1<<1)
#define GPS_MEASUREMENT_STATE_SUBFRAME_SYNC (1<<2)
#define GPS_MEASUREMENT_STATE_TOW_DECODED (1<<3)
+#define GPS_MEASUREMENT_STATE_MSEC_AMBIGUOUS (1<<4)
/**
* Flags indicating the Accumulated Delta Range's states.
@@ -336,7 +343,7 @@ typedef uint16_t GpsAccumulatedDeltaRangeState;
#define GPS_ADR_STATE_CYCLE_SLIP (1<<2)
/**
- * Enumeration of available values to indicate the available GPS Natigation message types.
+ * Enumeration of available values to indicate the available GPS Navigation message types.
*/
typedef uint8_t GpsNavigationMessageType;
/** The message type is unknown. */
@@ -350,6 +357,19 @@ typedef uint8_t GpsNavigationMessageType;
/** CNAV-2 message contained in the structure. */
#define GPS_NAVIGATION_MESSAGE_TYPE_CNAV2 4
+/**
+ * Status of Navigation Message
+ * When a message is received properly without any parity error in its navigation words, the
+ * status should be set to NAV_MESSAGE_STATUS_PARITY_PASSED. But if a message is received
+ * with words that failed parity check, but GPS is able to correct those words, the status
+ * should be set to NAV_MESSAGE_STATUS_PARITY_REBUILT.
+ * No need to send any navigation message that contains words with parity error and cannot be
+ * corrected.
+ */
+typedef uint16_t NavigationMessageStatus;
+#define NAV_MESSAGE_STATUS_UNKONW 0
+#define NAV_MESSAGE_STATUS_PARITY_PASSED (1<<0)
+#define NAV_MESSAGE_STATUS_PARITY_REBUILT (1<<1)
/**
* Name for the GPS XTRA interface.
@@ -1386,12 +1406,16 @@ typedef struct {
* Received GPS Time-of-Week at the measurement time, in nanoseconds.
* The value is relative to the beginning of the current GPS week.
*
- * Given the sync state of GPS receiver, per each satellite, valid range for this field can be:
- * Searching : [ 0 ] : GPS_MEASUREMENT_STATE_UNKNOWN
- * Ranging code lock : [ 0 1ms ] : GPS_MEASUREMENT_STATE_CODE_LOCK is set
- * Bit sync : [ 0 20ms ] : GPS_MEASUREMENT_STATE_BIT_SYNC is set
- * Subframe sync : [ 0 6ms ] : GPS_MEASUREMENT_STATE_SUBFRAME_SYNC is set
- * TOW decoded : [ 0 1week ] : GPS_MEASUREMENT_STATE_TOW_DECODED is set
+ * Given the highest sync state that can be achieved, per each satellite, valid range for
+ * this field can be:
+ * Searching : [ 0 ] : GPS_MEASUREMENT_STATE_UNKNOWN
+ * C/A code lock : [ 0 1ms ] : GPS_MEASUREMENT_STATE_CODE_LOCK is set
+ * Bit sync : [ 0 20ms ] : GPS_MEASUREMENT_STATE_BIT_SYNC is set
+ * Subframe sync : [ 0 6s ] : GPS_MEASUREMENT_STATE_SUBFRAME_SYNC is set
+ * TOW decoded : [ 0 1week ] : GPS_MEASUREMENT_STATE_TOW_DECODED is set
+ *
+ * However, if there is any ambiguity in integer millisecond,
+ * GPS_MEASUREMENT_STATE_MSEC_AMBIGUOUS should be set accordingly, in the 'state' field.
*/
int64_t received_gps_tow_ns;
@@ -1681,6 +1705,13 @@ typedef struct {
GpsNavigationMessageType type;
/**
+ * The status of the received navigation message.
+ * No need to send any navigation message that contains words with parity error and cannot be
+ * corrected.
+ */
+ NavigationMessageStatus status;
+
+ /**
* Message identifier.
* It provides an index so the complete Navigation Message can be assembled. i.e. fo L1 C/A
* subframe 4 and 5, this value corresponds to the 'frame id' of the navigation message.
diff --git a/include/hardware/power.h b/include/hardware/power.h
index dc33705..af7799e 100644
--- a/include/hardware/power.h
+++ b/include/hardware/power.h
@@ -27,6 +27,7 @@ __BEGIN_DECLS
#define POWER_MODULE_API_VERSION_0_1 HARDWARE_MODULE_API_VERSION(0, 1)
#define POWER_MODULE_API_VERSION_0_2 HARDWARE_MODULE_API_VERSION(0, 2)
+#define POWER_MODULE_API_VERSION_0_3 HARDWARE_MODULE_API_VERSION(0, 3)
/**
* The id of this module
@@ -48,6 +49,10 @@ typedef enum {
POWER_HINT_LOW_POWER = 0x00000005
} power_hint_t;
+typedef enum {
+ POWER_FEATURE_DOUBLE_TAP_TO_WAKE = 0x00000001
+} feature_t;
+
/**
* Every hardware module must have a data structure named HAL_MODULE_INFO_SYM
* and the fields of this data structure must begin with hw_module_t
@@ -127,6 +132,21 @@ typedef struct power_module {
*/
void (*powerHint)(struct power_module *module, power_hint_t hint,
void *data);
+
+ /*
+ * (*setFeature) is called to turn on or off a particular feature
+ * depending on the state parameter. The possible features are:
+ *
+     * POWER_FEATURE_DOUBLE_TAP_TO_WAKE
+ *
+ * Enabling/Disabling this feature will allow/disallow the system
+ * to wake up by tapping the screen twice.
+ *
+ * availability: version 0.3
+ *
+ */
+ void (*setFeature)(struct power_module *module, feature_t feature, int state);
+
} power_module_t;
diff --git a/include/hardware/tv_input.h b/include/hardware/tv_input.h
index a94e4ea..ed3fafb 100644
--- a/include/hardware/tv_input.h
+++ b/include/hardware/tv_input.h
@@ -110,29 +110,78 @@ typedef struct tv_input_device_info {
int32_t reserved[16];
} tv_input_device_info_t;
+/* See tv_input_event_t for more details. */
enum {
/*
* Hardware notifies the framework that a device is available.
+ *
+ * Note that DEVICE_AVAILABLE and DEVICE_UNAVAILABLE events do not represent
+ * hotplug events (i.e. plugging cable into or out of the physical port).
+ * These events notify the framework whether the port is available or not.
+ * For a concrete example, when a user plugs in or pulls out the HDMI cable
+ * from a HDMI port, it does not generate DEVICE_AVAILABLE and/or
+ * DEVICE_UNAVAILABLE events. However, if a user inserts a pluggable USB
+ * tuner into the Android device, it will generate a DEVICE_AVAILABLE event
+ * and when the port is removed, it should generate a DEVICE_UNAVAILABLE
+ * event.
+ *
+ * For hotplug events, please see STREAM_CONFIGURATION_CHANGED for more
+ * details.
+ *
+ * HAL implementation should register devices by using this event when the
+ * device boots up. The framework will recognize device reported via this
+ * event only. In addition, the implementation could use this event to
+ * notify the framework that a removable TV input device (such as USB tuner
+ * as stated in the example above) is attached.
*/
TV_INPUT_EVENT_DEVICE_AVAILABLE = 1,
/*
* Hardware notifies the framework that a device is unavailable.
+ *
+ * HAL implementation should generate this event when a device registered
+ * by TV_INPUT_EVENT_DEVICE_AVAILABLE is no longer available. For example,
+ * the event can indicate that a USB tuner is plugged out from the Android
+ * device.
+ *
+ * Note that this event is not for indicating cable plugged out of the port;
+ * for that purpose, the implementation should use
+ * STREAM_CONFIGURATION_CHANGED event. This event represents the port itself
+ * being no longer available.
*/
TV_INPUT_EVENT_DEVICE_UNAVAILABLE = 2,
/*
* Stream configurations are changed. Client should regard all open streams
* at the specific device are closed, and should call
* get_stream_configurations() again, opening some of them if necessary.
+ *
+ * HAL implementation should generate this event when the available stream
+ * configurations change for any reason. A typical use case of this event
+ * would be to notify the framework that the input signal has changed
+ * resolution, or that the cable is plugged out so that the number of
+ * available streams is 0.
+ *
+ * The implementation may use this event to indicate hotplug status of the
+ * port. The framework regards input devices with no available streams as
+ * disconnected, so the implementation can generate this event with no
+ * available streams to indicate that this device is disconnected, and vice
+ * versa.
*/
TV_INPUT_EVENT_STREAM_CONFIGURATIONS_CHANGED = 3,
/*
* Hardware is done with capture request with the buffer. Client can assume
* ownership of the buffer again.
+ *
+ * HAL implementation should generate this event after request_capture() if
+ * it succeeded. The event shall have the buffer with the captured image.
*/
TV_INPUT_EVENT_CAPTURE_SUCCEEDED = 4,
/*
* Hardware met a failure while processing a capture request or client
* canceled the request. Client can assume ownership of the buffer again.
+ *
+ * The event is similar to TV_INPUT_EVENT_CAPTURE_SUCCEEDED, but HAL
+ * implementation generates this event upon a failure to process
+ * request_capture(), or a request cancellation.
*/
TV_INPUT_EVENT_CAPTURE_FAILED = 5,
};
diff --git a/modules/audio_remote_submix/audio_hw.cpp b/modules/audio_remote_submix/audio_hw.cpp
index 8fed8e4..b9dcf7a 100644
--- a/modules/audio_remote_submix/audio_hw.cpp
+++ b/modules/audio_remote_submix/audio_hw.cpp
@@ -140,11 +140,10 @@ struct submix_config {
size_t buffer_period_size_frames;
};
-struct submix_audio_device {
- struct audio_hw_device device;
- bool input_standby;
- bool output_standby;
- submix_config config;
+#define MAX_ROUTES 10
+typedef struct route_config {
+ struct submix_config config;
+ char address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
// Pipe variables: they handle the ring buffer that "pipes" audio:
// - from the submix virtual audio output == what needs to be played
// remotely, seen as an output for AudioFlinger
@@ -155,17 +154,20 @@ struct submix_audio_device {
// TV with Wifi Display capabilities), or to a wireless audio player.
sp<MonoPipe> rsxSink;
sp<MonoPipeReader> rsxSource;
+ // Pointers to the current input and output stream instances. rsxSink and rsxSource are
+    // destroyed if both the input and output streams are destroyed.
+ struct submix_stream_out *output;
+ struct submix_stream_in *input;
#if ENABLE_RESAMPLING
// Buffer used as temporary storage for resampled data prior to returning data to the output
// stream.
int16_t resampler_buffer[DEFAULT_PIPE_SIZE_IN_FRAMES];
#endif // ENABLE_RESAMPLING
+} route_config_t;
- // Pointers to the current input and output stream instances. rsxSink and rsxSource are
- // destroyed if both and input and output streams are destroyed.
- struct submix_stream_out *output;
- struct submix_stream_in *input;
-
+struct submix_audio_device {
+ struct audio_hw_device device;
+ route_config_t routes[MAX_ROUTES];
// Device lock, also used to protect access to submix_audio_device from the input and output
// streams.
pthread_mutex_t lock;
@@ -174,6 +176,8 @@ struct submix_audio_device {
struct submix_stream_out {
struct audio_stream_out stream;
struct submix_audio_device *dev;
+ int route_handle;
+ bool output_standby;
#if LOG_STREAMS_TO_FILES
int log_fd;
#endif // LOG_STREAMS_TO_FILES
@@ -182,7 +186,9 @@ struct submix_stream_out {
struct submix_stream_in {
struct audio_stream_in stream;
struct submix_audio_device *dev;
- bool output_standby; // output standby state as seen from record thread
+ int route_handle;
+ bool input_standby;
+ bool output_standby_rec_thr; // output standby state as seen from record thread
// wall clock when recording starts
struct timespec record_start_time;
@@ -346,40 +352,53 @@ static bool audio_config_compare(const audio_config * const input_config,
// If one doesn't exist, create a pipe for the submix audio device rsxadev of size
// buffer_size_frames and optionally associate "in" or "out" with the submix audio device.
-static void submix_audio_device_create_pipe(struct submix_audio_device * const rsxadev,
+// Must be called with lock held on the submix_audio_device
+static void submix_audio_device_create_pipe_l(struct submix_audio_device * const rsxadev,
const struct audio_config * const config,
const size_t buffer_size_frames,
const uint32_t buffer_period_count,
struct submix_stream_in * const in,
- struct submix_stream_out * const out)
+ struct submix_stream_out * const out,
+ const char *address,
+ int route_idx)
{
ALOG_ASSERT(in || out);
- ALOGD("submix_audio_device_create_pipe()");
- pthread_mutex_lock(&rsxadev->lock);
+ ALOG_ASSERT(route_idx > -1);
+ ALOG_ASSERT(route_idx < MAX_ROUTES);
+ ALOGD("submix_audio_device_create_pipe_l(addr=%s, idx=%d)", address, route_idx);
+
// Save a reference to the specified input or output stream and the associated channel
// mask.
if (in) {
- rsxadev->input = in;
- rsxadev->config.input_channel_mask = config->channel_mask;
+ in->route_handle = route_idx;
+ rsxadev->routes[route_idx].input = in;
+ rsxadev->routes[route_idx].config.input_channel_mask = config->channel_mask;
#if ENABLE_RESAMPLING
- rsxadev->config.input_sample_rate = config->sample_rate;
+ rsxadev->routes[route_idx].config.input_sample_rate = config->sample_rate;
// If the output isn't configured yet, set the output sample rate to the maximum supported
- // sample rate such that the smallest possible input buffer is created.
- if (!rsxadev->output) {
- rsxadev->config.output_sample_rate = 48000;
+ // sample rate such that the smallest possible input buffer is created, and put a default
+ // value for channel count
+ if (!rsxadev->routes[route_idx].output) {
+ rsxadev->routes[route_idx].config.output_sample_rate = 48000;
+ rsxadev->routes[route_idx].config.output_channel_mask = AUDIO_CHANNEL_OUT_STEREO;
}
#endif // ENABLE_RESAMPLING
}
if (out) {
- rsxadev->output = out;
- rsxadev->config.output_channel_mask = config->channel_mask;
+ out->route_handle = route_idx;
+ rsxadev->routes[route_idx].output = out;
+ rsxadev->routes[route_idx].config.output_channel_mask = config->channel_mask;
#if ENABLE_RESAMPLING
- rsxadev->config.output_sample_rate = config->sample_rate;
+ rsxadev->routes[route_idx].config.output_sample_rate = config->sample_rate;
#endif // ENABLE_RESAMPLING
}
+ // Save the address
+ strncpy(rsxadev->routes[route_idx].address, address, AUDIO_DEVICE_MAX_ADDRESS_LEN);
+ ALOGD(" now using address %s for route %d", rsxadev->routes[route_idx].address, route_idx);
// If a pipe isn't associated with the device, create one.
- if (rsxadev->rsxSink == NULL || rsxadev->rsxSource == NULL) {
- struct submix_config * const device_config = &rsxadev->config;
+ if (rsxadev->routes[route_idx].rsxSink == NULL || rsxadev->routes[route_idx].rsxSource == NULL)
+ {
+ struct submix_config * const device_config = &rsxadev->routes[route_idx].config;
uint32_t channel_count;
if (out)
channel_count = audio_channel_count_from_out_mask(config->channel_mask);
@@ -407,13 +426,13 @@ static void submix_audio_device_create_pipe(struct submix_audio_device * const r
numCounterOffers = 0;
index = source->negotiate(offers, 1, NULL, numCounterOffers);
ALOG_ASSERT(index == 0);
- ALOGV("submix_audio_device_create_pipe(): created pipe");
+ ALOGV("submix_audio_device_create_pipe_l(): created pipe");
// Save references to the source and sink.
- ALOG_ASSERT(rsxadev->rsxSink == NULL);
- ALOG_ASSERT(rsxadev->rsxSource == NULL);
- rsxadev->rsxSink = sink;
- rsxadev->rsxSource = source;
+ ALOG_ASSERT(rsxadev->routes[route_idx].rsxSink == NULL);
+ ALOG_ASSERT(rsxadev->routes[route_idx].rsxSource == NULL);
+ rsxadev->routes[route_idx].rsxSink = sink;
+ rsxadev->routes[route_idx].rsxSource = source;
// Store the sanitized audio format in the device so that it's possible to determine
// the format of the pipe source when opening the input device.
memcpy(&device_config->common, config, sizeof(device_config->common));
@@ -427,51 +446,71 @@ static void submix_audio_device_create_pipe(struct submix_audio_device * const r
device_config->pipe_frame_size = (device_config->pipe_frame_size * pipe_channel_count) /
channel_count;
#endif // ENABLE_CHANNEL_CONVERSION
- SUBMIX_ALOGV("submix_audio_device_create_pipe(): pipe frame size %zd, pipe size %zd, "
+ SUBMIX_ALOGV("submix_audio_device_create_pipe_l(): pipe frame size %zd, pipe size %zd, "
"period size %zd", device_config->pipe_frame_size,
device_config->buffer_size_frames, device_config->buffer_period_size_frames);
}
- pthread_mutex_unlock(&rsxadev->lock);
}
// Release references to the sink and source. Input and output threads may maintain references
// to these objects via StrongPointer (sp<MonoPipe> and sp<MonoPipeReader>) which they can use
// before they shutdown.
-static void submix_audio_device_release_pipe(struct submix_audio_device * const rsxadev)
-{
- ALOGD("submix_audio_device_release_pipe()");
- rsxadev->rsxSink.clear();
- rsxadev->rsxSource.clear();
+// Must be called with lock held on the submix_audio_device
+static void submix_audio_device_release_pipe_l(struct submix_audio_device * const rsxadev,
+ int route_idx)
+{
+ ALOG_ASSERT(route_idx > -1);
+ ALOG_ASSERT(route_idx < MAX_ROUTES);
+ ALOGD("submix_audio_device_release_pipe_l(idx=%d) addr=%s", route_idx,
+ rsxadev->routes[route_idx].address);
+ if (rsxadev->routes[route_idx].rsxSink != 0) {
+ rsxadev->routes[route_idx].rsxSink.clear();
+ rsxadev->routes[route_idx].rsxSink = 0;
+ }
+ if (rsxadev->routes[route_idx].rsxSource != 0) {
+ rsxadev->routes[route_idx].rsxSource.clear();
+ rsxadev->routes[route_idx].rsxSource = 0;
+ }
+ memset(rsxadev->routes[route_idx].address, 0, AUDIO_DEVICE_MAX_ADDRESS_LEN);
+#ifdef ENABLE_RESAMPLING
+ memset(rsxadev->routes[route_idx].resampler_buffer, 0,
+ sizeof(int16_t) * DEFAULT_PIPE_SIZE_IN_FRAMES);
+#endif
}
// Remove references to the specified input and output streams. When the device no longer
// references input and output streams destroy the associated pipe.
-static void submix_audio_device_destroy_pipe(struct submix_audio_device * const rsxadev,
+// Must be called with lock held on the submix_audio_device
+static void submix_audio_device_destroy_pipe_l(struct submix_audio_device * const rsxadev,
const struct submix_stream_in * const in,
const struct submix_stream_out * const out)
{
MonoPipe* sink;
- pthread_mutex_lock(&rsxadev->lock);
- ALOGV("submix_audio_device_destroy_pipe()");
- ALOG_ASSERT(in == NULL || rsxadev->input == in);
- ALOG_ASSERT(out == NULL || rsxadev->output == out);
+ ALOGV("submix_audio_device_destroy_pipe_l()");
+ int route_idx = -1;
if (in != NULL) {
#if ENABLE_LEGACY_INPUT_OPEN
const_cast<struct submix_stream_in*>(in)->ref_count--;
+ route_idx = in->route_handle;
+ ALOG_ASSERT(rsxadev->routes[route_idx].input == in);
if (in->ref_count == 0) {
- rsxadev->input = NULL;
+ rsxadev->routes[route_idx].input = NULL;
}
- ALOGV("submix_audio_device_destroy_pipe(): input ref_count %d", in->ref_count);
+ ALOGV("submix_audio_device_destroy_pipe_l(): input ref_count %d", in->ref_count);
#else
rsxadev->input = NULL;
#endif // ENABLE_LEGACY_INPUT_OPEN
}
- if (out != NULL) rsxadev->output = NULL;
- if (rsxadev->input == NULL && rsxadev->output == NULL) {
- submix_audio_device_release_pipe(rsxadev);
- ALOGD("submix_audio_device_destroy_pipe(): pipe destroyed");
+ if (out != NULL) {
+ route_idx = out->route_handle;
+ ALOG_ASSERT(rsxadev->routes[route_idx].output == out);
+ rsxadev->routes[route_idx].output = NULL;
+ }
+ if (route_idx != -1 &&
+ rsxadev->routes[route_idx].input == NULL && rsxadev->routes[route_idx].output == NULL) {
+ submix_audio_device_release_pipe_l(rsxadev, route_idx);
+ ALOGD("submix_audio_device_destroy_pipe_l(): pipe destroyed");
}
- pthread_mutex_unlock(&rsxadev->lock);
}
// Sanitize the user specified audio config for a submix input / output stream.
@@ -484,8 +523,9 @@ static void submix_sanitize_config(struct audio_config * const config, const boo
}
// Verify a submix input or output stream can be opened.
-static bool submix_open_validate(const struct submix_audio_device * const rsxadev,
- pthread_mutex_t * const lock,
+// Must be called with lock held on the submix_audio_device
+static bool submix_open_validate_l(const struct submix_audio_device * const rsxadev,
+ int route_idx,
const struct audio_config * const config,
const bool opening_input)
{
@@ -494,20 +534,18 @@ static bool submix_open_validate(const struct submix_audio_device * const rsxade
audio_config pipe_config;
// Query the device for the current audio config and whether input and output streams are open.
- pthread_mutex_lock(lock);
- output_open = rsxadev->output != NULL;
- input_open = rsxadev->input != NULL;
- memcpy(&pipe_config, &rsxadev->config.common, sizeof(pipe_config));
- pthread_mutex_unlock(lock);
+ output_open = rsxadev->routes[route_idx].output != NULL;
+ input_open = rsxadev->routes[route_idx].input != NULL;
+ memcpy(&pipe_config, &rsxadev->routes[route_idx].config.common, sizeof(pipe_config));
// If the stream is already open, don't open it again.
if (opening_input ? !ENABLE_LEGACY_INPUT_OPEN && input_open : output_open) {
- ALOGE("submix_open_validate(): %s stream already open.", opening_input ? "Input" :
+ ALOGE("submix_open_validate_l(): %s stream already open.", opening_input ? "Input" :
"Output");
return false;
}
- SUBMIX_ALOGV("submix_open_validate(): sample rate=%d format=%x "
+ SUBMIX_ALOGV("submix_open_validate_l(): sample rate=%d format=%x "
"%s_channel_mask=%x", config->sample_rate, config->format,
opening_input ? "in" : "out", config->channel_mask);
@@ -518,16 +556,46 @@ static bool submix_open_validate(const struct submix_audio_device * const rsxade
const audio_config * const output_config = opening_input ? &pipe_config : config;
// Get the channel mask of the open device.
pipe_config.channel_mask =
- opening_input ? rsxadev->config.output_channel_mask :
- rsxadev->config.input_channel_mask;
+ opening_input ? rsxadev->routes[route_idx].config.output_channel_mask :
+ rsxadev->routes[route_idx].config.input_channel_mask;
if (!audio_config_compare(input_config, output_config)) {
- ALOGE("submix_open_validate(): Unsupported format.");
+ ALOGE("submix_open_validate_l(): Unsupported format.");
return false;
}
}
return true;
}
+// Must be called with lock held on the submix_audio_device
+static status_t submix_get_route_idx_for_address_l(const struct submix_audio_device * const rsxadev,
+ const char* address, /*in*/
+ int *idx /*out*/)
+{
+ // Do we already have a route for this address
+ int route_idx = -1;
+ int route_empty_idx = -1; // index of an empty route slot that can be used if needed
+ for (int i=0 ; i < MAX_ROUTES ; i++) {
+ if (strcmp(rsxadev->routes[i].address, "") == 0) {
+ route_empty_idx = i;
+ }
+ if (strncmp(rsxadev->routes[i].address, address, AUDIO_DEVICE_MAX_ADDRESS_LEN) == 0) {
+ route_idx = i;
+ break;
+ }
+ }
+
+ if ((route_idx == -1) && (route_empty_idx == -1)) {
+ ALOGE("Cannot create new route for address %s, max number of routes reached", address);
+ return -ENOMEM;
+ }
+ if (route_idx == -1) {
+ route_idx = route_empty_idx;
+ }
+ *idx = route_idx;
+ return OK;
+}
+
+
// Calculate the maximum size of the pipe buffer in frames for the specified stream.
static size_t calculate_stream_pipe_size_in_frames(const struct audio_stream *stream,
const struct submix_config *config,
@@ -546,11 +614,12 @@ static uint32_t out_get_sample_rate(const struct audio_stream *stream)
const struct submix_stream_out * const out = audio_stream_get_submix_stream_out(
const_cast<struct audio_stream *>(stream));
#if ENABLE_RESAMPLING
- const uint32_t out_rate = out->dev->config.output_sample_rate;
+ const uint32_t out_rate = out->dev->routes[out->route_handle].config.output_sample_rate;
#else
- const uint32_t out_rate = out->dev->config.common.sample_rate;
+ const uint32_t out_rate = out->dev->routes[out->route_handle].config.common.sample_rate;
#endif // ENABLE_RESAMPLING
- SUBMIX_ALOGV("out_get_sample_rate() returns %u", out_rate);
+ SUBMIX_ALOGV("out_get_sample_rate() returns %u for addr %s",
+ out_rate, out->dev->routes[out->route_handle].address);
return out_rate;
}
@@ -560,9 +629,11 @@ static int out_set_sample_rate(struct audio_stream *stream, uint32_t rate)
#if ENABLE_RESAMPLING
// The sample rate of the stream can't be changed once it's set since this would change the
// output buffer size and hence break playback to the shared pipe.
- if (rate != out->dev->config.output_sample_rate) {
+ if (rate != out->dev->routes[out->route_handle].config.output_sample_rate) {
ALOGE("out_set_sample_rate() resampling enabled can't change sample rate from "
- "%u to %u", out->dev->config.output_sample_rate, rate);
+ "%u to %u for addr %s",
+ out->dev->routes[out->route_handle].config.output_sample_rate, rate,
+ out->dev->routes[out->route_handle].address);
return -ENOSYS;
}
#endif // ENABLE_RESAMPLING
@@ -571,7 +642,7 @@ static int out_set_sample_rate(struct audio_stream *stream, uint32_t rate)
return -ENOSYS;
}
SUBMIX_ALOGV("out_set_sample_rate(rate=%u)", rate);
- out->dev->config.common.sample_rate = rate;
+ out->dev->routes[out->route_handle].config.common.sample_rate = rate;
return 0;
}
@@ -579,7 +650,7 @@ static size_t out_get_buffer_size(const struct audio_stream *stream)
{
const struct submix_stream_out * const out = audio_stream_get_submix_stream_out(
const_cast<struct audio_stream *>(stream));
- const struct submix_config * const config = &out->dev->config;
+ const struct submix_config * const config = &out->dev->routes[out->route_handle].config;
const size_t stream_frame_size =
audio_stream_out_frame_size((const struct audio_stream_out *)stream);
const size_t buffer_size_frames = calculate_stream_pipe_size_in_frames(
@@ -594,7 +665,7 @@ static audio_channel_mask_t out_get_channels(const struct audio_stream *stream)
{
const struct submix_stream_out * const out = audio_stream_get_submix_stream_out(
const_cast<struct audio_stream *>(stream));
- uint32_t channel_mask = out->dev->config.output_channel_mask;
+ uint32_t channel_mask = out->dev->routes[out->route_handle].config.output_channel_mask;
SUBMIX_ALOGV("out_get_channels() returns %08x", channel_mask);
return channel_mask;
}
@@ -603,7 +674,7 @@ static audio_format_t out_get_format(const struct audio_stream *stream)
{
const struct submix_stream_out * const out = audio_stream_get_submix_stream_out(
const_cast<struct audio_stream *>(stream));
- const audio_format_t format = out->dev->config.common.format;
+ const audio_format_t format = out->dev->routes[out->route_handle].config.common.format;
SUBMIX_ALOGV("out_get_format() returns %x", format);
return format;
}
@@ -611,7 +682,7 @@ static audio_format_t out_get_format(const struct audio_stream *stream)
static int out_set_format(struct audio_stream *stream, audio_format_t format)
{
const struct submix_stream_out * const out = audio_stream_get_submix_stream_out(stream);
- if (format != out->dev->config.common.format) {
+ if (format != out->dev->routes[out->route_handle].config.common.format) {
ALOGE("out_set_format(format=%x) format unsupported", format);
return -ENOSYS;
}
@@ -621,12 +692,13 @@ static int out_set_format(struct audio_stream *stream, audio_format_t format)
static int out_standby(struct audio_stream *stream)
{
- struct submix_audio_device * const rsxadev = audio_stream_get_submix_stream_out(stream)->dev;
ALOGI("out_standby()");
+ struct submix_stream_out * const out = audio_stream_get_submix_stream_out(stream);
+ struct submix_audio_device * const rsxadev = out->dev;
pthread_mutex_lock(&rsxadev->lock);
- rsxadev->output_standby = true;
+ out->output_standby = true;
pthread_mutex_unlock(&rsxadev->lock);
@@ -653,7 +725,9 @@ static int out_set_parameters(struct audio_stream *stream, const char *kvpairs)
audio_stream_get_submix_stream_out(stream)->dev;
pthread_mutex_lock(&rsxadev->lock);
{ // using the sink
- sp<MonoPipe> sink = rsxadev->rsxSink;
+ sp<MonoPipe> sink =
+ rsxadev->routes[audio_stream_get_submix_stream_out(stream)->route_handle]
+ .rsxSink;
if (sink == NULL) {
pthread_mutex_unlock(&rsxadev->lock);
return 0;
@@ -678,7 +752,7 @@ static uint32_t out_get_latency(const struct audio_stream_out *stream)
{
const struct submix_stream_out * const out = audio_stream_out_get_submix_stream_out(
const_cast<struct audio_stream_out *>(stream));
- const struct submix_config * const config = &out->dev->config;
+ const struct submix_config * const config = &out->dev->routes[out->route_handle].config;
const size_t stream_frame_size =
audio_stream_out_frame_size(stream);
const size_t buffer_size_frames = calculate_stream_pipe_size_in_frames(
@@ -711,9 +785,9 @@ static ssize_t out_write(struct audio_stream_out *stream, const void* buffer,
pthread_mutex_lock(&rsxadev->lock);
- rsxadev->output_standby = false;
+ out->output_standby = false;
- sp<MonoPipe> sink = rsxadev->rsxSink;
+ sp<MonoPipe> sink = rsxadev->routes[out->route_handle].rsxSink;
if (sink != NULL) {
if (sink->isShutdown()) {
sink.clear();
@@ -735,8 +809,8 @@ static ssize_t out_write(struct audio_stream_out *stream, const void* buffer,
// from the pipe to make space to write the most recent data.
{
const size_t availableToWrite = sink->availableToWrite();
- sp<MonoPipeReader> source = rsxadev->rsxSource;
- if (rsxadev->input == NULL && availableToWrite < frames) {
+ sp<MonoPipeReader> source = rsxadev->routes[out->route_handle].rsxSource;
+ if (rsxadev->routes[out->route_handle].input == NULL && availableToWrite < frames) {
static uint8_t flush_buffer[64];
const size_t flushBufferSizeFrames = sizeof(flush_buffer) / frame_size;
size_t frames_to_flush_from_source = frames - availableToWrite;
@@ -745,6 +819,7 @@ static ssize_t out_write(struct audio_stream_out *stream, const void* buffer,
while (frames_to_flush_from_source) {
const size_t flush_size = min(frames_to_flush_from_source, flushBufferSizeFrames);
frames_to_flush_from_source -= flush_size;
+ // read does not block
source->read(flush_buffer, flush_size, AudioBufferProvider::kInvalidPTS);
}
}
@@ -824,9 +899,9 @@ static uint32_t in_get_sample_rate(const struct audio_stream *stream)
const struct submix_stream_in * const in = audio_stream_get_submix_stream_in(
const_cast<struct audio_stream*>(stream));
#if ENABLE_RESAMPLING
- const uint32_t rate = in->dev->config.input_sample_rate;
+ const uint32_t rate = in->dev->routes[in->route_handle].config.input_sample_rate;
#else
- const uint32_t rate = in->dev->config.common.sample_rate;
+ const uint32_t rate = in->dev->routes[in->route_handle].config.common.sample_rate;
#endif // ENABLE_RESAMPLING
SUBMIX_ALOGV("in_get_sample_rate() returns %u", rate);
return rate;
@@ -838,9 +913,9 @@ static int in_set_sample_rate(struct audio_stream *stream, uint32_t rate)
#if ENABLE_RESAMPLING
// The sample rate of the stream can't be changed once it's set since this would change the
// input buffer size and hence break recording from the shared pipe.
- if (rate != in->dev->config.input_sample_rate) {
+ if (rate != in->dev->routes[in->route_handle].config.input_sample_rate) {
ALOGE("in_set_sample_rate() resampling enabled can't change sample rate from "
- "%u to %u", in->dev->config.input_sample_rate, rate);
+ "%u to %u", in->dev->routes[in->route_handle].config.input_sample_rate, rate);
return -ENOSYS;
}
#endif // ENABLE_RESAMPLING
@@ -848,7 +923,7 @@ static int in_set_sample_rate(struct audio_stream *stream, uint32_t rate)
ALOGE("in_set_sample_rate(rate=%u) rate unsupported", rate);
return -ENOSYS;
}
- in->dev->config.common.sample_rate = rate;
+ in->dev->routes[in->route_handle].config.common.sample_rate = rate;
SUBMIX_ALOGV("in_set_sample_rate() set %u", rate);
return 0;
}
@@ -857,7 +932,7 @@ static size_t in_get_buffer_size(const struct audio_stream *stream)
{
const struct submix_stream_in * const in = audio_stream_get_submix_stream_in(
const_cast<struct audio_stream*>(stream));
- const struct submix_config * const config = &in->dev->config;
+ const struct submix_config * const config = &in->dev->routes[in->route_handle].config;
const size_t stream_frame_size =
audio_stream_in_frame_size((const struct audio_stream_in *)stream);
size_t buffer_size_frames = calculate_stream_pipe_size_in_frames(
@@ -879,7 +954,8 @@ static audio_channel_mask_t in_get_channels(const struct audio_stream *stream)
{
const struct submix_stream_in * const in = audio_stream_get_submix_stream_in(
const_cast<struct audio_stream*>(stream));
- const audio_channel_mask_t channel_mask = in->dev->config.input_channel_mask;
+ const audio_channel_mask_t channel_mask =
+ in->dev->routes[in->route_handle].config.input_channel_mask;
SUBMIX_ALOGV("in_get_channels() returns %x", channel_mask);
return channel_mask;
}
@@ -888,7 +964,7 @@ static audio_format_t in_get_format(const struct audio_stream *stream)
{
const struct submix_stream_in * const in = audio_stream_get_submix_stream_in(
const_cast<struct audio_stream*>(stream));
- const audio_format_t format = in->dev->config.common.format;
+ const audio_format_t format = in->dev->routes[in->route_handle].config.common.format;
SUBMIX_ALOGV("in_get_format() returns %x", format);
return format;
}
@@ -896,7 +972,7 @@ static audio_format_t in_get_format(const struct audio_stream *stream)
static int in_set_format(struct audio_stream *stream, audio_format_t format)
{
const struct submix_stream_in * const in = audio_stream_get_submix_stream_in(stream);
- if (format != in->dev->config.common.format) {
+ if (format != in->dev->routes[in->route_handle].config.common.format) {
ALOGE("in_set_format(format=%x) format unsupported", format);
return -ENOSYS;
}
@@ -906,12 +982,13 @@ static int in_set_format(struct audio_stream *stream, audio_format_t format)
static int in_standby(struct audio_stream *stream)
{
- struct submix_audio_device * const rsxadev = audio_stream_get_submix_stream_in(stream)->dev;
ALOGI("in_standby()");
+ struct submix_stream_in * const in = audio_stream_get_submix_stream_in(stream);
+ struct submix_audio_device * const rsxadev = in->dev;
pthread_mutex_lock(&rsxadev->lock);
- rsxadev->input_standby = true;
+ in->input_standby = true;
pthread_mutex_unlock(&rsxadev->lock);
@@ -959,11 +1036,13 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
SUBMIX_ALOGV("in_read bytes=%zu", bytes);
pthread_mutex_lock(&rsxadev->lock);
- const bool output_standby_transition = (in->output_standby != in->dev->output_standby);
- in->output_standby = rsxadev->output_standby;
+ const bool output_standby = rsxadev->routes[in->route_handle].output == NULL
+ ? true : rsxadev->routes[in->route_handle].output->output_standby;
+ const bool output_standby_transition = (in->output_standby_rec_thr != output_standby);
+ in->output_standby_rec_thr = output_standby;
- if (rsxadev->input_standby || output_standby_transition) {
- rsxadev->input_standby = false;
+ if (in->input_standby || output_standby_transition) {
+ in->input_standby = false;
// keep track of when we exit input standby (== first read == start "real recording")
// or when we start recording silence, and reset projected time
int rc = clock_gettime(CLOCK_MONOTONIC, &in->record_start_time);
@@ -977,7 +1056,7 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
{
// about to read from audio source
- sp<MonoPipeReader> source = rsxadev->rsxSource;
+ sp<MonoPipeReader> source = rsxadev->routes[in->route_handle].rsxSource;
if (source == NULL) {
in->read_error_count++;// ok if it rolls over
ALOGE_IF(in->read_error_count < MAX_READ_ERROR_LOGS,
@@ -996,14 +1075,15 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
#if ENABLE_CHANNEL_CONVERSION
// Determine whether channel conversion is required.
const uint32_t input_channels = audio_channel_count_from_in_mask(
- rsxadev->config.input_channel_mask);
+ rsxadev->routes[in->route_handle].config.input_channel_mask);
const uint32_t output_channels = audio_channel_count_from_out_mask(
- rsxadev->config.output_channel_mask);
+ rsxadev->routes[in->route_handle].config.output_channel_mask);
if (input_channels != output_channels) {
SUBMIX_ALOGV("in_read(): %d output channels will be converted to %d "
"input channels", output_channels, input_channels);
// Only support 16-bit PCM channel conversion from mono to stereo or stereo to mono.
- ALOG_ASSERT(rsxadev->config.common.format == AUDIO_FORMAT_PCM_16_BIT);
+ ALOG_ASSERT(rsxadev->routes[in->route_handle].config.common.format ==
+ AUDIO_FORMAT_PCM_16_BIT);
ALOG_ASSERT((input_channels == 1 && output_channels == 2) ||
(input_channels == 2 && output_channels == 1));
}
@@ -1011,17 +1091,21 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
#if ENABLE_RESAMPLING
const uint32_t input_sample_rate = in_get_sample_rate(&stream->common);
- const uint32_t output_sample_rate = rsxadev->config.output_sample_rate;
+ const uint32_t output_sample_rate =
+ rsxadev->routes[in->route_handle].config.output_sample_rate;
const size_t resampler_buffer_size_frames =
- sizeof(rsxadev->resampler_buffer) / sizeof(rsxadev->resampler_buffer[0]);
+ sizeof(rsxadev->routes[in->route_handle].resampler_buffer) /
+ sizeof(rsxadev->routes[in->route_handle].resampler_buffer[0]);
float resampler_ratio = 1.0f;
// Determine whether resampling is required.
if (input_sample_rate != output_sample_rate) {
resampler_ratio = (float)output_sample_rate / (float)input_sample_rate;
// Only support 16-bit PCM mono resampling.
// NOTE: Resampling is performed after the channel conversion step.
- ALOG_ASSERT(rsxadev->config.common.format == AUDIO_FORMAT_PCM_16_BIT);
- ALOG_ASSERT(audio_channel_count_from_in_mask(rsxadev->config.input_channel_mask) == 1);
+ ALOG_ASSERT(rsxadev->routes[in->route_handle].config.common.format ==
+ AUDIO_FORMAT_PCM_16_BIT);
+ ALOG_ASSERT(audio_channel_count_from_in_mask(
+ rsxadev->routes[in->route_handle].config.input_channel_mask) == 1);
}
#endif // ENABLE_RESAMPLING
@@ -1037,7 +1121,7 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer,
(float)read_frames * (float)resampler_ratio);
read_frames = min(frames_required_for_resampler, resampler_buffer_size_frames);
// Read into the resampler buffer.
- buff = (char*)rsxadev->resampler_buffer;
+ buff = (char*)rsxadev->routes[in->route_handle].resampler_buffer;
}
#endif // ENABLE_RESAMPLING
#if ENABLE_CHANNEL_CONVERSION
@@ -1195,10 +1279,10 @@ static int adev_open_output_stream(struct audio_hw_device *dev,
audio_output_flags_t flags,
struct audio_config *config,
struct audio_stream_out **stream_out,
- const char *address __unused)
+ const char *address)
{
struct submix_audio_device * const rsxadev = audio_hw_device_get_submix_audio_device(dev);
- ALOGD("adev_open_output_stream()");
+ ALOGD("adev_open_output_stream(address=%s)", address);
struct submix_stream_out *out;
bool force_pipe_creation = false;
(void)handle;
@@ -1209,13 +1293,29 @@ static int adev_open_output_stream(struct audio_hw_device *dev,
// Make sure it's possible to open the device given the current audio config.
submix_sanitize_config(config, false);
- if (!submix_open_validate(rsxadev, &rsxadev->lock, config, false)) {
- ALOGE("adev_open_output_stream(): Unable to open output stream.");
+
+ int route_idx = -1;
+
+ pthread_mutex_lock(&rsxadev->lock);
+
+ status_t res = submix_get_route_idx_for_address_l(rsxadev, address, &route_idx);
+ if (res != OK) {
+ ALOGE("Error %d looking for address=%s in adev_open_output_stream", res, address);
+ pthread_mutex_unlock(&rsxadev->lock);
+ return res;
+ }
+
+ if (!submix_open_validate_l(rsxadev, route_idx, config, false)) {
+ ALOGE("adev_open_output_stream(): Unable to open output stream for address %s", address);
+ pthread_mutex_unlock(&rsxadev->lock);
return -EINVAL;
}
out = (struct submix_stream_out *)calloc(1, sizeof(struct submix_stream_out));
- if (!out) return -ENOMEM;
+ if (!out) {
+ pthread_mutex_unlock(&rsxadev->lock);
+ return -ENOMEM;
+ }
// Initialize the function pointer tables (v-tables).
out->stream.common.get_sample_rate = out_get_sample_rate;
@@ -1239,23 +1339,23 @@ static int adev_open_output_stream(struct audio_hw_device *dev,
#if ENABLE_RESAMPLING
// Recreate the pipe with the correct sample rate so that MonoPipe.write() rate limits
// writes correctly.
- force_pipe_creation = rsxadev->config.common.sample_rate != config->sample_rate;
+ force_pipe_creation = rsxadev->routes[route_idx].config.common.sample_rate
+ != config->sample_rate;
#endif // ENABLE_RESAMPLING
// If the sink has been shutdown or pipe recreation is forced (see above), delete the pipe so
// that it's recreated.
- pthread_mutex_lock(&rsxadev->lock);
- if ((rsxadev->rsxSink != NULL && rsxadev->rsxSink->isShutdown()) || force_pipe_creation) {
- submix_audio_device_release_pipe(rsxadev);
+ if ((rsxadev->routes[route_idx].rsxSink != NULL
+ && rsxadev->routes[route_idx].rsxSink->isShutdown()) || force_pipe_creation) {
+ submix_audio_device_release_pipe_l(rsxadev, route_idx);
}
- pthread_mutex_unlock(&rsxadev->lock);
// Store a pointer to the device from the output stream.
out->dev = rsxadev;
// Initialize the pipe.
- ALOGV("adev_open_output_stream(): about to create pipe");
- submix_audio_device_create_pipe(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
- DEFAULT_PIPE_PERIOD_COUNT, NULL, out);
+ ALOGV("adev_open_output_stream(): about to create pipe at index %d", route_idx);
+ submix_audio_device_create_pipe_l(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
+ DEFAULT_PIPE_PERIOD_COUNT, NULL, out, address, route_idx);
#if LOG_STREAMS_TO_FILES
out->log_fd = open(LOG_STREAM_OUT_FILENAME, O_CREAT | O_TRUNC | O_WRONLY,
LOG_STREAM_FILE_PERMISSIONS);
@@ -1266,18 +1366,25 @@ static int adev_open_output_stream(struct audio_hw_device *dev,
// Return the output stream.
*stream_out = &out->stream;
+ pthread_mutex_unlock(&rsxadev->lock);
return 0;
}
static void adev_close_output_stream(struct audio_hw_device *dev,
struct audio_stream_out *stream)
{
+ struct submix_audio_device * rsxadev = audio_hw_device_get_submix_audio_device(
+ const_cast<struct audio_hw_device*>(dev));
struct submix_stream_out * const out = audio_stream_out_get_submix_stream_out(stream);
- ALOGD("adev_close_output_stream()");
- submix_audio_device_destroy_pipe(audio_hw_device_get_submix_audio_device(dev), NULL, out);
+
+ pthread_mutex_lock(&rsxadev->lock);
+ ALOGD("adev_close_output_stream() addr = %s", rsxadev->routes[out->route_handle].address);
+ submix_audio_device_destroy_pipe_l(audio_hw_device_get_submix_audio_device(dev), NULL, out);
#if LOG_STREAMS_TO_FILES
if (out->log_fd >= 0) close(out->log_fd);
#endif // LOG_STREAMS_TO_FILES
+
+ pthread_mutex_unlock(&rsxadev->lock);
free(out);
}
@@ -1363,12 +1470,19 @@ static size_t adev_get_input_buffer_size(const struct audio_hw_device *dev,
const struct audio_config *config)
{
if (audio_is_linear_pcm(config->format)) {
- const size_t buffer_period_size_frames =
- audio_hw_device_get_submix_audio_device(const_cast<struct audio_hw_device*>(dev))->
- config.buffer_period_size_frames;
+ size_t max_buffer_period_size_frames = 0;
+ struct submix_audio_device * rsxadev = audio_hw_device_get_submix_audio_device(
+ const_cast<struct audio_hw_device*>(dev));
+ // look for the largest buffer period size
+ for (int i = 0 ; i < MAX_ROUTES ; i++) {
+ if (rsxadev->routes[i].config.buffer_period_size_frames > max_buffer_period_size_frames)
+ {
+ max_buffer_period_size_frames = rsxadev->routes[i].config.buffer_period_size_frames;
+ }
+ }
const size_t frame_size_in_bytes = audio_channel_count_from_in_mask(config->channel_mask) *
audio_bytes_per_sample(config->format);
- const size_t buffer_size = buffer_period_size_frames * frame_size_in_bytes;
+ const size_t buffer_size = max_buffer_period_size_frames * frame_size_in_bytes;
SUBMIX_ALOGV("adev_get_input_buffer_size() returns %zu bytes, %zu frames",
buffer_size, buffer_period_size_frames);
return buffer_size;
@@ -1382,37 +1496,49 @@ static int adev_open_input_stream(struct audio_hw_device *dev,
struct audio_config *config,
struct audio_stream_in **stream_in,
audio_input_flags_t flags __unused,
- const char *address __unused,
+ const char *address,
audio_source_t source __unused)
{
struct submix_audio_device *rsxadev = audio_hw_device_get_submix_audio_device(dev);
struct submix_stream_in *in;
- ALOGD("adev_open_input_stream()");
+ ALOGD("adev_open_input_stream(addr=%s)", address);
(void)handle;
(void)devices;
*stream_in = NULL;
+ // Do we already have a route for this address?
+ int route_idx = -1;
+
+ pthread_mutex_lock(&rsxadev->lock);
+
+ status_t res = submix_get_route_idx_for_address_l(rsxadev, address, &route_idx);
+ if (res != OK) {
+ ALOGE("Error %d looking for address=%s in adev_open_input_stream", res, address);
+ pthread_mutex_unlock(&rsxadev->lock);
+ return res;
+ }
+
// Make sure it's possible to open the device given the current audio config.
submix_sanitize_config(config, true);
- if (!submix_open_validate(rsxadev, &rsxadev->lock, config, true)) {
+ if (!submix_open_validate_l(rsxadev, route_idx, config, true)) {
ALOGE("adev_open_input_stream(): Unable to open input stream.");
+ pthread_mutex_unlock(&rsxadev->lock);
return -EINVAL;
}
#if ENABLE_LEGACY_INPUT_OPEN
- pthread_mutex_lock(&rsxadev->lock);
- in = rsxadev->input;
+ in = rsxadev->routes[route_idx].input;
if (in) {
in->ref_count++;
- sp<MonoPipe> sink = rsxadev->rsxSink;
+ sp<MonoPipe> sink = rsxadev->routes[route_idx].rsxSink;
ALOG_ASSERT(sink != NULL);
// If the sink has been shutdown, delete the pipe.
if (sink != NULL) {
if (sink->isShutdown()) {
ALOGD(" Non-NULL shut down sink when opening input stream, releasing, refcount=%d",
in->ref_count);
- submix_audio_device_release_pipe(rsxadev);
+ submix_audio_device_release_pipe_l(rsxadev, in->route_handle);
} else {
ALOGD(" Non-NULL sink when opening input stream, refcount=%d", in->ref_count);
}
@@ -1420,7 +1546,6 @@ static int adev_open_input_stream(struct audio_hw_device *dev,
ALOGE("NULL sink when opening input stream, refcount=%d", in->ref_count);
}
}
- pthread_mutex_unlock(&rsxadev->lock);
#else
in = NULL;
#endif // ENABLE_LEGACY_INPUT_OPEN
@@ -1446,18 +1571,29 @@ static int adev_open_input_stream(struct audio_hw_device *dev,
in->stream.set_gain = in_set_gain;
in->stream.read = in_read;
in->stream.get_input_frames_lost = in_get_input_frames_lost;
+
+ in->dev = rsxadev;
+#if LOG_STREAMS_TO_FILES
+ in->log_fd = -1;
+#endif
}
// Initialize the input stream.
in->read_counter_frames = 0;
- in->output_standby = rsxadev->output_standby;
- in->dev = rsxadev;
+ in->input_standby = true;
+ if (rsxadev->routes[route_idx].output != NULL) {
+ in->output_standby_rec_thr = rsxadev->routes[route_idx].output->output_standby;
+ } else {
+ in->output_standby_rec_thr = true;
+ }
+
in->read_error_count = 0;
// Initialize the pipe.
ALOGV("adev_open_input_stream(): about to create pipe");
- submix_audio_device_create_pipe(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
- DEFAULT_PIPE_PERIOD_COUNT, in, NULL);
+ submix_audio_device_create_pipe_l(rsxadev, config, DEFAULT_PIPE_SIZE_IN_FRAMES,
+ DEFAULT_PIPE_PERIOD_COUNT, in, NULL, address, route_idx);
#if LOG_STREAMS_TO_FILES
+ if (in->log_fd >= 0) close(in->log_fd);
in->log_fd = open(LOG_STREAM_IN_FILENAME, O_CREAT | O_TRUNC | O_WRONLY,
LOG_STREAM_FILE_PERMISSIONS);
ALOGE_IF(in->log_fd < 0, "adev_open_input_stream(): log file open failed %s",
@@ -1467,15 +1603,19 @@ static int adev_open_input_stream(struct audio_hw_device *dev,
// Return the input stream.
*stream_in = &in->stream;
+ pthread_mutex_unlock(&rsxadev->lock);
return 0;
}
static void adev_close_input_stream(struct audio_hw_device *dev,
struct audio_stream_in *stream)
{
+ struct submix_audio_device * const rsxadev = audio_hw_device_get_submix_audio_device(dev);
+
struct submix_stream_in * const in = audio_stream_in_get_submix_stream_in(stream);
ALOGD("adev_close_input_stream()");
- submix_audio_device_destroy_pipe(audio_hw_device_get_submix_audio_device(dev), in, NULL);
+ pthread_mutex_lock(&rsxadev->lock);
+ submix_audio_device_destroy_pipe_l(rsxadev, in, NULL);
#if LOG_STREAMS_TO_FILES
if (in->log_fd >= 0) close(in->log_fd);
#endif // LOG_STREAMS_TO_FILES
@@ -1484,12 +1624,26 @@ static void adev_close_input_stream(struct audio_hw_device *dev,
#else
free(in);
#endif // ENABLE_LEGACY_INPUT_OPEN
+
+ pthread_mutex_unlock(&rsxadev->lock);
}
static int adev_dump(const audio_hw_device_t *device, int fd)
{
- (void)device;
- (void)fd;
+ const struct submix_audio_device * rsxadev = //audio_hw_device_get_submix_audio_device(device);
+ reinterpret_cast<const struct submix_audio_device *>(
+ reinterpret_cast<const uint8_t *>(device) -
+ offsetof(struct submix_audio_device, device));
+ char msg[100];
+ int n = sprintf(msg, "\nReroute submix audio module:\n");
+ write(fd, &msg, n);
+ for (int i=0 ; i < MAX_ROUTES ; i++) {
+ n = sprintf(msg, " route[%d] rate in=%d out=%d, addr=[%s]\n", i,
+ rsxadev->routes[i].config.input_sample_rate,
+ rsxadev->routes[i].config.output_sample_rate,
+ rsxadev->routes[i].address);
+ write(fd, &msg, n);
+ }
return 0;
}
@@ -1536,8 +1690,10 @@ static int adev_open(const hw_module_t* module, const char* name,
rsxadev->device.close_input_stream = adev_close_input_stream;
rsxadev->device.dump = adev_dump;
- rsxadev->input_standby = true;
- rsxadev->output_standby = true;
+ for (int i=0 ; i < MAX_ROUTES ; i++) {
+ memset(&rsxadev->routes[i], 0, sizeof(route_config));
+ strcpy(rsxadev->routes[i].address, "");
+ }
*device = &rsxadev->device.common;
diff --git a/modules/sensors/multihal.cpp b/modules/sensors/multihal.cpp
index 76ec161..cd67f6d 100644
--- a/modules/sensors/multihal.cpp
+++ b/modules/sensors/multihal.cpp
@@ -28,6 +28,7 @@
#include <vector>
#include <map>
+#include <string>
#include <stdio.h>
#include <dlfcn.h>
@@ -250,13 +251,41 @@ int sensors_poll_context_t::get_device_version_by_handle(int handle) {
}
}
+// Android L requires sensor HALs to be either 1_0 compliant or 1_3 (or later) compliant
+#define HAL_VERSION_IS_COMPLIANT(version) \
+ (version == SENSORS_DEVICE_API_VERSION_1_0 || version >= SENSORS_DEVICE_API_VERSION_1_3)
+
+// Returns true if HAL is compliant, false if HAL is not compliant or if handle is invalid
+static bool halIsCompliant(sensors_poll_context_t *ctx, int handle) {
+ int version = ctx->get_device_version_by_handle(handle);
+ return version != -1 && HAL_VERSION_IS_COMPLIANT(version);
+}
+
+const char *apiNumToStr(int version) {
+ switch(version) {
+ case SENSORS_DEVICE_API_VERSION_1_0:
+ return "SENSORS_DEVICE_API_VERSION_1_0";
+ case SENSORS_DEVICE_API_VERSION_1_1:
+ return "SENSORS_DEVICE_API_VERSION_1_1";
+ case SENSORS_DEVICE_API_VERSION_1_2:
+ return "SENSORS_DEVICE_API_VERSION_1_2";
+ case SENSORS_DEVICE_API_VERSION_1_3:
+ return "SENSORS_DEVICE_API_VERSION_1_3";
+ default:
+ return "UNKNOWN";
+ }
+}
+
int sensors_poll_context_t::activate(int handle, int enabled) {
int retval = -EINVAL;
ALOGV("activate");
int local_handle = get_local_handle(handle);
sensors_poll_device_t* v0 = this->get_v0_device_by_handle(handle);
- if (local_handle >= 0 && v0) {
+ if (halIsCompliant(this, handle) && local_handle >= 0 && v0) {
retval = v0->activate(v0, local_handle, enabled);
+ } else {
+ ALOGE("IGNORING activate(enable %d) call to non-API-compliant sensor handle=%d !",
+ enabled, handle);
}
ALOGV("retval %d", retval);
return retval;
@@ -267,8 +296,10 @@ int sensors_poll_context_t::setDelay(int handle, int64_t ns) {
ALOGV("setDelay");
int local_handle = get_local_handle(handle);
sensors_poll_device_t* v0 = this->get_v0_device_by_handle(handle);
- if (local_handle >= 0 && v0) {
+ if (halIsCompliant(this, handle) && local_handle >= 0 && v0) {
retval = v0->setDelay(v0, local_handle, ns);
+ } else {
+ ALOGE("IGNORING setDelay() call for non-API-compliant sensor handle=%d !", handle);
}
ALOGV("retval %d", retval);
return retval;
@@ -341,11 +372,12 @@ int sensors_poll_context_t::poll(sensors_event_t *data, int maxReads) {
int sensors_poll_context_t::batch(int handle, int flags, int64_t period_ns, int64_t timeout) {
ALOGV("batch");
int retval = -EINVAL;
- int version = this->get_device_version_by_handle(handle);
int local_handle = get_local_handle(handle);
sensors_poll_device_1_t* v1 = this->get_v1_device_by_handle(handle);
- if (version >= SENSORS_DEVICE_API_VERSION_1_0 && local_handle >= 0 && v1) {
+ if (halIsCompliant(this, handle) && local_handle >= 0 && v1) {
retval = v1->batch(v1, local_handle, flags, period_ns, timeout);
+ } else {
+ ALOGE("IGNORING batch() call to non-API-compliant sensor handle=%d !", handle);
}
ALOGV("retval %d", retval);
return retval;
@@ -354,11 +386,12 @@ int sensors_poll_context_t::batch(int handle, int flags, int64_t period_ns, int6
int sensors_poll_context_t::flush(int handle) {
ALOGV("flush");
int retval = -EINVAL;
- int version = this->get_device_version_by_handle(handle);
int local_handle = get_local_handle(handle);
sensors_poll_device_1_t* v1 = this->get_v1_device_by_handle(handle);
- if (version >= SENSORS_DEVICE_API_VERSION_1_0 && local_handle >= 0 && v1) {
+ if (halIsCompliant(this, handle) && local_handle >= 0 && v1) {
retval = v1->flush(v1, local_handle);
+ } else {
+ ALOGE("IGNORING flush() call to non-API-compliant sensor handle=%d !", handle);
}
ALOGV("retval %d", retval);
return retval;
@@ -577,7 +610,7 @@ static void lazy_init_sensors_list() {
ALOGV("end lazy_init_sensors_list");
}
-static int module__get_sensors_list(struct sensors_module_t* module,
+static int module__get_sensors_list(__unused struct sensors_module_t* module,
struct sensor_t const** list) {
ALOGV("module__get_sensors_list start");
lazy_init_sensors_list();
@@ -618,7 +651,7 @@ static int open_sensors(const struct hw_module_t* hw_module, const char* name,
sensors_poll_context_t *dev = new sensors_poll_context_t();
memset(dev, 0, sizeof(sensors_poll_device_1_t));
dev->proxy_device.common.tag = HARDWARE_DEVICE_TAG;
- dev->proxy_device.common.version = SENSORS_DEVICE_API_VERSION_1_1;
+ dev->proxy_device.common.version = SENSORS_DEVICE_API_VERSION_1_3;
dev->proxy_device.common.module = const_cast<hw_module_t*>(hw_module);
dev->proxy_device.common.close = device__close;
dev->proxy_device.activate = device__activate;
@@ -635,8 +668,15 @@ static int open_sensors(const struct hw_module_t* hw_module, const char* name,
sensors_module_t *sensors_module = (sensors_module_t*) *it;
struct hw_device_t* sub_hw_device;
int sub_open_result = sensors_module->common.methods->open(*it, name, &sub_hw_device);
- if (!sub_open_result)
+ if (!sub_open_result) {
+ if (!HAL_VERSION_IS_COMPLIANT(sub_hw_device->version)) {
+ ALOGE("SENSORS_DEVICE_API_VERSION_1_3 is required for all sensor HALs");
+ ALOGE("This HAL reports non-compliant API level : %s",
+ apiNumToStr(sub_hw_device->version));
+ ALOGE("Sensors belonging to this HAL will get ignored !");
+ }
dev->addSubHwDevice(sub_hw_device);
+ }
}
// Prepare the output param and return
diff --git a/modules/usbaudio/alsa_device_profile.c b/modules/usbaudio/alsa_device_profile.c
index c7df00c..8e84471 100644
--- a/modules/usbaudio/alsa_device_profile.c
+++ b/modules/usbaudio/alsa_device_profile.c
@@ -58,10 +58,9 @@ extern int8_t const pcm_format_value_map[50];
static const unsigned std_sample_rates[] =
{48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025, 8000};
-void profile_init(alsa_device_profile* profile, int direction)
+static void profile_reset(alsa_device_profile* profile)
{
profile->card = profile->device = -1;
- profile->direction = direction;
/* Fill the attribute arrays with invalid values */
size_t index;
@@ -83,6 +82,12 @@ void profile_init(alsa_device_profile* profile, int direction)
profile->is_valid = false;
}
+void profile_init(alsa_device_profile* profile, int direction)
+{
+ profile->direction = direction;
+ profile_reset(profile);
+}
+
bool profile_is_initialized(alsa_device_profile* profile)
{
return profile->card >= 0 && profile->device >= 0;
@@ -97,7 +102,7 @@ bool profile_is_cached_for(alsa_device_profile* profile, int card, int device) {
}
void profile_decache(alsa_device_profile* profile) {
- profile->card = profile->device = -1;
+ profile_reset(profile);
}
/*
@@ -275,28 +280,19 @@ static unsigned profile_enum_sample_formats(alsa_device_profile* profile, struct
static unsigned profile_enum_channel_counts(alsa_device_profile* profile, unsigned min, unsigned max)
{
- // TODO: Don't return MONO even if the device supports it. This causes problems
- // in AudioPolicyManager. Revisit.
- static const unsigned std_out_channel_counts[] = {8, 4, 2/*, 1*/};
- static const unsigned std_in_channel_counts[] = {8, 4, 2, 1};
-
- unsigned * channel_counts =
- profile->direction == PCM_OUT ? std_out_channel_counts : std_in_channel_counts;
- unsigned num_channel_counts =
- profile->direction == PCM_OUT
- ? ARRAY_SIZE(std_out_channel_counts) : ARRAY_SIZE(std_in_channel_counts);
+ static const unsigned std_channel_counts[] = {8, 4, 2, 1};
unsigned num_counts = 0;
unsigned index;
/* TODO write a profile_test_channel_count() */
/* Ensure there is at least one invalid channel count to terminate the channel counts array */
- for (index = 0; index < num_channel_counts &&
+ for (index = 0; index < ARRAY_SIZE(std_channel_counts) &&
num_counts < ARRAY_SIZE(profile->channel_counts) - 1;
index++) {
/* TODO Do we want a channel counts test? */
- if (channel_counts[index] >= min && channel_counts[index] <= max /* &&
+ if (std_channel_counts[index] >= min && std_channel_counts[index] <= max /* &&
profile_test_channel_count(profile, channel_counts[index])*/) {
- profile->channel_counts[num_counts++] = channel_counts[index];
+ profile->channel_counts[num_counts++] = std_channel_counts[index];
}
}
@@ -459,6 +455,7 @@ char * profile_get_channel_count_strs(alsa_device_profile* profile)
};
const bool isOutProfile = profile->direction == PCM_OUT;
+
const char * const * const names_array = isOutProfile ? out_chans_strs : in_chans_strs;
const size_t names_size = isOutProfile ? ARRAY_SIZE(out_chans_strs)
: ARRAY_SIZE(in_chans_strs);
@@ -467,12 +464,17 @@ char * profile_get_channel_count_strs(alsa_device_profile* profile)
buffer[0] = '\0';
const int buffer_size = ARRAY_SIZE(buffer);
int num_entries = 0;
- bool stereo_allowed = false;
+ /* We currently support MONO and STEREO, and always report STEREO, but some (many)
+ * USB audio devices may only announce support for MONO (a headset mic for example) or
+ * for the total number of output channels. So, if the device itself doesn't explicitly
+ * support STEREO, append stereo to the channel config strings we are generating.
+ */
+ bool stereo_present = false;
unsigned index;
unsigned channel_count;
for (index = 0; (channel_count = profile->channel_counts[index]) != 0; index++) {
- stereo_allowed = stereo_allowed || channel_count == 2;
+ stereo_present = stereo_present || channel_count == 2;
if (channel_count < names_size && names_array[channel_count] != NULL) {
if (num_entries++ != 0) {
strncat(buffer, "|", buffer_size);
@@ -480,14 +482,16 @@ char * profile_get_channel_count_strs(alsa_device_profile* profile)
strncat(buffer, names_array[channel_count], buffer_size);
}
}
+
/* emulated modes:
* always expose stereo as we can emulate it for PCM_OUT
*/
- if (!stereo_allowed && isOutProfile) {
+ if (!stereo_present) {
if (num_entries++ != 0) {
strncat(buffer, "|", buffer_size);
}
strncat(buffer, names_array[2], buffer_size); /* stereo */
}
+
return strdup(buffer);
}
diff --git a/modules/usbaudio/audio_hw.c b/modules/usbaudio/audio_hw.c
index 664a753..0346408 100644
--- a/modules/usbaudio/audio_hw.c
+++ b/modules/usbaudio/audio_hw.c
@@ -104,9 +104,11 @@ struct stream_in {
alsa_device_profile * profile;
alsa_device_proxy proxy; /* state of the stream */
- // not used?
- // struct audio_config hal_pcm_config;
-
+ unsigned hal_channel_count; /* channel count exposed to AudioFlinger.
+ * This may differ from the device channel count when
+ * the device is not compatible with AudioFlinger
+ * capabilities, e.g. exposes too many channels or
+ * too few channels. */
/* We may need to read more data from the device in order to data reduce to 16bit, 4chan */
void * conversion_buffer; /* any conversions are put into here
* they could come from here too if
@@ -623,25 +625,13 @@ static int in_set_sample_rate(struct audio_stream *stream, uint32_t rate)
static size_t in_get_buffer_size(const struct audio_stream *stream)
{
const struct stream_in * in = ((const struct stream_in*)stream);
- size_t buffer_size =
- proxy_get_period_size(&in->proxy) * audio_stream_in_frame_size(&(in->stream));
- ALOGV("in_get_buffer_size() = %zd", buffer_size);
-
- return buffer_size;
+ return proxy_get_period_size(&in->proxy) * audio_stream_in_frame_size(&(in->stream));
}
static uint32_t in_get_channels(const struct audio_stream *stream)
{
- /* TODO Here is the code we need when we support arbitrary channel counts
- * alsa_device_proxy * proxy = ((struct stream_in*)stream)->proxy;
- * unsigned channel_count = proxy_get_channel_count(proxy);
- * uint32_t channel_mask = audio_channel_in_mask_from_count(channel_count);
- * ALOGV("in_get_channels() = 0x%X count:%d", channel_mask, channel_count);
- * return channel_mask;
- */
- /* TODO When AudioPolicyManager & AudioFlinger supports arbitrary channels
- rewrite this to return the ACTUAL channel format */
- return AUDIO_CHANNEL_IN_STEREO;
+ const struct stream_in *in = (const struct stream_in*)stream;
+ return audio_channel_in_mask_from_count(in->hal_channel_count);
}
static audio_format_t in_get_format(const struct audio_stream *stream)
@@ -808,7 +798,7 @@ static ssize_t in_read(struct audio_stream_in *stream, void* buffer, size_t byte
*/
num_read_buff_bytes = bytes;
int num_device_channels = proxy_get_channel_count(&in->proxy);
- int num_req_channels = 2; /* always, for now */
+ int num_req_channels = in->hal_channel_count;
if (num_device_channels != num_req_channels) {
num_read_buff_bytes = (num_device_channels * num_read_buff_bytes) / num_req_channels;
@@ -960,19 +950,18 @@ static int adev_open_input_stream(struct audio_hw_device *dev,
ret = -EINVAL;
}
- if (config->channel_mask == AUDIO_CHANNEL_NONE) {
- /* just return AUDIO_CHANNEL_IN_STEREO until the framework supports other input
- * formats */
- config->channel_mask = AUDIO_CHANNEL_IN_STEREO;
-
- } else if (config->channel_mask != AUDIO_CHANNEL_IN_STEREO) {
- /* allow only stereo capture for now */
- config->channel_mask = AUDIO_CHANNEL_IN_STEREO;
- ret = -EINVAL;
+ /* Channels */
+ unsigned proposed_channel_count = profile_get_default_channel_count(in->profile);
+ if (k_force_channels) {
+ proposed_channel_count = k_force_channels;
+ } else if (config->channel_mask != AUDIO_CHANNEL_NONE) {
+ proposed_channel_count = audio_channel_count_from_in_mask(config->channel_mask);
}
- // proxy_config.channels = 0; /* don't change */
- proxy_config.channels = profile_get_default_channel_count(in->profile);
+ /* we can expose any channel count mask, and emulate internally. */
+ config->channel_mask = audio_channel_in_mask_from_count(proposed_channel_count);
+ in->hal_channel_count = proposed_channel_count;
+ proxy_config.channels = profile_get_default_channel_count(in->profile);
proxy_prepare(&in->proxy, in->profile, &proxy_config);
in->standby = true;
diff --git a/tests/camera2/CameraBurstTests.cpp b/tests/camera2/CameraBurstTests.cpp
index 560fca5..198c0c1 100644
--- a/tests/camera2/CameraBurstTests.cpp
+++ b/tests/camera2/CameraBurstTests.cpp
@@ -311,8 +311,7 @@ TEST_F(CameraBurstTest, ManualExposureControl) {
* $ setenv CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES 1
* $ /data/nativetest/camera2_test/camera2_test --gtest_filter="*VariableBurst"
*/
-// Disable this test for now, as we need cleanup the usage of the deprecated tag quite a bit.
-TEST_F(CameraBurstTest, DISABLED_VariableBurst) {
+TEST_F(CameraBurstTest, VariableBurst) {
TEST_EXTENSION_FORKING_INIT;
@@ -413,34 +412,38 @@ TEST_F(CameraBurstTest, DISABLED_VariableBurst) {
dout << std::endl;
{
- camera_metadata_ro_entry availableProcessedSizes =
- GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
-
- camera_metadata_ro_entry availableProcessedMinFrameDurations =
- GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
-
- EXPECT_EQ(availableProcessedSizes.count,
- availableProcessedMinFrameDurations.count * 2) <<
- "The number of minimum frame durations doesn't match the number of "
- "available sizes. Using fallback values";
-
- if (availableProcessedSizes.count ==
- availableProcessedMinFrameDurations.count * 2) {
- bool gotSize = false;
- for (size_t i = 0; i < availableProcessedSizes.count; i += 2) {
- if (availableProcessedSizes.data.i32[i] == mWidth &&
- availableProcessedSizes.data.i32[i+1] == mHeight) {
- gotSize = true;
- minDuration = availableProcessedMinFrameDurations.data.i64[i/2];
+ if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_ro_entry availableProcessedSizes =
+ GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
+
+ camera_metadata_ro_entry availableProcessedMinFrameDurations =
+ GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
+
+ EXPECT_EQ(availableProcessedSizes.count,
+ availableProcessedMinFrameDurations.count * 2) <<
+ "The number of minimum frame durations doesn't match the number of "
+ "available sizes. Using fallback values";
+
+ if (availableProcessedSizes.count ==
+ availableProcessedMinFrameDurations.count * 2) {
+ bool gotSize = false;
+ for (size_t i = 0; i < availableProcessedSizes.count; i += 2) {
+ if (availableProcessedSizes.data.i32[i] == mWidth &&
+ availableProcessedSizes.data.i32[i+1] == mHeight) {
+ gotSize = true;
+ minDuration = availableProcessedMinFrameDurations.data.i64[i/2];
+ }
}
+ EXPECT_TRUE(gotSize) << "Can't find stream size in list of "
+ "available sizes: " << mWidth << ", " << mHeight;
}
- EXPECT_TRUE(gotSize) << "Can't find stream size in list of "
- "available sizes: " << mWidth << ", " << mHeight;
- }
- if (minDuration == 0) {
- minDuration = 1 * SEC / 30; // Fall back to 30 fps as minimum duration
+ if (minDuration == 0) {
+ minDuration = 1 * SEC / 30; // Fall back to 30 fps as minimum duration
+ }
+ } else {
+ minDuration = getMinFrameDurationFor(
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, mWidth, mHeight);
}
-
ASSERT_LT(0, minDuration);
camera_metadata_ro_entry maxFrameDuration =
diff --git a/tests/camera2/CameraMetadataTests.cpp b/tests/camera2/CameraMetadataTests.cpp
index eddc593..94fa911 100644
--- a/tests/camera2/CameraMetadataTests.cpp
+++ b/tests/camera2/CameraMetadataTests.cpp
@@ -162,26 +162,33 @@ TEST_F(CameraMetadataTest, RequiredFormats) {
TEST_F(CameraMetadataTest, SaneResolutions) {
TEST_EXTENSION_FORKING_INIT;
- // Iff there are listed raw resolutions, the format should be available
- int rawResolutionsCount =
- GetEntryCountFromStaticTag(ANDROID_SCALER_AVAILABLE_RAW_SIZES);
- if (rawResolutionsCount > 0) {
- EXPECT_TRUE(
- HasElementInArrayFromStaticTag(ANDROID_SCALER_AVAILABLE_FORMATS,
- HAL_PIXEL_FORMAT_RAW_SENSOR));
- }
+ if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
+ // Iff there are listed raw resolutions, the format should be available
+ int rawResolutionsCount =
+ GetEntryCountFromStaticTag(ANDROID_SCALER_AVAILABLE_RAW_SIZES);
+ if (rawResolutionsCount > 0) {
+ EXPECT_TRUE(
+ HasElementInArrayFromStaticTag(ANDROID_SCALER_AVAILABLE_FORMATS,
+ HAL_PIXEL_FORMAT_RAW_SENSOR));
+ }
- // Required processed sizes.
- int processedSizeCount =
- GetEntryCountFromStaticTag(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
- EXPECT_NE(0, processedSizeCount);
- EXPECT_EQ(0, processedSizeCount % 2); // multiple of 2 (w,h)
-
- // Required JPEG sizes
- int jpegSizeCount =
- GetEntryCountFromStaticTag(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
- EXPECT_NE(0, jpegSizeCount);
- EXPECT_EQ(0, jpegSizeCount % 2); // multiple of 2 (w,h)
+ // Required processed sizes.
+ int processedSizeCount =
+ GetEntryCountFromStaticTag(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
+ EXPECT_NE(0, processedSizeCount);
+ EXPECT_EQ(0, processedSizeCount % 2); // multiple of 2 (w,h)
+
+ // Required JPEG sizes
+ int jpegSizeCount =
+ GetEntryCountFromStaticTag(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
+ EXPECT_NE(0, jpegSizeCount);
+ EXPECT_EQ(0, jpegSizeCount % 2); // multiple of 2 (w,h)
+ } else {
+ int strmConfigCount =
+ GetEntryCountFromStaticTag(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ EXPECT_NE(0, strmConfigCount);
+ EXPECT_EQ(0, strmConfigCount % 4); // multiple of 4 (format,w,h,output?)
+ }
}
diff --git a/tests/camera2/CameraMultiStreamTests.cpp b/tests/camera2/CameraMultiStreamTests.cpp
index 89af91d..df8e623 100644
--- a/tests/camera2/CameraMultiStreamTests.cpp
+++ b/tests/camera2/CameraMultiStreamTests.cpp
@@ -472,19 +472,42 @@ TEST_F(CameraMultiStreamTest, DISABLED_MultiBurst) {
TEST_EXTENSION_FORKING_INIT;
- camera_metadata_ro_entry availableProcessedSizes =
- GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
- ASSERT_EQ(0u, availableProcessedSizes.count % 2);
- ASSERT_GE(availableProcessedSizes.count, 2u);
- camera_metadata_ro_entry availableProcessedMinFrameDurations =
- GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
- EXPECT_EQ(availableProcessedSizes.count,
- availableProcessedMinFrameDurations.count * 2);
-
- camera_metadata_ro_entry availableJpegSizes =
- GetStaticEntry(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
- ASSERT_EQ(0u, availableJpegSizes.count % 2);
- ASSERT_GE(availableJpegSizes.count, 2u);
+ const int32_t* implDefData;
+ size_t implDefCount;
+ const int32_t* jpegData;
+ size_t jpegCount;
+ if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_ro_entry availableProcessedSizes =
+ GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);
+ ASSERT_EQ(0u, availableProcessedSizes.count % 2);
+ ASSERT_GE(availableProcessedSizes.count, 2u);
+ camera_metadata_ro_entry availableProcessedMinFrameDurations =
+ GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
+ EXPECT_EQ(availableProcessedSizes.count,
+ availableProcessedMinFrameDurations.count * 2);
+
+ camera_metadata_ro_entry availableJpegSizes =
+ GetStaticEntry(ANDROID_SCALER_AVAILABLE_JPEG_SIZES);
+ ASSERT_EQ(0u, availableJpegSizes.count % 2);
+ ASSERT_GE(availableJpegSizes.count, 2u);
+ implDefData = availableProcessedSizes.data.i32;
+ implDefCount = availableProcessedSizes.count;
+ jpegData = availableJpegSizes.data.i32;
+ jpegCount = availableJpegSizes.count;
+ } else {
+            // Resolution lists are fetched directly into implDefData/implDefCount
+            // and jpegData/jpegCount declared above; no extra locals needed here.
+
+ getResolutionList(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, &implDefData, &implDefCount);
+ ASSERT_NE(0u, implDefCount)
+ << "Missing implementation defined sizes";
+ ASSERT_EQ(0u, implDefCount % 2);
+ ASSERT_GE(implDefCount, 2u);
+
+ getResolutionList(HAL_PIXEL_FORMAT_BLOB, &jpegData, &jpegCount);
+ ASSERT_EQ(0u, jpegCount % 2);
+ ASSERT_GE(jpegCount, 2u);
+ }
camera_metadata_ro_entry hardwareLevel =
GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
@@ -504,23 +527,25 @@ TEST_F(CameraMultiStreamTest, DISABLED_MultiBurst) {
}
// Find the right sizes for preview, metering, and capture streams
- // assumes at least 2 entries in availableProcessedSizes.
int64_t minFrameDuration = DEFAULT_FRAME_DURATION;
Size processedMinSize, processedMaxSize, jpegMaxSize;
- const int32_t* data = availableProcessedSizes.data.i32;
- size_t count = availableProcessedSizes.count;
int32_t minIdx, maxIdx;
- GetMinSize(data, count, &processedMinSize, &minIdx);
- GetMaxSize(data, count, &processedMaxSize, &maxIdx);
+ GetMinSize(implDefData, implDefCount, &processedMinSize, &minIdx);
+ GetMaxSize(implDefData, implDefCount, &processedMaxSize, &maxIdx);
ALOGV("Found processed max size: %dx%d, min size = %dx%d",
processedMaxSize.width, processedMaxSize.height,
processedMinSize.width, processedMinSize.height);
- if (availableProcessedSizes.count ==
- availableProcessedMinFrameDurations.count * 2) {
+ if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
+ camera_metadata_ro_entry availableProcessedMinFrameDurations =
+ GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);
minFrameDuration =
availableProcessedMinFrameDurations.data.i64[maxIdx / 2];
+ } else {
+ minFrameDuration = getMinFrameDurationFor(
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ processedMaxSize.width, processedMaxSize.height);
}
EXPECT_GT(minFrameDuration, 0);
@@ -531,9 +556,7 @@ TEST_F(CameraMultiStreamTest, DISABLED_MultiBurst) {
ALOGV("targeted minimal frame duration is: %" PRId64 "ns", minFrameDuration);
- data = &(availableJpegSizes.data.i32[0]);
- count = availableJpegSizes.count;
- GetMaxSize(data, count, &jpegMaxSize, &maxIdx);
+ GetMaxSize(jpegData, jpegCount, &jpegMaxSize, &maxIdx);
ALOGV("Found Jpeg size max idx = %d", maxIdx);
// Max Jpeg size should be available in processed sizes. Use it for
@@ -627,7 +650,7 @@ TEST_F(CameraMultiStreamTest, DISABLED_MultiBurst) {
// purely by analog gain if possible.
Vector<int32_t> sensitivities;
Vector<int64_t> exposures;
- count = (maxAnalogSensitivity - minSensitivity + 99) / 100;
+ size_t count = (maxAnalogSensitivity - minSensitivity + 99) / 100;
sensitivities.push_back(minSensitivity);
for (size_t i = 1; i < count; i++) {
sensitivities.push_back(minSensitivity + i * 100);
diff --git a/tests/camera2/CameraStreamFixture.h b/tests/camera2/CameraStreamFixture.h
index f56daf0..cc13169 100644
--- a/tests/camera2/CameraStreamFixture.h
+++ b/tests/camera2/CameraStreamFixture.h
@@ -96,7 +96,7 @@ private:
sp<CameraDeviceBase> device = mDevice;
/* use an arbitrary w,h */
- {
+ if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
const int tag = ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES;
const CameraMetadata& staticInfo = device->info();
@@ -106,9 +106,22 @@ private:
ASSERT_LE(2u, entry.count);
/* this seems like it would always be the smallest w,h
- but we actually make no contract that it's sorted asc */;
+ but we actually make no contract that it's sorted asc */
mWidth = entry.data.i32[0];
mHeight = entry.data.i32[1];
+ } else {
+ buildOutputResolutions();
+ const int32_t *implDefResolutions;
+ size_t implDefResolutionsCount;
+
+ int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+
+ getResolutionList(format,
+ &implDefResolutions, &implDefResolutionsCount);
+ ASSERT_NE(0u, implDefResolutionsCount)
+ << "Missing implementation defined sizes";
+ mWidth = implDefResolutions[0];
+ mHeight = implDefResolutions[1];
}
}
void TearDown() {
@@ -117,12 +130,82 @@ private:
// important: shut down HAL before releasing streams
CameraModuleFixture::TearDown();
+ deleteOutputResolutions();
mNativeWindow.clear();
mCpuConsumer.clear();
mFrameListener.clear();
}
protected:
+
+ int64_t getMinFrameDurationFor(int32_t format, int32_t width, int32_t height) {
+ int64_t minFrameDuration = -1L;
+ const int tag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+ sp<CameraDeviceBase> device = mDevice;
+ const CameraMetadata& staticInfo = device->info();
+ camera_metadata_ro_entry_t availableMinDurations = staticInfo.find(tag);
+ for (uint32_t i = 0; i < availableMinDurations.count; i += 4) {
+ if (format == availableMinDurations.data.i64[i] &&
+ width == availableMinDurations.data.i64[i + 1] &&
+ height == availableMinDurations.data.i64[i + 2]) {
+ minFrameDuration = availableMinDurations.data.i64[i + 3];
+ break;
+ }
+ }
+ return minFrameDuration;
+ }
+
+ void buildOutputResolutions() {
+ if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
+ return;
+ }
+ if (mOutputResolutions.isEmpty()) {
+ const int tag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+ const CameraMetadata& staticInfo = mDevice->info();
+ camera_metadata_ro_entry_t availableStrmConfigs = staticInfo.find(tag);
+ ASSERT_EQ(0u, availableStrmConfigs.count % 4);
+ for (uint32_t i = 0; i < availableStrmConfigs.count; i += 4) {
+ int32_t format = availableStrmConfigs.data.i32[i];
+ int32_t width = availableStrmConfigs.data.i32[i + 1];
+ int32_t height = availableStrmConfigs.data.i32[i + 2];
+ int32_t inOrOut = availableStrmConfigs.data.i32[i + 3];
+ if (inOrOut == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
+ int index = mOutputResolutions.indexOfKey(format);
+ if (index < 0) {
+ index = mOutputResolutions.add(format, new Vector<int32_t>());
+ ASSERT_TRUE(index >= 0);
+ }
+ Vector<int32_t> *resolutions = mOutputResolutions.editValueAt(index);
+ resolutions->add(width);
+ resolutions->add(height);
+ }
+ }
+ }
+ }
+
+ void getResolutionList(int32_t format,
+ const int32_t **list,
+ size_t *count) {
+        // Lists are precomputed by buildOutputResolutions(); no status to track.
+ ALOGV("Getting resolutions for format %x", format);
+ if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
+ return;
+ }
+ int index = mOutputResolutions.indexOfKey(format);
+ ASSERT_TRUE(index >= 0);
+ Vector<int32_t>* resolutions = mOutputResolutions.valueAt(index);
+ *list = resolutions->array();
+ *count = resolutions->size();
+ }
+
+ void deleteOutputResolutions() {
+ for (uint32_t i = 0; i < mOutputResolutions.size(); i++) {
+ Vector<int32_t>* resolutions = mOutputResolutions.editValueAt(i);
+ delete resolutions;
+ }
+ mOutputResolutions.clear();
+ }
+
struct FrameListener : public ConsumerBase::FrameAvailableListener {
FrameListener() {
@@ -130,7 +213,7 @@ protected:
}
// CpuConsumer::FrameAvailableListener implementation
- virtual void onFrameAvailable() {
+ virtual void onFrameAvailable(const BufferItem& /* item */) {
ALOGV("Frame now available (start)");
Mutex::Autolock lock(mMutex);
@@ -280,7 +363,7 @@ protected:
android::sp<FrameListener> mFrameListener;
android::sp<CpuConsumer> mCpuConsumer;
android::sp<ANativeWindow> mNativeWindow;
-
+ KeyedVector<int32_t, Vector<int32_t>* > mOutputResolutions;
private:
CameraStreamParams mParam;
diff --git a/tests/camera2/camera2.cpp b/tests/camera2/camera2.cpp
index e3e7d9a..72b7b61 100644
--- a/tests/camera2/camera2.cpp
+++ b/tests/camera2/camera2.cpp
@@ -23,6 +23,7 @@
#include <fstream>
#include <utils/Vector.h>
+#include <utils/KeyedVector.h>
#include <gui/CpuConsumer.h>
#include <ui/PixelFormat.h>
#include <system/camera_metadata.h>
@@ -202,7 +203,9 @@ class Camera2Test: public testing::Test {
res = sCameraModule->get_camera_info(id, &info);
ASSERT_EQ(OK, res);
+ mDeviceVersion = info.device_version;
mStaticInfo = info.static_camera_characteristics;
+ buildOutputResolutions();
res = configureCameraDevice(mDevice,
mRequests,
@@ -241,44 +244,91 @@ class Camera2Test: public testing::Test {
ASSERT_GT(mStreams.size(), i) << "Stream id not found:" << id;
}
+ void buildOutputResolutions() {
+ status_t res;
+ if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
+ return;
+ }
+ if (mOutputResolutions.isEmpty()) {
+ camera_metadata_ro_entry_t availableStrmConfigs;
+ res = find_camera_metadata_ro_entry(mStaticInfo,
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ &availableStrmConfigs);
+ ASSERT_EQ(OK, res);
+ ASSERT_EQ(0u, availableStrmConfigs.count % 4);
+ for (uint32_t i = 0; i < availableStrmConfigs.count; i += 4) {
+ int32_t format = availableStrmConfigs.data.i32[i];
+ int32_t width = availableStrmConfigs.data.i32[i + 1];
+ int32_t height = availableStrmConfigs.data.i32[i + 2];
+ int32_t inOrOut = availableStrmConfigs.data.i32[i + 3];
+ if (inOrOut == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
+ int index = mOutputResolutions.indexOfKey(format);
+ if (index < 0) {
+ index = mOutputResolutions.add(format, new Vector<int32_t>());
+ ASSERT_TRUE(index >= 0);
+ }
+ Vector<int32_t> *resolutions = mOutputResolutions.editValueAt(index);
+ resolutions->add(width);
+ resolutions->add(height);
+ }
+ }
+ }
+ }
+
+ void deleteOutputResolutions() {
+ for (uint32_t i = 0; i < mOutputResolutions.size(); i++) {
+ Vector<int32_t>* resolutions = mOutputResolutions.editValueAt(i);
+ delete resolutions;
+ }
+ mOutputResolutions.clear();
+ }
+
void getResolutionList(int32_t format,
const int32_t **list,
size_t *count) {
- ALOGV("Getting resolutions for format %x", format);
status_t res;
- if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
- camera_metadata_ro_entry_t availableFormats;
- res = find_camera_metadata_ro_entry(mStaticInfo,
- ANDROID_SCALER_AVAILABLE_FORMATS,
- &availableFormats);
- ASSERT_EQ(OK, res);
+ ALOGV("Getting resolutions for format %x", format);
+ if (mDeviceVersion < CAMERA_DEVICE_API_VERSION_3_2) {
+ if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ camera_metadata_ro_entry_t availableFormats;
+ res = find_camera_metadata_ro_entry(mStaticInfo,
+ ANDROID_SCALER_AVAILABLE_FORMATS,
+ &availableFormats);
+ ASSERT_EQ(OK, res);
+
+ uint32_t formatIdx;
+ for (formatIdx=0; formatIdx < availableFormats.count; formatIdx++) {
+ if (availableFormats.data.i32[formatIdx] == format) break;
+ }
+ ASSERT_NE(availableFormats.count, formatIdx)
+ << "No support found for format 0x" << std::hex << format;
+ }
- uint32_t formatIdx;
- for (formatIdx=0; formatIdx < availableFormats.count; formatIdx++) {
- if (availableFormats.data.i32[formatIdx] == format) break;
+ camera_metadata_ro_entry_t availableSizes;
+ if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
+ res = find_camera_metadata_ro_entry(mStaticInfo,
+ ANDROID_SCALER_AVAILABLE_RAW_SIZES,
+ &availableSizes);
+ } else if (format == HAL_PIXEL_FORMAT_BLOB) {
+ res = find_camera_metadata_ro_entry(mStaticInfo,
+ ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
+ &availableSizes);
+ } else {
+ res = find_camera_metadata_ro_entry(mStaticInfo,
+ ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
+ &availableSizes);
}
- ASSERT_NE(availableFormats.count, formatIdx)
- << "No support found for format 0x" << std::hex << format;
- }
+ ASSERT_EQ(OK, res);
- camera_metadata_ro_entry_t availableSizes;
- if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
- res = find_camera_metadata_ro_entry(mStaticInfo,
- ANDROID_SCALER_AVAILABLE_RAW_SIZES,
- &availableSizes);
- } else if (format == HAL_PIXEL_FORMAT_BLOB) {
- res = find_camera_metadata_ro_entry(mStaticInfo,
- ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
- &availableSizes);
+ *list = availableSizes.data.i32;
+ *count = availableSizes.count;
} else {
- res = find_camera_metadata_ro_entry(mStaticInfo,
- ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
- &availableSizes);
+ int index = mOutputResolutions.indexOfKey(format);
+ ASSERT_TRUE(index >= 0);
+ Vector<int32_t>* resolutions = mOutputResolutions.valueAt(index);
+ *list = resolutions->array();
+ *count = resolutions->size();
}
- ASSERT_EQ(OK, res);
-
- *list = availableSizes.data.i32;
- *count = availableSizes.count;
}
status_t waitUntilDrained() {
@@ -325,11 +375,13 @@ class Camera2Test: public testing::Test {
closeCameraDevice(&mDevice);
}
+ deleteOutputResolutions();
TearDownModule();
}
int mId;
camera2_device *mDevice;
+ uint32_t mDeviceVersion;
const camera_metadata_t *mStaticInfo;
MetadataQueue mRequests;
@@ -337,11 +389,13 @@ class Camera2Test: public testing::Test {
NotifierListener mNotifications;
Vector<StreamAdapter*> mStreams;
+ KeyedVector<int32_t, Vector<int32_t>* > mOutputResolutions;
private:
static camera_module_t *sCameraModule;
static int sNumCameras;
static bool *sCameraSupportsHal2;
+
};
camera_module_t *Camera2Test::sCameraModule = NULL;
diff --git a/tests/camera2/camera2_utils.cpp b/tests/camera2/camera2_utils.cpp
index 3c0767a..9cc6c90 100644
--- a/tests/camera2/camera2_utils.cpp
+++ b/tests/camera2/camera2_utils.cpp
@@ -574,7 +574,7 @@ status_t FrameWaiter::waitForFrame(nsecs_t timeout) {
return OK;
}
-void FrameWaiter::onFrameAvailable() {
+void FrameWaiter::onFrameAvailable(const BufferItem& /* item */) {
Mutex::Autolock lock(mMutex);
mPendingFrames++;
mCondition.signal();
diff --git a/tests/camera2/camera2_utils.h b/tests/camera2/camera2_utils.h
index 0cdf4a3..c1d1e72 100644
--- a/tests/camera2/camera2_utils.h
+++ b/tests/camera2/camera2_utils.h
@@ -231,7 +231,7 @@ class FrameWaiter : public CpuConsumer::FrameAvailableListener {
*/
status_t waitForFrame(nsecs_t timeout);
- virtual void onFrameAvailable();
+ virtual void onFrameAvailable(const BufferItem& item);
int mPendingFrames;
Mutex mMutex;
diff --git a/tests/camera3/camera3test_fixtures.h b/tests/camera3/camera3test_fixtures.h
index 81d1997..17e3d45 100644
--- a/tests/camera3/camera3test_fixtures.h
+++ b/tests/camera3/camera3test_fixtures.h
@@ -68,7 +68,7 @@ class Camera3Device : public Camera3Module {
<< "Can't open camera device";
ASSERT_TRUE(NULL != device)
<< "Camera open() returned a NULL device";
- ASSERT_EQ(kVersion3_0, device->version)
+ ASSERT_LE(kVersion3_0, device->version)
<< "The device does not support HAL3";
cam_device_ = reinterpret_cast<camera3_device_t*>(device);
}