diff options
448 files changed, 29355 insertions, 14074 deletions
diff --git a/camera/Android.mk b/camera/Android.mk index da5ac59..4c4700b 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -30,12 +30,10 @@ LOCAL_SRC_FILES:= \ ICameraServiceListener.cpp \ ICameraRecordingProxy.cpp \ ICameraRecordingProxyListener.cpp \ - IProCameraUser.cpp \ - IProCameraCallbacks.cpp \ camera2/ICameraDeviceUser.cpp \ camera2/ICameraDeviceCallbacks.cpp \ camera2/CaptureRequest.cpp \ - ProCamera.cpp \ + camera2/OutputConfiguration.cpp \ CameraBase.cpp \ CameraUtils.cpp \ VendorTagDescriptor.cpp diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp index 65a1a47..5d50aa8 100644 --- a/camera/CameraBase.cpp +++ b/camera/CameraBase.cpp @@ -29,7 +29,6 @@ #include <camera/ICameraService.h> // needed to instantiate -#include <camera/ProCamera.h> #include <camera/Camera.h> #include <system/camera_metadata.h> @@ -217,7 +216,6 @@ status_t CameraBase<TCam, TCamTraits>::removeServiceListener( return cs->removeListener(listener); } -template class CameraBase<ProCamera>; template class CameraBase<Camera>; } // namespace android diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp index 043437f..e216d26 100644 --- a/camera/CameraMetadata.cpp +++ b/camera/CameraMetadata.cpp @@ -74,7 +74,7 @@ CameraMetadata::~CameraMetadata() { clear(); } -const camera_metadata_t* CameraMetadata::getAndLock() { +const camera_metadata_t* CameraMetadata::getAndLock() const { mLocked = true; return mBuffer; } diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp index e5e4e90..68969cf 100644 --- a/camera/CameraParameters.cpp +++ b/camera/CameraParameters.cpp @@ -526,8 +526,12 @@ int CameraParameters::previewFormatToEnum(const char* format) { !strcmp(format, PIXEL_FORMAT_RGBA8888) ? HAL_PIXEL_FORMAT_RGBA_8888 : // RGB8888 !strcmp(format, PIXEL_FORMAT_BAYER_RGGB) ? 
- HAL_PIXEL_FORMAT_RAW_SENSOR : // Raw sensor data + HAL_PIXEL_FORMAT_RAW16 : // Raw sensor data -1; } +bool CameraParameters::isEmpty() const { + return mMap.isEmpty(); +} + }; // namespace android diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp index fc3e437..51a775b 100644 --- a/camera/ICameraService.cpp +++ b/camera/ICameraService.cpp @@ -2,16 +2,16 @@ ** ** Copyright 2008, The Android Open Source Project ** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at ** -** http://www.apache.org/licenses/LICENSE-2.0 +** http://www.apache.org/licenses/LICENSE-2.0 ** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and ** limitations under the License. 
*/ @@ -29,8 +29,6 @@ #include <camera/ICameraService.h> #include <camera/ICameraServiceListener.h> -#include <camera/IProCameraUser.h> -#include <camera/IProCameraCallbacks.h> #include <camera/ICamera.h> #include <camera/ICameraClient.h> #include <camera/camera2/ICameraDeviceUser.h> @@ -209,26 +207,18 @@ public: return status; } - // connect to camera service (pro client) - virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb, int cameraId, - const String16 &clientPackageName, int clientUid, - /*out*/ - sp<IProCameraUser>& device) + virtual status_t setTorchMode(const String16& cameraId, bool enabled, + const sp<IBinder>& clientBinder) { Parcel data, reply; data.writeInterfaceToken(ICameraService::getInterfaceDescriptor()); - data.writeStrongBinder(IInterface::asBinder(cameraCb)); - data.writeInt32(cameraId); - data.writeString16(clientPackageName); - data.writeInt32(clientUid); - remote()->transact(BnCameraService::CONNECT_PRO, data, &reply); + data.writeString16(cameraId); + data.writeInt32(enabled ? 
1 : 0); + data.writeStrongBinder(clientBinder); + remote()->transact(BnCameraService::SET_TORCH_MODE, data, &reply); if (readExceptionCode(reply)) return -EPROTO; - status_t status = reply.readInt32(); - if (reply.readInt32() != 0) { - device = interface_cast<IProCameraUser>(reply.readStrongBinder()); - } - return status; + return reply.readInt32(); } // connect to camera service (android.hardware.camera2.CameraDevice) @@ -312,6 +302,15 @@ public: status_t res = data.readInt32(); return res; } + + virtual void notifySystemEvent(int eventId, int arg0) { + Parcel data, reply; + data.writeInt32(eventId); + data.writeInt32(arg0); + remote()->transact(BnCameraService::NOTIFY_SYSTEM_EVENT, data, &reply, + IBinder::FLAG_ONEWAY); + } + }; IMPLEMENT_META_INTERFACE(CameraService, "android.hardware.ICameraService"); @@ -390,26 +389,6 @@ status_t BnCameraService::onTransact( } return NO_ERROR; } break; - case CONNECT_PRO: { - CHECK_INTERFACE(ICameraService, data, reply); - sp<IProCameraCallbacks> cameraClient = - interface_cast<IProCameraCallbacks>(data.readStrongBinder()); - int32_t cameraId = data.readInt32(); - const String16 clientName = data.readString16(); - int32_t clientUid = data.readInt32(); - sp<IProCameraUser> camera; - status_t status = connectPro(cameraClient, cameraId, - clientName, clientUid, /*out*/camera); - reply->writeNoException(); - reply->writeInt32(status); - if (camera != NULL) { - reply->writeInt32(1); - reply->writeStrongBinder(IInterface::asBinder(camera)); - } else { - reply->writeInt32(0); - } - return NO_ERROR; - } break; case CONNECT_DEVICE: { CHECK_INTERFACE(ICameraService, data, reply); sp<ICameraDeviceCallbacks> cameraClient = @@ -490,6 +469,23 @@ status_t BnCameraService::onTransact( } return NO_ERROR; } break; + case SET_TORCH_MODE: { + CHECK_INTERFACE(ICameraService, data, reply); + String16 cameraId = data.readString16(); + bool enabled = data.readInt32() != 0 ? 
true : false; + const sp<IBinder> clientBinder = data.readStrongBinder(); + status_t status = setTorchMode(cameraId, enabled, clientBinder); + reply->writeNoException(); + reply->writeInt32(status); + return NO_ERROR; + } break; + case NOTIFY_SYSTEM_EVENT: { + CHECK_INTERFACE(ICameraService, data, reply); + int eventId = data.readInt32(); + int arg0 = data.readInt32(); + notifySystemEvent(eventId, arg0); + return NO_ERROR; + } break; default: return BBinder::onTransact(code, data, reply, flags); } diff --git a/camera/ICameraServiceListener.cpp b/camera/ICameraServiceListener.cpp index b2f1729..90a8bc2 100644 --- a/camera/ICameraServiceListener.cpp +++ b/camera/ICameraServiceListener.cpp @@ -29,6 +29,7 @@ namespace android { namespace { enum { STATUS_CHANGED = IBinder::FIRST_CALL_TRANSACTION, + TORCH_STATUS_CHANGED, }; }; // namespace anonymous @@ -54,8 +55,21 @@ public: data, &reply, IBinder::FLAG_ONEWAY); + } - reply.readExceptionCode(); + virtual void onTorchStatusChanged(TorchStatus status, const String16 &cameraId) + { + Parcel data, reply; + data.writeInterfaceToken( + ICameraServiceListener::getInterfaceDescriptor()); + + data.writeInt32(static_cast<int32_t>(status)); + data.writeString16(cameraId); + + remote()->transact(TORCH_STATUS_CHANGED, + data, + &reply, + IBinder::FLAG_ONEWAY); } }; @@ -75,7 +89,16 @@ status_t BnCameraServiceListener::onTransact( int32_t cameraId = data.readInt32(); onStatusChanged(status, cameraId); - reply->writeNoException(); + + return NO_ERROR; + } break; + case TORCH_STATUS_CHANGED: { + CHECK_INTERFACE(ICameraServiceListener, data, reply); + + TorchStatus status = static_cast<TorchStatus>(data.readInt32()); + String16 cameraId = data.readString16(); + + onTorchStatusChanged(status, cameraId); return NO_ERROR; } break; diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp deleted file mode 100644 index bd3d420..0000000 --- a/camera/IProCameraCallbacks.cpp +++ /dev/null @@ -1,125 +0,0 @@ -/* -** -** 
Copyright 2013, The Android Open Source Project -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. -*/ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "IProCameraCallbacks" -#include <utils/Log.h> -#include <stdint.h> -#include <sys/types.h> - -#include <binder/Parcel.h> -#include <gui/IGraphicBufferProducer.h> -#include <gui/Surface.h> -#include <utils/Mutex.h> - -#include <camera/IProCameraCallbacks.h> - -#include "camera/CameraMetadata.h" - -namespace android { - -enum { - NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION, - LOCK_STATUS_CHANGED, - RESULT_RECEIVED, -}; - -class BpProCameraCallbacks: public BpInterface<IProCameraCallbacks> -{ -public: - BpProCameraCallbacks(const sp<IBinder>& impl) - : BpInterface<IProCameraCallbacks>(impl) - { - } - - // generic callback from camera service to app - void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) - { - ALOGV("notifyCallback"); - Parcel data, reply; - data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); - data.writeInt32(msgType); - data.writeInt32(ext1); - data.writeInt32(ext2); - remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY); - } - - void onLockStatusChanged(LockStatus newLockStatus) { - ALOGV("onLockStatusChanged"); - Parcel data, reply; - data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); - data.writeInt32(newLockStatus); - remote()->transact(LOCK_STATUS_CHANGED, data, &reply, - IBinder::FLAG_ONEWAY); - } - - void 
onResultReceived(int32_t requestId, camera_metadata* result) { - ALOGV("onResultReceived"); - Parcel data, reply; - data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor()); - data.writeInt32(requestId); - CameraMetadata::writeToParcel(data, result); - remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY); - } -}; - -IMPLEMENT_META_INTERFACE(ProCameraCallbacks, - "android.hardware.IProCameraCallbacks"); - -// ---------------------------------------------------------------------- - -status_t BnProCameraCallbacks::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) -{ - ALOGV("onTransact - code = %d", code); - switch(code) { - case NOTIFY_CALLBACK: { - ALOGV("NOTIFY_CALLBACK"); - CHECK_INTERFACE(IProCameraCallbacks, data, reply); - int32_t msgType = data.readInt32(); - int32_t ext1 = data.readInt32(); - int32_t ext2 = data.readInt32(); - notifyCallback(msgType, ext1, ext2); - return NO_ERROR; - } break; - case LOCK_STATUS_CHANGED: { - ALOGV("LOCK_STATUS_CHANGED"); - CHECK_INTERFACE(IProCameraCallbacks, data, reply); - LockStatus newLockStatus - = static_cast<LockStatus>(data.readInt32()); - onLockStatusChanged(newLockStatus); - return NO_ERROR; - } break; - case RESULT_RECEIVED: { - ALOGV("RESULT_RECEIVED"); - CHECK_INTERFACE(IProCameraCallbacks, data, reply); - int32_t requestId = data.readInt32(); - camera_metadata_t *result = NULL; - CameraMetadata::readFromParcel(data, &result); - onResultReceived(requestId, result); - return NO_ERROR; - break; - } - default: - return BBinder::onTransact(code, data, reply, flags); - } -} - -// ---------------------------------------------------------------------------- - -}; // namespace android - diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp deleted file mode 100644 index 9bd7597..0000000 --- a/camera/IProCameraUser.cpp +++ /dev/null @@ -1,324 +0,0 @@ -/* -** -** Copyright 2013, The Android Open Source Project -** -** Licensed under the Apache 
License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. -*/ - -// #define LOG_NDEBUG 0 -#define LOG_TAG "IProCameraUser" -#include <utils/Log.h> -#include <stdint.h> -#include <sys/types.h> -#include <binder/Parcel.h> -#include <camera/IProCameraUser.h> -#include <gui/IGraphicBufferProducer.h> -#include <gui/Surface.h> -#include "camera/CameraMetadata.h" - -namespace android { - -enum { - DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, - CONNECT, - EXCLUSIVE_TRY_LOCK, - EXCLUSIVE_LOCK, - EXCLUSIVE_UNLOCK, - HAS_EXCLUSIVE_LOCK, - SUBMIT_REQUEST, - CANCEL_REQUEST, - DELETE_STREAM, - CREATE_STREAM, - CREATE_DEFAULT_REQUEST, - GET_CAMERA_INFO, -}; - -class BpProCameraUser: public BpInterface<IProCameraUser> -{ -public: - BpProCameraUser(const sp<IBinder>& impl) - : BpInterface<IProCameraUser>(impl) - { - } - - // disconnect from camera service - void disconnect() - { - ALOGV("disconnect"); - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - remote()->transact(DISCONNECT, data, &reply); - reply.readExceptionCode(); - } - - virtual status_t connect(const sp<IProCameraCallbacks>& cameraClient) - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - data.writeStrongBinder(IInterface::asBinder(cameraClient)); - remote()->transact(CONNECT, data, &reply); - return reply.readInt32(); - } - - /* Shared ProCameraUser */ - - virtual status_t exclusiveTryLock() - { - Parcel data, reply; - 
data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - remote()->transact(EXCLUSIVE_TRY_LOCK, data, &reply); - return reply.readInt32(); - } - virtual status_t exclusiveLock() - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - remote()->transact(EXCLUSIVE_LOCK, data, &reply); - return reply.readInt32(); - } - - virtual status_t exclusiveUnlock() - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - remote()->transact(EXCLUSIVE_UNLOCK, data, &reply); - return reply.readInt32(); - } - - virtual bool hasExclusiveLock() - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - remote()->transact(HAS_EXCLUSIVE_LOCK, data, &reply); - return !!reply.readInt32(); - } - - virtual int submitRequest(camera_metadata_t* metadata, bool streaming) - { - - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - - // arg0+arg1 - CameraMetadata::writeToParcel(data, metadata); - - // arg2 = streaming (bool) - data.writeInt32(streaming); - - remote()->transact(SUBMIT_REQUEST, data, &reply); - return reply.readInt32(); - } - - virtual status_t cancelRequest(int requestId) - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - data.writeInt32(requestId); - - remote()->transact(CANCEL_REQUEST, data, &reply); - return reply.readInt32(); - } - - virtual status_t deleteStream(int streamId) - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - data.writeInt32(streamId); - - remote()->transact(DELETE_STREAM, data, &reply); - return reply.readInt32(); - } - - virtual status_t createStream(int width, int height, int format, - const sp<IGraphicBufferProducer>& bufferProducer, - /*out*/ - int* streamId) - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - data.writeInt32(width); - 
data.writeInt32(height); - data.writeInt32(format); - - sp<IBinder> b(IInterface::asBinder(bufferProducer)); - data.writeStrongBinder(b); - - remote()->transact(CREATE_STREAM, data, &reply); - - int sId = reply.readInt32(); - if (streamId) { - *streamId = sId; - } - return reply.readInt32(); - } - - // Create a request object from a template. - virtual status_t createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request) - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - data.writeInt32(templateId); - remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply); - CameraMetadata::readFromParcel(reply, /*out*/request); - return reply.readInt32(); - } - - - virtual status_t getCameraInfo(int cameraId, camera_metadata** info) - { - Parcel data, reply; - data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor()); - data.writeInt32(cameraId); - remote()->transact(GET_CAMERA_INFO, data, &reply); - CameraMetadata::readFromParcel(reply, /*out*/info); - return reply.readInt32(); - } - - -private: - - -}; - -IMPLEMENT_META_INTERFACE(ProCameraUser, "android.hardware.IProCameraUser"); - -// ---------------------------------------------------------------------- - -status_t BnProCameraUser::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) -{ - switch(code) { - case DISCONNECT: { - ALOGV("DISCONNECT"); - CHECK_INTERFACE(IProCameraUser, data, reply); - disconnect(); - reply->writeNoException(); - return NO_ERROR; - } break; - case CONNECT: { - CHECK_INTERFACE(IProCameraUser, data, reply); - sp<IProCameraCallbacks> cameraClient = - interface_cast<IProCameraCallbacks>(data.readStrongBinder()); - reply->writeInt32(connect(cameraClient)); - return NO_ERROR; - } break; - - /* Shared ProCameraUser */ - case EXCLUSIVE_TRY_LOCK: { - CHECK_INTERFACE(IProCameraUser, data, reply); - reply->writeInt32(exclusiveTryLock()); - return NO_ERROR; - } break; - case EXCLUSIVE_LOCK: { - 
CHECK_INTERFACE(IProCameraUser, data, reply); - reply->writeInt32(exclusiveLock()); - return NO_ERROR; - } break; - case EXCLUSIVE_UNLOCK: { - CHECK_INTERFACE(IProCameraUser, data, reply); - reply->writeInt32(exclusiveUnlock()); - return NO_ERROR; - } break; - case HAS_EXCLUSIVE_LOCK: { - CHECK_INTERFACE(IProCameraUser, data, reply); - reply->writeInt32(hasExclusiveLock()); - return NO_ERROR; - } break; - case SUBMIT_REQUEST: { - CHECK_INTERFACE(IProCameraUser, data, reply); - camera_metadata_t* metadata; - CameraMetadata::readFromParcel(data, /*out*/&metadata); - - // arg2 = streaming (bool) - bool streaming = data.readInt32(); - - // return code: requestId (int32) - reply->writeInt32(submitRequest(metadata, streaming)); - - return NO_ERROR; - } break; - case CANCEL_REQUEST: { - CHECK_INTERFACE(IProCameraUser, data, reply); - int requestId = data.readInt32(); - reply->writeInt32(cancelRequest(requestId)); - return NO_ERROR; - } break; - case DELETE_STREAM: { - CHECK_INTERFACE(IProCameraUser, data, reply); - int streamId = data.readInt32(); - reply->writeInt32(deleteStream(streamId)); - return NO_ERROR; - } break; - case CREATE_STREAM: { - CHECK_INTERFACE(IProCameraUser, data, reply); - int width, height, format; - - width = data.readInt32(); - height = data.readInt32(); - format = data.readInt32(); - - sp<IGraphicBufferProducer> bp = - interface_cast<IGraphicBufferProducer>(data.readStrongBinder()); - - int streamId = -1; - status_t ret; - ret = createStream(width, height, format, bp, &streamId); - - reply->writeInt32(streamId); - reply->writeInt32(ret); - - return NO_ERROR; - } break; - - case CREATE_DEFAULT_REQUEST: { - CHECK_INTERFACE(IProCameraUser, data, reply); - - int templateId = data.readInt32(); - - camera_metadata_t* request = NULL; - status_t ret; - ret = createDefaultRequest(templateId, &request); - - CameraMetadata::writeToParcel(*reply, request); - reply->writeInt32(ret); - - free_camera_metadata(request); - - return NO_ERROR; - } break; - case 
GET_CAMERA_INFO: { - CHECK_INTERFACE(IProCameraUser, data, reply); - - int cameraId = data.readInt32(); - - camera_metadata_t* info = NULL; - status_t ret; - ret = getCameraInfo(cameraId, &info); - - CameraMetadata::writeToParcel(*reply, info); - reply->writeInt32(ret); - - free_camera_metadata(info); - - return NO_ERROR; - } break; - default: - return BBinder::onTransact(code, data, reply, flags); - } -} - -// ---------------------------------------------------------------------------- - -}; // namespace android diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp deleted file mode 100644 index 48f8e8e..0000000 --- a/camera/ProCamera.cpp +++ /dev/null @@ -1,436 +0,0 @@ -/* -** -** Copyright (C) 2013, The Android Open Source Project -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
-*/ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "ProCamera" -#include <utils/Log.h> -#include <utils/threads.h> -#include <utils/Mutex.h> - -#include <binder/IPCThreadState.h> -#include <binder/IServiceManager.h> -#include <binder/IMemory.h> - -#include <camera/ProCamera.h> -#include <camera/IProCameraUser.h> -#include <camera/IProCameraCallbacks.h> - -#include <gui/IGraphicBufferProducer.h> - -#include <system/camera_metadata.h> - -namespace android { - -sp<ProCamera> ProCamera::connect(int cameraId) -{ - return CameraBaseT::connect(cameraId, String16(), - ICameraService::USE_CALLING_UID); -} - -ProCamera::ProCamera(int cameraId) - : CameraBase(cameraId) -{ -} - -CameraTraits<ProCamera>::TCamConnectService CameraTraits<ProCamera>::fnConnectService = - &ICameraService::connectPro; - -ProCamera::~ProCamera() -{ - -} - -/* IProCameraUser's implementation */ - -// callback from camera service -void ProCamera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2) -{ - return CameraBaseT::notifyCallback(msgType, ext1, ext2); -} - -void ProCamera::onLockStatusChanged( - IProCameraCallbacks::LockStatus newLockStatus) -{ - ALOGV("%s: newLockStatus = %d", __FUNCTION__, newLockStatus); - - sp<ProCameraListener> listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - if (listener != NULL) { - switch (newLockStatus) { - case IProCameraCallbacks::LOCK_ACQUIRED: - listener->onLockAcquired(); - break; - case IProCameraCallbacks::LOCK_RELEASED: - listener->onLockReleased(); - break; - case IProCameraCallbacks::LOCK_STOLEN: - listener->onLockStolen(); - break; - default: - ALOGE("%s: Unknown lock status: %d", - __FUNCTION__, newLockStatus); - } - } -} - -void ProCamera::onResultReceived(int32_t requestId, camera_metadata* result) { - ALOGV("%s: requestId = %d, result = %p", __FUNCTION__, requestId, result); - - sp<ProCameraListener> listener; - { - Mutex::Autolock _l(mLock); - listener = mListener; - } - - CameraMetadata tmp(result); - - // Unblock 
waitForFrame(id) callers - { - Mutex::Autolock al(mWaitMutex); - mMetadataReady = true; - mLatestMetadata = tmp; // make copy - mWaitCondition.broadcast(); - } - - result = tmp.release(); - - if (listener != NULL) { - listener->onResultReceived(requestId, result); - } else { - free_camera_metadata(result); - } - -} - -status_t ProCamera::exclusiveTryLock() -{ - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - return c->exclusiveTryLock(); -} -status_t ProCamera::exclusiveLock() -{ - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - return c->exclusiveLock(); -} -status_t ProCamera::exclusiveUnlock() -{ - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - return c->exclusiveUnlock(); -} -bool ProCamera::hasExclusiveLock() -{ - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - return c->hasExclusiveLock(); -} - -// Note that the callee gets a copy of the metadata. -int ProCamera::submitRequest(const struct camera_metadata* metadata, - bool streaming) -{ - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - return c->submitRequest(const_cast<struct camera_metadata*>(metadata), - streaming); -} - -status_t ProCamera::cancelRequest(int requestId) -{ - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - return c->cancelRequest(requestId); -} - -status_t ProCamera::deleteStream(int streamId) -{ - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - status_t s = c->deleteStream(streamId); - - mStreams.removeItem(streamId); - - return s; -} - -status_t ProCamera::createStream(int width, int height, int format, - const sp<Surface>& surface, - /*out*/ - int* streamId) -{ - *streamId = -1; - - ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, - format); - - if (surface == 0) { - return BAD_VALUE; - } - - return createStream(width, height, format, - surface->getIGraphicBufferProducer(), - streamId); -} - -status_t ProCamera::createStream(int 
width, int height, int format, - const sp<IGraphicBufferProducer>& bufferProducer, - /*out*/ - int* streamId) { - *streamId = -1; - - ALOGV("%s: createStreamT %dx%d (fmt=0x%x)", __FUNCTION__, width, height, - format); - - if (bufferProducer == 0) { - return BAD_VALUE; - } - - sp <IProCameraUser> c = mCamera; - status_t stat = c->createStream(width, height, format, bufferProducer, - streamId); - - if (stat == OK) { - StreamInfo s(*streamId); - - mStreams.add(*streamId, s); - } - - return stat; -} - -status_t ProCamera::createStreamCpu(int width, int height, int format, - int heapCount, - /*out*/ - sp<CpuConsumer>* cpuConsumer, - int* streamId) { - return createStreamCpu(width, height, format, heapCount, - /*synchronousMode*/true, - cpuConsumer, streamId); -} - -status_t ProCamera::createStreamCpu(int width, int height, int format, - int heapCount, - bool synchronousMode, - /*out*/ - sp<CpuConsumer>* cpuConsumer, - int* streamId) -{ - ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height, - format); - - *cpuConsumer = NULL; - - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - sp<IGraphicBufferProducer> producer; - sp<IGraphicBufferConsumer> consumer; - BufferQueue::createBufferQueue(&producer, &consumer); - sp<CpuConsumer> cc = new CpuConsumer(consumer, heapCount - /*, synchronousMode*/); - cc->setName(String8("ProCamera::mCpuConsumer")); - - sp<Surface> stc = new Surface(producer); - - status_t s = createStream(width, height, format, - stc->getIGraphicBufferProducer(), - streamId); - - if (s != OK) { - ALOGE("%s: Failure to create stream %dx%d (fmt=0x%x)", __FUNCTION__, - width, height, format); - return s; - } - - sp<ProFrameListener> frameAvailableListener = - new ProFrameListener(this, *streamId); - - getStreamInfo(*streamId).cpuStream = true; - getStreamInfo(*streamId).cpuConsumer = cc; - getStreamInfo(*streamId).synchronousMode = synchronousMode; - getStreamInfo(*streamId).stc = stc; - // for lifetime management - 
getStreamInfo(*streamId).frameAvailableListener = frameAvailableListener; - - cc->setFrameAvailableListener(frameAvailableListener); - - *cpuConsumer = cc; - - return s; -} - -camera_metadata* ProCamera::getCameraInfo(int cameraId) { - ALOGV("%s: cameraId = %d", __FUNCTION__, cameraId); - - sp <IProCameraUser> c = mCamera; - if (c == 0) return NULL; - - camera_metadata* ptr = NULL; - status_t status = c->getCameraInfo(cameraId, &ptr); - - if (status != OK) { - ALOGE("%s: Failed to get camera info, error = %d", __FUNCTION__, status); - } - - return ptr; -} - -status_t ProCamera::createDefaultRequest(int templateId, - camera_metadata** request) const { - ALOGV("%s: templateId = %d", __FUNCTION__, templateId); - - sp <IProCameraUser> c = mCamera; - if (c == 0) return NO_INIT; - - return c->createDefaultRequest(templateId, request); -} - -void ProCamera::onFrameAvailable(int streamId) { - ALOGV("%s: streamId = %d", __FUNCTION__, streamId); - - sp<ProCameraListener> listener = mListener; - StreamInfo& stream = getStreamInfo(streamId); - - if (listener.get() != NULL) { - listener->onFrameAvailable(streamId, stream.cpuConsumer); - } - - // Unblock waitForFrame(id) callers - { - Mutex::Autolock al(mWaitMutex); - getStreamInfo(streamId).frameReady++; - mWaitCondition.broadcast(); - } -} - -int ProCamera::waitForFrameBuffer(int streamId) { - status_t stat = BAD_VALUE; - Mutex::Autolock al(mWaitMutex); - - StreamInfo& si = getStreamInfo(streamId); - - if (si.frameReady > 0) { - int numFrames = si.frameReady; - si.frameReady = 0; - return numFrames; - } else { - while (true) { - stat = mWaitCondition.waitRelative(mWaitMutex, - mWaitTimeout); - if (stat != OK) { - ALOGE("%s: Error while waiting for frame buffer: %d", - __FUNCTION__, stat); - return stat; - } - - if (si.frameReady > 0) { - int numFrames = si.frameReady; - si.frameReady = 0; - return numFrames; - } - // else it was some other stream that got unblocked - } - } - - return stat; -} - -int 
ProCamera::dropFrameBuffer(int streamId, int count) { - StreamInfo& si = getStreamInfo(streamId); - - if (!si.cpuStream) { - return BAD_VALUE; - } else if (count < 0) { - return BAD_VALUE; - } - - if (!si.synchronousMode) { - ALOGW("%s: No need to drop frames on asynchronous streams," - " as asynchronous mode only keeps 1 latest frame around.", - __FUNCTION__); - return BAD_VALUE; - } - - int numDropped = 0; - for (int i = 0; i < count; ++i) { - CpuConsumer::LockedBuffer buffer; - if (si.cpuConsumer->lockNextBuffer(&buffer) != OK) { - break; - } - - si.cpuConsumer->unlockBuffer(buffer); - numDropped++; - } - - return numDropped; -} - -status_t ProCamera::waitForFrameMetadata() { - status_t stat = BAD_VALUE; - Mutex::Autolock al(mWaitMutex); - - if (mMetadataReady) { - return OK; - } else { - while (true) { - stat = mWaitCondition.waitRelative(mWaitMutex, - mWaitTimeout); - - if (stat != OK) { - ALOGE("%s: Error while waiting for metadata: %d", - __FUNCTION__, stat); - return stat; - } - - if (mMetadataReady) { - mMetadataReady = false; - return OK; - } - // else it was some other stream or metadata - } - } - - return stat; -} - -CameraMetadata ProCamera::consumeFrameMetadata() { - Mutex::Autolock al(mWaitMutex); - - // Destructive: Subsequent calls return empty metadatas - CameraMetadata tmp = mLatestMetadata; - mLatestMetadata.clear(); - - return tmp; -} - -ProCamera::StreamInfo& ProCamera::getStreamInfo(int streamId) { - return mStreams.editValueFor(streamId); -} - -}; // namespace android diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp index 277b5db..89c6fb7 100644 --- a/camera/camera2/ICameraDeviceUser.cpp +++ b/camera/camera2/ICameraDeviceUser.cpp @@ -26,6 +26,7 @@ #include <gui/Surface.h> #include <camera/CameraMetadata.h> #include <camera/camera2/CaptureRequest.h> +#include <camera/camera2/OutputConfiguration.h> namespace android { @@ -208,20 +209,16 @@ public: return reply.readInt32(); } - virtual status_t 
createStream(int width, int height, int format, - const sp<IGraphicBufferProducer>& bufferProducer) + virtual status_t createStream(const OutputConfiguration& outputConfiguration) { Parcel data, reply; data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor()); - data.writeInt32(width); - data.writeInt32(height); - data.writeInt32(format); - - data.writeInt32(1); // marker that bufferProducer is not null - data.writeString16(String16("unknown_name")); // name of surface - sp<IBinder> b(IInterface::asBinder(bufferProducer)); - data.writeStrongBinder(b); - + if (outputConfiguration.getGraphicBufferProducer() != NULL) { + data.writeInt32(1); // marker that OutputConfiguration is not null. Mimic aidl behavior + outputConfiguration.writeToParcel(data); + } else { + data.writeInt32(0); + } remote()->transact(CREATE_STREAM, data, &reply); reply.readExceptionCode(); @@ -396,31 +393,15 @@ status_t BnCameraDeviceUser::onTransact( } break; case CREATE_STREAM: { CHECK_INTERFACE(ICameraDeviceUser, data, reply); - int width, height, format; - width = data.readInt32(); - ALOGV("%s: CREATE_STREAM: width = %d", __FUNCTION__, width); - height = data.readInt32(); - ALOGV("%s: CREATE_STREAM: height = %d", __FUNCTION__, height); - format = data.readInt32(); - ALOGV("%s: CREATE_STREAM: format = %d", __FUNCTION__, format); - - sp<IGraphicBufferProducer> bp; + status_t ret = BAD_VALUE; if (data.readInt32() != 0) { - String16 name = readMaybeEmptyString16(data); - bp = interface_cast<IGraphicBufferProducer>( - data.readStrongBinder()); - - ALOGV("%s: CREATE_STREAM: bp = %p, name = %s", __FUNCTION__, - bp.get(), String8(name).string()); + OutputConfiguration outputConfiguration(data); + ret = createStream(outputConfiguration); } else { - ALOGV("%s: CREATE_STREAM: bp = unset, name = unset", - __FUNCTION__); + ALOGE("%s: cannot take an empty OutputConfiguration", __FUNCTION__); } - status_t ret; - ret = createStream(width, height, format, bp); - reply->writeNoException(); 
ALOGV("%s: CREATE_STREAM: write noException", __FUNCTION__); reply->writeInt32(ret); diff --git a/camera/camera2/OutputConfiguration.cpp b/camera/camera2/OutputConfiguration.cpp new file mode 100644 index 0000000..24acaa0 --- /dev/null +++ b/camera/camera2/OutputConfiguration.cpp @@ -0,0 +1,79 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#define LOG_TAG "OutputConfiguration" +#include <utils/Log.h> + +#include <camera/camera2/OutputConfiguration.h> +#include <binder/Parcel.h> + +namespace android { + + +const int OutputConfiguration::INVALID_ROTATION = -1; + +// Read empty strings without printing a false error message. 
+String16 OutputConfiguration::readMaybeEmptyString16(const Parcel& parcel) { + size_t len; + const char16_t* str = parcel.readString16Inplace(&len); + if (str != NULL) { + return String16(str, len); + } else { + return String16(); + } +} + +sp<IGraphicBufferProducer> OutputConfiguration::getGraphicBufferProducer() const { + return mGbp; +} + +int OutputConfiguration::getRotation() const { + return mRotation; +} + +OutputConfiguration::OutputConfiguration(const Parcel& parcel) { + status_t err; + int rotation = 0; + if ((err = parcel.readInt32(&rotation)) != OK) { + ALOGE("%s: Failed to read rotation from parcel", __FUNCTION__); + mGbp = NULL; + mRotation = INVALID_ROTATION; + return; + } + + String16 name = readMaybeEmptyString16(parcel); + const sp<IGraphicBufferProducer>& gbp = + interface_cast<IGraphicBufferProducer>(parcel.readStrongBinder()); + mGbp = gbp; + mRotation = rotation; + + ALOGV("%s: OutputConfiguration: bp = %p, name = %s", __FUNCTION__, + gbp.get(), String8(name).string()); +} + +status_t OutputConfiguration::writeToParcel(Parcel& parcel) const { + + parcel.writeInt32(mRotation); + parcel.writeString16(String16("unknown_name")); // name of surface + sp<IBinder> b(IInterface::asBinder(mGbp)); + parcel.writeStrongBinder(b); + + return OK; +} + +}; // namespace android + diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk index 2db4c14..5d37f9e 100644 --- a/camera/tests/Android.mk +++ b/camera/tests/Android.mk @@ -17,7 +17,6 @@ include $(CLEAR_VARS) LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk LOCAL_SRC_FILES:= \ - ProCameraTests.cpp \ VendorTagDescriptorTests.cpp LOCAL_SHARED_LIBRARIES := \ diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp deleted file mode 100644 index 1f5867a..0000000 --- a/camera/tests/ProCameraTests.cpp +++ /dev/null @@ -1,1278 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may 
not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include <gtest/gtest.h> -#include <iostream> - -#include <binder/IPCThreadState.h> -#include <utils/Thread.h> - -#include "Camera.h" -#include "ProCamera.h" -#include <utils/Vector.h> -#include <utils/Mutex.h> -#include <utils/Condition.h> - -#include <gui/SurfaceComposerClient.h> -#include <gui/Surface.h> - -#include <system/camera_metadata.h> -#include <hardware/camera2.h> // for CAMERA2_TEMPLATE_PREVIEW only -#include <camera/CameraMetadata.h> - -#include <camera/ICameraServiceListener.h> - -namespace android { -namespace camera2 { -namespace tests { -namespace client { - -#define CAMERA_ID 0 -#define TEST_DEBUGGING 0 - -#define TEST_LISTENER_TIMEOUT 1000000000 // 1 second listener timeout -#define TEST_FORMAT HAL_PIXEL_FORMAT_Y16 //TODO: YUY2 instead - -#define TEST_FORMAT_MAIN HAL_PIXEL_FORMAT_Y8 -#define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16 - -// defaults for display "test" -#define TEST_DISPLAY_FORMAT HAL_PIXEL_FORMAT_Y8 -#define TEST_DISPLAY_WIDTH 320 -#define TEST_DISPLAY_HEIGHT 240 - -#define TEST_CPU_FRAME_COUNT 2 -#define TEST_CPU_HEAP_COUNT 5 - -#define TEST_FRAME_PROCESSING_DELAY_US 200000 // 200 ms - -#if TEST_DEBUGGING -#define dout std::cerr -#else -#define dout if (0) std::cerr -#endif - -#define EXPECT_OK(x) EXPECT_EQ(OK, (x)) -#define ASSERT_OK(x) ASSERT_EQ(OK, (x)) - -class ProCameraTest; - -struct ServiceListener : public BnCameraServiceListener { - - ServiceListener() : - mLatestStatus(STATUS_UNKNOWN), - mPrevStatus(STATUS_UNKNOWN) - { - } - - void 
onStatusChanged(Status status, int32_t cameraId) { - dout << "On status changed: 0x" << std::hex - << (unsigned int) status << " cameraId " << cameraId - << std::endl; - - Mutex::Autolock al(mMutex); - - mLatestStatus = status; - mCondition.broadcast(); - } - - status_t waitForStatusChange(Status& newStatus) { - Mutex::Autolock al(mMutex); - - if (mLatestStatus != mPrevStatus) { - newStatus = mLatestStatus; - mPrevStatus = mLatestStatus; - return OK; - } - - status_t stat = mCondition.waitRelative(mMutex, - TEST_LISTENER_TIMEOUT); - - if (stat == OK) { - newStatus = mLatestStatus; - mPrevStatus = mLatestStatus; - } - - return stat; - } - - Condition mCondition; - Mutex mMutex; - - Status mLatestStatus; - Status mPrevStatus; -}; - -enum ProEvent { - UNKNOWN, - ACQUIRED, - RELEASED, - STOLEN, - FRAME_RECEIVED, - RESULT_RECEIVED, -}; - -inline int ProEvent_Mask(ProEvent e) { - return (1 << static_cast<int>(e)); -} - -typedef Vector<ProEvent> EventList; - -class ProCameraTestThread : public Thread -{ -public: - ProCameraTestThread() { - } - - virtual bool threadLoop() { - mProc = ProcessState::self(); - mProc->startThreadPool(); - - IPCThreadState *ptr = IPCThreadState::self(); - - ptr->joinThreadPool(); - - return false; - } - - sp<ProcessState> mProc; -}; - -class ProCameraTestListener : public ProCameraListener { - -public: - static const int EVENT_MASK_ALL = 0xFFFFFFFF; - - ProCameraTestListener() { - mEventMask = EVENT_MASK_ALL; - mDropFrames = false; - } - - status_t WaitForEvent() { - Mutex::Autolock cal(mConditionMutex); - - { - Mutex::Autolock al(mListenerMutex); - - if (mProEventList.size() > 0) { - return OK; - } - } - - return mListenerCondition.waitRelative(mConditionMutex, - TEST_LISTENER_TIMEOUT); - } - - /* Read events into out. 
Existing queue is flushed */ - void ReadEvents(EventList& out) { - Mutex::Autolock al(mListenerMutex); - - for (size_t i = 0; i < mProEventList.size(); ++i) { - out.push(mProEventList[i]); - } - - mProEventList.clear(); - } - - /** - * Dequeue 1 event from the event queue. - * Returns UNKNOWN if queue is empty - */ - ProEvent ReadEvent() { - Mutex::Autolock al(mListenerMutex); - - if (mProEventList.size() == 0) { - return UNKNOWN; - } - - ProEvent ev = mProEventList[0]; - mProEventList.removeAt(0); - - return ev; - } - - void SetEventMask(int eventMask) { - Mutex::Autolock al(mListenerMutex); - mEventMask = eventMask; - } - - // Automatically acquire/release frames as they are available - void SetDropFrames(bool dropFrames) { - Mutex::Autolock al(mListenerMutex); - mDropFrames = dropFrames; - } - -private: - void QueueEvent(ProEvent ev) { - bool eventAdded = false; - { - Mutex::Autolock al(mListenerMutex); - - // Drop events not part of mask - if (ProEvent_Mask(ev) & mEventMask) { - mProEventList.push(ev); - eventAdded = true; - } - } - - if (eventAdded) { - mListenerCondition.broadcast(); - } - } - -protected: - - ////////////////////////////////////////////////// - ///////// ProCameraListener ////////////////////// - ////////////////////////////////////////////////// - - - // Lock has been acquired. Write operations now available. - virtual void onLockAcquired() { - QueueEvent(ACQUIRED); - } - // Lock has been released with exclusiveUnlock - virtual void onLockReleased() { - QueueEvent(RELEASED); - } - - // Lock has been stolen by another client. - virtual void onLockStolen() { - QueueEvent(STOLEN); - } - - // Lock free. 
- virtual void onTriggerNotify(int32_t ext1, int32_t ext2, int32_t ext3) { - - dout << "Trigger notify: " << ext1 << " " << ext2 - << " " << ext3 << std::endl; - } - - virtual void onFrameAvailable(int streamId, - const sp<CpuConsumer>& consumer) { - - QueueEvent(FRAME_RECEIVED); - - Mutex::Autolock al(mListenerMutex); - if (mDropFrames) { - CpuConsumer::LockedBuffer buf; - status_t ret; - - if (OK == (ret = consumer->lockNextBuffer(&buf))) { - - dout << "Frame received on streamId = " << streamId << - ", dataPtr = " << (void*)buf.data << - ", timestamp = " << buf.timestamp << std::endl; - - EXPECT_OK(consumer->unlockBuffer(buf)); - } - } else { - dout << "Frame received on streamId = " << streamId << std::endl; - } - } - - virtual void onResultReceived(int32_t requestId, - camera_metadata* request) { - dout << "Result received requestId = " << requestId - << ", requestPtr = " << (void*)request << std::endl; - QueueEvent(RESULT_RECEIVED); - free_camera_metadata(request); - } - - virtual void notify(int32_t msg, int32_t ext1, int32_t ext2) { - dout << "Notify received: msg " << std::hex << msg - << ", ext1: " << std::hex << ext1 << ", ext2: " << std::hex << ext2 - << std::endl; - } - - Vector<ProEvent> mProEventList; - Mutex mListenerMutex; - Mutex mConditionMutex; - Condition mListenerCondition; - int mEventMask; - bool mDropFrames; -}; - -class ProCameraTest : public ::testing::Test { - -public: - ProCameraTest() { - char* displaySecsEnv = getenv("TEST_DISPLAY_SECS"); - if (displaySecsEnv != NULL) { - mDisplaySecs = atoi(displaySecsEnv); - if (mDisplaySecs < 0) { - mDisplaySecs = 0; - } - } else { - mDisplaySecs = 0; - } - - char* displayFmtEnv = getenv("TEST_DISPLAY_FORMAT"); - if (displayFmtEnv != NULL) { - mDisplayFmt = FormatFromString(displayFmtEnv); - } else { - mDisplayFmt = TEST_DISPLAY_FORMAT; - } - - char* displayWidthEnv = getenv("TEST_DISPLAY_WIDTH"); - if (displayWidthEnv != NULL) { - mDisplayW = atoi(displayWidthEnv); - if (mDisplayW < 0) { - 
mDisplayW = 0; - } - } else { - mDisplayW = TEST_DISPLAY_WIDTH; - } - - char* displayHeightEnv = getenv("TEST_DISPLAY_HEIGHT"); - if (displayHeightEnv != NULL) { - mDisplayH = atoi(displayHeightEnv); - if (mDisplayH < 0) { - mDisplayH = 0; - } - } else { - mDisplayH = TEST_DISPLAY_HEIGHT; - } - } - - static void SetUpTestCase() { - // Binder Thread Pool Initialization - mTestThread = new ProCameraTestThread(); - mTestThread->run("ProCameraTestThread"); - } - - virtual void SetUp() { - mCamera = ProCamera::connect(CAMERA_ID); - ASSERT_NE((void*)NULL, mCamera.get()); - - mListener = new ProCameraTestListener(); - mCamera->setListener(mListener); - } - - virtual void TearDown() { - ASSERT_NE((void*)NULL, mCamera.get()); - mCamera->disconnect(); - } - -protected: - sp<ProCamera> mCamera; - sp<ProCameraTestListener> mListener; - - static sp<Thread> mTestThread; - - int mDisplaySecs; - int mDisplayFmt; - int mDisplayW; - int mDisplayH; - - sp<SurfaceComposerClient> mComposerClient; - sp<SurfaceControl> mSurfaceControl; - - sp<SurfaceComposerClient> mDepthComposerClient; - sp<SurfaceControl> mDepthSurfaceControl; - - int getSurfaceWidth() { - return 512; - } - int getSurfaceHeight() { - return 512; - } - - void createOnScreenSurface(sp<Surface>& surface) { - mComposerClient = new SurfaceComposerClient; - ASSERT_EQ(NO_ERROR, mComposerClient->initCheck()); - - mSurfaceControl = mComposerClient->createSurface( - String8("ProCameraTest StreamingImage Surface"), - getSurfaceWidth(), getSurfaceHeight(), - PIXEL_FORMAT_RGB_888, 0); - - mSurfaceControl->setPosition(0, 0); - - ASSERT_TRUE(mSurfaceControl != NULL); - ASSERT_TRUE(mSurfaceControl->isValid()); - - SurfaceComposerClient::openGlobalTransaction(); - ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF)); - ASSERT_EQ(NO_ERROR, mSurfaceControl->show()); - SurfaceComposerClient::closeGlobalTransaction(); - - sp<ANativeWindow> window = mSurfaceControl->getSurface(); - surface = mSurfaceControl->getSurface(); - - 
ASSERT_NE((void*)NULL, surface.get()); - } - - void createDepthOnScreenSurface(sp<Surface>& surface) { - mDepthComposerClient = new SurfaceComposerClient; - ASSERT_EQ(NO_ERROR, mDepthComposerClient->initCheck()); - - mDepthSurfaceControl = mDepthComposerClient->createSurface( - String8("ProCameraTest StreamingImage Surface"), - getSurfaceWidth(), getSurfaceHeight(), - PIXEL_FORMAT_RGB_888, 0); - - mDepthSurfaceControl->setPosition(640, 0); - - ASSERT_TRUE(mDepthSurfaceControl != NULL); - ASSERT_TRUE(mDepthSurfaceControl->isValid()); - - SurfaceComposerClient::openGlobalTransaction(); - ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->setLayer(0x7FFFFFFF)); - ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->show()); - SurfaceComposerClient::closeGlobalTransaction(); - - sp<ANativeWindow> window = mDepthSurfaceControl->getSurface(); - surface = mDepthSurfaceControl->getSurface(); - - ASSERT_NE((void*)NULL, surface.get()); - } - - template <typename T> - static bool ExistsItem(T needle, T* array, size_t count) { - if (!array) { - return false; - } - - for (size_t i = 0; i < count; ++i) { - if (array[i] == needle) { - return true; - } - } - return false; - } - - - static int FormatFromString(const char* str) { - std::string s(str); - -#define CMP_STR(x, y) \ - if (s == #x) return HAL_PIXEL_FORMAT_ ## y; -#define CMP_STR_SAME(x) CMP_STR(x, x) - - CMP_STR_SAME( Y16); - CMP_STR_SAME( Y8); - CMP_STR_SAME( YV12); - CMP_STR(NV16, YCbCr_422_SP); - CMP_STR(NV21, YCrCb_420_SP); - CMP_STR(YUY2, YCbCr_422_I); - CMP_STR(RAW, RAW_SENSOR); - CMP_STR(RGBA, RGBA_8888); - - std::cerr << "Unknown format string " << str << std::endl; - return -1; - - } - - /** - * Creating a streaming request for these output streams from a template, - * and submit it - */ - void createSubmitRequestForStreams(int32_t* streamIds, size_t count, int requestCount=-1) { - - ASSERT_NE((void*)NULL, streamIds); - ASSERT_LT(0u, count); - - camera_metadata_t *requestTmp = NULL; - 
EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, - /*out*/&requestTmp)); - ASSERT_NE((void*)NULL, requestTmp); - CameraMetadata request(requestTmp); - - // set the output streams. default is empty - - uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS); - request.update(tag, streamIds, count); - - requestTmp = request.release(); - - if (requestCount < 0) { - EXPECT_OK(mCamera->submitRequest(requestTmp, /*streaming*/true)); - } else { - for (int i = 0; i < requestCount; ++i) { - EXPECT_OK(mCamera->submitRequest(requestTmp, - /*streaming*/false)); - } - } - request.acquire(requestTmp); - } -}; - -sp<Thread> ProCameraTest::mTestThread; - -TEST_F(ProCameraTest, AvailableFormats) { - if (HasFatalFailure()) { - return; - } - - CameraMetadata staticInfo = mCamera->getCameraInfo(CAMERA_ID); - ASSERT_FALSE(staticInfo.isEmpty()); - - uint32_t tag = static_cast<uint32_t>(ANDROID_SCALER_AVAILABLE_FORMATS); - EXPECT_TRUE(staticInfo.exists(tag)); - camera_metadata_entry_t entry = staticInfo.find(tag); - - EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YV12, - entry.data.i32, entry.count)); - EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YCrCb_420_SP, - entry.data.i32, entry.count)); -} - -// test around exclusiveTryLock (immediate locking) -TEST_F(ProCameraTest, LockingImmediate) { - - if (HasFatalFailure()) { - return; - } - - mListener->SetEventMask(ProEvent_Mask(ACQUIRED) | - ProEvent_Mask(STOLEN) | - ProEvent_Mask(RELEASED)); - - EXPECT_FALSE(mCamera->hasExclusiveLock()); - EXPECT_EQ(OK, mCamera->exclusiveTryLock()); - // at this point we definitely have the lock - - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); - - EXPECT_TRUE(mCamera->hasExclusiveLock()); - EXPECT_EQ(OK, mCamera->exclusiveUnlock()); - - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(RELEASED, mListener->ReadEvent()); - - EXPECT_FALSE(mCamera->hasExclusiveLock()); -} - -// test around exclusiveLock (locking at some 
future point in time) -TEST_F(ProCameraTest, LockingAsynchronous) { - - if (HasFatalFailure()) { - return; - } - - - mListener->SetEventMask(ProEvent_Mask(ACQUIRED) | - ProEvent_Mask(STOLEN) | - ProEvent_Mask(RELEASED)); - - // TODO: Add another procamera that has a lock here. - // then we can be test that the lock wont immediately be acquired - - EXPECT_FALSE(mCamera->hasExclusiveLock()); - EXPECT_EQ(OK, mCamera->exclusiveTryLock()); - // at this point we definitely have the lock - - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); - - EXPECT_TRUE(mCamera->hasExclusiveLock()); - EXPECT_EQ(OK, mCamera->exclusiveUnlock()); - - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(RELEASED, mListener->ReadEvent()); - - EXPECT_FALSE(mCamera->hasExclusiveLock()); -} - -// Stream directly to the screen. -TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) { - if (HasFatalFailure()) { - return; - } - - sp<Surface> surface; - if (mDisplaySecs > 0) { - createOnScreenSurface(/*out*/surface); - } - else { - dout << "Skipping, will not render to screen" << std::endl; - return; - } - - int depthStreamId = -1; - - sp<ServiceListener> listener = new ServiceListener(); - EXPECT_OK(ProCamera::addServiceListener(listener)); - - ServiceListener::Status currentStatus; - - // when subscribing a new listener, - // we immediately get a callback to the current status - while (listener->waitForStatusChange(/*out*/currentStatus) != OK); - EXPECT_EQ(ServiceListener::STATUS_PRESENT, currentStatus); - - dout << "Will now stream and resume infinitely..." 
<< std::endl; - while (true) { - - if (currentStatus == ServiceListener::STATUS_PRESENT) { - - ASSERT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt, - surface, - &depthStreamId)); - EXPECT_NE(-1, depthStreamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - - int32_t streams[] = { depthStreamId }; - ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams( - streams, - /*count*/1)); - } - - ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN; - - // TODO: maybe check for getch every once in a while? - while (listener->waitForStatusChange(/*out*/stat) != OK); - - if (currentStatus != stat) { - if (stat == ServiceListener::STATUS_PRESENT) { - dout << "Reconnecting to camera" << std::endl; - mCamera = ProCamera::connect(CAMERA_ID); - } else if (stat == ServiceListener::STATUS_NOT_AVAILABLE) { - dout << "Disconnecting from camera" << std::endl; - mCamera->disconnect(); - } else if (stat == ServiceListener::STATUS_NOT_PRESENT) { - dout << "Camera unplugged" << std::endl; - mCamera = NULL; - } else { - dout << "Unknown status change " - << std::hex << stat << std::endl; - } - - currentStatus = stat; - } - } - - EXPECT_OK(ProCamera::removeServiceListener(listener)); - EXPECT_OK(mCamera->deleteStream(depthStreamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -// Stream directly to the screen. 
-TEST_F(ProCameraTest, DISABLED_StreamingImageDual) { - if (HasFatalFailure()) { - return; - } - sp<Surface> surface; - sp<Surface> depthSurface; - if (mDisplaySecs > 0) { - createOnScreenSurface(/*out*/surface); - createDepthOnScreenSurface(/*out*/depthSurface); - } - - int streamId = -1; - EXPECT_OK(mCamera->createStream(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, surface, &streamId)); - EXPECT_NE(-1, streamId); - - int depthStreamId = -1; - EXPECT_OK(mCamera->createStream(/*width*/320, /*height*/240, - TEST_FORMAT_DEPTH, depthSurface, &depthStreamId)); - EXPECT_NE(-1, depthStreamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - /* - */ - /* iterate in a loop submitting requests every frame. - * what kind of requests doesnt really matter, just whatever. - */ - - // it would probably be better to use CameraMetadata from camera service. - camera_metadata_t *request = NULL; - EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, - /*out*/&request)); - EXPECT_NE((void*)NULL, request); - - /*FIXME: dont need this later, at which point the above should become an - ASSERT_NE*/ - if(request == NULL) request = allocate_camera_metadata(10, 100); - - // set the output streams to just this stream ID - - // wow what a verbose API. - int32_t allStreams[] = { streamId, depthStreamId }; - // IMPORTANT. bad things will happen if its not a uint8. 
- size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]); - camera_metadata_entry_t entry; - uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS); - int find = find_camera_metadata_entry(request, tag, &entry); - if (find == -ENOENT) { - if (add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount) != OK) { - camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); - ASSERT_OK(append_camera_metadata(tmp, request)); - free_camera_metadata(request); - request = tmp; - - ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount)); - } - } else { - ASSERT_OK(update_camera_metadata_entry(request, entry.index, - &allStreams, /*data_count*/streamCount, &entry)); - } - - EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); - - dout << "will sleep now for " << mDisplaySecs << std::endl; - sleep(mDisplaySecs); - - free_camera_metadata(request); - - for (size_t i = 0; i < streamCount; ++i) { - EXPECT_OK(mCamera->deleteStream(allStreams[i])); - } - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -TEST_F(ProCameraTest, CpuConsumerSingle) { - if (HasFatalFailure()) { - return; - } - - mListener->SetEventMask(ProEvent_Mask(ACQUIRED) | - ProEvent_Mask(STOLEN) | - ProEvent_Mask(RELEASED) | - ProEvent_Mask(FRAME_RECEIVED)); - mListener->SetDropFrames(true); - - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, - TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(ACQUIRED, mListener->ReadEvent()); - /* iterate in a loop submitting requests every frame. - * what kind of requests doesnt really matter, just whatever. - */ - - // it would probably be better to use CameraMetadata from camera service. 
- camera_metadata_t *request = NULL; - EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, - /*out*/&request)); - EXPECT_NE((void*)NULL, request); - - /*FIXME: dont need this later, at which point the above should become an - ASSERT_NE*/ - if(request == NULL) request = allocate_camera_metadata(10, 100); - - // set the output streams to just this stream ID - - int32_t allStreams[] = { streamId }; - camera_metadata_entry_t entry; - uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS); - int find = find_camera_metadata_entry(request, tag, &entry); - if (find == -ENOENT) { - if (add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/1) != OK) { - camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); - ASSERT_OK(append_camera_metadata(tmp, request)); - free_camera_metadata(request); - request = tmp; - - ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/1)); - } - } else { - ASSERT_OK(update_camera_metadata_entry(request, entry.index, - &allStreams, /*data_count*/1, &entry)); - } - - EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); - - // Consume a couple of frames - for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent()); - } - - // Done: clean up - free_camera_metadata(request); - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -TEST_F(ProCameraTest, CpuConsumerDual) { - if (HasFatalFailure()) { - return; - } - - mListener->SetEventMask(ProEvent_Mask(FRAME_RECEIVED)); - mListener->SetDropFrames(true); - - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - int depthStreamId = -1; - EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, - TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, 
&consumer, &depthStreamId)); - EXPECT_NE(-1, depthStreamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - /* - */ - /* iterate in a loop submitting requests every frame. - * what kind of requests doesnt really matter, just whatever. - */ - - // it would probably be better to use CameraMetadata from camera service. - camera_metadata_t *request = NULL; - EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, - /*out*/&request)); - EXPECT_NE((void*)NULL, request); - - if(request == NULL) request = allocate_camera_metadata(10, 100); - - // set the output streams to just this stream ID - - // wow what a verbose API. - int32_t allStreams[] = { streamId, depthStreamId }; - size_t streamCount = 2; - camera_metadata_entry_t entry; - uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS); - int find = find_camera_metadata_entry(request, tag, &entry); - if (find == -ENOENT) { - if (add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount) != OK) { - camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); - ASSERT_OK(append_camera_metadata(tmp, request)); - free_camera_metadata(request); - request = tmp; - - ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount)); - } - } else { - ASSERT_OK(update_camera_metadata_entry(request, entry.index, - &allStreams, /*data_count*/streamCount, &entry)); - } - - EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); - - // Consume a couple of frames - for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { - // stream id 1 - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent()); - - // stream id 2 - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent()); - - //TODO: events should be a struct with some data like the stream id - } - - // Done: clean up - free_camera_metadata(request); - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); 
-} - -TEST_F(ProCameraTest, ResultReceiver) { - if (HasFatalFailure()) { - return; - } - - mListener->SetEventMask(ProEvent_Mask(RESULT_RECEIVED)); - mListener->SetDropFrames(true); - //FIXME: if this is run right after the previous test we get FRAME_RECEIVED - // need to filter out events at read time - - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - /* - */ - /* iterate in a loop submitting requests every frame. - * what kind of requests doesnt really matter, just whatever. - */ - - camera_metadata_t *request = NULL; - EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW, - /*out*/&request)); - EXPECT_NE((void*)NULL, request); - - /*FIXME*/ - if(request == NULL) request = allocate_camera_metadata(10, 100); - - // set the output streams to just this stream ID - - int32_t allStreams[] = { streamId }; - size_t streamCount = 1; - camera_metadata_entry_t entry; - uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS); - int find = find_camera_metadata_entry(request, tag, &entry); - if (find == -ENOENT) { - if (add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount) != OK) { - camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000); - ASSERT_OK(append_camera_metadata(tmp, request)); - free_camera_metadata(request); - request = tmp; - - ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams, - /*data_count*/streamCount)); - } - } else { - ASSERT_OK(update_camera_metadata_entry(request, entry.index, - &allStreams, /*data_count*/streamCount, &entry)); - } - - EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true)); - - // Consume a couple of results - for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { - EXPECT_EQ(OK, mListener->WaitForEvent()); - EXPECT_EQ(RESULT_RECEIVED, mListener->ReadEvent()); - } 
- - // Done: clean up - free_camera_metadata(request); - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -// FIXME: This is racy and sometimes fails on waitForFrameMetadata -TEST_F(ProCameraTest, DISABLED_WaitForResult) { - if (HasFatalFailure()) { - return; - } - - mListener->SetDropFrames(true); - - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - - int32_t streams[] = { streamId }; - ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1)); - - // Consume a couple of results - for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { - EXPECT_OK(mCamera->waitForFrameMetadata()); - CameraMetadata meta = mCamera->consumeFrameMetadata(); - EXPECT_FALSE(meta.isEmpty()); - } - - // Done: clean up - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -TEST_F(ProCameraTest, WaitForSingleStreamBuffer) { - if (HasFatalFailure()) { - return; - } - - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - - int32_t streams[] = { streamId }; - ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, - /*requests*/TEST_CPU_FRAME_COUNT)); - - // Consume a couple of results - for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) { - EXPECT_EQ(1, mCamera->waitForFrameBuffer(streamId)); - - CpuConsumer::LockedBuffer buf; - EXPECT_OK(consumer->lockNextBuffer(&buf)); - - dout << "Buffer synchronously received on streamId = " << streamId << - ", dataPtr = " << (void*)buf.data << - ", timestamp = " << buf.timestamp << std::endl; - - EXPECT_OK(consumer->unlockBuffer(buf)); - } - - // Done: 
clean up - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -// FIXME: This is racy and sometimes fails on waitForFrameMetadata -TEST_F(ProCameraTest, DISABLED_WaitForDualStreamBuffer) { - if (HasFatalFailure()) { - return; - } - - const int REQUEST_COUNT = TEST_CPU_FRAME_COUNT * 10; - - // 15 fps - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - // 30 fps - int depthStreamId = -1; - sp<CpuConsumer> depthConsumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240, - TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &depthConsumer, &depthStreamId)); - EXPECT_NE(-1, depthStreamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - - int32_t streams[] = { streamId, depthStreamId }; - ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2, - /*requests*/REQUEST_COUNT)); - - int depthFrames = 0; - int greyFrames = 0; - - // Consume two frames simultaneously. Unsynchronized by timestamps. - for (int i = 0; i < REQUEST_COUNT; ++i) { - - // Exhaust event queue so it doesn't keep growing - while (mListener->ReadEvent() != UNKNOWN); - - // Get the metadata - EXPECT_OK(mCamera->waitForFrameMetadata()); - CameraMetadata meta = mCamera->consumeFrameMetadata(); - EXPECT_FALSE(meta.isEmpty()); - - // Get the buffers - - EXPECT_EQ(1, mCamera->waitForFrameBuffer(depthStreamId)); - - /** - * Guaranteed to be able to consume the depth frame, - * since we waited on it. 
- */ - CpuConsumer::LockedBuffer depthBuffer; - EXPECT_OK(depthConsumer->lockNextBuffer(&depthBuffer)); - - dout << "Depth Buffer synchronously received on streamId = " << - streamId << - ", dataPtr = " << (void*)depthBuffer.data << - ", timestamp = " << depthBuffer.timestamp << std::endl; - - EXPECT_OK(depthConsumer->unlockBuffer(depthBuffer)); - - depthFrames++; - - - /** Consume Greyscale frames if there are any. - * There may not be since it runs at half FPS */ - CpuConsumer::LockedBuffer greyBuffer; - while (consumer->lockNextBuffer(&greyBuffer) == OK) { - - dout << "GRAY Buffer synchronously received on streamId = " << - streamId << - ", dataPtr = " << (void*)greyBuffer.data << - ", timestamp = " << greyBuffer.timestamp << std::endl; - - EXPECT_OK(consumer->unlockBuffer(greyBuffer)); - - greyFrames++; - } - } - - dout << "Done, summary: depth frames " << std::dec << depthFrames - << ", grey frames " << std::dec << greyFrames << std::endl; - - // Done: clean up - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesSync) { - if (HasFatalFailure()) { - return; - } - - const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT; - - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, - /*synchronousMode*/true, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - - int32_t streams[] = { streamId }; - ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, - /*requests*/NUM_REQUESTS)); - - // Consume a couple of results - for (int i = 0; i < NUM_REQUESTS; ++i) { - int numFrames; - EXPECT_TRUE((numFrames = mCamera->waitForFrameBuffer(streamId)) > 0); - - // Drop all but the newest framebuffer - EXPECT_EQ(numFrames-1, mCamera->dropFrameBuffer(streamId, numFrames-1)); - - dout << "Dropped " << (numFrames - 1) << " 
frames" << std::endl; - - // Skip the counter ahead, don't try to consume these frames again - i += numFrames-1; - - // "Consume" the buffer - CpuConsumer::LockedBuffer buf; - EXPECT_OK(consumer->lockNextBuffer(&buf)); - - dout << "Buffer synchronously received on streamId = " << streamId << - ", dataPtr = " << (void*)buf.data << - ", timestamp = " << buf.timestamp << std::endl; - - // Process at 10fps, stream is at 15fps. - // This means we will definitely fill up the buffer queue with - // extra buffers and need to drop them. - usleep(TEST_FRAME_PROCESSING_DELAY_US); - - EXPECT_OK(consumer->unlockBuffer(buf)); - } - - // Done: clean up - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - -TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) { - if (HasFatalFailure()) { - return; - } - - const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT; - - int streamId = -1; - sp<CpuConsumer> consumer; - EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960, - TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, - /*synchronousMode*/false, &consumer, &streamId)); - EXPECT_NE(-1, streamId); - - EXPECT_OK(mCamera->exclusiveTryLock()); - - int32_t streams[] = { streamId }; - ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1, - /*requests*/NUM_REQUESTS)); - - uint64_t lastFrameNumber = 0; - int numFrames; - - // Consume a couple of results - int i; - for (i = 0; i < NUM_REQUESTS && lastFrameNumber < NUM_REQUESTS; ++i) { - EXPECT_LT(0, (numFrames = mCamera->waitForFrameBuffer(streamId))); - - dout << "Dropped " << (numFrames - 1) << " frames" << std::endl; - - // Skip the counter ahead, don't try to consume these frames again - i += numFrames-1; - - // "Consume" the buffer - CpuConsumer::LockedBuffer buf; - - EXPECT_EQ(OK, consumer->lockNextBuffer(&buf)); - - lastFrameNumber = buf.frameNumber; - - dout << "Buffer asynchronously received on streamId = " << streamId << - ", dataPtr = " << (void*)buf.data << - ", 
timestamp = " << buf.timestamp << - ", framenumber = " << buf.frameNumber << std::endl; - - // Process at 10fps, stream is at 15fps. - // This means we will definitely fill up the buffer queue with - // extra buffers and need to drop them. - usleep(TEST_FRAME_PROCESSING_DELAY_US); - - EXPECT_OK(consumer->unlockBuffer(buf)); - } - - dout << "Done after " << i << " iterations " << std::endl; - - // Done: clean up - EXPECT_OK(mCamera->deleteStream(streamId)); - EXPECT_OK(mCamera->exclusiveUnlock()); -} - - - -//TODO: refactor into separate file -TEST_F(ProCameraTest, ServiceListenersSubscribe) { - - ASSERT_EQ(4u, sizeof(ServiceListener::Status)); - - sp<ServiceListener> listener = new ServiceListener(); - - EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener)); - EXPECT_OK(ProCamera::addServiceListener(listener)); - - EXPECT_EQ(ALREADY_EXISTS, ProCamera::addServiceListener(listener)); - EXPECT_OK(ProCamera::removeServiceListener(listener)); - - EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener)); -} - -//TODO: refactor into separate file -TEST_F(ProCameraTest, ServiceListenersFunctional) { - - sp<ServiceListener> listener = new ServiceListener(); - - EXPECT_OK(ProCamera::addServiceListener(listener)); - - sp<Camera> cam = Camera::connect(CAMERA_ID, - /*clientPackageName*/String16(), - -1); - EXPECT_NE((void*)NULL, cam.get()); - - ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN; - EXPECT_OK(listener->waitForStatusChange(/*out*/stat)); - - EXPECT_EQ(ServiceListener::STATUS_NOT_AVAILABLE, stat); - - if (cam.get()) { - cam->disconnect(); - } - - EXPECT_OK(listener->waitForStatusChange(/*out*/stat)); - EXPECT_EQ(ServiceListener::STATUS_PRESENT, stat); - - EXPECT_OK(ProCamera::removeServiceListener(listener)); -} - - - -} -} -} -} diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp index 02df1d2..36a7e73 100644 --- a/cmds/screenrecord/screenrecord.cpp +++ b/cmds/screenrecord/screenrecord.cpp @@ -23,7 
+23,10 @@ #include <stdio.h> #include <stdlib.h> #include <string.h> +#include <sys/stat.h> +#include <sys/types.h> #include <sys/wait.h> + #include <termios.h> #include <unistd.h> @@ -637,7 +640,13 @@ static status_t recordScreen(const char* fileName) { case FORMAT_MP4: { // Configure muxer. We have to wait for the CSD blob from the encoder // before we can start it. - muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4); + int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); + if (fd < 0) { + fprintf(stderr, "ERROR: couldn't open file\n"); + abort(); + } + muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4); + close(fd); if (gRotate) { muxer->setOrientationHint(90); // TODO: does this do anything? } diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index 561ce02..0e3bc68 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -169,6 +169,48 @@ include $(BUILD_EXECUTABLE) include $(CLEAR_VARS) +LOCAL_SRC_FILES:= \ + filters/argbtorgba.rs \ + filters/nightvision.rs \ + filters/saturation.rs \ + mediafilter.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libstagefright \ + liblog \ + libutils \ + libbinder \ + libstagefright_foundation \ + libmedia \ + libgui \ + libcutils \ + libui \ + libRScpp \ + +LOCAL_C_INCLUDES:= \ + $(TOP)/frameworks/av/media/libstagefright \ + $(TOP)/frameworks/native/include/media/openmax \ + $(TOP)/frameworks/rs/cpp \ + $(TOP)/frameworks/rs \ + +intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,) +LOCAL_C_INCLUDES += $(intermediates) + +LOCAL_STATIC_LIBRARIES:= \ + libstagefright_mediafilter + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE_TAGS := optional + +LOCAL_MODULE:= mediafilter + +include $(BUILD_EXECUTABLE) + +################################################################################ + +include $(CLEAR_VARS) + LOCAL_SRC_FILES:= \ muxer.cpp \ diff --git a/cmds/stagefright/SimplePlayer.cpp 
b/cmds/stagefright/SimplePlayer.cpp index 4b2f980..ac1a547 100644 --- a/cmds/stagefright/SimplePlayer.cpp +++ b/cmds/stagefright/SimplePlayer.cpp @@ -59,14 +59,14 @@ status_t PostAndAwaitResponse( return err; } status_t SimplePlayer::setDataSource(const char *path) { - sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + sp<AMessage> msg = new AMessage(kWhatSetDataSource, this); msg->setString("path", path); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t SimplePlayer::setSurface(const sp<IGraphicBufferProducer> &bufferProducer) { - sp<AMessage> msg = new AMessage(kWhatSetSurface, id()); + sp<AMessage> msg = new AMessage(kWhatSetSurface, this); sp<Surface> surface; if (bufferProducer != NULL) { @@ -81,25 +81,25 @@ status_t SimplePlayer::setSurface(const sp<IGraphicBufferProducer> &bufferProduc } status_t SimplePlayer::prepare() { - sp<AMessage> msg = new AMessage(kWhatPrepare, id()); + sp<AMessage> msg = new AMessage(kWhatPrepare, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t SimplePlayer::start() { - sp<AMessage> msg = new AMessage(kWhatStart, id()); + sp<AMessage> msg = new AMessage(kWhatStart, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t SimplePlayer::stop() { - sp<AMessage> msg = new AMessage(kWhatStop, id()); + sp<AMessage> msg = new AMessage(kWhatStop, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t SimplePlayer::reset() { - sp<AMessage> msg = new AMessage(kWhatReset, id()); + sp<AMessage> msg = new AMessage(kWhatReset, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } @@ -116,7 +116,7 @@ void SimplePlayer::onMessageReceived(const sp<AMessage> &msg) { mState = UNPREPARED; } - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<AMessage> response = new AMessage; @@ -139,7 +139,7 @@ void SimplePlayer::onMessageReceived(const sp<AMessage> 
&msg) { err = OK; } - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<AMessage> response = new AMessage; @@ -161,7 +161,7 @@ void SimplePlayer::onMessageReceived(const sp<AMessage> &msg) { } } - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<AMessage> response = new AMessage; @@ -194,7 +194,7 @@ void SimplePlayer::onMessageReceived(const sp<AMessage> &msg) { } } - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<AMessage> response = new AMessage; @@ -217,7 +217,7 @@ void SimplePlayer::onMessageReceived(const sp<AMessage> &msg) { } } - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<AMessage> response = new AMessage; @@ -240,7 +240,7 @@ void SimplePlayer::onMessageReceived(const sp<AMessage> &msg) { mState = UNINITIALIZED; } - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<AMessage> response = new AMessage; @@ -382,7 +382,7 @@ status_t SimplePlayer::onStart() { mStartTimeRealUs = -1ll; - sp<AMessage> msg = new AMessage(kWhatDoMoreStuff, id()); + sp<AMessage> msg = new AMessage(kWhatDoMoreStuff, this); msg->setInt32("generation", ++mDoMoreStuffGeneration); msg->post(); diff --git a/cmds/stagefright/audioloop.cpp b/cmds/stagefright/audioloop.cpp index 96073f1..7b0de24 100644 --- a/cmds/stagefright/audioloop.cpp +++ b/cmds/stagefright/audioloop.cpp @@ -14,6 +14,10 @@ * limitations under the License. 
*/ +#include <sys/types.h> +#include <sys/stat.h> +#include <fcntl.h> + #include <binder/ProcessState.h> #include <media/mediarecorder.h> #include <media/stagefright/foundation/ADebug.h> @@ -109,7 +113,12 @@ int main(int argc, char* argv[]) if (fileOut != NULL) { // target file specified, write encoded AMR output - sp<AMRWriter> writer = new AMRWriter(fileOut); + int fd = open(fileOut, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); + if (fd < 0) { + return 1; + } + sp<AMRWriter> writer = new AMRWriter(fd); + close(fd); writer->addSource(encoder); writer->start(); sleep(duration); diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp index fd02bcc..d987250 100644 --- a/cmds/stagefright/codec.cpp +++ b/cmds/stagefright/codec.cpp @@ -45,9 +45,10 @@ static void usage(const char *me) { fprintf(stderr, "usage: %s [-a] use audio\n" "\t\t[-v] use video\n" "\t\t[-p] playback\n" - "\t\t[-S] allocate buffers from a surface\n", + "\t\t[-S] allocate buffers from a surface\n" + "\t\t[-R] render output to surface (enables -S)\n" + "\t\t[-T] use render timestamps (enables -R)\n", me); - exit(1); } @@ -71,7 +72,9 @@ static int decode( const char *path, bool useAudio, bool useVideo, - const android::sp<android::Surface> &surface) { + const android::sp<android::Surface> &surface, + bool renderSurface, + bool useTimestamp) { using namespace android; static int64_t kTimeout = 500ll; @@ -136,6 +139,7 @@ static int decode( CHECK(!stateByTrack.isEmpty()); int64_t startTimeUs = ALooper::GetNowUs(); + int64_t startTimeRender = -1; for (size_t i = 0; i < stateByTrack.size(); ++i) { CodecState *state = &stateByTrack.editValueAt(i); @@ -260,7 +264,23 @@ static int decode( ++state->mNumBuffersDecoded; state->mNumBytesDecoded += size; - err = state->mCodec->releaseOutputBuffer(index); + if (surface == NULL || !renderSurface) { + err = state->mCodec->releaseOutputBuffer(index); + } else if (useTimestamp) { + if (startTimeRender == -1) { + // begin rendering 2 vsyncs 
(~33ms) after first decode + startTimeRender = + systemTime(SYSTEM_TIME_MONOTONIC) + 33000000 + - (presentationTimeUs * 1000); + } + presentationTimeUs = + (presentationTimeUs * 1000) + startTimeRender; + err = state->mCodec->renderOutputBufferAndRelease( + index, presentationTimeUs); + } else { + err = state->mCodec->renderOutputBufferAndRelease(index); + } + CHECK_EQ(err, (status_t)OK); if (flags & MediaCodec::BUFFER_FLAG_EOS) { @@ -320,34 +340,42 @@ int main(int argc, char **argv) { bool useVideo = false; bool playback = false; bool useSurface = false; + bool renderSurface = false; + bool useTimestamp = false; int res; - while ((res = getopt(argc, argv, "havpSD")) >= 0) { + while ((res = getopt(argc, argv, "havpSDRT")) >= 0) { switch (res) { case 'a': { useAudio = true; break; } - case 'v': { useVideo = true; break; } - case 'p': { playback = true; break; } - + case 'T': + { + useTimestamp = true; + } + // fall through + case 'R': + { + renderSurface = true; + } + // fall through case 'S': { useSurface = true; break; } - case '?': case 'h': default: @@ -422,7 +450,8 @@ int main(int argc, char **argv) { player->stop(); player->reset(); } else { - decode(looper, argv[0], useAudio, useVideo, surface); + decode(looper, argv[0], useAudio, useVideo, surface, renderSurface, + useTimestamp); } if (playback || (useSurface && useVideo)) { diff --git a/include/media/nbaio/roundup.h b/cmds/stagefright/filters/argbtorgba.rs index 4c3cc25..229ff8c 100644 --- a/include/media/nbaio/roundup.h +++ b/cmds/stagefright/filters/argbtorgba.rs @@ -1,5 +1,5 @@ /* - * Copyright (C) 2012 The Android Open Source Project + * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,18 +14,13 @@ * limitations under the License. 
*/ -#ifndef ROUNDUP_H -#define ROUNDUP_H +#pragma version(1) +#pragma rs java_package_name(com.android.rs.cppbasic) +#pragma rs_fp_relaxed -#ifdef __cplusplus -extern "C" { -#endif - -// Round up to the next highest power of 2 -unsigned roundup(unsigned v); - -#ifdef __cplusplus -} -#endif - -#endif // ROUNDUP_H +void root(const uchar4 *v_in, uchar4 *v_out) { + v_out->x = v_in->y; + v_out->y = v_in->z; + v_out->z = v_in->w; + v_out->w = v_in->x; +}
\ No newline at end of file diff --git a/cmds/stagefright/filters/nightvision.rs b/cmds/stagefright/filters/nightvision.rs new file mode 100644 index 0000000..f61413c --- /dev/null +++ b/cmds/stagefright/filters/nightvision.rs @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma version(1) +#pragma rs java_package_name(com.android.rs.cppbasic) +#pragma rs_fp_relaxed + +const static float3 gMonoMult = {0.299f, 0.587f, 0.114f}; +const static float3 gNightVisionMult = {0.5f, 1.f, 0.5f}; + +// calculates luminance of pixel, then biases color balance toward green +void root(const uchar4 *v_in, uchar4 *v_out) { + v_out->x = v_in->x; // don't modify A + + // get RGB, scale 0-255 uchar to 0-1.0 float + float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f, + v_in->w * 0.003921569f}; + + // apply filter + float3 result = dot(rgb, gMonoMult) * gNightVisionMult; + + v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f); + v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f); + v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f); +} diff --git a/cmds/stagefright/filters/saturation.rs b/cmds/stagefright/filters/saturation.rs new file mode 100644 index 0000000..1de9dd8 --- /dev/null +++ b/cmds/stagefright/filters/saturation.rs @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma version(1) +#pragma rs java_package_name(com.android.rs.cppbasic) +#pragma rs_fp_relaxed + +const static float3 gMonoMult = {0.299f, 0.587f, 0.114f}; + +// global variables (parameters accessible to application code) +float gSaturation = 1.0f; + +void root(const uchar4 *v_in, uchar4 *v_out) { + v_out->x = v_in->x; // don't modify A + + // get RGB, scale 0-255 uchar to 0-1.0 float + float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f, + v_in->w * 0.003921569f}; + + // apply saturation filter + float3 result = dot(rgb, gMonoMult); + result = mix(result, rgb, gSaturation); + + v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f); + v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f); + v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f); +} diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp new file mode 100644 index 0000000..f77b38b --- /dev/null +++ b/cmds/stagefright/mediafilter.cpp @@ -0,0 +1,785 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "mediafilterTest" + +#include <inttypes.h> + +#include <binder/ProcessState.h> +#include <filters/ColorConvert.h> +#include <gui/ISurfaceComposer.h> +#include <gui/SurfaceComposerClient.h> +#include <gui/Surface.h> +#include <media/ICrypto.h> +#include <media/IMediaHTTPService.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MediaCodec.h> +#include <media/stagefright/NuMediaExtractor.h> +#include <media/stagefright/RenderScriptWrapper.h> +#include <OMX_IVCommon.h> +#include <ui/DisplayInfo.h> + +#include "RenderScript.h" +#include "ScriptC_argbtorgba.h" +#include "ScriptC_nightvision.h" +#include "ScriptC_saturation.h" + +// test parameters +static const bool kTestFlush = true; // Note: true will drop 1 out of +static const int kFlushAfterFrames = 25; // kFlushAfterFrames output frames +static const int64_t kTimeout = 500ll; + +// built-in filter parameters +static const int32_t kInvert = false; // ZeroFilter param +static const float kBlurRadius = 15.0f; // IntrinsicBlurFilter param +static const float kSaturation = 0.0f; // SaturationFilter param + +static void usage(const char *me) { + fprintf(stderr, "usage: [flags] %s\n" + "\t[-b] use IntrinsicBlurFilter\n" + "\t[-c] use argb to rgba conversion RSFilter\n" + "\t[-n] use night vision RSFilter\n" + "\t[-r] use saturation RSFilter\n" + "\t[-s] use SaturationFilter\n" + 
"\t[-z] use ZeroFilter (copy filter)\n" + "\t[-R] render output to surface (enables -S)\n" + "\t[-S] allocate buffers from a surface\n" + "\t[-T] use render timestamps (enables -R)\n", + me); + exit(1); +} + +namespace android { + +struct SaturationRSFilter : RenderScriptWrapper::RSFilterCallback { + void init(RSC::sp<RSC::RS> context) { + mScript = new ScriptC_saturation(context); + mScript->set_gSaturation(3.f); + } + + virtual status_t processBuffers( + RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) { + mScript->forEach_root(inBuffer, outBuffer); + + return OK; + } + + status_t handleSetParameters(const sp<AMessage> &msg) { + return OK; + } + +private: + RSC::sp<ScriptC_saturation> mScript; +}; + +struct NightVisionRSFilter : RenderScriptWrapper::RSFilterCallback { + void init(RSC::sp<RSC::RS> context) { + mScript = new ScriptC_nightvision(context); + } + + virtual status_t processBuffers( + RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) { + mScript->forEach_root(inBuffer, outBuffer); + + return OK; + } + + status_t handleSetParameters(const sp<AMessage> &msg) { + return OK; + } + +private: + RSC::sp<ScriptC_nightvision> mScript; +}; + +struct ARGBToRGBARSFilter : RenderScriptWrapper::RSFilterCallback { + void init(RSC::sp<RSC::RS> context) { + mScript = new ScriptC_argbtorgba(context); + } + + virtual status_t processBuffers( + RSC::Allocation *inBuffer, RSC::Allocation *outBuffer) { + mScript->forEach_root(inBuffer, outBuffer); + + return OK; + } + + status_t handleSetParameters(const sp<AMessage> &msg) { + return OK; + } + +private: + RSC::sp<ScriptC_argbtorgba> mScript; +}; + +struct CodecState { + sp<MediaCodec> mCodec; + Vector<sp<ABuffer> > mInBuffers; + Vector<sp<ABuffer> > mOutBuffers; + bool mSignalledInputEOS; + bool mSawOutputEOS; + int64_t mNumBuffersDecoded; +}; + +struct DecodedFrame { + size_t index; + size_t offset; + size_t size; + int64_t presentationTimeUs; + uint32_t flags; +}; + +enum FilterType { + FILTERTYPE_ZERO, + 
FILTERTYPE_INTRINSIC_BLUR, + FILTERTYPE_SATURATION, + FILTERTYPE_RS_SATURATION, + FILTERTYPE_RS_NIGHT_VISION, + FILTERTYPE_RS_ARGB_TO_RGBA, +}; + +size_t inputFramesSinceFlush = 0; +void tryCopyDecodedBuffer( + List<DecodedFrame> *decodedFrameIndices, + CodecState *filterState, + CodecState *vidState) { + if (decodedFrameIndices->empty()) { + return; + } + + size_t filterIndex; + status_t err = filterState->mCodec->dequeueInputBuffer( + &filterIndex, kTimeout); + if (err != OK) { + return; + } + + ++inputFramesSinceFlush; + + DecodedFrame frame = *decodedFrameIndices->begin(); + + // only consume a buffer if we are not going to flush, since we expect + // the dequeue -> flush -> queue operation to cause an error and + // not produce an output frame + if (!kTestFlush || inputFramesSinceFlush < kFlushAfterFrames) { + decodedFrameIndices->erase(decodedFrameIndices->begin()); + } + size_t outIndex = frame.index; + + const sp<ABuffer> &srcBuffer = + vidState->mOutBuffers.itemAt(outIndex); + const sp<ABuffer> &destBuffer = + filterState->mInBuffers.itemAt(filterIndex); + + sp<AMessage> srcFormat, destFormat; + vidState->mCodec->getOutputFormat(&srcFormat); + filterState->mCodec->getInputFormat(&destFormat); + + int32_t srcWidth, srcHeight, srcStride, srcSliceHeight; + int32_t srcColorFormat, destColorFormat; + int32_t destWidth, destHeight, destStride, destSliceHeight; + CHECK(srcFormat->findInt32("stride", &srcStride) + && srcFormat->findInt32("slice-height", &srcSliceHeight) + && srcFormat->findInt32("width", &srcWidth) + && srcFormat->findInt32("height", & srcHeight) + && srcFormat->findInt32("color-format", &srcColorFormat)); + CHECK(destFormat->findInt32("stride", &destStride) + && destFormat->findInt32("slice-height", &destSliceHeight) + && destFormat->findInt32("width", &destWidth) + && destFormat->findInt32("height", & destHeight) + && destFormat->findInt32("color-format", &destColorFormat)); + + CHECK(srcWidth <= destStride && srcHeight <= destSliceHeight); + + 
convertYUV420spToARGB( + srcBuffer->data(), + srcBuffer->data() + srcStride * srcSliceHeight, + srcWidth, + srcHeight, + destBuffer->data()); + + // copy timestamp + int64_t timeUs; + CHECK(srcBuffer->meta()->findInt64("timeUs", &timeUs)); + destBuffer->meta()->setInt64("timeUs", timeUs); + + if (kTestFlush && inputFramesSinceFlush >= kFlushAfterFrames) { + inputFramesSinceFlush = 0; + + // check that queueing a buffer that was dequeued before flush + // fails with expected error EACCES + filterState->mCodec->flush(); + + err = filterState->mCodec->queueInputBuffer( + filterIndex, 0 /* offset */, destBuffer->size(), + timeUs, frame.flags); + + if (err == OK) { + ALOGE("FAIL: queue after flush returned OK"); + } else if (err != -EACCES) { + ALOGE("queueInputBuffer after flush returned %d, " + "expected -EACCES (-13)", err); + } + } else { + err = filterState->mCodec->queueInputBuffer( + filterIndex, 0 /* offset */, destBuffer->size(), + timeUs, frame.flags); + CHECK(err == OK); + + err = vidState->mCodec->releaseOutputBuffer(outIndex); + CHECK(err == OK); + } +} + +size_t outputFramesSinceFlush = 0; +void tryDrainOutputBuffer( + CodecState *filterState, + const sp<Surface> &surface, bool renderSurface, + bool useTimestamp, int64_t *startTimeRender) { + size_t index; + size_t offset; + size_t size; + int64_t presentationTimeUs; + uint32_t flags; + status_t err = filterState->mCodec->dequeueOutputBuffer( + &index, &offset, &size, &presentationTimeUs, &flags, + kTimeout); + + if (err != OK) { + return; + } + + ++outputFramesSinceFlush; + + if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) { + filterState->mCodec->flush(); + } + + if (surface == NULL || !renderSurface) { + err = filterState->mCodec->releaseOutputBuffer(index); + } else if (useTimestamp) { + if (*startTimeRender == -1) { + // begin rendering 2 vsyncs after first decode + *startTimeRender = systemTime(SYSTEM_TIME_MONOTONIC) + + 33000000 - (presentationTimeUs * 1000); + } + presentationTimeUs 
= + (presentationTimeUs * 1000) + *startTimeRender; + err = filterState->mCodec->renderOutputBufferAndRelease( + index, presentationTimeUs); + } else { + err = filterState->mCodec->renderOutputBufferAndRelease(index); + } + + if (kTestFlush && outputFramesSinceFlush >= kFlushAfterFrames) { + outputFramesSinceFlush = 0; + + // releasing the buffer dequeued before flush should cause an error + // if so, the frame will also be skipped in output stream + if (err == OK) { + ALOGE("FAIL: release after flush returned OK"); + } else if (err != -EACCES) { + ALOGE("releaseOutputBuffer after flush returned %d, " + "expected -EACCES (-13)", err); + } + } else { + CHECK(err == OK); + } + + if (flags & MediaCodec::BUFFER_FLAG_EOS) { + ALOGV("reached EOS on output."); + filterState->mSawOutputEOS = true; + } +} + +static int decode( + const sp<ALooper> &looper, + const char *path, + const sp<Surface> &surface, + bool renderSurface, + bool useTimestamp, + FilterType filterType) { + + static int64_t kTimeout = 500ll; + + sp<NuMediaExtractor> extractor = new NuMediaExtractor; + if (extractor->setDataSource(NULL /* httpService */, path) != OK) { + fprintf(stderr, "unable to instantiate extractor.\n"); + return 1; + } + + KeyedVector<size_t, CodecState> stateByTrack; + + CodecState *vidState = NULL; + for (size_t i = 0; i < extractor->countTracks(); ++i) { + sp<AMessage> format; + status_t err = extractor->getTrackFormat(i, &format); + CHECK(err == OK); + + AString mime; + CHECK(format->findString("mime", &mime)); + bool isVideo = !strncasecmp(mime.c_str(), "video/", 6); + if (!isVideo) { + continue; + } + + ALOGV("selecting track %zu", i); + + err = extractor->selectTrack(i); + CHECK(err == OK); + + CodecState *state = + &stateByTrack.editValueAt(stateByTrack.add(i, CodecState())); + + vidState = state; + + state->mNumBuffersDecoded = 0; + + state->mCodec = MediaCodec::CreateByType( + looper, mime.c_str(), false /* encoder */); + + CHECK(state->mCodec != NULL); + + err = 
state->mCodec->configure( + format, NULL /* surface */, NULL /* crypto */, 0 /* flags */); + + CHECK(err == OK); + + state->mSignalledInputEOS = false; + state->mSawOutputEOS = false; + + break; + } + CHECK(!stateByTrack.isEmpty()); + CHECK(vidState != NULL); + sp<AMessage> vidFormat; + vidState->mCodec->getOutputFormat(&vidFormat); + + // set filter to use ARGB8888 + vidFormat->setInt32("color-format", OMX_COLOR_Format32bitARGB8888); + // set app cache directory path + vidFormat->setString("cacheDir", "/system/bin"); + + // create RenderScript context for RSFilters + RSC::sp<RSC::RS> context = new RSC::RS(); + context->init("/system/bin"); + + sp<RenderScriptWrapper::RSFilterCallback> rsFilter; + + // create renderscript wrapper for RSFilters + sp<RenderScriptWrapper> rsWrapper = new RenderScriptWrapper; + rsWrapper->mContext = context.get(); + + CodecState *filterState = new CodecState(); + filterState->mNumBuffersDecoded = 0; + + sp<AMessage> params = new AMessage(); + + switch (filterType) { + case FILTERTYPE_ZERO: + { + filterState->mCodec = MediaCodec::CreateByComponentName( + looper, "android.filter.zerofilter"); + params->setInt32("invert", kInvert); + break; + } + case FILTERTYPE_INTRINSIC_BLUR: + { + filterState->mCodec = MediaCodec::CreateByComponentName( + looper, "android.filter.intrinsicblur"); + params->setFloat("blur-radius", kBlurRadius); + break; + } + case FILTERTYPE_SATURATION: + { + filterState->mCodec = MediaCodec::CreateByComponentName( + looper, "android.filter.saturation"); + params->setFloat("saturation", kSaturation); + break; + } + case FILTERTYPE_RS_SATURATION: + { + SaturationRSFilter *satFilter = new SaturationRSFilter; + satFilter->init(context); + rsFilter = satFilter; + rsWrapper->mCallback = rsFilter; + vidFormat->setObject("rs-wrapper", rsWrapper); + + filterState->mCodec = MediaCodec::CreateByComponentName( + looper, "android.filter.RenderScript"); + break; + } + case FILTERTYPE_RS_NIGHT_VISION: + { + NightVisionRSFilter 
*nightVisionFilter = new NightVisionRSFilter; + nightVisionFilter->init(context); + rsFilter = nightVisionFilter; + rsWrapper->mCallback = rsFilter; + vidFormat->setObject("rs-wrapper", rsWrapper); + + filterState->mCodec = MediaCodec::CreateByComponentName( + looper, "android.filter.RenderScript"); + break; + } + case FILTERTYPE_RS_ARGB_TO_RGBA: + { + ARGBToRGBARSFilter *argbToRgbaFilter = new ARGBToRGBARSFilter; + argbToRgbaFilter->init(context); + rsFilter = argbToRgbaFilter; + rsWrapper->mCallback = rsFilter; + vidFormat->setObject("rs-wrapper", rsWrapper); + + filterState->mCodec = MediaCodec::CreateByComponentName( + looper, "android.filter.RenderScript"); + break; + } + default: + { + LOG_ALWAYS_FATAL("mediacodec.cpp error: unrecognized FilterType"); + break; + } + } + CHECK(filterState->mCodec != NULL); + + status_t err = filterState->mCodec->configure( + vidFormat /* format */, surface, NULL /* crypto */, 0 /* flags */); + CHECK(err == OK); + + filterState->mSignalledInputEOS = false; + filterState->mSawOutputEOS = false; + + int64_t startTimeUs = ALooper::GetNowUs(); + int64_t startTimeRender = -1; + + for (size_t i = 0; i < stateByTrack.size(); ++i) { + CodecState *state = &stateByTrack.editValueAt(i); + + sp<MediaCodec> codec = state->mCodec; + + CHECK_EQ((status_t)OK, codec->start()); + + CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers)); + CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers)); + + ALOGV("got %zu input and %zu output buffers", + state->mInBuffers.size(), state->mOutBuffers.size()); + } + + CHECK_EQ((status_t)OK, filterState->mCodec->setParameters(params)); + + if (kTestFlush) { + status_t flushErr = filterState->mCodec->flush(); + if (flushErr == OK) { + ALOGE("FAIL: Flush before start returned OK"); + } else { + ALOGV("Flush before start returned status %d, usually ENOSYS (-38)", + flushErr); + } + } + + CHECK_EQ((status_t)OK, filterState->mCodec->start()); + CHECK_EQ((status_t)OK, 
filterState->mCodec->getInputBuffers( + &filterState->mInBuffers)); + CHECK_EQ((status_t)OK, filterState->mCodec->getOutputBuffers( + &filterState->mOutBuffers)); + + if (kTestFlush) { + status_t flushErr = filterState->mCodec->flush(); + if (flushErr != OK) { + ALOGE("FAIL: Flush after start returned %d, expect OK (0)", + flushErr); + } else { + ALOGV("Flush immediately after start OK"); + } + } + + List<DecodedFrame> decodedFrameIndices; + + // loop until decoder reaches EOS + bool sawInputEOS = false; + bool sawOutputEOSOnAllTracks = false; + while (!sawOutputEOSOnAllTracks) { + if (!sawInputEOS) { + size_t trackIndex; + status_t err = extractor->getSampleTrackIndex(&trackIndex); + + if (err != OK) { + ALOGV("saw input eos"); + sawInputEOS = true; + } else { + CodecState *state = &stateByTrack.editValueFor(trackIndex); + + size_t index; + err = state->mCodec->dequeueInputBuffer(&index, kTimeout); + + if (err == OK) { + ALOGV("filling input buffer %zu", index); + + const sp<ABuffer> &buffer = state->mInBuffers.itemAt(index); + + err = extractor->readSampleData(buffer); + CHECK(err == OK); + + int64_t timeUs; + err = extractor->getSampleTime(&timeUs); + CHECK(err == OK); + + uint32_t bufferFlags = 0; + + err = state->mCodec->queueInputBuffer( + index, 0 /* offset */, buffer->size(), + timeUs, bufferFlags); + + CHECK(err == OK); + + extractor->advance(); + } else { + CHECK_EQ(err, -EAGAIN); + } + } + } else { + for (size_t i = 0; i < stateByTrack.size(); ++i) { + CodecState *state = &stateByTrack.editValueAt(i); + + if (!state->mSignalledInputEOS) { + size_t index; + status_t err = + state->mCodec->dequeueInputBuffer(&index, kTimeout); + + if (err == OK) { + ALOGV("signalling input EOS on track %zu", i); + + err = state->mCodec->queueInputBuffer( + index, 0 /* offset */, 0 /* size */, + 0ll /* timeUs */, MediaCodec::BUFFER_FLAG_EOS); + + CHECK(err == OK); + + state->mSignalledInputEOS = true; + } else { + CHECK_EQ(err, -EAGAIN); + } + } + } + } + + 
sawOutputEOSOnAllTracks = true; + for (size_t i = 0; i < stateByTrack.size(); ++i) { + CodecState *state = &stateByTrack.editValueAt(i); + + if (state->mSawOutputEOS) { + continue; + } else { + sawOutputEOSOnAllTracks = false; + } + + DecodedFrame frame; + status_t err = state->mCodec->dequeueOutputBuffer( + &frame.index, &frame.offset, &frame.size, + &frame.presentationTimeUs, &frame.flags, kTimeout); + + if (err == OK) { + ALOGV("draining decoded buffer %zu, time = %lld us", + frame.index, frame.presentationTimeUs); + + ++(state->mNumBuffersDecoded); + + decodedFrameIndices.push_back(frame); + + if (frame.flags & MediaCodec::BUFFER_FLAG_EOS) { + ALOGV("reached EOS on decoder output."); + state->mSawOutputEOS = true; + } + + } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) { + ALOGV("INFO_OUTPUT_BUFFERS_CHANGED"); + CHECK_EQ((status_t)OK, state->mCodec->getOutputBuffers( + &state->mOutBuffers)); + + ALOGV("got %zu output buffers", state->mOutBuffers.size()); + } else if (err == INFO_FORMAT_CHANGED) { + sp<AMessage> format; + CHECK_EQ((status_t)OK, state->mCodec->getOutputFormat(&format)); + + ALOGV("INFO_FORMAT_CHANGED: %s", + format->debugString().c_str()); + } else { + CHECK_EQ(err, -EAGAIN); + } + + tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState); + + tryDrainOutputBuffer( + filterState, surface, renderSurface, + useTimestamp, &startTimeRender); + } + } + + // after EOS on decoder, let filter reach EOS + while (!filterState->mSawOutputEOS) { + tryCopyDecodedBuffer(&decodedFrameIndices, filterState, vidState); + + tryDrainOutputBuffer( + filterState, surface, renderSurface, + useTimestamp, &startTimeRender); + } + + int64_t elapsedTimeUs = ALooper::GetNowUs() - startTimeUs; + + for (size_t i = 0; i < stateByTrack.size(); ++i) { + CodecState *state = &stateByTrack.editValueAt(i); + + CHECK_EQ((status_t)OK, state->mCodec->release()); + + printf("track %zu: %" PRId64 " frames decoded and filtered, " + "%.2f fps.\n", i, state->mNumBuffersDecoded, + 
state->mNumBuffersDecoded * 1E6 / elapsedTimeUs); + } + + return 0; +} + +} // namespace android + +int main(int argc, char **argv) { + using namespace android; + + const char *me = argv[0]; + + bool useSurface = false; + bool renderSurface = false; + bool useTimestamp = false; + FilterType filterType = FILTERTYPE_ZERO; + + int res; + while ((res = getopt(argc, argv, "bcnrszTRSh")) >= 0) { + switch (res) { + case 'b': + { + filterType = FILTERTYPE_INTRINSIC_BLUR; + break; + } + case 'c': + { + filterType = FILTERTYPE_RS_ARGB_TO_RGBA; + break; + } + case 'n': + { + filterType = FILTERTYPE_RS_NIGHT_VISION; + break; + } + case 'r': + { + filterType = FILTERTYPE_RS_SATURATION; + break; + } + case 's': + { + filterType = FILTERTYPE_SATURATION; + break; + } + case 'z': + { + filterType = FILTERTYPE_ZERO; + break; + } + case 'T': + { + useTimestamp = true; + } + // fall through + case 'R': + { + renderSurface = true; + } + // fall through + case 'S': + { + useSurface = true; + break; + } + case '?': + case 'h': + default: + { + usage(me); + break; + } + } + } + + argc -= optind; + argv += optind; + + if (argc != 1) { + usage(me); + } + + ProcessState::self()->startThreadPool(); + + DataSource::RegisterDefaultSniffers(); + + android::sp<ALooper> looper = new ALooper; + looper->start(); + + android::sp<SurfaceComposerClient> composerClient; + android::sp<SurfaceControl> control; + android::sp<Surface> surface; + + if (useSurface) { + composerClient = new SurfaceComposerClient; + CHECK_EQ((status_t)OK, composerClient->initCheck()); + + android::sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay( + ISurfaceComposer::eDisplayIdMain)); + DisplayInfo info; + SurfaceComposerClient::getDisplayInfo(display, &info); + ssize_t displayWidth = info.w; + ssize_t displayHeight = info.h; + + ALOGV("display is %zd x %zd", displayWidth, displayHeight); + + control = composerClient->createSurface( + String8("A Surface"), displayWidth, displayHeight, + PIXEL_FORMAT_RGBA_8888, 0); + 
+ CHECK(control != NULL); + CHECK(control->isValid()); + + SurfaceComposerClient::openGlobalTransaction(); + CHECK_EQ((status_t)OK, control->setLayer(INT_MAX)); + CHECK_EQ((status_t)OK, control->show()); + SurfaceComposerClient::closeGlobalTransaction(); + + surface = control->getSurface(); + CHECK(surface != NULL); + } + + decode(looper, argv[0], surface, renderSurface, useTimestamp, filterType); + + if (useSurface) { + composerClient->dispose(); + } + + looper->stop(); + + return 0; +} diff --git a/cmds/stagefright/muxer.cpp b/cmds/stagefright/muxer.cpp index f4a33e8..461b56c 100644 --- a/cmds/stagefright/muxer.cpp +++ b/cmds/stagefright/muxer.cpp @@ -17,6 +17,9 @@ //#define LOG_NDEBUG 0 #define LOG_TAG "muxer" #include <inttypes.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <fcntl.h> #include <utils/Log.h> #include <binder/ProcessState.h> @@ -72,8 +75,15 @@ static int muxing( ALOGV("input file %s, output file %s", path, outputFileName); ALOGV("useAudio %d, useVideo %d", useAudio, useVideo); - sp<MediaMuxer> muxer = new MediaMuxer(outputFileName, + int fd = open(outputFileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); + + if (fd < 0) { + ALOGE("couldn't open file"); + return fd; + } + sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4); + close(fd); size_t trackCount = extractor->countTracks(); // Map the extractor's track index to the muxer's track index. 
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp index 9f547c7..2ad40bd 100644 --- a/cmds/stagefright/recordvideo.cpp +++ b/cmds/stagefright/recordvideo.cpp @@ -17,6 +17,10 @@ #include "SineSource.h" #include <inttypes.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <fcntl.h> + #include <binder/ProcessState.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/AudioPlayer.h> @@ -300,7 +304,13 @@ int main(int argc, char **argv) { client.interface(), enc_meta, true /* createEncoder */, source, 0, preferSoftwareCodec ? OMXCodec::kPreferSoftwareCodecs : 0); - sp<MPEG4Writer> writer = new MPEG4Writer(fileName); + int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); + if (fd < 0) { + fprintf(stderr, "couldn't open file"); + return 1; + } + sp<MPEG4Writer> writer = new MPEG4Writer(fd); + close(fd); writer->addSource(encoder); int64_t start = systemTime(); CHECK_EQ((status_t)OK, writer->start()); diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp index 0f729a3..172dc36 100644 --- a/cmds/stagefright/sf2.cpp +++ b/cmds/stagefright/sf2.cpp @@ -72,7 +72,7 @@ struct Controller : public AHandler { } void startAsync() { - (new AMessage(kWhatStart, id()))->post(); + (new AMessage(kWhatStart, this))->post(); } protected: @@ -100,7 +100,7 @@ protected: if (ctrlc) { printf("\n"); printStatistics(); - (new AMessage(kWhatStop, id()))->post(); + (new AMessage(kWhatStop, this))->post(); ctrlc = false; } switch (msg->what()) { @@ -149,7 +149,7 @@ protected: mDecodeLooper->registerHandler(mCodec); mCodec->setNotificationMessage( - new AMessage(kWhatCodecNotify, id())); + new AMessage(kWhatCodecNotify, this)); sp<AMessage> format = makeFormat(mSource->getFormat()); @@ -168,7 +168,7 @@ protected: mFinalResult = OK; mSeekState = SEEK_NONE; - // (new AMessage(kWhatSeek, id()))->post(5000000ll); + // (new AMessage(kWhatSeek, this))->post(5000000ll); break; } @@ -225,12 +225,12 @@ 
protected: printf((what == CodecBase::kWhatEOS) ? "$\n" : "E\n"); printStatistics(); - (new AMessage(kWhatStop, id()))->post(); + (new AMessage(kWhatStop, this))->post(); } else if (what == CodecBase::kWhatFlushCompleted) { mSeekState = SEEK_FLUSH_COMPLETED; mCodec->signalResume(); - (new AMessage(kWhatSeek, id()))->post(5000000ll); + (new AMessage(kWhatSeek, this))->post(5000000ll); } else if (what == CodecBase::kWhatOutputFormatChanged) { } else if (what == CodecBase::kWhatShutdownCompleted) { mDecodeLooper->unregisterHandler(mCodec->id()); diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index 81edcb4..318b56d 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -19,6 +19,8 @@ #include <stdlib.h> #include <string.h> #include <sys/time.h> +#include <sys/types.h> +#include <sys/stat.h> //#define LOG_NDEBUG 0 #define LOG_TAG "stagefright" @@ -506,8 +508,13 @@ static void writeSourcesToMP4( sp<MPEG4Writer> writer = new MPEG4Writer(gWriteMP4Filename.string()); #else + int fd = open(gWriteMP4Filename.string(), O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); + if (fd < 0) { + fprintf(stderr, "couldn't open file"); + return; + } sp<MPEG2TSWriter> writer = - new MPEG2TSWriter(gWriteMP4Filename.string()); + new MPEG2TSWriter(fd); #endif // at most one minute. 
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp index 96fca94..6b8c772 100644 --- a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp +++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp @@ -48,12 +48,13 @@ status_t DrmPlugin::getKeyRequest( KeyType keyType, const KeyedVector<String8, String8>& optionalParameters, Vector<uint8_t>& request, - String8& defaultUrl) { + String8& defaultUrl, + DrmPlugin::KeyRequestType *keyRequestType) { UNUSED(optionalParameters); if (keyType != kKeyType_Streaming) { return android::ERROR_DRM_CANNOT_HANDLE; } - + *keyRequestType = DrmPlugin::kKeyRequestType_Initial; sp<Session> session = mSessionLibrary->findSession(scope); defaultUrl.clear(); return session->getKeyRequest(initData, initDataType, &request); diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/DrmPlugin.h index 6139f1f..ba4aefe 100644 --- a/drm/mediadrm/plugins/clearkey/DrmPlugin.h +++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.h @@ -54,7 +54,8 @@ public: KeyType keyType, const KeyedVector<String8, String8>& optionalParameters, Vector<uint8_t>& request, - String8& defaultUrl); + String8& defaultUrl, + DrmPlugin::KeyRequestType *keyRequestType); virtual status_t provideKeyResponse( const Vector<uint8_t>& scope, diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp index 7eac0a1..9b786c5 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp @@ -111,7 +111,8 @@ namespace android { Vector<uint8_t> const &initData, String8 const &mimeType, KeyType keyType, KeyedVector<String8, String8> const &optionalParameters, - Vector<uint8_t> &request, String8 &defaultUrl) + Vector<uint8_t> &request, String8 &defaultUrl, + KeyRequestType *keyRequestType) { Mutex::Autolock lock(mLock); ALOGD("MockDrmPlugin::getKeyRequest(sessionId=%s, initData=%s, mimeType=%s" @@ -149,6 +150,7 @@ 
namespace android { // Properties used in mock test, set by cts test app returned from mock plugin // byte[] mock-request -> request // string mock-default-url -> defaultUrl + // string mock-key-request-type -> keyRequestType index = mByteArrayProperties.indexOfKey(String8("mock-request")); if (index < 0) { @@ -165,6 +167,16 @@ namespace android { } else { defaultUrl = mStringProperties.valueAt(index); } + + index = mStringProperties.indexOfKey(String8("mock-keyRequestType")); + if (index < 0) { + ALOGD("Missing 'mock-keyRequestType' parameter for mock"); + return BAD_VALUE; + } else { + *keyRequestType = static_cast<KeyRequestType>( + atoi(mStringProperties.valueAt(index).string())); + } + return OK; } diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h index d1d8058..d0f2ddb 100644 --- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h +++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h @@ -62,7 +62,8 @@ namespace android { Vector<uint8_t> const &initData, String8 const &mimeType, KeyType keyType, KeyedVector<String8, String8> const &optionalParameters, - Vector<uint8_t> &request, String8 &defaultUrl); + Vector<uint8_t> &request, String8 &defaultUrl, + KeyRequestType *keyRequestType); status_t provideKeyResponse(Vector<uint8_t> const &sessionId, Vector<uint8_t> const &response, diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h index 1254d3c..953d711 100644 --- a/include/camera/CameraMetadata.h +++ b/include/camera/CameraMetadata.h @@ -56,7 +56,7 @@ class CameraMetadata { * thread-safety, it simply prevents the camera_metadata_t pointer returned * here from being accidentally invalidated by CameraMetadata operations. */ - const camera_metadata_t* getAndLock(); + const camera_metadata_t* getAndLock() const; /** * Unlock the CameraMetadata for use again. 
After this unlock, the pointer @@ -208,7 +208,7 @@ class CameraMetadata { private: camera_metadata_t *mBuffer; - bool mLocked; + mutable bool mLocked; /** * Check if tag has a given type diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h index c6074fc..ba33ffe 100644 --- a/include/camera/CameraParameters.h +++ b/include/camera/CameraParameters.h @@ -108,6 +108,9 @@ public: */ void getSupportedPreviewFormats(Vector<int>& formats) const; + // Returns true if no keys are present + bool isEmpty() const; + // Parameter keys to communicate between camera application and driver. // The access (read/write, read only, or write only) is viewed from the // perspective of applications, not driver. diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h index f7f06bb..cad275e 100644 --- a/include/camera/ICameraService.h +++ b/include/camera/ICameraService.h @@ -25,8 +25,6 @@ namespace android { class ICamera; class ICameraClient; -class IProCameraUser; -class IProCameraCallbacks; class ICameraServiceListener; class ICameraDeviceUser; class ICameraDeviceCallbacks; @@ -44,7 +42,6 @@ public: GET_NUMBER_OF_CAMERAS = IBinder::FIRST_CALL_TRANSACTION, GET_CAMERA_INFO, CONNECT, - CONNECT_PRO, CONNECT_DEVICE, ADD_LISTENER, REMOVE_LISTENER, @@ -53,6 +50,8 @@ public: GET_LEGACY_PARAMETERS, SUPPORTS_CAMERA_API, CONNECT_LEGACY, + SET_TORCH_MODE, + NOTIFY_SYSTEM_EVENT, }; enum { @@ -66,7 +65,18 @@ public: enum { CAMERA_HAL_API_VERSION_UNSPECIFIED = -1 - }; + }; + + /** + * Keep up-to-date with declarations in + * frameworks/base/services/core/java/com/android/server/camera/CameraService.java + * + * These event codes are intended to be used with the notifySystemEvent call. 
+ */ + enum { + NO_EVENT = 0, + USER_SWITCHED, + }; public: DECLARE_META_INTERFACE(CameraService); @@ -104,13 +114,6 @@ public: /*out*/ sp<ICamera>& device) = 0; - virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb, - int cameraId, - const String16& clientPackageName, - int clientUid, - /*out*/ - sp<IProCameraUser>& device) = 0; - virtual status_t connectDevice( const sp<ICameraDeviceCallbacks>& cameraCb, int cameraId, @@ -142,6 +145,26 @@ public: int clientUid, /*out*/ sp<ICamera>& device) = 0; + + /** + * Turn on or off a camera's torch mode. Torch mode will be turned off by + * camera service if the lastest client binder that turns it on dies. + * + * return values: + * 0: on a successful operation. + * -ENOSYS: the camera device doesn't support this operation. It it returned + * if and only if android.flash.into.available is false. + * -EBUSY: the camera device is opened. + * -EINVAL: camera_id is invalid or clientBinder is NULL when enabling a + * torch mode. + */ + virtual status_t setTorchMode(const String16& cameraId, bool enabled, + const sp<IBinder>& clientBinder) = 0; + + /** + * Notify the camera service of a system event. Should only be called from system_server. + */ + virtual void notifySystemEvent(int eventId, int arg0) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/ICameraServiceListener.h b/include/camera/ICameraServiceListener.h index 0a0e43a..709ff31 100644 --- a/include/camera/ICameraServiceListener.h +++ b/include/camera/ICameraServiceListener.h @@ -66,9 +66,35 @@ public: STATUS_UNKNOWN = 0xFFFFFFFF, }; + /** + * The torch mode status of a camera. + * + * Initial status will be transmitted with onTorchStatusChanged immediately + * after this listener is added to the service listener list. 
+ * + * The enums should be set to values matching + * include/hardware/camera_common.h + */ + enum TorchStatus { + // The camera's torch mode has become not available to use via + // setTorchMode(). + TORCH_STATUS_NOT_AVAILABLE = TORCH_MODE_STATUS_NOT_AVAILABLE, + // The camera's torch mode is off and available to be turned on via + // setTorchMode(). + TORCH_STATUS_AVAILABLE_OFF = TORCH_MODE_STATUS_AVAILABLE_OFF, + // The camera's torch mode is on and available to be turned off via + // setTorchMode(). + TORCH_STATUS_AVAILABLE_ON = TORCH_MODE_STATUS_AVAILABLE_ON, + + // Use to initialize variables only + TORCH_STATUS_UNKNOWN = 0xFFFFFFFF, + }; + DECLARE_META_INTERFACE(CameraServiceListener); virtual void onStatusChanged(Status status, int32_t cameraId) = 0; + + virtual void onTorchStatusChanged(TorchStatus status, const String16& cameraId) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h deleted file mode 100644 index e8abb89..0000000 --- a/include/camera/IProCameraCallbacks.h +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H -#define ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H - -#include <utils/RefBase.h> -#include <binder/IInterface.h> -#include <binder/Parcel.h> -#include <binder/IMemory.h> -#include <utils/Timers.h> -#include <system/camera.h> - -struct camera_metadata; - -namespace android { - -class IProCameraCallbacks : public IInterface -{ - /** - * Keep up-to-date with IProCameraCallbacks.aidl in frameworks/base - */ -public: - DECLARE_META_INTERFACE(ProCameraCallbacks); - - virtual void notifyCallback(int32_t msgType, - int32_t ext1, - int32_t ext2) = 0; - - enum LockStatus { - LOCK_ACQUIRED, - LOCK_RELEASED, - LOCK_STOLEN, - }; - - virtual void onLockStatusChanged(LockStatus newLockStatus) = 0; - - /** Missing by design: implementation is client-side in ProCamera.cpp **/ - // virtual void onBufferReceived(int streamId, - // const CpuConsumer::LockedBufer& buf); - virtual void onResultReceived(int32_t requestId, - camera_metadata* result) = 0; -}; - -// ---------------------------------------------------------------------------- - -class BnProCameraCallbacks : public BnInterface<IProCameraCallbacks> -{ -public: - virtual status_t onTransact( uint32_t code, - const Parcel& data, - Parcel* reply, - uint32_t flags = 0); -}; - -}; // namespace android - -#endif diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h deleted file mode 100644 index 2ccc4d2..0000000 --- a/include/camera/IProCameraUser.h +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_HARDWARE_IPROCAMERAUSER_H -#define ANDROID_HARDWARE_IPROCAMERAUSER_H - -#include <utils/RefBase.h> -#include <binder/IInterface.h> -#include <binder/Parcel.h> -#include <binder/IMemory.h> -#include <utils/String8.h> -#include <camera/IProCameraCallbacks.h> - -struct camera_metadata; - -namespace android { - -class IProCameraUserClient; -class IGraphicBufferProducer; -class Surface; - -class IProCameraUser: public IInterface -{ - /** - * Keep up-to-date with IProCameraUser.aidl in frameworks/base - */ -public: - DECLARE_META_INTERFACE(ProCameraUser); - - virtual void disconnect() = 0; - - // connect to the service, given a callbacks listener - virtual status_t connect(const sp<IProCameraCallbacks>& callbacks) - = 0; - - /** - * Locking - **/ - virtual status_t exclusiveTryLock() = 0; - virtual status_t exclusiveLock() = 0; - virtual status_t exclusiveUnlock() = 0; - - virtual bool hasExclusiveLock() = 0; - - /** - * Request Handling - **/ - - // Note that the callee gets a copy of the metadata. - virtual int submitRequest(struct camera_metadata* metadata, - bool streaming = false) = 0; - virtual status_t cancelRequest(int requestId) = 0; - - virtual status_t deleteStream(int streamId) = 0; - virtual status_t createStream( - int width, int height, int format, - const sp<IGraphicBufferProducer>& bufferProducer, - /*out*/ - int* streamId) = 0; - - // Create a request object from a template. 
- virtual status_t createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request) - = 0; - - // Get static camera metadata - virtual status_t getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info) = 0; - -}; - -// ---------------------------------------------------------------------------- - -class BnProCameraUser: public BnInterface<IProCameraUser> -{ -public: - virtual status_t onTransact( uint32_t code, - const Parcel& data, - Parcel* reply, - uint32_t flags = 0); -}; - -}; // namespace android - -#endif diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h deleted file mode 100644 index e9b687a..0000000 --- a/include/camera/ProCamera.h +++ /dev/null @@ -1,319 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_HARDWARE_PRO_CAMERA_H -#define ANDROID_HARDWARE_PRO_CAMERA_H - -#include <utils/Timers.h> -#include <utils/KeyedVector.h> -#include <gui/IGraphicBufferProducer.h> -#include <system/camera.h> -#include <camera/IProCameraCallbacks.h> -#include <camera/IProCameraUser.h> -#include <camera/Camera.h> -#include <camera/CameraMetadata.h> -#include <camera/ICameraService.h> -#include <gui/CpuConsumer.h> - -#include <gui/Surface.h> - -#include <utils/Condition.h> -#include <utils/Mutex.h> - -#include <camera/CameraBase.h> - -struct camera_metadata; - -namespace android { - -// All callbacks on this class are concurrent -// (they come from separate threads) -class ProCameraListener : virtual public RefBase -{ -public: - virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0; - - // Lock has been acquired. Write operations now available. - virtual void onLockAcquired() = 0; - // Lock has been released with exclusiveUnlock. - virtual void onLockReleased() = 0; - // Lock has been stolen by another client. - virtual void onLockStolen() = 0; - - // Lock free. - virtual void onTriggerNotify(int32_t msgType, int32_t ext1, int32_t ext2) - = 0; - // onFrameAvailable and OnResultReceived can come in with any order, - // use android.sensor.timestamp and LockedBuffer.timestamp to correlate them - - /** - * A new metadata buffer has been received. - * -- Ownership of request passes on to the callee, free with - * free_camera_metadata. - */ - virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0; - - // TODO: make onFrameAvailable pure virtual - - // A new frame buffer has been received for this stream. 
- // -- This callback only fires for createStreamCpu streams - // -- A buffer may be obtained by calling cpuConsumer->lockNextBuffer - // -- Use buf.timestamp to correlate with result's android.sensor.timestamp - // -- The buffer should be accessed with CpuConsumer::lockNextBuffer - // and CpuConsumer::unlockBuffer - virtual void onFrameAvailable(int /*streamId*/, - const sp<CpuConsumer>& /*cpuConsumer*/) { - } - -}; - -class ProCamera; - -template <> -struct CameraTraits<ProCamera> -{ - typedef ProCameraListener TCamListener; - typedef IProCameraUser TCamUser; - typedef IProCameraCallbacks TCamCallbacks; - typedef status_t (ICameraService::*TCamConnectService)(const sp<IProCameraCallbacks>&, - int, const String16&, int, - /*out*/ - sp<IProCameraUser>&); - static TCamConnectService fnConnectService; -}; - - -class ProCamera : - public CameraBase<ProCamera>, - public BnProCameraCallbacks -{ -public: - /** - * Connect a shared camera. By default access is restricted to read only - * (Lock free) operations. To be able to submit custom requests a lock needs - * to be acquired with exclusive[Try]Lock. - */ - static sp<ProCamera> connect(int cameraId); - virtual ~ProCamera(); - - /** - * Exclusive Locks: - * - We may request exclusive access to a camera if no other - * clients are using the camera. This works as a traditional - * client, writing/reading any camera state. - * - An application opening the camera (a regular 'Camera') will - * always steal away the exclusive lock from a ProCamera, - * this will call onLockReleased. - * - onLockAcquired will be called again once it is possible - * to again exclusively lock the camera. - * - */ - - /** - * All exclusiveLock/unlock functions are asynchronous. The remote endpoint - * shall not block while waiting to acquire the lock. Instead the lock - * notifications will come in asynchronously on the listener. 
- */ - - /** - * Attempt to acquire the lock instantly (non-blocking) - * - If this succeeds, you do not need to wait for onLockAcquired - * but the event will still be fired - * - * Returns -EBUSY if already locked. 0 on success. - */ - status_t exclusiveTryLock(); - // always returns 0. wait for onLockAcquired before lock is acquired. - status_t exclusiveLock(); - // release a lock if we have one, or cancel the lock request. - status_t exclusiveUnlock(); - - // exclusive lock = do whatever we want. no lock = read only. - bool hasExclusiveLock(); - - /** - * < 0 error, >= 0 the request ID. streaming to have the request repeat - * until cancelled. - * The request queue is flushed when a lock is released or stolen - * if not locked will return PERMISSION_DENIED - */ - int submitRequest(const struct camera_metadata* metadata, - bool streaming = false); - // if not locked will return PERMISSION_DENIED, BAD_VALUE if requestId bad - status_t cancelRequest(int requestId); - - /** - * Ask for a stream to be enabled. - * Lock free. Service maintains counter of streams. - */ - status_t requestStream(int streamId); -// TODO: remove requestStream, its useless. - - /** - * Delete a stream. - * Lock free. - * - * NOTE: As a side effect this cancels ALL streaming requests. - * - * Errors: BAD_VALUE if unknown stream ID. - * PERMISSION_DENIED if the stream wasn't yours - */ - status_t deleteStream(int streamId); - - /** - * Create a new HW stream, whose sink will be the window. - * Lock free. Service maintains counter of streams. - * Errors: -EBUSY if too many streams created - */ - status_t createStream(int width, int height, int format, - const sp<Surface>& surface, - /*out*/ - int* streamId); - - /** - * Create a new HW stream, whose sink will be the SurfaceTexture. - * Lock free. Service maintains counter of streams. 
- * Errors: -EBUSY if too many streams created - */ - status_t createStream(int width, int height, int format, - const sp<IGraphicBufferProducer>& bufferProducer, - /*out*/ - int* streamId); - status_t createStreamCpu(int width, int height, int format, - int heapCount, - /*out*/ - sp<CpuConsumer>* cpuConsumer, - int* streamId); - status_t createStreamCpu(int width, int height, int format, - int heapCount, - bool synchronousMode, - /*out*/ - sp<CpuConsumer>* cpuConsumer, - int* streamId); - - // Create a request object from a template. - status_t createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request) const; - - // Get static camera metadata - camera_metadata* getCameraInfo(int cameraId); - - // Blocks until a frame is available (CPU streams only) - // - Obtain the frame data by calling CpuConsumer::lockNextBuffer - // - Release the frame data after use with CpuConsumer::unlockBuffer - // Return value: - // - >0 - number of frames available to be locked - // - <0 - error (refer to error codes) - // Error codes: - // -ETIMEDOUT if it took too long to get a frame - int waitForFrameBuffer(int streamId); - - // Blocks until a metadata result is available - // - Obtain the metadata by calling consumeFrameMetadata() - // Error codes: - // -ETIMEDOUT if it took too long to get a frame - status_t waitForFrameMetadata(); - - // Get the latest metadata. This is destructive. - // - Calling this repeatedly will produce empty metadata objects. - // - Use waitForFrameMetadata to sync until new data is available. 
- CameraMetadata consumeFrameMetadata(); - - // Convenience method to drop frame buffers (CPU streams only) - // Return values: - // >=0 - number of frames dropped (up to count) - // <0 - error code - // Error codes: - // BAD_VALUE - invalid streamId or count passed - int dropFrameBuffer(int streamId, int count); - -protected: - //////////////////////////////////////////////////////// - // IProCameraCallbacks implementation - //////////////////////////////////////////////////////// - virtual void notifyCallback(int32_t msgType, - int32_t ext, - int32_t ext2); - - virtual void onLockStatusChanged( - IProCameraCallbacks::LockStatus newLockStatus); - - virtual void onResultReceived(int32_t requestId, - camera_metadata* result); -private: - ProCamera(int cameraId); - - class ProFrameListener : public CpuConsumer::FrameAvailableListener { - public: - ProFrameListener(wp<ProCamera> camera, int streamID) { - mCamera = camera; - mStreamId = streamID; - } - - protected: - virtual void onFrameAvailable(const BufferItem& /* item */) { - sp<ProCamera> c = mCamera.promote(); - if (c.get() != NULL) { - c->onFrameAvailable(mStreamId); - } - } - - private: - wp<ProCamera> mCamera; - int mStreamId; - }; - friend class ProFrameListener; - - struct StreamInfo - { - StreamInfo(int streamId) { - this->streamID = streamId; - cpuStream = false; - frameReady = 0; - } - - StreamInfo() { - streamID = -1; - cpuStream = false; - } - - int streamID; - bool cpuStream; - sp<CpuConsumer> cpuConsumer; - bool synchronousMode; - sp<ProFrameListener> frameAvailableListener; - sp<Surface> stc; - int frameReady; - }; - - Condition mWaitCondition; - Mutex mWaitMutex; - static const nsecs_t mWaitTimeout = 1000000000; // 1sec - KeyedVector<int, StreamInfo> mStreams; - bool mMetadataReady; - CameraMetadata mLatestMetadata; - - void onFrameAvailable(int streamId); - - StreamInfo& getStreamInfo(int streamId); - - friend class CameraBase; -}; - -}; // namespace android - -#endif diff --git 
a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h index 35488bb..e9f1f5a 100644 --- a/include/camera/camera2/ICameraDeviceUser.h +++ b/include/camera/camera2/ICameraDeviceUser.h @@ -27,9 +27,9 @@ namespace android { class ICameraDeviceUserClient; class IGraphicBufferProducer; -class Surface; class CaptureRequest; class CameraMetadata; +class OutputConfiguration; enum { NO_IN_FLIGHT_REPEATING_FRAMES = -1, @@ -100,9 +100,8 @@ public: virtual status_t endConfigure() = 0; virtual status_t deleteStream(int streamId) = 0; - virtual status_t createStream( - int width, int height, int format, - const sp<IGraphicBufferProducer>& bufferProducer) = 0; + + virtual status_t createStream(const OutputConfiguration& outputConfiguration) = 0; // Create a request object from a template. virtual status_t createDefaultRequest(int templateId, diff --git a/include/camera/camera2/OutputConfiguration.h b/include/camera/camera2/OutputConfiguration.h new file mode 100644 index 0000000..e6b679f --- /dev/null +++ b/include/camera/camera2/OutputConfiguration.h @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_CAMERA2_OUTPUTCONFIGURATION_H +#define ANDROID_HARDWARE_CAMERA2_OUTPUTCONFIGURATION_H + +#include <utils/RefBase.h> +#include <gui/IGraphicBufferProducer.h> + +namespace android { + +class Surface; + +class OutputConfiguration : public virtual RefBase { +public: + + static const int INVALID_ROTATION; + sp<IGraphicBufferProducer> getGraphicBufferProducer() const; + int getRotation() const; + + /** + * Keep impl up-to-date with OutputConfiguration.java in frameworks/base + */ + status_t writeToParcel(Parcel& parcel) const; + // getGraphicBufferProducer will be NULL if error occurred + // getRotation will be INVALID_ROTATION if error occurred + OutputConfiguration(const Parcel& parcel); + +private: + sp<IGraphicBufferProducer> mGbp; + int mRotation; + + // helper function + static String16 readMaybeEmptyString16(const Parcel& parcel); +}; +}; // namespace android + +#endif diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index f70d981..7be2c3e 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -42,8 +42,7 @@ public: EVENT_MORE_DATA = 0, // Request to read available data from buffer. // If this event is delivered but the callback handler // does not want to read the available data, the handler must - // explicitly - // ignore the event by setting frameCount to zero. + // explicitly ignore the event by setting frameCount to zero. EVENT_OVERRUN = 1, // Buffer overrun occurred. EVENT_MARKER = 2, // Record head is at the specified marker position // (See setMarkerPosition()). @@ -53,7 +52,7 @@ public: // voluntary invalidation by mediaserver, or mediaserver crash. }; - /* Client should declare Buffer on the stack and pass address to obtainBuffer() + /* Client should declare a Buffer and pass address to obtainBuffer() * and releaseBuffer(). See also callback_t for EVENT_MORE_DATA. 
*/ @@ -62,20 +61,25 @@ public: public: // FIXME use m prefix size_t frameCount; // number of sample frames corresponding to size; - // on input it is the number of frames available, - // on output is the number of frames actually drained - // (currently ignored but will make the primary field in future) + // on input to obtainBuffer() it is the number of frames desired + // on output from obtainBuffer() it is the number of available + // frames to be read + // on input to releaseBuffer() it is currently ignored size_t size; // input/output in bytes == frameCount * frameSize - // on output is the number of bytes actually drained - // FIXME this is redundant with respect to frameCount, - // and TRANSFER_OBTAIN mode is broken for 8-bit data - // since we don't define the frame format + // on input to obtainBuffer() it is ignored + // on output from obtainBuffer() it is the number of available + // bytes to be read, which is frameCount * frameSize + // on input to releaseBuffer() it is the number of bytes to + // release + // FIXME This is redundant with respect to frameCount. Consider + // removing size and making frameCount the primary field. union { void* raw; short* i16; // signed 16-bit int8_t* i8; // unsigned 8-bit, offset by 0x80 + // input to obtainBuffer(): unused, output: pointer to buffer }; }; @@ -88,8 +92,8 @@ public: * user: Pointer to context for use by the callback receiver. * info: Pointer to optional parameter according to event type: * - EVENT_MORE_DATA: pointer to AudioRecord::Buffer struct. The callback must not read - * more bytes than indicated by 'size' field and update 'size' if fewer bytes are - * consumed. + * more bytes than indicated by 'size' field and update 'size' if + * fewer bytes are consumed. * - EVENT_OVERRUN: unused. * - EVENT_MARKER: pointer to const uint32_t containing the marker position in frames. * - EVENT_NEW_POS: pointer to const uint32_t containing the new position in frames. 
@@ -106,6 +110,7 @@ public: * - BAD_VALUE: unsupported configuration * frameCount is guaranteed to be non-zero if status is NO_ERROR, * and is undefined otherwise. + * FIXME This API assumes a route, and so should be deprecated. */ static status_t getMinFrameCount(size_t* frameCount, @@ -118,7 +123,7 @@ public: enum transfer_type { TRANSFER_DEFAULT, // not specified explicitly; determine from the other parameters TRANSFER_CALLBACK, // callback EVENT_MORE_DATA - TRANSFER_OBTAIN, // FIXME deprecated: call obtainBuffer() and releaseBuffer() + TRANSFER_OBTAIN, // call obtainBuffer() and releaseBuffer() TRANSFER_SYNC, // synchronous read() }; @@ -144,15 +149,16 @@ public: * be larger if the requested size is not compatible with current audio HAL * latency. Zero means to use a default value. * cbf: Callback function. If not null, this function is called periodically - * to consume new data and inform of marker, position updates, etc. + * to consume new data in TRANSFER_CALLBACK mode + * and inform of marker, position updates, etc. * user: Context for use by the callback receiver. * notificationFrames: The callback function is called each time notificationFrames PCM * frames are ready in record track output buffer. * sessionId: Not yet supported. * transferType: How data is transferred from AudioRecord. * flags: See comments on audio_input_flags_t in <system/audio.h> + * pAttributes: If not NULL, supersedes inputSource for use case selection. * threadCanCallJava: Not present in parameter list, and so is fixed at false. - * pAttributes: if not NULL, supersedes inputSource for use case selection */ AudioRecord(audio_source_t inputSource, @@ -177,6 +183,7 @@ public: /* Initialize an AudioRecord that was created using the AudioRecord() constructor. * Don't call set() more than once, or after an AudioRecord() constructor that takes parameters. + * set() is not multi-thread safe. 
* Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful intialization * - INVALID_OPERATION: AudioRecord is already initialized or record device is already in use @@ -211,7 +218,7 @@ public: status_t initCheck() const { return mStatus; } /* Returns this track's estimated latency in milliseconds. - * This includes the latency due to AudioRecord buffer size, + * This includes the latency due to AudioRecord buffer size, resampling if applicable, * and audio hardware driver. */ uint32_t latency() const { return mLatency; } @@ -243,11 +250,6 @@ public: */ uint32_t getSampleRate() const { return mSampleRate; } - /* Return the notification frame count. - * This is approximately how often the callback is invoked, for transfer type TRANSFER_CALLBACK. - */ - size_t notificationFrames() const { return mNotificationFramesAct; } - /* Sets marker position. When record reaches the number of frames specified, * a callback with event type EVENT_MARKER is called. Calling setMarkerPosition * with marker == 0 cancels marker notification callback. @@ -309,7 +311,12 @@ public: * Returned value: * handle on audio hardware input */ - audio_io_handle_t getInput() const; +// FIXME The only known public caller is frameworks/opt/net/voip/src/jni/rtp/AudioGroup.cpp + audio_io_handle_t getInput() const __attribute__((__deprecated__)) + { return getInputPrivate(); } +private: + audio_io_handle_t getInputPrivate() const; +public: /* Returns the audio session ID associated with this AudioRecord. * @@ -323,7 +330,8 @@ public: */ int getSessionId() const { return mSessionId; } - /* Obtains a buffer of up to "audioBuffer->frameCount" full frames. + /* Public API for TRANSFER_OBTAIN mode. + * Obtains a buffer of up to "audioBuffer->frameCount" full frames. * After draining these frames of data, the caller should release them with releaseBuffer(). * If the track buffer is not empty, obtainBuffer() returns as many contiguous * full frames as are available immediately. 
@@ -336,9 +344,6 @@ public: * or return WOULD_BLOCK depending on the value of the "waitCount" * parameter. * - * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications, - * which should use read() or callback EVENT_MORE_DATA instead. - * * Interpretation of waitCount: * +n limits wait time to n * WAIT_PERIOD_MS, * -1 causes an (almost) infinite wait time, @@ -347,6 +352,8 @@ public: * Buffer fields * On entry: * frameCount number of frames requested + * size ignored + * raw ignored * After error return: * frameCount 0 * size 0 @@ -357,9 +364,7 @@ public: * raw pointer to the buffer */ - /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */ - status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount) - __attribute__((__deprecated__)); + status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount); private: /* If nonContig is non-NULL, it is an output parameter that will be set to the number of @@ -372,9 +377,15 @@ private: struct timespec *elapsed = NULL, size_t *nonContig = NULL); public: - /* Release an emptied buffer of "audioBuffer->frameCount" frames for AudioFlinger to re-fill. */ - // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed - void releaseBuffer(Buffer* audioBuffer); + /* Public API for TRANSFER_OBTAIN mode. + * Release an emptied buffer of "audioBuffer->frameCount" frames for AudioFlinger to re-fill. + * + * Buffer fields: + * frameCount currently ignored but recommend to set to actual number of frames consumed + * size actual number of bytes consumed, must be multiple of frameSize + * raw ignored + */ + void releaseBuffer(const Buffer* audioBuffer); /* As a convenience we provide a read() interface to the audio buffer. * Input parameter 'size' is in byte units. @@ -386,8 +397,11 @@ public: * WOULD_BLOCK when obtainBuffer() returns same, or * AudioRecord was stopped during the read * or any other error code returned by IAudioRecord::start() or restoreRecord_l(). 
+ * Default behavior is to only return when all data has been transferred. Set 'blocking' to + * false for the method to return immediately without waiting to try multiple times to read + * the full content of the buffer. */ - ssize_t read(void* buffer, size_t size); + ssize_t read(void* buffer, size_t size, bool blocking = true); /* Return the number of input frames lost in the audio driver since the last call of this * function. Audio driver is expected to reset the value to 0 and restart counting upon @@ -416,6 +430,7 @@ private: void pause(); // suspend thread from execution at next loop boundary void resume(); // allow thread to execute, if not requested to exit + void wake(); // wake to handle changed notification conditions. private: void pauseInternal(nsecs_t ns = 0LL); @@ -430,7 +445,9 @@ private: bool mPaused; // whether thread is requested to pause at next loop entry bool mPausedInt; // whether thread internally requests pause nsecs_t mPausedNs; // if mPausedInt then associated timeout, otherwise ignored - bool mIgnoreNextPausedInt; // whether to ignore next mPausedInt request + bool mIgnoreNextPausedInt; // skip any internal pause and go immediately + // to processAudioBuffer() as state may have changed + // since pause time calculated. 
}; // body of AudioRecordThread::threadLoop() @@ -458,7 +475,7 @@ private: bool mActive; // for client callback handler - callback_t mCbf; // callback handler for events, or NULL + callback_t mCbf; // callback handler for events, or NULL void* mUserData; // for notification APIs @@ -475,10 +492,10 @@ private: bool mRetryOnPartialBuffer; // sleep and retry after partial obtainBuffer() uint32_t mObservedSequence; // last observed value of mSequence - uint32_t mMarkerPosition; // in wrapping (overflow) frame units + uint32_t mMarkerPosition; // in wrapping (overflow) frame units bool mMarkerReached; - uint32_t mNewPosition; // in frames - uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS + uint32_t mNewPosition; // in frames + uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS status_t mStatus; diff --git a/include/media/AudioResamplerPublic.h b/include/media/AudioResamplerPublic.h index 97847a0..b705efa 100644 --- a/include/media/AudioResamplerPublic.h +++ b/include/media/AudioResamplerPublic.h @@ -26,4 +26,17 @@ // TODO: replace with an API #define AUDIO_RESAMPLER_DOWN_RATIO_MAX 256 +// Returns the source frames needed to resample to destination frames. This is not a precise +// value and depends on the resampler (and possibly how it handles rounding internally). +// Nevertheless, this should be an upper bound on the requirements of the resampler. +// If srcSampleRate and dstSampleRate are equal, then it returns destination frames, which +// may not be true if the resampler is asynchronous. +static inline size_t sourceFramesNeeded( + uint32_t srcSampleRate, size_t dstFramesRequired, uint32_t dstSampleRate) { + // +1 for rounding - always do this even if matched ratio (resampler may use phases not ratio) + // +1 for additional sample needed for interpolation + return srcSampleRate == dstSampleRate ? 
dstFramesRequired : + size_t((uint64_t)dstFramesRequired * srcSampleRate / dstSampleRate + 1 + 1); +} + #endif // ANDROID_AUDIO_RESAMPLER_PUBLIC_H diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 843a354..f5db1bb 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -98,10 +98,13 @@ public: // Returned samplingRate and frameCount output values are guaranteed // to be non-zero if status == NO_ERROR + // FIXME This API assumes a route, and so should be deprecated. static status_t getOutputSamplingRate(uint32_t* samplingRate, audio_stream_type_t stream); + // FIXME This API assumes a route, and so should be deprecated. static status_t getOutputFrameCount(size_t* frameCount, audio_stream_type_t stream); + // FIXME This API assumes a route, and so should be deprecated. static status_t getOutputLatency(uint32_t* latency, audio_stream_type_t stream); static status_t getSamplingRate(audio_io_handle_t output, @@ -110,19 +113,20 @@ public: // audio_stream->get_buffer_size()/audio_stream_out_frame_size() static status_t getFrameCount(audio_io_handle_t output, size_t* frameCount); - // returns the audio output stream latency in ms. Corresponds to + // returns the audio output latency in ms. Corresponds to // audio_stream_out->get_latency() static status_t getLatency(audio_io_handle_t output, uint32_t* latency); // return status NO_ERROR implies *buffSize > 0 + // FIXME This API assumes a route, and so should deprecated. static status_t getInputBufferSize(uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask, size_t* buffSize); static status_t setVoiceVolume(float volume); // return the number of audio frames written by AudioFlinger to audio HAL and - // audio dsp to DAC since the specified output I/O handle has exited standby. + // audio dsp to DAC since the specified output has exited standby. 
// returned status (from utils/Errors.h) can be: // - NO_ERROR: successful operation, halFrames and dspFrames point to valid data // - INVALID_OPERATION: Not supported on current hardware platform @@ -201,7 +205,7 @@ public: // IAudioPolicyService interface (see AudioPolicyInterface for method descriptions) // static status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address); + const char *device_address, const char *device_name); static audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device, const char *device_address); static status_t setPhoneState(audio_mode_t state); @@ -342,7 +346,8 @@ public: }; - static void setAudioPortCallback(sp<AudioPortCallback> callBack); + static status_t addAudioPortCallback(const sp<AudioPortCallback>& callBack); + static status_t removeAudioPortCallback(const sp<AudioPortCallback>& callBack); private: @@ -369,12 +374,19 @@ private: AudioPolicyServiceClient() { } + status_t addAudioPortCallback(const sp<AudioPortCallback>& callBack); + status_t removeAudioPortCallback(const sp<AudioPortCallback>& callBack); + // DeathRecipient virtual void binderDied(const wp<IBinder>& who); // IAudioPolicyServiceClient virtual void onAudioPortListUpdate(); virtual void onAudioPatchListUpdate(); + + private: + Mutex mLock; + Vector <sp <AudioPortCallback> > mAudioPortCallbacks; }; static sp<AudioFlingerClient> gAudioFlingerClient; @@ -386,7 +398,6 @@ private: static Mutex gLockCache; // protects gOutputs, gPrevInSamplingRate, gPrevInFormat, // gPrevInChannelMask and gInBuffSize static Mutex gLockAPS; // protects gAudioPolicyService and gAudioPolicyServiceClient - static Mutex gLockAPC; // protects gAudioPortCallback static sp<IAudioFlinger> gAudioFlinger; static audio_error_callback gAudioErrorCallback; @@ -401,8 +412,6 @@ private: // list of output descriptors containing cached parameters // (sampling rate, framecount, channel count...) 
static DefaultKeyedVector<audio_io_handle_t, OutputDescriptor *> gOutputs; - - static sp<AudioPortCallback> gAudioPortCallback; }; }; // namespace android diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index fd51b8f..d9b7057 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -63,7 +63,7 @@ public: // See AudioTimestamp for the information included with event. }; - /* Client should declare Buffer on the stack and pass address to obtainBuffer() + /* Client should declare a Buffer and pass the address to obtainBuffer() * and releaseBuffer(). See also callback_t for EVENT_MORE_DATA. */ @@ -72,22 +72,26 @@ public: public: // FIXME use m prefix size_t frameCount; // number of sample frames corresponding to size; - // on input it is the number of frames desired, - // on output is the number of frames actually filled - // (currently ignored, but will make the primary field in future) + // on input to obtainBuffer() it is the number of frames desired, + // on output from obtainBuffer() it is the number of available + // [empty slots for] frames to be filled + // on input to releaseBuffer() it is currently ignored size_t size; // input/output in bytes == frameCount * frameSize - // on input it is unused - // on output is the number of bytes actually filled - // FIXME this is redundant with respect to frameCount, - // and TRANSFER_OBTAIN mode is broken for 8-bit data - // since we don't define the frame format + // on input to obtainBuffer() it is ignored + // on output from obtainBuffer() it is the number of available + // [empty slots for] bytes to be filled, + // which is frameCount * frameSize + // on input to releaseBuffer() it is the number of bytes to + // release + // FIXME This is redundant with respect to frameCount. Consider + // removing size and making frameCount the primary field. 
union { void* raw; short* i16; // signed 16-bit int8_t* i8; // unsigned 8-bit, offset by 0x80 - }; // input: unused, output: pointer to buffer + }; // input to obtainBuffer(): unused, output: pointer to buffer }; /* As a convenience, if a callback is supplied, a handler thread @@ -121,6 +125,7 @@ public: * - BAD_VALUE: unsupported configuration * frameCount is guaranteed to be non-zero if status is NO_ERROR, * and is undefined otherwise. + * FIXME This API assumes a route, and so should be deprecated. */ static status_t getMinFrameCount(size_t* frameCount, @@ -132,7 +137,7 @@ public: enum transfer_type { TRANSFER_DEFAULT, // not specified explicitly; determine from the other parameters TRANSFER_CALLBACK, // callback EVENT_MORE_DATA - TRANSFER_OBTAIN, // FIXME deprecated: call obtainBuffer() and releaseBuffer() + TRANSFER_OBTAIN, // call obtainBuffer() and releaseBuffer() TRANSFER_SYNC, // synchronous write() TRANSFER_SHARED, // shared memory }; @@ -145,18 +150,15 @@ public: /* Creates an AudioTrack object and registers it with AudioFlinger. * Once created, the track needs to be started before it can be used. * Unspecified values are set to appropriate default values. - * With this constructor, the track is configured for streaming mode. - * Data to be rendered is supplied by write() or by the callback EVENT_MORE_DATA. - * Intermixing a combination of write() and non-ignored EVENT_MORE_DATA is not allowed. * * Parameters: * * streamType: Select the type of audio stream this track is attached to * (e.g. AUDIO_STREAM_MUSIC). * sampleRate: Data source sampling rate in Hz. - * format: Audio format. For mixed tracks, any PCM format supported by server is OK - * or AUDIO_FORMAT_PCM_8_BIT which is handled on client side. For direct - * and offloaded tracks, the possible format(s) depends on the output sink. + * format: Audio format. For mixed tracks, any PCM format supported by server is OK. 
+ * For direct and offloaded tracks, the possible format(s) depends on the + * output sink. * channelMask: Channel mask, such that audio_is_output_channel(channelMask) is true. * frameCount: Minimum size of track PCM buffer in frames. This defines the * application's contribution to the @@ -165,20 +167,28 @@ public: * configuration. Zero means to use a default value. * flags: See comments on audio_output_flags_t in <system/audio.h>. * cbf: Callback function. If not null, this function is called periodically - * to provide new data and inform of marker, position updates, etc. + * to provide new data in TRANSFER_CALLBACK mode + * and inform of marker, position updates, etc. * user: Context for use by the callback receiver. * notificationFrames: The callback function is called each time notificationFrames PCM * frames have been consumed from track input buffer. * This is expressed in units of frames at the initial source sample rate. * sessionId: Specific session ID, or zero to use default. * transferType: How data is transferred to AudioTrack. + * offloadInfo: If not NULL, provides offload parameters for + * AudioSystem::getOutputForAttr(). + * uid: User ID of the app which initially requested this AudioTrack + * for power management tracking, or -1 for current user ID. + * pid: Process ID of the app which initially requested this AudioTrack + * for power management tracking, or -1 for current process ID. + * pAttributes: If not NULL, supersedes streamType for use case selection. * threadCanCallJava: Not present in parameter list, and so is fixed at false. */ AudioTrack( audio_stream_type_t streamType, uint32_t sampleRate, audio_format_t format, - audio_channel_mask_t, + audio_channel_mask_t channelMask, size_t frameCount = 0, audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE, callback_t cbf = NULL, @@ -193,9 +203,10 @@ public: /* Creates an audio track and registers it with AudioFlinger. * With this constructor, the track is configured for static buffer mode. 
- * The format must not be 8-bit linear PCM. * Data to be rendered is passed in a shared memory buffer - * identified by the argument sharedBuffer, which must be non-0. + * identified by the argument sharedBuffer, which should be non-0. + * If sharedBuffer is zero, this constructor is equivalent to the previous constructor + * but without the ability to specify a non-zero value for the frameCount parameter. * The memory should be initialized to the desired data before calling start(). * The write() method is not supported in this case. * It is recommended to pass a callback function to be notified of playback end by an @@ -227,6 +238,7 @@ public: /* Initialize an AudioTrack that was created using the AudioTrack() constructor. * Don't call set() more than once, or after the AudioTrack() constructors that take parameters. + * set() is not multi-thread safe. * Returned status (from utils/Errors.h) can be: * - NO_ERROR: successful initialization * - INVALID_OPERATION: AudioTrack is already initialized @@ -461,7 +473,9 @@ public: * handle on audio hardware output, or AUDIO_IO_HANDLE_NONE if the * track needed to be re-created but that failed */ +private: audio_io_handle_t getOutput() const; +public: /* Returns the unique session ID associated with this track. * @@ -487,10 +501,18 @@ public: */ status_t attachAuxEffect(int effectId); - /* Obtains a buffer of up to "audioBuffer->frameCount" empty slots for frames. + /* Public API for TRANSFER_OBTAIN mode. + * Obtains a buffer of up to "audioBuffer->frameCount" empty slots for frames. * After filling these slots with data, the caller should release them with releaseBuffer(). * If the track buffer is not full, obtainBuffer() returns as many contiguous * [empty slots for] frames as are available immediately. 
+ * + * If nonContig is non-NULL, it is an output parameter that will be set to the number of + * additional non-contiguous frames that are predicted to be available immediately, + * if the client were to release the first frames and then call obtainBuffer() again. + * This value is only a prediction, and needs to be confirmed. + * It will be set to zero for an error return. + * * If the track buffer is full and track is stopped, obtainBuffer() returns WOULD_BLOCK * regardless of the value of waitCount. * If the track buffer is full and track is not stopped, obtainBuffer() blocks with a @@ -499,10 +521,6 @@ public: * is exhausted, at which point obtainBuffer() will either block * or return WOULD_BLOCK depending on the value of the "waitCount" * parameter. - * Each sample is 16-bit signed PCM. - * - * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications, - * which should use write() or callback EVENT_MORE_DATA instead. * * Interpretation of waitCount: * +n limits wait time to n * WAIT_PERIOD_MS, @@ -511,24 +529,27 @@ public: * * Buffer fields * On entry: - * frameCount number of frames requested + * frameCount number of [empty slots for] frames requested + * size ignored + * raw ignored * After error return: * frameCount 0 * size 0 * raw undefined * After successful return: - * frameCount actual number of frames available, <= number requested + * frameCount actual number of [empty slots for] frames available, <= number requested * size actual number of bytes available * raw pointer to the buffer */ - - /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */ - status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount) - __attribute__((__deprecated__)); + status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount, + size_t *nonContig = NULL); private: /* If nonContig is non-NULL, it is an output parameter that will be set to the number of - * additional non-contiguous frames that are available immediately. 
+ * additional non-contiguous frames that are predicted to be available immediately, + * if the client were to release the first frames and then call obtainBuffer() again. + * This value is only a prediction, and needs to be confirmed. + * It will be set to zero for an error return. * FIXME We could pass an array of Buffers instead of only one Buffer to obtainBuffer(), * in case the requested amount of frames is in two or more non-contiguous regions. * FIXME requested and elapsed are both relative times. Consider changing to absolute time. @@ -537,9 +558,15 @@ private: struct timespec *elapsed = NULL, size_t *nonContig = NULL); public: - /* Release a filled buffer of "audioBuffer->frameCount" frames for AudioFlinger to process. */ - // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed - void releaseBuffer(Buffer* audioBuffer); + /* Public API for TRANSFER_OBTAIN mode. + * Release a filled buffer of frames for AudioFlinger to process. + * + * Buffer fields: + * frameCount currently ignored but recommend to set to actual number of frames filled + * size actual number of bytes filled, must be multiple of frameSize + * raw ignored + */ + void releaseBuffer(const Buffer* audioBuffer); /* As a convenience we provide a write() interface to the audio buffer. * Input parameter 'size' is in byte units. @@ -551,7 +578,7 @@ public: * WOULD_BLOCK when obtainBuffer() returns same, or * AudioTrack was stopped during the write * or any other error code returned by IAudioTrack::start() or restoreTrack_l(). - * Default behavior is to only return until all data has been transferred. Set 'blocking' to + * Default behavior is to only return when all data has been transferred. Set 'blocking' to * false for the method to return immediately without waiting to try multiple times to write * the full content of the buffer. */ @@ -559,6 +586,7 @@ public: /* * Dumps the state of an audio track. 
+ * Not a general-purpose API; intended only for use by media player service to dump its tracks. */ status_t dump(int fd, const Vector<String16>& args) const; @@ -600,8 +628,6 @@ protected: AudioTrack(const AudioTrack& other); AudioTrack& operator = (const AudioTrack& other); - void setAttributesFromStreamType(audio_stream_type_t streamType); - /* a small internal class to handle the callback */ class AudioTrackThread : public Thread { @@ -614,6 +640,7 @@ protected: void pause(); // suspend thread from execution at next loop boundary void resume(); // allow thread to execute, if not requested to exit + void wake(); // wake to handle changed notification conditions. private: void pauseInternal(nsecs_t ns = 0LL); @@ -628,7 +655,9 @@ protected: bool mPaused; // whether thread is requested to pause at next loop entry bool mPausedInt; // whether thread internally requests pause nsecs_t mPausedNs; // if mPausedInt then associated timeout, otherwise ignored - bool mIgnoreNextPausedInt; // whether to ignore next mPausedInt request + bool mIgnoreNextPausedInt; // skip any internal pause and go immediately + // to processAudioBuffer() as state may have changed + // since pause time calculated. 
}; // body of AudioTrackThread::threadLoop() @@ -641,10 +670,6 @@ protected: static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3; nsecs_t processAudioBuffer(); - bool isOffloaded() const; - bool isDirect() const; - bool isOffloadedOrDirect() const; - // caller must hold lock on mLock for all _l methods status_t createTrack_l(); @@ -657,6 +682,10 @@ protected: // FIXME enum is faster than strcmp() for parameter 'from' status_t restoreTrack_l(const char *from); + bool isOffloaded() const; + bool isDirect() const; + bool isOffloadedOrDirect() const; + bool isOffloaded_l() const { return (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; } @@ -680,7 +709,7 @@ protected: float mVolume[2]; float mSendLevel; - mutable uint32_t mSampleRate; // mutable because getSampleRate() can update it. + mutable uint32_t mSampleRate; // mutable because getSampleRate() can update it size_t mFrameCount; // corresponds to current IAudioTrack, value is // reported back by AudioFlinger to the client size_t mReqFrameCount; // frame count to request the first or next time @@ -698,10 +727,7 @@ protected: const audio_offload_info_t* mOffloadInfo; audio_attributes_t mAttributes; - // mFrameSize is equal to mFrameSizeAF for non-PCM or 16-bit PCM data. For 8-bit PCM data, it's - // twice as large as mFrameSize because data is expanded to 16-bit before it's stored in buffer. - size_t mFrameSize; // app-level frame size - size_t mFrameSizeAF; // AudioFlinger frame size + size_t mFrameSize; // frame size in bytes status_t mStatus; @@ -732,17 +758,25 @@ protected: bool mRefreshRemaining; // processAudioBuffer() should refresh // mRemainingFrames and mRetryOnPartialBuffer + // used for static track cbf and restoration + int32_t mLoopCount; // last setLoop loopCount; zero means disabled + uint32_t mLoopStart; // last setLoop loopStart + uint32_t mLoopEnd; // last setLoop loopEnd + int32_t mLoopCountNotified; // the last loopCount notified by callback. 
+ // mLoopCountNotified counts down, matching + // the remaining loop count for static track + // playback. + // These are private to processAudioBuffer(), and are not protected by a lock uint32_t mRemainingFrames; // number of frames to request in obtainBuffer() bool mRetryOnPartialBuffer; // sleep and retry after partial obtainBuffer() uint32_t mObservedSequence; // last observed value of mSequence - uint32_t mLoopPeriod; // in frames, zero means looping is disabled - uint32_t mMarkerPosition; // in wrapping (overflow) frame units bool mMarkerReached; uint32_t mNewPosition; // in frames uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS + uint32_t mServer; // in frames, last known mProxy->getPosition() // which is count of frames consumed by server, // reset by new IAudioTrack, diff --git a/include/media/EffectsFactoryApi.h b/include/media/EffectsFactoryApi.h index b1ed7b0..64a3212 100644 --- a/include/media/EffectsFactoryApi.h +++ b/include/media/EffectsFactoryApi.h @@ -171,6 +171,8 @@ int EffectGetDescriptor(const effect_uuid_t *pEffectUuid, effect_descriptor_t *p //////////////////////////////////////////////////////////////////////////////// int EffectIsNullUuid(const effect_uuid_t *pEffectUuid); +int EffectDumpEffects(int fd); + #if __cplusplus } // extern "C" #endif diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h index 31a14f0..f927a80 100644 --- a/include/media/IAudioFlinger.h +++ b/include/media/IAudioFlinger.h @@ -94,6 +94,8 @@ public: sp<IMemory>& buffers, // return value 0 means it follows cblk status_t *status) = 0; + // FIXME Surprisingly, sampleRate/format/frameCount/latency don't work for input handles + /* query the audio hardware state. This state never changes, * and therefore can be cached. */ @@ -142,6 +144,7 @@ public: virtual void registerClient(const sp<IAudioFlingerClient>& client) = 0; // retrieve the audio recording buffer size + // FIXME This API assumes a route, and so should be deprecated. 
virtual size_t getInputBufferSize(uint32_t sampleRate, audio_format_t format, audio_channel_mask_t channelMask) const = 0; diff --git a/include/media/IAudioPolicyService.h b/include/media/IAudioPolicyService.h index c98c475..fecc6f1 100644 --- a/include/media/IAudioPolicyService.h +++ b/include/media/IAudioPolicyService.h @@ -44,7 +44,8 @@ public: // virtual status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address) = 0; + const char *device_address, + const char *device_name) = 0; virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device, const char *device_address) = 0; virtual status_t setPhoneState(audio_mode_t state) = 0; diff --git a/include/media/IDrm.h b/include/media/IDrm.h index affcbd7..9449beb 100644 --- a/include/media/IDrm.h +++ b/include/media/IDrm.h @@ -47,7 +47,8 @@ struct IDrm : public IInterface { Vector<uint8_t> const &initData, String8 const &mimeType, DrmPlugin::KeyType keyType, KeyedVector<String8, String8> const &optionalParameters, - Vector<uint8_t> &request, String8 &defaultUrl) = 0; + Vector<uint8_t> &request, String8 &defaultUrl, + DrmPlugin::KeyRequestType *keyRequestType) = 0; virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId, Vector<uint8_t> const &response, diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h index db62cd5..4153c25 100644 --- a/include/media/IMediaPlayer.h +++ b/include/media/IMediaPlayer.h @@ -56,6 +56,7 @@ public: virtual status_t stop() = 0; virtual status_t pause() = 0; virtual status_t isPlaying(bool* state) = 0; + virtual status_t setPlaybackRate(float rate) = 0; virtual status_t seekTo(int msec) = 0; virtual status_t getCurrentPosition(int* msec) = 0; virtual status_t getDuration(int* msec) = 0; diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h index 67b599a..49a3d61 100644 --- a/include/media/IMediaPlayerService.h +++ 
b/include/media/IMediaPlayerService.h @@ -49,7 +49,8 @@ public: virtual sp<IMediaRecorder> createMediaRecorder() = 0; virtual sp<IMediaMetadataRetriever> createMetadataRetriever() = 0; - virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId = 0) = 0; + virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId = 0) + = 0; virtual sp<IOMX> getOMX() = 0; virtual sp<ICrypto> makeCrypto() = 0; diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h index 3e67550..509c06b 100644 --- a/include/media/IMediaRecorder.h +++ b/include/media/IMediaRecorder.h @@ -41,7 +41,6 @@ public: virtual status_t setOutputFormat(int of) = 0; virtual status_t setVideoEncoder(int ve) = 0; virtual status_t setAudioEncoder(int ae) = 0; - virtual status_t setOutputFile(const char* path) = 0; virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0; virtual status_t setVideoSize(int width, int height) = 0; virtual status_t setVideoFrameRate(int frames_per_second) = 0; diff --git a/include/media/IOMX.h b/include/media/IOMX.h index 627f23b..6def65b 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -147,6 +147,7 @@ public: INTERNAL_OPTION_SUSPEND, // data is a bool INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, // data is an int64_t INTERNAL_OPTION_MAX_TIMESTAMP_GAP, // data is int64_t + INTERNAL_OPTION_MAX_FPS, // data is float INTERNAL_OPTION_START_TIME, // data is an int64_t INTERNAL_OPTION_TIME_LAPSE, // data is an int64_t[2] }; diff --git a/include/media/IResourceManagerClient.h b/include/media/IResourceManagerClient.h new file mode 100644 index 0000000..3587aea --- /dev/null +++ b/include/media/IResourceManagerClient.h @@ -0,0 +1,47 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_IRESOURCEMANAGERCLIENT_H +#define ANDROID_IRESOURCEMANAGERCLIENT_H + +#include <utils/RefBase.h> +#include <binder/IInterface.h> +#include <binder/Parcel.h> + +namespace android { + +class IResourceManagerClient: public IInterface +{ +public: + DECLARE_META_INTERFACE(ResourceManagerClient); + + virtual bool reclaimResource() = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnResourceManagerClient: public BnInterface<IResourceManagerClient> +{ +public: + virtual status_t onTransact(uint32_t code, + const Parcel &data, + Parcel *reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif // ANDROID_IRESOURCEMANAGERCLIENT_H diff --git a/include/media/IResourceManagerService.h b/include/media/IResourceManagerService.h new file mode 100644 index 0000000..067392c --- /dev/null +++ b/include/media/IResourceManagerService.h @@ -0,0 +1,66 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_IRESOURCEMANAGERSERVICE_H +#define ANDROID_IRESOURCEMANAGERSERVICE_H + +#include <utils/Errors.h> // for status_t +#include <utils/KeyedVector.h> +#include <utils/RefBase.h> +#include <utils/String8.h> +#include <binder/IInterface.h> +#include <binder/Parcel.h> + +#include <media/IResourceManagerClient.h> +#include <media/MediaResource.h> +#include <media/MediaResourcePolicy.h> + +namespace android { + +class IResourceManagerService: public IInterface +{ +public: + DECLARE_META_INTERFACE(ResourceManagerService); + + virtual void config(const Vector<MediaResourcePolicy> &policies) = 0; + + virtual void addResource( + int pid, + int64_t clientId, + const sp<IResourceManagerClient> client, + const Vector<MediaResource> &resources) = 0; + + virtual void removeResource(int64_t clientId) = 0; + + virtual bool reclaimResource( + int callingPid, + const Vector<MediaResource> &resources) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnResourceManagerService: public BnInterface<IResourceManagerService> +{ +public: + virtual status_t onTransact(uint32_t code, + const Parcel &data, + Parcel *reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif // ANDROID_IRESOURCEMANAGERSERVICE_H diff --git a/include/media/IStreamSource.h b/include/media/IStreamSource.h index 677119b..149bd49 100644 --- a/include/media/IStreamSource.h +++ b/include/media/IStreamSource.h @@ -81,6 +81,13 @@ struct IStreamListener : public IInterface { // with the next PTS occuring in the stream. The value is of type int64_t. static const char *const kKeyMediaTimeUs; + // Optionally signalled as part of a discontinuity that includes + // DISCONTINUITY_TIME. It indicates the media time (in us) of a recent + // sample from the same content, and is used as a hint for the parser to + // handle PTS wraparound. This is required when a new parser is created + // to continue parsing content from the same timeline. 
+ static const char *const kKeyRecentMediaTimeUs; + virtual void issueCommand( Command cmd, bool synchronous, const sp<AMessage> &msg = NULL) = 0; }; diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h index 4a6bf28..d6fe390 100644 --- a/include/media/MediaPlayerInterface.h +++ b/include/media/MediaPlayerInterface.h @@ -113,7 +113,19 @@ public: const audio_offload_info_t *offloadInfo = NULL) = 0; virtual status_t start() = 0; - virtual ssize_t write(const void* buffer, size_t size) = 0; + + /* Input parameter |size| is in byte units stored in |buffer|. + * Data is copied over and actual number of bytes written (>= 0) + * is returned, or no data is copied and a negative status code + * is returned (even when |blocking| is true). + * When |blocking| is false, AudioSink will immediately return after + * part of or full |buffer| is copied over. + * When |blocking| is true, AudioSink will wait to copy the entire + * buffer, unless an error occurs or the copy operation is + * prematurely stopped. 
+ */ + virtual ssize_t write(const void* buffer, size_t size, bool blocking = true) = 0; + virtual void stop() = 0; virtual void flush() = 0; virtual void pause() = 0; @@ -156,6 +168,7 @@ public: virtual status_t stop() = 0; virtual status_t pause() = 0; virtual bool isPlaying() = 0; + virtual status_t setPlaybackRate(float rate) { return INVALID_OPERATION; } virtual status_t seekTo(int msec) = 0; virtual status_t getCurrentPosition(int *msec) = 0; virtual status_t getDuration(int *msec) = 0; diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h index d7ac302..f55063e 100644 --- a/include/media/MediaRecorderBase.h +++ b/include/media/MediaRecorderBase.h @@ -43,7 +43,6 @@ struct MediaRecorderBase { virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy) = 0; virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface) = 0; - virtual status_t setOutputFile(const char *path) = 0; virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0; virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;} virtual status_t setParameters(const String8& params) = 0; diff --git a/include/media/MediaResource.h b/include/media/MediaResource.h new file mode 100644 index 0000000..0b57c84 --- /dev/null +++ b/include/media/MediaResource.h @@ -0,0 +1,51 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +#ifndef ANDROID_MEDIA_RESOURCE_H +#define ANDROID_MEDIA_RESOURCE_H + +#include <binder/Parcel.h> +#include <utils/String8.h> + +namespace android { + +extern const char kResourceSecureCodec[]; +extern const char kResourceNonSecureCodec[]; +extern const char kResourceGraphicMemory[]; + +class MediaResource { +public: + MediaResource(); + MediaResource(String8 type, uint64_t value); + MediaResource(String8 type, String8 subType, uint64_t value); + + void readFromParcel(const Parcel &parcel); + void writeToParcel(Parcel *parcel) const; + + String8 toString() const; + + bool operator==(const MediaResource &other) const; + bool operator!=(const MediaResource &other) const; + + String8 mType; + String8 mSubType; + uint64_t mValue; +}; + +}; // namespace android + +#endif // ANDROID_MEDIA_RESOURCE_H diff --git a/include/media/MediaResourcePolicy.h b/include/media/MediaResourcePolicy.h new file mode 100644 index 0000000..1e1c341 --- /dev/null +++ b/include/media/MediaResourcePolicy.h @@ -0,0 +1,45 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +#ifndef ANDROID_MEDIA_RESOURCE_POLICY_H +#define ANDROID_MEDIA_RESOURCE_POLICY_H + +#include <binder/Parcel.h> +#include <utils/String8.h> + +namespace android { + +extern const char kPolicySupportsMultipleSecureCodecs[]; +extern const char kPolicySupportsSecureWithNonSecureCodec[]; + +class MediaResourcePolicy { +public: + MediaResourcePolicy(); + MediaResourcePolicy(String8 type, uint64_t value); + + void readFromParcel(const Parcel &parcel); + void writeToParcel(Parcel *parcel) const; + + String8 toString() const; + + String8 mType; + uint64_t mValue; +}; + +}; // namespace android + +#endif // ANDROID_MEDIA_RESOURCE_POLICY_H diff --git a/include/media/SingleStateQueue.h b/include/media/SingleStateQueue.h index 04c5fd0..d423962 100644 --- a/include/media/SingleStateQueue.h +++ b/include/media/SingleStateQueue.h @@ -21,6 +21,7 @@ // Non-blocking single-reader / single-writer multi-word atomic load / store #include <stdint.h> +#include <cutils/atomic.h> namespace android { @@ -31,6 +32,12 @@ public: class Mutator; class Observer; + enum SSQ_STATUS { + SSQ_PENDING, /* = 0 */ + SSQ_READ, + SSQ_DONE, + }; + struct Shared { // needs to be part of a union so don't define constructor or destructor @@ -41,28 +48,56 @@ private: void init() { mAck = 0; mSequence = 0; } volatile int32_t mAck; -#if 0 - int mPad[7]; - // cache line boundary -#endif volatile int32_t mSequence; T mValue; }; class Mutator { public: - Mutator(Shared *shared); - /*virtual*/ ~Mutator() { } + Mutator(Shared *shared) + : mSequence(0), mShared(shared) + { + // exactly one of Mutator and Observer must initialize, currently it is Observer + // shared->init(); + } // push new value onto state queue, overwriting previous value; // returns a sequence number which can be used with ack() - int32_t push(const T& value); - - // return true if most recent push has been observed - bool ack(); + int32_t push(const T& value) + { + Shared *shared = mShared; + int32_t sequence = mSequence; + sequence++; + 
android_atomic_acquire_store(sequence, &shared->mSequence); + shared->mValue = value; + sequence++; + android_atomic_release_store(sequence, &shared->mSequence); + mSequence = sequence; + // consider signalling a futex here, if we know that observer is waiting + return sequence; + } + + // returns the status of the last state push. This may be a stale value. + // + // SSQ_PENDING, or 0, means it has not been observed + // SSQ_READ means it has been read + // SSQ_DONE means it has been acted upon, after Observer::done() is called + enum SSQ_STATUS ack() const + { + // in the case of SSQ_DONE, prevent any subtle data-races of subsequent reads + // being performed (out-of-order) before the ack read, should the caller be + // depending on sequentiality of reads. + const int32_t ack = android_atomic_acquire_load(&mShared->mAck); + return ack - mSequence & ~1 ? SSQ_PENDING /* seq differ */ : + ack & 1 ? SSQ_DONE : SSQ_READ; + } // return true if a push with specified sequence number or later has been observed - bool ack(int32_t sequence); + bool ack(int32_t sequence) const + { + // this relies on 2's complement rollover to detect an ancient sequence number + return mShared->mAck - sequence >= 0; + } private: int32_t mSequence; @@ -71,11 +106,54 @@ private: class Observer { public: - Observer(Shared *shared); - /*virtual*/ ~Observer() { } + Observer(Shared *shared) + : mSequence(0), mSeed(1), mShared(shared) + { + // exactly one of Mutator and Observer must initialize, currently it is Observer + shared->init(); + } // return true if value has changed - bool poll(T& value); + bool poll(T& value) + { + Shared *shared = mShared; + int32_t before = shared->mSequence; + if (before == mSequence) { + return false; + } + for (int tries = 0; ; ) { + const int MAX_TRIES = 5; + if (before & 1) { + if (++tries >= MAX_TRIES) { + return false; + } + before = shared->mSequence; + } else { + android_memory_barrier(); + T temp = shared->mValue; + int32_t after = 
android_atomic_release_load(&shared->mSequence); + if (after == before) { + value = temp; + shared->mAck = before; + mSequence = before; // mSequence is even after poll success + return true; + } + if (++tries >= MAX_TRIES) { + return false; + } + before = after; + } + } + } + + // (optional) used to indicate to the Mutator that the state that has been polled + // has also been acted upon. + void done() + { + const int32_t ack = mShared->mAck + 1; + // ensure all previous writes have been performed. + android_atomic_release_store(ack, &mShared->mAck); // mSequence is odd after "done" + } private: int32_t mSequence; diff --git a/include/media/StringArray.h b/include/media/StringArray.h index ae47085..48d98bf 100644 --- a/include/media/StringArray.h +++ b/include/media/StringArray.h @@ -16,7 +16,7 @@ // // Sortable array of strings. STL-ish, but STL-free. -// +// #ifndef _LIBS_MEDIA_STRING_ARRAY_H #define _LIBS_MEDIA_STRING_ARRAY_H diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index 5830933..808e893 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -220,6 +220,7 @@ public: status_t stop(); status_t pause(); bool isPlaying(); + status_t setPlaybackRate(float rate); status_t getVideoWidth(int *w); status_t getVideoHeight(int *h); status_t seekTo(int msec); @@ -274,6 +275,7 @@ private: int mVideoWidth; int mVideoHeight; int mAudioSessionId; + float mPlaybackRate; float mSendLevel; struct sockaddr_in mRetransmitEndpoint; bool mRetransmitEndpointValid; diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h index b0a62a7..74a6469 100644 --- a/include/media/mediarecorder.h +++ b/include/media/mediarecorder.h @@ -221,7 +221,6 @@ public: status_t setOutputFormat(int of); status_t setVideoEncoder(int ve); status_t setAudioEncoder(int ae); - status_t setOutputFile(const char* path); status_t setOutputFile(int fd, int64_t offset, int64_t length); status_t setVideoSize(int width, int height); status_t 
setVideoFrameRate(int frames_per_second); diff --git a/include/media/nbaio/NBAIO.h b/include/media/nbaio/NBAIO.h index d422576..d9bbc8d 100644 --- a/include/media/nbaio/NBAIO.h +++ b/include/media/nbaio/NBAIO.h @@ -231,7 +231,8 @@ public: virtual status_t getTimestamp(AudioTimestamp& timestamp) { return INVALID_OPERATION; } protected: - NBAIO_Sink(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0) { } + NBAIO_Sink(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesWritten(0) + { } virtual ~NBAIO_Sink() { } // Implementations are free to ignore these if they don't need them @@ -322,7 +323,8 @@ public: virtual void onTimestamp(const AudioTimestamp& timestamp) { } protected: - NBAIO_Source(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0) { } + NBAIO_Source(const NBAIO_Format& format = Format_Invalid) : NBAIO_Port(format), mFramesRead(0) + { } virtual ~NBAIO_Source() { } // Implementations are free to ignore these if they don't need them diff --git a/include/media/nbaio/NBLog.h b/include/media/nbaio/NBLog.h index bcbbc04..1297b51 100644 --- a/include/media/nbaio/NBLog.h +++ b/include/media/nbaio/NBLog.h @@ -21,7 +21,7 @@ #include <binder/IMemory.h> #include <utils/Mutex.h> -#include <media/nbaio/roundup.h> +#include <audio_utils/roundup.h> namespace android { diff --git a/include/media/stagefright/AACWriter.h b/include/media/stagefright/AACWriter.h index d22707a..86417a5 100644 --- a/include/media/stagefright/AACWriter.h +++ b/include/media/stagefright/AACWriter.h @@ -27,7 +27,6 @@ struct MediaSource; struct MetaData; struct AACWriter : public MediaWriter { - AACWriter(const char *filename); AACWriter(int fd); status_t initCheck() const; diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h index cd2bd27..c1483f3 100644 --- a/include/media/stagefright/ACodec.h +++ b/include/media/stagefright/ACodec.h @@ -214,6 +214,7 @@ private: int64_t 
mRepeatFrameDelayUs; int64_t mMaxPtsGapUs; + float mMaxFps; int64_t mTimePerFrameUs; int64_t mTimePerCaptureUs; @@ -298,6 +299,8 @@ private: status_t setupRawAudioFormat( OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels); + status_t setPriority(int32_t priority); + status_t setMinBufferSize(OMX_U32 portIndex, size_t size); status_t setupMPEG4EncoderParameters(const sp<AMessage> &msg); diff --git a/include/media/stagefright/AMRWriter.h b/include/media/stagefright/AMRWriter.h index 392f968..bac878b 100644 --- a/include/media/stagefright/AMRWriter.h +++ b/include/media/stagefright/AMRWriter.h @@ -29,7 +29,6 @@ struct MediaSource; struct MetaData; struct AMRWriter : public MediaWriter { - AMRWriter(const char *filename); AMRWriter(int fd); status_t initCheck() const; diff --git a/include/media/stagefright/BufferProducerWrapper.h b/include/media/stagefright/BufferProducerWrapper.h index d8acf30..4caa2c6 100644 --- a/include/media/stagefright/BufferProducerWrapper.h +++ b/include/media/stagefright/BufferProducerWrapper.h @@ -19,6 +19,7 @@ #define BUFFER_PRODUCER_WRAPPER_H_ #include <gui/IGraphicBufferProducer.h> +#include <media/stagefright/foundation/ABase.h> namespace android { diff --git a/include/media/stagefright/MPEG2TSWriter.h b/include/media/stagefright/MPEG2TSWriter.h index 2e2922e..3d7960b 100644 --- a/include/media/stagefright/MPEG2TSWriter.h +++ b/include/media/stagefright/MPEG2TSWriter.h @@ -29,7 +29,6 @@ struct ABuffer; struct MPEG2TSWriter : public MediaWriter { MPEG2TSWriter(int fd); - MPEG2TSWriter(const char *filename); MPEG2TSWriter( void *cookie, diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h index 26ce5f9..a195fe8 100644 --- a/include/media/stagefright/MPEG4Writer.h +++ b/include/media/stagefright/MPEG4Writer.h @@ -26,13 +26,13 @@ namespace android { +class AMessage; class MediaBuffer; class MediaSource; class MetaData; class MPEG4Writer : public MediaWriter { public: - MPEG4Writer(const char 
*filename); MPEG4Writer(int fd); // Limitations @@ -49,6 +49,7 @@ public: virtual status_t dump(int fd, const Vector<String16>& args); void beginBox(const char *fourcc); + void beginBox(uint32_t id); void writeInt8(int8_t x); void writeInt16(int16_t x); void writeInt32(int32_t x); @@ -63,6 +64,7 @@ public: int32_t getTimeScale() const { return mTimeScale; } status_t setGeoData(int latitudex10000, int longitudex10000); + status_t setCaptureRate(float captureFps); virtual void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; } virtual int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; } @@ -89,6 +91,7 @@ private: off64_t mFreeBoxOffset; bool mStreamableFile; off64_t mEstimatedMoovBoxSize; + off64_t mMoovExtraSize; uint32_t mInterleaveDurationUs; int32_t mTimeScale; int64_t mStartTimestampUs; @@ -103,6 +106,8 @@ private: List<off64_t> mBoxes; + sp<AMessage> mMetaKeys; + void setStartTimestampUs(int64_t timeUs); int64_t getStartTimestampUs(); // Not const status_t startTracks(MetaData *params); @@ -196,6 +201,12 @@ private: void writeGeoDataBox(); void writeLatitude(int degreex10000); void writeLongitude(int degreex10000); + + void addDeviceMeta(); + void writeHdlr(); + void writeKeys(); + void writeIlst(); + void writeMetaBox(); void sendSessionSummary(); void release(); status_t reset(); diff --git a/include/media/stagefright/MediaClock.h b/include/media/stagefright/MediaClock.h new file mode 100644 index 0000000..e9c09a1 --- /dev/null +++ b/include/media/stagefright/MediaClock.h @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_CLOCK_H_ + +#define MEDIA_CLOCK_H_ + +#include <media/stagefright/foundation/ABase.h> +#include <utils/Mutex.h> +#include <utils/RefBase.h> + +namespace android { + +struct AMessage; + +struct MediaClock : public RefBase { + MediaClock(); + + void setStartingTimeMedia(int64_t startingTimeMediaUs); + + void clearAnchor(); + // It's required to use timestamp of just rendered frame as + // anchor time in paused state. + void updateAnchor( + int64_t anchorTimeMediaUs, + int64_t anchorTimeRealUs, + int64_t maxTimeMediaUs = INT64_MAX); + + void updateMaxTimeMedia(int64_t maxTimeMediaUs); + + void setPlaybackRate(float rate); + + // query media time corresponding to real time |realUs|, and save the + // result in |outMediaUs|. + status_t getMediaTime( + int64_t realUs, + int64_t *outMediaUs, + bool allowPastMaxTime = false) const; + // query real time corresponding to media time |targetMediaUs|. + // The result is saved in |outRealUs|. 
+ status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) const; + +protected: + virtual ~MediaClock(); + +private: + status_t getMediaTime_l( + int64_t realUs, + int64_t *outMediaUs, + bool allowPastMaxTime) const; + + mutable Mutex mLock; + + int64_t mAnchorTimeMediaUs; + int64_t mAnchorTimeRealUs; + int64_t mMaxTimeMediaUs; + int64_t mStartingTimeMediaUs; + + float mPlaybackRate; + + DISALLOW_EVIL_CONSTRUCTORS(MediaClock); +}; + +} // namespace android + +#endif // MEDIA_CLOCK_H_ diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h index d448097..8241e19 100644 --- a/include/media/stagefright/MediaCodec.h +++ b/include/media/stagefright/MediaCodec.h @@ -27,6 +27,7 @@ namespace android { struct ABuffer; struct AMessage; +struct AReplyToken; struct AString; struct CodecBase; struct ICrypto; @@ -222,7 +223,7 @@ private: sp<ALooper> mCodecLooper; sp<CodecBase> mCodec; AString mComponentName; - uint32_t mReplyID; + sp<AReplyToken> mReplyID; uint32_t mFlags; status_t mStickyError; sp<Surface> mNativeWindow; @@ -249,10 +250,10 @@ private: Vector<BufferInfo> mPortBuffers[2]; int32_t mDequeueInputTimeoutGeneration; - uint32_t mDequeueInputReplyID; + sp<AReplyToken> mDequeueInputReplyID; int32_t mDequeueOutputTimeoutGeneration; - uint32_t mDequeueOutputReplyID; + sp<AReplyToken> mDequeueOutputReplyID; sp<ICrypto> mCrypto; @@ -267,7 +268,7 @@ private: static status_t PostAndAwaitResponse( const sp<AMessage> &msg, sp<AMessage> *response); - static void PostReplyWithError(int32_t replyID, int32_t err); + static void PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err); status_t init(const AString &name, bool nameIsType, bool encoder); @@ -283,8 +284,8 @@ private: size_t portIndex, size_t index, sp<ABuffer> *buffer, sp<AMessage> *format); - bool handleDequeueInputBuffer(uint32_t replyID, bool newRequest = false); - bool handleDequeueOutputBuffer(uint32_t replyID, bool newRequest = false); + bool 
handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false); + bool handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest = false); void cancelPendingDequeueOperations(); void extractCSD(const sp<AMessage> &format); diff --git a/include/media/stagefright/MediaCodecSource.h b/include/media/stagefright/MediaCodecSource.h index 0970b2b..7b8f59d 100644 --- a/include/media/stagefright/MediaCodecSource.h +++ b/include/media/stagefright/MediaCodecSource.h @@ -25,6 +25,7 @@ namespace android { class ALooper; class AMessage; +struct AReplyToken; class IGraphicBufferProducer; class MediaCodec; class MetaData; @@ -99,7 +100,7 @@ private: sp<Puller> mPuller; sp<MediaCodec> mEncoder; uint32_t mFlags; - List<uint32_t> mStopReplyIDQueue; + List<sp<AReplyToken>> mStopReplyIDQueue; bool mIsVideo; bool mStarted; bool mStopping; diff --git a/include/media/stagefright/MediaFilter.h b/include/media/stagefright/MediaFilter.h new file mode 100644 index 0000000..7b3f700 --- /dev/null +++ b/include/media/stagefright/MediaFilter.h @@ -0,0 +1,167 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef MEDIA_FILTER_H_ +#define MEDIA_FILTER_H_ + +#include <media/stagefright/CodecBase.h> + +namespace android { + +struct ABuffer; +struct GraphicBufferListener; +struct MemoryDealer; +struct SimpleFilter; + +struct MediaFilter : public CodecBase { + MediaFilter(); + + virtual void setNotificationMessage(const sp<AMessage> &msg); + + virtual void initiateAllocateComponent(const sp<AMessage> &msg); + virtual void initiateConfigureComponent(const sp<AMessage> &msg); + virtual void initiateCreateInputSurface(); + virtual void initiateStart(); + virtual void initiateShutdown(bool keepComponentAllocated = false); + + virtual void signalFlush(); + virtual void signalResume(); + + virtual void signalRequestIDRFrame(); + virtual void signalSetParameters(const sp<AMessage> &msg); + virtual void signalEndOfInputStream(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + + struct PortDescription : public CodecBase::PortDescription { + virtual size_t countBuffers(); + virtual IOMX::buffer_id bufferIDAt(size_t index) const; + virtual sp<ABuffer> bufferAt(size_t index) const; + + protected: + PortDescription(); + + private: + friend struct MediaFilter; + + Vector<IOMX::buffer_id> mBufferIDs; + Vector<sp<ABuffer> > mBuffers; + + void addBuffer(IOMX::buffer_id id, const sp<ABuffer> &buffer); + + DISALLOW_EVIL_CONSTRUCTORS(PortDescription); + }; + +protected: + virtual ~MediaFilter(); + +private: + struct BufferInfo { + enum Status { + OWNED_BY_US, + OWNED_BY_UPSTREAM, + }; + + IOMX::buffer_id mBufferID; + int32_t mGeneration; + int32_t mOutputFlags; + Status mStatus; + + sp<ABuffer> mData; + }; + + enum State { + UNINITIALIZED, + INITIALIZED, + CONFIGURED, + STARTED, + }; + + enum { + kWhatInputBufferFilled = 'inpF', + kWhatOutputBufferDrained = 'outD', + kWhatShutdown = 'shut', + kWhatFlush = 'flus', + kWhatResume = 'resm', + kWhatAllocateComponent = 'allo', + kWhatConfigureComponent = 'conf', + kWhatCreateInputSurface = 'cisf', + 
kWhatSignalEndOfInputStream = 'eois', + kWhatStart = 'star', + kWhatSetParameters = 'setP', + kWhatProcessBuffers = 'proc', + }; + + enum { + kPortIndexInput = 0, + kPortIndexOutput = 1 + }; + + // member variables + AString mComponentName; + State mState; + status_t mInputEOSResult; + int32_t mWidth, mHeight; + int32_t mStride, mSliceHeight; + int32_t mColorFormatIn, mColorFormatOut; + size_t mMaxInputSize, mMaxOutputSize; + int32_t mGeneration; + sp<AMessage> mNotify; + sp<AMessage> mInputFormat; + sp<AMessage> mOutputFormat; + + sp<MemoryDealer> mDealer[2]; + Vector<BufferInfo> mBuffers[2]; + Vector<BufferInfo*> mAvailableInputBuffers; + Vector<BufferInfo*> mAvailableOutputBuffers; + bool mPortEOS[2]; + + sp<SimpleFilter> mFilter; + sp<GraphicBufferListener> mGraphicBufferListener; + + // helper functions + void signalProcessBuffers(); + void signalError(status_t error); + + status_t allocateBuffersOnPort(OMX_U32 portIndex); + BufferInfo *findBufferByID( + uint32_t portIndex, IOMX::buffer_id bufferID, + ssize_t *index = NULL); + void postFillThisBuffer(BufferInfo *info); + void postDrainThisBuffer(BufferInfo *info); + void postEOS(); + void sendFormatChange(); + void requestFillEmptyInput(); + void processBuffers(); + + void onAllocateComponent(const sp<AMessage> &msg); + void onConfigureComponent(const sp<AMessage> &msg); + void onStart(); + void onInputBufferFilled(const sp<AMessage> &msg); + void onOutputBufferDrained(const sp<AMessage> &msg); + void onShutdown(const sp<AMessage> &msg); + void onFlush(); + void onSetParameters(const sp<AMessage> &msg); + void onCreateInputSurface(); + void onInputFrameAvailable(); + void onSignalEndOfInputStream(); + + DISALLOW_EVIL_CONSTRUCTORS(MediaFilter); +}; + +} // namespace android + +#endif // MEDIA_FILTER_H_ diff --git a/include/media/stagefright/MediaMuxer.h b/include/media/stagefright/MediaMuxer.h index 9da98d9..e6538d1 100644 --- a/include/media/stagefright/MediaMuxer.h +++ b/include/media/stagefright/MediaMuxer.h 
@@ -50,9 +50,6 @@ public: OUTPUT_FORMAT_LIST_END // must be last - used to validate format type }; - // Construct the muxer with the output file path. - MediaMuxer(const char *path, OutputFormat format); - // Construct the muxer with the file descriptor. Note that the MediaMuxer // will close this file at stop(). MediaMuxer(int fd, OutputFormat format); diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h new file mode 100644 index 0000000..8bb8c7f --- /dev/null +++ b/include/media/stagefright/MediaSync.h @@ -0,0 +1,239 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MEDIA_SYNC_H +#define MEDIA_SYNC_H + +#include <gui/IConsumerListener.h> +#include <gui/IProducerListener.h> + +#include <media/stagefright/foundation/AHandler.h> + +#include <utils/Condition.h> +#include <utils/Mutex.h> + +namespace android { + +class AudioTrack; +class BufferItem; +class Fence; +class GraphicBuffer; +class IGraphicBufferConsumer; +class IGraphicBufferProducer; +struct MediaClock; + +// MediaSync manages media playback and its synchronization to a media clock +// source. It can be also used for video-only playback. +// +// For video playback, it requires an output surface and provides an input +// surface. It then controls the rendering of input buffers (buffer queued to +// the input surface) on the output surface to happen at the appropriate time. 
+// +// For audio playback, it requires an audio track and takes updates of +// information of rendered audio data to maintain media clock when audio track +// serves as media clock source. (TODO: move audio rendering from JAVA to +// native code). +// +// It can use the audio or video track as media clock source, as well as an +// external clock. (TODO: actually support external clock as media clock +// sources; use video track as media clock source for audio-and-video stream). +// +// In video-only mode, MediaSync will playback every video frame even though +// a video frame arrives late based on its timestamp and last frame's. +// +// The client needs to configure surface (for output video rendering) and audio +// track (for querying information of audio rendering) for MediaSync. +// +// Then the client needs to obtain a surface from MediaSync and render video +// frames onto that surface. Internally, the MediaSync will receive those video +// frames and render them onto the output surface at the appropriate time. +// +// The client needs to call updateQueuedAudioData() immediately after it writes +// audio data to the audio track. Such information will be used to update media +// clock. +// +class MediaSync : public AHandler { +public: + // Create an instance of MediaSync. + static sp<MediaSync> create(); + + // Called when MediaSync is used to render video. It should be called + // before createInputSurface(). + status_t configureSurface(const sp<IGraphicBufferProducer> &output); + + // Called when audio track is used as media clock source. It should be + // called before updateQueuedAudioData(). + // |nativeSampleRateInHz| is the sample rate of audio data fed into audio + // track. It's the same number used to create AudioTrack. + status_t configureAudioTrack( + const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz); + + // Create a surface for client to render video frames. This is the surface + // on which the client should render video frames. 
Those video frames will + // be internally directed to output surface for rendering at appropriate + // time. + status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer); + + // Update just-rendered audio data size and the presentation timestamp of + // the first frame of that audio data. It should be called immediately + // after the client write audio data into AudioTrack. + // This function assumes continous audio stream. + // TODO: support gap or backwards updates. + status_t updateQueuedAudioData( + size_t sizeInBytes, int64_t presentationTimeUs); + + // Set the consumer name of the input queue. + void setName(const AString &name); + + // Set the playback in a desired speed. + // This method can be called any time. + // |rate| is the ratio between desired speed and the normal one, and should + // be non-negative. The meaning of rate values: + // 1.0 -- normal playback + // 0.0 -- stop or pause + // larger than 1.0 -- faster than normal speed + // between 0.0 and 1.0 -- slower than normal speed + status_t setPlaybackRate(float rate); + + // Get the media clock used by the MediaSync so that the client can obtain + // corresponding media time or real time via + // MediaClock::getMediaTime() and MediaClock::getRealTimeFor(). + sp<const MediaClock> getMediaClock(); + +protected: + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + enum { + kWhatDrainVideo = 'dVid', + }; + + static const int MAX_OUTSTANDING_BUFFERS = 2; + + // This is a thin wrapper class that lets us listen to + // IConsumerListener::onFrameAvailable from mInput. + class InputListener : public BnConsumerListener, + public IBinder::DeathRecipient { + public: + InputListener(const sp<MediaSync> &sync); + virtual ~InputListener(); + + // From IConsumerListener + virtual void onFrameAvailable(const BufferItem &item); + + // From IConsumerListener + // We don't care about released buffers because we detach each buffer as + // soon as we acquire it. 
See the comment for onBufferReleased below for + // some clarifying notes about the name. + virtual void onBuffersReleased() {} + + // From IConsumerListener + // We don't care about sideband streams, since we won't relay them. + virtual void onSidebandStreamChanged(); + + // From IBinder::DeathRecipient + virtual void binderDied(const wp<IBinder> &who); + + private: + sp<MediaSync> mSync; + }; + + // This is a thin wrapper class that lets us listen to + // IProducerListener::onBufferReleased from mOutput. + class OutputListener : public BnProducerListener, + public IBinder::DeathRecipient { + public: + OutputListener(const sp<MediaSync> &sync); + virtual ~OutputListener(); + + // From IProducerListener + virtual void onBufferReleased(); + + // From IBinder::DeathRecipient + virtual void binderDied(const wp<IBinder> &who); + + private: + sp<MediaSync> mSync; + }; + + // mIsAbandoned is set to true when the input or output dies. + // Once the MediaSync has been abandoned by one side, it will disconnect + // from the other side and not attempt to communicate with it further. + bool mIsAbandoned; + + mutable Mutex mMutex; + Condition mReleaseCondition; + size_t mNumOutstandingBuffers; + sp<IGraphicBufferConsumer> mInput; + sp<IGraphicBufferProducer> mOutput; + + sp<AudioTrack> mAudioTrack; + uint32_t mNativeSampleRateInHz; + int64_t mNumFramesWritten; + bool mHasAudio; + + int64_t mNextBufferItemMediaUs; + List<BufferItem> mBufferItems; + sp<ALooper> mLooper; + float mPlaybackRate; + + sp<MediaClock> mMediaClock; + + MediaSync(); + + // Must be accessed through RefBase + virtual ~MediaSync(); + + int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs); + int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames); + int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs); + + void onDrainVideo_l(); + + // This implements the onFrameAvailable callback from IConsumerListener. + // It gets called from an InputListener. 
+ // During this callback, we detach the buffer from the input, and queue + // it for rendering on the output. This call can block if there are too + // many outstanding buffers. If it blocks, it will resume when + // onBufferReleasedByOutput releases a buffer back to the input. + void onFrameAvailableFromInput(); + + // Send |bufferItem| to the output for rendering. + void renderOneBufferItem_l(const BufferItem &bufferItem); + + // This implements the onBufferReleased callback from IProducerListener. + // It gets called from an OutputListener. + // During this callback, we detach the buffer from the output, and release + // it to the input. A blocked onFrameAvailable call will be allowed to proceed. + void onBufferReleasedByOutput(); + + // Return |buffer| back to the input. + void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence); + + // When this is called, the MediaSync disconnects from (i.e., abandons) its + // input or output, and signals any waiting onFrameAvailable calls to wake + // up. This must be called with mMutex locked. + void onAbandoned_l(bool isInput); + + // helper. + bool isPlaying() { return mPlaybackRate != 0.0; } + + DISALLOW_EVIL_CONSTRUCTORS(MediaSync); +}; + +} // namespace android + +#endif diff --git a/include/media/stagefright/ProcessInfo.h b/include/media/stagefright/ProcessInfo.h new file mode 100644 index 0000000..ec0cdff --- /dev/null +++ b/include/media/stagefright/ProcessInfo.h @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef PROCESS_INFO_H_ + +#define PROCESS_INFO_H_ + +#include <media/stagefright/foundation/ABase.h> +#include <media/stagefright/ProcessInfoInterface.h> + +namespace android { + +struct ProcessInfo : public ProcessInfoInterface { + ProcessInfo(); + + virtual bool getPriority(int pid, int* priority); + +protected: + virtual ~ProcessInfo(); + +private: + DISALLOW_EVIL_CONSTRUCTORS(ProcessInfo); +}; + +} // namespace android + +#endif // PROCESS_INFO_H_ diff --git a/media/libmedia/SingleStateQueueInstantiations.cpp b/include/media/stagefright/ProcessInfoInterface.h index 0265c8c..222f92d 100644 --- a/media/libmedia/SingleStateQueueInstantiations.cpp +++ b/include/media/stagefright/ProcessInfoInterface.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2012 The Android Open Source Project + * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,15 +14,20 @@ * limitations under the License. 
*/ -#include <media/SingleStateQueue.h> -#include <private/media/StaticAudioTrackState.h> -#include <media/AudioTimestamp.h> +#ifndef PROCESS_INFO_INTERFACE_H_ +#define PROCESS_INFO_INTERFACE_H_ -// FIXME hack for gcc +#include <utils/RefBase.h> namespace android { -template class SingleStateQueue<StaticAudioTrackState>; // typedef StaticAudioTrackSingleStateQueue -template class SingleStateQueue<AudioTimestamp>; // typedef AudioTimestampSingleStateQueue +struct ProcessInfoInterface : public RefBase { + virtual bool getPriority(int pid, int* priority) = 0; -} +protected: + virtual ~ProcessInfoInterface() {} +}; + +} // namespace android + +#endif // PROCESS_INFO_INTERFACE_H_ diff --git a/include/media/stagefright/RenderScriptWrapper.h b/include/media/stagefright/RenderScriptWrapper.h new file mode 100644 index 0000000..b42649e --- /dev/null +++ b/include/media/stagefright/RenderScriptWrapper.h @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef RENDERSCRIPT_WRAPPER_H_ +#define RENDERSCRIPT_WRAPPER_H_ + +#include <RenderScript.h> + +namespace android { + +struct RenderScriptWrapper : public RefBase { +public: + struct RSFilterCallback : public RefBase { + public: + // called by RSFilter to process each input buffer + virtual status_t processBuffers( + RSC::Allocation* inBuffer, + RSC::Allocation* outBuffer) = 0; + + virtual status_t handleSetParameters(const sp<AMessage> &msg) = 0; + }; + + sp<RSFilterCallback> mCallback; + RSC::sp<RSC::RS> mContext; +}; + +} // namespace android + +#endif // RENDERSCRIPT_WRAPPER_H_ diff --git a/include/media/stagefright/Utils.h b/include/media/stagefright/Utils.h index a795c80..ec3a10e 100644 --- a/include/media/stagefright/Utils.h +++ b/include/media/stagefright/Utils.h @@ -65,6 +65,17 @@ bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo, AString uriDebugString(const AString &uri, bool incognito = false); +struct HLSTime { + int32_t mSeq; + int64_t mTimeUs; + sp<AMessage> mMeta; + + HLSTime(const sp<AMessage> &meta = NULL); + int64_t getSegmentTimeUs(bool midpoint = false) const; +}; + +bool operator <(const HLSTime &t0, const HLSTime &t1); + } // namespace android #endif // UTILS_H_ diff --git a/include/media/stagefright/foundation/ABase.h b/include/media/stagefright/foundation/ABase.h index 72e3d87..ef1e010 100644 --- a/include/media/stagefright/foundation/ABase.h +++ b/include/media/stagefright/foundation/ABase.h @@ -18,7 +18,9 @@ #define A_BASE_H_ +#ifndef ARRAY_SIZE #define ARRAY_SIZE(a) (sizeof(a) / sizeof(*(a))) +#endif #define DISALLOW_EVIL_CONSTRUCTORS(name) \ name(const name &); \ diff --git a/include/media/stagefright/foundation/AHandler.h b/include/media/stagefright/foundation/AHandler.h index 41ade77..fe02a86 100644 --- a/include/media/stagefright/foundation/AHandler.h +++ b/include/media/stagefright/foundation/AHandler.h @@ -29,6 +29,7 @@ struct AMessage; struct AHandler : public RefBase { AHandler() : mID(0), + 
mVerboseStats(false), mMessageCounter(0) { } @@ -36,23 +37,40 @@ struct AHandler : public RefBase { return mID; } - sp<ALooper> looper(); + sp<ALooper> looper() const { + return mLooper.promote(); + } + + wp<ALooper> getLooper() const { + return mLooper; + } + + wp<AHandler> getHandler() const { + // allow getting a weak reference to a const handler + return const_cast<AHandler *>(this); + } protected: virtual void onMessageReceived(const sp<AMessage> &msg) = 0; private: - friend struct ALooperRoster; + friend struct AMessage; // deliverMessage() + friend struct ALooperRoster; // setID() ALooper::handler_id mID; + wp<ALooper> mLooper; - void setID(ALooper::handler_id id) { + inline void setID(ALooper::handler_id id, wp<ALooper> looper) { mID = id; + mLooper = looper; } + bool mVerboseStats; uint32_t mMessageCounter; KeyedVector<uint32_t, uint32_t> mMessages; + void deliverMessage(const sp<AMessage> &msg); + DISALLOW_EVIL_CONSTRUCTORS(AHandler); }; diff --git a/include/media/stagefright/foundation/ALooper.h b/include/media/stagefright/foundation/ALooper.h index 70e0c5e..09c469b 100644 --- a/include/media/stagefright/foundation/ALooper.h +++ b/include/media/stagefright/foundation/ALooper.h @@ -30,6 +30,7 @@ namespace android { struct AHandler; struct AMessage; +struct AReplyToken; struct ALooper : public RefBase { typedef int32_t event_id; @@ -53,11 +54,15 @@ struct ALooper : public RefBase { static int64_t GetNowUs(); + const char *getName() const { + return mName.c_str(); + } + protected: virtual ~ALooper(); private: - friend struct ALooperRoster; + friend struct AMessage; // post() struct Event { int64_t mWhenUs; @@ -75,12 +80,32 @@ private: sp<LooperThread> mThread; bool mRunningLocally; + // use a separate lock for reply handling, as it is always on another thread + // use a central lock, however, to avoid creating a mutex for each reply + Mutex mRepliesLock; + Condition mRepliesCondition; + + // START --- methods used only by AMessage + + // posts a message on 
this looper with the given timeout void post(const sp<AMessage> &msg, int64_t delayUs); + + // creates a reply token to be used with this looper + sp<AReplyToken> createReplyToken(); + // waits for a response for the reply token. If status is OK, the response + // is stored into the supplied variable. Otherwise, it is unchanged. + status_t awaitResponse(const sp<AReplyToken> &replyToken, sp<AMessage> *response); + // posts a reply for a reply token. If the reply could be successfully posted, + // it returns OK. Otherwise, it returns an error value. + status_t postReply(const sp<AReplyToken> &replyToken, const sp<AMessage> &msg); + + // END --- methods used only by AMessage + bool loop(); DISALLOW_EVIL_CONSTRUCTORS(ALooper); }; -} // namespace android +} // namespace android #endif // A_LOOPER_H_ diff --git a/include/media/stagefright/foundation/ALooperRoster.h b/include/media/stagefright/foundation/ALooperRoster.h index a0be8eb..9912455 100644 --- a/include/media/stagefright/foundation/ALooperRoster.h +++ b/include/media/stagefright/foundation/ALooperRoster.h @@ -33,16 +33,6 @@ struct ALooperRoster { void unregisterHandler(ALooper::handler_id handlerID); void unregisterStaleHandlers(); - status_t postMessage(const sp<AMessage> &msg, int64_t delayUs = 0); - void deliverMessage(const sp<AMessage> &msg); - - status_t postAndAwaitResponse( - const sp<AMessage> &msg, sp<AMessage> *response); - - void postReply(uint32_t replyID, const sp<AMessage> &reply); - - sp<ALooper> findLooper(ALooper::handler_id handlerID); - void dump(int fd, const Vector<String16>& args); private: @@ -54,10 +44,6 @@ private: Mutex mLock; KeyedVector<ALooper::handler_id, HandlerInfo> mHandlers; ALooper::handler_id mNextHandlerID; - uint32_t mNextReplyID; - Condition mRepliesCondition; - - KeyedVector<uint32_t, sp<AMessage> > mReplies; DISALLOW_EVIL_CONSTRUCTORS(ALooperRoster); }; diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h index 
a9e235b..4c6bd21 100644 --- a/include/media/stagefright/foundation/AMessage.h +++ b/include/media/stagefright/foundation/AMessage.h @@ -26,11 +26,41 @@ namespace android { struct ABuffer; +struct AHandler; struct AString; struct Parcel; +struct AReplyToken : public RefBase { + AReplyToken(const sp<ALooper> &looper) + : mLooper(looper), + mReplied(false) { + } + +private: + friend struct AMessage; + friend struct ALooper; + wp<ALooper> mLooper; + sp<AMessage> mReply; + bool mReplied; + + sp<ALooper> getLooper() const { + return mLooper.promote(); + } + // if reply is not set, returns false; otherwise, it retrieves the reply and returns true + bool retrieveReply(sp<AMessage> *reply) { + if (mReplied) { + *reply = mReply; + mReply.clear(); + } + return mReplied; + } + // sets the reply for this token. returns OK or error + status_t setReply(const sp<AMessage> &reply); +}; + struct AMessage : public RefBase { - AMessage(uint32_t what = 0, ALooper::handler_id target = 0); + AMessage(); + AMessage(uint32_t what, const sp<const AHandler> &handler); static sp<AMessage> FromParcel(const Parcel &parcel); void writeToParcel(Parcel *parcel) const; @@ -38,8 +68,7 @@ struct AMessage : public RefBase { void setWhat(uint32_t what); uint32_t what() const; - void setTarget(ALooper::handler_id target); - ALooper::handler_id target() const; + void setTarget(const sp<const AHandler> &handler); void clear(); @@ -76,18 +105,22 @@ struct AMessage : public RefBase { const char *name, int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const; - void post(int64_t delayUs = 0); + status_t post(int64_t delayUs = 0); // Posts the message to its target and waits for a response (or error) // before returning. status_t postAndAwaitResponse(sp<AMessage> *response); // If this returns true, the sender of this message is synchronously - // awaiting a response, the "replyID" can be used to send the response - // via "postReply" below. 
- bool senderAwaitsResponse(uint32_t *replyID) const; + // awaiting a response and the reply token is consumed from the message + // and stored into replyID. The reply token must be used to send the response + // using "postReply" below. + bool senderAwaitsResponse(sp<AReplyToken> *replyID); - void postReply(uint32_t replyID); + // Posts the message as a response to a reply token. A reply token can + // only be used once. Returns OK if the response could be posted; otherwise, + // an error. + status_t postReply(const sp<AReplyToken> &replyID); // Performs a deep-copy of "this", contained messages are in turn "dup'ed". // Warning: RefBase items, i.e. "objects" are _not_ copied but only have @@ -117,9 +150,16 @@ protected: virtual ~AMessage(); private: + friend struct ALooper; // deliver() + uint32_t mWhat; + + // used only for debugging ALooper::handler_id mTarget; + wp<AHandler> mHandler; + wp<ALooper> mLooper; + struct Rect { int32_t mLeft, mTop, mRight, mBottom; }; @@ -157,6 +197,8 @@ private: size_t findItemIndex(const char *name, size_t len) const; + void deliver(); + DISALLOW_EVIL_CONSTRUCTORS(AMessage); }; diff --git a/include/ndk/NdkMediaCodec.h b/include/ndk/NdkMediaCodec.h index c07f4c9..4f6a1ef 100644 --- a/include/ndk/NdkMediaCodec.h +++ b/include/ndk/NdkMediaCodec.h @@ -142,7 +142,8 @@ media_status_t AMediaCodec_queueSecureInputBuffer(AMediaCodec*, /** * Get the index of the next available buffer of processed data. 
*/ -ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec*, AMediaCodecBufferInfo *info, int64_t timeoutUs); +ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec*, AMediaCodecBufferInfo *info, + int64_t timeoutUs); AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec*); /** diff --git a/include/ndk/NdkMediaExtractor.h b/include/ndk/NdkMediaExtractor.h index 7a4e702..7324d31 100644 --- a/include/ndk/NdkMediaExtractor.h +++ b/include/ndk/NdkMediaExtractor.h @@ -55,12 +55,14 @@ media_status_t AMediaExtractor_delete(AMediaExtractor*); /** * Set the file descriptor from which the extractor will read. */ -media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor*, int fd, off64_t offset, off64_t length); +media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor*, int fd, off64_t offset, + off64_t length); /** * Set the URI from which the extractor will read. */ -media_status_t AMediaExtractor_setDataSource(AMediaExtractor*, const char *location); // TODO support headers +media_status_t AMediaExtractor_setDataSource(AMediaExtractor*, const char *location); + // TODO support headers /** * Return the number of tracks in the previously specified media file diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h index 31dff36..5644428 100644 --- a/include/private/media/AudioTrackShared.h +++ b/include/private/media/AudioTrackShared.h @@ -24,9 +24,8 @@ #include <utils/threads.h> #include <utils/Log.h> #include <utils/RefBase.h> -#include <media/nbaio/roundup.h> +#include <audio_utils/roundup.h> #include <media/SingleStateQueue.h> -#include <private/media/StaticAudioTrackState.h> namespace android { @@ -54,22 +53,64 @@ namespace android { struct AudioTrackSharedStreaming { // similar to NBAIO MonoPipe // in continuously incrementing frame units, take modulo buffer size, which must be a power of 2 - volatile int32_t mFront; // read by server - volatile int32_t mRear; // write by client + volatile int32_t mFront; // read by 
consumer (output: server, input: client) + volatile int32_t mRear; // written by producer (output: client, input: server) volatile int32_t mFlush; // incremented by client to indicate a request to flush; // server notices and discards all data between mFront and mRear volatile uint32_t mUnderrunFrames; // server increments for each unavailable but desired frame }; +// Represents a single state of an AudioTrack that was created in static mode (shared memory buffer +// supplied by the client). This state needs to be communicated from the client to server. As this +// state is too large to be updated atomically without a mutex, and mutexes aren't allowed here, the +// state is wrapped by a SingleStateQueue. +struct StaticAudioTrackState { + // Do not define constructors, destructors, or virtual methods as this is part of a + // union in shared memory and they will not get called properly. + + // These fields should both be size_t, but since they are located in shared memory we + // force to 32-bit. The client and server may have different typedefs for size_t. + + // The state has a sequence counter to indicate whether changes are made to loop or position. + // The sequence counter also currently indicates whether loop or position is first depending + // on which is greater; it jumps by max(mLoopSequence, mPositionSequence) + 1. + + uint32_t mLoopStart; + uint32_t mLoopEnd; + int32_t mLoopCount; + uint32_t mLoopSequence; // a sequence counter to indicate changes to loop + uint32_t mPosition; + uint32_t mPositionSequence; // a sequence counter to indicate changes to position +}; + typedef SingleStateQueue<StaticAudioTrackState> StaticAudioTrackSingleStateQueue; +struct StaticAudioTrackPosLoop { + // Do not define constructors, destructors, or virtual methods as this is part of a + // union in shared memory and will not get called properly. + + // These fields should both be size_t, but since they are located in shared memory we + // force to 32-bit. 
The client and server may have different typedefs for size_t. + + // This struct information is stored in a single state queue to communicate the + // static AudioTrack server state to the client while data is consumed. + // It is smaller than StaticAudioTrackState to prevent unnecessary information from + // being sent. + + uint32_t mBufferPosition; + int32_t mLoopCount; +}; + +typedef SingleStateQueue<StaticAudioTrackPosLoop> StaticAudioTrackPosLoopQueue; + struct AudioTrackSharedStatic { + // client requests to the server for loop or position changes. StaticAudioTrackSingleStateQueue::Shared mSingleStateQueue; - // This field should be a size_t, but since it is located in shared memory we - // force to 32-bit. The client and server may have different typedefs for size_t. - uint32_t mBufferPosition; // updated asynchronously by server, - // "for entertainment purposes only" + // position info updated asynchronously by server and read by client, + // "for entertainment purposes only" + StaticAudioTrackPosLoopQueue::Shared + mPosLoopQueue; }; // ---------------------------------------------------------------------------- @@ -96,7 +137,8 @@ struct audio_track_cblk_t uint32_t mServer; // Number of filled frames consumed by server (mIsOut), // or filled frames provided by server (!mIsOut). // It is updated asynchronously by server without a barrier. - // The value should be used "for entertainment purposes only", + // The value should be used + // "for entertainment purposes only", // which means don't make important decisions based on it. uint32_t mPad1; // unused @@ -313,8 +355,28 @@ public: virtual void flush(); #define MIN_LOOP 16 // minimum length of each loop iteration in frames + + // setLoop(), setBufferPosition(), and setBufferPositionAndLoop() set the + // static buffer position and looping parameters. These commands are not + // synchronous (they do not wait or block); instead they take effect at the + // next buffer data read from the server side. 
However, the client side + // getters will read a cached version of the position and loop variables + // until the setting takes effect. + // + // setBufferPositionAndLoop() is equivalent to calling, in order, setLoop() and + // setBufferPosition(). + // + // The functions should not be relied upon to do parameter or state checking. + // That is done at the AudioTrack level. + void setLoop(size_t loopStart, size_t loopEnd, int loopCount); + void setBufferPosition(size_t position); + void setBufferPositionAndLoop(size_t position, size_t loopStart, size_t loopEnd, + int loopCount); size_t getBufferPosition(); + // getBufferPositionAndLoopCount() provides the proper snapshot of + // position and loopCount together. + void getBufferPositionAndLoopCount(size_t *position, int *loopCount); virtual size_t getMisalignment() { return 0; @@ -326,7 +388,9 @@ public: private: StaticAudioTrackSingleStateQueue::Mutator mMutator; - size_t mBufferPosition; // so that getBufferPosition() appears to be synchronous + StaticAudioTrackPosLoopQueue::Observer mPosLoopObserver; + StaticAudioTrackState mState; // last communicated state to server + StaticAudioTrackPosLoop mPosLoop; // snapshot of position and loop. 
}; // ---------------------------------------------------------------------------- @@ -447,10 +511,13 @@ public: virtual uint32_t getUnderrunFrames() const { return 0; } private: + status_t updateStateWithLoop(StaticAudioTrackState *localState, + const StaticAudioTrackState &update) const; + status_t updateStateWithPosition(StaticAudioTrackState *localState, + const StaticAudioTrackState &update) const; ssize_t pollPosition(); // poll for state queue update, and return current position StaticAudioTrackSingleStateQueue::Observer mObserver; - size_t mPosition; // server's current play position in frames, relative to 0 - + StaticAudioTrackPosLoopQueue::Mutator mPosLoopMutator; size_t mFramesReadySafe; // Assuming size_t read/writes are atomic on 32 / 64 bit // processors, this is a thread-safe version of // mFramesReady. @@ -459,7 +526,8 @@ private: // can cause a track to appear to have a large number // of frames. INT64_MAX means an infinite loop. bool mFramesReadyIsCalledByMultipleThreads; - StaticAudioTrackState mState; + StaticAudioTrackState mState; // Server side state. Any updates from client must be + // passed by the mObserver SingleStateQueue. }; // Proxy used by AudioFlinger for servicing AudioRecord diff --git a/include/private/media/StaticAudioTrackState.h b/include/private/media/StaticAudioTrackState.h deleted file mode 100644 index d483061..0000000 --- a/include/private/media/StaticAudioTrackState.h +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef STATIC_AUDIO_TRACK_STATE_H -#define STATIC_AUDIO_TRACK_STATE_H - -namespace android { - -// Represents a single state of an AudioTrack that was created in static mode (shared memory buffer -// supplied by the client). This state needs to be communicated from the client to server. As this -// state is too large to be updated atomically without a mutex, and mutexes aren't allowed here, the -// state is wrapped by a SingleStateQueue. -struct StaticAudioTrackState { - // do not define constructors, destructors, or virtual methods - - // These fields should both be size_t, but since they are located in shared memory we - // force to 32-bit. The client and server may have different typedefs for size_t. - uint32_t mLoopStart; - uint32_t mLoopEnd; - - int mLoopCount; -}; - -} // namespace android - -#endif // STATIC_AUDIO_TRACK_STATE_H diff --git a/include/radio/IRadio.h b/include/radio/IRadio.h new file mode 100644 index 0000000..1877f8f --- /dev/null +++ b/include/radio/IRadio.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_IRADIO_H +#define ANDROID_HARDWARE_IRADIO_H + +#include <utils/RefBase.h> +#include <binder/IInterface.h> +#include <binder/IMemory.h> +#include <binder/Parcel.h> +#include <system/radio.h> + +namespace android { + +class IRadio : public IInterface +{ +public: + + DECLARE_META_INTERFACE(Radio); + + virtual void detach() = 0; + + virtual status_t setConfiguration(const struct radio_band_config *config) = 0; + + virtual status_t getConfiguration(struct radio_band_config *config) = 0; + + virtual status_t setMute(bool mute) = 0; + + virtual status_t getMute(bool *mute) = 0; + + virtual status_t step(radio_direction_t direction, bool skipSubChannel) = 0; + + virtual status_t scan(radio_direction_t direction, bool skipSubChannel) = 0; + + virtual status_t tune(unsigned int channel, unsigned int subChannel) = 0; + + virtual status_t cancel() = 0; + + virtual status_t getProgramInformation(struct radio_program_info *info) = 0; + + virtual status_t hasControl(bool *hasControl) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnRadio: public BnInterface<IRadio> +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif //ANDROID_HARDWARE_IRADIO_H diff --git a/include/radio/IRadioClient.h b/include/radio/IRadioClient.h new file mode 100644 index 0000000..9062ad6 --- /dev/null +++ b/include/radio/IRadioClient.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_HARDWARE_IRADIO_CLIENT_H +#define ANDROID_HARDWARE_IRADIO_CLIENT_H + +#include <utils/RefBase.h> +#include <binder/IInterface.h> +#include <binder/IMemory.h> +#include <binder/Parcel.h> + +namespace android { + +class IRadioClient : public IInterface +{ +public: + + DECLARE_META_INTERFACE(RadioClient); + + virtual void onEvent(const sp<IMemory>& eventMemory) = 0; + +}; + +// ---------------------------------------------------------------------------- + +class BnRadioClient : public BnInterface<IRadioClient> +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif //ANDROID_HARDWARE_IRADIO_CLIENT_H diff --git a/include/radio/IRadioService.h b/include/radio/IRadioService.h new file mode 100644 index 0000000..a946dd5 --- /dev/null +++ b/include/radio/IRadioService.h @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_IRADIO_SERVICE_H +#define ANDROID_HARDWARE_IRADIO_SERVICE_H + +#include <utils/RefBase.h> +#include <binder/IInterface.h> +#include <binder/Parcel.h> +#include <system/radio.h> + +namespace android { + +class IRadio; +class IRadioClient; + +class IRadioService : public IInterface +{ +public: + + DECLARE_META_INTERFACE(RadioService); + + virtual status_t listModules(struct radio_properties *properties, + uint32_t *numModules) = 0; + + virtual status_t attach(const radio_handle_t handle, + const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool withAudio, + sp<IRadio>& radio) = 0; +}; + +// ---------------------------------------------------------------------------- + +class BnRadioService: public BnInterface<IRadioService> +{ +public: + virtual status_t onTransact( uint32_t code, + const Parcel& data, + Parcel* reply, + uint32_t flags = 0); +}; + +}; // namespace android + +#endif //ANDROID_HARDWARE_IRADIO_SERVICE_H diff --git a/include/radio/Radio.h b/include/radio/Radio.h new file mode 100644 index 0000000..302bf16 --- /dev/null +++ b/include/radio/Radio.h @@ -0,0 +1,88 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_RADIO_H +#define ANDROID_HARDWARE_RADIO_H + +#include <binder/IBinder.h> +#include <utils/threads.h> +#include <radio/RadioCallback.h> +#include <radio/IRadio.h> +#include <radio/IRadioService.h> +#include <radio/IRadioClient.h> +#include <system/radio.h> + +namespace android { + +class MemoryDealer; + +class Radio : public BnRadioClient, + public IBinder::DeathRecipient +{ +public: + + virtual ~Radio(); + + static status_t listModules(struct radio_properties *properties, + uint32_t *numModules); + static sp<Radio> attach(radio_handle_t handle, + const struct radio_band_config *config, + bool withAudio, + const sp<RadioCallback>& callback); + + + void detach(); + + status_t setConfiguration(const struct radio_band_config *config); + + status_t getConfiguration(struct radio_band_config *config); + + status_t setMute(bool mute); + + status_t getMute(bool *mute); + + status_t step(radio_direction_t direction, bool skipSubChannel); + + status_t scan(radio_direction_t direction, bool skipSubChannel); + + status_t tune(unsigned int channel, unsigned int subChannel); + + status_t cancel(); + + status_t getProgramInformation(struct radio_program_info *info); + + status_t hasControl(bool *hasControl); + + // BpRadioClient + virtual void onEvent(const sp<IMemory>& eventMemory); + + //IBinder::DeathRecipient + virtual void binderDied(const wp<IBinder>& who); + +private: + Radio(radio_handle_t handle, + const sp<RadioCallback>&); + static const sp<IRadioService>& getRadioService(); + + Mutex mLock; + sp<IRadio> mIRadio; + const radio_handle_t mHandle; + sp<RadioCallback> mCallback; +}; + +}; // namespace android + +#endif //ANDROID_HARDWARE_RADIO_H diff --git a/media/libnbaio/roundup.c b/include/radio/RadioCallback.h index 1d552d1..4a7f1a6 100644 --- a/media/libnbaio/roundup.c +++ b/include/radio/RadioCallback.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2012 The Android Open Source Project + * Copyright (C) 2015 The Android Open Source Project * * 
Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,19 +14,25 @@ * limitations under the License. */ -#include <media/nbaio/roundup.h> +#ifndef ANDROID_HARDWARE_RADIO_CALLBACK_H +#define ANDROID_HARDWARE_RADIO_CALLBACK_H -unsigned roundup(unsigned v) +#include <utils/RefBase.h> +#include <system/radio.h> + +namespace android { + +class RadioCallback : public RefBase { - // __builtin_clz is undefined for zero input - if (v == 0) { - v = 1; - } - int lz = __builtin_clz((int) v); - unsigned rounded = ((unsigned) 0x80000000) >> lz; - // 0x800000001 and higher are actually rounded _down_ to prevent overflow - if (v > rounded && lz > 0) { - rounded <<= 1; - } - return rounded; -} +public: + + RadioCallback() {} + virtual ~RadioCallback() {} + + virtual void onEvent(struct radio_event *event) = 0; + +}; + +}; // namespace android + +#endif //ANDROID_HARDWARE_RADIO_CALLBACK_H diff --git a/media/libeffects/factory/EffectsFactory.c b/media/libeffects/factory/EffectsFactory.c index 6d30d64..c310fe2 100644 --- a/media/libeffects/factory/EffectsFactory.c +++ b/media/libeffects/factory/EffectsFactory.c @@ -28,6 +28,7 @@ static list_elem_t *gEffectList; // list of effect_entry_t: all currently created effects static list_elem_t *gLibraryList; // list of lib_entry_t: all currently loaded libraries +static list_elem_t *gSkippedEffects; // list of effects skipped because of duplicate uuid // list of effect_descriptor and list of sub effects : all currently loaded // It does not contain effects without sub effects. 
static list_sub_elem_t *gSubEffectList; @@ -63,10 +64,10 @@ static int findEffect(const effect_uuid_t *type, lib_entry_t **lib, effect_descriptor_t **desc); // To search a subeffect in the gSubEffectList -int findSubEffect(const effect_uuid_t *uuid, +static int findSubEffect(const effect_uuid_t *uuid, lib_entry_t **lib, effect_descriptor_t **desc); -static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len); +static void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent); static int stringToUuid(const char *str, effect_uuid_t *uuid); static int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen); @@ -237,8 +238,8 @@ int EffectQueryEffect(uint32_t index, effect_descriptor_t *pDescriptor) } #if (LOG_NDEBUG == 0) - char str[256]; - dumpEffectDescriptor(pDescriptor, str, 256); + char str[512]; + dumpEffectDescriptor(pDescriptor, str, sizeof(str), 0 /* indent */); ALOGV("EffectQueryEffect() desc:%s", str); #endif pthread_mutex_unlock(&gLibLock); @@ -503,15 +504,31 @@ int loadLibrary(cnode *root, const char *name) audio_effect_library_t *desc; list_elem_t *e; lib_entry_t *l; + char path[PATH_MAX]; + char *str; + size_t len; node = config_find(root, PATH_TAG); if (node == NULL) { return -EINVAL; } + // audio_effects.conf always specifies 32 bit lib path: convert to 64 bit path if needed + strlcpy(path, node->value, PATH_MAX); +#ifdef __LP64__ + str = strstr(path, "/lib/"); + if (str == NULL) + return -EINVAL; + len = str - path; + path[len] = '\0'; + strlcat(path, "/lib64/", PATH_MAX); + strlcat(path, node->value + len + strlen("/lib/"), PATH_MAX); +#endif + if (strlen(path) >= PATH_MAX - 1) + return -EINVAL; - hdl = dlopen(node->value, RTLD_NOW); + hdl = dlopen(path, RTLD_NOW); if (hdl == NULL) { - ALOGW("loadLibrary() failed to open %s", node->value); + ALOGW("loadLibrary() failed to open %s", path); goto error; } @@ -535,7 +552,7 @@ int loadLibrary(cnode *root, const char *name) // add entry for 
library in gLibraryList l = malloc(sizeof(lib_entry_t)); l->name = strndup(name, PATH_MAX); - l->path = strndup(node->value, PATH_MAX); + l->path = strndup(path, PATH_MAX); l->handle = hdl; l->desc = desc; l->effects = NULL; @@ -547,7 +564,7 @@ int loadLibrary(cnode *root, const char *name) e->next = gLibraryList; gLibraryList = e; pthread_mutex_unlock(&gLibLock); - ALOGV("getLibrary() linked library %p for path %s", l, node->value); + ALOGV("getLibrary() linked library %p for path %s", l, path); return 0; @@ -595,8 +612,8 @@ int addSubEffect(cnode *root) return -EINVAL; } #if (LOG_NDEBUG==0) - char s[256]; - dumpEffectDescriptor(d, s, 256); + char s[512]; + dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */); ALOGV("addSubEffect() read descriptor %p:%s",d, s); #endif if (EFFECT_API_VERSION_MAJOR(d->apiVersion) != @@ -660,6 +677,13 @@ int loadEffect(cnode *root) ALOGW("loadEffect() invalid uuid %s", node->value); return -EINVAL; } + lib_entry_t *tmp; + bool skip = false; + if (findEffect(NULL, &uuid, &tmp, NULL) == 0) { + ALOGW("skipping duplicate uuid %s %s", node->value, + node->next ? "and its sub-effects" : ""); + skip = true; + } d = malloc(sizeof(effect_descriptor_t)); if (l->desc->get_descriptor(&uuid, d) != 0) { @@ -670,8 +694,8 @@ int loadEffect(cnode *root) return -EINVAL; } #if (LOG_NDEBUG==0) - char s[256]; - dumpEffectDescriptor(d, s, 256); + char s[512]; + dumpEffectDescriptor(d, s, sizeof(s), 0 /* indent */); ALOGV("loadEffect() read descriptor %p:%s",d, s); #endif if (EFFECT_API_VERSION_MAJOR(d->apiVersion) != @@ -682,8 +706,14 @@ int loadEffect(cnode *root) } e = malloc(sizeof(list_elem_t)); e->object = d; - e->next = l->effects; - l->effects = e; + if (skip) { + e->next = gSkippedEffects; + gSkippedEffects = e; + return -EINVAL; + } else { + e->next = l->effects; + l->effects = e; + } // After the UUID node in the config_tree, if node->next is valid, // that would be sub effect node. 
@@ -876,22 +906,30 @@ int findEffect(const effect_uuid_t *type, return ret; } -void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len) { +void dumpEffectDescriptor(effect_descriptor_t *desc, char *str, size_t len, int indent) { char s[256]; + char ss[256]; + char idt[indent + 1]; - snprintf(str, len, "\nEffect Descriptor %p:\n", desc); - strncat(str, "- TYPE: ", len); - uuidToString(&desc->uuid, s, 256); - snprintf(str, len, "- UUID: %s\n", s); - uuidToString(&desc->type, s, 256); - snprintf(str, len, "- TYPE: %s\n", s); - sprintf(s, "- apiVersion: %08X\n- flags: %08X\n", - desc->apiVersion, desc->flags); - strncat(str, s, len); - sprintf(s, "- name: %s\n", desc->name); - strncat(str, s, len); - sprintf(s, "- implementor: %s\n", desc->implementor); - strncat(str, s, len); + memset(idt, ' ', indent); + idt[indent] = 0; + + str[0] = 0; + + snprintf(s, sizeof(s), "%s%s / %s\n", idt, desc->name, desc->implementor); + strlcat(str, s, len); + + uuidToString(&desc->uuid, s, sizeof(s)); + snprintf(ss, sizeof(ss), "%s UUID: %s\n", idt, s); + strlcat(str, ss, len); + + uuidToString(&desc->type, s, sizeof(s)); + snprintf(ss, sizeof(ss), "%s TYPE: %s\n", idt, s); + strlcat(str, ss, len); + + sprintf(s, "%s apiVersion: %08X\n%s flags: %08X\n", idt, + desc->apiVersion, idt, desc->flags); + strlcat(str, s, len); } int stringToUuid(const char *str, effect_uuid_t *uuid) @@ -934,3 +972,40 @@ int uuidToString(const effect_uuid_t *uuid, char *str, size_t maxLen) return 0; } +int EffectDumpEffects(int fd) { + char s[512]; + list_elem_t *e = gLibraryList; + lib_entry_t *l = NULL; + effect_descriptor_t *d = NULL; + int found = 0; + int ret = 0; + + while (e) { + l = (lib_entry_t *)e->object; + list_elem_t *efx = l->effects; + dprintf(fd, "Library %s\n", l->name); + if (!efx) { + dprintf(fd, " (no effects)\n"); + } + while (efx) { + d = (effect_descriptor_t *)efx->object; + dumpEffectDescriptor(d, s, sizeof(s), 2); + dprintf(fd, "%s", s); + efx = efx->next; + } + e = 
e->next; + } + + e = gSkippedEffects; + if (e) { + dprintf(fd, "Skipped effects\n"); + while(e) { + d = (effect_descriptor_t *)e->object; + dumpEffectDescriptor(d, s, sizeof(s), 2 /* indent */); + dprintf(fd, "%s", s); + e = e->next; + } + } + return ret; +} + diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 6c585fb..3b260d6 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -36,6 +36,8 @@ LOCAL_SRC_FILES:= \ IMediaRecorder.cpp \ IRemoteDisplay.cpp \ IRemoteDisplayClient.cpp \ + IResourceManagerClient.cpp \ + IResourceManagerService.cpp \ IStreamSource.cpp \ MediaCodecInfo.cpp \ Metadata.cpp \ @@ -53,6 +55,8 @@ LOCAL_SRC_FILES:= \ CharacterEncodingDetector.cpp \ IMediaDeathNotifier.cpp \ MediaProfiles.cpp \ + MediaResource.cpp \ + MediaResourcePolicy.cpp \ IEffect.cpp \ IEffectClient.cpp \ AudioEffect.cpp \ @@ -61,15 +65,11 @@ LOCAL_SRC_FILES:= \ StringArray.cpp \ AudioPolicy.cpp -LOCAL_SRC_FILES += ../libnbaio/roundup.c - LOCAL_SHARED_LIBRARIES := \ libui liblog libcutils libutils libbinder libsonivox libicuuc libicui18n libexpat \ libcamera_client libstagefright_foundation \ libgui libdl libaudioutils libnbaio -LOCAL_STATIC_LIBRARIES += libinstantssq - LOCAL_WHOLE_STATIC_LIBRARIES := libmedia_helper LOCAL_MODULE:= libmedia @@ -85,12 +85,3 @@ LOCAL_C_INCLUDES := \ include $(BUILD_SHARED_LIBRARY) -include $(CLEAR_VARS) - -LOCAL_SRC_FILES += SingleStateQueue.cpp -LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"' - -LOCAL_MODULE := libinstantssq -LOCAL_MODULE_TAGS := optional - -include $(BUILD_STATIC_LIBRARY) diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp index af103c1..7d8222f 100644 --- a/media/libmedia/AudioEffect.cpp +++ b/media/libmedia/AudioEffect.cpp @@ -486,4 +486,4 @@ status_t AudioEffect::guidToString(const effect_uuid_t *guid, char *str, size_t } -}; // namespace android +} // namespace android diff --git 
a/media/libmedia/AudioParameter.cpp b/media/libmedia/AudioParameter.cpp index 33dbf0b..8c8cf45 100644 --- a/media/libmedia/AudioParameter.cpp +++ b/media/libmedia/AudioParameter.cpp @@ -180,4 +180,4 @@ status_t AudioParameter::getAt(size_t index, String8& key, String8& value) } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioPolicy.cpp b/media/libmedia/AudioPolicy.cpp index d2d0971..c7dafcb 100644 --- a/media/libmedia/AudioPolicy.cpp +++ b/media/libmedia/AudioPolicy.cpp @@ -112,4 +112,4 @@ status_t AudioMix::writeToParcel(Parcel *parcel) const return NO_ERROR; } -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index 07ca14f..100a914 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -112,7 +112,9 @@ AudioRecord::~AudioRecord() mCblkMemory.clear(); mBufferMemory.clear(); IPCThreadState::self()->flushCommands(); - AudioSystem::releaseAudioSessionId(mSessionId, -1); + ALOGV("~AudioRecord, releasing session id %d", + mSessionId); + AudioSystem::releaseAudioSessionId(mSessionId, -1 /*pid*/); } } @@ -159,8 +161,6 @@ status_t AudioRecord::set( } mTransfer = transferType; - AutoMutex lock(mLock); - // invariant that mAudioRecord != 0 is true only after set() returns successfully if (mAudioRecord != 0) { ALOGE("Track already in use"); @@ -233,6 +233,7 @@ status_t AudioRecord::set( if (cbf != NULL) { mAudioRecordThread = new AudioRecordThread(*this, threadCanCallJava); mAudioRecordThread->run("AudioRecord", ANDROID_PRIORITY_AUDIO); + // thread begins in paused state, and will not reference us until start() } // create the IAudioRecord @@ -286,7 +287,6 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession) status_t status = NO_ERROR; if (!(flags & CBLK_INVALID)) { - ALOGV("mAudioRecord->start()"); status = mAudioRecord->start(event, triggerSession); if (status == DEAD_OBJECT) { flags |= CBLK_INVALID; @@ 
-352,6 +352,10 @@ status_t AudioRecord::setMarkerPosition(uint32_t marker) mMarkerPosition = marker; mMarkerReached = false; + sp<AudioRecordThread> t = mAudioRecordThread; + if (t != 0) { + t->wake(); + } return NO_ERROR; } @@ -378,6 +382,10 @@ status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod) mNewPosition = mProxy->getPosition() + updatePeriod; mUpdatePeriod = updatePeriod; + sp<AudioRecordThread> t = mAudioRecordThread; + if (t != 0) { + t->wake(); + } return NO_ERROR; } @@ -408,7 +416,7 @@ status_t AudioRecord::getPosition(uint32_t *position) const uint32_t AudioRecord::getInputFramesLost() const { // no need to check mActive, because if inactive this will return 0, which is what we want - return AudioSystem::getInputFramesLost(getInput()); + return AudioSystem::getInputFramesLost(getInputPrivate()); } // ------------------------------------------------------------------------- @@ -416,7 +424,6 @@ uint32_t AudioRecord::getInputFramesLost() const // must be called with mLock held status_t AudioRecord::openRecord_l(size_t epoch) { - status_t status; const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger(); if (audioFlinger == 0) { ALOGE("Could not get audioflinger"); @@ -431,12 +438,16 @@ status_t AudioRecord::openRecord_l(size_t epoch) } // Client can only express a preference for FAST. Server will perform additional tests. 
- if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !( - // use case: callback transfer mode - (mTransfer == TRANSFER_CALLBACK) && + if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !(( + // either of these use cases: + // use case 1: callback transfer mode + (mTransfer == TRANSFER_CALLBACK) || + // use case 2: obtain/release mode + (mTransfer == TRANSFER_OBTAIN)) && // matching sample rate (mSampleRate == afSampleRate))) { - ALOGW("AUDIO_INPUT_FLAG_FAST denied by client"); + ALOGW("AUDIO_INPUT_FLAG_FAST denied by client; transfer %d, track %u Hz, primary %u Hz", + mTransfer, mSampleRate, afSampleRate); // once denied, do not request again if IAudioRecord is re-created mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST); } @@ -452,7 +463,8 @@ status_t AudioRecord::openRecord_l(size_t epoch) } audio_io_handle_t input; - status = AudioSystem::getInputForAttr(&mAttributes, &input, (audio_session_t)mSessionId, + status_t status = AudioSystem::getInputForAttr(&mAttributes, &input, + (audio_session_t)mSessionId, mSampleRate, mFormat, mChannelMask, mFlags); if (status != NO_ERROR) { @@ -684,9 +696,9 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r return status; } -void AudioRecord::releaseBuffer(Buffer* audioBuffer) +void AudioRecord::releaseBuffer(const Buffer* audioBuffer) { - // all TRANSFER_* are valid + // FIXME add error checking on mode, by adding an internal version size_t stepCount = audioBuffer->size / mFrameSize; if (stepCount == 0) { @@ -704,7 +716,7 @@ void AudioRecord::releaseBuffer(Buffer* audioBuffer) // the server does not automatically disable recorder on overrun, so no need to restart } -audio_io_handle_t AudioRecord::getInput() const +audio_io_handle_t AudioRecord::getInputPrivate() const { AutoMutex lock(mLock); return mInput; @@ -712,7 +724,7 @@ audio_io_handle_t AudioRecord::getInput() const // ------------------------------------------------------------------------- -ssize_t AudioRecord::read(void* buffer, size_t 
userSize) +ssize_t AudioRecord::read(void* buffer, size_t userSize, bool blocking) { if (mTransfer != TRANSFER_SYNC) { return INVALID_OPERATION; @@ -731,7 +743,8 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize) while (userSize >= mFrameSize) { audioBuffer.frameCount = userSize / mFrameSize; - status_t err = obtainBuffer(&audioBuffer, &ClientProxy::kForever); + status_t err = obtainBuffer(&audioBuffer, + blocking ? &ClientProxy::kForever : &ClientProxy::kNonBlocking); if (err < 0) { if (read > 0) { break; @@ -863,8 +876,11 @@ nsecs_t AudioRecord::processAudioBuffer() if (!markerReached && position < markerPosition) { minFrames = markerPosition - position; } - if (updatePeriod > 0 && updatePeriod < minFrames) { - minFrames = updatePeriod; + if (updatePeriod > 0) { + uint32_t remaining = newPosition - position; + if (remaining < minFrames) { + minFrames = remaining; + } } // If > 0, poll periodically to recover from a stuck server. A good value is 2. @@ -990,14 +1006,13 @@ status_t AudioRecord::restoreRecord_l(const char *from) { ALOGW("dead IAudioRecord, creating a new one from %s()", from); ++mSequence; - status_t result; // if the new IAudioRecord is created, openRecord_l() will modify the // following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory. // It will also delete the strong references on previous IAudioRecord and IMemory size_t position = mProxy->getPosition(); mNewPosition = position + mUpdatePeriod; - result = openRecord_l(position); + status_t result = openRecord_l(position); if (result == NO_ERROR) { if (mActive) { // callback thread or sync event hasn't changed @@ -1069,8 +1084,8 @@ bool AudioRecord::AudioRecordThread::threadLoop() case NS_NEVER: return false; case NS_WHENEVER: - // FIXME increase poll interval, or make event-driven - ns = 1000000000LL; + // Event driven: call wake() when callback notifications conditions change. 
+ ns = INT64_MAX; // fall through default: LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns); @@ -1103,6 +1118,17 @@ void AudioRecord::AudioRecordThread::resume() } } +void AudioRecord::AudioRecordThread::wake() +{ + AutoMutex _l(mMyLock); + if (!mPaused && mPausedInt && mPausedNs > 0) { + // audio record is active and internally paused with timeout. + mIgnoreNextPausedInt = true; + mPausedInt = false; + mMyCond.signal(); + } +} + void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns) { AutoMutex _l(mMyLock); @@ -1112,4 +1138,4 @@ void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns) // ------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 9cae21c..9150a94 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -34,7 +34,6 @@ namespace android { Mutex AudioSystem::gLock; Mutex AudioSystem::gLockCache; Mutex AudioSystem::gLockAPS; -Mutex AudioSystem::gLockAPC; sp<IAudioFlinger> AudioSystem::gAudioFlinger; sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient; audio_error_callback AudioSystem::gAudioErrorCallback = NULL; @@ -48,8 +47,6 @@ audio_format_t AudioSystem::gPrevInFormat; audio_channel_mask_t AudioSystem::gPrevInChannelMask; size_t AudioSystem::gInBuffSize = 0; // zero indicates cache is invalid -sp<AudioSystem::AudioPortCallback> AudioSystem::gAudioPortCallback; - // establish binder interface to AudioFlinger service const sp<IAudioFlinger> AudioSystem::get_audio_flinger() { @@ -499,8 +496,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle OutputDescriptor *outputDesc = new OutputDescriptor(*desc); gOutputs.add(ioHandle, outputDesc); - ALOGV("ioConfigChanged() new output samplingRate %u, format %#x channel mask %#x frameCount %zu " - "latency %d", + ALOGV("ioConfigChanged() new output 
samplingRate %u, format %#x channel mask %#x " + "frameCount %zu latency %d", outputDesc->samplingRate, outputDesc->format, outputDesc->channelMask, outputDesc->frameCount, outputDesc->latency); } break; @@ -523,8 +520,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle if (param2 == NULL) break; desc = (const OutputDescriptor *)param2; - ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x channel mask %#x " - "frameCount %zu latency %d", + ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x " + "channel mask %#x frameCount %zu latency %d", ioHandle, desc->samplingRate, desc->format, desc->channelMask, desc->frameCount, desc->latency); OutputDescriptor *outputDesc = gOutputs.valueAt(index); @@ -590,18 +587,22 @@ const sp<IAudioPolicyService> AudioSystem::get_audio_policy_service() status_t AudioSystem::setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address) + const char *device_address, + const char *device_name) { const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); const char *address = ""; + const char *name = ""; if (aps == 0) return PERMISSION_DENIED; if (device_address != NULL) { address = device_address; } - - return aps->setDeviceConnectionState(device, state, address); + if (device_name != NULL) { + name = device_name; + } + return aps->setDeviceConnectionState(device, state, address, name); } audio_policy_dev_state_t AudioSystem::getDeviceConnectionState(audio_devices_t device, @@ -869,7 +870,6 @@ void AudioSystem::clearAudioConfigCache() Mutex::Autolock _l(gLockAPS); gAudioPolicyService.clear(); } - // Do not clear gAudioPortCallback } bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info) @@ -929,12 +929,31 @@ status_t AudioSystem::setAudioPortConfig(const struct audio_port_config *config) return aps->setAudioPortConfig(config); } -void 
AudioSystem::setAudioPortCallback(sp<AudioPortCallback> callBack) +status_t AudioSystem::addAudioPortCallback(const sp<AudioPortCallback>& callBack) { - Mutex::Autolock _l(gLockAPC); - gAudioPortCallback = callBack; + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + + Mutex::Autolock _l(gLockAPS); + if (gAudioPolicyServiceClient == 0) { + return NO_INIT; + } + return gAudioPolicyServiceClient->addAudioPortCallback(callBack); } +status_t AudioSystem::removeAudioPortCallback(const sp<AudioPortCallback>& callBack) +{ + const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service(); + if (aps == 0) return PERMISSION_DENIED; + + Mutex::Autolock _l(gLockAPS); + if (gAudioPolicyServiceClient == 0) { + return NO_INIT; + } + return gAudioPolicyServiceClient->removeAudioPortCallback(callBack); +} + + status_t AudioSystem::acquireSoundTriggerSession(audio_session_t *session, audio_io_handle_t *ioHandle, audio_devices_t *device) @@ -967,36 +986,66 @@ status_t AudioSystem::registerPolicyMixes(Vector<AudioMix> mixes, bool registrat // --------------------------------------------------------------------------- -void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) +status_t AudioSystem::AudioPolicyServiceClient::addAudioPortCallback( + const sp<AudioPortCallback>& callBack) { - { - Mutex::Autolock _l(gLockAPC); - if (gAudioPortCallback != 0) { - gAudioPortCallback->onServiceDied(); + Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { + if (mAudioPortCallbacks[i] == callBack) { + return INVALID_OPERATION; } } - { - Mutex::Autolock _l(gLockAPS); - AudioSystem::gAudioPolicyService.clear(); - } + mAudioPortCallbacks.add(callBack); + return NO_ERROR; +} - ALOGW("AudioPolicyService server died!"); +status_t AudioSystem::AudioPolicyServiceClient::removeAudioPortCallback( + const sp<AudioPortCallback>& callBack) +{ + Mutex::Autolock 
_l(mLock); + size_t i; + for (i = 0; i < mAudioPortCallbacks.size(); i++) { + if (mAudioPortCallbacks[i] == callBack) { + break; + } + } + if (i == mAudioPortCallbacks.size()) { + return INVALID_OPERATION; + } + mAudioPortCallbacks.removeAt(i); + return NO_ERROR; } void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate() { - Mutex::Autolock _l(gLockAPC); - if (gAudioPortCallback != 0) { - gAudioPortCallback->onAudioPortListUpdate(); + Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { + mAudioPortCallbacks[i]->onAudioPortListUpdate(); } } void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate() { - Mutex::Autolock _l(gLockAPC); - if (gAudioPortCallback != 0) { - gAudioPortCallback->onAudioPatchListUpdate(); + Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { + mAudioPortCallbacks[i]->onAudioPatchListUpdate(); + } +} + +void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused) +{ + { + Mutex::Autolock _l(mLock); + for (size_t i = 0; i < mAudioPortCallbacks.size(); i++) { + mAudioPortCallbacks[i]->onServiceDied(); + } } + { + Mutex::Autolock _l(gLockAPS); + AudioSystem::gAudioPolicyService.clear(); + } + + ALOGW("AudioPolicyService server died!"); } -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 735db5c..ce30c62 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -33,11 +33,16 @@ #define WAIT_PERIOD_MS 10 #define WAIT_STREAM_END_TIMEOUT_SEC 120 - +static const int kMaxLoopCountNotifications = 32; namespace android { // --------------------------------------------------------------------------- +template <typename T> +const T &min(const T &x, const T &y) { + return x < y ? 
x : y; +} + static int64_t convertTimespecToUs(const struct timespec &tv) { return tv.tv_sec * 1000000ll + tv.tv_nsec / 1000; @@ -61,12 +66,11 @@ status_t AudioTrack::getMinFrameCount( return BAD_VALUE; } - // FIXME merge with similar code in createTrack_l(), except we're missing - // some information here that is available in createTrack_l(): + // FIXME handle in server, like createTrack_l(), possible missing info: // audio_io_handle_t output // audio_format_t format // audio_channel_mask_t channelMask - // audio_output_flags_t flags + // audio_output_flags_t flags (FAST) uint32_t afSampleRate; status_t status; status = AudioSystem::getOutputSamplingRate(&afSampleRate, streamType); @@ -96,16 +100,16 @@ status_t AudioTrack::getMinFrameCount( minBufCount = 2; } - *frameCount = (sampleRate == 0) ? afFrameCount * minBufCount : - afFrameCount * minBufCount * uint64_t(sampleRate) / afSampleRate; - // The formula above should always produce a non-zero value, but return an error - // in the unlikely event that it does not, as that's part of the API contract. + *frameCount = minBufCount * sourceFramesNeeded(sampleRate, afFrameCount, afSampleRate); + // The formula above should always produce a non-zero value under normal circumstances: + // AudioTrack.SAMPLE_RATE_HZ_MIN <= sampleRate <= AudioTrack.SAMPLE_RATE_HZ_MAX. + // Return error in the unlikely event that it does not, as that's part of the API contract. 
if (*frameCount == 0) { - ALOGE("AudioTrack::getMinFrameCount failed for streamType %d, sampleRate %d", + ALOGE("AudioTrack::getMinFrameCount failed for streamType %d, sampleRate %u", streamType, sampleRate); return BAD_VALUE; } - ALOGV("getMinFrameCount=%zu: afFrameCount=%zu, minBufCount=%d, afSampleRate=%d, afLatency=%d", + ALOGV("getMinFrameCount=%zu: afFrameCount=%zu, minBufCount=%u, afSampleRate=%u, afLatency=%u", *frameCount, afFrameCount, minBufCount, afSampleRate, afLatency); return NO_ERROR; } @@ -199,8 +203,8 @@ AudioTrack::~AudioTrack() mCblkMemory.clear(); mSharedBuffer.clear(); IPCThreadState::self()->flushCommands(); - ALOGV("~AudioTrack, releasing session id from %d on behalf of %d", - IPCThreadState::self()->getCallingPid(), mClientPid); + ALOGV("~AudioTrack, releasing session id %d from %d on behalf of %d", + mSessionId, IPCThreadState::self()->getCallingPid(), mClientPid); AudioSystem::releaseAudioSessionId(mSessionId, mClientPid); } } @@ -225,9 +229,9 @@ status_t AudioTrack::set( const audio_attributes_t* pAttributes) { ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, " - "flags #%x, notificationFrames %u, sessionId %d, transferType %d", + "flags #%x, notificationFrames %u, sessionId %d, transferType %d, uid %d, pid %d", streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames, - sessionId, transferType); + sessionId, transferType, uid, pid); switch (transferType) { case TRANSFER_DEFAULT: @@ -270,8 +274,6 @@ status_t AudioTrack::set( ALOGV("set() streamType %d frameCount %zu flags %04x", streamType, frameCount, flags); - AutoMutex lock(mLock); - // invariant that mAudioTrack != 0 is true only after set() returns successfully if (mAudioTrack != 0) { ALOGE("Track already in use"); @@ -295,6 +297,9 @@ status_t AudioTrack::set( ALOGV("Building AudioTrack with attributes: usage=%d content=%d flags=0x%x tags=[%s]", mAttributes.usage, mAttributes.content_type, mAttributes.flags, 
mAttributes.tags); mStreamType = AUDIO_STREAM_DEFAULT; + if ((mAttributes.flags & AUDIO_FLAG_HW_AV_SYNC) != 0) { + flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_HW_AV_SYNC); + } } // these below should probably come from the audioFlinger too... @@ -317,12 +322,6 @@ status_t AudioTrack::set( uint32_t channelCount = audio_channel_count_from_out_mask(channelMask); mChannelCount = channelCount; - // AudioFlinger does not currently support 8-bit data in shared memory - if (format == AUDIO_FORMAT_PCM_8_BIT && sharedBuffer != 0) { - ALOGE("8-bit data in shared memory is not supported"); - return BAD_VALUE; - } - // force direct flag if format is not linear PCM // or offload was requested if ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) @@ -346,12 +345,9 @@ status_t AudioTrack::set( } else { mFrameSize = sizeof(uint8_t); } - mFrameSizeAF = mFrameSize; } else { ALOG_ASSERT(audio_is_linear_pcm(format)); mFrameSize = channelCount * audio_bytes_per_sample(format); - mFrameSizeAF = channelCount * audio_bytes_per_sample( - format == AUDIO_FORMAT_PCM_8_BIT ? 
AUDIO_FORMAT_PCM_16_BIT : format); // createTrack will return an error if PCM format is not supported by server, // so no need to check for specific PCM formats here } @@ -403,6 +399,7 @@ status_t AudioTrack::set( if (cbf != NULL) { mAudioTrackThread = new AudioTrackThread(*this, threadCanCallJava); mAudioTrackThread->run("AudioTrack", ANDROID_PRIORITY_AUDIO, 0 /*stack*/); + // thread begins in paused state, and will not reference us until start() } // create the IAudioTrack @@ -420,7 +417,10 @@ status_t AudioTrack::set( mStatus = NO_ERROR; mState = STATE_STOPPED; mUserData = user; - mLoopPeriod = 0; + mLoopCount = 0; + mLoopStart = 0; + mLoopEnd = 0; + mLoopCountNotified = 0; mMarkerPosition = 0; mMarkerReached = false; mNewPosition = 0; @@ -531,14 +531,12 @@ void AudioTrack::stop() // the playback head position will reset to 0, so if a marker is set, we need // to activate it again mMarkerReached = false; -#if 0 - // Force flush if a shared buffer is used otherwise audioflinger - // will not stop before end of buffer is reached. - // It may be needed to make sure that we stop playback, likely in case looping is on. + if (mSharedBuffer != 0) { - flush_l(); + // clear buffer position and loop count. 
+ mStaticProxy->setBufferPositionAndLoop(0 /* position */, + 0 /* loopStart */, 0 /* loopEnd */, 0 /* loopCount */); } -#endif sp<AudioTrackThread> t = mAudioTrackThread; if (t != 0) { @@ -669,14 +667,18 @@ void AudioTrack::getAuxEffectSendLevel(float* level) const status_t AudioTrack::setSampleRate(uint32_t rate) { - if (mIsTimed || isOffloadedOrDirect()) { + AutoMutex lock(mLock); + if (rate == mSampleRate) { + return NO_ERROR; + } + if (mIsTimed || isOffloadedOrDirect_l() || (mFlags & AUDIO_OUTPUT_FLAG_FAST)) { return INVALID_OPERATION; } - - AutoMutex lock(mLock); if (mOutput == AUDIO_IO_HANDLE_NONE) { return NO_INIT; } + // NOTE: it is theoretically possible, but highly unlikely, that a device change + // could mean a previously allowed sampling rate is no longer allowed. uint32_t afSamplingRate; if (AudioSystem::getSamplingRate(mOutput, &afSamplingRate) != NO_ERROR) { return NO_INIT; @@ -740,10 +742,15 @@ status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount) { - // Setting the loop will reset next notification update period (like setPosition). - mNewPosition = updateAndGetPosition_l() + mUpdatePeriod; - mLoopPeriod = loopCount != 0 ? loopEnd - loopStart : 0; + // We do not update the periodic notification point. + // mNewPosition = updateAndGetPosition_l() + mUpdatePeriod; + mLoopCount = loopCount; + mLoopEnd = loopEnd; + mLoopStart = loopStart; + mLoopCountNotified = loopCount; mStaticProxy->setLoop(loopStart, loopEnd, loopCount); + + // Waking the AudioTrackThread is not needed as this cannot be called when active. 
} status_t AudioTrack::setMarkerPosition(uint32_t marker) @@ -757,6 +764,10 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker) mMarkerPosition = marker; mMarkerReached = false; + sp<AudioTrackThread> t = mAudioTrackThread; + if (t != 0) { + t->wake(); + } return NO_ERROR; } @@ -786,6 +797,10 @@ status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod) mNewPosition = updateAndGetPosition_l() + updatePeriod; mUpdatePeriod = updatePeriod; + sp<AudioTrackThread> t = mAudioTrackThread; + if (t != 0) { + t->wake(); + } return NO_ERROR; } @@ -823,12 +838,11 @@ status_t AudioTrack::setPosition(uint32_t position) if (mState == STATE_ACTIVE) { return INVALID_OPERATION; } + // After setting the position, use full update period before notification. mNewPosition = updateAndGetPosition_l() + mUpdatePeriod; - mLoopPeriod = 0; - // FIXME Check whether loops and setting position are incompatible in old code. - // If we use setLoop for both purposes we lose the capability to set the position while looping. - mStaticProxy->setLoop(position, mFrameCount, 0); + mStaticProxy->setBufferPosition(position); + // Waking the AudioTrackThread is not needed as this cannot be called when active. return NO_ERROR; } @@ -893,10 +907,18 @@ status_t AudioTrack::reload() return INVALID_OPERATION; } mNewPosition = mUpdatePeriod; - mLoopPeriod = 0; - // FIXME The new code cannot reload while keeping a loop specified. - // Need to check how the old code handled this, and whether it's a significant change. - mStaticProxy->setLoop(0, mFrameCount, 0); + (void) updateAndGetPosition_l(); + mPosition = 0; +#if 0 + // The documentation is not clear on the behavior of reload() and the restoration + // of loop count. Historically we have not restored loop count, start, end, + // but it makes sense if one desires to repeat playing a particular sound. 
+ if (mLoopCount != 0) { + mLoopCountNotified = mLoopCount; + mStaticProxy->setLoop(mLoopStart, mLoopEnd, mLoopCount); + } +#endif + mStaticProxy->setBufferPosition(0); return NO_ERROR; } @@ -945,9 +967,9 @@ status_t AudioTrack::createTrack_l() if (status != NO_ERROR || output == AUDIO_IO_HANDLE_NONE) { - ALOGE("Could not get audio output for stream type %d, usage %d, sample rate %u, format %#x," + ALOGE("Could not get audio output for session %d, stream type %d, usage %d, sample rate %u, format %#x," " channel mask %#x, flags %#x", - streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags); + mSessionId, streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags); return BAD_VALUE; } { @@ -962,6 +984,7 @@ status_t AudioTrack::createTrack_l() ALOGE("getLatency(%d) failed status %d", output, status); goto release; } + ALOGV("createTrack_l() output %d afLatency %u", output, afLatency); size_t afFrameCount; status = AudioSystem::getFrameCount(output, &afFrameCount); @@ -986,23 +1009,23 @@ status_t AudioTrack::createTrack_l() // use case 1: shared buffer (mSharedBuffer != 0) || // use case 2: callback transfer mode - (mTransfer == TRANSFER_CALLBACK)) && + (mTransfer == TRANSFER_CALLBACK) || + // use case 3: obtain/release mode + (mTransfer == TRANSFER_OBTAIN)) && // matching sample rate (mSampleRate == afSampleRate))) { - ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client"); + ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client; transfer %d, track %u Hz, output %u Hz", + mTransfer, mSampleRate, afSampleRate); // once denied, do not request again if IAudioTrack is re-created mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST); } - ALOGV("createTrack_l() output %d afLatency %d", output, afLatency); // The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where // n = 1 fast track with single buffering; nBuffering is ignored // n = 2 fast track with double buffering - // n = 2 normal track, no sample 
rate conversion - // n = 3 normal track, with sample rate conversion - // (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering) - // n > 3 very high latency or very small notification interval; nBuffering is ignored - const uint32_t nBuffering = (mSampleRate == afSampleRate) ? 2 : 3; + // n = 2 normal track, (including those with sample rate conversion) + // n >= 3 very high latency or very small notification interval (unused). + const uint32_t nBuffering = 2; mNotificationFramesAct = mNotificationFramesReq; @@ -1019,12 +1042,12 @@ status_t AudioTrack::createTrack_l() mNotificationFramesAct = frameCount; } } else if (mSharedBuffer != 0) { - - // Ensure that buffer alignment matches channel count - // 8-bit data in shared memory is not currently supported by AudioFlinger - size_t alignment = audio_bytes_per_sample( - mFormat == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : mFormat); + // FIXME: Ensure client side memory buffers need + // not have additional alignment beyond sample + // (e.g. 16 bit stereo accessed as 32 bit frame). + size_t alignment = audio_bytes_per_sample(mFormat); if (alignment & 1) { + // for AUDIO_FORMAT_PCM_24_BIT_PACKED (not exposed through Java). alignment = 1; } if (mChannelCount > 1) { @@ -1042,40 +1065,10 @@ status_t AudioTrack::createTrack_l() // there's no frameCount parameter. // But when initializing a shared buffer AudioTrack via set(), // there _is_ a frameCount parameter. We silently ignore it. 
- frameCount = mSharedBuffer->size() / mFrameSizeAF; - - } else if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) { - - // FIXME move these calculations and associated checks to server - - // Ensure that buffer depth covers at least audio hardware latency - uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate); - ALOGV("afFrameCount=%zu, minBufCount=%d, afSampleRate=%u, afLatency=%d", - afFrameCount, minBufCount, afSampleRate, afLatency); - if (minBufCount <= nBuffering) { - minBufCount = nBuffering; - } - - size_t minFrameCount = afFrameCount * minBufCount * uint64_t(mSampleRate) / afSampleRate; - ALOGV("minFrameCount: %zu, afFrameCount=%zu, minBufCount=%d, sampleRate=%u, afSampleRate=%u" - ", afLatency=%d", - minFrameCount, afFrameCount, minBufCount, mSampleRate, afSampleRate, afLatency); - - if (frameCount == 0) { - frameCount = minFrameCount; - } else if (frameCount < minFrameCount) { - // not ALOGW because it happens all the time when playing key clicks over A2DP - ALOGV("Minimum buffer size corrected from %zu to %zu", - frameCount, minFrameCount); - frameCount = minFrameCount; - } - // Make sure that application is notified with sufficient margin before underrun - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { - mNotificationFramesAct = frameCount/nBuffering; - } - + frameCount = mSharedBuffer->size() / mFrameSize; } else { - // For fast tracks, the frame count calculations and checks are done by server + // For fast and normal streaming tracks, + // the frame count calculations and checks are done by server } IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT; @@ -1101,12 +1094,10 @@ status_t AudioTrack::createTrack_l() size_t temp = frameCount; // temp may be replaced by a revised value of frameCount, // but we will still need the original value also + int originalSessionId = mSessionId; sp<IAudioTrack> track = audioFlinger->createTrack(streamType, mSampleRate, - // AudioFlinger only sees 
16-bit PCM - mFormat == AUDIO_FORMAT_PCM_8_BIT && - !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT) ? - AUDIO_FORMAT_PCM_16_BIT : mFormat, + mFormat, mChannelMask, &temp, &trackFlags, @@ -1116,6 +1107,8 @@ status_t AudioTrack::createTrack_l() &mSessionId, mClientUid, &status); + ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId, + "session ID changed from %d to %d", originalSessionId, mSessionId); if (status != NO_ERROR) { ALOGE("AudioFlinger could not create track, status: %d", status); @@ -1161,23 +1154,10 @@ status_t AudioTrack::createTrack_l() if (trackFlags & IAudioFlinger::TRACK_FAST) { ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu", frameCount); mAwaitBoost = true; - if (mSharedBuffer == 0) { - // Theoretically double-buffering is not required for fast tracks, - // due to tighter scheduling. But in practice, to accommodate kernels with - // scheduling jitter, and apps with computation jitter, we use double-buffering. - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { - mNotificationFramesAct = frameCount/nBuffering; - } - } } else { ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu", frameCount); // once denied, do not request again if IAudioTrack is re-created mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST); - if (mSharedBuffer == 0) { - if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) { - mNotificationFramesAct = frameCount/nBuffering; - } - } } } if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) { @@ -1200,6 +1180,16 @@ status_t AudioTrack::createTrack_l() //return NO_INIT; } } + // Make sure that application is notified with sufficient margin before underrun + if (mSharedBuffer == 0 && audio_is_linear_pcm(mFormat)) { + // Theoretically double-buffering is not required for fast tracks, + // due to tighter scheduling. 
But in practice, to accommodate kernels with + // scheduling jitter, and apps with computation jitter, we use double-buffering + // for fast tracks just like normal streaming tracks. + if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount / nBuffering) { + mNotificationFramesAct = frameCount / nBuffering; + } + } // We retain a copy of the I/O handle, but don't own the reference mOutput = output; @@ -1211,9 +1201,13 @@ status_t AudioTrack::createTrack_l() // address space. AudioFlinger::TrackBase::mBuffer is for the server address space. void* buffers; if (mSharedBuffer == 0) { - buffers = (char*)cblk + sizeof(audio_track_cblk_t); + buffers = cblk + 1; } else { buffers = mSharedBuffer->pointer(); + if (buffers == NULL) { + ALOGE("Could not get buffer pointer"); + return NO_INIT; + } } mAudioTrack->attachAuxEffect(mAuxEffectId); @@ -1230,9 +1224,9 @@ status_t AudioTrack::createTrack_l() // update proxy if (mSharedBuffer == 0) { mStaticProxy.clear(); - mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF); + mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize); } else { - mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF); + mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSize); mProxy = mStaticProxy; } @@ -1258,7 +1252,7 @@ release: return status; } -status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) +status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount, size_t *nonContig) { if (audioBuffer == NULL) { return BAD_VALUE; @@ -1285,7 +1279,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount) ALOGE("%s invalid waitCount %d", __func__, waitCount); requested = NULL; } - return obtainBuffer(audioBuffer, requested); + return obtainBuffer(audioBuffer, requested, NULL /*elapsed*/, nonContig); } status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *requested, 
@@ -1352,7 +1346,7 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *re } while ((status == DEAD_OBJECT) && (tryCounter-- > 0)); audioBuffer->frameCount = buffer.mFrameCount; - audioBuffer->size = buffer.mFrameCount * mFrameSizeAF; + audioBuffer->size = buffer.mFrameCount * mFrameSize; audioBuffer->raw = buffer.mRaw; if (nonContig != NULL) { *nonContig = buffer.mNonContig; @@ -1360,13 +1354,14 @@ status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, const struct timespec *re return status; } -void AudioTrack::releaseBuffer(Buffer* audioBuffer) +void AudioTrack::releaseBuffer(const Buffer* audioBuffer) { + // FIXME add error checking on mode, by adding an internal version if (mTransfer == TRANSFER_SHARED) { return; } - size_t stepCount = audioBuffer->size / mFrameSizeAF; + size_t stepCount = audioBuffer->size / mFrameSize; if (stepCount == 0) { return; } @@ -1431,15 +1426,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize, bool blocking) return ssize_t(err); } - size_t toWrite; - if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { - // Divide capacity by 2 to take expansion into account - toWrite = audioBuffer.size >> 1; - memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) buffer, toWrite); - } else { - toWrite = audioBuffer.size; - memcpy(audioBuffer.i8, buffer, toWrite); - } + size_t toWrite = audioBuffer.size; + memcpy(audioBuffer.i8, buffer, toWrite); buffer = ((const char *) buffer) + toWrite; userSize -= toWrite; written += toWrite; @@ -1559,9 +1547,8 @@ nsecs_t AudioTrack::processAudioBuffer() // that the upper layers can recreate the track if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) { status_t status = restoreTrack_l("processAudioBuffer"); - mLock.unlock(); - // Run again immediately, but with a new IAudioTrack - return 0; + // after restoration, continue below to make sure that the loop and buffer events + // are notified because they have been cleared 
from mCblk->mFlags above. } } @@ -1610,7 +1597,6 @@ nsecs_t AudioTrack::processAudioBuffer() } // Cache other fields that will be needed soon - uint32_t loopPeriod = mLoopPeriod; uint32_t sampleRate = mSampleRate; uint32_t notificationFrames = mNotificationFramesAct; if (mRefreshRemaining) { @@ -1622,8 +1608,30 @@ nsecs_t AudioTrack::processAudioBuffer() uint32_t sequence = mSequence; sp<AudioTrackClientProxy> proxy = mProxy; + // Determine the number of new loop callback(s) that will be needed, while locked. + int loopCountNotifications = 0; + uint32_t loopPeriod = 0; // time in frames for next EVENT_LOOP_END or EVENT_BUFFER_END + + if (mLoopCount > 0) { + int loopCount; + size_t bufferPosition; + mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount); + loopPeriod = ((loopCount > 0) ? mLoopEnd : mFrameCount) - bufferPosition; + loopCountNotifications = min(mLoopCountNotified - loopCount, kMaxLoopCountNotifications); + mLoopCountNotified = loopCount; // discard any excess notifications + } else if (mLoopCount < 0) { + // FIXME: We're not accurate with notification count and position with infinite looping + // since loopCount from server side will always return -1 (we could decrement it). + size_t bufferPosition = mStaticProxy->getBufferPosition(); + loopCountNotifications = int((flags & (CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL)) != 0); + loopPeriod = mLoopEnd - bufferPosition; + } else if (/* mLoopCount == 0 && */ mSharedBuffer != 0) { + size_t bufferPosition = mStaticProxy->getBufferPosition(); + loopPeriod = mFrameCount - bufferPosition; + } + // These fields don't need to be cached, because they are assigned only by set(): - // mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFrameSizeAF, mFlags + // mTransfer, mCbf, mUserData, mFormat, mFrameSize, mFlags // mFlags is also assigned by createTrack_l(), but not the bit we care about. 
mLock.unlock(); @@ -1662,10 +1670,9 @@ nsecs_t AudioTrack::processAudioBuffer() if (newUnderrun) { mCbf(EVENT_UNDERRUN, mUserData, NULL); } - // FIXME we will miss loops if loop cycle was signaled several times since last call - // to processAudioBuffer() - if (flags & (CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL)) { + while (loopCountNotifications > 0) { mCbf(EVENT_LOOP_END, mUserData, NULL); + --loopCountNotifications; } if (flags & CBLK_BUFFER_END) { mCbf(EVENT_BUFFER_END, mUserData, NULL); @@ -1701,10 +1708,11 @@ nsecs_t AudioTrack::processAudioBuffer() minFrames = markerPosition - position; } if (loopPeriod > 0 && loopPeriod < minFrames) { + // loopPeriod is already adjusted for actual position. minFrames = loopPeriod; } - if (updatePeriod > 0 && updatePeriod < minFrames) { - minFrames = updatePeriod; + if (updatePeriod > 0) { + minFrames = min(minFrames, uint32_t(newPosition - position)); } // If > 0, poll periodically to recover from a stuck server. A good value is 2. @@ -1767,13 +1775,6 @@ nsecs_t AudioTrack::processAudioBuffer() } } - // Divide buffer size by 2 to take into account the expansion - // due to 8 to 16 bit conversion: the callback must fill only half - // of the destination buffer - if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { - audioBuffer.size >>= 1; - } - size_t reqSize = audioBuffer.size; mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer); size_t writtenSize = audioBuffer.size; @@ -1793,13 +1794,7 @@ nsecs_t AudioTrack::processAudioBuffer() return WAIT_PERIOD_MS * 1000000LL; } - if (mFormat == AUDIO_FORMAT_PCM_8_BIT && !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT)) { - // 8 to 16 bit conversion, note that source and destination are the same address - memcpy_to_i16_from_u8(audioBuffer.i16, (const uint8_t *) audioBuffer.i8, writtenSize); - audioBuffer.size <<= 1; - } - - size_t releasedFrames = audioBuffer.size / mFrameSizeAF; + size_t releasedFrames = writtenSize / mFrameSize; audioBuffer.frameCount = releasedFrames; 
mRemainingFrames -= releasedFrames; if (misalignment >= releasedFrames) { @@ -1844,7 +1839,6 @@ status_t AudioTrack::restoreTrack_l(const char *from) ALOGW("dead IAudioTrack, %s, creating a new one from %s()", isOffloadedOrDirect_l() ? "Offloaded or Direct" : "PCM", from); ++mSequence; - status_t result; // refresh the audio configuration cache in this process to make sure we get new // output parameters and new IAudioFlinger in createTrack_l() @@ -1856,39 +1850,39 @@ status_t AudioTrack::restoreTrack_l(const char *from) } // save the old static buffer position - size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0; + size_t bufferPosition = 0; + int loopCount = 0; + if (mStaticProxy != 0) { + mStaticProxy->getBufferPositionAndLoopCount(&bufferPosition, &loopCount); + } // If a new IAudioTrack is successfully created, createTrack_l() will modify the // following member variables: mAudioTrack, mCblkMemory and mCblk. // It will also delete the strong references on previous IAudioTrack and IMemory. // If a new IAudioTrack cannot be created, the previous (dead) instance will be left intact. - result = createTrack_l(); + status_t result = createTrack_l(); // take the frames that will be lost by track recreation into account in saved position + // For streaming tracks, this is the amount we obtained from the user/client + // (not the number actually consumed at the server - those are already lost). (void) updateAndGetPosition_l(); - mPosition = mReleased; + if (mStaticProxy == 0) { + mPosition = mReleased; + } if (result == NO_ERROR) { - // continue playback from last known position, but - // don't attempt to restore loop after invalidation; it's difficult and not worthwhile - if (mStaticProxy != NULL) { - mLoopPeriod = 0; - mStaticProxy->setLoop(bufferPosition, mFrameCount, 0); - } - // FIXME How do we simulate the fact that all frames present in the buffer at the time of - // track destruction have been played? 
This is critical for SoundPool implementation - // This must be broken, and needs to be tested/debugged. -#if 0 - // restore write index and set other indexes to reflect empty buffer status - if (!strcmp(from, "start")) { - // Make sure that a client relying on callback events indicating underrun or - // the actual amount of audio frames played (e.g SoundPool) receives them. - if (mSharedBuffer == 0) { - // restart playback even if buffer is not completely filled. - android_atomic_or(CBLK_FORCEREADY, &mCblk->mFlags); + // Continue playback from last known position and restore loop. + if (mStaticProxy != 0) { + if (loopCount != 0) { + mStaticProxy->setBufferPositionAndLoop(bufferPosition, + mLoopStart, mLoopEnd, loopCount); + } else { + mStaticProxy->setBufferPosition(bufferPosition); + if (bufferPosition == mFrameCount) { + ALOGD("restoring track at end of static buffer"); + } } } -#endif if (mState == STATE_ACTIVE) { result = mAudioTrack->start(); } @@ -2148,8 +2142,8 @@ bool AudioTrack::AudioTrackThread::threadLoop() case NS_NEVER: return false; case NS_WHENEVER: - // FIXME increase poll interval, or make event-driven - ns = 1000000000LL; + // Event driven: call wake() when callback notifications conditions change. + ns = INT64_MAX; // fall through default: LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns); @@ -2182,6 +2176,17 @@ void AudioTrack::AudioTrackThread::resume() } } +void AudioTrack::AudioTrackThread::wake() +{ + AutoMutex _l(mMyLock); + if (!mPaused && mPausedInt && mPausedNs > 0) { + // audio track is active and internally paused with timeout. 
+ mIgnoreNextPausedInt = true; + mPausedInt = false; + mMyCond.signal(); + } +} + void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns) { AutoMutex _l(mMyLock); @@ -2189,4 +2194,4 @@ void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns) mPausedNs = ns; } -}; // namespace android +} // namespace android diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp index ff24475..6d5f1af 100644 --- a/media/libmedia/AudioTrackShared.cpp +++ b/media/libmedia/AudioTrackShared.cpp @@ -28,7 +28,21 @@ namespace android { // used to clamp a value to size_t. TODO: move to another file. template <typename T> size_t clampToSize(T x) { - return x > SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x; + return sizeof(T) > sizeof(size_t) && x > (T) SIZE_MAX ? SIZE_MAX : x < 0 ? 0 : (size_t) x; +} + +// incrementSequence is used to determine the next sequence value +// for the loop and position sequence counters. It should return +// a value between "other" + 1 and "other" + INT32_MAX, the choice of +// which needs to be the "least recently used" sequence value for "self". +// In general, this means (new_self) returned is max(self, other) + 1. + +static uint32_t incrementSequence(uint32_t self, uint32_t other) { + int32_t diff = self - other; + if (diff >= 0 && diff < INT32_MAX) { + return self + 1; // we're already ahead of other. + } + return other + 1; // we're behind, so move just ahead of other. 
} audio_track_cblk_t::audio_track_cblk_t() @@ -409,7 +423,6 @@ status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *request goto end; } // check for obtainBuffer interrupted by client - // check for obtainBuffer interrupted by client if (flags & CBLK_INTERRUPT) { ALOGV("waitStreamEndDone() interrupted by client"); status = -EINTR; @@ -485,8 +498,11 @@ end: StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) : AudioTrackClientProxy(cblk, buffers, frameCount, frameSize), - mMutator(&cblk->u.mStatic.mSingleStateQueue), mBufferPosition(0) + mMutator(&cblk->u.mStatic.mSingleStateQueue), + mPosLoopObserver(&cblk->u.mStatic.mPosLoopQueue) { + memset(&mState, 0, sizeof(mState)); + memset(&mPosLoop, 0, sizeof(mPosLoop)); } void StaticAudioTrackClientProxy::flush() @@ -501,30 +517,72 @@ void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int // FIXME Should return an error status return; } - StaticAudioTrackState newState; - newState.mLoopStart = (uint32_t) loopStart; - newState.mLoopEnd = (uint32_t) loopEnd; - newState.mLoopCount = loopCount; - size_t bufferPosition; - if (loopCount == 0 || (bufferPosition = getBufferPosition()) >= loopEnd) { - bufferPosition = loopStart; + mState.mLoopStart = (uint32_t) loopStart; + mState.mLoopEnd = (uint32_t) loopEnd; + mState.mLoopCount = loopCount; + mState.mLoopSequence = incrementSequence(mState.mLoopSequence, mState.mPositionSequence); + // set patch-up variables until the mState is acknowledged by the ServerProxy. + // observed buffer position and loop count will freeze until then to give the + // illusion of a synchronous change. + getBufferPositionAndLoopCount(NULL, NULL); + // preserve behavior to restart at mState.mLoopStart if position exceeds mState.mLoopEnd. 
+ if (mState.mLoopCount != 0 && mPosLoop.mBufferPosition >= mState.mLoopEnd) { + mPosLoop.mBufferPosition = mState.mLoopStart; + } + mPosLoop.mLoopCount = mState.mLoopCount; + (void) mMutator.push(mState); +} + +void StaticAudioTrackClientProxy::setBufferPosition(size_t position) +{ + // This can only happen on a 64-bit client + if (position > UINT32_MAX) { + // FIXME Should return an error status + return; + } + mState.mPosition = (uint32_t) position; + mState.mPositionSequence = incrementSequence(mState.mPositionSequence, mState.mLoopSequence); + // set patch-up variables until the mState is acknowledged by the ServerProxy. + // observed buffer position and loop count will freeze until then to give the + // illusion of a synchronous change. + if (mState.mLoopCount > 0) { // only check if loop count is changing + getBufferPositionAndLoopCount(NULL, NULL); // get last position + } + mPosLoop.mBufferPosition = position; + if (position >= mState.mLoopEnd) { + // no ongoing loop is possible if position is greater than loopEnd. + mPosLoop.mLoopCount = 0; } - mBufferPosition = bufferPosition; // snapshot buffer position until loop is acknowledged. 
- (void) mMutator.push(newState); + (void) mMutator.push(mState); +} + +void StaticAudioTrackClientProxy::setBufferPositionAndLoop(size_t position, size_t loopStart, + size_t loopEnd, int loopCount) +{ + setLoop(loopStart, loopEnd, loopCount); + setBufferPosition(position); } size_t StaticAudioTrackClientProxy::getBufferPosition() { - size_t bufferPosition; - if (mMutator.ack()) { - bufferPosition = (size_t) mCblk->u.mStatic.mBufferPosition; - if (bufferPosition > mFrameCount) { - bufferPosition = mFrameCount; - } - } else { - bufferPosition = mBufferPosition; + getBufferPositionAndLoopCount(NULL, NULL); + return mPosLoop.mBufferPosition; +} + +void StaticAudioTrackClientProxy::getBufferPositionAndLoopCount( + size_t *position, int *loopCount) +{ + if (mMutator.ack() == StaticAudioTrackSingleStateQueue::SSQ_DONE) { + if (mPosLoopObserver.poll(mPosLoop)) { + ; // a valid mPosLoop should be available if ackDone is true. + } + } + if (position != NULL) { + *position = mPosLoop.mBufferPosition; + } + if (loopCount != NULL) { + *loopCount = mPosLoop.mLoopCount; } - return bufferPosition; } // --------------------------------------------------------------------------- @@ -560,7 +618,8 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush) ssize_t filled = rear - newFront; // Rather than shutting down on a corrupt flush, just treat it as a full flush if (!(0 <= filled && (size_t) filled <= mFrameCount)) { - ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, filled %d=%#x", + ALOGE("mFlush %#x -> %#x, front %#x, rear %#x, mask %#x, newFront %#x, " + "filled %d=%#x", mFlush, flush, front, rear, mask, newFront, filled, filled); newFront = rear; } @@ -739,13 +798,12 @@ void AudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount) StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers, size_t frameCount, size_t frameSize) : AudioTrackServerProxy(cblk, buffers, frameCount, frameSize), - 
mObserver(&cblk->u.mStatic.mSingleStateQueue), mPosition(0), + mObserver(&cblk->u.mStatic.mSingleStateQueue), + mPosLoopMutator(&cblk->u.mStatic.mPosLoopQueue), mFramesReadySafe(frameCount), mFramesReady(frameCount), mFramesReadyIsCalledByMultipleThreads(false) { - mState.mLoopStart = 0; - mState.mLoopEnd = 0; - mState.mLoopCount = 0; + memset(&mState, 0, sizeof(mState)); } void StaticAudioTrackServerProxy::framesReadyIsCalledByMultipleThreads() @@ -762,55 +820,97 @@ size_t StaticAudioTrackServerProxy::framesReady() return mFramesReadySafe; } -ssize_t StaticAudioTrackServerProxy::pollPosition() +status_t StaticAudioTrackServerProxy::updateStateWithLoop( + StaticAudioTrackState *localState, const StaticAudioTrackState &update) const { - size_t position = mPosition; - StaticAudioTrackState state; - if (mObserver.poll(state)) { + if (localState->mLoopSequence != update.mLoopSequence) { bool valid = false; - size_t loopStart = state.mLoopStart; - size_t loopEnd = state.mLoopEnd; - if (state.mLoopCount == 0) { - if (loopStart > mFrameCount) { - loopStart = mFrameCount; - } - // ignore loopEnd - mPosition = position = loopStart; - mFramesReady = mFrameCount - mPosition; - mState.mLoopCount = 0; + const size_t loopStart = update.mLoopStart; + const size_t loopEnd = update.mLoopEnd; + size_t position = localState->mPosition; + if (update.mLoopCount == 0) { valid = true; - } else if (state.mLoopCount >= -1) { + } else if (update.mLoopCount >= -1) { if (loopStart < loopEnd && loopEnd <= mFrameCount && loopEnd - loopStart >= MIN_LOOP) { // If the current position is greater than the end of the loop // we "wrap" to the loop start. This might cause an audible pop. if (position >= loopEnd) { - mPosition = position = loopStart; - } - if (state.mLoopCount == -1) { - mFramesReady = INT64_MAX; - } else { - // mFramesReady is 64 bits to handle the effective number of frames - // that the static audio track contains, including loops. 
- // TODO: Later consider fixing overflow, but does not seem needed now - // as will not overflow if loopStart and loopEnd are Java "ints". - mFramesReady = int64_t(state.mLoopCount) * (loopEnd - loopStart) - + mFrameCount - mPosition; + position = loopStart; } - mState = state; valid = true; } } - if (!valid || mPosition > mFrameCount) { + if (!valid || position > mFrameCount) { + return NO_INIT; + } + localState->mPosition = position; + localState->mLoopCount = update.mLoopCount; + localState->mLoopEnd = loopEnd; + localState->mLoopStart = loopStart; + localState->mLoopSequence = update.mLoopSequence; + } + return OK; +} + +status_t StaticAudioTrackServerProxy::updateStateWithPosition( + StaticAudioTrackState *localState, const StaticAudioTrackState &update) const +{ + if (localState->mPositionSequence != update.mPositionSequence) { + if (update.mPosition > mFrameCount) { + return NO_INIT; + } else if (localState->mLoopCount != 0 && update.mPosition >= localState->mLoopEnd) { + localState->mLoopCount = 0; // disable loop count if position is beyond loop end. + } + localState->mPosition = update.mPosition; + localState->mPositionSequence = update.mPositionSequence; + } + return OK; +} + +ssize_t StaticAudioTrackServerProxy::pollPosition() +{ + StaticAudioTrackState state; + if (mObserver.poll(state)) { + StaticAudioTrackState trystate = mState; + bool result; + const int32_t diffSeq = state.mLoopSequence - state.mPositionSequence; + + if (diffSeq < 0) { + result = updateStateWithLoop(&trystate, state) == OK && + updateStateWithPosition(&trystate, state) == OK; + } else { + result = updateStateWithPosition(&trystate, state) == OK && + updateStateWithLoop(&trystate, state) == OK; + } + if (!result) { + mObserver.done(); + // caution: no update occurs so server state will be inconsistent with client state. 
ALOGE("%s client pushed an invalid state, shutting down", __func__); mIsShutdown = true; return (ssize_t) NO_INIT; } + mState = trystate; + if (mState.mLoopCount == -1) { + mFramesReady = INT64_MAX; + } else if (mState.mLoopCount == 0) { + mFramesReady = mFrameCount - mState.mPosition; + } else if (mState.mLoopCount > 0) { + // TODO: Later consider fixing overflow, but does not seem needed now + // as will not overflow if loopStart and loopEnd are Java "ints". + mFramesReady = int64_t(mState.mLoopCount) * (mState.mLoopEnd - mState.mLoopStart) + + mFrameCount - mState.mPosition; + } mFramesReadySafe = clampToSize(mFramesReady); // This may overflow, but client is not supposed to rely on it - mCblk->u.mStatic.mBufferPosition = (uint32_t) position; + StaticAudioTrackPosLoop posLoop; + + posLoop.mLoopCount = (int32_t) mState.mLoopCount; + posLoop.mBufferPosition = (uint32_t) mState.mPosition; + mPosLoopMutator.push(posLoop); + mObserver.done(); // safe to read mStatic variables. } - return (ssize_t) position; + return (ssize_t) mState.mPosition; } status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush __unused) @@ -849,7 +949,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush } // As mFramesReady is the total remaining frames in the static audio track, // it is always larger or equal to avail. - LOG_ALWAYS_FATAL_IF(mFramesReady < avail); + LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail); buffer->mNonContig = mFramesReady == INT64_MAX ? 
SIZE_MAX : clampToSize(mFramesReady - avail); mUnreleased = avail; return NO_ERROR; @@ -858,7 +958,7 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) { size_t stepCount = buffer->mFrameCount; - LOG_ALWAYS_FATAL_IF(!(stepCount <= mFramesReady)); + LOG_ALWAYS_FATAL_IF(!((int64_t) stepCount <= mFramesReady)); LOG_ALWAYS_FATAL_IF(!(stepCount <= mUnreleased)); if (stepCount == 0) { // prevent accidental re-use of buffer @@ -868,11 +968,12 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) } mUnreleased -= stepCount; audio_track_cblk_t* cblk = mCblk; - size_t position = mPosition; + size_t position = mState.mPosition; size_t newPosition = position + stepCount; int32_t setFlags = 0; if (!(position <= newPosition && newPosition <= mFrameCount)) { - ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, mFrameCount); + ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, + mFrameCount); newPosition = mFrameCount; } else if (mState.mLoopCount != 0 && newPosition == mState.mLoopEnd) { newPosition = mState.mLoopStart; @@ -885,7 +986,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) if (newPosition == mFrameCount) { setFlags |= CBLK_BUFFER_END; } - mPosition = newPosition; + mState.mPosition = newPosition; if (mFramesReady != INT64_MAX) { mFramesReady -= stepCount; } @@ -893,7 +994,10 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer) cblk->mServer += stepCount; // This may overflow, but client is not supposed to rely on it - cblk->u.mStatic.mBufferPosition = (uint32_t) newPosition; + StaticAudioTrackPosLoop posLoop; + posLoop.mBufferPosition = mState.mPosition; + posLoop.mLoopCount = mState.mLoopCount; + mPosLoopMutator.push(posLoop); if (setFlags != 0) { (void) android_atomic_or(setFlags, &cblk->mFlags); // this would be a good place to wake a futex diff --git 
a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp index 8e3b633..38055f9 100644 --- a/media/libmedia/IAudioFlinger.cpp +++ b/media/libmedia/IAudioFlinger.cpp @@ -83,6 +83,8 @@ enum { GET_AUDIO_HW_SYNC }; +#define MAX_ITEMS_PER_LIST 1024 + class BpAudioFlinger : public BpInterface<IAudioFlinger> { public: @@ -1289,15 +1291,27 @@ status_t BnAudioFlinger::onTransact( } break; case LIST_AUDIO_PORTS: { CHECK_INTERFACE(IAudioFlinger, data, reply); - unsigned int num_ports = data.readInt32(); + unsigned int numPortsReq = data.readInt32(); + if (numPortsReq > MAX_ITEMS_PER_LIST) { + numPortsReq = MAX_ITEMS_PER_LIST; + } + unsigned int numPorts = numPortsReq; struct audio_port *ports = - (struct audio_port *)calloc(num_ports, + (struct audio_port *)calloc(numPortsReq, sizeof(struct audio_port)); - status_t status = listAudioPorts(&num_ports, ports); + if (ports == NULL) { + reply->writeInt32(NO_MEMORY); + reply->writeInt32(0); + return NO_ERROR; + } + status_t status = listAudioPorts(&numPorts, ports); reply->writeInt32(status); + reply->writeInt32(numPorts); if (status == NO_ERROR) { - reply->writeInt32(num_ports); - reply->write(&ports, num_ports * sizeof(struct audio_port)); + if (numPortsReq > numPorts) { + numPortsReq = numPorts; + } + reply->write(ports, numPortsReq * sizeof(struct audio_port)); } free(ports); return NO_ERROR; @@ -1336,15 +1350,27 @@ status_t BnAudioFlinger::onTransact( } break; case LIST_AUDIO_PATCHES: { CHECK_INTERFACE(IAudioFlinger, data, reply); - unsigned int num_patches = data.readInt32(); + unsigned int numPatchesReq = data.readInt32(); + if (numPatchesReq > MAX_ITEMS_PER_LIST) { + numPatchesReq = MAX_ITEMS_PER_LIST; + } + unsigned int numPatches = numPatchesReq; struct audio_patch *patches = - (struct audio_patch *)calloc(num_patches, + (struct audio_patch *)calloc(numPatchesReq, sizeof(struct audio_patch)); - status_t status = listAudioPatches(&num_patches, patches); + if (patches == NULL) { + reply->writeInt32(NO_MEMORY); 
+ reply->writeInt32(0); + return NO_ERROR; + } + status_t status = listAudioPatches(&numPatches, patches); reply->writeInt32(status); + reply->writeInt32(numPatches); if (status == NO_ERROR) { - reply->writeInt32(num_patches); - reply->write(&patches, num_patches * sizeof(struct audio_patch)); + if (numPatchesReq > numPatches) { + numPatchesReq = numPatches; + } + reply->write(patches, numPatchesReq * sizeof(struct audio_patch)); } free(patches); return NO_ERROR; @@ -1369,4 +1395,4 @@ status_t BnAudioFlinger::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp index 1c299f7..641e6c1 100644 --- a/media/libmedia/IAudioFlingerClient.cpp +++ b/media/libmedia/IAudioFlingerClient.cpp @@ -99,4 +99,4 @@ status_t BnAudioFlingerClient::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp index cfb28a9..39374d8 100644 --- a/media/libmedia/IAudioPolicyService.cpp +++ b/media/libmedia/IAudioPolicyService.cpp @@ -86,13 +86,15 @@ public: virtual status_t setDeviceConnectionState( audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address) + const char *device_address, + const char *device_name) { Parcel data, reply; data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor()); data.writeInt32(static_cast <uint32_t>(device)); data.writeInt32(static_cast <uint32_t>(state)); data.writeCString(device_address); + data.writeCString(device_name); remote()->transact(SET_DEVICE_CONNECTION_STATE, data, &reply); return static_cast <status_t> (reply.readInt32()); } @@ -728,9 +730,11 @@ status_t BnAudioPolicyService::onTransact( audio_policy_dev_state_t state = static_cast 
<audio_policy_dev_state_t>(data.readInt32()); const char *device_address = data.readCString(); + const char *device_name = data.readCString(); reply->writeInt32(static_cast<uint32_t> (setDeviceConnectionState(device, state, - device_address))); + device_address, + device_name))); return NO_ERROR; } break; @@ -1224,4 +1228,4 @@ status_t BnAudioPolicyService::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioPolicyServiceClient.cpp b/media/libmedia/IAudioPolicyServiceClient.cpp index e802277..7c65878 100644 --- a/media/libmedia/IAudioPolicyServiceClient.cpp +++ b/media/libmedia/IAudioPolicyServiceClient.cpp @@ -80,4 +80,4 @@ status_t BnAudioPolicyServiceClient::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp index 8a4a383..9d80753 100644 --- a/media/libmedia/IAudioRecord.cpp +++ b/media/libmedia/IAudioRecord.cpp @@ -91,4 +91,4 @@ status_t BnAudioRecord::onTransact( } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp index df209fd..651cb61 100644 --- a/media/libmedia/IAudioTrack.cpp +++ b/media/libmedia/IAudioTrack.cpp @@ -292,4 +292,4 @@ status_t BnAudioTrack::onTransact( } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp index b08fa82..714a0b3 100644 --- a/media/libmedia/IDrm.cpp +++ b/media/libmedia/IDrm.cpp @@ -125,7 +125,8 @@ struct BpDrm : public BpInterface<IDrm> { Vector<uint8_t> const &initData, String8 const &mimeType, DrmPlugin::KeyType keyType, KeyedVector<String8, String8> const &optionalParameters, - Vector<uint8_t> &request, String8 &defaultUrl) { + Vector<uint8_t> &request, String8 &defaultUrl, + 
DrmPlugin::KeyRequestType *keyRequestType) { Parcel data, reply; data.writeInterfaceToken(IDrm::getInterfaceDescriptor()); @@ -143,6 +144,7 @@ struct BpDrm : public BpInterface<IDrm> { readVector(reply, request); defaultUrl = reply.readString8(); + *keyRequestType = static_cast<DrmPlugin::KeyRequestType>(reply.readInt32()); return reply.readInt32(); } @@ -562,13 +564,15 @@ status_t BnDrm::onTransact( Vector<uint8_t> request; String8 defaultUrl; + DrmPlugin::KeyRequestType keyRequestType; + + status_t result = getKeyRequest(sessionId, initData, mimeType, + keyType, optionalParameters, request, defaultUrl, + &keyRequestType); - status_t result = getKeyRequest(sessionId, initData, - mimeType, keyType, - optionalParameters, - request, defaultUrl); writeVector(reply, request); reply->writeString8(defaultUrl); + reply->writeInt32(static_cast<int32_t>(keyRequestType)); reply->writeInt32(result); return OK; } diff --git a/media/libmedia/IDrmClient.cpp b/media/libmedia/IDrmClient.cpp index f50715e..490c6ed 100644 --- a/media/libmedia/IDrmClient.cpp +++ b/media/libmedia/IDrmClient.cpp @@ -78,4 +78,4 @@ status_t BnDrmClient::onTransact( } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp index c2fff78..eb4b098 100644 --- a/media/libmedia/IEffect.cpp +++ b/media/libmedia/IEffect.cpp @@ -201,4 +201,4 @@ status_t BnEffect::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IEffectClient.cpp b/media/libmedia/IEffectClient.cpp index aef4371..1322e72 100644 --- a/media/libmedia/IEffectClient.cpp +++ b/media/libmedia/IEffectClient.cpp @@ -141,4 +141,4 @@ status_t BnEffectClient::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaCodecList.cpp 
b/media/libmedia/IMediaCodecList.cpp index bf7c5ca..80020db 100644 --- a/media/libmedia/IMediaCodecList.cpp +++ b/media/libmedia/IMediaCodecList.cpp @@ -160,4 +160,4 @@ status_t BnMediaCodecList::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp index 38e9ca0..d4360ea 100644 --- a/media/libmedia/IMediaDeathNotifier.cpp +++ b/media/libmedia/IMediaDeathNotifier.cpp @@ -108,4 +108,4 @@ IMediaDeathNotifier::DeathNotifier::~DeathNotifier() } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp index 7e26ee6..2ff7658 100644 --- a/media/libmedia/IMediaHTTPConnection.cpp +++ b/media/libmedia/IMediaHTTPConnection.cpp @@ -178,5 +178,4 @@ private: IMPLEMENT_META_INTERFACE( MediaHTTPConnection, "android.media.IMediaHTTPConnection"); -} // namespace android - +} // namespace android diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp index 1260582..f30d0f3 100644 --- a/media/libmedia/IMediaHTTPService.cpp +++ b/media/libmedia/IMediaHTTPService.cpp @@ -54,5 +54,4 @@ struct BpMediaHTTPService : public BpInterface<IMediaHTTPService> { IMPLEMENT_META_INTERFACE( MediaHTTPService, "android.media.IMediaHTTPService"); -} // namespace android - +} // namespace android diff --git a/media/libmedia/IMediaLogService.cpp b/media/libmedia/IMediaLogService.cpp index a4af7b7..230749e 100644 --- a/media/libmedia/IMediaLogService.cpp +++ b/media/libmedia/IMediaLogService.cpp @@ -91,4 +91,4 @@ status_t BnMediaLogService::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp index aa2665a..551cffe 100644 
--- a/media/libmedia/IMediaMetadataRetriever.cpp +++ b/media/libmedia/IMediaMetadataRetriever.cpp @@ -297,4 +297,4 @@ status_t BnMediaMetadataRetriever::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp index 7f3e5cc..ce3009a 100644 --- a/media/libmedia/IMediaPlayer.cpp +++ b/media/libmedia/IMediaPlayer.cpp @@ -39,6 +39,7 @@ enum { START, STOP, IS_PLAYING, + SET_PLAYBACK_RATE, PAUSE, SEEK_TO, GET_CURRENT_POSITION, @@ -164,6 +165,15 @@ public: return reply.readInt32(); } + status_t setPlaybackRate(float rate) + { + Parcel data, reply; + data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); + data.writeFloat(rate); + remote()->transact(SET_PLAYBACK_RATE, data, &reply); + return reply.readInt32(); + } + status_t pause() { Parcel data, reply; @@ -426,6 +436,11 @@ status_t BnMediaPlayer::onTransact( reply->writeInt32(ret); return NO_ERROR; } break; + case SET_PLAYBACK_RATE: { + CHECK_INTERFACE(IMediaPlayer, data, reply); + reply->writeInt32(setPlaybackRate(data.readFloat())); + return NO_ERROR; + } break; case PAUSE: { CHECK_INTERFACE(IMediaPlayer, data, reply); reply->writeInt32(pause()); @@ -559,4 +574,4 @@ status_t BnMediaPlayer::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaPlayerClient.cpp b/media/libmedia/IMediaPlayerClient.cpp index a670c96..d608386 100644 --- a/media/libmedia/IMediaPlayerClient.cpp +++ b/media/libmedia/IMediaPlayerClient.cpp @@ -75,4 +75,4 @@ status_t BnMediaPlayerClient::onTransact( } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index feea267..aa7b2e1 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ 
b/media/libmedia/IMediaPlayerService.cpp @@ -234,4 +234,4 @@ status_t BnMediaPlayerService::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index a733b68..8ca256c 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -46,7 +46,6 @@ enum { SET_OUTPUT_FORMAT, SET_VIDEO_ENCODER, SET_AUDIO_ENCODER, - SET_OUTPUT_FILE_PATH, SET_OUTPUT_FILE_FD, SET_VIDEO_SIZE, SET_VIDEO_FRAMERATE, @@ -158,16 +157,6 @@ public: return reply.readInt32(); } - status_t setOutputFile(const char* path) - { - ALOGV("setOutputFile(%s)", path); - Parcel data, reply; - data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); - data.writeCString(path); - remote()->transact(SET_OUTPUT_FILE_PATH, data, &reply); - return reply.readInt32(); - } - status_t setOutputFile(int fd, int64_t offset, int64_t length) { ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length); Parcel data, reply; @@ -300,7 +289,8 @@ IMPLEMENT_META_INTERFACE(MediaRecorder, "android.media.IMediaRecorder"); // ---------------------------------------------------------------------- status_t BnMediaRecorder::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) + uint32_t code, const Parcel& data, Parcel* reply, + uint32_t flags) { switch (code) { case RELEASE: { @@ -390,13 +380,6 @@ status_t BnMediaRecorder::onTransact( return NO_ERROR; } break; - case SET_OUTPUT_FILE_PATH: { - ALOGV("SET_OUTPUT_FILE_PATH"); - CHECK_INTERFACE(IMediaRecorder, data, reply); - const char* path = data.readCString(); - reply->writeInt32(setOutputFile(path)); - return NO_ERROR; - } break; case SET_OUTPUT_FILE_FD: { ALOGV("SET_OUTPUT_FILE_FD"); CHECK_INTERFACE(IMediaRecorder, data, reply); @@ -445,7 +428,8 @@ status_t BnMediaRecorder::onTransact( case SET_PREVIEW_SURFACE: { 
ALOGV("SET_PREVIEW_SURFACE"); CHECK_INTERFACE(IMediaRecorder, data, reply); - sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>(data.readStrongBinder()); + sp<IGraphicBufferProducer> surface = interface_cast<IGraphicBufferProducer>( + data.readStrongBinder()); reply->writeInt32(setPreviewSurface(surface)); return NO_ERROR; } break; @@ -479,4 +463,4 @@ status_t BnMediaRecorder::onTransact( // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/media/libmedia/IMediaRecorderClient.cpp b/media/libmedia/IMediaRecorderClient.cpp index e7907e3..6795d23 100644 --- a/media/libmedia/IMediaRecorderClient.cpp +++ b/media/libmedia/IMediaRecorderClient.cpp @@ -67,4 +67,4 @@ status_t BnMediaRecorderClient::onTransact( } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IRemoteDisplay.cpp b/media/libmedia/IRemoteDisplay.cpp index 1e15434..869d11a 100644 --- a/media/libmedia/IRemoteDisplay.cpp +++ b/media/libmedia/IRemoteDisplay.cpp @@ -91,4 +91,4 @@ status_t BnRemoteDisplay::onTransact( } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IRemoteDisplayClient.cpp b/media/libmedia/IRemoteDisplayClient.cpp index 9d63bc9..bedeb6c 100644 --- a/media/libmedia/IRemoteDisplayClient.cpp +++ b/media/libmedia/IRemoteDisplayClient.cpp @@ -101,4 +101,4 @@ status_t BnRemoteDisplayClient::onTransact( } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/IResourceManagerClient.cpp b/media/libmedia/IResourceManagerClient.cpp new file mode 100644 index 0000000..6fa56fc --- /dev/null +++ b/media/libmedia/IResourceManagerClient.cpp @@ -0,0 +1,70 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#include <utils/RefBase.h> +#include <binder/IInterface.h> +#include <binder/Parcel.h> + +#include <media/IResourceManagerClient.h> + +namespace android { + +enum { + RECLAIM_RESOURCE = IBinder::FIRST_CALL_TRANSACTION, +}; + +class BpResourceManagerClient: public BpInterface<IResourceManagerClient> +{ +public: + BpResourceManagerClient(const sp<IBinder> &impl) + : BpInterface<IResourceManagerClient>(impl) + { + } + + virtual bool reclaimResource() { + Parcel data, reply; + data.writeInterfaceToken(IResourceManagerClient::getInterfaceDescriptor()); + + bool ret = false; + status_t status = remote()->transact(RECLAIM_RESOURCE, data, &reply); + if (status == NO_ERROR) { + ret = (bool)reply.readInt32(); + } + return ret; + } +}; + +IMPLEMENT_META_INTERFACE(ResourceManagerClient, "android.media.IResourceManagerClient"); + +// ---------------------------------------------------------------------- + +status_t BnResourceManagerClient::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) +{ + switch (code) { + case RECLAIM_RESOURCE: { + CHECK_INTERFACE(IResourceManagerClient, data, reply); + bool ret = reclaimResource(); + reply->writeInt32(ret); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +}; // namespace android diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp new file mode 100644 index 0000000..95a2d1c --- /dev/null +++ b/media/libmedia/IResourceManagerService.cpp @@ -0,0 +1,169 @@ +/* +** +** Copyright 2015, 
The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IResourceManagerService" +#include <utils/Log.h> + +#include "media/IResourceManagerService.h" + +#include <binder/Parcel.h> + +#include <stdint.h> +#include <sys/types.h> + +namespace android { + +enum { + CONFIG = IBinder::FIRST_CALL_TRANSACTION, + ADD_RESOURCE, + REMOVE_RESOURCE, + RECLAIM_RESOURCE, +}; + +template <typename T> +static void writeToParcel(Parcel *data, const Vector<T> &items) { + size_t size = items.size(); + size_t sizePosition = data->dataPosition(); + // truncates size, but should be okay for this usecase + data->writeUint32(static_cast<uint32_t>(size)); + for (size_t i = 0; i < size; i++) { + size_t position = data->dataPosition(); + items[i].writeToParcel(data); + } +} + +template <typename T> +static void readFromParcel(const Parcel &data, Vector<T> *items) { + size_t size = (size_t)data.readUint32(); + for (size_t i = 0; i < size; i++) { + T item; + item.readFromParcel(data); + items->add(item); + } +} + +class BpResourceManagerService : public BpInterface<IResourceManagerService> +{ +public: + BpResourceManagerService(const sp<IBinder> &impl) + : BpInterface<IResourceManagerService>(impl) + { + } + + virtual void config(const Vector<MediaResourcePolicy> &policies) { + Parcel data, reply; + data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); + writeToParcel(&data, policies); + 
remote()->transact(CONFIG, data, &reply); + } + + virtual void addResource( + int pid, + int64_t clientId, + const sp<IResourceManagerClient> client, + const Vector<MediaResource> &resources) { + Parcel data, reply; + data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); + data.writeInt32(pid); + data.writeInt64(clientId); + data.writeStrongBinder(IInterface::asBinder(client)); + writeToParcel(&data, resources); + + remote()->transact(ADD_RESOURCE, data, &reply); + } + + virtual void removeResource(int64_t clientId) { + Parcel data, reply; + data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); + data.writeInt64(clientId); + + remote()->transact(REMOVE_RESOURCE, data, &reply); + } + + virtual bool reclaimResource(int callingPid, const Vector<MediaResource> &resources) { + Parcel data, reply; + data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor()); + data.writeInt32(callingPid); + writeToParcel(&data, resources); + + bool ret = false; + status_t status = remote()->transact(RECLAIM_RESOURCE, data, &reply); + if (status == NO_ERROR) { + ret = (bool)reply.readInt32(); + } + return ret; + } +}; + +IMPLEMENT_META_INTERFACE(ResourceManagerService, "android.media.IResourceManagerService"); + +// ---------------------------------------------------------------------- + + +status_t BnResourceManagerService::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) +{ + switch (code) { + case CONFIG: { + CHECK_INTERFACE(IResourceManagerService, data, reply); + int pid = data.readInt32(); + sp<IResourceManagerClient> client( + interface_cast<IResourceManagerClient>(data.readStrongBinder())); + Vector<MediaResourcePolicy> policies; + readFromParcel(data, &policies); + config(policies); + return NO_ERROR; + } break; + + case ADD_RESOURCE: { + CHECK_INTERFACE(IResourceManagerService, data, reply); + int pid = data.readInt32(); + int64_t clientId = data.readInt64(); + 
sp<IResourceManagerClient> client( + interface_cast<IResourceManagerClient>(data.readStrongBinder())); + Vector<MediaResource> resources; + readFromParcel(data, &resources); + addResource(pid, clientId, client, resources); + return NO_ERROR; + } break; + + case REMOVE_RESOURCE: { + CHECK_INTERFACE(IResourceManagerService, data, reply); + int64_t clientId = data.readInt64(); + removeResource(clientId); + return NO_ERROR; + } break; + + case RECLAIM_RESOURCE: { + CHECK_INTERFACE(IResourceManagerService, data, reply); + int callingPid = data.readInt32(); + Vector<MediaResource> resources; + readFromParcel(data, &resources); + bool ret = reclaimResource(callingPid, resources); + reply->writeInt32(ret); + return NO_ERROR; + } break; + + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp index d480aef..840e453 100644 --- a/media/libmedia/IStreamSource.cpp +++ b/media/libmedia/IStreamSource.cpp @@ -35,6 +35,9 @@ const char *const IStreamListener::kKeyDiscontinuityMask = "discontinuity-mask"; // static const char *const IStreamListener::kKeyMediaTimeUs = "media-time-us"; +// static +const char *const IStreamListener::kKeyRecentMediaTimeUs = "recent-media-time-us"; + enum { // IStreamSource SET_LISTENER = IBinder::FIRST_CALL_TRANSACTION, diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp index 721d8d7..271be0c 100644 --- a/media/libmedia/JetPlayer.cpp +++ b/media/libmedia/JetPlayer.cpp @@ -408,7 +408,8 @@ int JetPlayer::queueSegment(int segmentNum, int libNum, int repeatCount, int tra ALOGV("JetPlayer::queueSegment segmentNum=%d, libNum=%d, repeatCount=%d, transpose=%d", segmentNum, libNum, repeatCount, transpose); Mutex::Autolock lock(mMutex); - return JET_QueueSegment(mEasData, segmentNum, libNum, repeatCount, transpose, muteFlags, userID); + 
return JET_QueueSegment(mEasData, segmentNum, libNum, repeatCount, transpose, muteFlags, + userID); } //------------------------------------------------------------------------------------------------- @@ -449,7 +450,8 @@ void JetPlayer::dump() void JetPlayer::dumpJetStatus(S_JET_STATUS* pJetStatus) { if (pJetStatus!=NULL) - ALOGV(">> current JET player status: userID=%d segmentRepeatCount=%d numQueuedSegments=%d paused=%d", + ALOGV(">> current JET player status: userID=%d segmentRepeatCount=%d numQueuedSegments=%d " + "paused=%d", pJetStatus->currentUserID, pJetStatus->segmentRepeatCount, pJetStatus->numQueuedSegments, pJetStatus->paused); else diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp index e2e6042..47f9258 100644 --- a/media/libmedia/MediaProfiles.cpp +++ b/media/libmedia/MediaProfiles.cpp @@ -163,7 +163,8 @@ MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap UNUSED } /*static*/ int -MediaProfiles::findTagForName(const MediaProfiles::NameToTagMap *map, size_t nMappings, const char *name) +MediaProfiles::findTagForName(const MediaProfiles::NameToTagMap *map, size_t nMappings, + const char *name) { int tag = -1; for (size_t i = 0; i < nMappings; ++i) { @@ -295,9 +296,8 @@ MediaProfiles::createAudioEncoderCap(const char **atts) CHECK(codec != -1); MediaProfiles::AudioEncoderCap *cap = - new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]), atoi(atts[7]), - atoi(atts[9]), atoi(atts[11]), atoi(atts[13]), - atoi(atts[15])); + new MediaProfiles::AudioEncoderCap(static_cast<audio_encoder>(codec), atoi(atts[5]), + atoi(atts[7]), atoi(atts[9]), atoi(atts[11]), atoi(atts[13]), atoi(atts[15])); logAudioEncoderCap(*cap); return cap; } @@ -330,7 +330,8 @@ MediaProfiles::createCamcorderProfile(int cameraId, const char **atts, Vector<in !strcmp("fileFormat", atts[2]) && !strcmp("duration", atts[4])); - const size_t nProfileMappings = 
sizeof(sCamcorderQualityNameMap)/sizeof(sCamcorderQualityNameMap[0]); + const size_t nProfileMappings = sizeof(sCamcorderQualityNameMap)/ + sizeof(sCamcorderQualityNameMap[0]); const int quality = findTagForName(sCamcorderQualityNameMap, nProfileMappings, atts[1]); CHECK(quality != -1); @@ -722,16 +723,20 @@ MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality qual MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles( MediaProfiles::CamcorderProfile **lowTimeLapseProfile, MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) { - *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW); - *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF); + *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile( + CAMCORDER_QUALITY_TIME_LAPSE_LOW); + *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile( + CAMCORDER_QUALITY_TIME_LAPSE_QCIF); } /*static*/ void MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles( MediaProfiles::CamcorderProfile **highTimeLapseProfile, MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) { - *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH); - *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P); + *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile( + CAMCORDER_QUALITY_TIME_LAPSE_HIGH); + *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile( + CAMCORDER_QUALITY_TIME_LAPSE_480P); } /*static*/ MediaProfiles::CamcorderProfile* @@ -809,7 +814,8 @@ MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles) // high camcorder time lapse profiles. 
MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile; - createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile); + createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, + &highSpecificTimeLapseProfile); profiles->mCamcorderProfiles.add(highTimeLapseProfile); profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile); diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp new file mode 100644 index 0000000..8be01bc --- /dev/null +++ b/media/libmedia/MediaResource.cpp @@ -0,0 +1,65 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaResource" +#include <utils/Log.h> +#include <media/MediaResource.h> + +namespace android { + +const char kResourceSecureCodec[] = "secure-codec"; +const char kResourceNonSecureCodec[] = "non-secure-codec"; +const char kResourceGraphicMemory[] = "graphic-memory"; + +MediaResource::MediaResource() : mValue(0) {} + +MediaResource::MediaResource(String8 type, uint64_t value) + : mType(type), + mValue(value) {} + +MediaResource::MediaResource(String8 type, String8 subType, uint64_t value) + : mType(type), + mSubType(subType), + mValue(value) {} + +void MediaResource::readFromParcel(const Parcel &parcel) { + mType = parcel.readString8(); + mSubType = parcel.readString8(); + mValue = parcel.readUint64(); +} + +void MediaResource::writeToParcel(Parcel *parcel) const { + parcel->writeString8(mType); + parcel->writeString8(mSubType); + parcel->writeUint64(mValue); +} + +String8 MediaResource::toString() const { + String8 str; + str.appendFormat("%s/%s:%llu", mType.string(), mSubType.string(), mValue); + return str; +} + +bool MediaResource::operator==(const MediaResource &other) const { + return (other.mType == mType) && (other.mSubType == mSubType) && (other.mValue == mValue); +} + +bool MediaResource::operator!=(const MediaResource &other) const { + return !(*this == other); +} + +}; // namespace android diff --git a/media/libmedia/MediaResourcePolicy.cpp b/media/libmedia/MediaResourcePolicy.cpp new file mode 100644 index 0000000..2bb996a --- /dev/null +++ b/media/libmedia/MediaResourcePolicy.cpp @@ -0,0 +1,49 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaResourcePolicy" +#include <utils/Log.h> +#include <media/MediaResourcePolicy.h> + +namespace android { + +const char kPolicySupportsMultipleSecureCodecs[] = "supports-multiple-secure-codecs"; +const char kPolicySupportsSecureWithNonSecureCodec[] = "supports-secure-with-non-secure-codec"; + +MediaResourcePolicy::MediaResourcePolicy() : mValue(0) {} + +MediaResourcePolicy::MediaResourcePolicy(String8 type, uint64_t value) + : mType(type), + mValue(value) {} + +void MediaResourcePolicy::readFromParcel(const Parcel &parcel) { + mType = parcel.readString8(); + mValue = parcel.readUint64(); +} + +void MediaResourcePolicy::writeToParcel(Parcel *parcel) const { + parcel->writeString8(mType); + parcel->writeUint64(mValue); +} + +String8 MediaResourcePolicy::toString() const { + String8 str; + str.appendFormat("%s:%llu", mType.string(), mValue); + return str; +} + +}; // namespace android diff --git a/media/libmedia/SingleStateQueue.cpp b/media/libmedia/SingleStateQueue.cpp deleted file mode 100644 index c241184..0000000 --- a/media/libmedia/SingleStateQueue.cpp +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright (C) 2012 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#include <new> -#include <cutils/atomic.h> -#include <media/SingleStateQueue.h> - -namespace android { - -template<typename T> SingleStateQueue<T>::Mutator::Mutator(Shared *shared) - : mSequence(0), mShared((Shared *) shared) -{ - // exactly one of Mutator and Observer must initialize, currently it is Observer - //shared->init(); -} - -template<typename T> int32_t SingleStateQueue<T>::Mutator::push(const T& value) -{ - Shared *shared = mShared; - int32_t sequence = mSequence; - sequence++; - android_atomic_acquire_store(sequence, &shared->mSequence); - shared->mValue = value; - sequence++; - android_atomic_release_store(sequence, &shared->mSequence); - mSequence = sequence; - // consider signalling a futex here, if we know that observer is waiting - return sequence; -} - -template<typename T> bool SingleStateQueue<T>::Mutator::ack() -{ - return mShared->mAck - mSequence == 0; -} - -template<typename T> bool SingleStateQueue<T>::Mutator::ack(int32_t sequence) -{ - // this relies on 2's complement rollover to detect an ancient sequence number - return mShared->mAck - sequence >= 0; -} - -template<typename T> SingleStateQueue<T>::Observer::Observer(Shared *shared) - : mSequence(0), mSeed(1), mShared((Shared *) shared) -{ - // exactly one of Mutator and Observer must initialize, currently it is Observer - shared->init(); -} - -template<typename T> bool SingleStateQueue<T>::Observer::poll(T& value) -{ - Shared *shared = mShared; - int32_t before = shared->mSequence; - if (before == mSequence) { - return false; - } - for (int tries = 0; ; ) { - 
const int MAX_TRIES = 5; - if (before & 1) { - if (++tries >= MAX_TRIES) { - return false; - } - before = shared->mSequence; - } else { - android_memory_barrier(); - T temp = shared->mValue; - int32_t after = android_atomic_release_load(&shared->mSequence); - if (after == before) { - value = temp; - shared->mAck = before; - mSequence = before; - return true; - } - if (++tries >= MAX_TRIES) { - return false; - } - before = after; - } - } -} - -#if 0 -template<typename T> SingleStateQueue<T>::SingleStateQueue(void /*Shared*/ *shared) -{ - ((Shared *) shared)->init(); -} -#endif - -} // namespace android - -// hack for gcc -#ifdef SINGLE_STATE_QUEUE_INSTANTIATIONS -#include SINGLE_STATE_QUEUE_INSTANTIATIONS -#endif diff --git a/media/libmedia/StringArray.cpp b/media/libmedia/StringArray.cpp index 5f5b57a..b2e5907 100644 --- a/media/libmedia/StringArray.cpp +++ b/media/libmedia/StringArray.cpp @@ -16,7 +16,7 @@ // // Sortable array of strings. STL-ish, but STL-free. -// +// #include <stdlib.h> #include <string.h> @@ -110,4 +110,4 @@ void StringArray::setEntry(int idx, const char* str) { } -}; // namespace android +} // namespace android diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp index f91e3e4..9d69b6a 100644 --- a/media/libmedia/Visualizer.cpp +++ b/media/libmedia/Visualizer.cpp @@ -429,4 +429,4 @@ bool Visualizer::CaptureThread::threadLoop() return false; } -}; // namespace android +} // namespace android diff --git a/media/libmedia/docs/Makefile b/media/libmedia/docs/Makefile new file mode 100644 index 0000000..bddbc9b --- /dev/null +++ b/media/libmedia/docs/Makefile @@ -0,0 +1,2 @@ +paused.png : paused.dot + dot -Tpng < $< > $@ diff --git a/media/libmedia/docs/paused.dot b/media/libmedia/docs/paused.dot new file mode 100644 index 0000000..11e1777 --- /dev/null +++ b/media/libmedia/docs/paused.dot @@ -0,0 +1,85 @@ +digraph paused { +initial [label="INITIAL\n\ +mIgnoreNextPausedInt = false\n\ +mPaused = false\n\ +mPausedInt = false"]; + 
+resume_body [label="mIgnoreNextPausedInt = true\nif (mPaused || mPausedInt)"]; +resume_paused [label="mPaused = false\nmPausedInt = false\nsignal()"]; +resume_paused -> resume_merged; +resume_merged [label="return"]; + +Application -> ATstop; +ATstop [label="AudioTrack::stop()"]; +ATstop -> pause; +Application -> ATpause; +ATpause [label="AudioTrack::pause()"]; +ATpause -> pause; +ATstart -> resume; +ATstart [label="AudioTrack::start()"]; +destructor [label="~AudioTrack()"]; +destructor -> requestExit; +requestExit [label="AudioTrackThread::requestExit()"]; +requestExit -> resume; +Application -> ATsetMarkerPosition +ATsetMarkerPosition [label="AudioTrack::setMarkerPosition()\n[sets marker variables]"]; +ATsetMarkerPosition -> ATTwake +Application -> ATsetPositionUpdatePeriod +ATsetPositionUpdatePeriod [label="AudioTrack::setPositionUpdatePeriod()\n[sets update period variables]"]; +ATsetPositionUpdatePeriod -> ATTwake +Application -> ATstart; + +resume [label="AudioTrackThread::resume()"]; +resume -> resume_body; + +resume_body -> resume_paused [label="true"]; +resume_body -> resume_merged [label="false"]; + +ATTwake [label="AudioTrackThread::wake()\nif (!mPaused && mPausedInt && mPausedNs > 0)"]; +ATTwake-> ATTWake_wakeable [label="true"]; +ATTWake_wakeable [label="mIgnoreNextPausedInt = true\nmPausedInt = false\nsignal()"]; +ATTwake-> ATTWake_cannotwake [label="false"] +ATTWake_cannotwake [label="ignore"]; + +pause [label="mPaused = true"]; +pause -> return; + +threadLoop [label="AudioTrackThread::threadLoop()\nENTRY"]; +threadLoop -> threadLoop_1; +threadLoop_1 [label="if (mPaused)"]; +threadLoop_1 -> threadLoop_1_true [label="true"]; +threadLoop_1 -> threadLoop_2 [label="false"]; +threadLoop_1_true [label="wait()\nreturn true"]; +threadLoop_2 [label="if (mIgnoreNextPausedInt)"]; +threadLoop_2 -> threadLoop_2_true [label="true"]; +threadLoop_2 -> threadLoop_3 [label="false"]; +threadLoop_2_true [label="mIgnoreNextPausedInt = false\nmPausedInt = false"]; 
+threadLoop_2_true -> threadLoop_3; +threadLoop_3 [label="if (mPausedInt)"]; +threadLoop_3 -> threadLoop_3_true [label="true"]; +threadLoop_3 -> threadLoop_4 [label="false"]; +threadLoop_3_true [label="wait()\nmPausedInt = false\nreturn true"]; +threadLoop_4 [label="if (exitPending)"]; +threadLoop_4 -> threadLoop_4_true [label="true"]; +threadLoop_4 -> threadLoop_5 [label="false"]; +threadLoop_4_true [label="return false"]; +threadLoop_5 [label="ns = processAudioBuffer()"]; +threadLoop_5 -> threadLoop_6; +threadLoop_6 [label="case ns"]; +threadLoop_6 -> threadLoop_6_0 [label="0"]; +threadLoop_6 -> threadLoop_6_NS_INACTIVE [label="NS_INACTIVE"]; +threadLoop_6 -> threadLoop_6_NS_NEVER [label="NS_NEVER"]; +threadLoop_6 -> threadLoop_6_NS_WHENEVER [label="NS_WHENEVER"]; +threadLoop_6 -> threadLoop_6_default [label="default"]; +threadLoop_6_default [label="if (ns < 0)"]; +threadLoop_6_default -> threadLoop_6_default_true [label="true"]; +threadLoop_6_default -> threadLoop_6_default_false [label="false"]; +threadLoop_6_default_true [label="FATAL"]; +threadLoop_6_default_false [label="pauseInternal(ns) [wake()-able]\nmPausedInternal = true\nmPausedNs = ns\nreturn true"]; +threadLoop_6_0 [label="return true"]; +threadLoop_6_NS_INACTIVE [label="pauseInternal()\nmPausedInternal = true\nmPausedNs = 0\nreturn true"]; +threadLoop_6_NS_NEVER [label="return false"]; +threadLoop_6_NS_WHENEVER [label="ns = 1s"]; +threadLoop_6_NS_WHENEVER -> threadLoop_6_default_false; + +} diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp index 8e8a1ed..873808a 100644 --- a/media/libmedia/mediametadataretriever.cpp +++ b/media/libmedia/mediametadataretriever.cpp @@ -176,4 +176,4 @@ MediaMetadataRetriever::DeathNotifier::~DeathNotifier() } } -}; // namespace android +} // namespace android diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index 05c89ed..5dd8c02 100644 --- a/media/libmedia/mediaplayer.cpp +++ 
b/media/libmedia/mediaplayer.cpp @@ -59,6 +59,7 @@ MediaPlayer::MediaPlayer() mLoop = false; mLeftVolume = mRightVolume = 1.0; mVideoWidth = mVideoHeight = 0; + mPlaybackRate = 1.0; mLockThreadId = 0; mAudioSessionId = AudioSystem::newAudioUniqueId(); AudioSystem::acquireAudioSessionId(mAudioSessionId, -1); @@ -240,7 +241,7 @@ status_t MediaPlayer::setVideoSurfaceTexture( // must call with lock held status_t MediaPlayer::prepareAsync_l() { - if ( (mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) { + if ( (mPlayer != 0) && ( mCurrentState & (MEDIA_PLAYER_INITIALIZED | MEDIA_PLAYER_STOPPED) ) ) { mPlayer->setAudioStreamType(mStreamType); if (mAudioAttributesParcel != NULL) { mPlayer->setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, *mAudioAttributesParcel); @@ -378,6 +379,24 @@ bool MediaPlayer::isPlaying() return false; } +status_t MediaPlayer::setPlaybackRate(float rate) +{ + ALOGV("setPlaybackRate: %f", rate); + if (rate <= 0.0) { + return BAD_VALUE; + } + Mutex::Autolock _l(mLock); + if (mPlayer != 0) { + if (mPlaybackRate == rate) { + return NO_ERROR; + } + mPlaybackRate = rate; + return mPlayer->setPlaybackRate(rate); + } + ALOGV("setPlaybackRate: no active player"); + return INVALID_OPERATION; +} + status_t MediaPlayer::getVideoWidth(int *w) { ALOGV("getVideoWidth"); @@ -414,7 +433,8 @@ status_t MediaPlayer::getCurrentPosition(int *msec) status_t MediaPlayer::getDuration_l(int *msec) { ALOGV("getDuration_l"); - bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE)); + bool isValidState = (mCurrentState & (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_STARTED | + MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_STOPPED | MEDIA_PLAYER_PLAYBACK_COMPLETE)); if (mPlayer != 0 && isValidState) { int durationMs; status_t ret = mPlayer->getDuration(&durationMs); @@ -443,7 +463,8 @@ status_t MediaPlayer::getDuration(int *msec) status_t 
MediaPlayer::seekTo_l(int msec) { ALOGV("seekTo %d", msec); - if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE) ) ) { + if ((mPlayer != 0) && ( mCurrentState & ( MEDIA_PLAYER_STARTED | MEDIA_PLAYER_PREPARED | + MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE) ) ) { if ( msec < 0 ) { ALOGW("Attempt to seek to invalid position: %d", msec); msec = 0; @@ -477,7 +498,8 @@ status_t MediaPlayer::seekTo_l(int msec) return NO_ERROR; } } - ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(), mCurrentState); + ALOGE("Attempt to perform seekTo in wrong state: mPlayer=%p, mCurrentState=%u", mPlayer.get(), + mCurrentState); return INVALID_OPERATION; } @@ -855,4 +877,4 @@ status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) { return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer); } -}; // namespace android +} // namespace android diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index 1952b86..a2d6e53 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -264,32 +264,6 @@ status_t MediaRecorder::setAudioEncoder(int ae) return ret; } -status_t MediaRecorder::setOutputFile(const char* path) -{ - ALOGV("setOutputFile(%s)", path); - if (mMediaRecorder == NULL) { - ALOGE("media recorder is not initialized yet"); - return INVALID_OPERATION; - } - if (mIsOutputFileSet) { - ALOGE("output file has already been set"); - return INVALID_OPERATION; - } - if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) { - ALOGE("setOutputFile called in an invalid state(%d)", mCurrentState); - return INVALID_OPERATION; - } - - status_t ret = mMediaRecorder->setOutputFile(path); - if (OK != ret) { - ALOGV("setOutputFile failed: %d", ret); - mCurrentState = MEDIA_RECORDER_ERROR; - return ret; - } - mIsOutputFileSet = true; - return ret; -} - status_t 
MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length) { ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length); @@ -706,4 +680,4 @@ void MediaRecorder::died() notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_ERROR_SERVER_DIED, 0); } -}; // namespace android +} // namespace android diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 9d8fe62..4b31715 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -10,6 +10,7 @@ LOCAL_SRC_FILES:= \ ActivityManager.cpp \ Crypto.cpp \ Drm.cpp \ + DrmSessionManager.cpp \ HDCP.cpp \ MediaPlayerFactory.cpp \ MediaPlayerService.cpp \ diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp index 73f1a2a..49e01d1 100644 --- a/media/libmediaplayerservice/Drm.cpp +++ b/media/libmediaplayerservice/Drm.cpp @@ -23,6 +23,8 @@ #include "Drm.h" +#include "DrmSessionClientInterface.h" +#include "DrmSessionManager.h" #include <media/drm/DrmAPI.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AString.h> @@ -33,6 +35,10 @@ namespace android { +static inline int getCallingPid() { + return IPCThreadState::self()->getCallingPid(); +} + static bool checkPermission(const char* permissionString) { #ifndef HAVE_ANDROID_OS return true; @@ -57,14 +63,41 @@ static bool operator<(const Vector<uint8_t> &lhs, const Vector<uint8_t> &rhs) { return memcmp((void *)lhs.array(), (void *)rhs.array(), rhs.size()) < 0; } +struct DrmSessionClient : public DrmSessionClientInterface { + DrmSessionClient(Drm* drm) : mDrm(drm) {} + + virtual bool reclaimSession(const Vector<uint8_t>& sessionId) { + sp<Drm> drm = mDrm.promote(); + if (drm == NULL) { + return true; + } + status_t err = drm->closeSession(sessionId); + if (err != OK) { + return false; + } + drm->sendEvent(DrmPlugin::kDrmPluginEventSessionReclaimed, 0, &sessionId, NULL); + return true; + } + +protected: + virtual 
~DrmSessionClient() {} + +private: + wp<Drm> mDrm; + + DISALLOW_EVIL_CONSTRUCTORS(DrmSessionClient); +}; + Drm::Drm() : mInitCheck(NO_INIT), + mDrmSessionClient(new DrmSessionClient(this)), mListener(NULL), mFactory(NULL), mPlugin(NULL) { } Drm::~Drm() { + DrmSessionManager::Instance()->removeDrm(mDrmSessionClient); delete mPlugin; mPlugin = NULL; closeFactory(); @@ -289,7 +322,18 @@ status_t Drm::openSession(Vector<uint8_t> &sessionId) { return -EINVAL; } - return mPlugin->openSession(sessionId); + status_t err = mPlugin->openSession(sessionId); + if (err == ERROR_DRM_RESOURCE_BUSY) { + bool retry = false; + retry = DrmSessionManager::Instance()->reclaimSession(getCallingPid()); + if (retry) { + err = mPlugin->openSession(sessionId); + } + } + if (err == OK) { + DrmSessionManager::Instance()->addSession(getCallingPid(), mDrmSessionClient, sessionId); + } + return err; } status_t Drm::closeSession(Vector<uint8_t> const &sessionId) { @@ -303,14 +347,19 @@ status_t Drm::closeSession(Vector<uint8_t> const &sessionId) { return -EINVAL; } - return mPlugin->closeSession(sessionId); + status_t err = mPlugin->closeSession(sessionId); + if (err == OK) { + DrmSessionManager::Instance()->removeSession(sessionId); + } + return err; } status_t Drm::getKeyRequest(Vector<uint8_t> const &sessionId, Vector<uint8_t> const &initData, String8 const &mimeType, DrmPlugin::KeyType keyType, KeyedVector<String8, String8> const &optionalParameters, - Vector<uint8_t> &request, String8 &defaultUrl) { + Vector<uint8_t> &request, String8 &defaultUrl, + DrmPlugin::KeyRequestType *keyRequestType) { Mutex::Autolock autoLock(mLock); if (mInitCheck != OK) { @@ -321,8 +370,11 @@ status_t Drm::getKeyRequest(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->getKeyRequest(sessionId, initData, mimeType, keyType, - optionalParameters, request, defaultUrl); + optionalParameters, request, defaultUrl, + keyRequestType); } status_t 
Drm::provideKeyResponse(Vector<uint8_t> const &sessionId, @@ -338,6 +390,8 @@ status_t Drm::provideKeyResponse(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->provideKeyResponse(sessionId, response, keySetId); } @@ -367,6 +421,8 @@ status_t Drm::restoreKeys(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->restoreKeys(sessionId, keySetId); } @@ -382,6 +438,8 @@ status_t Drm::queryKeyStatus(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->queryKeyStatus(sessionId, infoMap); } @@ -561,6 +619,8 @@ status_t Drm::setCipherAlgorithm(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->setCipherAlgorithm(sessionId, algorithm); } @@ -576,6 +636,8 @@ status_t Drm::setMacAlgorithm(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->setMacAlgorithm(sessionId, algorithm); } @@ -594,6 +656,8 @@ status_t Drm::encrypt(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->encrypt(sessionId, keyId, input, iv, output); } @@ -612,6 +676,8 @@ status_t Drm::decrypt(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->decrypt(sessionId, keyId, input, iv, output); } @@ -629,6 +695,8 @@ status_t Drm::sign(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->sign(sessionId, keyId, message, signature); } @@ -647,6 +715,8 @@ status_t Drm::verify(Vector<uint8_t> const &sessionId, return -EINVAL; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->verify(sessionId, keyId, message, 
signature, match); } @@ -669,6 +739,8 @@ status_t Drm::signRSA(Vector<uint8_t> const &sessionId, return -EPERM; } + DrmSessionManager::Instance()->useSession(sessionId); + return mPlugin->signRSA(sessionId, algorithm, message, wrappedKey, signature); } diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h index 0e1eb2c..7e8f246 100644 --- a/media/libmediaplayerservice/Drm.h +++ b/media/libmediaplayerservice/Drm.h @@ -28,6 +28,7 @@ namespace android { struct DrmFactory; struct DrmPlugin; +struct DrmSessionClientInterface; struct Drm : public BnDrm, public IBinder::DeathRecipient, @@ -52,7 +53,8 @@ struct Drm : public BnDrm, Vector<uint8_t> const &initData, String8 const &mimeType, DrmPlugin::KeyType keyType, KeyedVector<String8, String8> const &optionalParameters, - Vector<uint8_t> &request, String8 &defaultUrl); + Vector<uint8_t> &request, String8 &defaultUrl, + DrmPlugin::KeyRequestType *keyRequestType); virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId, Vector<uint8_t> const &response, @@ -138,6 +140,8 @@ private: status_t mInitCheck; + sp<DrmSessionClientInterface> mDrmSessionClient; + sp<IDrmClient> mListener; mutable Mutex mEventLock; mutable Mutex mNotifyLock; diff --git a/media/libmediaplayerservice/DrmSessionClientInterface.h b/media/libmediaplayerservice/DrmSessionClientInterface.h new file mode 100644 index 0000000..17faf08 --- /dev/null +++ b/media/libmediaplayerservice/DrmSessionClientInterface.h @@ -0,0 +1,34 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DRM_PROXY_INTERFACE_H_ +#define DRM_PROXY_INTERFACE_H_ + +#include <utils/RefBase.h> +#include <utils/Vector.h> + +namespace android { + +struct DrmSessionClientInterface : public RefBase { + virtual bool reclaimSession(const Vector<uint8_t>& sessionId) = 0; + +protected: + virtual ~DrmSessionClientInterface() {} +}; + +} // namespace android + +#endif // DRM_PROXY_INTERFACE_H_ diff --git a/media/libmediaplayerservice/DrmSessionManager.cpp b/media/libmediaplayerservice/DrmSessionManager.cpp new file mode 100644 index 0000000..641f881 --- /dev/null +++ b/media/libmediaplayerservice/DrmSessionManager.cpp @@ -0,0 +1,240 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "DrmSessionManager" +#include <utils/Log.h> + +#include "DrmSessionManager.h" + +#include "DrmSessionClientInterface.h" +#include <binder/IPCThreadState.h> +#include <binder/IProcessInfoService.h> +#include <binder/IServiceManager.h> +#include <media/stagefright/ProcessInfo.h> +#include <unistd.h> +#include <utils/String8.h> + +namespace android { + +static String8 GetSessionIdString(const Vector<uint8_t> &sessionId) { + String8 sessionIdStr; + for (size_t i = 0; i < sessionId.size(); ++i) { + sessionIdStr.appendFormat("%u ", sessionId[i]); + } + return sessionIdStr; +} + +bool isEqualSessionId(const Vector<uint8_t> &sessionId1, const Vector<uint8_t> &sessionId2) { + if (sessionId1.size() != sessionId2.size()) { + return false; + } + for (size_t i = 0; i < sessionId1.size(); ++i) { + if (sessionId1[i] != sessionId2[i]) { + return false; + } + } + return true; +} + +sp<DrmSessionManager> DrmSessionManager::Instance() { + static sp<DrmSessionManager> drmSessionManager = new DrmSessionManager(); + return drmSessionManager; +} + +DrmSessionManager::DrmSessionManager() + : mProcessInfo(new ProcessInfo()), + mTime(0) {} + +DrmSessionManager::DrmSessionManager(sp<ProcessInfoInterface> processInfo) + : mProcessInfo(processInfo), + mTime(0) {} + +DrmSessionManager::~DrmSessionManager() {} + +void DrmSessionManager::addSession( + int pid, sp<DrmSessionClientInterface> drm, const Vector<uint8_t> &sessionId) { + ALOGV("addSession(pid %d, drm %p, sessionId %s)", pid, drm.get(), + GetSessionIdString(sessionId).string()); + + Mutex::Autolock lock(mLock); + SessionInfo info; + info.drm = drm; + info.sessionId = sessionId; + info.timeStamp = getTime_l(); + ssize_t index = mSessionMap.indexOfKey(pid); + if (index < 0) { + // new pid + SessionInfos infosForPid; + infosForPid.push_back(info); + mSessionMap.add(pid, infosForPid); + } else { + mSessionMap.editValueAt(index).push_back(info); + } +} + +void 
DrmSessionManager::useSession(const Vector<uint8_t> &sessionId) { + ALOGV("useSession(%s)", GetSessionIdString(sessionId).string()); + + Mutex::Autolock lock(mLock); + for (size_t i = 0; i < mSessionMap.size(); ++i) { + SessionInfos& infos = mSessionMap.editValueAt(i); + for (size_t j = 0; j < infos.size(); ++j) { + SessionInfo& info = infos.editItemAt(j); + if (isEqualSessionId(sessionId, info.sessionId)) { + info.timeStamp = getTime_l(); + return; + } + } + } +} + +void DrmSessionManager::removeSession(const Vector<uint8_t> &sessionId) { + ALOGV("removeSession(%s)", GetSessionIdString(sessionId).string()); + + Mutex::Autolock lock(mLock); + for (size_t i = 0; i < mSessionMap.size(); ++i) { + SessionInfos& infos = mSessionMap.editValueAt(i); + for (size_t j = 0; j < infos.size(); ++j) { + if (isEqualSessionId(sessionId, infos[j].sessionId)) { + infos.removeAt(j); + return; + } + } + } +} + +void DrmSessionManager::removeDrm(sp<DrmSessionClientInterface> drm) { + ALOGV("removeDrm(%p)", drm.get()); + + Mutex::Autolock lock(mLock); + bool found = false; + for (size_t i = 0; i < mSessionMap.size(); ++i) { + SessionInfos& infos = mSessionMap.editValueAt(i); + for (size_t j = 0; j < infos.size();) { + if (infos[j].drm == drm) { + ALOGV("removed session (%s)", GetSessionIdString(infos[j].sessionId).string()); + j = infos.removeAt(j); + found = true; + } else { + ++j; + } + } + if (found) { + break; + } + } +} + +bool DrmSessionManager::reclaimSession(int callingPid) { + ALOGV("reclaimSession(%d)", callingPid); + + sp<DrmSessionClientInterface> drm; + Vector<uint8_t> sessionId; + int lowestPriorityPid; + int lowestPriority; + { + Mutex::Autolock lock(mLock); + int callingPriority; + if (!mProcessInfo->getPriority(callingPid, &callingPriority)) { + return false; + } + if (!getLowestPriority_l(&lowestPriorityPid, &lowestPriority)) { + return false; + } + if (lowestPriority <= callingPriority) { + return false; + } + + if (!getLeastUsedSession_l(lowestPriorityPid, &drm, 
&sessionId)) { + return false; + } + } + + if (drm == NULL) { + return false; + } + + ALOGV("reclaim session(%s) opened by pid %d", + GetSessionIdString(sessionId).string(), lowestPriorityPid); + + return drm->reclaimSession(sessionId); +} + +int64_t DrmSessionManager::getTime_l() { + return mTime++; +} + +bool DrmSessionManager::getLowestPriority_l(int* lowestPriorityPid, int* lowestPriority) { + int pid = -1; + int priority = -1; + for (size_t i = 0; i < mSessionMap.size(); ++i) { + if (mSessionMap.valueAt(i).size() == 0) { + // no opened session by this process. + continue; + } + int tempPid = mSessionMap.keyAt(i); + int tempPriority; + if (!mProcessInfo->getPriority(tempPid, &tempPriority)) { + // shouldn't happen. + return false; + } + if (pid == -1) { + pid = tempPid; + priority = tempPriority; + } else { + if (tempPriority > priority) { + pid = tempPid; + priority = tempPriority; + } + } + } + if (pid != -1) { + *lowestPriorityPid = pid; + *lowestPriority = priority; + } + return (pid != -1); +} + +bool DrmSessionManager::getLeastUsedSession_l( + int pid, sp<DrmSessionClientInterface>* drm, Vector<uint8_t>* sessionId) { + ssize_t index = mSessionMap.indexOfKey(pid); + if (index < 0) { + return false; + } + + int leastUsedIndex = -1; + int64_t minTs = LLONG_MAX; + const SessionInfos& infos = mSessionMap.valueAt(index); + for (size_t j = 0; j < infos.size(); ++j) { + if (leastUsedIndex == -1) { + leastUsedIndex = j; + minTs = infos[j].timeStamp; + } else { + if (infos[j].timeStamp < minTs) { + leastUsedIndex = j; + minTs = infos[j].timeStamp; + } + } + } + if (leastUsedIndex != -1) { + *drm = infos[leastUsedIndex].drm; + *sessionId = infos[leastUsedIndex].sessionId; + } + return (leastUsedIndex != -1); +} + +} // namespace android diff --git a/media/libmediaplayerservice/DrmSessionManager.h b/media/libmediaplayerservice/DrmSessionManager.h new file mode 100644 index 0000000..ba5c268 --- /dev/null +++ b/media/libmediaplayerservice/DrmSessionManager.h @@ -0,0 
+1,77 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef DRM_SESSION_MANAGER_H_ + +#define DRM_SESSION_MANAGER_H_ + +#include <media/stagefright/foundation/ABase.h> +#include <utils/RefBase.h> +#include <utils/KeyedVector.h> +#include <utils/threads.h> +#include <utils/Vector.h> + +namespace android { + +class DrmSessionManagerTest; +struct DrmSessionClientInterface; +struct ProcessInfoInterface; + +bool isEqualSessionId(const Vector<uint8_t> &sessionId1, const Vector<uint8_t> &sessionId2); + +struct SessionInfo { + sp<DrmSessionClientInterface> drm; + Vector<uint8_t> sessionId; + int64_t timeStamp; +}; + +typedef Vector<SessionInfo > SessionInfos; +typedef KeyedVector<int, SessionInfos > PidSessionInfosMap; + +struct DrmSessionManager : public RefBase { + static sp<DrmSessionManager> Instance(); + + DrmSessionManager(); + DrmSessionManager(sp<ProcessInfoInterface> processInfo); + + void addSession(int pid, sp<DrmSessionClientInterface> drm, const Vector<uint8_t>& sessionId); + void useSession(const Vector<uint8_t>& sessionId); + void removeSession(const Vector<uint8_t>& sessionId); + void removeDrm(sp<DrmSessionClientInterface> drm); + bool reclaimSession(int callingPid); + +protected: + virtual ~DrmSessionManager(); + +private: + friend class DrmSessionManagerTest; + + int64_t getTime_l(); + bool getLowestPriority_l(int* lowestPriorityPid, int* lowestPriority); + bool 
getLeastUsedSession_l( + int pid, sp<DrmSessionClientInterface>* drm, Vector<uint8_t>* sessionId); + + sp<ProcessInfoInterface> mProcessInfo; + mutable Mutex mLock; + PidSessionInfosMap mSessionMap; + int64_t mTime; + + DISALLOW_EVIL_CONSTRUCTORS(DrmSessionManager); +}; + +} // namespace android + +#endif // DRM_SESSION_MANAGER_H_ diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index 694f1a4..f113e21 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -290,8 +290,9 @@ MediaPlayerService::MediaPlayerService() const sp<IServiceManager> sm(defaultServiceManager()); if (sm != NULL) { const String16 name("batterystats"); + // use checkService() to avoid blocking if service is not up yet sp<IBatteryStats> batteryStats = - interface_cast<IBatteryStats>(sm->getService(name)); + interface_cast<IBatteryStats>(sm->checkService(name)); if (batteryStats != NULL) { batteryStats->noteResetVideo(); batteryStats->noteResetAudio(); @@ -441,6 +442,9 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args) const size_t SIZE = 256; char buffer[SIZE]; String8 result; + SortedVector< sp<Client> > clients; //to serialise the mutex unlock & client destruction. 
+ SortedVector< sp<MediaRecorderClient> > mediaRecorderClients; + if (checkCallingPermission(String16("android.permission.DUMP")) == false) { snprintf(buffer, SIZE, "Permission Denial: " "can't dump MediaPlayerService from pid=%d, uid=%d\n", @@ -452,6 +456,7 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args) for (int i = 0, n = mClients.size(); i < n; ++i) { sp<Client> c = mClients[i].promote(); if (c != 0) c->dump(fd, args); + clients.add(c); } if (mMediaRecorderClients.size() == 0) { result.append(" No media recorder client\n\n"); @@ -464,6 +469,7 @@ status_t MediaPlayerService::dump(int fd, const Vector<String16>& args) write(fd, result.string(), result.size()); result = "\n"; c->dump(fd, args); + mediaRecorderClients.add(c); } } } @@ -961,6 +967,14 @@ status_t MediaPlayerService::Client::isPlaying(bool* state) return NO_ERROR; } +status_t MediaPlayerService::Client::setPlaybackRate(float rate) +{ + ALOGV("[%d] setPlaybackRate(%f)", mConnId, rate); + sp<MediaPlayerBase> p = getPlayer(); + if (p == 0) return UNKNOWN_ERROR; + return p->setPlaybackRate(rate); +} + status_t MediaPlayerService::Client::getCurrentPosition(int *msec) { ALOGV("getCurrentPosition"); @@ -1664,13 +1678,13 @@ void MediaPlayerService::AudioOutput::switchToNextOutput() { } } -ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size) +ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size, bool blocking) { LOG_ALWAYS_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback."); //ALOGV("write(%p, %u)", buffer, size); if (mTrack != 0) { - ssize_t ret = mTrack->write(buffer, size); + ssize_t ret = mTrack->write(buffer, size, blocking); if (ret >= 0) { mBytesWritten += ret; } diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index fad3447..4ce4b81 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ 
b/media/libmediaplayerservice/MediaPlayerService.h @@ -97,7 +97,7 @@ class MediaPlayerService : public BnMediaPlayerService const audio_offload_info_t *offloadInfo = NULL); virtual status_t start(); - virtual ssize_t write(const void* buffer, size_t size); + virtual ssize_t write(const void* buffer, size_t size, bool blocking = true); virtual void stop(); virtual void flush(); virtual void pause(); @@ -261,6 +261,7 @@ private: virtual status_t stop(); virtual status_t pause(); virtual status_t isPlaying(bool* state); + virtual status_t setPlaybackRate(float rate); virtual status_t seekTo(int msec); virtual status_t getCurrentPosition(int* msec); virtual status_t getDuration(int* msec); diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index 194abbb..4d4de9b 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -154,17 +154,6 @@ status_t MediaRecorderClient::setAudioEncoder(int ae) return mRecorder->setAudioEncoder((audio_encoder)ae); } -status_t MediaRecorderClient::setOutputFile(const char* path) -{ - ALOGV("setOutputFile(%s)", path); - Mutex::Autolock lock(mLock); - if (mRecorder == NULL) { - ALOGE("recorder is not initialized"); - return NO_INIT; - } - return mRecorder->setOutputFile(path); -} - status_t MediaRecorderClient::setOutputFile(int fd, int64_t offset, int64_t length) { ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length); diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index a65ec9f..a444b6c 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -38,7 +38,6 @@ public: virtual status_t setOutputFormat(int of); virtual status_t setVideoEncoder(int ve); virtual status_t setAudioEncoder(int ae); - virtual status_t setOutputFile(const char* path); virtual status_t 
setOutputFile(int fd, int64_t offset, int64_t length); virtual status_t setVideoSize(int width, int height); diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index 86639cb..55763f0 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -75,6 +75,7 @@ StagefrightRecorder::StagefrightRecorder() mAudioSource(AUDIO_SOURCE_CNT), mVideoSource(VIDEO_SOURCE_LIST_END), mCaptureTimeLapse(false), + mCaptureFps(0.0f), mStarted(false) { ALOGV("Constructor"); @@ -206,7 +207,7 @@ status_t StagefrightRecorder::setVideoSize(int width, int height) { status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) { ALOGV("setVideoFrameRate: %d", frames_per_second); if ((frames_per_second <= 0 && frames_per_second != -1) || - frames_per_second > 120) { + frames_per_second > kMaxHighSpeedFps) { ALOGE("Invalid video frame rate: %d", frames_per_second); return BAD_VALUE; } @@ -241,14 +242,6 @@ status_t StagefrightRecorder::setPreviewSurface(const sp<IGraphicBufferProducer> return OK; } -status_t StagefrightRecorder::setOutputFile(const char * /* path */) { - ALOGE("setOutputFile(const char*) must not be called"); - // We don't actually support this at all, as the media_server process - // no longer has permissions to create files. - - return -EPERM; -} - status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t length) { ALOGV("setOutputFile: %d, %lld, %lld", fd, offset, length); // These don't make any sense, do they? @@ -271,6 +264,31 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng return OK; } +// Attempt to parse an float literal optionally surrounded by whitespace, +// returns true on success, false otherwise. +static bool safe_strtof(const char *s, float *val) { + char *end; + + // It is lame, but according to man page, we have to set errno to 0 + // before calling strtof(). 
+ errno = 0; + *val = strtof(s, &end); + + if (end == s || errno == ERANGE) { + return false; + } + + // Skip trailing whitespace + while (isspace(*end)) { + ++end; + } + + // For a successful return, the string must contain nothing but a valid + // float literal optionally surrounded by whitespace. + + return *end == '\0'; +} + // Attempt to parse an int64 literal optionally surrounded by whitespace, // returns true on success, false otherwise. static bool safe_strtoi64(const char *s, int64_t *val) { @@ -554,8 +572,10 @@ status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) { return OK; } -status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) { - ALOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs); +status_t StagefrightRecorder::setParamTimeLapseFps(float fps) { + ALOGV("setParamTimeLapseFps: %.2f", fps); + + int64_t timeUs = (int64_t) (1000000.0 / fps + 0.5f); // Not allowing time more than a day if (timeUs <= 0 || timeUs > 86400*1E6) { @@ -563,6 +583,7 @@ status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t t return BAD_VALUE; } + mCaptureFps = fps; mTimeBetweenTimeLapseFrameCaptureUs = timeUs; return OK; } @@ -690,11 +711,10 @@ status_t StagefrightRecorder::setParameter( if (safe_strtoi32(value.string(), &timeLapseEnable)) { return setParamTimeLapseEnable(timeLapseEnable); } - } else if (key == "time-between-time-lapse-frame-capture") { - int64_t timeBetweenTimeLapseFrameCaptureUs; - if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureUs)) { - return setParamTimeBetweenTimeLapseFrameCapture( - timeBetweenTimeLapseFrameCaptureUs); + } else if (key == "time-lapse-fps") { + float fps; + if (safe_strtof(value.string(), &fps)) { + return setParamTimeLapseFps(fps); } } else { ALOGE("setParameter: failed to find key %s", key.string()); @@ -1589,10 +1609,11 @@ status_t StagefrightRecorder::setupMPEG4orWEBMRecording() { status_t err = OK; sp<MediaWriter> 
writer; + sp<MPEG4Writer> mp4writer; if (mOutputFormat == OUTPUT_FORMAT_WEBM) { writer = new WebmWriter(mOutputFd); } else { - writer = new MPEG4Writer(mOutputFd); + writer = mp4writer = new MPEG4Writer(mOutputFd); } if (mVideoSource < VIDEO_SOURCE_LIST_END) { @@ -1625,13 +1646,15 @@ status_t StagefrightRecorder::setupMPEG4orWEBMRecording() { mTotalBitRate += mAudioBitRate; } + if (mCaptureTimeLapse) { + mp4writer->setCaptureRate(mCaptureFps); + } + if (mInterleaveDurationUs > 0) { - reinterpret_cast<MPEG4Writer *>(writer.get())-> - setInterleaveDuration(mInterleaveDurationUs); + mp4writer->setInterleaveDuration(mInterleaveDurationUs); } if (mLongitudex10000 > -3600000 && mLatitudex10000 > -3600000) { - reinterpret_cast<MPEG4Writer *>(writer.get())-> - setGeoData(mLatitudex10000, mLongitudex10000); + mp4writer->setGeoData(mLatitudex10000, mLongitudex10000); } } if (mMaxFileDurationUs != 0) { diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index 54c38d3..f34c229 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -53,7 +53,6 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t setVideoFrameRate(int frames_per_second); virtual status_t setCamera(const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy); virtual status_t setPreviewSurface(const sp<IGraphicBufferProducer>& surface); - virtual status_t setOutputFile(const char *path); virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); virtual status_t setParameters(const String8& params); virtual status_t setListener(const sp<IMediaRecorderClient>& listener); @@ -110,6 +109,7 @@ private: int32_t mTotalBitRate; bool mCaptureTimeLapse; + float mCaptureFps; int64_t mTimeBetweenTimeLapseFrameCaptureUs; sp<CameraSourceTimeLapse> mCameraSourceTimeLapse; @@ -127,6 +127,8 @@ private: sp<IGraphicBufferProducer> mGraphicBufferProducer; 
sp<ALooper> mLooper; + static const int kMaxHighSpeedFps = 1000; + status_t prepareInternal(); status_t setupMPEG4orWEBMRecording(); void setupMPEG4orWEBMMetaData(sp<MetaData> *meta); @@ -154,7 +156,7 @@ private: status_t setParamAudioSamplingRate(int32_t sampleRate); status_t setParamAudioTimeScale(int32_t timeScale); status_t setParamTimeLapseEnable(int32_t timeLapseEnable); - status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs); + status_t setParamTimeLapseFps(float fps); status_t setParamVideoEncodingBitRate(int32_t bitRate); status_t setParamVideoIFramesInterval(int32_t seconds); status_t setParamVideoEncoderProfile(int32_t profile); diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp index 1b2fc5e..5a31b74 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp +++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp @@ -65,12 +65,12 @@ NuPlayer::GenericSource::GenericSource( mUID(uid), mFd(-1), mDrmManagerClient(NULL), - mMetaDataSize(-1ll), mBitrate(-1ll), mPollBufferingGeneration(0), mPendingReadBufferTypes(0), mBuffering(false), - mPrepareBuffering(false) { + mPrepareBuffering(false), + mPrevBufferPercentage(-1) { resetDataSource(); DataSource::RegisterDefaultSniffers(); } @@ -130,23 +130,34 @@ sp<MetaData> NuPlayer::GenericSource::getFileFormatMeta() const { status_t NuPlayer::GenericSource::initFromDataSource() { sp<MediaExtractor> extractor; + String8 mimeType; + float confidence; + sp<AMessage> dummy; + bool isWidevineStreaming = false; CHECK(mDataSource != NULL); if (mIsWidevine) { - String8 mimeType; - float confidence; - sp<AMessage> dummy; - bool success; - - success = SniffWVM(mDataSource, &mimeType, &confidence, &dummy); - if (!success - || strcasecmp( + isWidevineStreaming = SniffWVM( + mDataSource, &mimeType, &confidence, &dummy); + if (!isWidevineStreaming || + strcasecmp( mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM)) { ALOGE("unsupported 
widevine mime: %s", mimeType.string()); return UNKNOWN_ERROR; } + } else if (mIsStreaming) { + if (!mDataSource->sniff(&mimeType, &confidence, &dummy)) { + return UNKNOWN_ERROR; + } + isWidevineStreaming = !strcasecmp( + mimeType.string(), MEDIA_MIMETYPE_CONTAINER_WVM); + } + if (isWidevineStreaming) { + // we don't want cached source for widevine streaming. + mCachedSource.clear(); + mDataSource = mHttpSource; mWVMExtractor = new WVMExtractor(mDataSource); mWVMExtractor->setAdaptiveStreamingMode(true); if (mUIDValid) { @@ -155,7 +166,7 @@ status_t NuPlayer::GenericSource::initFromDataSource() { extractor = mWVMExtractor; } else { extractor = MediaExtractor::Create(mDataSource, - mSniffedMIME.empty() ? NULL: mSniffedMIME.c_str()); + mimeType.isEmpty() ? NULL : mimeType.string()); } if (extractor == NULL) { @@ -181,14 +192,6 @@ status_t NuPlayer::GenericSource::initFromDataSource() { if (mFileMeta->findCString(kKeyMIMEType, &fileMime) && !strncasecmp(fileMime, "video/wvm", 9)) { mIsWidevine = true; - if (!mUri.empty()) { - // streaming, but the app forgot to specify widevine:// url - mWVMExtractor = static_cast<WVMExtractor *>(extractor.get()); - mWVMExtractor->setAdaptiveStreamingMode(true); - if (mUIDValid) { - mWVMExtractor->setUID(mUID); - } - } } } } @@ -332,7 +335,7 @@ void NuPlayer::GenericSource::prepareAsync() { mLooper->registerHandler(this); } - sp<AMessage> msg = new AMessage(kWhatPrepareAsync, id()); + sp<AMessage> msg = new AMessage(kWhatPrepareAsync, this); msg->post(); } @@ -345,6 +348,7 @@ void NuPlayer::GenericSource::onPrepareAsync() { if (!mUri.empty()) { const char* uri = mUri.c_str(); + String8 contentType; mIsWidevine = !strncasecmp(uri, "widevine://", 11); if (!strncasecmp("http://", uri, 7) @@ -359,7 +363,7 @@ void NuPlayer::GenericSource::onPrepareAsync() { } mDataSource = DataSource::CreateFromURI( - mHTTPService, uri, &mUriHeaders, &mContentType, + mHTTPService, uri, &mUriHeaders, &contentType, static_cast<HTTPBase *>(mHttpSource.get())); 
} else { mIsWidevine = false; @@ -387,20 +391,8 @@ void NuPlayer::GenericSource::onPrepareAsync() { mIsStreaming = (mIsWidevine || mCachedSource != NULL); } - // check initial caching status - status_t err = prefillCacheIfNecessary(); - if (err != OK) { - if (err == -EAGAIN) { - (new AMessage(kWhatPrepareAsync, id()))->post(200000); - } else { - ALOGE("Failed to prefill data cache!"); - notifyPreparedAndCleanup(UNKNOWN_ERROR); - } - return; - } - - // init extrator from data source - err = initFromDataSource(); + // init extractor from data source + status_t err = initFromDataSource(); if (err != OK) { ALOGE("Failed to init from data source!"); @@ -429,7 +421,7 @@ void NuPlayer::GenericSource::onPrepareAsync() { if (mIsSecure) { // secure decoders must be instantiated before starting widevine source - sp<AMessage> reply = new AMessage(kWhatSecureDecodersInstantiated, id()); + sp<AMessage> reply = new AMessage(kWhatSecureDecodersInstantiated, this); notifyInstantiateSecureDecoders(reply); } else { finishPrepareAsync(); @@ -465,9 +457,6 @@ void NuPlayer::GenericSource::finishPrepareAsync() { void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) { if (err != OK) { - mMetaDataSize = -1ll; - mContentType = ""; - mSniffedMIME = ""; mDataSource.clear(); mCachedSource.clear(); mHttpSource.clear(); @@ -478,76 +467,6 @@ void NuPlayer::GenericSource::notifyPreparedAndCleanup(status_t err) { notifyPrepared(err); } -status_t NuPlayer::GenericSource::prefillCacheIfNecessary() { - CHECK(mDataSource != NULL); - - if (mCachedSource == NULL) { - // no prefill if the data source is not cached - return OK; - } - - // We're not doing this for streams that appear to be audio-only - // streams to ensure that even low bandwidth streams start - // playing back fairly instantly. 
- if (!strncasecmp(mContentType.string(), "audio/", 6)) { - return OK; - } - - // We're going to prefill the cache before trying to instantiate - // the extractor below, as the latter is an operation that otherwise - // could block on the datasource for a significant amount of time. - // During that time we'd be unable to abort the preparation phase - // without this prefill. - - // Initially make sure we have at least 192 KB for the sniff - // to complete without blocking. - static const size_t kMinBytesForSniffing = 192 * 1024; - static const size_t kDefaultMetaSize = 200000; - - status_t finalStatus; - - size_t cachedDataRemaining = - mCachedSource->approxDataRemaining(&finalStatus); - - if (finalStatus != OK || (mMetaDataSize >= 0 - && (off64_t)cachedDataRemaining >= mMetaDataSize)) { - ALOGV("stop caching, status %d, " - "metaDataSize %lld, cachedDataRemaining %zu", - finalStatus, mMetaDataSize, cachedDataRemaining); - return OK; - } - - ALOGV("now cached %zu bytes of data", cachedDataRemaining); - - if (mMetaDataSize < 0 - && cachedDataRemaining >= kMinBytesForSniffing) { - String8 tmp; - float confidence; - sp<AMessage> meta; - if (!mCachedSource->sniff(&tmp, &confidence, &meta)) { - return UNKNOWN_ERROR; - } - - // We successfully identified the file's extractor to - // be, remember this mime type so we don't have to - // sniff it again when we call MediaExtractor::Create() - mSniffedMIME = tmp.string(); - - if (meta == NULL - || !meta->findInt64("meta-data-size", - reinterpret_cast<int64_t*>(&mMetaDataSize))) { - mMetaDataSize = kDefaultMetaSize; - } - - if (mMetaDataSize < 0ll) { - ALOGE("invalid metaDataSize = %lld bytes", mMetaDataSize); - return UNKNOWN_ERROR; - } - } - - return -EAGAIN; -} - void NuPlayer::GenericSource::start() { ALOGI("start"); @@ -563,7 +482,7 @@ void NuPlayer::GenericSource::start() { setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000); mStarted = true; - (new AMessage(kWhatStart, id()))->post(); + (new 
AMessage(kWhatStart, this))->post(); } void NuPlayer::GenericSource::stop() { @@ -572,7 +491,7 @@ void NuPlayer::GenericSource::stop() { mStarted = false; if (mIsWidevine || mIsSecure) { // For widevine or secure sources we need to prevent any further reads. - sp<AMessage> msg = new AMessage(kWhatStopWidevine, id()); + sp<AMessage> msg = new AMessage(kWhatStopWidevine, this); sp<AMessage> response; (void) msg->postAndAwaitResponse(&response); } @@ -589,7 +508,7 @@ void NuPlayer::GenericSource::resume() { setDrmPlaybackStatusIfNeeded(Playback::START, getLastReadPosition() / 1000); mStarted = true; - (new AMessage(kWhatResume, id()))->post(); + (new AMessage(kWhatResume, this))->post(); } void NuPlayer::GenericSource::disconnect() { @@ -616,7 +535,7 @@ status_t NuPlayer::GenericSource::feedMoreTSData() { } void NuPlayer::GenericSource::schedulePollBuffering() { - sp<AMessage> msg = new AMessage(kWhatPollBuffering, id()); + sp<AMessage> msg = new AMessage(kWhatPollBuffering, this); msg->setInt32("generation", mPollBufferingGeneration); msg->post(1000000ll); } @@ -624,6 +543,7 @@ void NuPlayer::GenericSource::schedulePollBuffering() { void NuPlayer::GenericSource::cancelPollBuffering() { mBuffering = false; ++mPollBufferingGeneration; + mPrevBufferPercentage = -1; } void NuPlayer::GenericSource::restartPollBuffering() { @@ -633,7 +553,19 @@ void NuPlayer::GenericSource::restartPollBuffering() { } } -void NuPlayer::GenericSource::notifyBufferingUpdate(int percentage) { +void NuPlayer::GenericSource::notifyBufferingUpdate(int32_t percentage) { + // Buffering percent could go backward as it's estimated from remaining + // data and last access time. This could cause the buffering position + // drawn on media control to jitter slightly. Remember previously reported + // percentage and don't allow it to go backward. 
+ if (percentage < mPrevBufferPercentage) { + percentage = mPrevBufferPercentage; + } else if (percentage > 100) { + percentage = 100; + } + + mPrevBufferPercentage = percentage; + ALOGV("notifyBufferingUpdate: buffering %d%%", percentage); sp<AMessage> msg = dupNotify(); @@ -687,10 +619,10 @@ void NuPlayer::GenericSource::sendCacheStats() { int32_t kbps = 0; status_t err = UNKNOWN_ERROR; - if (mCachedSource != NULL) { - err = mCachedSource->getEstimatedBandwidthKbps(&kbps); - } else if (mWVMExtractor != NULL) { + if (mWVMExtractor != NULL) { err = mWVMExtractor->getEstimatedBandwidthKbps(&kbps); + } else if (mCachedSource != NULL) { + err = mCachedSource->getEstimatedBandwidthKbps(&kbps); } if (err == OK) { @@ -712,7 +644,13 @@ void NuPlayer::GenericSource::onPollBuffering() { int64_t cachedDurationUs = -1ll; ssize_t cachedDataRemaining = -1; - if (mCachedSource != NULL) { + ALOGW_IF(mWVMExtractor != NULL && mCachedSource != NULL, + "WVMExtractor and NuCachedSource both present"); + + if (mWVMExtractor != NULL) { + cachedDurationUs = + mWVMExtractor->getCachedDurationUs(&finalStatus); + } else if (mCachedSource != NULL) { cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus); @@ -728,9 +666,6 @@ void NuPlayer::GenericSource::onPollBuffering() { cachedDurationUs = cachedDataRemaining * 8000000ll / bitrate; } } - } else if (mWVMExtractor != NULL) { - cachedDurationUs - = mWVMExtractor->getCachedDurationUs(&finalStatus); } if (finalStatus != OK) { @@ -918,7 +853,7 @@ void NuPlayer::GenericSource::onMessageReceived(const sp<AMessage> &msg) { mVideoTrack.mPackets->clear(); } sp<AMessage> response = new AMessage; - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; @@ -958,7 +893,7 @@ void NuPlayer::GenericSource::fetchTextData( const int64_t oneSecUs = 1000000ll; delayUs -= oneSecUs; } - sp<AMessage> msg2 = new AMessage(sendWhat, id()); + sp<AMessage> msg2 = new 
AMessage(sendWhat, this); msg2->setInt32("generation", msgGeneration); msg2->post(delayUs < 0 ? 0 : delayUs); } @@ -998,7 +933,7 @@ void NuPlayer::GenericSource::sendTextData( } sp<MetaData> NuPlayer::GenericSource::getFormatMeta(bool audio) { - sp<AMessage> msg = new AMessage(kWhatGetFormat, id()); + sp<AMessage> msg = new AMessage(kWhatGetFormat, this); msg->setInt32("audio", audio); sp<AMessage> response; @@ -1020,7 +955,7 @@ void NuPlayer::GenericSource::onGetFormatMeta(sp<AMessage> msg) const { sp<MetaData> format = doGetFormatMeta(audio); response->setPointer("format", format.get()); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); } @@ -1087,7 +1022,7 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit( if (mSubtitleTrack.mSource != NULL && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) { - sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id()); + sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this); msg->setInt64("timeUs", timeUs); msg->setInt32("generation", mFetchSubtitleDataGeneration); msg->post(); @@ -1095,7 +1030,7 @@ status_t NuPlayer::GenericSource::dequeueAccessUnit( if (mTimedTextTrack.mSource != NULL && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) { - sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id()); + sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this); msg->setInt64("timeUs", timeUs); msg->setInt32("generation", mFetchTimedTextDataGeneration); msg->post(); @@ -1160,7 +1095,7 @@ sp<AMessage> NuPlayer::GenericSource::getTrackInfo(size_t trackIndex) const { } ssize_t NuPlayer::GenericSource::getSelectedTrack(media_track_type type) const { - sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id()); + sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, this); msg->setInt32("type", type); sp<AMessage> response; @@ -1183,7 +1118,7 @@ void NuPlayer::GenericSource::onGetSelectedTrack(sp<AMessage> msg) const 
{ ssize_t index = doGetSelectedTrack(type); response->setInt32("index", index); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); } @@ -1216,7 +1151,7 @@ ssize_t NuPlayer::GenericSource::doGetSelectedTrack(media_track_type type) const status_t NuPlayer::GenericSource::selectTrack(size_t trackIndex, bool select, int64_t timeUs) { ALOGV("%s track: %zu", select ? "select" : "deselect", trackIndex); - sp<AMessage> msg = new AMessage(kWhatSelectTrack, id()); + sp<AMessage> msg = new AMessage(kWhatSelectTrack, this); msg->setInt32("trackIndex", trackIndex); msg->setInt32("select", select); msg->setInt64("timeUs", timeUs); @@ -1241,7 +1176,7 @@ void NuPlayer::GenericSource::onSelectTrack(sp<AMessage> msg) { status_t err = doSelectTrack(trackIndex, select, timeUs); response->setInt32("err", err); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); } @@ -1302,7 +1237,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select, status_t eosResult; // ignored if (mSubtitleTrack.mSource != NULL && !mSubtitleTrack.mPackets->hasBufferAvailable(&eosResult)) { - sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id()); + sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this); msg->setInt64("timeUs", timeUs); msg->setInt32("generation", mFetchSubtitleDataGeneration); msg->post(); @@ -1310,7 +1245,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select, if (mTimedTextTrack.mSource != NULL && !mTimedTextTrack.mPackets->hasBufferAvailable(&eosResult)) { - sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, id()); + sp<AMessage> msg = new AMessage(kWhatFetchTimedTextData, this); msg->setInt64("timeUs", timeUs); msg->setInt32("generation", mFetchTimedTextDataGeneration); msg->post(); @@ -1324,7 +1259,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool 
select, return OK; } - sp<AMessage> msg = new AMessage(kWhatChangeAVSource, id()); + sp<AMessage> msg = new AMessage(kWhatChangeAVSource, this); msg->setInt32("trackIndex", trackIndex); msg->post(); return OK; @@ -1334,7 +1269,7 @@ status_t NuPlayer::GenericSource::doSelectTrack(size_t trackIndex, bool select, } status_t NuPlayer::GenericSource::seekTo(int64_t seekTimeUs) { - sp<AMessage> msg = new AMessage(kWhatSeek, id()); + sp<AMessage> msg = new AMessage(kWhatSeek, this); msg->setInt64("seekTimeUs", seekTimeUs); sp<AMessage> response; @@ -1354,7 +1289,7 @@ void NuPlayer::GenericSource::onSeek(sp<AMessage> msg) { status_t err = doSeek(seekTimeUs); response->setInt32("err", err); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); } @@ -1474,7 +1409,7 @@ void NuPlayer::GenericSource::postReadBuffer(media_track_type trackType) { if ((mPendingReadBufferTypes & (1 << trackType)) == 0) { mPendingReadBufferTypes |= (1 << trackType); - sp<AMessage> msg = new AMessage(kWhatReadBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatReadBuffer, this); msg->setInt32("trackType", trackType); msg->post(); } diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h index 2d73ea9..862ee5f 100644 --- a/media/libmediaplayerservice/nuplayer/GenericSource.h +++ b/media/libmediaplayerservice/nuplayer/GenericSource.h @@ -140,14 +140,13 @@ private: sp<DecryptHandle> mDecryptHandle; bool mStarted; bool mStopRead; - String8 mContentType; - AString mSniffedMIME; - off64_t mMetaDataSize; int64_t mBitrate; int32_t mPollBufferingGeneration; uint32_t mPendingReadBufferTypes; bool mBuffering; bool mPrepareBuffering; + int32_t mPrevBufferPercentage; + mutable Mutex mReadBufferLock; sp<ALooper> mLooper; @@ -159,8 +158,6 @@ private: int64_t getLastReadPosition(); void setDrmPlaybackStatusIfNeeded(int playbackStatus, int64_t position); - status_t 
prefillCacheIfNecessary(); - void notifyPreparedAndCleanup(status_t err); void onSecureDecodersInstantiated(status_t err); void finishPrepareAsync(); @@ -204,7 +201,7 @@ private: void cancelPollBuffering(); void restartPollBuffering(); void onPollBuffering(); - void notifyBufferingUpdate(int percentage); + void notifyBufferingUpdate(int32_t percentage); void startBufferingIfNecessary(); void stopBufferingIfNecessary(); void sendCacheStats(); diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp index a26ef9e..0476c9b 100644 --- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -81,7 +81,7 @@ void NuPlayer::HTTPLiveSource::prepareAsync() { mLiveLooper->registerHandler(this); } - sp<AMessage> notify = new AMessage(kWhatSessionNotify, id()); + sp<AMessage> notify = new AMessage(kWhatSessionNotify, this); mLiveSession = new LiveSession( notify, @@ -153,7 +153,7 @@ status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select, i if (err == OK) { mFetchSubtitleDataGeneration++; if (select) { - sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, id()); + sp<AMessage> msg = new AMessage(kWhatFetchSubtitleData, this); msg->setInt32("generation", mFetchSubtitleDataGeneration); msg->post(); } @@ -281,6 +281,34 @@ void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) { break; } + case LiveSession::kWhatBufferingStart: + { + sp<AMessage> notify = dupNotify(); + notify->setInt32("what", kWhatPauseOnBufferingStart); + notify->post(); + break; + } + + case LiveSession::kWhatBufferingEnd: + { + sp<AMessage> notify = dupNotify(); + notify->setInt32("what", kWhatResumeOnBufferingEnd); + notify->post(); + break; + } + + + case LiveSession::kWhatBufferingUpdate: + { + sp<AMessage> notify = dupNotify(); + int32_t percentage; + CHECK(msg->findInt32("percentage", &percentage)); + notify->setInt32("what", 
kWhatBufferingUpdate); + notify->setInt32("percentage", percentage); + notify->post(); + break; + } + case LiveSession::kWhatError: { break; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp index aeea204..02d9f32 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -180,6 +180,7 @@ NuPlayer::NuPlayer() mFlushingVideo(NONE), mResumePending(false), mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW), + mPlaybackRate(1.0), mStarted(false), mPaused(false), mPausedByClient(false) { @@ -199,9 +200,9 @@ void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) { } void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) { - sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + sp<AMessage> msg = new AMessage(kWhatSetDataSource, this); - sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); + sp<AMessage> notify = new AMessage(kWhatSourceNotify, this); msg->setObject("source", new StreamingSource(notify, source)); msg->post(); @@ -229,10 +230,10 @@ void NuPlayer::setDataSourceAsync( const char *url, const KeyedVector<String8, String8> *headers) { - sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + sp<AMessage> msg = new AMessage(kWhatSetDataSource, this); size_t len = strlen(url); - sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); + sp<AMessage> notify = new AMessage(kWhatSourceNotify, this); sp<Source> source; if (IsHTTPLiveURL(url)) { @@ -266,9 +267,9 @@ void NuPlayer::setDataSourceAsync( } void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) { - sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + sp<AMessage> msg = new AMessage(kWhatSetDataSource, this); - sp<AMessage> notify = new AMessage(kWhatSourceNotify, id()); + sp<AMessage> notify = new AMessage(kWhatSourceNotify, this); sp<GenericSource> source = new GenericSource(notify, mUIDValid, mUID); @@ 
-285,12 +286,12 @@ void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) { } void NuPlayer::prepareAsync() { - (new AMessage(kWhatPrepare, id()))->post(); + (new AMessage(kWhatPrepare, this))->post(); } void NuPlayer::setVideoSurfaceTextureAsync( const sp<IGraphicBufferProducer> &bufferProducer) { - sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id()); + sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, this); if (bufferProducer == NULL) { msg->setObject("native-window", NULL); @@ -305,17 +306,23 @@ void NuPlayer::setVideoSurfaceTextureAsync( } void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) { - sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id()); + sp<AMessage> msg = new AMessage(kWhatSetAudioSink, this); msg->setObject("sink", sink); msg->post(); } void NuPlayer::start() { - (new AMessage(kWhatStart, id()))->post(); + (new AMessage(kWhatStart, this))->post(); +} + +void NuPlayer::setPlaybackRate(float rate) { + sp<AMessage> msg = new AMessage(kWhatSetRate, this); + msg->setFloat("rate", rate); + msg->post(); } void NuPlayer::pause() { - (new AMessage(kWhatPause, id()))->post(); + (new AMessage(kWhatPause, this))->post(); } void NuPlayer::resetAsync() { @@ -329,11 +336,11 @@ void NuPlayer::resetAsync() { mSource->disconnect(); } - (new AMessage(kWhatReset, id()))->post(); + (new AMessage(kWhatReset, this))->post(); } void NuPlayer::seekToAsync(int64_t seekTimeUs, bool needNotify) { - sp<AMessage> msg = new AMessage(kWhatSeek, id()); + sp<AMessage> msg = new AMessage(kWhatSeek, this); msg->setInt64("seekTimeUs", seekTimeUs); msg->setInt32("needNotify", needNotify); msg->post(); @@ -401,7 +408,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { case kWhatGetTrackInfo: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); Parcel* reply; @@ -454,7 +461,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { sp<AMessage> response = new 
AMessage; response->setInt32("err", err); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; @@ -462,7 +469,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { case kWhatSelectTrack: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); size_t trackIndex; @@ -604,6 +611,16 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { break; } + case kWhatSetRate: + { + ALOGV("kWhatSetRate"); + CHECK(msg->findFloat("rate", &mPlaybackRate)); + if (mRenderer != NULL) { + mRenderer->setPlaybackRate(mPlaybackRate); + } + break; + } + case kWhatScanSources: { int32_t generation; @@ -1062,15 +1079,17 @@ void NuPlayer::onStart() { flags |= Renderer::FLAG_OFFLOAD_AUDIO; } - sp<AMessage> notify = new AMessage(kWhatRendererNotify, id()); + sp<AMessage> notify = new AMessage(kWhatRendererNotify, this); ++mRendererGeneration; notify->setInt32("generation", mRendererGeneration); mRenderer = new Renderer(mAudioSink, notify, flags); - mRendererLooper = new ALooper; mRendererLooper->setName("NuPlayerRenderer"); mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO); mRendererLooper->registerHandler(mRenderer); + if (mPlaybackRate != 1.0) { + mRenderer->setPlaybackRate(mPlaybackRate); + } sp<MetaData> meta = getFileMeta(); int32_t rate; @@ -1176,7 +1195,7 @@ void NuPlayer::postScanSources() { return; } - sp<AMessage> msg = new AMessage(kWhatScanSources, id()); + sp<AMessage> msg = new AMessage(kWhatScanSources, this); msg->setInt32("generation", mScanSourcesGeneration); msg->post(); @@ -1218,7 +1237,7 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) { AString mime; CHECK(format->findString("mime", &mime)); - sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id()); + sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, this); if (mCCDecoder == NULL) { mCCDecoder = new CCDecoder(ccNotify); } @@ 
-1233,17 +1252,19 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<DecoderBase> *decoder) { } if (audio) { - sp<AMessage> notify = new AMessage(kWhatAudioNotify, id()); + sp<AMessage> notify = new AMessage(kWhatAudioNotify, this); ++mAudioDecoderGeneration; notify->setInt32("generation", mAudioDecoderGeneration); if (mOffloadAudio) { + const bool hasVideo = (mSource->getFormat(false /*audio */) != NULL); + format->setInt32("has-video", hasVideo); *decoder = new DecoderPassThrough(notify, mSource, mRenderer); } else { *decoder = new Decoder(notify, mSource, mRenderer); } } else { - sp<AMessage> notify = new AMessage(kWhatVideoNotify, id()); + sp<AMessage> notify = new AMessage(kWhatVideoNotify, this); ++mVideoDecoderGeneration; notify->setInt32("generation", mVideoDecoderGeneration); @@ -1434,7 +1455,7 @@ status_t NuPlayer::setVideoScalingMode(int32_t mode) { } status_t NuPlayer::getTrackInfo(Parcel* reply) const { - sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, id()); + sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, this); msg->setPointer("reply", reply); sp<AMessage> response; @@ -1443,7 +1464,7 @@ status_t NuPlayer::getTrackInfo(Parcel* reply) const { } status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const { - sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, id()); + sp<AMessage> msg = new AMessage(kWhatGetSelectedTrack, this); msg->setPointer("reply", reply); msg->setInt32("type", type); @@ -1456,7 +1477,7 @@ status_t NuPlayer::getSelectedTrack(int32_t type, Parcel* reply) const { } status_t NuPlayer::selectTrack(size_t trackIndex, bool select, int64_t timeUs) { - sp<AMessage> msg = new AMessage(kWhatSelectTrack, id()); + sp<AMessage> msg = new AMessage(kWhatSelectTrack, this); msg->setSize("trackIndex", trackIndex); msg->setInt32("select", select); msg->setInt64("timeUs", timeUs); @@ -1499,7 +1520,7 @@ sp<MetaData> NuPlayer::getFileMeta() { } void NuPlayer::schedulePollDuration() { - sp<AMessage> msg = new 
AMessage(kWhatPollDuration, id()); + sp<AMessage> msg = new AMessage(kWhatPollDuration, this); msg->setInt32("generation", mPollDurationGeneration); msg->post(); } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h index 30ede1a..2bc20d7 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -51,6 +51,7 @@ struct NuPlayer : public AHandler { const sp<IGraphicBufferProducer> &bufferProducer); void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink); + void setPlaybackRate(float rate); void start(); void pause(); @@ -104,6 +105,7 @@ private: kWhatSetVideoNativeWindow = '=NaW', kWhatSetAudioSink = '=AuS', kWhatMoreDataQueued = 'more', + kWhatSetRate = 'setR', kWhatStart = 'strt', kWhatScanSources = 'scan', kWhatVideoNotify = 'vidN', @@ -175,6 +177,7 @@ private: int32_t mVideoScalingMode; + float mPlaybackRate; bool mStarted; // Actual pause state, either as requested by client or due to buffering. 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp index 9229704..cf3e8ad 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.cpp @@ -19,6 +19,7 @@ #include <utils/Log.h> #include <inttypes.h> +#include "avc_utils.h" #include "NuPlayerCCDecoder.h" #include <media/stagefright/foundation/ABitReader.h> @@ -185,17 +186,38 @@ int32_t NuPlayer::CCDecoder::getTrackIndex(size_t channel) const { // returns true if a new CC track is found bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) { - int64_t timeUs; - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - sp<ABuffer> sei; if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) { return false; } + int64_t timeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + bool trackAdded = false; - NALBitReader br(sei->data() + 1, sei->size() - 1); + const NALPosition *nal = (NALPosition *) sei->data(); + + for (size_t i = 0; i < sei->size() / sizeof(NALPosition); ++i, ++nal) { + trackAdded |= parseSEINalUnit( + timeUs, accessUnit->data() + nal->nalOffset, nal->nalSize); + } + + return trackAdded; +} + +// returns true if a new CC track is found +bool NuPlayer::CCDecoder::parseSEINalUnit( + int64_t timeUs, const uint8_t *nalStart, size_t nalSize) { + unsigned nalType = nalStart[0] & 0x1f; + + // the buffer should only have SEI in it + if (nalType != 6) { + return false; + } + + bool trackAdded = false; + NALBitReader br(nalStart + 1, nalSize - 1); // sei_message() while (br.atLeastNumBitsLeft(16)) { // at least 16-bit for sei_message() uint32_t payload_type = 0; @@ -214,20 +236,25 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) { // sei_payload() if (payload_type == 4) { - // user_data_registered_itu_t_t35() - - // ATSC A/72: 6.4.2 - uint8_t itu_t_t35_country_code = br.getBits(8); - uint16_t 
itu_t_t35_provider_code = br.getBits(16); - uint32_t user_identifier = br.getBits(32); - uint8_t user_data_type_code = br.getBits(8); - - payload_size -= 1 + 2 + 4 + 1; + bool isCC = false; + if (payload_size > 1 + 2 + 4 + 1) { + // user_data_registered_itu_t_t35() + + // ATSC A/72: 6.4.2 + uint8_t itu_t_t35_country_code = br.getBits(8); + uint16_t itu_t_t35_provider_code = br.getBits(16); + uint32_t user_identifier = br.getBits(32); + uint8_t user_data_type_code = br.getBits(8); + + payload_size -= 1 + 2 + 4 + 1; + + isCC = itu_t_t35_country_code == 0xB5 + && itu_t_t35_provider_code == 0x0031 + && user_identifier == 'GA94' + && user_data_type_code == 0x3; + } - if (itu_t_t35_country_code == 0xB5 - && itu_t_t35_provider_code == 0x0031 - && user_identifier == 'GA94' - && user_data_type_code == 0x3) { + if (isCC && payload_size > 2) { // MPEG_cc_data() // ATSC A/53 Part 4: 6.2.3.1 br.skipBits(1); //process_em_data_flag @@ -243,7 +270,7 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) { sp<ABuffer> ccBuf = new ABuffer(cc_count * sizeof(CCData)); ccBuf->setRange(0, 0); - for (size_t i = 0; i < cc_count; i++) { + for (size_t i = 0; i < cc_count && payload_size >= 3; i++) { uint8_t marker = br.getBits(5); CHECK_EQ(marker, 0x1f); @@ -253,6 +280,8 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) { uint8_t cc_data_1 = br.getBits(8) & 0x7f; uint8_t cc_data_2 = br.getBits(8) & 0x7f; + payload_size -= 3; + if (cc_valid && (cc_type == 0 || cc_type == 1)) { CCData cc(cc_type, cc_data_1, cc_data_2); @@ -269,7 +298,6 @@ bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) { } } } - payload_size -= cc_count * 3; mCCMap.add(timeUs, ccBuf); break; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h index 5e06f4e..77fb0fe 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h +++ 
b/media/libmediaplayerservice/nuplayer/NuPlayerCCDecoder.h @@ -49,6 +49,7 @@ private: bool isTrackValid(size_t index) const; int32_t getTrackIndex(size_t channel) const; bool extractFromSEI(const sp<ABuffer> &accessUnit); + bool parseSEINalUnit(int64_t timeUs, const uint8_t *nalStart, size_t nalSize); sp<ABuffer> filterCCBuf(const sp<ABuffer> &ccBuf, size_t index); DISALLOW_EVIL_CONSTRUCTORS(CCDecoder); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp index 5d98d98..04ac699 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp @@ -56,6 +56,7 @@ NuPlayer::Decoder::Decoder( mIsVideoAVC(false), mIsSecure(false), mFormatChangePending(false), + mTimeChangePending(false), mPaused(true), mResumePending(false), mComponentName("decoder") { @@ -121,6 +122,7 @@ void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) { CHECK(mCodec == NULL); mFormatChangePending = false; + mTimeChangePending = false; ++mBufferGeneration; @@ -235,7 +237,7 @@ void NuPlayer::Decoder::onResume(bool notifyComplete) { } } -void NuPlayer::Decoder::onFlush(bool notifyComplete) { +void NuPlayer::Decoder::doFlush(bool notifyComplete) { if (mCCDecoder != NULL) { mCCDecoder->flush(); } @@ -259,13 +261,22 @@ void NuPlayer::Decoder::onFlush(bool notifyComplete) { // we attempt to release the buffers even if flush fails. } releaseAndResetMediaBuffers(); +} - if (notifyComplete) { - sp<AMessage> notify = mNotify->dup(); - notify->setInt32("what", kWhatFlushCompleted); - notify->post(); - mPaused = true; +void NuPlayer::Decoder::onFlush() { + doFlush(true); + + if (isDiscontinuityPending()) { + // This could happen if the client starts seeking/shutdown + // after we queued an EOS for discontinuities. + // We can consider discontinuity handled. 
+ finishHandleDiscontinuity(false /* flushOnTimeChange */); } + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatFlushCompleted); + notify->post(); + mPaused = true; } void NuPlayer::Decoder::onShutdown(bool notifyComplete) { @@ -309,16 +320,17 @@ void NuPlayer::Decoder::onShutdown(bool notifyComplete) { } void NuPlayer::Decoder::doRequestBuffers() { - if (mFormatChangePending) { + if (isDiscontinuityPending()) { return; } status_t err = OK; - while (!mDequeuedInputBuffers.empty()) { + while (err == OK && !mDequeuedInputBuffers.empty()) { size_t bufferIx = *mDequeuedInputBuffers.begin(); sp<AMessage> msg = new AMessage(); msg->setSize("buffer-ix", bufferIx); err = fetchInputData(msg); - if (err != OK) { + if (err != OK && err != ERROR_END_OF_STREAM) { + // if EOS, need to queue EOS buffer break; } mDequeuedInputBuffers.erase(mDequeuedInputBuffers.begin()); @@ -336,7 +348,7 @@ void NuPlayer::Decoder::doRequestBuffers() { } bool NuPlayer::Decoder::handleAnInputBuffer() { - if (mFormatChangePending) { + if (isDiscontinuityPending()) { return false; } size_t bufferIx = -1; @@ -391,9 +403,6 @@ bool NuPlayer::Decoder::handleAnInputBuffer() { } bool NuPlayer::Decoder::handleAnOutputBuffer() { - if (mFormatChangePending) { - return false; - } size_t bufferIx = -1; size_t offset; size_t size; @@ -474,17 +483,20 @@ bool NuPlayer::Decoder::handleAnOutputBuffer() { buffer->setRange(offset, size); buffer->meta()->clear(); buffer->meta()->setInt64("timeUs", timeUs); - if (flags & MediaCodec::BUFFER_FLAG_EOS) { - buffer->meta()->setInt32("eos", true); - notifyResumeCompleteIfNecessary(); - } + + bool eos = flags & MediaCodec::BUFFER_FLAG_EOS; // we do not expect CODECCONFIG or SYNCFRAME for decoder - sp<AMessage> reply = new AMessage(kWhatRenderBuffer, id()); + sp<AMessage> reply = new AMessage(kWhatRenderBuffer, this); reply->setSize("buffer-ix", bufferIx); reply->setInt32("generation", mBufferGeneration); - if (mSkipRenderingUntilMediaTimeUs >= 0) { + if 
(eos) { + ALOGI("[%s] saw output EOS", mIsAudio ? "audio" : "video"); + + buffer->meta()->setInt32("eos", true); + reply->setInt32("eos", true); + } else if (mSkipRenderingUntilMediaTimeUs >= 0) { if (timeUs < mSkipRenderingUntilMediaTimeUs) { ALOGV("[%s] dropping buffer at time %lld as requested.", mComponentName.c_str(), (long long)timeUs); @@ -502,7 +514,7 @@ bool NuPlayer::Decoder::handleAnOutputBuffer() { if (mRenderer != NULL) { // send the buffer to renderer. mRenderer->queueBuffer(mIsAudio, buffer, reply); - if (flags & MediaCodec::BUFFER_FLAG_EOS) { + if (eos && !isDiscontinuityPending()) { mRenderer->queueEOS(mIsAudio, ERROR_END_OF_STREAM); } } @@ -533,11 +545,8 @@ void NuPlayer::Decoder::releaseAndResetMediaBuffers() { } void NuPlayer::Decoder::requestCodecNotification() { - if (mFormatChangePending) { - return; - } if (mCodec != NULL) { - sp<AMessage> reply = new AMessage(kWhatCodecNotify, id()); + sp<AMessage> reply = new AMessage(kWhatCodecNotify, this); reply->setInt32("generation", mBufferGeneration); mCodec->requestActivityNotification(reply); } @@ -582,39 +591,31 @@ status_t NuPlayer::Decoder::fetchInputData(sp<AMessage> &reply) { formatChange = !seamlessFormatChange; } - if (formatChange || timeChange) { - sp<AMessage> msg = mNotify->dup(); - msg->setInt32("what", kWhatInputDiscontinuity); - msg->setInt32("formatChange", formatChange); - msg->post(); - } - + // For format or time change, return EOS to queue EOS input, + // then wait for EOS on output. if (formatChange /* not seamless */) { - // must change decoder - // return EOS and wait to be killed mFormatChangePending = true; - return ERROR_END_OF_STREAM; + err = ERROR_END_OF_STREAM; } else if (timeChange) { - // need to flush - // TODO: Ideally we shouldn't need a flush upon time - // discontinuity, flushing will cause loss of frames. - // We probably should queue a time change marker to the - // output queue, and handles it in renderer instead. 
rememberCodecSpecificData(newFormat); - onFlush(false /* notifyComplete */); - err = OK; + mTimeChangePending = true; + err = ERROR_END_OF_STREAM; } else if (seamlessFormatChange) { // reuse existing decoder and don't flush rememberCodecSpecificData(newFormat); - err = OK; + continue; } else { // This stream is unaffected by the discontinuity return -EWOULDBLOCK; } } + // reply should only be returned without a buffer set + // when there is an error (including EOS) + CHECK(err != OK); + reply->setInt32("err", err); - return OK; + return ERROR_END_OF_STREAM; } if (!mIsAudio) { @@ -636,7 +637,7 @@ status_t NuPlayer::Decoder::fetchInputData(sp<AMessage> &reply) { #if 0 int64_t mediaTimeUs; CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs)); - ALOGV("feeding %s input buffer at media time %.2f secs", + ALOGV("[%s] feeding input buffer at media time %" PRId64, mIsAudio ? "audio" : "video", mediaTimeUs / 1E6); #endif @@ -696,10 +697,7 @@ bool NuPlayer::Decoder::onInputBufferFetched(const sp<AMessage> &msg) { int32_t streamErr = ERROR_END_OF_STREAM; CHECK(msg->findInt32("err", &streamErr) || !hasBuffer); - if (streamErr == OK) { - /* buffers are returned to hold on to */ - return true; - } + CHECK(streamErr != OK); // attempt to queue EOS status_t err = mCodec->queueInputBuffer( @@ -781,6 +779,7 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) { status_t err; int32_t render; size_t bufferIx; + int32_t eos; CHECK(msg->findSize("buffer-ix", &bufferIx)); if (!mIsAudio) { @@ -805,6 +804,42 @@ void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) { mComponentName.c_str(), err); handleError(err); } + if (msg->findInt32("eos", &eos) && eos + && isDiscontinuityPending()) { + finishHandleDiscontinuity(true /* flushOnTimeChange */); + } +} + +bool NuPlayer::Decoder::isDiscontinuityPending() const { + return mFormatChangePending || mTimeChangePending; +} + +void NuPlayer::Decoder::finishHandleDiscontinuity(bool flushOnTimeChange) { + 
ALOGV("finishHandleDiscontinuity: format %d, time %d, flush %d", + mFormatChangePending, mTimeChangePending, flushOnTimeChange); + + // If we have format change, pause and wait to be killed; + // If we have time change only, flush and restart fetching. + + if (mFormatChangePending) { + mPaused = true; + } else if (mTimeChangePending) { + if (flushOnTimeChange) { + doFlush(false /*notifyComplete*/); + } + + // restart fetching input + scheduleRequestBuffers(); + } + + // Notify NuPlayer to either shutdown decoder, or rescan sources + sp<AMessage> msg = mNotify->dup(); + msg->setInt32("what", kWhatInputDiscontinuity); + msg->setInt32("formatChange", mFormatChangePending); + msg->post(); + + mFormatChangePending = false; + mTimeChangePending = false; } bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange( diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h index 1bfa94f..4aab2c6 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h @@ -43,7 +43,7 @@ protected: virtual void onSetRenderer(const sp<Renderer> &renderer); virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers); virtual void onResume(bool notifyComplete); - virtual void onFlush(bool notifyComplete); + virtual void onFlush(); virtual void onShutdown(bool notifyComplete); virtual void doRequestBuffers(); @@ -81,6 +81,7 @@ private: bool mIsVideoAVC; bool mIsSecure; bool mFormatChangePending; + bool mTimeChangePending; bool mPaused; bool mResumePending; @@ -93,6 +94,7 @@ private: void requestCodecNotification(); bool isStaleReply(const sp<AMessage> &msg); + void doFlush(bool notifyComplete); status_t fetchInputData(sp<AMessage> &reply); bool onInputBufferFetched(const sp<AMessage> &msg); void onRenderBuffer(const sp<AMessage> &msg); @@ -100,6 +102,8 @@ private: bool supportsSeamlessFormatChange(const sp<AMessage> &to) const; bool 
supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const; void rememberCodecSpecificData(const sp<AMessage> &format); + bool isDiscontinuityPending() const; + void finishHandleDiscontinuity(bool flushOnTimeChange); void notifyResumeCompleteIfNecessary(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp index d56fc4d..4636f0a 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp @@ -61,7 +61,7 @@ status_t PostAndAwaitResponse( } void NuPlayer::DecoderBase::configure(const sp<AMessage> &format) { - sp<AMessage> msg = new AMessage(kWhatConfigure, id()); + sp<AMessage> msg = new AMessage(kWhatConfigure, this); msg->setMessage("format", format); msg->post(); } @@ -71,13 +71,13 @@ void NuPlayer::DecoderBase::init() { } void NuPlayer::DecoderBase::setRenderer(const sp<Renderer> &renderer) { - sp<AMessage> msg = new AMessage(kWhatSetRenderer, id()); + sp<AMessage> msg = new AMessage(kWhatSetRenderer, this); msg->setObject("renderer", renderer); msg->post(); } status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<ABuffer> > *buffers) const { - sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, id()); + sp<AMessage> msg = new AMessage(kWhatGetInputBuffers, this); msg->setPointer("buffers", buffers); sp<AMessage> response; @@ -85,17 +85,17 @@ status_t NuPlayer::DecoderBase::getInputBuffers(Vector<sp<ABuffer> > *buffers) c } void NuPlayer::DecoderBase::signalFlush() { - (new AMessage(kWhatFlush, id()))->post(); + (new AMessage(kWhatFlush, this))->post(); } void NuPlayer::DecoderBase::signalResume(bool notifyComplete) { - sp<AMessage> msg = new AMessage(kWhatResume, id()); + sp<AMessage> msg = new AMessage(kWhatResume, this); msg->setInt32("notifyComplete", notifyComplete); msg->post(); } void NuPlayer::DecoderBase::initiateShutdown() { - (new AMessage(kWhatShutdown, id()))->post(); 
+ (new AMessage(kWhatShutdown, this))->post(); } void NuPlayer::DecoderBase::onRequestInputBuffers() { @@ -111,7 +111,7 @@ void NuPlayer::DecoderBase::scheduleRequestBuffers() { return; } mRequestInputBuffersPending = true; - sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, id()); + sp<AMessage> msg = new AMessage(kWhatRequestInputBuffers, this); msg->post(10 * 1000ll); } @@ -136,7 +136,7 @@ void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) { case kWhatGetInputBuffers: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); Vector<sp<ABuffer> > *dstBuffers; @@ -157,7 +157,7 @@ void NuPlayer::DecoderBase::onMessageReceived(const sp<AMessage> &msg) { case kWhatFlush: { - onFlush(true); + onFlush(); break; } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h index 6732ff4..97e9269 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.h @@ -65,7 +65,7 @@ protected: virtual void onSetRenderer(const sp<Renderer> &renderer) = 0; virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers) = 0; virtual void onResume(bool notifyComplete) = 0; - virtual void onFlush(bool notifyComplete) = 0; + virtual void onFlush() = 0; virtual void onShutdown(bool notifyComplete) = 0; void onRequestInputBuffers(); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp index 9f7f09a..563de5e 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.cpp @@ -74,11 +74,14 @@ void NuPlayer::DecoderPassThrough::onConfigure(const sp<AMessage> &format) { onRequestInputBuffers(); + int32_t hasVideo = 0; + format->findInt32("has-video", &hasVideo); + // The audio sink is already 
opened before the PassThrough decoder is created. // Opening again might be relevant if decoder is instantiated after shutdown and // format is different. status_t err = mRenderer->openAudioSink( - format, true /* offloadOnly */, false /* hasVideo */, + format, true /* offloadOnly */, hasVideo, AUDIO_OUTPUT_FLAG_NONE /* flags */, NULL /* isOffloaded */); if (err != OK) { handleError(err); @@ -247,7 +250,7 @@ status_t NuPlayer::DecoderPassThrough::fetchInputData(sp<AMessage> &reply) { } if (timeChange) { - onFlush(false /* notifyComplete */); + doFlush(false /* notifyComplete */); err = OK; } else if (formatChange) { // do seamless format change @@ -333,7 +336,7 @@ void NuPlayer::DecoderPassThrough::onInputBufferFetched( return; } - sp<AMessage> reply = new AMessage(kWhatBufferConsumed, id()); + sp<AMessage> reply = new AMessage(kWhatBufferConsumed, this); reply->setInt32("generation", mBufferGeneration); reply->setInt32("size", bufferSize); @@ -364,7 +367,7 @@ void NuPlayer::DecoderPassThrough::onResume(bool notifyComplete) { } } -void NuPlayer::DecoderPassThrough::onFlush(bool notifyComplete) { +void NuPlayer::DecoderPassThrough::doFlush(bool notifyComplete) { ++mBufferGeneration; mSkipRenderingUntilMediaTimeUs = -1; mPendingAudioAccessUnit.clear(); @@ -376,18 +379,21 @@ void NuPlayer::DecoderPassThrough::onFlush(bool notifyComplete) { mRenderer->signalTimeDiscontinuity(); } - if (notifyComplete) { - mPaused = true; - sp<AMessage> notify = mNotify->dup(); - notify->setInt32("what", kWhatFlushCompleted); - notify->post(); - } - mPendingBuffersToDrain = 0; mCachedBytes = 0; mReachedEOS = false; } +void NuPlayer::DecoderPassThrough::onFlush() { + doFlush(true /* notifyComplete */); + + mPaused = true; + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatFlushCompleted); + notify->post(); + +} + void NuPlayer::DecoderPassThrough::onShutdown(bool notifyComplete) { ++mBufferGeneration; mSkipRenderingUntilMediaTimeUs = -1; diff --git 
a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h index a6e1faf..173cfbd 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderPassThrough.h @@ -43,7 +43,7 @@ protected: virtual void onSetRenderer(const sp<Renderer> &renderer); virtual void onGetInputBuffers(Vector<sp<ABuffer> > *dstBuffers); virtual void onResume(bool notifyComplete); - virtual void onFlush(bool notifyComplete); + virtual void onFlush(); virtual void onShutdown(bool notifyComplete); virtual void doRequestBuffers(); @@ -77,6 +77,7 @@ private: status_t dequeueAccessUnit(sp<ABuffer> *accessUnit); sp<ABuffer> aggregateBuffer(const sp<ABuffer> &accessUnit); status_t fetchInputData(sp<AMessage> &reply); + void doFlush(bool notifyComplete); void onInputBufferFetched(const sp<AMessage> &msg); void onBufferConsumed(int32_t size); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp index bc79fdb..1fa9cef 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -341,6 +341,11 @@ bool NuPlayerDriver::isPlaying() { return mState == STATE_RUNNING && !mAtEOS; } +status_t NuPlayerDriver::setPlaybackRate(float rate) { + mPlayer->setPlaybackRate(rate); + return OK; +} + status_t NuPlayerDriver::seekTo(int msec) { ALOGD("seekTo(%p) %d ms", this, msec); Mutex::Autolock autoLock(mLock); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h index 5cba7d9..e53abcd 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -47,6 +47,7 @@ struct NuPlayerDriver : public MediaPlayerInterface { virtual status_t stop(); virtual status_t pause(); virtual bool isPlaying(); + 
virtual status_t setPlaybackRate(float rate); virtual status_t seekTo(int msec); virtual status_t getCurrentPosition(int *msec); virtual status_t getDuration(int *msec); diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp index 25225a8..a2ec51c 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -25,6 +25,7 @@ #include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/foundation/AUtils.h> #include <media/stagefright/foundation/AWakeLock.h> +#include <media/stagefright/MediaClock.h> #include <media/stagefright/MediaErrors.h> #include <media/stagefright/MetaData.h> #include <media/stagefright/Utils.h> @@ -63,22 +64,19 @@ NuPlayer::Renderer::Renderer( mDrainVideoQueuePending(false), mAudioQueueGeneration(0), mVideoQueueGeneration(0), + mAudioDrainGeneration(0), + mVideoDrainGeneration(0), + mPlaybackRate(1.0), mAudioFirstAnchorTimeMediaUs(-1), mAnchorTimeMediaUs(-1), - mAnchorTimeRealUs(-1), mAnchorNumFramesWritten(-1), - mAnchorMaxMediaUs(-1), mVideoLateByUs(0ll), mHasAudio(false), mHasVideo(false), - mPauseStartedTimeRealUs(-1), - mFlushingAudio(false), - mFlushingVideo(false), mNotifyCompleteAudio(false), mNotifyCompleteVideo(false), mSyncQueues(false), mPaused(false), - mPausePositionMediaTimeUs(-1), mVideoSampleReceived(false), mVideoRenderingStarted(false), mVideoRenderingStartGeneration(0), @@ -90,7 +88,7 @@ NuPlayer::Renderer::Renderer( mTotalBuffersQueued(0), mLastAudioBufferDrained(0), mWakeLock(new AWakeLock()) { - + mMediaClock = new MediaClock; } NuPlayer::Renderer::~Renderer() { @@ -105,7 +103,8 @@ void NuPlayer::Renderer::queueBuffer( bool audio, const sp<ABuffer> &buffer, const sp<AMessage> ¬ifyConsumed) { - sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this); + msg->setInt32("queueGeneration", 
getQueueGeneration(audio)); msg->setInt32("audio", static_cast<int32_t>(audio)); msg->setBuffer("buffer", buffer); msg->setMessage("notifyConsumed", notifyConsumed); @@ -115,199 +114,108 @@ void NuPlayer::Renderer::queueBuffer( void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { CHECK_NE(finalResult, (status_t)OK); - sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); + sp<AMessage> msg = new AMessage(kWhatQueueEOS, this); + msg->setInt32("queueGeneration", getQueueGeneration(audio)); msg->setInt32("audio", static_cast<int32_t>(audio)); msg->setInt32("finalResult", finalResult); msg->post(); } +void NuPlayer::Renderer::setPlaybackRate(float rate) { + sp<AMessage> msg = new AMessage(kWhatSetRate, this); + msg->setFloat("rate", rate); + msg->post(); +} + void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) { { - Mutex::Autolock autoLock(mFlushLock); + Mutex::Autolock autoLock(mLock); if (audio) { mNotifyCompleteAudio |= notifyComplete; - if (mFlushingAudio) { - return; - } - mFlushingAudio = true; + ++mAudioQueueGeneration; + ++mAudioDrainGeneration; } else { mNotifyCompleteVideo |= notifyComplete; - if (mFlushingVideo) { - return; - } - mFlushingVideo = true; + ++mVideoQueueGeneration; + ++mVideoDrainGeneration; } + + clearAnchorTime_l(); + clearAudioFirstAnchorTime_l(); + mVideoLateByUs = 0; + mSyncQueues = false; } - sp<AMessage> msg = new AMessage(kWhatFlush, id()); + sp<AMessage> msg = new AMessage(kWhatFlush, this); msg->setInt32("audio", static_cast<int32_t>(audio)); msg->post(); } void NuPlayer::Renderer::signalTimeDiscontinuity() { - Mutex::Autolock autoLock(mLock); - // CHECK(mAudioQueue.empty()); - // CHECK(mVideoQueue.empty()); - setAudioFirstAnchorTime(-1); - setAnchorTime(-1, -1); - setVideoLateByUs(0); - mSyncQueues = false; -} - -void NuPlayer::Renderer::signalAudioSinkChanged() { - (new AMessage(kWhatAudioSinkChanged, id()))->post(); } void NuPlayer::Renderer::signalDisableOffloadAudio() { - (new 
AMessage(kWhatDisableOffloadAudio, id()))->post(); + (new AMessage(kWhatDisableOffloadAudio, this))->post(); } void NuPlayer::Renderer::signalEnableOffloadAudio() { - (new AMessage(kWhatEnableOffloadAudio, id()))->post(); + (new AMessage(kWhatEnableOffloadAudio, this))->post(); } void NuPlayer::Renderer::pause() { - (new AMessage(kWhatPause, id()))->post(); + (new AMessage(kWhatPause, this))->post(); } void NuPlayer::Renderer::resume() { - (new AMessage(kWhatResume, id()))->post(); + (new AMessage(kWhatResume, this))->post(); } void NuPlayer::Renderer::setVideoFrameRate(float fps) { - sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, id()); + sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this); msg->setFloat("frame-rate", fps); msg->post(); } -// Called on any threads, except renderer's thread. -status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) { - { - Mutex::Autolock autoLock(mLock); - int64_t currentPositionUs; - if (getCurrentPositionIfPaused_l(¤tPositionUs)) { - *mediaUs = currentPositionUs; - return OK; - } - } - return getCurrentPositionFromAnchor(mediaUs, ALooper::GetNowUs()); -} - -// Called on only renderer's thread. -status_t NuPlayer::Renderer::getCurrentPositionOnLooper(int64_t *mediaUs) { - return getCurrentPositionOnLooper(mediaUs, ALooper::GetNowUs()); -} - -// Called on only renderer's thread. -// Since mPaused and mPausePositionMediaTimeUs are changed only on renderer's -// thread, no need to acquire mLock. -status_t NuPlayer::Renderer::getCurrentPositionOnLooper( - int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) { - int64_t currentPositionUs; - if (getCurrentPositionIfPaused_l(¤tPositionUs)) { - *mediaUs = currentPositionUs; - return OK; - } - return getCurrentPositionFromAnchor(mediaUs, nowUs, allowPastQueuedVideo); -} - -// Called either with mLock acquired or on renderer's thread. 
-bool NuPlayer::Renderer::getCurrentPositionIfPaused_l(int64_t *mediaUs) { - if (!mPaused || mPausePositionMediaTimeUs < 0ll) { - return false; - } - *mediaUs = mPausePositionMediaTimeUs; - return true; -} - // Called on any threads. -status_t NuPlayer::Renderer::getCurrentPositionFromAnchor( - int64_t *mediaUs, int64_t nowUs, bool allowPastQueuedVideo) { - Mutex::Autolock autoLock(mTimeLock); - if (!mHasAudio && !mHasVideo) { - return NO_INIT; - } - - if (mAnchorTimeMediaUs < 0) { - return NO_INIT; - } - - int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; - - if (mPauseStartedTimeRealUs != -1) { - positionUs -= (nowUs - mPauseStartedTimeRealUs); - } - - // limit position to the last queued media time (for video only stream - // position will be discrete as we don't know how long each frame lasts) - if (mAnchorMaxMediaUs >= 0 && !allowPastQueuedVideo) { - if (positionUs > mAnchorMaxMediaUs) { - positionUs = mAnchorMaxMediaUs; - } - } - - if (positionUs < mAudioFirstAnchorTimeMediaUs) { - positionUs = mAudioFirstAnchorTimeMediaUs; - } - - *mediaUs = (positionUs <= 0) ? 
0 : positionUs; - return OK; -} - -void NuPlayer::Renderer::setHasMedia(bool audio) { - Mutex::Autolock autoLock(mTimeLock); - if (audio) { - mHasAudio = true; - } else { - mHasVideo = true; - } +status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) { + return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs); } -void NuPlayer::Renderer::setAudioFirstAnchorTime(int64_t mediaUs) { - Mutex::Autolock autoLock(mTimeLock); - mAudioFirstAnchorTimeMediaUs = mediaUs; +void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() { + mAudioFirstAnchorTimeMediaUs = -1; + mMediaClock->setStartingTimeMedia(-1); } -void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs) { - Mutex::Autolock autoLock(mTimeLock); +void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) { if (mAudioFirstAnchorTimeMediaUs == -1) { mAudioFirstAnchorTimeMediaUs = mediaUs; + mMediaClock->setStartingTimeMedia(mediaUs); } } -void NuPlayer::Renderer::setAnchorTime( - int64_t mediaUs, int64_t realUs, int64_t numFramesWritten, bool resume) { - Mutex::Autolock autoLock(mTimeLock); - mAnchorTimeMediaUs = mediaUs; - mAnchorTimeRealUs = realUs; - mAnchorNumFramesWritten = numFramesWritten; - if (resume) { - mPauseStartedTimeRealUs = -1; - } +void NuPlayer::Renderer::clearAnchorTime_l() { + mMediaClock->clearAnchor(); + mAnchorTimeMediaUs = -1; + mAnchorNumFramesWritten = -1; } void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) { - Mutex::Autolock autoLock(mTimeLock); + Mutex::Autolock autoLock(mLock); mVideoLateByUs = lateUs; } int64_t NuPlayer::Renderer::getVideoLateByUs() { - Mutex::Autolock autoLock(mTimeLock); + Mutex::Autolock autoLock(mLock); return mVideoLateByUs; } -void NuPlayer::Renderer::setPauseStartedTimeRealUs(int64_t realUs) { - Mutex::Autolock autoLock(mTimeLock); - mPauseStartedTimeRealUs = realUs; -} - status_t NuPlayer::Renderer::openAudioSink( const sp<AMessage> &format, bool offloadOnly, bool hasVideo, uint32_t flags, bool 
*isOffloaded) { - sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, id()); + sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this); msg->setMessage("format", format); msg->setInt32("offload-only", offloadOnly); msg->setInt32("has-video", hasVideo); @@ -328,7 +236,7 @@ status_t NuPlayer::Renderer::openAudioSink( } void NuPlayer::Renderer::closeAudioSink() { - sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, id()); + sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this); sp<AMessage> response; msg->postAndAwaitResponse(&response); @@ -356,7 +264,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { response->setInt32("err", err); response->setInt32("offload", offloadingAudio()); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); @@ -365,7 +273,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { case kWhatCloseAudioSink: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); onCloseAudioSink(); @@ -384,8 +292,8 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { case kWhatDrainAudioQueue: { int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - if (generation != mAudioQueueGeneration) { + CHECK(msg->findInt32("drainGeneration", &generation)); + if (generation != getDrainGeneration(true /* audio */)) { break; } @@ -407,9 +315,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { // Let's give it more data after about half that time // has elapsed. - // kWhatDrainAudioQueue is used for non-offloading mode, - // and mLock is used only for offloading mode. Therefore, - // no need to acquire mLock here. 
+ Mutex::Autolock autoLock(mLock); postDrainAudioQueue_l(delayUs / 2); } break; @@ -418,8 +324,8 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { case kWhatDrainVideoQueue: { int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - if (generation != mVideoQueueGeneration) { + CHECK(msg->findInt32("drainGeneration", &generation)); + if (generation != getDrainGeneration(false /* audio */)) { break; } @@ -427,22 +333,20 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { onDrainVideoQueue(); - Mutex::Autolock autoLock(mLock); - postDrainVideoQueue_l(); + postDrainVideoQueue(); break; } case kWhatPostDrainVideoQueue: { int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - if (generation != mVideoQueueGeneration) { + CHECK(msg->findInt32("drainGeneration", &generation)); + if (generation != getDrainGeneration(false /* audio */)) { break; } mDrainVideoQueuePending = false; - Mutex::Autolock autoLock(mLock); - postDrainVideoQueue_l(); + postDrainVideoQueue(); break; } @@ -458,15 +362,19 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { break; } - case kWhatFlush: + case kWhatSetRate: { - onFlush(msg); + CHECK(msg->findFloat("rate", &mPlaybackRate)); + int32_t ratePermille = (int32_t)(0.5f + 1000 * mPlaybackRate); + mPlaybackRate = ratePermille / 1000.0f; + mMediaClock->setPlaybackRate(mPlaybackRate); + mAudioSink->setPlaybackRatePermille(ratePermille); break; } - case kWhatAudioSinkChanged: + case kWhatFlush: { - onAudioSinkChanged(); + onFlush(msg); break; } @@ -511,7 +419,7 @@ void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { case kWhatAudioOffloadPauseTimeout: { int32_t generation; - CHECK(msg->findInt32("generation", &generation)); + CHECK(msg->findInt32("drainGeneration", &generation)); if (generation != mAudioOffloadPauseTimeoutGeneration) { break; } @@ -538,19 +446,19 @@ void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) { } 
mDrainAudioQueuePending = true; - sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); - msg->setInt32("generation", mAudioQueueGeneration); + sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this); + msg->setInt32("drainGeneration", mAudioDrainGeneration); msg->post(delayUs); } -void NuPlayer::Renderer::prepareForMediaRenderingStart() { - mAudioRenderingStartGeneration = mAudioQueueGeneration; - mVideoRenderingStartGeneration = mVideoQueueGeneration; +void NuPlayer::Renderer::prepareForMediaRenderingStart_l() { + mAudioRenderingStartGeneration = mAudioDrainGeneration; + mVideoRenderingStartGeneration = mVideoDrainGeneration; } -void NuPlayer::Renderer::notifyIfMediaRenderingStarted() { - if (mVideoRenderingStartGeneration == mVideoQueueGeneration && - mAudioRenderingStartGeneration == mAudioQueueGeneration) { +void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() { + if (mVideoRenderingStartGeneration == mVideoDrainGeneration && + mAudioRenderingStartGeneration == mAudioDrainGeneration) { mVideoRenderingStartGeneration = -1; mAudioRenderingStartGeneration = -1; @@ -618,7 +526,7 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) { int64_t mediaTimeUs; CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); ALOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); - setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); + setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs); } size_t copy = entry->mBuffer->size() - entry->mOffset; @@ -638,34 +546,45 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) { entry = NULL; } sizeCopied += copy; - notifyIfMediaRenderingStarted(); + + notifyIfMediaRenderingStarted_l(); } if (mAudioFirstAnchorTimeMediaUs >= 0) { int64_t nowUs = ALooper::GetNowUs(); - setAnchorTime(mAudioFirstAnchorTimeMediaUs, nowUs - getPlayedOutAudioDurationUs(nowUs)); + int64_t nowMediaUs = + mAudioFirstAnchorTimeMediaUs + getPlayedOutAudioDurationUs(nowUs); + // we don't know how much data 
we are queueing for offloaded tracks. + mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX); } - // we don't know how much data we are queueing for offloaded tracks - mAnchorMaxMediaUs = -1; - if (hasEOS) { - (new AMessage(kWhatStopAudioSink, id()))->post(); + (new AMessage(kWhatStopAudioSink, this))->post(); } return sizeCopied; } bool NuPlayer::Renderer::onDrainAudioQueue() { + // TODO: This call to getPosition checks if AudioTrack has been created + // in AudioSink before draining audio. If AudioTrack doesn't exist, then + // CHECKs on getPosition will fail. + // We still need to figure out why AudioTrack is not created when + // this function is called. One possible reason could be leftover + // audio. Another possible place is to check whether decoder + // has received INFO_FORMAT_CHANGED as the first buffer since + // AudioSink is opened there, and possible interactions with flush + // immediately after start. Investigate error message + // "vorbis_dsp_synthesis returned -135", along with RTSP. 
uint32_t numFramesPlayed; if (mAudioSink->getPosition(&numFramesPlayed) != OK) { return false; } +#if 0 ssize_t numFramesAvailableToWrite = mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); -#if 0 if (numFramesAvailableToWrite == mAudioSink->frameCount()) { ALOGI("audio sink underrun"); } else { @@ -674,10 +593,7 @@ bool NuPlayer::Renderer::onDrainAudioQueue() { } #endif - size_t numBytesAvailableToWrite = - numFramesAvailableToWrite * mAudioSink->frameSize(); - - while (numBytesAvailableToWrite > 0 && !mAudioQueue.empty()) { + while (!mAudioQueue.empty()) { QueueEntry *entry = &*mAudioQueue.begin(); mLastAudioBufferDrained = entry->mBufferOrdinal; @@ -710,14 +626,16 @@ bool NuPlayer::Renderer::onDrainAudioQueue() { } size_t copy = entry->mBuffer->size() - entry->mOffset; - if (copy > numBytesAvailableToWrite) { - copy = numBytesAvailableToWrite; - } - ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, copy); + ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset, + copy, false /* blocking */); if (written < 0) { // An error in AudioSink write. Perhaps the AudioSink was not properly opened. - ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy); + if (written == WOULD_BLOCK) { + ALOGW("AudioSink write would block when writing %zu bytes", copy); + } else { + ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy); + } break; } @@ -729,73 +647,92 @@ bool NuPlayer::Renderer::onDrainAudioQueue() { entry = NULL; } - numBytesAvailableToWrite -= written; size_t copiedFrames = written / mAudioSink->frameSize(); mNumFramesWritten += copiedFrames; - notifyIfMediaRenderingStarted(); + { + Mutex::Autolock autoLock(mLock); + notifyIfMediaRenderingStarted_l(); + } if (written != (ssize_t)copy) { // A short count was received from AudioSink::write() // - // AudioSink write should block until exactly the number of bytes are delivered. 
- // But it may return with a short count (without an error) when: + // AudioSink write is called in non-blocking mode. + // It may return with a short count when: // // 1) Size to be copied is not a multiple of the frame size. We consider this fatal. - // 2) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded. + // 2) The data to be copied exceeds the available buffer in AudioSink. + // 3) An error occurs and data has been partially copied to the buffer in AudioSink. + // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded. // (Case 1) // Must be a multiple of the frame size. If it is not a multiple of a frame size, it // needs to fail, as we should not carry over fractional frames between calls. CHECK_EQ(copy % mAudioSink->frameSize(), 0); - // (Case 2) + // (Case 2, 3, 4) // Return early to the caller. // Beware of calling immediately again as this may busy-loop if you are not careful. - ALOGW("AudioSink write short frame count %zd < %zu", written, copy); + ALOGV("AudioSink write short frame count %zd < %zu", written, copy); break; } } - mAnchorMaxMediaUs = - mAnchorTimeMediaUs + - (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL) - * 1000LL * mAudioSink->msecsPerFrame()); + int64_t maxTimeMedia; + { + Mutex::Autolock autoLock(mLock); + maxTimeMedia = + mAnchorTimeMediaUs + + (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL) + * 1000LL * mAudioSink->msecsPerFrame()); + } + mMediaClock->updateMaxTimeMedia(maxTimeMedia); return !mAudioQueue.empty(); } +int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) { + int32_t sampleRate = offloadingAudio() ? + mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate; + // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. 
+ return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate); +} + +// Calculate duration of pending samples if played at normal rate (i.e., 1.0). int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) { - int64_t writtenAudioDurationUs = - mNumFramesWritten * 1000LL * mAudioSink->msecsPerFrame(); + int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten); return writtenAudioDurationUs - getPlayedOutAudioDurationUs(nowUs); } int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) { - int64_t currentPositionUs; - if (mPaused || getCurrentPositionOnLooper( - ¤tPositionUs, nowUs, true /* allowPastQueuedVideo */) != OK) { - // If failed to get current position, e.g. due to audio clock is not ready, then just - // play out video immediately without delay. + int64_t realUs; + if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) { + // If failed to get current position, e.g. due to audio clock is + // not ready, then just play out video immediately without delay. return nowUs; } - return (mediaTimeUs - currentPositionUs) + nowUs; + return realUs; } void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) { + Mutex::Autolock autoLock(mLock); // TRICKY: vorbis decoder generates multiple frames with the same // timestamp, so only update on the first frame with a given timestamp if (mediaTimeUs == mAnchorTimeMediaUs) { return; } - setAudioFirstAnchorTimeIfNeeded(mediaTimeUs); + setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs); int64_t nowUs = ALooper::GetNowUs(); - setAnchorTime( - mediaTimeUs, nowUs + getPendingAudioPlayoutDurationUs(nowUs), mNumFramesWritten); + int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs); + mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs); + mAnchorNumFramesWritten = mNumFramesWritten; + mAnchorTimeMediaUs = mediaTimeUs; } -void NuPlayer::Renderer::postDrainVideoQueue_l() { +// Called without mLock acquired. 
+void NuPlayer::Renderer::postDrainVideoQueue() { if (mDrainVideoQueuePending - || mSyncQueues + || getSyncQueues() || (mPaused && mVideoSampleReceived)) { return; } @@ -806,8 +743,8 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() { QueueEntry &entry = *mVideoQueue.begin(); - sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); - msg->setInt32("generation", mVideoQueueGeneration); + sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this); + msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */)); if (entry.mBuffer == NULL) { // EOS doesn't carry a timestamp. @@ -827,16 +764,19 @@ void NuPlayer::Renderer::postDrainVideoQueue_l() { int64_t mediaTimeUs; CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); - if (mAnchorTimeMediaUs < 0) { - setAnchorTime(mediaTimeUs, nowUs); - mPausePositionMediaTimeUs = mediaTimeUs; - mAnchorMaxMediaUs = mediaTimeUs; - realTimeUs = nowUs; - } else { - realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); + { + Mutex::Autolock autoLock(mLock); + if (mAnchorTimeMediaUs < 0) { + mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs); + mAnchorTimeMediaUs = mediaTimeUs; + realTimeUs = nowUs; + } else { + realTimeUs = getRealTimeUs(mediaTimeUs, nowUs); + } } if (!mHasAudio) { - mAnchorMaxMediaUs = mediaTimeUs + 100000; // smooth out videos >= 10fps + // smooth out videos >= 10fps + mMediaClock->updateMaxTimeMedia(mediaTimeUs + 100000); } // Heuristics to handle situation when media time changed without a @@ -915,16 +855,19 @@ void NuPlayer::Renderer::onDrainVideoQueue() { ALOGV("video late by %lld us (%.2f secs)", mVideoLateByUs, mVideoLateByUs / 1E6); } else { + int64_t mediaUs = 0; + mMediaClock->getMediaTime(realTimeUs, &mediaUs); ALOGV("rendering video at media time %.2f secs", (mFlags & FLAG_REAL_TIME ? 
realTimeUs : - (realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6); + mediaUs) / 1E6); } } else { setVideoLateByUs(0); if (!mVideoSampleReceived && !mHasAudio) { // This will ensure that the first frame after a flush won't be used as anchor // when renderer is in paused state, because resume can happen any time after seek. - setAnchorTime(-1, -1); + Mutex::Autolock autoLock(mLock); + clearAnchorTime_l(); } } @@ -941,7 +884,8 @@ void NuPlayer::Renderer::onDrainVideoQueue() { mVideoRenderingStarted = true; notifyVideoRenderingStart(); } - notifyIfMediaRenderingStarted(); + Mutex::Autolock autoLock(mLock); + notifyIfMediaRenderingStarted_l(); } } @@ -960,14 +904,22 @@ void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t del } void NuPlayer::Renderer::notifyAudioOffloadTearDown() { - (new AMessage(kWhatAudioOffloadTearDown, id()))->post(); + (new AMessage(kWhatAudioOffloadTearDown, this))->post(); } void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { int32_t audio; CHECK(msg->findInt32("audio", &audio)); - setHasMedia(audio); + if (dropBufferIfStale(audio, msg)) { + return; + } + + if (audio) { + mHasAudio = true; + } else { + mHasVideo = true; + } if (mHasVideo) { if (mVideoScheduler == NULL) { @@ -976,10 +928,6 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { } } - if (dropBufferWhileFlushing(audio, msg)) { - return; - } - sp<ABuffer> buffer; CHECK(msg->findBuffer("buffer", &buffer)); @@ -993,15 +941,16 @@ void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { entry.mFinalResult = OK; entry.mBufferOrdinal = ++mTotalBuffersQueued; - Mutex::Autolock autoLock(mLock); if (audio) { + Mutex::Autolock autoLock(mLock); mAudioQueue.push_back(entry); postDrainAudioQueue_l(); } else { mVideoQueue.push_back(entry); - postDrainVideoQueue_l(); + postDrainVideoQueue(); } + Mutex::Autolock autoLock(mLock); if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) { return; } @@ -1050,7 +999,9 @@ 
void NuPlayer::Renderer::syncQueuesDone_l() { } if (!mVideoQueue.empty()) { - postDrainVideoQueue_l(); + mLock.unlock(); + postDrainVideoQueue(); + mLock.lock(); } } @@ -1058,7 +1009,7 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { int32_t audio; CHECK(msg->findInt32("audio", &audio)); - if (dropBufferWhileFlushing(audio, msg)) { + if (dropBufferIfStale(audio, msg)) { return; } @@ -1069,19 +1020,20 @@ void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { entry.mOffset = 0; entry.mFinalResult = finalResult; - Mutex::Autolock autoLock(mLock); if (audio) { + Mutex::Autolock autoLock(mLock); if (mAudioQueue.empty() && mSyncQueues) { syncQueuesDone_l(); } mAudioQueue.push_back(entry); postDrainAudioQueue_l(); } else { - if (mVideoQueue.empty() && mSyncQueues) { + if (mVideoQueue.empty() && getSyncQueues()) { + Mutex::Autolock autoLock(mLock); syncQueuesDone_l(); } mVideoQueue.push_back(entry); - postDrainVideoQueue_l(); + postDrainVideoQueue(); } } @@ -1090,31 +1042,25 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { CHECK(msg->findInt32("audio", &audio)); { - Mutex::Autolock autoLock(mFlushLock); + Mutex::Autolock autoLock(mLock); if (audio) { - mFlushingAudio = false; notifyComplete = mNotifyCompleteAudio; mNotifyCompleteAudio = false; } else { - mFlushingVideo = false; notifyComplete = mNotifyCompleteVideo; mNotifyCompleteVideo = false; } - } - // If we're currently syncing the queues, i.e. dropping audio while - // aligning the first audio/video buffer times and only one of the - // two queues has data, we may starve that queue by not requesting - // more buffers from the decoder. If the other source then encounters - // a discontinuity that leads to flushing, we'll never find the - // corresponding discontinuity on the other queue. - // Therefore we'll stop syncing the queues if at least one of them - // is flushed. 
- { - Mutex::Autolock autoLock(mLock); - syncQueuesDone_l(); - setPauseStartedTimeRealUs(-1); - setAnchorTime(-1, -1); + // If we're currently syncing the queues, i.e. dropping audio while + // aligning the first audio/video buffer times and only one of the + // two queues has data, we may starve that queue by not requesting + // more buffers from the decoder. If the other source then encounters + // a discontinuity that leads to flushing, we'll never find the + // corresponding discontinuity on the other queue. + // Therefore we'll stop syncing the queues if at least one of them + // is flushed. + syncQueuesDone_l(); + clearAnchorTime_l(); } ALOGV("flushing %s", audio ? "audio" : "video"); @@ -1123,11 +1069,11 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { Mutex::Autolock autoLock(mLock); flushQueue(&mAudioQueue); - ++mAudioQueueGeneration; - prepareForMediaRenderingStart(); + ++mAudioDrainGeneration; + prepareForMediaRenderingStart_l(); if (offloadingAudio()) { - setAudioFirstAnchorTime(-1); + clearAudioFirstAnchorTime_l(); } } @@ -1142,13 +1088,14 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { flushQueue(&mVideoQueue); mDrainVideoQueuePending = false; - ++mVideoQueueGeneration; if (mVideoScheduler != NULL) { mVideoScheduler->restart(); } - prepareForMediaRenderingStart(); + Mutex::Autolock autoLock(mLock); + ++mVideoDrainGeneration; + prepareForMediaRenderingStart_l(); } mVideoSampleReceived = false; @@ -1178,20 +1125,12 @@ void NuPlayer::Renderer::notifyFlushComplete(bool audio) { notify->post(); } -bool NuPlayer::Renderer::dropBufferWhileFlushing( +bool NuPlayer::Renderer::dropBufferIfStale( bool audio, const sp<AMessage> &msg) { - bool flushing = false; - - { - Mutex::Autolock autoLock(mFlushLock); - if (audio) { - flushing = mFlushingAudio; - } else { - flushing = mFlushingVideo; - } - } + int32_t queueGeneration; + CHECK(msg->findInt32("queueGeneration", &queueGeneration)); - if (!flushing) { + if (queueGeneration == 
getQueueGeneration(audio)) { return false; } @@ -1209,7 +1148,10 @@ void NuPlayer::Renderer::onAudioSinkChanged() { } CHECK(!mDrainAudioQueuePending); mNumFramesWritten = 0; - mAnchorNumFramesWritten = -1; + { + Mutex::Autolock autoLock(mLock); + mAnchorNumFramesWritten = -1; + } uint32_t written; if (mAudioSink->getFramesWritten(&written) == OK) { mNumFramesWritten = written; @@ -1219,13 +1161,13 @@ void NuPlayer::Renderer::onAudioSinkChanged() { void NuPlayer::Renderer::onDisableOffloadAudio() { Mutex::Autolock autoLock(mLock); mFlags &= ~FLAG_OFFLOAD_AUDIO; - ++mAudioQueueGeneration; + ++mAudioDrainGeneration; } void NuPlayer::Renderer::onEnableOffloadAudio() { Mutex::Autolock autoLock(mLock); mFlags |= FLAG_OFFLOAD_AUDIO; - ++mAudioQueueGeneration; + ++mAudioDrainGeneration; } void NuPlayer::Renderer::onPause() { @@ -1234,25 +1176,13 @@ void NuPlayer::Renderer::onPause() { return; } int64_t currentPositionUs; - int64_t pausePositionMediaTimeUs; - if (getCurrentPositionFromAnchor( - ¤tPositionUs, ALooper::GetNowUs()) == OK) { - pausePositionMediaTimeUs = currentPositionUs; - } else { - // Set paused position to -1 (unavailabe) if we don't have anchor time - // This could happen if client does a seekTo() immediately followed by - // pause(). Renderer will be flushed with anchor time cleared. We don't - // want to leave stale value in mPausePositionMediaTimeUs. 
- pausePositionMediaTimeUs = -1; - } { Mutex::Autolock autoLock(mLock); - mPausePositionMediaTimeUs = pausePositionMediaTimeUs; - ++mAudioQueueGeneration; - ++mVideoQueueGeneration; - prepareForMediaRenderingStart(); + ++mAudioDrainGeneration; + ++mVideoDrainGeneration; + prepareForMediaRenderingStart_l(); mPaused = true; - setPauseStartedTimeRealUs(ALooper::GetNowUs()); + mMediaClock->setPlaybackRate(0.0); } mDrainAudioQueuePending = false; @@ -1277,21 +1207,18 @@ void NuPlayer::Renderer::onResume() { mAudioSink->start(); } - Mutex::Autolock autoLock(mLock); - mPaused = false; - if (mPauseStartedTimeRealUs != -1) { - int64_t newAnchorRealUs = - mAnchorTimeRealUs + ALooper::GetNowUs() - mPauseStartedTimeRealUs; - setAnchorTime( - mAnchorTimeMediaUs, newAnchorRealUs, mAnchorNumFramesWritten, true /* resume */); - } + { + Mutex::Autolock autoLock(mLock); + mPaused = false; + mMediaClock->setPlaybackRate(mPlaybackRate); - if (!mAudioQueue.empty()) { - postDrainAudioQueue_l(); + if (!mAudioQueue.empty()) { + postDrainAudioQueue_l(); + } } if (!mVideoQueue.empty()) { - postDrainVideoQueue_l(); + postDrainVideoQueue(); } } @@ -1302,6 +1229,21 @@ void NuPlayer::Renderer::onSetVideoFrameRate(float fps) { mVideoScheduler->init(fps); } +int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) { + Mutex::Autolock autoLock(mLock); + return (audio ? mAudioQueueGeneration : mVideoQueueGeneration); +} + +int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) { + Mutex::Autolock autoLock(mLock); + return (audio ? mAudioDrainGeneration : mVideoDrainGeneration); +} + +bool NuPlayer::Renderer::getSyncQueues() { + Mutex::Autolock autoLock(mLock); + return mSyncQueues; +} + // TODO: Remove unnecessary calls to getPlayedOutAudioDurationUs() // as it acquires locks and may query the audio driver. // @@ -1309,6 +1251,7 @@ void NuPlayer::Renderer::onSetVideoFrameRate(float fps) { // accessing getTimestamp() or getPosition() every time a data buffer with // a media time is received. 
// +// Calculate duration of played samples if played at normal rate (i.e., 1.0). int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { uint32_t numFramesPlayed; int64_t numFramesPlayedAt; @@ -1346,9 +1289,8 @@ int64_t NuPlayer::Renderer::getPlayedOutAudioDurationUs(int64_t nowUs) { //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); } - // TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours. //CHECK_EQ(numFramesPlayed & (1 << 31), 0); // can't be negative until 12.4 hrs, test - int64_t durationUs = (int64_t)((int32_t)numFramesPlayed * 1000LL * mAudioSink->msecsPerFrame()) + int64_t durationUs = getDurationUsIfPlayedAtSampleRate(numFramesPlayed) + nowUs - numFramesPlayedAt; if (durationUs < 0) { // Occurs when numFramesPlayed position is very small and the following: @@ -1373,7 +1315,7 @@ void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reaso mAudioOffloadTornDown = true; int64_t currentPositionUs; - if (getCurrentPositionOnLooper(¤tPositionUs) != OK) { + if (getCurrentPosition(¤tPositionUs) != OK) { currentPositionUs = 0; } @@ -1390,8 +1332,8 @@ void NuPlayer::Renderer::onAudioOffloadTearDown(AudioOffloadTearDownReason reaso void NuPlayer::Renderer::startAudioOffloadPauseTimeout() { if (offloadingAudio()) { mWakeLock->acquire(); - sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, id()); - msg->setInt32("generation", mAudioOffloadPauseTimeoutGeneration); + sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this); + msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration); msg->post(kOffloadPauseMaxUs); } } @@ -1487,6 +1429,10 @@ status_t NuPlayer::Renderer::onOpenAudioSink( &offloadInfo); if (err == OK) { + if (mPlaybackRate != 1.0) { + mAudioSink->setPlaybackRatePermille( + (int32_t)(mPlaybackRate * 1000 + 0.5f)); + } // If the playback is offloaded to h/w, we pass // the HAL some metadata information. 
// We don't want to do this for PCM because it @@ -1542,6 +1488,10 @@ status_t NuPlayer::Renderer::onOpenAudioSink( return err; } mCurrentPcmInfo = info; + if (mPlaybackRate != 1.0) { + mAudioSink->setPlaybackRatePermille( + (int32_t)(mPlaybackRate * 1000 + 0.5f)); + } mAudioSink->start(); } if (audioSinkChanged) { diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h index 003d1d0..38843d5 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h @@ -24,6 +24,7 @@ namespace android { struct ABuffer; class AWakeLock; +struct MediaClock; struct VideoFrameScheduler; struct NuPlayer::Renderer : public AHandler { @@ -47,6 +48,8 @@ struct NuPlayer::Renderer : public AHandler { void queueEOS(bool audio, status_t finalResult); + void setPlaybackRate(float rate); + void flush(bool audio, bool notifyComplete); void signalTimeDiscontinuity(); @@ -61,16 +64,8 @@ struct NuPlayer::Renderer : public AHandler { void setVideoFrameRate(float fps); - // Following setters and getters are protected by mTimeLock. 
status_t getCurrentPosition(int64_t *mediaUs); - void setHasMedia(bool audio); - void setAudioFirstAnchorTime(int64_t mediaUs); - void setAudioFirstAnchorTimeIfNeeded(int64_t mediaUs); - void setAnchorTime( - int64_t mediaUs, int64_t realUs, int64_t numFramesWritten = -1, bool resume = false); - void setVideoLateByUs(int64_t lateUs); int64_t getVideoLateByUs(); - void setPauseStartedTimeRealUs(int64_t realUs); status_t openAudioSink( const sp<AMessage> &format, @@ -107,8 +102,8 @@ private: kWhatPostDrainVideoQueue = 'pDVQ', kWhatQueueBuffer = 'queB', kWhatQueueEOS = 'qEOS', + kWhatSetRate = 'setR', kWhatFlush = 'flus', - kWhatAudioSinkChanged = 'auSC', kWhatPause = 'paus', kWhatResume = 'resm', kWhatOpenAudioSink = 'opnA', @@ -142,26 +137,18 @@ private: bool mDrainVideoQueuePending; int32_t mAudioQueueGeneration; int32_t mVideoQueueGeneration; + int32_t mAudioDrainGeneration; + int32_t mVideoDrainGeneration; - Mutex mTimeLock; - // |mTimeLock| protects the following 7 member vars that are related to time. - // Note: those members are only written on Renderer thread, so reading on Renderer thread - // doesn't need to be protected. Otherwise accessing those members must be protected by - // |mTimeLock|. - // TODO: move those members to a seperated media clock class. + sp<MediaClock> mMediaClock; + float mPlaybackRate; int64_t mAudioFirstAnchorTimeMediaUs; int64_t mAnchorTimeMediaUs; - int64_t mAnchorTimeRealUs; int64_t mAnchorNumFramesWritten; - int64_t mAnchorMaxMediaUs; int64_t mVideoLateByUs; bool mHasAudio; bool mHasVideo; - int64_t mPauseStartedTimeRealUs; - Mutex mFlushLock; // protects the following 2 member vars. - bool mFlushingAudio; - bool mFlushingVideo; bool mNotifyCompleteAudio; bool mNotifyCompleteVideo; @@ -169,7 +156,6 @@ private: // modified on only renderer's thread. 
bool mPaused; - int64_t mPausePositionMediaTimeUs; bool mVideoSampleReceived; bool mVideoRenderingStarted; @@ -211,14 +197,19 @@ private: int64_t getPlayedOutAudioDurationUs(int64_t nowUs); void postDrainAudioQueue_l(int64_t delayUs = 0); + void clearAnchorTime_l(); + void clearAudioFirstAnchorTime_l(); + void setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs); + void setVideoLateByUs(int64_t lateUs); + void onNewAudioMediaTime(int64_t mediaTimeUs); int64_t getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs); void onDrainVideoQueue(); - void postDrainVideoQueue_l(); + void postDrainVideoQueue(); - void prepareForMediaRenderingStart(); - void notifyIfMediaRenderingStarted(); + void prepareForMediaRenderingStart_l(); + void notifyIfMediaRenderingStarted_l(); void onQueueBuffer(const sp<AMessage> &msg); void onQueueEOS(const sp<AMessage> &msg); @@ -229,6 +220,9 @@ private: void onPause(); void onResume(); void onSetVideoFrameRate(float fps); + int32_t getQueueGeneration(bool audio); + int32_t getDrainGeneration(bool audio); + bool getSyncQueues(); void onAudioOffloadTearDown(AudioOffloadTearDownReason reason); status_t onOpenAudioSink( const sp<AMessage> &format, @@ -245,7 +239,7 @@ private: void notifyAudioOffloadTearDown(); void flushQueue(List<QueueEntry> *queue); - bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg); + bool dropBufferIfStale(bool audio, const sp<AMessage> &msg); void syncQueuesDone_l(); bool offloadingAudio() const { return (mFlags & FLAG_OFFLOAD_AUDIO) != 0; } @@ -253,6 +247,8 @@ private: void startAudioOffloadPauseTimeout(); void cancelAudioOffloadPauseTimeout(); + int64_t getDurationUsIfPlayedAtSampleRate(uint32_t numFrames); + DISALLOW_EVIL_CONSTRUCTORS(Renderer); }; diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp index 885ebe4..f53afbd 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp +++ 
b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp @@ -29,9 +29,9 @@ namespace android { NuPlayer::NuPlayerStreamListener::NuPlayerStreamListener( const sp<IStreamSource> &source, - ALooper::handler_id id) + const sp<AHandler> &targetHandler) : mSource(source), - mTargetID(id), + mTargetHandler(targetHandler), mEOS(false), mSendDataNotification(true) { mSource->setListener(this); @@ -65,8 +65,8 @@ void NuPlayer::NuPlayerStreamListener::queueBuffer(size_t index, size_t size) { if (mSendDataNotification) { mSendDataNotification = false; - if (mTargetID != 0) { - (new AMessage(kWhatMoreDataQueued, mTargetID))->post(); + if (mTargetHandler != NULL) { + (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post(); } } } @@ -86,8 +86,8 @@ void NuPlayer::NuPlayerStreamListener::issueCommand( if (mSendDataNotification) { mSendDataNotification = false; - if (mTargetID != 0) { - (new AMessage(kWhatMoreDataQueued, mTargetID))->post(); + if (mTargetHandler != NULL) { + (new AMessage(kWhatMoreDataQueued, mTargetHandler))->post(); } } } diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h index 1874d80..2de829b 100644 --- a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h +++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h @@ -29,7 +29,7 @@ struct MemoryDealer; struct NuPlayer::NuPlayerStreamListener : public BnStreamListener { NuPlayerStreamListener( const sp<IStreamSource> &source, - ALooper::handler_id targetID); + const sp<AHandler> &targetHandler); virtual void queueBuffer(size_t index, size_t size); @@ -59,7 +59,7 @@ private: Mutex mLock; sp<IStreamSource> mSource; - ALooper::handler_id mTargetID; + sp<AHandler> mTargetHandler; sp<MemoryDealer> mMemoryDealer; Vector<sp<IMemory> > mBuffers; List<QueueEntry> mQueue; diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp index 
0282a9f..5210fc8 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp @@ -87,7 +87,7 @@ void NuPlayer::RTSPSource::prepareAsync() { CHECK(mHandler == NULL); CHECK(mSDPLoader == NULL); - sp<AMessage> notify = new AMessage(kWhatNotify, id()); + sp<AMessage> notify = new AMessage(kWhatNotify, this); CHECK_EQ(mState, (int)DISCONNECTED); mState = CONNECTING; @@ -116,7 +116,7 @@ void NuPlayer::RTSPSource::stop() { if (mLooper == NULL) { return; } - sp<AMessage> msg = new AMessage(kWhatDisconnect, id()); + sp<AMessage> msg = new AMessage(kWhatDisconnect, this); sp<AMessage> dummy; msg->postAndAwaitResponse(&dummy); @@ -292,7 +292,7 @@ status_t NuPlayer::RTSPSource::getDuration(int64_t *durationUs) { } status_t NuPlayer::RTSPSource::seekTo(int64_t seekTimeUs) { - sp<AMessage> msg = new AMessage(kWhatPerformSeek, id()); + sp<AMessage> msg = new AMessage(kWhatPerformSeek, this); msg->setInt32("generation", ++mSeekGeneration); msg->setInt64("timeUs", seekTimeUs); msg->post(200000ll); @@ -311,7 +311,7 @@ void NuPlayer::RTSPSource::performSeek(int64_t seekTimeUs) { void NuPlayer::RTSPSource::onMessageReceived(const sp<AMessage> &msg) { if (msg->what() == kWhatDisconnect) { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); mDisconnectReplyID = replyID; @@ -600,7 +600,7 @@ void NuPlayer::RTSPSource::onSDPLoaded(const sp<AMessage> &msg) { ALOGE("Unable to find url in SDP"); err = UNKNOWN_ERROR; } else { - sp<AMessage> notify = new AMessage(kWhatNotify, id()); + sp<AMessage> notify = new AMessage(kWhatNotify, this); mHandler = new MyHandler(rtspUri.c_str(), notify, mUIDValid, mUID); mLooper->registerHandler(mHandler); diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h index ac3299a..5f2cf33 100644 --- a/media/libmediaplayerservice/nuplayer/RTSPSource.h +++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h @@ 
-25,6 +25,7 @@ namespace android { struct ALooper; +struct AReplyToken; struct AnotherPacketSource; struct MyHandler; struct SDPLoader; @@ -96,7 +97,7 @@ private: bool mIsSDP; State mState; status_t mFinalResult; - uint32_t mDisconnectReplyID; + sp<AReplyToken> mDisconnectReplyID; Mutex mBufferingLock; bool mBuffering; diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp index b3f224d..0246b59 100644 --- a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp @@ -63,7 +63,7 @@ void NuPlayer::StreamingSource::prepareAsync() { } void NuPlayer::StreamingSource::start() { - mStreamListener = new NuPlayerStreamListener(mSource, 0); + mStreamListener = new NuPlayerStreamListener(mSource, NULL); uint32_t sourceFlags = mSource->flags(); @@ -163,7 +163,7 @@ status_t NuPlayer::StreamingSource::postReadBuffer() { mBuffering = true; } - (new AMessage(kWhatReadBuffer, id()))->post(); + (new AMessage(kWhatReadBuffer, this))->post(); return OK; } diff --git a/media/libmediaplayerservice/tests/Android.mk b/media/libmediaplayerservice/tests/Android.mk new file mode 100644 index 0000000..7bc78ff --- /dev/null +++ b/media/libmediaplayerservice/tests/Android.mk @@ -0,0 +1,24 @@ +# Build the unit tests. 
+LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_MODULE := DrmSessionManager_test + +LOCAL_MODULE_TAGS := tests + +LOCAL_SRC_FILES := \ + DrmSessionManager_test.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + liblog \ + libmediaplayerservice \ + libutils \ + +LOCAL_C_INCLUDES := \ + frameworks/av/include \ + frameworks/av/media/libmediaplayerservice \ + +LOCAL_32_BIT_ONLY := true + +include $(BUILD_NATIVE_TEST) + diff --git a/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp new file mode 100644 index 0000000..d3e760b --- /dev/null +++ b/media/libmediaplayerservice/tests/DrmSessionManager_test.cpp @@ -0,0 +1,249 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "DrmSessionManager_test" +#include <utils/Log.h> + +#include <gtest/gtest.h> + +#include "Drm.h" +#include "DrmSessionClientInterface.h" +#include "DrmSessionManager.h" +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/ProcessInfoInterface.h> + +namespace android { + +struct FakeProcessInfo : public ProcessInfoInterface { + FakeProcessInfo() {} + virtual ~FakeProcessInfo() {} + + virtual bool getPriority(int pid, int* priority) { + // For testing, use pid as priority. + // Lower the value higher the priority. 
+ *priority = pid; + return true; + } + +private: + DISALLOW_EVIL_CONSTRUCTORS(FakeProcessInfo); +}; + +struct FakeDrm : public DrmSessionClientInterface { + FakeDrm() {} + virtual ~FakeDrm() {} + + virtual bool reclaimSession(const Vector<uint8_t>& sessionId) { + mReclaimedSessions.push_back(sessionId); + return true; + } + + const Vector<Vector<uint8_t> >& reclaimedSessions() const { + return mReclaimedSessions; + } + +private: + Vector<Vector<uint8_t> > mReclaimedSessions; + + DISALLOW_EVIL_CONSTRUCTORS(FakeDrm); +}; + +static const int kTestPid1 = 30; +static const int kTestPid2 = 20; +static const uint8_t kTestSessionId1[] = {1, 2, 3}; +static const uint8_t kTestSessionId2[] = {4, 5, 6, 7, 8}; +static const uint8_t kTestSessionId3[] = {9, 0}; + +class DrmSessionManagerTest : public ::testing::Test { +public: + DrmSessionManagerTest() + : mDrmSessionManager(new DrmSessionManager(new FakeProcessInfo())), + mTestDrm1(new FakeDrm()), + mTestDrm2(new FakeDrm()) { + GetSessionId(kTestSessionId1, ARRAY_SIZE(kTestSessionId1), &mSessionId1); + GetSessionId(kTestSessionId2, ARRAY_SIZE(kTestSessionId2), &mSessionId2); + GetSessionId(kTestSessionId3, ARRAY_SIZE(kTestSessionId3), &mSessionId3); + } + +protected: + static void GetSessionId(const uint8_t* ids, size_t num, Vector<uint8_t>* sessionId) { + for (size_t i = 0; i < num; ++i) { + sessionId->push_back(ids[i]); + } + } + + static void ExpectEqSessionInfo(const SessionInfo& info, sp<DrmSessionClientInterface> drm, + const Vector<uint8_t>& sessionId, int64_t timeStamp) { + EXPECT_EQ(drm, info.drm); + EXPECT_TRUE(isEqualSessionId(sessionId, info.sessionId)); + EXPECT_EQ(timeStamp, info.timeStamp); + } + + void addSession() { + mDrmSessionManager->addSession(kTestPid1, mTestDrm1, mSessionId1); + mDrmSessionManager->addSession(kTestPid2, mTestDrm2, mSessionId2); + mDrmSessionManager->addSession(kTestPid2, mTestDrm2, mSessionId3); + const PidSessionInfosMap& map = sessionMap(); + EXPECT_EQ(2, map.size()); + ssize_t index1 
= map.indexOfKey(kTestPid1); + ASSERT_GE(index1, 0); + const SessionInfos& infos1 = map[index1]; + EXPECT_EQ(1, infos1.size()); + ExpectEqSessionInfo(infos1[0], mTestDrm1, mSessionId1, 0); + + ssize_t index2 = map.indexOfKey(kTestPid2); + ASSERT_GE(index2, 0); + const SessionInfos& infos2 = map[index2]; + EXPECT_EQ(2, infos2.size()); + ExpectEqSessionInfo(infos2[0], mTestDrm2, mSessionId2, 1); + ExpectEqSessionInfo(infos2[1], mTestDrm2, mSessionId3, 2); + } + + const PidSessionInfosMap& sessionMap() { + return mDrmSessionManager->mSessionMap; + } + + void testGetLowestPriority() { + int pid; + int priority; + EXPECT_FALSE(mDrmSessionManager->getLowestPriority_l(&pid, &priority)); + + addSession(); + EXPECT_TRUE(mDrmSessionManager->getLowestPriority_l(&pid, &priority)); + + EXPECT_EQ(kTestPid1, pid); + FakeProcessInfo processInfo; + int priority1; + processInfo.getPriority(kTestPid1, &priority1); + EXPECT_EQ(priority1, priority); + } + + void testGetLeastUsedSession() { + sp<DrmSessionClientInterface> drm; + Vector<uint8_t> sessionId; + EXPECT_FALSE(mDrmSessionManager->getLeastUsedSession_l(kTestPid1, &drm, &sessionId)); + + addSession(); + + EXPECT_TRUE(mDrmSessionManager->getLeastUsedSession_l(kTestPid1, &drm, &sessionId)); + EXPECT_EQ(mTestDrm1, drm); + EXPECT_TRUE(isEqualSessionId(mSessionId1, sessionId)); + + EXPECT_TRUE(mDrmSessionManager->getLeastUsedSession_l(kTestPid2, &drm, &sessionId)); + EXPECT_EQ(mTestDrm2, drm); + EXPECT_TRUE(isEqualSessionId(mSessionId2, sessionId)); + + // mSessionId2 is no longer the least used session. 
+ mDrmSessionManager->useSession(mSessionId2); + EXPECT_TRUE(mDrmSessionManager->getLeastUsedSession_l(kTestPid2, &drm, &sessionId)); + EXPECT_EQ(mTestDrm2, drm); + EXPECT_TRUE(isEqualSessionId(mSessionId3, sessionId)); + } + + sp<DrmSessionManager> mDrmSessionManager; + sp<FakeDrm> mTestDrm1; + sp<FakeDrm> mTestDrm2; + Vector<uint8_t> mSessionId1; + Vector<uint8_t> mSessionId2; + Vector<uint8_t> mSessionId3; +}; + +TEST_F(DrmSessionManagerTest, addSession) { + addSession(); +} + +TEST_F(DrmSessionManagerTest, useSession) { + addSession(); + + mDrmSessionManager->useSession(mSessionId1); + mDrmSessionManager->useSession(mSessionId3); + + const PidSessionInfosMap& map = sessionMap(); + const SessionInfos& infos1 = map.valueFor(kTestPid1); + const SessionInfos& infos2 = map.valueFor(kTestPid2); + ExpectEqSessionInfo(infos1[0], mTestDrm1, mSessionId1, 3); + ExpectEqSessionInfo(infos2[1], mTestDrm2, mSessionId3, 4); +} + +TEST_F(DrmSessionManagerTest, removeSession) { + addSession(); + + mDrmSessionManager->removeSession(mSessionId2); + + const PidSessionInfosMap& map = sessionMap(); + EXPECT_EQ(2, map.size()); + const SessionInfos& infos1 = map.valueFor(kTestPid1); + const SessionInfos& infos2 = map.valueFor(kTestPid2); + EXPECT_EQ(1, infos1.size()); + EXPECT_EQ(1, infos2.size()); + // mSessionId2 has been removed. + ExpectEqSessionInfo(infos2[0], mTestDrm2, mSessionId3, 2); +} + +TEST_F(DrmSessionManagerTest, removeDrm) { + addSession(); + + sp<FakeDrm> drm = new FakeDrm; + const uint8_t ids[] = {123}; + Vector<uint8_t> sessionId; + GetSessionId(ids, ARRAY_SIZE(ids), &sessionId); + mDrmSessionManager->addSession(kTestPid2, drm, sessionId); + + mDrmSessionManager->removeDrm(mTestDrm2); + + const PidSessionInfosMap& map = sessionMap(); + const SessionInfos& infos2 = map.valueFor(kTestPid2); + EXPECT_EQ(1, infos2.size()); + // mTestDrm2 has been removed. 
+ ExpectEqSessionInfo(infos2[0], drm, sessionId, 3); +} + +TEST_F(DrmSessionManagerTest, reclaimSession) { + EXPECT_FALSE(mDrmSessionManager->reclaimSession(kTestPid1)); + addSession(); + + // calling pid priority is too low + EXPECT_FALSE(mDrmSessionManager->reclaimSession(50)); + + EXPECT_TRUE(mDrmSessionManager->reclaimSession(10)); + EXPECT_EQ(1, mTestDrm1->reclaimedSessions().size()); + EXPECT_TRUE(isEqualSessionId(mSessionId1, mTestDrm1->reclaimedSessions()[0])); + + mDrmSessionManager->removeSession(mSessionId1); + + // add a session from a higher priority process. + sp<FakeDrm> drm = new FakeDrm; + const uint8_t ids[] = {1, 3, 5}; + Vector<uint8_t> sessionId; + GetSessionId(ids, ARRAY_SIZE(ids), &sessionId); + mDrmSessionManager->addSession(15, drm, sessionId); + + EXPECT_TRUE(mDrmSessionManager->reclaimSession(18)); + EXPECT_EQ(1, mTestDrm2->reclaimedSessions().size()); + // mSessionId2 is reclaimed. + EXPECT_TRUE(isEqualSessionId(mSessionId2, mTestDrm2->reclaimedSessions()[0])); +} + +TEST_F(DrmSessionManagerTest, getLowestPriority) { + testGetLowestPriority(); +} + +TEST_F(DrmSessionManagerTest, getLeastUsedSession_l) { + testGetLeastUsedSession(); +} + +} // namespace android diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk index 9707c4a..1353f28 100644 --- a/media/libnbaio/Android.mk +++ b/media/libnbaio/Android.mk @@ -11,7 +11,6 @@ LOCAL_SRC_FILES := \ MonoPipeReader.cpp \ Pipe.cpp \ PipeReader.cpp \ - roundup.c \ SourceAudioBufferProvider.cpp LOCAL_SRC_FILES += NBLog.cpp @@ -27,12 +26,13 @@ LOCAL_SRC_FILES += NBLog.cpp LOCAL_MODULE := libnbaio LOCAL_SHARED_LIBRARIES := \ + libaudioutils \ libbinder \ libcommon_time_client \ libcutils \ libutils \ liblog -LOCAL_STATIC_LIBRARIES += libinstantssq +LOCAL_C_INCLUDES := $(call include-path-for, audio-utils) include $(BUILD_SHARED_LIBRARY) diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp index 0b65861..129e9ef 100644 --- a/media/libnbaio/MonoPipe.cpp +++ 
b/media/libnbaio/MonoPipe.cpp @@ -27,7 +27,7 @@ #include <utils/Trace.h> #include <media/AudioBufferProvider.h> #include <media/nbaio/MonoPipe.h> -#include <media/nbaio/roundup.h> +#include <audio_utils/roundup.h> namespace android { diff --git a/media/libnbaio/MonoPipeReader.cpp b/media/libnbaio/MonoPipeReader.cpp index de82229..e4d3ed8 100644 --- a/media/libnbaio/MonoPipeReader.cpp +++ b/media/libnbaio/MonoPipeReader.cpp @@ -39,7 +39,7 @@ ssize_t MonoPipeReader::availableToRead() return NEGOTIATE; } ssize_t ret = android_atomic_acquire_load(&mPipe->mRear) - mPipe->mFront; - ALOG_ASSERT((0 <= ret) && (ret <= mMaxFrames)); + ALOG_ASSERT((0 <= ret) && ((size_t) ret <= mPipe->mMaxFrames)); return ret; } diff --git a/media/libnbaio/Pipe.cpp b/media/libnbaio/Pipe.cpp index 6e0ec8c..13f211d 100644 --- a/media/libnbaio/Pipe.cpp +++ b/media/libnbaio/Pipe.cpp @@ -21,7 +21,7 @@ #include <cutils/compiler.h> #include <utils/Log.h> #include <media/nbaio/Pipe.h> -#include <media/nbaio/roundup.h> +#include <audio_utils/roundup.h> namespace android { diff --git a/media/libstagefright/AACWriter.cpp b/media/libstagefright/AACWriter.cpp index 2e41d80..9d90dbd 100644 --- a/media/libstagefright/AACWriter.cpp +++ b/media/libstagefright/AACWriter.cpp @@ -36,33 +36,19 @@ namespace android { -AACWriter::AACWriter(const char *filename) - : mFd(-1), - mInitCheck(NO_INIT), - mStarted(false), - mPaused(false), - mResumed(false), - mChannelCount(-1), - mSampleRate(-1), - mAACProfile(OMX_AUDIO_AACObjectLC) { - - ALOGV("AACWriter Constructor"); - - mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); - if (mFd >= 0) { - mInitCheck = OK; - } -} - AACWriter::AACWriter(int fd) : mFd(dup(fd)), mInitCheck(mFd < 0? 
NO_INIT: OK), mStarted(false), mPaused(false), mResumed(false), + mThread(0), + mEstimatedSizeBytes(0), + mEstimatedDurationUs(0), mChannelCount(-1), mSampleRate(-1), - mAACProfile(OMX_AUDIO_AACObjectLC) { + mAACProfile(OMX_AUDIO_AACObjectLC), + mFrameDurationUs(0) { } AACWriter::~AACWriter() { diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp index d298cb1..97f3e20 100644 --- a/media/libstagefright/ACodec.cpp +++ b/media/libstagefright/ACodec.cpp @@ -419,6 +419,7 @@ ACodec::ACodec() mMetaDataBuffersToSubmit(0), mRepeatFrameDelayUs(-1ll), mMaxPtsGapUs(-1ll), + mMaxFps(-1), mTimePerFrameUs(-1ll), mTimePerCaptureUs(-1ll), mCreateInputBuffersSuspended(false), @@ -451,61 +452,61 @@ void ACodec::setNotificationMessage(const sp<AMessage> &msg) { void ACodec::initiateSetup(const sp<AMessage> &msg) { msg->setWhat(kWhatSetup); - msg->setTarget(id()); + msg->setTarget(this); msg->post(); } void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { - sp<AMessage> msg = new AMessage(kWhatSetParameters, id()); + sp<AMessage> msg = new AMessage(kWhatSetParameters, this); msg->setMessage("params", params); msg->post(); } void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { msg->setWhat(kWhatAllocateComponent); - msg->setTarget(id()); + msg->setTarget(this); msg->post(); } void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { msg->setWhat(kWhatConfigureComponent); - msg->setTarget(id()); + msg->setTarget(this); msg->post(); } void ACodec::initiateCreateInputSurface() { - (new AMessage(kWhatCreateInputSurface, id()))->post(); + (new AMessage(kWhatCreateInputSurface, this))->post(); } void ACodec::signalEndOfInputStream() { - (new AMessage(kWhatSignalEndOfInputStream, id()))->post(); + (new AMessage(kWhatSignalEndOfInputStream, this))->post(); } void ACodec::initiateStart() { - (new AMessage(kWhatStart, id()))->post(); + (new AMessage(kWhatStart, this))->post(); } void ACodec::signalFlush() { ALOGV("[%s] signalFlush", 
mComponentName.c_str()); - (new AMessage(kWhatFlush, id()))->post(); + (new AMessage(kWhatFlush, this))->post(); } void ACodec::signalResume() { - (new AMessage(kWhatResume, id()))->post(); + (new AMessage(kWhatResume, this))->post(); } void ACodec::initiateShutdown(bool keepComponentAllocated) { - sp<AMessage> msg = new AMessage(kWhatShutdown, id()); + sp<AMessage> msg = new AMessage(kWhatShutdown, this); msg->setInt32("keepComponentAllocated", keepComponentAllocated); msg->post(); if (!keepComponentAllocated) { // ensure shutdown completes in 3 seconds - (new AMessage(kWhatReleaseCodecInstance, id()))->post(3000000); + (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); } } void ACodec::signalRequestIDRFrame() { - (new AMessage(kWhatRequestIDRFrame, id()))->post(); + (new AMessage(kWhatRequestIDRFrame, this))->post(); } // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** @@ -516,7 +517,7 @@ void ACodec::signalRequestIDRFrame() { void ACodec::signalSubmitOutputMetaDataBufferIfEOS_workaround() { if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && mMetaDataBuffersToSubmit > 0) { - (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, id()))->post(); + (new AMessage(kWhatSubmitOutputMetaDataBufferIfEOS, this))->post(); } } @@ -1259,6 +1260,10 @@ status_t ACodec::configureCodec( mMaxPtsGapUs = -1ll; } + if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { + mMaxFps = -1; + } + if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { mTimePerCaptureUs = -1ll; } @@ -1675,6 +1680,11 @@ status_t ACodec::configureCodec( err = setMinBufferSize(kPortIndexInput, 8192); // XXX } + int32_t priority; + if (msg->findInt32("priority", &priority)) { + err = setPriority(priority); + } + mBaseOutputFormat = outputFormat; CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK); @@ -1685,6 +1695,22 @@ status_t ACodec::configureCodec( return err; } +status_t ACodec::setPriority(int32_t priority) { + if (priority < 0) { + return BAD_VALUE; + } + 
OMX_PARAM_U32TYPE config; + InitOMXParams(&config); + config.nU32 = (OMX_U32)priority; + status_t temp = mOMX->setConfig( + mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, + &config, sizeof(config)); + if (temp != OK) { + ALOGI("codec does not support config priority (err %d)", temp); + } + return OK; +} + status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { OMX_PARAM_PORTDEFINITIONTYPE def; InitOMXParams(&def); @@ -4297,7 +4323,7 @@ void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { info->mData->meta()->clear(); notify->setBuffer("buffer", info->mData); - sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id()); + sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); reply->setInt32("buffer-id", info->mBufferID); notify->setMessage("reply", reply); @@ -4557,7 +4583,7 @@ bool ACodec::BaseState::onOMXFillBufferDone( } sp<AMessage> reply = - new AMessage(kWhatOutputBufferDrained, mCodec->id()); + new AMessage(kWhatOutputBufferDrained, mCodec); if (!mCodec->mSentFormat && rangeLength > 0) { mCodec->sendFormatChange(reply); @@ -4833,7 +4859,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { sp<IOMX> omx = client.interface(); - sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id()); + sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); mDeathNotifier = new DeathNotifier(notify); if (IInterface::asBinder(omx)->linkToDeath(mDeathNotifier) != OK) { @@ -4908,7 +4934,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { return false; } - notify = new AMessage(kWhatOMXMessage, mCodec->id()); + notify = new AMessage(kWhatOMXMessage, mCodec); observer->setNotificationMessage(notify); mCodec->mComponentName = componentName; @@ -5114,6 +5140,21 @@ void ACodec::LoadedState::onCreateInputSurface( } } + if (err == OK && mCodec->mMaxFps > 0) { + err = mCodec->mOMX->setInternalOption( + mCodec->mNode, + kPortIndexInput, + IOMX::INTERNAL_OPTION_MAX_FPS, + 
&mCodec->mMaxFps, + sizeof(mCodec->mMaxFps)); + + if (err != OK) { + ALOGE("[%s] Unable to configure max fps (err %d)", + mCodec->mComponentName.c_str(), + err); + } + } + if (err == OK && mCodec->mTimePerCaptureUs > 0ll && mCodec->mTimePerFrameUs > 0ll) { int64_t timeLapse[2]; @@ -5984,7 +6025,7 @@ bool ACodec::FlushingState::onOMXEvent( case OMX_EventPortSettingsChanged: { - sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec->id()); + sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); msg->setInt32("type", omx_message::EVENT); msg->setInt32("node", mCodec->mNode); msg->setInt32("event", event); diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp index 9aa7d95..f53d7f0 100644 --- a/media/libstagefright/AMRWriter.cpp +++ b/media/libstagefright/AMRWriter.cpp @@ -31,19 +31,6 @@ namespace android { -AMRWriter::AMRWriter(const char *filename) - : mFd(-1), - mInitCheck(NO_INIT), - mStarted(false), - mPaused(false), - mResumed(false) { - - mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); - if (mFd >= 0) { - mInitCheck = OK; - } -} - AMRWriter::AMRWriter(int fd) : mFd(dup(fd)), mInitCheck(mFd < 0? 
NO_INIT: OK), diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 2629afc..a2cbdaf 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -31,11 +31,13 @@ LOCAL_SRC_FILES:= \ MediaAdapter.cpp \ MediaBuffer.cpp \ MediaBufferGroup.cpp \ + MediaClock.cpp \ MediaCodec.cpp \ MediaCodecList.cpp \ MediaCodecSource.cpp \ MediaDefs.cpp \ MediaExtractor.cpp \ + MediaSync.cpp \ MidiExtractor.cpp \ http/MediaHTTP.cpp \ MediaMuxer.cpp \ @@ -46,6 +48,7 @@ LOCAL_SRC_FILES:= \ OMXClient.cpp \ OMXCodec.cpp \ OggExtractor.cpp \ + ProcessInfo.cpp \ SampleIterator.cpp \ SampleTable.cpp \ SkipCutBuffer.cpp \ @@ -101,6 +104,7 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_color_conversion \ libstagefright_aacenc \ libstagefright_matroska \ + libstagefright_mediafilter \ libstagefright_webm \ libstagefright_timedtext \ libvpx \ @@ -108,13 +112,14 @@ LOCAL_STATIC_LIBRARIES := \ libstagefright_mpeg2ts \ libstagefright_id3 \ libFLAC \ - libmedia_helper + libmedia_helper \ LOCAL_SHARED_LIBRARIES += \ libstagefright_enc_common \ libstagefright_avc_common \ libstagefright_foundation \ - libdl + libdl \ + libRScpp \ LOCAL_CFLAGS += -Wno-multichar diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp index a7ca3da..f0db76b 100644 --- a/media/libstagefright/FileSource.cpp +++ b/media/libstagefright/FileSource.cpp @@ -14,6 +14,10 @@ * limitations under the License. 
*/ +//#define LOG_NDEBUG 0 +#define LOG_TAG "FileSource" +#include <utils/Log.h> + #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/FileSource.h> #include <sys/types.h> diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp index 0c2ff15..77a652a 100644 --- a/media/libstagefright/HTTPBase.cpp +++ b/media/libstagefright/HTTPBase.cpp @@ -75,7 +75,11 @@ void HTTPBase::addBandwidthMeasurement( bool HTTPBase::estimateBandwidth(int32_t *bandwidth_bps) { Mutex::Autolock autoLock(mLock); - if (mNumBandwidthHistoryItems < 2) { + // Do not do bandwidth estimation if we don't have enough samples, or + // total bytes download are too small (<64K). + // Bandwidth estimation from these samples can often shoot up and cause + // unwanted bw adaption behaviors. + if (mNumBandwidthHistoryItems < 2 || mTotalTransferBytes < 65536) { return false; } diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp index 9856f92..ef07aa0 100644 --- a/media/libstagefright/MPEG2TSWriter.cpp +++ b/media/libstagefright/MPEG2TSWriter.cpp @@ -135,7 +135,7 @@ void MPEG2TSWriter::SourceInfo::start(const sp<AMessage> ¬ify) { mNotify = notify; - (new AMessage(kWhatStart, id()))->post(); + (new AMessage(kWhatStart, this))->post(); } void MPEG2TSWriter::SourceInfo::stop() { @@ -361,7 +361,7 @@ bool MPEG2TSWriter::SourceInfo::flushAACFrames() { } void MPEG2TSWriter::SourceInfo::readMore() { - (new AMessage(kWhatRead, id()))->post(); + (new AMessage(kWhatRead, this))->post(); } void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) { @@ -480,19 +480,6 @@ MPEG2TSWriter::MPEG2TSWriter(int fd) init(); } -MPEG2TSWriter::MPEG2TSWriter(const char *filename) - : mFile(fopen(filename, "wb")), - mWriteCookie(NULL), - mWriteFunc(NULL), - mStarted(false), - mNumSourcesDone(0), - mNumTSPacketsWritten(0), - mNumTSPacketsBeforeMeta(0), - mPATContinuityCounter(0), - mPMTContinuityCounter(0) { - init(); -} - 
MPEG2TSWriter::MPEG2TSWriter( void *cookie, ssize_t (*write)(void *cookie, const void *data, size_t size)) @@ -565,7 +552,7 @@ status_t MPEG2TSWriter::start(MetaData * /* param */) { for (size_t i = 0; i < mSources.size(); ++i) { sp<AMessage> notify = - new AMessage(kWhatSourceNotify, mReflector->id()); + new AMessage(kWhatSourceNotify, mReflector); notify->setInt32("source-index", i); diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 8bf7f63..d0f42cc 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -354,6 +354,8 @@ static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source) : mMoofOffset(0), + mMoofFound(false), + mMdatFound(false), mDataSource(source), mInitCheck(NO_INIT), mHasVideo(false), @@ -490,7 +492,9 @@ status_t MPEG4Extractor::readMetaData() { off64_t offset = 0; status_t err; - while (true) { + bool sawMoovOrSidx = false; + + while (!(sawMoovOrSidx && (mMdatFound || mMoofFound))) { off64_t orig_offset = offset; err = parseChunk(&offset, 0); @@ -502,23 +506,9 @@ status_t MPEG4Extractor::readMetaData() { ALOGE("did not advance: 0x%lld->0x%lld", orig_offset, offset); err = ERROR_MALFORMED; break; - } else if (err == OK) { - continue; - } - - uint32_t hdr[2]; - if (mDataSource->readAt(offset, hdr, 8) < 8) { - break; + } else if (err == UNKNOWN_ERROR) { + sawMoovOrSidx = true; } - uint32_t chunk_type = ntohl(hdr[1]); - if (chunk_type == FOURCC('m', 'o', 'o', 'f')) { - // store the offset of the first segment - mMoofOffset = offset; - } else if (chunk_type != FOURCC('m', 'd', 'a', 't')) { - // keep parsing until we get to the data - continue; - } - break; } if (mInitCheck == OK) { @@ -864,6 +854,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('s', 'c', 'h', 'i'): case FOURCC('e', 'd', 't', 's'): { + if (chunk_type == FOURCC('m', 'o', 'o', 'f') 
&& !mMoofFound) { + // store the offset of the first segment + mMoofFound = true; + mMoofOffset = *offset; + } + if (chunk_type == FOURCC('s', 't', 'b', 'l')) { ALOGV("sampleTable chunk is %" PRIu64 " bytes long.", chunk_size); @@ -1830,6 +1826,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { case FOURCC('m', 'd', 'a', 't'): { ALOGV("mdat chunk, drm: %d", mIsDrm); + + mMdatFound = true; + if (!mIsDrm) { *offset += chunk_size; break; diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 9f20b1d..6f6e362 100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -29,6 +29,7 @@ #include <utils/Log.h> #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/MPEG4Writer.h> #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MetaData.h> @@ -62,6 +63,14 @@ static const uint8_t kNalUnitTypeSeqParamSet = 0x07; static const uint8_t kNalUnitTypePicParamSet = 0x08; static const int64_t kInitialDelayTimeUs = 700000LL; +static const char kMetaKey_Model[] = "com.android.model"; +static const char kMetaKey_Version[] = "com.android.version"; +static const char kMetaKey_Build[] = "com.android.build"; +static const char kMetaKey_CaptureFps[] = "com.android.capture.fps"; + +/* uncomment to include model and build in meta */ +//#define SHOW_MODEL_BUILD 1 + class MPEG4Writer::Track { public: Track(MPEG4Writer *owner, const sp<MediaSource> &source, size_t trackId); @@ -345,31 +354,6 @@ private: Track &operator=(const Track &); }; -MPEG4Writer::MPEG4Writer(const char *filename) - : mFd(-1), - mInitCheck(NO_INIT), - mIsRealTimeRecording(true), - mUse4ByteNalLength(true), - mUse32BitOffset(true), - mIsFileSizeLimitExplicitlyRequested(false), - mPaused(false), - mStarted(false), - mWriterThreadStarted(false), - mOffset(0), - mMdatOffset(0), - mEstimatedMoovBoxSize(0), - mInterleaveDurationUs(1000000), - 
mLatitudex10000(0), - mLongitudex10000(0), - mAreGeoTagsAvailable(false), - mStartTimeOffsetMs(-1) { - - mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); - if (mFd >= 0) { - mInitCheck = OK; - } -} - MPEG4Writer::MPEG4Writer(int fd) : mFd(dup(fd)), mInitCheck(mFd < 0? NO_INIT: OK), @@ -383,11 +367,14 @@ MPEG4Writer::MPEG4Writer(int fd) mOffset(0), mMdatOffset(0), mEstimatedMoovBoxSize(0), + mMoovExtraSize(0), mInterleaveDurationUs(1000000), mLatitudex10000(0), mLongitudex10000(0), mAreGeoTagsAvailable(false), + mMetaKeys(new AMessage()), mStartTimeOffsetMs(-1) { + addDeviceMeta(); } MPEG4Writer::~MPEG4Writer() { @@ -507,6 +494,34 @@ status_t MPEG4Writer::startTracks(MetaData *params) { return OK; } +void MPEG4Writer::addDeviceMeta() { + // add device info and estimate space in 'moov' + char val[PROPERTY_VALUE_MAX]; + size_t n; + // meta size is estimated by adding up the following: + // - meta header structures, which occur only once (total 66 bytes) + // - size for each key, which consists of a fixed header (32 bytes), + // plus key length and data length. + mMoovExtraSize += 66; + if (property_get("ro.build.version.release", val, NULL) + && (n = strlen(val)) > 0) { + mMetaKeys->setString(kMetaKey_Version, val, n + 1); + mMoovExtraSize += sizeof(kMetaKey_Version) + n + 32; + } +#ifdef SHOW_MODEL_BUILD + if (property_get("ro.product.model", val, NULL) + && (n = strlen(val)) > 0) { + mMetaKeys->setString(kMetaKey_Model, val, n + 1); + mMoovExtraSize += sizeof(kMetaKey_Model) + n + 32; + } + if (property_get("ro.build.display.id", val, NULL) + && (n = strlen(val)) > 0) { + mMetaKeys->setString(kMetaKey_Build, val, n + 1); + mMoovExtraSize += sizeof(kMetaKey_Build) + n + 32; + } +#endif +} + int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) { // This implementation is highly experimental/heurisitic. 
// @@ -560,6 +575,9 @@ int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) { size = MAX_MOOV_BOX_SIZE; } + // Account for the extra stuff (Geo, meta keys, etc.) + size += mMoovExtraSize; + ALOGI("limits: %" PRId64 "/%" PRId64 " bytes/us, bit rate: %d bps and the" " estimated moov size %" PRId64 " bytes", mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size); @@ -971,6 +989,7 @@ void MPEG4Writer::writeMoovBox(int64_t durationUs) { if (mAreGeoTagsAvailable) { writeUdtaBox(); } + writeMetaBox(); int32_t id = 1; for (List<Track *>::iterator it = mTracks.begin(); it != mTracks.end(); ++it, ++id) { @@ -1140,6 +1159,14 @@ size_t MPEG4Writer::write( return bytes; } +void MPEG4Writer::beginBox(uint32_t id) { + mBoxes.push_back(mWriteMoovBoxToMemory? + mMoovBoxBufferOffset: mOffset); + + writeInt32(0); + writeInt32(id); +} + void MPEG4Writer::beginBox(const char *fourcc) { CHECK_EQ(strlen(fourcc), 4); @@ -1264,6 +1291,18 @@ status_t MPEG4Writer::setGeoData(int latitudex10000, int longitudex10000) { mLatitudex10000 = latitudex10000; mLongitudex10000 = longitudex10000; mAreGeoTagsAvailable = true; + mMoovExtraSize += 30; + return OK; +} + +status_t MPEG4Writer::setCaptureRate(float captureFps) { + if (captureFps <= 0.0f) { + return BAD_VALUE; + } + + mMetaKeys->setFloat(kMetaKey_CaptureFps, captureFps); + mMoovExtraSize += sizeof(kMetaKey_CaptureFps) + 4 + 32; + return OK; } @@ -3095,6 +3134,103 @@ void MPEG4Writer::writeUdtaBox() { endBox(); } +void MPEG4Writer::writeHdlr() { + beginBox("hdlr"); + writeInt32(0); // Version, Flags + writeInt32(0); // Predefined + writeFourcc("mdta"); + writeInt32(0); // Reserved[0] + writeInt32(0); // Reserved[1] + writeInt32(0); // Reserved[2] + writeInt8(0); // Name (empty) + endBox(); +} + +void MPEG4Writer::writeKeys() { + size_t count = mMetaKeys->countEntries(); + + beginBox("keys"); + writeInt32(0); // Version, Flags + writeInt32(count); // Entry_count + for (size_t i = 0; i < count; i++) { + AMessage::Type type; + const 
char *key = mMetaKeys->getEntryNameAt(i, &type); + size_t n = strlen(key); + writeInt32(n + 8); + writeFourcc("mdta"); + write(key, n); // write without the \0 + } + endBox(); +} + +void MPEG4Writer::writeIlst() { + size_t count = mMetaKeys->countEntries(); + + beginBox("ilst"); + for (size_t i = 0; i < count; i++) { + beginBox(i + 1); // key id (1-based) + beginBox("data"); + AMessage::Type type; + const char *key = mMetaKeys->getEntryNameAt(i, &type); + switch (type) { + case AMessage::kTypeString: + { + AString val; + CHECK(mMetaKeys->findString(key, &val)); + writeInt32(1); // type = UTF8 + writeInt32(0); // default country/language + write(val.c_str(), strlen(val.c_str())); // write without \0 + break; + } + + case AMessage::kTypeFloat: + { + float val; + CHECK(mMetaKeys->findFloat(key, &val)); + writeInt32(23); // type = float32 + writeInt32(0); // default country/language + writeInt32(*reinterpret_cast<int32_t *>(&val)); + break; + } + + case AMessage::kTypeInt32: + { + int32_t val; + CHECK(mMetaKeys->findInt32(key, &val)); + writeInt32(67); // type = signed int32 + writeInt32(0); // default country/language + writeInt32(val); + break; + } + + default: + { + ALOGW("Unsupported key type, writing 0 instead"); + writeInt32(77); // type = unsigned int32 + writeInt32(0); // default country/language + writeInt32(0); + break; + } + } + endBox(); // data + endBox(); // key id + } + endBox(); // ilst +} + +void MPEG4Writer::writeMetaBox() { + size_t count = mMetaKeys->countEntries(); + if (count == 0) { + return; + } + + beginBox("meta"); + writeHdlr(); + writeKeys(); + writeIlst(); + endBox(); +} + /* * Geodata is stored according to ISO-6709 standard. 
*/ diff --git a/media/libstagefright/MediaClock.cpp b/media/libstagefright/MediaClock.cpp new file mode 100644 index 0000000..433f555 --- /dev/null +++ b/media/libstagefright/MediaClock.cpp @@ -0,0 +1,148 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaClock" +#include <utils/Log.h> + +#include <media/stagefright/MediaClock.h> + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> + +namespace android { + +MediaClock::MediaClock() + : mAnchorTimeMediaUs(-1), + mAnchorTimeRealUs(-1), + mMaxTimeMediaUs(INT64_MAX), + mStartingTimeMediaUs(-1), + mPlaybackRate(1.0) { +} + +MediaClock::~MediaClock() { +} + +void MediaClock::setStartingTimeMedia(int64_t startingTimeMediaUs) { + Mutex::Autolock autoLock(mLock); + mStartingTimeMediaUs = startingTimeMediaUs; +} + +void MediaClock::clearAnchor() { + Mutex::Autolock autoLock(mLock); + mAnchorTimeMediaUs = -1; + mAnchorTimeRealUs = -1; +} + +void MediaClock::updateAnchor( + int64_t anchorTimeMediaUs, + int64_t anchorTimeRealUs, + int64_t maxTimeMediaUs) { + if (anchorTimeMediaUs < 0 || anchorTimeRealUs < 0) { + ALOGW("reject anchor time since it is negative."); + return; + } + + Mutex::Autolock autoLock(mLock); + int64_t nowUs = ALooper::GetNowUs(); + int64_t nowMediaUs = + anchorTimeMediaUs + (nowUs - anchorTimeRealUs) * (double)mPlaybackRate; + if 
(nowMediaUs < 0) { + ALOGW("reject anchor time since it leads to negative media time."); + return; + } + mAnchorTimeRealUs = nowUs; + mAnchorTimeMediaUs = nowMediaUs; + mMaxTimeMediaUs = maxTimeMediaUs; +} + +void MediaClock::updateMaxTimeMedia(int64_t maxTimeMediaUs) { + Mutex::Autolock autoLock(mLock); + mMaxTimeMediaUs = maxTimeMediaUs; +} + +void MediaClock::setPlaybackRate(float rate) { + CHECK_GE(rate, 0.0); + Mutex::Autolock autoLock(mLock); + if (mAnchorTimeRealUs == -1) { + mPlaybackRate = rate; + return; + } + + int64_t nowUs = ALooper::GetNowUs(); + mAnchorTimeMediaUs += (nowUs - mAnchorTimeRealUs) * (double)mPlaybackRate; + if (mAnchorTimeMediaUs < 0) { + ALOGW("setRate: anchor time should not be negative, set to 0."); + mAnchorTimeMediaUs = 0; + } + mAnchorTimeRealUs = nowUs; + mPlaybackRate = rate; +} + +status_t MediaClock::getMediaTime( + int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const { + if (outMediaUs == NULL) { + return BAD_VALUE; + } + + Mutex::Autolock autoLock(mLock); + return getMediaTime_l(realUs, outMediaUs, allowPastMaxTime); +} + +status_t MediaClock::getMediaTime_l( + int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const { + if (mAnchorTimeRealUs == -1) { + return NO_INIT; + } + + int64_t mediaUs = mAnchorTimeMediaUs + + (realUs - mAnchorTimeRealUs) * (double)mPlaybackRate; + if (mediaUs > mMaxTimeMediaUs && !allowPastMaxTime) { + mediaUs = mMaxTimeMediaUs; + } + if (mediaUs < mStartingTimeMediaUs) { + mediaUs = mStartingTimeMediaUs; + } + if (mediaUs < 0) { + mediaUs = 0; + } + *outMediaUs = mediaUs; + return OK; +} + +status_t MediaClock::getRealTimeFor( + int64_t targetMediaUs, int64_t *outRealUs) const { + if (outRealUs == NULL) { + return BAD_VALUE; + } + + Mutex::Autolock autoLock(mLock); + if (mPlaybackRate == 0.0) { + return NO_INIT; + } + + int64_t nowUs = ALooper::GetNowUs(); + int64_t nowMediaUs; + status_t status = + getMediaTime_l(nowUs, &nowMediaUs, true /* allowPastMaxTime */); + if (status 
!= OK) { + return status; + } + *outRealUs = (targetMediaUs - nowMediaUs) / (double)mPlaybackRate + nowUs; + return OK; +} + +} // namespace android diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp index 6ca123a..0597f1d 100644 --- a/media/libstagefright/MediaCodec.cpp +++ b/media/libstagefright/MediaCodec.cpp @@ -36,6 +36,7 @@ #include <media/stagefright/MediaCodecList.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaFilter.h> #include <media/stagefright/MetaData.h> #include <media/stagefright/NativeWindowWrapper.h> #include <private/android_filesystem_config.h> @@ -173,7 +174,7 @@ status_t MediaCodec::PostAndAwaitResponse( } // static -void MediaCodec::PostReplyWithError(int32_t replyID, int32_t err) { +void MediaCodec::PostReplyWithError(const sp<AReplyToken> &replyID, int32_t err) { sp<AMessage> response = new AMessage; response->setInt32("err", err); response->postReply(replyID); @@ -189,7 +190,16 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) { // quickly, violating the OpenMAX specs, until that is remedied // we need to invest in an extra looper to free the main event // queue. 
- mCodec = new ACodec; + + if (nameIsType || !strncasecmp(name.c_str(), "omx.", 4)) { + mCodec = new ACodec; + } else if (!nameIsType + && !strncasecmp(name.c_str(), "android.filter.", 15)) { + mCodec = new MediaFilter; + } else { + return NAME_NOT_FOUND; + } + bool needDedicatedLooper = false; if (nameIsType && !strncasecmp(name.c_str(), "video/", 6)) { needDedicatedLooper = true; @@ -227,9 +237,9 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) { mLooper->registerHandler(this); - mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, id())); + mCodec->setNotificationMessage(new AMessage(kWhatCodecNotify, this)); - sp<AMessage> msg = new AMessage(kWhatInit, id()); + sp<AMessage> msg = new AMessage(kWhatInit, this); msg->setString("name", name); msg->setInt32("nameIsType", nameIsType); @@ -242,7 +252,7 @@ status_t MediaCodec::init(const AString &name, bool nameIsType, bool encoder) { } status_t MediaCodec::setCallback(const sp<AMessage> &callback) { - sp<AMessage> msg = new AMessage(kWhatSetCallback, id()); + sp<AMessage> msg = new AMessage(kWhatSetCallback, this); msg->setMessage("callback", callback); sp<AMessage> response; @@ -254,7 +264,7 @@ status_t MediaCodec::configure( const sp<Surface> &nativeWindow, const sp<ICrypto> &crypto, uint32_t flags) { - sp<AMessage> msg = new AMessage(kWhatConfigure, id()); + sp<AMessage> msg = new AMessage(kWhatConfigure, this); msg->setMessage("format", format); msg->setInt32("flags", flags); @@ -288,7 +298,7 @@ status_t MediaCodec::configure( status_t MediaCodec::createInputSurface( sp<IGraphicBufferProducer>* bufferProducer) { - sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, id()); + sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, this); sp<AMessage> response; status_t err = PostAndAwaitResponse(msg, &response); @@ -307,21 +317,21 @@ status_t MediaCodec::createInputSurface( } status_t MediaCodec::start() { - sp<AMessage> msg = new AMessage(kWhatStart, id()); + 
sp<AMessage> msg = new AMessage(kWhatStart, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t MediaCodec::stop() { - sp<AMessage> msg = new AMessage(kWhatStop, id()); + sp<AMessage> msg = new AMessage(kWhatStop, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t MediaCodec::release() { - sp<AMessage> msg = new AMessage(kWhatRelease, id()); + sp<AMessage> msg = new AMessage(kWhatRelease, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); @@ -373,7 +383,7 @@ status_t MediaCodec::queueInputBuffer( errorDetailMsg->clear(); } - sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this); msg->setSize("index", index); msg->setSize("offset", offset); msg->setSize("size", size); @@ -400,7 +410,7 @@ status_t MediaCodec::queueSecureInputBuffer( errorDetailMsg->clear(); } - sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this); msg->setSize("index", index); msg->setSize("offset", offset); msg->setPointer("subSamples", (void *)subSamples); @@ -419,7 +429,7 @@ status_t MediaCodec::queueSecureInputBuffer( } status_t MediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) { - sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatDequeueInputBuffer, this); msg->setInt64("timeoutUs", timeoutUs); sp<AMessage> response; @@ -440,7 +450,7 @@ status_t MediaCodec::dequeueOutputBuffer( int64_t *presentationTimeUs, uint32_t *flags, int64_t timeoutUs) { - sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatDequeueOutputBuffer, this); msg->setInt64("timeoutUs", timeoutUs); sp<AMessage> response; @@ -459,7 +469,7 @@ status_t MediaCodec::dequeueOutputBuffer( } status_t MediaCodec::renderOutputBufferAndRelease(size_t index) { - sp<AMessage> msg = new 
AMessage(kWhatReleaseOutputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this); msg->setSize("index", index); msg->setInt32("render", true); @@ -468,7 +478,7 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index) { } status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) { - sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this); msg->setSize("index", index); msg->setInt32("render", true); msg->setInt64("timestampNs", timestampNs); @@ -478,7 +488,7 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestam } status_t MediaCodec::releaseOutputBuffer(size_t index) { - sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, this); msg->setSize("index", index); sp<AMessage> response; @@ -486,14 +496,14 @@ status_t MediaCodec::releaseOutputBuffer(size_t index) { } status_t MediaCodec::signalEndOfInputStream() { - sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, id()); + sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const { - sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, id()); + sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, this); sp<AMessage> response; status_t err; @@ -507,7 +517,7 @@ status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const { } status_t MediaCodec::getInputFormat(sp<AMessage> *format) const { - sp<AMessage> msg = new AMessage(kWhatGetInputFormat, id()); + sp<AMessage> msg = new AMessage(kWhatGetInputFormat, this); sp<AMessage> response; status_t err; @@ -521,7 +531,7 @@ status_t MediaCodec::getInputFormat(sp<AMessage> *format) const { } status_t MediaCodec::getName(AString *name) const { - sp<AMessage> msg = new 
AMessage(kWhatGetName, id()); + sp<AMessage> msg = new AMessage(kWhatGetName, this); sp<AMessage> response; status_t err; @@ -535,7 +545,7 @@ status_t MediaCodec::getName(AString *name) const { } status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const { - sp<AMessage> msg = new AMessage(kWhatGetBuffers, id()); + sp<AMessage> msg = new AMessage(kWhatGetBuffers, this); msg->setInt32("portIndex", kPortIndexInput); msg->setPointer("buffers", buffers); @@ -544,7 +554,7 @@ status_t MediaCodec::getInputBuffers(Vector<sp<ABuffer> > *buffers) const { } status_t MediaCodec::getOutputBuffers(Vector<sp<ABuffer> > *buffers) const { - sp<AMessage> msg = new AMessage(kWhatGetBuffers, id()); + sp<AMessage> msg = new AMessage(kWhatGetBuffers, this); msg->setInt32("portIndex", kPortIndexOutput); msg->setPointer("buffers", buffers); @@ -602,20 +612,20 @@ status_t MediaCodec::getBufferAndFormat( } status_t MediaCodec::flush() { - sp<AMessage> msg = new AMessage(kWhatFlush, id()); + sp<AMessage> msg = new AMessage(kWhatFlush, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t MediaCodec::requestIDRFrame() { - (new AMessage(kWhatRequestIDRFrame, id()))->post(); + (new AMessage(kWhatRequestIDRFrame, this))->post(); return OK; } void MediaCodec::requestActivityNotification(const sp<AMessage> ¬ify) { - sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, id()); + sp<AMessage> msg = new AMessage(kWhatRequestActivityNotification, this); msg->setMessage("notify", notify); msg->post(); } @@ -640,7 +650,7 @@ void MediaCodec::cancelPendingDequeueOperations() { } } -bool MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) { +bool MediaCodec::handleDequeueInputBuffer(const sp<AReplyToken> &replyID, bool newRequest) { if (!isExecuting() || (mFlags & kFlagIsAsync) || (newRequest && (mFlags & kFlagDequeueInputPending))) { PostReplyWithError(replyID, INVALID_OPERATION); @@ -664,7 +674,7 @@ bool 
MediaCodec::handleDequeueInputBuffer(uint32_t replyID, bool newRequest) { return true; } -bool MediaCodec::handleDequeueOutputBuffer(uint32_t replyID, bool newRequest) { +bool MediaCodec::handleDequeueOutputBuffer(const sp<AReplyToken> &replyID, bool newRequest) { sp<AMessage> response = new AMessage; if (!isExecuting() || (mFlags & kFlagIsAsync) @@ -1188,7 +1198,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatInit: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mState != UNINITIALIZED) { @@ -1224,7 +1234,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatSetCallback: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mState == UNINITIALIZED @@ -1256,7 +1266,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatConfigure: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mState != INITIALIZED) { @@ -1313,7 +1323,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatCreateInputSurface: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); // Must be configured, but can't have been started yet. @@ -1329,7 +1339,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatStart: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mState == FLUSHED) { @@ -1355,7 +1365,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { State targetState = (msg->what() == kWhatStop) ? 
INITIALIZED : UNINITIALIZED; - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1 @@ -1403,7 +1413,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatDequeueInputBuffer: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mFlags & kFlagIsAsync) { @@ -1435,7 +1445,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { if (timeoutUs > 0ll) { sp<AMessage> timeoutMsg = - new AMessage(kWhatDequeueInputTimedOut, id()); + new AMessage(kWhatDequeueInputTimedOut, this); timeoutMsg->setInt32( "generation", ++mDequeueInputTimeoutGeneration); timeoutMsg->post(timeoutUs); @@ -1464,7 +1474,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatQueueInputBuffer: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (!isExecuting()) { @@ -1483,7 +1493,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatDequeueOutputBuffer: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mFlags & kFlagIsAsync) { @@ -1509,7 +1519,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { if (timeoutUs > 0ll) { sp<AMessage> timeoutMsg = - new AMessage(kWhatDequeueOutputTimedOut, id()); + new AMessage(kWhatDequeueOutputTimedOut, this); timeoutMsg->setInt32( "generation", ++mDequeueOutputTimeoutGeneration); timeoutMsg->post(timeoutUs); @@ -1538,7 +1548,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatReleaseOutputBuffer: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (!isExecuting()) { @@ -1557,7 +1567,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatSignalEndOfInputStream: { - uint32_t replyID; + sp<AReplyToken> replyID; 
CHECK(msg->senderAwaitsResponse(&replyID)); if (!isExecuting()) { @@ -1575,7 +1585,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatGetBuffers: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (!isExecuting() || (mFlags & kFlagIsAsync)) { @@ -1609,7 +1619,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatFlush: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (!isExecuting()) { @@ -1635,7 +1645,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { sp<AMessage> format = (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if ((mState != CONFIGURED && mState != STARTING && @@ -1672,7 +1682,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatGetName: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mComponentName.empty()) { @@ -1688,7 +1698,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) { case kWhatSetParameters: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<AMessage> params; @@ -1742,7 +1752,7 @@ status_t MediaCodec::queueCSDInputBuffer(size_t bufferIndex) { AString errorDetailMsg; - sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, id()); + sp<AMessage> msg = new AMessage(kWhatQueueInputBuffer, this); msg->setSize("index", bufferIndex); msg->setSize("offset", 0); msg->setSize("size", csd->size()); @@ -2197,7 +2207,7 @@ void MediaCodec::postActivityNotificationIfPossible() { } status_t MediaCodec::setParameters(const sp<AMessage> ¶ms) { - sp<AMessage> msg = new AMessage(kWhatSetParameters, id()); + sp<AMessage> msg = new AMessage(kWhatSetParameters, this); msg->setMessage("params", params); sp<AMessage> response; diff --git 
a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp index c26e909..b6fa810 100644 --- a/media/libstagefright/MediaCodecSource.cpp +++ b/media/libstagefright/MediaCodecSource.cpp @@ -121,7 +121,7 @@ status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta, mLooper->registerHandler(this); mNotify = notify; - sp<AMessage> msg = new AMessage(kWhatStart, id()); + sp<AMessage> msg = new AMessage(kWhatStart, this); msg->setObject("meta", meta); return postSynchronouslyAndReturnError(msg); } @@ -137,19 +137,19 @@ void MediaCodecSource::Puller::stop() { mSource->stop(); ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video"); - (new AMessage(kWhatStop, id()))->post(); + (new AMessage(kWhatStop, this))->post(); } void MediaCodecSource::Puller::pause() { - (new AMessage(kWhatPause, id()))->post(); + (new AMessage(kWhatPause, this))->post(); } void MediaCodecSource::Puller::resume() { - (new AMessage(kWhatResume, id()))->post(); + (new AMessage(kWhatResume, this))->post(); } void MediaCodecSource::Puller::schedulePull() { - sp<AMessage> msg = new AMessage(kWhatPull, id()); + sp<AMessage> msg = new AMessage(kWhatPull, this); msg->setInt32("generation", mPullGeneration); msg->post(); } @@ -182,7 +182,7 @@ void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) { sp<AMessage> response = new AMessage; response->setInt32("err", err); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; @@ -269,13 +269,13 @@ sp<MediaCodecSource> MediaCodecSource::Create( } status_t MediaCodecSource::start(MetaData* params) { - sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id()); + sp<AMessage> msg = new AMessage(kWhatStart, mReflector); msg->setObject("meta", params); return postSynchronouslyAndReturnError(msg); } status_t MediaCodecSource::stop() { - sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id()); + sp<AMessage> msg = new 
AMessage(kWhatStop, mReflector); status_t err = postSynchronouslyAndReturnError(msg); // mPuller->stop() needs to be done outside MediaCodecSource's looper, @@ -294,7 +294,7 @@ status_t MediaCodecSource::stop() { } status_t MediaCodecSource::pause() { - (new AMessage(kWhatPause, mReflector->id()))->post(); + (new AMessage(kWhatPause, mReflector))->post(); return OK; } @@ -422,8 +422,7 @@ status_t MediaCodecSource::initEncoder() { } } - mEncoderActivityNotify = new AMessage( - kWhatEncoderActivity, mReflector->id()); + mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, mReflector); mEncoder->setCallback(mEncoderActivityNotify); err = mEncoder->start(); @@ -492,7 +491,7 @@ void MediaCodecSource::signalEOS(status_t err) { if (mStopping && mEncoderReachedEOS) { ALOGI("encoder (%s) stopped", mIsVideo ? "video" : "audio"); // posting reply to everyone that's waiting - List<uint32_t>::iterator it; + List<sp<AReplyToken>>::iterator it; for (it = mStopReplyIDQueue.begin(); it != mStopReplyIDQueue.end(); it++) { (new AMessage)->postReply(*it); @@ -620,8 +619,7 @@ status_t MediaCodecSource::onStart(MetaData *params) { resume(startTimeUs); } else { CHECK(mPuller != NULL); - sp<AMessage> notify = new AMessage( - kWhatPullerNotify, mReflector->id()); + sp<AMessage> notify = new AMessage(kWhatPullerNotify, mReflector); err = mPuller->start(params, notify); if (err != OK) { return err; @@ -768,7 +766,7 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) { } case kWhatStart: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); sp<RefBase> obj; @@ -784,7 +782,7 @@ void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) { { ALOGI("encoder (%s) stopping", mIsVideo ? 
"video" : "audio"); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); if (mEncoderReachedEOS) { diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp index c7c6f34..b13877d 100644 --- a/media/libstagefright/MediaMuxer.cpp +++ b/media/libstagefright/MediaMuxer.cpp @@ -38,21 +38,6 @@ namespace android { -MediaMuxer::MediaMuxer(const char *path, OutputFormat format) - : mFormat(format), - mState(UNINITIALIZED) { - if (format == OUTPUT_FORMAT_MPEG_4) { - mWriter = new MPEG4Writer(path); - } else if (format == OUTPUT_FORMAT_WEBM) { - mWriter = new WebmWriter(path); - } - - if (mWriter != NULL) { - mFileMeta = new MetaData; - mState = INITIALIZED; - } -} - MediaMuxer::MediaMuxer(int fd, OutputFormat format) : mFormat(format), mState(UNINITIALIZED) { diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp new file mode 100644 index 0000000..7b6c7d9 --- /dev/null +++ b/media/libstagefright/MediaSync.cpp @@ -0,0 +1,541 @@ +/* + * Copyright 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaSync" +#include <inttypes.h> + +#include <gui/BufferQueue.h> +#include <gui/IGraphicBufferConsumer.h> +#include <gui/IGraphicBufferProducer.h> + +#include <media/AudioTrack.h> +#include <media/stagefright/MediaClock.h> +#include <media/stagefright/MediaSync.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <ui/GraphicBuffer.h> + +// Maximum late time allowed for a video frame to be rendered. When a video +// frame arrives later than this number, it will be discarded without rendering. +static const int64_t kMaxAllowedVideoLateTimeUs = 40000ll; + +namespace android { + +// static +sp<MediaSync> MediaSync::create() { + sp<MediaSync> sync = new MediaSync(); + sync->mLooper->registerHandler(sync); + return sync; +} + +MediaSync::MediaSync() + : mIsAbandoned(false), + mMutex(), + mReleaseCondition(), + mNumOutstandingBuffers(0), + mNativeSampleRateInHz(0), + mNumFramesWritten(0), + mHasAudio(false), + mNextBufferItemMediaUs(-1), + mPlaybackRate(0.0) { + mMediaClock = new MediaClock; + + mLooper = new ALooper; + mLooper->setName("MediaSync"); + mLooper->start(false, false, ANDROID_PRIORITY_AUDIO); +} + +MediaSync::~MediaSync() { + if (mInput != NULL) { + mInput->consumerDisconnect(); + } + if (mOutput != NULL) { + mOutput->disconnect(NATIVE_WINDOW_API_MEDIA); + } + + if (mLooper != NULL) { + mLooper->unregisterHandler(id()); + mLooper->stop(); + } +} + +status_t MediaSync::configureSurface(const sp<IGraphicBufferProducer> &output) { + Mutex::Autolock lock(mMutex); + + // TODO: support suface change. 
+ if (mOutput != NULL) { + ALOGE("configureSurface: output surface has already been configured."); + return INVALID_OPERATION; + } + + if (output != NULL) { + IGraphicBufferProducer::QueueBufferOutput queueBufferOutput; + sp<OutputListener> listener(new OutputListener(this)); + IInterface::asBinder(output)->linkToDeath(listener); + status_t status = + output->connect(listener, + NATIVE_WINDOW_API_MEDIA, + true /* producerControlledByApp */, + &queueBufferOutput); + if (status != NO_ERROR) { + ALOGE("configureSurface: failed to connect (%d)", status); + return status; + } + + mOutput = output; + } + + return NO_ERROR; +} + +// |audioTrack| is used only for querying information. +status_t MediaSync::configureAudioTrack( + const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz) { + Mutex::Autolock lock(mMutex); + + // TODO: support audio track change. + if (mAudioTrack != NULL) { + ALOGE("configureAudioTrack: audioTrack has already been configured."); + return INVALID_OPERATION; + } + + mAudioTrack = audioTrack; + mNativeSampleRateInHz = nativeSampleRateInHz; + + return NO_ERROR; +} + +status_t MediaSync::createInputSurface( + sp<IGraphicBufferProducer> *outBufferProducer) { + if (outBufferProducer == NULL) { + return BAD_VALUE; + } + + Mutex::Autolock lock(mMutex); + + if (mOutput == NULL) { + return NO_INIT; + } + + if (mInput != NULL) { + return INVALID_OPERATION; + } + + sp<IGraphicBufferProducer> bufferProducer; + sp<IGraphicBufferConsumer> bufferConsumer; + BufferQueue::createBufferQueue(&bufferProducer, &bufferConsumer); + + sp<InputListener> listener(new InputListener(this)); + IInterface::asBinder(bufferConsumer)->linkToDeath(listener); + status_t status = + bufferConsumer->consumerConnect(listener, false /* controlledByApp */); + if (status == NO_ERROR) { + bufferConsumer->setConsumerName(String8("MediaSync")); + *outBufferProducer = bufferProducer; + mInput = bufferConsumer; + } + return status; +} + +status_t MediaSync::setPlaybackRate(float rate) 
{ + if (rate < 0.0) { + return BAD_VALUE; + } + + Mutex::Autolock lock(mMutex); + + if (rate > mPlaybackRate) { + mNextBufferItemMediaUs = -1; + } + mPlaybackRate = rate; + mMediaClock->setPlaybackRate(rate); + onDrainVideo_l(); + + return OK; +} + +sp<const MediaClock> MediaSync::getMediaClock() { + return mMediaClock; +} + +status_t MediaSync::updateQueuedAudioData( + size_t sizeInBytes, int64_t presentationTimeUs) { + if (sizeInBytes == 0) { + return OK; + } + + Mutex::Autolock lock(mMutex); + + if (mAudioTrack == NULL) { + ALOGW("updateQueuedAudioData: audioTrack has NOT been configured."); + return INVALID_OPERATION; + } + + int64_t numFrames = sizeInBytes / mAudioTrack->frameSize(); + int64_t maxMediaTimeUs = presentationTimeUs + + getDurationIfPlayedAtNativeSampleRate_l(numFrames); + mNumFramesWritten += numFrames; + + int64_t nowUs = ALooper::GetNowUs(); + int64_t nowMediaUs = maxMediaTimeUs + - getDurationIfPlayedAtNativeSampleRate_l(mNumFramesWritten) + + getPlayedOutAudioDurationMedia_l(nowUs); + + int64_t oldRealTime = -1; + if (mNextBufferItemMediaUs != -1) { + oldRealTime = getRealTime(mNextBufferItemMediaUs, nowUs); + } + + mMediaClock->updateAnchor(nowMediaUs, nowUs, maxMediaTimeUs); + mHasAudio = true; + + if (oldRealTime != -1) { + int64_t newRealTime = getRealTime(mNextBufferItemMediaUs, nowUs); + if (newRealTime < oldRealTime) { + mNextBufferItemMediaUs = -1; + onDrainVideo_l(); + } + } + + return OK; +} + +void MediaSync::setName(const AString &name) { + Mutex::Autolock lock(mMutex); + mInput->setConsumerName(String8(name.c_str())); +} + +int64_t MediaSync::getRealTime(int64_t mediaTimeUs, int64_t nowUs) { + int64_t realUs; + if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) { + // If failed to get current position, e.g. due to audio clock is + // not ready, then just play out video immediately without delay. 
+ return nowUs; + } + return realUs; +} + +int64_t MediaSync::getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames) { + return (numFrames * 1000000LL / mNativeSampleRateInHz); +} + +int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) { + CHECK(mAudioTrack != NULL); + + uint32_t numFramesPlayed; + int64_t numFramesPlayedAt; + AudioTimestamp ts; + static const int64_t kStaleTimestamp100ms = 100000; + + status_t res = mAudioTrack->getTimestamp(ts); + if (res == OK) { + // case 1: mixing audio tracks. + numFramesPlayed = ts.mPosition; + numFramesPlayedAt = + ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000; + const int64_t timestampAge = nowUs - numFramesPlayedAt; + if (timestampAge > kStaleTimestamp100ms) { + // This is an audio FIXME. + // getTimestamp returns a timestamp which may come from audio + // mixing threads. After pausing, the MixerThread may go idle, + // thus the mTime estimate may become stale. Assuming that the + // MixerThread runs 20ms, with FastMixer at 5ms, the max latency + // should be about 25ms with an average around 12ms (to be + // verified). For safety we use 100ms. + ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) " + "numFramesPlayedAt(%lld)", + (long long)nowUs, (long long)numFramesPlayedAt); + numFramesPlayedAt = nowUs - kStaleTimestamp100ms; + } + //ALOGD("getTimestamp: OK %d %lld", + // numFramesPlayed, (long long)numFramesPlayedAt); + } else if (res == WOULD_BLOCK) { + // case 2: transitory state on start of a new track + numFramesPlayed = 0; + numFramesPlayedAt = nowUs; + //ALOGD("getTimestamp: WOULD_BLOCK %d %lld", + // numFramesPlayed, (long long)numFramesPlayedAt); + } else { + // case 3: transitory at new track or audio fast tracks. 
+ res = mAudioTrack->getPosition(&numFramesPlayed); + CHECK_EQ(res, (status_t)OK); + numFramesPlayedAt = nowUs; + numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */ + //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt); + } + + //can't be negative until 12.4 hrs, test. + //CHECK_EQ(numFramesPlayed & (1 << 31), 0); + int64_t durationUs = + getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed) + + nowUs - numFramesPlayedAt; + if (durationUs < 0) { + // Occurs when numFramesPlayed position is very small and the following: + // (1) In case 1, the time nowUs is computed before getTimestamp() is + // called and numFramesPlayedAt is greater than nowUs by time more + // than numFramesPlayed. + // (2) In case 3, using getPosition and adding mAudioTrack->latency() + // to numFramesPlayedAt, by a time amount greater than + // numFramesPlayed. + // + // Both of these are transitory conditions. + ALOGV("getPlayedOutAudioDurationMedia_l: negative duration %lld " + "set to zero", (long long)durationUs); + durationUs = 0; + } + ALOGV("getPlayedOutAudioDurationMedia_l(%lld) nowUs(%lld) frames(%u) " + "framesAt(%lld)", + (long long)durationUs, (long long)nowUs, numFramesPlayed, + (long long)numFramesPlayedAt); + return durationUs; +} + +void MediaSync::onDrainVideo_l() { + if (!isPlaying()) { + return; + } + + int64_t nowUs = ALooper::GetNowUs(); + + while (!mBufferItems.empty()) { + BufferItem *bufferItem = &*mBufferItems.begin(); + int64_t itemMediaUs = bufferItem->mTimestamp / 1000; + int64_t itemRealUs = getRealTime(itemMediaUs, nowUs); + if (itemRealUs <= nowUs) { + if (mHasAudio) { + if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) { + renderOneBufferItem_l(*bufferItem); + } else { + // too late. + returnBufferToInput_l( + bufferItem->mGraphicBuffer, bufferItem->mFence); + } + } else { + // always render video buffer in video-only mode. 
+ renderOneBufferItem_l(*bufferItem); + + // smooth out videos >= 10fps + mMediaClock->updateAnchor( + itemMediaUs, nowUs, itemMediaUs + 100000); + } + + mBufferItems.erase(mBufferItems.begin()); + + if (mBufferItems.empty()) { + mNextBufferItemMediaUs = -1; + } + } else { + if (mNextBufferItemMediaUs == -1 + || mNextBufferItemMediaUs != itemMediaUs) { + sp<AMessage> msg = new AMessage(kWhatDrainVideo, this); + msg->post(itemRealUs - nowUs); + } + break; + } + } +} + +void MediaSync::onFrameAvailableFromInput() { + Mutex::Autolock lock(mMutex); + + // If there are too many outstanding buffers, wait until a buffer is + // released back to the input in onBufferReleased. + while (mNumOutstandingBuffers >= MAX_OUTSTANDING_BUFFERS) { + mReleaseCondition.wait(mMutex); + + // If the sync is abandoned while we are waiting, the release + // condition variable will be broadcast, and we should just return + // without attempting to do anything more (since the input queue will + // also be abandoned). + if (mIsAbandoned) { + return; + } + } + ++mNumOutstandingBuffers; + + // Acquire and detach the buffer from the input. + BufferItem bufferItem; + status_t status = mInput->acquireBuffer(&bufferItem, 0 /* presentWhen */); + if (status != NO_ERROR) { + ALOGE("acquiring buffer from input failed (%d)", status); + return; + } + + ALOGV("acquired buffer %#llx from input", (long long)bufferItem.mGraphicBuffer->getId()); + + status = mInput->detachBuffer(bufferItem.mBuf); + if (status != NO_ERROR) { + ALOGE("detaching buffer from input failed (%d)", status); + if (status == NO_INIT) { + // If the input has been abandoned, move on. 
+ onAbandoned_l(true /* isInput */); + } + return; + } + + mBufferItems.push_back(bufferItem); + onDrainVideo_l(); +} + +void MediaSync::renderOneBufferItem_l( const BufferItem &bufferItem) { + IGraphicBufferProducer::QueueBufferInput queueInput( + bufferItem.mTimestamp, + bufferItem.mIsAutoTimestamp, + bufferItem.mDataSpace, + bufferItem.mCrop, + static_cast<int32_t>(bufferItem.mScalingMode), + bufferItem.mTransform, + bufferItem.mIsDroppable, + bufferItem.mFence); + + // Attach and queue the buffer to the output. + int slot; + status_t status = mOutput->attachBuffer(&slot, bufferItem.mGraphicBuffer); + ALOGE_IF(status != NO_ERROR, "attaching buffer to output failed (%d)", status); + if (status == NO_ERROR) { + IGraphicBufferProducer::QueueBufferOutput queueOutput; + status = mOutput->queueBuffer(slot, queueInput, &queueOutput); + ALOGE_IF(status != NO_ERROR, "queueing buffer to output failed (%d)", status); + } + + if (status != NO_ERROR) { + returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence); + if (status == NO_INIT) { + // If the output has been abandoned, move on. + onAbandoned_l(false /* isInput */); + } + return; + } + + ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId()); +} + +void MediaSync::onBufferReleasedByOutput() { + Mutex::Autolock lock(mMutex); + + sp<GraphicBuffer> buffer; + sp<Fence> fence; + status_t status = mOutput->detachNextBuffer(&buffer, &fence); + ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status); + + if (status == NO_INIT) { + // If the output has been abandoned, we can't do anything else, + // since buffer is invalid. + onAbandoned_l(false /* isInput */); + return; + } + + ALOGV("detached buffer %#llx from output", (long long)buffer->getId()); + + // If we've been abandoned, we can't return the buffer to the input, so just + // move on. 
+ if (mIsAbandoned) { + return; + } + + returnBufferToInput_l(buffer, fence); +} + +void MediaSync::returnBufferToInput_l( + const sp<GraphicBuffer> &buffer, const sp<Fence> &fence) { + // Attach and release the buffer back to the input. + int consumerSlot; + status_t status = mInput->attachBuffer(&consumerSlot, buffer); + ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status); + if (status == NO_ERROR) { + status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence); + ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status); + } + + if (status != NO_ERROR) { + // TODO: do we need to try to return this buffer later? + return; + } + + ALOGV("released buffer %#llx to input", (long long)buffer->getId()); + + // Notify any waiting onFrameAvailable calls. + --mNumOutstandingBuffers; + mReleaseCondition.signal(); +} + +void MediaSync::onAbandoned_l(bool isInput) { + ALOGE("the %s has abandoned me", (isInput ? "input" : "output")); + if (!mIsAbandoned) { + if (isInput) { + mOutput->disconnect(NATIVE_WINDOW_API_MEDIA); + } else { + mInput->consumerDisconnect(); + } + mIsAbandoned = true; + } + mReleaseCondition.broadcast(); +} + +void MediaSync::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatDrainVideo: + { + Mutex::Autolock lock(mMutex); + onDrainVideo_l(); + break; + } + + default: + TRESPASS(); + break; + } +} + +MediaSync::InputListener::InputListener(const sp<MediaSync> &sync) + : mSync(sync) {} + +MediaSync::InputListener::~InputListener() {} + +void MediaSync::InputListener::onFrameAvailable(const BufferItem &/* item */) { + mSync->onFrameAvailableFromInput(); +} + +// We don't care about sideband streams, since we won't relay them. 
+void MediaSync::InputListener::onSidebandStreamChanged() { + ALOGE("onSidebandStreamChanged: got sideband stream unexpectedly."); +} + + +void MediaSync::InputListener::binderDied(const wp<IBinder> &/* who */) { + Mutex::Autolock lock(mSync->mMutex); + mSync->onAbandoned_l(true /* isInput */); +} + +MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync) + : mSync(sync) {} + +MediaSync::OutputListener::~OutputListener() {} + +void MediaSync::OutputListener::onBufferReleased() { + mSync->onBufferReleasedByOutput(); +} + +void MediaSync::OutputListener::binderDied(const wp<IBinder> &/* who */) { + Mutex::Autolock lock(mSync->mMutex); + mSync->onAbandoned_l(false /* isInput */); +} + +} // namespace android diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp index 7d7d631..8d70e50 100644 --- a/media/libstagefright/NuCachedSource2.cpp +++ b/media/libstagefright/NuCachedSource2.cpp @@ -226,7 +226,7 @@ NuCachedSource2::NuCachedSource2( mLooper->start(false /* runOnCallingThread */, true /* canCallJava */); Mutex::Autolock autoLock(mLock); - (new AMessage(kWhatFetchMore, mReflector->id()))->post(); + (new AMessage(kWhatFetchMore, mReflector))->post(); } NuCachedSource2::~NuCachedSource2() { @@ -433,7 +433,7 @@ void NuCachedSource2::onFetch() { delayUs = 100000ll; } - (new AMessage(kWhatFetchMore, mReflector->id()))->post(delayUs); + (new AMessage(kWhatFetchMore, mReflector))->post(delayUs); } void NuCachedSource2::onRead(const sp<AMessage> &msg) { @@ -522,7 +522,7 @@ ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) { return size; } - sp<AMessage> msg = new AMessage(kWhatRead, mReflector->id()); + sp<AMessage> msg = new AMessage(kWhatRead, mReflector); msg->setInt64("offset", offset); msg->setPointer("data", data); msg->setSize("size", size); diff --git a/media/libstagefright/ProcessInfo.cpp b/media/libstagefright/ProcessInfo.cpp new file mode 100644 index 0000000..b4172b3 --- /dev/null +++ 
b/media/libstagefright/ProcessInfo.cpp @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ProcessInfo" +#include <utils/Log.h> + +#include <media/stagefright/ProcessInfo.h> + +#include <binder/IProcessInfoService.h> +#include <binder/IServiceManager.h> + +namespace android { + +ProcessInfo::ProcessInfo() {} + +bool ProcessInfo::getPriority(int pid, int* priority) { + sp<IBinder> binder = defaultServiceManager()->getService(String16("processinfo")); + sp<IProcessInfoService> service = interface_cast<IProcessInfoService>(binder); + + size_t length = 1; + int32_t states; + status_t err = service->getProcessStatesFromPids(length, &pid, &states); + if (err != OK) { + ALOGE("getProcessStatesFromPids failed"); + return false; + } + ALOGV("pid %d states %d", pid, states); + if (states < 0) { + return false; + } + + // Use process state as the priority. Lower the value, higher the priority. 
+ *priority = states; + return true; +} + +ProcessInfo::~ProcessInfo() {} + +} // namespace android diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp index 530383b..e8abf48 100644 --- a/media/libstagefright/SurfaceMediaSource.cpp +++ b/media/libstagefright/SurfaceMediaSource.cpp @@ -26,6 +26,7 @@ #include <media/hardware/MetadataBufferType.h> #include <ui/GraphicBuffer.h> +#include <gui/BufferItem.h> #include <gui/ISurfaceComposer.h> #include <gui/IGraphicBufferAlloc.h> #include <OMX_Component.h> @@ -290,7 +291,7 @@ status_t SurfaceMediaSource::read( // TODO: mCurrentSlot can be made a bufferstate since there // can be more than one "current" slots. - BufferQueue::BufferItem item; + BufferItem item; // If the recording has started and the queue is empty, then just // wait here till the frames come in from the client side while (mStarted) { diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp index b3a79a0..8506e37 100644 --- a/media/libstagefright/Utils.cpp +++ b/media/libstagefright/Utils.cpp @@ -344,6 +344,28 @@ status_t convertMetaDataToMessage( buffer->meta()->setInt32("csd", true); buffer->meta()->setInt64("timeUs", 0); msg->setBuffer("csd-0", buffer); + + if (!meta->findData(kKeyOpusCodecDelay, &type, &data, &size)) { + return -EINVAL; + } + + buffer = new ABuffer(size); + memcpy(buffer->data(), data, size); + + buffer->meta()->setInt32("csd", true); + buffer->meta()->setInt64("timeUs", 0); + msg->setBuffer("csd-1", buffer); + + if (!meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size)) { + return -EINVAL; + } + + buffer = new ABuffer(size); + memcpy(buffer->data(), data, size); + + buffer->meta()->setInt32("csd", true); + buffer->meta()->setInt64("timeUs", 0); + msg->setBuffer("csd-2", buffer); } *format = msg; @@ -800,5 +822,36 @@ AString uriDebugString(const AString &uri, bool incognito) { return AString("<no-scheme URI suppressed>"); } +HLSTime::HLSTime(const sp<AMessage>& 
meta) : + mSeq(-1), + mTimeUs(-1ll), + mMeta(meta) { + if (meta != NULL) { + CHECK(meta->findInt32("discontinuitySeq", &mSeq)); + CHECK(meta->findInt64("timeUs", &mTimeUs)); + } +} + +int64_t HLSTime::getSegmentTimeUs(bool midpoint) const { + int64_t segmentStartTimeUs = -1ll; + if (mMeta != NULL) { + CHECK(mMeta->findInt64("segmentStartTimeUs", &segmentStartTimeUs)); + if (midpoint) { + int64_t durationUs; + CHECK(mMeta->findInt64("segmentDurationUs", &durationUs)); + segmentStartTimeUs += durationUs / 2; + } + } + return segmentStartTimeUs; +} + +bool operator <(const HLSTime &t0, const HLSTime &t1) { + // we can only compare discontinuity sequence and timestamp. + // (mSegmentTimeUs is not reliable in live streaming case, it's the + // time starting from beginning of playlist but playlist could change.) + return t0.mSeq < t1.mSeq + || (t0.mSeq == t1.mSeq && t0.mTimeUs < t1.mTimeUs); +} + } // namespace android diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp index 5ec3438..8ef2dca 100644 --- a/media/libstagefright/avc_utils.cpp +++ b/media/libstagefright/avc_utils.cpp @@ -26,6 +26,7 @@ #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaErrors.h> #include <media/stagefright/MetaData.h> +#include <utils/misc.h> namespace android { @@ -186,17 +187,31 @@ void FindAVCDimensions( if (aspect_ratio_idc == 255 /* extendedSAR */) { sar_width = br.getBits(16); sar_height = br.getBits(16); - } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) { - static const int32_t kFixedSARWidth[] = { - 1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160 + } else { + static const struct { unsigned width, height; } kFixedSARs[] = { + { 0, 0 }, // Invalid + { 1, 1 }, + { 12, 11 }, + { 10, 11 }, + { 16, 11 }, + { 40, 33 }, + { 24, 11 }, + { 20, 11 }, + { 32, 11 }, + { 80, 33 }, + { 18, 11 }, + { 15, 11 }, + { 64, 33 }, + { 160, 99 }, + { 4, 3 }, + { 3, 2 }, + { 2, 1 }, }; - static const int32_t kFixedSARHeight[] = { - 1, 11, 
11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99 - }; - - sar_width = kFixedSARWidth[aspect_ratio_idc - 1]; - sar_height = kFixedSARHeight[aspect_ratio_idc - 1]; + if (aspect_ratio_idc > 0 && aspect_ratio_idc < NELEM(kFixedSARs)) { + sar_width = kFixedSARs[aspect_ratio_idc].width; + sar_height = kFixedSARs[aspect_ratio_idc].height; + } } } diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp index 495bad0..10937ec 100644 --- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp +++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp @@ -623,7 +623,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) { } else { int64_t currentTime = mBufferTimestamps.top(); currentTime += mStreamInfo->aacSamplesPerFrame * - 1000000ll / mStreamInfo->sampleRate; + 1000000ll / mStreamInfo->aacSampleRate; mBufferTimestamps.add(currentTime); } } else { @@ -874,7 +874,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 /* portIndex */) { // adjust/interpolate next time stamp *currentBufLeft -= decodedSize; *nextTimeStamp += mStreamInfo->aacSamplesPerFrame * - 1000000ll / mStreamInfo->sampleRate; + 1000000ll / mStreamInfo->aacSampleRate; ALOGV("adjusted nextTimeStamp/size to %lld/%d", (long long) *nextTimeStamp, *currentBufLeft); } else { @@ -975,6 +975,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) { mBufferSizes.clear(); mDecodedSizes.clear(); mLastInHeader = NULL; + mEndOfInput = false; } else { int avail; while ((avail = outputDelayRingBufferSamplesAvailable()) > 0) { @@ -989,6 +990,7 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) { mOutputBufferCount++; } mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos; + mEndOfOutput = false; } } diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp index 8a95643..6e6a78a 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp @@ -38,7 +38,10 @@ 
SoftVPX::SoftVPX( NULL /* profileLevels */, 0 /* numProfileLevels */, 320 /* width */, 240 /* height */, callbacks, appData, component), mMode(codingType == OMX_VIDEO_CodingVP8 ? MODE_VP8 : MODE_VP9), + mEOSStatus(INPUT_DATA_AVAILABLE), mCtx(NULL), + mFrameParallelMode(false), + mTimeStampIdx(0), mImg(NULL) { // arbitrary from avc/hevc as vpx does not specify a min compression ratio const size_t kMinCompressionRatio = mMode == MODE_VP8 ? 2 : 4; @@ -51,9 +54,7 @@ SoftVPX::SoftVPX( } SoftVPX::~SoftVPX() { - vpx_codec_destroy((vpx_codec_ctx_t *)mCtx); - delete (vpx_codec_ctx_t *)mCtx; - mCtx = NULL; + destroyDecoder(); } static int GetCPUCoreCount() { @@ -73,12 +74,19 @@ status_t SoftVPX::initDecoder() { mCtx = new vpx_codec_ctx_t; vpx_codec_err_t vpx_err; vpx_codec_dec_cfg_t cfg; + vpx_codec_flags_t flags; memset(&cfg, 0, sizeof(vpx_codec_dec_cfg_t)); + memset(&flags, 0, sizeof(vpx_codec_flags_t)); cfg.threads = GetCPUCoreCount(); + + if (mFrameParallelMode) { + flags |= VPX_CODEC_USE_FRAME_THREADING; + } + if ((vpx_err = vpx_codec_dec_init( (vpx_codec_ctx_t *)mCtx, mMode == MODE_VP8 ? &vpx_codec_vp8_dx_algo : &vpx_codec_vp9_dx_algo, - &cfg, 0))) { + &cfg, flags))) { ALOGE("on2 decoder failed to initialize. (%d)", vpx_err); return UNKNOWN_ERROR; } @@ -86,86 +94,155 @@ status_t SoftVPX::initDecoder() { return OK; } +status_t SoftVPX::destroyDecoder() { + vpx_codec_destroy((vpx_codec_ctx_t *)mCtx); + delete (vpx_codec_ctx_t *)mCtx; + mCtx = NULL; + return OK; +} + +bool SoftVPX::outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset) { + List<BufferInfo *> &inQueue = getPortQueue(0); + List<BufferInfo *> &outQueue = getPortQueue(1); + BufferInfo *outInfo = NULL; + OMX_BUFFERHEADERTYPE *outHeader = NULL; + vpx_codec_iter_t iter = NULL; + + if (flushDecoder && mFrameParallelMode) { + // Flush decoder by passing NULL data ptr and 0 size. + // Ideally, this should never fail. 
+ if (vpx_codec_decode((vpx_codec_ctx_t *)mCtx, NULL, 0, NULL, 0)) { + ALOGE("Failed to flush on2 decoder."); + return false; + } + } + + if (!display) { + if (!flushDecoder) { + ALOGE("Invalid operation."); + return false; + } + // Drop all the decoded frames in decoder. + while ((mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter))) { + } + return true; + } + + while (!outQueue.empty()) { + if (mImg == NULL) { + mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter); + if (mImg == NULL) { + break; + } + } + uint32_t width = mImg->d_w; + uint32_t height = mImg->d_h; + outInfo = *outQueue.begin(); + outHeader = outInfo->mHeader; + CHECK_EQ(mImg->fmt, IMG_FMT_I420); + handlePortSettingsChange(portWillReset, width, height); + if (*portWillReset) { + return true; + } + + outHeader->nOffset = 0; + outHeader->nFilledLen = (width * height * 3) / 2; + outHeader->nFlags = 0; + outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv; + + uint8_t *dst = outHeader->pBuffer; + const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y]; + const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U]; + const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V]; + size_t srcYStride = mImg->stride[PLANE_Y]; + size_t srcUStride = mImg->stride[PLANE_U]; + size_t srcVStride = mImg->stride[PLANE_V]; + copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride); + + mImg = NULL; + outInfo->mOwnedByUs = false; + outQueue.erase(outQueue.begin()); + outInfo = NULL; + notifyFillBufferDone(outHeader); + outHeader = NULL; + } + + if (!eos) { + return true; + } + + if (!outQueue.empty()) { + outInfo = *outQueue.begin(); + outQueue.erase(outQueue.begin()); + outHeader = outInfo->mHeader; + outHeader->nTimeStamp = 0; + outHeader->nFilledLen = 0; + outHeader->nFlags = OMX_BUFFERFLAG_EOS; + outInfo->mOwnedByUs = false; + notifyFillBufferDone(outHeader); + mEOSStatus = OUTPUT_FRAMES_FLUSHED; + } + return true; +} + void SoftVPX::onQueueFilled(OMX_U32 /* 
portIndex */) { - if (mOutputPortSettingsChange != NONE) { + if (mOutputPortSettingsChange != NONE || mEOSStatus == OUTPUT_FRAMES_FLUSHED) { return; } List<BufferInfo *> &inQueue = getPortQueue(0); List<BufferInfo *> &outQueue = getPortQueue(1); bool EOSseen = false; + vpx_codec_err_t err; + bool portWillReset = false; + + while ((mEOSStatus == INPUT_EOS_SEEN || !inQueue.empty()) + && !outQueue.empty()) { + // Output the pending frames that left from last port reset or decoder flush. + if (mEOSStatus == INPUT_EOS_SEEN || mImg != NULL) { + if (!outputBuffers( + mEOSStatus == INPUT_EOS_SEEN, true /* display */, + mEOSStatus == INPUT_EOS_SEEN, &portWillReset)) { + ALOGE("on2 decoder failed to output frame."); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; + } + if (portWillReset || mEOSStatus == OUTPUT_FRAMES_FLUSHED || + mEOSStatus == INPUT_EOS_SEEN) { + return; + } + } - while (!inQueue.empty() && !outQueue.empty()) { BufferInfo *inInfo = *inQueue.begin(); OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader; + mTimeStamps[mTimeStampIdx] = inHeader->nTimeStamp; BufferInfo *outInfo = *outQueue.begin(); OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; - if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) { + mEOSStatus = INPUT_EOS_SEEN; EOSseen = true; - if (inHeader->nFilledLen == 0) { - inQueue.erase(inQueue.begin()); - inInfo->mOwnedByUs = false; - notifyEmptyBufferDone(inHeader); - - outHeader->nFilledLen = 0; - outHeader->nFlags = OMX_BUFFERFLAG_EOS; - - outQueue.erase(outQueue.begin()); - outInfo->mOwnedByUs = false; - notifyFillBufferDone(outHeader); - return; - } } - if (mImg == NULL) { - if (vpx_codec_decode( - (vpx_codec_ctx_t *)mCtx, - inHeader->pBuffer + inHeader->nOffset, - inHeader->nFilledLen, - NULL, - 0)) { - ALOGE("on2 decoder failed to decode frame."); - - notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); - return; - } - vpx_codec_iter_t iter = NULL; - mImg = vpx_codec_get_frame((vpx_codec_ctx_t *)mCtx, &iter); + if (inHeader->nFilledLen 
> 0 && + vpx_codec_decode((vpx_codec_ctx_t *)mCtx, + inHeader->pBuffer + inHeader->nOffset, + inHeader->nFilledLen, + &mTimeStamps[mTimeStampIdx], 0)) { + ALOGE("on2 decoder failed to decode frame."); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; } + mTimeStampIdx = (mTimeStampIdx + 1) % kNumBuffers; - if (mImg != NULL) { - CHECK_EQ(mImg->fmt, IMG_FMT_I420); - - uint32_t width = mImg->d_w; - uint32_t height = mImg->d_h; - bool portWillReset = false; - handlePortSettingsChange(&portWillReset, width, height); - if (portWillReset) { - return; - } - - outHeader->nOffset = 0; - outHeader->nFilledLen = (width * height * 3) / 2; - outHeader->nFlags = EOSseen ? OMX_BUFFERFLAG_EOS : 0; - outHeader->nTimeStamp = inHeader->nTimeStamp; - - uint8_t *dst = outHeader->pBuffer; - const uint8_t *srcY = (const uint8_t *)mImg->planes[PLANE_Y]; - const uint8_t *srcU = (const uint8_t *)mImg->planes[PLANE_U]; - const uint8_t *srcV = (const uint8_t *)mImg->planes[PLANE_V]; - size_t srcYStride = mImg->stride[PLANE_Y]; - size_t srcUStride = mImg->stride[PLANE_U]; - size_t srcVStride = mImg->stride[PLANE_V]; - copyYV12FrameToOutputBuffer(dst, srcY, srcU, srcV, srcYStride, srcUStride, srcVStride); - - mImg = NULL; - outInfo->mOwnedByUs = false; - outQueue.erase(outQueue.begin()); - outInfo = NULL; - notifyFillBufferDone(outHeader); - outHeader = NULL; + if (!outputBuffers( + EOSseen /* flushDecoder */, true /* display */, EOSseen, &portWillReset)) { + ALOGE("on2 decoder failed to output frame."); + notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; + } + if (portWillReset) { + return; } inInfo->mOwnedByUs = false; @@ -176,6 +253,30 @@ void SoftVPX::onQueueFilled(OMX_U32 /* portIndex */) { } } +void SoftVPX::onPortFlushCompleted(OMX_U32 portIndex) { + if (portIndex == kInputPortIndex) { + bool portWillReset = false; + if (!outputBuffers( + true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) { + ALOGE("Failed to flush decoder."); + 
notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL); + return; + } + mEOSStatus = INPUT_DATA_AVAILABLE; + } +} + +void SoftVPX::onReset() { + bool portWillReset = false; + if (!outputBuffers( + true /* flushDecoder */, false /* display */, false /* eos */, &portWillReset)) { + ALOGW("Failed to flush decoder. Try to hard reset decoder"); + destroyDecoder(); + initDecoder(); + } + mEOSStatus = INPUT_DATA_AVAILABLE; +} + } // namespace android android::SoftOMXComponent *createSoftOMXComponent( diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.h b/media/libstagefright/codecs/on2/dec/SoftVPX.h index 8f68693..8ccbae2 100644 --- a/media/libstagefright/codecs/on2/dec/SoftVPX.h +++ b/media/libstagefright/codecs/on2/dec/SoftVPX.h @@ -38,6 +38,8 @@ protected: virtual ~SoftVPX(); virtual void onQueueFilled(OMX_U32 portIndex); + virtual void onPortFlushCompleted(OMX_U32 portIndex); + virtual void onReset(); private: enum { @@ -49,11 +51,21 @@ private: MODE_VP9 } mMode; - void *mCtx; + enum { + INPUT_DATA_AVAILABLE, // VPX component is ready to decode data. + INPUT_EOS_SEEN, // VPX component saw EOS and is flushing On2 decoder. + OUTPUT_FRAMES_FLUSHED // VPX component finished flushing On2 decoder. + } mEOSStatus; + void *mCtx; + bool mFrameParallelMode; // Frame parallel is only supported by VP9 decoder. 
+ OMX_TICKS mTimeStamps[kNumBuffers]; + uint8_t mTimeStampIdx; vpx_image_t *mImg; status_t initDecoder(); + status_t destroyDecoder(); + bool outputBuffers(bool flushDecoder, bool display, bool eos, bool *portWillReset); DISALLOW_EVIL_CONSTRUCTORS(SoftVPX); }; diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp index b8084ae..6322dc2 100644 --- a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp +++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp @@ -345,9 +345,15 @@ void SoftOpus::onQueueFilled(OMX_U32 portIndex) { } uint8_t channel_mapping[kMaxChannels] = {0}; - memcpy(&channel_mapping, - kDefaultOpusChannelLayout, - kMaxChannelsWithDefaultLayout); + if (mHeader->channels <= kMaxChannelsWithDefaultLayout) { + memcpy(&channel_mapping, + kDefaultOpusChannelLayout, + kMaxChannelsWithDefaultLayout); + } else { + memcpy(&channel_mapping, + mHeader->stream_map, + mHeader->channels); + } int status = OPUS_INVALID_STATE; mDecoder = opus_multistream_decoder_create(kRate, diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp index 4e75250..21da707 100644 --- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp +++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp @@ -98,33 +98,49 @@ void SoftwareRenderer::resetFormatIfChanged(const sp<AMessage> &format) { mCropWidth = mCropRight - mCropLeft + 1; mCropHeight = mCropBottom - mCropTop + 1; - int halFormat; - size_t bufWidth, bufHeight; - - switch (mColorFormat) { - case OMX_COLOR_FormatYUV420Planar: - case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: - case OMX_COLOR_FormatYUV420SemiPlanar: - { - if (!runningInEmulator()) { + // by default convert everything to RGB565 + int halFormat = HAL_PIXEL_FORMAT_RGB_565; + size_t bufWidth = mCropWidth; + size_t bufHeight = mCropHeight; + + // hardware has YUV12 and RGBA8888 support, so convert known formats + if 
(!runningInEmulator()) { + switch (mColorFormat) { + case OMX_COLOR_FormatYUV420Planar: + case OMX_COLOR_FormatYUV420SemiPlanar: + case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar: + { halFormat = HAL_PIXEL_FORMAT_YV12; bufWidth = (mCropWidth + 1) & ~1; bufHeight = (mCropHeight + 1) & ~1; break; } - - // fall through. + case OMX_COLOR_Format24bitRGB888: + { + halFormat = HAL_PIXEL_FORMAT_RGB_888; + bufWidth = (mCropWidth + 1) & ~1; + bufHeight = (mCropHeight + 1) & ~1; + break; + } + case OMX_COLOR_Format32bitARGB8888: + case OMX_COLOR_Format32BitRGBA8888: + { + halFormat = HAL_PIXEL_FORMAT_RGBA_8888; + bufWidth = (mCropWidth + 1) & ~1; + bufHeight = (mCropHeight + 1) & ~1; + break; + } + default: + { + break; + } } + } - default: - halFormat = HAL_PIXEL_FORMAT_RGB_565; - bufWidth = mCropWidth; - bufHeight = mCropHeight; - - mConverter = new ColorConverter( - mColorFormat, OMX_COLOR_Format16bitRGB565); - CHECK(mConverter->isValid()); - break; + if (halFormat == HAL_PIXEL_FORMAT_RGB_565) { + mConverter = new ColorConverter( + mColorFormat, OMX_COLOR_Format16bitRGB565); + CHECK(mConverter->isValid()); } CHECK(mNativeWindow != NULL); @@ -201,6 +217,8 @@ void SoftwareRenderer::render( CHECK_EQ(0, mapper.lock( buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst)); + // TODO move the other conversions also into ColorConverter, and + // fix cropping issues (when mCropLeft/Top != 0 or mWidth != mCropWidth) if (mConverter) { mConverter->convert( data, @@ -211,7 +229,8 @@ void SoftwareRenderer::render( 0, 0, mCropWidth - 1, mCropHeight - 1); } else if (mColorFormat == OMX_COLOR_FormatYUV420Planar) { const uint8_t *src_y = (const uint8_t *)data; - const uint8_t *src_u = (const uint8_t *)data + mWidth * mHeight; + const uint8_t *src_u = + (const uint8_t *)data + mWidth * mHeight; const uint8_t *src_v = src_u + (mWidth / 2 * mHeight / 2); uint8_t *dst_y = (uint8_t *)dst; @@ -239,11 +258,9 @@ void SoftwareRenderer::render( } } else if (mColorFormat == 
OMX_TI_COLOR_FormatYUV420PackedSemiPlanar || mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) { - const uint8_t *src_y = - (const uint8_t *)data; - - const uint8_t *src_uv = - (const uint8_t *)data + mWidth * (mHeight - mCropTop / 2); + const uint8_t *src_y = (const uint8_t *)data; + const uint8_t *src_uv = (const uint8_t *)data + + mWidth * (mHeight - mCropTop / 2); uint8_t *dst_y = (uint8_t *)dst; @@ -271,6 +288,38 @@ void SoftwareRenderer::render( dst_u += dst_c_stride; dst_v += dst_c_stride; } + } else if (mColorFormat == OMX_COLOR_Format24bitRGB888) { + uint8_t* srcPtr = (uint8_t*)data; + uint8_t* dstPtr = (uint8_t*)dst; + + for (size_t y = 0; y < (size_t)mCropHeight; ++y) { + memcpy(dstPtr, srcPtr, mCropWidth * 3); + srcPtr += mWidth * 3; + dstPtr += buf->stride * 3; + } + } else if (mColorFormat == OMX_COLOR_Format32bitARGB8888) { + uint8_t *srcPtr, *dstPtr; + + for (size_t y = 0; y < (size_t)mCropHeight; ++y) { + srcPtr = (uint8_t*)data + mWidth * 4 * y; + dstPtr = (uint8_t*)dst + buf->stride * 4 * y; + for (size_t x = 0; x < (size_t)mCropWidth; ++x) { + uint8_t a = *srcPtr++; + for (size_t i = 0; i < 3; ++i) { // copy RGB + *dstPtr++ = *srcPtr++; + } + *dstPtr++ = a; // alpha last (ARGB to RGBA) + } + } + } else if (mColorFormat == OMX_COLOR_Format32BitRGBA8888) { + uint8_t* srcPtr = (uint8_t*)data; + uint8_t* dstPtr = (uint8_t*)dst; + + for (size_t y = 0; y < (size_t)mCropHeight; ++y) { + memcpy(dstPtr, srcPtr, mCropWidth * 4); + srcPtr += mWidth * 4; + dstPtr += buf->stride * 4; + } } else { LOG_ALWAYS_FATAL("bad color format %#x", mColorFormat); } diff --git a/media/libstagefright/filters/Android.mk b/media/libstagefright/filters/Android.mk new file mode 100644 index 0000000..36ab444 --- /dev/null +++ b/media/libstagefright/filters/Android.mk @@ -0,0 +1,27 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + ColorConvert.cpp \ + GraphicBufferListener.cpp \ + IntrinsicBlurFilter.cpp \ + MediaFilter.cpp \ + RSFilter.cpp \ + 
SaturationFilter.cpp \ + saturationARGB.rs \ + SimpleFilter.cpp \ + ZeroFilter.cpp + +LOCAL_C_INCLUDES := \ + $(TOP)/frameworks/native/include/media/openmax \ + $(TOP)/frameworks/rs/cpp \ + $(TOP)/frameworks/rs \ + +intermediates := $(call intermediates-dir-for,STATIC_LIBRARIES,libRS,TARGET,) +LOCAL_C_INCLUDES += $(intermediates) + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE:= libstagefright_mediafilter + +include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/filters/ColorConvert.cpp b/media/libstagefright/filters/ColorConvert.cpp new file mode 100644 index 0000000..a5039f9 --- /dev/null +++ b/media/libstagefright/filters/ColorConvert.cpp @@ -0,0 +1,111 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ColorConvert.h" + +#ifndef max +#define max(a,b) ((a) > (b) ? (a) : (b)) +#endif +#ifndef min +#define min(a,b) ((a) < (b) ? 
(a) : (b)) +#endif + +namespace android { + +void YUVToRGB( + int32_t y, int32_t u, int32_t v, + int32_t* r, int32_t* g, int32_t* b) { + y -= 16; + u -= 128; + v -= 128; + + *b = 1192 * y + 2066 * u; + *g = 1192 * y - 833 * v - 400 * u; + *r = 1192 * y + 1634 * v; + + *r = min(262143, max(0, *r)); + *g = min(262143, max(0, *g)); + *b = min(262143, max(0, *b)); + + *r >>= 10; + *g >>= 10; + *b >>= 10; +} + +void convertYUV420spToARGB( + uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, + uint8_t *dest) { + const int32_t bytes_per_pixel = 2; + + for (int32_t i = 0; i < height; i++) { + for (int32_t j = 0; j < width; j++) { + int32_t y = *(pY + i * width + j); + int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2)); + int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1); + + int32_t r, g, b; + YUVToRGB(y, u, v, &r, &g, &b); + + *dest++ = 0xFF; + *dest++ = r; + *dest++ = g; + *dest++ = b; + } + } +} + +void convertYUV420spToRGB888( + uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, + uint8_t *dest) { + const int32_t bytes_per_pixel = 2; + + for (int32_t i = 0; i < height; i++) { + for (int32_t j = 0; j < width; j++) { + int32_t y = *(pY + i * width + j); + int32_t u = *(pUV + (i/2) * width + bytes_per_pixel * (j/2)); + int32_t v = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1); + + int32_t r, g, b; + YUVToRGB(y, u, v, &r, &g, &b); + + *dest++ = r; + *dest++ = g; + *dest++ = b; + } + } +} + +// HACK - not even slightly optimized +// TODO: remove when RGBA support is added to SoftwareRenderer +void convertRGBAToARGB( + uint8_t *src, int32_t width, int32_t height, uint32_t stride, + uint8_t *dest) { + for (size_t i = 0; i < height; ++i) { + for (size_t j = 0; j < width; ++j) { + uint8_t r = *src++; + uint8_t g = *src++; + uint8_t b = *src++; + uint8_t a = *src++; + *dest++ = a; + *dest++ = r; + *dest++ = g; + *dest++ = b; + } + src += (stride - width) * 4; + } +} + +} // namespace android diff --git 
a/media/libstagefright/filters/ColorConvert.h b/media/libstagefright/filters/ColorConvert.h new file mode 100644 index 0000000..13faa02 --- /dev/null +++ b/media/libstagefright/filters/ColorConvert.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COLOR_CONVERT_H_ +#define COLOR_CONVERT_H_ + +#include <inttypes.h> + +namespace android { + +void YUVToRGB( + int32_t y, int32_t u, int32_t v, + int32_t* r, int32_t* g, int32_t* b); + +void convertYUV420spToARGB( + uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, + uint8_t *dest); + +void convertYUV420spToRGB888( + uint8_t *pY, uint8_t *pUV, int32_t width, int32_t height, + uint8_t *dest); + +// TODO: remove when RGBA support is added to SoftwareRenderer +void convertRGBAToARGB( + uint8_t *src, int32_t width, int32_t height, uint32_t stride, + uint8_t *dest); + +} // namespace android + +#endif // COLOR_CONVERT_H_ diff --git a/media/libstagefright/filters/GraphicBufferListener.cpp b/media/libstagefright/filters/GraphicBufferListener.cpp new file mode 100644 index 0000000..66374ba --- /dev/null +++ b/media/libstagefright/filters/GraphicBufferListener.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "GraphicBufferListener" + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaErrors.h> + +#include <gui/BufferItem.h> + +#include "GraphicBufferListener.h" + +namespace android { + +status_t GraphicBufferListener::init( + const sp<AMessage> ¬ify, + size_t bufferWidth, size_t bufferHeight, size_t bufferCount) { + mNotify = notify; + + String8 name("GraphicBufferListener"); + BufferQueue::createBufferQueue(&mProducer, &mConsumer); + mConsumer->setConsumerName(name); + mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight); + mConsumer->setConsumerUsageBits(GRALLOC_USAGE_SW_READ_OFTEN); + + status_t err = mConsumer->setMaxAcquiredBufferCount(bufferCount); + if (err != NO_ERROR) { + ALOGE("Unable to set BQ max acquired buffer count to %u: %d", + bufferCount, err); + return err; + } + + wp<BufferQueue::ConsumerListener> listener = + static_cast<BufferQueue::ConsumerListener*>(this); + sp<BufferQueue::ProxyConsumerListener> proxy = + new BufferQueue::ProxyConsumerListener(listener); + + err = mConsumer->consumerConnect(proxy, false); + if (err != NO_ERROR) { + ALOGE("Error connecting to BufferQueue: %s (%d)", + strerror(-err), err); + return err; + } + + ALOGV("init() successful."); + + return OK; +} + +void GraphicBufferListener::onFrameAvailable(const BufferItem& /* item */) { + ALOGV("onFrameAvailable() called"); + + { + Mutex::Autolock autoLock(mMutex); + mNumFramesAvailable++; + } + + sp<AMessage> notify = 
mNotify->dup(); + mNotify->setWhat(kWhatFrameAvailable); + mNotify->post(); +} + +void GraphicBufferListener::onBuffersReleased() { + ALOGV("onBuffersReleased() called"); + // nothing to do +} + +void GraphicBufferListener::onSidebandStreamChanged() { + ALOGW("GraphicBufferListener cannot consume sideband streams."); + // nothing to do +} + +BufferItem GraphicBufferListener::getBufferItem() { + BufferItem item; + + { + Mutex::Autolock autoLock(mMutex); + if (mNumFramesAvailable <= 0) { + ALOGE("getBuffer() called with no frames available"); + return item; + } + mNumFramesAvailable--; + } + + status_t err = mConsumer->acquireBuffer(&item, 0); + if (err == BufferQueue::NO_BUFFER_AVAILABLE) { + // shouldn't happen, since we track num frames available + ALOGE("frame was not available"); + item.mBuf = -1; + return item; + } else if (err != OK) { + ALOGE("acquireBuffer returned err=%d", err); + item.mBuf = -1; + return item; + } + + // Wait for it to become available. + err = item.mFence->waitForever("GraphicBufferListener::getBufferItem"); + if (err != OK) { + ALOGW("failed to wait for buffer fence: %d", err); + // keep going + } + + // If this is the first time we're seeing this buffer, add it to our + // slot table. 
+ if (item.mGraphicBuffer != NULL) { + ALOGV("setting mBufferSlot %d", item.mBuf); + mBufferSlot[item.mBuf] = item.mGraphicBuffer; + } + + return item; +} + +sp<GraphicBuffer> GraphicBufferListener::getBuffer(BufferItem item) { + sp<GraphicBuffer> buf; + if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) { + ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf); + return buf; + } + + buf = mBufferSlot[item.mBuf]; + CHECK(buf.get() != NULL); + + return buf; +} + +status_t GraphicBufferListener::releaseBuffer(BufferItem item) { + if (item.mBuf < 0 || item.mBuf >= BufferQueue::NUM_BUFFER_SLOTS) { + ALOGE("getBuffer() received invalid BufferItem: mBuf==%d", item.mBuf); + return ERROR_OUT_OF_RANGE; + } + + mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber, + EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); + + return OK; +} + +} // namespace android diff --git a/media/libstagefright/filters/GraphicBufferListener.h b/media/libstagefright/filters/GraphicBufferListener.h new file mode 100644 index 0000000..586bf65 --- /dev/null +++ b/media/libstagefright/filters/GraphicBufferListener.h @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GRAPHIC_BUFFER_LISTENER_H_ +#define GRAPHIC_BUFFER_LISTENER_H_ + +#include <gui/BufferQueue.h> + +namespace android { + +struct AMessage; + +struct GraphicBufferListener : public BufferQueue::ConsumerListener { +public: + GraphicBufferListener() {}; + + status_t init( + const sp<AMessage> ¬ify, + size_t bufferWidth, size_t bufferHeight, size_t bufferCount); + + virtual void onFrameAvailable(const BufferItem& item); + virtual void onBuffersReleased(); + virtual void onSidebandStreamChanged(); + + // Returns the handle to the producer side of the BufferQueue. Buffers + // queued on this will be received by GraphicBufferListener. + sp<IGraphicBufferProducer> getIGraphicBufferProducer() const { + return mProducer; + } + + BufferItem getBufferItem(); + sp<GraphicBuffer> getBuffer(BufferItem item); + status_t releaseBuffer(BufferItem item); + + enum { + kWhatFrameAvailable = 'frav', + }; + +private: + sp<AMessage> mNotify; + size_t mNumFramesAvailable; + + mutable Mutex mMutex; + + // Our BufferQueue interfaces. mProducer is passed to the producer through + // getIGraphicBufferProducer, and mConsumer is used internally to retrieve + // the buffers queued by the producer. + sp<IGraphicBufferProducer> mProducer; + sp<IGraphicBufferConsumer> mConsumer; + + // Cache of GraphicBuffers from the buffer queue. + sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS]; +}; + +} // namespace android + +#endif // GRAPHIC_BUFFER_LISTENER_H diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.cpp b/media/libstagefright/filters/IntrinsicBlurFilter.cpp new file mode 100644 index 0000000..cbcf699 --- /dev/null +++ b/media/libstagefright/filters/IntrinsicBlurFilter.cpp @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IntrinsicBlurFilter" + +#include <utils/Log.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "IntrinsicBlurFilter.h" + +namespace android { + +status_t IntrinsicBlurFilter::configure(const sp<AMessage> &msg) { + status_t err = SimpleFilter::configure(msg); + if (err != OK) { + return err; + } + + if (!msg->findString("cacheDir", &mCacheDir)) { + ALOGE("Failed to find cache directory in config message."); + return NAME_NOT_FOUND; + } + + return OK; +} + +status_t IntrinsicBlurFilter::start() { + // TODO: use a single RS context object for entire application + mRS = new RSC::RS(); + + if (!mRS->init(mCacheDir.c_str())) { + ALOGE("Failed to initialize RenderScript context."); + return NO_INIT; + } + + // 32-bit elements for ARGB8888 + RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS); + + RSC::Type::Builder tb(mRS, e); + tb.setX(mWidth); + tb.setY(mHeight); + RSC::sp<const RSC::Type> t = tb.create(); + + mAllocIn = RSC::Allocation::createTyped(mRS, t); + mAllocOut = RSC::Allocation::createTyped(mRS, t); + + mBlur = RSC::ScriptIntrinsicBlur::create(mRS, e); + mBlur->setRadius(mBlurRadius); + mBlur->setInput(mAllocIn); + + return OK; +} + +void IntrinsicBlurFilter::reset() { + mBlur.clear(); + mAllocOut.clear(); + mAllocIn.clear(); + mRS.clear(); +} + +status_t IntrinsicBlurFilter::setParameters(const sp<AMessage> &msg) { + sp<AMessage> params; + CHECK(msg->findMessage("params", 
¶ms)); + + float blurRadius; + if (params->findFloat("blur-radius", &blurRadius)) { + mBlurRadius = blurRadius; + } + + return OK; +} + +status_t IntrinsicBlurFilter::processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { + mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data()); + mBlur->forEach(mAllocOut); + mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data()); + + return OK; +} + +} // namespace android diff --git a/media/libstagefright/filters/IntrinsicBlurFilter.h b/media/libstagefright/filters/IntrinsicBlurFilter.h new file mode 100644 index 0000000..4707ab7 --- /dev/null +++ b/media/libstagefright/filters/IntrinsicBlurFilter.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INTRINSIC_BLUR_FILTER_H_ +#define INTRINSIC_BLUR_FILTER_H_ + +#include "RenderScript.h" +#include "SimpleFilter.h" + +namespace android { + +struct IntrinsicBlurFilter : public SimpleFilter { +public: + IntrinsicBlurFilter() : mBlurRadius(1.f) {}; + + virtual status_t configure(const sp<AMessage> &msg); + virtual status_t start(); + virtual void reset(); + virtual status_t setParameters(const sp<AMessage> &msg); + virtual status_t processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: + virtual ~IntrinsicBlurFilter() {}; + +private: + AString mCacheDir; + RSC::sp<RSC::RS> mRS; + RSC::sp<RSC::Allocation> mAllocIn; + RSC::sp<RSC::Allocation> mAllocOut; + RSC::sp<RSC::ScriptIntrinsicBlur> mBlur; + float mBlurRadius; +}; + +} // namespace android + +#endif // INTRINSIC_BLUR_FILTER_H_ diff --git a/media/libstagefright/filters/MediaFilter.cpp b/media/libstagefright/filters/MediaFilter.cpp new file mode 100644 index 0000000..0a09575 --- /dev/null +++ b/media/libstagefright/filters/MediaFilter.cpp @@ -0,0 +1,818 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaFilter" + +#include <inttypes.h> +#include <utils/Trace.h> + +#include <binder/MemoryDealer.h> + +#include <media/stagefright/BufferProducerWrapper.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaFilter.h> + +#include <gui/BufferItem.h> + +#include "ColorConvert.h" +#include "GraphicBufferListener.h" +#include "IntrinsicBlurFilter.h" +#include "RSFilter.h" +#include "SaturationFilter.h" +#include "ZeroFilter.h" + +namespace android { + +// parameter: number of input and output buffers +static const size_t kBufferCountActual = 4; + +MediaFilter::MediaFilter() + : mState(UNINITIALIZED), + mGeneration(0), + mGraphicBufferListener(NULL) { +} + +MediaFilter::~MediaFilter() { +} + +//////////////////// PUBLIC FUNCTIONS ////////////////////////////////////////// + +void MediaFilter::setNotificationMessage(const sp<AMessage> &msg) { + mNotify = msg; +} + +void MediaFilter::initiateAllocateComponent(const sp<AMessage> &msg) { + msg->setWhat(kWhatAllocateComponent); + msg->setTarget(this); + msg->post(); +} + +void MediaFilter::initiateConfigureComponent(const sp<AMessage> &msg) { + msg->setWhat(kWhatConfigureComponent); + msg->setTarget(this); + msg->post(); +} + +void MediaFilter::initiateCreateInputSurface() { + (new AMessage(kWhatCreateInputSurface, this))->post(); +} + +void MediaFilter::initiateStart() { + (new AMessage(kWhatStart, this))->post(); +} + +void MediaFilter::initiateShutdown(bool keepComponentAllocated) { + sp<AMessage> msg = new AMessage(kWhatShutdown, this); + msg->setInt32("keepComponentAllocated", keepComponentAllocated); + msg->post(); +} + +void MediaFilter::signalFlush() { + (new AMessage(kWhatFlush, this))->post(); +} + +void MediaFilter::signalResume() { + (new 
AMessage(kWhatResume, this))->post(); +} + +// nothing to do +void MediaFilter::signalRequestIDRFrame() { + return; +} + +void MediaFilter::signalSetParameters(const sp<AMessage> ¶ms) { + sp<AMessage> msg = new AMessage(kWhatSetParameters, this); + msg->setMessage("params", params); + msg->post(); +} + +void MediaFilter::signalEndOfInputStream() { + (new AMessage(kWhatSignalEndOfInputStream, this))->post(); +} + +void MediaFilter::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatAllocateComponent: + { + onAllocateComponent(msg); + break; + } + case kWhatConfigureComponent: + { + onConfigureComponent(msg); + break; + } + case kWhatStart: + { + onStart(); + break; + } + case kWhatProcessBuffers: + { + processBuffers(); + break; + } + case kWhatInputBufferFilled: + { + onInputBufferFilled(msg); + break; + } + case kWhatOutputBufferDrained: + { + onOutputBufferDrained(msg); + break; + } + case kWhatShutdown: + { + onShutdown(msg); + break; + } + case kWhatFlush: + { + onFlush(); + break; + } + case kWhatResume: + { + // nothing to do + break; + } + case kWhatSetParameters: + { + onSetParameters(msg); + break; + } + case kWhatCreateInputSurface: + { + onCreateInputSurface(); + break; + } + case GraphicBufferListener::kWhatFrameAvailable: + { + onInputFrameAvailable(); + break; + } + case kWhatSignalEndOfInputStream: + { + onSignalEndOfInputStream(); + break; + } + default: + { + ALOGE("Message not handled:\n%s", msg->debugString().c_str()); + break; + } + } +} + +//////////////////// PORT DESCRIPTION ////////////////////////////////////////// + +MediaFilter::PortDescription::PortDescription() { +} + +void MediaFilter::PortDescription::addBuffer( + IOMX::buffer_id id, const sp<ABuffer> &buffer) { + mBufferIDs.push_back(id); + mBuffers.push_back(buffer); +} + +size_t MediaFilter::PortDescription::countBuffers() { + return mBufferIDs.size(); +} + +IOMX::buffer_id MediaFilter::PortDescription::bufferIDAt(size_t index) const { + return 
mBufferIDs.itemAt(index); +} + +sp<ABuffer> MediaFilter::PortDescription::bufferAt(size_t index) const { + return mBuffers.itemAt(index); +} + +//////////////////// HELPER FUNCTIONS ////////////////////////////////////////// + +void MediaFilter::signalProcessBuffers() { + (new AMessage(kWhatProcessBuffers, this))->post(); +} + +void MediaFilter::signalError(status_t error) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatError); + notify->setInt32("err", error); + notify->post(); +} + +status_t MediaFilter::allocateBuffersOnPort(OMX_U32 portIndex) { + CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); + const bool isInput = portIndex == kPortIndexInput; + const size_t bufferSize = isInput ? mMaxInputSize : mMaxOutputSize; + + CHECK(mDealer[portIndex] == NULL); + CHECK(mBuffers[portIndex].isEmpty()); + + ALOGV("Allocating %zu buffers of size %zu on %s port", + kBufferCountActual, bufferSize, + isInput ? "input" : "output"); + + size_t totalSize = kBufferCountActual * bufferSize; + + mDealer[portIndex] = new MemoryDealer(totalSize, "MediaFilter"); + + for (size_t i = 0; i < kBufferCountActual; ++i) { + sp<IMemory> mem = mDealer[portIndex]->allocate(bufferSize); + CHECK(mem.get() != NULL); + + BufferInfo info; + info.mStatus = BufferInfo::OWNED_BY_US; + info.mBufferID = i; + info.mGeneration = mGeneration; + info.mOutputFlags = 0; + info.mData = new ABuffer(mem->pointer(), bufferSize); + info.mData->meta()->setInt64("timeUs", 0); + + mBuffers[portIndex].push_back(info); + + if (!isInput) { + mAvailableOutputBuffers.push( + &mBuffers[portIndex].editItemAt(i)); + } + } + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatBuffersAllocated); + + notify->setInt32("portIndex", portIndex); + + sp<PortDescription> desc = new PortDescription; + + for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { + const BufferInfo &info = mBuffers[portIndex][i]; + + desc->addBuffer(info.mBufferID, 
info.mData); + } + + notify->setObject("portDesc", desc); + notify->post(); + + return OK; +} + +MediaFilter::BufferInfo* MediaFilter::findBufferByID( + uint32_t portIndex, IOMX::buffer_id bufferID, + ssize_t *index) { + for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { + BufferInfo *info = &mBuffers[portIndex].editItemAt(i); + + if (info->mBufferID == bufferID) { + if (index != NULL) { + *index = i; + } + return info; + } + } + + TRESPASS(); + + return NULL; +} + +void MediaFilter::postFillThisBuffer(BufferInfo *info) { + ALOGV("postFillThisBuffer on buffer %d", info->mBufferID); + if (mPortEOS[kPortIndexInput]) { + return; + } + + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); + + info->mGeneration = mGeneration; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatFillThisBuffer); + notify->setInt32("buffer-id", info->mBufferID); + + info->mData->meta()->clear(); + notify->setBuffer("buffer", info->mData); + + sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, this); + reply->setInt32("buffer-id", info->mBufferID); + + notify->setMessage("reply", reply); + + info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; + notify->post(); +} + +void MediaFilter::postDrainThisBuffer(BufferInfo *info) { + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); + + info->mGeneration = mGeneration; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); + notify->setInt32("buffer-id", info->mBufferID); + notify->setInt32("flags", info->mOutputFlags); + notify->setBuffer("buffer", info->mData); + + sp<AMessage> reply = new AMessage(kWhatOutputBufferDrained, this); + reply->setInt32("buffer-id", info->mBufferID); + + notify->setMessage("reply", reply); + + notify->post(); + + info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; +} + +void MediaFilter::postEOS() { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatEOS); + notify->setInt32("err", 
ERROR_END_OF_STREAM); + notify->post(); + + ALOGV("Sent kWhatEOS."); +} + +void MediaFilter::sendFormatChange() { + sp<AMessage> notify = mNotify->dup(); + + notify->setInt32("what", kWhatOutputFormatChanged); + + AString mime; + CHECK(mOutputFormat->findString("mime", &mime)); + notify->setString("mime", mime.c_str()); + + notify->setInt32("stride", mStride); + notify->setInt32("slice-height", mSliceHeight); + notify->setInt32("color-format", mColorFormatOut); + notify->setRect("crop", 0, 0, mStride - 1, mSliceHeight - 1); + notify->setInt32("width", mWidth); + notify->setInt32("height", mHeight); + + notify->post(); +} + +void MediaFilter::requestFillEmptyInput() { + if (mPortEOS[kPortIndexInput]) { + return; + } + + for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) { + BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i); + + if (info->mStatus == BufferInfo::OWNED_BY_US) { + postFillThisBuffer(info); + } + } +} + +void MediaFilter::processBuffers() { + if (mAvailableInputBuffers.empty() || mAvailableOutputBuffers.empty()) { + ALOGV("Skipping process (buffers unavailable)"); + return; + } + + if (mPortEOS[kPortIndexOutput]) { + // TODO notify caller of queueInput error when it is supported + // in MediaCodec + ALOGW("Tried to process a buffer after EOS."); + return; + } + + BufferInfo *inputInfo = mAvailableInputBuffers[0]; + mAvailableInputBuffers.removeAt(0); + BufferInfo *outputInfo = mAvailableOutputBuffers[0]; + mAvailableOutputBuffers.removeAt(0); + + status_t err; + err = mFilter->processBuffers(inputInfo->mData, outputInfo->mData); + if (err != (status_t)OK) { + outputInfo->mData->meta()->setInt32("err", err); + } + + int64_t timeUs; + CHECK(inputInfo->mData->meta()->findInt64("timeUs", &timeUs)); + outputInfo->mData->meta()->setInt64("timeUs", timeUs); + outputInfo->mOutputFlags = 0; + int32_t eos = 0; + if (inputInfo->mData->meta()->findInt32("eos", &eos) && eos != 0) { + outputInfo->mOutputFlags |= OMX_BUFFERFLAG_EOS; + 
mPortEOS[kPortIndexOutput] = true; + outputInfo->mData->meta()->setInt32("eos", eos); + postEOS(); + ALOGV("Output stream saw EOS."); + } + + ALOGV("Processed input buffer %u [%zu], output buffer %u [%zu]", + inputInfo->mBufferID, inputInfo->mData->size(), + outputInfo->mBufferID, outputInfo->mData->size()); + + if (mGraphicBufferListener != NULL) { + delete inputInfo; + } else { + postFillThisBuffer(inputInfo); + } + postDrainThisBuffer(outputInfo); + + // prevent any corner case where buffers could get stuck in queue + signalProcessBuffers(); +} + +void MediaFilter::onAllocateComponent(const sp<AMessage> &msg) { + CHECK_EQ(mState, UNINITIALIZED); + + CHECK(msg->findString("componentName", &mComponentName)); + const char* name = mComponentName.c_str(); + if (!strcasecmp(name, "android.filter.zerofilter")) { + mFilter = new ZeroFilter; + } else if (!strcasecmp(name, "android.filter.saturation")) { + mFilter = new SaturationFilter; + } else if (!strcasecmp(name, "android.filter.intrinsicblur")) { + mFilter = new IntrinsicBlurFilter; + } else if (!strcasecmp(name, "android.filter.RenderScript")) { + mFilter = new RSFilter; + } else { + ALOGE("Unrecognized filter name: %s", name); + signalError(NAME_NOT_FOUND); + return; + } + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatComponentAllocated); + // HACK - need "OMX.google" to use MediaCodec's software renderer + notify->setString("componentName", "OMX.google.MediaFilter"); + notify->post(); + mState = INITIALIZED; + ALOGV("Handled kWhatAllocateComponent."); +} + +void MediaFilter::onConfigureComponent(const sp<AMessage> &msg) { + // TODO: generalize to allow audio filters as well as video + + CHECK_EQ(mState, INITIALIZED); + + // get params - at least mime, width & height + AString mime; + CHECK(msg->findString("mime", &mime)); + if (strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_RAW)) { + ALOGE("Bad mime: %s", mime.c_str()); + signalError(BAD_VALUE); + return; + } + + 
CHECK(msg->findInt32("width", &mWidth)); + CHECK(msg->findInt32("height", &mHeight)); + if (!msg->findInt32("stride", &mStride)) { + mStride = mWidth; + } + if (!msg->findInt32("slice-height", &mSliceHeight)) { + mSliceHeight = mHeight; + } + + mMaxInputSize = mWidth * mHeight * 4; // room for ARGB8888 + int32_t maxInputSize; + if (msg->findInt32("max-input-size", &maxInputSize) + && (size_t)maxInputSize > mMaxInputSize) { + mMaxInputSize = maxInputSize; + } + + if (!msg->findInt32("color-format", &mColorFormatIn)) { + // default to OMX_COLOR_Format32bitARGB8888 + mColorFormatIn = OMX_COLOR_Format32bitARGB8888; + msg->setInt32("color-format", mColorFormatIn); + } + mColorFormatOut = mColorFormatIn; + + mMaxOutputSize = mWidth * mHeight * 4; // room for ARGB8888 + + AString cacheDir; + if (!msg->findString("cacheDir", &cacheDir)) { + ALOGE("Failed to find cache directory in config message."); + signalError(NAME_NOT_FOUND); + return; + } + + status_t err; + err = mFilter->configure(msg); + if (err != (status_t)OK) { + ALOGE("Failed to configure filter component, err %d", err); + signalError(err); + return; + } + + mInputFormat = new AMessage(); + mInputFormat->setString("mime", mime.c_str()); + mInputFormat->setInt32("stride", mStride); + mInputFormat->setInt32("slice-height", mSliceHeight); + mInputFormat->setInt32("color-format", mColorFormatIn); + mInputFormat->setRect("crop", 0, 0, mStride, mSliceHeight); + mInputFormat->setInt32("width", mWidth); + mInputFormat->setInt32("height", mHeight); + + mOutputFormat = new AMessage(); + mOutputFormat->setString("mime", mime.c_str()); + mOutputFormat->setInt32("stride", mStride); + mOutputFormat->setInt32("slice-height", mSliceHeight); + mOutputFormat->setInt32("color-format", mColorFormatOut); + mOutputFormat->setRect("crop", 0, 0, mStride, mSliceHeight); + mOutputFormat->setInt32("width", mWidth); + mOutputFormat->setInt32("height", mHeight); + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", 
kWhatComponentConfigured); + notify->setString("componentName", "MediaFilter"); + notify->setMessage("input-format", mInputFormat); + notify->setMessage("output-format", mOutputFormat); + notify->post(); + mState = CONFIGURED; + ALOGV("Handled kWhatConfigureComponent."); + + sendFormatChange(); +} + +void MediaFilter::onStart() { + CHECK_EQ(mState, CONFIGURED); + + allocateBuffersOnPort(kPortIndexInput); + + allocateBuffersOnPort(kPortIndexOutput); + + status_t err = mFilter->start(); + if (err != (status_t)OK) { + ALOGE("Failed to start filter component, err %d", err); + signalError(err); + return; + } + + mPortEOS[kPortIndexInput] = false; + mPortEOS[kPortIndexOutput] = false; + mInputEOSResult = OK; + mState = STARTED; + + requestFillEmptyInput(); + ALOGV("Handled kWhatStart."); +} + +void MediaFilter::onInputBufferFilled(const sp<AMessage> &msg) { + IOMX::buffer_id bufferID; + CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); + BufferInfo *info = findBufferByID(kPortIndexInput, bufferID); + + if (mState != STARTED) { + // we're not running, so we'll just keep that buffer... 
+ info->mStatus = BufferInfo::OWNED_BY_US; + return; + } + + if (info->mGeneration != mGeneration) { + ALOGV("Caught a stale input buffer [ID %d]", bufferID); + // buffer is stale (taken before a flush/shutdown) - repost it + CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US); + postFillThisBuffer(info); + return; + } + + CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM); + info->mStatus = BufferInfo::OWNED_BY_US; + + sp<ABuffer> buffer; + int32_t err = OK; + bool eos = false; + + if (!msg->findBuffer("buffer", &buffer)) { + // these are unfilled buffers returned by client + CHECK(msg->findInt32("err", &err)); + + if (err == OK) { + // buffers with no errors are returned on MediaCodec.flush + ALOGV("saw unfilled buffer (MediaCodec.flush)"); + postFillThisBuffer(info); + return; + } else { + ALOGV("saw error %d instead of an input buffer", err); + eos = true; + } + + buffer.clear(); + } + + int32_t isCSD; + if (buffer != NULL && buffer->meta()->findInt32("csd", &isCSD) + && isCSD != 0) { + // ignore codec-specific data buffers + ALOGW("MediaFilter received a codec-specific data buffer"); + postFillThisBuffer(info); + return; + } + + int32_t tmp; + if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { + eos = true; + err = ERROR_END_OF_STREAM; + } + + mAvailableInputBuffers.push_back(info); + processBuffers(); + + if (eos) { + mPortEOS[kPortIndexInput] = true; + mInputEOSResult = err; + } + + ALOGV("Handled kWhatInputBufferFilled. [ID %u]", bufferID); +} + +void MediaFilter::onOutputBufferDrained(const sp<AMessage> &msg) { + IOMX::buffer_id bufferID; + CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); + BufferInfo *info = findBufferByID(kPortIndexOutput, bufferID); + + if (mState != STARTED) { + // we're not running, so we'll just keep that buffer... 
+ info->mStatus = BufferInfo::OWNED_BY_US; + return; + } + + if (info->mGeneration != mGeneration) { + ALOGV("Caught a stale output buffer [ID %d]", bufferID); + // buffer is stale (taken before a flush/shutdown) - keep it + CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_US); + return; + } + + CHECK_EQ(info->mStatus, BufferInfo::OWNED_BY_UPSTREAM); + info->mStatus = BufferInfo::OWNED_BY_US; + + mAvailableOutputBuffers.push_back(info); + + processBuffers(); + + ALOGV("Handled kWhatOutputBufferDrained. [ID %u]", + bufferID); +} + +void MediaFilter::onShutdown(const sp<AMessage> &msg) { + mGeneration++; + + if (mState != UNINITIALIZED) { + mFilter->reset(); + } + + int32_t keepComponentAllocated; + CHECK(msg->findInt32("keepComponentAllocated", &keepComponentAllocated)); + if (!keepComponentAllocated || mState == UNINITIALIZED) { + mState = UNINITIALIZED; + } else { + mState = INITIALIZED; + } + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatShutdownCompleted); + notify->post(); +} + +void MediaFilter::onFlush() { + mGeneration++; + + mAvailableInputBuffers.clear(); + for (size_t i = 0; i < mBuffers[kPortIndexInput].size(); ++i) { + BufferInfo *info = &mBuffers[kPortIndexInput].editItemAt(i); + info->mStatus = BufferInfo::OWNED_BY_US; + } + mAvailableOutputBuffers.clear(); + for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { + BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); + info->mStatus = BufferInfo::OWNED_BY_US; + mAvailableOutputBuffers.push_back(info); + } + + mPortEOS[kPortIndexInput] = false; + mPortEOS[kPortIndexOutput] = false; + mInputEOSResult = OK; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatFlushCompleted); + notify->post(); + ALOGV("Posted kWhatFlushCompleted"); + + // MediaCodec returns all input buffers after flush, so in + // onInputBufferFilled we call postFillThisBuffer on them +} + +void MediaFilter::onSetParameters(const sp<AMessage> &msg) { + 
CHECK(mState != STARTED); + + status_t err = mFilter->setParameters(msg); + if (err != (status_t)OK) { + ALOGE("setParameters returned err %d", err); + } +} + +void MediaFilter::onCreateInputSurface() { + CHECK(mState == CONFIGURED); + + mGraphicBufferListener = new GraphicBufferListener; + + sp<AMessage> notify = new AMessage(); + notify->setTarget(this); + status_t err = mGraphicBufferListener->init( + notify, mStride, mSliceHeight, kBufferCountActual); + + if (err != OK) { + ALOGE("Failed to init mGraphicBufferListener: %d", err); + signalError(err); + return; + } + + sp<AMessage> reply = mNotify->dup(); + reply->setInt32("what", CodecBase::kWhatInputSurfaceCreated); + reply->setObject( + "input-surface", + new BufferProducerWrapper( + mGraphicBufferListener->getIGraphicBufferProducer())); + reply->post(); +} + +void MediaFilter::onInputFrameAvailable() { + BufferItem item = mGraphicBufferListener->getBufferItem(); + sp<GraphicBuffer> buf = mGraphicBufferListener->getBuffer(item); + + // get pointer to graphic buffer + void* bufPtr; + buf->lock(GraphicBuffer::USAGE_SW_READ_OFTEN, &bufPtr); + + // HACK - there is no OMX_COLOR_FORMATTYPE value for RGBA, so the format + // conversion is hardcoded until we add this. 
+ // TODO: check input format and convert only if necessary + // copy RGBA graphic buffer into temporary ARGB input buffer + BufferInfo *inputInfo = new BufferInfo; + inputInfo->mData = new ABuffer(buf->getWidth() * buf->getHeight() * 4); + ALOGV("Copying surface data into temp buffer."); + convertRGBAToARGB( + (uint8_t*)bufPtr, buf->getWidth(), buf->getHeight(), + buf->getStride(), inputInfo->mData->data()); + inputInfo->mBufferID = item.mBuf; + inputInfo->mGeneration = mGeneration; + inputInfo->mOutputFlags = 0; + inputInfo->mStatus = BufferInfo::OWNED_BY_US; + inputInfo->mData->meta()->setInt64("timeUs", item.mTimestamp / 1000); + + mAvailableInputBuffers.push_back(inputInfo); + + mGraphicBufferListener->releaseBuffer(item); + + signalProcessBuffers(); +} + +void MediaFilter::onSignalEndOfInputStream() { + // if using input surface, need to send an EOS output buffer + if (mGraphicBufferListener != NULL) { + Vector<BufferInfo> *outputBufs = &mBuffers[kPortIndexOutput]; + BufferInfo* eosBuf; + bool foundBuf = false; + for (size_t i = 0; i < kBufferCountActual; i++) { + eosBuf = &outputBufs->editItemAt(i); + if (eosBuf->mStatus == BufferInfo::OWNED_BY_US) { + foundBuf = true; + break; + } + } + + if (!foundBuf) { + ALOGE("onSignalEndOfInputStream failed to find an output buffer"); + return; + } + + eosBuf->mOutputFlags = OMX_BUFFERFLAG_EOS; + eosBuf->mGeneration = mGeneration; + eosBuf->mData->setRange(0, 0); + postDrainThisBuffer(eosBuf); + ALOGV("Posted EOS on output buffer %zu", eosBuf->mBufferID); + } + + mPortEOS[kPortIndexOutput] = true; + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); + notify->post(); + + ALOGV("Output stream saw EOS."); +} + +} // namespace android diff --git a/media/libstagefright/filters/RSFilter.cpp b/media/libstagefright/filters/RSFilter.cpp new file mode 100644 index 0000000..b569945 --- /dev/null +++ b/media/libstagefright/filters/RSFilter.cpp @@ -0,0 +1,96 @@ +/* + * Copyright (C) 
2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "RSFilter" + +#include <utils/Log.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "RSFilter.h" + +namespace android { + +RSFilter::RSFilter() { + +} + +RSFilter::~RSFilter() { + +} + +status_t RSFilter::configure(const sp<AMessage> &msg) { + status_t err = SimpleFilter::configure(msg); + if (err != OK) { + return err; + } + + if (!msg->findString("cacheDir", &mCacheDir)) { + ALOGE("Failed to find cache directory in config message."); + return NAME_NOT_FOUND; + } + + sp<RenderScriptWrapper> wrapper; + if (!msg->findObject("rs-wrapper", (sp<RefBase>*)&wrapper)) { + ALOGE("Failed to find RenderScriptWrapper in config message."); + return NAME_NOT_FOUND; + } + + mRS = wrapper->mContext; + mCallback = wrapper->mCallback; + + return OK; +} + +status_t RSFilter::start() { + // 32-bit elements for ARGB8888 + RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS); + + RSC::Type::Builder tb(mRS, e); + tb.setX(mWidth); + tb.setY(mHeight); + RSC::sp<const RSC::Type> t = tb.create(); + + mAllocIn = RSC::Allocation::createTyped(mRS, t); + mAllocOut = RSC::Allocation::createTyped(mRS, t); + + return OK; +} + +void RSFilter::reset() { + mCallback.clear(); + mAllocOut.clear(); + mAllocIn.clear(); + mRS.clear(); +} + 
+status_t RSFilter::setParameters(const sp<AMessage> &msg) { + return mCallback->handleSetParameters(msg); +} + +status_t RSFilter::processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { + mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data()); + mCallback->processBuffers(mAllocIn.get(), mAllocOut.get()); + mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data()); + + return OK; +} + +} // namespace android diff --git a/media/libstagefright/filters/RSFilter.h b/media/libstagefright/filters/RSFilter.h new file mode 100644 index 0000000..c5b5074 --- /dev/null +++ b/media/libstagefright/filters/RSFilter.h @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef RS_FILTER_H_ +#define RS_FILTER_H_ + +#include <media/stagefright/RenderScriptWrapper.h> +#include <RenderScript.h> + +#include "SimpleFilter.h" + +namespace android { + +struct AString; + +struct RSFilter : public SimpleFilter { +public: + RSFilter(); + + virtual status_t configure(const sp<AMessage> &msg); + virtual status_t start(); + virtual void reset(); + virtual status_t setParameters(const sp<AMessage> &msg); + virtual status_t processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: + virtual ~RSFilter(); + +private: + AString mCacheDir; + sp<RenderScriptWrapper::RSFilterCallback> mCallback; + RSC::sp<RSC::RS> mRS; + RSC::sp<RSC::Allocation> mAllocIn; + RSC::sp<RSC::Allocation> mAllocOut; +}; + +} // namespace android + +#endif // RS_FILTER_H_ diff --git a/media/libstagefright/filters/SaturationFilter.cpp b/media/libstagefright/filters/SaturationFilter.cpp new file mode 100644 index 0000000..ba5f75a --- /dev/null +++ b/media/libstagefright/filters/SaturationFilter.cpp @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "SaturationFilter" + +#include <utils/Log.h> + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "SaturationFilter.h" + +namespace android { + +status_t SaturationFilter::configure(const sp<AMessage> &msg) { + status_t err = SimpleFilter::configure(msg); + if (err != OK) { + return err; + } + + if (!msg->findString("cacheDir", &mCacheDir)) { + ALOGE("Failed to find cache directory in config message."); + return NAME_NOT_FOUND; + } + + return OK; +} + +status_t SaturationFilter::start() { + // TODO: use a single RS context object for entire application + mRS = new RSC::RS(); + + if (!mRS->init(mCacheDir.c_str())) { + ALOGE("Failed to initialize RenderScript context."); + return NO_INIT; + } + + // 32-bit elements for ARGB8888 + RSC::sp<const RSC::Element> e = RSC::Element::U8_4(mRS); + + RSC::Type::Builder tb(mRS, e); + tb.setX(mWidth); + tb.setY(mHeight); + RSC::sp<const RSC::Type> t = tb.create(); + + mAllocIn = RSC::Allocation::createTyped(mRS, t); + mAllocOut = RSC::Allocation::createTyped(mRS, t); + + mScript = new ScriptC_saturationARGB(mRS); + + mScript->set_gSaturation(mSaturation); + + return OK; +} + +void SaturationFilter::reset() { + mScript.clear(); + mAllocOut.clear(); + mAllocIn.clear(); + mRS.clear(); +} + +status_t SaturationFilter::setParameters(const sp<AMessage> &msg) { + sp<AMessage> params; + CHECK(msg->findMessage("params", ¶ms)); + + float saturation; + if (params->findFloat("saturation", &saturation)) { + mSaturation = saturation; + } + + return OK; +} + +status_t SaturationFilter::processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { + mAllocIn->copy1DRangeFrom(0, mWidth * mHeight, srcBuffer->data()); + mScript->forEach_root(mAllocIn, mAllocOut); + mAllocOut->copy1DRangeTo(0, mWidth * mHeight, outBuffer->data()); + + return OK; +} + +} // namespace 
android diff --git a/media/libstagefright/filters/SaturationFilter.h b/media/libstagefright/filters/SaturationFilter.h new file mode 100644 index 0000000..0545021 --- /dev/null +++ b/media/libstagefright/filters/SaturationFilter.h @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SATURATION_FILTER_H_ +#define SATURATION_FILTER_H_ + +#include <RenderScript.h> + +#include "ScriptC_saturationARGB.h" +#include "SimpleFilter.h" + +namespace android { + +struct SaturationFilter : public SimpleFilter { +public: + SaturationFilter() : mSaturation(1.f) {}; + + virtual status_t configure(const sp<AMessage> &msg); + virtual status_t start(); + virtual void reset(); + virtual status_t setParameters(const sp<AMessage> &msg); + virtual status_t processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: + virtual ~SaturationFilter() {}; + +private: + AString mCacheDir; + RSC::sp<RSC::RS> mRS; + RSC::sp<RSC::Allocation> mAllocIn; + RSC::sp<RSC::Allocation> mAllocOut; + RSC::sp<ScriptC_saturationARGB> mScript; + float mSaturation; +}; + +} // namespace android + +#endif // SATURATION_FILTER_H_ diff --git a/media/libstagefright/filters/SimpleFilter.cpp b/media/libstagefright/filters/SimpleFilter.cpp new file mode 100644 index 0000000..6c1ca2c --- /dev/null +++ b/media/libstagefright/filters/SimpleFilter.cpp @@ -0,0 +1,39 @@ +/* + * Copyright (C) 2014 
The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "SimpleFilter.h" + +namespace android { + +status_t SimpleFilter::configure(const sp<AMessage> &msg) { + CHECK(msg->findInt32("width", &mWidth)); + CHECK(msg->findInt32("height", &mHeight)); + if (!msg->findInt32("stride", &mStride)) { + mStride = mWidth; + } + if (!msg->findInt32("slice-height", &mSliceHeight)) { + mSliceHeight = mHeight; + } + CHECK(msg->findInt32("color-format", &mColorFormatIn)); + mColorFormatOut = mColorFormatIn; + + return OK; +} + +} // namespace android diff --git a/media/libstagefright/filters/SimpleFilter.h b/media/libstagefright/filters/SimpleFilter.h new file mode 100644 index 0000000..4cd37ef --- /dev/null +++ b/media/libstagefright/filters/SimpleFilter.h @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SIMPLE_FILTER_H_ +#define SIMPLE_FILTER_H_ + +#include <stdint.h> +#include <utils/Errors.h> +#include <utils/RefBase.h> + +struct ABuffer; +struct AMessage; + +namespace android { + +struct SimpleFilter : public RefBase { +public: + SimpleFilter() : mWidth(0), mHeight(0), mStride(0), mSliceHeight(0), + mColorFormatIn(0), mColorFormatOut(0) {}; + + virtual status_t configure(const sp<AMessage> &msg); + + virtual status_t start() = 0; + virtual void reset() = 0; + virtual status_t setParameters(const sp<AMessage> &msg) = 0; + virtual status_t processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) = 0; + +protected: + int32_t mWidth, mHeight; + int32_t mStride, mSliceHeight; + int32_t mColorFormatIn, mColorFormatOut; + + virtual ~SimpleFilter() {}; +}; + +} // namespace android + +#endif // SIMPLE_FILTER_H_ diff --git a/media/libstagefright/filters/ZeroFilter.cpp b/media/libstagefright/filters/ZeroFilter.cpp new file mode 100644 index 0000000..3f1243c --- /dev/null +++ b/media/libstagefright/filters/ZeroFilter.cpp @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ZeroFilter" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include "ZeroFilter.h" + +namespace android { + +status_t ZeroFilter::setParameters(const sp<AMessage> &msg) { + sp<AMessage> params; + CHECK(msg->findMessage("params", ¶ms)); + + int32_t invert; + if (params->findInt32("invert", &invert)) { + mInvertData = (invert != 0); + } + + return OK; +} + +status_t ZeroFilter::processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer) { + // assuming identical input & output buffers, since we're a copy filter + if (mInvertData) { + uint32_t* src = (uint32_t*)srcBuffer->data(); + uint32_t* dest = (uint32_t*)outBuffer->data(); + for (size_t i = 0; i < srcBuffer->size() / 4; ++i) { + *(dest++) = *(src++) ^ 0xFFFFFFFF; + } + } else { + memcpy(outBuffer->data(), srcBuffer->data(), srcBuffer->size()); + } + outBuffer->setRange(0, srcBuffer->size()); + + return OK; +} + +} // namespace android diff --git a/media/libstagefright/filters/ZeroFilter.h b/media/libstagefright/filters/ZeroFilter.h new file mode 100644 index 0000000..bd34dfb --- /dev/null +++ b/media/libstagefright/filters/ZeroFilter.h @@ -0,0 +1,43 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ZERO_FILTER_H_ +#define ZERO_FILTER_H_ + +#include "SimpleFilter.h" + +namespace android { + +struct ZeroFilter : public SimpleFilter { +public: + ZeroFilter() : mInvertData(false) {}; + + virtual status_t start() { return OK; }; + virtual void reset() {}; + virtual status_t setParameters(const sp<AMessage> &msg); + virtual status_t processBuffers( + const sp<ABuffer> &srcBuffer, const sp<ABuffer> &outBuffer); + +protected: + virtual ~ZeroFilter() {}; + +private: + bool mInvertData; +}; + +} // namespace android + +#endif // ZERO_FILTER_H_ diff --git a/media/libstagefright/filters/saturation.rs b/media/libstagefright/filters/saturation.rs new file mode 100644 index 0000000..2c867ac --- /dev/null +++ b/media/libstagefright/filters/saturation.rs @@ -0,0 +1,40 @@ +// Sample script for RGB888 support (compare to saturationARGB.rs) +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma version(1) +#pragma rs java_package_name(com.android.rs.cppbasic) +#pragma rs_fp_relaxed + +const static float3 gMonoMult = {0.299f, 0.587f, 0.114f}; + +// global variables (parameters accessible to application code) +float gSaturation = 1.0f; + +void root(const uchar3 *v_in, uchar3 *v_out) { + // scale 0-255 uchar to 0-1.0 float + float3 in = {v_in->r * 0.003921569f, v_in->g * 0.003921569f, + v_in->b * 0.003921569f}; + + // apply saturation filter + float3 result = dot(in, gMonoMult); + result = mix(result, in, gSaturation); + + // convert to uchar, copied from rsPackColorTo8888 + v_out->x = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f); + v_out->y = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f); + v_out->z = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f); +} diff --git a/media/libstagefright/filters/saturationARGB.rs b/media/libstagefright/filters/saturationARGB.rs new file mode 100644 index 0000000..1de9dd8 --- /dev/null +++ b/media/libstagefright/filters/saturationARGB.rs @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma version(1) +#pragma rs java_package_name(com.android.rs.cppbasic) +#pragma rs_fp_relaxed + +const static float3 gMonoMult = {0.299f, 0.587f, 0.114f}; + +// global variables (parameters accessible to application code) +float gSaturation = 1.0f; + +void root(const uchar4 *v_in, uchar4 *v_out) { + v_out->x = v_in->x; // don't modify A + + // get RGB, scale 0-255 uchar to 0-1.0 float + float3 rgb = {v_in->y * 0.003921569f, v_in->z * 0.003921569f, + v_in->w * 0.003921569f}; + + // apply saturation filter + float3 result = dot(rgb, gMonoMult); + result = mix(result, rgb, gSaturation); + + v_out->y = (uchar)clamp((result.r * 255.f + 0.5f), 0.f, 255.f); + v_out->z = (uchar)clamp((result.g * 255.f + 0.5f), 0.f, 255.f); + v_out->w = (uchar)clamp((result.b * 255.f + 0.5f), 0.f, 255.f); +} diff --git a/media/libstagefright/foundation/AHandler.cpp b/media/libstagefright/foundation/AHandler.cpp index bd5f7e9..7dbbe54 100644 --- a/media/libstagefright/foundation/AHandler.cpp +++ b/media/libstagefright/foundation/AHandler.cpp @@ -19,15 +19,23 @@ #include <utils/Log.h> #include <media/stagefright/foundation/AHandler.h> - -#include <media/stagefright/foundation/ALooperRoster.h> +#include <media/stagefright/foundation/AMessage.h> namespace android { -sp<ALooper> AHandler::looper() { - extern ALooperRoster gLooperRoster; +void AHandler::deliverMessage(const sp<AMessage> &msg) { + onMessageReceived(msg); + mMessageCounter++; - return gLooperRoster.findLooper(id()); + if (mVerboseStats) { + uint32_t what = msg->what(); + ssize_t idx = mMessages.indexOfKey(what); + if (idx < 0) { + mMessages.add(what, 1); + } else { + mMessages.editValueAt(idx)++; + } + } } } // namespace android diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp index 88b1c92..90b5f68 100644 --- a/media/libstagefright/foundation/ALooper.cpp +++ b/media/libstagefright/foundation/ALooper.cpp @@ -16,6 +16,9 @@ //#define LOG_NDEBUG 0 #define LOG_TAG 
"ALooper" + +#include <media/stagefright/foundation/ADebug.h> + #include <utils/Log.h> #include <sys/time.h> @@ -210,7 +213,7 @@ bool ALooper::loop() { mEventQueue.erase(mEventQueue.begin()); } - gLooperRoster.deliverMessage(event.mMessage); + event.mMessage->deliver(); // NOTE: It's important to note that at this point our "ALooper" object // may no longer exist (its final reference may have gone away while @@ -220,4 +223,29 @@ bool ALooper::loop() { return true; } +// to be called by AMessage::postAndAwaitResponse only +sp<AReplyToken> ALooper::createReplyToken() { + return new AReplyToken(this); +} + +// to be called by AMessage::postAndAwaitResponse only +status_t ALooper::awaitResponse(const sp<AReplyToken> &replyToken, sp<AMessage> *response) { + // return status in case we want to handle an interrupted wait + Mutex::Autolock autoLock(mRepliesLock); + CHECK(replyToken != NULL); + while (!replyToken->retrieveReply(response)) { + mRepliesCondition.wait(mRepliesLock); + } + return OK; +} + +status_t ALooper::postReply(const sp<AReplyToken> &replyToken, const sp<AMessage> &reply) { + Mutex::Autolock autoLock(mRepliesLock); + status_t err = replyToken->setReply(reply); + if (err == OK) { + mRepliesCondition.broadcast(); + } + return err; +} + } // namespace android diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp index 2d57aee..473ce1b 100644 --- a/media/libstagefright/foundation/ALooperRoster.cpp +++ b/media/libstagefright/foundation/ALooperRoster.cpp @@ -30,8 +30,7 @@ namespace android { static bool verboseStats = false; ALooperRoster::ALooperRoster() - : mNextHandlerID(1), - mNextReplyID(1) { + : mNextHandlerID(1) { } ALooper::handler_id ALooperRoster::registerHandler( @@ -49,7 +48,7 @@ ALooper::handler_id ALooperRoster::registerHandler( ALooper::handler_id handlerID = mNextHandlerID++; mHandlers.add(handlerID, info); - handler->setID(handlerID); + handler->setID(handlerID, looper); return 
handlerID; } @@ -68,7 +67,7 @@ void ALooperRoster::unregisterHandler(ALooper::handler_id handlerID) { sp<AHandler> handler = info.mHandler.promote(); if (handler != NULL) { - handler->setID(0); + handler->setID(0, NULL); } mHandlers.removeItemsAt(index); @@ -100,116 +99,6 @@ void ALooperRoster::unregisterStaleHandlers() { } } -status_t ALooperRoster::postMessage( - const sp<AMessage> &msg, int64_t delayUs) { - - sp<ALooper> looper = findLooper(msg->target()); - - if (looper == NULL) { - return -ENOENT; - } - looper->post(msg, delayUs); - return OK; -} - -void ALooperRoster::deliverMessage(const sp<AMessage> &msg) { - sp<AHandler> handler; - - { - Mutex::Autolock autoLock(mLock); - - ssize_t index = mHandlers.indexOfKey(msg->target()); - - if (index < 0) { - ALOGW("failed to deliver message. Target handler not registered."); - return; - } - - const HandlerInfo &info = mHandlers.valueAt(index); - handler = info.mHandler.promote(); - - if (handler == NULL) { - ALOGW("failed to deliver message. 
" - "Target handler %d registered, but object gone.", - msg->target()); - - mHandlers.removeItemsAt(index); - return; - } - } - - handler->onMessageReceived(msg); - handler->mMessageCounter++; - - if (verboseStats) { - uint32_t what = msg->what(); - ssize_t idx = handler->mMessages.indexOfKey(what); - if (idx < 0) { - handler->mMessages.add(what, 1); - } else { - handler->mMessages.editValueAt(idx)++; - } - } -} - -sp<ALooper> ALooperRoster::findLooper(ALooper::handler_id handlerID) { - Mutex::Autolock autoLock(mLock); - - ssize_t index = mHandlers.indexOfKey(handlerID); - - if (index < 0) { - return NULL; - } - - sp<ALooper> looper = mHandlers.valueAt(index).mLooper.promote(); - - if (looper == NULL) { - mHandlers.removeItemsAt(index); - return NULL; - } - - return looper; -} - -status_t ALooperRoster::postAndAwaitResponse( - const sp<AMessage> &msg, sp<AMessage> *response) { - sp<ALooper> looper = findLooper(msg->target()); - - if (looper == NULL) { - ALOGW("failed to post message. " - "Target handler %d still registered, but object gone.", - msg->target()); - response->clear(); - return -ENOENT; - } - - Mutex::Autolock autoLock(mLock); - - uint32_t replyID = mNextReplyID++; - - msg->setInt32("replyID", replyID); - - looper->post(msg, 0 /* delayUs */); - - ssize_t index; - while ((index = mReplies.indexOfKey(replyID)) < 0) { - mRepliesCondition.wait(mLock); - } - - *response = mReplies.valueAt(index); - mReplies.removeItemsAt(index); - - return OK; -} - -void ALooperRoster::postReply(uint32_t replyID, const sp<AMessage> &reply) { - Mutex::Autolock autoLock(mLock); - - CHECK(mReplies.indexOfKey(replyID) < 0); - mReplies.add(replyID, reply); - mRepliesCondition.broadcast(); -} - static void makeFourCC(uint32_t fourcc, char *s) { s[0] = (fourcc >> 24) & 0xff; if (s[0]) { @@ -225,7 +114,7 @@ static void makeFourCC(uint32_t fourcc, char *s) { void ALooperRoster::dump(int fd, const Vector<String16>& args) { bool clear = false; bool oldVerbose = verboseStats; - for 
(size_t i = 0;i < args.size(); i++) { + for (size_t i = 0; i < args.size(); i++) { if (args[i] == String16("-c")) { clear = true; } else if (args[i] == String16("-von")) { @@ -241,22 +130,23 @@ void ALooperRoster::dump(int fd, const Vector<String16>& args) { Mutex::Autolock autoLock(mLock); size_t n = mHandlers.size(); - s.appendFormat(" %zd registered handlers:\n", n); + s.appendFormat(" %zu registered handlers:\n", n); for (size_t i = 0; i < n; i++) { - s.appendFormat(" %zd: ", i); + s.appendFormat(" %d: ", mHandlers.keyAt(i)); HandlerInfo &info = mHandlers.editValueAt(i); sp<ALooper> looper = info.mLooper.promote(); if (looper != NULL) { - s.append(looper->mName.c_str()); + s.append(looper->getName()); sp<AHandler> handler = info.mHandler.promote(); if (handler != NULL) { + handler->mVerboseStats = verboseStats; s.appendFormat(": %u messages processed", handler->mMessageCounter); if (verboseStats) { for (size_t j = 0; j < handler->mMessages.size(); j++) { char fourcc[15]; makeFourCC(handler->mMessages.keyAt(j), fourcc); - s.appendFormat("\n %s: %d", + s.appendFormat("\n %s: %u", fourcc, handler->mMessages.valueAt(j)); } diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp index 1f46bc9..e549ff6 100644 --- a/media/libstagefright/foundation/AMessage.cpp +++ b/media/libstagefright/foundation/AMessage.cpp @@ -27,6 +27,7 @@ #include "ABuffer.h" #include "ADebug.h" #include "ALooperRoster.h" +#include "AHandler.h" #include "AString.h" #include <binder/Parcel.h> @@ -36,10 +37,27 @@ namespace android { extern ALooperRoster gLooperRoster; -AMessage::AMessage(uint32_t what, ALooper::handler_id target) +status_t AReplyToken::setReply(const sp<AMessage> &reply) { + if (mReplied) { + ALOGE("trying to post a duplicate reply"); + return -EBUSY; + } + CHECK(mReply == NULL); + mReply = reply; + mReplied = true; + return OK; +} + +AMessage::AMessage(void) + : mWhat(0), + mTarget(0), + mNumItems(0) { +} + 
+AMessage::AMessage(uint32_t what, const sp<const AHandler> &handler) : mWhat(what), - mTarget(target), mNumItems(0) { + setTarget(handler); } AMessage::~AMessage() { @@ -54,12 +72,16 @@ uint32_t AMessage::what() const { return mWhat; } -void AMessage::setTarget(ALooper::handler_id handlerID) { - mTarget = handlerID; -} - -ALooper::handler_id AMessage::target() const { - return mTarget; +void AMessage::setTarget(const sp<const AHandler> &handler) { + if (handler == NULL) { + mTarget = 0; + mHandler.clear(); + mLooper.clear(); + } else { + mTarget = handler->id(); + mHandler = handler->getHandler(); + mLooper = handler->getLooper(); + } } void AMessage::clear() { @@ -322,33 +344,76 @@ bool AMessage::findRect( return true; } -void AMessage::post(int64_t delayUs) { - gLooperRoster.postMessage(this, delayUs); +void AMessage::deliver() { + sp<AHandler> handler = mHandler.promote(); + if (handler == NULL) { + ALOGW("failed to deliver message as target handler %d is gone.", mTarget); + return; + } + + handler->deliverMessage(this); +} + +status_t AMessage::post(int64_t delayUs) { + sp<ALooper> looper = mLooper.promote(); + if (looper == NULL) { + ALOGW("failed to post message as target looper for handler %d is gone.", mTarget); + return -ENOENT; + } + + looper->post(this, delayUs); + return OK; } status_t AMessage::postAndAwaitResponse(sp<AMessage> *response) { - return gLooperRoster.postAndAwaitResponse(this, response); + sp<ALooper> looper = mLooper.promote(); + if (looper == NULL) { + ALOGW("failed to post message as target looper for handler %d is gone.", mTarget); + return -ENOENT; + } + + sp<AReplyToken> token = looper->createReplyToken(); + if (token == NULL) { + ALOGE("failed to create reply token"); + return -ENOMEM; + } + setObject("replyID", token); + + looper->post(this, 0 /* delayUs */); + return looper->awaitResponse(token, response); } -void AMessage::postReply(uint32_t replyID) { - gLooperRoster.postReply(replyID, this); +status_t AMessage::postReply(const 
sp<AReplyToken> &replyToken) { + if (replyToken == NULL) { + ALOGW("failed to post reply to a NULL token"); + return -ENOENT; + } + sp<ALooper> looper = replyToken->getLooper(); + if (looper == NULL) { + ALOGW("failed to post reply as target looper is gone."); + return -ENOENT; + } + return looper->postReply(replyToken, this); } -bool AMessage::senderAwaitsResponse(uint32_t *replyID) const { - int32_t tmp; - bool found = findInt32("replyID", &tmp); +bool AMessage::senderAwaitsResponse(sp<AReplyToken> *replyToken) { + sp<RefBase> tmp; + bool found = findObject("replyID", &tmp); if (!found) { return false; } - *replyID = static_cast<uint32_t>(tmp); + *replyToken = static_cast<AReplyToken *>(tmp.get()); + tmp.clear(); + setObject("replyID", tmp); + // TODO: delete Object instead of setting it to NULL - return true; + return *replyToken != NULL; } sp<AMessage> AMessage::dup() const { - sp<AMessage> msg = new AMessage(mWhat, mTarget); + sp<AMessage> msg = new AMessage(mWhat, mHandler.promote()); msg->mNumItems = mNumItems; #ifdef DUMP_STATS @@ -532,7 +597,8 @@ AString AMessage::debugString(int32_t indent) const { // static sp<AMessage> AMessage::FromParcel(const Parcel &parcel) { int32_t what = parcel.readInt32(); - sp<AMessage> msg = new AMessage(what); + sp<AMessage> msg = new AMessage(); + msg->setWhat(what); msg->mNumItems = static_cast<size_t>(parcel.readInt32()); for (size_t i = 0; i < msg->mNumItems; ++i) { diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp index d0f3bc2..2d93152 100644 --- a/media/libstagefright/httplive/LiveSession.cpp +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -33,6 +33,7 @@ #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/foundation/AUtils.h> #include <media/stagefright/DataSource.h> #include <media/stagefright/FileSource.h> #include 
<media/stagefright/MediaErrors.h> @@ -49,8 +50,96 @@ namespace android { -// Number of recently-read bytes to use for bandwidth estimation -const size_t LiveSession::kBandwidthHistoryBytes = 200 * 1024; +// static +// Bandwidth Switch Mark Defaults +const int64_t LiveSession::kUpSwitchMarkUs = 15000000ll; +const int64_t LiveSession::kDownSwitchMarkUs = 20000000ll; +const int64_t LiveSession::kUpSwitchMarginUs = 5000000ll; +const int64_t LiveSession::kResumeThresholdUs = 100000ll; + +// Buffer Prepare/Ready/Underflow Marks +const int64_t LiveSession::kReadyMarkUs = 5000000ll; +const int64_t LiveSession::kPrepareMarkUs = 1500000ll; +const int64_t LiveSession::kUnderflowMarkUs = 1000000ll; + +struct LiveSession::BandwidthEstimator : public RefBase { + BandwidthEstimator(); + + void addBandwidthMeasurement(size_t numBytes, int64_t delayUs); + bool estimateBandwidth(int32_t *bandwidth); + +private: + // Bandwidth estimation parameters + static const int32_t kMaxBandwidthHistoryItems = 20; + static const int64_t kMaxBandwidthHistoryWindowUs = 5000000ll; // 5 sec + + struct BandwidthEntry { + int64_t mDelayUs; + size_t mNumBytes; + }; + + Mutex mLock; + List<BandwidthEntry> mBandwidthHistory; + int64_t mTotalTransferTimeUs; + size_t mTotalTransferBytes; + + DISALLOW_EVIL_CONSTRUCTORS(BandwidthEstimator); +}; + +LiveSession::BandwidthEstimator::BandwidthEstimator() : + mTotalTransferTimeUs(0), + mTotalTransferBytes(0) { +} + +void LiveSession::BandwidthEstimator::addBandwidthMeasurement( + size_t numBytes, int64_t delayUs) { + AutoMutex autoLock(mLock); + + BandwidthEntry entry; + entry.mDelayUs = delayUs; + entry.mNumBytes = numBytes; + mTotalTransferTimeUs += delayUs; + mTotalTransferBytes += numBytes; + mBandwidthHistory.push_back(entry); + + // trim old samples, keeping at least kMaxBandwidthHistoryItems samples, + // and total transfer time at least kMaxBandwidthHistoryWindowUs. 
+ while (mBandwidthHistory.size() > kMaxBandwidthHistoryItems) { + List<BandwidthEntry>::iterator it = mBandwidthHistory.begin(); + if (mTotalTransferTimeUs - it->mDelayUs < kMaxBandwidthHistoryWindowUs) { + break; + } + mTotalTransferTimeUs -= it->mDelayUs; + mTotalTransferBytes -= it->mNumBytes; + mBandwidthHistory.erase(mBandwidthHistory.begin()); + } +} + +bool LiveSession::BandwidthEstimator::estimateBandwidth(int32_t *bandwidthBps) { + AutoMutex autoLock(mLock); + + if (mBandwidthHistory.size() < 2) { + return false; + } + + *bandwidthBps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs); + return true; +} + +//static +const char *LiveSession::getKeyForStream(StreamType type) { + switch (type) { + case STREAMTYPE_VIDEO: + return "timeUsVideo"; + case STREAMTYPE_AUDIO: + return "timeUsAudio"; + case STREAMTYPE_SUBTITLES: + return "timeUsSubtitle"; + default: + TRESPASS(); + } + return NULL; +} LiveSession::LiveSession( const sp<AMessage> ¬ify, uint32_t flags, @@ -58,146 +147,68 @@ LiveSession::LiveSession( : mNotify(notify), mFlags(flags), mHTTPService(httpService), + mBuffering(false), mInPreparationPhase(true), + mPollBufferingGeneration(0), + mPrevBufferPercentage(-1), mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())), mCurBandwidthIndex(-1), + mOrigBandwidthIndex(-1), + mLastBandwidthBps(-1ll), + mBandwidthEstimator(new BandwidthEstimator()), mStreamMask(0), mNewStreamMask(0), mSwapMask(0), - mCheckBandwidthGeneration(0), mSwitchGeneration(0), mSubtitleGeneration(0), mLastDequeuedTimeUs(0ll), mRealTimeBaseUs(0ll), mReconfigurationInProgress(false), mSwitchInProgress(false), - mDisconnectReplyID(0), - mSeekReplyID(0), + mUpSwitchMark(kUpSwitchMarkUs), + mDownSwitchMark(kDownSwitchMarkUs), + mUpSwitchMargin(kUpSwitchMarginUs), mFirstTimeUsValid(false), mFirstTimeUs(0), mLastSeekTimeUs(0) { - mStreams[kAudioIndex] = StreamItem("audio"); mStreams[kVideoIndex] = StreamItem("video"); mStreams[kSubtitleIndex] = StreamItem("subtitles"); 
for (size_t i = 0; i < kMaxStreams; ++i) { - mDiscontinuities.add(indexToType(i), new AnotherPacketSource(NULL /* meta */)); mPacketSources.add(indexToType(i), new AnotherPacketSource(NULL /* meta */)); mPacketSources2.add(indexToType(i), new AnotherPacketSource(NULL /* meta */)); - mBuffering[i] = false; - } - - size_t numHistoryItems = kBandwidthHistoryBytes / - PlaylistFetcher::kDownloadBlockSize + 1; - if (numHistoryItems < 5) { - numHistoryItems = 5; } - mHTTPDataSource->setBandwidthHistorySize(numHistoryItems); } LiveSession::~LiveSession() { -} - -sp<ABuffer> LiveSession::createFormatChangeBuffer(bool swap) { - ABuffer *discontinuity = new ABuffer(0); - discontinuity->meta()->setInt32("discontinuity", ATSParser::DISCONTINUITY_FORMATCHANGE); - discontinuity->meta()->setInt32("swapPacketSource", swap); - discontinuity->meta()->setInt32("switchGeneration", mSwitchGeneration); - discontinuity->meta()->setInt64("timeUs", -1); - return discontinuity; -} - -void LiveSession::swapPacketSource(StreamType stream) { - sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream); - sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream); - sp<AnotherPacketSource> tmp = aps; - aps = aps2; - aps2 = tmp; - aps2->clear(); + if (mFetcherLooper != NULL) { + mFetcherLooper->stop(); + } } status_t LiveSession::dequeueAccessUnit( StreamType stream, sp<ABuffer> *accessUnit) { - if (!(mStreamMask & stream)) { - // return -EWOULDBLOCK to avoid halting the decoder - // when switching between audio/video and audio only. 
- return -EWOULDBLOCK; - } - - status_t finalResult; - sp<AnotherPacketSource> discontinuityQueue = mDiscontinuities.valueFor(stream); - if (discontinuityQueue->hasBufferAvailable(&finalResult)) { - discontinuityQueue->dequeueAccessUnit(accessUnit); - // seeking, track switching - sp<AMessage> extra; - int64_t timeUs; - if ((*accessUnit)->meta()->findMessage("extra", &extra) - && extra != NULL - && extra->findInt64("timeUs", &timeUs)) { - // seeking only - mLastSeekTimeUs = timeUs; - mDiscontinuityOffsetTimesUs.clear(); - mDiscontinuityAbsStartTimesUs.clear(); - } - return INFO_DISCONTINUITY; - } - + status_t finalResult = OK; sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(stream); ssize_t idx = typeToIndex(stream); - if (!packetSource->hasBufferAvailable(&finalResult)) { + // Do not let client pull data if we don't have data packets yet. + // We might only have a format discontinuity queued without data. + // When NuPlayerDecoder dequeues the format discontinuity, it will + // immediately try to getFormat. If we return NULL, NuPlayerDecoder + // thinks it can do seamless change, so will not shutdown decoder. + // When the actual format arrives, it can't handle it and get stuck. 
+ if (!packetSource->hasDataBufferAvailable(&finalResult)) { if (finalResult == OK) { - mBuffering[idx] = true; return -EAGAIN; } else { return finalResult; } } - int32_t targetDuration = 0; - sp<AMessage> meta = packetSource->getLatestEnqueuedMeta(); - if (meta != NULL) { - meta->findInt32("targetDuration", &targetDuration); - } - - int64_t targetDurationUs = targetDuration * 1000000ll; - if (targetDurationUs == 0 || - targetDurationUs > PlaylistFetcher::kMinBufferedDurationUs) { - // Fetchers limit buffering to - // min(3 * targetDuration, kMinBufferedDurationUs) - targetDurationUs = PlaylistFetcher::kMinBufferedDurationUs; - } - - if (mBuffering[idx]) { - if (mSwitchInProgress - || packetSource->isFinished(0) - || packetSource->getEstimatedDurationUs() > targetDurationUs) { - mBuffering[idx] = false; - } - } - - if (mBuffering[idx]) { - return -EAGAIN; - } - - // wait for counterpart - sp<AnotherPacketSource> otherSource; - uint32_t mask = mNewStreamMask & mStreamMask; - uint32_t fetchersMask = 0; - for (size_t i = 0; i < mFetcherInfos.size(); ++i) { - uint32_t fetcherMask = mFetcherInfos.valueAt(i).mFetcher->getStreamTypeMask(); - fetchersMask |= fetcherMask; - } - mask &= fetchersMask; - if (stream == STREAMTYPE_AUDIO && (mask & STREAMTYPE_VIDEO)) { - otherSource = mPacketSources.valueFor(STREAMTYPE_VIDEO); - } else if (stream == STREAMTYPE_VIDEO && (mask & STREAMTYPE_AUDIO)) { - otherSource = mPacketSources.valueFor(STREAMTYPE_AUDIO); - } - if (otherSource != NULL && !otherSource->hasBufferAvailable(&finalResult)) { - return finalResult == OK ? -EAGAIN : finalResult; - } + // Let the client dequeue as long as we have buffers available + // Do not make pause/resume decisions here. status_t err = packetSource->dequeueAccessUnit(accessUnit); @@ -235,42 +246,6 @@ status_t LiveSession::dequeueAccessUnit( streamStr, type, extra == NULL ? 
"NULL" : extra->debugString().c_str()); - - int32_t swap; - if ((*accessUnit)->meta()->findInt32("swapPacketSource", &swap) && swap) { - int32_t switchGeneration; - CHECK((*accessUnit)->meta()->findInt32("switchGeneration", &switchGeneration)); - { - Mutex::Autolock lock(mSwapMutex); - if (switchGeneration == mSwitchGeneration) { - swapPacketSource(stream); - sp<AMessage> msg = new AMessage(kWhatSwapped, id()); - msg->setInt32("stream", stream); - msg->setInt32("switchGeneration", switchGeneration); - msg->post(); - } - } - } else { - size_t seq = strm.mCurDiscontinuitySeq; - int64_t offsetTimeUs; - if (mDiscontinuityOffsetTimesUs.indexOfKey(seq) >= 0) { - offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(seq); - } else { - offsetTimeUs = 0; - } - - seq += 1; - if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) { - int64_t firstTimeUs; - firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq); - offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs; - offsetTimeUs += strm.mLastSampleDurationUs; - } else { - offsetTimeUs += strm.mLastSampleDurationUs; - } - - mDiscontinuityOffsetTimesUs.add(seq, offsetTimeUs); - } } else if (err == OK) { if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) { @@ -278,7 +253,26 @@ status_t LiveSession::dequeueAccessUnit( int32_t discontinuitySeq = 0; CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs)); (*accessUnit)->meta()->findInt32("discontinuitySeq", &discontinuitySeq); - strm.mCurDiscontinuitySeq = discontinuitySeq; + if (discontinuitySeq > (int32_t) strm.mCurDiscontinuitySeq) { + int64_t offsetTimeUs; + if (mDiscontinuityOffsetTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) { + offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(strm.mCurDiscontinuitySeq); + } else { + offsetTimeUs = 0; + } + + if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) { + int64_t firstTimeUs; + firstTimeUs = 
mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq); + offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs; + offsetTimeUs += strm.mLastSampleDurationUs; + } else { + offsetTimeUs += strm.mLastSampleDurationUs; + } + + mDiscontinuityOffsetTimesUs.add(discontinuitySeq, offsetTimeUs); + strm.mCurDiscontinuitySeq = discontinuitySeq; + } int32_t discard = 0; int64_t firstTimeUs; @@ -331,7 +325,6 @@ status_t LiveSession::dequeueAccessUnit( } status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) { - // No swapPacketSource race condition; called from the same thread as dequeueAccessUnit. if (!(mStreamMask & stream)) { return UNKNOWN_ERROR; } @@ -344,12 +337,21 @@ status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) { return -EAGAIN; } + if (stream == STREAMTYPE_AUDIO) { + // set AAC input buffer size to 32K bytes (256kbps x 1sec) + meta->setInt32(kKeyMaxInputSize, 32 * 1024); + } + return convertMetaDataToMessage(meta, format); } +sp<HTTPBase> LiveSession::getHTTPDataSource() { + return new MediaHTTP(mHTTPService->makeHTTPConnection()); +} + void LiveSession::connectAsync( const char *url, const KeyedVector<String8, String8> *headers) { - sp<AMessage> msg = new AMessage(kWhatConnect, id()); + sp<AMessage> msg = new AMessage(kWhatConnect, this); msg->setString("url", url); if (headers != NULL) { @@ -362,7 +364,7 @@ void LiveSession::connectAsync( } status_t LiveSession::disconnect() { - sp<AMessage> msg = new AMessage(kWhatDisconnect, id()); + sp<AMessage> msg = new AMessage(kWhatDisconnect, this); sp<AMessage> response; status_t err = msg->postAndAwaitResponse(&response); @@ -371,7 +373,7 @@ status_t LiveSession::disconnect() { } status_t LiveSession::seekTo(int64_t timeUs) { - sp<AMessage> msg = new AMessage(kWhatSeek, id()); + sp<AMessage> msg = new AMessage(kWhatSeek, this); msg->setInt64("timeUs", timeUs); sp<AMessage> response; @@ -380,6 +382,95 @@ status_t LiveSession::seekTo(int64_t timeUs) { 
return err; } +bool LiveSession::checkSwitchProgress( + sp<AMessage> &stopParams, int64_t delayUs, bool *needResumeUntil) { + AString newUri; + CHECK(stopParams->findString("uri", &newUri)); + + *needResumeUntil = false; + sp<AMessage> firstNewMeta[kMaxStreams]; + for (size_t i = 0; i < kMaxStreams; ++i) { + StreamType stream = indexToType(i); + if (!(mSwapMask & mNewStreamMask & stream) + || (mStreams[i].mNewUri != newUri)) { + continue; + } + if (stream == STREAMTYPE_SUBTITLES) { + continue; + } + sp<AnotherPacketSource> &source = mPacketSources.editValueAt(i); + + // First, get latest dequeued meta, which is where the decoder is at. + // (when upswitching, we take the meta after a certain delay, so that + // the decoder is left with some cushion) + sp<AMessage> lastDequeueMeta, lastEnqueueMeta; + if (delayUs > 0) { + lastDequeueMeta = source->getMetaAfterLastDequeued(delayUs); + if (lastDequeueMeta == NULL) { + // this means we don't have enough cushion, try again later + ALOGV("[%s] up switching failed due to insufficient buffer", + stream == STREAMTYPE_AUDIO ? "audio" : "video"); + return false; + } + } else { + // It's okay for lastDequeueMeta to be NULL here, it means the + // decoder hasn't even started dequeueing + lastDequeueMeta = source->getLatestDequeuedMeta(); + } + // Then, trim off packets at beginning of mPacketSources2 that's before + // the latest dequeued time. These samples are definitely too late. + firstNewMeta[i] = mPacketSources2.editValueAt(i) + ->trimBuffersBeforeMeta(lastDequeueMeta); + + // Now firstNewMeta[i] is the first sample after the trim. + // If it's NULL, we failed because dequeue already past all samples + // in mPacketSource2, we have to try again. + if (firstNewMeta[i] == NULL) { + HLSTime dequeueTime(lastDequeueMeta); + ALOGV("[%s] dequeue time (%d, %lld) past start time", + stream == STREAMTYPE_AUDIO ? 
"audio" : "video", + dequeueTime.mSeq, (long long) dequeueTime.mTimeUs); + return false; + } + + // Otherwise, we check if mPacketSources2 overlaps with what old fetcher + // already fetched, and see if we need to resumeUntil + lastEnqueueMeta = source->getLatestEnqueuedMeta(); + // lastEnqueueMeta == NULL means old fetcher stopped at a discontinuity + // boundary, no need to resume as the content will look different anyways + if (lastEnqueueMeta != NULL) { + HLSTime lastTime(lastEnqueueMeta), startTime(firstNewMeta[i]); + + // no need to resume old fetcher if new fetcher started in different + // discontinuity sequence, as the content will look different. + *needResumeUntil |= (startTime.mSeq == lastTime.mSeq + && startTime.mTimeUs - lastTime.mTimeUs > kResumeThresholdUs); + + // update the stopTime for resumeUntil + stopParams->setInt32("discontinuitySeq", startTime.mSeq); + stopParams->setInt64(getKeyForStream(stream), startTime.mTimeUs); + } + } + + // if we're here, it means dequeue progress hasn't passed some samples in + // mPacketSource2, we can trim off the excess in mPacketSource. 
+ // (old fetcher might still need to resumeUntil the start time of new fetcher) + for (size_t i = 0; i < kMaxStreams; ++i) { + StreamType stream = indexToType(i); + if (!(mSwapMask & mNewStreamMask & stream) + || (newUri != mStreams[i].mNewUri) + || stream == STREAMTYPE_SUBTITLES) { + continue; + } + mPacketSources.valueFor(stream)->trimBuffersAfterMeta(firstNewMeta[i]); + } + + // no resumeUntil if already underflow + *needResumeUntil &= !mBuffering; + + return true; +} + void LiveSession::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatConnect: @@ -402,7 +493,7 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { case kWhatSeek: { - uint32_t seekReplyID; + sp<AReplyToken> seekReplyID; CHECK(msg->senderAwaitsResponse(&seekReplyID)); mSeekReplyID = seekReplyID; mSeekReply = new AMessage; @@ -426,16 +517,25 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { case PlaylistFetcher::kWhatPaused: case PlaylistFetcher::kWhatStopped: { - if (what == PlaylistFetcher::kWhatStopped) { - AString uri; - CHECK(msg->findString("uri", &uri)); - if (mFetcherInfos.removeItem(uri) < 0) { - // ignore duplicated kWhatStopped messages. 
- break; - } + AString uri; + CHECK(msg->findString("uri", &uri)); + ssize_t index = mFetcherInfos.indexOfKey(uri); + if (index < 0) { + // ignore msgs from fetchers that's already gone + break; + } - if (mSwitchInProgress) { - tryToFinishBandwidthSwitch(); + if (what == PlaylistFetcher::kWhatStopped) { + mFetcherLooper->unregisterHandler( + mFetcherInfos[index].mFetcher->id()); + mFetcherInfos.removeItemsAt(index); + } else if (what == PlaylistFetcher::kWhatPaused) { + int32_t seekMode; + CHECK(msg->findInt32("seekMode", &seekMode)); + for (size_t i = 0; i < kMaxStreams; ++i) { + if (mStreams[i].mUri == uri) { + mStreams[i].mSeekMode = (SeekMode) seekMode; + } } } @@ -443,14 +543,6 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { CHECK_GT(mContinuationCounter, 0); if (--mContinuationCounter == 0) { mContinuation->post(); - - if (mSeekReplyID != 0) { - CHECK(mSeekReply != NULL); - mSeekReply->setInt32("err", OK); - mSeekReply->postReply(mSeekReplyID); - mSeekReplyID = 0; - mSeekReply.clear(); - } } } break; @@ -464,8 +556,21 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { int64_t durationUs; CHECK(msg->findInt64("durationUs", &durationUs)); - FetcherInfo *info = &mFetcherInfos.editValueFor(uri); - info->mDurationUs = durationUs; + ssize_t index = mFetcherInfos.indexOfKey(uri); + if (index >= 0) { + FetcherInfo *info = &mFetcherInfos.editValueFor(uri); + info->mDurationUs = durationUs; + } + break; + } + + case PlaylistFetcher::kWhatTargetDurationUpdate: + { + int64_t targetDurationUs; + CHECK(msg->findInt64("targetDurationUs", &targetDurationUs)); + mUpSwitchMark = min(kUpSwitchMarkUs, targetDurationUs * 7 / 4); + mDownSwitchMark = min(kDownSwitchMarkUs, targetDurationUs * 9 / 4); + mUpSwitchMargin = min(kUpSwitchMarginUs, targetDurationUs); break; } @@ -506,38 +611,23 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { mPacketSources.valueFor( STREAMTYPE_SUBTITLES)->signalEOS(err); - sp<AMessage> notify = 
mNotify->dup(); - notify->setInt32("what", kWhatError); - notify->setInt32("err", err); - notify->post(); + postError(err); break; } - case PlaylistFetcher::kWhatTemporarilyDoneFetching: + case PlaylistFetcher::kWhatStopReached: { - AString uri; - CHECK(msg->findString("uri", &uri)); + ALOGV("kWhatStopReached"); - if (mFetcherInfos.indexOfKey(uri) < 0) { - ALOGE("couldn't find uri"); + AString oldUri; + CHECK(msg->findString("uri", &oldUri)); + + ssize_t index = mFetcherInfos.indexOfKey(oldUri); + if (index < 0) { break; } - FetcherInfo *info = &mFetcherInfos.editValueFor(uri); - info->mIsPrepared = true; - - if (mInPreparationPhase) { - bool allFetchersPrepared = true; - for (size_t i = 0; i < mFetcherInfos.size(); ++i) { - if (!mFetcherInfos.valueAt(i).mIsPrepared) { - allFetchersPrepared = false; - break; - } - } - if (allFetchersPrepared) { - postPrepared(OK); - } - } + tryToFinishBandwidthSwitch(oldUri); break; } @@ -550,15 +640,69 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { break; } - // Resume fetcher for the original variant; the resumed fetcher should - // continue until the timestamps found in msg, which is stored by the - // new fetcher to indicate where the new variant has started buffering. 
- for (size_t i = 0; i < mFetcherInfos.size(); i++) { - const FetcherInfo info = mFetcherInfos.valueAt(i); - if (info.mToBeRemoved) { - info.mFetcher->resumeUntilAsync(msg); + AString uri; + CHECK(msg->findString("uri", &uri)); + + // mark new fetcher mToBeResumed + ssize_t index = mFetcherInfos.indexOfKey(uri); + if (index >= 0) { + mFetcherInfos.editValueAt(index).mToBeResumed = true; + } + + // temporarily disable packet sources to be swapped to prevent + // NuPlayerDecoder from dequeuing while we check progress + for (size_t i = 0; i < mPacketSources.size(); ++i) { + if ((mSwapMask & mPacketSources.keyAt(i)) + && uri == mStreams[i].mNewUri) { + mPacketSources.editValueAt(i)->enable(false); + } + } + bool switchUp = (mCurBandwidthIndex > mOrigBandwidthIndex); + // If switching up, require a cushion bigger than kUnderflowMark + // to avoid buffering immediately after the switch. + // (If we don't have that cushion we'd rather cancel and try again.) + int64_t delayUs = switchUp ? (kUnderflowMarkUs + 1000000ll) : 0; + bool needResumeUntil = false; + sp<AMessage> stopParams = msg; + if (checkSwitchProgress(stopParams, delayUs, &needResumeUntil)) { + // playback time hasn't passed startAt time + if (!needResumeUntil) { + for (size_t i = 0; i < kMaxStreams; ++i) { + if ((mSwapMask & indexToType(i)) + && uri == mStreams[i].mNewUri) { + // have to make a copy of mStreams[i].mUri because + // tryToFinishBandwidthSwitch is modifying mStreams[] + AString oldURI = mStreams[i].mUri; + tryToFinishBandwidthSwitch(oldURI); + break; + } + } + } else { + // startAt time is after last enqueue time + // Resume fetcher for the original variant; the resumed fetcher should + // continue until the timestamps found in msg, which is stored by the + // new fetcher to indicate where the new variant has started buffering. 
+ for (size_t i = 0; i < mFetcherInfos.size(); i++) { + const FetcherInfo &info = mFetcherInfos.valueAt(i); + if (info.mToBeRemoved) { + info.mFetcher->resumeUntilAsync(stopParams); + } + } + } + } else { + // playback time passed startAt time + if (switchUp) { + // if switching up, cancel and retry if condition satisfies again + cancelBandwidthSwitch(true /* resume */); + } else { + resumeFetcher(uri, mSwapMask, -1, true /* newUri */); } } + // re-enable all packet sources + for (size_t i = 0; i < mPacketSources.size(); ++i) { + mPacketSources.editValueAt(i)->enable(true); + } + break; } @@ -569,19 +713,6 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { break; } - case kWhatCheckBandwidth: - { - int32_t generation; - CHECK(msg->findInt32("generation", &generation)); - - if (generation != mCheckBandwidthGeneration) { - break; - } - - onCheckBandwidth(msg); - break; - } - case kWhatChangeConfiguration: { onChangeConfiguration(msg); @@ -606,21 +737,13 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) { break; } - case kWhatSwapped: + case kWhatPollBuffering: { - onSwapped(msg); - break; - } - - case kWhatCheckSwitchDown: - { - onCheckSwitchDown(); - break; - } - - case kWhatSwitchDown: - { - onSwitchDown(); + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation == mPollBufferingGeneration) { + onPollBuffering(); + } break; } @@ -691,6 +814,14 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { return; } + // create looper for fetchers + if (mFetcherLooper == NULL) { + mFetcherLooper = new ALooper(); + + mFetcherLooper->setName("Fetcher"); + mFetcherLooper->start(false, false); + } + // We trust the content provider to make a reasonable choice of preferred // initial bandwidth by listing it first in the variant playlist. 
// At startup we really don't have a good estimate on the available @@ -700,6 +831,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { size_t initialBandwidthIndex = 0; if (mPlaylist->isVariantPlaylist()) { + Vector<BandwidthItem> itemsWithVideo; for (size_t i = 0; i < mPlaylist->size(); ++i) { BandwidthItem item; @@ -711,14 +843,22 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth)); - if (initialBandwidth == 0) { - initialBandwidth = item.mBandwidth; - } - mBandwidthItems.push(item); + if (mPlaylist->hasType(i, "video")) { + itemsWithVideo.push(item); + } + } + // remove the audio-only variants if we have at least one with video + if (!itemsWithVideo.empty() + && itemsWithVideo.size() < mBandwidthItems.size()) { + mBandwidthItems.clear(); + for (size_t i = 0; i < itemsWithVideo.size(); ++i) { + mBandwidthItems.push(itemsWithVideo[i]); + } } CHECK_GT(mBandwidthItems.size(), 0u); + initialBandwidth = mBandwidthItems[0].mBandwidth; mBandwidthItems.sort(SortByBandwidth); @@ -742,22 +882,20 @@ void LiveSession::onConnect(const sp<AMessage> &msg) { } void LiveSession::finishDisconnect() { + ALOGV("finishDisconnect"); + // No reconfiguration is currently pending, make sure none will trigger // during disconnection either. - cancelCheckBandwidthEvent(); - - // Protect mPacketSources from a swapPacketSource race condition through disconnect. 
- // (finishDisconnect, onFinishDisconnect2) cancelBandwidthSwitch(); - // cancel switch down monitor - mSwitchDownMonitor.clear(); + // cancel buffer polling + cancelPollBuffering(); for (size_t i = 0; i < mFetcherInfos.size(); ++i) { mFetcherInfos.valueAt(i).mFetcher->stopAsync(); } - sp<AMessage> msg = new AMessage(kWhatFinishDisconnect2, id()); + sp<AMessage> msg = new AMessage(kWhatFinishDisconnect2, this); mContinuationCounter = mFetcherInfos.size(); mContinuation = msg; @@ -780,7 +918,7 @@ void LiveSession::onFinishDisconnect2() { response->setInt32("err", OK); response->postReply(mDisconnectReplyID); - mDisconnectReplyID = 0; + mDisconnectReplyID.clear(); } sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) { @@ -790,16 +928,16 @@ sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) { return NULL; } - sp<AMessage> notify = new AMessage(kWhatFetcherNotify, id()); + sp<AMessage> notify = new AMessage(kWhatFetcherNotify, this); notify->setString("uri", uri); notify->setInt32("switchGeneration", mSwitchGeneration); FetcherInfo info; info.mFetcher = new PlaylistFetcher(notify, this, uri, mSubtitleGeneration); info.mDurationUs = -1ll; - info.mIsPrepared = false; info.mToBeRemoved = false; - looper()->registerHandler(info.mFetcher); + info.mToBeResumed = false; + mFetcherLooper->registerHandler(info.mFetcher); mFetcherInfos.add(uri, info); @@ -827,14 +965,15 @@ ssize_t LiveSession::fetchFile( int64_t range_offset, int64_t range_length, uint32_t block_size, /* download block size */ sp<DataSource> *source, /* to return and reuse source */ - String8 *actualUrl) { + String8 *actualUrl, + bool forceConnectHTTP /* force connect HTTP when resuing source */) { off64_t size; sp<DataSource> temp_source; if (source == NULL) { source = &temp_source; } - if (*source == NULL) { + if (*source == NULL || forceConnectHTTP) { if (!strncasecmp(url, "file://", 7)) { *source = new FileSource(url + 7); } else if (strncasecmp(url, "http://", 7) @@ -853,13 +992,18 @@ 
ssize_t LiveSession::fetchFile( ? "" : AStringPrintf("%lld", range_offset + range_length - 1).c_str()).c_str())); } - status_t err = mHTTPDataSource->connect(url, &headers); + + HTTPBase* httpDataSource = + (*source == NULL) ? mHTTPDataSource.get() : (HTTPBase*)source->get(); + status_t err = httpDataSource->connect(url, &headers); if (err != OK) { return err; } - *source = mHTTPDataSource; + if (*source == NULL) { + *source = mHTTPDataSource; + } } } @@ -949,6 +1093,9 @@ sp<M3UParser> LiveSession::fetchPlaylist( String8 actualUrl; ssize_t err = fetchFile(url, &buffer, 0, -1, 0, NULL, &actualUrl); + // close off the connection after use + mHTTPDataSource->disconnect(); + if (err <= 0) { return NULL; } @@ -995,8 +1142,108 @@ static double uniformRand() { } #endif -size_t LiveSession::getBandwidthIndex() { - if (mBandwidthItems.size() == 0) { +bool LiveSession::resumeFetcher( + const AString &uri, uint32_t streamMask, int64_t timeUs, bool newUri) { + ssize_t index = mFetcherInfos.indexOfKey(uri); + if (index < 0) { + ALOGE("did not find fetcher for uri: %s", uri.c_str()); + return false; + } + + bool resume = false; + sp<AnotherPacketSource> sources[kMaxStreams]; + for (size_t i = 0; i < kMaxStreams; ++i) { + if ((streamMask & indexToType(i)) + && ((!newUri && uri == mStreams[i].mUri) + || (newUri && uri == mStreams[i].mNewUri))) { + resume = true; + if (newUri) { + sources[i] = mPacketSources2.valueFor(indexToType(i)); + sources[i]->clear(); + } else { + sources[i] = mPacketSources.valueFor(indexToType(i)); + } + } + } + + if (resume) { + ALOGV("resuming fetcher %s, timeUs %lld", uri.c_str(), (long long)timeUs); + SeekMode seekMode = newUri ? 
kSeekModeNextSample : kSeekModeExactPosition; + mFetcherInfos.editValueAt(index).mFetcher->startAsync( + sources[kAudioIndex], + sources[kVideoIndex], + sources[kSubtitleIndex], + timeUs, -1, -1, seekMode); + } + + return resume; +} + +float LiveSession::getAbortThreshold( + ssize_t currentBWIndex, ssize_t targetBWIndex) const { + float abortThreshold = -1.0f; + if (currentBWIndex > 0 && targetBWIndex < currentBWIndex) { + /* + If we're switching down, we need to decide whether to + + 1) finish last segment of high-bandwidth variant, or + 2) abort last segment of high-bandwidth variant, and fetch an + overlapping portion from low-bandwidth variant. + + Here we try to maximize the amount of buffer left when the + switch point is met. Given the following parameters: + + B: our current buffering level in seconds + T: target duration in seconds + X: sample duration in seconds remain to fetch in last segment + bw0: bandwidth of old variant (as specified in playlist) + bw1: bandwidth of new variant (as specified in playlist) + bw: measured bandwidth available + + If we choose 1), when switch happens at the end of current + segment, our buffering will be + B + X - X * bw0 / bw + + If we choose 2), when switch happens where we aborted current + segment, our buffering will be + B - (T - X) * bw1 / bw + + We should only choose 1) if + X/T < bw1 / (bw1 + bw0 - bw) + */ + + // Taking the measured current bandwidth at 50% face value only, + // as our bandwidth estimation is a lagging indicator. Being + // conservative on this, we prefer switching to lower bandwidth + // unless we're really confident finishing up the last segment + // of higher bandwidth will be fast. 
+ CHECK(mLastBandwidthBps >= 0); + abortThreshold = + (float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth + / ((float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth + + (float)mBandwidthItems.itemAt(currentBWIndex).mBandwidth + - (float)mLastBandwidthBps * 0.5f); + if (abortThreshold < 0.0f) { + abortThreshold = -1.0f; // do not abort + } + ALOGV("Switching Down: bps %ld => %ld, measured %d, abort ratio %.2f", + mBandwidthItems.itemAt(currentBWIndex).mBandwidth, + mBandwidthItems.itemAt(targetBWIndex).mBandwidth, + mLastBandwidthBps, + abortThreshold); + } + return abortThreshold; +} + +void LiveSession::addBandwidthMeasurement(size_t numBytes, int64_t delayUs) { + mBandwidthEstimator->addBandwidthMeasurement(numBytes, delayUs); +} + +size_t LiveSession::getBandwidthIndex(int32_t bandwidthBps) { + if (mBandwidthItems.size() < 2) { + // shouldn't be here if we only have 1 bandwidth, check + // logic to get rid of redundant bandwidth polling + ALOGW("getBandwidthIndex() called for single bandwidth playlist!"); return 0; } @@ -1014,15 +1261,6 @@ size_t LiveSession::getBandwidthIndex() { } if (index < 0) { - int32_t bandwidthBps; - if (mHTTPDataSource != NULL - && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) { - ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f); - } else { - ALOGV("no bandwidth estimate."); - return 0; // Pick the lowest bandwidth stream by default. - } - char value[PROPERTY_VALUE_MAX]; if (property_get("media.httplive.max-bw", value, NULL)) { char *end; @@ -1039,15 +1277,9 @@ size_t LiveSession::getBandwidthIndex() { index = mBandwidthItems.size() - 1; while (index > 0) { - // consider only 80% of the available bandwidth, but if we are switching up, - // be even more conservative (70%) to avoid overestimating and immediately - // switching back. 
- size_t adjustedBandwidthBps = bandwidthBps; - if (index > mCurBandwidthIndex) { - adjustedBandwidthBps = adjustedBandwidthBps * 7 / 10; - } else { - adjustedBandwidthBps = adjustedBandwidthBps * 8 / 10; - } + // be conservative (70%) to avoid overestimating and immediately + // switching down again. + size_t adjustedBandwidthBps = bandwidthBps * 7 / 10; if (mBandwidthItems.itemAt(index).mBandwidth <= adjustedBandwidthBps) { break; } @@ -1107,22 +1339,14 @@ size_t LiveSession::getBandwidthIndex() { return index; } -int64_t LiveSession::latestMediaSegmentStartTimeUs() { - sp<AMessage> audioMeta = mPacketSources.valueFor(STREAMTYPE_AUDIO)->getLatestDequeuedMeta(); - int64_t minSegmentStartTimeUs = -1, videoSegmentStartTimeUs = -1; - if (audioMeta != NULL) { - audioMeta->findInt64("segmentStartTimeUs", &minSegmentStartTimeUs); - } +HLSTime LiveSession::latestMediaSegmentStartTime() const { + HLSTime audioTime(mPacketSources.valueFor( + STREAMTYPE_AUDIO)->getLatestDequeuedMeta()); - sp<AMessage> videoMeta = mPacketSources.valueFor(STREAMTYPE_VIDEO)->getLatestDequeuedMeta(); - if (videoMeta != NULL - && videoMeta->findInt64("segmentStartTimeUs", &videoSegmentStartTimeUs)) { - if (minSegmentStartTimeUs < 0 || videoSegmentStartTimeUs < minSegmentStartTimeUs) { - minSegmentStartTimeUs = videoSegmentStartTimeUs; - } + HLSTime videoTime(mPacketSources.valueFor( + STREAMTYPE_VIDEO)->getLatestDequeuedMeta()); - } - return minSegmentStartTimeUs; + return audioTime < videoTime ? 
videoTime : audioTime; } status_t LiveSession::onSeek(const sp<AMessage> &msg) { @@ -1130,7 +1354,7 @@ status_t LiveSession::onSeek(const sp<AMessage> &msg) { CHECK(msg->findInt64("timeUs", &timeUs)); if (!mReconfigurationInProgress) { - changeConfiguration(timeUs, mCurBandwidthIndex); + changeConfiguration(timeUs); return OK; } else { return -EWOULDBLOCK; @@ -1185,8 +1409,7 @@ status_t LiveSession::selectTrack(size_t index, bool select) { ++mSubtitleGeneration; status_t err = mPlaylist->selectTrack(index, select); if (err == OK) { - sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, id()); - msg->setInt32("bandwidthIndex", mCurBandwidthIndex); + sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, this); msg->setInt32("pickTrack", select); msg->post(); } @@ -1201,35 +1424,18 @@ ssize_t LiveSession::getSelectedTrack(media_track_type type) const { } } -bool LiveSession::canSwitchUp() { - // Allow upwards bandwidth switch when a stream has buffered at least 10 seconds. - status_t err = OK; - for (size_t i = 0; i < mPacketSources.size(); ++i) { - sp<AnotherPacketSource> source = mPacketSources.valueAt(i); - int64_t dur = source->getBufferedDurationUs(&err); - if (err == OK && dur > 10000000) { - return true; - } - } - return false; -} - void LiveSession::changeConfiguration( - int64_t timeUs, size_t bandwidthIndex, bool pickTrack) { - // Protect mPacketSources from a swapPacketSource race condition through reconfiguration. - // (changeConfiguration, onChangeConfiguration2, onChangeConfiguration3). 
+ int64_t timeUs, ssize_t bandwidthIndex, bool pickTrack) { cancelBandwidthSwitch(); CHECK(!mReconfigurationInProgress); mReconfigurationInProgress = true; - - mCurBandwidthIndex = bandwidthIndex; - - ALOGV("changeConfiguration => timeUs:%" PRId64 " us, bwIndex:%zu, pickTrack:%d", - timeUs, bandwidthIndex, pickTrack); - - CHECK_LT(bandwidthIndex, mBandwidthItems.size()); - const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex); + if (bandwidthIndex >= 0) { + mOrigBandwidthIndex = mCurBandwidthIndex; + mCurBandwidthIndex = bandwidthIndex; + } + CHECK_LT(mCurBandwidthIndex, mBandwidthItems.size()); + const BandwidthItem &item = mBandwidthItems.itemAt(mCurBandwidthIndex); uint32_t streamMask = 0; // streams that should be fetched by the new fetcher uint32_t resumeMask = 0; // streams that should be fetched by the original fetcher @@ -1244,38 +1450,58 @@ void LiveSession::changeConfiguration( // Step 1, stop and discard fetchers that are no longer needed. // Pause those that we'll reuse. for (size_t i = 0; i < mFetcherInfos.size(); ++i) { - const AString &uri = mFetcherInfos.keyAt(i); - - bool discardFetcher = true; + // skip fetchers that are marked mToBeRemoved, + // these are done and can't be reused + if (mFetcherInfos[i].mToBeRemoved) { + continue; + } - // If we're seeking all current fetchers are discarded. 
- if (timeUs < 0ll) { - // delay fetcher removal if not picking tracks - discardFetcher = pickTrack; + const AString &uri = mFetcherInfos.keyAt(i); + sp<PlaylistFetcher> &fetcher = mFetcherInfos.editValueAt(i).mFetcher; - for (size_t j = 0; j < kMaxStreams; ++j) { - StreamType type = indexToType(j); - if ((streamMask & type) && uri == URIs[j]) { - resumeMask |= type; - streamMask &= ~type; - discardFetcher = false; - } + bool discardFetcher = true, delayRemoval = false; + for (size_t j = 0; j < kMaxStreams; ++j) { + StreamType type = indexToType(j); + if ((streamMask & type) && uri == URIs[j]) { + resumeMask |= type; + streamMask &= ~type; + discardFetcher = false; } } + // Delay fetcher removal if not picking tracks, AND old fetcher + // has stream mask that overlaps new variant. (Okay to discard + // old fetcher now, if completely no overlap.) + if (discardFetcher && timeUs < 0ll && !pickTrack + && (fetcher->getStreamTypeMask() & streamMask)) { + discardFetcher = false; + delayRemoval = true; + } if (discardFetcher) { - mFetcherInfos.valueAt(i).mFetcher->stopAsync(); + fetcher->stopAsync(); } else { - mFetcherInfos.valueAt(i).mFetcher->pauseAsync(); + float threshold = -1.0f; // always finish fetching by default + if (timeUs >= 0ll) { + // seeking, no need to finish fetching + threshold = 0.0f; + } else if (delayRemoval) { + // adapting, abort if remaining of current segment is over threshold + threshold = getAbortThreshold( + mOrigBandwidthIndex, mCurBandwidthIndex); + } + + ALOGV("Pausing with threshold %.3f", threshold); + + fetcher->pauseAsync(threshold); } } sp<AMessage> msg; if (timeUs < 0ll) { // skip onChangeConfiguration2 (decoder destruction) if not seeking. 
- msg = new AMessage(kWhatChangeConfiguration3, id()); + msg = new AMessage(kWhatChangeConfiguration3, this); } else { - msg = new AMessage(kWhatChangeConfiguration2, id()); + msg = new AMessage(kWhatChangeConfiguration2, this); } msg->setInt32("streamMask", streamMask); msg->setInt32("resumeMask", resumeMask); @@ -1296,23 +1522,14 @@ void LiveSession::changeConfiguration( if (mContinuationCounter == 0) { msg->post(); - - if (mSeekReplyID != 0) { - CHECK(mSeekReply != NULL); - mSeekReply->setInt32("err", OK); - mSeekReply->postReply(mSeekReplyID); - mSeekReplyID = 0; - mSeekReply.clear(); - } } } void LiveSession::onChangeConfiguration(const sp<AMessage> &msg) { if (!mReconfigurationInProgress) { - int32_t pickTrack = 0, bandwidthIndex = mCurBandwidthIndex; + int32_t pickTrack = 0; msg->findInt32("pickTrack", &pickTrack); - msg->findInt32("bandwidthIndex", &bandwidthIndex); - changeConfiguration(-1ll /* timeUs */, bandwidthIndex, pickTrack); + changeConfiguration(-1ll /* timeUs */, -1, pickTrack); } else { msg->post(1000000ll); // retry in 1 sec } @@ -1323,13 +1540,42 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) { // All fetchers are either suspended or have been removed now. + // If we're seeking, clear all packet sources before we report + // seek complete, to prevent decoder from pulling stale data. 
+ int64_t timeUs; + CHECK(msg->findInt64("timeUs", &timeUs)); + + if (timeUs >= 0) { + mLastSeekTimeUs = timeUs; + + for (size_t i = 0; i < mPacketSources.size(); i++) { + mPacketSources.editValueAt(i)->clear(); + } + + for (size_t i = 0; i < kMaxStreams; ++i) { + mStreams[i].mCurDiscontinuitySeq = 0; + } + + mDiscontinuityOffsetTimesUs.clear(); + mDiscontinuityAbsStartTimesUs.clear(); + + if (mSeekReplyID != NULL) { + CHECK(mSeekReply != NULL); + mSeekReply->setInt32("err", OK); + mSeekReply->postReply(mSeekReplyID); + mSeekReplyID.clear(); + mSeekReply.clear(); + } + + // restart buffer polling after seek becauese previous + // buffering position is no longer valid. + restartPollBuffering(); + } + uint32_t streamMask, resumeMask; CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask)); CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask)); - // currently onChangeConfiguration2 is only called for seeking; - // remove the following CHECK if using it else where. - CHECK_EQ(resumeMask, 0); streamMask |= resumeMask; AString URIs[kMaxStreams]; @@ -1341,17 +1587,25 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) { } } - // Determine which decoders to shutdown on the player side, - // a decoder has to be shutdown if either - // 1) its streamtype was active before but now longer isn't. - // or - // 2) its streamtype was already active and still is but the URI - // has changed. uint32_t changedMask = 0; for (size_t i = 0; i < kMaxStreams && i != kSubtitleIndex; ++i) { - if (((mStreamMask & streamMask & indexToType(i)) - && !(URIs[i] == mStreams[i].mUri)) - || (mStreamMask & ~streamMask & indexToType(i))) { + // stream URI could change even if onChangeConfiguration2 is only + // used for seek. Seek could happen during a bw switch, in this + // case bw switch will be cancelled, but the seekTo position will + // fetch from the new URI. 
+ if ((mStreamMask & streamMask & indexToType(i)) + && !mStreams[i].mUri.empty() + && !(URIs[i] == mStreams[i].mUri)) { + ALOGV("stream %zu changed: oldURI %s, newURI %s", i, + mStreams[i].mUri.c_str(), URIs[i].c_str()); + sp<AnotherPacketSource> source = mPacketSources.valueFor(indexToType(i)); + source->queueDiscontinuity( + ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true); + } + // Determine which decoders to shutdown on the player side, + // a decoder has to be shutdown if its streamtype was active + // before but now longer isn't. + if ((mStreamMask & ~streamMask & indexToType(i))) { changedMask |= indexToType(i); } } @@ -1372,7 +1626,7 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) { notify->setInt32("changedMask", changedMask); msg->setWhat(kWhatChangeConfiguration3); - msg->setTarget(id()); + msg->setTarget(this); notify->setMessage("reply", msg); notify->post(); @@ -1387,6 +1641,8 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) { CHECK(msg->findInt32("streamMask", (int32_t *)&streamMask)); CHECK(msg->findInt32("resumeMask", (int32_t *)&resumeMask)); + mNewStreamMask = streamMask | resumeMask; + int64_t timeUs; int32_t pickTrack; bool switching = false; @@ -1395,7 +1651,19 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) { if (timeUs < 0ll) { if (!pickTrack) { - switching = true; + // mSwapMask contains streams that are in both old and new variant, + // (in mNewStreamMask & mStreamMask) but with different URIs + // (not in resumeMask). + // For example, old variant has video and audio in two separate + // URIs, and new variant has only audio with unchanged URI. mSwapMask + // should be 0 as there is nothing to swap. We only need to stop video, + // and resume audio. 
+ mSwapMask = mNewStreamMask & mStreamMask & ~resumeMask; + switching = (mSwapMask != 0); + if (!switching) { + ALOGV("#### Finishing Bandwidth Switch Early: %zd => %zd", + mOrigBandwidthIndex, mCurBandwidthIndex); + } } mRealTimeBaseUs = ALooper::GetNowUs() - mLastDequeuedTimeUs; } else { @@ -1412,47 +1680,18 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) { } } - mNewStreamMask = streamMask | resumeMask; - if (switching) { - mSwapMask = mStreamMask & ~resumeMask; - } - // Of all existing fetchers: // * Resume fetchers that are still needed and assign them original packet sources. // * Mark otherwise unneeded fetchers for removal. ALOGV("resuming fetchers for mask 0x%08x", resumeMask); for (size_t i = 0; i < mFetcherInfos.size(); ++i) { const AString &uri = mFetcherInfos.keyAt(i); - - sp<AnotherPacketSource> sources[kMaxStreams]; - for (size_t j = 0; j < kMaxStreams; ++j) { - if ((resumeMask & indexToType(j)) && uri == mStreams[j].mUri) { - sources[j] = mPacketSources.valueFor(indexToType(j)); - - if (j != kSubtitleIndex) { - ALOGV("queueing dummy discontinuity for stream type %d", indexToType(j)); - sp<AnotherPacketSource> discontinuityQueue; - discontinuityQueue = mDiscontinuities.valueFor(indexToType(j)); - discontinuityQueue->queueDiscontinuity( - ATSParser::DISCONTINUITY_NONE, - NULL, - true); - } - } - } - - FetcherInfo &info = mFetcherInfos.editValueAt(i); - if (sources[kAudioIndex] != NULL || sources[kVideoIndex] != NULL - || sources[kSubtitleIndex] != NULL) { - info.mFetcher->startAsync( - sources[kAudioIndex], sources[kVideoIndex], sources[kSubtitleIndex]); - } else { - info.mToBeRemoved = true; + if (!resumeFetcher(uri, resumeMask, timeUs)) { + mFetcherInfos.editValueAt(i).mToBeRemoved = true; } } // streamMask now only contains the types that need a new fetcher created. 
- if (streamMask != 0) { ALOGV("creating new fetchers for mask 0x%08x", streamMask); } @@ -1470,13 +1709,12 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) { sp<PlaylistFetcher> fetcher = addFetcher(uri.c_str()); CHECK(fetcher != NULL); - int64_t startTimeUs = -1; - int64_t segmentStartTimeUs = -1ll; - int32_t discontinuitySeq = -1; + HLSTime startTime; + SeekMode seekMode = kSeekModeExactPosition; sp<AnotherPacketSource> sources[kMaxStreams]; - if (i == kSubtitleIndex) { - segmentStartTimeUs = latestMediaSegmentStartTimeUs(); + if (i == kSubtitleIndex || (!pickTrack && !switching)) { + startTime = latestMediaSegmentStartTime(); } // TRICKY: looping from i as earlier streams are already removed from streamMask @@ -1486,63 +1724,50 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) { sources[j] = mPacketSources.valueFor(indexToType(j)); if (timeUs >= 0) { - sources[j]->clear(); - startTimeUs = timeUs; - - sp<AnotherPacketSource> discontinuityQueue; - sp<AMessage> extra = new AMessage; - extra->setInt64("timeUs", timeUs); - discontinuityQueue = mDiscontinuities.valueFor(indexToType(j)); - discontinuityQueue->queueDiscontinuity( - ATSParser::DISCONTINUITY_TIME, extra, true); + startTime.mTimeUs = timeUs; } else { int32_t type; sp<AMessage> meta; - if (pickTrack) { - // selecting + if (!switching) { + // selecting, or adapting but no swap required meta = sources[j]->getLatestDequeuedMeta(); } else { - // adapting + // adapting and swap required meta = sources[j]->getLatestEnqueuedMeta(); - } - - if (meta != NULL && !meta->findInt32("discontinuity", &type)) { - int64_t tmpUs; - int64_t tmpSegmentUs; - - CHECK(meta->findInt64("timeUs", &tmpUs)); - CHECK(meta->findInt64("segmentStartTimeUs", &tmpSegmentUs)); - if (startTimeUs < 0 || tmpSegmentUs < segmentStartTimeUs) { - startTimeUs = tmpUs; - segmentStartTimeUs = tmpSegmentUs; - } else if (tmpSegmentUs == segmentStartTimeUs && tmpUs < startTimeUs) { - startTimeUs = tmpUs; + if (meta 
!= NULL && mCurBandwidthIndex > mOrigBandwidthIndex) { + // switching up + meta = sources[j]->getMetaAfterLastDequeued(mUpSwitchMargin); } + } - int32_t seq; - CHECK(meta->findInt32("discontinuitySeq", &seq)); - if (discontinuitySeq < 0 || seq < discontinuitySeq) { - discontinuitySeq = seq; + if (j != kSubtitleIndex && meta != NULL + && !meta->findInt32("discontinuity", &type)) { + HLSTime tmpTime(meta); + if (startTime < tmpTime) { + startTime = tmpTime; } } - if (pickTrack) { - // selecting track, queue discontinuities before content + if (!switching) { + // selecting, or adapting but no swap required sources[j]->clear(); if (j == kSubtitleIndex) { break; } - sp<AnotherPacketSource> discontinuityQueue; - discontinuityQueue = mDiscontinuities.valueFor(indexToType(j)); - discontinuityQueue->queueDiscontinuity( - ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true); + + ALOGV("stream[%zu]: queue format change", j); + sources[j]->queueDiscontinuity( + ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, true); } else { - // adapting, queue discontinuities after resume + // switching, queue discontinuities after resume sources[j] = mPacketSources2.valueFor(indexToType(j)); sources[j]->clear(); - uint32_t extraStreams = mNewStreamMask & (~mStreamMask); - if (extraStreams & indexToType(j)) { - sources[j]->queueAccessUnit(createFormatChangeBuffer(/*swap*/ false)); + // the new fetcher might be providing streams that used to be + // provided by two different fetchers, if one of the fetcher + // paused in the middle while the other somehow paused in next + // seg, we have to start from next seg. + if (seekMode < mStreams[j].mSeekMode) { + seekMode = mStreams[j].mSeekMode; } } } @@ -1551,54 +1776,89 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) { } } + // Set the target segment start time to the middle point of the + // segment where the last sample was. + // This gives a better guess if segments of the two variants are not + // perfectly aligned. 
(If the corresponding segment in new variant + // starts slightly later than that in the old variant, we still want + // to pick that segment, not the one before) fetcher->startAsync( sources[kAudioIndex], sources[kVideoIndex], sources[kSubtitleIndex], - startTimeUs < 0 ? mLastSeekTimeUs : startTimeUs, - segmentStartTimeUs, - discontinuitySeq, - switching); + startTime.mTimeUs < 0 ? mLastSeekTimeUs : startTime.mTimeUs, + startTime.getSegmentTimeUs(true /* midpoint */), + startTime.mSeq, + seekMode); } // All fetchers have now been started, the configuration change // has completed. - cancelCheckBandwidthEvent(); - scheduleCheckBandwidthEvent(); - ALOGV("XXX configuration change completed."); mReconfigurationInProgress = false; if (switching) { mSwitchInProgress = true; } else { mStreamMask = mNewStreamMask; + mOrigBandwidthIndex = mCurBandwidthIndex; } - if (mDisconnectReplyID != 0) { + if (mDisconnectReplyID != NULL) { finishDisconnect(); } } -void LiveSession::onSwapped(const sp<AMessage> &msg) { - int32_t switchGeneration; - CHECK(msg->findInt32("switchGeneration", &switchGeneration)); - if (switchGeneration != mSwitchGeneration) { +void LiveSession::swapPacketSource(StreamType stream) { + ALOGV("swapPacketSource: stream = %d", stream); + + // transfer packets from source2 to source + sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream); + sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream); + + // queue discontinuity in mPacketSource + aps->queueDiscontinuity(ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, false); + + // queue packets in mPacketSource2 to mPacketSource + status_t finalResult = OK; + sp<ABuffer> accessUnit; + while (aps2->hasBufferAvailable(&finalResult) && finalResult == OK && + OK == aps2->dequeueAccessUnit(&accessUnit)) { + aps->queueAccessUnit(accessUnit); + } + aps2->clear(); +} + +void LiveSession::tryToFinishBandwidthSwitch(const AString &oldUri) { + if (!mSwitchInProgress) { + return; + } + + ssize_t index = 
mFetcherInfos.indexOfKey(oldUri); + if (index < 0 || !mFetcherInfos[index].mToBeRemoved) { return; } - int32_t stream; - CHECK(msg->findInt32("stream", &stream)); + // Swap packet source of streams provided by old variant + for (size_t idx = 0; idx < kMaxStreams; idx++) { + StreamType stream = indexToType(idx); + if ((mSwapMask & stream) && (oldUri == mStreams[idx].mUri)) { + swapPacketSource(stream); - ssize_t idx = typeToIndex(stream); - CHECK(idx >= 0); - if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) { - ALOGW("swapping stream type %d %s to empty stream", stream, mStreams[idx].mUri.c_str()); + if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) { + ALOGW("swapping stream type %d %s to empty stream", + stream, mStreams[idx].mUri.c_str()); + } + mStreams[idx].mUri = mStreams[idx].mNewUri; + mStreams[idx].mNewUri.clear(); + + mSwapMask &= ~stream; + } } - mStreams[idx].mUri = mStreams[idx].mNewUri; - mStreams[idx].mNewUri.clear(); - mSwapMask &= ~stream; + mFetcherInfos.editValueAt(index).mFetcher->stopAsync(false /* clear */); + + ALOGV("tryToFinishBandwidthSwitch: mSwapMask=%x", mSwapMask); if (mSwapMask != 0) { return; } @@ -1606,155 +1866,308 @@ void LiveSession::onSwapped(const sp<AMessage> &msg) { // Check if new variant contains extra streams. 
uint32_t extraStreams = mNewStreamMask & (~mStreamMask); while (extraStreams) { - StreamType extraStream = (StreamType) (extraStreams & ~(extraStreams - 1)); - swapPacketSource(extraStream); - extraStreams &= ~extraStream; + StreamType stream = (StreamType) (extraStreams & ~(extraStreams - 1)); + extraStreams &= ~stream; - idx = typeToIndex(extraStream); + swapPacketSource(stream); + + ssize_t idx = typeToIndex(stream); CHECK(idx >= 0); if (mStreams[idx].mNewUri.empty()) { ALOGW("swapping extra stream type %d %s to empty stream", - extraStream, mStreams[idx].mUri.c_str()); + stream, mStreams[idx].mUri.c_str()); } mStreams[idx].mUri = mStreams[idx].mNewUri; mStreams[idx].mNewUri.clear(); } - tryToFinishBandwidthSwitch(); -} - -void LiveSession::onCheckSwitchDown() { - if (mSwitchDownMonitor == NULL) { - return; + // Restart new fetcher (it was paused after the first 47k block) + // and let it fetch into mPacketSources (not mPacketSources2) + for (size_t i = 0; i < mFetcherInfos.size(); ++i) { + FetcherInfo &info = mFetcherInfos.editValueAt(i); + if (info.mToBeResumed) { + resumeFetcher(mFetcherInfos.keyAt(i), mNewStreamMask); + info.mToBeResumed = false; + } } - if (mSwitchInProgress || mReconfigurationInProgress) { - ALOGV("Switch/Reconfig in progress, defer switch down"); - mSwitchDownMonitor->post(1000000ll); - return; - } + ALOGI("#### Finished Bandwidth Switch: %zd => %zd", + mOrigBandwidthIndex, mCurBandwidthIndex); - for (size_t i = 0; i < kMaxStreams; ++i) { - int32_t targetDuration; - sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(indexToType(i)); - sp<AMessage> meta = packetSource->getLatestDequeuedMeta(); + mStreamMask = mNewStreamMask; + mSwitchInProgress = false; + mOrigBandwidthIndex = mCurBandwidthIndex; - if (meta != NULL && meta->findInt32("targetDuration", &targetDuration) ) { - int64_t bufferedDurationUs = packetSource->getEstimatedDurationUs(); - int64_t targetDurationUs = targetDuration * 1000000ll; + restartPollBuffering(); +} - 
if (bufferedDurationUs < targetDurationUs / 3) { - (new AMessage(kWhatSwitchDown, id()))->post(); - break; - } - } - } +void LiveSession::schedulePollBuffering() { + sp<AMessage> msg = new AMessage(kWhatPollBuffering, this); + msg->setInt32("generation", mPollBufferingGeneration); + msg->post(1000000ll); +} - mSwitchDownMonitor->post(1000000ll); +void LiveSession::cancelPollBuffering() { + ++mPollBufferingGeneration; + mPrevBufferPercentage = -1; } -void LiveSession::onSwitchDown() { - if (mReconfigurationInProgress || mSwitchInProgress || mCurBandwidthIndex == 0) { - return; - } +void LiveSession::restartPollBuffering() { + cancelPollBuffering(); + onPollBuffering(); +} + +void LiveSession::onPollBuffering() { + ALOGV("onPollBuffering: mSwitchInProgress %d, mReconfigurationInProgress %d, " + "mInPreparationPhase %d, mCurBandwidthIndex %zd, mStreamMask 0x%x", + mSwitchInProgress, mReconfigurationInProgress, + mInPreparationPhase, mCurBandwidthIndex, mStreamMask); + + bool underflow, ready, down, up; + if (checkBuffering(underflow, ready, down, up)) { + if (mInPreparationPhase && ready) { + postPrepared(OK); + } + + // don't switch before we report prepared + if (!mInPreparationPhase) { + if (ready) { + stopBufferingIfNecessary(); + } else if (underflow) { + startBufferingIfNecessary(); + } + switchBandwidthIfNeeded(up, down); + } - ssize_t bandwidthIndex = getBandwidthIndex(); - if (bandwidthIndex < mCurBandwidthIndex) { - changeConfiguration(-1, bandwidthIndex, false); - return; } + schedulePollBuffering(); } -// Mark switch done when: -// 1. 
all old buffers are swapped out -void LiveSession::tryToFinishBandwidthSwitch() { +void LiveSession::cancelBandwidthSwitch(bool resume) { + ALOGV("cancelBandwidthSwitch: mSwitchGen(%d)++, orig %zd, cur %zd", + mSwitchGeneration, mOrigBandwidthIndex, mCurBandwidthIndex); if (!mSwitchInProgress) { return; } - bool needToRemoveFetchers = false; for (size_t i = 0; i < mFetcherInfos.size(); ++i) { - if (mFetcherInfos.valueAt(i).mToBeRemoved) { - needToRemoveFetchers = true; - break; + FetcherInfo& info = mFetcherInfos.editValueAt(i); + if (info.mToBeRemoved) { + info.mToBeRemoved = false; + if (resume) { + resumeFetcher(mFetcherInfos.keyAt(i), mSwapMask); + } } } - if (!needToRemoveFetchers && mSwapMask == 0) { - ALOGI("mSwitchInProgress = false"); - mStreamMask = mNewStreamMask; - mSwitchInProgress = false; + for (size_t i = 0; i < kMaxStreams; ++i) { + AString newUri = mStreams[i].mNewUri; + if (!newUri.empty()) { + // clear all mNewUri matching this newUri + for (size_t j = i; j < kMaxStreams; ++j) { + if (mStreams[j].mNewUri == newUri) { + mStreams[j].mNewUri.clear(); + } + } + ALOGV("stopping newUri = %s", newUri.c_str()); + ssize_t index = mFetcherInfos.indexOfKey(newUri); + if (index < 0) { + ALOGE("did not find fetcher for newUri: %s", newUri.c_str()); + continue; + } + FetcherInfo &info = mFetcherInfos.editValueAt(index); + info.mToBeRemoved = true; + info.mFetcher->stopAsync(); + } } -} -void LiveSession::scheduleCheckBandwidthEvent() { - sp<AMessage> msg = new AMessage(kWhatCheckBandwidth, id()); - msg->setInt32("generation", mCheckBandwidthGeneration); - msg->post(10000000ll); -} - -void LiveSession::cancelCheckBandwidthEvent() { - ++mCheckBandwidthGeneration; -} + ALOGI("#### Canceled Bandwidth Switch: %zd => %zd", + mCurBandwidthIndex, mOrigBandwidthIndex); -void LiveSession::cancelBandwidthSwitch() { - Mutex::Autolock lock(mSwapMutex); mSwitchGeneration++; mSwitchInProgress = false; + mCurBandwidthIndex = mOrigBandwidthIndex; mSwapMask = 0; +} - for 
(size_t i = 0; i < mFetcherInfos.size(); ++i) { - FetcherInfo& info = mFetcherInfos.editValueAt(i); - if (info.mToBeRemoved) { - info.mToBeRemoved = false; - } +bool LiveSession::checkBuffering( + bool &underflow, bool &ready, bool &down, bool &up) { + underflow = ready = down = up = false; + + if (mReconfigurationInProgress) { + ALOGV("Switch/Reconfig in progress, defer buffer polling"); + return false; } - for (size_t i = 0; i < kMaxStreams; ++i) { - if (!mStreams[i].mNewUri.empty()) { - ssize_t j = mFetcherInfos.indexOfKey(mStreams[i].mNewUri); - if (j < 0) { - mStreams[i].mNewUri.clear(); - continue; + size_t activeCount, underflowCount, readyCount, downCount, upCount; + activeCount = underflowCount = readyCount = downCount = upCount =0; + int32_t minBufferPercent = -1; + int64_t durationUs; + if (getDuration(&durationUs) != OK) { + durationUs = -1; + } + for (size_t i = 0; i < mPacketSources.size(); ++i) { + // we don't check subtitles for buffering level + if (!(mStreamMask & mPacketSources.keyAt(i) + & (STREAMTYPE_AUDIO | STREAMTYPE_VIDEO))) { + continue; + } + // ignore streams that never had any packet queued. + // (it's possible that the variant only has audio or video) + sp<AMessage> meta = mPacketSources[i]->getLatestEnqueuedMeta(); + if (meta == NULL) { + continue; + } + + int64_t bufferedDurationUs = + mPacketSources[i]->getEstimatedDurationUs(); + ALOGV("source[%zu]: buffered %lld us", i, (long long)bufferedDurationUs); + if (durationUs >= 0) { + int32_t percent; + if (mPacketSources[i]->isFinished(0 /* duration */)) { + percent = 100; + } else { + percent = (int32_t)(100.0 * (mLastDequeuedTimeUs + bufferedDurationUs) / durationUs); + } + if (minBufferPercent < 0 || percent < minBufferPercent) { + minBufferPercent = percent; } + } - const FetcherInfo &info = mFetcherInfos.valueAt(j); - info.mFetcher->stopAsync(); - mFetcherInfos.removeItemsAt(j); - mStreams[i].mNewUri.clear(); + ++activeCount; + int64_t readyMark = mInPreparationPhase ? 
kPrepareMarkUs : kReadyMarkUs; + if (bufferedDurationUs > readyMark + || mPacketSources[i]->isFinished(0)) { + ++readyCount; + } + if (!mPacketSources[i]->isFinished(0)) { + if (bufferedDurationUs < kUnderflowMarkUs) { + ++underflowCount; + } + if (bufferedDurationUs > mUpSwitchMark) { + ++upCount; + } + if (bufferedDurationUs < mDownSwitchMark) { + ++downCount; + } } } -} -bool LiveSession::canSwitchBandwidthTo(size_t bandwidthIndex) { - if (mReconfigurationInProgress || mSwitchInProgress) { - return false; + if (minBufferPercent >= 0) { + notifyBufferingUpdate(minBufferPercent); } - if (mCurBandwidthIndex < 0) { + if (activeCount > 0) { + up = (upCount == activeCount); + down = (downCount > 0); + ready = (readyCount == activeCount); + underflow = (underflowCount > 0); return true; } - if (bandwidthIndex == (size_t)mCurBandwidthIndex) { - return false; - } else if (bandwidthIndex > (size_t)mCurBandwidthIndex) { - return canSwitchUp(); - } else { - return true; + return false; +} + +void LiveSession::startBufferingIfNecessary() { + ALOGV("startBufferingIfNecessary: mInPreparationPhase=%d, mBuffering=%d", + mInPreparationPhase, mBuffering); + if (!mBuffering) { + mBuffering = true; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatBufferingStart); + notify->post(); } } -void LiveSession::onCheckBandwidth(const sp<AMessage> &msg) { - size_t bandwidthIndex = getBandwidthIndex(); - if (canSwitchBandwidthTo(bandwidthIndex)) { - changeConfiguration(-1ll /* timeUs */, bandwidthIndex); +void LiveSession::stopBufferingIfNecessary() { + ALOGV("stopBufferingIfNecessary: mInPreparationPhase=%d, mBuffering=%d", + mInPreparationPhase, mBuffering); + + if (mBuffering) { + mBuffering = false; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatBufferingEnd); + notify->post(); + } +} + +void LiveSession::notifyBufferingUpdate(int32_t percentage) { + if (percentage < mPrevBufferPercentage) { + percentage = mPrevBufferPercentage; + } else 
if (percentage > 100) { + percentage = 100; + } + + mPrevBufferPercentage = percentage; + + ALOGV("notifyBufferingUpdate: percentage=%d%%", percentage); + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatBufferingUpdate); + notify->setInt32("percentage", percentage); + notify->post(); +} + +void LiveSession::switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow) { + // no need to check bandwidth if we only have 1 bandwidth settings + if (mSwitchInProgress || mBandwidthItems.size() < 2) { + return; + } + + int32_t bandwidthBps; + if (mBandwidthEstimator->estimateBandwidth(&bandwidthBps)) { + ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f); + mLastBandwidthBps = bandwidthBps; } else { - // Come back and check again 10 seconds later in case there is nothing to do now. - // If we DO change configuration, once that completes it'll schedule a new - // check bandwidth event with an incremented mCheckBandwidthGeneration. - msg->post(10000000ll); + ALOGV("no bandwidth estimate."); + return; } + + int32_t curBandwidth = mBandwidthItems.itemAt(mCurBandwidthIndex).mBandwidth; + // canSwithDown and canSwitchUp can't both be true. + // we only want to switch up when measured bw is 120% higher than current variant, + // and we only want to switch down when measured bw is below current variant. + bool canSwithDown = bufferLow + && (bandwidthBps < (int32_t)curBandwidth); + bool canSwitchUp = bufferHigh + && (bandwidthBps > (int32_t)curBandwidth * 12 / 10); + + if (canSwithDown || canSwitchUp) { + ssize_t bandwidthIndex = getBandwidthIndex(bandwidthBps); + + // it's possible that we're checking for canSwitchUp case, but the returned + // bandwidthIndex is < mCurBandwidthIndex, as getBandwidthIndex() only uses 70% + // of measured bw. In that case we don't want to do anything, since we have + // both enough buffer and enough bw. 
+ if (bandwidthIndex == mCurBandwidthIndex + || (canSwitchUp && bandwidthIndex < mCurBandwidthIndex) + || (canSwithDown && bandwidthIndex > mCurBandwidthIndex)) { + return; + } + + ALOGI("#### Starting Bandwidth Switch: %zd => %zd", + mCurBandwidthIndex, bandwidthIndex); + changeConfiguration(-1, bandwidthIndex, false); + } +} + +void LiveSession::postError(status_t err) { + // if we reached EOS, notify buffering of 100% + if (err == ERROR_END_OF_STREAM) { + notifyBufferingUpdate(100); + } + // we'll stop buffer polling now, before that notify + // stop buffering to stop the spinning icon + stopBufferingIfNecessary(); + cancelPollBuffering(); + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatError); + notify->setInt32("err", err); + notify->post(); } void LiveSession::postPrepared(status_t err) { @@ -1764,6 +2177,8 @@ void LiveSession::postPrepared(status_t err) { if (err == OK || err == ERROR_END_OF_STREAM) { notify->setInt32("what", kWhatPrepared); } else { + cancelPollBuffering(); + notify->setInt32("what", kWhatPreparationFailed); notify->setInt32("err", err); } @@ -1771,10 +2186,8 @@ void LiveSession::postPrepared(status_t err) { notify->post(); mInPreparationPhase = false; - - mSwitchDownMonitor = new AMessage(kWhatCheckSwitchDown, id()); - mSwitchDownMonitor->post(); } + } // namespace android diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h index 2d3a25a..b5e31c9 100644 --- a/media/libstagefright/httplive/LiveSession.h +++ b/media/libstagefright/httplive/LiveSession.h @@ -26,6 +26,7 @@ namespace android { struct ABuffer; +struct AReplyToken; struct AnotherPacketSource; struct DataSource; struct HTTPBase; @@ -33,16 +34,13 @@ struct IMediaHTTPService; struct LiveDataSource; struct M3UParser; struct PlaylistFetcher; +struct HLSTime; struct LiveSession : public AHandler { enum Flags { // Don't log any URLs. 
kFlagIncognito = 1, }; - LiveSession( - const sp<AMessage> ¬ify, - uint32_t flags, - const sp<IMediaHTTPService> &httpService); enum StreamIndex { kAudioIndex = 0, @@ -56,10 +54,24 @@ struct LiveSession : public AHandler { STREAMTYPE_VIDEO = 1 << kVideoIndex, STREAMTYPE_SUBTITLES = 1 << kSubtitleIndex, }; + + enum SeekMode { + kSeekModeExactPosition = 0, // used for seeking + kSeekModeNextSample = 1, // used for seamless switching + kSeekModeNextSegment = 2, // used for seamless switching + }; + + LiveSession( + const sp<AMessage> ¬ify, + uint32_t flags, + const sp<IMediaHTTPService> &httpService); + status_t dequeueAccessUnit(StreamType stream, sp<ABuffer> *accessUnit); status_t getStreamFormat(StreamType stream, sp<AMessage> *format); + sp<HTTPBase> getHTTPDataSource(); + void connectAsync( const char *url, const KeyedVector<String8, String8> *headers = NULL); @@ -78,18 +90,18 @@ struct LiveSession : public AHandler { bool isSeekable() const; bool hasDynamicDuration() const; + static const char *getKeyForStream(StreamType type); + enum { kWhatStreamsChanged, kWhatError, kWhatPrepared, kWhatPreparationFailed, + kWhatBufferingStart, + kWhatBufferingEnd, + kWhatBufferingUpdate, }; - // create a format-change discontinuity - // - // swap: - // whether is format-change discontinuity should trigger a buffer swap - sp<ABuffer> createFormatChangeBuffer(bool swap = true); protected: virtual ~LiveSession(); @@ -103,18 +115,25 @@ private: kWhatDisconnect = 'disc', kWhatSeek = 'seek', kWhatFetcherNotify = 'notf', - kWhatCheckBandwidth = 'bndw', kWhatChangeConfiguration = 'chC0', kWhatChangeConfiguration2 = 'chC2', kWhatChangeConfiguration3 = 'chC3', kWhatFinishDisconnect2 = 'fin2', - kWhatSwapped = 'swap', - kWhatCheckSwitchDown = 'ckSD', - kWhatSwitchDown = 'sDwn', + kWhatPollBuffering = 'poll', }; - static const size_t kBandwidthHistoryBytes; + // Bandwidth Switch Mark Defaults + static const int64_t kUpSwitchMarkUs; + static const int64_t kDownSwitchMarkUs; + static const 
int64_t kUpSwitchMarginUs; + static const int64_t kResumeThresholdUs; + + // Buffer Prepare/Ready/Underflow Marks + static const int64_t kReadyMarkUs; + static const int64_t kPrepareMarkUs; + static const int64_t kUnderflowMarkUs; + struct BandwidthEstimator; struct BandwidthItem { size_t mPlaylistIndex; unsigned long mBandwidth; @@ -123,23 +142,22 @@ private: struct FetcherInfo { sp<PlaylistFetcher> mFetcher; int64_t mDurationUs; - bool mIsPrepared; bool mToBeRemoved; + bool mToBeResumed; }; struct StreamItem { const char *mType; AString mUri, mNewUri; + SeekMode mSeekMode; size_t mCurDiscontinuitySeq; int64_t mLastDequeuedTimeUs; int64_t mLastSampleDurationUs; StreamItem() - : mType(""), - mCurDiscontinuitySeq(0), - mLastDequeuedTimeUs(0), - mLastSampleDurationUs(0) {} + : StreamItem("") {} StreamItem(const char *type) : mType(type), + mSeekMode(kSeekModeExactPosition), mCurDiscontinuitySeq(0), mLastDequeuedTimeUs(0), mLastSampleDurationUs(0) {} @@ -155,8 +173,10 @@ private: uint32_t mFlags; sp<IMediaHTTPService> mHTTPService; + bool mBuffering; bool mInPreparationPhase; - bool mBuffering[kMaxStreams]; + int32_t mPollBufferingGeneration; + int32_t mPrevBufferPercentage; sp<HTTPBase> mHTTPDataSource; KeyedVector<String8, String8> mExtraHeaders; @@ -165,9 +185,13 @@ private: Vector<BandwidthItem> mBandwidthItems; ssize_t mCurBandwidthIndex; + ssize_t mOrigBandwidthIndex; + int32_t mLastBandwidthBps; + sp<BandwidthEstimator> mBandwidthEstimator; sp<M3UParser> mPlaylist; + sp<ALooper> mFetcherLooper; KeyedVector<AString, FetcherInfo> mFetcherInfos; uint32_t mStreamMask; @@ -180,17 +204,10 @@ private: // we use this to track reconfiguration progress. uint32_t mSwapMask; - KeyedVector<StreamType, sp<AnotherPacketSource> > mDiscontinuities; KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources; // A second set of packet sources that buffer content for the variant we're switching to. 
KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources2; - // A mutex used to serialize two sets of events: - // * the swapping of packet sources in dequeueAccessUnit on the player thread, AND - // * a forced bandwidth switch termination in cancelSwitch on the live looper. - Mutex mSwapMutex; - - int32_t mCheckBandwidthGeneration; int32_t mSwitchGeneration; int32_t mSubtitleGeneration; @@ -203,13 +220,16 @@ private: bool mReconfigurationInProgress; bool mSwitchInProgress; - uint32_t mDisconnectReplyID; - uint32_t mSeekReplyID; + int64_t mUpSwitchMark; + int64_t mDownSwitchMark; + int64_t mUpSwitchMargin; + + sp<AReplyToken> mDisconnectReplyID; + sp<AReplyToken> mSeekReplyID; bool mFirstTimeUsValid; int64_t mFirstTimeUs; int64_t mLastSeekTimeUs; - sp<AMessage> mSwitchDownMonitor; KeyedVector<size_t, int64_t> mDiscontinuityAbsStartTimesUs; KeyedVector<size_t, int64_t> mDiscontinuityOffsetTimesUs; @@ -238,45 +258,54 @@ private: uint32_t block_size = 0, /* reuse DataSource if doing partial fetch */ sp<DataSource> *source = NULL, - String8 *actualUrl = NULL); + String8 *actualUrl = NULL, + /* force connect http even when resuing DataSource */ + bool forceConnectHTTP = false); sp<M3UParser> fetchPlaylist( const char *url, uint8_t *curPlaylistHash, bool *unchanged); - size_t getBandwidthIndex(); - int64_t latestMediaSegmentStartTimeUs(); + bool resumeFetcher( + const AString &uri, uint32_t streamMask, + int64_t timeUs = -1ll, bool newUri = false); + + float getAbortThreshold( + ssize_t currentBWIndex, ssize_t targetBWIndex) const; + void addBandwidthMeasurement(size_t numBytes, int64_t delayUs); + size_t getBandwidthIndex(int32_t bandwidthBps); + HLSTime latestMediaSegmentStartTime() const; static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *); static StreamType indexToType(int idx); static ssize_t typeToIndex(int32_t type); void changeConfiguration( - int64_t timeUs, size_t bandwidthIndex, bool pickTrack = false); + int64_t timeUs, ssize_t 
bwIndex = -1, bool pickTrack = false); void onChangeConfiguration(const sp<AMessage> &msg); void onChangeConfiguration2(const sp<AMessage> &msg); void onChangeConfiguration3(const sp<AMessage> &msg); - void onSwapped(const sp<AMessage> &msg); - void onCheckSwitchDown(); - void onSwitchDown(); - void tryToFinishBandwidthSwitch(); - - void scheduleCheckBandwidthEvent(); - void cancelCheckBandwidthEvent(); - - // cancelBandwidthSwitch is atomic wrt swapPacketSource; call it to prevent packet sources - // from being swapped out on stale discontinuities while manipulating - // mPacketSources/mPacketSources2. - void cancelBandwidthSwitch(); - bool canSwitchBandwidthTo(size_t bandwidthIndex); - void onCheckBandwidth(const sp<AMessage> &msg); + void swapPacketSource(StreamType stream); + void tryToFinishBandwidthSwitch(const AString &oldUri); + void cancelBandwidthSwitch(bool resume = false); + bool checkSwitchProgress( + sp<AMessage> &msg, int64_t delayUs, bool *needResumeUntil); + + void switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow); + + void schedulePollBuffering(); + void cancelPollBuffering(); + void restartPollBuffering(); + void onPollBuffering(); + bool checkBuffering(bool &underflow, bool &ready, bool &down, bool &up); + void startBufferingIfNecessary(); + void stopBufferingIfNecessary(); + void notifyBufferingUpdate(int32_t percentage); void finishDisconnect(); void postPrepared(status_t err); - - void swapPacketSource(StreamType stream); - bool canSwitchUp(); + void postError(status_t err); DISALLOW_EVIL_CONSTRUCTORS(LiveSession); }; diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index 997b694..7bb7f2c 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -251,6 +251,7 @@ M3UParser::M3UParser( mIsComplete(false), mIsEvent(false), mDiscontinuitySeq(0), + mDiscontinuityCount(0), mSelectedIndex(-1) { mInitCheck = parse(data, size); } @@ -394,7 
+395,9 @@ ssize_t M3UParser::getSelectedTrack(media_track_type type) const { bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const { if (!mIsVariantPlaylist) { - *uri = mBaseURI; + if (uri != NULL) { + *uri = mBaseURI; + } // Assume media without any more specific attribute contains // audio and video, but no subtitles. @@ -407,7 +410,9 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const { AString groupID; if (!meta->findString(key, &groupID)) { - *uri = mItems.itemAt(index).mURI; + if (uri != NULL) { + *uri = mItems.itemAt(index).mURI; + } AString codecs; if (!meta->findString("codecs", &codecs)) { @@ -433,18 +438,26 @@ bool M3UParser::getTypeURI(size_t index, const char *key, AString *uri) const { } } - sp<MediaGroup> group = mMediaGroups.valueFor(groupID); - if (!group->getActiveURI(uri)) { - return false; - } + // if uri == NULL, we're only checking if the type is present, + // don't care about the active URI (or if there is an active one) + if (uri != NULL) { + sp<MediaGroup> group = mMediaGroups.valueFor(groupID); + if (!group->getActiveURI(uri)) { + return false; + } - if ((*uri).empty()) { - *uri = mItems.itemAt(index).mURI; + if ((*uri).empty()) { + *uri = mItems.itemAt(index).mURI; + } } return true; } +bool M3UParser::hasType(size_t index, const char *key) const { + return getTypeURI(index, key, NULL /* uri */); +} + static bool MakeURL(const char *baseURL, const char *url, AString *out) { out->clear(); @@ -582,6 +595,7 @@ status_t M3UParser::parse(const void *_data, size_t size) { itemMeta = new AMessage; } itemMeta->setInt32("discontinuity", true); + ++mDiscontinuityCount; } else if (line.startsWith("#EXT-X-STREAM-INF")) { if (mMeta != NULL) { return ERROR_MALFORMED; @@ -609,6 +623,9 @@ status_t M3UParser::parse(const void *_data, size_t size) { } else if (line.startsWith("#EXT-X-MEDIA")) { err = parseMedia(line); } else if (line.startsWith("#EXT-X-DISCONTINUITY-SEQUENCE")) { + if 
(mIsVariantPlaylist) { + return ERROR_MALFORMED; + } size_t seq; err = parseDiscontinuitySequence(line, &seq); if (err == OK) { @@ -628,6 +645,8 @@ status_t M3UParser::parse(const void *_data, size_t size) { || !itemMeta->findInt64("durationUs", &durationUs)) { return ERROR_MALFORMED; } + itemMeta->setInt32("discontinuity-sequence", + mDiscontinuitySeq + mDiscontinuityCount); } mItems.push(); @@ -644,6 +663,14 @@ status_t M3UParser::parse(const void *_data, size_t size) { ++lineNo; } + // error checking of all fields that's required to appear once + // (currently only checking "target-duration") + int32_t targetDurationSecs; + if (!mIsVariantPlaylist && (mMeta == NULL || !mMeta->findInt32( + "target-duration", &targetDurationSecs))) { + return ERROR_MALFORMED; + } + return OK; } diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h index 1cad060..fef361f 100644 --- a/media/libstagefright/httplive/M3UParser.h +++ b/media/libstagefright/httplive/M3UParser.h @@ -50,6 +50,7 @@ struct M3UParser : public RefBase { ssize_t getSelectedTrack(media_track_type /* type */) const; bool getTypeURI(size_t index, const char *key, AString *uri) const; + bool hasType(size_t index, const char *key) const; protected: virtual ~M3UParser(); @@ -70,6 +71,7 @@ private: bool mIsComplete; bool mIsEvent; size_t mDiscontinuitySeq; + int32_t mDiscontinuityCount; sp<AMessage> mMeta; Vector<Item> mItems; diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp index 1227600..368612d 100644 --- a/media/libstagefright/httplive/PlaylistFetcher.cpp +++ b/media/libstagefright/httplive/PlaylistFetcher.cpp @@ -33,6 +33,7 @@ #include <media/stagefright/foundation/ABitReader.h> #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AUtils.h> #include <media/stagefright/foundation/hexdump.h> #include 
<media/stagefright/FileSource.h> #include <media/stagefright/MediaDefs.h> @@ -47,11 +48,96 @@ namespace android { // static -const int64_t PlaylistFetcher::kMinBufferedDurationUs = 10000000ll; +const int64_t PlaylistFetcher::kMinBufferedDurationUs = 30000000ll; const int64_t PlaylistFetcher::kMaxMonitorDelayUs = 3000000ll; // LCM of 188 (size of a TS packet) & 1k works well const int32_t PlaylistFetcher::kDownloadBlockSize = 47 * 1024; -const int32_t PlaylistFetcher::kNumSkipFrames = 5; + +struct PlaylistFetcher::DownloadState : public RefBase { + DownloadState(); + void resetState(); + bool hasSavedState() const; + void restoreState( + AString &uri, + sp<AMessage> &itemMeta, + sp<ABuffer> &buffer, + sp<ABuffer> &tsBuffer, + int32_t &firstSeqNumberInPlaylist, + int32_t &lastSeqNumberInPlaylist); + void saveState( + AString &uri, + sp<AMessage> &itemMeta, + sp<ABuffer> &buffer, + sp<ABuffer> &tsBuffer, + int32_t &firstSeqNumberInPlaylist, + int32_t &lastSeqNumberInPlaylist); + +private: + bool mHasSavedState; + AString mUri; + sp<AMessage> mItemMeta; + sp<ABuffer> mBuffer; + sp<ABuffer> mTsBuffer; + int32_t mFirstSeqNumberInPlaylist; + int32_t mLastSeqNumberInPlaylist; +}; + +PlaylistFetcher::DownloadState::DownloadState() { + resetState(); +} + +bool PlaylistFetcher::DownloadState::hasSavedState() const { + return mHasSavedState; +} + +void PlaylistFetcher::DownloadState::resetState() { + mHasSavedState = false; + + mUri.clear(); + mItemMeta = NULL; + mBuffer = NULL; + mTsBuffer = NULL; + mFirstSeqNumberInPlaylist = 0; + mLastSeqNumberInPlaylist = 0; +} + +void PlaylistFetcher::DownloadState::restoreState( + AString &uri, + sp<AMessage> &itemMeta, + sp<ABuffer> &buffer, + sp<ABuffer> &tsBuffer, + int32_t &firstSeqNumberInPlaylist, + int32_t &lastSeqNumberInPlaylist) { + if (!mHasSavedState) { + return; + } + + uri = mUri; + itemMeta = mItemMeta; + buffer = mBuffer; + tsBuffer = mTsBuffer; + firstSeqNumberInPlaylist = mFirstSeqNumberInPlaylist; + 
lastSeqNumberInPlaylist = mLastSeqNumberInPlaylist; + + resetState(); +} + +void PlaylistFetcher::DownloadState::saveState( + AString &uri, + sp<AMessage> &itemMeta, + sp<ABuffer> &buffer, + sp<ABuffer> &tsBuffer, + int32_t &firstSeqNumberInPlaylist, + int32_t &lastSeqNumberInPlaylist) { + mHasSavedState = true; + + mUri = uri; + mItemMeta = itemMeta; + mBuffer = buffer; + mTsBuffer = tsBuffer; + mFirstSeqNumberInPlaylist = firstSeqNumberInPlaylist; + mLastSeqNumberInPlaylist = lastSeqNumberInPlaylist; +} PlaylistFetcher::PlaylistFetcher( const sp<AMessage> ¬ify, @@ -59,7 +145,6 @@ PlaylistFetcher::PlaylistFetcher( const char *uri, int32_t subtitleGeneration) : mNotify(notify), - mStartTimeUsNotify(notify->dup()), mSession(session), mURI(uri), mStreamTypeMask(0), @@ -71,18 +156,21 @@ PlaylistFetcher::PlaylistFetcher( mSeqNumber(-1), mNumRetries(0), mStartup(true), - mAdaptive(false), - mPrepared(false), + mIDRFound(false), + mSeekMode(LiveSession::kSeekModeExactPosition), + mTimeChangeSignaled(false), mNextPTSTimeUs(-1ll), mMonitorQueueGeneration(0), mSubtitleGeneration(subtitleGeneration), + mLastDiscontinuitySeq(-1ll), mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY), mFirstPTSValid(false), - mAbsoluteTimeAnchorUs(0ll), - mVideoBuffer(new AnotherPacketSource(NULL)) { + mFirstTimeUs(-1ll), + mVideoBuffer(new AnotherPacketSource(NULL)), + mThresholdRatio(-1.0f), + mDownloadState(new DownloadState()) { memset(mPlaylistHash, 0, sizeof(mPlaylistHash)); - mStartTimeUsNotify->setInt32("what", kWhatStartedAt); - mStartTimeUsNotify->setInt32("streamMask", 0); + mHTTPDataSource = mSession->getHTTPDataSource(); } PlaylistFetcher::~PlaylistFetcher() { @@ -119,6 +207,32 @@ int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const { return segmentStartUs; } +int64_t PlaylistFetcher::getSegmentDurationUs(int32_t seqNumber) const { + CHECK(mPlaylist != NULL); + + int32_t firstSeqNumberInPlaylist; + if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( + 
"media-sequence", &firstSeqNumberInPlaylist)) { + firstSeqNumberInPlaylist = 0; + } + + int32_t lastSeqNumberInPlaylist = + firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; + + CHECK_GE(seqNumber, firstSeqNumberInPlaylist); + CHECK_LE(seqNumber, lastSeqNumberInPlaylist); + + int32_t index = seqNumber - firstSeqNumberInPlaylist; + sp<AMessage> itemMeta; + CHECK(mPlaylist->itemAt( + index, NULL /* uri */, &itemMeta)); + + int64_t itemDurationUs; + CHECK(itemMeta->findInt64("durationUs", &itemDurationUs)); + + return itemDurationUs; +} + int64_t PlaylistFetcher::delayUsToRefreshPlaylist() const { int64_t nowUs = ALooper::GetNowUs(); @@ -325,7 +439,7 @@ void PlaylistFetcher::postMonitorQueue(int64_t delayUs, int64_t minDelayUs) { ALOGV("Need to refresh playlist in %" PRId64 , maxDelayUs); delayUs = maxDelayUs; } - sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id()); + sp<AMessage> msg = new AMessage(kWhatMonitorQueue, this); msg->setInt32("generation", mMonitorQueueGeneration); msg->post(delayUs); } @@ -334,6 +448,14 @@ void PlaylistFetcher::cancelMonitorQueue() { ++mMonitorQueueGeneration; } +void PlaylistFetcher::setStoppingThreshold(float thresholdRatio) { + AutoMutex _l(mThresholdLock); + if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) { + return; + } + mThresholdRatio = thresholdRatio; +} + void PlaylistFetcher::startAsync( const sp<AnotherPacketSource> &audioSource, const sp<AnotherPacketSource> &videoSource, @@ -341,8 +463,8 @@ void PlaylistFetcher::startAsync( int64_t startTimeUs, int64_t segmentStartTimeUs, int32_t startDiscontinuitySeq, - bool adaptive) { - sp<AMessage> msg = new AMessage(kWhatStart, id()); + LiveSession::SeekMode seekMode) { + sp<AMessage> msg = new AMessage(kWhatStart, this); uint32_t streamTypeMask = 0ul; @@ -365,22 +487,27 @@ void PlaylistFetcher::startAsync( msg->setInt64("startTimeUs", startTimeUs); msg->setInt64("segmentStartTimeUs", segmentStartTimeUs); msg->setInt32("startDiscontinuitySeq", 
startDiscontinuitySeq); - msg->setInt32("adaptive", adaptive); + msg->setInt32("seekMode", seekMode); msg->post(); } -void PlaylistFetcher::pauseAsync() { - (new AMessage(kWhatPause, id()))->post(); +void PlaylistFetcher::pauseAsync(float thresholdRatio) { + if (thresholdRatio >= 0.0f) { + setStoppingThreshold(thresholdRatio); + } + (new AMessage(kWhatPause, this))->post(); } void PlaylistFetcher::stopAsync(bool clear) { - sp<AMessage> msg = new AMessage(kWhatStop, id()); + setStoppingThreshold(0.0f); + + sp<AMessage> msg = new AMessage(kWhatStop, this); msg->setInt32("clear", clear); msg->post(); } void PlaylistFetcher::resumeUntilAsync(const sp<AMessage> ¶ms) { - AMessage* msg = new AMessage(kWhatResumeUntil, id()); + AMessage* msg = new AMessage(kWhatResumeUntil, this); msg->setMessage("params", params); msg->post(); } @@ -404,6 +531,10 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) { sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatPaused); + notify->setInt32("seekMode", + mDownloadState->hasSavedState() + ? 
LiveSession::kSeekModeNextSample + : LiveSession::kSeekModeNextSegment); notify->post(); break; } @@ -450,6 +581,10 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) { status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) { mPacketSources.clear(); + mStopParams.clear(); + mStartTimeUsNotify = mNotify->dup(); + mStartTimeUsNotify->setInt32("what", kWhatStartedAt); + mStartTimeUsNotify->setString("uri", mURI); uint32_t streamTypeMask; CHECK(msg->findInt32("streamTypeMask", (int32_t *)&streamTypeMask)); @@ -457,11 +592,11 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) { int64_t startTimeUs; int64_t segmentStartTimeUs; int32_t startDiscontinuitySeq; - int32_t adaptive; + int32_t seekMode; CHECK(msg->findInt64("startTimeUs", &startTimeUs)); CHECK(msg->findInt64("segmentStartTimeUs", &segmentStartTimeUs)); CHECK(msg->findInt32("startDiscontinuitySeq", &startDiscontinuitySeq)); - CHECK(msg->findInt32("adaptive", &adaptive)); + CHECK(msg->findInt32("seekMode", &seekMode)); if (streamTypeMask & LiveSession::STREAMTYPE_AUDIO) { void *ptr; @@ -493,14 +628,26 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) { mStreamTypeMask = streamTypeMask; mSegmentStartTimeUs = segmentStartTimeUs; - mDiscontinuitySeq = startDiscontinuitySeq; + + if (startDiscontinuitySeq >= 0) { + mDiscontinuitySeq = startDiscontinuitySeq; + } + + mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY; + mSeekMode = (LiveSession::SeekMode) seekMode; + + if (startTimeUs >= 0 || mSeekMode == LiveSession::kSeekModeNextSample) { + mStartup = true; + mIDRFound = false; + mVideoBuffer->clear(); + } if (startTimeUs >= 0) { mStartTimeUs = startTimeUs; + mFirstPTSValid = false; mSeqNumber = -1; - mStartup = true; - mPrepared = false; - mAdaptive = adaptive; + mTimeChangeSignaled = false; + mDownloadState->resetState(); } postMonitorQueue(); @@ -510,6 +657,9 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) { void PlaylistFetcher::onPause() { cancelMonitorQueue(); 
+ mLastDiscontinuitySeq = mDiscontinuitySeq; + + setStoppingThreshold(-1.0f); } void PlaylistFetcher::onStop(const sp<AMessage> &msg) { @@ -524,8 +674,14 @@ void PlaylistFetcher::onStop(const sp<AMessage> &msg) { } } + // close off the connection after use + mHTTPDataSource->disconnect(); + + mDownloadState->resetState(); mPacketSources.clear(); mStreamTypeMask = 0; + + setStoppingThreshold(-1.0f); } // Resume until we have reached the boundary timestamps listed in `msg`; when @@ -535,57 +691,18 @@ status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) { sp<AMessage> params; CHECK(msg->findMessage("params", ¶ms)); - bool stop = false; - for (size_t i = 0; i < mPacketSources.size(); i++) { - sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i); - - const char *stopKey; - int streamType = mPacketSources.keyAt(i); - switch (streamType) { - case LiveSession::STREAMTYPE_VIDEO: - stopKey = "timeUsVideo"; - break; - - case LiveSession::STREAMTYPE_AUDIO: - stopKey = "timeUsAudio"; - break; - - case LiveSession::STREAMTYPE_SUBTITLES: - stopKey = "timeUsSubtitle"; - break; - - default: - TRESPASS(); - } - - // Don't resume if we would stop within a resume threshold. 
- int32_t discontinuitySeq; - int64_t latestTimeUs = 0, stopTimeUs = 0; - sp<AMessage> latestMeta = packetSource->getLatestEnqueuedMeta(); - if (latestMeta != NULL - && latestMeta->findInt32("discontinuitySeq", &discontinuitySeq) - && discontinuitySeq == mDiscontinuitySeq - && latestMeta->findInt64("timeUs", &latestTimeUs) - && params->findInt64(stopKey, &stopTimeUs) - && stopTimeUs - latestTimeUs < resumeThreshold(latestMeta)) { - stop = true; - } - } - - if (stop) { - for (size_t i = 0; i < mPacketSources.size(); i++) { - mPacketSources.valueAt(i)->queueAccessUnit(mSession->createFormatChangeBuffer()); - } - stopAsync(/* clear = */ false); - return OK; - } - mStopParams = params; - postMonitorQueue(); + onDownloadNext(); return OK; } +void PlaylistFetcher::notifyStopReached() { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatStopReached); + notify->post(); +} + void PlaylistFetcher::notifyError(status_t err) { sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kWhatError); @@ -605,7 +722,12 @@ void PlaylistFetcher::queueDiscontinuity( void PlaylistFetcher::onMonitorQueue() { bool downloadMore = false; - refreshPlaylist(); + + // in the middle of an unfinished download, delay + // playlist refresh as it'll change seq numbers + if (!mDownloadState->hasSavedState()) { + refreshPlaylist(); + } int32_t targetDurationSecs; int64_t targetDurationUs = kMinBufferedDurationUs; @@ -619,27 +741,23 @@ void PlaylistFetcher::onMonitorQueue() { targetDurationUs = targetDurationSecs * 1000000ll; } - // buffer at least 3 times the target duration, or up to 10 seconds - int64_t durationToBufferUs = targetDurationUs * 3; - if (durationToBufferUs > kMinBufferedDurationUs) { - durationToBufferUs = kMinBufferedDurationUs; - } - int64_t bufferedDurationUs = 0ll; - status_t finalResult = NOT_ENOUGH_DATA; + status_t finalResult = OK; if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) { sp<AnotherPacketSource> packetSource = 
mPacketSources.valueFor(LiveSession::STREAMTYPE_SUBTITLES); bufferedDurationUs = packetSource->getBufferedDurationUs(&finalResult); - finalResult = OK; } else { - // Use max stream duration to prevent us from waiting on a non-existent stream; - // when we cannot make out from the manifest what streams are included in a playlist - // we might assume extra streams. + // Use min stream duration, but ignore streams that never have any packet + // enqueued to prevent us from waiting on a non-existent stream; + // when we cannot make out from the manifest what streams are included in + // a playlist we might assume extra streams. + bufferedDurationUs = -1ll; for (size_t i = 0; i < mPacketSources.size(); ++i) { - if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) { + if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0 + || mPacketSources[i]->getLatestEnqueuedMeta() == NULL) { continue; } @@ -647,46 +765,36 @@ void PlaylistFetcher::onMonitorQueue() { mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult); ALOGV("buffered %" PRId64 " for stream %d", bufferedStreamDurationUs, mPacketSources.keyAt(i)); - if (bufferedStreamDurationUs > bufferedDurationUs) { + if (bufferedDurationUs == -1ll + || bufferedStreamDurationUs < bufferedDurationUs) { bufferedDurationUs = bufferedStreamDurationUs; } } - } - downloadMore = (bufferedDurationUs < durationToBufferUs); - - // signal start if buffered up at least the target size - if (!mPrepared && bufferedDurationUs > targetDurationUs && downloadMore) { - mPrepared = true; - - ALOGV("prepared, buffered=%" PRId64 " > %" PRId64 "", - bufferedDurationUs, targetDurationUs); - sp<AMessage> msg = mNotify->dup(); - msg->setInt32("what", kWhatTemporarilyDoneFetching); - msg->post(); + if (bufferedDurationUs == -1ll) { + bufferedDurationUs = 0ll; + } } - if (finalResult == OK && downloadMore) { + if (finalResult == OK && bufferedDurationUs < kMinBufferedDurationUs) { ALOGV("monitoring, buffered=%" PRId64 " < %" PRId64 "", - 
bufferedDurationUs, durationToBufferUs); + bufferedDurationUs, kMinBufferedDurationUs); // delay the next download slightly; hopefully this gives other concurrent fetchers // a better chance to run. // onDownloadNext(); - sp<AMessage> msg = new AMessage(kWhatDownloadNext, id()); + sp<AMessage> msg = new AMessage(kWhatDownloadNext, this); msg->setInt32("generation", mMonitorQueueGeneration); msg->post(1000l); } else { - // Nothing to do yet, try again in a second. - - sp<AMessage> msg = mNotify->dup(); - msg->setInt32("what", kWhatTemporarilyDoneFetching); - msg->post(); - - int64_t delayUs = mPrepared ? kMaxMonitorDelayUs : targetDurationUs / 2; + // We'd like to maintain buffering above durationToBufferUs, so try + // again when buffer just about to go below durationToBufferUs + // (or after targetDurationUs / 2, whichever is smaller). + int64_t delayUs = bufferedDurationUs - kMinBufferedDurationUs + 1000000ll; + if (delayUs > targetDurationUs / 2) { + delayUs = targetDurationUs / 2; + } ALOGV("pausing for %" PRId64 ", buffered=%" PRId64 " > %" PRId64 "", - delayUs, bufferedDurationUs, durationToBufferUs); - // :TRICKY: need to enforce minimum delay because the delay to - // refresh the playlist will become 0 - postMonitorQueue(delayUs, mPrepared ? targetDurationUs * 2 : 0); + delayUs, bufferedDurationUs, kMinBufferedDurationUs); + postMonitorQueue(delayUs); } } @@ -715,6 +823,13 @@ status_t PlaylistFetcher::refreshPlaylist() { if (mPlaylist->isComplete() || mPlaylist->isEvent()) { updateDuration(); } + // Notify LiveSession to use target-duration based buffering level + // for up/down switch. Default LiveSession::kUpSwitchMark may not + // be reachable for live streams, as our max buffering amount is + // limited to 3 segments. 
+ if (!mPlaylist->isComplete()) { + updateTargetDuration(); + } } mLastPlaylistFetchTimeUs = ALooper::GetNowUs(); @@ -727,10 +842,69 @@ bool PlaylistFetcher::bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer) { return buffer->size() > 0 && buffer->data()[0] == 0x47; } -void PlaylistFetcher::onDownloadNext() { +bool PlaylistFetcher::shouldPauseDownload() { + if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) { + // doesn't apply to subtitles + return false; + } + + // Calculate threshold to abort current download + int32_t targetDurationSecs; + CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); + int64_t targetDurationUs = targetDurationSecs * 1000000ll; + int64_t thresholdUs = -1; + { + AutoMutex _l(mThresholdLock); + thresholdUs = (mThresholdRatio < 0.0f) ? + -1ll : mThresholdRatio * targetDurationUs; + } + + if (thresholdUs < 0) { + // never abort + return false; + } else if (thresholdUs == 0) { + // immediately abort + return true; + } + + // now we have a positive thresholdUs, abort if remaining + // portion to download is over that threshold. + if (mSegmentFirstPTS < 0) { + // this means we haven't even find the first access unit, + // abort now as we must be very far away from the end. 
+ return true; + } + int64_t lastEnqueueUs = mSegmentFirstPTS; + for (size_t i = 0; i < mPacketSources.size(); ++i) { + if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) { + continue; + } + sp<AMessage> meta = mPacketSources[i]->getLatestEnqueuedMeta(); + int32_t type; + if (meta == NULL || meta->findInt32("discontinuity", &type)) { + continue; + } + int64_t tmpUs; + CHECK(meta->findInt64("timeUs", &tmpUs)); + if (tmpUs > lastEnqueueUs) { + lastEnqueueUs = tmpUs; + } + } + lastEnqueueUs -= mSegmentFirstPTS; + if (targetDurationUs - lastEnqueueUs > thresholdUs) { + return true; + } + return false; +} + +bool PlaylistFetcher::initDownloadState( + AString &uri, + sp<AMessage> &itemMeta, + int32_t &firstSeqNumberInPlaylist, + int32_t &lastSeqNumberInPlaylist) { status_t err = refreshPlaylist(); - int32_t firstSeqNumberInPlaylist = 0; - int32_t lastSeqNumberInPlaylist = 0; + firstSeqNumberInPlaylist = 0; + lastSeqNumberInPlaylist = 0; bool discontinuity = false; if (mPlaylist != NULL) { @@ -746,6 +920,8 @@ void PlaylistFetcher::onDownloadNext() { } } + mSegmentFirstPTS = -1ll; + if (mPlaylist != NULL && mSeqNumber < 0) { CHECK_GE(mStartTimeUs, 0ll); @@ -773,7 +949,8 @@ void PlaylistFetcher::onDownloadNext() { // timestamps coming from the media container) is used to determine the position // inside a segments. 
mSeqNumber = getSeqNumberForTime(mSegmentStartTimeUs); - if (mAdaptive) { + if (mStreamTypeMask != LiveSession::STREAMTYPE_SUBTITLES + && mSeekMode != LiveSession::kSeekModeNextSample) { // avoid double fetch/decode mSeqNumber += 1; } @@ -823,12 +1000,12 @@ void PlaylistFetcher::onDownloadNext() { mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist, delayUs, mNumRetries); postMonitorQueue(delayUs); - return; + return false; } if (err != OK) { notifyError(err); - return; + return false; } // we've missed the boat, let's start 3 segments prior to the latest sequence @@ -843,12 +1020,8 @@ void PlaylistFetcher::onDownloadNext() { // but since the segments we are supposed to fetch have already rolled off // the playlist, i.e. we have already missed the boat, we inevitably have to // skip. - for (size_t i = 0; i < mPacketSources.size(); i++) { - sp<ABuffer> formatChange = mSession->createFormatChangeBuffer(); - mPacketSources.valueAt(i)->queueAccessUnit(formatChange); - } - stopAsync(/* clear = */ false); - return; + notifyStopReached(); + return false; } mSeqNumber = lastSeqNumberInPlaylist - 3; if (mSeqNumber < firstSeqNumberInPlaylist) { @@ -858,45 +1031,49 @@ void PlaylistFetcher::onDownloadNext() { // fall through } else { - ALOGE("Cannot find sequence number %d in playlist " - "(contains %d - %d)", - mSeqNumber, firstSeqNumberInPlaylist, - firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1); + if (mPlaylist != NULL) { + ALOGE("Cannot find sequence number %d in playlist " + "(contains %d - %d)", + mSeqNumber, firstSeqNumberInPlaylist, + firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1); - notifyError(ERROR_END_OF_STREAM); - return; + notifyError(ERROR_END_OF_STREAM); + } else { + // It's possible that we were never able to download the playlist. + // In this case we should notify error, instead of EOS, as EOS during + // prepare means we succeeded in downloading everything. 
+ ALOGE("Failed to download playlist!"); + notifyError(ERROR_IO); + } + + return false; } } mNumRetries = 0; - AString uri; - sp<AMessage> itemMeta; CHECK(mPlaylist->itemAt( mSeqNumber - firstSeqNumberInPlaylist, &uri, &itemMeta)); + CHECK(itemMeta->findInt32("discontinuity-sequence", &mDiscontinuitySeq)); + int32_t val; if (itemMeta->findInt32("discontinuity", &val) && val != 0) { - mDiscontinuitySeq++; + discontinuity = true; + } else if (mLastDiscontinuitySeq >= 0 + && mDiscontinuitySeq != mLastDiscontinuitySeq) { + // Seek jumped to a new discontinuity sequence. We need to signal + // a format change to decoder. Decoder needs to shutdown and be + // created again if seamless format change is unsupported. + ALOGV("saw discontinuity: mStartup %d, mLastDiscontinuitySeq %d, " + "mDiscontinuitySeq %d, mStartTimeUs %lld", + mStartup, mLastDiscontinuitySeq, mDiscontinuitySeq, (long long)mStartTimeUs); discontinuity = true; } + mLastDiscontinuitySeq = -1; - int64_t range_offset, range_length; - if (!itemMeta->findInt64("range-offset", &range_offset) - || !itemMeta->findInt64("range-length", &range_length)) { - range_offset = 0; - range_length = -1; - } - - ALOGV("fetching segment %d from (%d .. %d)", - mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); - - ALOGV("fetching '%s'", uri.c_str()); - - sp<DataSource> source; - sp<ABuffer> buffer, tsBuffer; // decrypt a junk buffer to prefetch key; since a session uses only one http connection, // this avoids interleaved connections to the key and segment file. { @@ -906,16 +1083,122 @@ void PlaylistFetcher::onDownloadNext() { true /* first */); if (err != OK) { notifyError(err); + return false; + } + } + + if ((mStartup && !mTimeChangeSignaled) || discontinuity) { + // We need to signal a time discontinuity to ATSParser on the + // first segment after start, or on a discontinuity segment. + // Setting mNextPTSTimeUs informs extractAndQueueAccessUnitsXX() + // to send the time discontinuity. 
+ if (mPlaylist->isComplete() || mPlaylist->isEvent()) { + // If this was a live event this made no sense since + // we don't have access to all the segment before the current + // one. + mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber); + } + + // Setting mTimeChangeSignaled to true, so that if start time + // searching goes into 2nd segment (without a discontinuity), + // we don't reset time again. It causes corruption when pending + // data in ATSParser is cleared. + mTimeChangeSignaled = true; + } + + if (discontinuity) { + ALOGI("queueing discontinuity (explicit=%d)", discontinuity); + + // Signal a format discontinuity to ATSParser to clear partial data + // from previous streams. Not doing this causes bitstream corruption. + if (mTSParser != NULL) { + mTSParser->signalDiscontinuity( + ATSParser::DISCONTINUITY_FORMATCHANGE, NULL /* extra */); + } + + queueDiscontinuity( + ATSParser::DISCONTINUITY_FORMATCHANGE, + NULL /* extra */); + + if (mStartup && mStartTimeUsRelative && mFirstPTSValid) { + // This means we guessed mStartTimeUs to be in the previous + // segment (likely very close to the end), but either video or + // audio has not found start by the end of that segment. + // + // If this new segment is not a discontinuity, keep searching. + // + // If this new segment even got a discontinuity marker, just + // set mStartTimeUs=0, and take all samples from now on. + mStartTimeUs = 0; + mFirstPTSValid = false; + } + } + + ALOGV("fetching segment %d from (%d .. 
%d)", + mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist); + return true; +} + +void PlaylistFetcher::onDownloadNext() { + AString uri; + sp<AMessage> itemMeta; + sp<ABuffer> buffer; + sp<ABuffer> tsBuffer; + int32_t firstSeqNumberInPlaylist = 0; + int32_t lastSeqNumberInPlaylist = 0; + bool connectHTTP = true; + + if (mDownloadState->hasSavedState()) { + mDownloadState->restoreState( + uri, + itemMeta, + buffer, + tsBuffer, + firstSeqNumberInPlaylist, + lastSeqNumberInPlaylist); + connectHTTP = false; + ALOGV("resuming: '%s'", uri.c_str()); + } else { + if (!initDownloadState( + uri, + itemMeta, + firstSeqNumberInPlaylist, + lastSeqNumberInPlaylist)) { return; } + ALOGV("fetching: '%s'", uri.c_str()); + } + + int64_t range_offset, range_length; + if (!itemMeta->findInt64("range-offset", &range_offset) + || !itemMeta->findInt64("range-length", &range_length)) { + range_offset = 0; + range_length = -1; } // block-wise download - bool startup = mStartup; + bool shouldPause = false; ssize_t bytesRead; do { + sp<DataSource> source = mHTTPDataSource; + + int64_t startUs = ALooper::GetNowUs(); bytesRead = mSession->fetchFile( - uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize, &source); + uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize, + &source, NULL, connectHTTP); + + // add sample for bandwidth estimation, excluding samples from subtitles (as + // its too small), or during startup/resumeUntil (when we could have more than + // one connection open which affects bandwidth) + if (!mStartup && mStopParams == NULL && bytesRead > 0 + && (mStreamTypeMask + & (LiveSession::STREAMTYPE_AUDIO + | LiveSession::STREAMTYPE_VIDEO))) { + int64_t delayUs = ALooper::GetNowUs() - startUs; + mSession->addBandwidthMeasurement(bytesRead, delayUs); + } + + connectHTTP = false; if (bytesRead < 0) { status_t err = bytesRead; @@ -941,28 +1224,7 @@ void PlaylistFetcher::onDownloadNext() { return; } - if (startup || discontinuity) { - // 
Signal discontinuity. - - if (mPlaylist->isComplete() || mPlaylist->isEvent()) { - // If this was a live event this made no sense since - // we don't have access to all the segment before the current - // one. - mNextPTSTimeUs = getSegmentStartTimeUs(mSeqNumber); - } - - if (discontinuity) { - ALOGI("queueing discontinuity (explicit=%d)", discontinuity); - - queueDiscontinuity( - ATSParser::DISCONTINUITY_FORMATCHANGE, - NULL /* extra */); - - discontinuity = false; - } - - startup = false; - } + bool startUp = mStartup; // save current start up state err = OK; if (bufferStartsWithTsSyncByte(buffer)) { @@ -976,7 +1238,6 @@ void PlaylistFetcher::onDownloadNext() { tsBuffer->setRange(tsOff, tsSize); } tsBuffer->setRange(tsBuffer->offset(), tsBuffer->size() + bytesRead); - err = extractAndQueueAccessUnitsFromTs(tsBuffer); } @@ -991,13 +1252,35 @@ void PlaylistFetcher::onDownloadNext() { return; } else if (err == ERROR_OUT_OF_RANGE) { // reached stopping point - stopAsync(/* clear = */ false); + notifyStopReached(); return; } else if (err != OK) { notifyError(err); return; } - + // If we're switching, post start notification + // this should only be posted when the last chunk is full processed by TSParser + if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) { + CHECK(mStartTimeUsNotify != NULL); + mStartTimeUsNotify->post(); + mStartTimeUsNotify.clear(); + shouldPause = true; + } + if (shouldPause || shouldPauseDownload()) { + // save state and return if this is not the last chunk, + // leaving the fetcher in paused state. 
+ if (bytesRead != 0) { + mDownloadState->saveState( + uri, + itemMeta, + buffer, + tsBuffer, + firstSeqNumberInPlaylist, + lastSeqNumberInPlaylist); + return; + } + shouldPause = true; + } } while (bytesRead != 0); if (bufferStartsWithTsSyncByte(buffer)) { @@ -1034,7 +1317,6 @@ void PlaylistFetcher::onDownloadNext() { return; } - err = OK; if (tsBuffer != NULL) { AString method; CHECK(buffer->meta()->findString("cipher-method", &method)); @@ -1048,30 +1330,40 @@ void PlaylistFetcher::onDownloadNext() { } // bulk extract non-ts files + bool startUp = mStartup; if (tsBuffer == NULL) { - err = extractAndQueueAccessUnits(buffer, itemMeta); + status_t err = extractAndQueueAccessUnits(buffer, itemMeta); if (err == -EAGAIN) { // starting sequence number too low/high postMonitorQueue(); return; } else if (err == ERROR_OUT_OF_RANGE) { // reached stopping point - stopAsync(/* clear = */false); + notifyStopReached(); + return; + } else if (err != OK) { + notifyError(err); return; } } - if (err != OK) { - notifyError(err); - return; - } - ++mSeqNumber; - postMonitorQueue(); + // if adapting, pause after found the next starting point + if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) { + CHECK(mStartTimeUsNotify != NULL); + mStartTimeUsNotify->post(); + mStartTimeUsNotify.clear(); + shouldPause = true; + } + + if (!shouldPause) { + postMonitorQueue(); + } } -int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const { +int32_t PlaylistFetcher::getSeqNumberWithAnchorTime( + int64_t anchorTimeUs, int64_t targetDiffUs) const { int32_t firstSeqNumberInPlaylist, lastSeqNumberInPlaylist; if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) { @@ -1080,7 +1372,8 @@ int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const lastSeqNumberInPlaylist = firstSeqNumberInPlaylist + mPlaylist->size() - 1; int32_t index = mSeqNumber - firstSeqNumberInPlaylist - 1; - 
while (index >= 0 && anchorTimeUs > mStartTimeUs) { + // adjust anchorTimeUs to within targetDiffUs from mStartTimeUs + while (index >= 0 && anchorTimeUs - mStartTimeUs > targetDiffUs) { sp<AMessage> itemMeta; CHECK(mPlaylist->itemAt(index, NULL /* uri */, &itemMeta)); @@ -1101,28 +1394,22 @@ int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const int32_t PlaylistFetcher::getSeqNumberForDiscontinuity(size_t discontinuitySeq) const { int32_t firstSeqNumberInPlaylist; - if (mPlaylist->meta() == NULL - || !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) { + if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( + "media-sequence", &firstSeqNumberInPlaylist)) { firstSeqNumberInPlaylist = 0; } - size_t curDiscontinuitySeq = mPlaylist->getDiscontinuitySeq(); - if (discontinuitySeq < curDiscontinuitySeq) { - return firstSeqNumberInPlaylist <= 0 ? 0 : (firstSeqNumberInPlaylist - 1); - } - size_t index = 0; while (index < mPlaylist->size()) { sp<AMessage> itemMeta; CHECK(mPlaylist->itemAt( index, NULL /* uri */, &itemMeta)); - - int64_t discontinuity; - if (itemMeta->findInt64("discontinuity", &discontinuity)) { - curDiscontinuitySeq++; - } - + size_t curDiscontinuitySeq; + CHECK(itemMeta->findInt32("discontinuity-sequence", (int32_t *)&curDiscontinuitySeq)); + int32_t seqNumber = firstSeqNumberInPlaylist + index; if (curDiscontinuitySeq == discontinuitySeq) { - return firstSeqNumberInPlaylist + index; + return seqNumber; + } else if (curDiscontinuitySeq > discontinuitySeq) { + return seqNumber <= 0 ? 
0 : seqNumber - 1; } ++index; @@ -1182,6 +1469,7 @@ const sp<ABuffer> &PlaylistFetcher::setAccessUnitProperties( accessUnit->meta()->setInt32("discontinuitySeq", mDiscontinuitySeq); accessUnit->meta()->setInt64("segmentStartTimeUs", getSegmentStartTimeUs(mSeqNumber)); + accessUnit->meta()->setInt64("segmentDurationUs", getSegmentDurationUs(mSeqNumber)); return accessUnit; } @@ -1197,12 +1485,16 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu // ATSParser from skewing the timestamps of access units. extra->setInt64(IStreamListener::kKeyMediaTimeUs, 0); + // When adapting, signal a recent media time to the parser, + // so that PTS wrap around is handled for the new variant. + if (mStartTimeUs >= 0 && !mStartTimeUsRelative) { + extra->setInt64(IStreamListener::kKeyRecentMediaTimeUs, mStartTimeUs); + } + mTSParser->signalDiscontinuity( ATSParser::DISCONTINUITY_TIME, extra); - mAbsoluteTimeAnchorUs = mNextPTSTimeUs; mNextPTSTimeUs = -1ll; - mFirstPTSValid = false; } size_t offset = 0; @@ -1222,30 +1514,15 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu for (size_t i = mPacketSources.size(); i-- > 0;) { sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i); - const char *key; - ATSParser::SourceType type; const LiveSession::StreamType stream = mPacketSources.keyAt(i); - switch (stream) { - case LiveSession::STREAMTYPE_VIDEO: - type = ATSParser::VIDEO; - key = "timeUsVideo"; - break; - - case LiveSession::STREAMTYPE_AUDIO: - type = ATSParser::AUDIO; - key = "timeUsAudio"; - break; - - case LiveSession::STREAMTYPE_SUBTITLES: - { - ALOGE("MPEG2 Transport streams do not contain subtitles."); - return ERROR_MALFORMED; - break; - } - - default: - TRESPASS(); + if (stream == LiveSession::STREAMTYPE_SUBTITLES) { + ALOGE("MPEG2 Transport streams do not contain subtitles."); + return ERROR_MALFORMED; } + const char *key = LiveSession::getKeyForStream(stream); + ATSParser::SourceType type = + (stream 
== LiveSession::STREAMTYPE_AUDIO) ? + ATSParser::AUDIO : ATSParser::VIDEO; sp<AnotherPacketSource> source = static_cast<AnotherPacketSource *>( @@ -1255,110 +1532,112 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu continue; } - int64_t timeUs; + const char *mime; + sp<MetaData> format = source->getFormat(); + bool isAvc = format != NULL && format->findCString(kKeyMIMEType, &mime) + && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC); + sp<ABuffer> accessUnit; status_t finalResult; while (source->hasBufferAvailable(&finalResult) && source->dequeueAccessUnit(&accessUnit) == OK) { + int64_t timeUs; CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); + if (mSegmentFirstPTS < 0ll) { + mSegmentFirstPTS = timeUs; + if (!mStartTimeUsRelative) { + int32_t firstSeqNumberInPlaylist; + if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( + "media-sequence", &firstSeqNumberInPlaylist)) { + firstSeqNumberInPlaylist = 0; + } + + int32_t targetDurationSecs; + CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); + int64_t targetDurationUs = targetDurationSecs * 1000000ll; + // mStartup + // mStartup is true until we have queued a packet for all the streams + // we are fetching. We queue packets whose timestamps are greater than + // mStartTimeUs. + // mSegmentStartTimeUs >= 0 + // mSegmentStartTimeUs is non-negative when adapting or switching tracks + // mSeqNumber > firstSeqNumberInPlaylist + // don't decrement mSeqNumber if it already points to the 1st segment + // timeUs - mStartTimeUs > targetDurationUs: + // This and the 2 above conditions should only happen when adapting in a live + // stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher + // would start fetching after timeUs, which should be greater than mStartTimeUs; + // the old fetcher would then continue fetching data until timeUs. 
We don't want + // timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to + // stop as early as possible. The definition of being "too far ahead" is + // arbitrary; here we use targetDurationUs as threshold. + int64_t targetDiffUs = (mSeekMode == LiveSession::kSeekModeNextSample + ? 0 : targetDurationUs); + if (mStartup && mSegmentStartTimeUs >= 0 + && mSeqNumber > firstSeqNumberInPlaylist + && timeUs - mStartTimeUs > targetDiffUs) { + // we just guessed a starting timestamp that is too high when adapting in a + // live stream; re-adjust based on the actual timestamp extracted from the + // media segment; if we didn't move backward after the re-adjustment + // (newSeqNumber), start at least 1 segment prior. + int32_t newSeqNumber = getSeqNumberWithAnchorTime( + timeUs, targetDiffUs); + if (newSeqNumber >= mSeqNumber) { + --mSeqNumber; + } else { + mSeqNumber = newSeqNumber; + } + mStartTimeUsNotify = mNotify->dup(); + mStartTimeUsNotify->setInt32("what", kWhatStartedAt); + mStartTimeUsNotify->setString("uri", mURI); + mIDRFound = false; + return -EAGAIN; + } + } + } if (mStartup) { if (!mFirstPTSValid) { mFirstTimeUs = timeUs; mFirstPTSValid = true; } + bool startTimeReached = true; if (mStartTimeUsRelative) { timeUs -= mFirstTimeUs; if (timeUs < 0) { timeUs = 0; } + startTimeReached = (timeUs >= mStartTimeUs); } - if (timeUs < mStartTimeUs) { - // buffer up to the closest preceding IDR frame - ALOGV("timeUs %" PRId64 " us < mStartTimeUs %" PRId64 " us", - timeUs, mStartTimeUs); - const char *mime; - sp<MetaData> format = source->getFormat(); - bool isAvc = false; - if (format != NULL && format->findCString(kKeyMIMEType, &mime) - && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) { - isAvc = true; - } - if (isAvc && IsIDR(accessUnit)) { - mVideoBuffer->clear(); - } + if (!startTimeReached || (isAvc && !mIDRFound)) { + // buffer up to the closest preceding IDR frame in the next segement, + // or the closest succeeding IDR frame after the exact 
position if (isAvc) { - mVideoBuffer->queueAccessUnit(accessUnit); + if (IsIDR(accessUnit)) { + mVideoBuffer->clear(); + mIDRFound = true; + } + if (mIDRFound && mStartTimeUsRelative && !startTimeReached) { + mVideoBuffer->queueAccessUnit(accessUnit); + } } - - continue; - } - } - - CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs)); - if (mStartTimeUsNotify != NULL && timeUs > mStartTimeUs) { - int32_t firstSeqNumberInPlaylist; - if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( - "media-sequence", &firstSeqNumberInPlaylist)) { - firstSeqNumberInPlaylist = 0; - } - - int32_t targetDurationSecs; - CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); - int64_t targetDurationUs = targetDurationSecs * 1000000ll; - // mStartup - // mStartup is true until we have queued a packet for all the streams - // we are fetching. We queue packets whose timestamps are greater than - // mStartTimeUs. - // mSegmentStartTimeUs >= 0 - // mSegmentStartTimeUs is non-negative when adapting or switching tracks - // mSeqNumber > firstSeqNumberInPlaylist - // don't decrement mSeqNumber if it already points to the 1st segment - // timeUs - mStartTimeUs > targetDurationUs: - // This and the 2 above conditions should only happen when adapting in a live - // stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher - // would start fetching after timeUs, which should be greater than mStartTimeUs; - // the old fetcher would then continue fetching data until timeUs. We don't want - // timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to - // stop as early as possible. The definition of being "too far ahead" is - // arbitrary; here we use targetDurationUs as threshold. 
- if (mStartup && mSegmentStartTimeUs >= 0 - && mSeqNumber > firstSeqNumberInPlaylist - && timeUs - mStartTimeUs > targetDurationUs) { - // we just guessed a starting timestamp that is too high when adapting in a - // live stream; re-adjust based on the actual timestamp extracted from the - // media segment; if we didn't move backward after the re-adjustment - // (newSeqNumber), start at least 1 segment prior. - int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs); - if (newSeqNumber >= mSeqNumber) { - --mSeqNumber; - } else { - mSeqNumber = newSeqNumber; + if (!startTimeReached || (isAvc && !mIDRFound)) { + continue; } - mStartTimeUsNotify = mNotify->dup(); - mStartTimeUsNotify->setInt32("what", kWhatStartedAt); - return -EAGAIN; - } - - int32_t seq; - if (!mStartTimeUsNotify->findInt32("discontinuitySeq", &seq)) { - mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq); } - int64_t startTimeUs; - if (!mStartTimeUsNotify->findInt64(key, &startTimeUs)) { - mStartTimeUsNotify->setInt64(key, timeUs); + } - uint32_t streamMask = 0; - mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask); + if (mStartTimeUsNotify != NULL) { + uint32_t streamMask = 0; + mStartTimeUsNotify->findInt32("streamMask", (int32_t *) &streamMask); + if (!(streamMask & mPacketSources.keyAt(i))) { streamMask |= mPacketSources.keyAt(i); mStartTimeUsNotify->setInt32("streamMask", streamMask); if (streamMask == mStreamTypeMask) { mStartup = false; - mStartTimeUsNotify->post(); - mStartTimeUsNotify.clear(); } } } @@ -1372,7 +1651,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu || !mStopParams->findInt64(key, &stopTimeUs) || (discontinuitySeq == mDiscontinuitySeq && timeUs >= stopTimeUs)) { - packetSource->queueAccessUnit(mSession->createFormatChangeBuffer()); mStreamTypeMask &= ~stream; mPacketSources.removeItemsAt(i); break; @@ -1467,8 +1745,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits( } if (mNextPTSTimeUs >= 0ll) { 
- mFirstPTSValid = false; - mAbsoluteTimeAnchorUs = mNextPTSTimeUs; mNextPTSTimeUs = -1ll; } @@ -1569,7 +1845,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits( CHECK(packetSource->getFormat()->findInt32(kKeySampleRate, &sampleRate)); int64_t timeUs = (PTS * 100ll) / 9ll; - if (!mFirstPTSValid) { + if (mStartup && !mFirstPTSValid) { mFirstPTSValid = true; mFirstTimeUs = timeUs; } @@ -1621,10 +1897,13 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits( CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); int64_t targetDurationUs = targetDurationSecs * 1000000ll; + int64_t targetDiffUs =(mSeekMode == LiveSession::kSeekModeNextSample + ? 0 : targetDurationUs); // Duplicated logic from how we handle .ts playlists. if (mStartup && mSegmentStartTimeUs >= 0 - && timeUs - mStartTimeUs > targetDurationUs) { - int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs); + && timeUs - mStartTimeUs > targetDiffUs) { + int32_t newSeqNumber = getSeqNumberWithAnchorTime( + timeUs, targetDiffUs); if (newSeqNumber >= mSeqNumber) { --mSeqNumber; } else { @@ -1633,11 +1912,7 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits( return -EAGAIN; } - mStartTimeUsNotify->setInt64("timeUsAudio", timeUs); - mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq); mStartTimeUsNotify->setInt32("streamMask", LiveSession::STREAMTYPE_AUDIO); - mStartTimeUsNotify->post(); - mStartTimeUsNotify.clear(); mStartup = false; } } @@ -1650,7 +1925,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits( || discontinuitySeq > mDiscontinuitySeq || !mStopParams->findInt64("timeUsAudio", &stopTimeUs) || (discontinuitySeq == mDiscontinuitySeq && unitTimeUs >= stopTimeUs)) { - packetSource->queueAccessUnit(mSession->createFormatChangeBuffer()); mStreamTypeMask = 0; mPacketSources.clear(); return ERROR_OUT_OF_RANGE; @@ -1687,33 +1961,15 @@ void PlaylistFetcher::updateDuration() { msg->post(); } -int64_t PlaylistFetcher::resumeThreshold(const sp<AMessage> 
&msg) { - int64_t durationUs; - if (msg->findInt64("durationUs", &durationUs) && durationUs > 0) { - return kNumSkipFrames * durationUs; - } - - sp<RefBase> obj; - msg->findObject("format", &obj); - MetaData *format = static_cast<MetaData *>(obj.get()); - - const char *mime; - CHECK(format->findCString(kKeyMIMEType, &mime)); - bool audio = !strncasecmp(mime, "audio/", 6); - if (audio) { - // Assumes 1000 samples per frame. - int32_t sampleRate; - CHECK(format->findInt32(kKeySampleRate, &sampleRate)); - return kNumSkipFrames /* frames */ * 1000 /* samples */ - * (1000000 / sampleRate) /* sample duration (us) */; - } else { - int32_t frameRate; - if (format->findInt32(kKeyFrameRate, &frameRate) && frameRate > 0) { - return kNumSkipFrames * (1000000 / frameRate); - } - } +void PlaylistFetcher::updateTargetDuration() { + int32_t targetDurationSecs; + CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs)); + int64_t targetDurationUs = targetDurationSecs * 1000000ll; - return 500000ll; + sp<AMessage> msg = mNotify->dup(); + msg->setInt32("what", kWhatTargetDurationUpdate); + msg->setInt64("targetDurationUs", targetDurationUs); + msg->post(); } } // namespace android diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h index 4e15f85..dab56df 100644 --- a/media/libstagefright/httplive/PlaylistFetcher.h +++ b/media/libstagefright/httplive/PlaylistFetcher.h @@ -36,6 +36,7 @@ class String8; struct PlaylistFetcher : public AHandler { static const int64_t kMinBufferedDurationUs; static const int32_t kDownloadBlockSize; + static const int64_t kFetcherResumeThreshold; enum { kWhatStarted, @@ -43,10 +44,11 @@ struct PlaylistFetcher : public AHandler { kWhatStopped, kWhatError, kWhatDurationUpdate, - kWhatTemporarilyDoneFetching, + kWhatTargetDurationUpdate, kWhatPrepared, kWhatPreparationFailed, kWhatStartedAt, + kWhatStopReached, }; PlaylistFetcher( @@ -64,10 +66,10 @@ struct PlaylistFetcher : public 
AHandler { int64_t startTimeUs = -1ll, // starting timestamps int64_t segmentStartTimeUs = -1ll, // starting position within playlist // startTimeUs!=segmentStartTimeUs only when playlist is live - int32_t startDiscontinuitySeq = 0, - bool adaptive = false); + int32_t startDiscontinuitySeq = -1, + LiveSession::SeekMode seekMode = LiveSession::kSeekModeExactPosition); - void pauseAsync(); + void pauseAsync(float thresholdRatio); void stopAsync(bool clear = true); @@ -95,6 +97,8 @@ private: kWhatDownloadNext = 'dlnx', }; + struct DownloadState; + static const int64_t kMaxMonitorDelayUs; static const int32_t kNumSkipFrames; @@ -105,6 +109,7 @@ private: sp<AMessage> mNotify; sp<AMessage> mStartTimeUsNotify; + sp<HTTPBase> mHTTPDataSource; sp<LiveSession> mSession; AString mURI; @@ -116,7 +121,7 @@ private: // adapting or switching tracks. int64_t mSegmentStartTimeUs; - ssize_t mDiscontinuitySeq; + int32_t mDiscontinuitySeq; bool mStartTimeUsRelative; sp<AMessage> mStopParams; // message containing the latest timestamps we should fetch. @@ -130,13 +135,16 @@ private: int32_t mSeqNumber; int32_t mNumRetries; bool mStartup; - bool mAdaptive; - bool mPrepared; + bool mIDRFound; + int32_t mSeekMode; + bool mTimeChangeSignaled; int64_t mNextPTSTimeUs; int32_t mMonitorQueueGeneration; const int32_t mSubtitleGeneration; + int32_t mLastDiscontinuitySeq; + enum RefreshState { INITIAL_MINIMUM_RELOAD_DELAY, FIRST_UNCHANGED_RELOAD_ATTEMPT, @@ -150,9 +158,8 @@ private: sp<ATSParser> mTSParser; bool mFirstPTSValid; - uint64_t mFirstPTS; int64_t mFirstTimeUs; - int64_t mAbsoluteTimeAnchorUs; + int64_t mSegmentFirstPTS; sp<AnotherPacketSource> mVideoBuffer; // Stores the initialization vector to decrypt the next block of cipher text, which can @@ -160,6 +167,11 @@ private: // the last block of cipher text (cipher-block chaining). 
unsigned char mAESInitVec[16]; + Mutex mThresholdLock; + float mThresholdRatio; + + sp<DownloadState> mDownloadState; + // Set first to true if decrypting the first segment of a playlist segment. When // first is true, reset the initialization vector based on the available // information in the manifest; otherwise, use the initialization vector as @@ -175,6 +187,8 @@ private: void postMonitorQueue(int64_t delayUs = 0, int64_t minDelayUs = 0); void cancelMonitorQueue(); + void setStoppingThreshold(float thresholdRatio); + bool shouldPauseDownload(); int64_t delayUsToRefreshPlaylist() const; status_t refreshPlaylist(); @@ -182,12 +196,19 @@ private: // Returns the media time in us of the segment specified by seqNumber. // This is computed by summing the durations of all segments before it. int64_t getSegmentStartTimeUs(int32_t seqNumber) const; + // Returns the duration time in us of the segment specified. + int64_t getSegmentDurationUs(int32_t seqNumber) const; status_t onStart(const sp<AMessage> &msg); void onPause(); void onStop(const sp<AMessage> &msg); void onMonitorQueue(); void onDownloadNext(); + bool initDownloadState( + AString &uri, + sp<AMessage> &itemMeta, + int32_t &firstSeqNumberInPlaylist, + int32_t &lastSeqNumberInPlaylist); // Resume a fetcher to continue until the stopping point stored in msg. 
status_t onResumeUntil(const sp<AMessage> &msg); @@ -201,20 +222,19 @@ private: status_t extractAndQueueAccessUnits( const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta); + void notifyStopReached(); void notifyError(status_t err); void queueDiscontinuity( ATSParser::DiscontinuityType type, const sp<AMessage> &extra); - int32_t getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const; + int32_t getSeqNumberWithAnchorTime( + int64_t anchorTimeUs, int64_t targetDurationUs) const; int32_t getSeqNumberForDiscontinuity(size_t discontinuitySeq) const; int32_t getSeqNumberForTime(int64_t timeUs) const; void updateDuration(); - - // Before resuming a fetcher in onResume, check the remaining duration is longer than that - // returned by resumeThreshold. - int64_t resumeThreshold(const sp<AMessage> &msg); + void updateTargetDuration(); DISALLOW_EVIL_CONSTRUCTORS(PlaylistFetcher); }; diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h index 1fe6fcf..8c16251 100644 --- a/media/libstagefright/include/MPEG4Extractor.h +++ b/media/libstagefright/include/MPEG4Extractor.h @@ -83,6 +83,8 @@ private: Vector<SidxEntry> mSidxEntries; off64_t mMoofOffset; + bool mMoofFound; + bool mMdatFound; Vector<PsshInfo> mPssh; diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h index c270bc1..dafa07e 100644 --- a/media/libstagefright/include/avc_utils.h +++ b/media/libstagefright/include/avc_utils.h @@ -36,6 +36,11 @@ enum { kAVCProfileCAVLC444Intra = 0x2c }; +struct NALPosition { + size_t nalOffset; + size_t nalSize; +}; + // Optionally returns sample aspect ratio as well. 
void FindAVCDimensions( const sp<ABuffer> &seqParamSet, diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index 482ccff..0a868bc 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -35,6 +35,7 @@ #include <media/stagefright/Utils.h> #include <media/IStreamSource.h> #include <utils/KeyedVector.h> +#include <utils/Vector.h> #include <inttypes.h> @@ -47,7 +48,8 @@ namespace android { static const size_t kTSPacketSize = 188; struct ATSParser::Program : public RefBase { - Program(ATSParser *parser, unsigned programNumber, unsigned programMapPID); + Program(ATSParser *parser, unsigned programNumber, unsigned programMapPID, + int64_t lastRecoveredPTS); bool parsePSISection( unsigned pid, ABitReader *br, status_t *err); @@ -86,14 +88,22 @@ struct ATSParser::Program : public RefBase { } private: + struct StreamInfo { + unsigned mType; + unsigned mPID; + }; + ATSParser *mParser; unsigned mProgramNumber; unsigned mProgramMapPID; KeyedVector<unsigned, sp<Stream> > mStreams; bool mFirstPTSValid; uint64_t mFirstPTS; + int64_t mLastRecoveredPTS; status_t parseProgramMap(ABitReader *br); + int64_t recoverPTS(uint64_t PTS_33bit); + bool switchPIDs(const Vector<StreamInfo> &infos); DISALLOW_EVIL_CONSTRUCTORS(Program); }; @@ -157,10 +167,12 @@ struct ATSParser::PSISection : public RefBase { PSISection(); status_t append(const void *data, size_t size); + void setSkipBytes(uint8_t skip); void clear(); bool isComplete() const; bool isEmpty() const; + bool isCRCOkay() const; const uint8_t *data() const; size_t size() const; @@ -170,6 +182,8 @@ protected: private: sp<ABuffer> mBuffer; + uint8_t mSkipBytes; + static uint32_t CRC_TABLE[]; DISALLOW_EVIL_CONSTRUCTORS(PSISection); }; @@ -177,12 +191,14 @@ private: //////////////////////////////////////////////////////////////////////////////// ATSParser::Program::Program( - ATSParser *parser, unsigned programNumber, unsigned 
programMapPID) + ATSParser *parser, unsigned programNumber, unsigned programMapPID, + int64_t lastRecoveredPTS) : mParser(parser), mProgramNumber(programNumber), mProgramMapPID(programMapPID), mFirstPTSValid(false), - mFirstPTS(0) { + mFirstPTS(0), + mLastRecoveredPTS(lastRecoveredPTS) { ALOGV("new program number %u", programNumber); } @@ -237,10 +253,71 @@ void ATSParser::Program::signalEOS(status_t finalResult) { } } -struct StreamInfo { - unsigned mType; - unsigned mPID; -}; +bool ATSParser::Program::switchPIDs(const Vector<StreamInfo> &infos) { + bool success = false; + + if (mStreams.size() == infos.size()) { + // build type->PIDs map for old and new mapping + size_t i; + KeyedVector<int32_t, Vector<int32_t> > oldType2PIDs, newType2PIDs; + for (i = 0; i < mStreams.size(); ++i) { + ssize_t index = oldType2PIDs.indexOfKey(mStreams[i]->type()); + if (index < 0) { + oldType2PIDs.add(mStreams[i]->type(), Vector<int32_t>()); + } + oldType2PIDs.editValueFor(mStreams[i]->type()).push_back(mStreams[i]->pid()); + } + for (i = 0; i < infos.size(); ++i) { + ssize_t index = newType2PIDs.indexOfKey(infos[i].mType); + if (index < 0) { + newType2PIDs.add(infos[i].mType, Vector<int32_t>()); + } + newType2PIDs.editValueFor(infos[i].mType).push_back(infos[i].mPID); + } + + // we can recover if the number of streams for each type hasn't changed + if (oldType2PIDs.size() == newType2PIDs.size()) { + success = true; + for (i = 0; i < oldType2PIDs.size(); ++i) { + // KeyedVector is sorted, we just compare key and size of each index + if (oldType2PIDs.keyAt(i) != newType2PIDs.keyAt(i) + || oldType2PIDs[i].size() != newType2PIDs[i].size()) { + success = false; + break; + } + } + } + + if (success) { + // save current streams to temp + KeyedVector<int32_t, sp<Stream> > temp; + for (i = 0; i < mStreams.size(); ++i) { + temp.add(mStreams.keyAt(i), mStreams.editValueAt(i)); + } + + mStreams.clear(); + for (i = 0; i < temp.size(); ++i) { + // The two checks below shouldn't happen, + // we 
already checked above the stream count matches + ssize_t index = newType2PIDs.indexOfKey(temp[i]->type()); + CHECK(index >= 0); + Vector<int32_t> &newPIDs = newType2PIDs.editValueAt(index); + CHECK(newPIDs.size() > 0); + + // get the next PID for temp[i]->type() in the new PID map + Vector<int32_t>::iterator it = newPIDs.begin(); + + // change the PID of the stream, and add it back + temp.editValueAt(i)->setPID(*it); + mStreams.add(temp[i]->pid(), temp.editValueAt(i)); + + // removed the used PID + newPIDs.erase(it); + } + } + } + return success; +} status_t ATSParser::Program::parseProgramMap(ABitReader *br) { unsigned table_id = br->getBits(8); @@ -369,39 +446,8 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) { } #endif - // The only case we can recover from is if we have two streams - // and they switched PIDs. - - bool success = false; - - if (mStreams.size() == 2 && infos.size() == 2) { - const StreamInfo &info1 = infos.itemAt(0); - const StreamInfo &info2 = infos.itemAt(1); - - sp<Stream> s1 = mStreams.editValueAt(0); - sp<Stream> s2 = mStreams.editValueAt(1); - - bool caseA = - info1.mPID == s1->pid() && info1.mType == s2->type() - && info2.mPID == s2->pid() && info2.mType == s1->type(); - - bool caseB = - info1.mPID == s2->pid() && info1.mType == s1->type() - && info2.mPID == s1->pid() && info2.mType == s2->type(); - - if (caseA || caseB) { - unsigned pid1 = s1->pid(); - unsigned pid2 = s2->pid(); - s1->setPID(pid2); - s2->setPID(pid1); - - mStreams.clear(); - mStreams.add(s1->pid(), s1); - mStreams.add(s2->pid(), s2); - - success = true; - } - } + // we can recover if number of streams for each type remain the same + bool success = switchPIDs(infos); if (!success) { ALOGI("Stream PIDs changed and we cannot recover."); @@ -425,6 +471,32 @@ status_t ATSParser::Program::parseProgramMap(ABitReader *br) { return OK; } +int64_t ATSParser::Program::recoverPTS(uint64_t PTS_33bit) { + // We only have the lower 33-bit of the PTS. 
It could overflow within a + // reasonable amount of time. To handle the wrap-around, use fancy math + // to get an extended PTS that is within [-0xffffffff, 0xffffffff] + // of the latest recovered PTS. + if (mLastRecoveredPTS < 0ll) { + // Use the original 33bit number for 1st frame, the reason is that + // if 1st frame wraps to negative that's far away from 0, we could + // never start. Only start wrapping around from 2nd frame. + mLastRecoveredPTS = static_cast<int64_t>(PTS_33bit); + } else { + mLastRecoveredPTS = static_cast<int64_t>( + ((mLastRecoveredPTS - PTS_33bit + 0x100000000ll) + & 0xfffffffe00000000ull) | PTS_33bit); + // We start from 0, but recovered PTS could be slightly below 0. + // Clamp it to 0 as rest of the pipeline doesn't take negative pts. + // (eg. video is read first and starts at 0, but audio starts at 0xfffffff0) + if (mLastRecoveredPTS < 0ll) { + ALOGI("Clamping negative recovered PTS (%" PRId64 ") to 0", mLastRecoveredPTS); + mLastRecoveredPTS = 0ll; + } + } + + return mLastRecoveredPTS; +} + sp<MediaSource> ATSParser::Program::getSource(SourceType type) { size_t index = (type == AUDIO) ? 
0 : 0; @@ -455,6 +527,8 @@ bool ATSParser::Program::hasSource(SourceType type) const { } int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) { + PTS = recoverPTS(PTS); + if (!(mParser->mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE)) { if (!mFirstPTSValid) { mFirstPTSValid = true; @@ -969,6 +1043,7 @@ ATSParser::ATSParser(uint32_t flags) mAbsoluteTimeAnchorUs(-1ll), mTimeOffsetValid(false), mTimeOffsetUs(0ll), + mLastRecoveredPTS(-1ll), mNumTSPacketsParsed(0), mNumPCRs(0) { mPSISections.add(0 /* PID */, new PSISection); @@ -987,11 +1062,21 @@ status_t ATSParser::feedTSPacket(const void *data, size_t size) { void ATSParser::signalDiscontinuity( DiscontinuityType type, const sp<AMessage> &extra) { int64_t mediaTimeUs; - if ((type & DISCONTINUITY_TIME) - && extra != NULL - && extra->findInt64( - IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) { - mAbsoluteTimeAnchorUs = mediaTimeUs; + if ((type & DISCONTINUITY_TIME) && extra != NULL) { + if (extra->findInt64(IStreamListener::kKeyMediaTimeUs, &mediaTimeUs)) { + mAbsoluteTimeAnchorUs = mediaTimeUs; + } + if ((mFlags & TS_TIMESTAMPS_ARE_ABSOLUTE) + && extra->findInt64( + IStreamListener::kKeyRecentMediaTimeUs, &mediaTimeUs)) { + if (mAbsoluteTimeAnchorUs >= 0ll) { + mediaTimeUs -= mAbsoluteTimeAnchorUs; + } + if (mTimeOffsetValid) { + mediaTimeUs -= mTimeOffsetUs; + } + mLastRecoveredPTS = (mediaTimeUs * 9) / 100; + } } else if (type == DISCONTINUITY_ABSOLUTE_TIME) { int64_t timeUs; CHECK(extra->findInt64("timeUs", &timeUs)); @@ -1075,7 +1160,7 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) { if (!found) { mPrograms.push( - new Program(this, program_number, programMapPID)); + new Program(this, program_number, programMapPID, mLastRecoveredPTS)); } if (mPSISections.indexOfKey(programMapPID) < 0) { @@ -1098,10 +1183,12 @@ status_t ATSParser::parsePID( if (payload_unit_start_indicator) { if (!section->isEmpty()) { - return ERROR_UNSUPPORTED; + ALOGW("parsePID encounters payload_unit_start_indicator when 
section is not empty"); + section->clear(); } unsigned skip = br->getBits(8); + section->setSkipBytes(skip + 1); // skip filler bytes + pointer field itself br->skipBits(skip * 8); } @@ -1116,6 +1203,9 @@ status_t ATSParser::parsePID( return OK; } + if (!section->isCRCOkay()) { + return BAD_VALUE; + } ABitReader sectionBits(section->data(), section->size()); if (PID == 0) { @@ -1338,7 +1428,79 @@ void ATSParser::updatePCR( //////////////////////////////////////////////////////////////////////////////// -ATSParser::PSISection::PSISection() { + +// CRC32 used for PSI section. The table was generated by following command: +// $ python pycrc.py --model crc-32-mpeg --algorithm table-driven --generate c +// Visit http://www.tty1.net/pycrc/index_en.html for more details. +uint32_t ATSParser::PSISection::CRC_TABLE[] = { + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, + 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, + 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, + 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, + 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, + 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, + 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, + 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, + 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, + 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, + 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, + 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, + 0x4d9b3063, 0x495a2dd4, 
0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, + 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, + 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, + 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, + 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, + 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, + 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, + 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, + 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, + 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, + 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, + 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, + 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, + 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, + 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, + 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, + 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, + 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, + 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, + 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, + 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, + 0xbcb4666d, 0xb8757bda, 
0xb5365d03, 0xb1f740b4 + }; + +ATSParser::PSISection::PSISection() : + mSkipBytes(0) { } ATSParser::PSISection::~PSISection() { @@ -1369,10 +1531,15 @@ status_t ATSParser::PSISection::append(const void *data, size_t size) { return OK; } +void ATSParser::PSISection::setSkipBytes(uint8_t skip) { + mSkipBytes = skip; +} + void ATSParser::PSISection::clear() { if (mBuffer != NULL) { mBuffer->setRange(0, 0); } + mSkipBytes = 0; } bool ATSParser::PSISection::isComplete() const { @@ -1396,4 +1563,30 @@ size_t ATSParser::PSISection::size() const { return mBuffer == NULL ? 0 : mBuffer->size(); } +bool ATSParser::PSISection::isCRCOkay() const { + if (!isComplete()) { + return false; + } + uint8_t* data = mBuffer->data(); + + // Return true if section_syntax_indicator says no section follows the field section_length. + if ((data[1] & 0x80) == 0) { + return true; + } + + unsigned sectionLength = U16_AT(data + 1) & 0xfff; + ALOGV("sectionLength %u, skip %u", sectionLength, mSkipBytes); + + // Skip the preceding field present when payload start indicator is on. 
+ sectionLength -= mSkipBytes; + + uint32_t crc = 0xffffffff; + for(unsigned i = 0; i < sectionLength + 4 /* crc */; i++) { + uint8_t b = data[i]; + int index = ((crc >> 24) ^ (b & 0xff)) & 0xff; + crc = CRC_TABLE[index] ^ (crc << 8); + } + ALOGV("crc: %08x\n", crc); + return (crc == 0); +} } // namespace android diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h index 75d76dc..a1405bd 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.h +++ b/media/libstagefright/mpeg2ts/ATSParser.h @@ -46,6 +46,9 @@ struct ATSParser : public RefBase { DISCONTINUITY_AUDIO_FORMAT | DISCONTINUITY_VIDEO_FORMAT | DISCONTINUITY_TIME, + DISCONTINUITY_FORMAT_ONLY = + DISCONTINUITY_AUDIO_FORMAT + | DISCONTINUITY_VIDEO_FORMAT, }; enum Flags { @@ -115,6 +118,7 @@ private: bool mTimeOffsetValid; int64_t mTimeOffsetUs; + int64_t mLastRecoveredPTS; size_t mNumTSPacketsParsed; diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp index f266fe7..c5bb41b 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp @@ -19,6 +19,8 @@ #include "AnotherPacketSource.h" +#include "include/avc_utils.h" + #include <media/stagefright/foundation/ABuffer.h> #include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/foundation/AMessage.h> @@ -27,6 +29,7 @@ #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MetaData.h> +#include <media/stagefright/Utils.h> #include <utils/Vector.h> #include <inttypes.h> @@ -38,6 +41,7 @@ const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta) : mIsAudio(false), mIsVideo(false), + mEnabled(true), mFormat(NULL), mLastQueuedTimeUs(0), mEOSResult(OK), @@ -48,7 +52,10 @@ AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta) } void 
AnotherPacketSource::setFormat(const sp<MetaData> &meta) { - CHECK(mFormat == NULL); + if (mFormat != NULL) { + // Only allowed to be set once. Requires explicit clear to reset. + return; + } mIsAudio = false; mIsVideo = false; @@ -91,13 +98,12 @@ sp<MetaData> AnotherPacketSource::getFormat() { while (it != mBuffers.end()) { sp<ABuffer> buffer = *it; int32_t discontinuity; - if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { - break; - } - - sp<RefBase> object; - if (buffer->meta()->findObject("format", &object)) { - return mFormat = static_cast<MetaData*>(object.get()); + if (!buffer->meta()->findInt32("discontinuity", &discontinuity)) { + sp<RefBase> object; + if (buffer->meta()->findObject("format", &object)) { + setFormat(static_cast<MetaData*>(object.get())); + return mFormat; + } } ++it; @@ -131,7 +137,7 @@ status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) { sp<RefBase> object; if ((*buffer)->meta()->findObject("format", &object)) { - mFormat = static_cast<MetaData*>(object.get()); + setFormat(static_cast<MetaData*>(object.get())); } return OK; @@ -153,7 +159,6 @@ status_t AnotherPacketSource::read( const sp<ABuffer> buffer = *mBuffers.begin(); mBuffers.erase(mBuffers.begin()); - mLatestDequeuedMeta = buffer->meta()->dup(); int32_t discontinuity; if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { @@ -164,9 +169,11 @@ status_t AnotherPacketSource::read( return INFO_DISCONTINUITY; } + mLatestDequeuedMeta = buffer->meta()->dup(); + sp<RefBase> object; if (buffer->meta()->findObject("format", &object)) { - mFormat = static_cast<MetaData*>(object.get()); + setFormat(static_cast<MetaData*>(object.get())); } int64_t timeUs; @@ -203,20 +210,26 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) { return; } - int64_t lastQueuedTimeUs; - CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs)); - mLastQueuedTimeUs = lastQueuedTimeUs; - ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", 
mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); - Mutex::Autolock autoLock(mLock); mBuffers.push_back(buffer); mCondition.signal(); int32_t discontinuity; if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { + // discontinuity handling needs to be consistent with queueDiscontinuity() ++mQueuedDiscontinuityCount; + mLastQueuedTimeUs = 0ll; + mEOSResult = OK; + mLatestEnqueuedMeta = NULL; + return; } + int64_t lastQueuedTimeUs; + CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs)); + mLastQueuedTimeUs = lastQueuedTimeUs; + ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", + mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6); + if (mLatestEnqueuedMeta == NULL) { mLatestEnqueuedMeta = buffer->meta()->dup(); } else { @@ -296,6 +309,10 @@ void AnotherPacketSource::signalEOS(status_t result) { bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) { Mutex::Autolock autoLock(mLock); + *finalResult = OK; + if (!mEnabled) { + return false; + } if (!mBuffers.empty()) { return true; } @@ -304,6 +321,24 @@ bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) { return false; } +bool AnotherPacketSource::hasDataBufferAvailable(status_t *finalResult) { + Mutex::Autolock autoLock(mLock); + *finalResult = OK; + if (!mEnabled) { + return false; + } + List<sp<ABuffer> >::iterator it; + for (it = mBuffers.begin(); it != mBuffers.end(); it++) { + int32_t discontinuity; + if (!(*it)->meta()->findInt32("discontinuity", &discontinuity)) { + return true; + } + } + + *finalResult = mEOSResult; + return false; +} + int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) { Mutex::Autolock autoLock(mLock); return getBufferedDurationUs_l(finalResult); @@ -422,4 +457,152 @@ sp<AMessage> AnotherPacketSource::getLatestDequeuedMeta() { return mLatestDequeuedMeta; } +void AnotherPacketSource::enable(bool enable) { + Mutex::Autolock autoLock(mLock); + mEnabled = enable; +} + +/* + * returns the sample meta that's delayUs after queue 
head + * (NULL if such sample is unavailable) + */ +sp<AMessage> AnotherPacketSource::getMetaAfterLastDequeued(int64_t delayUs) { + Mutex::Autolock autoLock(mLock); + int64_t firstUs = -1; + int64_t lastUs = -1; + int64_t durationUs = 0; + + List<sp<ABuffer> >::iterator it; + for (it = mBuffers.begin(); it != mBuffers.end(); ++it) { + const sp<ABuffer> &buffer = *it; + int32_t discontinuity; + if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { + durationUs += lastUs - firstUs; + firstUs = -1; + lastUs = -1; + continue; + } + int64_t timeUs; + if (buffer->meta()->findInt64("timeUs", &timeUs)) { + if (firstUs < 0) { + firstUs = timeUs; + } + if (lastUs < 0 || timeUs > lastUs) { + lastUs = timeUs; + } + if (durationUs + (lastUs - firstUs) >= delayUs) { + return buffer->meta(); + } + } + } + return NULL; +} + +/* + * removes samples with time equal or after meta + */ +void AnotherPacketSource::trimBuffersAfterMeta( + const sp<AMessage> &meta) { + if (meta == NULL) { + ALOGW("trimming with NULL meta, ignoring"); + return; + } + + Mutex::Autolock autoLock(mLock); + if (mBuffers.empty()) { + return; + } + + HLSTime stopTime(meta); + ALOGV("trimBuffersAfterMeta: discontinuitySeq %zu, timeUs %lld", + stopTime.mSeq, (long long)stopTime.mTimeUs); + + List<sp<ABuffer> >::iterator it; + sp<AMessage> newLatestEnqueuedMeta = NULL; + int64_t newLastQueuedTimeUs = 0; + size_t newDiscontinuityCount = 0; + for (it = mBuffers.begin(); it != mBuffers.end(); ++it) { + const sp<ABuffer> &buffer = *it; + int32_t discontinuity; + if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { + newDiscontinuityCount++; + continue; + } + + HLSTime curTime(buffer->meta()); + if (!(curTime < stopTime)) { + ALOGV("trimming from %lld (inclusive) to end", + (long long)curTime.mTimeUs); + break; + } + newLatestEnqueuedMeta = buffer->meta(); + newLastQueuedTimeUs = curTime.mTimeUs; + } + mBuffers.erase(it, mBuffers.end()); + mLatestEnqueuedMeta = newLatestEnqueuedMeta; + 
mLastQueuedTimeUs = newLastQueuedTimeUs; + mQueuedDiscontinuityCount = newDiscontinuityCount; +} + +/* + * removes samples with time equal or before meta; + * returns first sample left in the queue. + * + * (for AVC, if trim happens, the samples left will always start + * at next IDR.) + */ +sp<AMessage> AnotherPacketSource::trimBuffersBeforeMeta( + const sp<AMessage> &meta) { + HLSTime startTime(meta); + ALOGV("trimBuffersBeforeMeta: discontinuitySeq %zu, timeUs %lld", + startTime.mSeq, (long long)startTime.mTimeUs); + + sp<AMessage> firstMeta; + Mutex::Autolock autoLock(mLock); + if (mBuffers.empty()) { + return NULL; + } + + sp<MetaData> format; + bool isAvc = false; + + List<sp<ABuffer> >::iterator it; + size_t discontinuityCount = 0; + for (it = mBuffers.begin(); it != mBuffers.end(); ++it) { + const sp<ABuffer> &buffer = *it; + int32_t discontinuity; + if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { + format = NULL; + isAvc = false; + discontinuityCount++; + continue; + } + if (format == NULL) { + sp<RefBase> object; + if (buffer->meta()->findObject("format", &object)) { + const char* mime; + format = static_cast<MetaData*>(object.get()); + isAvc = format != NULL + && format->findCString(kKeyMIMEType, &mime) + && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC); + } + } + if (isAvc && !IsIDR(buffer)) { + continue; + } + + HLSTime curTime(buffer->meta()); + if (startTime < curTime) { + ALOGV("trimming from beginning to %lld (not inclusive)", + (long long)curTime.mTimeUs); + firstMeta = buffer->meta(); + break; + } + } + mBuffers.erase(mBuffers.begin(), it); + mQueuedDiscontinuityCount -= discontinuityCount; + mLatestDequeuedMeta = NULL; + return firstMeta; +} + } // namespace android diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h index 809a858..fa7dd6a 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h @@ 
-43,8 +43,12 @@ struct AnotherPacketSource : public MediaSource { void clear(); + // Returns true if we have any packets including discontinuities bool hasBufferAvailable(status_t *finalResult); + // Returns true if we have packets that's not discontinuities + bool hasDataBufferAvailable(status_t *finalResult); + // Returns the difference between the last and the first queued // presentation timestamps since the last discontinuity (if any). int64_t getBufferedDurationUs(status_t *finalResult); @@ -66,8 +70,14 @@ struct AnotherPacketSource : public MediaSource { bool isFinished(int64_t duration) const; + void enable(bool enable); + sp<AMessage> getLatestEnqueuedMeta(); sp<AMessage> getLatestDequeuedMeta(); + sp<AMessage> getMetaAfterLastDequeued(int64_t delayUs); + + void trimBuffersAfterMeta(const sp<AMessage> &meta); + sp<AMessage> trimBuffersBeforeMeta(const sp<AMessage> &meta); protected: virtual ~AnotherPacketSource(); @@ -78,6 +88,7 @@ private: bool mIsAudio; bool mIsVideo; + bool mEnabled; sp<MetaData> mFormat; int64_t mLastQueuedTimeUs; List<sp<ABuffer> > mBuffers; diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp index a605595..b17985c 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -257,8 +257,8 @@ status_t ElementaryStreamQueue::appendData( uint8_t *ptr = (uint8_t *)data; ssize_t startOffset = -1; - for (size_t i = 0; i + 3 < size; ++i) { - if (!memcmp("\x00\x00\x00\x01", &ptr[i], 4)) { + for (size_t i = 0; i + 2 < size; ++i) { + if (!memcmp("\x00\x00\x01", &ptr[i], 3)) { startOffset = i; break; } @@ -617,8 +617,6 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { // having to interpolate. // The final AAC frame may well extend into the next RangeInfo but // that's ok. 
- // TODO: the logic commented above is skipped because codec cannot take - // arbitrary sized input buffers; size_t offset = 0; while (offset < info.mLength) { if (offset + 7 > mBuffer->size()) { @@ -683,12 +681,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { size_t headerSize __unused = protection_absent ? 7 : 9; offset += aac_frame_length; - // TODO: move back to concatenation when codec can support arbitrary input buffers. - // For now only queue a single buffer - break; } - int64_t timeUs = fetchTimestampAAC(offset); + int64_t timeUs = fetchTimestamp(offset); sp<ABuffer> accessUnit = new ABuffer(offset); memcpy(accessUnit->data(), mBuffer->data(), offset); @@ -735,50 +730,6 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { return timeUs; } -// TODO: avoid interpolating timestamps once codec supports arbitrary sized input buffers -int64_t ElementaryStreamQueue::fetchTimestampAAC(size_t size) { - int64_t timeUs = -1; - bool first = true; - - size_t samplesize = size; - while (size > 0) { - CHECK(!mRangeInfos.empty()); - - RangeInfo *info = &*mRangeInfos.begin(); - - if (first) { - timeUs = info->mTimestampUs; - first = false; - } - - if (info->mLength > size) { - int32_t sampleRate; - CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate)); - info->mLength -= size; - size_t numSamples = 1024 * size / samplesize; - info->mTimestampUs += numSamples * 1000000ll / sampleRate; - size = 0; - } else { - size -= info->mLength; - - mRangeInfos.erase(mRangeInfos.begin()); - info = NULL; - } - - } - - if (timeUs == 0ll) { - ALOGV("Returning 0 timestamp"); - } - - return timeUs; -} - -struct NALPosition { - size_t nalOffset; - size_t nalSize; -}; - sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() { const uint8_t *data = mBuffer->data(); @@ -786,6 +737,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() { Vector<NALPosition> nals; size_t totalSize = 0; + size_t seiCount = 0; status_t err; const uint8_t *nalStart; @@ -815,6 
+767,9 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() { // next frame. flush = true; + } else if (nalType == 6 && nalSize > 0) { + // found non-zero sized SEI + ++seiCount; } if (flush) { @@ -823,21 +778,29 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() { size_t auSize = 4 * nals.size() + totalSize; sp<ABuffer> accessUnit = new ABuffer(auSize); + sp<ABuffer> sei; + + if (seiCount > 0) { + sei = new ABuffer(seiCount * sizeof(NALPosition)); + accessUnit->meta()->setBuffer("sei", sei); + } #if !LOG_NDEBUG AString out; #endif size_t dstOffset = 0; + size_t seiIndex = 0; for (size_t i = 0; i < nals.size(); ++i) { const NALPosition &pos = nals.itemAt(i); unsigned nalType = mBuffer->data()[pos.nalOffset] & 0x1f; - if (nalType == 6) { - sp<ABuffer> sei = new ABuffer(pos.nalSize); - memcpy(sei->data(), mBuffer->data() + pos.nalOffset, pos.nalSize); - accessUnit->meta()->setBuffer("sei", sei); + if (nalType == 6 && pos.nalSize > 0) { + CHECK_LT(seiIndex, sei->size() / sizeof(NALPosition)); + NALPosition &seiPos = ((NALPosition *)sei->data())[seiIndex++]; + seiPos.nalOffset = dstOffset + 4; + seiPos.nalSize = pos.nalSize; } #if !LOG_NDEBUG diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h index eb4b1c9..45b4624 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.h +++ b/media/libstagefright/mpeg2ts/ESQueue.h @@ -77,7 +77,6 @@ private: // consume a logical (compressed) access unit of size "size", // returns its timestamp in us (or -1 if no time information). 
int64_t fetchTimestamp(size_t size); - int64_t fetchTimestampAAC(size_t size); DISALLOW_EVIL_CONSTRUCTORS(ElementaryStreamQueue); }; diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk index aaa8334..07ea605 100644 --- a/media/libstagefright/omx/Android.mk +++ b/media/libstagefright/omx/Android.mk @@ -1,11 +1,8 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) -ifeq ($(TARGET_DEVICE), manta) - LOCAL_CFLAGS += -DSURFACE_IS_BGR32 -endif - LOCAL_SRC_FILES:= \ + FrameDropper.cpp \ GraphicBufferSource.cpp \ OMX.cpp \ OMXMaster.cpp \ diff --git a/media/libstagefright/omx/FrameDropper.cpp b/media/libstagefright/omx/FrameDropper.cpp new file mode 100644 index 0000000..9fba0b7 --- /dev/null +++ b/media/libstagefright/omx/FrameDropper.cpp @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "FrameDropper" +#include <utils/Log.h> + +#include "FrameDropper.h" + +#include <media/stagefright/foundation/ADebug.h> + +namespace android { + +static const int64_t kMaxJitterUs = 2000; + +FrameDropper::FrameDropper() + : mDesiredMinTimeUs(-1), + mMinIntervalUs(0) { +} + +FrameDropper::~FrameDropper() { +} + +status_t FrameDropper::setMaxFrameRate(float maxFrameRate) { + if (maxFrameRate <= 0) { + ALOGE("framerate should be positive but got %f.", maxFrameRate); + return BAD_VALUE; + } + mMinIntervalUs = (int64_t) (1000000.0f / maxFrameRate); + return OK; +} + +bool FrameDropper::shouldDrop(int64_t timeUs) { + if (mMinIntervalUs <= 0) { + return false; + } + + if (mDesiredMinTimeUs < 0) { + mDesiredMinTimeUs = timeUs + mMinIntervalUs; + ALOGV("first frame %lld, next desired frame %lld", timeUs, mDesiredMinTimeUs); + return false; + } + + if (timeUs < (mDesiredMinTimeUs - kMaxJitterUs)) { + ALOGV("drop frame %lld, desired frame %lld, diff %lld", + timeUs, mDesiredMinTimeUs, mDesiredMinTimeUs - timeUs); + return true; + } + + int64_t n = (timeUs - mDesiredMinTimeUs + kMaxJitterUs) / mMinIntervalUs; + mDesiredMinTimeUs += (n + 1) * mMinIntervalUs; + ALOGV("keep frame %lld, next desired frame %lld, diff %lld", + timeUs, mDesiredMinTimeUs, mDesiredMinTimeUs - timeUs); + return false; +} + +} // namespace android diff --git a/media/libstagefright/omx/FrameDropper.h b/media/libstagefright/omx/FrameDropper.h new file mode 100644 index 0000000..c5a6d4b --- /dev/null +++ b/media/libstagefright/omx/FrameDropper.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef FRAME_DROPPER_H_ + +#define FRAME_DROPPER_H_ + +#include <utils/Errors.h> +#include <utils/RefBase.h> + +#include <media/stagefright/foundation/ABase.h> + +namespace android { + +struct FrameDropper : public RefBase { + // No frames will be dropped until a valid max frame rate is set. + FrameDropper(); + + // maxFrameRate required to be positive. + status_t setMaxFrameRate(float maxFrameRate); + + // Returns false if max frame rate has not been set via setMaxFrameRate. + bool shouldDrop(int64_t timeUs); + +protected: + virtual ~FrameDropper(); + +private: + int64_t mDesiredMinTimeUs; + int64_t mMinIntervalUs; + + DISALLOW_EVIL_CONSTRUCTORS(FrameDropper); +}; + +} // namespace android + +#endif // FRAME_DROPPER_H_ diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp index 44c7edc..477cfc6 100644 --- a/media/libstagefright/omx/GraphicBufferSource.cpp +++ b/media/libstagefright/omx/GraphicBufferSource.cpp @@ -28,8 +28,10 @@ #include <media/hardware/MetadataBufferType.h> #include <ui/GraphicBuffer.h> +#include <gui/BufferItem.h> #include <inttypes.h> +#include "FrameDropper.h" namespace android { @@ -53,9 +55,9 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance, mRepeatAfterUs(-1ll), mRepeatLastFrameGeneration(0), mRepeatLastFrameTimestamp(-1ll), - mLatestSubmittedBufferId(-1), - mLatestSubmittedBufferFrameNum(0), - mLatestSubmittedBufferUseCount(0), + mLatestBufferId(-1), + mLatestBufferFrameNum(0), + mLatestBufferUseCount(0), 
mRepeatBufferDeferred(false), mTimePerCaptureUs(-1ll), mTimePerFrameUs(-1ll), @@ -152,9 +154,9 @@ void GraphicBufferSource::omxExecuting() { mLooper->registerHandler(mReflector); mLooper->start(); - if (mLatestSubmittedBufferId >= 0) { + if (mLatestBufferId >= 0) { sp<AMessage> msg = - new AMessage(kWhatRepeatLastFrame, mReflector->id()); + new AMessage(kWhatRepeatLastFrame, mReflector); msg->setInt32("generation", ++mRepeatLastFrameGeneration); msg->post(mRepeatAfterUs); @@ -287,8 +289,8 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { ALOGV("cbi %d matches bq slot %d, handle=%p", cbi, id, mBufferSlot[id]->handle); - if (id == mLatestSubmittedBufferId) { - CHECK_GT(mLatestSubmittedBufferUseCount--, 0); + if (id == mLatestBufferId) { + CHECK_GT(mLatestBufferUseCount--, 0); } else { mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); @@ -313,11 +315,11 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) { ALOGV("buffer freed, EOS pending"); submitEndOfInputStream_l(); } else if (mRepeatBufferDeferred) { - bool success = repeatLatestSubmittedBuffer_l(); + bool success = repeatLatestBuffer_l(); if (success) { - ALOGV("deferred repeatLatestSubmittedBuffer_l SUCCESS"); + ALOGV("deferred repeatLatestBuffer_l SUCCESS"); } else { - ALOGV("deferred repeatLatestSubmittedBuffer_l FAILURE"); + ALOGV("deferred repeatLatestBuffer_l FAILURE"); } mRepeatBufferDeferred = false; } @@ -359,7 +361,7 @@ void GraphicBufferSource::suspend(bool suspend) { mSuspended = true; while (mNumFramesAvailable > 0) { - BufferQueue::BufferItem item; + BufferItem item; status_t err = mConsumer->acquireBuffer(&item, 0); if (err == BufferQueue::NO_BUFFER_AVAILABLE) { @@ -382,12 +384,12 @@ void GraphicBufferSource::suspend(bool suspend) { mSuspended = false; if (mExecuting && mNumFramesAvailable == 0 && mRepeatBufferDeferred) { - if (repeatLatestSubmittedBuffer_l()) { - ALOGV("suspend/deferred 
repeatLatestSubmittedBuffer_l SUCCESS"); + if (repeatLatestBuffer_l()) { + ALOGV("suspend/deferred repeatLatestBuffer_l SUCCESS"); mRepeatBufferDeferred = false; } else { - ALOGV("suspend/deferred repeatLatestSubmittedBuffer_l FAILURE"); + ALOGV("suspend/deferred repeatLatestBuffer_l FAILURE"); } } } @@ -409,7 +411,7 @@ bool GraphicBufferSource::fillCodecBuffer_l() { ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%zu", mNumFramesAvailable); - BufferQueue::BufferItem item; + BufferItem item; status_t err = mConsumer->acquireBuffer(&item, 0); if (err == BufferQueue::NO_BUFFER_AVAILABLE) { // shouldn't happen @@ -441,12 +443,22 @@ bool GraphicBufferSource::fillCodecBuffer_l() { // only submit sample if start time is unspecified, or sample // is queued after the specified start time + bool dropped = false; if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) { // if start time is set, offset time stamp by start time if (mSkipFramesBeforeNs > 0) { item.mTimestamp -= mSkipFramesBeforeNs; } - err = submitBuffer_l(item, cbi); + + int64_t timeUs = item.mTimestamp / 1000; + if (mFrameDropper != NULL && mFrameDropper->shouldDrop(timeUs)) { + ALOGV("skipping frame (%lld) to meet max framerate", static_cast<long long>(timeUs)); + // set err to OK so that the skipped frame can still be saved as the lastest frame + err = OK; + dropped = true; + } else { + err = submitBuffer_l(item, cbi); + } } if (err != OK) { @@ -455,46 +467,46 @@ bool GraphicBufferSource::fillCodecBuffer_l() { EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); } else { ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi); - setLatestSubmittedBuffer_l(item); + setLatestBuffer_l(item, dropped); } return true; } -bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() { +bool GraphicBufferSource::repeatLatestBuffer_l() { CHECK(mExecuting && mNumFramesAvailable == 0); - if (mLatestSubmittedBufferId < 0 || mSuspended) { + if (mLatestBufferId < 0 || mSuspended) { return false; } - if 
(mBufferSlot[mLatestSubmittedBufferId] == NULL) { + if (mBufferSlot[mLatestBufferId] == NULL) { // This can happen if the remote side disconnects, causing // onBuffersReleased() to NULL out our copy of the slots. The // buffer is gone, so we have nothing to show. // // To be on the safe side we try to release the buffer. - ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL"); + ALOGD("repeatLatestBuffer_l: slot was NULL"); mConsumer->releaseBuffer( - mLatestSubmittedBufferId, - mLatestSubmittedBufferFrameNum, + mLatestBufferId, + mLatestBufferFrameNum, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); - mLatestSubmittedBufferId = -1; - mLatestSubmittedBufferFrameNum = 0; + mLatestBufferId = -1; + mLatestBufferFrameNum = 0; return false; } int cbi = findAvailableCodecBuffer_l(); if (cbi < 0) { // No buffers available, bail. - ALOGV("repeatLatestSubmittedBuffer_l: no codec buffers."); + ALOGV("repeatLatestBuffer_l: no codec buffers."); return false; } - BufferQueue::BufferItem item; - item.mBuf = mLatestSubmittedBufferId; - item.mFrameNumber = mLatestSubmittedBufferFrameNum; + BufferItem item; + item.mBuf = mLatestBufferId; + item.mFrameNumber = mLatestBufferFrameNum; item.mTimestamp = mRepeatLastFrameTimestamp; status_t err = submitBuffer_l(item, cbi); @@ -503,7 +515,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() { return false; } - ++mLatestSubmittedBufferUseCount; + ++mLatestBufferUseCount; /* repeat last frame up to kRepeatLastFrameCount times. 
* in case of static scene, a single repeat might not get rid of encoder @@ -513,7 +525,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() { mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000; if (mReflector != NULL) { - sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id()); + sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector); msg->setInt32("generation", ++mRepeatLastFrameGeneration); msg->post(mRepeatAfterUs); } @@ -522,31 +534,31 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() { return true; } -void GraphicBufferSource::setLatestSubmittedBuffer_l( - const BufferQueue::BufferItem &item) { - ALOGV("setLatestSubmittedBuffer_l"); +void GraphicBufferSource::setLatestBuffer_l( + const BufferItem &item, bool dropped) { + ALOGV("setLatestBuffer_l"); - if (mLatestSubmittedBufferId >= 0) { - if (mLatestSubmittedBufferUseCount == 0) { + if (mLatestBufferId >= 0) { + if (mLatestBufferUseCount == 0) { mConsumer->releaseBuffer( - mLatestSubmittedBufferId, - mLatestSubmittedBufferFrameNum, + mLatestBufferId, + mLatestBufferFrameNum, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE); } } - mLatestSubmittedBufferId = item.mBuf; - mLatestSubmittedBufferFrameNum = item.mFrameNumber; + mLatestBufferId = item.mBuf; + mLatestBufferFrameNum = item.mFrameNumber; mRepeatLastFrameTimestamp = item.mTimestamp + mRepeatAfterUs * 1000; - mLatestSubmittedBufferUseCount = 1; + mLatestBufferUseCount = dropped ? 
0 : 1; mRepeatBufferDeferred = false; mRepeatLastFrameCount = kRepeatLastFrameCount; if (mReflector != NULL) { - sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector->id()); + sp<AMessage> msg = new AMessage(kWhatRepeatLastFrame, mReflector); msg->setInt32("generation", ++mRepeatLastFrameGeneration); msg->post(mRepeatAfterUs); } @@ -579,7 +591,7 @@ status_t GraphicBufferSource::signalEndOfInputStream() { return OK; } -int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) { +int64_t GraphicBufferSource::getTimestamp(const BufferItem &item) { int64_t timeUs = item.mTimestamp / 1000; if (mTimePerCaptureUs > 0ll) { @@ -640,7 +652,7 @@ int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) { } status_t GraphicBufferSource::submitBuffer_l( - const BufferQueue::BufferItem &item, int cbi) { + const BufferItem &item, int cbi) { ALOGV("submitBuffer_l cbi=%d", cbi); int64_t timeUs = getTimestamp(item); @@ -766,7 +778,7 @@ void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) { ALOGV("onFrameAvailable: suspended, ignoring frame"); } - BufferQueue::BufferItem item; + BufferItem item; status_t err = mConsumer->acquireBuffer(&item, 0); if (err == OK) { // If this is the first time we're seeing this buffer, add it to our @@ -841,6 +853,23 @@ status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) { return OK; } +status_t GraphicBufferSource::setMaxFps(float maxFps) { + Mutex::Autolock autoLock(mMutex); + + if (mExecuting) { + return INVALID_OPERATION; + } + + mFrameDropper = new FrameDropper(); + status_t err = mFrameDropper->setMaxFrameRate(maxFps); + if (err != OK) { + mFrameDropper.clear(); + return err; + } + + return OK; +} + void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) { Mutex::Autolock autoLock(mMutex); @@ -879,12 +908,12 @@ void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) { break; } - bool success = repeatLatestSubmittedBuffer_l(); + 
bool success = repeatLatestBuffer_l(); if (success) { - ALOGV("repeatLatestSubmittedBuffer_l SUCCESS"); + ALOGV("repeatLatestBuffer_l SUCCESS"); } else { - ALOGV("repeatLatestSubmittedBuffer_l FAILURE"); + ALOGV("repeatLatestBuffer_l FAILURE"); mRepeatBufferDeferred = true; } break; diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h index c8e3775..1067472 100644 --- a/media/libstagefright/omx/GraphicBufferSource.h +++ b/media/libstagefright/omx/GraphicBufferSource.h @@ -30,6 +30,8 @@ namespace android { +class FrameDropper; + /* * This class is used to feed OMX codecs from a Surface via BufferQueue. * @@ -119,6 +121,9 @@ public: // of suspension on input. status_t setMaxTimestampGapUs(int64_t maxGapUs); + // When set, the max frame rate fed to the encoder will be capped at maxFps. + status_t setMaxFps(float maxFps); + // Sets the time lapse (or slow motion) parameters. // data[0] is the time (us) between two frames for playback // data[1] is the time (us) between two frames for capture @@ -187,15 +192,15 @@ private: // Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer // reference into the codec buffer, and submits the data to the codec. - status_t submitBuffer_l(const BufferQueue::BufferItem &item, int cbi); + status_t submitBuffer_l(const BufferItem &item, int cbi); // Submits an empty buffer, with the EOS flag set. Returns without // doing anything if we don't have a codec buffer available. void submitEndOfInputStream_l(); - void setLatestSubmittedBuffer_l(const BufferQueue::BufferItem &item); - bool repeatLatestSubmittedBuffer_l(); - int64_t getTimestamp(const BufferQueue::BufferItem &item); + void setLatestBuffer_l(const BufferItem &item, bool dropped); + bool repeatLatestBuffer_l(); + int64_t getTimestamp(const BufferItem &item); // Lock, covers all member variables. 
mutable Mutex mMutex; @@ -250,6 +255,8 @@ private: int64_t mPrevModifiedTimeUs; int64_t mSkipFramesBeforeNs; + sp<FrameDropper> mFrameDropper; + sp<ALooper> mLooper; sp<AHandlerReflector<GraphicBufferSource> > mReflector; @@ -258,11 +265,11 @@ private: int64_t mRepeatLastFrameTimestamp; int32_t mRepeatLastFrameCount; - int mLatestSubmittedBufferId; - uint64_t mLatestSubmittedBufferFrameNum; - int32_t mLatestSubmittedBufferUseCount; + int mLatestBufferId; + uint64_t mLatestBufferFrameNum; + int32_t mLatestBufferUseCount; - // The previously submitted buffer should've been repeated but + // The previous buffer should've been repeated but // no codec buffer was available at the time. bool mRepeatBufferDeferred; diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index ab7419f..4779d6a 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -1075,6 +1075,7 @@ inline static const char *asString(IOMX::InternalOptionType i, const char *def = case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY: return "REPEAT_PREVIOUS_FRAME_DELAY"; case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: return "MAX_TIMESTAMP_GAP"; + case IOMX::INTERNAL_OPTION_MAX_FPS: return "MAX_FPS"; case IOMX::INTERNAL_OPTION_START_TIME: return "START_TIME"; case IOMX::INTERNAL_OPTION_TIME_LAPSE: return "TIME_LAPSE"; default: return def; @@ -1092,6 +1093,7 @@ status_t OMXNodeInstance::setInternalOption( case IOMX::INTERNAL_OPTION_SUSPEND: case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY: case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP: + case IOMX::INTERNAL_OPTION_MAX_FPS: case IOMX::INTERNAL_OPTION_START_TIME: case IOMX::INTERNAL_OPTION_TIME_LAPSE: { @@ -1129,6 +1131,14 @@ status_t OMXNodeInstance::setInternalOption( int64_t maxGapUs = *(int64_t *)data; CLOG_CONFIG(setInternalOption, "gapUs=%lld", (long long)maxGapUs); return bufferSource->setMaxTimestampGapUs(maxGapUs); + } else if (type == 
IOMX::INTERNAL_OPTION_MAX_FPS) { + if (size != sizeof(float)) { + return INVALID_OPERATION; + } + + float maxFps = *(float *)data; + CLOG_CONFIG(setInternalOption, "maxFps=%f", maxFps); + return bufferSource->setMaxFps(maxFps); } else if (type == IOMX::INTERNAL_OPTION_START_TIME) { if (size != sizeof(int64_t)) { return INVALID_OPERATION; diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp index 7f99dcd..801a1bd 100644 --- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp +++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp @@ -58,7 +58,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::sendCommand( OMX_COMMANDTYPE cmd, OMX_U32 param, OMX_PTR data) { CHECK(data == NULL); - sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler->id()); + sp<AMessage> msg = new AMessage(kWhatSendCommand, mHandler); msg->setInt32("cmd", cmd); msg->setInt32("param", param); msg->post(); @@ -307,7 +307,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::freeBuffer( OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer( OMX_BUFFERHEADERTYPE *buffer) { - sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler->id()); + sp<AMessage> msg = new AMessage(kWhatEmptyThisBuffer, mHandler); msg->setPointer("header", buffer); msg->post(); @@ -316,7 +316,7 @@ OMX_ERRORTYPE SimpleSoftOMXComponent::emptyThisBuffer( OMX_ERRORTYPE SimpleSoftOMXComponent::fillThisBuffer( OMX_BUFFERHEADERTYPE *buffer) { - sp<AMessage> msg = new AMessage(kWhatFillThisBuffer, mHandler->id()); + sp<AMessage> msg = new AMessage(kWhatFillThisBuffer, mHandler); msg->setPointer("header", buffer); msg->post(); diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk index 447b29e..9be637a 100644 --- a/media/libstagefright/omx/tests/Android.mk +++ b/media/libstagefright/omx/tests/Android.mk @@ -20,3 +20,21 @@ LOCAL_MODULE_TAGS := tests LOCAL_32_BIT_ONLY := true include $(BUILD_EXECUTABLE) + +include $(CLEAR_VARS) + 
+LOCAL_MODULE := FrameDropper_test + +LOCAL_MODULE_TAGS := tests + +LOCAL_SRC_FILES := \ + FrameDropper_test.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libstagefright_omx \ + libutils \ + +LOCAL_C_INCLUDES := \ + frameworks/av/media/libstagefright/omx \ + +include $(BUILD_NATIVE_TEST) diff --git a/media/libstagefright/omx/tests/FrameDropper_test.cpp b/media/libstagefright/omx/tests/FrameDropper_test.cpp new file mode 100644 index 0000000..4ac72c4 --- /dev/null +++ b/media/libstagefright/omx/tests/FrameDropper_test.cpp @@ -0,0 +1,136 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "FrameDropper_test" +#include <utils/Log.h> + +#include <gtest/gtest.h> + +#include "FrameDropper.h" +#include <media/stagefright/foundation/ADebug.h> + +namespace android { + +struct TestFrame { + int64_t timeUs; + bool shouldDrop; +}; + +static const TestFrame testFrames20Fps[] = { + {1000000, false}, {1050000, false}, {1100000, false}, {1150000, false}, + {1200000, false}, {1250000, false}, {1300000, false}, {1350000, false}, + {1400000, false}, {1450000, false}, {1500000, false}, {1550000, false}, + {1600000, false}, {1650000, false}, {1700000, false}, {1750000, false}, + {1800000, false}, {1850000, false}, {1900000, false}, {1950000, false}, +}; + +static const TestFrame testFrames30Fps[] = { + {1000000, false}, {1033333, false}, {1066667, false}, {1100000, false}, + {1133333, false}, {1166667, false}, {1200000, false}, {1233333, false}, + {1266667, false}, {1300000, false}, {1333333, false}, {1366667, false}, + {1400000, false}, {1433333, false}, {1466667, false}, {1500000, false}, + {1533333, false}, {1566667, false}, {1600000, false}, {1633333, false}, +}; + +static const TestFrame testFrames40Fps[] = { + {1000000, false}, {1025000, true}, {1050000, false}, {1075000, false}, + {1100000, false}, {1125000, true}, {1150000, false}, {1175000, false}, + {1200000, false}, {1225000, true}, {1250000, false}, {1275000, false}, + {1300000, false}, {1325000, true}, {1350000, false}, {1375000, false}, + {1400000, false}, {1425000, true}, {1450000, false}, {1475000, false}, +}; + +static const TestFrame testFrames60Fps[] = { + {1000000, false}, {1016667, true}, {1033333, false}, {1050000, true}, + {1066667, false}, {1083333, true}, {1100000, false}, {1116667, true}, + {1133333, false}, {1150000, true}, {1166667, false}, {1183333, true}, + {1200000, false}, {1216667, true}, {1233333, false}, {1250000, true}, + {1266667, false}, {1283333, true}, {1300000, false}, {1316667, true}, +}; + +static const TestFrame 
testFramesVariableFps[] = { + // 40fps + {1000000, false}, {1025000, true}, {1050000, false}, {1075000, false}, + {1100000, false}, {1125000, true}, {1150000, false}, {1175000, false}, + {1200000, false}, {1225000, true}, {1250000, false}, {1275000, false}, + {1300000, false}, {1325000, true}, {1350000, false}, {1375000, false}, + {1400000, false}, {1425000, true}, {1450000, false}, {1475000, false}, + // a timestamp jump plus switch to 20fps + {2000000, false}, {2050000, false}, {2100000, false}, {2150000, false}, + {2200000, false}, {2250000, false}, {2300000, false}, {2350000, false}, + {2400000, false}, {2450000, false}, {2500000, false}, {2550000, false}, + {2600000, false}, {2650000, false}, {2700000, false}, {2750000, false}, + {2800000, false}, {2850000, false}, {2900000, false}, {2950000, false}, + // 60fps + {2966667, false}, {2983333, true}, {3000000, false}, {3016667, true}, + {3033333, false}, {3050000, true}, {3066667, false}, {3083333, true}, + {3100000, false}, {3116667, true}, {3133333, false}, {3150000, true}, + {3166667, false}, {3183333, true}, {3200000, false}, {3216667, true}, + {3233333, false}, {3250000, true}, {3266667, false}, {3283333, true}, +}; + +static const int kMaxTestJitterUs = 2000; +// return one of 1000, 0, -1000 as jitter. 
+static int GetJitter(size_t i) { + return (1 - (i % 3)) * (kMaxTestJitterUs / 2); +} + +class FrameDropperTest : public ::testing::Test { +public: + FrameDropperTest() : mFrameDropper(new FrameDropper()) { + EXPECT_EQ(OK, mFrameDropper->setMaxFrameRate(30.0)); + } + +protected: + void RunTest(const TestFrame* frames, size_t size) { + for (size_t i = 0; i < size; ++i) { + int jitter = GetJitter(i); + int64_t testTimeUs = frames[i].timeUs + jitter; + printf("time %lld, testTime %lld, jitter %d\n", frames[i].timeUs, testTimeUs, jitter); + EXPECT_EQ(frames[i].shouldDrop, mFrameDropper->shouldDrop(testTimeUs)); + } + } + + sp<FrameDropper> mFrameDropper; +}; + +TEST_F(FrameDropperTest, TestInvalidMaxFrameRate) { + EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(-1.0)); + EXPECT_NE(OK, mFrameDropper->setMaxFrameRate(0)); +} + +TEST_F(FrameDropperTest, Test20Fps) { + RunTest(testFrames20Fps, ARRAY_SIZE(testFrames20Fps)); +} + +TEST_F(FrameDropperTest, Test30Fps) { + RunTest(testFrames30Fps, ARRAY_SIZE(testFrames30Fps)); +} + +TEST_F(FrameDropperTest, Test40Fps) { + RunTest(testFrames40Fps, ARRAY_SIZE(testFrames40Fps)); +} + +TEST_F(FrameDropperTest, Test60Fps) { + RunTest(testFrames60Fps, ARRAY_SIZE(testFrames60Fps)); +} + +TEST_F(FrameDropperTest, TestVariableFps) { + RunTest(testFramesVariableFps, ARRAY_SIZE(testFramesVariableFps)); +} + +} // namespace android diff --git a/media/libstagefright/rtsp/ARTPConnection.cpp b/media/libstagefright/rtsp/ARTPConnection.cpp index a6bd824..a86ab74 100644 --- a/media/libstagefright/rtsp/ARTPConnection.cpp +++ b/media/libstagefright/rtsp/ARTPConnection.cpp @@ -82,7 +82,7 @@ void ARTPConnection::addStream( size_t index, const sp<AMessage> ¬ify, bool injected) { - sp<AMessage> msg = new AMessage(kWhatAddStream, id()); + sp<AMessage> msg = new AMessage(kWhatAddStream, this); msg->setInt32("rtp-socket", rtpSocket); msg->setInt32("rtcp-socket", rtcpSocket); msg->setObject("session-desc", sessionDesc); @@ -93,7 +93,7 @@ void 
ARTPConnection::addStream( } void ARTPConnection::removeStream(int rtpSocket, int rtcpSocket) { - sp<AMessage> msg = new AMessage(kWhatRemoveStream, id()); + sp<AMessage> msg = new AMessage(kWhatRemoveStream, this); msg->setInt32("rtp-socket", rtpSocket); msg->setInt32("rtcp-socket", rtcpSocket); msg->post(); @@ -233,7 +233,7 @@ void ARTPConnection::postPollEvent() { return; } - sp<AMessage> msg = new AMessage(kWhatPollStreams, id()); + sp<AMessage> msg = new AMessage(kWhatPollStreams, this); msg->post(); mPollEventPending = true; @@ -639,7 +639,7 @@ sp<ARTPSource> ARTPConnection::findSource(StreamInfo *info, uint32_t srcId) { } void ARTPConnection::injectPacket(int index, const sp<ABuffer> &buffer) { - sp<AMessage> msg = new AMessage(kWhatInjectPacket, id()); + sp<AMessage> msg = new AMessage(kWhatInjectPacket, this); msg->setInt32("index", index); msg->setBuffer("buffer", buffer); msg->post(); diff --git a/media/libstagefright/rtsp/ARTPSession.cpp b/media/libstagefright/rtsp/ARTPSession.cpp index ba4e33c..e5acb06 100644 --- a/media/libstagefright/rtsp/ARTPSession.cpp +++ b/media/libstagefright/rtsp/ARTPSession.cpp @@ -82,7 +82,7 @@ status_t ARTPSession::setup(const sp<ASessionDescription> &desc) { info->mRTPSocket = rtpSocket; info->mRTCPSocket = rtcpSocket; - sp<AMessage> notify = new AMessage(kWhatAccessUnitComplete, id()); + sp<AMessage> notify = new AMessage(kWhatAccessUnitComplete, this); notify->setSize("track-index", mTracks.size() - 1); mRTPConn->addStream( diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp index e1607bf..56c4aa6 100644 --- a/media/libstagefright/rtsp/ARTPWriter.cpp +++ b/media/libstagefright/rtsp/ARTPWriter.cpp @@ -146,7 +146,7 @@ status_t ARTPWriter::start(MetaData * /* params */) { TRESPASS(); } - (new AMessage(kWhatStart, mReflector->id()))->post(); + (new AMessage(kWhatStart, mReflector))->post(); while (!(mFlags & kFlagStarted)) { mCondition.wait(mLock); @@ -161,7 +161,7 @@ status_t 
ARTPWriter::stop() { return OK; } - (new AMessage(kWhatStop, mReflector->id()))->post(); + (new AMessage(kWhatStop, mReflector))->post(); while (mFlags & kFlagStarted) { mCondition.wait(mLock); @@ -213,8 +213,8 @@ void ARTPWriter::onMessageReceived(const sp<AMessage> &msg) { mCondition.signal(); } - (new AMessage(kWhatRead, mReflector->id()))->post(); - (new AMessage(kWhatSendSR, mReflector->id()))->post(); + (new AMessage(kWhatRead, mReflector))->post(); + (new AMessage(kWhatSendSR, mReflector))->post(); break; } diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp index 60b3aaf..855ffdc 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.cpp +++ b/media/libstagefright/rtsp/ARTSPConnection.cpp @@ -68,28 +68,28 @@ ARTSPConnection::~ARTSPConnection() { } void ARTSPConnection::connect(const char *url, const sp<AMessage> &reply) { - sp<AMessage> msg = new AMessage(kWhatConnect, id()); + sp<AMessage> msg = new AMessage(kWhatConnect, this); msg->setString("url", url); msg->setMessage("reply", reply); msg->post(); } void ARTSPConnection::disconnect(const sp<AMessage> &reply) { - sp<AMessage> msg = new AMessage(kWhatDisconnect, id()); + sp<AMessage> msg = new AMessage(kWhatDisconnect, this); msg->setMessage("reply", reply); msg->post(); } void ARTSPConnection::sendRequest( const char *request, const sp<AMessage> &reply) { - sp<AMessage> msg = new AMessage(kWhatSendRequest, id()); + sp<AMessage> msg = new AMessage(kWhatSendRequest, this); msg->setString("request", request); msg->setMessage("reply", reply); msg->post(); } void ARTSPConnection::observeBinaryData(const sp<AMessage> &reply) { - sp<AMessage> msg = new AMessage(kWhatObserveBinaryData, id()); + sp<AMessage> msg = new AMessage(kWhatObserveBinaryData, this); msg->setMessage("reply", reply); msg->post(); } @@ -286,7 +286,7 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) { if (err < 0) { if (errno == EINPROGRESS) { - sp<AMessage> msg = new 
AMessage(kWhatCompleteConnection, id()); + sp<AMessage> msg = new AMessage(kWhatCompleteConnection, this); msg->setMessage("reply", reply); msg->setInt32("connection-id", mConnectionID); msg->post(); @@ -523,7 +523,7 @@ void ARTSPConnection::postReceiveReponseEvent() { return; } - sp<AMessage> msg = new AMessage(kWhatReceiveResponse, id()); + sp<AMessage> msg = new AMessage(kWhatReceiveResponse, this); msg->post(); mReceiveResponseEventPending = true; @@ -746,7 +746,7 @@ bool ARTSPConnection::receiveRTSPReponse() { AString request; CHECK(reply->findString("original-request", &request)); - sp<AMessage> msg = new AMessage(kWhatSendRequest, id()); + sp<AMessage> msg = new AMessage(kWhatSendRequest, this); msg->setMessage("reply", reply); msg->setString("request", request.c_str(), request.size()); diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 3bf489b..0642343 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -169,10 +169,10 @@ struct MyHandler : public AHandler { looper()->registerHandler(mConn); (1 ? mNetLooper : looper())->registerHandler(mRTPConn); - sp<AMessage> notify = new AMessage('biny', id()); + sp<AMessage> notify = new AMessage('biny', this); mConn->observeBinaryData(notify); - sp<AMessage> reply = new AMessage('conn', id()); + sp<AMessage> reply = new AMessage('conn', this); mConn->connect(mOriginalSessionURL.c_str(), reply); } @@ -180,10 +180,10 @@ struct MyHandler : public AHandler { looper()->registerHandler(mConn); (1 ? 
mNetLooper : looper())->registerHandler(mRTPConn); - sp<AMessage> notify = new AMessage('biny', id()); + sp<AMessage> notify = new AMessage('biny', this); mConn->observeBinaryData(notify); - sp<AMessage> reply = new AMessage('sdpl', id()); + sp<AMessage> reply = new AMessage('sdpl', this); reply->setObject("description", desc); mConn->connect(mOriginalSessionURL.c_str(), reply); } @@ -210,11 +210,11 @@ struct MyHandler : public AHandler { } void disconnect() { - (new AMessage('abor', id()))->post(); + (new AMessage('abor', this))->post(); } void seek(int64_t timeUs) { - sp<AMessage> msg = new AMessage('seek', id()); + sp<AMessage> msg = new AMessage('seek', this); msg->setInt64("time", timeUs); mPauseGeneration++; msg->post(); @@ -225,14 +225,14 @@ struct MyHandler : public AHandler { } void pause() { - sp<AMessage> msg = new AMessage('paus', id()); + sp<AMessage> msg = new AMessage('paus', this); mPauseGeneration++; msg->setInt32("pausecheck", mPauseGeneration); msg->post(kPauseDelayUs); } void resume() { - sp<AMessage> msg = new AMessage('resu', id()); + sp<AMessage> msg = new AMessage('resu', this); mPauseGeneration++; msg->post(); } @@ -454,10 +454,10 @@ struct MyHandler : public AHandler { request.append("Accept: application/sdp\r\n"); request.append("\r\n"); - sp<AMessage> reply = new AMessage('desc', id()); + sp<AMessage> reply = new AMessage('desc', this); mConn->sendRequest(request.c_str(), reply); } else { - (new AMessage('disc', id()))->post(); + (new AMessage('disc', this))->post(); } break; } @@ -468,10 +468,10 @@ struct MyHandler : public AHandler { int32_t reconnect; if (msg->findInt32("reconnect", &reconnect) && reconnect) { - sp<AMessage> reply = new AMessage('conn', id()); + sp<AMessage> reply = new AMessage('conn', this); mConn->connect(mOriginalSessionURL.c_str(), reply); } else { - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); } break; } @@ -514,7 +514,7 @@ struct MyHandler : public AHandler { ALOGI("rewritten 
session url: '%s'", mSessionURL.c_str()); } - sp<AMessage> reply = new AMessage('conn', id()); + sp<AMessage> reply = new AMessage('conn', this); mConn->connect(mOriginalSessionURL.c_str(), reply); break; } @@ -586,7 +586,7 @@ struct MyHandler : public AHandler { } if (result != OK) { - sp<AMessage> reply = new AMessage('disc', id()); + sp<AMessage> reply = new AMessage('disc', this); mConn->disconnect(reply); } break; @@ -631,7 +631,7 @@ struct MyHandler : public AHandler { } if (result != OK) { - sp<AMessage> reply = new AMessage('disc', id()); + sp<AMessage> reply = new AMessage('disc', this); mConn->disconnect(reply); } break; @@ -703,7 +703,7 @@ struct MyHandler : public AHandler { mSessionID.erase(i, mSessionID.size() - i); } - sp<AMessage> notify = new AMessage('accu', id()); + sp<AMessage> notify = new AMessage('accu', this); notify->setSize("track-index", trackIndex); i = response->mHeaders.indexOfKey("transport"); @@ -769,10 +769,10 @@ struct MyHandler : public AHandler { request.append("\r\n"); - sp<AMessage> reply = new AMessage('play', id()); + sp<AMessage> reply = new AMessage('play', this); mConn->sendRequest(request.c_str(), reply); } else { - sp<AMessage> reply = new AMessage('disc', id()); + sp<AMessage> reply = new AMessage('disc', this); mConn->disconnect(reply); } break; @@ -797,7 +797,7 @@ struct MyHandler : public AHandler { } else { parsePlayResponse(response); - sp<AMessage> timeout = new AMessage('tiou', id()); + sp<AMessage> timeout = new AMessage('tiou', this); mCheckTimeoutGeneration++; timeout->setInt32("tioucheck", mCheckTimeoutGeneration); timeout->post(kStartupTimeoutUs); @@ -805,7 +805,7 @@ struct MyHandler : public AHandler { } if (result != OK) { - sp<AMessage> reply = new AMessage('disc', id()); + sp<AMessage> reply = new AMessage('disc', this); mConn->disconnect(reply); } @@ -831,7 +831,7 @@ struct MyHandler : public AHandler { request.append("\r\n"); request.append("\r\n"); - sp<AMessage> reply = new AMessage('opts', id()); + 
sp<AMessage> reply = new AMessage('opts', this); reply->setInt32("generation", mKeepAliveGeneration); mConn->sendRequest(request.c_str(), reply); break; @@ -894,7 +894,7 @@ struct MyHandler : public AHandler { mPausing = false; mSeekable = true; - sp<AMessage> reply = new AMessage('tear', id()); + sp<AMessage> reply = new AMessage('tear', this); int32_t reconnect; if (msg->findInt32("reconnect", &reconnect) && reconnect) { @@ -926,7 +926,7 @@ struct MyHandler : public AHandler { ALOGI("TEARDOWN completed with result %d (%s)", result, strerror(-result)); - sp<AMessage> reply = new AMessage('disc', id()); + sp<AMessage> reply = new AMessage('disc', this); int32_t reconnect; if (msg->findInt32("reconnect", &reconnect) && reconnect) { @@ -958,7 +958,7 @@ struct MyHandler : public AHandler { if (mNumAccessUnitsReceived == 0) { #if 1 ALOGI("stream ended? aborting."); - (new AMessage('abor', id()))->post(); + (new AMessage('abor', this))->post(); break; #else ALOGI("haven't seen an AU in a looong time."); @@ -1077,7 +1077,7 @@ struct MyHandler : public AHandler { request.append("\r\n"); - sp<AMessage> reply = new AMessage('pau2', id()); + sp<AMessage> reply = new AMessage('pau2', this); mConn->sendRequest(request.c_str(), reply); break; } @@ -1114,7 +1114,7 @@ struct MyHandler : public AHandler { request.append("\r\n"); - sp<AMessage> reply = new AMessage('res2', id()); + sp<AMessage> reply = new AMessage('res2', this); mConn->sendRequest(request.c_str(), reply); break; } @@ -1143,7 +1143,7 @@ struct MyHandler : public AHandler { // Post new timeout in order to make sure to use // fake timestamps if no new Sender Reports arrive - sp<AMessage> timeout = new AMessage('tiou', id()); + sp<AMessage> timeout = new AMessage('tiou', this); mCheckTimeoutGeneration++; timeout->setInt32("tioucheck", mCheckTimeoutGeneration); timeout->post(kStartupTimeoutUs); @@ -1152,7 +1152,7 @@ struct MyHandler : public AHandler { if (result != OK) { ALOGE("resume failed, aborting."); - (new 
AMessage('abor', id()))->post(); + (new AMessage('abor', this))->post(); } mPausing = false; @@ -1180,7 +1180,7 @@ struct MyHandler : public AHandler { mCheckPending = true; ++mCheckGeneration; - sp<AMessage> reply = new AMessage('see1', id()); + sp<AMessage> reply = new AMessage('see1', this); reply->setInt64("time", timeUs); if (mPausing) { @@ -1221,7 +1221,7 @@ struct MyHandler : public AHandler { // Start new timeoutgeneration to avoid getting timeout // before PLAY response arrive - sp<AMessage> timeout = new AMessage('tiou', id()); + sp<AMessage> timeout = new AMessage('tiou', this); mCheckTimeoutGeneration++; timeout->setInt32("tioucheck", mCheckTimeoutGeneration); timeout->post(kStartupTimeoutUs); @@ -1243,7 +1243,7 @@ struct MyHandler : public AHandler { request.append("\r\n"); - sp<AMessage> reply = new AMessage('see2', id()); + sp<AMessage> reply = new AMessage('see2', this); mConn->sendRequest(request.c_str(), reply); break; } @@ -1277,7 +1277,7 @@ struct MyHandler : public AHandler { // Post new timeout in order to make sure to use // fake timestamps if no new Sender Reports arrive - sp<AMessage> timeout = new AMessage('tiou', id()); + sp<AMessage> timeout = new AMessage('tiou', this); mCheckTimeoutGeneration++; timeout->setInt32("tioucheck", mCheckTimeoutGeneration); timeout->post(kStartupTimeoutUs); @@ -1293,7 +1293,7 @@ struct MyHandler : public AHandler { if (result != OK) { ALOGE("seek failed, aborting."); - (new AMessage('abor', id()))->post(); + (new AMessage('abor', this))->post(); } mPausing = false; @@ -1343,12 +1343,12 @@ struct MyHandler : public AHandler { mTryTCPInterleaving = true; - sp<AMessage> msg = new AMessage('abor', id()); + sp<AMessage> msg = new AMessage('abor', this); msg->setInt32("reconnect", true); msg->post(); } else { ALOGW("Never received any data, disconnecting."); - (new AMessage('abor', id()))->post(); + (new AMessage('abor', this))->post(); } } else { if (!mAllTracksHaveTime) { @@ -1369,7 +1369,7 @@ struct MyHandler : 
public AHandler { } void postKeepAlive() { - sp<AMessage> msg = new AMessage('aliv', id()); + sp<AMessage> msg = new AMessage('aliv', this); msg->setInt32("generation", mKeepAliveGeneration); msg->post((mKeepAliveTimeoutUs * 9) / 10); } @@ -1380,7 +1380,7 @@ struct MyHandler : public AHandler { } mCheckPending = true; - sp<AMessage> check = new AMessage('chek', id()); + sp<AMessage> check = new AMessage('chek', this); check->setInt32("generation", mCheckGeneration); check->post(kAccessUnitTimeoutUs); } @@ -1566,7 +1566,7 @@ private: if (source->initCheck() != OK) { ALOGW("Unsupported format. Ignoring track #%d.", index); - sp<AMessage> reply = new AMessage('setu', id()); + sp<AMessage> reply = new AMessage('setu', this); reply->setSize("index", index); reply->setInt32("result", ERROR_UNSUPPORTED); reply->post(); @@ -1652,7 +1652,7 @@ private: request.append("\r\n"); - sp<AMessage> reply = new AMessage('setu', id()); + sp<AMessage> reply = new AMessage('setu', this); reply->setSize("index", index); reply->setSize("track-index", mTracks.size() - 1); mConn->sendRequest(request.c_str(), reply); diff --git a/media/libstagefright/rtsp/MyTransmitter.h b/media/libstagefright/rtsp/MyTransmitter.h index 009a3b1..369f276 100644 --- a/media/libstagefright/rtsp/MyTransmitter.h +++ b/media/libstagefright/rtsp/MyTransmitter.h @@ -100,7 +100,7 @@ struct MyTransmitter : public AHandler { mLooper->registerHandler(this); mLooper->registerHandler(mConn); - sp<AMessage> reply = new AMessage('conn', id()); + sp<AMessage> reply = new AMessage('conn', this); mConn->connect(mServerURL.c_str(), reply); #ifdef ANDROID @@ -229,7 +229,7 @@ struct MyTransmitter : public AHandler { request.append("\r\n"); request.append(sdp); - sp<AMessage> reply = new AMessage('anno', id()); + sp<AMessage> reply = new AMessage('anno', this); mConn->sendRequest(request.c_str(), reply); } @@ -350,7 +350,7 @@ struct MyTransmitter : public AHandler { << result << " (" << strerror(-result) << ")"; if (result != OK) 
{ - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); break; } @@ -381,7 +381,7 @@ struct MyTransmitter : public AHandler { if (response->mStatusCode == 401) { if (mAuthType != NONE) { LOG(INFO) << "FAILED to authenticate"; - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); break; } @@ -391,14 +391,14 @@ struct MyTransmitter : public AHandler { } if (result != OK || response->mStatusCode != 200) { - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); break; } unsigned rtpPort; ARTPConnection::MakePortPair(&mRTPSocket, &mRTCPSocket, &rtpPort); - // (new AMessage('poll', id()))->post(); + // (new AMessage('poll', this))->post(); AString request; request.append("SETUP "); @@ -414,7 +414,7 @@ struct MyTransmitter : public AHandler { request.append(";mode=record\r\n"); request.append("\r\n"); - sp<AMessage> reply = new AMessage('setu', id()); + sp<AMessage> reply = new AMessage('setu', this); mConn->sendRequest(request.c_str(), reply); break; } @@ -468,7 +468,7 @@ struct MyTransmitter : public AHandler { } if (result != OK || response->mStatusCode != 200) { - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); break; } @@ -535,7 +535,7 @@ struct MyTransmitter : public AHandler { request.append("\r\n"); request.append("\r\n"); - sp<AMessage> reply = new AMessage('reco', id()); + sp<AMessage> reply = new AMessage('reco', this); mConn->sendRequest(request.c_str(), reply); break; } @@ -558,13 +558,13 @@ struct MyTransmitter : public AHandler { } if (result != OK) { - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); break; } - (new AMessage('more', id()))->post(); - (new AMessage('sr ', id()))->post(); - (new AMessage('aliv', id()))->post(30000000ll); + (new AMessage('more', this))->post(); + (new AMessage('sr ', this))->post(); + (new AMessage('aliv', this))->post(30000000ll); break; } @@ -586,7 +586,7 @@ struct MyTransmitter : public 
AHandler { request.append("\r\n"); request.append("\r\n"); - sp<AMessage> reply = new AMessage('opts', id()); + sp<AMessage> reply = new AMessage('opts', this); mConn->sendRequest(request.c_str(), reply); break; } @@ -603,7 +603,7 @@ struct MyTransmitter : public AHandler { break; } - (new AMessage('aliv', id()))->post(30000000ll); + (new AMessage('aliv', this))->post(30000000ll); break; } @@ -702,7 +702,7 @@ struct MyTransmitter : public AHandler { request.append("\r\n"); request.append("\r\n"); - sp<AMessage> reply = new AMessage('paus', id()); + sp<AMessage> reply = new AMessage('paus', this); mConn->sendRequest(request.c_str(), reply); } break; @@ -753,7 +753,7 @@ struct MyTransmitter : public AHandler { request.append("\r\n"); request.append("\r\n"); - sp<AMessage> reply = new AMessage('tear', id()); + sp<AMessage> reply = new AMessage('tear', this); mConn->sendRequest(request.c_str(), reply); break; } @@ -775,7 +775,7 @@ struct MyTransmitter : public AHandler { CHECK(response != NULL); } - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); break; } @@ -784,14 +784,14 @@ struct MyTransmitter : public AHandler { LOG(INFO) << "disconnect completed"; mConnected = false; - (new AMessage('quit', id()))->post(); + (new AMessage('quit', this))->post(); break; } case 'quit': { if (mConnected) { - mConn->disconnect(new AMessage('disc', id())); + mConn->disconnect(new AMessage('disc', this)); break; } diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp index a24eb69..0f46c83 100644 --- a/media/libstagefright/rtsp/SDPLoader.cpp +++ b/media/libstagefright/rtsp/SDPLoader.cpp @@ -51,7 +51,7 @@ SDPLoader::SDPLoader( void SDPLoader::load(const char *url, const KeyedVector<String8, String8> *headers) { mNetLooper->registerHandler(this); - sp<AMessage> msg = new AMessage(kWhatLoad, id()); + sp<AMessage> msg = new AMessage(kWhatLoad, this); msg->setString("url", url); if (headers != NULL) { diff --git 
a/media/libstagefright/rtsp/UDPPusher.cpp b/media/libstagefright/rtsp/UDPPusher.cpp index 47ea6f1..5c685a1 100644 --- a/media/libstagefright/rtsp/UDPPusher.cpp +++ b/media/libstagefright/rtsp/UDPPusher.cpp @@ -65,7 +65,7 @@ void UDPPusher::start() { mFirstTimeMs = fromlel(timeMs); mFirstTimeUs = ALooper::GetNowUs(); - (new AMessage(kWhatPush, id()))->post(); + (new AMessage(kWhatPush, this))->post(); } bool UDPPusher::onPush() { @@ -103,7 +103,7 @@ bool UDPPusher::onPush() { timeMs -= mFirstTimeMs; int64_t whenUs = mFirstTimeUs + timeMs * 1000ll; int64_t nowUs = ALooper::GetNowUs(); - (new AMessage(kWhatPush, id()))->post(whenUs - nowUs); + (new AMessage(kWhatPush, this))->post(whenUs - nowUs); return true; } diff --git a/media/libstagefright/timedtext/TimedTextPlayer.cpp b/media/libstagefright/timedtext/TimedTextPlayer.cpp index a070487..aecf666 100644 --- a/media/libstagefright/timedtext/TimedTextPlayer.cpp +++ b/media/libstagefright/timedtext/TimedTextPlayer.cpp @@ -56,25 +56,25 @@ TimedTextPlayer::~TimedTextPlayer() { } void TimedTextPlayer::start() { - (new AMessage(kWhatStart, id()))->post(); + (new AMessage(kWhatStart, this))->post(); } void TimedTextPlayer::pause() { - (new AMessage(kWhatPause, id()))->post(); + (new AMessage(kWhatPause, this))->post(); } void TimedTextPlayer::resume() { - (new AMessage(kWhatResume, id()))->post(); + (new AMessage(kWhatResume, this))->post(); } void TimedTextPlayer::seekToAsync(int64_t timeUs) { - sp<AMessage> msg = new AMessage(kWhatSeek, id()); + sp<AMessage> msg = new AMessage(kWhatSeek, this); msg->setInt64("seekTimeUs", timeUs); msg->post(); } void TimedTextPlayer::setDataSource(sp<TimedTextSource> source) { - sp<AMessage> msg = new AMessage(kWhatSetSource, id()); + sp<AMessage> msg = new AMessage(kWhatSetSource, this); msg->setObject("source", source); msg->post(); } @@ -231,7 +231,7 @@ void TimedTextPlayer::doRead(MediaSource::ReadOptions* options) { status_t err = mSource->read(&startTimeUs, &endTimeUs, 
&(parcelEvent->parcel), options); if (err == WOULD_BLOCK) { - sp<AMessage> msg = new AMessage(kWhatRetryRead, id()); + sp<AMessage> msg = new AMessage(kWhatRetryRead, this); if (options != NULL) { int64_t seekTimeUs = kInvalidTimeUs; MediaSource::ReadOptions::SeekMode seekMode = @@ -259,7 +259,7 @@ void TimedTextPlayer::doRead(MediaSource::ReadOptions* options) { void TimedTextPlayer::postTextEvent(const sp<ParcelEvent>& parcel, int64_t timeUs) { int64_t delayUs = delayUsFromCurrentTime(timeUs); - sp<AMessage> msg = new AMessage(kWhatSendSubtitle, id()); + sp<AMessage> msg = new AMessage(kWhatSendSubtitle, this); msg->setInt32("generation", mSendSubtitleGeneration); if (parcel != NULL) { msg->setObject("subtitle", parcel); diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp index 069961b..737f144 100644 --- a/media/libstagefright/webm/WebmWriter.cpp +++ b/media/libstagefright/webm/WebmWriter.cpp @@ -80,38 +80,6 @@ WebmWriter::WebmWriter(int fd) mCuePoints); } -WebmWriter::WebmWriter(const char *filename) - : mInitCheck(NO_INIT), - mTimeCodeScale(1000000), - mStartTimestampUs(0), - mStartTimeOffsetMs(0), - mSegmentOffset(0), - mSegmentDataStart(0), - mInfoOffset(0), - mInfoSize(0), - mTracksOffset(0), - mCuesOffset(0), - mPaused(false), - mStarted(false), - mIsFileSizeLimitExplicitlyRequested(false), - mIsRealTimeRecording(false), - mStreamableFile(true), - mEstimatedCuesSize(0) { - mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR); - if (mFd >= 0) { - ALOGV("fd %d; flags: %o", mFd, fcntl(mFd, F_GETFL, 0)); - mInitCheck = OK; - } - mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack); - mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack); - mSinkThread = new WebmFrameSinkThread( - mFd, - mSegmentDataStart, - mStreams[kVideoIndex].mSink, - mStreams[kAudioIndex].mSink, - mCuePoints); -} - // static sp<WebmElement> 
WebmWriter::videoTrack(const sp<MetaData>& md) { int32_t width, height; diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h index 36b6965..4ad770e 100644 --- a/media/libstagefright/webm/WebmWriter.h +++ b/media/libstagefright/webm/WebmWriter.h @@ -37,7 +37,6 @@ namespace android { class WebmWriter : public MediaWriter { public: WebmWriter(int fd); - WebmWriter(const char *filename); ~WebmWriter() { reset(); } diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp index b1cdec0..6f0087f 100644 --- a/media/libstagefright/wifi-display/MediaSender.cpp +++ b/media/libstagefright/wifi-display/MediaSender.cpp @@ -121,7 +121,7 @@ status_t MediaSender::initAsync( } if (err == OK) { - sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); + sp<AMessage> notify = new AMessage(kWhatSenderNotify, this); notify->setInt32("generation", mGeneration); mTSSender = new RTPSender(mNetSession, notify); looper()->registerHandler(mTSSender); @@ -170,7 +170,7 @@ status_t MediaSender::initAsync( return INVALID_OPERATION; } - sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); + sp<AMessage> notify = new AMessage(kWhatSenderNotify, this); notify->setInt32("generation", mGeneration); notify->setSize("trackIndex", trackIndex); diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp index e88a3bd..4e72533 100644 --- a/media/libstagefright/wifi-display/rtp/RTPSender.cpp +++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp @@ -95,11 +95,11 @@ status_t RTPSender::initAsync( return INVALID_OPERATION; } - sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id()); + sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, this); sp<AMessage> rtcpNotify; if (remoteRTCPPort >= 0) { - rtcpNotify = new AMessage(kWhatRTCPNotify, id()); + rtcpNotify = new AMessage(kWhatRTCPNotify, this); } CHECK_EQ(mRTPSessionID, 0); diff 
--git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp index 2834a66..8368945 100644 --- a/media/libstagefright/wifi-display/source/Converter.cpp +++ b/media/libstagefright/wifi-display/source/Converter.cpp @@ -93,7 +93,7 @@ Converter::~Converter() { void Converter::shutdownAsync() { ALOGV("shutdown"); - (new AMessage(kWhatShutdown, id()))->post(); + (new AMessage(kWhatShutdown, this))->post(); } status_t Converter::init() { @@ -482,11 +482,11 @@ void Converter::scheduleDoMoreWork() { #if 1 if (mEncoderActivityNotify == NULL) { - mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, id()); + mEncoderActivityNotify = new AMessage(kWhatEncoderActivity, this); } mEncoder->requestActivityNotification(mEncoderActivityNotify->dup()); #else - sp<AMessage> notify = new AMessage(kWhatEncoderActivity, id()); + sp<AMessage> notify = new AMessage(kWhatEncoderActivity, this); notify->setInt64("whenUs", ALooper::GetNowUs()); mEncoder->requestActivityNotification(notify); #endif @@ -731,8 +731,7 @@ status_t Converter::doMoreWork() { // MediaSender will post the following message when HDCP // is done, to release the output buffer back to encoder. - sp<AMessage> notify(new AMessage( - kWhatReleaseOutputBuffer, id())); + sp<AMessage> notify(new AMessage(kWhatReleaseOutputBuffer, this)); notify->setInt32("bufferIndex", bufferIndex); buffer = new ABuffer( @@ -787,18 +786,18 @@ status_t Converter::doMoreWork() { } void Converter::requestIDRFrame() { - (new AMessage(kWhatRequestIDRFrame, id()))->post(); + (new AMessage(kWhatRequestIDRFrame, this))->post(); } void Converter::dropAFrame() { // Unsupported in surface input mode. 
CHECK(!(mFlags & FLAG_USE_SURFACE_INPUT)); - (new AMessage(kWhatDropAFrame, id()))->post(); + (new AMessage(kWhatDropAFrame, this))->post(); } void Converter::suspendEncoding(bool suspend) { - sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, id()); + sp<AMessage> msg = new AMessage(kWhatSuspendEncoding, this); msg->setInt32("suspend", suspend); msg->post(); } diff --git a/media/libstagefright/wifi-display/source/MediaPuller.cpp b/media/libstagefright/wifi-display/source/MediaPuller.cpp index 86b918f..ce07a4e 100644 --- a/media/libstagefright/wifi-display/source/MediaPuller.cpp +++ b/media/libstagefright/wifi-display/source/MediaPuller.cpp @@ -63,21 +63,21 @@ status_t MediaPuller::postSynchronouslyAndReturnError( } status_t MediaPuller::start() { - return postSynchronouslyAndReturnError(new AMessage(kWhatStart, id())); + return postSynchronouslyAndReturnError(new AMessage(kWhatStart, this)); } void MediaPuller::stopAsync(const sp<AMessage> ¬ify) { - sp<AMessage> msg = new AMessage(kWhatStop, id()); + sp<AMessage> msg = new AMessage(kWhatStop, this); msg->setMessage("notify", notify); msg->post(); } void MediaPuller::pause() { - (new AMessage(kWhatPause, id()))->post(); + (new AMessage(kWhatPause, this))->post(); } void MediaPuller::resume() { - (new AMessage(kWhatResume, id()))->post(); + (new AMessage(kWhatResume, this))->post(); } void MediaPuller::onMessageReceived(const sp<AMessage> &msg) { @@ -105,7 +105,7 @@ void MediaPuller::onMessageReceived(const sp<AMessage> &msg) { sp<AMessage> response = new AMessage; response->setInt32("err", err); - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); response->postReply(replyID); break; @@ -215,7 +215,7 @@ void MediaPuller::onMessageReceived(const sp<AMessage> &msg) { } void MediaPuller::schedulePull() { - sp<AMessage> msg = new AMessage(kWhatPull, id()); + sp<AMessage> msg = new AMessage(kWhatPull, this); msg->setInt32("generation", mPullGeneration); msg->post(); } diff --git 
a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp index 2cb4786..6080943 100644 --- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp +++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp @@ -214,7 +214,7 @@ void WifiDisplaySource::PlaybackSession::Track::stopAsync() { mConverter->shutdownAsync(); } - sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, id()); + sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, this); if (mStarted && mMediaPuller != NULL) { if (mRepeaterSource != NULL) { @@ -382,7 +382,7 @@ status_t WifiDisplaySource::PlaybackSession::init( size_t videoResolutionIndex, VideoFormats::ProfileType videoProfileType, VideoFormats::LevelType videoLevelType) { - sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, id()); + sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, this); mMediaSender = new MediaSender(mNetSession, notify); looper()->registerHandler(mMediaSender); @@ -440,7 +440,7 @@ void WifiDisplaySource::PlaybackSession::updateLiveness() { status_t WifiDisplaySource::PlaybackSession::play() { updateLiveness(); - (new AMessage(kWhatResume, id()))->post(); + (new AMessage(kWhatResume, this))->post(); return OK; } @@ -460,7 +460,7 @@ status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() { status_t WifiDisplaySource::PlaybackSession::pause() { updateLiveness(); - (new AMessage(kWhatPause, id()))->post(); + (new AMessage(kWhatPause, this))->post(); return OK; } @@ -786,7 +786,7 @@ status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer( size_t trackIndex = mTracks.size(); - sp<AMessage> notify = new AMessage(kWhatTrackNotify, id()); + sp<AMessage> notify = new AMessage(kWhatTrackNotify, this); notify->setSize("trackIndex", trackIndex); sp<Track> track = new Track(notify, format); @@ -833,7 +833,7 @@ void WifiDisplaySource::PlaybackSession::schedulePullExtractor() { int64_t whenUs = sampleTimeUs - 
mFirstSampleTimeUs + mFirstSampleTimeRealUs; - sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, id()); + sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, this); msg->setInt32("generation", mPullExtractorGeneration); msg->post(whenUs - nowUs); @@ -857,7 +857,7 @@ void WifiDisplaySource::PlaybackSession::onPullExtractor() { size_t trackIndex; CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex)); - sp<AMessage> msg = new AMessage(kWhatConverterNotify, id()); + sp<AMessage> msg = new AMessage(kWhatConverterNotify, this); msg->setSize( "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex)); @@ -955,7 +955,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource( ? MEDIA_MIMETYPE_AUDIO_RAW : MEDIA_MIMETYPE_AUDIO_AAC); } - notify = new AMessage(kWhatConverterNotify, id()); + notify = new AMessage(kWhatConverterNotify, this); notify->setSize("trackIndex", trackIndex); sp<Converter> converter = new Converter(notify, codecLooper, format); @@ -970,7 +970,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource( return err; } - notify = new AMessage(Converter::kWhatMediaPullerNotify, converter->id()); + notify = new AMessage(Converter::kWhatMediaPullerNotify, converter); notify->setSize("trackIndex", trackIndex); sp<MediaPuller> puller = new MediaPuller(source, notify); @@ -980,7 +980,7 @@ status_t WifiDisplaySource::PlaybackSession::addSource( *numInputBuffers = converter->getInputBufferCount(); } - notify = new AMessage(kWhatTrackNotify, id()); + notify = new AMessage(kWhatTrackNotify, this); notify->setSize("trackIndex", trackIndex); sp<Track> track = new Track( diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp index 59d7e6e..af6b663 100644 --- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp +++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp @@ -173,7 +173,7 @@ status_t RepeaterSource::read( } void 
RepeaterSource::postRead() { - (new AMessage(kWhatRead, mReflector->id()))->post(); + (new AMessage(kWhatRead, mReflector))->post(); } void RepeaterSource::onMessageReceived(const sp<AMessage> &msg) { diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp index 7eb8b73..14d0951 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp @@ -57,7 +57,7 @@ WifiDisplaySource::WifiDisplaySource( mNetSession(netSession), mClient(client), mSessionID(0), - mStopReplyID(0), + mStopReplyID(NULL), mChosenRTPPort(-1), mUsingPCMAudio(false), mClientSessionID(0), @@ -106,7 +106,7 @@ static status_t PostAndAwaitResponse( status_t WifiDisplaySource::start(const char *iface) { CHECK_EQ(mState, INITIALIZED); - sp<AMessage> msg = new AMessage(kWhatStart, id()); + sp<AMessage> msg = new AMessage(kWhatStart, this); msg->setString("iface", iface); sp<AMessage> response; @@ -114,21 +114,21 @@ status_t WifiDisplaySource::start(const char *iface) { } status_t WifiDisplaySource::stop() { - sp<AMessage> msg = new AMessage(kWhatStop, id()); + sp<AMessage> msg = new AMessage(kWhatStop, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t WifiDisplaySource::pause() { - sp<AMessage> msg = new AMessage(kWhatPause, id()); + sp<AMessage> msg = new AMessage(kWhatPause, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); } status_t WifiDisplaySource::resume() { - sp<AMessage> msg = new AMessage(kWhatResume, id()); + sp<AMessage> msg = new AMessage(kWhatResume, this); sp<AMessage> response; return PostAndAwaitResponse(msg, &response); @@ -138,7 +138,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { switch (msg->what()) { case kWhatStart: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); AString iface; @@ -167,7 
+167,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { if (err == OK) { if (inet_aton(iface.c_str(), &mInterfaceAddr) != 0) { - sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id()); + sp<AMessage> notify = new AMessage(kWhatRTSPNotify, this); err = mNetSession->createRTSPServer( mInterfaceAddr, port, notify, &mSessionID); @@ -310,7 +310,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { if (err == OK) { mState = AWAITING_CLIENT_TEARDOWN; - (new AMessage(kWhatTeardownTriggerTimedOut, id()))->post( + (new AMessage(kWhatTeardownTriggerTimedOut, this))->post( kTeardownTriggerTimeouSecs * 1000000ll); break; @@ -325,7 +325,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { case kWhatPause: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); status_t err = OK; @@ -345,7 +345,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { case kWhatResume: { - uint32_t replyID; + sp<AReplyToken> replyID; CHECK(msg->senderAwaitsResponse(&replyID)); status_t err = OK; @@ -492,7 +492,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { if (mState == AWAITING_CLIENT_TEARDOWN) { ALOGI("TEARDOWN trigger timed out, forcing disconnection."); - CHECK_NE(mStopReplyID, 0); + CHECK(mStopReplyID != NULL); finishStop(); break; } @@ -529,7 +529,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) { // HDCPObserver::notify is completely handled before // we clear the HDCP instance and unload the shared // library :( - (new AMessage(kWhatFinishStop2, id()))->post(300000ll); + (new AMessage(kWhatFinishStop2, this))->post(300000ll); break; } @@ -1027,7 +1027,7 @@ void WifiDisplaySource::scheduleReaper() { } mReaperPending = true; - (new AMessage(kWhatReapDeadClients, id()))->post(kReaperIntervalUs); + (new AMessage(kWhatReapDeadClients, this))->post(kReaperIntervalUs); } void WifiDisplaySource::scheduleKeepAlive(int32_t 
sessionID) { @@ -1035,7 +1035,7 @@ void WifiDisplaySource::scheduleKeepAlive(int32_t sessionID) { // expire, make sure the timeout is greater than 5 secs to begin with. CHECK_GT(kPlaybackSessionTimeoutUs, 5000000ll); - sp<AMessage> msg = new AMessage(kWhatKeepAlive, id()); + sp<AMessage> msg = new AMessage(kWhatKeepAlive, this); msg->setInt32("sessionID", sessionID); msg->post(kPlaybackSessionTimeoutUs - 5000000ll); } @@ -1239,7 +1239,7 @@ status_t WifiDisplaySource::onSetupRequest( int32_t playbackSessionID = makeUniquePlaybackSessionID(); - sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, id()); + sp<AMessage> notify = new AMessage(kWhatPlaybackSessionNotify, this); notify->setInt32("playbackSessionID", playbackSessionID); notify->setInt32("sessionID", sessionID); @@ -1470,7 +1470,7 @@ status_t WifiDisplaySource::onTeardownRequest( mNetSession->sendRequest(sessionID, response.c_str()); if (mState == AWAITING_CLIENT_TEARDOWN) { - CHECK_NE(mStopReplyID, 0); + CHECK(mStopReplyID != NULL); finishStop(); } else { mClient->onDisplayError(IRemoteDisplayClient::kDisplayErrorUnknown); @@ -1707,7 +1707,7 @@ status_t WifiDisplaySource::makeHDCP() { return ERROR_UNSUPPORTED; } - sp<AMessage> notify = new AMessage(kWhatHDCPNotify, id()); + sp<AMessage> notify = new AMessage(kWhatHDCPNotify, this); mHDCPObserver = new HDCPObserver(notify); status_t err = mHDCP->setObserver(mHDCPObserver); diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h index 750265f..0f779e4 100644 --- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h +++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h @@ -27,6 +27,7 @@ namespace android { +struct AReplyToken; struct IHDCP; struct IRemoteDisplayClient; struct ParsedMessage; @@ -121,7 +122,7 @@ private: struct in_addr mInterfaceAddr; int32_t mSessionID; - uint32_t mStopReplyID; + sp<AReplyToken> mStopReplyID; AString 
mWfdClientRtpPorts; int32_t mChosenRTPPort; // extracted from "wfd_client_rtp_ports" diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp index bb3e2fd..c098135 100644 --- a/media/libstagefright/yuv/YUVImage.cpp +++ b/media/libstagefright/yuv/YUVImage.cpp @@ -374,13 +374,13 @@ uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) { void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue, uint8_t *r, uint8_t *g, uint8_t *b) const { - *r = yValue + (1.370705 * (vValue-128)); - *g = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128)); - *b = yValue + (1.732446 * (uValue-128)); + int rTmp = yValue + (1.370705 * (vValue-128)); + int gTmp = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128)); + int bTmp = yValue + (1.732446 * (uValue-128)); - *r = clamp(*r, 0, 255); - *g = clamp(*g, 0, 255); - *b = clamp(*b, 0, 255); + *r = clamp(rTmp, 0, 255); + *g = clamp(gTmp, 0, 255); + *b = clamp(bTmp, 0, 255); } bool YUVImage::writeToPPM(const char *filename) const { diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk index 3a280f0..0e2e48c 100644 --- a/media/mediaserver/Android.mk +++ b/media/mediaserver/Android.mk @@ -11,7 +11,7 @@ endif include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - main_mediaserver.cpp + main_mediaserver.cpp LOCAL_SHARED_LIBRARIES := \ libaudioflinger \ @@ -19,6 +19,7 @@ LOCAL_SHARED_LIBRARIES := \ libcamera_metadata\ libcameraservice \ libmedialogservice \ + libresourcemanagerservice \ libcutils \ libnbaio \ libmedia \ @@ -26,19 +27,25 @@ LOCAL_SHARED_LIBRARIES := \ libutils \ liblog \ libbinder \ - libsoundtriggerservice + libsoundtriggerservice \ + libradioservice LOCAL_STATIC_LIBRARIES := \ - libregistermsext + libregistermsext LOCAL_C_INCLUDES := \ frameworks/av/media/libmediaplayerservice \ frameworks/av/services/medialog \ frameworks/av/services/audioflinger \ frameworks/av/services/audiopolicy \ + 
frameworks/av/services/audiopolicy/common/managerdefinitions/include \ + frameworks/av/services/audiopolicy/common/include \ + frameworks/av/services/audiopolicy/engine/interface \ frameworks/av/services/camera/libcameraservice \ + frameworks/av/services/mediaresourcemanager \ $(call include-path-for, audio-utils) \ - frameworks/av/services/soundtrigger + frameworks/av/services/soundtrigger \ + frameworks/av/services/radio LOCAL_MODULE:= mediaserver LOCAL_32_BIT_ONLY := true diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp index af1c9e6..99572f8 100644 --- a/media/mediaserver/main_mediaserver.cpp +++ b/media/mediaserver/main_mediaserver.cpp @@ -33,8 +33,9 @@ #include "CameraService.h" #include "MediaLogService.h" #include "MediaPlayerService.h" -#include "AudioPolicyService.h" +#include "service/AudioPolicyService.h" #include "SoundTriggerHwService.h" +#include "RadioService.h" using namespace android; @@ -130,6 +131,7 @@ int main(int argc __unused, char** argv) CameraService::instantiate(); AudioPolicyService::instantiate(); SoundTriggerHwService::instantiate(); + RadioService::instantiate(); registerExtensions(); ProcessState::self()->startThreadPool(); IPCThreadState::self()->joinThreadPool(); diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp index ed00b72..80c1c2f 100644 --- a/media/ndk/NdkMediaCodec.cpp +++ b/media/ndk/NdkMediaCodec.cpp @@ -116,7 +116,7 @@ void CodecHandler::onMessageReceived(const sp<AMessage> &msg) { case kWhatStopActivityNotifications: { - uint32_t replyID; + sp<AReplyToken> replyID; msg->senderAwaitsResponse(&replyID); mCodec->mGeneration++; @@ -136,7 +136,7 @@ void CodecHandler::onMessageReceived(const sp<AMessage> &msg) { static void requestActivityNotification(AMediaCodec *codec) { - (new AMessage(kWhatRequestActivityNotifications, codec->mHandler->id()))->post(); + (new AMessage(kWhatRequestActivityNotifications, codec->mHandler))->post(); } extern "C" { @@ -219,7 +219,7 
@@ media_status_t AMediaCodec_start(AMediaCodec *mData) { if (ret != OK) { return translate_error(ret); } - mData->mActivityNotification = new AMessage(kWhatActivityNotify, mData->mHandler->id()); + mData->mActivityNotification = new AMessage(kWhatActivityNotify, mData->mHandler); mData->mActivityNotification->setInt32("generation", mData->mGeneration); requestActivityNotification(mData); return AMEDIA_OK; @@ -229,7 +229,7 @@ EXPORT media_status_t AMediaCodec_stop(AMediaCodec *mData) { media_status_t ret = translate_error(mData->mCodec->stop()); - sp<AMessage> msg = new AMessage(kWhatStopActivityNotifications, mData->mHandler->id()); + sp<AMessage> msg = new AMessage(kWhatStopActivityNotifications, mData->mHandler); sp<AMessage> response; msg->postAndAwaitResponse(&response); mData->mActivityNotification.clear(); @@ -352,7 +352,8 @@ media_status_t AMediaCodec_releaseOutputBufferAtTime( } //EXPORT -media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback, void *userdata) { +media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback, + void *userdata) { mData->mCallback = callback; mData->mCallbackUserData = userdata; return AMEDIA_OK; diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp index 7a1048c..83a5ba1 100644 --- a/media/ndk/NdkMediaDrm.cpp +++ b/media/ndk/NdkMediaDrm.cpp @@ -312,8 +312,10 @@ media_status_t AMediaDrm_getKeyRequest(AMediaDrm *mObj, const AMediaDrmScope *sc String8(optionalParameters[i].mValue)); } String8 defaultUrl; + DrmPlugin::KeyRequestType keyRequestType; status_t status = mObj->mDrm->getKeyRequest(*iter, mdInit, String8(mimeType), - mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl); + mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl, + &keyRequestType); if (status != OK) { return translateStatus(status); } else { @@ -725,4 +727,3 @@ media_status_t AMediaDrm_verify(AMediaDrm *mObj, const AMediaDrmSessionId *sessi } } // extern 
"C" - diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp index db57d0b..0ecd64f 100644 --- a/media/ndk/NdkMediaExtractor.cpp +++ b/media/ndk/NdkMediaExtractor.cpp @@ -70,7 +70,8 @@ media_status_t AMediaExtractor_delete(AMediaExtractor *mData) { } EXPORT -media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset, off64_t length) { +media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset, + off64_t length) { ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length); return translate_error(mData->mImpl->setDataSource(fd, offset, length)); } diff --git a/radio/Android.mk b/radio/Android.mk new file mode 100644 index 0000000..ecbb8fd --- /dev/null +++ b/radio/Android.mk @@ -0,0 +1,39 @@ +# Copyright 2014 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + Radio.cpp \ + IRadio.cpp \ + IRadioClient.cpp \ + IRadioService.cpp + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + liblog \ + libbinder \ + libhardware \ + libradio_metadata + +#LOCAL_C_INCLUDES += \ + system/media/camera/include \ + system/media/private/camera/include + +LOCAL_MODULE:= libradio + +include $(BUILD_SHARED_LIBRARY) diff --git a/radio/IRadio.cpp b/radio/IRadio.cpp new file mode 100644 index 0000000..242df77 --- /dev/null +++ b/radio/IRadio.cpp @@ -0,0 +1,344 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#define LOG_TAG "IRadio" +#include <utils/Log.h> +#include <utils/Errors.h> +#include <binder/IMemory.h> +#include <radio/IRadio.h> +#include <radio/IRadioService.h> +#include <radio/IRadioClient.h> +#include <system/radio.h> +#include <system/radio_metadata.h> + +namespace android { + +enum { + DETACH = IBinder::FIRST_CALL_TRANSACTION, + SET_CONFIGURATION, + GET_CONFIGURATION, + SET_MUTE, + GET_MUTE, + SCAN, + STEP, + TUNE, + CANCEL, + GET_PROGRAM_INFORMATION, + HAS_CONTROL +}; + +class BpRadio: public BpInterface<IRadio> +{ +public: + BpRadio(const sp<IBinder>& impl) + : BpInterface<IRadio>(impl) + { + } + + void detach() + { + ALOGV("detach"); + Parcel data, reply; + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + remote()->transact(DETACH, data, &reply); + } + + virtual status_t setConfiguration(const struct radio_band_config *config) + { + Parcel data, reply; + if (config == NULL) { + return BAD_VALUE; + } + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + data.write(config, sizeof(struct radio_band_config)); + status_t status = remote()->transact(SET_CONFIGURATION, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + } + return status; + } + + virtual status_t getConfiguration(struct radio_band_config *config) + { + Parcel data, reply; + if (config == NULL) { + return BAD_VALUE; + } + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + status_t status = remote()->transact(GET_CONFIGURATION, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + if (status == NO_ERROR) { + reply.read(config, sizeof(struct radio_band_config)); + } + } + return status; + } + + virtual status_t setMute(bool mute) + { + Parcel data, reply; + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + data.writeInt32(mute ? 
1 : 0); + status_t status = remote()->transact(SET_MUTE, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + } + return status; + } + + virtual status_t getMute(bool *mute) + { + Parcel data, reply; + if (mute == NULL) { + return BAD_VALUE; + } + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + status_t status = remote()->transact(GET_MUTE, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + if (status == NO_ERROR) { + int muteread = reply.readInt32(); + *mute = muteread != 0; + } + } + return status; + } + + virtual status_t scan(radio_direction_t direction, bool skipSubChannel) + { + Parcel data, reply; + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + data.writeInt32(direction); + data.writeInt32(skipSubChannel ? 1 : 0); + status_t status = remote()->transact(SCAN, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + } + return status; + } + + virtual status_t step(radio_direction_t direction, bool skipSubChannel) + { + Parcel data, reply; + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + data.writeInt32(direction); + data.writeInt32(skipSubChannel ? 
1 : 0); + status_t status = remote()->transact(STEP, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + } + return status; + } + + virtual status_t tune(unsigned int channel, unsigned int subChannel) + { + Parcel data, reply; + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + data.writeInt32(channel); + data.writeInt32(subChannel); + status_t status = remote()->transact(TUNE, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + } + return status; + } + + virtual status_t cancel() + { + Parcel data, reply; + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + status_t status = remote()->transact(CANCEL, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + } + return status; + } + + virtual status_t getProgramInformation(struct radio_program_info *info) + { + Parcel data, reply; + if (info == NULL) { + return BAD_VALUE; + } + radio_metadata_t *metadata = info->metadata; + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + status_t status = remote()->transact(GET_PROGRAM_INFORMATION, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + if (status == NO_ERROR) { + reply.read(info, sizeof(struct radio_program_info)); + info->metadata = metadata; + if (metadata == NULL) { + return status; + } + size_t size = (size_t)reply.readInt32(); + if (size == 0) { + return status; + } + metadata = + (radio_metadata_t *)calloc(size / sizeof(unsigned int), sizeof(unsigned int)); + if (metadata == NULL) { + return NO_MEMORY; + } + reply.read(metadata, size); + status = radio_metadata_add_metadata(&info->metadata, metadata); + free(metadata); + } + } + return status; + } + + virtual status_t hasControl(bool *hasControl) + { + Parcel data, reply; + if (hasControl == NULL) { + return BAD_VALUE; + } + data.writeInterfaceToken(IRadio::getInterfaceDescriptor()); + status_t status = remote()->transact(HAS_CONTROL, data, &reply); + 
if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + if (status == NO_ERROR) { + *hasControl = reply.readInt32() != 0; + } + } + return status; + } +}; + +IMPLEMENT_META_INTERFACE(Radio, "android.hardware.IRadio"); + +// ---------------------------------------------------------------------- + +status_t BnRadio::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case DETACH: { + ALOGV("DETACH"); + CHECK_INTERFACE(IRadio, data, reply); + detach(); + return NO_ERROR; + } break; + case SET_CONFIGURATION: { + CHECK_INTERFACE(IRadio, data, reply); + struct radio_band_config config; + data.read(&config, sizeof(struct radio_band_config)); + status_t status = setConfiguration(&config); + reply->writeInt32(status); + return NO_ERROR; + } + case GET_CONFIGURATION: { + CHECK_INTERFACE(IRadio, data, reply); + struct radio_band_config config; + status_t status = getConfiguration(&config); + reply->writeInt32(status); + if (status == NO_ERROR) { + reply->write(&config, sizeof(struct radio_band_config)); + } + return NO_ERROR; + } + case SET_MUTE: { + CHECK_INTERFACE(IRadio, data, reply); + bool mute = data.readInt32() != 0; + status_t status = setMute(mute); + reply->writeInt32(status); + return NO_ERROR; + } + case GET_MUTE: { + CHECK_INTERFACE(IRadio, data, reply); + bool mute; + status_t status = getMute(&mute); + reply->writeInt32(status); + if (status == NO_ERROR) { + reply->writeInt32(mute ? 
1 : 0); + } + return NO_ERROR; + } + case SCAN: { + CHECK_INTERFACE(IRadio, data, reply); + radio_direction_t direction = (radio_direction_t)data.readInt32(); + bool skipSubChannel = data.readInt32() == 1; + status_t status = scan(direction, skipSubChannel); + reply->writeInt32(status); + return NO_ERROR; + } + case STEP: { + CHECK_INTERFACE(IRadio, data, reply); + radio_direction_t direction = (radio_direction_t)data.readInt32(); + bool skipSubChannel = data.readInt32() == 1; + status_t status = step(direction, skipSubChannel); + reply->writeInt32(status); + return NO_ERROR; + } + case TUNE: { + CHECK_INTERFACE(IRadio, data, reply); + unsigned int channel = (unsigned int)data.readInt32(); + unsigned int subChannel = (unsigned int)data.readInt32(); + status_t status = tune(channel, subChannel); + reply->writeInt32(status); + return NO_ERROR; + } + case CANCEL: { + CHECK_INTERFACE(IRadio, data, reply); + status_t status = cancel(); + reply->writeInt32(status); + return NO_ERROR; + } + case GET_PROGRAM_INFORMATION: { + CHECK_INTERFACE(IRadio, data, reply); + struct radio_program_info info; + + status_t status = radio_metadata_allocate(&info.metadata, 0, 0); + if (status != NO_ERROR) { + return status; + } + status = getProgramInformation(&info); + reply->writeInt32(status); + if (status == NO_ERROR) { + reply->write(&info, sizeof(struct radio_program_info)); + int count = radio_metadata_get_count(info.metadata); + if (count > 0) { + size_t size = radio_metadata_get_size(info.metadata); + reply->writeInt32(size); + reply->write(info.metadata, size); + } else { + reply->writeInt32(0); + } + } + radio_metadata_deallocate(info.metadata); + return NO_ERROR; + } + case HAS_CONTROL: { + CHECK_INTERFACE(IRadio, data, reply); + bool control; + status_t status = hasControl(&control); + reply->writeInt32(status); + if (status == NO_ERROR) { + reply->writeInt32(control ? 
1 : 0); + } + return NO_ERROR; + } + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/radio/IRadioClient.cpp b/radio/IRadioClient.cpp new file mode 100644 index 0000000..033ca49 --- /dev/null +++ b/radio/IRadioClient.cpp @@ -0,0 +1,75 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#include <stdint.h> +#include <sys/types.h> +#include <binder/IMemory.h> +#include <binder/Parcel.h> +#include <binder/IPCThreadState.h> +#include <binder/IServiceManager.h> +#include <radio/IRadioClient.h> + +namespace android { + +enum { + ON_EVENT = IBinder::FIRST_CALL_TRANSACTION, +}; + +class BpRadioClient: public BpInterface<IRadioClient> +{ + +public: + BpRadioClient(const sp<IBinder>& impl) + : BpInterface<IRadioClient>(impl) + { + } + + virtual void onEvent(const sp<IMemory>& eventMemory) + { + Parcel data, reply; + data.writeInterfaceToken(IRadioClient::getInterfaceDescriptor()); + data.writeStrongBinder(IInterface::asBinder(eventMemory)); + remote()->transact(ON_EVENT, + data, + &reply); + } +}; + +IMPLEMENT_META_INTERFACE(RadioClient, + "android.hardware.IRadioClient"); + +// ---------------------------------------------------------------------- + +status_t BnRadioClient::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case ON_EVENT: { + CHECK_INTERFACE(IRadioClient, data, reply); + sp<IMemory> eventMemory = interface_cast<IMemory>( + data.readStrongBinder()); + onEvent(eventMemory); + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } return NO_ERROR; +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/radio/IRadioService.cpp b/radio/IRadioService.cpp new file mode 100644 index 0000000..8c2b3ef --- /dev/null +++ b/radio/IRadioService.cpp @@ -0,0 +1,181 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#define LOG_TAG "BpRadioService" +// +#define LOG_NDEBUG 0 + +#include <utils/Log.h> +#include <utils/Errors.h> + +#include <stdint.h> +#include <sys/types.h> +#include <binder/IMemory.h> +#include <binder/Parcel.h> +#include <binder/IPCThreadState.h> +#include <binder/IServiceManager.h> + +#include <radio/IRadioService.h> +#include <radio/IRadio.h> +#include <radio/IRadioClient.h> + +namespace android { + +enum { + LIST_MODULES = IBinder::FIRST_CALL_TRANSACTION, + ATTACH, +}; + +#define MAX_ITEMS_PER_LIST 1024 + +class BpRadioService: public BpInterface<IRadioService> +{ +public: + BpRadioService(const sp<IBinder>& impl) + : BpInterface<IRadioService>(impl) + { + } + + virtual status_t listModules(struct radio_properties *properties, + uint32_t *numModules) + { + if (numModules == NULL || (*numModules != 0 && properties == NULL)) { + return BAD_VALUE; + } + Parcel data, reply; + data.writeInterfaceToken(IRadioService::getInterfaceDescriptor()); + unsigned int numModulesReq = (properties == NULL) ? 
0 : *numModules; + data.writeInt32(numModulesReq); + status_t status = remote()->transact(LIST_MODULES, data, &reply); + if (status == NO_ERROR) { + status = (status_t)reply.readInt32(); + *numModules = (unsigned int)reply.readInt32(); + } + ALOGV("listModules() status %d got *numModules %d", status, *numModules); + if (status == NO_ERROR) { + if (numModulesReq > *numModules) { + numModulesReq = *numModules; + } + if (numModulesReq > 0) { + reply.read(properties, numModulesReq * sizeof(struct radio_properties)); + } + } + return status; + } + + virtual status_t attach(radio_handle_t handle, + const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool withAudio, + sp<IRadio>& radio) + { + Parcel data, reply; + data.writeInterfaceToken(IRadioService::getInterfaceDescriptor()); + data.writeInt32(handle); + data.writeStrongBinder(IInterface::asBinder(client)); + ALOGV("attach() config %p withAudio %d region %d type %d", config, withAudio, config->region, config->band.type); + if (config == NULL) { + data.writeInt32(0); + } else { + data.writeInt32(1); + data.write(config, sizeof(struct radio_band_config)); + } + data.writeInt32(withAudio ? 
1 : 0); + status_t status = remote()->transact(ATTACH, data, &reply); + if (status != NO_ERROR) { + return status; + } + status = reply.readInt32(); + if (reply.readInt32() != 0) { + radio = interface_cast<IRadio>(reply.readStrongBinder()); + } + return status; + } +}; + +IMPLEMENT_META_INTERFACE(RadioService, "android.hardware.IRadioService"); + +// ---------------------------------------------------------------------- + +status_t BnRadioService::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) +{ + switch(code) { + case LIST_MODULES: { + CHECK_INTERFACE(IRadioService, data, reply); + unsigned int numModulesReq = data.readInt32(); + if (numModulesReq > MAX_ITEMS_PER_LIST) { + numModulesReq = MAX_ITEMS_PER_LIST; + } + unsigned int numModules = numModulesReq; + struct radio_properties *properties = + (struct radio_properties *)calloc(numModulesReq, + sizeof(struct radio_properties)); + if (properties == NULL) { + reply->writeInt32(NO_MEMORY); + reply->writeInt32(0); + return NO_ERROR; + } + + status_t status = listModules(properties, &numModules); + reply->writeInt32(status); + reply->writeInt32(numModules); + ALOGV("LIST_MODULES status %d got numModules %d", status, numModules); + + if (status == NO_ERROR) { + if (numModulesReq > numModules) { + numModulesReq = numModules; + } + reply->write(properties, + numModulesReq * sizeof(struct radio_properties)); + } + free(properties); + return NO_ERROR; + } break; + + case ATTACH: { + CHECK_INTERFACE(IRadioService, data, reply); + radio_handle_t handle = data.readInt32(); + sp<IRadioClient> client = + interface_cast<IRadioClient>(data.readStrongBinder()); + struct radio_band_config config; + struct radio_band_config *configPtr = NULL; + if (data.readInt32() != 0) { + data.read(&config, sizeof(struct radio_band_config)); + configPtr = &config; + } + bool withAudio = data.readInt32() != 0; + ALOGV("ATTACH configPtr %p withAudio %d", configPtr, withAudio); + sp<IRadio> radio; + status_t status 
= attach(handle, client, configPtr, withAudio, radio); + reply->writeInt32(status); + if (radio != 0) { + reply->writeInt32(1); + reply->writeStrongBinder(IInterface::asBinder(radio)); + } else { + reply->writeInt32(0); + } + return NO_ERROR; + } break; + default: + return BBinder::onTransact(code, data, reply, flags); + } +} + +// ---------------------------------------------------------------------------- + +}; // namespace android diff --git a/radio/Radio.cpp b/radio/Radio.cpp new file mode 100644 index 0000000..e3554c2 --- /dev/null +++ b/radio/Radio.cpp @@ -0,0 +1,283 @@ +/* +** +** Copyright (C) 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#define LOG_TAG "Radio" +//#define LOG_NDEBUG 0 + +#include <utils/Log.h> +#include <utils/threads.h> +#include <binder/IPCThreadState.h> +#include <binder/IServiceManager.h> +#include <binder/IMemory.h> + +#include <radio/Radio.h> +#include <radio/IRadio.h> +#include <radio/IRadioService.h> +#include <radio/IRadioClient.h> +#include <radio/RadioCallback.h> + +namespace android { + +namespace { + sp<IRadioService> gRadioService; + const int kRadioServicePollDelay = 500000; // 0.5s + const char* kRadioServiceName = "media.radio"; + Mutex gLock; + + class DeathNotifier : public IBinder::DeathRecipient + { + public: + DeathNotifier() { + } + + virtual void binderDied(const wp<IBinder>& who __unused) { + ALOGV("binderDied"); + Mutex::Autolock _l(gLock); + gRadioService.clear(); + ALOGW("Radio service died!"); + } + }; + + sp<DeathNotifier> gDeathNotifier; +}; // namespace anonymous + +const sp<IRadioService>& Radio::getRadioService() +{ + Mutex::Autolock _l(gLock); + if (gRadioService.get() == 0) { + sp<IServiceManager> sm = defaultServiceManager(); + sp<IBinder> binder; + do { + binder = sm->getService(String16(kRadioServiceName)); + if (binder != 0) { + break; + } + ALOGW("RadioService not published, waiting..."); + usleep(kRadioServicePollDelay); + } while(true); + if (gDeathNotifier == NULL) { + gDeathNotifier = new DeathNotifier(); + } + binder->linkToDeath(gDeathNotifier); + gRadioService = interface_cast<IRadioService>(binder); + } + ALOGE_IF(gRadioService == 0, "no RadioService!?"); + return gRadioService; +} + +// Static methods +status_t Radio::listModules(struct radio_properties *properties, + uint32_t *numModules) +{ + ALOGV("listModules()"); + const sp<IRadioService>& service = getRadioService(); + if (service == 0) { + return NO_INIT; + } + return service->listModules(properties, numModules); +} + +sp<Radio> Radio::attach(radio_handle_t handle, + const struct radio_band_config *config, + bool withAudio, + const sp<RadioCallback>& callback) +{ + 
ALOGV("attach()"); + sp<Radio> radio; + const sp<IRadioService>& service = getRadioService(); + if (service == 0) { + return radio; + } + radio = new Radio(handle, callback); + status_t status = service->attach(handle, radio, config, withAudio, radio->mIRadio); + + if (status == NO_ERROR && radio->mIRadio != 0) { + IInterface::asBinder(radio->mIRadio)->linkToDeath(radio); + } else { + ALOGW("Error %d connecting to radio service", status); + radio.clear(); + } + return radio; +} + + + +// Radio +Radio::Radio(radio_handle_t handle, const sp<RadioCallback>& callback) + : mHandle(handle), mCallback(callback) +{ +} + +Radio::~Radio() +{ + if (mIRadio != 0) { + mIRadio->detach(); + } +} + + +void Radio::detach() { + ALOGV("detach()"); + Mutex::Autolock _l(mLock); + mCallback.clear(); + if (mIRadio != 0) { + mIRadio->detach(); + IInterface::asBinder(mIRadio)->unlinkToDeath(this); + mIRadio = 0; + } +} + +status_t Radio::setConfiguration(const struct radio_band_config *config) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->setConfiguration(config); +} + +status_t Radio::getConfiguration(struct radio_band_config *config) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->getConfiguration(config); +} + +status_t Radio::setMute(bool mute) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->setMute(mute); +} + +status_t Radio::getMute(bool *mute) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->getMute(mute); +} + +status_t Radio::scan(radio_direction_t direction, bool skipSubchannel) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->scan(direction, skipSubchannel); +} + +status_t Radio::step(radio_direction_t direction, bool skipSubchannel) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->step(direction, 
skipSubchannel); +} + +status_t Radio::tune(unsigned int channel, unsigned int subChannel) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->tune(channel, subChannel); +} + +status_t Radio::cancel() +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->cancel(); +} + +status_t Radio::getProgramInformation(struct radio_program_info *info) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->getProgramInformation(info); +} + +status_t Radio::hasControl(bool *hasControl) +{ + Mutex::Autolock _l(mLock); + if (mIRadio == 0) { + return NO_INIT; + } + return mIRadio->hasControl(hasControl); +} + + +// BpRadioClient +void Radio::onEvent(const sp<IMemory>& eventMemory) +{ + Mutex::Autolock _l(mLock); + if (eventMemory == 0 || eventMemory->pointer() == NULL) { + return; + } + + struct radio_event *event = (struct radio_event *)eventMemory->pointer(); + // restore local metadata pointer from offset + switch (event->type) { + case RADIO_EVENT_TUNED: + case RADIO_EVENT_AF_SWITCH: + if (event->info.metadata != NULL) { + event->info.metadata = + (radio_metadata_t *)((char *)event + (size_t)event->info.metadata); + } + break; + case RADIO_EVENT_METADATA: + if (event->metadata != NULL) { + event->metadata = + (radio_metadata_t *)((char *)event + (size_t)event->metadata); + } + break; + default: + break; + } + + if (mCallback != 0) { + mCallback->onEvent(event); + } +} + + +//IBinder::DeathRecipient +void Radio::binderDied(const wp<IBinder>& who __unused) { + Mutex::Autolock _l(mLock); + ALOGW("Radio server binder Died "); + mIRadio = 0; + struct radio_event event; + memset(&event, 0, sizeof(struct radio_event)); + event.type = RADIO_EVENT_SERVER_DIED; + event.status = DEAD_OBJECT; + if (mCallback != 0) { + mCallback->onEvent(&event); + } +} + +}; // namespace android diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk index 
44d2553..fee2347 100644 --- a/services/audioflinger/Android.mk +++ b/services/audioflinger/Android.mk @@ -39,6 +39,9 @@ LOCAL_SRC_FILES:= \ AudioFlinger.cpp \ Threads.cpp \ Tracks.cpp \ + AudioHwDevice.cpp \ + AudioStreamOut.cpp \ + SpdifStreamOut.cpp \ Effects.cpp \ AudioMixer.cpp.arm \ PatchPanel.cpp @@ -52,6 +55,7 @@ LOCAL_C_INCLUDES := \ LOCAL_SHARED_LIBRARIES := \ libaudioresampler \ + libaudiospdif \ libaudioutils \ libcommon_time_client \ libcutils \ @@ -74,9 +78,17 @@ LOCAL_STATIC_LIBRARIES := \ LOCAL_MODULE:= libaudioflinger LOCAL_32_BIT_ONLY := true -LOCAL_SRC_FILES += FastMixer.cpp FastMixerState.cpp AudioWatchdog.cpp -LOCAL_SRC_FILES += FastThread.cpp FastThreadState.cpp -LOCAL_SRC_FILES += FastCapture.cpp FastCaptureState.cpp +LOCAL_SRC_FILES += \ + AudioWatchdog.cpp \ + FastCapture.cpp \ + FastCaptureDumpState.cpp \ + FastCaptureState.cpp \ + FastMixer.cpp \ + FastMixerDumpState.cpp \ + FastMixerState.cpp \ + FastThread.cpp \ + FastThreadDumpState.cpp \ + FastThreadState.cpp LOCAL_CFLAGS += -DSTATE_QUEUE_INSTANTIATIONS='"StateQueueInstantiations.cpp"' diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 993db73..f3206cb 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -185,7 +185,8 @@ AudioFlinger::AudioFlinger() char value[PROPERTY_VALUE_MAX]; bool doLog = (property_get("ro.test_harness", value, "0") > 0) && (atoi(value) == 1); if (doLog) { - mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters", MemoryHeapBase::READ_ONLY); + mLogMemoryDealer = new MemoryDealer(kLogMemorySize, "LogWriters", + MemoryHeapBase::READ_ONLY); } #ifdef TEE_SINK @@ -271,7 +272,7 @@ static const char * const audio_interfaces[] = { }; #define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0]))) -AudioFlinger::AudioHwDevice* AudioFlinger::findSuitableHwDev_l( +AudioHwDevice* AudioFlinger::findSuitableHwDev_l( audio_module_handle_t module, audio_devices_t devices) { @@ -401,6 
+402,9 @@ status_t AudioFlinger::dump(int fd, const Vector<String16>& args) String8 result(kClientLockedString); write(fd, result.string(), result.size()); } + + EffectDumpEffects(fd); + dumpClients(fd, args); if (clientLocked) { mClientLock.unlock(); @@ -822,14 +826,20 @@ bool AudioFlinger::getMicMute() const if (ret != NO_ERROR) { return false; } - + bool mute = true; bool state = AUDIO_MODE_INVALID; AutoMutex lock(mHardwareLock); - audio_hw_device_t *dev = mPrimaryHardwareDev->hwDevice(); mHardwareStatus = AUDIO_HW_GET_MIC_MUTE; - dev->get_mic_mute(dev, &state); + for (size_t i = 0; i < mAudioHwDevs.size(); i++) { + audio_hw_device_t *dev = mAudioHwDevs.valueAt(i)->hwDevice(); + status_t result = dev->get_mic_mute(dev, &state); + if (result == NO_ERROR) { + mute = mute && state; + } + } mHardwareStatus = AUDIO_HW_IDLE; - return state; + + return mute; } status_t AudioFlinger::setMasterMute(bool muted) @@ -1706,8 +1716,6 @@ sp<AudioFlinger::PlaybackThread> AudioFlinger::openOutput_l(audio_module_handle_ mHardwareStatus = AUDIO_HW_OUTPUT_OPEN; - audio_stream_out_t *outStream = NULL; - // FOR TESTING ONLY: // This if statement allows overriding the audio policy settings // and forcing a specific format or channel mask to the HAL/Sink device for testing. 
@@ -1729,25 +1737,18 @@ sp<AudioFlinger::PlaybackThread> AudioFlinger::openOutput_l(audio_module_handle_ } } - status_t status = hwDevHal->open_output_stream(hwDevHal, - *output, - devices, - flags, - config, - &outStream, - address.string()); + AudioStreamOut *outputStream = NULL; + status_t status = outHwDev->openOutputStream( + &outputStream, + *output, + devices, + flags, + config, + address.string()); mHardwareStatus = AUDIO_HW_IDLE; - ALOGV("openOutput_l() openOutputStream returned output %p, sampleRate %d, Format %#x, " - "channelMask %#x, status %d", - outStream, - config->sample_rate, - config->format, - config->channel_mask, - status); - if (status == NO_ERROR && outStream != NULL) { - AudioStreamOut *outputStream = new AudioStreamOut(outHwDev, outStream, flags); + if (status == NO_ERROR) { PlaybackThread *thread; if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) { @@ -1777,7 +1778,7 @@ status_t AudioFlinger::openOutput(audio_module_handle_t module, uint32_t *latencyMs, audio_output_flags_t flags) { - ALOGV("openOutput(), module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, flags %x", + ALOGI("openOutput(), module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, flags %x", module, (devices != NULL) ? 
*devices : 0, config->sample_rate, @@ -1947,18 +1948,18 @@ status_t AudioFlinger::restoreOutput(audio_io_handle_t output) status_t AudioFlinger::openInput(audio_module_handle_t module, audio_io_handle_t *input, audio_config_t *config, - audio_devices_t *device, + audio_devices_t *devices, const String8& address, audio_source_t source, audio_input_flags_t flags) { Mutex::Autolock _l(mLock); - if (*device == AUDIO_DEVICE_NONE) { + if (*devices == AUDIO_DEVICE_NONE) { return BAD_VALUE; } - sp<RecordThread> thread = openInput_l(module, input, config, *device, address, source, flags); + sp<RecordThread> thread = openInput_l(module, input, config, *devices, address, source, flags); if (thread != 0) { // notify client processes of the new input creation @@ -1971,12 +1972,12 @@ status_t AudioFlinger::openInput(audio_module_handle_t module, sp<AudioFlinger::RecordThread> AudioFlinger::openInput_l(audio_module_handle_t module, audio_io_handle_t *input, audio_config_t *config, - audio_devices_t device, + audio_devices_t devices, const String8& address, audio_source_t source, audio_input_flags_t flags) { - AudioHwDevice *inHwDev = findSuitableHwDev_l(module, device); + AudioHwDevice *inHwDev = findSuitableHwDev_l(module, devices); if (inHwDev == NULL) { *input = AUDIO_IO_HANDLE_NONE; return 0; @@ -1989,7 +1990,7 @@ sp<AudioFlinger::RecordThread> AudioFlinger::openInput_l(audio_module_handle_t m audio_config_t halconfig = *config; audio_hw_device_t *inHwHal = inHwDev->hwDevice(); audio_stream_in_t *inStream = NULL; - status_t status = inHwHal->open_input_stream(inHwHal, *input, device, &halconfig, + status_t status = inHwHal->open_input_stream(inHwHal, *input, devices, &halconfig, &inStream, flags, address.string(), source); ALOGV("openInput_l() openInputStream returned input %p, SamplingRate %d" ", Format %#x, Channels %x, flags %#x, status %d addr %s", @@ -2011,7 +2012,7 @@ sp<AudioFlinger::RecordThread> AudioFlinger::openInput_l(audio_module_handle_t m // FIXME describe the 
change proposed by HAL (save old values so we can log them here) ALOGV("openInput_l() reopening with proposed sampling rate and channel mask"); inStream = NULL; - status = inHwHal->open_input_stream(inHwHal, *input, device, &halconfig, + status = inHwHal->open_input_stream(inHwHal, *input, devices, &halconfig, &inStream, flags, address.string(), source); // FIXME log this new status; HAL should not propose any further changes } @@ -2076,7 +2077,7 @@ sp<AudioFlinger::RecordThread> AudioFlinger::openInput_l(audio_module_handle_t m inputStream, *input, primaryOutputDevice_l(), - device + devices #ifdef TEE_SINK , teeSink #endif @@ -2799,13 +2800,13 @@ bool AudioFlinger::updateOrphanEffectChains(const sp<AudioFlinger::EffectModule> struct Entry { -#define MAX_NAME 32 // %Y%m%d%H%M%S_%d.wav - char mName[MAX_NAME]; +#define TEE_MAX_FILENAME 32 // %Y%m%d%H%M%S_%d.wav = 4+2+2+2+2+2+1+1+4+1 = 21 + char mFileName[TEE_MAX_FILENAME]; }; int comparEntry(const void *p1, const void *p2) { - return strcmp(((const Entry *) p1)->mName, ((const Entry *) p2)->mName); + return strcmp(((const Entry *) p1)->mFileName, ((const Entry *) p2)->mFileName); } #ifdef TEE_SINK @@ -2824,11 +2825,11 @@ void AudioFlinger::dumpTee(int fd, const sp<NBAIO_Source>& source, audio_io_hand DIR *dir = opendir(teePath); teePath[teePathLen++] = '/'; if (dir != NULL) { -#define MAX_SORT 20 // number of entries to sort -#define MAX_KEEP 10 // number of entries to keep - struct Entry entries[MAX_SORT]; +#define TEE_MAX_SORT 20 // number of entries to sort +#define TEE_MAX_KEEP 10 // number of entries to keep + struct Entry entries[TEE_MAX_SORT]; size_t entryCount = 0; - while (entryCount < MAX_SORT) { + while (entryCount < TEE_MAX_SORT) { struct dirent de; struct dirent *result = NULL; int rc = readdir_r(dir, &de, &result); @@ -2845,17 +2846,17 @@ void AudioFlinger::dumpTee(int fd, const sp<NBAIO_Source>& source, audio_io_hand } // ignore non .wav file entries size_t nameLen = strlen(de.d_name); - if (nameLen 
<= 4 || nameLen >= MAX_NAME || + if (nameLen <= 4 || nameLen >= TEE_MAX_FILENAME || strcmp(&de.d_name[nameLen - 4], ".wav")) { continue; } - strcpy(entries[entryCount++].mName, de.d_name); + strcpy(entries[entryCount++].mFileName, de.d_name); } (void) closedir(dir); - if (entryCount > MAX_KEEP) { + if (entryCount > TEE_MAX_KEEP) { qsort(entries, entryCount, sizeof(Entry), comparEntry); - for (size_t i = 0; i < entryCount - MAX_KEEP; ++i) { - strcpy(&teePath[teePathLen], entries[i].mName); + for (size_t i = 0; i < entryCount - TEE_MAX_KEEP; ++i) { + strcpy(&teePath[teePathLen], entries[i].mFileName); (void) unlink(teePath); } } @@ -2939,4 +2940,4 @@ status_t AudioFlinger::onTransact( return BnAudioFlinger::onTransact(code, data, reply, flags); } -}; // namespace android +} // namespace android diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index aa0af1f..c7d9161 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -56,6 +56,9 @@ #include <media/nbaio/NBAIO.h> #include "AudioWatchdog.h" #include "AudioMixer.h" +#include "AudioStreamOut.h" +#include "SpdifStreamOut.h" +#include "AudioHwDevice.h" #include <powermanager/IPowerManager.h> @@ -311,7 +314,6 @@ public: wp<RefBase> cookie); private: - class AudioHwDevice; // fwd declaration for findSuitableHwDev_l audio_mode_t getMode() const { return mMode; } @@ -449,7 +451,7 @@ private: class EffectModule; class EffectHandle; class EffectChain; - struct AudioStreamOut; + struct AudioStreamIn; struct stream_type_t { @@ -586,57 +588,11 @@ private: // Return true if the effect was found in mOrphanEffectChains, false otherwise. 
bool updateOrphanEffectChains(const sp<EffectModule>& effect); - class AudioHwDevice { - public: - enum Flags { - AHWD_CAN_SET_MASTER_VOLUME = 0x1, - AHWD_CAN_SET_MASTER_MUTE = 0x2, - }; - - AudioHwDevice(audio_module_handle_t handle, - const char *moduleName, - audio_hw_device_t *hwDevice, - Flags flags) - : mHandle(handle), mModuleName(strdup(moduleName)) - , mHwDevice(hwDevice) - , mFlags(flags) { } - /*virtual*/ ~AudioHwDevice() { free((void *)mModuleName); } - - bool canSetMasterVolume() const { - return (0 != (mFlags & AHWD_CAN_SET_MASTER_VOLUME)); - } - - bool canSetMasterMute() const { - return (0 != (mFlags & AHWD_CAN_SET_MASTER_MUTE)); - } - - audio_module_handle_t handle() const { return mHandle; } - const char *moduleName() const { return mModuleName; } - audio_hw_device_t *hwDevice() const { return mHwDevice; } - uint32_t version() const { return mHwDevice->common.version; } - private: - const audio_module_handle_t mHandle; - const char * const mModuleName; - audio_hw_device_t * const mHwDevice; - const Flags mFlags; - }; - - // AudioStreamOut and AudioStreamIn are immutable, so their fields are const. + // AudioStreamIn is immutable, so their fields are const. // For emphasis, we could also make all pointers to them be "const *", // but that would clutter the code unnecessarily. 
- struct AudioStreamOut { - AudioHwDevice* const audioHwDev; - audio_stream_out_t* const stream; - const audio_output_flags_t flags; - - audio_hw_device_t* hwDev() const { return audioHwDev->hwDevice(); } - - AudioStreamOut(AudioHwDevice *dev, audio_stream_out_t *out, audio_output_flags_t flags) : - audioHwDev(dev), stream(out), flags(flags) {} - }; - struct AudioStreamIn { AudioHwDevice* const audioHwDev; audio_stream_in_t* const stream; @@ -796,9 +752,13 @@ private: #undef INCLUDING_FROM_AUDIOFLINGER_H const char *formatToString(audio_format_t format); +String8 inputFlagsToString(audio_input_flags_t flags); +String8 outputFlagsToString(audio_output_flags_t flags); +String8 devicesToString(audio_devices_t devices); +const char *sourceToString(audio_source_t source); // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android #endif // ANDROID_AUDIO_FLINGER_H diff --git a/services/audioflinger/AudioHwDevice.cpp b/services/audioflinger/AudioHwDevice.cpp new file mode 100644 index 0000000..09d86ea --- /dev/null +++ b/services/audioflinger/AudioHwDevice.cpp @@ -0,0 +1,94 @@ +/* +** +** Copyright 2007, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#define LOG_TAG "AudioHwDevice" +//#define LOG_NDEBUG 0 + +#include <hardware/audio.h> +#include <utils/Log.h> + +#include <audio_utils/spdif/SPDIFEncoder.h> + +#include "AudioHwDevice.h" +#include "AudioStreamOut.h" +#include "SpdifStreamOut.h" + +namespace android { + +// ---------------------------------------------------------------------------- + +status_t AudioHwDevice::openOutputStream( + AudioStreamOut **ppStreamOut, + audio_io_handle_t handle, + audio_devices_t devices, + audio_output_flags_t flags, + struct audio_config *config, + const char *address) +{ + + struct audio_config originalConfig = *config; + AudioStreamOut *outputStream = new AudioStreamOut(this, flags); + + // Try to open the HAL first using the current format. + ALOGV("AudioHwDevice::openOutputStream(), try " + " sampleRate %d, Format %#x, " + "channelMask %#x", + config->sample_rate, + config->format, + config->channel_mask); + status_t status = outputStream->open(handle, devices, config, address); + + if (status != NO_ERROR) { + delete outputStream; + outputStream = NULL; + + // FIXME Look at any modification to the config. + // The HAL might modify the config to suggest a wrapped format. + // Log this so we can see what the HALs are doing. + ALOGI("AudioHwDevice::openOutputStream(), HAL returned" + " sampleRate %d, Format %#x, " + "channelMask %#x, status %d", + config->sample_rate, + config->format, + config->channel_mask, + status); + + // If the data is encoded then try again using wrapped PCM. + bool wrapperNeeded = !audio_is_linear_pcm(originalConfig.format) + && ((flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) + && ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0); + + // FIXME - Add isEncodingSupported() query to SPDIF wrapper then + // call it from here. 
+ if (wrapperNeeded) { + outputStream = new SpdifStreamOut(this, flags); + status = outputStream->open(handle, devices, &originalConfig, address); + if (status != NO_ERROR) { + ALOGE("ERROR - AudioHwDevice::openOutputStream(), SPDIF open returned %d", + status); + delete outputStream; + outputStream = NULL; + } + } + } + + *ppStreamOut = outputStream; + return status; +} + + +}; // namespace android diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h new file mode 100644 index 0000000..b9f65c1 --- /dev/null +++ b/services/audioflinger/AudioHwDevice.h @@ -0,0 +1,88 @@ +/* +** +** Copyright 2007, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#ifndef ANDROID_AUDIO_HW_DEVICE_H +#define ANDROID_AUDIO_HW_DEVICE_H + +#include <stdint.h> +#include <stdlib.h> +#include <sys/types.h> + +#include <hardware/audio.h> +#include <utils/Errors.h> +#include <system/audio.h> + + +namespace android { + +class AudioStreamOut; + +class AudioHwDevice { +public: + enum Flags { + AHWD_CAN_SET_MASTER_VOLUME = 0x1, + AHWD_CAN_SET_MASTER_MUTE = 0x2, + }; + + AudioHwDevice(audio_module_handle_t handle, + const char *moduleName, + audio_hw_device_t *hwDevice, + Flags flags) + : mHandle(handle) + , mModuleName(strdup(moduleName)) + , mHwDevice(hwDevice) + , mFlags(flags) { } + virtual ~AudioHwDevice() { free((void *)mModuleName); } + + bool canSetMasterVolume() const { + return (0 != (mFlags & AHWD_CAN_SET_MASTER_VOLUME)); + } + + bool canSetMasterMute() const { + return (0 != (mFlags & AHWD_CAN_SET_MASTER_MUTE)); + } + + audio_module_handle_t handle() const { return mHandle; } + const char *moduleName() const { return mModuleName; } + audio_hw_device_t *hwDevice() const { return mHwDevice; } + uint32_t version() const { return mHwDevice->common.version; } + + /** This method creates and opens the audio hardware output stream. + * The "address" parameter qualifies the "devices" audio device type if needed. + * The format format depends on the device type: + * - Bluetooth devices use the MAC address of the device in the form "00:11:22:AA:BB:CC" + * - USB devices use the ALSA card and device numbers in the form "card=X;device=Y" + * - Other devices may use a number or any other string. 
+ */ + status_t openOutputStream( + AudioStreamOut **ppStreamOut, + audio_io_handle_t handle, + audio_devices_t devices, + audio_output_flags_t flags, + struct audio_config *config, + const char *address); + +private: + const audio_module_handle_t mHandle; + const char * const mModuleName; + audio_hw_device_t * const mHwDevice; + const Flags mFlags; +}; + +} // namespace android + +#endif // ANDROID_AUDIO_HW_DEVICE_H diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp index fd28ea1..dddca02 100644 --- a/services/audioflinger/AudioMixer.cpp +++ b/services/audioflinger/AudioMixer.cpp @@ -69,9 +69,9 @@ #define ARRAY_SIZE(x) (sizeof(x)/sizeof((x)[0])) #endif -// Set kUseNewMixer to true to use the new mixer engine. Otherwise the -// original code will be used. This is false for now. -static const bool kUseNewMixer = false; +// Set kUseNewMixer to true to use the new mixer engine always. Otherwise the +// original code will be used for stereo sinks, the new mixer for multichannel. +static const bool kUseNewMixer = true; // Set kUseFloat to true to allow floating input into the mixer engine. // If kUseNewMixer is false, this is ignored or may be overridden internally @@ -341,11 +341,46 @@ AudioMixer::RemixBufferProvider::RemixBufferProvider(audio_channel_mask_t inputC ALOGV("RemixBufferProvider(%p)(%#x, %#x, %#x) %zu %zu", this, format, inputChannelMask, outputChannelMask, mInputChannels, mOutputChannels); - // TODO: consider channel representation in index array formulation - // We ignore channel representation, and just use the bits. 
- memcpy_by_index_array_initialization(mIdxAry, ARRAY_SIZE(mIdxAry), - audio_channel_mask_get_bits(outputChannelMask), - audio_channel_mask_get_bits(inputChannelMask)); + + const audio_channel_representation_t inputRepresentation = + audio_channel_mask_get_representation(inputChannelMask); + const audio_channel_representation_t outputRepresentation = + audio_channel_mask_get_representation(outputChannelMask); + const uint32_t inputBits = audio_channel_mask_get_bits(inputChannelMask); + const uint32_t outputBits = audio_channel_mask_get_bits(outputChannelMask); + + switch (inputRepresentation) { + case AUDIO_CHANNEL_REPRESENTATION_POSITION: + switch (outputRepresentation) { + case AUDIO_CHANNEL_REPRESENTATION_POSITION: + memcpy_by_index_array_initialization(mIdxAry, ARRAY_SIZE(mIdxAry), + outputBits, inputBits); + return; + case AUDIO_CHANNEL_REPRESENTATION_INDEX: + // TODO: output channel index mask not currently allowed + // fall through + default: + break; + } + break; + case AUDIO_CHANNEL_REPRESENTATION_INDEX: + switch (outputRepresentation) { + case AUDIO_CHANNEL_REPRESENTATION_POSITION: + memcpy_by_index_array_initialization_src_index(mIdxAry, ARRAY_SIZE(mIdxAry), + outputBits, inputBits); + return; + case AUDIO_CHANNEL_REPRESENTATION_INDEX: + // TODO: output channel index mask not currently allowed + // fall through + default: + break; + } + break; + default: + break; + } + LOG_ALWAYS_FATAL("invalid channel mask conversion from %#x to %#x", + inputChannelMask, outputChannelMask); } void AudioMixer::RemixBufferProvider::copyFrames(void *dst, const void *src, size_t frames) @@ -430,6 +465,10 @@ void AudioMixer::setLog(NBLog::Writer *log) mState.mLog = log; } +static inline audio_format_t selectMixerInFormat(audio_format_t inputFormat __unused) { + return kUseFloat && kUseNewMixer ? 
AUDIO_FORMAT_PCM_FLOAT : AUDIO_FORMAT_PCM_16_BIT; +} + int AudioMixer::getTrackName(audio_channel_mask_t channelMask, audio_format_t format, int sessionId) { @@ -492,24 +531,23 @@ int AudioMixer::getTrackName(audio_channel_mask_t channelMask, t->mInputBufferProvider = NULL; t->mReformatBufferProvider = NULL; t->downmixerBufferProvider = NULL; + t->mPostDownmixReformatBufferProvider = NULL; t->mMixerFormat = AUDIO_FORMAT_PCM_16_BIT; t->mFormat = format; - t->mMixerInFormat = kUseFloat && kUseNewMixer - ? AUDIO_FORMAT_PCM_FLOAT : AUDIO_FORMAT_PCM_16_BIT; + t->mMixerInFormat = selectMixerInFormat(format); + t->mDownmixRequiresFormat = AUDIO_FORMAT_INVALID; // no format required t->mMixerChannelMask = audio_channel_mask_from_representation_and_bits( AUDIO_CHANNEL_REPRESENTATION_POSITION, AUDIO_CHANNEL_OUT_STEREO); t->mMixerChannelCount = audio_channel_count_from_out_mask(t->mMixerChannelMask); // Check the downmixing (or upmixing) requirements. - status_t status = initTrackDownmix(t, n); + status_t status = t->prepareForDownmix(); if (status != OK) { ALOGE("AudioMixer::getTrackName invalid channelMask (%#x)", channelMask); return -1; } - // initTrackDownmix() may change the input format requirement. - // If you desire floating point input to the mixer, it may change - // to integer because the downmixer requires integer to process. + // prepareForDownmix() may change mDownmixRequiresFormat ALOGVV("mMixerFormat:%#x mMixerInFormat:%#x\n", t->mMixerFormat, t->mMixerInFormat); - prepareTrackForReformat(t, n); + t->prepareForReformat(); mTrackNames |= 1 << n; return TRACK0 + n; } @@ -526,7 +564,7 @@ void AudioMixer::invalidateState(uint32_t mask) } // Called when channel masks have changed for a track name -// TODO: Fix Downmixbufferprofider not to (possibly) change mixer input format, +// TODO: Fix DownmixerBufferProvider not to (possibly) change mixer input format, // which will simplify this logic. 
bool AudioMixer::setChannelMasks(int name, audio_channel_mask_t trackChannelMask, audio_channel_mask_t mixerChannelMask) { @@ -551,21 +589,18 @@ bool AudioMixer::setChannelMasks(int name, // channel masks have changed, does this track need a downmixer? // update to try using our desired format (if we aren't already using it) - const audio_format_t prevMixerInFormat = track.mMixerInFormat; - track.mMixerInFormat = kUseFloat && kUseNewMixer - ? AUDIO_FORMAT_PCM_FLOAT : AUDIO_FORMAT_PCM_16_BIT; - const status_t status = initTrackDownmix(&mState.tracks[name], name); + const audio_format_t prevDownmixerFormat = track.mDownmixRequiresFormat; + const status_t status = mState.tracks[name].prepareForDownmix(); ALOGE_IF(status != OK, - "initTrackDownmix error %d, track channel mask %#x, mixer channel mask %#x", + "prepareForDownmix error %d, track channel mask %#x, mixer channel mask %#x", status, track.channelMask, track.mMixerChannelMask); - const bool mixerInFormatChanged = prevMixerInFormat != track.mMixerInFormat; - if (mixerInFormatChanged) { - prepareTrackForReformat(&track, name); // because of downmixer, track format may change! + if (prevDownmixerFormat != track.mDownmixRequiresFormat) { + track.prepareForReformat(); // because of downmixer, track format may change! } - if (track.resampler && (mixerInFormatChanged || mixerChannelCountChanged)) { - // resampler input format or channels may have changed. + if (track.resampler && mixerChannelCountChanged) { + // resampler channels may have changed. const uint32_t resetToSampleRate = track.sampleRate; delete track.resampler; track.resampler = NULL; @@ -576,99 +611,125 @@ bool AudioMixer::setChannelMasks(int name, return true; } -status_t AudioMixer::initTrackDownmix(track_t* pTrack, int trackName) -{ - // Only remix (upmix or downmix) if the track and mixer/device channel masks - // are not the same and not handled internally, as mono -> stereo currently is. 
- if (pTrack->channelMask != pTrack->mMixerChannelMask - && !(pTrack->channelMask == AUDIO_CHANNEL_OUT_MONO - && pTrack->mMixerChannelMask == AUDIO_CHANNEL_OUT_STEREO)) { - return prepareTrackForDownmix(pTrack, trackName); - } - // no remix necessary - unprepareTrackForDownmix(pTrack, trackName); - return NO_ERROR; -} +void AudioMixer::track_t::unprepareForDownmix() { + ALOGV("AudioMixer::unprepareForDownmix(%p)", this); -void AudioMixer::unprepareTrackForDownmix(track_t* pTrack, int trackName __unused) { - ALOGV("AudioMixer::unprepareTrackForDownmix(%d)", trackName); - - if (pTrack->downmixerBufferProvider != NULL) { + mDownmixRequiresFormat = AUDIO_FORMAT_INVALID; + if (downmixerBufferProvider != NULL) { // this track had previously been configured with a downmixer, delete it ALOGV(" deleting old downmixer"); - delete pTrack->downmixerBufferProvider; - pTrack->downmixerBufferProvider = NULL; - reconfigureBufferProviders(pTrack); + delete downmixerBufferProvider; + downmixerBufferProvider = NULL; + reconfigureBufferProviders(); } else { ALOGV(" nothing to do, no downmixer to delete"); } } -status_t AudioMixer::prepareTrackForDownmix(track_t* pTrack, int trackName) +status_t AudioMixer::track_t::prepareForDownmix() { - ALOGV("AudioMixer::prepareTrackForDownmix(%d) with mask 0x%x", trackName, pTrack->channelMask); + ALOGV("AudioMixer::prepareForDownmix(%p) with mask 0x%x", + this, channelMask); // discard the previous downmixer if there was one - unprepareTrackForDownmix(pTrack, trackName); - if (DownmixerBufferProvider::isMultichannelCapable()) { - DownmixerBufferProvider* pDbp = new DownmixerBufferProvider(pTrack->channelMask, - pTrack->mMixerChannelMask, - AUDIO_FORMAT_PCM_16_BIT /* TODO: use pTrack->mMixerInFormat, now only PCM 16 */, - pTrack->sampleRate, pTrack->sessionId, kCopyBufferFrameCount); + unprepareForDownmix(); + // Only remix (upmix or downmix) if the track and mixer/device channel masks + // are not the same and not handled internally, as mono -> 
stereo currently is. + if (channelMask == mMixerChannelMask + || (channelMask == AUDIO_CHANNEL_OUT_MONO + && mMixerChannelMask == AUDIO_CHANNEL_OUT_STEREO)) { + return NO_ERROR; + } + // DownmixerBufferProvider is only used for position masks. + if (audio_channel_mask_get_representation(channelMask) + == AUDIO_CHANNEL_REPRESENTATION_POSITION + && DownmixerBufferProvider::isMultichannelCapable()) { + DownmixerBufferProvider* pDbp = new DownmixerBufferProvider(channelMask, + mMixerChannelMask, + AUDIO_FORMAT_PCM_16_BIT /* TODO: use mMixerInFormat, now only PCM 16 */, + sampleRate, sessionId, kCopyBufferFrameCount); if (pDbp->isValid()) { // if constructor completed properly - pTrack->mMixerInFormat = AUDIO_FORMAT_PCM_16_BIT; // PCM 16 bit required for downmix - pTrack->downmixerBufferProvider = pDbp; - reconfigureBufferProviders(pTrack); + mDownmixRequiresFormat = AUDIO_FORMAT_PCM_16_BIT; // PCM 16 bit required for downmix + downmixerBufferProvider = pDbp; + reconfigureBufferProviders(); return NO_ERROR; } delete pDbp; } // Effect downmixer does not accept the channel conversion. Let's use our remixer. - RemixBufferProvider* pRbp = new RemixBufferProvider(pTrack->channelMask, - pTrack->mMixerChannelMask, pTrack->mMixerInFormat, kCopyBufferFrameCount); + RemixBufferProvider* pRbp = new RemixBufferProvider(channelMask, + mMixerChannelMask, mMixerInFormat, kCopyBufferFrameCount); // Remix always finds a conversion whereas Downmixer effect above may fail. 
- pTrack->downmixerBufferProvider = pRbp; - reconfigureBufferProviders(pTrack); + downmixerBufferProvider = pRbp; + reconfigureBufferProviders(); return NO_ERROR; } -void AudioMixer::unprepareTrackForReformat(track_t* pTrack, int trackName __unused) { - ALOGV("AudioMixer::unprepareTrackForReformat(%d)", trackName); - if (pTrack->mReformatBufferProvider != NULL) { - delete pTrack->mReformatBufferProvider; - pTrack->mReformatBufferProvider = NULL; - reconfigureBufferProviders(pTrack); +void AudioMixer::track_t::unprepareForReformat() { + ALOGV("AudioMixer::unprepareForReformat(%p)", this); + bool requiresReconfigure = false; + if (mReformatBufferProvider != NULL) { + delete mReformatBufferProvider; + mReformatBufferProvider = NULL; + requiresReconfigure = true; + } + if (mPostDownmixReformatBufferProvider != NULL) { + delete mPostDownmixReformatBufferProvider; + mPostDownmixReformatBufferProvider = NULL; + requiresReconfigure = true; + } + if (requiresReconfigure) { + reconfigureBufferProviders(); } } -status_t AudioMixer::prepareTrackForReformat(track_t* pTrack, int trackName) +status_t AudioMixer::track_t::prepareForReformat() { - ALOGV("AudioMixer::prepareTrackForReformat(%d) with format %#x", trackName, pTrack->mFormat); - // discard the previous reformatter if there was one - unprepareTrackForReformat(pTrack, trackName); - // only configure reformatter if needed - if (pTrack->mFormat != pTrack->mMixerInFormat) { - pTrack->mReformatBufferProvider = new ReformatBufferProvider( - audio_channel_count_from_out_mask(pTrack->channelMask), - pTrack->mFormat, pTrack->mMixerInFormat, + ALOGV("AudioMixer::prepareForReformat(%p) with format %#x", this, mFormat); + // discard previous reformatters + unprepareForReformat(); + // only configure reformatters as needed + const audio_format_t targetFormat = mDownmixRequiresFormat != AUDIO_FORMAT_INVALID + ? 
mDownmixRequiresFormat : mMixerInFormat; + bool requiresReconfigure = false; + if (mFormat != targetFormat) { + mReformatBufferProvider = new ReformatBufferProvider( + audio_channel_count_from_out_mask(channelMask), + mFormat, + targetFormat, kCopyBufferFrameCount); - reconfigureBufferProviders(pTrack); + requiresReconfigure = true; + } + if (targetFormat != mMixerInFormat) { + mPostDownmixReformatBufferProvider = new ReformatBufferProvider( + audio_channel_count_from_out_mask(mMixerChannelMask), + targetFormat, + mMixerInFormat, + kCopyBufferFrameCount); + requiresReconfigure = true; + } + if (requiresReconfigure) { + reconfigureBufferProviders(); } return NO_ERROR; } -void AudioMixer::reconfigureBufferProviders(track_t* pTrack) +void AudioMixer::track_t::reconfigureBufferProviders() { - pTrack->bufferProvider = pTrack->mInputBufferProvider; - if (pTrack->mReformatBufferProvider) { - pTrack->mReformatBufferProvider->setBufferProvider(pTrack->bufferProvider); - pTrack->bufferProvider = pTrack->mReformatBufferProvider; + bufferProvider = mInputBufferProvider; + if (mReformatBufferProvider) { + mReformatBufferProvider->setBufferProvider(bufferProvider); + bufferProvider = mReformatBufferProvider; + } + if (downmixerBufferProvider) { + downmixerBufferProvider->setBufferProvider(bufferProvider); + bufferProvider = downmixerBufferProvider; } - if (pTrack->downmixerBufferProvider) { - pTrack->downmixerBufferProvider->setBufferProvider(pTrack->bufferProvider); - pTrack->bufferProvider = pTrack->downmixerBufferProvider; + if (mPostDownmixReformatBufferProvider) { + mPostDownmixReformatBufferProvider->setBufferProvider(bufferProvider); + bufferProvider = mPostDownmixReformatBufferProvider; } } @@ -687,9 +748,9 @@ void AudioMixer::deleteTrackName(int name) delete track.resampler; track.resampler = NULL; // delete the downmixer - unprepareTrackForDownmix(&mState.tracks[name], name); + mState.tracks[name].unprepareForDownmix(); // delete the reformatter - 
unprepareTrackForReformat(&mState.tracks[name], name); + mState.tracks[name].unprepareForReformat(); mTrackNames &= ~(1<<name); } @@ -828,7 +889,7 @@ void AudioMixer::setParameter(int name, int target, int param, void *value) ALOG_ASSERT(audio_is_linear_pcm(format), "Invalid format %#x", format); track.mFormat = format; ALOGV("setParameter(TRACK, FORMAT, %#x)", format); - prepareTrackForReformat(&track, name); + track.prepareForReformat(); invalidateState(1 << name); } } break; @@ -1032,10 +1093,13 @@ void AudioMixer::setBufferProvider(int name, AudioBufferProvider* bufferProvider if (mState.tracks[name].mReformatBufferProvider != NULL) { mState.tracks[name].mReformatBufferProvider->reset(); } else if (mState.tracks[name].downmixerBufferProvider != NULL) { + mState.tracks[name].downmixerBufferProvider->reset(); + } else if (mState.tracks[name].mPostDownmixReformatBufferProvider != NULL) { + mState.tracks[name].mPostDownmixReformatBufferProvider->reset(); } mState.tracks[name].mInputBufferProvider = bufferProvider; - reconfigureBufferProviders(&mState.tracks[name]); + mState.tracks[name].reconfigureBufferProviders(); } @@ -2236,4 +2300,4 @@ AudioMixer::process_hook_t AudioMixer::getProcessHook(int processType, uint32_t } // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/services/audioflinger/AudioMixer.h b/services/audioflinger/AudioMixer.h index f4f142b..381036b 100644 --- a/services/audioflinger/AudioMixer.h +++ b/services/audioflinger/AudioMixer.h @@ -127,10 +127,16 @@ public: size_t getUnreleasedFrames(int name) const; static inline bool isValidPcmTrackFormat(audio_format_t format) { - return format == AUDIO_FORMAT_PCM_16_BIT || - format == AUDIO_FORMAT_PCM_24_BIT_PACKED || - format == AUDIO_FORMAT_PCM_32_BIT || - format == AUDIO_FORMAT_PCM_FLOAT; + switch (format) { + case AUDIO_FORMAT_PCM_8_BIT: + case AUDIO_FORMAT_PCM_16_BIT: + case AUDIO_FORMAT_PCM_24_BIT_PACKED: + 
case AUDIO_FORMAT_PCM_32_BIT: + case AUDIO_FORMAT_PCM_FLOAT: + return true; + default: + return false; + } } private: @@ -205,17 +211,34 @@ private: int32_t* auxBuffer; // 16-byte boundary + + /* Buffer providers are constructed to translate the track input data as needed. + * + * 1) mInputBufferProvider: The AudioTrack buffer provider. + * 2) mReformatBufferProvider: If not NULL, performs the audio reformat to + * match either mMixerInFormat or mDownmixRequiresFormat, if the downmixer + * requires reformat. For example, it may convert floating point input to + * PCM_16_bit if that's required by the downmixer. + * 3) downmixerBufferProvider: If not NULL, performs the channel remixing to match + * the number of channels required by the mixer sink. + * 4) mPostDownmixReformatBufferProvider: If not NULL, performs reformatting from + * the downmixer requirements to the mixer engine input requirements. + */ AudioBufferProvider* mInputBufferProvider; // externally provided buffer provider. CopyBufferProvider* mReformatBufferProvider; // provider wrapper for reformatting. CopyBufferProvider* downmixerBufferProvider; // wrapper for channel conversion. + CopyBufferProvider* mPostDownmixReformatBufferProvider; + // 16-byte boundary int32_t sessionId; - // 16-byte boundary audio_format_t mMixerFormat; // output mix format: AUDIO_FORMAT_PCM_(FLOAT|16_BIT) audio_format_t mFormat; // input track format audio_format_t mMixerInFormat; // mix internal format AUDIO_FORMAT_PCM_(FLOAT|16_BIT) // each track must be converted to this format. 
+ audio_format_t mDownmixRequiresFormat; // required downmixer format + // AUDIO_FORMAT_PCM_16_BIT if 16 bit necessary + // AUDIO_FORMAT_INVALID if no required format float mVolume[MAX_NUM_VOLUMES]; // floating point set volume float mPrevVolume[MAX_NUM_VOLUMES]; // floating point previous volume @@ -225,7 +248,6 @@ private: float mPrevAuxLevel; // floating point prev aux level float mAuxInc; // floating point aux increment - // 16-byte boundary audio_channel_mask_t mMixerChannelMask; uint32_t mMixerChannelCount; @@ -236,6 +258,12 @@ private: void adjustVolumeRamp(bool aux, bool useFloat = false); size_t getUnreleasedFrames() const { return resampler != NULL ? resampler->getUnreleasedFrames() : 0; }; + + status_t prepareForDownmix(); + void unprepareForDownmix(); + status_t prepareForReformat(); + void unprepareForReformat(); + void reconfigureBufferProviders(); }; typedef void (*process_hook_t)(state_t* state, int64_t pts); @@ -382,14 +410,6 @@ private: bool setChannelMasks(int name, audio_channel_mask_t trackChannelMask, audio_channel_mask_t mixerChannelMask); - // TODO: remove unused trackName/trackNum from functions below. 
- static status_t initTrackDownmix(track_t* pTrack, int trackName); - static status_t prepareTrackForDownmix(track_t* pTrack, int trackNum); - static void unprepareTrackForDownmix(track_t* pTrack, int trackName); - static status_t prepareTrackForReformat(track_t* pTrack, int trackNum); - static void unprepareTrackForReformat(track_t* pTrack, int trackName); - static void reconfigureBufferProviders(track_t* pTrack); - static void track__genericResample(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux); static void track__nop(track_t* t, int32_t* out, size_t numFrames, int32_t* temp, int32_t* aux); @@ -465,6 +485,6 @@ private: }; // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android #endif // ANDROID_AUDIO_MIXER_H diff --git a/services/audioflinger/AudioResampler.h b/services/audioflinger/AudioResampler.h index 069d946..863614a 100644 --- a/services/audioflinger/AudioResampler.h +++ b/services/audioflinger/AudioResampler.h @@ -170,7 +170,6 @@ private: }; // ---------------------------------------------------------------------------- -} -; // namespace android +} // namespace android #endif // ANDROID_AUDIO_RESAMPLER_H diff --git a/services/audioflinger/AudioResamplerCubic.cpp b/services/audioflinger/AudioResamplerCubic.cpp index 8f14ff9..d3cbd1c 100644 --- a/services/audioflinger/AudioResamplerCubic.cpp +++ b/services/audioflinger/AudioResamplerCubic.cpp @@ -185,5 +185,4 @@ save_state: } // ---------------------------------------------------------------------------- -} -; // namespace android +} // namespace android diff --git a/services/audioflinger/AudioResamplerCubic.h b/services/audioflinger/AudioResamplerCubic.h index b315da5..1ddc5f9 100644 --- a/services/audioflinger/AudioResamplerCubic.h +++ b/services/audioflinger/AudioResamplerCubic.h @@ -63,6 +63,6 @@ private: }; // ---------------------------------------------------------------------------- -}; // namespace 
android +} // namespace android #endif /*ANDROID_AUDIO_RESAMPLER_CUBIC_H*/ diff --git a/services/audioflinger/AudioResamplerDyn.cpp b/services/audioflinger/AudioResamplerDyn.cpp index 0eeb201..c21d4ca 100644 --- a/services/audioflinger/AudioResamplerDyn.cpp +++ b/services/audioflinger/AudioResamplerDyn.cpp @@ -618,4 +618,4 @@ template class AudioResamplerDyn<int16_t, int16_t, int32_t>; template class AudioResamplerDyn<int32_t, int16_t, int32_t>; // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/services/audioflinger/AudioResamplerDyn.h b/services/audioflinger/AudioResamplerDyn.h index e886a68..238b163 100644 --- a/services/audioflinger/AudioResamplerDyn.h +++ b/services/audioflinger/AudioResamplerDyn.h @@ -127,6 +127,6 @@ private: void* mCoefBuffer; // if a filter is created, this is not null }; -}; // namespace android +} // namespace android #endif /*ANDROID_AUDIO_RESAMPLER_DYN_H*/ diff --git a/services/audioflinger/AudioResamplerFirGen.h b/services/audioflinger/AudioResamplerFirGen.h index f3718b6..ad18965 100644 --- a/services/audioflinger/AudioResamplerFirGen.h +++ b/services/audioflinger/AudioResamplerFirGen.h @@ -204,7 +204,8 @@ struct I0ATerm { template <> struct I0ATerm<0> { // 1/sqrt(2*PI); - static const CONSTEXPR double value = 0.398942280401432677939946059934381868475858631164934657665925; + static const CONSTEXPR double value = + 0.398942280401432677939946059934381868475858631164934657665925; }; #if USE_HORNERS_METHOD @@ -706,6 +707,6 @@ static inline void firKaiserGen(T* coef, int L, int halfNumCoef, } } -}; // namespace android +} // namespace android #endif /*ANDROID_AUDIO_RESAMPLER_FIR_GEN_H*/ diff --git a/services/audioflinger/AudioResamplerFirOps.h b/services/audioflinger/AudioResamplerFirOps.h index bf2163f..658285d 100644 --- a/services/audioflinger/AudioResamplerFirOps.h +++ b/services/audioflinger/AudioResamplerFirOps.h @@ -25,7 +25,7 @@ namespace 
android { #define USE_INLINE_ASSEMBLY (false) #endif -#if USE_INLINE_ASSEMBLY && defined(__ARM_NEON__) +#if defined(__aarch64__) || defined(__ARM_NEON__) #define USE_NEON (true) #include <arm_neon.h> #else @@ -158,6 +158,6 @@ int32_t mulAddRL(int left, uint32_t inRL, int32_t v, int32_t a) #endif } -}; // namespace android +} // namespace android #endif /*ANDROID_AUDIO_RESAMPLER_FIR_OPS_H*/ diff --git a/services/audioflinger/AudioResamplerFirProcess.h b/services/audioflinger/AudioResamplerFirProcess.h index efc8055..176202e 100644 --- a/services/audioflinger/AudioResamplerFirProcess.h +++ b/services/audioflinger/AudioResamplerFirProcess.h @@ -174,7 +174,8 @@ struct InterpNull { * Process() calls ProcessBase() with TFUNC = InterpCompute, for interpolated phase. */ -template <int CHANNELS, int STRIDE, typename TFUNC, typename TC, typename TI, typename TO, typename TINTERP> +template <int CHANNELS, int STRIDE, typename TFUNC, typename TC, typename TI, typename TO, + typename TINTERP> static inline void ProcessBase(TO* const out, size_t count, @@ -242,6 +243,9 @@ void ProcessBase(TO* const out, } } +/* Calculates a single output frame from a polyphase resampling filter. + * See Process() for parameter details. + */ template <int CHANNELS, int STRIDE, typename TC, typename TI, typename TO> static inline void ProcessL(TO* const out, @@ -255,6 +259,39 @@ void ProcessL(TO* const out, ProcessBase<CHANNELS, STRIDE, InterpNull>(out, count, coefsP, coefsN, sP, sN, 0, volumeLR); } +/* + * Calculates a single output frame from a polyphase resampling filter, + * with filter phase interpolation. + * + * @param out should point to the output buffer with space for at least one output frame. + * + * @param count should be half the size of the total filter length (halfNumCoefs), as we + * use symmetry in filter coefficients to evaluate two dot products. + * + * @param coefsP is one phase of the polyphase filter bank of size halfNumCoefs, corresponding + * to the positive sP. 
+ * + * @param coefsN is one phase of the polyphase filter bank of size halfNumCoefs, corresponding + * to the negative sN. + * + * @param coefsP1 is the next phase of coefsP (used for interpolation). + * + * @param coefsN1 is the next phase of coefsN (used for interpolation). + * + * @param sP is the positive half of the coefficients (as viewed by a convolution), + * starting at the original samples pointer and decrementing (by CHANNELS). + * + * @param sN is the negative half of the samples (as viewed by a convolution), + * starting at the original samples pointer + CHANNELS and incrementing (by CHANNELS). + * + * @param lerpP The fractional siting between the polyphase indices is given by the bits + * below coefShift. See fir() for details. + * + * @param volumeLR is a pointer to an array of two 32 bit volume values, one per stereo channel, + * expressed as a S32 integer or float. A negative value inverts the channel 180 degrees. + * The pointer volumeLR should be aligned to a minimum of 8 bytes. + * A typical value for volume is 0x1000 to align to a unity gain output of 20.12. + */ template <int CHANNELS, int STRIDE, typename TC, typename TI, typename TO, typename TINTERP> static inline void Process(TO* const out, @@ -268,11 +305,12 @@ void Process(TO* const out, TINTERP lerpP, const TO* const volumeLR) { - ProcessBase<CHANNELS, STRIDE, InterpCompute>(out, count, coefsP, coefsN, sP, sN, lerpP, volumeLR); + ProcessBase<CHANNELS, STRIDE, InterpCompute>(out, count, coefsP, coefsN, sP, sN, lerpP, + volumeLR); } /* - * Calculates a single output frame (two samples) from input sample pointer. + * Calculates a single output frame from input sample pointer. * * This sets up the params for the accelerated Process() and ProcessL() * functions to do the appropriate dot products. @@ -307,7 +345,7 @@ void Process(TO* const out, * the positive half of the filter is dot product from samples to samples-halfNumCoefs+1. 
* * @param volumeLR is a pointer to an array of two 32 bit volume values, one per stereo channel, - * expressed as a S32 integer. A negative value inverts the channel 180 degrees. + * expressed as a S32 integer or float. A negative value inverts the channel 180 degrees. * The pointer volumeLR should be aligned to a minimum of 8 bytes. * A typical value for volume is 0x1000 to align to a unity gain output of 20.12. * @@ -396,6 +434,6 @@ void fir(TO* const out, } } -}; // namespace android +} // namespace android #endif /*ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_H*/ diff --git a/services/audioflinger/AudioResamplerFirProcessNeon.h b/services/audioflinger/AudioResamplerFirProcessNeon.h index f311cef..3de9edd 100644 --- a/services/audioflinger/AudioResamplerFirProcessNeon.h +++ b/services/audioflinger/AudioResamplerFirProcessNeon.h @@ -22,14 +22,35 @@ namespace android { // depends on AudioResamplerFirOps.h, AudioResamplerFirProcess.h #if USE_NEON + +// use intrinsics if inline arm32 assembly is not possible +#if !USE_INLINE_ASSEMBLY +#define USE_INTRINSIC +#endif + +// following intrinsics available only on ARM 64 bit ACLE +#ifndef __aarch64__ +#undef vld1q_f32_x2 +#undef vld1q_s32_x2 +#endif + +#define TO_STRING2(x) #x +#define TO_STRING(x) TO_STRING2(x) +// uncomment to print GCC version, may be relevant for intrinsic optimizations +/* #pragma message ("GCC version: " TO_STRING(__GNUC__) \ + "." TO_STRING(__GNUC_MINOR__) \ + "." TO_STRING(__GNUC_PATCHLEVEL__)) */ + // -// NEON specializations are enabled for Process() and ProcessL() +// NEON specializations are enabled for Process() and ProcessL() in AudioResamplerFirProcess.h +// +// Two variants are presented here: +// ARM NEON inline assembly which appears up to 10-15% faster than intrinsics (gcc 4.9) for arm32. +// ARM NEON intrinsics which can also be used by arm64 and x86/64 with NEON header. 
// -// TODO: Stride 16 and Stride 8 can be combined with one pass stride 8 (if necessary) -// and looping stride 16 (or vice versa). This has some polyphase coef data alignment -// issues with S16 coefs. Consider this later. // Macros to save a mono/stereo accumulator sample in q0 (and q4) as stereo out. +// These are only used for inline assembly. #define ASSEMBLY_ACCUMULATE_MONO \ "vld1.s32 {d2}, [%[vLR]:64] \n"/* (1) load volumes */\ "vld1.s32 {d3}, %[out] \n"/* (2) unaligned load the output */\ @@ -49,6 +70,458 @@ namespace android { "vqadd.s32 d3, d3, d0 \n"/* (1+4d) accumulate result (saturating)*/\ "vst1.s32 {d3}, %[out] \n"/* (2+2d)store result*/ +template <int CHANNELS, int STRIDE, bool FIXED> +static inline void ProcessNeonIntrinsic(int32_t* out, + int count, + const int16_t* coefsP, + const int16_t* coefsN, + const int16_t* sP, + const int16_t* sN, + const int32_t* volumeLR, + uint32_t lerpP, + const int16_t* coefsP1, + const int16_t* coefsN1) +{ + ALOG_ASSERT(count > 0 && (count & 7) == 0); // multiple of 8 + COMPILE_TIME_ASSERT_FUNCTION_SCOPE(CHANNELS == 1 || CHANNELS == 2); + + sP -= CHANNELS*((STRIDE>>1)-1); + coefsP = (const int16_t*)__builtin_assume_aligned(coefsP, 16); + coefsN = (const int16_t*)__builtin_assume_aligned(coefsN, 16); + + int16x4_t interp; + if (!FIXED) { + interp = vdup_n_s16(lerpP); + //interp = (int16x4_t)vset_lane_s32 ((int32x2_t)lerpP, interp, 0); + coefsP1 = (const int16_t*)__builtin_assume_aligned(coefsP1, 16); + coefsN1 = (const int16_t*)__builtin_assume_aligned(coefsN1, 16); + } + int32x4_t accum, accum2; + // warning uninitialized if we use veorq_s32 + // (alternative to below) accum = veorq_s32(accum, accum); + accum = vdupq_n_s32(0); + if (CHANNELS == 2) { + // (alternative to below) accum2 = veorq_s32(accum2, accum2); + accum2 = vdupq_n_s32(0); + } + do { + int16x8_t posCoef = vld1q_s16(coefsP); + coefsP += 8; + int16x8_t negCoef = vld1q_s16(coefsN); + coefsN += 8; + if (!FIXED) { // interpolate + int16x8_t posCoef1 = 
vld1q_s16(coefsP1); + coefsP1 += 8; + int16x8_t negCoef1 = vld1q_s16(coefsN1); + coefsN1 += 8; + + posCoef1 = vsubq_s16(posCoef1, posCoef); + negCoef = vsubq_s16(negCoef, negCoef1); + + posCoef1 = vqrdmulhq_lane_s16(posCoef1, interp, 0); + negCoef = vqrdmulhq_lane_s16(negCoef, interp, 0); + + posCoef = vaddq_s16(posCoef, posCoef1); + negCoef = vaddq_s16(negCoef, negCoef1); + } + switch (CHANNELS) { + case 1: { + int16x8_t posSamp = vld1q_s16(sP); + int16x8_t negSamp = vld1q_s16(sN); + sN += 8; + posSamp = vrev64q_s16(posSamp); + + // dot product + accum = vmlal_s16(accum, vget_low_s16(posSamp), vget_high_s16(posCoef)); // reversed + accum = vmlal_s16(accum, vget_high_s16(posSamp), vget_low_s16(posCoef)); // reversed + accum = vmlal_s16(accum, vget_low_s16(negSamp), vget_low_s16(negCoef)); + accum = vmlal_s16(accum, vget_high_s16(negSamp), vget_high_s16(negCoef)); + sP -= 8; + } break; + case 2: { + int16x8x2_t posSamp = vld2q_s16(sP); + int16x8x2_t negSamp = vld2q_s16(sN); + sN += 16; + posSamp.val[0] = vrev64q_s16(posSamp.val[0]); + posSamp.val[1] = vrev64q_s16(posSamp.val[1]); + + // dot product + accum = vmlal_s16(accum, vget_low_s16(posSamp.val[0]), vget_high_s16(posCoef)); // r + accum = vmlal_s16(accum, vget_high_s16(posSamp.val[0]), vget_low_s16(posCoef)); // r + accum2 = vmlal_s16(accum2, vget_low_s16(posSamp.val[1]), vget_high_s16(posCoef)); // r + accum2 = vmlal_s16(accum2, vget_high_s16(posSamp.val[1]), vget_low_s16(posCoef)); // r + accum = vmlal_s16(accum, vget_low_s16(negSamp.val[0]), vget_low_s16(negCoef)); + accum = vmlal_s16(accum, vget_high_s16(negSamp.val[0]), vget_high_s16(negCoef)); + accum2 = vmlal_s16(accum2, vget_low_s16(negSamp.val[1]), vget_low_s16(negCoef)); + accum2 = vmlal_s16(accum2, vget_high_s16(negSamp.val[1]), vget_high_s16(negCoef)); + sP -= 16; + } + } break; + } while (count -= 8); + + // multiply by volume and save + volumeLR = (const int32_t*)__builtin_assume_aligned(volumeLR, 8); + int32x2_t vLR = vld1_s32(volumeLR); + 
int32x2_t outSamp = vld1_s32(out); + // combine and funnel down accumulator + int32x2_t outAccum = vpadd_s32(vget_low_s32(accum), vget_high_s32(accum)); + if (CHANNELS == 1) { + // duplicate accum to both L and R + outAccum = vpadd_s32(outAccum, outAccum); + } else if (CHANNELS == 2) { + // accum2 contains R, fold in + int32x2_t outAccum2 = vpadd_s32(vget_low_s32(accum2), vget_high_s32(accum2)); + outAccum = vpadd_s32(outAccum, outAccum2); + } + outAccum = vqrdmulh_s32(outAccum, vLR); + outSamp = vqadd_s32(outSamp, outAccum); + vst1_s32(out, outSamp); +} + +template <int CHANNELS, int STRIDE, bool FIXED> +static inline void ProcessNeonIntrinsic(int32_t* out, + int count, + const int32_t* coefsP, + const int32_t* coefsN, + const int16_t* sP, + const int16_t* sN, + const int32_t* volumeLR, + uint32_t lerpP, + const int32_t* coefsP1, + const int32_t* coefsN1) +{ + ALOG_ASSERT(count > 0 && (count & 7) == 0); // multiple of 8 + COMPILE_TIME_ASSERT_FUNCTION_SCOPE(CHANNELS == 1 || CHANNELS == 2); + + sP -= CHANNELS*((STRIDE>>1)-1); + coefsP = (const int32_t*)__builtin_assume_aligned(coefsP, 16); + coefsN = (const int32_t*)__builtin_assume_aligned(coefsN, 16); + + int32x2_t interp; + if (!FIXED) { + interp = vdup_n_s32(lerpP); + coefsP1 = (const int32_t*)__builtin_assume_aligned(coefsP1, 16); + coefsN1 = (const int32_t*)__builtin_assume_aligned(coefsN1, 16); + } + int32x4_t accum, accum2; + // warning uninitialized if we use veorq_s32 + // (alternative to below) accum = veorq_s32(accum, accum); + accum = vdupq_n_s32(0); + if (CHANNELS == 2) { + // (alternative to below) accum2 = veorq_s32(accum2, accum2); + accum2 = vdupq_n_s32(0); + } + do { +#ifdef vld1q_s32_x2 + int32x4x2_t posCoef = vld1q_s32_x2(coefsP); + coefsP += 8; + int32x4x2_t negCoef = vld1q_s32_x2(coefsN); + coefsN += 8; +#else + int32x4x2_t posCoef; + posCoef.val[0] = vld1q_s32(coefsP); + coefsP += 4; + posCoef.val[1] = vld1q_s32(coefsP); + coefsP += 4; + int32x4x2_t negCoef; + negCoef.val[0] = 
vld1q_s32(coefsN); + coefsN += 4; + negCoef.val[1] = vld1q_s32(coefsN); + coefsN += 4; +#endif + if (!FIXED) { // interpolate +#ifdef vld1q_s32_x2 + int32x4x2_t posCoef1 = vld1q_s32_x2(coefsP1); + coefsP1 += 8; + int32x4x2_t negCoef1 = vld1q_s32_x2(coefsN1); + coefsN1 += 8; +#else + int32x4x2_t posCoef1; + posCoef1.val[0] = vld1q_s32(coefsP1); + coefsP1 += 4; + posCoef1.val[1] = vld1q_s32(coefsP1); + coefsP1 += 4; + int32x4x2_t negCoef1; + negCoef1.val[0] = vld1q_s32(coefsN1); + coefsN1 += 4; + negCoef1.val[1] = vld1q_s32(coefsN1); + coefsN1 += 4; +#endif + + posCoef1.val[0] = vsubq_s32(posCoef1.val[0], posCoef.val[0]); + posCoef1.val[1] = vsubq_s32(posCoef1.val[1], posCoef.val[1]); + negCoef.val[0] = vsubq_s32(negCoef.val[0], negCoef1.val[0]); + negCoef.val[1] = vsubq_s32(negCoef.val[1], negCoef1.val[1]); + + posCoef1.val[0] = vqrdmulhq_lane_s32(posCoef1.val[0], interp, 0); + posCoef1.val[1] = vqrdmulhq_lane_s32(posCoef1.val[1], interp, 0); + negCoef.val[0] = vqrdmulhq_lane_s32(negCoef.val[0], interp, 0); + negCoef.val[1] = vqrdmulhq_lane_s32(negCoef.val[1], interp, 0); + + posCoef.val[0] = vaddq_s32(posCoef.val[0], posCoef1.val[0]); + posCoef.val[1] = vaddq_s32(posCoef.val[1], posCoef1.val[1]); + negCoef.val[0] = vaddq_s32(negCoef.val[0], negCoef1.val[0]); + negCoef.val[1] = vaddq_s32(negCoef.val[1], negCoef1.val[1]); + } + switch (CHANNELS) { + case 1: { + int16x8_t posSamp = vld1q_s16(sP); + int16x8_t negSamp = vld1q_s16(sN); + sN += 8; + posSamp = vrev64q_s16(posSamp); + + int32x4_t posSamp0 = vshll_n_s16(vget_low_s16(posSamp), 15); + int32x4_t posSamp1 = vshll_n_s16(vget_high_s16(posSamp), 15); + int32x4_t negSamp0 = vshll_n_s16(vget_low_s16(negSamp), 15); + int32x4_t negSamp1 = vshll_n_s16(vget_high_s16(negSamp), 15); + + // dot product + posSamp0 = vqrdmulhq_s32(posSamp0, posCoef.val[1]); // reversed + posSamp1 = vqrdmulhq_s32(posSamp1, posCoef.val[0]); // reversed + negSamp0 = vqrdmulhq_s32(negSamp0, negCoef.val[0]); + negSamp1 = vqrdmulhq_s32(negSamp1, 
negCoef.val[1]); + + accum = vaddq_s32(accum, posSamp0); + negSamp0 = vaddq_s32(negSamp0, negSamp1); + accum = vaddq_s32(accum, posSamp1); + accum = vaddq_s32(accum, negSamp0); + + sP -= 8; + } break; + case 2: { + int16x8x2_t posSamp = vld2q_s16(sP); + int16x8x2_t negSamp = vld2q_s16(sN); + sN += 16; + posSamp.val[0] = vrev64q_s16(posSamp.val[0]); + posSamp.val[1] = vrev64q_s16(posSamp.val[1]); + + // left + int32x4_t posSamp0 = vshll_n_s16(vget_low_s16(posSamp.val[0]), 15); + int32x4_t posSamp1 = vshll_n_s16(vget_high_s16(posSamp.val[0]), 15); + int32x4_t negSamp0 = vshll_n_s16(vget_low_s16(negSamp.val[0]), 15); + int32x4_t negSamp1 = vshll_n_s16(vget_high_s16(negSamp.val[0]), 15); + + // dot product + posSamp0 = vqrdmulhq_s32(posSamp0, posCoef.val[1]); // reversed + posSamp1 = vqrdmulhq_s32(posSamp1, posCoef.val[0]); // reversed + negSamp0 = vqrdmulhq_s32(negSamp0, negCoef.val[0]); + negSamp1 = vqrdmulhq_s32(negSamp1, negCoef.val[1]); + + accum = vaddq_s32(accum, posSamp0); + negSamp0 = vaddq_s32(negSamp0, negSamp1); + accum = vaddq_s32(accum, posSamp1); + accum = vaddq_s32(accum, negSamp0); + + // right + posSamp0 = vshll_n_s16(vget_low_s16(posSamp.val[1]), 15); + posSamp1 = vshll_n_s16(vget_high_s16(posSamp.val[1]), 15); + negSamp0 = vshll_n_s16(vget_low_s16(negSamp.val[1]), 15); + negSamp1 = vshll_n_s16(vget_high_s16(negSamp.val[1]), 15); + + // dot product + posSamp0 = vqrdmulhq_s32(posSamp0, posCoef.val[1]); // reversed + posSamp1 = vqrdmulhq_s32(posSamp1, posCoef.val[0]); // reversed + negSamp0 = vqrdmulhq_s32(negSamp0, negCoef.val[0]); + negSamp1 = vqrdmulhq_s32(negSamp1, negCoef.val[1]); + + accum2 = vaddq_s32(accum2, posSamp0); + negSamp0 = vaddq_s32(negSamp0, negSamp1); + accum2 = vaddq_s32(accum2, posSamp1); + accum2 = vaddq_s32(accum2, negSamp0); + + sP -= 16; + } break; + } + } while (count -= 8); + + // multiply by volume and save + volumeLR = (const int32_t*)__builtin_assume_aligned(volumeLR, 8); + int32x2_t vLR = vld1_s32(volumeLR); + int32x2_t 
outSamp = vld1_s32(out); + // combine and funnel down accumulator + int32x2_t outAccum = vpadd_s32(vget_low_s32(accum), vget_high_s32(accum)); + if (CHANNELS == 1) { + // duplicate accum to both L and R + outAccum = vpadd_s32(outAccum, outAccum); + } else if (CHANNELS == 2) { + // accum2 contains R, fold in + int32x2_t outAccum2 = vpadd_s32(vget_low_s32(accum2), vget_high_s32(accum2)); + outAccum = vpadd_s32(outAccum, outAccum2); + } + outAccum = vqrdmulh_s32(outAccum, vLR); + outSamp = vqadd_s32(outSamp, outAccum); + vst1_s32(out, outSamp); +} + +template <int CHANNELS, int STRIDE, bool FIXED> +static inline void ProcessNeonIntrinsic(float* out, + int count, + const float* coefsP, + const float* coefsN, + const float* sP, + const float* sN, + const float* volumeLR, + float lerpP, + const float* coefsP1, + const float* coefsN1) +{ + ALOG_ASSERT(count > 0 && (count & 7) == 0); // multiple of 8 + COMPILE_TIME_ASSERT_FUNCTION_SCOPE(CHANNELS == 1 || CHANNELS == 2); + + sP -= CHANNELS*((STRIDE>>1)-1); + coefsP = (const float*)__builtin_assume_aligned(coefsP, 16); + coefsN = (const float*)__builtin_assume_aligned(coefsN, 16); + + float32x2_t interp; + if (!FIXED) { + interp = vdup_n_f32(lerpP); + coefsP1 = (const float*)__builtin_assume_aligned(coefsP1, 16); + coefsN1 = (const float*)__builtin_assume_aligned(coefsN1, 16); + } + float32x4_t accum, accum2; + // warning uninitialized if we use veorq_s32 + // (alternative to below) accum = veorq_s32(accum, accum); + accum = vdupq_n_f32(0); + if (CHANNELS == 2) { + // (alternative to below) accum2 = veorq_s32(accum2, accum2); + accum2 = vdupq_n_f32(0); + } + do { +#ifdef vld1q_f32_x2 + float32x4x2_t posCoef = vld1q_f32_x2(coefsP); + coefsP += 8; + float32x4x2_t negCoef = vld1q_f32_x2(coefsN); + coefsN += 8; +#else + float32x4x2_t posCoef; + posCoef.val[0] = vld1q_f32(coefsP); + coefsP += 4; + posCoef.val[1] = vld1q_f32(coefsP); + coefsP += 4; + float32x4x2_t negCoef; + negCoef.val[0] = vld1q_f32(coefsN); + coefsN += 4; + 
negCoef.val[1] = vld1q_f32(coefsN); + coefsN += 4; +#endif + if (!FIXED) { // interpolate +#ifdef vld1q_f32_x2 + float32x4x2_t posCoef1 = vld1q_f32_x2(coefsP1); + coefsP1 += 8; + float32x4x2_t negCoef1 = vld1q_f32_x2(coefsN1); + coefsN1 += 8; +#else + float32x4x2_t posCoef1; + posCoef1.val[0] = vld1q_f32(coefsP1); + coefsP1 += 4; + posCoef1.val[1] = vld1q_f32(coefsP1); + coefsP1 += 4; + float32x4x2_t negCoef1; + negCoef1.val[0] = vld1q_f32(coefsN1); + coefsN1 += 4; + negCoef1.val[1] = vld1q_f32(coefsN1); + coefsN1 += 4; +#endif + posCoef1.val[0] = vsubq_f32(posCoef1.val[0], posCoef.val[0]); + posCoef1.val[1] = vsubq_f32(posCoef1.val[1], posCoef.val[1]); + negCoef.val[0] = vsubq_f32(negCoef.val[0], negCoef1.val[0]); + negCoef.val[1] = vsubq_f32(negCoef.val[1], negCoef1.val[1]); + + posCoef.val[0] = vmlaq_lane_f32(posCoef.val[0], posCoef1.val[0], interp, 0); + posCoef.val[1] = vmlaq_lane_f32(posCoef.val[1], posCoef1.val[1], interp, 0); + negCoef.val[0] = vmlaq_lane_f32(negCoef1.val[0], negCoef.val[0], interp, 0); // rev + negCoef.val[1] = vmlaq_lane_f32(negCoef1.val[1], negCoef.val[1], interp, 0); // rev + } + switch (CHANNELS) { + case 1: { +#ifdef vld1q_f32_x2 + float32x4x2_t posSamp = vld1q_f32_x2(sP); + float32x4x2_t negSamp = vld1q_f32_x2(sN); + sN += 8; + sP -= 8; +#else + float32x4x2_t posSamp; + posSamp.val[0] = vld1q_f32(sP); + sP += 4; + posSamp.val[1] = vld1q_f32(sP); + sP -= 12; + float32x4x2_t negSamp; + negSamp.val[0] = vld1q_f32(sN); + sN += 4; + negSamp.val[1] = vld1q_f32(sN); + sN += 4; +#endif + // effectively we want a vrev128q_f32() + posSamp.val[0] = vrev64q_f32(posSamp.val[0]); + posSamp.val[1] = vrev64q_f32(posSamp.val[1]); + posSamp.val[0] = vcombine_f32( + vget_high_f32(posSamp.val[0]), vget_low_f32(posSamp.val[0])); + posSamp.val[1] = vcombine_f32( + vget_high_f32(posSamp.val[1]), vget_low_f32(posSamp.val[1])); + + accum = vmlaq_f32(accum, posSamp.val[0], posCoef.val[1]); + accum = vmlaq_f32(accum, posSamp.val[1], posCoef.val[0]); + accum = 
vmlaq_f32(accum, negSamp.val[0], negCoef.val[0]); + accum = vmlaq_f32(accum, negSamp.val[1], negCoef.val[1]); + } break; + case 2: { + float32x4x2_t posSamp0 = vld2q_f32(sP); + sP += 8; + float32x4x2_t negSamp0 = vld2q_f32(sN); + sN += 8; + posSamp0.val[0] = vrev64q_f32(posSamp0.val[0]); + posSamp0.val[1] = vrev64q_f32(posSamp0.val[1]); + posSamp0.val[0] = vcombine_f32( + vget_high_f32(posSamp0.val[0]), vget_low_f32(posSamp0.val[0])); + posSamp0.val[1] = vcombine_f32( + vget_high_f32(posSamp0.val[1]), vget_low_f32(posSamp0.val[1])); + + float32x4x2_t posSamp1 = vld2q_f32(sP); + sP -= 24; + float32x4x2_t negSamp1 = vld2q_f32(sN); + sN += 8; + posSamp1.val[0] = vrev64q_f32(posSamp1.val[0]); + posSamp1.val[1] = vrev64q_f32(posSamp1.val[1]); + posSamp1.val[0] = vcombine_f32( + vget_high_f32(posSamp1.val[0]), vget_low_f32(posSamp1.val[0])); + posSamp1.val[1] = vcombine_f32( + vget_high_f32(posSamp1.val[1]), vget_low_f32(posSamp1.val[1])); + + // Note: speed is affected by accumulation order. + // Also, speed appears slower using vmul/vadd instead of vmla for + // stereo case, comparable for mono. 
+ + accum = vmlaq_f32(accum, negSamp0.val[0], negCoef.val[0]); + accum = vmlaq_f32(accum, negSamp1.val[0], negCoef.val[1]); + accum2 = vmlaq_f32(accum2, negSamp0.val[1], negCoef.val[0]); + accum2 = vmlaq_f32(accum2, negSamp1.val[1], negCoef.val[1]); + + accum = vmlaq_f32(accum, posSamp0.val[0], posCoef.val[1]); // reversed + accum = vmlaq_f32(accum, posSamp1.val[0], posCoef.val[0]); // reversed + accum2 = vmlaq_f32(accum2, posSamp0.val[1], posCoef.val[1]); // reversed + accum2 = vmlaq_f32(accum2, posSamp1.val[1], posCoef.val[0]); // reversed + } break; + } + } while (count -= 8); + + // multiply by volume and save + volumeLR = (const float*)__builtin_assume_aligned(volumeLR, 8); + float32x2_t vLR = vld1_f32(volumeLR); + float32x2_t outSamp = vld1_f32(out); + // combine and funnel down accumulator + float32x2_t outAccum = vpadd_f32(vget_low_f32(accum), vget_high_f32(accum)); + if (CHANNELS == 1) { + // duplicate accum to both L and R + outAccum = vpadd_f32(outAccum, outAccum); + } else if (CHANNELS == 2) { + // accum2 contains R, fold in + float32x2_t outAccum2 = vpadd_f32(vget_low_f32(accum2), vget_high_f32(accum2)); + outAccum = vpadd_f32(outAccum, outAccum2); + } + outSamp = vmla_f32(outSamp, outAccum, vLR); + vst1_f32(out, outSamp); +} + template <> inline void ProcessL<1, 16>(int32_t* const out, int count, @@ -58,6 +531,10 @@ inline void ProcessL<1, 16>(int32_t* const out, const int16_t* sN, const int32_t* const volumeLR) { +#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<1, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR, + 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/); +#else const int CHANNELS = 1; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -99,6 +576,7 @@ inline void ProcessL<1, 16>(int32_t* const out, "q0", "q1", "q2", "q3", "q8", "q10" ); +#endif } template <> @@ -110,6 +588,10 @@ inline void ProcessL<2, 16>(int32_t* const out, const int16_t* sN, const int32_t* const volumeLR) { 
+#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<2, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR, + 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/); +#else const int CHANNELS = 2; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -119,13 +601,13 @@ inline void ProcessL<2, 16>(int32_t* const out, "1: \n" - "vld2.16 {q2, q3}, [%[sP]] \n"// (3+0d) load 8 16-bits stereo samples - "vld2.16 {q5, q6}, [%[sN]]! \n"// (3) load 8 16-bits stereo samples + "vld2.16 {q2, q3}, [%[sP]] \n"// (3+0d) load 8 16-bits stereo frames + "vld2.16 {q5, q6}, [%[sN]]! \n"// (3) load 8 16-bits stereo frames "vld1.16 {q8}, [%[coefsP0]:128]! \n"// (1) load 8 16-bits coefs "vld1.16 {q10}, [%[coefsN0]:128]! \n"// (1) load 8 16-bits coefs - "vrev64.16 q2, q2 \n"// (1) reverse 8 frames of the left positive - "vrev64.16 q3, q3 \n"// (0 combines+) reverse right positive + "vrev64.16 q2, q2 \n"// (1) reverse 8 samples of positive left + "vrev64.16 q3, q3 \n"// (0 combines+) reverse positive right "vmlal.s16 q0, d4, d17 \n"// (1) multiply (reversed) samples left "vmlal.s16 q0, d5, d16 \n"// (1) multiply (reversed) samples left @@ -157,6 +639,7 @@ inline void ProcessL<2, 16>(int32_t* const out, "q4", "q5", "q6", "q8", "q10" ); +#endif } template <> @@ -171,6 +654,11 @@ inline void Process<1, 16>(int32_t* const out, uint32_t lerpP, const int32_t* const volumeLR) { +#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<1, 16, false>(out, count, coefsP, coefsN, sP, sN, volumeLR, + lerpP, coefsP1, coefsN1); +#else + const int CHANNELS = 1; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -227,6 +715,7 @@ inline void Process<1, 16>(int32_t* const out, "q0", "q1", "q2", "q3", "q8", "q9", "q10", "q11" ); +#endif } template <> @@ -241,6 +730,10 @@ inline void Process<2, 16>(int32_t* const out, uint32_t lerpP, const int32_t* const volumeLR) { +#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<2, 16, 
false>(out, count, coefsP, coefsN, sP, sN, volumeLR, + lerpP, coefsP1, coefsN1); +#else const int CHANNELS = 2; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -251,8 +744,8 @@ inline void Process<2, 16>(int32_t* const out, "1: \n" - "vld2.16 {q2, q3}, [%[sP]] \n"// (3+0d) load 8 16-bits stereo samples - "vld2.16 {q5, q6}, [%[sN]]! \n"// (3) load 8 16-bits stereo samples + "vld2.16 {q2, q3}, [%[sP]] \n"// (3+0d) load 8 16-bits stereo frames + "vld2.16 {q5, q6}, [%[sN]]! \n"// (3) load 8 16-bits stereo frames "vld1.16 {q8}, [%[coefsP0]:128]! \n"// (1) load 8 16-bits coefs "vld1.16 {q9}, [%[coefsP1]:128]! \n"// (1) load 8 16-bits coefs for interpolation "vld1.16 {q10}, [%[coefsN1]:128]! \n"// (1) load 8 16-bits coefs @@ -264,8 +757,8 @@ inline void Process<2, 16>(int32_t* const out, "vqrdmulh.s16 q9, q9, d2[0] \n"// (2) interpolate (step2) 1st set of coefs "vqrdmulh.s16 q11, q11, d2[0] \n"// (2) interpolate (step2) 2nd set of coefs - "vrev64.16 q2, q2 \n"// (1) reverse 8 frames of the left positive - "vrev64.16 q3, q3 \n"// (1) reverse 8 frames of the right positive + "vrev64.16 q2, q2 \n"// (1) reverse 8 samples of positive left + "vrev64.16 q3, q3 \n"// (1) reverse 8 samples of positive right "vadd.s16 q8, q8, q9 \n"// (1+1d) interpolate (step3) 1st set "vadd.s16 q10, q10, q11 \n"// (1+1d) interpolate (step3) 2nd set @@ -303,6 +796,7 @@ inline void Process<2, 16>(int32_t* const out, "q4", "q5", "q6", "q8", "q9", "q10", "q11" ); +#endif } template <> @@ -314,6 +808,10 @@ inline void ProcessL<1, 16>(int32_t* const out, const int16_t* sN, const int32_t* const volumeLR) { +#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<1, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR, + 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/); +#else const int CHANNELS = 1; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -327,7 +825,7 @@ inline void ProcessL<1, 
16>(int32_t* const out, "vld1.32 {q8, q9}, [%[coefsP0]:128]! \n"// load 8 32-bits coefs "vld1.32 {q10, q11}, [%[coefsN0]:128]! \n"// load 8 32-bits coefs - "vrev64.16 q2, q2 \n"// reverse 8 frames of the positive side + "vrev64.16 q2, q2 \n"// reverse 8 samples of the positive side "vshll.s16 q12, d4, #15 \n"// extend samples to 31 bits "vshll.s16 q13, d5, #15 \n"// extend samples to 31 bits @@ -335,10 +833,10 @@ inline void ProcessL<1, 16>(int32_t* const out, "vshll.s16 q14, d6, #15 \n"// extend samples to 31 bits "vshll.s16 q15, d7, #15 \n"// extend samples to 31 bits - "vqrdmulh.s32 q12, q12, q9 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q15, q15, q11 \n"// multiply samples by interpolated coef + "vqrdmulh.s32 q12, q12, q9 \n"// multiply samples + "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples + "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples + "vqrdmulh.s32 q15, q15, q11 \n"// multiply samples "vadd.s32 q0, q0, q12 \n"// accumulate result "vadd.s32 q13, q13, q14 \n"// accumulate result @@ -364,6 +862,7 @@ inline void ProcessL<1, 16>(int32_t* const out, "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" ); +#endif } template <> @@ -375,6 +874,10 @@ inline void ProcessL<2, 16>(int32_t* const out, const int16_t* sN, const int32_t* const volumeLR) { +#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<2, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR, + 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/); +#else const int CHANNELS = 2; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -384,13 +887,13 @@ inline void ProcessL<2, 16>(int32_t* const out, "1: \n" - "vld2.16 {q2, q3}, [%[sP]] \n"// load 4 16-bits stereo samples - "vld2.16 {q5, q6}, [%[sN]]! \n"// load 4 16-bits stereo samples - "vld1.32 {q8, q9}, [%[coefsP0]:128]! 
\n"// load 4 32-bits coefs - "vld1.32 {q10, q11}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs + "vld2.16 {q2, q3}, [%[sP]] \n"// load 8 16-bits stereo frames + "vld2.16 {q5, q6}, [%[sN]]! \n"// load 8 16-bits stereo frames + "vld1.32 {q8, q9}, [%[coefsP0]:128]! \n"// load 8 32-bits coefs + "vld1.32 {q10, q11}, [%[coefsN0]:128]! \n"// load 8 32-bits coefs - "vrev64.16 q2, q2 \n"// reverse 8 frames of the positive side - "vrev64.16 q3, q3 \n"// reverse 8 frames of the positive side + "vrev64.16 q2, q2 \n"// reverse 8 samples of positive left + "vrev64.16 q3, q3 \n"// reverse 8 samples of positive right "vshll.s16 q12, d4, #15 \n"// extend samples to 31 bits "vshll.s16 q13, d5, #15 \n"// extend samples to 31 bits @@ -398,15 +901,15 @@ inline void ProcessL<2, 16>(int32_t* const out, "vshll.s16 q14, d10, #15 \n"// extend samples to 31 bits "vshll.s16 q15, d11, #15 \n"// extend samples to 31 bits - "vqrdmulh.s32 q12, q12, q9 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q15, q15, q11 \n"// multiply samples by interpolated coef + "vqrdmulh.s32 q12, q12, q9 \n"// multiply samples by coef + "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples by coef + "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by coef + "vqrdmulh.s32 q15, q15, q11 \n"// multiply samples by coef "vadd.s32 q0, q0, q12 \n"// accumulate result "vadd.s32 q13, q13, q14 \n"// accumulate result - "vadd.s32 q0, q0, q15 \n"// (+1) accumulate result - "vadd.s32 q0, q0, q13 \n"// (+1) accumulate result + "vadd.s32 q0, q0, q15 \n"// accumulate result + "vadd.s32 q0, q0, q13 \n"// accumulate result "vshll.s16 q12, d6, #15 \n"// extend samples to 31 bits "vshll.s16 q13, d7, #15 \n"// extend samples to 31 bits @@ -414,15 +917,15 @@ inline void ProcessL<2, 16>(int32_t* const out, "vshll.s16 q14, d12, #15 \n"// extend samples to 31 bits "vshll.s16 q15, d13, 
#15 \n"// extend samples to 31 bits - "vqrdmulh.s32 q12, q12, q9 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q15, q15, q11 \n"// multiply samples by interpolated coef + "vqrdmulh.s32 q12, q12, q9 \n"// multiply samples by coef + "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples by coef + "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by coef + "vqrdmulh.s32 q15, q15, q11 \n"// multiply samples by coef "vadd.s32 q4, q4, q12 \n"// accumulate result "vadd.s32 q13, q13, q14 \n"// accumulate result - "vadd.s32 q4, q4, q15 \n"// (+1) accumulate result - "vadd.s32 q4, q4, q13 \n"// (+1) accumulate result + "vadd.s32 q4, q4, q15 \n"// accumulate result + "vadd.s32 q4, q4, q13 \n"// accumulate result "subs %[count], %[count], #8 \n"// update loop counter "sub %[sP], %[sP], #32 \n"// move pointer to next set of samples @@ -444,6 +947,7 @@ inline void ProcessL<2, 16>(int32_t* const out, "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" ); +#endif } template <> @@ -458,6 +962,10 @@ inline void Process<1, 16>(int32_t* const out, uint32_t lerpP, const int32_t* const volumeLR) { +#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<1, 16, false>(out, count, coefsP, coefsN, sP, sN, volumeLR, + lerpP, coefsP1, coefsN1); +#else const int CHANNELS = 1; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -489,7 +997,7 @@ inline void Process<1, 16>(int32_t* const out, "vadd.s32 q10, q10, q14 \n"// interpolate (step3) "vadd.s32 q11, q11, q15 \n"// interpolate (step3) - "vrev64.16 q2, q2 \n"// reverse 8 frames of the positive side + "vrev64.16 q2, q2 \n"// reverse 8 samples of the positive side "vshll.s16 q12, d4, #15 \n"// extend samples to 31 bits "vshll.s16 q13, d5, #15 \n"// extend samples to 31 bits @@ -529,6 +1037,7 @@ inline void Process<1, 16>(int32_t* const out, 
"q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" ); +#endif } template <> @@ -543,6 +1052,10 @@ inline void Process<2, 16>(int32_t* const out, uint32_t lerpP, const int32_t* const volumeLR) { +#ifdef USE_INTRINSIC + ProcessNeonIntrinsic<2, 16, false>(out, count, coefsP, coefsN, sP, sN, volumeLR, + lerpP, coefsP1, coefsN1); +#else const int CHANNELS = 2; // template specialization does not preserve params const int STRIDE = 16; sP -= CHANNELS*((STRIDE>>1)-1); @@ -553,8 +1066,8 @@ inline void Process<2, 16>(int32_t* const out, "1: \n" - "vld2.16 {q2, q3}, [%[sP]] \n"// load 4 16-bits stereo samples - "vld2.16 {q5, q6}, [%[sN]]! \n"// load 4 16-bits stereo samples + "vld2.16 {q2, q3}, [%[sP]] \n"// load 8 16-bits stereo frames + "vld2.16 {q5, q6}, [%[sN]]! \n"// load 8 16-bits stereo frames "vld1.32 {q8, q9}, [%[coefsP0]:128]! \n"// load 8 32-bits coefs "vld1.32 {q12, q13}, [%[coefsP1]:128]! \n"// load 8 32-bits coefs "vld1.32 {q10, q11}, [%[coefsN1]:128]! \n"// load 8 32-bits coefs @@ -575,8 +1088,8 @@ inline void Process<2, 16>(int32_t* const out, "vadd.s32 q10, q10, q14 \n"// interpolate (step3) "vadd.s32 q11, q11, q15 \n"// interpolate (step3) - "vrev64.16 q2, q2 \n"// reverse 8 frames of the positive side - "vrev64.16 q3, q3 \n"// reverse 8 frames of the positive side + "vrev64.16 q2, q2 \n"// reverse 8 samples of positive left + "vrev64.16 q3, q3 \n"// reverse 8 samples of positive right "vshll.s16 q12, d4, #15 \n"// extend samples to 31 bits "vshll.s16 q13, d5, #15 \n"// extend samples to 31 bits @@ -591,8 +1104,8 @@ inline void Process<2, 16>(int32_t* const out, "vadd.s32 q0, q0, q12 \n"// accumulate result "vadd.s32 q13, q13, q14 \n"// accumulate result - "vadd.s32 q0, q0, q15 \n"// (+1) accumulate result - "vadd.s32 q0, q0, q13 \n"// (+1) accumulate result + "vadd.s32 q0, q0, q15 \n"// accumulate result + "vadd.s32 q0, q0, q13 \n"// accumulate result "vshll.s16 q12, d6, #15 \n"// extend samples to 31 bits "vshll.s16 q13, d7, #15 \n"// extend samples to 
31 bits @@ -607,8 +1120,8 @@ inline void Process<2, 16>(int32_t* const out, "vadd.s32 q4, q4, q12 \n"// accumulate result "vadd.s32 q13, q13, q14 \n"// accumulate result - "vadd.s32 q4, q4, q15 \n"// (+1) accumulate result - "vadd.s32 q4, q4, q13 \n"// (+1) accumulate result + "vadd.s32 q4, q4, q15 \n"// accumulate result + "vadd.s32 q4, q4, q13 \n"// accumulate result "subs %[count], %[count], #8 \n"// update loop counter "sub %[sP], %[sP], #32 \n"// move pointer to next set of samples @@ -633,517 +1146,69 @@ inline void Process<2, 16>(int32_t* const out, "q8", "q9", "q10", "q11", "q12", "q13", "q14", "q15" ); +#endif } -template <> -inline void ProcessL<1, 8>(int32_t* const out, +template<> +inline void ProcessL<1, 16>(float* const out, int count, - const int16_t* coefsP, - const int16_t* coefsN, - const int16_t* sP, - const int16_t* sN, - const int32_t* const volumeLR) + const float* coefsP, + const float* coefsN, + const float* sP, + const float* sN, + const float* const volumeLR) { - const int CHANNELS = 1; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "veor q0, q0, q0 \n"// (0 - combines+) accumulator = 0 - - "1: \n" - - "vld1.16 {d4}, [%[sP]] \n"// (2+0d) load 4 16-bits mono samples - "vld1.16 {d6}, [%[sN]]! \n"// (2) load 4 16-bits mono samples - "vld1.16 {d16}, [%[coefsP0]:64]! \n"// (1) load 4 16-bits coefs - "vld1.16 {d20}, [%[coefsN0]:64]! \n"// (1) load 4 16-bits coefs - - "vrev64.16 d4, d4 \n"// (1) reversed s3, s2, s1, s0, s7, s6, s5, s4 - - // reordering the vmal to do d6, d7 before d4, d5 is slower(?) 
- "vmlal.s16 q0, d4, d16 \n"// (1) multiply (reversed)samples by coef - "vmlal.s16 q0, d6, d20 \n"// (1) multiply neg samples - - // moving these ARM instructions before neon above seems to be slower - "subs %[count], %[count], #4 \n"// (1) update loop counter - "sub %[sP], %[sP], #8 \n"// (0) move pointer to next set of samples - - // sP used after branch (warning) - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_MONO - - : [out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsN0] "+r" (coefsN), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [vLR] "r" (volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", - "q8", "q10" - ); + ProcessNeonIntrinsic<1, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR, + 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/); } -template <> -inline void ProcessL<2, 8>(int32_t* const out, +template<> +inline void ProcessL<2, 16>(float* const out, int count, - const int16_t* coefsP, - const int16_t* coefsN, - const int16_t* sP, - const int16_t* sN, - const int32_t* const volumeLR) + const float* coefsP, + const float* coefsN, + const float* sP, + const float* sN, + const float* const volumeLR) { - const int CHANNELS = 2; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "veor q0, q0, q0 \n"// (1) acc_L = 0 - "veor q4, q4, q4 \n"// (0 combines+) acc_R = 0 - - "1: \n" - - "vld2.16 {d4, d5}, [%[sP]] \n"// (2+0d) load 8 16-bits stereo samples - "vld2.16 {d6, d7}, [%[sN]]! \n"// (2) load 8 16-bits stereo samples - "vld1.16 {d16}, [%[coefsP0]:64]! \n"// (1) load 8 16-bits coefs - "vld1.16 {d20}, [%[coefsN0]:64]! 
\n"// (1) load 8 16-bits coefs - - "vrev64.16 q2, q2 \n"// (1) reverse 8 frames of the left positive - - "vmlal.s16 q0, d4, d16 \n"// (1) multiply (reversed) samples left - "vmlal.s16 q4, d5, d16 \n"// (1) multiply (reversed) samples right - "vmlal.s16 q0, d6, d20 \n"// (1) multiply samples left - "vmlal.s16 q4, d7, d20 \n"// (1) multiply samples right - - // moving these ARM before neon seems to be slower - "subs %[count], %[count], #4 \n"// (1) update loop counter - "sub %[sP], %[sP], #16 \n"// (0) move pointer to next set of samples - - // sP used after branch (warning) - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_STEREO - - : [out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsN0] "+r" (coefsN), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [vLR] "r" (volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", - "q4", "q5", "q6", - "q8", "q10" - ); + ProcessNeonIntrinsic<2, 16, true>(out, count, coefsP, coefsN, sP, sN, volumeLR, + 0 /*lerpP*/, NULL /*coefsP1*/, NULL /*coefsN1*/); } -template <> -inline void Process<1, 8>(int32_t* const out, +template<> +inline void Process<1, 16>(float* const out, int count, - const int16_t* coefsP, - const int16_t* coefsN, - const int16_t* coefsP1, - const int16_t* coefsN1, - const int16_t* sP, - const int16_t* sN, - uint32_t lerpP, - const int32_t* const volumeLR) + const float* coefsP, + const float* coefsN, + const float* coefsP1, + const float* coefsN1, + const float* sP, + const float* sN, + float lerpP, + const float* const volumeLR) { - const int CHANNELS = 1; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "vmov.32 d2[0], %[lerpP] \n"// load the positive phase S32 Q15 - "veor q0, q0, q0 \n"// (0 - combines+) accumulator = 0 - - "1: \n" - - "vld1.16 {d4}, [%[sP]] \n"// (2+0d) load 4 16-bits mono samples - "vld1.16 {d6}, [%[sN]]! \n"// (2) load 4 16-bits mono samples - "vld1.16 {d16}, [%[coefsP0]:64]! 
\n"// (1) load 4 16-bits coefs - "vld1.16 {d17}, [%[coefsP1]:64]! \n"// (1) load 4 16-bits coefs for interpolation - "vld1.16 {d20}, [%[coefsN1]:64]! \n"// (1) load 4 16-bits coefs - "vld1.16 {d21}, [%[coefsN0]:64]! \n"// (1) load 4 16-bits coefs for interpolation - - "vsub.s16 d17, d17, d16 \n"// (1) interpolate (step1) 1st set of coefs - "vsub.s16 d21, d21, d20 \n"// (1) interpolate (step1) 2nd set of coets - - "vqrdmulh.s16 d17, d17, d2[0] \n"// (2) interpolate (step2) 1st set of coefs - "vqrdmulh.s16 d21, d21, d2[0] \n"// (2) interpolate (step2) 2nd set of coefs - - "vrev64.16 d4, d4 \n"// (1) reverse s3, s2, s1, s0, s7, s6, s5, s4 - - "vadd.s16 d16, d16, d17 \n"// (1+2d) interpolate (step3) 1st set - "vadd.s16 d20, d20, d21 \n"// (1+1d) interpolate (step3) 2nd set - - // reordering the vmal to do d6, d7 before d4, d5 is slower(?) - "vmlal.s16 q0, d4, d16 \n"// (1+0d) multiply (reversed)by coef - "vmlal.s16 q0, d6, d20 \n"// (1) multiply neg samples - - // moving these ARM instructions before neon above seems to be slower - "subs %[count], %[count], #4 \n"// (1) update loop counter - "sub %[sP], %[sP], #8 \n"// move pointer to next set of samples - - // sP used after branch (warning) - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_MONO - - : [out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsN0] "+r" (coefsN), - [coefsP1] "+r" (coefsP1), - [coefsN1] "+r" (coefsN1), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [lerpP] "r" (lerpP), - [vLR] "r" (volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", - "q8", "q9", "q10", "q11" - ); + ProcessNeonIntrinsic<1, 16, false>(out, count, coefsP, coefsN, sP, sN, volumeLR, + lerpP, coefsP1, coefsN1); } -template <> -inline void Process<2, 8>(int32_t* const out, +template<> +inline void Process<2, 16>(float* const out, int count, - const int16_t* coefsP, - const int16_t* coefsN, - const int16_t* coefsP1, - const int16_t* coefsN1, - const int16_t* sP, - const int16_t* sN, - uint32_t lerpP, - const 
int32_t* const volumeLR) + const float* coefsP, + const float* coefsN, + const float* coefsP1, + const float* coefsN1, + const float* sP, + const float* sN, + float lerpP, + const float* const volumeLR) { - const int CHANNELS = 2; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "vmov.32 d2[0], %[lerpP] \n"// load the positive phase - "veor q0, q0, q0 \n"// (1) acc_L = 0 - "veor q4, q4, q4 \n"// (0 combines+) acc_R = 0 - - "1: \n" - - "vld2.16 {d4, d5}, [%[sP]] \n"// (3+0d) load 8 16-bits stereo samples - "vld2.16 {d6, d7}, [%[sN]]! \n"// (3) load 8 16-bits stereo samples - "vld1.16 {d16}, [%[coefsP0]:64]! \n"// (1) load 8 16-bits coefs - "vld1.16 {d17}, [%[coefsP1]:64]! \n"// (1) load 8 16-bits coefs for interpolation - "vld1.16 {d20}, [%[coefsN1]:64]! \n"// (1) load 8 16-bits coefs - "vld1.16 {d21}, [%[coefsN0]:64]! \n"// (1) load 8 16-bits coefs for interpolation - - "vsub.s16 d17, d17, d16 \n"// (1) interpolate (step1) 1st set of coefs - "vsub.s16 d21, d21, d20 \n"// (1) interpolate (step1) 2nd set of coets - - "vqrdmulh.s16 d17, d17, d2[0] \n"// (2) interpolate (step2) 1st set of coefs - "vqrdmulh.s16 d21, d21, d2[0] \n"// (2) interpolate (step2) 2nd set of coefs - - "vrev64.16 q2, q2 \n"// (1) reverse 8 frames of the left positive - - "vadd.s16 d16, d16, d17 \n"// (1+1d) interpolate (step3) 1st set - "vadd.s16 d20, d20, d21 \n"// (1+1d) interpolate (step3) 2nd set - - "vmlal.s16 q0, d4, d16 \n"// (1) multiply (reversed) samples left - "vmlal.s16 q4, d5, d16 \n"// (1) multiply (reversed) samples right - "vmlal.s16 q0, d6, d20 \n"// (1) multiply samples left - "vmlal.s16 q4, d7, d20 \n"// (1) multiply samples right - - // moving these ARM before neon seems to be slower - "subs %[count], %[count], #4 \n"// (1) update loop counter - "sub %[sP], %[sP], #16 \n"// move pointer to next set of samples - - // sP used after branch (warning) - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_STEREO - - : 
[out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsN0] "+r" (coefsN), - [coefsP1] "+r" (coefsP1), - [coefsN1] "+r" (coefsN1), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [lerpP] "r" (lerpP), - [vLR] "r" (volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", - "q4", "q5", "q6", - "q8", "q9", "q10", "q11" - ); -} - -template <> -inline void ProcessL<1, 8>(int32_t* const out, - int count, - const int32_t* coefsP, - const int32_t* coefsN, - const int16_t* sP, - const int16_t* sN, - const int32_t* const volumeLR) -{ - const int CHANNELS = 1; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "veor q0, q0, q0 \n"// result, initialize to 0 - - "1: \n" - - "vld1.16 {d4}, [%[sP]] \n"// load 4 16-bits mono samples - "vld1.16 {d6}, [%[sN]]! \n"// load 4 16-bits mono samples - "vld1.32 {q8}, [%[coefsP0]:128]! \n"// load 4 32-bits coefs - "vld1.32 {q10}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs - - "vrev64.16 d4, d4 \n"// reverse 2 frames of the positive side - - "vshll.s16 q12, d4, #15 \n"// (stall) extend samples to 31 bits - "vshll.s16 q14, d6, #15 \n"// extend samples to 31 bits - - "vqrdmulh.s32 q12, q12, q8 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by interpolated coef - - "vadd.s32 q0, q0, q12 \n"// accumulate result - "vadd.s32 q0, q0, q14 \n"// (stall) accumulate result - - "subs %[count], %[count], #4 \n"// update loop counter - "sub %[sP], %[sP], #8 \n"// move pointer to next set of samples - - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_MONO - - : [out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsN0] "+r" (coefsN), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [vLR] "r" (volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", - "q8", "q9", "q10", "q11", - "q12", "q14" - ); -} - -template <> -inline void ProcessL<2, 8>(int32_t* const out, - int count, - const int32_t* coefsP, - const int32_t* 
coefsN, - const int16_t* sP, - const int16_t* sN, - const int32_t* const volumeLR) -{ - const int CHANNELS = 2; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "veor q0, q0, q0 \n"// result, initialize to 0 - "veor q4, q4, q4 \n"// result, initialize to 0 - - "1: \n" - - "vld2.16 {d4, d5}, [%[sP]] \n"// load 4 16-bits stereo samples - "vld2.16 {d6, d7}, [%[sN]]! \n"// load 4 16-bits stereo samples - "vld1.32 {q8}, [%[coefsP0]:128]! \n"// load 4 32-bits coefs - "vld1.32 {q10}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs - - "vrev64.16 q2, q2 \n"// reverse 2 frames of the positive side - - "vshll.s16 q12, d4, #15 \n"// extend samples to 31 bits - "vshll.s16 q13, d5, #15 \n"// extend samples to 31 bits - - "vshll.s16 q14, d6, #15 \n"// extend samples to 31 bits - "vshll.s16 q15, d7, #15 \n"// extend samples to 31 bits - - "vqrdmulh.s32 q12, q12, q8 \n"// multiply samples by coef - "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples by coef - "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by coef - "vqrdmulh.s32 q15, q15, q10 \n"// multiply samples by coef - - "vadd.s32 q0, q0, q12 \n"// accumulate result - "vadd.s32 q4, q4, q13 \n"// accumulate result - "vadd.s32 q0, q0, q14 \n"// accumulate result - "vadd.s32 q4, q4, q15 \n"// accumulate result - - "subs %[count], %[count], #4 \n"// update loop counter - "sub %[sP], %[sP], #16 \n"// move pointer to next set of samples - - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_STEREO - - : [out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsN0] "+r" (coefsN), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [vLR] "r" (volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", "q4", - "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15" - ); -} - -template <> -inline void Process<1, 8>(int32_t* const out, - int count, - const int32_t* coefsP, - const int32_t* coefsN, - const int32_t* coefsP1, - const int32_t* coefsN1, - const int16_t* sP, 
- const int16_t* sN, - uint32_t lerpP, - const int32_t* const volumeLR) -{ - const int CHANNELS = 1; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "vmov.32 d2[0], %[lerpP] \n"// load the positive phase - "veor q0, q0, q0 \n"// result, initialize to 0 - - "1: \n" - - "vld1.16 {d4}, [%[sP]] \n"// load 4 16-bits mono samples - "vld1.16 {d6}, [%[sN]]! \n"// load 4 16-bits mono samples - "vld1.32 {q8}, [%[coefsP0]:128]! \n"// load 4 32-bits coefs - "vld1.32 {q9}, [%[coefsP1]:128]! \n"// load 4 32-bits coefs for interpolation - "vld1.32 {q10}, [%[coefsN1]:128]! \n"// load 4 32-bits coefs - "vld1.32 {q11}, [%[coefsN0]:128]! \n"// load 4 32-bits coefs for interpolation - - "vrev64.16 d4, d4 \n"// reverse 2 frames of the positive side - - "vsub.s32 q9, q9, q8 \n"// interpolate (step1) 1st set of coefs - "vsub.s32 q11, q11, q10 \n"// interpolate (step1) 2nd set of coets - "vshll.s16 q12, d4, #15 \n"// extend samples to 31 bits - - "vqrdmulh.s32 q9, q9, d2[0] \n"// interpolate (step2) 1st set of coefs - "vqrdmulh.s32 q11, q11, d2[0] \n"// interpolate (step2) 2nd set of coefs - "vshll.s16 q14, d6, #15 \n"// extend samples to 31 bits - - "vadd.s32 q8, q8, q9 \n"// interpolate (step3) 1st set - "vadd.s32 q10, q10, q11 \n"// interpolate (step4) 2nd set - - "vqrdmulh.s32 q12, q12, q8 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by interpolated coef - - "vadd.s32 q0, q0, q12 \n"// accumulate result - "vadd.s32 q0, q0, q14 \n"// accumulate result - - "subs %[count], %[count], #4 \n"// update loop counter - "sub %[sP], %[sP], #8 \n"// move pointer to next set of samples - - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_MONO - - : [out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsP1] "+r" (coefsP1), - [coefsN0] "+r" (coefsN), - [coefsN1] "+r" (coefsN1), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [lerpP] "r" (lerpP), - [vLR] "r" 
(volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", - "q8", "q9", "q10", "q11", - "q12", "q14" - ); -} - -template <> -inline -void Process<2, 8>(int32_t* const out, - int count, - const int32_t* coefsP, - const int32_t* coefsN, - const int32_t* coefsP1, - const int32_t* coefsN1, - const int16_t* sP, - const int16_t* sN, - uint32_t lerpP, - const int32_t* const volumeLR) -{ - const int CHANNELS = 2; // template specialization does not preserve params - const int STRIDE = 8; - sP -= CHANNELS*((STRIDE>>1)-1); - asm ( - "vmov.32 d2[0], %[lerpP] \n"// load the positive phase - "veor q0, q0, q0 \n"// result, initialize to 0 - "veor q4, q4, q4 \n"// result, initialize to 0 - - "1: \n" - "vld2.16 {d4, d5}, [%[sP]] \n"// load 4 16-bits stereo samples - "vld2.16 {d6, d7}, [%[sN]]! \n"// load 4 16-bits stereo samples - "vld1.32 {q8}, [%[coefsP0]:128]! \n"// load 4 32-bits coefs - "vld1.32 {q9}, [%[coefsP1]:128]! \n"// load 4 32-bits coefs for interpolation - "vld1.32 {q10}, [%[coefsN1]:128]! \n"// load 4 32-bits coefs - "vld1.32 {q11}, [%[coefsN0]:128]! 
\n"// load 4 32-bits coefs for interpolation - - "vrev64.16 q2, q2 \n"// (reversed) 2 frames of the positive side - - "vsub.s32 q9, q9, q8 \n"// interpolate (step1) 1st set of coefs - "vsub.s32 q11, q11, q10 \n"// interpolate (step1) 2nd set of coets - "vshll.s16 q12, d4, #15 \n"// extend samples to 31 bits - "vshll.s16 q13, d5, #15 \n"// extend samples to 31 bits - - "vqrdmulh.s32 q9, q9, d2[0] \n"// interpolate (step2) 1st set of coefs - "vqrdmulh.s32 q11, q11, d2[1] \n"// interpolate (step3) 2nd set of coefs - "vshll.s16 q14, d6, #15 \n"// extend samples to 31 bits - "vshll.s16 q15, d7, #15 \n"// extend samples to 31 bits - - "vadd.s32 q8, q8, q9 \n"// interpolate (step3) 1st set - "vadd.s32 q10, q10, q11 \n"// interpolate (step4) 2nd set - - "vqrdmulh.s32 q12, q12, q8 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q13, q13, q8 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q14, q14, q10 \n"// multiply samples by interpolated coef - "vqrdmulh.s32 q15, q15, q10 \n"// multiply samples by interpolated coef - - "vadd.s32 q0, q0, q12 \n"// accumulate result - "vadd.s32 q4, q4, q13 \n"// accumulate result - "vadd.s32 q0, q0, q14 \n"// accumulate result - "vadd.s32 q4, q4, q15 \n"// accumulate result - - "subs %[count], %[count], #4 \n"// update loop counter - "sub %[sP], %[sP], #16 \n"// move pointer to next set of samples - - "bne 1b \n"// loop - - ASSEMBLY_ACCUMULATE_STEREO - - : [out] "=Uv" (out[0]), - [count] "+r" (count), - [coefsP0] "+r" (coefsP), - [coefsP1] "+r" (coefsP1), - [coefsN0] "+r" (coefsN), - [coefsN1] "+r" (coefsN1), - [sP] "+r" (sP), - [sN] "+r" (sN) - : [lerpP] "r" (lerpP), - [vLR] "r" (volumeLR) - : "cc", "memory", - "q0", "q1", "q2", "q3", "q4", - "q8", "q9", "q10", "q11", - "q12", "q13", "q14", "q15" - ); + ProcessNeonIntrinsic<2, 16, false>(out, count, coefsP, coefsN, sP, sN, volumeLR, + lerpP, coefsP1, coefsN1); } #endif //USE_NEON -}; // namespace android +} // namespace android #endif 
/*ANDROID_AUDIO_RESAMPLER_FIR_PROCESS_NEON_H*/ diff --git a/services/audioflinger/AudioResamplerSinc.cpp b/services/audioflinger/AudioResamplerSinc.cpp index e6fb76c..ba9a356 100644 --- a/services/audioflinger/AudioResamplerSinc.cpp +++ b/services/audioflinger/AudioResamplerSinc.cpp @@ -61,135 +61,7 @@ namespace android { * cmd-line: fir -l 7 -s 48000 -c 20478 */ const uint32_t AudioResamplerSinc::mFirCoefsUp[] __attribute__ ((aligned (32))) = { - 0x6d374bc7, 0x111c6ba0, 0xf3240e61, 0x07d14a38, 0xfc509e64, 0x0139cee9, 0xffc8c866, 0xfffcc300, - 0x6d35278a, 0x103e8192, 0xf36b9dfd, 0x07bdfaa5, 0xfc5102d0, 0x013d618d, 0xffc663b9, 0xfffd9592, - 0x6d2ebafe, 0x0f62811a, 0xf3b3d8ac, 0x07a9f399, 0xfc51d9a6, 0x0140bea5, 0xffc41212, 0xfffe631e, - 0x6d24069d, 0x0e8875ad, 0xf3fcb43e, 0x07953976, 0xfc53216f, 0x0143e67c, 0xffc1d373, 0xffff2b9f, - 0x6d150b35, 0x0db06a89, 0xf4462690, 0x077fd0ac, 0xfc54d8ae, 0x0146d965, 0xffbfa7d9, 0xffffef10, - 0x6d01c9e3, 0x0cda6ab5, 0xf4902587, 0x0769bdaf, 0xfc56fdda, 0x014997bb, 0xffbd8f40, 0x0000ad6e, - 0x6cea4418, 0x0c0680fe, 0xf4daa718, 0x07530501, 0xfc598f60, 0x014c21db, 0xffbb89a1, 0x000166b6, - 0x6cce7b97, 0x0b34b7f5, 0xf525a143, 0x073bab28, 0xfc5c8ba5, 0x014e782a, 0xffb996f3, 0x00021ae5, - 0x6cae7272, 0x0a6519f4, 0xf5710a17, 0x0723b4b4, 0xfc5ff105, 0x01509b14, 0xffb7b728, 0x0002c9fd, - 0x6c8a2b0f, 0x0997b116, 0xf5bcd7b1, 0x070b2639, 0xfc63bdd3, 0x01528b08, 0xffb5ea31, 0x000373fb, - 0x6c61a823, 0x08cc873c, 0xf609003f, 0x06f20453, 0xfc67f05a, 0x0154487b, 0xffb42ffc, 0x000418e2, - 0x6c34ecb5, 0x0803a60a, 0xf6557a00, 0x06d853a2, 0xfc6c86dd, 0x0155d3e8, 0xffb28876, 0x0004b8b3, - 0x6c03fc1c, 0x073d16e7, 0xf6a23b44, 0x06be18cd, 0xfc717f97, 0x01572dcf, 0xffb0f388, 0x00055371, - 0x6bced9ff, 0x0678e2fc, 0xf6ef3a6e, 0x06a3587e, 0xfc76d8bc, 0x015856b6, 0xffaf7118, 0x0005e921, - 0x6b958a54, 0x05b71332, 0xf73c6df4, 0x06881761, 0xfc7c9079, 0x01594f25, 0xffae010b, 0x000679c5, - 0x6b581163, 0x04f7b037, 0xf789cc61, 0x066c5a27, 0xfc82a4f4, 0x015a17ab, 
0xffaca344, 0x00070564, - 0x6b1673c1, 0x043ac276, 0xf7d74c53, 0x06502583, 0xfc89144d, 0x015ab0db, 0xffab57a1, 0x00078c04, - 0x6ad0b652, 0x0380521c, 0xf824e480, 0x06337e2a, 0xfc8fdc9f, 0x015b1b4e, 0xffaa1e02, 0x00080dab, - 0x6a86de48, 0x02c86715, 0xf8728bb3, 0x061668d2, 0xfc96fbfc, 0x015b579e, 0xffa8f641, 0x00088a62, - 0x6a38f123, 0x0213090c, 0xf8c038d0, 0x05f8ea30, 0xfc9e7074, 0x015b666c, 0xffa7e039, 0x00090230, - 0x69e6f4b1, 0x01603f6e, 0xf90de2d1, 0x05db06fc, 0xfca63810, 0x015b485b, 0xffa6dbc0, 0x0009751e, - 0x6990ef0b, 0x00b01162, 0xf95b80cb, 0x05bcc3ed, 0xfcae50d6, 0x015afe14, 0xffa5e8ad, 0x0009e337, - 0x6936e697, 0x000285d0, 0xf9a909ea, 0x059e25b5, 0xfcb6b8c4, 0x015a8843, 0xffa506d2, 0x000a4c85, - 0x68d8e206, 0xff57a35e, 0xf9f67577, 0x057f310a, 0xfcbf6dd8, 0x0159e796, 0xffa43603, 0x000ab112, - 0x6876e855, 0xfeaf706f, 0xfa43bad2, 0x055fea9d, 0xfcc86e09, 0x01591cc0, 0xffa3760e, 0x000b10ec, - 0x681100c9, 0xfe09f323, 0xfa90d17b, 0x0540571a, 0xfcd1b74c, 0x01582878, 0xffa2c6c2, 0x000b6c1d, - 0x67a732f4, 0xfd673159, 0xfaddb10c, 0x05207b2f, 0xfcdb4793, 0x01570b77, 0xffa227ec, 0x000bc2b3, - 0x673986ac, 0xfcc730aa, 0xfb2a513b, 0x05005b82, 0xfce51ccb, 0x0155c678, 0xffa19957, 0x000c14bb, - 0x66c80413, 0xfc29f670, 0xfb76a9dd, 0x04dffcb6, 0xfcef34e1, 0x01545a3c, 0xffa11acb, 0x000c6244, - 0x6652b392, 0xfb8f87bd, 0xfbc2b2e4, 0x04bf6369, 0xfcf98dbe, 0x0152c783, 0xffa0ac11, 0x000cab5c, - 0x65d99dd5, 0xfaf7e963, 0xfc0e6461, 0x049e9433, 0xfd04254a, 0x01510f13, 0xffa04cf0, 0x000cf012, - 0x655ccbd3, 0xfa631fef, 0xfc59b685, 0x047d93a8, 0xfd0ef969, 0x014f31b2, 0xff9ffd2c, 0x000d3075, - 0x64dc46c3, 0xf9d12fab, 0xfca4a19f, 0x045c6654, 0xfd1a0801, 0x014d3029, 0xff9fbc89, 0x000d6c97, - 0x64581823, 0xf9421c9d, 0xfcef1e20, 0x043b10bd, 0xfd254ef4, 0x014b0b45, 0xff9f8ac9, 0x000da486, - 0x63d049b4, 0xf8b5ea87, 0xfd392498, 0x04199760, 0xfd30cc24, 0x0148c3d2, 0xff9f67ae, 0x000dd854, - 0x6344e578, 0xf82c9ce7, 0xfd82adba, 0x03f7feb4, 0xfd3c7d73, 0x01465a9f, 0xff9f52f7, 0x000e0812, - 0x62b5f5b2, 
0xf7a636fa, 0xfdcbb25a, 0x03d64b27, 0xfd4860c2, 0x0143d07f, 0xff9f4c65, 0x000e33d3, - 0x622384e8, 0xf722bbb5, 0xfe142b6e, 0x03b4811d, 0xfd5473f3, 0x01412643, 0xff9f53b4, 0x000e5ba7, - 0x618d9ddc, 0xf6a22dcf, 0xfe5c120f, 0x0392a4f4, 0xfd60b4e7, 0x013e5cc0, 0xff9f68a1, 0x000e7fa1, - 0x60f44b91, 0xf6248fb6, 0xfea35f79, 0x0370bafc, 0xfd6d2180, 0x013b74ca, 0xff9f8ae9, 0x000e9fd5, - 0x60579947, 0xf5a9e398, 0xfeea0d0c, 0x034ec77f, 0xfd79b7a1, 0x01386f3a, 0xff9fba47, 0x000ebc54, - 0x5fb79278, 0xf5322b61, 0xff30144a, 0x032ccebb, 0xfd86752e, 0x01354ce7, 0xff9ff674, 0x000ed533, - 0x5f1442dc, 0xf4bd68b6, 0xff756edc, 0x030ad4e1, 0xfd93580d, 0x01320ea9, 0xffa03f2b, 0x000eea84, - 0x5e6db665, 0xf44b9cfe, 0xffba168d, 0x02e8de19, 0xfda05e23, 0x012eb55a, 0xffa09425, 0x000efc5c, - 0x5dc3f93c, 0xf3dcc959, 0xfffe054e, 0x02c6ee7f, 0xfdad855b, 0x012b41d3, 0xffa0f519, 0x000f0ace, - 0x5d1717c4, 0xf370eea9, 0x00413536, 0x02a50a22, 0xfdbacb9e, 0x0127b4f1, 0xffa161bf, 0x000f15ef, - 0x5c671e96, 0xf3080d8c, 0x0083a081, 0x02833506, 0xfdc82edb, 0x01240f8e, 0xffa1d9cf, 0x000f1dd2, - 0x5bb41a80, 0xf2a2265e, 0x00c54190, 0x02617321, 0xfdd5ad01, 0x01205285, 0xffa25cfe, 0x000f228d, - 0x5afe1886, 0xf23f393b, 0x010612eb, 0x023fc85c, 0xfde34403, 0x011c7eb2, 0xffa2eb04, 0x000f2434, - 0x5a4525df, 0xf1df45fd, 0x01460f41, 0x021e3891, 0xfdf0f1d6, 0x011894f0, 0xffa38395, 0x000f22dc, - 0x59894ff3, 0xf1824c3e, 0x01853165, 0x01fcc78f, 0xfdfeb475, 0x0114961b, 0xffa42668, 0x000f1e99, - 0x58caa45b, 0xf1284b58, 0x01c37452, 0x01db7914, 0xfe0c89db, 0x0110830f, 0xffa4d332, 0x000f1781, - 0x580930e1, 0xf0d14267, 0x0200d32c, 0x01ba50d2, 0xfe1a7009, 0x010c5ca6, 0xffa589a6, 0x000f0da8, - 0x5745037c, 0xf07d3043, 0x023d493c, 0x0199526b, 0xfe286505, 0x010823ba, 0xffa6497c, 0x000f0125, - 0x567e2a51, 0xf02c138a, 0x0278d1f2, 0x01788170, 0xfe3666d5, 0x0103d927, 0xffa71266, 0x000ef20b, - 0x55b4b3af, 0xefddea9a, 0x02b368e6, 0x0157e166, 0xfe447389, 0x00ff7dc4, 0xffa7e41a, 0x000ee070, - 0x54e8ae13, 0xef92b393, 0x02ed09d7, 0x013775bf, 
0xfe528931, 0x00fb126b, 0xffa8be4c, 0x000ecc69, - 0x541a281e, 0xef4a6c58, 0x0325b0ad, 0x011741df, 0xfe60a5e5, 0x00f697f3, 0xffa9a0b1, 0x000eb60b, - 0x5349309e, 0xef051290, 0x035d5977, 0x00f7491a, 0xfe6ec7c0, 0x00f20f32, 0xffaa8afe, 0x000e9d6b, - 0x5275d684, 0xeec2a3a3, 0x0394006a, 0x00d78eb3, 0xfe7cece2, 0x00ed78ff, 0xffab7ce7, 0x000e829e, - 0x51a028e8, 0xee831cc3, 0x03c9a1e5, 0x00b815da, 0xfe8b1373, 0x00e8d62d, 0xffac7621, 0x000e65ba, - 0x50c83704, 0xee467ae1, 0x03fe3a6f, 0x0098e1b3, 0xfe99399f, 0x00e4278f, 0xffad7662, 0x000e46d3, - 0x4fee1037, 0xee0cbab9, 0x0431c6b5, 0x0079f54c, 0xfea75d97, 0x00df6df7, 0xffae7d5f, 0x000e25fd, - 0x4f11c3fe, 0xedd5d8ca, 0x0464438c, 0x005b53a4, 0xfeb57d92, 0x00daaa34, 0xffaf8acd, 0x000e034f, - 0x4e3361f7, 0xeda1d15c, 0x0495adf2, 0x003cffa9, 0xfec397cf, 0x00d5dd16, 0xffb09e63, 0x000ddedb, - 0x4d52f9df, 0xed70a07d, 0x04c6030d, 0x001efc35, 0xfed1aa92, 0x00d10769, 0xffb1b7d8, 0x000db8b7, - 0x4c709b8e, 0xed424205, 0x04f54029, 0x00014c12, 0xfedfb425, 0x00cc29f7, 0xffb2d6e1, 0x000d90f6, - 0x4b8c56f8, 0xed16b196, 0x052362ba, 0xffe3f1f7, 0xfeedb2da, 0x00c7458a, 0xffb3fb37, 0x000d67ae, - 0x4aa63c2c, 0xecedea99, 0x0550685d, 0xffc6f08a, 0xfefba508, 0x00c25ae8, 0xffb52490, 0x000d3cf1, - 0x49be5b50, 0xecc7e845, 0x057c4ed4, 0xffaa4a5d, 0xff09890f, 0x00bd6ad7, 0xffb652a7, 0x000d10d5, - 0x48d4c4a2, 0xeca4a59b, 0x05a7140b, 0xff8e01f1, 0xff175d53, 0x00b87619, 0xffb78533, 0x000ce36b, - 0x47e98874, 0xec841d68, 0x05d0b612, 0xff7219b3, 0xff252042, 0x00b37d70, 0xffb8bbed, 0x000cb4c8, - 0x46fcb72d, 0xec664a48, 0x05f93324, 0xff5693fe, 0xff32d04f, 0x00ae8198, 0xffb9f691, 0x000c84ff, - 0x460e6148, 0xec4b26a2, 0x0620899e, 0xff3b731b, 0xff406bf8, 0x00a9834e, 0xffbb34d8, 0x000c5422, - 0x451e9750, 0xec32acb0, 0x0646b808, 0xff20b93e, 0xff4df1be, 0x00a4834c, 0xffbc767f, 0x000c2245, - 0x442d69de, 0xec1cd677, 0x066bbd0d, 0xff066889, 0xff5b602c, 0x009f8249, 0xffbdbb42, 0x000bef79, - 0x433ae99c, 0xec099dcf, 0x068f9781, 0xfeec830d, 0xff68b5d5, 0x009a80f8, 0xffbf02dd, 
0x000bbbd2, - 0x4247273f, 0xebf8fc64, 0x06b2465b, 0xfed30ac5, 0xff75f153, 0x0095800c, 0xffc04d0f, 0x000b8760, - 0x41523389, 0xebeaebaf, 0x06d3c8bb, 0xfeba0199, 0xff831148, 0x00908034, 0xffc19996, 0x000b5235, - 0x405c1f43, 0xebdf6500, 0x06f41de3, 0xfea16960, 0xff90145e, 0x008b821b, 0xffc2e832, 0x000b1c64, - 0x3f64fb40, 0xebd6617b, 0x0713453d, 0xfe8943dc, 0xff9cf947, 0x0086866b, 0xffc438a3, 0x000ae5fc, - 0x3e6cd85b, 0xebcfda19, 0x07313e56, 0xfe7192bd, 0xffa9bebe, 0x00818dcb, 0xffc58aaa, 0x000aaf0f, - 0x3d73c772, 0xebcbc7a7, 0x074e08e0, 0xfe5a579d, 0xffb66386, 0x007c98de, 0xffc6de09, 0x000a77ac, - 0x3c79d968, 0xebca22cc, 0x0769a4b2, 0xfe439407, 0xffc2e669, 0x0077a845, 0xffc83285, 0x000a3fe5, - 0x3b7f1f23, 0xebcae405, 0x078411c7, 0xfe2d496f, 0xffcf463a, 0x0072bc9d, 0xffc987e0, 0x000a07c9, - 0x3a83a989, 0xebce03aa, 0x079d503b, 0xfe177937, 0xffdb81d6, 0x006dd680, 0xffcadde1, 0x0009cf67, - 0x3987897f, 0xebd379eb, 0x07b56051, 0xfe0224b0, 0xffe79820, 0x0068f687, 0xffcc344c, 0x000996ce, - 0x388acfe9, 0xebdb3ed5, 0x07cc426c, 0xfded4d13, 0xfff38806, 0x00641d44, 0xffcd8aeb, 0x00095e0e, - 0x378d8da8, 0xebe54a4f, 0x07e1f712, 0xfdd8f38b, 0xffff507b, 0x005f4b4a, 0xffcee183, 0x00092535, - 0x368fd397, 0xebf1941f, 0x07f67eec, 0xfdc5192d, 0x000af07f, 0x005a8125, 0xffd037e0, 0x0008ec50, - 0x3591b28b, 0xec0013e8, 0x0809dac3, 0xfdb1befc, 0x00166718, 0x0055bf60, 0xffd18dcc, 0x0008b36e, - 0x34933b50, 0xec10c12c, 0x081c0b84, 0xfd9ee5e7, 0x0021b355, 0x00510682, 0xffd2e311, 0x00087a9c, - 0x33947eab, 0xec23934f, 0x082d1239, 0xfd8c8ecc, 0x002cd44d, 0x004c570f, 0xffd4377d, 0x000841e8, - 0x32958d55, 0xec388194, 0x083cf010, 0xfd7aba74, 0x0037c922, 0x0047b186, 0xffd58ade, 0x0008095d, - 0x319677fa, 0xec4f8322, 0x084ba654, 0xfd696998, 0x004290fc, 0x00431666, 0xffd6dd02, 0x0007d108, - 0x30974f3b, 0xec688f02, 0x08593671, 0xfd589cdc, 0x004d2b0e, 0x003e8628, 0xffd82dba, 0x000798f5, - 0x2f9823a8, 0xec839c22, 0x0865a1f1, 0xfd4854d3, 0x00579691, 0x003a0141, 0xffd97cd6, 0x00076130, - 0x2e9905c1, 0xeca0a156, 
0x0870ea7e, 0xfd3891fd, 0x0061d2ca, 0x00358824, 0xffdaca2a, 0x000729c4, - 0x2d9a05f4, 0xecbf9558, 0x087b11de, 0xfd2954c8, 0x006bdf05, 0x00311b41, 0xffdc1588, 0x0006f2bb, - 0x2c9b349e, 0xece06ecb, 0x088419f6, 0xfd1a9d91, 0x0075ba95, 0x002cbb03, 0xffdd5ec6, 0x0006bc21, - 0x2b9ca203, 0xed032439, 0x088c04c8, 0xfd0c6ca2, 0x007f64da, 0x002867d2, 0xffdea5bb, 0x000685ff, - 0x2a9e5e57, 0xed27ac16, 0x0892d470, 0xfcfec233, 0x0088dd38, 0x00242213, 0xffdfea3c, 0x0006505f, - 0x29a079b2, 0xed4dfcc2, 0x08988b2a, 0xfcf19e6b, 0x0092231e, 0x001fea27, 0xffe12c22, 0x00061b4b, - 0x28a30416, 0xed760c88, 0x089d2b4a, 0xfce50161, 0x009b3605, 0x001bc06b, 0xffe26b48, 0x0005e6cb, - 0x27a60d6a, 0xed9fd1a2, 0x08a0b740, 0xfcd8eb17, 0x00a4156b, 0x0017a53b, 0xffe3a788, 0x0005b2e8, - 0x26a9a57b, 0xedcb4237, 0x08a33196, 0xfccd5b82, 0x00acc0da, 0x001398ec, 0xffe4e0bf, 0x00057faa, - 0x25addbf9, 0xedf8545b, 0x08a49cf0, 0xfcc25285, 0x00b537e1, 0x000f9bd2, 0xffe616c8, 0x00054d1a, - 0x24b2c075, 0xee26fe17, 0x08a4fc0d, 0xfcb7cff0, 0x00bd7a1c, 0x000bae3c, 0xffe74984, 0x00051b3e, - 0x23b86263, 0xee573562, 0x08a451c0, 0xfcadd386, 0x00c5872a, 0x0007d075, 0xffe878d3, 0x0004ea1d, - 0x22bed116, 0xee88f026, 0x08a2a0f8, 0xfca45cf7, 0x00cd5eb7, 0x000402c8, 0xffe9a494, 0x0004b9c0, - 0x21c61bc0, 0xeebc2444, 0x089fecbb, 0xfc9b6be5, 0x00d50075, 0x00004579, 0xffeaccaa, 0x00048a2b, - 0x20ce516f, 0xeef0c78d, 0x089c3824, 0xfc92ffe1, 0x00dc6c1e, 0xfffc98c9, 0xffebf0fa, 0x00045b65, - 0x1fd7810f, 0xef26cfca, 0x08978666, 0xfc8b186d, 0x00e3a175, 0xfff8fcf7, 0xffed1166, 0x00042d74, - 0x1ee1b965, 0xef5e32bd, 0x0891dac8, 0xfc83b4fc, 0x00eaa045, 0xfff5723d, 0xffee2dd7, 0x0004005e, - 0x1ded0911, 0xef96e61c, 0x088b38a9, 0xfc7cd4f0, 0x00f16861, 0xfff1f8d2, 0xffef4632, 0x0003d426, - 0x1cf97e8b, 0xefd0df9a, 0x0883a378, 0xfc76779e, 0x00f7f9a3, 0xffee90eb, 0xfff05a60, 0x0003a8d2, - 0x1c072823, 0xf00c14e1, 0x087b1ebc, 0xfc709c4d, 0x00fe53ef, 0xffeb3ab8, 0xfff16a4a, 0x00037e65, - 0x1b1613ff, 0xf0487b98, 0x0871ae0d, 0xfc6b4233, 0x0104772e, 
0xffe7f666, 0xfff275db, 0x000354e5, - 0x1a26501b, 0xf0860962, 0x08675516, 0xfc66687a, 0x010a6353, 0xffe4c41e, 0xfff37d00, 0x00032c54, - 0x1937ea47, 0xf0c4b3e0, 0x085c1794, 0xfc620e3d, 0x01101858, 0xffe1a408, 0xfff47fa5, 0x000304b7, - 0x184af025, 0xf10470b0, 0x084ff957, 0xfc5e328c, 0x0115963d, 0xffde9646, 0xfff57db8, 0x0002de0e, - 0x175f6f2b, 0xf1453571, 0x0842fe3d, 0xfc5ad465, 0x011add0b, 0xffdb9af8, 0xfff67729, 0x0002b85f, - 0x1675749e, 0xf186f7c0, 0x08352a35, 0xfc57f2be, 0x011fecd3, 0xffd8b23b, 0xfff76be9, 0x000293aa, - 0x158d0d95, 0xf1c9ad40, 0x0826813e, 0xfc558c7c, 0x0124c5ab, 0xffd5dc28, 0xfff85be8, 0x00026ff2, - 0x14a646f6, 0xf20d4b92, 0x08170767, 0xfc53a07b, 0x012967b1, 0xffd318d6, 0xfff9471b, 0x00024d39, - 0x13c12d73, 0xf251c85d, 0x0806c0cb, 0xfc522d88, 0x012dd30a, 0xffd06858, 0xfffa2d74, 0x00022b7f, - 0x12ddcd8f, 0xf297194d, 0x07f5b193, 0xfc513266, 0x013207e4, 0xffcdcabe, 0xfffb0ee9, 0x00020ac7, - 0x11fc3395, 0xf2dd3411, 0x07e3ddf7, 0xfc50adcc, 0x01360670, 0xffcb4014, 0xfffbeb70, 0x0001eb10, - 0x111c6ba0, 0xf3240e61, 0x07d14a38, 0xfc509e64, 0x0139cee9, 0xffc8c866, 0xfffcc300, 0x0001cc5c, +#include "AudioResamplerSincUp.h" }; /* @@ -197,135 +69,7 @@ const uint32_t AudioResamplerSinc::mFirCoefsUp[] __attribute__ ((aligned (32))) * cmd-line: fir -l 7 -s 48000 -c 17189 */ const uint32_t AudioResamplerSinc::mFirCoefsDown[] __attribute__ ((aligned (32))) = { - 0x5bacb6f4, 0x1ded1a1d, 0xf0398d56, 0x0394f674, 0x0193a5f9, 0xfe66dbeb, 0x00791043, 0xfffe6631, - 0x5bab6c81, 0x1d3ddccd, 0xf0421d2c, 0x03af9995, 0x01818dc9, 0xfe6bb63e, 0x0079812a, 0xfffdc37d, - 0x5ba78d37, 0x1c8f2cf9, 0xf04beb1d, 0x03c9a04a, 0x016f8aca, 0xfe70a511, 0x0079e34d, 0xfffd2545, - 0x5ba1194f, 0x1be11231, 0xf056f2c7, 0x03e309fe, 0x015d9e64, 0xfe75a79f, 0x007a36e2, 0xfffc8b86, - 0x5b981122, 0x1b3393f8, 0xf0632fb7, 0x03fbd625, 0x014bc9fa, 0xfe7abd23, 0x007a7c20, 0xfffbf639, - 0x5b8c7530, 0x1a86b9bf, 0xf0709d74, 0x04140449, 0x013a0ee9, 0xfe7fe4db, 0x007ab33d, 0xfffb655b, - 0x5b7e461a, 0x19da8ae5, 
0xf07f3776, 0x042b93fd, 0x01286e86, 0xfe851e05, 0x007adc72, 0xfffad8e4, - 0x5b6d84a8, 0x192f0eb7, 0xf08ef92d, 0x044284e6, 0x0116ea22, 0xfe8a67dd, 0x007af7f6, 0xfffa50ce, - 0x5b5a31c6, 0x18844c70, 0xf09fddfe, 0x0458d6b7, 0x01058306, 0xfe8fc1a5, 0x007b0603, 0xfff9cd12, - 0x5b444e81, 0x17da4b37, 0xf0b1e143, 0x046e8933, 0x00f43a74, 0xfe952a9b, 0x007b06d4, 0xfff94da9, - 0x5b2bdc0e, 0x17311222, 0xf0c4fe50, 0x04839c29, 0x00e311a9, 0xfe9aa201, 0x007afaa1, 0xfff8d28c, - 0x5b10dbc2, 0x1688a832, 0xf0d9306d, 0x04980f79, 0x00d209db, 0xfea02719, 0x007ae1a7, 0xfff85bb1, - 0x5af34f18, 0x15e11453, 0xf0ee72db, 0x04abe310, 0x00c12439, 0xfea5b926, 0x007abc20, 0xfff7e910, - 0x5ad337af, 0x153a5d5e, 0xf104c0d2, 0x04bf16e9, 0x00b061eb, 0xfeab576d, 0x007a8a49, 0xfff77a9f, - 0x5ab09748, 0x14948a16, 0xf11c1583, 0x04d1ab0d, 0x009fc413, 0xfeb10134, 0x007a4c5d, 0xfff71057, - 0x5a8b6fc7, 0x13efa12c, 0xf1346c17, 0x04e39f93, 0x008f4bcb, 0xfeb6b5c0, 0x007a029a, 0xfff6aa2b, - 0x5a63c336, 0x134ba937, 0xf14dbfb1, 0x04f4f4a2, 0x007efa29, 0xfebc745c, 0x0079ad3d, 0xfff64812, - 0x5a3993c0, 0x12a8a8bb, 0xf1680b6e, 0x0505aa6a, 0x006ed038, 0xfec23c50, 0x00794c82, 0xfff5ea02, - 0x5a0ce3b2, 0x1206a625, 0xf1834a63, 0x0515c12d, 0x005ecf01, 0xfec80ce8, 0x0078e0a9, 0xfff58ff0, - 0x59ddb57f, 0x1165a7cc, 0xf19f77a0, 0x05253938, 0x004ef782, 0xfecde571, 0x007869ee, 0xfff539cf, - 0x59ac0bba, 0x10c5b3ef, 0xf1bc8e31, 0x053412e4, 0x003f4ab4, 0xfed3c538, 0x0077e891, 0xfff4e794, - 0x5977e919, 0x1026d0b8, 0xf1da891b, 0x05424e9b, 0x002fc98a, 0xfed9ab8f, 0x00775ccf, 0xfff49934, - 0x59415075, 0x0f890437, 0xf1f96360, 0x054feccf, 0x002074ed, 0xfedf97c6, 0x0076c6e8, 0xfff44ea3, - 0x590844c9, 0x0eec5465, 0xf21917ff, 0x055cee03, 0x00114dc3, 0xfee58932, 0x00762719, 0xfff407d2, - 0x58ccc930, 0x0e50c723, 0xf239a1ef, 0x056952c3, 0x000254e8, 0xfeeb7f27, 0x00757da3, 0xfff3c4b7, - 0x588ee0ea, 0x0db6623b, 0xf25afc29, 0x05751baa, 0xfff38b32, 0xfef178fc, 0x0074cac4, 0xfff38542, - 0x584e8f56, 0x0d1d2b5d, 0xf27d219f, 0x0580495c, 0xffe4f171, 
0xfef7760c, 0x00740ebb, 0xfff34968, - 0x580bd7f4, 0x0c85281f, 0xf2a00d43, 0x058adc8d, 0xffd6886d, 0xfefd75af, 0x007349c7, 0xfff3111b, - 0x57c6be67, 0x0bee5dff, 0xf2c3ba04, 0x0594d5fa, 0xffc850e6, 0xff037744, 0x00727c27, 0xfff2dc4c, - 0x577f4670, 0x0b58d262, 0xf2e822ce, 0x059e366c, 0xffba4b98, 0xff097a29, 0x0071a61b, 0xfff2aaef, - 0x573573f2, 0x0ac48a92, 0xf30d428e, 0x05a6feb9, 0xffac7936, 0xff0f7dbf, 0x0070c7e1, 0xfff27cf3, - 0x56e94af1, 0x0a318bc1, 0xf333142f, 0x05af2fbf, 0xff9eda6d, 0xff15816a, 0x006fe1b8, 0xfff2524c, - 0x569acf90, 0x099fdb04, 0xf359929a, 0x05b6ca6b, 0xff916fe1, 0xff1b848e, 0x006ef3df, 0xfff22aea, - 0x564a0610, 0x090f7d57, 0xf380b8ba, 0x05bdcfb2, 0xff843a32, 0xff218692, 0x006dfe94, 0xfff206bf, - 0x55f6f2d3, 0x0880779d, 0xf3a88179, 0x05c44095, 0xff7739f7, 0xff2786e1, 0x006d0217, 0xfff1e5bb, - 0x55a19a5c, 0x07f2ce9b, 0xf3d0e7c2, 0x05ca1e1f, 0xff6a6fc1, 0xff2d84e5, 0x006bfea4, 0xfff1c7d0, - 0x554a0148, 0x076686fc, 0xf3f9e680, 0x05cf6965, 0xff5ddc1a, 0xff33800e, 0x006af47b, 0xfff1acef, - 0x54f02c56, 0x06dba551, 0xf42378a0, 0x05d42387, 0xff517f86, 0xff3977cb, 0x0069e3d9, 0xfff19508, - 0x54942061, 0x06522e0f, 0xf44d9912, 0x05d84daf, 0xff455a80, 0xff3f6b8f, 0x0068ccfa, 0xfff1800b, - 0x5435e263, 0x05ca258f, 0xf47842c5, 0x05dbe90f, 0xff396d7f, 0xff455acf, 0x0067b01e, 0xfff16de9, - 0x53d57774, 0x0543900d, 0xf4a370ad, 0x05def6e4, 0xff2db8f2, 0xff4b4503, 0x00668d80, 0xfff15e93, - 0x5372e4c6, 0x04be71ab, 0xf4cf1dbf, 0x05e17873, 0xff223d40, 0xff5129a3, 0x0065655d, 0xfff151f9, - 0x530e2fac, 0x043ace6e, 0xf4fb44f4, 0x05e36f0d, 0xff16faca, 0xff57082e, 0x006437f1, 0xfff1480b, - 0x52a75d90, 0x03b8aa40, 0xf527e149, 0x05e4dc08, 0xff0bf1ed, 0xff5ce021, 0x00630577, 0xfff140b9, - 0x523e73fd, 0x033808eb, 0xf554edbd, 0x05e5c0c6, 0xff0122fc, 0xff62b0fd, 0x0061ce2c, 0xfff13bf3, - 0x51d37897, 0x02b8ee22, 0xf5826555, 0x05e61eae, 0xfef68e45, 0xff687a47, 0x00609249, 0xfff139aa, - 0x5166711c, 0x023b5d76, 0xf5b0431a, 0x05e5f733, 0xfeec340f, 0xff6e3b84, 0x005f520a, 0xfff139cd, - 
0x50f76368, 0x01bf5a5e, 0xf5de8218, 0x05e54bcd, 0xfee2149b, 0xff73f43d, 0x005e0da8, 0xfff13c4c, - 0x5086556f, 0x0144e834, 0xf60d1d63, 0x05e41dfe, 0xfed83023, 0xff79a3fe, 0x005cc55c, 0xfff14119, - 0x50134d3e, 0x00cc0a36, 0xf63c1012, 0x05e26f4e, 0xfece86db, 0xff7f4a54, 0x005b7961, 0xfff14821, - 0x4f9e50ff, 0x0054c382, 0xf66b5544, 0x05e0414d, 0xfec518f1, 0xff84e6d0, 0x005a29ed, 0xfff15156, - 0x4f2766f2, 0xffdf171b, 0xf69ae81d, 0x05dd9593, 0xfebbe68c, 0xff8a7905, 0x0058d738, 0xfff15ca8, - 0x4eae9571, 0xff6b07e7, 0xf6cac3c7, 0x05da6dbe, 0xfeb2efcd, 0xff900089, 0x0057817b, 0xfff16a07, - 0x4e33e2ee, 0xfef898ae, 0xf6fae373, 0x05d6cb72, 0xfeaa34d0, 0xff957cf4, 0x005628ec, 0xfff17962, - 0x4db755f3, 0xfe87cc1b, 0xf72b425b, 0x05d2b05c, 0xfea1b5a9, 0xff9aede0, 0x0054cdc0, 0xfff18aab, - 0x4d38f520, 0xfe18a4bc, 0xf75bdbbd, 0x05ce1e2d, 0xfe997268, 0xffa052ec, 0x0053702d, 0xfff19dd1, - 0x4cb8c72e, 0xfdab2501, 0xf78caae0, 0x05c9169d, 0xfe916b15, 0xffa5abb8, 0x00521068, 0xfff1b2c5, - 0x4c36d2eb, 0xfd3f4f3d, 0xf7bdab16, 0x05c39b6a, 0xfe899fb2, 0xffaaf7e6, 0x0050aea5, 0xfff1c976, - 0x4bb31f3c, 0xfcd525a5, 0xf7eed7b4, 0x05bdae57, 0xfe82103f, 0xffb0371c, 0x004f4b17, 0xfff1e1d6, - 0x4b2db31a, 0xfc6caa53, 0xf8202c1c, 0x05b7512e, 0xfe7abcb1, 0xffb56902, 0x004de5f1, 0xfff1fbd5, - 0x4aa69594, 0xfc05df40, 0xf851a3b6, 0x05b085bc, 0xfe73a4fb, 0xffba8d44, 0x004c7f66, 0xfff21764, - 0x4a1dcdce, 0xfba0c64b, 0xf88339f5, 0x05a94dd5, 0xfe6cc909, 0xffbfa38d, 0x004b17a6, 0xfff23473, - 0x499362ff, 0xfb3d6133, 0xf8b4ea55, 0x05a1ab52, 0xfe6628c1, 0xffc4ab8f, 0x0049aee3, 0xfff252f3, - 0x49075c72, 0xfadbb19a, 0xf8e6b059, 0x0599a00e, 0xfe5fc405, 0xffc9a4fc, 0x0048454b, 0xfff272d6, - 0x4879c185, 0xfa7bb908, 0xf9188793, 0x05912dea, 0xfe599aaf, 0xffce8f8a, 0x0046db0f, 0xfff2940b, - 0x47ea99a9, 0xfa1d78e3, 0xf94a6b9b, 0x058856cd, 0xfe53ac97, 0xffd36af1, 0x0045705c, 0xfff2b686, - 0x4759ec60, 0xf9c0f276, 0xf97c5815, 0x057f1c9e, 0xfe4df98e, 0xffd836eb, 0x00440561, 0xfff2da36, - 0x46c7c140, 0xf96626f0, 0xf9ae48af, 
0x0575814c, 0xfe48815e, 0xffdcf336, 0x00429a4a, 0xfff2ff0d, - 0x46341fed, 0xf90d1761, 0xf9e03924, 0x056b86c6, 0xfe4343d0, 0xffe19f91, 0x00412f43, 0xfff324fd, - 0x459f101d, 0xf8b5c4be, 0xfa122537, 0x05612f00, 0xfe3e40a6, 0xffe63bc0, 0x003fc478, 0xfff34bf9, - 0x45089996, 0xf8602fdc, 0xfa4408ba, 0x05567bf1, 0xfe39779a, 0xffeac787, 0x003e5a12, 0xfff373f0, - 0x4470c42d, 0xf80c5977, 0xfa75df87, 0x054b6f92, 0xfe34e867, 0xffef42af, 0x003cf03d, 0xfff39cd7, - 0x43d797c7, 0xf7ba422b, 0xfaa7a586, 0x05400be1, 0xfe3092bf, 0xfff3ad01, 0x003b871f, 0xfff3c69f, - 0x433d1c56, 0xf769ea78, 0xfad956ab, 0x053452dc, 0xfe2c7650, 0xfff8064b, 0x003a1ee3, 0xfff3f13a, - 0x42a159dc, 0xf71b52c4, 0xfb0aeef6, 0x05284685, 0xfe2892c5, 0xfffc4e5c, 0x0038b7ae, 0xfff41c9c, - 0x42045865, 0xf6ce7b57, 0xfb3c6a73, 0x051be8dd, 0xfe24e7c3, 0x00008507, 0x003751a7, 0xfff448b7, - 0x4166200e, 0xf683645a, 0xfb6dc53c, 0x050f3bec, 0xfe2174ec, 0x0004aa1f, 0x0035ecf4, 0xfff4757e, - 0x40c6b8fd, 0xf63a0ddf, 0xfb9efb77, 0x050241b6, 0xfe1e39da, 0x0008bd7c, 0x003489b9, 0xfff4a2e5, - 0x40262b65, 0xf5f277d9, 0xfbd00956, 0x04f4fc46, 0xfe1b3628, 0x000cbef7, 0x0033281a, 0xfff4d0de, - 0x3f847f83, 0xf5aca21f, 0xfc00eb1b, 0x04e76da3, 0xfe18696a, 0x0010ae6e, 0x0031c83a, 0xfff4ff5d, - 0x3ee1bda2, 0xf5688c6d, 0xfc319d13, 0x04d997d8, 0xfe15d32f, 0x00148bbd, 0x00306a3b, 0xfff52e57, - 0x3e3dee13, 0xf5263665, 0xfc621b9a, 0x04cb7cf2, 0xfe137304, 0x001856c7, 0x002f0e3f, 0xfff55dbf, - 0x3d991932, 0xf4e59f8a, 0xfc926319, 0x04bd1efb, 0xfe114872, 0x001c0f6e, 0x002db466, 0xfff58d89, - 0x3cf34766, 0xf4a6c748, 0xfcc27008, 0x04ae8000, 0xfe0f52fc, 0x001fb599, 0x002c5cd0, 0xfff5bdaa, - 0x3c4c811c, 0xf469aced, 0xfcf23eec, 0x049fa20f, 0xfe0d9224, 0x0023492f, 0x002b079a, 0xfff5ee17, - 0x3ba4cec9, 0xf42e4faf, 0xfd21cc59, 0x04908733, 0xfe0c0567, 0x0026ca1c, 0x0029b4e4, 0xfff61ec5, - 0x3afc38eb, 0xf3f4aea6, 0xfd5114f0, 0x0481317a, 0xfe0aac3f, 0x002a384c, 0x002864c9, 0xfff64fa8, - 0x3a52c805, 0xf3bcc8d3, 0xfd801564, 0x0471a2ef, 0xfe098622, 0x002d93ae, 
0x00271766, 0xfff680b5, - 0x39a884a1, 0xf3869d1a, 0xfdaeca73, 0x0461dda0, 0xfe089283, 0x0030dc34, 0x0025ccd7, 0xfff6b1e4, - 0x38fd774e, 0xf3522a49, 0xfddd30eb, 0x0451e396, 0xfe07d0d3, 0x003411d2, 0x00248535, 0xfff6e329, - 0x3851a8a2, 0xf31f6f0f, 0xfe0b45aa, 0x0441b6dd, 0xfe07407d, 0x0037347d, 0x0023409a, 0xfff7147a, - 0x37a52135, 0xf2ee6a07, 0xfe39059b, 0x0431597d, 0xfe06e0eb, 0x003a442e, 0x0021ff1f, 0xfff745cd, - 0x36f7e9a4, 0xf2bf19ae, 0xfe666dbc, 0x0420cd80, 0xfe06b184, 0x003d40e0, 0x0020c0dc, 0xfff7771a, - 0x364a0a90, 0xf2917c6d, 0xfe937b15, 0x041014eb, 0xfe06b1ac, 0x00402a8e, 0x001f85e6, 0xfff7a857, - 0x359b8c9d, 0xf265908f, 0xfec02ac2, 0x03ff31c3, 0xfe06e0c4, 0x00430137, 0x001e4e56, 0xfff7d97a, - 0x34ec786f, 0xf23b544b, 0xfeec79ec, 0x03ee260d, 0xfe073e2a, 0x0045c4dd, 0x001d1a3f, 0xfff80a7c, - 0x343cd6af, 0xf212c5be, 0xff1865cd, 0x03dcf3ca, 0xfe07c93a, 0x00487582, 0x001be9b7, 0xfff83b52, - 0x338cb004, 0xf1ebe2ec, 0xff43ebac, 0x03cb9cf9, 0xfe08814e, 0x004b132b, 0x001abcd0, 0xfff86bf6, - 0x32dc0d17, 0xf1c6a9c3, 0xff6f08e4, 0x03ba2398, 0xfe0965bc, 0x004d9dde, 0x0019939d, 0xfff89c60, - 0x322af693, 0xf1a3181a, 0xff99badb, 0x03a889a1, 0xfe0a75da, 0x005015a5, 0x00186e31, 0xfff8cc86, - 0x3179751f, 0xf1812bb0, 0xffc3ff0c, 0x0396d10c, 0xfe0bb0f9, 0x00527a8a, 0x00174c9c, 0xfff8fc62, - 0x30c79163, 0xf160e22d, 0xffedd2fd, 0x0384fbd1, 0xfe0d166b, 0x0054cc9a, 0x00162eef, 0xfff92bec, - 0x30155404, 0xf1423924, 0x00173447, 0x03730be0, 0xfe0ea57e, 0x00570be4, 0x00151538, 0xfff95b1e, - 0x2f62c5a7, 0xf1252e0f, 0x00402092, 0x0361032a, 0xfe105d7e, 0x00593877, 0x0013ff88, 0xfff989ef, - 0x2eafeeed, 0xf109be56, 0x00689598, 0x034ee39b, 0xfe123db6, 0x005b5267, 0x0012edea, 0xfff9b85b, - 0x2dfcd873, 0xf0efe748, 0x0090911f, 0x033caf1d, 0xfe144570, 0x005d59c6, 0x0011e06d, 0xfff9e65a, - 0x2d498ad3, 0xf0d7a622, 0x00b81102, 0x032a6796, 0xfe1673f2, 0x005f4eac, 0x0010d71d, 0xfffa13e5, - 0x2c960ea3, 0xf0c0f808, 0x00df1328, 0x03180ee7, 0xfe18c884, 0x0061312e, 0x000fd205, 0xfffa40f8, - 0x2be26c73, 
0xf0abda0e, 0x0105958c, 0x0305a6f0, 0xfe1b4268, 0x00630167, 0x000ed130, 0xfffa6d8d, - 0x2b2eaccf, 0xf0984931, 0x012b9635, 0x02f3318a, 0xfe1de0e2, 0x0064bf71, 0x000dd4a7, 0xfffa999d, - 0x2a7ad83c, 0xf086425a, 0x0151133e, 0x02e0b08d, 0xfe20a335, 0x00666b68, 0x000cdc74, 0xfffac525, - 0x29c6f738, 0xf075c260, 0x01760ad1, 0x02ce25ca, 0xfe2388a1, 0x0068056b, 0x000be89f, 0xfffaf01e, - 0x2913123c, 0xf066c606, 0x019a7b27, 0x02bb9310, 0xfe269065, 0x00698d98, 0x000af931, 0xfffb1a84, - 0x285f31b7, 0xf05949fb, 0x01be628c, 0x02a8fa2a, 0xfe29b9c1, 0x006b0411, 0x000a0e2f, 0xfffb4453, - 0x27ab5e12, 0xf04d4ade, 0x01e1bf58, 0x02965cdb, 0xfe2d03f2, 0x006c68f8, 0x000927a0, 0xfffb6d86, - 0x26f79fab, 0xf042c539, 0x02048ff8, 0x0283bce6, 0xfe306e35, 0x006dbc71, 0x00084589, 0xfffb961a, - 0x2643feda, 0xf039b587, 0x0226d2e6, 0x02711c05, 0xfe33f7c7, 0x006efea0, 0x000767f0, 0xfffbbe09, - 0x259083eb, 0xf032182f, 0x024886ad, 0x025e7bf0, 0xfe379fe3, 0x00702fae, 0x00068ed8, 0xfffbe552, - 0x24dd3721, 0xf02be98a, 0x0269a9e9, 0x024bde5a, 0xfe3b65c4, 0x00714fc0, 0x0005ba46, 0xfffc0bef, - 0x242a20b3, 0xf02725dc, 0x028a3b44, 0x023944ee, 0xfe3f48a5, 0x00725f02, 0x0004ea3a, 0xfffc31df, - 0x237748cf, 0xf023c95d, 0x02aa397b, 0x0226b156, 0xfe4347c0, 0x00735d9c, 0x00041eb9, 0xfffc571e, - 0x22c4b795, 0xf021d031, 0x02c9a359, 0x02142533, 0xfe476250, 0x00744bba, 0x000357c2, 0xfffc7ba9, - 0x2212751a, 0xf0213671, 0x02e877b9, 0x0201a223, 0xfe4b978e, 0x0075298a, 0x00029558, 0xfffc9f7e, - 0x21608968, 0xf021f823, 0x0306b586, 0x01ef29be, 0xfe4fe6b3, 0x0075f739, 0x0001d779, 0xfffcc29a, - 0x20aefc79, 0xf0241140, 0x03245bbc, 0x01dcbd96, 0xfe544efb, 0x0076b4f5, 0x00011e26, 0xfffce4fc, - 0x1ffdd63b, 0xf0277db1, 0x03416966, 0x01ca5f37, 0xfe58cf9d, 0x007762f0, 0x0000695e, 0xfffd06a1, - 0x1f4d1e8e, 0xf02c3953, 0x035ddd9e, 0x01b81028, 0xfe5d67d4, 0x0078015a, 0xffffb91f, 0xfffd2787, - 0x1e9cdd43, 0xf0323ff5, 0x0379b790, 0x01a5d1ea, 0xfe6216db, 0x00789065, 0xffff0d66, 0xfffd47ae, - 0x1ded1a1d, 0xf0398d56, 0x0394f674, 0x0193a5f9, 
0xfe66dbeb, 0x00791043, 0xfffe6631, 0xfffd6713, +#include "AudioResamplerSincDown.h" }; // we use 15 bits to interpolate between these samples @@ -521,7 +265,8 @@ void AudioResamplerSinc::resample(int32_t* out, size_t outFrameCount, if (mConstants == &veryHighQualityConstants && readResampleCoefficients) { mFirCoefs = readResampleCoefficients( mInSampleRate <= mSampleRate ); } else { - mFirCoefs = (const int32_t *) ((mInSampleRate <= mSampleRate) ? mFirCoefsUp : mFirCoefsDown); + mFirCoefs = (const int32_t *) + ((mInSampleRate <= mSampleRate) ? mFirCoefsUp : mFirCoefsDown); } // select the appropriate resampler @@ -856,4 +601,4 @@ void AudioResamplerSinc::interpolate( } } // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android diff --git a/services/audioflinger/AudioResamplerSinc.h b/services/audioflinger/AudioResamplerSinc.h index 4691d0a..6d8e85d 100644 --- a/services/audioflinger/AudioResamplerSinc.h +++ b/services/audioflinger/AudioResamplerSinc.h @@ -95,6 +95,6 @@ private: }; // ---------------------------------------------------------------------------- -}; // namespace android +} // namespace android #endif /*ANDROID_AUDIO_RESAMPLER_SINC_H*/ diff --git a/services/audioflinger/AudioResamplerSincDown.h b/services/audioflinger/AudioResamplerSincDown.h new file mode 100644 index 0000000..2d0fb86 --- /dev/null +++ b/services/audioflinger/AudioResamplerSincDown.h @@ -0,0 +1,131 @@ +// cmd-line: fir -l 7 -s48000 -c 17189 + + 0x5bacb6f4, 0x1ded1a1d, 0xf0398d56, 0x0394f674, 0x0193a5f9, 0xfe66dbeb, 0x00791043, 0xfffe6631, + 0x5bab6c81, 0x1d3ddccd, 0xf0421d2c, 0x03af9995, 0x01818dc9, 0xfe6bb63e, 0x0079812a, 0xfffdc37d, + 0x5ba78d37, 0x1c8f2cf9, 0xf04beb1d, 0x03c9a04a, 0x016f8aca, 0xfe70a511, 0x0079e34d, 0xfffd2545, + 0x5ba1194f, 0x1be11231, 0xf056f2c7, 0x03e309fe, 0x015d9e64, 0xfe75a79f, 0x007a36e2, 0xfffc8b86, + 0x5b981122, 0x1b3393f8, 0xf0632fb7, 0x03fbd625, 0x014bc9fa, 0xfe7abd23, 0x007a7c20, 
0xfffbf639, + 0x5b8c7530, 0x1a86b9bf, 0xf0709d74, 0x04140449, 0x013a0ee9, 0xfe7fe4db, 0x007ab33d, 0xfffb655b, + 0x5b7e461a, 0x19da8ae5, 0xf07f3776, 0x042b93fd, 0x01286e86, 0xfe851e05, 0x007adc72, 0xfffad8e4, + 0x5b6d84a8, 0x192f0eb7, 0xf08ef92d, 0x044284e6, 0x0116ea22, 0xfe8a67dd, 0x007af7f6, 0xfffa50ce, + 0x5b5a31c6, 0x18844c70, 0xf09fddfe, 0x0458d6b7, 0x01058306, 0xfe8fc1a5, 0x007b0603, 0xfff9cd12, + 0x5b444e81, 0x17da4b37, 0xf0b1e143, 0x046e8933, 0x00f43a74, 0xfe952a9b, 0x007b06d4, 0xfff94da9, + 0x5b2bdc0e, 0x17311222, 0xf0c4fe50, 0x04839c29, 0x00e311a9, 0xfe9aa201, 0x007afaa1, 0xfff8d28c, + 0x5b10dbc2, 0x1688a832, 0xf0d9306d, 0x04980f79, 0x00d209db, 0xfea02719, 0x007ae1a7, 0xfff85bb1, + 0x5af34f18, 0x15e11453, 0xf0ee72db, 0x04abe310, 0x00c12439, 0xfea5b926, 0x007abc20, 0xfff7e910, + 0x5ad337af, 0x153a5d5e, 0xf104c0d2, 0x04bf16e9, 0x00b061eb, 0xfeab576d, 0x007a8a49, 0xfff77a9f, + 0x5ab09748, 0x14948a16, 0xf11c1583, 0x04d1ab0d, 0x009fc413, 0xfeb10134, 0x007a4c5d, 0xfff71057, + 0x5a8b6fc7, 0x13efa12c, 0xf1346c17, 0x04e39f93, 0x008f4bcb, 0xfeb6b5c0, 0x007a029a, 0xfff6aa2b, + 0x5a63c336, 0x134ba937, 0xf14dbfb1, 0x04f4f4a2, 0x007efa29, 0xfebc745c, 0x0079ad3d, 0xfff64812, + 0x5a3993c0, 0x12a8a8bb, 0xf1680b6e, 0x0505aa6a, 0x006ed038, 0xfec23c50, 0x00794c82, 0xfff5ea02, + 0x5a0ce3b2, 0x1206a625, 0xf1834a63, 0x0515c12d, 0x005ecf01, 0xfec80ce8, 0x0078e0a9, 0xfff58ff0, + 0x59ddb57f, 0x1165a7cc, 0xf19f77a0, 0x05253938, 0x004ef782, 0xfecde571, 0x007869ee, 0xfff539cf, + 0x59ac0bba, 0x10c5b3ef, 0xf1bc8e31, 0x053412e4, 0x003f4ab4, 0xfed3c538, 0x0077e891, 0xfff4e794, + 0x5977e919, 0x1026d0b8, 0xf1da891b, 0x05424e9b, 0x002fc98a, 0xfed9ab8f, 0x00775ccf, 0xfff49934, + 0x59415075, 0x0f890437, 0xf1f96360, 0x054feccf, 0x002074ed, 0xfedf97c6, 0x0076c6e8, 0xfff44ea3, + 0x590844c9, 0x0eec5465, 0xf21917ff, 0x055cee03, 0x00114dc3, 0xfee58932, 0x00762719, 0xfff407d2, + 0x58ccc930, 0x0e50c723, 0xf239a1ef, 0x056952c3, 0x000254e8, 0xfeeb7f27, 0x00757da3, 0xfff3c4b7, + 0x588ee0ea, 0x0db6623b, 
0xf25afc29, 0x05751baa, 0xfff38b32, 0xfef178fc, 0x0074cac4, 0xfff38542, + 0x584e8f56, 0x0d1d2b5d, 0xf27d219f, 0x0580495c, 0xffe4f171, 0xfef7760c, 0x00740ebb, 0xfff34968, + 0x580bd7f4, 0x0c85281f, 0xf2a00d43, 0x058adc8d, 0xffd6886d, 0xfefd75af, 0x007349c7, 0xfff3111b, + 0x57c6be67, 0x0bee5dff, 0xf2c3ba04, 0x0594d5fa, 0xffc850e6, 0xff037744, 0x00727c27, 0xfff2dc4c, + 0x577f4670, 0x0b58d262, 0xf2e822ce, 0x059e366c, 0xffba4b98, 0xff097a29, 0x0071a61b, 0xfff2aaef, + 0x573573f2, 0x0ac48a92, 0xf30d428e, 0x05a6feb9, 0xffac7936, 0xff0f7dbf, 0x0070c7e1, 0xfff27cf3, + 0x56e94af1, 0x0a318bc1, 0xf333142f, 0x05af2fbf, 0xff9eda6d, 0xff15816a, 0x006fe1b8, 0xfff2524c, + 0x569acf90, 0x099fdb04, 0xf359929a, 0x05b6ca6b, 0xff916fe1, 0xff1b848e, 0x006ef3df, 0xfff22aea, + 0x564a0610, 0x090f7d57, 0xf380b8ba, 0x05bdcfb2, 0xff843a32, 0xff218692, 0x006dfe94, 0xfff206bf, + 0x55f6f2d3, 0x0880779d, 0xf3a88179, 0x05c44095, 0xff7739f7, 0xff2786e1, 0x006d0217, 0xfff1e5bb, + 0x55a19a5c, 0x07f2ce9b, 0xf3d0e7c2, 0x05ca1e1f, 0xff6a6fc1, 0xff2d84e5, 0x006bfea4, 0xfff1c7d0, + 0x554a0148, 0x076686fc, 0xf3f9e680, 0x05cf6965, 0xff5ddc1a, 0xff33800e, 0x006af47b, 0xfff1acef, + 0x54f02c56, 0x06dba551, 0xf42378a0, 0x05d42387, 0xff517f86, 0xff3977cb, 0x0069e3d9, 0xfff19508, + 0x54942061, 0x06522e0f, 0xf44d9912, 0x05d84daf, 0xff455a80, 0xff3f6b8f, 0x0068ccfa, 0xfff1800b, + 0x5435e263, 0x05ca258f, 0xf47842c5, 0x05dbe90f, 0xff396d7f, 0xff455acf, 0x0067b01e, 0xfff16de9, + 0x53d57774, 0x0543900d, 0xf4a370ad, 0x05def6e4, 0xff2db8f2, 0xff4b4503, 0x00668d80, 0xfff15e93, + 0x5372e4c6, 0x04be71ab, 0xf4cf1dbf, 0x05e17873, 0xff223d40, 0xff5129a3, 0x0065655d, 0xfff151f9, + 0x530e2fac, 0x043ace6e, 0xf4fb44f4, 0x05e36f0d, 0xff16faca, 0xff57082e, 0x006437f1, 0xfff1480b, + 0x52a75d90, 0x03b8aa40, 0xf527e149, 0x05e4dc08, 0xff0bf1ed, 0xff5ce021, 0x00630577, 0xfff140b9, + 0x523e73fd, 0x033808eb, 0xf554edbd, 0x05e5c0c6, 0xff0122fc, 0xff62b0fd, 0x0061ce2c, 0xfff13bf3, + 0x51d37897, 0x02b8ee22, 0xf5826555, 0x05e61eae, 0xfef68e45, 
0xff687a47, 0x00609249, 0xfff139aa, + 0x5166711c, 0x023b5d76, 0xf5b0431a, 0x05e5f733, 0xfeec340f, 0xff6e3b84, 0x005f520a, 0xfff139cd, + 0x50f76368, 0x01bf5a5e, 0xf5de8218, 0x05e54bcd, 0xfee2149b, 0xff73f43d, 0x005e0da8, 0xfff13c4c, + 0x5086556f, 0x0144e834, 0xf60d1d63, 0x05e41dfe, 0xfed83023, 0xff79a3fe, 0x005cc55c, 0xfff14119, + 0x50134d3e, 0x00cc0a36, 0xf63c1012, 0x05e26f4e, 0xfece86db, 0xff7f4a54, 0x005b7961, 0xfff14821, + 0x4f9e50ff, 0x0054c382, 0xf66b5544, 0x05e0414d, 0xfec518f1, 0xff84e6d0, 0x005a29ed, 0xfff15156, + 0x4f2766f2, 0xffdf171b, 0xf69ae81d, 0x05dd9593, 0xfebbe68c, 0xff8a7905, 0x0058d738, 0xfff15ca8, + 0x4eae9571, 0xff6b07e7, 0xf6cac3c7, 0x05da6dbe, 0xfeb2efcd, 0xff900089, 0x0057817b, 0xfff16a07, + 0x4e33e2ee, 0xfef898ae, 0xf6fae373, 0x05d6cb72, 0xfeaa34d0, 0xff957cf4, 0x005628ec, 0xfff17962, + 0x4db755f3, 0xfe87cc1b, 0xf72b425b, 0x05d2b05c, 0xfea1b5a9, 0xff9aede0, 0x0054cdc0, 0xfff18aab, + 0x4d38f520, 0xfe18a4bc, 0xf75bdbbd, 0x05ce1e2d, 0xfe997268, 0xffa052ec, 0x0053702d, 0xfff19dd1, + 0x4cb8c72e, 0xfdab2501, 0xf78caae0, 0x05c9169d, 0xfe916b15, 0xffa5abb8, 0x00521068, 0xfff1b2c5, + 0x4c36d2eb, 0xfd3f4f3d, 0xf7bdab16, 0x05c39b6a, 0xfe899fb2, 0xffaaf7e6, 0x0050aea5, 0xfff1c976, + 0x4bb31f3c, 0xfcd525a5, 0xf7eed7b4, 0x05bdae57, 0xfe82103f, 0xffb0371c, 0x004f4b17, 0xfff1e1d6, + 0x4b2db31a, 0xfc6caa53, 0xf8202c1c, 0x05b7512e, 0xfe7abcb1, 0xffb56902, 0x004de5f1, 0xfff1fbd5, + 0x4aa69594, 0xfc05df40, 0xf851a3b6, 0x05b085bc, 0xfe73a4fb, 0xffba8d44, 0x004c7f66, 0xfff21764, + 0x4a1dcdce, 0xfba0c64b, 0xf88339f5, 0x05a94dd5, 0xfe6cc909, 0xffbfa38d, 0x004b17a6, 0xfff23473, + 0x499362ff, 0xfb3d6133, 0xf8b4ea55, 0x05a1ab52, 0xfe6628c1, 0xffc4ab8f, 0x0049aee3, 0xfff252f3, + 0x49075c72, 0xfadbb19a, 0xf8e6b059, 0x0599a00e, 0xfe5fc405, 0xffc9a4fc, 0x0048454b, 0xfff272d6, + 0x4879c185, 0xfa7bb908, 0xf9188793, 0x05912dea, 0xfe599aaf, 0xffce8f8a, 0x0046db0f, 0xfff2940b, + 0x47ea99a9, 0xfa1d78e3, 0xf94a6b9b, 0x058856cd, 0xfe53ac97, 0xffd36af1, 0x0045705c, 0xfff2b686, + 
0x4759ec60, 0xf9c0f276, 0xf97c5815, 0x057f1c9e, 0xfe4df98e, 0xffd836eb, 0x00440561, 0xfff2da36, + 0x46c7c140, 0xf96626f0, 0xf9ae48af, 0x0575814c, 0xfe48815e, 0xffdcf336, 0x00429a4a, 0xfff2ff0d, + 0x46341fed, 0xf90d1761, 0xf9e03924, 0x056b86c6, 0xfe4343d0, 0xffe19f91, 0x00412f43, 0xfff324fd, + 0x459f101d, 0xf8b5c4be, 0xfa122537, 0x05612f00, 0xfe3e40a6, 0xffe63bc0, 0x003fc478, 0xfff34bf9, + 0x45089996, 0xf8602fdc, 0xfa4408ba, 0x05567bf1, 0xfe39779a, 0xffeac787, 0x003e5a12, 0xfff373f0, + 0x4470c42d, 0xf80c5977, 0xfa75df87, 0x054b6f92, 0xfe34e867, 0xffef42af, 0x003cf03d, 0xfff39cd7, + 0x43d797c7, 0xf7ba422b, 0xfaa7a586, 0x05400be1, 0xfe3092bf, 0xfff3ad01, 0x003b871f, 0xfff3c69f, + 0x433d1c56, 0xf769ea78, 0xfad956ab, 0x053452dc, 0xfe2c7650, 0xfff8064b, 0x003a1ee3, 0xfff3f13a, + 0x42a159dc, 0xf71b52c4, 0xfb0aeef6, 0x05284685, 0xfe2892c5, 0xfffc4e5c, 0x0038b7ae, 0xfff41c9c, + 0x42045865, 0xf6ce7b57, 0xfb3c6a73, 0x051be8dd, 0xfe24e7c3, 0x00008507, 0x003751a7, 0xfff448b7, + 0x4166200e, 0xf683645a, 0xfb6dc53c, 0x050f3bec, 0xfe2174ec, 0x0004aa1f, 0x0035ecf4, 0xfff4757e, + 0x40c6b8fd, 0xf63a0ddf, 0xfb9efb77, 0x050241b6, 0xfe1e39da, 0x0008bd7c, 0x003489b9, 0xfff4a2e5, + 0x40262b65, 0xf5f277d9, 0xfbd00956, 0x04f4fc46, 0xfe1b3628, 0x000cbef7, 0x0033281a, 0xfff4d0de, + 0x3f847f83, 0xf5aca21f, 0xfc00eb1b, 0x04e76da3, 0xfe18696a, 0x0010ae6e, 0x0031c83a, 0xfff4ff5d, + 0x3ee1bda2, 0xf5688c6d, 0xfc319d13, 0x04d997d8, 0xfe15d32f, 0x00148bbd, 0x00306a3b, 0xfff52e57, + 0x3e3dee13, 0xf5263665, 0xfc621b9a, 0x04cb7cf2, 0xfe137304, 0x001856c7, 0x002f0e3f, 0xfff55dbf, + 0x3d991932, 0xf4e59f8a, 0xfc926319, 0x04bd1efb, 0xfe114872, 0x001c0f6e, 0x002db466, 0xfff58d89, + 0x3cf34766, 0xf4a6c748, 0xfcc27008, 0x04ae8000, 0xfe0f52fc, 0x001fb599, 0x002c5cd0, 0xfff5bdaa, + 0x3c4c811c, 0xf469aced, 0xfcf23eec, 0x049fa20f, 0xfe0d9224, 0x0023492f, 0x002b079a, 0xfff5ee17, + 0x3ba4cec9, 0xf42e4faf, 0xfd21cc59, 0x04908733, 0xfe0c0567, 0x0026ca1c, 0x0029b4e4, 0xfff61ec5, + 0x3afc38eb, 0xf3f4aea6, 0xfd5114f0, 
0x0481317a, 0xfe0aac3f, 0x002a384c, 0x002864c9, 0xfff64fa8, + 0x3a52c805, 0xf3bcc8d3, 0xfd801564, 0x0471a2ef, 0xfe098622, 0x002d93ae, 0x00271766, 0xfff680b5, + 0x39a884a1, 0xf3869d1a, 0xfdaeca73, 0x0461dda0, 0xfe089283, 0x0030dc34, 0x0025ccd7, 0xfff6b1e4, + 0x38fd774e, 0xf3522a49, 0xfddd30eb, 0x0451e396, 0xfe07d0d3, 0x003411d2, 0x00248535, 0xfff6e329, + 0x3851a8a2, 0xf31f6f0f, 0xfe0b45aa, 0x0441b6dd, 0xfe07407d, 0x0037347d, 0x0023409a, 0xfff7147a, + 0x37a52135, 0xf2ee6a07, 0xfe39059b, 0x0431597d, 0xfe06e0eb, 0x003a442e, 0x0021ff1f, 0xfff745cd, + 0x36f7e9a4, 0xf2bf19ae, 0xfe666dbc, 0x0420cd80, 0xfe06b184, 0x003d40e0, 0x0020c0dc, 0xfff7771a, + 0x364a0a90, 0xf2917c6d, 0xfe937b15, 0x041014eb, 0xfe06b1ac, 0x00402a8e, 0x001f85e6, 0xfff7a857, + 0x359b8c9d, 0xf265908f, 0xfec02ac2, 0x03ff31c3, 0xfe06e0c4, 0x00430137, 0x001e4e56, 0xfff7d97a, + 0x34ec786f, 0xf23b544b, 0xfeec79ec, 0x03ee260d, 0xfe073e2a, 0x0045c4dd, 0x001d1a3f, 0xfff80a7c, + 0x343cd6af, 0xf212c5be, 0xff1865cd, 0x03dcf3ca, 0xfe07c93a, 0x00487582, 0x001be9b7, 0xfff83b52, + 0x338cb004, 0xf1ebe2ec, 0xff43ebac, 0x03cb9cf9, 0xfe08814e, 0x004b132b, 0x001abcd0, 0xfff86bf6, + 0x32dc0d17, 0xf1c6a9c3, 0xff6f08e4, 0x03ba2398, 0xfe0965bc, 0x004d9dde, 0x0019939d, 0xfff89c60, + 0x322af693, 0xf1a3181a, 0xff99badb, 0x03a889a1, 0xfe0a75da, 0x005015a5, 0x00186e31, 0xfff8cc86, + 0x3179751f, 0xf1812bb0, 0xffc3ff0c, 0x0396d10c, 0xfe0bb0f9, 0x00527a8a, 0x00174c9c, 0xfff8fc62, + 0x30c79163, 0xf160e22d, 0xffedd2fd, 0x0384fbd1, 0xfe0d166b, 0x0054cc9a, 0x00162eef, 0xfff92bec, + 0x30155404, 0xf1423924, 0x00173447, 0x03730be0, 0xfe0ea57e, 0x00570be4, 0x00151538, 0xfff95b1e, + 0x2f62c5a7, 0xf1252e0f, 0x00402092, 0x0361032a, 0xfe105d7e, 0x00593877, 0x0013ff88, 0xfff989ef, + 0x2eafeeed, 0xf109be56, 0x00689598, 0x034ee39b, 0xfe123db6, 0x005b5267, 0x0012edea, 0xfff9b85b, + 0x2dfcd873, 0xf0efe748, 0x0090911f, 0x033caf1d, 0xfe144570, 0x005d59c6, 0x0011e06d, 0xfff9e65a, + 0x2d498ad3, 0xf0d7a622, 0x00b81102, 0x032a6796, 0xfe1673f2, 0x005f4eac, 
0x0010d71d, 0xfffa13e5, + 0x2c960ea3, 0xf0c0f808, 0x00df1328, 0x03180ee7, 0xfe18c884, 0x0061312e, 0x000fd205, 0xfffa40f8, + 0x2be26c73, 0xf0abda0e, 0x0105958c, 0x0305a6f0, 0xfe1b4268, 0x00630167, 0x000ed130, 0xfffa6d8d, + 0x2b2eaccf, 0xf0984931, 0x012b9635, 0x02f3318a, 0xfe1de0e2, 0x0064bf71, 0x000dd4a7, 0xfffa999d, + 0x2a7ad83c, 0xf086425a, 0x0151133e, 0x02e0b08d, 0xfe20a335, 0x00666b68, 0x000cdc74, 0xfffac525, + 0x29c6f738, 0xf075c260, 0x01760ad1, 0x02ce25ca, 0xfe2388a1, 0x0068056b, 0x000be89f, 0xfffaf01e, + 0x2913123c, 0xf066c606, 0x019a7b27, 0x02bb9310, 0xfe269065, 0x00698d98, 0x000af931, 0xfffb1a84, + 0x285f31b7, 0xf05949fb, 0x01be628c, 0x02a8fa2a, 0xfe29b9c1, 0x006b0411, 0x000a0e2f, 0xfffb4453, + 0x27ab5e12, 0xf04d4ade, 0x01e1bf58, 0x02965cdb, 0xfe2d03f2, 0x006c68f8, 0x000927a0, 0xfffb6d86, + 0x26f79fab, 0xf042c539, 0x02048ff8, 0x0283bce6, 0xfe306e35, 0x006dbc71, 0x00084589, 0xfffb961a, + 0x2643feda, 0xf039b587, 0x0226d2e6, 0x02711c05, 0xfe33f7c7, 0x006efea0, 0x000767f0, 0xfffbbe09, + 0x259083eb, 0xf032182f, 0x024886ad, 0x025e7bf0, 0xfe379fe3, 0x00702fae, 0x00068ed8, 0xfffbe552, + 0x24dd3721, 0xf02be98a, 0x0269a9e9, 0x024bde5a, 0xfe3b65c4, 0x00714fc0, 0x0005ba46, 0xfffc0bef, + 0x242a20b3, 0xf02725dc, 0x028a3b44, 0x023944ee, 0xfe3f48a5, 0x00725f02, 0x0004ea3a, 0xfffc31df, + 0x237748cf, 0xf023c95d, 0x02aa397b, 0x0226b156, 0xfe4347c0, 0x00735d9c, 0x00041eb9, 0xfffc571e, + 0x22c4b795, 0xf021d031, 0x02c9a359, 0x02142533, 0xfe476250, 0x00744bba, 0x000357c2, 0xfffc7ba9, + 0x2212751a, 0xf0213671, 0x02e877b9, 0x0201a223, 0xfe4b978e, 0x0075298a, 0x00029558, 0xfffc9f7e, + 0x21608968, 0xf021f823, 0x0306b586, 0x01ef29be, 0xfe4fe6b3, 0x0075f739, 0x0001d779, 0xfffcc29a, + 0x20aefc79, 0xf0241140, 0x03245bbc, 0x01dcbd96, 0xfe544efb, 0x0076b4f5, 0x00011e26, 0xfffce4fc, + 0x1ffdd63b, 0xf0277db1, 0x03416966, 0x01ca5f37, 0xfe58cf9d, 0x007762f0, 0x0000695e, 0xfffd06a1, + 0x1f4d1e8e, 0xf02c3953, 0x035ddd9e, 0x01b81028, 0xfe5d67d4, 0x0078015a, 0xffffb91f, 0xfffd2787, + 0x1e9cdd43, 
0xf0323ff5, 0x0379b790, 0x01a5d1ea, 0xfe6216db, 0x00789065, 0xffff0d66, 0xfffd47ae, + 0x1ded1a1d, 0xf0398d56, 0x0394f674, 0x0193a5f9, 0xfe66dbeb, 0x00791043, 0xfffe6631, 0xfffd6713, diff --git a/services/audioflinger/AudioResamplerSincUp.h b/services/audioflinger/AudioResamplerSincUp.h new file mode 100644 index 0000000..fd5367e --- /dev/null +++ b/services/audioflinger/AudioResamplerSincUp.h @@ -0,0 +1,131 @@ +// cmd-line: fir -l 7 -s48000 -c 20478 + + 0x6d374bc7, 0x111c6ba0, 0xf3240e61, 0x07d14a38, 0xfc509e64, 0x0139cee9, 0xffc8c866, 0xfffcc300, + 0x6d35278a, 0x103e8192, 0xf36b9dfd, 0x07bdfaa5, 0xfc5102d0, 0x013d618d, 0xffc663b9, 0xfffd9592, + 0x6d2ebafe, 0x0f62811a, 0xf3b3d8ac, 0x07a9f399, 0xfc51d9a6, 0x0140bea5, 0xffc41212, 0xfffe631e, + 0x6d24069d, 0x0e8875ad, 0xf3fcb43e, 0x07953976, 0xfc53216f, 0x0143e67c, 0xffc1d373, 0xffff2b9f, + 0x6d150b35, 0x0db06a89, 0xf4462690, 0x077fd0ac, 0xfc54d8ae, 0x0146d965, 0xffbfa7d9, 0xffffef10, + 0x6d01c9e3, 0x0cda6ab5, 0xf4902587, 0x0769bdaf, 0xfc56fdda, 0x014997bb, 0xffbd8f40, 0x0000ad6e, + 0x6cea4418, 0x0c0680fe, 0xf4daa718, 0x07530501, 0xfc598f60, 0x014c21db, 0xffbb89a1, 0x000166b6, + 0x6cce7b97, 0x0b34b7f5, 0xf525a143, 0x073bab28, 0xfc5c8ba5, 0x014e782a, 0xffb996f3, 0x00021ae5, + 0x6cae7272, 0x0a6519f4, 0xf5710a17, 0x0723b4b4, 0xfc5ff105, 0x01509b14, 0xffb7b728, 0x0002c9fd, + 0x6c8a2b0f, 0x0997b116, 0xf5bcd7b1, 0x070b2639, 0xfc63bdd3, 0x01528b08, 0xffb5ea31, 0x000373fb, + 0x6c61a823, 0x08cc873c, 0xf609003f, 0x06f20453, 0xfc67f05a, 0x0154487b, 0xffb42ffc, 0x000418e2, + 0x6c34ecb5, 0x0803a60a, 0xf6557a00, 0x06d853a2, 0xfc6c86dd, 0x0155d3e8, 0xffb28876, 0x0004b8b3, + 0x6c03fc1c, 0x073d16e7, 0xf6a23b44, 0x06be18cd, 0xfc717f97, 0x01572dcf, 0xffb0f388, 0x00055371, + 0x6bced9ff, 0x0678e2fc, 0xf6ef3a6e, 0x06a3587e, 0xfc76d8bc, 0x015856b6, 0xffaf7118, 0x0005e921, + 0x6b958a54, 0x05b71332, 0xf73c6df4, 0x06881761, 0xfc7c9079, 0x01594f25, 0xffae010b, 0x000679c5, + 0x6b581163, 0x04f7b037, 0xf789cc61, 0x066c5a27, 0xfc82a4f4, 0x015a17ab, 
0xffaca344, 0x00070564, + 0x6b1673c1, 0x043ac276, 0xf7d74c53, 0x06502583, 0xfc89144d, 0x015ab0db, 0xffab57a1, 0x00078c04, + 0x6ad0b652, 0x0380521c, 0xf824e480, 0x06337e2a, 0xfc8fdc9f, 0x015b1b4e, 0xffaa1e02, 0x00080dab, + 0x6a86de48, 0x02c86715, 0xf8728bb3, 0x061668d2, 0xfc96fbfc, 0x015b579e, 0xffa8f641, 0x00088a62, + 0x6a38f123, 0x0213090c, 0xf8c038d0, 0x05f8ea30, 0xfc9e7074, 0x015b666c, 0xffa7e039, 0x00090230, + 0x69e6f4b1, 0x01603f6e, 0xf90de2d1, 0x05db06fc, 0xfca63810, 0x015b485b, 0xffa6dbc0, 0x0009751e, + 0x6990ef0b, 0x00b01162, 0xf95b80cb, 0x05bcc3ed, 0xfcae50d6, 0x015afe14, 0xffa5e8ad, 0x0009e337, + 0x6936e697, 0x000285d0, 0xf9a909ea, 0x059e25b5, 0xfcb6b8c4, 0x015a8843, 0xffa506d2, 0x000a4c85, + 0x68d8e206, 0xff57a35e, 0xf9f67577, 0x057f310a, 0xfcbf6dd8, 0x0159e796, 0xffa43603, 0x000ab112, + 0x6876e855, 0xfeaf706f, 0xfa43bad2, 0x055fea9d, 0xfcc86e09, 0x01591cc0, 0xffa3760e, 0x000b10ec, + 0x681100c9, 0xfe09f323, 0xfa90d17b, 0x0540571a, 0xfcd1b74c, 0x01582878, 0xffa2c6c2, 0x000b6c1d, + 0x67a732f4, 0xfd673159, 0xfaddb10c, 0x05207b2f, 0xfcdb4793, 0x01570b77, 0xffa227ec, 0x000bc2b3, + 0x673986ac, 0xfcc730aa, 0xfb2a513b, 0x05005b82, 0xfce51ccb, 0x0155c678, 0xffa19957, 0x000c14bb, + 0x66c80413, 0xfc29f670, 0xfb76a9dd, 0x04dffcb6, 0xfcef34e1, 0x01545a3c, 0xffa11acb, 0x000c6244, + 0x6652b392, 0xfb8f87bd, 0xfbc2b2e4, 0x04bf6369, 0xfcf98dbe, 0x0152c783, 0xffa0ac11, 0x000cab5c, + 0x65d99dd5, 0xfaf7e963, 0xfc0e6461, 0x049e9433, 0xfd04254a, 0x01510f13, 0xffa04cf0, 0x000cf012, + 0x655ccbd3, 0xfa631fef, 0xfc59b685, 0x047d93a8, 0xfd0ef969, 0x014f31b2, 0xff9ffd2c, 0x000d3075, + 0x64dc46c3, 0xf9d12fab, 0xfca4a19f, 0x045c6654, 0xfd1a0801, 0x014d3029, 0xff9fbc89, 0x000d6c97, + 0x64581823, 0xf9421c9d, 0xfcef1e20, 0x043b10bd, 0xfd254ef4, 0x014b0b45, 0xff9f8ac9, 0x000da486, + 0x63d049b4, 0xf8b5ea87, 0xfd392498, 0x04199760, 0xfd30cc24, 0x0148c3d2, 0xff9f67ae, 0x000dd854, + 0x6344e578, 0xf82c9ce7, 0xfd82adba, 0x03f7feb4, 0xfd3c7d73, 0x01465a9f, 0xff9f52f7, 0x000e0812, + 0x62b5f5b2, 
0xf7a636fa, 0xfdcbb25a, 0x03d64b27, 0xfd4860c2, 0x0143d07f, 0xff9f4c65, 0x000e33d3, + 0x622384e8, 0xf722bbb5, 0xfe142b6e, 0x03b4811d, 0xfd5473f3, 0x01412643, 0xff9f53b4, 0x000e5ba7, + 0x618d9ddc, 0xf6a22dcf, 0xfe5c120f, 0x0392a4f4, 0xfd60b4e7, 0x013e5cc0, 0xff9f68a1, 0x000e7fa1, + 0x60f44b91, 0xf6248fb6, 0xfea35f79, 0x0370bafc, 0xfd6d2180, 0x013b74ca, 0xff9f8ae9, 0x000e9fd5, + 0x60579947, 0xf5a9e398, 0xfeea0d0c, 0x034ec77f, 0xfd79b7a1, 0x01386f3a, 0xff9fba47, 0x000ebc54, + 0x5fb79278, 0xf5322b61, 0xff30144a, 0x032ccebb, 0xfd86752e, 0x01354ce7, 0xff9ff674, 0x000ed533, + 0x5f1442dc, 0xf4bd68b6, 0xff756edc, 0x030ad4e1, 0xfd93580d, 0x01320ea9, 0xffa03f2b, 0x000eea84, + 0x5e6db665, 0xf44b9cfe, 0xffba168d, 0x02e8de19, 0xfda05e23, 0x012eb55a, 0xffa09425, 0x000efc5c, + 0x5dc3f93c, 0xf3dcc959, 0xfffe054e, 0x02c6ee7f, 0xfdad855b, 0x012b41d3, 0xffa0f519, 0x000f0ace, + 0x5d1717c4, 0xf370eea9, 0x00413536, 0x02a50a22, 0xfdbacb9e, 0x0127b4f1, 0xffa161bf, 0x000f15ef, + 0x5c671e96, 0xf3080d8c, 0x0083a081, 0x02833506, 0xfdc82edb, 0x01240f8e, 0xffa1d9cf, 0x000f1dd2, + 0x5bb41a80, 0xf2a2265e, 0x00c54190, 0x02617321, 0xfdd5ad01, 0x01205285, 0xffa25cfe, 0x000f228d, + 0x5afe1886, 0xf23f393b, 0x010612eb, 0x023fc85c, 0xfde34403, 0x011c7eb2, 0xffa2eb04, 0x000f2434, + 0x5a4525df, 0xf1df45fd, 0x01460f41, 0x021e3891, 0xfdf0f1d6, 0x011894f0, 0xffa38395, 0x000f22dc, + 0x59894ff3, 0xf1824c3e, 0x01853165, 0x01fcc78f, 0xfdfeb475, 0x0114961b, 0xffa42668, 0x000f1e99, + 0x58caa45b, 0xf1284b58, 0x01c37452, 0x01db7914, 0xfe0c89db, 0x0110830f, 0xffa4d332, 0x000f1781, + 0x580930e1, 0xf0d14267, 0x0200d32c, 0x01ba50d2, 0xfe1a7009, 0x010c5ca6, 0xffa589a6, 0x000f0da8, + 0x5745037c, 0xf07d3043, 0x023d493c, 0x0199526b, 0xfe286505, 0x010823ba, 0xffa6497c, 0x000f0125, + 0x567e2a51, 0xf02c138a, 0x0278d1f2, 0x01788170, 0xfe3666d5, 0x0103d927, 0xffa71266, 0x000ef20b, + 0x55b4b3af, 0xefddea9a, 0x02b368e6, 0x0157e166, 0xfe447389, 0x00ff7dc4, 0xffa7e41a, 0x000ee070, + 0x54e8ae13, 0xef92b393, 0x02ed09d7, 0x013775bf, 
0xfe528931, 0x00fb126b, 0xffa8be4c, 0x000ecc69, + 0x541a281e, 0xef4a6c58, 0x0325b0ad, 0x011741df, 0xfe60a5e5, 0x00f697f3, 0xffa9a0b1, 0x000eb60b, + 0x5349309e, 0xef051290, 0x035d5977, 0x00f7491a, 0xfe6ec7c0, 0x00f20f32, 0xffaa8afe, 0x000e9d6b, + 0x5275d684, 0xeec2a3a3, 0x0394006a, 0x00d78eb3, 0xfe7cece2, 0x00ed78ff, 0xffab7ce7, 0x000e829e, + 0x51a028e8, 0xee831cc3, 0x03c9a1e5, 0x00b815da, 0xfe8b1373, 0x00e8d62d, 0xffac7621, 0x000e65ba, + 0x50c83704, 0xee467ae1, 0x03fe3a6f, 0x0098e1b3, 0xfe99399f, 0x00e4278f, 0xffad7662, 0x000e46d3, + 0x4fee1037, 0xee0cbab9, 0x0431c6b5, 0x0079f54c, 0xfea75d97, 0x00df6df7, 0xffae7d5f, 0x000e25fd, + 0x4f11c3fe, 0xedd5d8ca, 0x0464438c, 0x005b53a4, 0xfeb57d92, 0x00daaa34, 0xffaf8acd, 0x000e034f, + 0x4e3361f7, 0xeda1d15c, 0x0495adf2, 0x003cffa9, 0xfec397cf, 0x00d5dd16, 0xffb09e63, 0x000ddedb, + 0x4d52f9df, 0xed70a07d, 0x04c6030d, 0x001efc35, 0xfed1aa92, 0x00d10769, 0xffb1b7d8, 0x000db8b7, + 0x4c709b8e, 0xed424205, 0x04f54029, 0x00014c12, 0xfedfb425, 0x00cc29f7, 0xffb2d6e1, 0x000d90f6, + 0x4b8c56f8, 0xed16b196, 0x052362ba, 0xffe3f1f7, 0xfeedb2da, 0x00c7458a, 0xffb3fb37, 0x000d67ae, + 0x4aa63c2c, 0xecedea99, 0x0550685d, 0xffc6f08a, 0xfefba508, 0x00c25ae8, 0xffb52490, 0x000d3cf1, + 0x49be5b50, 0xecc7e845, 0x057c4ed4, 0xffaa4a5d, 0xff09890f, 0x00bd6ad7, 0xffb652a7, 0x000d10d5, + 0x48d4c4a2, 0xeca4a59b, 0x05a7140b, 0xff8e01f1, 0xff175d53, 0x00b87619, 0xffb78533, 0x000ce36b, + 0x47e98874, 0xec841d68, 0x05d0b612, 0xff7219b3, 0xff252042, 0x00b37d70, 0xffb8bbed, 0x000cb4c8, + 0x46fcb72d, 0xec664a48, 0x05f93324, 0xff5693fe, 0xff32d04f, 0x00ae8198, 0xffb9f691, 0x000c84ff, + 0x460e6148, 0xec4b26a2, 0x0620899e, 0xff3b731b, 0xff406bf8, 0x00a9834e, 0xffbb34d8, 0x000c5422, + 0x451e9750, 0xec32acb0, 0x0646b808, 0xff20b93e, 0xff4df1be, 0x00a4834c, 0xffbc767f, 0x000c2245, + 0x442d69de, 0xec1cd677, 0x066bbd0d, 0xff066889, 0xff5b602c, 0x009f8249, 0xffbdbb42, 0x000bef79, + 0x433ae99c, 0xec099dcf, 0x068f9781, 0xfeec830d, 0xff68b5d5, 0x009a80f8, 0xffbf02dd, 
0x000bbbd2, + 0x4247273f, 0xebf8fc64, 0x06b2465b, 0xfed30ac5, 0xff75f153, 0x0095800c, 0xffc04d0f, 0x000b8760, + 0x41523389, 0xebeaebaf, 0x06d3c8bb, 0xfeba0199, 0xff831148, 0x00908034, 0xffc19996, 0x000b5235, + 0x405c1f43, 0xebdf6500, 0x06f41de3, 0xfea16960, 0xff90145e, 0x008b821b, 0xffc2e832, 0x000b1c64, + 0x3f64fb40, 0xebd6617b, 0x0713453d, 0xfe8943dc, 0xff9cf947, 0x0086866b, 0xffc438a3, 0x000ae5fc, + 0x3e6cd85b, 0xebcfda19, 0x07313e56, 0xfe7192bd, 0xffa9bebe, 0x00818dcb, 0xffc58aaa, 0x000aaf0f, + 0x3d73c772, 0xebcbc7a7, 0x074e08e0, 0xfe5a579d, 0xffb66386, 0x007c98de, 0xffc6de09, 0x000a77ac, + 0x3c79d968, 0xebca22cc, 0x0769a4b2, 0xfe439407, 0xffc2e669, 0x0077a845, 0xffc83285, 0x000a3fe5, + 0x3b7f1f23, 0xebcae405, 0x078411c7, 0xfe2d496f, 0xffcf463a, 0x0072bc9d, 0xffc987e0, 0x000a07c9, + 0x3a83a989, 0xebce03aa, 0x079d503b, 0xfe177937, 0xffdb81d6, 0x006dd680, 0xffcadde1, 0x0009cf67, + 0x3987897f, 0xebd379eb, 0x07b56051, 0xfe0224b0, 0xffe79820, 0x0068f687, 0xffcc344c, 0x000996ce, + 0x388acfe9, 0xebdb3ed5, 0x07cc426c, 0xfded4d13, 0xfff38806, 0x00641d44, 0xffcd8aeb, 0x00095e0e, + 0x378d8da8, 0xebe54a4f, 0x07e1f712, 0xfdd8f38b, 0xffff507b, 0x005f4b4a, 0xffcee183, 0x00092535, + 0x368fd397, 0xebf1941f, 0x07f67eec, 0xfdc5192d, 0x000af07f, 0x005a8125, 0xffd037e0, 0x0008ec50, + 0x3591b28b, 0xec0013e8, 0x0809dac3, 0xfdb1befc, 0x00166718, 0x0055bf60, 0xffd18dcc, 0x0008b36e, + 0x34933b50, 0xec10c12c, 0x081c0b84, 0xfd9ee5e7, 0x0021b355, 0x00510682, 0xffd2e311, 0x00087a9c, + 0x33947eab, 0xec23934f, 0x082d1239, 0xfd8c8ecc, 0x002cd44d, 0x004c570f, 0xffd4377d, 0x000841e8, + 0x32958d55, 0xec388194, 0x083cf010, 0xfd7aba74, 0x0037c922, 0x0047b186, 0xffd58ade, 0x0008095d, + 0x319677fa, 0xec4f8322, 0x084ba654, 0xfd696998, 0x004290fc, 0x00431666, 0xffd6dd02, 0x0007d108, + 0x30974f3b, 0xec688f02, 0x08593671, 0xfd589cdc, 0x004d2b0e, 0x003e8628, 0xffd82dba, 0x000798f5, + 0x2f9823a8, 0xec839c22, 0x0865a1f1, 0xfd4854d3, 0x00579691, 0x003a0141, 0xffd97cd6, 0x00076130, + 0x2e9905c1, 0xeca0a156, 
0x0870ea7e, 0xfd3891fd, 0x0061d2ca, 0x00358824, 0xffdaca2a, 0x000729c4, + 0x2d9a05f4, 0xecbf9558, 0x087b11de, 0xfd2954c8, 0x006bdf05, 0x00311b41, 0xffdc1588, 0x0006f2bb, + 0x2c9b349e, 0xece06ecb, 0x088419f6, 0xfd1a9d91, 0x0075ba95, 0x002cbb03, 0xffdd5ec6, 0x0006bc21, + 0x2b9ca203, 0xed032439, 0x088c04c8, 0xfd0c6ca2, 0x007f64da, 0x002867d2, 0xffdea5bb, 0x000685ff, + 0x2a9e5e57, 0xed27ac16, 0x0892d470, 0xfcfec233, 0x0088dd38, 0x00242213, 0xffdfea3c, 0x0006505f, + 0x29a079b2, 0xed4dfcc2, 0x08988b2a, 0xfcf19e6b, 0x0092231e, 0x001fea27, 0xffe12c22, 0x00061b4b, + 0x28a30416, 0xed760c88, 0x089d2b4a, 0xfce50161, 0x009b3605, 0x001bc06b, 0xffe26b48, 0x0005e6cb, + 0x27a60d6a, 0xed9fd1a2, 0x08a0b740, 0xfcd8eb17, 0x00a4156b, 0x0017a53b, 0xffe3a788, 0x0005b2e8, + 0x26a9a57b, 0xedcb4237, 0x08a33196, 0xfccd5b82, 0x00acc0da, 0x001398ec, 0xffe4e0bf, 0x00057faa, + 0x25addbf9, 0xedf8545b, 0x08a49cf0, 0xfcc25285, 0x00b537e1, 0x000f9bd2, 0xffe616c8, 0x00054d1a, + 0x24b2c075, 0xee26fe17, 0x08a4fc0d, 0xfcb7cff0, 0x00bd7a1c, 0x000bae3c, 0xffe74984, 0x00051b3e, + 0x23b86263, 0xee573562, 0x08a451c0, 0xfcadd386, 0x00c5872a, 0x0007d075, 0xffe878d3, 0x0004ea1d, + 0x22bed116, 0xee88f026, 0x08a2a0f8, 0xfca45cf7, 0x00cd5eb7, 0x000402c8, 0xffe9a494, 0x0004b9c0, + 0x21c61bc0, 0xeebc2444, 0x089fecbb, 0xfc9b6be5, 0x00d50075, 0x00004579, 0xffeaccaa, 0x00048a2b, + 0x20ce516f, 0xeef0c78d, 0x089c3824, 0xfc92ffe1, 0x00dc6c1e, 0xfffc98c9, 0xffebf0fa, 0x00045b65, + 0x1fd7810f, 0xef26cfca, 0x08978666, 0xfc8b186d, 0x00e3a175, 0xfff8fcf7, 0xffed1166, 0x00042d74, + 0x1ee1b965, 0xef5e32bd, 0x0891dac8, 0xfc83b4fc, 0x00eaa045, 0xfff5723d, 0xffee2dd7, 0x0004005e, + 0x1ded0911, 0xef96e61c, 0x088b38a9, 0xfc7cd4f0, 0x00f16861, 0xfff1f8d2, 0xffef4632, 0x0003d426, + 0x1cf97e8b, 0xefd0df9a, 0x0883a378, 0xfc76779e, 0x00f7f9a3, 0xffee90eb, 0xfff05a60, 0x0003a8d2, + 0x1c072823, 0xf00c14e1, 0x087b1ebc, 0xfc709c4d, 0x00fe53ef, 0xffeb3ab8, 0xfff16a4a, 0x00037e65, + 0x1b1613ff, 0xf0487b98, 0x0871ae0d, 0xfc6b4233, 0x0104772e, 
0xffe7f666, 0xfff275db, 0x000354e5, + 0x1a26501b, 0xf0860962, 0x08675516, 0xfc66687a, 0x010a6353, 0xffe4c41e, 0xfff37d00, 0x00032c54, + 0x1937ea47, 0xf0c4b3e0, 0x085c1794, 0xfc620e3d, 0x01101858, 0xffe1a408, 0xfff47fa5, 0x000304b7, + 0x184af025, 0xf10470b0, 0x084ff957, 0xfc5e328c, 0x0115963d, 0xffde9646, 0xfff57db8, 0x0002de0e, + 0x175f6f2b, 0xf1453571, 0x0842fe3d, 0xfc5ad465, 0x011add0b, 0xffdb9af8, 0xfff67729, 0x0002b85f, + 0x1675749e, 0xf186f7c0, 0x08352a35, 0xfc57f2be, 0x011fecd3, 0xffd8b23b, 0xfff76be9, 0x000293aa, + 0x158d0d95, 0xf1c9ad40, 0x0826813e, 0xfc558c7c, 0x0124c5ab, 0xffd5dc28, 0xfff85be8, 0x00026ff2, + 0x14a646f6, 0xf20d4b92, 0x08170767, 0xfc53a07b, 0x012967b1, 0xffd318d6, 0xfff9471b, 0x00024d39, + 0x13c12d73, 0xf251c85d, 0x0806c0cb, 0xfc522d88, 0x012dd30a, 0xffd06858, 0xfffa2d74, 0x00022b7f, + 0x12ddcd8f, 0xf297194d, 0x07f5b193, 0xfc513266, 0x013207e4, 0xffcdcabe, 0xfffb0ee9, 0x00020ac7, + 0x11fc3395, 0xf2dd3411, 0x07e3ddf7, 0xfc50adcc, 0x01360670, 0xffcb4014, 0xfffbeb70, 0x0001eb10, + 0x111c6ba0, 0xf3240e61, 0x07d14a38, 0xfc509e64, 0x0139cee9, 0xffc8c866, 0xfffcc300, 0x0001cc5c, diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp new file mode 100644 index 0000000..e6d8f09 --- /dev/null +++ b/services/audioflinger/AudioStreamOut.cpp @@ -0,0 +1,117 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. 
+*/ + +#define LOG_TAG "AudioFlinger" +//#define LOG_NDEBUG 0 + +#include <hardware/audio.h> +#include <utils/Log.h> + +#include "AudioHwDevice.h" +#include "AudioStreamOut.h" + +namespace android { + +// ---------------------------------------------------------------------------- + +AudioStreamOut::AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags) + : audioHwDev(dev) + , stream(NULL) + , flags(flags) +{ +} + +audio_hw_device_t* AudioStreamOut::hwDev() const +{ + return audioHwDev->hwDevice(); +} + +status_t AudioStreamOut::getRenderPosition(uint32_t *frames) +{ + if (stream == NULL) { + return NO_INIT; + } + return stream->get_render_position(stream, frames); +} + +status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) +{ + if (stream == NULL) { + return NO_INIT; + } + return stream->get_presentation_position(stream, frames, timestamp); +} + +status_t AudioStreamOut::open( + audio_io_handle_t handle, + audio_devices_t devices, + struct audio_config *config, + const char *address) +{ + audio_stream_out_t* outStream; + int status = hwDev()->open_output_stream( + hwDev(), + handle, + devices, + flags, + config, + &outStream, + address); + ALOGV("AudioStreamOut::open(), HAL open_output_stream returned " + " %p, sampleRate %d, Format %#x, " + "channelMask %#x, status %d", + outStream, + config->sample_rate, + config->format, + config->channel_mask, + status); + + if (status == NO_ERROR) { + stream = outStream; + } + + return status; +} + +size_t AudioStreamOut::getFrameSize() +{ + ALOG_ASSERT(stream != NULL); + return audio_stream_out_frame_size(stream); +} + +int AudioStreamOut::flush() +{ + ALOG_ASSERT(stream != NULL); + if (stream->flush != NULL) { + return stream->flush(stream); + } + return NO_ERROR; +} + +int AudioStreamOut::standby() +{ + ALOG_ASSERT(stream != NULL); + return stream->common.standby(&stream->common); +} + +ssize_t AudioStreamOut::write(const void* buffer, size_t bytes) +{ + ALOG_ASSERT(stream 
!= NULL); + return stream->write(stream, buffer, bytes); +} + +} // namespace android diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h new file mode 100644 index 0000000..e91ca9c --- /dev/null +++ b/services/audioflinger/AudioStreamOut.h @@ -0,0 +1,83 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef ANDROID_AUDIO_STREAM_OUT_H +#define ANDROID_AUDIO_STREAM_OUT_H + +#include <stdint.h> +#include <sys/types.h> + +#include <system/audio.h> + +#include "AudioStreamOut.h" + +namespace android { + +class AudioHwDevice; + +/** + * Managed access to a HAL output stream. + */ +class AudioStreamOut { +public: +// AudioStreamOut is immutable, so its fields are const. +// For emphasis, we could also make all pointers to them be "const *", +// but that would clutter the code unnecessarily. + AudioHwDevice * const audioHwDev; + audio_stream_out_t *stream; + const audio_output_flags_t flags; + + audio_hw_device_t *hwDev() const; + + AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags); + + virtual status_t open( + audio_io_handle_t handle, + audio_devices_t devices, + struct audio_config *config, + const char *address); + + virtual ~AudioStreamOut() { } + + virtual status_t getRenderPosition(uint32_t *frames); + + virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp); + + /** + * Write audio buffer to driver. 
Returns number of bytes written, or a + * negative status_t. If at least one frame was written successfully prior to the error, + * it is suggested that the driver return that successful (short) byte count + * and then return an error in the subsequent call. + * + * If set_callback() has previously been called to enable non-blocking mode + * the write() is not allowed to block. It must write only the number of + * bytes that currently fit in the driver/hardware buffer and then return + * this byte count. If this is less than the requested write size the + * callback function must be called when more space is available in the + * driver/hardware buffer. + */ + virtual ssize_t write(const void *buffer, size_t bytes); + + virtual size_t getFrameSize(); + + virtual status_t flush(); + virtual status_t standby(); +}; + +} // namespace android + +#endif // ANDROID_AUDIO_STREAM_OUT_H diff --git a/services/audioflinger/Configuration.h b/services/audioflinger/Configuration.h index 6a8aeb1..845697a 100644 --- a/services/audioflinger/Configuration.h +++ b/services/audioflinger/Configuration.h @@ -29,9 +29,8 @@ // uncomment to display CPU load adjusted for CPU frequency //#define CPU_FREQUENCY_STATISTICS -// uncomment to enable fast mixer to take performance samples for later statistical analysis -#define FAST_MIXER_STATISTICS -// FIXME rename to FAST_THREAD_STATISTICS +// uncomment to enable fast threads to take performance samples for later statistical analysis +#define FAST_THREAD_STATISTICS // uncomment for debugging timing problems related to StateQueue::push() //#define STATE_QUEUE_DUMP diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp index bcaf8ae..8bccb47 100644 --- a/services/audioflinger/Effects.cpp +++ b/services/audioflinger/Effects.cpp @@ -1953,4 +1953,4 @@ void AudioFlinger::EffectChain::setThread(const sp<ThreadBase>& thread) } } -}; // namespace android +} // namespace android diff --git a/services/audioflinger/FastCapture.cpp 
b/services/audioflinger/FastCapture.cpp index 0c9b976..9e7e8a4 100644 --- a/services/audioflinger/FastCapture.cpp +++ b/services/audioflinger/FastCapture.cpp @@ -29,18 +29,18 @@ namespace android { -/*static*/ const FastCaptureState FastCapture::initial; +/*static*/ const FastCaptureState FastCapture::sInitial; FastCapture::FastCapture() : FastThread(), - inputSource(NULL), inputSourceGen(0), pipeSink(NULL), pipeSinkGen(0), - readBuffer(NULL), readBufferState(-1), format(Format_Invalid), sampleRate(0), - // dummyDumpState - totalNativeFramesRead(0) + mInputSource(NULL), mInputSourceGen(0), mPipeSink(NULL), mPipeSinkGen(0), + mReadBuffer(NULL), mReadBufferState(-1), mFormat(Format_Invalid), mSampleRate(0), + // mDummyDumpState + mTotalNativeFramesRead(0) { - previous = &initial; - current = &initial; + mPrevious = &sInitial; + mCurrent = &sInitial; - mDummyDumpState = &dummyDumpState; + mDummyDumpState = &mDummyFastCaptureDumpState; } FastCapture::~FastCapture() @@ -63,13 +63,13 @@ void FastCapture::setLog(NBLog::Writer *logWriter __unused) void FastCapture::onIdle() { - preIdle = *(const FastCaptureState *)current; - current = &preIdle; + mPreIdle = *(const FastCaptureState *)mCurrent; + mCurrent = &mPreIdle; } void FastCapture::onExit() { - delete[] readBuffer; + free(mReadBuffer); } bool FastCapture::isSubClassCommand(FastThreadState::Command command) @@ -86,67 +86,67 @@ bool FastCapture::isSubClassCommand(FastThreadState::Command command) void FastCapture::onStateChange() { - const FastCaptureState * const current = (const FastCaptureState *) this->current; - const FastCaptureState * const previous = (const FastCaptureState *) this->previous; - FastCaptureDumpState * const dumpState = (FastCaptureDumpState *) this->dumpState; + const FastCaptureState * const current = (const FastCaptureState *) mCurrent; + const FastCaptureState * const previous = (const FastCaptureState *) mPrevious; + FastCaptureDumpState * const dumpState = (FastCaptureDumpState *) 
mDumpState; const size_t frameCount = current->mFrameCount; bool eitherChanged = false; // check for change in input HAL configuration - NBAIO_Format previousFormat = format; - if (current->mInputSourceGen != inputSourceGen) { - inputSource = current->mInputSource; - inputSourceGen = current->mInputSourceGen; - if (inputSource == NULL) { - format = Format_Invalid; - sampleRate = 0; + NBAIO_Format previousFormat = mFormat; + if (current->mInputSourceGen != mInputSourceGen) { + mInputSource = current->mInputSource; + mInputSourceGen = current->mInputSourceGen; + if (mInputSource == NULL) { + mFormat = Format_Invalid; + mSampleRate = 0; } else { - format = inputSource->format(); - sampleRate = Format_sampleRate(format); - unsigned channelCount = Format_channelCount(format); + mFormat = mInputSource->format(); + mSampleRate = Format_sampleRate(mFormat); + unsigned channelCount = Format_channelCount(mFormat); ALOG_ASSERT(channelCount == 1 || channelCount == 2); } - dumpState->mSampleRate = sampleRate; + dumpState->mSampleRate = mSampleRate; eitherChanged = true; } // check for change in pipe - if (current->mPipeSinkGen != pipeSinkGen) { - pipeSink = current->mPipeSink; - pipeSinkGen = current->mPipeSinkGen; + if (current->mPipeSinkGen != mPipeSinkGen) { + mPipeSink = current->mPipeSink; + mPipeSinkGen = current->mPipeSinkGen; eitherChanged = true; } // input source and pipe sink must be compatible - if (eitherChanged && inputSource != NULL && pipeSink != NULL) { - ALOG_ASSERT(Format_isEqual(format, pipeSink->format())); + if (eitherChanged && mInputSource != NULL && mPipeSink != NULL) { + ALOG_ASSERT(Format_isEqual(mFormat, mPipeSink->format())); } - if ((!Format_isEqual(format, previousFormat)) || (frameCount != previous->mFrameCount)) { - // FIXME to avoid priority inversion, don't delete here - delete[] readBuffer; - readBuffer = NULL; - if (frameCount > 0 && sampleRate > 0) { + if ((!Format_isEqual(mFormat, previousFormat)) || (frameCount != previous->mFrameCount)) 
{ + // FIXME to avoid priority inversion, don't free here + free(mReadBuffer); + mReadBuffer = NULL; + if (frameCount > 0 && mSampleRate > 0) { // FIXME new may block for unbounded time at internal mutex of the heap // implementation; it would be better to have normal capture thread allocate for // us to avoid blocking here and to prevent possible priority inversion - unsigned channelCount = Format_channelCount(format); - // FIXME frameSize - readBuffer = new short[frameCount * channelCount]; - periodNs = (frameCount * 1000000000LL) / sampleRate; // 1.00 - underrunNs = (frameCount * 1750000000LL) / sampleRate; // 1.75 - overrunNs = (frameCount * 500000000LL) / sampleRate; // 0.50 - forceNs = (frameCount * 950000000LL) / sampleRate; // 0.95 - warmupNs = (frameCount * 500000000LL) / sampleRate; // 0.50 + (void)posix_memalign(&mReadBuffer, 32, frameCount * Format_frameSize(mFormat)); + mPeriodNs = (frameCount * 1000000000LL) / mSampleRate; // 1.00 + mUnderrunNs = (frameCount * 1750000000LL) / mSampleRate; // 1.75 + mOverrunNs = (frameCount * 500000000LL) / mSampleRate; // 0.50 + mForceNs = (frameCount * 950000000LL) / mSampleRate; // 0.95 + mWarmupNsMin = (frameCount * 750000000LL) / mSampleRate; // 0.75 + mWarmupNsMax = (frameCount * 1250000000LL) / mSampleRate; // 1.25 } else { - periodNs = 0; - underrunNs = 0; - overrunNs = 0; - forceNs = 0; - warmupNs = 0; + mPeriodNs = 0; + mUnderrunNs = 0; + mOverrunNs = 0; + mForceNs = 0; + mWarmupNsMin = 0; + mWarmupNsMax = LONG_MAX; } - readBufferState = -1; + mReadBufferState = -1; dumpState->mFrameCount = frameCount; } @@ -154,44 +154,43 @@ void FastCapture::onStateChange() void FastCapture::onWork() { - const FastCaptureState * const current = (const FastCaptureState *) this->current; - FastCaptureDumpState * const dumpState = (FastCaptureDumpState *) this->dumpState; - const FastCaptureState::Command command = this->command; + const FastCaptureState * const current = (const FastCaptureState *) mCurrent; + 
FastCaptureDumpState * const dumpState = (FastCaptureDumpState *) mDumpState; + const FastCaptureState::Command command = mCommand; const size_t frameCount = current->mFrameCount; if ((command & FastCaptureState::READ) /*&& isWarm*/) { - ALOG_ASSERT(inputSource != NULL); - ALOG_ASSERT(readBuffer != NULL); + ALOG_ASSERT(mInputSource != NULL); + ALOG_ASSERT(mReadBuffer != NULL); dumpState->mReadSequence++; ATRACE_BEGIN("read"); - ssize_t framesRead = inputSource->read(readBuffer, frameCount, + ssize_t framesRead = mInputSource->read(mReadBuffer, frameCount, AudioBufferProvider::kInvalidPTS); ATRACE_END(); dumpState->mReadSequence++; if (framesRead >= 0) { LOG_ALWAYS_FATAL_IF((size_t) framesRead > frameCount); - totalNativeFramesRead += framesRead; - dumpState->mFramesRead = totalNativeFramesRead; - readBufferState = framesRead; + mTotalNativeFramesRead += framesRead; + dumpState->mFramesRead = mTotalNativeFramesRead; + mReadBufferState = framesRead; } else { dumpState->mReadErrors++; - readBufferState = 0; + mReadBufferState = 0; } // FIXME rename to attemptedIO - attemptedWrite = true; + mAttemptedWrite = true; } if (command & FastCaptureState::WRITE) { - ALOG_ASSERT(pipeSink != NULL); - ALOG_ASSERT(readBuffer != NULL); - if (readBufferState < 0) { - unsigned channelCount = Format_channelCount(format); - // FIXME frameSize - memset(readBuffer, 0, frameCount * channelCount * sizeof(short)); - readBufferState = frameCount; + ALOG_ASSERT(mPipeSink != NULL); + ALOG_ASSERT(mReadBuffer != NULL); + if (mReadBufferState < 0) { + unsigned channelCount = Format_channelCount(mFormat); + memset(mReadBuffer, 0, frameCount * Format_frameSize(mFormat)); + mReadBufferState = frameCount; } - if (readBufferState > 0) { - ssize_t framesWritten = pipeSink->write(readBuffer, readBufferState); + if (mReadBufferState > 0) { + ssize_t framesWritten = mPipeSink->write(mReadBuffer, mReadBufferState); // FIXME This supports at most one fast capture client. 
// To handle multiple clients this could be converted to an array, // or with a lot more work the control block could be shared by all clients. @@ -210,13 +209,4 @@ void FastCapture::onWork() } } -FastCaptureDumpState::FastCaptureDumpState() : FastThreadDumpState(), - mReadSequence(0), mFramesRead(0), mReadErrors(0), mSampleRate(0), mFrameCount(0) -{ -} - -FastCaptureDumpState::~FastCaptureDumpState() -{ -} - } // namespace android diff --git a/services/audioflinger/FastCapture.h b/services/audioflinger/FastCapture.h index e535b9d..e258a4d 100644 --- a/services/audioflinger/FastCapture.h +++ b/services/audioflinger/FastCapture.h @@ -20,23 +20,12 @@ #include "FastThread.h" #include "StateQueue.h" #include "FastCaptureState.h" +#include "FastCaptureDumpState.h" namespace android { typedef StateQueue<FastCaptureState> FastCaptureStateQueue; -struct FastCaptureDumpState : FastThreadDumpState { - FastCaptureDumpState(); - /*virtual*/ ~FastCaptureDumpState(); - - // FIXME by renaming, could pull up many of these to FastThreadDumpState - uint32_t mReadSequence; // incremented before and after each read() - uint32_t mFramesRead; // total number of frames read successfully - uint32_t mReadErrors; // total number of read() errors - uint32_t mSampleRate; - size_t mFrameCount; -}; - class FastCapture : public FastThread { public: @@ -57,19 +46,21 @@ private: virtual void onStateChange(); virtual void onWork(); - static const FastCaptureState initial; - FastCaptureState preIdle; // copy of state before we went into idle + static const FastCaptureState sInitial; + + FastCaptureState mPreIdle; // copy of state before we went into idle // FIXME by renaming, could pull up many of these to FastThread - NBAIO_Source *inputSource; - int inputSourceGen; - NBAIO_Sink *pipeSink; - int pipeSinkGen; - short *readBuffer; - ssize_t readBufferState; // number of initialized frames in readBuffer, or -1 to clear - NBAIO_Format format; - unsigned sampleRate; - FastCaptureDumpState 
dummyDumpState; - uint32_t totalNativeFramesRead; // copied to dumpState->mFramesRead + NBAIO_Source* mInputSource; + int mInputSourceGen; + NBAIO_Sink* mPipeSink; + int mPipeSinkGen; + void* mReadBuffer; + ssize_t mReadBufferState; // number of initialized frames in readBuffer, + // or -1 to clear + NBAIO_Format mFormat; + unsigned mSampleRate; + FastCaptureDumpState mDummyFastCaptureDumpState; + uint32_t mTotalNativeFramesRead; // copied to dumpState->mFramesRead }; // class FastCapture diff --git a/services/audioflinger/FastCaptureDumpState.cpp b/services/audioflinger/FastCaptureDumpState.cpp new file mode 100644 index 0000000..53eeba5 --- /dev/null +++ b/services/audioflinger/FastCaptureDumpState.cpp @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "FastCaptureDumpState" +//define LOG_NDEBUG 0 + +#include "Configuration.h" +#include <utils/Log.h> +#include "FastCaptureDumpState.h" +#include "FastCaptureState.h" + +namespace android { + +FastCaptureDumpState::FastCaptureDumpState() : FastThreadDumpState(), + mReadSequence(0), mFramesRead(0), mReadErrors(0), mSampleRate(0), mFrameCount(0) +{ +} + +FastCaptureDumpState::~FastCaptureDumpState() +{ +} + +void FastCaptureDumpState::dump(int fd) const +{ + if (mCommand == FastCaptureState::INITIAL) { + dprintf(fd, " FastCapture not initialized\n"); + return; + } + double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) + + (mMeasuredWarmupTs.tv_nsec / 1000000.0); + double periodSec = (double) mFrameCount / mSampleRate; + dprintf(fd, " FastCapture command=%s readSequence=%u framesRead=%u\n" + " readErrors=%u sampleRate=%u frameCount=%zu\n" + " measuredWarmup=%.3g ms, warmupCycles=%u period=%.2f ms\n", + FastCaptureState::commandToString(mCommand), mReadSequence, mFramesRead, + mReadErrors, mSampleRate, mFrameCount, measuredWarmupMs, mWarmupCycles, + periodSec * 1e3); +} + +} // android diff --git a/services/audioflinger/FastCaptureDumpState.h b/services/audioflinger/FastCaptureDumpState.h new file mode 100644 index 0000000..6f9c4c3 --- /dev/null +++ b/services/audioflinger/FastCaptureDumpState.h @@ -0,0 +1,42 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H +#define ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H + +#include <stdint.h> +#include "Configuration.h" +#include "FastThreadDumpState.h" + +namespace android { + +struct FastCaptureDumpState : FastThreadDumpState { + FastCaptureDumpState(); + /*virtual*/ ~FastCaptureDumpState(); + + void dump(int fd) const; // should only be called on a stable copy, not the original + + // FIXME by renaming, could pull up many of these to FastThreadDumpState + uint32_t mReadSequence; // incremented before and after each read() + uint32_t mFramesRead; // total number of frames read successfully + uint32_t mReadErrors; // total number of read() errors + uint32_t mSampleRate; + size_t mFrameCount; +}; + +} // android + +#endif // ANDROID_AUDIO_FAST_CAPTURE_DUMP_STATE_H diff --git a/services/audioflinger/FastCaptureState.cpp b/services/audioflinger/FastCaptureState.cpp index 1d029b7..c4d5e45 100644 --- a/services/audioflinger/FastCaptureState.cpp +++ b/services/audioflinger/FastCaptureState.cpp @@ -27,4 +27,19 @@ FastCaptureState::~FastCaptureState() { } +// static +const char *FastCaptureState::commandToString(Command command) +{ + const char *str = FastThreadState::commandToString(command); + if (str != NULL) { + return str; + } + switch (command) { + case FastCaptureState::READ: return "READ"; + case FastCaptureState::WRITE: return "WRITE"; + case FastCaptureState::READ_WRITE: return "READ_WRITE"; + } + LOG_ALWAYS_FATAL("%s", __func__); +} + } // android diff --git a/services/audioflinger/FastCaptureState.h b/services/audioflinger/FastCaptureState.h index 29c865a..9bca2d4 100644 --- a/services/audioflinger/FastCaptureState.h +++ b/services/audioflinger/FastCaptureState.h @@ -29,21 +29,23 @@ struct FastCaptureState : FastThreadState { /*virtual*/ ~FastCaptureState(); // all pointer fields use raw pointers; objects are owned and ref-counted by RecordThread - NBAIO_Source *mInputSource; // HAL input device, must already be negotiated 
+ NBAIO_Source* mInputSource; // HAL input device, must already be negotiated // FIXME by renaming, could pull up these fields to FastThreadState int mInputSourceGen; // increment when mInputSource is assigned - NBAIO_Sink *mPipeSink; // after reading from input source, write to this pipe sink + NBAIO_Sink* mPipeSink; // after reading from input source, write to this pipe sink int mPipeSinkGen; // increment when mPipeSink is assigned size_t mFrameCount; // number of frames per fast capture buffer - audio_track_cblk_t *mCblk; // control block for the single fast client, or NULL + audio_track_cblk_t* mCblk; // control block for the single fast client, or NULL // Extends FastThreadState::Command static const Command // The following commands also process configuration changes, and can be "or"ed: - READ = 0x8, // read from input source - WRITE = 0x10, // write to pipe sink - READ_WRITE = 0x18; // read from input source and write to pipe sink + READ = 0x8, // read from input source + WRITE = 0x10, // write to pipe sink + READ_WRITE = 0x18; // read from input source and write to pipe sink + // never returns NULL; asserts if command is invalid + static const char *commandToString(Command command); }; // struct FastCaptureState } // namespace android diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp index 2678cbf..f1cf0aa 100644 --- a/services/audioflinger/FastMixer.cpp +++ b/services/audioflinger/FastMixer.cpp @@ -27,10 +27,11 @@ #include "Configuration.h" #include <time.h> +#include <utils/Debug.h> #include <utils/Log.h> #include <utils/Trace.h> #include <system/audio.h> -#ifdef FAST_MIXER_STATISTICS +#ifdef FAST_THREAD_STATISTICS #include <cpustats/CentralTendencyStatistics.h> #ifdef CPU_FREQUENCY_STATISTICS #include <cpustats/ThreadCpuUsage.h> @@ -44,15 +45,15 @@ namespace android { -/*static*/ const FastMixerState FastMixer::initial; +/*static*/ const FastMixerState FastMixer::sInitial; FastMixer::FastMixer() : FastThread(), - 
slopNs(0), - // fastTrackNames - // generations - outputSink(NULL), - outputSinkGen(0), - mixer(NULL), + mSlopNs(0), + // mFastTrackNames + // mGenerations + mOutputSink(NULL), + mOutputSinkGen(0), + mMixer(NULL), mSinkBuffer(NULL), mSinkBufferSize(0), mSinkChannelCount(FCC_2), @@ -60,30 +61,30 @@ FastMixer::FastMixer() : FastThread(), mMixerBufferSize(0), mMixerBufferFormat(AUDIO_FORMAT_PCM_16_BIT), mMixerBufferState(UNDEFINED), - format(Format_Invalid), - sampleRate(0), - fastTracksGen(0), - totalNativeFramesWritten(0), + mFormat(Format_Invalid), + mSampleRate(0), + mFastTracksGen(0), + mTotalNativeFramesWritten(0), // timestamp - nativeFramesWrittenButNotPresented(0) // the = 0 is to silence the compiler + mNativeFramesWrittenButNotPresented(0) // the = 0 is to silence the compiler { - // FIXME pass initial as parameter to base class constructor, and make it static local - previous = &initial; - current = &initial; + // FIXME pass sInitial as parameter to base class constructor, and make it static local + mPrevious = &sInitial; + mCurrent = &sInitial; - mDummyDumpState = &dummyDumpState; + mDummyDumpState = &mDummyFastMixerDumpState; // TODO: Add channel mask to NBAIO_Format. // We assume that the channel mask must be a valid positional channel mask. 
mSinkChannelMask = audio_channel_out_mask_from_count(mSinkChannelCount); unsigned i; for (i = 0; i < FastMixerState::kMaxFastTracks; ++i) { - fastTrackNames[i] = -1; - generations[i] = 0; + mFastTrackNames[i] = -1; + mGenerations[i] = 0; } -#ifdef FAST_MIXER_STATISTICS - oldLoad.tv_sec = 0; - oldLoad.tv_nsec = 0; +#ifdef FAST_THREAD_STATISTICS + mOldLoad.tv_sec = 0; + mOldLoad.tv_nsec = 0; #endif } @@ -103,20 +104,20 @@ const FastThreadState *FastMixer::poll() void FastMixer::setLog(NBLog::Writer *logWriter) { - if (mixer != NULL) { - mixer->setLog(logWriter); + if (mMixer != NULL) { + mMixer->setLog(logWriter); } } void FastMixer::onIdle() { - preIdle = *(const FastMixerState *)current; - current = &preIdle; + mPreIdle = *(const FastMixerState *)mCurrent; + mCurrent = &mPreIdle; } void FastMixer::onExit() { - delete mixer; + delete mMixer; free(mMixerBuffer); free(mSinkBuffer); } @@ -135,82 +136,84 @@ bool FastMixer::isSubClassCommand(FastThreadState::Command command) void FastMixer::onStateChange() { - const FastMixerState * const current = (const FastMixerState *) this->current; - const FastMixerState * const previous = (const FastMixerState *) this->previous; - FastMixerDumpState * const dumpState = (FastMixerDumpState *) this->dumpState; + const FastMixerState * const current = (const FastMixerState *) mCurrent; + const FastMixerState * const previous = (const FastMixerState *) mPrevious; + FastMixerDumpState * const dumpState = (FastMixerDumpState *) mDumpState; const size_t frameCount = current->mFrameCount; // handle state change here, but since we want to diff the state, - // we're prepared for previous == &initial the first time through + // we're prepared for previous == &sInitial the first time through unsigned previousTrackMask; // check for change in output HAL configuration - NBAIO_Format previousFormat = format; - if (current->mOutputSinkGen != outputSinkGen) { - outputSink = current->mOutputSink; - outputSinkGen = current->mOutputSinkGen; - if 
(outputSink == NULL) { - format = Format_Invalid; - sampleRate = 0; + NBAIO_Format previousFormat = mFormat; + if (current->mOutputSinkGen != mOutputSinkGen) { + mOutputSink = current->mOutputSink; + mOutputSinkGen = current->mOutputSinkGen; + if (mOutputSink == NULL) { + mFormat = Format_Invalid; + mSampleRate = 0; mSinkChannelCount = 0; mSinkChannelMask = AUDIO_CHANNEL_NONE; } else { - format = outputSink->format(); - sampleRate = Format_sampleRate(format); - mSinkChannelCount = Format_channelCount(format); + mFormat = mOutputSink->format(); + mSampleRate = Format_sampleRate(mFormat); + mSinkChannelCount = Format_channelCount(mFormat); LOG_ALWAYS_FATAL_IF(mSinkChannelCount > AudioMixer::MAX_NUM_CHANNELS); // TODO: Add channel mask to NBAIO_Format // We assume that the channel mask must be a valid positional channel mask. mSinkChannelMask = audio_channel_out_mask_from_count(mSinkChannelCount); } - dumpState->mSampleRate = sampleRate; + dumpState->mSampleRate = mSampleRate; } - if ((!Format_isEqual(format, previousFormat)) || (frameCount != previous->mFrameCount)) { + if ((!Format_isEqual(mFormat, previousFormat)) || (frameCount != previous->mFrameCount)) { // FIXME to avoid priority inversion, don't delete here - delete mixer; - mixer = NULL; + delete mMixer; + mMixer = NULL; free(mMixerBuffer); mMixerBuffer = NULL; free(mSinkBuffer); mSinkBuffer = NULL; - if (frameCount > 0 && sampleRate > 0) { + if (frameCount > 0 && mSampleRate > 0) { // FIXME new may block for unbounded time at internal mutex of the heap // implementation; it would be better to have normal mixer allocate for us // to avoid blocking here and to prevent possible priority inversion - mixer = new AudioMixer(frameCount, sampleRate, FastMixerState::kMaxFastTracks); + mMixer = new AudioMixer(frameCount, mSampleRate, FastMixerState::kMaxFastTracks); const size_t mixerFrameSize = mSinkChannelCount * audio_bytes_per_sample(mMixerBufferFormat); mMixerBufferSize = mixerFrameSize * frameCount; 
(void)posix_memalign(&mMixerBuffer, 32, mMixerBufferSize); const size_t sinkFrameSize = mSinkChannelCount - * audio_bytes_per_sample(format.mFormat); + * audio_bytes_per_sample(mFormat.mFormat); if (sinkFrameSize > mixerFrameSize) { // need a sink buffer mSinkBufferSize = sinkFrameSize * frameCount; (void)posix_memalign(&mSinkBuffer, 32, mSinkBufferSize); } - periodNs = (frameCount * 1000000000LL) / sampleRate; // 1.00 - underrunNs = (frameCount * 1750000000LL) / sampleRate; // 1.75 - overrunNs = (frameCount * 500000000LL) / sampleRate; // 0.50 - forceNs = (frameCount * 950000000LL) / sampleRate; // 0.95 - warmupNs = (frameCount * 500000000LL) / sampleRate; // 0.50 + mPeriodNs = (frameCount * 1000000000LL) / mSampleRate; // 1.00 + mUnderrunNs = (frameCount * 1750000000LL) / mSampleRate; // 1.75 + mOverrunNs = (frameCount * 500000000LL) / mSampleRate; // 0.50 + mForceNs = (frameCount * 950000000LL) / mSampleRate; // 0.95 + mWarmupNsMin = (frameCount * 750000000LL) / mSampleRate; // 0.75 + mWarmupNsMax = (frameCount * 1250000000LL) / mSampleRate; // 1.25 } else { - periodNs = 0; - underrunNs = 0; - overrunNs = 0; - forceNs = 0; - warmupNs = 0; + mPeriodNs = 0; + mUnderrunNs = 0; + mOverrunNs = 0; + mForceNs = 0; + mWarmupNsMin = 0; + mWarmupNsMax = LONG_MAX; } mMixerBufferState = UNDEFINED; #if !LOG_NDEBUG for (unsigned i = 0; i < FastMixerState::kMaxFastTracks; ++i) { - fastTrackNames[i] = -1; + mFastTrackNames[i] = -1; } #endif // we need to reconfigure all active tracks previousTrackMask = 0; - fastTracksGen = current->mFastTracksGen - 1; + mFastTracksGen = current->mFastTracksGen - 1; dumpState->mFrameCount = frameCount; } else { previousTrackMask = previous->mTrackMask; @@ -219,7 +222,7 @@ void FastMixer::onStateChange() // check for change in active track set const unsigned currentTrackMask = current->mTrackMask; dumpState->mTrackMask = currentTrackMask; - if (current->mFastTracksGen != fastTracksGen) { + if (current->mFastTracksGen != mFastTracksGen) { 
ALOG_ASSERT(mMixerBuffer != NULL); int name; @@ -230,16 +233,16 @@ void FastMixer::onStateChange() removedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; ALOG_ASSERT(fastTrack->mBufferProvider == NULL); - if (mixer != NULL) { - name = fastTrackNames[i]; + if (mMixer != NULL) { + name = mFastTrackNames[i]; ALOG_ASSERT(name >= 0); - mixer->deleteTrackName(name); + mMixer->deleteTrackName(name); } #if !LOG_NDEBUG - fastTrackNames[i] = -1; + mFastTrackNames[i] = -1; #endif // don't reset track dump state, since other side is ignoring it - generations[i] = fastTrack->mGeneration; + mGenerations[i] = fastTrack->mGeneration; } // now process added tracks @@ -249,29 +252,29 @@ void FastMixer::onStateChange() addedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; - ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); - if (mixer != NULL) { - name = mixer->getTrackName(fastTrack->mChannelMask, + ALOG_ASSERT(bufferProvider != NULL && mFastTrackNames[i] == -1); + if (mMixer != NULL) { + name = mMixer->getTrackName(fastTrack->mChannelMask, fastTrack->mFormat, AUDIO_SESSION_OUTPUT_MIX); ALOG_ASSERT(name >= 0); - fastTrackNames[i] = name; - mixer->setBufferProvider(name, bufferProvider); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, + mFastTrackNames[i] = name; + mMixer->setBufferProvider(name, bufferProvider); + mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *)mMixerBuffer); // newly allocated track names default to full scale volume - mixer->setParameter( + mMixer->setParameter( name, AudioMixer::TRACK, AudioMixer::MIXER_FORMAT, (void *)mMixerBufferFormat); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FORMAT, + mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FORMAT, (void *)(uintptr_t)fastTrack->mFormat); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, + 
mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *)(uintptr_t)fastTrack->mChannelMask); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MIXER_CHANNEL_MASK, + mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MIXER_CHANNEL_MASK, (void *)(uintptr_t)mSinkChannelMask); - mixer->enable(name); + mMixer->enable(name); } - generations[i] = fastTrack->mGeneration; + mGenerations[i] = fastTrack->mGeneration; } // finally process (potentially) modified tracks; these use the same slot @@ -281,38 +284,38 @@ void FastMixer::onStateChange() int i = __builtin_ctz(modifiedTracks); modifiedTracks &= ~(1 << i); const FastTrack* fastTrack = ¤t->mFastTracks[i]; - if (fastTrack->mGeneration != generations[i]) { + if (fastTrack->mGeneration != mGenerations[i]) { // this track was actually modified AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; ALOG_ASSERT(bufferProvider != NULL); - if (mixer != NULL) { - name = fastTrackNames[i]; + if (mMixer != NULL) { + name = mFastTrackNames[i]; ALOG_ASSERT(name >= 0); - mixer->setBufferProvider(name, bufferProvider); + mMixer->setBufferProvider(name, bufferProvider); if (fastTrack->mVolumeProvider == NULL) { float f = AudioMixer::UNITY_GAIN_FLOAT; - mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, &f); - mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, &f); + mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, &f); + mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, &f); } - mixer->setParameter(name, AudioMixer::RESAMPLE, + mMixer->setParameter(name, AudioMixer::RESAMPLE, AudioMixer::REMOVE, NULL); - mixer->setParameter( + mMixer->setParameter( name, AudioMixer::TRACK, AudioMixer::MIXER_FORMAT, (void *)mMixerBufferFormat); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FORMAT, + mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FORMAT, (void *)(uintptr_t)fastTrack->mFormat); - 
mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, + mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, (void *)(uintptr_t)fastTrack->mChannelMask); - mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MIXER_CHANNEL_MASK, + mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MIXER_CHANNEL_MASK, (void *)(uintptr_t)mSinkChannelMask); // already enabled } - generations[i] = fastTrack->mGeneration; + mGenerations[i] = fastTrack->mGeneration; } } - fastTracksGen = current->mFastTracksGen; + mFastTracksGen = current->mFastTracksGen; dumpState->mNumTracks = popcount(currentTrackMask); } @@ -320,12 +323,12 @@ void FastMixer::onStateChange() void FastMixer::onWork() { - const FastMixerState * const current = (const FastMixerState *) this->current; - FastMixerDumpState * const dumpState = (FastMixerDumpState *) this->dumpState; - const FastMixerState::Command command = this->command; + const FastMixerState * const current = (const FastMixerState *) mCurrent; + FastMixerDumpState * const dumpState = (FastMixerDumpState *) mDumpState; + const FastMixerState::Command command = mCommand; const size_t frameCount = current->mFrameCount; - if ((command & FastMixerState::MIX) && (mixer != NULL) && isWarm) { + if ((command & FastMixerState::MIX) && (mMixer != NULL) && mIsWarm) { ALOG_ASSERT(mMixerBuffer != NULL); // for each track, update volume and check for underrun unsigned currentTrackMask = current->mTrackMask; @@ -335,9 +338,9 @@ void FastMixer::onWork() const FastTrack* fastTrack = ¤t->mFastTracks[i]; // Refresh the per-track timestamp - if (timestampStatus == NO_ERROR) { + if (mTimestampStatus == NO_ERROR) { uint32_t trackFramesWrittenButNotPresented = - nativeFramesWrittenButNotPresented; + mNativeFramesWrittenButNotPresented; uint32_t trackFramesWritten = fastTrack->mBufferProvider->framesReleased(); // Can't provide an AudioTimestamp before first frame presented, // or during the brief 32-bit wraparound window @@ -345,20 
+348,20 @@ void FastMixer::onWork() AudioTimestamp perTrackTimestamp; perTrackTimestamp.mPosition = trackFramesWritten - trackFramesWrittenButNotPresented; - perTrackTimestamp.mTime = timestamp.mTime; + perTrackTimestamp.mTime = mTimestamp.mTime; fastTrack->mBufferProvider->onTimestamp(perTrackTimestamp); } } - int name = fastTrackNames[i]; + int name = mFastTrackNames[i]; ALOG_ASSERT(name >= 0); if (fastTrack->mVolumeProvider != NULL) { gain_minifloat_packed_t vlr = fastTrack->mVolumeProvider->getVolumeLR(); float vlf = float_from_gain(gain_minifloat_unpack_left(vlr)); float vrf = float_from_gain(gain_minifloat_unpack_right(vlr)); - mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, &vlf); - mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, &vrf); + mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, &vlf); + mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, &vrf); } // FIXME The current implementation of framesReady() for fast tracks // takes a tryLock, which can block @@ -379,43 +382,44 @@ void FastMixer::onWork() if (framesReady == 0) { underruns.mBitFields.mEmpty++; underruns.mBitFields.mMostRecent = UNDERRUN_EMPTY; - mixer->disable(name); + mMixer->disable(name); } else { // allow mixing partial buffer underruns.mBitFields.mPartial++; underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL; - mixer->enable(name); + mMixer->enable(name); } } else { underruns.mBitFields.mFull++; underruns.mBitFields.mMostRecent = UNDERRUN_FULL; - mixer->enable(name); + mMixer->enable(name); } ftDump->mUnderruns = underruns; ftDump->mFramesReady = framesReady; } int64_t pts; - if (outputSink == NULL || (OK != outputSink->getNextWriteTimestamp(&pts))) { + if (mOutputSink == NULL || (OK != mOutputSink->getNextWriteTimestamp(&pts))) { pts = AudioBufferProvider::kInvalidPTS; } // process() is CPU-bound - mixer->process(pts); + mMixer->process(pts); mMixerBufferState = MIXED; } else if (mMixerBufferState == MIXED) { 
mMixerBufferState = UNDEFINED; } //bool didFullWrite = false; // dumpsys could display a count of partial writes - if ((command & FastMixerState::WRITE) && (outputSink != NULL) && (mMixerBuffer != NULL)) { + if ((command & FastMixerState::WRITE) && (mOutputSink != NULL) && (mMixerBuffer != NULL)) { if (mMixerBufferState == UNDEFINED) { memset(mMixerBuffer, 0, mMixerBufferSize); mMixerBufferState = ZEROED; } + // prepare the buffer used to write to sink void *buffer = mSinkBuffer != NULL ? mSinkBuffer : mMixerBuffer; - if (format.mFormat != mMixerBufferFormat) { // sink format not the same as mixer format - memcpy_by_audio_format(buffer, format.mFormat, mMixerBuffer, mMixerBufferFormat, - frameCount * Format_channelCount(format)); + if (mFormat.mFormat != mMixerBufferFormat) { // sink format not the same as mixer format + memcpy_by_audio_format(buffer, mFormat.mFormat, mMixerBuffer, mMixerBufferFormat, + frameCount * Format_channelCount(mFormat)); } // if non-NULL, then duplicate write() to this non-blocking sink NBAIO_Sink* teeSink; @@ -426,252 +430,34 @@ void FastMixer::onWork() // but this code should be modified to handle both non-blocking and blocking sinks dumpState->mWriteSequence++; ATRACE_BEGIN("write"); - ssize_t framesWritten = outputSink->write(buffer, frameCount); + ssize_t framesWritten = mOutputSink->write(buffer, frameCount); ATRACE_END(); dumpState->mWriteSequence++; if (framesWritten >= 0) { ALOG_ASSERT((size_t) framesWritten <= frameCount); - totalNativeFramesWritten += framesWritten; - dumpState->mFramesWritten = totalNativeFramesWritten; + mTotalNativeFramesWritten += framesWritten; + dumpState->mFramesWritten = mTotalNativeFramesWritten; //if ((size_t) framesWritten == frameCount) { // didFullWrite = true; //} } else { dumpState->mWriteErrors++; } - attemptedWrite = true; + mAttemptedWrite = true; // FIXME count # of writes blocked excessively, CPU usage, etc. 
for dump - timestampStatus = outputSink->getTimestamp(timestamp); - if (timestampStatus == NO_ERROR) { - uint32_t totalNativeFramesPresented = timestamp.mPosition; - if (totalNativeFramesPresented <= totalNativeFramesWritten) { - nativeFramesWrittenButNotPresented = - totalNativeFramesWritten - totalNativeFramesPresented; + mTimestampStatus = mOutputSink->getTimestamp(mTimestamp); + if (mTimestampStatus == NO_ERROR) { + uint32_t totalNativeFramesPresented = mTimestamp.mPosition; + if (totalNativeFramesPresented <= mTotalNativeFramesWritten) { + mNativeFramesWrittenButNotPresented = + mTotalNativeFramesWritten - totalNativeFramesPresented; } else { // HAL reported that more frames were presented than were written - timestampStatus = INVALID_OPERATION; + mTimestampStatus = INVALID_OPERATION; } } } } -FastMixerDumpState::FastMixerDumpState( -#ifdef FAST_MIXER_STATISTICS - uint32_t samplingN -#endif - ) : FastThreadDumpState(), - mWriteSequence(0), mFramesWritten(0), - mNumTracks(0), mWriteErrors(0), - mSampleRate(0), mFrameCount(0), - mTrackMask(0) -{ -#ifdef FAST_MIXER_STATISTICS - increaseSamplingN(samplingN); -#endif -} - -#ifdef FAST_MIXER_STATISTICS -void FastMixerDumpState::increaseSamplingN(uint32_t samplingN) -{ - if (samplingN <= mSamplingN || samplingN > kSamplingN || roundup(samplingN) != samplingN) { - return; - } - uint32_t additional = samplingN - mSamplingN; - // sample arrays aren't accessed atomically with respect to the bounds, - // so clearing reduces chance for dumpsys to read random uninitialized samples - memset(&mMonotonicNs[mSamplingN], 0, sizeof(mMonotonicNs[0]) * additional); - memset(&mLoadNs[mSamplingN], 0, sizeof(mLoadNs[0]) * additional); -#ifdef CPU_FREQUENCY_STATISTICS - memset(&mCpukHz[mSamplingN], 0, sizeof(mCpukHz[0]) * additional); -#endif - mSamplingN = samplingN; -} -#endif - -FastMixerDumpState::~FastMixerDumpState() -{ -} - -// helper function called by qsort() -static int compare_uint32_t(const void *pa, const void *pb) -{ - 
uint32_t a = *(const uint32_t *)pa; - uint32_t b = *(const uint32_t *)pb; - if (a < b) { - return -1; - } else if (a > b) { - return 1; - } else { - return 0; - } -} - -void FastMixerDumpState::dump(int fd) const -{ - if (mCommand == FastMixerState::INITIAL) { - dprintf(fd, " FastMixer not initialized\n"); - return; - } -#define COMMAND_MAX 32 - char string[COMMAND_MAX]; - switch (mCommand) { - case FastMixerState::INITIAL: - strcpy(string, "INITIAL"); - break; - case FastMixerState::HOT_IDLE: - strcpy(string, "HOT_IDLE"); - break; - case FastMixerState::COLD_IDLE: - strcpy(string, "COLD_IDLE"); - break; - case FastMixerState::EXIT: - strcpy(string, "EXIT"); - break; - case FastMixerState::MIX: - strcpy(string, "MIX"); - break; - case FastMixerState::WRITE: - strcpy(string, "WRITE"); - break; - case FastMixerState::MIX_WRITE: - strcpy(string, "MIX_WRITE"); - break; - default: - snprintf(string, COMMAND_MAX, "%d", mCommand); - break; - } - double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) + - (mMeasuredWarmupTs.tv_nsec / 1000000.0); - double mixPeriodSec = (double) mFrameCount / (double) mSampleRate; - dprintf(fd, " FastMixer command=%s writeSequence=%u framesWritten=%u\n" - " numTracks=%u writeErrors=%u underruns=%u overruns=%u\n" - " sampleRate=%u frameCount=%zu measuredWarmup=%.3g ms, warmupCycles=%u\n" - " mixPeriod=%.2f ms\n", - string, mWriteSequence, mFramesWritten, - mNumTracks, mWriteErrors, mUnderruns, mOverruns, - mSampleRate, mFrameCount, measuredWarmupMs, mWarmupCycles, - mixPeriodSec * 1e3); -#ifdef FAST_MIXER_STATISTICS - // find the interval of valid samples - uint32_t bounds = mBounds; - uint32_t newestOpen = bounds & 0xFFFF; - uint32_t oldestClosed = bounds >> 16; - uint32_t n = (newestOpen - oldestClosed) & 0xFFFF; - if (n > mSamplingN) { - ALOGE("too many samples %u", n); - n = mSamplingN; - } - // statistics for monotonic (wall clock) time, thread raw CPU load in time, CPU clock frequency, - // and adjusted CPU load in MHz normalized 
for CPU clock frequency - CentralTendencyStatistics wall, loadNs; -#ifdef CPU_FREQUENCY_STATISTICS - CentralTendencyStatistics kHz, loadMHz; - uint32_t previousCpukHz = 0; -#endif - // Assuming a normal distribution for cycle times, three standard deviations on either side of - // the mean account for 99.73% of the population. So if we take each tail to be 1/1000 of the - // sample set, we get 99.8% combined, or close to three standard deviations. - static const uint32_t kTailDenominator = 1000; - uint32_t *tail = n >= kTailDenominator ? new uint32_t[n] : NULL; - // loop over all the samples - for (uint32_t j = 0; j < n; ++j) { - size_t i = oldestClosed++ & (mSamplingN - 1); - uint32_t wallNs = mMonotonicNs[i]; - if (tail != NULL) { - tail[j] = wallNs; - } - wall.sample(wallNs); - uint32_t sampleLoadNs = mLoadNs[i]; - loadNs.sample(sampleLoadNs); -#ifdef CPU_FREQUENCY_STATISTICS - uint32_t sampleCpukHz = mCpukHz[i]; - // skip bad kHz samples - if ((sampleCpukHz & ~0xF) != 0) { - kHz.sample(sampleCpukHz >> 4); - if (sampleCpukHz == previousCpukHz) { - double megacycles = (double) sampleLoadNs * (double) (sampleCpukHz >> 4) * 1e-12; - double adjMHz = megacycles / mixPeriodSec; // _not_ wallNs * 1e9 - loadMHz.sample(adjMHz); - } - } - previousCpukHz = sampleCpukHz; -#endif - } - if (n) { - dprintf(fd, " Simple moving statistics over last %.1f seconds:\n", - wall.n() * mixPeriodSec); - dprintf(fd, " wall clock time in ms per mix cycle:\n" - " mean=%.2f min=%.2f max=%.2f stddev=%.2f\n", - wall.mean()*1e-6, wall.minimum()*1e-6, wall.maximum()*1e-6, - wall.stddev()*1e-6); - dprintf(fd, " raw CPU load in us per mix cycle:\n" - " mean=%.0f min=%.0f max=%.0f stddev=%.0f\n", - loadNs.mean()*1e-3, loadNs.minimum()*1e-3, loadNs.maximum()*1e-3, - loadNs.stddev()*1e-3); - } else { - dprintf(fd, " No FastMixer statistics available currently\n"); - } -#ifdef CPU_FREQUENCY_STATISTICS - dprintf(fd, " CPU clock frequency in MHz:\n" - " mean=%.0f min=%.0f max=%.0f stddev=%.0f\n", - 
kHz.mean()*1e-3, kHz.minimum()*1e-3, kHz.maximum()*1e-3, kHz.stddev()*1e-3); - dprintf(fd, " adjusted CPU load in MHz (i.e. normalized for CPU clock frequency):\n" - " mean=%.1f min=%.1f max=%.1f stddev=%.1f\n", - loadMHz.mean(), loadMHz.minimum(), loadMHz.maximum(), loadMHz.stddev()); -#endif - if (tail != NULL) { - qsort(tail, n, sizeof(uint32_t), compare_uint32_t); - // assume same number of tail samples on each side, left and right - uint32_t count = n / kTailDenominator; - CentralTendencyStatistics left, right; - for (uint32_t i = 0; i < count; ++i) { - left.sample(tail[i]); - right.sample(tail[n - (i + 1)]); - } - dprintf(fd, " Distribution of mix cycle times in ms for the tails (> ~3 stddev outliers):\n" - " left tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n" - " right tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n", - left.mean()*1e-6, left.minimum()*1e-6, left.maximum()*1e-6, left.stddev()*1e-6, - right.mean()*1e-6, right.minimum()*1e-6, right.maximum()*1e-6, - right.stddev()*1e-6); - delete[] tail; - } -#endif - // The active track mask and track states are updated non-atomically. - // So if we relied on isActive to decide whether to display, - // then we might display an obsolete track or omit an active track. - // Instead we always display all tracks, with an indication - // of whether we think the track is active. 
- uint32_t trackMask = mTrackMask; - dprintf(fd, " Fast tracks: kMaxFastTracks=%u activeMask=%#x\n", - FastMixerState::kMaxFastTracks, trackMask); - dprintf(fd, " Index Active Full Partial Empty Recent Ready\n"); - for (uint32_t i = 0; i < FastMixerState::kMaxFastTracks; ++i, trackMask >>= 1) { - bool isActive = trackMask & 1; - const FastTrackDump *ftDump = &mTracks[i]; - const FastTrackUnderruns& underruns = ftDump->mUnderruns; - const char *mostRecent; - switch (underruns.mBitFields.mMostRecent) { - case UNDERRUN_FULL: - mostRecent = "full"; - break; - case UNDERRUN_PARTIAL: - mostRecent = "partial"; - break; - case UNDERRUN_EMPTY: - mostRecent = "empty"; - break; - default: - mostRecent = "?"; - break; - } - dprintf(fd, " %5u %6s %4u %7u %5u %7s %5zu\n", i, isActive ? "yes" : "no", - (underruns.mBitFields.mFull) & UNDERRUN_MASK, - (underruns.mBitFields.mPartial) & UNDERRUN_MASK, - (underruns.mBitFields.mEmpty) & UNDERRUN_MASK, - mostRecent, ftDump->mFramesReady); - } -} - } // namespace android diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/FastMixer.h index fde8c2b..06a68fb 100644 --- a/services/audioflinger/FastMixer.h +++ b/services/audioflinger/FastMixer.h @@ -17,11 +17,7 @@ #ifndef ANDROID_AUDIO_FAST_MIXER_H #define ANDROID_AUDIO_FAST_MIXER_H -#include <linux/futex.h> -#include <sys/syscall.h> -#include <utils/Debug.h> #include "FastThread.h" -#include <utils/Thread.h> #include "StateQueue.h" #include "FastMixerState.h" #include "FastMixerDumpState.h" @@ -52,36 +48,39 @@ private: virtual void onStateChange(); virtual void onWork(); - // FIXME these former local variables need comments and to be renamed to have "m" prefix - static const FastMixerState initial; - FastMixerState preIdle; // copy of state before we went into idle - long slopNs; // accumulated time we've woken up too early (> 0) or too late (< 0) - int fastTrackNames[FastMixerState::kMaxFastTracks]; // handles used by mixer to identify tracks - int 
generations[FastMixerState::kMaxFastTracks]; // last observed mFastTracks[i].mGeneration - NBAIO_Sink *outputSink; - int outputSinkGen; - AudioMixer* mixer; + // FIXME these former local variables need comments + static const FastMixerState sInitial; + + FastMixerState mPreIdle; // copy of state before we went into idle + long mSlopNs; // accumulated time we've woken up too early (> 0) or too late (< 0) + int mFastTrackNames[FastMixerState::kMaxFastTracks]; + // handles used by mixer to identify tracks + int mGenerations[FastMixerState::kMaxFastTracks]; + // last observed mFastTracks[i].mGeneration + NBAIO_Sink* mOutputSink; + int mOutputSinkGen; + AudioMixer* mMixer; // mSinkBuffer audio format is stored in format.mFormat. - void* mSinkBuffer; // used for mixer output format translation + void* mSinkBuffer; // used for mixer output format translation // if sink format is different than mixer output. - size_t mSinkBufferSize; - uint32_t mSinkChannelCount; + size_t mSinkBufferSize; + uint32_t mSinkChannelCount; audio_channel_mask_t mSinkChannelMask; - void* mMixerBuffer; // mixer output buffer. - size_t mMixerBufferSize; - audio_format_t mMixerBufferFormat; // mixer output format: AUDIO_FORMAT_PCM_(16_BIT|FLOAT). + void* mMixerBuffer; // mixer output buffer. + size_t mMixerBufferSize; + audio_format_t mMixerBufferFormat; // mixer output format: AUDIO_FORMAT_PCM_(16_BIT|FLOAT). 
enum {UNDEFINED, MIXED, ZEROED} mMixerBufferState; - NBAIO_Format format; - unsigned sampleRate; - int fastTracksGen; - FastMixerDumpState dummyDumpState; - uint32_t totalNativeFramesWritten; // copied to dumpState->mFramesWritten + NBAIO_Format mFormat; + unsigned mSampleRate; + int mFastTracksGen; + FastMixerDumpState mDummyFastMixerDumpState; + uint32_t mTotalNativeFramesWritten; // copied to dumpState->mFramesWritten // next 2 fields are valid only when timestampStatus == NO_ERROR - AudioTimestamp timestamp; - uint32_t nativeFramesWrittenButNotPresented; + AudioTimestamp mTimestamp; + uint32_t mNativeFramesWrittenButNotPresented; }; // class FastMixer diff --git a/services/audioflinger/FastMixerDumpState.cpp b/services/audioflinger/FastMixerDumpState.cpp new file mode 100644 index 0000000..b10942b --- /dev/null +++ b/services/audioflinger/FastMixerDumpState.cpp @@ -0,0 +1,199 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "FastMixerDumpState" +//#define LOG_NDEBUG 0 + +#include "Configuration.h" +#ifdef FAST_THREAD_STATISTICS +#include <cpustats/CentralTendencyStatistics.h> +#ifdef CPU_FREQUENCY_STATISTICS +#include <cpustats/ThreadCpuUsage.h> +#endif +#endif +#include <utils/Debug.h> +#include <utils/Log.h> +#include "FastMixerDumpState.h" + +namespace android { + +FastMixerDumpState::FastMixerDumpState() : FastThreadDumpState(), + mWriteSequence(0), mFramesWritten(0), + mNumTracks(0), mWriteErrors(0), + mSampleRate(0), mFrameCount(0), + mTrackMask(0) +{ +} + +FastMixerDumpState::~FastMixerDumpState() +{ +} + +// helper function called by qsort() +static int compare_uint32_t(const void *pa, const void *pb) +{ + uint32_t a = *(const uint32_t *)pa; + uint32_t b = *(const uint32_t *)pb; + if (a < b) { + return -1; + } else if (a > b) { + return 1; + } else { + return 0; + } +} + +void FastMixerDumpState::dump(int fd) const +{ + if (mCommand == FastMixerState::INITIAL) { + dprintf(fd, " FastMixer not initialized\n"); + return; + } + double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) + + (mMeasuredWarmupTs.tv_nsec / 1000000.0); + double mixPeriodSec = (double) mFrameCount / mSampleRate; + dprintf(fd, " FastMixer command=%s writeSequence=%u framesWritten=%u\n" + " numTracks=%u writeErrors=%u underruns=%u overruns=%u\n" + " sampleRate=%u frameCount=%zu measuredWarmup=%.3g ms, warmupCycles=%u\n" + " mixPeriod=%.2f ms\n", + FastMixerState::commandToString(mCommand), mWriteSequence, mFramesWritten, + mNumTracks, mWriteErrors, mUnderruns, mOverruns, + mSampleRate, mFrameCount, measuredWarmupMs, mWarmupCycles, + mixPeriodSec * 1e3); +#ifdef FAST_THREAD_STATISTICS + // find the interval of valid samples + uint32_t bounds = mBounds; + uint32_t newestOpen = bounds & 0xFFFF; + uint32_t oldestClosed = bounds >> 16; + uint32_t n = (newestOpen - oldestClosed) & 0xFFFF; + if (n > mSamplingN) { + ALOGE("too many samples %u", n); + n = mSamplingN; + } + // statistics 
for monotonic (wall clock) time, thread raw CPU load in time, CPU clock frequency, + // and adjusted CPU load in MHz normalized for CPU clock frequency + CentralTendencyStatistics wall, loadNs; +#ifdef CPU_FREQUENCY_STATISTICS + CentralTendencyStatistics kHz, loadMHz; + uint32_t previousCpukHz = 0; +#endif + // Assuming a normal distribution for cycle times, three standard deviations on either side of + // the mean account for 99.73% of the population. So if we take each tail to be 1/1000 of the + // sample set, we get 99.8% combined, or close to three standard deviations. + static const uint32_t kTailDenominator = 1000; + uint32_t *tail = n >= kTailDenominator ? new uint32_t[n] : NULL; + // loop over all the samples + for (uint32_t j = 0; j < n; ++j) { + size_t i = oldestClosed++ & (mSamplingN - 1); + uint32_t wallNs = mMonotonicNs[i]; + if (tail != NULL) { + tail[j] = wallNs; + } + wall.sample(wallNs); + uint32_t sampleLoadNs = mLoadNs[i]; + loadNs.sample(sampleLoadNs); +#ifdef CPU_FREQUENCY_STATISTICS + uint32_t sampleCpukHz = mCpukHz[i]; + // skip bad kHz samples + if ((sampleCpukHz & ~0xF) != 0) { + kHz.sample(sampleCpukHz >> 4); + if (sampleCpukHz == previousCpukHz) { + double megacycles = (double) sampleLoadNs * (double) (sampleCpukHz >> 4) * 1e-12; + double adjMHz = megacycles / mixPeriodSec; // _not_ wallNs * 1e9 + loadMHz.sample(adjMHz); + } + } + previousCpukHz = sampleCpukHz; +#endif + } + if (n) { + dprintf(fd, " Simple moving statistics over last %.1f seconds:\n", + wall.n() * mixPeriodSec); + dprintf(fd, " wall clock time in ms per mix cycle:\n" + " mean=%.2f min=%.2f max=%.2f stddev=%.2f\n", + wall.mean()*1e-6, wall.minimum()*1e-6, wall.maximum()*1e-6, + wall.stddev()*1e-6); + dprintf(fd, " raw CPU load in us per mix cycle:\n" + " mean=%.0f min=%.0f max=%.0f stddev=%.0f\n", + loadNs.mean()*1e-3, loadNs.minimum()*1e-3, loadNs.maximum()*1e-3, + loadNs.stddev()*1e-3); + } else { + dprintf(fd, " No FastMixer statistics available currently\n"); + } 
+#ifdef CPU_FREQUENCY_STATISTICS + dprintf(fd, " CPU clock frequency in MHz:\n" + " mean=%.0f min=%.0f max=%.0f stddev=%.0f\n", + kHz.mean()*1e-3, kHz.minimum()*1e-3, kHz.maximum()*1e-3, kHz.stddev()*1e-3); + dprintf(fd, " adjusted CPU load in MHz (i.e. normalized for CPU clock frequency):\n" + " mean=%.1f min=%.1f max=%.1f stddev=%.1f\n", + loadMHz.mean(), loadMHz.minimum(), loadMHz.maximum(), loadMHz.stddev()); +#endif + if (tail != NULL) { + qsort(tail, n, sizeof(uint32_t), compare_uint32_t); + // assume same number of tail samples on each side, left and right + uint32_t count = n / kTailDenominator; + CentralTendencyStatistics left, right; + for (uint32_t i = 0; i < count; ++i) { + left.sample(tail[i]); + right.sample(tail[n - (i + 1)]); + } + dprintf(fd, " Distribution of mix cycle times in ms for the tails " + "(> ~3 stddev outliers):\n" + " left tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n" + " right tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n", + left.mean()*1e-6, left.minimum()*1e-6, left.maximum()*1e-6, left.stddev()*1e-6, + right.mean()*1e-6, right.minimum()*1e-6, right.maximum()*1e-6, + right.stddev()*1e-6); + delete[] tail; + } +#endif + // The active track mask and track states are updated non-atomically. + // So if we relied on isActive to decide whether to display, + // then we might display an obsolete track or omit an active track. + // Instead we always display all tracks, with an indication + // of whether we think the track is active. 
+ uint32_t trackMask = mTrackMask; + dprintf(fd, " Fast tracks: kMaxFastTracks=%u activeMask=%#x\n", + FastMixerState::kMaxFastTracks, trackMask); + dprintf(fd, " Index Active Full Partial Empty Recent Ready\n"); + for (uint32_t i = 0; i < FastMixerState::kMaxFastTracks; ++i, trackMask >>= 1) { + bool isActive = trackMask & 1; + const FastTrackDump *ftDump = &mTracks[i]; + const FastTrackUnderruns& underruns = ftDump->mUnderruns; + const char *mostRecent; + switch (underruns.mBitFields.mMostRecent) { + case UNDERRUN_FULL: + mostRecent = "full"; + break; + case UNDERRUN_PARTIAL: + mostRecent = "partial"; + break; + case UNDERRUN_EMPTY: + mostRecent = "empty"; + break; + default: + mostRecent = "?"; + break; + } + dprintf(fd, " %5u %6s %4u %7u %5u %7s %5zu\n", i, isActive ? "yes" : "no", + (underruns.mBitFields.mFull) & UNDERRUN_MASK, + (underruns.mBitFields.mPartial) & UNDERRUN_MASK, + (underruns.mBitFields.mEmpty) & UNDERRUN_MASK, + mostRecent, ftDump->mFramesReady); + } +} + +} // android diff --git a/services/audioflinger/FastMixerDumpState.h b/services/audioflinger/FastMixerDumpState.h index 6a1e464..ac15e7c 100644 --- a/services/audioflinger/FastMixerDumpState.h +++ b/services/audioflinger/FastMixerDumpState.h @@ -17,7 +17,10 @@ #ifndef ANDROID_AUDIO_FAST_MIXER_DUMP_STATE_H #define ANDROID_AUDIO_FAST_MIXER_DUMP_STATE_H +#include <stdint.h> #include "Configuration.h" +#include "FastThreadDumpState.h" +#include "FastMixerState.h" namespace android { @@ -52,22 +55,12 @@ private: struct FastTrackDump { FastTrackDump() : mFramesReady(0) { } /*virtual*/ ~FastTrackDump() { } - FastTrackUnderruns mUnderruns; - size_t mFramesReady; // most recent value only; no long-term statistics kept + FastTrackUnderruns mUnderruns; + size_t mFramesReady; // most recent value only; no long-term statistics kept }; -// The FastMixerDumpState keeps a cache of FastMixer statistics that can be logged by dumpsys. -// Each individual native word-sized field is accessed atomically. 
But the -// overall structure is non-atomic, that is there may be an inconsistency between fields. -// No barriers or locks are used for either writing or reading. -// Only POD types are permitted, and the contents shouldn't be trusted (i.e. do range checks). -// It has a different lifetime than the FastMixer, and so it can't be a member of FastMixer. struct FastMixerDumpState : FastThreadDumpState { - FastMixerDumpState( -#ifdef FAST_MIXER_STATISTICS - uint32_t samplingN = kSamplingNforLowRamDevice -#endif - ); + FastMixerDumpState(); /*virtual*/ ~FastMixerDumpState(); void dump(int fd) const; // should only be called on a stable copy, not the original @@ -80,14 +73,6 @@ struct FastMixerDumpState : FastThreadDumpState { size_t mFrameCount; uint32_t mTrackMask; // mask of active tracks FastTrackDump mTracks[FastMixerState::kMaxFastTracks]; - -#ifdef FAST_MIXER_STATISTICS - // Compile-time constant for a "low RAM device", must be a power of 2 <= kSamplingN. - // This value was chosen such that each array uses 1 small page (4 Kbytes). 
- static const uint32_t kSamplingNforLowRamDevice = 0x400; - // Increase sampling window after construction, must be a power of 2 <= kSamplingN - void increaseSamplingN(uint32_t samplingN); -#endif }; } // android diff --git a/services/audioflinger/FastMixerState.cpp b/services/audioflinger/FastMixerState.cpp index 3aa8dad..a8c2634 100644 --- a/services/audioflinger/FastMixerState.cpp +++ b/services/audioflinger/FastMixerState.cpp @@ -39,4 +39,19 @@ FastMixerState::~FastMixerState() { } +// static +const char *FastMixerState::commandToString(Command command) +{ + const char *str = FastThreadState::commandToString(command); + if (str != NULL) { + return str; + } + switch (command) { + case FastMixerState::MIX: return "MIX"; + case FastMixerState::WRITE: return "WRITE"; + case FastMixerState::MIX_WRITE: return "MIX_WRITE"; + } + LOG_ALWAYS_FATAL("%s", __func__); +} + } // namespace android diff --git a/services/audioflinger/FastMixerState.h b/services/audioflinger/FastMixerState.h index 661c9ca..916514f 100644 --- a/services/audioflinger/FastMixerState.h +++ b/services/audioflinger/FastMixerState.h @@ -73,6 +73,9 @@ struct FastMixerState : FastThreadState { // This might be a one-time configuration rather than per-state NBAIO_Sink* mTeeSink; // if non-NULL, then duplicate write()s to this non-blocking sink + + // never returns NULL; asserts if command is invalid + static const char *commandToString(Command command); }; // struct FastMixerState } // namespace android diff --git a/services/audioflinger/FastThread.cpp b/services/audioflinger/FastThread.cpp index 216dace..5ca579b 100644 --- a/services/audioflinger/FastThread.cpp +++ b/services/audioflinger/FastThread.cpp @@ -25,54 +25,58 @@ #include <utils/Log.h> #include <utils/Trace.h> #include "FastThread.h" +#include "FastThreadDumpState.h" #define FAST_DEFAULT_NS 999999999L // ~1 sec: default time to sleep #define FAST_HOT_IDLE_NS 1000000L // 1 ms: time to sleep while hot idling -#define MIN_WARMUP_CYCLES 2 // 
minimum number of loop cycles to wait for warmup +#define MIN_WARMUP_CYCLES 2 // minimum number of consecutive in-range loop cycles + // to wait for warmup #define MAX_WARMUP_CYCLES 10 // maximum number of loop cycles to wait for warmup namespace android { FastThread::FastThread() : Thread(false /*canCallJava*/), - // re-initialized to &initial by subclass constructor - previous(NULL), current(NULL), - /* oldTs({0, 0}), */ - oldTsValid(false), - sleepNs(-1), - periodNs(0), - underrunNs(0), - overrunNs(0), - forceNs(0), - warmupNs(0), - // re-initialized to &dummyDumpState by subclass constructor + // re-initialized to &sInitial by subclass constructor + mPrevious(NULL), mCurrent(NULL), + /* mOldTs({0, 0}), */ + mOldTsValid(false), + mSleepNs(-1), + mPeriodNs(0), + mUnderrunNs(0), + mOverrunNs(0), + mForceNs(0), + mWarmupNsMin(0), + mWarmupNsMax(LONG_MAX), + // re-initialized to &mDummySubclassDumpState by subclass constructor mDummyDumpState(NULL), - dumpState(NULL), - ignoreNextOverrun(true), -#ifdef FAST_MIXER_STATISTICS - // oldLoad - oldLoadValid(false), - bounds(0), - full(false), - // tcu + mDumpState(NULL), + mIgnoreNextOverrun(true), +#ifdef FAST_THREAD_STATISTICS + // mOldLoad + mOldLoadValid(false), + mBounds(0), + mFull(false), + // mTcu #endif - coldGen(0), - isWarm(false), - /* measuredWarmupTs({0, 0}), */ - warmupCycles(0), - // dummyLogWriter - logWriter(&dummyLogWriter), - timestampStatus(INVALID_OPERATION), + mColdGen(0), + mIsWarm(false), + /* mMeasuredWarmupTs({0, 0}), */ + mWarmupCycles(0), + mWarmupConsecutiveInRangeCycles(0), + // mDummyLogWriter + mLogWriter(&mDummyLogWriter), + mTimestampStatus(INVALID_OPERATION), - command(FastThreadState::INITIAL), + mCommand(FastThreadState::INITIAL), #if 0 frameCount(0), #endif - attemptedWrite(false) + mAttemptedWrite(false) { - oldTs.tv_sec = 0; - oldTs.tv_nsec = 0; - measuredWarmupTs.tv_sec = 0; - measuredWarmupTs.tv_nsec = 0; + mOldTs.tv_sec = 0; + mOldTs.tv_nsec = 0; + mMeasuredWarmupTs.tv_sec = 0; 
+ mMeasuredWarmupTs.tv_nsec = 0; } FastThread::~FastThread() @@ -84,34 +88,34 @@ bool FastThread::threadLoop() for (;;) { // either nanosleep, sched_yield, or busy wait - if (sleepNs >= 0) { - if (sleepNs > 0) { - ALOG_ASSERT(sleepNs < 1000000000); - const struct timespec req = {0, sleepNs}; + if (mSleepNs >= 0) { + if (mSleepNs > 0) { + ALOG_ASSERT(mSleepNs < 1000000000); + const struct timespec req = {0, mSleepNs}; nanosleep(&req, NULL); } else { sched_yield(); } } // default to long sleep for next cycle - sleepNs = FAST_DEFAULT_NS; + mSleepNs = FAST_DEFAULT_NS; // poll for state change const FastThreadState *next = poll(); if (next == NULL) { // continue to use the default initial state until a real state is available - // FIXME &initial not available, should save address earlier - //ALOG_ASSERT(current == &initial && previous == &initial); - next = current; + // FIXME &sInitial not available, should save address earlier + //ALOG_ASSERT(mCurrent == &sInitial && previous == &sInitial); + next = mCurrent; } - command = next->mCommand; - if (next != current) { + mCommand = next->mCommand; + if (next != mCurrent) { // As soon as possible of learning of a new dump area, start using it - dumpState = next->mDumpState != NULL ? next->mDumpState : mDummyDumpState; - logWriter = next->mNBLogWriter != NULL ? next->mNBLogWriter : &dummyLogWriter; - setLog(logWriter); + mDumpState = next->mDumpState != NULL ? next->mDumpState : mDummyDumpState; + mLogWriter = next->mNBLogWriter != NULL ? next->mNBLogWriter : &mDummyLogWriter; + setLog(mLogWriter); // We want to always have a valid reference to the previous (non-idle) state. // However, the state queue only guarantees access to current and previous states. 
@@ -122,37 +126,38 @@ bool FastThread::threadLoop() // non-idle -> idle update previous from copy of current // idle -> idle don't update previous // idle -> non-idle don't update previous - if (!(current->mCommand & FastThreadState::IDLE)) { - if (command & FastThreadState::IDLE) { + if (!(mCurrent->mCommand & FastThreadState::IDLE)) { + if (mCommand & FastThreadState::IDLE) { onIdle(); - oldTsValid = false; -#ifdef FAST_MIXER_STATISTICS - oldLoadValid = false; + mOldTsValid = false; +#ifdef FAST_THREAD_STATISTICS + mOldLoadValid = false; #endif - ignoreNextOverrun = true; + mIgnoreNextOverrun = true; } - previous = current; + mPrevious = mCurrent; } - current = next; + mCurrent = next; } #if !LOG_NDEBUG next = NULL; // not referenced again #endif - dumpState->mCommand = command; + mDumpState->mCommand = mCommand; + // FIXME what does this comment mean? // << current, previous, command, dumpState >> - switch (command) { + switch (mCommand) { case FastThreadState::INITIAL: case FastThreadState::HOT_IDLE: - sleepNs = FAST_HOT_IDLE_NS; + mSleepNs = FAST_HOT_IDLE_NS; continue; case FastThreadState::COLD_IDLE: // only perform a cold idle command once // FIXME consider checking previous state and only perform if previous != COLD_IDLE - if (current->mColdGen != coldGen) { - int32_t *coldFutexAddr = current->mColdFutexAddr; + if (mCurrent->mColdGen != mColdGen) { + int32_t *coldFutexAddr = mCurrent->mColdFutexAddr; ALOG_ASSERT(coldFutexAddr != NULL); int32_t old = android_atomic_dec(coldFutexAddr); if (old <= 0) { @@ -164,41 +169,42 @@ bool FastThread::threadLoop() } // This may be overly conservative; there could be times that the normal mixer // requests such a brief cold idle that it doesn't require resetting this flag. 
- isWarm = false; - measuredWarmupTs.tv_sec = 0; - measuredWarmupTs.tv_nsec = 0; - warmupCycles = 0; - sleepNs = -1; - coldGen = current->mColdGen; -#ifdef FAST_MIXER_STATISTICS - bounds = 0; - full = false; + mIsWarm = false; + mMeasuredWarmupTs.tv_sec = 0; + mMeasuredWarmupTs.tv_nsec = 0; + mWarmupCycles = 0; + mWarmupConsecutiveInRangeCycles = 0; + mSleepNs = -1; + mColdGen = mCurrent->mColdGen; +#ifdef FAST_THREAD_STATISTICS + mBounds = 0; + mFull = false; #endif - oldTsValid = !clock_gettime(CLOCK_MONOTONIC, &oldTs); - timestampStatus = INVALID_OPERATION; + mOldTsValid = !clock_gettime(CLOCK_MONOTONIC, &mOldTs); + mTimestampStatus = INVALID_OPERATION; } else { - sleepNs = FAST_HOT_IDLE_NS; + mSleepNs = FAST_HOT_IDLE_NS; } continue; case FastThreadState::EXIT: onExit(); return false; default: - LOG_ALWAYS_FATAL_IF(!isSubClassCommand(command)); + LOG_ALWAYS_FATAL_IF(!isSubClassCommand(mCommand)); break; } // there is a non-idle state available to us; did the state change? - if (current != previous) { + if (mCurrent != mPrevious) { onStateChange(); #if 1 // FIXME shouldn't need this // only process state change once - previous = current; + mPrevious = mCurrent; #endif } // do work using current state here - attemptedWrite = false; + mAttemptedWrite = false; onWork(); // To be exactly periodic, compute the next sleep time based on current time. 
@@ -207,13 +213,13 @@ bool FastThread::threadLoop() struct timespec newTs; int rc = clock_gettime(CLOCK_MONOTONIC, &newTs); if (rc == 0) { - //logWriter->logTimestamp(newTs); - if (oldTsValid) { - time_t sec = newTs.tv_sec - oldTs.tv_sec; - long nsec = newTs.tv_nsec - oldTs.tv_nsec; + //mLogWriter->logTimestamp(newTs); + if (mOldTsValid) { + time_t sec = newTs.tv_sec - mOldTs.tv_sec; + long nsec = newTs.tv_nsec - mOldTs.tv_nsec; ALOGE_IF(sec < 0 || (sec == 0 && nsec < 0), "clock_gettime(CLOCK_MONOTONIC) failed: was %ld.%09ld but now %ld.%09ld", - oldTs.tv_sec, oldTs.tv_nsec, newTs.tv_sec, newTs.tv_nsec); + mOldTs.tv_sec, mOldTs.tv_nsec, newTs.tv_sec, newTs.tv_nsec); if (nsec < 0) { --sec; nsec += 1000000000; @@ -221,62 +227,70 @@ bool FastThread::threadLoop() // To avoid an initial underrun on fast tracks after exiting standby, // do not start pulling data from tracks and mixing until warmup is complete. // Warmup is considered complete after the earlier of: - // MIN_WARMUP_CYCLES write() attempts and last one blocks for at least warmupNs + // MIN_WARMUP_CYCLES consecutive in-range write() attempts, + // where "in-range" means mWarmupNsMin <= cycle time <= mWarmupNsMax // MAX_WARMUP_CYCLES write() attempts. // This is overly conservative, but to get better accuracy requires a new HAL API. 
- if (!isWarm && attemptedWrite) { - measuredWarmupTs.tv_sec += sec; - measuredWarmupTs.tv_nsec += nsec; - if (measuredWarmupTs.tv_nsec >= 1000000000) { - measuredWarmupTs.tv_sec++; - measuredWarmupTs.tv_nsec -= 1000000000; + if (!mIsWarm && mAttemptedWrite) { + mMeasuredWarmupTs.tv_sec += sec; + mMeasuredWarmupTs.tv_nsec += nsec; + if (mMeasuredWarmupTs.tv_nsec >= 1000000000) { + mMeasuredWarmupTs.tv_sec++; + mMeasuredWarmupTs.tv_nsec -= 1000000000; } - ++warmupCycles; - if ((nsec > warmupNs && warmupCycles >= MIN_WARMUP_CYCLES) || - (warmupCycles >= MAX_WARMUP_CYCLES)) { - isWarm = true; - dumpState->mMeasuredWarmupTs = measuredWarmupTs; - dumpState->mWarmupCycles = warmupCycles; + ++mWarmupCycles; + if (mWarmupNsMin <= nsec && nsec <= mWarmupNsMax) { + ALOGV("warmup cycle %d in range: %.03f ms", mWarmupCycles, nsec * 1e-9); + ++mWarmupConsecutiveInRangeCycles; + } else { + ALOGV("warmup cycle %d out of range: %.03f ms", mWarmupCycles, nsec * 1e-9); + mWarmupConsecutiveInRangeCycles = 0; + } + if ((mWarmupConsecutiveInRangeCycles >= MIN_WARMUP_CYCLES) || + (mWarmupCycles >= MAX_WARMUP_CYCLES)) { + mIsWarm = true; + mDumpState->mMeasuredWarmupTs = mMeasuredWarmupTs; + mDumpState->mWarmupCycles = mWarmupCycles; } } - sleepNs = -1; - if (isWarm) { - if (sec > 0 || nsec > underrunNs) { + mSleepNs = -1; + if (mIsWarm) { + if (sec > 0 || nsec > mUnderrunNs) { ATRACE_NAME("underrun"); // FIXME only log occasionally ALOGV("underrun: time since last cycle %d.%03ld sec", (int) sec, nsec / 1000000L); - dumpState->mUnderruns++; - ignoreNextOverrun = true; - } else if (nsec < overrunNs) { - if (ignoreNextOverrun) { - ignoreNextOverrun = false; + mDumpState->mUnderruns++; + mIgnoreNextOverrun = true; + } else if (nsec < mOverrunNs) { + if (mIgnoreNextOverrun) { + mIgnoreNextOverrun = false; } else { // FIXME only log occasionally ALOGV("overrun: time since last cycle %d.%03ld sec", (int) sec, nsec / 1000000L); - dumpState->mOverruns++; + mDumpState->mOverruns++; } // This 
forces a minimum cycle time. It: // - compensates for an audio HAL with jitter due to sample rate conversion // - works with a variable buffer depth audio HAL that never pulls at a - // rate < than overrunNs per buffer. + // rate < than mOverrunNs per buffer. // - recovers from overrun immediately after underrun // It doesn't work with a non-blocking audio HAL. - sleepNs = forceNs - nsec; + mSleepNs = mForceNs - nsec; } else { - ignoreNextOverrun = false; + mIgnoreNextOverrun = false; } } -#ifdef FAST_MIXER_STATISTICS - if (isWarm) { +#ifdef FAST_THREAD_STATISTICS + if (mIsWarm) { // advance the FIFO queue bounds - size_t i = bounds & (dumpState->mSamplingN - 1); - bounds = (bounds & 0xFFFF0000) | ((bounds + 1) & 0xFFFF); - if (full) { - bounds += 0x10000; - } else if (!(bounds & (dumpState->mSamplingN - 1))) { - full = true; + size_t i = mBounds & (mDumpState->mSamplingN - 1); + mBounds = (mBounds & 0xFFFF0000) | ((mBounds + 1) & 0xFFFF); + if (mFull) { + mBounds += 0x10000; + } else if (!(mBounds & (mDumpState->mSamplingN - 1))) { + mFull = true; } // compute the delta value of clock_gettime(CLOCK_MONOTONIC) uint32_t monotonicNs = nsec; @@ -288,9 +302,9 @@ bool FastThread::threadLoop() struct timespec newLoad; rc = clock_gettime(CLOCK_THREAD_CPUTIME_ID, &newLoad); if (rc == 0) { - if (oldLoadValid) { - sec = newLoad.tv_sec - oldLoad.tv_sec; - nsec = newLoad.tv_nsec - oldLoad.tv_nsec; + if (mOldLoadValid) { + sec = newLoad.tv_sec - mOldLoad.tv_sec; + nsec = newLoad.tv_nsec - mOldLoad.tv_nsec; if (nsec < 0) { --sec; nsec += 1000000000; @@ -301,42 +315,42 @@ bool FastThread::threadLoop() } } else { // first time through the loop - oldLoadValid = true; + mOldLoadValid = true; } - oldLoad = newLoad; + mOldLoad = newLoad; } #ifdef CPU_FREQUENCY_STATISTICS // get the absolute value of CPU clock frequency in kHz int cpuNum = sched_getcpu(); - uint32_t kHz = tcu.getCpukHz(cpuNum); + uint32_t kHz = mTcu.getCpukHz(cpuNum); kHz = (kHz << 4) | (cpuNum & 0xF); #endif // save 
values in FIFO queues for dumpsys // these stores #1, #2, #3 are not atomic with respect to each other, // or with respect to store #4 below - dumpState->mMonotonicNs[i] = monotonicNs; - dumpState->mLoadNs[i] = loadNs; + mDumpState->mMonotonicNs[i] = monotonicNs; + mDumpState->mLoadNs[i] = loadNs; #ifdef CPU_FREQUENCY_STATISTICS - dumpState->mCpukHz[i] = kHz; + mDumpState->mCpukHz[i] = kHz; #endif // this store #4 is not atomic with respect to stores #1, #2, #3 above, but // the newest open & oldest closed halves are atomic with respect to each other - dumpState->mBounds = bounds; + mDumpState->mBounds = mBounds; ATRACE_INT("cycle_ms", monotonicNs / 1000000); ATRACE_INT("load_us", loadNs / 1000); } #endif } else { // first time through the loop - oldTsValid = true; - sleepNs = periodNs; - ignoreNextOverrun = true; + mOldTsValid = true; + mSleepNs = mPeriodNs; + mIgnoreNextOverrun = true; } - oldTs = newTs; + mOldTs = newTs; } else { // monotonic clock is broken - oldTsValid = false; - sleepNs = periodNs; + mOldTsValid = false; + mSleepNs = mPeriodNs; } } // for (;;) diff --git a/services/audioflinger/FastThread.h b/services/audioflinger/FastThread.h index 1330334..2efb6de 100644 --- a/services/audioflinger/FastThread.h +++ b/services/audioflinger/FastThread.h @@ -48,42 +48,45 @@ protected: virtual void onStateChange() = 0; virtual void onWork() = 0; - // FIXME these former local variables need comments and to be renamed to have an "m" prefix - const FastThreadState *previous; - const FastThreadState *current; - struct timespec oldTs; - bool oldTsValid; - long sleepNs; // -1: busy wait, 0: sched_yield, > 0: nanosleep - long periodNs; // expected period; the time required to render one mix buffer - long underrunNs; // underrun likely when write cycle is greater than this value - long overrunNs; // overrun likely when write cycle is less than this value - long forceNs; // if overrun detected, force the write cycle to take this much time - long warmupNs; // warmup 
complete when write cycle is greater than to this value - FastThreadDumpState *mDummyDumpState; - FastThreadDumpState *dumpState; - bool ignoreNextOverrun; // used to ignore initial overrun and first after an underrun -#ifdef FAST_MIXER_STATISTICS - struct timespec oldLoad; // previous value of clock_gettime(CLOCK_THREAD_CPUTIME_ID) - bool oldLoadValid; // whether oldLoad is valid - uint32_t bounds; - bool full; // whether we have collected at least mSamplingN samples + // FIXME these former local variables need comments + const FastThreadState* mPrevious; + const FastThreadState* mCurrent; + struct timespec mOldTs; + bool mOldTsValid; + long mSleepNs; // -1: busy wait, 0: sched_yield, > 0: nanosleep + long mPeriodNs; // expected period; the time required to render one mix buffer + long mUnderrunNs; // underrun likely when write cycle is greater than this value + long mOverrunNs; // overrun likely when write cycle is less than this value + long mForceNs; // if overrun detected, + // force the write cycle to take this much time + long mWarmupNsMin; // warmup complete when write cycle is greater than or equal to + // this value + long mWarmupNsMax; // and less than or equal to this value + FastThreadDumpState* mDummyDumpState; + FastThreadDumpState* mDumpState; + bool mIgnoreNextOverrun; // used to ignore initial overrun and first after an + // underrun +#ifdef FAST_THREAD_STATISTICS + struct timespec mOldLoad; // previous value of clock_gettime(CLOCK_THREAD_CPUTIME_ID) + bool mOldLoadValid; // whether oldLoad is valid + uint32_t mBounds; + bool mFull; // whether we have collected at least mSamplingN samples #ifdef CPU_FREQUENCY_STATISTICS - ThreadCpuUsage tcu; // for reading the current CPU clock frequency in kHz + ThreadCpuUsage mTcu; // for reading the current CPU clock frequency in kHz #endif #endif - unsigned coldGen; // last observed mColdGen - bool isWarm; // true means ready to mix, false means wait for warmup before mixing - struct timespec measuredWarmupTs; 
// how long did it take for warmup to complete - uint32_t warmupCycles; // counter of number of loop cycles required to warmup - NBLog::Writer dummyLogWriter; - NBLog::Writer *logWriter; - status_t timestampStatus; + unsigned mColdGen; // last observed mColdGen + bool mIsWarm; // true means ready to mix, + // false means wait for warmup before mixing + struct timespec mMeasuredWarmupTs; // how long did it take for warmup to complete + uint32_t mWarmupCycles; // counter of number of loop cycles during warmup phase + uint32_t mWarmupConsecutiveInRangeCycles; // number of consecutive cycles in range + NBLog::Writer mDummyLogWriter; + NBLog::Writer* mLogWriter; + status_t mTimestampStatus; - FastThreadState::Command command; -#if 0 - size_t frameCount; -#endif - bool attemptedWrite; + FastThreadState::Command mCommand; + bool mAttemptedWrite; }; // class FastThread diff --git a/services/audioflinger/FastThreadDumpState.cpp b/services/audioflinger/FastThreadDumpState.cpp new file mode 100644 index 0000000..9df5c4c --- /dev/null +++ b/services/audioflinger/FastThreadDumpState.cpp @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "FastThreadDumpState.h" + +namespace android { + +FastThreadDumpState::FastThreadDumpState() : + mCommand(FastThreadState::INITIAL), mUnderruns(0), mOverruns(0), + /* mMeasuredWarmupTs({0, 0}), */ + mWarmupCycles(0) +#ifdef FAST_THREAD_STATISTICS + , mSamplingN(0), mBounds(0) +#endif +{ + mMeasuredWarmupTs.tv_sec = 0; + mMeasuredWarmupTs.tv_nsec = 0; +#ifdef FAST_THREAD_STATISTICS + increaseSamplingN(1); +#endif +} + +FastThreadDumpState::~FastThreadDumpState() +{ +} + +#ifdef FAST_THREAD_STATISTICS +void FastThreadDumpState::increaseSamplingN(uint32_t samplingN) +{ + if (samplingN <= mSamplingN || samplingN > kSamplingN || roundup(samplingN) != samplingN) { + return; + } + uint32_t additional = samplingN - mSamplingN; + // sample arrays aren't accessed atomically with respect to the bounds, + // so clearing reduces chance for dumpsys to read random uninitialized samples + memset(&mMonotonicNs[mSamplingN], 0, sizeof(mMonotonicNs[0]) * additional); + memset(&mLoadNs[mSamplingN], 0, sizeof(mLoadNs[0]) * additional); +#ifdef CPU_FREQUENCY_STATISTICS + memset(&mCpukHz[mSamplingN], 0, sizeof(mCpukHz[0]) * additional); +#endif + mSamplingN = samplingN; +} +#endif + +} // android diff --git a/services/audioflinger/FastThreadDumpState.h b/services/audioflinger/FastThreadDumpState.h new file mode 100644 index 0000000..1ce0914 --- /dev/null +++ b/services/audioflinger/FastThreadDumpState.h @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2014 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H +#define ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H + +#include "Configuration.h" +#include "FastThreadState.h" + +namespace android { + +// The FastThreadDumpState keeps a cache of FastThread statistics that can be logged by dumpsys. +// Each individual native word-sized field is accessed atomically. But the +// overall structure is non-atomic, that is there may be an inconsistency between fields. +// No barriers or locks are used for either writing or reading. +// Only POD types are permitted, and the contents shouldn't be trusted (i.e. do range checks). +// It has a different lifetime than the FastThread, and so it can't be a member of FastThread. +struct FastThreadDumpState { + FastThreadDumpState(); + /*virtual*/ ~FastThreadDumpState(); + + FastThreadState::Command mCommand; // current command + uint32_t mUnderruns; // total number of underruns + uint32_t mOverruns; // total number of overruns + struct timespec mMeasuredWarmupTs; // measured warmup time + uint32_t mWarmupCycles; // number of loop cycles required to warmup + +#ifdef FAST_THREAD_STATISTICS + // Recently collected samples of per-cycle monotonic time, thread CPU time, and CPU frequency. + // kSamplingN is max size of sampling frame (statistics), and must be a power of 2 <= 0x8000. + // The sample arrays are virtually allocated based on this compile-time constant, + // but are only initialized and used based on the runtime parameter mSamplingN. + static const uint32_t kSamplingN = 0x8000; + // Compile-time constant for a "low RAM device", must be a power of 2 <= kSamplingN. + // This value was chosen such that each array uses 1 small page (4 Kbytes). + static const uint32_t kSamplingNforLowRamDevice = 0x400; + // Corresponding runtime maximum size of sample arrays, must be a power of 2 <= kSamplingN. 
+ uint32_t mSamplingN; + // The bounds define the interval of valid samples, and are represented as follows: + // newest open (excluded) endpoint = lower 16 bits of bounds, modulo N + // oldest closed (included) endpoint = upper 16 bits of bounds, modulo N + // Number of valid samples is newest - oldest. + uint32_t mBounds; // bounds for mMonotonicNs, mThreadCpuNs, and mCpukHz + // The elements in the *Ns arrays are in units of nanoseconds <= 3999999999. + uint32_t mMonotonicNs[kSamplingN]; // delta monotonic (wall clock) time + uint32_t mLoadNs[kSamplingN]; // delta CPU load in time +#ifdef CPU_FREQUENCY_STATISTICS + uint32_t mCpukHz[kSamplingN]; // absolute CPU clock frequency in kHz, bits 0-3 are CPU# +#endif + + // Increase sampling window after construction, must be a power of 2 <= kSamplingN + void increaseSamplingN(uint32_t samplingN); +#endif + +}; // struct FastThreadDumpState + +} // android + +#endif // ANDROID_AUDIO_FAST_THREAD_DUMP_STATE_H diff --git a/services/audioflinger/FastThreadState.cpp b/services/audioflinger/FastThreadState.cpp index 6994872..ad5f31f 100644 --- a/services/audioflinger/FastThreadState.cpp +++ b/services/audioflinger/FastThreadState.cpp @@ -29,21 +29,16 @@ FastThreadState::~FastThreadState() { } - -FastThreadDumpState::FastThreadDumpState() : - mCommand(FastThreadState::INITIAL), mUnderruns(0), mOverruns(0), - /* mMeasuredWarmupTs({0, 0}), */ - mWarmupCycles(0) -#ifdef FAST_MIXER_STATISTICS - , mSamplingN(1), mBounds(0) -#endif -{ - mMeasuredWarmupTs.tv_sec = 0; - mMeasuredWarmupTs.tv_nsec = 0; -} - -FastThreadDumpState::~FastThreadDumpState() +// static +const char *FastThreadState::commandToString(FastThreadState::Command command) { + switch (command) { + case FastThreadState::INITIAL: return "INITIAL"; + case FastThreadState::HOT_IDLE: return "HOT_IDLE"; + case FastThreadState::COLD_IDLE: return "COLD_IDLE"; + case FastThreadState::EXIT: return "EXIT"; + } + return NULL; } } // namespace android diff --git 
a/services/audioflinger/FastThreadState.h b/services/audioflinger/FastThreadState.h index 1ab8a0a..f18f846 100644 --- a/services/audioflinger/FastThreadState.h +++ b/services/audioflinger/FastThreadState.h @@ -46,43 +46,10 @@ struct FastThreadState { FastThreadDumpState* mDumpState; // if non-NULL, then update dump state periodically NBLog::Writer* mNBLogWriter; // non-blocking logger + // returns NULL if command belongs to a subclass + static const char *commandToString(Command command); }; // struct FastThreadState - -// FIXME extract common part of comment at FastMixerDumpState -struct FastThreadDumpState { - FastThreadDumpState(); - /*virtual*/ ~FastThreadDumpState(); - - FastThreadState::Command mCommand; // current command - uint32_t mUnderruns; // total number of underruns - uint32_t mOverruns; // total number of overruns - struct timespec mMeasuredWarmupTs; // measured warmup time - uint32_t mWarmupCycles; // number of loop cycles required to warmup - -#ifdef FAST_MIXER_STATISTICS - // Recently collected samples of per-cycle monotonic time, thread CPU time, and CPU frequency. - // kSamplingN is max size of sampling frame (statistics), and must be a power of 2 <= 0x8000. - // The sample arrays are virtually allocated based on this compile-time constant, - // but are only initialized and used based on the runtime parameter mSamplingN. - static const uint32_t kSamplingN = 0x8000; - // Corresponding runtime maximum size of sample arrays, must be a power of 2 <= kSamplingN. - uint32_t mSamplingN; - // The bounds define the interval of valid samples, and are represented as follows: - // newest open (excluded) endpoint = lower 16 bits of bounds, modulo N - // oldest closed (included) endpoint = upper 16 bits of bounds, modulo N - // Number of valid samples is newest - oldest. - uint32_t mBounds; // bounds for mMonotonicNs, mThreadCpuNs, and mCpukHz - // The elements in the *Ns arrays are in units of nanoseconds <= 3999999999. 
- uint32_t mMonotonicNs[kSamplingN]; // delta monotonic (wall clock) time - uint32_t mLoadNs[kSamplingN]; // delta CPU load in time -#ifdef CPU_FREQUENCY_STATISTICS - uint32_t mCpukHz[kSamplingN]; // absolute CPU clock frequency in kHz, bits 0-3 are CPU# -#endif -#endif - -}; // struct FastThreadDumpState - } // android #endif // ANDROID_AUDIO_FAST_THREAD_STATE_H diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp index 4f0c6b1..efbdcff 100644 --- a/services/audioflinger/PatchPanel.cpp +++ b/services/audioflinger/PatchPanel.cpp @@ -694,4 +694,4 @@ status_t AudioFlinger::PatchPanel::setAudioPortConfig(const struct audio_port_co } -}; // namespace android +} // namespace android diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h index ee48276..45df6a9 100644 --- a/services/audioflinger/PlaybackTracks.h +++ b/services/audioflinger/PlaybackTracks.h @@ -157,8 +157,9 @@ private: bool mFlushHwPending; // track requests for thread flush // for last call to getTimestamp - bool mPreviousValid; - uint32_t mPreviousFramesWritten; + bool mPreviousTimestampValid; + // This is either the first timestamp or one that has passed + // the check to prevent retrograde motion. 
AudioTimestamp mPreviousTimestamp; }; // end of Track @@ -255,7 +256,7 @@ public: class Buffer : public AudioBufferProvider::Buffer { public: - int16_t *mBuffer; + void *mBuffer; }; OutputTrack(PlaybackThread *thread, @@ -271,7 +272,7 @@ public: AudioSystem::SYNC_EVENT_NONE, int triggerSession = 0); virtual void stop(); - bool write(int16_t* data, uint32_t frames); + bool write(void* data, uint32_t frames); bool bufferQueueEmpty() const { return mBufferQueue.size() == 0; } bool isActive() const { return mActive; } const wp<ThreadBase>& thread() const { return mThread; } diff --git a/services/audioflinger/SpdifStreamOut.cpp b/services/audioflinger/SpdifStreamOut.cpp new file mode 100644 index 0000000..d23588e --- /dev/null +++ b/services/audioflinger/SpdifStreamOut.cpp @@ -0,0 +1,166 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#define LOG_TAG "AudioFlinger" +//#define LOG_NDEBUG 0 +#include <hardware/audio.h> +#include <utils/Log.h> + +#include <audio_utils/spdif/SPDIFEncoder.h> + +#include "AudioHwDevice.h" +#include "AudioStreamOut.h" +#include "SpdifStreamOut.h" + +namespace android { + +/** + * If the AudioFlinger is processing encoded data and the HAL expects + * PCM then we need to wrap the data in an SPDIF wrapper. 
+ */ +SpdifStreamOut::SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags) + : AudioStreamOut(dev,flags) + , mRateMultiplier(1) + , mSpdifEncoder(this) + , mRenderPositionHal(0) + , mPreviousHalPosition32(0) +{ +} + +status_t SpdifStreamOut::open( + audio_io_handle_t handle, + audio_devices_t devices, + struct audio_config *config, + const char *address) +{ + struct audio_config customConfig = *config; + + customConfig.format = AUDIO_FORMAT_PCM_16_BIT; + customConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO; + + // Some data bursts run at a higher sample rate. + switch(config->format) { + case AUDIO_FORMAT_E_AC3: + mRateMultiplier = 4; + break; + case AUDIO_FORMAT_AC3: + mRateMultiplier = 1; + break; + default: + ALOGE("ERROR SpdifStreamOut::open() unrecognized format 0x%08X\n", + config->format); + return BAD_VALUE; + } + customConfig.sample_rate = config->sample_rate * mRateMultiplier; + + // Always print this because otherwise it could be very confusing if the + // HAL and AudioFlinger are using different formats. + // Print before open() because HAL may modify customConfig. + ALOGI("SpdifStreamOut::open() AudioFlinger requested" + " sampleRate %d, format %#x, channelMask %#x", + config->sample_rate, + config->format, + config->channel_mask); + ALOGI("SpdifStreamOut::open() HAL configured for" + " sampleRate %d, format %#x, channelMask %#x", + customConfig.sample_rate, + customConfig.format, + customConfig.channel_mask); + + status_t status = AudioStreamOut::open( + handle, + devices, + &customConfig, + address); + + ALOGI("SpdifStreamOut::open() status = %d", status); + + return status; +} + +// Account for possibly higher sample rate. +status_t SpdifStreamOut::getRenderPosition(uint32_t *frames) +{ + uint32_t halPosition = 0; + status_t status = AudioStreamOut::getRenderPosition(&halPosition); + if (status != NO_ERROR) { + return status; + } + + // Accumulate a 64-bit position so that we wrap at the right place. 
+ if (mRateMultiplier != 1) { + // Maintain a 64-bit render position. + int32_t deltaHalPosition = (int32_t)(halPosition - mPreviousHalPosition32); + mPreviousHalPosition32 = halPosition; + mRenderPositionHal += deltaHalPosition; + + // Scale from device sample rate to application rate. + uint64_t renderPositionApp = mRenderPositionHal / mRateMultiplier; + ALOGV("SpdifStreamOut::getRenderPosition() " + "renderPositionAppRate = %llu = %llu / %u\n", + renderPositionApp, mRenderPositionHal, mRateMultiplier); + + *frames = (uint32_t)renderPositionApp; + } else { + *frames = halPosition; + } + return status; +} + +int SpdifStreamOut::flush() +{ + // FIXME Is there an issue here with flush being asynchronous? + mRenderPositionHal = 0; + mPreviousHalPosition32 = 0; + return AudioStreamOut::flush(); +} + +int SpdifStreamOut::standby() +{ + mRenderPositionHal = 0; + mPreviousHalPosition32 = 0; + return AudioStreamOut::standby(); +} + +// Account for possibly higher sample rate. +// This is much easier when all the values are 64-bit. +status_t SpdifStreamOut::getPresentationPosition(uint64_t *frames, + struct timespec *timestamp) +{ + uint64_t halFrames = 0; + status_t status = AudioStreamOut::getPresentationPosition(&halFrames, timestamp); + *frames = halFrames / mRateMultiplier; + return status; +} + +size_t SpdifStreamOut::getFrameSize() +{ + return sizeof(int8_t); +} + +ssize_t SpdifStreamOut::writeDataBurst(const void* buffer, size_t bytes) +{ + return AudioStreamOut::write(buffer, bytes); +} + +ssize_t SpdifStreamOut::write(const void* buffer, size_t bytes) +{ + // Write to SPDIF wrapper. It will call back to writeDataBurst(). 
+ return mSpdifEncoder.write(buffer, bytes); +} + +} // namespace android diff --git a/services/audioflinger/SpdifStreamOut.h b/services/audioflinger/SpdifStreamOut.h new file mode 100644 index 0000000..cb82ac7 --- /dev/null +++ b/services/audioflinger/SpdifStreamOut.h @@ -0,0 +1,107 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. +** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef ANDROID_SPDIF_STREAM_OUT_H +#define ANDROID_SPDIF_STREAM_OUT_H + +#include <stdint.h> +#include <sys/types.h> + +#include <system/audio.h> + +#include "AudioHwDevice.h" +#include "AudioStreamOut.h" +#include "SpdifStreamOut.h" + +#include <audio_utils/spdif/SPDIFEncoder.h> + +namespace android { + +/** + * Stream that is a PCM data burst in the HAL but looks like an encoded stream + * to the AudioFlinger. Wraps encoded data in an SPDIF wrapper per IEC61973-3. + */ +class SpdifStreamOut : public AudioStreamOut { +public: + + SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags); + + virtual ~SpdifStreamOut() { } + + virtual status_t open( + audio_io_handle_t handle, + audio_devices_t devices, + struct audio_config *config, + const char *address); + + virtual status_t getRenderPosition(uint32_t *frames); + + virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp); + + /** + * Write audio buffer to driver. Returns number of bytes written, or a + * negative status_t. 
If at least one frame was written successfully prior to the error, + * it is suggested that the driver return that successful (short) byte count + * and then return an error in the subsequent call. + * + * If set_callback() has previously been called to enable non-blocking mode + * the write() is not allowed to block. It must write only the number of + * bytes that currently fit in the driver/hardware buffer and then return + * this byte count. If this is less than the requested write size the + * callback function must be called when more space is available in the + * driver/hardware buffer. + */ + virtual ssize_t write(const void* buffer, size_t bytes); + + virtual size_t getFrameSize(); + + virtual status_t flush(); + virtual status_t standby(); + +private: + + class MySPDIFEncoder : public SPDIFEncoder + { + public: + MySPDIFEncoder(SpdifStreamOut *spdifStreamOut) + : mSpdifStreamOut(spdifStreamOut) + { + } + + virtual ssize_t writeOutput(const void* buffer, size_t bytes) + { + return mSpdifStreamOut->writeDataBurst(buffer, bytes); + } + protected: + SpdifStreamOut * const mSpdifStreamOut; + }; + + int mRateMultiplier; + MySPDIFEncoder mSpdifEncoder; + + // Used to implement getRenderPosition() + int64_t mRenderPositionHal; + uint32_t mPreviousHalPosition32; + + ssize_t writeDataBurst(const void* data, size_t bytes); + ssize_t writeInternal(const void* buffer, size_t bytes); + +}; + +} // namespace android + +#endif // ANDROID_SPDIF_STREAM_OUT_H diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp index 51025fe..4efb3d7 100644 --- a/services/audioflinger/Threads.cpp +++ b/services/audioflinger/Threads.cpp @@ -23,7 +23,9 @@ #include "Configuration.h" #include <math.h> #include <fcntl.h> +#include <linux/futex.h> #include <sys/stat.h> +#include <sys/syscall.h> #include <cutils/properties.h> #include <media/AudioParameter.h> #include <media/AudioResamplerPublic.h> @@ -314,6 +316,165 @@ void CpuStats::sample(const String8 &title // 
ThreadBase // ---------------------------------------------------------------------------- +// static +const char *AudioFlinger::ThreadBase::threadTypeToString(AudioFlinger::ThreadBase::type_t type) +{ + switch (type) { + case MIXER: + return "MIXER"; + case DIRECT: + return "DIRECT"; + case DUPLICATING: + return "DUPLICATING"; + case RECORD: + return "RECORD"; + case OFFLOAD: + return "OFFLOAD"; + default: + return "unknown"; + } +} + +String8 devicesToString(audio_devices_t devices) +{ + static const struct mapping { + audio_devices_t mDevices; + const char * mString; + } mappingsOut[] = { + AUDIO_DEVICE_OUT_EARPIECE, "EARPIECE", + AUDIO_DEVICE_OUT_SPEAKER, "SPEAKER", + AUDIO_DEVICE_OUT_WIRED_HEADSET, "WIRED_HEADSET", + AUDIO_DEVICE_OUT_WIRED_HEADPHONE, "WIRED_HEADPHONE", + AUDIO_DEVICE_OUT_TELEPHONY_TX, "TELEPHONY_TX", + AUDIO_DEVICE_NONE, "NONE", // must be last + }, mappingsIn[] = { + AUDIO_DEVICE_IN_BUILTIN_MIC, "BUILTIN_MIC", + AUDIO_DEVICE_IN_WIRED_HEADSET, "WIRED_HEADSET", + AUDIO_DEVICE_IN_VOICE_CALL, "VOICE_CALL", + AUDIO_DEVICE_IN_REMOTE_SUBMIX, "REMOTE_SUBMIX", + AUDIO_DEVICE_NONE, "NONE", // must be last + }; + String8 result; + audio_devices_t allDevices = AUDIO_DEVICE_NONE; + const mapping *entry; + if (devices & AUDIO_DEVICE_BIT_IN) { + devices &= ~AUDIO_DEVICE_BIT_IN; + entry = mappingsIn; + } else { + entry = mappingsOut; + } + for ( ; entry->mDevices != AUDIO_DEVICE_NONE; entry++) { + allDevices = (audio_devices_t) (allDevices | entry->mDevices); + if (devices & entry->mDevices) { + if (!result.isEmpty()) { + result.append("|"); + } + result.append(entry->mString); + } + } + if (devices & ~allDevices) { + if (!result.isEmpty()) { + result.append("|"); + } + result.appendFormat("0x%X", devices & ~allDevices); + } + if (result.isEmpty()) { + result.append(entry->mString); + } + return result; +} + +String8 inputFlagsToString(audio_input_flags_t flags) +{ + static const struct mapping { + audio_input_flags_t mFlag; + const char * mString; + } 
mappings[] = { + AUDIO_INPUT_FLAG_FAST, "FAST", + AUDIO_INPUT_FLAG_HW_HOTWORD, "HW_HOTWORD", + AUDIO_INPUT_FLAG_NONE, "NONE", // must be last + }; + String8 result; + audio_input_flags_t allFlags = AUDIO_INPUT_FLAG_NONE; + const mapping *entry; + for (entry = mappings; entry->mFlag != AUDIO_INPUT_FLAG_NONE; entry++) { + allFlags = (audio_input_flags_t) (allFlags | entry->mFlag); + if (flags & entry->mFlag) { + if (!result.isEmpty()) { + result.append("|"); + } + result.append(entry->mString); + } + } + if (flags & ~allFlags) { + if (!result.isEmpty()) { + result.append("|"); + } + result.appendFormat("0x%X", flags & ~allFlags); + } + if (result.isEmpty()) { + result.append(entry->mString); + } + return result; +} + +String8 outputFlagsToString(audio_output_flags_t flags) +{ + static const struct mapping { + audio_output_flags_t mFlag; + const char * mString; + } mappings[] = { + AUDIO_OUTPUT_FLAG_DIRECT, "DIRECT", + AUDIO_OUTPUT_FLAG_PRIMARY, "PRIMARY", + AUDIO_OUTPUT_FLAG_FAST, "FAST", + AUDIO_OUTPUT_FLAG_DEEP_BUFFER, "DEEP_BUFFER", + AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD, "COMPRESS_OFFLOAD", + AUDIO_OUTPUT_FLAG_NON_BLOCKING, "NON_BLOCKING", + AUDIO_OUTPUT_FLAG_HW_AV_SYNC, "HW_AV_SYNC", + AUDIO_OUTPUT_FLAG_NONE, "NONE", // must be last + }; + String8 result; + audio_output_flags_t allFlags = AUDIO_OUTPUT_FLAG_NONE; + const mapping *entry; + for (entry = mappings; entry->mFlag != AUDIO_OUTPUT_FLAG_NONE; entry++) { + allFlags = (audio_output_flags_t) (allFlags | entry->mFlag); + if (flags & entry->mFlag) { + if (!result.isEmpty()) { + result.append("|"); + } + result.append(entry->mString); + } + } + if (flags & ~allFlags) { + if (!result.isEmpty()) { + result.append("|"); + } + result.appendFormat("0x%X", flags & ~allFlags); + } + if (result.isEmpty()) { + result.append(entry->mString); + } + return result; +} + +const char *sourceToString(audio_source_t source) +{ + switch (source) { + case AUDIO_SOURCE_DEFAULT: return "default"; + case AUDIO_SOURCE_MIC: return 
"mic"; + case AUDIO_SOURCE_VOICE_UPLINK: return "voice uplink"; + case AUDIO_SOURCE_VOICE_DOWNLINK: return "voice downlink"; + case AUDIO_SOURCE_VOICE_CALL: return "voice call"; + case AUDIO_SOURCE_CAMCORDER: return "camcorder"; + case AUDIO_SOURCE_VOICE_RECOGNITION: return "voice recognition"; + case AUDIO_SOURCE_VOICE_COMMUNICATION: return "voice communication"; + case AUDIO_SOURCE_REMOTE_SUBMIX: return "remote submix"; + case AUDIO_SOURCE_FM_TUNER: return "FM tuner"; + case AUDIO_SOURCE_HOTWORD: return "hotword"; + default: return "unknown"; + } +} + AudioFlinger::ThreadBase::ThreadBase(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id, audio_devices_t outDevice, audio_devices_t inDevice, type_t type) : Thread(false /*canCallJava*/), @@ -577,20 +738,22 @@ void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector<String16>& args __u bool locked = AudioFlinger::dumpTryLock(mLock); if (!locked) { - dprintf(fd, "thread %p maybe dead locked\n", this); + dprintf(fd, "thread %p may be deadlocked\n", this); } + dprintf(fd, " Thread name: %s\n", mThreadName); dprintf(fd, " I/O handle: %d\n", mId); dprintf(fd, " TID: %d\n", getTid()); dprintf(fd, " Standby: %s\n", mStandby ? 
"yes" : "no"); - dprintf(fd, " Sample rate: %u\n", mSampleRate); + dprintf(fd, " Sample rate: %u Hz\n", mSampleRate); dprintf(fd, " HAL frame count: %zu\n", mFrameCount); + dprintf(fd, " HAL format: 0x%x (%s)\n", mHALFormat, formatToString(mHALFormat)); dprintf(fd, " HAL buffer size: %u bytes\n", mBufferSize); - dprintf(fd, " Channel Count: %u\n", mChannelCount); - dprintf(fd, " Channel Mask: 0x%08x (%s)\n", mChannelMask, + dprintf(fd, " Channel count: %u\n", mChannelCount); + dprintf(fd, " Channel mask: 0x%08x (%s)\n", mChannelMask, channelMaskToString(mChannelMask, mType != RECORD).string()); - dprintf(fd, " Format: 0x%x (%s)\n", mHALFormat, formatToString(mHALFormat)); - dprintf(fd, " Frame size: %zu\n", mFrameSize); + dprintf(fd, " Format: 0x%x (%s)\n", mFormat, formatToString(mFormat)); + dprintf(fd, " Frame size: %zu bytes\n", mFrameSize); dprintf(fd, " Pending config events:"); size_t numConfig = mConfigEvents.size(); if (numConfig) { @@ -602,6 +765,9 @@ void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector<String16>& args __u } else { dprintf(fd, " none\n"); } + dprintf(fd, " Output device: %#x (%s)\n", mOutDevice, devicesToString(mOutDevice).string()); + dprintf(fd, " Input device: %#x (%s)\n", mInDevice, devicesToString(mInDevice).string()); + dprintf(fd, " Audio source: %d (%s)\n", mAudioSource, sourceToString(mAudioSource)); if (locked) { mLock.unlock(); @@ -635,19 +801,19 @@ void AudioFlinger::ThreadBase::acquireWakeLock(int uid) String16 AudioFlinger::ThreadBase::getWakeLockTag() { switch (mType) { - case MIXER: - return String16("AudioMix"); - case DIRECT: - return String16("AudioDirectOut"); - case DUPLICATING: - return String16("AudioDup"); - case RECORD: - return String16("AudioIn"); - case OFFLOAD: - return String16("AudioOffload"); - default: - ALOG_ASSERT(false); - return String16("AudioUnknown"); + case MIXER: + return String16("AudioMix"); + case DIRECT: + return String16("AudioDirectOut"); + case DUPLICATING: + return 
String16("AudioDup"); + case RECORD: + return String16("AudioIn"); + case OFFLOAD: + return String16("AudioOffload"); + default: + ALOG_ASSERT(false); + return String16("AudioUnknown"); } } @@ -674,7 +840,7 @@ void AudioFlinger::ThreadBase::acquireWakeLock_l(int uid) if (status == NO_ERROR) { mWakeLockToken = binder; } - ALOGV("acquireWakeLock_l() %s status %d", mName, status); + ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status); } } @@ -687,7 +853,7 @@ void AudioFlinger::ThreadBase::releaseWakeLock() void AudioFlinger::ThreadBase::releaseWakeLock_l() { if (mWakeLockToken != 0) { - ALOGV("releaseWakeLock_l() %s", mName); + ALOGV("releaseWakeLock_l() %s", mThreadName); if (mPowerManager != 0) { mPowerManager->releaseWakeLock(mWakeLockToken, 0, true /* FIXME force oneway contrary to .aidl */); @@ -708,7 +874,7 @@ void AudioFlinger::ThreadBase::getPowerManager_l() { sp<IBinder> binder = defaultServiceManager()->checkService(String16("power")); if (binder == 0) { - ALOGW("Thread %s cannot connect to the power manager service", mName); + ALOGW("Thread %s cannot connect to the power manager service", mThreadName); } else { mPowerManager = interface_cast<IPowerManager>(binder); binder->linkToDeath(mDeathRecipient); @@ -728,7 +894,7 @@ void AudioFlinger::ThreadBase::updateWakeLockUids_l(const SortedVector<int> &uid status_t status; status = mPowerManager->updateWakeLockUids(mWakeLockToken, uids.size(), uids.array(), true /* FIXME force oneway contrary to .aidl */); - ALOGV("acquireWakeLock_l() %s status %d", mName, status); + ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status); } } @@ -912,7 +1078,7 @@ sp<AudioFlinger::EffectHandle> AudioFlinger::ThreadBase::createEffect_l( // mSinkBuffer is not guaranteed to be compatible with effect processing (PCM 16 stereo). 
if (mType == DIRECT) { ALOGW("createEffect_l() Cannot add effect %s on Direct output type thread %s", - desc->name, mName); + desc->name, mThreadName); lStatus = BAD_VALUE; goto Exit; } @@ -936,7 +1102,8 @@ sp<AudioFlinger::EffectHandle> AudioFlinger::ThreadBase::createEffect_l( case DUPLICATING: case RECORD: default: - ALOGW("createEffect_l() Cannot add global effect %s on thread %s", desc->name, mName); + ALOGW("createEffect_l() Cannot add global effect %s on thread %s", + desc->name, mThreadName); lStatus = BAD_VALUE; goto Exit; } @@ -1201,8 +1368,8 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp<AudioFlinger>& audioFlinge // mLatchD, mLatchQ, mLatchDValid(false), mLatchQValid(false) { - snprintf(mName, kNameLength, "AudioOut_%X", id); - mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mName); + snprintf(mThreadName, kThreadNameLength, "AudioOut_%X", id); + mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName); // Assumes constructor is called by AudioFlinger with it's mLock held, but // it would be safer to explicitly pass initial masterVolume/masterMute as @@ -1315,7 +1482,10 @@ void AudioFlinger::PlaybackThread::dumpTracks(int fd, const Vector<String16>& ar void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector<String16>& args) { - dprintf(fd, "\nOutput thread %p:\n", this); + dprintf(fd, "\nOutput thread %p type %d (%s):\n", this, type(), threadTypeToString(type())); + + dumpBase(fd, args); + dprintf(fd, " Normal frame count: %zu\n", mNormalFrameCount); dprintf(fd, " Last write occurred (msecs): %llu\n", ns2ms(systemTime() - mLastWriteTime)); dprintf(fd, " Total writes: %d\n", mNumWrites); @@ -1326,15 +1496,17 @@ void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector<String16>& dprintf(fd, " Mixer buffer: %p\n", mMixerBuffer); dprintf(fd, " Effect buffer: %p\n", mEffectBuffer); dprintf(fd, " Fast track availMask=%#x\n", mFastTrackAvailMask); - - dumpBase(fd, args); + AudioStreamOut *output = mOutput; + 
audio_output_flags_t flags = output != NULL ? output->flags : AUDIO_OUTPUT_FLAG_NONE; + String8 flagsAsString = outputFlagsToString(flags); + dprintf(fd, " AudioStreamOut: %p flags %#x (%s)\n", output, flags, flagsAsString.string()); } // Thread virtuals void AudioFlinger::PlaybackThread::onFirstRef() { - run(mName, ANDROID_PRIORITY_URGENT_AUDIO); + run(mThreadName, ANDROID_PRIORITY_URGENT_AUDIO); } // ThreadBase virtuals @@ -1378,9 +1550,10 @@ sp<AudioFlinger::PlaybackThread::Track> AudioFlinger::PlaybackThread::createTrac ( (sharedBuffer != 0) ) || - // use case 2: callback handler and frame count is default or at least as large as HAL + // use case 2: frame count is default or at least as large as HAL ( - (tid != -1) && + // we formerly checked for a callback handler (non-0 tid), + // but that is no longer required for TRANSFER_OBTAIN mode ((frameCount == 0) || (frameCount >= mFrameCount)) ) @@ -1420,20 +1593,25 @@ sp<AudioFlinger::PlaybackThread::Track> AudioFlinger::PlaybackThread::createTrac audio_is_linear_pcm(format), channelMask, sampleRate, mSampleRate, hasFastMixer(), tid, mFastTrackAvailMask); *flags &= ~IAudioFlinger::TRACK_FAST; - // For compatibility with AudioTrack calculation, buffer depth is forced - // to be at least 2 x the normal mixer frame count and cover audio hardware latency. - // This is probably too conservative, but legacy application code may depend on it. - // If you change this calculation, also review the start threshold which is related. + } + } + // For normal PCM streaming tracks, update minimum frame count. + // For compatibility with AudioTrack calculation, buffer depth is forced + // to be at least 2 x the normal mixer frame count and cover audio hardware latency. + // This is probably too conservative, but legacy application code may depend on it. + // If you change this calculation, also review the start threshold which is related. 
+ if (!(*flags & IAudioFlinger::TRACK_FAST) + && audio_is_linear_pcm(format) && sharedBuffer == 0) { uint32_t latencyMs = mOutput->stream->get_latency(mOutput->stream); uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate); if (minBufCount < 2) { minBufCount = 2; } - size_t minFrameCount = mNormalFrameCount * minBufCount; - if (frameCount < minFrameCount) { + size_t minFrameCount = + minBufCount * sourceFramesNeeded(sampleRate, mNormalFrameCount, mSampleRate); + if (frameCount < minFrameCount) { // including frameCount == 0 frameCount = minFrameCount; } - } } *pFrameCount = frameCount; @@ -1831,7 +2009,7 @@ void AudioFlinger::PlaybackThread::readOutputParameters_l() LOG_FATAL("HAL format %#x not supported for mixed output", mFormat); } - mFrameSize = audio_stream_out_frame_size(mOutput->stream); + mFrameSize = mOutput->getFrameSize(); mBufferSize = mOutput->stream->common.get_buffer_size(&mOutput->stream->common); mFrameCount = mBufferSize / mFrameSize; if (mFrameCount & 15) { @@ -1861,6 +2039,22 @@ void AudioFlinger::PlaybackThread::readOutputParameters_l() } } + if (mType == DUPLICATING && mMixerBufferEnabled && mEffectBufferEnabled) { + // For best precision, we use float instead of the associated output + // device format (typically PCM 16 bit). + + mFormat = AUDIO_FORMAT_PCM_FLOAT; + mFrameSize = mChannelCount * audio_bytes_per_sample(mFormat); + mBufferSize = mFrameSize * mFrameCount; + + // TODO: We currently use the associated output device channel mask and sample rate. + // (1) Perhaps use the ORed channel mask of all downstream MixerThreads + // (if a valid mask) to avoid premature downmix. + // (2) Perhaps use the maximum sample rate of all downstream MixerThreads + // instead of the output device sample rate to avoid loss of high frequency information. + // This may need to be updated as MixerThread/OutputTracks are added and not here. 
+ } + // Calculate size of normal sink buffer relative to the HAL output buffer size double multiplier = 1.0; if (mType == MIXER && (kUseFastMixer == FastMixer_Static || @@ -1966,7 +2160,7 @@ status_t AudioFlinger::PlaybackThread::getRenderPosition(uint32_t *halFrames, ui } else { status_t status; uint32_t frames; - status = mOutput->stream->get_render_position(mOutput->stream, &frames); + status = mOutput->getRenderPosition(&frames); *dspFrames = (size_t)frames; return status; } @@ -2008,13 +2202,13 @@ uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId) } -AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::getOutput() const +AudioStreamOut* AudioFlinger::PlaybackThread::getOutput() const { Mutex::Autolock _l(mLock); return mOutput; } -AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::clearOutput() +AudioStreamOut* AudioFlinger::PlaybackThread::clearOutput() { Mutex::Autolock _l(mLock); AudioStreamOut *output = mOutput; @@ -2137,6 +2331,7 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write() } else { bytesWritten = framesWritten; } + mLatchDValid = false; status_t status = mNormalSink->getTimestamp(mLatchD.mTimestamp); if (status == NO_ERROR) { size_t totalFramesWritten = mNormalSink->framesWritten(); @@ -2159,8 +2354,7 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write() } // FIXME We should have an implementation of timestamps for direct output threads. // They are used e.g for multichannel PCM playback over HDMI. 
- bytesWritten = mOutput->stream->write(mOutput->stream, - (char *)mSinkBuffer + offset, mBytesRemaining); + bytesWritten = mOutput->write((char *)mSinkBuffer + offset, mBytesRemaining); if (mUseAsyncWrite && ((bytesWritten < 0) || (bytesWritten == (ssize_t)mBytesRemaining))) { // do not wait for async callback in case of error of full write @@ -2640,7 +2834,9 @@ bool AudioFlinger::PlaybackThread::threadLoop() } } else { + ATRACE_BEGIN("sleep"); usleep(sleepTime); + ATRACE_END(); } } @@ -2711,8 +2907,7 @@ status_t AudioFlinger::PlaybackThread::getTimestamp_l(AudioTimestamp& timestamp) if ((mType == OFFLOAD || mType == DIRECT) && mOutput != NULL && mOutput->stream->get_presentation_position) { uint64_t position64; - int ret = mOutput->stream->get_presentation_position( - mOutput->stream, &position64, ×tamp.mTime); + int ret = mOutput->getPresentationPosition(&position64, ×tamp.mTime); if (ret == 0) { timestamp.mPosition = (uint32_t)position64; return NO_ERROR; @@ -2800,6 +2995,12 @@ AudioFlinger::MixerThread::MixerThread(const sp<AudioFlinger>& audioFlinger, Aud mNormalFrameCount); mAudioMixer = new AudioMixer(mNormalFrameCount, mSampleRate); + if (type == DUPLICATING) { + // The Duplicating thread uses the AudioMixer and delivers data to OutputTracks + // (downstream MixerThreads) in DuplicatingThread::threadLoop_write(). + // Do not create or use mFastMixer, mOutputSink, mPipeSink, or mNormalSink. 
+ return; + } // create an NBAIO sink for the HAL output stream, and negotiate mOutputSink = new AudioStreamOutSink(output->stream); size_t numCounterOffers = 0; @@ -2841,6 +3042,7 @@ AudioFlinger::MixerThread::MixerThread(const sp<AudioFlinger>& audioFlinger, Aud NBAIO_Format format = mOutputSink->format(); NBAIO_Format origformat = format; // adjust format to match that of the Fast Mixer + ALOGV("format changed from %d to %d", format.mFormat, fastMixerFormat); format.mFormat = fastMixerFormat; format.mFrameSize = audio_bytes_per_sample(format.mFormat) * format.mChannelCount; @@ -3020,8 +3222,10 @@ ssize_t AudioFlinger::MixerThread::threadLoop_write() #endif } state->mCommand = FastMixerState::MIX_WRITE; +#ifdef FAST_THREAD_STATISTICS mFastMixerDumpState.increaseSamplingN(mAudioFlinger->isLowRamDevice() ? - FastMixerDumpState::kSamplingNforLowRamDevice : FastMixerDumpState::kSamplingN); + FastThreadDumpState::kSamplingNforLowRamDevice : FastThreadDumpState::kSamplingN); +#endif sq->end(); sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED); if (kUseFastMixer == FastMixer_Dynamic) { @@ -3083,7 +3287,7 @@ bool AudioFlinger::PlaybackThread::waitingAsyncCallback() void AudioFlinger::PlaybackThread::threadLoop_standby() { ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended); - mOutput->stream->common.standby(&mOutput->stream->common); + mOutput->standby(); if (mUseAsyncWrite != 0) { // discard any pending drain or write ack by incrementing sequence mWriteAckSequence = (mWriteAckSequence + 2) & ~1; @@ -3386,8 +3590,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac if (sr == mSampleRate) { desiredFrames = mNormalFrameCount; } else { - // +1 for rounding and +1 for additional sample needed for interpolation - desiredFrames = (mNormalFrameCount * sr) / mSampleRate + 1 + 1; + desiredFrames = sourceFramesNeeded(sr, mNormalFrameCount, mSampleRate); // add frames already consumed but not yet released by the 
resampler // because mAudioTrackServerProxy->framesReady() will include these frames desiredFrames += mAudioMixer->getUnreleasedFrames(track->name()); @@ -3405,6 +3608,23 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac } size_t framesReady = track->framesReady(); + if (ATRACE_ENABLED()) { + // I wish we had formatted trace names + char traceName[16]; + strcpy(traceName, "nRdy"); + int name = track->name(); + if (AudioMixer::TRACK0 <= name && + name < (int) (AudioMixer::TRACK0 + AudioMixer::MAX_NUM_TRACKS)) { + name -= AudioMixer::TRACK0; + traceName[4] = (name / 10) + '0'; + traceName[5] = (name % 10) + '0'; + } else { + traceName[4] = '?'; + traceName[5] = '?'; + } + traceName[6] = '\0'; + ATRACE_INT(traceName, framesReady); + } if ((framesReady >= minFrames) && track->isReady() && !track->isPaused() && !track->isTerminated()) { @@ -3836,7 +4056,7 @@ bool AudioFlinger::MixerThread::checkForNewParameter_l(const String8& keyValuePa status = mOutput->stream->common.set_parameters(&mOutput->stream->common, keyValuePair.string()); if (!mStandby && status == INVALID_OPERATION) { - mOutput->stream->common.standby(&mOutput->stream->common); + mOutput->standby(); mStandby = true; mBytesWritten = 0; status = mOutput->stream->common.set_parameters(&mOutput->stream->common, @@ -4096,6 +4316,10 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep } if (track->isStopping_1()) { track->mState = TrackBase::STOPPING_2; + if (last && mHwPaused) { + doHwResume = true; + mHwPaused = false; + } } if ((track->sharedBuffer() != 0) || track->isStopped() || track->isStopping_2() || track->isPaused()) { @@ -4178,8 +4402,8 @@ void AudioFlinger::DirectOutputThread::threadLoop_mix() while (frameCount) { AudioBufferProvider::Buffer buffer; buffer.frameCount = frameCount; - mActiveTrack->getNextBuffer(&buffer); - if (buffer.raw == NULL) { + status_t status = mActiveTrack->getNextBuffer(&buffer); + if (status != NO_ERROR || 
buffer.raw == NULL) { memset(curBuf, 0, frameCount * mFrameSize); break; } @@ -4235,14 +4459,17 @@ void AudioFlinger::DirectOutputThread::threadLoop_exit() bool AudioFlinger::DirectOutputThread::shouldStandby_l() { bool trackPaused = false; + bool trackStopped = false; // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack // after a timeout and we will enter standby then. if (mTracks.size() > 0) { trackPaused = mTracks[mTracks.size() - 1]->isPaused(); + trackStopped = mTracks[mTracks.size() - 1]->isStopped() || + mTracks[mTracks.size() - 1]->mState == TrackBase::IDLE; } - return !mStandby && !(trackPaused || (usesHwAvSync() && mHwPaused)); + return !mStandby && !(trackPaused || (usesHwAvSync() && mHwPaused && !trackStopped)); } // getTrackName_l() must be called with ThreadBase::mLock held @@ -4291,7 +4518,7 @@ bool AudioFlinger::DirectOutputThread::checkForNewParameter_l(const String8& key status = mOutput->stream->common.set_parameters(&mOutput->stream->common, keyValuePair.string()); if (!mStandby && status == INVALID_OPERATION) { - mOutput->stream->common.standby(&mOutput->stream->common); + mOutput->standby(); mStandby = true; mBytesWritten = 0; status = mOutput->stream->common.set_parameters(&mOutput->stream->common, @@ -4345,7 +4572,10 @@ void AudioFlinger::DirectOutputThread::cacheParameters_l() // use shorter standby delay as on normal output to release // hardware resources as soon as possible - if (audio_is_linear_pcm(mFormat)) { + // no delay on outputs with HW A/V sync + if (usesHwAvSync()) { + standbyDelay = 0; + } else if (audio_is_linear_pcm(mFormat)) { standbyDelay = microseconds(activeSleepTime*2); } else { standbyDelay = kOffloadStandbyDelayNs; @@ -4354,9 +4584,7 @@ void AudioFlinger::DirectOutputThread::cacheParameters_l() void AudioFlinger::DirectOutputThread::flushHw_l() { - if (mOutput->stream->flush != NULL) { - mOutput->stream->flush(mOutput->stream); - } + mOutput->flush(); mHwPaused = false; } @@ 
-4646,7 +4874,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr size_t audioHALFrames = (mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000; size_t framesWritten = - mBytesWritten / audio_stream_out_frame_size(mOutput->stream); + mBytesWritten / mOutput->getFrameSize(); track->presentationComplete(framesWritten, audioHALFrames); track->reset(); tracksToRemove->add(track); @@ -4797,16 +5025,8 @@ void AudioFlinger::DuplicatingThread::threadLoop_sleepTime() ssize_t AudioFlinger::DuplicatingThread::threadLoop_write() { - // We convert the duplicating thread format to AUDIO_FORMAT_PCM_16_BIT - // for delivery downstream as needed. This in-place conversion is safe as - // AUDIO_FORMAT_PCM_16_BIT is smaller than any other supported format - // (AUDIO_FORMAT_PCM_8_BIT is not allowed here). - if (mFormat != AUDIO_FORMAT_PCM_16_BIT) { - memcpy_by_audio_format(mSinkBuffer, AUDIO_FORMAT_PCM_16_BIT, - mSinkBuffer, mFormat, writeFrames * mChannelCount); - } for (size_t i = 0; i < outputTracks.size(); i++) { - outputTracks[i]->write(reinterpret_cast<int16_t*>(mSinkBuffer), writeFrames); + outputTracks[i]->write(mSinkBuffer, writeFrames); } mStandby = false; return (ssize_t)mSinkBufferSize; @@ -4833,25 +5053,26 @@ void AudioFlinger::DuplicatingThread::clearOutputTracks() void AudioFlinger::DuplicatingThread::addOutputTrack(MixerThread *thread) { Mutex::Autolock _l(mLock); - // FIXME explain this formula - size_t frameCount = (3 * mNormalFrameCount * mSampleRate) / thread->sampleRate(); - // OutputTrack is forced to AUDIO_FORMAT_PCM_16_BIT regardless of mFormat - // due to current usage case and restrictions on the AudioBufferProvider. - // Actual buffer conversion is done in threadLoop_write(). 
- // - // TODO: This may change in the future, depending on multichannel - // (and non int16_t*) support on AF::PlaybackThread::OutputTrack - OutputTrack *outputTrack = new OutputTrack(thread, + // The downstream MixerThread consumes thread->frameCount() amount of frames per mix pass. + // Adjust for thread->sampleRate() to determine minimum buffer frame count. + // Then triple buffer because Threads do not run synchronously and may not be clock locked. + const size_t frameCount = + 3 * sourceFramesNeeded(mSampleRate, thread->frameCount(), thread->sampleRate()); + // TODO: Consider asynchronous sample rate conversion to handle clock disparity + // from different OutputTracks and their associated MixerThreads (e.g. one may be + // nearly empty and the other may be dropping data). + + sp<OutputTrack> outputTrack = new OutputTrack(thread, + this, mSampleRate, - AUDIO_FORMAT_PCM_16_BIT, + mFormat, mChannelMask, frameCount, IPCThreadState::self()->getCallingUid()); if (outputTrack->cblk() != NULL) { thread->setStreamVolume(AUDIO_STREAM_PATCH, 1.0f); mOutputTracks.add(outputTrack); - ALOGV("addOutputTrack() track %p, on thread %p", outputTrack, thread); + ALOGV("addOutputTrack() track %p, on thread %p", outputTrack.get(), thread); updateWaitTime_l(); } } @@ -4952,8 +5173,8 @@ AudioFlinger::RecordThread::RecordThread(const sp<AudioFlinger>& audioFlinger, // mFastCaptureNBLogWriter , mFastTrackAvail(false) { - snprintf(mName, kNameLength, "AudioIn_%X", id); - mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mName); + snprintf(mThreadName, kThreadNameLength, "AudioIn_%X", id); + mNBLogWriter = audioFlinger->newWriter_l(kLogSize, mThreadName); readInputParameters_l(); @@ -4993,7 +5214,7 @@ AudioFlinger::RecordThread::RecordThread(const sp<AudioFlinger>& audioFlinger, } if (initFastCapture) { - // create a Pipe for FastMixer to write to, and for us and fast tracks to read from + // create a Pipe for FastCapture to write to, and for us and fast tracks to read from NBAIO_Format
format = mInputSource->format(); size_t pipeFramesP2 = roundup(mSampleRate / 25); // double-buffering of 20 ms each size_t pipeSize = pipeFramesP2 * Format_frameSize(format); @@ -5094,7 +5315,7 @@ AudioFlinger::RecordThread::~RecordThread() void AudioFlinger::RecordThread::onFirstRef() { - run(mName, PRIORITY_URGENT_AUDIO); + run(mThreadName, PRIORITY_URGENT_AUDIO); } bool AudioFlinger::RecordThread::threadLoop() @@ -5135,7 +5356,9 @@ reacquire_wakelock: // sleep with mutex unlocked if (sleepUs > 0) { + ATRACE_BEGIN("sleep"); usleep(sleepUs); + ATRACE_END(); sleepUs = 0; } @@ -5279,7 +5502,8 @@ reacquire_wakelock: state->mCommand = FastCaptureState::READ_WRITE; #if 0 // FIXME mFastCaptureDumpState.increaseSamplingN(mAudioFlinger->isLowRamDevice() ? - FastCaptureDumpState::kSamplingNforLowRamDevice : FastMixerDumpState::kSamplingN); + FastThreadDumpState::kSamplingNforLowRamDevice : + FastThreadDumpState::kSamplingN); #endif didModify = true; } @@ -5427,8 +5651,8 @@ reacquire_wakelock: upmix_to_stereo_i16_from_mono_i16((int16_t *)dst, (const int16_t *)src, part1); } else { - downmix_to_mono_i16_from_stereo_i16((int16_t *)dst, (const int16_t *)src, - part1); + downmix_to_mono_i16_from_stereo_i16((int16_t *)dst, + (const int16_t *)src, part1); } dst += part1 * activeTrack->mFrameSize; front += part1; @@ -5649,8 +5873,9 @@ sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createRe // client expresses a preference for FAST, but we get the final say if (*flags & IAudioFlinger::TRACK_FAST) { if ( - // use case: callback handler - (tid != -1) && + // we formerly checked for a callback handler (non-0 tid), + // but that is no longer required for TRANSFER_OBTAIN mode + // // frame count is not specified, or is exactly the pipe depth ((frameCount == 0) || (frameCount == mPipeFramesP2)) && // PCM data @@ -5939,15 +6164,17 @@ void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector<String16>& a { dprintf(fd, "\nInput thread %p:\n", this); - if 
(mActiveTracks.size() > 0) { - dprintf(fd, " Buffer size: %zu bytes\n", mBufferSize); - } else { + dumpBase(fd, args); + + if (mActiveTracks.size() == 0) { dprintf(fd, " No active record clients\n"); } dprintf(fd, " Fast capture thread: %s\n", hasFastCapture() ? "yes" : "no"); dprintf(fd, " Fast track available: %s\n", mFastTrackAvail ? "yes" : "no"); - dumpBase(fd, args); + // Make a non-atomic copy of fast capture dump state so it won't change underneath us + const FastCaptureDumpState copy(mFastCaptureDumpState); + copy.dump(fd); } void AudioFlinger::RecordThread::dumpTracks(int fd, const Vector<String16>& args __unused) @@ -6412,4 +6639,4 @@ void AudioFlinger::RecordThread::getAudioPortConfig(struct audio_port_config *co config->ext.mix.usecase.source = mAudioSource; } -}; // namespace android +} // namespace android diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h index 1088843..d600ea9 100644 --- a/services/audioflinger/Threads.h +++ b/services/audioflinger/Threads.h @@ -32,6 +32,8 @@ public: OFFLOAD // Thread class is OffloadThread }; + static const char *threadTypeToString(type_t type); + ThreadBase(const sp<AudioFlinger>& audioFlinger, audio_io_handle_t id, audio_devices_t outDevice, audio_devices_t inDevice, type_t type); virtual ~ThreadBase(); @@ -406,6 +408,7 @@ protected: audio_channel_mask_t mChannelMask; uint32_t mChannelCount; size_t mFrameSize; + // not HAL frame size, this is for output sink (to pipe to fast mixer) audio_format_t mFormat; // Source format for Recording and // Sink format for Playback. // Sink format may be different than @@ -424,13 +427,13 @@ protected: bool mStandby; // Whether thread is currently in standby. 
audio_devices_t mOutDevice; // output device audio_devices_t mInDevice; // input device - audio_source_t mAudioSource; // (see audio.h, audio_source_t) + audio_source_t mAudioSource; const audio_io_handle_t mId; Vector< sp<EffectChain> > mEffectChains; - static const int kNameLength = 16; // prctl(PR_SET_NAME) limit - char mName[kNameLength]; + static const int kThreadNameLength = 16; // prctl(PR_SET_NAME) limit + char mThreadName[kThreadNameLength]; // guaranteed NUL-terminated sp<IPowerManager> mPowerManager; sp<IBinder> mWakeLockToken; const sp<PMDeathRecipient> mDeathRecipient; @@ -1167,7 +1170,8 @@ private: const sp<MemoryDealer> mReadOnlyHeap; // one-time initialization, no locks required - sp<FastCapture> mFastCapture; // non-0 if there is also a fast capture + sp<FastCapture> mFastCapture; // non-0 if there is also + // a fast capture // FIXME audio watchdog thread // contents are not guaranteed to be consistent, no locks required diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp index e970036..dc9f249 100644 --- a/services/audioflinger/Tracks.cpp +++ b/services/audioflinger/Tracks.cpp @@ -20,6 +20,7 @@ //#define LOG_NDEBUG 0 #include "Configuration.h" +#include <linux/futex.h> #include <math.h> #include <sys/syscall.h> #include <utils/Log.h> @@ -404,9 +405,7 @@ AudioFlinger::PlaybackThread::Track::Track( mAudioTrackServerProxy(NULL), mResumeToStopping(false), mFlushHwPending(false), - mPreviousValid(false), - mPreviousFramesWritten(0) - // mPreviousTimestamp + mPreviousTimestampValid(false) { // client == 0 implies sharedBuffer == 0 ALOG_ASSERT(!(client == 0 && sharedBuffer != 0)); @@ -443,8 +442,6 @@ AudioFlinger::PlaybackThread::Track::Track( // this means we are potentially denying other more important fast tracks from // being created. It would be better to allocate the index dynamically. 
mFastIndex = i; - // Read the initial underruns because this field is never cleared by the fast mixer - mObservedUnderruns = thread->getFastTrackUnderruns(i); thread->mFastTrackAvailMask &= ~(1 << i); } } @@ -693,6 +690,12 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev } PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + if (isFastTrack()) { + // refresh fast track underruns on start because that field is never cleared + // by the fast mixer; furthermore, the same track can be recycled, i.e. start + // after stop. + mObservedUnderruns = playbackThread->getFastTrackUnderruns(mFastIndex); + } status = playbackThread->addTrack_l(this); if (status == INVALID_OPERATION || status == PERMISSION_DENIED) { triggerEvents(AudioSystem::SYNC_EVENT_PRESENTATION_COMPLETE); @@ -742,6 +745,7 @@ void AudioFlinger::PlaybackThread::Track::stop() // move to STOPPING_2 when drain completes and then STOPPED mState = STOPPING_1; } + playbackThread->broadcast_l(); ALOGV("not stopping/stopped => stopping/stopped (%d) on thread %p", mName, playbackThread); } @@ -859,6 +863,7 @@ void AudioFlinger::PlaybackThread::Track::reset() if (mState == FLUSHED) { mState = IDLE; } + mPreviousTimestampValid = false; } } @@ -880,19 +885,22 @@ status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& times { // Client should implement this using SSQ; the unpresented frame count in latch is irrelevant if (isFastTrack()) { - // FIXME no lock held to set mPreviousValid = false + // FIXME no lock held to set mPreviousTimestampValid = false return INVALID_OPERATION; } sp<ThreadBase> thread = mThread.promote(); if (thread == 0) { - // FIXME no lock held to set mPreviousValid = false + // FIXME no lock held to set mPreviousTimestampValid = false return INVALID_OPERATION; } + Mutex::Autolock _l(thread->mLock); PlaybackThread *playbackThread = (PlaybackThread *)thread.get(); + + status_t result = INVALID_OPERATION; if (!isOffloaded() && 
!isDirect()) { if (!playbackThread->mLatchQValid) { - mPreviousValid = false; + mPreviousTimestampValid = false; return INVALID_OPERATION; } uint32_t unpresentedFrames = @@ -908,36 +916,54 @@ status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& times uint32_t framesWritten = i >= 0 ? playbackThread->mLatchQ.mFramesReleased[i] : mAudioTrackServerProxy->framesReleased(); - bool checkPreviousTimestamp = mPreviousValid && framesWritten >= mPreviousFramesWritten; if (framesWritten < unpresentedFrames) { - mPreviousValid = false; - return INVALID_OPERATION; + mPreviousTimestampValid = false; + // return invalid result + } else { + timestamp.mPosition = framesWritten - unpresentedFrames; + timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime; + result = NO_ERROR; } - mPreviousFramesWritten = framesWritten; - uint32_t position = framesWritten - unpresentedFrames; - struct timespec time = playbackThread->mLatchQ.mTimestamp.mTime; - if (checkPreviousTimestamp) { - if (time.tv_sec < mPreviousTimestamp.mTime.tv_sec || - (time.tv_sec == mPreviousTimestamp.mTime.tv_sec && - time.tv_nsec < mPreviousTimestamp.mTime.tv_nsec)) { - ALOGW("Time is going backwards"); + } else { // offloaded or direct + result = playbackThread->getTimestamp_l(timestamp); + } + + // Prevent retrograde motion in timestamp. + if (result == NO_ERROR) { + if (mPreviousTimestampValid) { + if (timestamp.mTime.tv_sec < mPreviousTimestamp.mTime.tv_sec || + (timestamp.mTime.tv_sec == mPreviousTimestamp.mTime.tv_sec && + timestamp.mTime.tv_nsec < mPreviousTimestamp.mTime.tv_nsec)) { + ALOGW("WARNING - retrograde timestamp time"); + // FIXME Consider blocking this from propagating upwards. } + + // Looking at signed delta will work even when the timestamps + // are wrapping around. 
+ int32_t deltaPosition = static_cast<int32_t>(timestamp.mPosition + - mPreviousTimestamp.mPosition); // position can bobble slightly as an artifact; this hides the bobble - static const uint32_t MINIMUM_POSITION_DELTA = 8u; - if ((position <= mPreviousTimestamp.mPosition) || - (position - mPreviousTimestamp.mPosition) < MINIMUM_POSITION_DELTA) { - position = mPreviousTimestamp.mPosition; - time = mPreviousTimestamp.mTime; + static const int32_t MINIMUM_POSITION_DELTA = 8; + if (deltaPosition < 0) { +#define TIME_TO_NANOS(time) ((uint64_t)time.tv_sec * 1000000000 + time.tv_nsec) + ALOGW("WARNING - retrograde timestamp position corrected," + " %d = %u - %u, (at %llu, %llu nanos)", + deltaPosition, + timestamp.mPosition, + mPreviousTimestamp.mPosition, + TIME_TO_NANOS(timestamp.mTime), + TIME_TO_NANOS(mPreviousTimestamp.mTime)); +#undef TIME_TO_NANOS + } + if (deltaPosition < MINIMUM_POSITION_DELTA) { + // Current timestamp is bad. Use last valid timestamp. + timestamp = mPreviousTimestamp; } } - timestamp.mPosition = position; - timestamp.mTime = time; mPreviousTimestamp = timestamp; - mPreviousValid = true; - return NO_ERROR; + mPreviousTimestampValid = true; } - - return playbackThread->getTimestamp_l(timestamp); + return result; } status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId) @@ -1709,36 +1735,18 @@ void AudioFlinger::PlaybackThread::OutputTrack::stop() mActive = false; } -bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t frames) +bool AudioFlinger::PlaybackThread::OutputTrack::write(void* data, uint32_t frames) { Buffer *pInBuffer; Buffer inBuffer; - uint32_t channelCount = mChannelCount; bool outputBufferFull = false; inBuffer.frameCount = frames; - inBuffer.i16 = data; + inBuffer.raw = data; uint32_t waitTimeLeftMs = mSourceThread->waitTimeMs(); if (!mActive && frames != 0) { - start(); - sp<ThreadBase> thread = mThread.promote(); - if (thread != 0) { - MixerThread *mixerThread = (MixerThread 
*)thread.get(); - if (mFrameCount > frames) { - if (mBufferQueue.size() < kMaxOverFlowBuffers) { - uint32_t startFrames = (mFrameCount - frames); - pInBuffer = new Buffer; - pInBuffer->mBuffer = new int16_t[startFrames * channelCount]; - pInBuffer->frameCount = startFrames; - pInBuffer->i16 = pInBuffer->mBuffer; - memset(pInBuffer->raw, 0, startFrames * channelCount * sizeof(int16_t)); - mBufferQueue.add(pInBuffer); - } else { - ALOGW("OutputTrack::write() %p no more buffers in queue", this); - } - } - } + (void) start(); } while (waitTimeLeftMs) { @@ -1773,20 +1781,20 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr uint32_t outFrames = pInBuffer->frameCount > mOutBuffer.frameCount ? mOutBuffer.frameCount : pInBuffer->frameCount; - memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * channelCount * sizeof(int16_t)); + memcpy(mOutBuffer.raw, pInBuffer->raw, outFrames * mFrameSize); Proxy::Buffer buf; buf.mFrameCount = outFrames; buf.mRaw = NULL; mClientProxy->releaseBuffer(&buf); pInBuffer->frameCount -= outFrames; - pInBuffer->i16 += outFrames * channelCount; + pInBuffer->raw = (int8_t *)pInBuffer->raw + outFrames * mFrameSize; mOutBuffer.frameCount -= outFrames; - mOutBuffer.i16 += outFrames * channelCount; + mOutBuffer.raw = (int8_t *)mOutBuffer.raw + outFrames * mFrameSize; if (pInBuffer->frameCount == 0) { if (mBufferQueue.size()) { mBufferQueue.removeAt(0); - delete [] pInBuffer->mBuffer; + free(pInBuffer->mBuffer); delete pInBuffer; ALOGV("OutputTrack::write() %p thread %p released overflow buffer %d", this, mThread.unsafe_get(), mBufferQueue.size()); @@ -1802,11 +1810,10 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr if (thread != 0 && !thread->standby()) { if (mBufferQueue.size() < kMaxOverFlowBuffers) { pInBuffer = new Buffer; - pInBuffer->mBuffer = new int16_t[inBuffer.frameCount * channelCount]; + pInBuffer->mBuffer = malloc(inBuffer.frameCount * mFrameSize); pInBuffer->frameCount = 
inBuffer.frameCount; - pInBuffer->i16 = pInBuffer->mBuffer; - memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * channelCount * - sizeof(int16_t)); + pInBuffer->raw = pInBuffer->mBuffer; + memcpy(pInBuffer->raw, inBuffer.raw, inBuffer.frameCount * mFrameSize); mBufferQueue.add(pInBuffer); ALOGV("OutputTrack::write() %p thread %p adding overflow buffer %d", this, mThread.unsafe_get(), mBufferQueue.size()); @@ -1817,23 +1824,10 @@ bool AudioFlinger::PlaybackThread::OutputTrack::write(int16_t* data, uint32_t fr } } - // Calling write() with a 0 length buffer, means that no more data will be written: - // If no more buffers are pending, fill output track buffer to make sure it is started - // by output mixer. - if (frames == 0 && mBufferQueue.size() == 0) { - // FIXME borken, replace by getting framesReady() from proxy - size_t user = 0; // was mCblk->user - if (user < mFrameCount) { - frames = mFrameCount - user; - pInBuffer = new Buffer; - pInBuffer->mBuffer = new int16_t[frames * channelCount]; - pInBuffer->frameCount = frames; - pInBuffer->i16 = pInBuffer->mBuffer; - memset(pInBuffer->raw, 0, frames * channelCount * sizeof(int16_t)); - mBufferQueue.add(pInBuffer); - } else if (mActive) { - stop(); - } + // Calling write() with a 0 length buffer means that no more data will be written: + // We rely on stop() to set the appropriate flags to allow the remaining frames to play out. 
+ if (frames == 0 && mBufferQueue.size() == 0 && mActive) { + stop(); } return outputBufferFull; @@ -1859,7 +1853,7 @@ void AudioFlinger::PlaybackThread::OutputTrack::clearBufferQueue() for (size_t i = 0; i < size; i++) { Buffer *pBuffer = mBufferQueue.itemAt(i); - delete [] pBuffer->mBuffer; + free(pBuffer->mBuffer); delete pBuffer; } mBufferQueue.clear(); @@ -2212,4 +2206,4 @@ void AudioFlinger::RecordThread::PatchRecord::releaseBuffer(Proxy::Buffer* buffe mProxy->releaseBuffer(buffer); } -}; // namespace android +} // namespace android diff --git a/services/audioflinger/test-resample.cpp b/services/audioflinger/test-resample.cpp index 84a655a..7893778 100644 --- a/services/audioflinger/test-resample.cpp +++ b/services/audioflinger/test-resample.cpp @@ -427,6 +427,14 @@ int main(int argc, char* argv[]) { printf("quality: %d channels: %d msec: %" PRId64 " Mfrms/s: %.2lf\n", quality, channels, time/1000000, output_frames * looplimit / (time / 1e9) / 1e6); resampler->reset(); + + // TODO fix legacy bug: reset does not clear buffers. + // delete and recreate resampler here. 
+ delete resampler; + resampler = AudioResampler::create(format, channels, + output_freq, quality); + resampler->setSampleRate(input_freq); + resampler->setVolume(AudioResampler::UNITY_GAIN_FLOAT, AudioResampler::UNITY_GAIN_FLOAT); } memset(output_vaddr, 0, output_size); diff --git a/services/audioflinger/tests/build_and_run_all_unit_tests.sh b/services/audioflinger/tests/build_and_run_all_unit_tests.sh index 2c453b0..7f4d456 100755 --- a/services/audioflinger/tests/build_and_run_all_unit_tests.sh +++ b/services/audioflinger/tests/build_and_run_all_unit_tests.sh @@ -15,7 +15,7 @@ mm echo "waiting for device" adb root && adb wait-for-device remount adb push $OUT/system/lib/libaudioresampler.so /system/lib -adb push $OUT/system/bin/resampler_tests /system/bin +adb push $OUT/data/nativetest/resampler_tests /system/bin sh $ANDROID_BUILD_TOP/frameworks/av/services/audioflinger/tests/run_all_unit_tests.sh diff --git a/services/audioflinger/tests/mixer_to_wav_tests.sh b/services/audioflinger/tests/mixer_to_wav_tests.sh index 9b39e77..d0482a1 100755 --- a/services/audioflinger/tests/mixer_to_wav_tests.sh +++ b/services/audioflinger/tests/mixer_to_wav_tests.sh @@ -60,11 +60,21 @@ function createwav() { fi # Test: +# process__genericResampling with mixed integer and float track input +# track__Resample / track__genericResample + adb shell test-mixer $1 -s 48000 \ + -o /sdcard/tm48000grif.wav \ + sine:2,4000,7520 chirp:2,9200 sine:1,3000,18000 \ + sine:f,6,6000,19000 chirp:i,4,30000 + adb pull /sdcard/tm48000grif.wav $2 + +# Test: # process__genericResampling # track__Resample / track__genericResample adb shell test-mixer $1 -s 48000 \ -o /sdcard/tm48000gr.wav \ - sine:2,4000,7520 chirp:2,9200 sine:1,3000,18000 + sine:2,4000,7520 chirp:2,9200 sine:1,3000,18000 \ + sine:6,6000,19000 adb pull /sdcard/tm48000gr.wav $2 # Test: diff --git a/services/audioflinger/tests/test-mixer.cpp b/services/audioflinger/tests/test-mixer.cpp index 9a4fad6..8da6245 100644 --- 
a/services/audioflinger/tests/test-mixer.cpp +++ b/services/audioflinger/tests/test-mixer.cpp @@ -39,7 +39,7 @@ static void usage(const char* name) { fprintf(stderr, "Usage: %s [-f] [-m] [-c channels]" " [-s sample-rate] [-o <output-file>] [-a <aux-buffer-file>] [-P csv]" " (<input-file> | <command>)+\n", name); - fprintf(stderr, " -f enable floating point input track\n"); + fprintf(stderr, " -f enable floating point input track by default\n"); fprintf(stderr, " -m enable floating point mixer output\n"); fprintf(stderr, " -c number of mixer output channels\n"); fprintf(stderr, " -s mixer sample-rate\n"); @@ -47,8 +47,8 @@ static void usage(const char* name) { fprintf(stderr, " -a <aux-buffer-file>\n"); fprintf(stderr, " -P # frames provided per call to resample() in CSV format\n"); fprintf(stderr, " <input-file> is a WAV file\n"); - fprintf(stderr, " <command> can be 'sine:<channels>,<frequency>,<samplerate>'\n"); - fprintf(stderr, " 'chirp:<channels>,<samplerate>'\n"); + fprintf(stderr, " <command> can be 'sine:[(i|f),]<channels>,<frequency>,<samplerate>'\n"); + fprintf(stderr, " 'chirp:[(i|f),]<channels>,<samplerate>'\n"); } static int writeFile(const char *filename, const void *buffer, @@ -78,6 +78,18 @@ static int writeFile(const char *filename, const void *buffer, return EXIT_SUCCESS; } +const char *parseFormat(const char *s, bool *useFloat) { + if (!strncmp(s, "f,", 2)) { + *useFloat = true; + return s + 2; + } + if (!strncmp(s, "i,", 2)) { + *useFloat = false; + return s + 2; + } + return s; +} + int main(int argc, char* argv[]) { const char* const progname = argv[0]; bool useInputFloat = false; @@ -88,8 +100,9 @@ int main(int argc, char* argv[]) { std::vector<int> Pvalues; const char* outputFilename = NULL; const char* auxFilename = NULL; - std::vector<int32_t> Names; - std::vector<SignalProvider> Providers; + std::vector<int32_t> names; + std::vector<SignalProvider> providers; + std::vector<audio_format_t> formats; for (int ch; (ch = getopt(argc, argv, 
"fmc:s:o:a:P:")) != -1;) { switch (ch) { @@ -138,54 +151,65 @@ int main(int argc, char* argv[]) { size_t outputFrames = 0; // create providers for each track - Providers.resize(argc); + names.resize(argc); + providers.resize(argc); + formats.resize(argc); for (int i = 0; i < argc; ++i) { static const char chirp[] = "chirp:"; static const char sine[] = "sine:"; static const double kSeconds = 1; + bool useFloat = useInputFloat; if (!strncmp(argv[i], chirp, strlen(chirp))) { std::vector<int> v; + const char *s = parseFormat(argv[i] + strlen(chirp), &useFloat); - parseCSV(argv[i] + strlen(chirp), v); + parseCSV(s, v); if (v.size() == 2) { printf("creating chirp(%d %d)\n", v[0], v[1]); - if (useInputFloat) { - Providers[i].setChirp<float>(v[0], 0, v[1]/2, v[1], kSeconds); + if (useFloat) { + providers[i].setChirp<float>(v[0], 0, v[1]/2, v[1], kSeconds); + formats[i] = AUDIO_FORMAT_PCM_FLOAT; } else { - Providers[i].setChirp<int16_t>(v[0], 0, v[1]/2, v[1], kSeconds); + providers[i].setChirp<int16_t>(v[0], 0, v[1]/2, v[1], kSeconds); + formats[i] = AUDIO_FORMAT_PCM_16_BIT; } - Providers[i].setIncr(Pvalues); + providers[i].setIncr(Pvalues); } else { fprintf(stderr, "malformed input '%s'\n", argv[i]); } } else if (!strncmp(argv[i], sine, strlen(sine))) { std::vector<int> v; + const char *s = parseFormat(argv[i] + strlen(sine), &useFloat); - parseCSV(argv[i] + strlen(sine), v); + parseCSV(s, v); if (v.size() == 3) { printf("creating sine(%d %d %d)\n", v[0], v[1], v[2]); - if (useInputFloat) { - Providers[i].setSine<float>(v[0], v[1], v[2], kSeconds); + if (useFloat) { + providers[i].setSine<float>(v[0], v[1], v[2], kSeconds); + formats[i] = AUDIO_FORMAT_PCM_FLOAT; } else { - Providers[i].setSine<int16_t>(v[0], v[1], v[2], kSeconds); + providers[i].setSine<int16_t>(v[0], v[1], v[2], kSeconds); + formats[i] = AUDIO_FORMAT_PCM_16_BIT; } - Providers[i].setIncr(Pvalues); + providers[i].setIncr(Pvalues); } else { fprintf(stderr, "malformed input '%s'\n", argv[i]); } } else { 
printf("creating filename(%s)\n", argv[i]); if (useInputFloat) { - Providers[i].setFile<float>(argv[i]); + providers[i].setFile<float>(argv[i]); + formats[i] = AUDIO_FORMAT_PCM_FLOAT; } else { - Providers[i].setFile<short>(argv[i]); + providers[i].setFile<short>(argv[i]); + formats[i] = AUDIO_FORMAT_PCM_16_BIT; } - Providers[i].setIncr(Pvalues); + providers[i].setIncr(Pvalues); } // calculate the number of output frames - size_t nframes = (int64_t) Providers[i].getNumFrames() * outputSampleRate - / Providers[i].getSampleRate(); + size_t nframes = (int64_t) providers[i].getNumFrames() * outputSampleRate + / providers[i].getSampleRate(); if (i == 0 || outputFrames > nframes) { // choose minimum for outputFrames outputFrames = nframes; } @@ -213,22 +237,20 @@ int main(int argc, char* argv[]) { // create the mixer. const size_t mixerFrameCount = 320; // typical numbers may range from 240 or 960 AudioMixer *mixer = new AudioMixer(mixerFrameCount, outputSampleRate); - audio_format_t inputFormat = useInputFloat - ? AUDIO_FORMAT_PCM_FLOAT : AUDIO_FORMAT_PCM_16_BIT; audio_format_t mixerFormat = useMixerFloat ? AUDIO_FORMAT_PCM_FLOAT : AUDIO_FORMAT_PCM_16_BIT; - float f = AudioMixer::UNITY_GAIN_FLOAT / Providers.size(); // normalize volume by # tracks + float f = AudioMixer::UNITY_GAIN_FLOAT / providers.size(); // normalize volume by # tracks static float f0; // zero // set up the tracks. 
- for (size_t i = 0; i < Providers.size(); ++i) { - //printf("track %d out of %d\n", i, Providers.size()); - uint32_t channelMask = audio_channel_out_mask_from_count(Providers[i].getNumChannels()); + for (size_t i = 0; i < providers.size(); ++i) { + //printf("track %d out of %d\n", i, providers.size()); + uint32_t channelMask = audio_channel_out_mask_from_count(providers[i].getNumChannels()); int32_t name = mixer->getTrackName(channelMask, - inputFormat, AUDIO_SESSION_OUTPUT_MIX); + formats[i], AUDIO_SESSION_OUTPUT_MIX); ALOG_ASSERT(name >= 0); - Names.push_back(name); - mixer->setBufferProvider(name, &Providers[i]); + names[i] = name; + mixer->setBufferProvider(name, &providers[i]); mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (void *)outputAddr); mixer->setParameter( @@ -240,7 +262,7 @@ int main(int argc, char* argv[]) { name, AudioMixer::TRACK, AudioMixer::FORMAT, - (void *)(uintptr_t)inputFormat); + (void *)(uintptr_t)formats[i]); mixer->setParameter( name, AudioMixer::TRACK, @@ -255,7 +277,7 @@ int main(int argc, char* argv[]) { name, AudioMixer::RESAMPLE, AudioMixer::SAMPLE_RATE, - (void *)(uintptr_t)Providers[i].getSampleRate()); + (void *)(uintptr_t)providers[i].getSampleRate()); if (useRamp) { mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, &f0); mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, &f0); @@ -277,11 +299,11 @@ int main(int argc, char* argv[]) { // pump the mixer to process data. 
size_t i; for (i = 0; i < outputFrames - mixerFrameCount; i += mixerFrameCount) { - for (size_t j = 0; j < Names.size(); ++j) { - mixer->setParameter(Names[j], AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, + for (size_t j = 0; j < names.size(); ++j) { + mixer->setParameter(names[j], AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, (char *) outputAddr + i * outputFrameSize); if (auxFilename) { - mixer->setParameter(Names[j], AudioMixer::TRACK, AudioMixer::AUX_BUFFER, + mixer->setParameter(names[j], AudioMixer::TRACK, AudioMixer::AUX_BUFFER, (char *) auxAddr + i * auxFrameSize); } } diff --git a/services/audiopolicy/Android.mk b/services/audiopolicy/Android.mk index 188fc89..d4ce86a 100644 --- a/services/audiopolicy/Android.mk +++ b/services/audiopolicy/Android.mk @@ -3,25 +3,27 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - AudioPolicyService.cpp \ - AudioPolicyEffects.cpp + service/AudioPolicyService.cpp \ + service/AudioPolicyEffects.cpp ifeq ($(USE_LEGACY_AUDIO_POLICY), 1) LOCAL_SRC_FILES += \ - AudioPolicyInterfaceImplLegacy.cpp \ - AudioPolicyClientImplLegacy.cpp + service/AudioPolicyInterfaceImplLegacy.cpp \ + service/AudioPolicyClientImplLegacy.cpp LOCAL_CFLAGS += -DUSE_LEGACY_AUDIO_POLICY else LOCAL_SRC_FILES += \ - AudioPolicyInterfaceImpl.cpp \ - AudioPolicyClientImpl.cpp + service/AudioPolicyInterfaceImpl.cpp \ + service/AudioPolicyClientImpl.cpp endif LOCAL_C_INCLUDES := \ $(TOPDIR)frameworks/av/services/audioflinger \ $(call include-path-for, audio-effects) \ - $(call include-path-for, audio-utils) + $(call include-path-for, audio-utils) \ + $(TOPDIR)frameworks/av/services/audiopolicy/common/include \ + $(TOPDIR)frameworks/av/services/audiopolicy/engine/interface \ LOCAL_SHARED_LIBRARIES := \ libcutils \ @@ -39,7 +41,8 @@ LOCAL_SHARED_LIBRARIES += \ endif LOCAL_STATIC_LIBRARIES := \ - libmedia_helper + libmedia_helper \ + libaudiopolicycomponents LOCAL_MODULE:= libaudiopolicyservice @@ -53,7 +56,7 @@ ifneq 
($(USE_LEGACY_AUDIO_POLICY), 1) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - AudioPolicyManager.cpp + managerdefault/AudioPolicyManager.cpp \ LOCAL_SHARED_LIBRARIES := \ libcutils \ @@ -61,8 +64,15 @@ LOCAL_SHARED_LIBRARIES := \ liblog \ libsoundtrigger +LOCAL_SHARED_LIBRARIES += libaudiopolicyenginedefault + +LOCAL_C_INCLUDES += \ + $(TOPDIR)frameworks/av/services/audiopolicy/common/include \ + $(TOPDIR)frameworks/av/services/audiopolicy/engine/interface \ + LOCAL_STATIC_LIBRARIES := \ - libmedia_helper + libmedia_helper \ + libaudiopolicycomponents LOCAL_MODULE:= libaudiopolicymanagerdefault @@ -73,14 +83,26 @@ ifneq ($(USE_CUSTOM_AUDIO_POLICY), 1) include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ - AudioPolicyFactory.cpp + manager/AudioPolicyFactory.cpp LOCAL_SHARED_LIBRARIES := \ libaudiopolicymanagerdefault +LOCAL_STATIC_LIBRARIES := \ + libaudiopolicycomponents + +LOCAL_C_INCLUDES += \ + $(TOPDIR)frameworks/av/services/audiopolicy/common/include \ + $(TOPDIR)frameworks/av/services/audiopolicy/engine/interface \ + LOCAL_MODULE:= libaudiopolicymanager include $(BUILD_SHARED_LIBRARY) endif endif + +####################################################################### +# Recursive call sub-folder Android.mk +# +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h index 4508fa7..116d0d6 100644 --- a/services/audiopolicy/AudioPolicyInterface.h +++ b/services/audiopolicy/AudioPolicyInterface.h @@ -75,7 +75,8 @@ public: // indicate a change in device connection status virtual status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address) = 0; + const char *device_address, + const char *device_name) = 0; // retrieve a device connection status virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device, const char *device_address) = 0; diff --git a/services/audiopolicy/common/Android.mk 
b/services/audiopolicy/common/Android.mk new file mode 100644 index 0000000..dcce8e3 --- /dev/null +++ b/services/audiopolicy/common/Android.mk @@ -0,0 +1,9 @@ + +LOCAL_PATH := $(call my-dir) +include $(CLEAR_VARS) + +####################################################################### +# Recursive call sub-folder Android.mk +# +include $(call all-makefiles-under,$(LOCAL_PATH)) + diff --git a/services/audiopolicy/common/include/RoutingStrategy.h b/services/audiopolicy/common/include/RoutingStrategy.h new file mode 100644 index 0000000..d38967e --- /dev/null +++ b/services/audiopolicy/common/include/RoutingStrategy.h @@ -0,0 +1,38 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +namespace android { + +// Time in milliseconds after media stopped playing during which we consider that the +// sonification should be as unobtrusive as during the time media was playing. 
+#define SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY 5000 + +enum routing_strategy { + STRATEGY_MEDIA, + STRATEGY_PHONE, + STRATEGY_SONIFICATION, + STRATEGY_SONIFICATION_RESPECTFUL, + STRATEGY_DTMF, + STRATEGY_ENFORCED_AUDIBLE, + STRATEGY_TRANSMITTED_THROUGH_SPEAKER, + STRATEGY_ACCESSIBILITY, + STRATEGY_REROUTING, + NUM_STRATEGIES +}; + +}; //namespace android diff --git a/services/audiopolicy/common/include/Volume.h b/services/audiopolicy/common/include/Volume.h new file mode 100755 index 0000000..a4cc759 --- /dev/null +++ b/services/audiopolicy/common/include/Volume.h @@ -0,0 +1,137 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include <system/audio.h> +#include <utils/Log.h> + +class VolumeCurvePoint +{ +public: + int mIndex; + float mDBAttenuation; +}; + +class Volume +{ +public: + /** + * 4 points to define the volume attenuation curve, each characterized by the volume + * index (from 0 to 100) at which they apply, and the attenuation in dB at that index. + * we use 100 steps to avoid rounding errors when computing the volume in volIndexToAmpl() + * + * @todo shall become configurable + */ + enum { + VOLMIN = 0, + VOLKNEE1 = 1, + VOLKNEE2 = 2, + VOLMAX = 3, + + VOLCNT = 4 + }; + + /** + * device categories used for volume curve management. 
+ */ + enum device_category { + DEVICE_CATEGORY_HEADSET, + DEVICE_CATEGORY_SPEAKER, + DEVICE_CATEGORY_EARPIECE, + DEVICE_CATEGORY_EXT_MEDIA, + DEVICE_CATEGORY_CNT + }; + + /** + * extract one device relevant for volume control from multiple device selection + * + * @param[in] device for which the volume category is associated + * + * @return subset of device required to limit the number of volume category per device + */ + static audio_devices_t getDeviceForVolume(audio_devices_t device) + { + if (device == AUDIO_DEVICE_NONE) { + // this happens when forcing a route update and no track is active on an output. + // In this case the returned category is not important. + device = AUDIO_DEVICE_OUT_SPEAKER; + } else if (popcount(device) > 1) { + // Multiple device selection is either: + // - speaker + one other device: give priority to speaker in this case. + // - one A2DP device + another device: happens with duplicated output. In this case + // retain the device on the A2DP output as the other must not correspond to an active + // selection if not the speaker. + // - HDMI-CEC system audio mode only output: give priority to available item in order. 
+ if (device & AUDIO_DEVICE_OUT_SPEAKER) { + device = AUDIO_DEVICE_OUT_SPEAKER; + } else if (device & AUDIO_DEVICE_OUT_HDMI_ARC) { + device = AUDIO_DEVICE_OUT_HDMI_ARC; + } else if (device & AUDIO_DEVICE_OUT_AUX_LINE) { + device = AUDIO_DEVICE_OUT_AUX_LINE; + } else if (device & AUDIO_DEVICE_OUT_SPDIF) { + device = AUDIO_DEVICE_OUT_SPDIF; + } else { + device = (audio_devices_t)(device & AUDIO_DEVICE_OUT_ALL_A2DP); + } + } + + /*SPEAKER_SAFE is an alias of SPEAKER for purposes of volume control*/ + if (device == AUDIO_DEVICE_OUT_SPEAKER_SAFE) + device = AUDIO_DEVICE_OUT_SPEAKER; + + ALOGW_IF(popcount(device) != 1, + "getDeviceForVolume() invalid device combination: %08x", + device); + + return device; + } + + /** + * returns the category the device belongs to with regard to volume curve management + * + * @param[in] device to check upon the category to whom it belongs to. + * + * @return device category. + */ + static device_category getDeviceCategory(audio_devices_t device) + { + switch(getDeviceForVolume(device)) { + case AUDIO_DEVICE_OUT_EARPIECE: + return DEVICE_CATEGORY_EARPIECE; + case AUDIO_DEVICE_OUT_WIRED_HEADSET: + case AUDIO_DEVICE_OUT_WIRED_HEADPHONE: + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO: + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET: + case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP: + case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES: + return DEVICE_CATEGORY_HEADSET; + case AUDIO_DEVICE_OUT_LINE: + case AUDIO_DEVICE_OUT_AUX_DIGITAL: + /*USB? 
Remote submix?*/ + return DEVICE_CATEGORY_EXT_MEDIA; + case AUDIO_DEVICE_OUT_SPEAKER: + case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT: + case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER: + case AUDIO_DEVICE_OUT_USB_ACCESSORY: + case AUDIO_DEVICE_OUT_USB_DEVICE: + case AUDIO_DEVICE_OUT_REMOTE_SUBMIX: + default: + return DEVICE_CATEGORY_SPEAKER; + } + } + +}; diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h new file mode 100755 index 0000000..a2327ee --- /dev/null +++ b/services/audiopolicy/common/include/policy.h @@ -0,0 +1,84 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include <system/audio.h> + +// For mixed output and inputs, the policy will use max mixer sampling rates. +// Do not limit sampling rate otherwise +#define MAX_MIXER_SAMPLING_RATE 48000 + +// For mixed output and inputs, the policy will use max mixer channel count. 
+// Do not limit channel count otherwise +#define MAX_MIXER_CHANNEL_COUNT 8 + +/** + * A device mask for all audio input devices that are considered "virtual" when evaluating + * active inputs in getActiveInput() + */ +#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX|AUDIO_DEVICE_IN_FM_TUNER) + + +/** + * A device mask for all audio input and output devices where matching inputs/outputs on device + * type alone is not enough: the address must match too + */ +#define APM_AUDIO_DEVICE_MATCH_ADDRESS_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX | \ + AUDIO_DEVICE_OUT_REMOTE_SUBMIX) + +/** + * Check if the state given correspond to an in call state. + * @TODO find a better name for widely call state + * + * @param[in] state to consider + * + * @return true if given state represents a device in a telephony or VoIP call + */ +static inline bool is_state_in_call(int state) +{ + return (state == AUDIO_MODE_IN_CALL) || (state == AUDIO_MODE_IN_COMMUNICATION); +} + +/** + * Check if the input device given is considered as a virtual device. + * + * @param[in] device to consider + * + * @return true if the device is a virtual one, false otherwise. + */ +static bool is_virtual_input_device(audio_devices_t device) +{ + if ((device & AUDIO_DEVICE_BIT_IN) != 0) { + device &= ~AUDIO_DEVICE_BIT_IN; + if ((popcount(device) == 1) && ((device & ~APM_AUDIO_IN_DEVICE_VIRTUAL_ALL) == 0)) + return true; + } + return false; +} + +/** + * Check whether the device type is one + * where addresses are used to distinguish between one connected device and another + * + * @param[in] device to consider + * + * @return true if the device needs distinguish on address, false otherwise.. 
+ */ +static bool device_distinguishes_on_address(audio_devices_t device) +{ + return ((device & APM_AUDIO_DEVICE_MATCH_ADDRESS_ALL & ~AUDIO_DEVICE_BIT_IN) != 0); +} diff --git a/services/audiopolicy/common/managerdefinitions/Android.mk b/services/audiopolicy/common/managerdefinitions/Android.mk new file mode 100644 index 0000000..71ba1cb --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/Android.mk @@ -0,0 +1,34 @@ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + src/DeviceDescriptor.cpp \ + src/AudioGain.cpp \ + src/StreamDescriptor.cpp \ + src/HwModule.cpp \ + src/IOProfile.cpp \ + src/AudioPort.cpp \ + src/AudioPolicyMix.cpp \ + src/AudioPatch.cpp \ + src/AudioInputDescriptor.cpp \ + src/AudioOutputDescriptor.cpp \ + src/EffectDescriptor.cpp \ + src/ConfigParsingUtils.cpp \ + src/SoundTriggerSession.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libcutils \ + libutils \ + liblog \ + +LOCAL_C_INCLUDES += \ + $(LOCAL_PATH)/include \ + $(TOPDIR)frameworks/av/services/audiopolicy/common/include \ + +LOCAL_EXPORT_C_INCLUDE_DIRS := \ + $(LOCAL_PATH)/include + +LOCAL_MODULE := libaudiopolicycomponents + +include $(BUILD_STATIC_LIBRARY) diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioGain.h b/services/audiopolicy/common/managerdefinitions/include/AudioGain.h new file mode 100644 index 0000000..21fbf9b --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/AudioGain.h @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include <utils/Errors.h> +#include <utils/RefBase.h> +#include <system/audio.h> + +namespace android { + +class AudioGain: public RefBase +{ +public: + AudioGain(int index, bool useInChannelMask); + virtual ~AudioGain() {} + + void dump(int fd, int spaces, int index) const; + + void getDefaultConfig(struct audio_gain_config *config); + status_t checkConfig(const struct audio_gain_config *config); + int mIndex; + struct audio_gain mGain; + bool mUseInChannelMask; +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h new file mode 100644 index 0000000..7536a37 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "AudioPort.h" +#include <utils/Errors.h> +#include <system/audio.h> +#include <utils/SortedVector.h> +#include <utils/KeyedVector.h> + +namespace android { + +class IOProfile; +class AudioMix; + +// descriptor for audio inputs. Used to maintain current configuration of each opened audio input +// and keep track of the usage of this input. +class AudioInputDescriptor: public AudioPortConfig +{ +public: + AudioInputDescriptor(const sp<IOProfile>& profile); + void setIoHandle(audio_io_handle_t ioHandle); + + audio_module_handle_t getModuleHandle() const; + + status_t dump(int fd); + + audio_port_handle_t mId; + audio_io_handle_t mIoHandle; // input handle + audio_devices_t mDevice; // current device this input is routed to + AudioMix *mPolicyMix; // non NULL when used by a dynamic policy + audio_patch_handle_t mPatchHandle; + uint32_t mRefCount; // number of AudioRecord clients using + // this input + uint32_t mOpenRefCount; + audio_source_t mInputSource; // input source selected by application + //(mediarecorder.h) + const sp<IOProfile> mProfile; // I/O profile this output derives from + SortedVector<audio_session_t> mSessions; // audio sessions attached to this input + bool mIsSoundTrigger; // used by a soundtrigger capture + + virtual void toAudioPortConfig(struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig = NULL) const; + virtual sp<AudioPort> getAudioPort() const { return mProfile; } + void toAudioPort(struct audio_port *port) const; +}; + +class AudioInputCollection : + public DefaultKeyedVector< audio_io_handle_t, sp<AudioInputDescriptor> > +{ +public: + bool isSourceActive(audio_source_t source) const; + + sp<AudioInputDescriptor> getInputFromId(audio_port_handle_t id) const; + + uint32_t activeInputsCount() const; + + /** + * return io handle of active input or 0 if no input is active + * Only considers inputs from physical devices (e.g. 
main mic, headset mic) when + * ignoreVirtualInputs is true. + */ + audio_io_handle_t getActiveInput(bool ignoreVirtualInputs = true); + + audio_devices_t getSupportedDevices(audio_io_handle_t handle) const; + + status_t dump(int fd) const; +}; + + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h new file mode 100644 index 0000000..43ee691 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h @@ -0,0 +1,113 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "AudioPort.h" +#include <RoutingStrategy.h> +#include <utils/Errors.h> +#include <utils/Timers.h> +#include <utils/KeyedVector.h> +#include <system/audio.h> + +namespace android { + +class IOProfile; +class AudioMix; + +// descriptor for audio outputs. Used to maintain current configuration of each opened audio output +// and keep track of the usage of this output by each audio stream type. 
+class AudioOutputDescriptor: public AudioPortConfig +{ +public: + AudioOutputDescriptor(const sp<IOProfile>& profile); + + status_t dump(int fd); + + audio_devices_t device() const; + void changeRefCount(audio_stream_type_t stream, int delta); + + void setIoHandle(audio_io_handle_t ioHandle); + bool isDuplicated() const { return (mOutput1 != NULL && mOutput2 != NULL); } + audio_devices_t supportedDevices(); + uint32_t latency(); + bool sharesHwModuleWith(const sp<AudioOutputDescriptor> outputDesc); + bool isActive(uint32_t inPastMs = 0) const; + bool isStreamActive(audio_stream_type_t stream, + uint32_t inPastMs = 0, + nsecs_t sysTime = 0) const; + + virtual void toAudioPortConfig(struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig = NULL) const; + virtual sp<AudioPort> getAudioPort() const { return mProfile; } + void toAudioPort(struct audio_port *port) const; + + audio_module_handle_t getModuleHandle() const; + + audio_port_handle_t mId; + audio_io_handle_t mIoHandle; // output handle + uint32_t mLatency; // + audio_output_flags_t mFlags; // + audio_devices_t mDevice; // current device this output is routed to + AudioMix *mPolicyMix; // non NULL when used by a dynamic policy + audio_patch_handle_t mPatchHandle; + uint32_t mRefCount[AUDIO_STREAM_CNT]; // number of streams of each type using this output + nsecs_t mStopTime[AUDIO_STREAM_CNT]; + sp<AudioOutputDescriptor> mOutput1; // used by duplicated outputs: first output + sp<AudioOutputDescriptor> mOutput2; // used by duplicated outputs: second output + float mCurVolume[AUDIO_STREAM_CNT]; // current stream volume + int mMuteCount[AUDIO_STREAM_CNT]; // mute request counter + const sp<IOProfile> mProfile; // I/O profile this output derives from + bool mStrategyMutedByDevice[NUM_STRATEGIES]; // strategies muted because of incompatible + // device selection. 
See checkDeviceMuteStrategies() + uint32_t mDirectOpenCount; // number of clients using this output (direct outputs only) +}; + +class AudioOutputCollection : + public DefaultKeyedVector< audio_io_handle_t, sp<AudioOutputDescriptor> > +{ +public: + bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const; + + /** + * return whether a stream is playing remotely, override to change the definition of + * local/remote playback, used for instance by notification manager to not make + * media players lose audio focus when not playing locally + * For the base implementation, "remotely" means playing during screen mirroring which + * uses an output for playback with a non-empty, non "0" address. + */ + bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) const; + + /** + * returns the A2DP output handle if it is open or 0 otherwise + */ + audio_io_handle_t getA2dpOutput() const; + + sp<AudioOutputDescriptor> getOutputFromId(audio_port_handle_t id) const; + + sp<AudioOutputDescriptor> getPrimaryOutput() const; + + /** + * return true if any output is playing anything besides the stream to ignore + */ + bool isAnyOutputActive(audio_stream_type_t streamToIgnore) const; + + audio_devices_t getSupportedDevices(audio_io_handle_t handle) const; + + status_t dump(int fd) const; +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h b/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h new file mode 100644 index 0000000..385f257 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/AudioPatch.h @@ -0,0 +1,55 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include <system/audio.h> +#include <utils/Errors.h> +#include <utils/RefBase.h> +#include <utils/Errors.h> +#include <utils/KeyedVector.h> + +namespace android { + +class AudioPatch : public RefBase +{ +public: + AudioPatch(const struct audio_patch *patch, uid_t uid); + + status_t dump(int fd, int spaces, int index) const; + + audio_patch_handle_t mHandle; + struct audio_patch mPatch; + uid_t mUid; + audio_patch_handle_t mAfPatchHandle; + +private: + static volatile int32_t mNextUniqueId; +}; + +class AudioPatchCollection : public DefaultKeyedVector<audio_patch_handle_t, sp<AudioPatch> > +{ +public: + status_t addAudioPatch(audio_patch_handle_t handle, const sp<AudioPatch>& patch); + + status_t removeAudioPatch(audio_patch_handle_t handle); + + status_t listAudioPatches(unsigned int *num_patches, struct audio_patch *patches) const; + + status_t dump(int fd) const; +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h new file mode 100644 index 0000000..988aed6 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyMix.h @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include <utils/RefBase.h> +#include <media/AudioPolicy.h> +#include <utils/KeyedVector.h> +#include <hardware/audio.h> +#include <utils/String8.h> + +namespace android { + +class AudioOutputDescriptor; + +/** + * custom mix entry in mPolicyMixes + */ +class AudioPolicyMix : public RefBase { +public: + AudioPolicyMix() {} + + const sp<AudioOutputDescriptor> &getOutput() const; + + void setOutput(sp<AudioOutputDescriptor> &output); + + void clearOutput(); + + android::AudioMix &getMix(); + + void setMix(AudioMix &mix); + +private: + AudioMix mMix; // Audio policy mix descriptor + sp<AudioOutputDescriptor> mOutput; // Corresponding output stream +}; + + +class AudioPolicyMixCollection : public DefaultKeyedVector<String8, sp<AudioPolicyMix> > +{ +public: + status_t getAudioPolicyMix(String8 address, sp<AudioPolicyMix> &policyMix) const; + + status_t registerMix(String8 address, AudioMix mix); + + status_t unregisterMix(String8 address); + + void closeOutput(sp<AudioOutputDescriptor> &desc); + + /** + * Try to find an output descriptor for the given attributes. + * + * @param[in] attributes to consider for the research of output descriptor. + * @param[out] desc to return if an output could be found. + * + * @return NO_ERROR if an output was found for the given attribute (in this case, the + * descriptor output param is initialized), error code otherwise. 
+ */ + status_t getOutputForAttr(audio_attributes_t attributes, sp<AudioOutputDescriptor> &desc); + + audio_devices_t getDeviceAndMixForInputSource(audio_source_t inputSource, + audio_devices_t availableDeviceTypes, + AudioMix **policyMix); + + status_t getInputMixForAttr(audio_attributes_t attr, AudioMix *&policyMix); +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPort.h b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h new file mode 100644 index 0000000..4f7f2bc --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/AudioPort.h @@ -0,0 +1,125 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include <utils/String8.h> +#include <utils/Vector.h> +#include <utils/RefBase.h> +#include <utils/Errors.h> +#include <system/audio.h> +#include <cutils/config_utils.h> + +namespace android { + +class HwModule; +class AudioGain; + +class AudioPort : public virtual RefBase +{ +public: + AudioPort(const String8& name, audio_port_type_t type, + audio_port_role_t role, const sp<HwModule>& module); + virtual ~AudioPort() {} + + audio_port_handle_t getHandle() { return mId; } + + void attach(const sp<HwModule>& module); + bool isAttached() { return mId != 0; } + + static audio_port_handle_t getNextUniqueId(); + + virtual void toAudioPort(struct audio_port *port) const; + + void importAudioPort(const sp<AudioPort> port); + void clearCapabilities(); + + void loadSamplingRates(char *name); + void loadFormats(char *name); + void loadOutChannels(char *name); + void loadInChannels(char *name); + + audio_gain_mode_t loadGainMode(char *name); + void loadGain(cnode *root, int index); + virtual void loadGains(cnode *root); + + // searches for an exact match + status_t checkExactSamplingRate(uint32_t samplingRate) const; + // searches for a compatible match, and returns the best match via updatedSamplingRate + status_t checkCompatibleSamplingRate(uint32_t samplingRate, + uint32_t *updatedSamplingRate) const; + // searches for an exact match + status_t checkExactChannelMask(audio_channel_mask_t channelMask) const; + // searches for a compatible match, currently implemented for input channel masks only + status_t checkCompatibleChannelMask(audio_channel_mask_t channelMask) const; + status_t checkFormat(audio_format_t format) const; + status_t checkGain(const struct audio_gain_config *gainConfig, int index) const; + + uint32_t pickSamplingRate() const; + audio_channel_mask_t pickChannelMask() const; + audio_format_t pickFormat() const; + + static const audio_format_t sPcmFormatCompareTable[]; + static int compareFormats(audio_format_t format1, audio_format_t 
format2); + + audio_module_handle_t getModuleHandle() const; + + void dump(int fd, int spaces) const; + + String8 mName; + audio_port_type_t mType; + audio_port_role_t mRole; + bool mUseInChannelMask; + // by convention, "0' in the first entry in mSamplingRates, mChannelMasks or mFormats + // indicates the supported parameters should be read from the output stream + // after it is opened for the first time + Vector <uint32_t> mSamplingRates; // supported sampling rates + Vector <audio_channel_mask_t> mChannelMasks; // supported channel masks + Vector <audio_format_t> mFormats; // supported audio formats + Vector < sp<AudioGain> > mGains; // gain controllers + sp<HwModule> mModule; // audio HW module exposing this I/O stream + uint32_t mFlags; // attribute flags (e.g primary output, + // direct output...). + + +protected: + //TODO - clarify the role of mId in this case, both an "attached" indicator + // and a unique ID for identifying a port to the (upcoming) selection API, + // and its relationship to the mId in AudioOutputDescriptor and AudioInputDescriptor. 
+ audio_port_handle_t mId; + +private: + static volatile int32_t mNextUniqueId; +}; + +class AudioPortConfig : public virtual RefBase +{ +public: + AudioPortConfig(); + virtual ~AudioPortConfig() {} + + status_t applyAudioPortConfig(const struct audio_port_config *config, + struct audio_port_config *backupConfig = NULL); + virtual void toAudioPortConfig(struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig = NULL) const = 0; + virtual sp<AudioPort> getAudioPort() const = 0; + uint32_t mSamplingRate; + audio_format_t mFormat; + audio_channel_mask_t mChannelMask; + struct audio_gain_config mGain; +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/ConfigParsingUtils.h b/services/audiopolicy/common/managerdefinitions/include/ConfigParsingUtils.h new file mode 100644 index 0000000..53cb4a3 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/ConfigParsingUtils.h @@ -0,0 +1,200 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "DeviceDescriptor.h" +#include "HwModule.h" +#include "audio_policy_conf.h" +#include <system/audio.h> +#include <utils/Log.h> +#include <utils/Vector.h> +#include <utils/SortedVector.h> +#include <cutils/config_utils.h> +#include <utils/RefBase.h> +#include <system/audio_policy.h> + +namespace android { + +// ---------------------------------------------------------------------------- +// Definitions for audio_policy.conf file parsing +// ---------------------------------------------------------------------------- + +struct StringToEnum { + const char *name; + uint32_t value; +}; + +#define STRING_TO_ENUM(string) { #string, string } +#ifndef ARRAY_SIZE +#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0])) +#endif + +const StringToEnum sDeviceNameToEnumTable[] = { + STRING_TO_ENUM(AUDIO_DEVICE_OUT_EARPIECE), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_SPEAKER), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_ALL_SCO), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_ALL_A2DP), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_HDMI), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_ALL_USB), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_LINE), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC), + 
STRING_TO_ENUM(AUDIO_DEVICE_OUT_SPDIF), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_FM), + STRING_TO_ENUM(AUDIO_DEVICE_OUT_AUX_LINE), + STRING_TO_ENUM(AUDIO_DEVICE_IN_AMBIENT), + STRING_TO_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC), + STRING_TO_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_IN_ALL_SCO), + STRING_TO_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL), + STRING_TO_ENUM(AUDIO_DEVICE_IN_HDMI), + STRING_TO_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX), + STRING_TO_ENUM(AUDIO_DEVICE_IN_VOICE_CALL), + STRING_TO_ENUM(AUDIO_DEVICE_IN_BACK_MIC), + STRING_TO_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX), + STRING_TO_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET), + STRING_TO_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY), + STRING_TO_ENUM(AUDIO_DEVICE_IN_USB_DEVICE), + STRING_TO_ENUM(AUDIO_DEVICE_IN_FM_TUNER), + STRING_TO_ENUM(AUDIO_DEVICE_IN_TV_TUNER), + STRING_TO_ENUM(AUDIO_DEVICE_IN_LINE), + STRING_TO_ENUM(AUDIO_DEVICE_IN_SPDIF), + STRING_TO_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP), + STRING_TO_ENUM(AUDIO_DEVICE_IN_LOOPBACK), +}; + +const StringToEnum sOutputFlagNameToEnumTable[] = { + STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_DIRECT), + STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY), + STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_FAST), + STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER), + STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD), + STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING), + STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC), +}; + +const StringToEnum sInputFlagNameToEnumTable[] = { + STRING_TO_ENUM(AUDIO_INPUT_FLAG_FAST), + STRING_TO_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD), +}; + +const StringToEnum sFormatNameToEnumTable[] = { + STRING_TO_ENUM(AUDIO_FORMAT_PCM_16_BIT), + STRING_TO_ENUM(AUDIO_FORMAT_PCM_8_BIT), + STRING_TO_ENUM(AUDIO_FORMAT_PCM_32_BIT), + STRING_TO_ENUM(AUDIO_FORMAT_PCM_8_24_BIT), + STRING_TO_ENUM(AUDIO_FORMAT_PCM_FLOAT), + STRING_TO_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED), + STRING_TO_ENUM(AUDIO_FORMAT_MP3), + 
STRING_TO_ENUM(AUDIO_FORMAT_AAC), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_MAIN), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_LC), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_SSR), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_LTP), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_HE_V1), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_SCALABLE), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_ERLC), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_LD), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_HE_V2), + STRING_TO_ENUM(AUDIO_FORMAT_AAC_ELD), + STRING_TO_ENUM(AUDIO_FORMAT_VORBIS), + STRING_TO_ENUM(AUDIO_FORMAT_HE_AAC_V1), + STRING_TO_ENUM(AUDIO_FORMAT_HE_AAC_V2), + STRING_TO_ENUM(AUDIO_FORMAT_OPUS), + STRING_TO_ENUM(AUDIO_FORMAT_AC3), + STRING_TO_ENUM(AUDIO_FORMAT_E_AC3), +}; + +const StringToEnum sOutChannelsNameToEnumTable[] = { + STRING_TO_ENUM(AUDIO_CHANNEL_OUT_MONO), + STRING_TO_ENUM(AUDIO_CHANNEL_OUT_STEREO), + STRING_TO_ENUM(AUDIO_CHANNEL_OUT_QUAD), + STRING_TO_ENUM(AUDIO_CHANNEL_OUT_5POINT1), + STRING_TO_ENUM(AUDIO_CHANNEL_OUT_7POINT1), +}; + +const StringToEnum sInChannelsNameToEnumTable[] = { + STRING_TO_ENUM(AUDIO_CHANNEL_IN_MONO), + STRING_TO_ENUM(AUDIO_CHANNEL_IN_STEREO), + STRING_TO_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK), +}; + +const StringToEnum sGainModeNameToEnumTable[] = { + STRING_TO_ENUM(AUDIO_GAIN_MODE_JOINT), + STRING_TO_ENUM(AUDIO_GAIN_MODE_CHANNELS), + STRING_TO_ENUM(AUDIO_GAIN_MODE_RAMP), +}; + +class ConfigParsingUtils +{ +public: + static uint32_t stringToEnum(const struct StringToEnum *table, + size_t size, + const char *name); + static const char *enumToString(const struct StringToEnum *table, + size_t size, + uint32_t value); + static bool stringToBool(const char *value); + static uint32_t parseOutputFlagNames(char *name); + static uint32_t parseInputFlagNames(char *name); + static audio_devices_t parseDeviceNames(char *name); + + static void loadHwModules(cnode *root, HwModuleCollection &hwModules, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevices, + bool 
&isSpeakerDrcEnabled); + + static void loadGlobalConfig(cnode *root, const sp<HwModule>& module, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevices, + bool &isSpeakerDrcEnabled); + + static status_t loadAudioPolicyConfig(const char *path, + HwModuleCollection &hwModules, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevices, + bool &isSpeakerDrcEnabled); + +private: + static void loadHwModule(cnode *root, HwModuleCollection &hwModules, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevices, + bool &isSpeakerDrcEnabled); +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h new file mode 100644 index 0000000..d15f6b4 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h @@ -0,0 +1,92 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include "AudioPort.h" +#include <utils/Errors.h> +#include <utils/String8.h> +#include <utils/SortedVector.h> +#include <cutils/config_utils.h> +#include <system/audio.h> +#include <system/audio_policy.h> + +namespace android { + +class DeviceDescriptor : public AudioPort, public AudioPortConfig +{ +public: + DeviceDescriptor(const String8& name, audio_devices_t type); + + virtual ~DeviceDescriptor() {} + + bool equals(const sp<DeviceDescriptor>& other) const; + + // AudioPortConfig + virtual sp<AudioPort> getAudioPort() const { return (AudioPort*) this; } + virtual void toAudioPortConfig(struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig = NULL) const; + + // AudioPort + virtual void loadGains(cnode *root); + virtual void toAudioPort(struct audio_port *port) const; + + audio_devices_t type() const { return mDeviceType; } + status_t dump(int fd, int spaces, int index) const; + + String8 mAddress; + audio_port_handle_t mId; + + static String8 emptyNameStr; + +private: + audio_devices_t mDeviceType; + +friend class DeviceVector; +}; + +class DeviceVector : public SortedVector< sp<DeviceDescriptor> > +{ +public: + DeviceVector() : SortedVector(), mDeviceTypes(AUDIO_DEVICE_NONE) {} + + ssize_t add(const sp<DeviceDescriptor>& item); + ssize_t remove(const sp<DeviceDescriptor>& item); + ssize_t indexOf(const sp<DeviceDescriptor>& item) const; + + audio_devices_t types() const { return mDeviceTypes; } + + void loadDevicesFromType(audio_devices_t types); + void loadDevicesFromName(char *name, const DeviceVector& declaredDevices); + + sp<DeviceDescriptor> getDevice(audio_devices_t type, String8 address) const; + DeviceVector getDevicesFromType(audio_devices_t types) const; + sp<DeviceDescriptor> getDeviceFromId(audio_port_handle_t id) const; + sp<DeviceDescriptor> getDeviceFromName(const String8& name) const; + DeviceVector getDevicesFromTypeAddr(audio_devices_t type, String8 address) const; + + audio_devices_t 
getDevicesFromHwModule(audio_module_handle_t moduleHandle) const; + + audio_policy_dev_state_t getDeviceConnectionState(const sp<DeviceDescriptor> &devDesc) const; + + status_t dump(int fd, const String8 &direction) const; + +private: + void refreshTypes(); + audio_devices_t mDeviceTypes; +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h new file mode 100644 index 0000000..c9783a1 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h @@ -0,0 +1,71 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include <RoutingStrategy.h> +#include <hardware/audio_effect.h> +#include <utils/KeyedVector.h> +#include <utils/RefBase.h> +#include <utils/Errors.h> + +namespace android { + + +class EffectDescriptor : public RefBase +{ +public: + status_t dump(int fd); + + int mIo; // io the effect is attached to + routing_strategy mStrategy; // routing strategy the effect is associated to + int mSession; // audio session the effect is on + effect_descriptor_t mDesc; // effect descriptor + bool mEnabled; // enabled state: CPU load being used or not +}; + +class EffectDescriptorCollection : public KeyedVector<int, sp<EffectDescriptor> > +{ +public: + EffectDescriptorCollection(); + + status_t registerEffect(const effect_descriptor_t *desc, audio_io_handle_t io, + uint32_t strategy, int session, int id); + status_t unregisterEffect(int id); + status_t setEffectEnabled(int id, bool enabled); + uint32_t getMaxEffectsCpuLoad() const; + uint32_t getMaxEffectsMemory() const; + bool isNonOffloadableEffectEnabled(); + + status_t dump(int fd); + +private: + status_t setEffectEnabled(const sp<EffectDescriptor> &effectDesc, bool enabled); + + uint32_t mTotalEffectsCpuLoad; // current CPU load used by effects + uint32_t mTotalEffectsMemory; // current memory used by effects + + /** + * Maximum CPU load allocated to audio effects in 0.1 MIPS (ARMv5TE, 0 WS memory) units + */ + static const uint32_t MAX_EFFECTS_CPU_LOAD = 1000; + /** + * Maximum memory allocated to audio effects in KB + */ + static const uint32_t MAX_EFFECTS_MEMORY = 512; +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h new file mode 100644 index 0000000..92c3ea2 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h @@ -0,0 +1,74 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "DeviceDescriptor.h" +#include <utils/RefBase.h> +#include <utils/String8.h> +#include <utils/Errors.h> +#include <utils/Vector.h> +#include <system/audio.h> +#include <cutils/config_utils.h> + +namespace android { + +class IOProfile; + +class HwModule : public RefBase +{ +public: + HwModule(const char *name); + ~HwModule(); + + status_t loadOutput(cnode *root); + status_t loadInput(cnode *root); + status_t loadDevice(cnode *root); + + status_t addOutputProfile(String8 name, const audio_config_t *config, + audio_devices_t device, String8 address); + status_t removeOutputProfile(String8 name); + status_t addInputProfile(String8 name, const audio_config_t *config, + audio_devices_t device, String8 address); + status_t removeInputProfile(String8 name); + + audio_module_handle_t getHandle() const { return mHandle; } + + void dump(int fd); + + const char *const mName; // base name of the audio HW module (primary, a2dp ...) 
+ uint32_t mHalVersion; // audio HAL API version + audio_module_handle_t mHandle; + Vector < sp<IOProfile> > mOutputProfiles; // output profiles exposed by this module + Vector < sp<IOProfile> > mInputProfiles; // input profiles exposed by this module + DeviceVector mDeclaredDevices; // devices declared in audio_policy.conf +}; + +class HwModuleCollection : public Vector< sp<HwModule> > +{ +public: + sp<HwModule> getModuleFromName(const char *name) const; + + sp <HwModule> getModuleForDevice(audio_devices_t device) const; + + sp<DeviceDescriptor> getDeviceDescriptor(const audio_devices_t device, + const char *device_address, + const char *device_name) const; + + status_t dump(int fd) const; +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/IOProfile.h b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h new file mode 100644 index 0000000..095e759 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/IOProfile.h @@ -0,0 +1,58 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include "AudioPort.h" +#include "DeviceDescriptor.h" +#include <utils/String8.h> +#include <system/audio.h> + +namespace android { + +class HwModule; + +// the IOProfile class describes the capabilities of an output or input stream. +// It is currently assumed that all combination of listed parameters are supported. 
+// It is used by the policy manager to determine if an output or input is suitable for +// a given use case, open/close it accordingly and connect/disconnect audio tracks +// to/from it. +class IOProfile : public AudioPort +{ +public: + IOProfile(const String8& name, audio_port_role_t role, const sp<HwModule>& module); + virtual ~IOProfile(); + + // This method is used for both output and input. + // If parameter updatedSamplingRate is non-NULL, it is assigned the actual sample rate. + // For input, flags is interpreted as audio_input_flags_t. + // TODO: merge audio_output_flags_t and audio_input_flags_t. + bool isCompatibleProfile(audio_devices_t device, + String8 address, + uint32_t samplingRate, + uint32_t *updatedSamplingRate, + audio_format_t format, + audio_channel_mask_t channelMask, + uint32_t flags) const; + + void dump(int fd); + void log(); + + DeviceVector mSupportedDevices; // supported devices + // (devices this output can be routed to) +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/SoundTriggerSession.h b/services/audiopolicy/common/managerdefinitions/include/SoundTriggerSession.h new file mode 100644 index 0000000..420e6d7 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/SoundTriggerSession.h @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include <system/audio.h> +#include <utils/Errors.h> +#include <utils/KeyedVector.h> + +namespace android { + +class SoundTriggerSessionCollection : public DefaultKeyedVector<audio_session_t, audio_io_handle_t> +{ +public: + status_t releaseSession(audio_session_t session); + + status_t acquireSession(audio_session_t session, audio_io_handle_t ioHandle); +}; + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h new file mode 100644 index 0000000..84db5ab --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/include/StreamDescriptor.h @@ -0,0 +1,85 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include <Volume.h> +#include <utils/KeyedVector.h> +#include <utils/StrongPointer.h> +#include <utils/SortedVector.h> +#include <hardware/audio.h> + +namespace android { + +// stream descriptor used for volume control +class StreamDescriptor +{ +public: + StreamDescriptor(); + + int getVolumeIndex(audio_devices_t device) const; + bool canBeMuted() const { return mCanBeMuted; } + void clearCurrentVolumeIndex(); + void addCurrentVolumeIndex(audio_devices_t device, int index); + int getVolumeIndexMin() const { return mIndexMin; } + int getVolumeIndexMax() const { return mIndexMax; } + void setVolumeIndexMin(int volIndexMin); + void setVolumeIndexMax(int volIndexMax); + + void dump(int fd) const; + + void setVolumeCurvePoint(Volume::device_category deviceCategory, const VolumeCurvePoint *point); + const VolumeCurvePoint *getVolumeCurvePoint(Volume::device_category deviceCategory) const + { + return mVolumeCurve[deviceCategory]; + } + +private: + const VolumeCurvePoint *mVolumeCurve[Volume::DEVICE_CATEGORY_CNT]; + KeyedVector<audio_devices_t, int> mIndexCur; /**< current volume index per device. */ + int mIndexMin; /**< min volume index. */ + int mIndexMax; /**< max volume index. */ + bool mCanBeMuted; /**< true is the stream can be muted. 
*/ +}; + +/** + * stream descriptors collection for volume control + */ +class StreamDescriptorCollection : public DefaultKeyedVector<audio_stream_type_t, StreamDescriptor> +{ +public: + StreamDescriptorCollection(); + + void clearCurrentVolumeIndex(audio_stream_type_t stream); + void addCurrentVolumeIndex(audio_stream_type_t stream, audio_devices_t device, int index); + + bool canBeMuted(audio_stream_type_t stream); + + status_t dump(int fd) const; + + void setVolumeCurvePoint(audio_stream_type_t stream, + Volume::device_category deviceCategory, + const VolumeCurvePoint *point); + + const VolumeCurvePoint *getVolumeCurvePoint(audio_stream_type_t stream, + Volume::device_category deviceCategory) const; + + void setVolumeIndexMin(audio_stream_type_t stream,int volIndexMin); + void setVolumeIndexMax(audio_stream_type_t stream,int volIndexMax); + +}; + +}; // namespace android diff --git a/services/audiopolicy/audio_policy_conf.h b/services/audiopolicy/common/managerdefinitions/include/audio_policy_conf.h index 2535a67..a393e3b 100644 --- a/services/audiopolicy/audio_policy_conf.h +++ b/services/audiopolicy/common/managerdefinitions/include/audio_policy_conf.h @@ -14,9 +14,7 @@ * limitations under the License. */ - -#ifndef ANDROID_AUDIO_POLICY_CONF_H -#define ANDROID_AUDIO_POLICY_CONF_H +#pragma once ///////////////////////////////////////////////// @@ -53,9 +51,9 @@ // "formats" in outputs descriptors indicating that supported // values should be queried after opening the output. 
-#define DEVICES_TAG "devices" -#define DEVICE_TYPE "type" -#define DEVICE_ADDRESS "address" +#define APM_DEVICES_TAG "devices" +#define APM_DEVICE_TYPE "type" +#define APM_DEVICE_ADDRESS "address" #define MIXERS_TAG "mixers" #define MIXER_TYPE "type" @@ -71,7 +69,3 @@ #define GAIN_STEP_VALUE "step_value_mB" #define GAIN_MIN_RAMP_MS "min_ramp_ms" #define GAIN_MAX_RAMP_MS "max_ramp_ms" - - - -#endif // ANDROID_AUDIO_POLICY_CONF_H diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioGain.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioGain.cpp new file mode 100644 index 0000000..fc7b0cc --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/AudioGain.cpp @@ -0,0 +1,130 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::AudioGain" +//#define LOG_NDEBUG 0 + +//#define VERY_VERBOSE_LOGGING +#ifdef VERY_VERBOSE_LOGGING +#define ALOGVV ALOGV +#else +#define ALOGVV(a...) 
do { } while(0) +#endif + +#include "AudioGain.h" +#include "StreamDescriptor.h" +#include <utils/Log.h> +#include <utils/String8.h> + +#include <math.h> + +namespace android { + +AudioGain::AudioGain(int index, bool useInChannelMask) +{ + mIndex = index; + mUseInChannelMask = useInChannelMask; + memset(&mGain, 0, sizeof(struct audio_gain)); +} + +void AudioGain::getDefaultConfig(struct audio_gain_config *config) +{ + config->index = mIndex; + config->mode = mGain.mode; + config->channel_mask = mGain.channel_mask; + if ((mGain.mode & AUDIO_GAIN_MODE_JOINT) == AUDIO_GAIN_MODE_JOINT) { + config->values[0] = mGain.default_value; + } else { + uint32_t numValues; + if (mUseInChannelMask) { + numValues = audio_channel_count_from_in_mask(mGain.channel_mask); + } else { + numValues = audio_channel_count_from_out_mask(mGain.channel_mask); + } + for (size_t i = 0; i < numValues; i++) { + config->values[i] = mGain.default_value; + } + } + if ((mGain.mode & AUDIO_GAIN_MODE_RAMP) == AUDIO_GAIN_MODE_RAMP) { + config->ramp_duration_ms = mGain.min_ramp_ms; + } +} + +status_t AudioGain::checkConfig(const struct audio_gain_config *config) +{ + if ((config->mode & ~mGain.mode) != 0) { + return BAD_VALUE; + } + if ((config->mode & AUDIO_GAIN_MODE_JOINT) == AUDIO_GAIN_MODE_JOINT) { + if ((config->values[0] < mGain.min_value) || + (config->values[0] > mGain.max_value)) { + return BAD_VALUE; + } + } else { + if ((config->channel_mask & ~mGain.channel_mask) != 0) { + return BAD_VALUE; + } + uint32_t numValues; + if (mUseInChannelMask) { + numValues = audio_channel_count_from_in_mask(config->channel_mask); + } else { + numValues = audio_channel_count_from_out_mask(config->channel_mask); + } + for (size_t i = 0; i < numValues; i++) { + if ((config->values[i] < mGain.min_value) || + (config->values[i] > mGain.max_value)) { + return BAD_VALUE; + } + } + } + if ((config->mode & AUDIO_GAIN_MODE_RAMP) == AUDIO_GAIN_MODE_RAMP) { + if ((config->ramp_duration_ms < mGain.min_ramp_ms) || + 
(config->ramp_duration_ms > mGain.max_ramp_ms)) { + return BAD_VALUE; + } + } + return NO_ERROR; +} + +void AudioGain::dump(int fd, int spaces, int index) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "%*sGain %d:\n", spaces, "", index+1); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- mode: %08x\n", spaces, "", mGain.mode); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- channel_mask: %08x\n", spaces, "", mGain.channel_mask); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- min_value: %d mB\n", spaces, "", mGain.min_value); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- max_value: %d mB\n", spaces, "", mGain.max_value); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- default_value: %d mB\n", spaces, "", mGain.default_value); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- step_value: %d mB\n", spaces, "", mGain.step_value); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- min_ramp_ms: %d ms\n", spaces, "", mGain.min_ramp_ms); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- max_ramp_ms: %d ms\n", spaces, "", mGain.max_ramp_ms); + result.append(buffer); + + write(fd, result.string(), result.size()); +} + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp new file mode 100644 index 0000000..fa66728 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp @@ -0,0 +1,187 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::AudioInputDescriptor" +//#define LOG_NDEBUG 0 + +#include "AudioInputDescriptor.h" +#include "IOProfile.h" +#include "AudioGain.h" +#include "HwModule.h" +#include <media/AudioPolicy.h> +#include <policy.h> + +namespace android { + +AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile) + : mId(0), mIoHandle(0), + mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL), mPatchHandle(0), mRefCount(0), + mInputSource(AUDIO_SOURCE_DEFAULT), mProfile(profile), mIsSoundTrigger(false) +{ + if (profile != NULL) { + mSamplingRate = profile->pickSamplingRate(); + mFormat = profile->pickFormat(); + mChannelMask = profile->pickChannelMask(); + if (profile->mGains.size() > 0) { + profile->mGains[0]->getDefaultConfig(&mGain); + } + } +} + +void AudioInputDescriptor::setIoHandle(audio_io_handle_t ioHandle) +{ + mId = AudioPort::getNextUniqueId(); + mIoHandle = ioHandle; +} + +audio_module_handle_t AudioInputDescriptor::getModuleHandle() const +{ + return mProfile->getModuleHandle(); +} + +void AudioInputDescriptor::toAudioPortConfig(struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig) const +{ + ALOG_ASSERT(mProfile != 0, + "toAudioPortConfig() called on input with null profile %d", mIoHandle); + dstConfig->config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE|AUDIO_PORT_CONFIG_CHANNEL_MASK| + AUDIO_PORT_CONFIG_FORMAT|AUDIO_PORT_CONFIG_GAIN; + if (srcConfig != NULL) { + dstConfig->config_mask |= srcConfig->config_mask; + } + + AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig); + + dstConfig->id = 
mId; + dstConfig->role = AUDIO_PORT_ROLE_SINK; + dstConfig->type = AUDIO_PORT_TYPE_MIX; + dstConfig->ext.mix.hw_module = mProfile->mModule->mHandle; + dstConfig->ext.mix.handle = mIoHandle; + dstConfig->ext.mix.usecase.source = mInputSource; +} + +void AudioInputDescriptor::toAudioPort(struct audio_port *port) const +{ + ALOG_ASSERT(mProfile != 0, "toAudioPort() called on input with null profile %d", mIoHandle); + + mProfile->toAudioPort(port); + port->id = mId; + toAudioPortConfig(&port->active_config); + port->ext.mix.hw_module = mProfile->mModule->mHandle; + port->ext.mix.handle = mIoHandle; + port->ext.mix.latency_class = AUDIO_LATENCY_NORMAL; +} + +status_t AudioInputDescriptor::dump(int fd) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, " ID: %d\n", mId); + result.append(buffer); + snprintf(buffer, SIZE, " Sampling rate: %d\n", mSamplingRate); + result.append(buffer); + snprintf(buffer, SIZE, " Format: %d\n", mFormat); + result.append(buffer); + snprintf(buffer, SIZE, " Channels: %08x\n", mChannelMask); + result.append(buffer); + snprintf(buffer, SIZE, " Devices %08x\n", mDevice); + result.append(buffer); + snprintf(buffer, SIZE, " Ref Count %d\n", mRefCount); + result.append(buffer); + snprintf(buffer, SIZE, " Open Ref Count %d\n", mOpenRefCount); + result.append(buffer); + + write(fd, result.string(), result.size()); + + return NO_ERROR; +} + +bool AudioInputCollection::isSourceActive(audio_source_t source) const +{ + for (size_t i = 0; i < size(); i++) { + const sp<AudioInputDescriptor> inputDescriptor = valueAt(i); + if (inputDescriptor->mRefCount == 0) { + continue; + } + if (inputDescriptor->mInputSource == (int)source) { + return true; + } + } + return false; +} + +sp<AudioInputDescriptor> AudioInputCollection::getInputFromId(audio_port_handle_t id) const +{ + sp<AudioInputDescriptor> inputDesc = NULL; + for (size_t i = 0; i < size(); i++) { + inputDesc = valueAt(i); + if (inputDesc->mId == id) { + 
break; + } + } + return inputDesc; +} + +uint32_t AudioInputCollection::activeInputsCount() const +{ + uint32_t count = 0; + for (size_t i = 0; i < size(); i++) { + const sp<AudioInputDescriptor> desc = valueAt(i); + if (desc->mRefCount > 0) { + count++; + } + } + return count; +} + +audio_io_handle_t AudioInputCollection::getActiveInput(bool ignoreVirtualInputs) +{ + for (size_t i = 0; i < size(); i++) { + const sp<AudioInputDescriptor> input_descriptor = valueAt(i); + if ((input_descriptor->mRefCount > 0) + && (!ignoreVirtualInputs || !is_virtual_input_device(input_descriptor->mDevice))) { + return keyAt(i); + } + } + return 0; +} + +audio_devices_t AudioInputCollection::getSupportedDevices(audio_io_handle_t handle) const +{ + sp<AudioInputDescriptor> inputDesc = valueFor(handle); + audio_devices_t devices = inputDesc->mProfile->mSupportedDevices.types(); + return devices; +} + +status_t AudioInputCollection::dump(int fd) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + + snprintf(buffer, SIZE, "\nInputs dump:\n"); + write(fd, buffer, strlen(buffer)); + for (size_t i = 0; i < size(); i++) { + snprintf(buffer, SIZE, "- Input %d dump:\n", keyAt(i)); + write(fd, buffer, strlen(buffer)); + valueAt(i)->dump(fd); + } + + return NO_ERROR; +} + +}; //namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp new file mode 100644 index 0000000..cdb5b51 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp @@ -0,0 +1,333 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::AudioOutputDescriptor" +//#define LOG_NDEBUG 0 + +#include "AudioOutputDescriptor.h" +#include "IOProfile.h" +#include "AudioGain.h" +#include "HwModule.h" +#include <media/AudioPolicy.h> + +// A device mask for all audio output devices that are considered "remote" when evaluating +// active output devices in isStreamActiveRemotely() +#define APM_AUDIO_OUT_DEVICE_REMOTE_ALL AUDIO_DEVICE_OUT_REMOTE_SUBMIX + +namespace android { + +AudioOutputDescriptor::AudioOutputDescriptor(const sp<IOProfile>& profile) + : mId(0), mIoHandle(0), mLatency(0), + mFlags((audio_output_flags_t)0), mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL), + mPatchHandle(0), + mOutput1(0), mOutput2(0), mProfile(profile), mDirectOpenCount(0) +{ + // clear usage count for all stream types + for (int i = 0; i < AUDIO_STREAM_CNT; i++) { + mRefCount[i] = 0; + mCurVolume[i] = -1.0; + mMuteCount[i] = 0; + mStopTime[i] = 0; + } + for (int i = 0; i < NUM_STRATEGIES; i++) { + mStrategyMutedByDevice[i] = false; + } + if (profile != NULL) { + mFlags = (audio_output_flags_t)profile->mFlags; + mSamplingRate = profile->pickSamplingRate(); + mFormat = profile->pickFormat(); + mChannelMask = profile->pickChannelMask(); + if (profile->mGains.size() > 0) { + profile->mGains[0]->getDefaultConfig(&mGain); + } + } +} + +audio_module_handle_t AudioOutputDescriptor::getModuleHandle() const +{ + return mProfile->getModuleHandle(); +} + +audio_devices_t AudioOutputDescriptor::device() const +{ + if (isDuplicated()) { + return (audio_devices_t)(mOutput1->mDevice | 
mOutput2->mDevice); + } else { + return mDevice; + } +} + +void AudioOutputDescriptor::setIoHandle(audio_io_handle_t ioHandle) +{ + mId = AudioPort::getNextUniqueId(); + mIoHandle = ioHandle; +} + +uint32_t AudioOutputDescriptor::latency() +{ + if (isDuplicated()) { + return (mOutput1->mLatency > mOutput2->mLatency) ? mOutput1->mLatency : mOutput2->mLatency; + } else { + return mLatency; + } +} + +bool AudioOutputDescriptor::sharesHwModuleWith( + const sp<AudioOutputDescriptor> outputDesc) +{ + if (isDuplicated()) { + return mOutput1->sharesHwModuleWith(outputDesc) || mOutput2->sharesHwModuleWith(outputDesc); + } else if (outputDesc->isDuplicated()){ + return sharesHwModuleWith(outputDesc->mOutput1) || sharesHwModuleWith(outputDesc->mOutput2); + } else { + return (mProfile->mModule == outputDesc->mProfile->mModule); + } +} + +void AudioOutputDescriptor::changeRefCount(audio_stream_type_t stream, + int delta) +{ + // forward usage count change to attached outputs + if (isDuplicated()) { + mOutput1->changeRefCount(stream, delta); + mOutput2->changeRefCount(stream, delta); + } + if ((delta + (int)mRefCount[stream]) < 0) { + ALOGW("changeRefCount() invalid delta %d for stream %d, refCount %d", + delta, stream, mRefCount[stream]); + mRefCount[stream] = 0; + return; + } + mRefCount[stream] += delta; + ALOGV("changeRefCount() stream %d, count %d", stream, mRefCount[stream]); +} + +audio_devices_t AudioOutputDescriptor::supportedDevices() +{ + if (isDuplicated()) { + return (audio_devices_t)(mOutput1->supportedDevices() | mOutput2->supportedDevices()); + } else { + return mProfile->mSupportedDevices.types() ; + } +} + +bool AudioOutputDescriptor::isActive(uint32_t inPastMs) const +{ + nsecs_t sysTime = 0; + if (inPastMs != 0) { + sysTime = systemTime(); + } + for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) { + if (i == AUDIO_STREAM_PATCH) { + continue; + } + if (isStreamActive((audio_stream_type_t)i, inPastMs, sysTime)) { + return true; + } + } + return false; +} + +bool 
AudioOutputDescriptor::isStreamActive(audio_stream_type_t stream, + uint32_t inPastMs, + nsecs_t sysTime) const +{ + if (mRefCount[stream] != 0) { + return true; + } + if (inPastMs == 0) { + return false; + } + if (sysTime == 0) { + sysTime = systemTime(); + } + if (ns2ms(sysTime - mStopTime[stream]) < inPastMs) { + return true; + } + return false; +} + +void AudioOutputDescriptor::toAudioPortConfig( + struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig) const +{ + ALOG_ASSERT(!isDuplicated(), "toAudioPortConfig() called on duplicated output %d", mIoHandle); + + dstConfig->config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE|AUDIO_PORT_CONFIG_CHANNEL_MASK| + AUDIO_PORT_CONFIG_FORMAT|AUDIO_PORT_CONFIG_GAIN; + if (srcConfig != NULL) { + dstConfig->config_mask |= srcConfig->config_mask; + } + AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig); + + dstConfig->id = mId; + dstConfig->role = AUDIO_PORT_ROLE_SOURCE; + dstConfig->type = AUDIO_PORT_TYPE_MIX; + dstConfig->ext.mix.hw_module = mProfile->mModule->mHandle; + dstConfig->ext.mix.handle = mIoHandle; + dstConfig->ext.mix.usecase.stream = AUDIO_STREAM_DEFAULT; +} + +void AudioOutputDescriptor::toAudioPort( + struct audio_port *port) const +{ + ALOG_ASSERT(!isDuplicated(), "toAudioPort() called on duplicated output %d", mIoHandle); + mProfile->toAudioPort(port); + port->id = mId; + toAudioPortConfig(&port->active_config); + port->ext.mix.hw_module = mProfile->mModule->mHandle; + port->ext.mix.handle = mIoHandle; + port->ext.mix.latency_class = + mFlags & AUDIO_OUTPUT_FLAG_FAST ? 
AUDIO_LATENCY_LOW : AUDIO_LATENCY_NORMAL; +} + +status_t AudioOutputDescriptor::dump(int fd) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, " ID: %d\n", mId); + result.append(buffer); + snprintf(buffer, SIZE, " Sampling rate: %d\n", mSamplingRate); + result.append(buffer); + snprintf(buffer, SIZE, " Format: %08x\n", mFormat); + result.append(buffer); + snprintf(buffer, SIZE, " Channels: %08x\n", mChannelMask); + result.append(buffer); + snprintf(buffer, SIZE, " Latency: %d\n", mLatency); + result.append(buffer); + snprintf(buffer, SIZE, " Flags %08x\n", mFlags); + result.append(buffer); + snprintf(buffer, SIZE, " Devices %08x\n", device()); + result.append(buffer); + snprintf(buffer, SIZE, " Stream volume refCount muteCount\n"); + result.append(buffer); + for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) { + snprintf(buffer, SIZE, " %02d %.03f %02d %02d\n", + i, mCurVolume[i], mRefCount[i], mMuteCount[i]); + result.append(buffer); + } + write(fd, result.string(), result.size()); + + return NO_ERROR; +} + +bool AudioOutputCollection::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const +{ + nsecs_t sysTime = systemTime(); + for (size_t i = 0; i < this->size(); i++) { + const sp<AudioOutputDescriptor> outputDesc = this->valueAt(i); + if (outputDesc->isStreamActive(stream, inPastMs, sysTime)) { + return true; + } + } + return false; +} + +bool AudioOutputCollection::isStreamActiveRemotely(audio_stream_type_t stream, + uint32_t inPastMs) const +{ + nsecs_t sysTime = systemTime(); + for (size_t i = 0; i < size(); i++) { + const sp<AudioOutputDescriptor> outputDesc = valueAt(i); + if (((outputDesc->device() & APM_AUDIO_OUT_DEVICE_REMOTE_ALL) != 0) && + outputDesc->isStreamActive(stream, inPastMs, sysTime)) { + // do not consider re routing (when the output is going to a dynamic policy) + // as "remote playback" + if (outputDesc->mPolicyMix == NULL) { + return true; + } + } + } + return false; +} + 
+audio_io_handle_t AudioOutputCollection::getA2dpOutput() const +{ + for (size_t i = 0; i < size(); i++) { + sp<AudioOutputDescriptor> outputDesc = valueAt(i); + if (!outputDesc->isDuplicated() && outputDesc->device() & AUDIO_DEVICE_OUT_ALL_A2DP) { + return this->keyAt(i); + } + } + return 0; +} + +sp<AudioOutputDescriptor> AudioOutputCollection::getPrimaryOutput() const +{ + for (size_t i = 0; i < size(); i++) { + const sp<AudioOutputDescriptor> outputDesc = valueAt(i); + if (outputDesc->mFlags & AUDIO_OUTPUT_FLAG_PRIMARY) { + return outputDesc; + } + } + return NULL; +} + +sp<AudioOutputDescriptor> AudioOutputCollection::getOutputFromId(audio_port_handle_t id) const +{ + sp<AudioOutputDescriptor> outputDesc = NULL; + for (size_t i = 0; i < size(); i++) { + outputDesc = valueAt(i); + if (outputDesc->mId == id) { + break; + } + } + return outputDesc; +} + +bool AudioOutputCollection::isAnyOutputActive(audio_stream_type_t streamToIgnore) const +{ + for (size_t s = 0 ; s < AUDIO_STREAM_CNT ; s++) { + if (s == (size_t) streamToIgnore) { + continue; + } + for (size_t i = 0; i < size(); i++) { + const sp<AudioOutputDescriptor> outputDesc = valueAt(i); + if (outputDesc->mRefCount[s] != 0) { + return true; + } + } + } + return false; +} + +audio_devices_t AudioOutputCollection::getSupportedDevices(audio_io_handle_t handle) const +{ + sp<AudioOutputDescriptor> outputDesc = valueFor(handle); + audio_devices_t devices = outputDesc->mProfile->mSupportedDevices.types(); + return devices; +} + + +status_t AudioOutputCollection::dump(int fd) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + + snprintf(buffer, SIZE, "\nOutputs dump:\n"); + write(fd, buffer, strlen(buffer)); + for (size_t i = 0; i < size(); i++) { + snprintf(buffer, SIZE, "- Output %d dump:\n", keyAt(i)); + write(fd, buffer, strlen(buffer)); + valueAt(i)->dump(fd); + } + + return NO_ERROR; +} + +}; //namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp 
b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp new file mode 100644 index 0000000..3a317fa --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/AudioPatch.cpp @@ -0,0 +1,154 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::AudioPatch" +//#define LOG_NDEBUG 0 + +#include "AudioPatch.h" +#include "AudioGain.h" +#include "ConfigParsingUtils.h" +#include <cutils/log.h> +#include <utils/String8.h> + +namespace android { + +int32_t volatile AudioPatch::mNextUniqueId = 1; + +AudioPatch::AudioPatch(const struct audio_patch *patch, uid_t uid) : + mHandle(static_cast<audio_patch_handle_t>(android_atomic_inc(&mNextUniqueId))), + mPatch(*patch), + mUid(uid), + mAfPatchHandle(0) +{ +} + +status_t AudioPatch::dump(int fd, int spaces, int index) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "%*sAudio patch %d:\n", spaces, "", index+1); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- handle: %2d\n", spaces, "", mHandle); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- audio flinger handle: %2d\n", spaces, "", mAfPatchHandle); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- owner uid: %2d\n", spaces, "", mUid); + result.append(buffer); + snprintf(buffer, SIZE, "%*s- %d sources:\n", spaces, "", mPatch.num_sources); + result.append(buffer); + for (size_t i = 0; i < 
mPatch.num_sources; i++) { + if (mPatch.sources[i].type == AUDIO_PORT_TYPE_DEVICE) { + snprintf(buffer, SIZE, "%*s- Device ID %d %s\n", spaces + 2, "", + mPatch.sources[i].id, ConfigParsingUtils::enumToString(sDeviceNameToEnumTable, + ARRAY_SIZE(sDeviceNameToEnumTable), + mPatch.sources[i].ext.device.type)); + } else { + snprintf(buffer, SIZE, "%*s- Mix ID %d I/O handle %d\n", spaces + 2, "", + mPatch.sources[i].id, mPatch.sources[i].ext.mix.handle); + } + result.append(buffer); + } + snprintf(buffer, SIZE, "%*s- %d sinks:\n", spaces, "", mPatch.num_sinks); + result.append(buffer); + for (size_t i = 0; i < mPatch.num_sinks; i++) { + if (mPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE) { + snprintf(buffer, SIZE, "%*s- Device ID %d %s\n", spaces + 2, "", + mPatch.sinks[i].id, ConfigParsingUtils::enumToString(sDeviceNameToEnumTable, + ARRAY_SIZE(sDeviceNameToEnumTable), + mPatch.sinks[i].ext.device.type)); + } else { + snprintf(buffer, SIZE, "%*s- Mix ID %d I/O handle %d\n", spaces + 2, "", + mPatch.sinks[i].id, mPatch.sinks[i].ext.mix.handle); + } + result.append(buffer); + } + + write(fd, result.string(), result.size()); + return NO_ERROR; +} + +status_t AudioPatchCollection::addAudioPatch(audio_patch_handle_t handle, + const sp<AudioPatch>& patch) +{ + ssize_t index = indexOfKey(handle); + + if (index >= 0) { + ALOGW("addAudioPatch() patch %d already in", handle); + return ALREADY_EXISTS; + } + add(handle, patch); + ALOGV("addAudioPatch() handle %d af handle %d num_sources %d num_sinks %d source handle %d" + "sink handle %d", + handle, patch->mAfPatchHandle, patch->mPatch.num_sources, patch->mPatch.num_sinks, + patch->mPatch.sources[0].id, patch->mPatch.sinks[0].id); + return NO_ERROR; +} + +status_t AudioPatchCollection::removeAudioPatch(audio_patch_handle_t handle) +{ + ssize_t index = indexOfKey(handle); + + if (index < 0) { + ALOGW("removeAudioPatch() patch %d not in", handle); + return ALREADY_EXISTS; + } + ALOGV("removeAudioPatch() handle %d af handle %d", 
handle, valueAt(index)->mAfPatchHandle); + removeItemsAt(index); + return NO_ERROR; +} + +status_t AudioPatchCollection::listAudioPatches(unsigned int *num_patches, + struct audio_patch *patches) const +{ + if (num_patches == NULL || (*num_patches != 0 && patches == NULL)) { + return BAD_VALUE; + } + ALOGV("listAudioPatches() num_patches %d patches %p available patches %zu", + *num_patches, patches, size()); + if (patches == NULL) { + *num_patches = 0; + } + + size_t patchesWritten = 0; + size_t patchesMax = *num_patches; + for (size_t i = 0; i < size() && patchesWritten < patchesMax; i++) { + const sp<AudioPatch> patch = valueAt(i); + patches[patchesWritten] = patch->mPatch; + patches[patchesWritten++].id = patch->mHandle; + ALOGV("listAudioPatches() patch %zu num_sources %d num_sinks %d", + i, patch->mPatch.num_sources, patch->mPatch.num_sinks); + } + *num_patches = size(); + + ALOGV("listAudioPatches() got %zu patches needed %d", patchesWritten, *num_patches); + return NO_ERROR; +} + +status_t AudioPatchCollection::dump(int fd) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + snprintf(buffer, SIZE, "\nAudio Patches:\n"); + write(fd, buffer, strlen(buffer)); + for (size_t i = 0; i < size(); i++) { + valueAt(i)->dump(fd, 2, i); + } + return NO_ERROR; +} + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp new file mode 100644 index 0000000..84a53eb --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyMix.cpp @@ -0,0 +1,193 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::AudioPolicyMix" +//#define LOG_NDEBUG 0 + +#include "AudioPolicyMix.h" +#include "HwModule.h" +#include "AudioPort.h" +#include "IOProfile.h" +#include "AudioGain.h" +#include <AudioOutputDescriptor.h> + +namespace android { + +void AudioPolicyMix::setOutput(sp<AudioOutputDescriptor> &output) +{ + mOutput = output; +} + +const sp<AudioOutputDescriptor> &AudioPolicyMix::getOutput() const +{ + return mOutput; +} + +void AudioPolicyMix::clearOutput() +{ + mOutput.clear(); +} + +void AudioPolicyMix::setMix(AudioMix &mix) +{ + mMix = mix; +} + +android::AudioMix &AudioPolicyMix::getMix() +{ + return mMix; +} + +status_t AudioPolicyMixCollection::registerMix(String8 address, AudioMix mix) +{ + ssize_t index = indexOfKey(address); + if (index >= 0) { + ALOGE("registerPolicyMixes(): mix for address %s already registered", address.string()); + return BAD_VALUE; + } + sp<AudioPolicyMix> policyMix = new AudioPolicyMix(); + policyMix->setMix(mix); + add(address, policyMix); + return NO_ERROR; +} + +status_t AudioPolicyMixCollection::unregisterMix(String8 address) +{ + ssize_t index = indexOfKey(address); + if (index < 0) { + ALOGE("unregisterPolicyMixes(): mix for address %s not registered", address.string()); + return BAD_VALUE; + } + + removeItemsAt(index); + return NO_ERROR; +} + +status_t AudioPolicyMixCollection::getAudioPolicyMix(String8 address, + sp<AudioPolicyMix> &policyMix) const +{ + ssize_t index = indexOfKey(address); + if (index < 0) { + ALOGE("unregisterPolicyMixes(): mix for address %s not registered", 
address.string()); + return BAD_VALUE; + } + policyMix = valueAt(index); + return NO_ERROR; +} + +void AudioPolicyMixCollection::closeOutput(sp<AudioOutputDescriptor> &desc) +{ + for (size_t i = 0; i < size(); i++) { + sp<AudioPolicyMix> policyMix = valueAt(i); + if (policyMix->getOutput() == desc) { + policyMix->clearOutput(); + } + } +} + +status_t AudioPolicyMixCollection::getOutputForAttr(audio_attributes_t attributes, + sp<AudioOutputDescriptor> &desc) +{ + for (size_t i = 0; i < size(); i++) { + sp<AudioPolicyMix> policyMix = valueAt(i); + AudioMix mix = policyMix->getMix(); + + if (mix.mMixType == MIX_TYPE_PLAYERS) { + for (size_t j = 0; j < mix.mCriteria.size(); j++) { + if ((RULE_MATCH_ATTRIBUTE_USAGE == mix.mCriteria[j].mRule && + mix.mCriteria[j].mAttr.mUsage == attributes.usage) || + (RULE_EXCLUDE_ATTRIBUTE_USAGE == mix.mCriteria[j].mRule && + mix.mCriteria[j].mAttr.mUsage != attributes.usage)) { + desc = policyMix->getOutput(); + break; + } + if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 && + strncmp(attributes.tags + strlen("addr="), + mix.mRegistrationId.string(), + AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) { + desc = policyMix->getOutput(); + break; + } + } + } else if (mix.mMixType == MIX_TYPE_RECORDERS) { + if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE && + strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 && + strncmp(attributes.tags + strlen("addr="), + mix.mRegistrationId.string(), + AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) { + desc = policyMix->getOutput(); + } + } + if (desc != 0) { + desc->mPolicyMix = &mix; + return NO_ERROR; + } + } + return BAD_VALUE; +} + +audio_devices_t AudioPolicyMixCollection::getDeviceAndMixForInputSource(audio_source_t inputSource, + audio_devices_t availDevices, + AudioMix **policyMix) +{ + for (size_t i = 0; i < size(); i++) { + AudioMix mix = valueAt(i)->getMix(); + + if (mix.mMixType != MIX_TYPE_RECORDERS) { + continue; + } + for (size_t j = 0; j < 
mix.mCriteria.size(); j++) { + if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mix.mCriteria[j].mRule && + mix.mCriteria[j].mAttr.mSource == inputSource) || + (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mix.mCriteria[j].mRule && + mix.mCriteria[j].mAttr.mSource != inputSource)) { + if (availDevices & AUDIO_DEVICE_IN_REMOTE_SUBMIX) { + if (policyMix != NULL) { + *policyMix = &mix; + } + return AUDIO_DEVICE_IN_REMOTE_SUBMIX; + } + break; + } + } + } + return AUDIO_DEVICE_NONE; +} + +status_t AudioPolicyMixCollection::getInputMixForAttr(audio_attributes_t attr, AudioMix *&policyMix) +{ + if (strncmp(attr.tags, "addr=", strlen("addr=")) != 0) { + return BAD_VALUE; + } + String8 address(attr.tags + strlen("addr=")); + + ssize_t index = indexOfKey(address); + if (index < 0) { + ALOGW("getInputForAttr() no policy for address %s", address.string()); + return BAD_VALUE; + } + sp<AudioPolicyMix> audioPolicyMix = valueAt(index); + AudioMix mix = audioPolicyMix->getMix(); + + if (mix.mMixType != MIX_TYPE_PLAYERS) { + ALOGW("getInputForAttr() bad policy mix type for address %s", address.string()); + return BAD_VALUE; + } + policyMix = &mix; + return NO_ERROR; +} + +}; //namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp new file mode 100644 index 0000000..46a119e --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/AudioPort.cpp @@ -0,0 +1,804 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::AudioPort" +//#define LOG_NDEBUG 0 + +#include "AudioPort.h" +#include "HwModule.h" +#include "AudioGain.h" +#include "ConfigParsingUtils.h" +#include "audio_policy_conf.h" +#include <policy.h> + +namespace android { + +int32_t volatile AudioPort::mNextUniqueId = 1; + +// --- AudioPort class implementation + +AudioPort::AudioPort(const String8& name, audio_port_type_t type, + audio_port_role_t role, const sp<HwModule>& module) : + mName(name), mType(type), mRole(role), mModule(module), mFlags(0), mId(0) +{ + mUseInChannelMask = ((type == AUDIO_PORT_TYPE_DEVICE) && (role == AUDIO_PORT_ROLE_SOURCE)) || + ((type == AUDIO_PORT_TYPE_MIX) && (role == AUDIO_PORT_ROLE_SINK)); +} + +void AudioPort::attach(const sp<HwModule>& module) +{ + mId = getNextUniqueId(); + mModule = module; +} + +audio_port_handle_t AudioPort::getNextUniqueId() +{ + return static_cast<audio_port_handle_t>(android_atomic_inc(&mNextUniqueId)); +} + +audio_module_handle_t AudioPort::getModuleHandle() const +{ + return mModule->mHandle; +} + +void AudioPort::toAudioPort(struct audio_port *port) const +{ + port->role = mRole; + port->type = mType; + strlcpy(port->name, mName, AUDIO_PORT_MAX_NAME_LEN); + unsigned int i; + for (i = 0; i < mSamplingRates.size() && i < AUDIO_PORT_MAX_SAMPLING_RATES; i++) { + if (mSamplingRates[i] != 0) { + port->sample_rates[i] = mSamplingRates[i]; + } + } + port->num_sample_rates = i; + for (i = 0; i < mChannelMasks.size() && i < AUDIO_PORT_MAX_CHANNEL_MASKS; i++) { + if (mChannelMasks[i] != 0) { + port->channel_masks[i] = mChannelMasks[i]; + } + } + port->num_channel_masks = i; + for (i = 0; i < mFormats.size() && i < AUDIO_PORT_MAX_FORMATS; i++) { + if (mFormats[i] != 0) { + port->formats[i] = mFormats[i]; + } + } + port->num_formats = i; + + ALOGV("AudioPort::toAudioPort() num gains %zu", mGains.size()); + + for (i = 0; i < 
mGains.size() && i < AUDIO_PORT_MAX_GAINS; i++) { + port->gains[i] = mGains[i]->mGain; + } + port->num_gains = i; +} + +void AudioPort::importAudioPort(const sp<AudioPort> port) { + for (size_t k = 0 ; k < port->mSamplingRates.size() ; k++) { + const uint32_t rate = port->mSamplingRates.itemAt(k); + if (rate != 0) { // skip "dynamic" rates + bool hasRate = false; + for (size_t l = 0 ; l < mSamplingRates.size() ; l++) { + if (rate == mSamplingRates.itemAt(l)) { + hasRate = true; + break; + } + } + if (!hasRate) { // never import a sampling rate twice + mSamplingRates.add(rate); + } + } + } + for (size_t k = 0 ; k < port->mChannelMasks.size() ; k++) { + const audio_channel_mask_t mask = port->mChannelMasks.itemAt(k); + if (mask != 0) { // skip "dynamic" masks + bool hasMask = false; + for (size_t l = 0 ; l < mChannelMasks.size() ; l++) { + if (mask == mChannelMasks.itemAt(l)) { + hasMask = true; + break; + } + } + if (!hasMask) { // never import a channel mask twice + mChannelMasks.add(mask); + } + } + } + for (size_t k = 0 ; k < port->mFormats.size() ; k++) { + const audio_format_t format = port->mFormats.itemAt(k); + if (format != 0) { // skip "dynamic" formats + bool hasFormat = false; + for (size_t l = 0 ; l < mFormats.size() ; l++) { + if (format == mFormats.itemAt(l)) { + hasFormat = true; + break; + } + } + if (!hasFormat) { // never import a channel mask twice + mFormats.add(format); + } + } + } + for (size_t k = 0 ; k < port->mGains.size() ; k++) { + sp<AudioGain> gain = port->mGains.itemAt(k); + if (gain != 0) { + bool hasGain = false; + for (size_t l = 0 ; l < mGains.size() ; l++) { + if (gain == mGains.itemAt(l)) { + hasGain = true; + break; + } + } + if (!hasGain) { // never import a gain twice + mGains.add(gain); + } + } + } +} + +void AudioPort::clearCapabilities() { + mChannelMasks.clear(); + mFormats.clear(); + mSamplingRates.clear(); + mGains.clear(); +} + +void AudioPort::loadSamplingRates(char *name) +{ + char *str = strtok(name, "|"); + + // by 
convention, "0' in the first entry in mSamplingRates indicates the supported sampling + // rates should be read from the output stream after it is opened for the first time + if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { + mSamplingRates.add(0); + return; + } + + while (str != NULL) { + uint32_t rate = atoi(str); + if (rate != 0) { + ALOGV("loadSamplingRates() adding rate %d", rate); + mSamplingRates.add(rate); + } + str = strtok(NULL, "|"); + } +} + +void AudioPort::loadFormats(char *name) +{ + char *str = strtok(name, "|"); + + // by convention, "0' in the first entry in mFormats indicates the supported formats + // should be read from the output stream after it is opened for the first time + if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { + mFormats.add(AUDIO_FORMAT_DEFAULT); + return; + } + + while (str != NULL) { + audio_format_t format = (audio_format_t)ConfigParsingUtils::stringToEnum(sFormatNameToEnumTable, + ARRAY_SIZE(sFormatNameToEnumTable), + str); + if (format != AUDIO_FORMAT_DEFAULT) { + mFormats.add(format); + } + str = strtok(NULL, "|"); + } +} + +void AudioPort::loadInChannels(char *name) +{ + const char *str = strtok(name, "|"); + + ALOGV("loadInChannels() %s", name); + + if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { + mChannelMasks.add(0); + return; + } + + while (str != NULL) { + audio_channel_mask_t channelMask = + (audio_channel_mask_t)ConfigParsingUtils::stringToEnum(sInChannelsNameToEnumTable, + ARRAY_SIZE(sInChannelsNameToEnumTable), + str); + if (channelMask != 0) { + ALOGV("loadInChannels() adding channelMask %04x", channelMask); + mChannelMasks.add(channelMask); + } + str = strtok(NULL, "|"); + } +} + +void AudioPort::loadOutChannels(char *name) +{ + const char *str = strtok(name, "|"); + + ALOGV("loadOutChannels() %s", name); + + // by convention, "0' in the first entry in mChannelMasks indicates the supported channel + // masks should be read from the output stream after it is opened for the first time 
+ if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { + mChannelMasks.add(0); + return; + } + + while (str != NULL) { + audio_channel_mask_t channelMask = + (audio_channel_mask_t)ConfigParsingUtils::stringToEnum(sOutChannelsNameToEnumTable, + ARRAY_SIZE(sOutChannelsNameToEnumTable), + str); + if (channelMask != 0) { + mChannelMasks.add(channelMask); + } + str = strtok(NULL, "|"); + } + return; +} + +audio_gain_mode_t AudioPort::loadGainMode(char *name) +{ + const char *str = strtok(name, "|"); + + ALOGV("loadGainMode() %s", name); + audio_gain_mode_t mode = 0; + while (str != NULL) { + mode |= (audio_gain_mode_t)ConfigParsingUtils::stringToEnum(sGainModeNameToEnumTable, + ARRAY_SIZE(sGainModeNameToEnumTable), + str); + str = strtok(NULL, "|"); + } + return mode; +} + +void AudioPort::loadGain(cnode *root, int index) +{ + cnode *node = root->first_child; + + sp<AudioGain> gain = new AudioGain(index, mUseInChannelMask); + + while (node) { + if (strcmp(node->name, GAIN_MODE) == 0) { + gain->mGain.mode = loadGainMode((char *)node->value); + } else if (strcmp(node->name, GAIN_CHANNELS) == 0) { + if (mUseInChannelMask) { + gain->mGain.channel_mask = + (audio_channel_mask_t)ConfigParsingUtils::stringToEnum(sInChannelsNameToEnumTable, + ARRAY_SIZE(sInChannelsNameToEnumTable), + (char *)node->value); + } else { + gain->mGain.channel_mask = + (audio_channel_mask_t)ConfigParsingUtils::stringToEnum(sOutChannelsNameToEnumTable, + ARRAY_SIZE(sOutChannelsNameToEnumTable), + (char *)node->value); + } + } else if (strcmp(node->name, GAIN_MIN_VALUE) == 0) { + gain->mGain.min_value = atoi((char *)node->value); + } else if (strcmp(node->name, GAIN_MAX_VALUE) == 0) { + gain->mGain.max_value = atoi((char *)node->value); + } else if (strcmp(node->name, GAIN_DEFAULT_VALUE) == 0) { + gain->mGain.default_value = atoi((char *)node->value); + } else if (strcmp(node->name, GAIN_STEP_VALUE) == 0) { + gain->mGain.step_value = atoi((char *)node->value); + } else if (strcmp(node->name, 
GAIN_MIN_RAMP_MS) == 0) { + gain->mGain.min_ramp_ms = atoi((char *)node->value); + } else if (strcmp(node->name, GAIN_MAX_RAMP_MS) == 0) { + gain->mGain.max_ramp_ms = atoi((char *)node->value); + } + node = node->next; + } + + ALOGV("loadGain() adding new gain mode %08x channel mask %08x min mB %d max mB %d", + gain->mGain.mode, gain->mGain.channel_mask, gain->mGain.min_value, gain->mGain.max_value); + + if (gain->mGain.mode == 0) { + return; + } + mGains.add(gain); +} + +void AudioPort::loadGains(cnode *root) +{ + cnode *node = root->first_child; + int index = 0; + while (node) { + ALOGV("loadGains() loading gain %s", node->name); + loadGain(node, index++); + node = node->next; + } +} + +status_t AudioPort::checkExactSamplingRate(uint32_t samplingRate) const +{ + if (mSamplingRates.isEmpty()) { + return NO_ERROR; + } + + for (size_t i = 0; i < mSamplingRates.size(); i ++) { + if (mSamplingRates[i] == samplingRate) { + return NO_ERROR; + } + } + return BAD_VALUE; +} + +status_t AudioPort::checkCompatibleSamplingRate(uint32_t samplingRate, + uint32_t *updatedSamplingRate) const +{ + if (mSamplingRates.isEmpty()) { + return NO_ERROR; + } + + // Search for the closest supported sampling rate that is above (preferred) + // or below (acceptable) the desired sampling rate, within a permitted ratio. + // The sampling rates do not need to be sorted in ascending order. 
+ ssize_t maxBelow = -1; + ssize_t minAbove = -1; + uint32_t candidate; + for (size_t i = 0; i < mSamplingRates.size(); i++) { + candidate = mSamplingRates[i]; + if (candidate == samplingRate) { + if (updatedSamplingRate != NULL) { + *updatedSamplingRate = candidate; + } + return NO_ERROR; + } + // candidate < desired + if (candidate < samplingRate) { + if (maxBelow < 0 || candidate > mSamplingRates[maxBelow]) { + maxBelow = i; + } + // candidate > desired + } else { + if (minAbove < 0 || candidate < mSamplingRates[minAbove]) { + minAbove = i; + } + } + } + // This uses hard-coded knowledge about AudioFlinger resampling ratios. + // TODO Move these assumptions out. + static const uint32_t kMaxDownSampleRatio = 6; // beyond this aliasing occurs + static const uint32_t kMaxUpSampleRatio = 256; // beyond this sample rate inaccuracies occur + // due to approximation by an int32_t of the + // phase increments + // Prefer to down-sample from a higher sampling rate, as we get the desired frequency spectrum. + if (minAbove >= 0) { + candidate = mSamplingRates[minAbove]; + if (candidate / kMaxDownSampleRatio <= samplingRate) { + if (updatedSamplingRate != NULL) { + *updatedSamplingRate = candidate; + } + return NO_ERROR; + } + } + // But if we have to up-sample from a lower sampling rate, that's OK. 
+ if (maxBelow >= 0) { + candidate = mSamplingRates[maxBelow]; + if (candidate * kMaxUpSampleRatio >= samplingRate) { + if (updatedSamplingRate != NULL) { + *updatedSamplingRate = candidate; + } + return NO_ERROR; + } + } + // leave updatedSamplingRate unmodified + return BAD_VALUE; +} + +status_t AudioPort::checkExactChannelMask(audio_channel_mask_t channelMask) const +{ + if (mChannelMasks.isEmpty()) { + return NO_ERROR; + } + + for (size_t i = 0; i < mChannelMasks.size(); i++) { + if (mChannelMasks[i] == channelMask) { + return NO_ERROR; + } + } + return BAD_VALUE; +} + +status_t AudioPort::checkCompatibleChannelMask(audio_channel_mask_t channelMask) + const +{ + if (mChannelMasks.isEmpty()) { + return NO_ERROR; + } + + const bool isRecordThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SINK; + for (size_t i = 0; i < mChannelMasks.size(); i ++) { + // FIXME Does not handle multi-channel automatic conversions yet + audio_channel_mask_t supported = mChannelMasks[i]; + if (supported == channelMask) { + return NO_ERROR; + } + if (isRecordThread) { + // This uses hard-coded knowledge that AudioFlinger can silently down-mix and up-mix. + // FIXME Abstract this out to a table. 
+ if (((supported == AUDIO_CHANNEL_IN_FRONT_BACK || supported == AUDIO_CHANNEL_IN_STEREO) + && channelMask == AUDIO_CHANNEL_IN_MONO) || + (supported == AUDIO_CHANNEL_IN_MONO && (channelMask == AUDIO_CHANNEL_IN_FRONT_BACK + || channelMask == AUDIO_CHANNEL_IN_STEREO))) { + return NO_ERROR; + } + } + } + return BAD_VALUE; +} + +status_t AudioPort::checkFormat(audio_format_t format) const +{ + if (mFormats.isEmpty()) { + return NO_ERROR; + } + + for (size_t i = 0; i < mFormats.size(); i ++) { + if (mFormats[i] == format) { + return NO_ERROR; + } + } + return BAD_VALUE; +} + + +uint32_t AudioPort::pickSamplingRate() const +{ + // special case for uninitialized dynamic profile + if (mSamplingRates.size() == 1 && mSamplingRates[0] == 0) { + return 0; + } + + // For direct outputs, pick minimum sampling rate: this helps ensuring that the + // channel count / sampling rate combination chosen will be supported by the connected + // sink + if ((mType == AUDIO_PORT_TYPE_MIX) && (mRole == AUDIO_PORT_ROLE_SOURCE) && + (mFlags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD))) { + uint32_t samplingRate = UINT_MAX; + for (size_t i = 0; i < mSamplingRates.size(); i ++) { + if ((mSamplingRates[i] < samplingRate) && (mSamplingRates[i] > 0)) { + samplingRate = mSamplingRates[i]; + } + } + return (samplingRate == UINT_MAX) ? 0 : samplingRate; + } + + uint32_t samplingRate = 0; + uint32_t maxRate = MAX_MIXER_SAMPLING_RATE; + + // For mixed output and inputs, use max mixer sampling rates. 
Do not + // limit sampling rate otherwise + if (mType != AUDIO_PORT_TYPE_MIX) { + maxRate = UINT_MAX; + } + for (size_t i = 0; i < mSamplingRates.size(); i ++) { + if ((mSamplingRates[i] > samplingRate) && (mSamplingRates[i] <= maxRate)) { + samplingRate = mSamplingRates[i]; + } + } + return samplingRate; +} + +audio_channel_mask_t AudioPort::pickChannelMask() const +{ + // special case for uninitialized dynamic profile + if (mChannelMasks.size() == 1 && mChannelMasks[0] == 0) { + return AUDIO_CHANNEL_NONE; + } + audio_channel_mask_t channelMask = AUDIO_CHANNEL_NONE; + + // For direct outputs, pick minimum channel count: this helps ensuring that the + // channel count / sampling rate combination chosen will be supported by the connected + // sink + if ((mType == AUDIO_PORT_TYPE_MIX) && (mRole == AUDIO_PORT_ROLE_SOURCE) && + (mFlags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD))) { + uint32_t channelCount = UINT_MAX; + for (size_t i = 0; i < mChannelMasks.size(); i ++) { + uint32_t cnlCount; + if (mUseInChannelMask) { + cnlCount = audio_channel_count_from_in_mask(mChannelMasks[i]); + } else { + cnlCount = audio_channel_count_from_out_mask(mChannelMasks[i]); + } + if ((cnlCount < channelCount) && (cnlCount > 0)) { + channelMask = mChannelMasks[i]; + channelCount = cnlCount; + } + } + return channelMask; + } + + uint32_t channelCount = 0; + uint32_t maxCount = MAX_MIXER_CHANNEL_COUNT; + + // For mixed output and inputs, use max mixer channel count. 
Do not + // limit channel count otherwise + if (mType != AUDIO_PORT_TYPE_MIX) { + maxCount = UINT_MAX; + } + for (size_t i = 0; i < mChannelMasks.size(); i ++) { + uint32_t cnlCount; + if (mUseInChannelMask) { + cnlCount = audio_channel_count_from_in_mask(mChannelMasks[i]); + } else { + cnlCount = audio_channel_count_from_out_mask(mChannelMasks[i]); + } + if ((cnlCount > channelCount) && (cnlCount <= maxCount)) { + channelMask = mChannelMasks[i]; + channelCount = cnlCount; + } + } + return channelMask; +} + +/* format in order of increasing preference */ +const audio_format_t AudioPort::sPcmFormatCompareTable[] = { + AUDIO_FORMAT_DEFAULT, + AUDIO_FORMAT_PCM_16_BIT, + AUDIO_FORMAT_PCM_8_24_BIT, + AUDIO_FORMAT_PCM_24_BIT_PACKED, + AUDIO_FORMAT_PCM_32_BIT, + AUDIO_FORMAT_PCM_FLOAT, +}; + +int AudioPort::compareFormats(audio_format_t format1, + audio_format_t format2) +{ + // NOTE: AUDIO_FORMAT_INVALID is also considered not PCM and will be compared equal to any + // compressed format and better than any PCM format. 
This is by design of pickFormat() + if (!audio_is_linear_pcm(format1)) { + if (!audio_is_linear_pcm(format2)) { + return 0; + } + return 1; + } + if (!audio_is_linear_pcm(format2)) { + return -1; + } + + int index1 = -1, index2 = -1; + for (size_t i = 0; + (i < ARRAY_SIZE(sPcmFormatCompareTable)) && ((index1 == -1) || (index2 == -1)); + i ++) { + if (sPcmFormatCompareTable[i] == format1) { + index1 = i; + } + if (sPcmFormatCompareTable[i] == format2) { + index2 = i; + } + } + // format1 not found => index1 < 0 => format2 > format1 + // format2 not found => index2 < 0 => format2 < format1 + return index1 - index2; +} + +audio_format_t AudioPort::pickFormat() const +{ + // special case for uninitialized dynamic profile + if (mFormats.size() == 1 && mFormats[0] == 0) { + return AUDIO_FORMAT_DEFAULT; + } + + audio_format_t format = AUDIO_FORMAT_DEFAULT; + audio_format_t bestFormat = + AudioPort::sPcmFormatCompareTable[ + ARRAY_SIZE(AudioPort::sPcmFormatCompareTable) - 1]; + // For mixed output and inputs, use best mixer output format. 
Do not + // limit format otherwise + if ((mType != AUDIO_PORT_TYPE_MIX) || + ((mRole == AUDIO_PORT_ROLE_SOURCE) && + (((mFlags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)) != 0)))) { + bestFormat = AUDIO_FORMAT_INVALID; + } + + for (size_t i = 0; i < mFormats.size(); i ++) { + if ((compareFormats(mFormats[i], format) > 0) && + (compareFormats(mFormats[i], bestFormat) <= 0)) { + format = mFormats[i]; + } + } + return format; +} + +status_t AudioPort::checkGain(const struct audio_gain_config *gainConfig, + int index) const +{ + if (index < 0 || (size_t)index >= mGains.size()) { + return BAD_VALUE; + } + return mGains[index]->checkConfig(gainConfig); +} + +void AudioPort::dump(int fd, int spaces) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + if (mName.size() != 0) { + snprintf(buffer, SIZE, "%*s- name: %s\n", spaces, "", mName.string()); + result.append(buffer); + } + + if (mSamplingRates.size() != 0) { + snprintf(buffer, SIZE, "%*s- sampling rates: ", spaces, ""); + result.append(buffer); + for (size_t i = 0; i < mSamplingRates.size(); i++) { + if (i == 0 && mSamplingRates[i] == 0) { + snprintf(buffer, SIZE, "Dynamic"); + } else { + snprintf(buffer, SIZE, "%d", mSamplingRates[i]); + } + result.append(buffer); + result.append(i == (mSamplingRates.size() - 1) ? "" : ", "); + } + result.append("\n"); + } + + if (mChannelMasks.size() != 0) { + snprintf(buffer, SIZE, "%*s- channel masks: ", spaces, ""); + result.append(buffer); + for (size_t i = 0; i < mChannelMasks.size(); i++) { + ALOGV("AudioPort::dump mChannelMasks %zu %08x", i, mChannelMasks[i]); + + if (i == 0 && mChannelMasks[i] == 0) { + snprintf(buffer, SIZE, "Dynamic"); + } else { + snprintf(buffer, SIZE, "0x%04x", mChannelMasks[i]); + } + result.append(buffer); + result.append(i == (mChannelMasks.size() - 1) ? 
"" : ", "); + } + result.append("\n"); + } + + if (mFormats.size() != 0) { + snprintf(buffer, SIZE, "%*s- formats: ", spaces, ""); + result.append(buffer); + for (size_t i = 0; i < mFormats.size(); i++) { + const char *formatStr = ConfigParsingUtils::enumToString(sFormatNameToEnumTable, + ARRAY_SIZE(sFormatNameToEnumTable), + mFormats[i]); + if (i == 0 && strcmp(formatStr, "") == 0) { + snprintf(buffer, SIZE, "Dynamic"); + } else { + snprintf(buffer, SIZE, "%s", formatStr); + } + result.append(buffer); + result.append(i == (mFormats.size() - 1) ? "" : ", "); + } + result.append("\n"); + } + write(fd, result.string(), result.size()); + if (mGains.size() != 0) { + snprintf(buffer, SIZE, "%*s- gains:\n", spaces, ""); + write(fd, buffer, strlen(buffer) + 1); + result.append(buffer); + for (size_t i = 0; i < mGains.size(); i++) { + mGains[i]->dump(fd, spaces + 2, i); + } + } +} + + +// --- AudioPortConfig class implementation + +AudioPortConfig::AudioPortConfig() +{ + mSamplingRate = 0; + mChannelMask = AUDIO_CHANNEL_NONE; + mFormat = AUDIO_FORMAT_INVALID; + mGain.index = -1; +} + +status_t AudioPortConfig::applyAudioPortConfig( + const struct audio_port_config *config, + struct audio_port_config *backupConfig) +{ + struct audio_port_config localBackupConfig; + status_t status = NO_ERROR; + + localBackupConfig.config_mask = config->config_mask; + toAudioPortConfig(&localBackupConfig); + + sp<AudioPort> audioport = getAudioPort(); + if (audioport == 0) { + status = NO_INIT; + goto exit; + } + if (config->config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) { + status = audioport->checkExactSamplingRate(config->sample_rate); + if (status != NO_ERROR) { + goto exit; + } + mSamplingRate = config->sample_rate; + } + if (config->config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) { + status = audioport->checkExactChannelMask(config->channel_mask); + if (status != NO_ERROR) { + goto exit; + } + mChannelMask = config->channel_mask; + } + if (config->config_mask & AUDIO_PORT_CONFIG_FORMAT) { 
+ status = audioport->checkFormat(config->format); + if (status != NO_ERROR) { + goto exit; + } + mFormat = config->format; + } + if (config->config_mask & AUDIO_PORT_CONFIG_GAIN) { + status = audioport->checkGain(&config->gain, config->gain.index); + if (status != NO_ERROR) { + goto exit; + } + mGain = config->gain; + } + +exit: + if (status != NO_ERROR) { + applyAudioPortConfig(&localBackupConfig); + } + if (backupConfig != NULL) { + *backupConfig = localBackupConfig; + } + return status; +} + +void AudioPortConfig::toAudioPortConfig(struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig) const +{ + if (dstConfig->config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) { + dstConfig->sample_rate = mSamplingRate; + if ((srcConfig != NULL) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE)) { + dstConfig->sample_rate = srcConfig->sample_rate; + } + } else { + dstConfig->sample_rate = 0; + } + if (dstConfig->config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) { + dstConfig->channel_mask = mChannelMask; + if ((srcConfig != NULL) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK)) { + dstConfig->channel_mask = srcConfig->channel_mask; + } + } else { + dstConfig->channel_mask = AUDIO_CHANNEL_NONE; + } + if (dstConfig->config_mask & AUDIO_PORT_CONFIG_FORMAT) { + dstConfig->format = mFormat; + if ((srcConfig != NULL) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_FORMAT)) { + dstConfig->format = srcConfig->format; + } + } else { + dstConfig->format = AUDIO_FORMAT_INVALID; + } + if (dstConfig->config_mask & AUDIO_PORT_CONFIG_GAIN) { + dstConfig->gain = mGain; + if ((srcConfig != NULL) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_GAIN)) { + dstConfig->gain = srcConfig->gain; + } + } else { + dstConfig->gain.index = -1; + } + if (dstConfig->gain.index != -1) { + dstConfig->config_mask |= AUDIO_PORT_CONFIG_GAIN; + } else { + dstConfig->config_mask &= ~AUDIO_PORT_CONFIG_GAIN; + } +} + +}; // namespace android diff --git 
a/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp new file mode 100644 index 0000000..fe5bc5f --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/ConfigParsingUtils.cpp @@ -0,0 +1,288 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::ConfigParsingUtils" +//#define LOG_NDEBUG 0 + +#include "ConfigParsingUtils.h" +#include "AudioGain.h" +#include <hardware/audio.h> +#include <utils/Log.h> +#include <cutils/misc.h> + +namespace android { + +//static +uint32_t ConfigParsingUtils::stringToEnum(const struct StringToEnum *table, + size_t size, + const char *name) +{ + for (size_t i = 0; i < size; i++) { + if (strcmp(table[i].name, name) == 0) { + ALOGV("stringToEnum() found %s", table[i].name); + return table[i].value; + } + } + return 0; +} + +//static +const char *ConfigParsingUtils::enumToString(const struct StringToEnum *table, + size_t size, + uint32_t value) +{ + for (size_t i = 0; i < size; i++) { + if (table[i].value == value) { + return table[i].name; + } + } + return ""; +} + +//static +bool ConfigParsingUtils::stringToBool(const char *value) +{ + return ((strcasecmp("true", value) == 0) || (strcmp("1", value) == 0)); +} + + +// --- audio_policy.conf file parsing +//static +uint32_t ConfigParsingUtils::parseOutputFlagNames(char *name) +{ + 
uint32_t flag = 0; + + // it is OK to cast name to non const here as we are not going to use it after + // strtok() modifies it + char *flagName = strtok(name, "|"); + while (flagName != NULL) { + if (strlen(flagName) != 0) { + flag |= ConfigParsingUtils::stringToEnum(sOutputFlagNameToEnumTable, + ARRAY_SIZE(sOutputFlagNameToEnumTable), + flagName); + } + flagName = strtok(NULL, "|"); + } + //force direct flag if offload flag is set: offloading implies a direct output stream + // and all common behaviors are driven by checking only the direct flag + // this should normally be set appropriately in the policy configuration file + if ((flag & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) { + flag |= AUDIO_OUTPUT_FLAG_DIRECT; + } + + return flag; +} + +//static +uint32_t ConfigParsingUtils::parseInputFlagNames(char *name) +{ + uint32_t flag = 0; + + // it is OK to cast name to non const here as we are not going to use it after + // strtok() modifies it + char *flagName = strtok(name, "|"); + while (flagName != NULL) { + if (strlen(flagName) != 0) { + flag |= stringToEnum(sInputFlagNameToEnumTable, + ARRAY_SIZE(sInputFlagNameToEnumTable), + flagName); + } + flagName = strtok(NULL, "|"); + } + return flag; +} + +//static +audio_devices_t ConfigParsingUtils::parseDeviceNames(char *name) +{ + uint32_t device = 0; + + char *devName = strtok(name, "|"); + while (devName != NULL) { + if (strlen(devName) != 0) { + device |= stringToEnum(sDeviceNameToEnumTable, + ARRAY_SIZE(sDeviceNameToEnumTable), + devName); + } + devName = strtok(NULL, "|"); + } + return device; +} + +//static +void ConfigParsingUtils::loadHwModule(cnode *root, HwModuleCollection &hwModules, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevices, + bool &isSpeakerDrcEnable) +{ + status_t status = NAME_NOT_FOUND; + cnode *node; + sp<HwModule> module = new HwModule(root->name); + + node = config_find(root, DEVICES_TAG); + if (node != NULL) { + 
node = node->first_child; + while (node) { + ALOGV("loadHwModule() loading device %s", node->name); + status_t tmpStatus = module->loadDevice(node); + if (status == NAME_NOT_FOUND || status == NO_ERROR) { + status = tmpStatus; + } + node = node->next; + } + } + node = config_find(root, OUTPUTS_TAG); + if (node != NULL) { + node = node->first_child; + while (node) { + ALOGV("loadHwModule() loading output %s", node->name); + status_t tmpStatus = module->loadOutput(node); + if (status == NAME_NOT_FOUND || status == NO_ERROR) { + status = tmpStatus; + } + node = node->next; + } + } + node = config_find(root, INPUTS_TAG); + if (node != NULL) { + node = node->first_child; + while (node) { + ALOGV("loadHwModule() loading input %s", node->name); + status_t tmpStatus = module->loadInput(node); + if (status == NAME_NOT_FOUND || status == NO_ERROR) { + status = tmpStatus; + } + node = node->next; + } + } + loadGlobalConfig(root, module, availableInputDevices, availableOutputDevices, + defaultOutputDevices, isSpeakerDrcEnable); + + if (status == NO_ERROR) { + hwModules.add(module); + } +} + +//static +void ConfigParsingUtils::loadHwModules(cnode *root, HwModuleCollection &hwModules, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevices, + bool &isSpeakerDrcEnabled) +{ + cnode *node = config_find(root, AUDIO_HW_MODULE_TAG); + if (node == NULL) { + return; + } + + node = node->first_child; + while (node) { + ALOGV("loadHwModules() loading module %s", node->name); + loadHwModule(node, hwModules, availableInputDevices, availableOutputDevices, + defaultOutputDevices, isSpeakerDrcEnabled); + node = node->next; + } +} + +//static +void ConfigParsingUtils::loadGlobalConfig(cnode *root, const sp<HwModule>& module, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevice, + bool &speakerDrcEnabled) +{ + cnode *node = config_find(root, 
GLOBAL_CONFIG_TAG); + + if (node == NULL) { + return; + } + DeviceVector declaredDevices; + if (module != NULL) { + declaredDevices = module->mDeclaredDevices; + } + + node = node->first_child; + while (node) { + if (strcmp(ATTACHED_OUTPUT_DEVICES_TAG, node->name) == 0) { + availableOutputDevices.loadDevicesFromName((char *)node->value, + declaredDevices); + ALOGV("loadGlobalConfig() Attached Output Devices %08x", + availableOutputDevices.types()); + } else if (strcmp(DEFAULT_OUTPUT_DEVICE_TAG, node->name) == 0) { + audio_devices_t device = (audio_devices_t)stringToEnum( + sDeviceNameToEnumTable, + ARRAY_SIZE(sDeviceNameToEnumTable), + (char *)node->value); + if (device != AUDIO_DEVICE_NONE) { + defaultOutputDevice = new DeviceDescriptor(String8("default-output"), device); + } else { + ALOGW("loadGlobalConfig() default device not specified"); + } + ALOGV("loadGlobalConfig() mDefaultOutputDevice %08x", defaultOutputDevice->type()); + } else if (strcmp(ATTACHED_INPUT_DEVICES_TAG, node->name) == 0) { + availableInputDevices.loadDevicesFromName((char *)node->value, + declaredDevices); + ALOGV("loadGlobalConfig() Available InputDevices %08x", availableInputDevices.types()); + } else if (strcmp(SPEAKER_DRC_ENABLED_TAG, node->name) == 0) { + speakerDrcEnabled = stringToBool((char *)node->value); + ALOGV("loadGlobalConfig() mSpeakerDrcEnabled = %d", speakerDrcEnabled); + } else if (strcmp(AUDIO_HAL_VERSION_TAG, node->name) == 0) { + uint32_t major, minor; + sscanf((char *)node->value, "%u.%u", &major, &minor); + module->mHalVersion = HARDWARE_DEVICE_API_VERSION(major, minor); + ALOGV("loadGlobalConfig() mHalVersion = %04x major %u minor %u", + module->mHalVersion, major, minor); + } + node = node->next; + } +} + +//static +status_t ConfigParsingUtils::loadAudioPolicyConfig(const char *path, + HwModuleCollection &hwModules, + DeviceVector &availableInputDevices, + DeviceVector &availableOutputDevices, + sp<DeviceDescriptor> &defaultOutputDevices, + bool 
&isSpeakerDrcEnabled) +{ + cnode *root; + char *data; + + data = (char *)load_file(path, NULL); + if (data == NULL) { + return -ENODEV; + } + root = config_node("", ""); + config_load(root, data); + + loadHwModules(root, hwModules, + availableInputDevices, availableOutputDevices, + defaultOutputDevices, isSpeakerDrcEnabled); + // legacy audio_policy.conf files have one global_configuration section + loadGlobalConfig(root, hwModules.getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY), + availableInputDevices, availableOutputDevices, + defaultOutputDevices, isSpeakerDrcEnabled); + config_free(root); + free(root); + free(data); + + ALOGI("loadAudioPolicyConfig() loaded %s\n", path); + + return NO_ERROR; +} + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp new file mode 100644 index 0000000..7df7d75 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp @@ -0,0 +1,320 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "APM::Devices" +//#define LOG_NDEBUG 0 + +#include "DeviceDescriptor.h" +#include "AudioGain.h" +#include "HwModule.h" +#include "ConfigParsingUtils.h" + +namespace android { + +String8 DeviceDescriptor::emptyNameStr = String8(""); + +DeviceDescriptor::DeviceDescriptor(const String8& name, audio_devices_t type) : + AudioPort(name, AUDIO_PORT_TYPE_DEVICE, + audio_is_output_device(type) ? AUDIO_PORT_ROLE_SINK : + AUDIO_PORT_ROLE_SOURCE, + NULL), + mAddress(""), mDeviceType(type) +{ + +} + +bool DeviceDescriptor::equals(const sp<DeviceDescriptor>& other) const +{ + // Devices are considered equal if they: + // - are of the same type (a device type cannot be AUDIO_DEVICE_NONE) + // - have the same address or one device does not specify the address + // - have the same channel mask or one device does not specify the channel mask + return (mDeviceType == other->mDeviceType) && + (mAddress == "" || other->mAddress == "" || mAddress == other->mAddress) && + (mChannelMask == 0 || other->mChannelMask == 0 || + mChannelMask == other->mChannelMask); +} + +void DeviceDescriptor::loadGains(cnode *root) +{ + AudioPort::loadGains(root); + if (mGains.size() > 0) { + mGains[0]->getDefaultConfig(&mGain); + } +} + +void DeviceVector::refreshTypes() +{ + mDeviceTypes = AUDIO_DEVICE_NONE; + for(size_t i = 0; i < size(); i++) { + mDeviceTypes |= itemAt(i)->type(); + } + ALOGV("DeviceVector::refreshTypes() mDeviceTypes %08x", mDeviceTypes); +} + +ssize_t DeviceVector::indexOf(const sp<DeviceDescriptor>& item) const +{ + for(size_t i = 0; i < size(); i++) { + if (item->equals(itemAt(i))) { + return i; + } + } + return -1; +} + +ssize_t DeviceVector::add(const sp<DeviceDescriptor>& item) +{ + ssize_t ret = indexOf(item); + + if (ret < 0) { + ret = SortedVector::add(item); + if (ret >= 0) { + refreshTypes(); + } + } else { + ALOGW("DeviceVector::add device %08x already in", item->type()); + ret = -1; + } + return ret; +} + +ssize_t DeviceVector::remove(const 
sp<DeviceDescriptor>& item) +{ + size_t i; + ssize_t ret = indexOf(item); + + if (ret < 0) { + ALOGW("DeviceVector::remove device %08x not in", item->type()); + } else { + ret = SortedVector::removeAt(ret); + if (ret >= 0) { + refreshTypes(); + } + } + return ret; +} + +audio_devices_t DeviceVector::getDevicesFromHwModule(audio_module_handle_t moduleHandle) const +{ + audio_devices_t devices = AUDIO_DEVICE_NONE; + for (size_t i = 0; i < size(); i++) { + if (itemAt(i)->getModuleHandle() == moduleHandle) { + devices |= itemAt(i)->type(); + } + } + return devices; +} + +void DeviceVector::loadDevicesFromType(audio_devices_t types) +{ + DeviceVector deviceList; + + uint32_t role_bit = AUDIO_DEVICE_BIT_IN & types; + types &= ~role_bit; + + while (types) { + uint32_t i = 31 - __builtin_clz(types); + uint32_t type = 1 << i; + types &= ~type; + add(new DeviceDescriptor(String8("device_type"), type | role_bit)); + } +} + +void DeviceVector::loadDevicesFromName(char *name, + const DeviceVector& declaredDevices) +{ + char *devName = strtok(name, "|"); + while (devName != NULL) { + if (strlen(devName) != 0) { + audio_devices_t type = ConfigParsingUtils::stringToEnum(sDeviceNameToEnumTable, + ARRAY_SIZE(sDeviceNameToEnumTable), + devName); + if (type != AUDIO_DEVICE_NONE) { + sp<DeviceDescriptor> dev = new DeviceDescriptor(String8(name), type); + if (type == AUDIO_DEVICE_IN_REMOTE_SUBMIX || + type == AUDIO_DEVICE_OUT_REMOTE_SUBMIX ) { + dev->mAddress = String8("0"); + } + add(dev); + } else { + sp<DeviceDescriptor> deviceDesc = + declaredDevices.getDeviceFromName(String8(devName)); + if (deviceDesc != 0) { + add(deviceDesc); + } + } + } + devName = strtok(NULL, "|"); + } +} + +sp<DeviceDescriptor> DeviceVector::getDevice(audio_devices_t type, String8 address) const +{ + sp<DeviceDescriptor> device; + for (size_t i = 0; i < size(); i++) { + if (itemAt(i)->type() == type) { + if (address == "" || itemAt(i)->mAddress == address) { + device = itemAt(i); + if (itemAt(i)->mAddress == 
address) { + break; + } + } + } + } + ALOGV("DeviceVector::getDevice() for type %08x address %s found %p", + type, address.string(), device.get()); + return device; +} + +sp<DeviceDescriptor> DeviceVector::getDeviceFromId(audio_port_handle_t id) const +{ + sp<DeviceDescriptor> device; + for (size_t i = 0; i < size(); i++) { + if (itemAt(i)->getHandle() == id) { + device = itemAt(i); + break; + } + } + return device; +} + +DeviceVector DeviceVector::getDevicesFromType(audio_devices_t type) const +{ + DeviceVector devices; + bool isOutput = audio_is_output_devices(type); + type &= ~AUDIO_DEVICE_BIT_IN; + for (size_t i = 0; (i < size()) && (type != AUDIO_DEVICE_NONE); i++) { + bool curIsOutput = audio_is_output_devices(itemAt(i)->mDeviceType); + audio_devices_t curType = itemAt(i)->mDeviceType & ~AUDIO_DEVICE_BIT_IN; + if ((isOutput == curIsOutput) && ((type & curType) != 0)) { + devices.add(itemAt(i)); + type &= ~curType; + ALOGV("DeviceVector::getDevicesFromType() for type %x found %p", + itemAt(i)->type(), itemAt(i).get()); + } + } + return devices; +} + +DeviceVector DeviceVector::getDevicesFromTypeAddr( + audio_devices_t type, String8 address) const +{ + DeviceVector devices; + for (size_t i = 0; i < size(); i++) { + if (itemAt(i)->type() == type) { + if (itemAt(i)->mAddress == address) { + devices.add(itemAt(i)); + } + } + } + return devices; +} + +sp<DeviceDescriptor> DeviceVector::getDeviceFromName(const String8& name) const +{ + sp<DeviceDescriptor> device; + for (size_t i = 0; i < size(); i++) { + if (itemAt(i)->mName == name) { + device = itemAt(i); + break; + } + } + return device; +} + + +status_t DeviceVector::dump(int fd, const String8 &direction) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + + snprintf(buffer, SIZE, "\n Available %s devices:\n", direction.string()); + write(fd, buffer, strlen(buffer)); + for (size_t i = 0; i < size(); i++) { + itemAt(i)->dump(fd, 2, i); + } + return NO_ERROR; +} + +audio_policy_dev_state_t 
DeviceVector::getDeviceConnectionState(const sp<DeviceDescriptor> &devDesc) const +{ + ssize_t index = indexOf(devDesc); + return index >= 0 ? AUDIO_POLICY_DEVICE_STATE_AVAILABLE : AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE; +} + +void DeviceDescriptor::toAudioPortConfig(struct audio_port_config *dstConfig, + const struct audio_port_config *srcConfig) const +{ + dstConfig->config_mask = AUDIO_PORT_CONFIG_CHANNEL_MASK|AUDIO_PORT_CONFIG_GAIN; + if (srcConfig != NULL) { + dstConfig->config_mask |= srcConfig->config_mask; + } + + AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig); + + dstConfig->id = mId; + dstConfig->role = audio_is_output_device(mDeviceType) ? + AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE; + dstConfig->type = AUDIO_PORT_TYPE_DEVICE; + dstConfig->ext.device.type = mDeviceType; + + //TODO Understand why this test is necessary. i.e. why at boot time does it crash + // without the test? + // This has been demonstrated to NOT be true (at start up) + // ALOG_ASSERT(mModule != NULL); + dstConfig->ext.device.hw_module = mModule != 0 ? 
mModule->mHandle : AUDIO_IO_HANDLE_NONE; + strncpy(dstConfig->ext.device.address, mAddress.string(), AUDIO_DEVICE_MAX_ADDRESS_LEN); +} + +void DeviceDescriptor::toAudioPort(struct audio_port *port) const +{ + ALOGV("DeviceDescriptor::toAudioPort() handle %d type %x", mId, mDeviceType); + AudioPort::toAudioPort(port); + port->id = mId; + toAudioPortConfig(&port->active_config); + port->ext.device.type = mDeviceType; + port->ext.device.hw_module = mModule->mHandle; + strncpy(port->ext.device.address, mAddress.string(), AUDIO_DEVICE_MAX_ADDRESS_LEN); +} + +status_t DeviceDescriptor::dump(int fd, int spaces, int index) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "%*sDevice %d:\n", spaces, "", index+1); + result.append(buffer); + if (mId != 0) { + snprintf(buffer, SIZE, "%*s- id: %2d\n", spaces, "", mId); + result.append(buffer); + } + snprintf(buffer, SIZE, "%*s- type: %-48s\n", spaces, "", + ConfigParsingUtils::enumToString(sDeviceNameToEnumTable, + ARRAY_SIZE(sDeviceNameToEnumTable), + mDeviceType)); + result.append(buffer); + if (mAddress.size() != 0) { + snprintf(buffer, SIZE, "%*s- address: %-32s\n", spaces, "", mAddress.string()); + result.append(buffer); + } + write(fd, result.string(), result.size()); + AudioPort::dump(fd, spaces); + + return NO_ERROR; +} + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp new file mode 100644 index 0000000..33d838d --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp @@ -0,0 +1,192 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::EffectDescriptor" +//#define LOG_NDEBUG 0 + +#include "EffectDescriptor.h" +#include <utils/String8.h> + +namespace android { + +status_t EffectDescriptor::dump(int fd) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, " I/O: %d\n", mIo); + result.append(buffer); + snprintf(buffer, SIZE, " Strategy: %d\n", mStrategy); + result.append(buffer); + snprintf(buffer, SIZE, " Session: %d\n", mSession); + result.append(buffer); + snprintf(buffer, SIZE, " Name: %s\n", mDesc.name); + result.append(buffer); + snprintf(buffer, SIZE, " %s\n", mEnabled ? 
"Enabled" : "Disabled"); + result.append(buffer); + write(fd, result.string(), result.size()); + + return NO_ERROR; +} + +EffectDescriptorCollection::EffectDescriptorCollection() : + mTotalEffectsCpuLoad(0), + mTotalEffectsMemory(0) +{ + +} + +status_t EffectDescriptorCollection::registerEffect(const effect_descriptor_t *desc, + audio_io_handle_t io, + uint32_t strategy, + int session, + int id) +{ + if (mTotalEffectsMemory + desc->memoryUsage > getMaxEffectsMemory()) { + ALOGW("registerEffect() memory limit exceeded for Fx %s, Memory %d KB", + desc->name, desc->memoryUsage); + return INVALID_OPERATION; + } + mTotalEffectsMemory += desc->memoryUsage; + ALOGV("registerEffect() effect %s, io %d, strategy %d session %d id %d", + desc->name, io, strategy, session, id); + ALOGV("registerEffect() memory %d, total memory %d", desc->memoryUsage, mTotalEffectsMemory); + + sp<EffectDescriptor> effectDesc = new EffectDescriptor(); + memcpy (&effectDesc->mDesc, desc, sizeof(effect_descriptor_t)); + effectDesc->mIo = io; + effectDesc->mStrategy = static_cast<routing_strategy>(strategy); + effectDesc->mSession = session; + effectDesc->mEnabled = false; + + add(id, effectDesc); + + return NO_ERROR; +} + +status_t EffectDescriptorCollection::unregisterEffect(int id) +{ + ssize_t index = indexOfKey(id); + if (index < 0) { + ALOGW("unregisterEffect() unknown effect ID %d", id); + return INVALID_OPERATION; + } + + sp<EffectDescriptor> effectDesc = valueAt(index); + + setEffectEnabled(effectDesc, false); + + if (mTotalEffectsMemory < effectDesc->mDesc.memoryUsage) { + ALOGW("unregisterEffect() memory %d too big for total %d", + effectDesc->mDesc.memoryUsage, mTotalEffectsMemory); + effectDesc->mDesc.memoryUsage = mTotalEffectsMemory; + } + mTotalEffectsMemory -= effectDesc->mDesc.memoryUsage; + ALOGV("unregisterEffect() effect %s, ID %d, memory %d total memory %d", + effectDesc->mDesc.name, id, effectDesc->mDesc.memoryUsage, mTotalEffectsMemory); + + removeItem(id); + + return 
NO_ERROR; +} + +status_t EffectDescriptorCollection::setEffectEnabled(int id, bool enabled) +{ + ssize_t index = indexOfKey(id); + if (index < 0) { + ALOGW("unregisterEffect() unknown effect ID %d", id); + return INVALID_OPERATION; + } + + return setEffectEnabled(valueAt(index), enabled); +} + + +status_t EffectDescriptorCollection::setEffectEnabled(const sp<EffectDescriptor> &effectDesc, + bool enabled) +{ + if (enabled == effectDesc->mEnabled) { + ALOGV("setEffectEnabled(%s) effect already %s", + enabled?"true":"false", enabled?"enabled":"disabled"); + return INVALID_OPERATION; + } + + if (enabled) { + if (mTotalEffectsCpuLoad + effectDesc->mDesc.cpuLoad > getMaxEffectsCpuLoad()) { + ALOGW("setEffectEnabled(true) CPU Load limit exceeded for Fx %s, CPU %f MIPS", + effectDesc->mDesc.name, (float)effectDesc->mDesc.cpuLoad/10); + return INVALID_OPERATION; + } + mTotalEffectsCpuLoad += effectDesc->mDesc.cpuLoad; + ALOGV("setEffectEnabled(true) total CPU %d", mTotalEffectsCpuLoad); + } else { + if (mTotalEffectsCpuLoad < effectDesc->mDesc.cpuLoad) { + ALOGW("setEffectEnabled(false) CPU load %d too high for total %d", + effectDesc->mDesc.cpuLoad, mTotalEffectsCpuLoad); + effectDesc->mDesc.cpuLoad = mTotalEffectsCpuLoad; + } + mTotalEffectsCpuLoad -= effectDesc->mDesc.cpuLoad; + ALOGV("setEffectEnabled(false) total CPU %d", mTotalEffectsCpuLoad); + } + effectDesc->mEnabled = enabled; + return NO_ERROR; +} + +bool EffectDescriptorCollection::isNonOffloadableEffectEnabled() +{ + for (size_t i = 0; i < size(); i++) { + sp<EffectDescriptor> effectDesc = valueAt(i); + if (effectDesc->mEnabled && (effectDesc->mStrategy == STRATEGY_MEDIA) && + ((effectDesc->mDesc.flags & EFFECT_FLAG_OFFLOAD_SUPPORTED) == 0)) { + ALOGV("isNonOffloadableEffectEnabled() non offloadable effect %s enabled on session %d", + effectDesc->mDesc.name, effectDesc->mSession); + return true; + } + } + return false; +} + +uint32_t EffectDescriptorCollection::getMaxEffectsCpuLoad() const +{ + return 
MAX_EFFECTS_CPU_LOAD; +} + +uint32_t EffectDescriptorCollection::getMaxEffectsMemory() const +{ + return MAX_EFFECTS_MEMORY; +} + +status_t EffectDescriptorCollection::dump(int fd) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + + snprintf(buffer, SIZE, "\nTotal Effects CPU: %f MIPS, Total Effects memory: %d KB\n", + (float)mTotalEffectsCpuLoad/10, mTotalEffectsMemory); + write(fd, buffer, strlen(buffer)); + + snprintf(buffer, SIZE, "Registered effects:\n"); + write(fd, buffer, strlen(buffer)); + for (size_t i = 0; i < size(); i++) { + snprintf(buffer, SIZE, "- Effect %d dump:\n", keyAt(i)); + write(fd, buffer, strlen(buffer)); + valueAt(i)->dump(fd); + } + return NO_ERROR; +} + +}; //namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp new file mode 100644 index 0000000..0097d69 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp @@ -0,0 +1,371 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "APM::HwModule" +//#define LOG_NDEBUG 0 + +#include "HwModule.h" +#include "IOProfile.h" +#include "AudioGain.h" +#include "ConfigParsingUtils.h" +#include "audio_policy_conf.h" +#include <hardware/audio.h> +#include <policy.h> + +namespace android { + +HwModule::HwModule(const char *name) + : mName(strndup(name, AUDIO_HARDWARE_MODULE_ID_MAX_LEN)), + mHalVersion(AUDIO_DEVICE_API_VERSION_MIN), mHandle(0) +{ +} + +HwModule::~HwModule() +{ + for (size_t i = 0; i < mOutputProfiles.size(); i++) { + mOutputProfiles[i]->mSupportedDevices.clear(); + } + for (size_t i = 0; i < mInputProfiles.size(); i++) { + mInputProfiles[i]->mSupportedDevices.clear(); + } + free((void *)mName); +} + +status_t HwModule::loadInput(cnode *root) +{ + cnode *node = root->first_child; + + sp<IOProfile> profile = new IOProfile(String8(root->name), AUDIO_PORT_ROLE_SINK, this); + + while (node) { + if (strcmp(node->name, SAMPLING_RATES_TAG) == 0) { + profile->loadSamplingRates((char *)node->value); + } else if (strcmp(node->name, FORMATS_TAG) == 0) { + profile->loadFormats((char *)node->value); + } else if (strcmp(node->name, CHANNELS_TAG) == 0) { + profile->loadInChannels((char *)node->value); + } else if (strcmp(node->name, DEVICES_TAG) == 0) { + profile->mSupportedDevices.loadDevicesFromName((char *)node->value, + mDeclaredDevices); + } else if (strcmp(node->name, FLAGS_TAG) == 0) { + profile->mFlags = ConfigParsingUtils::parseInputFlagNames((char *)node->value); + } else if (strcmp(node->name, GAINS_TAG) == 0) { + profile->loadGains(node); + } + node = node->next; + } + ALOGW_IF(profile->mSupportedDevices.isEmpty(), + "loadInput() invalid supported devices"); + ALOGW_IF(profile->mChannelMasks.size() == 0, + "loadInput() invalid supported channel masks"); + ALOGW_IF(profile->mSamplingRates.size() == 0, + "loadInput() invalid supported sampling rates"); + ALOGW_IF(profile->mFormats.size() == 0, + "loadInput() invalid supported formats"); + if 
(!profile->mSupportedDevices.isEmpty() && + (profile->mChannelMasks.size() != 0) && + (profile->mSamplingRates.size() != 0) && + (profile->mFormats.size() != 0)) { + + ALOGV("loadInput() adding input Supported Devices %04x", + profile->mSupportedDevices.types()); + + mInputProfiles.add(profile); + return NO_ERROR; + } else { + return BAD_VALUE; + } +} + +status_t HwModule::loadOutput(cnode *root) +{ + cnode *node = root->first_child; + + sp<IOProfile> profile = new IOProfile(String8(root->name), AUDIO_PORT_ROLE_SOURCE, this); + + while (node) { + if (strcmp(node->name, SAMPLING_RATES_TAG) == 0) { + profile->loadSamplingRates((char *)node->value); + } else if (strcmp(node->name, FORMATS_TAG) == 0) { + profile->loadFormats((char *)node->value); + } else if (strcmp(node->name, CHANNELS_TAG) == 0) { + profile->loadOutChannels((char *)node->value); + } else if (strcmp(node->name, DEVICES_TAG) == 0) { + profile->mSupportedDevices.loadDevicesFromName((char *)node->value, + mDeclaredDevices); + } else if (strcmp(node->name, FLAGS_TAG) == 0) { + profile->mFlags = ConfigParsingUtils::parseOutputFlagNames((char *)node->value); + } else if (strcmp(node->name, GAINS_TAG) == 0) { + profile->loadGains(node); + } + node = node->next; + } + ALOGW_IF(profile->mSupportedDevices.isEmpty(), + "loadOutput() invalid supported devices"); + ALOGW_IF(profile->mChannelMasks.size() == 0, + "loadOutput() invalid supported channel masks"); + ALOGW_IF(profile->mSamplingRates.size() == 0, + "loadOutput() invalid supported sampling rates"); + ALOGW_IF(profile->mFormats.size() == 0, + "loadOutput() invalid supported formats"); + if (!profile->mSupportedDevices.isEmpty() && + (profile->mChannelMasks.size() != 0) && + (profile->mSamplingRates.size() != 0) && + (profile->mFormats.size() != 0)) { + + ALOGV("loadOutput() adding output Supported Devices %04x, mFlags %04x", + profile->mSupportedDevices.types(), profile->mFlags); + + mOutputProfiles.add(profile); + return NO_ERROR; + } else { + return 
BAD_VALUE; + } +} + +status_t HwModule::loadDevice(cnode *root) +{ + cnode *node = root->first_child; + + audio_devices_t type = AUDIO_DEVICE_NONE; + while (node) { + if (strcmp(node->name, APM_DEVICE_TYPE) == 0) { + type = ConfigParsingUtils::parseDeviceNames((char *)node->value); + break; + } + node = node->next; + } + if (type == AUDIO_DEVICE_NONE || + (!audio_is_input_device(type) && !audio_is_output_device(type))) { + ALOGW("loadDevice() bad type %08x", type); + return BAD_VALUE; + } + sp<DeviceDescriptor> deviceDesc = new DeviceDescriptor(String8(root->name), type); + deviceDesc->mModule = this; + + node = root->first_child; + while (node) { + if (strcmp(node->name, APM_DEVICE_ADDRESS) == 0) { + deviceDesc->mAddress = String8((char *)node->value); + } else if (strcmp(node->name, CHANNELS_TAG) == 0) { + if (audio_is_input_device(type)) { + deviceDesc->loadInChannels((char *)node->value); + } else { + deviceDesc->loadOutChannels((char *)node->value); + } + } else if (strcmp(node->name, GAINS_TAG) == 0) { + deviceDesc->loadGains(node); + } + node = node->next; + } + + ALOGV("loadDevice() adding device name %s type %08x address %s", + deviceDesc->mName.string(), type, deviceDesc->mAddress.string()); + + mDeclaredDevices.add(deviceDesc); + + return NO_ERROR; +} + +status_t HwModule::addOutputProfile(String8 name, const audio_config_t *config, + audio_devices_t device, String8 address) +{ + sp<IOProfile> profile = new IOProfile(name, AUDIO_PORT_ROLE_SOURCE, this); + + profile->mSamplingRates.add(config->sample_rate); + profile->mChannelMasks.add(config->channel_mask); + profile->mFormats.add(config->format); + + sp<DeviceDescriptor> devDesc = new DeviceDescriptor(name, device); + devDesc->mAddress = address; + profile->mSupportedDevices.add(devDesc); + + mOutputProfiles.add(profile); + + return NO_ERROR; +} + +status_t HwModule::removeOutputProfile(String8 name) +{ + for (size_t i = 0; i < mOutputProfiles.size(); i++) { + if (mOutputProfiles[i]->mName == name) { + 
mOutputProfiles.removeAt(i); + break; + } + } + + return NO_ERROR; +} + +status_t HwModule::addInputProfile(String8 name, const audio_config_t *config, + audio_devices_t device, String8 address) +{ + sp<IOProfile> profile = new IOProfile(name, AUDIO_PORT_ROLE_SINK, this); + + profile->mSamplingRates.add(config->sample_rate); + profile->mChannelMasks.add(config->channel_mask); + profile->mFormats.add(config->format); + + sp<DeviceDescriptor> devDesc = new DeviceDescriptor(name, device); + devDesc->mAddress = address; + profile->mSupportedDevices.add(devDesc); + + ALOGV("addInputProfile() name %s rate %d mask 0x08", name.string(), config->sample_rate, config->channel_mask); + + mInputProfiles.add(profile); + + return NO_ERROR; +} + +status_t HwModule::removeInputProfile(String8 name) +{ + for (size_t i = 0; i < mInputProfiles.size(); i++) { + if (mInputProfiles[i]->mName == name) { + mInputProfiles.removeAt(i); + break; + } + } + + return NO_ERROR; +} + + +void HwModule::dump(int fd) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, " - name: %s\n", mName); + result.append(buffer); + snprintf(buffer, SIZE, " - handle: %d\n", mHandle); + result.append(buffer); + snprintf(buffer, SIZE, " - version: %u.%u\n", mHalVersion >> 8, mHalVersion & 0xFF); + result.append(buffer); + write(fd, result.string(), result.size()); + if (mOutputProfiles.size()) { + write(fd, " - outputs:\n", strlen(" - outputs:\n")); + for (size_t i = 0; i < mOutputProfiles.size(); i++) { + snprintf(buffer, SIZE, " output %zu:\n", i); + write(fd, buffer, strlen(buffer)); + mOutputProfiles[i]->dump(fd); + } + } + if (mInputProfiles.size()) { + write(fd, " - inputs:\n", strlen(" - inputs:\n")); + for (size_t i = 0; i < mInputProfiles.size(); i++) { + snprintf(buffer, SIZE, " input %zu:\n", i); + write(fd, buffer, strlen(buffer)); + mInputProfiles[i]->dump(fd); + } + } + if (mDeclaredDevices.size()) { + write(fd, " - devices:\n", strlen(" - devices:\n")); + 
for (size_t i = 0; i < mDeclaredDevices.size(); i++) { + mDeclaredDevices[i]->dump(fd, 4, i); + } + } +} + +sp <HwModule> HwModuleCollection::getModuleFromName(const char *name) const +{ + sp <HwModule> module; + + for (size_t i = 0; i < size(); i++) + { + if (strcmp(itemAt(i)->mName, name) == 0) { + return itemAt(i); + } + } + return module; +} + + +sp <HwModule> HwModuleCollection::getModuleForDevice(audio_devices_t device) const +{ + sp <HwModule> module; + + for (size_t i = 0; i < size(); i++) { + if (itemAt(i)->mHandle == 0) { + continue; + } + if (audio_is_output_device(device)) { + for (size_t j = 0; j < itemAt(i)->mOutputProfiles.size(); j++) + { + if (itemAt(i)->mOutputProfiles[j]->mSupportedDevices.types() & device) { + return itemAt(i); + } + } + } else { + for (size_t j = 0; j < itemAt(i)->mInputProfiles.size(); j++) { + if (itemAt(i)->mInputProfiles[j]->mSupportedDevices.types() & + device & ~AUDIO_DEVICE_BIT_IN) { + return itemAt(i); + } + } + } + } + return module; +} + +sp<DeviceDescriptor> HwModuleCollection::getDeviceDescriptor(const audio_devices_t device, + const char *device_address, + const char *device_name) const +{ + String8 address = (device_address == NULL) ? String8("") : String8(device_address); + // handle legacy remote submix case where the address was not always specified + if (device_distinguishes_on_address(device) && (address.length() == 0)) { + address = String8("0"); + } + + for (size_t i = 0; i < size(); i++) { + const sp<HwModule> hwModule = itemAt(i); + if (hwModule->mHandle == 0) { + continue; + } + DeviceVector deviceList = + hwModule->mDeclaredDevices.getDevicesFromTypeAddr(device, address); + if (!deviceList.isEmpty()) { + return deviceList.itemAt(0); + } + deviceList = hwModule->mDeclaredDevices.getDevicesFromType(device); + if (!deviceList.isEmpty()) { + return deviceList.itemAt(0); + } + } + + sp<DeviceDescriptor> devDesc = + new DeviceDescriptor(String8(device_name != NULL ? 
device_name : ""), device); + devDesc->mAddress = address; + return devDesc; +} + +status_t HwModuleCollection::dump(int fd) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + + snprintf(buffer, SIZE, "\nHW Modules dump:\n"); + write(fd, buffer, strlen(buffer)); + for (size_t i = 0; i < size(); i++) { + snprintf(buffer, SIZE, "- HW Module %zu:\n", i + 1); + write(fd, buffer, strlen(buffer)); + itemAt(i)->dump(fd); + } + return NO_ERROR; +} + +} //namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp new file mode 100644 index 0000000..376dd22 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp @@ -0,0 +1,149 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::IOProfile" +//#define LOG_NDEBUG 0 + +#include "IOProfile.h" +#include "HwModule.h" +#include "AudioGain.h" + +namespace android { + +IOProfile::IOProfile(const String8& name, audio_port_role_t role, + const sp<HwModule>& module) + : AudioPort(name, AUDIO_PORT_TYPE_MIX, role, module) +{ +} + +IOProfile::~IOProfile() +{ +} + +// checks if the IO profile is compatible with specified parameters. 
+// Sampling rate, format and channel mask must be specified in order to +// get a valid a match +bool IOProfile::isCompatibleProfile(audio_devices_t device, + String8 address, + uint32_t samplingRate, + uint32_t *updatedSamplingRate, + audio_format_t format, + audio_channel_mask_t channelMask, + uint32_t flags) const +{ + const bool isPlaybackThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SOURCE; + const bool isRecordThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SINK; + ALOG_ASSERT(isPlaybackThread != isRecordThread); + + + if (device != AUDIO_DEVICE_NONE) { + // just check types if multiple devices are selected + if (popcount(device & ~AUDIO_DEVICE_BIT_IN) > 1) { + if ((mSupportedDevices.types() & device) != device) { + return false; + } + } else if (mSupportedDevices.getDevice(device, address) == 0) { + return false; + } + } + + if (samplingRate == 0) { + return false; + } + uint32_t myUpdatedSamplingRate = samplingRate; + if (isPlaybackThread && checkExactSamplingRate(samplingRate) != NO_ERROR) { + return false; + } + if (isRecordThread && checkCompatibleSamplingRate(samplingRate, &myUpdatedSamplingRate) != + NO_ERROR) { + return false; + } + + if (!audio_is_valid_format(format) || checkFormat(format) != NO_ERROR) { + return false; + } + + if (isPlaybackThread && (!audio_is_output_channel(channelMask) || + checkExactChannelMask(channelMask) != NO_ERROR)) { + return false; + } + if (isRecordThread && (!audio_is_input_channel(channelMask) || + checkCompatibleChannelMask(channelMask) != NO_ERROR)) { + return false; + } + + if (isPlaybackThread && (mFlags & flags) != flags) { + return false; + } + // The only input flag that is allowed to be different is the fast flag. + // An existing fast stream is compatible with a normal track request. + // An existing normal stream is compatible with a fast track request, + // but the fast request will be denied by AudioFlinger and converted to normal track. 
+ if (isRecordThread && ((mFlags ^ flags) & + ~AUDIO_INPUT_FLAG_FAST)) { + return false; + } + + if (updatedSamplingRate != NULL) { + *updatedSamplingRate = myUpdatedSamplingRate; + } + return true; +} + +void IOProfile::dump(int fd) +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + AudioPort::dump(fd, 4); + + snprintf(buffer, SIZE, " - flags: 0x%04x\n", mFlags); + result.append(buffer); + snprintf(buffer, SIZE, " - devices:\n"); + result.append(buffer); + write(fd, result.string(), result.size()); + for (size_t i = 0; i < mSupportedDevices.size(); i++) { + mSupportedDevices[i]->dump(fd, 6, i); + } +} + +void IOProfile::log() +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + ALOGV(" - sampling rates: "); + for (size_t i = 0; i < mSamplingRates.size(); i++) { + ALOGV(" %d", mSamplingRates[i]); + } + + ALOGV(" - channel masks: "); + for (size_t i = 0; i < mChannelMasks.size(); i++) { + ALOGV(" 0x%04x", mChannelMasks[i]); + } + + ALOGV(" - formats: "); + for (size_t i = 0; i < mFormats.size(); i++) { + ALOGV(" 0x%08x", mFormats[i]); + } + + ALOGV(" - devices: 0x%04x\n", mSupportedDevices.types()); + ALOGV(" - flags: 0x%04x\n", mFlags); +} + +}; // namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/SoundTriggerSession.cpp b/services/audiopolicy/common/managerdefinitions/src/SoundTriggerSession.cpp new file mode 100644 index 0000000..8ca3ae0 --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/SoundTriggerSession.cpp @@ -0,0 +1,45 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::SoundTriggerSession" +//#define LOG_NDEBUG 0 + +#include "SoundTriggerSession.h" + + +namespace android { + +status_t SoundTriggerSessionCollection::acquireSession(audio_session_t session, + audio_io_handle_t ioHandle) +{ + add(session, ioHandle); + + return NO_ERROR; +} + +status_t SoundTriggerSessionCollection::releaseSession(audio_session_t session) +{ + ssize_t index = indexOfKey(session); + if (index < 0) { + ALOGW("acquireSoundTriggerSession() session %d not registered", session); + return BAD_VALUE; + } + + removeItem(session); + return NO_ERROR; +} + +}; //namespace android diff --git a/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp new file mode 100644 index 0000000..b682e2c --- /dev/null +++ b/services/audiopolicy/common/managerdefinitions/src/StreamDescriptor.cpp @@ -0,0 +1,162 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "APM::Volumes" +//#define LOG_NDEBUG 0 + +//#define VERY_VERBOSE_LOGGING +#ifdef VERY_VERBOSE_LOGGING +#define ALOGVV ALOGV +#else +#define ALOGVV(a...) do { } while(0) +#endif + +#include "StreamDescriptor.h" +#include <utils/Log.h> +#include <utils/String8.h> + +namespace android { + +// --- StreamDescriptor class implementation + +StreamDescriptor::StreamDescriptor() + : mIndexMin(0), mIndexMax(1), mCanBeMuted(true) +{ + mIndexCur.add(AUDIO_DEVICE_OUT_DEFAULT, 0); +} + +int StreamDescriptor::getVolumeIndex(audio_devices_t device) const +{ + device = Volume::getDeviceForVolume(device); + // there is always a valid entry for AUDIO_DEVICE_OUT_DEFAULT + if (mIndexCur.indexOfKey(device) < 0) { + device = AUDIO_DEVICE_OUT_DEFAULT; + } + return mIndexCur.valueFor(device); +} + +void StreamDescriptor::clearCurrentVolumeIndex() +{ + mIndexCur.clear(); +} + +void StreamDescriptor::addCurrentVolumeIndex(audio_devices_t device, int index) +{ + mIndexCur.add(device, index); +} + +void StreamDescriptor::setVolumeIndexMin(int volIndexMin) +{ + mIndexMin = volIndexMin; +} + +void StreamDescriptor::setVolumeIndexMax(int volIndexMax) +{ + mIndexMax = volIndexMax; +} + +void StreamDescriptor::setVolumeCurvePoint(Volume::device_category deviceCategory, + const VolumeCurvePoint *point) +{ + mVolumeCurve[deviceCategory] = point; +} + +void StreamDescriptor::dump(int fd) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + String8 result; + + snprintf(buffer, SIZE, "%s %02d %02d ", + mCanBeMuted ? 
"true " : "false", mIndexMin, mIndexMax); + result.append(buffer); + for (size_t i = 0; i < mIndexCur.size(); i++) { + snprintf(buffer, SIZE, "%04x : %02d, ", + mIndexCur.keyAt(i), + mIndexCur.valueAt(i)); + result.append(buffer); + } + result.append("\n"); + + write(fd, result.string(), result.size()); +} + +StreamDescriptorCollection::StreamDescriptorCollection() +{ + for (size_t stream = 0 ; stream < AUDIO_STREAM_CNT; stream++) { + add(static_cast<audio_stream_type_t>(stream), StreamDescriptor()); + } +} + +bool StreamDescriptorCollection::canBeMuted(audio_stream_type_t stream) +{ + return valueAt(stream).canBeMuted(); +} + +void StreamDescriptorCollection::clearCurrentVolumeIndex(audio_stream_type_t stream) +{ + editValueAt(stream).clearCurrentVolumeIndex(); +} + +void StreamDescriptorCollection::addCurrentVolumeIndex(audio_stream_type_t stream, + audio_devices_t device, int index) +{ + editValueAt(stream).addCurrentVolumeIndex(device, index); +} + +void StreamDescriptorCollection::setVolumeCurvePoint(audio_stream_type_t stream, + Volume::device_category deviceCategory, + const VolumeCurvePoint *point) +{ + editValueAt(stream).setVolumeCurvePoint(deviceCategory, point); +} + +const VolumeCurvePoint *StreamDescriptorCollection::getVolumeCurvePoint(audio_stream_type_t stream, + Volume::device_category deviceCategory) const +{ + return valueAt(stream).getVolumeCurvePoint(deviceCategory); +} + +void StreamDescriptorCollection::setVolumeIndexMin(audio_stream_type_t stream,int volIndexMin) +{ + return editValueAt(stream).setVolumeIndexMin(volIndexMin); +} + +void StreamDescriptorCollection::setVolumeIndexMax(audio_stream_type_t stream,int volIndexMax) +{ + return editValueAt(stream).setVolumeIndexMax(volIndexMax); +} + +status_t StreamDescriptorCollection::dump(int fd) const +{ + const size_t SIZE = 256; + char buffer[SIZE]; + + snprintf(buffer, SIZE, "\nStreams dump:\n"); + write(fd, buffer, strlen(buffer)); + snprintf(buffer, SIZE, + " Stream Can be muted Index Min 
Index Max Index Cur [device : index]...\n"); + write(fd, buffer, strlen(buffer)); + for (size_t i = 0; i < size(); i++) { + snprintf(buffer, SIZE, " %02zu ", i); + write(fd, buffer, strlen(buffer)); + valueAt(i).dump(fd); + } + + return NO_ERROR; +} + +}; // namespace android diff --git a/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h b/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h new file mode 100755 index 0000000..eadaa77 --- /dev/null +++ b/services/audiopolicy/engine/interface/AudioPolicyManagerInterface.h @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include <AudioPolicyManagerObserver.h> +#include <RoutingStrategy.h> +#include <Volume.h> +#include <HwModule.h> +#include <DeviceDescriptor.h> +#include <system/audio.h> +#include <system/audio_policy.h> +#include <utils/Errors.h> +#include <utils/Vector.h> + +namespace android { + +/** + * This interface is dedicated to the policy manager that a Policy Engine shall implement. + */ +class AudioPolicyManagerInterface +{ +public: + /** + * Checks if the engine was correctly initialized. + * + * @return NO_ERROR if initialization has been done correctly, error code otherwise.. + */ + virtual status_t initCheck() = 0; + + /** + * Sets the Manager observer that allows the engine to retrieve information on collection + * of devices, streams, HwModules, ... 
+ * + * @param[in] observer handle on the manager. + */ + virtual void setObserver(AudioPolicyManagerObserver *observer) = 0; + + /** + * Get the input device selected for a given input source. + * + * @param[in] inputSource to get the selected input device associated to + * + * @return selected input device for the given input source, may be none if error. + */ + virtual audio_devices_t getDeviceForInputSource(audio_source_t inputSource) const = 0; + + /** + * Get the output device associated to a given strategy. + * + * @param[in] stream type for which the selected ouput device is requested. + * + * @return selected ouput device for the given strategy, may be none if error. + */ + virtual audio_devices_t getDeviceForStrategy(routing_strategy stategy) const = 0; + + /** + * Get the strategy selected for a given stream type. + * + * @param[in] stream: for which the selected strategy followed by is requested. + * + * @return strategy to be followed. + */ + virtual routing_strategy getStrategyForStream(audio_stream_type_t stream) = 0; + + /** + * Get the strategy selected for a given usage. + * + * @param[in] usage to get the selected strategy followed by. + * + * @return strategy to be followed. + */ + virtual routing_strategy getStrategyForUsage(audio_usage_t usage) = 0; + + /** + * Set the Telephony Mode. + * + * @param[in] mode: Android Phone state (normal, ringtone, csv, in communication) + * + * @return NO_ERROR if Telephony Mode set correctly, error code otherwise. + */ + virtual status_t setPhoneState(audio_mode_t mode) = 0; + + /** + * Get the telephony Mode + * + * @return the current telephony mode + */ + virtual audio_mode_t getPhoneState() const = 0; + + /** + * Set Force Use config for a given usage. + * + * @param[in] usage for which a configuration shall be forced. + * @param[in] config wished to be forced for the given usage. + * + * @return NO_ERROR if the Force Use config was set correctly, error code otherwise (e.g. 
config not + * allowed a given usage...) + */ + virtual status_t setForceUse(audio_policy_force_use_t usage, + audio_policy_forced_cfg_t config) = 0; + + /** + * Get Force Use config for a given usage. + * + * @param[in] usage for which a configuration shall be forced. + * + * @return config wished to be forced for the given usage. + */ + virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) const = 0; + + /** + * Set the connection state of device(s). + * + * @param[in] devDesc for which the state has changed. + * @param[in] state of availability of this(these) device(s). + * + * @return NO_ERROR if devices criterion updated correctly, error code otherwise. + */ + virtual status_t setDeviceConnectionState(const android::sp<android::DeviceDescriptor> devDesc, + audio_policy_dev_state_t state) = 0; + + /** + * Translate a volume index given by the UI to an amplification value for a stream type + * and a device category. + * + * @param[in] deviceCategory for which the conversion is requested. + * @param[in] stream type for which the conversion is requested. + * @param[in] indexInUi index received from the UI to be translated. + * + * @return amplification value matching the UI index for this given device and stream. + */ + virtual float volIndexToAmpl(Volume::device_category deviceCategory, audio_stream_type_t stream, + int indexInUi) = 0; + + /** + * Initialize the min / max index of volume applicable for a given stream type. These indexes + * will be used upon conversion of UI index to volume amplification. + * + * @param[in] stream type for which the indexes need to be set + * @param[in] indexMin Minimum index allowed for this stream. + * @param[in] indexMax Maximum index allowed for this stream. 
+ */ + virtual status_t initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax) = 0; + + /** + * Initialize volume curves for each strategy and device category + * + * @param[in] isSpeakerDrcEnabled true on devices that use DRC on the DEVICE_CATEGORY_SPEAKER + path to boost soft sounds, used to adjust volume curves accordingly + */ + virtual void initializeVolumeCurves(bool isSpeakerDrcEnabled) = 0; + +protected: + virtual ~AudioPolicyManagerInterface() {} +}; + +}; // namespace android diff --git a/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h b/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h new file mode 100755 index 0000000..4f5427e --- /dev/null +++ b/services/audiopolicy/engine/interface/AudioPolicyManagerObserver.h @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +#include <AudioGain.h> +#include <AudioPort.h> +#include <AudioPatch.h> +#include <IOProfile.h> +#include <DeviceDescriptor.h> +#include <AudioInputDescriptor.h> +#include <AudioOutputDescriptor.h> +#include <AudioPolicyMix.h> +#include <SoundTriggerSession.h> +#include <StreamDescriptor.h> + +namespace android { + +/** + * This interface is an observer that the manager shall implement to allows e.g. the engine + * to access to policy pillars elements (like output / input descritors collections, + * HwModule collections, AudioMix, ... 
+ */ +class AudioPolicyManagerObserver +{ +public: + virtual const AudioPatchCollection &getAudioPatches() const = 0; + + virtual const SoundTriggerSessionCollection &getSoundTriggerSessionCollection() const = 0; + + virtual const AudioPolicyMixCollection &getAudioPolicyMixCollection() const = 0; + + virtual const AudioOutputCollection &getOutputs() const = 0; + + virtual const AudioInputCollection &getInputs() const = 0; + + virtual const DeviceVector &getAvailableOutputDevices() const = 0; + + virtual const DeviceVector &getAvailableInputDevices() const = 0; + + virtual StreamDescriptorCollection &getStreamDescriptors() = 0; + + virtual const sp<DeviceDescriptor> &getDefaultOutputDevice() const = 0; + +protected: + virtual ~AudioPolicyManagerObserver() {} +}; + +}; // namespace android diff --git a/services/audiopolicy/enginedefault/Android.mk b/services/audiopolicy/enginedefault/Android.mk new file mode 100755 index 0000000..b0ae835 --- /dev/null +++ b/services/audiopolicy/enginedefault/Android.mk @@ -0,0 +1,48 @@ +LOCAL_PATH := $(call my-dir) + +# Component build +####################################################################### + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := \ + src/Engine.cpp \ + src/EngineInstance.cpp \ + src/Gains.cpp \ + + +audio_policy_engine_includes_common := \ + $(LOCAL_PATH)/include \ + $(TOPDIR)frameworks/av/services/audiopolicy/engine/interface + +LOCAL_CFLAGS += \ + -Wall \ + -Werror \ + -Wextra \ + +LOCAL_EXPORT_C_INCLUDE_DIRS := \ + $(audio_policy_engine_includes_common) + +LOCAL_C_INCLUDES := \ + $(audio_policy_engine_includes_common) \ + $(TARGET_OUT_HEADERS)/hw \ + $(call include-path-for, frameworks-av) \ + $(call include-path-for, audio-utils) \ + $(call include-path-for, bionic) \ + $(TOPDIR)frameworks/av/services/audiopolicy/common/include + + +LOCAL_MODULE := libaudiopolicyenginedefault +LOCAL_MODULE_TAGS := optional +LOCAL_STATIC_LIBRARIES := \ + libmedia_helper \ + libaudiopolicycomponents + +LOCAL_SHARED_LIBRARIES 
+= \ + libcutils \ + libutils \ + libaudioutils \ + +include external/stlport/libstlport.mk + +include $(BUILD_SHARED_LIBRARY) diff --git a/services/audiopolicy/enginedefault/include/AudioPolicyEngineInstance.h b/services/audiopolicy/enginedefault/include/AudioPolicyEngineInstance.h new file mode 100755 index 0000000..1e329f0 --- /dev/null +++ b/services/audiopolicy/enginedefault/include/AudioPolicyEngineInstance.h @@ -0,0 +1,76 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#pragma once + +class AudioPolicyManagerInterface; + +namespace android +{ +namespace audio_policy +{ + +class Engine; + +class EngineInstance +{ +protected: + EngineInstance(); + +public: + virtual ~EngineInstance(); + + /** + * Get Audio Policy Engine instance. + * + * @return pointer to Route Manager Instance object. + */ + static EngineInstance *getInstance(); + + /** + * Interface query. + * The first client of an interface of the policy engine will start the singleton. + * + * @tparam RequestedInterface: interface that the client is wishing to retrieve. + * + * @return interface handle. + */ + template <class RequestedInterface> + RequestedInterface *queryInterface() const; + +protected: + /** + * Get Audio Policy Engine instance. + * + * @return Audio Policy Engine singleton. + */ + Engine *getEngine() const; + +private: + /* Copy facilities are put private to disable copy. 
*/ + EngineInstance(const EngineInstance &object); + EngineInstance &operator=(const EngineInstance &object); +}; + +/** + * Limit template instantation to supported type interfaces. + * Compile time error will claim if invalid interface is requested. + */ +template <> +AudioPolicyManagerInterface *EngineInstance::queryInterface() const; + +} // namespace audio_policy +} // namespace android diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp new file mode 100755 index 0000000..1fd3341 --- /dev/null +++ b/services/audiopolicy/enginedefault/src/Engine.cpp @@ -0,0 +1,707 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::AudioPolicyEngine" +//#define LOG_NDEBUG 0 + +//#define VERY_VERBOSE_LOGGING +#ifdef VERY_VERBOSE_LOGGING +#define ALOGVV ALOGV +#else +#define ALOGVV(a...) 
do { } while(0) +#endif + +#include "Engine.h" +#include "Gains.h" +#include <AudioPolicyManagerObserver.h> +#include <AudioPort.h> +#include <IOProfile.h> +#include <policy.h> +#include <utils/String8.h> +#include <utils/Log.h> + +namespace android +{ +namespace audio_policy +{ + +Engine::Engine() + : mManagerInterface(this), + mPhoneState(AUDIO_MODE_NORMAL), + mApmObserver(NULL) +{ + for (int i = 0; i < AUDIO_POLICY_FORCE_USE_CNT; i++) { + mForceUse[i] = AUDIO_POLICY_FORCE_NONE; + } +} + +Engine::~Engine() +{ +} + +void Engine::setObserver(AudioPolicyManagerObserver *observer) +{ + ALOG_ASSERT(observer != NULL, "Invalid Audio Policy Manager observer"); + mApmObserver = observer; +} + +status_t Engine::initCheck() +{ + return (mApmObserver != NULL) ? NO_ERROR : NO_INIT; +} + +float Engine::volIndexToAmpl(Volume::device_category category, audio_stream_type_t streamType, + int indexInUi) +{ + const StreamDescriptor &streamDesc = mApmObserver->getStreamDescriptors().valueAt(streamType); + return Gains::volIndexToAmpl(category, streamDesc, indexInUi); +} + +status_t Engine::initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax) +{ + ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax); + if (indexMin < 0 || indexMin >= indexMax) { + ALOGW("initStreamVolume() invalid index limits for stream %d, min %d, max %d", + stream , indexMin, indexMax); + return BAD_VALUE; + } + mApmObserver->getStreamDescriptors().setVolumeIndexMin(stream, indexMin); + mApmObserver->getStreamDescriptors().setVolumeIndexMax(stream, indexMax); + return NO_ERROR; +} + +void Engine::initializeVolumeCurves(bool isSpeakerDrcEnabled) +{ + StreamDescriptorCollection &streams = mApmObserver->getStreamDescriptors(); + + for (int i = 0; i < AUDIO_STREAM_CNT; i++) { + for (int j = 0; j < Volume::DEVICE_CATEGORY_CNT; j++) { + streams.setVolumeCurvePoint(static_cast<audio_stream_type_t>(i), + static_cast<Volume::device_category>(j), + 
Gains::sVolumeProfiles[i][j]); + } + } + + // Check availability of DRC on speaker path: if available, override some of the speaker curves + if (isSpeakerDrcEnabled) { + streams.setVolumeCurvePoint(AUDIO_STREAM_SYSTEM, Volume::DEVICE_CATEGORY_SPEAKER, + Gains::sDefaultSystemVolumeCurveDrc); + streams.setVolumeCurvePoint(AUDIO_STREAM_RING, Volume::DEVICE_CATEGORY_SPEAKER, + Gains::sSpeakerSonificationVolumeCurveDrc); + streams.setVolumeCurvePoint(AUDIO_STREAM_ALARM, Volume::DEVICE_CATEGORY_SPEAKER, + Gains::sSpeakerSonificationVolumeCurveDrc); + streams.setVolumeCurvePoint(AUDIO_STREAM_NOTIFICATION, Volume::DEVICE_CATEGORY_SPEAKER, + Gains::sSpeakerSonificationVolumeCurveDrc); + streams.setVolumeCurvePoint(AUDIO_STREAM_MUSIC, Volume::DEVICE_CATEGORY_SPEAKER, + Gains::sSpeakerMediaVolumeCurveDrc); + streams.setVolumeCurvePoint(AUDIO_STREAM_ACCESSIBILITY, Volume::DEVICE_CATEGORY_SPEAKER, + Gains::sSpeakerMediaVolumeCurveDrc); + } +} + + +status_t Engine::setPhoneState(audio_mode_t state) +{ + ALOGV("setPhoneState() state %d", state); + + if (state < 0 || state >= AUDIO_MODE_CNT) { + ALOGW("setPhoneState() invalid state %d", state); + return BAD_VALUE; + } + + if (state == mPhoneState ) { + ALOGW("setPhoneState() setting same state %d", state); + return BAD_VALUE; + } + + // store previous phone state for management of sonification strategy below + int oldState = mPhoneState; + mPhoneState = state; + StreamDescriptorCollection &streams = mApmObserver->getStreamDescriptors(); + // are we entering or starting a call + if (!is_state_in_call(oldState) && is_state_in_call(state)) { + ALOGV(" Entering call in setPhoneState()"); + for (int j = 0; j < Volume::DEVICE_CATEGORY_CNT; j++) { + streams.setVolumeCurvePoint(AUDIO_STREAM_DTMF, static_cast<Volume::device_category>(j), + Gains::sVolumeProfiles[AUDIO_STREAM_VOICE_CALL][j]); + } + } else if (is_state_in_call(oldState) && !is_state_in_call(state)) { + ALOGV(" Exiting call in setPhoneState()"); + for (int j = 0; j < 
Volume::DEVICE_CATEGORY_CNT; j++) { + streams.setVolumeCurvePoint(AUDIO_STREAM_DTMF, static_cast<Volume::device_category>(j), + Gains::sVolumeProfiles[AUDIO_STREAM_DTMF][j]); + } + } + return NO_ERROR; +} + +status_t Engine::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) +{ + switch(usage) { + case AUDIO_POLICY_FORCE_FOR_COMMUNICATION: + if (config != AUDIO_POLICY_FORCE_SPEAKER && config != AUDIO_POLICY_FORCE_BT_SCO && + config != AUDIO_POLICY_FORCE_NONE) { + ALOGW("setForceUse() invalid config %d for FOR_COMMUNICATION", config); + return BAD_VALUE; + } + mForceUse[usage] = config; + break; + case AUDIO_POLICY_FORCE_FOR_MEDIA: + if (config != AUDIO_POLICY_FORCE_HEADPHONES && config != AUDIO_POLICY_FORCE_BT_A2DP && + config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY && + config != AUDIO_POLICY_FORCE_ANALOG_DOCK && + config != AUDIO_POLICY_FORCE_DIGITAL_DOCK && config != AUDIO_POLICY_FORCE_NONE && + config != AUDIO_POLICY_FORCE_NO_BT_A2DP && config != AUDIO_POLICY_FORCE_SPEAKER ) { + ALOGW("setForceUse() invalid config %d for FOR_MEDIA", config); + return BAD_VALUE; + } + mForceUse[usage] = config; + break; + case AUDIO_POLICY_FORCE_FOR_RECORD: + if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY && + config != AUDIO_POLICY_FORCE_NONE) { + ALOGW("setForceUse() invalid config %d for FOR_RECORD", config); + return BAD_VALUE; + } + mForceUse[usage] = config; + break; + case AUDIO_POLICY_FORCE_FOR_DOCK: + if (config != AUDIO_POLICY_FORCE_NONE && config != AUDIO_POLICY_FORCE_BT_CAR_DOCK && + config != AUDIO_POLICY_FORCE_BT_DESK_DOCK && + config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY && + config != AUDIO_POLICY_FORCE_ANALOG_DOCK && + config != AUDIO_POLICY_FORCE_DIGITAL_DOCK) { + ALOGW("setForceUse() invalid config %d for FOR_DOCK", config); + } + mForceUse[usage] = config; + break; + case AUDIO_POLICY_FORCE_FOR_SYSTEM: + if (config != AUDIO_POLICY_FORCE_NONE && + config != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) { 
+ ALOGW("setForceUse() invalid config %d for FOR_SYSTEM", config); + } + mForceUse[usage] = config; + break; + case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO: + if (config != AUDIO_POLICY_FORCE_NONE && + config != AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED) { + ALOGW("setForceUse() invalid config %d forHDMI_SYSTEM_AUDIO", config); + } + mForceUse[usage] = config; + break; + default: + ALOGW("setForceUse() invalid usage %d", usage); + break; + } + return NO_ERROR; +} + +routing_strategy Engine::getStrategyForStream(audio_stream_type_t stream) +{ + // stream to strategy mapping + switch (stream) { + case AUDIO_STREAM_VOICE_CALL: + case AUDIO_STREAM_BLUETOOTH_SCO: + return STRATEGY_PHONE; + case AUDIO_STREAM_RING: + case AUDIO_STREAM_ALARM: + return STRATEGY_SONIFICATION; + case AUDIO_STREAM_NOTIFICATION: + return STRATEGY_SONIFICATION_RESPECTFUL; + case AUDIO_STREAM_DTMF: + return STRATEGY_DTMF; + default: + ALOGE("unknown stream type %d", stream); + case AUDIO_STREAM_SYSTEM: + // NOTE: SYSTEM stream uses MEDIA strategy because muting music and switching outputs + // while key clicks are played produces a poor result + case AUDIO_STREAM_MUSIC: + return STRATEGY_MEDIA; + case AUDIO_STREAM_ENFORCED_AUDIBLE: + return STRATEGY_ENFORCED_AUDIBLE; + case AUDIO_STREAM_TTS: + return STRATEGY_TRANSMITTED_THROUGH_SPEAKER; + case AUDIO_STREAM_ACCESSIBILITY: + return STRATEGY_ACCESSIBILITY; + case AUDIO_STREAM_REROUTING: + return STRATEGY_REROUTING; + } +} + +routing_strategy Engine::getStrategyForUsage(audio_usage_t usage) +{ + const AudioOutputCollection &outputs = mApmObserver->getOutputs(); + + // usage to strategy mapping + switch (usage) { + case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY: + if (outputs.isStreamActive(AUDIO_STREAM_RING) || + outputs.isStreamActive(AUDIO_STREAM_ALARM)) { + return STRATEGY_SONIFICATION; + } + if (isInCall()) { + return STRATEGY_PHONE; + } + return STRATEGY_ACCESSIBILITY; + + case AUDIO_USAGE_MEDIA: + case AUDIO_USAGE_GAME: + case 
AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE: + case AUDIO_USAGE_ASSISTANCE_SONIFICATION: + return STRATEGY_MEDIA; + + case AUDIO_USAGE_VOICE_COMMUNICATION: + return STRATEGY_PHONE; + + case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING: + return STRATEGY_DTMF; + + case AUDIO_USAGE_ALARM: + case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE: + return STRATEGY_SONIFICATION; + + case AUDIO_USAGE_NOTIFICATION: + case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST: + case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT: + case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED: + case AUDIO_USAGE_NOTIFICATION_EVENT: + return STRATEGY_SONIFICATION_RESPECTFUL; + + case AUDIO_USAGE_UNKNOWN: + default: + return STRATEGY_MEDIA; + } +} + +audio_devices_t Engine::getDeviceForStrategy(routing_strategy strategy) const +{ + const DeviceVector &availableOutputDevices = mApmObserver->getAvailableOutputDevices(); + const DeviceVector &availableInputDevices = mApmObserver->getAvailableInputDevices(); + + const AudioOutputCollection &outputs = mApmObserver->getOutputs(); + + uint32_t device = AUDIO_DEVICE_NONE; + uint32_t availableOutputDevicesType = availableOutputDevices.types(); + + switch (strategy) { + + case STRATEGY_TRANSMITTED_THROUGH_SPEAKER: + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER; + if (!device) { + ALOGE("getDeviceForStrategy() no device found for "\ + "STRATEGY_TRANSMITTED_THROUGH_SPEAKER"); + } + break; + + case STRATEGY_SONIFICATION_RESPECTFUL: + if (isInCall()) { + device = getDeviceForStrategy(STRATEGY_SONIFICATION); + } else if (outputs.isStreamActiveRemotely(AUDIO_STREAM_MUSIC, + SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) { + // while media is playing on a remote device, use the the sonification behavior. + // Note that we test this usecase before testing if media is playing because + // the isStreamActive() method only informs about the activity of a stream, not + // if it's for local playback. 
Note also that we use the same delay between both tests + device = getDeviceForStrategy(STRATEGY_SONIFICATION); + //user "safe" speaker if available instead of normal speaker to avoid triggering + //other acoustic safety mechanisms for notification + if (device == AUDIO_DEVICE_OUT_SPEAKER && (availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER_SAFE)) + device = AUDIO_DEVICE_OUT_SPEAKER_SAFE; + } else if (outputs.isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) { + // while media is playing (or has recently played), use the same device + device = getDeviceForStrategy(STRATEGY_MEDIA); + } else { + // when media is not playing anymore, fall back on the sonification behavior + device = getDeviceForStrategy(STRATEGY_SONIFICATION); + //user "safe" speaker if available instead of normal speaker to avoid triggering + //other acoustic safety mechanisms for notification + if (device == AUDIO_DEVICE_OUT_SPEAKER && (availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER_SAFE)) + device = AUDIO_DEVICE_OUT_SPEAKER_SAFE; + } + break; + + case STRATEGY_DTMF: + if (!isInCall()) { + // when off call, DTMF strategy follows the same rules as MEDIA strategy + device = getDeviceForStrategy(STRATEGY_MEDIA); + break; + } + // when in call, DTMF and PHONE strategies follow the same rules + // FALL THROUGH + + case STRATEGY_PHONE: + // Force use of only devices on primary output if: + // - in call AND + // - cannot route from voice call RX OR + // - audio HAL version is < 3.0 and TX device is on the primary HW module + if (getPhoneState() == AUDIO_MODE_IN_CALL) { + audio_devices_t txDevice = getDeviceForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION); + sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput(); + audio_devices_t availPrimaryInputDevices = + availableInputDevices.getDevicesFromHwModule(primaryOutput->getModuleHandle()); + audio_devices_t availPrimaryOutputDevices = + primaryOutput->supportedDevices() & availableOutputDevices.types(); + 
+ if (((availableInputDevices.types() & + AUDIO_DEVICE_IN_TELEPHONY_RX & ~AUDIO_DEVICE_BIT_IN) == 0) || + (((txDevice & availPrimaryInputDevices & ~AUDIO_DEVICE_BIT_IN) != 0) && + (primaryOutput->getAudioPort()->mModule->mHalVersion < + AUDIO_DEVICE_API_VERSION_3_0))) { + availableOutputDevicesType = availPrimaryOutputDevices; + } + } + // for phone strategy, we first consider the forced use and then the available devices by order + // of priority + switch (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]) { + case AUDIO_POLICY_FORCE_BT_SCO: + if (!isInCall() || strategy != STRATEGY_DTMF) { + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT; + if (device) break; + } + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_SCO; + if (device) break; + // if SCO device is requested but no SCO device is available, fall back to default case + // FALL THROUGH + + default: // FORCE_NONE + // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP + if (!isInCall() && + (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) && + (outputs.getA2dpOutput() != 0)) { + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES; + if (device) break; + } + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_WIRED_HEADPHONE; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_WIRED_HEADSET; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_DEVICE; + if (device) break; + if (!isInCall()) { + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_ACCESSORY; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET; + if (device) break; + device = availableOutputDevicesType & 
AUDIO_DEVICE_OUT_AUX_DIGITAL; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET; + if (device) break; + } + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_EARPIECE; + if (device) break; + device = mApmObserver->getDefaultOutputDevice()->type(); + if (device == AUDIO_DEVICE_NONE) { + ALOGE("getDeviceForStrategy() no device found for STRATEGY_PHONE"); + } + break; + + case AUDIO_POLICY_FORCE_SPEAKER: + // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to + // A2DP speaker when forcing to speaker output + if (!isInCall() && + (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) && + (outputs.getA2dpOutput() != 0)) { + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER; + if (device) break; + } + if (!isInCall()) { + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_ACCESSORY; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_DEVICE; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_AUX_DIGITAL; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET; + if (device) break; + } + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_LINE; + if (device) break; + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER; + if (device) break; + device = mApmObserver->getDefaultOutputDevice()->type(); + if (device == AUDIO_DEVICE_NONE) { + ALOGE("getDeviceForStrategy() no device found for STRATEGY_PHONE, FORCE_SPEAKER"); + } + break; + } + break; + + case STRATEGY_SONIFICATION: + + // If incall, just select the STRATEGY_PHONE device: The rest of the behavior is handled by + // handleIncallSonification(). 
+ if (isInCall()) { + device = getDeviceForStrategy(STRATEGY_PHONE); + break; + } + // FALL THROUGH + + case STRATEGY_ENFORCED_AUDIBLE: + // strategy STRATEGY_ENFORCED_AUDIBLE uses same routing policy as STRATEGY_SONIFICATION + // except: + // - when in call where it doesn't default to STRATEGY_PHONE behavior + // - in countries where not enforced in which case it follows STRATEGY_MEDIA + + if ((strategy == STRATEGY_SONIFICATION) || + (mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)) { + device = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER; + if (device == AUDIO_DEVICE_NONE) { + ALOGE("getDeviceForStrategy() speaker device not found for STRATEGY_SONIFICATION"); + } + } + // The second device used for sonification is the same as the device used by media strategy + // FALL THROUGH + + // FIXME: STRATEGY_ACCESSIBILITY and STRATEGY_REROUTING follow STRATEGY_MEDIA for now + case STRATEGY_ACCESSIBILITY: + if (strategy == STRATEGY_ACCESSIBILITY) { + // do not route accessibility prompts to a digital output currently configured with a + // compressed format as they would likely not be mixed and dropped. + for (size_t i = 0; i < outputs.size(); i++) { + sp<AudioOutputDescriptor> desc = outputs.valueAt(i); + audio_devices_t devices = desc->device() & + (AUDIO_DEVICE_OUT_HDMI | AUDIO_DEVICE_OUT_SPDIF | AUDIO_DEVICE_OUT_HDMI_ARC); + if (desc->isActive() && !audio_is_linear_pcm(desc->mFormat) && + devices != AUDIO_DEVICE_NONE) { + availableOutputDevicesType = availableOutputDevices.types() & ~devices; + } + } + } + // FALL THROUGH + + case STRATEGY_REROUTING: + case STRATEGY_MEDIA: { + uint32_t device2 = AUDIO_DEVICE_NONE; + if (strategy != STRATEGY_SONIFICATION) { + // no sonification on remote submix (e.g. 
WFD) + if (availableOutputDevices.getDevice(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, String8("0")) != 0) { + device2 = availableOutputDevices.types() & AUDIO_DEVICE_OUT_REMOTE_SUBMIX; + } + } + if ((device2 == AUDIO_DEVICE_NONE) && + (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) && + (outputs.getA2dpOutput() != 0)) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP; + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER; + } + } + if ((device2 == AUDIO_DEVICE_NONE) && + (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] == AUDIO_POLICY_FORCE_SPEAKER)) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_WIRED_HEADPHONE; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_LINE; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_WIRED_HEADSET; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_ACCESSORY; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_USB_DEVICE; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET; + } + if ((device2 == AUDIO_DEVICE_NONE) && (strategy != STRATEGY_SONIFICATION)) { + // no sonification on aux digital (e.g. 
HDMI) + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_AUX_DIGITAL; + } + if ((device2 == AUDIO_DEVICE_NONE) && + (mForceUse[AUDIO_POLICY_FORCE_FOR_DOCK] == AUDIO_POLICY_FORCE_ANALOG_DOCK)) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET; + } + if (device2 == AUDIO_DEVICE_NONE) { + device2 = availableOutputDevicesType & AUDIO_DEVICE_OUT_SPEAKER; + } + int device3 = AUDIO_DEVICE_NONE; + if (strategy == STRATEGY_MEDIA) { + // ARC, SPDIF and AUX_LINE can co-exist with others. + device3 = availableOutputDevicesType & AUDIO_DEVICE_OUT_HDMI_ARC; + device3 |= (availableOutputDevicesType & AUDIO_DEVICE_OUT_SPDIF); + device3 |= (availableOutputDevicesType & AUDIO_DEVICE_OUT_AUX_LINE); + } + + device2 |= device3; + // device is DEVICE_OUT_SPEAKER if we come from case STRATEGY_SONIFICATION or + // STRATEGY_ENFORCED_AUDIBLE, AUDIO_DEVICE_NONE otherwise + device |= device2; + + // If hdmi system audio mode is on, remove speaker out of output list. + if ((strategy == STRATEGY_MEDIA) && + (mForceUse[AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO] == + AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED)) { + device &= ~AUDIO_DEVICE_OUT_SPEAKER; + } + + if (device) break; + device = mApmObserver->getDefaultOutputDevice()->type(); + if (device == AUDIO_DEVICE_NONE) { + ALOGE("getDeviceForStrategy() no device found for STRATEGY_MEDIA"); + } + } break; + + default: + ALOGW("getDeviceForStrategy() unknown strategy: %d", strategy); + break; + } + + ALOGVV("getDeviceForStrategy() strategy %d, device %x", strategy, device); + return device; +} + + +audio_devices_t Engine::getDeviceForInputSource(audio_source_t inputSource) const +{ + const DeviceVector &availableOutputDevices = mApmObserver->getAvailableOutputDevices(); + const DeviceVector &availableInputDevices = mApmObserver->getAvailableInputDevices(); + const AudioOutputCollection &outputs = mApmObserver->getOutputs(); + audio_devices_t availableDeviceTypes = availableInputDevices.types() & 
~AUDIO_DEVICE_BIT_IN; + + uint32_t device = AUDIO_DEVICE_NONE; + + switch (inputSource) { + case AUDIO_SOURCE_VOICE_UPLINK: + if (availableDeviceTypes & AUDIO_DEVICE_IN_VOICE_CALL) { + device = AUDIO_DEVICE_IN_VOICE_CALL; + break; + } + break; + + case AUDIO_SOURCE_DEFAULT: + case AUDIO_SOURCE_MIC: + if (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) { + device = AUDIO_DEVICE_IN_BLUETOOTH_A2DP; + } else if ((mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] == AUDIO_POLICY_FORCE_BT_SCO) && + (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) { + device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) { + device = AUDIO_DEVICE_IN_WIRED_HEADSET; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) { + device = AUDIO_DEVICE_IN_USB_DEVICE; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { + device = AUDIO_DEVICE_IN_BUILTIN_MIC; + } + break; + + case AUDIO_SOURCE_VOICE_COMMUNICATION: + // Allow only use of devices on primary input if in call and HAL does not support routing + // to voice call path. 
+ if ((getPhoneState() == AUDIO_MODE_IN_CALL) && + (availableOutputDevices.types() & AUDIO_DEVICE_OUT_TELEPHONY_TX) == 0) { + sp<AudioOutputDescriptor> primaryOutput = outputs.getPrimaryOutput(); + availableDeviceTypes = + availableInputDevices.getDevicesFromHwModule(primaryOutput->getModuleHandle()) + & ~AUDIO_DEVICE_BIT_IN; + } + + switch (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]) { + case AUDIO_POLICY_FORCE_BT_SCO: + // if SCO device is requested but no SCO device is available, fall back to default case + if (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) { + device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET; + break; + } + // FALL THROUGH + + default: // FORCE_NONE + if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) { + device = AUDIO_DEVICE_IN_WIRED_HEADSET; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) { + device = AUDIO_DEVICE_IN_USB_DEVICE; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { + device = AUDIO_DEVICE_IN_BUILTIN_MIC; + } + break; + + case AUDIO_POLICY_FORCE_SPEAKER: + if (availableDeviceTypes & AUDIO_DEVICE_IN_BACK_MIC) { + device = AUDIO_DEVICE_IN_BACK_MIC; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { + device = AUDIO_DEVICE_IN_BUILTIN_MIC; + } + break; + } + break; + + case AUDIO_SOURCE_VOICE_RECOGNITION: + case AUDIO_SOURCE_HOTWORD: + if (mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] == AUDIO_POLICY_FORCE_BT_SCO && + availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) { + device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) { + device = AUDIO_DEVICE_IN_WIRED_HEADSET; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) { + device = AUDIO_DEVICE_IN_USB_DEVICE; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { + device = AUDIO_DEVICE_IN_BUILTIN_MIC; + } + break; + case AUDIO_SOURCE_CAMCORDER: + if (availableDeviceTypes & AUDIO_DEVICE_IN_BACK_MIC) { + device = 
AUDIO_DEVICE_IN_BACK_MIC; + } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { + device = AUDIO_DEVICE_IN_BUILTIN_MIC; + } + break; + case AUDIO_SOURCE_VOICE_DOWNLINK: + case AUDIO_SOURCE_VOICE_CALL: + if (availableDeviceTypes & AUDIO_DEVICE_IN_VOICE_CALL) { + device = AUDIO_DEVICE_IN_VOICE_CALL; + } + break; + case AUDIO_SOURCE_REMOTE_SUBMIX: + if (availableDeviceTypes & AUDIO_DEVICE_IN_REMOTE_SUBMIX) { + device = AUDIO_DEVICE_IN_REMOTE_SUBMIX; + } + break; + case AUDIO_SOURCE_FM_TUNER: + if (availableDeviceTypes & AUDIO_DEVICE_IN_FM_TUNER) { + device = AUDIO_DEVICE_IN_FM_TUNER; + } + break; + default: + ALOGW("getDeviceForInputSource() invalid input source %d", inputSource); + break; + } + ALOGV("getDeviceForInputSource()input source %d, device %08x", inputSource, device); + return device; +} + +template <> +AudioPolicyManagerInterface *Engine::queryInterface() +{ + return &mManagerInterface; +} + +} // namespace audio_policy +} // namespace android + + diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h new file mode 100755 index 0000000..f44556c --- /dev/null +++ b/services/audiopolicy/enginedefault/src/Engine.h @@ -0,0 +1,158 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + + +#include "AudioPolicyManagerInterface.h" +#include "Gains.h" +#include <AudioGain.h> +#include <policy.h> + +namespace android +{ + +class AudioPolicyManagerObserver; + +namespace audio_policy +{ + +class Engine +{ +public: + Engine(); + virtual ~Engine(); + + template <class RequestedInterface> + RequestedInterface *queryInterface(); + +private: + /// Interface members + class ManagerInterfaceImpl : public AudioPolicyManagerInterface + { + public: + ManagerInterfaceImpl(Engine *policyEngine) + : mPolicyEngine(policyEngine) {} + + virtual void setObserver(AudioPolicyManagerObserver *observer) + { + mPolicyEngine->setObserver(observer); + } + virtual status_t initCheck() + { + return mPolicyEngine->initCheck(); + } + virtual audio_devices_t getDeviceForInputSource(audio_source_t inputSource) const + { + return mPolicyEngine->getDeviceForInputSource(inputSource); + } + virtual audio_devices_t getDeviceForStrategy(routing_strategy strategy) const + { + return mPolicyEngine->getDeviceForStrategy(strategy); + } + virtual routing_strategy getStrategyForStream(audio_stream_type_t stream) + { + return mPolicyEngine->getStrategyForStream(stream); + } + virtual routing_strategy getStrategyForUsage(audio_usage_t usage) + { + return mPolicyEngine->getStrategyForUsage(usage); + } + virtual status_t setPhoneState(audio_mode_t mode) + { + return mPolicyEngine->setPhoneState(mode); + } + virtual audio_mode_t getPhoneState() const + { + return mPolicyEngine->getPhoneState(); + } + virtual status_t setForceUse(audio_policy_force_use_t usage, + audio_policy_forced_cfg_t config) + { + return mPolicyEngine->setForceUse(usage, config); + } + virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) const + { + return mPolicyEngine->getForceUse(usage); + } + virtual status_t setDeviceConnectionState(const sp<DeviceDescriptor> /*devDesc*/, + audio_policy_dev_state_t /*state*/) + { + return NO_ERROR; + } + virtual status_t 
initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax) + { + return mPolicyEngine->initStreamVolume(stream, indexMin, indexMax); + } + virtual void initializeVolumeCurves(bool isSpeakerDrcEnabled) + { + return mPolicyEngine->initializeVolumeCurves(isSpeakerDrcEnabled); + } + virtual float volIndexToAmpl(Volume::device_category deviceCategory, + audio_stream_type_t stream,int indexInUi) + { + return mPolicyEngine->volIndexToAmpl(deviceCategory, stream, indexInUi); + } + private: + Engine *mPolicyEngine; + } mManagerInterface; + +private: + /* Copy facilities are put private to disable copy. */ + Engine(const Engine &object); + Engine &operator=(const Engine &object); + + void setObserver(AudioPolicyManagerObserver *observer); + + status_t initCheck(); + + inline bool isInCall() const + { + return is_state_in_call(mPhoneState); + } + + status_t setPhoneState(audio_mode_t mode); + audio_mode_t getPhoneState() const + { + return mPhoneState; + } + status_t setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config); + audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage) const + { + return mForceUse[usage]; + } + status_t setDefaultDevice(audio_devices_t device); + + routing_strategy getStrategyForStream(audio_stream_type_t stream); + routing_strategy getStrategyForUsage(audio_usage_t usage); + audio_devices_t getDeviceForStrategy(routing_strategy strategy) const; + audio_devices_t getDeviceForInputSource(audio_source_t inputSource) const; + + float volIndexToAmpl(Volume::device_category category, + audio_stream_type_t stream, int indexInUi); + status_t initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax); + void initializeVolumeCurves(bool isSpeakerDrcEnabled); + + audio_mode_t mPhoneState; /**< current phone state. */ + + /** current forced use configuration. 
*/ + audio_policy_forced_cfg_t mForceUse[AUDIO_POLICY_FORCE_USE_CNT]; + + AudioPolicyManagerObserver *mApmObserver; +}; +} // namespace audio_policy +} // namespace android + diff --git a/services/audiopolicy/enginedefault/src/EngineInstance.cpp b/services/audiopolicy/enginedefault/src/EngineInstance.cpp new file mode 100755 index 0000000..17e9832 --- /dev/null +++ b/services/audiopolicy/enginedefault/src/EngineInstance.cpp @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <AudioPolicyManagerInterface.h> +#include "AudioPolicyEngineInstance.h" +#include "Engine.h" + +namespace android +{ +namespace audio_policy +{ + +EngineInstance::EngineInstance() +{ +} + +EngineInstance *EngineInstance::getInstance() +{ + static EngineInstance instance; + return &instance; +} + +EngineInstance::~EngineInstance() +{ +} + +Engine *EngineInstance::getEngine() const +{ + static Engine engine; + return &engine; +} + +template <> +AudioPolicyManagerInterface *EngineInstance::queryInterface() const +{ + return getEngine()->queryInterface<AudioPolicyManagerInterface>(); +} + +} // namespace audio_policy +} // namespace android + diff --git a/services/audiopolicy/enginedefault/src/Gains.cpp b/services/audiopolicy/enginedefault/src/Gains.cpp new file mode 100644 index 0000000..a684fdd --- /dev/null +++ b/services/audiopolicy/enginedefault/src/Gains.cpp @@ -0,0 +1,247 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "APM::Gains" +//#define LOG_NDEBUG 0 + +//#define VERY_VERBOSE_LOGGING +#ifdef VERY_VERBOSE_LOGGING +#define ALOGVV ALOGV +#else +#define ALOGVV(a...) 
do { } while(0) +#endif + +#include "Gains.h" +#include <Volume.h> +#include <math.h> +#include <utils/String8.h> + +namespace android { + +// Enginedefault +const VolumeCurvePoint +Gains::sDefaultVolumeCurve[Volume::VOLCNT] = { + {1, -49.5f}, {33, -33.5f}, {66, -17.0f}, {100, 0.0f} +}; + + +const VolumeCurvePoint +Gains::sDefaultMediaVolumeCurve[Volume::VOLCNT] = { + {1, -58.0f}, {20, -40.0f}, {60, -17.0f}, {100, 0.0f} +}; + +const VolumeCurvePoint +Gains::sExtMediaSystemVolumeCurve[Volume::VOLCNT] = { + {1, -58.0f}, {20, -40.0f}, {60, -21.0f}, {100, -10.0f} +}; + +const VolumeCurvePoint +Gains::sSpeakerMediaVolumeCurve[Volume::VOLCNT] = { + {1, -56.0f}, {20, -34.0f}, {60, -11.0f}, {100, 0.0f} +}; + +const VolumeCurvePoint +Gains::sSpeakerMediaVolumeCurveDrc[Volume::VOLCNT] = { + {1, -55.0f}, {20, -43.0f}, {86, -12.0f}, {100, 0.0f} +}; + +const VolumeCurvePoint +Gains::sSpeakerSonificationVolumeCurve[Volume::VOLCNT] = { + {1, -29.7f}, {33, -20.1f}, {66, -10.2f}, {100, 0.0f} +}; + +const VolumeCurvePoint +Gains::sSpeakerSonificationVolumeCurveDrc[Volume::VOLCNT] = { + {1, -35.7f}, {33, -26.1f}, {66, -13.2f}, {100, 0.0f} +}; + +// AUDIO_STREAM_SYSTEM, AUDIO_STREAM_ENFORCED_AUDIBLE and AUDIO_STREAM_DTMF volume tracks +// AUDIO_STREAM_RING on phones and AUDIO_STREAM_MUSIC on tablets. +// AUDIO_STREAM_DTMF tracks AUDIO_STREAM_VOICE_CALL while in call (See AudioService.java). +// The range is constrained between -24dB and -6dB over speaker and -30dB and -18dB over headset. 
+ +const VolumeCurvePoint +Gains::sDefaultSystemVolumeCurve[Volume::VOLCNT] = { + {1, -24.0f}, {33, -18.0f}, {66, -12.0f}, {100, -6.0f} +}; + +const VolumeCurvePoint +Gains::sDefaultSystemVolumeCurveDrc[Volume::VOLCNT] = { + {1, -34.0f}, {33, -24.0f}, {66, -15.0f}, {100, -6.0f} +}; + +const VolumeCurvePoint +Gains::sHeadsetSystemVolumeCurve[Volume::VOLCNT] = { + {1, -30.0f}, {33, -26.0f}, {66, -22.0f}, {100, -18.0f} +}; + +const VolumeCurvePoint +Gains::sDefaultVoiceVolumeCurve[Volume::VOLCNT] = { + {0, -42.0f}, {33, -28.0f}, {66, -14.0f}, {100, 0.0f} +}; + +const VolumeCurvePoint +Gains::sSpeakerVoiceVolumeCurve[Volume::VOLCNT] = { + {0, -24.0f}, {33, -16.0f}, {66, -8.0f}, {100, 0.0f} +}; + +const VolumeCurvePoint +Gains::sLinearVolumeCurve[Volume::VOLCNT] = { + {0, -96.0f}, {33, -68.0f}, {66, -34.0f}, {100, 0.0f} +}; + +const VolumeCurvePoint +Gains::sSilentVolumeCurve[Volume::VOLCNT] = { + {0, -96.0f}, {1, -96.0f}, {2, -96.0f}, {100, -96.0f} +}; + +const VolumeCurvePoint +Gains::sFullScaleVolumeCurve[Volume::VOLCNT] = { + {0, 0.0f}, {1, 0.0f}, {2, 0.0f}, {100, 0.0f} +}; + +const VolumeCurvePoint *Gains::sVolumeProfiles[AUDIO_STREAM_CNT] + [Volume::DEVICE_CATEGORY_CNT] = { + { // AUDIO_STREAM_VOICE_CALL + Gains::sDefaultVoiceVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sSpeakerVoiceVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sSpeakerVoiceVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_SYSTEM + Gains::sHeadsetSystemVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_RING + Gains::sDefaultVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sSpeakerSonificationVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultVolumeCurve, // DEVICE_CATEGORY_EARPIECE + 
Gains::sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_MUSIC + Gains::sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sSpeakerMediaVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_ALARM + Gains::sDefaultVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sSpeakerSonificationVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_NOTIFICATION + Gains::sDefaultVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sSpeakerSonificationVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_BLUETOOTH_SCO + Gains::sDefaultVoiceVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sSpeakerVoiceVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultVoiceVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_ENFORCED_AUDIBLE + Gains::sHeadsetSystemVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_DTMF + Gains::sHeadsetSystemVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_TTS + // "Transmitted Through Speaker": always silent except on DEVICE_CATEGORY_SPEAKER + Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sLinearVolumeCurve, // 
DEVICE_CATEGORY_SPEAKER + Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sSilentVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_ACCESSIBILITY + Gains::sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sSpeakerMediaVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_REROUTING + Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sFullScaleVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, + { // AUDIO_STREAM_PATCH + Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_HEADSET + Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_SPEAKER + Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_EARPIECE + Gains::sFullScaleVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA + }, +}; + +//static +float Gains::volIndexToAmpl(audio_devices_t device, const StreamDescriptor& streamDesc, + int indexInUi) +{ + Volume::device_category deviceCategory = Volume::getDeviceCategory(device); + const VolumeCurvePoint *curve = streamDesc.getVolumeCurvePoint(deviceCategory); + + // the volume index in the UI is relative to the min and max volume indices for this stream type + int nbSteps = 1 + curve[Volume::VOLMAX].mIndex - + curve[Volume::VOLMIN].mIndex; + int volIdx = (nbSteps * (indexInUi - streamDesc.getVolumeIndexMin())) / + (streamDesc.getVolumeIndexMax() - streamDesc.getVolumeIndexMin()); + + // find what part of the curve this index volume belongs to, or if it's out of bounds + int segment = 0; + if (volIdx < curve[Volume::VOLMIN].mIndex) { // out of bounds + return 0.0f; + } else if (volIdx < curve[Volume::VOLKNEE1].mIndex) { + segment = 0; + } else if (volIdx < curve[Volume::VOLKNEE2].mIndex) { + segment = 1; + } else if (volIdx <= curve[Volume::VOLMAX].mIndex) { + segment = 2; + } else { // 
out of bounds + return 1.0f; + } + + // linear interpolation in the attenuation table in dB + float decibels = curve[segment].mDBAttenuation + + ((float)(volIdx - curve[segment].mIndex)) * + ( (curve[segment+1].mDBAttenuation - + curve[segment].mDBAttenuation) / + ((float)(curve[segment+1].mIndex - + curve[segment].mIndex)) ); + + float amplification = exp( decibels * 0.115129f); // exp( dB * ln(10) / 20 ) + + ALOGVV("VOLUME vol index=[%d %d %d], dB=[%.1f %.1f %.1f] ampl=%.5f", + curve[segment].mIndex, volIdx, + curve[segment+1].mIndex, + curve[segment].mDBAttenuation, + decibels, + curve[segment+1].mDBAttenuation, + amplification); + + return amplification; +} + +}; // namespace android diff --git a/services/audiopolicy/enginedefault/src/Gains.h b/services/audiopolicy/enginedefault/src/Gains.h new file mode 100644 index 0000000..b5601ca --- /dev/null +++ b/services/audiopolicy/enginedefault/src/Gains.h @@ -0,0 +1,59 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#pragma once + +#include <StreamDescriptor.h> +#include <utils/KeyedVector.h> +#include <system/audio.h> +#include <utils/Errors.h> +#include <utils/RefBase.h> + +namespace android { + +class StreamDescriptor; + +class Gains +{ +public : + static float volIndexToAmpl(audio_devices_t device, const StreamDescriptor& streamDesc, + int indexInUi); + + // default volume curve + static const VolumeCurvePoint sDefaultVolumeCurve[Volume::VOLCNT]; + // default volume curve for media strategy + static const VolumeCurvePoint sDefaultMediaVolumeCurve[Volume::VOLCNT]; + // volume curve for non-media audio on ext media outputs (HDMI, Line, etc) + static const VolumeCurvePoint sExtMediaSystemVolumeCurve[Volume::VOLCNT]; + // volume curve for media strategy on speakers + static const VolumeCurvePoint sSpeakerMediaVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sSpeakerMediaVolumeCurveDrc[Volume::VOLCNT]; + // volume curve for sonification strategy on speakers + static const VolumeCurvePoint sSpeakerSonificationVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sSpeakerSonificationVolumeCurveDrc[Volume::VOLCNT]; + static const VolumeCurvePoint sDefaultSystemVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sDefaultSystemVolumeCurveDrc[Volume::VOLCNT]; + static const VolumeCurvePoint sHeadsetSystemVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sDefaultVoiceVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sSpeakerVoiceVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sLinearVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sSilentVolumeCurve[Volume::VOLCNT]; + static const VolumeCurvePoint sFullScaleVolumeCurve[Volume::VOLCNT]; + // default volume curves per stream and device category. 
See initializeVolumeCurves() + static const VolumeCurvePoint *sVolumeProfiles[AUDIO_STREAM_CNT][Volume::DEVICE_CATEGORY_CNT]; +}; + +}; // namespace android diff --git a/services/audiopolicy/AudioPolicyFactory.cpp b/services/audiopolicy/manager/AudioPolicyFactory.cpp index 2ae7bc1..9910a1f 100644 --- a/services/audiopolicy/AudioPolicyFactory.cpp +++ b/services/audiopolicy/manager/AudioPolicyFactory.cpp @@ -14,7 +14,7 @@ * limitations under the License. */ -#include "AudioPolicyManager.h" +#include "managerdefault/AudioPolicyManager.h" namespace android { diff --git a/services/audiopolicy/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp index 6ebd0ed..797a2b4 100644 --- a/services/audiopolicy/AudioPolicyManager.cpp +++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp @@ -14,7 +14,7 @@ * limitations under the License. */ -#define LOG_TAG "AudioPolicyManager" +#define LOG_TAG "APM::AudioPolicyManager" //#define LOG_NDEBUG 0 //#define VERY_VERBOSE_LOGGING @@ -24,20 +24,11 @@ #define ALOGVV(a...) 
do { } while(0) #endif -// A device mask for all audio input devices that are considered "virtual" when evaluating -// active inputs in getActiveInput() -#define APM_AUDIO_IN_DEVICE_VIRTUAL_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX|AUDIO_DEVICE_IN_FM_TUNER) -// A device mask for all audio output devices that are considered "remote" when evaluating -// active output devices in isStreamActiveRemotely() -#define APM_AUDIO_OUT_DEVICE_REMOTE_ALL AUDIO_DEVICE_OUT_REMOTE_SUBMIX -// A device mask for all audio input and output devices where matching inputs/outputs on device -// type alone is not enough: the address must match too -#define APM_AUDIO_DEVICE_MATCH_ADDRESS_ALL (AUDIO_DEVICE_IN_REMOTE_SUBMIX | \ - AUDIO_DEVICE_OUT_REMOTE_SUBMIX) - #include <inttypes.h> #include <math.h> +#include <AudioPolicyManagerInterface.h> +#include <AudioPolicyEngineInstance.h> #include <cutils/properties.h> #include <utils/Log.h> #include <hardware/audio.h> @@ -47,188 +38,36 @@ #include <soundtrigger/SoundTrigger.h> #include "AudioPolicyManager.h" #include "audio_policy_conf.h" +#include <ConfigParsingUtils.h> +#include <policy.h> namespace android { // ---------------------------------------------------------------------------- -// Definitions for audio_policy.conf file parsing -// ---------------------------------------------------------------------------- - -struct StringToEnum { - const char *name; - uint32_t value; -}; - -#define STRING_TO_ENUM(string) { #string, string } -#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0])) - -const StringToEnum sDeviceNameToEnumTable[] = { - STRING_TO_ENUM(AUDIO_DEVICE_OUT_EARPIECE), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_SPEAKER), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_SPEAKER_SAFE), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_WIRED_HEADPHONE), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT), - 
STRING_TO_ENUM(AUDIO_DEVICE_OUT_ALL_SCO), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_ALL_A2DP), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_AUX_DIGITAL), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_HDMI), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_USB_ACCESSORY), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_USB_DEVICE), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_ALL_USB), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_REMOTE_SUBMIX), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_TELEPHONY_TX), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_LINE), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_HDMI_ARC), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_SPDIF), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_FM), - STRING_TO_ENUM(AUDIO_DEVICE_OUT_AUX_LINE), - STRING_TO_ENUM(AUDIO_DEVICE_IN_AMBIENT), - STRING_TO_ENUM(AUDIO_DEVICE_IN_BUILTIN_MIC), - STRING_TO_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_IN_ALL_SCO), - STRING_TO_ENUM(AUDIO_DEVICE_IN_WIRED_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_IN_AUX_DIGITAL), - STRING_TO_ENUM(AUDIO_DEVICE_IN_HDMI), - STRING_TO_ENUM(AUDIO_DEVICE_IN_TELEPHONY_RX), - STRING_TO_ENUM(AUDIO_DEVICE_IN_VOICE_CALL), - STRING_TO_ENUM(AUDIO_DEVICE_IN_BACK_MIC), - STRING_TO_ENUM(AUDIO_DEVICE_IN_REMOTE_SUBMIX), - STRING_TO_ENUM(AUDIO_DEVICE_IN_ANLG_DOCK_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_IN_DGTL_DOCK_HEADSET), - STRING_TO_ENUM(AUDIO_DEVICE_IN_USB_ACCESSORY), - STRING_TO_ENUM(AUDIO_DEVICE_IN_USB_DEVICE), - STRING_TO_ENUM(AUDIO_DEVICE_IN_FM_TUNER), - STRING_TO_ENUM(AUDIO_DEVICE_IN_TV_TUNER), - STRING_TO_ENUM(AUDIO_DEVICE_IN_LINE), - STRING_TO_ENUM(AUDIO_DEVICE_IN_SPDIF), - STRING_TO_ENUM(AUDIO_DEVICE_IN_BLUETOOTH_A2DP), - STRING_TO_ENUM(AUDIO_DEVICE_IN_LOOPBACK), -}; - -const StringToEnum sOutputFlagNameToEnumTable[] = { - STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_DIRECT), - 
STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_PRIMARY), - STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_FAST), - STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_DEEP_BUFFER), - STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD), - STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_NON_BLOCKING), - STRING_TO_ENUM(AUDIO_OUTPUT_FLAG_HW_AV_SYNC), -}; - -const StringToEnum sInputFlagNameToEnumTable[] = { - STRING_TO_ENUM(AUDIO_INPUT_FLAG_FAST), - STRING_TO_ENUM(AUDIO_INPUT_FLAG_HW_HOTWORD), -}; - -const StringToEnum sFormatNameToEnumTable[] = { - STRING_TO_ENUM(AUDIO_FORMAT_PCM_16_BIT), - STRING_TO_ENUM(AUDIO_FORMAT_PCM_8_BIT), - STRING_TO_ENUM(AUDIO_FORMAT_PCM_32_BIT), - STRING_TO_ENUM(AUDIO_FORMAT_PCM_8_24_BIT), - STRING_TO_ENUM(AUDIO_FORMAT_PCM_FLOAT), - STRING_TO_ENUM(AUDIO_FORMAT_PCM_24_BIT_PACKED), - STRING_TO_ENUM(AUDIO_FORMAT_MP3), - STRING_TO_ENUM(AUDIO_FORMAT_AAC), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_MAIN), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_LC), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_SSR), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_LTP), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_HE_V1), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_SCALABLE), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_ERLC), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_LD), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_HE_V2), - STRING_TO_ENUM(AUDIO_FORMAT_AAC_ELD), - STRING_TO_ENUM(AUDIO_FORMAT_VORBIS), - STRING_TO_ENUM(AUDIO_FORMAT_HE_AAC_V1), - STRING_TO_ENUM(AUDIO_FORMAT_HE_AAC_V2), - STRING_TO_ENUM(AUDIO_FORMAT_OPUS), - STRING_TO_ENUM(AUDIO_FORMAT_AC3), - STRING_TO_ENUM(AUDIO_FORMAT_E_AC3), -}; - -const StringToEnum sOutChannelsNameToEnumTable[] = { - STRING_TO_ENUM(AUDIO_CHANNEL_OUT_MONO), - STRING_TO_ENUM(AUDIO_CHANNEL_OUT_STEREO), - STRING_TO_ENUM(AUDIO_CHANNEL_OUT_QUAD), - STRING_TO_ENUM(AUDIO_CHANNEL_OUT_5POINT1), - STRING_TO_ENUM(AUDIO_CHANNEL_OUT_7POINT1), -}; - -const StringToEnum sInChannelsNameToEnumTable[] = { - STRING_TO_ENUM(AUDIO_CHANNEL_IN_MONO), - STRING_TO_ENUM(AUDIO_CHANNEL_IN_STEREO), - STRING_TO_ENUM(AUDIO_CHANNEL_IN_FRONT_BACK), -}; - -const StringToEnum sGainModeNameToEnumTable[] = { - 
STRING_TO_ENUM(AUDIO_GAIN_MODE_JOINT), - STRING_TO_ENUM(AUDIO_GAIN_MODE_CHANNELS), - STRING_TO_ENUM(AUDIO_GAIN_MODE_RAMP), -}; - - -uint32_t AudioPolicyManager::stringToEnum(const struct StringToEnum *table, - size_t size, - const char *name) -{ - for (size_t i = 0; i < size; i++) { - if (strcmp(table[i].name, name) == 0) { - ALOGV("stringToEnum() found %s", table[i].name); - return table[i].value; - } - } - return 0; -} - -const char *AudioPolicyManager::enumToString(const struct StringToEnum *table, - size_t size, - uint32_t value) -{ - for (size_t i = 0; i < size; i++) { - if (table[i].value == value) { - return table[i].name; - } - } - return ""; -} - -bool AudioPolicyManager::stringToBool(const char *value) -{ - return ((strcasecmp("true", value) == 0) || (strcmp("1", value) == 0)); -} - - -// ---------------------------------------------------------------------------- // AudioPolicyInterface implementation // ---------------------------------------------------------------------------- status_t AudioPolicyManager::setDeviceConnectionState(audio_devices_t device, - audio_policy_dev_state_t state, - const char *device_address) + audio_policy_dev_state_t state, + const char *device_address, + const char *device_name) { - return setDeviceConnectionStateInt(device, state, device_address); + return setDeviceConnectionStateInt(device, state, device_address, device_name); } status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address) + const char *device_address, + const char *device_name) { - ALOGV("setDeviceConnectionState() device: %x, state %d, address %s", - device, state, device_address != NULL ? 
device_address : ""); + ALOGV("setDeviceConnectionStateInt() device: 0x%X, state %d, address %s name %s", +- device, state, device_address, device_name); // connect/disconnect only 1 device at a time if (!audio_is_output_device(device) && !audio_is_input_device(device)) return BAD_VALUE; - sp<DeviceDescriptor> devDesc = getDeviceDescriptor(device, device_address); + sp<DeviceDescriptor> devDesc = + mHwModules.getDeviceDescriptor(device, device_address, device_name); // handle output devices if (audio_is_output_device(device)) { @@ -252,15 +91,14 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, // register new device as available index = mAvailableOutputDevices.add(devDesc); if (index >= 0) { - sp<HwModule> module = getModuleForDevice(device); + sp<HwModule> module = mHwModules.getModuleForDevice(device); if (module == 0) { ALOGD("setDeviceConnectionState() could not find HW module for device %08x", device); mAvailableOutputDevices.remove(devDesc); return INVALID_OPERATION; } - mAvailableOutputDevices[index]->mId = nextUniqueId(); - mAvailableOutputDevices[index]->mModule = module; + mAvailableOutputDevices[index]->attach(module); } else { return NO_MEMORY; } @@ -269,14 +107,16 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, mAvailableOutputDevices.remove(devDesc); return INVALID_OPERATION; } + // Propagate device availability to Engine + mEngine->setDeviceConnectionState(devDesc, state); + // outputs should never be empty here ALOG_ASSERT(outputs.size() != 0, "setDeviceConnectionState():" "checkOutputsForDevice() returned no outputs but status OK"); ALOGV("setDeviceConnectionState() checkOutputsForDevice() returned %zu outputs", outputs.size()); - - // Set connect to HALs + // Send connect to HALs AudioParameter param = AudioParameter(devDesc->mAddress); param.addInt(String8(AUDIO_PARAMETER_DEVICE_CONNECT), device); mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString()); @@ 
-291,7 +131,7 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, ALOGV("setDeviceConnectionState() disconnecting output device %x", device); - // Set Disconnect to HALs + // Send Disconnect to HALs AudioParameter param = AudioParameter(devDesc->mAddress); param.addInt(String8(AUDIO_PARAMETER_DEVICE_DISCONNECT), device); mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString()); @@ -300,6 +140,9 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, mAvailableOutputDevices.remove(devDesc); checkOutputsForDevice(devDesc, state, outputs, devDesc->mAddress); + + // Propagate device availability to Engine + mEngine->setDeviceConnectionState(devDesc, state); } break; default: @@ -328,20 +171,20 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, } updateDevicesAndOutputs(); - if (mPhoneState == AUDIO_MODE_IN_CALL) { + if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) { audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/); updateCallRouting(newDevice); } for (size_t i = 0; i < mOutputs.size(); i++) { audio_io_handle_t output = mOutputs.keyAt(i); - if ((mPhoneState != AUDIO_MODE_IN_CALL) || (output != mPrimaryOutput)) { + if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) || (output != mPrimaryOutput)) { audio_devices_t newDevice = getNewOutputDevice(mOutputs.keyAt(i), true /*fromCache*/); // do not force device change on duplicated output because if device is 0, it will // also force a device 0 for the two outputs it is duplicated to which may override // a valid device selection on those outputs. 
bool force = !mOutputs.valueAt(i)->isDuplicated() - && (!deviceDistinguishesOnAddress(device) + && (!device_distinguishes_on_address(device) // always force when disconnecting (a non-duplicated device) || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE)); setOutputDevice(output, newDevice, force, 0); @@ -365,7 +208,7 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, ALOGW("setDeviceConnectionState() device already connected: %d", device); return INVALID_OPERATION; } - sp<HwModule> module = getModuleForDevice(device); + sp<HwModule> module = mHwModules.getModuleForDevice(device); if (module == NULL) { ALOGW("setDeviceConnectionState(): could not find HW module for device %08x", device); @@ -377,8 +220,7 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, index = mAvailableInputDevices.add(devDesc); if (index >= 0) { - mAvailableInputDevices[index]->mId = nextUniqueId(); - mAvailableInputDevices[index]->mModule = module; + mAvailableInputDevices[index]->attach(module); } else { return NO_MEMORY; } @@ -388,6 +230,8 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, param.addInt(String8(AUDIO_PARAMETER_DEVICE_CONNECT), device); mpClientInterface->setParameters(AUDIO_IO_HANDLE_NONE, param.toString()); + // Propagate device availability to Engine + mEngine->setDeviceConnectionState(devDesc, state); } break; // handle input device disconnection @@ -407,6 +251,8 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, checkInputsForDevice(device, state, inputs, devDesc->mAddress); mAvailableInputDevices.remove(devDesc); + // Propagate device availability to Engine + mEngine->setDeviceConnectionState(devDesc, state); } break; default: @@ -416,7 +262,7 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, closeAllInputs(); - if (mPhoneState == AUDIO_MODE_IN_CALL) { + if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) { 
audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, false /*fromCache*/); updateCallRouting(newDevice); } @@ -430,9 +276,10 @@ status_t AudioPolicyManager::setDeviceConnectionStateInt(audio_devices_t device, } audio_policy_dev_state_t AudioPolicyManager::getDeviceConnectionState(audio_devices_t device, - const char *device_address) + const char *device_address) { - sp<DeviceDescriptor> devDesc = getDeviceDescriptor(device, device_address); + sp<DeviceDescriptor> devDesc = mHwModules.getDeviceDescriptor(device, device_address, ""); + DeviceVector *deviceVector; if (audio_is_output_device(device)) { @@ -443,43 +290,7 @@ audio_policy_dev_state_t AudioPolicyManager::getDeviceConnectionState(audio_devi ALOGW("getDeviceConnectionState() invalid device type %08x", device); return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE; } - - ssize_t index = deviceVector->indexOf(devDesc); - if (index >= 0) { - return AUDIO_POLICY_DEVICE_STATE_AVAILABLE; - } else { - return AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE; - } -} - -sp<AudioPolicyManager::DeviceDescriptor> AudioPolicyManager::getDeviceDescriptor( - const audio_devices_t device, - const char *device_address) -{ - String8 address = (device_address == NULL) ? 
String8("") : String8(device_address); - // handle legacy remote submix case where the address was not always specified - if (deviceDistinguishesOnAddress(device) && (address.length() == 0)) { - address = String8("0"); - } - - for (size_t i = 0; i < mHwModules.size(); i++) { - if (mHwModules[i]->mHandle == 0) { - continue; - } - DeviceVector deviceList = - mHwModules[i]->mDeclaredDevices.getDevicesFromTypeAddr(device, address); - if (!deviceList.isEmpty()) { - return deviceList.itemAt(0); - } - deviceList = mHwModules[i]->mDeclaredDevices.getDevicesFromType(device); - if (!deviceList.isEmpty()) { - return deviceList.itemAt(0); - } - } - - sp<DeviceDescriptor> devDesc = new DeviceDescriptor(String8(""), device); - devDesc->mAddress = address; - return devDesc; + return deviceVector->getDeviceConnectionState(devDesc); } void AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, int delayMs) @@ -550,8 +361,7 @@ void AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, int delayMs ALOGW_IF(status != NO_ERROR, "updateCallRouting() error %d creating RX audio patch", status); if (status == NO_ERROR) { - mCallRxPatch = new AudioPatch((audio_patch_handle_t)nextUniqueId(), - &patch, mUidCached); + mCallRxPatch = new AudioPatch(&patch, mUidCached); mCallRxPatch->mAfPatchHandle = afPatchHandle; mCallRxPatch->mUid = mUidCached; } @@ -593,8 +403,7 @@ void AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, int delayMs ALOGW_IF(status != NO_ERROR, "setPhoneState() error %d creating TX audio patch", status); if (status == NO_ERROR) { - mCallTxPatch = new AudioPatch((audio_patch_handle_t)nextUniqueId(), - &patch, mUidCached); + mCallTxPatch = new AudioPatch(&patch, mUidCached); mCallTxPatch->mAfPatchHandle = afPatchHandle; mCallTxPatch->mUid = mUidCached; } @@ -604,16 +413,14 @@ void AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, int delayMs void AudioPolicyManager::setPhoneState(audio_mode_t state) { ALOGV("setPhoneState() state 
%d", state); - if (state < 0 || state >= AUDIO_MODE_CNT) { - ALOGW("setPhoneState() invalid state %d", state); - return; - } + // store previous phone state for management of sonification strategy below + int oldState = mEngine->getPhoneState(); - if (state == mPhoneState ) { - ALOGW("setPhoneState() setting same state %d", state); + if (mEngine->setPhoneState(state) != NO_ERROR) { + ALOGW("setPhoneState() invalid or same state %d", state); return; } - + /// Opens: can these line be executed after the switch of volume curves??? // if leaving call state, handle special case of active streams // pertaining to sonification strategy see handleIncallSonification() if (isInCall()) { @@ -629,36 +436,12 @@ void AudioPolicyManager::setPhoneState(audio_mode_t state) mpClientInterface->invalidateStream(AUDIO_STREAM_ACCESSIBILITY); } - // store previous phone state for management of sonification strategy below - int oldState = mPhoneState; - mPhoneState = state; - bool force = false; - - // are we entering or starting a call - if (!isStateInCall(oldState) && isStateInCall(state)) { - ALOGV(" Entering call in setPhoneState()"); - // force routing command to audio hardware when starting a call - // even if no device change is needed - force = true; - for (int j = 0; j < DEVICE_CATEGORY_CNT; j++) { - mStreams[AUDIO_STREAM_DTMF].mVolumeCurve[j] = - sVolumeProfiles[AUDIO_STREAM_VOICE_CALL][j]; - } - } else if (isStateInCall(oldState) && !isStateInCall(state)) { - ALOGV(" Exiting call in setPhoneState()"); - // force routing command to audio hardware when exiting a call - // even if no device change is needed - force = true; - for (int j = 0; j < DEVICE_CATEGORY_CNT; j++) { - mStreams[AUDIO_STREAM_DTMF].mVolumeCurve[j] = - sVolumeProfiles[AUDIO_STREAM_DTMF][j]; - } - } else if (isStateInCall(state) && (state != oldState)) { - ALOGV(" Switching between telephony and VoIP in setPhoneState()"); - // force routing command to audio hardware when switching between telephony and VoIP - // 
even if no device change is needed - force = true; - } + /** + * Switching to or from incall state or switching between telephony and VoIP lead to force + * routing command. + */ + bool force = ((is_state_in_call(oldState) != is_state_in_call(state)) + || (is_state_in_call(state) && (state != oldState))); // check for device and output changes triggered by new phone state checkA2dpSuspend(); @@ -675,12 +458,12 @@ void AudioPolicyManager::setPhoneState(audio_mode_t state) // mute media and sonification strategies and delay device switch by the largest // latency of any output where either strategy is active. // This avoid sending the ring tone or music tail into the earpiece or headset. - if ((desc->isStrategyActive(STRATEGY_MEDIA, - SONIFICATION_HEADSET_MUSIC_DELAY, - sysTime) || - desc->isStrategyActive(STRATEGY_SONIFICATION, - SONIFICATION_HEADSET_MUSIC_DELAY, - sysTime)) && + if ((isStrategyActive(desc, STRATEGY_MEDIA, + SONIFICATION_HEADSET_MUSIC_DELAY, + sysTime) || + isStrategyActive(desc, STRATEGY_SONIFICATION, + SONIFICATION_HEADSET_MUSIC_DELAY, + sysTime)) && (delayMs < (int)desc->mLatency*2)) { delayMs = desc->mLatency*2; } @@ -738,84 +521,35 @@ void AudioPolicyManager::setPhoneState(audio_mode_t state) } } +audio_mode_t AudioPolicyManager::getPhoneState() { + return mEngine->getPhoneState(); +} + void AudioPolicyManager::setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config) { - ALOGV("setForceUse() usage %d, config %d, mPhoneState %d", usage, config, mPhoneState); - - bool forceVolumeReeval = false; - switch(usage) { - case AUDIO_POLICY_FORCE_FOR_COMMUNICATION: - if (config != AUDIO_POLICY_FORCE_SPEAKER && config != AUDIO_POLICY_FORCE_BT_SCO && - config != AUDIO_POLICY_FORCE_NONE) { - ALOGW("setForceUse() invalid config %d for FOR_COMMUNICATION", config); - return; - } - forceVolumeReeval = true; - mForceUse[usage] = config; - break; - case AUDIO_POLICY_FORCE_FOR_MEDIA: - if (config != AUDIO_POLICY_FORCE_HEADPHONES && config != 
AUDIO_POLICY_FORCE_BT_A2DP && - config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY && - config != AUDIO_POLICY_FORCE_ANALOG_DOCK && - config != AUDIO_POLICY_FORCE_DIGITAL_DOCK && config != AUDIO_POLICY_FORCE_NONE && - config != AUDIO_POLICY_FORCE_NO_BT_A2DP && config != AUDIO_POLICY_FORCE_SPEAKER ) { - ALOGW("setForceUse() invalid config %d for FOR_MEDIA", config); - return; - } - mForceUse[usage] = config; - break; - case AUDIO_POLICY_FORCE_FOR_RECORD: - if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY && - config != AUDIO_POLICY_FORCE_NONE) { - ALOGW("setForceUse() invalid config %d for FOR_RECORD", config); - return; - } - mForceUse[usage] = config; - break; - case AUDIO_POLICY_FORCE_FOR_DOCK: - if (config != AUDIO_POLICY_FORCE_NONE && config != AUDIO_POLICY_FORCE_BT_CAR_DOCK && - config != AUDIO_POLICY_FORCE_BT_DESK_DOCK && - config != AUDIO_POLICY_FORCE_WIRED_ACCESSORY && - config != AUDIO_POLICY_FORCE_ANALOG_DOCK && - config != AUDIO_POLICY_FORCE_DIGITAL_DOCK) { - ALOGW("setForceUse() invalid config %d for FOR_DOCK", config); - } - forceVolumeReeval = true; - mForceUse[usage] = config; - break; - case AUDIO_POLICY_FORCE_FOR_SYSTEM: - if (config != AUDIO_POLICY_FORCE_NONE && - config != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) { - ALOGW("setForceUse() invalid config %d for FOR_SYSTEM", config); - } - forceVolumeReeval = true; - mForceUse[usage] = config; - break; - case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO: - if (config != AUDIO_POLICY_FORCE_NONE && - config != AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED) { - ALOGW("setForceUse() invalid config %d forHDMI_SYSTEM_AUDIO", config); - } - mForceUse[usage] = config; - break; - default: - ALOGW("setForceUse() invalid usage %d", usage); - break; + ALOGV("setForceUse() usage %d, config %d, mPhoneState %d", usage, config, mEngine->getPhoneState()); + + if (mEngine->setForceUse(usage, config) != NO_ERROR) { + ALOGW("setForceUse() could not set force cfg %d for usage %d", config, usage); 
+ return; } + bool forceVolumeReeval = (usage == AUDIO_POLICY_FORCE_FOR_COMMUNICATION) || + (usage == AUDIO_POLICY_FORCE_FOR_DOCK) || + (usage == AUDIO_POLICY_FORCE_FOR_SYSTEM); // check for device and output changes triggered by new force usage checkA2dpSuspend(); checkOutputForAllStrategies(); updateDevicesAndOutputs(); - if (mPhoneState == AUDIO_MODE_IN_CALL) { + if (mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) { audio_devices_t newDevice = getNewOutputDevice(mPrimaryOutput, true /*fromCache*/); updateCallRouting(newDevice); } for (size_t i = 0; i < mOutputs.size(); i++) { audio_io_handle_t output = mOutputs.keyAt(i); audio_devices_t newDevice = getNewOutputDevice(output, true /*fromCache*/); - if ((mPhoneState != AUDIO_MODE_IN_CALL) || (output != mPrimaryOutput)) { + if ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) || (output != mPrimaryOutput)) { setOutputDevice(output, newDevice, (newDevice != AUDIO_DEVICE_NONE)); } if (forceVolumeReeval && (newDevice != AUDIO_DEVICE_NONE)) { @@ -823,18 +557,13 @@ void AudioPolicyManager::setForceUse(audio_policy_force_use_t usage, } } - audio_io_handle_t activeInput = getActiveInput(); + audio_io_handle_t activeInput = mInputs.getActiveInput(); if (activeInput != 0) { setInputDevice(activeInput, getNewInputDevice(activeInput)); } } -audio_policy_forced_cfg_t AudioPolicyManager::getForceUse(audio_policy_force_use_t usage) -{ - return mForceUse[usage]; -} - void AudioPolicyManager::setSystemProperty(const char* property, const char* value) { ALOGV("setSystemProperty() property %s, value %s", property, value); @@ -842,7 +571,7 @@ void AudioPolicyManager::setSystemProperty(const char* property, const char* val // Find a direct output profile compatible with the parameters passed, even if the input flags do // not explicitly request a direct output -sp<AudioPolicyManager::IOProfile> AudioPolicyManager::getProfileForDirectOutput( +sp<IOProfile> AudioPolicyManager::getProfileForDirectOutput( audio_devices_t device, uint32_t 
samplingRate, audio_format_t format, @@ -868,11 +597,11 @@ sp<AudioPolicyManager::IOProfile> AudioPolicyManager::getProfileForDirectOutput( } audio_io_handle_t AudioPolicyManager::getOutput(audio_stream_type_t stream, - uint32_t samplingRate, - audio_format_t format, - audio_channel_mask_t channelMask, - audio_output_flags_t flags, - const audio_offload_info_t *offloadInfo) + uint32_t samplingRate, + audio_format_t format, + audio_channel_mask_t channelMask, + audio_output_flags_t flags, + const audio_offload_info_t *offloadInfo) { routing_strategy strategy = getStrategy(stream); audio_devices_t device = getDeviceForStrategy(strategy, false /*fromCache*/); @@ -910,45 +639,16 @@ status_t AudioPolicyManager::getOutputForAttr(const audio_attributes_t *attr, } stream_type_to_audio_attributes(*stream, &attributes); } - - for (size_t i = 0; i < mPolicyMixes.size(); i++) { - sp<AudioOutputDescriptor> desc; - if (mPolicyMixes[i]->mMix.mMixType == MIX_TYPE_PLAYERS) { - for (size_t j = 0; j < mPolicyMixes[i]->mMix.mCriteria.size(); j++) { - if ((RULE_MATCH_ATTRIBUTE_USAGE == mPolicyMixes[i]->mMix.mCriteria[j].mRule && - mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mUsage == attributes.usage) || - (RULE_EXCLUDE_ATTRIBUTE_USAGE == mPolicyMixes[i]->mMix.mCriteria[j].mRule && - mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mUsage != attributes.usage)) { - desc = mPolicyMixes[i]->mOutput; - break; - } - if (strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 && - strncmp(attributes.tags + strlen("addr="), - mPolicyMixes[i]->mMix.mRegistrationId.string(), - AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - strlen("addr=") - 1) == 0) { - desc = mPolicyMixes[i]->mOutput; - break; - } - } - } else if (mPolicyMixes[i]->mMix.mMixType == MIX_TYPE_RECORDERS) { - if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE && - strncmp(attributes.tags, "addr=", strlen("addr=")) == 0 && - strncmp(attributes.tags + strlen("addr="), - mPolicyMixes[i]->mMix.mRegistrationId.string(), - AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 
strlen("addr=") - 1) == 0) { - desc = mPolicyMixes[i]->mOutput; - } - } - if (desc != 0) { - if (!audio_is_linear_pcm(format)) { - return BAD_VALUE; - } - desc->mPolicyMix = &mPolicyMixes[i]->mMix; - *stream = streamTypefromAttributesInt(&attributes); - *output = desc->mIoHandle; - ALOGV("getOutputForAttr() returns output %d", *output); - return NO_ERROR; + sp<AudioOutputDescriptor> desc; + if (mPolicyMixes.getOutputForAttr(attributes, desc) == NO_ERROR) { + ALOG_ASSERT(desc != 0, "Invalid desc returned by getOutputForAttr"); + if (!audio_is_linear_pcm(format)) { + return BAD_VALUE; } + *stream = streamTypefromAttributesInt(&attributes); + *output = desc->mIoHandle; + ALOGV("getOutputForAttr() returns output %d", *output); + return NO_ERROR; } if (attributes.usage == AUDIO_USAGE_VIRTUAL_SOURCE) { ALOGW("getOutputForAttr() no policy mix found for usage AUDIO_USAGE_VIRTUAL_SOURCE"); @@ -956,7 +656,7 @@ status_t AudioPolicyManager::getOutputForAttr(const audio_attributes_t *attr, } ALOGV("getOutputForAttr() usage=%d, content=%d, tag=%s flags=%08x", - attributes.usage, attributes.content_type, attributes.tags, attributes.flags); + attributes.usage, attributes.content_type, attributes.tags, attributes.flags); routing_strategy strategy = (routing_strategy) getStrategyForAttr(&attributes); audio_devices_t device = getDeviceForStrategy(strategy, false /*fromCache*/); @@ -1066,7 +766,7 @@ audio_io_handle_t AudioPolicyManager::getOutputForDevice( // in the background. 
if (((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0) || - !isNonOffloadableEffectEnabled()) { + !mEffects.isNonOffloadableEffectEnabled()) { profile = getProfileForDirectOutput(device, samplingRate, format, @@ -1130,6 +830,10 @@ audio_io_handle_t AudioPolicyManager::getOutputForDevice( if (audio_is_linear_pcm(format) && samplingRate <= MAX_MIXER_SAMPLING_RATE) { goto non_direct_output; } + // fall back to mixer output if possible when the direct output could not be open + if (audio_is_linear_pcm(format) && samplingRate <= MAX_MIXER_SAMPLING_RATE) { + goto non_direct_output; + } return AUDIO_IO_HANDLE_NONE; } outputDesc->mSamplingRate = config.sample_rate; @@ -1248,7 +952,7 @@ status_t AudioPolicyManager::startOutput(audio_io_handle_t output, uint32_t beaconMuteLatency = 0; if (stream == AUDIO_STREAM_TTS) { ALOGV("\t found BEACON stream"); - if (isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) { + if (mOutputs.isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) { return INVALID_OPERATION; } else { beaconMuteLatency = handleEventForBeacon(STARTING_BEACON); @@ -1322,7 +1026,8 @@ status_t AudioPolicyManager::startOutput(audio_io_handle_t output, outputDesc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS) { setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_AVAILABLE, - outputDesc->mPolicyMix->mRegistrationId); + outputDesc->mPolicyMix->mRegistrationId, + "remote-submix"); } // force reevaluating accessibility routing when ringtone or alarm starts @@ -1371,7 +1076,8 @@ status_t AudioPolicyManager::stopOutput(audio_io_handle_t output, outputDesc->mPolicyMix->mMixType == MIX_TYPE_RECORDERS) { setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, - outputDesc->mPolicyMix->mRegistrationId); + outputDesc->mPolicyMix->mRegistrationId, + "remote-submix"); } outputDesc->mStopTime[stream] = systemTime(); @@ -1425,7 +1131,7 @@ void AudioPolicyManager::releaseOutput(audio_io_handle_t output, 
sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(index); if (outputDesc->isActive()) { mpClientInterface->closeOutput(output); - mOutputs.removeItem(output); + removeOutput(output); mTestOutputs[testIndex] = 0; } return; @@ -1483,19 +1189,13 @@ status_t AudioPolicyManager::getInputForAttr(const audio_attributes_t *attr, if (inputSource == AUDIO_SOURCE_REMOTE_SUBMIX && strncmp(attr->tags, "addr=", strlen("addr=")) == 0) { - device = AUDIO_DEVICE_IN_REMOTE_SUBMIX; - address = String8(attr->tags + strlen("addr=")); - ssize_t index = mPolicyMixes.indexOfKey(address); - if (index < 0) { - ALOGW("getInputForAttr() no policy for address %s", address.string()); - return BAD_VALUE; - } - if (mPolicyMixes[index]->mMix.mMixType != MIX_TYPE_PLAYERS) { - ALOGW("getInputForAttr() bad policy mix type for address %s", address.string()); - return BAD_VALUE; + status_t ret = mPolicyMixes.getInputMixForAttr(*attr, policyMix); + if (ret != NO_ERROR) { + return ret; } - policyMix = &mPolicyMixes[index]->mMix; *inputType = API_INPUT_MIX_EXT_POLICY_REROUTE; + device = AUDIO_DEVICE_IN_REMOTE_SUBMIX; + address = String8(attr->tags + strlen("addr=")); } else { device = getDeviceAndMixForInputSource(inputSource, &policyMix); if (device == AUDIO_DEVICE_NONE) { @@ -1633,10 +1333,10 @@ status_t AudioPolicyManager::startInput(audio_io_handle_t input, } // virtual input devices are compatible with other input devices - if (!isVirtualInputDevice(inputDesc->mDevice)) { + if (!is_virtual_input_device(inputDesc->mDevice)) { // for a non-virtual input device, check if there is another (non-virtual) active input - audio_io_handle_t activeInput = getActiveInput(); + audio_io_handle_t activeInput = mInputs.getActiveInput(); if (activeInput != 0 && activeInput != input) { // If the already active input uses AUDIO_SOURCE_HOTWORD then it is closed, @@ -1654,7 +1354,7 @@ status_t AudioPolicyManager::startInput(audio_io_handle_t input, } if (inputDesc->mRefCount == 0) { - if (activeInputsCount() == 0) 
{ + if (mInputs.activeInputsCount() == 0) { SoundTrigger::setCaptureState(true); } setInputDevice(input, getNewInputDevice(input), true /* force */); @@ -1672,7 +1372,7 @@ status_t AudioPolicyManager::startInput(audio_io_handle_t input, if (address != "") { setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_AVAILABLE, - address); + address, "remote-submix"); } } } @@ -1720,13 +1420,13 @@ status_t AudioPolicyManager::stopInput(audio_io_handle_t input, if (address != "") { setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, - address); + address, "remote-submix"); } } resetInputDevice(input); - if (activeInputsCount() == 0) { + if (mInputs.activeInputsCount() == 0) { SoundTrigger::setCaptureState(false); } } @@ -1793,25 +1493,20 @@ void AudioPolicyManager::initStreamVolume(audio_stream_type_t stream, int indexMax) { ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax); - if (indexMin < 0 || indexMin >= indexMax) { - ALOGW("initStreamVolume() invalid index limits for stream %d, min %d, max %d", stream , indexMin, indexMax); - return; - } - mStreams[stream].mIndexMin = indexMin; - mStreams[stream].mIndexMax = indexMax; + mEngine->initStreamVolume(stream, indexMin, indexMax); //FIXME: AUDIO_STREAM_ACCESSIBILITY volume follows AUDIO_STREAM_MUSIC for now if (stream == AUDIO_STREAM_MUSIC) { - mStreams[AUDIO_STREAM_ACCESSIBILITY].mIndexMin = indexMin; - mStreams[AUDIO_STREAM_ACCESSIBILITY].mIndexMax = indexMax; + mEngine->initStreamVolume(AUDIO_STREAM_ACCESSIBILITY, indexMin, indexMax); } } status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream, - int index, - audio_devices_t device) + int index, + audio_devices_t device) { - if ((index < mStreams[stream].mIndexMin) || (index > mStreams[stream].mIndexMax)) { + if ((index < mStreams[stream].getVolumeIndexMin()) || + (index > mStreams[stream].getVolumeIndexMax())) { return BAD_VALUE; } if 
(!audio_is_output_device(device)) { @@ -1819,7 +1514,7 @@ status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream, } // Force max volume if stream cannot be muted - if (!mStreams[stream].mCanBeMuted) index = mStreams[stream].mIndexMax; + if (!mStreams.canBeMuted(stream)) index = mStreams[stream].getVolumeIndexMax(); ALOGV("setStreamVolumeIndex() stream %d, device %04x, index %d", stream, device, index); @@ -1827,9 +1522,9 @@ status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream, // if device is AUDIO_DEVICE_OUT_DEFAULT set default value and // clear all device specific values if (device == AUDIO_DEVICE_OUT_DEFAULT) { - mStreams[stream].mIndexCur.clear(); + mStreams.clearCurrentVolumeIndex(stream); } - mStreams[stream].mIndexCur.add(device, index); + mStreams.addCurrentVolumeIndex(stream, device, index); // update volume on all outputs whose current device is also selected by the same // strategy as the device specified by the caller @@ -1839,7 +1534,7 @@ status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream, //FIXME: AUDIO_STREAM_ACCESSIBILITY volume follows AUDIO_STREAM_MUSIC for now audio_devices_t accessibilityDevice = AUDIO_DEVICE_NONE; if (stream == AUDIO_STREAM_MUSIC) { - mStreams[AUDIO_STREAM_ACCESSIBILITY].mIndexCur.add(device, index); + mStreams.addCurrentVolumeIndex(AUDIO_STREAM_ACCESSIBILITY, device, index); accessibilityDevice = getDeviceForStrategy(STRATEGY_ACCESSIBILITY, true /*fromCache*/); } if ((device != AUDIO_DEVICE_OUT_DEFAULT) && @@ -1848,8 +1543,7 @@ status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream, } status_t status = NO_ERROR; for (size_t i = 0; i < mOutputs.size(); i++) { - audio_devices_t curDevice = - getDeviceForVolume(mOutputs.valueAt(i)->device()); + audio_devices_t curDevice = Volume::getDeviceForVolume(mOutputs.valueAt(i)->device()); if ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & strategyDevice) != 0)) { status_t volStatus = 
checkAndSetVolume(stream, index, mOutputs.keyAt(i), curDevice); if (volStatus != NO_ERROR) { @@ -1879,7 +1573,7 @@ status_t AudioPolicyManager::getStreamVolumeIndex(audio_stream_type_t stream, if (device == AUDIO_DEVICE_OUT_DEFAULT) { device = getDeviceForStrategy(getStrategy(stream), true /*fromCache*/); } - device = getDeviceForVolume(device); + device = Volume::getDeviceForVolume(device); *index = mStreams[stream].getVolumeIndex(device); ALOGV("getStreamVolumeIndex() stream %d device %08x index %d", stream, device, *index); @@ -1956,137 +1650,7 @@ status_t AudioPolicyManager::registerEffect(const effect_descriptor_t *desc, return INVALID_OPERATION; } } - - if (mTotalEffectsMemory + desc->memoryUsage > getMaxEffectsMemory()) { - ALOGW("registerEffect() memory limit exceeded for Fx %s, Memory %d KB", - desc->name, desc->memoryUsage); - return INVALID_OPERATION; - } - mTotalEffectsMemory += desc->memoryUsage; - ALOGV("registerEffect() effect %s, io %d, strategy %d session %d id %d", - desc->name, io, strategy, session, id); - ALOGV("registerEffect() memory %d, total memory %d", desc->memoryUsage, mTotalEffectsMemory); - - sp<EffectDescriptor> effectDesc = new EffectDescriptor(); - memcpy (&effectDesc->mDesc, desc, sizeof(effect_descriptor_t)); - effectDesc->mIo = io; - effectDesc->mStrategy = (routing_strategy)strategy; - effectDesc->mSession = session; - effectDesc->mEnabled = false; - - mEffects.add(id, effectDesc); - - return NO_ERROR; -} - -status_t AudioPolicyManager::unregisterEffect(int id) -{ - ssize_t index = mEffects.indexOfKey(id); - if (index < 0) { - ALOGW("unregisterEffect() unknown effect ID %d", id); - return INVALID_OPERATION; - } - - sp<EffectDescriptor> effectDesc = mEffects.valueAt(index); - - setEffectEnabled(effectDesc, false); - - if (mTotalEffectsMemory < effectDesc->mDesc.memoryUsage) { - ALOGW("unregisterEffect() memory %d too big for total %d", - effectDesc->mDesc.memoryUsage, mTotalEffectsMemory); - effectDesc->mDesc.memoryUsage = 
mTotalEffectsMemory; - } - mTotalEffectsMemory -= effectDesc->mDesc.memoryUsage; - ALOGV("unregisterEffect() effect %s, ID %d, memory %d total memory %d", - effectDesc->mDesc.name, id, effectDesc->mDesc.memoryUsage, mTotalEffectsMemory); - - mEffects.removeItem(id); - - return NO_ERROR; -} - -status_t AudioPolicyManager::setEffectEnabled(int id, bool enabled) -{ - ssize_t index = mEffects.indexOfKey(id); - if (index < 0) { - ALOGW("unregisterEffect() unknown effect ID %d", id); - return INVALID_OPERATION; - } - - return setEffectEnabled(mEffects.valueAt(index), enabled); -} - -status_t AudioPolicyManager::setEffectEnabled(const sp<EffectDescriptor>& effectDesc, bool enabled) -{ - if (enabled == effectDesc->mEnabled) { - ALOGV("setEffectEnabled(%s) effect already %s", - enabled?"true":"false", enabled?"enabled":"disabled"); - return INVALID_OPERATION; - } - - if (enabled) { - if (mTotalEffectsCpuLoad + effectDesc->mDesc.cpuLoad > getMaxEffectsCpuLoad()) { - ALOGW("setEffectEnabled(true) CPU Load limit exceeded for Fx %s, CPU %f MIPS", - effectDesc->mDesc.name, (float)effectDesc->mDesc.cpuLoad/10); - return INVALID_OPERATION; - } - mTotalEffectsCpuLoad += effectDesc->mDesc.cpuLoad; - ALOGV("setEffectEnabled(true) total CPU %d", mTotalEffectsCpuLoad); - } else { - if (mTotalEffectsCpuLoad < effectDesc->mDesc.cpuLoad) { - ALOGW("setEffectEnabled(false) CPU load %d too high for total %d", - effectDesc->mDesc.cpuLoad, mTotalEffectsCpuLoad); - effectDesc->mDesc.cpuLoad = mTotalEffectsCpuLoad; - } - mTotalEffectsCpuLoad -= effectDesc->mDesc.cpuLoad; - ALOGV("setEffectEnabled(false) total CPU %d", mTotalEffectsCpuLoad); - } - effectDesc->mEnabled = enabled; - return NO_ERROR; -} - -bool AudioPolicyManager::isNonOffloadableEffectEnabled() -{ - for (size_t i = 0; i < mEffects.size(); i++) { - sp<EffectDescriptor> effectDesc = mEffects.valueAt(i); - if (effectDesc->mEnabled && (effectDesc->mStrategy == STRATEGY_MEDIA) && - ((effectDesc->mDesc.flags & 
EFFECT_FLAG_OFFLOAD_SUPPORTED) == 0)) { - ALOGV("isNonOffloadableEffectEnabled() non offloadable effect %s enabled on session %d", - effectDesc->mDesc.name, effectDesc->mSession); - return true; - } - } - return false; -} - -bool AudioPolicyManager::isStreamActive(audio_stream_type_t stream, uint32_t inPastMs) const -{ - nsecs_t sysTime = systemTime(); - for (size_t i = 0; i < mOutputs.size(); i++) { - const sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(i); - if (outputDesc->isStreamActive(stream, inPastMs, sysTime)) { - return true; - } - } - return false; -} - -bool AudioPolicyManager::isStreamActiveRemotely(audio_stream_type_t stream, - uint32_t inPastMs) const -{ - nsecs_t sysTime = systemTime(); - for (size_t i = 0; i < mOutputs.size(); i++) { - const sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(i); - if (((outputDesc->device() & APM_AUDIO_OUT_DEVICE_REMOTE_ALL) != 0) && - outputDesc->isStreamActive(stream, inPastMs, sysTime)) { - // do not consider re routing (when the output is going to a dynamic policy) - // as "remote playback" - if (outputDesc->mPolicyMix == NULL) { - return true; - } - } - } - return false; + return mEffects.registerEffect(desc, io, strategy, session, id); } bool AudioPolicyManager::isSourceActive(audio_source_t source) const @@ -2156,9 +1720,8 @@ status_t AudioPolicyManager::registerPolicyMixes(Vector<AudioMix> mixes) for (size_t i = 0; i < mixes.size(); i++) { String8 address = mixes[i].mRegistrationId; - ssize_t index = mPolicyMixes.indexOfKey(address); - if (index >= 0) { - ALOGE("registerPolicyMixes(): mix for address %s already registered", address.string()); + + if (mPolicyMixes.registerMix(address, mixes[i]) != NO_ERROR) { continue; } audio_config_t outputConfig = mixes[i].mFormat; @@ -2171,17 +1734,15 @@ status_t AudioPolicyManager::registerPolicyMixes(Vector<AudioMix> mixes) AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address); module->addInputProfile(address, &inputConfig, AUDIO_DEVICE_IN_REMOTE_SUBMIX, address); - 
sp<AudioPolicyMix> policyMix = new AudioPolicyMix(); - policyMix->mMix = mixes[i]; - mPolicyMixes.add(address, policyMix); + if (mixes[i].mMixType == MIX_TYPE_PLAYERS) { setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_AVAILABLE, - address.string()); + address.string(), "remote-submix"); } else { setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_AVAILABLE, - address.string()); + address.string(), "remote-submix"); } } return NO_ERROR; @@ -2206,20 +1767,17 @@ status_t AudioPolicyManager::unregisterPolicyMixes(Vector<AudioMix> mixes) for (size_t i = 0; i < mixes.size(); i++) { String8 address = mixes[i].mRegistrationId; - ssize_t index = mPolicyMixes.indexOfKey(address); - if (index < 0) { - ALOGE("unregisterPolicyMixes(): mix for address %s not registered", address.string()); + + if (mPolicyMixes.unregisterMix(address) != NO_ERROR) { continue; } - mPolicyMixes.removeItemsAt(index); - if (getDeviceConnectionState(AUDIO_DEVICE_IN_REMOTE_SUBMIX, address.string()) == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) { setDeviceConnectionStateInt(AUDIO_DEVICE_IN_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, - address.string()); + address.string(), "remote-submix"); } if (getDeviceConnectionState(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, address.string()) == @@ -2227,7 +1785,7 @@ status_t AudioPolicyManager::unregisterPolicyMixes(Vector<AudioMix> mixes) { setDeviceConnectionStateInt(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, - address.string()); + address.string(), "remote-submix"); } module->removeOutputProfile(address); module->removeInputProfile(address); @@ -2247,87 +1805,32 @@ status_t AudioPolicyManager::dump(int fd) snprintf(buffer, SIZE, " Primary Output: %d\n", mPrimaryOutput); result.append(buffer); - snprintf(buffer, SIZE, " Phone state: %d\n", mPhoneState); + snprintf(buffer, SIZE, " Phone state: %d\n", mEngine->getPhoneState()); result.append(buffer); snprintf(buffer, SIZE, " 
Force use for communications %d\n", - mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]); + mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION)); result.append(buffer); - snprintf(buffer, SIZE, " Force use for media %d\n", mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA]); + snprintf(buffer, SIZE, " Force use for media %d\n", mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_MEDIA)); result.append(buffer); - snprintf(buffer, SIZE, " Force use for record %d\n", mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD]); + snprintf(buffer, SIZE, " Force use for record %d\n", mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD)); result.append(buffer); - snprintf(buffer, SIZE, " Force use for dock %d\n", mForceUse[AUDIO_POLICY_FORCE_FOR_DOCK]); + snprintf(buffer, SIZE, " Force use for dock %d\n", mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK)); result.append(buffer); - snprintf(buffer, SIZE, " Force use for system %d\n", mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM]); + snprintf(buffer, SIZE, " Force use for system %d\n", mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM)); result.append(buffer); snprintf(buffer, SIZE, " Force use for hdmi system audio %d\n", - mForceUse[AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO]); - result.append(buffer); - - snprintf(buffer, SIZE, " Available output devices:\n"); + mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO)); result.append(buffer); write(fd, result.string(), result.size()); - for (size_t i = 0; i < mAvailableOutputDevices.size(); i++) { - mAvailableOutputDevices[i]->dump(fd, 2, i); - } - snprintf(buffer, SIZE, "\n Available input devices:\n"); - write(fd, buffer, strlen(buffer)); - for (size_t i = 0; i < mAvailableInputDevices.size(); i++) { - mAvailableInputDevices[i]->dump(fd, 2, i); - } - - snprintf(buffer, SIZE, "\nHW Modules dump:\n"); - write(fd, buffer, strlen(buffer)); - for (size_t i = 0; i < mHwModules.size(); i++) { - snprintf(buffer, SIZE, "- HW Module %zu:\n", i + 1); - write(fd, buffer, strlen(buffer)); - 
mHwModules[i]->dump(fd); - } - - snprintf(buffer, SIZE, "\nOutputs dump:\n"); - write(fd, buffer, strlen(buffer)); - for (size_t i = 0; i < mOutputs.size(); i++) { - snprintf(buffer, SIZE, "- Output %d dump:\n", mOutputs.keyAt(i)); - write(fd, buffer, strlen(buffer)); - mOutputs.valueAt(i)->dump(fd); - } - - snprintf(buffer, SIZE, "\nInputs dump:\n"); - write(fd, buffer, strlen(buffer)); - for (size_t i = 0; i < mInputs.size(); i++) { - snprintf(buffer, SIZE, "- Input %d dump:\n", mInputs.keyAt(i)); - write(fd, buffer, strlen(buffer)); - mInputs.valueAt(i)->dump(fd); - } - snprintf(buffer, SIZE, "\nStreams dump:\n"); - write(fd, buffer, strlen(buffer)); - snprintf(buffer, SIZE, - " Stream Can be muted Index Min Index Max Index Cur [device : index]...\n"); - write(fd, buffer, strlen(buffer)); - for (size_t i = 0; i < AUDIO_STREAM_CNT; i++) { - snprintf(buffer, SIZE, " %02zu ", i); - write(fd, buffer, strlen(buffer)); - mStreams[i].dump(fd); - } - - snprintf(buffer, SIZE, "\nTotal Effects CPU: %f MIPS, Total Effects memory: %d KB\n", - (float)mTotalEffectsCpuLoad/10, mTotalEffectsMemory); - write(fd, buffer, strlen(buffer)); - - snprintf(buffer, SIZE, "Registered effects:\n"); - write(fd, buffer, strlen(buffer)); - for (size_t i = 0; i < mEffects.size(); i++) { - snprintf(buffer, SIZE, "- Effect %d dump:\n", mEffects.keyAt(i)); - write(fd, buffer, strlen(buffer)); - mEffects.valueAt(i)->dump(fd); - } - - snprintf(buffer, SIZE, "\nAudio Patches:\n"); - write(fd, buffer, strlen(buffer)); - for (size_t i = 0; i < mAudioPatches.size(); i++) { - mAudioPatches[i]->dump(fd, 2, i); - } + mAvailableOutputDevices.dump(fd, String8("output")); + mAvailableInputDevices.dump(fd, String8("input")); + mHwModules.dump(fd); + mOutputs.dump(fd); + mInputs.dump(fd); + mStreams.dump(fd); + mEffects.dump(fd); + mAudioPatches.dump(fd); return NO_ERROR; } @@ -2384,7 +1887,7 @@ bool AudioPolicyManager::isOffloadSupported(const audio_offload_info_t& offloadI // FIXME: We should check the 
audio session here but we do not have it in this context. // This may prevent offloading in rare situations where effects are left active by apps // in the background. - if (isNonOffloadableEffectEnabled()) { + if (mEffects.isNonOffloadableEffectEnabled()) { return false; } @@ -2463,93 +1966,6 @@ status_t AudioPolicyManager::getAudioPort(struct audio_port *port __unused) return NO_ERROR; } -sp<AudioPolicyManager::AudioOutputDescriptor> AudioPolicyManager::getOutputFromId( - audio_port_handle_t id) const -{ - sp<AudioOutputDescriptor> outputDesc = NULL; - for (size_t i = 0; i < mOutputs.size(); i++) { - outputDesc = mOutputs.valueAt(i); - if (outputDesc->mId == id) { - break; - } - } - return outputDesc; -} - -sp<AudioPolicyManager::AudioInputDescriptor> AudioPolicyManager::getInputFromId( - audio_port_handle_t id) const -{ - sp<AudioInputDescriptor> inputDesc = NULL; - for (size_t i = 0; i < mInputs.size(); i++) { - inputDesc = mInputs.valueAt(i); - if (inputDesc->mId == id) { - break; - } - } - return inputDesc; -} - -sp <AudioPolicyManager::HwModule> AudioPolicyManager::getModuleForDevice( - audio_devices_t device) const -{ - sp <HwModule> module; - - for (size_t i = 0; i < mHwModules.size(); i++) { - if (mHwModules[i]->mHandle == 0) { - continue; - } - if (audio_is_output_device(device)) { - for (size_t j = 0; j < mHwModules[i]->mOutputProfiles.size(); j++) - { - if (mHwModules[i]->mOutputProfiles[j]->mSupportedDevices.types() & device) { - return mHwModules[i]; - } - } - } else { - for (size_t j = 0; j < mHwModules[i]->mInputProfiles.size(); j++) { - if (mHwModules[i]->mInputProfiles[j]->mSupportedDevices.types() & - device & ~AUDIO_DEVICE_BIT_IN) { - return mHwModules[i]; - } - } - } - } - return module; -} - -sp <AudioPolicyManager::HwModule> AudioPolicyManager::getModuleFromName(const char *name) const -{ - sp <HwModule> module; - - for (size_t i = 0; i < mHwModules.size(); i++) - { - if (strcmp(mHwModules[i]->mName, name) == 0) { - return mHwModules[i]; - } 
- } - return module; -} - -audio_devices_t AudioPolicyManager::availablePrimaryOutputDevices() -{ - sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(mPrimaryOutput); - audio_devices_t devices = outputDesc->mProfile->mSupportedDevices.types(); - return devices & mAvailableOutputDevices.types(); -} - -audio_devices_t AudioPolicyManager::availablePrimaryInputDevices() -{ - audio_module_handle_t primaryHandle = - mOutputs.valueFor(mPrimaryOutput)->mProfile->mModule->mHandle; - audio_devices_t devices = AUDIO_DEVICE_NONE; - for (size_t i = 0; i < mAvailableInputDevices.size(); i++) { - if (mAvailableInputDevices[i]->mModule->mHandle == primaryHandle) { - devices |= mAvailableInputDevices[i]->mDeviceType; - } - } - return devices; -} - status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch, audio_patch_handle_t *handle, uid_t uid) @@ -2605,7 +2021,7 @@ status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch, } if (patch->sources[0].type == AUDIO_PORT_TYPE_MIX) { - sp<AudioOutputDescriptor> outputDesc = getOutputFromId(patch->sources[0].id); + sp<AudioOutputDescriptor> outputDesc = mOutputs.getOutputFromId(patch->sources[0].id); if (outputDesc == NULL) { ALOGV("createAudioPatch() output not found for id %d", patch->sources[0].id); return BAD_VALUE; @@ -2634,15 +2050,14 @@ status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch, return BAD_VALUE; } - if (!outputDesc->mProfile->isCompatibleProfile(devDesc->mDeviceType, + if (!outputDesc->mProfile->isCompatibleProfile(devDesc->type(), devDesc->mAddress, patch->sources[0].sample_rate, - NULL, // updatedSamplingRate - patch->sources[0].format, - patch->sources[0].channel_mask, - AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) { - ALOGV("createAudioPatch() profile not supported for device %08x", - devDesc->mDeviceType); + NULL, // updatedSamplingRate + patch->sources[0].format, + patch->sources[0].channel_mask, + AUDIO_OUTPUT_FLAG_NONE /*FIXME*/)) { + 
ALOGV("createAudioPatch() profile not supported for device %08x", devDesc->type()); return INVALID_OPERATION; } devices.add(devDesc); @@ -2674,7 +2089,7 @@ status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch, if (patch->num_sinks > 1) { return INVALID_OPERATION; } - sp<AudioInputDescriptor> inputDesc = getInputFromId(patch->sinks[0].id); + sp<AudioInputDescriptor> inputDesc = mInputs.getInputFromId(patch->sinks[0].id); if (inputDesc == NULL) { return BAD_VALUE; } @@ -2689,7 +2104,7 @@ status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch, return BAD_VALUE; } - if (!inputDesc->mProfile->isCompatibleProfile(devDesc->mDeviceType, + if (!inputDesc->mProfile->isCompatibleProfile(devDesc->type(), devDesc->mAddress, patch->sinks[0].sample_rate, NULL, /*updatedSampleRate*/ @@ -2703,8 +2118,8 @@ status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch, } // TODO: reconfigure output format and channels here ALOGV("createAudioPatch() setting device %08x on output %d", - devDesc->mDeviceType, inputDesc->mIoHandle); - setInputDevice(inputDesc->mIoHandle, devDesc->mDeviceType, true, handle); + devDesc->type(), inputDesc->mIoHandle); + setInputDevice(inputDesc->mIoHandle, devDesc->type(), true, handle); index = mAudioPatches.indexOfKey(*handle); if (index >= 0) { if (patchDesc != 0 && patchDesc != mAudioPatches.valueAt(index)) { @@ -2754,8 +2169,7 @@ status_t AudioPolicyManager::createAudioPatch(const struct audio_patch *patch, return INVALID_OPERATION; } SortedVector<audio_io_handle_t> outputs = - getOutputsForDevice(sinkDeviceDesc->mDeviceType, - mOutputs); + getOutputsForDevice(sinkDeviceDesc->type(), mOutputs); // if the sink device is reachable via an opened output stream, request to go via // this output stream by adding a second source to the patch description audio_io_handle_t output = selectOutput(outputs, @@ -2785,8 +2199,7 @@ status_t AudioPolicyManager::createAudioPatch(const struct audio_patch 
*patch, status, afPatchHandle); if (status == NO_ERROR) { if (index < 0) { - patchDesc = new AudioPatch((audio_patch_handle_t)nextUniqueId(), - &newPatch, uid); + patchDesc = new AudioPatch(&newPatch, uid); addAudioPatch(patchDesc->mHandle, patchDesc); } else { patchDesc->mPatch = newPatch; @@ -2829,7 +2242,7 @@ status_t AudioPolicyManager::releaseAudioPatch(audio_patch_handle_t handle, struct audio_patch *patch = &patchDesc->mPatch; patchDesc->mUid = mUidCached; if (patch->sources[0].type == AUDIO_PORT_TYPE_MIX) { - sp<AudioOutputDescriptor> outputDesc = getOutputFromId(patch->sources[0].id); + sp<AudioOutputDescriptor> outputDesc = mOutputs.getOutputFromId(patch->sources[0].id); if (outputDesc == NULL) { ALOGV("releaseAudioPatch() output not found for id %d", patch->sources[0].id); return BAD_VALUE; @@ -2842,7 +2255,7 @@ status_t AudioPolicyManager::releaseAudioPatch(audio_patch_handle_t handle, NULL); } else if (patch->sources[0].type == AUDIO_PORT_TYPE_DEVICE) { if (patch->sinks[0].type == AUDIO_PORT_TYPE_MIX) { - sp<AudioInputDescriptor> inputDesc = getInputFromId(patch->sinks[0].id); + sp<AudioInputDescriptor> inputDesc = mInputs.getInputFromId(patch->sinks[0].id); if (inputDesc == NULL) { ALOGV("releaseAudioPatch() input not found for id %d", patch->sinks[0].id); return BAD_VALUE; @@ -2872,30 +2285,11 @@ status_t AudioPolicyManager::listAudioPatches(unsigned int *num_patches, struct audio_patch *patches, unsigned int *generation) { - if (num_patches == NULL || (*num_patches != 0 && patches == NULL) || - generation == NULL) { + if (generation == NULL) { return BAD_VALUE; } - ALOGV("listAudioPatches() num_patches %d patches %p available patches %zu", - *num_patches, patches, mAudioPatches.size()); - if (patches == NULL) { - *num_patches = 0; - } - - size_t patchesWritten = 0; - size_t patchesMax = *num_patches; - for (size_t i = 0; - i < mAudioPatches.size() && patchesWritten < patchesMax; i++) { - patches[patchesWritten] = mAudioPatches[i]->mPatch; - 
patches[patchesWritten++].id = mAudioPatches[i]->mHandle; - ALOGV("listAudioPatches() patch %zu num_sources %d num_sinks %d", - i, mAudioPatches[i]->mPatch.num_sources, mAudioPatches[i]->mPatch.num_sinks); - } - *num_patches = mAudioPatches.size(); - *generation = curAudioPortGeneration(); - ALOGV("listAudioPatches() got %zu patches needed %d", patchesWritten, *num_patches); - return NO_ERROR; + return mAudioPatches.listAudioPatches(num_patches, patches); } status_t AudioPolicyManager::setAudioPortConfig(const struct audio_port_config *config) @@ -2914,7 +2308,7 @@ status_t AudioPolicyManager::setAudioPortConfig(const struct audio_port_config * sp<AudioPortConfig> audioPortConfig; if (config->type == AUDIO_PORT_TYPE_MIX) { if (config->role == AUDIO_PORT_ROLE_SOURCE) { - sp<AudioOutputDescriptor> outputDesc = getOutputFromId(config->id); + sp<AudioOutputDescriptor> outputDesc = mOutputs.getOutputFromId(config->id); if (outputDesc == NULL) { return BAD_VALUE; } @@ -2923,7 +2317,7 @@ status_t AudioPolicyManager::setAudioPortConfig(const struct audio_port_config * outputDesc->mIoHandle); audioPortConfig = outputDesc; } else if (config->role == AUDIO_PORT_ROLE_SINK) { - sp<AudioInputDescriptor> inputDesc = getInputFromId(config->id); + sp<AudioInputDescriptor> inputDesc = mInputs.getInputFromId(config->id); if (inputDesc == NULL) { return BAD_VALUE; } @@ -2980,63 +2374,12 @@ status_t AudioPolicyManager::acquireSoundTriggerSession(audio_session_t *session *ioHandle = (audio_io_handle_t)mpClientInterface->newAudioUniqueId(); *device = getDeviceAndMixForInputSource(AUDIO_SOURCE_HOTWORD); - mSoundTriggerSessions.add(*session, *ioHandle); - - return NO_ERROR; -} - -status_t AudioPolicyManager::releaseSoundTriggerSession(audio_session_t session) -{ - ssize_t index = mSoundTriggerSessions.indexOfKey(session); - if (index < 0) { - ALOGW("acquireSoundTriggerSession() session %d not registered", session); - return BAD_VALUE; - } - - mSoundTriggerSessions.removeItem(session); - 
return NO_ERROR; -} - -status_t AudioPolicyManager::addAudioPatch(audio_patch_handle_t handle, - const sp<AudioPatch>& patch) -{ - ssize_t index = mAudioPatches.indexOfKey(handle); - - if (index >= 0) { - ALOGW("addAudioPatch() patch %d already in", handle); - return ALREADY_EXISTS; - } - mAudioPatches.add(handle, patch); - ALOGV("addAudioPatch() handle %d af handle %d num_sources %d num_sinks %d source handle %d" - "sink handle %d", - handle, patch->mAfPatchHandle, patch->mPatch.num_sources, patch->mPatch.num_sinks, - patch->mPatch.sources[0].id, patch->mPatch.sinks[0].id); - return NO_ERROR; -} - -status_t AudioPolicyManager::removeAudioPatch(audio_patch_handle_t handle) -{ - ssize_t index = mAudioPatches.indexOfKey(handle); - - if (index < 0) { - ALOGW("removeAudioPatch() patch %d not in", handle); - return ALREADY_EXISTS; - } - ALOGV("removeAudioPatch() handle %d af handle %d", handle, - mAudioPatches.valueAt(index)->mAfPatchHandle); - mAudioPatches.removeItemsAt(index); - return NO_ERROR; + return mSoundTriggerSessions.acquireSession(*session, *ioHandle); } // ---------------------------------------------------------------------------- // AudioPolicyManager // ---------------------------------------------------------------------------- - -uint32_t AudioPolicyManager::nextUniqueId() -{ - return android_atomic_inc(&mNextUniqueId); -} - uint32_t AudioPolicyManager::nextAudioPortGeneration() { return android_atomic_inc(&mAudioPortGeneration); @@ -3048,34 +2391,47 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa Thread(false), #endif //AUDIO_POLICY_TEST mPrimaryOutput((audio_io_handle_t)0), - mPhoneState(AUDIO_MODE_NORMAL), mLimitRingtoneVolume(false), mLastVoiceVolume(-1.0f), - mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0), mA2dpSuspended(false), - mSpeakerDrcEnabled(false), mNextUniqueId(1), + mSpeakerDrcEnabled(false), mAudioPortGeneration(1), mBeaconMuteRefCount(0), mBeaconPlayingRefCount(0), mBeaconMuted(false) { + 
audio_policy::EngineInstance *engineInstance = audio_policy::EngineInstance::getInstance(); + if (!engineInstance) { + ALOGE("%s: Could not get an instance of policy engine", __FUNCTION__); + return; + } + // Retrieve the Policy Manager Interface + mEngine = engineInstance->queryInterface<AudioPolicyManagerInterface>(); + if (mEngine == NULL) { + ALOGE("%s: Failed to get Policy Engine Interface", __FUNCTION__); + return; + } + mEngine->setObserver(this); + status_t status = mEngine->initCheck(); + ALOG_ASSERT(status == NO_ERROR, "Policy engine not initialized(err=%d)", status); + mUidCached = getuid(); mpClientInterface = clientInterface; - for (int i = 0; i < AUDIO_POLICY_FORCE_USE_CNT; i++) { - mForceUse[i] = AUDIO_POLICY_FORCE_NONE; - } - - mDefaultOutputDevice = new DeviceDescriptor(String8(""), AUDIO_DEVICE_OUT_SPEAKER); - if (loadAudioPolicyConfig(AUDIO_POLICY_VENDOR_CONFIG_FILE) != NO_ERROR) { - if (loadAudioPolicyConfig(AUDIO_POLICY_CONFIG_FILE) != NO_ERROR) { + mDefaultOutputDevice = new DeviceDescriptor(String8("Speaker"), AUDIO_DEVICE_OUT_SPEAKER); + if (ConfigParsingUtils::loadAudioPolicyConfig(AUDIO_POLICY_VENDOR_CONFIG_FILE, + mHwModules, mAvailableInputDevices, mAvailableOutputDevices, + mDefaultOutputDevice, mSpeakerDrcEnabled) != NO_ERROR) { + if (ConfigParsingUtils::loadAudioPolicyConfig(AUDIO_POLICY_CONFIG_FILE, + mHwModules, mAvailableInputDevices, mAvailableOutputDevices, + mDefaultOutputDevice, mSpeakerDrcEnabled) != NO_ERROR) { ALOGE("could not load audio policy configuration file, setting defaults"); defaultAudioPolicyConfig(); } } // mAvailableOutputDevices and mAvailableInputDevices now contain all attached devices - // must be done after reading the policy - initializeVolumeCurves(); + // must be done after reading the policy (since conditionned by Speaker Drc Enabling) + mEngine->initializeVolumeCurves(mSpeakerDrcEnabled); // open all output streams needed to access attached devices audio_devices_t outputDeviceTypes = 
mAvailableOutputDevices.types(); @@ -3103,13 +2459,13 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa continue; } audio_devices_t profileType = outProfile->mSupportedDevices.types(); - if ((profileType & mDefaultOutputDevice->mDeviceType) != AUDIO_DEVICE_NONE) { - profileType = mDefaultOutputDevice->mDeviceType; + if ((profileType & mDefaultOutputDevice->type()) != AUDIO_DEVICE_NONE) { + profileType = mDefaultOutputDevice->type(); } else { // chose first device present in mSupportedDevices also part of // outputDeviceTypes for (size_t k = 0; k < outProfile->mSupportedDevices.size(); k++) { - profileType = outProfile->mSupportedDevices[k]->mDeviceType; + profileType = outProfile->mSupportedDevices[k]->type(); if ((profileType & outputDeviceTypes) != 0) { break; } @@ -3144,13 +2500,12 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa outputDesc->mFormat = config.format; for (size_t k = 0; k < outProfile->mSupportedDevices.size(); k++) { - audio_devices_t type = outProfile->mSupportedDevices[k]->mDeviceType; + audio_devices_t type = outProfile->mSupportedDevices[k]->type(); ssize_t index = mAvailableOutputDevices.indexOf(outProfile->mSupportedDevices[k]); // give a valid ID to an attached device once confirmed it is reachable - if ((index >= 0) && (mAvailableOutputDevices[index]->mId == 0)) { - mAvailableOutputDevices[index]->mId = nextUniqueId(); - mAvailableOutputDevices[index]->mModule = mHwModules[i]; + if (index >= 0 && !mAvailableOutputDevices[index]->isAttached()) { + mAvailableOutputDevices[index]->attach(mHwModules[i]); } } if (mPrimaryOutput == 0 && @@ -3177,7 +2532,7 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa // inputDeviceTypes audio_devices_t profileType = AUDIO_DEVICE_NONE; for (size_t k = 0; k < inProfile->mSupportedDevices.size(); k++) { - profileType = inProfile->mSupportedDevices[k]->mDeviceType; + profileType = 
inProfile->mSupportedDevices[k]->type(); if (profileType & inputDeviceTypes) { break; } @@ -3213,13 +2568,12 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa if (status == NO_ERROR) { for (size_t k = 0; k < inProfile->mSupportedDevices.size(); k++) { - audio_devices_t type = inProfile->mSupportedDevices[k]->mDeviceType; + audio_devices_t type = inProfile->mSupportedDevices[k]->type(); ssize_t index = mAvailableInputDevices.indexOf(inProfile->mSupportedDevices[k]); // give a valid ID to an attached device once confirmed it is reachable - if ((index >= 0) && (mAvailableInputDevices[index]->mId == 0)) { - mAvailableInputDevices[index]->mId = nextUniqueId(); - mAvailableInputDevices[index]->mModule = mHwModules[i]; + if (index >= 0 && !mAvailableInputDevices[index]->isAttached()) { + mAvailableInputDevices[index]->attach(mHwModules[i]); } } mpClientInterface->closeInput(input); @@ -3232,24 +2586,30 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa } // make sure all attached devices have been allocated a unique ID for (size_t i = 0; i < mAvailableOutputDevices.size();) { - if (mAvailableOutputDevices[i]->mId == 0) { - ALOGW("Input device %08x unreachable", mAvailableOutputDevices[i]->mDeviceType); + if (!mAvailableOutputDevices[i]->isAttached()) { + ALOGW("Input device %08x unreachable", mAvailableOutputDevices[i]->type()); mAvailableOutputDevices.remove(mAvailableOutputDevices[i]); continue; } + // The device is now validated and can be appended to the available devices of the engine + mEngine->setDeviceConnectionState(mAvailableOutputDevices[i], + AUDIO_POLICY_DEVICE_STATE_AVAILABLE); i++; } for (size_t i = 0; i < mAvailableInputDevices.size();) { - if (mAvailableInputDevices[i]->mId == 0) { - ALOGW("Input device %08x unreachable", mAvailableInputDevices[i]->mDeviceType); + if (!mAvailableInputDevices[i]->isAttached()) { + ALOGW("Input device %08x unreachable", mAvailableInputDevices[i]->type()); 
mAvailableInputDevices.remove(mAvailableInputDevices[i]); continue; } + // The device is now validated and can be appended to the available devices of the engine + mEngine->setDeviceConnectionState(mAvailableInputDevices[i], + AUDIO_POLICY_DEVICE_STATE_AVAILABLE); i++; } // make sure default device is reachable if (mAvailableOutputDevices.indexOf(mDefaultOutputDevice) < 0) { - ALOGE("Default device %08x is unreachable", mDefaultOutputDevice->mDeviceType); + ALOGE("Default device %08x is unreachable", mDefaultOutputDevice->type()); } ALOGE_IF((mPrimaryOutput == 0), "Failed to open primary output"); @@ -3405,8 +2765,7 @@ bool AudioPolicyManager::threadLoop() audio_module_handle_t moduleHandle = outputDesc->mModule->mHandle; - mOutputs.removeItem(mPrimaryOutput); - + removeOutput(mPrimaryOutput); sp<AudioOutputDescriptor> outputDesc = new AudioOutputDescriptor(NULL); outputDesc->mDevice = AUDIO_DEVICE_OUT_SPEAKER; audio_config_t config = AUDIO_CONFIG_INITIALIZER; @@ -3465,16 +2824,19 @@ int AudioPolicyManager::testOutputIndex(audio_io_handle_t output) void AudioPolicyManager::addOutput(audio_io_handle_t output, sp<AudioOutputDescriptor> outputDesc) { - outputDesc->mIoHandle = output; - outputDesc->mId = nextUniqueId(); + outputDesc->setIoHandle(output); mOutputs.add(output, outputDesc); nextAudioPortGeneration(); } +void AudioPolicyManager::removeOutput(audio_io_handle_t output) +{ + mOutputs.removeItem(output); +} + void AudioPolicyManager::addInput(audio_io_handle_t input, sp<AudioInputDescriptor> inputDesc) { - inputDesc->mIoHandle = input; - inputDesc->mId = nextUniqueId(); + inputDesc->setIoHandle(input); mInputs.add(input, inputDesc); nextAudioPortGeneration(); } @@ -3493,11 +2855,11 @@ void AudioPolicyManager::findIoHandlesByAddress(sp<AudioOutputDescriptor> desc / } status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> devDesc, - audio_policy_dev_state_t state, - SortedVector<audio_io_handle_t>& outputs, - const String8 address) + 
audio_policy_dev_state_t state, + SortedVector<audio_io_handle_t>& outputs, + const String8 address) { - audio_devices_t device = devDesc->mDeviceType; + audio_devices_t device = devDesc->type(); sp<AudioOutputDescriptor> desc; // erase all current sample rates, formats and channel masks devDesc->clearCapabilities(); @@ -3507,7 +2869,7 @@ status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> de for (size_t i = 0; i < mOutputs.size(); i++) { desc = mOutputs.valueAt(i); if (!desc->isDuplicated() && (desc->mProfile->mSupportedDevices.types() & device)) { - if (!deviceDistinguishesOnAddress(device)) { + if (!device_distinguishes_on_address(device)) { ALOGV("checkOutputsForDevice(): adding opened output %d", mOutputs.keyAt(i)); outputs.add(mOutputs.keyAt(i)); } else { @@ -3527,7 +2889,7 @@ status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> de { sp<IOProfile> profile = mHwModules[i]->mOutputProfiles[j]; if (profile->mSupportedDevices.types() & device) { - if (!deviceDistinguishesOnAddress(device) || + if (!device_distinguishes_on_address(device) || address == profile->mSupportedDevices[0]->mAddress) { profiles.add(profile); ALOGV("checkOutputsForDevice(): adding profile %zu from module %zu", j, i); @@ -3663,15 +3025,15 @@ status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> de if (output != AUDIO_IO_HANDLE_NONE) { addOutput(output, desc); - if (deviceDistinguishesOnAddress(device) && address != "0") { - ssize_t index = mPolicyMixes.indexOfKey(address); - if (index >= 0) { - mPolicyMixes[index]->mOutput = desc; - desc->mPolicyMix = &mPolicyMixes[index]->mMix; - } else { + if (device_distinguishes_on_address(device) && address != "0") { + sp<AudioPolicyMix> policyMix; + if (mPolicyMixes.getAudioPolicyMix(address, policyMix) != NO_ERROR) { ALOGE("checkOutputsForDevice() cannot find policy for address %s", address.string()); } + policyMix->setOutput(desc); + desc->mPolicyMix = &(policyMix->getMix()); 
+ } else if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) == 0) { // no duplicated output for direct outputs and // outputs used by dynamic policy mixes @@ -3701,7 +3063,7 @@ status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> de ALOGW("checkOutputsForDevice() could not open dup output for %d and %d", mPrimaryOutput, output); mpClientInterface->closeOutput(output); - mOutputs.removeItem(output); + removeOutput(output); nextAudioPortGeneration(); output = AUDIO_IO_HANDLE_NONE; } @@ -3718,7 +3080,7 @@ status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> de outputs.add(output); devDesc->importAudioPort(profile); - if (deviceDistinguishesOnAddress(device)) { + if (device_distinguishes_on_address(device)) { ALOGV("checkOutputsForDevice(): setOutputDevice(dev=0x%x, addr=%s)", device, address.string()); setOutputDevice(output, device, true/*force*/, 0/*delay*/, @@ -3738,7 +3100,7 @@ status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> de desc = mOutputs.valueAt(i); if (!desc->isDuplicated()) { // exact match on device - if (deviceDistinguishesOnAddress(device) && + if (device_distinguishes_on_address(device) && (desc->mProfile->mSupportedDevices.types() == device)) { findIoHandlesByAddress(desc, device, address, outputs); } else if (!(desc->mProfile->mSupportedDevices.types() @@ -3781,9 +3143,9 @@ status_t AudioPolicyManager::checkOutputsForDevice(const sp<DeviceDescriptor> de } status_t AudioPolicyManager::checkInputsForDevice(audio_devices_t device, - audio_policy_dev_state_t state, - SortedVector<audio_io_handle_t>& inputs, - const String8 address) + audio_policy_dev_state_t state, + SortedVector<audio_io_handle_t>& inputs, + const String8 address) { sp<AudioInputDescriptor> desc; if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) { @@ -3810,7 +3172,7 @@ status_t AudioPolicyManager::checkInputsForDevice(audio_devices_t device, sp<IOProfile> profile = 
mHwModules[module_idx]->mInputProfiles[profile_index]; if (profile->mSupportedDevices.types() & (device & ~AUDIO_DEVICE_BIT_IN)) { - if (!deviceDistinguishesOnAddress(device) || + if (!device_distinguishes_on_address(device) || address == profile->mSupportedDevices[0]->mAddress) { profiles.add(profile); ALOGV("checkInputsForDevice(): adding profile %zu from module %zu", @@ -3982,12 +3344,7 @@ void AudioPolicyManager::closeOutput(audio_io_handle_t output) ALOGW("closeOutput() unknown output %d", output); return; } - - for (size_t i = 0; i < mPolicyMixes.size(); i++) { - if (mPolicyMixes[i]->mOutput == outputDesc) { - mPolicyMixes[i]->mOutput.clear(); - } - } + mPolicyMixes.closeOutput(outputDesc); // look for duplicated outputs connected to the output being removed. for (size_t i = 0; i < mOutputs.size(); i++) { @@ -4013,7 +3370,7 @@ void AudioPolicyManager::closeOutput(audio_io_handle_t output) ALOGV("closeOutput() closing also duplicated output %d", duplicatedOutput); mpClientInterface->closeOutput(duplicatedOutput); - mOutputs.removeItem(duplicatedOutput); + removeOutput(duplicatedOutput); } } @@ -4032,7 +3389,7 @@ void AudioPolicyManager::closeOutput(audio_io_handle_t output) mpClientInterface->setParameters(output, param.toString()); mpClientInterface->closeOutput(output); - mOutputs.removeItem(output); + removeOutput(output); mPreviousOutputs = mOutputs; } @@ -4061,7 +3418,7 @@ void AudioPolicyManager::closeInput(audio_io_handle_t input) } SortedVector<audio_io_handle_t> AudioPolicyManager::getOutputsForDevice(audio_devices_t device, - DefaultKeyedVector<audio_io_handle_t, sp<AudioOutputDescriptor> > openOutputs) + AudioOutputCollection openOutputs) { SortedVector<audio_io_handle_t> outputs; @@ -4078,7 +3435,7 @@ SortedVector<audio_io_handle_t> AudioPolicyManager::getOutputsForDevice(audio_de } bool AudioPolicyManager::vectorsEqual(SortedVector<audio_io_handle_t>& outputs1, - SortedVector<audio_io_handle_t>& outputs2) + SortedVector<audio_io_handle_t>& 
outputs2) { if (outputs1.size() != outputs2.size()) { return false; @@ -4122,7 +3479,7 @@ void AudioPolicyManager::checkOutputForStrategy(routing_strategy strategy) // mute strategy while moving tracks from one output to another for (size_t i = 0; i < srcOutputs.size(); i++) { sp<AudioOutputDescriptor> desc = mOutputs.valueFor(srcOutputs[i]); - if (desc->isStrategyActive(strategy)) { + if (isStrategyActive(desc, strategy)) { setStrategyMute(strategy, true, srcOutputs[i]); setStrategyMute(strategy, false, srcOutputs[i], MUTE_TIME_MS, newDevice); } @@ -4161,10 +3518,10 @@ void AudioPolicyManager::checkOutputForStrategy(routing_strategy strategy) void AudioPolicyManager::checkOutputForAllStrategies() { - if (mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) + if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) checkOutputForStrategy(STRATEGY_ENFORCED_AUDIBLE); checkOutputForStrategy(STRATEGY_PHONE); - if (mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) + if (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) != AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) checkOutputForStrategy(STRATEGY_ENFORCED_AUDIBLE); checkOutputForStrategy(STRATEGY_SONIFICATION); checkOutputForStrategy(STRATEGY_SONIFICATION_RESPECTFUL); @@ -4174,21 +3531,9 @@ void AudioPolicyManager::checkOutputForAllStrategies() checkOutputForStrategy(STRATEGY_REROUTING); } -audio_io_handle_t AudioPolicyManager::getA2dpOutput() -{ - for (size_t i = 0; i < mOutputs.size(); i++) { - sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(i); - if (!outputDesc->isDuplicated() && outputDesc->device() & AUDIO_DEVICE_OUT_ALL_A2DP) { - return mOutputs.keyAt(i); - } - } - - return 0; -} - void AudioPolicyManager::checkA2dpSuspend() { - audio_io_handle_t a2dpOutput = getA2dpOutput(); + audio_io_handle_t a2dpOutput = mOutputs.getA2dpOutput(); if (a2dpOutput == 0) { mA2dpSuspended = false; return; @@ -4212,20 +3557,20 @@ void 
AudioPolicyManager::checkA2dpSuspend() // if (mA2dpSuspended) { if ((!isScoConnected || - ((mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION] != AUDIO_POLICY_FORCE_BT_SCO) && - (mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] != AUDIO_POLICY_FORCE_BT_SCO))) && - ((mPhoneState != AUDIO_MODE_IN_CALL) && - (mPhoneState != AUDIO_MODE_RINGTONE))) { + ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) != AUDIO_POLICY_FORCE_BT_SCO) && + (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) != AUDIO_POLICY_FORCE_BT_SCO))) && + ((mEngine->getPhoneState() != AUDIO_MODE_IN_CALL) && + (mEngine->getPhoneState() != AUDIO_MODE_RINGTONE))) { mpClientInterface->restoreOutput(a2dpOutput); mA2dpSuspended = false; } } else { if ((isScoConnected && - ((mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION] == AUDIO_POLICY_FORCE_BT_SCO) || - (mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] == AUDIO_POLICY_FORCE_BT_SCO))) || - ((mPhoneState == AUDIO_MODE_IN_CALL) || - (mPhoneState == AUDIO_MODE_RINGTONE))) { + ((mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION) == AUDIO_POLICY_FORCE_BT_SCO) || + (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_RECORD) == AUDIO_POLICY_FORCE_BT_SCO))) || + ((mEngine->getPhoneState() == AUDIO_MODE_IN_CALL) || + (mEngine->getPhoneState() == AUDIO_MODE_RINGTONE))) { mpClientInterface->suspendOutput(a2dpOutput); mA2dpSuspended = true; @@ -4268,27 +3613,27 @@ audio_devices_t AudioPolicyManager::getNewOutputDevice(audio_io_handle_t output, // use device for strategy DTMF // 9: the strategy for beacon, a.k.a. 
"transmitted through speaker" is active on the output: // use device for strategy t-t-s - if (outputDesc->isStrategyActive(STRATEGY_ENFORCED_AUDIBLE) && - mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) { + if (isStrategyActive(outputDesc, STRATEGY_ENFORCED_AUDIBLE) && + mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED) { device = getDeviceForStrategy(STRATEGY_ENFORCED_AUDIBLE, fromCache); } else if (isInCall() || - outputDesc->isStrategyActive(STRATEGY_PHONE)) { + isStrategyActive(outputDesc, STRATEGY_PHONE)) { device = getDeviceForStrategy(STRATEGY_PHONE, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_ENFORCED_AUDIBLE)) { + } else if (isStrategyActive(outputDesc, STRATEGY_ENFORCED_AUDIBLE)) { device = getDeviceForStrategy(STRATEGY_ENFORCED_AUDIBLE, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_SONIFICATION)) { + } else if (isStrategyActive(outputDesc, STRATEGY_SONIFICATION)) { device = getDeviceForStrategy(STRATEGY_SONIFICATION, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_SONIFICATION_RESPECTFUL)) { + } else if (isStrategyActive(outputDesc, STRATEGY_SONIFICATION_RESPECTFUL)) { device = getDeviceForStrategy(STRATEGY_SONIFICATION_RESPECTFUL, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_ACCESSIBILITY)) { + } else if (isStrategyActive(outputDesc, STRATEGY_ACCESSIBILITY)) { device = getDeviceForStrategy(STRATEGY_ACCESSIBILITY, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_MEDIA)) { + } else if (isStrategyActive(outputDesc, STRATEGY_MEDIA)) { device = getDeviceForStrategy(STRATEGY_MEDIA, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_DTMF)) { + } else if (isStrategyActive(outputDesc, STRATEGY_DTMF)) { device = getDeviceForStrategy(STRATEGY_DTMF, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_TRANSMITTED_THROUGH_SPEAKER)) { + } else if (isStrategyActive(outputDesc, 
STRATEGY_TRANSMITTED_THROUGH_SPEAKER)) { device = getDeviceForStrategy(STRATEGY_TRANSMITTED_THROUGH_SPEAKER, fromCache); - } else if (outputDesc->isStrategyActive(STRATEGY_REROUTING)) { + } else if (isStrategyActive(outputDesc, STRATEGY_REROUTING)) { device = getDeviceForStrategy(STRATEGY_REROUTING, fromCache); } @@ -4328,12 +3673,12 @@ audio_devices_t AudioPolicyManager::getDevicesForStream(audio_stream_type_t stre return AUDIO_DEVICE_NONE; } audio_devices_t devices; - AudioPolicyManager::routing_strategy strategy = getStrategy(stream); + routing_strategy strategy = getStrategy(stream); devices = getDeviceForStrategy(strategy, true /*fromCache*/); SortedVector<audio_io_handle_t> outputs = getOutputsForDevice(devices, mOutputs); for (size_t i = 0; i < outputs.size(); i++) { sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(outputs[i]); - if (outputDesc->isStrategyActive(strategy)) { + if (isStrategyActive(outputDesc, strategy)) { devices = outputDesc->device(); break; } @@ -4349,39 +3694,10 @@ audio_devices_t AudioPolicyManager::getDevicesForStream(audio_stream_type_t stre return devices; } -AudioPolicyManager::routing_strategy AudioPolicyManager::getStrategy( - audio_stream_type_t stream) { - +routing_strategy AudioPolicyManager::getStrategy(audio_stream_type_t stream) const +{ ALOG_ASSERT(stream != AUDIO_STREAM_PATCH,"getStrategy() called for AUDIO_STREAM_PATCH"); - - // stream to strategy mapping - switch (stream) { - case AUDIO_STREAM_VOICE_CALL: - case AUDIO_STREAM_BLUETOOTH_SCO: - return STRATEGY_PHONE; - case AUDIO_STREAM_RING: - case AUDIO_STREAM_ALARM: - return STRATEGY_SONIFICATION; - case AUDIO_STREAM_NOTIFICATION: - return STRATEGY_SONIFICATION_RESPECTFUL; - case AUDIO_STREAM_DTMF: - return STRATEGY_DTMF; - default: - ALOGE("unknown stream type %d", stream); - case AUDIO_STREAM_SYSTEM: - // NOTE: SYSTEM stream uses MEDIA strategy because muting music and switching outputs - // while key clicks are played produces a poor result - case 
AUDIO_STREAM_MUSIC: - return STRATEGY_MEDIA; - case AUDIO_STREAM_ENFORCED_AUDIBLE: - return STRATEGY_ENFORCED_AUDIBLE; - case AUDIO_STREAM_TTS: - return STRATEGY_TRANSMITTED_THROUGH_SPEAKER; - case AUDIO_STREAM_ACCESSIBILITY: - return STRATEGY_ACCESSIBILITY; - case AUDIO_STREAM_REROUTING: - return STRATEGY_REROUTING; - } + return mEngine->getStrategyForStream(stream); } uint32_t AudioPolicyManager::getStrategyForAttr(const audio_attributes_t *attr) { @@ -4392,45 +3708,8 @@ uint32_t AudioPolicyManager::getStrategyForAttr(const audio_attributes_t *attr) if ((attr->flags & AUDIO_FLAG_AUDIBILITY_ENFORCED) == AUDIO_FLAG_AUDIBILITY_ENFORCED) { return (uint32_t) STRATEGY_ENFORCED_AUDIBLE; } - // usage to strategy mapping - switch (attr->usage) { - case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY: - if (isStreamActive(AUDIO_STREAM_RING) || isStreamActive(AUDIO_STREAM_ALARM)) { - return (uint32_t) STRATEGY_SONIFICATION; - } - if (isInCall()) { - return (uint32_t) STRATEGY_PHONE; - } - return (uint32_t) STRATEGY_ACCESSIBILITY; - - case AUDIO_USAGE_MEDIA: - case AUDIO_USAGE_GAME: - case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE: - case AUDIO_USAGE_ASSISTANCE_SONIFICATION: - return (uint32_t) STRATEGY_MEDIA; - - case AUDIO_USAGE_VOICE_COMMUNICATION: - return (uint32_t) STRATEGY_PHONE; - - case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING: - return (uint32_t) STRATEGY_DTMF; - - case AUDIO_USAGE_ALARM: - case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE: - return (uint32_t) STRATEGY_SONIFICATION; - - case AUDIO_USAGE_NOTIFICATION: - case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST: - case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT: - case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED: - case AUDIO_USAGE_NOTIFICATION_EVENT: - return (uint32_t) STRATEGY_SONIFICATION_RESPECTFUL; - - case AUDIO_USAGE_UNKNOWN: - default: - return (uint32_t) STRATEGY_MEDIA; - } + return static_cast<uint32_t>(mEngine->getStrategyForUsage(attr->usage)); } void 
AudioPolicyManager::handleNotificationRoutingForStream(audio_stream_type_t stream) { @@ -4444,21 +3723,6 @@ void AudioPolicyManager::handleNotificationRoutingForStream(audio_stream_type_t } } -bool AudioPolicyManager::isAnyOutputActive(audio_stream_type_t streamToIgnore) { - for (size_t s = 0 ; s < AUDIO_STREAM_CNT ; s++) { - if (s == (size_t) streamToIgnore) { - continue; - } - for (size_t i = 0; i < mOutputs.size(); i++) { - const sp<AudioOutputDescriptor> outputDesc = mOutputs.valueAt(i); - if (outputDesc->mRefCount[s] != 0) { - return true; - } - } - } - return false; -} - uint32_t AudioPolicyManager::handleEventForBeacon(int event) { switch(event) { case STARTING_OUTPUT: @@ -4513,292 +3777,14 @@ uint32_t AudioPolicyManager::setBeaconMute(bool mute) { } audio_devices_t AudioPolicyManager::getDeviceForStrategy(routing_strategy strategy, - bool fromCache) + bool fromCache) { - uint32_t device = AUDIO_DEVICE_NONE; - if (fromCache) { ALOGVV("getDeviceForStrategy() from cache strategy %d, device %x", strategy, mDeviceForStrategy[strategy]); return mDeviceForStrategy[strategy]; } - audio_devices_t availableOutputDeviceTypes = mAvailableOutputDevices.types(); - switch (strategy) { - - case STRATEGY_TRANSMITTED_THROUGH_SPEAKER: - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER; - if (!device) { - ALOGE("getDeviceForStrategy() no device found for "\ - "STRATEGY_TRANSMITTED_THROUGH_SPEAKER"); - } - break; - - case STRATEGY_SONIFICATION_RESPECTFUL: - if (isInCall()) { - device = getDeviceForStrategy(STRATEGY_SONIFICATION, false /*fromCache*/); - } else if (isStreamActiveRemotely(AUDIO_STREAM_MUSIC, - SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) { - // while media is playing on a remote device, use the the sonification behavior. - // Note that we test this usecase before testing if media is playing because - // the isStreamActive() method only informs about the activity of a stream, not - // if it's for local playback. 
Note also that we use the same delay between both tests - device = getDeviceForStrategy(STRATEGY_SONIFICATION, false /*fromCache*/); - //user "safe" speaker if available instead of normal speaker to avoid triggering - //other acoustic safety mechanisms for notification - if (device == AUDIO_DEVICE_OUT_SPEAKER && (availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER_SAFE)) - device = AUDIO_DEVICE_OUT_SPEAKER_SAFE; - } else if (isStreamActive(AUDIO_STREAM_MUSIC, SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY)) { - // while media is playing (or has recently played), use the same device - device = getDeviceForStrategy(STRATEGY_MEDIA, false /*fromCache*/); - } else { - // when media is not playing anymore, fall back on the sonification behavior - device = getDeviceForStrategy(STRATEGY_SONIFICATION, false /*fromCache*/); - //user "safe" speaker if available instead of normal speaker to avoid triggering - //other acoustic safety mechanisms for notification - if (device == AUDIO_DEVICE_OUT_SPEAKER && (availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER_SAFE)) - device = AUDIO_DEVICE_OUT_SPEAKER_SAFE; - } - - break; - - case STRATEGY_DTMF: - if (!isInCall()) { - // when off call, DTMF strategy follows the same rules as MEDIA strategy - device = getDeviceForStrategy(STRATEGY_MEDIA, false /*fromCache*/); - break; - } - // when in call, DTMF and PHONE strategies follow the same rules - // FALL THROUGH - - case STRATEGY_PHONE: - // Force use of only devices on primary output if: - // - in call AND - // - cannot route from voice call RX OR - // - audio HAL version is < 3.0 and TX device is on the primary HW module - if (mPhoneState == AUDIO_MODE_IN_CALL) { - audio_devices_t txDevice = - getDeviceAndMixForInputSource(AUDIO_SOURCE_VOICE_COMMUNICATION); - sp<AudioOutputDescriptor> hwOutputDesc = mOutputs.valueFor(mPrimaryOutput); - if (((mAvailableInputDevices.types() & - AUDIO_DEVICE_IN_TELEPHONY_RX & ~AUDIO_DEVICE_BIT_IN) == 0) || - (((txDevice & availablePrimaryInputDevices() & 
~AUDIO_DEVICE_BIT_IN) != 0) && - (hwOutputDesc->getAudioPort()->mModule->mHalVersion < - AUDIO_DEVICE_API_VERSION_3_0))) { - availableOutputDeviceTypes = availablePrimaryOutputDevices(); - } - } - // for phone strategy, we first consider the forced use and then the available devices by order - // of priority - switch (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]) { - case AUDIO_POLICY_FORCE_BT_SCO: - if (!isInCall() || strategy != STRATEGY_DTMF) { - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT; - if (device) break; - } - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_SCO; - if (device) break; - // if SCO device is requested but no SCO device is available, fall back to default case - // FALL THROUGH - - default: // FORCE_NONE - // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP - if (!isInCall() && - (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) && - (getA2dpOutput() != 0)) { - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES; - if (device) break; - } - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_WIRED_HEADPHONE; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_WIRED_HEADSET; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_DEVICE; - if (device) break; - if (mPhoneState != AUDIO_MODE_IN_CALL) { - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_ACCESSORY; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_AUX_DIGITAL; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET; - if (device) 
break; - } - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_EARPIECE; - if (device) break; - device = mDefaultOutputDevice->mDeviceType; - if (device == AUDIO_DEVICE_NONE) { - ALOGE("getDeviceForStrategy() no device found for STRATEGY_PHONE"); - } - break; - - case AUDIO_POLICY_FORCE_SPEAKER: - // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to - // A2DP speaker when forcing to speaker output - if (!isInCall() && - (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) && - (getA2dpOutput() != 0)) { - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER; - if (device) break; - } - if (!isInCall()) { - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_ACCESSORY; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_DEVICE; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_AUX_DIGITAL; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET; - if (device) break; - } - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_LINE; - if (device) break; - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER; - if (device) break; - device = mDefaultOutputDevice->mDeviceType; - if (device == AUDIO_DEVICE_NONE) { - ALOGE("getDeviceForStrategy() no device found for STRATEGY_PHONE, FORCE_SPEAKER"); - } - break; - } - break; - - case STRATEGY_SONIFICATION: - - // If incall, just select the STRATEGY_PHONE device: The rest of the behavior is handled by - // handleIncallSonification(). 
- if (isInCall()) { - device = getDeviceForStrategy(STRATEGY_PHONE, false /*fromCache*/); - break; - } - // FALL THROUGH - - case STRATEGY_ENFORCED_AUDIBLE: - // strategy STRATEGY_ENFORCED_AUDIBLE uses same routing policy as STRATEGY_SONIFICATION - // except: - // - when in call where it doesn't default to STRATEGY_PHONE behavior - // - in countries where not enforced in which case it follows STRATEGY_MEDIA - - if ((strategy == STRATEGY_SONIFICATION) || - (mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)) { - device = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER; - if (device == AUDIO_DEVICE_NONE) { - ALOGE("getDeviceForStrategy() speaker device not found for STRATEGY_SONIFICATION"); - } - } - // The second device used for sonification is the same as the device used by media strategy - // FALL THROUGH - - // FIXME: STRATEGY_ACCESSIBILITY and STRATEGY_REROUTING follow STRATEGY_MEDIA for now - case STRATEGY_ACCESSIBILITY: - if (strategy == STRATEGY_ACCESSIBILITY) { - // do not route accessibility prompts to a digital output currently configured with a - // compressed format as they would likely not be mixed and dropped. - for (size_t i = 0; i < mOutputs.size(); i++) { - sp<AudioOutputDescriptor> desc = mOutputs.valueAt(i); - audio_devices_t devices = desc->device() & - (AUDIO_DEVICE_OUT_HDMI | AUDIO_DEVICE_OUT_SPDIF | AUDIO_DEVICE_OUT_HDMI_ARC); - if (desc->isActive() && !audio_is_linear_pcm(desc->mFormat) && - devices != AUDIO_DEVICE_NONE) { - availableOutputDeviceTypes = availableOutputDeviceTypes & ~devices; - } - } - } - // FALL THROUGH - - case STRATEGY_REROUTING: - case STRATEGY_MEDIA: { - uint32_t device2 = AUDIO_DEVICE_NONE; - if (strategy != STRATEGY_SONIFICATION) { - // no sonification on remote submix (e.g. 
WFD) - if (mAvailableOutputDevices.getDevice(AUDIO_DEVICE_OUT_REMOTE_SUBMIX, String8("0")) != 0) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_REMOTE_SUBMIX; - } - } - if ((device2 == AUDIO_DEVICE_NONE) && - (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] != AUDIO_POLICY_FORCE_NO_BT_A2DP) && - (getA2dpOutput() != 0)) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP; - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES; - } - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER; - } - } - if ((device2 == AUDIO_DEVICE_NONE) && - (mForceUse[AUDIO_POLICY_FORCE_FOR_MEDIA] == AUDIO_POLICY_FORCE_SPEAKER)) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER; - } - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_WIRED_HEADPHONE; - } - if ((device2 == AUDIO_DEVICE_NONE)) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_LINE; - } - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_WIRED_HEADSET; - } - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_ACCESSORY; - } - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_USB_DEVICE; - } - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET; - } - if ((device2 == AUDIO_DEVICE_NONE) && (strategy != STRATEGY_SONIFICATION)) { - // no sonification on aux digital (e.g. 
HDMI) - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_AUX_DIGITAL; - } - if ((device2 == AUDIO_DEVICE_NONE) && - (mForceUse[AUDIO_POLICY_FORCE_FOR_DOCK] == AUDIO_POLICY_FORCE_ANALOG_DOCK)) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET; - } - if (device2 == AUDIO_DEVICE_NONE) { - device2 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPEAKER; - } - int device3 = AUDIO_DEVICE_NONE; - if (strategy == STRATEGY_MEDIA) { - // ARC, SPDIF and AUX_LINE can co-exist with others. - device3 = availableOutputDeviceTypes & AUDIO_DEVICE_OUT_HDMI_ARC; - device3 |= (availableOutputDeviceTypes & AUDIO_DEVICE_OUT_SPDIF); - device3 |= (availableOutputDeviceTypes & AUDIO_DEVICE_OUT_AUX_LINE); - } - - device2 |= device3; - // device is DEVICE_OUT_SPEAKER if we come from case STRATEGY_SONIFICATION or - // STRATEGY_ENFORCED_AUDIBLE, AUDIO_DEVICE_NONE otherwise - device |= device2; - - // If hdmi system audio mode is on, remove speaker out of output list. - if ((strategy == STRATEGY_MEDIA) && - (mForceUse[AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO] == - AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED)) { - device &= ~AUDIO_DEVICE_OUT_SPEAKER; - } - - if (device) break; - device = mDefaultOutputDevice->mDeviceType; - if (device == AUDIO_DEVICE_NONE) { - ALOGE("getDeviceForStrategy() no device found for STRATEGY_MEDIA"); - } - } break; - - default: - ALOGW("getDeviceForStrategy() unknown strategy: %d", strategy); - break; - } - - ALOGVV("getDeviceForStrategy() strategy %d, device %x", strategy, device); - return device; + return mEngine->getDeviceForStrategy(strategy); } void AudioPolicyManager::updateDevicesAndOutputs() @@ -4849,7 +3835,7 @@ uint32_t AudioPolicyManager::checkDeviceMuteStrategies(sp<AudioOutputDescriptor> ALOGVV("checkDeviceMuteStrategies() %s strategy %d (curDevice %04x) on output %d", mute ? "muting" : "unmuting", i, curDevice, curOutput); setStrategyMute((routing_strategy)i, mute, curOutput, mute ? 
0 : delayMs); - if (desc->isStrategyActive((routing_strategy)i)) { + if (isStrategyActive(desc, (routing_strategy)i)) { if (mute) { // FIXME: should not need to double latency if volume could be applied // immediately by the audioflinger mixer. We must account for the delay @@ -4872,7 +3858,7 @@ uint32_t AudioPolicyManager::checkDeviceMuteStrategies(sp<AudioOutputDescriptor> muteWaitMs = outputDesc->latency() * 2; } for (size_t i = 0; i < NUM_STRATEGIES; i++) { - if (outputDesc->isStrategyActive((routing_strategy)i)) { + if (isStrategyActive(outputDesc, (routing_strategy)i)) { setStrategyMute((routing_strategy)i, true, outputDesc->mIoHandle); // do tempMute unmute after twice the mute wait time setStrategyMute((routing_strategy)i, false, outputDesc->mIoHandle, @@ -4978,8 +3964,7 @@ uint32_t AudioPolicyManager::setOutputDevice(audio_io_handle_t output, status, afPatchHandle, patch.num_sources, patch.num_sinks); if (status == NO_ERROR) { if (index < 0) { - patchDesc = new AudioPatch((audio_patch_handle_t)nextUniqueId(), - &patch, mUidCached); + patchDesc = new AudioPatch(&patch, mUidCached); addAudioPatch(patchDesc->mHandle, patchDesc); } else { patchDesc->mPatch = patch; @@ -4998,7 +3983,7 @@ uint32_t AudioPolicyManager::setOutputDevice(audio_io_handle_t output, // inform all input as well for (size_t i = 0; i < mInputs.size(); i++) { const sp<AudioInputDescriptor> inputDescriptor = mInputs.valueAt(i); - if (!isVirtualInputDevice(inputDescriptor->mDevice)) { + if (!is_virtual_input_device(inputDescriptor->mDevice)) { AudioParameter inputCmd = AudioParameter(); ALOGV("%s: inform input %d of device:%d", __func__, inputDescriptor->mIoHandle, device); @@ -5085,8 +4070,7 @@ status_t AudioPolicyManager::setInputDevice(audio_io_handle_t input, status, afPatchHandle); if (status == NO_ERROR) { if (index < 0) { - patchDesc = new AudioPatch((audio_patch_handle_t)nextUniqueId(), - &patch, mUidCached); + patchDesc = new AudioPatch(&patch, mUidCached); 
addAudioPatch(patchDesc->mHandle, patchDesc); } else { patchDesc->mPatch = patch; @@ -5128,12 +4112,12 @@ status_t AudioPolicyManager::resetInputDevice(audio_io_handle_t input, return status; } -sp<AudioPolicyManager::IOProfile> AudioPolicyManager::getInputProfile(audio_devices_t device, - String8 address, - uint32_t& samplingRate, - audio_format_t format, - audio_channel_mask_t channelMask, - audio_input_flags_t flags) +sp<IOProfile> AudioPolicyManager::getInputProfile(audio_devices_t device, + String8 address, + uint32_t& samplingRate, + audio_format_t format, + audio_channel_mask_t channelMask, + audio_input_flags_t flags) { // Choose an input profile based on the requested capture parameters: select the first available // profile supporting all requested parameters. @@ -5160,500 +4144,35 @@ sp<AudioPolicyManager::IOProfile> AudioPolicyManager::getInputProfile(audio_devi audio_devices_t AudioPolicyManager::getDeviceAndMixForInputSource(audio_source_t inputSource, - AudioMix **policyMix) + AudioMix **policyMix) { - audio_devices_t availableDeviceTypes = mAvailableInputDevices.types() & - ~AUDIO_DEVICE_BIT_IN; + audio_devices_t availableDeviceTypes = mAvailableInputDevices.types() & ~AUDIO_DEVICE_BIT_IN; + audio_devices_t selectedDeviceFromMix = + mPolicyMixes.getDeviceAndMixForInputSource(inputSource, availableDeviceTypes, policyMix); - for (size_t i = 0; i < mPolicyMixes.size(); i++) { - if (mPolicyMixes[i]->mMix.mMixType != MIX_TYPE_RECORDERS) { - continue; - } - for (size_t j = 0; j < mPolicyMixes[i]->mMix.mCriteria.size(); j++) { - if ((RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET == mPolicyMixes[i]->mMix.mCriteria[j].mRule && - mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mSource == inputSource) || - (RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET == mPolicyMixes[i]->mMix.mCriteria[j].mRule && - mPolicyMixes[i]->mMix.mCriteria[j].mAttr.mSource != inputSource)) { - if (availableDeviceTypes & AUDIO_DEVICE_IN_REMOTE_SUBMIX) { - if (policyMix != NULL) { - *policyMix = 
&mPolicyMixes[i]->mMix; - } - return AUDIO_DEVICE_IN_REMOTE_SUBMIX; - } - break; - } - } + if (selectedDeviceFromMix != AUDIO_DEVICE_NONE) { + return selectedDeviceFromMix; } - return getDeviceForInputSource(inputSource); } audio_devices_t AudioPolicyManager::getDeviceForInputSource(audio_source_t inputSource) { - uint32_t device = AUDIO_DEVICE_NONE; - audio_devices_t availableDeviceTypes = mAvailableInputDevices.types() & - ~AUDIO_DEVICE_BIT_IN; - - switch (inputSource) { - case AUDIO_SOURCE_VOICE_UPLINK: - if (availableDeviceTypes & AUDIO_DEVICE_IN_VOICE_CALL) { - device = AUDIO_DEVICE_IN_VOICE_CALL; - break; - } - break; - - case AUDIO_SOURCE_DEFAULT: - case AUDIO_SOURCE_MIC: - if (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_A2DP) { - device = AUDIO_DEVICE_IN_BLUETOOTH_A2DP; - } else if ((mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] == AUDIO_POLICY_FORCE_BT_SCO) && - (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) { - device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) { - device = AUDIO_DEVICE_IN_WIRED_HEADSET; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) { - device = AUDIO_DEVICE_IN_USB_DEVICE; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { - device = AUDIO_DEVICE_IN_BUILTIN_MIC; - } - break; - - case AUDIO_SOURCE_VOICE_COMMUNICATION: - // Allow only use of devices on primary input if in call and HAL does not support routing - // to voice call path. 
- if ((mPhoneState == AUDIO_MODE_IN_CALL) && - (mAvailableOutputDevices.types() & AUDIO_DEVICE_OUT_TELEPHONY_TX) == 0) { - availableDeviceTypes = availablePrimaryInputDevices() & ~AUDIO_DEVICE_BIT_IN; - } - - switch (mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]) { - case AUDIO_POLICY_FORCE_BT_SCO: - // if SCO device is requested but no SCO device is available, fall back to default case - if (availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) { - device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET; - break; - } - // FALL THROUGH - - default: // FORCE_NONE - if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) { - device = AUDIO_DEVICE_IN_WIRED_HEADSET; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) { - device = AUDIO_DEVICE_IN_USB_DEVICE; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { - device = AUDIO_DEVICE_IN_BUILTIN_MIC; - } - break; - - case AUDIO_POLICY_FORCE_SPEAKER: - if (availableDeviceTypes & AUDIO_DEVICE_IN_BACK_MIC) { - device = AUDIO_DEVICE_IN_BACK_MIC; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { - device = AUDIO_DEVICE_IN_BUILTIN_MIC; - } - break; - } - break; - - case AUDIO_SOURCE_VOICE_RECOGNITION: - case AUDIO_SOURCE_HOTWORD: - if (mForceUse[AUDIO_POLICY_FORCE_FOR_RECORD] == AUDIO_POLICY_FORCE_BT_SCO && - availableDeviceTypes & AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET) { - device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) { - device = AUDIO_DEVICE_IN_WIRED_HEADSET; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_USB_DEVICE) { - device = AUDIO_DEVICE_IN_USB_DEVICE; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { - device = AUDIO_DEVICE_IN_BUILTIN_MIC; - } - break; - case AUDIO_SOURCE_CAMCORDER: - if (availableDeviceTypes & AUDIO_DEVICE_IN_BACK_MIC) { - device = AUDIO_DEVICE_IN_BACK_MIC; - } else if (availableDeviceTypes & AUDIO_DEVICE_IN_BUILTIN_MIC) { - device = AUDIO_DEVICE_IN_BUILTIN_MIC; - } 
- break; - case AUDIO_SOURCE_VOICE_DOWNLINK: - case AUDIO_SOURCE_VOICE_CALL: - if (availableDeviceTypes & AUDIO_DEVICE_IN_VOICE_CALL) { - device = AUDIO_DEVICE_IN_VOICE_CALL; - } - break; - case AUDIO_SOURCE_REMOTE_SUBMIX: - if (availableDeviceTypes & AUDIO_DEVICE_IN_REMOTE_SUBMIX) { - device = AUDIO_DEVICE_IN_REMOTE_SUBMIX; - } - break; - case AUDIO_SOURCE_FM_TUNER: - if (availableDeviceTypes & AUDIO_DEVICE_IN_FM_TUNER) { - device = AUDIO_DEVICE_IN_FM_TUNER; - } - break; - default: - ALOGW("getDeviceForInputSource() invalid input source %d", inputSource); - break; - } - ALOGV("getDeviceForInputSource()input source %d, device %08x", inputSource, device); - return device; -} - -bool AudioPolicyManager::isVirtualInputDevice(audio_devices_t device) -{ - if ((device & AUDIO_DEVICE_BIT_IN) != 0) { - device &= ~AUDIO_DEVICE_BIT_IN; - if ((popcount(device) == 1) && ((device & ~APM_AUDIO_IN_DEVICE_VIRTUAL_ALL) == 0)) - return true; - } - return false; -} - -bool AudioPolicyManager::deviceDistinguishesOnAddress(audio_devices_t device) { - return ((device & APM_AUDIO_DEVICE_MATCH_ADDRESS_ALL & ~AUDIO_DEVICE_BIT_IN) != 0); -} - -audio_io_handle_t AudioPolicyManager::getActiveInput(bool ignoreVirtualInputs) -{ - for (size_t i = 0; i < mInputs.size(); i++) { - const sp<AudioInputDescriptor> input_descriptor = mInputs.valueAt(i); - if ((input_descriptor->mRefCount > 0) - && (!ignoreVirtualInputs || !isVirtualInputDevice(input_descriptor->mDevice))) { - return mInputs.keyAt(i); - } - } - return 0; -} - -uint32_t AudioPolicyManager::activeInputsCount() const -{ - uint32_t count = 0; - for (size_t i = 0; i < mInputs.size(); i++) { - const sp<AudioInputDescriptor> desc = mInputs.valueAt(i); - if (desc->mRefCount > 0) { - count++; - } - } - return count; -} - - -audio_devices_t AudioPolicyManager::getDeviceForVolume(audio_devices_t device) -{ - if (device == AUDIO_DEVICE_NONE) { - // this happens when forcing a route update and no track is active on an output. 
- // In this case the returned category is not important. - device = AUDIO_DEVICE_OUT_SPEAKER; - } else if (popcount(device) > 1) { - // Multiple device selection is either: - // - speaker + one other device: give priority to speaker in this case. - // - one A2DP device + another device: happens with duplicated output. In this case - // retain the device on the A2DP output as the other must not correspond to an active - // selection if not the speaker. - // - HDMI-CEC system audio mode only output: give priority to available item in order. - if (device & AUDIO_DEVICE_OUT_SPEAKER) { - device = AUDIO_DEVICE_OUT_SPEAKER; - } else if (device & AUDIO_DEVICE_OUT_HDMI_ARC) { - device = AUDIO_DEVICE_OUT_HDMI_ARC; - } else if (device & AUDIO_DEVICE_OUT_AUX_LINE) { - device = AUDIO_DEVICE_OUT_AUX_LINE; - } else if (device & AUDIO_DEVICE_OUT_SPDIF) { - device = AUDIO_DEVICE_OUT_SPDIF; - } else { - device = (audio_devices_t)(device & AUDIO_DEVICE_OUT_ALL_A2DP); - } - } - - /*SPEAKER_SAFE is an alias of SPEAKER for purposes of volume control*/ - if (device == AUDIO_DEVICE_OUT_SPEAKER_SAFE) - device = AUDIO_DEVICE_OUT_SPEAKER; - - ALOGW_IF(popcount(device) != 1, - "getDeviceForVolume() invalid device combination: %08x", - device); - - return device; -} - -AudioPolicyManager::device_category AudioPolicyManager::getDeviceCategory(audio_devices_t device) -{ - switch(getDeviceForVolume(device)) { - case AUDIO_DEVICE_OUT_EARPIECE: - return DEVICE_CATEGORY_EARPIECE; - case AUDIO_DEVICE_OUT_WIRED_HEADSET: - case AUDIO_DEVICE_OUT_WIRED_HEADPHONE: - case AUDIO_DEVICE_OUT_BLUETOOTH_SCO: - case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET: - case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP: - case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES: - return DEVICE_CATEGORY_HEADSET; - case AUDIO_DEVICE_OUT_LINE: - case AUDIO_DEVICE_OUT_AUX_DIGITAL: - /*USB? 
Remote submix?*/ - return DEVICE_CATEGORY_EXT_MEDIA; - case AUDIO_DEVICE_OUT_SPEAKER: - case AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT: - case AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER: - case AUDIO_DEVICE_OUT_USB_ACCESSORY: - case AUDIO_DEVICE_OUT_USB_DEVICE: - case AUDIO_DEVICE_OUT_REMOTE_SUBMIX: - default: - return DEVICE_CATEGORY_SPEAKER; - } -} - -/* static */ -float AudioPolicyManager::volIndexToAmpl(audio_devices_t device, const StreamDescriptor& streamDesc, - int indexInUi) -{ - device_category deviceCategory = getDeviceCategory(device); - const VolumeCurvePoint *curve = streamDesc.mVolumeCurve[deviceCategory]; - - // the volume index in the UI is relative to the min and max volume indices for this stream type - int nbSteps = 1 + curve[VOLMAX].mIndex - - curve[VOLMIN].mIndex; - int volIdx = (nbSteps * (indexInUi - streamDesc.mIndexMin)) / - (streamDesc.mIndexMax - streamDesc.mIndexMin); - - // find what part of the curve this index volume belongs to, or if it's out of bounds - int segment = 0; - if (volIdx < curve[VOLMIN].mIndex) { // out of bounds - return 0.0f; - } else if (volIdx < curve[VOLKNEE1].mIndex) { - segment = 0; - } else if (volIdx < curve[VOLKNEE2].mIndex) { - segment = 1; - } else if (volIdx <= curve[VOLMAX].mIndex) { - segment = 2; - } else { // out of bounds - return 1.0f; - } - - // linear interpolation in the attenuation table in dB - float decibels = curve[segment].mDBAttenuation + - ((float)(volIdx - curve[segment].mIndex)) * - ( (curve[segment+1].mDBAttenuation - - curve[segment].mDBAttenuation) / - ((float)(curve[segment+1].mIndex - - curve[segment].mIndex)) ); - - float amplification = exp( decibels * 0.115129f); // exp( dB * ln(10) / 20 ) - - ALOGVV("VOLUME vol index=[%d %d %d], dB=[%.1f %.1f %.1f] ampl=%.5f", - curve[segment].mIndex, volIdx, - curve[segment+1].mIndex, - curve[segment].mDBAttenuation, - decibels, - curve[segment+1].mDBAttenuation, - amplification); - - return amplification; -} - -const AudioPolicyManager::VolumeCurvePoint 
- AudioPolicyManager::sDefaultVolumeCurve[AudioPolicyManager::VOLCNT] = { - {1, -49.5f}, {33, -33.5f}, {66, -17.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sDefaultMediaVolumeCurve[AudioPolicyManager::VOLCNT] = { - {1, -58.0f}, {20, -40.0f}, {60, -17.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sExtMediaSystemVolumeCurve[AudioPolicyManager::VOLCNT] = { - {1, -58.0f}, {20, -40.0f}, {60, -21.0f}, {100, -10.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sSpeakerMediaVolumeCurve[AudioPolicyManager::VOLCNT] = { - {1, -56.0f}, {20, -34.0f}, {60, -11.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sSpeakerMediaVolumeCurveDrc[AudioPolicyManager::VOLCNT] = { - {1, -55.0f}, {20, -43.0f}, {86, -12.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sSpeakerSonificationVolumeCurve[AudioPolicyManager::VOLCNT] = { - {1, -29.7f}, {33, -20.1f}, {66, -10.2f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sSpeakerSonificationVolumeCurveDrc[AudioPolicyManager::VOLCNT] = { - {1, -35.7f}, {33, -26.1f}, {66, -13.2f}, {100, 0.0f} -}; - -// AUDIO_STREAM_SYSTEM, AUDIO_STREAM_ENFORCED_AUDIBLE and AUDIO_STREAM_DTMF volume tracks -// AUDIO_STREAM_RING on phones and AUDIO_STREAM_MUSIC on tablets. -// AUDIO_STREAM_DTMF tracks AUDIO_STREAM_VOICE_CALL while in call (See AudioService.java). -// The range is constrained between -24dB and -6dB over speaker and -30dB and -18dB over headset. 
- -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sDefaultSystemVolumeCurve[AudioPolicyManager::VOLCNT] = { - {1, -24.0f}, {33, -18.0f}, {66, -12.0f}, {100, -6.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sDefaultSystemVolumeCurveDrc[AudioPolicyManager::VOLCNT] = { - {1, -34.0f}, {33, -24.0f}, {66, -15.0f}, {100, -6.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sHeadsetSystemVolumeCurve[AudioPolicyManager::VOLCNT] = { - {1, -30.0f}, {33, -26.0f}, {66, -22.0f}, {100, -18.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sDefaultVoiceVolumeCurve[AudioPolicyManager::VOLCNT] = { - {0, -42.0f}, {33, -28.0f}, {66, -14.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sSpeakerVoiceVolumeCurve[AudioPolicyManager::VOLCNT] = { - {0, -24.0f}, {33, -16.0f}, {66, -8.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sLinearVolumeCurve[AudioPolicyManager::VOLCNT] = { - {0, -96.0f}, {33, -68.0f}, {66, -34.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sSilentVolumeCurve[AudioPolicyManager::VOLCNT] = { - {0, -96.0f}, {1, -96.0f}, {2, -96.0f}, {100, -96.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - AudioPolicyManager::sFullScaleVolumeCurve[AudioPolicyManager::VOLCNT] = { - {0, 0.0f}, {1, 0.0f}, {2, 0.0f}, {100, 0.0f} -}; - -const AudioPolicyManager::VolumeCurvePoint - *AudioPolicyManager::sVolumeProfiles[AUDIO_STREAM_CNT] - [AudioPolicyManager::DEVICE_CATEGORY_CNT] = { - { // AUDIO_STREAM_VOICE_CALL - sDefaultVoiceVolumeCurve, // DEVICE_CATEGORY_HEADSET - sSpeakerVoiceVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultVoiceVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_SYSTEM - sHeadsetSystemVolumeCurve, // DEVICE_CATEGORY_HEADSET - sDefaultSystemVolumeCurve, // 
DEVICE_CATEGORY_SPEAKER - sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_RING - sDefaultVolumeCurve, // DEVICE_CATEGORY_HEADSET - sSpeakerSonificationVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_MUSIC - sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_HEADSET - sSpeakerMediaVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_ALARM - sDefaultVolumeCurve, // DEVICE_CATEGORY_HEADSET - sSpeakerSonificationVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_NOTIFICATION - sDefaultVolumeCurve, // DEVICE_CATEGORY_HEADSET - sSpeakerSonificationVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_BLUETOOTH_SCO - sDefaultVoiceVolumeCurve, // DEVICE_CATEGORY_HEADSET - sSpeakerVoiceVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultVoiceVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_ENFORCED_AUDIBLE - sHeadsetSystemVolumeCurve, // DEVICE_CATEGORY_HEADSET - sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_DTMF - sHeadsetSystemVolumeCurve, // DEVICE_CATEGORY_HEADSET - sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultSystemVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sExtMediaSystemVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_TTS - // "Transmitted Through 
Speaker": always silent except on DEVICE_CATEGORY_SPEAKER - sSilentVolumeCurve, // DEVICE_CATEGORY_HEADSET - sLinearVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sSilentVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sSilentVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_ACCESSIBILITY - sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_HEADSET - sSpeakerMediaVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sDefaultMediaVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_REROUTING - sFullScaleVolumeCurve, // DEVICE_CATEGORY_HEADSET - sFullScaleVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sFullScaleVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sFullScaleVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, - { // AUDIO_STREAM_PATCH - sFullScaleVolumeCurve, // DEVICE_CATEGORY_HEADSET - sFullScaleVolumeCurve, // DEVICE_CATEGORY_SPEAKER - sFullScaleVolumeCurve, // DEVICE_CATEGORY_EARPIECE - sFullScaleVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA - }, -}; - -void AudioPolicyManager::initializeVolumeCurves() -{ - for (int i = 0; i < AUDIO_STREAM_CNT; i++) { - for (int j = 0; j < DEVICE_CATEGORY_CNT; j++) { - mStreams[i].mVolumeCurve[j] = - sVolumeProfiles[i][j]; - } - } - - // Check availability of DRC on speaker path: if available, override some of the speaker curves - if (mSpeakerDrcEnabled) { - mStreams[AUDIO_STREAM_SYSTEM].mVolumeCurve[DEVICE_CATEGORY_SPEAKER] = - sDefaultSystemVolumeCurveDrc; - mStreams[AUDIO_STREAM_RING].mVolumeCurve[DEVICE_CATEGORY_SPEAKER] = - sSpeakerSonificationVolumeCurveDrc; - mStreams[AUDIO_STREAM_ALARM].mVolumeCurve[DEVICE_CATEGORY_SPEAKER] = - sSpeakerSonificationVolumeCurveDrc; - mStreams[AUDIO_STREAM_NOTIFICATION].mVolumeCurve[DEVICE_CATEGORY_SPEAKER] = - sSpeakerSonificationVolumeCurveDrc; - mStreams[AUDIO_STREAM_MUSIC].mVolumeCurve[DEVICE_CATEGORY_SPEAKER] = - sSpeakerMediaVolumeCurveDrc; - mStreams[AUDIO_STREAM_ACCESSIBILITY].mVolumeCurve[DEVICE_CATEGORY_SPEAKER] = - 
sSpeakerMediaVolumeCurveDrc; - } + return mEngine->getDeviceForInputSource(inputSource); } float AudioPolicyManager::computeVolume(audio_stream_type_t stream, - int index, - audio_io_handle_t output, - audio_devices_t device) + int index, + audio_io_handle_t output, + audio_devices_t device) { float volume = 1.0; sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output); - StreamDescriptor &streamDesc = mStreams[stream]; if (device == AUDIO_DEVICE_NONE) { device = outputDesc->device(); } - - volume = volIndexToAmpl(device, streamDesc, index); + volume = mEngine->volIndexToAmpl(Volume::getDeviceCategory(device), stream, index); // if a headset is connected, apply the following rules to ring tones and notifications // to avoid sound level bursts in user's ears: @@ -5669,8 +4188,8 @@ float AudioPolicyManager::computeVolume(audio_stream_type_t stream, || (stream_strategy == STRATEGY_SONIFICATION_RESPECTFUL) || (stream == AUDIO_STREAM_SYSTEM) || ((stream_strategy == STRATEGY_ENFORCED_AUDIBLE) && - (mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_NONE))) && - streamDesc.mCanBeMuted) { + (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_NONE))) && + mStreams.canBeMuted(stream)) { volume *= SONIFICATION_HEADSET_VOLUME_FACTOR; // when the phone is ringing we must consider that music could have been paused just before // by the music application and behave as if music was active if the last music track was @@ -5695,11 +4214,11 @@ float AudioPolicyManager::computeVolume(audio_stream_type_t stream, } status_t AudioPolicyManager::checkAndSetVolume(audio_stream_type_t stream, - int index, - audio_io_handle_t output, - audio_devices_t device, - int delayMs, - bool force) + int index, + audio_io_handle_t output, + audio_devices_t device, + int delayMs, + bool force) { // do not change actual stream volume if the stream is muted @@ -5708,14 +4227,13 @@ status_t AudioPolicyManager::checkAndSetVolume(audio_stream_type_t stream, stream, 
mOutputs.valueFor(output)->mMuteCount[stream]); return NO_ERROR; } - + audio_policy_forced_cfg_t forceUseForComm = + mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_COMMUNICATION); // do not change in call volume if bluetooth is connected and vice versa - if ((stream == AUDIO_STREAM_VOICE_CALL && - mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION] == AUDIO_POLICY_FORCE_BT_SCO) || - (stream == AUDIO_STREAM_BLUETOOTH_SCO && - mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION] != AUDIO_POLICY_FORCE_BT_SCO)) { + if ((stream == AUDIO_STREAM_VOICE_CALL && forceUseForComm == AUDIO_POLICY_FORCE_BT_SCO) || + (stream == AUDIO_STREAM_BLUETOOTH_SCO && forceUseForComm != AUDIO_POLICY_FORCE_BT_SCO)) { ALOGV("checkAndSetVolume() cannot set stream %d volume with force use = %d for comm", - stream, mForceUse[AUDIO_POLICY_FORCE_FOR_COMMUNICATION]); + stream, forceUseForComm); return INVALID_OPERATION; } @@ -5752,7 +4270,7 @@ status_t AudioPolicyManager::checkAndSetVolume(audio_stream_type_t stream, float voiceVolume; // Force voice volume to max for bluetooth SCO as volume is managed by the headset if (stream == AUDIO_STREAM_VOICE_CALL) { - voiceVolume = (float)index/(float)mStreams[stream].mIndexMax; + voiceVolume = (float)index/(float)mStreams[stream].getVolumeIndexMax(); } else { voiceVolume = 1.0; } @@ -5767,9 +4285,9 @@ status_t AudioPolicyManager::checkAndSetVolume(audio_stream_type_t stream, } void AudioPolicyManager::applyStreamVolumes(audio_io_handle_t output, - audio_devices_t device, - int delayMs, - bool force) + audio_devices_t device, + int delayMs, + bool force) { ALOGVV("applyStreamVolumes() for output %d and device %x", output, device); @@ -5787,10 +4305,10 @@ void AudioPolicyManager::applyStreamVolumes(audio_io_handle_t output, } void AudioPolicyManager::setStrategyMute(routing_strategy strategy, - bool on, - audio_io_handle_t output, - int delayMs, - audio_devices_t device) + bool on, + audio_io_handle_t output, + int delayMs, + audio_devices_t device) { 
ALOGVV("setStrategyMute() strategy %d, mute %d, output %d", strategy, on, output); for (int stream = 0; stream < AUDIO_STREAM_CNT; stream++) { @@ -5804,12 +4322,12 @@ void AudioPolicyManager::setStrategyMute(routing_strategy strategy, } void AudioPolicyManager::setStreamMute(audio_stream_type_t stream, - bool on, - audio_io_handle_t output, - int delayMs, - audio_devices_t device) + bool on, + audio_io_handle_t output, + int delayMs, + audio_devices_t device) { - StreamDescriptor &streamDesc = mStreams[stream]; + const StreamDescriptor &streamDesc = mStreams[stream]; sp<AudioOutputDescriptor> outputDesc = mOutputs.valueFor(output); if (device == AUDIO_DEVICE_NONE) { device = outputDesc->device(); @@ -5820,9 +4338,9 @@ void AudioPolicyManager::setStreamMute(audio_stream_type_t stream, if (on) { if (outputDesc->mMuteCount[stream] == 0) { - if (streamDesc.mCanBeMuted && + if (streamDesc.canBeMuted() && ((stream != AUDIO_STREAM_ENFORCED_AUDIBLE) || - (mForceUse[AUDIO_POLICY_FORCE_FOR_SYSTEM] == AUDIO_POLICY_FORCE_NONE))) { + (mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_NONE))) { checkAndSetVolume(stream, 0, output, device, delayMs); } } @@ -5888,2103 +4406,12 @@ void AudioPolicyManager::handleIncallSonification(audio_stream_type_t stream, } } -bool AudioPolicyManager::isInCall() -{ - return isStateInCall(mPhoneState); -} - -bool AudioPolicyManager::isStateInCall(int state) { - return ((state == AUDIO_MODE_IN_CALL) || - (state == AUDIO_MODE_IN_COMMUNICATION)); -} - -uint32_t AudioPolicyManager::getMaxEffectsCpuLoad() -{ - return MAX_EFFECTS_CPU_LOAD; -} - -uint32_t AudioPolicyManager::getMaxEffectsMemory() -{ - return MAX_EFFECTS_MEMORY; -} - - -// --- AudioOutputDescriptor class implementation - -AudioPolicyManager::AudioOutputDescriptor::AudioOutputDescriptor( - const sp<IOProfile>& profile) - : mId(0), mIoHandle(0), mLatency(0), - mFlags((audio_output_flags_t)0), mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL), - mPatchHandle(0), - 
mOutput1(0), mOutput2(0), mProfile(profile), mDirectOpenCount(0) -{ - // clear usage count for all stream types - for (int i = 0; i < AUDIO_STREAM_CNT; i++) { - mRefCount[i] = 0; - mCurVolume[i] = -1.0; - mMuteCount[i] = 0; - mStopTime[i] = 0; - } - for (int i = 0; i < NUM_STRATEGIES; i++) { - mStrategyMutedByDevice[i] = false; - } - if (profile != NULL) { - mFlags = (audio_output_flags_t)profile->mFlags; - mSamplingRate = profile->pickSamplingRate(); - mFormat = profile->pickFormat(); - mChannelMask = profile->pickChannelMask(); - if (profile->mGains.size() > 0) { - profile->mGains[0]->getDefaultConfig(&mGain); - } - } -} - -audio_devices_t AudioPolicyManager::AudioOutputDescriptor::device() const -{ - if (isDuplicated()) { - return (audio_devices_t)(mOutput1->mDevice | mOutput2->mDevice); - } else { - return mDevice; - } -} - -uint32_t AudioPolicyManager::AudioOutputDescriptor::latency() -{ - if (isDuplicated()) { - return (mOutput1->mLatency > mOutput2->mLatency) ? mOutput1->mLatency : mOutput2->mLatency; - } else { - return mLatency; - } -} - -bool AudioPolicyManager::AudioOutputDescriptor::sharesHwModuleWith( - const sp<AudioOutputDescriptor> outputDesc) -{ - if (isDuplicated()) { - return mOutput1->sharesHwModuleWith(outputDesc) || mOutput2->sharesHwModuleWith(outputDesc); - } else if (outputDesc->isDuplicated()){ - return sharesHwModuleWith(outputDesc->mOutput1) || sharesHwModuleWith(outputDesc->mOutput2); - } else { - return (mProfile->mModule == outputDesc->mProfile->mModule); - } -} - -void AudioPolicyManager::AudioOutputDescriptor::changeRefCount(audio_stream_type_t stream, - int delta) -{ - // forward usage count change to attached outputs - if (isDuplicated()) { - mOutput1->changeRefCount(stream, delta); - mOutput2->changeRefCount(stream, delta); - } - if ((delta + (int)mRefCount[stream]) < 0) { - ALOGW("changeRefCount() invalid delta %d for stream %d, refCount %d", - delta, stream, mRefCount[stream]); - mRefCount[stream] = 0; - return; - } - 
mRefCount[stream] += delta; - ALOGV("changeRefCount() stream %d, count %d", stream, mRefCount[stream]); -} - -audio_devices_t AudioPolicyManager::AudioOutputDescriptor::supportedDevices() -{ - if (isDuplicated()) { - return (audio_devices_t)(mOutput1->supportedDevices() | mOutput2->supportedDevices()); - } else { - return mProfile->mSupportedDevices.types() ; - } -} - -bool AudioPolicyManager::AudioOutputDescriptor::isActive(uint32_t inPastMs) const -{ - return isStrategyActive(NUM_STRATEGIES, inPastMs); -} - -bool AudioPolicyManager::AudioOutputDescriptor::isStrategyActive(routing_strategy strategy, - uint32_t inPastMs, - nsecs_t sysTime) const -{ - if ((sysTime == 0) && (inPastMs != 0)) { - sysTime = systemTime(); - } - for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) { - if (i == AUDIO_STREAM_PATCH) { - continue; - } - if (((getStrategy((audio_stream_type_t)i) == strategy) || - (NUM_STRATEGIES == strategy)) && - isStreamActive((audio_stream_type_t)i, inPastMs, sysTime)) { - return true; - } - } - return false; -} - -bool AudioPolicyManager::AudioOutputDescriptor::isStreamActive(audio_stream_type_t stream, - uint32_t inPastMs, - nsecs_t sysTime) const -{ - if (mRefCount[stream] != 0) { - return true; - } - if (inPastMs == 0) { - return false; - } - if (sysTime == 0) { - sysTime = systemTime(); - } - if (ns2ms(sysTime - mStopTime[stream]) < inPastMs) { - return true; - } - return false; -} - -void AudioPolicyManager::AudioOutputDescriptor::toAudioPortConfig( - struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig) const -{ - ALOG_ASSERT(!isDuplicated(), "toAudioPortConfig() called on duplicated output %d", mIoHandle); - - dstConfig->config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE|AUDIO_PORT_CONFIG_CHANNEL_MASK| - AUDIO_PORT_CONFIG_FORMAT|AUDIO_PORT_CONFIG_GAIN; - if (srcConfig != NULL) { - dstConfig->config_mask |= srcConfig->config_mask; - } - AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig); - - dstConfig->id = mId; - 
dstConfig->role = AUDIO_PORT_ROLE_SOURCE; - dstConfig->type = AUDIO_PORT_TYPE_MIX; - dstConfig->ext.mix.hw_module = mProfile->mModule->mHandle; - dstConfig->ext.mix.handle = mIoHandle; - dstConfig->ext.mix.usecase.stream = AUDIO_STREAM_DEFAULT; -} - -void AudioPolicyManager::AudioOutputDescriptor::toAudioPort( - struct audio_port *port) const -{ - ALOG_ASSERT(!isDuplicated(), "toAudioPort() called on duplicated output %d", mIoHandle); - mProfile->toAudioPort(port); - port->id = mId; - toAudioPortConfig(&port->active_config); - port->ext.mix.hw_module = mProfile->mModule->mHandle; - port->ext.mix.handle = mIoHandle; - port->ext.mix.latency_class = - mFlags & AUDIO_OUTPUT_FLAG_FAST ? AUDIO_LATENCY_LOW : AUDIO_LATENCY_NORMAL; -} - -status_t AudioPolicyManager::AudioOutputDescriptor::dump(int fd) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, " ID: %d\n", mId); - result.append(buffer); - snprintf(buffer, SIZE, " Sampling rate: %d\n", mSamplingRate); - result.append(buffer); - snprintf(buffer, SIZE, " Format: %08x\n", mFormat); - result.append(buffer); - snprintf(buffer, SIZE, " Channels: %08x\n", mChannelMask); - result.append(buffer); - snprintf(buffer, SIZE, " Latency: %d\n", mLatency); - result.append(buffer); - snprintf(buffer, SIZE, " Flags %08x\n", mFlags); - result.append(buffer); - snprintf(buffer, SIZE, " Devices %08x\n", device()); - result.append(buffer); - snprintf(buffer, SIZE, " Stream volume refCount muteCount\n"); - result.append(buffer); - for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) { - snprintf(buffer, SIZE, " %02d %.03f %02d %02d\n", - i, mCurVolume[i], mRefCount[i], mMuteCount[i]); - result.append(buffer); - } - write(fd, result.string(), result.size()); - - return NO_ERROR; -} - -// --- AudioInputDescriptor class implementation - -AudioPolicyManager::AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile) - : mId(0), mIoHandle(0), - mDevice(AUDIO_DEVICE_NONE), mPolicyMix(NULL), 
mPatchHandle(0), mRefCount(0), - mInputSource(AUDIO_SOURCE_DEFAULT), mProfile(profile), mIsSoundTrigger(false) -{ - if (profile != NULL) { - mSamplingRate = profile->pickSamplingRate(); - mFormat = profile->pickFormat(); - mChannelMask = profile->pickChannelMask(); - if (profile->mGains.size() > 0) { - profile->mGains[0]->getDefaultConfig(&mGain); - } - } -} - -void AudioPolicyManager::AudioInputDescriptor::toAudioPortConfig( - struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig) const -{ - ALOG_ASSERT(mProfile != 0, - "toAudioPortConfig() called on input with null profile %d", mIoHandle); - dstConfig->config_mask = AUDIO_PORT_CONFIG_SAMPLE_RATE|AUDIO_PORT_CONFIG_CHANNEL_MASK| - AUDIO_PORT_CONFIG_FORMAT|AUDIO_PORT_CONFIG_GAIN; - if (srcConfig != NULL) { - dstConfig->config_mask |= srcConfig->config_mask; - } - - AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig); - - dstConfig->id = mId; - dstConfig->role = AUDIO_PORT_ROLE_SINK; - dstConfig->type = AUDIO_PORT_TYPE_MIX; - dstConfig->ext.mix.hw_module = mProfile->mModule->mHandle; - dstConfig->ext.mix.handle = mIoHandle; - dstConfig->ext.mix.usecase.source = mInputSource; -} - -void AudioPolicyManager::AudioInputDescriptor::toAudioPort( - struct audio_port *port) const -{ - ALOG_ASSERT(mProfile != 0, "toAudioPort() called on input with null profile %d", mIoHandle); - - mProfile->toAudioPort(port); - port->id = mId; - toAudioPortConfig(&port->active_config); - port->ext.mix.hw_module = mProfile->mModule->mHandle; - port->ext.mix.handle = mIoHandle; - port->ext.mix.latency_class = AUDIO_LATENCY_NORMAL; -} - -status_t AudioPolicyManager::AudioInputDescriptor::dump(int fd) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, " ID: %d\n", mId); - result.append(buffer); - snprintf(buffer, SIZE, " Sampling rate: %d\n", mSamplingRate); - result.append(buffer); - snprintf(buffer, SIZE, " Format: %d\n", mFormat); - result.append(buffer); - 
snprintf(buffer, SIZE, " Channels: %08x\n", mChannelMask); - result.append(buffer); - snprintf(buffer, SIZE, " Devices %08x\n", mDevice); - result.append(buffer); - snprintf(buffer, SIZE, " Ref Count %d\n", mRefCount); - result.append(buffer); - snprintf(buffer, SIZE, " Open Ref Count %d\n", mOpenRefCount); - result.append(buffer); - - write(fd, result.string(), result.size()); - - return NO_ERROR; -} - -// --- StreamDescriptor class implementation - -AudioPolicyManager::StreamDescriptor::StreamDescriptor() - : mIndexMin(0), mIndexMax(1), mCanBeMuted(true) -{ - mIndexCur.add(AUDIO_DEVICE_OUT_DEFAULT, 0); -} - -int AudioPolicyManager::StreamDescriptor::getVolumeIndex(audio_devices_t device) -{ - device = AudioPolicyManager::getDeviceForVolume(device); - // there is always a valid entry for AUDIO_DEVICE_OUT_DEFAULT - if (mIndexCur.indexOfKey(device) < 0) { - device = AUDIO_DEVICE_OUT_DEFAULT; - } - return mIndexCur.valueFor(device); -} - -void AudioPolicyManager::StreamDescriptor::dump(int fd) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, "%s %02d %02d ", - mCanBeMuted ? "true " : "false", mIndexMin, mIndexMax); - result.append(buffer); - for (size_t i = 0; i < mIndexCur.size(); i++) { - snprintf(buffer, SIZE, "%04x : %02d, ", - mIndexCur.keyAt(i), - mIndexCur.valueAt(i)); - result.append(buffer); - } - result.append("\n"); - - write(fd, result.string(), result.size()); -} - -// --- EffectDescriptor class implementation - -status_t AudioPolicyManager::EffectDescriptor::dump(int fd) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, " I/O: %d\n", mIo); - result.append(buffer); - snprintf(buffer, SIZE, " Strategy: %d\n", mStrategy); - result.append(buffer); - snprintf(buffer, SIZE, " Session: %d\n", mSession); - result.append(buffer); - snprintf(buffer, SIZE, " Name: %s\n", mDesc.name); - result.append(buffer); - snprintf(buffer, SIZE, " %s\n", mEnabled ? 
"Enabled" : "Disabled"); - result.append(buffer); - write(fd, result.string(), result.size()); - - return NO_ERROR; -} - -// --- HwModule class implementation - -AudioPolicyManager::HwModule::HwModule(const char *name) - : mName(strndup(name, AUDIO_HARDWARE_MODULE_ID_MAX_LEN)), - mHalVersion(AUDIO_DEVICE_API_VERSION_MIN), mHandle(0) -{ -} - -AudioPolicyManager::HwModule::~HwModule() -{ - for (size_t i = 0; i < mOutputProfiles.size(); i++) { - mOutputProfiles[i]->mSupportedDevices.clear(); - } - for (size_t i = 0; i < mInputProfiles.size(); i++) { - mInputProfiles[i]->mSupportedDevices.clear(); - } - free((void *)mName); -} - -status_t AudioPolicyManager::HwModule::loadInput(cnode *root) -{ - cnode *node = root->first_child; - - sp<IOProfile> profile = new IOProfile(String8(root->name), AUDIO_PORT_ROLE_SINK, this); - - while (node) { - if (strcmp(node->name, SAMPLING_RATES_TAG) == 0) { - profile->loadSamplingRates((char *)node->value); - } else if (strcmp(node->name, FORMATS_TAG) == 0) { - profile->loadFormats((char *)node->value); - } else if (strcmp(node->name, CHANNELS_TAG) == 0) { - profile->loadInChannels((char *)node->value); - } else if (strcmp(node->name, DEVICES_TAG) == 0) { - profile->mSupportedDevices.loadDevicesFromName((char *)node->value, - mDeclaredDevices); - } else if (strcmp(node->name, FLAGS_TAG) == 0) { - profile->mFlags = parseInputFlagNames((char *)node->value); - } else if (strcmp(node->name, GAINS_TAG) == 0) { - profile->loadGains(node); - } - node = node->next; - } - ALOGW_IF(profile->mSupportedDevices.isEmpty(), - "loadInput() invalid supported devices"); - ALOGW_IF(profile->mChannelMasks.size() == 0, - "loadInput() invalid supported channel masks"); - ALOGW_IF(profile->mSamplingRates.size() == 0, - "loadInput() invalid supported sampling rates"); - ALOGW_IF(profile->mFormats.size() == 0, - "loadInput() invalid supported formats"); - if (!profile->mSupportedDevices.isEmpty() && - (profile->mChannelMasks.size() != 0) && - 
(profile->mSamplingRates.size() != 0) && - (profile->mFormats.size() != 0)) { - - ALOGV("loadInput() adding input Supported Devices %04x", - profile->mSupportedDevices.types()); - - mInputProfiles.add(profile); - return NO_ERROR; - } else { - return BAD_VALUE; - } -} - -status_t AudioPolicyManager::HwModule::loadOutput(cnode *root) -{ - cnode *node = root->first_child; - - sp<IOProfile> profile = new IOProfile(String8(root->name), AUDIO_PORT_ROLE_SOURCE, this); - - while (node) { - if (strcmp(node->name, SAMPLING_RATES_TAG) == 0) { - profile->loadSamplingRates((char *)node->value); - } else if (strcmp(node->name, FORMATS_TAG) == 0) { - profile->loadFormats((char *)node->value); - } else if (strcmp(node->name, CHANNELS_TAG) == 0) { - profile->loadOutChannels((char *)node->value); - } else if (strcmp(node->name, DEVICES_TAG) == 0) { - profile->mSupportedDevices.loadDevicesFromName((char *)node->value, - mDeclaredDevices); - } else if (strcmp(node->name, FLAGS_TAG) == 0) { - profile->mFlags = parseOutputFlagNames((char *)node->value); - } else if (strcmp(node->name, GAINS_TAG) == 0) { - profile->loadGains(node); - } - node = node->next; - } - ALOGW_IF(profile->mSupportedDevices.isEmpty(), - "loadOutput() invalid supported devices"); - ALOGW_IF(profile->mChannelMasks.size() == 0, - "loadOutput() invalid supported channel masks"); - ALOGW_IF(profile->mSamplingRates.size() == 0, - "loadOutput() invalid supported sampling rates"); - ALOGW_IF(profile->mFormats.size() == 0, - "loadOutput() invalid supported formats"); - if (!profile->mSupportedDevices.isEmpty() && - (profile->mChannelMasks.size() != 0) && - (profile->mSamplingRates.size() != 0) && - (profile->mFormats.size() != 0)) { - - ALOGV("loadOutput() adding output Supported Devices %04x, mFlags %04x", - profile->mSupportedDevices.types(), profile->mFlags); - - mOutputProfiles.add(profile); - return NO_ERROR; - } else { - return BAD_VALUE; - } -} - -status_t AudioPolicyManager::HwModule::loadDevice(cnode *root) -{ - 
cnode *node = root->first_child; - - audio_devices_t type = AUDIO_DEVICE_NONE; - while (node) { - if (strcmp(node->name, DEVICE_TYPE) == 0) { - type = parseDeviceNames((char *)node->value); - break; - } - node = node->next; - } - if (type == AUDIO_DEVICE_NONE || - (!audio_is_input_device(type) && !audio_is_output_device(type))) { - ALOGW("loadDevice() bad type %08x", type); - return BAD_VALUE; - } - sp<DeviceDescriptor> deviceDesc = new DeviceDescriptor(String8(root->name), type); - deviceDesc->mModule = this; - - node = root->first_child; - while (node) { - if (strcmp(node->name, DEVICE_ADDRESS) == 0) { - deviceDesc->mAddress = String8((char *)node->value); - } else if (strcmp(node->name, CHANNELS_TAG) == 0) { - if (audio_is_input_device(type)) { - deviceDesc->loadInChannels((char *)node->value); - } else { - deviceDesc->loadOutChannels((char *)node->value); - } - } else if (strcmp(node->name, GAINS_TAG) == 0) { - deviceDesc->loadGains(node); - } - node = node->next; - } - - ALOGV("loadDevice() adding device name %s type %08x address %s", - deviceDesc->mName.string(), type, deviceDesc->mAddress.string()); - - mDeclaredDevices.add(deviceDesc); - - return NO_ERROR; -} - -status_t AudioPolicyManager::HwModule::addOutputProfile(String8 name, const audio_config_t *config, - audio_devices_t device, String8 address) -{ - sp<IOProfile> profile = new IOProfile(name, AUDIO_PORT_ROLE_SOURCE, this); - - profile->mSamplingRates.add(config->sample_rate); - profile->mChannelMasks.add(config->channel_mask); - profile->mFormats.add(config->format); - - sp<DeviceDescriptor> devDesc = new DeviceDescriptor(String8(""), device); - devDesc->mAddress = address; - profile->mSupportedDevices.add(devDesc); - - mOutputProfiles.add(profile); - - return NO_ERROR; -} - -status_t AudioPolicyManager::HwModule::removeOutputProfile(String8 name) -{ - for (size_t i = 0; i < mOutputProfiles.size(); i++) { - if (mOutputProfiles[i]->mName == name) { - mOutputProfiles.removeAt(i); - break; - } - } - - 
return NO_ERROR; -} - -status_t AudioPolicyManager::HwModule::addInputProfile(String8 name, const audio_config_t *config, - audio_devices_t device, String8 address) -{ - sp<IOProfile> profile = new IOProfile(name, AUDIO_PORT_ROLE_SINK, this); - - profile->mSamplingRates.add(config->sample_rate); - profile->mChannelMasks.add(config->channel_mask); - profile->mFormats.add(config->format); - - sp<DeviceDescriptor> devDesc = new DeviceDescriptor(String8(""), device); - devDesc->mAddress = address; - profile->mSupportedDevices.add(devDesc); - - ALOGV("addInputProfile() name %s rate %d mask 0x08", name.string(), config->sample_rate, config->channel_mask); - - mInputProfiles.add(profile); - - return NO_ERROR; -} - -status_t AudioPolicyManager::HwModule::removeInputProfile(String8 name) -{ - for (size_t i = 0; i < mInputProfiles.size(); i++) { - if (mInputProfiles[i]->mName == name) { - mInputProfiles.removeAt(i); - break; - } - } - - return NO_ERROR; -} - - -void AudioPolicyManager::HwModule::dump(int fd) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, " - name: %s\n", mName); - result.append(buffer); - snprintf(buffer, SIZE, " - handle: %d\n", mHandle); - result.append(buffer); - snprintf(buffer, SIZE, " - version: %u.%u\n", mHalVersion >> 8, mHalVersion & 0xFF); - result.append(buffer); - write(fd, result.string(), result.size()); - if (mOutputProfiles.size()) { - write(fd, " - outputs:\n", strlen(" - outputs:\n")); - for (size_t i = 0; i < mOutputProfiles.size(); i++) { - snprintf(buffer, SIZE, " output %zu:\n", i); - write(fd, buffer, strlen(buffer)); - mOutputProfiles[i]->dump(fd); - } - } - if (mInputProfiles.size()) { - write(fd, " - inputs:\n", strlen(" - inputs:\n")); - for (size_t i = 0; i < mInputProfiles.size(); i++) { - snprintf(buffer, SIZE, " input %zu:\n", i); - write(fd, buffer, strlen(buffer)); - mInputProfiles[i]->dump(fd); - } - } - if (mDeclaredDevices.size()) { - write(fd, " - devices:\n", strlen(" - 
devices:\n")); - for (size_t i = 0; i < mDeclaredDevices.size(); i++) { - mDeclaredDevices[i]->dump(fd, 4, i); - } - } -} - -// --- AudioPort class implementation - - -AudioPolicyManager::AudioPort::AudioPort(const String8& name, audio_port_type_t type, - audio_port_role_t role, const sp<HwModule>& module) : - mName(name), mType(type), mRole(role), mModule(module), mFlags(0) -{ - mUseInChannelMask = ((type == AUDIO_PORT_TYPE_DEVICE) && (role == AUDIO_PORT_ROLE_SOURCE)) || - ((type == AUDIO_PORT_TYPE_MIX) && (role == AUDIO_PORT_ROLE_SINK)); -} - -void AudioPolicyManager::AudioPort::toAudioPort(struct audio_port *port) const -{ - port->role = mRole; - port->type = mType; - unsigned int i; - for (i = 0; i < mSamplingRates.size() && i < AUDIO_PORT_MAX_SAMPLING_RATES; i++) { - if (mSamplingRates[i] != 0) { - port->sample_rates[i] = mSamplingRates[i]; - } - } - port->num_sample_rates = i; - for (i = 0; i < mChannelMasks.size() && i < AUDIO_PORT_MAX_CHANNEL_MASKS; i++) { - if (mChannelMasks[i] != 0) { - port->channel_masks[i] = mChannelMasks[i]; - } - } - port->num_channel_masks = i; - for (i = 0; i < mFormats.size() && i < AUDIO_PORT_MAX_FORMATS; i++) { - if (mFormats[i] != 0) { - port->formats[i] = mFormats[i]; - } - } - port->num_formats = i; - - ALOGV("AudioPort::toAudioPort() num gains %zu", mGains.size()); - - for (i = 0; i < mGains.size() && i < AUDIO_PORT_MAX_GAINS; i++) { - port->gains[i] = mGains[i]->mGain; - } - port->num_gains = i; -} - -void AudioPolicyManager::AudioPort::importAudioPort(const sp<AudioPort> port) { - for (size_t k = 0 ; k < port->mSamplingRates.size() ; k++) { - const uint32_t rate = port->mSamplingRates.itemAt(k); - if (rate != 0) { // skip "dynamic" rates - bool hasRate = false; - for (size_t l = 0 ; l < mSamplingRates.size() ; l++) { - if (rate == mSamplingRates.itemAt(l)) { - hasRate = true; - break; - } - } - if (!hasRate) { // never import a sampling rate twice - mSamplingRates.add(rate); - } - } - } - for (size_t k = 0 ; k < 
port->mChannelMasks.size() ; k++) { - const audio_channel_mask_t mask = port->mChannelMasks.itemAt(k); - if (mask != 0) { // skip "dynamic" masks - bool hasMask = false; - for (size_t l = 0 ; l < mChannelMasks.size() ; l++) { - if (mask == mChannelMasks.itemAt(l)) { - hasMask = true; - break; - } - } - if (!hasMask) { // never import a channel mask twice - mChannelMasks.add(mask); - } - } - } - for (size_t k = 0 ; k < port->mFormats.size() ; k++) { - const audio_format_t format = port->mFormats.itemAt(k); - if (format != 0) { // skip "dynamic" formats - bool hasFormat = false; - for (size_t l = 0 ; l < mFormats.size() ; l++) { - if (format == mFormats.itemAt(l)) { - hasFormat = true; - break; - } - } - if (!hasFormat) { // never import a channel mask twice - mFormats.add(format); - } - } - } - for (size_t k = 0 ; k < port->mGains.size() ; k++) { - sp<AudioGain> gain = port->mGains.itemAt(k); - if (gain != 0) { - bool hasGain = false; - for (size_t l = 0 ; l < mGains.size() ; l++) { - if (gain == mGains.itemAt(l)) { - hasGain = true; - break; - } - } - if (!hasGain) { // never import a gain twice - mGains.add(gain); - } - } - } -} - -void AudioPolicyManager::AudioPort::clearCapabilities() { - mChannelMasks.clear(); - mFormats.clear(); - mSamplingRates.clear(); - mGains.clear(); -} - -void AudioPolicyManager::AudioPort::loadSamplingRates(char *name) -{ - char *str = strtok(name, "|"); - - // by convention, "0' in the first entry in mSamplingRates indicates the supported sampling - // rates should be read from the output stream after it is opened for the first time - if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { - mSamplingRates.add(0); - return; - } - - while (str != NULL) { - uint32_t rate = atoi(str); - if (rate != 0) { - ALOGV("loadSamplingRates() adding rate %d", rate); - mSamplingRates.add(rate); - } - str = strtok(NULL, "|"); - } -} - -void AudioPolicyManager::AudioPort::loadFormats(char *name) -{ - char *str = strtok(name, "|"); - - // by 
convention, "0' in the first entry in mFormats indicates the supported formats - // should be read from the output stream after it is opened for the first time - if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { - mFormats.add(AUDIO_FORMAT_DEFAULT); - return; - } - - while (str != NULL) { - audio_format_t format = (audio_format_t)stringToEnum(sFormatNameToEnumTable, - ARRAY_SIZE(sFormatNameToEnumTable), - str); - if (format != AUDIO_FORMAT_DEFAULT) { - mFormats.add(format); - } - str = strtok(NULL, "|"); - } -} - -void AudioPolicyManager::AudioPort::loadInChannels(char *name) -{ - const char *str = strtok(name, "|"); - - ALOGV("loadInChannels() %s", name); - - if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { - mChannelMasks.add(0); - return; - } - - while (str != NULL) { - audio_channel_mask_t channelMask = - (audio_channel_mask_t)stringToEnum(sInChannelsNameToEnumTable, - ARRAY_SIZE(sInChannelsNameToEnumTable), - str); - if (channelMask != 0) { - ALOGV("loadInChannels() adding channelMask %04x", channelMask); - mChannelMasks.add(channelMask); - } - str = strtok(NULL, "|"); - } -} - -void AudioPolicyManager::AudioPort::loadOutChannels(char *name) -{ - const char *str = strtok(name, "|"); - - ALOGV("loadOutChannels() %s", name); - - // by convention, "0' in the first entry in mChannelMasks indicates the supported channel - // masks should be read from the output stream after it is opened for the first time - if (str != NULL && strcmp(str, DYNAMIC_VALUE_TAG) == 0) { - mChannelMasks.add(0); - return; - } - - while (str != NULL) { - audio_channel_mask_t channelMask = - (audio_channel_mask_t)stringToEnum(sOutChannelsNameToEnumTable, - ARRAY_SIZE(sOutChannelsNameToEnumTable), - str); - if (channelMask != 0) { - mChannelMasks.add(channelMask); - } - str = strtok(NULL, "|"); - } - return; -} - -audio_gain_mode_t AudioPolicyManager::AudioPort::loadGainMode(char *name) -{ - const char *str = strtok(name, "|"); - - ALOGV("loadGainMode() %s", name); - 
audio_gain_mode_t mode = 0; - while (str != NULL) { - mode |= (audio_gain_mode_t)stringToEnum(sGainModeNameToEnumTable, - ARRAY_SIZE(sGainModeNameToEnumTable), - str); - str = strtok(NULL, "|"); - } - return mode; -} - -void AudioPolicyManager::AudioPort::loadGain(cnode *root, int index) -{ - cnode *node = root->first_child; - - sp<AudioGain> gain = new AudioGain(index, mUseInChannelMask); - - while (node) { - if (strcmp(node->name, GAIN_MODE) == 0) { - gain->mGain.mode = loadGainMode((char *)node->value); - } else if (strcmp(node->name, GAIN_CHANNELS) == 0) { - if (mUseInChannelMask) { - gain->mGain.channel_mask = - (audio_channel_mask_t)stringToEnum(sInChannelsNameToEnumTable, - ARRAY_SIZE(sInChannelsNameToEnumTable), - (char *)node->value); - } else { - gain->mGain.channel_mask = - (audio_channel_mask_t)stringToEnum(sOutChannelsNameToEnumTable, - ARRAY_SIZE(sOutChannelsNameToEnumTable), - (char *)node->value); - } - } else if (strcmp(node->name, GAIN_MIN_VALUE) == 0) { - gain->mGain.min_value = atoi((char *)node->value); - } else if (strcmp(node->name, GAIN_MAX_VALUE) == 0) { - gain->mGain.max_value = atoi((char *)node->value); - } else if (strcmp(node->name, GAIN_DEFAULT_VALUE) == 0) { - gain->mGain.default_value = atoi((char *)node->value); - } else if (strcmp(node->name, GAIN_STEP_VALUE) == 0) { - gain->mGain.step_value = atoi((char *)node->value); - } else if (strcmp(node->name, GAIN_MIN_RAMP_MS) == 0) { - gain->mGain.min_ramp_ms = atoi((char *)node->value); - } else if (strcmp(node->name, GAIN_MAX_RAMP_MS) == 0) { - gain->mGain.max_ramp_ms = atoi((char *)node->value); - } - node = node->next; - } - - ALOGV("loadGain() adding new gain mode %08x channel mask %08x min mB %d max mB %d", - gain->mGain.mode, gain->mGain.channel_mask, gain->mGain.min_value, gain->mGain.max_value); - - if (gain->mGain.mode == 0) { - return; - } - mGains.add(gain); -} - -void AudioPolicyManager::AudioPort::loadGains(cnode *root) -{ - cnode *node = root->first_child; - int index = 0; 
- while (node) { - ALOGV("loadGains() loading gain %s", node->name); - loadGain(node, index++); - node = node->next; - } -} - -status_t AudioPolicyManager::AudioPort::checkExactSamplingRate(uint32_t samplingRate) const -{ - if (mSamplingRates.isEmpty()) { - return NO_ERROR; - } - - for (size_t i = 0; i < mSamplingRates.size(); i ++) { - if (mSamplingRates[i] == samplingRate) { - return NO_ERROR; - } - } - return BAD_VALUE; -} - -status_t AudioPolicyManager::AudioPort::checkCompatibleSamplingRate(uint32_t samplingRate, - uint32_t *updatedSamplingRate) const -{ - if (mSamplingRates.isEmpty()) { - return NO_ERROR; - } - - // Search for the closest supported sampling rate that is above (preferred) - // or below (acceptable) the desired sampling rate, within a permitted ratio. - // The sampling rates do not need to be sorted in ascending order. - ssize_t maxBelow = -1; - ssize_t minAbove = -1; - uint32_t candidate; - for (size_t i = 0; i < mSamplingRates.size(); i++) { - candidate = mSamplingRates[i]; - if (candidate == samplingRate) { - if (updatedSamplingRate != NULL) { - *updatedSamplingRate = candidate; - } - return NO_ERROR; - } - // candidate < desired - if (candidate < samplingRate) { - if (maxBelow < 0 || candidate > mSamplingRates[maxBelow]) { - maxBelow = i; - } - // candidate > desired - } else { - if (minAbove < 0 || candidate < mSamplingRates[minAbove]) { - minAbove = i; - } - } - } - // This uses hard-coded knowledge about AudioFlinger resampling ratios. - // TODO Move these assumptions out. - static const uint32_t kMaxDownSampleRatio = 6; // beyond this aliasing occurs - static const uint32_t kMaxUpSampleRatio = 256; // beyond this sample rate inaccuracies occur - // due to approximation by an int32_t of the - // phase increments - // Prefer to down-sample from a higher sampling rate, as we get the desired frequency spectrum. 
- if (minAbove >= 0) { - candidate = mSamplingRates[minAbove]; - if (candidate / kMaxDownSampleRatio <= samplingRate) { - if (updatedSamplingRate != NULL) { - *updatedSamplingRate = candidate; - } - return NO_ERROR; - } - } - // But if we have to up-sample from a lower sampling rate, that's OK. - if (maxBelow >= 0) { - candidate = mSamplingRates[maxBelow]; - if (candidate * kMaxUpSampleRatio >= samplingRate) { - if (updatedSamplingRate != NULL) { - *updatedSamplingRate = candidate; - } - return NO_ERROR; - } - } - // leave updatedSamplingRate unmodified - return BAD_VALUE; -} - -status_t AudioPolicyManager::AudioPort::checkExactChannelMask(audio_channel_mask_t channelMask) const -{ - if (mChannelMasks.isEmpty()) { - return NO_ERROR; - } - - for (size_t i = 0; i < mChannelMasks.size(); i++) { - if (mChannelMasks[i] == channelMask) { - return NO_ERROR; - } - } - return BAD_VALUE; -} - -status_t AudioPolicyManager::AudioPort::checkCompatibleChannelMask(audio_channel_mask_t channelMask) - const -{ - if (mChannelMasks.isEmpty()) { - return NO_ERROR; - } - - const bool isRecordThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SINK; - for (size_t i = 0; i < mChannelMasks.size(); i ++) { - // FIXME Does not handle multi-channel automatic conversions yet - audio_channel_mask_t supported = mChannelMasks[i]; - if (supported == channelMask) { - return NO_ERROR; - } - if (isRecordThread) { - // This uses hard-coded knowledge that AudioFlinger can silently down-mix and up-mix. - // FIXME Abstract this out to a table. 
- if (((supported == AUDIO_CHANNEL_IN_FRONT_BACK || supported == AUDIO_CHANNEL_IN_STEREO) - && channelMask == AUDIO_CHANNEL_IN_MONO) || - (supported == AUDIO_CHANNEL_IN_MONO && (channelMask == AUDIO_CHANNEL_IN_FRONT_BACK - || channelMask == AUDIO_CHANNEL_IN_STEREO))) { - return NO_ERROR; - } - } - } - return BAD_VALUE; -} - -status_t AudioPolicyManager::AudioPort::checkFormat(audio_format_t format) const -{ - if (mFormats.isEmpty()) { - return NO_ERROR; - } - - for (size_t i = 0; i < mFormats.size(); i ++) { - if (mFormats[i] == format) { - return NO_ERROR; - } - } - return BAD_VALUE; -} - - -uint32_t AudioPolicyManager::AudioPort::pickSamplingRate() const -{ - // special case for uninitialized dynamic profile - if (mSamplingRates.size() == 1 && mSamplingRates[0] == 0) { - return 0; - } - - // For direct outputs, pick minimum sampling rate: this helps ensuring that the - // channel count / sampling rate combination chosen will be supported by the connected - // sink - if ((mType == AUDIO_PORT_TYPE_MIX) && (mRole == AUDIO_PORT_ROLE_SOURCE) && - (mFlags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD))) { - uint32_t samplingRate = UINT_MAX; - for (size_t i = 0; i < mSamplingRates.size(); i ++) { - if ((mSamplingRates[i] < samplingRate) && (mSamplingRates[i] > 0)) { - samplingRate = mSamplingRates[i]; - } - } - return (samplingRate == UINT_MAX) ? 0 : samplingRate; - } - - uint32_t samplingRate = 0; - uint32_t maxRate = MAX_MIXER_SAMPLING_RATE; - - // For mixed output and inputs, use max mixer sampling rates. 
Do not - // limit sampling rate otherwise - if (mType != AUDIO_PORT_TYPE_MIX) { - maxRate = UINT_MAX; - } - for (size_t i = 0; i < mSamplingRates.size(); i ++) { - if ((mSamplingRates[i] > samplingRate) && (mSamplingRates[i] <= maxRate)) { - samplingRate = mSamplingRates[i]; - } - } - return samplingRate; -} - -audio_channel_mask_t AudioPolicyManager::AudioPort::pickChannelMask() const -{ - // special case for uninitialized dynamic profile - if (mChannelMasks.size() == 1 && mChannelMasks[0] == 0) { - return AUDIO_CHANNEL_NONE; - } - audio_channel_mask_t channelMask = AUDIO_CHANNEL_NONE; - - // For direct outputs, pick minimum channel count: this helps ensuring that the - // channel count / sampling rate combination chosen will be supported by the connected - // sink - if ((mType == AUDIO_PORT_TYPE_MIX) && (mRole == AUDIO_PORT_ROLE_SOURCE) && - (mFlags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD))) { - uint32_t channelCount = UINT_MAX; - for (size_t i = 0; i < mChannelMasks.size(); i ++) { - uint32_t cnlCount; - if (mUseInChannelMask) { - cnlCount = audio_channel_count_from_in_mask(mChannelMasks[i]); - } else { - cnlCount = audio_channel_count_from_out_mask(mChannelMasks[i]); - } - if ((cnlCount < channelCount) && (cnlCount > 0)) { - channelMask = mChannelMasks[i]; - channelCount = cnlCount; - } - } - return channelMask; - } - - uint32_t channelCount = 0; - uint32_t maxCount = MAX_MIXER_CHANNEL_COUNT; - - // For mixed output and inputs, use max mixer channel count. 
Do not - // limit channel count otherwise - if (mType != AUDIO_PORT_TYPE_MIX) { - maxCount = UINT_MAX; - } - for (size_t i = 0; i < mChannelMasks.size(); i ++) { - uint32_t cnlCount; - if (mUseInChannelMask) { - cnlCount = audio_channel_count_from_in_mask(mChannelMasks[i]); - } else { - cnlCount = audio_channel_count_from_out_mask(mChannelMasks[i]); - } - if ((cnlCount > channelCount) && (cnlCount <= maxCount)) { - channelMask = mChannelMasks[i]; - channelCount = cnlCount; - } - } - return channelMask; -} - -/* format in order of increasing preference */ -const audio_format_t AudioPolicyManager::AudioPort::sPcmFormatCompareTable[] = { - AUDIO_FORMAT_DEFAULT, - AUDIO_FORMAT_PCM_16_BIT, - AUDIO_FORMAT_PCM_8_24_BIT, - AUDIO_FORMAT_PCM_24_BIT_PACKED, - AUDIO_FORMAT_PCM_32_BIT, - AUDIO_FORMAT_PCM_FLOAT, -}; - -int AudioPolicyManager::AudioPort::compareFormats(audio_format_t format1, - audio_format_t format2) -{ - // NOTE: AUDIO_FORMAT_INVALID is also considered not PCM and will be compared equal to any - // compressed format and better than any PCM format. 
This is by design of pickFormat() - if (!audio_is_linear_pcm(format1)) { - if (!audio_is_linear_pcm(format2)) { - return 0; - } - return 1; - } - if (!audio_is_linear_pcm(format2)) { - return -1; - } - - int index1 = -1, index2 = -1; - for (size_t i = 0; - (i < ARRAY_SIZE(sPcmFormatCompareTable)) && ((index1 == -1) || (index2 == -1)); - i ++) { - if (sPcmFormatCompareTable[i] == format1) { - index1 = i; - } - if (sPcmFormatCompareTable[i] == format2) { - index2 = i; - } - } - // format1 not found => index1 < 0 => format2 > format1 - // format2 not found => index2 < 0 => format2 < format1 - return index1 - index2; -} - -audio_format_t AudioPolicyManager::AudioPort::pickFormat() const -{ - // special case for uninitialized dynamic profile - if (mFormats.size() == 1 && mFormats[0] == 0) { - return AUDIO_FORMAT_DEFAULT; - } - - audio_format_t format = AUDIO_FORMAT_DEFAULT; - audio_format_t bestFormat = - AudioPolicyManager::AudioPort::sPcmFormatCompareTable[ - ARRAY_SIZE(AudioPolicyManager::AudioPort::sPcmFormatCompareTable) - 1]; - // For mixed output and inputs, use best mixer output format. 
Do not - // limit format otherwise - if ((mType != AUDIO_PORT_TYPE_MIX) || - ((mRole == AUDIO_PORT_ROLE_SOURCE) && - (((mFlags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)) != 0)))) { - bestFormat = AUDIO_FORMAT_INVALID; - } - - for (size_t i = 0; i < mFormats.size(); i ++) { - if ((compareFormats(mFormats[i], format) > 0) && - (compareFormats(mFormats[i], bestFormat) <= 0)) { - format = mFormats[i]; - } - } - return format; -} - -status_t AudioPolicyManager::AudioPort::checkGain(const struct audio_gain_config *gainConfig, - int index) const -{ - if (index < 0 || (size_t)index >= mGains.size()) { - return BAD_VALUE; - } - return mGains[index]->checkConfig(gainConfig); -} - -void AudioPolicyManager::AudioPort::dump(int fd, int spaces) const -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - if (mName.size() != 0) { - snprintf(buffer, SIZE, "%*s- name: %s\n", spaces, "", mName.string()); - result.append(buffer); - } - - if (mSamplingRates.size() != 0) { - snprintf(buffer, SIZE, "%*s- sampling rates: ", spaces, ""); - result.append(buffer); - for (size_t i = 0; i < mSamplingRates.size(); i++) { - if (i == 0 && mSamplingRates[i] == 0) { - snprintf(buffer, SIZE, "Dynamic"); - } else { - snprintf(buffer, SIZE, "%d", mSamplingRates[i]); - } - result.append(buffer); - result.append(i == (mSamplingRates.size() - 1) ? "" : ", "); - } - result.append("\n"); - } - - if (mChannelMasks.size() != 0) { - snprintf(buffer, SIZE, "%*s- channel masks: ", spaces, ""); - result.append(buffer); - for (size_t i = 0; i < mChannelMasks.size(); i++) { - ALOGV("AudioPort::dump mChannelMasks %zu %08x", i, mChannelMasks[i]); - - if (i == 0 && mChannelMasks[i] == 0) { - snprintf(buffer, SIZE, "Dynamic"); - } else { - snprintf(buffer, SIZE, "0x%04x", mChannelMasks[i]); - } - result.append(buffer); - result.append(i == (mChannelMasks.size() - 1) ? 
"" : ", "); - } - result.append("\n"); - } - - if (mFormats.size() != 0) { - snprintf(buffer, SIZE, "%*s- formats: ", spaces, ""); - result.append(buffer); - for (size_t i = 0; i < mFormats.size(); i++) { - const char *formatStr = enumToString(sFormatNameToEnumTable, - ARRAY_SIZE(sFormatNameToEnumTable), - mFormats[i]); - if (i == 0 && strcmp(formatStr, "") == 0) { - snprintf(buffer, SIZE, "Dynamic"); - } else { - snprintf(buffer, SIZE, "%s", formatStr); - } - result.append(buffer); - result.append(i == (mFormats.size() - 1) ? "" : ", "); - } - result.append("\n"); - } - write(fd, result.string(), result.size()); - if (mGains.size() != 0) { - snprintf(buffer, SIZE, "%*s- gains:\n", spaces, ""); - write(fd, buffer, strlen(buffer) + 1); - result.append(buffer); - for (size_t i = 0; i < mGains.size(); i++) { - mGains[i]->dump(fd, spaces + 2, i); - } - } -} - -// --- AudioGain class implementation - -AudioPolicyManager::AudioGain::AudioGain(int index, bool useInChannelMask) -{ - mIndex = index; - mUseInChannelMask = useInChannelMask; - memset(&mGain, 0, sizeof(struct audio_gain)); -} - -void AudioPolicyManager::AudioGain::getDefaultConfig(struct audio_gain_config *config) -{ - config->index = mIndex; - config->mode = mGain.mode; - config->channel_mask = mGain.channel_mask; - if ((mGain.mode & AUDIO_GAIN_MODE_JOINT) == AUDIO_GAIN_MODE_JOINT) { - config->values[0] = mGain.default_value; - } else { - uint32_t numValues; - if (mUseInChannelMask) { - numValues = audio_channel_count_from_in_mask(mGain.channel_mask); - } else { - numValues = audio_channel_count_from_out_mask(mGain.channel_mask); - } - for (size_t i = 0; i < numValues; i++) { - config->values[i] = mGain.default_value; - } - } - if ((mGain.mode & AUDIO_GAIN_MODE_RAMP) == AUDIO_GAIN_MODE_RAMP) { - config->ramp_duration_ms = mGain.min_ramp_ms; - } -} - -status_t AudioPolicyManager::AudioGain::checkConfig(const struct audio_gain_config *config) -{ - if ((config->mode & ~mGain.mode) != 0) { - return BAD_VALUE; - } 
- if ((config->mode & AUDIO_GAIN_MODE_JOINT) == AUDIO_GAIN_MODE_JOINT) { - if ((config->values[0] < mGain.min_value) || - (config->values[0] > mGain.max_value)) { - return BAD_VALUE; - } - } else { - if ((config->channel_mask & ~mGain.channel_mask) != 0) { - return BAD_VALUE; - } - uint32_t numValues; - if (mUseInChannelMask) { - numValues = audio_channel_count_from_in_mask(config->channel_mask); - } else { - numValues = audio_channel_count_from_out_mask(config->channel_mask); - } - for (size_t i = 0; i < numValues; i++) { - if ((config->values[i] < mGain.min_value) || - (config->values[i] > mGain.max_value)) { - return BAD_VALUE; - } - } - } - if ((config->mode & AUDIO_GAIN_MODE_RAMP) == AUDIO_GAIN_MODE_RAMP) { - if ((config->ramp_duration_ms < mGain.min_ramp_ms) || - (config->ramp_duration_ms > mGain.max_ramp_ms)) { - return BAD_VALUE; - } - } - return NO_ERROR; -} - -void AudioPolicyManager::AudioGain::dump(int fd, int spaces, int index) const -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, "%*sGain %d:\n", spaces, "", index+1); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- mode: %08x\n", spaces, "", mGain.mode); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- channel_mask: %08x\n", spaces, "", mGain.channel_mask); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- min_value: %d mB\n", spaces, "", mGain.min_value); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- max_value: %d mB\n", spaces, "", mGain.max_value); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- default_value: %d mB\n", spaces, "", mGain.default_value); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- step_value: %d mB\n", spaces, "", mGain.step_value); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- min_ramp_ms: %d ms\n", spaces, "", mGain.min_ramp_ms); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- max_ramp_ms: %d ms\n", spaces, "", mGain.max_ramp_ms); - result.append(buffer); - - 
write(fd, result.string(), result.size()); -} - -// --- AudioPortConfig class implementation - -AudioPolicyManager::AudioPortConfig::AudioPortConfig() -{ - mSamplingRate = 0; - mChannelMask = AUDIO_CHANNEL_NONE; - mFormat = AUDIO_FORMAT_INVALID; - mGain.index = -1; -} - -status_t AudioPolicyManager::AudioPortConfig::applyAudioPortConfig( - const struct audio_port_config *config, - struct audio_port_config *backupConfig) -{ - struct audio_port_config localBackupConfig; - status_t status = NO_ERROR; - - localBackupConfig.config_mask = config->config_mask; - toAudioPortConfig(&localBackupConfig); - - sp<AudioPort> audioport = getAudioPort(); - if (audioport == 0) { - status = NO_INIT; - goto exit; - } - if (config->config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) { - status = audioport->checkExactSamplingRate(config->sample_rate); - if (status != NO_ERROR) { - goto exit; - } - mSamplingRate = config->sample_rate; - } - if (config->config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) { - status = audioport->checkExactChannelMask(config->channel_mask); - if (status != NO_ERROR) { - goto exit; - } - mChannelMask = config->channel_mask; - } - if (config->config_mask & AUDIO_PORT_CONFIG_FORMAT) { - status = audioport->checkFormat(config->format); - if (status != NO_ERROR) { - goto exit; - } - mFormat = config->format; - } - if (config->config_mask & AUDIO_PORT_CONFIG_GAIN) { - status = audioport->checkGain(&config->gain, config->gain.index); - if (status != NO_ERROR) { - goto exit; - } - mGain = config->gain; - } - -exit: - if (status != NO_ERROR) { - applyAudioPortConfig(&localBackupConfig); - } - if (backupConfig != NULL) { - *backupConfig = localBackupConfig; - } - return status; -} - -void AudioPolicyManager::AudioPortConfig::toAudioPortConfig( - struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig) const -{ - if (dstConfig->config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE) { - dstConfig->sample_rate = mSamplingRate; - if ((srcConfig != NULL) && 
(srcConfig->config_mask & AUDIO_PORT_CONFIG_SAMPLE_RATE)) { - dstConfig->sample_rate = srcConfig->sample_rate; - } - } else { - dstConfig->sample_rate = 0; - } - if (dstConfig->config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK) { - dstConfig->channel_mask = mChannelMask; - if ((srcConfig != NULL) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_CHANNEL_MASK)) { - dstConfig->channel_mask = srcConfig->channel_mask; - } - } else { - dstConfig->channel_mask = AUDIO_CHANNEL_NONE; - } - if (dstConfig->config_mask & AUDIO_PORT_CONFIG_FORMAT) { - dstConfig->format = mFormat; - if ((srcConfig != NULL) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_FORMAT)) { - dstConfig->format = srcConfig->format; - } - } else { - dstConfig->format = AUDIO_FORMAT_INVALID; - } - if (dstConfig->config_mask & AUDIO_PORT_CONFIG_GAIN) { - dstConfig->gain = mGain; - if ((srcConfig != NULL) && (srcConfig->config_mask & AUDIO_PORT_CONFIG_GAIN)) { - dstConfig->gain = srcConfig->gain; - } - } else { - dstConfig->gain.index = -1; - } - if (dstConfig->gain.index != -1) { - dstConfig->config_mask |= AUDIO_PORT_CONFIG_GAIN; - } else { - dstConfig->config_mask &= ~AUDIO_PORT_CONFIG_GAIN; - } -} - -// --- IOProfile class implementation - -AudioPolicyManager::IOProfile::IOProfile(const String8& name, audio_port_role_t role, - const sp<HwModule>& module) - : AudioPort(name, AUDIO_PORT_TYPE_MIX, role, module) -{ -} - -AudioPolicyManager::IOProfile::~IOProfile() -{ -} - -// checks if the IO profile is compatible with specified parameters. 
-// Sampling rate, format and channel mask must be specified in order to -// get a valid a match -bool AudioPolicyManager::IOProfile::isCompatibleProfile(audio_devices_t device, - String8 address, - uint32_t samplingRate, - uint32_t *updatedSamplingRate, - audio_format_t format, - audio_channel_mask_t channelMask, - uint32_t flags) const -{ - const bool isPlaybackThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SOURCE; - const bool isRecordThread = mType == AUDIO_PORT_TYPE_MIX && mRole == AUDIO_PORT_ROLE_SINK; - ALOG_ASSERT(isPlaybackThread != isRecordThread); - - if (device != AUDIO_DEVICE_NONE && mSupportedDevices.getDevice(device, address) == 0) { - return false; - } - - if (samplingRate == 0) { - return false; - } - uint32_t myUpdatedSamplingRate = samplingRate; - if (isPlaybackThread && checkExactSamplingRate(samplingRate) != NO_ERROR) { - return false; - } - if (isRecordThread && checkCompatibleSamplingRate(samplingRate, &myUpdatedSamplingRate) != - NO_ERROR) { - return false; - } - - if (!audio_is_valid_format(format) || checkFormat(format) != NO_ERROR) { - return false; - } - - if (isPlaybackThread && (!audio_is_output_channel(channelMask) || - checkExactChannelMask(channelMask) != NO_ERROR)) { - return false; - } - if (isRecordThread && (!audio_is_input_channel(channelMask) || - checkCompatibleChannelMask(channelMask) != NO_ERROR)) { - return false; - } - - if (isPlaybackThread && (mFlags & flags) != flags) { - return false; - } - // The only input flag that is allowed to be different is the fast flag. - // An existing fast stream is compatible with a normal track request. - // An existing normal stream is compatible with a fast track request, - // but the fast request will be denied by AudioFlinger and converted to normal track. 
- if (isRecordThread && ((mFlags ^ flags) & - ~AUDIO_INPUT_FLAG_FAST)) { - return false; - } - - if (updatedSamplingRate != NULL) { - *updatedSamplingRate = myUpdatedSamplingRate; - } - return true; -} - -void AudioPolicyManager::IOProfile::dump(int fd) -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - AudioPort::dump(fd, 4); - - snprintf(buffer, SIZE, " - flags: 0x%04x\n", mFlags); - result.append(buffer); - snprintf(buffer, SIZE, " - devices:\n"); - result.append(buffer); - write(fd, result.string(), result.size()); - for (size_t i = 0; i < mSupportedDevices.size(); i++) { - mSupportedDevices[i]->dump(fd, 6, i); - } -} - -void AudioPolicyManager::IOProfile::log() -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - ALOGV(" - sampling rates: "); - for (size_t i = 0; i < mSamplingRates.size(); i++) { - ALOGV(" %d", mSamplingRates[i]); - } - - ALOGV(" - channel masks: "); - for (size_t i = 0; i < mChannelMasks.size(); i++) { - ALOGV(" 0x%04x", mChannelMasks[i]); - } - - ALOGV(" - formats: "); - for (size_t i = 0; i < mFormats.size(); i++) { - ALOGV(" 0x%08x", mFormats[i]); - } - - ALOGV(" - devices: 0x%04x\n", mSupportedDevices.types()); - ALOGV(" - flags: 0x%04x\n", mFlags); -} - - -// --- DeviceDescriptor implementation - - -AudioPolicyManager::DeviceDescriptor::DeviceDescriptor(const String8& name, audio_devices_t type) : - AudioPort(name, AUDIO_PORT_TYPE_DEVICE, - audio_is_output_device(type) ? 
AUDIO_PORT_ROLE_SINK : - AUDIO_PORT_ROLE_SOURCE, - NULL), - mDeviceType(type), mAddress(""), mId(0) -{ -} - -bool AudioPolicyManager::DeviceDescriptor::equals(const sp<DeviceDescriptor>& other) const -{ - // Devices are considered equal if they: - // - are of the same type (a device type cannot be AUDIO_DEVICE_NONE) - // - have the same address or one device does not specify the address - // - have the same channel mask or one device does not specify the channel mask - return (mDeviceType == other->mDeviceType) && - (mAddress == "" || other->mAddress == "" || mAddress == other->mAddress) && - (mChannelMask == 0 || other->mChannelMask == 0 || - mChannelMask == other->mChannelMask); -} - -void AudioPolicyManager::DeviceDescriptor::loadGains(cnode *root) -{ - AudioPort::loadGains(root); - if (mGains.size() > 0) { - mGains[0]->getDefaultConfig(&mGain); - } -} - - -void AudioPolicyManager::DeviceVector::refreshTypes() -{ - mDeviceTypes = AUDIO_DEVICE_NONE; - for(size_t i = 0; i < size(); i++) { - mDeviceTypes |= itemAt(i)->mDeviceType; - } - ALOGV("DeviceVector::refreshTypes() mDeviceTypes %08x", mDeviceTypes); -} - -ssize_t AudioPolicyManager::DeviceVector::indexOf(const sp<DeviceDescriptor>& item) const -{ - for(size_t i = 0; i < size(); i++) { - if (item->equals(itemAt(i))) { - return i; - } - } - return -1; -} - -ssize_t AudioPolicyManager::DeviceVector::add(const sp<DeviceDescriptor>& item) -{ - ssize_t ret = indexOf(item); - - if (ret < 0) { - ret = SortedVector::add(item); - if (ret >= 0) { - refreshTypes(); - } - } else { - ALOGW("DeviceVector::add device %08x already in", item->mDeviceType); - ret = -1; - } - return ret; -} - -ssize_t AudioPolicyManager::DeviceVector::remove(const sp<DeviceDescriptor>& item) -{ - size_t i; - ssize_t ret = indexOf(item); - - if (ret < 0) { - ALOGW("DeviceVector::remove device %08x not in", item->mDeviceType); - } else { - ret = SortedVector::removeAt(ret); - if (ret >= 0) { - refreshTypes(); - } - } - return ret; -} - -void 
AudioPolicyManager::DeviceVector::loadDevicesFromType(audio_devices_t types) -{ - DeviceVector deviceList; - - uint32_t role_bit = AUDIO_DEVICE_BIT_IN & types; - types &= ~role_bit; - - while (types) { - uint32_t i = 31 - __builtin_clz(types); - uint32_t type = 1 << i; - types &= ~type; - add(new DeviceDescriptor(String8(""), type | role_bit)); - } -} - -void AudioPolicyManager::DeviceVector::loadDevicesFromName(char *name, - const DeviceVector& declaredDevices) -{ - char *devName = strtok(name, "|"); - while (devName != NULL) { - if (strlen(devName) != 0) { - audio_devices_t type = stringToEnum(sDeviceNameToEnumTable, - ARRAY_SIZE(sDeviceNameToEnumTable), - devName); - if (type != AUDIO_DEVICE_NONE) { - sp<DeviceDescriptor> dev = new DeviceDescriptor(String8(""), type); - if (type == AUDIO_DEVICE_IN_REMOTE_SUBMIX || - type == AUDIO_DEVICE_OUT_REMOTE_SUBMIX ) { - dev->mAddress = String8("0"); - } - add(dev); - } else { - sp<DeviceDescriptor> deviceDesc = - declaredDevices.getDeviceFromName(String8(devName)); - if (deviceDesc != 0) { - add(deviceDesc); - } - } - } - devName = strtok(NULL, "|"); - } -} - -sp<AudioPolicyManager::DeviceDescriptor> AudioPolicyManager::DeviceVector::getDevice( - audio_devices_t type, String8 address) const -{ - sp<DeviceDescriptor> device; - for (size_t i = 0; i < size(); i++) { - if (itemAt(i)->mDeviceType == type) { - if (address == "" || itemAt(i)->mAddress == address) { - device = itemAt(i); - if (itemAt(i)->mAddress == address) { - break; - } - } - } - } - ALOGV("DeviceVector::getDevice() for type %08x address %s found %p", - type, address.string(), device.get()); - return device; -} - -sp<AudioPolicyManager::DeviceDescriptor> AudioPolicyManager::DeviceVector::getDeviceFromId( - audio_port_handle_t id) const -{ - sp<DeviceDescriptor> device; - for (size_t i = 0; i < size(); i++) { - ALOGV("DeviceVector::getDeviceFromId(%d) itemAt(%zu)->mId %d", id, i, itemAt(i)->mId); - if (itemAt(i)->mId == id) { - device = itemAt(i); - break; - } 
- } - return device; -} - -AudioPolicyManager::DeviceVector AudioPolicyManager::DeviceVector::getDevicesFromType( - audio_devices_t type) const -{ - DeviceVector devices; - bool isOutput = audio_is_output_devices(type); - type &= ~AUDIO_DEVICE_BIT_IN; - for (size_t i = 0; (i < size()) && (type != AUDIO_DEVICE_NONE); i++) { - bool curIsOutput = audio_is_output_devices(itemAt(i)->mDeviceType); - audio_devices_t curType = itemAt(i)->mDeviceType & ~AUDIO_DEVICE_BIT_IN; - if ((isOutput == curIsOutput) && ((type & curType) != 0)) { - devices.add(itemAt(i)); - type &= ~curType; - ALOGV("DeviceVector::getDevicesFromType() for type %x found %p", - itemAt(i)->mDeviceType, itemAt(i).get()); - } - } - return devices; -} - -AudioPolicyManager::DeviceVector AudioPolicyManager::DeviceVector::getDevicesFromTypeAddr( - audio_devices_t type, String8 address) const -{ - DeviceVector devices; - for (size_t i = 0; i < size(); i++) { - if (itemAt(i)->mDeviceType == type) { - if (itemAt(i)->mAddress == address) { - devices.add(itemAt(i)); - } - } - } - return devices; -} - -sp<AudioPolicyManager::DeviceDescriptor> AudioPolicyManager::DeviceVector::getDeviceFromName( - const String8& name) const -{ - sp<DeviceDescriptor> device; - for (size_t i = 0; i < size(); i++) { - if (itemAt(i)->mName == name) { - device = itemAt(i); - break; - } - } - return device; -} - -void AudioPolicyManager::DeviceDescriptor::toAudioPortConfig( - struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig) const -{ - dstConfig->config_mask = AUDIO_PORT_CONFIG_CHANNEL_MASK|AUDIO_PORT_CONFIG_GAIN; - if (srcConfig != NULL) { - dstConfig->config_mask |= srcConfig->config_mask; - } - - AudioPortConfig::toAudioPortConfig(dstConfig, srcConfig); - - dstConfig->id = mId; - dstConfig->role = audio_is_output_device(mDeviceType) ? 
- AUDIO_PORT_ROLE_SINK : AUDIO_PORT_ROLE_SOURCE; - dstConfig->type = AUDIO_PORT_TYPE_DEVICE; - dstConfig->ext.device.type = mDeviceType; - dstConfig->ext.device.hw_module = mModule->mHandle; - strncpy(dstConfig->ext.device.address, mAddress.string(), AUDIO_DEVICE_MAX_ADDRESS_LEN); -} - -void AudioPolicyManager::DeviceDescriptor::toAudioPort(struct audio_port *port) const -{ - ALOGV("DeviceDescriptor::toAudioPort() handle %d type %x", mId, mDeviceType); - AudioPort::toAudioPort(port); - port->id = mId; - toAudioPortConfig(&port->active_config); - port->ext.device.type = mDeviceType; - port->ext.device.hw_module = mModule->mHandle; - strncpy(port->ext.device.address, mAddress.string(), AUDIO_DEVICE_MAX_ADDRESS_LEN); -} - -status_t AudioPolicyManager::DeviceDescriptor::dump(int fd, int spaces, int index) const -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - snprintf(buffer, SIZE, "%*sDevice %d:\n", spaces, "", index+1); - result.append(buffer); - if (mId != 0) { - snprintf(buffer, SIZE, "%*s- id: %2d\n", spaces, "", mId); - result.append(buffer); - } - snprintf(buffer, SIZE, "%*s- type: %-48s\n", spaces, "", - enumToString(sDeviceNameToEnumTable, - ARRAY_SIZE(sDeviceNameToEnumTable), - mDeviceType)); - result.append(buffer); - if (mAddress.size() != 0) { - snprintf(buffer, SIZE, "%*s- address: %-32s\n", spaces, "", mAddress.string()); - result.append(buffer); - } - write(fd, result.string(), result.size()); - AudioPort::dump(fd, spaces); - - return NO_ERROR; -} - -status_t AudioPolicyManager::AudioPatch::dump(int fd, int spaces, int index) const -{ - const size_t SIZE = 256; - char buffer[SIZE]; - String8 result; - - - snprintf(buffer, SIZE, "%*sAudio patch %d:\n", spaces, "", index+1); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- handle: %2d\n", spaces, "", mHandle); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- audio flinger handle: %2d\n", spaces, "", mAfPatchHandle); - result.append(buffer); - snprintf(buffer, SIZE, 
"%*s- owner uid: %2d\n", spaces, "", mUid); - result.append(buffer); - snprintf(buffer, SIZE, "%*s- %d sources:\n", spaces, "", mPatch.num_sources); - result.append(buffer); - for (size_t i = 0; i < mPatch.num_sources; i++) { - if (mPatch.sources[i].type == AUDIO_PORT_TYPE_DEVICE) { - snprintf(buffer, SIZE, "%*s- Device ID %d %s\n", spaces + 2, "", - mPatch.sources[i].id, enumToString(sDeviceNameToEnumTable, - ARRAY_SIZE(sDeviceNameToEnumTable), - mPatch.sources[i].ext.device.type)); - } else { - snprintf(buffer, SIZE, "%*s- Mix ID %d I/O handle %d\n", spaces + 2, "", - mPatch.sources[i].id, mPatch.sources[i].ext.mix.handle); - } - result.append(buffer); - } - snprintf(buffer, SIZE, "%*s- %d sinks:\n", spaces, "", mPatch.num_sinks); - result.append(buffer); - for (size_t i = 0; i < mPatch.num_sinks; i++) { - if (mPatch.sinks[i].type == AUDIO_PORT_TYPE_DEVICE) { - snprintf(buffer, SIZE, "%*s- Device ID %d %s\n", spaces + 2, "", - mPatch.sinks[i].id, enumToString(sDeviceNameToEnumTable, - ARRAY_SIZE(sDeviceNameToEnumTable), - mPatch.sinks[i].ext.device.type)); - } else { - snprintf(buffer, SIZE, "%*s- Mix ID %d I/O handle %d\n", spaces + 2, "", - mPatch.sinks[i].id, mPatch.sinks[i].ext.mix.handle); - } - result.append(buffer); - } - - write(fd, result.string(), result.size()); - return NO_ERROR; -} - -// --- audio_policy.conf file parsing - -uint32_t AudioPolicyManager::parseOutputFlagNames(char *name) -{ - uint32_t flag = 0; - - // it is OK to cast name to non const here as we are not going to use it after - // strtok() modifies it - char *flagName = strtok(name, "|"); - while (flagName != NULL) { - if (strlen(flagName) != 0) { - flag |= stringToEnum(sOutputFlagNameToEnumTable, - ARRAY_SIZE(sOutputFlagNameToEnumTable), - flagName); - } - flagName = strtok(NULL, "|"); - } - //force direct flag if offload flag is set: offloading implies a direct output stream - // and all common behaviors are driven by checking only the direct flag - // this should normally be set 
appropriately in the policy configuration file - if ((flag & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) { - flag |= AUDIO_OUTPUT_FLAG_DIRECT; - } - - return flag; -} - -uint32_t AudioPolicyManager::parseInputFlagNames(char *name) -{ - uint32_t flag = 0; - - // it is OK to cast name to non const here as we are not going to use it after - // strtok() modifies it - char *flagName = strtok(name, "|"); - while (flagName != NULL) { - if (strlen(flagName) != 0) { - flag |= stringToEnum(sInputFlagNameToEnumTable, - ARRAY_SIZE(sInputFlagNameToEnumTable), - flagName); - } - flagName = strtok(NULL, "|"); - } - return flag; -} - -audio_devices_t AudioPolicyManager::parseDeviceNames(char *name) -{ - uint32_t device = 0; - - char *devName = strtok(name, "|"); - while (devName != NULL) { - if (strlen(devName) != 0) { - device |= stringToEnum(sDeviceNameToEnumTable, - ARRAY_SIZE(sDeviceNameToEnumTable), - devName); - } - devName = strtok(NULL, "|"); - } - return device; -} - -void AudioPolicyManager::loadHwModule(cnode *root) -{ - status_t status = NAME_NOT_FOUND; - cnode *node; - sp<HwModule> module = new HwModule(root->name); - - node = config_find(root, DEVICES_TAG); - if (node != NULL) { - node = node->first_child; - while (node) { - ALOGV("loadHwModule() loading device %s", node->name); - status_t tmpStatus = module->loadDevice(node); - if (status == NAME_NOT_FOUND || status == NO_ERROR) { - status = tmpStatus; - } - node = node->next; - } - } - node = config_find(root, OUTPUTS_TAG); - if (node != NULL) { - node = node->first_child; - while (node) { - ALOGV("loadHwModule() loading output %s", node->name); - status_t tmpStatus = module->loadOutput(node); - if (status == NAME_NOT_FOUND || status == NO_ERROR) { - status = tmpStatus; - } - node = node->next; - } - } - node = config_find(root, INPUTS_TAG); - if (node != NULL) { - node = node->first_child; - while (node) { - ALOGV("loadHwModule() loading input %s", node->name); - status_t tmpStatus = module->loadInput(node); - if 
(status == NAME_NOT_FOUND || status == NO_ERROR) { - status = tmpStatus; - } - node = node->next; - } - } - loadGlobalConfig(root, module); - - if (status == NO_ERROR) { - mHwModules.add(module); - } -} - -void AudioPolicyManager::loadHwModules(cnode *root) -{ - cnode *node = config_find(root, AUDIO_HW_MODULE_TAG); - if (node == NULL) { - return; - } - - node = node->first_child; - while (node) { - ALOGV("loadHwModules() loading module %s", node->name); - loadHwModule(node); - node = node->next; - } -} - -void AudioPolicyManager::loadGlobalConfig(cnode *root, const sp<HwModule>& module) -{ - cnode *node = config_find(root, GLOBAL_CONFIG_TAG); - - if (node == NULL) { - return; - } - DeviceVector declaredDevices; - if (module != NULL) { - declaredDevices = module->mDeclaredDevices; - } - - node = node->first_child; - while (node) { - if (strcmp(ATTACHED_OUTPUT_DEVICES_TAG, node->name) == 0) { - mAvailableOutputDevices.loadDevicesFromName((char *)node->value, - declaredDevices); - ALOGV("loadGlobalConfig() Attached Output Devices %08x", - mAvailableOutputDevices.types()); - } else if (strcmp(DEFAULT_OUTPUT_DEVICE_TAG, node->name) == 0) { - audio_devices_t device = (audio_devices_t)stringToEnum(sDeviceNameToEnumTable, - ARRAY_SIZE(sDeviceNameToEnumTable), - (char *)node->value); - if (device != AUDIO_DEVICE_NONE) { - mDefaultOutputDevice = new DeviceDescriptor(String8(""), device); - } else { - ALOGW("loadGlobalConfig() default device not specified"); - } - ALOGV("loadGlobalConfig() mDefaultOutputDevice %08x", mDefaultOutputDevice->mDeviceType); - } else if (strcmp(ATTACHED_INPUT_DEVICES_TAG, node->name) == 0) { - mAvailableInputDevices.loadDevicesFromName((char *)node->value, - declaredDevices); - ALOGV("loadGlobalConfig() Available InputDevices %08x", mAvailableInputDevices.types()); - } else if (strcmp(SPEAKER_DRC_ENABLED_TAG, node->name) == 0) { - mSpeakerDrcEnabled = stringToBool((char *)node->value); - ALOGV("loadGlobalConfig() mSpeakerDrcEnabled = %d", 
mSpeakerDrcEnabled); - } else if (strcmp(AUDIO_HAL_VERSION_TAG, node->name) == 0) { - uint32_t major, minor; - sscanf((char *)node->value, "%u.%u", &major, &minor); - module->mHalVersion = HARDWARE_DEVICE_API_VERSION(major, minor); - ALOGV("loadGlobalConfig() mHalVersion = %04x major %u minor %u", - module->mHalVersion, major, minor); - } - node = node->next; - } -} - -status_t AudioPolicyManager::loadAudioPolicyConfig(const char *path) -{ - cnode *root; - char *data; - - data = (char *)load_file(path, NULL); - if (data == NULL) { - return -ENODEV; - } - root = config_node("", ""); - config_load(root, data); - - loadHwModules(root); - // legacy audio_policy.conf files have one global_configuration section - loadGlobalConfig(root, getModuleFromName(AUDIO_HARDWARE_MODULE_ID_PRIMARY)); - config_free(root); - free(root); - free(data); - - ALOGI("loadAudioPolicyConfig() loaded %s\n", path); - - return NO_ERROR; -} - void AudioPolicyManager::defaultAudioPolicyConfig(void) { sp<HwModule> module; sp<IOProfile> profile; - sp<DeviceDescriptor> defaultInputDevice = new DeviceDescriptor(String8(""), - AUDIO_DEVICE_IN_BUILTIN_MIC); + sp<DeviceDescriptor> defaultInputDevice = + new DeviceDescriptor(String8("builtin-mic"), AUDIO_DEVICE_IN_BUILTIN_MIC); mAvailableOutputDevices.add(mDefaultOutputDevice); mAvailableInputDevices.add(defaultInputDevice); @@ -8064,7 +4491,8 @@ audio_stream_type_t AudioPolicyManager::streamTypefromAttributesInt(const audio_ } } -bool AudioPolicyManager::isValidAttributes(const audio_attributes_t *paa) { +bool AudioPolicyManager::isValidAttributes(const audio_attributes_t *paa) +{ // has flags that map to a strategy? 
if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_SCO | AUDIO_FLAG_BEACON)) != 0) { return true; @@ -8095,4 +4523,39 @@ bool AudioPolicyManager::isValidAttributes(const audio_attributes_t *paa) { return true; } +bool AudioPolicyManager::isStrategyActive(const sp<AudioOutputDescriptor> outputDesc, + routing_strategy strategy, uint32_t inPastMs, + nsecs_t sysTime) const +{ + if ((sysTime == 0) && (inPastMs != 0)) { + sysTime = systemTime(); + } + for (int i = 0; i < (int)AUDIO_STREAM_CNT; i++) { + if (i == AUDIO_STREAM_PATCH) { + continue; + } + if (((getStrategy((audio_stream_type_t)i) == strategy) || + (NUM_STRATEGIES == strategy)) && + outputDesc->isStreamActive((audio_stream_type_t)i, inPastMs, sysTime)) { + return true; + } + } + return false; +} + +audio_policy_forced_cfg_t AudioPolicyManager::getForceUse(audio_policy_force_use_t usage) +{ + return mEngine->getForceUse(usage); +} + +bool AudioPolicyManager::isInCall() +{ + return isStateInCall(mEngine->getPhoneState()); +} + +bool AudioPolicyManager::isStateInCall(int state) +{ + return is_state_in_call(state); +} + }; // namespace android diff --git a/services/audiopolicy/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h index cbdafa6..02b678a 100644 --- a/services/audiopolicy/AudioPolicyManager.h +++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h @@ -14,6 +14,7 @@ * limitations under the License. 
*/ +#pragma once #include <stdint.h> #include <sys/types.h> @@ -26,6 +27,21 @@ #include <media/AudioPolicy.h> #include "AudioPolicyInterface.h" +#include <AudioPolicyManagerInterface.h> +#include <AudioPolicyManagerObserver.h> +#include <AudioGain.h> +#include <AudioPort.h> +#include <AudioPatch.h> +#include <ConfigParsingUtils.h> +#include <DeviceDescriptor.h> +#include <IOProfile.h> +#include <HwModule.h> +#include <AudioInputDescriptor.h> +#include <AudioOutputDescriptor.h> +#include <AudioPolicyMix.h> +#include <EffectDescriptor.h> +#include <SoundTriggerSession.h> +#include <StreamDescriptor.h> namespace android { @@ -38,9 +54,7 @@ namespace android { // Time in milliseconds during which we consider that music is still active after a music // track was stopped - see computeVolume() #define SONIFICATION_HEADSET_MUSIC_DELAY 5000 -// Time in milliseconds after media stopped playing during which we consider that the -// sonification should be as unobtrusive as during the time media was playing. -#define SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY 5000 + // Time in milliseconds during witch some streams are muted while the audio path // is switched #define MUTE_TIME_MS 2000 @@ -53,14 +67,12 @@ namespace android { // Can be overridden by the audio.offload.min.duration.secs property #define OFFLOAD_DEFAULT_MIN_DURATION_SECS 60 -#define MAX_MIXER_SAMPLING_RATE 48000 -#define MAX_MIXER_CHANNEL_COUNT 8 - // ---------------------------------------------------------------------------- // AudioPolicyManager implements audio policy manager behavior common to all platforms. 
// ---------------------------------------------------------------------------- -class AudioPolicyManager: public AudioPolicyInterface +class AudioPolicyManager : public AudioPolicyInterface, public AudioPolicyManagerObserver + #ifdef AUDIO_POLICY_TEST , public Thread #endif //AUDIO_POLICY_TEST @@ -73,13 +85,15 @@ public: // AudioPolicyInterface virtual status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address); + const char *device_address, + const char *device_name); virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device, const char *device_address); virtual void setPhoneState(audio_mode_t state); virtual void setForceUse(audio_policy_force_use_t usage, audio_policy_forced_cfg_t config); virtual audio_policy_forced_cfg_t getForceUse(audio_policy_force_use_t usage); + virtual void setSystemProperty(const char* property, const char* value); virtual status_t initCheck(); virtual audio_io_handle_t getOutput(audio_stream_type_t stream, @@ -149,16 +163,28 @@ public: uint32_t strategy, int session, int id); - virtual status_t unregisterEffect(int id); - virtual status_t setEffectEnabled(int id, bool enabled); + virtual status_t unregisterEffect(int id) + { + return mEffects.unregisterEffect(id); + } + virtual status_t setEffectEnabled(int id, bool enabled) + { + return mEffects.setEffectEnabled(id, enabled); + } - virtual bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const; + virtual bool isStreamActive(audio_stream_type_t stream, uint32_t inPastMs = 0) const + { + return mOutputs.isStreamActive(stream, inPastMs); + } // return whether a stream is playing remotely, override to change the definition of // local/remote playback, used for instance by notification manager to not make // media players lose audio focus when not playing locally // For the base implementation, "remotely" means playing during screen mirroring which // uses an output for playback with a 
non-empty, non "0" address. - virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) const; + virtual bool isStreamActiveRemotely(audio_stream_type_t stream, uint32_t inPastMs = 0) const + { + return mOutputs.isStreamActiveRemotely(stream, inPastMs); + } virtual bool isSourceActive(audio_source_t source) const; virtual status_t dump(int fd); @@ -186,402 +212,63 @@ public: audio_io_handle_t *ioHandle, audio_devices_t *device); - virtual status_t releaseSoundTriggerSession(audio_session_t session); + virtual status_t releaseSoundTriggerSession(audio_session_t session) + { + return mSoundTriggerSessions.releaseSession(session); + } virtual status_t registerPolicyMixes(Vector<AudioMix> mixes); virtual status_t unregisterPolicyMixes(Vector<AudioMix> mixes); -protected: - - enum routing_strategy { - STRATEGY_MEDIA, - STRATEGY_PHONE, - STRATEGY_SONIFICATION, - STRATEGY_SONIFICATION_RESPECTFUL, - STRATEGY_DTMF, - STRATEGY_ENFORCED_AUDIBLE, - STRATEGY_TRANSMITTED_THROUGH_SPEAKER, - STRATEGY_ACCESSIBILITY, - STRATEGY_REROUTING, - NUM_STRATEGIES - }; - - // 4 points to define the volume attenuation curve, each characterized by the volume - // index (from 0 to 100) at which they apply, and the attenuation in dB at that index. - // we use 100 steps to avoid rounding errors when computing the volume in volIndexToAmpl() - - enum { VOLMIN = 0, VOLKNEE1 = 1, VOLKNEE2 = 2, VOLMAX = 3, VOLCNT = 4}; - - class VolumeCurvePoint - { - public: - int mIndex; - float mDBAttenuation; - }; - - // device categories used for volume curve management. 
- enum device_category { - DEVICE_CATEGORY_HEADSET, - DEVICE_CATEGORY_SPEAKER, - DEVICE_CATEGORY_EARPIECE, - DEVICE_CATEGORY_EXT_MEDIA, - DEVICE_CATEGORY_CNT - }; - - class HwModule; - - class AudioGain: public RefBase - { - public: - AudioGain(int index, bool useInChannelMask); - virtual ~AudioGain() {} - - void dump(int fd, int spaces, int index) const; - - void getDefaultConfig(struct audio_gain_config *config); - status_t checkConfig(const struct audio_gain_config *config); - int mIndex; - struct audio_gain mGain; - bool mUseInChannelMask; - }; - - class AudioPort: public virtual RefBase - { - public: - AudioPort(const String8& name, audio_port_type_t type, - audio_port_role_t role, const sp<HwModule>& module); - virtual ~AudioPort() {} - - virtual void toAudioPort(struct audio_port *port) const; - - void importAudioPort(const sp<AudioPort> port); - void clearCapabilities(); - - void loadSamplingRates(char *name); - void loadFormats(char *name); - void loadOutChannels(char *name); - void loadInChannels(char *name); - - audio_gain_mode_t loadGainMode(char *name); - void loadGain(cnode *root, int index); - virtual void loadGains(cnode *root); - - // searches for an exact match - status_t checkExactSamplingRate(uint32_t samplingRate) const; - // searches for a compatible match, and returns the best match via updatedSamplingRate - status_t checkCompatibleSamplingRate(uint32_t samplingRate, - uint32_t *updatedSamplingRate) const; - // searches for an exact match - status_t checkExactChannelMask(audio_channel_mask_t channelMask) const; - // searches for a compatible match, currently implemented for input channel masks only - status_t checkCompatibleChannelMask(audio_channel_mask_t channelMask) const; - status_t checkFormat(audio_format_t format) const; - status_t checkGain(const struct audio_gain_config *gainConfig, int index) const; - - uint32_t pickSamplingRate() const; - audio_channel_mask_t pickChannelMask() const; - audio_format_t pickFormat() const; - - static 
const audio_format_t sPcmFormatCompareTable[]; - static int compareFormats(audio_format_t format1, audio_format_t format2); - - void dump(int fd, int spaces) const; - - String8 mName; - audio_port_type_t mType; - audio_port_role_t mRole; - bool mUseInChannelMask; - // by convention, "0' in the first entry in mSamplingRates, mChannelMasks or mFormats - // indicates the supported parameters should be read from the output stream - // after it is opened for the first time - Vector <uint32_t> mSamplingRates; // supported sampling rates - Vector <audio_channel_mask_t> mChannelMasks; // supported channel masks - Vector <audio_format_t> mFormats; // supported audio formats - Vector < sp<AudioGain> > mGains; // gain controllers - sp<HwModule> mModule; // audio HW module exposing this I/O stream - uint32_t mFlags; // attribute flags (e.g primary output, - // direct output...). - }; - - class AudioPortConfig: public virtual RefBase - { - public: - AudioPortConfig(); - virtual ~AudioPortConfig() {} - - status_t applyAudioPortConfig(const struct audio_port_config *config, - struct audio_port_config *backupConfig = NULL); - virtual void toAudioPortConfig(struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig = NULL) const = 0; - virtual sp<AudioPort> getAudioPort() const = 0; - uint32_t mSamplingRate; - audio_format_t mFormat; - audio_channel_mask_t mChannelMask; - struct audio_gain_config mGain; - }; + // Audio policy configuration file parsing (audio_policy.conf) + // TODO candidates to be moved to ConfigParsingUtils + void defaultAudioPolicyConfig(void); + // return the strategy corresponding to a given stream type + routing_strategy getStrategy(audio_stream_type_t stream) const; - class AudioPatch: public RefBase + // From AudioPolicyManagerObserver + virtual const AudioPatchCollection &getAudioPatches() const { - public: - AudioPatch(audio_patch_handle_t handle, - const struct audio_patch *patch, uid_t uid) : - mHandle(handle), mPatch(*patch), 
mUid(uid), mAfPatchHandle(0) {} - - status_t dump(int fd, int spaces, int index) const; - - audio_patch_handle_t mHandle; - struct audio_patch mPatch; - uid_t mUid; - audio_patch_handle_t mAfPatchHandle; - }; - - class DeviceDescriptor: public AudioPort, public AudioPortConfig + return mAudioPatches; + } + virtual const SoundTriggerSessionCollection &getSoundTriggerSessionCollection() const { - public: - DeviceDescriptor(const String8& name, audio_devices_t type); - - virtual ~DeviceDescriptor() {} - - bool equals(const sp<DeviceDescriptor>& other) const; - - // AudioPortConfig - virtual sp<AudioPort> getAudioPort() const { return (AudioPort*) this; } - virtual void toAudioPortConfig(struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig = NULL) const; - - // AudioPort - virtual void loadGains(cnode *root); - virtual void toAudioPort(struct audio_port *port) const; - - status_t dump(int fd, int spaces, int index) const; - - audio_devices_t mDeviceType; - String8 mAddress; - audio_port_handle_t mId; - }; - - class DeviceVector : public SortedVector< sp<DeviceDescriptor> > + return mSoundTriggerSessions; + } + virtual const AudioPolicyMixCollection &getAudioPolicyMixCollection() const { - public: - DeviceVector() : SortedVector(), mDeviceTypes(AUDIO_DEVICE_NONE) {} - - ssize_t add(const sp<DeviceDescriptor>& item); - ssize_t remove(const sp<DeviceDescriptor>& item); - ssize_t indexOf(const sp<DeviceDescriptor>& item) const; - - audio_devices_t types() const { return mDeviceTypes; } - - void loadDevicesFromType(audio_devices_t types); - void loadDevicesFromName(char *name, const DeviceVector& declaredDevices); - - sp<DeviceDescriptor> getDevice(audio_devices_t type, String8 address) const; - DeviceVector getDevicesFromType(audio_devices_t types) const; - sp<DeviceDescriptor> getDeviceFromId(audio_port_handle_t id) const; - sp<DeviceDescriptor> getDeviceFromName(const String8& name) const; - DeviceVector getDevicesFromTypeAddr(audio_devices_t 
type, String8 address) - const; - - private: - void refreshTypes(); - audio_devices_t mDeviceTypes; - }; - - // the IOProfile class describes the capabilities of an output or input stream. - // It is currently assumed that all combination of listed parameters are supported. - // It is used by the policy manager to determine if an output or input is suitable for - // a given use case, open/close it accordingly and connect/disconnect audio tracks - // to/from it. - class IOProfile : public AudioPort + return mPolicyMixes; + } + virtual const AudioOutputCollection &getOutputs() const { - public: - IOProfile(const String8& name, audio_port_role_t role, const sp<HwModule>& module); - virtual ~IOProfile(); - - // This method is used for both output and input. - // If parameter updatedSamplingRate is non-NULL, it is assigned the actual sample rate. - // For input, flags is interpreted as audio_input_flags_t. - // TODO: merge audio_output_flags_t and audio_input_flags_t. - bool isCompatibleProfile(audio_devices_t device, - String8 address, - uint32_t samplingRate, - uint32_t *updatedSamplingRate, - audio_format_t format, - audio_channel_mask_t channelMask, - uint32_t flags) const; - - void dump(int fd); - void log(); - - DeviceVector mSupportedDevices; // supported devices - // (devices this output can be routed to) - }; - - class HwModule : public RefBase + return mOutputs; + } + virtual const AudioInputCollection &getInputs() const { - public: - HwModule(const char *name); - ~HwModule(); - - status_t loadOutput(cnode *root); - status_t loadInput(cnode *root); - status_t loadDevice(cnode *root); - - status_t addOutputProfile(String8 name, const audio_config_t *config, - audio_devices_t device, String8 address); - status_t removeOutputProfile(String8 name); - status_t addInputProfile(String8 name, const audio_config_t *config, - audio_devices_t device, String8 address); - status_t removeInputProfile(String8 name); - - void dump(int fd); - - const char *const mName; // base 
name of the audio HW module (primary, a2dp ...) - uint32_t mHalVersion; // audio HAL API version - audio_module_handle_t mHandle; - Vector < sp<IOProfile> > mOutputProfiles; // output profiles exposed by this module - Vector < sp<IOProfile> > mInputProfiles; // input profiles exposed by this module - DeviceVector mDeclaredDevices; // devices declared in audio_policy.conf - - }; - - // default volume curve - static const VolumeCurvePoint sDefaultVolumeCurve[AudioPolicyManager::VOLCNT]; - // default volume curve for media strategy - static const VolumeCurvePoint sDefaultMediaVolumeCurve[AudioPolicyManager::VOLCNT]; - // volume curve for non-media audio on ext media outputs (HDMI, Line, etc) - static const VolumeCurvePoint sExtMediaSystemVolumeCurve[AudioPolicyManager::VOLCNT]; - // volume curve for media strategy on speakers - static const VolumeCurvePoint sSpeakerMediaVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sSpeakerMediaVolumeCurveDrc[AudioPolicyManager::VOLCNT]; - // volume curve for sonification strategy on speakers - static const VolumeCurvePoint sSpeakerSonificationVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sSpeakerSonificationVolumeCurveDrc[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sDefaultSystemVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sDefaultSystemVolumeCurveDrc[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sHeadsetSystemVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sDefaultVoiceVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sSpeakerVoiceVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sLinearVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sSilentVolumeCurve[AudioPolicyManager::VOLCNT]; - static const VolumeCurvePoint sFullScaleVolumeCurve[AudioPolicyManager::VOLCNT]; - // default volume curves per stream and device category. 
See initializeVolumeCurves() - static const VolumeCurvePoint *sVolumeProfiles[AUDIO_STREAM_CNT][DEVICE_CATEGORY_CNT]; - - // descriptor for audio outputs. Used to maintain current configuration of each opened audio output - // and keep track of the usage of this output by each audio stream type. - class AudioOutputDescriptor: public AudioPortConfig + return mInputs; + } + virtual const DeviceVector &getAvailableOutputDevices() const { - public: - AudioOutputDescriptor(const sp<IOProfile>& profile); - - status_t dump(int fd); - - audio_devices_t device() const; - void changeRefCount(audio_stream_type_t stream, int delta); - - bool isDuplicated() const { return (mOutput1 != NULL && mOutput2 != NULL); } - audio_devices_t supportedDevices(); - uint32_t latency(); - bool sharesHwModuleWith(const sp<AudioOutputDescriptor> outputDesc); - bool isActive(uint32_t inPastMs = 0) const; - bool isStreamActive(audio_stream_type_t stream, - uint32_t inPastMs = 0, - nsecs_t sysTime = 0) const; - bool isStrategyActive(routing_strategy strategy, - uint32_t inPastMs = 0, - nsecs_t sysTime = 0) const; - - virtual void toAudioPortConfig(struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig = NULL) const; - virtual sp<AudioPort> getAudioPort() const { return mProfile; } - void toAudioPort(struct audio_port *port) const; - - audio_port_handle_t mId; - audio_io_handle_t mIoHandle; // output handle - uint32_t mLatency; // - audio_output_flags_t mFlags; // - audio_devices_t mDevice; // current device this output is routed to - AudioMix *mPolicyMix; // non NULL when used by a dynamic policy - audio_patch_handle_t mPatchHandle; - uint32_t mRefCount[AUDIO_STREAM_CNT]; // number of streams of each type using this output - nsecs_t mStopTime[AUDIO_STREAM_CNT]; - sp<AudioOutputDescriptor> mOutput1; // used by duplicated outputs: first output - sp<AudioOutputDescriptor> mOutput2; // used by duplicated outputs: second output - float mCurVolume[AUDIO_STREAM_CNT]; // current 
stream volume - int mMuteCount[AUDIO_STREAM_CNT]; // mute request counter - const sp<IOProfile> mProfile; // I/O profile this output derives from - bool mStrategyMutedByDevice[NUM_STRATEGIES]; // strategies muted because of incompatible - // device selection. See checkDeviceMuteStrategies() - uint32_t mDirectOpenCount; // number of clients using this output (direct outputs only) - }; - - // descriptor for audio inputs. Used to maintain current configuration of each opened audio input - // and keep track of the usage of this input. - class AudioInputDescriptor: public AudioPortConfig + return mAvailableOutputDevices; + } + virtual const DeviceVector &getAvailableInputDevices() const { - public: - AudioInputDescriptor(const sp<IOProfile>& profile); - - status_t dump(int fd); - - audio_port_handle_t mId; - audio_io_handle_t mIoHandle; // input handle - audio_devices_t mDevice; // current device this input is routed to - AudioMix *mPolicyMix; // non NULL when used by a dynamic policy - audio_patch_handle_t mPatchHandle; - uint32_t mRefCount; // number of AudioRecord clients using - // this input - uint32_t mOpenRefCount; - audio_source_t mInputSource; // input source selected by application - //(mediarecorder.h) - const sp<IOProfile> mProfile; // I/O profile this output derives from - SortedVector<audio_session_t> mSessions; // audio sessions attached to this input - bool mIsSoundTrigger; // used by a soundtrigger capture - - virtual void toAudioPortConfig(struct audio_port_config *dstConfig, - const struct audio_port_config *srcConfig = NULL) const; - virtual sp<AudioPort> getAudioPort() const { return mProfile; } - void toAudioPort(struct audio_port *port) const; - }; - - // stream descriptor used for volume control - class StreamDescriptor + return mAvailableInputDevices; + } + virtual StreamDescriptorCollection &getStreamDescriptors() { - public: - StreamDescriptor(); - - int getVolumeIndex(audio_devices_t device); - void dump(int fd); - - int mIndexMin; // min 
volume index - int mIndexMax; // max volume index - KeyedVector<audio_devices_t, int> mIndexCur; // current volume index per device - bool mCanBeMuted; // true is the stream can be muted - - const VolumeCurvePoint *mVolumeCurve[DEVICE_CATEGORY_CNT]; - }; - - // stream descriptor used for volume control - class EffectDescriptor : public RefBase + return mStreams; + } + virtual const sp<DeviceDescriptor> &getDefaultOutputDevice() const { - public: - - status_t dump(int fd); - - int mIo; // io the effect is attached to - routing_strategy mStrategy; // routing strategy the effect is associated to - int mSession; // audio session the effect is on - effect_descriptor_t mDesc; // effect descriptor - bool mEnabled; // enabled state: CPU load being used or not - }; - + return mDefaultOutputDevice; + } +protected: void addOutput(audio_io_handle_t output, sp<AudioOutputDescriptor> outputDesc); + void removeOutput(audio_io_handle_t output); void addInput(audio_io_handle_t input, sp<AudioInputDescriptor> inputDesc); - // return the strategy corresponding to a given stream type - static routing_strategy getStrategy(audio_stream_type_t stream); - // return appropriate device for streams handled by the specified strategy according to current // phone state, connected devices... // if fromCache is true, the device is returned from mDeviceForStrategy[], @@ -596,6 +283,9 @@ protected: virtual audio_devices_t getDeviceForStrategy(routing_strategy strategy, bool fromCache); + bool isStrategyActive(const sp<AudioOutputDescriptor> outputDesc, routing_strategy strategy, + uint32_t inPastMs = 0, nsecs_t sysTime = 0) const; + // change the route of the specified output. Returns the number of ms we have slept to // allow new routing to take effect in certain cases. 
virtual uint32_t setOutputDevice(audio_io_handle_t output, @@ -617,16 +307,6 @@ protected: // select input device corresponding to requested audio source virtual audio_devices_t getDeviceForInputSource(audio_source_t inputSource); - // return io handle of active input or 0 if no input is active - // Only considers inputs from physical devices (e.g. main mic, headset mic) when - // ignoreVirtualInputs is true. - audio_io_handle_t getActiveInput(bool ignoreVirtualInputs = true); - - uint32_t activeInputsCount() const; - - // initialize volume curves for each strategy and device category - void initializeVolumeCurves(); - // compute the actual volume for a given stream according to the requested index and a particular // device virtual float computeVolume(audio_stream_type_t stream, int index, @@ -659,9 +339,10 @@ protected: // a special tone in the device used for communication void handleIncallSonification(audio_stream_type_t stream, bool starting, bool stateChange); + audio_mode_t getPhoneState(); + // true if device is in a telephony or VoIP call virtual bool isInCall(); - // true if given state represents a device in a telephony or VoIP call virtual bool isStateInCall(int state); @@ -699,9 +380,6 @@ protected: // manages A2DP output suspend/restore according to phone state and BT SCO usage void checkA2dpSuspend(); - // returns the A2DP output handle if it is open or 0 otherwise - audio_io_handle_t getA2dpOutput(); - // selects the most appropriate device on output for current state // must be called every time a condition that affects the device choice for a given output is // changed: connected device, phone state, force use, output start, output stop.. 
@@ -718,24 +396,23 @@ protected: // selects the most appropriate device on input for current state audio_devices_t getNewInputDevice(audio_io_handle_t input); - virtual uint32_t getMaxEffectsCpuLoad(); - virtual uint32_t getMaxEffectsMemory(); + virtual uint32_t getMaxEffectsCpuLoad() + { + return mEffects.getMaxEffectsCpuLoad(); + } + + virtual uint32_t getMaxEffectsMemory() + { + return mEffects.getMaxEffectsMemory(); + } #ifdef AUDIO_POLICY_TEST virtual bool threadLoop(); void exit(); int testOutputIndex(audio_io_handle_t output); #endif //AUDIO_POLICY_TEST - status_t setEffectEnabled(const sp<EffectDescriptor>& effectDesc, bool enabled); - - // returns the category the device belongs to with regard to volume curve management - static device_category getDeviceCategory(audio_devices_t device); - - // extract one device relevant for volume control from multiple device selection - static audio_devices_t getDeviceForVolume(audio_devices_t device); - SortedVector<audio_io_handle_t> getOutputsForDevice(audio_devices_t device, - DefaultKeyedVector<audio_io_handle_t, sp<AudioOutputDescriptor> > openOutputs); + AudioOutputCollection openOutputs); bool vectorsEqual(SortedVector<audio_io_handle_t>& outputs1, SortedVector<audio_io_handle_t>& outputs2); @@ -765,79 +442,57 @@ protected: audio_io_handle_t selectOutputForEffects(const SortedVector<audio_io_handle_t>& outputs); - bool isNonOffloadableEffectEnabled(); - - virtual status_t addAudioPatch(audio_patch_handle_t handle, - const sp<AudioPatch>& patch); - virtual status_t removeAudioPatch(audio_patch_handle_t handle); + virtual status_t addAudioPatch(audio_patch_handle_t handle, const sp<AudioPatch>& patch) + { + return mAudioPatches.addAudioPatch(handle, patch); + } + virtual status_t removeAudioPatch(audio_patch_handle_t handle) + { + return mAudioPatches.removeAudioPatch(handle); + } - sp<AudioOutputDescriptor> getOutputFromId(audio_port_handle_t id) const; - sp<AudioInputDescriptor> getInputFromId(audio_port_handle_t 
id) const; - sp<HwModule> getModuleForDevice(audio_devices_t device) const; - sp<HwModule> getModuleFromName(const char *name) const; - audio_devices_t availablePrimaryOutputDevices(); - audio_devices_t availablePrimaryInputDevices(); + audio_devices_t availablePrimaryOutputDevices() const + { + return mOutputs.getSupportedDevices(mPrimaryOutput) & mAvailableOutputDevices.types(); + } + audio_devices_t availablePrimaryInputDevices() const + { + return mAvailableInputDevices.getDevicesFromHwModule( + mOutputs.valueFor(mPrimaryOutput)->getModuleHandle()); + } void updateCallRouting(audio_devices_t rxDevice, int delayMs = 0); - // - // Audio policy configuration file parsing (audio_policy.conf) - // - static uint32_t stringToEnum(const struct StringToEnum *table, - size_t size, - const char *name); - static const char *enumToString(const struct StringToEnum *table, - size_t size, - uint32_t value); - static bool stringToBool(const char *value); - static uint32_t parseOutputFlagNames(char *name); - static uint32_t parseInputFlagNames(char *name); - static audio_devices_t parseDeviceNames(char *name); - void loadHwModule(cnode *root); - void loadHwModules(cnode *root); - void loadGlobalConfig(cnode *root, const sp<HwModule>& module); - status_t loadAudioPolicyConfig(const char *path); - void defaultAudioPolicyConfig(void); - - uid_t mUidCached; AudioPolicyClientInterface *mpClientInterface; // audio policy client interface audio_io_handle_t mPrimaryOutput; // primary output handle // list of descriptors for outputs currently opened - DefaultKeyedVector<audio_io_handle_t, sp<AudioOutputDescriptor> > mOutputs; + AudioOutputCollection mOutputs; // copy of mOutputs before setDeviceConnectionState() opens new outputs // reset to mOutputs when updateDevicesAndOutputs() is called. 
- DefaultKeyedVector<audio_io_handle_t, sp<AudioOutputDescriptor> > mPreviousOutputs; - DefaultKeyedVector<audio_io_handle_t, sp<AudioInputDescriptor> > mInputs; // list of input descriptors + AudioOutputCollection mPreviousOutputs; + AudioInputCollection mInputs; // list of input descriptors DeviceVector mAvailableOutputDevices; // all available output devices DeviceVector mAvailableInputDevices; // all available input devices - int mPhoneState; // current phone state - audio_policy_forced_cfg_t mForceUse[AUDIO_POLICY_FORCE_USE_CNT]; // current forced use configuration - StreamDescriptor mStreams[AUDIO_STREAM_CNT]; // stream descriptors for volume control - bool mLimitRingtoneVolume; // limit ringtone volume to music volume if headset connected + StreamDescriptorCollection mStreams; // stream descriptors for volume control + bool mLimitRingtoneVolume; // limit ringtone volume to music volume if headset connected audio_devices_t mDeviceForStrategy[NUM_STRATEGIES]; - float mLastVoiceVolume; // last voice volume value sent to audio HAL - - // Maximum CPU load allocated to audio effects in 0.1 MIPS (ARMv5TE, 0 WS memory) units - static const uint32_t MAX_EFFECTS_CPU_LOAD = 1000; - // Maximum memory allocated to audio effects in KB - static const uint32_t MAX_EFFECTS_MEMORY = 512; - uint32_t mTotalEffectsCpuLoad; // current CPU load used by effects - uint32_t mTotalEffectsMemory; // current memory used by effects - KeyedVector<int, sp<EffectDescriptor> > mEffects; // list of registered audio effects + float mLastVoiceVolume; // last voice volume value sent to audio HAL + + EffectDescriptorCollection mEffects; // list of registered audio effects bool mA2dpSuspended; // true if A2DP output is suspended sp<DeviceDescriptor> mDefaultOutputDevice; // output device selected by default at boot time bool mSpeakerDrcEnabled;// true on devices that use DRC on the DEVICE_CATEGORY_SPEAKER path // to boost soft sounds, used to adjust volume curves accordingly - Vector < 
sp<HwModule> > mHwModules; - volatile int32_t mNextUniqueId; + HwModuleCollection mHwModules; + volatile int32_t mAudioPortGeneration; - DefaultKeyedVector<audio_patch_handle_t, sp<AudioPatch> > mAudioPatches; + AudioPatchCollection mAudioPatches; - DefaultKeyedVector<audio_session_t, audio_io_handle_t> mSoundTriggerSessions; + SoundTriggerSessionCollection mSoundTriggerSessions; sp<AudioPatch> mCallTxPatch; sp<AudioPatch> mCallRxPatch; @@ -854,16 +509,7 @@ protected: uint32_t mBeaconPlayingRefCount;// ref count for the playing beacon streams bool mBeaconMuted; // has STREAM_TTS been muted - // custom mix entry in mPolicyMixes - class AudioPolicyMix : public RefBase { - public: - AudioPolicyMix() {} - - AudioMix mMix; // Audio policy mix descriptor - sp<AudioOutputDescriptor> mOutput; // Corresponding output stream - }; - DefaultKeyedVector<String8, sp<AudioPolicyMix> > mPolicyMixes; // list of registered mixes - + AudioPolicyMixCollection mPolicyMixes; // list of registered mixes #ifdef AUDIO_POLICY_TEST Mutex mLock; @@ -879,16 +525,15 @@ protected: uint32_t mTestChannels; uint32_t mTestLatencyMs; #endif //AUDIO_POLICY_TEST - static float volIndexToAmpl(audio_devices_t device, const StreamDescriptor& streamDesc, - int indexInUi); - static bool isVirtualInputDevice(audio_devices_t device); - uint32_t nextUniqueId(); + uint32_t nextAudioPortGeneration(); + + // Audio Policy Engine Interface. + AudioPolicyManagerInterface *mEngine; private: // updates device caching and output for streams that can influence the // routing of notifications void handleNotificationRoutingForStream(audio_stream_type_t stream); - static bool deviceDistinguishesOnAddress(audio_devices_t device); // find the outputs on a given output descriptor that have the given address. // to be called on an AudioOutputDescriptor whose supported devices (as defined // in mProfile->mSupportedDevices) matches the device whose address is to be matched. 
@@ -911,8 +556,6 @@ private: const audio_offload_info_t *offloadInfo); // internal function to derive a stream type value from audio attributes audio_stream_type_t streamTypefromAttributesInt(const audio_attributes_t *attr); - // return true if any output is playing anything besides the stream to ignore - bool isAnyOutputActive(audio_stream_type_t streamToIgnore); // event is one of STARTING_OUTPUT, STARTING_BEACON, STOPPING_OUTPUT, STOPPING_BEACON // returns 0 if no mute/unmute event happened, the largest latency of the device where // the mute/unmute happened @@ -928,10 +571,8 @@ private: // Called by setDeviceConnectionState(). status_t setDeviceConnectionStateInt(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address); - sp<DeviceDescriptor> getDeviceDescriptor(const audio_devices_t device, - const char *device_address); - + const char *device_address, + const char *device_name); }; }; diff --git a/services/audiopolicy/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp index 3e090e9..3e090e9 100644 --- a/services/audiopolicy/AudioPolicyClientImpl.cpp +++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp diff --git a/services/audiopolicy/AudioPolicyClientImplLegacy.cpp b/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp index a79f8ae..a79f8ae 100644 --- a/services/audiopolicy/AudioPolicyClientImplLegacy.cpp +++ b/services/audiopolicy/service/AudioPolicyClientImplLegacy.cpp diff --git a/services/audiopolicy/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp index e6ace20..e6ace20 100644 --- a/services/audiopolicy/AudioPolicyEffects.cpp +++ b/services/audiopolicy/service/AudioPolicyEffects.cpp diff --git a/services/audiopolicy/AudioPolicyEffects.h b/services/audiopolicy/service/AudioPolicyEffects.h index 3dec437..3dec437 100644 --- a/services/audiopolicy/AudioPolicyEffects.h +++ b/services/audiopolicy/service/AudioPolicyEffects.h diff --git 
a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp index a45dbb3..e9ff838 100644 --- a/services/audiopolicy/AudioPolicyInterfaceImpl.cpp +++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp @@ -28,7 +28,8 @@ namespace android { status_t AudioPolicyService::setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address) + const char *device_address, + const char *device_name) { if (mAudioPolicyManager == NULL) { return NO_INIT; @@ -46,8 +47,8 @@ status_t AudioPolicyService::setDeviceConnectionState(audio_devices_t device, ALOGV("setDeviceConnectionState()"); Mutex::Autolock _l(mLock); - return mAudioPolicyManager->setDeviceConnectionState(device, - state, device_address); + return mAudioPolicyManager->setDeviceConnectionState(device, state, + device_address, device_name); } audio_policy_dev_state_t AudioPolicyService::getDeviceConnectionState( diff --git a/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp index b8846c6..5a91192 100644 --- a/services/audiopolicy/AudioPolicyInterfaceImplLegacy.cpp +++ b/services/audiopolicy/service/AudioPolicyInterfaceImplLegacy.cpp @@ -33,7 +33,8 @@ namespace android { status_t AudioPolicyService::setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address) + const char *device_address, + const char *device_name __unused) { if (mpAudioPolicy == NULL) { return NO_INIT; diff --git a/services/audiopolicy/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp index eb9116d..00f188f 100644 --- a/services/audiopolicy/AudioPolicyService.cpp +++ b/services/audiopolicy/service/AudioPolicyService.cpp @@ -532,7 +532,7 @@ bool AudioPolicyService::AudioCommandThread::threadLoop() mLock.unlock(); svc.clear(); mLock.lock(); - if (!exitPending() && mAudioCommands.isEmpty()) { + if 
(!exitPending() && (mAudioCommands.isEmpty() || waitTime != INT64_MAX)) { // release delayed commands wake lock release_wake_lock(mName.string()); ALOGV("AudioCommandThread() going to sleep"); diff --git a/services/audiopolicy/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h index 80284a4..0378384 100644 --- a/services/audiopolicy/AudioPolicyService.h +++ b/services/audiopolicy/service/AudioPolicyService.h @@ -35,7 +35,7 @@ #include <hardware_legacy/AudioPolicyInterface.h> #endif #include "AudioPolicyEffects.h" -#include "AudioPolicyManager.h" +#include "managerdefault/AudioPolicyManager.h" namespace android { @@ -61,7 +61,8 @@ public: virtual status_t setDeviceConnectionState(audio_devices_t device, audio_policy_dev_state_t state, - const char *device_address); + const char *device_address, + const char *device_name); virtual audio_policy_dev_state_t getDeviceConnectionState( audio_devices_t device, const char *device_address); diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk index e184d97..9c60911 100644 --- a/services/camera/libcameraservice/Android.mk +++ b/services/camera/libcameraservice/Android.mk @@ -23,8 +23,10 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ CameraService.cpp \ CameraDeviceFactory.cpp \ + CameraFlashlight.cpp \ common/Camera2ClientBase.cpp \ common/CameraDeviceBase.cpp \ + common/CameraModule.cpp \ common/FrameProcessorBase.cpp \ api1/CameraClient.cpp \ api1/Camera2Client.cpp \ @@ -40,7 +42,6 @@ LOCAL_SRC_FILES:= \ api1/client2/CaptureSequencer.cpp \ api1/client2/ZslProcessor3.cpp \ api2/CameraDeviceClient.cpp \ - api_pro/ProCamera2Client.cpp \ device2/Camera2Device.cpp \ device3/Camera3Device.cpp \ device3/Camera3Stream.cpp \ @@ -52,6 +53,7 @@ LOCAL_SRC_FILES:= \ device3/StatusTracker.cpp \ gui/RingBufferConsumer.cpp \ utils/CameraTraces.cpp \ + utils/AutoConditionLock.cpp \ LOCAL_SHARED_LIBRARIES:= \ libui \ diff --git 
a/services/camera/libcameraservice/CameraDeviceFactory.cpp b/services/camera/libcameraservice/CameraDeviceFactory.cpp index bfef50e..6589e27 100644 --- a/services/camera/libcameraservice/CameraDeviceFactory.cpp +++ b/services/camera/libcameraservice/CameraDeviceFactory.cpp @@ -48,6 +48,7 @@ sp<CameraDeviceBase> CameraDeviceFactory::createDevice(int cameraId) { case CAMERA_DEVICE_API_VERSION_3_0: case CAMERA_DEVICE_API_VERSION_3_1: case CAMERA_DEVICE_API_VERSION_3_2: + case CAMERA_DEVICE_API_VERSION_3_3: device = new Camera3Device(cameraId); break; default: diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp new file mode 100644 index 0000000..8613ac6 --- /dev/null +++ b/services/camera/libcameraservice/CameraFlashlight.cpp @@ -0,0 +1,886 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CameraFlashlight" +#define ATRACE_TAG ATRACE_TAG_CAMERA +// #define LOG_NDEBUG 0 + +#include <utils/Log.h> +#include <utils/Trace.h> +#include <cutils/properties.h> + +#include "camera/CameraMetadata.h" +#include "CameraFlashlight.h" +#include "gui/IGraphicBufferConsumer.h" +#include "gui/BufferQueue.h" +#include "camera/camera2/CaptureRequest.h" +#include "CameraDeviceFactory.h" + + +namespace android { + +///////////////////////////////////////////////////////////////////// +// CameraFlashlight implementation begins +// used by camera service to control flashflight. +///////////////////////////////////////////////////////////////////// +CameraFlashlight::CameraFlashlight(CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks) : + mCameraModule(&cameraModule), + mCallbacks(&callbacks), + mFlashlightMapInitialized(false) { +} + +CameraFlashlight::~CameraFlashlight() { +} + +status_t CameraFlashlight::createFlashlightControl(const String8& cameraId) { + ALOGV("%s: creating a flash light control for camera %s", __FUNCTION__, + cameraId.string()); + if (mFlashControl != NULL) { + return INVALID_OPERATION; + } + + status_t res = OK; + + if (mCameraModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) { + mFlashControl = new ModuleFlashControl(*mCameraModule, *mCallbacks); + if (mFlashControl == NULL) { + ALOGV("%s: cannot create flash control for module api v2.4+", + __FUNCTION__); + return NO_MEMORY; + } + } else { + uint32_t deviceVersion = CAMERA_DEVICE_API_VERSION_1_0; + + if (mCameraModule->getModuleApiVersion() >= + CAMERA_MODULE_API_VERSION_2_0) { + camera_info info; + res = mCameraModule->getCameraInfo( + atoi(String8(cameraId).string()), &info); + if (res) { + ALOGE("%s: failed to get camera info for camera %s", + __FUNCTION__, cameraId.string()); + return res; + } + deviceVersion = info.device_version; + } + + if (deviceVersion >= CAMERA_DEVICE_API_VERSION_2_0) { + CameraDeviceClientFlashControl 
*flashControl = + new CameraDeviceClientFlashControl(*mCameraModule, + *mCallbacks); + if (!flashControl) { + return NO_MEMORY; + } + + mFlashControl = flashControl; + } else { + mFlashControl = + new CameraHardwareInterfaceFlashControl(*mCameraModule, + *mCallbacks); + } + } + + return OK; +} + +status_t CameraFlashlight::setTorchMode(const String8& cameraId, bool enabled) { + if (!mFlashlightMapInitialized) { + ALOGE("%s: findFlashUnits() must be called before this method."); + return NO_INIT; + } + + ALOGV("%s: set torch mode of camera %s to %d", __FUNCTION__, + cameraId.string(), enabled); + + status_t res = OK; + Mutex::Autolock l(mLock); + + if (mOpenedCameraIds.indexOf(cameraId) != NAME_NOT_FOUND) { + // This case is needed to avoid state corruption during the following call sequence: + // CameraService::setTorchMode for camera ID 0 begins, does torch status checks + // CameraService::connect for camera ID 0 begins, calls prepareDeviceOpen, ends + // CameraService::setTorchMode for camera ID 0 continues, calls + // CameraFlashlight::setTorchMode + + // TODO: Move torch status checks and state updates behind this CameraFlashlight lock + // to avoid other similar race conditions. + ALOGE("%s: Camera device %s is in use, cannot set torch mode.", + __FUNCTION__, cameraId.string()); + return -EBUSY; + } + + if (mFlashControl == NULL) { + if (enabled == false) { + return OK; + } + + res = createFlashlightControl(cameraId); + if (res) { + return res; + } + res = mFlashControl->setTorchMode(cameraId, enabled); + return res; + } + + // if flash control already exists, turning on torch mode may fail if it's + // tied to another camera device for module v2.3 and below. + res = mFlashControl->setTorchMode(cameraId, enabled); + if (res == BAD_INDEX) { + // flash control is tied to another camera device, need to close it and + // try again. 
+ mFlashControl.clear(); + res = createFlashlightControl(cameraId); + if (res) { + return res; + } + res = mFlashControl->setTorchMode(cameraId, enabled); + } + + return res; +} + +status_t CameraFlashlight::findFlashUnits() { + Mutex::Autolock l(mLock); + status_t res; + int32_t numCameras = mCameraModule->getNumberOfCameras(); + + mHasFlashlightMap.clear(); + mFlashlightMapInitialized = false; + + for (int32_t i = 0; i < numCameras; i++) { + bool hasFlash = false; + String8 id = String8::format("%d", i); + + res = createFlashlightControl(id); + if (res) { + ALOGE("%s: failed to create flash control for %s", __FUNCTION__, + id.string()); + } else { + res = mFlashControl->hasFlashUnit(id, &hasFlash); + if (res == -EUSERS || res == -EBUSY) { + ALOGE("%s: failed to check if camera %s has a flash unit. Some " + "camera devices may be opened", __FUNCTION__, + id.string()); + return res; + } else if (res) { + ALOGE("%s: failed to check if camera %s has a flash unit. %s" + " (%d)", __FUNCTION__, id.string(), strerror(-res), + res); + } + + mFlashControl.clear(); + } + mHasFlashlightMap.add(id, hasFlash); + } + + mFlashlightMapInitialized = true; + return OK; +} + +bool CameraFlashlight::hasFlashUnit(const String8& cameraId) { + status_t res; + + Mutex::Autolock l(mLock); + return hasFlashUnitLocked(cameraId); +} + +bool CameraFlashlight::hasFlashUnitLocked(const String8& cameraId) { + if (!mFlashlightMapInitialized) { + ALOGE("%s: findFlashUnits() must be called before this method."); + return false; + } + + ssize_t index = mHasFlashlightMap.indexOfKey(cameraId); + if (index == NAME_NOT_FOUND) { + ALOGE("%s: camera %s not present when findFlashUnits() was called", + __FUNCTION__, cameraId.string()); + return false; + } + + return mHasFlashlightMap.valueAt(index); +} + +status_t CameraFlashlight::prepareDeviceOpen(const String8& cameraId) { + ALOGV("%s: prepare for device open", __FUNCTION__); + + Mutex::Autolock l(mLock); + if (!mFlashlightMapInitialized) { + ALOGE("%s: 
findFlashUnits() must be called before this method."); + return NO_INIT; + } + + if (mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) { + // framework is going to open a camera device, all flash light control + // should be closed for backward compatible support. + mFlashControl.clear(); + + if (mOpenedCameraIds.size() == 0) { + // notify torch unavailable for all cameras with a flash + int numCameras = mCameraModule->getNumberOfCameras(); + for (int i = 0; i < numCameras; i++) { + if (hasFlashUnitLocked(String8::format("%d", i))) { + mCallbacks->torch_mode_status_change(mCallbacks, + String8::format("%d", i).string(), + TORCH_MODE_STATUS_NOT_AVAILABLE); + } + } + } + + // close flash control that may be opened by calling hasFlashUnitLocked. + mFlashControl.clear(); + } + + if (mOpenedCameraIds.indexOf(cameraId) == NAME_NOT_FOUND) { + mOpenedCameraIds.add(cameraId); + } + + return OK; +} + +status_t CameraFlashlight::deviceClosed(const String8& cameraId) { + ALOGV("%s: device %s is closed", __FUNCTION__, cameraId.string()); + + Mutex::Autolock l(mLock); + if (!mFlashlightMapInitialized) { + ALOGE("%s: findFlashUnits() must be called before this method."); + return NO_INIT; + } + + ssize_t index = mOpenedCameraIds.indexOf(cameraId); + if (index == NAME_NOT_FOUND) { + ALOGE("%s: couldn't find camera %s in the opened list", __FUNCTION__, + cameraId.string()); + } else { + mOpenedCameraIds.removeAt(index); + } + + // Cannot do anything until all cameras are closed. 
+ if (mOpenedCameraIds.size() != 0) + return OK; + + if (mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) { + // notify torch available for all cameras with a flash + int numCameras = mCameraModule->getNumberOfCameras(); + for (int i = 0; i < numCameras; i++) { + if (hasFlashUnitLocked(String8::format("%d", i))) { + mCallbacks->torch_mode_status_change(mCallbacks, + String8::format("%d", i).string(), + TORCH_MODE_STATUS_AVAILABLE_OFF); + } + } + } + + return OK; +} +// CameraFlashlight implementation ends + + +FlashControlBase::~FlashControlBase() { +} + +///////////////////////////////////////////////////////////////////// +// ModuleFlashControl implementation begins +// Flash control for camera module v2.4 and above. +///////////////////////////////////////////////////////////////////// +ModuleFlashControl::ModuleFlashControl(CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks) : + mCameraModule(&cameraModule) { +} + +ModuleFlashControl::~ModuleFlashControl() { +} + +status_t ModuleFlashControl::hasFlashUnit(const String8& cameraId, bool *hasFlash) { + if (!hasFlash) { + return BAD_VALUE; + } + + *hasFlash = false; + Mutex::Autolock l(mLock); + + camera_info info; + status_t res = mCameraModule->getCameraInfo(atoi(cameraId.string()), + &info); + if (res != 0) { + return res; + } + + CameraMetadata metadata; + metadata = info.static_camera_characteristics; + camera_metadata_entry flashAvailable = + metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + *hasFlash = true; + } + + return OK; +} + +status_t ModuleFlashControl::setTorchMode(const String8& cameraId, bool enabled) { + ALOGV("%s: set camera %s torch mode to %d", __FUNCTION__, + cameraId.string(), enabled); + + Mutex::Autolock l(mLock); + return mCameraModule->setTorchMode(cameraId.string(), enabled); +} +// ModuleFlashControl implementation ends + 
+///////////////////////////////////////////////////////////////////// +// CameraDeviceClientFlashControl implementation begins +// Flash control for camera module <= v2.3 and camera HAL v2-v3 +///////////////////////////////////////////////////////////////////// +CameraDeviceClientFlashControl::CameraDeviceClientFlashControl( + CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks) : + mCameraModule(&cameraModule), + mCallbacks(&callbacks), + mTorchEnabled(false), + mMetadata(NULL), + mStreaming(false) { +} + +CameraDeviceClientFlashControl::~CameraDeviceClientFlashControl() { + disconnectCameraDevice(); + if (mMetadata) { + delete mMetadata; + } + + mAnw.clear(); + mSurfaceTexture.clear(); + mProducer.clear(); + mConsumer.clear(); + + if (mTorchEnabled) { + if (mCallbacks) { + ALOGV("%s: notify the framework that torch was turned off", + __FUNCTION__); + mCallbacks->torch_mode_status_change(mCallbacks, + mCameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF); + } + } +} + +status_t CameraDeviceClientFlashControl::initializeSurface( + sp<CameraDeviceBase> &device, int32_t width, int32_t height) { + status_t res; + BufferQueue::createBufferQueue(&mProducer, &mConsumer); + + mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL, + true, true); + if (mSurfaceTexture == NULL) { + return NO_MEMORY; + } + + int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + res = mSurfaceTexture->setDefaultBufferSize(width, height); + if (res) { + return res; + } + res = mSurfaceTexture->setDefaultBufferFormat(format); + if (res) { + return res; + } + + mAnw = new Surface(mProducer, /*useAsync*/ true); + if (mAnw == NULL) { + return NO_MEMORY; + } + res = device->createStream(mAnw, width, height, format, + HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mStreamId); + if (res) { + return res; + } + + res = device->configureStreams(); + if (res) { + return res; + } + + return res; +} + +status_t 
CameraDeviceClientFlashControl::getSmallestSurfaceSize( + const camera_info& info, int32_t *width, int32_t *height) { + if (!width || !height) { + return BAD_VALUE; + } + + int32_t w = INT32_MAX; + int32_t h = 1; + + CameraMetadata metadata; + metadata = info.static_camera_characteristics; + camera_metadata_entry streamConfigs = + metadata.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); + for (size_t i = 0; i < streamConfigs.count; i += 4) { + int32_t fmt = streamConfigs.data.i32[i]; + if (fmt == ANDROID_SCALER_AVAILABLE_FORMATS_IMPLEMENTATION_DEFINED) { + int32_t ww = streamConfigs.data.i32[i + 1]; + int32_t hh = streamConfigs.data.i32[i + 2]; + + if (w * h > ww * hh) { + w = ww; + h = hh; + } + } + } + + // if stream configuration is not found, try available processed sizes. + if (streamConfigs.count == 0) { + camera_metadata_entry availableProcessedSizes = + metadata.find(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES); + for (size_t i = 0; i < availableProcessedSizes.count; i += 2) { + int32_t ww = availableProcessedSizes.data.i32[i]; + int32_t hh = availableProcessedSizes.data.i32[i + 1]; + if (w * h > ww * hh) { + w = ww; + h = hh; + } + } + } + + if (w == INT32_MAX) { + return NAME_NOT_FOUND; + } + + *width = w; + *height = h; + + return OK; +} + +status_t CameraDeviceClientFlashControl::connectCameraDevice( + const String8& cameraId) { + camera_info info; + status_t res = mCameraModule->getCameraInfo(atoi(cameraId.string()), &info); + if (res != 0) { + ALOGE("%s: failed to get camera info for camera %s", __FUNCTION__, + cameraId.string()); + return res; + } + + sp<CameraDeviceBase> device = + CameraDeviceFactory::createDevice(atoi(cameraId.string())); + if (device == NULL) { + return NO_MEMORY; + } + + res = device->initialize(mCameraModule); + if (res) { + return res; + } + + int32_t width, height; + res = getSmallestSurfaceSize(info, &width, &height); + if (res) { + return res; + } + res = initializeSurface(device, width, height); + if (res) { + return 
res; + } + + mCameraId = cameraId; + mStreaming = (info.device_version <= CAMERA_DEVICE_API_VERSION_3_1); + mDevice = device; + + return OK; +} + +status_t CameraDeviceClientFlashControl::disconnectCameraDevice() { + if (mDevice != NULL) { + mDevice->disconnect(); + mDevice.clear(); + } + + return OK; +} + + + +status_t CameraDeviceClientFlashControl::hasFlashUnit(const String8& cameraId, + bool *hasFlash) { + ALOGV("%s: checking if camera %s has a flash unit", __FUNCTION__, + cameraId.string()); + + Mutex::Autolock l(mLock); + return hasFlashUnitLocked(cameraId, hasFlash); + +} + +status_t CameraDeviceClientFlashControl::hasFlashUnitLocked( + const String8& cameraId, bool *hasFlash) { + if (!hasFlash) { + return BAD_VALUE; + } + + camera_info info; + status_t res = mCameraModule->getCameraInfo( + atoi(cameraId.string()), &info); + if (res != 0) { + ALOGE("%s: failed to get camera info for camera %s", __FUNCTION__, + cameraId.string()); + return res; + } + + CameraMetadata metadata; + metadata = info.static_camera_characteristics; + camera_metadata_entry flashAvailable = + metadata.find(ANDROID_FLASH_INFO_AVAILABLE); + if (flashAvailable.count == 1 && flashAvailable.data.u8[0] == 1) { + *hasFlash = true; + } + + return OK; +} + +status_t CameraDeviceClientFlashControl::submitTorchEnabledRequest() { + status_t res; + + if (mMetadata == NULL) { + mMetadata = new CameraMetadata(); + if (mMetadata == NULL) { + return NO_MEMORY; + } + res = mDevice->createDefaultRequest( + CAMERA3_TEMPLATE_PREVIEW, mMetadata); + if (res) { + return res; + } + } + + uint8_t torchOn = ANDROID_FLASH_MODE_TORCH; + mMetadata->update(ANDROID_FLASH_MODE, &torchOn, 1); + mMetadata->update(ANDROID_REQUEST_OUTPUT_STREAMS, &mStreamId, 1); + + uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; + mMetadata->update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); + + int32_t requestId = 0; + mMetadata->update(ANDROID_REQUEST_ID, &requestId, 1); + + if (mStreaming) { + res = mDevice->setStreamingRequest(*mMetadata); + 
} else { + res = mDevice->capture(*mMetadata); + } + return res; +} + + + + +status_t CameraDeviceClientFlashControl::setTorchMode( + const String8& cameraId, bool enabled) { + bool hasFlash = false; + + Mutex::Autolock l(mLock); + status_t res = hasFlashUnitLocked(cameraId, &hasFlash); + + // pre-check + if (enabled) { + // invalid camera? + if (res) { + return -EINVAL; + } + // no flash unit? + if (!hasFlash) { + return -ENOSYS; + } + // already opened for a different device? + if (mDevice != NULL && cameraId != mCameraId) { + return BAD_INDEX; + } + } else if (mDevice == NULL || cameraId != mCameraId) { + // disabling the torch mode of an un-opened or different device. + return OK; + } else { + // disabling the torch mode of currently opened device + disconnectCameraDevice(); + mTorchEnabled = false; + mCallbacks->torch_mode_status_change(mCallbacks, + cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF); + return OK; + } + + if (mDevice == NULL) { + res = connectCameraDevice(cameraId); + if (res) { + return res; + } + } + + res = submitTorchEnabledRequest(); + if (res) { + return res; + } + + mTorchEnabled = true; + mCallbacks->torch_mode_status_change(mCallbacks, + cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_ON); + return OK; +} +// CameraDeviceClientFlashControl implementation ends + + +///////////////////////////////////////////////////////////////////// +// CameraHardwareInterfaceFlashControl implementation begins +// Flash control for camera module <= v2.3 and camera HAL v1 +///////////////////////////////////////////////////////////////////// +CameraHardwareInterfaceFlashControl::CameraHardwareInterfaceFlashControl( + CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks) : + mCameraModule(&cameraModule), + mCallbacks(&callbacks), + mTorchEnabled(false) { + +} + +CameraHardwareInterfaceFlashControl::~CameraHardwareInterfaceFlashControl() { + disconnectCameraDevice(); + + mAnw.clear(); + mSurfaceTexture.clear(); + mProducer.clear(); + 
mConsumer.clear(); + + if (mTorchEnabled) { + if (mCallbacks) { + ALOGV("%s: notify the framework that torch was turned off", + __FUNCTION__); + mCallbacks->torch_mode_status_change(mCallbacks, + mCameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF); + } + } +} + +status_t CameraHardwareInterfaceFlashControl::setTorchMode( + const String8& cameraId, bool enabled) { + Mutex::Autolock l(mLock); + + // pre-check + status_t res; + if (enabled) { + bool hasFlash = false; + res = hasFlashUnitLocked(cameraId, &hasFlash); + // invalid camera? + if (res) { + // hasFlashUnitLocked() returns BAD_INDEX if mDevice is connected to + // another camera device. + return res == BAD_INDEX ? BAD_INDEX : -EINVAL; + } + // no flash unit? + if (!hasFlash) { + return -ENOSYS; + } + } else if (mDevice == NULL || cameraId != mCameraId) { + // disabling the torch mode of an un-opened or different device. + return OK; + } else { + // disabling the torch mode of currently opened device + disconnectCameraDevice(); + mTorchEnabled = false; + mCallbacks->torch_mode_status_change(mCallbacks, + cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_OFF); + return OK; + } + + res = startPreviewAndTorch(); + if (res) { + return res; + } + + mTorchEnabled = true; + mCallbacks->torch_mode_status_change(mCallbacks, + cameraId.string(), TORCH_MODE_STATUS_AVAILABLE_ON); + return OK; +} + +status_t CameraHardwareInterfaceFlashControl::hasFlashUnit( + const String8& cameraId, bool *hasFlash) { + Mutex::Autolock l(mLock); + return hasFlashUnitLocked(cameraId, hasFlash); +} + +status_t CameraHardwareInterfaceFlashControl::hasFlashUnitLocked( + const String8& cameraId, bool *hasFlash) { + if (!hasFlash) { + return BAD_VALUE; + } + + status_t res; + if (mDevice == NULL) { + res = connectCameraDevice(cameraId); + if (res) { + return res; + } + } + + if (cameraId != mCameraId) { + return BAD_INDEX; + } + + const char *flashMode = + mParameters.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES); + if (flashMode && 
strstr(flashMode, CameraParameters::FLASH_MODE_TORCH)) { + *hasFlash = true; + } else { + *hasFlash = false; + } + + return OK; +} + +status_t CameraHardwareInterfaceFlashControl::startPreviewAndTorch() { + status_t res = OK; + res = mDevice->startPreview(); + if (res) { + ALOGE("%s: start preview failed. %s (%d)", __FUNCTION__, + strerror(-res), res); + return res; + } + + mParameters.set(CameraParameters::KEY_FLASH_MODE, + CameraParameters::FLASH_MODE_TORCH); + + return mDevice->setParameters(mParameters); +} + +status_t CameraHardwareInterfaceFlashControl::getSmallestSurfaceSize( + int32_t *width, int32_t *height) { + if (!width || !height) { + return BAD_VALUE; + } + + int32_t w = INT32_MAX; + int32_t h = 1; + Vector<Size> sizes; + + mParameters.getSupportedPreviewSizes(sizes); + for (size_t i = 0; i < sizes.size(); i++) { + Size s = sizes[i]; + if (w * h > s.width * s.height) { + w = s.width; + h = s.height; + } + } + + if (w == INT32_MAX) { + return NAME_NOT_FOUND; + } + + *width = w; + *height = h; + + return OK; +} + +status_t CameraHardwareInterfaceFlashControl::initializePreviewWindow( + sp<CameraHardwareInterface> device, int32_t width, int32_t height) { + status_t res; + BufferQueue::createBufferQueue(&mProducer, &mConsumer); + + mSurfaceTexture = new GLConsumer(mConsumer, 0, GLConsumer::TEXTURE_EXTERNAL, + true, true); + if (mSurfaceTexture == NULL) { + return NO_MEMORY; + } + + int32_t format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + res = mSurfaceTexture->setDefaultBufferSize(width, height); + if (res) { + return res; + } + res = mSurfaceTexture->setDefaultBufferFormat(format); + if (res) { + return res; + } + + mAnw = new Surface(mProducer, /*useAsync*/ true); + if (mAnw == NULL) { + return NO_MEMORY; + } + + res = native_window_api_connect(mAnw.get(), NATIVE_WINDOW_API_CAMERA); + if (res) { + ALOGE("%s: Unable to connect to native window", __FUNCTION__); + return res; + } + + return device->setPreviewWindow(mAnw); +} + +status_t 
CameraHardwareInterfaceFlashControl::connectCameraDevice( + const String8& cameraId) { + sp<CameraHardwareInterface> device = + new CameraHardwareInterface(cameraId.string()); + + status_t res = device->initialize(mCameraModule); + if (res) { + ALOGE("%s: initializing camera %s failed", __FUNCTION__, + cameraId.string()); + return res; + } + + // need to set __get_memory in set_callbacks(). + device->setCallbacks(NULL, NULL, NULL, NULL); + + mParameters = device->getParameters(); + + int32_t width, height; + res = getSmallestSurfaceSize(&width, &height); + if (res) { + ALOGE("%s: failed to get smallest surface size for camera %s", + __FUNCTION__, cameraId.string()); + return res; + } + + res = initializePreviewWindow(device, width, height); + if (res) { + ALOGE("%s: failed to initialize preview window for camera %s", + __FUNCTION__, cameraId.string()); + return res; + } + + mCameraId = cameraId; + mDevice = device; + return OK; +} + +status_t CameraHardwareInterfaceFlashControl::disconnectCameraDevice() { + if (mDevice == NULL) { + return OK; + } + + mParameters.set(CameraParameters::KEY_FLASH_MODE, + CameraParameters::FLASH_MODE_OFF); + mDevice->setParameters(mParameters); + mDevice->stopPreview(); + status_t res = native_window_api_disconnect(mAnw.get(), + NATIVE_WINDOW_API_CAMERA); + if (res) { + ALOGW("%s: native_window_api_disconnect failed: %s (%d)", + __FUNCTION__, strerror(-res), res); + } + mDevice->setPreviewWindow(NULL); + mDevice->release(); + + return OK; +} +// CameraHardwareInterfaceFlashControl implementation ends + +} diff --git a/services/camera/libcameraservice/CameraFlashlight.h b/services/camera/libcameraservice/CameraFlashlight.h new file mode 100644 index 0000000..30f01f0 --- /dev/null +++ b/services/camera/libcameraservice/CameraFlashlight.h @@ -0,0 +1,225 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance 
with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H +#define ANDROID_SERVERS_CAMERA_CAMERAFLASHLIGHT_H + +#include "hardware/camera_common.h" +#include "utils/KeyedVector.h" +#include "utils/SortedVector.h" +#include "gui/GLConsumer.h" +#include "gui/Surface.h" +#include "common/CameraDeviceBase.h" +#include "device1/CameraHardwareInterface.h" + +namespace android { + +/** + * FlashControlBase is a base class for flash control. It defines the functions + * that a flash control for each camera module/device version should implement. + */ +class FlashControlBase : public virtual VirtualLightRefBase { + public: + virtual ~FlashControlBase(); + + // Whether a camera device has a flash unit. Calling this function may + // cause the torch mode to be turned off in HAL v1 devices. If + // previously-on torch mode is turned off, + // callbacks.torch_mode_status_change() should be invoked. + virtual status_t hasFlashUnit(const String8& cameraId, + bool *hasFlash) = 0; + + // set the torch mode to on or off. + virtual status_t setTorchMode(const String8& cameraId, + bool enabled) = 0; +}; + +/** + * CameraFlashlight can be used by camera service to control flashflight. + */ +class CameraFlashlight : public virtual VirtualLightRefBase { + public: + CameraFlashlight(CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks); + virtual ~CameraFlashlight(); + + // Find all flash units. This must be called before other methods. 
All + // camera devices must be closed when it's called because HAL v1 devices + // need to be opened to query available flash modes. + status_t findFlashUnits(); + + // Whether a camera device has a flash unit. Before findFlashUnits() is + // called, this function always returns false. + bool hasFlashUnit(const String8& cameraId); + + // set the torch mode to on or off. + status_t setTorchMode(const String8& cameraId, bool enabled); + + // Notify CameraFlashlight that camera service is going to open a camera + // device. CameraFlashlight will free the resources that may cause the + // camera open to fail. Camera service must call this function before + // opening a camera device. + status_t prepareDeviceOpen(const String8& cameraId); + + // Notify CameraFlashlight that camera service has closed a camera + // device. CameraFlashlight may invoke callbacks for torch mode + // available depending on the implementation. + status_t deviceClosed(const String8& cameraId); + + private: + // create flashlight control based on camera module API and camera + // device API versions. + status_t createFlashlightControl(const String8& cameraId); + + // mLock should be locked. + bool hasFlashUnitLocked(const String8& cameraId); + + sp<FlashControlBase> mFlashControl; + CameraModule *mCameraModule; + const camera_module_callbacks_t *mCallbacks; + SortedVector<String8> mOpenedCameraIds; + + // camera id -> if it has a flash unit + KeyedVector<String8, bool> mHasFlashlightMap; + bool mFlashlightMapInitialized; + + Mutex mLock; // protect CameraFlashlight API +}; + +/** + * Flash control for camera module v2.4 and above. 
+ */ +class ModuleFlashControl : public FlashControlBase { + public: + ModuleFlashControl(CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks); + virtual ~ModuleFlashControl(); + + // FlashControlBase + status_t hasFlashUnit(const String8& cameraId, bool *hasFlash); + status_t setTorchMode(const String8& cameraId, bool enabled); + + private: + CameraModule *mCameraModule; + + Mutex mLock; +}; + +/** + * Flash control for camera module <= v2.3 and camera HAL v2-v3 + */ +class CameraDeviceClientFlashControl : public FlashControlBase { + public: + CameraDeviceClientFlashControl(CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks); + virtual ~CameraDeviceClientFlashControl(); + + // FlashControlBase + status_t setTorchMode(const String8& cameraId, bool enabled); + status_t hasFlashUnit(const String8& cameraId, bool *hasFlash); + + private: + // connect to a camera device + status_t connectCameraDevice(const String8& cameraId); + // disconnect and free mDevice + status_t disconnectCameraDevice(); + + // initialize a surface + status_t initializeSurface(sp<CameraDeviceBase>& device, int32_t width, + int32_t height); + + // submit a request to enable the torch mode + status_t submitTorchEnabledRequest(); + + // get the smallest surface size of IMPLEMENTATION_DEFINED + status_t getSmallestSurfaceSize(const camera_info& info, int32_t *width, + int32_t *height); + + // protected by mLock + status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash); + + CameraModule *mCameraModule; + const camera_module_callbacks_t *mCallbacks; + String8 mCameraId; + bool mTorchEnabled; + CameraMetadata *mMetadata; + // WORKAROUND: will be set to true for HAL v2 devices where + // setStreamingRequest() needs to be call for torch mode settings to + // take effect. 
+ bool mStreaming; + + sp<CameraDeviceBase> mDevice; + + sp<IGraphicBufferProducer> mProducer; + sp<IGraphicBufferConsumer> mConsumer; + sp<GLConsumer> mSurfaceTexture; + sp<ANativeWindow> mAnw; + int32_t mStreamId; + + Mutex mLock; +}; + +/** + * Flash control for camera module <= v2.3 and camera HAL v1 + */ +class CameraHardwareInterfaceFlashControl : public FlashControlBase { + public: + CameraHardwareInterfaceFlashControl(CameraModule& cameraModule, + const camera_module_callbacks_t& callbacks); + virtual ~CameraHardwareInterfaceFlashControl(); + + // FlashControlBase + status_t setTorchMode(const String8& cameraId, bool enabled); + status_t hasFlashUnit(const String8& cameraId, bool *hasFlash); + + private: + // connect to a camera device + status_t connectCameraDevice(const String8& cameraId); + + // disconnect and free mDevice + status_t disconnectCameraDevice(); + + // initialize the preview window + status_t initializePreviewWindow(sp<CameraHardwareInterface> device, + int32_t width, int32_t height); + + // start preview and enable torch + status_t startPreviewAndTorch(); + + // get the smallest surface + status_t getSmallestSurfaceSize(int32_t *width, int32_t *height); + + // protected by mLock + status_t hasFlashUnitLocked(const String8& cameraId, bool *hasFlash); + + CameraModule *mCameraModule; + const camera_module_callbacks_t *mCallbacks; + sp<CameraHardwareInterface> mDevice; + String8 mCameraId; + CameraParameters mParameters; + bool mTorchEnabled; + + sp<IGraphicBufferProducer> mProducer; + sp<IGraphicBufferConsumer> mConsumer; + sp<GLConsumer> mSurfaceTexture; + sp<ANativeWindow> mAnw; + + Mutex mLock; +}; + +} // namespace android + +#endif diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp index 1232c32..e9c96c6 100644 --- a/services/camera/libcameraservice/CameraService.cpp +++ b/services/camera/libcameraservice/CameraService.cpp @@ -17,9 +17,14 @@ #define LOG_TAG 
"CameraService" //#define LOG_NDEBUG 0 +#include <algorithm> +#include <climits> #include <stdio.h> -#include <string.h> +#include <cstring> +#include <ctime> +#include <string> #include <sys/types.h> +#include <inttypes.h> #include <pthread.h> #include <binder/AppOpsManager.h> @@ -27,6 +32,7 @@ #include <binder/IServiceManager.h> #include <binder/MemoryBase.h> #include <binder/MemoryHeapBase.h> +#include <binder/ProcessInfoService.h> #include <cutils/atomic.h> #include <cutils/properties.h> #include <gui/Surface.h> @@ -45,7 +51,6 @@ #include "CameraService.h" #include "api1/CameraClient.h" #include "api1/Camera2Client.h" -#include "api_pro/ProCamera2Client.h" #include "api2/CameraDeviceClient.h" #include "utils/CameraTraces.h" #include "CameraDeviceFactory.h" @@ -66,25 +71,48 @@ static void setLogLevel(int level) { // ---------------------------------------------------------------------------- -static int getCallingPid() { - return IPCThreadState::self()->getCallingPid(); -} - -static int getCallingUid() { - return IPCThreadState::self()->getCallingUid(); -} - extern "C" { static void camera_device_status_change( const struct camera_module_callbacks* callbacks, int camera_id, int new_status) { sp<CameraService> cs = const_cast<CameraService*>( + static_cast<const CameraService*>(callbacks)); + + cs->onDeviceStatusChanged(static_cast<camera_device_status_t>(camera_id), + static_cast<camera_device_status_t>(new_status)); +} + +static void torch_mode_status_change( + const struct camera_module_callbacks* callbacks, + const char* camera_id, + int new_status) { + if (!callbacks || !camera_id) { + ALOGE("%s invalid parameters. 
callbacks %p, camera_id %p", __FUNCTION__, + callbacks, camera_id); + } + sp<CameraService> cs = const_cast<CameraService*>( static_cast<const CameraService*>(callbacks)); - cs->onDeviceStatusChanged( - camera_id, - new_status); + ICameraServiceListener::TorchStatus status; + switch (new_status) { + case TORCH_MODE_STATUS_NOT_AVAILABLE: + status = ICameraServiceListener::TORCH_STATUS_NOT_AVAILABLE; + break; + case TORCH_MODE_STATUS_AVAILABLE_OFF: + status = ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF; + break; + case TORCH_MODE_STATUS_AVAILABLE_ON: + status = ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON; + break; + default: + ALOGE("Unknown torch status %d", new_status); + return; + } + + cs->onTorchStatusChanged( + String8(camera_id), + status); } } // extern "C" @@ -94,50 +122,89 @@ static void camera_device_status_change( // should be ok for now. static CameraService *gCameraService; -CameraService::CameraService() - :mSoundRef(0), mModule(0) -{ +CameraService::CameraService() : mEventLog(DEFAULT_EVICTION_LOG_LENGTH), + mLastUserId(DEFAULT_LAST_USER_ID), mSoundRef(0), mModule(0), mFlashlight(0) { ALOGI("CameraService started (pid=%d)", getpid()); gCameraService = this; - for (size_t i = 0; i < MAX_CAMERAS; ++i) { - mStatusList[i] = ICameraServiceListener::STATUS_PRESENT; - } - this->camera_device_status_change = android::camera_device_status_change; + this->torch_mode_status_change = android::torch_mode_status_change; + + mServiceLockWrapper = std::make_shared<WaitableMutexWrapper>(&mServiceLock); } void CameraService::onFirstRef() { - LOG1("CameraService::onFirstRef"); + ALOGI("CameraService process starting"); BnCameraService::onFirstRef(); + camera_module_t *rawModule; if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, - (const hw_module_t **)&mModule) < 0) { + (const hw_module_t **)&rawModule) < 0) { ALOGE("Could not load camera HAL module"); mNumberOfCameras = 0; } else { - ALOGI("Loaded \"%s\" camera module", mModule->common.name); - mNumberOfCameras 
= mModule->get_number_of_cameras(); - if (mNumberOfCameras > MAX_CAMERAS) { - ALOGE("Number of cameras(%d) > MAX_CAMERAS(%d).", - mNumberOfCameras, MAX_CAMERAS); - mNumberOfCameras = MAX_CAMERAS; + mModule = new CameraModule(rawModule); + ALOGI("Loaded \"%s\" camera module", mModule->getModuleName()); + mNumberOfCameras = mModule->getNumberOfCameras(); + + mFlashlight = new CameraFlashlight(*mModule, *this); + status_t res = mFlashlight->findFlashUnits(); + if (res) { + // impossible because we haven't open any camera devices. + ALOGE("Failed to find flash units."); } + for (int i = 0; i < mNumberOfCameras; i++) { - setCameraFree(i); + String8 cameraId = String8::format("%d", i); + + // Defaults to use for cost and conflicting devices + int cost = 100; + char** conflicting_devices = nullptr; + size_t conflicting_devices_length = 0; + + // If using post-2.4 module version, query the cost + conflicting devices from the HAL + if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) { + struct camera_info info; + status_t rc = mModule->getCameraInfo(i, &info); + if (rc == NO_ERROR) { + cost = info.resource_cost; + conflicting_devices = info.conflicting_devices; + conflicting_devices_length = info.conflicting_devices_length; + } else { + ALOGE("%s: Received error loading camera info for device %d, cost and" + " conflicting devices fields set to defaults for this device.", + __FUNCTION__, i); + } + } + + std::set<String8> conflicting; + for (size_t i = 0; i < conflicting_devices_length; i++) { + conflicting.emplace(String8(conflicting_devices[i])); + } + + // Initialize state for each camera device + { + Mutex::Autolock lock(mCameraStatesLock); + mCameraStates.emplace(cameraId, std::make_shared<CameraState>(cameraId, cost, + conflicting)); + } + + if (mFlashlight->hasFlashUnit(cameraId)) { + mTorchStatusMap.add(cameraId, + ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF); + } } - if (mModule->common.module_api_version >= - CAMERA_MODULE_API_VERSION_2_1) { 
- mModule->set_callbacks(this); + if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_1) { + mModule->setCallbacks(this); } VendorTagDescriptor::clearGlobalVendorTagDescriptor(); - if (mModule->common.module_api_version >= CAMERA_MODULE_API_VERSION_2_2) { + if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_2) { setUpVendorTags(); } @@ -146,80 +213,111 @@ void CameraService::onFirstRef() } CameraService::~CameraService() { - for (int i = 0; i < mNumberOfCameras; i++) { - if (mBusy[i]) { - ALOGE("camera %d is still in use in destructor!", i); - } + if (mModule) { + delete mModule; + mModule = nullptr; } - VendorTagDescriptor::clearGlobalVendorTagDescriptor(); - gCameraService = NULL; + gCameraService = nullptr; } -void CameraService::onDeviceStatusChanged(int cameraId, - int newStatus) -{ +void CameraService::onDeviceStatusChanged(camera_device_status_t cameraId, + camera_device_status_t newStatus) { ALOGI("%s: Status changed for cameraId=%d, newStatus=%d", __FUNCTION__, cameraId, newStatus); - if (cameraId < 0 || cameraId >= MAX_CAMERAS) { + String8 id = String8::format("%d", cameraId); + std::shared_ptr<CameraState> state = getCameraState(id); + + if (state == nullptr) { ALOGE("%s: Bad camera ID %d", __FUNCTION__, cameraId); return; } - if ((int)getStatus(cameraId) == newStatus) { - ALOGE("%s: State transition to the same status 0x%x not allowed", - __FUNCTION__, (uint32_t)newStatus); + ICameraServiceListener::Status oldStatus = state->getStatus(); + + if (oldStatus == static_cast<ICameraServiceListener::Status>(newStatus)) { + ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__, newStatus); return; } - /* don't do this in updateStatus - since it is also called from connect and we could get into a deadlock */ if (newStatus == CAMERA_DEVICE_STATUS_NOT_PRESENT) { - Vector<sp<BasicClient> > clientsToDisconnect; + sp<BasicClient> clientToDisconnect; { - Mutex::Autolock al(mServiceLock); - - /* Remove cached 
parameters from shim cache */ - mShimParams.removeItem(cameraId); - - /* Find all clients that we need to disconnect */ - sp<BasicClient> client = mClient[cameraId].promote(); - if (client.get() != NULL) { - clientsToDisconnect.push_back(client); - } - - int i = cameraId; - for (size_t j = 0; j < mProClientList[i].size(); ++j) { - sp<ProClient> cl = mProClientList[i][j].promote(); - if (cl != NULL) { - clientsToDisconnect.push_back(cl); - } - } - } + // Don't do this in updateStatus to avoid deadlock over mServiceLock + Mutex::Autolock lock(mServiceLock); - /* now disconnect them. don't hold the lock - or we can get into a deadlock */ + // Set the device status to NOT_PRESENT, clients will no longer be able to connect + // to this device until the status changes + updateStatus(ICameraServiceListener::STATUS_NOT_PRESENT, id); - for (size_t i = 0; i < clientsToDisconnect.size(); ++i) { - sp<BasicClient> client = clientsToDisconnect[i]; + // Remove cached shim parameters + state->setShimParams(CameraParameters()); - client->disconnect(); - /** - * The remote app will no longer be able to call methods on the - * client since the client PID will be reset to 0 - */ + // Remove the client from the list of active clients + clientToDisconnect = removeClientLocked(id); + + // Notify the client of disconnection + clientToDisconnect->notifyError(ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED, + CaptureResultExtras{}); + } + + ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL", + __FUNCTION__, id.string()); + + // Disconnect client + if (clientToDisconnect.get() != nullptr) { + // Ensure not in binder RPC so client disconnect PID checks work correctly + LOG_ALWAYS_FATAL_IF(getCallingPid() != getpid(), + "onDeviceStatusChanged must be called from the camera service process!"); + clientToDisconnect->disconnect(); } - ALOGV("%s: After unplug, disconnected %zu clients", - __FUNCTION__, clientsToDisconnect.size()); + } else { + 
updateStatus(static_cast<ICameraServiceListener::Status>(newStatus), id); } - updateStatus( - static_cast<ICameraServiceListener::Status>(newStatus), cameraId); +} +void CameraService::onTorchStatusChanged(const String8& cameraId, + ICameraServiceListener::TorchStatus newStatus) { + Mutex::Autolock al(mTorchStatusMutex); + onTorchStatusChangedLocked(cameraId, newStatus); } +void CameraService::onTorchStatusChangedLocked(const String8& cameraId, + ICameraServiceListener::TorchStatus newStatus) { + ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d", + __FUNCTION__, cameraId.string(), newStatus); + + ICameraServiceListener::TorchStatus status; + status_t res = getTorchStatusLocked(cameraId, &status); + if (res) { + ALOGE("%s: cannot get torch status of camera %s", cameraId.string()); + return; + } + if (status == newStatus) { + ALOGE("%s: Torch state transition to the same status 0x%x not allowed", + __FUNCTION__, (uint32_t)newStatus); + return; + } + + res = setTorchStatusLocked(cameraId, newStatus); + if (res) { + ALOGE("%s: Failed to set the torch status", __FUNCTION__, + (uint32_t)newStatus); + return; + } + + { + Mutex::Autolock lock(mStatusListenerLock); + for (auto& i : mListenerList) { + i->onTorchStatusChanged(newStatus, String16{cameraId}); + } + } +} + + int32_t CameraService::getNumberOfCameras() { return mNumberOfCameras; } @@ -236,12 +334,21 @@ status_t CameraService::getCameraInfo(int cameraId, struct camera_info info; status_t rc = filterGetInfoErrorCode( - mModule->get_camera_info(cameraId, &info)); + mModule->getCameraInfo(cameraId, &info)); cameraInfo->facing = info.facing; cameraInfo->orientation = info.orientation; return rc; } +int CameraService::cameraIdToInt(const String8& cameraId) { + errno = 0; + size_t pos = 0; + int ret = stoi(std::string{cameraId.string()}, &pos); + if (errno != 0 || pos != cameraId.size()) { + return -1; + } + return ret; +} status_t CameraService::generateShimMetadata(int cameraId, /*out*/CameraMetadata* 
cameraInfo) { status_t ret = OK; @@ -347,7 +454,7 @@ status_t CameraService::getCameraCharacteristics(int cameraId, int facing; status_t ret = OK; - if (mModule->common.module_api_version < CAMERA_MODULE_API_VERSION_2_0 || + if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_0 || getDeviceVersion(cameraId, &facing) <= CAMERA_DEVICE_API_VERSION_2_1 ) { /** * Backwards compatibility mode for old HALs: @@ -368,13 +475,61 @@ status_t CameraService::getCameraCharacteristics(int cameraId, * Normal HAL 2.1+ codepath. */ struct camera_info info; - ret = filterGetInfoErrorCode(mModule->get_camera_info(cameraId, &info)); + ret = filterGetInfoErrorCode(mModule->getCameraInfo(cameraId, &info)); *cameraInfo = info.static_camera_characteristics; } return ret; } +int CameraService::getCallingPid() { + return IPCThreadState::self()->getCallingPid(); +} + +int CameraService::getCallingUid() { + return IPCThreadState::self()->getCallingUid(); +} + +String8 CameraService::getFormattedCurrentTime() { + time_t now = time(nullptr); + char formattedTime[64]; + strftime(formattedTime, sizeof(formattedTime), "%m-%d %H:%M:%S", localtime(&now)); + return String8(formattedTime); +} + +int CameraService::getCameraPriorityFromProcState(int procState) { + // Find the priority for the camera usage based on the process state. Higher priority clients + // win for evictions. + // Note: Unlike the ordering for ActivityManager, persistent system processes will always lose + // the camera to the top/foreground applications. 
+ switch(procState) { + case PROCESS_STATE_TOP: // User visible + return 100; + case PROCESS_STATE_IMPORTANT_FOREGROUND: // Foreground + return 90; + case PROCESS_STATE_PERSISTENT: // Persistent system services + case PROCESS_STATE_PERSISTENT_UI: + return 80; + case PROCESS_STATE_IMPORTANT_BACKGROUND: // "Important" background processes + return 70; + case PROCESS_STATE_BACKUP: // Everything else + case PROCESS_STATE_HEAVY_WEIGHT: + case PROCESS_STATE_SERVICE: + case PROCESS_STATE_RECEIVER: + case PROCESS_STATE_HOME: + case PROCESS_STATE_LAST_ACTIVITY: + case PROCESS_STATE_CACHED_ACTIVITY: + case PROCESS_STATE_CACHED_ACTIVITY_CLIENT: + case PROCESS_STATE_CACHED_EMPTY: + return 1; + case PROCESS_STATE_NONEXISTENT: + return -1; + default: + ALOGE("%s: Received unknown process state from ActivityManagerService!", __FUNCTION__); + return -1; + } +} + status_t CameraService::getCameraVendorTagDescriptor(/*out*/sp<VendorTagDescriptor>& desc) { if (!mModule) { ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__); @@ -387,12 +542,12 @@ status_t CameraService::getCameraVendorTagDescriptor(/*out*/sp<VendorTagDescript int CameraService::getDeviceVersion(int cameraId, int* facing) { struct camera_info info; - if (mModule->get_camera_info(cameraId, &info) != OK) { + if (mModule->getCameraInfo(cameraId, &info) != OK) { return -1; } int deviceVersion; - if (mModule->common.module_api_version >= CAMERA_MODULE_API_VERSION_2_0) { + if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) { deviceVersion = info.device_version; } else { deviceVersion = CAMERA_DEVICE_API_VERSION_1_0; @@ -405,19 +560,6 @@ int CameraService::getDeviceVersion(int cameraId, int* facing) { return deviceVersion; } -status_t CameraService::filterOpenErrorCode(status_t err) { - switch(err) { - case NO_ERROR: - case -EBUSY: - case -EINVAL: - case -EUSERS: - return err; - default: - break; - } - return -ENODEV; -} - status_t CameraService::filterGetInfoErrorCode(status_t err) { switch(err) 
{ case NO_ERROR: @@ -433,13 +575,13 @@ bool CameraService::setUpVendorTags() { vendor_tag_ops_t vOps = vendor_tag_ops_t(); // Check if vendor operations have been implemented - if (mModule->get_vendor_tag_ops == NULL) { + if (!mModule->isVendorTagDefined()) { ALOGI("%s: No vendor tags defined for this device.", __FUNCTION__); return false; } ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops"); - mModule->get_vendor_tag_ops(&vOps); + mModule->getVendorTagOps(&vOps); ATRACE_END(); // Ensure all vendor operations are present @@ -467,54 +609,90 @@ bool CameraService::setUpVendorTags() { return true; } -status_t CameraService::initializeShimMetadata(int cameraId) { - int pid = getCallingPid(); - int uid = getCallingUid(); - status_t ret = validateConnect(cameraId, uid); - if (ret != OK) { - // Error already logged by callee - return ret; - } +status_t CameraService::makeClient(const sp<CameraService>& cameraService, + const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId, + int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode, + int halVersion, int deviceVersion, apiLevel effectiveApiLevel, + /*out*/sp<BasicClient>* client) { - bool needsNewClient = false; - sp<Client> client; + // TODO: Update CameraClients + HAL interface to use strings for Camera IDs + int id = cameraIdToInt(cameraId); + if (id == -1) { + ALOGE("%s: Invalid camera ID %s, cannot convert to integer.", __FUNCTION__, + cameraId.string()); + return BAD_VALUE; + } - String16 internalPackageName("media"); - { // Scope for service lock - Mutex::Autolock lock(mServiceLock); - if (mClient[cameraId] != NULL) { - client = static_cast<Client*>(mClient[cameraId].promote().get()); - } - if (client == NULL) { - needsNewClient = true; - ret = connectHelperLocked(/*out*/client, - /*cameraClient*/NULL, // Empty binder callbacks - cameraId, - internalPackageName, - uid, - pid); - - if (ret != OK) { - // Error already logged by callee - return ret; + if (halVersion < 
0 || halVersion == deviceVersion) { + // Default path: HAL version is unspecified by caller, create CameraClient + // based on device version reported by the HAL. + switch(deviceVersion) { + case CAMERA_DEVICE_API_VERSION_1_0: + if (effectiveApiLevel == API_1) { // Camera1 API route + sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get()); + *client = new CameraClient(cameraService, tmp, packageName, id, facing, + clientPid, clientUid, getpid(), legacyMode); + } else { // Camera2 API route + ALOGW("Camera using old HAL version: %d", deviceVersion); + return -EOPNOTSUPP; + } + break; + case CAMERA_DEVICE_API_VERSION_2_0: + case CAMERA_DEVICE_API_VERSION_2_1: + case CAMERA_DEVICE_API_VERSION_3_0: + case CAMERA_DEVICE_API_VERSION_3_1: + case CAMERA_DEVICE_API_VERSION_3_2: + case CAMERA_DEVICE_API_VERSION_3_3: + if (effectiveApiLevel == API_1) { // Camera1 API route + sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get()); + *client = new Camera2Client(cameraService, tmp, packageName, id, facing, + clientPid, clientUid, servicePid, legacyMode); + } else { // Camera2 API route + sp<ICameraDeviceCallbacks> tmp = + static_cast<ICameraDeviceCallbacks*>(cameraCb.get()); + *client = new CameraDeviceClient(cameraService, tmp, packageName, id, + facing, clientPid, clientUid, servicePid); } + break; + default: + // Should not be reachable + ALOGE("Unknown camera device HAL version: %d", deviceVersion); + return INVALID_OPERATION; } - - if (client == NULL) { - ALOGE("%s: Could not connect to client camera device.", __FUNCTION__); - return BAD_VALUE; + } else { + // A particular HAL version is requested by caller. Create CameraClient + // based on the requested HAL version. + if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 && + halVersion == CAMERA_DEVICE_API_VERSION_1_0) { + // Only support higher HAL version device opened as HAL1.0 device. 
+ sp<ICameraClient> tmp = static_cast<ICameraClient*>(cameraCb.get()); + *client = new CameraClient(cameraService, tmp, packageName, id, facing, + clientPid, clientUid, servicePid, legacyMode); + } else { + // Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet. + ALOGE("Invalid camera HAL version %x: HAL %x device can only be" + " opened as HAL %x device", halVersion, deviceVersion, + CAMERA_DEVICE_API_VERSION_1_0); + return INVALID_OPERATION; } - - String8 rawParams = client->getParameters(); - CameraParameters params(rawParams); - mShimParams.add(cameraId, params); } + return NO_ERROR; +} - // Close client if one was opened solely for this call - if (needsNewClient) { - client->disconnect(); +status_t CameraService::initializeShimMetadata(int cameraId) { + int uid = getCallingUid(); + + String16 internalPackageName("media"); + String8 id = String8::format("%d", cameraId); + status_t ret = NO_ERROR; + sp<Client> tmp = nullptr; + if ((ret = connectHelper<ICameraClient,Client>(sp<ICameraClient>{nullptr}, id, + static_cast<int>(CAMERA_HAL_API_VERSION_UNSPECIFIED), internalPackageName, uid, API_1, + false, true, tmp)) != NO_ERROR) { + ALOGE("%s: Error %d (%s) initializing shim metadata.", __FUNCTION__, ret, strerror(ret)); + return ret; } - return OK; + return NO_ERROR; } status_t CameraService::getLegacyParametersLazy(int cameraId, @@ -530,42 +708,55 @@ status_t CameraService::getLegacyParametersLazy(int cameraId, return BAD_VALUE; } - ssize_t index = -1; - { // Scope for service lock - Mutex::Autolock lock(mServiceLock); - index = mShimParams.indexOfKey(cameraId); - // Release service lock so initializeShimMetadata can be called correctly. 
+ String8 id = String8::format("%d", cameraId); - if (index >= 0) { - *parameters = mShimParams[index]; + // Check if we already have parameters + { + // Scope for service lock + Mutex::Autolock lock(mServiceLock); + auto cameraState = getCameraState(id); + if (cameraState == nullptr) { + ALOGE("%s: Invalid camera ID: %s", __FUNCTION__, id.string()); + return BAD_VALUE; } - } - - if (index < 0) { - int64_t token = IPCThreadState::self()->clearCallingIdentity(); - ret = initializeShimMetadata(cameraId); - IPCThreadState::self()->restoreCallingIdentity(token); - if (ret != OK) { - // Error already logged by callee - return ret; + CameraParameters p = cameraState->getShimParams(); + if (!p.isEmpty()) { + *parameters = p; + return NO_ERROR; } + } - { // Scope for service lock - Mutex::Autolock lock(mServiceLock); - index = mShimParams.indexOfKey(cameraId); - - LOG_ALWAYS_FATAL_IF(index < 0, "index should have been initialized"); + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + ret = initializeShimMetadata(cameraId); + IPCThreadState::self()->restoreCallingIdentity(token); + if (ret != NO_ERROR) { + // Error already logged by callee + return ret; + } - *parameters = mShimParams[index]; + // Check for parameters again + { + // Scope for service lock + Mutex::Autolock lock(mServiceLock); + auto cameraState = getCameraState(id); + if (cameraState == nullptr) { + ALOGE("%s: Invalid camera ID: %s", __FUNCTION__, id.string()); + return BAD_VALUE; + } + CameraParameters p = cameraState->getShimParams(); + if (!p.isEmpty()) { + *parameters = p; + return NO_ERROR; } } - return OK; + ALOGE("%s: Parameters were not initialized, or were empty. 
Device may not be present.", + __FUNCTION__); + return INVALID_OPERATION; } -status_t CameraService::validateConnect(int cameraId, - /*inout*/ - int& clientUid) const { +status_t CameraService::validateConnectLocked(const String8& cameraId, /*inout*/int& clientUid) + const { int callingPid = getCallingPid(); @@ -574,160 +765,251 @@ status_t CameraService::validateConnect(int cameraId, } else { // We only trust our own process to forward client UIDs if (callingPid != getpid()) { - ALOGE("CameraService::connect X (pid %d) rejected (don't trust clientUid)", + ALOGE("CameraService::connect X (PID %d) rejected (don't trust clientUid)", callingPid); return PERMISSION_DENIED; } } if (!mModule) { - ALOGE("Camera HAL module not loaded"); + ALOGE("CameraService::connect X (PID %d) rejected (camera HAL module not loaded)", + callingPid); return -ENODEV; } - if (cameraId < 0 || cameraId >= mNumberOfCameras) { - ALOGE("CameraService::connect X (pid %d) rejected (invalid cameraId %d).", - callingPid, cameraId); + if (getCameraState(cameraId) == nullptr) { + ALOGE("CameraService::connect X (PID %d) rejected (invalid camera ID %s)", callingPid, + cameraId.string()); return -ENODEV; } + // Check device policy for this camera char value[PROPERTY_VALUE_MAX]; - property_get("sys.secpolicy.camera.disabled", value, "0"); + char key[PROPERTY_KEY_MAX]; + int clientUserId = multiuser_get_user_id(clientUid); + snprintf(key, PROPERTY_KEY_MAX, "sys.secpolicy.camera.off_%d", clientUserId); + property_get(key, value, "0"); if (strcmp(value, "1") == 0) { // Camera is disabled by DevicePolicyManager. - ALOGI("Camera is disabled. connect X (pid %d) rejected", callingPid); + ALOGE("CameraService::connect X (PID %d) rejected (camera %s is disabled by device " + "policy)", callingPid, cameraId.string()); return -EACCES; } - ICameraServiceListener::Status currentStatus = getStatus(cameraId); + // Only allow clients who are being used by the current foreground device user. 
+ if (mLastUserId != clientUserId && mLastUserId != DEFAULT_LAST_USER_ID) { + ALOGE("CameraService::connect X (PID %d) rejected (cannot connect from non-foreground " + "device user)", callingPid); + return PERMISSION_DENIED; + } + + return checkIfDeviceIsUsable(cameraId); +} + +status_t CameraService::checkIfDeviceIsUsable(const String8& cameraId) const { + auto cameraState = getCameraState(cameraId); + int callingPid = getCallingPid(); + if (cameraState == nullptr) { + ALOGE("CameraService::connect X (PID %d) rejected (invalid camera ID %s)", callingPid, + cameraId.string()); + return -ENODEV; + } + + ICameraServiceListener::Status currentStatus = cameraState->getStatus(); if (currentStatus == ICameraServiceListener::STATUS_NOT_PRESENT) { - ALOGI("Camera is not plugged in," - " connect X (pid %d) rejected", callingPid); + ALOGE("CameraService::connect X (PID %d) rejected (camera %s is not connected)", + callingPid, cameraId.string()); return -ENODEV; } else if (currentStatus == ICameraServiceListener::STATUS_ENUMERATING) { - ALOGI("Camera is enumerating," - " connect X (pid %d) rejected", callingPid); + ALOGE("CameraService::connect X (PID %d) rejected, (camera %s is initializing)", + callingPid, cameraId.string()); return -EBUSY; } - // Else don't check for STATUS_NOT_AVAILABLE. 
- // -- It's done implicitly in canConnectUnsafe /w the mBusy array - return OK; + return NO_ERROR; } -bool CameraService::canConnectUnsafe(int cameraId, - const String16& clientPackageName, - const sp<IBinder>& remoteCallback, - sp<BasicClient> &client) { - String8 clientName8(clientPackageName); - int callingPid = getCallingPid(); +void CameraService::finishConnectLocked(const sp<BasicClient>& client, + const CameraService::DescriptorPtr& desc) { - if (mClient[cameraId] != 0) { - client = mClient[cameraId].promote(); - if (client != 0) { - if (remoteCallback == client->getRemote()) { - LOG1("CameraService::connect X (pid %d) (the same client)", - callingPid); - return true; - } else { - // TODOSC: need to support 1 regular client, - // multiple shared clients here - ALOGW("CameraService::connect X (pid %d) rejected" - " (existing client).", callingPid); - return false; - } + // Make a descriptor for the incoming client + auto clientDescriptor = CameraService::CameraClientManager::makeClientDescriptor(client, desc); + auto evicted = mActiveClientManager.addAndEvict(clientDescriptor); + + logConnected(desc->getKey(), static_cast<int>(desc->getOwnerId()), + String8(client->getPackageName())); + + if (evicted.size() > 0) { + // This should never happen - clients should already have been removed in disconnect + for (auto& i : evicted) { + ALOGE("%s: Invalid state: Client for camera %s was not removed in disconnect", + __FUNCTION__, i->getKey().string()); } - mClient[cameraId].clear(); - } - - /* - mBusy is set to false as the last step of the Client destructor, - after which it is guaranteed that the Client destructor has finished ( - including any inherited destructors) - - We only need this for a Client subclasses since we don't allow - multiple Clents to be opened concurrently, but multiple BasicClient - would be fine - */ - if (mBusy[cameraId]) { - ALOGW("CameraService::connect X (pid %d, \"%s\") rejected" - " (camera %d is still busy).", callingPid, - 
clientName8.string(), cameraId); - return false; - } - return true; + LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, clients not evicted properly", + __FUNCTION__); + } } -status_t CameraService::connectHelperLocked( +status_t CameraService::handleEvictionsLocked(const String8& cameraId, int clientPid, + apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName, /*out*/ - sp<Client>& client, - /*in*/ - const sp<ICameraClient>& cameraClient, - int cameraId, - const String16& clientPackageName, - int clientUid, - int callingPid, - int halVersion, - bool legacyMode) { + sp<BasicClient>* client, + std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial) { - int facing = -1; - int deviceVersion = getDeviceVersion(cameraId, &facing); + status_t ret = NO_ERROR; + std::vector<sp<BasicClient>> evictedClients; + DescriptorPtr clientDescriptor; + { + if (effectiveApiLevel == API_1) { + // If we are using API1, any existing client for this camera ID with the same remote + // should be returned rather than evicted to allow MediaRecorder to work properly. + + auto current = mActiveClientManager.get(cameraId); + if (current != nullptr) { + auto clientSp = current->getValue(); + if (clientSp.get() != nullptr) { // should never be needed + if (clientSp->getRemote() == remoteCallback) { + ALOGI("CameraService::connect X (PID %d) (second call from same" + "app binder, returning the same client)", clientPid); + *client = clientSp; + return NO_ERROR; + } + } + } + } - if (halVersion < 0 || halVersion == deviceVersion) { - // Default path: HAL version is unspecified by caller, create CameraClient - // based on device version reported by the HAL. 
- switch(deviceVersion) { - case CAMERA_DEVICE_API_VERSION_1_0: - client = new CameraClient(this, cameraClient, - clientPackageName, cameraId, - facing, callingPid, clientUid, getpid(), legacyMode); - break; - case CAMERA_DEVICE_API_VERSION_2_0: - case CAMERA_DEVICE_API_VERSION_2_1: - case CAMERA_DEVICE_API_VERSION_3_0: - case CAMERA_DEVICE_API_VERSION_3_1: - case CAMERA_DEVICE_API_VERSION_3_2: - client = new Camera2Client(this, cameraClient, - clientPackageName, cameraId, - facing, callingPid, clientUid, getpid(), legacyMode); - break; - case -1: - ALOGE("Invalid camera id %d", cameraId); + // Return error if the device was unplugged or removed by the HAL for some reason + if ((ret = checkIfDeviceIsUsable(cameraId)) != NO_ERROR) { + return ret; + } + + // Get current active client PIDs + std::vector<int> ownerPids(mActiveClientManager.getAllOwners()); + ownerPids.push_back(clientPid); + + // Use the value +PROCESS_STATE_NONEXISTENT, to avoid taking + // address of PROCESS_STATE_NONEXISTENT as a reference argument + // for the vector constructor. PROCESS_STATE_NONEXISTENT does + // not have an out-of-class definition. 
+ std::vector<int> priorities(ownerPids.size(), +PROCESS_STATE_NONEXISTENT); + + // Get priorites of all active PIDs + ProcessInfoService::getProcessStatesFromPids(ownerPids.size(), &ownerPids[0], + /*out*/&priorities[0]); + + // Update all active clients' priorities + std::map<int,int> pidToPriorityMap; + for (size_t i = 0; i < ownerPids.size() - 1; i++) { + pidToPriorityMap.emplace(ownerPids[i], getCameraPriorityFromProcState(priorities[i])); + } + mActiveClientManager.updatePriorities(pidToPriorityMap); + + // Get state for the given cameraId + auto state = getCameraState(cameraId); + if (state == nullptr) { + ALOGE("CameraService::connect X (PID %d) rejected (no camera device with ID %s)", + clientPid, cameraId.string()); return BAD_VALUE; - default: - ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return INVALID_OPERATION; } - } else { - // A particular HAL version is requested by caller. Create CameraClient - // based on the requested HAL version. - if (deviceVersion > CAMERA_DEVICE_API_VERSION_1_0 && - halVersion == CAMERA_DEVICE_API_VERSION_1_0) { - // Only support higher HAL version device opened as HAL1.0 device. - client = new CameraClient(this, cameraClient, - clientPackageName, cameraId, - facing, callingPid, clientUid, getpid(), legacyMode); - } else { - // Other combinations (e.g. HAL3.x open as HAL2.x) are not supported yet. 
- ALOGE("Invalid camera HAL version %x: HAL %x device can only be" - " opened as HAL %x device", halVersion, deviceVersion, - CAMERA_DEVICE_API_VERSION_1_0); - return INVALID_OPERATION; + + // Make descriptor for incoming client + clientDescriptor = CameraClientManager::makeClientDescriptor(cameraId, + sp<BasicClient>{nullptr}, static_cast<int32_t>(state->getCost()), + state->getConflicting(), + getCameraPriorityFromProcState(priorities[priorities.size() - 1]), clientPid); + + // Find clients that would be evicted + auto evicted = mActiveClientManager.wouldEvict(clientDescriptor); + + // If the incoming client was 'evicted,' higher priority clients have the camera in the + // background, so we cannot do evictions + if (std::find(evicted.begin(), evicted.end(), clientDescriptor) != evicted.end()) { + ALOGE("CameraService::connect X (PID %d) rejected (existing client(s) with higher" + " priority).", clientPid); + + sp<BasicClient> clientSp = clientDescriptor->getValue(); + String8 curTime = getFormattedCurrentTime(); + auto incompatibleClients = + mActiveClientManager.getIncompatibleClients(clientDescriptor); + + String8 msg = String8::format("%s : DENIED connect device %s client for package %s " + "(PID %d, priority %d)", curTime.string(), + cameraId.string(), packageName.string(), clientPid, + getCameraPriorityFromProcState(priorities[priorities.size() - 1])); + + for (auto& i : incompatibleClients) { + msg.appendFormat("\n - Blocked by existing device %s client for package %s" + "(PID %" PRId32 ", priority %" PRId32 ")", i->getKey().string(), + String8{i->getValue()->getPackageName()}.string(), i->getOwnerId(), + i->getPriority()); + } + + // Log the client's attempt + mEventLog.add(msg); + + return -EBUSY; + } + + for (auto& i : evicted) { + sp<BasicClient> clientSp = i->getValue(); + if (clientSp.get() == nullptr) { + ALOGE("%s: Invalid state: Null client in active client list.", __FUNCTION__); + + // TODO: Remove this + LOG_ALWAYS_FATAL("%s: Invalid state for 
CameraService, null client in active list", + __FUNCTION__); + mActiveClientManager.remove(i); + continue; + } + + ALOGE("CameraService::connect evicting conflicting client for camera ID %s", + i->getKey().string()); + evictedClients.push_back(clientSp); + + String8 curTime = getFormattedCurrentTime(); + + // Log the clients evicted + mEventLog.add(String8::format("%s : EVICT device %s client for package %s (PID %" + PRId32 ", priority %" PRId32 ")\n - Evicted by device %s client for " + "package %s (PID %d, priority %" PRId32 ")", curTime.string(), + i->getKey().string(), String8{clientSp->getPackageName()}.string(), + i->getOwnerId(), i->getPriority(), cameraId.string(), + packageName.string(), clientPid, + getCameraPriorityFromProcState(priorities[priorities.size() - 1]))); + + // Notify the client of disconnection + clientSp->notifyError(ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED, + CaptureResultExtras()); } } - status_t status = connectFinishUnsafe(client, client->getRemote()); - if (status != OK) { - // this is probably not recoverable.. 
maybe the client can try again - return status; + // Do not hold mServiceLock while disconnecting clients, but retain the condition blocking + // other clients from connecting in mServiceLockWrapper if held + mServiceLock.unlock(); + + // Clear caller identity temporarily so client disconnect PID checks work correctly + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + + // Destroy evicted clients + for (auto& i : evictedClients) { + // Disconnect is blocking, and should only have returned when HAL has cleaned up + i->disconnect(); // Clients will remove themselves from the active client list here } + evictedClients.clear(); - mClient[cameraId] = client; - LOG1("CameraService::connect X (id %d, this pid is %d)", cameraId, - getpid()); + IPCThreadState::self()->restoreCallingIdentity(token); - return OK; + // Once clients have been disconnected, relock + mServiceLock.lock(); + + // Check again if the device was unplugged or something while we weren't holding mServiceLock + if ((ret = checkIfDeviceIsUsable(cameraId)) != NO_ERROR) { + return ret; + } + + *partial = clientDescriptor; + return NO_ERROR; } status_t CameraService::connect( @@ -738,47 +1020,18 @@ status_t CameraService::connect( /*out*/ sp<ICamera>& device) { - String8 clientName8(clientPackageName); - int callingPid = getCallingPid(); - - LOG1("CameraService::connect E (pid %d \"%s\", id %d)", callingPid, - clientName8.string(), cameraId); - - status_t status = validateConnect(cameraId, /*inout*/clientUid); - if (status != OK) { - return status; - } - - - sp<Client> client; - { - Mutex::Autolock lock(mServiceLock); - sp<BasicClient> clientTmp; - if (!canConnectUnsafe(cameraId, clientPackageName, - IInterface::asBinder(cameraClient), - /*out*/clientTmp)) { - return -EBUSY; - } else if (client.get() != NULL) { - device = static_cast<Client*>(clientTmp.get()); - return OK; - } - - status = connectHelperLocked(/*out*/client, - cameraClient, - cameraId, - clientPackageName, - clientUid, - 
callingPid); - if (status != OK) { - return status; - } + status_t ret = NO_ERROR; + String8 id = String8::format("%d", cameraId); + sp<Client> client = nullptr; + ret = connectHelper<ICameraClient,Client>(cameraClient, id, CAMERA_HAL_API_VERSION_UNSPECIFIED, + clientPackageName, clientUid, API_1, false, false, /*out*/client); + if(ret != NO_ERROR) { + return ret; } - // important: release the mutex here so the client can call back - // into the service from its destructor (can be at the end of the call) device = client; - return OK; + return NO_ERROR; } status_t CameraService::connectLegacy( @@ -789,8 +1042,9 @@ status_t CameraService::connectLegacy( /*out*/ sp<ICamera>& device) { + int apiVersion = mModule->getModuleApiVersion(); if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED && - mModule->common.module_api_version < CAMERA_MODULE_API_VERSION_2_3) { + apiVersion < CAMERA_MODULE_API_VERSION_2_3) { /* * Either the HAL version is unspecified in which case this just creates * a camera client selected by the latest device version, or @@ -798,219 +1052,138 @@ status_t CameraService::connectLegacy( * the open_legacy call */ ALOGE("%s: camera HAL module version %x doesn't support connecting to legacy HAL devices!", - __FUNCTION__, mModule->common.module_api_version); + __FUNCTION__, apiVersion); return INVALID_OPERATION; } - String8 clientName8(clientPackageName); - int callingPid = getCallingPid(); - - LOG1("CameraService::connect legacy E (pid %d \"%s\", id %d)", callingPid, - clientName8.string(), cameraId); + status_t ret = NO_ERROR; + String8 id = String8::format("%d", cameraId); + sp<Client> client = nullptr; + ret = connectHelper<ICameraClient,Client>(cameraClient, id, halVersion, clientPackageName, + clientUid, API_1, true, false, /*out*/client); - status_t status = validateConnect(cameraId, /*inout*/clientUid); - if (status != OK) { - return status; + if(ret != NO_ERROR) { + return ret; } - sp<Client> client; - { - Mutex::Autolock lock(mServiceLock); - 
sp<BasicClient> clientTmp; - if (!canConnectUnsafe(cameraId, clientPackageName, - IInterface::asBinder(cameraClient), - /*out*/clientTmp)) { - return -EBUSY; - } else if (client.get() != NULL) { - device = static_cast<Client*>(clientTmp.get()); - return OK; - } + device = client; + return NO_ERROR; +} - status = connectHelperLocked(/*out*/client, - cameraClient, - cameraId, - clientPackageName, - clientUid, - callingPid, - halVersion, - /*legacyMode*/true); - if (status != OK) { - return status; - } +status_t CameraService::connectDevice( + const sp<ICameraDeviceCallbacks>& cameraCb, + int cameraId, + const String16& clientPackageName, + int clientUid, + /*out*/ + sp<ICameraDeviceUser>& device) { + + status_t ret = NO_ERROR; + String8 id = String8::format("%d", cameraId); + sp<CameraDeviceClient> client = nullptr; + ret = connectHelper<ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id, + CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName, clientUid, API_2, false, false, + /*out*/client); + if(ret != NO_ERROR) { + return ret; } - // important: release the mutex here so the client can call back - // into the service from its destructor (can be at the end of the call) device = client; - return OK; + return NO_ERROR; } -status_t CameraService::connectFinishUnsafe(const sp<BasicClient>& client, - const sp<IBinder>& remoteCallback) { - status_t status = client->initialize(mModule); - if (status != OK) { - ALOGE("%s: Could not initialize client from HAL module.", __FUNCTION__); - return status; - } - if (remoteCallback != NULL) { - remoteCallback->linkToDeath(this); +status_t CameraService::setTorchMode(const String16& cameraId, bool enabled, + const sp<IBinder>& clientBinder) { + if (enabled && clientBinder == NULL) { + ALOGE("%s: torch client binder is NULL", __FUNCTION__); + return -EINVAL; } - return OK; -} + String8 id = String8(cameraId.string()); -status_t CameraService::connectPro( - const sp<IProCameraCallbacks>& cameraCb, - int cameraId, - const 
String16& clientPackageName, - int clientUid, - /*out*/ - sp<IProCameraUser>& device) -{ - if (cameraCb == 0) { - ALOGE("%s: Callback must not be null", __FUNCTION__); - return BAD_VALUE; + // verify id is valid. + auto state = getCameraState(id); + if (state == nullptr) { + ALOGE("%s: camera id is invalid %s", id.string()); + return -EINVAL; } - String8 clientName8(clientPackageName); - int callingPid = getCallingPid(); - - LOG1("CameraService::connectPro E (pid %d \"%s\", id %d)", callingPid, - clientName8.string(), cameraId); - status_t status = validateConnect(cameraId, /*inout*/clientUid); - if (status != OK) { - return status; + ICameraServiceListener::Status cameraStatus = state->getStatus(); + if (cameraStatus != ICameraServiceListener::STATUS_PRESENT && + cameraStatus != ICameraServiceListener::STATUS_NOT_AVAILABLE) { + ALOGE("%s: camera id is invalid %s", id.string()); + return -EINVAL; } - sp<ProClient> client; { - Mutex::Autolock lock(mServiceLock); - { - sp<BasicClient> client; - if (!canConnectUnsafe(cameraId, clientPackageName, - IInterface::asBinder(cameraCb), - /*out*/client)) { - return -EBUSY; - } + Mutex::Autolock al(mTorchStatusMutex); + ICameraServiceListener::TorchStatus status; + status_t res = getTorchStatusLocked(id, &status); + if (res) { + ALOGE("%s: getting current torch status failed for camera %s", + __FUNCTION__, id.string()); + return -EINVAL; } - int facing = -1; - int deviceVersion = getDeviceVersion(cameraId, &facing); - - switch(deviceVersion) { - case CAMERA_DEVICE_API_VERSION_1_0: - ALOGE("Camera id %d uses HALv1, doesn't support ProCamera", - cameraId); - return -EOPNOTSUPP; - break; - case CAMERA_DEVICE_API_VERSION_2_0: - case CAMERA_DEVICE_API_VERSION_2_1: - case CAMERA_DEVICE_API_VERSION_3_0: - case CAMERA_DEVICE_API_VERSION_3_1: - case CAMERA_DEVICE_API_VERSION_3_2: - client = new ProCamera2Client(this, cameraCb, clientPackageName, - cameraId, facing, callingPid, clientUid, getpid()); - break; - case -1: - ALOGE("Invalid 
camera id %d", cameraId); - return BAD_VALUE; - default: - ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return INVALID_OPERATION; - } - - status_t status = connectFinishUnsafe(client, client->getRemote()); - if (status != OK) { - return status; + if (status == ICameraServiceListener::TORCH_STATUS_NOT_AVAILABLE) { + if (cameraStatus == ICameraServiceListener::STATUS_NOT_AVAILABLE) { + ALOGE("%s: torch mode of camera %s is not available because " + "camera is in use", __FUNCTION__, id.string()); + return -EBUSY; + } else { + ALOGE("%s: torch mode of camera %s is not available due to " + "insufficient resources", __FUNCTION__, id.string()); + return -EUSERS; + } } - - mProClientList[cameraId].push(client); - - LOG1("CameraService::connectPro X (id %d, this pid is %d)", cameraId, - getpid()); } - // important: release the mutex here so the client can call back - // into the service from its destructor (can be at the end of the call) - device = client; - return OK; -} - -status_t CameraService::connectDevice( - const sp<ICameraDeviceCallbacks>& cameraCb, - int cameraId, - const String16& clientPackageName, - int clientUid, - /*out*/ - sp<ICameraDeviceUser>& device) -{ - String8 clientName8(clientPackageName); - int callingPid = getCallingPid(); - - LOG1("CameraService::connectDevice E (pid %d \"%s\", id %d)", callingPid, - clientName8.string(), cameraId); - - status_t status = validateConnect(cameraId, /*inout*/clientUid); - if (status != OK) { - return status; + status_t res = mFlashlight->setTorchMode(id, enabled); + if (res) { + ALOGE("%s: setting torch mode of camera %s to %d failed. 
%s (%d)", + __FUNCTION__, id.string(), enabled, strerror(-res), res); + return res; } - sp<CameraDeviceClient> client; { - Mutex::Autolock lock(mServiceLock); - { - sp<BasicClient> client; - if (!canConnectUnsafe(cameraId, clientPackageName, - IInterface::asBinder(cameraCb), - /*out*/client)) { - return -EBUSY; + // update the link to client's death + Mutex::Autolock al(mTorchClientMapMutex); + ssize_t index = mTorchClientMap.indexOfKey(id); + if (enabled) { + if (index == NAME_NOT_FOUND) { + mTorchClientMap.add(id, clientBinder); + } else { + const sp<IBinder> oldBinder = mTorchClientMap.valueAt(index); + oldBinder->unlinkToDeath(this); + + mTorchClientMap.replaceValueAt(index, clientBinder); } + clientBinder->linkToDeath(this); + } else if (index != NAME_NOT_FOUND) { + sp<IBinder> oldBinder = mTorchClientMap.valueAt(index); + oldBinder->unlinkToDeath(this); } + } - int facing = -1; - int deviceVersion = getDeviceVersion(cameraId, &facing); + return OK; +} - switch(deviceVersion) { - case CAMERA_DEVICE_API_VERSION_1_0: - ALOGW("Camera using old HAL version: %d", deviceVersion); - return -EOPNOTSUPP; - // TODO: don't allow 2.0 Only allow 2.1 and higher - case CAMERA_DEVICE_API_VERSION_2_0: - case CAMERA_DEVICE_API_VERSION_2_1: - case CAMERA_DEVICE_API_VERSION_3_0: - case CAMERA_DEVICE_API_VERSION_3_1: - case CAMERA_DEVICE_API_VERSION_3_2: - client = new CameraDeviceClient(this, cameraCb, clientPackageName, - cameraId, facing, callingPid, clientUid, getpid()); +void CameraService::notifySystemEvent(int eventId, int arg0) { + switch(eventId) { + case ICameraService::USER_SWITCHED: { + doUserSwitch(/*newUserId*/arg0); break; - case -1: - ALOGE("Invalid camera id %d", cameraId); - return BAD_VALUE; - default: - ALOGE("Unknown camera device HAL version: %d", deviceVersion); - return INVALID_OPERATION; } - - status_t status = connectFinishUnsafe(client, client->getRemote()); - if (status != OK) { - // this is probably not recoverable.. 
maybe the client can try again - return status; + case ICameraService::NO_EVENT: + default: { + ALOGW("%s: Received invalid system event from system_server: %d", __FUNCTION__, + eventId); + break; } - - LOG1("CameraService::connectDevice X (id %d, this pid is %d)", cameraId, - getpid()); - - mClient[cameraId] = client; } - // important: release the mutex here so the client can call back - // into the service from its destructor (can be at the end of the call) - - device = client; - return OK; } - status_t CameraService::addListener( const sp<ICameraServiceListener>& listener) { ALOGV("%s: Add listener %p", __FUNCTION__, listener.get()); @@ -1022,30 +1195,45 @@ status_t CameraService::addListener( Mutex::Autolock lock(mServiceLock); - Vector<sp<ICameraServiceListener> >::iterator it, end; - for (it = mListenerList.begin(); it != mListenerList.end(); ++it) { - if (IInterface::asBinder(*it) == IInterface::asBinder(listener)) { - ALOGW("%s: Tried to add listener %p which was already subscribed", - __FUNCTION__, listener.get()); - return ALREADY_EXISTS; + { + Mutex::Autolock lock(mStatusListenerLock); + for (auto& it : mListenerList) { + if (IInterface::asBinder(it) == IInterface::asBinder(listener)) { + ALOGW("%s: Tried to add listener %p which was already subscribed", + __FUNCTION__, listener.get()); + return ALREADY_EXISTS; + } } + + mListenerList.push_back(listener); } - mListenerList.push_back(listener); /* Immediately signal current status to this listener only */ { - Mutex::Autolock m(mStatusMutex) ; - int numCams = getNumberOfCameras(); - for (int i = 0; i < numCams; ++i) { - listener->onStatusChanged(mStatusList[i], i); + Mutex::Autolock lock(mCameraStatesLock); + for (auto& i : mCameraStates) { + // TODO: Update binder to use String16 for camera IDs and remove; + int id = cameraIdToInt(i.first); + if (id == -1) continue; + + listener->onStatusChanged(i.second->getStatus(), id); + } + } + + /* Immediately signal current torch status to this listener only */ + { 
+ Mutex::Autolock al(mTorchStatusMutex); + for (size_t i = 0; i < mTorchStatusMap.size(); i++ ) { + String16 id = String16(mTorchStatusMap.keyAt(i).string()); + listener->onTorchStatusChanged(mTorchStatusMap.valueAt(i), id); } } return OK; } -status_t CameraService::removeListener( - const sp<ICameraServiceListener>& listener) { + +status_t CameraService::removeListener(const sp<ICameraServiceListener>& listener) { ALOGV("%s: Remove listener %p", __FUNCTION__, listener.get()); if (listener == 0) { @@ -1055,11 +1243,13 @@ status_t CameraService::removeListener( Mutex::Autolock lock(mServiceLock); - Vector<sp<ICameraServiceListener> >::iterator it; - for (it = mListenerList.begin(); it != mListenerList.end(); ++it) { - if (IInterface::asBinder(*it) == IInterface::asBinder(listener)) { - mListenerList.erase(it); - return OK; + { + Mutex::Autolock lock(mStatusListenerLock); + for (auto it = mListenerList.begin(); it != mListenerList.end(); it++) { + if (IInterface::asBinder(*it) == IInterface::asBinder(listener)) { + mListenerList.erase(it); + return OK; + } } } @@ -1069,10 +1259,7 @@ status_t CameraService::removeListener( return BAD_VALUE; } -status_t CameraService::getLegacyParameters( - int cameraId, - /*out*/ - String16* parameters) { +status_t CameraService::getLegacyParameters(int cameraId, /*out*/String16* parameters) { ALOGV("%s: for camera ID = %d", __FUNCTION__, cameraId); if (parameters == NULL) { @@ -1127,6 +1314,7 @@ status_t CameraService::supportsCameraApi(int cameraId, int apiVersion) { return OK; } case CAMERA_DEVICE_API_VERSION_3_2: + case CAMERA_DEVICE_API_VERSION_3_3: ALOGV("%s: Camera id %d uses HAL3.2 or newer, supports api1/api2 directly", __FUNCTION__, cameraId); return OK; @@ -1141,140 +1329,177 @@ status_t CameraService::supportsCameraApi(int cameraId, int apiVersion) { return OK; } -void CameraService::removeClientByRemote(const wp<IBinder>& remoteBinder) { - int callingPid = getCallingPid(); - LOG1("CameraService::removeClientByRemote E 
(pid %d)", callingPid); - - // Declare this before the lock to make absolutely sure the - // destructor won't be called with the lock held. +void CameraService::removeByClient(const BasicClient* client) { Mutex::Autolock lock(mServiceLock); + for (auto& i : mActiveClientManager.getAll()) { + auto clientSp = i->getValue(); + if (clientSp.get() == client) { + mActiveClientManager.remove(i); + } + } +} - int outIndex; - sp<BasicClient> client = findClientUnsafe(remoteBinder, outIndex); +bool CameraService::evictClientIdByRemote(const wp<IBinder>& remote) { + const int callingPid = getCallingPid(); + const int servicePid = getpid(); + bool ret = false; + { + // Acquire mServiceLock and prevent other clients from connecting + std::unique_ptr<AutoConditionLock> lock = + AutoConditionLock::waitAndAcquire(mServiceLockWrapper); - if (client != 0) { - // Found our camera, clear and leave. - LOG1("removeClient: clear camera %d", outIndex); - sp<IBinder> remote = client->getRemote(); - if (remote != NULL) { - remote->unlinkToDeath(this); + std::vector<sp<BasicClient>> evicted; + for (auto& i : mActiveClientManager.getAll()) { + auto clientSp = i->getValue(); + if (clientSp.get() == nullptr) { + ALOGE("%s: Dead client still in mActiveClientManager.", __FUNCTION__); + mActiveClientManager.remove(i); + continue; + } + if (remote == clientSp->getRemote() && (callingPid == servicePid || + callingPid == clientSp->getClientPid())) { + mActiveClientManager.remove(i); + evicted.push_back(clientSp); + + // Notify the client of disconnection + clientSp->notifyError(ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED, + CaptureResultExtras()); + } } - mClient[outIndex].clear(); - } else { - - sp<ProClient> clientPro = findProClientUnsafe(remoteBinder); + // Do not hold mServiceLock while disconnecting clients, but retain the condition blocking + // other clients from connecting in mServiceLockWrapper if held + mServiceLock.unlock(); - if (clientPro != NULL) { - // Found our camera, clear 
and leave. - LOG1("removeClient: clear pro %p", clientPro.get()); + // Do not clear caller identity, remote caller should be client proccess - IInterface::asBinder(clientPro->getRemoteCallback())->unlinkToDeath(this); + for (auto& i : evicted) { + if (i.get() != nullptr) { + i->disconnect(); + ret = true; + } } - } - LOG1("CameraService::removeClientByRemote X (pid %d)", callingPid); -} - -sp<CameraService::ProClient> CameraService::findProClientUnsafe( - const wp<IBinder>& cameraCallbacksRemote) -{ - sp<ProClient> clientPro; + // Reacquire mServiceLock + mServiceLock.lock(); - for (int i = 0; i < mNumberOfCameras; ++i) { - Vector<size_t> removeIdx; + } // lock is destroyed, allow further connect calls - for (size_t j = 0; j < mProClientList[i].size(); ++j) { - wp<ProClient> cl = mProClientList[i][j]; + return ret; +} - sp<ProClient> clStrong = cl.promote(); - if (clStrong != NULL && clStrong->getRemote() == cameraCallbacksRemote) { - clientPro = clStrong; - break; - } else if (clStrong == NULL) { - // mark to clean up dead ptr - removeIdx.push(j); - } - } - // remove stale ptrs (in reverse so the indices dont change) - for (ssize_t j = (ssize_t)removeIdx.size() - 1; j >= 0; --j) { - mProClientList[i].removeAt(removeIdx[j]); +std::shared_ptr<CameraService::CameraState> CameraService::getCameraState( + const String8& cameraId) const { + std::shared_ptr<CameraState> state; + { + Mutex::Autolock lock(mCameraStatesLock); + auto iter = mCameraStates.find(cameraId); + if (iter != mCameraStates.end()) { + state = iter->second; } + } + return state; +} +sp<CameraService::BasicClient> CameraService::removeClientLocked(const String8& cameraId) { + // Remove from active clients list + auto clientDescriptorPtr = mActiveClientManager.remove(cameraId); + if (clientDescriptorPtr == nullptr) { + ALOGW("%s: Could not evict client, no client for camera ID %s", __FUNCTION__, + cameraId.string()); + return sp<BasicClient>{nullptr}; } - return clientPro; + return 
clientDescriptorPtr->getValue(); } -sp<CameraService::BasicClient> CameraService::findClientUnsafe( - const wp<IBinder>& cameraClient, int& outIndex) { - sp<BasicClient> client; +void CameraService::doUserSwitch(int newUserId) { + // Acquire mServiceLock and prevent other clients from connecting + std::unique_ptr<AutoConditionLock> lock = + AutoConditionLock::waitAndAcquire(mServiceLockWrapper); - for (int i = 0; i < mNumberOfCameras; i++) { + if (newUserId <= 0) { + ALOGW("%s: Bad user ID %d given during user switch, resetting to default.", __FUNCTION__, + newUserId); + newUserId = DEFAULT_LAST_USER_ID; + } - // This happens when we have already disconnected (or this is - // just another unused camera). - if (mClient[i] == 0) continue; + mLastUserId = newUserId; - // Promote mClient. It can fail if we are called from this path: - // Client::~Client() -> disconnect() -> removeClientByRemote(). - client = mClient[i].promote(); + // Current user has switched, evict all current clients. + std::vector<sp<BasicClient>> evicted; + for (auto& i : mActiveClientManager.getAll()) { + auto clientSp = i->getValue(); - // Clean up stale client entry - if (client == NULL) { - mClient[i].clear(); + if (clientSp.get() == nullptr) { + ALOGE("%s: Dead client still in mActiveClientManager.", __FUNCTION__); continue; } - if (cameraClient == client->getRemote()) { - // Found our camera - outIndex = i; - return client; - } + evicted.push_back(clientSp); + + String8 curTime = getFormattedCurrentTime(); + + ALOGE("Evicting conflicting client for camera ID %s due to user change", + i->getKey().string()); + // Log the clients evicted + mEventLog.add(String8::format("%s : EVICT device %s client for package %s (PID %" + PRId32 ", priority %" PRId32 ")\n - Evicted due to user switch.", + curTime.string(), i->getKey().string(), + String8{clientSp->getPackageName()}.string(), i->getOwnerId(), + i->getPriority())); + } - outIndex = -1; - return NULL; -} + // Do not hold mServiceLock while 
disconnecting clients, but retain the condition + // blocking other clients from connecting in mServiceLockWrapper if held. + mServiceLock.unlock(); -CameraService::BasicClient* CameraService::getClientByIdUnsafe(int cameraId) { - if (cameraId < 0 || cameraId >= mNumberOfCameras) return NULL; - return mClient[cameraId].unsafe_get(); -} + // Clear caller identity temporarily so client disconnect PID checks work correctly + int64_t token = IPCThreadState::self()->clearCallingIdentity(); -Mutex* CameraService::getClientLockById(int cameraId) { - if (cameraId < 0 || cameraId >= mNumberOfCameras) return NULL; - return &mClientLock[cameraId]; -} + for (auto& i : evicted) { + i->disconnect(); + } -sp<CameraService::BasicClient> CameraService::getClientByRemote( - const wp<IBinder>& cameraClient) { + IPCThreadState::self()->restoreCallingIdentity(token); - // Declare this before the lock to make absolutely sure the - // destructor won't be called with the lock held. - sp<BasicClient> client; + // Reacquire mServiceLock + mServiceLock.lock(); +} - Mutex::Autolock lock(mServiceLock); +void CameraService::logDisconnected(const String8& cameraId, int clientPid, + const String8& clientPackage) { - int outIndex; - client = findClientUnsafe(cameraClient, outIndex); + String8 curTime = getFormattedCurrentTime(); + // Log the clients evicted + mEventLog.add(String8::format("%s : DISCONNECT device %s client for package %s (PID %d)", + curTime.string(), cameraId.string(), clientPackage.string(), clientPid)); +} + +void CameraService::logConnected(const String8& cameraId, int clientPid, + const String8& clientPackage) { - return client; + String8 curTime = getFormattedCurrentTime(); + // Log the clients evicted + mEventLog.add(String8::format("%s : CONNECT device %s client for package %s (PID %d)", + curTime.string(), cameraId.string(), clientPackage.string(), clientPid)); } -status_t CameraService::onTransact( - uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { 
+status_t CameraService::onTransact(uint32_t code, const Parcel& data, Parcel* reply, + uint32_t flags) { + + const int pid = getCallingPid(); + const int selfPid = getpid(); + // Permission checks switch (code) { case BnCameraService::CONNECT: - case BnCameraService::CONNECT_PRO: case BnCameraService::CONNECT_DEVICE: - case BnCameraService::CONNECT_LEGACY: - const int pid = getCallingPid(); - const int self_pid = getpid(); - if (pid != self_pid) { + case BnCameraService::CONNECT_LEGACY: { + if (pid != selfPid) { // we're called from a different process, do the real check if (!checkCallingPermission( String16("android.permission.CAMERA"))) { @@ -1285,29 +1510,26 @@ status_t CameraService::onTransact( } } break; + } + case BnCameraService::NOTIFY_SYSTEM_EVENT: { + if (pid != selfPid) { + // Ensure we're being called by system_server, or similar process with + // permissions to notify the camera service about system events + if (!checkCallingPermission( + String16("android.permission.CAMERA_SEND_SYSTEM_EVENTS"))) { + const int uid = getCallingUid(); + ALOGE("Permission Denial: cannot send updates to camera service about system" + " events from pid=%d, uid=%d", pid, uid); + return PERMISSION_DENIED; + } + } + break; + } } return BnCameraService::onTransact(code, data, reply, flags); } -// The reason we need this busy bit is a new CameraService::connect() request -// may come in while the previous Client's destructor has not been run or is -// still running. If the last strong reference of the previous Client is gone -// but the destructor has not been finished, we should not allow the new Client -// to be created because we need to wait for the previous Client to tear down -// the hardware first. 
-void CameraService::setCameraBusy(int cameraId) { - android_atomic_write(1, &mBusy[cameraId]); - - ALOGV("setCameraBusy cameraId=%d", cameraId); -} - -void CameraService::setCameraFree(int cameraId) { - android_atomic_write(0, &mBusy[cameraId]); - - ALOGV("setCameraFree cameraId=%d", cameraId); -} - // We share the media players for shutter and recording sound for all clients. // A reference count is kept to determine when we will actually release the // media players. @@ -1376,7 +1598,6 @@ CameraService::Client::Client(const sp<CameraService>& cameraService, mRemoteCallback = cameraClient; - cameraService->setCameraBusy(cameraId); cameraService->loadSound(); LOG1("Client::Client X (pid %d, id %d)", callingPid, cameraId); @@ -1398,7 +1619,7 @@ CameraService::BasicClient::BasicClient(const sp<CameraService>& cameraService, int cameraId, int cameraFacing, int clientPid, uid_t clientUid, int servicePid): - mClientPackageName(clientPackageName) + mClientPackageName(clientPackageName), mDisconnected(false) { mCameraService = cameraService; mRemoteBinder = remoteCallback; @@ -1417,14 +1638,38 @@ CameraService::BasicClient::~BasicClient() { } void CameraService::BasicClient::disconnect() { - ALOGV("BasicClient::disconnect"); - mCameraService->removeClientByRemote(mRemoteBinder); + if (mDisconnected) { + ALOGE("%s: Disconnect called on already disconnected client for device %d", __FUNCTION__, + mCameraId); + return; + } + mDisconnected = true;; + + mCameraService->removeByClient(this); + mCameraService->logDisconnected(String8::format("%d", mCameraId), mClientPid, + String8(mClientPackageName)); + + sp<IBinder> remote = getRemote(); + if (remote != nullptr) { + remote->unlinkToDeath(mCameraService); + } finishCameraOps(); + ALOGI("%s: Disconnected client for camera %d for PID %d", __FUNCTION__, mCameraId, mClientPid); + // client shouldn't be able to call into us anymore mClientPid = 0; } +String16 CameraService::BasicClient::getPackageName() const { + return 
mClientPackageName; +} + + +int CameraService::BasicClient::getClientPid() const { + return mClientPid; +} + status_t CameraService::BasicClient::startCameraOps() { int32_t res; // Notify app ops that the camera is not available @@ -1450,7 +1695,7 @@ status_t CameraService::BasicClient::startCameraOps() { // Transition device availability listeners from PRESENT -> NOT_AVAILABLE mCameraService->updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE, - mCameraId); + String8::format("%d", mCameraId)); return OK; } @@ -1463,19 +1708,16 @@ status_t CameraService::BasicClient::finishCameraOps() { mClientPackageName); mOpsActive = false; - // Notify device availability listeners that this camera is available - // again + auto rejected = {ICameraServiceListener::STATUS_NOT_PRESENT, + ICameraServiceListener::STATUS_ENUMERATING}; - StatusVector rejectSourceStates; - rejectSourceStates.push_back(ICameraServiceListener::STATUS_NOT_PRESENT); - rejectSourceStates.push_back(ICameraServiceListener::STATUS_ENUMERATING); - - // Transition to PRESENT if the camera is not in either of above 2 - // states + // Transition to PRESENT if the camera is not in either of the rejected states mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT, - mCameraId, - &rejectSourceStates); + String8::format("%d", mCameraId), rejected); + // Notify flashlight that a camera device is closed. + mCameraService->mFlashlight->deviceClosed( + String8::format("%d", mCameraId)); } // Always stop watching, even if no camera op is active if (mOpsCallback != NULL) { @@ -1518,26 +1760,15 @@ void CameraService::BasicClient::opChanged(int32_t op, const String16& packageNa // ---------------------------------------------------------------------------- -Mutex* CameraService::Client::getClientLockFromCookie(void* user) { - return gCameraService->getClientLockById((int)(intptr_t) user); -} - -// Provide client pointer for callbacks. 
Client lock returned from getClientLockFromCookie should -// be acquired for this to be safe -CameraService::Client* CameraService::Client::getClientFromCookie(void* user) { - BasicClient *basicClient = gCameraService->getClientByIdUnsafe((int)(intptr_t) user); - // OK: only CameraClient calls this, and they already cast anyway. - Client* client = static_cast<Client*>(basicClient); - - // This could happen if the Client is in the process of shutting down (the - // last strong reference is gone, but the destructor hasn't finished - // stopping the hardware). - if (client == NULL) return NULL; - - // destruction already started, so should not be accessed - if (client->mDestructionStarted) return NULL; - - return client; +// Provide client strong pointer for callbacks. +sp<CameraService::Client> CameraService::Client::getClientFromCookie(void* user) { + String8 cameraId = String8::format("%d", (int)(intptr_t) user); + auto clientDescriptor = gCameraService->mActiveClientManager.get(cameraId); + if (clientDescriptor != nullptr) { + return sp<Client>{ + static_cast<Client*>(clientDescriptor->getValue().get())}; + } + return sp<Client>{nullptr}; } void CameraService::Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode, @@ -1549,7 +1780,6 @@ void CameraService::Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode void CameraService::Client::disconnect() { ALOGV("Client::disconnect"); BasicClient::disconnect(); - mCameraService->setCameraFree(mCameraId); } CameraService::Client::OpsCallback::OpsCallback(wp<BasicClient> client): @@ -1565,30 +1795,101 @@ void CameraService::Client::OpsCallback::opChanged(int32_t op, } // ---------------------------------------------------------------------------- -// IProCamera +// CameraState // ---------------------------------------------------------------------------- -CameraService::ProClient::ProClient(const sp<CameraService>& cameraService, - const sp<IProCameraCallbacks>& remoteCallback, - const String16& 
clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid) - : CameraService::BasicClient(cameraService, IInterface::asBinder(remoteCallback), - clientPackageName, cameraId, cameraFacing, - clientPid, clientUid, servicePid) -{ - mRemoteCallback = remoteCallback; +CameraService::CameraState::CameraState(const String8& id, int cost, + const std::set<String8>& conflicting) : mId(id), + mStatus(ICameraServiceListener::STATUS_PRESENT), mCost(cost), mConflicting(conflicting) {} + +CameraService::CameraState::~CameraState() {} + +ICameraServiceListener::Status CameraService::CameraState::getStatus() const { + Mutex::Autolock lock(mStatusLock); + return mStatus; } -CameraService::ProClient::~ProClient() { +CameraParameters CameraService::CameraState::getShimParams() const { + return mShimParams; } -void CameraService::ProClient::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode, - const CaptureResultExtras& resultExtras) { - mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0); +void CameraService::CameraState::setShimParams(const CameraParameters& params) { + mShimParams = params; +} + +int CameraService::CameraState::getCost() const { + return mCost; +} + +std::set<String8> CameraService::CameraState::getConflicting() const { + return mConflicting; +} + +String8 CameraService::CameraState::getId() const { + return mId; +} + +// ---------------------------------------------------------------------------- +// CameraClientManager +// ---------------------------------------------------------------------------- + +CameraService::CameraClientManager::~CameraClientManager() {} + +sp<CameraService::BasicClient> CameraService::CameraClientManager::getCameraClient( + const String8& id) const { + auto descriptor = get(id); + if (descriptor == nullptr) { + return sp<BasicClient>{nullptr}; + } + return descriptor->getValue(); +} + +String8 CameraService::CameraClientManager::toString() const { + auto 
all = getAll(); + String8 ret("["); + bool hasAny = false; + for (auto& i : all) { + hasAny = true; + String8 key = i->getKey(); + int32_t cost = i->getCost(); + int32_t pid = i->getOwnerId(); + int32_t priority = i->getPriority(); + auto conflicting = i->getConflicting(); + auto clientSp = i->getValue(); + String8 packageName; + if (clientSp.get() != nullptr) { + packageName = String8{clientSp->getPackageName()}; + } + ret.appendFormat("\n(Camera ID: %s, Cost: %" PRId32 ", PID: %" PRId32 ", Priority: %" + PRId32 ", ", key.string(), cost, pid, priority); + + if (packageName.size() != 0) { + ret.appendFormat("Client Package Name: %s", packageName.string()); + } + + ret.append(", Conflicting Client Devices: {"); + for (auto& j : conflicting) { + ret.appendFormat("%s, ", j.string()); + } + ret.append("})"); + } + if (hasAny) ret.append("\n"); + ret.append("]\n"); + return ret; +} + +CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor( + const String8& key, const sp<BasicClient>& value, int32_t cost, + const std::set<String8>& conflictingKeys, int32_t priority, int32_t ownerId) { + + return std::make_shared<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>( + key, value, cost, conflictingKeys, priority, ownerId); +} + +CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor( + const sp<BasicClient>& value, const CameraService::DescriptorPtr& partial) { + return makeClientDescriptor(partial->getKey(), value, partial->getCost(), + partial->getConflicting(), partial->getPriority(), partial->getOwnerId()); } // ---------------------------------------------------------------------------- @@ -1633,15 +1934,13 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) { return NO_ERROR; } - result = String8::format("Camera module HAL API version: 0x%x\n", - mModule->common.hal_api_version); - result.appendFormat("Camera module API version: 0x%x\n", - mModule->common.module_api_version); - 
result.appendFormat("Camera module name: %s\n", - mModule->common.name); - result.appendFormat("Camera module author: %s\n", - mModule->common.author); - result.appendFormat("Number of camera devices: %d\n\n", mNumberOfCameras); + result = String8::format("Camera module HAL API version: 0x%x\n", mModule->getHalApiVersion()); + result.appendFormat("Camera module API version: 0x%x\n", mModule->getModuleApiVersion()); + result.appendFormat("Camera module name: %s\n", mModule->getModuleName()); + result.appendFormat("Camera module author: %s\n", mModule->getModuleAuthor()); + result.appendFormat("Number of camera devices: %d\n", mNumberOfCameras); + String8 activeClientString = mActiveClientManager.toString(); + result.appendFormat("Active Camera Clients:\n%s", activeClientString.string()); sp<VendorTagDescriptor> desc = VendorTagDescriptor::getGlobalVendorTagDescriptor(); if (desc == NULL) { @@ -1656,11 +1955,31 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) { desc->dump(fd, /*verbosity*/2, /*indentation*/4); } - for (int i = 0; i < mNumberOfCameras; i++) { - result = String8::format("Camera %d static information:\n", i); + result = String8("Prior client events (most recent at top):\n"); + + for (const auto& msg : mEventLog) { + result.appendFormat("%s\n", msg.string()); + } + + if (mEventLog.size() == DEFAULT_EVICTION_LOG_LENGTH) { + result.append("...\n"); + } + + write(fd, result.string(), result.size()); + + bool stateLocked = tryLock(mCameraStatesLock); + if (!stateLocked) { + result = String8::format("CameraStates in use, may be deadlocked\n"); + write(fd, result.string(), result.size()); + } + + for (auto& state : mCameraStates) { + String8 cameraId = state.first; + result = String8::format("Camera %s information:\n", cameraId.string()); camera_info info; - status_t rc = mModule->get_camera_info(i, &info); + // TODO: Change getCameraInfo + HAL to use String cameraIds + status_t rc = mModule->getCameraInfo(cameraIdToInt(cameraId), &info); 
if (rc != OK) { result.appendFormat(" Error reading static information!\n"); write(fd, result.string(), result.size()); @@ -1669,13 +1988,24 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) { info.facing == CAMERA_FACING_BACK ? "BACK" : "FRONT"); result.appendFormat(" Orientation: %d\n", info.orientation); int deviceVersion; - if (mModule->common.module_api_version < - CAMERA_MODULE_API_VERSION_2_0) { + if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_0) { deviceVersion = CAMERA_DEVICE_API_VERSION_1_0; } else { deviceVersion = info.device_version; } - result.appendFormat(" Device version: 0x%x\n", deviceVersion); + + auto conflicting = state.second->getConflicting(); + result.appendFormat(" Resource Cost: %d\n", state.second->getCost()); + result.appendFormat(" Conflicting Devices:"); + for (auto& id : conflicting) { + result.appendFormat(" %s", cameraId.string()); + } + if (conflicting.size() == 0) { + result.appendFormat(" NONE"); + } + result.appendFormat("\n"); + + result.appendFormat(" Device version: %#x\n", deviceVersion); if (deviceVersion >= CAMERA_DEVICE_API_VERSION_2_0) { result.appendFormat(" Device static metadata:\n"); write(fd, result.string(), result.size()); @@ -1684,19 +2014,38 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) { } else { write(fd, result.string(), result.size()); } + + CameraParameters p = state.second->getShimParams(); + if (!p.isEmpty()) { + result = String8::format(" Camera1 API shim is using parameters:\n "); + write(fd, result.string(), result.size()); + p.dump(fd, args); + } } - sp<BasicClient> client = mClient[i].promote(); - if (client == 0) { - result = String8::format(" Device is closed, no client instance\n"); + auto clientDescriptor = mActiveClientManager.get(cameraId); + if (clientDescriptor == nullptr) { + result = String8::format(" Device %s is closed, no client instance\n", + cameraId.string()); write(fd, result.string(), result.size()); continue; } hasClient 
= true; - result = String8::format(" Device is open. Client instance dump:\n"); + result = String8::format(" Device %s is open. Client instance dump:\n\n", + cameraId.string()); + result.appendFormat("Client priority level: %d\n", clientDescriptor->getPriority()); + result.appendFormat("Client PID: %d\n", clientDescriptor->getOwnerId()); + + auto client = clientDescriptor->getValue(); + result.appendFormat("Client package: %s\n", + String8(client->getPackageName()).string()); write(fd, result.string(), result.size()); + client->dump(fd, args); } + + if (stateLocked) mCameraStatesLock.unlock(); + if (!hasClient) { result = String8::format("\nNo active camera clients yet.\n"); write(fd, result.string(), result.size()); @@ -1720,112 +2069,119 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) { write(fd, result.string(), result.size()); } } - } return NO_ERROR; } -/*virtual*/void CameraService::binderDied( - const wp<IBinder> &who) { +void CameraService::handleTorchClientBinderDied(const wp<IBinder> &who) { + Mutex::Autolock al(mTorchClientMapMutex); + for (size_t i = 0; i < mTorchClientMap.size(); i++) { + if (mTorchClientMap[i] == who) { + // turn off the torch mode that was turned on by dead client + String8 cameraId = mTorchClientMap.keyAt(i); + status_t res = mFlashlight->setTorchMode(cameraId, false); + if (res) { + ALOGE("%s: torch client died but couldn't turn off torch: " + "%s (%d)", __FUNCTION__, strerror(-res), res); + return; + } + mTorchClientMap.removeItemsAt(i); + break; + } + } +} + +/*virtual*/void CameraService::binderDied(const wp<IBinder> &who) { /** * While tempting to promote the wp<IBinder> into a sp, * it's actually not supported by the binder driver */ - ALOGV("java clients' binder died"); + // check torch client + handleTorchClientBinderDied(who); - sp<BasicClient> cameraClient = getClientByRemote(who); - - if (cameraClient == 0) { - ALOGV("java clients' binder death already cleaned up (normal case)"); + // check camera 
device client + if(!evictClientIdByRemote(who)) { + ALOGV("%s: Java client's binder death already cleaned up (normal case)", __FUNCTION__); return; } - ALOGW("Disconnecting camera client %p since the binder for it " - "died (this pid %d)", cameraClient.get(), getCallingPid()); - - cameraClient->disconnect(); - + ALOGE("%s: Java client's binder died, removing it from the list of active clients", + __FUNCTION__); } -void CameraService::updateStatus(ICameraServiceListener::Status status, - int32_t cameraId, - const StatusVector *rejectSourceStates) { - // do not lock mServiceLock here or can get into a deadlock from - // connect() -> ProClient::disconnect -> updateStatus - Mutex::Autolock lock(mStatusMutex); - - ICameraServiceListener::Status oldStatus = mStatusList[cameraId]; - - mStatusList[cameraId] = status; +void CameraService::updateStatus(ICameraServiceListener::Status status, const String8& cameraId) { + updateStatus(status, cameraId, {}); +} - if (oldStatus != status) { - ALOGV("%s: Status has changed for camera ID %d from 0x%x to 0x%x", - __FUNCTION__, cameraId, (uint32_t)oldStatus, (uint32_t)status); +void CameraService::updateStatus(ICameraServiceListener::Status status, const String8& cameraId, + std::initializer_list<ICameraServiceListener::Status> rejectSourceStates) { + // Do not lock mServiceLock here or can get into a deadlock from + // connect() -> disconnect -> updateStatus - if (oldStatus == ICameraServiceListener::STATUS_NOT_PRESENT && - (status != ICameraServiceListener::STATUS_PRESENT && - status != ICameraServiceListener::STATUS_ENUMERATING)) { + auto state = getCameraState(cameraId); - ALOGW("%s: From NOT_PRESENT can only transition into PRESENT" - " or ENUMERATING", __FUNCTION__); - mStatusList[cameraId] = oldStatus; - return; - } + if (state == nullptr) { + ALOGW("%s: Could not update the status for %s, no such device exists", __FUNCTION__, + cameraId.string()); + return; + } - if (rejectSourceStates != NULL) { - const StatusVector 
&rejectList = *rejectSourceStates; - StatusVector::const_iterator it = rejectList.begin(); - - /** - * Sometimes we want to conditionally do a transition. - * For example if a client disconnects, we want to go to PRESENT - * only if we weren't already in NOT_PRESENT or ENUMERATING. - */ - for (; it != rejectList.end(); ++it) { - if (oldStatus == *it) { - ALOGV("%s: Rejecting status transition for Camera ID %d, " - " since the source state was was in one of the bad " - " states.", __FUNCTION__, cameraId); - mStatusList[cameraId] = oldStatus; - return; - } + // Update the status for this camera state, then send the onStatusChangedCallbacks to each + // of the listeners with both the mStatusStatus and mStatusListenerLock held + state->updateStatus(status, cameraId, rejectSourceStates, [this] + (const String8& cameraId, ICameraServiceListener::Status status) { + + // Update torch status + if (status == ICameraServiceListener::STATUS_NOT_PRESENT || + status == ICameraServiceListener::STATUS_NOT_AVAILABLE) { + // Update torch status to not available when the camera device becomes not present + // or not available. + onTorchStatusChanged(cameraId, ICameraServiceListener::TORCH_STATUS_NOT_AVAILABLE); + } else if (status == ICameraServiceListener::STATUS_PRESENT) { + // Update torch status to available when the camera device becomes present or + // available + onTorchStatusChanged(cameraId, ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF); } - } - /** - * ProClients lose their exclusive lock. 
- * - Done before the CameraClient can initialize the HAL device, - * since we want to be able to close it before they get to initialize - */ - if (status == ICameraServiceListener::STATUS_NOT_AVAILABLE) { - Vector<wp<ProClient> > proClients(mProClientList[cameraId]); - Vector<wp<ProClient> >::const_iterator it; - - for (it = proClients.begin(); it != proClients.end(); ++it) { - sp<ProClient> proCl = it->promote(); - if (proCl.get() != NULL) { - proCl->onExclusiveLockStolen(); - } + Mutex::Autolock lock(mStatusListenerLock); + + for (auto& listener : mListenerList) { + // TODO: Refactor status listeners to use strings for Camera IDs and remove this. + int id = cameraIdToInt(cameraId); + if (id != -1) listener->onStatusChanged(status, id); } - } + }); +} - Vector<sp<ICameraServiceListener> >::const_iterator it; - for (it = mListenerList.begin(); it != mListenerList.end(); ++it) { - (*it)->onStatusChanged(status, cameraId); - } +status_t CameraService::getTorchStatusLocked( + const String8& cameraId, + ICameraServiceListener::TorchStatus *status) const { + if (!status) { + return BAD_VALUE; + } + ssize_t index = mTorchStatusMap.indexOfKey(cameraId); + if (index == NAME_NOT_FOUND) { + // invalid camera ID or the camera doesn't have a flash unit + return NAME_NOT_FOUND; } + + *status = mTorchStatusMap.valueAt(index); + return OK; } -ICameraServiceListener::Status CameraService::getStatus(int cameraId) const { - if (cameraId < 0 || cameraId >= MAX_CAMERAS) { - ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId); - return ICameraServiceListener::STATUS_UNKNOWN; +status_t CameraService::setTorchStatusLocked(const String8& cameraId, + ICameraServiceListener::TorchStatus status) { + ssize_t index = mTorchStatusMap.indexOfKey(cameraId); + if (index == NAME_NOT_FOUND) { + return BAD_VALUE; } + ICameraServiceListener::TorchStatus& item = + mTorchStatusMap.editValueAt(index); + item = status; - Mutex::Autolock al(mStatusMutex); - return mStatusList[cameraId]; + return OK; 
} }; // namespace android diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h index 126d8d9..ca1c504 100644 --- a/services/camera/libcameraservice/CameraService.h +++ b/services/camera/libcameraservice/CameraService.h @@ -17,6 +17,7 @@ #ifndef ANDROID_SERVERS_CAMERA_CAMERASERVICE_H #define ANDROID_SERVERS_CAMERA_CAMERASERVICE_H +#include <cutils/multiuser.h> #include <utils/Vector.h> #include <utils/KeyedVector.h> #include <binder/AppOpsManager.h> @@ -27,8 +28,6 @@ #include <camera/ICamera.h> #include <camera/ICameraClient.h> -#include <camera/IProCameraUser.h> -#include <camera/IProCameraCallbacks.h> #include <camera/camera2/ICameraDeviceUser.h> #include <camera/camera2/ICameraDeviceCallbacks.h> #include <camera/VendorTagDescriptor.h> @@ -36,9 +35,17 @@ #include <camera/CameraParameters.h> #include <camera/ICameraServiceListener.h> +#include "CameraFlashlight.h" -/* This needs to be increased if we can have more cameras */ -#define MAX_CAMERAS 2 +#include "common/CameraModule.h" +#include "utils/AutoConditionLock.h" +#include "utils/ClientManager.h" +#include "utils/RingBuffer.h" + +#include <set> +#include <string> +#include <map> +#include <memory> namespace android { @@ -58,6 +65,39 @@ public: class Client; class BasicClient; + enum apiLevel { + API_1 = 1, + API_2 = 2 + }; + + // Process States (mirrors frameworks/base/core/java/android/app/ActivityManager.java) + static const int PROCESS_STATE_NONEXISTENT = -1; + static const int PROCESS_STATE_PERSISTENT = 0; + static const int PROCESS_STATE_PERSISTENT_UI = 1; + static const int PROCESS_STATE_TOP = 2; + static const int PROCESS_STATE_IMPORTANT_FOREGROUND = 3; + static const int PROCESS_STATE_IMPORTANT_BACKGROUND = 4; + static const int PROCESS_STATE_BACKUP = 5; + static const int PROCESS_STATE_HEAVY_WEIGHT = 6; + static const int PROCESS_STATE_SERVICE = 7; + static const int PROCESS_STATE_RECEIVER = 8; + static const int PROCESS_STATE_HOME = 9; + 
static const int PROCESS_STATE_LAST_ACTIVITY = 10; + static const int PROCESS_STATE_CACHED_ACTIVITY = 11; + static const int PROCESS_STATE_CACHED_ACTIVITY_CLIENT = 12; + static const int PROCESS_STATE_CACHED_EMPTY = 13; + + // 3 second busy timeout when other clients are connecting + static const nsecs_t DEFAULT_CONNECT_TIMEOUT_NS = 3000000000; + + // Default number of messages to store in eviction log + static const size_t DEFAULT_EVICTION_LOG_LENGTH = 50; + + enum { + // Default last user id + DEFAULT_LAST_USER_ID = 0, + }; + // Implementation of BinderService<T> static char const* getServiceName() { return "media.camera"; } @@ -66,8 +106,11 @@ public: ///////////////////////////////////////////////////////////////////// // HAL Callbacks - virtual void onDeviceStatusChanged(int cameraId, - int newStatus); + virtual void onDeviceStatusChanged(camera_device_status_t cameraId, + camera_device_status_t newStatus); + virtual void onTorchStatusChanged(const String8& cameraId, + ICameraServiceListener::TorchStatus + newStatus); ///////////////////////////////////////////////////////////////////// // ICameraService @@ -88,11 +131,6 @@ public: /*out*/ sp<ICamera>& device); - virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb, - int cameraId, const String16& clientPackageName, int clientUid, - /*out*/ - sp<IProCameraUser>& device); - virtual status_t connectDevice( const sp<ICameraDeviceCallbacks>& cameraCb, int cameraId, @@ -110,6 +148,11 @@ public: /*out*/ String16* parameters); + virtual status_t setTorchMode(const String16& cameraId, bool enabled, + const sp<IBinder>& clientBinder); + + virtual void notifySystemEvent(int eventId, int arg0); + // OK = supports api of that version, -EOPNOTSUPP = does not support virtual status_t supportsCameraApi( int cameraId, int apiVersion); @@ -122,7 +165,6 @@ public: ///////////////////////////////////////////////////////////////////// // Client functionality - virtual void removeClientByRemote(const wp<IBinder>& 
remoteBinder); enum sound_kind { SOUND_SHUTTER = 0, @@ -140,33 +182,36 @@ public: ///////////////////////////////////////////////////////////////////// // Shared utilities - static status_t filterOpenErrorCode(status_t err); static status_t filterGetInfoErrorCode(status_t err); ///////////////////////////////////////////////////////////////////// // CameraClient functionality - // returns plain pointer of client. Note that mClientLock should be acquired to - // prevent the client from destruction. The result can be NULL. - virtual BasicClient* getClientByIdUnsafe(int cameraId); - virtual Mutex* getClientLockById(int cameraId); - class BasicClient : public virtual RefBase { public: - virtual status_t initialize(camera_module_t *module) = 0; + virtual status_t initialize(CameraModule *module) = 0; virtual void disconnect(); // because we can't virtually inherit IInterface, which breaks // virtual inheritance virtual sp<IBinder> asBinderWrapper() = 0; - // Return the remote callback binder object (e.g. IProCameraCallbacks) + // Return the remote callback binder object (e.g. 
ICameraDeviceCallbacks) sp<IBinder> getRemote() { return mRemoteBinder; } virtual status_t dump(int fd, const Vector<String16>& args) = 0; + // Return the package name for this client + virtual String16 getPackageName() const; + + // Notify client about a fatal error + virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode, + const CaptureResultExtras& resultExtras) = 0; + + // Get the PID of the application client using this + virtual int getClientPid() const; protected: BasicClient(const sp<CameraService>& cameraService, const sp<IBinder>& remoteCallback, @@ -193,6 +238,7 @@ public: pid_t mClientPid; uid_t mClientUid; // immutable after constructor pid_t mServicePid; // immutable after constructor + bool mDisconnected; // - The app-side Binder interface to receive callbacks from us sp<IBinder> mRemoteBinder; // immutable after constructor @@ -201,10 +247,6 @@ public: status_t startCameraOps(); status_t finishCameraOps(); - // Notify client about a fatal error - virtual void notifyError( - ICameraDeviceCallbacks::CameraErrorCode errorCode, - const CaptureResultExtras& resultExtras) = 0; private: AppOpsManager mAppOpsManager; @@ -276,13 +318,11 @@ public: return asBinder(this); } - protected: - static Mutex* getClientLockFromCookie(void* user); - // convert client from cookie. Client lock should be acquired before getting Client. - static Client* getClientFromCookie(void* user); - virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode, const CaptureResultExtras& resultExtras); + protected: + // Convert client from cookie. 
+ static sp<CameraService::Client> getClientFromCookie(void* user); // Initialized in constructor @@ -291,92 +331,231 @@ public: }; // class Client - class ProClient : public BnProCameraUser, public BasicClient { + typedef std::shared_ptr<resource_policy::ClientDescriptor<String8, + sp<CameraService::BasicClient>>> DescriptorPtr; + + /** + * A container class for managing active camera clients that are using HAL devices. Active + * clients are represented by ClientDescriptor objects that contain strong pointers to the + * actual BasicClient subclass binder interface implementation. + * + * This class manages the eviction behavior for the camera clients. See the parent class + * implementation in utils/ClientManager for the specifics of this behavior. + */ + class CameraClientManager : + public resource_policy::ClientManager<String8, sp<CameraService::BasicClient>> { public: - typedef IProCameraCallbacks TCamCallbacks; + virtual ~CameraClientManager(); + + /** + * Return a strong pointer to the active BasicClient for this camera ID, or an empty + * if none exists. + */ + sp<CameraService::BasicClient> getCameraClient(const String8& id) const; + + /** + * Return a string describing the current state. + */ + String8 toString() const; + + /** + * Make a ClientDescriptor object wrapping the given BasicClient strong pointer. + */ + static DescriptorPtr makeClientDescriptor(const String8& key, const sp<BasicClient>& value, + int32_t cost, const std::set<String8>& conflictingKeys, int32_t priority, + int32_t ownerId); + + /** + * Make a ClientDescriptor object wrapping the given BasicClient strong pointer with + * values intialized from a prior ClientDescriptor. 
+ */ + static DescriptorPtr makeClientDescriptor(const sp<BasicClient>& value, + const CameraService::DescriptorPtr& partial); + + }; // class CameraClientManager - ProClient(const sp<CameraService>& cameraService, - const sp<IProCameraCallbacks>& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid); +private: - virtual ~ProClient(); + /** + * Container class for the state of each logical camera device, including: ID, status, and + * dependencies on other devices. The mapping of camera ID -> state saved in mCameraStates + * represents the camera devices advertised by the HAL (and any USB devices, when we add + * those). + * + * This container does NOT represent an active camera client. These are represented using + * the ClientDescriptors stored in mActiveClientManager. + */ + class CameraState { + public: + /** + * Make a new CameraState and set the ID, cost, and conflicting devices using the values + * returned in the HAL's camera_info struct for each device. + */ + CameraState(const String8& id, int cost, const std::set<String8>& conflicting); + virtual ~CameraState(); + + /** + * Return the status for this device. + * + * This method acquires mStatusLock. + */ + ICameraServiceListener::Status getStatus() const; + + /** + * This function updates the status for this camera device, unless the given status + * is in the given list of rejected status states, and execute the function passed in + * with a signature onStatusUpdateLocked(const String8&, ICameraServiceListener::Status) + * if the status has changed. + * + * This method is idempotent, and will not result in the function passed to + * onStatusUpdateLocked being called more than once for the same arguments. + * This method aquires mStatusLock. 
+ */ + template<class Func> + void updateStatus(ICameraServiceListener::Status status, const String8& cameraId, + std::initializer_list<ICameraServiceListener::Status> rejectSourceStates, + Func onStatusUpdatedLocked); + + /** + * Return the last set CameraParameters object generated from the information returned by + * the HAL for this device (or an empty CameraParameters object if none has been set). + */ + CameraParameters getShimParams() const; + + /** + * Set the CameraParameters for this device. + */ + void setShimParams(const CameraParameters& params); + + /** + * Return the resource_cost advertised by the HAL for this device. + */ + int getCost() const; + + /** + * Return a set of the IDs of conflicting devices advertised by the HAL for this device. + */ + std::set<String8> getConflicting() const; + + /** + * Return the ID of this camera device. + */ + String8 getId() const; - const sp<IProCameraCallbacks>& getRemoteCallback() { - return mRemoteCallback; - } + private: + const String8 mId; + ICameraServiceListener::Status mStatus; // protected by mStatusLock + const int mCost; + std::set<String8> mConflicting; + mutable Mutex mStatusLock; + CameraParameters mShimParams; + }; // class CameraState - /*** - IProCamera implementation - ***/ - virtual status_t connect(const sp<IProCameraCallbacks>& callbacks) - = 0; - virtual status_t exclusiveTryLock() = 0; - virtual status_t exclusiveLock() = 0; - virtual status_t exclusiveUnlock() = 0; + // Delay-load the Camera HAL module + virtual void onFirstRef(); - virtual bool hasExclusiveLock() = 0; + // Check if we can connect, before we acquire the service lock. + status_t validateConnectLocked(const String8& cameraId, /*inout*/int& clientUid) const; - // Note that the callee gets a copy of the metadata. - virtual int submitRequest(camera_metadata_t* metadata, - bool streaming = false) = 0; - virtual status_t cancelRequest(int requestId) = 0; + // Handle active client evictions, and update service state. 
+ // Only call with with mServiceLock held. + status_t handleEvictionsLocked(const String8& cameraId, int clientPid, + apiLevel effectiveApiLevel, const sp<IBinder>& remoteCallback, const String8& packageName, + /*out*/ + sp<BasicClient>* client, + std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial); - // Callbacks from camera service - virtual void onExclusiveLockStolen() = 0; + // Single implementation shared between the various connect calls + template<class CALLBACK, class CLIENT> + status_t connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId, int halVersion, + const String16& clientPackageName, int clientUid, apiLevel effectiveApiLevel, + bool legacyMode, bool shimUpdateOnly, /*out*/sp<CLIENT>& device); - protected: - virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode, - const CaptureResultExtras& resultExtras); - sp<IProCameraCallbacks> mRemoteCallback; - }; // class ProClient + // Lock guarding camera service state + Mutex mServiceLock; -private: + // Condition to use with mServiceLock, used to handle simultaneous connect calls from clients + std::shared_ptr<WaitableMutexWrapper> mServiceLockWrapper; - // Delay-load the Camera HAL module - virtual void onFirstRef(); + // Return NO_ERROR if the device with a give ID can be connected to + status_t checkIfDeviceIsUsable(const String8& cameraId) const; - // Step 1. Check if we can connect, before we acquire the service lock. - status_t validateConnect(int cameraId, - /*inout*/ - int& clientUid) const; + // Container for managing currently active application-layer clients + CameraClientManager mActiveClientManager; - // Step 2. Check if we can connect, after we acquire the service lock. 
- bool canConnectUnsafe(int cameraId, - const String16& clientPackageName, - const sp<IBinder>& remoteCallback, - /*out*/ - sp<BasicClient> &client); + // Mapping from camera ID -> state for each device, map is protected by mCameraStatesLock + std::map<String8, std::shared_ptr<CameraState>> mCameraStates; - // When connection is successful, initialize client and track its death - status_t connectFinishUnsafe(const sp<BasicClient>& client, - const sp<IBinder>& remoteCallback); + // Mutex guarding mCameraStates map + mutable Mutex mCameraStatesLock; - virtual sp<BasicClient> getClientByRemote(const wp<IBinder>& cameraClient); + // Circular buffer for storing event logging for dumps + RingBuffer<String8> mEventLog; - Mutex mServiceLock; - // either a Client or CameraDeviceClient - wp<BasicClient> mClient[MAX_CAMERAS]; // protected by mServiceLock - Mutex mClientLock[MAX_CAMERAS]; // prevent Client destruction inside callbacks - int mNumberOfCameras; + // UID of last user. + int mLastUserId; + + /** + * Get the camera state for a given camera id. + * + * This acquires mCameraStatesLock. + */ + std::shared_ptr<CameraService::CameraState> getCameraState(const String8& cameraId) const; + + /** + * Evict client who's remote binder has died. Returns true if this client was in the active + * list and was disconnected. + * + * This method acquires mServiceLock. + */ + bool evictClientIdByRemote(const wp<IBinder>& cameraClient); - typedef wp<ProClient> weak_pro_client_ptr; - Vector<weak_pro_client_ptr> mProClientList[MAX_CAMERAS]; + /** + * Remove the given client from the active clients list; does not disconnect the client. + * + * This method acquires mServiceLock. 
+ */ + void removeByClient(const BasicClient* client); - // needs to be called with mServiceLock held - sp<BasicClient> findClientUnsafe(const wp<IBinder>& cameraClient, int& outIndex); - sp<ProClient> findProClientUnsafe( - const wp<IBinder>& cameraCallbacksRemote); + /** + * Add new client to active clients list after conflicting clients have disconnected using the + * values set in the partial descriptor passed in to construct the actual client descriptor. + * This is typically called at the end of a connect call. + * + * This method must be called with mServiceLock held. + */ + void finishConnectLocked(const sp<BasicClient>& client, const DescriptorPtr& desc); - // atomics to record whether the hardware is allocated to some client. - volatile int32_t mBusy[MAX_CAMERAS]; - void setCameraBusy(int cameraId); - void setCameraFree(int cameraId); + /** + * Returns the integer corresponding to the given camera ID string, or -1 on failure. + */ + static int cameraIdToInt(const String8& cameraId); + + /** + * Remove a single client corresponding to the given camera id from the list of active clients. + * If none exists, return an empty strongpointer. + * + * This method must be called with mServiceLock held. + */ + sp<CameraService::BasicClient> removeClientLocked(const String8& cameraId); + + /** + * Handle a notification that the current device user has changed. + */ + void doUserSwitch(int newUserId); + + /** + * Add a event log message that a client has been disconnected. + */ + void logDisconnected(const String8& cameraId, int clientPid, const String8& clientPackage); + + /** + * Add a event log message that a client has been connected. 
+ */ + void logConnected(const String8& cameraId, int clientPid, const String8& clientPackage); + + int mNumberOfCameras; // sounds MediaPlayer* newMediaPlayer(const char *file); @@ -385,45 +564,60 @@ private: sp<MediaPlayer> mSoundPlayer[NUM_SOUNDS]; int mSoundRef; // reference count (release all MediaPlayer when 0) - camera_module_t *mModule; - - Vector<sp<ICameraServiceListener> > - mListenerList; + CameraModule* mModule; - // guard only mStatusList and the broadcasting of ICameraServiceListener - mutable Mutex mStatusMutex; - ICameraServiceListener::Status - mStatusList[MAX_CAMERAS]; + // Guarded by mStatusListenerMutex + std::vector<sp<ICameraServiceListener>> mListenerList; + Mutex mStatusListenerLock; - // Read the current status (locks mStatusMutex) - ICameraServiceListener::Status - getStatus(int cameraId) const; - - typedef Vector<ICameraServiceListener::Status> StatusVector; - // Broadcast the new status if it changed (locks the service mutex) - void updateStatus( - ICameraServiceListener::Status status, - int32_t cameraId, - const StatusVector *rejectSourceStates = NULL); + /** + * Update the status for the given camera id (if that device exists), and broadcast the + * status update to all current ICameraServiceListeners if the status has changed. Any + * statuses in rejectedSourceStates will be ignored. + * + * This method must be idempotent. + * This method acquires mStatusLock and mStatusListenerLock. 
+ */ + void updateStatus(ICameraServiceListener::Status status, const String8& cameraId, + std::initializer_list<ICameraServiceListener::Status> rejectedSourceStates); + void updateStatus(ICameraServiceListener::Status status, const String8& cameraId); + + // flashlight control + sp<CameraFlashlight> mFlashlight; + // guard mTorchStatusMap + Mutex mTorchStatusMutex; + // guard mTorchClientMap + Mutex mTorchClientMapMutex; + // camera id -> torch status + KeyedVector<String8, ICameraServiceListener::TorchStatus> mTorchStatusMap; + // camera id -> torch client binder + // only store the last client that turns on each camera's torch mode + KeyedVector<String8, sp<IBinder> > mTorchClientMap; + + // check and handle if torch client's process has died + void handleTorchClientBinderDied(const wp<IBinder> &who); + + // handle torch mode status change and invoke callbacks. mTorchStatusMutex + // should be locked. + void onTorchStatusChangedLocked(const String8& cameraId, + ICameraServiceListener::TorchStatus newStatus); + + // get a camera's torch status. mTorchStatusMutex should be locked. + status_t getTorchStatusLocked(const String8 &cameraId, + ICameraServiceListener::TorchStatus *status) const; + + // set a camera's torch status. mTorchStatusMutex should be locked. + status_t setTorchStatusLocked(const String8 &cameraId, + ICameraServiceListener::TorchStatus status); // IBinder::DeathRecipient implementation virtual void binderDied(const wp<IBinder> &who); // Helpers - bool isValidCameraId(int cameraId); - bool setUpVendorTags(); /** - * A mapping of camera ids to CameraParameters returned by that camera device. - * - * This cache is used to generate CameraCharacteristic metadata when using - * the HAL1 shim. - */ - KeyedVector<int, CameraParameters> mShimParams; - - /** * Initialize and cache the metadata used by the HAL1 shim for a given cameraId. * * Returns OK on success, or a negative error code. 
@@ -446,25 +640,192 @@ private: */ status_t generateShimMetadata(int cameraId, /*out*/CameraMetadata* cameraInfo); + static int getCallingPid(); + + static int getCallingUid(); + /** - * Connect a new camera client. This should only be used while holding the - * mutex for mServiceLock. - * - * Returns OK on success, or a negative error code. + * Get the current system time as a formatted string. */ - status_t connectHelperLocked( - /*out*/ - sp<Client>& client, - /*in*/ - const sp<ICameraClient>& cameraClient, - int cameraId, - const String16& clientPackageName, - int clientUid, - int callingPid, - int halVersion = CAMERA_HAL_API_VERSION_UNSPECIFIED, - bool legacyMode = false); + static String8 getFormattedCurrentTime(); + + /** + * Get the camera eviction priority from the current process state given by ActivityManager. + */ + static int getCameraPriorityFromProcState(int procState); + + static status_t makeClient(const sp<CameraService>& cameraService, + const sp<IInterface>& cameraCb, const String16& packageName, const String8& cameraId, + int facing, int clientPid, uid_t clientUid, int servicePid, bool legacyMode, + int halVersion, int deviceVersion, apiLevel effectiveApiLevel, + /*out*/sp<BasicClient>* client); }; +template<class Func> +void CameraService::CameraState::updateStatus(ICameraServiceListener::Status status, + const String8& cameraId, + std::initializer_list<ICameraServiceListener::Status> rejectSourceStates, + Func onStatusUpdatedLocked) { + Mutex::Autolock lock(mStatusLock); + ICameraServiceListener::Status oldStatus = mStatus; + mStatus = status; + + if (oldStatus == status) { + return; + } + + ALOGV("%s: Status has changed for camera ID %s from %#x to %#x", __FUNCTION__, + cameraId.string(), oldStatus, status); + + if (oldStatus == ICameraServiceListener::STATUS_NOT_PRESENT && + (status != ICameraServiceListener::STATUS_PRESENT && + status != ICameraServiceListener::STATUS_ENUMERATING)) { + + ALOGW("%s: From NOT_PRESENT can only transition into 
PRESENT or ENUMERATING", + __FUNCTION__); + mStatus = oldStatus; + return; + } + + /** + * Sometimes we want to conditionally do a transition. + * For example if a client disconnects, we want to go to PRESENT + * only if we weren't already in NOT_PRESENT or ENUMERATING. + */ + for (auto& rejectStatus : rejectSourceStates) { + if (oldStatus == rejectStatus) { + ALOGV("%s: Rejecting status transition for Camera ID %s, since the source " + "state was was in one of the bad states.", __FUNCTION__, cameraId.string()); + mStatus = oldStatus; + return; + } + } + + onStatusUpdatedLocked(cameraId, status); +} + + +template<class CALLBACK, class CLIENT> +status_t CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId, + int halVersion, const String16& clientPackageName, int clientUid, + apiLevel effectiveApiLevel, bool legacyMode, bool shimUpdateOnly, + /*out*/sp<CLIENT>& device) { + status_t ret = NO_ERROR; + String8 clientName8(clientPackageName); + int clientPid = getCallingPid(); + + ALOGI("CameraService::connect call E (PID %d \"%s\", camera ID %s) for HAL version %d and " + "Camera API version %d", clientPid, clientName8.string(), cameraId.string(), + halVersion, static_cast<int>(effectiveApiLevel)); + + sp<CLIENT> client = nullptr; + { + // Acquire mServiceLock and prevent other clients from connecting + std::unique_ptr<AutoConditionLock> lock = + AutoConditionLock::waitAndAcquire(mServiceLockWrapper, DEFAULT_CONNECT_TIMEOUT_NS); + + if (lock == nullptr) { + ALOGE("CameraService::connect X (PID %d) rejected (too many other clients connecting)." 
+ , clientPid); + return -EBUSY; + } + + // Enforce client permissions and do basic sanity checks + if((ret = validateConnectLocked(cameraId, /*inout*/clientUid)) != NO_ERROR) { + return ret; + } + mLastUserId = multiuser_get_user_id(clientUid); + + // Check the shim parameters after acquiring lock, if they have already been updated and + // we were doing a shim update, return immediately + if (shimUpdateOnly) { + auto cameraState = getCameraState(cameraId); + if (cameraState != nullptr) { + if (!cameraState->getShimParams().isEmpty()) return NO_ERROR; + } + } + + sp<BasicClient> clientTmp = nullptr; + std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>> partial; + if ((ret = handleEvictionsLocked(cameraId, clientPid, effectiveApiLevel, + IInterface::asBinder(cameraCb), clientName8, /*out*/&clientTmp, + /*out*/&partial)) != NO_ERROR) { + return ret; + } + + if (clientTmp.get() != nullptr) { + // Handle special case for API1 MediaRecorder where the existing client is returned + device = static_cast<CLIENT*>(clientTmp.get()); + return NO_ERROR; + } + + // give flashlight a chance to close devices if necessary. 
+ mFlashlight->prepareDeviceOpen(cameraId); + + // TODO: Update getDeviceVersion + HAL interface to use strings for Camera IDs + int id = cameraIdToInt(cameraId); + if (id == -1) { + ALOGE("%s: Invalid camera ID %s, cannot get device version from HAL.", __FUNCTION__, + cameraId.string()); + return BAD_VALUE; + } + + int facing = -1; + int deviceVersion = getDeviceVersion(id, /*out*/&facing); + sp<BasicClient> tmp = nullptr; + if((ret = makeClient(this, cameraCb, clientPackageName, cameraId, facing, clientPid, + clientUid, getpid(), legacyMode, halVersion, deviceVersion, effectiveApiLevel, + /*out*/&tmp)) != NO_ERROR) { + return ret; + } + client = static_cast<CLIENT*>(tmp.get()); + + LOG_ALWAYS_FATAL_IF(client.get() == nullptr, "%s: CameraService in invalid state", + __FUNCTION__); + + if ((ret = client->initialize(mModule)) != OK) { + ALOGE("%s: Could not initialize client from HAL module.", __FUNCTION__); + return ret; + } + + sp<IBinder> remoteCallback = client->getRemote(); + if (remoteCallback != nullptr) { + remoteCallback->linkToDeath(this); + } + + // Update shim paremeters for legacy clients + if (effectiveApiLevel == API_1) { + // Assume we have always received a Client subclass for API1 + sp<Client> shimClient = reinterpret_cast<Client*>(client.get()); + String8 rawParams = shimClient->getParameters(); + CameraParameters params(rawParams); + + auto cameraState = getCameraState(cameraId); + if (cameraState != nullptr) { + cameraState->setShimParams(params); + } else { + ALOGE("%s: Cannot update shim parameters for camera %s, no such device exists.", + __FUNCTION__, cameraId.string()); + } + } + + if (shimUpdateOnly) { + // If only updating legacy shim parameters, immediately disconnect client + mServiceLock.unlock(); + client->disconnect(); + mServiceLock.lock(); + } else { + // Otherwise, add client to active clients list + finishConnectLocked(client, partial); + } + } // lock is destroyed, allow further connect calls + + // Important: release the mutex 
here so the client can call back into the service from its + // destructor (can be at the end of the call) + device = client; + return NO_ERROR; +} + } // namespace android #endif diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp index 0ed5586..6f44aee 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.cpp +++ b/services/camera/libcameraservice/api1/Camera2Client.cpp @@ -67,7 +67,7 @@ Camera2Client::Camera2Client(const sp<CameraService>& cameraService, mLegacyMode = legacyMode; } -status_t Camera2Client::initialize(camera_module_t *module) +status_t Camera2Client::initialize(CameraModule *module) { ATRACE_CALL(); ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); @@ -163,11 +163,9 @@ Camera2Client::~Camera2Client() { status_t Camera2Client::dump(int fd, const Vector<String16>& args) { String8 result; - result.appendFormat("Client2[%d] (%p) Client: %s PID: %d, dump:\n", - mCameraId, + result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", mCameraId, (getRemoteCallback() != NULL ? 
(IInterface::asBinder(getRemoteCallback()).get()) : NULL), - String8(mClientPackageName).string(), mClientPid); result.append(" State: "); #define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; @@ -1959,7 +1957,7 @@ size_t Camera2Client::calculateBufferSize(int width, int height, return width * height * 2; case HAL_PIXEL_FORMAT_RGBA_8888: return width * height * 4; - case HAL_PIXEL_FORMAT_RAW_SENSOR: + case HAL_PIXEL_FORMAT_RAW16: return width * height * 2; default: ALOGE("%s: Unknown preview format: %x", diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h index d68bb29..5a8241f 100644 --- a/services/camera/libcameraservice/api1/Camera2Client.h +++ b/services/camera/libcameraservice/api1/Camera2Client.h @@ -94,7 +94,7 @@ public: virtual ~Camera2Client(); - status_t initialize(camera_module_t *module); + status_t initialize(CameraModule *module); virtual status_t dump(int fd, const Vector<String16>& args); diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp index bbb2fe0..e552633 100644 --- a/services/camera/libcameraservice/api1/CameraClient.cpp +++ b/services/camera/libcameraservice/api1/CameraClient.cpp @@ -59,7 +59,7 @@ CameraClient::CameraClient(const sp<CameraService>& cameraService, LOG1("CameraClient::CameraClient X (pid %d, id %d)", callingPid, cameraId); } -status_t CameraClient::initialize(camera_module_t *module) { +status_t CameraClient::initialize(CameraModule *module) { int callingPid = getCallingPid(); status_t res; @@ -75,7 +75,7 @@ status_t CameraClient::initialize(camera_module_t *module) { snprintf(camera_device_name, sizeof(camera_device_name), "%d", mCameraId); mHardware = new CameraHardwareInterface(camera_device_name); - res = mHardware->initialize(&module->common); + res = mHardware->initialize(module); if (res != OK) { ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", 
__FUNCTION__, mCameraId, strerror(-res), res); @@ -99,12 +99,7 @@ status_t CameraClient::initialize(camera_module_t *module) { // tear down the client CameraClient::~CameraClient() { - // this lock should never be NULL - Mutex* lock = mCameraService->getClientLockById(mCameraId); - lock->lock(); mDestructionStarted = true; - // client will not be accessed from callback. should unlock to prevent dead-lock in disconnect - lock->unlock(); int callingPid = getCallingPid(); LOG1("CameraClient::~CameraClient E (pid %d, this %p)", callingPid, this); @@ -116,11 +111,11 @@ status_t CameraClient::dump(int fd, const Vector<String16>& args) { const size_t SIZE = 256; char buffer[SIZE]; - size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) PID: %d\n", + size_t len = snprintf(buffer, SIZE, "Client[%d] (%p) with UID %d\n", mCameraId, (getRemoteCallback() != NULL ? IInterface::asBinder(getRemoteCallback()).get() : NULL), - mClientPid); + mClientUid); len = (len > SIZE - 1) ? SIZE - 1 : len; write(fd, buffer, len); @@ -677,6 +672,13 @@ bool CameraClient::lockIfMessageWanted(int32_t msgType) { LOG1("lockIfMessageWanted(%d): waited for %d ms", msgType, sleepCount * CHECK_MESSAGE_INTERVAL); } + + // If messages are no longer enabled after acquiring lock, release and drop message + if ((mMsgEnabled & msgType) == 0) { + mLock.unlock(); + break; + } + return true; } if (sleepCount++ == 0) { @@ -702,26 +704,13 @@ bool CameraClient::lockIfMessageWanted(int32_t msgType) { // (others) c->dataCallback // dataCallbackTimestamp // (others) c->dataCallbackTimestamp -// -// NOTE: the *Callback functions grab mLock of the client before passing -// control to handle* functions. So the handle* functions must release the -// lock before calling the ICameraClient's callbacks, so those callbacks can -// invoke methods in the Client class again (For example, the preview frame -// callback may want to releaseRecordingFrame). 
The handle* functions must -// release the lock after all accesses to member variables, so it must be -// handled very carefully. void CameraClient::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user) { LOG2("notifyCallback(%d)", msgType); - Mutex* lock = getClientLockFromCookie(user); - if (lock == NULL) return; - Mutex::Autolock alock(*lock); - - CameraClient* client = - static_cast<CameraClient*>(getClientFromCookie(user)); - if (client == NULL) return; + sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get()); + if (client.get() == nullptr) return; if (!client->lockIfMessageWanted(msgType)) return; @@ -740,13 +729,8 @@ void CameraClient::dataCallback(int32_t msgType, const sp<IMemory>& dataPtr, camera_frame_metadata_t *metadata, void* user) { LOG2("dataCallback(%d)", msgType); - Mutex* lock = getClientLockFromCookie(user); - if (lock == NULL) return; - Mutex::Autolock alock(*lock); - - CameraClient* client = - static_cast<CameraClient*>(getClientFromCookie(user)); - if (client == NULL) return; + sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get()); + if (client.get() == nullptr) return; if (!client->lockIfMessageWanted(msgType)) return; if (dataPtr == 0 && metadata == NULL) { @@ -778,13 +762,8 @@ void CameraClient::dataCallbackTimestamp(nsecs_t timestamp, int32_t msgType, const sp<IMemory>& dataPtr, void* user) { LOG2("dataCallbackTimestamp(%d)", msgType); - Mutex* lock = getClientLockFromCookie(user); - if (lock == NULL) return; - Mutex::Autolock alock(*lock); - - CameraClient* client = - static_cast<CameraClient*>(getClientFromCookie(user)); - if (client == NULL) return; + sp<CameraClient> client = static_cast<CameraClient*>(getClientFromCookie(user).get()); + if (client.get() == nullptr) return; if (!client->lockIfMessageWanted(msgType)) return; diff --git a/services/camera/libcameraservice/api1/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h index 
63a9d0f..95616b2 100644 --- a/services/camera/libcameraservice/api1/CameraClient.h +++ b/services/camera/libcameraservice/api1/CameraClient.h @@ -68,7 +68,7 @@ public: bool legacyMode = false); ~CameraClient(); - status_t initialize(camera_module_t *module); + status_t initialize(CameraModule *module); status_t dump(int fd, const Vector<String16>& args); diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp index eadaa00..5c8f750 100644 --- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp @@ -154,8 +154,8 @@ status_t CallbackProcessor::updateStream(const Parameters ¶ms) { params.previewWidth, params.previewHeight, callbackFormat, params.previewFormat); res = device->createStream(mCallbackWindow, - params.previewWidth, params.previewHeight, - callbackFormat, &mCallbackStreamId); + params.previewWidth, params.previewHeight, callbackFormat, + HAL_DATASPACE_JFIF, CAMERA3_STREAM_ROTATION_0, &mCallbackStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for callbacks: " "%s (%d)", __FUNCTION__, mId, diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp index 2772267..34798bf 100644 --- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp @@ -145,7 +145,8 @@ status_t JpegProcessor::updateStream(const Parameters ¶ms) { // Create stream for HAL production res = device->createStream(mCaptureWindow, params.pictureWidth, params.pictureHeight, - HAL_PIXEL_FORMAT_BLOB, &mCaptureStreamId); + HAL_PIXEL_FORMAT_BLOB, HAL_DATASPACE_JFIF, + CAMERA3_STREAM_ROTATION_0, &mCaptureStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for capture: " "%s (%d)", __FUNCTION__, mId, diff 
--git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp index 4f4cfb0..6b0f8b5 100644 --- a/services/camera/libcameraservice/api1/client2/Parameters.cpp +++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp @@ -65,15 +65,29 @@ status_t Parameters::initialize(const CameraMetadata *info, int deviceVersion) { const Size MAX_PREVIEW_SIZE = { MAX_PREVIEW_WIDTH, MAX_PREVIEW_HEIGHT }; // Treat the H.264 max size as the max supported video size. MediaProfiles *videoEncoderProfiles = MediaProfiles::getInstance(); - int32_t maxVideoWidth = videoEncoderProfiles->getVideoEncoderParamByName( - "enc.vid.width.max", VIDEO_ENCODER_H264); - int32_t maxVideoHeight = videoEncoderProfiles->getVideoEncoderParamByName( - "enc.vid.height.max", VIDEO_ENCODER_H264); - const Size MAX_VIDEO_SIZE = {maxVideoWidth, maxVideoHeight}; + Vector<video_encoder> encoders = videoEncoderProfiles->getVideoEncoders(); + int32_t maxVideoWidth = 0; + int32_t maxVideoHeight = 0; + for (size_t i = 0; i < encoders.size(); i++) { + int width = videoEncoderProfiles->getVideoEncoderParamByName( + "enc.vid.width.max", encoders[i]); + int height = videoEncoderProfiles->getVideoEncoderParamByName( + "enc.vid.height.max", encoders[i]); + // Treat width/height separately here to handle the case where different + // profile might report max size of different aspect ratio + if (width > maxVideoWidth) { + maxVideoWidth = width; + } + if (height > maxVideoHeight) { + maxVideoHeight = height; + } + } + // This is just an upper bound and may not be an actually valid video size + const Size VIDEO_SIZE_UPPER_BOUND = {maxVideoWidth, maxVideoHeight}; res = getFilteredSizes(MAX_PREVIEW_SIZE, &availablePreviewSizes); if (res != OK) return res; - res = getFilteredSizes(MAX_VIDEO_SIZE, &availableVideoSizes); + res = getFilteredSizes(VIDEO_SIZE_UPPER_BOUND, &availableVideoSizes); if (res != OK) return res; // Select initial preview and video 
size that's under the initial bound and @@ -182,9 +196,9 @@ status_t Parameters::initialize(const CameraMetadata *info, int deviceVersion) { supportedPreviewFormats += CameraParameters::PIXEL_FORMAT_YUV420SP; break; - // Not advertizing JPEG, RAW_SENSOR, etc, for preview formats + // Not advertizing JPEG, RAW16, etc, for preview formats case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: - case HAL_PIXEL_FORMAT_RAW_SENSOR: + case HAL_PIXEL_FORMAT_RAW16: case HAL_PIXEL_FORMAT_BLOB: addComma = false; break; @@ -2253,7 +2267,7 @@ const char* Parameters::formatEnumToString(int format) { case HAL_PIXEL_FORMAT_RGBA_8888: // RGBA8888 fmt = CameraParameters::PIXEL_FORMAT_RGBA8888; break; - case HAL_PIXEL_FORMAT_RAW_SENSOR: + case HAL_PIXEL_FORMAT_RAW16: ALOGW("Raw sensor preview format requested."); fmt = CameraParameters::PIXEL_FORMAT_BAYER_RGGB; break; diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp index 470624b..b6071f6 100644 --- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp @@ -27,6 +27,7 @@ #include <utils/Log.h> #include <utils/Trace.h> +#include <gui/BufferItem.h> #include <gui/Surface.h> #include <media/hardware/MetadataBufferType.h> @@ -181,7 +182,8 @@ status_t StreamingProcessor::updatePreviewStream(const Parameters ¶ms) { if (mPreviewStreamId == NO_STREAM) { res = device->createStream(mPreviewWindow, params.previewWidth, params.previewHeight, - CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, &mPreviewStreamId); + CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_UNKNOWN, + CAMERA3_STREAM_ROTATION_0, &mPreviewStreamId); if (res != OK) { ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)", __FUNCTION__, mId, strerror(-res), res); @@ -420,9 +422,12 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters ¶ms) { if (mRecordingStreamId == NO_STREAM) { 
mRecordingFrameCount = 0; + // Selecting BT.709 colorspace by default + // TODO: Wire this in from encoder side res = device->createStream(mRecordingWindow, params.videoWidth, params.videoHeight, - CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, &mRecordingStreamId); + CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, HAL_DATASPACE_BT709, + CAMERA3_STREAM_ROTATION_0, &mRecordingStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for recording: " "%s (%d)", __FUNCTION__, mId, @@ -675,7 +680,7 @@ status_t StreamingProcessor::processRecordingFrame() { sp<Camera2Client> client = mClient.promote(); if (client == 0) { // Discard frames during shutdown - BufferItemConsumer::BufferItem imgBuffer; + BufferItem imgBuffer; res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); if (res != OK) { if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { @@ -693,7 +698,7 @@ status_t StreamingProcessor::processRecordingFrame() { with Camera2Client code calling into StreamingProcessor */ SharedParameters::Lock l(client->getParameters()); Mutex::Autolock m(mMutex); - BufferItemConsumer::BufferItem imgBuffer; + BufferItem imgBuffer; res = mRecordingConsumer->acquireBuffer(&imgBuffer, 0); if (res != OK) { if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { @@ -819,8 +824,7 @@ void StreamingProcessor::releaseRecordingFrame(const sp<IMemory>& mem) { size_t itemIndex; for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) { - const BufferItemConsumer::BufferItem item = - mRecordingBuffers[itemIndex]; + const BufferItem item = mRecordingBuffers[itemIndex]; if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT && item.mGraphicBuffer->handle == imgHandle) { break; @@ -864,8 +868,7 @@ void StreamingProcessor::releaseAllRecordingFramesLocked() { size_t releasedCount = 0; for (size_t itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) { - const BufferItemConsumer::BufferItem item = - mRecordingBuffers[itemIndex]; + const BufferItem item = mRecordingBuffers[itemIndex]; 
if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT) { res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]); if (res != OK) { diff --git a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h index 1d679a4..2474062 100644 --- a/services/camera/libcameraservice/api1/client2/StreamingProcessor.h +++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h @@ -124,7 +124,7 @@ class StreamingProcessor: static const size_t kDefaultRecordingHeapCount = 8; size_t mRecordingHeapCount; - Vector<BufferItemConsumer::BufferItem> mRecordingBuffers; + Vector<BufferItem> mRecordingBuffers; size_t mRecordingHeapHead, mRecordingHeapFree; virtual bool threadLoop(); diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp index 8b7e4b4..a03f9c7 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp @@ -185,8 +185,8 @@ status_t ZslProcessor::updateStream(const Parameters ¶ms) { (int)CAMERA2_HAL_PIXEL_FORMAT_ZSL : (int)HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; res = device->createStream(mZslWindow, - params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, - streamType, &mZslStreamId); + params.fastInfo.arrayWidth, params.fastInfo.arrayHeight, streamType, + HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mZslStreamId); if (res != OK) { ALOGE("%s: Camera %d: Can't create output stream for ZSL: " "%s (%d)", __FUNCTION__, mId, @@ -440,7 +440,7 @@ status_t ZslProcessor::processNewZslBuffer() { zslConsumer = mZslConsumer; } ALOGVV("Trying to get next buffer"); - BufferItemConsumer::BufferItem item; + BufferItem item; res = zslConsumer->acquireBuffer(&item, 0); if (res != OK) { if (res != BufferItemConsumer::NO_BUFFER_AVAILABLE) { diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h 
b/services/camera/libcameraservice/api1/client2/ZslProcessor.h index 2099c38..5f50d7b 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h @@ -22,6 +22,7 @@ #include <utils/Vector.h> #include <utils/Mutex.h> #include <utils/Condition.h> +#include <gui/BufferItem.h> #include <gui/BufferItemConsumer.h> #include <camera/CameraMetadata.h> #include <camera/CaptureResult.h> @@ -103,7 +104,7 @@ class ZslProcessor: sp<ANativeWindow> mZslWindow; struct ZslPair { - BufferItemConsumer::BufferItem buffer; + BufferItem buffer; CameraMetadata frame; }; diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h index fc9f70c..2960478 100644 --- a/services/camera/libcameraservice/api1/client2/ZslProcessor3.h +++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h @@ -22,6 +22,7 @@ #include <utils/Vector.h> #include <utils/Mutex.h> #include <utils/Condition.h> +#include <gui/BufferItem.h> #include <gui/BufferItemConsumer.h> #include <camera/CameraMetadata.h> @@ -104,7 +105,7 @@ class ZslProcessor3 : sp<camera3::Camera3ZslStream> mZslStream; struct ZslPair { - BufferItemConsumer::BufferItem buffer; + BufferItem buffer; CameraMetadata frame; }; diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp index 6a1ee44..8587e0e 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp @@ -71,7 +71,7 @@ CameraDeviceClient::CameraDeviceClient(const sp<CameraService>& cameraService, ALOGI("CameraDeviceClient %d: Opened", cameraId); } -status_t CameraDeviceClient::initialize(camera_module_t *module) +status_t CameraDeviceClient::initialize(CameraModule *module) { ATRACE_CALL(); status_t res; @@ -314,17 +314,17 @@ status_t 
CameraDeviceClient::deleteStream(int streamId) { return res; } -status_t CameraDeviceClient::createStream(int width, int height, int format, - const sp<IGraphicBufferProducer>& bufferProducer) +status_t CameraDeviceClient::createStream(const OutputConfiguration &outputConfiguration) { ATRACE_CALL(); - ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); status_t res; if ( (res = checkPid(__FUNCTION__) ) != OK) return res; Mutex::Autolock icl(mBinderSerializationLock); + + sp<IGraphicBufferProducer> bufferProducer = outputConfiguration.getGraphicBufferProducer(); if (bufferProducer == NULL) { ALOGE("%s: bufferProducer must not be null", __FUNCTION__); return BAD_VALUE; @@ -370,7 +370,8 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, sp<IBinder> binder = IInterface::asBinder(bufferProducer); sp<ANativeWindow> anw = new Surface(bufferProducer, useAsync); - // TODO: remove w,h,f since we are ignoring them + int width, height, format; + android_dataspace dataSpace; if ((res = anw->query(anw.get(), NATIVE_WINDOW_WIDTH, &width)) != OK) { ALOGE("%s: Camera %d: Failed to query Surface width", __FUNCTION__, @@ -387,6 +388,12 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, mCameraId); return res; } + if ((res = anw->query(anw.get(), NATIVE_WINDOW_DEFAULT_DATASPACE, + reinterpret_cast<int*>(&dataSpace))) != OK) { + ALOGE("%s: Camera %d: Failed to query Surface dataSpace", __FUNCTION__, + mCameraId); + return res; + } // FIXME: remove this override since the default format should be // IMPLEMENTATION_DEFINED. 
b/9487482 @@ -399,14 +406,17 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, // Round dimensions to the nearest dimensions available for this format if (flexibleConsumer && !CameraDeviceClient::roundBufferDimensionNearest(width, height, - format, mDevice->info(), /*out*/&width, /*out*/&height)) { + format, dataSpace, mDevice->info(), /*out*/&width, /*out*/&height)) { ALOGE("%s: No stream configurations with the format %#x defined, failed to create stream.", __FUNCTION__, format); return BAD_VALUE; } int streamId = -1; - res = mDevice->createStream(anw, width, height, format, &streamId); + res = mDevice->createStream(anw, width, height, format, dataSpace, + static_cast<camera3_stream_rotation_t> + (outputConfiguration.getRotation()), + &streamId); if (res == OK) { mStreamMap.add(binder, streamId); @@ -441,10 +451,12 @@ status_t CameraDeviceClient::createStream(int width, int height, int format, bool CameraDeviceClient::roundBufferDimensionNearest(int32_t width, int32_t height, - int32_t format, const CameraMetadata& info, + int32_t format, android_dataspace dataSpace, const CameraMetadata& info, /*out*/int32_t* outWidth, /*out*/int32_t* outHeight) { camera_metadata_ro_entry streamConfigs = + (dataSpace == HAL_DATASPACE_DEPTH) ? + info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) : info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS); int32_t bestWidth = -1; @@ -586,9 +598,7 @@ status_t CameraDeviceClient::dump(int fd, const Vector<String16>& args) { mCameraId, (getRemoteCallback() != NULL ? 
IInterface::asBinder(getRemoteCallback()).get() : NULL) ); - result.appendFormat(" Current client: %s (PID %d, UID %u)\n", - String8(mClientPackageName).string(), - mClientPid, mClientUid); + result.appendFormat(" Current client UID %u\n", mClientUid); result.append(" State:\n"); result.appendFormat(" Request ID counter: %d\n", mRequestIdCounter); @@ -635,9 +645,6 @@ void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras, } } -// TODO: refactor the code below this with IProCameraUser. -// it's 100% copy-pasted, so lets not change it right now to make it easier. - void CameraDeviceClient::detachDevice() { if (mDevice == 0) return; diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h index 84e46b7..a3dbb90 100644 --- a/services/camera/libcameraservice/api2/CameraDeviceClient.h +++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h @@ -19,6 +19,7 @@ #include <camera/camera2/ICameraDeviceUser.h> #include <camera/camera2/ICameraDeviceCallbacks.h> +#include <camera/camera2/OutputConfiguration.h> #include "CameraService.h" #include "common/FrameProcessorBase.h" @@ -83,11 +84,7 @@ public: // Returns -EBUSY if device is not idle virtual status_t deleteStream(int streamId); - virtual status_t createStream( - int width, - int height, - int format, - const sp<IGraphicBufferProducer>& bufferProducer); + virtual status_t createStream(const OutputConfiguration &outputConfiguration); // Create a request object from a template. 
virtual status_t createDefaultRequest(int templateId, @@ -119,7 +116,7 @@ public: int servicePid); virtual ~CameraDeviceClient(); - virtual status_t initialize(camera_module_t *module); + virtual status_t initialize(CameraModule *module); virtual status_t dump(int fd, const Vector<String16>& args); @@ -161,7 +158,8 @@ private: // a width <= ROUNDING_WIDTH_CAP static const int32_t ROUNDING_WIDTH_CAP = 1080; static bool roundBufferDimensionNearest(int32_t width, int32_t height, int32_t format, - const CameraMetadata& info, /*out*/int32_t* outWidth, /*out*/int32_t* outHeight); + android_dataspace dataSpace, const CameraMetadata& info, + /*out*/int32_t* outWidth, /*out*/int32_t* outHeight); // IGraphicsBufferProducer binder -> Stream ID KeyedVector<sp<IBinder>, int> mStreamMap; diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp deleted file mode 100644 index 59e5083..0000000 --- a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp +++ /dev/null @@ -1,444 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#define LOG_TAG "ProCamera2Client" -#define ATRACE_TAG ATRACE_TAG_CAMERA -//#define LOG_NDEBUG 0 - -#include <utils/Log.h> -#include <utils/Trace.h> - -#include <cutils/properties.h> -#include <gui/Surface.h> -#include <gui/Surface.h> - -#include "api_pro/ProCamera2Client.h" -#include "common/CameraDeviceBase.h" - -namespace android { -using namespace camera2; - -// Interface used by CameraService - -ProCamera2Client::ProCamera2Client(const sp<CameraService>& cameraService, - const sp<IProCameraCallbacks>& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid) : - Camera2ClientBase(cameraService, remoteCallback, clientPackageName, - cameraId, cameraFacing, clientPid, clientUid, servicePid) -{ - ATRACE_CALL(); - ALOGI("ProCamera %d: Opened", cameraId); - - mExclusiveLock = false; -} - -status_t ProCamera2Client::initialize(camera_module_t *module) -{ - ATRACE_CALL(); - status_t res; - - res = Camera2ClientBase::initialize(module); - if (res != OK) { - return res; - } - - String8 threadName; - mFrameProcessor = new FrameProcessorBase(mDevice); - threadName = String8::format("PC2-%d-FrameProc", mCameraId); - mFrameProcessor->run(threadName.string()); - - mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID, - FRAME_PROCESSOR_LISTENER_MAX_ID, - /*listener*/this); - - return OK; -} - -ProCamera2Client::~ProCamera2Client() { -} - -status_t ProCamera2Client::exclusiveTryLock() { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (!mDevice.get()) return PERMISSION_DENIED; - - if (!mExclusiveLock) { - mExclusiveLock = true; - - if (mRemoteCallback != NULL) { - mRemoteCallback->onLockStatusChanged( - IProCameraCallbacks::LOCK_ACQUIRED); - } - - ALOGV("%s: exclusive lock acquired", __FUNCTION__); - - return OK; - } - - // TODO: have a PERMISSION_DENIED case for 
when someone else owns the lock - - // don't allow recursive locking - ALOGW("%s: exclusive lock already exists - recursive locking is not" - "allowed", __FUNCTION__); - - return ALREADY_EXISTS; -} - -status_t ProCamera2Client::exclusiveLock() { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (!mDevice.get()) return PERMISSION_DENIED; - - /** - * TODO: this should asynchronously 'wait' until the lock becomes available - * if another client already has an exclusive lock. - * - * once we have proper sharing support this will need to do - * more than just return immediately - */ - if (!mExclusiveLock) { - mExclusiveLock = true; - - if (mRemoteCallback != NULL) { - mRemoteCallback->onLockStatusChanged(IProCameraCallbacks::LOCK_ACQUIRED); - } - - ALOGV("%s: exclusive lock acquired", __FUNCTION__); - - return OK; - } - - // don't allow recursive locking - ALOGW("%s: exclusive lock already exists - recursive locking is not allowed" - , __FUNCTION__); - return ALREADY_EXISTS; -} - -status_t ProCamera2Client::exclusiveUnlock() { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - // don't allow unlocking if we have no lock - if (!mExclusiveLock) { - ALOGW("%s: cannot unlock, no lock was held in the first place", - __FUNCTION__); - return BAD_VALUE; - } - - mExclusiveLock = false; - if (mRemoteCallback != NULL ) { - mRemoteCallback->onLockStatusChanged( - IProCameraCallbacks::LOCK_RELEASED); - } - ALOGV("%s: exclusive lock released", __FUNCTION__); - - return OK; -} - -bool ProCamera2Client::hasExclusiveLock() { - Mutex::Autolock icl(mBinderSerializationLock); - return mExclusiveLock; -} - -void ProCamera2Client::onExclusiveLockStolen() { - ALOGV("%s: ProClient lost exclusivity (id %d)", - __FUNCTION__, mCameraId); - - Mutex::Autolock 
icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (mExclusiveLock && mRemoteCallback.get() != NULL) { - mRemoteCallback->onLockStatusChanged( - IProCameraCallbacks::LOCK_STOLEN); - } - - mExclusiveLock = false; - - //TODO: we should not need to detach the device, merely reset it. - detachDevice(); -} - -status_t ProCamera2Client::submitRequest(camera_metadata_t* request, - bool streaming) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - if (!mExclusiveLock) { - return PERMISSION_DENIED; - } - - CameraMetadata metadata(request); - - if (!enforceRequestPermissions(metadata)) { - return PERMISSION_DENIED; - } - - if (streaming) { - return mDevice->setStreamingRequest(metadata); - } else { - return mDevice->capture(metadata); - } - - // unreachable. thx gcc for a useless warning - return OK; -} - -status_t ProCamera2Client::cancelRequest(int requestId) { - (void)requestId; - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - if (!mExclusiveLock) { - return PERMISSION_DENIED; - } - - // TODO: implement - ALOGE("%s: not fully implemented yet", __FUNCTION__); - return INVALID_OPERATION; -} - -status_t ProCamera2Client::deleteStream(int streamId) { - ATRACE_CALL(); - ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - mDevice->clearStreamingRequest(); - - status_t code; - if ((code = mDevice->waitUntilDrained()) != OK) { - ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, code); - } - - return mDevice->deleteStream(streamId); -} - -status_t ProCamera2Client::createStream(int width, int height, int format, - const sp<IGraphicBufferProducer>& 
bufferProducer, - /*out*/ - int* streamId) -{ - if (streamId) { - *streamId = -1; - } - - ATRACE_CALL(); - ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format); - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - sp<IBinder> binder; - sp<ANativeWindow> window; - if (bufferProducer != 0) { - binder = IInterface::asBinder(bufferProducer); - window = new Surface(bufferProducer); - } - - return mDevice->createStream(window, width, height, format, - streamId); -} - -// Create a request object from a template. -// -- Caller owns the newly allocated metadata -status_t ProCamera2Client::createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request) -{ - ATRACE_CALL(); - ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId); - - if (request) { - *request = NULL; - } - - status_t res; - if ( (res = checkPid(__FUNCTION__) ) != OK) return res; - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - CameraMetadata metadata; - if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK) { - *request = metadata.release(); - } - - return res; -} - -status_t ProCamera2Client::getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info) -{ - if (cameraId != mCameraId) { - return INVALID_OPERATION; - } - - Mutex::Autolock icl(mBinderSerializationLock); - - if (!mDevice.get()) return DEAD_OBJECT; - - CameraMetadata deviceInfo = mDevice->info(); - *info = deviceInfo.release(); - - return OK; -} - -status_t ProCamera2Client::dump(int fd, const Vector<String16>& args) { - String8 result; - result.appendFormat("ProCamera2Client[%d] (%p) PID: %d, dump:\n", - mCameraId, - (getRemoteCallback() != NULL ? 
- IInterface::asBinder(getRemoteCallback()).get() : NULL), - mClientPid); - result.append(" State:\n"); - write(fd, result.string(), result.size()); - - // TODO: print dynamic/request section from most recent requests - mFrameProcessor->dump(fd, args); - return dumpDevice(fd, args); -} - -// IProCameraUser interface - -void ProCamera2Client::detachDevice() { - if (mDevice == 0) return; - - ALOGV("Camera %d: Stopping processors", mCameraId); - - mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID, - FRAME_PROCESSOR_LISTENER_MAX_ID, - /*listener*/this); - mFrameProcessor->requestExit(); - ALOGV("Camera %d: Waiting for threads", mCameraId); - mFrameProcessor->join(); - ALOGV("Camera %d: Disconnecting device", mCameraId); - - // WORKAROUND: HAL refuses to disconnect while there's streams in flight - { - mDevice->clearStreamingRequest(); - - status_t code; - if ((code = mDevice->waitUntilDrained()) != OK) { - ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, - code); - } - } - - Camera2ClientBase::detachDevice(); -} - -void ProCamera2Client::onResultAvailable(const CaptureResult& result) { - ATRACE_CALL(); - ALOGV("%s", __FUNCTION__); - - Mutex::Autolock icl(mBinderSerializationLock); - SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); - - if (mRemoteCallback != NULL) { - CameraMetadata tmp(result.mMetadata); - camera_metadata_t* meta = tmp.release(); - ALOGV("%s: meta = %p ", __FUNCTION__, meta); - mRemoteCallback->onResultReceived(result.mResultExtras.requestId, meta); - tmp.acquire(meta); - } -} - -bool ProCamera2Client::enforceRequestPermissions(CameraMetadata& metadata) { - - const int pid = IPCThreadState::self()->getCallingPid(); - const int selfPid = getpid(); - camera_metadata_entry_t entry; - - /** - * Mixin default important security values - * - android.led.transmit = defaulted ON - */ - CameraMetadata staticInfo = mDevice->info(); - entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS); - for(size_t i = 0; i < entry.count; 
++i) { - uint8_t led = entry.data.u8[i]; - - switch(led) { - case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: { - uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON; - if (!metadata.exists(ANDROID_LED_TRANSMIT)) { - metadata.update(ANDROID_LED_TRANSMIT, - &transmitDefault, 1); - } - break; - } - } - } - - // We can do anything! - if (pid == selfPid) { - return true; - } - - /** - * Permission check special fields in the request - * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT - */ - entry = metadata.find(ANDROID_LED_TRANSMIT); - if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) { - String16 permissionString = - String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED"); - if (!checkCallingPermission(permissionString)) { - const int uid = IPCThreadState::self()->getCallingUid(); - ALOGE("Permission Denial: " - "can't disable transmit LED pid=%d, uid=%d", pid, uid); - return false; - } - } - - return true; -} - -} // namespace android diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h deleted file mode 100644 index 9d83122..0000000 --- a/services/camera/libcameraservice/api_pro/ProCamera2Client.h +++ /dev/null @@ -1,124 +0,0 @@ -/* - * Copyright (C) 2013 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H -#define ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H - -#include "CameraService.h" -#include "common/FrameProcessorBase.h" -#include "common/Camera2ClientBase.h" -#include "device2/Camera2Device.h" -#include "camera/CaptureResult.h" - -namespace android { - -class IMemory; -/** - * Implements the binder IProCameraUser API, - * meant for HAL2-level private API access. - */ -class ProCamera2Client : - public Camera2ClientBase<CameraService::ProClient>, - public camera2::FrameProcessorBase::FilteredListener -{ -public: - /** - * IProCameraUser interface (see IProCameraUser for details) - */ - virtual status_t exclusiveTryLock(); - virtual status_t exclusiveLock(); - virtual status_t exclusiveUnlock(); - - virtual bool hasExclusiveLock(); - - // Note that the callee gets a copy of the metadata. - virtual int submitRequest(camera_metadata_t* metadata, - bool streaming = false); - virtual status_t cancelRequest(int requestId); - - virtual status_t deleteStream(int streamId); - - virtual status_t createStream( - int width, - int height, - int format, - const sp<IGraphicBufferProducer>& bufferProducer, - /*out*/ - int* streamId); - - // Create a request object from a template. 
- // -- Caller owns the newly allocated metadata - virtual status_t createDefaultRequest(int templateId, - /*out*/ - camera_metadata** request); - - // Get the static metadata for the camera - // -- Caller owns the newly allocated metadata - virtual status_t getCameraInfo(int cameraId, - /*out*/ - camera_metadata** info); - - /** - * Interface used by CameraService - */ - - ProCamera2Client(const sp<CameraService>& cameraService, - const sp<IProCameraCallbacks>& remoteCallback, - const String16& clientPackageName, - int cameraId, - int cameraFacing, - int clientPid, - uid_t clientUid, - int servicePid); - virtual ~ProCamera2Client(); - - virtual status_t initialize(camera_module_t *module); - - virtual status_t dump(int fd, const Vector<String16>& args); - - // Callbacks from camera service - virtual void onExclusiveLockStolen(); - - /** - * Interface used by independent components of ProCamera2Client. - */ - -protected: - /** FilteredListener implementation **/ - virtual void onResultAvailable(const CaptureResult& result); - - virtual void detachDevice(); - -private: - /** IProCameraUser interface-related private members */ - - /** Preview callback related members */ - sp<camera2::FrameProcessorBase> mFrameProcessor; - static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0; - static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL; - - /** Utility members */ - bool enforceRequestPermissions(CameraMetadata& metadata); - - // Whether or not we have an exclusive lock on the device - // - if no we can't modify the request queue. 
- // note that creating/deleting streams we own is still OK - bool mExclusiveLock; -}; - -}; // namespace android - -#endif diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp index 453c8bd..c0c2314 100644 --- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp +++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp @@ -78,7 +78,7 @@ status_t Camera2ClientBase<TClientBase>::checkPid(const char* checkLocation) } template <typename TClientBase> -status_t Camera2ClientBase<TClientBase>::initialize(camera_module_t *module) { +status_t Camera2ClientBase<TClientBase>::initialize(CameraModule *module) { ATRACE_CALL(); ALOGV("%s: Initializing client for camera %d", __FUNCTION__, TClientBase::mCameraId); @@ -337,7 +337,6 @@ void Camera2ClientBase<TClientBase>::SharedCameraCallbacks::clear() { mRemoteCallback.clear(); } -template class Camera2ClientBase<CameraService::ProClient>; template class Camera2ClientBase<CameraService::Client>; template class Camera2ClientBase<CameraDeviceClientBase>; diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h index e09c1b5..168ea0a 100644 --- a/services/camera/libcameraservice/common/Camera2ClientBase.h +++ b/services/camera/libcameraservice/common/Camera2ClientBase.h @@ -18,6 +18,7 @@ #define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H #include "common/CameraDeviceBase.h" +#include "common/CameraModule.h" #include "camera/CaptureResult.h" namespace android { @@ -35,7 +36,7 @@ public: typedef typename TClientBase::TCamCallbacks TCamCallbacks; /** - * Base binder interface (see ICamera/IProCameraUser for details) + * Base binder interface (see ICamera/ICameraDeviceUser for details) */ virtual status_t connect(const sp<TCamCallbacks>& callbacks); virtual void disconnect(); @@ -55,7 +56,7 @@ public: int servicePid); virtual ~Camera2ClientBase(); - 
virtual status_t initialize(camera_module_t *module); + virtual status_t initialize(CameraModule *module); virtual status_t dump(int fd, const Vector<String16>& args); /** diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h index d26e20c..fe55b9e 100644 --- a/services/camera/libcameraservice/common/CameraDeviceBase.h +++ b/services/camera/libcameraservice/common/CameraDeviceBase.h @@ -29,6 +29,7 @@ #include "hardware/camera3.h" #include "camera/CameraMetadata.h" #include "camera/CaptureResult.h" +#include "common/CameraModule.h" namespace android { @@ -45,7 +46,7 @@ class CameraDeviceBase : public virtual RefBase { */ virtual int getId() const = 0; - virtual status_t initialize(camera_module_t *module) = 0; + virtual status_t initialize(CameraModule *module) = 0; virtual status_t disconnect() = 0; virtual status_t dump(int fd, const Vector<String16> &args) = 0; @@ -99,17 +100,14 @@ class CameraDeviceBase : public virtual RefBase { nsecs_t timeout) = 0; /** - * Create an output stream of the requested size and format. + * Create an output stream of the requested size, format, rotation and dataspace * - * If format is CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, then the HAL device selects - * an appropriate format; it can be queried with getStreamInfo. - * - * If format is HAL_PIXEL_FORMAT_COMPRESSED, the size parameter must be - * equal to the size in bytes of the buffers to allocate for the stream. For - * other formats, the size parameter is ignored. + * For HAL_PIXEL_FORMAT_BLOB formats, the width and height should be the + * logical dimensions of the buffer, not the number of bytes. 
*/ virtual status_t createStream(sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, int format, int *id) = 0; + uint32_t width, uint32_t height, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id) = 0; /** * Create an input reprocess stream that uses buffers from an existing diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp new file mode 100644 index 0000000..e5b12ae --- /dev/null +++ b/services/camera/libcameraservice/common/CameraModule.cpp @@ -0,0 +1,163 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "CameraModule" +//#define LOG_NDEBUG 0 + +#include "CameraModule.h" + +namespace android { + +void CameraModule::deriveCameraCharacteristicsKeys( + uint32_t deviceVersion, CameraMetadata &chars) { + // HAL1 devices should not reach here + if (deviceVersion < CAMERA_DEVICE_API_VERSION_2_0) { + ALOGV("%s: Cannot derive keys for HAL version < 2.0"); + return; + } + + // Keys added in HAL3.3 + if (deviceVersion < CAMERA_DEVICE_API_VERSION_3_3) { + Vector<uint8_t> controlModes; + uint8_t data = ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE; + chars.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE, &data, /*count*/1); + data = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE; + chars.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE, &data, /*count*/1); + controlModes.push(ANDROID_CONTROL_MODE_OFF); + controlModes.push(ANDROID_CONTROL_MODE_AUTO); + camera_metadata_entry entry = chars.find(ANDROID_CONTROL_AVAILABLE_SCENE_MODES); + if (entry.count > 1 || entry.data.u8[0] != ANDROID_CONTROL_SCENE_MODE_DISABLED) { + controlModes.push(ANDROID_CONTROL_MODE_USE_SCENE_MODE); + } + chars.update(ANDROID_CONTROL_AVAILABLE_MODES, controlModes); + } + return; +} + +CameraModule::CameraModule(camera_module_t *module) { + if (module == NULL) { + ALOGE("%s: camera hardware module must not be null", __FUNCTION__); + assert(0); + } + + mModule = module; + mCameraInfoMap.setCapacity(getNumberOfCameras()); +} + +int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) { + Mutex::Autolock lock(mCameraInfoLock); + if (cameraId < 0) { + ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId); + return -EINVAL; + } + + // Only override static_camera_characteristics for API2 devices + int apiVersion = mModule->common.module_api_version; + if (apiVersion < CAMERA_MODULE_API_VERSION_2_0) { + return mModule->get_camera_info(cameraId, info); + } + + ssize_t index = mCameraInfoMap.indexOfKey(cameraId); + if (index == NAME_NOT_FOUND) { + // Get camera info from raw module and cache it + 
camera_info rawInfo, cameraInfo; + int ret = mModule->get_camera_info(cameraId, &rawInfo); + if (ret != 0) { + return ret; + } + CameraMetadata m; + m = rawInfo.static_camera_characteristics; + deriveCameraCharacteristicsKeys(rawInfo.device_version, m); + mCameraCharacteristicsMap.add(cameraId, m); + cameraInfo = rawInfo; + cameraInfo.static_camera_characteristics = + mCameraCharacteristicsMap.valueFor(cameraId).getAndLock(); + mCameraInfoMap.add(cameraId, cameraInfo); + index = mCameraInfoMap.indexOfKey(cameraId); + } + + assert(index != NAME_NOT_FOUND); + // return the cached camera info + *info = mCameraInfoMap[index]; + return 0; +} + +int CameraModule::open(const char* id, struct hw_device_t** device) { + return filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device)); +} + +int CameraModule::openLegacy( + const char* id, uint32_t halVersion, struct hw_device_t** device) { + return mModule->open_legacy(&mModule->common, id, halVersion, device); +} + +int CameraModule::getNumberOfCameras() { + return mModule->get_number_of_cameras(); +} + +int CameraModule::setCallbacks(const camera_module_callbacks_t *callbacks) { + return mModule->set_callbacks(callbacks); +} + +bool CameraModule::isVendorTagDefined() { + return mModule->get_vendor_tag_ops != NULL; +} + +void CameraModule::getVendorTagOps(vendor_tag_ops_t* ops) { + if (mModule->get_vendor_tag_ops) { + mModule->get_vendor_tag_ops(ops); + } +} + +int CameraModule::setTorchMode(const char* camera_id, bool enable) { + return mModule->set_torch_mode(camera_id, enable); +} + +status_t CameraModule::filterOpenErrorCode(status_t err) { + switch(err) { + case NO_ERROR: + case -EBUSY: + case -EINVAL: + case -EUSERS: + return err; + default: + break; + } + return -ENODEV; +} + +uint16_t CameraModule::getModuleApiVersion() { + return mModule->common.module_api_version; +} + +const char* CameraModule::getModuleName() { + return mModule->common.name; +} + +uint16_t CameraModule::getHalApiVersion() { 
+ return mModule->common.hal_api_version; +} + +const char* CameraModule::getModuleAuthor() { + return mModule->common.author; +} + +void* CameraModule::getDso() { + return mModule->common.dso; +} + +}; // namespace android + diff --git a/services/camera/libcameraservice/common/CameraModule.h b/services/camera/libcameraservice/common/CameraModule.h new file mode 100644 index 0000000..e285b21 --- /dev/null +++ b/services/camera/libcameraservice/common/CameraModule.h @@ -0,0 +1,66 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ANDROID_SERVERS_CAMERA_CAMERAMODULE_H +#define ANDROID_SERVERS_CAMERA_CAMERAMODULE_H + +#include <hardware/camera.h> +#include <camera/CameraMetadata.h> +#include <utils/Mutex.h> +#include <utils/KeyedVector.h> + +namespace android { +/** + * A wrapper class for HAL camera module. + * + * This class wraps camera_module_t returned from HAL to provide a wrapped + * get_camera_info implementation which CameraService generates some + * camera characteristics keys defined in newer HAL version on an older HAL. 
+ */ +class CameraModule { +public: + CameraModule(camera_module_t *module); + + int getCameraInfo(int cameraId, struct camera_info *info); + int getNumberOfCameras(void); + int open(const char* id, struct hw_device_t** device); + int openLegacy(const char* id, uint32_t halVersion, struct hw_device_t** device); + int setCallbacks(const camera_module_callbacks_t *callbacks); + bool isVendorTagDefined(); + void getVendorTagOps(vendor_tag_ops_t* ops); + int setTorchMode(const char* camera_id, bool enable); + uint16_t getModuleApiVersion(); + const char* getModuleName(); + uint16_t getHalApiVersion(); + const char* getModuleAuthor(); + // Only used by CameraModuleFixture native test. Do NOT use elsewhere. + void *getDso(); + +private: + // Derive camera characteristics keys defined after HAL device version + static void deriveCameraCharacteristicsKeys(uint32_t deviceVersion, CameraMetadata &chars); + status_t filterOpenErrorCode(status_t err); + + camera_module_t *mModule; + KeyedVector<int, camera_info> mCameraInfoMap; + KeyedVector<int, CameraMetadata> mCameraCharacteristicsMap; + Mutex mCameraInfoLock; +}; + +} // namespace android + +#endif + diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h index 1935c2b..7f14cd4 100644 --- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h +++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h @@ -89,24 +89,22 @@ public: } } - status_t initialize(hw_module_t *module) + status_t initialize(CameraModule *module) { ALOGI("Opening camera %s", mName.string()); - camera_module_t *cameraModule = reinterpret_cast<camera_module_t *>(module); camera_info info; - status_t res = cameraModule->get_camera_info(atoi(mName.string()), &info); + status_t res = module->getCameraInfo(atoi(mName.string()), &info); if (res != OK) return res; int rc = OK; - if (module->module_api_version >= CAMERA_MODULE_API_VERSION_2_3 
&& + if (module->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_3 && info.device_version > CAMERA_DEVICE_API_VERSION_1_0) { // Open higher version camera device as HAL1.0 device. - rc = cameraModule->open_legacy(module, mName.string(), - CAMERA_DEVICE_API_VERSION_1_0, - (hw_device_t **)&mDevice); + rc = module->openLegacy(mName.string(), + CAMERA_DEVICE_API_VERSION_1_0, + (hw_device_t **)&mDevice); } else { - rc = CameraService::filterOpenErrorCode(module->methods->open( - module, mName.string(), (hw_device_t **)&mDevice)); + rc = module->open(mName.string(), (hw_device_t **)&mDevice); } if (rc != OK) { ALOGE("Could not open camera %s: %d", mName.string(), rc); diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp index d1158d6..878986b 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.cpp +++ b/services/camera/libcameraservice/device2/Camera2Device.cpp @@ -53,7 +53,7 @@ int Camera2Device::getId() const { return mId; } -status_t Camera2Device::initialize(camera_module_t *module) +status_t Camera2Device::initialize(CameraModule *module) { ATRACE_CALL(); ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId); @@ -68,8 +68,7 @@ status_t Camera2Device::initialize(camera_module_t *module) camera2_device_t *device; - res = CameraService::filterOpenErrorCode(module->common.methods->open( - &module->common, name, reinterpret_cast<hw_device_t**>(&device))); + res = module->open(name, reinterpret_cast<hw_device_t**>(&device)); if (res != OK) { ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__, @@ -87,7 +86,7 @@ status_t Camera2Device::initialize(camera_module_t *module) } camera_info info; - res = module->get_camera_info(mId, &info); + res = module->getCameraInfo(mId, &info); if (res != OK ) return res; if (info.device_version != device->common.version) { @@ -242,7 +241,8 @@ status_t Camera2Device::waitUntilRequestReceived(int32_t requestId, nsecs_t 
time } status_t Camera2Device::createStream(sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, int format, int *id) { + uint32_t width, uint32_t height, int format, + android_dataspace /*dataSpace*/, camera3_stream_rotation_t rotation, int *id) { ATRACE_CALL(); status_t res; ALOGV("%s: E", __FUNCTION__); diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h index 4def8ae..9b32fa6 100644 --- a/services/camera/libcameraservice/device2/Camera2Device.h +++ b/services/camera/libcameraservice/device2/Camera2Device.h @@ -43,7 +43,7 @@ class Camera2Device: public CameraDeviceBase { * CameraDevice interface */ virtual int getId() const; - virtual status_t initialize(camera_module_t *module); + virtual status_t initialize(CameraModule *module); virtual status_t disconnect(); virtual status_t dump(int fd, const Vector<String16>& args); virtual const CameraMetadata& info() const; @@ -57,7 +57,8 @@ class Camera2Device: public CameraDeviceBase { virtual status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL); virtual status_t waitUntilRequestReceived(int32_t requestId, nsecs_t timeout); virtual status_t createStream(sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, int format, int *id); + uint32_t width, uint32_t height, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id); virtual status_t createReprocessStreamFromStream(int outputId, int *id); virtual status_t getStreamInfo(int id, uint32_t *width, uint32_t *height, uint32_t *format); diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp index 53e6fa9..8236788 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.cpp +++ b/services/camera/libcameraservice/device3/Camera3Device.cpp @@ -86,7 +86,7 @@ int Camera3Device::getId() const { * CameraDeviceBase interface */ -status_t 
Camera3Device::initialize(camera_module_t *module) +status_t Camera3Device::initialize(CameraModule *module) { ATRACE_CALL(); Mutex::Autolock il(mInterfaceLock); @@ -106,9 +106,8 @@ status_t Camera3Device::initialize(camera_module_t *module) camera3_device_t *device; ATRACE_BEGIN("camera3->open"); - res = CameraService::filterOpenErrorCode(module->common.methods->open( - &module->common, deviceName.string(), - reinterpret_cast<hw_device_t**>(&device))); + res = module->open(deviceName.string(), + reinterpret_cast<hw_device_t**>(&device)); ATRACE_END(); if (res != OK) { @@ -127,7 +126,7 @@ status_t Camera3Device::initialize(camera_module_t *module) } camera_info info; - res = CameraService::filterGetInfoErrorCode(module->get_camera_info( + res = CameraService::filterGetInfoErrorCode(module->getCameraInfo( mId, &info)); if (res != OK) return res; @@ -802,12 +801,13 @@ status_t Camera3Device::createZslStream( } status_t Camera3Device::createStream(sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, int format, int *id) { + uint32_t width, uint32_t height, int format, android_dataspace dataSpace, + camera3_stream_rotation_t rotation, int *id) { ATRACE_CALL(); Mutex::Autolock il(mInterfaceLock); Mutex::Autolock l(mLock); - ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d", - mId, mNextStreamId, width, height, format); + ALOGV("Camera %d: Creating new stream %d: %d x %d, format %d, dataspace %d rotation %d", + mId, mNextStreamId, width, height, format, dataSpace, rotation); status_t res; bool wasActive = false; @@ -847,10 +847,10 @@ status_t Camera3Device::createStream(sp<ANativeWindow> consumer, } newStream = new Camera3OutputStream(mNextStreamId, consumer, - width, height, jpegBufferSize, format); + width, height, jpegBufferSize, format, dataSpace, rotation); } else { newStream = new Camera3OutputStream(mNextStreamId, consumer, - width, height, format); + width, height, format, dataSpace, rotation); } newStream->setStatusTracker(mStatusTracker); 
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h index ec8dc10..a77548d 100644 --- a/services/camera/libcameraservice/device3/Camera3Device.h +++ b/services/camera/libcameraservice/device3/Camera3Device.h @@ -73,7 +73,7 @@ class Camera3Device : virtual int getId() const; // Transitions to idle state on success. - virtual status_t initialize(camera_module_t *module); + virtual status_t initialize(CameraModule *module); virtual status_t disconnect(); virtual status_t dump(int fd, const Vector<String16> &args); virtual const CameraMetadata& info() const; @@ -95,7 +95,8 @@ class Camera3Device : // If adding streams while actively capturing, will pause device before adding // stream, reconfiguring device, and unpausing. virtual status_t createStream(sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, int format, int *id); + uint32_t width, uint32_t height, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation, int *id); virtual status_t createInputStream( uint32_t width, uint32_t height, int format, int *id); diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp index 6656b09..01edfff 100644 --- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp @@ -28,7 +28,7 @@ namespace camera3 { Camera3DummyStream::Camera3DummyStream(int id) : Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, DUMMY_WIDTH, DUMMY_HEIGHT, - /*maxSize*/0, DUMMY_FORMAT) { + /*maxSize*/0, DUMMY_FORMAT, DUMMY_DATASPACE, DUMMY_ROTATION) { } diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h index 3e42623..d023c57 100644 --- a/services/camera/libcameraservice/device3/Camera3DummyStream.h +++ 
b/services/camera/libcameraservice/device3/Camera3DummyStream.h @@ -75,6 +75,8 @@ class Camera3DummyStream : static const int DUMMY_WIDTH = 320; static const int DUMMY_HEIGHT = 240; static const int DUMMY_FORMAT = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED; + static const android_dataspace DUMMY_DATASPACE = HAL_DATASPACE_UNKNOWN; + static const camera3_stream_rotation_t DUMMY_ROTATION = CAMERA3_STREAM_ROTATION_0; static const uint32_t DUMMY_USAGE = GRALLOC_USAGE_HW_COMPOSER; /** diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp index cc66459..8696413 100644 --- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp +++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp @@ -30,9 +30,10 @@ namespace android { namespace camera3 { Camera3IOStreamBase::Camera3IOStreamBase(int id, camera3_stream_type_t type, - uint32_t width, uint32_t height, size_t maxSize, int format) : + uint32_t width, uint32_t height, size_t maxSize, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation) : Camera3Stream(id, type, - width, height, maxSize, format), + width, height, maxSize, format, dataSpace, rotation), mTotalBufferCount(0), mHandoutTotalBufferCount(0), mHandoutOutputBufferCount(0), diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h index a35c290..abcf2b1 100644 --- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.h +++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h @@ -33,7 +33,8 @@ class Camera3IOStreamBase : public Camera3Stream { protected: Camera3IOStreamBase(int id, camera3_stream_type_t type, - uint32_t width, uint32_t height, size_t maxSize, int format); + uint32_t width, uint32_t height, size_t maxSize, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation); public: diff --git 
a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp index 319be1d..6bf671e 100644 --- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp @@ -18,6 +18,7 @@ #define ATRACE_TAG ATRACE_TAG_CAMERA //#define LOG_NDEBUG 0 +#include <gui/BufferItem.h> #include <utils/Log.h> #include <utils/Trace.h> #include "Camera3InputStream.h" @@ -28,8 +29,8 @@ namespace camera3 { Camera3InputStream::Camera3InputStream(int id, uint32_t width, uint32_t height, int format) : - Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, - /*maxSize*/0, format) { + Camera3IOStreamBase(id, CAMERA3_STREAM_INPUT, width, height, /*maxSize*/0, + format, HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0) { if (format == HAL_PIXEL_FORMAT_BLOB) { ALOGE("%s: Bad format, BLOB not supported", __FUNCTION__); diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h index ae49467..fd17f4f 100644 --- a/services/camera/libcameraservice/device3/Camera3InputStream.h +++ b/services/camera/libcameraservice/device3/Camera3InputStream.h @@ -48,8 +48,6 @@ class Camera3InputStream : public Camera3IOStreamBase { private: - typedef BufferItemConsumer::BufferItem BufferItem; - sp<BufferItemConsumer> mConsumer; Vector<BufferItem> mBuffersInFlight; diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp index 77ad503..0c739e9 100644 --- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp @@ -33,9 +33,10 @@ namespace camera3 { Camera3OutputStream::Camera3OutputStream(int id, sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, int format) : + uint32_t width, uint32_t height, int format, + 
android_dataspace dataSpace, camera3_stream_rotation_t rotation) : Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, - /*maxSize*/0, format), + /*maxSize*/0, format, dataSpace, rotation), mConsumer(consumer), mTransform(0), mTraceFirstBuffer(true) { @@ -48,9 +49,10 @@ Camera3OutputStream::Camera3OutputStream(int id, Camera3OutputStream::Camera3OutputStream(int id, sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, size_t maxSize, int format) : + uint32_t width, uint32_t height, size_t maxSize, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation) : Camera3IOStreamBase(id, CAMERA3_STREAM_OUTPUT, width, height, maxSize, - format), + format, dataSpace, rotation), mConsumer(consumer), mTransform(0), mTraceFirstBuffer(true) { @@ -69,10 +71,12 @@ Camera3OutputStream::Camera3OutputStream(int id, Camera3OutputStream::Camera3OutputStream(int id, camera3_stream_type_t type, uint32_t width, uint32_t height, - int format) : + int format, + android_dataspace dataSpace, + camera3_stream_rotation_t rotation) : Camera3IOStreamBase(id, type, width, height, /*maxSize*/0, - format), + format, dataSpace, rotation), mTransform(0) { // Subclasses expected to initialize mConsumer themselves @@ -153,33 +157,9 @@ status_t Camera3OutputStream::returnBufferCheckedLocked( ALOG_ASSERT(output, "Expected output to be true"); status_t res; - sp<Fence> releaseFence; - - /** - * Fence management - calculate Release Fence - */ - if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) { - if (buffer.release_fence != -1) { - ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when " - "there is an error", __FUNCTION__, mId, buffer.release_fence); - close(buffer.release_fence); - } - - /** - * Reassign release fence as the acquire fence in case of error - */ - releaseFence = new Fence(buffer.acquire_fence); - } else { - res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); - if (res != OK) { - ALOGE("%s: Stream %d: Error setting 
timestamp: %s (%d)", - __FUNCTION__, mId, strerror(-res), res); - return res; - } - - releaseFence = new Fence(buffer.release_fence); - } + // Fence management - always honor release fence from HAL + sp<Fence> releaseFence = new Fence(buffer.release_fence); int anwReleaseFence = releaseFence->dup(); /** @@ -213,6 +193,13 @@ status_t Camera3OutputStream::returnBufferCheckedLocked( mTraceFirstBuffer = false; } + res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp); + if (res != OK) { + ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)", + __FUNCTION__, mId, strerror(-res), res); + return res; + } + res = currentConsumer->queueBuffer(currentConsumer.get(), container_of(buffer.buffer, ANativeWindowBuffer, handle), anwReleaseFence); @@ -323,6 +310,14 @@ status_t Camera3OutputStream::configureQueueLocked() { return res; } + res = native_window_set_buffers_data_space(mConsumer.get(), + camera3_stream::data_space); + if (res != OK) { + ALOGE("%s: Unable to configure stream dataspace %#x for stream %d", + __FUNCTION__, camera3_stream::data_space, mId); + return res; + } + int maxConsumerBuffers; res = mConsumer->query(mConsumer.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers); diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h index be278c5..12b2ebb 100644 --- a/services/camera/libcameraservice/device3/Camera3OutputStream.h +++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h @@ -39,14 +39,16 @@ class Camera3OutputStream : * Set up a stream for formats that have 2 dimensions, such as RAW and YUV. */ Camera3OutputStream(int id, sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, int format); + uint32_t width, uint32_t height, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation); /** * Set up a stream for formats that have a variable buffer size for the same * dimensions, such as compressed JPEG. 
*/ Camera3OutputStream(int id, sp<ANativeWindow> consumer, - uint32_t width, uint32_t height, size_t maxSize, int format); + uint32_t width, uint32_t height, size_t maxSize, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation); virtual ~Camera3OutputStream(); @@ -64,7 +66,8 @@ class Camera3OutputStream : protected: Camera3OutputStream(int id, camera3_stream_type_t type, - uint32_t width, uint32_t height, int format); + uint32_t width, uint32_t height, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation); /** * Note that we release the lock briefly in this function diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp index 3c0e908..4acbce3 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.cpp +++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp @@ -46,7 +46,8 @@ const Camera3Stream* Camera3Stream::cast(const camera3_stream *stream) { Camera3Stream::Camera3Stream(int id, camera3_stream_type type, - uint32_t width, uint32_t height, size_t maxSize, int format) : + uint32_t width, uint32_t height, size_t maxSize, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation) : camera3_stream(), mId(id), mName(String8::format("Camera3Stream[%d]", id)), @@ -58,6 +59,8 @@ Camera3Stream::Camera3Stream(int id, camera3_stream::width = width; camera3_stream::height = height; camera3_stream::format = format; + camera3_stream::data_space = dataSpace; + camera3_stream::rotation = rotation; camera3_stream::usage = 0; camera3_stream::max_buffers = 0; camera3_stream::priv = NULL; @@ -84,6 +87,10 @@ int Camera3Stream::getFormat() const { return camera3_stream::format; } +android_dataspace Camera3Stream::getDataSpace() const { + return camera3_stream::data_space; +} + camera3_stream* Camera3Stream::startConfiguration() { ATRACE_CALL(); Mutex::Autolock l(mLock); diff --git 
a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h index d0e1337..aba27fe 100644 --- a/services/camera/libcameraservice/device3/Camera3Stream.h +++ b/services/camera/libcameraservice/device3/Camera3Stream.h @@ -119,9 +119,10 @@ class Camera3Stream : /** * Get the stream's dimensions and format */ - uint32_t getWidth() const; - uint32_t getHeight() const; - int getFormat() const; + uint32_t getWidth() const; + uint32_t getHeight() const; + int getFormat() const; + android_dataspace getDataSpace() const; /** * Start the stream configuration process. Returns a handle to the stream's @@ -264,7 +265,8 @@ class Camera3Stream : mutable Mutex mLock; Camera3Stream(int id, camera3_stream_type type, - uint32_t width, uint32_t height, size_t maxSize, int format); + uint32_t width, uint32_t height, size_t maxSize, int format, + android_dataspace dataSpace, camera3_stream_rotation_t rotation); /** * Interface to be implemented by derived classes diff --git a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp index 81330ea..10d7f2e 100644 --- a/services/camera/libcameraservice/device3/Camera3ZslStream.cpp +++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp @@ -114,7 +114,8 @@ Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height, int bufferCount) : Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL, width, height, - HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED), + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, + HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0), mDepth(bufferCount) { sp<IGraphicBufferProducer> producer; diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp index d0f29de..8cd6800 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp 
@@ -268,7 +268,7 @@ status_t RingBufferConsumer::releaseOldestBufferLocked(size_t* pinnedFrames) { return OK; } -void RingBufferConsumer::onFrameAvailable(const android::BufferItem& item) { +void RingBufferConsumer::onFrameAvailable(const BufferItem& item) { status_t err; { diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h index 90fd734..83e7298 100644 --- a/services/camera/libcameraservice/gui/RingBufferConsumer.h +++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h @@ -17,6 +17,7 @@ #ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H #define ANDROID_GUI_RINGBUFFERCONSUMER_H +#include <gui/BufferItem.h> #include <gui/ConsumerBase.h> #include <ui/GraphicBuffer.h> @@ -54,8 +55,6 @@ class RingBufferConsumer : public ConsumerBase, public: typedef ConsumerBase::FrameAvailableListener FrameAvailableListener; - typedef BufferQueue::BufferItem BufferItem; - enum { INVALID_BUFFER_SLOT = BufferQueue::INVALID_BUFFER_SLOT }; enum { NO_BUFFER_AVAILABLE = BufferQueue::NO_BUFFER_AVAILABLE }; @@ -165,7 +164,7 @@ class RingBufferConsumer : public ConsumerBase, private: // Override ConsumerBase::onFrameAvailable - virtual void onFrameAvailable(const android::BufferItem& item); + virtual void onFrameAvailable(const BufferItem& item); void pinBufferLocked(const BufferItem& item); void unpinBuffer(const BufferItem& item); diff --git a/services/camera/libcameraservice/utils/AutoConditionLock.cpp b/services/camera/libcameraservice/utils/AutoConditionLock.cpp new file mode 100644 index 0000000..c8ee965 --- /dev/null +++ b/services/camera/libcameraservice/utils/AutoConditionLock.cpp @@ -0,0 +1,90 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "AutoConditionLock.h" + +namespace android { + +WaitableMutexWrapper::WaitableMutexWrapper(Mutex* mutex) : mMutex{mutex}, mState{false} {} + +WaitableMutexWrapper::~WaitableMutexWrapper() {} + +// Locks manager-owned mutex +AutoConditionLock::AutoConditionLock(const std::shared_ptr<WaitableMutexWrapper>& manager) : + mManager{manager}, mAutoLock{manager->mMutex} {} + +// Unlocks manager-owned mutex +AutoConditionLock::~AutoConditionLock() { + // Unset the condition and wake everyone up before releasing lock + mManager->mState = false; + mManager->mCondition.broadcast(); +} + +std::unique_ptr<AutoConditionLock> AutoConditionLock::waitAndAcquire( + const std::shared_ptr<WaitableMutexWrapper>& manager, nsecs_t waitTime) { + + if (manager == nullptr || manager->mMutex == nullptr) { + // Bad input, return null + return std::unique_ptr<AutoConditionLock>{nullptr}; + } + + // Acquire scoped lock + std::unique_ptr<AutoConditionLock> scopedLock(new AutoConditionLock(manager)); + + // Figure out what time in the future we should hit the timeout + nsecs_t failTime = systemTime(SYSTEM_TIME_MONOTONIC) + waitTime; + + // Wait until we timeout, or success + while(manager->mState) { + status_t ret = manager->mCondition.waitRelative(*(manager->mMutex), waitTime); + if (ret != NO_ERROR) { + // Timed out or whatever, return null + return std::unique_ptr<AutoConditionLock>{nullptr}; + } + waitTime = failTime - systemTime(SYSTEM_TIME_MONOTONIC); + } + + // Set the condition and return + manager->mState = true; + return scopedLock; +} + 
+std::unique_ptr<AutoConditionLock> AutoConditionLock::waitAndAcquire( + const std::shared_ptr<WaitableMutexWrapper>& manager) { + + if (manager == nullptr || manager->mMutex == nullptr) { + // Bad input, return null + return std::unique_ptr<AutoConditionLock>{nullptr}; + } + + // Acquire scoped lock + std::unique_ptr<AutoConditionLock> scopedLock(new AutoConditionLock(manager)); + + // Wait until we timeout, or success + while(manager->mState) { + status_t ret = manager->mCondition.wait(*(manager->mMutex)); + if (ret != NO_ERROR) { + // Timed out or whatever, return null + return std::unique_ptr<AutoConditionLock>{nullptr}; + } + } + + // Set the condition and return + manager->mState = true; + return scopedLock; +} + +}; // namespace android diff --git a/services/camera/libcameraservice/utils/AutoConditionLock.h b/services/camera/libcameraservice/utils/AutoConditionLock.h new file mode 100644 index 0000000..9a3eafc --- /dev/null +++ b/services/camera/libcameraservice/utils/AutoConditionLock.h @@ -0,0 +1,99 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +#ifndef ANDROID_SERVICE_UTILS_SCOPED_CONDITION_H +#define ANDROID_SERVICE_UTILS_SCOPED_CONDITION_H + +#include <utils/Timers.h> +#include <utils/Condition.h> +#include <utils/Errors.h> +#include <utils/Mutex.h> + +#include <memory> + +namespace android { + +/** + * WaitableMutexWrapper can be used with AutoConditionLock to construct scoped locks for the + * wrapped Mutex with timeouts for lock acquisition. + */ +class WaitableMutexWrapper { + friend class AutoConditionLock; +public: + /** + * Construct the ConditionManger with the given Mutex. + */ + WaitableMutexWrapper(Mutex* mutex); + + virtual ~WaitableMutexWrapper(); +private: + Mutex* mMutex; + bool mState; + Condition mCondition; +}; + +/** + * AutoConditionLock is a scoped lock similar to Mutex::Autolock, but allows timeouts to be + * specified for lock acquisition. + * + * AutoConditionLock is used with a WaitableMutexWrapper to lock/unlock the WaitableMutexWrapper's + * wrapped Mutex, and wait/set/signal the WaitableMutexWrapper's wrapped condition. To use this, + * call AutoConditionLock::waitAndAcquire to get an instance. This will: + * - Lock the given WaitableMutexWrapper's mutex. + * - Wait for the WaitableMutexWrapper's condition to become false, or timeout. + * - Set the WaitableMutexWrapper's condition to true. + * + * When the AutoConditionLock goes out of scope and is destroyed, this will: + * - Set the WaitableMutexWrapper's condition to false. + * - Signal threads waiting on this condition to wakeup. + * - Release WaitableMutexWrapper's mutex. + */ +class AutoConditionLock final { +public: + AutoConditionLock() = delete; + AutoConditionLock(const AutoConditionLock& other) = delete; + AutoConditionLock & operator=(const AutoConditionLock&) = delete; + + ~AutoConditionLock(); + + /** + * Make a new AutoConditionLock from a given WaitableMutexWrapper, waiting up to waitTime + * nanoseconds to acquire the WaitableMutexWrapper's wrapped lock. 
+ * + * Return an empty unique_ptr if this fails, or a timeout occurs. + */ + static std::unique_ptr<AutoConditionLock> waitAndAcquire( + const std::shared_ptr<WaitableMutexWrapper>& manager, nsecs_t waitTime); + + /** + * Make a new AutoConditionLock from a given WaitableMutexWrapper, waiting indefinitely to + * acquire the WaitableMutexWrapper's wrapped lock. + * + * Return an empty unique_ptr if this fails. + */ + static std::unique_ptr<AutoConditionLock> waitAndAcquire( + const std::shared_ptr<WaitableMutexWrapper>& manager); +private: + AutoConditionLock(const std::shared_ptr<WaitableMutexWrapper>& manager); + + std::shared_ptr<WaitableMutexWrapper> mManager; + Mutex::Autolock mAutoLock; +}; + +}; // namespace android + +#endif // ANDROID_SERVICE_UTILS_SCOPED_CONDITION_H diff --git a/services/camera/libcameraservice/utils/ClientManager.h b/services/camera/libcameraservice/utils/ClientManager.h new file mode 100644 index 0000000..ad5486d --- /dev/null +++ b/services/camera/libcameraservice/utils/ClientManager.h @@ -0,0 +1,539 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_SERVICE_UTILS_EVICTION_POLICY_MANAGER_H +#define ANDROID_SERVICE_UTILS_EVICTION_POLICY_MANAGER_H + +#include <utils/Mutex.h> + +#include <algorithm> +#include <utility> +#include <vector> +#include <set> +#include <map> +#include <memory> + +namespace android { +namespace resource_policy { + +// -------------------------------------------------------------------------------- + +/** + * The ClientDescriptor class is a container for a given key/value pair identifying a shared + * resource, and the corresponding cost, priority, owner ID, and conflicting keys list used + * in determining eviction behavior. + * + * Aside from the priority, these values are immutable once the ClientDescriptor has been + * constructed. + */ +template<class KEY, class VALUE> +class ClientDescriptor final { +public: + ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost, + const std::set<KEY>& conflictingKeys, int32_t priority, int32_t ownerId); + ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost, std::set<KEY>&& conflictingKeys, + int32_t priority, int32_t ownerId); + + ~ClientDescriptor(); + + /** + * Return the key for this descriptor. + */ + const KEY& getKey() const; + + /** + * Return the value for this descriptor. + */ + const VALUE& getValue() const; + + /** + * Return the cost for this descriptor. + */ + int32_t getCost() const; + + /** + * Return the priority for this descriptor. + */ + int32_t getPriority() const; + + /** + * Return the owner ID for this descriptor. + */ + int32_t getOwnerId() const; + + /** + * Return true if the given key is in this descriptor's conflicting keys list. + */ + bool isConflicting(const KEY& key) const; + + /** + * Return the set of all conflicting keys for this descriptor. + */ + std::set<KEY> getConflicting() const; + + /** + * Set the proirity for this descriptor. 
+ */ + void setPriority(int32_t priority); + + // This class is ordered by key + template<class K, class V> + friend bool operator < (const ClientDescriptor<K, V>& a, const ClientDescriptor<K, V>& b); + +private: + KEY mKey; + VALUE mValue; + int32_t mCost; + std::set<KEY> mConflicting; + int32_t mPriority; + int32_t mOwnerId; +}; // class ClientDescriptor + +template<class K, class V> +bool operator < (const ClientDescriptor<K, V>& a, const ClientDescriptor<K, V>& b) { + return a.mKey < b.mKey; +} + +template<class KEY, class VALUE> +ClientDescriptor<KEY, VALUE>::ClientDescriptor(const KEY& key, const VALUE& value, int32_t cost, + const std::set<KEY>& conflictingKeys, int32_t priority, int32_t ownerId) : mKey{key}, + mValue{value}, mCost{cost}, mConflicting{conflictingKeys}, mPriority{priority}, + mOwnerId{ownerId} {} + +template<class KEY, class VALUE> +ClientDescriptor<KEY, VALUE>::ClientDescriptor(KEY&& key, VALUE&& value, int32_t cost, + std::set<KEY>&& conflictingKeys, int32_t priority, int32_t ownerId) : + mKey{std::forward<KEY>(key)}, mValue{std::forward<VALUE>(value)}, mCost{cost}, + mConflicting{std::forward<std::set<KEY>>(conflictingKeys)}, mPriority{priority}, + mOwnerId{ownerId} {} + +template<class KEY, class VALUE> +ClientDescriptor<KEY, VALUE>::~ClientDescriptor() {} + +template<class KEY, class VALUE> +const KEY& ClientDescriptor<KEY, VALUE>::getKey() const { + return mKey; +} + +template<class KEY, class VALUE> +const VALUE& ClientDescriptor<KEY, VALUE>::getValue() const { + return mValue; +} + +template<class KEY, class VALUE> +int32_t ClientDescriptor<KEY, VALUE>::getCost() const { + return mCost; +} + +template<class KEY, class VALUE> +int32_t ClientDescriptor<KEY, VALUE>::getPriority() const { + return mPriority; +} + +template<class KEY, class VALUE> +int32_t ClientDescriptor<KEY, VALUE>::getOwnerId() const { + return mOwnerId; +} + +template<class KEY, class VALUE> +bool ClientDescriptor<KEY, VALUE>::isConflicting(const KEY& key) const { + 
if (key == mKey) return true; + for (const auto& x : mConflicting) { + if (key == x) return true; + } + return false; +} + +template<class KEY, class VALUE> +std::set<KEY> ClientDescriptor<KEY, VALUE>::getConflicting() const { + return mConflicting; +} + +template<class KEY, class VALUE> +void ClientDescriptor<KEY, VALUE>::setPriority(int32_t priority) { + mPriority = priority; +} + +// -------------------------------------------------------------------------------- + +/** + * The ClientManager class wraps an LRU-ordered list of active clients and implements eviction + * behavior for handling shared resource access. + * + * When adding a new descriptor, eviction behavior is as follows: + * - Keys are unique, adding a descriptor with the same key as an existing descriptor will + * result in the lower-priority of the two being removed. Priority ties result in the + * LRU descriptor being evicted (this means the incoming descriptor be added in this case). + * - Any descriptors with keys that are in the incoming descriptor's 'conflicting keys' list + * will be removed if they have an equal or lower priority than the incoming descriptor; + * if any have a higher priority, the incoming descriptor is removed instead. + * - If the sum of all descriptors' costs, including the incoming descriptor's, is more than + * the max cost allowed for this ClientManager, descriptors with non-zero cost, equal or lower + * priority, and a different owner will be evicted in LRU order until either the cost is less + * than the max cost, or all descriptors meeting this criteria have been evicted and the + * incoming descriptor has the highest priority. Otherwise, the incoming descriptor is + * removed instead. 
+ */ +template<class KEY, class VALUE> +class ClientManager { +public: + // The default maximum "cost" allowed before evicting + static constexpr int32_t DEFAULT_MAX_COST = 100; + + ClientManager(); + ClientManager(int32_t totalCost); + + /** + * Add a given ClientDescriptor to the managed list. ClientDescriptors for clients that + * are evicted by this action are returned in a vector. + * + * This may return the ClientDescriptor passed in if it would be evicted. + */ + std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> addAndEvict( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client); + + /** + * Given a map containing owner (pid) -> priority mappings, update the priority of each + * ClientDescriptor with an owner in this mapping. + */ + void updatePriorities(const std::map<int32_t,int32_t>& ownerPriorityList); + + /** + * Remove all ClientDescriptors. + */ + void removeAll(); + + /** + * Remove and return the ClientDescriptor with a given key. + */ + std::shared_ptr<ClientDescriptor<KEY, VALUE>> remove(const KEY& key); + + /** + * Remove the given ClientDescriptor. + */ + void remove(const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& value); + + /** + * Return a vector of the ClientDescriptors that would be evicted by adding the given + * ClientDescriptor. + * + * This may return the ClientDescriptor passed in if it would be evicted. + */ + std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> wouldEvict( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) const; + + /** + * Return a vector of active ClientDescriptors that prevent this client from being added. + */ + std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> getIncompatibleClients( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) const; + + /** + * Return a vector containing all currently active ClientDescriptors. 
+ */ + std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> getAll() const; + + /** + * Return a vector containing all keys of currently active ClientDescriptors. + */ + std::vector<KEY> getAllKeys() const; + + /** + * Return a vector of the owner tags of all currently active ClientDescriptors (duplicates + * will be removed). + */ + std::vector<int32_t> getAllOwners() const; + + /** + * Return the ClientDescriptor corresponding to the given key, or an empty shared pointer + * if none exists. + */ + std::shared_ptr<ClientDescriptor<KEY, VALUE>> get(const KEY& key) const; + +protected: + ~ClientManager(); + +private: + + /** + * Return a vector of the ClientDescriptors that would be evicted by adding the given + * ClientDescriptor. If returnIncompatibleClients is set to true, instead, return the + * vector of ClientDescriptors that are higher priority than the incoming client and + * either conflict with this client, or contribute to the resource cost if that would + * prevent the incoming client from being added. + * + * This may return the ClientDescriptor passed in. 
+ */ + std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> wouldEvictLocked( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client, + bool returnIncompatibleClients = false) const; + + int64_t getCurrentCostLocked() const; + + mutable Mutex mLock; + int32_t mMaxCost; + // LRU ordered, most recent at end + std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> mClients; +}; // class ClientManager + +template<class KEY, class VALUE> +ClientManager<KEY, VALUE>::ClientManager() : + ClientManager(DEFAULT_MAX_COST) {} + +template<class KEY, class VALUE> +ClientManager<KEY, VALUE>::ClientManager(int32_t totalCost) : mMaxCost(totalCost) {} + +template<class KEY, class VALUE> +ClientManager<KEY, VALUE>::~ClientManager() {} + +template<class KEY, class VALUE> +std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> ClientManager<KEY, VALUE>::wouldEvict( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) const { + Mutex::Autolock lock(mLock); + return wouldEvictLocked(client); +} + +template<class KEY, class VALUE> +std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> +ClientManager<KEY, VALUE>::getIncompatibleClients( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) const { + Mutex::Autolock lock(mLock); + return wouldEvictLocked(client, /*returnIncompatibleClients*/true); +} + +template<class KEY, class VALUE> +std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> +ClientManager<KEY, VALUE>::wouldEvictLocked( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client, + bool returnIncompatibleClients) const { + + std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> evictList; + + // Disallow null clients, return input + if (client == nullptr) { + evictList.push_back(client); + return evictList; + } + + const KEY& key = client->getKey(); + int32_t cost = client->getCost(); + int32_t priority = client->getPriority(); + int32_t owner = client->getOwnerId(); + + int64_t totalCost = getCurrentCostLocked() + 
cost; + + // Determine the MRU of the owners tied for having the highest priority + int32_t highestPriorityOwner = owner; + int32_t highestPriority = priority; + for (const auto& i : mClients) { + int32_t curPriority = i->getPriority(); + if (curPriority >= highestPriority) { + highestPriority = curPriority; + highestPriorityOwner = i->getOwnerId(); + } + } + + if (highestPriority == priority) { + // Switch back owner if the incoming client has the highest priority, as it is MRU + highestPriorityOwner = owner; + } + + // Build eviction list of clients to remove + for (const auto& i : mClients) { + const KEY& curKey = i->getKey(); + int32_t curCost = i->getCost(); + int32_t curPriority = i->getPriority(); + int32_t curOwner = i->getOwnerId(); + + bool conflicting = (curKey == key || i->isConflicting(key) || + client->isConflicting(curKey)); + + if (!returnIncompatibleClients) { + // Find evicted clients + + if (conflicting && curPriority > priority) { + // Pre-existing conflicting client with higher priority exists + evictList.clear(); + evictList.push_back(client); + return evictList; + } else if (conflicting || ((totalCost > mMaxCost && curCost > 0) && + (curPriority <= priority) && + !(highestPriorityOwner == owner && owner == curOwner))) { + // Add a pre-existing client to the eviction list if: + // - We are adding a client with higher priority that conflicts with this one. + // - The total cost including the incoming client's is more than the allowable + // maximum, and the client has a non-zero cost, lower priority, and a different + // owner than the incoming client when the incoming client has the + // highest priority. 
+ evictList.push_back(i); + totalCost -= curCost; + } + } else { + // Find clients preventing the incoming client from being added + + if (curPriority > priority && (conflicting || (totalCost > mMaxCost && curCost > 0))) { + // Pre-existing conflicting client with higher priority exists + evictList.push_back(i); + } + } + } + + // Immediately return the incompatible clients if we are calculating these instead + if (returnIncompatibleClients) { + return evictList; + } + + // If the total cost is too high, return the input unless the input has the highest priority + if (totalCost > mMaxCost && highestPriorityOwner != owner) { + evictList.clear(); + evictList.push_back(client); + return evictList; + } + + return evictList; + +} + +template<class KEY, class VALUE> +std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> ClientManager<KEY, VALUE>::addAndEvict( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& client) { + Mutex::Autolock lock(mLock); + auto evicted = wouldEvictLocked(client); + auto it = evicted.begin(); + if (it != evicted.end() && *it == client) { + return evicted; + } + + auto iter = evicted.cbegin(); + + // Remove evicted clients from list + mClients.erase(std::remove_if(mClients.begin(), mClients.end(), + [&iter] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) { + if (curClientPtr->getKey() == (*iter)->getKey()) { + iter++; + return true; + } + return false; + }), mClients.end()); + + mClients.push_back(client); + + return evicted; +} + +template<class KEY, class VALUE> +std::vector<std::shared_ptr<ClientDescriptor<KEY, VALUE>>> +ClientManager<KEY, VALUE>::getAll() const { + Mutex::Autolock lock(mLock); + return mClients; +} + +template<class KEY, class VALUE> +std::vector<KEY> ClientManager<KEY, VALUE>::getAllKeys() const { + Mutex::Autolock lock(mLock); + std::vector<KEY> keys(mClients.size()); + for (const auto& i : mClients) { + keys.push_back(i->getKey()); + } + return keys; +} + +template<class KEY, class VALUE> 
+std::vector<int32_t> ClientManager<KEY, VALUE>::getAllOwners() const { + Mutex::Autolock lock(mLock); + std::set<int32_t> owners; + for (const auto& i : mClients) { + owners.emplace(i->getOwnerId()); + } + return std::vector<int32_t>(owners.begin(), owners.end()); +} + +template<class KEY, class VALUE> +void ClientManager<KEY, VALUE>::updatePriorities( + const std::map<int32_t,int32_t>& ownerPriorityList) { + Mutex::Autolock lock(mLock); + for (auto& i : mClients) { + auto j = ownerPriorityList.find(i->getOwnerId()); + if (j != ownerPriorityList.end()) { + i->setPriority(j->second); + } + } +} + +template<class KEY, class VALUE> +std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE>::get( + const KEY& key) const { + Mutex::Autolock lock(mLock); + for (const auto& i : mClients) { + if (i->getKey() == key) return i; + } + return std::shared_ptr<ClientDescriptor<KEY, VALUE>>(nullptr); +} + +template<class KEY, class VALUE> +void ClientManager<KEY, VALUE>::removeAll() { + Mutex::Autolock lock(mLock); + mClients.clear(); +} + +template<class KEY, class VALUE> +std::shared_ptr<ClientDescriptor<KEY, VALUE>> ClientManager<KEY, VALUE>::remove(const KEY& key) { + Mutex::Autolock lock(mLock); + + std::shared_ptr<ClientDescriptor<KEY, VALUE>> ret; + + // Remove evicted clients from list + mClients.erase(std::remove_if(mClients.begin(), mClients.end(), + [&key, &ret] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) { + if (curClientPtr->getKey() == key) { + ret = curClientPtr; + return true; + } + return false; + }), mClients.end()); + + return ret; +} + +template<class KEY, class VALUE> +void ClientManager<KEY, VALUE>::remove( + const std::shared_ptr<ClientDescriptor<KEY, VALUE>>& value) { + Mutex::Autolock lock(mLock); + // Remove evicted clients from list + mClients.erase(std::remove_if(mClients.begin(), mClients.end(), + [&value] (std::shared_ptr<ClientDescriptor<KEY, VALUE>>& curClientPtr) { + if (curClientPtr == value) { + return true; + } 
+ return false; + }), mClients.end()); +} + +template<class KEY, class VALUE> +int64_t ClientManager<KEY, VALUE>::getCurrentCostLocked() const { + int64_t totalCost = 0; + for (const auto& x : mClients) { + totalCost += x->getCost(); + } + return totalCost; +} + +// -------------------------------------------------------------------------------- + +}; // namespace resource_policy +}; // namespace android + +#endif // ANDROID_SERVICE_UTILS_EVICTION_POLICY_MANAGER_H diff --git a/services/camera/libcameraservice/utils/RingBuffer.h b/services/camera/libcameraservice/utils/RingBuffer.h new file mode 100644 index 0000000..df7c00e --- /dev/null +++ b/services/camera/libcameraservice/utils/RingBuffer.h @@ -0,0 +1,361 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#ifndef ANDROID_SERVICE_UTILS_RING_BUFFER_H +#define ANDROID_SERVICE_UTILS_RING_BUFFER_H + +#include <utils/Log.h> +#include <cutils/compiler.h> + +#include <iterator> +#include <utility> +#include <vector> + +namespace android { + +/** + * A RingBuffer class that maintains an array of objects that can grow up to a certain size. + * Elements added to the RingBuffer are inserted in the logical front of the buffer, and + * invalidate all current iterators for that RingBuffer object. + */ +template <class T> +class RingBuffer final { +public: + + /** + * Construct a RingBuffer that can grow up to the given length. 
+ */ + RingBuffer(size_t length); + + /** + * Forward iterator to this class. Implements an std:forward_iterator. + */ + class iterator : public std::iterator<std::forward_iterator_tag, T> { + public: + iterator(T* ptr, size_t size, size_t pos, size_t ctr); + + iterator& operator++(); + + iterator operator++(int); + + bool operator==(const iterator& rhs); + + bool operator!=(const iterator& rhs); + + T& operator*(); + + T* operator->(); + + private: + T* mPtr; + size_t mSize; + size_t mPos; + size_t mCtr; + }; + + /** + * Constant forward iterator to this class. Implements an std:forward_iterator. + */ + class const_iterator : public std::iterator<std::forward_iterator_tag, T> { + public: + const_iterator(const T* ptr, size_t size, size_t pos, size_t ctr); + + const_iterator& operator++(); + + const_iterator operator++(int); + + bool operator==(const const_iterator& rhs); + + bool operator!=(const const_iterator& rhs); + + const T& operator*(); + + const T* operator->(); + + private: + const T* mPtr; + size_t mSize; + size_t mPos; + size_t mCtr; + }; + + /** + * Adds item to the front of this RingBuffer. If the RingBuffer is at its maximum length, + * this will result in the last element being replaced (this is done using the element's + * assignment operator). + * + * All current iterators are invalidated. + */ + void add(const T& item); + + /** + * Moves item to the front of this RingBuffer. Following a call to this, item should no + * longer be used. If the RingBuffer is at its maximum length, this will result in the + * last element being replaced (this is done using the element's assignment operator). + * + * All current iterators are invalidated. + */ + void add(T&& item); + + /** + * Construct item in-place in the front of this RingBuffer using the given arguments. If + * the RingBuffer is at its maximum length, this will result in the last element being + * replaced (this is done using the element's assignment operator). 
+ * + * All current iterators are invalidated. + */ + template <class... Args> + void emplace(Args&&... args); + + /** + * Get an iterator to the front of this RingBuffer. + */ + iterator begin(); + + /** + * Get an iterator to the end of this RingBuffer. + */ + iterator end(); + + /** + * Get a const_iterator to the front of this RingBuffer. + */ + const_iterator begin() const; + + /** + * Get a const_iterator to the end of this RingBuffer. + */ + const_iterator end() const; + + /** + * Return a reference to the element at a given index. If the index is out of range for + * this ringbuffer, [0, size), the behavior for this is undefined. + */ + T& operator[](size_t index); + + /** + * Return a const reference to the element at a given index. If the index is out of range + * for this ringbuffer, [0, size), the behavior for this is undefined. + */ + const T& operator[](size_t index) const; + + /** + * Return the current size of this RingBuffer. + */ + size_t size() const; + + /** + * Remove all elements from this RingBuffer and set the size to 0. + */ + void clear(); + +private: + size_t mFrontIdx; + size_t mMaxBufferSize; + std::vector<T> mBuffer; +}; // class RingBuffer + + +template <class T> +RingBuffer<T>::RingBuffer(size_t length) : mFrontIdx{0}, mMaxBufferSize{length} {} + +template <class T> +RingBuffer<T>::iterator::iterator(T* ptr, size_t size, size_t pos, size_t ctr) : + mPtr{ptr}, mSize{size}, mPos{pos}, mCtr{ctr} {} + +template <class T> +typename RingBuffer<T>::iterator& RingBuffer<T>::iterator::operator++() { + ++mCtr; + + if (CC_UNLIKELY(mCtr == mSize)) { + mPos = mSize; + return *this; + } + + mPos = ((CC_UNLIKELY(mPos == 0)) ? 
mSize - 1 : mPos - 1); + return *this; +} + +template <class T> +typename RingBuffer<T>::iterator RingBuffer<T>::iterator::operator++(int) { + iterator tmp{mPtr, mSize, mPos, mCtr}; + ++(*this); + return tmp; +} + +template <class T> +bool RingBuffer<T>::iterator::operator==(const iterator& rhs) { + return (mPtr + mPos) == (rhs.mPtr + rhs.mPos); +} + +template <class T> +bool RingBuffer<T>::iterator::operator!=(const iterator& rhs) { + return (mPtr + mPos) != (rhs.mPtr + rhs.mPos); +} + +template <class T> +T& RingBuffer<T>::iterator::operator*() { + return *(mPtr + mPos); +} + +template <class T> +T* RingBuffer<T>::iterator::operator->() { + return mPtr + mPos; +} + +template <class T> +RingBuffer<T>::const_iterator::const_iterator(const T* ptr, size_t size, size_t pos, size_t ctr) : + mPtr{ptr}, mSize{size}, mPos{pos}, mCtr{ctr} {} + +template <class T> +typename RingBuffer<T>::const_iterator& RingBuffer<T>::const_iterator::operator++() { + ++mCtr; + + if (CC_UNLIKELY(mCtr == mSize)) { + mPos = mSize; + return *this; + } + + mPos = ((CC_UNLIKELY(mPos == 0)) ? 
mSize - 1 : mPos - 1); + return *this; +} + +template <class T> +typename RingBuffer<T>::const_iterator RingBuffer<T>::const_iterator::operator++(int) { + const_iterator tmp{mPtr, mSize, mPos, mCtr}; + ++(*this); + return tmp; +} + +template <class T> +bool RingBuffer<T>::const_iterator::operator==(const const_iterator& rhs) { + return (mPtr + mPos) == (rhs.mPtr + rhs.mPos); +} + +template <class T> +bool RingBuffer<T>::const_iterator::operator!=(const const_iterator& rhs) { + return (mPtr + mPos) != (rhs.mPtr + rhs.mPos); +} + +template <class T> +const T& RingBuffer<T>::const_iterator::operator*() { + return *(mPtr + mPos); +} + +template <class T> +const T* RingBuffer<T>::const_iterator::operator->() { + return mPtr + mPos; +} + +template <class T> +void RingBuffer<T>::add(const T& item) { + if (mBuffer.size() < mMaxBufferSize) { + mBuffer.push_back(item); + mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize); + return; + } + + mBuffer[mFrontIdx] = item; + mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize); +} + +template <class T> +void RingBuffer<T>::add(T&& item) { + if (mBuffer.size() != mMaxBufferSize) { + mBuffer.push_back(std::forward<T>(item)); + mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize); + return; + } + + // Only works for types with move assignment operator + mBuffer[mFrontIdx] = std::forward<T>(item); + mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize); +} + +template <class T> +template <class... Args> +void RingBuffer<T>::emplace(Args&&... args) { + if (mBuffer.size() != mMaxBufferSize) { + mBuffer.emplace_back(std::forward<Args>(args)...); + mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize); + return; + } + + // Only works for types with move assignment operator + mBuffer[mFrontIdx] = T(std::forward<Args>(args)...); + mFrontIdx = ((mFrontIdx + 1) % mMaxBufferSize); +} + +template <class T> +typename RingBuffer<T>::iterator RingBuffer<T>::begin() { + size_t tmp = (mBuffer.size() == 0) ? 
0 : mBuffer.size() - 1; + return iterator(mBuffer.data(), mBuffer.size(), (mFrontIdx == 0) ? tmp : mFrontIdx - 1, 0); +} + +template <class T> +typename RingBuffer<T>::iterator RingBuffer<T>::end() { + size_t s = mBuffer.size(); + return iterator(mBuffer.data(), s, s, s); +} + +template <class T> +typename RingBuffer<T>::const_iterator RingBuffer<T>::begin() const { + size_t tmp = (mBuffer.size() == 0) ? 0 : mBuffer.size() - 1; + return const_iterator(mBuffer.data(), mBuffer.size(), + (mFrontIdx == 0) ? tmp : mFrontIdx - 1, 0); +} + +template <class T> +typename RingBuffer<T>::const_iterator RingBuffer<T>::end() const { + size_t s = mBuffer.size(); + return const_iterator(mBuffer.data(), s, s, s); +} + +template <class T> +T& RingBuffer<T>::operator[](size_t index) { + LOG_ALWAYS_FATAL_IF(index >= mBuffer.size(), "Index %zu out of bounds, size is %zu.", + index, mBuffer.size()); + size_t pos = (index >= mFrontIdx) ? + mBuffer.size() - 1 - (index - mFrontIdx) : mFrontIdx - 1 - index; + return mBuffer[pos]; +} + +template <class T> +const T& RingBuffer<T>::operator[](size_t index) const { + LOG_ALWAYS_FATAL_IF(index >= mBuffer.size(), "Index %zu out of bounds, size is %zu.", + index, mBuffer.size()); + size_t pos = (index >= mFrontIdx) ? 
+ mBuffer.size() - 1 - (index - mFrontIdx) : mFrontIdx - 1 - index; + return mBuffer[pos]; +} + +template <class T> +size_t RingBuffer<T>::size() const { + return mBuffer.size(); +} + +template <class T> +void RingBuffer<T>::clear() { + mBuffer.clear(); + mFrontIdx = 0; +} + +}; // namespace android + +#endif // ANDROID_SERVICE_UTILS_RING_BUFFER_H + + diff --git a/services/medialog/Android.mk b/services/medialog/Android.mk index 95f2fef..03438bf 100644 --- a/services/medialog/Android.mk +++ b/services/medialog/Android.mk @@ -10,4 +10,6 @@ LOCAL_MODULE:= libmedialogservice LOCAL_32_BIT_ONLY := true +LOCAL_C_INCLUDES := $(call include-path-for, audio-utils) + include $(BUILD_SHARED_LIBRARY) diff --git a/services/mediaresourcemanager/Android.mk b/services/mediaresourcemanager/Android.mk new file mode 100644 index 0000000..84218cf --- /dev/null +++ b/services/mediaresourcemanager/Android.mk @@ -0,0 +1,18 @@ +LOCAL_PATH := $(call my-dir) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES := ResourceManagerService.cpp + +LOCAL_SHARED_LIBRARIES := libmedia libstagefright libbinder libutils liblog + +LOCAL_MODULE:= libresourcemanagerservice + +LOCAL_32_BIT_ONLY := true + +LOCAL_C_INCLUDES += \ + $(TOPDIR)frameworks/av/include + +include $(BUILD_SHARED_LIBRARY) + +include $(call all-makefiles-under,$(LOCAL_PATH)) diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp new file mode 100644 index 0000000..7296d47 --- /dev/null +++ b/services/mediaresourcemanager/ResourceManagerService.cpp @@ -0,0 +1,345 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ResourceManagerService" +#include <utils/Log.h> + +#include <binder/IServiceManager.h> +#include <dirent.h> +#include <media/stagefright/ProcessInfo.h> +#include <string.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <sys/time.h> +#include <unistd.h> + +#include "ResourceManagerService.h" + +namespace android { + +template <typename T> +static String8 getString(const Vector<T> &items) { + String8 itemsStr; + for (size_t i = 0; i < items.size(); ++i) { + itemsStr.appendFormat("%s ", items[i].toString().string()); + } + return itemsStr; +} + +static bool hasResourceType(String8 type, Vector<MediaResource> resources) { + for (size_t i = 0; i < resources.size(); ++i) { + if (resources[i].mType == type) { + return true; + } + } + return false; +} + +static bool hasResourceType(String8 type, ResourceInfos infos) { + for (size_t i = 0; i < infos.size(); ++i) { + if (hasResourceType(type, infos[i].resources)) { + return true; + } + } + return false; +} + +static ResourceInfos& getResourceInfosForEdit( + int pid, + PidResourceInfosMap& map) { + ssize_t index = map.indexOfKey(pid); + if (index < 0) { + // new pid + ResourceInfos infosForPid; + map.add(pid, infosForPid); + } + + return map.editValueFor(pid); +} + +static ResourceInfo& getResourceInfoForEdit( + int64_t clientId, + const sp<IResourceManagerClient> client, + ResourceInfos& infos) { + for (size_t i = 0; i < infos.size(); ++i) { + if (infos[i].clientId == clientId) { + return infos.editItemAt(i); + } + } + ResourceInfo info; + 
info.clientId = clientId; + info.client = client; + infos.push_back(info); + return infos.editItemAt(infos.size() - 1); +} + +ResourceManagerService::ResourceManagerService() + : mProcessInfo(new ProcessInfo()), + mSupportsMultipleSecureCodecs(true), + mSupportsSecureWithNonSecureCodec(true) {} + +ResourceManagerService::ResourceManagerService(sp<ProcessInfoInterface> processInfo) + : mProcessInfo(processInfo), + mSupportsMultipleSecureCodecs(true), + mSupportsSecureWithNonSecureCodec(true) {} + +ResourceManagerService::~ResourceManagerService() {} + +void ResourceManagerService::config(const Vector<MediaResourcePolicy> &policies) { + ALOGV("config(%s)", getString(policies).string()); + + Mutex::Autolock lock(mLock); + for (size_t i = 0; i < policies.size(); ++i) { + String8 type = policies[i].mType; + uint64_t value = policies[i].mValue; + if (type == kPolicySupportsMultipleSecureCodecs) { + mSupportsMultipleSecureCodecs = (value != 0); + } else if (type == kPolicySupportsSecureWithNonSecureCodec) { + mSupportsSecureWithNonSecureCodec = (value != 0); + } + } +} + +void ResourceManagerService::addResource( + int pid, + int64_t clientId, + const sp<IResourceManagerClient> client, + const Vector<MediaResource> &resources) { + ALOGV("addResource(pid %d, clientId %lld, resources %s)", + pid, (long long) clientId, getString(resources).string()); + + Mutex::Autolock lock(mLock); + ResourceInfos& infos = getResourceInfosForEdit(pid, mMap); + ResourceInfo& info = getResourceInfoForEdit(clientId, client, infos); + info.resources.appendVector(resources); +} + +void ResourceManagerService::removeResource(int64_t clientId) { + ALOGV("removeResource(%lld)", (long long) clientId); + + Mutex::Autolock lock(mLock); + bool found = false; + for (size_t i = 0; i < mMap.size(); ++i) { + ResourceInfos &infos = mMap.editValueAt(i); + for (size_t j = 0; j < infos.size();) { + if (infos[j].clientId == clientId) { + j = infos.removeAt(j); + found = true; + } else { + ++j; + } + } + if 
(found) { + break; + } + } + if (!found) { + ALOGV("didn't find client"); + } +} + +bool ResourceManagerService::reclaimResource( + int callingPid, const Vector<MediaResource> &resources) { + ALOGV("reclaimResource(callingPid %d, resources %s)", + callingPid, getString(resources).string()); + + Vector<sp<IResourceManagerClient>> clients; + { + Mutex::Autolock lock(mLock); + // first pass to handle secure/non-secure codec conflict + for (size_t i = 0; i < resources.size(); ++i) { + String8 type = resources[i].mType; + if (type == kResourceSecureCodec) { + if (!mSupportsMultipleSecureCodecs) { + if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) { + return false; + } + } + if (!mSupportsSecureWithNonSecureCodec) { + if (!getAllClients_l(callingPid, String8(kResourceNonSecureCodec), &clients)) { + return false; + } + } + } else if (type == kResourceNonSecureCodec) { + if (!mSupportsSecureWithNonSecureCodec) { + if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) { + return false; + } + } + } + } + + if (clients.size() == 0) { + // if no secure/non-secure codec conflict, run second pass to handle other resources. 
+ for (size_t i = 0; i < resources.size(); ++i) { + String8 type = resources[i].mType; + if (type == kResourceGraphicMemory) { + sp<IResourceManagerClient> client; + if (!getLowestPriorityBiggestClient_l(callingPid, type, &client)) { + return false; + } + clients.push_back(client); + } + } + } + } + + if (clients.size() == 0) { + return false; + } + + for (size_t i = 0; i < clients.size(); ++i) { + ALOGV("reclaimResource from client %p", clients[i].get()); + if (!clients[i]->reclaimResource()) { + return false; + } + } + return true; +} + +bool ResourceManagerService::getAllClients_l( + int callingPid, const String8 &type, Vector<sp<IResourceManagerClient>> *clients) { + Vector<sp<IResourceManagerClient>> temp; + for (size_t i = 0; i < mMap.size(); ++i) { + ResourceInfos &infos = mMap.editValueAt(i); + for (size_t j = 0; j < infos.size(); ++j) { + if (hasResourceType(type, infos[j].resources)) { + if (!isCallingPriorityHigher_l(callingPid, mMap.keyAt(i))) { + // some higher/equal priority process owns the resource, + // this request can't be fulfilled. 
+ ALOGE("getAllClients_l: can't reclaim resource %s from pid %d", + type.string(), mMap.keyAt(i)); + return false; + } + temp.push_back(infos[j].client); + } + } + } + if (temp.size() == 0) { + ALOGV("getAllClients_l: didn't find any resource %s", type.string()); + return true; + } + clients->appendVector(temp); + return true; +} + +bool ResourceManagerService::getLowestPriorityBiggestClient_l( + int callingPid, const String8 &type, sp<IResourceManagerClient> *client) { + int lowestPriorityPid; + int lowestPriority; + int callingPriority; + if (!mProcessInfo->getPriority(callingPid, &callingPriority)) { + ALOGE("getLowestPriorityBiggestClient_l: can't get process priority for pid %d", + callingPid); + return false; + } + if (!getLowestPriorityPid_l(type, &lowestPriorityPid, &lowestPriority)) { + return false; + } + if (lowestPriority <= callingPriority) { + ALOGE("getLowestPriorityBiggestClient_l: lowest priority %d vs caller priority %d", + lowestPriority, callingPriority); + return false; + } + + if (!getBiggestClient_l(lowestPriorityPid, type, client)) { + return false; + } + return true; +} + +bool ResourceManagerService::getLowestPriorityPid_l( + const String8 &type, int *lowestPriorityPid, int *lowestPriority) { + int pid = -1; + int priority = -1; + for (size_t i = 0; i < mMap.size(); ++i) { + if (mMap.valueAt(i).size() == 0) { + // no client on this process. + continue; + } + if (!hasResourceType(type, mMap.valueAt(i))) { + // doesn't have the requested resource type + continue; + } + int tempPid = mMap.keyAt(i); + int tempPriority; + if (!mProcessInfo->getPriority(tempPid, &tempPriority)) { + ALOGV("getLowestPriorityPid_l: can't get priority of pid %d, skipped", tempPid); + // TODO: remove this pid from mMap? 
+ continue; + } + if (pid == -1 || tempPriority > priority) { + // initial the value + pid = tempPid; + priority = tempPriority; + } + } + if (pid != -1) { + *lowestPriorityPid = pid; + *lowestPriority = priority; + } + return (pid != -1); +} + +bool ResourceManagerService::isCallingPriorityHigher_l(int callingPid, int pid) { + int callingPidPriority; + if (!mProcessInfo->getPriority(callingPid, &callingPidPriority)) { + return false; + } + + int priority; + if (!mProcessInfo->getPriority(pid, &priority)) { + return false; + } + + return (callingPidPriority < priority); +} + +bool ResourceManagerService::getBiggestClient_l( + int pid, const String8 &type, sp<IResourceManagerClient> *client) { + ssize_t index = mMap.indexOfKey(pid); + if (index < 0) { + ALOGE("getBiggestClient_l: can't find resource info for pid %d", pid); + return false; + } + + sp<IResourceManagerClient> clientTemp; + uint64_t largestValue = 0; + const ResourceInfos &infos = mMap.valueAt(index); + for (size_t i = 0; i < infos.size(); ++i) { + Vector<MediaResource> resources = infos[i].resources; + for (size_t j = 0; j < resources.size(); ++j) { + if (resources[j].mType == type) { + if (resources[j].mValue > largestValue) { + largestValue = resources[j].mValue; + clientTemp = infos[i].client; + } + } + } + } + + if (clientTemp == NULL) { + ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", type.string(), pid); + return false; + } + + *client = clientTemp; + return true; +} + +} // namespace android diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h new file mode 100644 index 0000000..2ed9bf8 --- /dev/null +++ b/services/mediaresourcemanager/ResourceManagerService.h @@ -0,0 +1,106 @@ +/* +** +** Copyright 2015, The Android Open Source Project +** +** Licensed under the Apache License, Version 2.0 (the "License"); +** you may not use this file except in compliance with the License. 
+** You may obtain a copy of the License at +** +** http://www.apache.org/licenses/LICENSE-2.0 +** +** Unless required by applicable law or agreed to in writing, software +** distributed under the License is distributed on an "AS IS" BASIS, +** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +** See the License for the specific language governing permissions and +** limitations under the License. +*/ + +#ifndef ANDROID_RESOURCEMANAGERSERVICE_H +#define ANDROID_RESOURCEMANAGERSERVICE_H + +#include <arpa/inet.h> +#include <binder/BinderService.h> +#include <utils/Errors.h> +#include <utils/KeyedVector.h> +#include <utils/String8.h> +#include <utils/threads.h> +#include <utils/Vector.h> + +#include <media/IResourceManagerService.h> + +namespace android { + +struct ProcessInfoInterface; + +struct ResourceInfo { + int64_t clientId; + sp<IResourceManagerClient> client; + Vector<MediaResource> resources; +}; + +typedef Vector<ResourceInfo> ResourceInfos; +typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap; + +class ResourceManagerService + : public BinderService<ResourceManagerService>, + public BnResourceManagerService +{ +public: + static char const *getServiceName() { return "media.resource_manager"; } + + ResourceManagerService(); + ResourceManagerService(sp<ProcessInfoInterface> processInfo); + + // IResourceManagerService interface + virtual void config(const Vector<MediaResourcePolicy> &policies); + + virtual void addResource( + int pid, + int64_t clientId, + const sp<IResourceManagerClient> client, + const Vector<MediaResource> &resources); + + virtual void removeResource(int64_t clientId); + + virtual bool reclaimResource(int callingPid, const Vector<MediaResource> &resources); + +protected: + virtual ~ResourceManagerService(); + +private: + friend class ResourceManagerServiceTest; + + // Gets the list of all the clients who own the specified resource type. 
+ // Returns false if any client belongs to a process with higher priority than the + // calling process. The clients will remain unchanged if returns false. + bool getAllClients_l(int callingPid, const String8 &type, + Vector<sp<IResourceManagerClient>> *clients); + + // Gets the client who owns specified resource type from lowest possible priority process. + // Returns false if the calling process priority is not higher than the lowest process + // priority. The client will remain unchanged if returns false. + bool getLowestPriorityBiggestClient_l(int callingPid, const String8 &type, + sp<IResourceManagerClient> *client); + + // Gets lowest priority process that has the specified resource type. + // Returns false if failed. The output parameters will remain unchanged if failed. + bool getLowestPriorityPid_l(const String8 &type, int *pid, int *priority); + + // Gets the client who owns biggest piece of specified resource type from pid. + // Returns false if failed. The client will remain unchanged if failed. + bool getBiggestClient_l(int pid, const String8 &type, sp<IResourceManagerClient> *client); + + bool isCallingPriorityHigher_l(int callingPid, int pid); + + mutable Mutex mLock; + sp<ProcessInfoInterface> mProcessInfo; + PidResourceInfosMap mMap; + bool mSupportsMultipleSecureCodecs; + bool mSupportsSecureWithNonSecureCodec; +}; + +// ---------------------------------------------------------------------------- + +}; // namespace android + +#endif // ANDROID_RESOURCEMANAGERSERVICE_H diff --git a/services/mediaresourcemanager/test/Android.mk b/services/mediaresourcemanager/test/Android.mk new file mode 100644 index 0000000..228b62a --- /dev/null +++ b/services/mediaresourcemanager/test/Android.mk @@ -0,0 +1,25 @@ +# Build the unit tests. 
+LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_MODULE := ResourceManagerService_test + +LOCAL_MODULE_TAGS := tests + +LOCAL_SRC_FILES := \ + ResourceManagerService_test.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libbinder \ + liblog \ + libmedia \ + libresourcemanagerservice \ + libutils \ + +LOCAL_C_INCLUDES := \ + frameworks/av/include \ + frameworks/av/services/mediaresourcemanager \ + +LOCAL_32_BIT_ONLY := true + +include $(BUILD_NATIVE_TEST) diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp new file mode 100644 index 0000000..b73e1bc --- /dev/null +++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp @@ -0,0 +1,464 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "ResourceManagerService_test" +#include <utils/Log.h> + +#include <gtest/gtest.h> + +#include "ResourceManagerService.h" +#include <media/IResourceManagerService.h> +#include <media/MediaResource.h> +#include <media/MediaResourcePolicy.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/ProcessInfoInterface.h> + +namespace android { + +struct TestProcessInfo : public ProcessInfoInterface { + TestProcessInfo() {} + virtual ~TestProcessInfo() {} + + virtual bool getPriority(int pid, int *priority) { + // For testing, use pid as priority. + // Lower the value higher the priority. + *priority = pid; + return true; + } + +private: + DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo); +}; + +struct TestClient : public BnResourceManagerClient { + TestClient(sp<ResourceManagerService> service) + : mReclaimed(false), mService(service) {} + + virtual bool reclaimResource() { + sp<IResourceManagerClient> client(this); + mService->removeResource((int64_t) client.get()); + mReclaimed = true; + return true; + } + + bool reclaimed() const { + return mReclaimed; + } + + void reset() { + mReclaimed = false; + } + +protected: + virtual ~TestClient() {} + +private: + bool mReclaimed; + sp<ResourceManagerService> mService; + DISALLOW_EVIL_CONSTRUCTORS(TestClient); +}; + +static const int kTestPid1 = 30; +static const int kTestPid2 = 20; + +class ResourceManagerServiceTest : public ::testing::Test { +public: + ResourceManagerServiceTest() + : mService(new ResourceManagerService(new TestProcessInfo)), + mTestClient1(new TestClient(mService)), + mTestClient2(new TestClient(mService)), + mTestClient3(new TestClient(mService)) { + } + +protected: + static bool isEqualResources(const Vector<MediaResource> &resources1, + const Vector<MediaResource> &resources2) { + if (resources1.size() != resources2.size()) { + return false; + } + for (size_t i = 0; i < resources1.size(); ++i) { + if (resources1[i] != resources2[i]) { + 
return false; + } + } + return true; + } + + static void expectEqResourceInfo(const ResourceInfo &info, sp<IResourceManagerClient> client, + const Vector<MediaResource> &resources) { + EXPECT_EQ(client, info.client); + EXPECT_TRUE(isEqualResources(resources, info.resources)); + } + + void verifyClients(bool c1, bool c2, bool c3) { + TestClient *client1 = static_cast<TestClient*>(mTestClient1.get()); + TestClient *client2 = static_cast<TestClient*>(mTestClient2.get()); + TestClient *client3 = static_cast<TestClient*>(mTestClient3.get()); + + EXPECT_EQ(c1, client1->reclaimed()); + EXPECT_EQ(c2, client2->reclaimed()); + EXPECT_EQ(c3, client3->reclaimed()); + + client1->reset(); + client2->reset(); + client3->reset(); + } + + void addResource() { + // kTestPid1 mTestClient1 + Vector<MediaResource> resources1; + resources1.push_back(MediaResource(String8(kResourceSecureCodec), 1)); + mService->addResource(kTestPid1, (int64_t) mTestClient1.get(), mTestClient1, resources1); + resources1.push_back(MediaResource(String8(kResourceGraphicMemory), 200)); + Vector<MediaResource> resources11; + resources11.push_back(MediaResource(String8(kResourceGraphicMemory), 200)); + mService->addResource(kTestPid1, (int64_t) mTestClient1.get(), mTestClient1, resources11); + + // kTestPid2 mTestClient2 + Vector<MediaResource> resources2; + resources2.push_back(MediaResource(String8(kResourceNonSecureCodec), 1)); + resources2.push_back(MediaResource(String8(kResourceGraphicMemory), 300)); + mService->addResource(kTestPid2, (int64_t) mTestClient2.get(), mTestClient2, resources2); + + // kTestPid2 mTestClient3 + Vector<MediaResource> resources3; + mService->addResource(kTestPid2, (int64_t) mTestClient3.get(), mTestClient3, resources3); + resources3.push_back(MediaResource(String8(kResourceSecureCodec), 1)); + resources3.push_back(MediaResource(String8(kResourceGraphicMemory), 100)); + mService->addResource(kTestPid2, (int64_t) mTestClient3.get(), mTestClient3, resources3); + + const 
PidResourceInfosMap &map = mService->mMap; + EXPECT_EQ(2u, map.size()); + ssize_t index1 = map.indexOfKey(kTestPid1); + ASSERT_GE(index1, 0); + const ResourceInfos &infos1 = map[index1]; + EXPECT_EQ(1u, infos1.size()); + expectEqResourceInfo(infos1[0], mTestClient1, resources1); + + ssize_t index2 = map.indexOfKey(kTestPid2); + ASSERT_GE(index2, 0); + const ResourceInfos &infos2 = map[index2]; + EXPECT_EQ(2u, infos2.size()); + expectEqResourceInfo(infos2[0], mTestClient2, resources2); + expectEqResourceInfo(infos2[1], mTestClient3, resources3); + } + + void testConfig() { + EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs); + EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec); + + Vector<MediaResourcePolicy> policies1; + policies1.push_back(MediaResourcePolicy(String8(kPolicySupportsMultipleSecureCodecs), 1)); + policies1.push_back( + MediaResourcePolicy(String8(kPolicySupportsSecureWithNonSecureCodec), 0)); + mService->config(policies1); + EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs); + EXPECT_FALSE(mService->mSupportsSecureWithNonSecureCodec); + + Vector<MediaResourcePolicy> policies2; + policies2.push_back(MediaResourcePolicy(String8(kPolicySupportsMultipleSecureCodecs), 0)); + policies2.push_back( + MediaResourcePolicy(String8(kPolicySupportsSecureWithNonSecureCodec), 1)); + mService->config(policies2); + EXPECT_FALSE(mService->mSupportsMultipleSecureCodecs); + EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec); + } + + void testRemoveResource() { + addResource(); + + mService->removeResource((int64_t) mTestClient2.get()); + + const PidResourceInfosMap &map = mService->mMap; + EXPECT_EQ(2u, map.size()); + const ResourceInfos &infos1 = map.valueFor(kTestPid1); + const ResourceInfos &infos2 = map.valueFor(kTestPid2); + EXPECT_EQ(1u, infos1.size()); + EXPECT_EQ(1u, infos2.size()); + // mTestClient2 has been removed. 
+ EXPECT_EQ(mTestClient3, infos2[0].client); + } + + void testGetAllClients() { + addResource(); + + String8 type = String8(kResourceSecureCodec); + String8 unknowType = String8("unknowType"); + Vector<sp<IResourceManagerClient> > clients; + int lowPriorityPid = 100; + EXPECT_FALSE(mService->getAllClients_l(lowPriorityPid, type, &clients)); + int midPriorityPid = 25; + EXPECT_FALSE(mService->getAllClients_l(lowPriorityPid, type, &clients)); + int highPriorityPid = 10; + EXPECT_TRUE(mService->getAllClients_l(10, unknowType, &clients)); + EXPECT_TRUE(mService->getAllClients_l(10, type, &clients)); + + EXPECT_EQ(2u, clients.size()); + EXPECT_EQ(mTestClient3, clients[0]); + EXPECT_EQ(mTestClient1, clients[1]); + } + + void testReclaimResourceSecure() { + Vector<MediaResource> resources; + resources.push_back(MediaResource(String8(kResourceSecureCodec), 1)); + resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150)); + + // ### secure codec can't coexist and secure codec can coexist with non-secure codec ### + { + addResource(); + mService->mSupportsMultipleSecureCodecs = false; + mService->mSupportsSecureWithNonSecureCodec = true; + + // priority too low + EXPECT_FALSE(mService->reclaimResource(40, resources)); + EXPECT_FALSE(mService->reclaimResource(25, resources)); + + // reclaim all secure codecs + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(true, false, true); + + // call again should reclaim one largest graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, true, false); + + // nothing left + EXPECT_FALSE(mService->reclaimResource(10, resources)); + } + + // ### secure codecs can't coexist and secure codec can't coexist with non-secure codec ### + { + addResource(); + mService->mSupportsMultipleSecureCodecs = false; + mService->mSupportsSecureWithNonSecureCodec = false; + + // priority too low + EXPECT_FALSE(mService->reclaimResource(40, resources)); + 
EXPECT_FALSE(mService->reclaimResource(25, resources)); + + // reclaim all secure and non-secure codecs + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(true, true, true); + + // nothing left + EXPECT_FALSE(mService->reclaimResource(10, resources)); + } + + + // ### secure codecs can coexist but secure codec can't coexist with non-secure codec ### + { + addResource(); + mService->mSupportsMultipleSecureCodecs = true; + mService->mSupportsSecureWithNonSecureCodec = false; + + // priority too low + EXPECT_FALSE(mService->reclaimResource(40, resources)); + EXPECT_FALSE(mService->reclaimResource(25, resources)); + + // reclaim all non-secure codecs + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, true, false); + + // call again should reclaim one largest graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(true, false, false); + + // call again should reclaim another largest graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, false, true); + + // nothing left + EXPECT_FALSE(mService->reclaimResource(10, resources)); + } + + // ### secure codecs can coexist and secure codec can coexist with non-secure codec ### + { + addResource(); + mService->mSupportsMultipleSecureCodecs = true; + mService->mSupportsSecureWithNonSecureCodec = true; + + // priority too low + EXPECT_FALSE(mService->reclaimResource(40, resources)); + + EXPECT_TRUE(mService->reclaimResource(10, resources)); + // one largest graphic memory from lowest process got reclaimed + verifyClients(true, false, false); + + // call again should reclaim another graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, true, false); + + // call again should reclaim another graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, false, 
true); + + // nothing left + EXPECT_FALSE(mService->reclaimResource(10, resources)); + } + } + + void testReclaimResourceNonSecure() { + Vector<MediaResource> resources; + resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1)); + resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150)); + + // ### secure codec can't coexist with non-secure codec ### + { + addResource(); + mService->mSupportsSecureWithNonSecureCodec = false; + + // priority too low + EXPECT_FALSE(mService->reclaimResource(40, resources)); + EXPECT_FALSE(mService->reclaimResource(25, resources)); + + // reclaim all secure codecs + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(true, false, true); + + // call again should reclaim one graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, true, false); + + // nothing left + EXPECT_FALSE(mService->reclaimResource(10, resources)); + } + + + // ### secure codec can coexist with non-secure codec ### + { + addResource(); + mService->mSupportsSecureWithNonSecureCodec = true; + + // priority too low + EXPECT_FALSE(mService->reclaimResource(40, resources)); + + EXPECT_TRUE(mService->reclaimResource(10, resources)); + // one largest graphic memory from lowest process got reclaimed + verifyClients(true, false, false); + + // call again should reclaim another graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, true, false); + + // call again should reclaim another graphic memory from lowest process + EXPECT_TRUE(mService->reclaimResource(10, resources)); + verifyClients(false, false, true); + + // nothing left + EXPECT_FALSE(mService->reclaimResource(10, resources)); + } + } + + void testGetLowestPriorityBiggestClient() { + String8 type = String8(kResourceGraphicMemory); + sp<IResourceManagerClient> client; + EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(10, type, &client)); + 
+ addResource(); + + EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(100, type, &client)); + EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(10, type, &client)); + + // kTestPid1 is the lowest priority process with kResourceGraphicMemory. + // mTestClient1 has the largest kResourceGraphicMemory within kTestPid1. + EXPECT_EQ(mTestClient1, client); + } + + void testGetLowestPriorityPid() { + int pid; + int priority; + TestProcessInfo processInfo; + + String8 type = String8(kResourceGraphicMemory); + EXPECT_FALSE(mService->getLowestPriorityPid_l(type, &pid, &priority)); + + addResource(); + + EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority)); + EXPECT_EQ(kTestPid1, pid); + int priority1; + processInfo.getPriority(kTestPid1, &priority1); + EXPECT_EQ(priority1, priority); + + type = String8(kResourceNonSecureCodec); + EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority)); + EXPECT_EQ(kTestPid2, pid); + int priority2; + processInfo.getPriority(kTestPid2, &priority2); + EXPECT_EQ(priority2, priority); + } + + void testGetBiggestClient() { + String8 type = String8(kResourceGraphicMemory); + sp<IResourceManagerClient> client; + EXPECT_FALSE(mService->getBiggestClient_l(kTestPid2, type, &client)); + + addResource(); + + EXPECT_TRUE(mService->getBiggestClient_l(kTestPid2, type, &client)); + EXPECT_EQ(mTestClient2, client); + } + + void testIsCallingPriorityHigher() { + EXPECT_FALSE(mService->isCallingPriorityHigher_l(101, 100)); + EXPECT_FALSE(mService->isCallingPriorityHigher_l(100, 100)); + EXPECT_TRUE(mService->isCallingPriorityHigher_l(99, 100)); + } + + sp<ResourceManagerService> mService; + sp<IResourceManagerClient> mTestClient1; + sp<IResourceManagerClient> mTestClient2; + sp<IResourceManagerClient> mTestClient3; +}; + +TEST_F(ResourceManagerServiceTest, config) { + testConfig(); +} + +TEST_F(ResourceManagerServiceTest, addResource) { + addResource(); +} + +TEST_F(ResourceManagerServiceTest, removeResource) { + 
testRemoveResource(); +} + +TEST_F(ResourceManagerServiceTest, reclaimResource) { + testReclaimResourceSecure(); + testReclaimResourceNonSecure(); +} + +TEST_F(ResourceManagerServiceTest, getAllClients_l) { + testGetAllClients(); +} + +TEST_F(ResourceManagerServiceTest, getLowestPriorityBiggestClient_l) { + testGetLowestPriorityBiggestClient(); +} + +TEST_F(ResourceManagerServiceTest, getLowestPriorityPid_l) { + testGetLowestPriorityPid(); +} + +TEST_F(ResourceManagerServiceTest, getBiggestClient_l) { + testGetBiggestClient(); +} + +TEST_F(ResourceManagerServiceTest, isCallingPriorityHigher_l) { + testIsCallingPriorityHigher(); +} + +} // namespace android diff --git a/services/radio/Android.mk b/services/radio/Android.mk new file mode 100644 index 0000000..9ee5666 --- /dev/null +++ b/services/radio/Android.mk @@ -0,0 +1,36 @@ +# Copyright 2014 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +LOCAL_PATH:= $(call my-dir) + +include $(CLEAR_VARS) + + +LOCAL_SRC_FILES:= \ + RadioService.cpp + +LOCAL_SHARED_LIBRARIES:= \ + libui \ + liblog \ + libutils \ + libbinder \ + libcutils \ + libmedia \ + libhardware \ + libradio \ + libradio_metadata + +LOCAL_MODULE:= libradioservice + +include $(BUILD_SHARED_LIBRARY) diff --git a/services/radio/RadioRegions.h b/services/radio/RadioRegions.h new file mode 100644 index 0000000..3335b8a --- /dev/null +++ b/services/radio/RadioRegions.h @@ -0,0 +1,225 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_RADIO_REGIONS_H +#define ANDROID_HARDWARE_RADIO_REGIONS_H + +namespace android { + +#define RADIO_BAND_LOWER_FM_ITU1 87500 +#define RADIO_BAND_UPPER_FM_ITU1 108000 +#define RADIO_BAND_SPACING_FM_ITU1 100 + +#define RADIO_BAND_LOWER_FM_ITU2 87900 +#define RADIO_BAND_UPPER_FM_ITU2 107900 +#define RADIO_BAND_SPACING_FM_ITU2 200 + +#define RADIO_BAND_LOWER_FM_JAPAN 76000 +#define RADIO_BAND_UPPER_FM_JAPAN 90000 +#define RADIO_BAND_SPACING_FM_JAPAN 100 + +#define RADIO_BAND_LOWER_FM_OIRT 65800 +#define RADIO_BAND_UPPER_FM_OIRT 74000 +#define RADIO_BAND_SPACING_FM_OIRT 10 + +#define RADIO_BAND_LOWER_LW 153 +#define RADIO_BAND_UPPER_LW 279 +#define RADIO_BAND_SPACING_LW 9 + +#define RADIO_BAND_LOWER_MW_IUT1 531 +#define RADIO_BAND_UPPER_MW_ITU1 1611 +#define RADIO_BAND_SPACING_MW_ITU1 9 + +#define RADIO_BAND_LOWER_MW_IUT2 540 +#define RADIO_BAND_UPPER_MW_ITU2 1610 +#define RADIO_BAND_SPACING_MW_ITU2 10 + +#define RADIO_BAND_LOWER_SW 2300 +#define RADIO_BAND_UPPER_SW 26100 +#define RADIO_BAND_SPACING_SW 5 + + +#ifndef ARRAY_SIZE +#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0])) +#endif + +const radio_band_config_t sKnownRegionConfigs[] = { + { // FM ITU 1 + RADIO_REGION_ITU_1, + { + RADIO_BAND_FM, + false, + RADIO_BAND_LOWER_FM_ITU1, + RADIO_BAND_UPPER_FM_ITU1, + 1, + {RADIO_BAND_SPACING_FM_ITU1}, + { + RADIO_DEEMPHASIS_50, + true, + RADIO_RDS_WORLD, + true, + true, + } + } + }, + { // FM Americas + RADIO_REGION_ITU_2, + { + RADIO_BAND_FM, + false, + RADIO_BAND_LOWER_FM_ITU2, + RADIO_BAND_UPPER_FM_ITU2, + 1, + {RADIO_BAND_SPACING_FM_ITU2}, + { + RADIO_DEEMPHASIS_75, + true, + RADIO_RDS_US, + true, + true, + } + } + }, + { // FM Japan + RADIO_REGION_JAPAN, + { + RADIO_BAND_FM, + false, + RADIO_BAND_LOWER_FM_JAPAN, + RADIO_BAND_UPPER_FM_JAPAN, + 1, + {RADIO_BAND_SPACING_FM_JAPAN}, + { + RADIO_DEEMPHASIS_50, + true, + RADIO_RDS_WORLD, + true, + true, + } + } + }, + { // FM Korea + RADIO_REGION_KOREA, + { + RADIO_BAND_FM, + false, + 
RADIO_BAND_LOWER_FM_ITU1, + RADIO_BAND_UPPER_FM_ITU1, + 1, + {RADIO_BAND_SPACING_FM_ITU1}, + { + RADIO_DEEMPHASIS_75, + true, + RADIO_RDS_WORLD, + true, + true, + } + } + }, + { // FM OIRT + RADIO_REGION_OIRT, + { + RADIO_BAND_FM, + false, + RADIO_BAND_LOWER_FM_OIRT, + RADIO_BAND_UPPER_FM_OIRT, + 1, + {RADIO_BAND_SPACING_FM_OIRT}, + { + RADIO_DEEMPHASIS_50, + true, + RADIO_RDS_WORLD, + true, + true, + } + } + }, + { // FM US HD radio + RADIO_REGION_ITU_2, + { + RADIO_BAND_FM_HD, + false, + RADIO_BAND_LOWER_FM_ITU2, + RADIO_BAND_UPPER_FM_ITU2, + 1, + {RADIO_BAND_SPACING_FM_ITU2}, + { + RADIO_DEEMPHASIS_75, + true, + RADIO_RDS_US, + true, + true, + } + } + }, + { // AM LW + RADIO_REGION_ITU_1, + { + RADIO_BAND_AM, + false, + RADIO_BAND_LOWER_LW, + RADIO_BAND_UPPER_LW, + 1, + {RADIO_BAND_SPACING_LW}, + { + } + } + }, + { // AM SW + RADIO_REGION_ITU_1, + { + RADIO_BAND_AM, + false, + RADIO_BAND_LOWER_SW, + RADIO_BAND_UPPER_SW, + 1, + {RADIO_BAND_SPACING_SW}, + { + } + } + }, + { // AM MW ITU1 + RADIO_REGION_ITU_1, + { + RADIO_BAND_AM, + false, + RADIO_BAND_LOWER_MW_IUT1, + RADIO_BAND_UPPER_MW_ITU1, + 1, + {RADIO_BAND_SPACING_MW_ITU1}, + { + } + } + }, + { // AM MW ITU2 + RADIO_REGION_ITU_2, + { + RADIO_BAND_AM, + false, + RADIO_BAND_LOWER_MW_IUT2, + RADIO_BAND_UPPER_MW_ITU2, + 1, + {RADIO_BAND_SPACING_MW_ITU2}, + { + } + } + } +}; + + +} // namespace android + +#endif // ANDROID_HARDWARE_RADIO_REGIONS_H diff --git a/services/radio/RadioService.cpp b/services/radio/RadioService.cpp new file mode 100644 index 0000000..a6c2bdf --- /dev/null +++ b/services/radio/RadioService.cpp @@ -0,0 +1,901 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "RadioService" +//#define LOG_NDEBUG 0 + +#include <stdio.h> +#include <string.h> +#include <sys/types.h> +#include <pthread.h> + +#include <system/audio.h> +#include <system/audio_policy.h> +#include <system/radio.h> +#include <system/radio_metadata.h> +#include <cutils/atomic.h> +#include <cutils/properties.h> +#include <hardware/hardware.h> +#include <utils/Errors.h> +#include <utils/Log.h> +#include <binder/IServiceManager.h> +#include <binder/MemoryBase.h> +#include <binder/MemoryHeapBase.h> +#include <hardware/radio.h> +#include <media/AudioSystem.h> +#include "RadioService.h" +#include "RadioRegions.h" + +namespace android { + +static const char kRadioTunerAudioDeviceName[] = "Radio tuner source"; + +RadioService::RadioService() + : BnRadioService(), mNextUniqueId(1) +{ + ALOGI("%s", __FUNCTION__); +} + +void RadioService::onFirstRef() +{ + const hw_module_t *mod; + int rc; + struct radio_hw_device *dev; + + ALOGI("%s", __FUNCTION__); + + rc = hw_get_module_by_class(RADIO_HARDWARE_MODULE_ID, RADIO_HARDWARE_MODULE_ID_FM, &mod); + if (rc != 0) { + ALOGE("couldn't load radio module %s.%s (%s)", + RADIO_HARDWARE_MODULE_ID, "primary", strerror(-rc)); + return; + } + rc = radio_hw_device_open(mod, &dev); + if (rc != 0) { + ALOGE("couldn't open radio hw device in %s.%s (%s)", + RADIO_HARDWARE_MODULE_ID, "primary", strerror(-rc)); + return; + } + if (dev->common.version != RADIO_DEVICE_API_VERSION_CURRENT) { + ALOGE("wrong radio hw device version %04x", dev->common.version); + return; + } + + struct radio_hal_properties 
halProperties; + rc = dev->get_properties(dev, &halProperties); + if (rc != 0) { + ALOGE("could not read implementation properties"); + return; + } + + radio_properties_t properties; + properties.handle = + (radio_handle_t)android_atomic_inc(&mNextUniqueId); + + ALOGI("loaded default module %s, handle %d", properties.product, properties.handle); + + convertProperties(&properties, &halProperties); + sp<Module> module = new Module(dev, properties); + mModules.add(properties.handle, module); +} + +RadioService::~RadioService() +{ + for (size_t i = 0; i < mModules.size(); i++) { + radio_hw_device_close(mModules.valueAt(i)->hwDevice()); + } +} + +status_t RadioService::listModules(struct radio_properties *properties, + uint32_t *numModules) +{ + ALOGV("listModules"); + + AutoMutex lock(mServiceLock); + if (numModules == NULL || (*numModules != 0 && properties == NULL)) { + return BAD_VALUE; + } + size_t maxModules = *numModules; + *numModules = mModules.size(); + for (size_t i = 0; i < mModules.size() && i < maxModules; i++) { + properties[i] = mModules.valueAt(i)->properties(); + } + return NO_ERROR; +} + +status_t RadioService::attach(radio_handle_t handle, + const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool withAudio, + sp<IRadio>& radio) +{ + ALOGV("%s %d config %p withAudio %d", __FUNCTION__, handle, config, withAudio); + + AutoMutex lock(mServiceLock); + radio.clear(); + if (client == 0) { + return BAD_VALUE; + } + ssize_t index = mModules.indexOfKey(handle); + if (index < 0) { + return BAD_VALUE; + } + sp<Module> module = mModules.valueAt(index); + + if (config == NULL) { + config = module->getDefaultConfig(); + if (config == NULL) { + return INVALID_OPERATION; + } + } + ALOGV("%s region %d type %d", __FUNCTION__, config->region, config->band.type); + + radio = module->addClient(client, config, withAudio); + + if (radio == 0) { + NO_INIT; + } + return NO_ERROR; +} + + +static const int kDumpLockRetries = 50; +static const int 
kDumpLockSleep = 60000; + +static bool tryLock(Mutex& mutex) +{ + bool locked = false; + for (int i = 0; i < kDumpLockRetries; ++i) { + if (mutex.tryLock() == NO_ERROR) { + locked = true; + break; + } + usleep(kDumpLockSleep); + } + return locked; +} + +status_t RadioService::dump(int fd, const Vector<String16>& args __unused) { + String8 result; + if (checkCallingPermission(String16("android.permission.DUMP")) == false) { + result.appendFormat("Permission Denial: can't dump RadioService"); + write(fd, result.string(), result.size()); + } else { + bool locked = tryLock(mServiceLock); + // failed to lock - RadioService is probably deadlocked + if (!locked) { + result.append("RadioService may be deadlocked\n"); + write(fd, result.string(), result.size()); + } + + if (locked) mServiceLock.unlock(); + } + return NO_ERROR; +} + +status_t RadioService::onTransact( + uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) { + return BnRadioService::onTransact(code, data, reply, flags); +} + + +// static +void RadioService::callback(radio_hal_event_t *halEvent, void *cookie) +{ + CallbackThread *callbackThread = (CallbackThread *)cookie; + if (callbackThread == NULL) { + return; + } + callbackThread->sendEvent(halEvent); +} + +/* static */ +void RadioService::convertProperties(radio_properties_t *properties, + const radio_hal_properties_t *halProperties) +{ + memset(properties, 0, sizeof(struct radio_properties)); + properties->class_id = halProperties->class_id; + strlcpy(properties->implementor, halProperties->implementor, + RADIO_STRING_LEN_MAX); + strlcpy(properties->product, halProperties->product, + RADIO_STRING_LEN_MAX); + strlcpy(properties->version, halProperties->version, + RADIO_STRING_LEN_MAX); + strlcpy(properties->serial, halProperties->serial, + RADIO_STRING_LEN_MAX); + properties->num_tuners = halProperties->num_tuners; + properties->num_audio_sources = halProperties->num_audio_sources; + properties->supports_capture = 
halProperties->supports_capture; + + for (size_t i = 0; i < ARRAY_SIZE(sKnownRegionConfigs); i++) { + const radio_hal_band_config_t *band = &sKnownRegionConfigs[i].band; + size_t j; + for (j = 0; j < halProperties->num_bands; j++) { + const radio_hal_band_config_t *halBand = &halProperties->bands[j]; + size_t k; + if (band->type != halBand->type) continue; + if (band->lower_limit < halBand->lower_limit) continue; + if (band->upper_limit > halBand->upper_limit) continue; + for (k = 0; k < halBand->num_spacings; k++) { + if (band->spacings[0] == halBand->spacings[k]) break; + } + if (k == halBand->num_spacings) continue; + if (band->type == RADIO_BAND_AM) break; + if ((band->fm.deemphasis & halBand->fm.deemphasis) == 0) continue; + if (halBand->fm.rds == 0) break; + if ((band->fm.rds & halBand->fm.rds) != 0) break; + } + if (j == halProperties->num_bands) continue; + + ALOGI("convertProperties() Adding band type %d region %d", + sKnownRegionConfigs[i].band.type , sKnownRegionConfigs[i].region); + + memcpy(&properties->bands[properties->num_bands++], + &sKnownRegionConfigs[i], + sizeof(radio_band_config_t)); + } +} + +#undef LOG_TAG +#define LOG_TAG "RadioService::CallbackThread" + +RadioService::CallbackThread::CallbackThread(const wp<ModuleClient>& moduleClient) + : mModuleClient(moduleClient), mMemoryDealer(new MemoryDealer(1024 * 1024, "RadioService")) +{ +} + +RadioService::CallbackThread::~CallbackThread() +{ + mEventQueue.clear(); +} + +void RadioService::CallbackThread::onFirstRef() +{ + run("RadioService cbk", ANDROID_PRIORITY_URGENT_AUDIO); +} + +bool RadioService::CallbackThread::threadLoop() +{ + while (!exitPending()) { + sp<IMemory> eventMemory; + sp<ModuleClient> moduleClient; + { + Mutex::Autolock _l(mCallbackLock); + while (mEventQueue.isEmpty() && !exitPending()) { + ALOGV("CallbackThread::threadLoop() sleep"); + mCallbackCond.wait(mCallbackLock); + ALOGV("CallbackThread::threadLoop() wake up"); + } + if (exitPending()) { + break; + } + eventMemory = 
mEventQueue[0]; + mEventQueue.removeAt(0); + moduleClient = mModuleClient.promote(); + } + if (moduleClient != 0) { + moduleClient->onCallbackEvent(eventMemory); + eventMemory.clear(); + } + } + return false; +} + +void RadioService::CallbackThread::exit() +{ + Mutex::Autolock _l(mCallbackLock); + requestExit(); + mCallbackCond.broadcast(); +} + +sp<IMemory> RadioService::CallbackThread::prepareEvent(radio_hal_event_t *halEvent) +{ + sp<IMemory> eventMemory; + + size_t headerSize = + (sizeof(struct radio_event) + sizeof(unsigned int) - 1) /sizeof(unsigned int); + size_t metadataSize = 0; + switch (halEvent->type) { + case RADIO_EVENT_TUNED: + case RADIO_EVENT_AF_SWITCH: + if (radio_metadata_check(halEvent->info.metadata) == 0) { + metadataSize = radio_metadata_get_size(halEvent->info.metadata); + } + break; + case RADIO_EVENT_METADATA: + if (radio_metadata_check(halEvent->metadata) != 0) { + return eventMemory; + } + metadataSize = radio_metadata_get_size(halEvent->metadata); + break; + default: + break; + } + size_t size = headerSize + metadataSize; + eventMemory = mMemoryDealer->allocate(size); + if (eventMemory == 0 || eventMemory->pointer() == NULL) { + eventMemory.clear(); + return eventMemory; + } + struct radio_event *event = (struct radio_event *)eventMemory->pointer(); + event->type = halEvent->type; + event->status = halEvent->status; + + switch (event->type) { + case RADIO_EVENT_CONFIG: + event->config.band = halEvent->config; + break; + case RADIO_EVENT_TUNED: + case RADIO_EVENT_AF_SWITCH: + event->info = halEvent->info; + if (metadataSize != 0) { + memcpy((char *)event + headerSize, halEvent->info.metadata, metadataSize); + // replace meta data pointer by offset while in shared memory so that receiving side + // can restore the pointer in destination process. 
+ event->info.metadata = (radio_metadata_t *)headerSize; + } + break; + case RADIO_EVENT_TA: + case RADIO_EVENT_ANTENNA: + case RADIO_EVENT_CONTROL: + event->on = halEvent->on; + break; + case RADIO_EVENT_METADATA: + memcpy((char *)event + headerSize, halEvent->metadata, metadataSize); + // replace meta data pointer by offset while in shared memory so that receiving side + // can restore the pointer in destination process. + event->metadata = (radio_metadata_t *)headerSize; + break; + case RADIO_EVENT_HW_FAILURE: + default: + break; + } + + return eventMemory; +} + +void RadioService::CallbackThread::sendEvent(radio_hal_event_t *event) + { + sp<IMemory> eventMemory = prepareEvent(event); + if (eventMemory == 0) { + return; + } + + AutoMutex lock(mCallbackLock); + mEventQueue.add(eventMemory); + mCallbackCond.signal(); + ALOGV("%s DONE", __FUNCTION__); +} + + +#undef LOG_TAG +#define LOG_TAG "RadioService::Module" + +RadioService::Module::Module(radio_hw_device* hwDevice, radio_properties properties) + : mHwDevice(hwDevice), mProperties(properties), mMute(true) +{ +} + +RadioService::Module::~Module() { + mModuleClients.clear(); +} + +status_t RadioService::Module::dump(int fd __unused, const Vector<String16>& args __unused) { + String8 result; + return NO_ERROR; +} + +sp<RadioService::ModuleClient> RadioService::Module::addClient(const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool audio) +{ + ALOGV("addClient() %p config %p product %s", this, config, mProperties.product); + AutoMutex lock(mLock); + sp<ModuleClient> moduleClient; + int ret; + + for (size_t i = 0; i < mModuleClients.size(); i++) { + if (mModuleClients[i]->client() == client) { + // client already connected: reject + return moduleClient; + } + } + moduleClient = new ModuleClient(this, client, config, audio); + + struct radio_hal_band_config halConfig; + halConfig = config->band; + + // Tuner preemption logic: + // There is a limited amount of tuners and a limited amount of 
radio audio sources per module. + // The minimum is one tuner and one audio source. + // The numbers of tuners and sources are indicated in the module properties. + // NOTE: current framework implementation only supports one radio audio source. + // It is possible to open more than one tuner at a time but only one tuner can be connected + // to the radio audio source (AUDIO_DEVICE_IN_FM_TUNER). + // The base rule is that a newly connected tuner always wins, i.e. always gets a tuner + // and can use the audio source if requested. + // If another client is preempted, it is notified by a callback with RADIO_EVENT_CONTROL + // indicating loss of control. + // - If the newly connected client requests the audio source (audio == true): + // - if an audio source is available + // no problem + // - if not: + // the oldest client in the list using audio is preempted. + // - If the newly connected client does not request the audio source (audio == false): + // - if a tuner is available + // no problem + // - if not: + // The oldest client not using audio is preempted first and if none is found the + // the oldest client using audio is preempted. + // Each time a tuner using the audio source is opened or closed, the audio policy manager is + // notified of the connection or disconnection of AUDIO_DEVICE_IN_FM_TUNER. 
+ + sp<ModuleClient> oldestTuner; + sp<ModuleClient> oldestAudio; + size_t allocatedTuners = 0; + size_t allocatedAudio = 0; + for (size_t i = 0; i < mModuleClients.size(); i++) { + if (mModuleClients[i]->getTuner() != NULL) { + if (mModuleClients[i]->audio()) { + if (oldestAudio == 0) { + oldestAudio = mModuleClients[i]; + } + allocatedAudio++; + } else { + if (oldestTuner == 0) { + oldestTuner = mModuleClients[i]; + } + allocatedTuners++; + } + } + } + + const struct radio_tuner *halTuner; + sp<ModuleClient> preemtedClient; + if (audio) { + if (allocatedAudio >= mProperties.num_audio_sources) { + ALOG_ASSERT(oldestAudio != 0, "addClient() allocatedAudio/oldestAudio mismatch"); + preemtedClient = oldestAudio; + } + } else { + if (allocatedAudio + allocatedTuners >= mProperties.num_tuners) { + if (allocatedTuners != 0) { + ALOG_ASSERT(oldestTuner != 0, "addClient() allocatedTuners/oldestTuner mismatch"); + preemtedClient = oldestTuner; + } else { + ALOG_ASSERT(oldestAudio != 0, "addClient() allocatedAudio/oldestAudio mismatch"); + preemtedClient = oldestAudio; + } + } + } + if (preemtedClient != 0) { + halTuner = preemtedClient->getTuner(); + preemtedClient->setTuner(NULL); + mHwDevice->close_tuner(mHwDevice, halTuner); + if (preemtedClient->audio()) { + notifyDeviceConnection(false, ""); + } + } + + ret = mHwDevice->open_tuner(mHwDevice, &halConfig, audio, + RadioService::callback, moduleClient->callbackThread().get(), + &halTuner); + if (ret == 0) { + ALOGV("addClient() setTuner %p", halTuner); + moduleClient->setTuner(halTuner); + mModuleClients.add(moduleClient); + if (audio) { + notifyDeviceConnection(true, ""); + } + } else { + moduleClient.clear(); + } + + + ALOGV("addClient() DONE moduleClient %p", moduleClient.get()); + + return moduleClient; +} + +void RadioService::Module::removeClient(const sp<ModuleClient>& moduleClient) { + ALOGV("removeClient()"); + AutoMutex lock(mLock); + int ret; + ssize_t index = -1; + + for (size_t i = 0; i < 
mModuleClients.size(); i++) { + if (mModuleClients[i] == moduleClient) { + index = i; + break; + } + } + if (index == -1) { + return; + } + + mModuleClients.removeAt(index); + const struct radio_tuner *halTuner = moduleClient->getTuner(); + if (halTuner == NULL) { + return; + } + + mHwDevice->close_tuner(mHwDevice, halTuner); + if (moduleClient->audio()) { + notifyDeviceConnection(false, ""); + } + + mMute = true; + + if (mModuleClients.isEmpty()) { + return; + } + + // Tuner reallocation logic: + // When a client is removed and was controlling a tuner, this tuner will be allocated to a + // previously preempted client. This client will be notified by a callback with + // RADIO_EVENT_CONTROL indicating gain of control. + // - If a preempted client is waiting for an audio source and one becomes available: + // Allocate the tuner to the most recently added client waiting for an audio source + // - If not: + // Allocate the tuner to the most recently added client. + // Each time a tuner using the audio source is opened or closed, the audio policy manager is + // notified of the connection or disconnection of AUDIO_DEVICE_IN_FM_TUNER. 
+ + sp<ModuleClient> youngestClient; + sp<ModuleClient> youngestClientAudio; + size_t allocatedTuners = 0; + size_t allocatedAudio = 0; + for (ssize_t i = mModuleClients.size() - 1; i >= 0; i--) { + if (mModuleClients[i]->getTuner() == NULL) { + if (mModuleClients[i]->audio()) { + if (youngestClientAudio == 0) { + youngestClientAudio = mModuleClients[i]; + } + } else { + if (youngestClient == 0) { + youngestClient = mModuleClients[i]; + } + } + } else { + if (mModuleClients[i]->audio()) { + allocatedAudio++; + } else { + allocatedTuners++; + } + } + } + + ALOG_ASSERT(allocatedTuners + allocatedAudio < mProperties.num_tuners, + "removeClient() removed client but no tuner available"); + + ALOG_ASSERT(!moduleClient->audio() || allocatedAudio < mProperties.num_audio_sources, + "removeClient() removed audio client but no tuner with audio available"); + + if (allocatedAudio < mProperties.num_audio_sources && youngestClientAudio != 0) { + youngestClient = youngestClientAudio; + } + + ALOG_ASSERT(youngestClient != 0, "removeClient() removed client no candidate found for tuner"); + + struct radio_hal_band_config halConfig = youngestClient->halConfig(); + ret = mHwDevice->open_tuner(mHwDevice, &halConfig, youngestClient->audio(), + RadioService::callback, moduleClient->callbackThread().get(), + &halTuner); + + if (ret == 0) { + youngestClient->setTuner(halTuner); + if (youngestClient->audio()) { + notifyDeviceConnection(true, ""); + } + } +} + +status_t RadioService::Module::setMute(bool mute) +{ + Mutex::Autolock _l(mLock); + if (mute != mMute) { + mMute = mute; + //TODO notifify audio policy manager of media activity on radio audio device + } + return NO_ERROR; +} + +status_t RadioService::Module::getMute(bool *mute) +{ + Mutex::Autolock _l(mLock); + *mute = mMute; + return NO_ERROR; +} + + +const struct radio_band_config *RadioService::Module::getDefaultConfig() const +{ + if (mProperties.num_bands == 0) { + return NULL; + } + return &mProperties.bands[0]; +} + +void 
RadioService::Module::notifyDeviceConnection(bool connected, + const char *address) { + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + AudioSystem::setDeviceConnectionState(AUDIO_DEVICE_IN_FM_TUNER, + connected ? AUDIO_POLICY_DEVICE_STATE_AVAILABLE : + AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE, + address, kRadioTunerAudioDeviceName); + IPCThreadState::self()->restoreCallingIdentity(token); +} + +#undef LOG_TAG +#define LOG_TAG "RadioService::ModuleClient" + +RadioService::ModuleClient::ModuleClient(const sp<Module>& module, + const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool audio) + : mModule(module), mClient(client), mConfig(*config), mAudio(audio), mTuner(NULL) +{ +} + +void RadioService::ModuleClient::onFirstRef() +{ + mCallbackThread = new CallbackThread(this); + IInterface::asBinder(mClient)->linkToDeath(this); +} + +RadioService::ModuleClient::~ModuleClient() { + if (mClient != 0) { + IInterface::asBinder(mClient)->unlinkToDeath(this); + mClient.clear(); + } + if (mCallbackThread != 0) { + mCallbackThread->exit(); + } +} + +status_t RadioService::ModuleClient::dump(int fd __unused, + const Vector<String16>& args __unused) { + String8 result; + return NO_ERROR; +} + +void RadioService::ModuleClient::detach() { + ALOGV("%s", __FUNCTION__); + sp<ModuleClient> strongMe = this; + { + AutoMutex lock(mLock); + if (mClient != 0) { + IInterface::asBinder(mClient)->unlinkToDeath(this); + mClient.clear(); + } + } + sp<Module> module = mModule.promote(); + if (module == 0) { + return; + } + module->removeClient(this); +} + +radio_hal_band_config_t RadioService::ModuleClient::halConfig() const +{ + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); + return mConfig.band; +} + +const struct radio_tuner *RadioService::ModuleClient::getTuner() const +{ + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); + return mTuner; +} + +void RadioService::ModuleClient::setTuner(const struct radio_tuner *tuner) +{ + 
ALOGV("%s %p", __FUNCTION__, this); + + AutoMutex lock(mLock); + mTuner = tuner; + ALOGV("%s locked", __FUNCTION__); + + radio_hal_event_t event; + event.type = RADIO_EVENT_CONTROL; + event.status = 0; + event.on = mTuner != NULL; + mCallbackThread->sendEvent(&event); + ALOGV("%s DONE", __FUNCTION__); + +} + +status_t RadioService::ModuleClient::setConfiguration(const struct radio_band_config *config) +{ + AutoMutex lock(mLock); + status_t status = NO_ERROR; + ALOGV("%s locked", __FUNCTION__); + + if (mTuner != NULL) { + struct radio_hal_band_config halConfig; + halConfig = config->band; + status = (status_t)mTuner->set_configuration(mTuner, &halConfig); + if (status == NO_ERROR) { + mConfig = *config; + } + } else { + mConfig = *config; + status == INVALID_OPERATION; + } + + return status; +} + +status_t RadioService::ModuleClient::getConfiguration(struct radio_band_config *config) +{ + AutoMutex lock(mLock); + status_t status = NO_ERROR; + ALOGV("%s locked", __FUNCTION__); + + if (mTuner != NULL) { + struct radio_hal_band_config halConfig; + status = (status_t)mTuner->get_configuration(mTuner, &halConfig); + if (status == NO_ERROR) { + mConfig.band = halConfig; + } + } + *config = mConfig; + + return status; +} + +status_t RadioService::ModuleClient::setMute(bool mute) +{ + sp<Module> module; + { + Mutex::Autolock _l(mLock); + ALOGV("%s locked", __FUNCTION__); + if (mTuner == NULL || !mAudio) { + return INVALID_OPERATION; + } + module = mModule.promote(); + if (module == 0) { + return NO_INIT; + } + } + module->setMute(mute); + return NO_ERROR; +} + +status_t RadioService::ModuleClient::getMute(bool *mute) +{ + sp<Module> module; + { + Mutex::Autolock _l(mLock); + ALOGV("%s locked", __FUNCTION__); + module = mModule.promote(); + if (module == 0) { + return NO_INIT; + } + } + return module->getMute(mute); +} + +status_t RadioService::ModuleClient::scan(radio_direction_t direction, bool skipSubChannel) +{ + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); 
+ status_t status; + if (mTuner != NULL) { + status = (status_t)mTuner->scan(mTuner, direction, skipSubChannel); + } else { + status = INVALID_OPERATION; + } + return status; +} + +status_t RadioService::ModuleClient::step(radio_direction_t direction, bool skipSubChannel) +{ + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); + status_t status; + if (mTuner != NULL) { + status = (status_t)mTuner->step(mTuner, direction, skipSubChannel); + } else { + status = INVALID_OPERATION; + } + return status; +} + +status_t RadioService::ModuleClient::tune(unsigned int channel, unsigned int subChannel) +{ + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); + status_t status; + if (mTuner != NULL) { + status = (status_t)mTuner->tune(mTuner, channel, subChannel); + } else { + status = INVALID_OPERATION; + } + return status; +} + +status_t RadioService::ModuleClient::cancel() +{ + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); + status_t status; + if (mTuner != NULL) { + status = (status_t)mTuner->cancel(mTuner); + } else { + status = INVALID_OPERATION; + } + return status; +} + +status_t RadioService::ModuleClient::getProgramInformation(struct radio_program_info *info) +{ + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); + status_t status; + if (mTuner != NULL) { + status = (status_t)mTuner->get_program_information(mTuner, info); + } else { + status = INVALID_OPERATION; + } + return status; +} + +status_t RadioService::ModuleClient::hasControl(bool *hasControl) +{ + Mutex::Autolock lock(mLock); + ALOGV("%s locked", __FUNCTION__); + *hasControl = mTuner != NULL; + return NO_ERROR; +} + +void RadioService::ModuleClient::onCallbackEvent(const sp<IMemory>& eventMemory) +{ + if (eventMemory == 0 || eventMemory->pointer() == NULL) { + return; + } + + sp<IRadioClient> client; + { + AutoMutex lock(mLock); + ALOGV("%s locked", __FUNCTION__); + radio_event_t *event = (radio_event_t *)eventMemory->pointer(); + switch (event->type) { + case 
RADIO_EVENT_CONFIG: + mConfig.band = event->config.band; + event->config.region = mConfig.region; + break; + default: + break; + } + + client = mClient; + } + if (client != 0) { + client->onEvent(eventMemory); + } +} + + +void RadioService::ModuleClient::binderDied( + const wp<IBinder> &who __unused) { + ALOGW("client binder died for client %p", this); + detach(); +} + +}; // namespace android diff --git a/services/radio/RadioService.h b/services/radio/RadioService.h new file mode 100644 index 0000000..49feda6 --- /dev/null +++ b/services/radio/RadioService.h @@ -0,0 +1,211 @@ +/* + * Copyright (C) 2015 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_HARDWARE_RADIO_SERVICE_H +#define ANDROID_HARDWARE_RADIO_SERVICE_H + +#include <utils/Vector.h> +//#include <binder/AppOpsManager.h> +#include <binder/MemoryDealer.h> +#include <binder/BinderService.h> +#include <binder/IAppOpsCallback.h> +#include <radio/IRadioService.h> +#include <radio/IRadio.h> +#include <radio/IRadioClient.h> +#include <system/radio.h> +#include <hardware/radio.h> + +namespace android { + +class MemoryHeapBase; + +class RadioService : + public BinderService<RadioService>, + public BnRadioService +{ + friend class BinderService<RadioService>; + +public: + class ModuleClient; + class Module; + + static char const* getServiceName() { return "media.radio"; } + + RadioService(); + virtual ~RadioService(); + + // IRadioService + virtual status_t listModules(struct radio_properties *properties, + uint32_t *numModules); + + virtual status_t attach(radio_handle_t handle, + const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool withAudio, + sp<IRadio>& radio); + + virtual status_t onTransact(uint32_t code, const Parcel& data, + Parcel* reply, uint32_t flags); + + virtual status_t dump(int fd, const Vector<String16>& args); + + + class Module : public virtual RefBase { + public: + + Module(radio_hw_device* hwDevice, + struct radio_properties properties); + + virtual ~Module(); + + sp<ModuleClient> addClient(const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool audio); + + void removeClient(const sp<ModuleClient>& moduleClient); + + status_t setMute(bool mute); + + status_t getMute(bool *mute); + + virtual status_t dump(int fd, const Vector<String16>& args); + + const struct radio_hw_device *hwDevice() const { return mHwDevice; } + const struct radio_properties properties() const { return mProperties; } + const struct radio_band_config *getDefaultConfig() const ; + + private: + + void notifyDeviceConnection(bool connected, const char *address); + + Mutex mLock; // protects 
mModuleClients + const struct radio_hw_device *mHwDevice; // HAL hardware device + const struct radio_properties mProperties; // cached hardware module properties + Vector< sp<ModuleClient> > mModuleClients; // list of attached clients + bool mMute; // radio audio source state + // when unmuted, audio is routed to the + // output device selected for media use case. + }; // class Module + + class CallbackThread : public Thread { + public: + + CallbackThread(const wp<ModuleClient>& moduleClient); + + virtual ~CallbackThread(); + + + // Thread virtuals + virtual bool threadLoop(); + + // RefBase + virtual void onFirstRef(); + + void exit(); + + void sendEvent(radio_hal_event_t *halEvent); + sp<IMemory> prepareEvent(radio_hal_event_t *halEvent); + + private: + wp<ModuleClient> mModuleClient; // client module the thread belongs to + Condition mCallbackCond; // condition signaled when a new event is posted + Mutex mCallbackLock; // protects mEventQueue + Vector< sp<IMemory> > mEventQueue; // pending callback events + sp<MemoryDealer> mMemoryDealer; // shared memory for callback event + }; // class CallbackThread + + class ModuleClient : public BnRadio, + public IBinder::DeathRecipient { + public: + + ModuleClient(const sp<Module>& module, + const sp<IRadioClient>& client, + const struct radio_band_config *config, + bool audio); + + virtual ~ModuleClient(); + + // IRadio + virtual void detach(); + + virtual status_t setConfiguration(const struct radio_band_config *config); + + virtual status_t getConfiguration(struct radio_band_config *config); + + virtual status_t setMute(bool mute); + + virtual status_t getMute(bool *mute); + + virtual status_t scan(radio_direction_t direction, bool skipSubChannel); + + virtual status_t step(radio_direction_t direction, bool skipSubChannel); + + virtual status_t tune(unsigned int channel, unsigned int subChannel); + + virtual status_t cancel(); + + virtual status_t getProgramInformation(struct radio_program_info *info); + + virtual 
status_t hasControl(bool *hasControl); + + virtual status_t dump(int fd, const Vector<String16>& args); + + sp<IRadioClient> client() const { return mClient; } + wp<Module> module() const { return mModule; } + radio_hal_band_config_t halConfig() const; + sp<CallbackThread> callbackThread() const { return mCallbackThread; } + void setTuner(const struct radio_tuner *tuner); + const struct radio_tuner *getTuner() const; + bool audio() const { return mAudio; } + + void onCallbackEvent(const sp<IMemory>& event); + + virtual void onFirstRef(); + + + // IBinder::DeathRecipient implementation + virtual void binderDied(const wp<IBinder> &who); + + private: + + mutable Mutex mLock; // protects mClient, mConfig and mTuner + wp<Module> mModule; // The module this client is attached to + sp<IRadioClient> mClient; // event callback binder interface + radio_band_config_t mConfig; // current band configuration + sp<CallbackThread> mCallbackThread; // event callback thread + const bool mAudio; + const struct radio_tuner *mTuner; // HAL tuner interface. 
NULL indicates that + // this client does not have control on any + // tuner + }; // class ModuleClient + + + static void callback(radio_hal_event_t *halEvent, void *cookie); + +private: + + virtual void onFirstRef(); + + static void convertProperties(radio_properties_t *properties, + const radio_hal_properties_t *halProperties); + Mutex mServiceLock; // protects mModules + volatile int32_t mNextUniqueId; // for module ID allocation + DefaultKeyedVector< radio_handle_t, sp<Module> > mModules; +}; + +} // namespace android + +#endif // ANDROID_HARDWARE_RADIO_SERVICE_H diff --git a/tools/resampler_tools/Android.mk b/tools/resampler_tools/Android.mk index e8cbe39..b58e4cd 100644 --- a/tools/resampler_tools/Android.mk +++ b/tools/resampler_tools/Android.mk @@ -1,6 +1,6 @@ # Copyright 2005 The Android Open Source Project # -# Android.mk for resampler_tools +# Android.mk for resampler_tools # diff --git a/tools/resampler_tools/fir.cpp b/tools/resampler_tools/fir.cpp index 62eddca..fe4d212 100644 --- a/tools/resampler_tools/fir.cpp +++ b/tools/resampler_tools/fir.cpp @@ -66,19 +66,20 @@ static double kaiser(int k, int N, double beta) { static void usage(char* name) { fprintf(stderr, - "usage: %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings]" + "usage: %s [-h] [-d] [-D] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings]" " [-f {float|fixed|fixed16}] [-b beta] [-v dBFS] [-l lerp]\n" - " %s [-h] [-d] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings]" + " %s [-h] [-d] [-D] [-s sample_rate] [-c cut-off_frequency] [-n half_zero_crossings]" " [-f {float|fixed|fixed16}] [-b beta] [-v dBFS] -p M/N\n" " -h this help message\n" " -d debug, print comma-separated coefficient table\n" + " -D generate extra declarations\n" " -p generate poly-phase filter coefficients, with sample increment M/N\n" " -s sample rate (48000)\n" " -c cut-off frequency (20478)\n" " -n number of zero-crossings on one side (8)\n" " -l number of lerping 
bits (4)\n" " -m number of polyphases (related to -l, default 16)\n" - " -f output format, can be fixed-point or floating-point (fixed)\n" + " -f output format, can be fixed, fixed16, or float (fixed)\n" " -b kaiser window parameter beta (7.865 [-80dB])\n" " -v attenuation in dBFS (0)\n", name, name @@ -97,7 +98,8 @@ int main(int argc, char** argv) double Fs = 48000; double Fc = 20478; double atten = 1; - int format = 0; + int format = 0; // 0=fixed, 1=float + bool declarations = false; // in order to keep the errors associated with the linear // interpolation of the coefficients below the quantization error @@ -158,11 +160,14 @@ int main(int argc, char** argv) int M = 1 << 4; // number of phases for interpolation int ch; - while ((ch = getopt(argc, argv, ":hds:c:n:f:l:m:b:p:v:z:")) != -1) { + while ((ch = getopt(argc, argv, ":hds:c:n:f:l:m:b:p:v:z:D")) != -1) { switch (ch) { case 'd': debug = true; break; + case 'D': + declarations = true; + break; case 'p': if (sscanf(optarg, "%u/%u", &polyM, &polyN) != 2) { usage(argv[0]); @@ -225,24 +230,26 @@ int main(int argc, char** argv) for (int i = M-1 ; i; i>>=1, nz++); // generate the right half of the filter if (!debug) { - printf("// cmd-line: "); - for (int i=1 ; i<argc ; i++) { - printf("%s ", argv[i]); + printf("// cmd-line:"); + for (int i=0 ; i<argc ; i++) { + printf(" %s", argv[i]); } printf("\n"); - if (!polyphase) { - printf("const int32_t RESAMPLE_FIR_SIZE = %d;\n", N); - printf("const int32_t RESAMPLE_FIR_INT_PHASES = %d;\n", M); - printf("const int32_t RESAMPLE_FIR_NUM_COEF = %d;\n", nzc); - } else { - printf("const int32_t RESAMPLE_FIR_SIZE = %d;\n", 2*nzc*polyN); - printf("const int32_t RESAMPLE_FIR_NUM_COEF = %d;\n", 2*nzc); - } - if (!format) { - printf("const int32_t RESAMPLE_FIR_COEF_BITS = %d;\n", nc); + if (declarations) { + if (!polyphase) { + printf("const int32_t RESAMPLE_FIR_SIZE = %d;\n", N); + printf("const int32_t RESAMPLE_FIR_INT_PHASES = %d;\n", M); + printf("const int32_t 
RESAMPLE_FIR_NUM_COEF = %d;\n", nzc); + } else { + printf("const int32_t RESAMPLE_FIR_SIZE = %d;\n", 2*nzc*polyN); + printf("const int32_t RESAMPLE_FIR_NUM_COEF = %d;\n", 2*nzc); + } + if (!format) { + printf("const int32_t RESAMPLE_FIR_COEF_BITS = %d;\n", nc); + } + printf("\n"); + printf("static %s resampleFIR[] = {", !format ? "int32_t" : "float"); } - printf("\n"); - printf("static %s resampleFIR[] = {", !format ? "int32_t" : "float"); } if (!polyphase) { @@ -260,12 +267,15 @@ int main(int argc, char** argv) if (!format) { int64_t yi = toint(y, 1ULL<<(nc-1)); if (nc > 16) { - printf("0x%08x, ", int32_t(yi)); + printf("0x%08x,", int32_t(yi)); } else { - printf("0x%04x, ", int32_t(yi)&0xffff); + printf("0x%04x,", int32_t(yi)&0xffff); } } else { - printf("%.9g%s ", y, debug ? "," : "f,"); + printf("%.9g%s", y, debug ? "," : "f,"); + } + if (j != nzc-1) { + printf(" "); } } } @@ -283,23 +293,22 @@ int main(int argc, char** argv) if (!format) { int64_t yi = toint(y, 1ULL<<(nc-1)); if (nc > 16) { - printf("0x%08x, ", int32_t(yi)); + printf("0x%08x,", int32_t(yi)); } else { - printf("0x%04x, ", int32_t(yi)&0xffff); + printf("0x%04x,", int32_t(yi)&0xffff); } } else { - printf("%.9g%s", y, debug ? "" : "f"); + printf("%.9g%s", y, debug ? "," : "f,"); } - if (debug && (i==nzc-1)) { - } else { - printf(", "); + if (i != nzc-1) { + printf(" "); } } } } - if (!debug) { + if (!debug && declarations) { printf("\n};"); } printf("\n"); |