-rw-r--r--  camera/Android.mk | 3
-rw-r--r--  camera/CameraBase.cpp | 2
-rw-r--r--  camera/CameraMetadata.cpp | 2
-rw-r--r--  camera/ICameraService.cpp | 60
-rw-r--r--  camera/IProCameraCallbacks.cpp | 125
-rw-r--r--  camera/IProCameraUser.cpp | 324
-rw-r--r--  camera/ProCamera.cpp | 436
-rw-r--r--  camera/tests/Android.mk | 1
-rw-r--r--  camera/tests/ProCameraTests.cpp | 1284
-rw-r--r--  drm/mediadrm/plugins/clearkey/DrmPlugin.cpp | 5
-rw-r--r--  drm/mediadrm/plugins/clearkey/DrmPlugin.h | 3
-rw-r--r--  drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp | 14
-rw-r--r--  drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h | 3
-rw-r--r--  include/camera/CameraMetadata.h | 4
-rw-r--r--  include/camera/ICameraService.h | 10
-rw-r--r--  include/camera/IProCameraCallbacks.h | 71
-rw-r--r--  include/camera/IProCameraUser.h | 100
-rw-r--r--  include/camera/ProCamera.h | 319
-rw-r--r--  include/media/AudioRecord.h | 88
-rw-r--r--  include/media/AudioTrack.h | 50
-rw-r--r--  include/media/IDrm.h | 3
-rw-r--r--  include/media/IResourceManagerClient.h | 47
-rw-r--r--  include/media/IResourceManagerService.h | 66
-rw-r--r--  include/media/MediaResource.h | 51
-rw-r--r--  include/media/MediaResourcePolicy.h | 45
-rw-r--r--  include/media/stagefright/ACodec.h | 2
-rw-r--r--  include/media/stagefright/MediaClock.h | 24
-rw-r--r--  include/media/stagefright/MediaSync.h | 239
-rw-r--r--  include/media/stagefright/OMXCodec.h | 2
-rw-r--r--  media/libmedia/Android.mk | 4
-rw-r--r--  media/libmedia/AudioEffect.cpp | 2
-rw-r--r--  media/libmedia/AudioParameter.cpp | 2
-rw-r--r--  media/libmedia/AudioPolicy.cpp | 2
-rw-r--r--  media/libmedia/AudioRecord.cpp | 65
-rw-r--r--  media/libmedia/AudioSystem.cpp | 2
-rw-r--r--  media/libmedia/AudioTrack.cpp | 41
-rw-r--r--  media/libmedia/IAudioFlinger.cpp | 2
-rw-r--r--  media/libmedia/IAudioFlingerClient.cpp | 2
-rw-r--r--  media/libmedia/IAudioPolicyService.cpp | 2
-rw-r--r--  media/libmedia/IAudioPolicyServiceClient.cpp | 2
-rw-r--r--  media/libmedia/IAudioRecord.cpp | 2
-rw-r--r--  media/libmedia/IAudioTrack.cpp | 2
-rw-r--r--  media/libmedia/IDrm.cpp | 14
-rw-r--r--  media/libmedia/IDrmClient.cpp | 2
-rw-r--r--  media/libmedia/IEffect.cpp | 2
-rw-r--r--  media/libmedia/IEffectClient.cpp | 2
-rw-r--r--  media/libmedia/IMediaCodecList.cpp | 2
-rw-r--r--  media/libmedia/IMediaDeathNotifier.cpp | 2
-rw-r--r--  media/libmedia/IMediaHTTPConnection.cpp | 3
-rw-r--r--  media/libmedia/IMediaHTTPService.cpp | 3
-rw-r--r--  media/libmedia/IMediaLogService.cpp | 2
-rw-r--r--  media/libmedia/IMediaMetadataRetriever.cpp | 2
-rw-r--r--  media/libmedia/IMediaPlayer.cpp | 2
-rw-r--r--  media/libmedia/IMediaPlayerClient.cpp | 2
-rw-r--r--  media/libmedia/IMediaPlayerService.cpp | 2
-rw-r--r--  media/libmedia/IMediaRecorder.cpp | 2
-rw-r--r--  media/libmedia/IMediaRecorderClient.cpp | 2
-rw-r--r--  media/libmedia/IRemoteDisplay.cpp | 2
-rw-r--r--  media/libmedia/IRemoteDisplayClient.cpp | 2
-rw-r--r--  media/libmedia/IResourceManagerClient.cpp | 70
-rw-r--r--  media/libmedia/IResourceManagerService.cpp | 169
-rw-r--r--  media/libmedia/MediaResource.cpp | 65
-rw-r--r--  media/libmedia/MediaResourcePolicy.cpp | 49
-rw-r--r--  media/libmedia/StringArray.cpp | 2
-rw-r--r--  media/libmedia/Visualizer.cpp | 2
-rw-r--r--  media/libmedia/mediametadataretriever.cpp | 2
-rw-r--r--  media/libmedia/mediaplayer.cpp | 2
-rw-r--r--  media/libmedia/mediarecorder.cpp | 2
-rw-r--r--  media/libmediaplayerservice/Drm.cpp | 6
-rw-r--r--  media/libmediaplayerservice/Drm.h | 3
-rw-r--r--  media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp | 28
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 8
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 12
-rw-r--r--  media/libstagefright/ACodec.cpp | 10
-rw-r--r--  media/libstagefright/Android.mk | 1
-rw-r--r--  media/libstagefright/MediaClock.cpp | 15
-rw-r--r--  media/libstagefright/MediaSync.cpp | 541
-rw-r--r--  media/libstagefright/OMXCodec.cpp | 10
-rw-r--r--  media/libstagefright/codecs/g711/dec/SoftG711.cpp | 7
-rw-r--r--  media/libstagefright/codecs/g711/dec/SoftG711.h | 3
-rw-r--r--  media/libstagefright/data/media_codecs_google_audio.xml | 4
-rw-r--r--  media/libstagefright/httplive/LiveSession.cpp | 980
-rw-r--r--  media/libstagefright/httplive/LiveSession.h | 92
-rw-r--r--  media/libstagefright/httplive/PlaylistFetcher.cpp | 630
-rw-r--r--  media/libstagefright/httplive/PlaylistFetcher.h | 39
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.h | 3
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.cpp | 180
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.h | 11
-rw-r--r--  media/mediaserver/Android.mk | 2
-rw-r--r--  media/ndk/NdkMediaDrm.cpp | 5
-rw-r--r--  services/audioflinger/Android.mk | 4
-rw-r--r--  services/audioflinger/AudioFlinger.cpp | 31
-rw-r--r--  services/audioflinger/AudioFlinger.h | 54
-rw-r--r--  services/audioflinger/AudioHwDevice.cpp | 94
-rw-r--r--  services/audioflinger/AudioHwDevice.h | 88
-rw-r--r--  services/audioflinger/AudioMixer.cpp | 6
-rw-r--r--  services/audioflinger/AudioStreamOut.cpp | 117
-rw-r--r--  services/audioflinger/AudioStreamOut.h | 83
-rw-r--r--  services/audioflinger/PlaybackTracks.h | 5
-rw-r--r--  services/audioflinger/SpdifStreamOut.cpp | 166
-rw-r--r--  services/audioflinger/SpdifStreamOut.h | 107
-rw-r--r--  services/audioflinger/Threads.cpp | 37
-rw-r--r--  services/audioflinger/Tracks.cpp | 77
-rw-r--r--  services/audiopolicy/service/AudioPolicyService.cpp | 2
-rw-r--r--  services/camera/libcameraservice/Android.mk | 1
-rw-r--r--  services/camera/libcameraservice/CameraFlashlight.cpp | 13
-rw-r--r--  services/camera/libcameraservice/CameraService.cpp | 66
-rw-r--r--  services/camera/libcameraservice/CameraService.h | 54
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.cpp | 3
-rw-r--r--  services/camera/libcameraservice/api_pro/ProCamera2Client.cpp | 445
-rw-r--r--  services/camera/libcameraservice/api_pro/ProCamera2Client.h | 124
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.cpp | 1
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.h | 2
-rw-r--r--  services/camera/libcameraservice/common/CameraModule.cpp | 53
-rw-r--r--  services/camera/libcameraservice/common/CameraModule.h | 17
-rw-r--r--  services/camera/libcameraservice/device1/CameraHardwareInterface.h | 2
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStream.cpp | 35
-rw-r--r--  services/mediaresourcemanager/Android.mk | 18
-rw-r--r--  services/mediaresourcemanager/ResourceManagerService.cpp | 345
-rw-r--r--  services/mediaresourcemanager/ResourceManagerService.h | 106
-rw-r--r--  services/mediaresourcemanager/test/Android.mk | 25
-rw-r--r--  services/mediaresourcemanager/test/ResourceManagerService_test.cpp | 464
122 files changed, 4865 insertions, 4294 deletions
diff --git a/camera/Android.mk b/camera/Android.mk
index df7279f..4c4700b 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -30,13 +30,10 @@ LOCAL_SRC_FILES:= \
ICameraServiceListener.cpp \
ICameraRecordingProxy.cpp \
ICameraRecordingProxyListener.cpp \
- IProCameraUser.cpp \
- IProCameraCallbacks.cpp \
camera2/ICameraDeviceUser.cpp \
camera2/ICameraDeviceCallbacks.cpp \
camera2/CaptureRequest.cpp \
camera2/OutputConfiguration.cpp \
- ProCamera.cpp \
CameraBase.cpp \
CameraUtils.cpp \
VendorTagDescriptor.cpp
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index 65a1a47..5d50aa8 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -29,7 +29,6 @@
#include <camera/ICameraService.h>
// needed to instantiate
-#include <camera/ProCamera.h>
#include <camera/Camera.h>
#include <system/camera_metadata.h>
@@ -217,7 +216,6 @@ status_t CameraBase<TCam, TCamTraits>::removeServiceListener(
return cs->removeListener(listener);
}
-template class CameraBase<ProCamera>;
template class CameraBase<Camera>;
} // namespace android
diff --git a/camera/CameraMetadata.cpp b/camera/CameraMetadata.cpp
index 043437f..e216d26 100644
--- a/camera/CameraMetadata.cpp
+++ b/camera/CameraMetadata.cpp
@@ -74,7 +74,7 @@ CameraMetadata::~CameraMetadata() {
clear();
}
-const camera_metadata_t* CameraMetadata::getAndLock() {
+const camera_metadata_t* CameraMetadata::getAndLock() const {
mLocked = true;
return mBuffer;
}
diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp
index a75cb48..63c82cc 100644
--- a/camera/ICameraService.cpp
+++ b/camera/ICameraService.cpp
@@ -2,16 +2,16 @@
**
** Copyright 2008, The Android Open Source Project
**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
**
-** http://www.apache.org/licenses/LICENSE-2.0
+** http://www.apache.org/licenses/LICENSE-2.0
**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
** limitations under the License.
*/
@@ -29,8 +29,6 @@
#include <camera/ICameraService.h>
#include <camera/ICameraServiceListener.h>
-#include <camera/IProCameraUser.h>
-#include <camera/IProCameraCallbacks.h>
#include <camera/ICamera.h>
#include <camera/ICameraClient.h>
#include <camera/camera2/ICameraDeviceUser.h>
@@ -223,28 +221,6 @@ public:
return reply.readInt32();
}
- // connect to camera service (pro client)
- virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb, int cameraId,
- const String16 &clientPackageName, int clientUid,
- /*out*/
- sp<IProCameraUser>& device)
- {
- Parcel data, reply;
- data.writeInterfaceToken(ICameraService::getInterfaceDescriptor());
- data.writeStrongBinder(IInterface::asBinder(cameraCb));
- data.writeInt32(cameraId);
- data.writeString16(clientPackageName);
- data.writeInt32(clientUid);
- remote()->transact(BnCameraService::CONNECT_PRO, data, &reply);
-
- if (readExceptionCode(reply)) return -EPROTO;
- status_t status = reply.readInt32();
- if (reply.readInt32() != 0) {
- device = interface_cast<IProCameraUser>(reply.readStrongBinder());
- }
- return status;
- }
-
// connect to camera service (android.hardware.camera2.CameraDevice)
virtual status_t connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
@@ -404,26 +380,6 @@ status_t BnCameraService::onTransact(
}
return NO_ERROR;
} break;
- case CONNECT_PRO: {
- CHECK_INTERFACE(ICameraService, data, reply);
- sp<IProCameraCallbacks> cameraClient =
- interface_cast<IProCameraCallbacks>(data.readStrongBinder());
- int32_t cameraId = data.readInt32();
- const String16 clientName = data.readString16();
- int32_t clientUid = data.readInt32();
- sp<IProCameraUser> camera;
- status_t status = connectPro(cameraClient, cameraId,
- clientName, clientUid, /*out*/camera);
- reply->writeNoException();
- reply->writeInt32(status);
- if (camera != NULL) {
- reply->writeInt32(1);
- reply->writeStrongBinder(IInterface::asBinder(camera));
- } else {
- reply->writeInt32(0);
- }
- return NO_ERROR;
- } break;
case CONNECT_DEVICE: {
CHECK_INTERFACE(ICameraService, data, reply);
sp<ICameraDeviceCallbacks> cameraClient =
diff --git a/camera/IProCameraCallbacks.cpp b/camera/IProCameraCallbacks.cpp
deleted file mode 100644
index bd3d420..0000000
--- a/camera/IProCameraCallbacks.cpp
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
-**
-** Copyright 2013, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "IProCameraCallbacks"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-
-#include <binder/Parcel.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/Surface.h>
-#include <utils/Mutex.h>
-
-#include <camera/IProCameraCallbacks.h>
-
-#include "camera/CameraMetadata.h"
-
-namespace android {
-
-enum {
- NOTIFY_CALLBACK = IBinder::FIRST_CALL_TRANSACTION,
- LOCK_STATUS_CHANGED,
- RESULT_RECEIVED,
-};
-
-class BpProCameraCallbacks: public BpInterface<IProCameraCallbacks>
-{
-public:
- BpProCameraCallbacks(const sp<IBinder>& impl)
- : BpInterface<IProCameraCallbacks>(impl)
- {
- }
-
- // generic callback from camera service to app
- void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
- {
- ALOGV("notifyCallback");
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
- data.writeInt32(msgType);
- data.writeInt32(ext1);
- data.writeInt32(ext2);
- remote()->transact(NOTIFY_CALLBACK, data, &reply, IBinder::FLAG_ONEWAY);
- }
-
- void onLockStatusChanged(LockStatus newLockStatus) {
- ALOGV("onLockStatusChanged");
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
- data.writeInt32(newLockStatus);
- remote()->transact(LOCK_STATUS_CHANGED, data, &reply,
- IBinder::FLAG_ONEWAY);
- }
-
- void onResultReceived(int32_t requestId, camera_metadata* result) {
- ALOGV("onResultReceived");
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraCallbacks::getInterfaceDescriptor());
- data.writeInt32(requestId);
- CameraMetadata::writeToParcel(data, result);
- remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY);
- }
-};
-
-IMPLEMENT_META_INTERFACE(ProCameraCallbacks,
- "android.hardware.IProCameraCallbacks");
-
-// ----------------------------------------------------------------------
-
-status_t BnProCameraCallbacks::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- ALOGV("onTransact - code = %d", code);
- switch(code) {
- case NOTIFY_CALLBACK: {
- ALOGV("NOTIFY_CALLBACK");
- CHECK_INTERFACE(IProCameraCallbacks, data, reply);
- int32_t msgType = data.readInt32();
- int32_t ext1 = data.readInt32();
- int32_t ext2 = data.readInt32();
- notifyCallback(msgType, ext1, ext2);
- return NO_ERROR;
- } break;
- case LOCK_STATUS_CHANGED: {
- ALOGV("LOCK_STATUS_CHANGED");
- CHECK_INTERFACE(IProCameraCallbacks, data, reply);
- LockStatus newLockStatus
- = static_cast<LockStatus>(data.readInt32());
- onLockStatusChanged(newLockStatus);
- return NO_ERROR;
- } break;
- case RESULT_RECEIVED: {
- ALOGV("RESULT_RECEIVED");
- CHECK_INTERFACE(IProCameraCallbacks, data, reply);
- int32_t requestId = data.readInt32();
- camera_metadata_t *result = NULL;
- CameraMetadata::readFromParcel(data, &result);
- onResultReceived(requestId, result);
- return NO_ERROR;
- break;
- }
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
-
diff --git a/camera/IProCameraUser.cpp b/camera/IProCameraUser.cpp
deleted file mode 100644
index 9bd7597..0000000
--- a/camera/IProCameraUser.cpp
+++ /dev/null
@@ -1,324 +0,0 @@
-/*
-**
-** Copyright 2013, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-// #define LOG_NDEBUG 0
-#define LOG_TAG "IProCameraUser"
-#include <utils/Log.h>
-#include <stdint.h>
-#include <sys/types.h>
-#include <binder/Parcel.h>
-#include <camera/IProCameraUser.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <gui/Surface.h>
-#include "camera/CameraMetadata.h"
-
-namespace android {
-
-enum {
- DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
- CONNECT,
- EXCLUSIVE_TRY_LOCK,
- EXCLUSIVE_LOCK,
- EXCLUSIVE_UNLOCK,
- HAS_EXCLUSIVE_LOCK,
- SUBMIT_REQUEST,
- CANCEL_REQUEST,
- DELETE_STREAM,
- CREATE_STREAM,
- CREATE_DEFAULT_REQUEST,
- GET_CAMERA_INFO,
-};
-
-class BpProCameraUser: public BpInterface<IProCameraUser>
-{
-public:
- BpProCameraUser(const sp<IBinder>& impl)
- : BpInterface<IProCameraUser>(impl)
- {
- }
-
- // disconnect from camera service
- void disconnect()
- {
- ALOGV("disconnect");
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- remote()->transact(DISCONNECT, data, &reply);
- reply.readExceptionCode();
- }
-
- virtual status_t connect(const sp<IProCameraCallbacks>& cameraClient)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- data.writeStrongBinder(IInterface::asBinder(cameraClient));
- remote()->transact(CONNECT, data, &reply);
- return reply.readInt32();
- }
-
- /* Shared ProCameraUser */
-
- virtual status_t exclusiveTryLock()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- remote()->transact(EXCLUSIVE_TRY_LOCK, data, &reply);
- return reply.readInt32();
- }
- virtual status_t exclusiveLock()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- remote()->transact(EXCLUSIVE_LOCK, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t exclusiveUnlock()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- remote()->transact(EXCLUSIVE_UNLOCK, data, &reply);
- return reply.readInt32();
- }
-
- virtual bool hasExclusiveLock()
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- remote()->transact(HAS_EXCLUSIVE_LOCK, data, &reply);
- return !!reply.readInt32();
- }
-
- virtual int submitRequest(camera_metadata_t* metadata, bool streaming)
- {
-
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
-
- // arg0+arg1
- CameraMetadata::writeToParcel(data, metadata);
-
- // arg2 = streaming (bool)
- data.writeInt32(streaming);
-
- remote()->transact(SUBMIT_REQUEST, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t cancelRequest(int requestId)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- data.writeInt32(requestId);
-
- remote()->transact(CANCEL_REQUEST, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t deleteStream(int streamId)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- data.writeInt32(streamId);
-
- remote()->transact(DELETE_STREAM, data, &reply);
- return reply.readInt32();
- }
-
- virtual status_t createStream(int width, int height, int format,
- const sp<IGraphicBufferProducer>& bufferProducer,
- /*out*/
- int* streamId)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- data.writeInt32(width);
- data.writeInt32(height);
- data.writeInt32(format);
-
- sp<IBinder> b(IInterface::asBinder(bufferProducer));
- data.writeStrongBinder(b);
-
- remote()->transact(CREATE_STREAM, data, &reply);
-
- int sId = reply.readInt32();
- if (streamId) {
- *streamId = sId;
- }
- return reply.readInt32();
- }
-
- // Create a request object from a template.
- virtual status_t createDefaultRequest(int templateId,
- /*out*/
- camera_metadata** request)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- data.writeInt32(templateId);
- remote()->transact(CREATE_DEFAULT_REQUEST, data, &reply);
- CameraMetadata::readFromParcel(reply, /*out*/request);
- return reply.readInt32();
- }
-
-
- virtual status_t getCameraInfo(int cameraId, camera_metadata** info)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IProCameraUser::getInterfaceDescriptor());
- data.writeInt32(cameraId);
- remote()->transact(GET_CAMERA_INFO, data, &reply);
- CameraMetadata::readFromParcel(reply, /*out*/info);
- return reply.readInt32();
- }
-
-
-private:
-
-
-};
-
-IMPLEMENT_META_INTERFACE(ProCameraUser, "android.hardware.IProCameraUser");
-
-// ----------------------------------------------------------------------
-
-status_t BnProCameraUser::onTransact(
- uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
-{
- switch(code) {
- case DISCONNECT: {
- ALOGV("DISCONNECT");
- CHECK_INTERFACE(IProCameraUser, data, reply);
- disconnect();
- reply->writeNoException();
- return NO_ERROR;
- } break;
- case CONNECT: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- sp<IProCameraCallbacks> cameraClient =
- interface_cast<IProCameraCallbacks>(data.readStrongBinder());
- reply->writeInt32(connect(cameraClient));
- return NO_ERROR;
- } break;
-
- /* Shared ProCameraUser */
- case EXCLUSIVE_TRY_LOCK: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- reply->writeInt32(exclusiveTryLock());
- return NO_ERROR;
- } break;
- case EXCLUSIVE_LOCK: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- reply->writeInt32(exclusiveLock());
- return NO_ERROR;
- } break;
- case EXCLUSIVE_UNLOCK: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- reply->writeInt32(exclusiveUnlock());
- return NO_ERROR;
- } break;
- case HAS_EXCLUSIVE_LOCK: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- reply->writeInt32(hasExclusiveLock());
- return NO_ERROR;
- } break;
- case SUBMIT_REQUEST: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- camera_metadata_t* metadata;
- CameraMetadata::readFromParcel(data, /*out*/&metadata);
-
- // arg2 = streaming (bool)
- bool streaming = data.readInt32();
-
- // return code: requestId (int32)
- reply->writeInt32(submitRequest(metadata, streaming));
-
- return NO_ERROR;
- } break;
- case CANCEL_REQUEST: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- int requestId = data.readInt32();
- reply->writeInt32(cancelRequest(requestId));
- return NO_ERROR;
- } break;
- case DELETE_STREAM: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- int streamId = data.readInt32();
- reply->writeInt32(deleteStream(streamId));
- return NO_ERROR;
- } break;
- case CREATE_STREAM: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
- int width, height, format;
-
- width = data.readInt32();
- height = data.readInt32();
- format = data.readInt32();
-
- sp<IGraphicBufferProducer> bp =
- interface_cast<IGraphicBufferProducer>(data.readStrongBinder());
-
- int streamId = -1;
- status_t ret;
- ret = createStream(width, height, format, bp, &streamId);
-
- reply->writeInt32(streamId);
- reply->writeInt32(ret);
-
- return NO_ERROR;
- } break;
-
- case CREATE_DEFAULT_REQUEST: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
-
- int templateId = data.readInt32();
-
- camera_metadata_t* request = NULL;
- status_t ret;
- ret = createDefaultRequest(templateId, &request);
-
- CameraMetadata::writeToParcel(*reply, request);
- reply->writeInt32(ret);
-
- free_camera_metadata(request);
-
- return NO_ERROR;
- } break;
- case GET_CAMERA_INFO: {
- CHECK_INTERFACE(IProCameraUser, data, reply);
-
- int cameraId = data.readInt32();
-
- camera_metadata_t* info = NULL;
- status_t ret;
- ret = getCameraInfo(cameraId, &info);
-
- CameraMetadata::writeToParcel(*reply, info);
- reply->writeInt32(ret);
-
- free_camera_metadata(info);
-
- return NO_ERROR;
- } break;
- default:
- return BBinder::onTransact(code, data, reply, flags);
- }
-}
-
-// ----------------------------------------------------------------------------
-
-}; // namespace android
diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp
deleted file mode 100644
index 48f8e8e..0000000
--- a/camera/ProCamera.cpp
+++ /dev/null
@@ -1,436 +0,0 @@
-/*
-**
-** Copyright (C) 2013, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ProCamera"
-#include <utils/Log.h>
-#include <utils/threads.h>
-#include <utils/Mutex.h>
-
-#include <binder/IPCThreadState.h>
-#include <binder/IServiceManager.h>
-#include <binder/IMemory.h>
-
-#include <camera/ProCamera.h>
-#include <camera/IProCameraUser.h>
-#include <camera/IProCameraCallbacks.h>
-
-#include <gui/IGraphicBufferProducer.h>
-
-#include <system/camera_metadata.h>
-
-namespace android {
-
-sp<ProCamera> ProCamera::connect(int cameraId)
-{
- return CameraBaseT::connect(cameraId, String16(),
- ICameraService::USE_CALLING_UID);
-}
-
-ProCamera::ProCamera(int cameraId)
- : CameraBase(cameraId)
-{
-}
-
-CameraTraits<ProCamera>::TCamConnectService CameraTraits<ProCamera>::fnConnectService =
- &ICameraService::connectPro;
-
-ProCamera::~ProCamera()
-{
-
-}
-
-/* IProCameraUser's implementation */
-
-// callback from camera service
-void ProCamera::notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2)
-{
- return CameraBaseT::notifyCallback(msgType, ext1, ext2);
-}
-
-void ProCamera::onLockStatusChanged(
- IProCameraCallbacks::LockStatus newLockStatus)
-{
- ALOGV("%s: newLockStatus = %d", __FUNCTION__, newLockStatus);
-
- sp<ProCameraListener> listener;
- {
- Mutex::Autolock _l(mLock);
- listener = mListener;
- }
- if (listener != NULL) {
- switch (newLockStatus) {
- case IProCameraCallbacks::LOCK_ACQUIRED:
- listener->onLockAcquired();
- break;
- case IProCameraCallbacks::LOCK_RELEASED:
- listener->onLockReleased();
- break;
- case IProCameraCallbacks::LOCK_STOLEN:
- listener->onLockStolen();
- break;
- default:
- ALOGE("%s: Unknown lock status: %d",
- __FUNCTION__, newLockStatus);
- }
- }
-}
-
-void ProCamera::onResultReceived(int32_t requestId, camera_metadata* result) {
- ALOGV("%s: requestId = %d, result = %p", __FUNCTION__, requestId, result);
-
- sp<ProCameraListener> listener;
- {
- Mutex::Autolock _l(mLock);
- listener = mListener;
- }
-
- CameraMetadata tmp(result);
-
- // Unblock waitForFrame(id) callers
- {
- Mutex::Autolock al(mWaitMutex);
- mMetadataReady = true;
- mLatestMetadata = tmp; // make copy
- mWaitCondition.broadcast();
- }
-
- result = tmp.release();
-
- if (listener != NULL) {
- listener->onResultReceived(requestId, result);
- } else {
- free_camera_metadata(result);
- }
-
-}
-
-status_t ProCamera::exclusiveTryLock()
-{
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- return c->exclusiveTryLock();
-}
-status_t ProCamera::exclusiveLock()
-{
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- return c->exclusiveLock();
-}
-status_t ProCamera::exclusiveUnlock()
-{
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- return c->exclusiveUnlock();
-}
-bool ProCamera::hasExclusiveLock()
-{
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- return c->hasExclusiveLock();
-}
-
-// Note that the callee gets a copy of the metadata.
-int ProCamera::submitRequest(const struct camera_metadata* metadata,
- bool streaming)
-{
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- return c->submitRequest(const_cast<struct camera_metadata*>(metadata),
- streaming);
-}
-
-status_t ProCamera::cancelRequest(int requestId)
-{
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- return c->cancelRequest(requestId);
-}
-
-status_t ProCamera::deleteStream(int streamId)
-{
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- status_t s = c->deleteStream(streamId);
-
- mStreams.removeItem(streamId);
-
- return s;
-}
-
-status_t ProCamera::createStream(int width, int height, int format,
- const sp<Surface>& surface,
- /*out*/
- int* streamId)
-{
- *streamId = -1;
-
- ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height,
- format);
-
- if (surface == 0) {
- return BAD_VALUE;
- }
-
- return createStream(width, height, format,
- surface->getIGraphicBufferProducer(),
- streamId);
-}
-
-status_t ProCamera::createStream(int width, int height, int format,
- const sp<IGraphicBufferProducer>& bufferProducer,
- /*out*/
- int* streamId) {
- *streamId = -1;
-
- ALOGV("%s: createStreamT %dx%d (fmt=0x%x)", __FUNCTION__, width, height,
- format);
-
- if (bufferProducer == 0) {
- return BAD_VALUE;
- }
-
- sp <IProCameraUser> c = mCamera;
- status_t stat = c->createStream(width, height, format, bufferProducer,
- streamId);
-
- if (stat == OK) {
- StreamInfo s(*streamId);
-
- mStreams.add(*streamId, s);
- }
-
- return stat;
-}
-
-status_t ProCamera::createStreamCpu(int width, int height, int format,
- int heapCount,
- /*out*/
- sp<CpuConsumer>* cpuConsumer,
- int* streamId) {
- return createStreamCpu(width, height, format, heapCount,
- /*synchronousMode*/true,
- cpuConsumer, streamId);
-}
-
-status_t ProCamera::createStreamCpu(int width, int height, int format,
- int heapCount,
- bool synchronousMode,
- /*out*/
- sp<CpuConsumer>* cpuConsumer,
- int* streamId)
-{
- ALOGV("%s: createStreamW %dx%d (fmt=0x%x)", __FUNCTION__, width, height,
- format);
-
- *cpuConsumer = NULL;
-
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- sp<IGraphicBufferProducer> producer;
- sp<IGraphicBufferConsumer> consumer;
- BufferQueue::createBufferQueue(&producer, &consumer);
- sp<CpuConsumer> cc = new CpuConsumer(consumer, heapCount
- /*, synchronousMode*/);
- cc->setName(String8("ProCamera::mCpuConsumer"));
-
- sp<Surface> stc = new Surface(producer);
-
- status_t s = createStream(width, height, format,
- stc->getIGraphicBufferProducer(),
- streamId);
-
- if (s != OK) {
- ALOGE("%s: Failure to create stream %dx%d (fmt=0x%x)", __FUNCTION__,
- width, height, format);
- return s;
- }
-
- sp<ProFrameListener> frameAvailableListener =
- new ProFrameListener(this, *streamId);
-
- getStreamInfo(*streamId).cpuStream = true;
- getStreamInfo(*streamId).cpuConsumer = cc;
- getStreamInfo(*streamId).synchronousMode = synchronousMode;
- getStreamInfo(*streamId).stc = stc;
- // for lifetime management
- getStreamInfo(*streamId).frameAvailableListener = frameAvailableListener;
-
- cc->setFrameAvailableListener(frameAvailableListener);
-
- *cpuConsumer = cc;
-
- return s;
-}
-
-camera_metadata* ProCamera::getCameraInfo(int cameraId) {
- ALOGV("%s: cameraId = %d", __FUNCTION__, cameraId);
-
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NULL;
-
- camera_metadata* ptr = NULL;
- status_t status = c->getCameraInfo(cameraId, &ptr);
-
- if (status != OK) {
- ALOGE("%s: Failed to get camera info, error = %d", __FUNCTION__, status);
- }
-
- return ptr;
-}
-
-status_t ProCamera::createDefaultRequest(int templateId,
- camera_metadata** request) const {
- ALOGV("%s: templateId = %d", __FUNCTION__, templateId);
-
- sp <IProCameraUser> c = mCamera;
- if (c == 0) return NO_INIT;
-
- return c->createDefaultRequest(templateId, request);
-}
-
-void ProCamera::onFrameAvailable(int streamId) {
- ALOGV("%s: streamId = %d", __FUNCTION__, streamId);
-
- sp<ProCameraListener> listener = mListener;
- StreamInfo& stream = getStreamInfo(streamId);
-
- if (listener.get() != NULL) {
- listener->onFrameAvailable(streamId, stream.cpuConsumer);
- }
-
- // Unblock waitForFrame(id) callers
- {
- Mutex::Autolock al(mWaitMutex);
- getStreamInfo(streamId).frameReady++;
- mWaitCondition.broadcast();
- }
-}
-
-int ProCamera::waitForFrameBuffer(int streamId) {
- status_t stat = BAD_VALUE;
- Mutex::Autolock al(mWaitMutex);
-
- StreamInfo& si = getStreamInfo(streamId);
-
- if (si.frameReady > 0) {
- int numFrames = si.frameReady;
- si.frameReady = 0;
- return numFrames;
- } else {
- while (true) {
- stat = mWaitCondition.waitRelative(mWaitMutex,
- mWaitTimeout);
- if (stat != OK) {
- ALOGE("%s: Error while waiting for frame buffer: %d",
- __FUNCTION__, stat);
- return stat;
- }
-
- if (si.frameReady > 0) {
- int numFrames = si.frameReady;
- si.frameReady = 0;
- return numFrames;
- }
- // else it was some other stream that got unblocked
- }
- }
-
- return stat;
-}
-
-int ProCamera::dropFrameBuffer(int streamId, int count) {
- StreamInfo& si = getStreamInfo(streamId);
-
- if (!si.cpuStream) {
- return BAD_VALUE;
- } else if (count < 0) {
- return BAD_VALUE;
- }
-
- if (!si.synchronousMode) {
- ALOGW("%s: No need to drop frames on asynchronous streams,"
- " as asynchronous mode only keeps 1 latest frame around.",
- __FUNCTION__);
- return BAD_VALUE;
- }
-
- int numDropped = 0;
- for (int i = 0; i < count; ++i) {
- CpuConsumer::LockedBuffer buffer;
- if (si.cpuConsumer->lockNextBuffer(&buffer) != OK) {
- break;
- }
-
- si.cpuConsumer->unlockBuffer(buffer);
- numDropped++;
- }
-
- return numDropped;
-}
-
-status_t ProCamera::waitForFrameMetadata() {
- status_t stat = BAD_VALUE;
- Mutex::Autolock al(mWaitMutex);
-
- if (mMetadataReady) {
- return OK;
- } else {
- while (true) {
- stat = mWaitCondition.waitRelative(mWaitMutex,
- mWaitTimeout);
-
- if (stat != OK) {
- ALOGE("%s: Error while waiting for metadata: %d",
- __FUNCTION__, stat);
- return stat;
- }
-
- if (mMetadataReady) {
- mMetadataReady = false;
- return OK;
- }
- // else it was some other stream or metadata
- }
- }
-
- return stat;
-}
-
-CameraMetadata ProCamera::consumeFrameMetadata() {
- Mutex::Autolock al(mWaitMutex);
-
- // Destructive: Subsequent calls return empty metadatas
- CameraMetadata tmp = mLatestMetadata;
- mLatestMetadata.clear();
-
- return tmp;
-}
-
-ProCamera::StreamInfo& ProCamera::getStreamInfo(int streamId) {
- return mStreams.editValueFor(streamId);
-}
-
-}; // namespace android
diff --git a/camera/tests/Android.mk b/camera/tests/Android.mk
index 2db4c14..5d37f9e 100644
--- a/camera/tests/Android.mk
+++ b/camera/tests/Android.mk
@@ -17,7 +17,6 @@ include $(CLEAR_VARS)
LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
LOCAL_SRC_FILES:= \
- ProCameraTests.cpp \
VendorTagDescriptorTests.cpp
LOCAL_SHARED_LIBRARIES := \
diff --git a/camera/tests/ProCameraTests.cpp b/camera/tests/ProCameraTests.cpp
deleted file mode 100644
index 24b2327..0000000
--- a/camera/tests/ProCameraTests.cpp
+++ /dev/null
@@ -1,1284 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <gtest/gtest.h>
-#include <iostream>
-
-#include <binder/IPCThreadState.h>
-#include <utils/Thread.h>
-
-#include "Camera.h"
-#include "ProCamera.h"
-#include <utils/Vector.h>
-#include <utils/Mutex.h>
-#include <utils/Condition.h>
-
-#include <gui/SurfaceComposerClient.h>
-#include <gui/Surface.h>
-
-#include <system/camera_metadata.h>
-#include <hardware/camera2.h> // for CAMERA2_TEMPLATE_PREVIEW only
-#include <camera/CameraMetadata.h>
-
-#include <camera/ICameraServiceListener.h>
-
-namespace android {
-namespace camera2 {
-namespace tests {
-namespace client {
-
-#define CAMERA_ID 0
-#define TEST_DEBUGGING 0
-
-#define TEST_LISTENER_TIMEOUT 1000000000 // 1 second listener timeout
-#define TEST_FORMAT HAL_PIXEL_FORMAT_Y16 //TODO: YUY2 instead
-
-#define TEST_FORMAT_MAIN HAL_PIXEL_FORMAT_Y8
-#define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16
-
-// defaults for display "test"
-#define TEST_DISPLAY_FORMAT HAL_PIXEL_FORMAT_Y8
-#define TEST_DISPLAY_WIDTH 320
-#define TEST_DISPLAY_HEIGHT 240
-
-#define TEST_CPU_FRAME_COUNT 2
-#define TEST_CPU_HEAP_COUNT 5
-
-#define TEST_FRAME_PROCESSING_DELAY_US 200000 // 200 ms
-
-#if TEST_DEBUGGING
-#define dout std::cerr
-#else
-#define dout if (0) std::cerr
-#endif
-
-#define EXPECT_OK(x) EXPECT_EQ(OK, (x))
-#define ASSERT_OK(x) ASSERT_EQ(OK, (x))
-
-class ProCameraTest;
-
-struct ServiceListener : public BnCameraServiceListener {
-
- ServiceListener() :
- mLatestStatus(STATUS_UNKNOWN),
- mPrevStatus(STATUS_UNKNOWN)
- {
- }
-
- void onStatusChanged(Status status, int32_t cameraId) {
- dout << "On status changed: 0x" << std::hex
- << (unsigned int) status << " cameraId " << cameraId
- << std::endl;
-
- Mutex::Autolock al(mMutex);
-
- mLatestStatus = status;
- mCondition.broadcast();
- }
-
- void onTorchStatusChanged(TorchStatus status, const String16& cameraId) {
- dout << "On torch status changed: 0x" << std::hex
- << (unsigned int) status << " cameraId " << cameraId.string()
- << std::endl;
- }
-
- status_t waitForStatusChange(Status& newStatus) {
- Mutex::Autolock al(mMutex);
-
- if (mLatestStatus != mPrevStatus) {
- newStatus = mLatestStatus;
- mPrevStatus = mLatestStatus;
- return OK;
- }
-
- status_t stat = mCondition.waitRelative(mMutex,
- TEST_LISTENER_TIMEOUT);
-
- if (stat == OK) {
- newStatus = mLatestStatus;
- mPrevStatus = mLatestStatus;
- }
-
- return stat;
- }
-
- Condition mCondition;
- Mutex mMutex;
-
- Status mLatestStatus;
- Status mPrevStatus;
-};
-
-enum ProEvent {
- UNKNOWN,
- ACQUIRED,
- RELEASED,
- STOLEN,
- FRAME_RECEIVED,
- RESULT_RECEIVED,
-};
-
-inline int ProEvent_Mask(ProEvent e) {
- return (1 << static_cast<int>(e));
-}
-
-typedef Vector<ProEvent> EventList;
-
-class ProCameraTestThread : public Thread
-{
-public:
- ProCameraTestThread() {
- }
-
- virtual bool threadLoop() {
- mProc = ProcessState::self();
- mProc->startThreadPool();
-
- IPCThreadState *ptr = IPCThreadState::self();
-
- ptr->joinThreadPool();
-
- return false;
- }
-
- sp<ProcessState> mProc;
-};
-
-class ProCameraTestListener : public ProCameraListener {
-
-public:
- static const int EVENT_MASK_ALL = 0xFFFFFFFF;
-
- ProCameraTestListener() {
- mEventMask = EVENT_MASK_ALL;
- mDropFrames = false;
- }
-
- status_t WaitForEvent() {
- Mutex::Autolock cal(mConditionMutex);
-
- {
- Mutex::Autolock al(mListenerMutex);
-
- if (mProEventList.size() > 0) {
- return OK;
- }
- }
-
- return mListenerCondition.waitRelative(mConditionMutex,
- TEST_LISTENER_TIMEOUT);
- }
-
- /* Read events into out. Existing queue is flushed */
- void ReadEvents(EventList& out) {
- Mutex::Autolock al(mListenerMutex);
-
- for (size_t i = 0; i < mProEventList.size(); ++i) {
- out.push(mProEventList[i]);
- }
-
- mProEventList.clear();
- }
-
- /**
- * Dequeue 1 event from the event queue.
- * Returns UNKNOWN if queue is empty
- */
- ProEvent ReadEvent() {
- Mutex::Autolock al(mListenerMutex);
-
- if (mProEventList.size() == 0) {
- return UNKNOWN;
- }
-
- ProEvent ev = mProEventList[0];
- mProEventList.removeAt(0);
-
- return ev;
- }
-
- void SetEventMask(int eventMask) {
- Mutex::Autolock al(mListenerMutex);
- mEventMask = eventMask;
- }
-
- // Automatically acquire/release frames as they are available
- void SetDropFrames(bool dropFrames) {
- Mutex::Autolock al(mListenerMutex);
- mDropFrames = dropFrames;
- }
-
-private:
- void QueueEvent(ProEvent ev) {
- bool eventAdded = false;
- {
- Mutex::Autolock al(mListenerMutex);
-
- // Drop events not part of mask
- if (ProEvent_Mask(ev) & mEventMask) {
- mProEventList.push(ev);
- eventAdded = true;
- }
- }
-
- if (eventAdded) {
- mListenerCondition.broadcast();
- }
- }
-
-protected:
-
- //////////////////////////////////////////////////
- ///////// ProCameraListener //////////////////////
- //////////////////////////////////////////////////
-
-
- // Lock has been acquired. Write operations now available.
- virtual void onLockAcquired() {
- QueueEvent(ACQUIRED);
- }
- // Lock has been released with exclusiveUnlock
- virtual void onLockReleased() {
- QueueEvent(RELEASED);
- }
-
- // Lock has been stolen by another client.
- virtual void onLockStolen() {
- QueueEvent(STOLEN);
- }
-
- // Lock free.
- virtual void onTriggerNotify(int32_t ext1, int32_t ext2, int32_t ext3) {
-
- dout << "Trigger notify: " << ext1 << " " << ext2
- << " " << ext3 << std::endl;
- }
-
- virtual void onFrameAvailable(int streamId,
- const sp<CpuConsumer>& consumer) {
-
- QueueEvent(FRAME_RECEIVED);
-
- Mutex::Autolock al(mListenerMutex);
- if (mDropFrames) {
- CpuConsumer::LockedBuffer buf;
- status_t ret;
-
- if (OK == (ret = consumer->lockNextBuffer(&buf))) {
-
- dout << "Frame received on streamId = " << streamId <<
- ", dataPtr = " << (void*)buf.data <<
- ", timestamp = " << buf.timestamp << std::endl;
-
- EXPECT_OK(consumer->unlockBuffer(buf));
- }
- } else {
- dout << "Frame received on streamId = " << streamId << std::endl;
- }
- }
-
- virtual void onResultReceived(int32_t requestId,
- camera_metadata* request) {
- dout << "Result received requestId = " << requestId
- << ", requestPtr = " << (void*)request << std::endl;
- QueueEvent(RESULT_RECEIVED);
- free_camera_metadata(request);
- }
-
- virtual void notify(int32_t msg, int32_t ext1, int32_t ext2) {
- dout << "Notify received: msg " << std::hex << msg
- << ", ext1: " << std::hex << ext1 << ", ext2: " << std::hex << ext2
- << std::endl;
- }
-
- Vector<ProEvent> mProEventList;
- Mutex mListenerMutex;
- Mutex mConditionMutex;
- Condition mListenerCondition;
- int mEventMask;
- bool mDropFrames;
-};
-
-class ProCameraTest : public ::testing::Test {
-
-public:
- ProCameraTest() {
- char* displaySecsEnv = getenv("TEST_DISPLAY_SECS");
- if (displaySecsEnv != NULL) {
- mDisplaySecs = atoi(displaySecsEnv);
- if (mDisplaySecs < 0) {
- mDisplaySecs = 0;
- }
- } else {
- mDisplaySecs = 0;
- }
-
- char* displayFmtEnv = getenv("TEST_DISPLAY_FORMAT");
- if (displayFmtEnv != NULL) {
- mDisplayFmt = FormatFromString(displayFmtEnv);
- } else {
- mDisplayFmt = TEST_DISPLAY_FORMAT;
- }
-
- char* displayWidthEnv = getenv("TEST_DISPLAY_WIDTH");
- if (displayWidthEnv != NULL) {
- mDisplayW = atoi(displayWidthEnv);
- if (mDisplayW < 0) {
- mDisplayW = 0;
- }
- } else {
- mDisplayW = TEST_DISPLAY_WIDTH;
- }
-
- char* displayHeightEnv = getenv("TEST_DISPLAY_HEIGHT");
- if (displayHeightEnv != NULL) {
- mDisplayH = atoi(displayHeightEnv);
- if (mDisplayH < 0) {
- mDisplayH = 0;
- }
- } else {
- mDisplayH = TEST_DISPLAY_HEIGHT;
- }
- }
-
- static void SetUpTestCase() {
- // Binder Thread Pool Initialization
- mTestThread = new ProCameraTestThread();
- mTestThread->run("ProCameraTestThread");
- }
-
- virtual void SetUp() {
- mCamera = ProCamera::connect(CAMERA_ID);
- ASSERT_NE((void*)NULL, mCamera.get());
-
- mListener = new ProCameraTestListener();
- mCamera->setListener(mListener);
- }
-
- virtual void TearDown() {
- ASSERT_NE((void*)NULL, mCamera.get());
- mCamera->disconnect();
- }
-
-protected:
- sp<ProCamera> mCamera;
- sp<ProCameraTestListener> mListener;
-
- static sp<Thread> mTestThread;
-
- int mDisplaySecs;
- int mDisplayFmt;
- int mDisplayW;
- int mDisplayH;
-
- sp<SurfaceComposerClient> mComposerClient;
- sp<SurfaceControl> mSurfaceControl;
-
- sp<SurfaceComposerClient> mDepthComposerClient;
- sp<SurfaceControl> mDepthSurfaceControl;
-
- int getSurfaceWidth() {
- return 512;
- }
- int getSurfaceHeight() {
- return 512;
- }
-
- void createOnScreenSurface(sp<Surface>& surface) {
- mComposerClient = new SurfaceComposerClient;
- ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());
-
- mSurfaceControl = mComposerClient->createSurface(
- String8("ProCameraTest StreamingImage Surface"),
- getSurfaceWidth(), getSurfaceHeight(),
- PIXEL_FORMAT_RGB_888, 0);
-
- mSurfaceControl->setPosition(0, 0);
-
- ASSERT_TRUE(mSurfaceControl != NULL);
- ASSERT_TRUE(mSurfaceControl->isValid());
-
- SurfaceComposerClient::openGlobalTransaction();
- ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
- ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
- SurfaceComposerClient::closeGlobalTransaction();
-
- sp<ANativeWindow> window = mSurfaceControl->getSurface();
- surface = mSurfaceControl->getSurface();
-
- ASSERT_NE((void*)NULL, surface.get());
- }
-
- void createDepthOnScreenSurface(sp<Surface>& surface) {
- mDepthComposerClient = new SurfaceComposerClient;
- ASSERT_EQ(NO_ERROR, mDepthComposerClient->initCheck());
-
- mDepthSurfaceControl = mDepthComposerClient->createSurface(
- String8("ProCameraTest StreamingImage Surface"),
- getSurfaceWidth(), getSurfaceHeight(),
- PIXEL_FORMAT_RGB_888, 0);
-
- mDepthSurfaceControl->setPosition(640, 0);
-
- ASSERT_TRUE(mDepthSurfaceControl != NULL);
- ASSERT_TRUE(mDepthSurfaceControl->isValid());
-
- SurfaceComposerClient::openGlobalTransaction();
- ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->setLayer(0x7FFFFFFF));
- ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->show());
- SurfaceComposerClient::closeGlobalTransaction();
-
- sp<ANativeWindow> window = mDepthSurfaceControl->getSurface();
- surface = mDepthSurfaceControl->getSurface();
-
- ASSERT_NE((void*)NULL, surface.get());
- }
-
- template <typename T>
- static bool ExistsItem(T needle, T* array, size_t count) {
- if (!array) {
- return false;
- }
-
- for (size_t i = 0; i < count; ++i) {
- if (array[i] == needle) {
- return true;
- }
- }
- return false;
- }
-
-
- static int FormatFromString(const char* str) {
- std::string s(str);
-
-#define CMP_STR(x, y) \
- if (s == #x) return HAL_PIXEL_FORMAT_ ## y;
-#define CMP_STR_SAME(x) CMP_STR(x, x)
-
- CMP_STR_SAME( Y16);
- CMP_STR_SAME( Y8);
- CMP_STR_SAME( YV12);
- CMP_STR(NV16, YCbCr_422_SP);
- CMP_STR(NV21, YCrCb_420_SP);
- CMP_STR(YUY2, YCbCr_422_I);
- CMP_STR(RAW, RAW16);
- CMP_STR(RGBA, RGBA_8888);
-
- std::cerr << "Unknown format string " << str << std::endl;
- return -1;
-
- }
-
- /**
- * Creating a streaming request for these output streams from a template,
- * and submit it
- */
- void createSubmitRequestForStreams(int32_t* streamIds, size_t count, int requestCount=-1) {
-
- ASSERT_NE((void*)NULL, streamIds);
- ASSERT_LT(0u, count);
-
- camera_metadata_t *requestTmp = NULL;
- EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
- /*out*/&requestTmp));
- ASSERT_NE((void*)NULL, requestTmp);
- CameraMetadata request(requestTmp);
-
- // set the output streams. default is empty
-
- uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
- request.update(tag, streamIds, count);
-
- requestTmp = request.release();
-
- if (requestCount < 0) {
- EXPECT_OK(mCamera->submitRequest(requestTmp, /*streaming*/true));
- } else {
- for (int i = 0; i < requestCount; ++i) {
- EXPECT_OK(mCamera->submitRequest(requestTmp,
- /*streaming*/false));
- }
- }
- request.acquire(requestTmp);
- }
-};
-
-sp<Thread> ProCameraTest::mTestThread;
-
-TEST_F(ProCameraTest, AvailableFormats) {
- if (HasFatalFailure()) {
- return;
- }
-
- CameraMetadata staticInfo = mCamera->getCameraInfo(CAMERA_ID);
- ASSERT_FALSE(staticInfo.isEmpty());
-
- uint32_t tag = static_cast<uint32_t>(ANDROID_SCALER_AVAILABLE_FORMATS);
- EXPECT_TRUE(staticInfo.exists(tag));
- camera_metadata_entry_t entry = staticInfo.find(tag);
-
- EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YV12,
- entry.data.i32, entry.count));
- EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YCrCb_420_SP,
- entry.data.i32, entry.count));
-}
-
-// test around exclusiveTryLock (immediate locking)
-TEST_F(ProCameraTest, LockingImmediate) {
-
- if (HasFatalFailure()) {
- return;
- }
-
- mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
- ProEvent_Mask(STOLEN) |
- ProEvent_Mask(RELEASED));
-
- EXPECT_FALSE(mCamera->hasExclusiveLock());
- EXPECT_EQ(OK, mCamera->exclusiveTryLock());
- // at this point we definitely have the lock
-
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(ACQUIRED, mListener->ReadEvent());
-
- EXPECT_TRUE(mCamera->hasExclusiveLock());
- EXPECT_EQ(OK, mCamera->exclusiveUnlock());
-
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(RELEASED, mListener->ReadEvent());
-
- EXPECT_FALSE(mCamera->hasExclusiveLock());
-}
-
-// test around exclusiveLock (locking at some future point in time)
-TEST_F(ProCameraTest, LockingAsynchronous) {
-
- if (HasFatalFailure()) {
- return;
- }
-
-
- mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
- ProEvent_Mask(STOLEN) |
- ProEvent_Mask(RELEASED));
-
- // TODO: Add another procamera that has a lock here.
- // then we can be test that the lock wont immediately be acquired
-
- EXPECT_FALSE(mCamera->hasExclusiveLock());
- EXPECT_EQ(OK, mCamera->exclusiveTryLock());
- // at this point we definitely have the lock
-
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(ACQUIRED, mListener->ReadEvent());
-
- EXPECT_TRUE(mCamera->hasExclusiveLock());
- EXPECT_EQ(OK, mCamera->exclusiveUnlock());
-
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(RELEASED, mListener->ReadEvent());
-
- EXPECT_FALSE(mCamera->hasExclusiveLock());
-}
-
-// Stream directly to the screen.
-TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) {
- if (HasFatalFailure()) {
- return;
- }
-
- sp<Surface> surface;
- if (mDisplaySecs > 0) {
- createOnScreenSurface(/*out*/surface);
- }
- else {
- dout << "Skipping, will not render to screen" << std::endl;
- return;
- }
-
- int depthStreamId = -1;
-
- sp<ServiceListener> listener = new ServiceListener();
- EXPECT_OK(ProCamera::addServiceListener(listener));
-
- ServiceListener::Status currentStatus;
-
- // when subscribing a new listener,
- // we immediately get a callback to the current status
- while (listener->waitForStatusChange(/*out*/currentStatus) != OK);
- EXPECT_EQ(ServiceListener::STATUS_PRESENT, currentStatus);
-
- dout << "Will now stream and resume infinitely..." << std::endl;
- while (true) {
-
- if (currentStatus == ServiceListener::STATUS_PRESENT) {
-
- ASSERT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt,
- surface,
- &depthStreamId));
- EXPECT_NE(-1, depthStreamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
-
- int32_t streams[] = { depthStreamId };
- ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(
- streams,
- /*count*/1));
- }
-
- ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN;
-
- // TODO: maybe check for getch every once in a while?
- while (listener->waitForStatusChange(/*out*/stat) != OK);
-
- if (currentStatus != stat) {
- if (stat == ServiceListener::STATUS_PRESENT) {
- dout << "Reconnecting to camera" << std::endl;
- mCamera = ProCamera::connect(CAMERA_ID);
- } else if (stat == ServiceListener::STATUS_NOT_AVAILABLE) {
- dout << "Disconnecting from camera" << std::endl;
- mCamera->disconnect();
- } else if (stat == ServiceListener::STATUS_NOT_PRESENT) {
- dout << "Camera unplugged" << std::endl;
- mCamera = NULL;
- } else {
- dout << "Unknown status change "
- << std::hex << stat << std::endl;
- }
-
- currentStatus = stat;
- }
- }
-
- EXPECT_OK(ProCamera::removeServiceListener(listener));
- EXPECT_OK(mCamera->deleteStream(depthStreamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-// Stream directly to the screen.
-TEST_F(ProCameraTest, DISABLED_StreamingImageDual) {
- if (HasFatalFailure()) {
- return;
- }
- sp<Surface> surface;
- sp<Surface> depthSurface;
- if (mDisplaySecs > 0) {
- createOnScreenSurface(/*out*/surface);
- createDepthOnScreenSurface(/*out*/depthSurface);
- }
-
- int streamId = -1;
- EXPECT_OK(mCamera->createStream(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, surface, &streamId));
- EXPECT_NE(-1, streamId);
-
- int depthStreamId = -1;
- EXPECT_OK(mCamera->createStream(/*width*/320, /*height*/240,
- TEST_FORMAT_DEPTH, depthSurface, &depthStreamId));
- EXPECT_NE(-1, depthStreamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
- /*
- */
- /* iterate in a loop submitting requests every frame.
- * what kind of requests doesnt really matter, just whatever.
- */
-
- // it would probably be better to use CameraMetadata from camera service.
- camera_metadata_t *request = NULL;
- EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
- /*out*/&request));
- EXPECT_NE((void*)NULL, request);
-
- /*FIXME: dont need this later, at which point the above should become an
- ASSERT_NE*/
- if(request == NULL) request = allocate_camera_metadata(10, 100);
-
- // set the output streams to just this stream ID
-
- // wow what a verbose API.
- int32_t allStreams[] = { streamId, depthStreamId };
- // IMPORTANT. bad things will happen if its not a uint8.
- size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]);
- camera_metadata_entry_t entry;
- uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
- int find = find_camera_metadata_entry(request, tag, &entry);
- if (find == -ENOENT) {
- if (add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/streamCount) != OK) {
- camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
- ASSERT_OK(append_camera_metadata(tmp, request));
- free_camera_metadata(request);
- request = tmp;
-
- ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/streamCount));
- }
- } else {
- ASSERT_OK(update_camera_metadata_entry(request, entry.index,
- &allStreams, /*data_count*/streamCount, &entry));
- }
-
- EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
- dout << "will sleep now for " << mDisplaySecs << std::endl;
- sleep(mDisplaySecs);
-
- free_camera_metadata(request);
-
- for (size_t i = 0; i < streamCount; ++i) {
- EXPECT_OK(mCamera->deleteStream(allStreams[i]));
- }
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, CpuConsumerSingle) {
- if (HasFatalFailure()) {
- return;
- }
-
- mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
- ProEvent_Mask(STOLEN) |
- ProEvent_Mask(RELEASED) |
- ProEvent_Mask(FRAME_RECEIVED));
- mListener->SetDropFrames(true);
-
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
- TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(ACQUIRED, mListener->ReadEvent());
- /* iterate in a loop submitting requests every frame.
- * what kind of requests doesnt really matter, just whatever.
- */
-
- // it would probably be better to use CameraMetadata from camera service.
- camera_metadata_t *request = NULL;
- EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
- /*out*/&request));
- EXPECT_NE((void*)NULL, request);
-
- /*FIXME: dont need this later, at which point the above should become an
- ASSERT_NE*/
- if(request == NULL) request = allocate_camera_metadata(10, 100);
-
- // set the output streams to just this stream ID
-
- int32_t allStreams[] = { streamId };
- camera_metadata_entry_t entry;
- uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
- int find = find_camera_metadata_entry(request, tag, &entry);
- if (find == -ENOENT) {
- if (add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/1) != OK) {
- camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
- ASSERT_OK(append_camera_metadata(tmp, request));
- free_camera_metadata(request);
- request = tmp;
-
- ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/1));
- }
- } else {
- ASSERT_OK(update_camera_metadata_entry(request, entry.index,
- &allStreams, /*data_count*/1, &entry));
- }
-
- EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
- // Consume a couple of frames
- for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent());
- }
-
- // Done: clean up
- free_camera_metadata(request);
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, CpuConsumerDual) {
- if (HasFatalFailure()) {
- return;
- }
-
- mListener->SetEventMask(ProEvent_Mask(FRAME_RECEIVED));
- mListener->SetDropFrames(true);
-
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- int depthStreamId = -1;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
- TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &consumer, &depthStreamId));
- EXPECT_NE(-1, depthStreamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
- /*
- */
- /* iterate in a loop submitting requests every frame.
- * what kind of requests doesnt really matter, just whatever.
- */
-
- // it would probably be better to use CameraMetadata from camera service.
- camera_metadata_t *request = NULL;
- EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
- /*out*/&request));
- EXPECT_NE((void*)NULL, request);
-
- if(request == NULL) request = allocate_camera_metadata(10, 100);
-
- // set the output streams to just this stream ID
-
- // wow what a verbose API.
- int32_t allStreams[] = { streamId, depthStreamId };
- size_t streamCount = 2;
- camera_metadata_entry_t entry;
- uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
- int find = find_camera_metadata_entry(request, tag, &entry);
- if (find == -ENOENT) {
- if (add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/streamCount) != OK) {
- camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
- ASSERT_OK(append_camera_metadata(tmp, request));
- free_camera_metadata(request);
- request = tmp;
-
- ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/streamCount));
- }
- } else {
- ASSERT_OK(update_camera_metadata_entry(request, entry.index,
- &allStreams, /*data_count*/streamCount, &entry));
- }
-
- EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
- // Consume a couple of frames
- for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
- // stream id 1
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent());
-
- // stream id 2
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(FRAME_RECEIVED, mListener->ReadEvent());
-
- //TODO: events should be a struct with some data like the stream id
- }
-
- // Done: clean up
- free_camera_metadata(request);
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, ResultReceiver) {
- if (HasFatalFailure()) {
- return;
- }
-
- mListener->SetEventMask(ProEvent_Mask(RESULT_RECEIVED));
- mListener->SetDropFrames(true);
- //FIXME: if this is run right after the previous test we get FRAME_RECEIVED
- // need to filter out events at read time
-
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
- /*
- */
- /* Iterate in a loop, submitting a request every frame.
- * The exact contents of the requests don't really matter here.
- */
-
- camera_metadata_t *request = NULL;
- EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
- /*out*/&request));
- EXPECT_NE((void*)NULL, request);
-
- /*FIXME*/
- if(request == NULL) request = allocate_camera_metadata(10, 100);
-
- // set the output streams to just this stream ID
-
- int32_t allStreams[] = { streamId };
- size_t streamCount = 1;
- camera_metadata_entry_t entry;
- uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
- int find = find_camera_metadata_entry(request, tag, &entry);
- if (find == -ENOENT) {
- if (add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/streamCount) != OK) {
- camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
- ASSERT_OK(append_camera_metadata(tmp, request));
- free_camera_metadata(request);
- request = tmp;
-
- ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
- /*data_count*/streamCount));
- }
- } else {
- ASSERT_OK(update_camera_metadata_entry(request, entry.index,
- &allStreams, /*data_count*/streamCount, &entry));
- }
-
- EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));
-
- // Consume a couple of results
- for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
- EXPECT_EQ(OK, mListener->WaitForEvent());
- EXPECT_EQ(RESULT_RECEIVED, mListener->ReadEvent());
- }
-
- // Done: clean up
- free_camera_metadata(request);
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-// FIXME: This is racy and sometimes fails on waitForFrameMetadata
-TEST_F(ProCameraTest, DISABLED_WaitForResult) {
- if (HasFatalFailure()) {
- return;
- }
-
- mListener->SetDropFrames(true);
-
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
-
- int32_t streams[] = { streamId };
- ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1));
-
- // Consume a couple of results
- for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
- EXPECT_OK(mCamera->waitForFrameMetadata());
- CameraMetadata meta = mCamera->consumeFrameMetadata();
- EXPECT_FALSE(meta.isEmpty());
- }
-
- // Done: clean up
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, WaitForSingleStreamBuffer) {
- if (HasFatalFailure()) {
- return;
- }
-
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
-
- int32_t streams[] = { streamId };
- ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
- /*requests*/TEST_CPU_FRAME_COUNT));
-
- // Consume a couple of results
- for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
- EXPECT_EQ(1, mCamera->waitForFrameBuffer(streamId));
-
- CpuConsumer::LockedBuffer buf;
- EXPECT_OK(consumer->lockNextBuffer(&buf));
-
- dout << "Buffer synchronously received on streamId = " << streamId <<
- ", dataPtr = " << (void*)buf.data <<
- ", timestamp = " << buf.timestamp << std::endl;
-
- EXPECT_OK(consumer->unlockBuffer(buf));
- }
-
- // Done: clean up
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-// FIXME: This is racy and sometimes fails on waitForFrameMetadata
-TEST_F(ProCameraTest, DISABLED_WaitForDualStreamBuffer) {
- if (HasFatalFailure()) {
- return;
- }
-
- const int REQUEST_COUNT = TEST_CPU_FRAME_COUNT * 10;
-
- // 15 fps
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- // 30 fps
- int depthStreamId = -1;
- sp<CpuConsumer> depthConsumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
- TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &depthConsumer, &depthStreamId));
- EXPECT_NE(-1, depthStreamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
-
- int32_t streams[] = { streamId, depthStreamId };
- ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/2,
- /*requests*/REQUEST_COUNT));
-
- int depthFrames = 0;
- int greyFrames = 0;
-
- // Consume two frames simultaneously. Unsynchronized by timestamps.
- for (int i = 0; i < REQUEST_COUNT; ++i) {
-
- // Exhaust event queue so it doesn't keep growing
- while (mListener->ReadEvent() != UNKNOWN);
-
- // Get the metadata
- EXPECT_OK(mCamera->waitForFrameMetadata());
- CameraMetadata meta = mCamera->consumeFrameMetadata();
- EXPECT_FALSE(meta.isEmpty());
-
- // Get the buffers
-
- EXPECT_EQ(1, mCamera->waitForFrameBuffer(depthStreamId));
-
- /**
- * Guaranteed to be able to consume the depth frame,
- * since we waited on it.
- */
- CpuConsumer::LockedBuffer depthBuffer;
- EXPECT_OK(depthConsumer->lockNextBuffer(&depthBuffer));
-
- dout << "Depth Buffer synchronously received on streamId = " <<
- streamId <<
- ", dataPtr = " << (void*)depthBuffer.data <<
- ", timestamp = " << depthBuffer.timestamp << std::endl;
-
- EXPECT_OK(depthConsumer->unlockBuffer(depthBuffer));
-
- depthFrames++;
-
-
- /** Consume Greyscale frames if there are any.
- * There may not be since it runs at half FPS */
- CpuConsumer::LockedBuffer greyBuffer;
- while (consumer->lockNextBuffer(&greyBuffer) == OK) {
-
- dout << "GRAY Buffer synchronously received on streamId = " <<
- streamId <<
- ", dataPtr = " << (void*)greyBuffer.data <<
- ", timestamp = " << greyBuffer.timestamp << std::endl;
-
- EXPECT_OK(consumer->unlockBuffer(greyBuffer));
-
- greyFrames++;
- }
- }
-
- dout << "Done, summary: depth frames " << std::dec << depthFrames
- << ", grey frames " << std::dec << greyFrames << std::endl;
-
- // Done: clean up
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesSync) {
- if (HasFatalFailure()) {
- return;
- }
-
- const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT;
-
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT,
- /*synchronousMode*/true, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
-
- int32_t streams[] = { streamId };
- ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
- /*requests*/NUM_REQUESTS));
-
- // Consume a couple of results
- for (int i = 0; i < NUM_REQUESTS; ++i) {
- int numFrames;
- EXPECT_TRUE((numFrames = mCamera->waitForFrameBuffer(streamId)) > 0);
-
- // Drop all but the newest framebuffer
- EXPECT_EQ(numFrames-1, mCamera->dropFrameBuffer(streamId, numFrames-1));
-
- dout << "Dropped " << (numFrames - 1) << " frames" << std::endl;
-
- // Skip the counter ahead, don't try to consume these frames again
- i += numFrames-1;
-
- // "Consume" the buffer
- CpuConsumer::LockedBuffer buf;
- EXPECT_OK(consumer->lockNextBuffer(&buf));
-
- dout << "Buffer synchronously received on streamId = " << streamId <<
- ", dataPtr = " << (void*)buf.data <<
- ", timestamp = " << buf.timestamp << std::endl;
-
- // Process at 10fps, stream is at 15fps.
- // This means we will definitely fill up the buffer queue with
- // extra buffers and need to drop them.
- usleep(TEST_FRAME_PROCESSING_DELAY_US);
-
- EXPECT_OK(consumer->unlockBuffer(buf));
- }
-
- // Done: clean up
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-TEST_F(ProCameraTest, WaitForSingleStreamBufferAndDropFramesAsync) {
- if (HasFatalFailure()) {
- return;
- }
-
- const int NUM_REQUESTS = 20 * TEST_CPU_FRAME_COUNT;
-
- int streamId = -1;
- sp<CpuConsumer> consumer;
- EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
- TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT,
- /*synchronousMode*/false, &consumer, &streamId));
- EXPECT_NE(-1, streamId);
-
- EXPECT_OK(mCamera->exclusiveTryLock());
-
- int32_t streams[] = { streamId };
- ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1,
- /*requests*/NUM_REQUESTS));
-
- uint64_t lastFrameNumber = 0;
- int numFrames;
-
- // Consume a couple of results
- int i;
- for (i = 0; i < NUM_REQUESTS && lastFrameNumber < NUM_REQUESTS; ++i) {
- EXPECT_LT(0, (numFrames = mCamera->waitForFrameBuffer(streamId)));
-
- dout << "Dropped " << (numFrames - 1) << " frames" << std::endl;
-
- // Skip the counter ahead, don't try to consume these frames again
- i += numFrames-1;
-
- // "Consume" the buffer
- CpuConsumer::LockedBuffer buf;
-
- EXPECT_EQ(OK, consumer->lockNextBuffer(&buf));
-
- lastFrameNumber = buf.frameNumber;
-
- dout << "Buffer asynchronously received on streamId = " << streamId <<
- ", dataPtr = " << (void*)buf.data <<
- ", timestamp = " << buf.timestamp <<
- ", framenumber = " << buf.frameNumber << std::endl;
-
- // Process at 10fps, stream is at 15fps.
- // This means we will definitely fill up the buffer queue with
- // extra buffers and need to drop them.
- usleep(TEST_FRAME_PROCESSING_DELAY_US);
-
- EXPECT_OK(consumer->unlockBuffer(buf));
- }
-
- dout << "Done after " << i << " iterations " << std::endl;
-
- // Done: clean up
- EXPECT_OK(mCamera->deleteStream(streamId));
- EXPECT_OK(mCamera->exclusiveUnlock());
-}
-
-
-
-//TODO: refactor into separate file
-TEST_F(ProCameraTest, ServiceListenersSubscribe) {
-
- ASSERT_EQ(4u, sizeof(ServiceListener::Status));
-
- sp<ServiceListener> listener = new ServiceListener();
-
- EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener));
- EXPECT_OK(ProCamera::addServiceListener(listener));
-
- EXPECT_EQ(ALREADY_EXISTS, ProCamera::addServiceListener(listener));
- EXPECT_OK(ProCamera::removeServiceListener(listener));
-
- EXPECT_EQ(BAD_VALUE, ProCamera::removeServiceListener(listener));
-}
-
-//TODO: refactor into separate file
-TEST_F(ProCameraTest, ServiceListenersFunctional) {
-
- sp<ServiceListener> listener = new ServiceListener();
-
- EXPECT_OK(ProCamera::addServiceListener(listener));
-
- sp<Camera> cam = Camera::connect(CAMERA_ID,
- /*clientPackageName*/String16(),
- -1);
- EXPECT_NE((void*)NULL, cam.get());
-
- ServiceListener::Status stat = ServiceListener::STATUS_UNKNOWN;
- EXPECT_OK(listener->waitForStatusChange(/*out*/stat));
-
- EXPECT_EQ(ServiceListener::STATUS_NOT_AVAILABLE, stat);
-
- if (cam.get()) {
- cam->disconnect();
- }
-
- EXPECT_OK(listener->waitForStatusChange(/*out*/stat));
- EXPECT_EQ(ServiceListener::STATUS_PRESENT, stat);
-
- EXPECT_OK(ProCamera::removeServiceListener(listener));
-}
-
-
-
-}
-}
-}
-}
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
index 96fca94..6b8c772 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.cpp
@@ -48,12 +48,13 @@ status_t DrmPlugin::getKeyRequest(
KeyType keyType,
const KeyedVector<String8, String8>& optionalParameters,
Vector<uint8_t>& request,
- String8& defaultUrl) {
+ String8& defaultUrl,
+ DrmPlugin::KeyRequestType *keyRequestType) {
UNUSED(optionalParameters);
if (keyType != kKeyType_Streaming) {
return android::ERROR_DRM_CANNOT_HANDLE;
}
-
+ *keyRequestType = DrmPlugin::kKeyRequestType_Initial;
sp<Session> session = mSessionLibrary->findSession(scope);
defaultUrl.clear();
return session->getKeyRequest(initData, initDataType, &request);
diff --git a/drm/mediadrm/plugins/clearkey/DrmPlugin.h b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
index 6139f1f..ba4aefe 100644
--- a/drm/mediadrm/plugins/clearkey/DrmPlugin.h
+++ b/drm/mediadrm/plugins/clearkey/DrmPlugin.h
@@ -54,7 +54,8 @@ public:
KeyType keyType,
const KeyedVector<String8, String8>& optionalParameters,
Vector<uint8_t>& request,
- String8& defaultUrl);
+ String8& defaultUrl,
+ DrmPlugin::KeyRequestType *keyRequestType);
virtual status_t provideKeyResponse(
const Vector<uint8_t>& scope,
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
index 7eac0a1..9b786c5 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.cpp
@@ -111,7 +111,8 @@ namespace android {
Vector<uint8_t> const &initData,
String8 const &mimeType, KeyType keyType,
KeyedVector<String8, String8> const &optionalParameters,
- Vector<uint8_t> &request, String8 &defaultUrl)
+ Vector<uint8_t> &request, String8 &defaultUrl,
+ KeyRequestType *keyRequestType)
{
Mutex::Autolock lock(mLock);
ALOGD("MockDrmPlugin::getKeyRequest(sessionId=%s, initData=%s, mimeType=%s"
@@ -149,6 +150,7 @@ namespace android {
// Properties used in mock test, set by cts test app returned from mock plugin
// byte[] mock-request -> request
// string mock-default-url -> defaultUrl
+ // string mock-key-request-type -> keyRequestType
index = mByteArrayProperties.indexOfKey(String8("mock-request"));
if (index < 0) {
@@ -165,6 +167,16 @@ namespace android {
} else {
defaultUrl = mStringProperties.valueAt(index);
}
+
+ index = mStringProperties.indexOfKey(String8("mock-keyRequestType"));
+ if (index < 0) {
+ ALOGD("Missing 'mock-keyRequestType' parameter for mock");
+ return BAD_VALUE;
+ } else {
+ *keyRequestType = static_cast<KeyRequestType>(
+ atoi(mStringProperties.valueAt(index).string()));
+ }
+
return OK;
}
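With this change the mock plugin fails getKeyRequest() unless the test has primed a request type through the same string-property mechanism already used for mock-request and mock-default-url. A hedged sketch of the test-side setup follows; the IDrm handle and the session/request arguments are assumed context, not part of this patch:

    // Sketch: prime the mock before asking for a key request (values illustrative).
    drm->setPropertyString(String8("mock-keyRequestType"),
                           String8::format("%d", (int)DrmPlugin::kKeyRequestType_Initial));

    Vector<uint8_t> request;
    String8 defaultUrl;
    DrmPlugin::KeyRequestType keyRequestType;
    status_t err = drm->getKeyRequest(sessionId, initData, mimeType,
                                      DrmPlugin::kKeyType_Streaming,
                                      optionalParameters,
                                      request, defaultUrl, &keyRequestType);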
diff --git a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
index d1d8058..d0f2ddb 100644
--- a/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
+++ b/drm/mediadrm/plugins/mock/MockDrmCryptoPlugin.h
@@ -62,7 +62,8 @@ namespace android {
Vector<uint8_t> const &initData,
String8 const &mimeType, KeyType keyType,
KeyedVector<String8, String8> const &optionalParameters,
- Vector<uint8_t> &request, String8 &defaultUrl);
+ Vector<uint8_t> &request, String8 &defaultUrl,
+ KeyRequestType *keyRequestType);
status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
Vector<uint8_t> const &response,
diff --git a/include/camera/CameraMetadata.h b/include/camera/CameraMetadata.h
index 1254d3c..953d711 100644
--- a/include/camera/CameraMetadata.h
+++ b/include/camera/CameraMetadata.h
@@ -56,7 +56,7 @@ class CameraMetadata {
* thread-safety, it simply prevents the camera_metadata_t pointer returned
* here from being accidentally invalidated by CameraMetadata operations.
*/
- const camera_metadata_t* getAndLock();
+ const camera_metadata_t* getAndLock() const;
/**
* Unlock the CameraMetadata for use again. After this unlock, the pointer
@@ -208,7 +208,7 @@ class CameraMetadata {
private:
camera_metadata_t *mBuffer;
- bool mLocked;
+ mutable bool mLocked;
/**
* Check if tag has a given type
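The const qualifier on getAndLock() is what forces mLocked to become mutable: the accessor is logically read-only but still has to record the lock state. A plausible sketch of that pairing (not the verbatim implementation):

    const camera_metadata_t* CameraMetadata::getAndLock() const {
        mLocked = true;     // legal in a const method only because mLocked is mutable
        return mBuffer;     // caller must hand this pointer back to unlock()
    }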
diff --git a/include/camera/ICameraService.h b/include/camera/ICameraService.h
index 194a646..c8d3d19 100644
--- a/include/camera/ICameraService.h
+++ b/include/camera/ICameraService.h
@@ -25,8 +25,6 @@ namespace android {
class ICamera;
class ICameraClient;
-class IProCameraUser;
-class IProCameraCallbacks;
class ICameraServiceListener;
class ICameraDeviceUser;
class ICameraDeviceCallbacks;
@@ -44,7 +42,6 @@ public:
GET_NUMBER_OF_CAMERAS = IBinder::FIRST_CALL_TRANSACTION,
GET_CAMERA_INFO,
CONNECT,
- CONNECT_PRO,
CONNECT_DEVICE,
ADD_LISTENER,
REMOVE_LISTENER,
@@ -105,13 +102,6 @@ public:
/*out*/
sp<ICamera>& device) = 0;
- virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb,
- int cameraId,
- const String16& clientPackageName,
- int clientUid,
- /*out*/
- sp<IProCameraUser>& device) = 0;
-
virtual status_t connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
diff --git a/include/camera/IProCameraCallbacks.h b/include/camera/IProCameraCallbacks.h
deleted file mode 100644
index e8abb89..0000000
--- a/include/camera/IProCameraCallbacks.h
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H
-#define ANDROID_HARDWARE_IPROCAMERA_CALLBACKS_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-#include <utils/Timers.h>
-#include <system/camera.h>
-
-struct camera_metadata;
-
-namespace android {
-
-class IProCameraCallbacks : public IInterface
-{
- /**
- * Keep up-to-date with IProCameraCallbacks.aidl in frameworks/base
- */
-public:
- DECLARE_META_INTERFACE(ProCameraCallbacks);
-
- virtual void notifyCallback(int32_t msgType,
- int32_t ext1,
- int32_t ext2) = 0;
-
- enum LockStatus {
- LOCK_ACQUIRED,
- LOCK_RELEASED,
- LOCK_STOLEN,
- };
-
- virtual void onLockStatusChanged(LockStatus newLockStatus) = 0;
-
- /** Missing by design: implementation is client-side in ProCamera.cpp **/
- // virtual void onBufferReceived(int streamId,
- // const CpuConsumer::LockedBuffer& buf);
- virtual void onResultReceived(int32_t requestId,
- camera_metadata* result) = 0;
-};
-
-// ----------------------------------------------------------------------------
-
-class BnProCameraCallbacks : public BnInterface<IProCameraCallbacks>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/include/camera/IProCameraUser.h b/include/camera/IProCameraUser.h
deleted file mode 100644
index 2ccc4d2..0000000
--- a/include/camera/IProCameraUser.h
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_IPROCAMERAUSER_H
-#define ANDROID_HARDWARE_IPROCAMERAUSER_H
-
-#include <utils/RefBase.h>
-#include <binder/IInterface.h>
-#include <binder/Parcel.h>
-#include <binder/IMemory.h>
-#include <utils/String8.h>
-#include <camera/IProCameraCallbacks.h>
-
-struct camera_metadata;
-
-namespace android {
-
-class IProCameraUserClient;
-class IGraphicBufferProducer;
-class Surface;
-
-class IProCameraUser: public IInterface
-{
- /**
- * Keep up-to-date with IProCameraUser.aidl in frameworks/base
- */
-public:
- DECLARE_META_INTERFACE(ProCameraUser);
-
- virtual void disconnect() = 0;
-
- // connect to the service, given a callbacks listener
- virtual status_t connect(const sp<IProCameraCallbacks>& callbacks)
- = 0;
-
- /**
- * Locking
- **/
- virtual status_t exclusiveTryLock() = 0;
- virtual status_t exclusiveLock() = 0;
- virtual status_t exclusiveUnlock() = 0;
-
- virtual bool hasExclusiveLock() = 0;
-
- /**
- * Request Handling
- **/
-
- // Note that the callee gets a copy of the metadata.
- virtual int submitRequest(struct camera_metadata* metadata,
- bool streaming = false) = 0;
- virtual status_t cancelRequest(int requestId) = 0;
-
- virtual status_t deleteStream(int streamId) = 0;
- virtual status_t createStream(
- int width, int height, int format,
- const sp<IGraphicBufferProducer>& bufferProducer,
- /*out*/
- int* streamId) = 0;
-
- // Create a request object from a template.
- virtual status_t createDefaultRequest(int templateId,
- /*out*/
- camera_metadata** request)
- = 0;
-
- // Get static camera metadata
- virtual status_t getCameraInfo(int cameraId,
- /*out*/
- camera_metadata** info) = 0;
-
-};
-
-// ----------------------------------------------------------------------------
-
-class BnProCameraUser: public BnInterface<IProCameraUser>
-{
-public:
- virtual status_t onTransact( uint32_t code,
- const Parcel& data,
- Parcel* reply,
- uint32_t flags = 0);
-};
-
-}; // namespace android
-
-#endif
diff --git a/include/camera/ProCamera.h b/include/camera/ProCamera.h
deleted file mode 100644
index e9b687a..0000000
--- a/include/camera/ProCamera.h
+++ /dev/null
@@ -1,319 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_HARDWARE_PRO_CAMERA_H
-#define ANDROID_HARDWARE_PRO_CAMERA_H
-
-#include <utils/Timers.h>
-#include <utils/KeyedVector.h>
-#include <gui/IGraphicBufferProducer.h>
-#include <system/camera.h>
-#include <camera/IProCameraCallbacks.h>
-#include <camera/IProCameraUser.h>
-#include <camera/Camera.h>
-#include <camera/CameraMetadata.h>
-#include <camera/ICameraService.h>
-#include <gui/CpuConsumer.h>
-
-#include <gui/Surface.h>
-
-#include <utils/Condition.h>
-#include <utils/Mutex.h>
-
-#include <camera/CameraBase.h>
-
-struct camera_metadata;
-
-namespace android {
-
-// All callbacks on this class are concurrent
-// (they come from separate threads)
-class ProCameraListener : virtual public RefBase
-{
-public:
- virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2) = 0;
-
- // Lock has been acquired. Write operations now available.
- virtual void onLockAcquired() = 0;
- // Lock has been released with exclusiveUnlock.
- virtual void onLockReleased() = 0;
- // Lock has been stolen by another client.
- virtual void onLockStolen() = 0;
-
- // Lock free.
- virtual void onTriggerNotify(int32_t msgType, int32_t ext1, int32_t ext2)
- = 0;
- // onFrameAvailable and OnResultReceived can come in with any order,
- // use android.sensor.timestamp and LockedBuffer.timestamp to correlate them
-
- /**
- * A new metadata buffer has been received.
- * -- Ownership of request passes on to the callee, free with
- * free_camera_metadata.
- */
- virtual void onResultReceived(int32_t frameId, camera_metadata* result) = 0;
-
- // TODO: make onFrameAvailable pure virtual
-
- // A new frame buffer has been received for this stream.
- // -- This callback only fires for createStreamCpu streams
- // -- A buffer may be obtained by calling cpuConsumer->lockNextBuffer
- // -- Use buf.timestamp to correlate with result's android.sensor.timestamp
- // -- The buffer should be accessed with CpuConsumer::lockNextBuffer
- // and CpuConsumer::unlockBuffer
- virtual void onFrameAvailable(int /*streamId*/,
- const sp<CpuConsumer>& /*cpuConsumer*/) {
- }
-
-};
-
-class ProCamera;
-
-template <>
-struct CameraTraits<ProCamera>
-{
- typedef ProCameraListener TCamListener;
- typedef IProCameraUser TCamUser;
- typedef IProCameraCallbacks TCamCallbacks;
- typedef status_t (ICameraService::*TCamConnectService)(const sp<IProCameraCallbacks>&,
- int, const String16&, int,
- /*out*/
- sp<IProCameraUser>&);
- static TCamConnectService fnConnectService;
-};
-
-
-class ProCamera :
- public CameraBase<ProCamera>,
- public BnProCameraCallbacks
-{
-public:
- /**
- * Connect a shared camera. By default access is restricted to read only
- * (Lock free) operations. To be able to submit custom requests a lock needs
- * to be acquired with exclusive[Try]Lock.
- */
- static sp<ProCamera> connect(int cameraId);
- virtual ~ProCamera();
-
- /**
- * Exclusive Locks:
- * - We may request exclusive access to a camera if no other
- * clients are using the camera. This works as a traditional
- * client, writing/reading any camera state.
- * - An application opening the camera (a regular 'Camera') will
- * always steal away the exclusive lock from a ProCamera,
- * this will call onLockReleased.
- * - onLockAcquired will be called again once it is possible
- * to again exclusively lock the camera.
- *
- */
-
- /**
- * All exclusiveLock/unlock functions are asynchronous. The remote endpoint
- * shall not block while waiting to acquire the lock. Instead the lock
- * notifications will come in asynchronously on the listener.
- */
-
- /**
- * Attempt to acquire the lock instantly (non-blocking)
- * - If this succeeds, you do not need to wait for onLockAcquired
- * but the event will still be fired
- *
- * Returns -EBUSY if already locked. 0 on success.
- */
- status_t exclusiveTryLock();
- // always returns 0. wait for onLockAcquired before lock is acquired.
- status_t exclusiveLock();
- // release a lock if we have one, or cancel the lock request.
- status_t exclusiveUnlock();
-
- // exclusive lock = do whatever we want. no lock = read only.
- bool hasExclusiveLock();
-
- /**
- * < 0 error, >= 0 the request ID. streaming to have the request repeat
- * until cancelled.
- * The request queue is flushed when a lock is released or stolen
- * if not locked will return PERMISSION_DENIED
- */
- int submitRequest(const struct camera_metadata* metadata,
- bool streaming = false);
- // if not locked will return PERMISSION_DENIED, BAD_VALUE if requestId bad
- status_t cancelRequest(int requestId);
-
- /**
- * Ask for a stream to be enabled.
- * Lock free. Service maintains counter of streams.
- */
- status_t requestStream(int streamId);
-// TODO: remove requestStream, its useless.
-
- /**
- * Delete a stream.
- * Lock free.
- *
- * NOTE: As a side effect this cancels ALL streaming requests.
- *
- * Errors: BAD_VALUE if unknown stream ID.
- * PERMISSION_DENIED if the stream wasn't yours
- */
- status_t deleteStream(int streamId);
-
- /**
- * Create a new HW stream, whose sink will be the window.
- * Lock free. Service maintains counter of streams.
- * Errors: -EBUSY if too many streams created
- */
- status_t createStream(int width, int height, int format,
- const sp<Surface>& surface,
- /*out*/
- int* streamId);
-
- /**
- * Create a new HW stream, whose sink will be the SurfaceTexture.
- * Lock free. Service maintains counter of streams.
- * Errors: -EBUSY if too many streams created
- */
- status_t createStream(int width, int height, int format,
- const sp<IGraphicBufferProducer>& bufferProducer,
- /*out*/
- int* streamId);
- status_t createStreamCpu(int width, int height, int format,
- int heapCount,
- /*out*/
- sp<CpuConsumer>* cpuConsumer,
- int* streamId);
- status_t createStreamCpu(int width, int height, int format,
- int heapCount,
- bool synchronousMode,
- /*out*/
- sp<CpuConsumer>* cpuConsumer,
- int* streamId);
-
- // Create a request object from a template.
- status_t createDefaultRequest(int templateId,
- /*out*/
- camera_metadata** request) const;
-
- // Get static camera metadata
- camera_metadata* getCameraInfo(int cameraId);
-
- // Blocks until a frame is available (CPU streams only)
- // - Obtain the frame data by calling CpuConsumer::lockNextBuffer
- // - Release the frame data after use with CpuConsumer::unlockBuffer
- // Return value:
- // - >0 - number of frames available to be locked
- // - <0 - error (refer to error codes)
- // Error codes:
- // -ETIMEDOUT if it took too long to get a frame
- int waitForFrameBuffer(int streamId);
-
- // Blocks until a metadata result is available
- // - Obtain the metadata by calling consumeFrameMetadata()
- // Error codes:
- // -ETIMEDOUT if it took too long to get a frame
- status_t waitForFrameMetadata();
-
- // Get the latest metadata. This is destructive.
- // - Calling this repeatedly will produce empty metadata objects.
- // - Use waitForFrameMetadata to sync until new data is available.
- CameraMetadata consumeFrameMetadata();
-
- // Convenience method to drop frame buffers (CPU streams only)
- // Return values:
- // >=0 - number of frames dropped (up to count)
- // <0 - error code
- // Error codes:
- // BAD_VALUE - invalid streamId or count passed
- int dropFrameBuffer(int streamId, int count);
-
-protected:
- ////////////////////////////////////////////////////////
- // IProCameraCallbacks implementation
- ////////////////////////////////////////////////////////
- virtual void notifyCallback(int32_t msgType,
- int32_t ext,
- int32_t ext2);
-
- virtual void onLockStatusChanged(
- IProCameraCallbacks::LockStatus newLockStatus);
-
- virtual void onResultReceived(int32_t requestId,
- camera_metadata* result);
-private:
- ProCamera(int cameraId);
-
- class ProFrameListener : public CpuConsumer::FrameAvailableListener {
- public:
- ProFrameListener(wp<ProCamera> camera, int streamID) {
- mCamera = camera;
- mStreamId = streamID;
- }
-
- protected:
- virtual void onFrameAvailable(const BufferItem& /* item */) {
- sp<ProCamera> c = mCamera.promote();
- if (c.get() != NULL) {
- c->onFrameAvailable(mStreamId);
- }
- }
-
- private:
- wp<ProCamera> mCamera;
- int mStreamId;
- };
- friend class ProFrameListener;
-
- struct StreamInfo
- {
- StreamInfo(int streamId) {
- this->streamID = streamId;
- cpuStream = false;
- frameReady = 0;
- }
-
- StreamInfo() {
- streamID = -1;
- cpuStream = false;
- }
-
- int streamID;
- bool cpuStream;
- sp<CpuConsumer> cpuConsumer;
- bool synchronousMode;
- sp<ProFrameListener> frameAvailableListener;
- sp<Surface> stc;
- int frameReady;
- };
-
- Condition mWaitCondition;
- Mutex mWaitMutex;
- static const nsecs_t mWaitTimeout = 1000000000; // 1sec
- KeyedVector<int, StreamInfo> mStreams;
- bool mMetadataReady;
- CameraMetadata mLatestMetadata;
-
- void onFrameAvailable(int streamId);
-
- StreamInfo& getStreamInfo(int streamId);
-
- friend class CameraBase;
-};
-
-}; // namespace android
-
-#endif
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index a68a9cb..7be2c3e 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -42,8 +42,7 @@ public:
EVENT_MORE_DATA = 0, // Request to read available data from buffer.
// If this event is delivered but the callback handler
// does not want to read the available data, the handler must
- // explicitly
- // ignore the event by setting frameCount to zero.
+ // explicitly ignore the event by setting frameCount to zero.
EVENT_OVERRUN = 1, // Buffer overrun occurred.
EVENT_MARKER = 2, // Record head is at the specified marker position
// (See setMarkerPosition()).
@@ -53,7 +52,7 @@ public:
// voluntary invalidation by mediaserver, or mediaserver crash.
};
- /* Client should declare Buffer on the stack and pass address to obtainBuffer()
+ /* Client should declare a Buffer and pass address to obtainBuffer()
* and releaseBuffer(). See also callback_t for EVENT_MORE_DATA.
*/
@@ -62,20 +61,25 @@ public:
public:
// FIXME use m prefix
size_t frameCount; // number of sample frames corresponding to size;
- // on input it is the number of frames available,
- // on output is the number of frames actually drained
- // (currently ignored but will make the primary field in future)
+ // on input to obtainBuffer() it is the number of frames desired
+ // on output from obtainBuffer() it is the number of available
+ // frames to be read
+ // on input to releaseBuffer() it is currently ignored
size_t size; // input/output in bytes == frameCount * frameSize
- // on output is the number of bytes actually drained
- // FIXME this is redundant with respect to frameCount,
- // and TRANSFER_OBTAIN mode is broken for 8-bit data
- // since we don't define the frame format
+ // on input to obtainBuffer() it is ignored
+ // on output from obtainBuffer() it is the number of available
+ // bytes to be read, which is frameCount * frameSize
+ // on input to releaseBuffer() it is the number of bytes to
+ // release
+ // FIXME This is redundant with respect to frameCount. Consider
+ // removing size and making frameCount the primary field.
union {
void* raw;
short* i16; // signed 16-bit
int8_t* i8; // unsigned 8-bit, offset by 0x80
+ // input to obtainBuffer(): unused, output: pointer to buffer
};
};
@@ -119,7 +123,7 @@ public:
enum transfer_type {
TRANSFER_DEFAULT, // not specified explicitly; determine from the other parameters
TRANSFER_CALLBACK, // callback EVENT_MORE_DATA
- TRANSFER_OBTAIN, // FIXME deprecated: call obtainBuffer() and releaseBuffer()
+ TRANSFER_OBTAIN, // call obtainBuffer() and releaseBuffer()
TRANSFER_SYNC, // synchronous read()
};
@@ -145,15 +149,16 @@ public:
* be larger if the requested size is not compatible with current audio HAL
* latency. Zero means to use a default value.
* cbf: Callback function. If not null, this function is called periodically
- * to consume new data and inform of marker, position updates, etc.
+ * to consume new data in TRANSFER_CALLBACK mode
+ * and inform of marker, position updates, etc.
* user: Context for use by the callback receiver.
* notificationFrames: The callback function is called each time notificationFrames PCM
* frames are ready in record track output buffer.
* sessionId: Not yet supported.
* transferType: How data is transferred from AudioRecord.
* flags: See comments on audio_input_flags_t in <system/audio.h>
+ * pAttributes: If not NULL, supersedes inputSource for use case selection.
* threadCanCallJava: Not present in parameter list, and so is fixed at false.
- * pAttributes: if not NULL, supersedes inputSource for use case selection
*/
AudioRecord(audio_source_t inputSource,
@@ -178,6 +183,7 @@ public:
/* Initialize an AudioRecord that was created using the AudioRecord() constructor.
* Don't call set() more than once, or after an AudioRecord() constructor that takes parameters.
+ * set() is not multi-thread safe.
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful initialization
* - INVALID_OPERATION: AudioRecord is already initialized or record device is already in use
@@ -212,7 +218,7 @@ public:
status_t initCheck() const { return mStatus; }
/* Returns this track's estimated latency in milliseconds.
- * This includes the latency due to AudioRecord buffer size,
+ * This includes the latency due to AudioRecord buffer size, resampling if applicable,
* and audio hardware driver.
*/
uint32_t latency() const { return mLatency; }
@@ -244,11 +250,6 @@ public:
*/
uint32_t getSampleRate() const { return mSampleRate; }
- /* Return the notification frame count.
- * This is approximately how often the callback is invoked, for transfer type TRANSFER_CALLBACK.
- */
- size_t notificationFrames() const { return mNotificationFramesAct; }
-
/* Sets marker position. When record reaches the number of frames specified,
* a callback with event type EVENT_MARKER is called. Calling setMarkerPosition
* with marker == 0 cancels marker notification callback.
@@ -310,7 +311,12 @@ public:
* Returned value:
* handle on audio hardware input
*/
- audio_io_handle_t getInput() const;
+// FIXME The only known public caller is frameworks/opt/net/voip/src/jni/rtp/AudioGroup.cpp
+ audio_io_handle_t getInput() const __attribute__((__deprecated__))
+ { return getInputPrivate(); }
+private:
+ audio_io_handle_t getInputPrivate() const;
+public:
/* Returns the audio session ID associated with this AudioRecord.
*
@@ -324,7 +330,8 @@ public:
*/
int getSessionId() const { return mSessionId; }
- /* Obtains a buffer of up to "audioBuffer->frameCount" full frames.
+ /* Public API for TRANSFER_OBTAIN mode.
+ * Obtains a buffer of up to "audioBuffer->frameCount" full frames.
* After draining these frames of data, the caller should release them with releaseBuffer().
* If the track buffer is not empty, obtainBuffer() returns as many contiguous
* full frames as are available immediately.
@@ -337,9 +344,6 @@ public:
* or return WOULD_BLOCK depending on the value of the "waitCount"
* parameter.
*
- * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications,
- * which should use read() or callback EVENT_MORE_DATA instead.
- *
* Interpretation of waitCount:
* +n limits wait time to n * WAIT_PERIOD_MS,
* -1 causes an (almost) infinite wait time,
@@ -348,6 +352,8 @@ public:
* Buffer fields
* On entry:
* frameCount number of frames requested
+ * size ignored
+ * raw ignored
* After error return:
* frameCount 0
* size 0
@@ -358,9 +364,7 @@ public:
* raw pointer to the buffer
*/
- /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */
- status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
- __attribute__((__deprecated__));
+ status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount);
private:
/* If nonContig is non-NULL, it is an output parameter that will be set to the number of
@@ -373,9 +377,15 @@ private:
struct timespec *elapsed = NULL, size_t *nonContig = NULL);
public:
- /* Release an emptied buffer of "audioBuffer->frameCount" frames for AudioFlinger to re-fill. */
- // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed
- void releaseBuffer(Buffer* audioBuffer);
+ /* Public API for TRANSFER_OBTAIN mode.
+ * Release an emptied buffer of "audioBuffer->frameCount" frames for AudioFlinger to re-fill.
+ *
+ * Buffer fields:
+ * frameCount currently ignored but recommend to set to actual number of frames consumed
+ * size actual number of bytes consumed, must be multiple of frameSize
+ * raw ignored
+ */
+ void releaseBuffer(const Buffer* audioBuffer);
/* As a convenience we provide a read() interface to the audio buffer.
* Input parameter 'size' is in byte units.
@@ -387,8 +397,11 @@ public:
* WOULD_BLOCK when obtainBuffer() returns same, or
* AudioRecord was stopped during the read
* or any other error code returned by IAudioRecord::start() or restoreRecord_l().
+ * Default behavior is to only return when all data has been transferred. Set 'blocking' to
+ * false for the method to return immediately without waiting to try multiple times to read
+ * the full content of the buffer.
*/
- ssize_t read(void* buffer, size_t size);
+ ssize_t read(void* buffer, size_t size, bool blocking = true);
/* Return the number of input frames lost in the audio driver since the last call of this
* function. Audio driver is expected to reset the value to 0 and restart counting upon
@@ -417,6 +430,7 @@ private:
void pause(); // suspend thread from execution at next loop boundary
void resume(); // allow thread to execute, if not requested to exit
+ void wake(); // wake to handle changed notification conditions.
private:
void pauseInternal(nsecs_t ns = 0LL);
@@ -431,7 +445,9 @@ private:
bool mPaused; // whether thread is requested to pause at next loop entry
bool mPausedInt; // whether thread internally requests pause
nsecs_t mPausedNs; // if mPausedInt then associated timeout, otherwise ignored
- bool mIgnoreNextPausedInt; // whether to ignore next mPausedInt request
+ bool mIgnoreNextPausedInt; // skip any internal pause and go immediately
+ // to processAudioBuffer() as state may have changed
+ // since pause time calculated.
};
// body of AudioRecordThread::threadLoop()
@@ -459,7 +475,7 @@ private:
bool mActive;
// for client callback handler
- callback_t mCbf; // callback handler for events, or NULL
+ callback_t mCbf; // callback handler for events, or NULL
void* mUserData;
// for notification APIs
@@ -476,10 +492,10 @@ private:
bool mRetryOnPartialBuffer; // sleep and retry after partial obtainBuffer()
uint32_t mObservedSequence; // last observed value of mSequence
- uint32_t mMarkerPosition; // in wrapping (overflow) frame units
+ uint32_t mMarkerPosition; // in wrapping (overflow) frame units
bool mMarkerReached;
- uint32_t mNewPosition; // in frames
- uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS
+ uint32_t mNewPosition; // in frames
+ uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS
status_t mStatus;
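With obtainBuffer()/releaseBuffer() promoted back to supported public API for TRANSFER_OBTAIN, the Buffer field rules documented above translate into a loop roughly like the sketch below; `record` is assumed to be an already-initialized, started AudioRecord and `sink` a destination large enough for buffer.size bytes:

    AudioRecord::Buffer buffer;
    buffer.frameCount = 256;                            // frames desired, set on input
    status_t err = record.obtainBuffer(&buffer, 1 /*waitCount*/);
    if (err == NO_ERROR) {
        // On output: size == frameCount * frameSize, raw points at the data.
        memcpy(sink, buffer.raw, buffer.size);
        record.releaseBuffer(&buffer);                  // size = bytes actually consumed
    }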
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index 3de0774..d9b7057 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -91,7 +91,7 @@ public:
void* raw;
short* i16; // signed 16-bit
int8_t* i8; // unsigned 8-bit, offset by 0x80
- }; // input: unused, output: pointer to buffer
+ }; // input to obtainBuffer(): unused, output: pointer to buffer
};
/* As a convenience, if a callback is supplied, a handler thread
@@ -125,6 +125,7 @@ public:
* - BAD_VALUE: unsupported configuration
* frameCount is guaranteed to be non-zero if status is NO_ERROR,
* and is undefined otherwise.
+ * FIXME This API assumes a route, and so should be deprecated.
*/
static status_t getMinFrameCount(size_t* frameCount,
@@ -136,7 +137,7 @@ public:
enum transfer_type {
TRANSFER_DEFAULT, // not specified explicitly; determine from the other parameters
TRANSFER_CALLBACK, // callback EVENT_MORE_DATA
- TRANSFER_OBTAIN, // FIXME deprecated: call obtainBuffer() and releaseBuffer()
+ TRANSFER_OBTAIN, // call obtainBuffer() and releaseBuffer()
TRANSFER_SYNC, // synchronous write()
TRANSFER_SHARED, // shared memory
};
@@ -149,9 +150,6 @@ public:
/* Creates an AudioTrack object and registers it with AudioFlinger.
* Once created, the track needs to be started before it can be used.
* Unspecified values are set to appropriate default values.
- * With this constructor, the track is configured for streaming mode.
- * Data to be rendered is supplied by write() or by the callback EVENT_MORE_DATA.
- * Intermixing a combination of write() and non-ignored EVENT_MORE_DATA is not allowed.
*
* Parameters:
*
@@ -169,20 +167,28 @@ public:
* configuration. Zero means to use a default value.
* flags: See comments on audio_output_flags_t in <system/audio.h>.
* cbf: Callback function. If not null, this function is called periodically
- * to provide new data and inform of marker, position updates, etc.
+ * to provide new data in TRANSFER_CALLBACK mode
+ * and inform of marker, position updates, etc.
* user: Context for use by the callback receiver.
* notificationFrames: The callback function is called each time notificationFrames PCM
* frames have been consumed from track input buffer.
* This is expressed in units of frames at the initial source sample rate.
* sessionId: Specific session ID, or zero to use default.
* transferType: How data is transferred to AudioTrack.
+ * offloadInfo: If not NULL, provides offload parameters for
+ * AudioSystem::getOutputForAttr().
+ * uid: User ID of the app which initially requested this AudioTrack
+ * for power management tracking, or -1 for current user ID.
+ * pid: Process ID of the app which initially requested this AudioTrack
+ * for power management tracking, or -1 for current process ID.
+ * pAttributes: If not NULL, supersedes streamType for use case selection.
* threadCanCallJava: Not present in parameter list, and so is fixed at false.
*/
AudioTrack( audio_stream_type_t streamType,
uint32_t sampleRate,
audio_format_t format,
- audio_channel_mask_t,
+ audio_channel_mask_t channelMask,
size_t frameCount = 0,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
@@ -198,7 +204,9 @@ public:
/* Creates an audio track and registers it with AudioFlinger.
* With this constructor, the track is configured for static buffer mode.
* Data to be rendered is passed in a shared memory buffer
- * identified by the argument sharedBuffer, which must be non-0.
+ * identified by the argument sharedBuffer, which should be non-0.
+ * If sharedBuffer is zero, this constructor is equivalent to the previous constructor
+ * but without the ability to specify a non-zero value for the frameCount parameter.
* The memory should be initialized to the desired data before calling start().
* The write() method is not supported in this case.
* It is recommended to pass a callback function to be notified of playback end by an
@@ -230,6 +238,7 @@ public:
/* Initialize an AudioTrack that was created using the AudioTrack() constructor.
* Don't call set() more than once, or after the AudioTrack() constructors that take parameters.
+ * set() is not multi-thread safe.
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful initialization
* - INVALID_OPERATION: AudioTrack is already initialized
@@ -464,7 +473,9 @@ public:
* handle on audio hardware output, or AUDIO_IO_HANDLE_NONE if the
* track needed to be re-created but that failed
*/
+private:
audio_io_handle_t getOutput() const;
+public:
/* Returns the unique session ID associated with this track.
*
@@ -511,9 +522,6 @@ public:
* or return WOULD_BLOCK depending on the value of the "waitCount"
* parameter.
*
- * obtainBuffer() and releaseBuffer() are deprecated for direct use by applications,
- * which should use write() or callback EVENT_MORE_DATA instead.
- *
* Interpretation of waitCount:
* +n limits wait time to n * WAIT_PERIOD_MS,
* -1 causes an (almost) infinite wait time,
@@ -533,10 +541,8 @@ public:
* size actual number of bytes available
* raw pointer to the buffer
*/
- /* FIXME Deprecated public API for TRANSFER_OBTAIN mode */
status_t obtainBuffer(Buffer* audioBuffer, int32_t waitCount,
- size_t *nonContig = NULL)
- __attribute__((__deprecated__));
+ size_t *nonContig = NULL);
private:
/* If nonContig is non-NULL, it is an output parameter that will be set to the number of
@@ -559,9 +565,7 @@ public:
* frameCount currently ignored but recommend to set to actual number of frames filled
* size actual number of bytes filled, must be multiple of frameSize
* raw ignored
- *
*/
- // FIXME make private when obtainBuffer() for TRANSFER_OBTAIN is removed
void releaseBuffer(const Buffer* audioBuffer);
/* As a convenience we provide a write() interface to the audio buffer.
@@ -574,7 +578,7 @@ public:
* WOULD_BLOCK when obtainBuffer() returns same, or
* AudioTrack was stopped during the write
* or any other error code returned by IAudioTrack::start() or restoreTrack_l().
- * Default behavior is to only return until all data has been transferred. Set 'blocking' to
+ * Default behavior is to only return when all data has been transferred. Set 'blocking' to
* false for the method to return immediately without waiting to try multiple times to write
* the full content of the buffer.
*/
@@ -582,6 +586,7 @@ public:
/*
* Dumps the state of an audio track.
+ * Not a general-purpose API; intended only for use by media player service to dump its tracks.
*/
status_t dump(int fd, const Vector<String16>& args) const;
@@ -623,8 +628,6 @@ protected:
AudioTrack(const AudioTrack& other);
AudioTrack& operator = (const AudioTrack& other);
- void setAttributesFromStreamType(audio_stream_type_t streamType);
-
/* a small internal class to handle the callback */
class AudioTrackThread : public Thread
{
@@ -667,10 +670,6 @@ protected:
static const nsecs_t NS_WHENEVER = -1, NS_INACTIVE = -2, NS_NEVER = -3;
nsecs_t processAudioBuffer();
- bool isOffloaded() const;
- bool isDirect() const;
- bool isOffloadedOrDirect() const;
-
// caller must hold lock on mLock for all _l methods
status_t createTrack_l();
@@ -683,6 +682,10 @@ protected:
// FIXME enum is faster than strcmp() for parameter 'from'
status_t restoreTrack_l(const char *from);
+ bool isOffloaded() const;
+ bool isDirect() const;
+ bool isOffloadedOrDirect() const;
+
bool isOffloaded_l() const
{ return (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0; }
@@ -773,6 +776,7 @@ protected:
bool mMarkerReached;
uint32_t mNewPosition; // in frames
uint32_t mUpdatePeriod; // in frames, zero means no EVENT_NEW_POS
+
uint32_t mServer; // in frames, last known mProxy->getPosition()
// which is count of frames consumed by server,
// reset by new IAudioTrack,
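The reworded write() comment ("return when all data has been transferred") is the blocking default; with blocking set to false the call may return a short count instead of waiting. A brief usage sketch, with `track`, `pcm` and `bytes` assumed from the caller's context:

    ssize_t written = track.write(pcm, bytes, false /*blocking*/);
    if (written >= 0 && (size_t)written < bytes) {
        // Short write: the remaining bytes must be resubmitted later.
    }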
diff --git a/include/media/IDrm.h b/include/media/IDrm.h
index affcbd7..9449beb 100644
--- a/include/media/IDrm.h
+++ b/include/media/IDrm.h
@@ -47,7 +47,8 @@ struct IDrm : public IInterface {
Vector<uint8_t> const &initData,
String8 const &mimeType, DrmPlugin::KeyType keyType,
KeyedVector<String8, String8> const &optionalParameters,
- Vector<uint8_t> &request, String8 &defaultUrl) = 0;
+ Vector<uint8_t> &request, String8 &defaultUrl,
+ DrmPlugin::KeyRequestType *keyRequestType) = 0;
virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
Vector<uint8_t> const &response,
diff --git a/include/media/IResourceManagerClient.h b/include/media/IResourceManagerClient.h
new file mode 100644
index 0000000..3587aea
--- /dev/null
+++ b/include/media/IResourceManagerClient.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_IRESOURCEMANAGERCLIENT_H
+#define ANDROID_IRESOURCEMANAGERCLIENT_H
+
+#include <utils/RefBase.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+namespace android {
+
+class IResourceManagerClient: public IInterface
+{
+public:
+ DECLARE_META_INTERFACE(ResourceManagerClient);
+
+ virtual bool reclaimResource() = 0;
+};
+
+// ----------------------------------------------------------------------------
+
+class BnResourceManagerClient: public BnInterface<IResourceManagerClient>
+{
+public:
+ virtual status_t onTransact(uint32_t code,
+ const Parcel &data,
+ Parcel *reply,
+ uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif // ANDROID_IRESOURCEMANAGERCLIENT_H
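A codec owner registers itself with the resource manager through this interface; reclaimResource() is the hook the service invokes when it needs the resource back. A minimal sketch of a Bn-side implementation (the helper it calls is hypothetical):

    struct CodecResourceClient : public BnResourceManagerClient {
        virtual bool reclaimResource() {
            // Release the codec (and any graphic memory) this client holds.
            return releaseOwnedCodec();   // hypothetical helper; true means reclaimed
        }
    };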
diff --git a/include/media/IResourceManagerService.h b/include/media/IResourceManagerService.h
new file mode 100644
index 0000000..067392c
--- /dev/null
+++ b/include/media/IResourceManagerService.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_IRESOURCEMANAGERSERVICE_H
+#define ANDROID_IRESOURCEMANAGERSERVICE_H
+
+#include <utils/Errors.h> // for status_t
+#include <utils/KeyedVector.h>
+#include <utils/RefBase.h>
+#include <utils/String8.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+#include <media/IResourceManagerClient.h>
+#include <media/MediaResource.h>
+#include <media/MediaResourcePolicy.h>
+
+namespace android {
+
+class IResourceManagerService: public IInterface
+{
+public:
+ DECLARE_META_INTERFACE(ResourceManagerService);
+
+ virtual void config(const Vector<MediaResourcePolicy> &policies) = 0;
+
+ virtual void addResource(
+ int pid,
+ int64_t clientId,
+ const sp<IResourceManagerClient> client,
+ const Vector<MediaResource> &resources) = 0;
+
+ virtual void removeResource(int64_t clientId) = 0;
+
+ virtual bool reclaimResource(
+ int callingPid,
+ const Vector<MediaResource> &resources) = 0;
+};
+
+// ----------------------------------------------------------------------------
+
+class BnResourceManagerService: public BnInterface<IResourceManagerService>
+{
+public:
+ virtual status_t onTransact(uint32_t code,
+ const Parcel &data,
+ Parcel *reply,
+ uint32_t flags = 0);
+};
+
+}; // namespace android
+
+#endif // ANDROID_IRESOURCEMANAGERSERVICE_H
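Typical client flow, sketched under the assumption that the service is published under the usual media resource manager name and that `client` is the IResourceManagerClient implemented above; `clientId` is a caller-chosen 64-bit handle:

    sp<IResourceManagerService> service = interface_cast<IResourceManagerService>(
            defaultServiceManager()->getService(String16("media.resource_manager")));

    Vector<MediaResource> resources;
    resources.push_back(MediaResource(String8(kResourceSecureCodec), 1));

    service->addResource(getpid(), clientId, client, resources);
    // ... later, when the codec is torn down:
    service->removeResource(clientId);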
diff --git a/include/media/MediaResource.h b/include/media/MediaResource.h
new file mode 100644
index 0000000..0b57c84
--- /dev/null
+++ b/include/media/MediaResource.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_MEDIA_RESOURCE_H
+#define ANDROID_MEDIA_RESOURCE_H
+
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+
+namespace android {
+
+extern const char kResourceSecureCodec[];
+extern const char kResourceNonSecureCodec[];
+extern const char kResourceGraphicMemory[];
+
+class MediaResource {
+public:
+ MediaResource();
+ MediaResource(String8 type, uint64_t value);
+ MediaResource(String8 type, String8 subType, uint64_t value);
+
+ void readFromParcel(const Parcel &parcel);
+ void writeToParcel(Parcel *parcel) const;
+
+ String8 toString() const;
+
+ bool operator==(const MediaResource &other) const;
+ bool operator!=(const MediaResource &other) const;
+
+ String8 mType;
+ String8 mSubType;
+ uint64_t mValue;
+};
+
+}; // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCE_H
diff --git a/include/media/MediaResourcePolicy.h b/include/media/MediaResourcePolicy.h
new file mode 100644
index 0000000..1e1c341
--- /dev/null
+++ b/include/media/MediaResourcePolicy.h
@@ -0,0 +1,45 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#ifndef ANDROID_MEDIA_RESOURCE_POLICY_H
+#define ANDROID_MEDIA_RESOURCE_POLICY_H
+
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+
+namespace android {
+
+extern const char kPolicySupportsMultipleSecureCodecs[];
+extern const char kPolicySupportsSecureWithNonSecureCodec[];
+
+class MediaResourcePolicy {
+public:
+ MediaResourcePolicy();
+ MediaResourcePolicy(String8 type, uint64_t value);
+
+ void readFromParcel(const Parcel &parcel);
+ void writeToParcel(Parcel *parcel) const;
+
+ String8 toString() const;
+
+ String8 mType;
+ uint64_t mValue;
+};
+
+} // namespace android
+
+#endif // ANDROID_MEDIA_RESOURCE_POLICY_H
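
A hedged sketch of how a holder of an IResourceManagerService proxy might push these policies; pushDefaultPolicies is a hypothetical helper, and the 0/1 values simply encode booleans in mValue:

    #include <media/IResourceManagerService.h>
    #include <media/MediaResourcePolicy.h>
    #include <utils/Vector.h>

    using namespace android;

    static void pushDefaultPolicies(const sp<IResourceManagerService> &service) {
        Vector<MediaResourcePolicy> policies;
        policies.add(MediaResourcePolicy(
                String8(kPolicySupportsMultipleSecureCodecs), 1 /* true */));
        policies.add(MediaResourcePolicy(
                String8(kPolicySupportsSecureWithNonSecureCodec), 0 /* false */));
        service->config(policies);
    }
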
diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
index aa91485..c1483f3 100644
--- a/include/media/stagefright/ACodec.h
+++ b/include/media/stagefright/ACodec.h
@@ -291,7 +291,7 @@ private:
OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat);
status_t setupAMRCodec(bool encoder, bool isWAMR, int32_t bitRate);
- status_t setupG711Codec(bool encoder, int32_t numChannels);
+ status_t setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels);
status_t setupFlacCodec(
bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel);
diff --git a/include/media/stagefright/MediaClock.h b/include/media/stagefright/MediaClock.h
index 660764f..e9c09a1 100644
--- a/include/media/stagefright/MediaClock.h
+++ b/include/media/stagefright/MediaClock.h
@@ -35,9 +35,9 @@ struct MediaClock : public RefBase {
// It's required to use timestamp of just rendered frame as
// anchor time in paused state.
void updateAnchor(
- int64_t anchorTimeMediaUs,
- int64_t anchorTimeRealUs,
- int64_t maxTimeMediaUs = INT64_MAX);
+ int64_t anchorTimeMediaUs,
+ int64_t anchorTimeRealUs,
+ int64_t maxTimeMediaUs = INT64_MAX);
void updateMaxTimeMedia(int64_t maxTimeMediaUs);
@@ -45,22 +45,24 @@ struct MediaClock : public RefBase {
// query media time corresponding to real time |realUs|, and save the
// result in |outMediaUs|.
- status_t getMediaTime(int64_t realUs,
- int64_t *outMediaUs,
- bool allowPastMaxTime = false);
+ status_t getMediaTime(
+ int64_t realUs,
+ int64_t *outMediaUs,
+ bool allowPastMaxTime = false) const;
// query real time corresponding to media time |targetMediaUs|.
// The result is saved in |outRealUs|.
- status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs);
+ status_t getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) const;
protected:
virtual ~MediaClock();
private:
- status_t getMediaTime_l(int64_t realUs,
- int64_t *outMediaUs,
- bool allowPastMaxTime);
+ status_t getMediaTime_l(
+ int64_t realUs,
+ int64_t *outMediaUs,
+ bool allowPastMaxTime) const;
- Mutex mLock;
+ mutable Mutex mLock;
int64_t mAnchorTimeMediaUs;
int64_t mAnchorTimeRealUs;
diff --git a/include/media/stagefright/MediaSync.h b/include/media/stagefright/MediaSync.h
new file mode 100644
index 0000000..8bb8c7f
--- /dev/null
+++ b/include/media/stagefright/MediaSync.h
@@ -0,0 +1,239 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_SYNC_H
+#define MEDIA_SYNC_H
+
+#include <gui/IConsumerListener.h>
+#include <gui/IProducerListener.h>
+
+#include <media/stagefright/foundation/AHandler.h>
+
+#include <utils/Condition.h>
+#include <utils/Mutex.h>
+
+namespace android {
+
+class AudioTrack;
+class BufferItem;
+class Fence;
+class GraphicBuffer;
+class IGraphicBufferConsumer;
+class IGraphicBufferProducer;
+struct MediaClock;
+
+// MediaSync manages media playback and its synchronization to a media clock
+// source. It can also be used for video-only playback.
+//
+// For video playback, it requires an output surface and provides an input
+// surface. It then controls the rendering of input buffers (buffer queued to
+// the input surface) on the output surface to happen at the appropriate time.
+//
+// For audio playback, it requires an audio track and takes updates about
+// rendered audio data in order to maintain the media clock when the audio
+// track serves as the media clock source. (TODO: move audio rendering from
+// Java to native code).
+//
+// It can use the audio or video track as the media clock source, as well as
+// an external clock. (TODO: actually support an external clock as the media
+// clock source; use the video track as the clock source for A/V streams).
+//
+// In video-only mode, MediaSync will render every video frame even if the
+// frame arrives late relative to its timestamp and the previous frame's.
+
+// The client needs to configure a surface (for output video rendering) and an
+// audio track (for querying information about audio rendering) for MediaSync.
+//
+// Then the client needs to obtain a surface from MediaSync and render video
+// frames onto that surface. Internally, MediaSync will receive those video
+// frames and render them onto the output surface at the appropriate time.
+//
+// The client needs to call updateQueuedAudioData() immediately after it writes
+// audio data to the audio track. Such information will be used to update media
+// clock.
+//
+class MediaSync : public AHandler {
+public:
+ // Create an instance of MediaSync.
+ static sp<MediaSync> create();
+
+ // Called when MediaSync is used to render video. It should be called
+ // before createInputSurface().
+ status_t configureSurface(const sp<IGraphicBufferProducer> &output);
+
+ // Called when audio track is used as media clock source. It should be
+ // called before updateQueuedAudioData().
+ // |nativeSampleRateInHz| is the sample rate of audio data fed into audio
+ // track. It's the same number used to create AudioTrack.
+ status_t configureAudioTrack(
+ const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz);
+
+ // Create the input surface on which the client should render video frames.
+ // Those video frames will be internally directed to the output surface for
+ // rendering at the appropriate time.
+ status_t createInputSurface(sp<IGraphicBufferProducer> *outBufferProducer);
+
+ // Update the size of the just-rendered audio data and the presentation
+ // timestamp of the first frame of that audio data. It should be called
+ // immediately after the client writes audio data into the AudioTrack.
+ // This function assumes a continuous audio stream.
+ // TODO: support gap or backwards updates.
+ status_t updateQueuedAudioData(
+ size_t sizeInBytes, int64_t presentationTimeUs);
+
+ // Set the consumer name of the input queue.
+ void setName(const AString &name);
+
+ // Set the playback to a desired speed.
+ // This method can be called any time.
+ // |rate| is the ratio between desired speed and the normal one, and should
+ // be non-negative. The meaning of rate values:
+ // 1.0 -- normal playback
+ // 0.0 -- stop or pause
+ // larger than 1.0 -- faster than normal speed
+ // between 0.0 and 1.0 -- slower than normal speed
+ status_t setPlaybackRate(float rate);
+
+ // Get the media clock used by the MediaSync so that the client can obtain
+ // corresponding media time or real time via
+ // MediaClock::getMediaTime() and MediaClock::getRealTimeFor().
+ sp<const MediaClock> getMediaClock();
+
+protected:
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum {
+ kWhatDrainVideo = 'dVid',
+ };
+
+ static const int MAX_OUTSTANDING_BUFFERS = 2;
+
+ // This is a thin wrapper class that lets us listen to
+ // IConsumerListener::onFrameAvailable from mInput.
+ class InputListener : public BnConsumerListener,
+ public IBinder::DeathRecipient {
+ public:
+ InputListener(const sp<MediaSync> &sync);
+ virtual ~InputListener();
+
+ // From IConsumerListener
+ virtual void onFrameAvailable(const BufferItem &item);
+
+ // From IConsumerListener
+ // We don't care about released buffers because we detach each buffer as
+ // soon as we acquire it. See the comment for onBufferReleased below for
+ // some clarifying notes about the name.
+ virtual void onBuffersReleased() {}
+
+ // From IConsumerListener
+ // We don't care about sideband streams, since we won't relay them.
+ virtual void onSidebandStreamChanged();
+
+ // From IBinder::DeathRecipient
+ virtual void binderDied(const wp<IBinder> &who);
+
+ private:
+ sp<MediaSync> mSync;
+ };
+
+ // This is a thin wrapper class that lets us listen to
+ // IProducerListener::onBufferReleased from mOutput.
+ class OutputListener : public BnProducerListener,
+ public IBinder::DeathRecipient {
+ public:
+ OutputListener(const sp<MediaSync> &sync);
+ virtual ~OutputListener();
+
+ // From IProducerListener
+ virtual void onBufferReleased();
+
+ // From IBinder::DeathRecipient
+ virtual void binderDied(const wp<IBinder> &who);
+
+ private:
+ sp<MediaSync> mSync;
+ };
+
+ // mIsAbandoned is set to true when the input or output dies.
+ // Once the MediaSync has been abandoned by one side, it will disconnect
+ // from the other side and not attempt to communicate with it further.
+ bool mIsAbandoned;
+
+ mutable Mutex mMutex;
+ Condition mReleaseCondition;
+ size_t mNumOutstandingBuffers;
+ sp<IGraphicBufferConsumer> mInput;
+ sp<IGraphicBufferProducer> mOutput;
+
+ sp<AudioTrack> mAudioTrack;
+ uint32_t mNativeSampleRateInHz;
+ int64_t mNumFramesWritten;
+ bool mHasAudio;
+
+ int64_t mNextBufferItemMediaUs;
+ List<BufferItem> mBufferItems;
+ sp<ALooper> mLooper;
+ float mPlaybackRate;
+
+ sp<MediaClock> mMediaClock;
+
+ MediaSync();
+
+ // Must be accessed through RefBase
+ virtual ~MediaSync();
+
+ int64_t getRealTime(int64_t mediaTimeUs, int64_t nowUs);
+ int64_t getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames);
+ int64_t getPlayedOutAudioDurationMedia_l(int64_t nowUs);
+
+ void onDrainVideo_l();
+
+ // This implements the onFrameAvailable callback from IConsumerListener.
+ // It gets called from an InputListener.
+ // During this callback, we detach the buffer from the input, and queue
+ // it for rendering on the output. This call can block if there are too
+ // many outstanding buffers. If it blocks, it will resume when
+ // onBufferReleasedByOutput releases a buffer back to the input.
+ void onFrameAvailableFromInput();
+
+ // Send |bufferItem| to the output for rendering.
+ void renderOneBufferItem_l(const BufferItem &bufferItem);
+
+ // This implements the onBufferReleased callback from IProducerListener.
+ // It gets called from an OutputListener.
+ // During this callback, we detach the buffer from the output, and release
+ // it to the input. A blocked onFrameAvailable call will be allowed to proceed.
+ void onBufferReleasedByOutput();
+
+ // Return |buffer| back to the input.
+ void returnBufferToInput_l(const sp<GraphicBuffer> &buffer, const sp<Fence> &fence);
+
+ // When this is called, the MediaSync disconnects from (i.e., abandons) its
+ // input or output, and signals any waiting onFrameAvailable calls to wake
+ // up. This must be called with mMutex locked.
+ void onAbandoned_l(bool isInput);
+
+ // helper.
+ bool isPlaying() { return mPlaybackRate != 0.0; }
+
+ DISALLOW_EVIL_CONSTRUCTORS(MediaSync);
+};
+
+} // namespace android
+
+#endif
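
A hedged sketch of the client flow described in the class comment above; startSyncedPlayback is a hypothetical helper, and the output surface, audio track and sample rate are assumed to come from the caller:

    #include <gui/IGraphicBufferProducer.h>
    #include <media/AudioTrack.h>
    #include <media/stagefright/MediaSync.h>

    using namespace android;

    static sp<MediaSync> startSyncedPlayback(
            const sp<IGraphicBufferProducer> &outputSurface,
            const sp<AudioTrack> &track,
            uint32_t sampleRateInHz,
            sp<IGraphicBufferProducer> *inputSurface /* out: render video here */) {
        sp<MediaSync> sync = MediaSync::create();

        // Configure the output side first, then obtain the input surface that
        // the client renders video frames onto; finally hook up the audio track
        // as the media clock source.
        if (sync->configureSurface(outputSurface) != NO_ERROR
                || sync->createInputSurface(inputSurface) != NO_ERROR
                || sync->configureAudioTrack(track, sampleRateInHz) != NO_ERROR) {
            return NULL;
        }

        // Start playback at normal speed; 0.0 would pause.
        sync->setPlaybackRate(1.0f);
        return sync;
    }

    // After every AudioTrack::write(), the caller reports what was just queued,
    // e.g. sync->updateQueuedAudioData(bytesWritten, firstFramePtsUs), and can
    // use sync->getMediaClock() for AV-sync queries.
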
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index e341160..84b1b1a 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -250,7 +250,7 @@ private:
status_t setAC3Format(int32_t numChannels, int32_t sampleRate);
- void setG711Format(int32_t numChannels);
+ void setG711Format(int32_t sampleRate, int32_t numChannels);
status_t setVideoPortFormatType(
OMX_U32 portIndex,
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 5378bf2..3b260d6 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -36,6 +36,8 @@ LOCAL_SRC_FILES:= \
IMediaRecorder.cpp \
IRemoteDisplay.cpp \
IRemoteDisplayClient.cpp \
+ IResourceManagerClient.cpp \
+ IResourceManagerService.cpp \
IStreamSource.cpp \
MediaCodecInfo.cpp \
Metadata.cpp \
@@ -53,6 +55,8 @@ LOCAL_SRC_FILES:= \
CharacterEncodingDetector.cpp \
IMediaDeathNotifier.cpp \
MediaProfiles.cpp \
+ MediaResource.cpp \
+ MediaResourcePolicy.cpp \
IEffect.cpp \
IEffectClient.cpp \
AudioEffect.cpp \
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index af103c1..7d8222f 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -486,4 +486,4 @@ status_t AudioEffect::guidToString(const effect_uuid_t *guid, char *str, size_t
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/AudioParameter.cpp b/media/libmedia/AudioParameter.cpp
index 33dbf0b..8c8cf45 100644
--- a/media/libmedia/AudioParameter.cpp
+++ b/media/libmedia/AudioParameter.cpp
@@ -180,4 +180,4 @@ status_t AudioParameter::getAt(size_t index, String8& key, String8& value)
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/AudioPolicy.cpp b/media/libmedia/AudioPolicy.cpp
index d2d0971..c7dafcb 100644
--- a/media/libmedia/AudioPolicy.cpp
+++ b/media/libmedia/AudioPolicy.cpp
@@ -112,4 +112,4 @@ status_t AudioMix::writeToParcel(Parcel *parcel) const
return NO_ERROR;
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 48abb96..100a914 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -112,7 +112,9 @@ AudioRecord::~AudioRecord()
mCblkMemory.clear();
mBufferMemory.clear();
IPCThreadState::self()->flushCommands();
- AudioSystem::releaseAudioSessionId(mSessionId, -1);
+ ALOGV("~AudioRecord, releasing session id %d",
+ mSessionId);
+ AudioSystem::releaseAudioSessionId(mSessionId, -1 /*pid*/);
}
}
@@ -159,8 +161,6 @@ status_t AudioRecord::set(
}
mTransfer = transferType;
- AutoMutex lock(mLock);
-
// invariant that mAudioRecord != 0 is true only after set() returns successfully
if (mAudioRecord != 0) {
ALOGE("Track already in use");
@@ -233,6 +233,7 @@ status_t AudioRecord::set(
if (cbf != NULL) {
mAudioRecordThread = new AudioRecordThread(*this, threadCanCallJava);
mAudioRecordThread->run("AudioRecord", ANDROID_PRIORITY_AUDIO);
+ // thread begins in paused state, and will not reference us until start()
}
// create the IAudioRecord
@@ -286,7 +287,6 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession)
status_t status = NO_ERROR;
if (!(flags & CBLK_INVALID)) {
- ALOGV("mAudioRecord->start()");
status = mAudioRecord->start(event, triggerSession);
if (status == DEAD_OBJECT) {
flags |= CBLK_INVALID;
@@ -352,6 +352,10 @@ status_t AudioRecord::setMarkerPosition(uint32_t marker)
mMarkerPosition = marker;
mMarkerReached = false;
+ sp<AudioRecordThread> t = mAudioRecordThread;
+ if (t != 0) {
+ t->wake();
+ }
return NO_ERROR;
}
@@ -378,6 +382,10 @@ status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod)
mNewPosition = mProxy->getPosition() + updatePeriod;
mUpdatePeriod = updatePeriod;
+ sp<AudioRecordThread> t = mAudioRecordThread;
+ if (t != 0) {
+ t->wake();
+ }
return NO_ERROR;
}
@@ -408,7 +416,7 @@ status_t AudioRecord::getPosition(uint32_t *position) const
uint32_t AudioRecord::getInputFramesLost() const
{
// no need to check mActive, because if inactive this will return 0, which is what we want
- return AudioSystem::getInputFramesLost(getInput());
+ return AudioSystem::getInputFramesLost(getInputPrivate());
}
// -------------------------------------------------------------------------
@@ -416,7 +424,6 @@ uint32_t AudioRecord::getInputFramesLost() const
// must be called with mLock held
status_t AudioRecord::openRecord_l(size_t epoch)
{
- status_t status;
const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
if (audioFlinger == 0) {
ALOGE("Could not get audioflinger");
@@ -431,12 +438,16 @@ status_t AudioRecord::openRecord_l(size_t epoch)
}
// Client can only express a preference for FAST. Server will perform additional tests.
- if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !(
- // use case: callback transfer mode
- (mTransfer == TRANSFER_CALLBACK) &&
+ if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !((
+ // either of these use cases:
+ // use case 1: callback transfer mode
+ (mTransfer == TRANSFER_CALLBACK) ||
+ // use case 2: obtain/release mode
+ (mTransfer == TRANSFER_OBTAIN)) &&
// matching sample rate
(mSampleRate == afSampleRate))) {
- ALOGW("AUDIO_INPUT_FLAG_FAST denied by client");
+ ALOGW("AUDIO_INPUT_FLAG_FAST denied by client; transfer %d, track %u Hz, primary %u Hz",
+ mTransfer, mSampleRate, afSampleRate);
// once denied, do not request again if IAudioRecord is re-created
mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
}
@@ -452,7 +463,8 @@ status_t AudioRecord::openRecord_l(size_t epoch)
}
audio_io_handle_t input;
- status = AudioSystem::getInputForAttr(&mAttributes, &input, (audio_session_t)mSessionId,
+ status_t status = AudioSystem::getInputForAttr(&mAttributes, &input,
+ (audio_session_t)mSessionId,
mSampleRate, mFormat, mChannelMask, mFlags);
if (status != NO_ERROR) {
@@ -684,9 +696,9 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r
return status;
}
-void AudioRecord::releaseBuffer(Buffer* audioBuffer)
+void AudioRecord::releaseBuffer(const Buffer* audioBuffer)
{
- // all TRANSFER_* are valid
+ // FIXME add error checking on mode, by adding an internal version
size_t stepCount = audioBuffer->size / mFrameSize;
if (stepCount == 0) {
@@ -704,7 +716,7 @@ void AudioRecord::releaseBuffer(Buffer* audioBuffer)
// the server does not automatically disable recorder on overrun, so no need to restart
}
-audio_io_handle_t AudioRecord::getInput() const
+audio_io_handle_t AudioRecord::getInputPrivate() const
{
AutoMutex lock(mLock);
return mInput;
@@ -712,7 +724,7 @@ audio_io_handle_t AudioRecord::getInput() const
// -------------------------------------------------------------------------
-ssize_t AudioRecord::read(void* buffer, size_t userSize)
+ssize_t AudioRecord::read(void* buffer, size_t userSize, bool blocking)
{
if (mTransfer != TRANSFER_SYNC) {
return INVALID_OPERATION;
@@ -731,7 +743,8 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)
while (userSize >= mFrameSize) {
audioBuffer.frameCount = userSize / mFrameSize;
- status_t err = obtainBuffer(&audioBuffer, &ClientProxy::kForever);
+ status_t err = obtainBuffer(&audioBuffer,
+ blocking ? &ClientProxy::kForever : &ClientProxy::kNonBlocking);
if (err < 0) {
if (read > 0) {
break;
@@ -993,14 +1006,13 @@ status_t AudioRecord::restoreRecord_l(const char *from)
{
ALOGW("dead IAudioRecord, creating a new one from %s()", from);
++mSequence;
- status_t result;
// if the new IAudioRecord is created, openRecord_l() will modify the
// following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory.
// It will also delete the strong references on previous IAudioRecord and IMemory
size_t position = mProxy->getPosition();
mNewPosition = position + mUpdatePeriod;
- result = openRecord_l(position);
+ status_t result = openRecord_l(position);
if (result == NO_ERROR) {
if (mActive) {
// callback thread or sync event hasn't changed
@@ -1072,8 +1084,8 @@ bool AudioRecord::AudioRecordThread::threadLoop()
case NS_NEVER:
return false;
case NS_WHENEVER:
- // FIXME increase poll interval, or make event-driven
- ns = 1000000000LL;
+ // Event driven: call wake() when the callback notification conditions change.
+ ns = INT64_MAX;
// fall through
default:
LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
@@ -1106,6 +1118,17 @@ void AudioRecord::AudioRecordThread::resume()
}
}
+void AudioRecord::AudioRecordThread::wake()
+{
+ AutoMutex _l(mMyLock);
+ if (!mPaused && mPausedInt && mPausedNs > 0) {
+ // audio record is active and internally paused with timeout.
+ mIgnoreNextPausedInt = true;
+ mPausedInt = false;
+ mMyCond.signal();
+ }
+}
+
void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns)
{
AutoMutex _l(mMyLock);
@@ -1115,4 +1138,4 @@ void AudioRecord::AudioRecordThread::pauseInternal(nsecs_t ns)
// -------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index f5a5712..c6b34a7 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -1003,4 +1003,4 @@ void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index c775e7b..98f64fe 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -203,8 +203,8 @@ AudioTrack::~AudioTrack()
mCblkMemory.clear();
mSharedBuffer.clear();
IPCThreadState::self()->flushCommands();
- ALOGV("~AudioTrack, releasing session id from %d on behalf of %d",
- IPCThreadState::self()->getCallingPid(), mClientPid);
+ ALOGV("~AudioTrack, releasing session id %d from %d on behalf of %d",
+ mSessionId, IPCThreadState::self()->getCallingPid(), mClientPid);
AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
}
}
@@ -229,9 +229,9 @@ status_t AudioTrack::set(
const audio_attributes_t* pAttributes)
{
ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
- "flags #%x, notificationFrames %u, sessionId %d, transferType %d",
+ "flags #%x, notificationFrames %u, sessionId %d, transferType %d, uid %d, pid %d",
streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
- sessionId, transferType);
+ sessionId, transferType, uid, pid);
switch (transferType) {
case TRANSFER_DEFAULT:
@@ -274,8 +274,6 @@ status_t AudioTrack::set(
ALOGV("set() streamType %d frameCount %zu flags %04x", streamType, frameCount, flags);
- AutoMutex lock(mLock);
-
// invariant that mAudioTrack != 0 is true only after set() returns successfully
if (mAudioTrack != 0) {
ALOGE("Track already in use");
@@ -401,6 +399,7 @@ status_t AudioTrack::set(
if (cbf != NULL) {
mAudioTrackThread = new AudioTrackThread(*this, threadCanCallJava);
mAudioTrackThread->run("AudioTrack", ANDROID_PRIORITY_AUDIO, 0 /*stack*/);
+ // thread begins in paused state, and will not reference us until start()
}
// create the IAudioTrack
@@ -964,9 +963,9 @@ status_t AudioTrack::createTrack_l()
if (status != NO_ERROR || output == AUDIO_IO_HANDLE_NONE) {
- ALOGE("Could not get audio output for stream type %d, usage %d, sample rate %u, format %#x,"
+ ALOGE("Could not get audio output for session %d, stream type %d, usage %d, sample rate %u, format %#x,"
" channel mask %#x, flags %#x",
- streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags);
+ mSessionId, streamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags);
return BAD_VALUE;
}
{
@@ -981,6 +980,7 @@ status_t AudioTrack::createTrack_l()
ALOGE("getLatency(%d) failed status %d", output, status);
goto release;
}
+ ALOGV("createTrack_l() output %d afLatency %u", output, afLatency);
size_t afFrameCount;
status = AudioSystem::getFrameCount(output, &afFrameCount);
@@ -1010,11 +1010,11 @@ status_t AudioTrack::createTrack_l()
(mTransfer == TRANSFER_OBTAIN)) &&
// matching sample rate
(mSampleRate == afSampleRate))) {
- ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client");
+ ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client; transfer %d, track %u Hz, output %u Hz",
+ mTransfer, mSampleRate, afSampleRate);
// once denied, do not request again if IAudioTrack is re-created
mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
}
- ALOGV("createTrack_l() output %d afLatency %d", output, afLatency);
// The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
// n = 1 fast track with single buffering; nBuffering is ignored
@@ -1090,6 +1090,7 @@ status_t AudioTrack::createTrack_l()
size_t temp = frameCount; // temp may be replaced by a revised value of frameCount,
// but we will still need the original value also
+ int originalSessionId = mSessionId;
sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
mSampleRate,
mFormat,
@@ -1102,6 +1103,8 @@ status_t AudioTrack::createTrack_l()
&mSessionId,
mClientUid,
&status);
+ ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
+ "session ID changed from %d to %d", originalSessionId, mSessionId);
if (status != NO_ERROR) {
ALOGE("AudioFlinger could not create track, status: %d", status);
@@ -1194,9 +1197,13 @@ status_t AudioTrack::createTrack_l()
// address space. AudioFlinger::TrackBase::mBuffer is for the server address space.
void* buffers;
if (mSharedBuffer == 0) {
- buffers = (char*)cblk + sizeof(audio_track_cblk_t);
+ buffers = cblk + 1;
} else {
buffers = mSharedBuffer->pointer();
+ if (buffers == NULL) {
+ ALOGE("Could not get buffer pointer");
+ return NO_INIT;
+ }
}
mAudioTrack->attachAuxEffect(mAuxEffectId);
@@ -1415,8 +1422,7 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize, bool blocking)
return ssize_t(err);
}
- size_t toWrite;
- toWrite = audioBuffer.size;
+ size_t toWrite = audioBuffer.size;
memcpy(audioBuffer.i8, buffer, toWrite);
buffer = ((const char *) buffer) + toWrite;
userSize -= toWrite;
@@ -1784,7 +1790,7 @@ nsecs_t AudioTrack::processAudioBuffer()
return WAIT_PERIOD_MS * 1000000LL;
}
- size_t releasedFrames = audioBuffer.size / mFrameSize;
+ size_t releasedFrames = writtenSize / mFrameSize;
audioBuffer.frameCount = releasedFrames;
mRemainingFrames -= releasedFrames;
if (misalignment >= releasedFrames) {
@@ -1829,7 +1835,6 @@ status_t AudioTrack::restoreTrack_l(const char *from)
ALOGW("dead IAudioTrack, %s, creating a new one from %s()",
isOffloadedOrDirect_l() ? "Offloaded or Direct" : "PCM", from);
++mSequence;
- status_t result;
// refresh the audio configuration cache in this process to make sure we get new
// output parameters and new IAudioFlinger in createTrack_l()
@@ -1851,13 +1856,13 @@ status_t AudioTrack::restoreTrack_l(const char *from)
// following member variables: mAudioTrack, mCblkMemory and mCblk.
// It will also delete the strong references on previous IAudioTrack and IMemory.
// If a new IAudioTrack cannot be created, the previous (dead) instance will be left intact.
- result = createTrack_l();
+ status_t result = createTrack_l();
// take the frames that will be lost by track recreation into account in saved position
// For streaming tracks, this is the amount we obtained from the user/client
// (not the number actually consumed at the server - those are already lost).
(void) updateAndGetPosition_l();
- if (mStaticProxy != 0) {
+ if (mStaticProxy == 0) {
mPosition = mReleased;
}
@@ -2185,4 +2190,4 @@ void AudioTrack::AudioTrackThread::pauseInternal(nsecs_t ns)
mPausedNs = ns;
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index 8e3b633..6f038ea 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -1369,4 +1369,4 @@ status_t BnAudioFlinger::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp
index 1c299f7..641e6c1 100644
--- a/media/libmedia/IAudioFlingerClient.cpp
+++ b/media/libmedia/IAudioFlingerClient.cpp
@@ -99,4 +99,4 @@ status_t BnAudioFlingerClient::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index f2ff27b..39374d8 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -1228,4 +1228,4 @@ status_t BnAudioPolicyService::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IAudioPolicyServiceClient.cpp b/media/libmedia/IAudioPolicyServiceClient.cpp
index e802277..7c65878 100644
--- a/media/libmedia/IAudioPolicyServiceClient.cpp
+++ b/media/libmedia/IAudioPolicyServiceClient.cpp
@@ -80,4 +80,4 @@ status_t BnAudioPolicyServiceClient::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp
index 8a4a383..9d80753 100644
--- a/media/libmedia/IAudioRecord.cpp
+++ b/media/libmedia/IAudioRecord.cpp
@@ -91,4 +91,4 @@ status_t BnAudioRecord::onTransact(
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index df209fd..651cb61 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -292,4 +292,4 @@ status_t BnAudioTrack::onTransact(
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IDrm.cpp b/media/libmedia/IDrm.cpp
index b08fa82..714a0b3 100644
--- a/media/libmedia/IDrm.cpp
+++ b/media/libmedia/IDrm.cpp
@@ -125,7 +125,8 @@ struct BpDrm : public BpInterface<IDrm> {
Vector<uint8_t> const &initData,
String8 const &mimeType, DrmPlugin::KeyType keyType,
KeyedVector<String8, String8> const &optionalParameters,
- Vector<uint8_t> &request, String8 &defaultUrl) {
+ Vector<uint8_t> &request, String8 &defaultUrl,
+ DrmPlugin::KeyRequestType *keyRequestType) {
Parcel data, reply;
data.writeInterfaceToken(IDrm::getInterfaceDescriptor());
@@ -143,6 +144,7 @@ struct BpDrm : public BpInterface<IDrm> {
readVector(reply, request);
defaultUrl = reply.readString8();
+ *keyRequestType = static_cast<DrmPlugin::KeyRequestType>(reply.readInt32());
return reply.readInt32();
}
@@ -562,13 +564,15 @@ status_t BnDrm::onTransact(
Vector<uint8_t> request;
String8 defaultUrl;
+ DrmPlugin::KeyRequestType keyRequestType;
+
+ status_t result = getKeyRequest(sessionId, initData, mimeType,
+ keyType, optionalParameters, request, defaultUrl,
+ &keyRequestType);
- status_t result = getKeyRequest(sessionId, initData,
- mimeType, keyType,
- optionalParameters,
- request, defaultUrl);
writeVector(reply, request);
reply->writeString8(defaultUrl);
+ reply->writeInt32(static_cast<int32_t>(keyRequestType));
reply->writeInt32(result);
return OK;
}
diff --git a/media/libmedia/IDrmClient.cpp b/media/libmedia/IDrmClient.cpp
index f50715e..490c6ed 100644
--- a/media/libmedia/IDrmClient.cpp
+++ b/media/libmedia/IDrmClient.cpp
@@ -78,4 +78,4 @@ status_t BnDrmClient::onTransact(
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp
index c2fff78..eb4b098 100644
--- a/media/libmedia/IEffect.cpp
+++ b/media/libmedia/IEffect.cpp
@@ -201,4 +201,4 @@ status_t BnEffect::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IEffectClient.cpp b/media/libmedia/IEffectClient.cpp
index aef4371..1322e72 100644
--- a/media/libmedia/IEffectClient.cpp
+++ b/media/libmedia/IEffectClient.cpp
@@ -141,4 +141,4 @@ status_t BnEffectClient::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaCodecList.cpp b/media/libmedia/IMediaCodecList.cpp
index bf7c5ca..80020db 100644
--- a/media/libmedia/IMediaCodecList.cpp
+++ b/media/libmedia/IMediaCodecList.cpp
@@ -160,4 +160,4 @@ status_t BnMediaCodecList::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index 38e9ca0..d4360ea 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -108,4 +108,4 @@ IMediaDeathNotifier::DeathNotifier::~DeathNotifier()
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp
index 7e26ee6..2ff7658 100644
--- a/media/libmedia/IMediaHTTPConnection.cpp
+++ b/media/libmedia/IMediaHTTPConnection.cpp
@@ -178,5 +178,4 @@ private:
IMPLEMENT_META_INTERFACE(
MediaHTTPConnection, "android.media.IMediaHTTPConnection");
-} // namespace android
-
+} // namespace android
diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp
index 1260582..f30d0f3 100644
--- a/media/libmedia/IMediaHTTPService.cpp
+++ b/media/libmedia/IMediaHTTPService.cpp
@@ -54,5 +54,4 @@ struct BpMediaHTTPService : public BpInterface<IMediaHTTPService> {
IMPLEMENT_META_INTERFACE(
MediaHTTPService, "android.media.IMediaHTTPService");
-} // namespace android
-
+} // namespace android
diff --git a/media/libmedia/IMediaLogService.cpp b/media/libmedia/IMediaLogService.cpp
index a4af7b7..230749e 100644
--- a/media/libmedia/IMediaLogService.cpp
+++ b/media/libmedia/IMediaLogService.cpp
@@ -91,4 +91,4 @@ status_t BnMediaLogService::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index aa2665a..551cffe 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -297,4 +297,4 @@ status_t BnMediaMetadataRetriever::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index dcd5670..ce3009a 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -574,4 +574,4 @@ status_t BnMediaPlayer::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaPlayerClient.cpp b/media/libmedia/IMediaPlayerClient.cpp
index a670c96..d608386 100644
--- a/media/libmedia/IMediaPlayerClient.cpp
+++ b/media/libmedia/IMediaPlayerClient.cpp
@@ -75,4 +75,4 @@ status_t BnMediaPlayerClient::onTransact(
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index feea267..aa7b2e1 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -234,4 +234,4 @@ status_t BnMediaPlayerService::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 9181f86..8ca256c 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -463,4 +463,4 @@ status_t BnMediaRecorder::onTransact(
// ----------------------------------------------------------------------------
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IMediaRecorderClient.cpp b/media/libmedia/IMediaRecorderClient.cpp
index e7907e3..6795d23 100644
--- a/media/libmedia/IMediaRecorderClient.cpp
+++ b/media/libmedia/IMediaRecorderClient.cpp
@@ -67,4 +67,4 @@ status_t BnMediaRecorderClient::onTransact(
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IRemoteDisplay.cpp b/media/libmedia/IRemoteDisplay.cpp
index 1e15434..869d11a 100644
--- a/media/libmedia/IRemoteDisplay.cpp
+++ b/media/libmedia/IRemoteDisplay.cpp
@@ -91,4 +91,4 @@ status_t BnRemoteDisplay::onTransact(
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IRemoteDisplayClient.cpp b/media/libmedia/IRemoteDisplayClient.cpp
index 9d63bc9..bedeb6c 100644
--- a/media/libmedia/IRemoteDisplayClient.cpp
+++ b/media/libmedia/IRemoteDisplayClient.cpp
@@ -101,4 +101,4 @@ status_t BnRemoteDisplayClient::onTransact(
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/IResourceManagerClient.cpp b/media/libmedia/IResourceManagerClient.cpp
new file mode 100644
index 0000000..6fa56fc
--- /dev/null
+++ b/media/libmedia/IResourceManagerClient.cpp
@@ -0,0 +1,70 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#include <utils/RefBase.h>
+#include <binder/IInterface.h>
+#include <binder/Parcel.h>
+
+#include <media/IResourceManagerClient.h>
+
+namespace android {
+
+enum {
+ RECLAIM_RESOURCE = IBinder::FIRST_CALL_TRANSACTION,
+};
+
+class BpResourceManagerClient: public BpInterface<IResourceManagerClient>
+{
+public:
+ BpResourceManagerClient(const sp<IBinder> &impl)
+ : BpInterface<IResourceManagerClient>(impl)
+ {
+ }
+
+ virtual bool reclaimResource() {
+ Parcel data, reply;
+ data.writeInterfaceToken(IResourceManagerClient::getInterfaceDescriptor());
+
+ bool ret = false;
+ status_t status = remote()->transact(RECLAIM_RESOURCE, data, &reply);
+ if (status == NO_ERROR) {
+ ret = (bool)reply.readInt32();
+ }
+ return ret;
+ }
+};
+
+IMPLEMENT_META_INTERFACE(ResourceManagerClient, "android.media.IResourceManagerClient");
+
+// ----------------------------------------------------------------------
+
+status_t BnResourceManagerClient::onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags)
+{
+ switch (code) {
+ case RECLAIM_RESOURCE: {
+ CHECK_INTERFACE(IResourceManagerClient, data, reply);
+ bool ret = reclaimResource();
+ reply->writeInt32(ret);
+ return NO_ERROR;
+ } break;
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+} // namespace android
diff --git a/media/libmedia/IResourceManagerService.cpp b/media/libmedia/IResourceManagerService.cpp
new file mode 100644
index 0000000..95a2d1c
--- /dev/null
+++ b/media/libmedia/IResourceManagerService.cpp
@@ -0,0 +1,169 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IResourceManagerService"
+#include <utils/Log.h>
+
+#include "media/IResourceManagerService.h"
+
+#include <binder/Parcel.h>
+
+#include <stdint.h>
+#include <sys/types.h>
+
+namespace android {
+
+enum {
+ CONFIG = IBinder::FIRST_CALL_TRANSACTION,
+ ADD_RESOURCE,
+ REMOVE_RESOURCE,
+ RECLAIM_RESOURCE,
+};
+
+template <typename T>
+static void writeToParcel(Parcel *data, const Vector<T> &items) {
+ size_t size = items.size();
+ // truncates size, but should be okay for this use case
+ data->writeUint32(static_cast<uint32_t>(size));
+ for (size_t i = 0; i < size; i++) {
+ items[i].writeToParcel(data);
+ }
+}
+
+template <typename T>
+static void readFromParcel(const Parcel &data, Vector<T> *items) {
+ size_t size = (size_t)data.readUint32();
+ for (size_t i = 0; i < size; i++) {
+ T item;
+ item.readFromParcel(data);
+ items->add(item);
+ }
+}
+
+class BpResourceManagerService : public BpInterface<IResourceManagerService>
+{
+public:
+ BpResourceManagerService(const sp<IBinder> &impl)
+ : BpInterface<IResourceManagerService>(impl)
+ {
+ }
+
+ virtual void config(const Vector<MediaResourcePolicy> &policies) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
+ writeToParcel(&data, policies);
+ remote()->transact(CONFIG, data, &reply);
+ }
+
+ virtual void addResource(
+ int pid,
+ int64_t clientId,
+ const sp<IResourceManagerClient> client,
+ const Vector<MediaResource> &resources) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
+ data.writeInt32(pid);
+ data.writeInt64(clientId);
+ data.writeStrongBinder(IInterface::asBinder(client));
+ writeToParcel(&data, resources);
+
+ remote()->transact(ADD_RESOURCE, data, &reply);
+ }
+
+ virtual void removeResource(int64_t clientId) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
+ data.writeInt64(clientId);
+
+ remote()->transact(REMOVE_RESOURCE, data, &reply);
+ }
+
+ virtual bool reclaimResource(int callingPid, const Vector<MediaResource> &resources) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IResourceManagerService::getInterfaceDescriptor());
+ data.writeInt32(callingPid);
+ writeToParcel(&data, resources);
+
+ bool ret = false;
+ status_t status = remote()->transact(RECLAIM_RESOURCE, data, &reply);
+ if (status == NO_ERROR) {
+ ret = (bool)reply.readInt32();
+ }
+ return ret;
+ }
+};
+
+IMPLEMENT_META_INTERFACE(ResourceManagerService, "android.media.IResourceManagerService");
+
+// ----------------------------------------------------------------------
+
+
+status_t BnResourceManagerService::onTransact(
+ uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags)
+{
+ switch (code) {
+ case CONFIG: {
+ CHECK_INTERFACE(IResourceManagerService, data, reply);
+ Vector<MediaResourcePolicy> policies;
+ readFromParcel(data, &policies);
+ config(policies);
+ return NO_ERROR;
+ } break;
+
+ case ADD_RESOURCE: {
+ CHECK_INTERFACE(IResourceManagerService, data, reply);
+ int pid = data.readInt32();
+ int64_t clientId = data.readInt64();
+ sp<IResourceManagerClient> client(
+ interface_cast<IResourceManagerClient>(data.readStrongBinder()));
+ Vector<MediaResource> resources;
+ readFromParcel(data, &resources);
+ addResource(pid, clientId, client, resources);
+ return NO_ERROR;
+ } break;
+
+ case REMOVE_RESOURCE: {
+ CHECK_INTERFACE(IResourceManagerService, data, reply);
+ int64_t clientId = data.readInt64();
+ removeResource(clientId);
+ return NO_ERROR;
+ } break;
+
+ case RECLAIM_RESOURCE: {
+ CHECK_INTERFACE(IResourceManagerService, data, reply);
+ int callingPid = data.readInt32();
+ Vector<MediaResource> resources;
+ readFromParcel(data, &resources);
+ bool ret = reclaimResource(callingPid, resources);
+ reply->writeInt32(ret);
+ return NO_ERROR;
+ } break;
+
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+} // namespace android
diff --git a/media/libmedia/MediaResource.cpp b/media/libmedia/MediaResource.cpp
new file mode 100644
index 0000000..8be01bc
--- /dev/null
+++ b/media/libmedia/MediaResource.cpp
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaResource"
+#include <utils/Log.h>
+#include <media/MediaResource.h>
+
+namespace android {
+
+const char kResourceSecureCodec[] = "secure-codec";
+const char kResourceNonSecureCodec[] = "non-secure-codec";
+const char kResourceGraphicMemory[] = "graphic-memory";
+
+MediaResource::MediaResource() : mValue(0) {}
+
+MediaResource::MediaResource(String8 type, uint64_t value)
+ : mType(type),
+ mValue(value) {}
+
+MediaResource::MediaResource(String8 type, String8 subType, uint64_t value)
+ : mType(type),
+ mSubType(subType),
+ mValue(value) {}
+
+void MediaResource::readFromParcel(const Parcel &parcel) {
+ mType = parcel.readString8();
+ mSubType = parcel.readString8();
+ mValue = parcel.readUint64();
+}
+
+void MediaResource::writeToParcel(Parcel *parcel) const {
+ parcel->writeString8(mType);
+ parcel->writeString8(mSubType);
+ parcel->writeUint64(mValue);
+}
+
+String8 MediaResource::toString() const {
+ String8 str;
+ str.appendFormat("%s/%s:%llu", mType.string(), mSubType.string(), mValue);
+ return str;
+}
+
+bool MediaResource::operator==(const MediaResource &other) const {
+ return (other.mType == mType) && (other.mSubType == mSubType) && (other.mValue == mValue);
+}
+
+bool MediaResource::operator!=(const MediaResource &other) const {
+ return !(*this == other);
+}
+
+} // namespace android
diff --git a/media/libmedia/MediaResourcePolicy.cpp b/media/libmedia/MediaResourcePolicy.cpp
new file mode 100644
index 0000000..2bb996a
--- /dev/null
+++ b/media/libmedia/MediaResourcePolicy.cpp
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaResourcePolicy"
+#include <utils/Log.h>
+#include <media/MediaResourcePolicy.h>
+
+namespace android {
+
+const char kPolicySupportsMultipleSecureCodecs[] = "supports-multiple-secure-codecs";
+const char kPolicySupportsSecureWithNonSecureCodec[] = "supports-secure-with-non-secure-codec";
+
+MediaResourcePolicy::MediaResourcePolicy() : mValue(0) {}
+
+MediaResourcePolicy::MediaResourcePolicy(String8 type, uint64_t value)
+ : mType(type),
+ mValue(value) {}
+
+void MediaResourcePolicy::readFromParcel(const Parcel &parcel) {
+ mType = parcel.readString8();
+ mValue = parcel.readUint64();
+}
+
+void MediaResourcePolicy::writeToParcel(Parcel *parcel) const {
+ parcel->writeString8(mType);
+ parcel->writeUint64(mValue);
+}
+
+String8 MediaResourcePolicy::toString() const {
+ String8 str;
+ str.appendFormat("%s:%llu", mType.string(), mValue);
+ return str;
+}
+
+} // namespace android
diff --git a/media/libmedia/StringArray.cpp b/media/libmedia/StringArray.cpp
index 477e3fd..b2e5907 100644
--- a/media/libmedia/StringArray.cpp
+++ b/media/libmedia/StringArray.cpp
@@ -110,4 +110,4 @@ void StringArray::setEntry(int idx, const char* str) {
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/Visualizer.cpp b/media/libmedia/Visualizer.cpp
index f91e3e4..9d69b6a 100644
--- a/media/libmedia/Visualizer.cpp
+++ b/media/libmedia/Visualizer.cpp
@@ -429,4 +429,4 @@ bool Visualizer::CaptureThread::threadLoop()
return false;
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 8e8a1ed..873808a 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -176,4 +176,4 @@ MediaMetadataRetriever::DeathNotifier::~DeathNotifier()
}
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index d1d51cc..5dd8c02 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -877,4 +877,4 @@ status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) {
return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 973e156..a2d6e53 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -680,4 +680,4 @@ void MediaRecorder::died()
notify(MEDIA_RECORDER_EVENT_ERROR, MEDIA_ERROR_SERVER_DIED, 0);
}
-}; // namespace android
+} // namespace android
diff --git a/media/libmediaplayerservice/Drm.cpp b/media/libmediaplayerservice/Drm.cpp
index d4f6fab..49e01d1 100644
--- a/media/libmediaplayerservice/Drm.cpp
+++ b/media/libmediaplayerservice/Drm.cpp
@@ -358,7 +358,8 @@ status_t Drm::getKeyRequest(Vector<uint8_t> const &sessionId,
Vector<uint8_t> const &initData,
String8 const &mimeType, DrmPlugin::KeyType keyType,
KeyedVector<String8, String8> const &optionalParameters,
- Vector<uint8_t> &request, String8 &defaultUrl) {
+ Vector<uint8_t> &request, String8 &defaultUrl,
+ DrmPlugin::KeyRequestType *keyRequestType) {
Mutex::Autolock autoLock(mLock);
if (mInitCheck != OK) {
@@ -372,7 +373,8 @@ status_t Drm::getKeyRequest(Vector<uint8_t> const &sessionId,
DrmSessionManager::Instance()->useSession(sessionId);
return mPlugin->getKeyRequest(sessionId, initData, mimeType, keyType,
- optionalParameters, request, defaultUrl);
+ optionalParameters, request, defaultUrl,
+ keyRequestType);
}
status_t Drm::provideKeyResponse(Vector<uint8_t> const &sessionId,
diff --git a/media/libmediaplayerservice/Drm.h b/media/libmediaplayerservice/Drm.h
index 0cea639..7e8f246 100644
--- a/media/libmediaplayerservice/Drm.h
+++ b/media/libmediaplayerservice/Drm.h
@@ -53,7 +53,8 @@ struct Drm : public BnDrm,
Vector<uint8_t> const &initData,
String8 const &mimeType, DrmPlugin::KeyType keyType,
KeyedVector<String8, String8> const &optionalParameters,
- Vector<uint8_t> &request, String8 &defaultUrl);
+ Vector<uint8_t> &request, String8 &defaultUrl,
+ DrmPlugin::KeyRequestType *keyRequestType);
virtual status_t provideKeyResponse(Vector<uint8_t> const &sessionId,
Vector<uint8_t> const &response,
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index d01e83a..0476c9b 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -281,6 +281,34 @@ void NuPlayer::HTTPLiveSource::onSessionNotify(const sp<AMessage> &msg) {
break;
}
+ case LiveSession::kWhatBufferingStart:
+ {
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatPauseOnBufferingStart);
+ notify->post();
+ break;
+ }
+
+ case LiveSession::kWhatBufferingEnd:
+ {
+ sp<AMessage> notify = dupNotify();
+ notify->setInt32("what", kWhatResumeOnBufferingEnd);
+ notify->post();
+ break;
+ }
+
+ case LiveSession::kWhatBufferingUpdate:
+ {
+ sp<AMessage> notify = dupNotify();
+ int32_t percentage;
+ CHECK(msg->findInt32("percentage", &percentage));
+ notify->setInt32("what", kWhatBufferingUpdate);
+ notify->setInt32("percentage", percentage);
+ notify->post();
+ break;
+ }
+
case LiveSession::kWhatError:
{
break;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 5887e50..1fa9cef 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -356,14 +356,6 @@ status_t NuPlayerDriver::seekTo(int msec) {
case STATE_PREPARED:
case STATE_STOPPED_AND_PREPARED:
{
- int curpos = 0;
- if (mPositionUs > 0) {
- curpos = (mPositionUs + 500ll) / 1000;
- }
- if (curpos == msec) {
- // nothing to do, and doing something anyway could result in deadlock (b/15323063)
- break;
- }
mStartupSeekTimeUs = seekTimeUs;
// pretend that the seek completed. It will actually happen when starting playback.
// TODO: actually perform the seek here, so the player is ready to go at the new
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 4bccfa8..a2ec51c 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -566,12 +566,22 @@ size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
}
bool NuPlayer::Renderer::onDrainAudioQueue() {
-#if 0
+ // TODO: This call to getPosition checks if AudioTrack has been created
+ // in AudioSink before draining audio. If AudioTrack doesn't exist, then
+ // CHECKs on getPosition will fail.
+ // We still need to figure out why AudioTrack is not created when
+ // this function is called. One possible cause could be leftover audio.
+ // Another place to check is whether the decoder has received
+ // INFO_FORMAT_CHANGED as its first buffer, since AudioSink is opened
+ // there, and whether a flush issued immediately after start interacts
+ // badly. Also investigate the error message
+ // "vorbis_dsp_synthesis returned -135", along with RTSP.
uint32_t numFramesPlayed;
if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
return false;
}
+#if 0
ssize_t numFramesAvailableToWrite =
mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 31e10ce..97f3e20 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1591,7 +1591,11 @@ status_t ACodec::configureCodec(
if (!msg->findInt32("channel-count", &numChannels)) {
err = INVALID_OPERATION;
} else {
- err = setupG711Codec(encoder, numChannels);
+ int32_t sampleRate;
+ if (!msg->findInt32("sample-rate", &sampleRate)) {
+ sampleRate = 8000;
+ }
+ err = setupG711Codec(encoder, sampleRate, numChannels);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
int32_t numChannels, sampleRate, compressionLevel = -1;
@@ -2066,11 +2070,11 @@ status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
1 /* numChannels */);
}
-status_t ACodec::setupG711Codec(bool encoder, int32_t numChannels) {
+status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
CHECK(!encoder); // XXX TODO
return setupRawAudioFormat(
- kPortIndexInput, 8000 /* sampleRate */, numChannels);
+ kPortIndexInput, sampleRate, numChannels);
}
status_t ACodec::setupFlacCodec(
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 177293d..a2cbdaf 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -37,6 +37,7 @@ LOCAL_SRC_FILES:= \
MediaCodecSource.cpp \
MediaDefs.cpp \
MediaExtractor.cpp \
+ MediaSync.cpp \
MidiExtractor.cpp \
http/MediaHTTP.cpp \
MediaMuxer.cpp \
diff --git a/media/libstagefright/MediaClock.cpp b/media/libstagefright/MediaClock.cpp
index 38db5e4..433f555 100644
--- a/media/libstagefright/MediaClock.cpp
+++ b/media/libstagefright/MediaClock.cpp
@@ -93,13 +93,17 @@ void MediaClock::setPlaybackRate(float rate) {
}
status_t MediaClock::getMediaTime(
- int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) {
+ int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const {
+ if (outMediaUs == NULL) {
+ return BAD_VALUE;
+ }
+
Mutex::Autolock autoLock(mLock);
return getMediaTime_l(realUs, outMediaUs, allowPastMaxTime);
}
status_t MediaClock::getMediaTime_l(
- int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) {
+ int64_t realUs, int64_t *outMediaUs, bool allowPastMaxTime) const {
if (mAnchorTimeRealUs == -1) {
return NO_INIT;
}
@@ -119,7 +123,12 @@ status_t MediaClock::getMediaTime_l(
return OK;
}
-status_t MediaClock::getRealTimeFor(int64_t targetMediaUs, int64_t *outRealUs) {
+status_t MediaClock::getRealTimeFor(
+ int64_t targetMediaUs, int64_t *outRealUs) const {
+ if (outRealUs == NULL) {
+ return BAD_VALUE;
+ }
+
Mutex::Autolock autoLock(mLock);
if (mPlaybackRate == 0.0) {
return NO_INIT;
diff --git a/media/libstagefright/MediaSync.cpp b/media/libstagefright/MediaSync.cpp
new file mode 100644
index 0000000..7b6c7d9
--- /dev/null
+++ b/media/libstagefright/MediaSync.cpp
@@ -0,0 +1,541 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSync"
+#include <inttypes.h>
+
+#include <gui/BufferQueue.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+
+#include <media/AudioTrack.h>
+#include <media/stagefright/MediaClock.h>
+#include <media/stagefright/MediaSync.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <ui/GraphicBuffer.h>
+
+// Maximum late time allowed for a video frame to be rendered. When a video
+// frame arrives later than this threshold, it is discarded without being rendered.
+static const int64_t kMaxAllowedVideoLateTimeUs = 40000ll;
+
+namespace android {
+
+// static
+sp<MediaSync> MediaSync::create() {
+ sp<MediaSync> sync = new MediaSync();
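+ // Register with the looper here rather than in the constructor:
+ // registerHandler() needs an sp<> to the handler, which should not be
+ // formed while the object is still being constructed.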
+ sync->mLooper->registerHandler(sync);
+ return sync;
+}
+
+MediaSync::MediaSync()
+ : mIsAbandoned(false),
+ mMutex(),
+ mReleaseCondition(),
+ mNumOutstandingBuffers(0),
+ mNativeSampleRateInHz(0),
+ mNumFramesWritten(0),
+ mHasAudio(false),
+ mNextBufferItemMediaUs(-1),
+ mPlaybackRate(0.0) {
+ mMediaClock = new MediaClock;
+
+ mLooper = new ALooper;
+ mLooper->setName("MediaSync");
+ mLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+}
+
+MediaSync::~MediaSync() {
+ if (mInput != NULL) {
+ mInput->consumerDisconnect();
+ }
+ if (mOutput != NULL) {
+ mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
+ }
+
+ if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+ }
+}
+
+status_t MediaSync::configureSurface(const sp<IGraphicBufferProducer> &output) {
+ Mutex::Autolock lock(mMutex);
+
+ // TODO: support surface change.
+ if (mOutput != NULL) {
+ ALOGE("configureSurface: output surface has already been configured.");
+ return INVALID_OPERATION;
+ }
+
+ if (output != NULL) {
+ IGraphicBufferProducer::QueueBufferOutput queueBufferOutput;
+ sp<OutputListener> listener(new OutputListener(this));
+ IInterface::asBinder(output)->linkToDeath(listener);
+ status_t status =
+ output->connect(listener,
+ NATIVE_WINDOW_API_MEDIA,
+ true /* producerControlledByApp */,
+ &queueBufferOutput);
+ if (status != NO_ERROR) {
+ ALOGE("configureSurface: failed to connect (%d)", status);
+ return status;
+ }
+
+ mOutput = output;
+ }
+
+ return NO_ERROR;
+}
+
+// |audioTrack| is used only for querying information.
+status_t MediaSync::configureAudioTrack(
+ const sp<AudioTrack> &audioTrack, uint32_t nativeSampleRateInHz) {
+ Mutex::Autolock lock(mMutex);
+
+ // TODO: support audio track change.
+ if (mAudioTrack != NULL) {
+ ALOGE("configureAudioTrack: audioTrack has already been configured.");
+ return INVALID_OPERATION;
+ }
+
+ mAudioTrack = audioTrack;
+ mNativeSampleRateInHz = nativeSampleRateInHz;
+
+ return NO_ERROR;
+}
+
+status_t MediaSync::createInputSurface(
+ sp<IGraphicBufferProducer> *outBufferProducer) {
+ if (outBufferProducer == NULL) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+
+ if (mOutput == NULL) {
+ return NO_INIT;
+ }
+
+ if (mInput != NULL) {
+ return INVALID_OPERATION;
+ }
+
+ sp<IGraphicBufferProducer> bufferProducer;
+ sp<IGraphicBufferConsumer> bufferConsumer;
+ BufferQueue::createBufferQueue(&bufferProducer, &bufferConsumer);
+
+ sp<InputListener> listener(new InputListener(this));
+ IInterface::asBinder(bufferConsumer)->linkToDeath(listener);
+ status_t status =
+ bufferConsumer->consumerConnect(listener, false /* controlledByApp */);
+ if (status == NO_ERROR) {
+ bufferConsumer->setConsumerName(String8("MediaSync"));
+ *outBufferProducer = bufferProducer;
+ mInput = bufferConsumer;
+ }
+ return status;
+}
+
+status_t MediaSync::setPlaybackRate(float rate) {
+ if (rate < 0.0) {
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+
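+ // Speeding up makes any previously scheduled drain time stale (frames become
+ // due sooner), so forget the pending item time and let onDrainVideo_l() reschedule.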
+ if (rate > mPlaybackRate) {
+ mNextBufferItemMediaUs = -1;
+ }
+ mPlaybackRate = rate;
+ mMediaClock->setPlaybackRate(rate);
+ onDrainVideo_l();
+
+ return OK;
+}
+
+sp<const MediaClock> MediaSync::getMediaClock() {
+ return mMediaClock;
+}
+
+status_t MediaSync::updateQueuedAudioData(
+ size_t sizeInBytes, int64_t presentationTimeUs) {
+ if (sizeInBytes == 0) {
+ return OK;
+ }
+
+ Mutex::Autolock lock(mMutex);
+
+ if (mAudioTrack == NULL) {
+ ALOGW("updateQueuedAudioData: audioTrack has NOT been configured.");
+ return INVALID_OPERATION;
+ }
+
+ int64_t numFrames = sizeInBytes / mAudioTrack->frameSize();
+ int64_t maxMediaTimeUs = presentationTimeUs
+ + getDurationIfPlayedAtNativeSampleRate_l(numFrames);
+ mNumFramesWritten += numFrames;
+
+ int64_t nowUs = ALooper::GetNowUs();
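+ // Current media time = end-of-written-audio media time, minus the duration of
+ // everything written so far (i.e. the media time where this audio stream started),
+ // plus the duration actually played out by now.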
+ int64_t nowMediaUs = maxMediaTimeUs
+ - getDurationIfPlayedAtNativeSampleRate_l(mNumFramesWritten)
+ + getPlayedOutAudioDurationMedia_l(nowUs);
+
+ int64_t oldRealTime = -1;
+ if (mNextBufferItemMediaUs != -1) {
+ oldRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
+ }
+
+ mMediaClock->updateAnchor(nowMediaUs, nowUs, maxMediaTimeUs);
+ mHasAudio = true;
+
+ if (oldRealTime != -1) {
+ int64_t newRealTime = getRealTime(mNextBufferItemMediaUs, nowUs);
+ if (newRealTime < oldRealTime) {
+ mNextBufferItemMediaUs = -1;
+ onDrainVideo_l();
+ }
+ }
+
+ return OK;
+}
+
+void MediaSync::setName(const AString &name) {
+ Mutex::Autolock lock(mMutex);
+ mInput->setConsumerName(String8(name.c_str()));
+}
+
+int64_t MediaSync::getRealTime(int64_t mediaTimeUs, int64_t nowUs) {
+ int64_t realUs;
+ if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
+ // If we failed to get the current position, e.g. because the audio
+ // clock is not ready, just play out the video immediately without delay.
+ return nowUs;
+ }
+ return realUs;
+}
+
+int64_t MediaSync::getDurationIfPlayedAtNativeSampleRate_l(int64_t numFrames) {
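+ // e.g. 24000 frames at a native rate of 48000 Hz correspond to 500000 us.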
+ return (numFrames * 1000000LL / mNativeSampleRateInHz);
+}
+
+int64_t MediaSync::getPlayedOutAudioDurationMedia_l(int64_t nowUs) {
+ CHECK(mAudioTrack != NULL);
+
+ uint32_t numFramesPlayed;
+ int64_t numFramesPlayedAt;
+ AudioTimestamp ts;
+ static const int64_t kStaleTimestamp100ms = 100000;
+
+ status_t res = mAudioTrack->getTimestamp(ts);
+ if (res == OK) {
+ // case 1: mixing audio tracks.
+ numFramesPlayed = ts.mPosition;
+ numFramesPlayedAt =
+ ts.mTime.tv_sec * 1000000LL + ts.mTime.tv_nsec / 1000;
+ const int64_t timestampAge = nowUs - numFramesPlayedAt;
+ if (timestampAge > kStaleTimestamp100ms) {
+ // This is an audio FIXME.
+ // getTimestamp returns a timestamp which may come from audio
+ // mixing threads. After pausing, the MixerThread may go idle,
+ // thus the mTime estimate may become stale. Assuming that the
+ // MixerThread runs every 20ms, with FastMixer at 5ms, the max latency
+ // should be about 25ms with an average around 12ms (to be
+ // verified). For safety we use 100ms.
+ ALOGV("getTimestamp: returned stale timestamp nowUs(%lld) "
+ "numFramesPlayedAt(%lld)",
+ (long long)nowUs, (long long)numFramesPlayedAt);
+ numFramesPlayedAt = nowUs - kStaleTimestamp100ms;
+ }
+ //ALOGD("getTimestamp: OK %d %lld",
+ // numFramesPlayed, (long long)numFramesPlayedAt);
+ } else if (res == WOULD_BLOCK) {
+ // case 2: transitory state on start of a new track
+ numFramesPlayed = 0;
+ numFramesPlayedAt = nowUs;
+ //ALOGD("getTimestamp: WOULD_BLOCK %d %lld",
+ // numFramesPlayed, (long long)numFramesPlayedAt);
+ } else {
+ // case 3: transitory at new track or audio fast tracks.
+ res = mAudioTrack->getPosition(&numFramesPlayed);
+ CHECK_EQ(res, (status_t)OK);
+ numFramesPlayedAt = nowUs;
+ numFramesPlayedAt += 1000LL * mAudioTrack->latency() / 2; /* XXX */
+ //ALOGD("getPosition: %d %lld", numFramesPlayed, numFramesPlayedAt);
+ }
+
+ // can't be negative until ~12.4 hrs of playback (numFramesPlayed sign bit); test.
+ //CHECK_EQ(numFramesPlayed & (1 << 31), 0);
+ int64_t durationUs =
+ getDurationIfPlayedAtNativeSampleRate_l(numFramesPlayed)
+ + nowUs - numFramesPlayedAt;
+ if (durationUs < 0) {
+ // Occurs when the numFramesPlayed position is very small and either:
+ // (1) In case 1, nowUs is computed before getTimestamp() is called and
+ //     numFramesPlayedAt exceeds nowUs by more than the duration of
+ //     numFramesPlayed.
+ // (2) In case 3, getPosition is used and mAudioTrack->latency() is added
+ //     to numFramesPlayedAt, again exceeding nowUs by more than the
+ //     duration of numFramesPlayed.
+ //
+ // Both of these are transitory conditions.
+ ALOGV("getPlayedOutAudioDurationMedia_l: negative duration %lld "
+ "set to zero", (long long)durationUs);
+ durationUs = 0;
+ }
+ ALOGV("getPlayedOutAudioDurationMedia_l(%lld) nowUs(%lld) frames(%u) "
+ "framesAt(%lld)",
+ (long long)durationUs, (long long)nowUs, numFramesPlayed,
+ (long long)numFramesPlayedAt);
+ return durationUs;
+}
+
+void MediaSync::onDrainVideo_l() {
+ if (!isPlaying()) {
+ return;
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+
+ while (!mBufferItems.empty()) {
+ BufferItem *bufferItem = &*mBufferItems.begin();
+ int64_t itemMediaUs = bufferItem->mTimestamp / 1000;
+ int64_t itemRealUs = getRealTime(itemMediaUs, nowUs);
+ if (itemRealUs <= nowUs) {
+ if (mHasAudio) {
+ if (nowUs - itemRealUs <= kMaxAllowedVideoLateTimeUs) {
+ renderOneBufferItem_l(*bufferItem);
+ } else {
+ // too late.
+ returnBufferToInput_l(
+ bufferItem->mGraphicBuffer, bufferItem->mFence);
+ }
+ } else {
+ // always render video buffer in video-only mode.
+ renderOneBufferItem_l(*bufferItem);
+
+ // smooth out videos at >= 10fps: anchor the clock at this frame and let it run up to 100ms ahead
+ mMediaClock->updateAnchor(
+ itemMediaUs, nowUs, itemMediaUs + 100000);
+ }
+
+ mBufferItems.erase(mBufferItems.begin());
+
+ if (mBufferItems.empty()) {
+ mNextBufferItemMediaUs = -1;
+ }
+ } else {
+ if (mNextBufferItemMediaUs == -1
+ || mNextBufferItemMediaUs != itemMediaUs) {
+ sp<AMessage> msg = new AMessage(kWhatDrainVideo, this);
+ msg->post(itemRealUs - nowUs);
+ }
+ break;
+ }
+ }
+}
+
+void MediaSync::onFrameAvailableFromInput() {
+ Mutex::Autolock lock(mMutex);
+
+ // If there are too many outstanding buffers, wait until a buffer is
+ // released back to the input in onBufferReleased.
+ while (mNumOutstandingBuffers >= MAX_OUTSTANDING_BUFFERS) {
+ mReleaseCondition.wait(mMutex);
+
+ // If the sync is abandoned while we are waiting, the release
+ // condition variable will be broadcast, and we should just return
+ // without attempting to do anything more (since the input queue will
+ // also be abandoned).
+ if (mIsAbandoned) {
+ return;
+ }
+ }
+ ++mNumOutstandingBuffers;
+
+ // Acquire and detach the buffer from the input.
+ BufferItem bufferItem;
+ status_t status = mInput->acquireBuffer(&bufferItem, 0 /* presentWhen */);
+ if (status != NO_ERROR) {
+ ALOGE("acquiring buffer from input failed (%d)", status);
+ return;
+ }
+
+ ALOGV("acquired buffer %#llx from input", (long long)bufferItem.mGraphicBuffer->getId());
+
+ status = mInput->detachBuffer(bufferItem.mBuf);
+ if (status != NO_ERROR) {
+ ALOGE("detaching buffer from input failed (%d)", status);
+ if (status == NO_INIT) {
+ // If the input has been abandoned, move on.
+ onAbandoned_l(true /* isInput */);
+ }
+ return;
+ }
+
+ mBufferItems.push_back(bufferItem);
+ onDrainVideo_l();
+}
+
+void MediaSync::renderOneBufferItem_l(const BufferItem &bufferItem) {
+ IGraphicBufferProducer::QueueBufferInput queueInput(
+ bufferItem.mTimestamp,
+ bufferItem.mIsAutoTimestamp,
+ bufferItem.mDataSpace,
+ bufferItem.mCrop,
+ static_cast<int32_t>(bufferItem.mScalingMode),
+ bufferItem.mTransform,
+ bufferItem.mIsDroppable,
+ bufferItem.mFence);
+
+ // Attach and queue the buffer to the output.
+ int slot;
+ status_t status = mOutput->attachBuffer(&slot, bufferItem.mGraphicBuffer);
+ ALOGE_IF(status != NO_ERROR, "attaching buffer to output failed (%d)", status);
+ if (status == NO_ERROR) {
+ IGraphicBufferProducer::QueueBufferOutput queueOutput;
+ status = mOutput->queueBuffer(slot, queueInput, &queueOutput);
+ ALOGE_IF(status != NO_ERROR, "queueing buffer to output failed (%d)", status);
+ }
+
+ if (status != NO_ERROR) {
+ returnBufferToInput_l(bufferItem.mGraphicBuffer, bufferItem.mFence);
+ if (status == NO_INIT) {
+ // If the output has been abandoned, move on.
+ onAbandoned_l(false /* isInput */);
+ }
+ return;
+ }
+
+ ALOGV("queued buffer %#llx to output", (long long)bufferItem.mGraphicBuffer->getId());
+}
+
+void MediaSync::onBufferReleasedByOutput() {
+ Mutex::Autolock lock(mMutex);
+
+ sp<GraphicBuffer> buffer;
+ sp<Fence> fence;
+ status_t status = mOutput->detachNextBuffer(&buffer, &fence);
+ ALOGE_IF(status != NO_ERROR, "detaching buffer from output failed (%d)", status);
+
+ if (status == NO_INIT) {
+ // If the output has been abandoned, we can't do anything else,
+ // since buffer is invalid.
+ onAbandoned_l(false /* isInput */);
+ return;
+ }
+
+ ALOGV("detached buffer %#llx from output", (long long)buffer->getId());
+
+ // If we've been abandoned, we can't return the buffer to the input, so just
+ // move on.
+ if (mIsAbandoned) {
+ return;
+ }
+
+ returnBufferToInput_l(buffer, fence);
+}
+
+void MediaSync::returnBufferToInput_l(
+ const sp<GraphicBuffer> &buffer, const sp<Fence> &fence) {
+ // Attach and release the buffer back to the input.
+ int consumerSlot;
+ status_t status = mInput->attachBuffer(&consumerSlot, buffer);
+ ALOGE_IF(status != NO_ERROR, "attaching buffer to input failed (%d)", status);
+ if (status == NO_ERROR) {
+ status = mInput->releaseBuffer(consumerSlot, 0 /* frameNumber */,
+ EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
+ ALOGE_IF(status != NO_ERROR, "releasing buffer to input failed (%d)", status);
+ }
+
+ if (status != NO_ERROR) {
+ // TODO: do we need to try to return this buffer later?
+ return;
+ }
+
+ ALOGV("released buffer %#llx to input", (long long)buffer->getId());
+
+ // Notify any waiting onFrameAvailable calls.
+ --mNumOutstandingBuffers;
+ mReleaseCondition.signal();
+}
+
+void MediaSync::onAbandoned_l(bool isInput) {
+ ALOGE("the %s has abandoned me", (isInput ? "input" : "output"));
+ if (!mIsAbandoned) {
+ if (isInput) {
+ mOutput->disconnect(NATIVE_WINDOW_API_MEDIA);
+ } else {
+ mInput->consumerDisconnect();
+ }
+ mIsAbandoned = true;
+ }
+ mReleaseCondition.broadcast();
+}
+
+void MediaSync::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatDrainVideo:
+ {
+ Mutex::Autolock lock(mMutex);
+ onDrainVideo_l();
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+MediaSync::InputListener::InputListener(const sp<MediaSync> &sync)
+ : mSync(sync) {}
+
+MediaSync::InputListener::~InputListener() {}
+
+void MediaSync::InputListener::onFrameAvailable(const BufferItem &/* item */) {
+ mSync->onFrameAvailableFromInput();
+}
+
+// We don't care about sideband streams, since we won't relay them.
+void MediaSync::InputListener::onSidebandStreamChanged() {
+ ALOGE("onSidebandStreamChanged: got sideband stream unexpectedly.");
+}
+
+void MediaSync::InputListener::binderDied(const wp<IBinder> &/* who */) {
+ Mutex::Autolock lock(mSync->mMutex);
+ mSync->onAbandoned_l(true /* isInput */);
+}
+
+MediaSync::OutputListener::OutputListener(const sp<MediaSync> &sync)
+ : mSync(sync) {}
+
+MediaSync::OutputListener::~OutputListener() {}
+
+void MediaSync::OutputListener::onBufferReleased() {
+ mSync->onBufferReleasedByOutput();
+}
+
+void MediaSync::OutputListener::binderDied(const wp<IBinder> &/* who */) {
+ Mutex::Autolock lock(mSync->mMutex);
+ mSync->onAbandoned_l(false /* isInput */);
+}
+
+} // namespace android
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index ea19ab2..4d30069 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -629,10 +629,14 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta) {
// These are PCM-like formats with a fixed sample rate but
// a variable number of channels.
+ int32_t sampleRate;
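+ // Default to 8000 Hz when the metadata carries no sample rate, matching the
+ // previous hard-coded G.711 behavior.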
int32_t numChannels;
CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
+ if (!meta->findInt32(kKeySampleRate, &sampleRate)) {
+ sampleRate = 8000;
+ }
- setG711Format(numChannels);
+ setG711Format(sampleRate, numChannels);
} else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mMIME)) {
CHECK(!mIsEncoder);
@@ -3616,9 +3620,9 @@ status_t OMXCodec::setAC3Format(int32_t numChannels, int32_t sampleRate) {
sizeof(def));
}
-void OMXCodec::setG711Format(int32_t numChannels) {
+void OMXCodec::setG711Format(int32_t sampleRate, int32_t numChannels) {
CHECK(!mIsEncoder);
- setRawAudioFormat(kPortIndexInput, 8000, numChannels);
+ setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
}
void OMXCodec::setImageOutputFormat(
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.cpp b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
index 3a69095..015515e 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.cpp
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.cpp
@@ -41,8 +41,9 @@ SoftG711::SoftG711(
OMX_COMPONENTTYPE **component)
: SimpleSoftOMXComponent(name, callbacks, appData, component),
mIsMLaw(true),
+ mSignalledError(false),
mNumChannels(1),
- mSignalledError(false) {
+ mSamplingRate(8000) {
if (!strcmp(name, "OMX.google.g711.alaw.decoder")) {
mIsMLaw = false;
} else {
@@ -129,7 +130,7 @@ OMX_ERRORTYPE SoftG711::internalGetParameter(
pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
pcmParams->nChannels = mNumChannels;
- pcmParams->nSamplingRate = 8000;
+ pcmParams->nSamplingRate = mSamplingRate;
return OMX_ErrorNone;
}
@@ -159,6 +160,8 @@ OMX_ERRORTYPE SoftG711::internalSetParameter(
mNumChannels = pcmParams->nChannels;
}
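+ // Remember the configured rate so internalGetParameter() reports it back
+ // instead of a hard-coded 8000.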
+ mSamplingRate = pcmParams->nSamplingRate;
+
return OMX_ErrorNone;
}
diff --git a/media/libstagefright/codecs/g711/dec/SoftG711.h b/media/libstagefright/codecs/g711/dec/SoftG711.h
index bff0c68..16b6340 100644
--- a/media/libstagefright/codecs/g711/dec/SoftG711.h
+++ b/media/libstagefright/codecs/g711/dec/SoftG711.h
@@ -46,8 +46,9 @@ private:
};
bool mIsMLaw;
- OMX_U32 mNumChannels;
bool mSignalledError;
+ OMX_U32 mNumChannels;
+ int32_t mSamplingRate;
void initPorts();
diff --git a/media/libstagefright/data/media_codecs_google_audio.xml b/media/libstagefright/data/media_codecs_google_audio.xml
index a06684b..b957b0c 100644
--- a/media/libstagefright/data/media_codecs_google_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_audio.xml
@@ -38,12 +38,12 @@
</MediaCodec>
<MediaCodec name="OMX.google.g711.alaw.decoder" type="audio/g711-alaw">
<Limit name="channel-count" max="1" />
- <Limit name="sample-rate" ranges="8000" />
+ <Limit name="sample-rate" ranges="8000-48000" />
<Limit name="bitrate" range="64000" />
</MediaCodec>
<MediaCodec name="OMX.google.g711.mlaw.decoder" type="audio/g711-mlaw">
<Limit name="channel-count" max="1" />
- <Limit name="sample-rate" ranges="8000" />
+ <Limit name="sample-rate" ranges="8000-48000" />
<Limit name="bitrate" range="64000" />
</MediaCodec>
<MediaCodec name="OMX.google.vorbis.decoder" type="audio/vorbis">
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index f5328a6..4ac2fb2 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -33,6 +33,7 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaErrors.h>
@@ -49,12 +50,6 @@
namespace android {
-// static
-// High water mark to start up switch or report prepared)
-const int64_t LiveSession::kHighWaterMark = 8000000ll;
-const int64_t LiveSession::kMidWaterMark = 5000000ll;
-const int64_t LiveSession::kLowWaterMark = 3000000ll;
-
struct LiveSession::BandwidthEstimator : public RefBase {
BandwidthEstimator();
@@ -119,15 +114,35 @@ bool LiveSession::BandwidthEstimator::estimateBandwidth(int32_t *bandwidthBps) {
return true;
}
+//static
+const char *LiveSession::getKeyForStream(StreamType type) {
+ switch (type) {
+ case STREAMTYPE_VIDEO:
+ return "timeUsVideo";
+ case STREAMTYPE_AUDIO:
+ return "timeUsAudio";
+ case STREAMTYPE_SUBTITLES:
+ return "timeUsSubtitle";
+ default:
+ TRESPASS();
+ }
+ return NULL;
+}
+
LiveSession::LiveSession(
const sp<AMessage> &notify, uint32_t flags,
const sp<IMediaHTTPService> &httpService)
: mNotify(notify),
mFlags(flags),
mHTTPService(httpService),
+ mBuffering(false),
mInPreparationPhase(true),
+ mPollBufferingGeneration(0),
+ mPrevBufferPercentage(-1),
mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())),
mCurBandwidthIndex(-1),
+ mOrigBandwidthIndex(-1),
+ mLastBandwidthBps(-1ll),
mBandwidthEstimator(new BandwidthEstimator()),
mStreamMask(0),
mNewStreamMask(0),
@@ -138,11 +153,12 @@ LiveSession::LiveSession(
mRealTimeBaseUs(0ll),
mReconfigurationInProgress(false),
mSwitchInProgress(false),
+ mUpSwitchMark(kUpSwitchMark),
+ mDownSwitchMark(kDownSwitchMark),
+ mUpSwitchMargin(kUpSwitchMargin),
mFirstTimeUsValid(false),
mFirstTimeUs(0),
- mLastSeekTimeUs(0),
- mPollBufferingGeneration(0) {
-
+ mLastSeekTimeUs(0) {
mStreams[kAudioIndex] = StreamItem("audio");
mStreams[kVideoIndex] = StreamItem("video");
mStreams[kSubtitleIndex] = StreamItem("subtitles");
@@ -159,37 +175,19 @@ LiveSession::~LiveSession() {
}
}
-sp<ABuffer> LiveSession::createFormatChangeBuffer(bool swap) {
- ABuffer *discontinuity = new ABuffer(0);
- discontinuity->meta()->setInt32("discontinuity", ATSParser::DISCONTINUITY_FORMATCHANGE);
- discontinuity->meta()->setInt32("swapPacketSource", swap);
- discontinuity->meta()->setInt32("switchGeneration", mSwitchGeneration);
- discontinuity->meta()->setInt64("timeUs", -1);
- return discontinuity;
-}
-
-void LiveSession::swapPacketSource(StreamType stream) {
- sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream);
- sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream);
- sp<AnotherPacketSource> tmp = aps;
- aps = aps2;
- aps2 = tmp;
- aps2->clear();
-}
-
status_t LiveSession::dequeueAccessUnit(
StreamType stream, sp<ABuffer> *accessUnit) {
- if (!(mStreamMask & stream)) {
- // return -EWOULDBLOCK to avoid halting the decoder
- // when switching between audio/video and audio only.
- return -EWOULDBLOCK;
- }
-
status_t finalResult = OK;
sp<AnotherPacketSource> packetSource = mPacketSources.valueFor(stream);
ssize_t idx = typeToIndex(stream);
- if (!packetSource->hasBufferAvailable(&finalResult)) {
+ // Do not let the client pull data if we don't have any data packets yet.
+ // We might only have a format discontinuity queued without data.
+ // When NuPlayerDecoder dequeues the format discontinuity, it will
+ // immediately try to getFormat. If we return NULL, NuPlayerDecoder
+ // thinks it can do a seamless change, so it will not shut down the decoder.
+ // When the actual format arrives, it can't handle the change and gets stuck.
+ if (!packetSource->hasDataBufferAvailable(&finalResult)) {
if (finalResult == OK) {
return -EAGAIN;
} else {
@@ -197,49 +195,8 @@ status_t LiveSession::dequeueAccessUnit(
}
}
- // Do not let client pull data if we don't have format yet.
- // We might only have a format discontinuity queued without actual data.
- // When NuPlayerDecoder dequeues the format discontinuity, it will
- // immediately try to getFormat. If we return NULL, NuPlayerDecoder
- // thinks it can do seamless change, so will not shutdown decoder.
- // When the actual format arrives, it can't handle it and get stuck.
- // TODO: We need a method to check if the packet source has any
- // data packets available, dequeuing should only start then.
- sp<MetaData> format = packetSource->getFormat();
- if (format == NULL) {
- return -EAGAIN;
- }
- int32_t targetDuration = 0;
- sp<AMessage> meta = packetSource->getLatestEnqueuedMeta();
- if (meta != NULL) {
- meta->findInt32("targetDuration", &targetDuration);
- }
-
- int64_t targetDurationUs = targetDuration * 1000000ll;
- if (targetDurationUs == 0 ||
- targetDurationUs > PlaylistFetcher::kMinBufferedDurationUs) {
- // Fetchers limit buffering to
- // min(3 * targetDuration, kMinBufferedDurationUs)
- targetDurationUs = PlaylistFetcher::kMinBufferedDurationUs;
- }
-
- // wait for counterpart
- sp<AnotherPacketSource> otherSource;
- uint32_t mask = mNewStreamMask & mStreamMask;
- uint32_t fetchersMask = 0;
- for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
- uint32_t fetcherMask = mFetcherInfos.valueAt(i).mFetcher->getStreamTypeMask();
- fetchersMask |= fetcherMask;
- }
- mask &= fetchersMask;
- if (stream == STREAMTYPE_AUDIO && (mask & STREAMTYPE_VIDEO)) {
- otherSource = mPacketSources.valueFor(STREAMTYPE_VIDEO);
- } else if (stream == STREAMTYPE_VIDEO && (mask & STREAMTYPE_AUDIO)) {
- otherSource = mPacketSources.valueFor(STREAMTYPE_AUDIO);
- }
- if (otherSource != NULL && !otherSource->hasBufferAvailable(&finalResult)) {
- return finalResult == OK ? -EAGAIN : finalResult;
- }
+ // Let the client dequeue as long as we have buffers available.
+ // Do not make pause/resume decisions here.
status_t err = packetSource->dequeueAccessUnit(accessUnit);
@@ -277,42 +234,6 @@ status_t LiveSession::dequeueAccessUnit(
streamStr,
type,
extra == NULL ? "NULL" : extra->debugString().c_str());
-
- int32_t swap;
- if ((*accessUnit)->meta()->findInt32("swapPacketSource", &swap) && swap) {
- int32_t switchGeneration;
- CHECK((*accessUnit)->meta()->findInt32("switchGeneration", &switchGeneration));
- {
- Mutex::Autolock lock(mSwapMutex);
- if (switchGeneration == mSwitchGeneration) {
- swapPacketSource(stream);
- sp<AMessage> msg = new AMessage(kWhatSwapped, this);
- msg->setInt32("stream", stream);
- msg->setInt32("switchGeneration", switchGeneration);
- msg->post();
- }
- }
- } else {
- size_t seq = strm.mCurDiscontinuitySeq;
- int64_t offsetTimeUs;
- if (mDiscontinuityOffsetTimesUs.indexOfKey(seq) >= 0) {
- offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(seq);
- } else {
- offsetTimeUs = 0;
- }
-
- seq += 1;
- if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
- int64_t firstTimeUs;
- firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq);
- offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs;
- offsetTimeUs += strm.mLastSampleDurationUs;
- } else {
- offsetTimeUs += strm.mLastSampleDurationUs;
- }
-
- mDiscontinuityOffsetTimesUs.add(seq, offsetTimeUs);
- }
} else if (err == OK) {
if (stream == STREAMTYPE_AUDIO || stream == STREAMTYPE_VIDEO) {
@@ -320,7 +241,26 @@ status_t LiveSession::dequeueAccessUnit(
int32_t discontinuitySeq = 0;
CHECK((*accessUnit)->meta()->findInt64("timeUs", &timeUs));
(*accessUnit)->meta()->findInt32("discontinuitySeq", &discontinuitySeq);
- strm.mCurDiscontinuitySeq = discontinuitySeq;
+ if (discontinuitySeq > (int32_t) strm.mCurDiscontinuitySeq) {
+ int64_t offsetTimeUs;
+ if (mDiscontinuityOffsetTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
+ offsetTimeUs = mDiscontinuityOffsetTimesUs.valueFor(strm.mCurDiscontinuitySeq);
+ } else {
+ offsetTimeUs = 0;
+ }
+
+ if (mDiscontinuityAbsStartTimesUs.indexOfKey(strm.mCurDiscontinuitySeq) >= 0) {
+ int64_t firstTimeUs;
+ firstTimeUs = mDiscontinuityAbsStartTimesUs.valueFor(strm.mCurDiscontinuitySeq);
+ offsetTimeUs += strm.mLastDequeuedTimeUs - firstTimeUs;
+ offsetTimeUs += strm.mLastSampleDurationUs;
+ } else {
+ offsetTimeUs += strm.mLastSampleDurationUs;
+ }
+
+ mDiscontinuityOffsetTimesUs.add(discontinuitySeq, offsetTimeUs);
+ strm.mCurDiscontinuitySeq = discontinuitySeq;
+ }
int32_t discard = 0;
int64_t firstTimeUs;
@@ -373,7 +313,6 @@ status_t LiveSession::dequeueAccessUnit(
}
status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) {
- // No swapPacketSource race condition; called from the same thread as dequeueAccessUnit.
if (!(mStreamMask & stream)) {
return UNKNOWN_ERROR;
}
@@ -386,9 +325,18 @@ status_t LiveSession::getStreamFormat(StreamType stream, sp<AMessage> *format) {
return -EAGAIN;
}
+ if (stream == STREAMTYPE_AUDIO) {
+ // set AAC input buffer size to 32K bytes (256kbps x 1sec)
+ meta->setInt32(kKeyMaxInputSize, 32 * 1024);
+ }
+
return convertMetaDataToMessage(meta, format);
}
+sp<HTTPBase> LiveSession::getHTTPDataSource() {
+ return new MediaHTTP(mHTTPService->makeHTTPConnection());
+}
+
void LiveSession::connectAsync(
const char *url, const KeyedVector<String8, String8> *headers) {
sp<AMessage> msg = new AMessage(kWhatConnect, this);
@@ -422,6 +370,102 @@ status_t LiveSession::seekTo(int64_t timeUs) {
return err;
}
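+// Check how far the decoder has progressed relative to what the new fetcher has
+// buffered. Returns false if playback has already run past everything in
+// mPacketSources2 (the switch must be cancelled or retried); returns true
+// otherwise, and sets *needResumeUntil if the old fetcher still has a gap to fill.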
+bool LiveSession::checkSwitchProgress(
+ sp<AMessage> &stopParams, int64_t delayUs, bool *needResumeUntil) {
+ AString newUri;
+ CHECK(stopParams->findString("uri", &newUri));
+
+ *needResumeUntil = false;
+ sp<AMessage> firstNewMeta[kMaxStreams];
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ StreamType stream = indexToType(i);
+ if (!(mSwapMask & mNewStreamMask & stream)
+ || (mStreams[i].mNewUri != newUri)) {
+ continue;
+ }
+ if (stream == STREAMTYPE_SUBTITLES) {
+ continue;
+ }
+ sp<AnotherPacketSource> &source = mPacketSources.editValueAt(i);
+
+ // First, get the latest dequeued meta, which is where the decoder is at.
+ // (When upswitching, we take the meta after a certain delay, so that
+ // the decoder is left with some cushion.)
+ sp<AMessage> lastDequeueMeta, lastEnqueueMeta;
+ if (delayUs > 0) {
+ lastDequeueMeta = source->getMetaAfterLastDequeued(delayUs);
+ } else {
+ lastDequeueMeta = source->getLatestDequeuedMeta();
+ }
+ // Then, trim off packets at the beginning of mPacketSources2 that are
+ // before the latest dequeued time. These samples are definitely too late.
+ int64_t lastTimeUs, startTimeUs;
+ int32_t lastSeq, startSeq;
+ if (lastDequeueMeta != NULL) {
+ CHECK(lastDequeueMeta->findInt64("timeUs", &lastTimeUs));
+ CHECK(lastDequeueMeta->findInt32("discontinuitySeq", &lastSeq));
+ firstNewMeta[i] = mPacketSources2.editValueAt(i)
+ ->trimBuffersBeforeTimeUs(lastSeq, lastTimeUs);
+ }
+ // Now firstNewMeta[i] is the first sample after the trim.
+ // If it's NULL, we failed because the dequeue position is already past
+ // all samples in mPacketSources2, and we have to try again.
+ if (firstNewMeta[i] == NULL) {
+ ALOGV("[%s] dequeue time (%d, %lld) past start time",
+ stream == STREAMTYPE_AUDIO ? "audio" : "video",
+ lastSeq, (long long) lastTimeUs);
+ return false;
+ }
+
+ // Otherwise, check whether mPacketSources2 overlaps with what the old
+ // fetcher already fetched, and see if we need to resumeUntil.
+ lastEnqueueMeta = source->getLatestEnqueuedMeta();
+ // lastEnqueueMeta == NULL means the old fetcher stopped at a discontinuity
+ // boundary; no need to resume, as the content will look different anyway.
+ if (lastEnqueueMeta != NULL) {
+ CHECK(lastEnqueueMeta->findInt64("timeUs", &lastTimeUs));
+ CHECK(lastEnqueueMeta->findInt32("discontinuitySeq", &lastSeq));
+ CHECK(firstNewMeta[i]->findInt64("timeUs", &startTimeUs));
+ CHECK(firstNewMeta[i]->findInt32("discontinuitySeq", &startSeq));
+
+ // no need to resume the old fetcher if the new fetcher started in a
+ // different discontinuity sequence, as the content will look different.
+ *needResumeUntil |=
+ (startSeq == lastSeq
+ && startTimeUs - lastTimeUs > 100000ll);
+
+ // update the stopTime for resumeUntil, as we might have removed some
+ // packets from the head of mPacketSources2
+ stopParams->setInt64(getKeyForStream(stream), startTimeUs);
+ }
+ }
+
+ // If we're here, dequeue progress hasn't passed the first samples in
+ // mPacketSources2, so we can trim off the overlapping excess in mPacketSources.
+ // (The old fetcher might still need to resumeUntil the start time of the new fetcher.)
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ StreamType stream = indexToType(i);
+ if (!(mSwapMask & mNewStreamMask & stream)
+ || (newUri != mStreams[i].mNewUri)) {
+ continue;
+ }
+ if (stream == STREAMTYPE_SUBTITLES) {
+ continue;
+ }
+ int64_t startTimeUs;
+ int32_t startSeq;
+ CHECK(firstNewMeta[i] != NULL);
+ CHECK(firstNewMeta[i]->findInt64("timeUs", &startTimeUs));
+ CHECK(firstNewMeta[i]->findInt32("discontinuitySeq", &startSeq));
+ mPacketSources.valueFor(stream)->trimBuffersAfterTimeUs(startSeq, startTimeUs);
+ }
+
+ // no resumeUntil if already underflow
+ *needResumeUntil &= !mBuffering;
+
+ return true;
+}
+
void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatConnect:
@@ -468,21 +512,25 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
case PlaylistFetcher::kWhatPaused:
case PlaylistFetcher::kWhatStopped:
{
- if (what == PlaylistFetcher::kWhatStopped) {
- AString uri;
- CHECK(msg->findString("uri", &uri));
- ssize_t index = mFetcherInfos.indexOfKey(uri);
- if (index < 0) {
- // ignore duplicated kWhatStopped messages.
- break;
- }
+ AString uri;
+ CHECK(msg->findString("uri", &uri));
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index < 0) {
+ // ignore msgs from fetchers that are already gone
+ break;
+ }
+ if (what == PlaylistFetcher::kWhatStopped) {
mFetcherLooper->unregisterHandler(
mFetcherInfos[index].mFetcher->id());
mFetcherInfos.removeItemsAt(index);
-
- if (mSwitchInProgress) {
- tryToFinishBandwidthSwitch();
+ } else if (what == PlaylistFetcher::kWhatPaused) {
+ int32_t seekMode;
+ CHECK(msg->findInt32("seekMode", &seekMode));
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mUri == uri) {
+ mStreams[i].mSeekMode = (SeekMode) seekMode;
+ }
}
}
@@ -511,6 +559,16 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case PlaylistFetcher::kWhatTargetDurationUpdate:
+ {
+ int64_t targetDurationUs;
+ CHECK(msg->findInt64("targetDurationUs", &targetDurationUs));
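+ // Scale the switch watermarks with the playlist's target segment duration,
+ // so streams with short segments can switch with less buffered data.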
+ mUpSwitchMark = min(kUpSwitchMark, targetDurationUs * 3);
+ mDownSwitchMark = min(kDownSwitchMark, targetDurationUs * 9 / 4);
+ mUpSwitchMargin = min(kUpSwitchMargin, targetDurationUs);
+ break;
+ }
+
case PlaylistFetcher::kWhatError:
{
status_t err;
@@ -548,10 +606,23 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
mPacketSources.valueFor(
STREAMTYPE_SUBTITLES)->signalEOS(err);
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatError);
- notify->setInt32("err", err);
- notify->post();
+ postError(err);
+ break;
+ }
+
+ case PlaylistFetcher::kWhatStopReached:
+ {
+ ALOGV("kWhatStopReached");
+
+ AString uri;
+ CHECK(msg->findString("uri", &uri));
+
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index < 0) {
+ break;
+ }
+
+ tryToFinishBandwidthSwitch(uri);
break;
}
@@ -564,15 +635,69 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- // Resume fetcher for the original variant; the resumed fetcher should
- // continue until the timestamps found in msg, which is stored by the
- // new fetcher to indicate where the new variant has started buffering.
- for (size_t i = 0; i < mFetcherInfos.size(); i++) {
- const FetcherInfo info = mFetcherInfos.valueAt(i);
- if (info.mToBeRemoved) {
- info.mFetcher->resumeUntilAsync(msg);
+ AString uri;
+ CHECK(msg->findString("uri", &uri));
+
+ // mark new fetcher mToBeResumed
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index >= 0) {
+ mFetcherInfos.editValueAt(index).mToBeResumed = true;
+ }
+
+ // temporarily disable the packet sources that are about to be swapped,
+ // to prevent NuPlayerDecoder from dequeuing while we check progress
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ if ((mSwapMask & mPacketSources.keyAt(i))
+ && uri == mStreams[i].mNewUri) {
+ mPacketSources.editValueAt(i)->enable(false);
}
}
+ bool switchUp = (mCurBandwidthIndex > mOrigBandwidthIndex);
+ // If switching up, require a cushion bigger than kUnderflowMark
+ // to avoid buffering immediately after the switch.
+ // (If we don't have that cushion we'd rather cancel and try again.)
+ int64_t delayUs = switchUp ? (kUnderflowMark + 1000000ll) : 0;
+ bool needResumeUntil = false;
+ sp<AMessage> stopParams = msg;
+ if (checkSwitchProgress(stopParams, delayUs, &needResumeUntil)) {
+ // playback time hasn't passed startAt time
+ if (!needResumeUntil) {
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if ((mSwapMask & indexToType(i))
+ && uri == mStreams[i].mNewUri) {
+ // have to make a copy of mStreams[i].mUri because
+ // tryToFinishBandwidthSwitch is modifying mStreams[]
+ AString oldURI = mStreams[i].mUri;
+ tryToFinishBandwidthSwitch(oldURI);
+ break;
+ }
+ }
+ } else {
+ // startAt time is after last enqueue time
+ // Resume fetcher for the original variant; the resumed fetcher should
+ // continue until the timestamps found in msg, which is stored by the
+ // new fetcher to indicate where the new variant has started buffering.
+ for (size_t i = 0; i < mFetcherInfos.size(); i++) {
+ const FetcherInfo &info = mFetcherInfos.valueAt(i);
+ if (info.mToBeRemoved) {
+ info.mFetcher->resumeUntilAsync(stopParams);
+ }
+ }
+ }
+ } else {
+ // playback time passed startAt time
+ if (switchUp) {
+ // if switching up, cancel and retry if condition satisfies again
+ cancelBandwidthSwitch(true /* resume */);
+ } else {
+ resumeFetcher(uri, mSwapMask, -1, true /* newUri */);
+ }
+ }
+ // re-enable all packet sources
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ mPacketSources.editValueAt(i)->enable(true);
+ }
+
break;
}
@@ -607,12 +732,6 @@ void LiveSession::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatSwapped:
- {
- onSwapped(msg);
- break;
- }
-
case kWhatPollBuffering:
{
int32_t generation;
@@ -746,16 +865,13 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
mPlaylist->pickRandomMediaItems();
changeConfiguration(
0ll /* timeUs */, initialBandwidthIndex, false /* pickTrack */);
-
- schedulePollBuffering();
}
void LiveSession::finishDisconnect() {
+ ALOGV("finishDisconnect");
+
// No reconfiguration is currently pending, make sure none will trigger
// during disconnection either.
-
- // Protect mPacketSources from a swapPacketSource race condition through disconnect.
- // (finishDisconnect, onFinishDisconnect2)
cancelBandwidthSwitch();
// cancel buffer polling
@@ -805,8 +921,8 @@ sp<PlaylistFetcher> LiveSession::addFetcher(const char *uri) {
FetcherInfo info;
info.mFetcher = new PlaylistFetcher(notify, this, uri, mSubtitleGeneration);
info.mDurationUs = -1ll;
- info.mIsPrepared = false;
info.mToBeRemoved = false;
+ info.mToBeResumed = false;
mFetcherLooper->registerHandler(info.mFetcher);
mFetcherInfos.add(uri, info);
@@ -835,14 +951,15 @@ ssize_t LiveSession::fetchFile(
int64_t range_offset, int64_t range_length,
uint32_t block_size, /* download block size */
sp<DataSource> *source, /* to return and reuse source */
- String8 *actualUrl) {
+ String8 *actualUrl,
+ bool forceConnectHTTP /* force connect HTTP when reusing the source */) {
off64_t size;
sp<DataSource> temp_source;
if (source == NULL) {
source = &temp_source;
}
- if (*source == NULL) {
+ if (*source == NULL || forceConnectHTTP) {
if (!strncasecmp(url, "file://", 7)) {
*source = new FileSource(url + 7);
} else if (strncasecmp(url, "http://", 7)
@@ -861,13 +978,18 @@ ssize_t LiveSession::fetchFile(
? "" : AStringPrintf("%lld",
range_offset + range_length - 1).c_str()).c_str()));
}
- status_t err = mHTTPDataSource->connect(url, &headers);
+
+ HTTPBase* httpDataSource =
+ (*source == NULL) ? mHTTPDataSource.get() : (HTTPBase*)source->get();
+ status_t err = httpDataSource->connect(url, &headers);
if (err != OK) {
return err;
}
- *source = mHTTPDataSource;
+ if (*source == NULL) {
+ *source = mHTTPDataSource;
+ }
}
}
@@ -1003,6 +1125,99 @@ static double uniformRand() {
}
#endif
+bool LiveSession::resumeFetcher(
+ const AString &uri, uint32_t streamMask, int64_t timeUs, bool newUri) {
+ ssize_t index = mFetcherInfos.indexOfKey(uri);
+ if (index < 0) {
+ ALOGE("did not find fetcher for uri: %s", uri.c_str());
+ return false;
+ }
+
+ bool resume = false;
+ sp<AnotherPacketSource> sources[kMaxStreams];
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if ((streamMask & indexToType(i))
+ && ((!newUri && uri == mStreams[i].mUri)
+ || (newUri && uri == mStreams[i].mNewUri))) {
+ resume = true;
+ if (newUri) {
+ sources[i] = mPacketSources2.valueFor(indexToType(i));
+ sources[i]->clear();
+ } else {
+ sources[i] = mPacketSources.valueFor(indexToType(i));
+ }
+ }
+ }
+
+ if (resume) {
+ ALOGV("resuming fetcher %s, timeUs %lld", uri.c_str(), (long long)timeUs);
+ SeekMode seekMode = newUri ? kSeekModeNextSample : kSeekModeExactPosition;
+ mFetcherInfos.editValueAt(index).mFetcher->startAsync(
+ sources[kAudioIndex],
+ sources[kVideoIndex],
+ sources[kSubtitleIndex],
+ timeUs, -1, -1, seekMode);
+ }
+
+ return resume;
+}
+
+float LiveSession::getAbortThreshold(
+ ssize_t currentBWIndex, ssize_t targetBWIndex) const {
+ float abortThreshold = -1.0f;
+ if (currentBWIndex > 0 && targetBWIndex < currentBWIndex) {
+ /*
+ If we're switching down, we need to decide whether to
+
+ 1) finish last segment of high-bandwidth variant, or
+ 2) abort last segment of high-bandwidth variant, and fetch an
+ overlapping portion from low-bandwidth variant.
+
+ Here we try to maximize the amount of buffer left when the
+ switch point is met. Given the following parameters:
+
+ B: our current buffering level in seconds
+ T: target duration in seconds
+      X: sample duration in seconds remaining to fetch in the last segment
+ bw0: bandwidth of old variant (as specified in playlist)
+ bw1: bandwidth of new variant (as specified in playlist)
+ bw: measured bandwidth available
+
+      If we choose 1), when the switch happens at the end of the current
+      segment, our buffering will be
+          B + X - X * bw0 / bw
+
+      If we choose 2), when the switch happens where we aborted the current
+      segment, our buffering will be
+          B - (T - X) * bw1 / bw
+
+ We should only choose 1) if
+ X/T < bw1 / (bw1 + bw0 - bw)
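+
+      Worked example (illustrative numbers only): with bw0 = 3 Mbps,
+      bw1 = 1 Mbps and a measured bandwidth of 4 Mbps (discounted to
+      2 Mbps below), the threshold is 1 / (1 + 3 - 2) = 0.5, i.e. we
+      only finish the current segment if less than half of it remains.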
+ */
+
+ // Take the measured current bandwidth at only 50% face value,
+ // as our bandwidth estimate is a lagging indicator. Being
+ // conservative here, we prefer switching to the lower bandwidth
+ // unless we're really confident that finishing up the last segment
+ // at the higher bandwidth will be fast.
+ CHECK(mLastBandwidthBps >= 0);
+ abortThreshold =
+ (float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth
+ / ((float)mBandwidthItems.itemAt(targetBWIndex).mBandwidth
+ + (float)mBandwidthItems.itemAt(currentBWIndex).mBandwidth
+ - (float)mLastBandwidthBps * 0.5f);
+ if (abortThreshold < 0.0f) {
+ abortThreshold = -1.0f; // do not abort
+ }
+ ALOGV("Switching Down: bps %ld => %ld, measured %d, abort ratio %.2f",
+ mBandwidthItems.itemAt(currentBWIndex).mBandwidth,
+ mBandwidthItems.itemAt(targetBWIndex).mBandwidth,
+ mLastBandwidthBps,
+ abortThreshold);
+ }
+ return abortThreshold;
+}
+
void LiveSession::addBandwidthMeasurement(size_t numBytes, int64_t delayUs) {
mBandwidthEstimator->addBandwidthMeasurement(numBytes, delayUs);
}
@@ -1130,7 +1345,7 @@ status_t LiveSession::onSeek(const sp<AMessage> &msg) {
CHECK(msg->findInt64("timeUs", &timeUs));
if (!mReconfigurationInProgress) {
- changeConfiguration(timeUs, mCurBandwidthIndex);
+ changeConfiguration(timeUs);
return OK;
} else {
return -EWOULDBLOCK;
@@ -1186,7 +1401,6 @@ status_t LiveSession::selectTrack(size_t index, bool select) {
status_t err = mPlaylist->selectTrack(index, select);
if (err == OK) {
sp<AMessage> msg = new AMessage(kWhatChangeConfiguration, this);
- msg->setInt32("bandwidthIndex", mCurBandwidthIndex);
msg->setInt32("pickTrack", select);
msg->post();
}
@@ -1202,21 +1416,17 @@ ssize_t LiveSession::getSelectedTrack(media_track_type type) const {
}
void LiveSession::changeConfiguration(
- int64_t timeUs, size_t bandwidthIndex, bool pickTrack) {
- // Protect mPacketSources from a swapPacketSource race condition through reconfiguration.
- // (changeConfiguration, onChangeConfiguration2, onChangeConfiguration3).
+ int64_t timeUs, ssize_t bandwidthIndex, bool pickTrack) {
cancelBandwidthSwitch();
CHECK(!mReconfigurationInProgress);
mReconfigurationInProgress = true;
-
- mCurBandwidthIndex = bandwidthIndex;
-
- ALOGV("changeConfiguration => timeUs:%" PRId64 " us, bwIndex:%zu, pickTrack:%d",
- timeUs, bandwidthIndex, pickTrack);
-
- CHECK_LT(bandwidthIndex, mBandwidthItems.size());
- const BandwidthItem &item = mBandwidthItems.itemAt(bandwidthIndex);
+ if (bandwidthIndex >= 0) {
+ mOrigBandwidthIndex = mCurBandwidthIndex;
+ mCurBandwidthIndex = bandwidthIndex;
+ }
+ CHECK_LT(mCurBandwidthIndex, mBandwidthItems.size());
+ const BandwidthItem &item = mBandwidthItems.itemAt(mCurBandwidthIndex);
uint32_t streamMask = 0; // streams that should be fetched by the new fetcher
uint32_t resumeMask = 0; // streams that should be fetched by the original fetcher
@@ -1231,6 +1441,12 @@ void LiveSession::changeConfiguration(
// Step 1, stop and discard fetchers that are no longer needed.
// Pause those that we'll reuse.
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
+ // skip fetchers that are marked mToBeRemoved,
+ // these are done and can't be reused
+ if (mFetcherInfos[i].mToBeRemoved) {
+ continue;
+ }
+
const AString &uri = mFetcherInfos.keyAt(i);
bool discardFetcher = true;
@@ -1238,7 +1454,6 @@ void LiveSession::changeConfiguration(
if (timeUs < 0ll) {
// delay fetcher removal if not picking tracks
discardFetcher = pickTrack;
-
}
for (size_t j = 0; j < kMaxStreams; ++j) {
@@ -1253,9 +1468,19 @@ void LiveSession::changeConfiguration(
if (discardFetcher) {
mFetcherInfos.valueAt(i).mFetcher->stopAsync();
} else {
- // if we're seeking, pause immediately (no need to finish the segment)
- bool immediate = (timeUs >= 0ll);
- mFetcherInfos.valueAt(i).mFetcher->pauseAsync(immediate);
+ float threshold = -1.0f; // always finish fetching by default
+ if (timeUs >= 0ll) {
+ // seeking, no need to finish fetching
+ threshold = 0.0f;
+ } else if (!pickTrack) {
+ // adapting, abort if remaining of current segment is over threshold
+ threshold = getAbortThreshold(
+ mOrigBandwidthIndex, mCurBandwidthIndex);
+ }
+
+ ALOGV("Pausing with threshold %.3f", threshold);
+
+ mFetcherInfos.valueAt(i).mFetcher->pauseAsync(threshold);
}
}
@@ -1290,10 +1515,9 @@ void LiveSession::changeConfiguration(
void LiveSession::onChangeConfiguration(const sp<AMessage> &msg) {
if (!mReconfigurationInProgress) {
- int32_t pickTrack = 0, bandwidthIndex = mCurBandwidthIndex;
+ int32_t pickTrack = 0;
msg->findInt32("pickTrack", &pickTrack);
- msg->findInt32("bandwidthIndex", &bandwidthIndex);
- changeConfiguration(-1ll /* timeUs */, bandwidthIndex, pickTrack);
+ changeConfiguration(-1ll /* timeUs */, -1, pickTrack);
} else {
msg->post(1000000ll); // retry in 1 sec
}
@@ -1316,6 +1540,10 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
mPacketSources.editValueAt(i)->clear();
}
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ mStreams[i].mCurDiscontinuitySeq = 0;
+ }
+
mDiscontinuityOffsetTimesUs.clear();
mDiscontinuityAbsStartTimesUs.clear();
@@ -1326,6 +1554,10 @@ void LiveSession::onChangeConfiguration2(const sp<AMessage> &msg) {
mSeekReplyID.clear();
mSeekReply.clear();
}
+
+ // restart buffer polling after seek because the previous
+ // buffering position is no longer valid.
+ restartPollBuffering();
}
uint32_t streamMask, resumeMask;
@@ -1400,12 +1632,14 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
int64_t timeUs;
int32_t pickTrack;
bool switching = false;
+ bool finishSwitching = false;
CHECK(msg->findInt64("timeUs", &timeUs));
CHECK(msg->findInt32("pickTrack", &pickTrack));
if (timeUs < 0ll) {
if (!pickTrack) {
switching = true;
+ finishSwitching = (streamMask == 0);
}
mRealTimeBaseUs = ALooper::GetNowUs() - mLastDequeuedTimeUs;
} else {
@@ -1433,25 +1667,12 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
ALOGV("resuming fetchers for mask 0x%08x", resumeMask);
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
const AString &uri = mFetcherInfos.keyAt(i);
-
- sp<AnotherPacketSource> sources[kMaxStreams];
- for (size_t j = 0; j < kMaxStreams; ++j) {
- if ((resumeMask & indexToType(j)) && uri == mStreams[j].mUri) {
- sources[j] = mPacketSources.valueFor(indexToType(j));
- }
- }
- FetcherInfo &info = mFetcherInfos.editValueAt(i);
- if (sources[kAudioIndex] != NULL || sources[kVideoIndex] != NULL
- || sources[kSubtitleIndex] != NULL) {
- info.mFetcher->startAsync(
- sources[kAudioIndex], sources[kVideoIndex], sources[kSubtitleIndex], timeUs);
- } else {
- info.mToBeRemoved = true;
+ if (!resumeFetcher(uri, resumeMask, timeUs)) {
+ mFetcherInfos.editValueAt(i).mToBeRemoved = true;
}
}
// streamMask now only contains the types that need a new fetcher created.
-
if (streamMask != 0) {
ALOGV("creating new fetchers for mask 0x%08x", streamMask);
}
@@ -1472,6 +1693,7 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
int64_t startTimeUs = -1;
int64_t segmentStartTimeUs = -1ll;
int32_t discontinuitySeq = -1;
+ SeekMode seekMode = kSeekModeExactPosition;
sp<AnotherPacketSource> sources[kMaxStreams];
if (i == kSubtitleIndex) {
@@ -1491,28 +1713,56 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
sp<AMessage> meta;
if (pickTrack) {
// selecting
+
+ // FIXME:
+ // This should only apply to the track that's being picked; we
+ // need a bitmask to indicate that.
+ //
+ // It's possible that selectTrack() gets called during a bandwidth
+ // switch and we needed to fetch a new variant. The new fetcher
+ // should start from where the old fetcher left off, not from where
+ // the decoder is dequeueing.
+
meta = sources[j]->getLatestDequeuedMeta();
} else {
// adapting
meta = sources[j]->getLatestEnqueuedMeta();
+ if (meta != NULL && mCurBandwidthIndex > mOrigBandwidthIndex) {
+ // switching up
+ meta = sources[j]->getMetaAfterLastDequeued(mUpSwitchMargin);
+ }
}
- if (meta != NULL && !meta->findInt32("discontinuity", &type)) {
+ if (j != kSubtitleIndex
+ && meta != NULL
+ && !meta->findInt32("discontinuity", &type)) {
int64_t tmpUs;
int64_t tmpSegmentUs;
+ int32_t seq;
CHECK(meta->findInt64("timeUs", &tmpUs));
CHECK(meta->findInt64("segmentStartTimeUs", &tmpSegmentUs));
- if (startTimeUs < 0 || tmpSegmentUs < segmentStartTimeUs) {
+ CHECK(meta->findInt32("discontinuitySeq", &seq));
+ // If we're switching and looking for the next sample or segment, set the
+ // target segment start time to tmpSegmentUs + tmpDurationUs / 2, which is
+ // the midpoint of the segment where the last sample was.
+ // This is needed if segments of the two variants are not perfectly
+ // aligned. (If the corresponding segment in the new variant starts slightly
+ // later than that in the old variant, we still want the switch to
+ // start in the next segment, not the current one.)
+ if (mStreams[j].mSeekMode == kSeekModeNextSample
+ || mStreams[j].mSeekMode == kSeekModeNextSegment) {
+ int64_t tmpDurationUs;
+ CHECK(meta->findInt64("segmentDurationUs", &tmpDurationUs));
+ tmpSegmentUs += tmpDurationUs / 2;
+ }
+ if (startTimeUs < 0 || seq > discontinuitySeq
+ || (seq == discontinuitySeq
+ && (tmpSegmentUs > segmentStartTimeUs
+ || (tmpSegmentUs == segmentStartTimeUs
+ && tmpUs > startTimeUs)))) {
startTimeUs = tmpUs;
segmentStartTimeUs = tmpSegmentUs;
- } else if (tmpSegmentUs == segmentStartTimeUs && tmpUs < startTimeUs) {
- startTimeUs = tmpUs;
- }
-
- int32_t seq;
- CHECK(meta->findInt32("discontinuitySeq", &seq));
- if (discontinuitySeq < 0 || seq < discontinuitySeq) {
discontinuitySeq = seq;
}
}
@@ -1527,14 +1777,22 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
ALOGV("stream[%zu]: queue format change", j);
sources[j]->queueDiscontinuity(
- ATSParser::DISCONTINUITY_FORMATCHANGE, NULL, true);
+ ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, true);
} else {
// adapting, queue discontinuities after resume
sources[j] = mPacketSources2.valueFor(indexToType(j));
sources[j]->clear();
uint32_t extraStreams = mNewStreamMask & (~mStreamMask);
if (extraStreams & indexToType(j)) {
- sources[j]->queueAccessUnit(createFormatChangeBuffer(/*swap*/ false));
+ sources[j]->queueDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, true);
+ }
+ // The new fetcher might be providing streams that used to be
+ // provided by two different fetchers. If one of those fetchers
+ // paused mid-segment while the other paused in the next
+ // segment, we have to start from the next segment.
+ if (seekMode < mStreams[j].mSeekMode) {
+ seekMode = mStreams[j].mSeekMode;
}
}
}
@@ -1550,7 +1808,7 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
startTimeUs < 0 ? mLastSeekTimeUs : startTimeUs,
segmentStartTimeUs,
discontinuitySeq,
- switching);
+ seekMode);
}
// All fetchers have now been started, the configuration change
@@ -1560,8 +1818,21 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
mReconfigurationInProgress = false;
if (switching) {
mSwitchInProgress = true;
+
+ if (finishSwitching) {
+ // Switch is finished now; no new fetchers are created.
+ // This path is hit when the old variant had video and audio from
+ // two separate fetchers, while the new variant has audio only,
+ // which reuses the previous audio fetcher.
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mSwapMask & indexToType(i)) {
+ tryToFinishBandwidthSwitch(mStreams[i].mUri);
+ }
+ }
+ }
} else {
mStreamMask = mNewStreamMask;
+ mOrigBandwidthIndex = mCurBandwidthIndex;
}
if (mDisconnectReplyID != NULL) {
@@ -1569,25 +1840,56 @@ void LiveSession::onChangeConfiguration3(const sp<AMessage> &msg) {
}
}
-void LiveSession::onSwapped(const sp<AMessage> &msg) {
- int32_t switchGeneration;
- CHECK(msg->findInt32("switchGeneration", &switchGeneration));
- if (switchGeneration != mSwitchGeneration) {
+void LiveSession::swapPacketSource(StreamType stream) {
+ ALOGV("swapPacketSource: stream = %d", stream);
+
+ // transfer packets from source2 to source
+ sp<AnotherPacketSource> &aps = mPacketSources.editValueFor(stream);
+ sp<AnotherPacketSource> &aps2 = mPacketSources2.editValueFor(stream);
+
+ // queue discontinuity in mPacketSource
+ aps->queueDiscontinuity(ATSParser::DISCONTINUITY_FORMAT_ONLY, NULL, false);
+
+ // queue packets in mPacketSource2 to mPacketSource
+ status_t finalResult = OK;
+ sp<ABuffer> accessUnit;
+ while (aps2->hasBufferAvailable(&finalResult) && finalResult == OK &&
+ OK == aps2->dequeueAccessUnit(&accessUnit)) {
+ aps->queueAccessUnit(accessUnit);
+ }
+ aps2->clear();
+}
+
+void LiveSession::tryToFinishBandwidthSwitch(const AString &oldUri) {
+ if (!mSwitchInProgress) {
return;
}
- int32_t stream;
- CHECK(msg->findInt32("stream", &stream));
+ ssize_t index = mFetcherInfos.indexOfKey(oldUri);
+ if (index < 0 || !mFetcherInfos[index].mToBeRemoved) {
+ return;
+ }
- ssize_t idx = typeToIndex(stream);
- CHECK(idx >= 0);
- if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) {
- ALOGW("swapping stream type %d %s to empty stream", stream, mStreams[idx].mUri.c_str());
+ // Swap packet source of streams provided by old variant
+ for (size_t idx = 0; idx < kMaxStreams; idx++) {
+ StreamType stream = indexToType(idx);
+ if ((mSwapMask & stream) && (oldUri == mStreams[idx].mUri)) {
+ swapPacketSource(stream);
+
+ if ((mNewStreamMask & stream) && mStreams[idx].mNewUri.empty()) {
+ ALOGW("swapping stream type %d %s to empty stream",
+ stream, mStreams[idx].mUri.c_str());
+ }
+ mStreams[idx].mUri = mStreams[idx].mNewUri;
+ mStreams[idx].mNewUri.clear();
+
+ mSwapMask &= ~stream;
+ }
}
- mStreams[idx].mUri = mStreams[idx].mNewUri;
- mStreams[idx].mNewUri.clear();
- mSwapMask &= ~stream;
+ mFetcherInfos.editValueAt(index).mFetcher->stopAsync(false /* clear */);
+
+ ALOGV("tryToFinishBandwidthSwitch: mSwapMask=%x", mSwapMask);
if (mSwapMask != 0) {
return;
}
@@ -1595,21 +1897,40 @@ void LiveSession::onSwapped(const sp<AMessage> &msg) {
// Check if new variant contains extra streams.
uint32_t extraStreams = mNewStreamMask & (~mStreamMask);
while (extraStreams) {
- StreamType extraStream = (StreamType) (extraStreams & ~(extraStreams - 1));
- swapPacketSource(extraStream);
- extraStreams &= ~extraStream;
+ StreamType stream = (StreamType) (extraStreams & ~(extraStreams - 1));
+ extraStreams &= ~stream;
- idx = typeToIndex(extraStream);
+ swapPacketSource(stream);
+
+ ssize_t idx = typeToIndex(stream);
CHECK(idx >= 0);
if (mStreams[idx].mNewUri.empty()) {
ALOGW("swapping extra stream type %d %s to empty stream",
- extraStream, mStreams[idx].mUri.c_str());
+ stream, mStreams[idx].mUri.c_str());
}
mStreams[idx].mUri = mStreams[idx].mNewUri;
mStreams[idx].mNewUri.clear();
}
- tryToFinishBandwidthSwitch();
+ // Restart the new fetcher (it was paused after the first 47k block)
+ // and let it fetch into mPacketSources (not mPacketSources2).
+ for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
+ FetcherInfo &info = mFetcherInfos.editValueAt(i);
+ if (info.mToBeResumed) {
+ resumeFetcher(mFetcherInfos.keyAt(i), mNewStreamMask);
+ info.mToBeResumed = false;
+ }
+ }
+
+ ALOGI("#### Finished Bandwidth Switch: %zd => %zd",
+ mOrigBandwidthIndex, mCurBandwidthIndex);
+
+ mStreamMask = mNewStreamMask;
+ mSwitchInProgress = false;
+ mOrigBandwidthIndex = mCurBandwidthIndex;
+
+ restartPollBuffering();
}
void LiveSession::schedulePollBuffering() {
@@ -1620,99 +1941,104 @@ void LiveSession::schedulePollBuffering() {
void LiveSession::cancelPollBuffering() {
++mPollBufferingGeneration;
+ mPrevBufferPercentage = -1;
+}
+
+void LiveSession::restartPollBuffering() {
+ cancelPollBuffering();
+ onPollBuffering();
}
void LiveSession::onPollBuffering() {
ALOGV("onPollBuffering: mSwitchInProgress %d, mReconfigurationInProgress %d, "
- "mInPreparationPhase %d, mCurBandwidthIndex %d, mStreamMask 0x%x",
+ "mInPreparationPhase %d, mCurBandwidthIndex %zd, mStreamMask 0x%x",
mSwitchInProgress, mReconfigurationInProgress,
mInPreparationPhase, mCurBandwidthIndex, mStreamMask);
- bool low, mid, high;
- if (checkBuffering(low, mid, high)) {
- if (mInPreparationPhase && mid) {
+ bool underflow, ready, down, up;
+ if (checkBuffering(underflow, ready, down, up)) {
+ if (mInPreparationPhase && ready) {
postPrepared(OK);
}
// don't switch before we report prepared
if (!mInPreparationPhase) {
- switchBandwidthIfNeeded(high, !mid);
- }
+ if (ready) {
+ stopBufferingIfNecessary();
+ } else if (underflow) {
+ startBufferingIfNecessary();
+ }
+ switchBandwidthIfNeeded(up, down);
+ }
+
}
schedulePollBuffering();
}
-// Mark switch done when:
-// 1. all old buffers are swapped out
-void LiveSession::tryToFinishBandwidthSwitch() {
+void LiveSession::cancelBandwidthSwitch(bool resume) {
+ ALOGV("cancelBandwidthSwitch: mSwitchGen(%d)++, orig %zd, cur %zd",
+ mSwitchGeneration, mOrigBandwidthIndex, mCurBandwidthIndex);
if (!mSwitchInProgress) {
return;
}
- bool needToRemoveFetchers = false;
- for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
- if (mFetcherInfos.valueAt(i).mToBeRemoved) {
- needToRemoveFetchers = true;
- break;
- }
- }
-
- if (!needToRemoveFetchers && mSwapMask == 0) {
- ALOGI("mSwitchInProgress = false");
- mStreamMask = mNewStreamMask;
- mSwitchInProgress = false;
- }
-}
-
-void LiveSession::cancelBandwidthSwitch() {
- Mutex::Autolock lock(mSwapMutex);
- mSwitchGeneration++;
- mSwitchInProgress = false;
- mSwapMask = 0;
-
for (size_t i = 0; i < mFetcherInfos.size(); ++i) {
FetcherInfo& info = mFetcherInfos.editValueAt(i);
if (info.mToBeRemoved) {
info.mToBeRemoved = false;
+ if (resume) {
+ resumeFetcher(mFetcherInfos.keyAt(i), mSwapMask);
+ }
}
}
for (size_t i = 0; i < kMaxStreams; ++i) {
- if (!mStreams[i].mNewUri.empty()) {
- ssize_t j = mFetcherInfos.indexOfKey(mStreams[i].mNewUri);
- if (j < 0) {
- mStreams[i].mNewUri.clear();
+ AString newUri = mStreams[i].mNewUri;
+ if (!newUri.empty()) {
+ // clear all mNewUri matching this newUri
+ for (size_t j = i; j < kMaxStreams; ++j) {
+ if (mStreams[j].mNewUri == newUri) {
+ mStreams[j].mNewUri.clear();
+ }
+ }
+ ALOGV("stopping newUri = %s", newUri.c_str());
+ ssize_t index = mFetcherInfos.indexOfKey(newUri);
+ if (index < 0) {
+ ALOGE("did not find fetcher for newUri: %s", newUri.c_str());
continue;
}
-
- const FetcherInfo &info = mFetcherInfos.valueAt(j);
+ FetcherInfo &info = mFetcherInfos.editValueAt(index);
+ info.mToBeRemoved = true;
info.mFetcher->stopAsync();
- mFetcherInfos.removeItemsAt(j);
- mStreams[i].mNewUri.clear();
}
}
+
+ ALOGI("#### Canceled Bandwidth Switch: %zd => %zd",
+ mCurBandwidthIndex, mOrigBandwidthIndex);
+
+ mSwitchGeneration++;
+ mSwitchInProgress = false;
+ mCurBandwidthIndex = mOrigBandwidthIndex;
+ mSwapMask = 0;
}
-bool LiveSession::checkBuffering(bool &low, bool &mid, bool &high) {
- low = mid = high = false;
+bool LiveSession::checkBuffering(
+ bool &underflow, bool &ready, bool &down, bool &up) {
+ underflow = ready = down = up = false;
- if (mSwitchInProgress || mReconfigurationInProgress) {
+ if (mReconfigurationInProgress) {
ALOGV("Switch/Reconfig in progress, defer buffer polling");
return false;
}
- // TODO: Fine tune low/high mark.
- // We also need to pause playback if buffering is too low.
- // Currently during underflow, we depend on decoder to starve
- // to pause, but A/V could have different buffering left,
- // they're not paused together.
- // TODO: Report buffering level to NuPlayer for BUFFERING_UPDATE
-
- // Switch down if any of the fetchers are below low mark;
- // Switch up if all of the fetchers are over high mark.
- size_t activeCount, lowCount, midCount, highCount;
- activeCount = lowCount = midCount = highCount = 0;
+ size_t activeCount, underflowCount, readyCount, downCount, upCount;
+ activeCount = underflowCount = readyCount = downCount = upCount = 0;
+ int32_t minBufferPercent = -1;
+ int64_t durationUs;
+ if (getDuration(&durationUs) != OK) {
+ durationUs = -1;
+ }
for (size_t i = 0; i < mPacketSources.size(); ++i) {
// we don't check subtitles for buffering level
if (!(mStreamMask & mPacketSources.keyAt(i)
@@ -1726,40 +2052,106 @@ bool LiveSession::checkBuffering(bool &low, bool &mid, bool &high) {
continue;
}
- ++activeCount;
int64_t bufferedDurationUs =
mPacketSources[i]->getEstimatedDurationUs();
ALOGV("source[%zu]: buffered %lld us", i, (long long)bufferedDurationUs);
- if (bufferedDurationUs < kLowWaterMark) {
- ++lowCount;
- break;
- } else if (bufferedDurationUs > kHighWaterMark) {
- ++midCount;
- ++highCount;
- } else if (bufferedDurationUs > kMidWaterMark) {
- ++midCount;
+ if (durationUs >= 0) {
+ int32_t percent;
+ if (mPacketSources[i]->isFinished(0 /* duration */)) {
+ percent = 100;
+ } else {
+ percent = (int32_t)(100.0 * (mLastDequeuedTimeUs + bufferedDurationUs) / durationUs);
+ }
+ if (minBufferPercent < 0 || percent < minBufferPercent) {
+ minBufferPercent = percent;
+ }
+ }
+
+ ++activeCount;
+ int64_t readyMark = mInPreparationPhase ? kPrepareMark : kReadyMark;
+ if (bufferedDurationUs > readyMark
+ || mPacketSources[i]->isFinished(0)) {
+ ++readyCount;
+ }
+ if (!mPacketSources[i]->isFinished(0)) {
+ if (bufferedDurationUs < kUnderflowMark) {
+ ++underflowCount;
+ }
+ if (bufferedDurationUs > mUpSwitchMark) {
+ ++upCount;
+ } else if (bufferedDurationUs < mDownSwitchMark) {
+ ++downCount;
+ }
}
}
+ if (minBufferPercent >= 0) {
+ notifyBufferingUpdate(minBufferPercent);
+ }
+
if (activeCount > 0) {
- high = (highCount == activeCount);
- mid = (midCount == activeCount);
- low = (lowCount > 0);
+ up = (upCount == activeCount);
+ down = (downCount > 0);
+ ready = (readyCount == activeCount);
+ underflow = (underflowCount > 0);
return true;
}
return false;
}
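
The four flags computed above compare each active source's buffered duration against the water marks declared later in LiveSession.h (kUnderflowMark = 1s, kPrepareMark = 1.5s, kReadyMark = 5s, and default up/down switch marks of 25s/18s). A minimal standalone sketch of that decision, assuming the default mark values and plain vectors instead of packet sources (the real code also special-cases finished sources):

#include <cstdint>
#include <vector>
#include <cstdio>

// Simplified model of LiveSession::checkBuffering(): derive the four flags
// from per-stream buffered durations (microseconds).
struct Flags { bool underflow, ready, down, up; };

static Flags checkBuffering(const std::vector<int64_t> &bufferedUs, bool preparing) {
    const int64_t kUnderflowMark  = 1000000ll;   //  1 sec
    const int64_t kPrepareMark    = 1500000ll;   //  1.5 sec
    const int64_t kReadyMark      = 5000000ll;   //  5 sec
    const int64_t kDownSwitchMark = 18000000ll;  // 18 sec
    const int64_t kUpSwitchMark   = 25000000ll;  // 25 sec

    size_t under = 0, ready = 0, down = 0, up = 0;
    const int64_t readyMark = preparing ? kPrepareMark : kReadyMark;
    for (int64_t us : bufferedUs) {
        if (us > readyMark)            ++ready;
        if (us < kUnderflowMark)       ++under;
        if (us > kUpSwitchMark)        ++up;
        else if (us < kDownSwitchMark) ++down;
    }
    const size_t n = bufferedUs.size();
    return { under > 0, ready == n, down > 0, up == n };
}

int main() {
    Flags f = checkBuffering({26000000ll, 30000000ll}, false);
    std::printf("underflow=%d ready=%d down=%d up=%d\n", f.underflow, f.ready, f.down, f.up);
}
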
+void LiveSession::startBufferingIfNecessary() {
+ ALOGV("startBufferingIfNecessary: mInPreparationPhase=%d, mBuffering=%d",
+ mInPreparationPhase, mBuffering);
+ if (!mBuffering) {
+ mBuffering = true;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBufferingStart);
+ notify->post();
+ }
+}
+
+void LiveSession::stopBufferingIfNecessary() {
+ ALOGV("stopBufferingIfNecessary: mInPreparationPhase=%d, mBuffering=%d",
+ mInPreparationPhase, mBuffering);
+
+ if (mBuffering) {
+ mBuffering = false;
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBufferingEnd);
+ notify->post();
+ }
+}
+
+void LiveSession::notifyBufferingUpdate(int32_t percentage) {
+ if (percentage < mPrevBufferPercentage) {
+ percentage = mPrevBufferPercentage;
+ } else if (percentage > 100) {
+ percentage = 100;
+ }
+
+ mPrevBufferPercentage = percentage;
+
+ ALOGV("notifyBufferingUpdate: percentage=%d%%", percentage);
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatBufferingUpdate);
+ notify->setInt32("percentage", percentage);
+ notify->post();
+}
+
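
notifyBufferingUpdate() above deliberately never reports a lower percentage than it last reported and caps the value at 100; the percentage itself is derived earlier in checkBuffering() as 100 * (mLastDequeuedTimeUs + bufferedDurationUs) / durationUs. A tiny sketch of the clamp and that formula, under those assumptions:

#include <cstdint>
#include <cstdio>
#include <algorithm>

// Simplified model of the buffering-percentage report: monotonically
// non-decreasing and capped at 100.
static int32_t clampPercent(int32_t pct, int32_t &prev) {
    pct = std::max(pct, prev);
    pct = std::min(pct, 100);
    prev = pct;
    return pct;
}

int main() {
    int32_t prev = -1;
    // 30s dequeued + 25s buffered out of a 120s duration -> 45%
    int32_t pct = (int32_t)(100.0 * (30000000ll + 25000000ll) / 120000000ll);
    std::printf("%d\n", clampPercent(pct, prev));  // 45
    std::printf("%d\n", clampPercent(40, prev));   // stays at 45
}
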
void LiveSession::switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow) {
// no need to check bandwidth if we only have 1 bandwidth settings
- if (mBandwidthItems.size() < 2) {
+ if (mSwitchInProgress || mBandwidthItems.size() < 2) {
return;
}
int32_t bandwidthBps;
if (mBandwidthEstimator->estimateBandwidth(&bandwidthBps)) {
ALOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
+ mLastBandwidthBps = bandwidthBps;
} else {
ALOGV("no bandwidth estimate.");
return;
@@ -1778,12 +2170,28 @@ void LiveSession::switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow) {
return;
}
- ALOGI("#### Initiate Bandwidth Switch: %d => %d",
+ ALOGI("#### Starting Bandwidth Switch: %zd => %zd",
mCurBandwidthIndex, bandwidthIndex);
changeConfiguration(-1, bandwidthIndex, false);
}
}
+void LiveSession::postError(status_t err) {
+ // if we reached EOS, notify buffering of 100%
+ if (err == ERROR_END_OF_STREAM) {
+ notifyBufferingUpdate(100);
+ }
+ // we'll stop buffer polling now, before that notify
+ // stop buffering to stop the spinning icon
+ stopBufferingIfNecessary();
+ cancelPollBuffering();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
void LiveSession::postPrepared(status_t err) {
CHECK(mInPreparationPhase);
@@ -1791,6 +2199,8 @@ void LiveSession::postPrepared(status_t err) {
if (err == OK || err == ERROR_END_OF_STREAM) {
notify->setInt32("what", kWhatPrepared);
} else {
+ cancelPollBuffering();
+
notify->setInt32("what", kWhatPreparationFailed);
notify->setInt32("err", err);
}
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 685fefa..d11675b 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -54,6 +54,12 @@ struct LiveSession : public AHandler {
STREAMTYPE_SUBTITLES = 1 << kSubtitleIndex,
};
+ enum SeekMode {
+ kSeekModeExactPosition = 0, // used for seeking
+ kSeekModeNextSample = 1, // used for seamless switching
+ kSeekModeNextSegment = 2, // used for seamless switching
+ };
+
LiveSession(
const sp<AMessage> &notify,
uint32_t flags,
@@ -63,6 +69,8 @@ struct LiveSession : public AHandler {
status_t getStreamFormat(StreamType stream, sp<AMessage> *format);
+ sp<HTTPBase> getHTTPDataSource();
+
void connectAsync(
const char *url,
const KeyedVector<String8, String8> *headers = NULL);
@@ -81,18 +89,18 @@ struct LiveSession : public AHandler {
bool isSeekable() const;
bool hasDynamicDuration() const;
+ static const char *getKeyForStream(StreamType type);
+
enum {
kWhatStreamsChanged,
kWhatError,
kWhatPrepared,
kWhatPreparationFailed,
+ kWhatBufferingStart,
+ kWhatBufferingEnd,
+ kWhatBufferingUpdate,
};
- // create a format-change discontinuity
- //
- // swap:
- // whether is format-change discontinuity should trigger a buffer swap
- sp<ABuffer> createFormatChangeBuffer(bool swap = true);
protected:
virtual ~LiveSession();
@@ -110,13 +118,18 @@ private:
kWhatChangeConfiguration2 = 'chC2',
kWhatChangeConfiguration3 = 'chC3',
kWhatFinishDisconnect2 = 'fin2',
- kWhatSwapped = 'swap',
kWhatPollBuffering = 'poll',
};
- static const int64_t kHighWaterMark;
- static const int64_t kMidWaterMark;
- static const int64_t kLowWaterMark;
+ // Bandwidth Switch Mark Defaults
+ static const int64_t kUpSwitchMark = 25000000ll;
+ static const int64_t kDownSwitchMark = 18000000ll;
+ static const int64_t kUpSwitchMargin = 5000000ll;
+
+ // Buffer Prepare/Ready/Underflow Marks
+ static const int64_t kReadyMark = 5000000ll;
+ static const int64_t kPrepareMark = 1500000ll;
+ static const int64_t kUnderflowMark = 1000000ll;
struct BandwidthEstimator;
struct BandwidthItem {
@@ -127,23 +140,22 @@ private:
struct FetcherInfo {
sp<PlaylistFetcher> mFetcher;
int64_t mDurationUs;
- bool mIsPrepared;
bool mToBeRemoved;
+ bool mToBeResumed;
};
struct StreamItem {
const char *mType;
AString mUri, mNewUri;
+ SeekMode mSeekMode;
size_t mCurDiscontinuitySeq;
int64_t mLastDequeuedTimeUs;
int64_t mLastSampleDurationUs;
StreamItem()
- : mType(""),
- mCurDiscontinuitySeq(0),
- mLastDequeuedTimeUs(0),
- mLastSampleDurationUs(0) {}
+ : StreamItem("") {}
StreamItem(const char *type)
: mType(type),
+ mSeekMode(kSeekModeExactPosition),
mCurDiscontinuitySeq(0),
mLastDequeuedTimeUs(0),
mLastSampleDurationUs(0) {}
@@ -159,8 +171,10 @@ private:
uint32_t mFlags;
sp<IMediaHTTPService> mHTTPService;
+ bool mBuffering;
bool mInPreparationPhase;
- bool mBuffering[kMaxStreams];
+ int32_t mPollBufferingGeneration;
+ int32_t mPrevBufferPercentage;
sp<HTTPBase> mHTTPDataSource;
KeyedVector<String8, String8> mExtraHeaders;
@@ -169,6 +183,8 @@ private:
Vector<BandwidthItem> mBandwidthItems;
ssize_t mCurBandwidthIndex;
+ ssize_t mOrigBandwidthIndex;
+ int32_t mLastBandwidthBps;
sp<BandwidthEstimator> mBandwidthEstimator;
sp<M3UParser> mPlaylist;
@@ -190,11 +206,6 @@ private:
// A second set of packet sources that buffer content for the variant we're switching to.
KeyedVector<StreamType, sp<AnotherPacketSource> > mPacketSources2;
- // A mutex used to serialize two sets of events:
- // * the swapping of packet sources in dequeueAccessUnit on the player thread, AND
- // * a forced bandwidth switch termination in cancelSwitch on the live looper.
- Mutex mSwapMutex;
-
int32_t mSwitchGeneration;
int32_t mSubtitleGeneration;
@@ -207,6 +218,10 @@ private:
bool mReconfigurationInProgress;
bool mSwitchInProgress;
+ int64_t mUpSwitchMark;
+ int64_t mDownSwitchMark;
+ int64_t mUpSwitchMargin;
+
sp<AReplyToken> mDisconnectReplyID;
sp<AReplyToken> mSeekReplyID;
@@ -216,8 +231,6 @@ private:
KeyedVector<size_t, int64_t> mDiscontinuityAbsStartTimesUs;
KeyedVector<size_t, int64_t> mDiscontinuityOffsetTimesUs;
- int32_t mPollBufferingGeneration;
-
sp<PlaylistFetcher> addFetcher(const char *uri);
void onConnect(const sp<AMessage> &msg);
@@ -243,11 +256,19 @@ private:
uint32_t block_size = 0,
/* reuse DataSource if doing partial fetch */
sp<DataSource> *source = NULL,
- String8 *actualUrl = NULL);
+ String8 *actualUrl = NULL,
+ /* force connect HTTP even when reusing DataSource */
+ bool forceConnectHTTP = false);
sp<M3UParser> fetchPlaylist(
const char *url, uint8_t *curPlaylistHash, bool *unchanged);
+ bool resumeFetcher(
+ const AString &uri, uint32_t streamMask,
+ int64_t timeUs = -1ll, bool newUri = false);
+
+ float getAbortThreshold(
+ ssize_t currentBWIndex, ssize_t targetBWIndex) const;
void addBandwidthMeasurement(size_t numBytes, int64_t delayUs);
size_t getBandwidthIndex(int32_t bandwidthBps);
int64_t latestMediaSegmentStartTimeUs();
@@ -257,29 +278,32 @@ private:
static ssize_t typeToIndex(int32_t type);
void changeConfiguration(
- int64_t timeUs, size_t bandwidthIndex, bool pickTrack = false);
+ int64_t timeUs, ssize_t bwIndex = -1, bool pickTrack = false);
void onChangeConfiguration(const sp<AMessage> &msg);
void onChangeConfiguration2(const sp<AMessage> &msg);
void onChangeConfiguration3(const sp<AMessage> &msg);
- void onSwapped(const sp<AMessage> &msg);
- void tryToFinishBandwidthSwitch();
- // cancelBandwidthSwitch is atomic wrt swapPacketSource; call it to prevent packet sources
- // from being swapped out on stale discontinuities while manipulating
- // mPacketSources/mPacketSources2.
- void cancelBandwidthSwitch();
+ void swapPacketSource(StreamType stream);
+ void tryToFinishBandwidthSwitch(const AString &oldUri);
+ void cancelBandwidthSwitch(bool resume = false);
+ bool checkSwitchProgress(
+ sp<AMessage> &msg, int64_t delayUs, bool *needResumeUntil);
+
+ void switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow);
void schedulePollBuffering();
void cancelPollBuffering();
+ void restartPollBuffering();
void onPollBuffering();
- bool checkBuffering(bool &low, bool &mid, bool &high);
- void switchBandwidthIfNeeded(bool bufferHigh, bool bufferLow);
+ bool checkBuffering(bool &underflow, bool &ready, bool &down, bool &up);
+ void startBufferingIfNecessary();
+ void stopBufferingIfNecessary();
+ void notifyBufferingUpdate(int32_t percentage);
void finishDisconnect();
void postPrepared(status_t err);
-
- void swapPacketSource(StreamType stream);
+ void postError(status_t err);
DISALLOW_EVIL_CONSTRUCTORS(LiveSession);
};
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index a447010..3ace396 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -33,6 +33,7 @@
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AUtils.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaDefs.h>
@@ -47,13 +48,99 @@
namespace android {
// static
-const int64_t PlaylistFetcher::kMinBufferedDurationUs = 10000000ll;
+const int64_t PlaylistFetcher::kMinBufferedDurationUs = 30000000ll;
const int64_t PlaylistFetcher::kMaxMonitorDelayUs = 3000000ll;
const int64_t PlaylistFetcher::kFetcherResumeThreshold = 100000ll;
// LCM of 188 (size of a TS packet) & 1k works well
const int32_t PlaylistFetcher::kDownloadBlockSize = 47 * 1024;
const int32_t PlaylistFetcher::kNumSkipFrames = 5;
+struct PlaylistFetcher::DownloadState : public RefBase {
+ DownloadState();
+ void resetState();
+ bool hasSavedState() const;
+ void restoreState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist);
+ void saveState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist);
+
+private:
+ bool mHasSavedState;
+ AString mUri;
+ sp<AMessage> mItemMeta;
+ sp<ABuffer> mBuffer;
+ sp<ABuffer> mTsBuffer;
+ int32_t mFirstSeqNumberInPlaylist;
+ int32_t mLastSeqNumberInPlaylist;
+};
+
+PlaylistFetcher::DownloadState::DownloadState() {
+ resetState();
+}
+
+bool PlaylistFetcher::DownloadState::hasSavedState() const {
+ return mHasSavedState;
+}
+
+void PlaylistFetcher::DownloadState::resetState() {
+ mHasSavedState = false;
+
+ mUri.clear();
+ mItemMeta = NULL;
+ mBuffer = NULL;
+ mTsBuffer = NULL;
+ mFirstSeqNumberInPlaylist = 0;
+ mLastSeqNumberInPlaylist = 0;
+}
+
+void PlaylistFetcher::DownloadState::restoreState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist) {
+ if (!mHasSavedState) {
+ return;
+ }
+
+ uri = mUri;
+ itemMeta = mItemMeta;
+ buffer = mBuffer;
+ tsBuffer = mTsBuffer;
+ firstSeqNumberInPlaylist = mFirstSeqNumberInPlaylist;
+ lastSeqNumberInPlaylist = mLastSeqNumberInPlaylist;
+
+ resetState();
+}
+
+void PlaylistFetcher::DownloadState::saveState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ sp<ABuffer> &buffer,
+ sp<ABuffer> &tsBuffer,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist) {
+ mHasSavedState = true;
+
+ mUri = uri;
+ mItemMeta = itemMeta;
+ mBuffer = buffer;
+ mTsBuffer = tsBuffer;
+ mFirstSeqNumberInPlaylist = firstSeqNumberInPlaylist;
+ mLastSeqNumberInPlaylist = lastSeqNumberInPlaylist;
+}
+
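
DownloadState is what lets onDownloadNext() stop mid-segment and pick up where it left off: on pause it saves the URI, item metadata, partially filled buffers, and the playlist's sequence-number range; on the next pass it restores them and skips re-initialization. A bare-bones sketch of the same save/restore shape, assuming a simplified state struct rather than the real class:

#include <string>
#include <optional>
#include <iostream>

// Simplified model of PlaylistFetcher::DownloadState: either holds a snapshot
// of an interrupted segment download, or nothing.
struct SavedDownload {
    std::string uri;
    size_t bytesAlreadyFetched;
    int32_t firstSeq, lastSeq;
};

int main() {
    std::optional<SavedDownload> saved;                    // resetState(): empty
    saved = SavedDownload{"seg42.ts", 47 * 1024, 40, 45};  // saveState() on pause

    if (saved) {                                           // hasSavedState()
        SavedDownload s = *saved;                          // restoreState()
        saved.reset();                                     // restore also clears the slot
        std::cout << "resuming " << s.uri << " at byte " << s.bytesAlreadyFetched << "\n";
    }
}
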
PlaylistFetcher::PlaylistFetcher(
const sp<AMessage> &notify,
const sp<LiveSession> &session,
@@ -71,18 +158,21 @@ PlaylistFetcher::PlaylistFetcher(
mSeqNumber(-1),
mNumRetries(0),
mStartup(true),
- mAdaptive(false),
- mPrepared(false),
+ mIDRFound(false),
+ mSeekMode(LiveSession::kSeekModeExactPosition),
mTimeChangeSignaled(false),
mNextPTSTimeUs(-1ll),
mMonitorQueueGeneration(0),
mSubtitleGeneration(subtitleGeneration),
mLastDiscontinuitySeq(-1ll),
- mStopping(false),
mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY),
mFirstPTSValid(false),
- mVideoBuffer(new AnotherPacketSource(NULL)) {
+ mFirstTimeUs(-1ll),
+ mVideoBuffer(new AnotherPacketSource(NULL)),
+ mThresholdRatio(-1.0f),
+ mDownloadState(new DownloadState()) {
memset(mPlaylistHash, 0, sizeof(mPlaylistHash));
+ mHTTPDataSource = mSession->getHTTPDataSource();
}
PlaylistFetcher::~PlaylistFetcher() {
@@ -119,6 +209,32 @@ int64_t PlaylistFetcher::getSegmentStartTimeUs(int32_t seqNumber) const {
return segmentStartUs;
}
+int64_t PlaylistFetcher::getSegmentDurationUs(int32_t seqNumber) const {
+ CHECK(mPlaylist != NULL);
+
+ int32_t firstSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
+
+ int32_t lastSeqNumberInPlaylist =
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1;
+
+ CHECK_GE(seqNumber, firstSeqNumberInPlaylist);
+ CHECK_LE(seqNumber, lastSeqNumberInPlaylist);
+
+ int32_t index = seqNumber - firstSeqNumberInPlaylist;
+ sp<AMessage> itemMeta;
+ CHECK(mPlaylist->itemAt(
+ index, NULL /* uri */, &itemMeta));
+
+ int64_t itemDurationUs;
+ CHECK(itemMeta->findInt64("durationUs", &itemDurationUs));
+
+ return itemDurationUs;
+}
+
int64_t PlaylistFetcher::delayUsToRefreshPlaylist() const {
int64_t nowUs = ALooper::GetNowUs();
@@ -334,9 +450,12 @@ void PlaylistFetcher::cancelMonitorQueue() {
++mMonitorQueueGeneration;
}
-void PlaylistFetcher::setStopping(bool stopping) {
- AutoMutex _l(mStoppingLock);
- mStopping = stopping;
+void PlaylistFetcher::setStoppingThreshold(float thresholdRatio) {
+ AutoMutex _l(mThresholdLock);
+ if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {
+ return;
+ }
+ mThresholdRatio = thresholdRatio;
}
void PlaylistFetcher::startAsync(
@@ -346,7 +465,7 @@ void PlaylistFetcher::startAsync(
int64_t startTimeUs,
int64_t segmentStartTimeUs,
int32_t startDiscontinuitySeq,
- bool adaptive) {
+ LiveSession::SeekMode seekMode) {
sp<AMessage> msg = new AMessage(kWhatStart, this);
uint32_t streamTypeMask = 0ul;
@@ -370,19 +489,19 @@ void PlaylistFetcher::startAsync(
msg->setInt64("startTimeUs", startTimeUs);
msg->setInt64("segmentStartTimeUs", segmentStartTimeUs);
msg->setInt32("startDiscontinuitySeq", startDiscontinuitySeq);
- msg->setInt32("adaptive", adaptive);
+ msg->setInt32("seekMode", seekMode);
msg->post();
}
-void PlaylistFetcher::pauseAsync(bool immediate) {
- if (immediate) {
- setStopping(true);
+void PlaylistFetcher::pauseAsync(float thresholdRatio) {
+ if (thresholdRatio >= 0.0f) {
+ setStoppingThreshold(thresholdRatio);
}
(new AMessage(kWhatPause, this))->post();
}
void PlaylistFetcher::stopAsync(bool clear) {
- setStopping(true);
+ setStoppingThreshold(0.0f);
sp<AMessage> msg = new AMessage(kWhatStop, this);
msg->setInt32("clear", clear);
@@ -414,6 +533,10 @@ void PlaylistFetcher::onMessageReceived(const sp<AMessage> &msg) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatPaused);
+ notify->setInt32("seekMode",
+ mDownloadState->hasSavedState()
+ ? LiveSession::kSeekModeNextSample
+ : LiveSession::kSeekModeNextSegment);
notify->post();
break;
}
@@ -463,7 +586,7 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
mStopParams.clear();
mStartTimeUsNotify = mNotify->dup();
mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
- mStartTimeUsNotify->setInt32("streamMask", 0);
+ mStartTimeUsNotify->setString("uri", mURI);
uint32_t streamTypeMask;
CHECK(msg->findInt32("streamTypeMask", (int32_t *)&streamTypeMask));
@@ -471,11 +594,11 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
int64_t startTimeUs;
int64_t segmentStartTimeUs;
int32_t startDiscontinuitySeq;
- int32_t adaptive;
+ int32_t seekMode;
CHECK(msg->findInt64("startTimeUs", &startTimeUs));
CHECK(msg->findInt64("segmentStartTimeUs", &segmentStartTimeUs));
CHECK(msg->findInt32("startDiscontinuitySeq", &startDiscontinuitySeq));
- CHECK(msg->findInt32("adaptive", &adaptive));
+ CHECK(msg->findInt32("seekMode", &seekMode));
if (streamTypeMask & LiveSession::STREAMTYPE_AUDIO) {
void *ptr;
@@ -507,20 +630,26 @@ status_t PlaylistFetcher::onStart(const sp<AMessage> &msg) {
mStreamTypeMask = streamTypeMask;
mSegmentStartTimeUs = segmentStartTimeUs;
- mDiscontinuitySeq = startDiscontinuitySeq;
+
+ if (startDiscontinuitySeq >= 0) {
+ mDiscontinuitySeq = startDiscontinuitySeq;
+ }
mRefreshState = INITIAL_MINIMUM_RELOAD_DELAY;
+ mSeekMode = (LiveSession::SeekMode) seekMode;
+
+ if (startTimeUs >= 0 || mSeekMode == LiveSession::kSeekModeNextSample) {
+ mStartup = true;
+ mIDRFound = false;
+ mVideoBuffer->clear();
+ }
if (startTimeUs >= 0) {
mStartTimeUs = startTimeUs;
mFirstPTSValid = false;
mSeqNumber = -1;
- mStartup = true;
- mPrepared = false;
- mIDRFound = false;
mTimeChangeSignaled = false;
- mAdaptive = adaptive;
- mVideoBuffer->clear();
+ mDownloadState->resetState();
}
postMonitorQueue();
@@ -532,7 +661,7 @@ void PlaylistFetcher::onPause() {
cancelMonitorQueue();
mLastDiscontinuitySeq = mDiscontinuitySeq;
- setStopping(false);
+ setStoppingThreshold(-1.0f);
}
void PlaylistFetcher::onStop(const sp<AMessage> &msg) {
@@ -547,10 +676,11 @@ void PlaylistFetcher::onStop(const sp<AMessage> &msg) {
}
}
+ mDownloadState->resetState();
mPacketSources.clear();
mStreamTypeMask = 0;
- setStopping(false);
+ setStoppingThreshold(-1.0f);
}
// Resume until we have reached the boundary timestamps listed in `msg`; when
@@ -564,8 +694,7 @@ status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) {
for (size_t i = 0; i < mPacketSources.size(); i++) {
sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
- const char *stopKey;
- int streamType = mPacketSources.keyAt(i);
+ LiveSession::StreamType streamType = mPacketSources.keyAt(i);
if (streamType == LiveSession::STREAMTYPE_SUBTITLES) {
// the subtitle track can always be stopped
@@ -573,18 +702,7 @@ status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) {
continue;
}
- switch (streamType) {
- case LiveSession::STREAMTYPE_VIDEO:
- stopKey = "timeUsVideo";
- break;
-
- case LiveSession::STREAMTYPE_AUDIO:
- stopKey = "timeUsAudio";
- break;
-
- default:
- TRESPASS();
- }
+ const char *stopKey = LiveSession::getKeyForStream(streamType);
// check if this stream has too little data left to be resumed
int32_t discontinuitySeq;
@@ -602,10 +720,7 @@ status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) {
// Don't resume if all streams are within a resume threshold
if (stopCount == mPacketSources.size()) {
- for (size_t i = 0; i < mPacketSources.size(); i++) {
- mPacketSources.valueAt(i)->queueAccessUnit(mSession->createFormatChangeBuffer());
- }
- stopAsync(/* clear = */ false);
+ notifyStopReached();
return OK;
}
@@ -615,6 +730,12 @@ status_t PlaylistFetcher::onResumeUntil(const sp<AMessage> &msg) {
return OK;
}
+void PlaylistFetcher::notifyStopReached() {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatStopReached);
+ notify->post();
+}
+
void PlaylistFetcher::notifyError(status_t err) {
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatError);
@@ -634,7 +755,12 @@ void PlaylistFetcher::queueDiscontinuity(
void PlaylistFetcher::onMonitorQueue() {
bool downloadMore = false;
- refreshPlaylist();
+
+ // in the middle of an unfinished download, delay the
+ // playlist refresh, as it would change sequence numbers
+ if (!mDownloadState->hasSavedState()) {
+ refreshPlaylist();
+ }
int32_t targetDurationSecs;
int64_t targetDurationUs = kMinBufferedDurationUs;
@@ -730,6 +856,13 @@ status_t PlaylistFetcher::refreshPlaylist() {
if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
updateDuration();
}
+ // Notify LiveSession to use target-duration based buffering level
+ // for up/down switch. Default LiveSession::kUpSwitchMark may not
+ // be reachable for live streams, as our max buffering amount is
+ // limited to 3 segments.
+ if (!mPlaylist->isComplete()) {
+ updateTargetDuration();
+ }
}
mLastPlaylistFetchTimeUs = ALooper::GetNowUs();
@@ -742,10 +875,69 @@ bool PlaylistFetcher::bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer) {
return buffer->size() > 0 && buffer->data()[0] == 0x47;
}
-void PlaylistFetcher::onDownloadNext() {
+bool PlaylistFetcher::shouldPauseDownload() {
+ if (mStreamTypeMask == LiveSession::STREAMTYPE_SUBTITLES) {
+ // doesn't apply to subtitles
+ return false;
+ }
+
+ // Calculate threshold to abort current download
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+ int64_t thresholdUs = -1;
+ {
+ AutoMutex _l(mThresholdLock);
+ thresholdUs = (mThresholdRatio < 0.0f) ?
+ -1ll : mThresholdRatio * targetDurationUs;
+ }
+
+ if (thresholdUs < 0) {
+ // never abort
+ return false;
+ } else if (thresholdUs == 0) {
+ // immediately abort
+ return true;
+ }
+
+ // now we have a positive thresholdUs, abort if remaining
+ // portion to download is over that threshold.
+ if (mSegmentFirstPTS < 0) {
+ // this means we haven't even found the first access unit;
+ // abort now, as we must be very far from the end.
+ return true;
+ }
+ int64_t lastEnqueueUs = mSegmentFirstPTS;
+ for (size_t i = 0; i < mPacketSources.size(); ++i) {
+ if ((mStreamTypeMask & mPacketSources.keyAt(i)) == 0) {
+ continue;
+ }
+ sp<AMessage> meta = mPacketSources[i]->getLatestEnqueuedMeta();
+ int32_t type;
+ if (meta == NULL || meta->findInt32("discontinuity", &type)) {
+ continue;
+ }
+ int64_t tmpUs;
+ CHECK(meta->findInt64("timeUs", &tmpUs));
+ if (tmpUs > lastEnqueueUs) {
+ lastEnqueueUs = tmpUs;
+ }
+ }
+ lastEnqueueUs -= mSegmentFirstPTS;
+ if (targetDurationUs - lastEnqueueUs > thresholdUs) {
+ return true;
+ }
+ return false;
+}
+
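
shouldPauseDownload() above decides whether to abandon the current segment download: a negative threshold means never abort, zero means abort immediately, and otherwise the fetcher aborts only if the portion of the segment not yet enqueued exceeds thresholdRatio * targetDuration. A small standalone sketch of that arithmetic, using plain int64 values in place of packet sources:

#include <cstdint>
#include <cstdio>

// Simplified model of PlaylistFetcher::shouldPauseDownload(): given how much
// of the current segment has already been enqueued, abort the download only
// when the remaining portion is larger than thresholdRatio * targetDuration.
static bool shouldPause(float thresholdRatio, int64_t targetDurationUs,
                        int64_t enqueuedFromSegmentUs /* lastEnqueueUs - segmentFirstPTS */) {
    if (thresholdRatio < 0.0f) return false;                        // never abort
    if (thresholdRatio == 0.0f) return true;                        // abort immediately
    const int64_t thresholdUs = (int64_t)(thresholdRatio * targetDurationUs);
    return targetDurationUs - enqueuedFromSegmentUs > thresholdUs;  // too much left to fetch
}

int main() {
    // 10s segment, half already enqueued, abort threshold at 30% of a segment:
    std::printf("%d\n", shouldPause(0.3f, 10000000ll, 5000000ll));  // remaining 5s > 3s -> 1
}
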
+bool PlaylistFetcher::initDownloadState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist) {
status_t err = refreshPlaylist();
- int32_t firstSeqNumberInPlaylist = 0;
- int32_t lastSeqNumberInPlaylist = 0;
+ firstSeqNumberInPlaylist = 0;
+ lastSeqNumberInPlaylist = 0;
bool discontinuity = false;
if (mPlaylist != NULL) {
@@ -761,6 +953,8 @@ void PlaylistFetcher::onDownloadNext() {
}
}
+ mSegmentFirstPTS = -1ll;
+
if (mPlaylist != NULL && mSeqNumber < 0) {
CHECK_GE(mStartTimeUs, 0ll);
@@ -788,7 +982,7 @@ void PlaylistFetcher::onDownloadNext() {
// timestamps coming from the media container) is used to determine the position
// inside a segments.
mSeqNumber = getSeqNumberForTime(mSegmentStartTimeUs);
- if (mAdaptive) {
+ if (mSeekMode == LiveSession::kSeekModeNextSegment) {
// avoid double fetch/decode
mSeqNumber += 1;
}
@@ -838,12 +1032,12 @@ void PlaylistFetcher::onDownloadNext() {
mSeqNumber, firstSeqNumberInPlaylist,
lastSeqNumberInPlaylist, delayUs, mNumRetries);
postMonitorQueue(delayUs);
- return;
+ return false;
}
if (err != OK) {
notifyError(err);
- return;
+ return false;
}
// we've missed the boat, let's start 3 segments prior to the latest sequence
@@ -858,12 +1052,8 @@ void PlaylistFetcher::onDownloadNext() {
// but since the segments we are supposed to fetch have already rolled off
// the playlist, i.e. we have already missed the boat, we inevitably have to
// skip.
- for (size_t i = 0; i < mPacketSources.size(); i++) {
- sp<ABuffer> formatChange = mSession->createFormatChangeBuffer();
- mPacketSources.valueAt(i)->queueAccessUnit(formatChange);
- }
- stopAsync(/* clear = */ false);
- return;
+ notifyStopReached();
+ return false;
}
mSeqNumber = lastSeqNumberInPlaylist - 3;
if (mSeqNumber < firstSeqNumberInPlaylist) {
@@ -873,20 +1063,27 @@ void PlaylistFetcher::onDownloadNext() {
// fall through
} else {
- ALOGE("Cannot find sequence number %d in playlist "
- "(contains %d - %d)",
- mSeqNumber, firstSeqNumberInPlaylist,
- firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1);
+ if (mPlaylist != NULL) {
+ ALOGE("Cannot find sequence number %d in playlist "
+ "(contains %d - %d)",
+ mSeqNumber, firstSeqNumberInPlaylist,
+ firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1);
- notifyError(ERROR_END_OF_STREAM);
- return;
+ notifyError(ERROR_END_OF_STREAM);
+ } else {
+ // It's possible that we were never able to download the playlist.
+ // In this case we should notify error, instead of EOS, as EOS during
+ // prepare means we succeeded in downloading everything.
+ ALOGE("Failed to download playlist!");
+ notifyError(ERROR_IO);
+ }
+
+ return false;
}
}
mNumRetries = 0;
- AString uri;
- sp<AMessage> itemMeta;
CHECK(mPlaylist->itemAt(
mSeqNumber - firstSeqNumberInPlaylist,
&uri,
@@ -909,20 +1106,6 @@ void PlaylistFetcher::onDownloadNext() {
}
mLastDiscontinuitySeq = -1;
- int64_t range_offset, range_length;
- if (!itemMeta->findInt64("range-offset", &range_offset)
- || !itemMeta->findInt64("range-length", &range_length)) {
- range_offset = 0;
- range_length = -1;
- }
-
- ALOGV("fetching segment %d from (%d .. %d)",
- mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist);
-
- ALOGV("fetching '%s'", uri.c_str());
-
- sp<DataSource> source;
- sp<ABuffer> buffer, tsBuffer;
// decrypt a junk buffer to prefetch key; since a session uses only one http connection,
// this avoids interleaved connections to the key and segment file.
{
@@ -932,7 +1115,7 @@ void PlaylistFetcher::onDownloadNext() {
true /* first */);
if (err != OK) {
notifyError(err);
- return;
+ return false;
}
}
@@ -960,8 +1143,10 @@ void PlaylistFetcher::onDownloadNext() {
// Signal a format discontinuity to ATSParser to clear partial data
// from previous streams. Not doing this causes bitstream corruption.
- mTSParser->signalDiscontinuity(
- ATSParser::DISCONTINUITY_FORMATCHANGE, NULL /* extra */);
+ if (mTSParser != NULL) {
+ mTSParser->signalDiscontinuity(
+ ATSParser::DISCONTINUITY_FORMATCHANGE, NULL /* extra */);
+ }
queueDiscontinuity(
ATSParser::DISCONTINUITY_FORMATCHANGE,
@@ -981,13 +1166,58 @@ void PlaylistFetcher::onDownloadNext() {
}
}
+ ALOGV("fetching segment %d from (%d .. %d)",
+ mSeqNumber, firstSeqNumberInPlaylist, lastSeqNumberInPlaylist);
+ return true;
+}
+
+void PlaylistFetcher::onDownloadNext() {
+ AString uri;
+ sp<AMessage> itemMeta;
+ sp<ABuffer> buffer;
+ sp<ABuffer> tsBuffer;
+ int32_t firstSeqNumberInPlaylist = 0;
+ int32_t lastSeqNumberInPlaylist = 0;
+ bool connectHTTP = true;
+
+ if (mDownloadState->hasSavedState()) {
+ mDownloadState->restoreState(
+ uri,
+ itemMeta,
+ buffer,
+ tsBuffer,
+ firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist);
+ connectHTTP = false;
+ ALOGV("resuming: '%s'", uri.c_str());
+ } else {
+ if (!initDownloadState(
+ uri,
+ itemMeta,
+ firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist)) {
+ return;
+ }
+ ALOGV("fetching: '%s'", uri.c_str());
+ }
+
+ int64_t range_offset, range_length;
+ if (!itemMeta->findInt64("range-offset", &range_offset)
+ || !itemMeta->findInt64("range-length", &range_length)) {
+ range_offset = 0;
+ range_length = -1;
+ }
+
// block-wise download
+ bool shouldPause = false;
ssize_t bytesRead;
do {
- int64_t startUs = ALooper::GetNowUs();
+ sp<DataSource> source = mHTTPDataSource;
+ int64_t startUs = ALooper::GetNowUs();
bytesRead = mSession->fetchFile(
- uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize, &source);
+ uri.c_str(), &buffer, range_offset, range_length, kDownloadBlockSize,
+ &source, NULL, connectHTTP);
// add sample for bandwidth estimation (excluding subtitles)
if (bytesRead > 0
@@ -998,6 +1228,8 @@ void PlaylistFetcher::onDownloadNext() {
mSession->addBandwidthMeasurement(bytesRead, delayUs);
}
+ connectHTTP = false;
+
if (bytesRead < 0) {
status_t err = bytesRead;
ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str());
@@ -1022,6 +1254,8 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
+ bool startUp = mStartup; // save current start up state
+
err = OK;
if (bufferStartsWithTsSyncByte(buffer)) {
// Incremental extraction is only supported for MPEG2 transport streams.
@@ -1034,7 +1268,6 @@ void PlaylistFetcher::onDownloadNext() {
tsBuffer->setRange(tsOff, tsSize);
}
tsBuffer->setRange(tsBuffer->offset(), tsBuffer->size() + bytesRead);
-
err = extractAndQueueAccessUnitsFromTs(tsBuffer);
}
@@ -1049,14 +1282,36 @@ void PlaylistFetcher::onDownloadNext() {
return;
} else if (err == ERROR_OUT_OF_RANGE) {
// reached stopping point
- stopAsync(/* clear = */ false);
+ notifyStopReached();
return;
} else if (err != OK) {
notifyError(err);
return;
}
-
- } while (bytesRead != 0 && !mStopping);
+ // If we're switching, post the start notification;
+ // this should only be posted once the last chunk is fully processed by the TSParser
+ if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) {
+ CHECK(mStartTimeUsNotify != NULL);
+ mStartTimeUsNotify->post();
+ mStartTimeUsNotify.clear();
+ shouldPause = true;
+ }
+ if (shouldPause || shouldPauseDownload()) {
+ // save state and return if this is not the last chunk,
+ // leaving the fetcher in paused state.
+ if (bytesRead != 0) {
+ mDownloadState->saveState(
+ uri,
+ itemMeta,
+ buffer,
+ tsBuffer,
+ firstSeqNumberInPlaylist,
+ lastSeqNumberInPlaylist);
+ return;
+ }
+ shouldPause = true;
+ }
+ } while (bytesRead != 0);
if (bufferStartsWithTsSyncByte(buffer)) {
// If we don't see a stream in the program table after fetching a full ts segment
@@ -1092,7 +1347,6 @@ void PlaylistFetcher::onDownloadNext() {
return;
}
- err = OK;
if (tsBuffer != NULL) {
AString method;
CHECK(buffer->meta()->findString("cipher-method", &method));
@@ -1106,30 +1360,40 @@ void PlaylistFetcher::onDownloadNext() {
}
// bulk extract non-ts files
+ bool startUp = mStartup;
if (tsBuffer == NULL) {
- err = extractAndQueueAccessUnits(buffer, itemMeta);
+ status_t err = extractAndQueueAccessUnits(buffer, itemMeta);
if (err == -EAGAIN) {
// starting sequence number too low/high
postMonitorQueue();
return;
} else if (err == ERROR_OUT_OF_RANGE) {
// reached stopping point
- stopAsync(/* clear = */false);
+ notifyStopReached();
+ return;
+ } else if (err != OK) {
+ notifyError(err);
return;
}
}
- if (err != OK) {
- notifyError(err);
- return;
- }
-
++mSeqNumber;
- postMonitorQueue();
+ // if adapting, pause after finding the next starting point
+ if (mSeekMode != LiveSession::kSeekModeExactPosition && startUp != mStartup) {
+ CHECK(mStartTimeUsNotify != NULL);
+ mStartTimeUsNotify->post();
+ mStartTimeUsNotify.clear();
+ shouldPause = true;
+ }
+
+ if (!shouldPause) {
+ postMonitorQueue();
+ }
}
-int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const {
+int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(
+ int64_t anchorTimeUs, int64_t targetDiffUs) const {
int32_t firstSeqNumberInPlaylist, lastSeqNumberInPlaylist;
if (mPlaylist->meta() == NULL
|| !mPlaylist->meta()->findInt32("media-sequence", &firstSeqNumberInPlaylist)) {
@@ -1138,7 +1402,8 @@ int32_t PlaylistFetcher::getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const
lastSeqNumberInPlaylist = firstSeqNumberInPlaylist + mPlaylist->size() - 1;
int32_t index = mSeqNumber - firstSeqNumberInPlaylist - 1;
- while (index >= 0 && anchorTimeUs > mStartTimeUs) {
+ // adjust anchorTimeUs to within targetDiffUs from mStartTimeUs
+ while (index >= 0 && anchorTimeUs - mStartTimeUs > targetDiffUs) {
sp<AMessage> itemMeta;
CHECK(mPlaylist->itemAt(index, NULL /* uri */, &itemMeta));
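
With the extra targetDiffUs argument, getSeqNumberWithAnchorTime() now steps backwards one segment at a time, pulling the anchor timestamp down by each segment's duration, until the anchor is within targetDiffUs of mStartTimeUs (previously it walked all the way back to mStartTimeUs). A standalone sketch of that walk over a plain list of segment durations; the "subtract duration, step back" loop body is assumed, since it is not fully shown in this hunk:

#include <cstdint>
#include <vector>
#include <cstdio>

// Simplified model of getSeqNumberWithAnchorTime(): walk backwards from the
// current index, subtracting segment durations from the anchor time, until the
// anchor is within targetDiffUs of startTimeUs; return the sequence number to
// start fetching from.
static int32_t seqForAnchor(const std::vector<int64_t> &segDurationsUs,
                            int32_t firstSeq, int32_t curSeq,
                            int64_t anchorTimeUs, int64_t startTimeUs,
                            int64_t targetDiffUs) {
    int32_t index = curSeq - firstSeq - 1;
    while (index >= 0 && anchorTimeUs - startTimeUs > targetDiffUs) {
        anchorTimeUs -= segDurationsUs[index];
        --index;
    }
    return firstSeq + index + 1;
}

int main() {
    // 6 ten-second segments; anchor 35s past start, allow at most one target duration (10s) ahead.
    std::vector<int64_t> durs(6, 10000000ll);
    std::printf("%d\n", seqForAnchor(durs, 100, 106, 35000000ll, 0, 10000000ll));  // 103
}
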
@@ -1240,6 +1505,7 @@ const sp<ABuffer> &PlaylistFetcher::setAccessUnitProperties(
accessUnit->meta()->setInt32("discontinuitySeq", mDiscontinuitySeq);
accessUnit->meta()->setInt64("segmentStartTimeUs", getSegmentStartTimeUs(mSeqNumber));
+ accessUnit->meta()->setInt64("segmentDurationUs", getSegmentDurationUs(mSeqNumber));
return accessUnit;
}
@@ -1278,30 +1544,15 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
for (size_t i = mPacketSources.size(); i-- > 0;) {
sp<AnotherPacketSource> packetSource = mPacketSources.valueAt(i);
- const char *key;
- ATSParser::SourceType type;
const LiveSession::StreamType stream = mPacketSources.keyAt(i);
- switch (stream) {
- case LiveSession::STREAMTYPE_VIDEO:
- type = ATSParser::VIDEO;
- key = "timeUsVideo";
- break;
-
- case LiveSession::STREAMTYPE_AUDIO:
- type = ATSParser::AUDIO;
- key = "timeUsAudio";
- break;
-
- case LiveSession::STREAMTYPE_SUBTITLES:
- {
- ALOGE("MPEG2 Transport streams do not contain subtitles.");
- return ERROR_MALFORMED;
- break;
- }
-
- default:
- TRESPASS();
+ if (stream == LiveSession::STREAMTYPE_SUBTITLES) {
+ ALOGE("MPEG2 Transport streams do not contain subtitles.");
+ return ERROR_MALFORMED;
}
+ const char *key = LiveSession::getKeyForStream(stream);
+ ATSParser::SourceType type =
+ (stream == LiveSession::STREAMTYPE_AUDIO) ?
+ ATSParser::AUDIO : ATSParser::VIDEO;
sp<AnotherPacketSource> source =
static_cast<AnotherPacketSource *>(
@@ -1324,6 +1575,59 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
int64_t timeUs;
CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+ bool seeking = mSeekMode == LiveSession::kSeekModeExactPosition;
+ if (mSegmentFirstPTS < 0ll) {
+ mSegmentFirstPTS = timeUs;
+ if (!seeking) {
+ int32_t firstSeqNumberInPlaylist;
+ if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
+ "media-sequence", &firstSeqNumberInPlaylist)) {
+ firstSeqNumberInPlaylist = 0;
+ }
+
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+ // mStartup
+ // mStartup is true until we have queued a packet for all the streams
+ // we are fetching. We queue packets whose timestamps are greater than
+ // mStartTimeUs.
+ // mSegmentStartTimeUs >= 0
+ // mSegmentStartTimeUs is non-negative when adapting or switching tracks
+ // mSeqNumber > firstSeqNumberInPlaylist
+ // don't decrement mSeqNumber if it already points to the 1st segment
+ // timeUs - mStartTimeUs > targetDurationUs:
+ // This and the 2 above conditions should only happen when adapting in a live
+ // stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher
+ // would start fetching after timeUs, which should be greater than mStartTimeUs;
+ // the old fetcher would then continue fetching data until timeUs. We don't want
+ // timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to
+ // stop as early as possible. The definition of being "too far ahead" is
+ // arbitrary; here we use targetDurationUs as threshold.
+ int64_t targetDiffUs = (mSeekMode == LiveSession::kSeekModeNextSample
+ ? 0 : targetDurationUs);
+ if (mStartup && mSegmentStartTimeUs >= 0
+ && mSeqNumber > firstSeqNumberInPlaylist
+ && timeUs - mStartTimeUs > targetDiffUs) {
+ // we just guessed a starting timestamp that is too high when adapting in a
+ // live stream; re-adjust based on the actual timestamp extracted from the
+ // media segment; if we didn't move backward after the re-adjustment
+ // (newSeqNumber), start at least 1 segment prior.
+ int32_t newSeqNumber = getSeqNumberWithAnchorTime(
+ timeUs, targetDiffUs);
+ if (newSeqNumber >= mSeqNumber) {
+ --mSeqNumber;
+ } else {
+ mSeqNumber = newSeqNumber;
+ }
+ mStartTimeUsNotify = mNotify->dup();
+ mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
+ mStartTimeUsNotify->setString("uri", mURI);
+ mIDRFound = false;
+ return -EAGAIN;
+ }
+ }
+ }
if (mStartup) {
if (!mFirstPTSValid) {
mFirstTimeUs = timeUs;
@@ -1336,68 +1640,28 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
}
}
- if (timeUs < mStartTimeUs || (isAvc && !mIDRFound)) {
- // buffer up to the closest preceding IDR frame
- ALOGV("timeUs %" PRId64 " us < mStartTimeUs %" PRId64 " us",
- timeUs, mStartTimeUs);
+ bool startTimeReached =
+ seeking ? (timeUs >= mStartTimeUs) : true;
+
+ if (!startTimeReached || (isAvc && !mIDRFound)) {
+ // buffer up to the closest preceding IDR frame in the next segment,
+ // or the closest succeeding IDR frame after the exact position
if (isAvc) {
- if (IsIDR(accessUnit)) {
+ if (IsIDR(accessUnit) && (seeking || startTimeReached)) {
mVideoBuffer->clear();
mIDRFound = true;
}
- if (mIDRFound) {
+ if (mIDRFound && seeking && !startTimeReached) {
mVideoBuffer->queueAccessUnit(accessUnit);
}
}
-
- continue;
+ if (!startTimeReached || (isAvc && !mIDRFound)) {
+ continue;
+ }
}
}
if (mStartTimeUsNotify != NULL) {
- int32_t firstSeqNumberInPlaylist;
- if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32(
- "media-sequence", &firstSeqNumberInPlaylist)) {
- firstSeqNumberInPlaylist = 0;
- }
-
- int32_t targetDurationSecs;
- CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
- int64_t targetDurationUs = targetDurationSecs * 1000000ll;
- // mStartup
- // mStartup is true until we have queued a packet for all the streams
- // we are fetching. We queue packets whose timestamps are greater than
- // mStartTimeUs.
- // mSegmentStartTimeUs >= 0
- // mSegmentStartTimeUs is non-negative when adapting or switching tracks
- // mSeqNumber > firstSeqNumberInPlaylist
- // don't decrement mSeqNumber if it already points to the 1st segment
- // timeUs - mStartTimeUs > targetDurationUs:
- // This and the 2 above conditions should only happen when adapting in a live
- // stream; the old fetcher has already fetched to mStartTimeUs; the new fetcher
- // would start fetching after timeUs, which should be greater than mStartTimeUs;
- // the old fetcher would then continue fetching data until timeUs. We don't want
- // timeUs to be too far ahead of mStartTimeUs because we want the old fetcher to
- // stop as early as possible. The definition of being "too far ahead" is
- // arbitrary; here we use targetDurationUs as threshold.
- if (mStartup && mSegmentStartTimeUs >= 0
- && mSeqNumber > firstSeqNumberInPlaylist
- && timeUs - mStartTimeUs > targetDurationUs) {
- // we just guessed a starting timestamp that is too high when adapting in a
- // live stream; re-adjust based on the actual timestamp extracted from the
- // media segment; if we didn't move backward after the re-adjustment
- // (newSeqNumber), start at least 1 segment prior.
- int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs);
- if (newSeqNumber >= mSeqNumber) {
- --mSeqNumber;
- } else {
- mSeqNumber = newSeqNumber;
- }
- mStartTimeUsNotify = mNotify->dup();
- mStartTimeUsNotify->setInt32("what", kWhatStartedAt);
- return -EAGAIN;
- }
-
int32_t seq;
if (!mStartTimeUsNotify->findInt32("discontinuitySeq", &seq)) {
mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);
@@ -1413,8 +1677,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
if (streamMask == mStreamTypeMask) {
mStartup = false;
- mStartTimeUsNotify->post();
- mStartTimeUsNotify.clear();
}
}
}
@@ -1428,7 +1690,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
|| !mStopParams->findInt64(key, &stopTimeUs)
|| (discontinuitySeq == mDiscontinuitySeq
&& timeUs >= stopTimeUs)) {
- packetSource->queueAccessUnit(mSession->createFormatChangeBuffer());
mStreamTypeMask &= ~stream;
mPacketSources.removeItemsAt(i);
break;
@@ -1675,10 +1936,13 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+ int64_t targetDiffUs = (mSeekMode == LiveSession::kSeekModeNextSample
+ ? 0 : targetDurationUs);
// Duplicated logic from how we handle .ts playlists.
if (mStartup && mSegmentStartTimeUs >= 0
- && timeUs - mStartTimeUs > targetDurationUs) {
- int32_t newSeqNumber = getSeqNumberWithAnchorTime(timeUs);
+ && timeUs - mStartTimeUs > targetDiffUs) {
+ int32_t newSeqNumber = getSeqNumberWithAnchorTime(
+ timeUs, targetDiffUs);
if (newSeqNumber >= mSeqNumber) {
--mSeqNumber;
} else {
@@ -1690,8 +1954,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
mStartTimeUsNotify->setInt64("timeUsAudio", timeUs);
mStartTimeUsNotify->setInt32("discontinuitySeq", mDiscontinuitySeq);
mStartTimeUsNotify->setInt32("streamMask", LiveSession::STREAMTYPE_AUDIO);
- mStartTimeUsNotify->post();
- mStartTimeUsNotify.clear();
mStartup = false;
}
}
@@ -1704,7 +1966,6 @@ status_t PlaylistFetcher::extractAndQueueAccessUnits(
|| discontinuitySeq > mDiscontinuitySeq
|| !mStopParams->findInt64("timeUsAudio", &stopTimeUs)
|| (discontinuitySeq == mDiscontinuitySeq && unitTimeUs >= stopTimeUs)) {
- packetSource->queueAccessUnit(mSession->createFormatChangeBuffer());
mStreamTypeMask = 0;
mPacketSources.clear();
return ERROR_OUT_OF_RANGE;
@@ -1741,4 +2002,15 @@ void PlaylistFetcher::updateDuration() {
msg->post();
}
+void PlaylistFetcher::updateTargetDuration() {
+ int32_t targetDurationSecs;
+ CHECK(mPlaylist->meta()->findInt32("target-duration", &targetDurationSecs));
+ int64_t targetDurationUs = targetDurationSecs * 1000000ll;
+
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatTargetDurationUpdate);
+ msg->setInt64("targetDurationUs", targetDurationUs);
+ msg->post();
+}
+
} // namespace android
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index b82e50d..dab56df 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -44,9 +44,11 @@ struct PlaylistFetcher : public AHandler {
kWhatStopped,
kWhatError,
kWhatDurationUpdate,
+ kWhatTargetDurationUpdate,
kWhatPrepared,
kWhatPreparationFailed,
kWhatStartedAt,
+ kWhatStopReached,
};
PlaylistFetcher(
@@ -64,10 +66,10 @@ struct PlaylistFetcher : public AHandler {
int64_t startTimeUs = -1ll, // starting timestamps
int64_t segmentStartTimeUs = -1ll, // starting position within playlist
// startTimeUs!=segmentStartTimeUs only when playlist is live
- int32_t startDiscontinuitySeq = 0,
- bool adaptive = false);
+ int32_t startDiscontinuitySeq = -1,
+ LiveSession::SeekMode seekMode = LiveSession::kSeekModeExactPosition);
- void pauseAsync(bool immediate = false);
+ void pauseAsync(float thresholdRatio);
void stopAsync(bool clear = true);
@@ -95,6 +97,8 @@ private:
kWhatDownloadNext = 'dlnx',
};
+ struct DownloadState;
+
static const int64_t kMaxMonitorDelayUs;
static const int32_t kNumSkipFrames;
@@ -105,6 +109,7 @@ private:
sp<AMessage> mNotify;
sp<AMessage> mStartTimeUsNotify;
+ sp<HTTPBase> mHTTPDataSource;
sp<LiveSession> mSession;
AString mURI;
@@ -131,8 +136,7 @@ private:
int32_t mNumRetries;
bool mStartup;
bool mIDRFound;
- bool mAdaptive;
- bool mPrepared;
+ int32_t mSeekMode;
bool mTimeChangeSignaled;
int64_t mNextPTSTimeUs;
@@ -141,9 +145,6 @@ private:
int32_t mLastDiscontinuitySeq;
- Mutex mStoppingLock;
- bool mStopping;
-
enum RefreshState {
INITIAL_MINIMUM_RELOAD_DELAY,
FIRST_UNCHANGED_RELOAD_ATTEMPT,
@@ -157,8 +158,8 @@ private:
sp<ATSParser> mTSParser;
bool mFirstPTSValid;
- uint64_t mFirstPTS;
int64_t mFirstTimeUs;
+ int64_t mSegmentFirstPTS;
sp<AnotherPacketSource> mVideoBuffer;
// Stores the initialization vector to decrypt the next block of cipher text, which can
@@ -166,6 +167,11 @@ private:
// the last block of cipher text (cipher-block chaining).
unsigned char mAESInitVec[16];
+ Mutex mThresholdLock;
+ float mThresholdRatio;
+
+ sp<DownloadState> mDownloadState;
+
// Set first to true if decrypting the first segment of a playlist segment. When
// first is true, reset the initialization vector based on the available
// information in the manifest; otherwise, use the initialization vector as
@@ -181,7 +187,8 @@ private:
void postMonitorQueue(int64_t delayUs = 0, int64_t minDelayUs = 0);
void cancelMonitorQueue();
- void setStopping(bool stopping);
+ void setStoppingThreshold(float thresholdRatio);
+ bool shouldPauseDownload();
int64_t delayUsToRefreshPlaylist() const;
status_t refreshPlaylist();
@@ -189,12 +196,19 @@ private:
// Returns the media time in us of the segment specified by seqNumber.
// This is computed by summing the durations of all segments before it.
int64_t getSegmentStartTimeUs(int32_t seqNumber) const;
+ // Returns the duration time in us of the segment specified.
+ int64_t getSegmentDurationUs(int32_t seqNumber) const;
status_t onStart(const sp<AMessage> &msg);
void onPause();
void onStop(const sp<AMessage> &msg);
void onMonitorQueue();
void onDownloadNext();
+ bool initDownloadState(
+ AString &uri,
+ sp<AMessage> &itemMeta,
+ int32_t &firstSeqNumberInPlaylist,
+ int32_t &lastSeqNumberInPlaylist);
// Resume a fetcher to continue until the stopping point stored in msg.
status_t onResumeUntil(const sp<AMessage> &msg);
@@ -208,16 +222,19 @@ private:
status_t extractAndQueueAccessUnits(
const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta);
+ void notifyStopReached();
void notifyError(status_t err);
void queueDiscontinuity(
ATSParser::DiscontinuityType type, const sp<AMessage> &extra);
- int32_t getSeqNumberWithAnchorTime(int64_t anchorTimeUs) const;
+ int32_t getSeqNumberWithAnchorTime(
+ int64_t anchorTimeUs, int64_t targetDurationUs) const;
int32_t getSeqNumberForDiscontinuity(size_t discontinuitySeq) const;
int32_t getSeqNumberForTime(int64_t timeUs) const;
void updateDuration();
+ void updateTargetDuration();
DISALLOW_EVIL_CONSTRUCTORS(PlaylistFetcher);
};
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 75d76dc..5c50747 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -46,6 +46,9 @@ struct ATSParser : public RefBase {
DISCONTINUITY_AUDIO_FORMAT
| DISCONTINUITY_VIDEO_FORMAT
| DISCONTINUITY_TIME,
+ DISCONTINUITY_FORMAT_ONLY =
+ DISCONTINUITY_AUDIO_FORMAT
+ | DISCONTINUITY_VIDEO_FORMAT,
};
enum Flags {
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 79a9b04..c2f1527 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -19,6 +19,8 @@
#include "AnotherPacketSource.h"
+#include "include/avc_utils.h"
+
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -38,6 +40,7 @@ const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs
AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)
: mIsAudio(false),
mIsVideo(false),
+ mEnabled(true),
mFormat(NULL),
mLastQueuedTimeUs(0),
mEOSResult(OK),
@@ -155,7 +158,6 @@ status_t AnotherPacketSource::read(
const sp<ABuffer> buffer = *mBuffers.begin();
mBuffers.erase(mBuffers.begin());
- mLatestDequeuedMeta = buffer->meta()->dup();
int32_t discontinuity;
if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
@@ -166,6 +168,8 @@ status_t AnotherPacketSource::read(
return INFO_DISCONTINUITY;
}
+ mLatestDequeuedMeta = buffer->meta()->dup();
+
sp<RefBase> object;
if (buffer->meta()->findObject("format", &object)) {
setFormat(static_cast<MetaData*>(object.get()));
@@ -205,20 +209,26 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
return;
}
- int64_t lastQueuedTimeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs));
- mLastQueuedTimeUs = lastQueuedTimeUs;
- ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
-
Mutex::Autolock autoLock(mLock);
mBuffers.push_back(buffer);
mCondition.signal();
int32_t discontinuity;
if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ // discontinuity handling needs to be consistent with queueDiscontinuity()
++mQueuedDiscontinuityCount;
+ mLastQueuedTimeUs = 0ll;
+ mEOSResult = OK;
+ mLatestEnqueuedMeta = NULL;
+ return;
}
+ int64_t lastQueuedTimeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs));
+ mLastQueuedTimeUs = lastQueuedTimeUs;
+ ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)",
+ mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
+
if (mLatestEnqueuedMeta == NULL) {
mLatestEnqueuedMeta = buffer->meta()->dup();
} else {
@@ -298,6 +308,10 @@ void AnotherPacketSource::signalEOS(status_t result) {
bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {
Mutex::Autolock autoLock(mLock);
+ *finalResult = OK;
+ if (!mEnabled) {
+ return false;
+ }
if (!mBuffers.empty()) {
return true;
}
@@ -306,6 +320,24 @@ bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) {
return false;
}
+bool AnotherPacketSource::hasDataBufferAvailable(status_t *finalResult) {
+ Mutex::Autolock autoLock(mLock);
+ *finalResult = OK;
+ if (!mEnabled) {
+ return false;
+ }
+ List<sp<ABuffer> >::iterator it;
+ for (it = mBuffers.begin(); it != mBuffers.end(); it++) {
+ int32_t discontinuity;
+ if (!(*it)->meta()->findInt32("discontinuity", &discontinuity)) {
+ return true;
+ }
+ }
+
+ *finalResult = mEOSResult;
+ return false;
+}
+
int64_t AnotherPacketSource::getBufferedDurationUs(status_t *finalResult) {
Mutex::Autolock autoLock(mLock);
return getBufferedDurationUs_l(finalResult);
@@ -424,4 +456,140 @@ sp<AMessage> AnotherPacketSource::getLatestDequeuedMeta() {
return mLatestDequeuedMeta;
}
+void AnotherPacketSource::enable(bool enable) {
+ Mutex::Autolock autoLock(mLock);
+ mEnabled = enable;
+}
+
+sp<AMessage> AnotherPacketSource::getMetaAfterLastDequeued(int64_t delayUs) {
+ Mutex::Autolock autoLock(mLock);
+ int64_t firstUs = -1;
+ int64_t lastUs = -1;
+ int64_t durationUs = 0;
+
+ List<sp<ABuffer> >::iterator it;
+ for (it = mBuffers.begin(); it != mBuffers.end(); ++it) {
+ const sp<ABuffer> &buffer = *it;
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ durationUs += lastUs - firstUs;
+ firstUs = -1;
+ lastUs = -1;
+ continue;
+ }
+ int64_t timeUs;
+ if (buffer->meta()->findInt64("timeUs", &timeUs)) {
+ if (firstUs < 0) {
+ firstUs = timeUs;
+ }
+ if (lastUs < 0 || timeUs > lastUs) {
+ lastUs = timeUs;
+ }
+ if (durationUs + (lastUs - firstUs) >= delayUs) {
+ return buffer->meta();
+ }
+ }
+ }
+ return mLatestEnqueuedMeta;
+}
+
+void AnotherPacketSource::trimBuffersAfterTimeUs(
+ size_t discontinuitySeq, int64_t timeUs) {
+ ALOGV("trimBuffersAfterTimeUs: discontinuitySeq %zu, timeUs %lld",
+ discontinuitySeq, (long long)timeUs);
+
+ Mutex::Autolock autoLock(mLock);
+ if (mBuffers.empty()) {
+ return;
+ }
+
+ List<sp<ABuffer> >::iterator it;
+ sp<AMessage> newLatestEnqueuedMeta = NULL;
+ int64_t newLastQueuedTimeUs = 0;
+ size_t newDiscontinuityCount = 0;
+ for (it = mBuffers.begin(); it != mBuffers.end(); ++it) {
+ const sp<ABuffer> &buffer = *it;
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ newDiscontinuityCount++;
+ continue;
+ }
+ size_t curDiscontinuitySeq;
+ int64_t curTimeUs;
+ CHECK(buffer->meta()->findInt32(
+ "discontinuitySeq", (int32_t*)&curDiscontinuitySeq));
+ CHECK(buffer->meta()->findInt64("timeUs", &curTimeUs));
+ if ((curDiscontinuitySeq > discontinuitySeq
+ || (curDiscontinuitySeq == discontinuitySeq
+ && curTimeUs >= timeUs))) {
+ ALOGI("trimming from %lld (inclusive) to end",
+ (long long)curTimeUs);
+ break;
+ }
+ newLatestEnqueuedMeta = buffer->meta();
+ newLastQueuedTimeUs = curTimeUs;
+ }
+ mBuffers.erase(it, mBuffers.end());
+ mLatestEnqueuedMeta = newLatestEnqueuedMeta;
+ mLastQueuedTimeUs = newLastQueuedTimeUs;
+ mQueuedDiscontinuityCount = newDiscontinuityCount;
+}
+
+sp<AMessage> AnotherPacketSource::trimBuffersBeforeTimeUs(
+ size_t discontinuitySeq, int64_t timeUs) {
+ ALOGV("trimBuffersBeforeTimeUs: discontinuitySeq %zu, timeUs %lld",
+ discontinuitySeq, (long long)timeUs);
+ sp<AMessage> meta;
+ Mutex::Autolock autoLock(mLock);
+ if (mBuffers.empty()) {
+ return NULL;
+ }
+
+ sp<MetaData> format;
+ bool isAvc = false;
+
+ List<sp<ABuffer> >::iterator it;
+ size_t discontinuityCount = 0;
+ for (it = mBuffers.begin(); it != mBuffers.end(); ++it) {
+ const sp<ABuffer> &buffer = *it;
+ int32_t discontinuity;
+ if (buffer->meta()->findInt32("discontinuity", &discontinuity)) {
+ format = NULL;
+ isAvc = false;
+ discontinuityCount++;
+ continue;
+ }
+ if (format == NULL) {
+ sp<RefBase> object;
+ if (buffer->meta()->findObject("format", &object)) {
+ const char* mime;
+ format = static_cast<MetaData*>(object.get());
+ isAvc = format != NULL
+ && format->findCString(kKeyMIMEType, &mime)
+ && !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+ }
+ }
+ if (isAvc && !IsIDR(buffer)) {
+ continue;
+ }
+ size_t curDiscontinuitySeq;
+ int64_t curTimeUs;
+ CHECK(buffer->meta()->findInt32(
+ "discontinuitySeq", (int32_t*)&curDiscontinuitySeq));
+ CHECK(buffer->meta()->findInt64("timeUs", &curTimeUs));
+ if ((curDiscontinuitySeq > discontinuitySeq
+ || (curDiscontinuitySeq == discontinuitySeq
+ && curTimeUs > timeUs))) {
+ ALOGI("trimming from beginning to %lld (not inclusive)",
+ (long long)curTimeUs);
+ meta = buffer->meta();
+ break;
+ }
+ }
+ mBuffers.erase(mBuffers.begin(), it);
+ mQueuedDiscontinuityCount -= discontinuityCount;
+ mLatestDequeuedMeta = NULL;
+ return meta;
+}
+
} // namespace android
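
A minimal standalone sketch of the duration-accumulation idea used by getMetaAfterLastDequeued() above, reduced to plain C++ (not part of the patch; Pkt and findPacketAfterDelay are hypothetical stand-ins for the queued ABuffer metadata and the member function). Each contiguous run of timestamps between discontinuity markers contributes (last - first) to the buffered duration, and the walk stops at the first packet that lies at least delayUs of playback past the head of the queue:

#include <cstdint>
#include <cstdio>
#include <list>

struct Pkt {
    bool    discontinuity;  // marker entry; carries no timestamp
    int64_t timeUs;         // presentation time, valid when !discontinuity
};

// Returns the first packet lying at least delayUs of buffered playback past the
// head of the queue, or nullptr when less than delayUs is buffered.
const Pkt *findPacketAfterDelay(const std::list<Pkt> &queue, int64_t delayUs) {
    int64_t firstUs = -1, lastUs = -1, durationUs = 0;
    for (const Pkt &p : queue) {
        if (p.discontinuity) {
            durationUs += lastUs - firstUs;   // close the current run (0 if empty)
            firstUs = lastUs = -1;
            continue;
        }
        if (firstUs < 0) firstUs = p.timeUs;
        if (lastUs < 0 || p.timeUs > lastUs) lastUs = p.timeUs;
        if (durationUs + (lastUs - firstUs) >= delayUs) {
            return &p;
        }
    }
    return nullptr;
}

int main() {
    std::list<Pkt> q = {
        {false, 0}, {false, 20000}, {true, 0}, {false, 500000}, {false, 540000},
    };
    const Pkt *p = findPacketAfterDelay(q, 50000);
    printf("%lld\n", p ? (long long)p->timeUs : -1);   // prints 540000
    return 0;
}
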
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index 809a858..e126006 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -43,8 +43,12 @@ struct AnotherPacketSource : public MediaSource {
void clear();
+ // Returns true if we have any packets including discontinuities
bool hasBufferAvailable(status_t *finalResult);
+ // Returns true if we have packets that are not discontinuities
+ bool hasDataBufferAvailable(status_t *finalResult);
+
// Returns the difference between the last and the first queued
// presentation timestamps since the last discontinuity (if any).
int64_t getBufferedDurationUs(status_t *finalResult);
@@ -66,8 +70,14 @@ struct AnotherPacketSource : public MediaSource {
bool isFinished(int64_t duration) const;
+ void enable(bool enable);
+
sp<AMessage> getLatestEnqueuedMeta();
sp<AMessage> getLatestDequeuedMeta();
+ sp<AMessage> getMetaAfterLastDequeued(int64_t delayUs);
+
+ void trimBuffersAfterTimeUs(size_t discontinuitySeq, int64_t timeUs);
+ sp<AMessage> trimBuffersBeforeTimeUs(size_t discontinuitySeq, int64_t timeUs);
protected:
virtual ~AnotherPacketSource();
@@ -78,6 +88,7 @@ private:
bool mIsAudio;
bool mIsVideo;
+ bool mEnabled;
sp<MetaData> mFormat;
int64_t mLastQueuedTimeUs;
List<sp<ABuffer> > mBuffers;
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index 0ad0bf3..baf65f6 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -19,6 +19,7 @@ LOCAL_SHARED_LIBRARIES := \
libcamera_metadata\
libcameraservice \
libmedialogservice \
+ libresourcemanagerservice \
libcutils \
libnbaio \
libmedia \
@@ -38,6 +39,7 @@ LOCAL_C_INCLUDES := \
frameworks/av/services/audioflinger \
frameworks/av/services/audiopolicy \
frameworks/av/services/camera/libcameraservice \
+ frameworks/av/services/mediaresourcemanager \
$(call include-path-for, audio-utils) \
frameworks/av/services/soundtrigger \
frameworks/av/services/radio
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
index 7a1048c..83a5ba1 100644
--- a/media/ndk/NdkMediaDrm.cpp
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -312,8 +312,10 @@ media_status_t AMediaDrm_getKeyRequest(AMediaDrm *mObj, const AMediaDrmScope *sc
String8(optionalParameters[i].mValue));
}
String8 defaultUrl;
+ DrmPlugin::KeyRequestType keyRequestType;
status_t status = mObj->mDrm->getKeyRequest(*iter, mdInit, String8(mimeType),
- mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl);
+ mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl,
+ &keyRequestType);
if (status != OK) {
return translateStatus(status);
} else {
@@ -725,4 +727,3 @@ media_status_t AMediaDrm_verify(AMediaDrm *mObj, const AMediaDrmSessionId *sessi
}
} // extern "C"
-
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index 642ff82..fee2347 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -39,6 +39,9 @@ LOCAL_SRC_FILES:= \
AudioFlinger.cpp \
Threads.cpp \
Tracks.cpp \
+ AudioHwDevice.cpp \
+ AudioStreamOut.cpp \
+ SpdifStreamOut.cpp \
Effects.cpp \
AudioMixer.cpp.arm \
PatchPanel.cpp
@@ -52,6 +55,7 @@ LOCAL_C_INCLUDES := \
LOCAL_SHARED_LIBRARIES := \
libaudioresampler \
+ libaudiospdif \
libaudioutils \
libcommon_time_client \
libcutils \
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 461b5d3..f3206cb 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -272,7 +272,7 @@ static const char * const audio_interfaces[] = {
};
#define ARRAY_SIZE(x) (sizeof((x))/sizeof(((x)[0])))
-AudioFlinger::AudioHwDevice* AudioFlinger::findSuitableHwDev_l(
+AudioHwDevice* AudioFlinger::findSuitableHwDev_l(
audio_module_handle_t module,
audio_devices_t devices)
{
@@ -1716,8 +1716,6 @@ sp<AudioFlinger::PlaybackThread> AudioFlinger::openOutput_l(audio_module_handle_
mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
- audio_stream_out_t *outStream = NULL;
-
// FOR TESTING ONLY:
// This if statement allows overriding the audio policy settings
// and forcing a specific format or channel mask to the HAL/Sink device for testing.
@@ -1739,25 +1737,18 @@ sp<AudioFlinger::PlaybackThread> AudioFlinger::openOutput_l(audio_module_handle_
}
}
- status_t status = hwDevHal->open_output_stream(hwDevHal,
- *output,
- devices,
- flags,
- config,
- &outStream,
- address.string());
+ AudioStreamOut *outputStream = NULL;
+ status_t status = outHwDev->openOutputStream(
+ &outputStream,
+ *output,
+ devices,
+ flags,
+ config,
+ address.string());
mHardwareStatus = AUDIO_HW_IDLE;
- ALOGV("openOutput_l() openOutputStream returned output %p, sampleRate %d, Format %#x, "
- "channelMask %#x, status %d",
- outStream,
- config->sample_rate,
- config->format,
- config->channel_mask,
- status);
- if (status == NO_ERROR && outStream != NULL) {
- AudioStreamOut *outputStream = new AudioStreamOut(outHwDev, outStream, flags);
+ if (status == NO_ERROR) {
PlaybackThread *thread;
if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
@@ -1787,7 +1778,7 @@ status_t AudioFlinger::openOutput(audio_module_handle_t module,
uint32_t *latencyMs,
audio_output_flags_t flags)
{
- ALOGV("openOutput(), module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, flags %x",
+ ALOGI("openOutput(), module %d Device %x, SamplingRate %d, Format %#08x, Channels %x, flags %x",
module,
(devices != NULL) ? *devices : 0,
config->sample_rate,
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 7b76185..c7d9161 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -56,6 +56,9 @@
#include <media/nbaio/NBAIO.h>
#include "AudioWatchdog.h"
#include "AudioMixer.h"
+#include "AudioStreamOut.h"
+#include "SpdifStreamOut.h"
+#include "AudioHwDevice.h"
#include <powermanager/IPowerManager.h>
@@ -311,7 +314,6 @@ public:
wp<RefBase> cookie);
private:
- class AudioHwDevice; // fwd declaration for findSuitableHwDev_l
audio_mode_t getMode() const { return mMode; }
@@ -449,7 +451,7 @@ private:
class EffectModule;
class EffectHandle;
class EffectChain;
- struct AudioStreamOut;
+
struct AudioStreamIn;
struct stream_type_t {
@@ -586,57 +588,11 @@ private:
// Return true if the effect was found in mOrphanEffectChains, false otherwise.
bool updateOrphanEffectChains(const sp<EffectModule>& effect);
- class AudioHwDevice {
- public:
- enum Flags {
- AHWD_CAN_SET_MASTER_VOLUME = 0x1,
- AHWD_CAN_SET_MASTER_MUTE = 0x2,
- };
-
- AudioHwDevice(audio_module_handle_t handle,
- const char *moduleName,
- audio_hw_device_t *hwDevice,
- Flags flags)
- : mHandle(handle), mModuleName(strdup(moduleName))
- , mHwDevice(hwDevice)
- , mFlags(flags) { }
- /*virtual*/ ~AudioHwDevice() { free((void *)mModuleName); }
-
- bool canSetMasterVolume() const {
- return (0 != (mFlags & AHWD_CAN_SET_MASTER_VOLUME));
- }
-
- bool canSetMasterMute() const {
- return (0 != (mFlags & AHWD_CAN_SET_MASTER_MUTE));
- }
-
- audio_module_handle_t handle() const { return mHandle; }
- const char *moduleName() const { return mModuleName; }
- audio_hw_device_t *hwDevice() const { return mHwDevice; }
- uint32_t version() const { return mHwDevice->common.version; }
- private:
- const audio_module_handle_t mHandle;
- const char * const mModuleName;
- audio_hw_device_t * const mHwDevice;
- const Flags mFlags;
- };
-
- // AudioStreamOut and AudioStreamIn are immutable, so their fields are const.
+ // AudioStreamIn is immutable, so its fields are const.
// For emphasis, we could also make all pointers to them be "const *",
// but that would clutter the code unnecessarily.
- struct AudioStreamOut {
- AudioHwDevice* const audioHwDev;
- audio_stream_out_t* const stream;
- const audio_output_flags_t flags;
-
- audio_hw_device_t* hwDev() const { return audioHwDev->hwDevice(); }
-
- AudioStreamOut(AudioHwDevice *dev, audio_stream_out_t *out, audio_output_flags_t flags) :
- audioHwDev(dev), stream(out), flags(flags) {}
- };
-
struct AudioStreamIn {
AudioHwDevice* const audioHwDev;
audio_stream_in_t* const stream;
diff --git a/services/audioflinger/AudioHwDevice.cpp b/services/audioflinger/AudioHwDevice.cpp
new file mode 100644
index 0000000..09d86ea
--- /dev/null
+++ b/services/audioflinger/AudioHwDevice.cpp
@@ -0,0 +1,94 @@
+/*
+**
+** Copyright 2007, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "AudioHwDevice"
+//#define LOG_NDEBUG 0
+
+#include <hardware/audio.h>
+#include <utils/Log.h>
+
+#include <audio_utils/spdif/SPDIFEncoder.h>
+
+#include "AudioHwDevice.h"
+#include "AudioStreamOut.h"
+#include "SpdifStreamOut.h"
+
+namespace android {
+
+// ----------------------------------------------------------------------------
+
+status_t AudioHwDevice::openOutputStream(
+ AudioStreamOut **ppStreamOut,
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ audio_output_flags_t flags,
+ struct audio_config *config,
+ const char *address)
+{
+
+ struct audio_config originalConfig = *config;
+ AudioStreamOut *outputStream = new AudioStreamOut(this, flags);
+
+ // Try to open the HAL first using the current format.
+ ALOGV("AudioHwDevice::openOutputStream(), try "
+ " sampleRate %d, Format %#x, "
+ "channelMask %#x",
+ config->sample_rate,
+ config->format,
+ config->channel_mask);
+ status_t status = outputStream->open(handle, devices, config, address);
+
+ if (status != NO_ERROR) {
+ delete outputStream;
+ outputStream = NULL;
+
+ // FIXME Look at any modification to the config.
+ // The HAL might modify the config to suggest a wrapped format.
+ // Log this so we can see what the HALs are doing.
+ ALOGI("AudioHwDevice::openOutputStream(), HAL returned"
+ " sampleRate %d, Format %#x, "
+ "channelMask %#x, status %d",
+ config->sample_rate,
+ config->format,
+ config->channel_mask,
+ status);
+
+ // If the data is encoded then try again using wrapped PCM.
+ bool wrapperNeeded = !audio_is_linear_pcm(originalConfig.format)
+ && ((flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0)
+ && ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0);
+
+ // FIXME - Add isEncodingSupported() query to SPDIF wrapper then
+ // call it from here.
+ if (wrapperNeeded) {
+ outputStream = new SpdifStreamOut(this, flags);
+ status = outputStream->open(handle, devices, &originalConfig, address);
+ if (status != NO_ERROR) {
+ ALOGE("ERROR - AudioHwDevice::openOutputStream(), SPDIF open returned %d",
+ status);
+ delete outputStream;
+ outputStream = NULL;
+ }
+ }
+ }
+
+ *ppStreamOut = outputStream;
+ return status;
+}
+
+
+}; // namespace android
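
The new file above boils down to a two-step open: try the HAL with the requested (possibly encoded) format, and only if that fails and the output is a direct, non-offloaded encoded stream, retry with the SPDIF-wrapping stream on the original config. A standalone sketch of that decision follows (not part of the patch; openNative and openSpdifWrapped are hypothetical callables standing in for AudioStreamOut::open() and SpdifStreamOut::open()):

#include <cstdio>
#include <functional>

struct Request {
    bool isLinearPcm;    // false for AC3 / E-AC3 and other encoded formats
    bool directOutput;   // AUDIO_OUTPUT_FLAG_DIRECT was requested
    bool offloadOutput;  // AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD was requested
};

// Returns true if some output stream was opened.
bool openWithFallback(const Request &req,
                      const std::function<bool()> &openNative,
                      const std::function<bool()> &openSpdifWrapped) {
    if (openNative()) {
        return true;                 // HAL accepted the format as-is
    }
    // Only encoded data on a direct, non-offloaded output can be re-sent to the
    // HAL as stereo 16-bit PCM data bursts through the SPDIF wrapper.
    const bool wrapperNeeded =
            !req.isLinearPcm && req.directOutput && !req.offloadOutput;
    return wrapperNeeded && openSpdifWrapped();
}

int main() {
    Request eac3 = { /*isLinearPcm=*/false, /*directOutput=*/true, /*offloadOutput=*/false };
    bool ok = openWithFallback(eac3,
            [] { return false; },    // HAL rejects native E-AC3
            [] { return true;  });   // wrapped-PCM open succeeds
    printf("%s\n", ok ? "opened (wrapped)" : "failed");
    return 0;
}
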
diff --git a/services/audioflinger/AudioHwDevice.h b/services/audioflinger/AudioHwDevice.h
new file mode 100644
index 0000000..b9f65c1
--- /dev/null
+++ b/services/audioflinger/AudioHwDevice.h
@@ -0,0 +1,88 @@
+/*
+**
+** Copyright 2007, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_AUDIO_HW_DEVICE_H
+#define ANDROID_AUDIO_HW_DEVICE_H
+
+#include <stdint.h>
+#include <stdlib.h>
+#include <sys/types.h>
+
+#include <hardware/audio.h>
+#include <utils/Errors.h>
+#include <system/audio.h>
+
+
+namespace android {
+
+class AudioStreamOut;
+
+class AudioHwDevice {
+public:
+ enum Flags {
+ AHWD_CAN_SET_MASTER_VOLUME = 0x1,
+ AHWD_CAN_SET_MASTER_MUTE = 0x2,
+ };
+
+ AudioHwDevice(audio_module_handle_t handle,
+ const char *moduleName,
+ audio_hw_device_t *hwDevice,
+ Flags flags)
+ : mHandle(handle)
+ , mModuleName(strdup(moduleName))
+ , mHwDevice(hwDevice)
+ , mFlags(flags) { }
+ virtual ~AudioHwDevice() { free((void *)mModuleName); }
+
+ bool canSetMasterVolume() const {
+ return (0 != (mFlags & AHWD_CAN_SET_MASTER_VOLUME));
+ }
+
+ bool canSetMasterMute() const {
+ return (0 != (mFlags & AHWD_CAN_SET_MASTER_MUTE));
+ }
+
+ audio_module_handle_t handle() const { return mHandle; }
+ const char *moduleName() const { return mModuleName; }
+ audio_hw_device_t *hwDevice() const { return mHwDevice; }
+ uint32_t version() const { return mHwDevice->common.version; }
+
+ /** This method creates and opens the audio hardware output stream.
+ * The "address" parameter qualifies the "devices" audio device type if needed.
+ * The address format depends on the device type:
+ * - Bluetooth devices use the MAC address of the device in the form "00:11:22:AA:BB:CC"
+ * - USB devices use the ALSA card and device numbers in the form "card=X;device=Y"
+ * - Other devices may use a number or any other string.
+ */
+ status_t openOutputStream(
+ AudioStreamOut **ppStreamOut,
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ audio_output_flags_t flags,
+ struct audio_config *config,
+ const char *address);
+
+private:
+ const audio_module_handle_t mHandle;
+ const char * const mModuleName;
+ audio_hw_device_t * const mHwDevice;
+ const Flags mFlags;
+};
+
+} // namespace android
+
+#endif // ANDROID_AUDIO_HW_DEVICE_H
diff --git a/services/audioflinger/AudioMixer.cpp b/services/audioflinger/AudioMixer.cpp
index 93d821a..dddca02 100644
--- a/services/audioflinger/AudioMixer.cpp
+++ b/services/audioflinger/AudioMixer.cpp
@@ -69,9 +69,9 @@
#define ARRAY_SIZE(x) (sizeof(x)/sizeof((x)[0]))
#endif
-// Set kUseNewMixer to true to use the new mixer engine. Otherwise the
-// original code will be used. This is false for now.
-static const bool kUseNewMixer = false;
+// Set kUseNewMixer to true to use the new mixer engine always. Otherwise the
+// original code will be used for stereo sinks, the new mixer for multichannel.
+static const bool kUseNewMixer = true;
// Set kUseFloat to true to allow floating input into the mixer engine.
// If kUseNewMixer is false, this is ignored or may be overridden internally
diff --git a/services/audioflinger/AudioStreamOut.cpp b/services/audioflinger/AudioStreamOut.cpp
new file mode 100644
index 0000000..e6d8f09
--- /dev/null
+++ b/services/audioflinger/AudioStreamOut.cpp
@@ -0,0 +1,117 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "AudioFlinger"
+//#define LOG_NDEBUG 0
+
+#include <hardware/audio.h>
+#include <utils/Log.h>
+
+#include "AudioHwDevice.h"
+#include "AudioStreamOut.h"
+
+namespace android {
+
+// ----------------------------------------------------------------------------
+
+AudioStreamOut::AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags)
+ : audioHwDev(dev)
+ , stream(NULL)
+ , flags(flags)
+{
+}
+
+audio_hw_device_t* AudioStreamOut::hwDev() const
+{
+ return audioHwDev->hwDevice();
+}
+
+status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
+{
+ if (stream == NULL) {
+ return NO_INIT;
+ }
+ return stream->get_render_position(stream, frames);
+}
+
+status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
+{
+ if (stream == NULL) {
+ return NO_INIT;
+ }
+ return stream->get_presentation_position(stream, frames, timestamp);
+}
+
+status_t AudioStreamOut::open(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ struct audio_config *config,
+ const char *address)
+{
+ audio_stream_out_t* outStream;
+ int status = hwDev()->open_output_stream(
+ hwDev(),
+ handle,
+ devices,
+ flags,
+ config,
+ &outStream,
+ address);
+ ALOGV("AudioStreamOut::open(), HAL open_output_stream returned "
+ " %p, sampleRate %d, Format %#x, "
+ "channelMask %#x, status %d",
+ outStream,
+ config->sample_rate,
+ config->format,
+ config->channel_mask,
+ status);
+
+ if (status == NO_ERROR) {
+ stream = outStream;
+ }
+
+ return status;
+}
+
+size_t AudioStreamOut::getFrameSize()
+{
+ ALOG_ASSERT(stream != NULL);
+ return audio_stream_out_frame_size(stream);
+}
+
+int AudioStreamOut::flush()
+{
+ ALOG_ASSERT(stream != NULL);
+ if (stream->flush != NULL) {
+ return stream->flush(stream);
+ }
+ return NO_ERROR;
+}
+
+int AudioStreamOut::standby()
+{
+ ALOG_ASSERT(stream != NULL);
+ return stream->common.standby(&stream->common);
+}
+
+ssize_t AudioStreamOut::write(const void* buffer, size_t bytes)
+{
+ ALOG_ASSERT(stream != NULL);
+ return stream->write(stream, buffer, bytes);
+}
+
+} // namespace android
diff --git a/services/audioflinger/AudioStreamOut.h b/services/audioflinger/AudioStreamOut.h
new file mode 100644
index 0000000..e91ca9c
--- /dev/null
+++ b/services/audioflinger/AudioStreamOut.h
@@ -0,0 +1,83 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_AUDIO_STREAM_OUT_H
+#define ANDROID_AUDIO_STREAM_OUT_H
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <system/audio.h>
+
+#include "AudioStreamOut.h"
+
+namespace android {
+
+class AudioHwDevice;
+
+/**
+ * Managed access to a HAL output stream.
+ */
+class AudioStreamOut {
+public:
+// AudioStreamOut is immutable, so its fields are const.
+// For emphasis, we could also make all pointers to them be "const *",
+// but that would clutter the code unnecessarily.
+ AudioHwDevice * const audioHwDev;
+ audio_stream_out_t *stream;
+ const audio_output_flags_t flags;
+
+ audio_hw_device_t *hwDev() const;
+
+ AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags);
+
+ virtual status_t open(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ struct audio_config *config,
+ const char *address);
+
+ virtual ~AudioStreamOut() { }
+
+ virtual status_t getRenderPosition(uint32_t *frames);
+
+ virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+
+ /**
+ * Write audio buffer to driver. Returns number of bytes written, or a
+ * negative status_t. If at least one frame was written successfully prior to the error,
+ * it is suggested that the driver return that successful (short) byte count
+ * and then return an error in the subsequent call.
+ *
+ * If set_callback() has previously been called to enable non-blocking mode
+ * the write() is not allowed to block. It must write only the number of
+ * bytes that currently fit in the driver/hardware buffer and then return
+ * this byte count. If this is less than the requested write size the
+ * callback function must be called when more space is available in the
+ * driver/hardware buffer.
+ */
+ virtual ssize_t write(const void *buffer, size_t bytes);
+
+ virtual size_t getFrameSize();
+
+ virtual status_t flush();
+ virtual status_t standby();
+};
+
+} // namespace android
+
+#endif // ANDROID_AUDIO_STREAM_OUT_H
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 902d5e4..45df6a9 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -157,8 +157,9 @@ private:
bool mFlushHwPending; // track requests for thread flush
// for last call to getTimestamp
- bool mPreviousValid;
- uint32_t mPreviousFramesWritten;
+ bool mPreviousTimestampValid;
+ // This is either the first timestamp or one that has passed
+ // the check to prevent retrograde motion.
AudioTimestamp mPreviousTimestamp;
}; // end of Track
diff --git a/services/audioflinger/SpdifStreamOut.cpp b/services/audioflinger/SpdifStreamOut.cpp
new file mode 100644
index 0000000..d23588e
--- /dev/null
+++ b/services/audioflinger/SpdifStreamOut.cpp
@@ -0,0 +1,166 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "AudioFlinger"
+//#define LOG_NDEBUG 0
+#include <hardware/audio.h>
+#include <utils/Log.h>
+
+#include <audio_utils/spdif/SPDIFEncoder.h>
+
+#include "AudioHwDevice.h"
+#include "AudioStreamOut.h"
+#include "SpdifStreamOut.h"
+
+namespace android {
+
+/**
+ * If the AudioFlinger is processing encoded data and the HAL expects
+ * PCM then we need to wrap the data in an SPDIF wrapper.
+ */
+SpdifStreamOut::SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags)
+ : AudioStreamOut(dev,flags)
+ , mRateMultiplier(1)
+ , mSpdifEncoder(this)
+ , mRenderPositionHal(0)
+ , mPreviousHalPosition32(0)
+{
+}
+
+status_t SpdifStreamOut::open(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ struct audio_config *config,
+ const char *address)
+{
+ struct audio_config customConfig = *config;
+
+ customConfig.format = AUDIO_FORMAT_PCM_16_BIT;
+ customConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+
+ // Some data bursts run at a higher sample rate.
+ switch(config->format) {
+ case AUDIO_FORMAT_E_AC3:
+ mRateMultiplier = 4;
+ break;
+ case AUDIO_FORMAT_AC3:
+ mRateMultiplier = 1;
+ break;
+ default:
+ ALOGE("ERROR SpdifStreamOut::open() unrecognized format 0x%08X\n",
+ config->format);
+ return BAD_VALUE;
+ }
+ customConfig.sample_rate = config->sample_rate * mRateMultiplier;
+
+ // Always print this because otherwise it could be very confusing if the
+ // HAL and AudioFlinger are using different formats.
+ // Print before open() because HAL may modify customConfig.
+ ALOGI("SpdifStreamOut::open() AudioFlinger requested"
+ " sampleRate %d, format %#x, channelMask %#x",
+ config->sample_rate,
+ config->format,
+ config->channel_mask);
+ ALOGI("SpdifStreamOut::open() HAL configured for"
+ " sampleRate %d, format %#x, channelMask %#x",
+ customConfig.sample_rate,
+ customConfig.format,
+ customConfig.channel_mask);
+
+ status_t status = AudioStreamOut::open(
+ handle,
+ devices,
+ &customConfig,
+ address);
+
+ ALOGI("SpdifStreamOut::open() status = %d", status);
+
+ return status;
+}
+
+// Account for possibly higher sample rate.
+status_t SpdifStreamOut::getRenderPosition(uint32_t *frames)
+{
+ uint32_t halPosition = 0;
+ status_t status = AudioStreamOut::getRenderPosition(&halPosition);
+ if (status != NO_ERROR) {
+ return status;
+ }
+
+ // Accumulate a 64-bit position so that we wrap at the right place.
+ if (mRateMultiplier != 1) {
+ // Maintain a 64-bit render position.
+ int32_t deltaHalPosition = (int32_t)(halPosition - mPreviousHalPosition32);
+ mPreviousHalPosition32 = halPosition;
+ mRenderPositionHal += deltaHalPosition;
+
+ // Scale from device sample rate to application rate.
+ uint64_t renderPositionApp = mRenderPositionHal / mRateMultiplier;
+ ALOGV("SpdifStreamOut::getRenderPosition() "
+ "renderPositionAppRate = %llu = %llu / %u\n",
+ renderPositionApp, mRenderPositionHal, mRateMultiplier);
+
+ *frames = (uint32_t)renderPositionApp;
+ } else {
+ *frames = halPosition;
+ }
+ return status;
+}
+
+int SpdifStreamOut::flush()
+{
+ // FIXME Is there an issue here with flush being asynchronous?
+ mRenderPositionHal = 0;
+ mPreviousHalPosition32 = 0;
+ return AudioStreamOut::flush();
+}
+
+int SpdifStreamOut::standby()
+{
+ mRenderPositionHal = 0;
+ mPreviousHalPosition32 = 0;
+ return AudioStreamOut::standby();
+}
+
+// Account for possibly higher sample rate.
+// This is much easier when all the values are 64-bit.
+status_t SpdifStreamOut::getPresentationPosition(uint64_t *frames,
+ struct timespec *timestamp)
+{
+ uint64_t halFrames = 0;
+ status_t status = AudioStreamOut::getPresentationPosition(&halFrames, timestamp);
+ *frames = halFrames / mRateMultiplier;
+ return status;
+}
+
+size_t SpdifStreamOut::getFrameSize()
+{
+ return sizeof(int8_t);
+}
+
+ssize_t SpdifStreamOut::writeDataBurst(const void* buffer, size_t bytes)
+{
+ return AudioStreamOut::write(buffer, bytes);
+}
+
+ssize_t SpdifStreamOut::write(const void* buffer, size_t bytes)
+{
+ // Write to SPDIF wrapper. It will call back to writeDataBurst().
+ return mSpdifEncoder.write(buffer, bytes);
+}
+
+} // namespace android
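
A standalone sketch of the wrap-safe position scaling used by SpdifStreamOut::getRenderPosition() above (not part of the patch; RenderPositionScaler is a hypothetical reduction of the member logic). The HAL reports a 32-bit frame counter at the higher transmission rate; the signed 32-bit difference stays correct across counter wrap, and the 64-bit accumulator lets the division by the rate multiplier happen without losing the wrap point:

#include <cstdint>
#include <cstdio>

class RenderPositionScaler {
public:
    explicit RenderPositionScaler(uint32_t rateMultiplier)
        : mRateMultiplier(rateMultiplier) {}

    // halPosition32: latest 32-bit frame count from the HAL (may have wrapped).
    // Returns the corresponding position at the application sample rate.
    uint32_t update(uint32_t halPosition32) {
        int32_t delta = (int32_t)(halPosition32 - mPreviousHalPosition32);
        mPreviousHalPosition32 = halPosition32;
        mRenderPositionHal += delta;            // 64-bit running total
        return (uint32_t)(mRenderPositionHal / mRateMultiplier);
    }

private:
    const uint32_t mRateMultiplier;
    int64_t  mRenderPositionHal = 0;
    uint32_t mPreviousHalPosition32 = 0;
};

int main() {
    RenderPositionScaler scaler(4);      // e.g. E-AC3 bursts run at 4x the rate
    printf("%u\n", scaler.update(4000)); // -> 1000 application-rate frames
    printf("%u\n", scaler.update(8192)); // -> 2048
    return 0;
}
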
diff --git a/services/audioflinger/SpdifStreamOut.h b/services/audioflinger/SpdifStreamOut.h
new file mode 100644
index 0000000..cb82ac7
--- /dev/null
+++ b/services/audioflinger/SpdifStreamOut.h
@@ -0,0 +1,107 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_SPDIF_STREAM_OUT_H
+#define ANDROID_SPDIF_STREAM_OUT_H
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <system/audio.h>
+
+#include "AudioHwDevice.h"
+#include "AudioStreamOut.h"
+#include "SpdifStreamOut.h"
+
+#include <audio_utils/spdif/SPDIFEncoder.h>
+
+namespace android {
+
+/**
+ * Stream that is a PCM data burst in the HAL but looks like an encoded stream
+ * to the AudioFlinger. Wraps encoded data in an SPDIF wrapper per IEC61937-3.
+ */
+class SpdifStreamOut : public AudioStreamOut {
+public:
+
+ SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags);
+
+ virtual ~SpdifStreamOut() { }
+
+ virtual status_t open(
+ audio_io_handle_t handle,
+ audio_devices_t devices,
+ struct audio_config *config,
+ const char *address);
+
+ virtual status_t getRenderPosition(uint32_t *frames);
+
+ virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
+
+ /**
+ * Write audio buffer to driver. Returns number of bytes written, or a
+ * negative status_t. If at least one frame was written successfully prior to the error,
+ * it is suggested that the driver return that successful (short) byte count
+ * and then return an error in the subsequent call.
+ *
+ * If set_callback() has previously been called to enable non-blocking mode
+ * the write() is not allowed to block. It must write only the number of
+ * bytes that currently fit in the driver/hardware buffer and then return
+ * this byte count. If this is less than the requested write size the
+ * callback function must be called when more space is available in the
+ * driver/hardware buffer.
+ */
+ virtual ssize_t write(const void* buffer, size_t bytes);
+
+ virtual size_t getFrameSize();
+
+ virtual status_t flush();
+ virtual status_t standby();
+
+private:
+
+ class MySPDIFEncoder : public SPDIFEncoder
+ {
+ public:
+ MySPDIFEncoder(SpdifStreamOut *spdifStreamOut)
+ : mSpdifStreamOut(spdifStreamOut)
+ {
+ }
+
+ virtual ssize_t writeOutput(const void* buffer, size_t bytes)
+ {
+ return mSpdifStreamOut->writeDataBurst(buffer, bytes);
+ }
+ protected:
+ SpdifStreamOut * const mSpdifStreamOut;
+ };
+
+ int mRateMultiplier;
+ MySPDIFEncoder mSpdifEncoder;
+
+ // Used to implement getRenderPosition()
+ int64_t mRenderPositionHal;
+ uint32_t mPreviousHalPosition32;
+
+ ssize_t writeDataBurst(const void* data, size_t bytes);
+ ssize_t writeInternal(const void* buffer, size_t bytes);
+
+};
+
+} // namespace android
+
+#endif // ANDROID_SPDIF_STREAM_OUT_H
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 9881764..5988d2c 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -2009,7 +2009,7 @@ void AudioFlinger::PlaybackThread::readOutputParameters_l()
LOG_FATAL("HAL format %#x not supported for mixed output",
mFormat);
}
- mFrameSize = audio_stream_out_frame_size(mOutput->stream);
+ mFrameSize = mOutput->getFrameSize();
mBufferSize = mOutput->stream->common.get_buffer_size(&mOutput->stream->common);
mFrameCount = mBufferSize / mFrameSize;
if (mFrameCount & 15) {
@@ -2160,7 +2160,7 @@ status_t AudioFlinger::PlaybackThread::getRenderPosition(uint32_t *halFrames, ui
} else {
status_t status;
uint32_t frames;
- status = mOutput->stream->get_render_position(mOutput->stream, &frames);
+ status = mOutput->getRenderPosition(&frames);
*dspFrames = (size_t)frames;
return status;
}
@@ -2202,13 +2202,13 @@ uint32_t AudioFlinger::PlaybackThread::getStrategyForSession_l(int sessionId)
}
-AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::getOutput() const
+AudioStreamOut* AudioFlinger::PlaybackThread::getOutput() const
{
Mutex::Autolock _l(mLock);
return mOutput;
}
-AudioFlinger::AudioStreamOut* AudioFlinger::PlaybackThread::clearOutput()
+AudioStreamOut* AudioFlinger::PlaybackThread::clearOutput()
{
Mutex::Autolock _l(mLock);
AudioStreamOut *output = mOutput;
@@ -2354,8 +2354,7 @@ ssize_t AudioFlinger::PlaybackThread::threadLoop_write()
}
// FIXME We should have an implementation of timestamps for direct output threads.
// They are used e.g for multichannel PCM playback over HDMI.
- bytesWritten = mOutput->stream->write(mOutput->stream,
- (char *)mSinkBuffer + offset, mBytesRemaining);
+ bytesWritten = mOutput->write((char *)mSinkBuffer + offset, mBytesRemaining);
if (mUseAsyncWrite &&
((bytesWritten < 0) || (bytesWritten == (ssize_t)mBytesRemaining))) {
// do not wait for async callback in case of error of full write
@@ -2908,8 +2907,7 @@ status_t AudioFlinger::PlaybackThread::getTimestamp_l(AudioTimestamp& timestamp)
if ((mType == OFFLOAD || mType == DIRECT)
&& mOutput != NULL && mOutput->stream->get_presentation_position) {
uint64_t position64;
- int ret = mOutput->stream->get_presentation_position(
- mOutput->stream, &position64, &timestamp.mTime);
+ int ret = mOutput->getPresentationPosition(&position64, &timestamp.mTime);
if (ret == 0) {
timestamp.mPosition = (uint32_t)position64;
return NO_ERROR;
@@ -3289,7 +3287,7 @@ bool AudioFlinger::PlaybackThread::waitingAsyncCallback()
void AudioFlinger::PlaybackThread::threadLoop_standby()
{
ALOGV("Audio hardware entering standby, mixer %p, suspend count %d", this, mSuspended);
- mOutput->stream->common.standby(&mOutput->stream->common);
+ mOutput->standby();
if (mUseAsyncWrite != 0) {
// discard any pending drain or write ack by incrementing sequence
mWriteAckSequence = (mWriteAckSequence + 2) & ~1;
@@ -4058,7 +4056,7 @@ bool AudioFlinger::MixerThread::checkForNewParameter_l(const String8& keyValuePa
status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
keyValuePair.string());
if (!mStandby && status == INVALID_OPERATION) {
- mOutput->stream->common.standby(&mOutput->stream->common);
+ mOutput->standby();
mStandby = true;
mBytesWritten = 0;
status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
@@ -4400,8 +4398,8 @@ void AudioFlinger::DirectOutputThread::threadLoop_mix()
while (frameCount) {
AudioBufferProvider::Buffer buffer;
buffer.frameCount = frameCount;
- mActiveTrack->getNextBuffer(&buffer);
- if (buffer.raw == NULL) {
+ status_t status = mActiveTrack->getNextBuffer(&buffer);
+ if (status != NO_ERROR || buffer.raw == NULL) {
memset(curBuf, 0, frameCount * mFrameSize);
break;
}
@@ -4513,7 +4511,7 @@ bool AudioFlinger::DirectOutputThread::checkForNewParameter_l(const String8& key
status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
keyValuePair.string());
if (!mStandby && status == INVALID_OPERATION) {
- mOutput->stream->common.standby(&mOutput->stream->common);
+ mOutput->standby();
mStandby = true;
mBytesWritten = 0;
status = mOutput->stream->common.set_parameters(&mOutput->stream->common,
@@ -4576,9 +4574,7 @@ void AudioFlinger::DirectOutputThread::cacheParameters_l()
void AudioFlinger::DirectOutputThread::flushHw_l()
{
- if (mOutput->stream->flush != NULL) {
- mOutput->stream->flush(mOutput->stream);
- }
+ mOutput->flush();
mHwPaused = false;
}
@@ -4868,7 +4864,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr
size_t audioHALFrames =
(mOutput->stream->get_latency(mOutput->stream)*mSampleRate) / 1000;
size_t framesWritten =
- mBytesWritten / audio_stream_out_frame_size(mOutput->stream);
+ mBytesWritten / mOutput->getFrameSize();
track->presentationComplete(framesWritten, audioHALFrames);
track->reset();
tracksToRemove->add(track);
@@ -5208,7 +5204,7 @@ AudioFlinger::RecordThread::RecordThread(const sp<AudioFlinger>& audioFlinger,
}
if (initFastCapture) {
- // create a Pipe for FastMixer to write to, and for us and fast tracks to read from
+ // create a Pipe for FastCapture to write to, and for us and fast tracks to read from
NBAIO_Format format = mInputSource->format();
size_t pipeFramesP2 = roundup(mSampleRate / 25); // double-buffering of 20 ms each
size_t pipeSize = pipeFramesP2 * Format_frameSize(format);
@@ -5867,8 +5863,9 @@ sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createRe
// client expresses a preference for FAST, but we get the final say
if (*flags & IAudioFlinger::TRACK_FAST) {
if (
- // use case: callback handler
- (tid != -1) &&
+ // we formerly checked for a callback handler (non-0 tid),
+ // but that is no longer required for TRANSFER_OBTAIN mode
+ //
// frame count is not specified, or is exactly the pipe depth
((frameCount == 0) || (frameCount == mPipeFramesP2)) &&
// PCM data
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 38667b9..7692315 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -405,9 +405,7 @@ AudioFlinger::PlaybackThread::Track::Track(
mAudioTrackServerProxy(NULL),
mResumeToStopping(false),
mFlushHwPending(false),
- mPreviousValid(false),
- mPreviousFramesWritten(0)
- // mPreviousTimestamp
+ mPreviousTimestampValid(false)
{
// client == 0 implies sharedBuffer == 0
ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -864,7 +862,7 @@ void AudioFlinger::PlaybackThread::Track::reset()
if (mState == FLUSHED) {
mState = IDLE;
}
- mPreviousValid = false;
+ mPreviousTimestampValid = false;
}
}
@@ -886,19 +884,22 @@ status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& times
{
// Client should implement this using SSQ; the unpresented frame count in latch is irrelevant
if (isFastTrack()) {
- // FIXME no lock held to set mPreviousValid = false
+ // FIXME no lock held to set mPreviousTimestampValid = false
return INVALID_OPERATION;
}
sp<ThreadBase> thread = mThread.promote();
if (thread == 0) {
- // FIXME no lock held to set mPreviousValid = false
+ // FIXME no lock held to set mPreviousTimestampValid = false
return INVALID_OPERATION;
}
+
Mutex::Autolock _l(thread->mLock);
PlaybackThread *playbackThread = (PlaybackThread *)thread.get();
+
+ status_t result = INVALID_OPERATION;
if (!isOffloaded() && !isDirect()) {
if (!playbackThread->mLatchQValid) {
- mPreviousValid = false;
+ mPreviousTimestampValid = false;
return INVALID_OPERATION;
}
uint32_t unpresentedFrames =
@@ -914,36 +915,54 @@ status_t AudioFlinger::PlaybackThread::Track::getTimestamp(AudioTimestamp& times
uint32_t framesWritten = i >= 0 ?
playbackThread->mLatchQ.mFramesReleased[i] :
mAudioTrackServerProxy->framesReleased();
- bool checkPreviousTimestamp = mPreviousValid && framesWritten >= mPreviousFramesWritten;
if (framesWritten < unpresentedFrames) {
- mPreviousValid = false;
- return INVALID_OPERATION;
+ mPreviousTimestampValid = false;
+ // return invalid result
+ } else {
+ timestamp.mPosition = framesWritten - unpresentedFrames;
+ timestamp.mTime = playbackThread->mLatchQ.mTimestamp.mTime;
+ result = NO_ERROR;
}
- mPreviousFramesWritten = framesWritten;
- uint32_t position = framesWritten - unpresentedFrames;
- struct timespec time = playbackThread->mLatchQ.mTimestamp.mTime;
- if (checkPreviousTimestamp) {
- if (time.tv_sec < mPreviousTimestamp.mTime.tv_sec ||
- (time.tv_sec == mPreviousTimestamp.mTime.tv_sec &&
- time.tv_nsec < mPreviousTimestamp.mTime.tv_nsec)) {
- ALOGW("Time is going backwards");
+ } else { // offloaded or direct
+ result = playbackThread->getTimestamp_l(timestamp);
+ }
+
+ // Prevent retrograde motion in timestamp.
+ if (result == NO_ERROR) {
+ if (mPreviousTimestampValid) {
+ if (timestamp.mTime.tv_sec < mPreviousTimestamp.mTime.tv_sec ||
+ (timestamp.mTime.tv_sec == mPreviousTimestamp.mTime.tv_sec &&
+ timestamp.mTime.tv_nsec < mPreviousTimestamp.mTime.tv_nsec)) {
+ ALOGW("WARNING - retrograde timestamp time");
+ // FIXME Consider blocking this from propagating upwards.
}
+
+ // Looking at signed delta will work even when the timestamps
+ // are wrapping around.
+ int32_t deltaPosition = static_cast<int32_t>(timestamp.mPosition
+ - mPreviousTimestamp.mPosition);
// position can bobble slightly as an artifact; this hides the bobble
- static const uint32_t MINIMUM_POSITION_DELTA = 8u;
- if ((position <= mPreviousTimestamp.mPosition) ||
- (position - mPreviousTimestamp.mPosition) < MINIMUM_POSITION_DELTA) {
- position = mPreviousTimestamp.mPosition;
- time = mPreviousTimestamp.mTime;
+ static const int32_t MINIMUM_POSITION_DELTA = 8;
+ if (deltaPosition < 0) {
+#define TIME_TO_NANOS(time) ((uint64_t)time.tv_sec * 1000000000 + time.tv_nsec)
+ ALOGW("WARNING - retrograde timestamp position corrected,"
+ " %d = %u - %u, (at %llu, %llu nanos)",
+ deltaPosition,
+ timestamp.mPosition,
+ mPreviousTimestamp.mPosition,
+ TIME_TO_NANOS(timestamp.mTime),
+ TIME_TO_NANOS(mPreviousTimestamp.mTime));
+#undef TIME_TO_NANOS
+ }
+ if (deltaPosition < MINIMUM_POSITION_DELTA) {
+ // Current timestamp is bad. Use last valid timestamp.
+ timestamp = mPreviousTimestamp;
}
}
- timestamp.mPosition = position;
- timestamp.mTime = time;
mPreviousTimestamp = timestamp;
- mPreviousValid = true;
- return NO_ERROR;
+ mPreviousTimestampValid = true;
}
-
- return playbackThread->getTimestamp_l(timestamp);
+ return result;
}
status_t AudioFlinger::PlaybackThread::Track::attachAuxEffect(int EffectId)
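
A standalone sketch of the retrograde-motion guard that the Track::getTimestamp() change above introduces, reduced to plain C++ (not part of the patch; Timestamp and TimestampFilter are hypothetical stand-ins for AudioTimestamp and the mPreviousTimestamp bookkeeping). Positions are 32-bit frame counters, so the signed difference stays valid across wrap, and small backward "bobble" below a minimum delta is hidden by reusing the last accepted timestamp:

#include <cstdint>
#include <cstdio>

struct Timestamp {
    uint32_t position;   // frames presented
    int64_t  timeNs;     // CLOCK_MONOTONIC time of that position
};

class TimestampFilter {
public:
    // Returns the timestamp to report: the new one if it moved forward by at
    // least kMinimumPositionDelta frames, otherwise the last accepted one.
    Timestamp filter(const Timestamp &ts) {
        if (mValid) {
            int32_t deltaPosition =
                    static_cast<int32_t>(ts.position - mPrevious.position);
            if (deltaPosition < kMinimumPositionDelta) {
                return mPrevious;            // keep the last good timestamp
            }
        }
        mPrevious = ts;
        mValid = true;
        return ts;
    }

    void reset() { mValid = false; }         // e.g. after a flush

private:
    static constexpr int32_t kMinimumPositionDelta = 8;
    Timestamp mPrevious {};
    bool mValid = false;
};

int main() {
    TimestampFilter f;
    Timestamp a = f.filter({1000, 10});      // first timestamp: accepted
    Timestamp b = f.filter({996, 20});       // went backwards: previous reused
    printf("%u %u\n", a.position, b.position);  // prints "1000 1000"
    return 0;
}
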
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index eb9116d..00f188f 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -532,7 +532,7 @@ bool AudioPolicyService::AudioCommandThread::threadLoop()
mLock.unlock();
svc.clear();
mLock.lock();
- if (!exitPending() && mAudioCommands.isEmpty()) {
+ if (!exitPending() && (mAudioCommands.isEmpty() || waitTime != INT64_MAX)) {
// release delayed commands wake lock
release_wake_lock(mName.string());
ALOGV("AudioCommandThread() going to sleep");
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index de841c8..9c60911 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -42,7 +42,6 @@ LOCAL_SRC_FILES:= \
api1/client2/CaptureSequencer.cpp \
api1/client2/ZslProcessor3.cpp \
api2/CameraDeviceClient.cpp \
- api_pro/ProCamera2Client.cpp \
device2/Camera2Device.cpp \
device3/Camera3Device.cpp \
device3/Camera3Stream.cpp \
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 6fda9b2..8613ac6 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -55,8 +55,7 @@ status_t CameraFlashlight::createFlashlightControl(const String8& cameraId) {
status_t res = OK;
- if (mCameraModule->getRawModule()->module_api_version >=
- CAMERA_MODULE_API_VERSION_2_4) {
+ if (mCameraModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) {
mFlashControl = new ModuleFlashControl(*mCameraModule, *mCallbacks);
if (mFlashControl == NULL) {
ALOGV("%s: cannot create flash control for module api v2.4+",
@@ -66,8 +65,8 @@ status_t CameraFlashlight::createFlashlightControl(const String8& cameraId) {
} else {
uint32_t deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
- if (mCameraModule->getRawModule()->module_api_version >=
- CAMERA_MODULE_API_VERSION_2_0) {
+ if (mCameraModule->getModuleApiVersion() >=
+ CAMERA_MODULE_API_VERSION_2_0) {
camera_info info;
res = mCameraModule->getCameraInfo(
atoi(String8(cameraId).string()), &info);
@@ -224,8 +223,7 @@ status_t CameraFlashlight::prepareDeviceOpen(const String8& cameraId) {
return NO_INIT;
}
- if (mCameraModule->getRawModule()->module_api_version <
- CAMERA_MODULE_API_VERSION_2_4) {
+ if (mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) {
// framework is going to open a camera device, all flash light control
// should be closed for backward compatible support.
mFlashControl.clear();
@@ -274,8 +272,7 @@ status_t CameraFlashlight::deviceClosed(const String8& cameraId) {
if (mOpenedCameraIds.size() != 0)
return OK;
- if (mCameraModule->getRawModule()->module_api_version <
- CAMERA_MODULE_API_VERSION_2_4) {
+ if (mCameraModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_4) {
// notify torch available for all cameras with a flash
int numCameras = mCameraModule->getNumberOfCameras();
for (int i = 0; i < numCameras; i++) {
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 55f7a40..c2df489 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -52,7 +52,6 @@
#include "CameraService.h"
#include "api1/CameraClient.h"
#include "api1/Camera2Client.h"
-#include "api_pro/ProCamera2Client.h"
#include "api2/CameraDeviceClient.h"
#include "utils/CameraTraces.h"
#include "CameraDeviceFactory.h"
@@ -150,8 +149,7 @@ void CameraService::onFirstRef()
}
else {
mModule = new CameraModule(rawModule);
- const hw_module_t *common = mModule->getRawModule();
- ALOGI("Loaded \"%s\" camera module", common->name);
+ ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
mNumberOfCameras = mModule->getNumberOfCameras();
mFlashlight = new CameraFlashlight(*mModule, *this);
@@ -170,7 +168,7 @@ void CameraService::onFirstRef()
size_t conflicting_devices_length = 0;
// If using post-2.4 module version, query the cost + conflicting devices from the HAL
- if (common->module_api_version >= CAMERA_MODULE_API_VERSION_2_4) {
+ if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4) {
struct camera_info info;
status_t rc = mModule->getCameraInfo(i, &info);
if (rc == NO_ERROR) {
@@ -202,13 +200,13 @@ void CameraService::onFirstRef()
}
}
- if (common->module_api_version >= CAMERA_MODULE_API_VERSION_2_1) {
+ if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_1) {
mModule->setCallbacks(this);
}
VendorTagDescriptor::clearGlobalVendorTagDescriptor();
- if (common->module_api_version >= CAMERA_MODULE_API_VERSION_2_2) {
+ if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_2) {
setUpVendorTags();
}
@@ -458,7 +456,7 @@ status_t CameraService::getCameraCharacteristics(int cameraId,
int facing;
status_t ret = OK;
- if (mModule->getRawModule()->module_api_version < CAMERA_MODULE_API_VERSION_2_0 ||
+ if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_0 ||
getDeviceVersion(cameraId, &facing) <= CAMERA_DEVICE_API_VERSION_2_1 ) {
/**
* Backwards compatibility mode for old HALs:
@@ -551,7 +549,7 @@ int CameraService::getDeviceVersion(int cameraId, int* facing) {
}
int deviceVersion;
- if (mModule->getRawModule()->module_api_version >= CAMERA_MODULE_API_VERSION_2_0) {
+ if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_0) {
deviceVersion = info.device_version;
} else {
deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
@@ -1038,7 +1036,7 @@ status_t CameraService::connectLegacy(
/*out*/
sp<ICamera>& device) {
- int apiVersion = mModule->getRawModule()->module_api_version;
+ int apiVersion = mModule->getModuleApiVersion();
if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED &&
apiVersion < CAMERA_MODULE_API_VERSION_2_3) {
/*
@@ -1066,16 +1064,6 @@ status_t CameraService::connectLegacy(
return NO_ERROR;
}
-status_t CameraService::connectPro(const sp<IProCameraCallbacks>& cameraCb,
- int cameraId,
- const String16& clientPackageName,
- int clientUid,
- /*out*/
- sp<IProCameraUser>& device) {
- ALOGE("%s: Unimplemented, please use connectDevice", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
status_t CameraService::connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
@@ -1428,7 +1416,6 @@ status_t CameraService::onTransact(
// Permission checks
switch (code) {
case BnCameraService::CONNECT:
- case BnCameraService::CONNECT_PRO:
case BnCameraService::CONNECT_DEVICE:
case BnCameraService::CONNECT_LEGACY:
const int pid = getCallingPid();
@@ -1710,33 +1697,6 @@ void CameraService::Client::OpsCallback::opChanged(int32_t op,
}
// ----------------------------------------------------------------------------
-// IProCamera
-// ----------------------------------------------------------------------------
-
-CameraService::ProClient::ProClient(const sp<CameraService>& cameraService,
- const sp<IProCameraCallbacks>& remoteCallback,
- const String16& clientPackageName,
- int cameraId,
- int cameraFacing,
- int clientPid,
- uid_t clientUid,
- int servicePid)
- : CameraService::BasicClient(cameraService, IInterface::asBinder(remoteCallback),
- clientPackageName, cameraId, cameraFacing,
- clientPid, clientUid, servicePid)
-{
- mRemoteCallback = remoteCallback;
-}
-
-CameraService::ProClient::~ProClient() {
-}
-
-void CameraService::ProClient::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
- const CaptureResultExtras& resultExtras) {
- mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
-}
-
-// ----------------------------------------------------------------------------
// CameraState
// ----------------------------------------------------------------------------
@@ -1876,16 +1836,14 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) {
return NO_ERROR;
}
- const hw_module_t* common = mModule->getRawModule();
- result = String8::format("Camera module HAL API version: %#x\n", common->hal_api_version);
- result.appendFormat("Camera module API version: %#x\n", common->module_api_version);
- result.appendFormat("Camera module name: %s\n", common->name);
- result.appendFormat("Camera module author: %s\n", common->author);
+ result = String8::format("Camera module HAL API version: 0x%x\n", mModule->getHalApiVersion());
+ result.appendFormat("Camera module API version: 0x%x\n", mModule->getModuleApiVersion());
+ result.appendFormat("Camera module name: %s\n", mModule->getModuleName());
+ result.appendFormat("Camera module author: %s\n", mModule->getModuleAuthor());
result.appendFormat("Number of camera devices: %d\n", mNumberOfCameras);
String8 activeClientString = mActiveClientManager.toString();
result.appendFormat("Active Camera Clients:\n%s", activeClientString.string());
-
sp<VendorTagDescriptor> desc = VendorTagDescriptor::getGlobalVendorTagDescriptor();
if (desc == NULL) {
result.appendFormat("Vendor tags left unimplemented.\n");
@@ -1932,7 +1890,7 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) {
info.facing == CAMERA_FACING_BACK ? "BACK" : "FRONT");
result.appendFormat(" Orientation: %d\n", info.orientation);
int deviceVersion;
- if (common->module_api_version < CAMERA_MODULE_API_VERSION_2_0) {
+ if (mModule->getModuleApiVersion() < CAMERA_MODULE_API_VERSION_2_0) {
deviceVersion = CAMERA_DEVICE_API_VERSION_1_0;
} else {
deviceVersion = info.device_version;
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 53420e5..53f1c72 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -27,8 +27,6 @@
#include <camera/ICamera.h>
#include <camera/ICameraClient.h>
-#include <camera/IProCameraUser.h>
-#include <camera/IProCameraCallbacks.h>
#include <camera/camera2/ICameraDeviceUser.h>
#include <camera/camera2/ICameraDeviceCallbacks.h>
#include <camera/VendorTagDescriptor.h>
@@ -127,11 +125,6 @@ public:
/*out*/
sp<ICamera>& device);
- virtual status_t connectPro(const sp<IProCameraCallbacks>& cameraCb,
- int cameraId, const String16& clientPackageName, int clientUid,
- /*out*/
- sp<IProCameraUser>& device);
-
virtual status_t connectDevice(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
@@ -195,7 +188,7 @@ public:
// virtual inheritance
virtual sp<IBinder> asBinderWrapper() = 0;
- // Return the remote callback binder object (e.g. IProCameraCallbacks)
+ // Return the remote callback binder object (e.g. ICameraDeviceCallbacks)
sp<IBinder> getRemote() {
return mRemoteBinder;
}
@@ -330,51 +323,6 @@ public:
}; // class Client
- class ProClient : public BnProCameraUser, public BasicClient {
- public:
- typedef IProCameraCallbacks TCamCallbacks;
-
- ProClient(const sp<CameraService>& cameraService,
- const sp<IProCameraCallbacks>& remoteCallback,
- const String16& clientPackageName,
- int cameraId,
- int cameraFacing,
- int clientPid,
- uid_t clientUid,
- int servicePid);
-
- virtual ~ProClient();
-
- const sp<IProCameraCallbacks>& getRemoteCallback() {
- return mRemoteCallback;
- }
-
- /***
- IProCamera implementation
- ***/
- virtual status_t connect(const sp<IProCameraCallbacks>& callbacks)
- = 0;
- virtual status_t exclusiveTryLock() = 0;
- virtual status_t exclusiveLock() = 0;
- virtual status_t exclusiveUnlock() = 0;
-
- virtual bool hasExclusiveLock() = 0;
-
- // Note that the callee gets a copy of the metadata.
- virtual int submitRequest(camera_metadata_t* metadata,
- bool streaming = false) = 0;
- virtual status_t cancelRequest(int requestId) = 0;
-
- // Callbacks from camera service
- virtual void onExclusiveLockStolen() = 0;
-
- virtual void notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
- const CaptureResultExtras& resultExtras);
- protected:
-
- sp<IProCameraCallbacks> mRemoteCallback;
- }; // class ProClient
-
typedef std::shared_ptr<resource_policy::ClientDescriptor<String8,
sp<CameraService::BasicClient>>> DescriptorPtr;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 0e2311c..8587e0e 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -645,9 +645,6 @@ void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
}
}
-// TODO: refactor the code below this with IProCameraUser.
-// it's 100% copy-pasted, so lets not change it right now to make it easier.
-
void CameraDeviceClient::detachDevice() {
if (mDevice == 0) return;
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
deleted file mode 100644
index a977494..0000000
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
+++ /dev/null
@@ -1,445 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#define LOG_TAG "ProCamera2Client"
-#define ATRACE_TAG ATRACE_TAG_CAMERA
-//#define LOG_NDEBUG 0
-
-#include <utils/Log.h>
-#include <utils/Trace.h>
-
-#include <cutils/properties.h>
-#include <gui/Surface.h>
-#include <gui/Surface.h>
-
-#include "api_pro/ProCamera2Client.h"
-#include "common/CameraDeviceBase.h"
-
-namespace android {
-using namespace camera2;
-
-// Interface used by CameraService
-
-ProCamera2Client::ProCamera2Client(const sp<CameraService>& cameraService,
- const sp<IProCameraCallbacks>& remoteCallback,
- const String16& clientPackageName,
- int cameraId,
- int cameraFacing,
- int clientPid,
- uid_t clientUid,
- int servicePid) :
- Camera2ClientBase(cameraService, remoteCallback, clientPackageName,
- cameraId, cameraFacing, clientPid, clientUid, servicePid)
-{
- ATRACE_CALL();
- ALOGI("ProCamera %d: Opened", cameraId);
-
- mExclusiveLock = false;
-}
-
-status_t ProCamera2Client::initialize(CameraModule *module)
-{
- ATRACE_CALL();
- status_t res;
-
- res = Camera2ClientBase::initialize(module);
- if (res != OK) {
- return res;
- }
-
- String8 threadName;
- mFrameProcessor = new FrameProcessorBase(mDevice);
- threadName = String8::format("PC2-%d-FrameProc", mCameraId);
- mFrameProcessor->run(threadName.string());
-
- mFrameProcessor->registerListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
- FRAME_PROCESSOR_LISTENER_MAX_ID,
- /*listener*/this);
-
- return OK;
-}
-
-ProCamera2Client::~ProCamera2Client() {
-}
-
-status_t ProCamera2Client::exclusiveTryLock() {
- ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
-
- Mutex::Autolock icl(mBinderSerializationLock);
- SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-
- if (!mDevice.get()) return PERMISSION_DENIED;
-
- if (!mExclusiveLock) {
- mExclusiveLock = true;
-
- if (mRemoteCallback != NULL) {
- mRemoteCallback->onLockStatusChanged(
- IProCameraCallbacks::LOCK_ACQUIRED);
- }
-
- ALOGV("%s: exclusive lock acquired", __FUNCTION__);
-
- return OK;
- }
-
- // TODO: have a PERMISSION_DENIED case for when someone else owns the lock
-
- // don't allow recursive locking
- ALOGW("%s: exclusive lock already exists - recursive locking is not"
- "allowed", __FUNCTION__);
-
- return ALREADY_EXISTS;
-}
-
-status_t ProCamera2Client::exclusiveLock() {
- ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
-
- Mutex::Autolock icl(mBinderSerializationLock);
- SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-
- if (!mDevice.get()) return PERMISSION_DENIED;
-
- /**
- * TODO: this should asynchronously 'wait' until the lock becomes available
- * if another client already has an exclusive lock.
- *
- * once we have proper sharing support this will need to do
- * more than just return immediately
- */
- if (!mExclusiveLock) {
- mExclusiveLock = true;
-
- if (mRemoteCallback != NULL) {
- mRemoteCallback->onLockStatusChanged(IProCameraCallbacks::LOCK_ACQUIRED);
- }
-
- ALOGV("%s: exclusive lock acquired", __FUNCTION__);
-
- return OK;
- }
-
- // don't allow recursive locking
- ALOGW("%s: exclusive lock already exists - recursive locking is not allowed"
- , __FUNCTION__);
- return ALREADY_EXISTS;
-}
-
-status_t ProCamera2Client::exclusiveUnlock() {
- ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
-
- Mutex::Autolock icl(mBinderSerializationLock);
- SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-
- // don't allow unlocking if we have no lock
- if (!mExclusiveLock) {
- ALOGW("%s: cannot unlock, no lock was held in the first place",
- __FUNCTION__);
- return BAD_VALUE;
- }
-
- mExclusiveLock = false;
- if (mRemoteCallback != NULL ) {
- mRemoteCallback->onLockStatusChanged(
- IProCameraCallbacks::LOCK_RELEASED);
- }
- ALOGV("%s: exclusive lock released", __FUNCTION__);
-
- return OK;
-}
-
-bool ProCamera2Client::hasExclusiveLock() {
- Mutex::Autolock icl(mBinderSerializationLock);
- return mExclusiveLock;
-}
-
-void ProCamera2Client::onExclusiveLockStolen() {
- ALOGV("%s: ProClient lost exclusivity (id %d)",
- __FUNCTION__, mCameraId);
-
- Mutex::Autolock icl(mBinderSerializationLock);
- SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-
- if (mExclusiveLock && mRemoteCallback.get() != NULL) {
- mRemoteCallback->onLockStatusChanged(
- IProCameraCallbacks::LOCK_STOLEN);
- }
-
- mExclusiveLock = false;
-
- //TODO: we should not need to detach the device, merely reset it.
- detachDevice();
-}
-
-status_t ProCamera2Client::submitRequest(camera_metadata_t* request,
- bool streaming) {
- ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
-
- Mutex::Autolock icl(mBinderSerializationLock);
-
- if (!mDevice.get()) return DEAD_OBJECT;
-
- if (!mExclusiveLock) {
- return PERMISSION_DENIED;
- }
-
- CameraMetadata metadata(request);
-
- if (!enforceRequestPermissions(metadata)) {
- return PERMISSION_DENIED;
- }
-
- if (streaming) {
- return mDevice->setStreamingRequest(metadata);
- } else {
- return mDevice->capture(metadata);
- }
-
- // unreachable. thx gcc for a useless warning
- return OK;
-}
-
-status_t ProCamera2Client::cancelRequest(int requestId) {
- (void)requestId;
- ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
-
- Mutex::Autolock icl(mBinderSerializationLock);
-
- if (!mDevice.get()) return DEAD_OBJECT;
-
- if (!mExclusiveLock) {
- return PERMISSION_DENIED;
- }
-
- // TODO: implement
- ALOGE("%s: not fully implemented yet", __FUNCTION__);
- return INVALID_OPERATION;
-}
-
-status_t ProCamera2Client::deleteStream(int streamId) {
- ATRACE_CALL();
- ALOGV("%s (streamId = 0x%x)", __FUNCTION__, streamId);
-
- status_t res;
- if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
-
- Mutex::Autolock icl(mBinderSerializationLock);
-
- if (!mDevice.get()) return DEAD_OBJECT;
- mDevice->clearStreamingRequest();
-
- status_t code;
- if ((code = mDevice->waitUntilDrained()) != OK) {
- ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__, code);
- }
-
- return mDevice->deleteStream(streamId);
-}
-
-status_t ProCamera2Client::createStream(int width, int height, int format,
- const sp<IGraphicBufferProducer>& bufferProducer,
- /*out*/
- int* streamId)
-{
- if (streamId) {
- *streamId = -1;
- }
-
- ATRACE_CALL();
- ALOGV("%s (w = %d, h = %d, f = 0x%x)", __FUNCTION__, width, height, format);
-
- status_t res;
- if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
-
- Mutex::Autolock icl(mBinderSerializationLock);
-
- if (!mDevice.get()) return DEAD_OBJECT;
-
- sp<IBinder> binder;
- sp<ANativeWindow> window;
- if (bufferProducer != 0) {
- binder = IInterface::asBinder(bufferProducer);
- window = new Surface(bufferProducer);
- }
-
- return mDevice->createStream(window, width, height, format,
- HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0,
- streamId);
-}
-
-// Create a request object from a template.
-// -- Caller owns the newly allocated metadata
-status_t ProCamera2Client::createDefaultRequest(int templateId,
- /*out*/
- camera_metadata** request)
-{
- ATRACE_CALL();
- ALOGV("%s (templateId = 0x%x)", __FUNCTION__, templateId);
-
- if (request) {
- *request = NULL;
- }
-
- status_t res;
- if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
-
- Mutex::Autolock icl(mBinderSerializationLock);
-
- if (!mDevice.get()) return DEAD_OBJECT;
-
- CameraMetadata metadata;
- if ( (res = mDevice->createDefaultRequest(templateId, &metadata) ) == OK) {
- *request = metadata.release();
- }
-
- return res;
-}
-
-status_t ProCamera2Client::getCameraInfo(int cameraId,
- /*out*/
- camera_metadata** info)
-{
- if (cameraId != mCameraId) {
- return INVALID_OPERATION;
- }
-
- Mutex::Autolock icl(mBinderSerializationLock);
-
- if (!mDevice.get()) return DEAD_OBJECT;
-
- CameraMetadata deviceInfo = mDevice->info();
- *info = deviceInfo.release();
-
- return OK;
-}
-
-status_t ProCamera2Client::dump(int fd, const Vector<String16>& args) {
- String8 result;
- result.appendFormat("ProCamera2Client[%d] (%p) PID: %d, dump:\n",
- mCameraId,
- (getRemoteCallback() != NULL ?
- IInterface::asBinder(getRemoteCallback()).get() : NULL),
- mClientPid);
- result.append(" State:\n");
- write(fd, result.string(), result.size());
-
- // TODO: print dynamic/request section from most recent requests
- mFrameProcessor->dump(fd, args);
- return dumpDevice(fd, args);
-}
-
-// IProCameraUser interface
-
-void ProCamera2Client::detachDevice() {
- if (mDevice == 0) return;
-
- ALOGV("Camera %d: Stopping processors", mCameraId);
-
- mFrameProcessor->removeListener(FRAME_PROCESSOR_LISTENER_MIN_ID,
- FRAME_PROCESSOR_LISTENER_MAX_ID,
- /*listener*/this);
- mFrameProcessor->requestExit();
- ALOGV("Camera %d: Waiting for threads", mCameraId);
- mFrameProcessor->join();
- ALOGV("Camera %d: Disconnecting device", mCameraId);
-
- // WORKAROUND: HAL refuses to disconnect while there's streams in flight
- {
- mDevice->clearStreamingRequest();
-
- status_t code;
- if ((code = mDevice->waitUntilDrained()) != OK) {
- ALOGE("%s: waitUntilDrained failed with code 0x%x", __FUNCTION__,
- code);
- }
- }
-
- Camera2ClientBase::detachDevice();
-}
-
-void ProCamera2Client::onResultAvailable(const CaptureResult& result) {
- ATRACE_CALL();
- ALOGV("%s", __FUNCTION__);
-
- Mutex::Autolock icl(mBinderSerializationLock);
- SharedCameraCallbacks::Lock l(mSharedCameraCallbacks);
-
- if (mRemoteCallback != NULL) {
- CameraMetadata tmp(result.mMetadata);
- camera_metadata_t* meta = tmp.release();
- ALOGV("%s: meta = %p ", __FUNCTION__, meta);
- mRemoteCallback->onResultReceived(result.mResultExtras.requestId, meta);
- tmp.acquire(meta);
- }
-}
-
-bool ProCamera2Client::enforceRequestPermissions(CameraMetadata& metadata) {
-
- const int pid = IPCThreadState::self()->getCallingPid();
- const int selfPid = getpid();
- camera_metadata_entry_t entry;
-
- /**
- * Mixin default important security values
- * - android.led.transmit = defaulted ON
- */
- CameraMetadata staticInfo = mDevice->info();
- entry = staticInfo.find(ANDROID_LED_AVAILABLE_LEDS);
- for(size_t i = 0; i < entry.count; ++i) {
- uint8_t led = entry.data.u8[i];
-
- switch(led) {
- case ANDROID_LED_AVAILABLE_LEDS_TRANSMIT: {
- uint8_t transmitDefault = ANDROID_LED_TRANSMIT_ON;
- if (!metadata.exists(ANDROID_LED_TRANSMIT)) {
- metadata.update(ANDROID_LED_TRANSMIT,
- &transmitDefault, 1);
- }
- break;
- }
- }
- }
-
- // We can do anything!
- if (pid == selfPid) {
- return true;
- }
-
- /**
- * Permission check special fields in the request
- * - android.led.transmit = android.permission.CAMERA_DISABLE_TRANSMIT
- */
- entry = metadata.find(ANDROID_LED_TRANSMIT);
- if (entry.count > 0 && entry.data.u8[0] != ANDROID_LED_TRANSMIT_ON) {
- String16 permissionString =
- String16("android.permission.CAMERA_DISABLE_TRANSMIT_LED");
- if (!checkCallingPermission(permissionString)) {
- const int uid = IPCThreadState::self()->getCallingUid();
- ALOGE("Permission Denial: "
- "can't disable transmit LED pid=%d, uid=%d", pid, uid);
- return false;
- }
- }
-
- return true;
-}
-
-} // namespace android
diff --git a/services/camera/libcameraservice/api_pro/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
deleted file mode 100644
index 7f5f6ac..0000000
--- a/services/camera/libcameraservice/api_pro/ProCamera2Client.h
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H
-#define ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H
-
-#include "CameraService.h"
-#include "common/FrameProcessorBase.h"
-#include "common/Camera2ClientBase.h"
-#include "device2/Camera2Device.h"
-#include "camera/CaptureResult.h"
-
-namespace android {
-
-class IMemory;
-/**
- * Implements the binder IProCameraUser API,
- * meant for HAL2-level private API access.
- */
-class ProCamera2Client :
- public Camera2ClientBase<CameraService::ProClient>,
- public camera2::FrameProcessorBase::FilteredListener
-{
-public:
- /**
- * IProCameraUser interface (see IProCameraUser for details)
- */
- virtual status_t exclusiveTryLock();
- virtual status_t exclusiveLock();
- virtual status_t exclusiveUnlock();
-
- virtual bool hasExclusiveLock();
-
- // Note that the callee gets a copy of the metadata.
- virtual int submitRequest(camera_metadata_t* metadata,
- bool streaming = false);
- virtual status_t cancelRequest(int requestId);
-
- virtual status_t deleteStream(int streamId);
-
- virtual status_t createStream(
- int width,
- int height,
- int format,
- const sp<IGraphicBufferProducer>& bufferProducer,
- /*out*/
- int* streamId);
-
- // Create a request object from a template.
- // -- Caller owns the newly allocated metadata
- virtual status_t createDefaultRequest(int templateId,
- /*out*/
- camera_metadata** request);
-
- // Get the static metadata for the camera
- // -- Caller owns the newly allocated metadata
- virtual status_t getCameraInfo(int cameraId,
- /*out*/
- camera_metadata** info);
-
- /**
- * Interface used by CameraService
- */
-
- ProCamera2Client(const sp<CameraService>& cameraService,
- const sp<IProCameraCallbacks>& remoteCallback,
- const String16& clientPackageName,
- int cameraId,
- int cameraFacing,
- int clientPid,
- uid_t clientUid,
- int servicePid);
- virtual ~ProCamera2Client();
-
- virtual status_t initialize(CameraModule *module);
-
- virtual status_t dump(int fd, const Vector<String16>& args);
-
- // Callbacks from camera service
- virtual void onExclusiveLockStolen();
-
- /**
- * Interface used by independent components of ProCamera2Client.
- */
-
-protected:
- /** FilteredListener implementation **/
- virtual void onResultAvailable(const CaptureResult& result);
-
- virtual void detachDevice();
-
-private:
- /** IProCameraUser interface-related private members */
-
- /** Preview callback related members */
- sp<camera2::FrameProcessorBase> mFrameProcessor;
- static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0;
- static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
-
- /** Utility members */
- bool enforceRequestPermissions(CameraMetadata& metadata);
-
- // Whether or not we have an exclusive lock on the device
- // - if no we can't modify the request queue.
- // note that creating/deleting streams we own is still OK
- bool mExclusiveLock;
-};
-
-}; // namespace android
-
-#endif
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 0415d67..c0c2314 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -337,7 +337,6 @@ void Camera2ClientBase<TClientBase>::SharedCameraCallbacks::clear() {
mRemoteCallback.clear();
}
-template class Camera2ClientBase<CameraService::ProClient>;
template class Camera2ClientBase<CameraService::Client>;
template class Camera2ClientBase<CameraDeviceClientBase>;
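For readers unfamiliar with the pattern, the remaining `template class Camera2ClientBase<...>;` lines are explicit template instantiations: the member definitions live in this .cpp, and every instantiation that other translation units link against must be listed here (the ProClient one is simply no longer needed). A minimal standalone sketch of the idiom, using toy names rather than the AOSP types:

// toy_explicit_instantiation.cpp -- illustrative only; ClientBase/FooClient are made up.
#include <cstdio>

template <typename TClientBase>
class ClientBase {
public:
    void disconnect();              // declared here, defined below in this .cpp
};

template <typename TClientBase>
void ClientBase<TClientBase>::disconnect() {
    std::puts("disconnect");
}

struct FooClient {};

// Emit ClientBase<FooClient>'s member definitions in this translation unit so
// other files can link against it without seeing the bodies (mirrors the lines above).
template class ClientBase<FooClient>;

int main() {
    ClientBase<FooClient> c;
    c.disconnect();
    return 0;
}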
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index eb21d55..168ea0a 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -36,7 +36,7 @@ public:
typedef typename TClientBase::TCamCallbacks TCamCallbacks;
/**
- * Base binder interface (see ICamera/IProCameraUser for details)
+ * Base binder interface (see ICamera/ICameraDeviceUser for details)
*/
virtual status_t connect(const sp<TCamCallbacks>& callbacks);
virtual void disconnect();
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
index 5f767ad..e5b12ae 100644
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ b/services/camera/libcameraservice/common/CameraModule.cpp
@@ -54,14 +54,12 @@ CameraModule::CameraModule(camera_module_t *module) {
}
mModule = module;
- for (int i = 0; i < MAX_CAMERAS_PER_MODULE; i++) {
- mCameraInfoCached[i] = false;
- }
+ mCameraInfoMap.setCapacity(getNumberOfCameras());
}
int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
Mutex::Autolock lock(mCameraInfoLock);
- if (cameraId < 0 || cameraId >= MAX_CAMERAS_PER_MODULE) {
+ if (cameraId < 0) {
ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId);
return -EINVAL;
}
@@ -72,21 +70,28 @@ int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
return mModule->get_camera_info(cameraId, info);
}
- camera_info &wrappedInfo = mCameraInfo[cameraId];
- if (!mCameraInfoCached[cameraId]) {
- camera_info rawInfo;
+ ssize_t index = mCameraInfoMap.indexOfKey(cameraId);
+ if (index == NAME_NOT_FOUND) {
+ // Get camera info from raw module and cache it
+ camera_info rawInfo, cameraInfo;
int ret = mModule->get_camera_info(cameraId, &rawInfo);
if (ret != 0) {
return ret;
}
- CameraMetadata &m = mCameraCharacteristics[cameraId];
+ CameraMetadata m;
m = rawInfo.static_camera_characteristics;
deriveCameraCharacteristicsKeys(rawInfo.device_version, m);
- wrappedInfo = rawInfo;
- wrappedInfo.static_camera_characteristics = m.getAndLock();
- mCameraInfoCached[cameraId] = true;
+ mCameraCharacteristicsMap.add(cameraId, m);
+ cameraInfo = rawInfo;
+ cameraInfo.static_camera_characteristics =
+ mCameraCharacteristicsMap.valueFor(cameraId).getAndLock();
+ mCameraInfoMap.add(cameraId, cameraInfo);
+ index = mCameraInfoMap.indexOfKey(cameraId);
}
- *info = wrappedInfo;
+
+ assert(index != NAME_NOT_FOUND);
+ // return the cached camera info
+ *info = mCameraInfoMap[index];
return 0;
}
@@ -99,10 +104,6 @@ int CameraModule::openLegacy(
return mModule->open_legacy(&mModule->common, id, halVersion, device);
}
-const hw_module_t* CameraModule::getRawModule() {
- return &mModule->common;
-}
-
int CameraModule::getNumberOfCameras() {
return mModule->get_number_of_cameras();
}
@@ -125,7 +126,6 @@ int CameraModule::setTorchMode(const char* camera_id, bool enable) {
return mModule->set_torch_mode(camera_id, enable);
}
-
status_t CameraModule::filterOpenErrorCode(status_t err) {
switch(err) {
case NO_ERROR:
@@ -139,6 +139,25 @@ status_t CameraModule::filterOpenErrorCode(status_t err) {
return -ENODEV;
}
+uint16_t CameraModule::getModuleApiVersion() {
+ return mModule->common.module_api_version;
+}
+
+const char* CameraModule::getModuleName() {
+ return mModule->common.name;
+}
+
+uint16_t CameraModule::getHalApiVersion() {
+ return mModule->common.hal_api_version;
+}
+
+const char* CameraModule::getModuleAuthor() {
+ return mModule->common.author;
+}
+
+void* CameraModule::getDso() {
+ return mModule->common.dso;
+}
}; // namespace android
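The change above drops the fixed MAX_CAMERAS_PER_MODULE arrays in favour of per-camera-ID caching: the first getCameraInfo() call for an ID queries the HAL, derives the characteristics, and stores the result; later calls return the cached copy. A standalone sketch of that lazy-cache shape, using std::map instead of KeyedVector (all names below are illustrative, not the CameraModule ones):

// lazy_info_cache.cpp -- illustrative only, not the CameraModule implementation.
#include <cstdio>
#include <map>
#include <mutex>

struct CameraInfo { int facing; int orientation; };

class InfoCache {
public:
    // Query the (stubbed) HAL only once per camera ID; afterwards serve the cache.
    CameraInfo getCameraInfo(int cameraId) {
        std::lock_guard<std::mutex> lock(mLock);
        auto it = mCache.find(cameraId);
        if (it == mCache.end()) {
            CameraInfo raw = queryHal(cameraId);         // expensive, done once per ID
            it = mCache.emplace(cameraId, raw).first;    // cache for later lookups
        }
        return it->second;
    }

private:
    CameraInfo queryHal(int cameraId) { return CameraInfo{cameraId % 2, 90}; }  // stub

    std::mutex mLock;                    // plays the role of mCameraInfoLock
    std::map<int, CameraInfo> mCache;    // plays the role of mCameraInfoMap
};

int main() {
    InfoCache cache;
    CameraInfo info = cache.getCameraInfo(0);
    std::printf("facing=%d orientation=%d\n", info.facing, info.orientation);
    return 0;
}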
diff --git a/services/camera/libcameraservice/common/CameraModule.h b/services/camera/libcameraservice/common/CameraModule.h
index 16207aa..e285b21 100644
--- a/services/camera/libcameraservice/common/CameraModule.h
+++ b/services/camera/libcameraservice/common/CameraModule.h
@@ -20,10 +20,7 @@
#include <hardware/camera.h>
#include <camera/CameraMetadata.h>
#include <utils/Mutex.h>
-
-/* This needs to be increased if we can have more cameras */
-#define MAX_CAMERAS_PER_MODULE 2
-
+#include <utils/KeyedVector.h>
namespace android {
/**
@@ -37,7 +34,6 @@ class CameraModule {
public:
CameraModule(camera_module_t *module);
- const hw_module_t* getRawModule();
int getCameraInfo(int cameraId, struct camera_info *info);
int getNumberOfCameras(void);
int open(const char* id, struct hw_device_t** device);
@@ -46,6 +42,12 @@ public:
bool isVendorTagDefined();
void getVendorTagOps(vendor_tag_ops_t* ops);
int setTorchMode(const char* camera_id, bool enable);
+ uint16_t getModuleApiVersion();
+ const char* getModuleName();
+ uint16_t getHalApiVersion();
+ const char* getModuleAuthor();
+ // Only used by CameraModuleFixture native test. Do NOT use elsewhere.
+ void *getDso();
private:
// Derive camera characteristics keys defined after HAL device version
@@ -53,9 +55,8 @@ private:
status_t filterOpenErrorCode(status_t err);
camera_module_t *mModule;
- CameraMetadata mCameraCharacteristics[MAX_CAMERAS_PER_MODULE];
- camera_info mCameraInfo[MAX_CAMERAS_PER_MODULE];
- bool mCameraInfoCached[MAX_CAMERAS_PER_MODULE];
+ KeyedVector<int, camera_info> mCameraInfoMap;
+ KeyedVector<int, CameraMetadata> mCameraCharacteristicsMap;
Mutex mCameraInfoLock;
};
diff --git a/services/camera/libcameraservice/device1/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
index f5ebbf8..7f14cd4 100644
--- a/services/camera/libcameraservice/device1/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
@@ -97,7 +97,7 @@ public:
if (res != OK) return res;
int rc = OK;
- if (module->getRawModule()->module_api_version >= CAMERA_MODULE_API_VERSION_2_3 &&
+ if (module->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_3 &&
info.device_version > CAMERA_DEVICE_API_VERSION_1_0) {
// Open higher version camera device as HAL1.0 device.
rc = module->openLegacy(mName.string(),
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 96bed0d..0c739e9 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -157,33 +157,9 @@ status_t Camera3OutputStream::returnBufferCheckedLocked(
ALOG_ASSERT(output, "Expected output to be true");
status_t res;
- sp<Fence> releaseFence;
-
- /**
- * Fence management - calculate Release Fence
- */
- if (buffer.status == CAMERA3_BUFFER_STATUS_ERROR) {
- if (buffer.release_fence != -1) {
- ALOGE("%s: Stream %d: HAL should not set release_fence(%d) when "
- "there is an error", __FUNCTION__, mId, buffer.release_fence);
- close(buffer.release_fence);
- }
-
- /**
- * Reassign release fence as the acquire fence in case of error
- */
- releaseFence = new Fence(buffer.acquire_fence);
- } else {
- res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
- if (res != OK) {
- ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
- __FUNCTION__, mId, strerror(-res), res);
- return res;
- }
-
- releaseFence = new Fence(buffer.release_fence);
- }
+ // Fence management - always honor release fence from HAL
+ sp<Fence> releaseFence = new Fence(buffer.release_fence);
int anwReleaseFence = releaseFence->dup();
/**
@@ -217,6 +193,13 @@ status_t Camera3OutputStream::returnBufferCheckedLocked(
mTraceFirstBuffer = false;
}
+ res = native_window_set_buffers_timestamp(mConsumer.get(), timestamp);
+ if (res != OK) {
+ ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+ __FUNCTION__, mId, strerror(-res), res);
+ return res;
+ }
+
res = currentConsumer->queueBuffer(currentConsumer.get(),
container_of(buffer.buffer, ANativeWindowBuffer, handle),
anwReleaseFence);
diff --git a/services/mediaresourcemanager/Android.mk b/services/mediaresourcemanager/Android.mk
new file mode 100644
index 0000000..84218cf
--- /dev/null
+++ b/services/mediaresourcemanager/Android.mk
@@ -0,0 +1,18 @@
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := ResourceManagerService.cpp
+
+LOCAL_SHARED_LIBRARIES := libmedia libstagefright libbinder libutils liblog
+
+LOCAL_MODULE:= libresourcemanagerservice
+
+LOCAL_32_BIT_ONLY := true
+
+LOCAL_C_INCLUDES += \
+ $(TOPDIR)frameworks/av/include
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
new file mode 100644
index 0000000..7296d47
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -0,0 +1,345 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerService"
+#include <utils/Log.h>
+
+#include <binder/IServiceManager.h>
+#include <dirent.h>
+#include <media/stagefright/ProcessInfo.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/time.h>
+#include <unistd.h>
+
+#include "ResourceManagerService.h"
+
+namespace android {
+
+template <typename T>
+static String8 getString(const Vector<T> &items) {
+ String8 itemsStr;
+ for (size_t i = 0; i < items.size(); ++i) {
+ itemsStr.appendFormat("%s ", items[i].toString().string());
+ }
+ return itemsStr;
+}
+
+static bool hasResourceType(String8 type, Vector<MediaResource> resources) {
+ for (size_t i = 0; i < resources.size(); ++i) {
+ if (resources[i].mType == type) {
+ return true;
+ }
+ }
+ return false;
+}
+
+static bool hasResourceType(String8 type, ResourceInfos infos) {
+ for (size_t i = 0; i < infos.size(); ++i) {
+ if (hasResourceType(type, infos[i].resources)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+static ResourceInfos& getResourceInfosForEdit(
+ int pid,
+ PidResourceInfosMap& map) {
+ ssize_t index = map.indexOfKey(pid);
+ if (index < 0) {
+ // new pid
+ ResourceInfos infosForPid;
+ map.add(pid, infosForPid);
+ }
+
+ return map.editValueFor(pid);
+}
+
+static ResourceInfo& getResourceInfoForEdit(
+ int64_t clientId,
+ const sp<IResourceManagerClient> client,
+ ResourceInfos& infos) {
+ for (size_t i = 0; i < infos.size(); ++i) {
+ if (infos[i].clientId == clientId) {
+ return infos.editItemAt(i);
+ }
+ }
+ ResourceInfo info;
+ info.clientId = clientId;
+ info.client = client;
+ infos.push_back(info);
+ return infos.editItemAt(infos.size() - 1);
+}
+
+ResourceManagerService::ResourceManagerService()
+ : mProcessInfo(new ProcessInfo()),
+ mSupportsMultipleSecureCodecs(true),
+ mSupportsSecureWithNonSecureCodec(true) {}
+
+ResourceManagerService::ResourceManagerService(sp<ProcessInfoInterface> processInfo)
+ : mProcessInfo(processInfo),
+ mSupportsMultipleSecureCodecs(true),
+ mSupportsSecureWithNonSecureCodec(true) {}
+
+ResourceManagerService::~ResourceManagerService() {}
+
+void ResourceManagerService::config(const Vector<MediaResourcePolicy> &policies) {
+ ALOGV("config(%s)", getString(policies).string());
+
+ Mutex::Autolock lock(mLock);
+ for (size_t i = 0; i < policies.size(); ++i) {
+ String8 type = policies[i].mType;
+ uint64_t value = policies[i].mValue;
+ if (type == kPolicySupportsMultipleSecureCodecs) {
+ mSupportsMultipleSecureCodecs = (value != 0);
+ } else if (type == kPolicySupportsSecureWithNonSecureCodec) {
+ mSupportsSecureWithNonSecureCodec = (value != 0);
+ }
+ }
+}
+
+void ResourceManagerService::addResource(
+ int pid,
+ int64_t clientId,
+ const sp<IResourceManagerClient> client,
+ const Vector<MediaResource> &resources) {
+ ALOGV("addResource(pid %d, clientId %lld, resources %s)",
+ pid, (long long) clientId, getString(resources).string());
+
+ Mutex::Autolock lock(mLock);
+ ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
+ ResourceInfo& info = getResourceInfoForEdit(clientId, client, infos);
+ info.resources.appendVector(resources);
+}
+
+void ResourceManagerService::removeResource(int64_t clientId) {
+ ALOGV("removeResource(%lld)", (long long) clientId);
+
+ Mutex::Autolock lock(mLock);
+ bool found = false;
+ for (size_t i = 0; i < mMap.size(); ++i) {
+ ResourceInfos &infos = mMap.editValueAt(i);
+ for (size_t j = 0; j < infos.size();) {
+ if (infos[j].clientId == clientId) {
+ j = infos.removeAt(j);
+ found = true;
+ } else {
+ ++j;
+ }
+ }
+ if (found) {
+ break;
+ }
+ }
+ if (!found) {
+ ALOGV("didn't find client");
+ }
+}
+
+bool ResourceManagerService::reclaimResource(
+ int callingPid, const Vector<MediaResource> &resources) {
+ ALOGV("reclaimResource(callingPid %d, resources %s)",
+ callingPid, getString(resources).string());
+
+ Vector<sp<IResourceManagerClient>> clients;
+ {
+ Mutex::Autolock lock(mLock);
+ // first pass to handle secure/non-secure codec conflict
+ for (size_t i = 0; i < resources.size(); ++i) {
+ String8 type = resources[i].mType;
+ if (type == kResourceSecureCodec) {
+ if (!mSupportsMultipleSecureCodecs) {
+ if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+ return false;
+ }
+ }
+ if (!mSupportsSecureWithNonSecureCodec) {
+ if (!getAllClients_l(callingPid, String8(kResourceNonSecureCodec), &clients)) {
+ return false;
+ }
+ }
+ } else if (type == kResourceNonSecureCodec) {
+ if (!mSupportsSecureWithNonSecureCodec) {
+ if (!getAllClients_l(callingPid, String8(kResourceSecureCodec), &clients)) {
+ return false;
+ }
+ }
+ }
+ }
+
+ if (clients.size() == 0) {
+ // if no secure/non-secure codec conflict, run second pass to handle other resources.
+ for (size_t i = 0; i < resources.size(); ++i) {
+ String8 type = resources[i].mType;
+ if (type == kResourceGraphicMemory) {
+ sp<IResourceManagerClient> client;
+ if (!getLowestPriorityBiggestClient_l(callingPid, type, &client)) {
+ return false;
+ }
+ clients.push_back(client);
+ }
+ }
+ }
+ }
+
+ if (clients.size() == 0) {
+ return false;
+ }
+
+ for (size_t i = 0; i < clients.size(); ++i) {
+ ALOGV("reclaimResource from client %p", clients[i].get());
+ if (!clients[i]->reclaimResource()) {
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ResourceManagerService::getAllClients_l(
+ int callingPid, const String8 &type, Vector<sp<IResourceManagerClient>> *clients) {
+ Vector<sp<IResourceManagerClient>> temp;
+ for (size_t i = 0; i < mMap.size(); ++i) {
+ ResourceInfos &infos = mMap.editValueAt(i);
+ for (size_t j = 0; j < infos.size(); ++j) {
+ if (hasResourceType(type, infos[j].resources)) {
+ if (!isCallingPriorityHigher_l(callingPid, mMap.keyAt(i))) {
+ // some higher/equal priority process owns the resource,
+ // this request can't be fulfilled.
+ ALOGE("getAllClients_l: can't reclaim resource %s from pid %d",
+ type.string(), mMap.keyAt(i));
+ return false;
+ }
+ temp.push_back(infos[j].client);
+ }
+ }
+ }
+ if (temp.size() == 0) {
+ ALOGV("getAllClients_l: didn't find any resource %s", type.string());
+ return true;
+ }
+ clients->appendVector(temp);
+ return true;
+}
+
+bool ResourceManagerService::getLowestPriorityBiggestClient_l(
+ int callingPid, const String8 &type, sp<IResourceManagerClient> *client) {
+ int lowestPriorityPid;
+ int lowestPriority;
+ int callingPriority;
+ if (!mProcessInfo->getPriority(callingPid, &callingPriority)) {
+ ALOGE("getLowestPriorityBiggestClient_l: can't get process priority for pid %d",
+ callingPid);
+ return false;
+ }
+ if (!getLowestPriorityPid_l(type, &lowestPriorityPid, &lowestPriority)) {
+ return false;
+ }
+ if (lowestPriority <= callingPriority) {
+ ALOGE("getLowestPriorityBiggestClient_l: lowest priority %d vs caller priority %d",
+ lowestPriority, callingPriority);
+ return false;
+ }
+
+ if (!getBiggestClient_l(lowestPriorityPid, type, client)) {
+ return false;
+ }
+ return true;
+}
+
+bool ResourceManagerService::getLowestPriorityPid_l(
+ const String8 &type, int *lowestPriorityPid, int *lowestPriority) {
+ int pid = -1;
+ int priority = -1;
+ for (size_t i = 0; i < mMap.size(); ++i) {
+ if (mMap.valueAt(i).size() == 0) {
+ // no client on this process.
+ continue;
+ }
+ if (!hasResourceType(type, mMap.valueAt(i))) {
+ // doesn't have the requested resource type
+ continue;
+ }
+ int tempPid = mMap.keyAt(i);
+ int tempPriority;
+ if (!mProcessInfo->getPriority(tempPid, &tempPriority)) {
+ ALOGV("getLowestPriorityPid_l: can't get priority of pid %d, skipped", tempPid);
+ // TODO: remove this pid from mMap?
+ continue;
+ }
+ if (pid == -1 || tempPriority > priority) {
+            // first candidate, or a lower-priority (larger value) pid found
+ pid = tempPid;
+ priority = tempPriority;
+ }
+ }
+ if (pid != -1) {
+ *lowestPriorityPid = pid;
+ *lowestPriority = priority;
+ }
+ return (pid != -1);
+}
+
+bool ResourceManagerService::isCallingPriorityHigher_l(int callingPid, int pid) {
+ int callingPidPriority;
+ if (!mProcessInfo->getPriority(callingPid, &callingPidPriority)) {
+ return false;
+ }
+
+ int priority;
+ if (!mProcessInfo->getPriority(pid, &priority)) {
+ return false;
+ }
+
+ return (callingPidPriority < priority);
+}
+
+bool ResourceManagerService::getBiggestClient_l(
+ int pid, const String8 &type, sp<IResourceManagerClient> *client) {
+ ssize_t index = mMap.indexOfKey(pid);
+ if (index < 0) {
+ ALOGE("getBiggestClient_l: can't find resource info for pid %d", pid);
+ return false;
+ }
+
+ sp<IResourceManagerClient> clientTemp;
+ uint64_t largestValue = 0;
+ const ResourceInfos &infos = mMap.valueAt(index);
+ for (size_t i = 0; i < infos.size(); ++i) {
+ Vector<MediaResource> resources = infos[i].resources;
+ for (size_t j = 0; j < resources.size(); ++j) {
+ if (resources[j].mType == type) {
+ if (resources[j].mValue > largestValue) {
+ largestValue = resources[j].mValue;
+ clientTemp = infos[i].client;
+ }
+ }
+ }
+ }
+
+ if (clientTemp == NULL) {
+ ALOGE("getBiggestClient_l: can't find resource type %s for pid %d", type.string(), pid);
+ return false;
+ }
+
+ *client = clientTemp;
+ return true;
+}
+
+} // namespace android
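A minimal in-process usage sketch of the new service, closely mirroring the unit test further down (TestProcessInfo and TestClient are the test helpers defined there; the pid and byte count here are made up, and real callers would go through the binder interface instead):

// Illustrative only -- TestProcessInfo/TestClient come from ResourceManagerService_test.cpp below.
sp<ResourceManagerService> service = new ResourceManagerService(new TestProcessInfo);

// A codec-owning process (pid 1234, made up) registers what it currently holds.
sp<IResourceManagerClient> client = new TestClient(service);
Vector<MediaResource> held;
held.push_back(MediaResource(String8(kResourceSecureCodec), 1));
held.push_back(MediaResource(String8(kResourceGraphicMemory), 1 << 20));  // made-up size
service->addResource(/*pid*/ 1234, (int64_t) client.get(), client, held);

// A higher-priority caller (pid 10 outranks pid 1234 under TestProcessInfo) asks
// for the same resource kinds; the service picks victims and invokes
// IResourceManagerClient::reclaimResource() on each, so this returns true and
// TestClient removes its own entry.
Vector<MediaResource> wanted;
wanted.push_back(MediaResource(String8(kResourceSecureCodec), 1));
wanted.push_back(MediaResource(String8(kResourceGraphicMemory), 1 << 20));
bool reclaimed = service->reclaimResource(/*callingPid*/ 10, wanted);

// On normal teardown the owner would instead call:
//     service->removeResource((int64_t) client.get());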
diff --git a/services/mediaresourcemanager/ResourceManagerService.h b/services/mediaresourcemanager/ResourceManagerService.h
new file mode 100644
index 0000000..2ed9bf8
--- /dev/null
+++ b/services/mediaresourcemanager/ResourceManagerService.h
@@ -0,0 +1,106 @@
+/*
+**
+** Copyright 2015, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_RESOURCEMANAGERSERVICE_H
+#define ANDROID_RESOURCEMANAGERSERVICE_H
+
+#include <arpa/inet.h>
+#include <binder/BinderService.h>
+#include <utils/Errors.h>
+#include <utils/KeyedVector.h>
+#include <utils/String8.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+
+#include <media/IResourceManagerService.h>
+
+namespace android {
+
+struct ProcessInfoInterface;
+
+struct ResourceInfo {
+ int64_t clientId;
+ sp<IResourceManagerClient> client;
+ Vector<MediaResource> resources;
+};
+
+typedef Vector<ResourceInfo> ResourceInfos;
+typedef KeyedVector<int, ResourceInfos> PidResourceInfosMap;
+
+class ResourceManagerService
+ : public BinderService<ResourceManagerService>,
+ public BnResourceManagerService
+{
+public:
+ static char const *getServiceName() { return "media.resource_manager"; }
+
+ ResourceManagerService();
+ ResourceManagerService(sp<ProcessInfoInterface> processInfo);
+
+ // IResourceManagerService interface
+ virtual void config(const Vector<MediaResourcePolicy> &policies);
+
+ virtual void addResource(
+ int pid,
+ int64_t clientId,
+ const sp<IResourceManagerClient> client,
+ const Vector<MediaResource> &resources);
+
+ virtual void removeResource(int64_t clientId);
+
+ virtual bool reclaimResource(int callingPid, const Vector<MediaResource> &resources);
+
+protected:
+ virtual ~ResourceManagerService();
+
+private:
+ friend class ResourceManagerServiceTest;
+
+ // Gets the list of all the clients who own the specified resource type.
+ // Returns false if any client belongs to a process with higher priority than the
+    // calling process. *clients is left unchanged when this returns false.
+ bool getAllClients_l(int callingPid, const String8 &type,
+ Vector<sp<IResourceManagerClient>> *clients);
+
+    // Gets the client that owns the specified resource type in the lowest-priority process.
+    // Returns false if the calling process priority is not higher than the lowest process
+    // priority. *client is left unchanged when this returns false.
+ bool getLowestPriorityBiggestClient_l(int callingPid, const String8 &type,
+ sp<IResourceManagerClient> *client);
+
+    // Gets the lowest-priority process that holds the specified resource type.
+    // Returns false on failure; the output parameters are left unchanged in that case.
+ bool getLowestPriorityPid_l(const String8 &type, int *pid, int *priority);
+
+    // Gets the client in pid that owns the biggest share of the specified resource type.
+    // Returns false on failure; *client is left unchanged in that case.
+ bool getBiggestClient_l(int pid, const String8 &type, sp<IResourceManagerClient> *client);
+
+ bool isCallingPriorityHigher_l(int callingPid, int pid);
+
+ mutable Mutex mLock;
+ sp<ProcessInfoInterface> mProcessInfo;
+ PidResourceInfosMap mMap;
+ bool mSupportsMultipleSecureCodecs;
+ bool mSupportsSecureWithNonSecureCodec;
+};
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
+
+#endif // ANDROID_RESOURCEMANAGERSERVICE_H
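Since the class derives from BinderService<ResourceManagerService>, a host process can publish it with the usual instantiate()/thread-pool boilerplate. A hypothetical registration snippet (the actual mediaserver wiring is not part of this change):

// Hypothetical host-process main(); assumes only the stock libbinder helpers.
#include <binder/IPCThreadState.h>
#include <binder/ProcessState.h>
#include "ResourceManagerService.h"

using namespace android;

int main() {
    // Registers the service with servicemanager under getServiceName(),
    // i.e. "media.resource_manager".
    ResourceManagerService::instantiate();

    // Serve incoming binder calls.
    ProcessState::self()->startThreadPool();
    IPCThreadState::self()->joinThreadPool();
    return 0;
}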
diff --git a/services/mediaresourcemanager/test/Android.mk b/services/mediaresourcemanager/test/Android.mk
new file mode 100644
index 0000000..228b62a
--- /dev/null
+++ b/services/mediaresourcemanager/test/Android.mk
@@ -0,0 +1,25 @@
+# Build the unit tests.
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := ResourceManagerService_test
+
+LOCAL_MODULE_TAGS := tests
+
+LOCAL_SRC_FILES := \
+ ResourceManagerService_test.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ liblog \
+ libmedia \
+ libresourcemanagerservice \
+ libutils \
+
+LOCAL_C_INCLUDES := \
+ frameworks/av/include \
+ frameworks/av/services/mediaresourcemanager \
+
+LOCAL_32_BIT_ONLY := true
+
+include $(BUILD_NATIVE_TEST)
diff --git a/services/mediaresourcemanager/test/ResourceManagerService_test.cpp b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
new file mode 100644
index 0000000..b73e1bc
--- /dev/null
+++ b/services/mediaresourcemanager/test/ResourceManagerService_test.cpp
@@ -0,0 +1,464 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "ResourceManagerService_test"
+#include <utils/Log.h>
+
+#include <gtest/gtest.h>
+
+#include "ResourceManagerService.h"
+#include <media/IResourceManagerService.h>
+#include <media/MediaResource.h>
+#include <media/MediaResourcePolicy.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/ProcessInfoInterface.h>
+
+namespace android {
+
+struct TestProcessInfo : public ProcessInfoInterface {
+ TestProcessInfo() {}
+ virtual ~TestProcessInfo() {}
+
+ virtual bool getPriority(int pid, int *priority) {
+ // For testing, use pid as priority.
+ // Lower the value higher the priority.
+        // The lower the value, the higher the priority.
+ return true;
+ }
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(TestProcessInfo);
+};
+
+struct TestClient : public BnResourceManagerClient {
+ TestClient(sp<ResourceManagerService> service)
+ : mReclaimed(false), mService(service) {}
+
+ virtual bool reclaimResource() {
+ sp<IResourceManagerClient> client(this);
+ mService->removeResource((int64_t) client.get());
+ mReclaimed = true;
+ return true;
+ }
+
+ bool reclaimed() const {
+ return mReclaimed;
+ }
+
+ void reset() {
+ mReclaimed = false;
+ }
+
+protected:
+ virtual ~TestClient() {}
+
+private:
+ bool mReclaimed;
+ sp<ResourceManagerService> mService;
+ DISALLOW_EVIL_CONSTRUCTORS(TestClient);
+};
+
+static const int kTestPid1 = 30;
+static const int kTestPid2 = 20;
+
+class ResourceManagerServiceTest : public ::testing::Test {
+public:
+ ResourceManagerServiceTest()
+ : mService(new ResourceManagerService(new TestProcessInfo)),
+ mTestClient1(new TestClient(mService)),
+ mTestClient2(new TestClient(mService)),
+ mTestClient3(new TestClient(mService)) {
+ }
+
+protected:
+ static bool isEqualResources(const Vector<MediaResource> &resources1,
+ const Vector<MediaResource> &resources2) {
+ if (resources1.size() != resources2.size()) {
+ return false;
+ }
+ for (size_t i = 0; i < resources1.size(); ++i) {
+ if (resources1[i] != resources2[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ static void expectEqResourceInfo(const ResourceInfo &info, sp<IResourceManagerClient> client,
+ const Vector<MediaResource> &resources) {
+ EXPECT_EQ(client, info.client);
+ EXPECT_TRUE(isEqualResources(resources, info.resources));
+ }
+
+ void verifyClients(bool c1, bool c2, bool c3) {
+ TestClient *client1 = static_cast<TestClient*>(mTestClient1.get());
+ TestClient *client2 = static_cast<TestClient*>(mTestClient2.get());
+ TestClient *client3 = static_cast<TestClient*>(mTestClient3.get());
+
+ EXPECT_EQ(c1, client1->reclaimed());
+ EXPECT_EQ(c2, client2->reclaimed());
+ EXPECT_EQ(c3, client3->reclaimed());
+
+ client1->reset();
+ client2->reset();
+ client3->reset();
+ }
+
+ void addResource() {
+ // kTestPid1 mTestClient1
+ Vector<MediaResource> resources1;
+ resources1.push_back(MediaResource(String8(kResourceSecureCodec), 1));
+ mService->addResource(kTestPid1, (int64_t) mTestClient1.get(), mTestClient1, resources1);
+ resources1.push_back(MediaResource(String8(kResourceGraphicMemory), 200));
+ Vector<MediaResource> resources11;
+ resources11.push_back(MediaResource(String8(kResourceGraphicMemory), 200));
+ mService->addResource(kTestPid1, (int64_t) mTestClient1.get(), mTestClient1, resources11);
+
+ // kTestPid2 mTestClient2
+ Vector<MediaResource> resources2;
+ resources2.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
+ resources2.push_back(MediaResource(String8(kResourceGraphicMemory), 300));
+ mService->addResource(kTestPid2, (int64_t) mTestClient2.get(), mTestClient2, resources2);
+
+ // kTestPid2 mTestClient3
+ Vector<MediaResource> resources3;
+ mService->addResource(kTestPid2, (int64_t) mTestClient3.get(), mTestClient3, resources3);
+ resources3.push_back(MediaResource(String8(kResourceSecureCodec), 1));
+ resources3.push_back(MediaResource(String8(kResourceGraphicMemory), 100));
+ mService->addResource(kTestPid2, (int64_t) mTestClient3.get(), mTestClient3, resources3);
+
+ const PidResourceInfosMap &map = mService->mMap;
+ EXPECT_EQ(2u, map.size());
+ ssize_t index1 = map.indexOfKey(kTestPid1);
+ ASSERT_GE(index1, 0);
+ const ResourceInfos &infos1 = map[index1];
+ EXPECT_EQ(1u, infos1.size());
+ expectEqResourceInfo(infos1[0], mTestClient1, resources1);
+
+ ssize_t index2 = map.indexOfKey(kTestPid2);
+ ASSERT_GE(index2, 0);
+ const ResourceInfos &infos2 = map[index2];
+ EXPECT_EQ(2u, infos2.size());
+ expectEqResourceInfo(infos2[0], mTestClient2, resources2);
+ expectEqResourceInfo(infos2[1], mTestClient3, resources3);
+ }
+
+ void testConfig() {
+ EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs);
+ EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
+
+ Vector<MediaResourcePolicy> policies1;
+ policies1.push_back(MediaResourcePolicy(String8(kPolicySupportsMultipleSecureCodecs), 1));
+ policies1.push_back(
+ MediaResourcePolicy(String8(kPolicySupportsSecureWithNonSecureCodec), 0));
+ mService->config(policies1);
+ EXPECT_TRUE(mService->mSupportsMultipleSecureCodecs);
+ EXPECT_FALSE(mService->mSupportsSecureWithNonSecureCodec);
+
+ Vector<MediaResourcePolicy> policies2;
+ policies2.push_back(MediaResourcePolicy(String8(kPolicySupportsMultipleSecureCodecs), 0));
+ policies2.push_back(
+ MediaResourcePolicy(String8(kPolicySupportsSecureWithNonSecureCodec), 1));
+ mService->config(policies2);
+ EXPECT_FALSE(mService->mSupportsMultipleSecureCodecs);
+ EXPECT_TRUE(mService->mSupportsSecureWithNonSecureCodec);
+ }
+
+ void testRemoveResource() {
+ addResource();
+
+ mService->removeResource((int64_t) mTestClient2.get());
+
+ const PidResourceInfosMap &map = mService->mMap;
+ EXPECT_EQ(2u, map.size());
+ const ResourceInfos &infos1 = map.valueFor(kTestPid1);
+ const ResourceInfos &infos2 = map.valueFor(kTestPid2);
+ EXPECT_EQ(1u, infos1.size());
+ EXPECT_EQ(1u, infos2.size());
+ // mTestClient2 has been removed.
+ EXPECT_EQ(mTestClient3, infos2[0].client);
+ }
+
+ void testGetAllClients() {
+ addResource();
+
+ String8 type = String8(kResourceSecureCodec);
+        String8 unknownType = String8("unknownType");
+ Vector<sp<IResourceManagerClient> > clients;
+ int lowPriorityPid = 100;
+ EXPECT_FALSE(mService->getAllClients_l(lowPriorityPid, type, &clients));
+ int midPriorityPid = 25;
+        EXPECT_FALSE(mService->getAllClients_l(midPriorityPid, type, &clients));
+ int highPriorityPid = 10;
+        EXPECT_TRUE(mService->getAllClients_l(highPriorityPid, unknownType, &clients));
+        EXPECT_TRUE(mService->getAllClients_l(highPriorityPid, type, &clients));
+
+ EXPECT_EQ(2u, clients.size());
+ EXPECT_EQ(mTestClient3, clients[0]);
+ EXPECT_EQ(mTestClient1, clients[1]);
+ }
+
+ void testReclaimResourceSecure() {
+ Vector<MediaResource> resources;
+ resources.push_back(MediaResource(String8(kResourceSecureCodec), 1));
+ resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150));
+
+ // ### secure codec can't coexist and secure codec can coexist with non-secure codec ###
+ {
+ addResource();
+ mService->mSupportsMultipleSecureCodecs = false;
+ mService->mSupportsSecureWithNonSecureCodec = true;
+
+ // priority too low
+ EXPECT_FALSE(mService->reclaimResource(40, resources));
+ EXPECT_FALSE(mService->reclaimResource(25, resources));
+
+ // reclaim all secure codecs
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(true, false, true);
+
+ // call again should reclaim one largest graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, true, false);
+
+ // nothing left
+ EXPECT_FALSE(mService->reclaimResource(10, resources));
+ }
+
+ // ### secure codecs can't coexist and secure codec can't coexist with non-secure codec ###
+ {
+ addResource();
+ mService->mSupportsMultipleSecureCodecs = false;
+ mService->mSupportsSecureWithNonSecureCodec = false;
+
+ // priority too low
+ EXPECT_FALSE(mService->reclaimResource(40, resources));
+ EXPECT_FALSE(mService->reclaimResource(25, resources));
+
+ // reclaim all secure and non-secure codecs
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(true, true, true);
+
+ // nothing left
+ EXPECT_FALSE(mService->reclaimResource(10, resources));
+ }
+
+
+ // ### secure codecs can coexist but secure codec can't coexist with non-secure codec ###
+ {
+ addResource();
+ mService->mSupportsMultipleSecureCodecs = true;
+ mService->mSupportsSecureWithNonSecureCodec = false;
+
+ // priority too low
+ EXPECT_FALSE(mService->reclaimResource(40, resources));
+ EXPECT_FALSE(mService->reclaimResource(25, resources));
+
+ // reclaim all non-secure codecs
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, true, false);
+
+ // call again should reclaim one largest graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(true, false, false);
+
+ // call again should reclaim another largest graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, false, true);
+
+ // nothing left
+ EXPECT_FALSE(mService->reclaimResource(10, resources));
+ }
+
+ // ### secure codecs can coexist and secure codec can coexist with non-secure codec ###
+ {
+ addResource();
+ mService->mSupportsMultipleSecureCodecs = true;
+ mService->mSupportsSecureWithNonSecureCodec = true;
+
+ // priority too low
+ EXPECT_FALSE(mService->reclaimResource(40, resources));
+
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ // one largest graphic memory from lowest process got reclaimed
+ verifyClients(true, false, false);
+
+ // call again should reclaim another graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, true, false);
+
+ // call again should reclaim another graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, false, true);
+
+ // nothing left
+ EXPECT_FALSE(mService->reclaimResource(10, resources));
+ }
+ }
+
+ void testReclaimResourceNonSecure() {
+ Vector<MediaResource> resources;
+ resources.push_back(MediaResource(String8(kResourceNonSecureCodec), 1));
+ resources.push_back(MediaResource(String8(kResourceGraphicMemory), 150));
+
+ // ### secure codec can't coexist with non-secure codec ###
+ {
+ addResource();
+ mService->mSupportsSecureWithNonSecureCodec = false;
+
+ // priority too low
+ EXPECT_FALSE(mService->reclaimResource(40, resources));
+ EXPECT_FALSE(mService->reclaimResource(25, resources));
+
+ // reclaim all secure codecs
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(true, false, true);
+
+ // call again should reclaim one graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, true, false);
+
+ // nothing left
+ EXPECT_FALSE(mService->reclaimResource(10, resources));
+ }
+
+
+ // ### secure codec can coexist with non-secure codec ###
+ {
+ addResource();
+ mService->mSupportsSecureWithNonSecureCodec = true;
+
+ // priority too low
+ EXPECT_FALSE(mService->reclaimResource(40, resources));
+
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ // one largest graphic memory from lowest process got reclaimed
+ verifyClients(true, false, false);
+
+ // call again should reclaim another graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, true, false);
+
+ // call again should reclaim another graphic memory from lowest process
+ EXPECT_TRUE(mService->reclaimResource(10, resources));
+ verifyClients(false, false, true);
+
+ // nothing left
+ EXPECT_FALSE(mService->reclaimResource(10, resources));
+ }
+ }
+
+ void testGetLowestPriorityBiggestClient() {
+ String8 type = String8(kResourceGraphicMemory);
+ sp<IResourceManagerClient> client;
+ EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(10, type, &client));
+
+ addResource();
+
+ EXPECT_FALSE(mService->getLowestPriorityBiggestClient_l(100, type, &client));
+ EXPECT_TRUE(mService->getLowestPriorityBiggestClient_l(10, type, &client));
+
+ // kTestPid1 is the lowest priority process with kResourceGraphicMemory.
+ // mTestClient1 has the largest kResourceGraphicMemory within kTestPid1.
+ EXPECT_EQ(mTestClient1, client);
+ }
+
+ void testGetLowestPriorityPid() {
+ int pid;
+ int priority;
+ TestProcessInfo processInfo;
+
+ String8 type = String8(kResourceGraphicMemory);
+ EXPECT_FALSE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+
+ addResource();
+
+ EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+ EXPECT_EQ(kTestPid1, pid);
+ int priority1;
+ processInfo.getPriority(kTestPid1, &priority1);
+ EXPECT_EQ(priority1, priority);
+
+ type = String8(kResourceNonSecureCodec);
+ EXPECT_TRUE(mService->getLowestPriorityPid_l(type, &pid, &priority));
+ EXPECT_EQ(kTestPid2, pid);
+ int priority2;
+ processInfo.getPriority(kTestPid2, &priority2);
+ EXPECT_EQ(priority2, priority);
+ }
+
+ void testGetBiggestClient() {
+ String8 type = String8(kResourceGraphicMemory);
+ sp<IResourceManagerClient> client;
+ EXPECT_FALSE(mService->getBiggestClient_l(kTestPid2, type, &client));
+
+ addResource();
+
+ EXPECT_TRUE(mService->getBiggestClient_l(kTestPid2, type, &client));
+ EXPECT_EQ(mTestClient2, client);
+ }
+
+ void testIsCallingPriorityHigher() {
+ EXPECT_FALSE(mService->isCallingPriorityHigher_l(101, 100));
+ EXPECT_FALSE(mService->isCallingPriorityHigher_l(100, 100));
+ EXPECT_TRUE(mService->isCallingPriorityHigher_l(99, 100));
+ }
+
+ sp<ResourceManagerService> mService;
+ sp<IResourceManagerClient> mTestClient1;
+ sp<IResourceManagerClient> mTestClient2;
+ sp<IResourceManagerClient> mTestClient3;
+};
+
+TEST_F(ResourceManagerServiceTest, config) {
+ testConfig();
+}
+
+TEST_F(ResourceManagerServiceTest, addResource) {
+ addResource();
+}
+
+TEST_F(ResourceManagerServiceTest, removeResource) {
+ testRemoveResource();
+}
+
+TEST_F(ResourceManagerServiceTest, reclaimResource) {
+ testReclaimResourceSecure();
+ testReclaimResourceNonSecure();
+}
+
+TEST_F(ResourceManagerServiceTest, getAllClients_l) {
+ testGetAllClients();
+}
+
+TEST_F(ResourceManagerServiceTest, getLowestPriorityBiggestClient_l) {
+ testGetLowestPriorityBiggestClient();
+}
+
+TEST_F(ResourceManagerServiceTest, getLowestPriorityPid_l) {
+ testGetLowestPriorityPid();
+}
+
+TEST_F(ResourceManagerServiceTest, getBiggestClient_l) {
+ testGetBiggestClient();
+}
+
+TEST_F(ResourceManagerServiceTest, isCallingPriorityHigher_l) {
+ testIsCallingPriorityHigher();
+}
+
+} // namespace android