-rw-r--r--  camera/ICameraServiceProxy.cpp | 23
-rw-r--r--  camera/camera2/ICameraDeviceUser.cpp | 27
-rw-r--r--  include/camera/ICameraServiceProxy.h | 17
-rw-r--r--  include/camera/camera2/ICameraDeviceUser.h | 5
-rw-r--r--  include/media/AudioSystem.h | 1
-rw-r--r--  include/media/stagefright/MediaCodec.h | 5
-rw-r--r--  media/img_utils/include/img_utils/DngUtils.h | 31
-rw-r--r--  media/img_utils/src/DngUtils.cpp | 85
-rw-r--r--  media/libmedia/AudioSystem.cpp | 14
-rw-r--r--  media/libmedia/AudioTrack.cpp | 24
-rw-r--r--  media/libmedia/AudioTrackShared.cpp | 6
-rw-r--r--  media/libmediaplayerservice/MediaPlayerService.cpp | 4
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayer.cpp | 6
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp | 5
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp | 5
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp | 66
-rw-r--r--  media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h | 2
-rw-r--r--  media/libmediaplayerservice/nuplayer/RTSPSource.cpp | 4
-rw-r--r--  media/libstagefright/AudioSource.cpp | 4
-rwxr-xr-x  media/libstagefright/MPEG4Extractor.cpp | 6
-rw-r--r--  media/libstagefright/MediaCodec.cpp | 56
-rw-r--r--  media/libstagefright/Utils.cpp | 4
-rw-r--r--  media/libstagefright/codecs/avcdec/SoftAVCDec.cpp | 384
-rw-r--r--  media/libstagefright/codecs/avcdec/SoftAVCDec.h | 16
-rw-r--r--  media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp | 1
-rw-r--r--  media/libstagefright/codecs/hevcdec/SoftHEVC.cpp | 333
-rw-r--r--  media/libstagefright/codecs/hevcdec/SoftHEVC.h | 13
-rw-r--r--  media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp | 3
-rw-r--r--  media/libstagefright/data/media_codecs_google_tv.xml | 29
-rw-r--r-- [-rwxr-xr-x]  media/libstagefright/data/media_codecs_google_video.xml | 9
-rw-r--r--  media/libstagefright/foundation/ALooper.cpp | 10
-rw-r--r--  media/utils/Android.mk | 2
-rw-r--r--  media/utils/ISchedulingPolicyService.cpp (renamed from services/audioflinger/ISchedulingPolicyService.cpp) | 0
-rw-r--r--  media/utils/ISchedulingPolicyService.h (renamed from services/audioflinger/ISchedulingPolicyService.h) | 0
-rw-r--r--  media/utils/SchedulingPolicyService.cpp (renamed from services/audioflinger/SchedulingPolicyService.cpp) | 2
-rw-r--r--  media/utils/include/mediautils/SchedulingPolicyService.h (renamed from services/audioflinger/SchedulingPolicyService.h) | 0
-rw-r--r--  services/audioflinger/Android.mk | 15
-rw-r--r--  services/audioflinger/AudioFlinger.cpp | 10
-rw-r--r--  services/audioflinger/AudioFlinger.h | 8
-rw-r--r--  services/audioflinger/Threads.cpp | 8
-rw-r--r--  services/audioflinger/Tracks.cpp | 6
-rw-r--r--  services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h | 12
-rw-r--r--  services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp | 20
-rw-r--r--  services/audiopolicy/enginedefault/src/Gains.cpp | 8
-rw-r--r--  services/audiopolicy/managerdefault/AudioPolicyManager.cpp | 66
-rw-r--r--  services/audiopolicy/managerdefault/AudioPolicyManager.h | 1
-rw-r--r--  services/camera/libcameraservice/Android.mk | 2
-rw-r--r--  services/camera/libcameraservice/CameraFlashlight.cpp | 13
-rw-r--r--  services/camera/libcameraservice/CameraService.cpp | 118
-rw-r--r--  services/camera/libcameraservice/CameraService.h | 20
-rw-r--r--  services/camera/libcameraservice/api1/Camera2Client.cpp | 8
-rw-r--r--  services/camera/libcameraservice/api1/CameraClient.cpp | 25
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.cpp | 4
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.cpp | 39
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.h | 4
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.cpp | 17
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.h | 2
-rw-r--r--  services/camera/libcameraservice/common/CameraDeviceBase.h | 6
-rw-r--r--  services/camera/libcameraservice/common/CameraModule.cpp | 56
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.cpp | 6
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.h | 1
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.cpp | 577
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.h | 64
-rw-r--r--  services/camera/libcameraservice/device3/Camera3DummyStream.cpp | 4
-rw-r--r--  services/camera/libcameraservice/device3/Camera3DummyStream.h | 5
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStream.cpp | 11
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStream.h | 5
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h | 5
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Stream.cpp | 33
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Stream.h | 9
-rw-r--r--  services/camera/libcameraservice/device3/Camera3StreamInterface.h | 11
-rw-r--r--  services/mediaresourcemanager/ResourceManagerService.cpp | 37
-rw-r--r--  soundtrigger/ISoundTrigger.cpp | 14
-rw-r--r--  soundtrigger/ISoundTriggerHwService.cpp | 7
74 files changed, 1509 insertions(+), 950 deletions(-)
diff --git a/camera/ICameraServiceProxy.cpp b/camera/ICameraServiceProxy.cpp
index 06a5afb..694e9c3 100644
--- a/camera/ICameraServiceProxy.cpp
+++ b/camera/ICameraServiceProxy.cpp
@@ -29,11 +29,21 @@ public:
BpCameraServiceProxy(const sp<IBinder>& impl) : BpInterface<ICameraServiceProxy>(impl) {}
virtual void pingForUserUpdate() {
- Parcel data, reply;
+ Parcel data;
data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
- remote()->transact(BnCameraServiceProxy::PING_FOR_USER_UPDATE, data, &reply,
+ remote()->transact(BnCameraServiceProxy::PING_FOR_USER_UPDATE, data, nullptr,
IBinder::FLAG_ONEWAY);
}
+
+ virtual void notifyCameraState(String16 cameraId, CameraState newCameraState) {
+ Parcel data;
+ data.writeInterfaceToken(ICameraServiceProxy::getInterfaceDescriptor());
+ data.writeString16(cameraId);
+ data.writeInt32(newCameraState);
+ remote()->transact(BnCameraServiceProxy::NOTIFY_CAMERA_STATE, data, nullptr,
+ IBinder::FLAG_ONEWAY);
+ }
+
};
@@ -47,9 +57,16 @@ status_t BnCameraServiceProxy::onTransact(uint32_t code, const Parcel& data, Par
pingForUserUpdate();
return NO_ERROR;
} break;
+ case NOTIFY_CAMERA_STATE: {
+ CHECK_INTERFACE(ICameraServiceProxy, data, reply);
+ String16 cameraId = data.readString16();
+ CameraState newCameraState =
+ static_cast<CameraState>(data.readInt32());
+ notifyCameraState(cameraId, newCameraState);
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
}
}; // namespace android
-
diff --git a/camera/camera2/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp
index d2dc200..2a9fd2b 100644
--- a/camera/camera2/ICameraDeviceUser.cpp
+++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -49,7 +49,8 @@ enum {
WAIT_UNTIL_IDLE,
FLUSH,
PREPARE,
- TEAR_DOWN
+ TEAR_DOWN,
+ PREPARE2
};
namespace {
@@ -366,6 +367,21 @@ public:
return reply.readInt32();
}
+ virtual status_t prepare2(int maxCount, int streamId)
+ {
+ ALOGV("prepare2");
+ Parcel data, reply;
+
+ data.writeInterfaceToken(ICameraDeviceUser::getInterfaceDescriptor());
+ data.writeInt32(maxCount);
+ data.writeInt32(streamId);
+
+ remote()->transact(PREPARE2, data, &reply);
+
+ reply.readExceptionCode();
+ return reply.readInt32();
+ }
+
virtual status_t tearDown(int streamId)
{
ALOGV("tearDown");
@@ -592,7 +608,14 @@ status_t BnCameraDeviceUser::onTransact(
reply->writeInt32(tearDown(streamId));
return NO_ERROR;
} break;
-
+ case PREPARE2: {
+ CHECK_INTERFACE(ICameraDeviceUser, data, reply);
+ int maxCount = data.readInt32();
+ int streamId = data.readInt32();
+ reply->writeNoException();
+ reply->writeInt32(prepare2(maxCount, streamId));
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
diff --git a/include/camera/ICameraServiceProxy.h b/include/camera/ICameraServiceProxy.h
index 12a555f..2613c01 100644
--- a/include/camera/ICameraServiceProxy.h
+++ b/include/camera/ICameraServiceProxy.h
@@ -23,15 +23,30 @@
namespace android {
+/**
+ * Interface from native camera service to managed-side camera service proxy.
+ *
+ * Keep in sync with frameworks/base/core/java/android/hardware/ICameraServiceProxy.aidl
+ *
+ */
class ICameraServiceProxy : public IInterface {
public:
enum {
PING_FOR_USER_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
+ NOTIFY_CAMERA_STATE
+ };
+
+ enum CameraState {
+ CAMERA_STATE_OPEN,
+ CAMERA_STATE_ACTIVE,
+ CAMERA_STATE_IDLE,
+ CAMERA_STATE_CLOSED
};
DECLARE_META_INTERFACE(CameraServiceProxy);
virtual void pingForUserUpdate() = 0;
+ virtual void notifyCameraState(String16 cameraId, CameraState newCameraState) = 0;
};
class BnCameraServiceProxy: public BnInterface<ICameraServiceProxy>
@@ -48,5 +63,3 @@ public:
}; // namespace android
#endif // ANDROID_HARDWARE_ICAMERASERVICEPROXY_H
-
-
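As a usage illustration (not part of this change): a minimal sketch of how a native caller might report a device state transition through the new interface, assuming it has already looked up the managed-side proxy binder. The helper name and the service name string are assumptions made for the example.

    // Hypothetical helper, assuming the proxy was obtained earlier, e.g. via
    // defaultServiceManager()->getService(String16("media.camera.proxy")).
    static void notifyProxyState(const sp<ICameraServiceProxy>& proxy,
            const String16& cameraId,
            ICameraServiceProxy::CameraState state) {
        if (proxy == nullptr) {
            return;  // proxy may not be registered yet; the call is best-effort
        }
        proxy->notifyCameraState(cameraId, state);  // one-way binder transaction
    }

    // Example transitions for camera "0":
    //   notifyProxyState(proxy, String16("0"), ICameraServiceProxy::CAMERA_STATE_OPEN);
    //   notifyProxyState(proxy, String16("0"), ICameraServiceProxy::CAMERA_STATE_CLOSED);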
diff --git a/include/camera/camera2/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h
index a7bf8ab..4d8eb53 100644
--- a/include/camera/camera2/ICameraDeviceUser.h
+++ b/include/camera/camera2/ICameraDeviceUser.h
@@ -140,6 +140,11 @@ public:
virtual status_t prepare(int streamId) = 0;
/**
+ * Preallocate up to maxCount buffers for a given output stream asynchronously.
+ */
+ virtual status_t prepare2(int maxCount, int streamId) = 0;
+
+ /**
* Free all unused buffers for a given output stream.
*/
virtual status_t tearDown(int streamId) = 0;
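A minimal usage sketch for the new call (not part of this change), assuming `device` is a connected sp<ICameraDeviceUser> and `streamId` names an already-configured output stream; the maxCount value of 2 is illustrative.

    // Pre-allocate at most two buffers for the stream ahead of first use,
    // rather than the full set that prepare() would allocate.
    status_t res = device->prepare2(2 /* maxCount */, streamId);
    if (res != OK) {
        ALOGE("prepare2 failed for stream %d: %s (%d)", streamId, strerror(-res), res);
    }
    // Later, once the stream is idle, release the unused buffers again:
    device->tearDown(streamId);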
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index 06116a5..26a0bb2 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -399,6 +399,7 @@ private:
uint32_t mInSamplingRate;
audio_format_t mInFormat;
audio_channel_mask_t mInChannelMask;
+ sp<AudioIoDescriptor> getIoDescriptor_l(audio_io_handle_t ioHandle);
};
class AudioPolicyServiceClient: public IBinder::DeathRecipient,
diff --git a/include/media/stagefright/MediaCodec.h b/include/media/stagefright/MediaCodec.h
index c10963d..cdfa159 100644
--- a/include/media/stagefright/MediaCodec.h
+++ b/include/media/stagefright/MediaCodec.h
@@ -176,7 +176,7 @@ protected:
private:
// used by ResourceManagerClient
- status_t reclaim();
+ status_t reclaim(bool force = false);
friend struct ResourceManagerClient;
private:
@@ -385,6 +385,9 @@ private:
uint64_t getGraphicBufferSize();
void addResource(const String8 &type, const String8 &subtype, uint64_t value);
+ bool hasPendingBuffer(int portIndex);
+ bool hasPendingBuffer();
+
/* called to get the last codec error when the sticky flag is set.
* if no such codec error is found, returns UNKNOWN_ERROR.
*/
diff --git a/media/img_utils/include/img_utils/DngUtils.h b/media/img_utils/include/img_utils/DngUtils.h
index 3dcedc5..1d8df9c 100644
--- a/media/img_utils/include/img_utils/DngUtils.h
+++ b/media/img_utils/include/img_utils/DngUtils.h
@@ -138,6 +138,34 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
double opticalCenterY,
const double* kCoeffs);
+
+ /**
+ * Add FixBadPixelsList opcode for the given metadata parameters.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addBadPixelListForMetadata(const uint32_t* hotPixels,
+ uint32_t xyPairCount,
+ uint32_t colorFilterArrangement);
+
+ /**
+ * Add FixBadPixelsList opcode.
+ *
+ * bayerPhase - 0=top-left of image is red, 1=top-left of image is green pixel in red row,
+ * 2=top-left of image is green pixel in blue row, 3=top-left of image is
+ * blue.
+ * badPointCount - number of (x,y) pairs of bad pixels given in badPointRowColPairs.
+ * badRectCount - number of (top, left, bottom, right) tuples given in
+ * badRectTopLeftBottomRightTuples
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addBadPixelList(uint32_t bayerPhase,
+ uint32_t badPointCount,
+ uint32_t badRectCount,
+ const uint32_t* badPointRowColPairs,
+ const uint32_t* badRectTopLeftBottomRightTuples);
+
// TODO: Add other Opcode methods
protected:
static const uint32_t FLAG_OPTIONAL = 0x1u;
@@ -146,6 +174,7 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
// Opcode IDs
enum {
WARP_RECTILINEAR_ID = 1,
+ FIX_BAD_PIXELS_LIST = 5,
GAIN_MAP_ID = 9,
};
@@ -161,6 +190,8 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
ByteArrayOutput mOpList;
EndianOutput mEndianOut;
+ status_t addOpcodePreamble(uint32_t opcodeId);
+
};
} /*namespace img_utils*/
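A minimal usage sketch for the new builder methods (not part of this change), assuming `builder` is an OpcodeListBuilder being filled for a DNG opcode list and that the hot-pixel coordinates were read from camera metadata (for example ANDROID_STATISTICS_HOT_PIXEL_MAP); the literal coordinates and CFA value are illustrative.

    // Two bad pixels, given as coordinate pairs, as expected by the builder.
    uint32_t hotPixels[] = { 120, 34, 1015, 768 };
    uint32_t pairCount = 2;   // number of (x, y) pairs above
    uint32_t cfa = 0;         // color filter arrangement reported by the sensor (e.g. RGGB)

    status_t err = builder.addBadPixelListForMetadata(hotPixels, pairCount, cfa);
    if (err != OK) {
        ALOGE("Failed to add FixBadPixelsList opcode: %d", err);
    }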
diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp
index b213403..9473dce 100644
--- a/media/img_utils/src/DngUtils.cpp
+++ b/media/img_utils/src/DngUtils.cpp
@@ -224,13 +224,7 @@ status_t OpcodeListBuilder::addGainMap(uint32_t top,
uint32_t mapPlanes,
const float* mapGains) {
- uint32_t opcodeId = GAIN_MAP_ID;
-
- status_t err = mEndianOut.write(&opcodeId, 0, 1);
- if (err != OK) return err;
-
- uint8_t version[] = {1, 3, 0, 0};
- err = mEndianOut.write(version, 0, NELEMS(version));
+ status_t err = addOpcodePreamble(GAIN_MAP_ID);
if (err != OK) return err;
// Allow this opcode to be skipped if not supported
@@ -334,13 +328,7 @@ status_t OpcodeListBuilder::addWarpRectilinear(uint32_t numPlanes,
double opticalCenterY,
const double* kCoeffs) {
- uint32_t opcodeId = WARP_RECTILINEAR_ID;
-
- status_t err = mEndianOut.write(&opcodeId, 0, 1);
- if (err != OK) return err;
-
- uint8_t version[] = {1, 3, 0, 0};
- err = mEndianOut.write(version, 0, NELEMS(version));
+ status_t err = addOpcodePreamble(WARP_RECTILINEAR_ID);
if (err != OK) return err;
// Allow this opcode to be skipped if not supported
@@ -373,5 +361,74 @@ status_t OpcodeListBuilder::addWarpRectilinear(uint32_t numPlanes,
return OK;
}
+status_t OpcodeListBuilder::addBadPixelListForMetadata(const uint32_t* hotPixels,
+ uint32_t xyPairCount,
+ uint32_t colorFilterArrangement) {
+ if (colorFilterArrangement > 3) {
+ ALOGE("%s: Unknown color filter arrangement %" PRIu32, __FUNCTION__,
+ colorFilterArrangement);
+ return BAD_VALUE;
+ }
+
+ return addBadPixelList(colorFilterArrangement, xyPairCount, 0, hotPixels, nullptr);
+}
+
+status_t OpcodeListBuilder::addBadPixelList(uint32_t bayerPhase,
+ uint32_t badPointCount,
+ uint32_t badRectCount,
+ const uint32_t* badPointRowColPairs,
+ const uint32_t* badRectTopLeftBottomRightTuples) {
+
+ status_t err = addOpcodePreamble(FIX_BAD_PIXELS_LIST);
+ if (err != OK) return err;
+
+ // Allow this opcode to be skipped if not supported
+ uint32_t flags = FLAG_OPTIONAL;
+
+ err = mEndianOut.write(&flags, 0, 1);
+ if (err != OK) return err;
+
+ const uint32_t NUM_NON_VARLEN_FIELDS = 3;
+ const uint32_t SIZE_OF_POINT = 2;
+ const uint32_t SIZE_OF_RECT = 4;
+
+ uint32_t totalSize = (NUM_NON_VARLEN_FIELDS + badPointCount * SIZE_OF_POINT +
+ badRectCount * SIZE_OF_RECT) * sizeof(uint32_t);
+ err = mEndianOut.write(&totalSize, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&bayerPhase, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&badPointCount, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&badRectCount, 0, 1);
+ if (err != OK) return err;
+
+ if (badPointCount > 0) {
+ err = mEndianOut.write(badPointRowColPairs, 0, SIZE_OF_POINT * badPointCount);
+ if (err != OK) return err;
+ }
+
+ if (badRectCount > 0) {
+ err = mEndianOut.write(badRectTopLeftBottomRightTuples, 0, SIZE_OF_RECT * badRectCount);
+ if (err != OK) return err;
+ }
+
+ mCount++;
+ return OK;
+}
+
+status_t OpcodeListBuilder::addOpcodePreamble(uint32_t opcodeId) {
+ status_t err = mEndianOut.write(&opcodeId, 0, 1);
+ if (err != OK) return err;
+
+ uint8_t version[] = {1, 3, 0, 0};
+ err = mEndianOut.write(version, 0, NELEMS(version));
+ if (err != OK) return err;
+ return OK;
+}
+
} /*namespace img_utils*/
} /*namespace android*/
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 3bfb09a..9d645f0 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -476,7 +476,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event even
switch (event) {
case AUDIO_OUTPUT_OPENED:
case AUDIO_INPUT_OPENED: {
- sp<AudioIoDescriptor> oldDesc = getIoDescriptor(ioDesc->mIoHandle);
+ sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
if (oldDesc == 0) {
mIoDescriptors.add(ioDesc->mIoHandle, ioDesc);
} else {
@@ -498,7 +498,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event even
} break;
case AUDIO_OUTPUT_CLOSED:
case AUDIO_INPUT_CLOSED: {
- if (getIoDescriptor(ioDesc->mIoHandle) == 0) {
+ if (getIoDescriptor_l(ioDesc->mIoHandle) == 0) {
ALOGW("ioConfigChanged() closing unknown %s %d",
event == AUDIO_OUTPUT_CLOSED ? "output" : "input", ioDesc->mIoHandle);
break;
@@ -512,7 +512,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(audio_io_config_event even
case AUDIO_OUTPUT_CONFIG_CHANGED:
case AUDIO_INPUT_CONFIG_CHANGED: {
- sp<AudioIoDescriptor> oldDesc = getIoDescriptor(ioDesc->mIoHandle);
+ sp<AudioIoDescriptor> oldDesc = getIoDescriptor_l(ioDesc->mIoHandle);
if (oldDesc == 0) {
ALOGW("ioConfigChanged() modifying unknown output! %d", ioDesc->mIoHandle);
break;
@@ -575,7 +575,7 @@ status_t AudioSystem::AudioFlingerClient::getInputBufferSize(
return NO_ERROR;
}
-sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle)
+sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor_l(audio_io_handle_t ioHandle)
{
sp<AudioIoDescriptor> desc;
ssize_t index = mIoDescriptors.indexOfKey(ioHandle);
@@ -585,6 +585,12 @@ sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_
return desc;
}
+sp<AudioIoDescriptor> AudioSystem::AudioFlingerClient::getIoDescriptor(audio_io_handle_t ioHandle)
+{
+ Mutex::Autolock _l(mLock);
+ return getIoDescriptor_l(ioHandle);
+}
+
status_t AudioSystem::AudioFlingerClient::addAudioDeviceCallback(
const sp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
{
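The `_l` suffix introduced above follows the usual Android convention: the public method acquires the object's mutex and delegates to a `_l` variant that assumes the lock is already held, so internal callers that already hold mLock (such as ioConfigChanged) avoid recursive locking. A generic sketch of the pattern, not taken from this file:

    class Registry {
    public:
        // Public accessor: takes the lock, then delegates to the _l variant.
        sp<Item> get(int key) {
            Mutex::Autolock _l(mLock);
            return get_l(key);
        }
    private:
        // Caller must already hold mLock.
        sp<Item> get_l(int key);
        Mutex mLock;   // Item is a placeholder type for the sketch
    };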
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 444f4d8..ff5fe1d 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -523,6 +523,15 @@ status_t AudioTrack::start()
mTimestampStartupGlitchReported = false;
mRetrogradeMotionReported = false;
+ // If previousState == STATE_STOPPED, we reactivate markers (mMarkerPosition != 0)
+ // as the position is reset to 0. This is legacy behavior. This is not done
+ // in stop() to avoid a race condition where the last marker event is issued twice.
+ // Note: the if is technically unnecessary because previousState == STATE_FLUSHED
+ // is only for streaming tracks, and mMarkerReached is already set to false.
+ if (previousState == STATE_STOPPED) {
+ mMarkerReached = false;
+ }
+
// For offloaded tracks, we don't know if the hardware counters are really zero here,
// since the flush is asynchronous and stop may not fully drain.
// We save the time when the track is started to later verify whether
@@ -592,9 +601,9 @@ void AudioTrack::stop()
mProxy->interrupt();
mAudioTrack->stop();
- // the playback head position will reset to 0, so if a marker is set, we need
- // to activate it again
- mMarkerReached = false;
+
+ // Note: legacy handling - stop does not clear playback marker
+ // and periodic update counter, but flush does for streaming tracks.
if (mSharedBuffer != 0) {
// clear buffer position and loop count.
@@ -1843,7 +1852,11 @@ nsecs_t AudioTrack::processAudioBuffer()
case NO_ERROR:
case DEAD_OBJECT:
case TIMED_OUT:
- mCbf(EVENT_STREAM_END, mUserData, NULL);
+ if (status != DEAD_OBJECT) {
+ // for DEAD_OBJECT, we do not send an EVENT_STREAM_END after stop();
+ // instead, the application should handle the EVENT_NEW_IAUDIOTRACK.
+ mCbf(EVENT_STREAM_END, mUserData, NULL);
+ }
{
AutoMutex lock(mLock);
// The previously assigned value of waitStreamEnd is no longer valid,
@@ -1967,7 +1980,8 @@ nsecs_t AudioTrack::processAudioBuffer()
if (err != NO_ERROR) {
if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR ||
(isOffloaded() && (err == DEAD_OBJECT))) {
- return 0;
+ // FIXME bug 25195759
+ return 1000000;
}
ALOGE("Error %d obtaining an audio buffer, giving up.", err);
return NS_NEVER;
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index 6a51a76..caa84fb 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -932,7 +932,7 @@ ssize_t StaticAudioTrackServerProxy::pollPosition()
return (ssize_t) mState.mPosition;
}
-status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush __unused)
+status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
{
if (mIsShutdown) {
buffer->mFrameCount = 0;
@@ -970,7 +970,9 @@ status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush
// it is always larger or equal to avail.
LOG_ALWAYS_FATAL_IF(mFramesReady < (int64_t) avail);
buffer->mNonContig = mFramesReady == INT64_MAX ? SIZE_MAX : clampToSize(mFramesReady - avail);
- mUnreleased = avail;
+ if (!ackFlush) {
+ mUnreleased = avail;
+ }
return NO_ERROR;
}
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index 56521a2..bcfd83a 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -1734,7 +1734,7 @@ status_t MediaPlayerService::AudioOutput::open(
t->setVolume(mLeftVolume, mRightVolume);
mSampleRateHz = sampleRate;
- mFlags = t->getFlags(); // we suggest the flags above, but new AudioTrack() may not grant it.
+ mFlags = flags;
mMsecsPerFrame = 1E3f / (mPlaybackRate.mSpeed * sampleRate);
mFrameSize = t->frameSize();
uint32_t pos;
@@ -1746,7 +1746,7 @@ status_t MediaPlayerService::AudioOutput::open(
status_t res = NO_ERROR;
// Note some output devices may give us a direct track even though we don't specify it.
// Example: Line application b/17459982.
- if ((mFlags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
+ if ((t->getFlags() & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) {
res = t->setPlaybackRate(mPlaybackRate);
if (res == NO_ERROR) {
t->setAuxEffectSendLevel(mSendLevel);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index c0146d5..26532d7 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -1075,6 +1075,12 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
+ if (audio) {
+ mAudioEOS = false;
+ } else {
+ mVideoEOS = false;
+ }
+
ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
if (audio && (mFlushingAudio == NONE || mFlushingAudio == FLUSHED
|| mFlushingAudio == SHUT_DOWN)) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 3646828..c005f3f 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -635,8 +635,11 @@ void NuPlayer::Decoder::handleOutputFormatChange(const sp<AMessage> &format) {
flags = AUDIO_OUTPUT_FLAG_NONE;
}
- mRenderer->openAudioSink(
+ status_t err = mRenderer->openAudioSink(
format, false /* offloadOnly */, hasVideo, flags, NULL /* isOffloaded */);
+ if (err != OK) {
+ handleError(err);
+ }
}
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 7370224..f288c36 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -362,9 +362,9 @@ bool NuPlayerDriver::isPlaying() {
}
status_t NuPlayerDriver::setPlaybackSettings(const AudioPlaybackRate &rate) {
- Mutex::Autolock autoLock(mLock);
status_t err = mPlayer->setPlaybackSettings(rate);
if (err == OK) {
+ Mutex::Autolock autoLock(mLock);
if (rate.mSpeed == 0.f && mState == STATE_RUNNING) {
mState = STATE_PAUSED;
// try to update position
@@ -747,7 +747,8 @@ void NuPlayerDriver::notifyListener_l(
// the last little bit of audio. If we're looping, we need to restart it.
mAudioSink->start();
}
- break;
+ // don't send completion event when looping
+ return;
}
mPlayer->pause();
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 04a46f4..4d25294 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -106,10 +106,12 @@ NuPlayer::Renderer::Renderer(
mNotifyCompleteVideo(false),
mSyncQueues(false),
mPaused(false),
+ mPauseDrainAudioAllowedUs(0),
mVideoSampleReceived(false),
mVideoRenderingStarted(false),
mVideoRenderingStartGeneration(0),
mAudioRenderingStartGeneration(0),
+ mRenderingDataDelivered(false),
mAudioOffloadPauseTimeoutGeneration(0),
mAudioTornDown(false),
mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
@@ -630,6 +632,14 @@ void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
return;
}
+ // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
+ if (mPaused) {
+ const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
+ if (diffUs > delayUs) {
+ delayUs = diffUs;
+ }
+ }
+
mDrainAudioQueuePending = true;
sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
msg->setInt32("drainGeneration", mAudioDrainGeneration);
@@ -639,11 +649,16 @@ void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
mAudioRenderingStartGeneration = mAudioDrainGeneration;
mVideoRenderingStartGeneration = mVideoDrainGeneration;
+ mRenderingDataDelivered = false;
}
void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
mAudioRenderingStartGeneration == mAudioDrainGeneration) {
+ mRenderingDataDelivered = true;
+ if (mPaused) {
+ return;
+ }
mVideoRenderingStartGeneration = -1;
mAudioRenderingStartGeneration = -1;
@@ -798,6 +813,10 @@ void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
}
bool NuPlayer::Renderer::onDrainAudioQueue() {
+ // do not drain audio during teardown as queued buffers may be invalid.
+ if (mAudioTornDown) {
+ return false;
+ }
// TODO: This call to getPosition checks if AudioTrack has been created
// in AudioSink before draining audio. If AudioTrack doesn't exist, then
// CHECKs on getPosition will fail.
@@ -877,6 +896,8 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
ALOGV("AudioSink write would block when writing %zu bytes", copy);
} else {
ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
+ // This can only happen when AudioSink was opened with doNotReconnect flag set to
+ // true, in which case the NuPlayer will handle the reconnect.
notifyAudioTearDown();
}
break;
@@ -895,6 +916,13 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
{
Mutex::Autolock autoLock(mLock);
+ int64_t maxTimeMedia;
+ maxTimeMedia =
+ mAnchorTimeMediaUs +
+ (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
+ * 1000LL * mAudioSink->msecsPerFrame());
+ mMediaClock->updateMaxTimeMedia(maxTimeMedia);
+
notifyIfMediaRenderingStarted_l();
}
@@ -921,15 +949,6 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
break;
}
}
- int64_t maxTimeMedia;
- {
- Mutex::Autolock autoLock(mLock);
- maxTimeMedia =
- mAnchorTimeMediaUs +
- (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
- * 1000LL * mAudioSink->msecsPerFrame());
- }
- mMediaClock->updateMaxTimeMedia(maxTimeMedia);
// calculate whether we need to reschedule another write.
bool reschedule = !mAudioQueue.empty()
@@ -943,6 +962,10 @@ bool NuPlayer::Renderer::onDrainAudioQueue() {
int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
int32_t sampleRate = offloadingAudio() ?
mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
+ if (sampleRate == 0) {
+ ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
+ return 0;
+ }
// TODO: remove the (int32_t) casting below as it may overflow at 12.4 hours.
return (int64_t)((int32_t)numFrames * 1000000LL / sampleRate);
}
@@ -1338,8 +1361,16 @@ void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
mAudioSink->flush();
// Call stop() to signal to the AudioSink to completely fill the
// internal buffer before resuming playback.
+ // FIXME: this is ignored after flush().
mAudioSink->stop();
- if (!mPaused) {
+ if (mPaused) {
+ // Race condition: if renderer is paused and audio sink is stopped,
+ // we need to make sure that the audio track buffer fully drains
+ // before delivering data.
+ // FIXME: remove this if we can detect if stop() is complete.
+ const int delayUs = 2 * 50 * 1000; // (2 full mixer thread cycles at 50ms)
+ mPauseDrainAudioAllowedUs = ALooper::GetNowUs() + delayUs;
+ } else {
mAudioSink->start();
}
mNumFramesWritten = 0;
@@ -1471,6 +1502,7 @@ void NuPlayer::Renderer::onResume() {
cancelAudioOffloadPauseTimeout();
status_t err = mAudioSink->start();
if (err != OK) {
+ ALOGE("cannot start AudioSink err %d", err);
notifyAudioTearDown();
}
}
@@ -1478,7 +1510,10 @@ void NuPlayer::Renderer::onResume() {
{
Mutex::Autolock autoLock(mLock);
mPaused = false;
-
+ // rendering started message may have been delayed if we were paused.
+ if (mRenderingDataDelivered) {
+ notifyIfMediaRenderingStarted_l();
+ }
// configure audiosink as we did not do it when pausing
if (mAudioSink != NULL && mAudioSink->ready()) {
mAudioSink->setPlaybackRate(mPlaybackSettings);
@@ -1764,6 +1799,12 @@ status_t NuPlayer::Renderer::onOpenAudioSink(
const uint32_t frameCount =
(unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
+ // The doNotReconnect flag means AudioSink will signal back and let NuPlayer re-construct the
+ // AudioSink. We don't want this when there's video because it will cause a video seek to
+ // the previous I frame. But we do want this when there's only audio because it will give
+ // NuPlayer a chance to switch from non-offload mode to offload mode.
+ // So we only set doNotReconnect when there's no video.
+ const bool doNotReconnect = !hasVideo;
status_t err = mAudioSink->open(
sampleRate,
numChannels,
@@ -1774,13 +1815,14 @@ status_t NuPlayer::Renderer::onOpenAudioSink(
mUseAudioCallback ? this : NULL,
(audio_output_flags_t)pcmFlags,
NULL,
- true /* doNotReconnect */,
+ doNotReconnect,
frameCount);
if (err == OK) {
err = mAudioSink->setPlaybackRate(mPlaybackSettings);
}
if (err != OK) {
ALOGW("openAudioSink: non offloaded open failed status: %d", err);
+ mAudioSink->close();
mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
return err;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 3e65649..9479c31 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -170,11 +170,13 @@ private:
// modified on only renderer's thread.
bool mPaused;
+ int64_t mPauseDrainAudioAllowedUs; // time when we can drain/deliver audio in pause mode.
bool mVideoSampleReceived;
bool mVideoRenderingStarted;
int32_t mVideoRenderingStartGeneration;
int32_t mAudioRenderingStartGeneration;
+ bool mRenderingDataDelivered;
int64_t mLastPositionUpdateUs;
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 58ff113..af0351e 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -134,7 +134,9 @@ void NuPlayer::RTSPSource::pause() {
return;
}
}
- mHandler->pause();
+ if (mHandler != NULL) {
+ mHandler->pause();
+ }
}
void NuPlayer::RTSPSource::resume() {
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 3505844..6e4a1dd 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -290,6 +290,10 @@ void AudioSource::signalBufferReturned(MediaBuffer *buffer) {
status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
int64_t timeUs = systemTime() / 1000ll;
+ // Estimate the real sampling time of the 1st sample in this buffer
+ // from AudioRecord's latency. (Apply this adjustment first so that
+ // the start time logic is not affected.)
+ timeUs -= mRecord->latency() * 1000LL;
ALOGV("dataCallbackTimestamp: %" PRId64 " us", timeUs);
Mutex::Autolock autoLock(mLock);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index f2d30b3..e4f8384 100755
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -1776,13 +1776,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
if (!isParsingMetaKeys) {
uint8_t buffer[4];
if (chunk_data_size < (off64_t)sizeof(buffer)) {
- *offset += chunk_size;
+ *offset = stop_offset;
return ERROR_MALFORMED;
}
if (mDataSource->readAt(
data_offset, buffer, 4) < 4) {
- *offset += chunk_size;
+ *offset = stop_offset;
return ERROR_IO;
}
@@ -1793,7 +1793,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
// apparently malformed chunks that don't have flags
// and completely different semantics than what's
// in the MPEG4 specs and skip it.
- *offset += chunk_size;
+ *offset = stop_offset;
return OK;
}
*offset += sizeof(buffer);
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index cd59709..c2ffdf2 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -63,6 +63,7 @@ static bool isResourceError(status_t err) {
}
static const int kMaxRetry = 2;
+static const int kMaxReclaimWaitTimeInUs = 500000; // 0.5s
struct ResourceManagerClient : public BnResourceManagerClient {
ResourceManagerClient(MediaCodec* codec) : mMediaCodec(codec) {}
@@ -74,6 +75,12 @@ struct ResourceManagerClient : public BnResourceManagerClient {
return true;
}
status_t err = codec->reclaim();
+ if (err == WOULD_BLOCK) {
+ ALOGD("Wait for the client to release codec.");
+ usleep(kMaxReclaimWaitTimeInUs);
+ ALOGD("Try to reclaim again.");
+ err = codec->reclaim(true /* force */);
+ }
if (err != OK) {
ALOGW("ResourceManagerClient failed to release codec with err %d", err);
}
@@ -571,12 +578,34 @@ status_t MediaCodec::stop() {
return PostAndAwaitResponse(msg, &response);
}
-status_t MediaCodec::reclaim() {
+bool MediaCodec::hasPendingBuffer(int portIndex) {
+ const Vector<BufferInfo> &buffers = mPortBuffers[portIndex];
+ for (size_t i = 0; i < buffers.size(); ++i) {
+ const BufferInfo &info = buffers.itemAt(i);
+ if (info.mOwnedByClient) {
+ return true;
+ }
+ }
+ return false;
+}
+
+bool MediaCodec::hasPendingBuffer() {
+ return hasPendingBuffer(kPortIndexInput) || hasPendingBuffer(kPortIndexOutput);
+}
+
+status_t MediaCodec::reclaim(bool force) {
+ ALOGD("MediaCodec::reclaim(%p) %s", this, mInitName.c_str());
sp<AMessage> msg = new AMessage(kWhatRelease, this);
msg->setInt32("reclaimed", 1);
+ msg->setInt32("force", force ? 1 : 0);
sp<AMessage> response;
- return PostAndAwaitResponse(msg, &response);
+ status_t ret = PostAndAwaitResponse(msg, &response);
+ if (ret == -ENOENT) {
+ ALOGD("MediaCodec looper is gone, skip reclaim");
+ ret = OK;
+ }
+ return ret;
}
status_t MediaCodec::release() {
@@ -1154,8 +1183,10 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
resourceType = String8(kResourceNonSecureCodec);
}
- const char *subtype = mIsVideo ? kResourceVideoCodec : kResourceAudioCodec;
- addResource(resourceType, String8(subtype), 1);
+ if (mIsVideo) {
+ // audio codec is currently ignored.
+ addResource(resourceType, String8(kResourceVideoCodec), 1);
+ }
(new AMessage)->postReply(mReplyID);
break;
@@ -1784,6 +1815,23 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
msg->findInt32("reclaimed", &reclaimed);
if (reclaimed) {
mReleasedByResourceManager = true;
+
+ int32_t force = 0;
+ msg->findInt32("force", &force);
+ if (!force && hasPendingBuffer()) {
+ ALOGW("Can't reclaim codec right now due to pending buffers.");
+
+ // return WOULD_BLOCK to ask resource manager to retry later.
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", WOULD_BLOCK);
+ response->postReply(replyID);
+
+ // notify the async client
+ if (mFlags & kFlagIsAsync) {
+ onError(DEAD_OBJECT, ACTION_CODE_FATAL);
+ }
+ break;
+ }
}
if (!((mFlags & kFlagIsComponentAllocated) && targetState == UNINITIALIZED) // See 1
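For context, the reclaim handshake these hunks add, condensed into one place (this restates the added code rather than introducing new API): the resource manager first asks politely, MediaCodec refuses with WOULD_BLOCK while the client still owns input/output buffers (notifying an async client via onError(DEAD_OBJECT, ACTION_CODE_FATAL) so it can return them), and after a bounded wait the reclaim is forced.

    status_t err = codec->reclaim();          // polite attempt
    if (err == WOULD_BLOCK) {                 // client still holds buffers
        usleep(kMaxReclaimWaitTimeInUs);      // give it up to 0.5 s to release them
        err = codec->reclaim(true /* force */);
    }
    if (err != OK) {
        ALOGW("ResourceManagerClient failed to release codec with err %d", err);
    }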
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 7c8d441..17f0201 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -199,7 +199,7 @@ status_t convertMetaDataToMessage(
}
int32_t fps;
- if (meta->findInt32(kKeyFrameRate, &fps)) {
+ if (meta->findInt32(kKeyFrameRate, &fps) && fps > 0) {
msg->setInt32("frame-rate", fps);
}
@@ -664,7 +664,7 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
}
int32_t fps;
- if (msg->findInt32("frame-rate", &fps)) {
+ if (msg->findInt32("frame-rate", &fps) && fps > 0) {
meta->setInt32(kKeyFrameRate, fps);
}
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index e083315..afbe230 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -38,10 +38,10 @@ namespace android {
/** Function and structure definitions to keep code similar for each codec */
#define ivdec_api_function ih264d_api_function
-#define ivdext_init_ip_t ih264d_init_ip_t
-#define ivdext_init_op_t ih264d_init_op_t
-#define ivdext_fill_mem_rec_ip_t ih264d_fill_mem_rec_ip_t
-#define ivdext_fill_mem_rec_op_t ih264d_fill_mem_rec_op_t
+#define ivdext_create_ip_t ih264d_create_ip_t
+#define ivdext_create_op_t ih264d_create_op_t
+#define ivdext_delete_ip_t ih264d_delete_ip_t
+#define ivdext_delete_op_t ih264d_delete_op_t
#define ivdext_ctl_set_num_cores_ip_t ih264d_ctl_set_num_cores_ip_t
#define ivdext_ctl_set_num_cores_op_t ih264d_ctl_set_num_cores_op_t
@@ -115,15 +115,12 @@ SoftAVC::SoftAVC(
320 /* width */, 240 /* height */, callbacks,
appData, component),
mCodecCtx(NULL),
- mMemRecords(NULL),
mFlushOutBuffer(NULL),
mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
mIvColorFormat(IV_YUV_420P),
- mNewWidth(mWidth),
- mNewHeight(mHeight),
- mNewLevel(0),
mChangingResolution(false),
- mSignalledError(false) {
+ mSignalledError(false),
+ mStride(mWidth){
initPorts(
kNumBuffers, INPUT_BUF_SIZE, kNumBuffers, CODEC_MIME_TYPE);
@@ -132,14 +129,23 @@ SoftAVC::SoftAVC(
// If input dump is enabled, then create an empty file
GENERATE_FILE_NAMES();
CREATE_DUMP_FILE(mInFile);
-
- CHECK_EQ(initDecoder(mWidth, mHeight), (status_t)OK);
}
SoftAVC::~SoftAVC() {
CHECK_EQ(deInitDecoder(), (status_t)OK);
}
+static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
+ UNUSED(ctxt);
+ return memalign(alignment, size);
+}
+
+static void ivd_aligned_free(void *ctxt, void *buf) {
+ UNUSED(ctxt);
+ free(buf);
+ return;
+}
+
static size_t GetCPUCoreCount() {
long cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
@@ -149,7 +155,7 @@ static size_t GetCPUCoreCount() {
cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
CHECK(cpuCoreCount >= 1);
- ALOGD("Number of CPU cores: %ld", cpuCoreCount);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
return (size_t)cpuCoreCount;
}
@@ -235,12 +241,10 @@ status_t SoftAVC::resetDecoder() {
}
mSignalledError = false;
- /* Set the run-time (dynamic) parameters */
- setParams(outputBufferWidth());
-
/* Set number of cores/threads to be used by the codec */
setNumCores();
+ mStride = 0;
return OK;
}
@@ -287,160 +291,41 @@ status_t SoftAVC::setFlushMode() {
return OK;
}
-status_t SoftAVC::initDecoder(uint32_t width, uint32_t height) {
+status_t SoftAVC::initDecoder() {
IV_API_CALL_STATUS_T status;
- UWORD32 u4_num_reorder_frames;
- UWORD32 u4_num_ref_frames;
- UWORD32 u4_share_disp_buf;
- WORD32 i4_level;
-
mNumCores = GetCPUCoreCount();
mCodecCtx = NULL;
- /* Initialize number of ref and reorder modes (for H264) */
- u4_num_reorder_frames = 16;
- u4_num_ref_frames = 16;
- u4_share_disp_buf = 0;
-
- uint32_t displayStride = mIsAdaptive ? mAdaptiveMaxWidth : width;
- uint32_t displayHeight = mIsAdaptive ? mAdaptiveMaxHeight : height;
- uint32_t displaySizeY = displayStride * displayHeight;
-
- if(mNewLevel == 0){
- if (displaySizeY > (1920 * 1088)) {
- i4_level = 50;
- } else if (displaySizeY > (1280 * 720)) {
- i4_level = 40;
- } else if (displaySizeY > (720 * 576)) {
- i4_level = 31;
- } else if (displaySizeY > (624 * 320)) {
- i4_level = 30;
- } else if (displaySizeY > (352 * 288)) {
- i4_level = 21;
- } else {
- i4_level = 20;
- }
- } else {
- i4_level = mNewLevel;
- }
-
- {
- iv_num_mem_rec_ip_t s_num_mem_rec_ip;
- iv_num_mem_rec_op_t s_num_mem_rec_op;
-
- s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
- s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
- s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
-
- ALOGV("Get number of mem records");
- status = ivdec_api_function(
- mCodecCtx, (void *)&s_num_mem_rec_ip, (void *)&s_num_mem_rec_op);
- if (IV_SUCCESS != status) {
- ALOGE("Error in getting mem records: 0x%x",
- s_num_mem_rec_op.u4_error_code);
- return UNKNOWN_ERROR;
- }
-
- mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
- }
-
- mMemRecords = (iv_mem_rec_t *)ivd_aligned_malloc(
- 128, mNumMemRecords * sizeof(iv_mem_rec_t));
- if (mMemRecords == NULL) {
- ALOGE("Allocation failure");
- return NO_MEMORY;
- }
-
- memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
-
- {
- size_t i;
- ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
- ivdext_fill_mem_rec_op_t s_fill_mem_op;
- iv_mem_rec_t *ps_mem_rec;
-
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
- sizeof(ivdext_fill_mem_rec_ip_t);
- s_fill_mem_ip.i4_level = i4_level;
- s_fill_mem_ip.u4_num_reorder_frames = u4_num_reorder_frames;
- s_fill_mem_ip.u4_num_ref_frames = u4_num_ref_frames;
- s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
- s_fill_mem_ip.u4_num_extra_disp_buf = 0;
- s_fill_mem_ip.e_output_format = mIvColorFormat;
-
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight;
- s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
- sizeof(ivdext_fill_mem_rec_op_t);
-
- ps_mem_rec = mMemRecords;
- for (i = 0; i < mNumMemRecords; i++) {
- ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
- }
-
- status = ivdec_api_function(
- mCodecCtx, (void *)&s_fill_mem_ip, (void *)&s_fill_mem_op);
-
- if (IV_SUCCESS != status) {
- ALOGE("Error in filling mem records: 0x%x",
- s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
- return UNKNOWN_ERROR;
- }
- mNumMemRecords =
- s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
-
- ps_mem_rec = mMemRecords;
-
- for (i = 0; i < mNumMemRecords; i++) {
- ps_mem_rec->pv_base = ivd_aligned_malloc(
- ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
- if (ps_mem_rec->pv_base == NULL) {
- ALOGE("Allocation failure for memory record #%zu of size %u",
- i, ps_mem_rec->u4_mem_size);
- status = IV_FAIL;
- return NO_MEMORY;
- }
-
- ps_mem_rec++;
- }
- }
+ mStride = outputBufferWidth();
/* Initialize the decoder */
{
- ivdext_init_ip_t s_init_ip;
- ivdext_init_op_t s_init_op;
+ ivdext_create_ip_t s_create_ip;
+ ivdext_create_op_t s_create_op;
void *dec_fxns = (void *)ivdec_api_function;
- s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
- s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
- s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
- s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride;
- s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;
-
- s_init_ip.i4_level = i4_level;
- s_init_ip.u4_num_reorder_frames = u4_num_reorder_frames;
- s_init_ip.u4_num_ref_frames = u4_num_ref_frames;
- s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
- s_init_ip.u4_num_extra_disp_buf = 0;
-
- s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
+ s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+ s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
+ s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
+ s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
+ s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
+ s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = NULL;
- s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
- s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
+ status = ivdec_api_function(mCodecCtx, (void *)&s_create_ip, (void *)&s_create_op);
- mCodecCtx = (iv_obj_t *)mMemRecords[0].pv_base;
+ mCodecCtx = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
mCodecCtx->pv_fxns = dec_fxns;
mCodecCtx->u4_size = sizeof(iv_obj_t);
- status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip, (void *)&s_init_op);
if (status != IV_SUCCESS) {
+ ALOGE("Error in create: 0x%x",
+ s_create_op.s_ivd_create_op_t.u4_error_code);
+ deInitDecoder();
mCodecCtx = NULL;
- ALOGE("Error in init: 0x%x",
- s_init_op.s_ivd_init_op_t.u4_error_code);
return UNKNOWN_ERROR;
}
}
@@ -449,7 +334,7 @@ status_t SoftAVC::initDecoder(uint32_t width, uint32_t height) {
resetPlugin();
/* Set the run time (dynamic) parameters */
- setParams(displayStride);
+ setParams(mStride);
/* Set number of cores/threads to be used by the codec */
setNumCores();
@@ -457,61 +342,37 @@ status_t SoftAVC::initDecoder(uint32_t width, uint32_t height) {
/* Get codec version */
logVersion();
- /* Allocate internal picture buffer */
- uint32_t bufferSize = displaySizeY * 3 / 2;
- mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
- if (NULL == mFlushOutBuffer) {
- ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
- return NO_MEMORY;
- }
-
- mInitNeeded = false;
mFlushNeeded = false;
return OK;
}
status_t SoftAVC::deInitDecoder() {
size_t i;
+ IV_API_CALL_STATUS_T status;
- if (mMemRecords) {
- iv_mem_rec_t *ps_mem_rec;
+ if (mCodecCtx) {
+ ivdext_delete_ip_t s_delete_ip;
+ ivdext_delete_op_t s_delete_op;
- ps_mem_rec = mMemRecords;
- for (i = 0; i < mNumMemRecords; i++) {
- if (ps_mem_rec->pv_base) {
- ivd_aligned_free(ps_mem_rec->pv_base);
- }
- ps_mem_rec++;
+ s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
+ s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
+
+ s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_delete_ip, (void *)&s_delete_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in delete: 0x%x",
+ s_delete_op.s_ivd_delete_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
}
- ivd_aligned_free(mMemRecords);
- mMemRecords = NULL;
}
- if (mFlushOutBuffer) {
- ivd_aligned_free(mFlushOutBuffer);
- mFlushOutBuffer = NULL;
- }
- mInitNeeded = true;
mChangingResolution = false;
return OK;
}
-status_t SoftAVC::reInitDecoder(uint32_t width, uint32_t height) {
- status_t ret;
-
- deInitDecoder();
-
- ret = initDecoder(width, height);
- if (OK != ret) {
- ALOGE("Create failure");
- deInitDecoder();
- return NO_MEMORY;
- }
- return OK;
-}
-
void SoftAVC::onReset() {
SoftVideoDecoderOMXComponent::onReset();
@@ -520,23 +381,6 @@ void SoftAVC::onReset() {
resetPlugin();
}
-OMX_ERRORTYPE SoftAVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
- const uint32_t oldWidth = mWidth;
- const uint32_t oldHeight = mHeight;
- OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params);
- if (mWidth != oldWidth || mHeight != oldHeight) {
- mNewWidth = mWidth;
- mNewHeight = mHeight;
- status_t err = reInitDecoder(mNewWidth, mNewHeight);
- if (err != OK) {
- notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL);
- mSignalledError = true;
- return OMX_ErrorUnsupportedSetting;
- }
- }
- return ret;
-}
-
void SoftAVC::setDecodeArgs(
ivd_video_decode_ip_t *ps_dec_ip,
ivd_video_decode_op_t *ps_dec_op,
@@ -587,6 +431,17 @@ void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
if (kOutputPortIndex == portIndex) {
setFlushMode();
+ /* Allocate a picture buffer to hold flushed data */
+ uint32_t displayStride = outputBufferWidth();
+ uint32_t displayHeight = outputBufferHeight();
+
+ uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
+ mFlushOutBuffer = (uint8_t *)memalign(128, bufferSize);
+ if (NULL == mFlushOutBuffer) {
+ ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize);
+ return;
+ }
+
while (true) {
ivd_video_decode_ip_t s_dec_ip;
ivd_video_decode_op_t s_dec_op;
@@ -601,6 +456,12 @@ void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
break;
}
}
+
+ if (mFlushOutBuffer) {
+ free(mFlushOutBuffer);
+ mFlushOutBuffer = NULL;
+ }
+
}
}
@@ -614,6 +475,20 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
return;
}
+ if (NULL == mCodecCtx) {
+ if (OK != initDecoder()) {
+ ALOGE("Failed to initialize decoder");
+ notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+ }
+ if (outputBufferWidth() != mStride) {
+ /* Set the run-time (dynamic) parameters */
+ mStride = outputBufferWidth();
+ setParams(mStride);
+ }
+
List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
@@ -676,22 +551,6 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
}
}
- // When there is an init required and the decoder is not in flush mode,
- // update output port's definition and reinitialize decoder.
- if (mInitNeeded && !mIsInFlush) {
- bool portWillReset = false;
-
- status_t err = reInitDecoder(mNewWidth, mNewHeight);
- if (err != OK) {
- notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL);
- mSignalledError = true;
- return;
- }
-
- handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
- return;
- }
-
/* Get a free slot in timestamp array to hold input timestamp */
{
size_t i;
@@ -726,10 +585,26 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
IV_API_CALL_STATUS_T status;
status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
- bool unsupportedDimensions =
+ bool unsupportedResolution =
(IVD_STREAM_WIDTH_HEIGHT_NOT_SUPPORTED == (s_dec_op.u4_error_code & 0xFF));
+
+ /* Check for unsupported dimensions */
+ if (unsupportedResolution) {
+ ALOGE("Unsupported resolution : %dx%d", mWidth, mHeight);
+ notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
+ bool allocationFailed = (IVD_MEM_ALLOC_FAILED == (s_dec_op.u4_error_code & 0xFF));
+ if (allocationFailed) {
+ ALOGE("Allocation failure in decoder");
+ notify(OMX_EventError, OMX_ErrorUnsupportedSetting, 0, NULL);
+ mSignalledError = true;
+ return;
+ }
+
bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
- bool unsupportedLevel = (IH264D_UNSUPPORTED_LEVEL == (s_dec_op.u4_error_code & 0xFF));
GETTIME(&mTimeEnd, NULL);
/* Compute time taken for decode() */
@@ -747,46 +622,6 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
mTimeStampsValid[timeStampIx] = false;
}
-
- // This is needed to handle CTS DecoderTest testCodecResetsH264WithoutSurface,
- // which is not sending SPS/PPS after port reconfiguration and flush to the codec.
- if (unsupportedDimensions && !mFlushNeeded) {
- bool portWillReset = false;
- mNewWidth = s_dec_op.u4_pic_wd;
- mNewHeight = s_dec_op.u4_pic_ht;
-
- status_t err = reInitDecoder(mNewWidth, mNewHeight);
- if (err != OK) {
- notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL);
- mSignalledError = true;
- return;
- }
-
- handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
-
- setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
-
- ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
- return;
- }
-
- if (unsupportedLevel && !mFlushNeeded) {
-
- mNewLevel = 51;
-
- status_t err = reInitDecoder(mNewWidth, mNewHeight);
- if (err != OK) {
- notify(OMX_EventError, OMX_ErrorUnsupportedSetting, err, NULL);
- mSignalledError = true;
- return;
- }
-
- setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
-
- ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
- return;
- }
-
// If the decoder is in the changing resolution mode and there is no output present,
// that means the switching is done and it's ready to reset the decoder and the plugin.
if (mChangingResolution && !s_dec_op.u4_output_present) {
@@ -796,28 +631,11 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
continue;
}
- if (unsupportedDimensions || resChanged) {
+ if (resChanged) {
mChangingResolution = true;
if (mFlushNeeded) {
setFlushMode();
}
-
- if (unsupportedDimensions) {
- mNewWidth = s_dec_op.u4_pic_wd;
- mNewHeight = s_dec_op.u4_pic_ht;
- mInitNeeded = true;
- }
- continue;
- }
-
- if (unsupportedLevel) {
-
- if (mFlushNeeded) {
- setFlushMode();
- }
-
- mNewLevel = 51;
- mInitNeeded = true;
continue;
}
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.h b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
index 1ec8991..9dcabb4 100644
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.h
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.h
@@ -23,9 +23,6 @@
namespace android {
-#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
-#define ivd_aligned_free(buf) free(buf)
-
/** Number of entries in the time-stamp array */
#define MAX_TIME_STAMPS 64
@@ -62,7 +59,6 @@ protected:
virtual void onQueueFilled(OMX_U32 portIndex);
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onReset();
- virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
private:
// Number of input and output buffers
enum {
@@ -70,8 +66,6 @@ private:
};
iv_obj_t *mCodecCtx; // Codec context
- iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
- size_t mNumMemRecords; // Number of memory records requested by the codec
size_t mNumCores; // Number of cores to be used by the codec
@@ -97,17 +91,15 @@ private:
bool mIsInFlush; // codec is in flush mode
bool mReceivedEOS; // EOS is received on input port
- bool mInitNeeded;
- uint32_t mNewWidth;
- uint32_t mNewHeight;
- uint32_t mNewLevel;
+
// The input stream has changed to a different resolution, which is still supported by the
// codec. So the codec is switching to decode the new resolution.
bool mChangingResolution;
bool mFlushNeeded;
bool mSignalledError;
+ size_t mStride;
- status_t initDecoder(uint32_t width, uint32_t height);
+ status_t initDecoder();
status_t deInitDecoder();
status_t setFlushMode();
status_t setParams(size_t stride);
@@ -115,7 +107,7 @@ private:
status_t setNumCores();
status_t resetDecoder();
status_t resetPlugin();
- status_t reInitDecoder(uint32_t width, uint32_t height);
+
void setDecodeArgs(
ivd_video_decode_ip_t *ps_dec_ip,
diff --git a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
index 6e55034..387d17d 100644
--- a/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
+++ b/media/libstagefright/codecs/avcenc/SoftAVCEnc.cpp
@@ -637,6 +637,7 @@ OMX_ERRORTYPE SoftAVC::initEncoder() {
for (size_t i = 0; i < MAX_CONVERSION_BUFFERS; i++) {
if (mConversionBuffers[i] != NULL) {
free(mConversionBuffers[i]);
+ mConversionBuffers[i] = 0;
}
if (((uint64_t)mStride * mHeight) > ((uint64_t)INT32_MAX / 3)) {
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
index 4b2ec1c..e601125 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -37,10 +37,10 @@ namespace android {
/** Function and structure definitions to keep code similar for each codec */
#define ivdec_api_function ihevcd_cxa_api_function
-#define ivdext_init_ip_t ihevcd_cxa_init_ip_t
-#define ivdext_init_op_t ihevcd_cxa_init_op_t
-#define ivdext_fill_mem_rec_ip_t ihevcd_cxa_fill_mem_rec_ip_t
-#define ivdext_fill_mem_rec_op_t ihevcd_cxa_fill_mem_rec_op_t
+#define ivdext_create_ip_t ihevcd_cxa_create_ip_t
+#define ivdext_create_op_t ihevcd_cxa_create_op_t
+#define ivdext_delete_ip_t ihevcd_cxa_delete_ip_t
+#define ivdext_delete_op_t ihevcd_cxa_delete_op_t
#define ivdext_ctl_set_num_cores_ip_t ihevcd_cxa_ctl_set_num_cores_ip_t
#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t
@@ -68,13 +68,13 @@ SoftHEVC::SoftHEVC(
kProfileLevels, ARRAY_SIZE(kProfileLevels),
320 /* width */, 240 /* height */, callbacks,
appData, component),
- mMemRecords(NULL),
+ mCodecCtx(NULL),
mFlushOutBuffer(NULL),
mOmxColorFormat(OMX_COLOR_FormatYUV420Planar),
mIvColorFormat(IV_YUV_420P),
- mNewWidth(mWidth),
- mNewHeight(mHeight),
- mChangingResolution(false) {
+ mChangingResolution(false),
+ mSignalledError(false),
+ mStride(mWidth) {
const size_t kMinCompressionRatio = 4 /* compressionRatio (for Level 4+) */;
const size_t kMaxOutputBufferSize = 2048 * 2048 * 3 / 2;
// INPUT_BUF_SIZE is given by HEVC codec as minimum input size
@@ -88,10 +88,21 @@ status_t SoftHEVC::init() {
}
SoftHEVC::~SoftHEVC() {
- ALOGD("In SoftHEVC::~SoftHEVC");
+ ALOGV("In SoftHEVC::~SoftHEVC");
CHECK_EQ(deInitDecoder(), (status_t)OK);
}
+static void *ivd_aligned_malloc(void *ctxt, WORD32 alignment, WORD32 size) {
+ UNUSED(ctxt);
+ return memalign(alignment, size);
+}
+
+static void ivd_aligned_free(void *ctxt, void *buf) {
+ UNUSED(ctxt);
+ free(buf);
+ return;
+}
+
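A minimal standalone sketch of the same idea (not the Ittiam ivd API; the struct and function names below are illustrative assumptions): the CREATE-style call hands the codec caller-supplied aligned alloc/free callbacks plus an opaque context, replacing the old pre-filled memory-record flow.

#include <malloc.h>
#include <cstdio>
#include <cstdlib>

struct create_ip {
    void *(*pf_aligned_alloc)(void *ctxt, int alignment, int size);
    void  (*pf_aligned_free)(void *ctxt, void *buf);
    void *pv_mem_ctxt;
};

static void *alignedAlloc(void * /*ctxt*/, int alignment, int size) {
    return memalign(alignment, size);   // codec would call this for every internal buffer
}

static void alignedFree(void * /*ctxt*/, void *buf) {
    free(buf);
}

int main() {
    create_ip ip = { alignedAlloc, alignedFree, nullptr };
    void *buf = ip.pf_aligned_alloc(ip.pv_mem_ctxt, 128, 4096);
    printf("allocated 4096 bytes at %p\n", buf);
    ip.pf_aligned_free(ip.pv_mem_ctxt, buf);
    return 0;
}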
static size_t GetCPUCoreCount() {
long cpuCoreCount = 1;
#if defined(_SC_NPROCESSORS_ONLN)
@@ -101,7 +112,7 @@ static size_t GetCPUCoreCount() {
cpuCoreCount = sysconf(_SC_NPROC_ONLN);
#endif
CHECK(cpuCoreCount >= 1);
- ALOGD("Number of CPU cores: %ld", cpuCoreCount);
+ ALOGV("Number of CPU cores: %ld", cpuCoreCount);
return (size_t)cpuCoreCount;
}
@@ -125,7 +136,7 @@ void SoftHEVC::logVersion() {
ALOGE("Error in getting version number: 0x%x",
s_ctl_op.u4_error_code);
} else {
- ALOGD("Ittiam decoder version number: %s",
+ ALOGV("Ittiam decoder version number: %s",
(char *)s_ctl_ip.pv_version_buffer);
}
return;
@@ -187,13 +198,12 @@ status_t SoftHEVC::resetDecoder() {
ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
return UNKNOWN_ERROR;
}
-
- /* Set the run-time (dynamic) parameters */
- setParams(outputBufferWidth());
+ mSignalledError = false;
/* Set number of cores/threads to be used by the codec */
setNumCores();
+ mStride = 0;
return OK;
}
@@ -206,7 +216,7 @@ status_t SoftHEVC::setNumCores() {
s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
- ALOGD("Set number of cores to %u", s_set_cores_ip.u4_num_cores);
+ ALOGV("Set number of cores to %u", s_set_cores_ip.u4_num_cores);
status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip,
(void *)&s_set_cores_op);
if (IV_SUCCESS != status) {
@@ -226,7 +236,7 @@ status_t SoftHEVC::setFlushMode() {
s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
- ALOGD("Set the decoder in flush mode ");
+ ALOGV("Set the decoder in flush mode ");
/* Set the decoder in Flush mode, subsequent decode() calls will flush */
status = ivdec_api_function(mCodecCtx, (void *)&s_video_flush_ip,
@@ -245,151 +255,38 @@ status_t SoftHEVC::setFlushMode() {
status_t SoftHEVC::initDecoder() {
IV_API_CALL_STATUS_T status;
- UWORD32 u4_num_reorder_frames;
- UWORD32 u4_num_ref_frames;
- UWORD32 u4_share_disp_buf;
- WORD32 i4_level;
-
mNumCores = GetCPUCoreCount();
+ mCodecCtx = NULL;
- /* Initialize number of ref and reorder modes (for HEVC) */
- u4_num_reorder_frames = 16;
- u4_num_ref_frames = 16;
- u4_share_disp_buf = 0;
-
- uint32_t displayStride = outputBufferWidth();
- uint32_t displayHeight = outputBufferHeight();
- uint32_t displaySizeY = displayStride * displayHeight;
-
- if (displaySizeY > (1920 * 1088)) {
- i4_level = 50;
- } else if (displaySizeY > (1280 * 720)) {
- i4_level = 40;
- } else if (displaySizeY > (960 * 540)) {
- i4_level = 31;
- } else if (displaySizeY > (640 * 360)) {
- i4_level = 30;
- } else if (displaySizeY > (352 * 288)) {
- i4_level = 21;
- } else {
- i4_level = 20;
- }
- {
- iv_num_mem_rec_ip_t s_num_mem_rec_ip;
- iv_num_mem_rec_op_t s_num_mem_rec_op;
-
- s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
- s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
- s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
-
- ALOGV("Get number of mem records");
- status = ivdec_api_function(mCodecCtx, (void*)&s_num_mem_rec_ip,
- (void*)&s_num_mem_rec_op);
- if (IV_SUCCESS != status) {
- ALOGE("Error in getting mem records: 0x%x",
- s_num_mem_rec_op.u4_error_code);
- return UNKNOWN_ERROR;
- }
-
- mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
- }
-
- mMemRecords = (iv_mem_rec_t*)ivd_aligned_malloc(
- 128, mNumMemRecords * sizeof(iv_mem_rec_t));
- if (mMemRecords == NULL) {
- ALOGE("Allocation failure");
- return NO_MEMORY;
- }
-
- memset(mMemRecords, 0, mNumMemRecords * sizeof(iv_mem_rec_t));
-
- {
- size_t i;
- ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
- ivdext_fill_mem_rec_op_t s_fill_mem_op;
- iv_mem_rec_t *ps_mem_rec;
-
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
- sizeof(ivdext_fill_mem_rec_ip_t);
- s_fill_mem_ip.i4_level = i4_level;
- s_fill_mem_ip.u4_num_reorder_frames = u4_num_reorder_frames;
- s_fill_mem_ip.u4_num_ref_frames = u4_num_ref_frames;
- s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
- s_fill_mem_ip.u4_num_extra_disp_buf = 0;
- s_fill_mem_ip.e_output_format = mIvColorFormat;
-
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
- s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = displayHeight;
- s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
- sizeof(ivdext_fill_mem_rec_op_t);
-
- ps_mem_rec = mMemRecords;
- for (i = 0; i < mNumMemRecords; i++)
- ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
-
- status = ivdec_api_function(mCodecCtx, (void *)&s_fill_mem_ip,
- (void *)&s_fill_mem_op);
-
- if (IV_SUCCESS != status) {
- ALOGE("Error in filling mem records: 0x%x",
- s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
- return UNKNOWN_ERROR;
- }
- mNumMemRecords =
- s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
-
- ps_mem_rec = mMemRecords;
-
- for (i = 0; i < mNumMemRecords; i++) {
- ps_mem_rec->pv_base = ivd_aligned_malloc(
- ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
- if (ps_mem_rec->pv_base == NULL) {
- ALOGE("Allocation failure for memory record #%zu of size %u",
- i, ps_mem_rec->u4_mem_size);
- status = IV_FAIL;
- return NO_MEMORY;
- }
-
- ps_mem_rec++;
- }
- }
+ mStride = outputBufferWidth();
/* Initialize the decoder */
{
- ivdext_init_ip_t s_init_ip;
- ivdext_init_op_t s_init_op;
+ ivdext_create_ip_t s_create_ip;
+ ivdext_create_op_t s_create_op;
void *dec_fxns = (void *)ivdec_api_function;
- s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
- s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
- s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
- s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = displayStride;
- s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;
-
- s_init_ip.i4_level = i4_level;
- s_init_ip.u4_num_reorder_frames = u4_num_reorder_frames;
- s_init_ip.u4_num_ref_frames = u4_num_ref_frames;
- s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
- s_init_ip.u4_num_extra_disp_buf = 0;
-
- s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
+ s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
+ s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
+ s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
+ s_create_op.s_ivd_create_op_t.u4_size = sizeof(ivdext_create_op_t);
+ s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_alloc = ivd_aligned_malloc;
+ s_create_ip.s_ivd_create_ip_t.pf_aligned_free = ivd_aligned_free;
+ s_create_ip.s_ivd_create_ip_t.pv_mem_ctxt = NULL;
- s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
- s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
+ status = ivdec_api_function(mCodecCtx, (void *)&s_create_ip, (void *)&s_create_op);
- mCodecCtx = (iv_obj_t*)mMemRecords[0].pv_base;
+ mCodecCtx = (iv_obj_t*)s_create_op.s_ivd_create_op_t.pv_handle;
mCodecCtx->pv_fxns = dec_fxns;
mCodecCtx->u4_size = sizeof(iv_obj_t);
- ALOGD("Initializing decoder");
- status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip,
- (void *)&s_init_op);
if (status != IV_SUCCESS) {
- ALOGE("Error in init: 0x%x",
- s_init_op.s_ivd_init_op_t.u4_error_code);
+ ALOGE("Error in create: 0x%x",
+ s_create_op.s_ivd_create_op_t.u4_error_code);
+ deInitDecoder();
+ mCodecCtx = NULL;
return UNKNOWN_ERROR;
}
}
@@ -398,7 +295,7 @@ status_t SoftHEVC::initDecoder() {
resetPlugin();
/* Set the run time (dynamic) parameters */
- setParams(displayStride);
+ setParams(mStride);
/* Set number of cores/threads to be used by the codec */
setNumCores();
@@ -406,80 +303,46 @@ status_t SoftHEVC::initDecoder() {
/* Get codec version */
logVersion();
- /* Allocate internal picture buffer */
- uint32_t bufferSize = displaySizeY * 3 / 2;
- mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, bufferSize);
- if (NULL == mFlushOutBuffer) {
- ALOGE("Could not allocate flushOutputBuffer of size %u", bufferSize);
- return NO_MEMORY;
- }
-
- mInitNeeded = false;
mFlushNeeded = false;
return OK;
}
status_t SoftHEVC::deInitDecoder() {
size_t i;
+ IV_API_CALL_STATUS_T status;
- if (mMemRecords) {
- iv_mem_rec_t *ps_mem_rec;
+ if (mCodecCtx) {
+ ivdext_delete_ip_t s_delete_ip;
+ ivdext_delete_op_t s_delete_op;
- ps_mem_rec = mMemRecords;
- ALOGD("Freeing codec memory");
- for (i = 0; i < mNumMemRecords; i++) {
- if(ps_mem_rec->pv_base) {
- ivd_aligned_free(ps_mem_rec->pv_base);
- }
- ps_mem_rec++;
+ s_delete_ip.s_ivd_delete_ip_t.u4_size = sizeof(ivdext_delete_ip_t);
+ s_delete_ip.s_ivd_delete_ip_t.e_cmd = IVD_CMD_DELETE;
+
+ s_delete_op.s_ivd_delete_op_t.u4_size = sizeof(ivdext_delete_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_delete_ip, (void *)&s_delete_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in delete: 0x%x",
+ s_delete_op.s_ivd_delete_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
}
- ivd_aligned_free(mMemRecords);
- mMemRecords = NULL;
}
- if(mFlushOutBuffer) {
- ivd_aligned_free(mFlushOutBuffer);
- mFlushOutBuffer = NULL;
- }
- mInitNeeded = true;
mChangingResolution = false;
return OK;
}
-status_t SoftHEVC::reInitDecoder() {
- status_t ret;
-
- deInitDecoder();
-
- ret = initDecoder();
- if (OK != ret) {
- ALOGE("Create failure");
- deInitDecoder();
- return NO_MEMORY;
- }
- return OK;
-}
-
void SoftHEVC::onReset() {
- ALOGD("onReset called");
+ ALOGV("onReset called");
SoftVideoDecoderOMXComponent::onReset();
+ mSignalledError = false;
resetDecoder();
resetPlugin();
}
-OMX_ERRORTYPE SoftHEVC::internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params) {
- const uint32_t oldWidth = mWidth;
- const uint32_t oldHeight = mHeight;
- OMX_ERRORTYPE ret = SoftVideoDecoderOMXComponent::internalSetParameter(index, params);
- if (mWidth != oldWidth || mHeight != oldHeight) {
- reInitDecoder();
- }
- return ret;
-}
-
void SoftHEVC::setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
ivd_video_decode_op_t *ps_dec_op,
OMX_BUFFERHEADERTYPE *inHeader,
@@ -529,6 +392,17 @@ void SoftHEVC::onPortFlushCompleted(OMX_U32 portIndex) {
if (kOutputPortIndex == portIndex) {
setFlushMode();
+ /* Allocate a picture buffer to hold flushed data */
+ uint32_t displayStride = outputBufferWidth();
+ uint32_t displayHeight = outputBufferHeight();
+
+ uint32_t bufferSize = displayStride * displayHeight * 3 / 2;
+ mFlushOutBuffer = (uint8_t *)memalign(128, bufferSize);
+ if (NULL == mFlushOutBuffer) {
+ ALOGE("Could not allocate flushOutputBuffer of size %zu", bufferSize);
+ return;
+ }
+
while (true) {
ivd_video_decode_ip_t s_dec_ip;
ivd_video_decode_op_t s_dec_op;
@@ -544,16 +418,36 @@ void SoftHEVC::onPortFlushCompleted(OMX_U32 portIndex) {
break;
}
}
+
+ if (mFlushOutBuffer) {
+ free(mFlushOutBuffer);
+ mFlushOutBuffer = NULL;
+ }
+
}
}
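A compact sketch of the drain-on-flush pattern used above, in plain C++ rather than the ivd API (drainDecoder() and decodeOnce() are made-up stand-ins): allocate a scratch output buffer, keep decoding until the codec reports no more output, then release the scratch buffer.

#include <cstdint>
#include <cstdlib>

struct FlushResult { bool outputPresent; };

// Stand-in for one decode call while the codec is in flush mode.
static FlushResult decodeOnce(uint8_t * /*scratch*/, size_t /*scratchSize*/) {
    static int remaining = 3;                    // pretend three pictures are still queued
    return FlushResult{ remaining-- > 0 };
}

static void drainDecoder(uint32_t stride, uint32_t height) {
    size_t bufferSize = (size_t)stride * height * 3 / 2;   // YUV420 output size
    uint8_t *scratch = (uint8_t *)malloc(bufferSize);
    if (scratch == nullptr) {
        return;                                  // allocation failure: nothing to drain into
    }
    while (decodeOnce(scratch, bufferSize).outputPresent) {
        // each iteration discards one flushed picture
    }
    free(scratch);
}

int main() {
    drainDecoder(1920, 1088);
    return 0;
}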
void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
UNUSED(portIndex);
+ if (mSignalledError) {
+ return;
+ }
if (mOutputPortSettingsChange != NONE) {
return;
}
+ if (NULL == mCodecCtx) {
+ if (OK != initDecoder()) {
+ return;
+ }
+ }
+ if (outputBufferWidth() != mStride) {
+ /* Set the run-time (dynamic) parameters */
+ mStride = outputBufferWidth();
+ setParams(mStride);
+ }
+
List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
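A standalone sketch (standard C++, not OMX; Decoder and Component here are illustrative) of the lazy-initialization pattern introduced in onQueueFilled() above: create the decoder on the first queued buffer and, on later calls, only re-send the stride as a dynamic parameter when the output width changes, instead of tearing the codec down and re-creating it.

#include <cstdio>
#include <memory>

struct Decoder { int stride = 0; };

class Component {
public:
    void onQueueFilled(int outputWidth) {
        if (mSignalledError) return;
        if (mCodec == nullptr) {                 // initDecoder() on first use
            mCodec.reset(new Decoder());
            mStride = outputWidth;
            mCodec->stride = mStride;            // setParams(mStride)
        }
        if (outputWidth != mStride) {            // dynamic parameter update only
            mStride = outputWidth;
            mCodec->stride = mStride;
        }
        printf("decode with stride %d\n", mCodec->stride);
    }
private:
    std::unique_ptr<Decoder> mCodec;
    int mStride = 0;
    bool mSignalledError = false;
};

int main() {
    Component c;
    c.onQueueFilled(1280);   // first buffer: decoder gets created
    c.onQueueFilled(1920);   // width grew: just push the new stride
    return 0;
}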
@@ -594,7 +488,6 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
outHeader->nOffset = 0;
if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
- ALOGD("EOS seen on input");
mReceivedEOS = true;
if (inHeader->nFilledLen == 0) {
inQueue.erase(inQueue.begin());
@@ -605,16 +498,6 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
}
}
- // When there is an init required and the decoder is not in flush mode,
- // update output port's definition and reinitialize decoder.
- if (mInitNeeded && !mIsInFlush) {
- bool portWillReset = false;
- handlePortSettingsChange(&portWillReset, mNewWidth, mNewHeight);
-
- CHECK_EQ(reInitDecoder(), (status_t)OK);
- return;
- }
-
/* Get a free slot in timestamp array to hold input timestamp */
{
size_t i;
@@ -646,13 +529,7 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
IV_API_CALL_STATUS_T status;
status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
- // FIXME: Compare |status| to IHEVCD_UNSUPPORTED_DIMENSIONS, which is not one of the
- // IV_API_CALL_STATUS_T, seems be wrong. But this is what the decoder returns right now.
- // The decoder should be fixed so that |u4_error_code| instead of |status| returns
- // IHEVCD_UNSUPPORTED_DIMENSIONS.
- bool unsupportedDimensions =
- ((IHEVCD_UNSUPPORTED_DIMENSIONS == (IHEVCD_CXA_ERROR_CODES_T)status)
- || (IHEVCD_UNSUPPORTED_DIMENSIONS == s_dec_op.u4_error_code));
+
bool resChanged = (IVD_RES_CHANGED == (s_dec_op.u4_error_code & 0xFF));
GETTIME(&mTimeEnd, NULL);
@@ -671,20 +548,6 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
mTimeStampsValid[timeStampIx] = false;
}
- // This is needed to handle CTS DecoderTest testCodecResetsHEVCWithoutSurface,
- // which is not sending SPS/PPS after port reconfiguration and flush to the codec.
- if (unsupportedDimensions && !mFlushNeeded) {
- bool portWillReset = false;
- handlePortSettingsChange(&portWillReset, s_dec_op.u4_pic_wd, s_dec_op.u4_pic_ht);
-
- CHECK_EQ(reInitDecoder(), (status_t)OK);
-
- setDecodeArgs(&s_dec_ip, &s_dec_op, inHeader, outHeader, timeStampIx);
-
- ivdec_api_function(mCodecCtx, (void *)&s_dec_ip, (void *)&s_dec_op);
- return;
- }
-
// If the decoder is in the changing resolution mode and there is no output present,
// that means the switching is done and it's ready to reset the decoder and the plugin.
if (mChangingResolution && !s_dec_op.u4_output_present) {
@@ -694,17 +557,11 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
continue;
}
- if (unsupportedDimensions || resChanged) {
+ if (resChanged) {
mChangingResolution = true;
if (mFlushNeeded) {
setFlushMode();
}
-
- if (unsupportedDimensions) {
- mNewWidth = s_dec_op.u4_pic_wd;
- mNewHeight = s_dec_op.u4_pic_ht;
- mInitNeeded = true;
- }
continue;
}
@@ -721,7 +578,7 @@ void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
}
if (s_dec_op.u4_output_present) {
- outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+ outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
mTimeStampsValid[s_dec_op.u4_ts] = false;
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
index c6344cf..21bb99e 100644
--- a/media/libstagefright/codecs/hevcdec/SoftHEVC.h
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -23,9 +23,6 @@
namespace android {
-#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
-#define ivd_aligned_free(buf) free(buf)
-
/** Number of entries in the time-stamp array */
#define MAX_TIME_STAMPS 64
@@ -64,7 +61,6 @@ protected:
virtual void onQueueFilled(OMX_U32 portIndex);
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onReset();
- virtual OMX_ERRORTYPE internalSetParameter(OMX_INDEXTYPE index, const OMX_PTR params);
private:
// Number of input and output buffers
enum {
@@ -72,8 +68,6 @@ private:
};
iv_obj_t *mCodecCtx; // Codec context
- iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
- size_t mNumMemRecords; // Number of memory records requested by the codec
size_t mNumCores; // Number of cores to be used by the codec
@@ -95,13 +89,13 @@ private:
bool mIsInFlush; // codec is in flush mode
bool mReceivedEOS; // EOS is received on input port
- bool mInitNeeded;
- uint32_t mNewWidth;
- uint32_t mNewHeight;
+
// The input stream has changed to a different resolution, which is still supported by the
// codec. So the codec is switching to decode the new resolution.
bool mChangingResolution;
bool mFlushNeeded;
+ bool mSignalledError;
+ size_t mStride;
status_t initDecoder();
status_t deInitDecoder();
@@ -111,7 +105,6 @@ private:
status_t setNumCores();
status_t resetDecoder();
status_t resetPlugin();
- status_t reInitDecoder();
void setDecodeArgs(ivd_video_decode_ip_t *ps_dec_ip,
ivd_video_decode_op_t *ps_dec_op,
diff --git a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
index 32e5da7..4307c4e 100644
--- a/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
+++ b/media/libstagefright/codecs/mpeg2dec/SoftMPEG2.cpp
@@ -307,7 +307,7 @@ status_t SoftMPEG2::initDecoder() {
s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
s_fill_mem_ip.e_output_format = mIvColorFormat;
-
+ s_fill_mem_ip.u4_deinterlace = 1;
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = displayStride;
@@ -361,6 +361,7 @@ status_t SoftMPEG2::initDecoder() {
s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = displayHeight;
s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_init_ip.u4_deinterlace = 1;
s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
diff --git a/media/libstagefright/data/media_codecs_google_tv.xml b/media/libstagefright/data/media_codecs_google_tv.xml
new file mode 100644
index 0000000..330c6fb
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_tv.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2015 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.mpeg2.decoder" type="video/mpeg2">
+ <!-- profiles and levels: ProfileMain : LevelHL -->
+ <Limit name="size" min="16x16" max="1920x1088" />
+ <Limit name="alignment" value="2x2" />
+ <Limit name="block-size" value="16x16" />
+ <Limit name="blocks-per-second" range="1-244800" />
+ <Limit name="bitrate" range="1-20000000" />
+ <Feature name="adaptive-playback" />
+ </MediaCodec>
+ </Decoders>
+</Included>
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
index 740f96b..81a6d00 100755..100644
--- a/media/libstagefright/data/media_codecs_google_video.xml
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -16,15 +16,6 @@
<Included>
<Decoders>
- <MediaCodec name="OMX.google.mpeg2.decoder" type="video/mpeg2">
- <!-- profiles and levels: ProfileMain : LevelHL -->
- <Limit name="size" min="16x16" max="1920x1088" />
- <Limit name="alignment" value="2x2" />
- <Limit name="block-size" value="16x16" />
- <Limit name="blocks-per-second" range="1-244800" />
- <Limit name="bitrate" range="1-20000000" />
- <Feature name="adaptive-playback" />
- </MediaCodec>
<MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es">
<!-- profiles and levels: ProfileSimple : Level3 -->
<Limit name="size" min="2x2" max="352x288" />
diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp
index 90b5f68..9921636 100644
--- a/media/libstagefright/foundation/ALooper.cpp
+++ b/media/libstagefright/foundation/ALooper.cpp
@@ -151,6 +151,10 @@ status_t ALooper::stop() {
}
mQueueChangedCondition.signal();
+ {
+ Mutex::Autolock autoLock(mRepliesLock);
+ mRepliesCondition.broadcast();
+ }
if (!runningLocally && !thread->isCurrentThread()) {
// If not running locally and this thread _is_ the looper thread,
@@ -234,6 +238,12 @@ status_t ALooper::awaitResponse(const sp<AReplyToken> &replyToken, sp<AMessage>
Mutex::Autolock autoLock(mRepliesLock);
CHECK(replyToken != NULL);
while (!replyToken->retrieveReply(response)) {
+ {
+ Mutex::Autolock autoLock(mLock);
+ if (mThread == NULL) {
+ return -ENOENT;
+ }
+ }
mRepliesCondition.wait(mRepliesLock);
}
return OK;
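A standalone sketch of the same shutdown pattern (std::mutex/std::condition_variable, not the ALooper API; ReplyWaiter is a made-up type): the waiter rechecks a "stopped" flag inside its wait loop, and stop() broadcasts, so a reply that will never arrive cannot leave the caller blocked forever.

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <optional>
#include <thread>

struct ReplyWaiter {
    std::mutex lock;
    std::condition_variable cond;
    std::optional<int> reply;
    bool stopped = false;

    // Mirrors awaitResponse(): block until a reply arrives or the looper is stopped.
    int await() {
        std::unique_lock<std::mutex> l(lock);
        while (!reply.has_value()) {
            if (stopped) return -1;            // mirrors the mThread == NULL early return
            cond.wait(l);
        }
        return *reply;
    }

    void post(int value) {                     // a reply being delivered
        std::lock_guard<std::mutex> l(lock);
        reply = value;
        cond.notify_all();
    }

    void stop() {                              // mirrors ALooper::stop()
        std::lock_guard<std::mutex> l(lock);
        stopped = true;
        cond.notify_all();                     // the broadcast added above
    }
};

int main() {
    ReplyWaiter w;
    std::thread t([&w] { w.stop(); });         // no reply is ever posted
    int r = w.await();                         // returns -1 instead of hanging
    t.join();
    std::printf("await returned %d\n", r);
    return 0;
}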
diff --git a/media/utils/Android.mk b/media/utils/Android.mk
index dfadbc8..54d22b1 100644
--- a/media/utils/Android.mk
+++ b/media/utils/Android.mk
@@ -18,6 +18,8 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
BatteryNotifier.cpp \
+ ISchedulingPolicyService.cpp \
+ SchedulingPolicyService.cpp
LOCAL_SHARED_LIBRARIES := \
libbinder \
diff --git a/services/audioflinger/ISchedulingPolicyService.cpp b/media/utils/ISchedulingPolicyService.cpp
index f55bc02..f55bc02 100644
--- a/services/audioflinger/ISchedulingPolicyService.cpp
+++ b/media/utils/ISchedulingPolicyService.cpp
diff --git a/services/audioflinger/ISchedulingPolicyService.h b/media/utils/ISchedulingPolicyService.h
index b94b191..b94b191 100644
--- a/services/audioflinger/ISchedulingPolicyService.h
+++ b/media/utils/ISchedulingPolicyService.h
diff --git a/services/audioflinger/SchedulingPolicyService.cpp b/media/utils/SchedulingPolicyService.cpp
index 70a3f1a..17ee9bc 100644
--- a/services/audioflinger/SchedulingPolicyService.cpp
+++ b/media/utils/SchedulingPolicyService.cpp
@@ -20,7 +20,7 @@
#include <binder/IServiceManager.h>
#include <utils/Mutex.h>
#include "ISchedulingPolicyService.h"
-#include "SchedulingPolicyService.h"
+#include "mediautils/SchedulingPolicyService.h"
namespace android {
diff --git a/services/audioflinger/SchedulingPolicyService.h b/media/utils/include/mediautils/SchedulingPolicyService.h
index a9870d4..a9870d4 100644
--- a/services/audioflinger/SchedulingPolicyService.h
+++ b/media/utils/include/mediautils/SchedulingPolicyService.h
diff --git a/services/audioflinger/Android.mk b/services/audioflinger/Android.mk
index debcdf9..9b4ba79 100644
--- a/services/audioflinger/Android.mk
+++ b/services/audioflinger/Android.mk
@@ -3,17 +3,6 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
- ISchedulingPolicyService.cpp \
- SchedulingPolicyService.cpp
-
-# FIXME Move this library to frameworks/native
-LOCAL_MODULE := libscheduling_policy
-
-include $(BUILD_STATIC_LIBRARY)
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
ServiceUtilities.cpp
# FIXME Move this library to frameworks/native
@@ -64,10 +53,10 @@ LOCAL_SHARED_LIBRARIES := \
libeffects \
libpowermanager \
libserviceutility \
- libsonic
+ libsonic \
+ libmediautils
LOCAL_STATIC_LIBRARIES := \
- libscheduling_policy \
libcpustats \
libmedia_helper
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 9ec5802..fab1ef5 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -1352,12 +1352,16 @@ sp<AudioFlinger::PlaybackThread> AudioFlinger::getEffectThread_l(int sessionId,
AudioFlinger::Client::Client(const sp<AudioFlinger>& audioFlinger, pid_t pid)
: RefBase(),
mAudioFlinger(audioFlinger),
- // FIXME should be a "k" constant not hard-coded, in .h or ro. property, see 4 lines below
- mMemoryDealer(new MemoryDealer(1024*1024, "AudioFlinger::Client")),
mPid(pid),
mTimedTrackCount(0)
{
- // 1 MB of address space is good for 32 tracks, 8 buffers each, 4 KB/buffer
+ size_t heapSize = kClientSharedHeapSizeBytes;
+ // Increase the heap size on non-low-RAM devices to limit the risk of reconnection failure
+ // for invalidated tracks
+ if (!audioFlinger->isLowRamDevice()) {
+ heapSize *= kClientSharedHeapSizeMultiplier;
+ }
+ mMemoryDealer = new MemoryDealer(heapSize, "AudioFlinger::Client");
}
// Client destructor must be called with AudioFlinger::mClientLock held
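A hedged sketch of the sizing rule above, using the kClientSharedHeapSize* constants this change adds to AudioFlinger.h; the clientHeapSize() helper and the bool parameter standing in for isLowRamDevice() are assumptions for illustration.

#include <cstddef>
#include <cstdio>

static const size_t kClientSharedHeapSizeBytes = 1024 * 1024;  // 1 MiB baseline per client
static const size_t kClientSharedHeapSizeMultiplier = 4;       // non-low-RAM devices get 4 MiB

static size_t clientHeapSize(bool isLowRamDevice) {
    size_t heapSize = kClientSharedHeapSizeBytes;
    if (!isLowRamDevice) {
        heapSize *= kClientSharedHeapSizeMultiplier;
    }
    return heapSize;
}

int main() {
    printf("low-RAM: %zu bytes, regular: %zu bytes\n",
           clientHeapSize(true), clientHeapSize(false));
    return 0;
}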
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 20c34ef..08fa70d 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -88,6 +88,12 @@ class ServerProxy;
static const nsecs_t kDefaultStandbyTimeInNsecs = seconds(3);
+
+// Max shared memory size for audio tracks and audio records per client process
+static const size_t kClientSharedHeapSizeBytes = 1024*1024;
+// Shared memory size multiplier for non-low-RAM devices
+static const size_t kClientSharedHeapSizeMultiplier = 4;
+
#define INCLUDING_FROM_AUDIOFLINGER_H
class AudioFlinger :
@@ -423,7 +429,7 @@ private:
Client(const Client&);
Client& operator = (const Client&);
const sp<AudioFlinger> mAudioFlinger;
- const sp<MemoryDealer> mMemoryDealer;
+ sp<MemoryDealer> mMemoryDealer;
const pid_t mPid;
Mutex mTimedTrackLock;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 246f6ba..71fc498 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -60,7 +60,7 @@
#include "FastMixer.h"
#include "FastCapture.h"
#include "ServiceUtilities.h"
-#include "SchedulingPolicyService.h"
+#include "mediautils/SchedulingPolicyService.h"
#ifdef ADD_BATTERY_DATA
#include <media/IMediaPlayerService.h>
@@ -3479,6 +3479,12 @@ ssize_t AudioFlinger::MixerThread::threadLoop_write()
if (state->mCommand != FastMixerState::MIX_WRITE &&
(kUseFastMixer != FastMixer_Dynamic || state->mTrackMask > 1)) {
if (state->mCommand == FastMixerState::COLD_IDLE) {
+
+ // FIXME workaround for first HAL write being CPU bound on some devices
+ ATRACE_BEGIN("write");
+ mOutput->write((char *)mSinkBuffer, 0);
+ ATRACE_END();
+
int32_t old = android_atomic_inc(&mFastMixerFutex);
if (old == -1) {
(void) syscall(__NR_futex, &mFastMixerFutex, FUTEX_WAKE_PRIVATE, 1);
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index f7da209..0e24b52 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -432,7 +432,10 @@ AudioFlinger::PlaybackThread::Track::Track(
}
// only allocate a fast track index if we were able to allocate a normal track name
if (flags & IAudioFlinger::TRACK_FAST) {
- mAudioTrackServerProxy->framesReadyIsCalledByMultipleThreads();
+ // FIXME: Not calling framesReadyIsCalledByMultipleThreads() exposes a potential
+ // race with setSyncEvent(). However, if we call it, we cannot properly start
+ // static fast tracks (SoundPool) immediately after stopping.
+ //mAudioTrackServerProxy->framesReadyIsCalledByMultipleThreads();
ALOG_ASSERT(thread->mFastTrackAvailMask != 0);
int i = __builtin_ctz(thread->mFastTrackAvailMask);
ALOG_ASSERT(0 < i && i < (int)FastMixerState::kMaxFastTracks);
@@ -712,6 +715,7 @@ status_t AudioFlinger::PlaybackThread::Track::start(AudioSystem::sync_event_t ev
// But in this case we know the mixer thread (whether normal mixer or fast mixer)
// isn't looking at this track yet: we still hold the normal mixer thread lock,
// and for fast tracks the track is not yet in the fast mixer thread's active set.
+ // For static tracks, this is used to acknowledge change in position or loop.
ServerProxy::Buffer buffer;
buffer.mFrameCount = 1;
(void) mAudioTrackServerProxy->obtainBuffer(&buffer, true /*ackFlush*/);
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index 18bcfdb..48d09ed 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -56,9 +56,21 @@ public:
const struct audio_port_config *srcConfig = NULL) const;
virtual sp<AudioPort> getAudioPort() const { return mProfile; }
void toAudioPort(struct audio_port *port) const;
+ void setPreemptedSessions(const SortedVector<audio_session_t>& sessions);
+ SortedVector<audio_session_t> getPreemptedSessions() const;
+ bool hasPreemptedSession(audio_session_t session) const;
+ void clearPreemptedSessions();
private:
audio_port_handle_t mId;
+ // Because a preemptible capture session can preempt another one, we can end up in an endless
+ // loop where each session is allowed to restart after being preempted,
+ // thus preempting the other one, which restarts, and so on.
+ // To avoid this situation, we store which audio session was preempted when
+ // a particular input started and prevent preemption of this active input by this session.
+ // We also inherit sessions from the preempted input to avoid a 3-way preemption loop, etc.
+ SortedVector<audio_session_t> mPreemptedSessions;
+
};
class AudioInputCollection :
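A standalone sketch of the preemption-loop guard described in the comment above, using std:: types rather than the AudioPolicy classes (Input and mayPreempt() are illustrative names): when input B preempts input A, B remembers A's session, so A is later denied permission to preempt B back, which breaks the ping-pong.

#include <cstdio>
#include <set>

struct Input {
    int session;
    std::set<int> preemptedSessions;   // sessions this input preempted when it started
    bool hasPreempted(int s) const { return preemptedSessions.count(s) != 0; }
};

// Returns true if 'starting' may preempt 'active'.
static bool mayPreempt(const Input& starting, const Input& active) {
    return !active.hasPreempted(starting.session);
}

int main() {
    Input a{ /*session=*/1, {} };
    Input b{ /*session=*/2, {} };

    if (mayPreempt(b, a)) {
        b.preemptedSessions = a.preemptedSessions;   // inherit to avoid 3-way loops
        b.preemptedSessions.insert(a.session);
        printf("input for session 2 preempts session 1\n");
    }
    // Session 1 restarts and tries to preempt input for session 2: denied.
    printf("may session 1 preempt back? %s\n", mayPreempt(a, b) ? "yes" : "no");
    return 0;
}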
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 937160b..626fdae 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -93,6 +93,26 @@ void AudioInputDescriptor::toAudioPort(struct audio_port *port) const
port->ext.mix.latency_class = AUDIO_LATENCY_NORMAL;
}
+void AudioInputDescriptor::setPreemptedSessions(const SortedVector<audio_session_t>& sessions)
+{
+ mPreemptedSessions = sessions;
+}
+
+SortedVector<audio_session_t> AudioInputDescriptor::getPreemptedSessions() const
+{
+ return mPreemptedSessions;
+}
+
+bool AudioInputDescriptor::hasPreemptedSession(audio_session_t session) const
+{
+ return (mPreemptedSessions.indexOf(session) >= 0);
+}
+
+void AudioInputDescriptor::clearPreemptedSessions()
+{
+ mPreemptedSessions.clear();
+}
+
status_t AudioInputDescriptor::dump(int fd)
{
const size_t SIZE = 256;
diff --git a/services/audiopolicy/enginedefault/src/Gains.cpp b/services/audiopolicy/enginedefault/src/Gains.cpp
index 78f2909..d06365c 100644
--- a/services/audiopolicy/enginedefault/src/Gains.cpp
+++ b/services/audiopolicy/enginedefault/src/Gains.cpp
@@ -171,10 +171,10 @@ const VolumeCurvePoint *Gains::sVolumeProfiles[AUDIO_STREAM_CNT]
},
{ // AUDIO_STREAM_TTS
// "Transmitted Through Speaker": always silent except on DEVICE_CATEGORY_SPEAKER
- Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_HEADSET
- Gains::sLinearVolumeCurve, // DEVICE_CATEGORY_SPEAKER
- Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_EARPIECE
- Gains::sSilentVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA
+ Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_HEADSET
+ Gains::sFullScaleVolumeCurve, // DEVICE_CATEGORY_SPEAKER
+ Gains::sSilentVolumeCurve, // DEVICE_CATEGORY_EARPIECE
+ Gains::sSilentVolumeCurve // DEVICE_CATEGORY_EXT_MEDIA
},
{ // AUDIO_STREAM_ACCESSIBILITY
Gains::sDefaultMediaVolumeCurve, // DEVICE_CATEGORY_HEADSET
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index e7f6864..5ff1c0b 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -402,6 +402,20 @@ void AudioPolicyManager::updateCallRouting(audio_devices_t rxDevice, int delayMs
patch.num_sources = 2;
}
+ // terminate active capture if on the same HW module as the call TX source device
+ // FIXME: would be better to refine to only inputs whose profile connects to the
+ // call TX device but this information is not in the audio patch and logic here must be
+ // symmetric to the one in startInput()
+ audio_io_handle_t activeInput = mInputs.getActiveInput();
+ if (activeInput != 0) {
+ sp<AudioInputDescriptor> activeDesc = mInputs.valueFor(activeInput);
+ if (activeDesc->getModuleHandle() == txSourceDeviceDesc->getModuleHandle()) {
+ audio_session_t activeSession = activeDesc->mSessions.itemAt(0);
+ stopInput(activeInput, activeSession);
+ releaseInput(activeInput, activeSession);
+ }
+ }
+
afPatchHandle = AUDIO_PATCH_HANDLE_NONE;
status = mpClientInterface->createAudioPatch(&patch, &afPatchHandle, 0);
ALOGW_IF(status != NO_ERROR, "setPhoneState() error %d creating TX audio patch",
@@ -566,9 +580,15 @@ void AudioPolicyManager::setForceUse(audio_policy_force_use_t usage,
audio_io_handle_t activeInput = mInputs.getActiveInput();
if (activeInput != 0) {
- setInputDevice(activeInput, getNewInputDevice(activeInput));
+ sp<AudioInputDescriptor> activeDesc = mInputs.valueFor(activeInput);
+ audio_devices_t newDevice = getNewInputDevice(activeInput);
+ // Force new input selection if the new device can not be reached via current input
+ if (activeDesc->mProfile->mSupportedDevices.types() & (newDevice & ~AUDIO_DEVICE_BIT_IN)) {
+ setInputDevice(activeInput, newDevice);
+ } else {
+ closeInput(activeInput);
+ }
}
-
}
void AudioPolicyManager::setSystemProperty(const char* property, const char* value)
@@ -1066,7 +1086,7 @@ status_t AudioPolicyManager::startSource(sp<AudioOutputDescriptor> outputDesc,
*delayMs = 0;
if (stream == AUDIO_STREAM_TTS) {
ALOGV("\t found BEACON stream");
- if (mOutputs.isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) {
+ if (!mTtsOutputAvailable && mOutputs.isAnyOutputActive(AUDIO_STREAM_TTS /*streamToIgnore*/)) {
return INVALID_OPERATION;
} else {
beaconMuteLatency = handleEventForBeacon(STARTING_BEACON);
@@ -1485,15 +1505,29 @@ status_t AudioPolicyManager::startInput(audio_io_handle_t input,
// If the already active input uses AUDIO_SOURCE_HOTWORD then it is closed,
// otherwise the active input continues and the new input cannot be started.
sp<AudioInputDescriptor> activeDesc = mInputs.valueFor(activeInput);
- if (activeDesc->mInputSource == AUDIO_SOURCE_HOTWORD) {
+ if ((activeDesc->mInputSource == AUDIO_SOURCE_HOTWORD) &&
+ !activeDesc->hasPreemptedSession(session)) {
ALOGW("startInput(%d) preempting low-priority input %d", input, activeInput);
- stopInput(activeInput, activeDesc->mSessions.itemAt(0));
- releaseInput(activeInput, activeDesc->mSessions.itemAt(0));
+ audio_session_t activeSession = activeDesc->mSessions.itemAt(0);
+ SortedVector<audio_session_t> sessions = activeDesc->getPreemptedSessions();
+ sessions.add(activeSession);
+ inputDesc->setPreemptedSessions(sessions);
+ stopInput(activeInput, activeSession);
+ releaseInput(activeInput, activeSession);
} else {
ALOGE("startInput(%d) failed: other input %d already started", input, activeInput);
return INVALID_OPERATION;
}
}
+
+ // Do not allow capture if an active voice call is using a software patch and
+ // the call TX source device is on the same HW module.
+ // FIXME: would be better to refine to only inputs whose profile connects to the
+ // call TX device but this information is not in the audio patch
+ if (mCallTxPatch != 0 &&
+ inputDesc->getModuleHandle() == mCallTxPatch->mPatch.sources[0].ext.device.hw_module) {
+ return INVALID_OPERATION;
+ }
}
// Routing?
@@ -1592,6 +1626,7 @@ status_t AudioPolicyManager::stopInput(audio_io_handle_t input,
if (mInputs.activeInputsCount() == 0) {
SoundTrigger::setCaptureState(false);
}
+ inputDesc->clearPreemptedSessions();
}
return NO_ERROR;
}
@@ -1718,7 +1753,9 @@ status_t AudioPolicyManager::setStreamVolumeIndex(audio_stream_type_t stream,
status = volStatus;
}
}
- if ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & accessibilityDevice) != 0)) {
+ if ((accessibilityDevice != AUDIO_DEVICE_NONE) &&
+ ((device == AUDIO_DEVICE_OUT_DEFAULT) || ((curDevice & accessibilityDevice) != 0)))
+ {
status_t volStatus = checkAndSetVolume(AUDIO_STREAM_ACCESSIBILITY,
index, desc, curDevice);
}
@@ -2000,6 +2037,9 @@ status_t AudioPolicyManager::dump(int fd)
snprintf(buffer, SIZE, " Force use for hdmi system audio %d\n",
mEngine->getForceUse(AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO));
result.append(buffer);
+ snprintf(buffer, SIZE, " TTS output %s\n", mTtsOutputAvailable ? "available" : "not available");
+ result.append(buffer);
+
write(fd, result.string(), result.size());
mAvailableOutputDevices.dump(fd, String8("output"));
@@ -2680,7 +2720,8 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa
mAudioPortGeneration(1),
mBeaconMuteRefCount(0),
mBeaconPlayingRefCount(0),
- mBeaconMuted(false)
+ mBeaconMuted(false),
+ mTtsOutputAvailable(false)
{
audio_policy::EngineInstance *engineInstance = audio_policy::EngineInstance::getInstance();
if (!engineInstance) {
@@ -2737,6 +2778,9 @@ AudioPolicyManager::AudioPolicyManager(AudioPolicyClientInterface *clientInterfa
ALOGW("Output profile contains no device on module %s", mHwModules[i]->mName);
continue;
}
+ if ((outProfile->mFlags & AUDIO_OUTPUT_FLAG_TTS) != 0) {
+ mTtsOutputAvailable = true;
+ }
if ((outProfile->mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
continue;
@@ -4036,6 +4080,12 @@ void AudioPolicyManager::handleNotificationRoutingForStream(audio_stream_type_t
}
uint32_t AudioPolicyManager::handleEventForBeacon(int event) {
+
+ // skip beacon mute management if a dedicated TTS output is available
+ if (mTtsOutputAvailable) {
+ return 0;
+ }
+
switch(event) {
case STARTING_OUTPUT:
mBeaconMuteRefCount++;
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index bf3ae4a..bbdf396 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -548,6 +548,7 @@ protected:
uint32_t mBeaconMuteRefCount; // ref count for stream that would mute beacon
uint32_t mBeaconPlayingRefCount;// ref count for the playing beacon streams
bool mBeaconMuted; // has STREAM_TTS been muted
+ bool mTtsOutputAvailable; // true if a dedicated output for TTS stream is available
AudioPolicyMixCollection mPolicyMixes; // list of registered mixes
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index e8ef24e..45900c4 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -53,7 +53,7 @@ LOCAL_SRC_FILES:= \
device3/StatusTracker.cpp \
gui/RingBufferConsumer.cpp \
utils/CameraTraces.cpp \
- utils/AutoConditionLock.cpp \
+ utils/AutoConditionLock.cpp
LOCAL_SHARED_LIBRARIES:= \
libui \
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 280bb9d..406c1c4 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -99,7 +99,8 @@ status_t CameraFlashlight::createFlashlightControl(const String8& cameraId) {
status_t CameraFlashlight::setTorchMode(const String8& cameraId, bool enabled) {
if (!mFlashlightMapInitialized) {
- ALOGE("%s: findFlashUnits() must be called before this method.");
+ ALOGE("%s: findFlashUnits() must be called before this method.",
+ __FUNCTION__);
return NO_INIT;
}
@@ -200,7 +201,8 @@ bool CameraFlashlight::hasFlashUnit(const String8& cameraId) {
bool CameraFlashlight::hasFlashUnitLocked(const String8& cameraId) {
if (!mFlashlightMapInitialized) {
- ALOGE("%s: findFlashUnits() must be called before this method.");
+ ALOGE("%s: findFlashUnits() must be called before this method.",
+ __FUNCTION__);
return false;
}
@@ -219,7 +221,8 @@ status_t CameraFlashlight::prepareDeviceOpen(const String8& cameraId) {
Mutex::Autolock l(mLock);
if (!mFlashlightMapInitialized) {
- ALOGE("%s: findFlashUnits() must be called before this method.");
+ ALOGE("%s: findFlashUnits() must be called before this method.",
+ __FUNCTION__);
return NO_INIT;
}
@@ -256,7 +259,8 @@ status_t CameraFlashlight::deviceClosed(const String8& cameraId) {
Mutex::Autolock l(mLock);
if (!mFlashlightMapInitialized) {
- ALOGE("%s: findFlashUnits() must be called before this method.");
+ ALOGE("%s: findFlashUnits() must be called before this method.",
+ __FUNCTION__);
return NO_INIT;
}
@@ -878,6 +882,7 @@ status_t CameraHardwareInterfaceFlashControl::disconnectCameraDevice() {
}
mDevice->setPreviewWindow(NULL);
mDevice->release();
+ mDevice = NULL;
return OK;
}
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 8391f26..7c4594f 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -15,6 +15,7 @@
*/
#define LOG_TAG "CameraService"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
#include <algorithm>
@@ -33,7 +34,6 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <binder/ProcessInfoService.h>
-#include <camera/ICameraServiceProxy.h>
#include <cutils/atomic.h>
#include <cutils/properties.h>
#include <gui/Surface.h>
@@ -157,7 +157,6 @@ void CameraService::onFirstRef()
}
mModule = new CameraModule(rawModule);
- ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
err = mModule->init();
if (err != OK) {
ALOGE("Could not initialize camera HAL module: %d (%s)", err,
@@ -169,10 +168,18 @@ void CameraService::onFirstRef()
mModule = nullptr;
return;
}
+ ALOGI("Loaded \"%s\" camera module", mModule->getModuleName());
mNumberOfCameras = mModule->getNumberOfCameras();
mNumberOfNormalCameras = mNumberOfCameras;
+ // Setup vendor tags before we call get_camera_info the first time
+ // because HAL might need to setup static vendor keys in get_camera_info
+ VendorTagDescriptor::clearGlobalVendorTagDescriptor();
+ if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_2) {
+ setUpVendorTags();
+ }
+
mFlashlight = new CameraFlashlight(*mModule, *this);
status_t res = mFlashlight->findFlashUnits();
if (res) {
@@ -239,24 +246,24 @@ void CameraService::onFirstRef()
mModule->setCallbacks(this);
}
- VendorTagDescriptor::clearGlobalVendorTagDescriptor();
-
- if (mModule->getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_2) {
- setUpVendorTags();
- }
-
CameraDeviceFactory::registerService(this);
CameraService::pingCameraServiceProxy();
}
-void CameraService::pingCameraServiceProxy() {
+sp<ICameraServiceProxy> CameraService::getCameraServiceProxy() {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.camera.proxy"));
if (binder == nullptr) {
- return;
+ return nullptr;
}
sp<ICameraServiceProxy> proxyBinder = interface_cast<ICameraServiceProxy>(binder);
+ return proxyBinder;
+}
+
+void CameraService::pingCameraServiceProxy() {
+ sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ if (proxyBinder == nullptr) return;
proxyBinder->pingForUserUpdate();
}
@@ -308,8 +315,10 @@ void CameraService::onDeviceStatusChanged(camera_device_status_t cameraId,
clientToDisconnect = removeClientLocked(id);
// Notify the client of disconnection
- clientToDisconnect->notifyError(ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
- CaptureResultExtras{});
+ if (clientToDisconnect != nullptr) {
+ clientToDisconnect->notifyError(ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
+ CaptureResultExtras{});
+ }
}
ALOGI("%s: Client for camera ID %s evicted due to device status change from HAL",
@@ -398,10 +407,12 @@ void CameraService::onTorchStatusChangedLocked(const String8& cameraId,
}
int32_t CameraService::getNumberOfCameras() {
+ ATRACE_CALL();
return getNumberOfCameras(CAMERA_TYPE_BACKWARD_COMPATIBLE);
}
int32_t CameraService::getNumberOfCameras(int type) {
+ ATRACE_CALL();
switch (type) {
case CAMERA_TYPE_BACKWARD_COMPATIBLE:
return mNumberOfNormalCameras;
@@ -416,6 +427,7 @@ int32_t CameraService::getNumberOfCameras(int type) {
status_t CameraService::getCameraInfo(int cameraId,
struct CameraInfo* cameraInfo) {
+ ATRACE_CALL();
if (!mModule) {
return -ENODEV;
}
@@ -443,6 +455,7 @@ int CameraService::cameraIdToInt(const String8& cameraId) {
}
status_t CameraService::generateShimMetadata(int cameraId, /*out*/CameraMetadata* cameraInfo) {
+ ATRACE_CALL();
status_t ret = OK;
struct CameraInfo info;
if ((ret = getCameraInfo(cameraId, &info)) != OK) {
@@ -529,6 +542,7 @@ status_t CameraService::generateShimMetadata(int cameraId, /*out*/CameraMetadata
status_t CameraService::getCameraCharacteristics(int cameraId,
CameraMetadata* cameraInfo) {
+ ATRACE_CALL();
if (!cameraInfo) {
ALOGE("%s: cameraInfo is NULL", __FUNCTION__);
return BAD_VALUE;
@@ -597,10 +611,16 @@ int CameraService::getCameraPriorityFromProcState(int procState) {
procState);
return -1;
}
+ // Treat sleeping TOP processes the same as regular TOP processes, for
+ // access priority. This is important for lock-screen camera launch scenarios
+ if (procState == PROCESS_STATE_TOP_SLEEPING) {
+ procState = PROCESS_STATE_TOP;
+ }
return INT_MAX - procState;
}
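A hedged, self-contained sketch of the priority mapping above; the procState constants mirror the ones added to CameraService.h in this change, while the function and main() scaffolding are assumptions for illustration.

#include <climits>
#include <cstdio>

static const int PROCESS_STATE_TOP = 2;
static const int PROCESS_STATE_TOP_SLEEPING = 5;

static int cameraPriorityFromProcState(int procState) {
    if (procState < 0) return -1;                  // nonexistent process
    if (procState == PROCESS_STATE_TOP_SLEEPING) { // lock-screen camera launch case
        procState = PROCESS_STATE_TOP;
    }
    return INT_MAX - procState;                    // lower procState => higher priority
}

int main() {
    printf("TOP: %d, TOP_SLEEPING: %d\n",
           cameraPriorityFromProcState(PROCESS_STATE_TOP),
           cameraPriorityFromProcState(PROCESS_STATE_TOP_SLEEPING));
    return 0;
}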
status_t CameraService::getCameraVendorTagDescriptor(/*out*/sp<VendorTagDescriptor>& desc) {
+ ATRACE_CALL();
if (!mModule) {
ALOGE("%s: camera hardware module doesn't exist", __FUNCTION__);
return -ENODEV;
@@ -611,6 +631,7 @@ status_t CameraService::getCameraVendorTagDescriptor(/*out*/sp<VendorTagDescript
}
int CameraService::getDeviceVersion(int cameraId, int* facing) {
+ ATRACE_CALL();
struct camera_info info;
if (mModule->getCameraInfo(cameraId, &info) != OK) {
return -1;
@@ -642,6 +663,7 @@ status_t CameraService::filterGetInfoErrorCode(status_t err) {
}
bool CameraService::setUpVendorTags() {
+ ATRACE_CALL();
vendor_tag_ops_t vOps = vendor_tag_ops_t();
// Check if vendor operations have been implemented
@@ -650,9 +672,7 @@ bool CameraService::setUpVendorTags() {
return false;
}
- ATRACE_BEGIN("camera3->get_metadata_vendor_tag_ops");
mModule->getVendorTagOps(&vOps);
- ATRACE_END();
// Ensure all vendor operations are present
if (vOps.get_tag_count == NULL || vOps.get_all_tags == NULL ||
@@ -935,6 +955,16 @@ void CameraService::finishConnectLocked(const sp<BasicClient>& client,
LOG_ALWAYS_FATAL("%s: Invalid state for CameraService, clients not evicted properly",
__FUNCTION__);
}
+
+ // And register a death notification for the client callback. Do
+ // this last to avoid Binder policy where a nested Binder
+ // transaction might be pre-empted to service the client death
+ // notification if the client process dies before linkToDeath is
+ // invoked.
+ sp<IBinder> remoteCallback = client->getRemote();
+ if (remoteCallback != nullptr) {
+ remoteCallback->linkToDeath(this);
+ }
}
status_t CameraService::handleEvictionsLocked(const String8& cameraId, int clientPid,
@@ -942,7 +972,7 @@ status_t CameraService::handleEvictionsLocked(const String8& cameraId, int clien
/*out*/
sp<BasicClient>* client,
std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial) {
-
+ ATRACE_CALL();
status_t ret = NO_ERROR;
std::vector<DescriptorPtr> evictedClients;
DescriptorPtr clientDescriptor;
@@ -1131,6 +1161,7 @@ status_t CameraService::connect(
/*out*/
sp<ICamera>& device) {
+ ATRACE_CALL();
status_t ret = NO_ERROR;
String8 id = String8::format("%d", cameraId);
sp<Client> client = nullptr;
@@ -1155,6 +1186,7 @@ status_t CameraService::connectLegacy(
/*out*/
sp<ICamera>& device) {
+ ATRACE_CALL();
String8 id = String8::format("%d", cameraId);
int apiVersion = mModule->getModuleApiVersion();
if (halVersion != CAMERA_HAL_API_VERSION_UNSPECIFIED &&
@@ -1195,6 +1227,7 @@ status_t CameraService::connectDevice(
/*out*/
sp<ICameraDeviceUser>& device) {
+ ATRACE_CALL();
status_t ret = NO_ERROR;
String8 id = String8::format("%d", cameraId);
sp<CameraDeviceClient> client = nullptr;
@@ -1214,6 +1247,8 @@ status_t CameraService::connectDevice(
status_t CameraService::setTorchMode(const String16& cameraId, bool enabled,
const sp<IBinder>& clientBinder) {
+
+ ATRACE_CALL();
if (enabled && clientBinder == nullptr) {
ALOGE("%s: torch client binder is NULL", __FUNCTION__);
return -EINVAL;
@@ -1302,6 +1337,8 @@ status_t CameraService::setTorchMode(const String16& cameraId, bool enabled,
}
void CameraService::notifySystemEvent(int32_t eventId, const int32_t* args, size_t length) {
+ ATRACE_CALL();
+
switch(eventId) {
case ICameraService::USER_SWITCHED: {
doUserSwitch(/*newUserIds*/args, /*length*/length);
@@ -1317,6 +1354,8 @@ void CameraService::notifySystemEvent(int32_t eventId, const int32_t* args, size
}
status_t CameraService::addListener(const sp<ICameraServiceListener>& listener) {
+ ATRACE_CALL();
+
ALOGV("%s: Add listener %p", __FUNCTION__, listener.get());
if (listener == nullptr) {
@@ -1365,6 +1404,8 @@ status_t CameraService::addListener(const sp<ICameraServiceListener>& listener)
}
status_t CameraService::removeListener(const sp<ICameraServiceListener>& listener) {
+ ATRACE_CALL();
+
ALOGV("%s: Remove listener %p", __FUNCTION__, listener.get());
if (listener == 0) {
@@ -1391,6 +1432,8 @@ status_t CameraService::removeListener(const sp<ICameraServiceListener>& listene
}
status_t CameraService::getLegacyParameters(int cameraId, /*out*/String16* parameters) {
+
+ ATRACE_CALL();
ALOGV("%s: for camera ID = %d", __FUNCTION__, cameraId);
if (parameters == NULL) {
@@ -1415,6 +1458,8 @@ status_t CameraService::getLegacyParameters(int cameraId, /*out*/String16* param
}
status_t CameraService::supportsCameraApi(int cameraId, int apiVersion) {
+ ATRACE_CALL();
+
ALOGV("%s: for camera ID = %d", __FUNCTION__, cameraId);
switch (apiVersion) {
@@ -1782,12 +1827,15 @@ MediaPlayer* CameraService::newMediaPlayer(const char *file) {
}
void CameraService::loadSound() {
+ ATRACE_CALL();
+
Mutex::Autolock lock(mSoundLock);
LOG1("CameraService::loadSound ref=%d", mSoundRef);
if (mSoundRef++) return;
mSoundPlayer[SOUND_SHUTTER] = newMediaPlayer("/system/media/audio/ui/camera_click.ogg");
- mSoundPlayer[SOUND_RECORDING] = newMediaPlayer("/system/media/audio/ui/VideoRecord.ogg");
+ mSoundPlayer[SOUND_RECORDING_START] = newMediaPlayer("/system/media/audio/ui/VideoRecord.ogg");
+ mSoundPlayer[SOUND_RECORDING_STOP] = newMediaPlayer("/system/media/audio/ui/VideoStop.ogg");
}
void CameraService::releaseSound() {
@@ -1804,6 +1852,8 @@ void CameraService::releaseSound() {
}
void CameraService::playSound(sound_kind kind) {
+ ATRACE_CALL();
+
LOG1("playSound(%d)", kind);
Mutex::Autolock lock(mSoundLock);
sp<MediaPlayer> player = mSoundPlayer[kind];
@@ -1874,11 +1924,9 @@ CameraService::BasicClient::~BasicClient() {
void CameraService::BasicClient::disconnect() {
if (mDisconnected) {
- ALOGE("%s: Disconnect called on already disconnected client for device %d", __FUNCTION__,
- mCameraId);
return;
}
- mDisconnected = true;;
+ mDisconnected = true;
mCameraService->removeByClient(this);
mCameraService->logDisconnected(String8::format("%d", mCameraId), mClientPid,
@@ -1923,6 +1971,8 @@ bool CameraService::BasicClient::canCastToApiClient(apiLevel level) const {
}
status_t CameraService::BasicClient::startCameraOps() {
+ ATRACE_CALL();
+
int32_t res;
// Notify app ops that the camera is not available
mOpsCallback = new OpsCallback(this);
@@ -1956,10 +2006,16 @@ status_t CameraService::BasicClient::startCameraOps() {
mCameraService->updateStatus(ICameraServiceListener::STATUS_NOT_AVAILABLE,
String8::format("%d", mCameraId));
+ // Transition device state to OPEN
+ mCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_OPEN,
+ String8::format("%d", mCameraId));
+
return OK;
}
status_t CameraService::BasicClient::finishCameraOps() {
+ ATRACE_CALL();
+
// Check if startCameraOps succeeded, and if so, finish the camera op
if (mOpsActive) {
// Notify app ops that the camera is available again
@@ -1974,6 +2030,10 @@ status_t CameraService::BasicClient::finishCameraOps() {
mCameraService->updateStatus(ICameraServiceListener::STATUS_PRESENT,
String8::format("%d", mCameraId), rejected);
+ // Transition device state to CLOSED
+ mCameraService->updateProxyDeviceState(ICameraServiceProxy::CAMERA_STATE_CLOSED,
+ String8::format("%d", mCameraId));
+
// Notify flashlight that a camera device is closed.
mCameraService->mFlashlight->deviceClosed(
String8::format("%d", mCameraId));
@@ -1988,6 +2048,8 @@ status_t CameraService::BasicClient::finishCameraOps() {
}
void CameraService::BasicClient::opChanged(int32_t op, const String16& packageName) {
+ ATRACE_CALL();
+
String8 name(packageName);
String8 myName(mClientPackageName);
@@ -2032,7 +2094,11 @@ sp<CameraService::Client> CameraService::Client::getClientFromCookie(void* user)
void CameraService::Client::notifyError(ICameraDeviceCallbacks::CameraErrorCode errorCode,
const CaptureResultExtras& resultExtras) {
- mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
+ if (mRemoteCallback != NULL) {
+ mRemoteCallback->notifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_RELEASED, 0);
+ } else {
+ ALOGE("mRemoteCallback is NULL!!");
+ }
}
// NOTE: function is idempotent
@@ -2211,9 +2277,11 @@ static bool tryLock(Mutex& mutex)
}
status_t CameraService::dump(int fd, const Vector<String16>& args) {
+ ATRACE_CALL();
+
String8 result("Dump of the Camera Service:\n");
if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
- result.appendFormat("Permission Denial: "
+ result = result.format("Permission Denial: "
"can't dump CameraService from pid=%d, uid=%d\n",
getCallingPid(),
getCallingUid());
@@ -2474,6 +2542,14 @@ void CameraService::updateStatus(ICameraServiceListener::Status status, const St
});
}
+void CameraService::updateProxyDeviceState(ICameraServiceProxy::CameraState newState,
+ const String8& cameraId) {
+ sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+ if (proxyBinder == nullptr) return;
+ String16 id(cameraId);
+ proxyBinder->notifyCameraState(id, newState);
+}
+
status_t CameraService::getTorchStatusLocked(
const String8& cameraId,
ICameraServiceListener::TorchStatus *status) const {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 3905d62..d2c1bd3 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -24,6 +24,7 @@
#include <binder/BinderService.h>
#include <binder/IAppOpsCallback.h>
#include <camera/ICameraService.h>
+#include <camera/ICameraServiceProxy.h>
#include <hardware/camera.h>
#include <camera/ICamera.h>
@@ -74,6 +75,8 @@ public:
// Process state (mirrors frameworks/base/core/java/android/app/ActivityManager.java)
static const int PROCESS_STATE_NONEXISTENT = -1;
+ static const int PROCESS_STATE_TOP = 2;
+ static const int PROCESS_STATE_TOP_SLEEPING = 5;
// 3 second busy timeout when other clients are connecting
static const nsecs_t DEFAULT_CONNECT_TIMEOUT_NS = 3000000000;
@@ -159,7 +162,8 @@ public:
enum sound_kind {
SOUND_SHUTTER = 0,
- SOUND_RECORDING = 1,
+ SOUND_RECORDING_START = 1,
+ SOUND_RECORDING_STOP = 2,
NUM_SOUNDS
};
@@ -167,6 +171,14 @@ public:
void playSound(sound_kind kind);
void releaseSound();
+ /**
+ * Update the state of a given camera device (open/close/active/idle) with
+ * the camera proxy service in the system service
+ */
+ static void updateProxyDeviceState(
+ ICameraServiceProxy::CameraState newState,
+ const String8& cameraId);
+
/////////////////////////////////////////////////////////////////////
// CameraDeviceFactory functionality
int getDeviceVersion(int cameraId, int* facing = NULL);
@@ -736,6 +748,7 @@ private:
static String8 toString(std::set<userid_t> intSet);
+ static sp<ICameraServiceProxy> getCameraServiceProxy();
static void pingCameraServiceProxy();
};
@@ -867,11 +880,6 @@ status_t CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const String
return ret;
}
- sp<IBinder> remoteCallback = client->getRemote();
- if (remoteCallback != nullptr) {
- remoteCallback->linkToDeath(this);
- }
-
// Update shim parameters for legacy clients
if (effectiveApiLevel == API_1) {
// Assume we have always received a Client subclass for API1
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 1695309..fbd4034 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -1044,7 +1044,7 @@ status_t Camera2Client::startRecordingL(Parameters &params, bool restart) {
}
if (!restart) {
- mCameraService->playSound(CameraService::SOUND_RECORDING);
+ mCameraService->playSound(CameraService::SOUND_RECORDING_START);
mStreamingProcessor->updateRecordingRequest(params);
if (res != OK) {
ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)",
@@ -1216,7 +1216,7 @@ void Camera2Client::stopRecording() {
return;
};
- mCameraService->playSound(CameraService::SOUND_RECORDING);
+ mCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
// Remove recording stream to prevent it from slowing down takePicture later
if (!l.mParameters.recordingHint && l.mParameters.isJpegSizeOverridden()) {
@@ -1642,7 +1642,7 @@ status_t Camera2Client::commandEnableShutterSoundL(bool enable) {
}
status_t Camera2Client::commandPlayRecordingSoundL() {
- mCameraService->playSound(CameraService::SOUND_RECORDING);
+ mCameraService->playSound(CameraService::SOUND_RECORDING_START);
return OK;
}
@@ -1916,6 +1916,8 @@ void Camera2Client::notifyShutter(const CaptureResultExtras& resultExtras,
ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
__FUNCTION__, resultExtras.requestId, timestamp);
mCaptureSequencer->notifyShutter(resultExtras, timestamp);
+
+ Camera2ClientBase::notifyShutter(resultExtras, timestamp);
}
camera2::SharedParameters& Camera2Client::getParameters() {
diff --git a/services/camera/libcameraservice/api1/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index 9e6ed4e..6020e35 100644
--- a/services/camera/libcameraservice/api1/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -255,6 +255,9 @@ void CameraClient::disconnect() {
// Turn off all messages.
disableMsgType(CAMERA_MSG_ALL_MSGS);
mHardware->stopPreview();
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
mHardware->cancelPicture();
// Release the hardware resources.
mHardware->release();
@@ -413,7 +416,11 @@ status_t CameraClient::startPreviewMode() {
}
mHardware->setPreviewWindow(mPreviewWindow);
result = mHardware->startPreview();
-
+ if (result == NO_ERROR) {
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_ACTIVE,
+ String8::format("%d", mCameraId));
+ }
return result;
}
@@ -436,7 +443,7 @@ status_t CameraClient::startRecordingMode() {
// start recording mode
enableMsgType(CAMERA_MSG_VIDEO_FRAME);
- mCameraService->playSound(CameraService::SOUND_RECORDING);
+ mCameraService->playSound(CameraService::SOUND_RECORDING_START);
result = mHardware->startRecording();
if (result != NO_ERROR) {
ALOGE("mHardware->startRecording() failed with status %d", result);
@@ -453,7 +460,9 @@ void CameraClient::stopPreview() {
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
mHardware->stopPreview();
-
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
mPreviewBuffer.clear();
}
@@ -465,7 +474,7 @@ void CameraClient::stopRecording() {
disableMsgType(CAMERA_MSG_VIDEO_FRAME);
mHardware->stopRecording();
- mCameraService->playSound(CameraService::SOUND_RECORDING);
+ mCameraService->playSound(CameraService::SOUND_RECORDING_STOP);
mPreviewBuffer.clear();
}
@@ -643,7 +652,7 @@ status_t CameraClient::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
}
return OK;
} else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
- mCameraService->playSound(CameraService::SOUND_RECORDING);
+ mCameraService->playSound(CameraService::SOUND_RECORDING_START);
} else if (cmd == CAMERA_CMD_SET_VIDEO_BUFFER_COUNT) {
// Silently ignore this command
return INVALID_OPERATION;
@@ -794,6 +803,12 @@ void CameraClient::handleShutter(void) {
}
disableMsgType(CAMERA_MSG_SHUTTER);
+ // Shutters only happen in response to takePicture, so mark device as
+ // idle now, until preview is restarted
+ mCameraService->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", mCameraId));
+
mLock.unlock();
}
diff --git a/services/camera/libcameraservice/api1/client2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 442eb75..44447b4 100644
--- a/services/camera/libcameraservice/api1/client2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
@@ -214,8 +214,8 @@ status_t Parameters::initialize(const CameraMetadata *info, int deviceVersion) {
supportedPreviewFormats);
}
- previewFpsRange[0] = availableFpsRanges.data.i32[0];
- previewFpsRange[1] = availableFpsRanges.data.i32[1];
+ previewFpsRange[0] = fastInfo.bestStillCaptureFpsRange[0];
+ previewFpsRange[1] = fastInfo.bestStillCaptureFpsRange[1];
// PREVIEW_FRAME_RATE / SUPPORTED_PREVIEW_FRAME_RATES are deprecated, but
// still have to do something sane for them
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 84c0c3e..bd9fea3 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -719,6 +719,43 @@ status_t CameraDeviceClient::prepare(int streamId) {
return res;
}
+status_t CameraDeviceClient::prepare2(int maxCount, int streamId) {
+ ATRACE_CALL();
+ ALOGV("%s", __FUNCTION__);
+
+ status_t res = OK;
+ if ( (res = checkPid(__FUNCTION__) ) != OK) return res;
+
+ Mutex::Autolock icl(mBinderSerializationLock);
+
+ // Guard against trying to prepare non-created streams
+ ssize_t index = NAME_NOT_FOUND;
+ for (size_t i = 0; i < mStreamMap.size(); ++i) {
+ if (streamId == mStreamMap.valueAt(i)) {
+ index = i;
+ break;
+ }
+ }
+
+ if (index == NAME_NOT_FOUND) {
+ ALOGW("%s: Camera %d: Invalid stream ID (%d) specified, no stream created yet",
+ __FUNCTION__, mCameraId, streamId);
+ return BAD_VALUE;
+ }
+
+ if (maxCount <= 0) {
+ ALOGE("%s: Camera %d: Invalid maxCount (%d) specified, must be greater than 0.",
+ __FUNCTION__, mCameraId, maxCount);
+ return BAD_VALUE;
+ }
+
+ // Also returns BAD_VALUE if the stream ID was not valid, or the stream has
+ // already been used
+ res = mDevice->prepare(maxCount, streamId);
+
+ return res;
+}
+
status_t CameraDeviceClient::tearDown(int streamId) {
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
@@ -802,6 +839,7 @@ void CameraDeviceClient::notifyIdle() {
if (remoteCb != 0) {
remoteCb->onDeviceIdle();
}
+ Camera2ClientBase::notifyIdle();
}
void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
@@ -811,6 +849,7 @@ void CameraDeviceClient::notifyShutter(const CaptureResultExtras& resultExtras,
if (remoteCb != 0) {
remoteCb->onCaptureStarted(resultExtras, timestamp);
}
+ Camera2ClientBase::notifyShutter(resultExtras, timestamp);
}
void CameraDeviceClient::notifyPrepared(int streamId) {
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 486e68b..b1d1762 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -114,6 +114,9 @@ public:
// Tear down stream resources by freeing its unused buffers
virtual status_t tearDown(int streamId);
+ // Prepare stream by preallocating up to maxCount of its buffers
+ virtual status_t prepare2(int maxCount, int streamId);
+
/**
* Interface used by CameraService
*/
@@ -191,6 +194,7 @@ private:
Vector<int> mStreamingRequestList;
int32_t mRequestIdCounter;
+
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index fdb801e..c7de56a 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -55,7 +55,8 @@ Camera2ClientBase<TClientBase>::Camera2ClientBase(
TClientBase(cameraService, remoteCallback, clientPackageName,
cameraId, cameraFacing, clientPid, clientUid, servicePid),
mSharedCameraCallbacks(remoteCallback),
- mDeviceVersion(cameraService->getDeviceVersion(cameraId))
+ mDeviceVersion(cameraService->getDeviceVersion(cameraId)),
+ mDeviceActive(false)
{
ALOGI("Camera %d: Opened. Client: %s (PID %d, UID %d)", cameraId,
String8(clientPackageName).string(), clientPid, clientUid);
@@ -235,6 +236,13 @@ void Camera2ClientBase<TClientBase>::notifyError(
template <typename TClientBase>
void Camera2ClientBase<TClientBase>::notifyIdle() {
+ if (mDeviceActive) {
+ getCameraService()->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_IDLE,
+ String8::format("%d", TClientBase::mCameraId));
+ }
+ mDeviceActive = false;
+
ALOGV("Camera device is now idle");
}
@@ -244,6 +252,13 @@ void Camera2ClientBase<TClientBase>::notifyShutter(const CaptureResultExtras& re
(void)resultExtras;
(void)timestamp;
+ if (!mDeviceActive) {
+ getCameraService()->updateProxyDeviceState(
+ ICameraServiceProxy::CAMERA_STATE_ACTIVE,
+ String8::format("%d", TClientBase::mCameraId));
+ }
+ mDeviceActive = true;
+
ALOGV("%s: Shutter notification for request id %" PRId32 " at time %" PRId64,
__FUNCTION__, resultExtras.requestId, timestamp);
}
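A simplified, standalone illustration (not framework code) of the edge-triggered reporting the new mDeviceActive flag enables: the proxy only hears about ACTIVE/IDLE transitions, not about every shutter or idle callback.

    #include <cstdio>

    // Standalone sketch; printf stands in for the real proxy notification.
    class DeviceStateReporter {
    public:
        void onShutter() {                        // called for every captured frame
            if (!mActive) notifyProxy("ACTIVE");  // report only the first one after idle
            mActive = true;
        }
        void onIdle() {                           // called when the device drains
            if (mActive) notifyProxy("IDLE");     // report only if we were active
            mActive = false;
        }
    private:
        void notifyProxy(const char* state) { std::printf("camera -> %s\n", state); }
        bool mActive = false;                     // mirrors mDeviceActive above
    };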
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index d66e11c..4568af0 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -136,6 +136,8 @@ protected:
status_t checkPid(const char *checkLocation) const;
virtual void detachDevice();
+
+ bool mDeviceActive;
};
}; // namespace android
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index cd25949..7b083a3 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -294,6 +294,12 @@ class CameraDeviceBase : public virtual RefBase {
virtual status_t tearDown(int streamId) = 0;
/**
+ * Prepare stream by preallocating up to maxCount buffers for it asynchronously.
+ * Calls notifyPrepared() once allocation is complete.
+ */
+ virtual status_t prepare(int maxCount, int streamId) = 0;
+
+ /**
* Get the HAL device version.
*/
virtual uint32_t getDeviceVersion() = 0;
diff --git a/services/camera/libcameraservice/common/CameraModule.cpp b/services/camera/libcameraservice/common/CameraModule.cpp
index 6a4dfe0..16b8aba 100644
--- a/services/camera/libcameraservice/common/CameraModule.cpp
+++ b/services/camera/libcameraservice/common/CameraModule.cpp
@@ -15,14 +15,18 @@
*/
#define LOG_TAG "CameraModule"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
+#include <utils/Trace.h>
+
#include "CameraModule.h"
namespace android {
void CameraModule::deriveCameraCharacteristicsKeys(
uint32_t deviceVersion, CameraMetadata &chars) {
+ ATRACE_CALL();
// HAL1 devices should not reach here
if (deviceVersion < CAMERA_DEVICE_API_VERSION_2_0) {
ALOGV("%s: Cannot derive keys for HAL version < 2.0");
@@ -150,9 +154,7 @@ CameraModule::CameraModule(camera_module_t *module) {
ALOGE("%s: camera hardware module must not be null", __FUNCTION__);
assert(0);
}
-
mModule = module;
- mCameraInfoMap.setCapacity(getNumberOfCameras());
}
CameraModule::~CameraModule()
@@ -168,14 +170,20 @@ CameraModule::~CameraModule()
}
int CameraModule::init() {
+ ATRACE_CALL();
+ int res = OK;
if (getModuleApiVersion() >= CAMERA_MODULE_API_VERSION_2_4 &&
mModule->init != NULL) {
- return mModule->init();
+ ATRACE_BEGIN("camera_module->init");
+ res = mModule->init();
+ ATRACE_END();
}
- return OK;
+ mCameraInfoMap.setCapacity(getNumberOfCameras());
+ return res;
}
int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
+ ATRACE_CALL();
Mutex::Autolock lock(mCameraInfoLock);
if (cameraId < 0) {
ALOGE("%s: Invalid camera ID %d", __FUNCTION__, cameraId);
@@ -185,14 +193,20 @@ int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
// Only override static_camera_characteristics for API2 devices
int apiVersion = mModule->common.module_api_version;
if (apiVersion < CAMERA_MODULE_API_VERSION_2_0) {
- return mModule->get_camera_info(cameraId, info);
+ int ret;
+ ATRACE_BEGIN("camera_module->get_camera_info");
+ ret = mModule->get_camera_info(cameraId, info);
+ ATRACE_END();
+ return ret;
}
ssize_t index = mCameraInfoMap.indexOfKey(cameraId);
if (index == NAME_NOT_FOUND) {
// Get camera info from raw module and cache it
camera_info rawInfo, cameraInfo;
+ ATRACE_BEGIN("camera_module->get_camera_info");
int ret = mModule->get_camera_info(cameraId, &rawInfo);
+ ATRACE_END();
if (ret != 0) {
return ret;
}
@@ -217,20 +231,36 @@ int CameraModule::getCameraInfo(int cameraId, struct camera_info *info) {
}
int CameraModule::open(const char* id, struct hw_device_t** device) {
- return filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device));
+ int res;
+ ATRACE_BEGIN("camera_module->open");
+ res = filterOpenErrorCode(mModule->common.methods->open(&mModule->common, id, device));
+ ATRACE_END();
+ return res;
}
int CameraModule::openLegacy(
const char* id, uint32_t halVersion, struct hw_device_t** device) {
- return mModule->open_legacy(&mModule->common, id, halVersion, device);
+ int res;
+ ATRACE_BEGIN("camera_module->open_legacy");
+ res = mModule->open_legacy(&mModule->common, id, halVersion, device);
+ ATRACE_END();
+ return res;
}
int CameraModule::getNumberOfCameras() {
- return mModule->get_number_of_cameras();
+ int numCameras;
+ ATRACE_BEGIN("camera_module->get_number_of_cameras");
+ numCameras = mModule->get_number_of_cameras();
+ ATRACE_END();
+ return numCameras;
}
int CameraModule::setCallbacks(const camera_module_callbacks_t *callbacks) {
- return mModule->set_callbacks(callbacks);
+ int res;
+ ATRACE_BEGIN("camera_module->set_callbacks");
+ res = mModule->set_callbacks(callbacks);
+ ATRACE_END();
+ return res;
}
bool CameraModule::isVendorTagDefined() {
@@ -239,12 +269,18 @@ bool CameraModule::isVendorTagDefined() {
void CameraModule::getVendorTagOps(vendor_tag_ops_t* ops) {
if (mModule->get_vendor_tag_ops) {
+ ATRACE_BEGIN("camera_module->get_vendor_tag_ops");
mModule->get_vendor_tag_ops(ops);
+ ATRACE_END();
}
}
int CameraModule::setTorchMode(const char* camera_id, bool enable) {
- return mModule->set_torch_mode(camera_id, enable);
+ int res;
+ ATRACE_BEGIN("camera_module->set_torch_mode");
+ res = mModule->set_torch_mode(camera_id, enable);
+ ATRACE_END();
+ return res;
}
status_t CameraModule::filterOpenErrorCode(status_t err) {
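The CameraModule changes all follow one pattern: each call into the vendor HAL is bracketed with ATRACE_BEGIN()/ATRACE_END() so slow HAL entry points show up as named slices in systrace. A hypothetical RAII helper expressing the same pattern (the patch itself uses explicit BEGIN/END pairs):

    #define ATRACE_TAG ATRACE_TAG_CAMERA
    #include <utils/Trace.h>
    #include <hardware/camera_common.h>

    // Hypothetical helper, not part of this patch.
    class ScopedHalTrace {
    public:
        explicit ScopedHalTrace(const char* name) { ATRACE_BEGIN(name); }
        ~ScopedHalTrace() { ATRACE_END(); }
    };

    int tracedGetNumberOfCameras(camera_module_t* module) {
        ScopedHalTrace trace("camera_module->get_number_of_cameras");
        return module->get_number_of_cameras();
    }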
diff --git a/services/camera/libcameraservice/device2/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index c9c990c..d74f976 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
@@ -632,6 +632,12 @@ status_t Camera2Device::tearDown(int streamId) {
return NO_INIT;
}
+status_t Camera2Device::prepare(int maxCount, int streamId) {
+ ATRACE_CALL();
+ ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
+ return NO_INIT;
+}
+
uint32_t Camera2Device::getDeviceVersion() {
ATRACE_CALL();
return mDeviceVersion;
diff --git a/services/camera/libcameraservice/device2/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 34c1ded..b4d343c 100644
--- a/services/camera/libcameraservice/device2/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -88,6 +88,7 @@ class Camera2Device: public CameraDeviceBase {
// Prepare and tearDown are no-ops
virtual status_t prepare(int streamId);
virtual status_t tearDown(int streamId);
+ virtual status_t prepare(int maxCount, int streamId);
virtual uint32_t getDeviceVersion();
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0c941fb..50d9d75 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -44,6 +44,7 @@
#include <utils/Timers.h>
#include "utils/CameraTraces.h"
+#include "mediautils/SchedulingPolicyService.h"
#include "device3/Camera3Device.h"
#include "device3/Camera3OutputStream.h"
#include "device3/Camera3InputStream.h"
@@ -66,6 +67,7 @@ Camera3Device::Camera3Device(int id):
mNextResultFrameNumber(0),
mNextReprocessResultFrameNumber(0),
mNextShutterFrameNumber(0),
+ mNextReprocessShutterFrameNumber(0),
mListener(NULL)
{
ATRACE_CALL();
@@ -285,19 +287,27 @@ status_t Camera3Device::disconnect() {
mStatusTracker->join();
}
+ camera3_device_t *hal3Device;
{
Mutex::Autolock l(mLock);
mRequestThread.clear();
mStatusTracker.clear();
- if (mHal3Device != NULL) {
- ATRACE_BEGIN("camera3->close");
- mHal3Device->common.close(&mHal3Device->common);
- ATRACE_END();
- mHal3Device = NULL;
- }
+ hal3Device = mHal3Device;
+ }
+
+ // Call close without internal mutex held, as the HAL close may need to
+ // wait on assorted callbacks, etc., to complete before it can return.
+ if (hal3Device != NULL) {
+ ATRACE_BEGIN("camera3->close");
+ hal3Device->common.close(&hal3Device->common);
+ ATRACE_END();
+ }
+ {
+ Mutex::Autolock l(mLock);
+ mHal3Device = NULL;
internalUpdateStatusLocked(STATUS_UNINITIALIZED);
}
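A generic, self-contained sketch (assumed, not the framework code) of the locking pattern the disconnect() change adopts: take the pointer under the mutex, drop the mutex for the potentially slow close(), then reacquire it to publish the final state.

    #include <mutex>

    struct Hal { void close() { /* may block on in-flight callbacks */ } };

    class Device {
    public:
        void disconnect() {
            Hal* hal = nullptr;
            {
                std::lock_guard<std::mutex> lock(mLock);
                hal = mHal;                       // snapshot under the lock
            }
            if (hal != nullptr) {
                hal->close();                     // slow call runs with the lock dropped
            }
            std::lock_guard<std::mutex> lock(mLock);
            mHal = nullptr;                       // publish the new state under the lock
        }
    private:
        std::mutex mLock;
        Hal mHalStorage;
        Hal* mHal = &mHalStorage;
    };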
@@ -557,6 +567,18 @@ status_t Camera3Device::convertMetadataListToRequestListLocked(
ALOGV("%s: requestId = %" PRId32, __FUNCTION__, newRequest->mResultExtras.requestId);
}
+
+ // Setup batch size if this is a high speed video recording request.
+ if (mIsConstrainedHighSpeedConfiguration && requestList->size() > 0) {
+ auto firstRequest = requestList->begin();
+ for (auto& outputStream : (*firstRequest)->mOutputStreams) {
+ if (outputStream->isVideoStream()) {
+ (*firstRequest)->mBatchSize = requestList->size();
+ break;
+ }
+ }
+ }
+
return OK;
}
@@ -1398,7 +1420,7 @@ status_t Camera3Device::flush(int64_t *frameNumber) {
status_t res;
if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
- res = mHal3Device->ops->flush(mHal3Device);
+ res = mRequestThread->flush();
} else {
Mutex::Autolock l(mLock);
res = waitUntilDrainedLocked();
@@ -1408,6 +1430,10 @@ status_t Camera3Device::flush(int64_t *frameNumber) {
}
status_t Camera3Device::prepare(int streamId) {
+ return prepare(camera3::Camera3StreamInterface::ALLOCATE_PIPELINE_MAX, streamId);
+}
+
+status_t Camera3Device::prepare(int maxCount, int streamId) {
ATRACE_CALL();
ALOGV("%s: Camera %d: Preparing stream %d", __FUNCTION__, mId, streamId);
Mutex::Autolock il(mInterfaceLock);
@@ -1432,7 +1458,7 @@ status_t Camera3Device::prepare(int streamId) {
return BAD_VALUE;
}
- return mPreparerThread->prepare(stream);
+ return mPreparerThread->prepare(maxCount, stream);
}
status_t Camera3Device::tearDown(int streamId) {
@@ -1583,6 +1609,7 @@ sp<Camera3Device::CaptureRequest> Camera3Device::createCaptureRequest(
newRequest->mOutputStreams.push(stream);
}
newRequest->mSettings.erase(ANDROID_REQUEST_OUTPUT_STREAMS);
+ newRequest->mBatchSize = 1;
return newRequest;
}
@@ -1741,6 +1768,21 @@ status_t Camera3Device::configureStreamsLocked() {
// across configure_streams() calls
mRequestThread->configurationComplete();
+ // Boost priority of request thread for high speed recording to SCHED_FIFO
+ if (mIsConstrainedHighSpeedConfiguration) {
+ pid_t requestThreadTid = mRequestThread->getTid();
+ res = requestPriority(getpid(), requestThreadTid,
+ kConstrainedHighSpeedThreadPriority, true);
+ if (res != OK) {
+ ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
+ strerror(-res), res);
+ } else {
+ ALOGD("Set real time priority for request queue thread (tid %d)", requestThreadTid);
+ }
+ } else {
+ // TODO: Set/restore normal priority for normal use cases
+ }
+
// Update device state
mNeedConfig = false;
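For context on the SCHED_FIFO boost above: requestPriority() asks the scheduling policy service to do, on the request thread's behalf, roughly what the standalone snippet below does directly. Doing it directly normally requires CAP_SYS_NICE, which is why the privileged service is involved; this is an illustration, not the code path the patch uses.

    #include <sched.h>
    #include <cstdio>

    // Give thread `tid` the real-time SCHED_FIFO policy at priority `prio` (e.g. 1).
    bool makeThreadFifo(pid_t tid, int prio) {
        sched_param param{};
        param.sched_priority = prio;
        if (sched_setscheduler(tid, SCHED_FIFO, &param) != 0) {
            std::perror("sched_setscheduler");   // usually fails without CAP_SYS_NICE
            return false;
        }
        return true;
    }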
@@ -2493,18 +2535,6 @@ void Camera3Device::notifyError(const camera3_error_msg_t &msg,
void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
NotificationListener *listener) {
ssize_t idx;
- // Verify ordering of shutter notifications
- {
- Mutex::Autolock l(mOutputLock);
- // TODO: need to track errors for tighter bounds on expected frame number.
- if (msg.frame_number < mNextShutterFrameNumber) {
- SET_ERR("Shutter notification out-of-order. Expected "
- "notification for frame %d, got frame %d",
- mNextShutterFrameNumber, msg.frame_number);
- return;
- }
- mNextShutterFrameNumber = msg.frame_number + 1;
- }
// Set timestamp for the request in the in-flight tracking
// and get the request ID to send upstream
@@ -2514,6 +2544,29 @@ void Camera3Device::notifyShutter(const camera3_shutter_msg_t &msg,
if (idx >= 0) {
InFlightRequest &r = mInFlightMap.editValueAt(idx);
+ // Verify ordering of shutter notifications
+ {
+ Mutex::Autolock l(mOutputLock);
+ // TODO: need to track errors for tighter bounds on expected frame number.
+ if (r.hasInputBuffer) {
+ if (msg.frame_number < mNextReprocessShutterFrameNumber) {
+ SET_ERR("Shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ mNextReprocessShutterFrameNumber, msg.frame_number);
+ return;
+ }
+ mNextReprocessShutterFrameNumber = msg.frame_number + 1;
+ } else {
+ if (msg.frame_number < mNextShutterFrameNumber) {
+ SET_ERR("Shutter notification out-of-order. Expected "
+ "notification for frame %d, got frame %d",
+ mNextShutterFrameNumber, msg.frame_number);
+ return;
+ }
+ mNextShutterFrameNumber = msg.frame_number + 1;
+ }
+ }
+
ALOGVV("Camera %d: %s: Shutter fired for frame %d (id %d) at %" PRId64,
mId, __FUNCTION__,
msg.frame_number, r.resultExtras.requestId, msg.timestamp);
@@ -2754,6 +2807,17 @@ status_t Camera3Device::RequestThread::clear(
return OK;
}
+status_t Camera3Device::RequestThread::flush() {
+ ATRACE_CALL();
+ Mutex::Autolock l(mFlushLock);
+
+ if (mHal3Device->common.version >= CAMERA_DEVICE_API_VERSION_3_1) {
+ return mHal3Device->ops->flush(mHal3Device);
+ }
+
+ return -ENOTSUP;
+}
+
void Camera3Device::RequestThread::setPaused(bool paused) {
Mutex::Autolock l(mPauseLock);
mDoPause = paused;
@@ -2844,7 +2908,7 @@ void Camera3Device::overrideResultForPrecaptureCancel(
}
bool Camera3Device::RequestThread::threadLoop() {
-
+ ATRACE_CALL();
status_t res;
// Handle paused state.
@@ -2852,203 +2916,240 @@ bool Camera3Device::RequestThread::threadLoop() {
return true;
}
- // Get work to do
-
- sp<CaptureRequest> nextRequest = waitForNextRequest();
- if (nextRequest == NULL) {
+ // Wait for the next batch of requests.
+ waitForNextRequestBatch();
+ if (mNextRequests.size() == 0) {
return true;
}
- // Create request to HAL
- camera3_capture_request_t request = camera3_capture_request_t();
- request.frame_number = nextRequest->mResultExtras.frameNumber;
- Vector<camera3_stream_buffer_t> outputBuffers;
-
- // Get the request ID, if any
- int requestId;
- camera_metadata_entry_t requestIdEntry =
- nextRequest->mSettings.find(ANDROID_REQUEST_ID);
+ // Get the latest request ID, if any
+ int latestRequestId;
+ camera_metadata_entry_t requestIdEntry = mNextRequests[mNextRequests.size() - 1].
+ captureRequest->mSettings.find(ANDROID_REQUEST_ID);
if (requestIdEntry.count > 0) {
- requestId = requestIdEntry.data.i32[0];
+ latestRequestId = requestIdEntry.data.i32[0];
} else {
- ALOGW("%s: Did not have android.request.id set in the request",
- __FUNCTION__);
- requestId = NAME_NOT_FOUND;
+ ALOGW("%s: Did not have android.request.id set in the request.", __FUNCTION__);
+ latestRequestId = NAME_NOT_FOUND;
}
- // Insert any queued triggers (before metadata is locked)
- int32_t triggerCount;
- res = insertTriggers(nextRequest);
- if (res < 0) {
- SET_ERR("RequestThread: Unable to insert triggers "
- "(capture request %d, HAL device: %s (%d)",
- request.frame_number, strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
+ // Prepare a batch of HAL requests and output buffers.
+ res = prepareHalRequests();
+ if (res == TIMED_OUT) {
+ // Not a fatal error if getting output buffers times out.
+ cleanUpFailedRequests(/*sendRequestError*/ true);
+ return true;
+ } else if (res != OK) {
+ cleanUpFailedRequests(/*sendRequestError*/ false);
return false;
}
- triggerCount = res;
- bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
+ // Inform waitUntilRequestProcessed thread of a new request ID
+ {
+ Mutex::Autolock al(mLatestRequestMutex);
+
+ mLatestRequestId = latestRequestId;
+ mLatestRequestSignal.signal();
+ }
+
+ // Submit a batch of requests to HAL.
+ // Use the flush lock only when submitting multiple requests in a batch.
+ // TODO: The problem with the flush lock is that flush() is blocked by
+ // process_capture_request(), which may take a long time to finish, so serializing
+ // flush() and process_capture_request() defeats the purpose of cancelling requests
+ // ASAP with flush(). For now, only synchronize for high speed recording; the
+ // synchronization should eventually be removed.
+ bool useFlushLock = mNextRequests.size() > 1;
+
+ if (useFlushLock) {
+ mFlushLock.lock();
+ }
+
+ ALOGVV("%s: %d: submitting %d requests in a batch.", __FUNCTION__, __LINE__,
+ mNextRequests.size());
+ for (auto& nextRequest : mNextRequests) {
+ // Submit request and block until ready for next one
+ ATRACE_ASYNC_BEGIN("frame capture", nextRequest.halRequest.frame_number);
+ ATRACE_BEGIN("camera3->process_capture_request");
+ res = mHal3Device->ops->process_capture_request(mHal3Device, &nextRequest.halRequest);
+ ATRACE_END();
- // If the request is the same as last, or we had triggers last time
- if (mPrevRequest != nextRequest || triggersMixedIn) {
- /**
- * HAL workaround:
- * Insert a dummy trigger ID if a trigger is set but no trigger ID is
- */
- res = addDummyTriggerIds(nextRequest);
if (res != OK) {
- SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
- "(capture request %d, HAL device: %s (%d)",
- request.frame_number, strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
+ // Should only get a failure here for malformed requests or device-level
+ // errors, so consider all errors fatal. Bad metadata failures should
+ // come through notify.
+ SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
+ " device: %s (%d)", nextRequest.halRequest.frame_number, strerror(-res),
+ res);
+ cleanUpFailedRequests(/*sendRequestError*/ false);
+ if (useFlushLock) {
+ mFlushLock.unlock();
+ }
return false;
}
- /**
- * The request should be presorted so accesses in HAL
- * are O(logn). Sidenote, sorting a sorted metadata is nop.
- */
- nextRequest->mSettings.sort();
- request.settings = nextRequest->mSettings.getAndLock();
- mPrevRequest = nextRequest;
- ALOGVV("%s: Request settings are NEW", __FUNCTION__);
-
- IF_ALOGV() {
- camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
- find_camera_metadata_ro_entry(
- request.settings,
- ANDROID_CONTROL_AF_TRIGGER,
- &e
- );
- if (e.count > 0) {
- ALOGV("%s: Request (frame num %d) had AF trigger 0x%x",
- __FUNCTION__,
- request.frame_number,
- e.data.u8[0]);
- }
- }
- } else {
- // leave request.settings NULL to indicate 'reuse latest given'
- ALOGVV("%s: Request settings are REUSED",
- __FUNCTION__);
- }
+ // Mark that the request has been submitted successfully.
+ nextRequest.submitted = true;
- uint32_t totalNumBuffers = 0;
+ // Update the latest request sent to HAL
+ if (nextRequest.halRequest.settings != NULL) { // Don't update if they were unchanged
+ Mutex::Autolock al(mLatestRequestMutex);
- // Fill in buffers
- if (nextRequest->mInputStream != NULL) {
- request.input_buffer = &nextRequest->mInputBuffer;
- totalNumBuffers += 1;
- } else {
- request.input_buffer = NULL;
- }
+ camera_metadata_t* cloned = clone_camera_metadata(nextRequest.halRequest.settings);
+ mLatestRequest.acquire(cloned);
+ }
- outputBuffers.insertAt(camera3_stream_buffer_t(), 0,
- nextRequest->mOutputStreams.size());
- request.output_buffers = outputBuffers.array();
- for (size_t i = 0; i < nextRequest->mOutputStreams.size(); i++) {
- res = nextRequest->mOutputStreams.editItemAt(i)->
- getBuffer(&outputBuffers.editItemAt(i));
+ if (nextRequest.halRequest.settings != NULL) {
+ nextRequest.captureRequest->mSettings.unlock(nextRequest.halRequest.settings);
+ }
+
+ // Remove any previously queued triggers (after unlock)
+ res = removeTriggers(mPrevRequest);
if (res != OK) {
- // Can't get output buffer from gralloc queue - this could be due to
- // abandoned queue or other consumer misbehavior, so not a fatal
- // error
- ALOGE("RequestThread: Can't get output buffer, skipping request:"
- " %s (%d)", strerror(-res), res);
- {
- Mutex::Autolock l(mRequestLock);
- if (mListener != NULL) {
- mListener->notifyError(
- ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
- nextRequest->mResultExtras);
- }
+ SET_ERR("RequestThread: Unable to remove triggers "
+ "(capture request %d, HAL device: %s (%d)",
+ nextRequest.halRequest.frame_number, strerror(-res), res);
+ cleanUpFailedRequests(/*sendRequestError*/ false);
+ if (useFlushLock) {
+ mFlushLock.unlock();
}
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return true;
+ return false;
}
- request.num_output_buffers++;
}
- totalNumBuffers += request.num_output_buffers;
- // Log request in the in-flight queue
- sp<Camera3Device> parent = mParent.promote();
- if (parent == NULL) {
- // Should not happen, and nowhere to send errors to, so just log it
- CLOGE("RequestThread: Parent is gone");
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return false;
+ if (useFlushLock) {
+ mFlushLock.unlock();
}
- res = parent->registerInFlight(request.frame_number,
- totalNumBuffers, nextRequest->mResultExtras,
- /*hasInput*/request.input_buffer != NULL,
- nextRequest->mAeTriggerCancelOverride);
- ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
- ", burstId = %" PRId32 ".",
- __FUNCTION__,
- nextRequest->mResultExtras.requestId, nextRequest->mResultExtras.frameNumber,
- nextRequest->mResultExtras.burstId);
- if (res != OK) {
- SET_ERR("RequestThread: Unable to register new in-flight request:"
- " %s (%d)", strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return false;
+ // Unset as current request
+ {
+ Mutex::Autolock l(mRequestLock);
+ mNextRequests.clear();
}
- // Inform waitUntilRequestProcessed thread of a new request ID
- {
- Mutex::Autolock al(mLatestRequestMutex);
+ return true;
+}
- mLatestRequestId = requestId;
- mLatestRequestSignal.signal();
- }
+status_t Camera3Device::RequestThread::prepareHalRequests() {
+ ATRACE_CALL();
- // Submit request and block until ready for next one
- ATRACE_ASYNC_BEGIN("frame capture", request.frame_number);
- ATRACE_BEGIN("camera3->process_capture_request");
- res = mHal3Device->ops->process_capture_request(mHal3Device, &request);
- ATRACE_END();
+ for (auto& nextRequest : mNextRequests) {
+ sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+ camera3_capture_request_t* halRequest = &nextRequest.halRequest;
+ Vector<camera3_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
- if (res != OK) {
- // Should only get a failure here for malformed requests or device-level
- // errors, so consider all errors fatal. Bad metadata failures should
- // come through notify.
- SET_ERR("RequestThread: Unable to submit capture request %d to HAL"
- " device: %s (%d)", request.frame_number, strerror(-res), res);
- cleanUpFailedRequest(request, nextRequest, outputBuffers);
- return false;
- }
+ // Prepare a request to HAL
+ halRequest->frame_number = captureRequest->mResultExtras.frameNumber;
- // Update the latest request sent to HAL
- if (request.settings != NULL) { // Don't update them if they were unchanged
- Mutex::Autolock al(mLatestRequestMutex);
+ // Insert any queued triggers (before metadata is locked)
+ status_t res = insertTriggers(captureRequest);
- camera_metadata_t* cloned = clone_camera_metadata(request.settings);
- mLatestRequest.acquire(cloned);
- }
+ if (res < 0) {
+ SET_ERR("RequestThread: Unable to insert triggers "
+ "(capture request %d, HAL device: %s (%d)",
+ halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
+ int triggerCount = res;
+ bool triggersMixedIn = (triggerCount > 0 || mPrevTriggers > 0);
+ mPrevTriggers = triggerCount;
- if (request.settings != NULL) {
- nextRequest->mSettings.unlock(request.settings);
- }
+ // If the request is the same as last, or we had triggers last time
+ if (mPrevRequest != captureRequest || triggersMixedIn) {
+ /**
+ * HAL workaround:
+ * Insert a dummy trigger ID if a trigger is set but no trigger ID is
+ */
+ res = addDummyTriggerIds(captureRequest);
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to insert dummy trigger IDs "
+ "(capture request %d, HAL device: %s (%d)",
+ halRequest->frame_number, strerror(-res), res);
+ return INVALID_OPERATION;
+ }
- // Unset as current request
- {
- Mutex::Autolock l(mRequestLock);
- mNextRequest.clear();
- }
+ /**
+ * The request should be presorted so accesses in HAL
+ * are O(log n). Side note: sorting already-sorted metadata is a no-op.
+ */
+ captureRequest->mSettings.sort();
+ halRequest->settings = captureRequest->mSettings.getAndLock();
+ mPrevRequest = captureRequest;
+ ALOGVV("%s: Request settings are NEW", __FUNCTION__);
+
+ IF_ALOGV() {
+ camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
+ find_camera_metadata_ro_entry(
+ halRequest->settings,
+ ANDROID_CONTROL_AF_TRIGGER,
+ &e
+ );
+ if (e.count > 0) {
+ ALOGV("%s: Request (frame num %d) had AF trigger 0x%x",
+ __FUNCTION__,
+ halRequest->frame_number,
+ e.data.u8[0]);
+ }
+ }
+ } else {
+ // leave request.settings NULL to indicate 'reuse latest given'
+ ALOGVV("%s: Request settings are REUSED",
+ __FUNCTION__);
+ }
- // Remove any previously queued triggers (after unlock)
- res = removeTriggers(mPrevRequest);
- if (res != OK) {
- SET_ERR("RequestThread: Unable to remove triggers "
- "(capture request %d, HAL device: %s (%d)",
- request.frame_number, strerror(-res), res);
- return false;
+ uint32_t totalNumBuffers = 0;
+
+ // Fill in buffers
+ if (captureRequest->mInputStream != NULL) {
+ halRequest->input_buffer = &captureRequest->mInputBuffer;
+ totalNumBuffers += 1;
+ } else {
+ halRequest->input_buffer = NULL;
+ }
+
+ outputBuffers->insertAt(camera3_stream_buffer_t(), 0,
+ captureRequest->mOutputStreams.size());
+ halRequest->output_buffers = outputBuffers->array();
+ for (size_t i = 0; i < captureRequest->mOutputStreams.size(); i++) {
+ res = captureRequest->mOutputStreams.editItemAt(i)->
+ getBuffer(&outputBuffers->editItemAt(i));
+ if (res != OK) {
+ // Can't get output buffer from gralloc queue - this could be due to
+ // abandoned queue or other consumer misbehavior, so not a fatal
+ // error
+ ALOGE("RequestThread: Can't get output buffer, skipping request:"
+ " %s (%d)", strerror(-res), res);
+
+ return TIMED_OUT;
+ }
+ halRequest->num_output_buffers++;
+ }
+ totalNumBuffers += halRequest->num_output_buffers;
+
+ // Log request in the in-flight queue
+ sp<Camera3Device> parent = mParent.promote();
+ if (parent == NULL) {
+ // Should not happen, and nowhere to send errors to, so just log it
+ CLOGE("RequestThread: Parent is gone");
+ return INVALID_OPERATION;
+ }
+ res = parent->registerInFlight(halRequest->frame_number,
+ totalNumBuffers, captureRequest->mResultExtras,
+ /*hasInput*/halRequest->input_buffer != NULL,
+ captureRequest->mAeTriggerCancelOverride);
+ ALOGVV("%s: registered in flight requestId = %" PRId32 ", frameNumber = %" PRId64
+ ", burstId = %" PRId32 ".",
+ __FUNCTION__,
+ captureRequest->mResultExtras.requestId, captureRequest->mResultExtras.frameNumber,
+ captureRequest->mResultExtras.burstId);
+ if (res != OK) {
+ SET_ERR("RequestThread: Unable to register new in-flight request:"
+ " %s (%d)", strerror(-res), res);
+ return INVALID_OPERATION;
+ }
}
- mPrevTriggers = triggerCount;
- return true;
+ return OK;
}
CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
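A heavily simplified standalone sketch of the control flow threadLoop() now follows for a batch: submit the prepared requests in order, record which ones actually reached the HAL, and let the caller unwind only the unsubmitted ones on failure. submitToHal stands in for process_capture_request(); none of these names are from the patch.

    #include <functional>
    #include <vector>

    struct Request {
        int frameNumber;
        bool submitted = false;   // mirrors NextRequest::submitted
    };

    bool submitBatch(std::vector<Request>& batch,
                     const std::function<bool(const Request&)>& submitToHal) {
        for (Request& r : batch) {
            if (!submitToHal(r)) {
                return false;     // caller cleans up only requests still unsubmitted
            }
            r.submitted = true;
        }
        batch.clear();            // whole batch made it; nothing left pending
        return true;
    }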
@@ -3063,11 +3164,13 @@ bool Camera3Device::RequestThread::isStreamPending(
sp<Camera3StreamInterface>& stream) {
Mutex::Autolock l(mRequestLock);
- if (mNextRequest != nullptr) {
- for (const auto& s : mNextRequest->mOutputStreams) {
- if (stream == s) return true;
+ for (const auto& nextRequest : mNextRequests) {
+ if (!nextRequest.submitted) {
+ for (const auto& s : nextRequest.captureRequest->mOutputStreams) {
+ if (stream == s) return true;
+ }
+ if (stream == nextRequest.captureRequest->mInputStream) return true;
}
- if (stream == mNextRequest->mInputStream) return true;
}
for (const auto& request : mRequestQueue) {
@@ -3087,37 +3190,95 @@ bool Camera3Device::RequestThread::isStreamPending(
return false;
}
-void Camera3Device::RequestThread::cleanUpFailedRequest(
- camera3_capture_request_t &request,
- sp<CaptureRequest> &nextRequest,
- Vector<camera3_stream_buffer_t> &outputBuffers) {
+void Camera3Device::RequestThread::cleanUpFailedRequests(bool sendRequestError) {
+ if (mNextRequests.empty()) {
+ return;
+ }
+
+ for (auto& nextRequest : mNextRequests) {
+ // Skip the ones that have been submitted successfully.
+ if (nextRequest.submitted) {
+ continue;
+ }
- if (request.settings != NULL) {
- nextRequest->mSettings.unlock(request.settings);
+ sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
+ camera3_capture_request_t* halRequest = &nextRequest.halRequest;
+ Vector<camera3_stream_buffer_t>* outputBuffers = &nextRequest.outputBuffers;
+
+ if (halRequest->settings != NULL) {
+ captureRequest->mSettings.unlock(halRequest->settings);
+ }
+
+ if (captureRequest->mInputStream != NULL) {
+ captureRequest->mInputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
+ captureRequest->mInputStream->returnInputBuffer(captureRequest->mInputBuffer);
+ }
+
+ for (size_t i = 0; i < halRequest->num_output_buffers; i++) {
+ outputBuffers->editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
+ captureRequest->mOutputStreams.editItemAt(i)->returnBuffer((*outputBuffers)[i], 0);
+ }
+
+ if (sendRequestError) {
+ Mutex::Autolock l(mRequestLock);
+ if (mListener != NULL) {
+ mListener->notifyError(
+ ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
+ captureRequest->mResultExtras);
+ }
+ }
}
- if (nextRequest->mInputStream != NULL) {
- nextRequest->mInputBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
- nextRequest->mInputStream->returnInputBuffer(nextRequest->mInputBuffer);
+
+ Mutex::Autolock l(mRequestLock);
+ mNextRequests.clear();
+}
+
+void Camera3Device::RequestThread::waitForNextRequestBatch() {
+ // Optimized a bit for the simple steady-state case (single repeating
+ // request), to avoid putting that request in the queue temporarily.
+ Mutex::Autolock l(mRequestLock);
+
+ assert(mNextRequests.empty());
+
+ NextRequest nextRequest;
+ nextRequest.captureRequest = waitForNextRequestLocked();
+ if (nextRequest.captureRequest == nullptr) {
+ return;
+ }
+
+ nextRequest.halRequest = camera3_capture_request_t();
+ nextRequest.submitted = false;
+ mNextRequests.add(nextRequest);
+
+ // Wait for additional requests
+ const size_t batchSize = nextRequest.captureRequest->mBatchSize;
+
+ for (size_t i = 1; i < batchSize; i++) {
+ NextRequest additionalRequest;
+ additionalRequest.captureRequest = waitForNextRequestLocked();
+ if (additionalRequest.captureRequest == nullptr) {
+ break;
+ }
+
+ additionalRequest.halRequest = camera3_capture_request_t();
+ additionalRequest.submitted = false;
+ mNextRequests.add(additionalRequest);
}
- for (size_t i = 0; i < request.num_output_buffers; i++) {
- outputBuffers.editItemAt(i).status = CAMERA3_BUFFER_STATUS_ERROR;
- nextRequest->mOutputStreams.editItemAt(i)->returnBuffer(
- outputBuffers[i], 0);
+
+ if (mNextRequests.size() < batchSize) {
+ ALOGE("RequestThread: only get %d out of %d requests. Skipping requests.",
+ mNextRequests.size(), batchSize);
+ cleanUpFailedRequests(/*sendRequestError*/true);
}
- Mutex::Autolock l(mRequestLock);
- mNextRequest.clear();
+ return;
}
sp<Camera3Device::CaptureRequest>
- Camera3Device::RequestThread::waitForNextRequest() {
+ Camera3Device::RequestThread::waitForNextRequestLocked() {
status_t res;
sp<CaptureRequest> nextRequest;
- // Optimized a bit for the simple steady-state case (single repeating
- // request), to avoid putting that request in the queue temporarily.
- Mutex::Autolock l(mRequestLock);
-
while (mRequestQueue.empty()) {
if (!mRepeatingRequests.empty()) {
// Always atomically enqueue all requests in a repeating request
@@ -3212,8 +3373,6 @@ sp<Camera3Device::CaptureRequest>
handleAePrecaptureCancelRequest(nextRequest);
- mNextRequest = nextRequest;
-
return nextRequest;
}
@@ -3478,12 +3637,12 @@ Camera3Device::PreparerThread::~PreparerThread() {
clear();
}
-status_t Camera3Device::PreparerThread::prepare(sp<Camera3StreamInterface>& stream) {
+status_t Camera3Device::PreparerThread::prepare(int maxCount, sp<Camera3StreamInterface>& stream) {
status_t res;
Mutex::Autolock l(mLock);
- res = stream->startPrepare();
+ res = stream->startPrepare(maxCount);
if (res == OK) {
// No preparation needed, fire listener right off
ALOGV("%s: Stream %d already prepared", __FUNCTION__, stream->getId());
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 5287058..2cd5af3 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -62,6 +62,7 @@ class Camera3Device :
public CameraDeviceBase,
private camera3_callback_ops {
public:
+
Camera3Device(int id);
virtual ~Camera3Device();
@@ -143,6 +144,8 @@ class Camera3Device :
virtual status_t tearDown(int streamId);
+ virtual status_t prepare(int maxCount, int streamId);
+
virtual uint32_t getDeviceVersion();
virtual ssize_t getJpegBufferSize(uint32_t width, uint32_t height) const;
@@ -158,6 +161,8 @@ class Camera3Device :
static const nsecs_t kActiveTimeout = 500000000; // 500 ms
static const size_t kInFlightWarnLimit = 20;
static const size_t kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
+ // SCHED_FIFO priority for request submission thread in HFR mode
+ static const int kConstrainedHighSpeedThreadPriority = 1;
struct RequestTrigger;
// minimal jpeg buffer size: 256KB + blob header
@@ -261,6 +266,11 @@ class Camera3Device :
// Used to cancel AE precapture trigger for devices that don't support
// CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
AeTriggerCancelOverride_t mAeTriggerCancelOverride;
+ // The number of requests that should be submitted to HAL at a time.
+ // For example, if batch size is 8, this request and the following 7
+ // requests will be submitted to HAL at a time. The batch size for
+ // the following 7 requests will be ignored by the request thread.
+ int mBatchSize;
};
typedef List<sp<CaptureRequest> > RequestList;
@@ -438,6 +448,11 @@ class Camera3Device :
int64_t *lastFrameNumber = NULL);
/**
+ * Flush all pending requests in HAL.
+ */
+ status_t flush();
+
+ /**
* Queue a trigger to be dispatched with the next outgoing
* process_capture_request. The settings for that request only
* will be temporarily rewritten to add the trigger tag/value.
@@ -498,16 +513,30 @@ class Camera3Device :
static const nsecs_t kRequestTimeout = 50e6; // 50 ms
- // Waits for a request, or returns NULL if times out.
- sp<CaptureRequest> waitForNextRequest();
+ // Used to prepare a batch of requests.
+ struct NextRequest {
+ sp<CaptureRequest> captureRequest;
+ camera3_capture_request_t halRequest;
+ Vector<camera3_stream_buffer_t> outputBuffers;
+ bool submitted;
+ };
- // Return buffers, etc, for a request that couldn't be fully
- // constructed. The buffers will be returned in the ERROR state
- // to mark them as not having valid data.
- // All arguments will be modified.
- void cleanUpFailedRequest(camera3_capture_request_t &request,
- sp<CaptureRequest> &nextRequest,
- Vector<camera3_stream_buffer_t> &outputBuffers);
+ // Wait for the next batch of requests and put them in mNextRequests. mNextRequests will
+ // be empty if it times out.
+ void waitForNextRequestBatch();
+
+ // Waits for a request, or returns NULL if it times out. Must be called with mRequestLock held.
+ sp<CaptureRequest> waitForNextRequestLocked();
+
+ // Prepare HAL requests and output buffers in mNextRequests. Return TIMED_OUT if getting any
+ // output buffer timed out. If an error is returned, the caller should clean up the pending
+ // request batch.
+ status_t prepareHalRequests();
+
+ // Return buffers, etc, for requests in mNextRequests that couldn't be fully constructed and
+ // send request errors if sendRequestError is true. The buffers will be returned in the
+ // ERROR state to mark them as not having valid data. mNextRequests will be cleared.
+ void cleanUpFailedRequests(bool sendRequestError);
// Pause handling
bool waitIfPaused();
@@ -536,10 +565,13 @@ class Camera3Device :
Condition mRequestSignal;
RequestList mRequestQueue;
RequestList mRepeatingRequests;
- // The next request being prepped for submission to the HAL, no longer
+ // The next batch of requests being prepped for submission to the HAL, no longer
// on the request queue. Read-only even with mRequestLock held, outside
// of threadLoop
- sp<const CaptureRequest> mNextRequest;
+ Vector<NextRequest> mNextRequests;
+
+ // To protect flush() and sending a request batch to HAL.
+ Mutex mFlushLock;
bool mReconfigured;
@@ -698,10 +730,11 @@ class Camera3Device :
void setNotificationListener(NotificationListener *listener);
/**
- * Queue up a stream to be prepared. Streams are processed by
- * a background thread in FIFO order
+ * Queue up a stream to be prepared. Streams are processed by a background thread in FIFO
+ * order. Pre-allocate up to maxCount buffers for the stream, or the maximum number needed
+ * for the pipeline if maxCount is ALLOCATE_PIPELINE_MAX.
*/
- status_t prepare(sp<camera3::Camera3StreamInterface>& stream);
+ status_t prepare(int maxCount, sp<camera3::Camera3StreamInterface>& stream);
/**
* Cancel all current and pending stream preparation
@@ -738,7 +771,10 @@ class Camera3Device :
uint32_t mNextResultFrameNumber;
// the minimal frame number of the next reprocess result
uint32_t mNextReprocessResultFrameNumber;
+ // the minimal frame number of the next non-reprocess shutter
uint32_t mNextShutterFrameNumber;
+ // the minimal frame number of the next reprocess shutter
+ uint32_t mNextReprocessShutterFrameNumber;
List<CaptureResult> mResultQueue;
Condition mResultSignal;
NotificationListener *mListener;
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
index ecb8ac8..1d9d04f 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.cpp
@@ -92,6 +92,10 @@ status_t Camera3DummyStream::getEndpointUsage(uint32_t *usage) const {
return OK;
}
+bool Camera3DummyStream::isVideoStream() const {
+ return false;
+}
+
}; // namespace camera3
}; // namespace android
diff --git a/services/camera/libcameraservice/device3/Camera3DummyStream.h b/services/camera/libcameraservice/device3/Camera3DummyStream.h
index 3a3dbf4..97c0c96 100644
--- a/services/camera/libcameraservice/device3/Camera3DummyStream.h
+++ b/services/camera/libcameraservice/device3/Camera3DummyStream.h
@@ -54,6 +54,11 @@ class Camera3DummyStream :
status_t setTransform(int transform);
+ /**
+ * Return if this output stream is for video encoding.
+ */
+ bool isVideoStream() const;
+
protected:
/**
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 8c611d5..3f0a736 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -426,6 +426,17 @@ status_t Camera3OutputStream::getEndpointUsage(uint32_t *usage) const {
return res;
}
+bool Camera3OutputStream::isVideoStream() const {
+ uint32_t usage = 0;
+ status_t res = getEndpointUsage(&usage);
+ if (res != OK) {
+ ALOGE("%s: getting end point usage failed: %s (%d).", __FUNCTION__, strerror(-res), res);
+ return false;
+ }
+
+ return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
+}
+
}; // namespace camera3
}; // namespace android
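The new isVideoStream() reduces to a single usage-bit test on the stream's gralloc usage; a minimal illustration (the helper name is made up):

    #include <hardware/gralloc.h>
    #include <cstdint>

    // True when buffers on this stream will be consumed by a hardware video encoder.
    bool usageIndicatesVideoEncoder(uint32_t usage) {
        return (usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) != 0;
    }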
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 941d693..3c083ec 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -64,6 +64,11 @@ class Camera3OutputStream :
*/
status_t setTransform(int transform);
+ /**
+ * Return if this output stream is for video encoding.
+ */
+ bool isVideoStream() const;
+
protected:
Camera3OutputStream(int id, camera3_stream_type_t type,
uint32_t width, uint32_t height, int format,
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index aae72cf..df89b34 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
@@ -34,6 +34,11 @@ class Camera3OutputStreamInterface : public virtual Camera3StreamInterface {
* HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
*/
virtual status_t setTransform(int transform) = 0;
+
+ /**
+ * Return if this output stream is for video encoding.
+ */
+ virtual bool isVideoStream() const = 0;
};
} // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 2527fd6..96299b3 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -53,7 +53,8 @@ Camera3Stream::Camera3Stream(int id,
mName(String8::format("Camera3Stream[%d]", id)),
mMaxSize(maxSize),
mState(STATE_CONSTRUCTED),
- mStatusId(StatusTracker::NO_STATUS_ID) {
+ mStatusId(StatusTracker::NO_STATUS_ID),
+ mLastMaxCount(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX) {
camera3_stream::stream_type = type;
camera3_stream::width = width;
@@ -252,12 +253,18 @@ bool Camera3Stream::isUnpreparable() {
return mStreamUnpreparable;
}
-status_t Camera3Stream::startPrepare() {
+status_t Camera3Stream::startPrepare(int maxCount) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
status_t res = OK;
+ if (maxCount < 0) {
+ ALOGE("%s: Stream %d: Can't prepare stream if max buffer count (%d) is < 0",
+ __FUNCTION__, mId, maxCount);
+ return BAD_VALUE;
+ }
+
// This function should be only called when the stream is configured already.
if (mState != STATE_CONFIGURED) {
ALOGE("%s: Stream %d: Can't prepare stream if stream is not in CONFIGURED "
@@ -279,9 +286,19 @@ status_t Camera3Stream::startPrepare() {
return INVALID_OPERATION;
}
+
+
+ size_t pipelineMax = getBufferCountLocked();
+ size_t clampedCount = (pipelineMax < static_cast<size_t>(maxCount)) ?
+ pipelineMax : static_cast<size_t>(maxCount);
+ size_t bufferCount = (maxCount == Camera3StreamInterface::ALLOCATE_PIPELINE_MAX) ?
+ pipelineMax : clampedCount;
+
+ mPrepared = bufferCount <= mLastMaxCount;
+
if (mPrepared) return OK;
- size_t bufferCount = getBufferCountLocked();
+ mLastMaxCount = bufferCount;
mPreparedBuffers.insertAt(camera3_stream_buffer_t(), /*index*/0, bufferCount);
mPreparedBufferIdx = 0;
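The clamping rule above reduces to: ALLOCATE_PIPELINE_MAX means "prepare everything the pipeline needs", and any explicit maxCount is capped at the pipeline depth. A self-contained restatement with a few worked values:

    #include <cstddef>

    constexpr int ALLOCATE_PIPELINE_MAX = 0;   // mirrors Camera3StreamInterface

    size_t clampPrepareCount(int maxCount, size_t pipelineMax) {
        if (maxCount == ALLOCATE_PIPELINE_MAX) {
            return pipelineMax;                          // prepare the full pipeline
        }
        size_t requested = static_cast<size_t>(maxCount);
        return requested < pipelineMax ? requested : pipelineMax;
    }
    // clampPrepareCount(2, 8) == 2; clampPrepareCount(12, 8) == 8;
    // clampPrepareCount(ALLOCATE_PIPELINE_MAX, 8) == 8.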
@@ -438,8 +455,9 @@ status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer) {
res = mOutputBufferReturnedSignal.waitRelative(mLock, kWaitForBufferDuration);
if (res != OK) {
if (res == TIMED_OUT) {
- ALOGE("%s: wait for output buffer return timed out after %lldms", __FUNCTION__,
- kWaitForBufferDuration / 1000000LL);
+ ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
+ __FUNCTION__, kWaitForBufferDuration / 1000000LL,
+ camera3_stream::max_buffers);
}
return res;
}
@@ -469,9 +487,12 @@ status_t Camera3Stream::returnBuffer(const camera3_stream_buffer &buffer,
status_t res = returnBufferLocked(buffer, timestamp);
if (res == OK) {
fireBufferListenersLocked(buffer, /*acquired*/false, /*output*/true);
- mOutputBufferReturnedSignal.signal();
}
+ // Even if returning the buffer failed, we still want to signal whoever is waiting for the
+ // buffer to be returned.
+ mOutputBufferReturnedSignal.signal();
+
return res;
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index bab2177..753280b 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -188,7 +188,9 @@ class Camera3Stream :
/**
* Start stream preparation. May only be called in the CONFIGURED state,
- * when no valid buffers have yet been returned to this stream.
+ * when no valid buffers have yet been returned to this stream. Prepares
+ * up to maxCount buffers, or the maximum number of buffers needed by the
+ * pipeline if maxCount is ALLOCATE_PIPELINE_MAX.
*
* If no preparation is necessary, returns OK and does not transition to
* PREPARING state. Otherwise, returns NOT_ENOUGH_DATA and transitions
@@ -204,7 +206,7 @@ class Camera3Stream :
* INVALID_OPERATION if called when not in CONFIGURED state, or a
* valid buffer has already been returned to this stream.
*/
- status_t startPrepare();
+ status_t startPrepare(int maxCount);
/**
* Check if the stream is mid-preparing.
@@ -444,6 +446,9 @@ class Camera3Stream :
Vector<camera3_stream_buffer_t> mPreparedBuffers;
size_t mPreparedBufferIdx;
+ // Number of buffers allocated on last prepare call.
+ int mLastMaxCount;
+
}; // class Camera3Stream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index c086eaf..54009ae 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -34,6 +34,11 @@ class StatusTracker;
*/
class Camera3StreamInterface : public virtual RefBase {
public:
+
+ enum {
+ ALLOCATE_PIPELINE_MAX = 0, // Allocate max buffers used by a given surface
+ };
+
/**
* Get the stream's ID
*/
@@ -98,7 +103,9 @@ class Camera3StreamInterface : public virtual RefBase {
/**
* Start stream preparation. May only be called in the CONFIGURED state,
- * when no valid buffers have yet been returned to this stream.
+ * when no valid buffers have yet been returned to this stream. Prepares
+ * up to maxCount buffers, or the maximum number of buffers needed by the
+ * pipeline if maxCount is ALLOCATE_PIPELINE_MAX.
*
* If no preparation is necessary, returns OK and does not transition to
* PREPARING state. Otherwise, returns NOT_ENOUGH_DATA and transitions
@@ -112,7 +119,7 @@ class Camera3StreamInterface : public virtual RefBase {
* INVALID_OPERATION if called when not in CONFIGURED state, or a
* valid buffer has already been returned to this stream.
*/
- virtual status_t startPrepare() = 0;
+ virtual status_t startPrepare(int maxCount) = 0;
/**
* Check if the stream is mid-preparing.
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 4790754..6781a36 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -90,11 +90,7 @@ static ResourceInfo& getResourceInfoForEdit(
}
status_t ResourceManagerService::dump(int fd, const Vector<String16>& /* args */) {
- Mutex::Autolock lock(mLock);
-
String8 result;
- const size_t SIZE = 256;
- char buffer[SIZE];
if (checkCallingPermission(String16("android.permission.DUMP")) == false) {
result.format("Permission Denial: "
@@ -105,20 +101,35 @@ status_t ResourceManagerService::dump(int fd, const Vector<String16>& /* args */
return PERMISSION_DENIED;
}
+ PidResourceInfosMap mapCopy;
+ bool supportsMultipleSecureCodecs;
+ bool supportsSecureWithNonSecureCodec;
+ String8 serviceLog;
+ {
+ Mutex::Autolock lock(mLock);
+ mapCopy = mMap; // Shadow copy, real copy will happen on write.
+ supportsMultipleSecureCodecs = mSupportsMultipleSecureCodecs;
+ supportsSecureWithNonSecureCodec = mSupportsSecureWithNonSecureCodec;
+ serviceLog = mServiceLog->toString(" " /* linePrefix */);
+ }
+
+ const size_t SIZE = 256;
+ char buffer[SIZE];
snprintf(buffer, SIZE, "ResourceManagerService: %p\n", this);
result.append(buffer);
result.append(" Policies:\n");
- snprintf(buffer, SIZE, " SupportsMultipleSecureCodecs: %d\n", mSupportsMultipleSecureCodecs);
+ snprintf(buffer, SIZE, " SupportsMultipleSecureCodecs: %d\n", supportsMultipleSecureCodecs);
result.append(buffer);
- snprintf(buffer, SIZE, " SupportsSecureWithNonSecureCodec: %d\n", mSupportsSecureWithNonSecureCodec);
+ snprintf(buffer, SIZE, " SupportsSecureWithNonSecureCodec: %d\n",
+ supportsSecureWithNonSecureCodec);
result.append(buffer);
result.append(" Processes:\n");
- for (size_t i = 0; i < mMap.size(); ++i) {
- snprintf(buffer, SIZE, " Pid: %d\n", mMap.keyAt(i));
+ for (size_t i = 0; i < mapCopy.size(); ++i) {
+ snprintf(buffer, SIZE, " Pid: %d\n", mapCopy.keyAt(i));
result.append(buffer);
- const ResourceInfos &infos = mMap.valueAt(i);
+ const ResourceInfos &infos = mapCopy.valueAt(i);
for (size_t j = 0; j < infos.size(); ++j) {
result.append(" Client:\n");
snprintf(buffer, SIZE, " Id: %lld\n", (long long)infos[j].clientId);
@@ -136,7 +147,7 @@ status_t ResourceManagerService::dump(int fd, const Vector<String16>& /* args */
}
}
result.append(" Events logs (most recent at top):\n");
- result.append(mServiceLog->toString(" " /* linePrefix */));
+ result.append(serviceLog);
write(fd, result.string(), result.size());
return OK;
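A generic standalone sketch (C++17, not the service code) of the restructuring dump() received: copy the state under the lock, then do the slow string formatting and the write with the lock released, so a dump can no longer hold mLock while building output.

    #include <map>
    #include <mutex>
    #include <sstream>
    #include <string>

    class Service {
    public:
        std::string dump() {
            std::map<int, int> snapshot;
            {
                std::lock_guard<std::mutex> lock(mLock);
                snapshot = mState;                  // cheap copy under the lock
            }
            std::ostringstream out;                 // slow formatting, lock released
            for (const auto& [pid, count] : snapshot) {
                out << "  Pid: " << pid << " resources: " << count << "\n";
            }
            return out.str();
        }
    private:
        std::mutex mLock;
        std::map<int, int> mState{{1001, 2}, {1002, 5}};
    };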
@@ -307,6 +318,10 @@ bool ResourceManagerService::reclaimResource(
}
}
+ if (failedClient == NULL) {
+ return true;
+ }
+
{
Mutex::Autolock lock(mLock);
bool found = false;
@@ -329,7 +344,7 @@ bool ResourceManagerService::reclaimResource(
}
}
- return (failedClient == NULL);
+ return false;
}
bool ResourceManagerService::getAllClients_l(
diff --git a/soundtrigger/ISoundTrigger.cpp b/soundtrigger/ISoundTrigger.cpp
index eecc1ea..4df2068 100644
--- a/soundtrigger/ISoundTrigger.cpp
+++ b/soundtrigger/ISoundTrigger.cpp
@@ -60,11 +60,13 @@ public:
data.writeInterfaceToken(ISoundTrigger::getInterfaceDescriptor());
data.writeStrongBinder(IInterface::asBinder(modelMemory));
status_t status = remote()->transact(LOAD_SOUND_MODEL, data, &reply);
- if (status != NO_ERROR ||
- (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ if (status != NO_ERROR) {
return status;
}
- reply.read(handle, sizeof(sound_model_handle_t));
+ status = (status_t)reply.readInt32();
+ if (status == NO_ERROR) {
+ reply.read(handle, sizeof(sound_model_handle_t));
+ }
return status;
}
@@ -74,7 +76,7 @@ public:
data.writeInterfaceToken(ISoundTrigger::getInterfaceDescriptor());
data.write(&handle, sizeof(sound_model_handle_t));
status_t status = remote()->transact(UNLOAD_SOUND_MODEL, data, &reply);
- if (status != NO_ERROR) {
+ if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
}
return status;
@@ -93,7 +95,7 @@ public:
}
data.writeStrongBinder(IInterface::asBinder(dataMemory));
status_t status = remote()->transact(START_RECOGNITION, data, &reply);
- if (status != NO_ERROR) {
+ if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
}
return status;
@@ -105,7 +107,7 @@ public:
data.writeInterfaceToken(ISoundTrigger::getInterfaceDescriptor());
data.write(&handle, sizeof(sound_model_handle_t));
status_t status = remote()->transact(STOP_RECOGNITION, data, &reply);
- if (status != NO_ERROR) {
+ if (status == NO_ERROR) {
status = (status_t)reply.readInt32();
}
return status;
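The ISoundTrigger hunks all enforce the same proxy-side rule: only touch the reply Parcel when transact() itself succeeded, and only read optional payload after the service-reported status is NO_ERROR. A hedged sketch of the pattern (function and parameter names are illustrative, not from the patch):

    #include <binder/IBinder.h>
    #include <binder/Parcel.h>

    using namespace android;

    status_t callRemote(const sp<IBinder>& remote, uint32_t code,
                        const Parcel& data, int32_t* payload) {
        Parcel reply;
        status_t status = remote->transact(code, data, &reply);
        if (status != NO_ERROR) {
            return status;                           // transport failed; reply is invalid
        }
        status = (status_t) reply.readInt32();       // status written by the service
        if (status == NO_ERROR && payload != nullptr) {
            *payload = reply.readInt32();            // payload is only present on success
        }
        return status;
    }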
diff --git a/soundtrigger/ISoundTriggerHwService.cpp b/soundtrigger/ISoundTriggerHwService.cpp
index e14a771..e37bae3 100644
--- a/soundtrigger/ISoundTriggerHwService.cpp
+++ b/soundtrigger/ISoundTriggerHwService.cpp
@@ -85,8 +85,11 @@ public:
data.writeInterfaceToken(ISoundTriggerHwService::getInterfaceDescriptor());
data.write(&handle, sizeof(sound_trigger_module_handle_t));
data.writeStrongBinder(IInterface::asBinder(client));
- remote()->transact(ATTACH, data, &reply);
- status_t status = reply.readInt32();
+ status_t status = remote()->transact(ATTACH, data, &reply);
+ if (status != NO_ERROR) {
+ return status;
+ }
+ status = reply.readInt32();
if (reply.readInt32() != 0) {
module = interface_cast<ISoundTrigger>(reply.readStrongBinder());
}