-rw-r--r--  camera/Android.mk | 6
-rw-r--r--  camera/ICameraService.cpp | 6
-rw-r--r--  camera/ProCamera.cpp | 3
-rw-r--r--  camera/camera2/CaptureRequest.cpp (renamed from camera/photography/CaptureRequest.cpp) | 2
-rw-r--r--  camera/camera2/ICameraDeviceCallbacks.cpp (renamed from camera/photography/ICameraDeviceCallbacks.cpp) | 20
-rw-r--r--  camera/camera2/ICameraDeviceUser.cpp (renamed from camera/photography/ICameraDeviceUser.cpp) | 6
-rw-r--r--  cmds/screenrecord/screenrecord.cpp | 61
-rw-r--r--  cmds/stagefright/stagefright.cpp | 2
-rw-r--r--  include/camera/camera2/CaptureRequest.h (renamed from include/camera/photography/CaptureRequest.h) | 0
-rw-r--r--  include/camera/camera2/ICameraDeviceCallbacks.h (renamed from include/camera/photography/ICameraDeviceCallbacks.h) | 0
-rw-r--r--  include/camera/camera2/ICameraDeviceUser.h (renamed from include/camera/photography/ICameraDeviceUser.h) | 0
-rw-r--r--  include/media/AudioRecord.h | 44
-rw-r--r--  include/media/AudioSystem.h | 2
-rw-r--r--  include/media/AudioTrack.h | 27
-rw-r--r--  include/media/IAudioFlinger.h | 6
-rw-r--r--  include/media/IOMX.h | 4
-rw-r--r--  include/media/stagefright/SurfaceMediaSource.h | 2
-rw-r--r--  include/media/stagefright/foundation/ALooperRoster.h | 1
-rw-r--r--  include/private/media/AudioTrackShared.h | 25
-rwxr-xr-x  libvideoeditor/lvpp/NativeWindowRenderer.cpp | 2
-rw-r--r--  media/libmedia/AudioRecord.cpp | 105
-rw-r--r--  media/libmedia/AudioTrack.cpp | 23
-rw-r--r--  media/libmedia/AudioTrackShared.cpp | 43
-rw-r--r--  media/libmedia/IAudioFlinger.cpp | 18
-rw-r--r--  media/libmedia/IAudioFlingerClient.cpp | 2
-rw-r--r--  media/libmedia/IOMX.cpp | 33
-rw-r--r--  media/libmedia/mediaplayer.cpp | 7
-rw-r--r--  media/libstagefright/ACodec.cpp | 17
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 2
-rw-r--r--  media/libstagefright/OMXClient.cpp | 11
-rw-r--r--  media/libstagefright/SurfaceMediaSource.cpp | 8
-rw-r--r--  media/libstagefright/foundation/ALooper.cpp | 4
-rw-r--r--  media/libstagefright/foundation/ALooperRoster.cpp | 14
-rw-r--r--  media/libstagefright/include/OMX.h | 4
-rw-r--r--  media/libstagefright/include/OMXNodeInstance.h | 4
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.cpp | 7
-rw-r--r--  media/libstagefright/omx/OMX.cpp | 7
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp | 20
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.cpp | 3
-rw-r--r--  services/audioflinger/AudioFlinger.cpp | 15
-rw-r--r--  services/audioflinger/AudioFlinger.h | 3
-rw-r--r--  services/audioflinger/AudioResampler.h | 8
-rw-r--r--  services/audioflinger/PlaybackTracks.h | 2
-rw-r--r--  services/audioflinger/Threads.cpp | 164
-rw-r--r--  services/audioflinger/Threads.h | 29
-rw-r--r--  services/audioflinger/Tracks.cpp | 37
-rw-r--r--  services/camera/libcameraservice/Android.mk | 48
-rw-r--r--  services/camera/libcameraservice/CameraDeviceFactory.cpp | 7
-rw-r--r--  services/camera/libcameraservice/CameraDeviceFactory.h | 1
-rw-r--r--  services/camera/libcameraservice/CameraService.cpp | 8
-rw-r--r--  services/camera/libcameraservice/CameraService.h | 4
-rw-r--r--  services/camera/libcameraservice/api1/Camera2Client.cpp (renamed from services/camera/libcameraservice/Camera2Client.cpp) | 14
-rw-r--r--  services/camera/libcameraservice/api1/Camera2Client.h (renamed from services/camera/libcameraservice/Camera2Client.h) | 28
-rw-r--r--  services/camera/libcameraservice/api1/CameraClient.cpp (renamed from services/camera/libcameraservice/CameraClient.cpp) | 5
-rw-r--r--  services/camera/libcameraservice/api1/CameraClient.h (renamed from services/camera/libcameraservice/CameraClient.h) | 0
-rw-r--r--  services/camera/libcameraservice/api1/client2/BurstCapture.cpp (renamed from services/camera/libcameraservice/camera2/BurstCapture.cpp) | 4
-rw-r--r--  services/camera/libcameraservice/api1/client2/BurstCapture.h (renamed from services/camera/libcameraservice/camera2/BurstCapture.h) | 5
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp (renamed from services/camera/libcameraservice/camera2/CallbackProcessor.cpp) | 11
-rw-r--r--  services/camera/libcameraservice/api1/client2/CallbackProcessor.h (renamed from services/camera/libcameraservice/camera2/CallbackProcessor.h) | 7
-rw-r--r--  services/camera/libcameraservice/api1/client2/Camera2Heap.h (renamed from services/camera/libcameraservice/camera2/Camera2Heap.h) | 0
-rw-r--r--  services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp (renamed from services/camera/libcameraservice/camera2/CaptureSequencer.cpp) | 11
-rw-r--r--  services/camera/libcameraservice/api1/client2/CaptureSequencer.h (renamed from services/camera/libcameraservice/camera2/CaptureSequencer.h) | 0
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.cpp (renamed from services/camera/libcameraservice/camera2/FrameProcessor.cpp) | 10
-rw-r--r--  services/camera/libcameraservice/api1/client2/FrameProcessor.h (renamed from services/camera/libcameraservice/camera2/FrameProcessor.h) | 4
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegCompressor.cpp (renamed from services/camera/libcameraservice/camera2/JpegCompressor.cpp) | 0
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegCompressor.h (renamed from services/camera/libcameraservice/camera2/JpegCompressor.h) | 0
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.cpp (renamed from services/camera/libcameraservice/camera2/JpegProcessor.cpp) | 12
-rw-r--r--  services/camera/libcameraservice/api1/client2/JpegProcessor.h (renamed from services/camera/libcameraservice/camera2/JpegProcessor.h) | 3
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.cpp (renamed from services/camera/libcameraservice/camera2/Parameters.cpp) | 0
-rw-r--r--  services/camera/libcameraservice/api1/client2/Parameters.h (renamed from services/camera/libcameraservice/camera2/Parameters.h) | 0
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp (renamed from services/camera/libcameraservice/camera2/StreamingProcessor.cpp) | 11
-rw-r--r--  services/camera/libcameraservice/api1/client2/StreamingProcessor.h (renamed from services/camera/libcameraservice/camera2/StreamingProcessor.h) | 2
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.cpp (renamed from services/camera/libcameraservice/camera2/ZslProcessor.cpp) | 11
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor.h (renamed from services/camera/libcameraservice/camera2/ZslProcessor.h) | 12
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp (renamed from services/camera/libcameraservice/camera2/ZslProcessor3.cpp) | 10
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessor3.h (renamed from services/camera/libcameraservice/camera2/ZslProcessor3.h) | 13
-rw-r--r--  services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h (renamed from services/camera/libcameraservice/camera2/ZslProcessorInterface.h) | 0
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.cpp (renamed from services/camera/libcameraservice/photography/CameraDeviceClient.cpp) | 16
-rw-r--r--  services/camera/libcameraservice/api2/CameraDeviceClient.h (renamed from services/camera/libcameraservice/photography/CameraDeviceClient.h) | 14
-rw-r--r--  services/camera/libcameraservice/api_pro/ProCamera2Client.cpp (renamed from services/camera/libcameraservice/ProCamera2Client.cpp) | 10
-rw-r--r--  services/camera/libcameraservice/api_pro/ProCamera2Client.h (renamed from services/camera/libcameraservice/ProCamera2Client.h) | 10
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.cpp (renamed from services/camera/libcameraservice/Camera2ClientBase.cpp) | 8
-rw-r--r--  services/camera/libcameraservice/common/Camera2ClientBase.h (renamed from services/camera/libcameraservice/Camera2ClientBase.h) | 5
-rw-r--r--  services/camera/libcameraservice/common/CameraDeviceBase.cpp (renamed from services/camera/libcameraservice/CameraDeviceBase.cpp) | 0
-rw-r--r--  services/camera/libcameraservice/common/CameraDeviceBase.h (renamed from services/camera/libcameraservice/CameraDeviceBase.h) | 0
-rw-r--r--  services/camera/libcameraservice/common/FrameProcessorBase.cpp (renamed from services/camera/libcameraservice/camera2/ProFrameProcessor.cpp) | 34
-rw-r--r--  services/camera/libcameraservice/common/FrameProcessorBase.h (renamed from services/camera/libcameraservice/camera2/ProFrameProcessor.h) | 6
-rw-r--r--  services/camera/libcameraservice/device1/CameraHardwareInterface.h (renamed from services/camera/libcameraservice/CameraHardwareInterface.h) | 0
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.cpp (renamed from services/camera/libcameraservice/Camera2Device.cpp) | 0
-rw-r--r--  services/camera/libcameraservice/device2/Camera2Device.h (renamed from services/camera/libcameraservice/Camera2Device.h) | 2
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.cpp (renamed from services/camera/libcameraservice/Camera3Device.cpp) | 8
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Device.h (renamed from services/camera/libcameraservice/Camera3Device.h) | 18
-rw-r--r--  services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp (renamed from services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp) | 0
-rw-r--r--  services/camera/libcameraservice/device3/Camera3IOStreamBase.h (renamed from services/camera/libcameraservice/camera3/Camera3IOStreamBase.h) | 2
-rw-r--r--  services/camera/libcameraservice/device3/Camera3InputStream.cpp (renamed from services/camera/libcameraservice/camera3/Camera3InputStream.cpp) | 10
-rw-r--r--  services/camera/libcameraservice/device3/Camera3InputStream.h (renamed from services/camera/libcameraservice/camera3/Camera3InputStream.h) | 9
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStream.cpp (renamed from services/camera/libcameraservice/camera3/Camera3OutputStream.cpp) | 11
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStream.h (renamed from services/camera/libcameraservice/camera3/Camera3OutputStream.h) | 3
-rw-r--r--  services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h (renamed from services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h) | 0
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Stream.cpp (renamed from services/camera/libcameraservice/camera3/Camera3Stream.cpp) | 18
-rw-r--r--  services/camera/libcameraservice/device3/Camera3Stream.h (renamed from services/camera/libcameraservice/camera3/Camera3Stream.h) | 4
-rw-r--r--  services/camera/libcameraservice/device3/Camera3StreamBufferListener.h (renamed from services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h) | 0
-rw-r--r--  services/camera/libcameraservice/device3/Camera3StreamInterface.h (renamed from services/camera/libcameraservice/camera3/Camera3StreamInterface.h) | 0
-rw-r--r--  services/camera/libcameraservice/device3/Camera3ZslStream.cpp (renamed from services/camera/libcameraservice/camera3/Camera3ZslStream.cpp) | 8
-rw-r--r--  services/camera/libcameraservice/device3/Camera3ZslStream.h (renamed from services/camera/libcameraservice/camera3/Camera3ZslStream.h) | 0
-rw-r--r--  services/camera/libcameraservice/gui/RingBufferConsumer.cpp | 17
-rw-r--r--  services/camera/libcameraservice/gui/RingBufferConsumer.h | 4
107 files changed, 838 insertions, 429 deletions
diff --git a/camera/Android.mk b/camera/Android.mk
index 8f58f87..e633450 100644
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -16,9 +16,9 @@ LOCAL_SRC_FILES:= \
ICameraRecordingProxyListener.cpp \
IProCameraUser.cpp \
IProCameraCallbacks.cpp \
- photography/ICameraDeviceUser.cpp \
- photography/ICameraDeviceCallbacks.cpp \
- photography/CaptureRequest.cpp \
+ camera2/ICameraDeviceUser.cpp \
+ camera2/ICameraDeviceCallbacks.cpp \
+ camera2/CaptureRequest.cpp \
ProCamera.cpp \
CameraBase.cpp \
diff --git a/camera/ICameraService.cpp b/camera/ICameraService.cpp
index 068fb0f..876a2df 100644
--- a/camera/ICameraService.cpp
+++ b/camera/ICameraService.cpp
@@ -31,8 +31,8 @@
#include <camera/IProCameraCallbacks.h>
#include <camera/ICamera.h>
#include <camera/ICameraClient.h>
-#include <camera/photography/ICameraDeviceUser.h>
-#include <camera/photography/ICameraDeviceCallbacks.h>
+#include <camera/camera2/ICameraDeviceUser.h>
+#include <camera/camera2/ICameraDeviceCallbacks.h>
namespace android {
@@ -151,7 +151,7 @@ public:
return interface_cast<IProCameraUser>(reply.readStrongBinder());
}
- // connect to camera service (android.hardware.photography.CameraDevice)
+ // connect to camera service (android.hardware.camera2.CameraDevice)
virtual sp<ICameraDeviceUser> connect(
const sp<ICameraDeviceCallbacks>& cameraCb,
int cameraId,
diff --git a/camera/ProCamera.cpp b/camera/ProCamera.cpp
index 1040415..f6c9ca1 100644
--- a/camera/ProCamera.cpp
+++ b/camera/ProCamera.cpp
@@ -251,8 +251,7 @@ status_t ProCamera::createStreamCpu(int width, int height, int format,
sp<CpuConsumer> cc = new CpuConsumer(bq, heapCount/*, synchronousMode*/);
cc->setName(String8("ProCamera::mCpuConsumer"));
- sp<Surface> stc = new Surface(
- cc->getProducerInterface());
+ sp<Surface> stc = new Surface(bq);
status_t s = createStream(width, height, format,
stc->getIGraphicBufferProducer(),
diff --git a/camera/photography/CaptureRequest.cpp b/camera/camera2/CaptureRequest.cpp
index b822fc9..57e5319 100644
--- a/camera/photography/CaptureRequest.cpp
+++ b/camera/camera2/CaptureRequest.cpp
@@ -19,7 +19,7 @@
#define LOG_TAG "CameraRequest"
#include <utils/Log.h>
-#include <camera/photography/CaptureRequest.h>
+#include <camera/camera2/CaptureRequest.h>
#include <binder/Parcel.h>
#include <gui/Surface.h>
diff --git a/camera/photography/ICameraDeviceCallbacks.cpp b/camera/camera2/ICameraDeviceCallbacks.cpp
index 19763d7..3cec1f4 100644
--- a/camera/photography/ICameraDeviceCallbacks.cpp
+++ b/camera/camera2/ICameraDeviceCallbacks.cpp
@@ -26,7 +26,7 @@
#include <gui/Surface.h>
#include <utils/Mutex.h>
-#include <camera/photography/ICameraDeviceCallbacks.h>
+#include <camera/camera2/ICameraDeviceCallbacks.h>
#include "camera/CameraMetadata.h"
namespace android {
@@ -57,11 +57,12 @@ public:
data.writeNoException();
}
- void onResultReceived(int32_t frameId, const CameraMetadata& result) {
+ void onResultReceived(int32_t requestId, const CameraMetadata& result) {
ALOGV("onResultReceived");
Parcel data, reply;
data.writeInterfaceToken(ICameraDeviceCallbacks::getInterfaceDescriptor());
- data.writeInt32(frameId);
+ data.writeInt32(requestId);
+ data.writeInt32(1); // to mark presence of metadata object
result.writeToParcel(&data);
remote()->transact(RESULT_RECEIVED, data, &reply, IBinder::FLAG_ONEWAY);
data.writeNoException();
@@ -69,7 +70,7 @@ public:
};
IMPLEMENT_META_INTERFACE(CameraDeviceCallbacks,
- "android.hardware.photography.ICameraDeviceCallbacks");
+ "android.hardware.camera2.ICameraDeviceCallbacks");
// ----------------------------------------------------------------------
@@ -91,10 +92,14 @@ status_t BnCameraDeviceCallbacks::onTransact(
case RESULT_RECEIVED: {
ALOGV("RESULT_RECEIVED");
CHECK_INTERFACE(ICameraDeviceCallbacks, data, reply);
- int32_t frameId = data.readInt32();
+ int32_t requestId = data.readInt32();
CameraMetadata result;
- result.readFromParcel(const_cast<Parcel*>(&data));
- onResultReceived(frameId, result);
+ if (data.readInt32() != 0) {
+ result.readFromParcel(const_cast<Parcel*>(&data));
+ } else {
+ ALOGW("No metadata object is present in result");
+ }
+ onResultReceived(requestId, result);
data.readExceptionCode();
return NO_ERROR;
break;
@@ -107,4 +112,3 @@ status_t BnCameraDeviceCallbacks::onTransact(
// ----------------------------------------------------------------------------
}; // namespace android
-
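
The callback now writes an int32 presence marker ahead of the CameraMetadata, so the receiver can tell "no metadata attached" apart from an empty object. A minimal sketch of the read side, mirroring the BnCameraDeviceCallbacks hunk above (assumes the usual Parcel/CameraMetadata headers; illustrative only, not part of the patch):

    #include <binder/Parcel.h>
    #include <camera/CameraMetadata.h>

    // Sketch: read an optional CameraMetadata guarded by a presence marker.
    static void readOptionalMetadata(const android::Parcel& data,
                                     android::CameraMetadata* result) {
        if (data.readInt32() != 0) {
            // non-zero marker: a serialized metadata object follows
            result->readFromParcel(const_cast<android::Parcel*>(&data));
        }
        // zero marker: the sender attached no metadata; *result stays empty
    }
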
diff --git a/camera/photography/ICameraDeviceUser.cpp b/camera/camera2/ICameraDeviceUser.cpp
index 95609da..923f487 100644
--- a/camera/photography/ICameraDeviceUser.cpp
+++ b/camera/camera2/ICameraDeviceUser.cpp
@@ -21,11 +21,11 @@
#include <stdint.h>
#include <sys/types.h>
#include <binder/Parcel.h>
-#include <camera/photography/ICameraDeviceUser.h>
+#include <camera/camera2/ICameraDeviceUser.h>
#include <gui/IGraphicBufferProducer.h>
#include <gui/Surface.h>
#include <camera/CameraMetadata.h>
-#include <camera/photography/CaptureRequest.h>
+#include <camera/camera2/CaptureRequest.h>
namespace android {
@@ -189,7 +189,7 @@ private:
};
IMPLEMENT_META_INTERFACE(CameraDeviceUser,
- "android.hardware.photography.ICameraDeviceUser");
+ "android.hardware.camera2.ICameraDeviceUser");
// ----------------------------------------------------------------------
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 3e79ee0..28fc00f 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -44,8 +44,9 @@ using namespace android;
// Command-line parameters.
static bool gVerbose = false; // chatty on stdout
static bool gRotate = false; // rotate 90 degrees
-static uint32_t gVideoWidth = 1280; // 720p
-static uint32_t gVideoHeight = 720;
+static bool gSizeSpecified = false; // was size explicitly requested?
+static uint32_t gVideoWidth = 0; // default width+height
+static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 4000000; // 4Mbps
// Set by signal handler to stop recording.
@@ -107,6 +108,14 @@ static status_t configureSignals()
}
/*
+ * Returns "true" if the device is rotated 90 degrees.
+ */
+static bool isDeviceRotated(int orientation) {
+ return orientation != DISPLAY_ORIENTATION_0 &&
+ orientation != DISPLAY_ORIENTATION_180;
+}
+
+/*
* Configures and starts the MediaCodec encoder. Obtains an input surface
* from the codec.
*/
@@ -114,6 +123,11 @@ static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
sp<IGraphicBufferProducer>* pBufferProducer) {
status_t err;
+ if (gVerbose) {
+ printf("Configuring recorder for %dx%d video at %.2fMbps\n",
+ gVideoWidth, gVideoHeight, gBitRate / 1000000.0);
+ }
+
sp<AMessage> format = new AMessage;
format->setInt32("width", gVideoWidth);
format->setInt32("height", gVideoHeight);
@@ -152,6 +166,7 @@ static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
return err;
}
+ ALOGV("Codec prepared");
*pCodec = codec;
*pBufferProducer = bufferProducer;
return 0;
@@ -169,8 +184,7 @@ static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
// Set the region of the layer stack we're interested in, which in our
// case is "all of it". If the app is rotated (so that the width of the
// app is based on the height of the display), reverse width/height.
- bool deviceRotated = mainDpyInfo.orientation != DISPLAY_ORIENTATION_0 &&
- mainDpyInfo.orientation != DISPLAY_ORIENTATION_180;
+ bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
uint32_t sourceWidth, sourceHeight;
if (!deviceRotated) {
sourceWidth = mainDpyInfo.w;
@@ -295,6 +309,12 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
bufIndex, size, ptsUsec);
CHECK(trackIdx != -1);
+ // If the virtual display isn't providing us with timestamps,
+ // use the current time.
+ if (ptsUsec == 0) {
+ ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
+ }
+
// The MediaMuxer docs are unclear, but it appears that we
// need to pass either the full set of BufferInfo flags, or
// (flags & BUFFER_FLAG_SYNCFRAME).
@@ -370,11 +390,6 @@ static status_t runEncoder(const sp<MediaCodec>& encoder,
static status_t recordScreen(const char* fileName) {
status_t err;
- if (gVerbose) {
- printf("Recording %dx%d video at %.2fMbps\n",
- gVideoWidth, gVideoHeight, gBitRate / 1000000.0);
- }
-
// Configure signal handler.
err = configureSignals();
if (err != NO_ERROR) return err;
@@ -399,11 +414,31 @@ static status_t recordScreen(const char* fileName) {
mainDpyInfo.orientation);
}
+ bool rotated = isDeviceRotated(mainDpyInfo.orientation);
+ if (gVideoWidth == 0) {
+ gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
+ }
+ if (gVideoHeight == 0) {
+ gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
+ }
+
// Configure and start the encoder.
sp<MediaCodec> encoder;
sp<IGraphicBufferProducer> bufferProducer;
err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
- if (err != NO_ERROR) return err;
+ if (err != NO_ERROR && !gSizeSpecified) {
+ ALOGV("Retrying with 720p");
+ if (gVideoWidth != 1280 && gVideoHeight != 720) {
+ fprintf(stderr, "WARNING: failed at %dx%d, retrying at 720p\n",
+ gVideoWidth, gVideoHeight);
+ gVideoWidth = 1280;
+ gVideoHeight = 720;
+ err = prepareEncoder(mainDpyInfo.fps, &encoder, &bufferProducer);
+ }
+ }
+ if (err != NO_ERROR) {
+ return err;
+ }
// Configure virtual display.
sp<IBinder> dpy;
@@ -478,6 +513,8 @@ static void usage() {
fprintf(stderr,
"Usage: screenrecord [options] <filename>\n"
"\n"
+ "Records the device's display to a .mp4 file.\n"
+ "\n"
"Options:\n"
"--size WIDTHxHEIGHT\n"
" Set the video size, e.g. \"1280x720\". For best results, use\n"
@@ -485,8 +522,7 @@ static void usage() {
"--bit-rate RATE\n"
" Set the video bit rate, in megabits per second. Default 4Mbps.\n"
"--rotate\n"
- " Rotate the output 90 degrees. Useful for filling the frame\n"
- " when in portrait mode.\n"
+ " Rotate the output 90 degrees.\n"
"--verbose\n"
" Display interesting information on stdout.\n"
"--help\n"
@@ -536,6 +572,7 @@ int main(int argc, char* const argv[]) {
gVideoWidth, gVideoHeight);
return 2;
}
+ gSizeSpecified = true;
break;
case 'b':
gBitRate = atoi(optarg);
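
With the change above, screenrecord first configures the encoder at the display's native resolution (unless --size was given) and retries once at 720p if that fails. A self-contained sketch of that fallback policy (hypothetical helper; the patch inlines the equivalent logic in recordScreen()):

    #include <cstdint>

    struct VideoSize { uint32_t w, h; };

    // Returns the size to retry with, or {0, 0} if no retry should be attempted.
    static VideoSize fallbackSize(VideoSize failed, bool sizeWasSpecified) {
        if (sizeWasSpecified) {
            return {0, 0};              // the user asked for this size; just fail
        }
        if (failed.w == 1280 && failed.h == 720) {
            return {0, 0};              // 720p already failed; nothing left to try
        }
        return {1280, 720};             // one retry at the known-safe 720p size
    }
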
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index 529b96c..797e0b6 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -939,7 +939,7 @@ int main(int argc, char **argv) {
sp<BufferQueue> bq = new BufferQueue();
sp<GLConsumer> texture = new GLConsumer(bq, 0 /* tex */);
- gSurface = new Surface(texture->getBufferQueue());
+ gSurface = new Surface(bq);
}
CHECK_EQ((status_t)OK,
diff --git a/include/camera/photography/CaptureRequest.h b/include/camera/camera2/CaptureRequest.h
index e56d61f..e56d61f 100644
--- a/include/camera/photography/CaptureRequest.h
+++ b/include/camera/camera2/CaptureRequest.h
diff --git a/include/camera/photography/ICameraDeviceCallbacks.h b/include/camera/camera2/ICameraDeviceCallbacks.h
index 041fa65..041fa65 100644
--- a/include/camera/photography/ICameraDeviceCallbacks.h
+++ b/include/camera/camera2/ICameraDeviceCallbacks.h
diff --git a/include/camera/photography/ICameraDeviceUser.h b/include/camera/camera2/ICameraDeviceUser.h
index 45988d0..45988d0 100644
--- a/include/camera/photography/ICameraDeviceUser.h
+++ b/include/camera/camera2/ICameraDeviceUser.h
diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h
index 7aa3c24..eef108b 100644
--- a/include/media/AudioRecord.h
+++ b/include/media/AudioRecord.h
@@ -35,8 +35,6 @@ class AudioRecord : public RefBase
{
public:
- static const int DEFAULT_SAMPLE_RATE = 8000;
-
/* Events used by AudioRecord callback function (callback_t).
* Keep in sync with frameworks/base/media/java/android/media/AudioRecord.java NATIVE_EVENT_*.
*/
@@ -62,6 +60,7 @@ public:
size_t frameCount; // number of sample frames corresponding to size;
// on input it is the number of frames available,
// on output is the number of frames actually drained
+ // (currently ignored, but will make the primary field in future)
size_t size; // input/output in bytes == frameCount * frameSize
// FIXME this is redundant with respect to frameCount,
@@ -131,7 +130,7 @@ public:
* sampleRate: Data sink sampling rate in Hz.
* format: Audio format (e.g AUDIO_FORMAT_PCM_16_BIT for signed
* 16 bits per sample).
- * channelMask: Channel mask.
+ * channelMask: Channel mask, such that audio_is_input_channel(channelMask) is true.
* frameCount: Minimum size of track PCM buffer in frames. This defines the
* application's contribution to the
* latency of the track. The actual size selected by the AudioRecord could
@@ -144,19 +143,21 @@ public:
* frames are ready in record track output buffer.
* sessionId: Not yet supported.
* transferType: How data is transferred from AudioRecord.
+ * flags: See comments on audio_input_flags_t in <system/audio.h>
* threadCanCallJava: Not present in parameter list, and so is fixed at false.
*/
AudioRecord(audio_source_t inputSource,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = AUDIO_CHANNEL_IN_MONO,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
int frameCount = 0,
callback_t cbf = NULL,
void* user = NULL,
int notificationFrames = 0,
int sessionId = 0,
- transfer_type transferType = TRANSFER_DEFAULT);
+ transfer_type transferType = TRANSFER_DEFAULT,
+ audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
/* Terminates the AudioRecord and unregisters it from AudioFlinger.
* Also destroys all resources associated with the AudioRecord.
@@ -178,17 +179,18 @@ public:
*
* threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI.
*/
- status_t set(audio_source_t inputSource = AUDIO_SOURCE_DEFAULT,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = AUDIO_CHANNEL_IN_MONO,
+ status_t set(audio_source_t inputSource,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
int frameCount = 0,
callback_t cbf = NULL,
void* user = NULL,
int notificationFrames = 0,
bool threadCanCallJava = false,
int sessionId = 0,
- transfer_type transferType = TRANSFER_DEFAULT);
+ transfer_type transferType = TRANSFER_DEFAULT,
+ audio_input_flags_t flags = AUDIO_INPUT_FLAG_NONE);
/* Result of constructing the AudioRecord. This must be checked
* before using any AudioRecord API (except for set()), because using
@@ -363,7 +365,12 @@ public:
* Input parameter 'size' is in byte units.
* This is implemented on top of obtainBuffer/releaseBuffer. For best
* performance use callbacks. Returns actual number of bytes read >= 0,
- * or a negative status code.
+ * or one of the following negative status codes:
+ * INVALID_OPERATION AudioRecord is configured for streaming mode
+ * BAD_VALUE size is invalid
+ * WOULD_BLOCK when obtainBuffer() returns same, or
+ * AudioRecord was stopped during the read
+ * or any other error code returned by IAudioRecord::start() or restoreRecord_l().
*/
ssize_t read(void* buffer, size_t size);
@@ -420,6 +427,7 @@ private:
status_t openRecord_l(uint32_t sampleRate,
audio_format_t format,
size_t frameCount,
+ audio_input_flags_t flags,
audio_io_handle_t input,
size_t epoch);
@@ -437,10 +445,13 @@ private:
// for client callback handler
callback_t mCbf; // callback handler for events, or NULL
- void* mUserData; // for client callback handler
+ void* mUserData;
// for notification APIs
- uint32_t mNotificationFrames; // frames between each notification callback
+ uint32_t mNotificationFramesReq; // requested number of frames between each
+ // notification callback
+ uint32_t mNotificationFramesAct; // actual number of frames between each
+ // notification callback
bool mRefreshRemaining; // processAudioBuffer() should refresh next 2
// These are private to processAudioBuffer(), and are not protected by a lock
@@ -464,6 +475,7 @@ private:
audio_source_t mInputSource;
uint32_t mLatency; // in ms
audio_channel_mask_t mChannelMask;
+ audio_input_flags_t mFlags;
int mSessionId;
transfer_type mTransfer;
@@ -476,11 +488,13 @@ private:
int mPreviousPriority; // before start()
SchedPolicy mPreviousSchedulingGroup;
+ bool mAwaitBoost; // thread should wait for priority boost before running
// The proxy should only be referenced while a lock is held because the proxy isn't
// multi-thread safe.
// An exception is that a blocking ClientProxy::obtainBuffer() may be called without a lock,
// provided that the caller also holds an extra reference to the proxy and shared memory to keep
+ // them around in case they are replaced during the obtainBuffer().
sp<AudioRecordClientProxy> mProxy;
bool mInOverrun; // whether recorder is currently in overrun state
diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h
index f9e625e..006af08 100644
--- a/include/media/AudioSystem.h
+++ b/include/media/AudioSystem.h
@@ -158,7 +158,7 @@ public:
: samplingRate(0), format(AUDIO_FORMAT_DEFAULT), channelMask(0), frameCount(0), latency(0) {}
uint32_t samplingRate;
- int32_t format;
+ audio_format_t format;
audio_channel_mask_t channelMask;
size_t frameCount;
uint32_t latency;
diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h
index da13a7f..ddb5842 100644
--- a/include/media/AudioTrack.h
+++ b/include/media/AudioTrack.h
@@ -116,6 +116,7 @@ public:
* Returned status (from utils/Errors.h) can be:
* - NO_ERROR: successful operation
* - NO_INIT: audio server or audio hardware not initialized
+ * - BAD_VALUE: unsupported configuration
*/
static status_t getMinFrameCount(size_t* frameCount,
@@ -170,9 +171,9 @@ public:
*/
AudioTrack( audio_stream_type_t streamType,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = 0,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t,
int frameCount = 0,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
@@ -194,10 +195,10 @@ public:
*/
AudioTrack( audio_stream_type_t streamType,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = 0,
- const sp<IMemory>& sharedBuffer = 0,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ const sp<IMemory>& sharedBuffer,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
void* user = NULL,
@@ -227,10 +228,10 @@ public:
*
* threadCanCallJava: Whether callbacks are made from an attached thread and thus can call JNI.
*/
- status_t set(audio_stream_type_t streamType = AUDIO_STREAM_DEFAULT,
- uint32_t sampleRate = 0,
- audio_format_t format = AUDIO_FORMAT_DEFAULT,
- audio_channel_mask_t channelMask = 0,
+ status_t set(audio_stream_type_t streamType,
+ uint32_t sampleRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
int frameCount = 0,
audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
callback_t cbf = NULL,
@@ -682,8 +683,9 @@ protected:
STATE_STOPPING,
} mState;
+ // for client callback handler
callback_t mCbf; // callback handler for events, or NULL
- void* mUserData; // for client callback handler
+ void* mUserData;
// for notification APIs
uint32_t mNotificationFramesReq; // requested number of frames between each
@@ -726,6 +728,7 @@ protected:
sp<AudioTrackClientProxy> mProxy; // primary owner of the memory
bool mInUnderrun; // whether track is currently in underrun state
+ String8 mName; // server's name for this IAudioTrack
private:
class DeathNotifier : public IBinder::DeathRecipient {
diff --git a/include/media/IAudioFlinger.h b/include/media/IAudioFlinger.h
index de45aa8..49f921b 100644
--- a/include/media/IAudioFlinger.h
+++ b/include/media/IAudioFlinger.h
@@ -67,6 +67,10 @@ public:
audio_io_handle_t output,
pid_t tid, // -1 means unused, otherwise must be valid non-0
int *sessionId,
+ // input: ignored
+ // output: server's description of IAudioTrack for display in logs.
+ // Don't attempt to parse, as the format could change.
+ String8& name,
status_t *status) = 0;
virtual sp<IAudioRecord> openRecord(
@@ -75,7 +79,7 @@ public:
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- track_flags_t flags,
+ track_flags_t *flags,
pid_t tid, // -1 means unused, otherwise must be valid non-0
int *sessionId,
status_t *status) = 0;
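
openRecord() now takes track_flags_t by pointer, making it an in/out parameter: the client requests flags such as TRACK_FAST and the server clears whatever it refuses to honor (the AudioRecord.cpp hunks further down are the real caller). A minimal sketch of the calling convention, assuming the revised interface above (illustrative only):

    #include <media/IAudioFlinger.h>

    using namespace android;

    // Sketch: request a fast capture path and check what the server granted.
    static sp<IAudioRecord> openPossiblyFastRecord(
            const sp<IAudioFlinger>& af, audio_io_handle_t input,
            uint32_t sampleRate, audio_format_t format,
            audio_channel_mask_t channelMask, size_t frameCount,
            pid_t tid, int* sessionId, status_t* status) {
        IAudioFlinger::track_flags_t flags = IAudioFlinger::TRACK_FAST;
        sp<IAudioRecord> record = af->openRecord(input, sampleRate, format,
                channelMask, frameCount,
                &flags,               // in: requested flags, out: granted flags
                tid, sessionId, status);
        if (record != 0 && !(flags & IAudioFlinger::TRACK_FAST)) {
            // The server denied the fast path; a caller would normally fall
            // back to larger, double-buffered notification periods here.
        }
        return record;
    }
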
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 38f9d11..6d116f0 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -97,6 +97,10 @@ public:
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0;
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) = 0;
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) = 0;
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
index 7d40379..db5f947 100644
--- a/include/media/stagefright/SurfaceMediaSource.h
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -56,7 +56,7 @@ class GraphicBuffer;
class SurfaceMediaSource : public MediaSource,
public MediaBufferObserver,
- protected BufferQueue::ConsumerListener {
+ protected ConsumerListener {
public:
enum { MIN_UNDEQUEUED_BUFFERS = 4};
diff --git a/include/media/stagefright/foundation/ALooperRoster.h b/include/media/stagefright/foundation/ALooperRoster.h
index 2e5fd73..940fc55 100644
--- a/include/media/stagefright/foundation/ALooperRoster.h
+++ b/include/media/stagefright/foundation/ALooperRoster.h
@@ -30,6 +30,7 @@ struct ALooperRoster {
const sp<ALooper> looper, const sp<AHandler> &handler);
void unregisterHandler(ALooper::handler_id handlerID);
+ void unregisterStaleHandlers();
status_t postMessage(const sp<AMessage> &msg, int64_t delayUs = 0);
void deliverMessage(const sp<AMessage> &msg);
diff --git a/include/private/media/AudioTrackShared.h b/include/private/media/AudioTrackShared.h
index b890180..1379379 100644
--- a/include/private/media/AudioTrackShared.h
+++ b/include/private/media/AudioTrackShared.h
@@ -31,6 +31,7 @@ namespace android {
// ----------------------------------------------------------------------------
+// for audio_track_cblk_t::mFlags
#define CBLK_UNDERRUN 0x01 // set by server immediately on output underrun, cleared by client
#define CBLK_FORCEREADY 0x02 // set: track is considered ready immediately by AudioFlinger,
// clear: track is ready when buffer full
@@ -89,8 +90,11 @@ struct audio_track_cblk_t
// The data members are grouped so that members accessed frequently and in the same context
// are in the same line of data cache.
- volatile uint32_t server; // updated asynchronously by server,
- // "for entertainment purposes only"
+ uint32_t mServer; // Number of filled frames consumed by server (mIsOut),
+ // or filled frames provided by server (!mIsOut).
+ // It is updated asynchronously by server without a barrier.
+ // The value should be used "for entertainment purposes only",
+ // which means don't make important decisions based on it.
size_t frameCount_; // used during creation to pass actual track buffer size
// from AudioFlinger to client, and not referenced again
@@ -118,13 +122,11 @@ private:
// client write-only, server read-only
uint16_t mSendLevel; // Fixed point U4.12 so 0x1000 means 1.0
- uint8_t mPad2; // unused
+ uint16_t mPad2; // unused
public:
- // read-only for client, server writes once at initialization and is then read-only
- uint8_t mName; // normal tracks: track name, fast tracks: track index
- volatile int32_t flags;
+ volatile int32_t mFlags; // combinations of CBLK_*
// Cache line boundary (32 bytes)
@@ -235,7 +237,7 @@ public:
void interrupt();
size_t getPosition() {
- return mEpoch + mCblk->server;
+ return mEpoch + mCblk->mServer;
}
void setEpoch(size_t epoch) {
@@ -413,6 +415,13 @@ public:
virtual void framesReadyIsCalledByMultipleThreads() { }
bool setStreamEndDone(); // and return previous value
+
+ // Add to the tally of underrun frames, and inform client of underrun
+ virtual void tallyUnderrunFrames(uint32_t frameCount);
+
+ // Return the total number of frames which AudioFlinger desired but were unavailable,
+ // and thus which resulted in an underrun.
+ virtual uint32_t getUnderrunFrames() const { return mCblk->u.mStreaming.mUnderrunFrames; }
};
class StaticAudioTrackServerProxy : public AudioTrackServerProxy {
@@ -427,6 +436,8 @@ public:
virtual void framesReadyIsCalledByMultipleThreads();
virtual status_t obtainBuffer(Buffer* buffer);
virtual void releaseBuffer(Buffer* buffer);
+ virtual void tallyUnderrunFrames(uint32_t frameCount);
+ virtual uint32_t getUnderrunFrames() const { return 0; }
private:
ssize_t pollPosition(); // poll for state queue update, and return current position
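
tallyUnderrunFrames()/getUnderrunFrames() give the playback server a uniform way to account for frames it wanted but could not obtain; the streaming proxy tallies them in the cblk while the static proxy only raises CBLK_UNDERRUN (see the AudioTrackShared.cpp hunks below). A hypothetical caller-side sketch, presumably matching the call sites in services/audioflinger/Threads.cpp (not shown in full here):

    #include <private/media/AudioTrackShared.h>

    using namespace android;

    // Hypothetical helper: record an underrun when the mixer obtained fewer
    // frames for this track than it wanted.
    static void noteShortfall(AudioTrackServerProxy& proxy,
                              size_t desired, size_t obtained) {
        if (obtained < desired) {
            proxy.tallyUnderrunFrames(desired - obtained);
        }
    }
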
diff --git a/libvideoeditor/lvpp/NativeWindowRenderer.cpp b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
index 84a8e15..8b362ef 100755
--- a/libvideoeditor/lvpp/NativeWindowRenderer.cpp
+++ b/libvideoeditor/lvpp/NativeWindowRenderer.cpp
@@ -570,7 +570,7 @@ RenderInput::RenderInput(NativeWindowRenderer* renderer, GLuint textureId)
, mTextureId(textureId) {
sp<BufferQueue> bq = new BufferQueue();
mST = new GLConsumer(bq, mTextureId);
- mSTC = new Surface(mST->getBufferQueue());
+ mSTC = new Surface(bq);
native_window_connect(mSTC.get(), NATIVE_WINDOW_API_MEDIA);
}
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index 8ae0908..2718420 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -60,10 +60,9 @@ status_t AudioRecord::getMinFrameCount(
// We double the size of input buffer for ping pong use of record buffer.
size <<= 1;
- if (audio_is_linear_pcm(format)) {
- uint32_t channelCount = popcount(channelMask);
- size /= channelCount * audio_bytes_per_sample(format);
- }
+ // Assumes audio_is_linear_pcm(format)
+ uint32_t channelCount = popcount(channelMask);
+ size /= channelCount * audio_bytes_per_sample(format);
*frameCount = size;
return NO_ERROR;
@@ -87,7 +86,8 @@ AudioRecord::AudioRecord(
void* user,
int notificationFrames,
int sessionId,
- transfer_type transferType)
+ transfer_type transferType,
+ audio_input_flags_t flags)
: mStatus(NO_INIT), mSessionId(0),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
mPreviousSchedulingGroup(SP_DEFAULT),
@@ -129,7 +129,8 @@ status_t AudioRecord::set(
int notificationFrames,
bool threadCanCallJava,
int sessionId,
- transfer_type transferType)
+ transfer_type transferType,
+ audio_input_flags_t flags)
{
switch (transferType) {
case TRANSFER_DEFAULT:
@@ -176,7 +177,8 @@ status_t AudioRecord::set(
}
if (sampleRate == 0) {
- sampleRate = DEFAULT_SAMPLE_RATE;
+ ALOGE("Invalid sample rate %u", sampleRate);
+ return BAD_VALUE;
}
mSampleRate = sampleRate;
@@ -190,6 +192,11 @@ status_t AudioRecord::set(
ALOGE("Invalid format %d", format);
return BAD_VALUE;
}
+ // Temporary restriction: AudioFlinger currently supports 16-bit PCM only
+ if (format != AUDIO_FORMAT_PCM_16_BIT) {
+ ALOGE("Format %d is not supported", format);
+ return BAD_VALUE;
+ }
mFormat = format;
if (!audio_is_input_channel(channelMask)) {
@@ -200,11 +207,8 @@ status_t AudioRecord::set(
uint32_t channelCount = popcount(channelMask);
mChannelCount = channelCount;
- if (audio_is_linear_pcm(format)) {
- mFrameSize = channelCount * audio_bytes_per_sample(format);
- } else {
- mFrameSize = sizeof(uint8_t);
- }
+ // Assumes audio_is_linear_pcm(format), else sizeof(uint8_t)
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
if (sessionId == 0 ) {
mSessionId = AudioSystem::newAudioSessionId();
@@ -213,6 +217,8 @@ status_t AudioRecord::set(
}
ALOGV("set(): mSessionId %d", mSessionId);
+ mFlags = flags;
+
audio_io_handle_t input = AudioSystem::getInput(inputSource,
sampleRate,
format,
@@ -244,7 +250,7 @@ status_t AudioRecord::set(
}
// create the IAudioRecord
- status = openRecord_l(sampleRate, format, frameCount, input, 0 /*epoch*/);
+ status = openRecord_l(sampleRate, format, frameCount, mFlags, input, 0 /*epoch*/);
if (status != NO_ERROR) {
return status;
}
@@ -261,7 +267,8 @@ status_t AudioRecord::set(
mActive = false;
mCbf = cbf;
- mNotificationFrames = notificationFrames;
+ mNotificationFramesReq = notificationFrames;
+ mNotificationFramesAct = 0;
mRefreshRemaining = true;
mUserData = user;
// TODO: add audio hardware input latency here
@@ -295,7 +302,7 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession)
mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition());
mNewPosition = mProxy->getPosition() + mUpdatePeriod;
- int32_t flags = android_atomic_acquire_load(&mCblk->flags);
+ int32_t flags = android_atomic_acquire_load(&mCblk->mFlags);
status_t status = NO_ERROR;
if (!(flags & CBLK_INVALID)) {
@@ -429,6 +436,7 @@ status_t AudioRecord::openRecord_l(
uint32_t sampleRate,
audio_format_t format,
size_t frameCount,
+ audio_input_flags_t flags,
audio_io_handle_t input,
size_t epoch)
{
@@ -439,15 +447,29 @@ status_t AudioRecord::openRecord_l(
return NO_INIT;
}
+ IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT;
pid_t tid = -1;
- // FIXME see similar logic at AudioTrack for tid
+
+ // Client can only express a preference for FAST. Server will perform additional tests.
+ // The only supported use case for FAST is callback transfer mode.
+ if (flags & AUDIO_INPUT_FLAG_FAST) {
+ if ((mTransfer != TRANSFER_CALLBACK) || (mAudioRecordThread == 0)) {
+ ALOGW("AUDIO_INPUT_FLAG_FAST denied by client");
+ // once denied, do not request again if IAudioRecord is re-created
+ flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST);
+ mFlags = flags;
+ } else {
+ trackFlags |= IAudioFlinger::TRACK_FAST;
+ tid = mAudioRecordThread->getTid();
+ }
+ }
int originalSessionId = mSessionId;
sp<IAudioRecord> record = audioFlinger->openRecord(input,
sampleRate, format,
mChannelMask,
frameCount,
- IAudioFlinger::TRACK_DEFAULT,
+ &trackFlags,
tid,
&mSessionId,
&status);
@@ -471,6 +493,27 @@ status_t AudioRecord::openRecord_l(
mCblkMemory = iMem;
audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer());
mCblk = cblk;
+ // FIXME missing fast track frameCount logic
+ mAwaitBoost = false;
+ mNotificationFramesAct = mNotificationFramesReq;
+ if (flags & AUDIO_INPUT_FLAG_FAST) {
+ if (trackFlags & IAudioFlinger::TRACK_FAST) {
+ ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", frameCount);
+ mAwaitBoost = true;
+ // double-buffering is not required for fast tracks, due to tighter scheduling
+ if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount) {
+ mNotificationFramesAct = frameCount;
+ }
+ } else {
+ ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %u", frameCount);
+ // once denied, do not request again if IAudioRecord is re-created
+ flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_FAST);
+ mFlags = flags;
+ if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
+ mNotificationFramesAct = frameCount/2;
+ }
+ }
+ }
// starting address of buffers in shared memory
void *buffers = (char*)cblk + sizeof(audio_track_cblk_t);
@@ -478,7 +521,7 @@ status_t AudioRecord::openRecord_l(
// update proxy
mProxy = new AudioRecordClientProxy(cblk, buffers, frameCount, mFrameSize);
mProxy->setEpoch(epoch);
- mProxy->setMinimum(mNotificationFrames);
+ mProxy->setMinimum(mNotificationFramesAct);
mDeathNotifier = new DeathNotifier(this);
mAudioRecord->asBinder()->linkToDeath(mDeathNotifier, this);
@@ -660,9 +703,29 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)
nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
{
mLock.lock();
+ if (mAwaitBoost) {
+ mAwaitBoost = false;
+ mLock.unlock();
+ static const int32_t kMaxTries = 5;
+ int32_t tryCounter = kMaxTries;
+ uint32_t pollUs = 10000;
+ do {
+ int policy = sched_getscheduler(0);
+ if (policy == SCHED_FIFO || policy == SCHED_RR) {
+ break;
+ }
+ usleep(pollUs);
+ pollUs <<= 1;
+ } while (tryCounter-- > 0);
+ if (tryCounter < 0) {
+ ALOGE("did not receive expected priority boost on time");
+ }
+ // Run again immediately
+ return 0;
+ }
// Can only reference mCblk while locked
- int32_t flags = android_atomic_and(~CBLK_OVERRUN, &mCblk->flags);
+ int32_t flags = android_atomic_and(~CBLK_OVERRUN, &mCblk->mFlags);
// Check for track invalidation
if (flags & CBLK_INVALID) {
@@ -705,7 +768,7 @@ nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
}
// Cache other fields that will be needed soon
- size_t notificationFrames = mNotificationFrames;
+ size_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
mRefreshRemaining = false;
mRemainingFrames = notificationFrames;
@@ -881,7 +944,7 @@ status_t AudioRecord::restoreRecord_l(const char *from)
// It will also delete the strong references on previous IAudioRecord and IMemory
size_t position = mProxy->getPosition();
mNewPosition = position + mUpdatePeriod;
- result = openRecord_l(mSampleRate, mFormat, mFrameCount, getInput_l(), position);
+ result = openRecord_l(mSampleRate, mFormat, mFrameCount, mFlags, getInput_l(), position);
if (result == NO_ERROR) {
if (mActive) {
// callback thread or sync event hasn't changed
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 3653b7f..dd0ec73 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -389,7 +389,7 @@ status_t AudioTrack::start()
mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition());
}
mNewPosition = mProxy->getPosition() + mUpdatePeriod;
- int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->flags);
+ int32_t flags = android_atomic_and(~CBLK_DISABLED, &mCblk->mFlags);
sp<AudioTrackThread> t = mAudioTrackThread;
if (t != 0) {
@@ -953,6 +953,7 @@ status_t AudioTrack::createTrack_l(
output,
tid,
&mSessionId,
+ mName,
&status);
if (track == 0) {
@@ -1182,9 +1183,9 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer)
// restart track if it was disabled by audioflinger due to previous underrun
if (mState == STATE_ACTIVE) {
audio_track_cblk_t* cblk = mCblk;
- if (android_atomic_and(~CBLK_DISABLED, &cblk->flags) & CBLK_DISABLED) {
- ALOGW("releaseBuffer() track %p name=%#x disabled due to previous underrun, restarting",
- this, cblk->mName);
+ if (android_atomic_and(~CBLK_DISABLED, &cblk->mFlags) & CBLK_DISABLED) {
+ ALOGW("releaseBuffer() track %p name=%s disabled due to previous underrun, restarting",
+ this, mName.string());
// FIXME ignoring status
mAudioTrack->start();
}
@@ -1261,16 +1262,16 @@ status_t TimedAudioTrack::allocateTimedBuffer(size_t size, sp<IMemory>* buffer)
// fails indicating that the server is dead, flag the track as invalid so
// we can attempt to restore in just a bit.
audio_track_cblk_t* cblk = mCblk;
- if (!(cblk->flags & CBLK_INVALID)) {
+ if (!(cblk->mFlags & CBLK_INVALID)) {
result = mAudioTrack->allocateTimedBuffer(size, buffer);
if (result == DEAD_OBJECT) {
- android_atomic_or(CBLK_INVALID, &cblk->flags);
+ android_atomic_or(CBLK_INVALID, &cblk->mFlags);
}
}
// If the track is invalid at this point, attempt to restore it. and try the
// allocation one more time.
- if (cblk->flags & CBLK_INVALID) {
+ if (cblk->mFlags & CBLK_INVALID) {
result = restoreTrack_l("allocateTimedBuffer");
if (result == NO_ERROR) {
@@ -1290,8 +1291,8 @@ status_t TimedAudioTrack::queueTimedBuffer(const sp<IMemory>& buffer,
audio_track_cblk_t* cblk = mCblk;
// restart track if it was disabled by audioflinger due to previous underrun
if (buffer->size() != 0 && status == NO_ERROR &&
- (mState == STATE_ACTIVE) && (cblk->flags & CBLK_DISABLED)) {
- android_atomic_and(~CBLK_DISABLED, &cblk->flags);
+ (mState == STATE_ACTIVE) && (cblk->mFlags & CBLK_DISABLED)) {
+ android_atomic_and(~CBLK_DISABLED, &cblk->mFlags);
ALOGW("queueTimedBuffer() track %p disabled, restarting", this);
// FIXME ignoring status
mAudioTrack->start();
@@ -1339,7 +1340,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
// Can only reference mCblk while locked
int32_t flags = android_atomic_and(
- ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->flags);
+ ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->mFlags);
// Check for track invalidation
if (flags & CBLK_INVALID) {
@@ -1681,7 +1682,7 @@ status_t AudioTrack::restoreTrack_l(const char *from)
// the actual amount of audio frames played (e.g SoundPool) receives them.
if (mSharedBuffer == 0) {
// restart playback even if buffer is not completely filled.
- android_atomic_or(CBLK_FORCEREADY, &mCblk->flags);
+ android_atomic_or(CBLK_FORCEREADY, &mCblk->mFlags);
}
}
#endif
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index aa45a2f..e7abb40 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -26,8 +26,8 @@ extern "C" {
namespace android {
audio_track_cblk_t::audio_track_cblk_t()
- : server(0), frameCount_(0), mFutex(0), mMinimum(0),
- mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mName(0), flags(0)
+ : mServer(0), frameCount_(0), mFutex(0), mMinimum(0),
+ mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mFlags(0)
{
memset(&u, 0, sizeof(u));
}
@@ -99,7 +99,7 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques
goto end;
}
for (;;) {
- int32_t flags = android_atomic_and(~CBLK_INTERRUPT, &cblk->flags);
+ int32_t flags = android_atomic_and(~CBLK_INTERRUPT, &cblk->mFlags);
// check for track invalidation by server, or server death detection
if (flags & CBLK_INVALID) {
ALOGV("Track invalidated");
@@ -293,7 +293,7 @@ void ClientProxy::releaseBuffer(Buffer* buffer)
void ClientProxy::binderDied()
{
audio_track_cblk_t* cblk = mCblk;
- if (!(android_atomic_or(CBLK_INVALID, &cblk->flags) & CBLK_INVALID)) {
+ if (!(android_atomic_or(CBLK_INVALID, &cblk->mFlags) & CBLK_INVALID)) {
// it seems that a FUTEX_WAKE_PRIVATE will not wake a FUTEX_WAIT, even within same process
(void) __futex_syscall3(&cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
1);
@@ -303,7 +303,7 @@ void ClientProxy::binderDied()
void ClientProxy::interrupt()
{
audio_track_cblk_t* cblk = mCblk;
- if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->flags) & CBLK_INTERRUPT)) {
+ if (!(android_atomic_or(CBLK_INTERRUPT, &cblk->mFlags) & CBLK_INTERRUPT)) {
(void) __futex_syscall3(&cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
1);
}
@@ -324,11 +324,11 @@ void AudioTrackClientProxy::flush()
}
bool AudioTrackClientProxy::clearStreamEndDone() {
- return (android_atomic_and(~CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE) != 0;
+ return (android_atomic_and(~CBLK_STREAM_END_DONE, &mCblk->mFlags) & CBLK_STREAM_END_DONE) != 0;
}
bool AudioTrackClientProxy::getStreamEndDone() const {
- return (mCblk->flags & CBLK_STREAM_END_DONE) != 0;
+ return (mCblk->mFlags & CBLK_STREAM_END_DONE) != 0;
}
status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *requested)
@@ -354,7 +354,7 @@ status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *request
timeout = TIMEOUT_FINITE;
}
for (;;) {
- int32_t flags = android_atomic_and(~(CBLK_INTERRUPT|CBLK_STREAM_END_DONE), &cblk->flags);
+ int32_t flags = android_atomic_and(~(CBLK_INTERRUPT|CBLK_STREAM_END_DONE), &cblk->mFlags);
// check for track invalidation by server, or server death detection
if (flags & CBLK_INVALID) {
ALOGV("Track invalidated");
@@ -594,7 +594,7 @@ void ServerProxy::releaseBuffer(Buffer* buffer)
android_atomic_release_store(stepCount + rear, &cblk->u.mStreaming.mRear);
}
- mCblk->server += stepCount;
+ mCblk->mServer += stepCount;
size_t half = mFrameCount / 2;
if (half == 0) {
@@ -653,7 +653,7 @@ size_t AudioTrackServerProxy::framesReady()
bool AudioTrackServerProxy::setStreamEndDone() {
bool old =
- (android_atomic_or(CBLK_STREAM_END_DONE, &mCblk->flags) & CBLK_STREAM_END_DONE) != 0;
+ (android_atomic_or(CBLK_STREAM_END_DONE, &mCblk->mFlags) & CBLK_STREAM_END_DONE) != 0;
if (!old) {
(void) __futex_syscall3(&mCblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
1);
@@ -661,6 +661,14 @@ bool AudioTrackServerProxy::setStreamEndDone() {
return old;
}
+void AudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
+{
+ mCblk->u.mStreaming.mUnderrunFrames += frameCount;
+
+ // FIXME also wake futex so that underrun is noticed more quickly
+ (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags);
+}
+
// ---------------------------------------------------------------------------
StaticAudioTrackServerProxy::StaticAudioTrackServerProxy(audio_track_cblk_t* cblk, void *buffers,
@@ -805,10 +813,10 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
}
mPosition = newPosition;
- cblk->server += stepCount;
+ cblk->mServer += stepCount;
cblk->u.mStatic.mBufferPosition = newPosition;
if (setFlags != 0) {
- (void) android_atomic_or(setFlags, &cblk->flags);
+ (void) android_atomic_or(setFlags, &cblk->mFlags);
// this would be a good place to wake a futex
}
@@ -817,6 +825,17 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
buffer->mNonContig = 0;
}
+void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
+{
+ // Unlike AudioTrackServerProxy::tallyUnderrunFrames() used for streaming tracks,
+ // we don't have a location to count underrun frames. The underrun frame counter
+ // only exists in AudioTrackSharedStreaming. Fortunately, underruns are not
+ // possible for static buffer tracks other than at end of buffer, so this is not a loss.
+
+ // FIXME also wake futex so that underrun is noticed more quickly
+ (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags);
+}
+
// ---------------------------------------------------------------------------
} // namespace android
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index c670936..be818c6 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -95,6 +95,7 @@ public:
audio_io_handle_t output,
pid_t tid,
int *sessionId,
+ String8& name,
status_t *status)
{
Parcel data, reply;
@@ -127,6 +128,7 @@ public:
if (sessionId != NULL) {
*sessionId = lSessionId;
}
+ name = reply.readString8();
lStatus = reply.readInt32();
track = interface_cast<IAudioTrack>(reply.readStrongBinder());
}
@@ -142,7 +144,7 @@ public:
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- track_flags_t flags,
+ track_flags_t *flags,
pid_t tid,
int *sessionId,
status_t *status)
@@ -155,7 +157,8 @@ public:
data.writeInt32(format);
data.writeInt32(channelMask);
data.writeInt32(frameCount);
- data.writeInt32(flags);
+ track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
+ data.writeInt32(lFlags);
data.writeInt32((int32_t) tid);
int lSessionId = 0;
if (sessionId != NULL) {
@@ -166,6 +169,10 @@ public:
if (lStatus != NO_ERROR) {
ALOGE("openRecord error: %s", strerror(-lStatus));
} else {
+ lFlags = reply.readInt32();
+ if (flags != NULL) {
+ *flags = lFlags;
+ }
lSessionId = reply.readInt32();
if (sessionId != NULL) {
*sessionId = lSessionId;
@@ -735,12 +742,14 @@ status_t BnAudioFlinger::onTransact(
audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
pid_t tid = (pid_t) data.readInt32();
int sessionId = data.readInt32();
+ String8 name;
status_t status;
sp<IAudioTrack> track = createTrack(
(audio_stream_type_t) streamType, sampleRate, format,
- channelMask, frameCount, &flags, buffer, output, tid, &sessionId, &status);
+ channelMask, frameCount, &flags, buffer, output, tid, &sessionId, name, &status);
reply->writeInt32(flags);
reply->writeInt32(sessionId);
+ reply->writeString8(name);
reply->writeInt32(status);
reply->writeStrongBinder(track->asBinder());
return NO_ERROR;
@@ -757,7 +766,8 @@ status_t BnAudioFlinger::onTransact(
int sessionId = data.readInt32();
status_t status;
sp<IAudioRecord> record = openRecord(input,
- sampleRate, format, channelMask, frameCount, flags, tid, &sessionId, &status);
+ sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status);
+ reply->writeInt32(flags);
reply->writeInt32(sessionId);
reply->writeInt32(status);
reply->writeStrongBinder(record->asBinder());
diff --git a/media/libmedia/IAudioFlingerClient.cpp b/media/libmedia/IAudioFlingerClient.cpp
index 84a589a..3c0d4cf 100644
--- a/media/libmedia/IAudioFlingerClient.cpp
+++ b/media/libmedia/IAudioFlingerClient.cpp
@@ -83,7 +83,7 @@ status_t BnAudioFlingerClient::onTransact(
ALOGV("STREAM_CONFIG_CHANGED stream %d", stream);
} else if (event != AudioSystem::OUTPUT_CLOSED && event != AudioSystem::INPUT_CLOSED) {
desc.samplingRate = data.readInt32();
- desc.format = data.readInt32();
+ desc.format = (audio_format_t) data.readInt32();
desc.channelMask = (audio_channel_mask_t) data.readInt32();
desc.frameCount = data.readInt32();
desc.latency = data.readInt32();
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index 5bbb2f0..ef99f4f 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -52,6 +52,7 @@ enum {
OBSERVER_ON_MSG,
GET_GRAPHIC_BUFFER_USAGE,
SET_INTERNAL_OPTION,
+ UPDATE_GRAPHIC_BUFFER_IN_META,
};
class BpOMX : public BpInterface<IOMX> {
@@ -283,6 +284,21 @@ public:
return err;
}
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.write(*graphicBuffer);
+ data.writeIntPtr((intptr_t)buffer);
+ remote()->transact(UPDATE_GRAPHIC_BUFFER_IN_META, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) {
@@ -691,6 +707,23 @@ status_t BnOMX::onTransact(
return NO_ERROR;
}
+ case UPDATE_GRAPHIC_BUFFER_IN_META:
+ {
+ CHECK_OMX_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+ data.read(*graphicBuffer);
+ buffer_id buffer = (void*)data.readIntPtr();
+
+ status_t err = updateGraphicBufferInMeta(
+ node, port_index, graphicBuffer, buffer);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case CREATE_INPUT_SURFACE:
{
CHECK_OMX_INTERFACE(IOMX, data, reply);
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 963b04f..056cc0a 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -811,6 +811,13 @@ status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) {
if (mPlayer == NULL) {
return NO_INIT;
}
+
+ if (next != NULL && !(next->mCurrentState &
+ (MEDIA_PLAYER_PREPARED | MEDIA_PLAYER_PAUSED | MEDIA_PLAYER_PLAYBACK_COMPLETE))) {
+ ALOGE("next player is not prepared");
+ return INVALID_OPERATION;
+ }
+
return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
}
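
With the check above, setNextMediaPlayer() now rejects a next player that is not yet prepared (or paused / playback-complete). For client code this simply means the second player must finish preparation before being chained; a hedged sketch of the expected call order, with a hypothetical helper name and error handling reduced to the essentials:

// Sketch of the call order implied by the new state check; not taken from the patch.
#include <media/mediaplayer.h>

using namespace android;

// 'current' is already playing; 'next' has a data source set but is not yet prepared.
status_t chainForHandoff(const sp<MediaPlayer>& current, const sp<MediaPlayer>& next) {
    // The next player must reach MEDIA_PLAYER_PREPARED (or PAUSED / PLAYBACK_COMPLETE)
    // before being chained; otherwise setNextMediaPlayer() now returns INVALID_OPERATION.
    status_t err = next->prepare();          // synchronous prepare for simplicity
    if (err != NO_ERROR) {
        return err;
    }
    return current->setNextMediaPlayer(next);
}
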
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 00804c5..5aefa58 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -833,15 +833,20 @@ ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
oldest->mStatus = BufferInfo::OWNED_BY_US;
- struct VideoDecoderOutputMetaData metaData;
- metaData.eType = kMetadataBufferTypeGrallocSource;
- metaData.pHandle = oldest->mGraphicBuffer->handle;
- memcpy(oldest->mData->base(), &metaData, sizeof(metaData));
+ mOMX->updateGraphicBufferInMeta(
+ mNode, kPortIndexOutput, oldest->mGraphicBuffer,
+ oldest->mBufferID);
- ALOGV("replaced oldest buffer #%u with age %u (%p stored in %p)",
+ VideoDecoderOutputMetaData *metaData =
+ reinterpret_cast<VideoDecoderOutputMetaData *>(
+ oldest->mData->base());
+ CHECK_EQ(metaData->eType, kMetadataBufferTypeGrallocSource);
+
+ ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
oldest - &mBuffers[kPortIndexOutput][0],
mDequeueCounter - oldest->mDequeuedAt,
- metaData.pHandle, oldest->mData->base());
+ metaData->pHandle,
+ oldest->mGraphicBuffer->handle, oldest->mData->base());
return oldest;
}
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 3e70dd7..79f2c91 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -214,7 +214,7 @@ AwesomePlayer::AwesomePlayer()
mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate);
mBufferingEventPending = false;
mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate);
- mVideoEventPending = false;
+ mVideoLagEventPending = false;
mCheckAudioStatusEvent = new AwesomeEvent(
this, &AwesomePlayer::onCheckAudioStatus);
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 810d88f..9820ef5 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -83,6 +83,10 @@ struct MuxOMX : public IOMX {
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer);
@@ -287,6 +291,13 @@ status_t MuxOMX::useGraphicBuffer(
node, port_index, graphicBuffer, buffer);
}
+status_t MuxOMX::updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
+ return getOMX(node)->updateGraphicBufferInMeta(
+ node, port_index, graphicBuffer, buffer);
+}
+
status_t MuxOMX::createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) {
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index befd4cc..6b934d4 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -65,10 +65,8 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig
// reference once the ctor ends, as that would cause the refcount of 'this'
// dropping to 0 at the end of the ctor. Since all we need is a wp<...>
// that's what we create.
- wp<BufferQueue::ConsumerListener> listener;
- sp<BufferQueue::ConsumerListener> proxy;
- listener = static_cast<BufferQueue::ConsumerListener*>(this);
- proxy = new BufferQueue::ProxyConsumerListener(listener);
+ wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
+ sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
status_t err = mBufferQueue->consumerConnect(proxy, false);
if (err != NO_ERROR) {
@@ -107,7 +105,7 @@ void SurfaceMediaSource::dump(String8& result, const char* prefix,
Mutex::Autolock lock(mMutex);
result.append(buffer);
- mBufferQueue->dump(result);
+ mBufferQueue->dump(result, "");
}
status_t SurfaceMediaSource::setFrameRate(int32_t fps)
diff --git a/media/libstagefright/foundation/ALooper.cpp b/media/libstagefright/foundation/ALooper.cpp
index 22777a2..ebf9d8d 100644
--- a/media/libstagefright/foundation/ALooper.cpp
+++ b/media/libstagefright/foundation/ALooper.cpp
@@ -72,6 +72,10 @@ ALooper::ALooper()
ALooper::~ALooper() {
stop();
+
+ // Since this looper is "dead" (or as good as dead by now),
+ // have ALooperRoster unregister any handlers still registered for it.
+ gLooperRoster.unregisterStaleHandlers();
}
void ALooper::setName(const char *name) {
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index ad10d2b..0c181ff 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -71,6 +71,20 @@ void ALooperRoster::unregisterHandler(ALooper::handler_id handlerID) {
mHandlers.removeItemsAt(index);
}
+void ALooperRoster::unregisterStaleHandlers() {
+ Mutex::Autolock autoLock(mLock);
+
+ for (size_t i = mHandlers.size(); i-- > 0;) {
+ const HandlerInfo &info = mHandlers.valueAt(i);
+
+ sp<ALooper> looper = info.mLooper.promote();
+ if (looper == NULL) {
+ ALOGV("Unregistering stale handler %d", mHandlers.keyAt(i));
+ mHandlers.removeItemsAt(i);
+ }
+ }
+}
+
status_t ALooperRoster::postMessage(
const sp<AMessage> &msg, int64_t delayUs) {
Mutex::Autolock autoLock(mLock);
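
unregisterStaleHandlers() above walks the handler table backwards and drops every entry whose owning looper can no longer be promoted. Walking backwards is what keeps the indices valid while items are removed; a small stand-alone illustration of the same pattern (the Entry type and helper name are made up for the example):

// Illustration of the reverse-iteration removal pattern used above.
#include <utils/KeyedVector.h>
#include <utils/RefBase.h>

using namespace android;

struct Owner : public RefBase {};

struct Entry {
    wp<Owner> mOwner;   // weak reference: does not keep the owner alive
};

void purgeDeadEntries(KeyedVector<int, Entry>& table) {
    // Iterate from the end so removeItemsAt() never shifts an index we still need.
    for (size_t i = table.size(); i-- > 0;) {
        sp<Owner> owner = table.valueAt(i).mOwner.promote();
        if (owner == NULL) {
            table.removeItemsAt(i);   // owner already destroyed; entry is stale
        }
    }
}
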
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 7fed7d4..7e53af3 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -79,6 +79,10 @@ public:
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+ virtual status_t updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer);
+
virtual status_t createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index f6ae376..ae498b4 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -66,6 +66,10 @@ struct OMXNodeInstance {
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
OMX::buffer_id *buffer);
+ status_t updateGraphicBufferInMeta(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id buffer);
+
status_t createInputSurface(
OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer);
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index d6fd95b..325ffcf 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -69,11 +69,8 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
// reference once the ctor ends, as that would cause the refcount of 'this'
// dropping to 0 at the end of the ctor. Since all we need is a wp<...>
// that's what we create.
- wp<BufferQueue::ConsumerListener> listener;
- listener = static_cast<BufferQueue::ConsumerListener*>(this);
-
- sp<BufferQueue::ConsumerListener> proxy;
- proxy = new BufferQueue::ProxyConsumerListener(listener);
+ wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this);
+ sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
mInitCheck = mBufferQueue->consumerConnect(proxy, false);
if (mInitCheck != NO_ERROR) {
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 4b1dbe6..aaa9f89 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -345,6 +345,13 @@ status_t OMX::useGraphicBuffer(
port_index, graphicBuffer, buffer);
}
+status_t OMX::updateGraphicBufferInMeta(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id buffer) {
+ return findInstance(node)->updateGraphicBufferInMeta(
+ port_index, graphicBuffer, buffer);
+}
+
status_t OMX::createInputSurface(
node_id node, OMX_U32 port_index,
sp<IGraphicBufferProducer> *bufferProducer) {
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 525e18d..8d100f1 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -70,6 +70,10 @@ struct BufferMeta {
header->nFilledLen);
}
+ void setGraphicBuffer(const sp<GraphicBuffer> &graphicBuffer) {
+ mGraphicBuffer = graphicBuffer;
+ }
+
private:
sp<GraphicBuffer> mGraphicBuffer;
sp<IMemory> mMem;
@@ -566,6 +570,22 @@ status_t OMXNodeInstance::useGraphicBuffer(
return OK;
}
+status_t OMXNodeInstance::updateGraphicBufferInMeta(
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+ OMX::buffer_id buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)(buffer);
+ VideoDecoderOutputMetaData *metadata =
+ (VideoDecoderOutputMetaData *)(header->pBuffer);
+ BufferMeta *bufferMeta = (BufferMeta *)(header->pAppPrivate);
+ bufferMeta->setGraphicBuffer(graphicBuffer);
+ metadata->eType = kMetadataBufferTypeGrallocSource;
+ metadata->pHandle = graphicBuffer->handle;
+
+ return OK;
+}
+
status_t OMXNodeInstance::createInputSurface(
OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer) {
Mutex::Autolock autolock(mLock);
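
In metadata-buffer mode the OMX buffer's payload is a small VideoDecoderOutputMetaData record rather than pixel data, so updateGraphicBufferInMeta() only has to rewrite that record and keep a reference to the new GraphicBuffer so the handle stays valid. A stripped-down sketch of the same write, outside the OMXNodeInstance/BufferMeta machinery (field types and the enum value are approximations for illustration; only the two fields used above are modeled):

// Sketch of what updateGraphicBufferInMeta() writes into a metadata-mode buffer.
// 'fillOutputMetadata' is a hypothetical helper.
#include <ui/GraphicBuffer.h>

using namespace android;

enum { kMetadataBufferTypeGrallocSource = 1 };   // assumed value, for illustration only

struct VideoDecoderOutputMetaData {
    uint32_t eType;               // must be kMetadataBufferTypeGrallocSource
    buffer_handle_t pHandle;      // gralloc handle of the frame to render
};

void fillOutputMetadata(void *bufferPayload, const sp<GraphicBuffer>& gb) {
    VideoDecoderOutputMetaData *meta =
            reinterpret_cast<VideoDecoderOutputMetaData *>(bufferPayload);
    meta->eType = kMetadataBufferTypeGrallocSource;
    meta->pHandle = gb->handle;   // caller must also keep 'gb' alive (cf. BufferMeta above)
}
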
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index 906aef3..5116550 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -571,6 +571,9 @@ bool ARTSPConnection::receiveLine(AString *line) {
if (sawCR && c == '\n') {
line->erase(line->size() - 1, 1);
return true;
+ } else if (c == '\n') {
+ // some response lines end with a bare '\n' instead of '\r\n'.
+ return true;
}
line->append(&c, 1);
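
The extra branch above accepts responses whose lines end with a bare '\n' instead of the usual "\r\n". A tiny stand-alone version of the same tolerant terminator handling, written with std::string rather than the AString class used in the patch:

// Tolerant line-terminator handling, mirroring the receiveLine() change above.
#include <string>

// Feeds one received character; returns true when 'line' is complete (terminator stripped).
static bool consumeChar(char c, std::string *line, bool *sawCR) {
    if (*sawCR && c == '\n') {
        line->erase(line->size() - 1, 1);   // drop the trailing '\r'
        *sawCR = false;
        return true;
    }
    if (c == '\n') {
        return true;                        // some servers end lines with a bare '\n'
    }
    *sawCR = (c == '\r');
    line->append(1, c);
    return false;
}
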
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index cc5af87..3d65c44 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -37,7 +37,6 @@
#include <cutils/bitops.h>
#include <cutils/properties.h>
-#include <cutils/compiler.h>
#include <system/audio.h>
#include <hardware/audio.h>
@@ -437,6 +436,7 @@ sp<IAudioTrack> AudioFlinger::createTrack(
audio_io_handle_t output,
pid_t tid,
int *sessionId,
+ String8& name,
status_t *status)
{
sp<PlaybackThread::Track> track;
@@ -525,6 +525,9 @@ sp<IAudioTrack> AudioFlinger::createTrack(
}
}
if (lStatus == NO_ERROR) {
+ // s for server's pid, n for normal mixer name, f for fast index
+ name = String8::format("s:%d;n:%d;f:%d", getpid_cached, track->name() - AudioMixer::TRACK0,
+ track->fastIndex());
trackHandle = new TrackHandle(track);
} else {
// remove local strong reference to Client before deleting the Track so that the Client
@@ -1209,7 +1212,7 @@ sp<IAudioRecord> AudioFlinger::openRecord(
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
int *sessionId,
status_t *status)
@@ -1228,6 +1231,12 @@ sp<IAudioRecord> AudioFlinger::openRecord(
goto Exit;
}
+ if (format != AUDIO_FORMAT_PCM_16_BIT) {
+ ALOGE("openRecord() invalid format %d", format);
+ lStatus = BAD_VALUE;
+ goto Exit;
+ }
+
// add client to list
{ // scope for mLock
Mutex::Autolock _l(mLock);
@@ -1735,7 +1744,7 @@ audio_io_handle_t AudioFlinger::openInput(audio_module_handle_t module,
AudioStreamIn *input = new AudioStreamIn(inHwDev, inStream);
// Start record thread
- // RecorThread require both input and output device indication to forward to audio
+ // RecordThread requires both input and output device indication to forward to audio
// pre processing modules
thread = new RecordThread(this,
input,
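
createTrack() now also hands back a short textual identifier of the server-side track, built above as "s:<server pid>;n:<normal mixer name>;f:<fast index>". A hedged sketch of how a diagnostic tool might decompose that string; the parsing helper is not part of the patch:

// Hypothetical helper that splits the "s:%d;n:%d;f:%d" track name created above.
#include <cstdio>

struct TrackName {
    int serverPid;    // s: pid of the process hosting AudioFlinger
    int mixerName;    // n: normal mixer track name, relative to AudioMixer::TRACK0
    int fastIndex;    // f: fast mixer slot, or -1 for a normal track
};

static bool parseTrackName(const char *name, TrackName *out) {
    return std::sscanf(name, "s:%d;n:%d;f:%d",
                       &out->serverPid, &out->mixerName, &out->fastIndex) == 3;
}
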
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index eee5da5..e5e4113 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -108,6 +108,7 @@ public:
audio_io_handle_t output,
pid_t tid,
int *sessionId,
+ String8& name,
status_t *status);
virtual sp<IAudioRecord> openRecord(
@@ -116,7 +117,7 @@ public:
audio_format_t format,
audio_channel_mask_t channelMask,
size_t frameCount,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
int *sessionId,
status_t *status);
diff --git a/services/audioflinger/AudioResampler.h b/services/audioflinger/AudioResampler.h
index 29dc5b6..33e64ce 100644
--- a/services/audioflinger/AudioResampler.h
+++ b/services/audioflinger/AudioResampler.h
@@ -56,6 +56,14 @@ public:
// set the PTS of the next buffer output by the resampler
virtual void setPTS(int64_t pts);
+ // Resample int16_t samples from provider and accumulate into 'out'.
+ // A mono provider delivers a sequence of samples.
+ // A stereo provider delivers a sequence of interleaved pairs of samples.
+ // Multi-channel providers are not supported.
+ // In either case, 'out' holds interleaved pairs of fixed-point signed Q19.12.
+ // That is, for a mono provider, there is an implicit up-channeling.
+ // Since this method accumulates, the caller is responsible for clearing 'out' initially.
+ // FIXME assumes provider is always successful; it should return the actual frame count.
virtual void resample(int32_t* out, size_t outFrameCount,
AudioBufferProvider* provider) = 0;
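
The new comment block spells out the resample() contract: the output is interleaved stereo Q19.12 and the call accumulates into 'out', so the caller must clear the buffer first (exactly what the RecordThread hunk further down does before its single-source resample). A minimal sketch of one correctly ordered resample pass, assuming a 16-bit PCM consumer and the in-tree header paths:

// Sketch of the caller-side contract documented above: clear, accumulate, then
// convert the Q19.12 accumulator down to 16-bit PCM.
#include <string.h>
#include <audio_utils/primitives.h>     // ditherAndClamp()
#include <media/AudioBufferProvider.h>
#include "AudioResampler.h"             // include path as used inside audioflinger

using namespace android;

void resampleOnce(AudioResampler *resampler, AudioBufferProvider *provider,
                  int32_t *accum, int16_t *pcmOut, size_t outFrameCount) {
    // resample() accumulates, so the accumulator must start at zero.
    memset(accum, 0, outFrameCount * 2 /* stereo */ * sizeof(int32_t));
    resampler->resample(accum, outFrameCount, provider);

    // Convert interleaved stereo Q19.12 to interleaved stereo int16_t.
    // pcmOut must hold outFrameCount stereo frames and be 32-bit aligned.
    ditherAndClamp(reinterpret_cast<int32_t *>(pcmOut), accum, outFrameCount);
}
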
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 8b7433c..5600411c 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -106,6 +106,7 @@ public:
bool isInvalid() const { return mIsInvalid; }
virtual bool isTimedTrack() const { return false; }
bool isFastTrack() const { return (mFlags & IAudioFlinger::TRACK_FAST) != 0; }
+ int fastIndex() const { return mFastIndex; }
protected:
@@ -139,7 +140,6 @@ private:
// but the slot is only used if track is active
FastTrackUnderruns mObservedUnderruns; // Most recently observed value of
// mFastMixerDumpState.mTracks[mFastIndex].mUnderruns
- uint32_t mUnderrunCount; // Counter of total number of underruns, never reset
volatile float mCachedVolume; // combined master volume and stream type volume;
// 'volatile' means accessed without lock or
// barrier, but is read/written atomically
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 1a513c4..2c2931f 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -25,7 +25,6 @@
#include <fcntl.h>
#include <sys/stat.h>
#include <cutils/properties.h>
-#include <cutils/compiler.h>
#include <media/AudioParameter.h>
#include <utils/Log.h>
#include <utils/Trace.h>
@@ -266,10 +265,9 @@ AudioFlinger::ThreadBase::ThreadBase(const sp<AudioFlinger>& audioFlinger, audio
audio_devices_t outDevice, audio_devices_t inDevice, type_t type)
: Thread(false /*canCallJava*/),
mType(type),
- mAudioFlinger(audioFlinger), mSampleRate(0), mFrameCount(0), mNormalFrameCount(0),
- // mChannelMask
- mChannelCount(0),
- mFrameSize(1), mFormat(AUDIO_FORMAT_INVALID),
+ mAudioFlinger(audioFlinger),
+ // mSampleRate, mFrameCount, mChannelMask, mChannelCount, mFrameSize, and mFormat are
+ // set by PlaybackThread::readOutputParameters() or RecordThread::readInputParameters()
mParamStatus(NO_ERROR),
mStandby(false), mOutDevice(outDevice), mInDevice(inDevice),
mAudioSource(AUDIO_SOURCE_DEFAULT), mId(id),
@@ -425,8 +423,6 @@ void AudioFlinger::ThreadBase::dumpBase(int fd, const Vector<String16>& args)
result.append(buffer);
snprintf(buffer, SIZE, "HAL frame count: %d\n", mFrameCount);
result.append(buffer);
- snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount);
- result.append(buffer);
snprintf(buffer, SIZE, "Channel Count: %u\n", mChannelCount);
result.append(buffer);
snprintf(buffer, SIZE, "Channel Mask: 0x%08x\n", mChannelMask);
@@ -932,6 +928,7 @@ AudioFlinger::PlaybackThread::PlaybackThread(const sp<AudioFlinger>& audioFlinge
audio_devices_t device,
type_t type)
: ThreadBase(audioFlinger, id, device, AUDIO_DEVICE_NONE, type),
+ mNormalFrameCount(0), mMixBuffer(NULL),
mAllocMixBuffer(NULL), mSuspended(0), mBytesWritten(0),
// mStreamTypes[] initialized in constructor body
mOutput(output),
@@ -1054,6 +1051,8 @@ void AudioFlinger::PlaybackThread::dumpInternals(int fd, const Vector<String16>&
snprintf(buffer, SIZE, "\nOutput thread %p internals\n", this);
result.append(buffer);
+ snprintf(buffer, SIZE, "Normal frame count: %d\n", mNormalFrameCount);
+ result.append(buffer);
snprintf(buffer, SIZE, "last write occurred (msecs): %llu\n",
ns2ms(systemTime() - mLastWriteTime));
result.append(buffer);
@@ -1754,7 +1753,7 @@ void AudioFlinger::PlaybackThread::threadLoop_removeTracks(
const Vector< sp<Track> >& tracksToRemove)
{
size_t count = tracksToRemove.size();
- if (CC_UNLIKELY(count)) {
+ if (count) {
for (size_t i = 0 ; i < count ; i++) {
const sp<Track>& track = tracksToRemove.itemAt(i);
if (!track->isOutputTrack()) {
@@ -2282,7 +2281,7 @@ if (mType == MIXER) {
void AudioFlinger::PlaybackThread::removeTracks_l(const Vector< sp<Track> >& tracksToRemove)
{
size_t count = tracksToRemove.size();
- if (CC_UNLIKELY(count)) {
+ if (count) {
for (size_t i=0 ; i<count ; i++) {
const sp<Track>& track = tracksToRemove.itemAt(i);
mActiveTracks.remove(track);
@@ -2704,7 +2703,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
}
for (size_t i=0 ; i<count ; i++) {
- sp<Track> t = mActiveTracks[i].promote();
+ const sp<Track> t = mActiveTracks[i].promote();
if (t == 0) {
continue;
}
@@ -2740,8 +2739,10 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
track->mObservedUnderruns = underruns;
// don't count underruns that occur while stopping or pausing
// or stopped which can occur when flush() is called while active
- if (!(track->isStopping() || track->isPausing() || track->isStopped())) {
- track->mUnderrunCount += recentUnderruns;
+ if (!(track->isStopping() || track->isPausing() || track->isStopped()) &&
+ recentUnderruns > 0) {
+ // FIXME the fast mixer pulls & mixes partial buffers, but each underrun is counted here as a full buffer of frames
+ track->mAudioTrackServerProxy->tallyUnderrunFrames(recentUnderruns * mFrameCount);
}
// This is similar to the state machine for normal tracks,
@@ -2784,7 +2785,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
}
// indicate to client process that the track was disabled because of underrun;
// it will then automatically call start() when data is available
- android_atomic_or(CBLK_DISABLED, &track->mCblk->flags);
+ android_atomic_or(CBLK_DISABLED, &track->mCblk->mFlags);
// remove from active list, but state remains ACTIVE [confusing but true]
isActive = false;
break;
@@ -2873,11 +2874,12 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
// hence the test on (mMixerStatus == MIXER_TRACKS_READY) meaning the track was mixed
// during last round
size_t desiredFrames;
- if (t->sampleRate() == mSampleRate) {
+ uint32_t sr = track->sampleRate();
+ if (sr == mSampleRate) {
desiredFrames = mNormalFrameCount;
} else {
// +1 for rounding and +1 for additional sample needed for interpolation
- desiredFrames = (mNormalFrameCount * t->sampleRate()) / mSampleRate + 1 + 1;
+ desiredFrames = (mNormalFrameCount * sr) / mSampleRate + 1 + 1;
// add frames already consumed but not yet released by the resampler
// because cblk->framesReady() will include these frames
desiredFrames += mAudioMixer->getUnreleasedFrames(track->name());
@@ -2903,7 +2905,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
if ((framesReady >= minFrames) && track->isReady() &&
!track->isPaused() && !track->isTerminated())
{
- ALOGVV("track %d s=%08x [OK] on thread %p", name, cblk->server, this);
+ ALOGVV("track %d s=%08x [OK] on thread %p", name, cblk->mServer, this);
mixedTracks++;
@@ -2932,7 +2934,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
param = AudioMixer::RAMP_VOLUME;
}
mAudioMixer->setParameter(name, AudioMixer::RESAMPLE, AudioMixer::RESET, NULL);
- } else if (cblk->server != 0) {
+ // FIXME should not make a decision based on mServer
+ } else if (cblk->mServer != 0) {
// If the track is stopped before the first frame was mixed,
// do not apply ramp
param = AudioMixer::RAMP_VOLUME;
@@ -3055,12 +3058,8 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
mixerStatus = MIXER_TRACKS_READY;
}
} else {
- // only implemented for normal tracks, not fast tracks
if (framesReady < desiredFrames && !track->isStopped() && !track->isPaused()) {
- // we missed desiredFrames whatever the actual number of frames missing was
- cblk->u.mStreaming.mUnderrunFrames += desiredFrames;
- // FIXME also wake futex so that underrun is noticed more quickly
- (void) android_atomic_or(CBLK_UNDERRUN, &cblk->flags);
+ track->mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
}
// clear effect chain input buffer if an active track underruns to avoid sending
// previous audio buffer again to effects
@@ -3069,7 +3068,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
chain->clearInputBuffer();
}
- ALOGVV("track %d s=%08x [NOT READY] on thread %p", name, cblk->server, this);
+ ALOGVV("track %d s=%08x [NOT READY] on thread %p", name, cblk->mServer, this);
if ((track->sharedBuffer() != 0) || track->isTerminated() ||
track->isStopped() || track->isPaused()) {
// We have consumed all the buffers of this track.
@@ -3085,7 +3084,6 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
tracksToRemove->add(track);
}
} else {
- track->mUnderrunCount++;
// No buffers for this track. Give it a few chances to
// fill a buffer, then remove it from active list.
if (--(track->mRetryCount) <= 0) {
@@ -3093,7 +3091,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::MixerThread::prepareTrac
tracksToRemove->add(track);
// indicate to client process that the track was disabled because of underrun;
// it will then automatically call start() when data is available
- android_atomic_or(CBLK_DISABLED, &cblk->flags);
+ android_atomic_or(CBLK_DISABLED, &cblk->mFlags);
// If one track is not ready, mark the mixer also not ready if:
// - the mixer was ready during previous round OR
// - no other track is ready
@@ -3483,7 +3481,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep
if ((track->framesReady() >= minFrames) && track->isReady() &&
!track->isPaused() && !track->isTerminated())
{
- ALOGVV("track %d u=%08x, s=%08x [OK]", track->name(), cblk->user, cblk->server);
+ ALOGVV("track %d s=%08x [OK]", track->name(), cblk->mServer);
if (track->mFillingUpStatus == Track::FS_FILLED) {
track->mFillingUpStatus = Track::FS_ACTIVE;
@@ -3508,7 +3506,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::DirectOutputThread::prep
mEffectChains[0]->clearInputBuffer();
}
- ALOGVV("track %d u=%08x, s=%08x [NOT READY]", track->name(), cblk->user, cblk->server);
+ ALOGVV("track %d s=%08x [NOT READY]", track->name(), cblk->mServer);
if ((track->sharedBuffer() != 0) || track->isTerminated() ||
track->isStopped() || track->isPaused()) {
// We have consumed all the buffers of this track.
@@ -3551,7 +3549,7 @@ void AudioFlinger::DirectOutputThread::threadLoop_mix()
AudioBufferProvider::Buffer buffer;
buffer.frameCount = frameCount;
mActiveTrack->getNextBuffer(&buffer);
- if (CC_UNLIKELY(buffer.raw == NULL)) {
+ if (buffer.raw == NULL) {
memset(curBuf, 0, frameCount * mFrameSize);
break;
}
@@ -3847,12 +3845,12 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr
tracksToRemove->add(track);
} else if (track->framesReady() && track->isReady() &&
!track->isPaused() && !track->isTerminated()) {
- ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->server);
+ ALOGVV("OffloadThread: track %d s=%08x [OK]", track->name(), cblk->mServer);
if (track->mFillingUpStatus == Track::FS_FILLED) {
track->mFillingUpStatus = Track::FS_ACTIVE;
mLeftVolFloat = mRightVolFloat = 0;
if (track->mState == TrackBase::RESUMING) {
- if (CC_UNLIKELY(mPausedBytesRemaining)) {
+ if (mPausedBytesRemaining) {
// Need to continue write that was interrupted
mCurrentWriteLength = mPausedWriteLength;
mBytesRemaining = mPausedBytesRemaining;
@@ -3875,7 +3873,7 @@ AudioFlinger::PlaybackThread::mixer_state AudioFlinger::OffloadThread::prepareTr
mixerStatus = MIXER_TRACKS_READY;
}
} else {
- ALOGVV("OffloadThread: track %d s=%08x [NOT READY]", track->name(), cblk->server);
+ ALOGVV("OffloadThread: track %d s=%08x [NOT READY]", track->name(), cblk->mServer);
if (track->isStopping_1()) {
// Hardware buffer can hold a large amount of audio so we must
// wait for all current track's data to drain before we say
@@ -4161,7 +4159,7 @@ AudioFlinger::RecordThread::RecordThread(const sp<AudioFlinger>& audioFlinger,
) :
ThreadBase(audioFlinger, id, outDevice, inDevice, RECORD),
mInput(input), mResampler(NULL), mRsmpOutBuffer(NULL), mRsmpInBuffer(NULL),
- // mRsmpInIndex and mInputBytes set by readInputParameters()
+ // mRsmpInIndex and mBufferSize set by readInputParameters()
mReqChannelCount(popcount(channelMask)),
mReqSampleRate(sampleRate)
// mBytesRead is only meaningful while active, and so is cleared in start()
@@ -4274,7 +4272,7 @@ bool AudioFlinger::RecordThread::threadLoop()
buffer.frameCount = mFrameCount;
status_t status = mActiveTrack->getNextBuffer(&buffer);
- if (CC_LIKELY(status == NO_ERROR)) {
+ if (status == NO_ERROR) {
readOnce = true;
size_t framesOut = buffer.frameCount;
if (mResampler == NULL) {
@@ -4289,8 +4287,7 @@ bool AudioFlinger::RecordThread::threadLoop()
framesIn = framesOut;
mRsmpInIndex += framesIn;
framesOut -= framesIn;
- if (mChannelCount == mReqChannelCount ||
- mFormat != AUDIO_FORMAT_PCM_16_BIT) {
+ if (mChannelCount == mReqChannelCount) {
memcpy(dst, src, framesIn * mFrameSize);
} else {
if (mChannelCount == 1) {
@@ -4304,9 +4301,7 @@ bool AudioFlinger::RecordThread::threadLoop()
}
if (framesOut && mFrameCount == mRsmpInIndex) {
void *readInto;
- if (framesOut == mFrameCount &&
- (mChannelCount == mReqChannelCount ||
- mFormat != AUDIO_FORMAT_PCM_16_BIT)) {
+ if (framesOut == mFrameCount && mChannelCount == mReqChannelCount) {
readInto = buffer.raw;
framesOut = 0;
} else {
@@ -4314,7 +4309,7 @@ bool AudioFlinger::RecordThread::threadLoop()
mRsmpInIndex = 0;
}
mBytesRead = mInput->stream->read(mInput->stream, readInto,
- mInputBytes);
+ mBufferSize);
if (mBytesRead <= 0) {
if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE))
{
@@ -4339,7 +4334,8 @@ bool AudioFlinger::RecordThread::threadLoop()
} else {
// resampling
- memset(mRsmpOutBuffer, 0, framesOut * 2 * sizeof(int32_t));
+ // resampler accumulates, but we only have one source track
+ memset(mRsmpOutBuffer, 0, framesOut * FCC_2 * sizeof(int32_t));
// alter output frame count as if we were expecting stereo samples
if (mChannelCount == 1 && mReqChannelCount == 1) {
framesOut >>= 1;
@@ -4349,6 +4345,7 @@ bool AudioFlinger::RecordThread::threadLoop()
// ditherAndClamp() works as long as all buffers returned by
// mActiveTrack->getNextBuffer() are 32 bit aligned which should be always true.
if (mChannelCount == 2 && mReqChannelCount == 1) {
+ // temporarily type pun mRsmpOutBuffer from Q19.12 to int16_t
ditherAndClamp(mRsmpOutBuffer, mRsmpOutBuffer, framesOut);
// the resampler always outputs stereo samples:
// do post stereo to mono conversion
@@ -4357,6 +4354,7 @@ bool AudioFlinger::RecordThread::threadLoop()
} else {
ditherAndClamp((int32_t *)buffer.raw, mRsmpOutBuffer, framesOut);
}
+ // now done with mRsmpOutBuffer
}
if (mFramestoDrop == 0) {
@@ -4435,7 +4433,7 @@ sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createR
audio_channel_mask_t channelMask,
size_t frameCount,
int sessionId,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
status_t *status)
{
@@ -4448,6 +4446,57 @@ sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createR
goto Exit;
}
+ // client expresses a preference for FAST, but we get the final say
+ if (*flags & IAudioFlinger::TRACK_FAST) {
+ if (
+ // use case: callback handler and frame count is default or at least as large as HAL
+ (
+ (tid != -1) &&
+ ((frameCount == 0) ||
+ (frameCount >= (mFrameCount * kFastTrackMultiplier)))
+ ) &&
+ // FIXME when record supports non-PCM data, also check for audio_is_linear_pcm(format)
+ // mono or stereo
+ ( (channelMask == AUDIO_CHANNEL_OUT_MONO) ||
+ (channelMask == AUDIO_CHANNEL_OUT_STEREO) ) &&
+ // hardware sample rate
+ (sampleRate == mSampleRate) &&
+ // record thread has an associated fast recorder
+ hasFastRecorder()
+ // FIXME test that RecordThread for this fast track has a capable output HAL
+ // FIXME add a permission test also?
+ ) {
+ // if frameCount not specified, then it defaults to fast recorder (HAL) frame count
+ if (frameCount == 0) {
+ frameCount = mFrameCount * kFastTrackMultiplier;
+ }
+ ALOGV("AUDIO_INPUT_FLAG_FAST accepted: frameCount=%d mFrameCount=%d",
+ frameCount, mFrameCount);
+ } else {
+ ALOGV("AUDIO_INPUT_FLAG_FAST denied: frameCount=%d "
+ "mFrameCount=%d format=%d isLinear=%d channelMask=%#x sampleRate=%u mSampleRate=%u "
+ "hasFastRecorder=%d tid=%d",
+ frameCount, mFrameCount, format,
+ audio_is_linear_pcm(format),
+ channelMask, sampleRate, mSampleRate, hasFastRecorder(), tid);
+ *flags &= ~IAudioFlinger::TRACK_FAST;
+ // For compatibility with AudioRecord calculation, buffer depth is forced
+ // to be at least 2 x the record thread frame count and cover audio hardware latency.
+ // This is probably too conservative, but legacy application code may depend on it.
+ // If you change this calculation, also review the start threshold which is related.
+ uint32_t latencyMs = 50; // FIXME mInput->stream->get_latency(mInput->stream);
+ size_t mNormalFrameCount = 2048; // FIXME
+ uint32_t minBufCount = latencyMs / ((1000 * mNormalFrameCount) / mSampleRate);
+ if (minBufCount < 2) {
+ minBufCount = 2;
+ }
+ size_t minFrameCount = mNormalFrameCount * minBufCount;
+ if (frameCount < minFrameCount) {
+ frameCount = minFrameCount;
+ }
+ }
+ }
+
// FIXME use flags and tid similar to createTrack_l()
{ // scope for mLock
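
When the FAST flag is denied, the code above falls back to the legacy buffer sizing: at least minBufCount buffers of mNormalFrameCount frames, where minBufCount covers the assumed latency and is clamped to 2. Plugging in the FIXME placeholder values plus an assumed 48 kHz sample rate makes the arithmetic concrete:

// Worked example of the fallback sizing above (48 kHz is an assumed rate;
// the other constants are the FIXME placeholders from the patch).
#include <stdio.h>

int main() {
    const unsigned latencyMs = 50;          // FIXME placeholder in the patch
    const unsigned normalFrameCount = 2048; // FIXME placeholder in the patch
    const unsigned sampleRate = 48000;      // assumed hardware rate

    unsigned bufferMs = (1000 * normalFrameCount) / sampleRate;   // 42 ms per buffer
    unsigned minBufCount = latencyMs / bufferMs;                  // 50 / 42 = 1
    if (minBufCount < 2) {
        minBufCount = 2;                                          // clamped to 2
    }
    unsigned minFrameCount = normalFrameCount * minBufCount;      // 4096 frames

    printf("minBufCount=%u minFrameCount=%u\n", minBufCount, minFrameCount);
    return 0;
}
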
@@ -4467,6 +4516,13 @@ sp<AudioFlinger::RecordThread::RecordTrack> AudioFlinger::RecordThread::createR
mAudioFlinger->btNrecIsOff();
setEffectSuspended_l(FX_IID_AEC, suspend, sessionId);
setEffectSuspended_l(FX_IID_NS, suspend, sessionId);
+
+ if ((*flags & IAudioFlinger::TRACK_FAST) && (tid != -1)) {
+ pid_t callingPid = IPCThreadState::self()->getCallingPid();
+ // we don't have CAP_SYS_NICE, nor do we want to have it as it's too powerful,
+ // so ask activity manager to do this on our behalf
+ sendPrioConfigEvent_l(callingPid, tid, kPriorityAudioApp);
+ }
}
lStatus = NO_ERROR;
@@ -4669,7 +4725,7 @@ void AudioFlinger::RecordThread::dumpInternals(int fd, const Vector<String16>& a
if (mActiveTrack != 0) {
snprintf(buffer, SIZE, "In index: %d\n", mRsmpInIndex);
result.append(buffer);
- snprintf(buffer, SIZE, "In size: %d\n", mInputBytes);
+ snprintf(buffer, SIZE, "Buffer size: %u bytes\n", mBufferSize);
result.append(buffer);
snprintf(buffer, SIZE, "Resampling: %d\n", (mResampler != NULL));
result.append(buffer);
@@ -4722,7 +4778,7 @@ status_t AudioFlinger::RecordThread::getNextBuffer(AudioBufferProvider::Buffer*
int channelCount;
if (framesReady == 0) {
- mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mInputBytes);
+ mBytesRead = mInput->stream->read(mInput->stream, mRsmpInBuffer, mBufferSize);
if (mBytesRead <= 0) {
if ((mBytesRead < 0) && (mActiveTrack->mState == TrackBase::ACTIVE)) {
ALOGE("RecordThread::getNextBuffer() Error reading audio input");
@@ -4778,8 +4834,12 @@ bool AudioFlinger::RecordThread::checkForNewParameters_l()
reconfig = true;
}
if (param.getInt(String8(AudioParameter::keyFormat), value) == NO_ERROR) {
- reqFormat = (audio_format_t) value;
- reconfig = true;
+ if ((audio_format_t) value != AUDIO_FORMAT_PCM_16_BIT) {
+ status = BAD_VALUE;
+ } else {
+ reqFormat = (audio_format_t) value;
+ reconfig = true;
+ }
}
if (param.getInt(String8(AudioParameter::keyChannels), value) == NO_ERROR) {
reqChannelCount = popcount(value);
@@ -4906,9 +4966,9 @@ void AudioFlinger::RecordThread::audioConfigChanged_l(int event, int param) {
void AudioFlinger::RecordThread::readInputParameters()
{
- delete mRsmpInBuffer;
+ delete[] mRsmpInBuffer;
// mRsmpInBuffer is always assigned a new[] below
- delete mRsmpOutBuffer;
+ delete[] mRsmpOutBuffer;
mRsmpOutBuffer = NULL;
delete mResampler;
mResampler = NULL;
@@ -4917,10 +4977,12 @@ void AudioFlinger::RecordThread::readInputParameters()
mChannelMask = mInput->stream->common.get_channels(&mInput->stream->common);
mChannelCount = popcount(mChannelMask);
mFormat = mInput->stream->common.get_format(&mInput->stream->common);
+ if (mFormat != AUDIO_FORMAT_PCM_16_BIT) {
+ ALOGE("HAL format %d not supported; must be AUDIO_FORMAT_PCM_16_BIT", mFormat);
+ }
mFrameSize = audio_stream_frame_size(&mInput->stream->common);
- mInputBytes = mInput->stream->common.get_buffer_size(&mInput->stream->common);
- mFrameCount = mInputBytes / mFrameSize;
- mNormalFrameCount = mFrameCount; // not used by record, but used by input effects
+ mBufferSize = mInput->stream->common.get_buffer_size(&mInput->stream->common);
+ mFrameCount = mBufferSize / mFrameSize;
mRsmpInBuffer = new int16_t[mFrameCount * mChannelCount];
if (mSampleRate != mReqSampleRate && mChannelCount <= FCC_2 && mReqChannelCount <= FCC_2)
@@ -4936,7 +4998,7 @@ void AudioFlinger::RecordThread::readInputParameters()
mResampler = AudioResampler::create(16, channelCount, mReqSampleRate);
mResampler->setSampleRate(mSampleRate);
mResampler->setVolume(AudioMixer::UNITY_GAIN, AudioMixer::UNITY_GAIN);
- mRsmpOutBuffer = new int32_t[mFrameCount * 2];
+ mRsmpOutBuffer = new int32_t[mFrameCount * FCC_2];
// optimization: if mono to mono, alter input frame count as if we were inputting
// stereo samples
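
Several hunks in this file enforce that the record path only handles AUDIO_FORMAT_PCM_16_BIT: openRecord() rejects other formats with BAD_VALUE, the keyFormat routing parameter is refused, and readInputParameters() logs an error if the HAL reports anything else. A small sketch of that shared guard, with a hypothetical helper name:

// Hypothetical guard mirroring the PCM-16 checks added in the hunks above.
#define LOG_TAG "RecordFormatCheck"
#include <system/audio.h>
#include <utils/Errors.h>
#include <utils/Log.h>

using namespace android;

static status_t checkRecordFormat(audio_format_t format) {
    if (format != AUDIO_FORMAT_PCM_16_BIT) {
        ALOGE("record path only supports AUDIO_FORMAT_PCM_16_BIT, got %d", format);
        return BAD_VALUE;
    }
    return NO_ERROR;
}
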
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index a17c279..31d5323 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -126,10 +126,8 @@ public:
audio_channel_mask_t channelMask() const { return mChannelMask; }
audio_format_t format() const { return mFormat; }
// Called by AudioFlinger::frameCount(audio_io_handle_t output) and effects,
- // and returns the normal mix buffer's frame count.
- size_t frameCount() const { return mNormalFrameCount; }
- // Return's the HAL's frame count i.e. fast mixer buffer size.
- size_t frameCountHAL() const { return mFrameCount; }
+ // and returns the [normal mix] buffer's frame count.
+ virtual size_t frameCount() const = 0;
size_t frameSize() const { return mFrameSize; }
// Should be "virtual status_t requestExitAndWait()" and override same
@@ -263,9 +261,11 @@ protected:
Condition mWaitWorkCV;
const sp<AudioFlinger> mAudioFlinger;
+
+ // updated by PlaybackThread::readOutputParameters() or
+ // RecordThread::readInputParameters()
uint32_t mSampleRate;
size_t mFrameCount; // output HAL, direct output, record
- size_t mNormalFrameCount; // normal mixer and effects
audio_channel_mask_t mChannelMask;
uint32_t mChannelCount;
size_t mFrameSize;
@@ -461,8 +461,15 @@ public:
// called with AudioFlinger lock held
void invalidateTracks(audio_stream_type_t streamType);
+ virtual size_t frameCount() const { return mNormalFrameCount; }
+
+ // Returns the HAL's frame count, i.e. the fast mixer buffer size.
+ size_t frameCountHAL() const { return mFrameCount; }
protected:
+ // updated by readOutputParameters()
+ size_t mNormalFrameCount; // normal mixer and effects
+
int16_t* mMixBuffer; // frame size aligned mix buffer
int8_t* mAllocMixBuffer; // mixer buffer allocation address
@@ -830,7 +837,7 @@ public:
audio_channel_mask_t channelMask,
size_t frameCount,
int sessionId,
- IAudioFlinger::track_flags_t flags,
+ IAudioFlinger::track_flags_t *flags,
pid_t tid,
status_t *status);
@@ -871,6 +878,9 @@ public:
static void syncStartEventCallback(const wp<SyncEvent>& event);
void handleSyncStartEvent(const sp<SyncEvent>& event);
+ virtual size_t frameCount() const { return mFrameCount; }
+ bool hasFastRecorder() const { return false; }
+
private:
void clearSyncStartEvent();
@@ -886,11 +896,14 @@ private:
// is used together with mStartStopCond to indicate start()/stop() progress
sp<RecordTrack> mActiveTrack;
Condition mStartStopCond;
+
+ // updated by RecordThread::readInputParameters()
AudioResampler *mResampler;
+ // interleaved stereo pairs of fixed-point signed Q19.12
int32_t *mRsmpOutBuffer;
- int16_t *mRsmpInBuffer;
+ int16_t *mRsmpInBuffer; // [mFrameCount * mChannelCount]
size_t mRsmpInIndex;
- size_t mInputBytes;
+ size_t mBufferSize; // stream buffer size for read()
const uint32_t mReqChannelCount;
const uint32_t mReqSampleRate;
ssize_t mBytesRead;
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index a6c4bda..e676365 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -21,7 +21,6 @@
#include "Configuration.h"
#include <math.h>
-#include <cutils/compiler.h>
#include <utils/Log.h>
#include <private/media/AudioTrackShared.h>
@@ -129,7 +128,7 @@ AudioFlinger::ThreadBase::TrackBase::TrackBase(
} else {
mBuffer = sharedBuffer->pointer();
#if 0
- mCblk->flags = CBLK_FORCEREADY; // FIXME hack, need to fix the track ready logic
+ mCblk->mFlags = CBLK_FORCEREADY; // FIXME hack, need to fix the track ready logic
#endif
}
@@ -317,7 +316,6 @@ AudioFlinger::PlaybackThread::Track::Track(
mPresentationCompleteFrames(0),
mFlags(flags),
mFastIndex(-1),
- mUnderrunCount(0),
mCachedVolume(1.0),
mIsInvalid(false),
mAudioTrackServerProxy(NULL),
@@ -334,7 +332,6 @@ AudioFlinger::PlaybackThread::Track::Track(
mServerProxy = mAudioTrackServerProxy;
// to avoid leaking a track name, do not allocate one unless there is an mCblk
mName = thread->getTrackName_l(channelMask, sessionId);
- mCblk->mName = mName;
if (mName < 0) {
ALOGE("no more track names available");
return;
@@ -350,7 +347,6 @@ AudioFlinger::PlaybackThread::Track::Track(
// this means we are potentially denying other more important fast tracks from
// being created. It would be better to allocate the index dynamically.
mFastIndex = i;
- mCblk->mName = i;
// Read the initial underruns because this field is never cleared by the fast mixer
mObservedUnderruns = thread->getFastTrackUnderruns(i);
thread->mFastTrackAvailMask &= ~(1 << i);
@@ -392,7 +388,7 @@ void AudioFlinger::PlaybackThread::Track::destroy()
/*static*/ void AudioFlinger::PlaybackThread::Track::appendDumpHeader(String8& result)
{
result.append(" Name Client Type Fmt Chn mask Session fCount S F SRate "
- "L dB R dB Server Main buf Aux Buf Flags Underruns\n");
+ "L dB R dB Server Main buf Aux Buf Flags UndFrmCnt\n");
}
void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size)
@@ -469,11 +465,11 @@ void AudioFlinger::PlaybackThread::Track::dump(char* buffer, size_t size)
mAudioTrackServerProxy->getSampleRate(),
20.0 * log10((vlr & 0xFFFF) / 4096.0),
20.0 * log10((vlr >> 16) / 4096.0),
- mCblk->server,
+ mCblk->mServer,
(int)mMainBuffer,
(int)mAuxBuffer,
- mCblk->flags,
- mUnderrunCount,
+ mCblk->mFlags,
+ mAudioTrackServerProxy->getUnderrunFrames(),
nowInUnderrun);
}
@@ -492,10 +488,7 @@ status_t AudioFlinger::PlaybackThread::Track::getNextBuffer(
buffer->frameCount = buf.mFrameCount;
buffer->raw = buf.mRaw;
if (buf.mFrameCount == 0) {
- // only implemented so far for normal tracks, not fast tracks
- mCblk->u.mStreaming.mUnderrunFrames += desiredFrames;
- // FIXME also wake futex so that underrun is noticed more quickly
- (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->flags);
+ mAudioTrackServerProxy->tallyUnderrunFrames(desiredFrames);
}
return status;
}
@@ -519,9 +512,9 @@ bool AudioFlinger::PlaybackThread::Track::isReady() const {
}
if (framesReady() >= mFrameCount ||
- (mCblk->flags & CBLK_FORCEREADY)) {
+ (mCblk->mFlags & CBLK_FORCEREADY)) {
mFillingUpStatus = FS_FILLED;
- android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags);
+ android_atomic_and(~CBLK_FORCEREADY, &mCblk->mFlags);
return true;
}
return false;
@@ -695,7 +688,7 @@ void AudioFlinger::PlaybackThread::Track::reset()
if (!mResetDone) {
// Force underrun condition to avoid false underrun callback until first data is
// written to buffer
- android_atomic_and(~CBLK_FORCEREADY, &mCblk->flags);
+ android_atomic_and(~CBLK_FORCEREADY, &mCblk->mFlags);
mFillingUpStatus = FS_FILLING;
mResetDone = true;
if (mState == FLUSHED) {
@@ -857,7 +850,7 @@ void AudioFlinger::PlaybackThread::Track::invalidate()
{
// FIXME should use proxy, and needs work
audio_track_cblk_t* cblk = mCblk;
- android_atomic_or(CBLK_INVALID, &cblk->flags);
+ android_atomic_or(CBLK_INVALID, &cblk->mFlags);
android_atomic_release_store(0x40000000, &cblk->mFutex);
// client is not in server, so FUTEX_WAKE is needed instead of FUTEX_WAKE_PRIVATE
(void) __futex_syscall3(&cblk->mFutex, FUTEX_WAKE, INT_MAX);
@@ -1173,10 +1166,12 @@ status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer(
}
}
+ uint32_t sr = sampleRate();
+
// adjust the head buffer's PTS to reflect the portion of the head buffer
// that has already been consumed
int64_t effectivePTS = headLocalPTS +
- ((head.position() / mFrameSize) * mLocalTimeFreq / sampleRate());
+ ((head.position() / mFrameSize) * mLocalTimeFreq / sr);
// Calculate the delta in samples between the head of the input buffer
// queue and the start of the next output buffer that will be written.
@@ -1208,7 +1203,7 @@ status_t AudioFlinger::PlaybackThread::TimedTrack::getNextBuffer(
// the current output position is within this threshold, then we will
// concatenate the next input samples to the previous output
const int64_t kSampleContinuityThreshold =
- (static_cast<int64_t>(sampleRate()) << 32) / 250;
+ (static_cast<int64_t>(sr) << 32) / 250;
// if this is the first buffer of audio that we're emitting from this track
// then it should be almost exactly on time.
@@ -1678,7 +1673,7 @@ status_t AudioFlinger::RecordThread::RecordTrack::getNextBuffer(AudioBufferProvi
buffer->raw = buf.mRaw;
if (buf.mFrameCount == 0) {
// FIXME also wake futex so that overrun is noticed more quickly
- (void) android_atomic_or(CBLK_OVERRUN, &mCblk->flags);
+ (void) android_atomic_or(CBLK_OVERRUN, &mCblk->mFlags);
}
return status;
}
@@ -1738,7 +1733,7 @@ void AudioFlinger::RecordThread::RecordTrack::dump(char* buffer, size_t size)
mChannelMask,
mSessionId,
mState,
- mCblk->server,
+ mCblk->mServer,
mFrameCount);
}
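
The Tracks.cpp hunks replace the per-track mUnderrunCount and the direct writes to the control block's streaming union with AudioTrackServerProxy::tallyUnderrunFrames(), and the dump now reports getUnderrunFrames() instead. A toy stand-in for that accounting interface, showing only the shape implied by the calls above; the real proxy lives in AudioTrackShared and also signals the client through the control block:

// Toy illustration only, not the real AudioTrackServerProxy.
#include <atomic>
#include <cstdint>

class UnderrunTally {
public:
    UnderrunTally() : mUnderrunFrames(0) {}

    // Called on the server (mixer) side whenever it wanted frames it could not get.
    void tallyUnderrunFrames(uint32_t frameCount) {
        mUnderrunFrames.fetch_add(frameCount, std::memory_order_relaxed);
    }

    // Read by dump(): cumulative frame count, never reset.
    uint32_t getUnderrunFrames() const {
        return mUnderrunFrames.load(std::memory_order_relaxed);
    }

private:
    std::atomic<uint32_t> mUnderrunFrames;
};
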
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 0fede7e..d659ebb 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -8,31 +8,31 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
CameraService.cpp \
- CameraClient.cpp \
- Camera2Client.cpp \
- ProCamera2Client.cpp \
- Camera2ClientBase.cpp \
- CameraDeviceBase.cpp \
- Camera2Device.cpp \
- Camera3Device.cpp \
CameraDeviceFactory.cpp \
- camera2/Parameters.cpp \
- camera2/FrameProcessor.cpp \
- camera2/StreamingProcessor.cpp \
- camera2/JpegProcessor.cpp \
- camera2/CallbackProcessor.cpp \
- camera2/ZslProcessor.cpp \
- camera2/BurstCapture.cpp \
- camera2/JpegCompressor.cpp \
- camera2/CaptureSequencer.cpp \
- camera2/ProFrameProcessor.cpp \
- camera2/ZslProcessor3.cpp \
- camera3/Camera3Stream.cpp \
- camera3/Camera3IOStreamBase.cpp \
- camera3/Camera3InputStream.cpp \
- camera3/Camera3OutputStream.cpp \
- camera3/Camera3ZslStream.cpp \
- photography/CameraDeviceClient.cpp \
+ common/Camera2ClientBase.cpp \
+ common/CameraDeviceBase.cpp \
+ common/FrameProcessorBase.cpp \
+ api1/CameraClient.cpp \
+ api1/Camera2Client.cpp \
+ api1/client2/Parameters.cpp \
+ api1/client2/FrameProcessor.cpp \
+ api1/client2/StreamingProcessor.cpp \
+ api1/client2/JpegProcessor.cpp \
+ api1/client2/CallbackProcessor.cpp \
+ api1/client2/ZslProcessor.cpp \
+ api1/client2/BurstCapture.cpp \
+ api1/client2/JpegCompressor.cpp \
+ api1/client2/CaptureSequencer.cpp \
+ api1/client2/ZslProcessor3.cpp \
+ api2/CameraDeviceClient.cpp \
+ api_pro/ProCamera2Client.cpp \
+ device2/Camera2Device.cpp \
+ device3/Camera3Device.cpp \
+ device3/Camera3Stream.cpp \
+ device3/Camera3IOStreamBase.cpp \
+ device3/Camera3InputStream.cpp \
+ device3/Camera3OutputStream.cpp \
+ device3/Camera3ZslStream.cpp \
gui/RingBufferConsumer.cpp \
LOCAL_SHARED_LIBRARIES:= \
diff --git a/services/camera/libcameraservice/CameraDeviceFactory.cpp b/services/camera/libcameraservice/CameraDeviceFactory.cpp
index 2acdb5e..7fdf304 100644
--- a/services/camera/libcameraservice/CameraDeviceFactory.cpp
+++ b/services/camera/libcameraservice/CameraDeviceFactory.cpp
@@ -18,11 +18,11 @@
#define LOG_TAG "CameraDeviceFactory"
#include <utils/Log.h>
-#include "CameraDeviceBase.h"
-#include "Camera2Device.h"
-#include "Camera3Device.h"
#include "CameraService.h"
#include "CameraDeviceFactory.h"
+#include "common/CameraDeviceBase.h"
+#include "device2/Camera2Device.h"
+#include "device3/Camera3Device.h"
namespace android {
@@ -69,4 +69,3 @@ void CameraDeviceFactory::registerService(wp<CameraService> service) {
}
}; // namespace android
-
diff --git a/services/camera/libcameraservice/CameraDeviceFactory.h b/services/camera/libcameraservice/CameraDeviceFactory.h
index 93ffaf8..236dc56 100644
--- a/services/camera/libcameraservice/CameraDeviceFactory.h
+++ b/services/camera/libcameraservice/CameraDeviceFactory.h
@@ -20,6 +20,7 @@
#include <utils/RefBase.h>
namespace android {
+
class CameraDeviceBase;
class CameraService;
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 0eb3e32..359b3ca 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -38,10 +38,10 @@
#include <utils/String16.h>
#include "CameraService.h"
-#include "CameraClient.h"
-#include "Camera2Client.h"
-#include "ProCamera2Client.h"
-#include "photography/CameraDeviceClient.h"
+#include "api1/CameraClient.h"
+#include "api1/Camera2Client.h"
+#include "api_pro/ProCamera2Client.h"
+#include "api2/CameraDeviceClient.h"
#include "CameraDeviceFactory.h"
namespace android {
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 2bf7b49..980eb97 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -29,8 +29,8 @@
#include <camera/ICameraClient.h>
#include <camera/IProCameraUser.h>
#include <camera/IProCameraCallbacks.h>
-#include <camera/photography/ICameraDeviceUser.h>
-#include <camera/photography/ICameraDeviceCallbacks.h>
+#include <camera/camera2/ICameraDeviceUser.h>
+#include <camera/camera2/ICameraDeviceCallbacks.h>
#include <camera/ICameraServiceListener.h>
diff --git a/services/camera/libcameraservice/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 203d7c0..46aa60c 100644
--- a/services/camera/libcameraservice/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -23,13 +23,15 @@
#include <cutils/properties.h>
#include <gui/Surface.h>
-#include "camera2/Parameters.h"
-#include "Camera2Client.h"
-#include "Camera2Device.h"
-#include "Camera3Device.h"
-#include "camera2/ZslProcessor.h"
-#include "camera2/ZslProcessor3.h"
+#include "api1/Camera2Client.h"
+
+#include "api1/client2/StreamingProcessor.h"
+#include "api1/client2/JpegProcessor.h"
+#include "api1/client2/CaptureSequencer.h"
+#include "api1/client2/CallbackProcessor.h"
+#include "api1/client2/ZslProcessor.h"
+#include "api1/client2/ZslProcessor3.h"
#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__);
#define ALOG2(...) ALOGD_IF(gLogLevel >= 2, __VA_ARGS__);
diff --git a/services/camera/libcameraservice/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 078e3a3..ed448f3 100644
--- a/services/camera/libcameraservice/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -17,19 +17,29 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H
#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_H
-#include "CameraDeviceBase.h"
#include "CameraService.h"
-#include "camera2/Parameters.h"
-#include "camera2/FrameProcessor.h"
-#include "camera2/StreamingProcessor.h"
-#include "camera2/JpegProcessor.h"
-#include "camera2/ZslProcessorInterface.h"
-#include "camera2/CaptureSequencer.h"
-#include "camera2/CallbackProcessor.h"
-#include "Camera2ClientBase.h"
+#include "common/CameraDeviceBase.h"
+#include "common/Camera2ClientBase.h"
+#include "api1/client2/Parameters.h"
+#include "api1/client2/FrameProcessor.h"
+//#include "api1/client2/StreamingProcessor.h"
+//#include "api1/client2/JpegProcessor.h"
+//#include "api1/client2/ZslProcessorInterface.h"
+//#include "api1/client2/CaptureSequencer.h"
+//#include "api1/client2/CallbackProcessor.h"
namespace android {
+namespace camera2 {
+
+class StreamingProcessor;
+class JpegProcessor;
+class ZslProcessorInterface;
+class CaptureSequencer;
+class CallbackProcessor;
+
+}
+
class IMemory;
/**
* Interface between android.hardware.Camera API and Camera HAL device for versions
diff --git a/services/camera/libcameraservice/CameraClient.cpp b/services/camera/libcameraservice/api1/CameraClient.cpp
index be78f69..ad8856b 100644
--- a/services/camera/libcameraservice/CameraClient.cpp
+++ b/services/camera/libcameraservice/api1/CameraClient.cpp
@@ -20,8 +20,8 @@
#include <cutils/properties.h>
#include <gui/Surface.h>
-#include "CameraClient.h"
-#include "CameraHardwareInterface.h"
+#include "api1/CameraClient.h"
+#include "device1/CameraHardwareInterface.h"
#include "CameraService.h"
namespace android {
@@ -349,6 +349,7 @@ void CameraClient::setPreviewCallbackFlag(int callback_flag) {
status_t CameraClient::setPreviewCallbackTarget(
const sp<IGraphicBufferProducer>& callbackProducer) {
+ (void)callbackProducer;
ALOGE("%s: Unimplemented!", __FUNCTION__);
return INVALID_OPERATION;
}
diff --git a/services/camera/libcameraservice/CameraClient.h b/services/camera/libcameraservice/api1/CameraClient.h
index abde75a..abde75a 100644
--- a/services/camera/libcameraservice/CameraClient.h
+++ b/services/camera/libcameraservice/api1/CameraClient.h
diff --git a/services/camera/libcameraservice/camera2/BurstCapture.cpp b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
index 192d419..0bfdfd4 100644
--- a/services/camera/libcameraservice/camera2/BurstCapture.cpp
+++ b/services/camera/libcameraservice/api1/client2/BurstCapture.cpp
@@ -22,8 +22,8 @@
#include "BurstCapture.h"
-#include "../Camera2Client.h"
-#include "JpegCompressor.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/JpegCompressor.h"
namespace android {
namespace camera2 {
diff --git a/services/camera/libcameraservice/camera2/BurstCapture.h b/services/camera/libcameraservice/api1/client2/BurstCapture.h
index a2cc893..ea321fd 100644
--- a/services/camera/libcameraservice/camera2/BurstCapture.h
+++ b/services/camera/libcameraservice/api1/client2/BurstCapture.h
@@ -17,11 +17,12 @@
#ifndef ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H
#define ANDROID_SERVERS_CAMERA_BURST_CAPTURE_H
-#include "camera/CameraMetadata.h"
+#include <camera/CameraMetadata.h>
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <gui/CpuConsumer.h>
-#include "Camera2Device.h"
+
+#include "device2/Camera2Device.h"
namespace android {
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index d7bafda..9d8c4a1 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -20,11 +20,11 @@
#include <utils/Log.h>
#include <utils/Trace.h>
-
-#include "CallbackProcessor.h"
#include <gui/Surface.h>
-#include "../CameraDeviceBase.h"
-#include "../Camera2Client.h"
+
+#include "common/CameraDeviceBase.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/CallbackProcessor.h"
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
@@ -114,8 +114,7 @@ status_t CallbackProcessor::updateStream(const Parameters &params) {
mCallbackConsumer = new CpuConsumer(bq, kCallbackHeapCount);
mCallbackConsumer->setFrameAvailableListener(this);
mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
- mCallbackWindow = new Surface(
- mCallbackConsumer->getProducerInterface());
+ mCallbackWindow = new Surface(bq);
}
if (mCallbackStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/camera2/CallbackProcessor.h b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
index 17dcfb1..613f5be 100644
--- a/services/camera/libcameraservice/camera2/CallbackProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.h
@@ -23,9 +23,8 @@
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include <gui/CpuConsumer.h>
-#include "Parameters.h"
-#include "camera/CameraMetadata.h"
-#include "Camera2Heap.h"
+
+#include "api1/client2/Camera2Heap.h"
namespace android {
@@ -34,6 +33,8 @@ class CameraDeviceBase;
namespace camera2 {
+class Parameters;
+
/***
* Still image capture output image processing
*/
diff --git a/services/camera/libcameraservice/camera2/Camera2Heap.h b/services/camera/libcameraservice/api1/client2/Camera2Heap.h
index 9c72d76..9c72d76 100644
--- a/services/camera/libcameraservice/camera2/Camera2Heap.h
+++ b/services/camera/libcameraservice/api1/client2/Camera2Heap.h
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
index e5a011c..ad1590a 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.cpp
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.cpp
@@ -22,12 +22,11 @@
#include <utils/Trace.h>
#include <utils/Vector.h>
-#include "CaptureSequencer.h"
-#include "BurstCapture.h"
-#include "../Camera2Device.h"
-#include "../Camera2Client.h"
-#include "Parameters.h"
-#include "ZslProcessorInterface.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/CaptureSequencer.h"
+#include "api1/client2/BurstCapture.h"
+#include "api1/client2/Parameters.h"
+#include "api1/client2/ZslProcessorInterface.h"
namespace android {
namespace camera2 {
diff --git a/services/camera/libcameraservice/camera2/CaptureSequencer.h b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
index 76750aa..76750aa 100644
--- a/services/camera/libcameraservice/camera2/CaptureSequencer.h
+++ b/services/camera/libcameraservice/api1/client2/CaptureSequencer.h
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.cpp b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
index 114a7a8..c34cb12 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.cpp
@@ -21,16 +21,16 @@
#include <utils/Log.h>
#include <utils/Trace.h>
-#include "FrameProcessor.h"
-#include "../CameraDeviceBase.h"
-#include "../Camera2Client.h"
+#include "common/CameraDeviceBase.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/FrameProcessor.h"
namespace android {
namespace camera2 {
FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
wp<Camera2Client> client) :
- ProFrameProcessor(device),
+ FrameProcessorBase(device),
mClient(client),
mLastFrameNumberOfFaces(0) {
@@ -58,7 +58,7 @@ bool FrameProcessor::processSingleFrame(CameraMetadata &frame,
process3aState(frame, client);
}
- if (!ProFrameProcessor::processSingleFrame(frame, device)) {
+ if (!FrameProcessorBase::processSingleFrame(frame, device)) {
return false;
}
diff --git a/services/camera/libcameraservice/camera2/FrameProcessor.h b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
index f480c55..2a17d45 100644
--- a/services/camera/libcameraservice/camera2/FrameProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/FrameProcessor.h
@@ -24,7 +24,7 @@
#include <utils/List.h>
#include <camera/CameraMetadata.h>
-#include "ProFrameProcessor.h"
+#include "common/FrameProcessorBase.h"
struct camera_frame_metadata;
@@ -37,7 +37,7 @@ namespace camera2 {
/* Output frame metadata processing thread. This thread waits for new
* frames from the device, and analyzes them as necessary.
*/
-class FrameProcessor : public ProFrameProcessor {
+class FrameProcessor : public FrameProcessorBase {
public:
FrameProcessor(wp<CameraDeviceBase> device, wp<Camera2Client> client);
~FrameProcessor();
diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.cpp b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
index 2f0c67d..2f0c67d 100644
--- a/services/camera/libcameraservice/camera2/JpegCompressor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegCompressor.cpp
diff --git a/services/camera/libcameraservice/camera2/JpegCompressor.h b/services/camera/libcameraservice/api1/client2/JpegCompressor.h
index 945b1de..945b1de 100644
--- a/services/camera/libcameraservice/camera2/JpegCompressor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegCompressor.h
diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index 1d739cd..77d5c8a 100644
--- a/services/camera/libcameraservice/camera2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -24,12 +24,13 @@
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
-
-#include "JpegProcessor.h"
#include <gui/Surface.h>
-#include "../CameraDeviceBase.h"
-#include "../Camera2Client.h"
+#include "common/CameraDeviceBase.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/Camera2Heap.h"
+#include "api1/client2/CaptureSequencer.h"
+#include "api1/client2/JpegProcessor.h"
namespace android {
namespace camera2 {
@@ -86,8 +87,7 @@ status_t JpegProcessor::updateStream(const Parameters &params) {
mCaptureConsumer = new CpuConsumer(bq, 1);
mCaptureConsumer->setFrameAvailableListener(this);
mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
- mCaptureWindow = new Surface(
- mCaptureConsumer->getProducerInterface());
+ mCaptureWindow = new Surface(bq);
// Create memory for API consumption
mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
"Camera2Client::CaptureHeap");
diff --git a/services/camera/libcameraservice/camera2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index a38611c..b2c05df 100644
--- a/services/camera/libcameraservice/camera2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -23,7 +23,7 @@
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include <gui/CpuConsumer.h>
-#include "Parameters.h"
+
#include "camera/CameraMetadata.h"
namespace android {
@@ -35,6 +35,7 @@ class MemoryHeapBase;
namespace camera2 {
class CaptureSequencer;
+class Parameters;
/***
* Still image capture output image processing
diff --git a/services/camera/libcameraservice/camera2/Parameters.cpp b/services/camera/libcameraservice/api1/client2/Parameters.cpp
index 0459866..0459866 100644
--- a/services/camera/libcameraservice/camera2/Parameters.cpp
+++ b/services/camera/libcameraservice/api1/client2/Parameters.cpp
diff --git a/services/camera/libcameraservice/camera2/Parameters.h b/services/camera/libcameraservice/api1/client2/Parameters.h
index 464830c..464830c 100644
--- a/services/camera/libcameraservice/camera2/Parameters.h
+++ b/services/camera/libcameraservice/api1/client2/Parameters.h
diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
index 5981be7..dfe8580 100644
--- a/services/camera/libcameraservice/camera2/StreamingProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.cpp
@@ -30,10 +30,10 @@
#include <gui/Surface.h>
#include <media/hardware/MetadataBufferType.h>
-#include "StreamingProcessor.h"
-#include "Camera2Heap.h"
-#include "../Camera2Client.h"
-#include "../CameraDeviceBase.h"
+#include "common/CameraDeviceBase.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/StreamingProcessor.h"
+#include "api1/client2/Camera2Heap.h"
namespace android {
namespace camera2 {
@@ -325,8 +325,7 @@ status_t StreamingProcessor::updateRecordingStream(const Parameters &params) {
mRecordingHeapCount + 1);
mRecordingConsumer->setFrameAvailableListener(this);
mRecordingConsumer->setName(String8("Camera2-RecordingConsumer"));
- mRecordingWindow = new Surface(
- mRecordingConsumer->getProducerInterface());
+ mRecordingWindow = new Surface(bq);
newConsumer = true;
// Allocate memory later, since we don't know buffer size until receipt
}
diff --git a/services/camera/libcameraservice/camera2/StreamingProcessor.h b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
index 3ec2df7..d879b83 100644
--- a/services/camera/libcameraservice/camera2/StreamingProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/StreamingProcessor.h
@@ -21,7 +21,6 @@
#include <utils/String16.h>
#include <gui/BufferItemConsumer.h>
-#include "Parameters.h"
#include "camera/CameraMetadata.h"
namespace android {
@@ -32,6 +31,7 @@ class IMemory;
namespace camera2 {
+class Parameters;
class Camera2Heap;
/**
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index 0094992..3b118f4 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -27,12 +27,12 @@
#include <utils/Log.h>
#include <utils/Trace.h>
-
-#include "ZslProcessor.h"
#include <gui/Surface.h>
-#include "../CameraDeviceBase.h"
-#include "../Camera2Client.h"
+#include "common/CameraDeviceBase.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/CaptureSequencer.h"
+#include "api1/client2/ZslProcessor.h"
namespace android {
namespace camera2 {
@@ -134,8 +134,7 @@ status_t ZslProcessor::updateStream(const Parameters &params) {
kZslBufferDepth);
mZslConsumer->setFrameAvailableListener(this);
mZslConsumer->setName(String8("Camera2Client::ZslConsumer"));
- mZslWindow = new Surface(
- mZslConsumer->getProducerInterface());
+ mZslWindow = new Surface(bq);
}
if (mZslStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 27b597e..5fb178f 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -23,12 +23,11 @@
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include <gui/BufferItemConsumer.h>
-#include "Parameters.h"
-#include "FrameProcessor.h"
-#include "camera/CameraMetadata.h"
-#include "Camera2Heap.h"
-#include "../CameraDeviceBase.h"
-#include "ZslProcessorInterface.h"
+#include <camera/CameraMetadata.h>
+
+#include "common/CameraDeviceBase.h"
+#include "api1/client2/ZslProcessorInterface.h"
+#include "api1/client2/FrameProcessor.h"
namespace android {
@@ -37,6 +36,7 @@ class Camera2Client;
namespace camera2 {
class CaptureSequencer;
+class Parameters;
/***
* ZSL queue processing
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
index 40c77df..7c4da50 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor3.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.cpp
@@ -27,13 +27,13 @@
#include <utils/Log.h>
#include <utils/Trace.h>
-
-#include "ZslProcessor3.h"
#include <gui/Surface.h>
-#include "../CameraDeviceBase.h"
-#include "../Camera3Device.h"
-#include "../Camera2Client.h"
+#include "common/CameraDeviceBase.h"
+#include "api1/Camera2Client.h"
+#include "api1/client2/CaptureSequencer.h"
+#include "api1/client2/ZslProcessor3.h"
+#include "device3/Camera3Device.h"
namespace android {
namespace camera2 {
diff --git a/services/camera/libcameraservice/camera2/ZslProcessor3.h b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
index cb98b99..35b85f5 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessor3.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor3.h
@@ -23,13 +23,11 @@
#include <utils/Mutex.h>
#include <utils/Condition.h>
#include <gui/BufferItemConsumer.h>
-#include "Parameters.h"
-#include "FrameProcessor.h"
-#include "camera/CameraMetadata.h"
-#include "Camera2Heap.h"
-#include "../CameraDeviceBase.h"
-#include "ZslProcessorInterface.h"
-#include "../camera3/Camera3ZslStream.h"
+#include <camera/CameraMetadata.h>
+
+#include "api1/client2/FrameProcessor.h"
+#include "api1/client2/ZslProcessorInterface.h"
+#include "device3/Camera3ZslStream.h"
namespace android {
@@ -38,6 +36,7 @@ class Camera2Client;
namespace camera2 {
class CaptureSequencer;
+class Parameters;
/***
* ZSL queue processing
diff --git a/services/camera/libcameraservice/camera2/ZslProcessorInterface.h b/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h
index 183c0c2..183c0c2 100644
--- a/services/camera/libcameraservice/camera2/ZslProcessorInterface.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessorInterface.h
diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index b7239e2..414316d 100644
--- a/services/camera/libcameraservice/photography/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -18,16 +18,16 @@
#define ATRACE_TAG ATRACE_TAG_CAMERA
// #define LOG_NDEBUG 0
+#include <cutils/properties.h>
#include <utils/Log.h>
#include <utils/Trace.h>
-
-#include <cutils/properties.h>
#include <gui/Surface.h>
-#include "camera2/Parameters.h"
-#include "CameraDeviceClient.h"
-#include "camera2/ProFrameProcessor.h"
-#include "CameraDeviceBase.h"
-#include <camera/photography/CaptureRequest.h>
+#include <camera/camera2/CaptureRequest.h>
+
+#include "common/CameraDeviceBase.h"
+#include "api2/CameraDeviceClient.h"
+
+
namespace android {
using namespace camera2;
@@ -83,7 +83,7 @@ status_t CameraDeviceClient::initialize(camera_module_t *module)
}
String8 threadName;
- mFrameProcessor = new ProFrameProcessor(mDevice);
+ mFrameProcessor = new FrameProcessorBase(mDevice);
threadName = String8::format("CDU-%d-FrameProc", mCameraId);
mFrameProcessor->run(threadName.string());
diff --git a/services/camera/libcameraservice/photography/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index bb2949c..21d633c 100644
--- a/services/camera/libcameraservice/photography/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -17,12 +17,12 @@
#ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H
#define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERADEVICECLIENT_H
-#include "CameraDeviceBase.h"
+#include <camera/camera2/ICameraDeviceUser.h>
+#include <camera/camera2/ICameraDeviceCallbacks.h>
+
#include "CameraService.h"
-#include "camera2/ProFrameProcessor.h"
-#include "Camera2ClientBase.h"
-#include <camera/photography/ICameraDeviceUser.h>
-#include <camera/photography/ICameraDeviceCallbacks.h>
+#include "common/FrameProcessorBase.h"
+#include "common/Camera2ClientBase.h"
namespace android {
@@ -57,7 +57,7 @@ protected:
*/
class CameraDeviceClient :
public Camera2ClientBase<CameraDeviceClientBase>,
- public camera2::ProFrameProcessor::FilteredListener
+ public camera2::FrameProcessorBase::FilteredListener
{
public:
/**
@@ -120,7 +120,7 @@ private:
/** ICameraDeviceUser interface-related private members */
/** Preview callback related members */
- sp<camera2::ProFrameProcessor> mFrameProcessor;
+ sp<camera2::FrameProcessorBase> mFrameProcessor;
static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0;
static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
diff --git a/services/camera/libcameraservice/ProCamera2Client.cpp b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
index 251fdab..2b583e5 100644
--- a/services/camera/libcameraservice/ProCamera2Client.cpp
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.cpp
@@ -24,10 +24,9 @@
#include <cutils/properties.h>
#include <gui/Surface.h>
#include <gui/Surface.h>
-#include "camera2/Parameters.h"
-#include "ProCamera2Client.h"
-#include "camera2/ProFrameProcessor.h"
-#include "CameraDeviceBase.h"
+
+#include "api_pro/ProCamera2Client.h"
+#include "common/CameraDeviceBase.h"
namespace android {
using namespace camera2;
@@ -62,7 +61,7 @@ status_t ProCamera2Client::initialize(camera_module_t *module)
}
String8 threadName;
- mFrameProcessor = new ProFrameProcessor(mDevice);
+ mFrameProcessor = new FrameProcessorBase(mDevice);
threadName = String8::format("PC2-%d-FrameProc", mCameraId);
mFrameProcessor->run(threadName.string());
@@ -218,6 +217,7 @@ status_t ProCamera2Client::submitRequest(camera_metadata_t* request,
}
status_t ProCamera2Client::cancelRequest(int requestId) {
+ (void)requestId;
ATRACE_CALL();
ALOGV("%s", __FUNCTION__);
diff --git a/services/camera/libcameraservice/ProCamera2Client.h b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
index faee9f9..0bf6784 100644
--- a/services/camera/libcameraservice/ProCamera2Client.h
+++ b/services/camera/libcameraservice/api_pro/ProCamera2Client.h
@@ -17,10 +17,10 @@
#ifndef ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H
#define ANDROID_SERVERS_CAMERA_PROCAMERA2CLIENT_H
-#include "Camera2Device.h"
#include "CameraService.h"
-#include "camera2/ProFrameProcessor.h"
-#include "Camera2ClientBase.h"
+#include "common/FrameProcessorBase.h"
+#include "common/Camera2ClientBase.h"
+#include "device2/Camera2Device.h"
namespace android {
@@ -31,7 +31,7 @@ class IMemory;
*/
class ProCamera2Client :
public Camera2ClientBase<CameraService::ProClient>,
- public camera2::ProFrameProcessor::FilteredListener
+ public camera2::FrameProcessorBase::FilteredListener
{
public:
/**
@@ -105,7 +105,7 @@ private:
/** IProCameraUser interface-related private members */
/** Preview callback related members */
- sp<camera2::ProFrameProcessor> mFrameProcessor;
+ sp<camera2::FrameProcessorBase> mFrameProcessor;
static const int32_t FRAME_PROCESSOR_LISTENER_MIN_ID = 0;
static const int32_t FRAME_PROCESSOR_LISTENER_MAX_ID = 0x7fffffffL;
diff --git a/services/camera/libcameraservice/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 561dcfc..060e2a2 100644
--- a/services/camera/libcameraservice/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -24,13 +24,11 @@
#include <cutils/properties.h>
#include <gui/Surface.h>
#include <gui/Surface.h>
-#include "camera2/Parameters.h"
-#include "Camera2ClientBase.h"
-#include "camera2/ProFrameProcessor.h"
-#include "photography/CameraDeviceClient.h"
+#include "common/Camera2ClientBase.h"
+
+#include "api2/CameraDeviceClient.h"
-#include "CameraDeviceBase.h"
#include "CameraDeviceFactory.h"
namespace android {
diff --git a/services/camera/libcameraservice/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index c9a24d7..d23197c 100644
--- a/services/camera/libcameraservice/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -17,13 +17,14 @@
#ifndef ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H
#define ANDROID_SERVERS_CAMERA_CAMERA2CLIENT_BASE_H
-#include "CameraDeviceBase.h"
-#include "CameraService.h"
+#include "common/CameraDeviceBase.h"
namespace android {
class IMemory;
+class CameraService;
+
template <typename TClientBase>
class Camera2ClientBase :
public TClientBase,
diff --git a/services/camera/libcameraservice/CameraDeviceBase.cpp b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
index 6c4e87f..6c4e87f 100644
--- a/services/camera/libcameraservice/CameraDeviceBase.cpp
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.cpp
diff --git a/services/camera/libcameraservice/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index aa92bec..aa92bec 100644
--- a/services/camera/libcameraservice/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
index 4012fc5..e7b440a 100644
--- a/services/camera/libcameraservice/camera2/ProFrameProcessor.cpp
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.cpp
@@ -14,29 +14,29 @@
* limitations under the License.
*/
-#define LOG_TAG "Camera2-ProFrameProcessor"
+#define LOG_TAG "Camera2-FrameProcessorBase"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Trace.h>
-#include "ProFrameProcessor.h"
-#include "../CameraDeviceBase.h"
+#include "common/FrameProcessorBase.h"
+#include "common/CameraDeviceBase.h"
namespace android {
namespace camera2 {
-ProFrameProcessor::ProFrameProcessor(wp<CameraDeviceBase> device) :
+FrameProcessorBase::FrameProcessorBase(wp<CameraDeviceBase> device) :
Thread(/*canCallJava*/false),
mDevice(device) {
}
-ProFrameProcessor::~ProFrameProcessor() {
+FrameProcessorBase::~FrameProcessorBase() {
ALOGV("%s: Exit", __FUNCTION__);
}
-status_t ProFrameProcessor::registerListener(int32_t minId,
+status_t FrameProcessorBase::registerListener(int32_t minId,
int32_t maxId, wp<FilteredListener> listener) {
Mutex::Autolock l(mInputMutex);
ALOGV("%s: Registering listener for frame id range %d - %d",
@@ -46,7 +46,7 @@ status_t ProFrameProcessor::registerListener(int32_t minId,
return OK;
}
-status_t ProFrameProcessor::removeListener(int32_t minId,
+status_t FrameProcessorBase::removeListener(int32_t minId,
int32_t maxId,
wp<FilteredListener> listener) {
Mutex::Autolock l(mInputMutex);
@@ -63,13 +63,13 @@ status_t ProFrameProcessor::removeListener(int32_t minId,
return OK;
}
-void ProFrameProcessor::dump(int fd, const Vector<String16>& /*args*/) {
+void FrameProcessorBase::dump(int fd, const Vector<String16>& /*args*/) {
String8 result(" Latest received frame:\n");
write(fd, result.string(), result.size());
mLastFrame.dump(fd, 2, 6);
}
-bool ProFrameProcessor::threadLoop() {
+bool FrameProcessorBase::threadLoop() {
status_t res;
sp<CameraDeviceBase> device;
@@ -82,14 +82,14 @@ bool ProFrameProcessor::threadLoop() {
if (res == OK) {
processNewFrames(device);
} else if (res != TIMED_OUT) {
- ALOGE("ProFrameProcessor: Error waiting for new "
+ ALOGE("FrameProcessorBase: Error waiting for new "
"frames: %s (%d)", strerror(-res), res);
}
return true;
}
-void ProFrameProcessor::processNewFrames(const sp<CameraDeviceBase> &device) {
+void FrameProcessorBase::processNewFrames(const sp<CameraDeviceBase> &device) {
status_t res;
ATRACE_CALL();
CameraMetadata frame;
@@ -125,14 +125,14 @@ void ProFrameProcessor::processNewFrames(const sp<CameraDeviceBase> &device) {
return;
}
-bool ProFrameProcessor::processSingleFrame(CameraMetadata &frame,
+bool FrameProcessorBase::processSingleFrame(CameraMetadata &frame,
const sp<CameraDeviceBase> &device) {
ALOGV("%s: Camera %d: Process single frame (is empty? %d)",
__FUNCTION__, device->getId(), frame.isEmpty());
return processListeners(frame, device) == OK;
}
-status_t ProFrameProcessor::processListeners(const CameraMetadata &frame,
+status_t FrameProcessorBase::processListeners(const CameraMetadata &frame,
const sp<CameraDeviceBase> &device) {
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
@@ -143,7 +143,7 @@ status_t ProFrameProcessor::processListeners(const CameraMetadata &frame,
__FUNCTION__, device->getId());
return BAD_VALUE;
}
- int32_t frameId = entry.data.i32[0];
+ int32_t requestId = entry.data.i32[0];
List<sp<FilteredListener> > listeners;
{
@@ -151,8 +151,8 @@ status_t ProFrameProcessor::processListeners(const CameraMetadata &frame,
List<RangeListener>::iterator item = mRangeListeners.begin();
while (item != mRangeListeners.end()) {
- if (frameId >= item->minId &&
- frameId < item->maxId) {
+ if (requestId >= item->minId &&
+ requestId < item->maxId) {
sp<FilteredListener> listener = item->listener.promote();
if (listener == 0) {
item = mRangeListeners.erase(item);
@@ -167,7 +167,7 @@ status_t ProFrameProcessor::processListeners(const CameraMetadata &frame,
ALOGV("Got %d range listeners out of %d", listeners.size(), mRangeListeners.size());
List<sp<FilteredListener> >::iterator item = listeners.begin();
for (; item != listeners.end(); item++) {
- (*item)->onFrameAvailable(frameId, frame);
+ (*item)->onFrameAvailable(requestId, frame);
}
return OK;
}
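
Editor's note: after the rename, FrameProcessorBase dispatches each result's metadata to listeners registered for a request-ID range, and the loop above keys that dispatch on ANDROID_REQUEST_ID rather than a frame counter. A hedged sketch of a client-side listener, assuming the FilteredListener signature shown in the header hunk below (onFrameAvailable(int32_t, const CameraMetadata &)); the class and tag names are hypothetical:

    #define LOG_TAG "ExampleFrameListener"
    #include <utils/Log.h>
    #include <camera/CameraMetadata.h>
    #include "common/FrameProcessorBase.h"

    using namespace android;
    using namespace android::camera2;

    // Illustrative listener: receives metadata for any completed capture whose
    // request ID falls inside the registered [minId, maxId) range.
    class RequestIdLogger : public FrameProcessorBase::FilteredListener {
      public:
        virtual void onFrameAvailable(int32_t requestId,
                                      const CameraMetadata & /*frame*/) {
            ALOGV("Got result metadata for request %d", requestId);
        }
    };

    // Usage sketch (device is a wp<CameraDeviceBase> obtained elsewhere):
    //   sp<FrameProcessorBase> proc = new FrameProcessorBase(device);
    //   sp<RequestIdLogger> logger = new RequestIdLogger();
    //   proc->registerListener(/*minId*/ 0, /*maxId*/ 0x7fffffff, logger);
    //   proc->run("Example-FrameProc");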
diff --git a/services/camera/libcameraservice/camera2/ProFrameProcessor.h b/services/camera/libcameraservice/common/FrameProcessorBase.h
index b82942c..1e46beb 100644
--- a/services/camera/libcameraservice/camera2/ProFrameProcessor.h
+++ b/services/camera/libcameraservice/common/FrameProcessorBase.h
@@ -33,10 +33,10 @@ namespace camera2 {
/* Output frame metadata processing thread. This thread waits for new
* frames from the device, and analyzes them as necessary.
*/
-class ProFrameProcessor: public Thread {
+class FrameProcessorBase: public Thread {
public:
- ProFrameProcessor(wp<CameraDeviceBase> device);
- virtual ~ProFrameProcessor();
+ FrameProcessorBase(wp<CameraDeviceBase> device);
+ virtual ~FrameProcessorBase();
struct FilteredListener: virtual public RefBase {
virtual void onFrameAvailable(int32_t frameId,
diff --git a/services/camera/libcameraservice/CameraHardwareInterface.h b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
index 87b2807..87b2807 100644
--- a/services/camera/libcameraservice/CameraHardwareInterface.h
+++ b/services/camera/libcameraservice/device1/CameraHardwareInterface.h
diff --git a/services/camera/libcameraservice/Camera2Device.cpp b/services/camera/libcameraservice/device2/Camera2Device.cpp
index 710d0e9..710d0e9 100644
--- a/services/camera/libcameraservice/Camera2Device.cpp
+++ b/services/camera/libcameraservice/device2/Camera2Device.cpp
diff --git a/services/camera/libcameraservice/Camera2Device.h b/services/camera/libcameraservice/device2/Camera2Device.h
index 372ce9f..8945ec2 100644
--- a/services/camera/libcameraservice/Camera2Device.h
+++ b/services/camera/libcameraservice/device2/Camera2Device.h
@@ -22,7 +22,7 @@
#include <utils/List.h>
#include <utils/Mutex.h>
-#include "CameraDeviceBase.h"
+#include "common/CameraDeviceBase.h"
namespace android {
diff --git a/services/camera/libcameraservice/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 9d0f392..0a4a24c 100644
--- a/services/camera/libcameraservice/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -40,9 +40,11 @@
#include <utils/Log.h>
#include <utils/Trace.h>
#include <utils/Timers.h>
-#include "Camera3Device.h"
-#include "camera3/Camera3OutputStream.h"
-#include "camera3/Camera3InputStream.h"
+
+#include "device3/Camera3Device.h"
+#include "device3/Camera3OutputStream.h"
+#include "device3/Camera3InputStream.h"
+#include "device3/Camera3ZslStream.h"
using namespace android::camera3;
diff --git a/services/camera/libcameraservice/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 2328f89..76c08ae 100644
--- a/services/camera/libcameraservice/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -22,13 +22,10 @@
#include <utils/List.h>
#include <utils/Mutex.h>
#include <utils/Thread.h>
+#include <utils/KeyedVector.h>
+#include <hardware/camera3.h>
-#include "CameraDeviceBase.h"
-#include "camera3/Camera3Stream.h"
-#include "camera3/Camera3OutputStream.h"
-#include "camera3/Camera3ZslStream.h"
-
-#include "hardware/camera3.h"
+#include "common/CameraDeviceBase.h"
/**
* Function pointer types with C calling convention to
@@ -46,6 +43,15 @@ extern "C" {
namespace android {
+namespace camera3 {
+
+class Camera3Stream;
+class Camera3ZslStream;
+class Camera3OutputStreamInterface;
+class Camera3StreamInterface;
+
+}
+
/**
* CameraDevice for HAL devices with version CAMERA_DEVICE_API_VERSION_3_0
*/
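
Editor's note: Camera3Device.h now forward-declares the camera3 stream classes instead of including their headers, so the device header's include graph stays small; the full definitions are only needed in Camera3Device.cpp. A generic header-side sketch of the idiom, with hypothetical names:

    // ExampleDevice.h -- only the stream *name* is needed here, because the
    // device stores streams behind sp<> and defines its destructor in the .cpp,
    // where ExampleStream.h is included and the type is complete.
    #include <utils/RefBase.h>
    #include <utils/KeyedVector.h>

    namespace example {

    class ExampleStream;  // forward declaration, no #include required

    class ExampleDevice : public android::RefBase {
      public:
        ~ExampleDevice();  // defined in ExampleDevice.cpp
      private:
        android::KeyedVector<int, android::sp<ExampleStream> > mStreams;
    };

    }  // namespace example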
diff --git a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 0850566..0850566 100644
--- a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
diff --git a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
index 74c4484..9432a59 100644
--- a/services/camera/libcameraservice/camera3/Camera3IOStreamBase.h
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.h
@@ -77,6 +77,8 @@ class Camera3IOStreamBase :
virtual size_t getBufferCountLocked();
+ virtual status_t getEndpointUsage(uint32_t *usage) = 0;
+
status_t getBufferPreconditionCheckLocked() const;
status_t returnBufferPreconditionCheckLocked() const;
diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index e9a9c2b..c80f512 100644
--- a/services/camera/libcameraservice/camera3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -182,10 +182,6 @@ status_t Camera3InputStream::disconnectLocked() {
return OK;
}
-sp<IGraphicBufferProducer> Camera3InputStream::getProducerInterface() const {
- return mConsumer->getProducerInterface();
-}
-
void Camera3InputStream::dump(int fd, const Vector<String16> &args) const {
(void) args;
String8 lines;
@@ -234,6 +230,12 @@ status_t Camera3InputStream::configureQueueLocked() {
return OK;
}
+status_t Camera3InputStream::getEndpointUsage(uint32_t *usage) {
+ // Per HAL3 spec, input streams have 0 for their initial usage field.
+ *usage = 0;
+ return OK;
+}
+
}; // namespace camera3
}; // namespace android
diff --git a/services/camera/libcameraservice/camera3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index 8adda88..681d684 100644
--- a/services/camera/libcameraservice/camera3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -44,13 +44,6 @@ class Camera3InputStream : public Camera3IOStreamBase {
virtual void dump(int fd, const Vector<String16> &args) const;
- /**
- * Get the producer interface for this stream, to hand off to a producer.
- * The producer must be connected to the provided interface before
- * finishConfigure is called on this stream.
- */
- sp<IGraphicBufferProducer> getProducerInterface() const;
-
private:
typedef BufferItemConsumer::BufferItem BufferItem;
@@ -79,6 +72,8 @@ class Camera3InputStream : public Camera3IOStreamBase {
virtual status_t configureQueueLocked();
+ virtual status_t getEndpointUsage(uint32_t *usage);
+
}; // class Camera3InputStream
}; // namespace camera3
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 0ec2b05..35cb5ba 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -364,6 +364,17 @@ status_t Camera3OutputStream::disconnectLocked() {
return OK;
}
+status_t Camera3OutputStream::getEndpointUsage(uint32_t *usage) {
+
+ status_t res;
+ int32_t u = 0;
+ res = mConsumer->query(mConsumer.get(),
+ NATIVE_WINDOW_CONSUMER_USAGE_BITS, &u);
+ *usage = u;
+
+ return res;
+}
+
}; // namespace camera3
}; // namespace android
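
Editor's note: the new getEndpointUsage() hook lets Camera3Stream ask each concrete stream what usage bits its endpoint needs before handing camera3_stream::usage to the HAL; output streams query the consumer behind the ANativeWindow, while input streams report 0 per the HAL3 spec. A sketch of the output-side query in isolation, assuming an already-connected window; the helper name is hypothetical:

    #include <system/window.h>
    #include <utils/Errors.h>
    #include <utils/RefBase.h>

    using namespace android;

    // Illustrative only: read the consumer usage bits advertised by the
    // window's consumer (e.g. composer or video-encoder usage flags).
    static status_t queryConsumerUsage(const sp<ANativeWindow> &window,
                                       uint32_t *usage) {
        int value = 0;
        status_t res = window->query(window.get(),
                                     NATIVE_WINDOW_CONSUMER_USAGE_BITS, &value);
        if (res != OK) {
            return res;
        }
        *usage = static_cast<uint32_t>(value);
        return OK;
    }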
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 774fbdd..6cbb9f4 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -92,6 +92,9 @@ class Camera3OutputStream :
virtual status_t configureQueueLocked();
virtual status_t disconnectLocked();
+
+ virtual status_t getEndpointUsage(uint32_t *usage);
+
}; // class Camera3OutputStream
} // namespace camera3
diff --git a/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
index aae72cf..aae72cf 100644
--- a/services/camera/libcameraservice/camera3/Camera3OutputStreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStreamInterface.h
diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index ab563df..a6872aa 100644
--- a/services/camera/libcameraservice/camera3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -77,7 +77,9 @@ int Camera3Stream::getFormat() const {
}
camera3_stream* Camera3Stream::startConfiguration() {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
+ status_t res;
switch (mState) {
case STATE_ERROR:
@@ -107,8 +109,15 @@ camera3_stream* Camera3Stream::startConfiguration() {
return NULL;
}
- oldUsage = usage;
- oldMaxBuffers = max_buffers;
+ oldUsage = camera3_stream::usage;
+ oldMaxBuffers = camera3_stream::max_buffers;
+
+ res = getEndpointUsage(&(camera3_stream::usage));
+ if (res != OK) {
+ ALOGE("%s: Cannot query consumer endpoint usage!",
+ __FUNCTION__);
+ return NULL;
+ }
if (mState == STATE_CONSTRUCTED) {
mState = STATE_IN_CONFIG;
@@ -125,6 +134,7 @@ bool Camera3Stream::isConfiguring() const {
}
status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) {
+ ATRACE_CALL();
Mutex::Autolock l(mLock);
switch (mState) {
case STATE_ERROR:
@@ -147,8 +157,8 @@ status_t Camera3Stream::finishConfiguration(camera3_device *hal3Device) {
// Check if the stream configuration is unchanged, and skip reallocation if
// so. As documented in hardware/camera3.h:configure_streams().
if (mState == STATE_IN_RECONFIG &&
- oldUsage == usage &&
- oldMaxBuffers == max_buffers) {
+ oldUsage == camera3_stream::usage &&
+ oldMaxBuffers == camera3_stream::max_buffers) {
mState = STATE_CONFIGURED;
return OK;
}
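
Editor's note: startConfiguration() now snapshots the previous usage and max_buffers and refreshes camera3_stream::usage from the endpoint, so finishConfiguration() can skip reallocating buffers when nothing the HAL cares about has changed, as permitted by hardware/camera3.h:configure_streams(). A small standalone sketch of that cache-and-compare pattern, not the actual Camera3Stream code:

    #include <stdint.h>
    #include <stdio.h>

    // Illustrative only: cache the externally visible stream parameters before
    // a reconfiguration, and skip the expensive reallocation if unchanged.
    struct StreamConfigCache {
        uint32_t oldUsage;
        uint32_t oldMaxBuffers;

        void beginConfiguration(uint32_t usage, uint32_t maxBuffers) {
            oldUsage = usage;
            oldMaxBuffers = maxBuffers;
        }

        bool needsReallocation(uint32_t usage, uint32_t maxBuffers) const {
            return usage != oldUsage || maxBuffers != oldMaxBuffers;
        }
    };

    int main() {
        StreamConfigCache cache;
        cache.beginConfiguration(/*usage*/ 0x100, /*maxBuffers*/ 4);
        // ... HAL fills in new usage/max_buffers during configure_streams() ...
        printf("reallocate? %d\n", cache.needsReallocation(0x100, 4));  // 0
        return 0;
    }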
diff --git a/services/camera/libcameraservice/camera3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 69d81e4..b64fd86 100644
--- a/services/camera/libcameraservice/camera3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -263,6 +263,10 @@ class Camera3Stream :
// Get the total number of buffers in the queue
virtual size_t getBufferCountLocked() = 0;
+ // Get the usage flags for the other endpoint, or return
+ // INVALID_OPERATION if they cannot be obtained.
+ virtual status_t getEndpointUsage(uint32_t *usage) = 0;
+
private:
uint32_t oldUsage;
uint32_t oldMaxBuffers;
diff --git a/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h
index 62ea6c0..62ea6c0 100644
--- a/services/camera/libcameraservice/camera3/Camera3StreamBufferListener.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h
diff --git a/services/camera/libcameraservice/camera3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 4768536..4768536 100644
--- a/services/camera/libcameraservice/camera3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
index 8790c8c..04f5dc5 100644
--- a/services/camera/libcameraservice/camera3/Camera3ZslStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.cpp
@@ -113,11 +113,11 @@ Camera3ZslStream::Camera3ZslStream(int id, uint32_t width, uint32_t height,
Camera3OutputStream(id, CAMERA3_STREAM_BIDIRECTIONAL,
width, height,
HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED),
- mDepth(depth),
- mProducer(new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL,
- depth)) {
+ mDepth(depth) {
- mConsumer = new Surface(mProducer->getProducerInterface());
+ sp<BufferQueue> bq = new BufferQueue();
+ mProducer = new RingBufferConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, depth);
+ mConsumer = new Surface(bq);
}
Camera3ZslStream::~Camera3ZslStream() {
diff --git a/services/camera/libcameraservice/camera3/Camera3ZslStream.h b/services/camera/libcameraservice/device3/Camera3ZslStream.h
index c7f4490..c7f4490 100644
--- a/services/camera/libcameraservice/camera3/Camera3ZslStream.h
+++ b/services/camera/libcameraservice/device3/Camera3ZslStream.h
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 8141f4e..ebc7ea7 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -34,13 +34,14 @@ typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;
namespace android {
-RingBufferConsumer::RingBufferConsumer(uint32_t consumerUsage,
+RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
+ uint32_t consumerUsage,
int bufferCount) :
- ConsumerBase(new BufferQueue()),
+ ConsumerBase(consumer),
mBufferCount(bufferCount)
{
- mBufferQueue->setConsumerUsageBits(consumerUsage);
- mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+ mConsumer->setConsumerUsageBits(consumerUsage);
+ mConsumer->setMaxAcquiredBufferCount(bufferCount);
assert(bufferCount > 0);
}
@@ -51,7 +52,7 @@ RingBufferConsumer::~RingBufferConsumer() {
void RingBufferConsumer::setName(const String8& name) {
Mutex::Autolock _l(mMutex);
mName = name;
- mBufferQueue->setConsumerName(name);
+ mConsumer->setConsumerName(name);
}
sp<PinnedBufferItem> RingBufferConsumer::pinSelectedBuffer(
@@ -342,17 +343,17 @@ void RingBufferConsumer::unpinBuffer(const BufferItem& item) {
status_t RingBufferConsumer::setDefaultBufferSize(uint32_t w, uint32_t h) {
Mutex::Autolock _l(mMutex);
- return mBufferQueue->setDefaultBufferSize(w, h);
+ return mConsumer->setDefaultBufferSize(w, h);
}
status_t RingBufferConsumer::setDefaultBufferFormat(uint32_t defaultFormat) {
Mutex::Autolock _l(mMutex);
- return mBufferQueue->setDefaultBufferFormat(defaultFormat);
+ return mConsumer->setDefaultBufferFormat(defaultFormat);
}
status_t RingBufferConsumer::setConsumerUsage(uint32_t usage) {
Mutex::Autolock _l(mMutex);
- return mBufferQueue->setConsumerUsageBits(usage);
+ return mConsumer->setConsumerUsageBits(usage);
}
} // namespace android
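
Editor's note: RingBufferConsumer no longer builds its own BufferQueue; the caller now supplies the consumer end, mirroring the CpuConsumer change above, so the same queue can also back a producer-side Surface (as the Camera3ZslStream hunk shows). A caller-side sketch under the new signature; the helper name and include path are assumptions:

    #include <gui/BufferQueue.h>
    #include <gui/Surface.h>
    #include <hardware/gralloc.h>
    #include "gui/RingBufferConsumer.h"

    using namespace android;

    // Illustrative only: one BufferQueue, consumer end driven by the ring
    // buffer, producer end exposed through a Surface.
    static void createZslEndpoints(sp<RingBufferConsumer> *ring,
                                   sp<Surface> *window,
                                   int depth) {
        sp<BufferQueue> bq = new BufferQueue();
        *ring = new RingBufferConsumer(bq, GRALLOC_USAGE_HW_CAMERA_ZSL, depth);
        *window = new Surface(bq);
    }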
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 454fbae..b4ad824 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -63,7 +63,7 @@ class RingBufferConsumer : public ConsumerBase,
// the consumer usage flags passed to the graphics allocator. The
// bufferCount parameter specifies how many buffers can be pinned for user
// access at the same time.
- RingBufferConsumer(uint32_t consumerUsage,
+ RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint32_t consumerUsage,
int bufferCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS);
virtual ~RingBufferConsumer();
@@ -72,8 +72,6 @@ class RingBufferConsumer : public ConsumerBase,
// log messages.
void setName(const String8& name);
- sp<IGraphicBufferProducer> getProducerInterface() const { return getBufferQueue(); }
-
// setDefaultBufferSize is used to set the size of buffers returned by
    // requestBuffers when a width and height of zero is requested.
status_t setDefaultBufferSize(uint32_t w, uint32_t h);