author     James Dong <jdong@google.com>  2010-10-08 11:59:32 -0700
committer  James Dong <jdong@google.com>  2010-10-08 18:32:43 -0700
commit     54ff19ac69ace7c05ea90d225e26dab3b133f487 (patch)
tree       abf0b910f9a40a2409d7e725c99c6144238d0625 /include/media/stagefright
parent     fa6a5d481474ba11517c8d0eb6431595d387b81e (diff)
Move Camera specific logic out from StagefrightRecorder to CameraSource
o updated comments and streamlined the logic in checkVideoSize() and checkFrameRate() as suggested

Change-Id: I49d04ac7998d4a215997aa63555dfb6e814e38d3
Diffstat (limited to 'include/media/stagefright')
-rw-r--r--  include/media/stagefright/CameraSource.h           83
-rw-r--r--  include/media/stagefright/CameraSourceTimeLapse.h   27
2 files changed, 93 insertions(+), 17 deletions(-)
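
Caller-side sketch (not part of this commit): with this change, CreateFromCamera() takes the camera binder, a camera id, the video size, the frame rate, and a preview surface, and callers are expected to verify the result with initCheck(). The helper name and the 1280x720 / 30 fps values below are assumptions for illustration only.

#include <media/stagefright/CameraSource.h>

using namespace android;

// Hypothetical helper: builds a CameraSource with the new factory signature.
sp<CameraSource> setUpCameraSource(
        const sp<ICamera> &camera,          // may be NULL to connect by cameraId
        const sp<Surface> &previewSurface) {
    Size videoSize;
    videoSize.width = 1280;                 // placeholder recording size
    videoSize.height = 720;
    int32_t frameRate = 30;                 // placeholder target frame rate
    int32_t cameraId = 0;                   // only consulted when camera is NULL

    sp<CameraSource> source = CameraSource::CreateFromCamera(
            camera, cameraId, videoSize, frameRate, previewSurface);

    // The factory returns NULL on error; initCheck() reports failures found
    // while connecting to and configuring the camera.
    if (source == NULL || source->initCheck() != OK) {
        return NULL;
    }
    return source;
}
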
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index ed5f09f..b0bce29 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -20,25 +20,71 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
#include <utils/List.h>
#include <utils/RefBase.h>
namespace android {
-class ICamera;
class IMemory;
class Camera;
+class Surface;
class CameraSource : public MediaSource, public MediaBufferObserver {
public:
+ /**
+ * Factory method to create a new CameraSource using the current
+ * settings (such as video size, frame rate, color format, etc)
+ * from the default camera.
+ *
+ * @return NULL on error.
+ */
static CameraSource *Create();
- static CameraSource *CreateFromCamera(const sp<Camera> &camera);
+
+ /**
+ * Factory method to create a new CameraSource.
+ *
+ * @param camera the video input frame data source. If it is NULL,
+ * we will try to connect to the camera with the given
+ * cameraId.
+ *
+ * @param cameraId the id of the camera that the source will connect
+ * to if camera is NULL; otherwise ignored.
+ *
+ * @param videoSize the dimension (in pixels) of the video frame
+ * @param frameRate the target frames per second
+ * @param surface the preview surface for display where preview
+ * frames are sent to
+ *
+ * @return NULL on error.
+ */
+ static CameraSource *CreateFromCamera(const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface);
virtual ~CameraSource();
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
+ /**
+ * Check whether a CameraSource object is properly initialized.
+ * Must call this method before stop().
+ * @return OK if initialization has successfully completed.
+ */
+ virtual status_t initCheck() const;
+
+ /**
+ * Returns the MetaData associated with the CameraSource,
+ * including:
+ * kKeyColorFormat: YUV color format of the video frames
+ * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
+ * kKeySampleRate: frame rate in frames per second
+ * kKeyMimeType: always fixed
+ */
virtual sp<MetaData> getFormat();
virtual status_t read(
@@ -47,7 +93,19 @@ public:
virtual void signalBufferReturned(MediaBuffer* buffer);
protected:
- sp<Camera> mCamera;
+ enum CameraFlags {
+ FLAGS_SET_CAMERA = 1L << 0,
+ FLAGS_HOT_CAMERA = 1L << 1,
+ };
+
+ int32_t mCameraFlags;
+ Size mVideoSize;
+ int32_t mVideoFrameRate;
+ int32_t mColorFormat;
+ status_t mInitCheck;
+
+ sp<Camera> mCamera;
+ sp<Surface> mSurface;
sp<MetaData> mMeta;
int64_t mStartTimeUs;
@@ -55,7 +113,9 @@ protected:
int64_t mLastFrameTimestampUs;
bool mStarted;
- CameraSource(const sp<Camera> &camera);
+ CameraSource(const sp<ICamera>& camera, int32_t cameraId,
+ Size videoSize, int32_t frameRate,
+ const sp<Surface>& surface);
virtual void startCameraRecording();
virtual void stopCameraRecording();
@@ -91,6 +151,21 @@ private:
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
+
+ status_t init(const sp<ICamera>& camera, int32_t cameraId,
+ Size videoSize, int32_t frameRate);
+ status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId);
+ status_t isCameraColorFormatSupported(const CameraParameters& params);
+ status_t configureCamera(CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate);
+
+ status_t checkVideoSize(const CameraParameters& params,
+ int32_t width, int32_t height);
+
+ status_t checkFrameRate(const CameraParameters& params,
+ int32_t frameRate);
+
CameraSource(const CameraSource &);
CameraSource &operator=(const CameraSource &);
};
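
For context, a minimal consumer sketch under the usual MediaSource contract documented in the header above (start(), repeated read(), buffer release, stop()); the function name, the fixed read count, and the frame handling are placeholders, not part of this change.

#include <media/stagefright/CameraSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MetaData.h>

using namespace android;

// Hypothetical consumer loop: pulls a few frames and returns each one to the
// camera by releasing the MediaBuffer, which is reported back through
// signalBufferReturned().
status_t drainSomeFrames(const sp<CameraSource> &source) {
    status_t err = source->start();
    if (err != OK) {
        return err;
    }

    int32_t width = 0, height = 0;
    sp<MetaData> meta = source->getFormat();
    meta->findInt32(kKeyWidth, &width);     // dimensions advertised in the format
    meta->findInt32(kKeyHeight, &height);

    for (int i = 0; i < 30 && err == OK; ++i) {
        MediaBuffer *buffer = NULL;
        err = source->read(&buffer);
        if (err == OK) {
            // ... hand buffer->data() / buffer->range_length() to an encoder ...
            buffer->release();              // hands the frame back to the camera
        }
    }

    source->stop();
    return err;
}
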
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
index 3b303f8..afe7287 100644
--- a/include/media/stagefright/CameraSourceTimeLapse.h
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -31,15 +31,13 @@ class Camera;
class CameraSourceTimeLapse : public CameraSource {
public:
- static CameraSourceTimeLapse *Create(
- int64_t timeBetweenTimeLapseFrameCaptureUs,
- int32_t width, int32_t height,
- int32_t videoFrameRate);
-
- static CameraSourceTimeLapse *CreateFromCamera(const sp<Camera> &camera,
- int64_t timeBetweenTimeLapseFrameCaptureUs,
- int32_t width, int32_t height,
- int32_t videoFrameRate);
+ static CameraSourceTimeLapse *CreateFromCamera(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
virtual ~CameraSourceTimeLapse();
@@ -132,10 +130,13 @@ private:
// Status code for last read.
status_t mLastReadStatus;
- CameraSourceTimeLapse(const sp<Camera> &camera,
- int64_t timeBetweenTimeLapseFrameCaptureUs,
- int32_t width, int32_t height,
- int32_t videoFrameRate);
+ CameraSourceTimeLapse(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
// Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
// It only handles the case when mLastReadBufferCopy is signalled. Otherwise