Diffstat (limited to 'include')
36 files changed, 1579 insertions, 71 deletions
diff --git a/include/android_runtime/AndroidRuntime.h b/include/android_runtime/AndroidRuntime.h index 09f0de1..22c9b72 100644 --- a/include/android_runtime/AndroidRuntime.h +++ b/include/android_runtime/AndroidRuntime.h @@ -30,7 +30,9 @@ namespace android { - + +class CursorWindow; + class AndroidRuntime { public: @@ -122,6 +124,8 @@ private: // Returns the Unix file descriptor for a ParcelFileDescriptor object extern int getParcelFileDescriptorFD(JNIEnv* env, jobject object); +extern CursorWindow * get_window_from_object(JNIEnv * env, jobject javaWindow); + } #endif diff --git a/include/binder/CursorWindow.h b/include/binder/CursorWindow.h new file mode 100644 index 0000000..4fbff2a --- /dev/null +++ b/include/binder/CursorWindow.h @@ -0,0 +1,200 @@ +/* + * Copyright (C) 2006 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _ANDROID__DATABASE_WINDOW_H +#define _ANDROID__DATABASE_WINDOW_H + +#include <cutils/log.h> +#include <stddef.h> +#include <stdint.h> + +#include <binder/IMemory.h> +#include <utils/RefBase.h> + +#define DEFAULT_WINDOW_SIZE 4096 +#define MAX_WINDOW_SIZE (1024 * 1024) +#define WINDOW_ALLOCATION_SIZE 4096 + +#define ROW_SLOT_CHUNK_NUM_ROWS 16 + +// Row slots are allocated in chunks of ROW_SLOT_CHUNK_NUM_ROWS, +// with an offset after the rows that points to the next chunk +#define ROW_SLOT_CHUNK_SIZE ((ROW_SLOT_CHUNK_NUM_ROWS * sizeof(row_slot_t)) + sizeof(uint32_t)) + + +#if LOG_NDEBUG + +#define IF_LOG_WINDOW() if (false) +#define LOG_WINDOW(...) + +#else + +#define IF_LOG_WINDOW() IF_LOG(LOG_DEBUG, "CursorWindow") +#define LOG_WINDOW(...) LOG(LOG_DEBUG, "CursorWindow", __VA_ARGS__) + +#endif + + +// When defined to true strings are stored as UTF8, otherwise they're UTF16 +#define WINDOW_STORAGE_UTF8 1 + +// When defined to true numberic values are stored inline in the field_slot_t, otherwise they're allocated in the window +#define WINDOW_STORAGE_INLINE_NUMERICS 1 + +namespace android { + +typedef struct +{ + uint32_t numRows; + uint32_t numColumns; +} window_header_t; + +typedef struct +{ + uint32_t offset; +} row_slot_t; + +typedef struct +{ + uint8_t type; + union { + double d; + int64_t l; + struct { + uint32_t offset; + uint32_t size; + } buffer; + } data; +} __attribute__((packed)) field_slot_t; + +#define FIELD_TYPE_NULL 0 +#define FIELD_TYPE_INTEGER 1 +#define FIELD_TYPE_FLOAT 2 +#define FIELD_TYPE_STRING 3 +#define FIELD_TYPE_BLOB 4 + +/** + * This class stores a set of rows from a database in a buffer. The begining of the + * window has first chunk of row_slot_ts, which are offsets to the row directory, followed by + * an offset to the next chunk in a linked-list of additional chunk of row_slot_ts in case + * the pre-allocated chunk isn't big enough to refer to all rows. Each row directory has a + * field_slot_t per column, which has the size, offset, and type of the data for that field. + * Note that the data types come from sqlite3.h. 
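+ *
+ * A rough usage sketch (illustrative only, using the accessors declared below):
+ *
+ *   CursorWindow window(MAX_WINDOW_SIZE);
+ *   window.initBuffer(true);                    // local, non-shared buffer
+ *   window.setNumColumns(2);
+ *   field_slot_t *fieldDir = window.allocRow(); // NULL if the window is full
+ *   if (fieldDir != NULL) {
+ *       window.putLong(0, 0, 42);               // stored inline when
+ *                                               // WINDOW_STORAGE_INLINE_NUMERICS is set
+ *       int64_t value;
+ *       window.getLong(0, 0, &value);
+ *   }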
+ */ +class CursorWindow +{ +public: + CursorWindow(size_t maxSize); + CursorWindow(){} + bool setMemory(const sp<IMemory>&); + ~CursorWindow(); + + bool initBuffer(bool localOnly); + sp<IMemory> getMemory() {return mMemory;} + + size_t size() {return mSize;} + uint8_t * data() {return mData;} + uint32_t getNumRows() {return mHeader->numRows;} + uint32_t getNumColumns() {return mHeader->numColumns;} + void freeLastRow() { + if (mHeader->numRows > 0) { + mHeader->numRows--; + } + } + bool setNumColumns(uint32_t numColumns) + { + uint32_t cur = mHeader->numColumns; + if (cur > 0 && cur != numColumns) { + LOGE("Trying to go from %d columns to %d", cur, numColumns); + return false; + } + mHeader->numColumns = numColumns; + return true; + } + + int32_t freeSpace(); + + void clear(); + + /** + * Allocate a row slot and its directory. The returned + * pointer points to the begining of the row's directory + * or NULL if there wasn't room. The directory is + * initialied with NULL entries for each field. + */ + field_slot_t * allocRow(); + + /** + * Allocate a portion of the window. Returns the offset + * of the allocation, or 0 if there isn't enough space. + * If aligned is true, the allocation gets 4 byte alignment. + */ + uint32_t alloc(size_t size, bool aligned = false); + + uint32_t read_field_slot(int row, int column, field_slot_t * slot); + + /** + * Copy data into the window at the given offset. + */ + void copyIn(uint32_t offset, uint8_t const * data, size_t size); + void copyIn(uint32_t offset, int64_t data); + void copyIn(uint32_t offset, double data); + + void copyOut(uint32_t offset, uint8_t * data, size_t size); + int64_t copyOutLong(uint32_t offset); + double copyOutDouble(uint32_t offset); + + bool putLong(unsigned int row, unsigned int col, int64_t value); + bool putDouble(unsigned int row, unsigned int col, double value); + bool putNull(unsigned int row, unsigned int col); + + bool getLong(unsigned int row, unsigned int col, int64_t * valueOut); + bool getDouble(unsigned int row, unsigned int col, double * valueOut); + bool getNull(unsigned int row, unsigned int col, bool * valueOut); + + uint8_t * offsetToPtr(uint32_t offset) {return mData + offset;} + + row_slot_t * allocRowSlot(); + + row_slot_t * getRowSlot(int row); + + /** + * return NULL if Failed to find rowSlot or + * Invalid rowSlot + */ + field_slot_t * getFieldSlotWithCheck(int row, int column); + field_slot_t * getFieldSlot(int row, int column) + { + int fieldDirOffset = getRowSlot(row)->offset; + return ((field_slot_t *)offsetToPtr(fieldDirOffset)) + column; + } + +private: + uint8_t * mData; + size_t mSize; + size_t mMaxSize; + window_header_t * mHeader; + sp<IMemory> mMemory; + + /** + * Offset of the lowest unused data byte in the array. + */ + uint32_t mFreeOffset; +}; + +}; // namespace android + +#endif diff --git a/include/camera/Camera.h b/include/camera/Camera.h index e6d84ba..171a3b6 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -22,8 +22,6 @@ namespace android { -class ISurface; - /* * A set of bit masks for specifying how the received preview frames are * handled before the previewCallback() call. @@ -83,19 +81,15 @@ enum { enum { CAMERA_CMD_START_SMOOTH_ZOOM = 1, CAMERA_CMD_STOP_SMOOTH_ZOOM = 2, - // Set the clockwise rotation of preview display (setPreviewDisplay) in - // degrees. This affects the preview frames and the picture displayed after - // snapshot. This method is useful for portrait mode applications. 
Note that - // preview display of front-facing cameras is flipped horizontally before - // the rotation, that is, the image is reflected along the central vertical - // axis of the camera sensor. So the users can see themselves as looking - // into a mirror. - // - // This does not affect the order of byte array of CAMERA_MSG_PREVIEW_FRAME, - // CAMERA_MSG_VIDEO_FRAME, CAMERA_MSG_POSTVIEW_FRAME, CAMERA_MSG_RAW_IMAGE, - // or CAMERA_MSG_COMPRESSED_IMAGE. This is not allowed to be set during - // preview. CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3, + + // cmdType to disable/enable shutter sound. + // In sendCommand passing arg1 = 0 will disable, + // while passing arg1 = 1 will enable the shutter sound. + CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4, + + // cmdType to play recording sound. + CAMERA_CMD_PLAY_RECORDING_SOUND = 5, }; // camera fatal errors @@ -164,9 +158,8 @@ public: status_t getStatus() { return mStatus; } - // pass the buffered ISurface to the camera service + // pass the buffered Surface to the camera service status_t setPreviewDisplay(const sp<Surface>& surface); - status_t setPreviewDisplay(const sp<ISurface>& surface); // start preview mode, must call setPreviewDisplay first status_t startPreview(); @@ -207,6 +200,15 @@ public: // send command to camera driver status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2); + // return the total number of available video buffers. + int32_t getNumberOfVideoBuffers() const; + + // return the individual video buffer corresponding to the given index. + sp<IMemory> getVideoBuffer(int32_t index) const; + + // tell camera hal to store meta data or real YUV in video buffers. + status_t storeMetaDataInBuffers(bool enabled); + void setListener(const sp<CameraListener>& listener); void setPreviewCallbackFlags(int preview_callback_flag); diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h index 6a66e3c..5465441 100644 --- a/include/camera/CameraHardwareInterface.h +++ b/include/camera/CameraHardwareInterface.h @@ -18,8 +18,11 @@ #define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H #include <binder/IMemory.h> +#include <ui/egl/android_natives.h> #include <utils/RefBase.h> #include <surfaceflinger/ISurface.h> +#include <ui/android_native_buffer.h> +#include <ui/GraphicBuffer.h> #include <camera/Camera.h> #include <camera/CameraParameters.h> @@ -86,8 +89,8 @@ class CameraHardwareInterface : public virtual RefBase { public: virtual ~CameraHardwareInterface() { } - /** Return the IMemoryHeap for the preview image heap */ - virtual sp<IMemoryHeap> getPreviewHeap() const = 0; + /** Set the ANativeWindow to which preview frames are sent */ + virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0; /** Return the IMemoryHeap for the raw image heap */ virtual sp<IMemoryHeap> getRawHeap() const = 0; @@ -143,6 +146,82 @@ public: virtual bool previewEnabled() = 0; /** + * Retrieve the total number of available buffers from camera hal for passing + * video frame data in a recording session. Must be called again if a new + * recording session is started. + * + * This method should be called after startRecording(), since + * the some camera hal may choose to allocate the video buffers only after + * recording is started. + * + * Some camera hal may not implement this method, and 0 can be returned to + * indicate that this feature is not available. + * + * @return the number of video buffers that camera hal makes available. 
+ * Zero (0) is returned to indicate that camera hal does not support + * this feature. + */ + virtual int32_t getNumberOfVideoBuffers() const { return 0; } + + /** + * Retrieve the video buffer corresponding to the given index in a + * recording session. Must be called again if a new recording session + * is started. + * + * It allows a client to retrieve all video buffers that camera hal makes + * available to passing video frame data by calling this method with all + * valid index values. The valid index value ranges from 0 to n, where + * n = getNumberOfVideoBuffers() - 1. With an index outside of the valid + * range, 0 must be returned. This method should be called after + * startRecording(). + * + * The video buffers should NOT be modified/released by camera hal + * until stopRecording() is called and all outstanding video buffers + * previously sent out via CAMERA_MSG_VIDEO_FRAME have been released + * via releaseVideoBuffer(). + * + * @param index an index to retrieve the corresponding video buffer. + * + * @return the video buffer corresponding to the given index. + */ + virtual sp<IMemory> getVideoBuffer(int32_t index) const { return 0; } + + /** + * Request the camera hal to store meta data or real YUV data in + * the video buffers send out via CAMERA_MSG_VIDEO_FRRAME for a + * recording session. If it is not called, the default camera + * hal behavior is to store real YUV data in the video buffers. + * + * This method should be called before startRecording() in order + * to be effective. + * + * If meta data is stored in the video buffers, it is up to the + * receiver of the video buffers to interpret the contents and + * to find the actual frame data with the help of the meta data + * in the buffer. How this is done is outside of the scope of + * this method. + * + * Some camera hal may not support storing meta data in the video + * buffers, but all camera hal should support storing real YUV data + * in the video buffers. If the camera hal does not support storing + * the meta data in the video buffers when it is requested to do + * do, INVALID_OPERATION must be returned. It is very useful for + * the camera hal to pass meta data rather than the actual frame + * data directly to the video encoder, since the amount of the + * uncompressed frame data can be very large if video size is large. + * + * @param enable if true to instruct the camera hal to store + * meta data in the video buffers; false to instruct + * the camera hal to store real YUV data in the video + * buffers. + * + * @return OK on success. + */ + virtual status_t storeMetaDataInBuffers(bool enable) { + return enable? INVALID_OPERATION: OK; + } + + /** * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME * message is sent with the corresponding frame. Every record frame must be released * by calling releaseRecordingFrame(). diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h index 53039a0..60031a4 100644 --- a/include/camera/CameraParameters.h +++ b/include/camera/CameraParameters.h @@ -59,6 +59,27 @@ public: void setPreviewSize(int width, int height); void getPreviewSize(int *width, int *height) const; void getSupportedPreviewSizes(Vector<Size> &sizes) const; + + // Set the dimensions in pixels to the given width and height + // for video frames. The given width and height must be one + // of the supported dimensions returned from + // getSupportedVideoSizes(). Must not be called if + // getSupportedVideoSizes() returns an empty Vector of Size. 
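+ //
+ // A rough usage sketch (the camera handle below is illustrative):
+ //   CameraParameters params(camera->getParameters());
+ //   Vector<Size> sizes;
+ //   params.getSupportedVideoSizes(sizes);
+ //   if (!sizes.isEmpty()) {
+ //       params.setVideoSize(sizes[0].width, sizes[0].height);
+ //       camera->setParameters(params.flatten());
+ //   }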
+ void setVideoSize(int width, int height); + // Retrieve the current dimensions (width and height) + // in pixels for video frames, which must be one of the + // supported dimensions returned from getSupportedVideoSizes(). + // Must not be called if getSupportedVideoSizes() returns an + // empty Vector of Size. + void getVideoSize(int *width, int *height) const; + // Retrieve a Vector of supported dimensions (width and height) + // in pixels for video frames. If sizes returned from the method + // is empty, the camera does not support calls to setVideoSize() + // or getVideoSize(). In adddition, it also indicates that + // the camera only has a single output, and does not have + // separate output for video frames and preview frame. + void getSupportedVideoSizes(Vector<Size> &sizes) const; + void setPreviewFrameRate(int fps); int getPreviewFrameRate() const; void getPreviewFpsRange(int *min_fps, int *max_fps) const; @@ -281,6 +302,16 @@ public: // Example value: "0.95,1.9,Infinity" or "0.049,0.05,0.051". Read only. static const char KEY_FOCUS_DISTANCES[]; + // The current dimensions in pixels (width x height) for video frames. + // The width and height must be one of the supported sizes retrieved + // via KEY_SUPPORTED_VIDEO_SIZES. + // Example value: "1280x720". Read/write. + static const char KEY_VIDEO_SIZE[]; + // A list of the supported dimensions in pixels (width x height) + // for video frames. See CAMERA_MSG_VIDEO_FRAME for details in + // frameworks/base/include/camera/Camera.h. + // Example: "176x144,1280x720". Read only. + static const char KEY_SUPPORTED_VIDEO_SIZES[]; // The image format for video frames. See CAMERA_MSG_VIDEO_FRAME in // frameworks/base/include/camera/Camera.h. // Example value: "yuv420sp" or PIXEL_FORMAT_XXX constants. Read only. @@ -354,7 +385,10 @@ public: // for barcode reading. static const char SCENE_MODE_BARCODE[]; - // Formats for setPreviewFormat and setPictureFormat. + // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT, + // and KEY_VIDEO_FRAME_FORMAT + // Planar variant of the YUV420 color format + static const char PIXEL_FORMAT_YUV420P[]; static const char PIXEL_FORMAT_YUV422SP[]; static const char PIXEL_FORMAT_YUV420SP[]; // NV21 static const char PIXEL_FORMAT_YUV422I[]; // YUY2 diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h index 6fcf9e5..b69e075 100644 --- a/include/camera/ICamera.h +++ b/include/camera/ICamera.h @@ -20,7 +20,7 @@ #include <utils/RefBase.h> #include <binder/IInterface.h> #include <binder/Parcel.h> -#include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> #include <binder/IMemory.h> #include <utils/String8.h> #include <camera/Camera.h> @@ -45,8 +45,8 @@ public: // allow other processes to use this ICamera interface virtual status_t unlock() = 0; - // pass the buffered ISurface to the camera service - virtual status_t setPreviewDisplay(const sp<ISurface>& surface) = 0; + // pass the buffered Surface to the camera service + virtual status_t setPreviewDisplay(const sp<Surface>& surface) = 0; // set the preview callback flag to affect how the received frames from // preview are handled. @@ -90,6 +90,15 @@ public: // send command to camera driver virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0; + + // return the total number of available video buffers + virtual int32_t getNumberOfVideoBuffers() const = 0; + + // return the individual video buffer corresponding to the given index. 
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const = 0; + + // tell the camera hal to store meta data or real YUV data in video buffers. + virtual status_t storeMetaDataInBuffers(bool enabled) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h index af9a7ed..a1ce113 100644 --- a/include/media/IMediaPlayer.h +++ b/include/media/IMediaPlayer.h @@ -25,6 +25,7 @@ namespace android { class Parcel; class ISurface; +class Surface; class IMediaPlayer: public IInterface { @@ -33,7 +34,8 @@ public: virtual void disconnect() = 0; - virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0; + virtual status_t setVideoISurface(const sp<ISurface>& surface) = 0; + virtual status_t setVideoSurface(const sp<Surface>& surface) = 0; virtual status_t prepareAsync() = 0; virtual status_t start() = 0; virtual status_t stop() = 0; diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h index 54adca8..28be7c1 100644 --- a/include/media/IMediaRecorder.h +++ b/include/media/IMediaRecorder.h @@ -22,7 +22,7 @@ namespace android { -class ISurface; +class Surface; class ICamera; class IMediaRecorderClient; @@ -32,7 +32,7 @@ public: DECLARE_META_INTERFACE(MediaRecorder); virtual status_t setCamera(const sp<ICamera>& camera) = 0; - virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0; + virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0; virtual status_t setVideoSource(int vs) = 0; virtual status_t setAudioSource(int as) = 0; virtual status_t setOutputFormat(int of) = 0; @@ -40,6 +40,7 @@ public: virtual status_t setAudioEncoder(int ae) = 0; virtual status_t setOutputFile(const char* path) = 0; virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0; + virtual status_t setOutputFileAuxiliary(int fd) = 0; virtual status_t setVideoSize(int width, int height) = 0; virtual status_t setVideoFrameRate(int frames_per_second) = 0; virtual status_t setParameters(const String8& params) = 0; @@ -68,4 +69,3 @@ public: }; // namespace android #endif // ANDROID_IMEDIARECORDER_H - diff --git a/include/media/IOMX.h b/include/media/IOMX.h index 2f61cbe..1f8ce71 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -19,6 +19,7 @@ #define ANDROID_IOMX_H_ #include <binder/IInterface.h> +#include <ui/GraphicBuffer.h> #include <utils/List.h> #include <utils/String8.h> @@ -78,10 +79,17 @@ public: node_id node, OMX_INDEXTYPE index, const void *params, size_t size) = 0; + virtual status_t enableGraphicBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0; + virtual status_t useBuffer( node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, buffer_id *buffer) = 0; + virtual status_t useGraphicBuffer( + node_id node, OMX_U32 port_index, + const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0; + // This API clearly only makes sense if the caller lives in the // same process as the callee, i.e. 
is the media_server, as the // returned "buffer_data" pointer is just that, a pointer into local diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h index 0521709..eae0d7b 100644 --- a/include/media/MediaPlayerInterface.h +++ b/include/media/MediaPlayerInterface.h @@ -33,6 +33,7 @@ namespace android { class Parcel; class ISurface; +class Surface; template<typename T> class SortedVector; @@ -105,7 +106,8 @@ public: const KeyedVector<String8, String8> *headers = NULL) = 0; virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0; - virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0; + virtual status_t setVideoISurface(const sp<ISurface>& surface) = 0; + virtual status_t setVideoSurface(const sp<Surface>& surface) = 0; virtual status_t prepare() = 0; virtual status_t prepareAsync() = 0; virtual status_t start() = 0; diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h index c3cd361..aa97874 100644 --- a/include/media/MediaProfiles.h +++ b/include/media/MediaProfiles.h @@ -25,7 +25,20 @@ namespace android { enum camcorder_quality { CAMCORDER_QUALITY_LOW = 0, - CAMCORDER_QUALITY_HIGH = 1 + CAMCORDER_QUALITY_HIGH = 1, + CAMCORDER_QUALITY_QCIF = 2, + CAMCORDER_QUALITY_CIF = 3, + CAMCORDER_QUALITY_480P = 4, + CAMCORDER_QUALITY_720P = 5, + CAMCORDER_QUALITY_1080P = 6, + + CAMCORDER_QUALITY_TIME_LAPSE_LOW = 1000, + CAMCORDER_QUALITY_TIME_LAPSE_HIGH = 1001, + CAMCORDER_QUALITY_TIME_LAPSE_QCIF = 1002, + CAMCORDER_QUALITY_TIME_LAPSE_CIF = 1003, + CAMCORDER_QUALITY_TIME_LAPSE_480P = 1004, + CAMCORDER_QUALITY_TIME_LAPSE_720P = 1005, + CAMCORDER_QUALITY_TIME_LAPSE_1080P = 1006 }; enum video_decoder { @@ -68,6 +81,12 @@ public: camcorder_quality quality) const; /** + * Returns true if a profile for the given camera at the given quality exists, + * or false if not. + */ + bool hasCamcorderProfile(int cameraId, camcorder_quality quality) const; + + /** * Returns the output file formats supported. 
*/ Vector<output_format> getOutputFileFormats() const; @@ -252,6 +271,8 @@ private: Vector<int> mLevels; }; + int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const; + // Debug static void logVideoCodec(const VideoCodec& codec); static void logAudioCodec(const AudioCodec& codec); @@ -281,8 +302,25 @@ private: // If the xml configuration file does not exist, use hard-coded values static MediaProfiles* createDefaultInstance(); - static CamcorderProfile *createDefaultCamcorderLowProfile(); - static CamcorderProfile *createDefaultCamcorderHighProfile(); + + static CamcorderProfile *createDefaultCamcorderQcifProfile(camcorder_quality quality); + static CamcorderProfile *createDefaultCamcorderCifProfile(camcorder_quality quality); + static void createDefaultCamcorderLowProfiles( + MediaProfiles::CamcorderProfile **lowProfile, + MediaProfiles::CamcorderProfile **lowSpecificProfile); + static void createDefaultCamcorderHighProfiles( + MediaProfiles::CamcorderProfile **highProfile, + MediaProfiles::CamcorderProfile **highSpecificProfile); + + static CamcorderProfile *createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality); + static CamcorderProfile *createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality); + static void createDefaultCamcorderTimeLapseLowProfiles( + MediaProfiles::CamcorderProfile **lowTimeLapseProfile, + MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile); + static void createDefaultCamcorderTimeLapseHighProfiles( + MediaProfiles::CamcorderProfile **highTimeLapseProfile, + MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile); + static void createDefaultCamcorderProfiles(MediaProfiles *profiles); static void createDefaultVideoEncoders(MediaProfiles *profiles); static void createDefaultAudioEncoders(MediaProfiles *profiles); diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h index 5e9e368..c42346e 100644 --- a/include/media/MediaRecorderBase.h +++ b/include/media/MediaRecorderBase.h @@ -22,7 +22,7 @@ namespace android { -class ISurface; +class Surface; struct MediaRecorderBase { MediaRecorderBase() {} @@ -37,9 +37,10 @@ struct MediaRecorderBase { virtual status_t setVideoSize(int width, int height) = 0; virtual status_t setVideoFrameRate(int frames_per_second) = 0; virtual status_t setCamera(const sp<ICamera>& camera) = 0; - virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0; + virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0; virtual status_t setOutputFile(const char *path) = 0; virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0; + virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;} virtual status_t setParameters(const String8& params) = 0; virtual status_t setListener(const sp<IMediaRecorderClient>& listener) = 0; virtual status_t prepare() = 0; diff --git a/include/media/PVMediaRecorder.h b/include/media/PVMediaRecorder.h index c091c39..4b44ccc 100644 --- a/include/media/PVMediaRecorder.h +++ b/include/media/PVMediaRecorder.h @@ -23,7 +23,7 @@ namespace android { -class ISurface; +class Surface; class ICamera; class AuthorDriverWrapper; @@ -41,7 +41,7 @@ public: virtual status_t setVideoSize(int width, int height); virtual status_t setVideoFrameRate(int frames_per_second); virtual status_t setCamera(const sp<ICamera>& camera); - virtual status_t setPreviewSurface(const sp<ISurface>& surface); + virtual status_t setPreviewSurface(const sp<Surface>& surface); virtual status_t setOutputFile(const char 
*path); virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); virtual status_t setParameters(const String8& params); @@ -66,4 +66,3 @@ private: }; // namespace android #endif // ANDROID_PVMEDIARECORDER_H - diff --git a/include/media/PVPlayer.h b/include/media/PVPlayer.h index df50981..657e7a6 100644 --- a/include/media/PVPlayer.h +++ b/include/media/PVPlayer.h @@ -43,7 +43,8 @@ public: const char *url, const KeyedVector<String8, String8> *headers); virtual status_t setDataSource(int fd, int64_t offset, int64_t length); - virtual status_t setVideoSurface(const sp<ISurface>& surface); + virtual status_t setVideoISurface(const sp<ISurface>& surface); + virtual status_t setVideoSurface(const sp<Surface>& surface); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h index 5ab1640..32b6fa1 100644 --- a/include/media/mediarecorder.h +++ b/include/media/mediarecorder.h @@ -173,6 +173,7 @@ public: status_t setAudioEncoder(int ae); status_t setOutputFile(const char* path); status_t setOutputFile(int fd, int64_t offset, int64_t length); + status_t setOutputFileAuxiliary(int fd); status_t setVideoSize(int width, int height); status_t setVideoFrameRate(int frames_per_second); status_t setParameters(const String8& params); @@ -199,6 +200,7 @@ private: bool mIsAudioEncoderSet; bool mIsVideoEncoderSet; bool mIsOutputFileSet; + bool mIsAuxiliaryOutputFileSet; Mutex mLock; Mutex mNotifyLock; }; diff --git a/include/media/mediascanner.h b/include/media/mediascanner.h index 0d397ac..74c9d5d 100644 --- a/include/media/mediascanner.h +++ b/include/media/mediascanner.h @@ -38,8 +38,7 @@ struct MediaScanner { typedef bool (*ExceptionCheck)(void* env); virtual status_t processDirectory( - const char *path, const char *extensions, - MediaScannerClient &client, + const char *path, MediaScannerClient &client, ExceptionCheck exceptionCheck, void *exceptionEnv); void setLocale(const char *locale); @@ -55,9 +54,8 @@ private: char *mLocale; status_t doProcessDirectory( - char *path, int pathRemaining, const char *extensions, - MediaScannerClient &client, ExceptionCheck exceptionCheck, - void *exceptionEnv); + char *path, int pathRemaining, MediaScannerClient &client, + ExceptionCheck exceptionCheck, void *exceptionEnv); MediaScanner(const MediaScanner &); MediaScanner &operator=(const MediaScanner &); diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h index 3192d03..e6c9f93 100644 --- a/include/media/stagefright/CameraSource.h +++ b/include/media/stagefright/CameraSource.h @@ -20,39 +20,167 @@ #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaSource.h> +#include <camera/ICamera.h> +#include <camera/CameraParameters.h> #include <utils/List.h> #include <utils/RefBase.h> -#include <utils/threads.h> namespace android { -class ICamera; class IMemory; class Camera; +class Surface; class CameraSource : public MediaSource, public MediaBufferObserver { public: + /** + * Factory method to create a new CameraSource using the current + * settings (such as video size, frame rate, color format, etc) + * from the default camera. + * + * @return NULL on error. + */ static CameraSource *Create(); - static CameraSource *CreateFromCamera(const sp<Camera> &camera); + + /** + * Factory method to create a new CameraSource. + * + * @param camera the video input frame data source. 
If it is NULL, + * we will try to connect to the camera with the given + * cameraId. + * + * @param cameraId the id of the camera that the source will connect + * to if camera is NULL; otherwise ignored. + * + * @param videoSize the dimension (in pixels) of the video frame + * @param frameRate the target frames per second + * @param surface the preview surface for display where preview + * frames are sent to + * @param storeMetaDataInVideoBuffers true to request the camera + * source to store meta data in video buffers; false to + * request the camera source to store real YUV frame data + * in the video buffers. The camera source may not support + * storing meta data in video buffers, if so, a request + * to do that will NOT be honored. To find out whether + * meta data is actually being stored in video buffers + * during recording, call isMetaDataStoredInVideoBuffers(). + * + * @return NULL on error. + */ + static CameraSource *CreateFromCamera(const sp<ICamera> &camera, + int32_t cameraId, + Size videoSize, + int32_t frameRate, + const sp<Surface>& surface, + bool storeMetaDataInVideoBuffers = false); virtual ~CameraSource(); virtual status_t start(MetaData *params = NULL); virtual status_t stop(); + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + /** + * Check whether a CameraSource object is properly initialized. + * Must call this method before stop(). + * @return OK if initialization has successfully completed. + */ + virtual status_t initCheck() const; + + /** + * Returns the MetaData associated with the CameraSource, + * including: + * kKeyColorFormat: YUV color format of the video frames + * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames + * kKeySampleRate: frame rate in frames per second + * kKeyMIMEType: always fixed to be MEDIA_MIMETYPE_VIDEO_RAW + */ virtual sp<MetaData> getFormat(); - virtual status_t read( - MediaBuffer **buffer, const ReadOptions *options = NULL); + /** + * Retrieve the total number of video buffers available from + * this source. + * + * This method is useful if these video buffers are used + * for passing video frame data to other media components, + * such as OMX video encoders, in order to eliminate the + * memcpy of the data. + * + * @return the total numbner of video buffers. Returns 0 to + * indicate that this source does not make the video + * buffer information availalble. + */ + size_t getNumberOfVideoBuffers() const; + + /** + * Retrieve the individual video buffer available from + * this source. + * + * @param index the index corresponding to the video buffer. + * Valid range of the index is [0, n], where n = + * getNumberOfVideoBuffers() - 1. + * + * @return the video buffer corresponding to the given index. + * If index is out of range, 0 should be returned. + */ + sp<IMemory> getVideoBuffer(size_t index) const; + + /** + * Tell whether this camera source stores meta data or real YUV + * frame data in video buffers. + * + * @return true if meta data is stored in the video + * buffers; false if real YUV data is stored in + * the video buffers. 
+ */ + bool isMetaDataStoredInVideoBuffers() const; virtual void signalBufferReturned(MediaBuffer* buffer); -private: - friend class CameraSourceListener; +protected: + enum CameraFlags { + FLAGS_SET_CAMERA = 1L << 0, + FLAGS_HOT_CAMERA = 1L << 1, + }; + + int32_t mCameraFlags; + Size mVideoSize; + int32_t mVideoFrameRate; + int32_t mColorFormat; + status_t mInitCheck; - sp<Camera> mCamera; + sp<Camera> mCamera; + sp<Surface> mSurface; sp<MetaData> mMeta; + int64_t mStartTimeUs; + int32_t mNumFramesReceived; + int64_t mLastFrameTimestampUs; + bool mStarted; + + CameraSource(const sp<ICamera>& camera, int32_t cameraId, + Size videoSize, int32_t frameRate, + const sp<Surface>& surface, + bool storeMetaDataInVideoBuffers); + + virtual void startCameraRecording(); + virtual void stopCameraRecording(); + virtual void releaseRecordingFrame(const sp<IMemory>& frame); + + // Returns true if need to skip the current frame. + // Called from dataCallbackTimestamp. + virtual bool skipCurrentFrame(int64_t timestampUs) {return false;} + + // Callback called when still camera raw data is available. + virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {} + + virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, + const sp<IMemory> &data); + +private: + friend class CameraSourceListener; + Mutex mLock; Condition mFrameAvailableCondition; Condition mFrameCompleteCondition; @@ -60,25 +188,33 @@ private: List<sp<IMemory> > mFramesBeingEncoded; List<int64_t> mFrameTimes; - int64_t mStartTimeUs; int64_t mFirstFrameTimeUs; - int64_t mLastFrameTimestampUs; - int32_t mNumFramesReceived; int32_t mNumFramesEncoded; int32_t mNumFramesDropped; int32_t mNumGlitches; int64_t mGlitchDurationThresholdUs; bool mCollectStats; - bool mStarted; - - CameraSource(const sp<Camera> &camera); - - void dataCallbackTimestamp( - int64_t timestampUs, int32_t msgType, const sp<IMemory> &data); + bool mIsMetaDataStoredInVideoBuffers; void releaseQueuedFrames(); void releaseOneRecordingFrame(const sp<IMemory>& frame); + + status_t init(const sp<ICamera>& camera, int32_t cameraId, + Size videoSize, int32_t frameRate, + bool storeMetaDataInVideoBuffers); + status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId); + status_t isCameraColorFormatSupported(const CameraParameters& params); + status_t configureCamera(CameraParameters* params, + int32_t width, int32_t height, + int32_t frameRate); + + status_t checkVideoSize(const CameraParameters& params, + int32_t width, int32_t height); + + status_t checkFrameRate(const CameraParameters& params, + int32_t frameRate); + CameraSource(const CameraSource &); CameraSource &operator=(const CameraSource &); }; diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h new file mode 100644 index 0000000..afe7287 --- /dev/null +++ b/include/media/stagefright/CameraSourceTimeLapse.h @@ -0,0 +1,243 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
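+ *
+ * A rough creation sketch leading up to this call (the camera id, size,
+ * frame rate, and preview surface below are illustrative):
+ *   Size videoSize;
+ *   videoSize.width = 1280;
+ *   videoSize.height = 720;
+ *   sp<CameraSource> source = CameraSource::CreateFromCamera(
+ *           NULL, 0, videoSize, 30, previewSurface, true);
+ *   if (source != NULL && source->initCheck() == OK) {
+ *       bool metaDataStored = source->isMetaDataStoredInVideoBuffers();
+ *   }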
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CAMERA_SOURCE_TIME_LAPSE_H_ + +#define CAMERA_SOURCE_TIME_LAPSE_H_ + +#include <pthread.h> + +#include <utils/RefBase.h> +#include <utils/threads.h> + +namespace android { + +class ICamera; +class IMemory; +class Camera; + +class CameraSourceTimeLapse : public CameraSource { +public: + static CameraSourceTimeLapse *CreateFromCamera( + const sp<ICamera> &camera, + int32_t cameraId, + Size videoSize, + int32_t videoFrameRate, + const sp<Surface>& surface, + int64_t timeBetweenTimeLapseFrameCaptureUs); + + virtual ~CameraSourceTimeLapse(); + + // If the frame capture interval is large, read will block for a long time. + // Due to the way the mediaRecorder framework works, a stop() call from + // mediaRecorder waits until the read returns, causing a long wait for + // stop() to return. To avoid this, we can make read() return a copy of the + // last read frame with the same time stamp frequently. This keeps the + // read() call from blocking too long. Calling this function quickly + // captures another frame, keeps its copy, and enables this mode of read() + // returning quickly. + void startQuickReadReturns(); + +private: + // If true, will use still camera takePicture() for time lapse frames + // If false, will use the videocamera frames instead. + bool mUseStillCameraForTimeLapse; + + // Size of picture taken from still camera. This may be larger than the size + // of the video, as still camera may not support the exact video resolution + // demanded. See setPictureSizeToClosestSupported(). + int32_t mPictureWidth; + int32_t mPictureHeight; + + // size of the encoded video. + int32_t mVideoWidth; + int32_t mVideoHeight; + + // True if we need to crop the still camera image to get the video frame. + bool mNeedCropping; + + // Start location of the cropping rectangle. + int32_t mCropRectStartX; + int32_t mCropRectStartY; + + // Time between capture of two frames during time lapse recording + // Negative value indicates that timelapse is disabled. + int64_t mTimeBetweenTimeLapseFrameCaptureUs; + + // Time between two frames in final video (1/frameRate) + int64_t mTimeBetweenTimeLapseVideoFramesUs; + + // Real timestamp of the last encoded time lapse frame + int64_t mLastTimeLapseFrameRealTimestampUs; + + // Thread id of thread which takes still picture and sleeps in a loop. + pthread_t mThreadTimeLapse; + + // Variable set in dataCallbackTimestamp() to help skipCurrentFrame() + // to know if current frame needs to be skipped. + bool mSkipCurrentFrame; + + // Lock for accessing mCameraIdle + Mutex mCameraIdleLock; + + // Condition variable to wait on if camera is is not yet idle. Once the + // camera gets idle, this variable will be signalled. + Condition mCameraIdleCondition; + + // True if camera is in preview mode and ready for takePicture(). + // False after a call to takePicture() but before the final compressed + // data callback has been called and preview has been restarted. + volatile bool mCameraIdle; + + // True if stop() is waiting for camera to get idle, i.e. for the last + // takePicture() to complete. This is needed so that dataCallbackTimestamp() + // can return immediately. + volatile bool mStopWaitingForIdleCamera; + + // Lock for accessing quick stop variables. + Mutex mQuickStopLock; + + // Condition variable to wake up still picture thread. 
+ Condition mTakePictureCondition; + + // mQuickStop is set to true if we use quick read() returns, otherwise it is set + // to false. Once in this mode read() return a copy of the last read frame + // with the same time stamp. See startQuickReadReturns(). + volatile bool mQuickStop; + + // Forces the next frame passed to dataCallbackTimestamp() to be read + // as a time lapse frame. Used by startQuickReadReturns() so that the next + // frame wakes up any blocking read. + volatile bool mForceRead; + + // Stores a copy of the MediaBuffer read in the last read() call after + // mQuickStop was true. + MediaBuffer* mLastReadBufferCopy; + + // Status code for last read. + status_t mLastReadStatus; + + CameraSourceTimeLapse( + const sp<ICamera> &camera, + int32_t cameraId, + Size videoSize, + int32_t videoFrameRate, + const sp<Surface>& surface, + int64_t timeBetweenTimeLapseFrameCaptureUs); + + // Wrapper over CameraSource::signalBufferReturned() to implement quick stop. + // It only handles the case when mLastReadBufferCopy is signalled. Otherwise + // it calls the base class' function. + virtual void signalBufferReturned(MediaBuffer* buffer); + + // Wrapper over CameraSource::read() to implement quick stop. + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL); + + // For still camera case starts a thread which calls camera's takePicture() + // in a loop. For video camera case, just starts the camera's video recording. + virtual void startCameraRecording(); + + // For still camera case joins the thread created in startCameraRecording(). + // For video camera case, just stops the camera's video recording. + virtual void stopCameraRecording(); + + // For still camera case don't need to do anything as memory is locally + // allocated with refcounting. + // For video camera case just tell the camera to release the frame. + virtual void releaseRecordingFrame(const sp<IMemory>& frame); + + // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current + // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame. + virtual bool skipCurrentFrame(int64_t timestampUs); + + // Handles the callback to handle raw frame data from the still camera. + // Creates a copy of the frame data as the camera can reuse the frame memory + // once this callback returns. The function also sets a new timstamp corresponding + // to one frame time ahead of the last encoded frame's time stamp. It then + // calls dataCallbackTimestamp() of the base class with the copied data and the + // modified timestamp, which will think that it recieved the frame from a video + // camera and proceed as usual. + virtual void dataCallback(int32_t msgType, const sp<IMemory> &data); + + // In the video camera case calls skipFrameAndModifyTimeStamp() to modify + // timestamp and set mSkipCurrentFrame. + // Then it calls the base CameraSource::dataCallbackTimestamp() + virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, + const sp<IMemory> &data); + + // Convenience function to fill mLastReadBufferCopy from the just read + // buffer. + void fillLastReadBufferCopy(MediaBuffer& sourceBuffer); + + // If the passed in size (width x height) is a supported preview size, + // the function sets the camera's preview size to it and returns true. + // Otherwise returns false. + bool trySettingPreviewSize(int32_t width, int32_t height); + + // The still camera may not support the demanded video width and height. 
+ // We look for the supported picture sizes from the still camera and + // choose the smallest one with either dimensions higher than the corresponding + // video dimensions. The still picture will be cropped to get the video frame. + // The function returns true if the camera supports picture sizes greater than + // or equal to the passed in width and height, and false otherwise. + bool setPictureSizeToClosestSupported(int32_t width, int32_t height); + + // Computes the offset of the rectangle from where to start cropping the + // still image into the video frame. We choose the center of the image to be + // cropped. The offset is stored in (mCropRectStartX, mCropRectStartY). + bool computeCropRectangleOffset(); + + // Crops the source data into a smaller image starting at + // (mCropRectStartX, mCropRectStartY) and of the size of the video frame. + // The data is returned into a newly allocated IMemory. + sp<IMemory> cropYUVImage(const sp<IMemory> &source_data); + + // When video camera is used for time lapse capture, returns true + // until enough time has passed for the next time lapse frame. When + // the frame needs to be encoded, it returns false and also modifies + // the time stamp to be one frame time ahead of the last encoded + // frame's time stamp. + bool skipFrameAndModifyTimeStamp(int64_t *timestampUs); + + // Wrapper to enter threadTimeLapseEntry() + static void *ThreadTimeLapseWrapper(void *me); + + // Runs a loop which sleeps until a still picture is required + // and then calls mCamera->takePicture() to take the still picture. + // Used only in the case mUseStillCameraForTimeLapse = true. + void threadTimeLapseEntry(); + + // Wrapper to enter threadStartPreview() + static void *ThreadStartPreviewWrapper(void *me); + + // Starts the camera's preview. + void threadStartPreview(); + + // Starts thread ThreadStartPreviewWrapper() for restarting preview. + // Needs to be done in a thread so that dataCallback() which calls this function + // can return, and the camera can know that takePicture() is done. + void restartPreview(); + + // Creates a copy of source_data into a new memory of final type MemoryBase. + sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data); + + CameraSourceTimeLapse(const CameraSourceTimeLapse &); + CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &); +}; + +} // namespace android + +#endif // CAMERA_SOURCE_TIME_LAPSE_H_ diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h index 221c679..4ded5e8 100644 --- a/include/media/stagefright/HardwareAPI.h +++ b/include/media/stagefright/HardwareAPI.h @@ -21,10 +21,60 @@ #include <media/stagefright/OMXPluginBase.h> #include <media/stagefright/VideoRenderer.h> #include <surfaceflinger/ISurface.h> +#include <ui/android_native_buffer.h> #include <utils/RefBase.h> #include <OMX_Component.h> +namespace android { + +// A pointer to this struct is passed to the OMX_SetParameter when the extension +// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension +// is given. +// +// When Android native buffer use is disabled for a port (the default state), +// the OMX node should operate as normal, and expect UseBuffer calls to set its +// buffers. This is the mode that will be used when CPU access to the buffer is +// required. +// +// When Android native buffer use has been enabled, the OMX node must support +// only color formats in the range [OMX_COLOR_FormatAndroidPrivateStart, +// OMX_COLOR_FormatAndroidPrivateEnd). 
The node should then expect to receive +// UseAndroidNativeBuffer calls (via OMX_SetParameter) rather than UseBuffer +// calls. +struct EnableAndroidNativeBuffersParams { + OMX_U32 nSize; + OMX_VERSIONTYPE nVersion; + OMX_U32 nPortIndex; + OMX_BOOL enable; +}; + +// Color formats in the range [OMX_COLOR_FormatAndroidPrivateStart, +// OMX_COLOR_FormatAndroidPrivateEnd) will be converted to a gralloc pixel +// format when used to allocate Android native buffers via gralloc. The +// conversion is done by subtracting OMX_COLOR_FormatAndroidPrivateStart from +// the color format reported by the codec. +enum { + OMX_COLOR_FormatAndroidPrivateStart = 0xA0000000, + OMX_COLOR_FormatAndroidPrivateEnd = 0xB0000000, +}; + +// A pointer to this struct is passed to OMX_SetParameter when the extension +// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is +// given. This call will only be performed if a prior call was made with the +// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index, +// enabling use of Android native buffers. +struct UseAndroidNativeBufferParams { + OMX_U32 nSize; + OMX_VERSIONTYPE nVersion; + OMX_U32 nPortIndex; + OMX_PTR pAppPrivate; + OMX_BUFFERHEADERTYPE **bufferHeader; + const sp<android_native_buffer_t>& nativeBuffer; +}; + +} // namespace android + extern android::VideoRenderer *createRenderer( const android::sp<android::ISurface> &surface, const char *componentName, @@ -35,4 +85,3 @@ extern android::VideoRenderer *createRenderer( extern android::OMXPluginBase *createOMXPlugin(); #endif // HARDWARE_API_H_ - diff --git a/include/media/stagefright/MediaBuffer.h b/include/media/stagefright/MediaBuffer.h index 339e6fb..c1c4f94 100644 --- a/include/media/stagefright/MediaBuffer.h +++ b/include/media/stagefright/MediaBuffer.h @@ -25,6 +25,7 @@ namespace android { +class GraphicBuffer; class MediaBuffer; class MediaBufferObserver; class MetaData; @@ -48,6 +49,8 @@ public: MediaBuffer(size_t size); + MediaBuffer(const sp<GraphicBuffer>& graphicBuffer); + // Decrements the reference count and returns the buffer to its // associated MediaBufferGroup if the reference count drops to 0. void release(); @@ -63,6 +66,8 @@ public: void set_range(size_t offset, size_t length); + sp<GraphicBuffer> graphicBuffer() const; + sp<MetaData> meta_data(); // Clears meta data and resets the range to the full extent. @@ -94,6 +99,7 @@ private: void *mData; size_t mSize, mRangeOffset, mRangeLength; + sp<GraphicBuffer> mGraphicBuffer; bool mOwnsData; diff --git a/include/media/stagefright/MediaSourceSplitter.h b/include/media/stagefright/MediaSourceSplitter.h new file mode 100644 index 0000000..568f4c2 --- /dev/null +++ b/include/media/stagefright/MediaSourceSplitter.h @@ -0,0 +1,193 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This class provides a way to split a single media source into multiple sources. 
+// The constructor takes in the real mediaSource and createClient() can then be +// used to create multiple sources served from this real mediaSource. +// +// Usage: +// - Create MediaSourceSplitter by passing in a real mediaSource from which +// multiple duplicate channels are needed. +// - Create a client using createClient() and use it as any other mediaSource. +// +// Note that multiple clients can be created using createClient() and +// started/stopped in any order. MediaSourceSplitter stops the real source only +// when all clients have been stopped. +// +// If a new client is created/started after some existing clients have already +// started, the new client will start getting its read frames from the current +// time. + +#ifndef MEDIA_SOURCE_SPLITTER_H_ + +#define MEDIA_SOURCE_SPLITTER_H_ + +#include <media/stagefright/MediaSource.h> +#include <utils/threads.h> +#include <utils/Vector.h> +#include <utils/RefBase.h> + +namespace android { + +class MediaBuffer; +class MetaData; + +class MediaSourceSplitter : public RefBase { +public: + // Constructor + // mediaSource: The real mediaSource. The class keeps a reference to it to + // implement the various clients. + MediaSourceSplitter(sp<MediaSource> mediaSource); + + ~MediaSourceSplitter(); + + // Creates a new client of base type MediaSource. Multiple clients can be + // created which get their data through the same real mediaSource. These + // clients can then be used like any other MediaSource, all of which provide + // data from the same real source. + sp<MediaSource> createClient(); + +private: + // Total number of clients created through createClient(). + int32_t mNumberOfClients; + + // reference to the real MediaSource passed to the constructor. + sp<MediaSource> mSource; + + // Stores pointer to the MediaBuffer read from the real MediaSource. + // All clients use this to implement the read() call. + MediaBuffer *mLastReadMediaBuffer; + + // Status code for read from the real MediaSource. All clients return + // this for their read(). + status_t mLastReadStatus; + + // Boolean telling whether the real MediaSource has started. + bool mSourceStarted; + + // List of booleans, one for each client, storing whether the corresponding + // client's start() has been called. + Vector<bool> mClientsStarted; + + // Stores the number of clients which are currently started. + int32_t mNumberOfClientsStarted; + + // Since different clients call read() asynchronously, we need to keep track + // of what data is currently read into the mLastReadMediaBuffer. + // mCurrentReadBit stores the bit for the current read buffer. This bit + // flips each time a new buffer is read from the source. + // mClientsDesiredReadBit stores the bit for the next desired read buffer + // for each client. This bit flips each time read() is completed for this + // client. + bool mCurrentReadBit; + Vector<bool> mClientsDesiredReadBit; + + // Number of clients whose current read has been completed. + int32_t mNumberOfCurrentReads; + + // Boolean telling whether the last read has been completed for all clients. + // The variable is reset to false each time buffer is read from the real + // source. + bool mLastReadCompleted; + + // A global mutex for access to critical sections. + Mutex mLock; + + // Condition variable for waiting on read from source to complete. + Condition mReadFromSourceCondition; + + // Condition variable for waiting on all client's last read to complete. 
+ Condition mAllReadsCompleteCondition; + + // Functions used by Client to implement the MediaSource interface. + + // If the real source has not been started yet by any client, starts it. + status_t start(int clientId, MetaData *params); + + // Stops the real source after all clients have called stop(). + status_t stop(int clientId); + + // returns the real source's getFormat(). + sp<MetaData> getFormat(int clientId); + + // If the client's desired buffer has already been read into + // mLastReadMediaBuffer, points the buffer to that. Otherwise if it is the + // master client, reads the buffer from source or else waits for the master + // client to read the buffer and uses that. + status_t read(int clientId, + MediaBuffer **buffer, const MediaSource::ReadOptions *options = NULL); + + // Not implemented right now. + status_t pause(int clientId); + + // Function which reads a buffer from the real source into + // mLastReadMediaBuffer + void readFromSource_lock(const MediaSource::ReadOptions *options); + + // Waits until read from the real source has been completed. + // _lock means that the function should be called when the thread has already + // obtained the lock for the mutex mLock. + void waitForReadFromSource_lock(int32_t clientId); + + // Waits until all clients have read the current buffer in + // mLastReadCompleted. + void waitForAllClientsLastRead_lock(int32_t clientId); + + // Each client calls this after it completes its read(). Once all clients + // have called this for the current buffer, the function calls + // mAllReadsCompleteCondition.broadcast() to signal the waiting clients. + void signalReadComplete_lock(bool readAborted); + + // Make these constructors private. + MediaSourceSplitter(); + MediaSourceSplitter(const MediaSourceSplitter &); + MediaSourceSplitter &operator=(const MediaSourceSplitter &); + + // This class implements the MediaSource interface. Each client stores a + // reference to the parent MediaSourceSplitter and uses it to complete the + // various calls. + class Client : public MediaSource { + public: + // Constructor stores reference to the parent MediaSourceSplitter and it + // client id. + Client(sp<MediaSourceSplitter> splitter, int32_t clientId); + + // MediaSource interface + virtual status_t start(MetaData *params = NULL); + + virtual status_t stop(); + + virtual sp<MetaData> getFormat(); + + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + + virtual status_t pause(); + + private: + // Refernce to the parent MediaSourceSplitter + sp<MediaSourceSplitter> mSplitter; + + // Id of this client. + int32_t mClientId; + }; + + friend class Client; +}; + +} // namespace android + +#endif // MEDIA_SOURCE_SPLITTER_H_ diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h index d2bd9f2..423f385 100644 --- a/include/media/stagefright/MetaData.h +++ b/include/media/stagefright/MetaData.h @@ -99,6 +99,9 @@ enum { kKeyValidSamples = 'valD', // int32_t kKeyIsUnreadable = 'unre', // bool (int32_t) + + // An indication that a video buffer has been rendered. 
+ kKeyRendered = 'rend', // bool (int32_t) }; enum { diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h index 1d94160..6fef2e7 100644 --- a/include/media/stagefright/OMXCodec.h +++ b/include/media/stagefright/OMXCodec.h @@ -18,6 +18,7 @@ #define OMX_CODEC_H_ +#include <android/native_window.h> #include <media/IOMX.h> #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaSource.h> @@ -44,7 +45,8 @@ struct OMXCodec : public MediaSource, const sp<MetaData> &meta, bool createEncoder, const sp<MediaSource> &source, const char *matchComponentName = NULL, - uint32_t flags = 0); + uint32_t flags = 0, + const sp<ANativeWindow> &nativeWindow = NULL); static void setComponentRole( const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder, @@ -114,6 +116,7 @@ private: struct BufferInfo { IOMX::buffer_id mBuffer; bool mOwnedByComponent; + bool mOwnedByNativeWindow; sp<IMemory> mMem; size_t mSize; void *mData; @@ -159,13 +162,21 @@ private: bool mPaused; + sp<ANativeWindow> mNativeWindow; + + // The index in each of the mPortBuffers arrays of the buffer that will be + // submitted to OMX next. This only applies when using buffers from a + // native window. + size_t mNextNativeBufferIndex[2]; + // A list of indices into mPortStatus[kPortIndexOutput] filled with data. List<size_t> mFilledBuffers; Condition mBufferFilled; OMXCodec(const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks, bool isEncoder, const char *mime, const char *componentName, - const sp<MediaSource> &source); + const sp<MediaSource> &source, + const sp<ANativeWindow> &nativeWindow); void addCodecSpecificData(const void *data, size_t size); void clearCodecSpecificData(); @@ -216,6 +227,11 @@ private: status_t allocateBuffers(); status_t allocateBuffersOnPort(OMX_U32 portIndex); + status_t allocateOutputBuffersFromNativeWindow(); + + status_t queueBufferToNativeWindow(BufferInfo *info); + status_t cancelBufferToNativeWindow(BufferInfo *info); + BufferInfo* dequeueBufferFromNativeWindow(); status_t freeBuffersOnPort( OMX_U32 portIndex, bool onlyThoseWeOwn = false); @@ -250,6 +266,7 @@ private: status_t init(); void initOutputFormat(const sp<MetaData> &inputFormat); + status_t initNativeWindow(); void dumpPortStatus(OMX_U32 portIndex); diff --git a/include/media/stagefright/VideoSourceDownSampler.h b/include/media/stagefright/VideoSourceDownSampler.h new file mode 100644 index 0000000..439918c --- /dev/null +++ b/include/media/stagefright/VideoSourceDownSampler.h @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// VideoSourceDownSampler implements the MediaSource interface, +// downsampling frames provided from a real video source. 
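+//
+// A rough usage sketch (the wrapped camera source below is illustrative):
+//   sp<MediaSource> downsampled =
+//       new VideoSourceDownSampler(cameraSource, 176, 144);
+//   downsampled->start();
+//   MediaBuffer *frame;
+//   status_t err = downsampled->read(&frame);
+//   if (err == OK && frame != NULL) {
+//       frame->release();
+//   }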
+
+#ifndef VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#define VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class IMemory;
+class MediaBuffer;
+class MetaData;
+
+class VideoSourceDownSampler : public MediaSource {
+public:
+    virtual ~VideoSourceDownSampler();
+
+    // Constructor:
+    // videoSource: The real video source which provides the original frames.
+    // width, height: The desired width, height. These should be less than or equal
+    // to those of the real video source. We then downsample the original frames to
+    // this size.
+    VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+        int32_t width, int32_t height);
+
+    // MediaSource interface
+    virtual status_t start(MetaData *params = NULL);
+
+    virtual status_t stop();
+
+    virtual sp<MetaData> getFormat();
+
+    virtual status_t read(
+            MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+    virtual status_t pause();
+
+private:
+    // Reference to the real video source.
+    sp<MediaSource> mRealVideoSource;
+
+    // Size of frames to be provided by this source.
+    int32_t mWidth;
+    int32_t mHeight;
+
+    // Size of frames provided by the real source.
+    int32_t mRealSourceWidth;
+    int32_t mRealSourceHeight;
+
+    // Downsampling parameters.
+    int32_t mDownSampleOffsetX;
+    int32_t mDownSampleOffsetY;
+    int32_t mDownSampleSkipX;
+    int32_t mDownSampleSkipY;
+
+    // True if we need to crop the still video image to get the video frame.
+    bool mNeedDownSampling;
+
+    // Meta data. This is a copy of the real source except for the width and
+    // height parameters.
+    sp<MetaData> mMeta;
+
+    // Computes the offset, skip parameters for downsampling the original frame
+    // to the desired size.
+    void computeDownSamplingParameters();
+
+    // Downsamples the frame in sourceBuffer to size (mWidth x mHeight). A new
+    // buffer is created which stores the downsampled image.
+    void downSampleYUVImage(const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const;
+
+    // Disallow these.
+    VideoSourceDownSampler(const VideoSourceDownSampler &);
+    VideoSourceDownSampler &operator=(const VideoSourceDownSampler &);
+};
+
+}  // namespace android
+
+#endif  // VIDEO_SOURCE_DOWN_SAMPLER_H_
diff --git a/include/media/stagefright/YUVCanvas.h b/include/media/stagefright/YUVCanvas.h
new file mode 100644
index 0000000..ff70923
--- /dev/null
+++ b/include/media/stagefright/YUVCanvas.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// YUVCanvas holds a reference to a YUVImage on which it can do various
+// drawing operations. It provides various utility functions for filling,
+// cropping, etc.
+
+
+#ifndef YUV_CANVAS_H_
+
+#define YUV_CANVAS_H_
+
+#include <stdint.h>
+
+namespace android {
+
+class YUVImage;
+class Rect;
+
+class YUVCanvas {
+public:
+
+    // Constructor takes in a reference to a yuvImage on which it can do
+    // various drawing operations.
+ YUVCanvas(YUVImage &yuvImage); + ~YUVCanvas(); + + // Fills the entire image with the given YUV values. + void FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue); + + // Fills the rectangular region [startX,endX]x[startY,endY] with the given YUV values. + void FillYUVRectangle(const Rect& rect, + uint8_t yValue, uint8_t uValue, uint8_t vValue); + + // Copies the region [startX,endX]x[startY,endY] from srcImage into the + // canvas' target image (mYUVImage) starting at + // (destinationStartX,destinationStartY). + // Note that undefined behavior may occur if srcImage is same as the canvas' + // target image. + void CopyImageRect( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage); + + // Downsamples the srcImage into the canvas' target image (mYUVImage) + // The downsampling copies pixels from the source image starting at + // (srcOffsetX, srcOffsetY) to the target image, starting at (0, 0). + // For each X increment in the target image, skipX pixels are skipped + // in the source image. + // Similarly for each Y increment in the target image, skipY pixels + // are skipped in the source image. + void downsample( + int32_t srcOffsetX, int32_t srcOffsetY, + int32_t skipX, int32_t skipY, + const YUVImage &srcImage); + +private: + YUVImage& mYUVImage; + + YUVCanvas(const YUVCanvas &); + YUVCanvas &operator=(const YUVCanvas &); +}; + +} // namespace android + +#endif // YUV_CANVAS_H_ diff --git a/include/media/stagefright/YUVImage.h b/include/media/stagefright/YUVImage.h new file mode 100644 index 0000000..4e98618 --- /dev/null +++ b/include/media/stagefright/YUVImage.h @@ -0,0 +1,178 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// A container class to hold YUV data and provide various utilities, +// e.g. to set/get pixel values. +// Supported formats: +// - YUV420 Planar +// - YUV420 Semi Planar +// +// Currently does not support variable strides. +// +// Implementation: Two simple abstractions are done to simplify access +// to YUV channels for different formats: +// - initializeYUVPointers() sets up pointers (mYdata, mUdata, mVdata) to +// point to the right start locations of the different channel data depending +// on the format. +// - getOffsets() returns the correct offset for the different channels +// depending on the format. +// Location of any pixel's YUV channels can then be easily computed using these. +// + +#ifndef YUV_IMAGE_H_ + +#define YUV_IMAGE_H_ + +#include <stdint.h> +#include <cstring> + +namespace android { + +class Rect; + +class YUVImage { +public: + // Supported YUV formats + enum YUVFormat { + YUV420Planar, + YUV420SemiPlanar + }; + + // Constructs an image with the given size, format. Also allocates and owns + // the required memory. + YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height); + + // Constructs an image with the given size, format. The memory is provided + // by the caller and we don't own it. 
+    YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer);
+
+    // Destructor to delete the memory if it owns it.
+    ~YUVImage();
+
+    // Returns the size of the buffer required to store the YUV data for the given
+    // format and geometry. Useful when the caller wants to allocate the requisite
+    // memory.
+    static size_t bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+    int32_t width() const {return mWidth;}
+    int32_t height() const {return mHeight;}
+
+    // Returns true if pixel is in the range [0, width-1] x [0, height-1]
+    // and false otherwise.
+    bool validPixel(int32_t x, int32_t y) const;
+
+    // Get the pixel YUV value at pixel (x,y).
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if get was successful and false otherwise.
+    bool getPixelValue(int32_t x, int32_t y,
+        uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const;
+
+    // Set the pixel YUV value at pixel (x,y).
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if set was successful and false otherwise.
+    bool setPixelValue(int32_t x, int32_t y,
+        uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+    // Uses memcpy to copy an entire row of data.
+    static void fastCopyRectangle420Planar(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Uses memcpy to copy an entire row of data.
+    static void fastCopyRectangle420SemiPlanar(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Tries to use memcpy to copy entire rows of data.
+    // Returns false if fast copy is not possible for the passed image formats.
+    static bool fastCopyRectangle(
+            const Rect& srcRect,
+            int32_t destStartX, int32_t destStartY,
+            const YUVImage &srcImage, YUVImage &destImage);
+
+    // Convert the given YUV value to RGB.
+    void yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+        uint8_t *r, uint8_t *g, uint8_t *b) const;
+
+    // Write the image to a human-readable PPM file.
+    // Returns true if write was successful and false otherwise.
+    bool writeToPPM(const char *filename) const;
+
+private:
+    // YUV Format of the image.
+    YUVFormat mYUVFormat;
+
+    int32_t mWidth;
+    int32_t mHeight;
+
+    // Pointer to the memory buffer.
+    uint8_t *mBuffer;
+
+    // Boolean telling whether we own the memory buffer.
+    bool mOwnBuffer;
+
+    // Pointer to start of the Y data plane.
+    uint8_t *mYdata;
+
+    // Pointer to start of the U data plane. Note that in case of interleaved formats like
+    // YUV420 semiplanar, mUdata points to the start of the U data in the UV plane.
+    uint8_t *mUdata;
+
+    // Pointer to start of the V data plane. Note that in case of interleaved formats like
+    // YUV420 semiplanar, mVdata points to the start of the V data in the UV plane.
+    uint8_t *mVdata;
+
+    // Initialize the pointers mYdata, mUdata, mVdata to point to the right locations for
+    // the given format and geometry.
+    // Returns true if initialization was successful and false otherwise.
+    bool initializeYUVPointers();
+
+    // For the given pixel location, this returns the offset of the location of y, u and v
+    // data from the corresponding base pointers -- mYdata, mUdata, mVdata.
+    // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+    // Returns true if getting offsets was successful and false otherwise.
+ bool getOffsets(int32_t x, int32_t y, + int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const; + + // Returns the offset increments incurred in going from one data row to the next data row + // for the YUV channels. Note that this corresponds to data rows and not pixel rows. + // E.g. depending on formats, U/V channels may have only one data row corresponding + // to two pixel rows. + bool getOffsetIncrementsPerDataRow( + int32_t *yDataOffsetIncrement, + int32_t *uDataOffsetIncrement, + int32_t *vDataOffsetIncrement) const; + + // Given the offset return the address of the corresponding channel's data. + uint8_t* getYAddress(int32_t offset) const; + uint8_t* getUAddress(int32_t offset) const; + uint8_t* getVAddress(int32_t offset) const; + + // Given the pixel location, returns the address of the corresponding channel's data. + // Note that the range of x is [0, width-1] and the range of y is [0, height-1]. + bool getYUVAddresses(int32_t x, int32_t y, + uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const; + + // Disallow implicit casting and copying. + YUVImage(const YUVImage &); + YUVImage &operator=(const YUVImage &); +}; + +} // namespace android + +#endif // YUV_IMAGE_H_ diff --git a/include/surfaceflinger/ISurfaceComposer.h b/include/surfaceflinger/ISurfaceComposer.h index da4d56f..db57859 100644 --- a/include/surfaceflinger/ISurfaceComposer.h +++ b/include/surfaceflinger/ISurfaceComposer.h @@ -43,6 +43,7 @@ public: eSecure = 0x00000080, eNonPremultiplied = 0x00000100, ePushBuffers = 0x00000200, + eOpaque = 0x00000400, eFXSurfaceNormal = 0x00000000, eFXSurfaceBlur = 0x00010000, diff --git a/include/surfaceflinger/Surface.h b/include/surfaceflinger/Surface.h index 22684db..cef439c 100644 --- a/include/surfaceflinger/Surface.h +++ b/include/surfaceflinger/Surface.h @@ -94,7 +94,7 @@ private: friend class SurfaceComposerClient; // camera and camcorder need access to the ISurface binder interface for preview - friend class Camera; + friend class CameraService; friend class MediaRecorder; // mediaplayer needs access to ISurface for display friend class MediaPlayer; @@ -173,11 +173,12 @@ private: * (eventually this should go away and be replaced by proper APIs) */ // camera and camcorder need access to the ISurface binder interface for preview - friend class Camera; + friend class CameraService; friend class MediaRecorder; // MediaPlayer needs access to ISurface for display friend class MediaPlayer; friend class IOMX; + friend class SoftwareRenderer; // this is just to be able to write some unit tests friend class Test; @@ -314,4 +315,3 @@ private: }; // namespace android #endif // ANDROID_SF_SURFACE_H - diff --git a/include/ui/EventHub.h b/include/ui/EventHub.h index d78e35f..1431964 100644 --- a/include/ui/EventHub.h +++ b/include/ui/EventHub.h @@ -187,6 +187,9 @@ public: virtual bool markSupportedKeyCodes(int32_t deviceId, size_t numCodes, const int32_t* keyCodes, uint8_t* outFlags) const = 0; + virtual bool hasLed(int32_t deviceId, int32_t led) const = 0; + virtual void setLedState(int32_t deviceId, int32_t led, bool on) = 0; + virtual void dump(String8& dump) = 0; }; @@ -198,9 +201,9 @@ public: status_t errorCheck() const; virtual uint32_t getDeviceClasses(int32_t deviceId) const; - + virtual String8 getDeviceName(int32_t deviceId) const; - + virtual status_t getAbsoluteAxisInfo(int32_t deviceId, int axis, RawAbsoluteAxisInfo* outAxisInfo) const; @@ -218,6 +221,9 @@ public: virtual bool getEvent(RawEvent* outEvent); + virtual bool hasLed(int32_t deviceId, int32_t led) 
const; + virtual void setLedState(int32_t deviceId, int32_t led, bool on); + virtual void dump(String8& dump); protected: @@ -240,7 +246,10 @@ private: uint32_t classes; uint8_t* keyBitmask; KeyLayoutMap* layoutMap; - String8 keylayoutFilename; + String8 keyMapName; + bool defaultKeyMap; + String8 keyLayoutFilename; + String8 keyCharacterMapFilename; int fd; device_t* next; @@ -250,13 +259,19 @@ private: device_t* getDeviceLocked(int32_t deviceId) const; bool hasKeycodeLocked(device_t* device, int keycode) const; - + int32_t getScanCodeStateLocked(device_t* device, int32_t scanCode) const; int32_t getKeyCodeStateLocked(device_t* device, int32_t keyCode) const; int32_t getSwitchStateLocked(device_t* device, int32_t sw) const; bool markSupportedKeyCodesLocked(device_t* device, size_t numCodes, const int32_t* keyCodes, uint8_t* outFlags) const; + void configureKeyMap(device_t* device); + bool probeKeyMap(device_t* device, const String8& keyMapName, bool defaultKeyMap); + void selectKeyMap(device_t* device, const String8& keyMapName, bool defaultKeyMap); + void setKeyboardProperties(device_t* device, bool firstKeyboard); + void clearKeyboardProperties(device_t* device, bool firstKeyboard); + // Protect all internal state. mutable Mutex mLock; diff --git a/include/ui/GraphicBuffer.h b/include/ui/GraphicBuffer.h index a3e85a9..c446633 100644 --- a/include/ui/GraphicBuffer.h +++ b/include/ui/GraphicBuffer.h @@ -72,6 +72,9 @@ public: GraphicBuffer(uint32_t w, uint32_t h, PixelFormat format, uint32_t usage, uint32_t stride, native_handle_t* handle, bool keepOwnership); + // create a buffer from an existing android_native_buffer_t + GraphicBuffer(android_native_buffer_t* buffer, bool keepOwnership); + // return status status_t initCheck() const; @@ -137,6 +140,10 @@ private: GraphicBufferMapper& mBufferMapper; ssize_t mInitCheck; int mIndex; + + // If we're wrapping another buffer then this reference will make sure it + // doesn't get freed. + sp<android_native_buffer_t> mWrappedBuffer; }; }; // namespace android diff --git a/include/ui/Input.h b/include/ui/Input.h index 8c6018b..1355bab 100644 --- a/include/ui/Input.h +++ b/include/ui/Input.h @@ -76,7 +76,7 @@ namespace android { */ enum { /* These flags originate in RawEvents and are generally set in the key map. - * See also labels for policy flags in KeycodeLabels.h. */ + * NOTE: If you edit these flags, also edit labels in KeycodeLabels.h. */ POLICY_FLAG_WAKE = 0x00000001, POLICY_FLAG_WAKE_DROPPED = 0x00000002, @@ -87,6 +87,7 @@ enum { POLICY_FLAG_MENU = 0x00000040, POLICY_FLAG_LAUNCHER = 0x00000080, POLICY_FLAG_VIRTUAL = 0x00000100, + POLICY_FLAG_FUNCTION = 0x00000200, POLICY_FLAG_RAW_MASK = 0x0000ffff, diff --git a/include/ui/InputDispatcher.h b/include/ui/InputDispatcher.h index b0b855e..738f62a 100644 --- a/include/ui/InputDispatcher.h +++ b/include/ui/InputDispatcher.h @@ -387,6 +387,14 @@ public: */ virtual void setInputDispatchMode(bool enabled, bool frozen) = 0; + /* Transfers touch focus from the window associated with one channel to the + * window associated with the other channel. + * + * Returns true on success. False if the window did not actually have touch focus. + */ + virtual bool transferTouchFocus(const sp<InputChannel>& fromChannel, + const sp<InputChannel>& toChannel) = 0; + /* Registers or unregister input channels that may be used as targets for input events. * If monitor is true, the channel will receive a copy of all input events. 
* @@ -443,6 +451,9 @@ public: virtual void setFocusedApplication(const InputApplication* inputApplication); virtual void setInputDispatchMode(bool enabled, bool frozen); + virtual bool transferTouchFocus(const sp<InputChannel>& fromChannel, + const sp<InputChannel>& toChannel); + virtual status_t registerInputChannel(const sp<InputChannel>& inputChannel, bool monitor); virtual status_t unregisterInputChannel(const sp<InputChannel>& inputChannel); @@ -744,6 +755,9 @@ private: // Clears the current state. void clear(); + // Copies pointer-related parts of the input state to another instance. + void copyPointerStateTo(InputState& other) const; + private: struct KeyMemento { int32_t deviceId; diff --git a/include/ui/InputReader.h b/include/ui/InputReader.h index 923cdbf..2d4bf8b 100644 --- a/include/ui/InputReader.h +++ b/include/ui/InputReader.h @@ -419,9 +419,18 @@ private: Vector<KeyDown> keyDowns; // keys that are down int32_t metaState; nsecs_t downTime; // time of most recent key down + + struct LedState { + bool avail; // led is available + bool on; // we think the led is currently on + }; + LedState capsLockLedState; + LedState numLockLedState; + LedState scrollLockLedState; } mLocked; void initializeLocked(); + void initializeLedStateLocked(LockedState::LedState& ledState, int32_t led); bool isKeyboardOrGamepadKey(int32_t scanCode); @@ -429,6 +438,10 @@ private: uint32_t policyFlags); ssize_t findKeyDownLocked(int32_t scanCode); + + void updateLedStateLocked(bool reset); + void updateLedStateForModifierLocked(LockedState::LedState& ledState, int32_t led, + int32_t modifier, bool reset); }; @@ -571,6 +584,16 @@ protected: // Immutable calibration parameters in parsed form. struct Calibration { + // Position + bool haveXOrigin; + int32_t xOrigin; + bool haveYOrigin; + int32_t yOrigin; + bool haveXScale; + float xScale; + bool haveYScale; + float yScale; + // Touch Size enum TouchSizeCalibration { TOUCH_SIZE_CALIBRATION_DEFAULT, diff --git a/include/ui/KeycodeLabels.h b/include/ui/KeycodeLabels.h index f71d9cd..ef2b6b3 100755 --- a/include/ui/KeycodeLabels.h +++ b/include/ui/KeycodeLabels.h @@ -135,6 +135,59 @@ static const KeycodeLabel KEYCODES[] = { { "BUTTON_START", 108 }, { "BUTTON_SELECT", 109 }, { "BUTTON_MODE", 110 }, + { "ESCAPE", 111 }, + { "FORWARD_DEL", 112 }, + { "CTRL_LEFT", 113 }, + { "CTRL_RIGHT", 114 }, + { "CAPS_LOCK", 115 }, + { "SCROLL_LOCK", 116 }, + { "META_LEFT", 117 }, + { "META_RIGHT", 118 }, + { "FUNCTION", 119 }, + { "SYSRQ", 120 }, + { "BREAK", 121 }, + { "MOVE_HOME", 122 }, + { "MOVE_END", 123 }, + { "INSERT", 124 }, + { "FORWARD", 125 }, + { "MEDIA_PLAY", 126 }, + { "MEDIA_PAUSE", 127 }, + { "MEDIA_CLOSE", 128 }, + { "MEDIA_EJECT", 129 }, + { "MEDIA_RECORD", 130 }, + { "F1", 131 }, + { "F2", 132 }, + { "F3", 133 }, + { "F4", 134 }, + { "F5", 135 }, + { "F6", 136 }, + { "F7", 137 }, + { "F8", 138 }, + { "F9", 139 }, + { "F10", 140 }, + { "F11", 141 }, + { "F12", 142 }, + { "NUM_LOCK", 143 }, + { "NUMPAD_0", 144 }, + { "NUMPAD_1", 145 }, + { "NUMPAD_2", 146 }, + { "NUMPAD_3", 147 }, + { "NUMPAD_4", 148 }, + { "NUMPAD_5", 149 }, + { "NUMPAD_6", 150 }, + { "NUMPAD_7", 151 }, + { "NUMPAD_8", 152 }, + { "NUMPAD_9", 153 }, + { "NUMPAD_DIVIDE", 154 }, + { "NUMPAD_MULTIPLY", 155 }, + { "NUMPAD_SUBTRACT", 156 }, + { "NUMPAD_ADD", 157 }, + { "NUMPAD_DOT", 158 }, + { "NUMPAD_COMMA", 159 }, + { "NUMPAD_ENTER", 160 }, + { "NUMPAD_EQUALS", 161 }, + { "NUMPAD_LEFT_PAREN", 162 }, + { "NUMPAD_RIGHT_PAREN", 163 }, // NOTE: If you add a new keycode here you must also add it to 
several other files. // Refer to frameworks/base/core/java/android/view/KeyEvent.java for the full list. @@ -142,7 +195,7 @@ static const KeycodeLabel KEYCODES[] = { { NULL, 0 } }; -// See also policy flags in Input.h. +// NOTE: If you edit these flags, also edit policy flags in Input.h. static const KeycodeLabel FLAGS[] = { { "WAKE", 0x00000001 }, { "WAKE_DROPPED", 0x00000002 }, @@ -153,6 +206,7 @@ static const KeycodeLabel FLAGS[] = { { "MENU", 0x00000040 }, { "LAUNCHER", 0x00000080 }, { "VIRTUAL", 0x00000100 }, + { "FUNCTION", 0x00000200 }, { NULL, 0 } }; diff --git a/include/utils/Singleton.h b/include/utils/Singleton.h index 3b975b4..e1ee8eb 100644 --- a/include/utils/Singleton.h +++ b/include/utils/Singleton.h @@ -37,6 +37,11 @@ public: } return *instance; } + + static bool hasInstance() { + Mutex::Autolock _l(sLock); + return sInstance != 0; + } protected: ~Singleton() { }; diff --git a/include/utils/String8.h b/include/utils/String8.h index ef0b51a..cef8eca 100644 --- a/include/utils/String8.h +++ b/include/utils/String8.h @@ -157,9 +157,12 @@ public: inline size_t size() const; inline size_t length() const; inline size_t bytes() const; + inline bool isEmpty() const; inline const SharedBuffer* sharedBuffer() const; + void clear(); + void setTo(const String8& other); status_t setTo(const char* other); status_t setTo(const char* other, size_t numChars); @@ -345,6 +348,11 @@ inline size_t String8::size() const return length(); } +inline bool String8::isEmpty() const +{ + return length() == 0; +} + inline size_t String8::bytes() const { return SharedBuffer::sizeFromData(mString)-1; |
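To make the new YUVImage/YUVCanvas API above concrete, a hedged usage sketch (not part of the patch; the sizes and the helper name are illustrative): allocate a self-owned YUV420-planar image, fill it, and copy a rectangle from a source image.

    #include <media/stagefright/YUVCanvas.h>
    #include <media/stagefright/YUVImage.h>
    #include <ui/Rect.h>

    using namespace android;

    // Sketch only: fills an assumed 640x480 YUV420-planar image with mid-grey
    // and copies the top-left 320x240 region of srcImage into it at (0, 0).
    void sketchYuvUsage(const YUVImage &srcImage) {
        YUVImage dest(YUVImage::YUV420Planar, 640, 480);   // allocates and owns its buffer
        YUVCanvas canvas(dest);                             // canvas draws into dest
        canvas.FillYUV(128, 128, 128);
        canvas.CopyImageRect(Rect(0, 0, 320, 240), 0, 0, srcImage);
    }

The same canvas could feed VideoSourceDownSampler-style scaling via YUVCanvas::downsample, which skips pixels in the source image according to the offset and skip parameters described in the header.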