-rw-r--r--camera/Camera.cpp40
-rw-r--r--camera/CameraParameters.cpp25
-rw-r--r--camera/ICamera.cpp63
-rw-r--r--cmds/stagefright/Android.mk25
-rw-r--r--cmds/stagefright/recordvideo.cpp303
-rw-r--r--drm/drmserver/DrmManagerService.cpp17
-rw-r--r--include/camera/Camera.h22
-rw-r--r--include/camera/CameraHardwareInterface.h83
-rw-r--r--include/camera/CameraParameters.h36
-rw-r--r--include/camera/ICamera.h15
-rw-r--r--include/media/IMediaPlayer.h4
-rw-r--r--include/media/IMediaRecorder.h6
-rw-r--r--include/media/IOMX.h11
-rw-r--r--include/media/MediaPlayerInterface.h4
-rw-r--r--include/media/MediaProfiles.h44
-rw-r--r--include/media/MediaRecorderBase.h5
-rw-r--r--include/media/PVMediaRecorder.h5
-rw-r--r--include/media/PVPlayer.h3
-rw-r--r--include/media/mediarecorder.h2
-rw-r--r--include/media/mediascanner.h8
-rw-r--r--include/media/stagefright/CameraSource.h170
-rw-r--r--include/media/stagefright/CameraSourceTimeLapse.h243
-rw-r--r--include/media/stagefright/HardwareAPI.h77
-rw-r--r--include/media/stagefright/MediaBuffer.h6
-rw-r--r--include/media/stagefright/MediaSourceSplitter.h193
-rw-r--r--include/media/stagefright/MetaData.h3
-rw-r--r--include/media/stagefright/OMXCodec.h26
-rw-r--r--include/media/stagefright/VideoSourceDownSampler.h97
-rw-r--r--include/media/stagefright/YUVCanvas.h79
-rw-r--r--include/media/stagefright/YUVImage.h178
-rw-r--r--media/libeffects/visualizer/Android.mk2
-rw-r--r--media/libmedia/AudioSystem.cpp3
-rw-r--r--media/libmedia/IMediaPlayer.cpp21
-rw-r--r--media/libmedia/IMediaRecorder.cpp25
-rw-r--r--media/libmedia/IOMX.cpp101
-rw-r--r--media/libmedia/MediaProfiles.cpp139
-rw-r--r--media/libmedia/MediaScanner.cpp50
-rw-r--r--media/libmedia/mediaplayer.cpp13
-rw-r--r--media/libmedia/mediarecorder.cpp30
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.cpp10
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.h3
-rw-r--r--media/libmediaplayerservice/MediaRecorderClient.cpp14
-rw-r--r--media/libmediaplayerservice/MediaRecorderClient.h4
-rw-r--r--media/libmediaplayerservice/MidiFile.h3
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.cpp11
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.h3
-rw-r--r--media/libmediaplayerservice/StagefrightRecorder.cpp471
-rw-r--r--media/libmediaplayerservice/StagefrightRecorder.h54
-rw-r--r--media/libmediaplayerservice/TestPlayerStub.h5
-rw-r--r--media/libstagefright/Android.mk4
-rw-r--r--media/libstagefright/AwesomePlayer.cpp120
-rw-r--r--media/libstagefright/CameraSource.cpp522
-rw-r--r--media/libstagefright/CameraSourceTimeLapse.cpp518
-rw-r--r--media/libstagefright/DataSource.cpp2
-rw-r--r--media/libstagefright/FileSource.cpp2
-rw-r--r--media/libstagefright/MPEG4Writer.cpp3
-rw-r--r--media/libstagefright/MediaBuffer.cpp29
-rw-r--r--media/libstagefright/MediaExtractor.cpp2
-rw-r--r--media/libstagefright/MediaSourceSplitter.cpp234
-rw-r--r--media/libstagefright/OMXCodec.cpp380
-rw-r--r--media/libstagefright/SampleTable.cpp2
-rw-r--r--media/libstagefright/VideoSourceDownSampler.cpp142
-rw-r--r--media/libstagefright/colorconversion/Android.mk8
-rw-r--r--media/libstagefright/colorconversion/SoftwareRenderer.cpp183
-rw-r--r--media/libstagefright/include/AwesomePlayer.h8
-rw-r--r--media/libstagefright/include/OMX.h10
-rw-r--r--media/libstagefright/include/OMXNodeInstance.h8
-rw-r--r--media/libstagefright/include/SoftwareRenderer.h18
-rw-r--r--media/libstagefright/omx/OMX.cpp23
-rw-r--r--media/libstagefright/omx/OMXNodeInstance.cpp130
-rw-r--r--media/libstagefright/rtsp/ARTSPConnection.cpp263
-rw-r--r--media/libstagefright/rtsp/ARTSPConnection.h20
-rw-r--r--media/libstagefright/rtsp/ASessionDescription.cpp16
-rw-r--r--media/libstagefright/rtsp/Android.mk1
-rw-r--r--media/libstagefright/rtsp/MyHandler.h25
-rw-r--r--media/libstagefright/yuv/Android.mk13
-rw-r--r--media/libstagefright/yuv/YUVCanvas.cpp111
-rw-r--r--media/libstagefright/yuv/YUVImage.cpp413
-rw-r--r--media/mtp/Android.mk78
-rw-r--r--media/mtp/MtpClient.cpp262
-rw-r--r--media/mtp/MtpClient.h68
-rw-r--r--media/mtp/MtpCursor.cpp461
-rw-r--r--media/mtp/MtpCursor.h78
-rw-r--r--media/mtp/MtpDataPacket.cpp486
-rw-r--r--media/mtp/MtpDataPacket.h118
-rw-r--r--media/mtp/MtpDatabase.h104
-rw-r--r--media/mtp/MtpDebug.cpp387
-rw-r--r--media/mtp/MtpDebug.h37
-rw-r--r--media/mtp/MtpDevice.cpp496
-rw-r--r--media/mtp/MtpDevice.h105
-rw-r--r--media/mtp/MtpDeviceInfo.cpp97
-rw-r--r--media/mtp/MtpDeviceInfo.h54
-rw-r--r--media/mtp/MtpEventPacket.cpp67
-rw-r--r--media/mtp/MtpEventPacket.h48
-rw-r--r--media/mtp/MtpObjectInfo.cpp108
-rw-r--r--media/mtp/MtpObjectInfo.h60
-rw-r--r--media/mtp/MtpPacket.cpp154
-rw-r--r--media/mtp/MtpPacket.h69
-rw-r--r--media/mtp/MtpProperty.cpp356
-rw-r--r--media/mtp/MtpProperty.h108
-rw-r--r--media/mtp/MtpRequestPacket.cpp56
-rw-r--r--media/mtp/MtpRequestPacket.h48
-rw-r--r--media/mtp/MtpResponsePacket.cpp57
-rw-r--r--media/mtp/MtpResponsePacket.h48
-rw-r--r--media/mtp/MtpServer.cpp797
-rw-r--r--media/mtp/MtpServer.h107
-rw-r--r--media/mtp/MtpStorage.cpp80
-rw-r--r--media/mtp/MtpStorage.h50
-rw-r--r--media/mtp/MtpStorageInfo.cpp72
-rw-r--r--media/mtp/MtpStorageInfo.h49
-rw-r--r--media/mtp/MtpStringBuffer.cpp171
-rw-r--r--media/mtp/MtpStringBuffer.h57
-rw-r--r--media/mtp/MtpTypes.h78
-rw-r--r--media/mtp/MtpUtils.cpp78
-rw-r--r--media/mtp/MtpUtils.h29
-rw-r--r--media/mtp/mtp.h475
-rw-r--r--services/audioflinger/AudioPolicyManagerBase.cpp35
-rw-r--r--services/camera/libcameraservice/CameraHardwareStub.cpp4
-rw-r--r--services/camera/libcameraservice/CameraHardwareStub.h2
-rw-r--r--services/camera/libcameraservice/CameraService.cpp197
-rw-r--r--services/camera/libcameraservice/CameraService.h16
121 files changed, 11690 insertions, 582 deletions
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 7efc6d7..148e864 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -167,39 +167,52 @@ status_t Camera::unlock()
return c->unlock();
}
-// pass the buffered ISurface to the camera service
+// pass the buffered Surface to the camera service
status_t Camera::setPreviewDisplay(const sp<Surface>& surface)
{
- LOGV("setPreviewDisplay");
+ LOGV("setPreviewDisplay(%p)", surface.get());
sp <ICamera> c = mCamera;
if (c == 0) return NO_INIT;
if (surface != 0) {
- return c->setPreviewDisplay(surface->getISurface());
+ return c->setPreviewDisplay(surface);
} else {
LOGD("app passed NULL surface");
return c->setPreviewDisplay(0);
}
}
-status_t Camera::setPreviewDisplay(const sp<ISurface>& surface)
+// start preview mode
+status_t Camera::startPreview()
{
- LOGV("setPreviewDisplay");
- if (surface == 0) {
- LOGD("app passed NULL surface");
- }
+ LOGV("startPreview");
sp <ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- return c->setPreviewDisplay(surface);
+ return c->startPreview();
+}
+
+int32_t Camera::getNumberOfVideoBuffers() const
+{
+ LOGV("getNumberOfVideoBuffers");
+ sp <ICamera> c = mCamera;
+ if (c == 0) return 0;
+ return c->getNumberOfVideoBuffers();
}
+sp<IMemory> Camera::getVideoBuffer(int32_t index) const
+{
+ LOGV("getVideoBuffer: %d", index);
+ sp <ICamera> c = mCamera;
+ if (c == 0) return 0;
+ return c->getVideoBuffer(index);
+}
-// start preview mode
-status_t Camera::startPreview()
+status_t Camera::storeMetaDataInBuffers(bool enabled)
{
- LOGV("startPreview");
+ LOGV("storeMetaDataInBuffers: %s",
+ enabled? "true": "false");
sp <ICamera> c = mCamera;
if (c == 0) return NO_INIT;
- return c->startPreview();
+ return c->storeMetaDataInBuffers(enabled);
}
// start recording mode, must call setPreviewDisplay first
@@ -375,4 +388,3 @@ void Camera::DeathNotifier::binderDied(const wp<IBinder>& who) {
}
}; // namespace android
-
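The Camera.cpp hunks above add three client-side calls that mirror the ICamera changes further down: storeMetaDataInBuffers(), getNumberOfVideoBuffers() and getVideoBuffer(). A minimal usage sketch, assuming "camera" is an already connected sp<Camera> with a preview surface set (connection and startRecording() are outside this diff):

    // Hedged client sketch; only the three new calls come from this change.
    status_t err = camera->storeMetaDataInBuffers(true);    // request before recording starts
    if (err != OK) {
        // HAL declined; real YUV frames will be stored in the video buffers instead.
    }
    camera->startPreview();
    // ... start recording, then enumerate the HAL's video buffers:
    int32_t n = camera->getNumberOfVideoBuffers();           // 0 if the HAL opts out
    for (int32_t i = 0; i < n; ++i) {
        sp<IMemory> buf = camera->getVideoBuffer(i);          // per-index buffer, 0 if out of range
        // hand buf to the consumer (e.g. the video encoder) for zero-copy use
    }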
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index 83e5e57..45b1b9a 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -73,6 +73,8 @@ const char CameraParameters::KEY_ZOOM_SUPPORTED[] = "zoom-supported";
const char CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED[] = "smooth-zoom-supported";
const char CameraParameters::KEY_FOCUS_DISTANCES[] = "focus-distances";
const char CameraParameters::KEY_VIDEO_FRAME_FORMAT[] = "video-frame-format";
+const char CameraParameters::KEY_VIDEO_SIZE[] = "video-size";
+const char CameraParameters::KEY_SUPPORTED_VIDEO_SIZES[] = "video-size-values";
const char CameraParameters::TRUE[] = "true";
const char CameraParameters::FOCUS_DISTANCE_INFINITY[] = "Infinity";
@@ -129,7 +131,7 @@ const char CameraParameters::SCENE_MODE_PARTY[] = "party";
const char CameraParameters::SCENE_MODE_CANDLELIGHT[] = "candlelight";
const char CameraParameters::SCENE_MODE_BARCODE[] = "barcode";
-// Formats for setPreviewFormat and setPictureFormat.
+const char CameraParameters::PIXEL_FORMAT_YUV420P[] = "yuv420p";
const char CameraParameters::PIXEL_FORMAT_YUV422SP[] = "yuv422sp";
const char CameraParameters::PIXEL_FORMAT_YUV420SP[] = "yuv420sp";
const char CameraParameters::PIXEL_FORMAT_YUV422I[] = "yuv422i-yuyv";
@@ -337,6 +339,27 @@ void CameraParameters::getSupportedPreviewSizes(Vector<Size> &sizes) const
parseSizesList(previewSizesStr, sizes);
}
+void CameraParameters::setVideoSize(int width, int height)
+{
+ char str[32];
+ sprintf(str, "%dx%d", width, height);
+ set(KEY_VIDEO_SIZE, str);
+}
+
+void CameraParameters::getVideoSize(int *width, int *height) const
+{
+ *width = *height = -1;
+ const char *p = get(KEY_VIDEO_SIZE);
+ if (p == 0) return;
+ parse_pair(p, width, height, 'x');
+}
+
+void CameraParameters::getSupportedVideoSizes(Vector<Size> &sizes) const
+{
+ const char *videoSizesStr = get(KEY_SUPPORTED_VIDEO_SIZES);
+ parseSizesList(videoSizesStr, sizes);
+}
+
void CameraParameters::setPreviewFrameRate(int fps)
{
set(KEY_PREVIEW_FRAME_RATE, fps);
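The new accessors reuse the existing "<width>x<height>" string convention under KEY_VIDEO_SIZE / KEY_SUPPORTED_VIDEO_SIZES. A small sketch of the round trip, assuming the pre-existing Camera::getParameters()/setParameters(String8) API (not part of this change):

    // Hedged sketch: pick a supported video size and push it back to the camera.
    CameraParameters params(camera->getParameters());
    Vector<Size> sizes;
    params.getSupportedVideoSizes(sizes);
    if (!sizes.isEmpty()) {
        params.setVideoSize(sizes[0].width, sizes[0].height);
        camera->setParameters(params.flatten());
    }
    int w, h;
    params.getVideoSize(&w, &h);   // both set to -1 when KEY_VIDEO_SIZE is absent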
diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp
index 13673b5..7ba8d12 100644
--- a/camera/ICamera.cpp
+++ b/camera/ICamera.cpp
@@ -45,6 +45,9 @@ enum {
STOP_RECORDING,
RECORDING_ENABLED,
RELEASE_RECORDING_FRAME,
+ GET_NUM_VIDEO_BUFFERS,
+ GET_VIDEO_BUFFER,
+ STORE_META_DATA_IN_BUFFERS,
};
class BpCamera: public BpInterface<ICamera>
@@ -64,13 +67,13 @@ public:
remote()->transact(DISCONNECT, data, &reply);
}
- // pass the buffered ISurface to the camera service
- status_t setPreviewDisplay(const sp<ISurface>& surface)
+ // pass the buffered Surface to the camera service
+ status_t setPreviewDisplay(const sp<Surface>& surface)
{
LOGV("setPreviewDisplay");
Parcel data, reply;
data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
- data.writeStrongBinder(surface->asBinder());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_PREVIEW_DISPLAY, data, &reply);
return reply.readInt32();
}
@@ -133,6 +136,37 @@ public:
remote()->transact(RELEASE_RECORDING_FRAME, data, &reply);
}
+ int32_t getNumberOfVideoBuffers() const
+ {
+ LOGV("getNumberOfVideoBuffers");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ remote()->transact(GET_NUM_VIDEO_BUFFERS, data, &reply);
+ return reply.readInt32();
+ }
+
+ sp<IMemory> getVideoBuffer(int32_t index) const
+ {
+ LOGV("getVideoBuffer: %d", index);
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ data.writeInt32(index);
+ remote()->transact(GET_VIDEO_BUFFER, data, &reply);
+ sp<IMemory> mem = interface_cast<IMemory>(
+ reply.readStrongBinder());
+ return mem;
+ }
+
+ status_t storeMetaDataInBuffers(bool enabled)
+ {
+ LOGV("storeMetaDataInBuffers: %s", enabled? "true": "false");
+ Parcel data, reply;
+ data.writeInterfaceToken(ICamera::getInterfaceDescriptor());
+ data.writeInt32(enabled);
+ remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+ return reply.readInt32();
+ }
+
// check preview state
bool previewEnabled()
{
@@ -258,7 +292,7 @@ status_t BnCamera::onTransact(
case SET_PREVIEW_DISPLAY: {
LOGV("SET_PREVIEW_DISPLAY");
CHECK_INTERFACE(ICamera, data, reply);
- sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setPreviewDisplay(surface));
return NO_ERROR;
} break;
@@ -300,6 +334,26 @@ status_t BnCamera::onTransact(
releaseRecordingFrame(mem);
return NO_ERROR;
} break;
+ case GET_NUM_VIDEO_BUFFERS: {
+ LOGV("GET_NUM_VIDEO_BUFFERS");
+ CHECK_INTERFACE(ICamera, data, reply);
+ reply->writeInt32(getNumberOfVideoBuffers());
+ return NO_ERROR;
+ } break;
+ case GET_VIDEO_BUFFER: {
+ LOGV("GET_VIDEO_BUFFER");
+ CHECK_INTERFACE(ICamera, data, reply);
+ int32_t index = data.readInt32();
+ reply->writeStrongBinder(getVideoBuffer(index)->asBinder());
+ return NO_ERROR;
+ } break;
+ case STORE_META_DATA_IN_BUFFERS: {
+ LOGV("STORE_META_DATA_IN_BUFFERS");
+ CHECK_INTERFACE(ICamera, data, reply);
+ bool enabled = data.readInt32();
+ reply->writeInt32(storeMetaDataInBuffers(enabled));
+ return NO_ERROR;
+ } break;
case PREVIEW_ENABLED: {
LOGV("PREVIEW_ENABLED");
CHECK_INTERFACE(ICamera, data, reply);
@@ -376,4 +430,3 @@ status_t BnCamera::onTransact(
// ----------------------------------------------------------------------------
}; // namespace android
-
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index 5b74007..654d9dc 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -53,6 +53,31 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
SineSource.cpp \
+ recordvideo.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder
+
+LOCAL_C_INCLUDES:= \
+ $(JNI_H_INCLUDE) \
+ frameworks/base/media/libstagefright \
+ $(TOP)/frameworks/base/include/media/stagefright/openmax
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_MODULE_TAGS := debug
+
+LOCAL_MODULE:= recordvideo
+
+include $(BUILD_EXECUTABLE)
+
+
+################################################################################
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ SineSource.cpp \
audioloop.cpp
LOCAL_SHARED_LIBRARIES := \
diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp
new file mode 100644
index 0000000..f8eb514
--- /dev/null
+++ b/cmds/stagefright/recordvideo.cpp
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "SineSource.h"
+
+#include <binder/ProcessState.h>
+#include <media/stagefright/AudioPlayer.h>
+#include <media/stagefright/MediaBufferGroup.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/OMXClient.h>
+#include <media/stagefright/OMXCodec.h>
+#include <media/MediaPlayerInterface.h>
+
+using namespace android;
+
+// Print usage showing how to use this utility to record videos
+static void usage(const char *me) {
+ fprintf(stderr, "usage: %s\n", me);
+ fprintf(stderr, " -h(elp)\n");
+ fprintf(stderr, " -b bit rate in bits per second (default: 300000)\n");
+ fprintf(stderr, " -c YUV420 color format: [0] semi planar or [1] planar (default: 1)\n");
+ fprintf(stderr, " -f frame rate in frames per second (default: 30)\n");
+ fprintf(stderr, " -i I frame interval in seconds (default: 1)\n");
+ fprintf(stderr, " -n number of frames to be recorded (default: 300)\n");
+ fprintf(stderr, " -w width in pixels (default: 176)\n");
+ fprintf(stderr, " -t height in pixels (default: 144)\n");
+ fprintf(stderr, " -l encoder level. see omx il header (default: encoder specific)\n");
+ fprintf(stderr, " -p encoder profile. see omx il header (default: encoder specific)\n");
+ fprintf(stderr, " -v video codec: [0] AVC [1] M4V [2] H263 (default: 0)\n");
+ fprintf(stderr, "The output file is /sdcard/output.mp4\n");
+ exit(1);
+}
+
+class DummySource : public MediaSource {
+
+public:
+ DummySource(int width, int height, int nFrames, int fps, int colorFormat)
+ : mWidth(width),
+ mHeight(height),
+ mMaxNumFrames(nFrames),
+ mFrameRate(fps),
+ mColorFormat(colorFormat),
+ mSize((width * height * 3) / 2) {
+
+ mGroup.add_buffer(new MediaBuffer(mSize));
+
+ // Check the color format to make sure
+ // that the buffer size mSize is set correctly above.
+ CHECK(colorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+ colorFormat == OMX_COLOR_FormatYUV420Planar);
+ }
+
+ virtual sp<MetaData> getFormat() {
+ sp<MetaData> meta = new MetaData;
+ meta->setInt32(kKeyWidth, mWidth);
+ meta->setInt32(kKeyHeight, mHeight);
+ meta->setInt32(kKeyColorFormat, mColorFormat);
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+
+ return meta;
+ }
+
+ virtual status_t start(MetaData *params) {
+ mNumFramesOutput = 0;
+ return OK;
+ }
+
+ virtual status_t stop() {
+ return OK;
+ }
+
+ virtual status_t read(
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+
+ if (mNumFramesOutput % 10 == 0) {
+ fprintf(stderr, ".");
+ }
+ if (mNumFramesOutput == mMaxNumFrames) {
+ return ERROR_END_OF_STREAM;
+ }
+
+ status_t err = mGroup.acquire_buffer(buffer);
+ if (err != OK) {
+ return err;
+ }
+
+ // We don't care about the contents; we just test the video encoder.
+ // Also, by skipping the content generation, we can return from
+ // read() much faster.
+ //char x = (char)((double)rand() / RAND_MAX * 255);
+ //memset((*buffer)->data(), x, mSize);
+ (*buffer)->set_range(0, mSize);
+ (*buffer)->meta_data()->clear();
+ (*buffer)->meta_data()->setInt64(
+ kKeyTime, (mNumFramesOutput * 1000000) / mFrameRate);
+ ++mNumFramesOutput;
+
+ return OK;
+ }
+
+protected:
+ virtual ~DummySource() {}
+
+private:
+ MediaBufferGroup mGroup;
+ int mWidth, mHeight;
+ int mMaxNumFrames;
+ int mFrameRate;
+ int mColorFormat;
+ size_t mSize;
+ int64_t mNumFramesOutput;
+
+ DummySource(const DummySource &);
+ DummySource &operator=(const DummySource &);
+};
+
+enum {
+ kYUV420SP = 0,
+ kYUV420P = 1,
+};
+
+// returns -1 if mapping of the given color is unsuccessful
+// returns an omx color enum value otherwise
+static int translateColorToOmxEnumValue(int color) {
+ switch (color) {
+ case kYUV420SP:
+ return OMX_COLOR_FormatYUV420SemiPlanar;
+ case kYUV420P:
+ return OMX_COLOR_FormatYUV420Planar;
+ default:
+ fprintf(stderr, "Unsupported color: %d\n", color);
+ return -1;
+ }
+}
+
+int main(int argc, char **argv) {
+
+ // Default values for the program if not overwritten
+ int frameRateFps = 30;
+ int width = 176;
+ int height = 144;
+ int bitRateBps = 300000;
+ int iFramesIntervalSeconds = 1;
+ int colorFormat = OMX_COLOR_FormatYUV420Planar;
+ int nFrames = 300;
+ int level = -1; // Encoder specific default
+ int profile = -1; // Encoder specific default
+ int codec = 0;
+ const char *fileName = "/sdcard/output.mp4";
+
+ android::ProcessState::self()->startThreadPool();
+ int res;
+ while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) {
+ switch (res) {
+ case 'b':
+ {
+ bitRateBps = atoi(optarg);
+ break;
+ }
+
+ case 'c':
+ {
+ colorFormat = translateColorToOmxEnumValue(atoi(optarg));
+ if (colorFormat == -1) {
+ usage(argv[0]);
+ }
+ break;
+ }
+
+ case 'f':
+ {
+ frameRateFps = atoi(optarg);
+ break;
+ }
+
+ case 'i':
+ {
+ iFramesIntervalSeconds = atoi(optarg);
+ break;
+ }
+
+ case 'n':
+ {
+ nFrames = atoi(optarg);
+ break;
+ }
+
+ case 'w':
+ {
+ width = atoi(optarg);
+ break;
+ }
+
+ case 't':
+ {
+ height = atoi(optarg);
+ break;
+ }
+
+ case 'l':
+ {
+ level = atoi(optarg);
+ break;
+ }
+
+ case 'p':
+ {
+ profile = atoi(optarg);
+ break;
+ }
+
+ case 'v':
+ {
+ codec = atoi(optarg);
+ if (codec < 0 || codec > 2) {
+ usage(argv[0]);
+ }
+ break;
+ }
+
+ case 'h':
+ default:
+ {
+ usage(argv[0]);
+ break;
+ }
+ }
+ }
+
+ OMXClient client;
+ CHECK_EQ(client.connect(), OK);
+
+ status_t err = OK;
+ sp<MediaSource> source =
+ new DummySource(width, height, nFrames, frameRateFps, colorFormat);
+
+ sp<MetaData> enc_meta = new MetaData;
+ switch (codec) {
+ case 1:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ break;
+ case 2:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ break;
+ default:
+ enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ break;
+ }
+ enc_meta->setInt32(kKeyWidth, width);
+ enc_meta->setInt32(kKeyHeight, height);
+ enc_meta->setInt32(kKeySampleRate, frameRateFps);
+ enc_meta->setInt32(kKeyBitRate, bitRateBps);
+ enc_meta->setInt32(kKeyStride, width);
+ enc_meta->setInt32(kKeySliceHeight, height);
+ enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds);
+ enc_meta->setInt32(kKeyColorFormat, colorFormat);
+ if (level != -1) {
+ enc_meta->setInt32(kKeyVideoLevel, level);
+ }
+ if (profile != -1) {
+ enc_meta->setInt32(kKeyVideoProfile, profile);
+ }
+
+ sp<MediaSource> encoder =
+ OMXCodec::Create(
+ client.interface(), enc_meta, true /* createEncoder */, source);
+
+ sp<MPEG4Writer> writer = new MPEG4Writer(fileName);
+ writer->addSource(encoder);
+ int64_t start = systemTime();
+ CHECK_EQ(OK, writer->start());
+ while (!writer->reachedEOS()) {
+ }
+ err = writer->stop();
+ int64_t end = systemTime();
+
+ fprintf(stderr, "$\n");
+ client.disconnect();
+
+ if (err != OK && err != ERROR_END_OF_STREAM) {
+ fprintf(stderr, "record failed: %d\n", err);
+ return 1;
+ }
+ fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000);
+ fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start));
+ return 0;
+}
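recordvideo exercises only the encoder path: DummySource hands out correctly sized, timestamped buffers without generating content, so the reported fps reflects encoder throughput rather than frame generation. As a hypothetical example, "recordvideo -w 320 -t 240 -n 150 -v 1 -b 500000" would encode 150 QVGA frames with the MPEG-4 encoder at 500 kbps and write the result to /sdcard/output.mp4.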
diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp
index 843dddb..cf9bab1 100644
--- a/drm/drmserver/DrmManagerService.cpp
+++ b/drm/drmserver/DrmManagerService.cpp
@@ -28,25 +28,10 @@
using namespace android;
#define SUCCESS 0
-#define DRM_DIRECTORY_PERMISSION 0700
-#define DRM_PLUGINS_ROOT "/data/drm/plugins"
-#define DRM_PLUGINS_NATIVE "/data/drm/plugins/native"
-#define DRM_PLUGINS_NATIVE_DATABASES "/data/drm/plugins/native/databases"
void DrmManagerService::instantiate() {
LOGV("instantiate");
-
- int res = mkdir(DRM_PLUGINS_ROOT, DRM_DIRECTORY_PERMISSION);
- if (SUCCESS == res || EEXIST == errno) {
- res = mkdir(DRM_PLUGINS_NATIVE, DRM_DIRECTORY_PERMISSION);
- if (SUCCESS == res || EEXIST == errno) {
- res = mkdir(DRM_PLUGINS_NATIVE_DATABASES, DRM_DIRECTORY_PERMISSION);
- if (SUCCESS == res || EEXIST == errno) {
- defaultServiceManager()
- ->addService(String16("drm.drmManager"), new DrmManagerService());
- }
- }
- }
+ defaultServiceManager()->addService(String16("drm.drmManager"), new DrmManagerService());
}
DrmManagerService::DrmManagerService() {
diff --git a/include/camera/Camera.h b/include/camera/Camera.h
index e6d84ba..8d8edd6 100644
--- a/include/camera/Camera.h
+++ b/include/camera/Camera.h
@@ -22,8 +22,6 @@
namespace android {
-class ISurface;
-
/*
* A set of bit masks for specifying how the received preview frames are
* handled before the previewCallback() call.
@@ -96,6 +94,14 @@ enum {
// or CAMERA_MSG_COMPRESSED_IMAGE. This is not allowed to be set during
// preview.
CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3,
+
+ // cmdType to disable/enable shutter sound.
+ // In sendCommand passing arg1 = 0 will disable,
+ // while passing arg1 = 1 will enable the shutter sound.
+ CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4,
+
+ // cmdType to play recording sound.
+ CAMERA_CMD_PLAY_RECORDING_SOUND = 5,
};
// camera fatal errors
@@ -164,9 +170,8 @@ public:
status_t getStatus() { return mStatus; }
- // pass the buffered ISurface to the camera service
+ // pass the buffered Surface to the camera service
status_t setPreviewDisplay(const sp<Surface>& surface);
- status_t setPreviewDisplay(const sp<ISurface>& surface);
// start preview mode, must call setPreviewDisplay first
status_t startPreview();
@@ -207,6 +212,15 @@ public:
// send command to camera driver
status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+ // return the total number of available video buffers.
+ int32_t getNumberOfVideoBuffers() const;
+
+ // return the individual video buffer corresponding to the given index.
+ sp<IMemory> getVideoBuffer(int32_t index) const;
+
+ // tell camera hal to store meta data or real YUV in video buffers.
+ status_t storeMetaDataInBuffers(bool enabled);
+
void setListener(const sp<CameraListener>& listener);
void setPreviewCallbackFlags(int preview_callback_flag);
diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h
index 6a66e3c..5465441 100644
--- a/include/camera/CameraHardwareInterface.h
+++ b/include/camera/CameraHardwareInterface.h
@@ -18,8 +18,11 @@
#define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H
#include <binder/IMemory.h>
+#include <ui/egl/android_natives.h>
#include <utils/RefBase.h>
#include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBuffer.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
@@ -86,8 +89,8 @@ class CameraHardwareInterface : public virtual RefBase {
public:
virtual ~CameraHardwareInterface() { }
- /** Return the IMemoryHeap for the preview image heap */
- virtual sp<IMemoryHeap> getPreviewHeap() const = 0;
+ /** Set the ANativeWindow to which preview frames are sent */
+ virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0;
/** Return the IMemoryHeap for the raw image heap */
virtual sp<IMemoryHeap> getRawHeap() const = 0;
@@ -143,6 +146,82 @@ public:
virtual bool previewEnabled() = 0;
/**
+ * Retrieve the total number of available buffers from camera hal for passing
+ * video frame data in a recording session. Must be called again if a new
+ * recording session is started.
+ *
+ * This method should be called after startRecording(), since
+ * some camera hal may choose to allocate the video buffers only after
+ * recording is started.
+ *
+ * Some camera hal may not implement this method, and 0 can be returned to
+ * indicate that this feature is not available.
+ *
+ * @return the number of video buffers that camera hal makes available.
+ * Zero (0) is returned to indicate that camera hal does not support
+ * this feature.
+ */
+ virtual int32_t getNumberOfVideoBuffers() const { return 0; }
+
+ /**
+ * Retrieve the video buffer corresponding to the given index in a
+ * recording session. Must be called again if a new recording session
+ * is started.
+ *
+ * It allows a client to retrieve all video buffers that camera hal makes
+ * available for passing video frame data by calling this method with all
+ * valid index values. The valid index value ranges from 0 to n, where
+ * n = getNumberOfVideoBuffers() - 1. With an index outside of the valid
+ * range, 0 must be returned. This method should be called after
+ * startRecording().
+ *
+ * The video buffers should NOT be modified/released by camera hal
+ * until stopRecording() is called and all outstanding video buffers
+ * previously sent out via CAMERA_MSG_VIDEO_FRAME have been released
+ * via releaseVideoBuffer().
+ *
+ * @param index an index to retrieve the corresponding video buffer.
+ *
+ * @return the video buffer corresponding to the given index.
+ */
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const { return 0; }
+
+ /**
+ * Request the camera hal to store meta data or real YUV data in
+ * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a
+ * recording session. If it is not called, the default camera
+ * hal behavior is to store real YUV data in the video buffers.
+ *
+ * This method should be called before startRecording() in order
+ * to be effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the
+ * receiver of the video buffers to interpret the contents and
+ * to find the actual frame data with the help of the meta data
+ * in the buffer. How this is done is outside of the scope of
+ * this method.
+ *
+ * Some camera hal may not support storing meta data in the video
+ * buffers, but all camera hal should support storing real YUV data
+ * in the video buffers. If the camera hal does not support storing
+ * the meta data in the video buffers when it is requested to do
+ * so, INVALID_OPERATION must be returned. It is very useful for
+ * the camera hal to pass meta data rather than the actual frame
+ * data directly to the video encoder, since the amount of the
+ * uncompressed frame data can be very large if video size is large.
+ *
+ * @param enable true to instruct the camera hal to store
+ * meta data in the video buffers; false to instruct
+ * the camera hal to store real YUV data in the video
+ * buffers.
+ *
+ * @return OK on success.
+ */
+ virtual status_t storeMetaDataInBuffers(bool enable) {
+ return enable? INVALID_OPERATION: OK;
+ }
+
+ /**
* Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME
* message is sent with the corresponding frame. Every record frame must be released
* by calling releaseRecordingFrame().
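storeMetaDataInBuffers() stays optional for a HAL (the default above declines the request), and getNumberOfVideoBuffers()/getVideoBuffer() default to "feature unavailable". A sketch of a HAL that opts in; the class and members are illustrative and the remaining pure-virtual overrides are omitted:

    // Hypothetical HAL fragment, not a complete implementation.
    class MyCameraHardware : public CameraHardwareInterface {
        // ... required overrides (startPreview(), startRecording(), ...) omitted ...
        virtual status_t storeMetaDataInBuffers(bool enable) {
            mStoreMetaData = enable;            // supports both modes, so always OK
            return OK;
        }
        virtual int32_t getNumberOfVideoBuffers() const {
            return mVideoBuffers.size();        // assumed to be filled in startRecording()
        }
        virtual sp<IMemory> getVideoBuffer(int32_t index) const {
            if (index < 0 || index >= (int32_t)mVideoBuffers.size()) return 0;
            return mVideoBuffers[index];
        }
    private:
        bool mStoreMetaData;
        Vector<sp<IMemory> > mVideoBuffers;     // hypothetical per-frame buffers
    };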
diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h
index 53039a0..60031a4 100644
--- a/include/camera/CameraParameters.h
+++ b/include/camera/CameraParameters.h
@@ -59,6 +59,27 @@ public:
void setPreviewSize(int width, int height);
void getPreviewSize(int *width, int *height) const;
void getSupportedPreviewSizes(Vector<Size> &sizes) const;
+
+ // Set the dimensions in pixels to the given width and height
+ // for video frames. The given width and height must be one
+ // of the supported dimensions returned from
+ // getSupportedVideoSizes(). Must not be called if
+ // getSupportedVideoSizes() returns an empty Vector of Size.
+ void setVideoSize(int width, int height);
+ // Retrieve the current dimensions (width and height)
+ // in pixels for video frames, which must be one of the
+ // supported dimensions returned from getSupportedVideoSizes().
+ // Must not be called if getSupportedVideoSizes() returns an
+ // empty Vector of Size.
+ void getVideoSize(int *width, int *height) const;
+ // Retrieve a Vector of supported dimensions (width and height)
+ // in pixels for video frames. If the returned Vector is empty,
+ // the camera does not support calls to setVideoSize()
+ // or getVideoSize(). In addition, it also indicates that
+ // the camera only has a single output, and does not have
+ // separate outputs for video frames and preview frames.
+ void getSupportedVideoSizes(Vector<Size> &sizes) const;
+
void setPreviewFrameRate(int fps);
int getPreviewFrameRate() const;
void getPreviewFpsRange(int *min_fps, int *max_fps) const;
@@ -281,6 +302,16 @@ public:
// Example value: "0.95,1.9,Infinity" or "0.049,0.05,0.051". Read only.
static const char KEY_FOCUS_DISTANCES[];
+ // The current dimensions in pixels (width x height) for video frames.
+ // The width and height must be one of the supported sizes retrieved
+ // via KEY_SUPPORTED_VIDEO_SIZES.
+ // Example value: "1280x720". Read/write.
+ static const char KEY_VIDEO_SIZE[];
+ // A list of the supported dimensions in pixels (width x height)
+ // for video frames. See CAMERA_MSG_VIDEO_FRAME for details in
+ // frameworks/base/include/camera/Camera.h.
+ // Example: "176x144,1280x720". Read only.
+ static const char KEY_SUPPORTED_VIDEO_SIZES[];
// The image format for video frames. See CAMERA_MSG_VIDEO_FRAME in
// frameworks/base/include/camera/Camera.h.
// Example value: "yuv420sp" or PIXEL_FORMAT_XXX constants. Read only.
@@ -354,7 +385,10 @@ public:
// for barcode reading.
static const char SCENE_MODE_BARCODE[];
- // Formats for setPreviewFormat and setPictureFormat.
+ // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT,
+ // and KEY_VIDEO_FRAME_FORMAT
+ // Planar variant of the YUV420 color format
+ static const char PIXEL_FORMAT_YUV420P[];
static const char PIXEL_FORMAT_YUV422SP[];
static const char PIXEL_FORMAT_YUV420SP[]; // NV21
static const char PIXEL_FORMAT_YUV422I[]; // YUY2
diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h
index 6fcf9e5..b69e075 100644
--- a/include/camera/ICamera.h
+++ b/include/camera/ICamera.h
@@ -20,7 +20,7 @@
#include <utils/RefBase.h>
#include <binder/IInterface.h>
#include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <binder/IMemory.h>
#include <utils/String8.h>
#include <camera/Camera.h>
@@ -45,8 +45,8 @@ public:
// allow other processes to use this ICamera interface
virtual status_t unlock() = 0;
- // pass the buffered ISurface to the camera service
- virtual status_t setPreviewDisplay(const sp<ISurface>& surface) = 0;
+ // pass the buffered Surface to the camera service
+ virtual status_t setPreviewDisplay(const sp<Surface>& surface) = 0;
// set the preview callback flag to affect how the received frames from
// preview are handled.
@@ -90,6 +90,15 @@ public:
// send command to camera driver
virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0;
+
+ // return the total number of available video buffers
+ virtual int32_t getNumberOfVideoBuffers() const = 0;
+
+ // return the individual video buffer corresponding to the given index.
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const = 0;
+
+ // tell the camera hal to store meta data or real YUV data in video buffers.
+ virtual status_t storeMetaDataInBuffers(bool enabled) = 0;
};
// ----------------------------------------------------------------------------
diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h
index af9a7ed..a1ce113 100644
--- a/include/media/IMediaPlayer.h
+++ b/include/media/IMediaPlayer.h
@@ -25,6 +25,7 @@ namespace android {
class Parcel;
class ISurface;
+class Surface;
class IMediaPlayer: public IInterface
{
@@ -33,7 +34,8 @@ public:
virtual void disconnect() = 0;
- virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setVideoISurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setVideoSurface(const sp<Surface>& surface) = 0;
virtual status_t prepareAsync() = 0;
virtual status_t start() = 0;
virtual status_t stop() = 0;
diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h
index 54adca8..28be7c1 100644
--- a/include/media/IMediaRecorder.h
+++ b/include/media/IMediaRecorder.h
@@ -22,7 +22,7 @@
namespace android {
-class ISurface;
+class Surface;
class ICamera;
class IMediaRecorderClient;
@@ -32,7 +32,7 @@ public:
DECLARE_META_INTERFACE(MediaRecorder);
virtual status_t setCamera(const sp<ICamera>& camera) = 0;
- virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0;
virtual status_t setVideoSource(int vs) = 0;
virtual status_t setAudioSource(int as) = 0;
virtual status_t setOutputFormat(int of) = 0;
@@ -40,6 +40,7 @@ public:
virtual status_t setAudioEncoder(int ae) = 0;
virtual status_t setOutputFile(const char* path) = 0;
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+ virtual status_t setOutputFileAuxiliary(int fd) = 0;
virtual status_t setVideoSize(int width, int height) = 0;
virtual status_t setVideoFrameRate(int frames_per_second) = 0;
virtual status_t setParameters(const String8& params) = 0;
@@ -68,4 +69,3 @@ public:
}; // namespace android
#endif // ANDROID_IMEDIARECORDER_H
-
diff --git a/include/media/IOMX.h b/include/media/IOMX.h
index 2f61cbe..fa775e7 100644
--- a/include/media/IOMX.h
+++ b/include/media/IOMX.h
@@ -19,6 +19,7 @@
#define ANDROID_IOMX_H_
#include <binder/IInterface.h>
+#include <ui/GraphicBuffer.h>
#include <utils/List.h>
#include <utils/String8.h>
@@ -78,10 +79,20 @@ public:
node_id node, OMX_INDEXTYPE index,
const void *params, size_t size) = 0;
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0;
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) = 0;
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0;
+
// This API clearly only makes sense if the caller lives in the
// same process as the callee, i.e. is the media_server, as the
// returned "buffer_data" pointer is just that, a pointer into local
diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h
index 0521709..eae0d7b 100644
--- a/include/media/MediaPlayerInterface.h
+++ b/include/media/MediaPlayerInterface.h
@@ -33,6 +33,7 @@ namespace android {
class Parcel;
class ISurface;
+class Surface;
template<typename T> class SortedVector;
@@ -105,7 +106,8 @@ public:
const KeyedVector<String8, String8> *headers = NULL) = 0;
virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0;
- virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setVideoISurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setVideoSurface(const sp<Surface>& surface) = 0;
virtual status_t prepare() = 0;
virtual status_t prepareAsync() = 0;
virtual status_t start() = 0;
diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h
index c3cd361..aa97874 100644
--- a/include/media/MediaProfiles.h
+++ b/include/media/MediaProfiles.h
@@ -25,7 +25,20 @@ namespace android {
enum camcorder_quality {
CAMCORDER_QUALITY_LOW = 0,
- CAMCORDER_QUALITY_HIGH = 1
+ CAMCORDER_QUALITY_HIGH = 1,
+ CAMCORDER_QUALITY_QCIF = 2,
+ CAMCORDER_QUALITY_CIF = 3,
+ CAMCORDER_QUALITY_480P = 4,
+ CAMCORDER_QUALITY_720P = 5,
+ CAMCORDER_QUALITY_1080P = 6,
+
+ CAMCORDER_QUALITY_TIME_LAPSE_LOW = 1000,
+ CAMCORDER_QUALITY_TIME_LAPSE_HIGH = 1001,
+ CAMCORDER_QUALITY_TIME_LAPSE_QCIF = 1002,
+ CAMCORDER_QUALITY_TIME_LAPSE_CIF = 1003,
+ CAMCORDER_QUALITY_TIME_LAPSE_480P = 1004,
+ CAMCORDER_QUALITY_TIME_LAPSE_720P = 1005,
+ CAMCORDER_QUALITY_TIME_LAPSE_1080P = 1006
};
enum video_decoder {
@@ -68,6 +81,12 @@ public:
camcorder_quality quality) const;
/**
+ * Returns true if a profile for the given camera at the given quality exists,
+ * or false if not.
+ */
+ bool hasCamcorderProfile(int cameraId, camcorder_quality quality) const;
+
+ /**
* Returns the output file formats supported.
*/
Vector<output_format> getOutputFileFormats() const;
@@ -252,6 +271,8 @@ private:
Vector<int> mLevels;
};
+ int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const;
+
// Debug
static void logVideoCodec(const VideoCodec& codec);
static void logAudioCodec(const AudioCodec& codec);
@@ -281,8 +302,25 @@ private:
// If the xml configuration file does not exist, use hard-coded values
static MediaProfiles* createDefaultInstance();
- static CamcorderProfile *createDefaultCamcorderLowProfile();
- static CamcorderProfile *createDefaultCamcorderHighProfile();
+
+ static CamcorderProfile *createDefaultCamcorderQcifProfile(camcorder_quality quality);
+ static CamcorderProfile *createDefaultCamcorderCifProfile(camcorder_quality quality);
+ static void createDefaultCamcorderLowProfiles(
+ MediaProfiles::CamcorderProfile **lowProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificProfile);
+ static void createDefaultCamcorderHighProfiles(
+ MediaProfiles::CamcorderProfile **highProfile,
+ MediaProfiles::CamcorderProfile **highSpecificProfile);
+
+ static CamcorderProfile *createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality);
+ static CamcorderProfile *createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality);
+ static void createDefaultCamcorderTimeLapseLowProfiles(
+ MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile);
+ static void createDefaultCamcorderTimeLapseHighProfiles(
+ MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile);
+
static void createDefaultCamcorderProfiles(MediaProfiles *profiles);
static void createDefaultVideoEncoders(MediaProfiles *profiles);
static void createDefaultAudioEncoders(MediaProfiles *profiles);
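camcorder_quality now carries resolution-specific entries plus a parallel time-lapse range starting at 1000, and hasCamcorderProfile() lets callers probe a camera before committing to a quality. A fallback sketch, assuming the usual MediaProfiles::getInstance() singleton accessor (not part of this hunk):

    // Hedged sketch: prefer 720p, fall back to the generic high profile.
    MediaProfiles *profiles = MediaProfiles::getInstance();
    int cameraId = 0;
    camcorder_quality quality = CAMCORDER_QUALITY_720P;
    if (!profiles->hasCamcorderProfile(cameraId, quality)) {
        quality = CAMCORDER_QUALITY_HIGH;
    }
    // Individual parameters can then be read for the chosen quality, e.g.
    // profiles->getCamcorderProfileParamByName("vid.bps", cameraId, quality)
    // (key name shown for illustration; see MediaProfiles.cpp for the actual keys).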
diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h
index 5e9e368..c42346e 100644
--- a/include/media/MediaRecorderBase.h
+++ b/include/media/MediaRecorderBase.h
@@ -22,7 +22,7 @@
namespace android {
-class ISurface;
+class Surface;
struct MediaRecorderBase {
MediaRecorderBase() {}
@@ -37,9 +37,10 @@ struct MediaRecorderBase {
virtual status_t setVideoSize(int width, int height) = 0;
virtual status_t setVideoFrameRate(int frames_per_second) = 0;
virtual status_t setCamera(const sp<ICamera>& camera) = 0;
- virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0;
+ virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0;
virtual status_t setOutputFile(const char *path) = 0;
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0;
+ virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;}
virtual status_t setParameters(const String8& params) = 0;
virtual status_t setListener(const sp<IMediaRecorderClient>& listener) = 0;
virtual status_t prepare() = 0;
diff --git a/include/media/PVMediaRecorder.h b/include/media/PVMediaRecorder.h
index c091c39..4b44ccc 100644
--- a/include/media/PVMediaRecorder.h
+++ b/include/media/PVMediaRecorder.h
@@ -23,7 +23,7 @@
namespace android {
-class ISurface;
+class Surface;
class ICamera;
class AuthorDriverWrapper;
@@ -41,7 +41,7 @@ public:
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setOutputFile(const char *path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
virtual status_t setParameters(const String8& params);
@@ -66,4 +66,3 @@ private:
}; // namespace android
#endif // ANDROID_PVMEDIARECORDER_H
-
diff --git a/include/media/PVPlayer.h b/include/media/PVPlayer.h
index df50981..657e7a6 100644
--- a/include/media/PVPlayer.h
+++ b/include/media/PVPlayer.h
@@ -43,7 +43,8 @@ public:
const char *url, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface>& surface);
+ virtual status_t setVideoISurface(const sp<ISurface>& surface);
+ virtual status_t setVideoSurface(const sp<Surface>& surface);
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h
index 5ab1640..32b6fa1 100644
--- a/include/media/mediarecorder.h
+++ b/include/media/mediarecorder.h
@@ -173,6 +173,7 @@ public:
status_t setAudioEncoder(int ae);
status_t setOutputFile(const char* path);
status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ status_t setOutputFileAuxiliary(int fd);
status_t setVideoSize(int width, int height);
status_t setVideoFrameRate(int frames_per_second);
status_t setParameters(const String8& params);
@@ -199,6 +200,7 @@ private:
bool mIsAudioEncoderSet;
bool mIsVideoEncoderSet;
bool mIsOutputFileSet;
+ bool mIsAuxiliaryOutputFileSet;
Mutex mLock;
Mutex mNotifyLock;
};
diff --git a/include/media/mediascanner.h b/include/media/mediascanner.h
index 0d397ac..74c9d5d 100644
--- a/include/media/mediascanner.h
+++ b/include/media/mediascanner.h
@@ -38,8 +38,7 @@ struct MediaScanner {
typedef bool (*ExceptionCheck)(void* env);
virtual status_t processDirectory(
- const char *path, const char *extensions,
- MediaScannerClient &client,
+ const char *path, MediaScannerClient &client,
ExceptionCheck exceptionCheck, void *exceptionEnv);
void setLocale(const char *locale);
@@ -55,9 +54,8 @@ private:
char *mLocale;
status_t doProcessDirectory(
- char *path, int pathRemaining, const char *extensions,
- MediaScannerClient &client, ExceptionCheck exceptionCheck,
- void *exceptionEnv);
+ char *path, int pathRemaining, MediaScannerClient &client,
+ ExceptionCheck exceptionCheck, void *exceptionEnv);
MediaScanner(const MediaScanner &);
MediaScanner &operator=(const MediaScanner &);
diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h
index 3192d03..e6c9f93 100644
--- a/include/media/stagefright/CameraSource.h
+++ b/include/media/stagefright/CameraSource.h
@@ -20,39 +20,167 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
#include <utils/List.h>
#include <utils/RefBase.h>
-#include <utils/threads.h>
namespace android {
-class ICamera;
class IMemory;
class Camera;
+class Surface;
class CameraSource : public MediaSource, public MediaBufferObserver {
public:
+ /**
+ * Factory method to create a new CameraSource using the current
+ * settings (such as video size, frame rate, color format, etc)
+ * from the default camera.
+ *
+ * @return NULL on error.
+ */
static CameraSource *Create();
- static CameraSource *CreateFromCamera(const sp<Camera> &camera);
+
+ /**
+ * Factory method to create a new CameraSource.
+ *
+ * @param camera the video input frame data source. If it is NULL,
+ * we will try to connect to the camera with the given
+ * cameraId.
+ *
+ * @param cameraId the id of the camera that the source will connect
+ * to if camera is NULL; otherwise ignored.
+ *
+ * @param videoSize the dimension (in pixels) of the video frame
+ * @param frameRate the target frames per second
+ * @param surface the preview surface for display where preview
+ * frames are sent to
+ * @param storeMetaDataInVideoBuffers true to request the camera
+ * source to store meta data in video buffers; false to
+ * request the camera source to store real YUV frame data
+ * in the video buffers. The camera source may not support
+ * storing meta data in video buffers, if so, a request
+ * to do that will NOT be honored. To find out whether
+ * meta data is actually being stored in video buffers
+ * during recording, call isMetaDataStoredInVideoBuffers().
+ *
+ * @return NULL on error.
+ */
+ static CameraSource *CreateFromCamera(const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers = false);
virtual ~CameraSource();
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+ /**
+ * Check whether a CameraSource object is properly initialized.
+ * Must call this method before stop().
+ * @return OK if initialization has successfully completed.
+ */
+ virtual status_t initCheck() const;
+
+ /**
+ * Returns the MetaData associated with the CameraSource,
+ * including:
+ * kKeyColorFormat: YUV color format of the video frames
+ * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames
+ * kKeySampleRate: frame rate in frames per second
+ * kKeyMIMEType: always fixed to be MEDIA_MIMETYPE_VIDEO_RAW
+ */
virtual sp<MetaData> getFormat();
- virtual status_t read(
- MediaBuffer **buffer, const ReadOptions *options = NULL);
+ /**
+ * Retrieve the total number of video buffers available from
+ * this source.
+ *
+ * This method is useful if these video buffers are used
+ * for passing video frame data to other media components,
+ * such as OMX video encoders, in order to eliminate the
+ * memcpy of the data.
+ *
+ * @return the total number of video buffers. Returns 0 to
+ * indicate that this source does not make the video
+ * buffer information available.
+ */
+ size_t getNumberOfVideoBuffers() const;
+
+ /**
+ * Retrieve the individual video buffer available from
+ * this source.
+ *
+ * @param index the index corresponding to the video buffer.
+ * Valid range of the index is [0, n], where n =
+ * getNumberOfVideoBuffers() - 1.
+ *
+ * @return the video buffer corresponding to the given index.
+ * If index is out of range, 0 should be returned.
+ */
+ sp<IMemory> getVideoBuffer(size_t index) const;
+
+ /**
+ * Tell whether this camera source stores meta data or real YUV
+ * frame data in video buffers.
+ *
+ * @return true if meta data is stored in the video
+ * buffers; false if real YUV data is stored in
+ * the video buffers.
+ */
+ bool isMetaDataStoredInVideoBuffers() const;
virtual void signalBufferReturned(MediaBuffer* buffer);
-private:
- friend class CameraSourceListener;
+protected:
+ enum CameraFlags {
+ FLAGS_SET_CAMERA = 1L << 0,
+ FLAGS_HOT_CAMERA = 1L << 1,
+ };
+
+ int32_t mCameraFlags;
+ Size mVideoSize;
+ int32_t mVideoFrameRate;
+ int32_t mColorFormat;
+ status_t mInitCheck;
- sp<Camera> mCamera;
+ sp<Camera> mCamera;
+ sp<Surface> mSurface;
sp<MetaData> mMeta;
+ int64_t mStartTimeUs;
+ int32_t mNumFramesReceived;
+ int64_t mLastFrameTimestampUs;
+ bool mStarted;
+
+ CameraSource(const sp<ICamera>& camera, int32_t cameraId,
+ Size videoSize, int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers);
+
+ virtual void startCameraRecording();
+ virtual void stopCameraRecording();
+ virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+ // Returns true if need to skip the current frame.
+ // Called from dataCallbackTimestamp.
+ virtual bool skipCurrentFrame(int64_t timestampUs) {return false;}
+
+ // Callback called when still camera raw data is available.
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {}
+
+ virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data);
+
+private:
+ friend class CameraSourceListener;
+
Mutex mLock;
Condition mFrameAvailableCondition;
Condition mFrameCompleteCondition;
@@ -60,25 +188,33 @@ private:
List<sp<IMemory> > mFramesBeingEncoded;
List<int64_t> mFrameTimes;
- int64_t mStartTimeUs;
int64_t mFirstFrameTimeUs;
- int64_t mLastFrameTimestampUs;
- int32_t mNumFramesReceived;
int32_t mNumFramesEncoded;
int32_t mNumFramesDropped;
int32_t mNumGlitches;
int64_t mGlitchDurationThresholdUs;
bool mCollectStats;
- bool mStarted;
-
- CameraSource(const sp<Camera> &camera);
-
- void dataCallbackTimestamp(
- int64_t timestampUs, int32_t msgType, const sp<IMemory> &data);
+ bool mIsMetaDataStoredInVideoBuffers;
void releaseQueuedFrames();
void releaseOneRecordingFrame(const sp<IMemory>& frame);
+
+ status_t init(const sp<ICamera>& camera, int32_t cameraId,
+ Size videoSize, int32_t frameRate,
+ bool storeMetaDataInVideoBuffers);
+ status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId);
+ status_t isCameraColorFormatSupported(const CameraParameters& params);
+ status_t configureCamera(CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate);
+
+ status_t checkVideoSize(const CameraParameters& params,
+ int32_t width, int32_t height);
+
+ status_t checkFrameRate(const CameraParameters& params,
+ int32_t frameRate);
+
CameraSource(const CameraSource &);
CameraSource &operator=(const CameraSource &);
};
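With the expanded factory, a CameraSource plugs into the same encode pipeline that recordvideo.cpp builds around DummySource. A sketch, assuming a connected OMXClient ("client"), a preview Surface ("previewSurface"), and a hypothetical buildEncoderMeta() helper that fills the encoder MetaData from the source format; error handling is omitted:

    // Hedged recording sketch using only APIs visible in this diff plus the
    // assumptions named above.
    Size videoSize;
    videoSize.width = 1280;
    videoSize.height = 720;
    sp<CameraSource> source = CameraSource::CreateFromCamera(
            NULL /* connect by id */, 0 /* cameraId */, videoSize, 30 /* fps */,
            previewSurface, true /* ask for meta data in video buffers */);
    if (source == 0 || source->initCheck() != OK) {
        // camera unavailable or settings rejected
    }
    sp<MetaData> format = source->getFormat();   // width/height/colorFormat/frame rate
    sp<MediaSource> encoder = OMXCodec::Create(
            client.interface(), buildEncoderMeta(format), true /* createEncoder */, source);
    sp<MPEG4Writer> writer = new MPEG4Writer("/sdcard/camera.mp4");
    writer->addSource(encoder);
    writer->start();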
diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h
new file mode 100644
index 0000000..afe7287
--- /dev/null
+++ b/include/media/stagefright/CameraSourceTimeLapse.h
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CAMERA_SOURCE_TIME_LAPSE_H_
+
+#define CAMERA_SOURCE_TIME_LAPSE_H_
+
+#include <pthread.h>
+
+#include <utils/RefBase.h>
+#include <utils/threads.h>
+
+namespace android {
+
+class ICamera;
+class IMemory;
+class Camera;
+
+class CameraSourceTimeLapse : public CameraSource {
+public:
+ static CameraSourceTimeLapse *CreateFromCamera(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+ virtual ~CameraSourceTimeLapse();
+
+ // If the frame capture interval is large, read will block for a long time.
+ // Due to the way the mediaRecorder framework works, a stop() call from
+ // mediaRecorder waits until the read returns, causing a long wait for
+ // stop() to return. To avoid this, we can make read() return a copy of the
+ // last read frame with the same time stamp frequently. This keeps the
+ // read() call from blocking too long. Calling this function quickly
+ // captures another frame, keeps its copy, and enables this mode of read()
+ // returning quickly.
+ void startQuickReadReturns();
+
+private:
+ // If true, will use still camera takePicture() for time lapse frames
+ // If false, will use the video camera frames instead.
+ bool mUseStillCameraForTimeLapse;
+
+ // Size of picture taken from still camera. This may be larger than the size
+ // of the video, as still camera may not support the exact video resolution
+ // demanded. See setPictureSizeToClosestSupported().
+ int32_t mPictureWidth;
+ int32_t mPictureHeight;
+
+ // size of the encoded video.
+ int32_t mVideoWidth;
+ int32_t mVideoHeight;
+
+ // True if we need to crop the still camera image to get the video frame.
+ bool mNeedCropping;
+
+ // Start location of the cropping rectangle.
+ int32_t mCropRectStartX;
+ int32_t mCropRectStartY;
+
+ // Time between capture of two frames during time lapse recording
+ // Negative value indicates that timelapse is disabled.
+ int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+
+ // Time between two frames in final video (1/frameRate)
+ int64_t mTimeBetweenTimeLapseVideoFramesUs;
+
+ // Real timestamp of the last encoded time lapse frame
+ int64_t mLastTimeLapseFrameRealTimestampUs;
+
+ // Thread id of thread which takes still picture and sleeps in a loop.
+ pthread_t mThreadTimeLapse;
+
+ // Variable set in dataCallbackTimestamp() to help skipCurrentFrame()
+ // to know if current frame needs to be skipped.
+ bool mSkipCurrentFrame;
+
+ // Lock for accessing mCameraIdle
+ Mutex mCameraIdleLock;
+
+ // Condition variable to wait on if camera is not yet idle. Once the
+ // camera gets idle, this variable will be signalled.
+ Condition mCameraIdleCondition;
+
+ // True if camera is in preview mode and ready for takePicture().
+ // False after a call to takePicture() but before the final compressed
+ // data callback has been called and preview has been restarted.
+ volatile bool mCameraIdle;
+
+ // True if stop() is waiting for camera to get idle, i.e. for the last
+ // takePicture() to complete. This is needed so that dataCallbackTimestamp()
+ // can return immediately.
+ volatile bool mStopWaitingForIdleCamera;
+
+ // Lock for accessing quick stop variables.
+ Mutex mQuickStopLock;
+
+ // Condition variable to wake up still picture thread.
+ Condition mTakePictureCondition;
+
+ // mQuickStop is set to true if we use quick read() returns, otherwise it is set
+ // to false. Once in this mode read() returns a copy of the last read frame
+ // with the same time stamp. See startQuickReadReturns().
+ volatile bool mQuickStop;
+
+ // Forces the next frame passed to dataCallbackTimestamp() to be read
+ // as a time lapse frame. Used by startQuickReadReturns() so that the next
+ // frame wakes up any blocking read.
+ volatile bool mForceRead;
+
+ // Stores a copy of the MediaBuffer read in the last read() call after
+ // mQuickStop was true.
+ MediaBuffer* mLastReadBufferCopy;
+
+ // Status code for last read.
+ status_t mLastReadStatus;
+
+ CameraSourceTimeLapse(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs);
+
+ // Wrapper over CameraSource::signalBufferReturned() to implement quick stop.
+ // It only handles the case when mLastReadBufferCopy is signalled. Otherwise
+ // it calls the base class' function.
+ virtual void signalBufferReturned(MediaBuffer* buffer);
+
+ // Wrapper over CameraSource::read() to implement quick stop.
+ virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+ // For still camera case starts a thread which calls camera's takePicture()
+ // in a loop. For video camera case, just starts the camera's video recording.
+ virtual void startCameraRecording();
+
+ // For the still camera case, joins the thread created in startCameraRecording().
+ // For the video camera case, just stops the camera's video recording.
+ virtual void stopCameraRecording();
+
+ // For the still camera case, nothing needs to be done as the memory is
+ // locally allocated with refcounting.
+ // For the video camera case, just tells the camera to release the frame.
+ virtual void releaseRecordingFrame(const sp<IMemory>& frame);
+
+ // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current
+ // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame.
+ virtual bool skipCurrentFrame(int64_t timestampUs);
+
+ // Handles the callback for raw frame data from the still camera.
+ // Creates a copy of the frame data as the camera can reuse the frame memory
+ // once this callback returns. The function also sets a new timestamp corresponding
+ // to one frame time ahead of the last encoded frame's time stamp. It then
+ // calls dataCallbackTimestamp() of the base class with the copied data and the
+ // modified timestamp, so the base class will think that it received the frame
+ // from a video camera and proceed as usual.
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data);
+
+ // In the video camera case, calls skipFrameAndModifyTimeStamp() to modify the
+ // timestamp and set mSkipCurrentFrame.
+ // Then it calls the base class' CameraSource::dataCallbackTimestamp().
+ virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data);
+
+ // Convenience function to fill mLastReadBufferCopy from the just read
+ // buffer.
+ void fillLastReadBufferCopy(MediaBuffer& sourceBuffer);
+
+ // If the passed in size (width x height) is a supported preview size,
+ // the function sets the camera's preview size to it and returns true.
+ // Otherwise returns false.
+ bool trySettingPreviewSize(int32_t width, int32_t height);
+
+ // The still camera may not support the requested video width and height.
+ // We look through the still camera's supported picture sizes and choose
+ // the smallest one whose dimensions are at least as large as the corresponding
+ // video dimensions. The still picture will then be cropped to get the video frame.
+ // The function returns true if the camera supports a picture size greater than
+ // or equal to the passed in width and height, and false otherwise.
+ bool setPictureSizeToClosestSupported(int32_t width, int32_t height);
+
+ // Computes the offset of the rectangle from where to start cropping the
+ // still image into the video frame. We choose the center of the image to be
+ // cropped. The offset is stored in (mCropRectStartX, mCropRectStartY).
+ bool computeCropRectangleOffset();
+
+ // Crops the source data into a smaller image starting at
+ // (mCropRectStartX, mCropRectStartY) and of the size of the video frame.
+ // The data is returned into a newly allocated IMemory.
+ sp<IMemory> cropYUVImage(const sp<IMemory> &source_data);
+
+ // When the video camera is used for time lapse capture, returns true
+ // until enough time has passed for the next time lapse frame. When
+ // the frame needs to be encoded, it returns false and also modifies
+ // the time stamp to be one frame time ahead of the last encoded
+ // frame's time stamp.
+ bool skipFrameAndModifyTimeStamp(int64_t *timestampUs);
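The gating described above can be pictured with a short sketch. This is an editorial illustration based only on the comment, not code from this change; lastEncodedTimestampUs is a hypothetical stand-in for whatever the base class tracks as the last encoded frame's time stamp.

bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
    if (mLastTimeLapseFrameRealTimestampUs == 0) {
        // First frame: remember when it arrived and encode it unchanged.
        mLastTimeLapseFrameRealTimestampUs = *timestampUs;
        return false;
    }
    if (*timestampUs < mLastTimeLapseFrameRealTimestampUs +
            mTimeBetweenTimeLapseFrameCaptureUs) {
        // Not enough real time has elapsed since the last encoded frame.
        return true;
    }
    // Encode this frame, stamped one video-frame interval after the last
    // encoded frame (lastEncodedTimestampUs is hypothetical, see above).
    mLastTimeLapseFrameRealTimestampUs = *timestampUs;
    *timestampUs = lastEncodedTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
    return false;
}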
+
+ // Wrapper to enter threadTimeLapseEntry()
+ static void *ThreadTimeLapseWrapper(void *me);
+
+ // Runs a loop which sleeps until a still picture is required
+ // and then calls mCamera->takePicture() to take the still picture.
+ // Used only in the case mUseStillCameraForTimeLapse = true.
+ void threadTimeLapseEntry();
+
+ // Wrapper to enter threadStartPreview()
+ static void *ThreadStartPreviewWrapper(void *me);
+
+ // Starts the camera's preview.
+ void threadStartPreview();
+
+ // Starts thread ThreadStartPreviewWrapper() for restarting preview.
+ // Needs to be done in a thread so that dataCallback(), which calls this function,
+ // can return, and the camera can know that takePicture() is done.
+ void restartPreview();
+
+ // Creates a copy of source_data in newly allocated memory whose concrete type is MemoryBase.
+ sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data);
+
+ CameraSourceTimeLapse(const CameraSourceTimeLapse &);
+ CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &);
+};
+
+} // namespace android
+
+#endif // CAMERA_SOURCE_TIME_LAPSE_H_
diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h
index 221c679..b009e1b 100644
--- a/include/media/stagefright/HardwareAPI.h
+++ b/include/media/stagefright/HardwareAPI.h
@@ -21,10 +21,86 @@
#include <media/stagefright/OMXPluginBase.h>
#include <media/stagefright/VideoRenderer.h>
#include <surfaceflinger/ISurface.h>
+#include <ui/android_native_buffer.h>
#include <utils/RefBase.h>
#include <OMX_Component.h>
+namespace android {
+
+// A pointer to this struct is passed to OMX_SetParameter() when the extension
+// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension
+// is given.
+//
+// When Android native buffer use is disabled for a port (the default state),
+// the OMX node should operate as normal, and expect UseBuffer calls to set its
+// buffers. This is the mode that will be used when CPU access to the buffer is
+// required.
+//
+// When Android native buffer use has been enabled, the OMX node must support
+// only color formats in the range [OMX_COLOR_FormatAndroidPrivateStart,
+// OMX_COLOR_FormatAndroidPrivateEnd). The node should then expect to receive
+// UseAndroidNativeBuffer calls (via OMX_SetParameter) rather than UseBuffer
+// calls.
+struct EnableAndroidNativeBuffersParams {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL enable;
+};
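For illustration, a hedged sketch of how a client might fill in this struct and hand it to a component. The handle, port index, and version value are assumptions, not part of this change; the extension-index lookup mirrors the comment above.

#include <string.h>
#include <OMX_Core.h>
#include <media/stagefright/HardwareAPI.h>

// Sketch only: enable native buffer use on one port of an already-opened component.
static OMX_ERRORTYPE enableNativeBuffers(OMX_HANDLETYPE handle, OMX_U32 portIndex) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            handle,
            const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }

    android::EnableAndroidNativeBuffersParams params;
    memset(&params, 0, sizeof(params));
    params.nSize = sizeof(params);
    params.nVersion.s.nVersionMajor = 1;
    params.nPortIndex = portIndex;
    params.enable = OMX_TRUE;
    return OMX_SetParameter(handle, index, &params);
}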
+
+// A pointer to this struct is passed to OMX_SetParameter() when the extension
+// index "OMX.google.android.index.storeMetaDataInBuffers"
+// is given.
+//
+// When meta data is stored in the video buffers passed between OMX clients
+// and OMX components, interpretation of the buffer data is up to the
+// buffer receiver, and the data may not be the actual video data but rather
+// information that helps the receiver locate the actual data.
+// The buffer receiver thus needs to know how to interpret what is stored
+// in these buffers, with mechanisms pre-determined externally. How to
+// interpret the meta data is outside of the scope of this method.
+//
+// Currently, this is specifically used to pass meta data from video source
+// (camera component, for instance) to video encoder to avoid memcpying of
+// input video frame data. To do this, bStoreMetaData is set to OMX_TRUE.
+// If bStoreMetaData is set to OMX_FALSE, real YUV frame data will be stored
+// in the buffers. In addition, if no OMX_SetParameter() call is made
+// with the corresponding extension index, real YUV data is stored
+// in the buffers.
+struct StoreMetaDataInBuffersParams {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_BOOL bStoreMetaData;
+};
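A similar hedged sketch for this extension, reusing the includes from the previous sketch; handle and port index are again assumed.

// Sketch only: ask a component to accept metadata rather than YUV data on a port.
static OMX_ERRORTYPE storeMetaDataInBuffers(
        OMX_HANDLETYPE handle, OMX_U32 portIndex, OMX_BOOL enable) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            handle,
            const_cast<OMX_STRING>("OMX.google.android.index.storeMetaDataInBuffers"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }

    android::StoreMetaDataInBuffersParams params;
    memset(&params, 0, sizeof(params));
    params.nSize = sizeof(params);
    params.nVersion.s.nVersionMajor = 1;
    params.nPortIndex = portIndex;
    params.bStoreMetaData = enable;
    return OMX_SetParameter(handle, index, &params);
}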
+
+// Color formats in the range [OMX_COLOR_FormatAndroidPrivateStart,
+// OMX_COLOR_FormatAndroidPrivateEnd) will be converted to a gralloc pixel
+// format when used to allocate Android native buffers via gralloc. The
+// conversion is done by subtracting OMX_COLOR_FormatAndroidPrivateStart from
+// the color format reported by the codec.
+enum {
+ OMX_COLOR_FormatAndroidPrivateStart = 0xA0000000,
+ OMX_COLOR_FormatAndroidPrivateEnd = 0xB0000000,
+};
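A worked example of the mapping described above (illustrative only; omxColorFormat is an assumed variable holding a color format in the Android private range):

// The gralloc pixel format is recovered by subtracting the private-range base
// from the OMX color format reported by the codec.
int grallocFormat = (int)omxColorFormat - (int)OMX_COLOR_FormatAndroidPrivateStart;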
+
+// A pointer to this struct is passed to OMX_SetParameter when the extension
+// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is
+// given. This call will only be performed if a prior call was made with the
+// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index,
+// enabling use of Android native buffers.
+struct UseAndroidNativeBufferParams {
+ OMX_U32 nSize;
+ OMX_VERSIONTYPE nVersion;
+ OMX_U32 nPortIndex;
+ OMX_PTR pAppPrivate;
+ OMX_BUFFERHEADERTYPE **bufferHeader;
+ const sp<android_native_buffer_t>& nativeBuffer;
+};
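A hedged sketch of passing this struct to a component; the handle, port index, version, and buffer are assumptions. Note that the const reference member means the struct has to be brace-initialized rather than memset and filled in.

// Sketch only: register one android_native_buffer_t with the component.
static OMX_ERRORTYPE useNativeBuffer(
        OMX_HANDLETYPE handle, OMX_U32 portIndex, OMX_PTR appPrivate,
        const android::sp<android_native_buffer_t> &nativeBuffer,
        OMX_BUFFERHEADERTYPE **header) {
    OMX_INDEXTYPE index;
    OMX_ERRORTYPE err = OMX_GetExtensionIndex(
            handle,
            const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
            &index);
    if (err != OMX_ErrorNone) {
        return err;
    }

    OMX_VERSIONTYPE version;
    memset(&version, 0, sizeof(version));
    version.s.nVersionMajor = 1;

    // Aggregate initialization is required because of the reference member.
    android::UseAndroidNativeBufferParams params = {
        sizeof(params), version, portIndex, appPrivate, header, nativeBuffer };
    return OMX_SetParameter(handle, index, &params);
}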
+
+} // namespace android
+
extern android::VideoRenderer *createRenderer(
const android::sp<android::ISurface> &surface,
const char *componentName,
@@ -35,4 +111,3 @@ extern android::VideoRenderer *createRenderer(
extern android::OMXPluginBase *createOMXPlugin();
#endif // HARDWARE_API_H_
-
diff --git a/include/media/stagefright/MediaBuffer.h b/include/media/stagefright/MediaBuffer.h
index 339e6fb..c1c4f94 100644
--- a/include/media/stagefright/MediaBuffer.h
+++ b/include/media/stagefright/MediaBuffer.h
@@ -25,6 +25,7 @@
namespace android {
+class GraphicBuffer;
class MediaBuffer;
class MediaBufferObserver;
class MetaData;
@@ -48,6 +49,8 @@ public:
MediaBuffer(size_t size);
+ MediaBuffer(const sp<GraphicBuffer>& graphicBuffer);
+
// Decrements the reference count and returns the buffer to its
// associated MediaBufferGroup if the reference count drops to 0.
void release();
@@ -63,6 +66,8 @@ public:
void set_range(size_t offset, size_t length);
+ sp<GraphicBuffer> graphicBuffer() const;
+
sp<MetaData> meta_data();
// Clears meta data and resets the range to the full extent.
@@ -94,6 +99,7 @@ private:
void *mData;
size_t mSize, mRangeOffset, mRangeLength;
+ sp<GraphicBuffer> mGraphicBuffer;
bool mOwnsData;
diff --git a/include/media/stagefright/MediaSourceSplitter.h b/include/media/stagefright/MediaSourceSplitter.h
new file mode 100644
index 0000000..568f4c2
--- /dev/null
+++ b/include/media/stagefright/MediaSourceSplitter.h
@@ -0,0 +1,193 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// This class provides a way to split a single media source into multiple sources.
+// The constructor takes in the real mediaSource and createClient() can then be
+// used to create multiple sources served from this real mediaSource.
+//
+// Usage:
+// - Create MediaSourceSplitter by passing in a real mediaSource from which
+// multiple duplicate channels are needed.
+// - Create a client using createClient() and use it as any other mediaSource.
+//
+// Note that multiple clients can be created using createClient() and
+// started/stopped in any order. MediaSourceSplitter stops the real source only
+// when all clients have been stopped.
+//
+// If a new client is created/started after some existing clients have already
+// started, the new client will start getting its read frames from the current
+// time.
+
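A hedged usage sketch of the splitter described above, assuming 'source' is some already-constructed MediaSource (for example a CameraSource):

sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(source);
sp<MediaSource> mainClient = splitter->createClient();
sp<MediaSource> auxClient  = splitter->createClient();

mainClient->start();
auxClient->start();
// Each client now read()s independently; a frame is pulled from the real
// source once and handed to every started client.
mainClient->stop();
auxClient->stop();   // the real source is stopped only after the last stop()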
+#ifndef MEDIA_SOURCE_SPLITTER_H_
+
+#define MEDIA_SOURCE_SPLITTER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/threads.h>
+#include <utils/Vector.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class MediaBuffer;
+class MetaData;
+
+class MediaSourceSplitter : public RefBase {
+public:
+ // Constructor
+ // mediaSource: The real mediaSource. The class keeps a reference to it to
+ // implement the various clients.
+ MediaSourceSplitter(sp<MediaSource> mediaSource);
+
+ ~MediaSourceSplitter();
+
+ // Creates a new client of base type MediaSource. Multiple clients can be
+ // created which get their data through the same real mediaSource. These
+ // clients can then be used like any other MediaSource, all of which provide
+ // data from the same real source.
+ sp<MediaSource> createClient();
+
+private:
+ // Total number of clients created through createClient().
+ int32_t mNumberOfClients;
+
+ // reference to the real MediaSource passed to the constructor.
+ sp<MediaSource> mSource;
+
+ // Stores pointer to the MediaBuffer read from the real MediaSource.
+ // All clients use this to implement the read() call.
+ MediaBuffer *mLastReadMediaBuffer;
+
+ // Status code for read from the real MediaSource. All clients return
+ // this for their read().
+ status_t mLastReadStatus;
+
+ // Boolean telling whether the real MediaSource has started.
+ bool mSourceStarted;
+
+ // List of booleans, one for each client, storing whether the corresponding
+ // client's start() has been called.
+ Vector<bool> mClientsStarted;
+
+ // Stores the number of clients which are currently started.
+ int32_t mNumberOfClientsStarted;
+
+ // Since different clients call read() asynchronously, we need to keep track
+ // of what data is currently read into the mLastReadMediaBuffer.
+ // mCurrentReadBit stores the bit for the current read buffer. This bit
+ // flips each time a new buffer is read from the source.
+ // mClientsDesiredReadBit stores the bit for the next desired read buffer
+ // for each client. This bit flips each time read() is completed for this
+ // client.
+ bool mCurrentReadBit;
+ Vector<bool> mClientsDesiredReadBit;
+
+ // Number of clients whose current read has been completed.
+ int32_t mNumberOfCurrentReads;
+
+ // Boolean telling whether the last read has been completed for all clients.
+ // The variable is reset to false each time buffer is read from the real
+ // source.
+ bool mLastReadCompleted;
+
+ // A global mutex for access to critical sections.
+ Mutex mLock;
+
+ // Condition variable for waiting on read from source to complete.
+ Condition mReadFromSourceCondition;
+
+ // Condition variable for waiting on all client's last read to complete.
+ Condition mAllReadsCompleteCondition;
+
+ // Functions used by Client to implement the MediaSource interface.
+
+ // If the real source has not been started yet by any client, starts it.
+ status_t start(int clientId, MetaData *params);
+
+ // Stops the real source after all clients have called stop().
+ status_t stop(int clientId);
+
+ // returns the real source's getFormat().
+ sp<MetaData> getFormat(int clientId);
+
+ // If the client's desired buffer has already been read into
+ // mLastReadMediaBuffer, points the buffer to that. Otherwise if it is the
+ // master client, reads the buffer from source or else waits for the master
+ // client to read the buffer and uses that.
+ status_t read(int clientId,
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options = NULL);
+
+ // Not implemented right now.
+ status_t pause(int clientId);
+
+ // Function which reads a buffer from the real source into
+ // mLastReadMediaBuffer
+ void readFromSource_lock(const MediaSource::ReadOptions *options);
+
+ // Waits until read from the real source has been completed.
+ // _lock means that the function should be called when the thread has already
+ // obtained the lock for the mutex mLock.
+ void waitForReadFromSource_lock(int32_t clientId);
+
+ // Waits until all clients have read the current buffer in
+ // mLastReadMediaBuffer, i.e. until mLastReadCompleted becomes true.
+ void waitForAllClientsLastRead_lock(int32_t clientId);
+
+ // Each client calls this after it completes its read(). Once all clients
+ // have called this for the current buffer, the function calls
+ // mAllReadsCompleteCondition.broadcast() to signal the waiting clients.
+ void signalReadComplete_lock(bool readAborted);
+
+ // Make these constructors private.
+ MediaSourceSplitter();
+ MediaSourceSplitter(const MediaSourceSplitter &);
+ MediaSourceSplitter &operator=(const MediaSourceSplitter &);
+
+ // This class implements the MediaSource interface. Each client stores a
+ // reference to the parent MediaSourceSplitter and uses it to complete the
+ // various calls.
+ class Client : public MediaSource {
+ public:
+ // Constructor stores a reference to the parent MediaSourceSplitter and its
+ // client id.
+ Client(sp<MediaSourceSplitter> splitter, int32_t clientId);
+
+ // MediaSource interface
+ virtual status_t start(MetaData *params = NULL);
+
+ virtual status_t stop();
+
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+ virtual status_t pause();
+
+ private:
+ // Reference to the parent MediaSourceSplitter
+ sp<MediaSourceSplitter> mSplitter;
+
+ // Id of this client.
+ int32_t mClientId;
+ };
+
+ friend class Client;
+};
+
+} // namespace android
+
+#endif // MEDIA_SOURCE_SPLITTER_H_
diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h
index ac0624a..a72aabf 100644
--- a/include/media/stagefright/MetaData.h
+++ b/include/media/stagefright/MetaData.h
@@ -101,6 +101,9 @@ enum {
kKeyValidSamples = 'valD', // int32_t
kKeyIsUnreadable = 'unre', // bool (int32_t)
+
+ // An indication that a video buffer has been rendered.
+ kKeyRendered = 'rend', // bool (int32_t)
};
enum {
diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h
index 1d94160..0f4fbfb 100644
--- a/include/media/stagefright/OMXCodec.h
+++ b/include/media/stagefright/OMXCodec.h
@@ -18,6 +18,7 @@
#define OMX_CODEC_H_
+#include <android/native_window.h>
#include <media/IOMX.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
@@ -38,13 +39,19 @@ struct OMXCodec : public MediaSource,
// The client wants to access the output buffer's video
// data for example for thumbnail extraction.
kClientNeedsFramebuffer = 4,
+
+ // Request for software or hardware codecs. If the request
+ // cannot be fulfilled, Create() returns NULL.
+ kSoftwareCodecsOnly = 8,
+ kHardwareCodecsOnly = 16,
};
static sp<MediaSource> Create(
const sp<IOMX> &omx,
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName = NULL,
- uint32_t flags = 0);
+ uint32_t flags = 0,
+ const sp<ANativeWindow> &nativeWindow = NULL);
static void setComponentRole(
const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder,
@@ -114,6 +121,7 @@ private:
struct BufferInfo {
IOMX::buffer_id mBuffer;
bool mOwnedByComponent;
+ bool mOwnedByNativeWindow;
sp<IMemory> mMem;
size_t mSize;
void *mData;
@@ -159,13 +167,21 @@ private:
bool mPaused;
+ sp<ANativeWindow> mNativeWindow;
+
+ // The index in each of the mPortBuffers arrays of the buffer that will be
+ // submitted to OMX next. This only applies when using buffers from a
+ // native window.
+ size_t mNextNativeBufferIndex[2];
+
// A list of indices into mPortStatus[kPortIndexOutput] filled with data.
List<size_t> mFilledBuffers;
Condition mBufferFilled;
OMXCodec(const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks,
bool isEncoder, const char *mime, const char *componentName,
- const sp<MediaSource> &source);
+ const sp<MediaSource> &source,
+ const sp<ANativeWindow> &nativeWindow);
void addCodecSpecificData(const void *data, size_t size);
void clearCodecSpecificData();
@@ -216,6 +232,11 @@ private:
status_t allocateBuffers();
status_t allocateBuffersOnPort(OMX_U32 portIndex);
+ status_t allocateOutputBuffersFromNativeWindow();
+
+ status_t queueBufferToNativeWindow(BufferInfo *info);
+ status_t cancelBufferToNativeWindow(BufferInfo *info);
+ BufferInfo* dequeueBufferFromNativeWindow();
status_t freeBuffersOnPort(
OMX_U32 portIndex, bool onlyThoseWeOwn = false);
@@ -250,6 +271,7 @@ private:
status_t init();
void initOutputFormat(const sp<MetaData> &inputFormat);
+ status_t initNativeWindow();
void dumpPortStatus(OMX_U32 portIndex);
diff --git a/include/media/stagefright/VideoSourceDownSampler.h b/include/media/stagefright/VideoSourceDownSampler.h
new file mode 100644
index 0000000..439918c
--- /dev/null
+++ b/include/media/stagefright/VideoSourceDownSampler.h
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// VideoSourceDownSampler implements the MediaSource interface,
+// downsampling frames provided from a real video source.
+
+#ifndef VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#define VIDEO_SOURCE_DOWN_SAMPLER_H_
+
+#include <media/stagefright/MediaSource.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+class IMemory;
+class MediaBuffer;
+class MetaData;
+
+class VideoSourceDownSampler : public MediaSource {
+public:
+ virtual ~VideoSourceDownSampler();
+
+ // Constructor:
+ // videoSource: The real video source which provides the original frames.
+ // width, height: The desired width, height. These should be less than or equal
+ // to those of the real video source. We then downsample the original frames to
+ // this size.
+ VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+ int32_t width, int32_t height);
+
+ // MediaSource interface
+ virtual status_t start(MetaData *params = NULL);
+
+ virtual status_t stop();
+
+ virtual sp<MetaData> getFormat();
+
+ virtual status_t read(
+ MediaBuffer **buffer, const ReadOptions *options = NULL);
+
+ virtual status_t pause();
+
+private:
+ // Reference to the real video source.
+ sp<MediaSource> mRealVideoSource;
+
+ // Size of frames to be provided by this source.
+ int32_t mWidth;
+ int32_t mHeight;
+
+ // Size of frames provided by the real source.
+ int32_t mRealSourceWidth;
+ int32_t mRealSourceHeight;
+
+ // Downsampling parameters.
+ int32_t mDownSampleOffsetX;
+ int32_t mDownSampleOffsetY;
+ int32_t mDownSampleSkipX;
+ int32_t mDownSampleSkipY;
+
+ // True if we need to downsample the real source's frames to get the video frames.
+ bool mNeedDownSampling;
+
+ // Meta data. This is a copy of the real source's meta data except for the
+ // width and height parameters.
+ sp<MetaData> mMeta;
+
+ // Computes the offset, skip parameters for downsampling the original frame
+ // to the desired size.
+ void computeDownSamplingParameters();
+
+ // Downsamples the frame in sourceBuffer to size (mWidth x mHeight). A new
+ // buffer is created which stores the downsampled image.
+ void downSampleYUVImage(const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const;
+
+ // Disallow these.
+ VideoSourceDownSampler(const VideoSourceDownSampler &);
+ VideoSourceDownSampler &operator=(const VideoSourceDownSampler &);
+};
+
+} // namespace android
+
+#endif // VIDEO_SOURCE_DOWN_SAMPLER_H_
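A short, hedged usage sketch of the class above; 'cameraSource' is an assumed MediaSource producing larger frames (say 640x480):

sp<MediaSource> downsampled =
        new VideoSourceDownSampler(cameraSource, 320, 240);
downsampled->start();

MediaBuffer *frame;
if (downsampled->read(&frame) == OK) {
    // 'frame' now holds a 320x240 YUV image derived from the original frame.
    frame->release();
}
downsampled->stop();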
diff --git a/include/media/stagefright/YUVCanvas.h b/include/media/stagefright/YUVCanvas.h
new file mode 100644
index 0000000..ff70923
--- /dev/null
+++ b/include/media/stagefright/YUVCanvas.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// YUVCanvas holds a reference to a YUVImage on which it can do various
+// drawing operations. It provides various utility functions for filling,
+// cropping, etc.
+
+
+#ifndef YUV_CANVAS_H_
+
+#define YUV_CANVAS_H_
+
+#include <stdint.h>
+
+namespace android {
+
+class YUVImage;
+class Rect;
+
+class YUVCanvas {
+public:
+
+ // Constructor takes in a reference to a yuvImage on which it can do
+ // various drawing operations.
+ YUVCanvas(YUVImage &yuvImage);
+ ~YUVCanvas();
+
+ // Fills the entire image with the given YUV values.
+ void FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+ // Fills the rectangular region given by rect with the given YUV values.
+ void FillYUVRectangle(const Rect& rect,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+ // Copies the region srcRect from srcImage into the
+ // canvas' target image (mYUVImage) starting at
+ // (destStartX, destStartY).
+ // Note that undefined behavior may occur if srcImage is the same as the canvas'
+ // target image.
+ void CopyImageRect(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage);
+
+ // Downsamples the srcImage into the canvas' target image (mYUVImage)
+ // The downsampling copies pixels from the source image starting at
+ // (srcOffsetX, srcOffsetY) to the target image, starting at (0, 0).
+ // For each X increment in the target image, skipX pixels are skipped
+ // in the source image.
+ // Similarly for each Y increment in the target image, skipY pixels
+ // are skipped in the source image.
+ void downsample(
+ int32_t srcOffsetX, int32_t srcOffsetY,
+ int32_t skipX, int32_t skipY,
+ const YUVImage &srcImage);
+
+private:
+ YUVImage& mYUVImage;
+
+ YUVCanvas(const YUVCanvas &);
+ YUVCanvas &operator=(const YUVCanvas &);
+};
+
+} // namespace android
+
+#endif // YUV_CANVAS_H_
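An illustrative sketch of the canvas API declared above, assuming the YUVImage class declared in YUVImage.h below and the android::Rect from ui/Rect.h; the values are arbitrary.

YUVImage image(YUVImage::YUV420Planar, 176, 144);
YUVCanvas canvas(image);

// Fill the whole frame with YUV "black", then paint a lighter top-left block.
canvas.FillYUV(16, 128, 128);
canvas.FillYUVRectangle(Rect(0, 0, 88, 72), 235, 128, 128);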
diff --git a/include/media/stagefright/YUVImage.h b/include/media/stagefright/YUVImage.h
new file mode 100644
index 0000000..4e98618
--- /dev/null
+++ b/include/media/stagefright/YUVImage.h
@@ -0,0 +1,178 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// A container class to hold YUV data and provide various utilities,
+// e.g. to set/get pixel values.
+// Supported formats:
+// - YUV420 Planar
+// - YUV420 Semi Planar
+//
+// Currently does not support variable strides.
+//
+// Implementation: Two simple abstractions are done to simplify access
+// to YUV channels for different formats:
+// - initializeYUVPointers() sets up pointers (mYdata, mUdata, mVdata) to
+// point to the right start locations of the different channel data depending
+// on the format.
+// - getOffsets() returns the correct offset for the different channels
+// depending on the format.
+// Location of any pixel's YUV channels can then be easily computed using these.
+//
+
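A hedged sketch of the caller-owned-buffer constructor and the pixel accessors described above (illustrative values only):

size_t size = YUVImage::bufferSize(YUVImage::YUV420SemiPlanar, 320, 240);
uint8_t *storage = new uint8_t[size];

YUVImage image(YUVImage::YUV420SemiPlanar, 320, 240, storage);
image.setPixelValue(10, 20, 200, 128, 128);

uint8_t y, u, v;
image.getPixelValue(10, 20, &y, &u, &v);

delete[] storage;   // with this constructor the caller owns the memory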
+#ifndef YUV_IMAGE_H_
+
+#define YUV_IMAGE_H_
+
+#include <stdint.h>
+#include <cstring>
+
+namespace android {
+
+class Rect;
+
+class YUVImage {
+public:
+ // Supported YUV formats
+ enum YUVFormat {
+ YUV420Planar,
+ YUV420SemiPlanar
+ };
+
+ // Constructs an image with the given size, format. Also allocates and owns
+ // the required memory.
+ YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+ // Constructs an image with the given size, format. The memory is provided
+ // by the caller and we don't own it.
+ YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer);
+
+ // Destructor to delete the memory if it owns it.
+ ~YUVImage();
+
+ // Returns the size of the buffer required to store the YUV data for the given
+ // format and geometry. Useful when the caller wants to allocate the requisite
+ // memory.
+ static size_t bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height);
+
+ int32_t width() const {return mWidth;}
+ int32_t height() const {return mHeight;}
+
+ // Returns true if the pixel is in the range [0, width-1] x [0, height-1]
+ // and false otherwise.
+ bool validPixel(int32_t x, int32_t y) const;
+
+ // Get the pixel YUV value at pixel (x,y).
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ // Returns true if get was successful and false otherwise.
+ bool getPixelValue(int32_t x, int32_t y,
+ uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const;
+
+ // Set the pixel YUV value at pixel (x,y).
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ // Returns true if set was successful and false otherwise.
+ bool setPixelValue(int32_t x, int32_t y,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue);
+
+ // Uses memcpy to copy an entire row of data
+ static void fastCopyRectangle420Planar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage);
+
+ // Uses memcpy to copy an entire row of data
+ static void fastCopyRectangle420SemiPlanar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage);
+
+ // Tries to use memcpy to copy entire rows of data.
+ // Returns false if fast copy is not possible for the passed image formats.
+ static bool fastCopyRectangle(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage);
+
+ // Convert the given YUV value to RGB.
+ void yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+ uint8_t *r, uint8_t *g, uint8_t *b) const;
+
+ // Write the image to a human readable PPM file.
+ // Returns true if the write was successful and false otherwise.
+ bool writeToPPM(const char *filename) const;
+
+private:
+ // YUV Format of the image.
+ YUVFormat mYUVFormat;
+
+ int32_t mWidth;
+ int32_t mHeight;
+
+ // Pointer to the memory buffer.
+ uint8_t *mBuffer;
+
+ // Boolean telling whether we own the memory buffer.
+ bool mOwnBuffer;
+
+ // Pointer to start of the Y data plane.
+ uint8_t *mYdata;
+
+ // Pointer to start of the U data plane. Note that in case of interleaved formats like
+ // YUV420 semiplanar, mUdata points to the start of the U data in the UV plane.
+ uint8_t *mUdata;
+
+ // Pointer to start of the V data plane. Note that in case of interleaved formats like
+ // YUV420 semiplanar, mVdata points to the start of the V data in the UV plane.
+ uint8_t *mVdata;
+
+ // Initialize the pointers mYdata, mUdata, mVdata to point to the right locations for
+ // the given format and geometry.
+ // Returns true if initialization was successful and false otherwise.
+ bool initializeYUVPointers();
+
+ // For the given pixel location, this returns the offset of the location of y, u and v
+ // data from the corresponding base pointers -- mYdata, mUdata, mVdata.
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ // Returns true if getting the offsets was successful and false otherwise.
+ bool getOffsets(int32_t x, int32_t y,
+ int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const;
+
+ // Returns the offset increments incurred in going from one data row to the next data row
+ // for the YUV channels. Note that this corresponds to data rows and not pixel rows.
+ // E.g. depending on formats, U/V channels may have only one data row corresponding
+ // to two pixel rows.
+ bool getOffsetIncrementsPerDataRow(
+ int32_t *yDataOffsetIncrement,
+ int32_t *uDataOffsetIncrement,
+ int32_t *vDataOffsetIncrement) const;
+
+ // Given the offset return the address of the corresponding channel's data.
+ uint8_t* getYAddress(int32_t offset) const;
+ uint8_t* getUAddress(int32_t offset) const;
+ uint8_t* getVAddress(int32_t offset) const;
+
+ // Given the pixel location, returns the address of the corresponding channel's data.
+ // Note that the range of x is [0, width-1] and the range of y is [0, height-1].
+ bool getYUVAddresses(int32_t x, int32_t y,
+ uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const;
+
+ // Disallow implicit casting and copying.
+ YUVImage(const YUVImage &);
+ YUVImage &operator=(const YUVImage &);
+};
+
+} // namespace android
+
+#endif // YUV_IMAGE_H_
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index 48b45ff..e6ff654 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -27,4 +27,4 @@ LOCAL_C_INCLUDES := \
LOCAL_PRELINK_MODULE := false
-include $(BUILD_SHARED_LIBRARY) \ No newline at end of file
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index 7e3b743..9c2a8ba 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -763,7 +763,8 @@ bool AudioSystem::isBluetoothScoDevice(audio_devices device)
if ((popCount(device) == 1 ) &&
(device & (AudioSystem::DEVICE_OUT_BLUETOOTH_SCO |
AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
- AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT))) {
+ AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT |
+ AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET))) {
return true;
} else {
return false;
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index 0f55b19..9dfdcb0 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -22,12 +22,14 @@
#include <media/IMediaPlayer.h>
#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
namespace android {
enum {
DISCONNECT = IBinder::FIRST_CALL_TRANSACTION,
SET_VIDEO_SURFACE,
+ SET_VIDEO_ISURFACE,
PREPARE_ASYNC,
START,
STOP,
@@ -65,11 +67,20 @@ public:
remote()->transact(DISCONNECT, data, &reply);
}
- status_t setVideoSurface(const sp<ISurface>& surface)
+ status_t setVideoISurface(const sp<ISurface>& surface)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
data.writeStrongBinder(surface->asBinder());
+ remote()->transact(SET_VIDEO_ISURFACE, data, &reply);
+ return reply.readInt32();
+ }
+
+ status_t setVideoSurface(const sp<Surface>& surface)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_VIDEO_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -256,9 +267,15 @@ status_t BnMediaPlayer::onTransact(
disconnect();
return NO_ERROR;
} break;
- case SET_VIDEO_SURFACE: {
+ case SET_VIDEO_ISURFACE: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ reply->writeInt32(setVideoISurface(surface));
+ return NO_ERROR;
+ } break;
+ case SET_VIDEO_SURFACE: {
+ CHECK_INTERFACE(IMediaPlayer, data, reply);
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setVideoSurface(surface));
return NO_ERROR;
} break;
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 947ff34..59cd1b7 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -19,7 +19,7 @@
#define LOG_TAG "IMediaRecorder"
#include <utils/Log.h>
#include <binder/Parcel.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <camera/ICamera.h>
#include <media/IMediaRecorderClient.h>
#include <media/IMediaRecorder.h>
@@ -43,6 +43,7 @@ enum {
SET_AUDIO_ENCODER,
SET_OUTPUT_FILE_PATH,
SET_OUTPUT_FILE_FD,
+ SET_OUTPUT_FILE_AUXILIARY_FD,
SET_VIDEO_SIZE,
SET_VIDEO_FRAMERATE,
SET_PARAMETERS,
@@ -69,12 +70,12 @@ public:
return reply.readInt32();
}
- status_t setPreviewSurface(const sp<ISurface>& surface)
+ status_t setPreviewSurface(const sp<Surface>& surface)
{
LOGV("setPreviewSurface(%p)", surface.get());
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
- data.writeStrongBinder(surface->asBinder());
+ Surface::writeToParcel(surface, &data);
remote()->transact(SET_PREVIEW_SURFACE, data, &reply);
return reply.readInt32();
}
@@ -159,6 +160,15 @@ public:
return reply.readInt32();
}
+ status_t setOutputFileAuxiliary(int fd) {
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ Parcel data, reply;
+ data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
+ data.writeFileDescriptor(fd);
+ remote()->transact(SET_OUTPUT_FILE_AUXILIARY_FD, data, &reply);
+ return reply.readInt32();
+ }
+
status_t setVideoSize(int width, int height)
{
LOGV("setVideoSize(%dx%d)", width, height);
@@ -377,6 +387,13 @@ status_t BnMediaRecorder::onTransact(
::close(fd);
return NO_ERROR;
} break;
+ case SET_OUTPUT_FILE_AUXILIARY_FD: {
+ LOGV("SET_OUTPUT_FILE_AUXILIARY_FD");
+ CHECK_INTERFACE(IMediaRecorder, data, reply);
+ int fd = dup(data.readFileDescriptor());
+ reply->writeInt32(setOutputFileAuxiliary(fd));
+ return NO_ERROR;
+ } break;
case SET_VIDEO_SIZE: {
LOGV("SET_VIDEO_SIZE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
@@ -409,7 +426,7 @@ status_t BnMediaRecorder::onTransact(
case SET_PREVIEW_SURFACE: {
LOGV("SET_PREVIEW_SURFACE");
CHECK_INTERFACE(IMediaRecorder, data, reply);
- sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder());
+ sp<Surface> surface = Surface::readFromParcel(data);
reply->writeInt32(setPreviewSurface(surface));
return NO_ERROR;
} break;
diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp
index f3804b8..f975217 100644
--- a/media/libmedia/IOMX.cpp
+++ b/media/libmedia/IOMX.cpp
@@ -21,7 +21,10 @@ enum {
SET_PARAMETER,
GET_CONFIG,
SET_CONFIG,
+ ENABLE_GRAPHIC_BUFFERS,
USE_BUFFER,
+ USE_GRAPHIC_BUFFER,
+ STORE_META_DATA_IN_BUFFERS,
ALLOC_BUFFER,
ALLOC_BUFFER_WITH_BACKUP,
FREE_BUFFER,
@@ -216,6 +219,19 @@ public:
return reply.readInt32();
}
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.writeInt32((uint32_t)enable);
+ remote()->transact(ENABLE_GRAPHIC_BUFFERS, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -238,6 +254,42 @@ public:
return err;
}
+
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.write(*graphicBuffer);
+ remote()->transact(USE_GRAPHIC_BUFFER, data, &reply);
+
+ status_t err = reply.readInt32();
+ if (err != OK) {
+ *buffer = 0;
+
+ return err;
+ }
+
+ *buffer = (void*)reply.readIntPtr();
+
+ return err;
+ }
+
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ Parcel data, reply;
+ data.writeInterfaceToken(IOMX::getInterfaceDescriptor());
+ data.writeIntPtr((intptr_t)node);
+ data.writeInt32(port_index);
+ data.writeInt32((uint32_t)enable);
+ remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply);
+
+ status_t err = reply.readInt32();
+ return err;
+ }
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -541,6 +593,20 @@ status_t BnOMX::onTransact(
return NO_ERROR;
}
+ case ENABLE_GRAPHIC_BUFFERS:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+ status_t err = enableGraphicBuffers(node, port_index, enable);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case USE_BUFFER:
{
CHECK_INTERFACE(IOMX, data, reply);
@@ -561,6 +627,41 @@ status_t BnOMX::onTransact(
return NO_ERROR;
}
+ case USE_GRAPHIC_BUFFER:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ sp<GraphicBuffer> graphicBuffer = new GraphicBuffer();
+ data.read(*graphicBuffer);
+
+ buffer_id buffer;
+ status_t err = useGraphicBuffer(
+ node, port_index, graphicBuffer, &buffer);
+ reply->writeInt32(err);
+
+ if (err == OK) {
+ reply->writeIntPtr((intptr_t)buffer);
+ }
+
+ return NO_ERROR;
+ }
+
+ case STORE_META_DATA_IN_BUFFERS:
+ {
+ CHECK_INTERFACE(IOMX, data, reply);
+
+ node_id node = (void*)data.readIntPtr();
+ OMX_U32 port_index = data.readInt32();
+ OMX_BOOL enable = (OMX_BOOL)data.readInt32();
+
+ status_t err = storeMetaDataInBuffers(node, port_index, enable);
+ reply->writeInt32(err);
+
+ return NO_ERROR;
+ }
+
case ALLOC_BUFFER:
{
CHECK_INTERFACE(IOMX, data, reply);
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 3869389..9ad63f0 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -59,8 +59,21 @@ const MediaProfiles::NameToTagMap MediaProfiles::sAudioDecoderNameMap[] = {
};
const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
+ {"low", CAMCORDER_QUALITY_LOW},
{"high", CAMCORDER_QUALITY_HIGH},
- {"low", CAMCORDER_QUALITY_LOW}
+ {"qcif", CAMCORDER_QUALITY_QCIF},
+ {"cif", CAMCORDER_QUALITY_CIF},
+ {"480p", CAMCORDER_QUALITY_480P},
+ {"720p", CAMCORDER_QUALITY_720P},
+ {"1080p", CAMCORDER_QUALITY_1080P},
+
+ {"timelapselow", CAMCORDER_QUALITY_TIME_LAPSE_LOW},
+ {"timelapsehigh", CAMCORDER_QUALITY_TIME_LAPSE_HIGH},
+ {"timelapseqcif", CAMCORDER_QUALITY_TIME_LAPSE_QCIF},
+ {"timelapsecif", CAMCORDER_QUALITY_TIME_LAPSE_CIF},
+ {"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P},
+ {"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P},
+ {"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P}
};
/*static*/ void
@@ -411,24 +424,57 @@ MediaProfiles::createDefaultVideoEncoders(MediaProfiles *profiles)
}
/*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderHighProfile()
+MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality)
{
MediaProfiles::VideoCodec *videoCodec =
- new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20);
+
+ AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
+ profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+ profile->mQuality = quality;
+ profile->mDuration = 60;
+ profile->mVideoCodec = videoCodec;
+ profile->mAudioCodec = audioCodec;
+ return profile;
+}
+
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality)
+{
+ MediaProfiles::VideoCodec *videoCodec =
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20);
AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
- profile->mQuality = CAMCORDER_QUALITY_HIGH;
+ profile->mQuality = quality;
profile->mDuration = 60;
profile->mVideoCodec = videoCodec;
profile->mAudioCodec = audioCodec;
return profile;
}
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles(
+ MediaProfiles::CamcorderProfile **lowTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) {
+ *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW);
+ *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles(
+ MediaProfiles::CamcorderProfile **highTimeLapseProfile,
+ MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) {
+ *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH);
+ *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P);
+}
+
/*static*/ MediaProfiles::CamcorderProfile*
-MediaProfiles::createDefaultCamcorderLowProfile()
+MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality)
{
MediaProfiles::VideoCodec *videoCodec =
new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20);
@@ -439,18 +485,72 @@ MediaProfiles::createDefaultCamcorderLowProfile()
MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
profile->mCameraId = 0;
profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
- profile->mQuality = CAMCORDER_QUALITY_LOW;
+ profile->mQuality = quality;
profile->mDuration = 30;
profile->mVideoCodec = videoCodec;
profile->mAudioCodec = audioCodec;
return profile;
}
+/*static*/ MediaProfiles::CamcorderProfile*
+MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality)
+{
+ MediaProfiles::VideoCodec *videoCodec =
+ new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20);
+
+ AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1);
+ CamcorderProfile *profile = new MediaProfiles::CamcorderProfile;
+ profile->mCameraId = 0;
+ profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP;
+ profile->mQuality = quality;
+ profile->mDuration = 60;
+ profile->mVideoCodec = videoCodec;
+ profile->mAudioCodec = audioCodec;
+ return profile;
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderLowProfiles(
+ MediaProfiles::CamcorderProfile **lowProfile,
+ MediaProfiles::CamcorderProfile **lowSpecificProfile) {
+ *lowProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_LOW);
+ *lowSpecificProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_QCIF);
+}
+
+/*static*/ void
+MediaProfiles::createDefaultCamcorderHighProfiles(
+ MediaProfiles::CamcorderProfile **highProfile,
+ MediaProfiles::CamcorderProfile **highSpecificProfile) {
+ *highProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_HIGH);
+ *highSpecificProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_CIF);
+}
+
/*static*/ void
MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles)
{
- profiles->mCamcorderProfiles.add(createDefaultCamcorderHighProfile());
- profiles->mCamcorderProfiles.add(createDefaultCamcorderLowProfile());
+ // low camcorder profiles.
+ MediaProfiles::CamcorderProfile *lowProfile, *lowSpecificProfile;
+ createDefaultCamcorderLowProfiles(&lowProfile, &lowSpecificProfile);
+ profiles->mCamcorderProfiles.add(lowProfile);
+ profiles->mCamcorderProfiles.add(lowSpecificProfile);
+
+ // high camcorder profiles.
+ MediaProfiles::CamcorderProfile* highProfile, *highSpecificProfile;
+ createDefaultCamcorderHighProfiles(&highProfile, &highSpecificProfile);
+ profiles->mCamcorderProfiles.add(highProfile);
+ profiles->mCamcorderProfiles.add(highSpecificProfile);
+
+ // low camcorder time lapse profiles.
+ MediaProfiles::CamcorderProfile *lowTimeLapseProfile, *lowSpecificTimeLapseProfile;
+ createDefaultCamcorderTimeLapseLowProfiles(&lowTimeLapseProfile, &lowSpecificTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(lowTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(lowSpecificTimeLapseProfile);
+
+ // high camcorder time lapse profiles.
+ MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile;
+ createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(highTimeLapseProfile);
+ profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile);
}
/*static*/ void
@@ -668,13 +768,8 @@ Vector<audio_decoder> MediaProfiles::getAudioDecoders() const
return decoders; // copy out
}
-int MediaProfiles::getCamcorderProfileParamByName(const char *name,
- int cameraId,
- camcorder_quality quality) const
+int MediaProfiles::getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const
{
- LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
- name, cameraId, quality);
-
int index = -1;
for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) {
if (mCamcorderProfiles[i]->mCameraId == cameraId &&
@@ -683,6 +778,17 @@ int MediaProfiles::getCamcorderProfileParamByName(const char *name,
break;
}
}
+ return index;
+}
+
+int MediaProfiles::getCamcorderProfileParamByName(const char *name,
+ int cameraId,
+ camcorder_quality quality) const
+{
+ LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d",
+ name, cameraId, quality);
+
+ int index = getCamcorderProfileIndex(cameraId, quality);
if (index == -1) {
LOGE("The given camcorder profile camera %d quality %d is not found",
cameraId, quality);
@@ -705,6 +811,11 @@ int MediaProfiles::getCamcorderProfileParamByName(const char *name,
return -1;
}
+bool MediaProfiles::hasCamcorderProfile(int cameraId, camcorder_quality quality) const
+{
+ return (getCamcorderProfileIndex(cameraId, quality) != -1);
+}
+
Vector<int> MediaProfiles::getImageEncodingQualityLevels(int cameraId) const
{
Vector<int> result;
diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp
index c5112a5..c31b622 100644
--- a/media/libmedia/MediaScanner.cpp
+++ b/media/libmedia/MediaScanner.cpp
@@ -48,8 +48,7 @@ const char *MediaScanner::locale() const {
}
status_t MediaScanner::processDirectory(
- const char *path, const char *extensions,
- MediaScannerClient &client,
+ const char *path, MediaScannerClient &client,
ExceptionCheck exceptionCheck, void *exceptionEnv) {
int pathLength = strlen(path);
if (pathLength >= PATH_MAX) {
@@ -72,35 +71,16 @@ status_t MediaScanner::processDirectory(
status_t result =
doProcessDirectory(
- pathBuffer, pathRemaining, extensions, client,
- exceptionCheck, exceptionEnv);
+ pathBuffer, pathRemaining, client, exceptionCheck, exceptionEnv);
free(pathBuffer);
return result;
}
-static bool fileMatchesExtension(const char* path, const char* extensions) {
- const char* extension = strrchr(path, '.');
- if (!extension) return false;
- ++extension; // skip the dot
- if (extension[0] == 0) return false;
-
- while (extensions[0]) {
- const char* comma = strchr(extensions, ',');
- size_t length = (comma ? comma - extensions : strlen(extensions));
- if (length == strlen(extension) && strncasecmp(extension, extensions, length) == 0) return true;
- extensions += length;
- if (extensions[0] == ',') ++extensions;
- }
-
- return false;
-}
-
status_t MediaScanner::doProcessDirectory(
- char *path, int pathRemaining, const char *extensions,
- MediaScannerClient &client, ExceptionCheck exceptionCheck,
- void *exceptionEnv) {
+ char *path, int pathRemaining, MediaScannerClient &client,
+ ExceptionCheck exceptionCheck, void *exceptionEnv) {
// place to copy file or directory name
char* fileSpot = path + strlen(path);
struct dirent* entry;
@@ -133,6 +113,13 @@ status_t MediaScanner::doProcessDirectory(
continue;
}
+ int nameLength = strlen(name);
+ if (nameLength + 1 > pathRemaining) {
+ // path too long!
+ continue;
+ }
+ strcpy(fileSpot, name);
+
int type = entry->d_type;
if (type == DT_UNKNOWN) {
// If the type is unknown, stat() the file instead.
@@ -150,29 +137,20 @@ status_t MediaScanner::doProcessDirectory(
}
}
if (type == DT_REG || type == DT_DIR) {
- int nameLength = strlen(name);
- bool isDirectory = (type == DT_DIR);
-
- if (nameLength > pathRemaining || (isDirectory && nameLength + 1 > pathRemaining)) {
- // path too long!
- continue;
- }
-
- strcpy(fileSpot, name);
- if (isDirectory) {
+ if (type == DT_DIR) {
// ignore directories with a name that starts with '.'
// for example, the Mac ".Trashes" directory
if (name[0] == '.') continue;
strcat(fileSpot, "/");
- int err = doProcessDirectory(path, pathRemaining - nameLength - 1, extensions, client, exceptionCheck, exceptionEnv);
+ int err = doProcessDirectory(path, pathRemaining - nameLength - 1, client, exceptionCheck, exceptionEnv);
if (err) {
// pass exceptions up - ignore other errors
if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure;
LOGE("Error processing '%s' - skipping\n", path);
continue;
}
- } else if (fileMatchesExtension(path, extensions)) {
+ } else {
struct stat statbuf;
stat(path, &statbuf);
if (statbuf.st_size > 0) {
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index cc41e66..ee3f660 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -207,10 +207,15 @@ status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface)
LOGV("setVideoSurface");
Mutex::Autolock _l(mLock);
if (mPlayer == 0) return NO_INIT;
- if (surface != NULL)
- return mPlayer->setVideoSurface(surface->getISurface());
- else
- return mPlayer->setVideoSurface(NULL);
+
+ status_t err = mPlayer->setVideoISurface(
+ surface == NULL ? NULL : surface->getISurface());
+
+ if (err != OK) {
+ return err;
+ }
+
+ return mPlayer->setVideoSurface(surface);
}
// must call with lock held
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index e20e3ba..fd575fe 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -65,7 +65,7 @@ status_t MediaRecorder::setPreviewSurface(const sp<Surface>& surface)
return INVALID_OPERATION;
}
- status_t ret = mMediaRecorder->setPreviewSurface(surface->getISurface());
+ status_t ret = mMediaRecorder->setPreviewSurface(surface);
if (OK != ret) {
LOGV("setPreviewSurface failed: %d", ret);
mCurrentState = MEDIA_RECORDER_ERROR;
@@ -308,6 +308,32 @@ status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)
return ret;
}
+status_t MediaRecorder::setOutputFileAuxiliary(int fd)
+{
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ if(mMediaRecorder == NULL) {
+ LOGE("media recorder is not initialized yet");
+ return INVALID_OPERATION;
+ }
+ if (mIsAuxiliaryOutputFileSet) {
+ LOGE("output file has already been set");
+ return INVALID_OPERATION;
+ }
+ if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) {
+ LOGE("setOutputFile called in an invalid state(%d)", mCurrentState);
+ return INVALID_OPERATION;
+ }
+
+ status_t ret = mMediaRecorder->setOutputFileAuxiliary(fd);
+ if (OK != ret) {
+ LOGV("setOutputFileAuxiliary failed: %d", ret);
+ mCurrentState = MEDIA_RECORDER_ERROR;
+ return ret;
+ }
+ mIsAuxiliaryOutputFileSet = true;
+ return ret;
+}
+
status_t MediaRecorder::setVideoSize(int width, int height)
{
LOGV("setVideoSize(%d, %d)", width, height);
@@ -571,6 +597,7 @@ void MediaRecorder::doCleanUp()
mIsAudioEncoderSet = false;
mIsVideoEncoderSet = false;
mIsOutputFileSet = false;
+ mIsAuxiliaryOutputFileSet = false;
}
// Release should be OK in any state
@@ -643,4 +670,3 @@ void MediaRecorder::died()
}
}; // namespace android
-
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index c43e9bb..80922d6 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -891,7 +891,15 @@ status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64
return mStatus;
}
-status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface)
+status_t MediaPlayerService::Client::setVideoISurface(const sp<ISurface>& surface)
+{
+ LOGV("[%d] setVideoISurface(%p)", mConnId, surface.get());
+ sp<MediaPlayerBase> p = getPlayer();
+ if (p == 0) return UNKNOWN_ERROR;
+ return p->setVideoISurface(surface);
+}
+
+status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface)
{
LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get());
sp<MediaPlayerBase> p = getPlayer();
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 4492e20..c4e78f7 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -206,7 +206,8 @@ private:
// IMediaPlayer interface
virtual void disconnect();
- virtual status_t setVideoSurface(const sp<ISurface>& surface);
+ virtual status_t setVideoISurface(const sp<ISurface>& surface);
+ virtual status_t setVideoSurface(const sp<Surface>& surface);
virtual status_t prepareAsync();
virtual status_t start();
virtual status_t stop();
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index 19915f1..be6a8be 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -70,7 +70,7 @@ status_t MediaRecorderClient::setCamera(const sp<ICamera>& camera)
return mRecorder->setCamera(camera);
}
-status_t MediaRecorderClient::setPreviewSurface(const sp<ISurface>& surface)
+status_t MediaRecorderClient::setPreviewSurface(const sp<Surface>& surface)
{
LOGV("setPreviewSurface");
Mutex::Autolock lock(mLock);
@@ -164,6 +164,17 @@ status_t MediaRecorderClient::setOutputFile(int fd, int64_t offset, int64_t leng
return mRecorder->setOutputFile(fd, offset, length);
}
+status_t MediaRecorderClient::setOutputFileAuxiliary(int fd)
+{
+ LOGV("setOutputFileAuxiliary(%d)", fd);
+ Mutex::Autolock lock(mLock);
+ if (mRecorder == NULL) {
+ LOGE("recorder is not initialized");
+ return NO_INIT;
+ }
+ return mRecorder->setOutputFileAuxiliary(fd);
+}
+
status_t MediaRecorderClient::setVideoSize(int width, int height)
{
LOGV("setVideoSize(%dx%d)", width, height);
@@ -337,4 +348,3 @@ status_t MediaRecorderClient::dump(int fd, const Vector<String16>& args) const {
}
}; // namespace android
-
diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h
index 1d1913d..fded98e 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.h
+++ b/media/libmediaplayerservice/MediaRecorderClient.h
@@ -29,7 +29,7 @@ class MediaRecorderClient : public BnMediaRecorder
{
public:
virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setVideoSource(int vs);
virtual status_t setAudioSource(int as);
virtual status_t setOutputFormat(int of);
@@ -37,6 +37,7 @@ public:
virtual status_t setAudioEncoder(int ae);
virtual status_t setOutputFile(const char* path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ virtual status_t setOutputFileAuxiliary(int fd);
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setParameters(const String8& params);
@@ -66,4 +67,3 @@ private:
}; // namespace android
#endif // ANDROID_MEDIARECORDERCLIENT_H
-
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 4a60ece..06e4b70 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -35,7 +35,8 @@ public:
const char* path, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; }
+ virtual status_t setVideoISurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; }
+ virtual status_t setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; }
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index 6bded09..b3e2da0 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -44,13 +44,20 @@ status_t StagefrightPlayer::setDataSource(int fd, int64_t offset, int64_t length
return mPlayer->setDataSource(dup(fd), offset, length);
}
-status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) {
- LOGV("setVideoSurface");
+status_t StagefrightPlayer::setVideoISurface(const sp<ISurface> &surface) {
+ LOGV("setVideoISurface");
mPlayer->setISurface(surface);
return OK;
}
+status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) {
+ LOGV("setVideoSurface");
+
+ mPlayer->setSurface(surface);
+ return OK;
+}
+
status_t StagefrightPlayer::prepare() {
return mPlayer->prepare();
}
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 781eb44..dd37102 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -35,7 +35,8 @@ public:
const char *url, const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
- virtual status_t setVideoSurface(const sp<ISurface> &surface);
+ virtual status_t setVideoISurface(const sp<ISurface> &surface);
+ virtual status_t setVideoSurface(const sp<Surface> &surface);
virtual status_t prepare();
virtual status_t prepareAsync();
virtual status_t start();
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index d37d83d..913d953 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -20,10 +20,12 @@
#include "StagefrightRecorder.h"
-#include <binder/IPCThreadState.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaSourceSplitter.h>
#include <media/stagefright/MPEG2TSWriter.h>
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDebug.h>
@@ -33,21 +35,20 @@
#include <media/stagefright/OMXCodec.h>
#include <media/MediaProfiles.h>
#include <camera/ICamera.h>
-#include <camera/Camera.h>
#include <camera/CameraParameters.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <utils/Errors.h>
#include <sys/types.h>
-#include <unistd.h>
#include <ctype.h>
+#include <unistd.h>
#include "ARTPWriter.h"
namespace android {
StagefrightRecorder::StagefrightRecorder()
- : mWriter(NULL),
- mOutputFd(-1) {
+ : mWriter(NULL), mWriterAux(NULL),
+ mOutputFd(-1), mOutputFdAux(-1) {
LOGV("Constructor");
reset();
@@ -164,7 +165,8 @@ status_t StagefrightRecorder::setVideoSize(int width, int height) {
status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) {
LOGV("setVideoFrameRate: %d", frames_per_second);
- if (frames_per_second <= 0 || frames_per_second > 30) {
+ if ((frames_per_second <= 0 && frames_per_second != -1) ||
+ frames_per_second > 120) {
LOGE("Invalid video frame rate: %d", frames_per_second);
return BAD_VALUE;
}
@@ -182,26 +184,11 @@ status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera) {
return BAD_VALUE;
}
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mFlags &= ~FLAGS_HOT_CAMERA;
- mCamera = Camera::create(camera);
- if (mCamera == 0) {
- LOGE("Unable to connect to camera");
- IPCThreadState::self()->restoreCallingIdentity(token);
- return -EBUSY;
- }
-
- LOGV("Connected to camera");
- if (mCamera->previewEnabled()) {
- LOGV("camera is hot");
- mFlags |= FLAGS_HOT_CAMERA;
- }
- IPCThreadState::self()->restoreCallingIdentity(token);
-
+ mCamera = camera;
return OK;
}
-status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) {
+status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) {
LOGV("setPreviewSurface: %p", surface.get());
mPreviewSurface = surface;
@@ -235,6 +222,24 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng
return OK;
}
+status_t StagefrightRecorder::setOutputFileAuxiliary(int fd) {
+ LOGV("setOutputFileAuxiliary: %d", fd);
+
+ if (fd < 0) {
+ LOGE("Invalid file descriptor: %d", fd);
+ return -EBADF;
+ }
+
+ mCaptureAuxVideo = true;
+
+ if (mOutputFdAux >= 0) {
+ ::close(mOutputFdAux);
+ }
+ mOutputFdAux = dup(fd);
+
+ return OK;
+}
+
// Attempt to parse an int64 literal optionally surrounded by whitespace,
// returns true on success, false otherwise.
static bool safe_strtoi64(const char *s, int64_t *val) {
@@ -474,6 +479,68 @@ status_t StagefrightRecorder::setParamAudioTimeScale(int32_t timeScale) {
return OK;
}
+status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) {
+ LOGV("setParamTimeLapseEnable: %d", timeLapseEnable);
+
+ if (timeLapseEnable == 0) {
+ mCaptureTimeLapse = false;
+ } else if (timeLapseEnable == 1) {
+ mCaptureTimeLapse = true;
+ } else {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) {
+ LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs);
+
+ // Do not allow intervals longer than a day
+ if (timeUs <= 0 || timeUs > 86400*1E6) {
+ LOGE("Time between time lapse frame captures (%lld us) is out of range (0, 1 day]", timeUs);
+ return BAD_VALUE;
+ }
+
+ mTimeBetweenTimeLapseFrameCaptureUs = timeUs;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoWidth(int32_t width) {
+ LOGV("setParamAuxVideoWidth : %d", width);
+
+ if (width <= 0) {
+ LOGE("Width (%d) is not positive", width);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoWidth = width;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoHeight(int32_t height) {
+ LOGV("setParamAuxVideoHeight : %d", height);
+
+ if (height <= 0) {
+ LOGE("Height (%d) is not positive", height);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoHeight = height;
+ return OK;
+}
+
+status_t StagefrightRecorder::setParamAuxVideoEncodingBitRate(int32_t bitRate) {
+ LOGV("StagefrightRecorder::setParamAuxVideoEncodingBitRate: %d", bitRate);
+
+ if (bitRate <= 0) {
+ LOGE("Invalid video encoding bit rate: %d", bitRate);
+ return BAD_VALUE;
+ }
+
+ mAuxVideoBitRate = bitRate;
+ return OK;
+}
+
status_t StagefrightRecorder::setParameter(
const String8 &key, const String8 &value) {
LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string());
@@ -557,6 +624,32 @@ status_t StagefrightRecorder::setParameter(
if (safe_strtoi32(value.string(), &timeScale)) {
return setParamVideoTimeScale(timeScale);
}
+ } else if (key == "time-lapse-enable") {
+ int32_t timeLapseEnable;
+ if (safe_strtoi32(value.string(), &timeLapseEnable)) {
+ return setParamTimeLapseEnable(timeLapseEnable);
+ }
+ } else if (key == "time-between-time-lapse-frame-capture") {
+ int64_t timeBetweenTimeLapseFrameCaptureMs;
+ if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+ return setParamTimeBetweenTimeLapseFrameCapture(
+ 1000LL * timeBetweenTimeLapseFrameCaptureMs);
+ }
+ } else if (key == "video-aux-param-width") {
+ int32_t auxWidth;
+ if (safe_strtoi32(value.string(), &auxWidth)) {
+ return setParamAuxVideoWidth(auxWidth);
+ }
+ } else if (key == "video-aux-param-height") {
+ int32_t auxHeight;
+ if (safe_strtoi32(value.string(), &auxHeight)) {
+ return setParamAuxVideoHeight(auxHeight);
+ }
+ } else if (key == "video-aux-param-encoding-bitrate") {
+ int32_t auxVideoBitRate;
+ if (safe_strtoi32(value.string(), &auxVideoBitRate)) {
+ return setParamAuxVideoEncodingBitRate(auxVideoBitRate);
+ }
} else {
LOGE("setParameter: failed to find key %s", key.string());
}
@@ -790,7 +883,14 @@ status_t StagefrightRecorder::startRTPRecording() {
if (mAudioSource != AUDIO_SOURCE_LIST_END) {
source = createAudioSource();
} else {
- status_t err = setupVideoEncoder(&source);
+
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+
+ err = setupVideoEncoder(cameraSource, mVideoBitRate, &source);
if (err != OK) {
return err;
}
@@ -826,8 +926,14 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
return ERROR_UNSUPPORTED;
}
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+
sp<MediaSource> encoder;
- status_t err = setupVideoEncoder(&encoder);
+ err = setupVideoEncoder(cameraSource, mVideoBitRate, &encoder);
if (err != OK) {
return err;
@@ -855,7 +961,7 @@ void StagefrightRecorder::clipVideoFrameRate() {
"enc.vid.fps.min", mVideoEncoder);
int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName(
"enc.vid.fps.max", mVideoEncoder);
- if (mFrameRate < minFrameRate) {
+ if (mFrameRate < minFrameRate && mFrameRate != -1) {
LOGW("Intended video encoding frame rate (%d fps) is too small"
" and will be set to (%d fps)", mFrameRate, minFrameRate);
mFrameRate = minFrameRate;
@@ -900,57 +1006,15 @@ void StagefrightRecorder::clipVideoFrameWidth() {
}
}
-status_t StagefrightRecorder::setupCameraSource() {
- clipVideoBitRate();
- clipVideoFrameRate();
- clipVideoFrameWidth();
- clipVideoFrameHeight();
-
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if (mCamera == 0) {
- mCamera = Camera::connect(mCameraId);
- if (mCamera == 0) {
- LOGE("Camera connection could not be established.");
- return -EBUSY;
- }
- mFlags &= ~FLAGS_HOT_CAMERA;
- mCamera->lock();
- }
-
- // Set the actual video recording frame size
- CameraParameters params(mCamera->getParameters());
- params.setPreviewSize(mVideoWidth, mVideoHeight);
- params.setPreviewFrameRate(mFrameRate);
- String8 s = params.flatten();
- if (OK != mCamera->setParameters(s)) {
- LOGE("Could not change settings."
- " Someone else is using camera %d?", mCameraId);
- return -EBUSY;
- }
- CameraParameters newCameraParams(mCamera->getParameters());
-
- // Check on video frame size
- int frameWidth = 0, frameHeight = 0;
- newCameraParams.getPreviewSize(&frameWidth, &frameHeight);
- if (frameWidth < 0 || frameWidth != mVideoWidth ||
- frameHeight < 0 || frameHeight != mVideoHeight) {
- LOGE("Failed to set the video frame size to %dx%d",
- mVideoWidth, mVideoHeight);
- IPCThreadState::self()->restoreCallingIdentity(token);
- return UNKNOWN_ERROR;
- }
-
- // Check on video frame rate
- int frameRate = newCameraParams.getPreviewFrameRate();
- if (frameRate < 0 || (frameRate - mFrameRate) != 0) {
- LOGE("Failed to set frame rate to %d fps. The actual "
- "frame rate is %d", mFrameRate, frameRate);
+status_t StagefrightRecorder::checkVideoEncoderCapabilities() {
+ if (!mCaptureTimeLapse) {
+ // Don't clip for time lapse capture, as the encoder will have enough
+ // time to encode because of the slow capture rate of time lapse.
+ clipVideoBitRate();
+ clipVideoFrameRate();
+ clipVideoFrameWidth();
+ clipVideoFrameHeight();
}
-
- // This CHECK is good, since we just passed the lock/unlock
- // check earlier by calling mCamera->setParameters().
- CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface));
- IPCThreadState::self()->restoreCallingIdentity(token);
return OK;
}
@@ -971,17 +1035,49 @@ void StagefrightRecorder::clipVideoFrameHeight() {
}
}
-status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
- source->clear();
+status_t StagefrightRecorder::setupCameraSource(sp<CameraSource> *cameraSource) {
+ status_t err = OK;
+ if ((err = checkVideoEncoderCapabilities()) != OK) {
+ return err;
+ }
+ Size videoSize;
+ videoSize.width = mVideoWidth;
+ videoSize.height = mVideoHeight;
+ if (mCaptureTimeLapse) {
+ mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera(
+ mCamera, mCameraId,
+ videoSize, mFrameRate, mPreviewSurface,
+ mTimeBetweenTimeLapseFrameCaptureUs);
+ *cameraSource = mCameraSourceTimeLapse;
+ } else {
+ *cameraSource = CameraSource::CreateFromCamera(
+ mCamera, mCameraId, videoSize, mFrameRate, mPreviewSurface);
+ }
+ CHECK(*cameraSource != NULL);
+
+ // When the frame rate is not set, fall back to the frame rate
+ // the camera is currently using.
+ if (mFrameRate == -1) {
+ int32_t frameRate = 0;
+ CHECK ((*cameraSource)->getFormat()->findInt32(
+ kKeySampleRate, &frameRate));
+ LOGI("Frame rate is not explicitly set. Use the current frame "
+ "rate (%d fps)", frameRate);
+ mFrameRate = frameRate;
+ }
- status_t err = setupCameraSource();
- if (err != OK) return err;
+ CHECK(mFrameRate != -1);
+ return OK;
+}
- sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera);
- CHECK(cameraSource != NULL);
+status_t StagefrightRecorder::setupVideoEncoder(
+ sp<MediaSource> cameraSource,
+ int32_t videoBitRate,
+ sp<MediaSource> *source) {
+ source->clear();
sp<MetaData> enc_meta = new MetaData;
- enc_meta->setInt32(kKeyBitRate, mVideoBitRate);
+ enc_meta->setInt32(kKeyBitRate, videoBitRate);
enc_meta->setInt32(kKeySampleRate, mFrameRate);
switch (mVideoEncoder) {
@@ -1025,14 +1121,24 @@ status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) {
}
if (mVideoEncoderLevel != -1) {
enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+ } else if (mCaptureTimeLapse) {
+ // Check if we are using high resolution and/or high bitrate and
+ // set appropriate level for the software AVCEncoder.
+ if ((width * height >= 921600) // 720p
+ || (videoBitRate >= 20000000)) {
+ enc_meta->setInt32(kKeyVideoLevel, 50);
+ }
}
OMXClient client;
CHECK_EQ(client.connect(), OK);
+ // Use software codec for time lapse
+ uint32_t encoder_flags = (mCaptureTimeLapse) ? OMXCodec::kPreferSoftwareCodecs : 0;
sp<MediaSource> encoder = OMXCodec::Create(
client.interface(), enc_meta,
- true /* createEncoder */, cameraSource);
+ true /* createEncoder */, cameraSource,
+ NULL, encoder_flags);
if (encoder == NULL) {
return UNKNOWN_ERROR;
}
@@ -1063,51 +1169,147 @@ status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
return OK;
}
-status_t StagefrightRecorder::startMPEG4Recording() {
- int32_t totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4Recording(
+ bool useSplitCameraSource,
+ int outputFd,
+ int32_t videoWidth, int32_t videoHeight,
+ int32_t videoBitRate,
+ int32_t *totalBitRate,
+ sp<MediaWriter> *mediaWriter) {
+ mediaWriter->clear();
+ *totalBitRate = 0;
status_t err = OK;
- sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd));
+ sp<MediaWriter> writer = new MPEG4Writer(dup(outputFd));
// Add audio source first if it exists
- if (mAudioSource != AUDIO_SOURCE_LIST_END) {
+ if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_LIST_END)) {
err = setupAudioEncoder(writer);
if (err != OK) return err;
- totalBitRate += mAudioBitRate;
+ *totalBitRate += mAudioBitRate;
}
if (mVideoSource == VIDEO_SOURCE_DEFAULT
|| mVideoSource == VIDEO_SOURCE_CAMERA) {
+
+ sp<MediaSource> cameraMediaSource;
+ if (useSplitCameraSource) {
+ LOGV("Using Split camera source");
+ cameraMediaSource = mCameraSourceSplitter->createClient();
+ } else {
+ sp<CameraSource> cameraSource;
+ err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+ cameraMediaSource = cameraSource;
+ }
+ if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) {
+ // Use downsampling from the original source.
+ cameraMediaSource =
+ new VideoSourceDownSampler(cameraMediaSource, videoWidth, videoHeight);
+ }
+
sp<MediaSource> encoder;
- err = setupVideoEncoder(&encoder);
- if (err != OK) return err;
+ err = setupVideoEncoder(cameraMediaSource, videoBitRate, &encoder);
+ if (err != OK) {
+ return err;
+ }
+
writer->addSource(encoder);
- totalBitRate += mVideoBitRate;
+ *totalBitRate += videoBitRate;
}
if (mInterleaveDurationUs > 0) {
reinterpret_cast<MPEG4Writer *>(writer.get())->
setInterleaveDuration(mInterleaveDurationUs);
}
-
if (mMaxFileDurationUs != 0) {
writer->setMaxFileDuration(mMaxFileDurationUs);
}
if (mMaxFileSizeBytes != 0) {
writer->setMaxFileSize(mMaxFileSizeBytes);
}
- sp<MetaData> meta = new MetaData;
- meta->setInt64(kKeyTime, systemTime() / 1000);
- meta->setInt32(kKeyFileType, mOutputFormat);
- meta->setInt32(kKeyBitRate, totalBitRate);
- meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
+
+ writer->setListener(mListener);
+ *mediaWriter = writer;
+ return OK;
+}
+
+void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+ sp<MetaData> *meta) {
+ (*meta)->setInt64(kKeyTime, startTimeUs);
+ (*meta)->setInt32(kKeyFileType, mOutputFormat);
+ (*meta)->setInt32(kKeyBitRate, totalBitRate);
+ (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
if (mMovieTimeScale > 0) {
- meta->setInt32(kKeyTimeScale, mMovieTimeScale);
+ (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
}
if (mTrackEveryTimeDurationUs > 0) {
- meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
+ (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs);
}
- writer->setListener(mListener);
- mWriter = writer;
- return mWriter->start(meta.get());
+}
+
+status_t StagefrightRecorder::startMPEG4Recording() {
+ if (mCaptureAuxVideo) {
+ if (!mCaptureTimeLapse) {
+ LOGE("Auxiliary video can be captured only in time lapse mode");
+ return UNKNOWN_ERROR;
+ }
+ LOGV("Creating MediaSourceSplitter");
+ sp<CameraSource> cameraSource;
+ status_t err = setupCameraSource(&cameraSource);
+ if (err != OK) {
+ return err;
+ }
+ mCameraSourceSplitter = new MediaSourceSplitter(cameraSource);
+ } else {
+ mCameraSourceSplitter = NULL;
+ }
+
+ int32_t totalBitRate;
+ status_t err = setupMPEG4Recording(mCaptureAuxVideo,
+ mOutputFd, mVideoWidth, mVideoHeight,
+ mVideoBitRate, &totalBitRate, &mWriter);
+ if (err != OK) {
+ return err;
+ }
+
+ int64_t startTimeUs = systemTime() / 1000;
+ sp<MetaData> meta = new MetaData;
+ setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
+
+ err = mWriter->start(meta.get());
+ if (err != OK) {
+ return err;
+ }
+
+ if (mCaptureAuxVideo) {
+ CHECK(mOutputFdAux >= 0);
+ if (mWriterAux != NULL) {
+ LOGE("Auxiliary File writer is not avaialble");
+ return UNKNOWN_ERROR;
+ }
+ if ((mAuxVideoWidth > mVideoWidth) || (mAuxVideoHeight > mVideoHeight) ||
+ ((mAuxVideoWidth == mVideoWidth) && mAuxVideoHeight == mVideoHeight)) {
+ LOGE("Auxiliary video size (%d x %d) same or larger than the main video size (%d x %d)",
+ mAuxVideoWidth, mAuxVideoHeight, mVideoWidth, mVideoHeight);
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t totalBitrateAux;
+ err = setupMPEG4Recording(mCaptureAuxVideo,
+ mOutputFdAux, mAuxVideoWidth, mAuxVideoHeight,
+ mAuxVideoBitRate, &totalBitrateAux, &mWriterAux);
+ if (err != OK) {
+ return err;
+ }
+
+ sp<MetaData> metaAux = new MetaData;
+ setupMPEG4MetaData(startTimeUs, totalBitrateAux, &metaAux);
+
+ return mWriterAux->start(metaAux.get());
+ }
+
+ return OK;
}
status_t StagefrightRecorder::pause() {
@@ -1116,28 +1318,36 @@ status_t StagefrightRecorder::pause() {
return UNKNOWN_ERROR;
}
mWriter->pause();
+
+ if (mCaptureAuxVideo) {
+ if (mWriterAux == NULL) {
+ return UNKNOWN_ERROR;
+ }
+ mWriterAux->pause();
+ }
+
return OK;
}
status_t StagefrightRecorder::stop() {
LOGV("stop");
status_t err = OK;
- if (mWriter != NULL) {
- err = mWriter->stop();
- mWriter.clear();
+
+ if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) {
+ mCameraSourceTimeLapse->startQuickReadReturns();
+ mCameraSourceTimeLapse = NULL;
}
- if (mCamera != 0) {
- LOGV("Disconnect camera");
- int64_t token = IPCThreadState::self()->clearCallingIdentity();
- if ((mFlags & FLAGS_HOT_CAMERA) == 0) {
- LOGV("Camera was cold when we started, stopping preview");
- mCamera->stopPreview();
+ if (mCaptureAuxVideo) {
+ if (mWriterAux != NULL) {
+ mWriterAux->stop();
+ mWriterAux.clear();
}
- mCamera->unlock();
- mCamera.clear();
- IPCThreadState::self()->restoreCallingIdentity(token);
- mFlags = 0;
+ }
+
+ if (mWriter != NULL) {
+ err = mWriter->stop();
+ mWriter.clear();
}
if (mOutputFd >= 0) {
@@ -1145,6 +1355,13 @@ status_t StagefrightRecorder::stop() {
mOutputFd = -1;
}
+ if (mCaptureAuxVideo) {
+ if (mOutputFdAux >= 0) {
+ ::close(mOutputFdAux);
+ mOutputFdAux = -1;
+ }
+ }
+
return err;
}
@@ -1169,8 +1386,11 @@ status_t StagefrightRecorder::reset() {
mVideoEncoder = VIDEO_ENCODER_H263;
mVideoWidth = 176;
mVideoHeight = 144;
- mFrameRate = 20;
+ mAuxVideoWidth = 176;
+ mAuxVideoHeight = 144;
+ mFrameRate = -1;
mVideoBitRate = 192000;
+ mAuxVideoBitRate = 192000;
mSampleRate = 8000;
mAudioChannels = 1;
mAudioBitRate = 12200;
@@ -1187,10 +1407,15 @@ status_t StagefrightRecorder::reset() {
mMaxFileDurationUs = 0;
mMaxFileSizeBytes = 0;
mTrackEveryTimeDurationUs = 0;
+ mCaptureTimeLapse = false;
+ mTimeBetweenTimeLapseFrameCaptureUs = -1;
+ mCaptureAuxVideo = false;
+ mCameraSourceSplitter = NULL;
+ mCameraSourceTimeLapse = NULL;
mEncoderProfiles = MediaProfiles::getInstance();
mOutputFd = -1;
- mFlags = 0;
+ mOutputFdAux = -1;
return OK;
}
@@ -1227,6 +1452,8 @@ status_t StagefrightRecorder::dump(
snprintf(buffer, SIZE, " Recorder: %p\n", this);
snprintf(buffer, SIZE, " Output file (fd %d):\n", mOutputFd);
result.append(buffer);
+ snprintf(buffer, SIZE, " Output file Auxiliary (fd %d):\n", mOutputFdAux);
+ result.append(buffer);
snprintf(buffer, SIZE, " File format: %d\n", mOutputFormat);
result.append(buffer);
snprintf(buffer, SIZE, " Max file size (bytes): %lld\n", mMaxFileSizeBytes);
@@ -1259,8 +1486,6 @@ status_t StagefrightRecorder::dump(
result.append(buffer);
snprintf(buffer, SIZE, " Camera Id: %d\n", mCameraId);
result.append(buffer);
- snprintf(buffer, SIZE, " Camera flags: %d\n", mFlags);
- result.append(buffer);
snprintf(buffer, SIZE, " Encoder: %d\n", mVideoEncoder);
result.append(buffer);
snprintf(buffer, SIZE, " Encoder profile: %d\n", mVideoEncoderProfile);
@@ -1271,10 +1496,14 @@ status_t StagefrightRecorder::dump(
result.append(buffer);
snprintf(buffer, SIZE, " Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight);
result.append(buffer);
+ snprintf(buffer, SIZE, " Aux Frame size (pixels): %dx%d\n", mAuxVideoWidth, mAuxVideoHeight);
+ result.append(buffer);
snprintf(buffer, SIZE, " Frame rate (fps): %d\n", mFrameRate);
result.append(buffer);
snprintf(buffer, SIZE, " Bit rate (bps): %d\n", mVideoBitRate);
result.append(buffer);
+ snprintf(buffer, SIZE, " Aux Bit rate (bps): %d\n", mAuxVideoBitRate);
+ result.append(buffer);
::write(fd, result.string(), result.size());
return OK;
}
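The new recorder keys parsed in setParameter() above would typically arrive as one semicolon-separated string through setParameters(). A hedged sketch of building such a string follows; the key names are the ones added above, while the recorder instance and the surrounding setup are assumed:

    // Illustrative only. The key names match StagefrightRecorder::setParameter().
    #include <utils/String8.h>

    using namespace android;

    static String8 buildTimeLapseParams() {
        String8 params;
        params.append("time-lapse-enable=1;");
        // Value is in milliseconds; setParameter() multiplies it by 1000
        // before calling setParamTimeBetweenTimeLapseFrameCapture().
        params.append("time-between-time-lapse-frame-capture=500;");
        params.append("video-aux-param-width=176;");
        params.append("video-aux-param-height=144;");
        params.append("video-aux-param-encoding-bitrate=192000");
        return params;
    }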
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index ad0dfa0..7d2549f 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -19,13 +19,18 @@
#define STAGEFRIGHT_RECORDER_H_
#include <media/MediaRecorderBase.h>
+#include <camera/CameraParameters.h>
#include <utils/String8.h>
namespace android {
class Camera;
+class CameraSource;
+class CameraSourceTimeLapse;
+class MediaSourceSplitter;
struct MediaSource;
struct MediaWriter;
+class MetaData;
struct AudioSource;
class MediaProfiles;
@@ -42,9 +47,10 @@ struct StagefrightRecorder : public MediaRecorderBase {
virtual status_t setVideoSize(int width, int height);
virtual status_t setVideoFrameRate(int frames_per_second);
virtual status_t setCamera(const sp<ICamera>& camera);
- virtual status_t setPreviewSurface(const sp<ISurface>& surface);
+ virtual status_t setPreviewSurface(const sp<Surface>& surface);
virtual status_t setOutputFile(const char *path);
virtual status_t setOutputFile(int fd, int64_t offset, int64_t length);
+ virtual status_t setOutputFileAuxiliary(int fd);
virtual status_t setParameters(const String8& params);
virtual status_t setListener(const sp<IMediaRecorderClient>& listener);
virtual status_t prepare();
@@ -57,15 +63,10 @@ struct StagefrightRecorder : public MediaRecorderBase {
virtual status_t dump(int fd, const Vector<String16>& args) const;
private:
- enum CameraFlags {
- FLAGS_SET_CAMERA = 1L << 0,
- FLAGS_HOT_CAMERA = 1L << 1,
- };
-
- sp<Camera> mCamera;
- sp<ISurface> mPreviewSurface;
+ sp<ICamera> mCamera;
+ sp<Surface> mPreviewSurface;
sp<IMediaRecorderClient> mListener;
- sp<MediaWriter> mWriter;
+ sp<MediaWriter> mWriter, mWriterAux;
sp<AudioSource> mAudioSourceNode;
audio_source mAudioSource;
@@ -75,8 +76,9 @@ private:
video_encoder mVideoEncoder;
bool mUse64BitFileOffset;
int32_t mVideoWidth, mVideoHeight;
+ int32_t mAuxVideoWidth, mAuxVideoHeight;
int32_t mFrameRate;
- int32_t mVideoBitRate;
+ int32_t mVideoBitRate, mAuxVideoBitRate;
int32_t mAudioBitRate;
int32_t mAudioChannels;
int32_t mSampleRate;
@@ -92,21 +94,39 @@ private:
int64_t mMaxFileDurationUs;
int64_t mTrackEveryTimeDurationUs;
+ bool mCaptureTimeLapse;
+ int64_t mTimeBetweenTimeLapseFrameCaptureUs;
+ bool mCaptureAuxVideo;
+ sp<MediaSourceSplitter> mCameraSourceSplitter;
+ sp<CameraSourceTimeLapse> mCameraSourceTimeLapse;
+
String8 mParams;
- int mOutputFd;
- int32_t mFlags;
+ int mOutputFd, mOutputFdAux;
MediaProfiles *mEncoderProfiles;
+ status_t setupMPEG4Recording(
+ bool useSplitCameraSource,
+ int outputFd,
+ int32_t videoWidth, int32_t videoHeight,
+ int32_t videoBitRate,
+ int32_t *totalBitRate,
+ sp<MediaWriter> *mediaWriter);
+ void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
+ sp<MetaData> *meta);
status_t startMPEG4Recording();
status_t startAMRRecording();
status_t startAACRecording();
status_t startRTPRecording();
status_t startMPEG2TSRecording();
sp<MediaSource> createAudioSource();
- status_t setupCameraSource();
+ status_t checkVideoEncoderCapabilities();
+ status_t setupCameraSource(sp<CameraSource> *cameraSource);
status_t setupAudioEncoder(const sp<MediaWriter>& writer);
- status_t setupVideoEncoder(sp<MediaSource> *source);
+ status_t setupVideoEncoder(
+ sp<MediaSource> cameraSource,
+ int32_t videoBitRate,
+ sp<MediaSource> *source);
// Encoding parameter handling utilities
status_t setParameter(const String8 &key, const String8 &value);
@@ -114,6 +134,11 @@ private:
status_t setParamAudioNumberOfChannels(int32_t channles);
status_t setParamAudioSamplingRate(int32_t sampleRate);
status_t setParamAudioTimeScale(int32_t timeScale);
+ status_t setParamTimeLapseEnable(int32_t timeLapseEnable);
+ status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs);
+ status_t setParamAuxVideoHeight(int32_t height);
+ status_t setParamAuxVideoWidth(int32_t width);
+ status_t setParamAuxVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoEncodingBitRate(int32_t bitRate);
status_t setParamVideoIFramesInterval(int32_t seconds);
status_t setParamVideoEncoderProfile(int32_t profile);
@@ -138,4 +163,3 @@ private:
} // namespace android
#endif // STAGEFRIGHT_RECORDER_H_
-
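For readers tracing the auxiliary pipeline declared in this header: setupMPEG4Recording() effectively turns one camera source into two tracks, the second of them downsampled. A simplified sketch of that wiring (not literal patch code; the camera source is assumed to come from setupCameraSource(), and error handling is omitted):

    // Simplified sketch of the aux-video wiring.
    #include <media/stagefright/CameraSource.h>
    #include <media/stagefright/MediaSourceSplitter.h>
    #include <media/stagefright/VideoSourceDownSampler.h>

    using namespace android;

    static void splitForAuxRecording(const sp<CameraSource>& cameraSource,
                                     int32_t auxWidth, int32_t auxHeight,
                                     sp<MediaSource>* mainSource,
                                     sp<MediaSource>* auxSource) {
        sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(cameraSource);
        *mainSource = splitter->createClient();              // full-size track
        *auxSource = new VideoSourceDownSampler(             // smaller aux track
                splitter->createClient(), auxWidth, auxHeight);
    }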
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index 6e6c3cd..5eaf592 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -75,7 +75,10 @@ class TestPlayerStub : public MediaPlayerInterface {
// All the methods below wrap the mPlayer instance.
- virtual status_t setVideoSurface(const android::sp<android::ISurface>& s) {
+ virtual status_t setVideoISurface(const android::sp<android::ISurface>& s) {
+ return mPlayer->setVideoISurface(s);
+ }
+ virtual status_t setVideoSurface(const android::sp<android::Surface>& s) {
return mPlayer->setVideoSurface(s);
}
virtual status_t prepare() {return mPlayer->prepare();}
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index d674547..d1870ee 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -10,6 +10,8 @@ LOCAL_SRC_FILES:= \
AudioSource.cpp \
AwesomePlayer.cpp \
CameraSource.cpp \
+ CameraSourceTimeLapse.cpp \
+ VideoSourceDownSampler.cpp \
DataSource.cpp \
DRMExtractor.cpp \
ESDS.cpp \
@@ -25,6 +27,7 @@ LOCAL_SRC_FILES:= \
MediaDefs.cpp \
MediaExtractor.cpp \
MediaSource.cpp \
+ MediaSourceSplitter.cpp \
MetaData.cpp \
NuCachedSource2.cpp \
NuHTTPDataSource.cpp \
@@ -60,6 +63,7 @@ LOCAL_SHARED_LIBRARIES := \
libsonivox \
libvorbisidec \
libsurfaceflinger_client \
+ libstagefright_yuv \
libcamera_client \
libdrmframework
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 6eeab05..cf04e92 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -45,7 +45,7 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -101,13 +101,14 @@ struct AwesomeLocalRenderer : public AwesomeRenderer {
bool previewOnly,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<ISurface> &isurface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight)
: mTarget(NULL),
mLibHandle(NULL) {
init(previewOnly, componentName,
- colorFormat, surface, displayWidth,
+ colorFormat, isurface, surface, displayWidth,
displayHeight, decodedWidth, decodedHeight);
}
@@ -139,7 +140,8 @@ private:
bool previewOnly,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<ISurface> &isurface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight);
@@ -151,7 +153,8 @@ void AwesomeLocalRenderer::init(
bool previewOnly,
const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<ISurface> &isurface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight) {
if (!previewOnly) {
@@ -177,7 +180,7 @@ void AwesomeLocalRenderer::init(
if (func) {
mTarget =
- (*func)(surface, componentName, colorFormat,
+ (*func)(isurface, componentName, colorFormat,
displayWidth, displayHeight,
decodedWidth, decodedHeight);
}
@@ -191,6 +194,35 @@ void AwesomeLocalRenderer::init(
}
}
+struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
+ AwesomeNativeWindowRenderer(const sp<ANativeWindow> &nativeWindow)
+ : mNativeWindow(nativeWindow) {
+ }
+
+ virtual void render(MediaBuffer *buffer) {
+ status_t err = mNativeWindow->queueBuffer(
+ mNativeWindow.get(), buffer->graphicBuffer().get());
+ if (err != 0) {
+ LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
+ -err);
+ return;
+ }
+
+ sp<MetaData> metaData = buffer->meta_data();
+ metaData->setInt32(kKeyRendered, 1);
+ }
+
+protected:
+ virtual ~AwesomeNativeWindowRenderer() {}
+
+private:
+ sp<ANativeWindow> mNativeWindow;
+
+ AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
+ AwesomeNativeWindowRenderer &operator=(
+ const AwesomeNativeWindowRenderer &);
+};
+
AwesomePlayer::AwesomePlayer()
: mQueueStarted(false),
mTimeSource(NULL),
@@ -406,6 +438,12 @@ void AwesomePlayer::reset_l() {
LOGI("interrupting the connection process");
mConnectingDataSource->disconnect();
}
+
+ if (mFlags & PREPARING_CONNECTED) {
+ // We are basically done preparing; we're just buffering
+ // enough data to start playback, so we can safely interrupt that.
+ finishAsyncPrepare_l();
+ }
}
while (mFlags & PREPARING) {
@@ -806,8 +844,18 @@ status_t AwesomePlayer::play_l() {
return OK;
}
+void AwesomePlayer::notifyVideoSize_l() {
+ sp<MetaData> meta = mVideoSource->getFormat();
+
+ int32_t decodedWidth, decodedHeight;
+ CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
+ CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
+
+ notifyListener_l(MEDIA_SET_VIDEO_SIZE, decodedWidth, decodedHeight);
+}
+
void AwesomePlayer::initRenderer_l() {
- if (mISurface != NULL) {
+ if (mSurface != NULL || mISurface != NULL) {
sp<MetaData> meta = mVideoSource->getFormat();
int32_t format;
@@ -824,7 +872,27 @@ void AwesomePlayer::initRenderer_l() {
// before creating a new one.
IPCThreadState::self()->flushCommands();
- if (!strncmp("OMX.", component, 4)) {
+ if (mSurface != NULL) {
+ if (strncmp(component, "OMX.", 4) == 0) {
+ // Hardware decoders avoid the CPU color conversion by decoding
+ // directly to ANativeBuffers, so we must use a renderer that
+ // just pushes those buffers to the ANativeWindow.
+ mVideoRenderer = new AwesomeNativeWindowRenderer(mSurface);
+ } else {
+ // Other decoders are instantiated locally and as a consequence
+ // allocate their buffers in local address space. This renderer
+ // then performs a color conversion and copy to get the data
+ // into the ANativeBuffer.
+ mVideoRenderer = new AwesomeLocalRenderer(
+ false, // previewOnly
+ component,
+ (OMX_COLOR_FORMATTYPE)format,
+ mISurface,
+ mSurface,
+ mVideoWidth, mVideoHeight,
+ decodedWidth, decodedHeight);
+ }
+ } else {
// Our OMX codecs allocate buffers on the media_server side
// therefore they require a remote IOMXRenderer that knows how
// to display them.
@@ -834,16 +902,6 @@ void AwesomePlayer::initRenderer_l() {
(OMX_COLOR_FORMATTYPE)format,
decodedWidth, decodedHeight,
mVideoWidth, mVideoHeight));
- } else {
- // Other decoders are instantiated locally and as a consequence
- // allocate their buffers in local address space.
- mVideoRenderer = new AwesomeLocalRenderer(
- false, // previewOnly
- component,
- (OMX_COLOR_FORMATTYPE)format,
- mISurface,
- mVideoWidth, mVideoHeight,
- decodedWidth, decodedHeight);
}
}
}
@@ -894,6 +952,12 @@ void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
mISurface = isurface;
}
+void AwesomePlayer::setSurface(const sp<Surface> &surface) {
+ Mutex::Autolock autoLock(mLock);
+
+ mSurface = surface;
+}
+
void AwesomePlayer::setAudioSink(
const sp<MediaPlayerBase::AudioSink> &audioSink) {
Mutex::Autolock autoLock(mLock);
@@ -1079,7 +1143,7 @@ status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
mClient.interface(), mVideoTrack->getFormat(),
false, // createEncoder
mVideoTrack,
- NULL, flags);
+ NULL, flags, mSurface);
if (mVideoSource != NULL) {
int64_t durationUs;
@@ -1110,7 +1174,7 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
}
if (mAudioPlayer != NULL) {
- LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);
+ LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
// If we don't have a video time, seek audio to the originally
// requested seek time instead.
@@ -1184,6 +1248,8 @@ void AwesomePlayer::onVideoEvent() {
if (err == INFO_FORMAT_CHANGED) {
LOGV("VideoSource signalled format change.");
+ notifyVideoSize_l();
+
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
initRenderer_l();
@@ -1628,7 +1694,7 @@ void AwesomePlayer::abortPrepare(status_t err) {
}
mPrepareResult = err;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
}
@@ -1676,6 +1742,8 @@ void AwesomePlayer::onPrepareAsyncEvent() {
}
}
+ mFlags |= PREPARING_CONNECTED;
+
if (mCachedSource != NULL || mRTSPController != NULL) {
postBufferingEvent_l();
} else {
@@ -1685,17 +1753,17 @@ void AwesomePlayer::onPrepareAsyncEvent() {
void AwesomePlayer::finishAsyncPrepare_l() {
if (mIsAsyncPrepare) {
- if (mVideoWidth < 0 || mVideoHeight < 0) {
+ if (mVideoSource == NULL) {
notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
} else {
- notifyListener_l(MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
+ notifyVideoSize_l();
}
notifyListener_l(MEDIA_PREPARED);
}
mPrepareResult = OK;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mFlags |= PREPARED;
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
@@ -1810,13 +1878,14 @@ status_t AwesomePlayer::resume() {
mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
- if (state->mLastVideoFrame && mISurface != NULL) {
+ if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) {
mVideoRenderer =
new AwesomeLocalRenderer(
true, // previewOnly
"",
(OMX_COLOR_FORMATTYPE)state->mColorFormat,
mISurface,
+ mSurface,
state->mVideoWidth,
state->mVideoHeight,
state->mDecodedWidth,
@@ -1851,4 +1920,3 @@ void AwesomePlayer::postAudioSeekComplete() {
}
} // namespace android
-
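The renderer split introduced above comes down to one decision: buffers decoded by OMX.* components already live in gralloc-backed ANativeBuffers and can be queued straight to the window, while locally decoded buffers still need the color-converting local renderer. A sketch of the zero-copy path (illustrative only; the header pulled in for ANativeWindow is an assumption):

    // Sketch of the push-to-native-window step used by
    // AwesomeNativeWindowRenderer above; not literal patch code.
    #include <media/stagefright/MediaBuffer.h>
    #include <media/stagefright/MetaData.h>
    #include <surfaceflinger/Surface.h>  // assumed to pull in ANativeWindow

    using namespace android;

    static void pushDecodedFrame(const sp<ANativeWindow>& window,
                                 MediaBuffer* buffer) {
        // The decoded frame is backed by a graphic buffer obtained from this
        // same native window, so queueing it is enough to display it.
        if (window->queueBuffer(window.get(), buffer->graphicBuffer().get()) != 0) {
            return;  // drop the frame on failure, as the renderer does
        }
        buffer->meta_data()->setInt32(kKeyRendered, 1);
    }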
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 89cb135..159d937 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,6 +27,7 @@
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -65,6 +66,11 @@ void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
LOGV("postData(%d, ptr:%p, size:%d)",
msgType, dataPtr->pointer(), dataPtr->size());
+
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != NULL) {
+ source->dataCallback(msgType, dataPtr);
+ }
}
void CameraSourceListener::postDataTimestamp(
@@ -77,6 +83,10 @@ void CameraSourceListener::postDataTimestamp(
}
static int32_t getColorFormat(const char* colorFormat) {
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+ return OMX_COLOR_FormatYUV420Planar;
+ }
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
return OMX_COLOR_FormatYUV422SemiPlanar;
}
@@ -99,72 +109,430 @@ static int32_t getColorFormat(const char* colorFormat) {
CHECK_EQ(0, "Unknown color format");
}
-// static
CameraSource *CameraSource::Create() {
- sp<Camera> camera = Camera::connect(0);
-
- if (camera.get() == NULL) {
- return NULL;
- }
+ Size size;
+ size.width = -1;
+ size.height = -1;
- return new CameraSource(camera);
+ sp<ICamera> camera;
+ return new CameraSource(camera, 0, size, -1, NULL, false);
}
// static
-CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
- if (camera.get() == NULL) {
- return NULL;
+CameraSource *CameraSource::CreateFromCamera(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers) {
+
+ CameraSource *source = new CameraSource(camera, cameraId,
+ videoSize, frameRate, surface,
+ storeMetaDataInVideoBuffers);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
}
-
- return new CameraSource(camera);
+ return source;
}
-CameraSource::CameraSource(const sp<Camera> &camera)
- : mCamera(camera),
- mFirstFrameTimeUs(0),
- mLastFrameTimestampUs(0),
+CameraSource::CameraSource(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers)
+ : mCameraFlags(0),
+ mVideoFrameRate(-1),
+ mCamera(0),
+ mSurface(surface),
mNumFramesReceived(0),
+ mLastFrameTimestampUs(0),
+ mStarted(false),
+ mFirstFrameTimeUs(0),
mNumFramesEncoded(0),
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false),
- mStarted(false) {
+ mCollectStats(false) {
+
+ mVideoSize.width = -1;
+ mVideoSize.height = -1;
+
+ mInitCheck = init(camera, cameraId,
+ videoSize, frameRate,
+ storeMetaDataInVideoBuffers);
+}
+
+status_t CameraSource::initCheck() const {
+ return mInitCheck;
+}
+
+status_t CameraSource::isCameraAvailable(
+ const sp<ICamera>& camera, int32_t cameraId) {
+
+ if (camera == 0) {
+ mCamera = Camera::connect(cameraId);
+ mCameraFlags &= ~FLAGS_HOT_CAMERA;
+ } else {
+ mCamera = Camera::create(camera);
+ mCameraFlags |= FLAGS_HOT_CAMERA;
+ }
+
+ // Is camera available?
+ if (mCamera == 0) {
+ LOGE("Camera connection could not be established.");
+ return -EBUSY;
+ }
+ if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
+ mCamera->lock();
+ }
+ return OK;
+}
+
+
+/*
+ * Check to see whether the requested video width and height is one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param suppportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+ int32_t width, int32_t height,
+ const Vector<Size>& supportedSizes) {
+
+ LOGV("isVideoSizeSupported");
+ for (size_t i = 0; i < supportedSizes.size(); ++i) {
+ if (width == supportedSizes[i].width &&
+ height == supportedSizes[i].height) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/*
+ * If the preview and video outputs are separate, we only set the
+ * video size, and applications should set the preview size to some
+ * proper value; the recording framework will not change the preview
+ * size. Otherwise, if the video and preview outputs are the same,
+ * we need to set the preview size to be the same as the requested
+ * video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @param isSetVideoSizeSupported returns whether the method
+ * CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ * supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+ const CameraParameters& params,
+ bool *isSetVideoSizeSupported,
+ Vector<Size>& sizes) {
+
+ *isSetVideoSizeSupported = true;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ LOGD("Camera does not support setVideoSize()");
+ params.getSupportedPreviewSizes(sizes);
+ *isSetVideoSizeSupported = false;
+ }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t CameraSource::isCameraColorFormatSupported(
+ const CameraParameters& params) {
+ mColorFormat = getColorFormat(params.get(
+ CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+ if (mColorFormat == -1) {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration of the video size is skipped.
+ * If frameRate is -1, configuration of the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t CameraSource::configureCamera(
+ CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate) {
+
+ Vector<Size> sizes;
+ bool isSetVideoSizeSupportedByCamera = true;
+ getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+ bool isCameraParamChanged = false;
+ if (width != -1 && height != -1) {
+ if (!isVideoSizeSupported(width, height, sizes)) {
+ LOGE("Video dimension (%dx%d) is unsupported", width, height);
+ return BAD_VALUE;
+ }
+ if (isSetVideoSizeSupportedByCamera) {
+ params->setVideoSize(width, height);
+ } else {
+ params->setPreviewSize(width, height);
+ }
+ isCameraParamChanged = true;
+ } else if ((width == -1 && height != -1) ||
+ (width != -1 && height == -1)) {
+ // If one and only one of the width and height is -1
+ // we reject such a request.
+ LOGE("Requested video size (%dx%d) is not supported", width, height);
+ return BAD_VALUE;
+ } else { // width == -1 && height == -1
+ // Do not configure the camera.
+ // Use the current width and height value setting from the camera.
+ }
+
+ if (frameRate != -1) {
+ CHECK(frameRate > 0 && frameRate <= 120);
+ const char* supportedFrameRates =
+ params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+ CHECK(supportedFrameRates != NULL);
+ LOGV("Supported frame rates: %s", supportedFrameRates);
+ char buf[4];
+ snprintf(buf, 4, "%d", frameRate);
+ if (strstr(supportedFrameRates, buf) == NULL) {
+ LOGE("Requested frame rate (%d) is not supported: %s",
+ frameRate, supportedFrameRates);
+ return BAD_VALUE;
+ }
+
+ // The frame rate is supported, set the camera to the requested value.
+ params->setPreviewFrameRate(frameRate);
+ isCameraParamChanged = true;
+ } else { // frameRate == -1
+ // Do not configure the camera.
+ // Use the current frame rate value setting from the camera
+ }
+
+ if (isCameraParamChanged) {
+ // Either frame rate or frame size needs to be changed.
+ String8 s = params->flatten();
+ if (OK != mCamera->setParameters(s)) {
+ LOGE("Could not change settings."
+ " Someone else is using camera %p?", mCamera.get());
+ return -EBUSY;
+ }
+ }
+ return OK;
+}
+
+/*
+ * Check whether the requested video frame size
+ * has been successfully configured or not. If both width and height
+ * are -1, the check is performed against the current width and
+ * height settings.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param width the target video frame width in pixels to check against
+ * @param height the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t CameraSource::checkVideoSize(
+ const CameraParameters& params,
+ int32_t width, int32_t height) {
+
+ // The actual video size is the same as the preview size
+ // if the camera hal does not support separate video and
+ // preview output. In this case, we retrieve the video
+ // size from preview.
+ int32_t frameWidthActual = -1;
+ int32_t frameHeightActual = -1;
+ Vector<Size> sizes;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ // video size is the same as preview size
+ params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+ } else {
+ // video size may not be the same as preview
+ params.getVideoSize(&frameWidthActual, &frameHeightActual);
+ }
+ if (frameWidthActual < 0 || frameHeightActual < 0) {
+ LOGE("Failed to retrieve video frame size (%dx%d)",
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+ // Check the actual video frame size against the target/requested
+ // video frame size.
+ if (width != -1 && height != -1) {
+ if (frameWidthActual != width || frameHeightActual != height) {
+ LOGE("Failed to set video frame size to %dx%d. "
+ "The actual video size is %dx%d ", width, height,
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Good now.
+ mVideoSize.width = frameWidthActual;
+ mVideoSize.height = frameHeightActual;
+ return OK;
+}
+
+/*
+ * Check whether the requested frame rate has been successfully configured.
+ * If the target frameRate is -1, the check is performed against the
+ * current frame rate setting.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param frameRate the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t CameraSource::checkFrameRate(
+ const CameraParameters& params,
+ int32_t frameRate) {
+
+ int32_t frameRateActual = params.getPreviewFrameRate();
+ if (frameRateActual < 0) {
+ LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame rate against the target/requested
+ // video frame rate.
+ if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+ LOGE("Failed to set preview frame rate to %d fps. The actual "
+ "frame rate is %d", frameRate, frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Good now.
+ mVideoFrameRate = frameRateActual;
+ return OK;
+}
+
+/*
+ * Initialize the CameraSource so that it becomes
+ * ready to provide the video input streams as requested.
+ * @param camera the camera object used for the video source
+ * @param cameraId if camera == 0, use camera with this id
+ * as the video source
+ * @param videoSize the target video frame size. If both
+ * width and height in videoSize are -1, use the current
+ * width and height settings of the camera
+ * @param frameRate the target frame rate in frames per second.
+ * if it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ * data or real YUV data in video buffers. Request to
+ * store meta data in video buffers may not be honored
+ * if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t CameraSource::init(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ bool storeMetaDataInVideoBuffers) {
+
+ status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- String8 s = mCamera->getParameters();
- IPCThreadState::self()->restoreCallingIdentity(token);
- printf("params: \"%s\"\n", s.string());
+ if ((err = isCameraAvailable(camera, cameraId)) != OK) {
+ return err;
+ }
+ CameraParameters params(mCamera->getParameters());
+ if ((err = isCameraColorFormatSupported(params)) != OK) {
+ return err;
+ }
+
+ // Set the camera to use the requested video frame size
+ // and/or frame rate.
+ if ((err = configureCamera(&params,
+ videoSize.width, videoSize.height,
+ frameRate))) {
+ return err;
+ }
+
+ // Check on video frame size and frame rate.
+ CameraParameters newCameraParams(mCamera->getParameters());
+ if ((err = checkVideoSize(newCameraParams,
+ videoSize.width, videoSize.height)) != OK) {
+ return err;
+ }
+ if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+ return err;
+ }
- int32_t width, height, stride, sliceHeight;
- CameraParameters params(s);
- params.getPreviewSize(&width, &height);
+ // This CHECK is good, since we just passed the lock/unlock
+ // check earlier by calling mCamera->setParameters().
+ CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
- // Calculate glitch duraton threshold based on frame rate
- int32_t frameRate = params.getPreviewFrameRate();
- int64_t glitchDurationUs = (1000000LL / frameRate);
+ mIsMetaDataStoredInVideoBuffers = false;
+ if (storeMetaDataInVideoBuffers &&
+ OK == mCamera->storeMetaDataInBuffers(true)) {
+ mIsMetaDataStoredInVideoBuffers = true;
+ }
+
+ /*
+ * mCamera->startRecording() signals the camera hal to make
+ * the video buffers available (for instance, allocation
+ * of the video buffers may be triggered when the camera hal's
+ * startRecording() method is called). Making these video
+ * buffers available early (before calling start()) is critical
+ * if one wants to configure omx video encoders to use these
+ * buffers for passing video frame data during video recording
+ * without the need to memcpy the video frame data stored
+ * in these buffers. Eliminating the memcpy for video frame data
+ * is crucial for the performance of HD quality video recording
+ * applications.
+ *
+ * Based on OMX IL spec, configuring the omx video encoders
+ * must occur in loaded state. When start() is called, omx
+ * video encoders are already in idle state, which is too
+ * late. Thus, we must call mCamera->startRecording() earlier.
+ */
+ startCameraRecording();
+
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
if (glitchDurationUs > mGlitchDurationThresholdUs) {
mGlitchDurationThresholdUs = glitchDurationUs;
}
- const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
- CHECK(colorFormatStr != NULL);
- int32_t colorFormat = getColorFormat(colorFormatStr);
-
// XXX: query camera for the stride and slice height
// when the capability becomes available.
- stride = width;
- sliceHeight = height;
-
mMeta = new MetaData;
- mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
- mMeta->setInt32(kKeyColorFormat, colorFormat);
- mMeta->setInt32(kKeyWidth, width);
- mMeta->setInt32(kKeyHeight, height);
- mMeta->setInt32(kKeyStride, stride);
- mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ mMeta->setInt32(kKeyColorFormat, mColorFormat);
+ mMeta->setInt32(kKeyWidth, mVideoSize.width);
+ mMeta->setInt32(kKeyHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyStride, mVideoSize.width);
+ mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+ mMeta->setInt32(kKeySampleRate, mVideoFrameRate);
+ return OK;
}
CameraSource::~CameraSource() {
@@ -173,8 +541,17 @@ CameraSource::~CameraSource() {
}
}
+void CameraSource::startCameraRecording() {
+ CHECK_EQ(OK, mCamera->startRecording());
+ CHECK(mCamera->recordingEnabled());
+}
+
status_t CameraSource::start(MetaData *meta) {
CHECK(!mStarted);
+ if (mInitCheck != OK) {
+ LOGE("CameraSource is not initialized yet");
+ return mInitCheck;
+ }
char value[PROPERTY_VALUE_MAX];
if (property_get("media.stagefright.record-stats", value, NULL)
@@ -190,13 +567,17 @@ status_t CameraSource::start(MetaData *meta) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(new CameraSourceListener(this));
- CHECK_EQ(OK, mCamera->startRecording());
IPCThreadState::self()->restoreCallingIdentity(token);
mStarted = true;
return OK;
}
+void CameraSource::stopCameraRecording() {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+}
+
status_t CameraSource::stop() {
LOGV("stop");
Mutex::Autolock autoLock(mLock);
@@ -204,15 +585,23 @@ status_t CameraSource::stop() {
mFrameAvailableCondition.signal();
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->setListener(NULL);
- mCamera->stopRecording();
+ stopCameraRecording();
releaseQueuedFrames();
while (!mFramesBeingEncoded.empty()) {
LOGI("Waiting for outstanding frames being encoded: %d",
mFramesBeingEncoded.size());
mFrameCompleteCondition.wait(mLock);
}
- mCamera = NULL;
+
+ LOGV("Disconnect camera");
+ if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+ LOGV("Camera was cold when we started, stopping preview");
+ mCamera->stopPreview();
+ }
+ mCamera->unlock();
+ mCamera.clear();
+ mCamera = 0;
+ mCameraFlags = 0;
IPCThreadState::self()->restoreCallingIdentity(token);
if (mCollectStats) {
@@ -225,11 +614,15 @@ status_t CameraSource::stop() {
return OK;
}
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+ mCamera->releaseRecordingFrame(frame);
+}
+
void CameraSource::releaseQueuedFrames() {
List<sp<IMemory> >::iterator it;
while (!mFramesReceived.empty()) {
it = mFramesReceived.begin();
- mCamera->releaseRecordingFrame(*it);
+ releaseRecordingFrame(*it);
mFramesReceived.erase(it);
++mNumFramesDropped;
}
@@ -241,7 +634,7 @@ sp<MetaData> CameraSource::getFormat() {
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
+ releaseRecordingFrame(frame);
IPCThreadState::self()->restoreCallingIdentity(token);
}
@@ -251,7 +644,6 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
-
releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
@@ -343,6 +735,13 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
++mNumGlitches;
}
+ // May need to skip frame or modify timestamp. Currently implemented
+ // by the subclass CameraSourceTimeLapse.
+    if (skipCurrentFrame(timestampUs)) {
+ releaseOneRecordingFrame(data);
+ return;
+ }
+
mLastFrameTimestampUs = timestampUs;
if (mNumFramesReceived == 0) {
mFirstFrameTimeUs = timestampUs;
@@ -367,4 +766,31 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
mFrameAvailableCondition.signal();
}
+size_t CameraSource::getNumberOfVideoBuffers() const {
+ LOGV("getNumberOfVideoBuffers");
+ size_t nBuffers = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ nBuffers = mCamera->getNumberOfVideoBuffers();
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return nBuffers;
+}
+
+sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
+ LOGV("getVideoBuffer: %d", index);
+ sp<IMemory> buffer = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ buffer = mCamera->getVideoBuffer(index);
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return buffer;
+}
+
+bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+ LOGV("isMetaDataStoredInVideoBuffers");
+ return mIsMetaDataStoredInVideoBuffers;
+}
+
} // namespace android
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..6fd1825
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,518 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include "OMX_Video.h"
+#include <limits.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs) {
+
+ CameraSourceTimeLapse *source = new
+ CameraSourceTimeLapse(camera, cameraId,
+ videoSize, videoFrameRate, surface,
+ timeBetweenTimeLapseFrameCaptureUs);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
+ }
+ return source;
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs)
+ : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
+ mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+ mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+ mLastTimeLapseFrameRealTimestampUs(0),
+ mSkipCurrentFrame(false) {
+
+ LOGV("starting time lapse mode");
+ mVideoWidth = videoSize.width;
+ mVideoHeight = videoSize.height;
+
+ if (trySettingPreviewSize(videoSize.width, videoSize.height)) {
+ mUseStillCameraForTimeLapse = false;
+ } else {
+        // TODO: Add a check that mTimeBetweenTimeLapseFrameCaptureUs is greater
+        // than the minimum time the still camera needs between pictures.
+ mUseStillCameraForTimeLapse = true;
+ CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
+ mNeedCropping = computeCropRectangleOffset();
+ mMeta->setInt32(kKeyWidth, videoSize.width);
+ mMeta->setInt32(kKeyHeight, videoSize.height);
+ }
+
+ // Initialize quick stop variables.
+ mQuickStop = false;
+ mForceRead = false;
+ mLastReadBufferCopy = NULL;
+ mStopWaitingForIdleCamera = false;
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+void CameraSourceTimeLapse::startQuickReadReturns() {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ LOGV("Enabling quick read returns");
+
+ // Enable quick stop mode.
+ mQuickStop = true;
+
+ if (mUseStillCameraForTimeLapse) {
+ // wake up the thread right away.
+ mTakePictureCondition.signal();
+ } else {
+        // Force dataCallbackTimestamp() coming from the video camera to not skip
+        // the next frame, as we want read() to get a frame right away.
+ mForceRead = true;
+ }
+}
+
+bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPreviewSizes(supportedSizes);
+
+ bool previewSizeSupported = false;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth == width) && (pictureHeight == height)) {
+ previewSizeSupported = true;
+ }
+ }
+
+ if (previewSizeSupported) {
+ LOGV("Video size (%d, %d) is a supported preview size", width, height);
+ params.setPreviewSize(width, height);
+ CHECK(mCamera->setParameters(params.flatten()));
+ return true;
+ }
+
+ return false;
+}
+
+bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPictureSizes(supportedSizes);
+
+ int32_t minPictureSize = INT_MAX;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth >= width) && (pictureHeight >= height)) {
+ int32_t pictureSize = pictureWidth*pictureHeight;
+ if (pictureSize < minPictureSize) {
+ minPictureSize = pictureSize;
+ mPictureWidth = pictureWidth;
+ mPictureHeight = pictureHeight;
+ }
+ }
+ }
+ LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
+ return (minPictureSize != INT_MAX);
+}
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+ if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+ return false;
+ }
+
+ CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+ int32_t widthDifference = mPictureWidth - mVideoWidth;
+ int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+ mCropRectStartX = widthDifference/2;
+ mCropRectStartY = heightDifference/2;
+
+ LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+ return true;
+}
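For illustration, the centering arithmetic above with hypothetical sizes (a 640x480
still picture cropped down to a 320x240 video frame):

    int32_t pictureWidth = 640, pictureHeight = 480;  // hypothetical picture size
    int32_t videoWidth = 320, videoHeight = 240;      // hypothetical video size
    int32_t cropRectStartX = (pictureWidth - videoWidth) / 2;    // 160
    int32_t cropRectStartY = (pictureHeight - videoHeight) / 2;  // 120
    // The 320x240 crop rectangle spans x in [160, 480) and y in [120, 360),
    // i.e. it is centered within the larger picture.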
+
+void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && (buffer == mLastReadBufferCopy)) {
+ buffer->setObserver(NULL);
+ buffer->release();
+ } else {
+ return CameraSource::signalBufferReturned(buffer);
+ }
+}
+
+void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+ size_t sourceSize = sourceBuffer.size();
+ void* sourcePointer = sourceBuffer.data();
+
+ (*newBuffer) = new MediaBuffer(sourceSize);
+ memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
+
+ (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
+}
+
+void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+ int64_t frameTime;
+ CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
+ createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
+ mLastReadBufferCopy->add_ref();
+ mLastReadBufferCopy->setObserver(this);
+}
+
+status_t CameraSourceTimeLapse::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ if (mLastReadBufferCopy == NULL) {
+ mLastReadStatus = CameraSource::read(buffer, options);
+
+        // mQuickStop may have become true while read() was blocked. Make a copy of
+        // the buffer in that case.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && *buffer) {
+ fillLastReadBufferCopy(**buffer);
+ }
+ return mLastReadStatus;
+ } else {
+ (*buffer) = mLastReadBufferCopy;
+ (*buffer)->add_ref();
+ return mLastReadStatus;
+ }
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadTimeLapseEntry();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+ while (mStarted) {
+ {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCameraIdle = false;
+ }
+
+ // Even if mQuickStop == true we need to take one more picture
+        // as a read() may be blocked, waiting for a frame to become available.
+ // After this takePicture, if mQuickStop == true, we can safely exit
+ // this thread as read() will make a copy of this last frame and keep
+ // returning it in the quick stop mode.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ CHECK_EQ(OK, mCamera->takePicture());
+ if (mQuickStop) {
+ LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
+ return;
+ }
+ mTakePictureCondition.waitRelative(mQuickStopLock,
+ mTimeBetweenTimeLapseFrameCaptureUs * 1000);
+ }
+ LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ LOGV("start time lapse recording using still camera");
+
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ params.setPictureSize(mPictureWidth, mPictureHeight);
+ mCamera->setParameters(params.flatten());
+ mCameraIdle = true;
+ mStopWaitingForIdleCamera = false;
+
+ // disable shutter sound and play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+
+ // create a thread which takes pictures in a loop
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+ pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+ pthread_attr_destroy(&attr);
+ } else {
+ LOGV("start time lapse recording using video camera");
+ CHECK_EQ(OK, mCamera->startRecording());
+ }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ void *dummy;
+ pthread_join(mThreadTimeLapse, &dummy);
+
+        // The last takePicture may still be underway. Wait for the camera to
+        // become idle.
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mStopWaitingForIdleCamera = true;
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCamera->setListener(NULL);
+
+ // play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+ } else {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+ }
+ if (mLastReadBufferCopy) {
+ mLastReadBufferCopy->release();
+ mLastReadBufferCopy = NULL;
+ }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+ if (!mUseStillCameraForTimeLapse) {
+ mCamera->releaseRecordingFrame(frame);
+ }
+}
+
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+ size_t source_size = source_data->size();
+ void* source_pointer = source_data->pointer();
+
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+ memcpy(newMemory->pointer(), source_pointer, source_size);
+ return newMemory;
+}
+
+// Allocates an IMemory (backed by a MemoryBase) of the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+ return newMemory;
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadStartPreview();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadStartPreview() {
+ CHECK_EQ(OK, mCamera->startPreview());
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mCameraIdle = true;
+ mCameraIdleCondition.signal();
+}
+
+void CameraSourceTimeLapse::restartPreview() {
+ // Start this in a different thread, so that the dataCallback can return
+ LOGV("restartPreview");
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+
+ pthread_t threadPreview;
+ pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
+ pthread_attr_destroy(&attr);
+}
+
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else {
+ CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+ yuvFormat = YUVImage::YUV420Planar;
+ }
+
+ // allocate memory for cropped image and setup a canvas using it.
+ sp<IMemory> croppedImageMemory = allocateIMemory(
+ YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+ YUVImage yuvImageCropped(yuvFormat,
+ mVideoWidth, mVideoHeight,
+ (uint8_t *)croppedImageMemory->pointer());
+ YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mPictureWidth, mPictureHeight,
+ (uint8_t *)source_data->pointer());
+ yuvCanvasCrop.CopyImageRect(
+ Rect(mCropRectStartX, mCropRectStartY,
+ mCropRectStartX + mVideoWidth,
+ mCropRectStartY + mVideoHeight),
+ 0, 0,
+ yuvImageSource);
+
+ return croppedImageMemory;
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+ if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+ // takePicture will complete after this callback, so restart preview.
+ restartPreview();
+ return;
+ }
+ if (msgType != CAMERA_MSG_RAW_IMAGE) {
+ return;
+ }
+
+ LOGV("dataCallback for timelapse still frame");
+ CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
+ int64_t timestampUs;
+ if (mNumFramesReceived == 0) {
+ timestampUs = mStartTimeUs;
+ } else {
+ timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ }
+
+ if (mNeedCropping) {
+ sp<IMemory> croppedImageData = cropYUVImage(data);
+ dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+ } else {
+ sp<IMemory> dataCopy = createIMemoryCopy(data);
+ dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+ }
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+ if (mSkipCurrentFrame) {
+ mSkipCurrentFrame = false;
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+ if (!mUseStillCameraForTimeLapse) {
+ if (mLastTimeLapseFrameRealTimestampUs == 0) {
+ // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+ // to current time (timestampUs) and save frame data.
+ LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ return false;
+ }
+
+ {
+ Mutex::Autolock autoLock(mQuickStopLock);
+
+ // mForceRead may be set to true by startQuickReadReturns(). In that
+ // case don't skip this frame.
+ if (mForceRead) {
+ LOGV("dataCallbackTimestamp timelapse: forced read");
+ mForceRead = false;
+ *timestampUs = mLastFrameTimestampUs;
+ return false;
+ }
+ }
+
+ if (*timestampUs <
+ (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+ // Skip all frames from last encoded frame until
+ // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+ // Tell the camera to release its recording frame and return.
+ LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+ return true;
+ } else {
+ // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+ // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+ // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+ // of the last encoded frame's time stamp.
+ LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ return false;
+ }
+ }
+ return false;
+}
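For illustration, the re-stamping above with hypothetical numbers: capturing one frame
every 5 seconds (mTimeBetweenTimeLapseFrameCaptureUs = 5,000,000) for 30 fps output
(mTimeBetweenTimeLapseVideoFramesUs of roughly 33,333) compresses each 5 second capture
interval into a single 33 ms video frame interval:

    int64_t captureIntervalUs = 5000000;          // hypothetical: keep one frame per 5 s
    int64_t videoFrameIntervalUs = 1000000 / 30;  // ~33333 us between output frames
    // A frame arriving before mLastTimeLapseFrameRealTimestampUs + captureIntervalUs
    // is skipped. A frame arriving at or after that point is kept, and its timestamp
    // becomes mLastFrameTimestampUs + videoFrameIntervalUs, so playback runs roughly
    // 150x faster than real time in this example.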
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data) {
+ if (!mUseStillCameraForTimeLapse) {
+ mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+ } else {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+        // If we are using the still camera and stop() has been called, it may
+        // be waiting for the camera to become idle. In that case return
+        // immediately. Calling CameraSource::dataCallbackTimestamp() would lead
+        // to a deadlock, since it tries to acquire CameraSource::mLock, which in
+        // this case is held by CameraSource::stop() while it waits for the
+        // camera to become idle. And the camera will not become idle until this
+        // call returns.
+ if (mStopWaitingForIdleCamera) {
+ return;
+ }
+ }
+ CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+} // namespace android
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index be23c60..ea5577d 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -105,7 +105,7 @@ void DataSource::RegisterDefaultSniffers() {
RegisterSniffer(SniffAMR);
RegisterSniffer(SniffMPEG2TS);
RegisterSniffer(SniffMP3);
- RegisterSniffer(SniffDRM);
+ //RegisterSniffer(SniffDRM);
}
// static
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index b46d8d0..e4f9a47 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -21,7 +21,7 @@ namespace android {
FileSource::FileSource(const char *filename)
: mFile(fopen(filename, "rb")),
- mFd(fileno(mFile)),
+ mFd(mFile == NULL ? -1 : fileno(mFile)),
mOffset(0),
mLength(-1),
mDecryptHandle(NULL),
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index a15c274..9e79aa9 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -2217,6 +2217,9 @@ void MPEG4Writer::Track::writeTrackHeader(
CHECK(mCodecSpecificData);
CHECK(mCodecSpecificDataSize > 0);
+ // Make sure all sizes encode to a single byte.
+ CHECK(mCodecSpecificDataSize + 23 < 128);
+
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt8(0x03); // ES_DescrTag
mOwner->writeInt8(23 + mCodecSpecificDataSize);
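The new check appears to be there because the descriptor length that follows is written
with a single writeInt8(): the MP4 expandable-size field carries 7 bits per byte, so only
values below 128 fit in one byte. A minimal sketch of that rule (writeByte() is a
hypothetical helper, not part of MPEG4Writer):

    // Expandable size: 7 bits per byte, MSB set on every byte except the last.
    // Keeping 23 + mCodecSpecificDataSize below 128 guarantees the one-byte form.
    void writeExpandableSizeOneByte(uint8_t size) {
        assert(size < 128);
        writeByte(size & 0x7F);  // high bit clear: this is the last (and only) byte
    }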
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
index b973745..cbccd31 100644
--- a/media/libstagefright/MediaBuffer.cpp
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -25,6 +25,8 @@
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
+#include <ui/GraphicBuffer.h>
+
namespace android {
// XXX make this truly atomic.
@@ -61,6 +63,20 @@ MediaBuffer::MediaBuffer(size_t size)
mOriginal(NULL) {
}
+MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer)
+ : mObserver(NULL),
+ mNextBuffer(NULL),
+ mRefCount(0),
+ mData(NULL),
+ mSize(1),
+ mRangeOffset(0),
+ mRangeLength(mSize),
+ mGraphicBuffer(graphicBuffer),
+ mOwnsData(false),
+ mMetaData(new MetaData),
+ mOriginal(NULL) {
+}
+
void MediaBuffer::release() {
if (mObserver == NULL) {
CHECK_EQ(mRefCount, 0);
@@ -92,10 +108,12 @@ void MediaBuffer::add_ref() {
}
void *MediaBuffer::data() const {
+ CHECK(mGraphicBuffer == NULL);
return mData;
}
size_t MediaBuffer::size() const {
+ CHECK(mGraphicBuffer == NULL);
return mSize;
}
@@ -108,15 +126,19 @@ size_t MediaBuffer::range_length() const {
}
void MediaBuffer::set_range(size_t offset, size_t length) {
- if (offset + length > mSize) {
+ if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize);
}
- CHECK(offset + length <= mSize);
+ CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
mRangeOffset = offset;
mRangeLength = length;
}
+sp<GraphicBuffer> MediaBuffer::graphicBuffer() const {
+ return mGraphicBuffer;
+}
+
sp<MetaData> MediaBuffer::meta_data() {
return mMetaData;
}
@@ -158,6 +180,8 @@ int MediaBuffer::refcount() const {
}
MediaBuffer *MediaBuffer::clone() {
+ CHECK_EQ(mGraphicBuffer, NULL);
+
MediaBuffer *buffer = new MediaBuffer(mData, mSize);
buffer->set_range(mRangeOffset, mRangeLength);
buffer->mMetaData = new MetaData(*mMetaData.get());
@@ -169,4 +193,3 @@ MediaBuffer *MediaBuffer::clone() {
}
} // namespace android
-
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index ee03c52..965c370 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -65,7 +65,7 @@ sp<MediaExtractor> MediaExtractor::Create(
}
if (!strncmp(mime, "drm", 3)) {
- char *originalMime = strrchr(mime, '+') + 1;
+ const char *originalMime = strrchr(mime, '+') + 1;
if (!strncmp(mime, "drm+es_based", 12)) {
return new DRMExtractor(source, originalMime);
diff --git a/media/libstagefright/MediaSourceSplitter.cpp b/media/libstagefright/MediaSourceSplitter.cpp
new file mode 100644
index 0000000..abc7012
--- /dev/null
+++ b/media/libstagefright/MediaSourceSplitter.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSourceSplitter"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaSourceSplitter.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaSourceSplitter::MediaSourceSplitter(sp<MediaSource> mediaSource) {
+ mNumberOfClients = 0;
+ mSource = mediaSource;
+ mSourceStarted = false;
+
+ mNumberOfClientsStarted = 0;
+ mNumberOfCurrentReads = 0;
+ mCurrentReadBit = 0;
+ mLastReadCompleted = true;
+}
+
+MediaSourceSplitter::~MediaSourceSplitter() {
+}
+
+sp<MediaSource> MediaSourceSplitter::createClient() {
+ Mutex::Autolock autoLock(mLock);
+
+ sp<MediaSource> client = new Client(this, mNumberOfClients++);
+ mClientsStarted.push(false);
+ mClientsDesiredReadBit.push(0);
+ return client;
+}
+
+status_t MediaSourceSplitter::start(int clientId, MetaData *params) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("start client (%d)", clientId);
+ if (mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ mNumberOfClientsStarted++;
+
+ if (!mSourceStarted) {
+ LOGV("Starting real source from client (%d)", clientId);
+ status_t err = mSource->start(params);
+
+ if (err == OK) {
+ mSourceStarted = true;
+ mClientsStarted.editItemAt(clientId) = true;
+ mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+ }
+
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = true;
+ if (mLastReadCompleted) {
+ // Last read was completed. So join in the threads for the next read.
+            // The last read has completed, so join the other clients for the next read.
+ } else {
+ // Last read is ongoing. So join in the threads for the current read.
+            // The last read is still in progress, so join the current read.
+ }
+ return OK;
+ }
+}
+
+status_t MediaSourceSplitter::stop(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("stop client (%d)", clientId);
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+ CHECK(mClientsStarted[clientId]);
+
+ if (--mNumberOfClientsStarted == 0) {
+ LOGV("Stopping real source from client (%d)", clientId);
+ status_t err = mSource->stop();
+ mSourceStarted = false;
+ mClientsStarted.editItemAt(clientId) = false;
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = false;
+ if (!mLastReadCompleted && (mClientsDesiredReadBit[clientId] == mCurrentReadBit)) {
+            // !mLastReadCompleted implies that a buffer has been read from the source,
+            // but not all clients have read it yet.
+ // mClientsDesiredReadBit[clientId] == mCurrentReadBit implies that this
+ // client would have wanted to read from this buffer. (i.e. it has not yet
+ // called read() for the current read buffer.)
+ // Since other threads may be waiting for all the clients' reads to complete,
+ // signal that this read has been aborted.
+ signalReadComplete_lock(true);
+ }
+ return OK;
+ }
+}
+
+sp<MetaData> MediaSourceSplitter::getFormat(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("getFormat client (%d)", clientId);
+ return mSource->getFormat();
+}
+
+status_t MediaSourceSplitter::read(int clientId,
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+ Mutex::Autolock autoLock(mLock);
+
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+
+ LOGV("read client (%d)", clientId);
+ *buffer = NULL;
+
+ if (!mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ if (mCurrentReadBit != mClientsDesiredReadBit[clientId]) {
+ // Desired buffer has not been read from source yet.
+
+        // If the current client is the special client with clientId = 0,
+        // read from the source; otherwise wait until client 0 has finished
+        // reading from the source.
+ if (clientId == 0) {
+            // Wait for all clients' last reads to complete first, so as not to
+            // corrupt the buffer at mLastReadMediaBuffer.
+ waitForAllClientsLastRead_lock(clientId);
+
+ readFromSource_lock(options);
+ *buffer = mLastReadMediaBuffer;
+ } else {
+ waitForReadFromSource_lock(clientId);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+ CHECK(mCurrentReadBit == mClientsDesiredReadBit[clientId]);
+ } else {
+ // Desired buffer has already been read from source. Use the cached data.
+ CHECK(clientId != 0);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+
+ mClientsDesiredReadBit.editItemAt(clientId) = !mClientsDesiredReadBit[clientId];
+ signalReadComplete_lock(false);
+
+ return mLastReadStatus;
+}
+
+void MediaSourceSplitter::readFromSource_lock(const MediaSource::ReadOptions *options) {
+    mLastReadStatus = mSource->read(&mLastReadMediaBuffer, options);
+
+ mCurrentReadBit = !mCurrentReadBit;
+ mLastReadCompleted = false;
+ mReadFromSourceCondition.broadcast();
+}
+
+void MediaSourceSplitter::waitForReadFromSource_lock(int32_t clientId) {
+ mReadFromSourceCondition.wait(mLock);
+}
+
+void MediaSourceSplitter::waitForAllClientsLastRead_lock(int32_t clientId) {
+ if (mLastReadCompleted) {
+ return;
+ }
+ mAllReadsCompleteCondition.wait(mLock);
+ CHECK(mLastReadCompleted);
+}
+
+void MediaSourceSplitter::signalReadComplete_lock(bool readAborted) {
+ if (!readAborted) {
+ mNumberOfCurrentReads++;
+ }
+
+ if (mNumberOfCurrentReads == mNumberOfClientsStarted) {
+ mLastReadCompleted = true;
+ mNumberOfCurrentReads = 0;
+ mAllReadsCompleteCondition.broadcast();
+ }
+}
+
+status_t MediaSourceSplitter::pause(int clientId) {
+ return ERROR_UNSUPPORTED;
+}
+
+// Client
+
+MediaSourceSplitter::Client::Client(
+ sp<MediaSourceSplitter> splitter,
+ int32_t clientId) {
+ mSplitter = splitter;
+ mClientId = clientId;
+}
+
+status_t MediaSourceSplitter::Client::start(MetaData *params) {
+ return mSplitter->start(mClientId, params);
+}
+
+status_t MediaSourceSplitter::Client::stop() {
+ return mSplitter->stop(mClientId);
+}
+
+sp<MetaData> MediaSourceSplitter::Client::getFormat() {
+ return mSplitter->getFormat(mClientId);
+}
+
+status_t MediaSourceSplitter::Client::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ return mSplitter->read(mClientId, buffer, options);
+}
+
+status_t MediaSourceSplitter::Client::pause() {
+ return mSplitter->pause(mClientId);
+}
+
+} // namespace android
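A rough usage sketch of the splitter (hypothetical setup; error handling omitted, and
"source" is assumed to be an existing sp<MediaSource>):

    sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(source);
    sp<MediaSource> clientA = splitter->createClient();
    sp<MediaSource> clientB = splitter->createClient();

    clientA->start();
    clientB->start();

    MediaBuffer *bufA, *bufB;
    clientA->read(&bufA);  // client 0 performs the real read from the source
    clientB->read(&bufB);  // other clients wait, then share the same buffer

    bufA->release();       // each client releases its own reference when done
    bufB->release();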
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 9a49a9b..0d8abe2 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -39,6 +39,7 @@
#include <binder/MemoryDealer.h>
#include <binder/ProcessState.h>
#include <media/IMediaPlayerService.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
@@ -152,37 +153,37 @@ static sp<MediaSource> InstantiateSoftwareCodec(
static const CodecInfo kDecoderInfo[] = {
{ MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
+// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
{ MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
-// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
+// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },
+// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
{ MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
};
@@ -194,25 +195,24 @@ static const CodecInfo kEncoderInfo[] = {
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
};
#undef OPTIONAL
@@ -307,16 +307,15 @@ static void InitOMXParams(T *params) {
}
static bool IsSoftwareCodec(const char *componentName) {
- if (!strncmp("OMX.PV.", componentName, 7)) {
- return true;
+ if (!strncmp("OMX.", componentName, 4)) {
+ return false;
}
- return false;
+ return true;
}
// A sort order in which non-OMX components are first,
-// followed by software codecs, i.e. OMX.PV.*, followed
-// by all the others.
+// followed by software codecs, and followed by all the others.
static int CompareSoftwareCodecsFirst(
const String8 *elem1, const String8 *elem2) {
bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4);
@@ -350,9 +349,13 @@ uint32_t OMXCodec::getComponentQuirks(
const char *componentName, bool isEncoder) {
uint32_t quirks = 0;
- if (!strcmp(componentName, "OMX.PV.avcdec")) {
- quirks |= kWantsNALFragments;
+ if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.aac.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) {
+ quirks |= kDecoderLiesAboutNumberOfChannels;
}
+
if (!strcmp(componentName, "OMX.TI.MP3.decode")) {
quirks |= kNeedsFlushBeforeDisable;
quirks |= kDecoderLiesAboutNumberOfChannels;
@@ -447,7 +450,16 @@ void OMXCodec::findMatchingCodecs(
continue;
}
- matchingCodecs->push(String8(componentName));
+        // When requesting software-only codecs, only push software codecs.
+        // When requesting hardware-only codecs, only push hardware codecs.
+        // When neither software-only nor hardware-only codecs are requested,
+        // push all codecs.
+ if (((flags & kSoftwareCodecsOnly) && IsSoftwareCodec(componentName)) ||
+ ((flags & kHardwareCodecsOnly) && !IsSoftwareCodec(componentName)) ||
+ (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
+
+ matchingCodecs->push(String8(componentName));
+ }
}
if (flags & kPreferSoftwareCodecs) {
@@ -461,7 +473,8 @@ sp<MediaSource> OMXCodec::Create(
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName,
- uint32_t flags) {
+ uint32_t flags,
+ const sp<ANativeWindow> &nativeWindow) {
const char *mime;
bool success = meta->findCString(kKeyMIMEType, &mime);
CHECK(success);
@@ -517,7 +530,7 @@ sp<MediaSource> OMXCodec::Create(
sp<OMXCodec> codec = new OMXCodec(
omx, node, quirks,
createEncoder, mime, componentName,
- source);
+ source, nativeWindow);
observer->setCodec(codec);
@@ -725,6 +738,16 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
mQuirks &= ~kOutputBuffersAreUnreadable;
}
+ if (mNativeWindow != NULL
+ && !mIsEncoder
+ && !strncasecmp(mMIME, "video/", 6)
+ && !strncmp(mComponentName, "OMX.", 4)) {
+ status_t err = initNativeWindow();
+ if (err != OK) {
+ return err;
+ }
+ }
+
return OK;
}
@@ -1283,6 +1306,10 @@ status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
h264type.bMBAFF = OMX_FALSE;
h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+ if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) {
+ h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+ }
+
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
CHECK_EQ(err, OK);
@@ -1408,7 +1435,8 @@ OMXCodec::OMXCodec(
bool isEncoder,
const char *mime,
const char *componentName,
- const sp<MediaSource> &source)
+ const sp<MediaSource> &source,
+ const sp<ANativeWindow> &nativeWindow)
: mOMX(omx),
mOMXLivesLocally(omx->livesLocally(getpid())),
mNode(node),
@@ -1428,7 +1456,8 @@ OMXCodec::OMXCodec(
mTargetTimeUs(-1),
mSkipTimeUs(-1),
mLeftOverBuffer(NULL),
- mPaused(false) {
+ mPaused(false),
+ mNativeWindow(nativeWindow) {
mPortStatus[kPortIndexInput] = ENABLED;
mPortStatus[kPortIndexOutput] = ENABLED;
@@ -1572,6 +1601,10 @@ status_t OMXCodec::allocateBuffers() {
}
status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+ if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+ return allocateOutputBuffersFromNativeWindow();
+ }
+
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
@@ -1638,6 +1671,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
info.mBuffer = buffer;
info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
info.mMem = mem;
info.mMediaBuffer = NULL;
@@ -1664,6 +1698,178 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
return OK;
}
+status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
+ // Get the number of buffers needed.
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ // Check that the color format is in the correct range.
+ CHECK(OMX_COLOR_FormatAndroidPrivateStart <= def.format.video.eColorFormat);
+ CHECK(def.format.video.eColorFormat < OMX_COLOR_FormatAndroidPrivateEnd);
+
+ err = native_window_set_buffers_geometry(
+ mNativeWindow.get(),
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight,
+ def.format.video.eColorFormat
+ - OMX_COLOR_FormatAndroidPrivateStart);
+
+ if (err != 0) {
+ LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+ strerror(-err), -err);
+ return err;
+ }
+
+    // Increase the buffer count by one to allow the ANativeWindow to hold on
+    // to one of the buffers.
+ def.nBufferCountActual++;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ // Set up the native window.
+ // XXX TODO: Get the gralloc usage flags from the OMX plugin!
+ err = native_window_set_usage(
+ mNativeWindow.get(), GRALLOC_USAGE_HW_TEXTURE);
+ if (err != 0) {
+ LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ err = native_window_set_buffer_count(
+ mNativeWindow.get(), def.nBufferCountActual);
+ if (err != 0) {
+ LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+ -err);
+ return err;
+ }
+
+ // XXX TODO: Do something so the ANativeWindow knows that we'll need to get
+ // the same set of buffers.
+
+ CODEC_LOGI("allocating %lu buffers from a native window of size %lu on "
+ "output port", def.nBufferCountActual, def.nBufferSize);
+
+ // Dequeue buffers and send them to OMX
+ OMX_U32 i;
+ for (i = 0; i < def.nBufferCountActual; i++) {
+ android_native_buffer_t* buf;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+ IOMX::buffer_id bufferId;
+ err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+ &bufferId);
+ if (err != 0) {
+ break;
+ }
+
+ CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)",
+ bufferId, graphicBuffer.get());
+
+ BufferInfo info;
+ info.mData = NULL;
+ info.mSize = def.nBufferSize;
+ info.mBuffer = bufferId;
+ info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
+ info.mMem = NULL;
+ info.mMediaBuffer = new MediaBuffer(graphicBuffer);
+ info.mMediaBuffer->setObserver(this);
+
+ mPortBuffers[kPortIndexOutput].push(info);
+ }
+
+ OMX_U32 cancelStart;
+ OMX_U32 cancelEnd;
+
+ if (err != 0) {
+ // If an error occurred while dequeuing we need to cancel any buffers
+ // that were dequeued.
+ cancelStart = 0;
+ cancelEnd = i;
+ } else {
+ // Return the last two buffers to the native window.
+ // XXX TODO: The number of buffers the native window owns should probably be
+ // queried from it when we put the native window in fixed buffer pool mode
+ // (which needs to be implemented). Currently it's hard-coded to 2.
+ cancelStart = def.nBufferCountActual - 2;
+ cancelEnd = def.nBufferCountActual;
+ }
+
+ for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+ BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
+ cancelBufferToNativeWindow(info);
+ }
+
+ return err;
+}
+
+status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer);
+ int err = mNativeWindow->cancelBuffer(
+ mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get());
+ if (err != 0) {
+ CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return err;
+ }
+ info->mOwnedByNativeWindow = true;
+ return OK;
+}
+
+OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
+ // Dequeue the next buffer from the native window.
+ android_native_buffer_t* buf;
+ int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // Determine which buffer we just dequeued.
+ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+ BufferInfo *bufInfo = 0;
+ for (size_t i = 0; i < buffers->size(); i++) {
+ sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
+ mMediaBuffer->graphicBuffer();
+ if (graphicBuffer->handle == buf->handle) {
+ bufInfo = &buffers->editItemAt(i);
+ break;
+ }
+ }
+
+ if (bufInfo == 0) {
+ CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // The native window no longer owns the buffer.
+ CHECK(bufInfo->mOwnedByNativeWindow);
+ bufInfo->mOwnedByNativeWindow = false;
+
+ return bufInfo;
+}
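Taken together, these helpers suggest a simple ownership cycle between the codec and the
ANativeWindow; a rough sketch of the intended flow (comments only, not actual call sites):

    // 1.  dequeueBuffer(): window -> codec (mOwnedByNativeWindow becomes false)
    // 2.  fillBuffer():    the OMX component fills the graphic buffer
    // 3a. frame rendered:  the renderer queues the buffer, so the window owns it again
    // 3b. not rendered:    cancelBuffer() hands it back unused (mOwnedByNativeWindow = true)
    // The extra buffer requested via def.nBufferCountActual++ lets the window keep one
    // buffer on display while the codec cycles through the others.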
+
void OMXCodec::on_message(const omx_message &msg) {
Mutex::Autolock autoLock(mLock);
@@ -1748,6 +1954,15 @@ void OMXCodec::on_message(const omx_message &msg) {
mOMX->freeBuffer(mNode, kPortIndexOutput, buffer);
CHECK_EQ(err, OK);
+ // Cancel the buffer if it belongs to an ANativeWindow.
+ if (info->mMediaBuffer != NULL) {
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (!info->mOwnedByNativeWindow && graphicBuffer != 0) {
+ cancelBufferToNativeWindow(info);
+ // Ignore any errors
+ }
+ }
+
buffers->removeAt(i);
#if 0
} else if (mPortStatus[kPortIndexOutput] == ENABLED
@@ -1776,8 +1991,10 @@ void OMXCodec::on_message(const omx_message &msg) {
}
MediaBuffer *buffer = info->mMediaBuffer;
+ bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
- if (msg.u.extended_buffer_data.range_offset
+ if (!isGraphicBuffer
+ && msg.u.extended_buffer_data.range_offset
+ msg.u.extended_buffer_data.range_length
> buffer->size()) {
CODEC_LOGE(
@@ -1801,7 +2018,7 @@ void OMXCodec::on_message(const omx_message &msg) {
buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
}
- if (mQuirks & kOutputBuffersAreUnreadable) {
+ if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
}
@@ -1871,7 +2088,32 @@ void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
case OMX_EventPortSettingsChanged:
{
- onPortSettingsChanged(data1);
+ CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)",
+ data1, data2);
+
+ if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+ onPortSettingsChanged(data1);
+ } else if (data1 == kPortIndexOutput
+ && data2 == OMX_IndexConfigCommonOutputCrop) {
+
+ OMX_CONFIG_RECTTYPE rect;
+ rect.nPortIndex = kPortIndexOutput;
+ InitOMXParams(&rect);
+
+ status_t err =
+ mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect));
+
+ if (err == OK) {
+ CODEC_LOGV(
+ "output crop (%ld, %ld, %ld, %ld)",
+ rect.nLeft, rect.nTop, rect.nWidth, rect.nHeight);
+ } else {
+ CODEC_LOGE("getConfig(OMX_IndexConfigCommonOutputCrop) "
+ "returned error 0x%08x", err);
+ }
+ }
break;
}
@@ -2201,6 +2443,15 @@ status_t OMXCodec::freeBuffersOnPort(
// Make sure nobody but us owns this buffer at this point.
CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+ // Cancel the buffer if it belongs to an ANativeWindow.
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (!info->mOwnedByNativeWindow && graphicBuffer != 0) {
+ status_t err = cancelBufferToNativeWindow(info);
+ if (err != OK) {
+ stickyErr = err;
+ }
+ }
+
info->mMediaBuffer->release();
}
@@ -2261,6 +2512,7 @@ void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
CHECK_EQ(mPortStatus[portIndex], ENABLED);
mPortStatus[portIndex] = DISABLING;
+ CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
CHECK_EQ(err, OK);
@@ -2274,6 +2526,7 @@ void OMXCodec::enablePortAsync(OMX_U32 portIndex) {
CHECK_EQ(mPortStatus[portIndex], DISABLED);
mPortStatus[portIndex] = ENABLING;
+ CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
CHECK_EQ(err, OK);
@@ -2299,7 +2552,10 @@ void OMXCodec::fillOutputBuffers() {
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
for (size_t i = 0; i < buffers->size(); ++i) {
- fillOutputBuffer(&buffers->editItemAt(i));
+ BufferInfo *info = &buffers->editItemAt(i);
+ if (!info->mOwnedByNativeWindow) {
+            fillOutputBuffer(info);
+ }
}
}
@@ -2516,7 +2772,23 @@ void OMXCodec::fillOutputBuffer(BufferInfo *info) {
return;
}
- CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer);
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (graphicBuffer != 0) {
+ // When using a native buffer we need to lock the buffer before giving
+ // it to OMX.
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
+ int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
+ graphicBuffer.get());
+ if (err != 0) {
+ CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return;
+ }
+ }
+
+ CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);
status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
if (err != OK) {
@@ -3099,7 +3371,32 @@ void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
if (info->mMediaBuffer == buffer) {
CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
- fillOutputBuffer(info);
+ if (buffer->graphicBuffer() == 0) {
+ fillOutputBuffer(info);
+ } else {
+ sp<MetaData> metaData = info->mMediaBuffer->meta_data();
+ int32_t rendered = 0;
+ if (!metaData->findInt32(kKeyRendered, &rendered)) {
+ rendered = 0;
+ }
+ if (!rendered) {
+ status_t err = cancelBufferToNativeWindow(info);
+ if (err < 0) {
+ return;
+ }
+ } else {
+ info->mOwnedByNativeWindow = true;
+ }
+
+ // Dequeue the next buffer from the native window.
+ BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
+ if (nextBufInfo == 0) {
+ return;
+ }
+
+ // Give the buffer to the OMX node to fill.
+ fillOutputBuffer(nextBufInfo);
+ }
return;
}
}
@@ -3432,6 +3729,18 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("}\n");
}
+status_t OMXCodec::initNativeWindow() {
+ // Enable use of a GraphicBuffer as the output for this node. This must
+ // happen before getting the IndexParamPortDefinition parameter because it
+ // will affect the pixel format that the node reports.
+ status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+ if (err != 0) {
+ return err;
+ }
+
+ return OK;
+}
+
void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
mOutputFormat = new MetaData;
mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
@@ -3566,17 +3875,8 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
CHECK(!"Unknown compression format.");
}
- if (!strcmp(mComponentName, "OMX.PV.avcdec")) {
- // This component appears to be lying to me.
- mOutputFormat->setInt32(
- kKeyWidth, (video_def->nFrameWidth + 15) & -16);
- mOutputFormat->setInt32(
- kKeyHeight, (video_def->nFrameHeight + 15) & -16);
- } else {
- mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
- mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
- }
-
+ mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+ mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
break;
}
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 27faf4f..092c33e 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -281,7 +281,7 @@ status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) {
mNumSyncSamples = U32_AT(&header[4]);
if (mNumSyncSamples < 2) {
- LOGW("Table of sync samples is empty or has only a single entry!");
+ LOGV("Table of sync samples is empty or has only a single entry!");
}
mSyncSamples = new uint32_t[mNumSyncSamples];
diff --git a/media/libstagefright/VideoSourceDownSampler.cpp b/media/libstagefright/VideoSourceDownSampler.cpp
new file mode 100644
index 0000000..ea7b09a
--- /dev/null
+++ b/media/libstagefright/VideoSourceDownSampler.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoSourceDownSampler"
+
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include "OMX_Video.h"
+
+namespace android {
+
+VideoSourceDownSampler::VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+ int32_t width, int32_t height) {
+ LOGV("Construct VideoSourceDownSampler");
+ CHECK(width > 0);
+ CHECK(height > 0);
+
+ mRealVideoSource = videoSource;
+ mWidth = width;
+ mHeight = height;
+
+ mMeta = new MetaData(*(mRealVideoSource->getFormat()));
+ CHECK(mMeta->findInt32(kKeyWidth, &mRealSourceWidth));
+ CHECK(mMeta->findInt32(kKeyHeight, &mRealSourceHeight));
+
+ if ((mWidth != mRealSourceWidth) || (mHeight != mRealSourceHeight)) {
+ // Change meta data for width and height.
+ CHECK(mWidth <= mRealSourceWidth);
+ CHECK(mHeight <= mRealSourceHeight);
+
+ mNeedDownSampling = true;
+ computeDownSamplingParameters();
+ mMeta->setInt32(kKeyWidth, mWidth);
+ mMeta->setInt32(kKeyHeight, mHeight);
+ } else {
+ mNeedDownSampling = false;
+ }
+}
+
+VideoSourceDownSampler::~VideoSourceDownSampler() {
+}
+
+void VideoSourceDownSampler::computeDownSamplingParameters() {
+ mDownSampleSkipX = mRealSourceWidth / mWidth;
+ mDownSampleSkipY = mRealSourceHeight / mHeight;
+
+ mDownSampleOffsetX = mRealSourceWidth - mDownSampleSkipX * mWidth;
+ mDownSampleOffsetY = mRealSourceHeight - mDownSampleSkipY * mHeight;
+}
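For illustration, the skip/offset computation above with hypothetical sizes, downsampling
a 640x480 source to 320x240:

    int32_t realWidth = 640, realHeight = 480;     // hypothetical source size
    int32_t width = 320, height = 240;             // hypothetical output size
    int32_t skipX = realWidth / width;             // 2: keep every 2nd column
    int32_t skipY = realHeight / height;           // 2: keep every 2nd row
    int32_t offsetX = realWidth - skipX * width;   // 0: leftover columns, used as start offset
    int32_t offsetY = realHeight - skipY * height; // 0: leftover rows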
+
+void VideoSourceDownSampler::downSampleYUVImage(
+ const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+    if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+        yuvFormat = YUVImage::YUV420SemiPlanar;
+    } else {
+        CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+        yuvFormat = YUVImage::YUV420Planar;
+    }
+
+ // allocate mediaBuffer for down sampled image and setup a canvas.
+ *buffer = new MediaBuffer(YUVImage::bufferSize(yuvFormat, mWidth, mHeight));
+ YUVImage yuvDownSampledImage(yuvFormat,
+ mWidth, mHeight,
+ (uint8_t *)(*buffer)->data());
+ YUVCanvas yuvCanvasDownSample(yuvDownSampledImage);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mRealSourceWidth, mRealSourceHeight,
+ (uint8_t *)sourceBuffer.data());
+ yuvCanvasDownSample.downsample(mDownSampleOffsetX, mDownSampleOffsetY,
+ mDownSampleSkipX, mDownSampleSkipY,
+ yuvImageSource);
+}
+
+status_t VideoSourceDownSampler::start(MetaData *params) {
+ LOGV("start");
+ return mRealVideoSource->start();
+}
+
+status_t VideoSourceDownSampler::stop() {
+ LOGV("stop");
+ return mRealVideoSource->stop();
+}
+
+sp<MetaData> VideoSourceDownSampler::getFormat() {
+ LOGV("getFormat");
+ return mMeta;
+}
+
+status_t VideoSourceDownSampler::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ LOGV("read");
+ MediaBuffer *realBuffer;
+ status_t err = mRealVideoSource->read(&realBuffer, options);
+
+ if (mNeedDownSampling) {
+ downSampleYUVImage(*realBuffer, buffer);
+
+ int64_t frameTime;
+ realBuffer->meta_data()->findInt64(kKeyTime, &frameTime);
+ (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+
+ // We just want this buffer to be deleted when the encoder releases it.
+ // So don't add a reference to it and set the observer to NULL.
+ (*buffer)->setObserver(NULL);
+
+ // The original buffer is no longer required. Release it.
+ realBuffer->release();
+ } else {
+ *buffer = realBuffer;
+ }
+
+ return err;
+}
+
+status_t VideoSourceDownSampler::pause() {
+ LOGV("pause");
+ return mRealVideoSource->pause();
+}
+
+} // namespace android
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 0dcbd73..ef2dba0 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -6,7 +6,8 @@ LOCAL_SRC_FILES:= \
SoftwareRenderer.cpp
LOCAL_C_INCLUDES := \
- $(TOP)/frameworks/base/include/media/stagefright/openmax
+ $(TOP)/frameworks/base/include/media/stagefright/openmax \
+ $(TOP)/hardware/msm7k
LOCAL_SHARED_LIBRARIES := \
libbinder \
@@ -17,6 +18,11 @@ LOCAL_SHARED_LIBRARIES := \
libsurfaceflinger_client\
libcamera_client
+# ifeq ($(TARGET_BOARD_PLATFORM),msm7k)
+ifeq ($(TARGET_PRODUCT),passion)
+ LOCAL_CFLAGS += -DHAS_YCBCR420_SP_ADRENO
+endif
+
LOCAL_MODULE:= libstagefright_color_conversion
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index a6dbf69..662a84a 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -22,65 +22,182 @@
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryHeapPmem.h>
#include <media/stagefright/MediaDebug.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+// XXX: Temporary hack to allow referencing the _ADRENO pixel format here.
+#include <libgralloc-qsd8k/gralloc_priv.h>
namespace android {
SoftwareRenderer::SoftwareRenderer(
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight)
: mColorFormat(colorFormat),
- mConverter(colorFormat, OMX_COLOR_Format16bitRGB565),
- mISurface(surface),
+ mConverter(NULL),
+ mYUVMode(None),
+ mSurface(surface),
mDisplayWidth(displayWidth),
mDisplayHeight(displayHeight),
mDecodedWidth(decodedWidth),
- mDecodedHeight(decodedHeight),
- mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565
- mIndex(0) {
- mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize);
- if (mMemoryHeap->heapID() < 0) {
- LOGI("Creating physical memory heap failed, reverting to regular heap.");
- mMemoryHeap = new MemoryHeapBase(2 * mFrameSize);
- } else {
- sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap);
- pmemHeap->slap();
- mMemoryHeap = pmemHeap;
+ mDecodedHeight(decodedHeight) {
+ LOGI("input format = %d", mColorFormat);
+ LOGI("display = %d x %d, decoded = %d x %d",
+ mDisplayWidth, mDisplayHeight, mDecodedWidth, mDecodedHeight);
+
+ mDecodedWidth = mDisplayWidth;
+ mDecodedHeight = mDisplayHeight;
+
+ int halFormat;
+ switch (mColorFormat) {
+#if HAS_YCBCR420_SP_ADRENO
+ case OMX_COLOR_FormatYUV420Planar:
+ {
+ halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+ mYUVMode = YUV420ToYUV420sp;
+ break;
+ }
+
+ case 0x7fa30c00:
+ {
+ halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+ mYUVMode = YUV420spToYUV420sp;
+ break;
+ }
+#endif
+
+ default:
+ halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+ mConverter = new ColorConverter(
+ mColorFormat, OMX_COLOR_Format16bitRGB565);
+ CHECK(mConverter->isValid());
+ break;
}
- CHECK(mISurface.get() != NULL);
+ CHECK(mSurface.get() != NULL);
CHECK(mDecodedWidth > 0);
CHECK(mDecodedHeight > 0);
- CHECK(mMemoryHeap->heapID() >= 0);
- CHECK(mConverter.isValid());
+ CHECK(mConverter == NULL || mConverter->isValid());
- ISurface::BufferHeap bufferHeap(
- mDisplayWidth, mDisplayHeight,
- mDecodedWidth, mDecodedHeight,
- PIXEL_FORMAT_RGB_565,
- mMemoryHeap);
+ CHECK_EQ(0,
+ native_window_set_usage(
+ mSurface.get(),
+ GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+ | GRALLOC_USAGE_HW_TEXTURE));
- status_t err = mISurface->registerBuffers(bufferHeap);
- CHECK_EQ(err, OK);
+ CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2));
+
+ // Width must be a multiple of 32???
+ CHECK_EQ(0, native_window_set_buffers_geometry(
+ mSurface.get(), mDecodedWidth, mDecodedHeight,
+ halFormat));
}
SoftwareRenderer::~SoftwareRenderer() {
- mISurface->unregisterBuffers();
+ delete mConverter;
+ mConverter = NULL;
+}
+
+static inline size_t ALIGN(size_t x, size_t alignment) {
+ return (x + alignment - 1) & ~(alignment - 1);
}
void SoftwareRenderer::render(
const void *data, size_t size, void *platformPrivate) {
- size_t offset = mIndex * mFrameSize;
- void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
+ android_native_buffer_t *buf;
+ int err;
+ if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {
+ LOGW("Surface::dequeueBuffer returned error %d", err);
+ return;
+ }
+
+ CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));
+
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+
+ Rect bounds(mDecodedWidth, mDecodedHeight);
- mConverter.convert(
- mDecodedWidth, mDecodedHeight,
- data, 0, dst, 2 * mDecodedWidth);
+ void *dst;
+ CHECK_EQ(0, mapper.lock(
+ buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
- mISurface->postBuffer(offset);
- mIndex = 1 - mIndex;
+ if (mConverter) {
+ mConverter->convert(
+ mDecodedWidth, mDecodedHeight,
+ data, 0, dst, buf->stride * 2);
+ } else if (mYUVMode == YUV420spToYUV420sp) {
+ // Input and output are both YUV420sp, but the alignment requirements
+ // are different.
+ size_t srcYStride = mDecodedWidth;
+ const uint8_t *srcY = (const uint8_t *)data;
+ uint8_t *dstY = (uint8_t *)dst;
+ for (size_t i = 0; i < mDecodedHeight; ++i) {
+ memcpy(dstY, srcY, mDecodedWidth);
+ srcY += srcYStride;
+ dstY += buf->stride;
+ }
+
+ size_t srcUVStride = (mDecodedWidth + 1) & ~1;
+ size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2;
+
+ const uint8_t *srcUV = (const uint8_t *)data
+ + mDecodedHeight * mDecodedWidth;
+
+ size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096);
+ uint8_t *dstUV = (uint8_t *)dst + dstUVOffset;
+
+ for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) {
+ memcpy(dstUV, srcUV, (mDecodedWidth + 1) & ~1);
+ srcUV += srcUVStride;
+ dstUV += dstUVStride;
+ }
+ } else if (mYUVMode == YUV420ToYUV420sp) {
+ // Input is YUV420 planar, output is YUV420sp; adhere to the proper
+ // alignment requirements.
+ size_t srcYStride = mDecodedWidth;
+ const uint8_t *srcY = (const uint8_t *)data;
+ uint8_t *dstY = (uint8_t *)dst;
+ for (size_t i = 0; i < mDecodedHeight; ++i) {
+ memcpy(dstY, srcY, mDecodedWidth);
+ srcY += srcYStride;
+ dstY += buf->stride;
+ }
+
+ size_t srcUVStride = (mDecodedWidth + 1) / 2;
+ size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2;
+
+ const uint8_t *srcU = (const uint8_t *)data
+ + mDecodedHeight * mDecodedWidth;
+
+ const uint8_t *srcV =
+ srcU + ((mDecodedWidth + 1) / 2) * ((mDecodedHeight + 1) / 2);
+
+ size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096);
+ uint8_t *dstUV = (uint8_t *)dst + dstUVOffset;
+
+ for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) {
+ for (size_t j = 0; j < (mDecodedWidth + 1) / 2; ++j) {
+ dstUV[2 * j + 1] = srcU[j];
+ dstUV[2 * j] = srcV[j];
+ }
+ srcU += srcUVStride;
+ srcV += srcUVStride;
+ dstUV += dstUVStride;
+ }
+ } else {
+ memcpy(dst, data, size);
+ }
+
+ CHECK_EQ(0, mapper.unlock(buf->handle));
+
+ if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {
+ LOGW("Surface::queueBuffer returned error %d", err);
+ }
+ buf = NULL;
}
} // namespace android
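Most of the new render() path is alignment bookkeeping for the Adreno semi-planar layout: luma rows are padded out to the gralloc stride, the chroma plane starts at a 4096-byte-aligned offset past the (32-row-aligned) luma block, and each chroma row is padded to ALIGN(width/2, 32)*2 bytes. A small standalone program, using a made-up QCIF geometry, that evaluates the same expressions so the numbers can be checked by hand:

    #include <cstdio>
    #include <cstddef>

    static inline size_t ALIGN(size_t x, size_t alignment) {
        return (x + alignment - 1) & ~(alignment - 1);
    }

    int main() {
        // Hypothetical QCIF decode: 176x144, with a 192-pixel luma stride
        // handed back by gralloc.
        size_t width = 176, height = 144, stride = 192;
        size_t dstUVOffset = ALIGN(ALIGN(height, 32) * stride, 4096);
        size_t dstUVStride = ALIGN(width / 2, 32) * 2;
        // Prints: chroma plane starts at byte 32768, chroma row stride is 192.
        printf("chroma plane starts at byte %zu, chroma row stride is %zu\n",
               dstUVOffset, dstUVStride);
        return 0;
    }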
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 4526bf1..a0a7436 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -80,6 +80,7 @@ struct AwesomePlayer {
bool isPlaying() const;
void setISurface(const sp<ISurface> &isurface);
+ void setSurface(const sp<Surface> &surface);
void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
status_t setLooping(bool shouldLoop);
@@ -114,6 +115,11 @@ private:
AUDIO_AT_EOS = 256,
VIDEO_AT_EOS = 512,
AUTO_LOOPING = 1024,
+
+ // We are basically done preparing but are currently buffering
+ // sufficient data to begin playback and finish the preparation phase
+ // for good.
+ PREPARING_CONNECTED = 2048,
};
mutable Mutex mLock;
@@ -125,6 +131,7 @@ private:
wp<MediaPlayerBase> mListener;
sp<ISurface> mISurface;
+ sp<Surface> mSurface;
sp<MediaPlayerBase::AudioSink> mAudioSink;
SystemTimeSource mSystemTimeSource;
@@ -233,6 +240,7 @@ private:
status_t seekTo_l(int64_t timeUs);
status_t pause_l(bool at_eos = false);
void initRenderer_l();
+ void notifyVideoSize_l();
void seekAudioIfNecessary_l();
void cancelPlayerEvents(bool keepBufferingGoing = false);
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index c99da59..5a6c96f 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -59,10 +59,20 @@ public:
node_id node, OMX_INDEXTYPE index,
const void *params, size_t size);
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer);
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index b5b31ac..86c102c 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -49,10 +49,17 @@ struct OMXNodeInstance {
status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
+ status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+ status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+
status_t useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer);
+ status_t useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id *buffer);
+
status_t allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data);
@@ -125,4 +132,3 @@ private:
} // namespace android
#endif // OMX_NODE_INSTANCE_H_
-
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 9eed089..8d58056 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -24,14 +24,14 @@
namespace android {
-class ISurface;
+class Surface;
class MemoryHeapBase;
class SoftwareRenderer : public VideoRenderer {
public:
SoftwareRenderer(
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight);
@@ -41,14 +41,18 @@ public:
const void *data, size_t size, void *platformPrivate);
private:
+ enum YUVMode {
+ None,
+ YUV420ToYUV420sp,
+ YUV420spToYUV420sp,
+ };
+
OMX_COLOR_FORMATTYPE mColorFormat;
- ColorConverter mConverter;
- sp<ISurface> mISurface;
+ ColorConverter *mConverter;
+ YUVMode mYUVMode;
+ sp<Surface> mSurface;
size_t mDisplayWidth, mDisplayHeight;
size_t mDecodedWidth, mDecodedHeight;
- size_t mFrameSize;
- sp<MemoryHeapBase> mMemoryHeap;
- int mIndex;
SoftwareRenderer(const SoftwareRenderer &);
SoftwareRenderer &operator=(const SoftwareRenderer &);
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index c927da1..f9f638f 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -289,6 +289,16 @@ status_t OMX::setConfig(
index, params, size);
}
+status_t OMX::enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->enableGraphicBuffers(port_index, enable);
+}
+
+status_t OMX::storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
+}
+
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -296,6 +306,13 @@ status_t OMX::useBuffer(
port_index, params, buffer);
}
+status_t OMX::useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+ return findInstance(node)->useGraphicBuffer(
+ port_index, graphicBuffer, buffer);
+}
+
status_t OMX::allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -497,12 +514,17 @@ sp<IOMXRenderer> OMX::createRenderer(
}
if (!impl) {
+#if 0
LOGW("Using software renderer.");
impl = new SoftwareRenderer(
colorFormat,
surface,
displayWidth, displayHeight,
encodedWidth, encodedHeight);
+#else
+ CHECK(!"Should not be here.");
+ return NULL;
+#endif
}
return new OMXRenderer(impl);
@@ -527,4 +549,3 @@ void OMXRenderer::render(IOMX::buffer_id buffer) {
}
} // namespace android
-
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5db516e..9b6d441 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -24,6 +24,7 @@
#include <OMX_Component.h>
#include <binder/IMemory.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaErrors.h>
@@ -40,6 +41,11 @@ struct BufferMeta {
mIsBackup(false) {
}
+ BufferMeta(const sp<GraphicBuffer> &graphicBuffer)
+ : mGraphicBuffer(graphicBuffer),
+ mIsBackup(false) {
+ }
+
void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) {
if (!mIsBackup) {
return;
@@ -61,6 +67,7 @@ struct BufferMeta {
}
private:
+ sp<GraphicBuffer> mGraphicBuffer;
sp<IMemory> mMem;
size_t mSize;
bool mIsBackup;
@@ -240,6 +247,74 @@ status_t OMXNodeInstance::setConfig(
return StatusFromOMXError(err);
}
+status_t OMXNodeInstance::enableGraphicBuffers(
+ OMX_U32 portIndex, OMX_BOOL enable) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ EnableAndroidNativeBuffersParams params = {
+ sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
+ err, err);
+
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t OMXNodeInstance::storeMetaDataInBuffers(
+ OMX_U32 portIndex,
+ OMX_BOOL enable) {
+ Mutex::Autolock autolock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.storeMetaDataInBuffers");
+
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex %s failed", name);
+ return StatusFromOMXError(err);
+ }
+
+ StoreMetaDataInBuffersParams params;
+ memset(&params, 0, sizeof(params));
+ params.nSize = sizeof(params);
+
+ // Version: 1.0.0.0
+ params.nVersion.s.nVersionMajor = 1;
+
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
+ if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+ LOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+ return UNKNOWN_ERROR;
+ }
+ return err;
+}
+
status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer) {
@@ -273,6 +348,60 @@ status_t OMXNodeInstance::useBuffer(
return OK;
}
+status_t OMXNodeInstance::useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+ OMX::buffer_id *buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
+
+ OMX_BUFFERHEADERTYPE *header;
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ UseAndroidNativeBufferParams params = {
+ sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta,
+ &header, graphicBuffer,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
+ err);
+
+ delete bufferMeta;
+ bufferMeta = NULL;
+
+ *buffer = 0;
+
+ return UNKNOWN_ERROR;
+ }
+
+ CHECK_EQ(header->pAppPrivate, bufferMeta);
+
+ *buffer = header;
+
+ addActiveBuffer(portIndex, *buffer);
+
+ return OK;
+}
+
status_t OMXNodeInstance::allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data) {
@@ -498,4 +627,3 @@ void OMXNodeInstance::freeActiveBuffers() {
}
} // namespace android
-
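enableGraphicBuffers(), storeMetaDataInBuffers() and useGraphicBuffer() above all share the same OpenMAX IL idiom: resolve a Google-specific extension string to an index with OMX_GetExtensionIndex(), fill in a versioned parameter struct, and push it with OMX_SetParameter(). A generic, hedged sketch of that idiom; PARAMS is a placeholder for the real structs declared in HardwareAPI.h, and the caller is assumed to fill the struct-specific fields (port index, enable flag, buffer pointer) before calling:

    #include <OMX_Core.h>

    // Sketch only: PARAMS stands in for EnableAndroidNativeBuffersParams,
    // StoreMetaDataInBuffersParams or UseAndroidNativeBufferParams. Every
    // OMX struct starts with nSize and nVersion, which are filled in here.
    template <typename PARAMS>
    static OMX_ERRORTYPE setAndroidExtension(
            OMX_HANDLETYPE handle, const char *name, PARAMS *params) {
        OMX_INDEXTYPE index;
        OMX_ERRORTYPE err = OMX_GetExtensionIndex(
                handle, const_cast<OMX_STRING>(name), &index);
        if (err != OMX_ErrorNone) {
            return err;
        }
        params->nSize = sizeof(PARAMS);
        params->nVersion.s.nVersionMajor = 1;   // version 1.0.0.0, as above
        params->nVersion.s.nVersionMinor = 0;
        params->nVersion.s.nRevision = 0;
        params->nVersion.s.nStep = 0;
        return OMX_SetParameter(handle, index, params);
    }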
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index f928c06..e936923 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -23,11 +23,13 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/MediaErrors.h>
#include <arpa/inet.h>
#include <fcntl.h>
#include <netdb.h>
+#include <openssl/md5.h>
#include <sys/socket.h>
namespace android {
@@ -37,6 +39,7 @@ const int64_t ARTSPConnection::kSelectTimeoutUs = 1000ll;
ARTSPConnection::ARTSPConnection()
: mState(DISCONNECTED),
+ mAuthType(NONE),
mSocket(-1),
mConnectionID(0),
mNextCSeq(0),
@@ -114,10 +117,13 @@ void ARTSPConnection::onMessageReceived(const sp<AMessage> &msg) {
// static
bool ARTSPConnection::ParseURL(
- const char *url, AString *host, unsigned *port, AString *path) {
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass) {
host->clear();
*port = 0;
path->clear();
+ user->clear();
+ pass->clear();
if (strncasecmp("rtsp://", url, 7)) {
return false;
@@ -133,6 +139,24 @@ bool ARTSPConnection::ParseURL(
path->setTo(slashPos);
}
+ ssize_t atPos = host->find("@");
+
+ if (atPos >= 0) {
+ // Split off the user:pass@ portion from the hostname.
+
+ AString userPass(*host, 0, atPos);
+ host->erase(0, atPos + 1);
+
+ ssize_t colonPos = userPass.find(":");
+
+ if (colonPos < 0) {
+ *user = userPass;
+ } else {
+ user->setTo(userPass, 0, colonPos);
+ pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
+ }
+ }
+
const char *colonPos = strchr(host->c_str(), ':');
if (colonPos != NULL) {
@@ -187,7 +211,12 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
AString host, path;
unsigned port;
- if (!ParseURL(url.c_str(), &host, &port, &path)) {
+ if (!ParseURL(url.c_str(), &host, &port, &path, &mUser, &mPass)
+ || (mUser.size() > 0 && mPass.size() == 0)) {
+ // If we have a user name but no password we have to give up
+ // right here, since we currently have no way of asking the user
+ // for this information.
+
LOGE("Malformed rtsp url %s", url.c_str());
reply->setInt32("result", ERROR_MALFORMED);
@@ -197,6 +226,10 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
return;
}
+ if (mUser.size() > 0) {
+ LOGV("user = '%s', pass = '%s'", mUser.c_str(), mPass.c_str());
+ }
+
struct hostent *ent = gethostbyname(host.c_str());
if (ent == NULL) {
LOGE("Unknown host %s", host.c_str());
@@ -262,6 +295,11 @@ void ARTSPConnection::onDisconnect(const sp<AMessage> &msg) {
reply->setInt32("result", OK);
mState = DISCONNECTED;
+ mUser.clear();
+ mPass.clear();
+ mAuthType = NONE;
+ mNonce.clear();
+
reply->post();
}
@@ -335,6 +373,12 @@ void ARTSPConnection::onSendRequest(const sp<AMessage> &msg) {
AString request;
CHECK(msg->findString("request", &request));
+ // Just in case we need to re-issue the request with proper authentication
+ // later, stash it away.
+ reply->setString("original-request", request.c_str(), request.size());
+
+ addAuthentication(&request);
+
// Find the boundary between headers and the body.
ssize_t i = request.find("\r\n\r\n");
CHECK_GE(i, 0);
@@ -347,7 +391,7 @@ void ARTSPConnection::onSendRequest(const sp<AMessage> &msg) {
request.insert(cseqHeader, i + 2);
- LOGV("%s", request.c_str());
+ LOGV("request: '%s'", request.c_str());
size_t numBytesSent = 0;
while (numBytesSent < request.size()) {
@@ -612,6 +656,30 @@ bool ARTSPConnection::receiveRTSPReponse() {
}
}
+ if (response->mStatusCode == 401) {
+ if (mAuthType == NONE && mUser.size() > 0
+ && parseAuthMethod(response)) {
+ ssize_t i;
+ CHECK_EQ((status_t)OK, findPendingRequest(response, &i));
+ CHECK_GE(i, 0);
+
+ sp<AMessage> reply = mPendingRequests.valueAt(i);
+ mPendingRequests.removeItemsAt(i);
+
+ AString request;
+ CHECK(reply->findString("original-request", &request));
+
+ sp<AMessage> msg = new AMessage(kWhatSendRequest, id());
+ msg->setMessage("reply", reply);
+ msg->setString("request", request.c_str(), request.size());
+
+ LOGI("re-sending request with authentication headers...");
+ onSendRequest(msg);
+
+ return true;
+ }
+ }
+
return notifyResponseListener(response);
}
@@ -628,26 +696,47 @@ bool ARTSPConnection::ParseSingleUnsignedLong(
return true;
}
-bool ARTSPConnection::notifyResponseListener(
- const sp<ARTSPResponse> &response) {
+status_t ARTSPConnection::findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const {
+ *index = 0;
+
ssize_t i = response->mHeaders.indexOfKey("cseq");
if (i < 0) {
- return true;
+ // This is an unsolicited server->client message.
+ *index = -1;
+ return OK;
}
AString value = response->mHeaders.valueAt(i);
unsigned long cseq;
if (!ParseSingleUnsignedLong(value.c_str(), &cseq)) {
- return false;
+ return ERROR_MALFORMED;
}
i = mPendingRequests.indexOfKey(cseq);
if (i < 0) {
- // Unsolicited response?
- TRESPASS();
+ return -ENOENT;
+ }
+
+ *index = i;
+
+ return OK;
+}
+
+bool ARTSPConnection::notifyResponseListener(
+ const sp<ARTSPResponse> &response) {
+ ssize_t i;
+ status_t err = findPendingRequest(response, &i);
+
+ if (err == OK && i < 0) {
+ // An unsolicited server response is not a problem.
+ return true;
+ }
+
+ if (err != OK) {
+ return false;
}
sp<AMessage> reply = mPendingRequests.valueAt(i);
@@ -660,4 +749,160 @@ bool ARTSPConnection::notifyResponseListener(
return true;
}
+bool ARTSPConnection::parseAuthMethod(const sp<ARTSPResponse> &response) {
+ ssize_t i = response->mHeaders.indexOfKey("www-authenticate");
+
+ if (i < 0) {
+ return false;
+ }
+
+ AString value = response->mHeaders.valueAt(i);
+
+ if (!strncmp(value.c_str(), "Basic", 5)) {
+ mAuthType = BASIC;
+ } else {
+#if !defined(HAVE_ANDROID_OS)
+ // We don't have access to the MD5 implementation on the simulator,
+ // so we won't support digest authentication.
+ return false;
+#endif
+
+ CHECK(!strncmp(value.c_str(), "Digest", 6));
+ mAuthType = DIGEST;
+
+ i = value.find("nonce=");
+ CHECK_GE(i, 0);
+ CHECK_EQ(value.c_str()[i + 6], '\"');
+ ssize_t j = value.find("\"", i + 7);
+ CHECK_GE(j, 0);
+
+ mNonce.setTo(value, i + 7, j - i - 7);
+ }
+
+ return true;
+}
+
+#if defined(HAVE_ANDROID_OS)
+static void H(const AString &s, AString *out) {
+ out->clear();
+
+ MD5_CTX m;
+ MD5_Init(&m);
+ MD5_Update(&m, s.c_str(), s.size());
+
+ uint8_t key[16];
+ MD5_Final(key, &m);
+
+ for (size_t i = 0; i < 16; ++i) {
+ char nibble = key[i] >> 4;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+
+ nibble = key[i] & 0x0f;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+ }
+}
+#endif
+
+static void GetMethodAndURL(
+ const AString &request, AString *method, AString *url) {
+ ssize_t space1 = request.find(" ");
+ CHECK_GE(space1, 0);
+
+ ssize_t space2 = request.find(" ", space1 + 1);
+ CHECK_GE(space2, 0);
+
+ method->setTo(request, 0, space1);
+ // Exclude the trailing space that follows the URL.
+ url->setTo(request, space1 + 1, space2 - space1 - 1);
+}
+
+void ARTSPConnection::addAuthentication(AString *request) {
+ if (mAuthType == NONE) {
+ return;
+ }
+
+ // Find the boundary between headers and the body.
+ ssize_t i = request->find("\r\n\r\n");
+ CHECK_GE(i, 0);
+
+ if (mAuthType == BASIC) {
+ AString tmp;
+ tmp.append(mUser);
+ tmp.append(":");
+ tmp.append(mPass);
+
+ AString out;
+ encodeBase64(tmp.c_str(), tmp.size(), &out);
+
+ AString fragment;
+ fragment.append("Authorization: Basic ");
+ fragment.append(out);
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+
+ return;
+ }
+
+#if defined(HAVE_ANDROID_OS)
+ CHECK_EQ((int)mAuthType, (int)DIGEST);
+
+ AString method, url;
+ GetMethodAndURL(*request, &method, &url);
+
+ AString A1;
+ A1.append(mUser);
+ A1.append(":");
+ A1.append("Streaming Server");
+ A1.append(":");
+ A1.append(mPass);
+
+ AString A2;
+ A2.append(method);
+ A2.append(":");
+ A2.append(url);
+
+ AString HA1, HA2;
+ H(A1, &HA1);
+ H(A2, &HA2);
+
+ AString tmp;
+ tmp.append(HA1);
+ tmp.append(":");
+ tmp.append(mNonce);
+ tmp.append(":");
+ tmp.append(HA2);
+
+ AString digest;
+ H(tmp, &digest);
+
+ AString fragment;
+ fragment.append("Authorization: Digest ");
+ fragment.append("nonce=\"");
+ fragment.append(mNonce);
+ fragment.append("\", ");
+ fragment.append("username=\"");
+ fragment.append(mUser);
+ fragment.append("\", ");
+ fragment.append("uri=\"");
+ fragment.append(url);
+ fragment.append("\", ");
+ fragment.append("response=\"");
+ fragment.append(digest);
+ fragment.append("\"");
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+#endif
+}
+
} // namespace android
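The digest branch of addAuthentication() is RFC 2617 with no qop: response = MD5(MD5(user:realm:pass) : nonce : MD5(method:uri)), with the realm hard-coded to "Streaming Server" above. A self-contained OpenSSL sketch of the same computation, with made-up credentials and nonce, handy for cross-checking a server trace:

    #include <openssl/md5.h>
    #include <cstdio>
    #include <string>

    // Hex-encode the MD5 digest of 's', mirroring H() above.
    static std::string H(const std::string &s) {
        unsigned char d[MD5_DIGEST_LENGTH];
        MD5(reinterpret_cast<const unsigned char *>(s.data()), s.size(), d);
        char out[2 * MD5_DIGEST_LENGTH + 1];
        for (int i = 0; i < MD5_DIGEST_LENGTH; ++i) {
            snprintf(out + 2 * i, 3, "%02x", d[i]);
        }
        return std::string(out, 2 * MD5_DIGEST_LENGTH);
    }

    int main() {
        // Hypothetical credentials and nonce, just to show the shape of it.
        std::string user = "alice", pass = "secret", realm = "Streaming Server";
        std::string nonce = "0123456789abcdef", method = "DESCRIBE";
        std::string uri = "rtsp://example.com:554/stream";

        std::string ha1 = H(user + ":" + realm + ":" + pass);
        std::string ha2 = H(method + ":" + uri);
        std::string response = H(ha1 + ":" + nonce + ":" + ha2);

        printf("Authorization: Digest nonce=\"%s\", username=\"%s\", "
               "uri=\"%s\", response=\"%s\"\r\n",
               nonce.c_str(), user.c_str(), uri.c_str(), response.c_str());
        return 0;
    }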
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 96e0d5b..19be2a6 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -42,6 +42,10 @@ struct ARTSPConnection : public AHandler {
void observeBinaryData(const sp<AMessage> &reply);
+ static bool ParseURL(
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass);
+
protected:
virtual ~ARTSPConnection();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -62,9 +66,18 @@ private:
kWhatObserveBinaryData = 'obin',
};
+ enum AuthType {
+ NONE,
+ BASIC,
+ DIGEST
+ };
+
static const int64_t kSelectTimeoutUs;
State mState;
+ AString mUser, mPass;
+ AuthType mAuthType;
+ AString mNonce;
int mSocket;
int32_t mConnectionID;
int32_t mNextCSeq;
@@ -90,8 +103,11 @@ private:
sp<ABuffer> receiveBinaryData();
bool notifyResponseListener(const sp<ARTSPResponse> &response);
- static bool ParseURL(
- const char *url, AString *host, unsigned *port, AString *path);
+ bool parseAuthMethod(const sp<ARTSPResponse> &response);
+ void addAuthentication(AString *request);
+
+ status_t findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const;
static bool ParseSingleUnsignedLong(
const char *from, unsigned long *x);
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 612caff..880aa85 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -57,12 +57,20 @@ bool ASessionDescription::parse(const void *data, size_t size) {
size_t i = 0;
for (;;) {
- ssize_t eolPos = desc.find("\r\n", i);
+ ssize_t eolPos = desc.find("\n", i);
+
if (eolPos < 0) {
break;
}
- AString line(desc, i, eolPos - i);
+ AString line;
+ if ((size_t)eolPos > i && desc.c_str()[eolPos - 1] == '\r') {
+ // We accept both '\n' and '\r\n' line endings; if it's
+ // the latter, strip the '\r' as well.
+ line.setTo(desc, i, eolPos - i - 1);
+ } else {
+ line.setTo(desc, i, eolPos - i);
+ }
if (line.size() < 2 || line.c_str()[1] != '=') {
return false;
@@ -141,7 +149,7 @@ bool ASessionDescription::parse(const void *data, size_t size) {
}
}
- i = eolPos + 2;
+ i = eolPos + 1;
}
return true;
@@ -245,7 +253,7 @@ bool ASessionDescription::getDurationUs(int64_t *durationUs) const {
return false;
}
- if (value == "npt=now-") {
+ if (value == "npt=now-" || value == "npt=0-") {
return false;
}
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 081ae32..0bbadc1 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -23,6 +23,7 @@ LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
$(TOP)/frameworks/base/media/libstagefright/include \
+ $(TOP)/external/openssl/include
LOCAL_MODULE:= libstagefright_rtsp
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 6943608..9bb8c46 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -96,6 +96,7 @@ struct MyHandler : public AHandler {
mNetLooper(new ALooper),
mConn(new ARTSPConnection),
mRTPConn(new ARTPConnection),
+ mOriginalSessionURL(url),
mSessionURL(url),
mSetupTracksSuccessful(false),
mSeekPending(false),
@@ -113,6 +114,23 @@ struct MyHandler : public AHandler {
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
PRIORITY_HIGHEST);
+
+ // Strip any authentication info from the session URL; we don't
+ // want to transmit the user/pass in cleartext.
+ AString host, path, user, pass;
+ unsigned port;
+ if (ARTSPConnection::ParseURL(
+ mSessionURL.c_str(), &host, &port, &path, &user, &pass)
+ && user.size() > 0) {
+ mSessionURL.clear();
+ mSessionURL.append("rtsp://");
+ mSessionURL.append(host);
+ mSessionURL.append(":");
+ mSessionURL.append(StringPrintf("%u", port));
+ mSessionURL.append(path);
+
+ LOGI("rewritten session url: '%s'", mSessionURL.c_str());
+ }
}
void connect(const sp<AMessage> &doneMsg) {
@@ -126,7 +144,7 @@ struct MyHandler : public AHandler {
mConn->observeBinaryData(notify);
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
}
void disconnect(const sp<AMessage> &doneMsg) {
@@ -312,7 +330,7 @@ struct MyHandler : public AHandler {
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
} else {
(new AMessage('quit', id()))->post();
}
@@ -922,7 +940,7 @@ struct MyHandler : public AHandler {
CHECK(GetAttribute(range.c_str(), "npt", &val));
float npt1, npt2;
- if (val == "now-") {
+ if (val == "now-" || val == "0-") {
// This is a live stream and therefore not seekable.
return;
} else {
@@ -992,6 +1010,7 @@ private:
sp<ARTSPConnection> mConn;
sp<ARTPConnection> mRTPConn;
sp<ASessionDescription> mSessionDesc;
+ AString mOriginalSessionURL; // This one still has user:pass@
AString mSessionURL;
AString mBaseURL;
AString mSessionID;
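With user/pass support in ParseURL(), MyHandler's constructor above rewrites the session URL so credentials never appear on the wire outside the Authorization header. A hedged usage sketch of the now-public ParseURL(), with a made-up URL, showing what the rewrite produces:

    #include "ARTSPConnection.h"
    #include <media/stagefright/foundation/ADebug.h>

    using namespace android;

    // Hypothetical URL, just to show how the pieces come apart.
    void parseExample() {
        AString host, path, user, pass;
        unsigned port;
        CHECK(ARTSPConnection::ParseURL(
                "rtsp://alice:secret@example.com:554/live",
                &host, &port, &path, &user, &pass));
        // host == "example.com", port == 554, path == "/live",
        // user == "alice", pass == "secret". MyHandler then reassembles
        // "rtsp://example.com:554/live" as the session URL, while the
        // original URL (with credentials) is kept only for connect().
    }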
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
new file mode 100644
index 0000000..0794ad1
--- /dev/null
+++ b/media/libstagefright/yuv/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ YUVImage.cpp \
+ YUVCanvas.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils
+
+LOCAL_MODULE:= libstagefright_yuv
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp
new file mode 100644
index 0000000..38aa779
--- /dev/null
+++ b/media/libstagefright/yuv/YUVCanvas.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVCanvas"
+
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+
+namespace android {
+
+YUVCanvas::YUVCanvas(YUVImage &yuvImage)
+ : mYUVImage(yuvImage) {
+}
+
+YUVCanvas::~YUVCanvas() {
+}
+
+void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::FillYUVRectangle(const Rect& rect,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = rect.top; y < rect.bottom; ++y) {
+ for (int32_t x = rect.left; x < rect.right; ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::CopyImageRect(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage) {
+
+ // Try fast copy first
+ if (YUVImage::fastCopyRectangle(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, mYUVImage)) {
+ return;
+ }
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) {
+ for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) {
+ int32_t srcX = srcStartX + offsetX;
+ int32_t srcY = srcStartY + offsetY;
+
+ int32_t destX = destStartX + offsetX;
+ int32_t destY = destStartY + offsetY;
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::downsample(
+ int32_t srcOffsetX, int32_t srcOffsetY,
+ int32_t skipX, int32_t skipY,
+ const YUVImage &srcImage) {
+ // TODO: Add a low pass filter for downsampling.
+
+ // Check that srcImage is big enough to fill mYUVImage.
+ CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width());
+ CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height());
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ int32_t srcY = srcOffsetY;
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ int32_t srcX = srcOffsetX;
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+
+ srcX += skipX;
+ }
+ srcY += skipY;
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
new file mode 100644
index 0000000..b712062
--- /dev/null
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -0,0 +1,413 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVImage"
+
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+#include <media/stagefright/MediaDebug.h>
+
+namespace android {
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+
+ size_t numberOfBytes = bufferSize(yuvFormat, width, height);
+ uint8_t *buffer = new uint8_t[numberOfBytes];
+ mBuffer = buffer;
+ mOwnBuffer = true;
+
+ initializeYUVPointers();
+}
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+ mBuffer = buffer;
+ mOwnBuffer = false;
+
+ initializeYUVPointers();
+}
+
+//static
+size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ int32_t numberOfPixels = width*height;
+ size_t numberOfBytes = 0;
+ if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) {
+ // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each.
+ numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1));
+ } else {
+ LOGE("Format not supported");
+ }
+ return numberOfBytes;
+}
+
+bool YUVImage::initializeYUVPointers() {
+ int32_t numberOfPixels = mWidth * mHeight;
+
+ if (mYUVFormat == YUV420Planar) {
+ mYdata = (uint8_t *)mBuffer;
+ mUdata = mYdata + numberOfPixels;
+ mVdata = mUdata + (numberOfPixels >> 2);
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // U and V channels are interleaved as VUVUVU.
+ // So V data starts at the end of Y channel and
+ // U data starts right after V's start.
+ mYdata = (uint8_t *)mBuffer;
+ mVdata = mYdata + numberOfPixels;
+ mUdata = mVdata + 1;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+ return true;
+}
+
+YUVImage::~YUVImage() {
+ if (mOwnBuffer) delete[] mBuffer;
+}
+
+bool YUVImage::getOffsets(int32_t x, int32_t y,
+ int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const {
+ *yOffset = y*mWidth + x;
+
+ int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1);
+ if (mYUVFormat == YUV420Planar) {
+ *uOffset = uvOffset;
+ *vOffset = uvOffset;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uOffset = 2*uvOffset;
+ *vOffset = 2*uvOffset;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+bool YUVImage::getOffsetIncrementsPerDataRow(
+ int32_t *yDataOffsetIncrement,
+ int32_t *uDataOffsetIncrement,
+ int32_t *vDataOffsetIncrement) const {
+ *yDataOffsetIncrement = mWidth;
+
+ int32_t uvDataOffsetIncrement = mWidth >> 1;
+
+ if (mYUVFormat == YUV420Planar) {
+ *uDataOffsetIncrement = uvDataOffsetIncrement;
+ *vDataOffsetIncrement = uvDataOffsetIncrement;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ *vDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+uint8_t* YUVImage::getYAddress(int32_t offset) const {
+ return mYdata + offset;
+}
+
+uint8_t* YUVImage::getUAddress(int32_t offset) const {
+ return mUdata + offset;
+}
+
+uint8_t* YUVImage::getVAddress(int32_t offset) const {
+ return mVdata + offset;
+}
+
+bool YUVImage::getYUVAddresses(int32_t x, int32_t y,
+ uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const {
+ int32_t yOffset;
+ int32_t uOffset;
+ int32_t vOffset;
+ if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false;
+
+ *yAddr = getYAddress(yOffset);
+ *uAddr = getUAddress(uOffset);
+ *vAddr = getVAddress(vOffset);
+
+ return true;
+}
+
+bool YUVImage::validPixel(int32_t x, int32_t y) const {
+ return (x >= 0 && x < mWidth &&
+ y >= 0 && y < mHeight);
+}
+
+bool YUVImage::getPixelValue(int32_t x, int32_t y,
+ uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yPtr = *yAddr;
+ *uPtr = *uAddr;
+ *vPtr = *vAddr;
+
+ return true;
+}
+
+bool YUVImage::setPixelValue(int32_t x, int32_t y,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yAddr = yValue;
+ *uAddr = uValue;
+ *vAddr = vValue;
+
+ return true;
+}
+
+void YUVImage::fastCopyRectangle420Planar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420Planar);
+ CHECK(destImage.mYUVFormat == YUV420Planar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr += ySrcOffsetIncrement;
+ yDestAddr += yDestOffsetIncrement;
+ }
+ }
+
+ // Copy U
+ {
+ size_t numberOfUBytesPerRow = (size_t) (width >> 1);
+ uint8_t *uSrcAddr = uSrcAddrBase;
+ uint8_t *uDestAddr = uDestAddrBase;
+ // Every other row has an entry for U/V channel values. Hence only
+ // go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow);
+
+ uSrcAddr += uSrcOffsetIncrement;
+ uDestAddr += uDestOffsetIncrement;
+ }
+ }
+
+ // Copy V
+ {
+ size_t numberOfVBytesPerRow = (size_t) (width >> 1);
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+void YUVImage::fastCopyRectangle420SemiPlanar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420SemiPlanar);
+ CHECK(destImage.mYUVFormat == YUV420SemiPlanar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr = ySrcAddr + ySrcOffsetIncrement;
+ yDestAddr = yDestAddr + yDestOffsetIncrement;
+ }
+ }
+
+ // Copy UV
+ {
+ // UV are interleaved. So number of UV bytes per row is 2*(width/2).
+ size_t numberOfUVBytesPerRow = (size_t) width;
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+// static
+bool YUVImage::fastCopyRectangle(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ if (srcImage.mYUVFormat == destImage.mYUVFormat) {
+ if (srcImage.mYUVFormat == YUV420Planar) {
+ fastCopyRectangle420Planar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ } else if (srcImage.mYUVFormat == YUV420SemiPlanar) {
+ fastCopyRectangle420SemiPlanar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ }
+ return true;
+ }
+ return false;
+}
+
+uint8_t clamp(int32_t v, int32_t minValue, int32_t maxValue) {
+ CHECK(maxValue >= minValue);
+
+ if (v < minValue) return minValue;
+ else if (v > maxValue) return maxValue;
+ else return v;
+}
+
+void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+ uint8_t *r, uint8_t *g, uint8_t *b) const {
+ // Compute in a wider type first; clamping an already-narrowed uint8_t
+ // to [0, 255] would be a no-op.
+ int32_t rTmp = yValue + (1.370705 * (vValue-128));
+ int32_t gTmp = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128));
+ int32_t bTmp = yValue + (1.732446 * (uValue-128));
+
+ *r = clamp(rTmp, 0, 255);
+ *g = clamp(gTmp, 0, 255);
+ *b = clamp(bTmp, 0, 255);
+}
+
+bool YUVImage::writeToPPM(const char *filename) const {
+ FILE *fp = fopen(filename, "w");
+ if (fp == NULL) {
+ return false;
+ }
+ fprintf(fp, "P3\n");
+ fprintf(fp, "%d %d\n", mWidth, mHeight);
+ fprintf(fp, "255\n");
+ for (int32_t y = 0; y < mHeight; ++y) {
+ for (int32_t x = 0; x < mWidth; ++x) {
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+ getPixelValue(x, y, &yValue, &uValue, &vValue);
+
+ uint8_t rValue;
+ uint8_t gValue;
+ uint8_t bValue;
+ yuv2rgb(yValue, uValue, vValue, &rValue, &gValue, &bValue);
+
+ fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue);
+ }
+ }
+ fclose(fp);
+ return true;
+}
+
+} // namespace android
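Together, YUVImage and YUVCanvas form the small software YUV toolkit that VideoSourceDownSampler builds on. A minimal usage sketch (dimensions and output path invented): allocate a planar source, fill it with a flat test colour, downsample it 2x into a second image, and dump the result as a PPM for inspection:

    #include <media/stagefright/YUVImage.h>
    #include <media/stagefright/YUVCanvas.h>

    using namespace android;

    int main() {
        // Source: 640x480 YUV420 planar; the image allocates its own buffer.
        YUVImage src(YUVImage::YUV420Planar, 640, 480);
        YUVCanvas srcCanvas(src);
        srcCanvas.FillYUV(128, 64, 192);  // flat test colour

        // Destination: 320x240, sampled every 2nd pixel with no offset.
        YUVImage dst(YUVImage::YUV420Planar, 320, 240);
        YUVCanvas dstCanvas(dst);
        dstCanvas.downsample(0 /* srcOffsetX */, 0 /* srcOffsetY */,
                             2 /* skipX */, 2 /* skipY */, src);

        dst.writeToPPM("/data/downsampled.ppm");  // hypothetical output path
        return 0;
    }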
diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk
new file mode 100644
index 0000000..7502f6e
--- /dev/null
+++ b/media/mtp/Android.mk
@@ -0,0 +1,78 @@
+#
+# Copyright (C) 2010 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_SIMULATOR),true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ MtpClient.cpp \
+ MtpCursor.cpp \
+ MtpDataPacket.cpp \
+ MtpDebug.cpp \
+ MtpDevice.cpp \
+ MtpEventPacket.cpp \
+ MtpDeviceInfo.cpp \
+ MtpObjectInfo.cpp \
+ MtpPacket.cpp \
+ MtpProperty.cpp \
+ MtpRequestPacket.cpp \
+ MtpResponsePacket.cpp \
+ MtpServer.cpp \
+ MtpStorageInfo.cpp \
+ MtpStringBuffer.cpp \
+ MtpStorage.cpp \
+ MtpUtils.cpp \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST
+
+include $(BUILD_STATIC_LIBRARY)
+
+endif
+
+ifeq ($(HOST_OS),linux)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ MtpClient.cpp \
+ MtpCursor.cpp \
+ MtpDataPacket.cpp \
+ MtpDebug.cpp \
+ MtpDevice.cpp \
+ MtpEventPacket.cpp \
+ MtpDeviceInfo.cpp \
+ MtpObjectInfo.cpp \
+ MtpPacket.cpp \
+ MtpProperty.cpp \
+ MtpRequestPacket.cpp \
+ MtpResponsePacket.cpp \
+ MtpStorageInfo.cpp \
+ MtpStringBuffer.cpp \
+ MtpStorage.cpp \
+ MtpUtils.cpp \
+
+LOCAL_MODULE:= libmtp
+
+LOCAL_CFLAGS := -DMTP_HOST
+
+include $(BUILD_HOST_STATIC_LIBRARY)
+
+endif
diff --git a/media/mtp/MtpClient.cpp b/media/mtp/MtpClient.cpp
new file mode 100644
index 0000000..ceb6a43
--- /dev/null
+++ b/media/mtp/MtpClient.cpp
@@ -0,0 +1,262 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpClient"
+
+#include "MtpDebug.h"
+#include "MtpClient.h"
+#include "MtpDevice.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+
+#include <usbhost/usbhost.h>
+#include <linux/version.h>
+#if LINUX_VERSION_CODE > KERNEL_VERSION(2, 6, 20)
+#include <linux/usb/ch9.h>
+#else
+#include <linux/usb_ch9.h>
+#endif
+
+namespace android {
+
+static bool isMtpDevice(uint16_t vendor, uint16_t product) {
+ // Sandisk Sansa Fuze
+ if (vendor == 0x0781 && product == 0x74c2)
+ return true;
+ // Samsung YP-Z5
+ if (vendor == 0x04e8 && product == 0x503c)
+ return true;
+ return false;
+}
+
+class MtpClientThread : public Thread {
+private:
+ MtpClient* mClient;
+
+public:
+ MtpClientThread(MtpClient* client)
+ : mClient(client)
+ {
+ }
+
+ virtual bool threadLoop() {
+ return mClient->threadLoop();
+ }
+};
+
+
+MtpClient::MtpClient()
+ : mThread(NULL),
+ mUsbHostContext(NULL),
+ mDone(false)
+{
+}
+
+MtpClient::~MtpClient() {
+ usb_host_cleanup(mUsbHostContext);
+}
+
+bool MtpClient::start() {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mThread)
+ return true;
+
+ mUsbHostContext = usb_host_init();
+ if (!mUsbHostContext)
+ return false;
+
+ mThread = new MtpClientThread(this);
+ mThread->run("MtpClientThread");
+ // wait for the thread to do initial device discovery before returning
+ mThreadStartCondition.wait(mMutex);
+
+ return true;
+}
+
+void MtpClient::stop() {
+ mDone = true;
+}
+
+MtpDevice* MtpClient::getDevice(int id) {
+ for (int i = 0; i < mDeviceList.size(); i++) {
+ MtpDevice* device = mDeviceList[i];
+ if (device->getID() == id)
+ return device;
+ }
+ return NULL;
+}
+
+bool MtpClient::usbDeviceAdded(const char *devname) {
+ struct usb_descriptor_header* desc;
+ struct usb_descriptor_iter iter;
+
+ struct usb_device *device = usb_device_open(devname);
+ if (!device) {
+ LOGE("usb_device_open failed\n");
+ return mDone;
+ }
+
+ usb_descriptor_iter_init(device, &iter);
+
+ while ((desc = usb_descriptor_iter_next(&iter)) != NULL) {
+ if (desc->bDescriptorType == USB_DT_INTERFACE) {
+ struct usb_interface_descriptor *interface = (struct usb_interface_descriptor *)desc;
+
+ if (interface->bInterfaceClass == USB_CLASS_STILL_IMAGE &&
+ interface->bInterfaceSubClass == 1 && // Still Image Capture
+ interface->bInterfaceProtocol == 1) // Picture Transfer Protocol (PIMA 15470)
+ {
+ LOGD("Found camera: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+ usb_device_get_product_name(device));
+ } else if (interface->bInterfaceClass == 0xFF &&
+ interface->bInterfaceSubClass == 0xFF &&
+ interface->bInterfaceProtocol == 0) {
+ char* interfaceName = usb_device_get_string(device, interface->iInterface);
+ if (!interfaceName || strcmp(interfaceName, "MTP"))
+ continue;
+ // Looks like an android style MTP device
+ LOGD("Found MTP device: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device),
+ usb_device_get_product_name(device));
+ } else {
+ // look for special cased devices based on vendor/product ID
+ // we are doing this mainly for testing purposes
+ uint16_t vendor = usb_device_get_vendor_id(device);
+ uint16_t product = usb_device_get_product_id(device);
+ if (!isMtpDevice(vendor, product)) {
+ // not an MTP or PTP device
+ continue;
+ }
+ // request MTP OS string and descriptor
+ // some music players need to see this before entering MTP mode.
+ char buffer[256];
+ memset(buffer, 0, sizeof(buffer));
+ int ret = usb_device_send_control(device,
+ USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_STANDARD,
+ USB_REQ_GET_DESCRIPTOR, (USB_DT_STRING << 8) | 0xEE,
+ 0, sizeof(buffer), buffer);
+ printf("usb_device_send_control returned %d errno: %d\n", ret, errno);
+ if (ret > 0) {
+ printf("got MTP string %s\n", buffer);
+ ret = usb_device_send_control(device,
+ USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_VENDOR, 1,
+ 0, 4, sizeof(buffer), buffer);
+ printf("OS descriptor got %d\n", ret);
+ } else {
+ printf("no MTP string\n");
+ }
+ }
+
+ // if we got here, then we have a likely MTP or PTP device
+
+ // interface should be followed by three endpoints
+ struct usb_endpoint_descriptor *ep;
+ struct usb_endpoint_descriptor *ep_in_desc = NULL;
+ struct usb_endpoint_descriptor *ep_out_desc = NULL;
+ struct usb_endpoint_descriptor *ep_intr_desc = NULL;
+ for (int i = 0; i < 3; i++) {
+ ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter);
+ if (!ep || ep->bDescriptorType != USB_DT_ENDPOINT) {
+ LOGE("endpoints not found\n");
+ return mDone;
+ }
+ if (ep->bmAttributes == USB_ENDPOINT_XFER_BULK) {
+ if (ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK)
+ ep_in_desc = ep;
+ else
+ ep_out_desc = ep;
+ } else if (ep->bmAttributes == USB_ENDPOINT_XFER_INT &&
+ ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK) {
+ ep_intr_desc = ep;
+ }
+ }
+ if (!ep_in_desc || !ep_out_desc || !ep_intr_desc) {
+ LOGE("endpoints not found\n");
+ return mDone;
+ }
+
+ struct usb_endpoint *ep_in = usb_endpoint_open(device, ep_in_desc);
+ struct usb_endpoint *ep_out = usb_endpoint_open(device, ep_out_desc);
+ struct usb_endpoint *ep_intr = usb_endpoint_open(device, ep_intr_desc);
+
+ if (usb_device_claim_interface(device, interface->bInterfaceNumber)) {
+ LOGE("usb_device_claim_interface failed errno: %d\n", errno);
+ usb_endpoint_close(ep_in);
+ usb_endpoint_close(ep_out);
+ usb_endpoint_close(ep_intr);
+ return mDone;
+ }
+
+ MtpDevice* mtpDevice = new MtpDevice(device, interface->bInterfaceNumber,
+ ep_in, ep_out, ep_intr);
+ mDeviceList.add(mtpDevice);
+ mtpDevice->initialize();
+ deviceAdded(mtpDevice);
+ return mDone;
+ }
+ }
+
+ usb_device_close(device);
+ return mDone;
+}
+
+bool MtpClient::usbDeviceRemoved(const char *devname) {
+ for (int i = 0; i < mDeviceList.size(); i++) {
+ MtpDevice* device = mDeviceList[i];
+ if (!strcmp(devname, device->getDeviceName())) {
+ deviceRemoved(device);
+ mDeviceList.removeAt(i);
+ delete device;
+ LOGD("Camera removed!\n");
+ break;
+ }
+ }
+ return mDone;
+}
+
+bool MtpClient::usbDiscoveryDone() {
+ Mutex::Autolock autoLock(mMutex);
+ mThreadStartCondition.signal();
+ return mDone;
+}
+
+bool MtpClient::threadLoop() {
+ usb_host_run(mUsbHostContext, usb_device_added, usb_device_removed, usb_discovery_done, this);
+ return false;
+}
+
+int MtpClient::usb_device_added(const char *devname, void* client_data) {
+ LOGD("usb_device_added %s\n", devname);
+ return ((MtpClient *)client_data)->usbDeviceAdded(devname);
+}
+
+int MtpClient::usb_device_removed(const char *devname, void* client_data) {
+ LOGD("usb_device_removed %s\n", devname);
+ return ((MtpClient *)client_data)->usbDeviceRemoved(devname);
+}
+
+int MtpClient::usb_discovery_done(void* client_data) {
+ LOGD("usb_discovery_done\n");
+ return ((MtpClient *)client_data)->usbDiscoveryDone();
+}
+
+} // namespace android
diff --git a/media/mtp/MtpClient.h b/media/mtp/MtpClient.h
new file mode 100644
index 0000000..fa5c527
--- /dev/null
+++ b/media/mtp/MtpClient.h
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_CLIENT_H
+#define _MTP_CLIENT_H
+
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_host_context;
+
+namespace android {
+
+class MtpClientThread;
+
+class MtpClient {
+private:
+ MtpDeviceList mDeviceList;
+ MtpClientThread* mThread;
+ Condition mThreadStartCondition;
+ Mutex mMutex;
+ struct usb_host_context* mUsbHostContext;
+ bool mDone;
+
+public:
+ MtpClient();
+ virtual ~MtpClient();
+
+ bool start();
+ void stop();
+
+ inline MtpDeviceList& getDeviceList() { return mDeviceList; }
+ MtpDevice* getDevice(int id);
+
+
+ virtual void deviceAdded(MtpDevice *device) = 0;
+ virtual void deviceRemoved(MtpDevice *device) = 0;
+
+private:
+ // these return true if we should stop monitoring USB and clean up
+ bool usbDeviceAdded(const char *devname);
+ bool usbDeviceRemoved(const char *devname);
+ bool usbDiscoveryDone();
+
+ friend class MtpClientThread;
+ bool threadLoop();
+ static int usb_device_added(const char *devname, void* client_data);
+ static int usb_device_removed(const char *devname, void* client_data);
+ static int usb_discovery_done(void* client_data);
+};
+
+}; // namespace android
+
+#endif // _MTP_CLIENT_H
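
MtpClient itself is abstract: a user of the library derives from it and implements deviceAdded()/deviceRemoved(). A minimal hypothetical subclass (the class name and LOGD bodies are illustrative only, and assume utils/Log.h and MtpDevice.h are included):

    class MyMtpClient : public MtpClient {
    public:
        virtual void deviceAdded(MtpDevice* device) {
            LOGD("MTP device attached: %s\n", device->getDeviceName());
        }
        virtual void deviceRemoved(MtpDevice* device) {
            LOGD("MTP device detached: %s\n", device->getDeviceName());
        }
    };
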
diff --git a/media/mtp/MtpCursor.cpp b/media/mtp/MtpCursor.cpp
new file mode 100644
index 0000000..35d90dc
--- /dev/null
+++ b/media/mtp/MtpCursor.cpp
@@ -0,0 +1,461 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpCursor"
+
+#include "MtpDebug.h"
+#include "MtpClient.h"
+#include "MtpCursor.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpStorageInfo.h"
+
+
+#include "binder/CursorWindow.h"
+
+namespace android {
+
+/* Device Column IDs */
+/* These must match the values in MtpCursor.java */
+#define DEVICE_ROW_ID 1
+#define DEVICE_MANUFACTURER 2
+#define DEVICE_MODEL 3
+
+/* Storage Column IDs */
+/* These must match the values in MtpCursor.java */
+#define STORAGE_ROW_ID 101
+#define STORAGE_IDENTIFIER 102
+#define STORAGE_DESCRIPTION 103
+
+/* Object Column IDs */
+/* These must match the values in MtpCursor.java */
+#define OBJECT_ROW_ID 201
+#define OBJECT_STORAGE_ID 202
+#define OBJECT_FORMAT 203
+#define OBJECT_PROTECTION_STATUS 204
+#define OBJECT_SIZE 205
+#define OBJECT_THUMB_FORMAT 206
+#define OBJECT_THUMB_SIZE 207
+#define OBJECT_THUMB_WIDTH 208
+#define OBJECT_THUMB_HEIGHT 209
+#define OBJECT_IMAGE_WIDTH 210
+#define OBJECT_IMAGE_HEIGHT 211
+#define OBJECT_IMAGE_DEPTH 212
+#define OBJECT_PARENT 213
+#define OBJECT_ASSOCIATION_TYPE 214
+#define OBJECT_ASSOCIATION_DESC 215
+#define OBJECT_SEQUENCE_NUMBER 216
+#define OBJECT_NAME 217
+#define OBJECT_DATE_CREATED 218
+#define OBJECT_DATE_MODIFIED 219
+#define OBJECT_KEYWORDS 220
+#define OBJECT_THUMB 221
+
+MtpCursor::MtpCursor(MtpClient* client, int queryType, int deviceID,
+ MtpStorageID storageID, MtpObjectHandle objectID,
+ int columnCount, int* columns)
+ : mClient(client),
+ mQueryType(queryType),
+ mDeviceID(deviceID),
+ mStorageID(storageID),
+ mObjectID(objectID),
+ mColumnCount(columnCount),
+ mColumns(NULL)
+{
+ if (columns) {
+ mColumns = new int[columnCount];
+ memcpy(mColumns, columns, columnCount * sizeof(int));
+ }
+}
+
+MtpCursor::~MtpCursor() {
+ delete[] mColumns;
+}
+
+int MtpCursor::fillWindow(CursorWindow* window, int startPos) {
+ LOGD("MtpCursor::fillWindow mQueryType: %d\n", mQueryType);
+
+ switch (mQueryType) {
+ case DEVICE:
+ return fillDevices(window, startPos);
+ case DEVICE_ID:
+ return fillDevice(window, startPos);
+ case STORAGE:
+ return fillStorages(window, startPos);
+ case STORAGE_ID:
+ return fillStorage(window, startPos);
+ case OBJECT:
+ return fillObjects(window, 0, startPos);
+ case OBJECT_ID:
+ return fillObject(window, startPos);
+ case STORAGE_CHILDREN:
+ return fillObjects(window, -1, startPos);
+ case OBJECT_CHILDREN:
+ return fillObjects(window, mObjectID, startPos);
+ default:
+ LOGE("MtpCursor::fillWindow: unknown query type %d\n", mQueryType);
+ return 0;
+ }
+}
+
+int MtpCursor::fillDevices(CursorWindow* window, int startPos) {
+ int count = 0;
+ MtpDeviceList& deviceList = mClient->getDeviceList();
+ for (int i = 0; i < deviceList.size(); i++) {
+ MtpDevice* device = deviceList[i];
+ if (fillDevice(window, device, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ return count;
+}
+
+int MtpCursor::fillDevice(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillDevice(window, device, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+int MtpCursor::fillStorages(CursorWindow* window, int startPos) {
+ int count = 0;
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (!device)
+ return 0;
+ MtpStorageIDList* storageIDs = device->getStorageIDs();
+ if (!storageIDs)
+ return 0;
+
+ for (int i = 0; i < storageIDs->size(); i++) {
+ MtpStorageID storageID = (*storageIDs)[i];
+ if (fillStorage(window, device, storageID, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ delete storageIDs;
+ return count;
+}
+
+int MtpCursor::fillStorage(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillStorage(window, device, mStorageID, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+int MtpCursor::fillObjects(CursorWindow* window, int parent, int startPos) {
+ int count = 0;
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (!device)
+ return 0;
+ MtpObjectHandleList* handles = device->getObjectHandles(mStorageID, 0, parent);
+ if (!handles)
+ return 0;
+
+ for (int i = 0; i < handles->size(); i++) {
+ MtpObjectHandle handle = (*handles)[i];
+ if (fillObject(window, device, handle, startPos)) {
+ count++;
+ startPos++;
+ } else {
+ break;
+ }
+ }
+ delete handles;
+ return count;
+}
+
+int MtpCursor::fillObject(CursorWindow* window, int startPos) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ if (device && fillObject(window, device, mObjectID, startPos))
+ return 1;
+ else
+ return 0;
+}
+
+bool MtpCursor::fillDevice(CursorWindow* window, MtpDevice* device, int row) {
+ MtpDeviceInfo* deviceInfo = device->getDeviceInfo();
+ if (!deviceInfo)
+ return false;
+ if (!prepareRow(window))
+ return false;
+
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case DEVICE_ROW_ID:
+ if (!putLong(window, device->getID(), row, i))
+ return false;
+ break;
+ case DEVICE_MANUFACTURER:
+ if (!putString(window, deviceInfo->mManufacturer, row, i))
+ return false;
+ break;
+ case DEVICE_MODEL:
+ if (!putString(window, deviceInfo->mModel, row, i))
+ return false;
+ break;
+ default:
+ LOGE("fillDevice: unknown column %d\n", mColumns[i]);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool MtpCursor::fillStorage(CursorWindow* window, MtpDevice* device,
+ MtpStorageID storageID, int row) {
+
+LOGD("fillStorage %d\n", storageID);
+
+ MtpStorageInfo* storageInfo = device->getStorageInfo(storageID);
+ if (!storageInfo)
+ return false;
+ if (!prepareRow(window)) {
+ delete storageInfo;
+ return false;
+ }
+
+ const char* text;
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case STORAGE_ROW_ID:
+ if (!putLong(window, storageID, row, i))
+ goto fail;
+ break;
+ case STORAGE_IDENTIFIER:
+ text = storageInfo->mVolumeIdentifier;
+ if (!text || !text[0])
+ text = "Camera Storage";
+ if (!putString(window, text, row, i))
+ goto fail;
+ break;
+ case STORAGE_DESCRIPTION:
+ text = storageInfo->mStorageDescription;
+ if (!text || !text[0])
+ text = "Storage Description";
+ if (!putString(window, text, row, i))
+ goto fail;
+ break;
+ default:
+ LOGE("fillStorage: unknown column %d\n", mColumns[i]);
+ goto fail;
+ }
+ }
+
+ delete storageInfo;
+ return true;
+
+fail:
+ delete storageInfo;
+ return false;
+}
+
+bool MtpCursor::fillObject(CursorWindow* window, MtpDevice* device,
+ MtpObjectHandle objectID, int row) {
+
+ MtpObjectInfo* objectInfo = device->getObjectInfo(objectID);
+ if (!objectInfo)
+ return false;
+ // objectInfo->print();
+ if (!prepareRow(window)) {
+ delete objectInfo;
+ return false;
+ }
+
+ for (int i = 0; i < mColumnCount; i++) {
+ switch (mColumns[i]) {
+ case OBJECT_ROW_ID:
+ if (!putLong(window, objectID, row, i))
+ goto fail;
+ break;
+ case OBJECT_STORAGE_ID:
+ if (!putLong(window, objectInfo->mStorageID, row, i))
+ goto fail;
+ break;
+ case OBJECT_FORMAT:
+ if (!putLong(window, objectInfo->mFormat, row, i))
+ goto fail;
+ break;
+ case OBJECT_PROTECTION_STATUS:
+ if (!putLong(window, objectInfo->mProtectionStatus, row, i))
+ goto fail;
+ break;
+ case OBJECT_SIZE:
+ if (!putLong(window, objectInfo->mCompressedSize, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_FORMAT:
+ if (!putLong(window, objectInfo->mThumbFormat, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_SIZE:
+ if (!putLong(window, objectInfo->mThumbCompressedSize, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_WIDTH:
+ if (!putLong(window, objectInfo->mThumbPixWidth, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB_HEIGHT:
+ if (!putLong(window, objectInfo->mThumbPixHeight, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_WIDTH:
+ if (!putLong(window, objectInfo->mImagePixWidth, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_HEIGHT:
+ if (!putLong(window, objectInfo->mImagePixHeight, row, i))
+ goto fail;
+ break;
+ case OBJECT_IMAGE_DEPTH:
+ if (!putLong(window, objectInfo->mImagePixDepth, row, i))
+ goto fail;
+ break;
+ case OBJECT_PARENT:
+ if (!putLong(window, objectInfo->mParent, row, i))
+ goto fail;
+ break;
+ case OBJECT_ASSOCIATION_TYPE:
+ if (!putLong(window, objectInfo->mAssociationType, row, i))
+ goto fail;
+ break;
+ case OBJECT_ASSOCIATION_DESC:
+ if (!putLong(window, objectInfo->mAssociationDesc, row, i))
+ goto fail;
+ break;
+ case OBJECT_SEQUENCE_NUMBER:
+ if (!putLong(window, objectInfo->mSequenceNumber, row, i))
+ goto fail;
+ break;
+ case OBJECT_NAME:
+ if (!putString(window, objectInfo->mName, row, i))
+ goto fail;
+ break;
+ case OBJECT_DATE_CREATED:
+ if (!putLong(window, objectInfo->mDateCreated, row, i))
+ goto fail;
+ break;
+ case OBJECT_DATE_MODIFIED:
+ if (!putLong(window, objectInfo->mDateModified, row, i))
+ goto fail;
+ break;
+ case OBJECT_KEYWORDS:
+ if (!putString(window, objectInfo->mKeywords, row, i))
+ goto fail;
+ break;
+ case OBJECT_THUMB:
+ if (!putThumbnail(window, objectID, objectInfo->mFormat, row, i))
+ goto fail;
+ break;
+ default:
+ LOGE("fillObject: unknown column %d\n", mColumns[i]);
+ goto fail;
+ }
+ }
+
+ delete objectInfo;
+ return true;
+
+fail:
+ delete objectInfo;
+ return false;
+}
+
+bool MtpCursor::prepareRow(CursorWindow* window) {
+ if (!window->setNumColumns(mColumnCount)) {
+ LOGE("Failed to change column count from %d to %d", window->getNumColumns(), mColumnCount);
+ return false;
+ }
+ field_slot_t * fieldDir = window->allocRow();
+ if (!fieldDir) {
+ LOGE("Failed allocating fieldDir");
+ return false;
+ }
+ return true;
+}
+
+
+bool MtpCursor::putLong(CursorWindow* window, int64_t value, int row, int column) {
+ if (!window->putLong(row, column, value)) {
+ window->freeLastRow();
+ LOGE("Failed allocating space for a long in column %d", column);
+ return false;
+ }
+ return true;
+}
+
+bool MtpCursor::putString(CursorWindow* window, const char* text, int row, int column) {
+ int size = strlen(text) + 1;
+ int offset = window->alloc(size);
+ if (!offset) {
+ window->freeLastRow();
+ LOGE("Failed allocating %u bytes for text/blob %s", size, text);
+ return false;
+ }
+ window->copyIn(offset, (const uint8_t*)text, size);
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ field_slot_t * fieldSlot = window->getFieldSlot(row, column);
+ fieldSlot->type = FIELD_TYPE_STRING;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = size;
+ return true;
+}
+
+bool MtpCursor::putThumbnail(CursorWindow* window, MtpObjectHandle objectID,
+ MtpObjectFormat format, int row, int column) {
+ MtpDevice* device = mClient->getDevice(mDeviceID);
+ void* thumbnail;
+ int size, offset;
+ if (format == MTP_FORMAT_ASSOCIATION) {
+ thumbnail = NULL;
+ size = offset = 0;
+ } else {
+ thumbnail = device->getThumbnail(objectID, size);
+
+ LOGV("putThumbnail: %p, size: %d\n", thumbnail, size);
+ offset = window->alloc(size);
+ if (!offset) {
+ window->freeLastRow();
+ LOGE("Failed allocating %d bytes for thumbnail", size);
+ free(thumbnail);
+ return false;
+ }
+ }
+ if (thumbnail) {
+ window->copyIn(offset, (const uint8_t*)thumbnail, size);
+ // the buffer returned by getThumbnail() was malloc'd for us; release it
+ free(thumbnail);
+ }
+
+ // This must be updated after the call to alloc(), since that
+ // may move the field around in the window
+ field_slot_t * fieldSlot = window->getFieldSlot(row, column);
+ fieldSlot->type = FIELD_TYPE_BLOB;
+ fieldSlot->data.buffer.offset = offset;
+ fieldSlot->data.buffer.size = size;
+ return true;
+}
+
+} // namespace android
diff --git a/media/mtp/MtpCursor.h b/media/mtp/MtpCursor.h
new file mode 100644
index 0000000..2e03c29
--- /dev/null
+++ b/media/mtp/MtpCursor.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_CURSOR_H
+#define _MTP_CURSOR_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class CursorWindow;
+class MtpClient;
+
+class MtpCursor {
+private:
+ enum {
+ DEVICE = 1,
+ DEVICE_ID = 2,
+ STORAGE = 3,
+ STORAGE_ID = 4,
+ OBJECT = 5,
+ OBJECT_ID = 6,
+ STORAGE_CHILDREN = 7,
+ OBJECT_CHILDREN = 8,
+ };
+
+ MtpClient* mClient;
+ int mQueryType;
+ int mDeviceID;
+ MtpStorageID mStorageID;
+ MtpObjectHandle mObjectID;
+ int mColumnCount;
+ int* mColumns;
+
+public:
+ MtpCursor(MtpClient* client, int queryType, int deviceID,
+ MtpStorageID storageID, MtpObjectHandle objectID,
+ int columnCount, int* columns);
+ virtual ~MtpCursor();
+
+ int fillWindow(CursorWindow* window, int startPos);
+
+private:
+ int fillDevices(CursorWindow* window, int startPos);
+ int fillDevice(CursorWindow* window, int startPos);
+ int fillStorages(CursorWindow* window, int startPos);
+ int fillStorage(CursorWindow* window, int startPos);
+ int fillObjects(CursorWindow* window, int parent, int startPos);
+ int fillObject(CursorWindow* window, int startPos);
+
+ bool fillDevice(CursorWindow* window, MtpDevice* device, int startPos);
+ bool fillStorage(CursorWindow* window, MtpDevice* device,
+ MtpStorageID storageID, int row);
+ bool fillObject(CursorWindow* window, MtpDevice* device,
+ MtpObjectHandle objectID, int row);
+
+ bool prepareRow(CursorWindow* window);
+ bool putLong(CursorWindow* window, int64_t value, int row, int column);
+ bool putString(CursorWindow* window, const char* text, int row, int column);
+ bool putThumbnail(CursorWindow* window, MtpObjectHandle objectID,
+ MtpObjectFormat format, int row, int column);
+};
+
+}; // namespace android
+
+#endif // _MTP_CURSOR_H
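
The query-type and column-ID constants mirror the values used by MtpCursor.java, and a caller selects which columns to materialize by passing their IDs. A rough sketch of a device-list query, assuming an MtpClient* named client and a CursorWindow* named window already exist:

    // Sketch only: the literals mirror DEVICE and the DEVICE_* IDs in MtpCursor.cpp.
    int columns[] = { 1 /* DEVICE_ROW_ID */, 2 /* DEVICE_MANUFACTURER */, 3 /* DEVICE_MODEL */ };
    MtpCursor cursor(client, 1 /* DEVICE */, 0, 0, 0, 3, columns);
    int rows = cursor.fillWindow(window, 0);   // number of rows copied into the window
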
diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp
new file mode 100644
index 0000000..ec78ff0
--- /dev/null
+++ b/media/mtp/MtpDataPacket.cpp
@@ -0,0 +1,486 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDataPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include <usbhost/usbhost.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDataPacket::MtpDataPacket()
+ : MtpPacket(512),
+ mOffset(MTP_CONTAINER_HEADER_SIZE)
+{
+}
+
+MtpDataPacket::~MtpDataPacket() {
+}
+
+void MtpDataPacket::reset() {
+ MtpPacket::reset();
+ mOffset = MTP_CONTAINER_HEADER_SIZE;
+}
+
+void MtpDataPacket::setOperationCode(MtpOperationCode code) {
+ MtpPacket::putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+void MtpDataPacket::setTransactionID(MtpTransactionID id) {
+ MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint16_t MtpDataPacket::getUInt16() {
+ int offset = mOffset;
+ uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8);
+ mOffset += 2;
+ return result;
+}
+
+uint32_t MtpDataPacket::getUInt32() {
+ int offset = mOffset;
+ uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) |
+ ((uint32_t)mBuffer[offset + 2] << 16) | ((uint32_t)mBuffer[offset + 3] << 24);
+ mOffset += 4;
+ return result;
+}
+
+uint64_t MtpDataPacket::getUInt64() {
+ int offset = mOffset;
+ uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) |
+ ((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) |
+ ((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) |
+ ((uint64_t)mBuffer[offset + 6] << 48) | ((uint64_t)mBuffer[offset + 7] << 56);
+ mOffset += 8;
+ return result;
+}
+
+void MtpDataPacket::getUInt128(uint128_t& value) {
+ value[0] = getUInt32();
+ value[1] = getUInt32();
+ value[2] = getUInt32();
+ value[3] = getUInt32();
+}
+
+void MtpDataPacket::getString(MtpStringBuffer& string)
+{
+ string.readFromPacket(this);
+}
+
+Int8List* MtpDataPacket::getAInt8() {
+ Int8List* result = new Int8List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt8());
+ return result;
+}
+
+UInt8List* MtpDataPacket::getAUInt8() {
+ UInt8List* result = new UInt8List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt8());
+ return result;
+}
+
+Int16List* MtpDataPacket::getAInt16() {
+ Int16List* result = new Int16List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt16());
+ return result;
+}
+
+UInt16List* MtpDataPacket::getAUInt16() {
+ UInt16List* result = new UInt16List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt16());
+ return result;
+}
+
+Int32List* MtpDataPacket::getAInt32() {
+ Int32List* result = new Int32List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt32());
+ return result;
+}
+
+UInt32List* MtpDataPacket::getAUInt32() {
+ UInt32List* result = new UInt32List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt32());
+ return result;
+}
+
+Int64List* MtpDataPacket::getAInt64() {
+ Int64List* result = new Int64List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getInt64());
+ return result;
+}
+
+UInt64List* MtpDataPacket::getAUInt64() {
+ UInt64List* result = new UInt64List;
+ int count = getUInt32();
+ for (int i = 0; i < count; i++)
+ result->push(getUInt64());
+ return result;
+}
+
+void MtpDataPacket::putInt8(int8_t value) {
+ allocate(mOffset + 1);
+ mBuffer[mOffset++] = (uint8_t)value;
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt8(uint8_t value) {
+ allocate(mOffset + 1);
+ mBuffer[mOffset++] = (uint8_t)value;
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt16(int16_t value) {
+ allocate(mOffset + 2);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt16(uint16_t value) {
+ allocate(mOffset + 2);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt32(int32_t value) {
+ allocate(mOffset + 4);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt32(uint32_t value) {
+ allocate(mOffset + 4);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt64(int64_t value) {
+ allocate(mOffset + 8);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putUInt64(uint64_t value) {
+ allocate(mOffset + 8);
+ mBuffer[mOffset++] = (uint8_t)(value & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF);
+ mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF);
+ if (mPacketSize < mOffset)
+ mPacketSize = mOffset;
+}
+
+void MtpDataPacket::putInt128(const int128_t& value) {
+ putInt32(value[0]);
+ putInt32(value[1]);
+ putInt32(value[2]);
+ putInt32(value[3]);
+}
+
+void MtpDataPacket::putUInt128(const uint128_t& value) {
+ putUInt32(value[0]);
+ putUInt32(value[1]);
+ putUInt32(value[2]);
+ putUInt32(value[3]);
+}
+
+void MtpDataPacket::putInt128(int64_t value) {
+ putInt64(value);
+ putInt64(value < 0 ? -1 : 0);
+}
+
+void MtpDataPacket::putUInt128(uint64_t value) {
+ putUInt64(value);
+ putUInt64(0);
+}
+
+void MtpDataPacket::putAInt8(const int8_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt8(*values++);
+}
+
+void MtpDataPacket::putAUInt8(const uint8_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt8(*values++);
+}
+
+void MtpDataPacket::putAInt16(const int16_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const uint16_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt16(*values++);
+}
+
+void MtpDataPacket::putAUInt16(const UInt16List* values) {
+ size_t count = (values ? values->size() : 0);
+ putUInt32(count);
+ for (size_t i = 0; i < count; i++)
+ putUInt16((*values)[i]);
+}
+
+void MtpDataPacket::putAInt32(const int32_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const uint32_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt32(*values++);
+}
+
+void MtpDataPacket::putAUInt32(const UInt32List* list) {
+ if (!list) {
+ putEmptyArray();
+ } else {
+ size_t size = list->size();
+ putUInt32(size);
+ for (size_t i = 0; i < size; i++)
+ putUInt32((*list)[i]);
+ }
+}
+
+void MtpDataPacket::putAInt64(const int64_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putInt64(*values++);
+}
+
+void MtpDataPacket::putAUInt64(const uint64_t* values, int count) {
+ putUInt32(count);
+ for (int i = 0; i < count; i++)
+ putUInt64(*values++);
+}
+
+void MtpDataPacket::putString(const MtpStringBuffer& string) {
+ string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const char* s) {
+ MtpStringBuffer string(s);
+ string.writeToPacket(this);
+}
+
+void MtpDataPacket::putString(const uint16_t* string) {
+ int count = 0;
+ for (int i = 0; i < 256; i++) {
+ if (string[i])
+ count++;
+ else
+ break;
+ }
+ putUInt8(count > 0 ? count + 1 : 0);
+ for (int i = 0; i < count; i++)
+ putUInt16(string[i]);
+ // only terminate with zero if string is not empty
+ if (count > 0)
+ putUInt16(0);
+}
+
+#ifdef MTP_DEVICE
+int MtpDataPacket::read(int fd) {
+ // first read the header
+ int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret != MTP_CONTAINER_HEADER_SIZE)
+ return -1;
+ // then the following data
+ int total = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+ int remaining = total - MTP_CONTAINER_HEADER_SIZE;
+ ret = ::read(fd, &mBuffer[0] + MTP_CONTAINER_HEADER_SIZE, remaining);
+ if (ret != remaining)
+ return -1;
+
+ mPacketSize = total;
+ mOffset = MTP_CONTAINER_HEADER_SIZE;
+ return total;
+}
+
+int MtpDataPacket::readDataHeader(int fd) {
+ int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret > 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+
+int MtpDataPacket::write(int fd) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ dump();
+ // send header separately from data
+ int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret == MTP_CONTAINER_HEADER_SIZE)
+ ret = ::write(fd, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+ mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::writeDataHeader(int fd, uint32_t length) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+#endif // MTP_DEVICE
+
+#ifdef MTP_HOST
+int MtpDataPacket::read(struct usb_endpoint *ep) {
+ // first read the header
+ int length = transfer(ep, mBuffer, mBufferSize);
+ if (length >= MTP_CONTAINER_HEADER_SIZE) {
+ // look at the length field to see if the data spans multiple packets
+ uint32_t totalLength = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET);
+ while (totalLength > length) {
+ allocate(length + mAllocationIncrement);
+ int ret = transfer(ep, mBuffer + length, mAllocationIncrement);
+ if (ret >= 0)
+ length += ret;
+ else {
+ length = ret;
+ break;
+ }
+ }
+ }
+ if (length >= 0)
+ mPacketSize = length;
+ return length;
+}
+
+int MtpDataPacket::readData(struct usb_endpoint *ep, void* buffer, int length) {
+ int packetSize = usb_endpoint_max_packet(ep);
+ int read = 0;
+ while (read < length) {
+ int ret = transfer(ep, (char *)buffer + read, packetSize);
+ if (ret < 0) {
+printf("MtpDataPacket::readData returning %d\n", ret);
+ return ret;
+ }
+ read += ret;
+ }
+printf("MtpDataPacket::readData returning %d\n", read);
+ return read;
+}
+
+int MtpDataPacket::readDataHeader(struct usb_endpoint *ep) {
+ int length = transfer(ep, mBuffer, usb_endpoint_max_packet(ep));
+ if (length >= 0)
+ mPacketSize = length;
+ return length;
+}
+
+int MtpDataPacket::writeDataHeader(struct usb_endpoint *ep, uint32_t length) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+ int ret = transfer(ep, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_endpoint *ep) {
+ MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA);
+
+ // send header separately from data
+ int ret = transfer(ep, mBuffer, MTP_CONTAINER_HEADER_SIZE);
+ if (ret == MTP_CONTAINER_HEADER_SIZE)
+ ret = transfer(ep, mBuffer + MTP_CONTAINER_HEADER_SIZE,
+ mPacketSize - MTP_CONTAINER_HEADER_SIZE);
+ return (ret < 0 ? ret : 0);
+}
+
+int MtpDataPacket::write(struct usb_endpoint *ep, void* buffer, uint32_t length) {
+ int ret = 0;
+ uint32_t packetSize = usb_endpoint_max_packet(ep);
+ char* data = (char *)buffer;
+ while (length > 0) {
+ uint32_t write = (length > packetSize ? packetSize : length);
+ // reuse the outer ret so a transfer error propagates to the return value,
+ // and advance the data pointer past the bytes already sent
+ ret = transfer(ep, data, write);
+ if (ret < 0)
+ break;
+ data += ret;
+ length -= ret;
+ }
+ return (ret < 0 ? ret : 0);
+}
+
+#endif // MTP_HOST
+
+void* MtpDataPacket::getData(int& outLength) const {
+ int length = mPacketSize - MTP_CONTAINER_HEADER_SIZE;
+ if (length > 0) {
+ void* result = malloc(length);
+ if (result) {
+ memcpy(result, mBuffer + MTP_CONTAINER_HEADER_SIZE, length);
+ outLength = length;
+ return result;
+ }
+ }
+ outLength = 0;
+ return NULL;
+}
+
+} // namespace android
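
All multi-byte put/get helpers above use little-endian byte order, as the MTP container format requires, and advance mOffset (and mPacketSize) past the 12-byte container header. A small worked example on a freshly constructed packet:

    MtpDataPacket packet;              // offset starts at MTP_CONTAINER_HEADER_SIZE (12)
    packet.putUInt32(0x12345678);      // buffer bytes 12..15 become 78 56 34 12
    packet.putUInt16(0xABCD);          // buffer bytes 16..17 become CD AB
    // mPacketSize is now 18: the header plus six payload bytes
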
diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h
new file mode 100644
index 0000000..fab6a07
--- /dev/null
+++ b/media/mtp/MtpDataPacket.h
@@ -0,0 +1,118 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATA_PACKET_H
+#define _MTP_DATA_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpStringBuffer;
+
+class MtpDataPacket : public MtpPacket {
+private:
+ // current offset for get/put methods
+ int mOffset;
+
+public:
+ MtpDataPacket();
+ virtual ~MtpDataPacket();
+
+ virtual void reset();
+
+ void setOperationCode(MtpOperationCode code);
+ void setTransactionID(MtpTransactionID id);
+
+ inline uint8_t getUInt8() { return (uint8_t)mBuffer[mOffset++]; }
+ inline int8_t getInt8() { return (int8_t)mBuffer[mOffset++]; }
+ uint16_t getUInt16();
+ inline int16_t getInt16() { return (int16_t)getUInt16(); }
+ uint32_t getUInt32();
+ inline int32_t getInt32() { return (int32_t)getUInt32(); }
+ uint64_t getUInt64();
+ inline int64_t getInt64() { return (int64_t)getUInt64(); }
+ void getUInt128(uint128_t& value);
+ inline void getInt128(int128_t& value) { getUInt128((uint128_t&)value); }
+ void getString(MtpStringBuffer& string);
+
+ Int8List* getAInt8();
+ UInt8List* getAUInt8();
+ Int16List* getAInt16();
+ UInt16List* getAUInt16();
+ Int32List* getAInt32();
+ UInt32List* getAUInt32();
+ Int64List* getAInt64();
+ UInt64List* getAUInt64();
+
+ void putInt8(int8_t value);
+ void putUInt8(uint8_t value);
+ void putInt16(int16_t value);
+ void putUInt16(uint16_t value);
+ void putInt32(int32_t value);
+ void putUInt32(uint32_t value);
+ void putInt64(int64_t value);
+ void putUInt64(uint64_t value);
+ void putInt128(const int128_t& value);
+ void putUInt128(const uint128_t& value);
+ void putInt128(int64_t value);
+ void putUInt128(uint64_t value);
+
+ void putAInt8(const int8_t* values, int count);
+ void putAUInt8(const uint8_t* values, int count);
+ void putAInt16(const int16_t* values, int count);
+ void putAUInt16(const uint16_t* values, int count);
+ void putAUInt16(const UInt16List* values);
+ void putAInt32(const int32_t* values, int count);
+ void putAUInt32(const uint32_t* values, int count);
+ void putAUInt32(const UInt32List* list);
+ void putAInt64(const int64_t* values, int count);
+ void putAUInt64(const uint64_t* values, int count);
+ void putString(const MtpStringBuffer& string);
+ void putString(const char* string);
+ void putString(const uint16_t* string);
+ inline void putEmptyString() { putUInt8(0); }
+ inline void putEmptyArray() { putUInt32(0); }
+
+
+#ifdef MTP_DEVICE
+ // fill our buffer with data from the given file descriptor
+ int read(int fd);
+ int readDataHeader(int fd);
+
+ // write our data to the given file descriptor
+ int write(int fd);
+ int writeDataHeader(int fd, uint32_t length);
+#endif
+
+#ifdef MTP_HOST
+ int read(struct usb_endpoint *ep);
+ int readData(struct usb_endpoint *ep, void* buffer, int length);
+ int readDataHeader(struct usb_endpoint *ep);
+
+ int writeDataHeader(struct usb_endpoint *ep, uint32_t length);
+ int write(struct usb_endpoint *ep);
+ int write(struct usb_endpoint *ep, void* buffer, uint32_t length);
+#endif
+
+ inline bool hasData() const { return mPacketSize > MTP_CONTAINER_HEADER_SIZE; }
+ void* getData(int& outLength) const;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATA_PACKET_H
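
The same class serves both ends of the link: the MTP_DEVICE block talks to the kernel gadget driver through a file descriptor, while the MTP_HOST block drives libusbhost endpoints. A rough host-side sketch, assuming an already-opened struct usb_endpoint* named ep and MTP_HOST defined:

    // Sketch only: 'ep' is assumed to be an open bulk-in endpoint.
    MtpDataPacket data;                       // offset starts just past the container header
    int len = data.read(ep);                  // reads header plus payload, records the packet size
    if (len >= 0 && data.hasData()) {
        uint32_t firstField = data.getUInt32();   // payload fields are parsed in order
    }
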
diff --git a/media/mtp/MtpDatabase.h b/media/mtp/MtpDatabase.h
new file mode 100644
index 0000000..c8cb016
--- /dev/null
+++ b/media/mtp/MtpDatabase.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DATABASE_H
+#define _MTP_DATABASE_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+class MtpProperty;
+
+class MtpDatabase {
+public:
+ virtual ~MtpDatabase() {}
+
+ // called from SendObjectInfo to reserve a database entry for the incoming file
+ virtual MtpObjectHandle beginSendObject(const char* path,
+ MtpObjectFormat format,
+ MtpObjectHandle parent,
+ MtpStorageID storage,
+ uint64_t size,
+ time_t modified) = 0;
+
+ // called to report success or failure of the SendObject file transfer
+ // success should signal a notification of the new object's creation,
+ // failure should remove the database entry created in beginSendObject
+ virtual void endSendObject(const char* path,
+ MtpObjectHandle handle,
+ MtpObjectFormat format,
+ bool succeeded) = 0;
+
+ virtual MtpObjectHandleList* getObjectList(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) = 0;
+
+ virtual int getNumObjects(MtpStorageID storageID,
+ MtpObjectFormat format,
+ MtpObjectHandle parent) = 0;
+
+ // the caller is responsible for deleting the lists returned by these;
+ // results can be NULL
+ virtual MtpObjectFormatList* getSupportedPlaybackFormats() = 0;
+ virtual MtpObjectFormatList* getSupportedCaptureFormats() = 0;
+ virtual MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format) = 0;
+ virtual MtpDevicePropertyList* getSupportedDeviceProperties() = 0;
+
+ virtual MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle,
+ MtpObjectProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode resetDeviceProperty(MtpDeviceProperty property) = 0;
+
+ virtual MtpResponseCode getObjectInfo(MtpObjectHandle handle,
+ MtpDataPacket& packet) = 0;
+
+ virtual MtpResponseCode getObjectFilePath(MtpObjectHandle handle,
+ MtpString& filePath,
+ int64_t& fileLength) = 0;
+
+ virtual MtpResponseCode deleteFile(MtpObjectHandle handle) = 0;
+
+ virtual MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle) = 0;
+
+ virtual MtpResponseCode setObjectReferences(MtpObjectHandle handle,
+ MtpObjectHandleList* references) = 0;
+
+ virtual MtpProperty* getObjectPropertyDesc(MtpObjectProperty property,
+ MtpObjectFormat format) = 0;
+
+ virtual MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property) = 0;
+
+ virtual void sessionStarted() = 0;
+
+ virtual void sessionEnded() = 0;
+};
+
+}; // namespace android
+
+#endif // _MTP_DATABASE_H
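
MtpDatabase is a pure interface; the MTP server delegates every object-index operation to a concrete implementation supplied by the platform. A heavily abbreviated, hypothetical sketch of implementing the SendObjectInfo contract described in the comments above:

    // Illustrative stub only; MyMtpDatabase and its bodies are not part of the patch,
    // and the remaining pure virtuals would still need to be overridden.
    class MyMtpDatabase : public MtpDatabase {
    public:
        virtual MtpObjectHandle beginSendObject(const char* path, MtpObjectFormat format,
                MtpObjectHandle parent, MtpStorageID storage,
                uint64_t size, time_t modified) {
            // reserve an entry for the incoming file and return its new handle
            return 0;   // placeholder
        }
        virtual void endSendObject(const char* path, MtpObjectHandle handle,
                MtpObjectFormat format, bool succeeded) {
            // on success announce the new object; on failure drop the reserved entry
        }
        // ... remaining pure virtuals omitted ...
    };
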
diff --git a/media/mtp/MtpDebug.cpp b/media/mtp/MtpDebug.cpp
new file mode 100644
index 0000000..d6b107d
--- /dev/null
+++ b/media/mtp/MtpDebug.cpp
@@ -0,0 +1,387 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "MtpDebug.h"
+
+namespace android {
+
+struct CodeEntry {
+ const char* name;
+ uint16_t code;
+};
+
+static const CodeEntry sOperationCodes[] = {
+ { "MTP_OPERATION_GET_DEVICE_INFO", 0x1001 },
+ { "MTP_OPERATION_OPEN_SESSION", 0x1002 },
+ { "MTP_OPERATION_CLOSE_SESSION", 0x1003 },
+ { "MTP_OPERATION_GET_STORAGE_IDS", 0x1004 },
+ { "MTP_OPERATION_GET_STORAGE_INFO", 0x1005 },
+ { "MTP_OPERATION_GET_NUM_OBJECTS", 0x1006 },
+ { "MTP_OPERATION_GET_OBJECT_HANDLES", 0x1007 },
+ { "MTP_OPERATION_GET_OBJECT_INFO", 0x1008 },
+ { "MTP_OPERATION_GET_OBJECT", 0x1009 },
+ { "MTP_OPERATION_GET_THUMB", 0x100A },
+ { "MTP_OPERATION_DELETE_OBJECT", 0x100B },
+ { "MTP_OPERATION_SEND_OBJECT_INFO", 0x100C },
+ { "MTP_OPERATION_SEND_OBJECT", 0x100D },
+ { "MTP_OPERATION_INITIATE_CAPTURE", 0x100E },
+ { "MTP_OPERATION_FORMAT_STORE", 0x100F },
+ { "MTP_OPERATION_RESET_DEVICE", 0x1010 },
+ { "MTP_OPERATION_SELF_TEST", 0x1011 },
+ { "MTP_OPERATION_SET_OBJECT_PROTECTION", 0x1012 },
+ { "MTP_OPERATION_POWER_DOWN", 0x1013 },
+ { "MTP_OPERATION_GET_DEVICE_PROP_DESC", 0x1014 },
+ { "MTP_OPERATION_GET_DEVICE_PROP_VALUE", 0x1015 },
+ { "MTP_OPERATION_SET_DEVICE_PROP_VALUE", 0x1016 },
+ { "MTP_OPERATION_RESET_DEVICE_PROP_VALUE", 0x1017 },
+ { "MTP_OPERATION_TERMINATE_OPEN_CAPTURE", 0x1018 },
+ { "MTP_OPERATION_MOVE_OBJECT", 0x1019 },
+ { "MTP_OPERATION_COPY_OBJECT", 0x101A },
+ { "MTP_OPERATION_GET_PARTIAL_OBJECT", 0x101B },
+ { "MTP_OPERATION_INITIATE_OPEN_CAPTURE", 0x101C },
+ { "MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED", 0x9801 },
+ { "MTP_OPERATION_GET_OBJECT_PROP_DESC", 0x9802 },
+ { "MTP_OPERATION_GET_OBJECT_PROP_VALUE", 0x9803 },
+ { "MTP_OPERATION_SET_OBJECT_PROP_VALUE", 0x9804 },
+ { "MTP_OPERATION_GET_OBJECT_REFERENCES", 0x9810 },
+ { "MTP_OPERATION_SET_OBJECT_REFERENCES", 0x9811 },
+ { "MTP_OPERATION_SKIP", 0x9820 },
+ { 0, 0 },
+};
+
+static const CodeEntry sFormatCodes[] = {
+ { "MTP_OPERATION_GET_DEVICE_INFO", 0x1001 },
+ { "MTP_FORMAT_UNDEFINED", 0x3000 },
+ { "MTP_FORMAT_ASSOCIATION", 0x3001 },
+ { "MTP_FORMAT_SCRIPT", 0x3002 },
+ { "MTP_FORMAT_EXECUTABLE", 0x3003 },
+ { "MTP_FORMAT_TEXT", 0x3004 },
+ { "MTP_FORMAT_HTML", 0x3005 },
+ { "MTP_FORMAT_DPOF", 0x3006 },
+ { "MTP_FORMAT_AIFF", 0x3007 },
+ { "MTP_FORMAT_WAV", 0x3008 },
+ { "MTP_FORMAT_MP3", 0x3009 },
+ { "MTP_FORMAT_AVI", 0x300A },
+ { "MTP_FORMAT_MPEG", 0x300B },
+ { "MTP_FORMAT_ASF", 0x300C },
+ { "MTP_FORMAT_DEFINED", 0x3800 },
+ { "MTP_FORMAT_EXIF_JPEG", 0x3801 },
+ { "MTP_FORMAT_TIFF_EP", 0x3802 },
+ { "MTP_FORMAT_FLASHPIX", 0x3803 },
+ { "MTP_FORMAT_BMP", 0x3804 },
+ { "MTP_FORMAT_CIFF", 0x3805 },
+ { "MTP_FORMAT_GIF", 0x3807 },
+ { "MTP_FORMAT_JFIF", 0x3808 },
+ { "MTP_FORMAT_CD", 0x3809 },
+ { "MTP_FORMAT_PICT", 0x380A },
+ { "MTP_FORMAT_PNG", 0x380B },
+ { "MTP_FORMAT_TIFF", 0x380D },
+ { "MTP_FORMAT_TIFF_IT", 0x380E },
+ { "MTP_FORMAT_JP2", 0x380F },
+ { "MTP_FORMAT_JPX", 0x3810 },
+ { "MTP_FORMAT_UNDEFINED_FIRMWARE", 0xB802 },
+ { "MTP_FORMAT_WINDOWS_IMAGE_FORMAT", 0xB881 },
+ { "MTP_FORMAT_UNDEFINED_AUDIO", 0xB900 },
+ { "MTP_FORMAT_WMA", 0xB901 },
+ { "MTP_FORMAT_OGG", 0xB902 },
+ { "MTP_FORMAT_AAC", 0xB903 },
+ { "MTP_FORMAT_AUDIBLE", 0xB904 },
+ { "MTP_FORMAT_FLAC", 0xB906 },
+ { "MTP_FORMAT_UNDEFINED_VIDEO", 0xB980 },
+ { "MTP_FORMAT_WMV", 0xB981 },
+ { "MTP_FORMAT_MP4_CONTAINER", 0xB982 },
+ { "MTP_FORMAT_MP2", 0xB983 },
+ { "MTP_FORMAT_3GP_CONTAINER", 0xB984 },
+ { "MTP_FORMAT_UNDEFINED_COLLECTION", 0xBA00 },
+ { "MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM", 0xBA01 },
+ { "MTP_FORMAT_ABSTRACT_IMAGE_ALBUM", 0xBA02 },
+ { "MTP_FORMAT_ABSTRACT_AUDIO_ALBUM", 0xBA03 },
+ { "MTP_FORMAT_ABSTRACT_VIDEO_ALBUM", 0xBA04 },
+ { "MTP_FORMAT_ABSTRACT_AV_PLAYLIST", 0xBA05 },
+ { "MTP_FORMAT_ABSTRACT_CONTACT_GROUP", 0xBA06 },
+ { "MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER", 0xBA07 },
+ { "MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION", 0xBA08 },
+ { "MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST", 0xBA09 },
+ { "MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST", 0xBA0A },
+ { "MTP_FORMAT_ABSTRACT_MEDIACAST", 0xBA0B },
+ { "MTP_FORMAT_WPL_PLAYLIST", 0xBA10 },
+ { "MTP_FORMAT_M3U_PLAYLIST", 0xBA11 },
+ { "MTP_FORMAT_MPL_PLAYLIST", 0xBA12 },
+ { "MTP_FORMAT_ASX_PLAYLIST", 0xBA13 },
+ { "MTP_FORMAT_PLS_PLAYLIST", 0xBA14 },
+ { "MTP_FORMAT_UNDEFINED_DOCUMENT", 0xBA80 },
+ { "MTP_FORMAT_ABSTRACT_DOCUMENT", 0xBA81 },
+ { "MTP_FORMAT_XML_DOCUMENT", 0xBA82 },
+ { "MTP_FORMAT_MS_WORD_DOCUMENT", 0xBA83 },
+ { "MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT", 0xBA84 },
+ { "MTP_FORMAT_MS_EXCEL_SPREADSHEET", 0xBA85 },
+ { "MTP_FORMAT_MS_POWERPOINT_PRESENTATION", 0xBA86 },
+ { "MTP_FORMAT_UNDEFINED_MESSAGE", 0xBB00 },
+ { "MTP_FORMAT_ABSTRACT_MESSSAGE", 0xBB01 },
+ { "MTP_FORMAT_UNDEFINED_CONTACT", 0xBB80 },
+ { "MTP_FORMAT_ABSTRACT_CONTACT", 0xBB81 },
+ { "MTP_FORMAT_VCARD_2", 0xBB82 },
+ { 0, 0 },
+};
+
+static const CodeEntry sObjectPropCodes[] = {
+ { "MTP_PROPERTY_STORAGE_ID", 0xDC01 },
+ { "MTP_PROPERTY_OBJECT_FORMAT", 0xDC02 },
+ { "MTP_PROPERTY_PROTECTION_STATUS", 0xDC03 },
+ { "MTP_PROPERTY_OBJECT_SIZE", 0xDC04 },
+ { "MTP_PROPERTY_ASSOCIATION_TYPE", 0xDC05 },
+ { "MTP_PROPERTY_ASSOCIATION_DESC", 0xDC06 },
+ { "MTP_PROPERTY_OBJECT_FILE_NAME", 0xDC07 },
+ { "MTP_PROPERTY_DATE_CREATED", 0xDC08 },
+ { "MTP_PROPERTY_DATE_MODIFIED", 0xDC09 },
+ { "MTP_PROPERTY_KEYWORDS", 0xDC0A },
+ { "MTP_PROPERTY_PARENT_OBJECT", 0xDC0B },
+ { "MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS", 0xDC0C },
+ { "MTP_PROPERTY_HIDDEN", 0xDC0D },
+ { "MTP_PROPERTY_SYSTEM_OBJECT", 0xDC0E },
+ { "MTP_PROPERTY_PERSISTENT_UID", 0xDC41 },
+ { "MTP_PROPERTY_SYNC_ID", 0xDC42 },
+ { "MTP_PROPERTY_PROPERTY_BAG", 0xDC43 },
+ { "MTP_PROPERTY_NAME", 0xDC44 },
+ { "MTP_PROPERTY_CREATED_BY", 0xDC45 },
+ { "MTP_PROPERTY_ARTIST", 0xDC46 },
+ { "MTP_PROPERTY_DATE_AUTHORED", 0xDC47 },
+ { "MTP_PROPERTY_DESCRIPTION", 0xDC48 },
+ { "MTP_PROPERTY_URL_REFERENCE", 0xDC49 },
+ { "MTP_PROPERTY_LANGUAGE_LOCALE", 0xDC4A },
+ { "MTP_PROPERTY_COPYRIGHT_INFORMATION", 0xDC4B },
+ { "MTP_PROPERTY_SOURCE", 0xDC4C },
+ { "MTP_PROPERTY_ORIGIN_LOCATION", 0xDC4D },
+ { "MTP_PROPERTY_DATE_ADDED", 0xDC4E },
+ { "MTP_PROPERTY_NON_CONSUMABLE", 0xDC4F },
+ { "MTP_PROPERTY_CORRUPT_UNPLAYABLE", 0xDC50 },
+ { "MTP_PROPERTY_PRODUCER_SERIAL_NUMBER", 0xDC51 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT", 0xDC81 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE", 0xDC82 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT", 0xDC83 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH", 0xDC84 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION", 0xDC85 },
+ { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA", 0xDC86 },
+ { "MTP_PROPERTY_WIDTH", 0xDC87 },
+ { "MTP_PROPERTY_HEIGHT", 0xDC88 },
+ { "MTP_PROPERTY_DURATION", 0xDC89 },
+ { "MTP_PROPERTY_RATING", 0xDC8A },
+ { "MTP_PROPERTY_TRACK", 0xDC8B },
+ { "MTP_PROPERTY_GENRE", 0xDC8C },
+ { "MTP_PROPERTY_CREDITS", 0xDC8D },
+ { "MTP_PROPERTY_LYRICS", 0xDC8E },
+ { "MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID", 0xDC8F },
+ { "MTP_PROPERTY_PRODUCED_BY", 0xDC90 },
+ { "MTP_PROPERTY_USE_COUNT", 0xDC91 },
+ { "MTP_PROPERTY_SKIP_COUNT", 0xDC92 },
+ { "MTP_PROPERTY_LAST_ACCESSED", 0xDC93 },
+ { "MTP_PROPERTY_PARENTAL_RATING", 0xDC94 },
+ { "MTP_PROPERTY_META_GENRE", 0xDC95 },
+ { "MTP_PROPERTY_COMPOSER", 0xDC96 },
+ { "MTP_PROPERTY_EFFECTIVE_RATING", 0xDC97 },
+ { "MTP_PROPERTY_SUBTITLE", 0xDC98 },
+ { "MTP_PROPERTY_ORIGINAL_RELEASE_DATE", 0xDC99 },
+ { "MTP_PROPERTY_ALBUM_NAME", 0xDC9A },
+ { "MTP_PROPERTY_ALBUM_ARTIST", 0xDC9B },
+ { "MTP_PROPERTY_MOOD", 0xDC9C },
+ { "MTP_PROPERTY_DRM_STATUS", 0xDC9D },
+ { "MTP_PROPERTY_SUB_DESCRIPTION", 0xDC9E },
+ { "MTP_PROPERTY_IS_CROPPED", 0xDCD1 },
+ { "MTP_PROPERTY_IS_COLOUR_CORRECTED", 0xDCD2 },
+ { "MTP_PROPERTY_IMAGE_BIT_DEPTH", 0xDCD3 },
+ { "MTP_PROPERTY_F_NUMBER", 0xDCD4 },
+ { "MTP_PROPERTY_EXPOSURE_TIME", 0xDCD5 },
+ { "MTP_PROPERTY_EXPOSURE_INDEX", 0xDCD6 },
+ { "MTP_PROPERTY_TOTAL_BITRATE", 0xDE91 },
+ { "MTP_PROPERTY_BITRATE_TYPE", 0xDE92 },
+ { "MTP_PROPERTY_SAMPLE_RATE", 0xDE93 },
+ { "MTP_PROPERTY_NUMBER_OF_CHANNELS", 0xDE94 },
+ { "MTP_PROPERTY_AUDIO_BIT_DEPTH", 0xDE95 },
+ { "MTP_PROPERTY_SCAN_TYPE", 0xDE97 },
+ { "MTP_PROPERTY_AUDIO_WAVE_CODEC", 0xDE99 },
+ { "MTP_PROPERTY_AUDIO_BITRATE", 0xDE9A },
+ { "MTP_PROPERTY_VIDEO_FOURCC_CODEC", 0xDE9B },
+ { "MTP_PROPERTY_VIDEO_BITRATE", 0xDE9C },
+ { "MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS", 0xDE9D },
+ { "MTP_PROPERTY_KEYFRAME_DISTANCE", 0xDE9E },
+ { "MTP_PROPERTY_BUFFER_SIZE", 0xDE9F },
+ { "MTP_PROPERTY_ENCODING_QUALITY", 0xDEA0 },
+ { "MTP_PROPERTY_ENCODING_PROFILE", 0xDEA1 },
+ { "MTP_PROPERTY_DISPLAY_NAME", 0xDCE0 },
+ { "MTP_PROPERTY_BODY_TEXT", 0xDCE1 },
+ { "MTP_PROPERTY_SUBJECT", 0xDCE2 },
+ { "MTP_PROPERTY_PRIORITY", 0xDCE3 },
+ { "MTP_PROPERTY_GIVEN_NAME", 0xDD00 },
+ { "MTP_PROPERTY_MIDDLE_NAMES", 0xDD01 },
+ { "MTP_PROPERTY_FAMILY_NAME", 0xDD02 },
+ { "MTP_PROPERTY_PREFIX", 0xDD03 },
+ { "MTP_PROPERTY_SUFFIX", 0xDD04 },
+ { "MTP_PROPERTY_PHONETIC_GIVEN_NAME", 0xDD05 },
+ { "MTP_PROPERTY_PHONETIC_FAMILY_NAME", 0xDD06 },
+ { "MTP_PROPERTY_EMAIL_PRIMARY", 0xDD07 },
+ { "MTP_PROPERTY_EMAIL_PERSONAL_1", 0xDD08 },
+ { "MTP_PROPERTY_EMAIL_PERSONAL_2", 0xDD09 },
+ { "MTP_PROPERTY_EMAIL_BUSINESS_1", 0xDD0A },
+ { "MTP_PROPERTY_EMAIL_BUSINESS_2", 0xDD0B },
+ { "MTP_PROPERTY_EMAIL_OTHERS", 0xDD0C },
+ { "MTP_PROPERTY_PHONE_NUMBER_PRIMARY", 0xDD0D },
+ { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL", 0xDD0E },
+ { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2", 0xDD0F },
+ { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS", 0xDD10 },
+ { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2", 0xDD11 },
+ { "MTP_PROPERTY_PHONE_NUMBER_MOBILE", 0xDD12 },
+ { "MTP_PROPERTY_PHONE_NUMBER_MOBILE_2", 0xDD13 },
+ { "MTP_PROPERTY_FAX_NUMBER_PRIMARY", 0xDD14 },
+ { "MTP_PROPERTY_FAX_NUMBER_PERSONAL", 0xDD15 },
+ { "MTP_PROPERTY_FAX_NUMBER_BUSINESS", 0xDD16 },
+ { "MTP_PROPERTY_PAGER_NUMBER", 0xDD17 },
+ { "MTP_PROPERTY_PHONE_NUMBER_OTHERS", 0xDD18 },
+ { "MTP_PROPERTY_PRIMARY_WEB_ADDRESS", 0xDD19 },
+ { "MTP_PROPERTY_PERSONAL_WEB_ADDRESS", 0xDD1A },
+ { "MTP_PROPERTY_BUSINESS_WEB_ADDRESS", 0xDD1B },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS", 0xDD1C },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2", 0xDD1D },
+ { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3", 0xDD1E },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL", 0xDD1F },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1", 0xDD20 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2", 0xDD21 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY", 0xDD22 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION", 0xDD23 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE", 0xDD24 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY", 0xDD25 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL", 0xDD26 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1", 0xDD27 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2", 0xDD28 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY", 0xDD29 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION", 0xDD2A },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE", 0xDD2B },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY", 0xDD2C },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL", 0xDD2D },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1", 0xDD2E },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2", 0xDD2F },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY", 0xDD30 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION", 0xDD31 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE", 0xDD32 },
+ { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY", 0xDD33 },
+ { "MTP_PROPERTY_ORGANIZATION_NAME", 0xDD34 },
+ { "MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME", 0xDD35 },
+ { "MTP_PROPERTY_ROLE", 0xDD36 },
+ { "MTP_PROPERTY_BIRTHDATE", 0xDD37 },
+ { "MTP_PROPERTY_MESSAGE_TO", 0xDD40 },
+ { "MTP_PROPERTY_MESSAGE_CC", 0xDD41 },
+ { "MTP_PROPERTY_MESSAGE_BCC", 0xDD42 },
+ { "MTP_PROPERTY_MESSAGE_READ", 0xDD43 },
+ { "MTP_PROPERTY_MESSAGE_RECEIVED_TIME", 0xDD44 },
+ { "MTP_PROPERTY_MESSAGE_SENDER", 0xDD45 },
+ { "MTP_PROPERTY_ACTIVITY_BEGIN_TIME", 0xDD50 },
+ { "MTP_PROPERTY_ACTIVITY_END_TIME", 0xDD51 },
+ { "MTP_PROPERTY_ACTIVITY_LOCATION", 0xDD52 },
+ { "MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES", 0xDD54 },
+ { "MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES", 0xDD55 },
+ { "MTP_PROPERTY_ACTIVITY_RESOURCES", 0xDD56 },
+ { "MTP_PROPERTY_ACTIVITY_ACCEPTED", 0xDD57 },
+ { "MTP_PROPERTY_ACTIVITY_TENTATIVE", 0xDD58 },
+ { "MTP_PROPERTY_ACTIVITY_DECLINED", 0xDD59 },
+ { "MTP_PROPERTY_ACTIVITY_REMAINDER_TIME", 0xDD5A },
+ { "MTP_PROPERTY_ACTIVITY_OWNER", 0xDD5B },
+ { "MTP_PROPERTY_ACTIVITY_STATUS", 0xDD5C },
+ { "MTP_PROPERTY_OWNER", 0xDD5D },
+ { "MTP_PROPERTY_EDITOR", 0xDD5E },
+ { "MTP_PROPERTY_WEBMASTER", 0xDD5F },
+ { "MTP_PROPERTY_URL_SOURCE", 0xDD60 },
+ { "MTP_PROPERTY_URL_DESTINATION", 0xDD61 },
+ { "MTP_PROPERTY_TIME_BOOKMARK", 0xDD62 },
+ { "MTP_PROPERTY_OBJECT_BOOKMARK", 0xDD63 },
+ { "MTP_PROPERTY_BYTE_BOOKMARK", 0xDD64 },
+ { "MTP_PROPERTY_LAST_BUILD_DATE", 0xDD70 },
+ { "MTP_PROPERTY_TIME_TO_LIVE", 0xDD71 },
+ { "MTP_PROPERTY_MEDIA_GUID", 0xDD72 },
+ { 0, 0 },
+};
+
+static const CodeEntry sDevicePropCodes[] = {
+ { "MTP_DEVICE_PROPERTY_UNDEFINED", 0x5000 },
+ { "MTP_DEVICE_PROPERTY_BATTERY_LEVEL", 0x5001 },
+ { "MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE", 0x5002 },
+ { "MTP_DEVICE_PROPERTY_IMAGE_SIZE", 0x5003 },
+ { "MTP_DEVICE_PROPERTY_COMPRESSION_SETTING", 0x5004 },
+ { "MTP_DEVICE_PROPERTY_WHITE_BALANCE", 0x5005 },
+ { "MTP_DEVICE_PROPERTY_RGB_GAIN", 0x5006 },
+ { "MTP_DEVICE_PROPERTY_F_NUMBER", 0x5007 },
+ { "MTP_DEVICE_PROPERTY_FOCAL_LENGTH", 0x5008 },
+ { "MTP_DEVICE_PROPERTY_FOCUS_DISTANCE", 0x5009 },
+ { "MTP_DEVICE_PROPERTY_FOCUS_MODE", 0x500A },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE", 0x500B },
+ { "MTP_DEVICE_PROPERTY_FLASH_MODE", 0x500C },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_TIME", 0x500D },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE", 0x500E },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_INDEX", 0x500F },
+ { "MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION", 0x5010 },
+ { "MTP_DEVICE_PROPERTY_DATETIME", 0x5011 },
+ { "MTP_DEVICE_PROPERTY_CAPTURE_DELAY", 0x5012 },
+ { "MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE", 0x5013 },
+ { "MTP_DEVICE_PROPERTY_CONTRAST", 0x5014 },
+ { "MTP_DEVICE_PROPERTY_SHARPNESS", 0x5015 },
+ { "MTP_DEVICE_PROPERTY_DIGITAL_ZOOM", 0x5016 },
+ { "MTP_DEVICE_PROPERTY_EFFECT_MODE", 0x5017 },
+ { "MTP_DEVICE_PROPERTY_BURST_NUMBER", 0x5018 },
+ { "MTP_DEVICE_PROPERTY_BURST_INTERVAL", 0x5019 },
+ { "MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER", 0x501A },
+ { "MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL", 0x501B },
+ { "MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE", 0x501C },
+ { "MTP_DEVICE_PROPERTY_UPLOAD_URL", 0x501D },
+ { "MTP_DEVICE_PROPERTY_ARTIST", 0x501E },
+ { "MTP_DEVICE_PROPERTY_COPYRIGHT_INFO", 0x501F },
+ { "MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER", 0xD401 },
+ { "MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME", 0xD402 },
+ { "MTP_DEVICE_PROPERTY_VOLUME", 0xD403 },
+ { "MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED", 0xD404 },
+ { "MTP_DEVICE_PROPERTY_DEVICE_ICON", 0xD405 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_RATE", 0xD410 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT", 0xD411 },
+ { "MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX", 0xD412 },
+ { "MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO", 0xD406 },
+ { "MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE", 0xD407 },
+ { 0, 0 },
+};
+
+static const char* getCodeName(uint16_t code, const CodeEntry* table) {
+ const CodeEntry* entry = table;
+ while (entry->name) {
+ if (entry->code == code)
+ return entry->name;
+ entry++;
+ }
+ return "UNKNOWN";
+}
+
+const char* MtpDebug::getOperationCodeName(MtpOperationCode code) {
+ return getCodeName(code, sOperationCodes);
+}
+
+const char* MtpDebug::getFormatCodeName(MtpObjectFormat code) {
+ return getCodeName(code, sFormatCodes);
+}
+
+const char* MtpDebug::getObjectPropCodeName(MtpPropertyCode code) {
+ return getCodeName(code, sObjectPropCodes);
+}
+
+const char* MtpDebug::getDevicePropCodeName(MtpPropertyCode code) {
+ return getCodeName(code, sDevicePropCodes);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpDebug.h b/media/mtp/MtpDebug.h
new file mode 100644
index 0000000..5b53e31
--- /dev/null
+++ b/media/mtp/MtpDebug.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEBUG_H
+#define _MTP_DEBUG_H
+
+// #define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDebug {
+public:
+ static const char* getOperationCodeName(MtpOperationCode code);
+ static const char* getFormatCodeName(MtpObjectFormat code);
+ static const char* getObjectPropCodeName(MtpPropertyCode code);
+ static const char* getDevicePropCodeName(MtpPropertyCode code);
+};
+
+}; // namespace android
+
+#endif // _MTP_DEBUG_H
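
These helpers exist purely for tracing; a typical use is turning a raw code into its symbolic name in a log line, for example:

    // Sketch: 0x1002 is MTP_OPERATION_OPEN_SESSION in the table above.
    MtpOperationCode code = 0x1002;
    LOGV("handling %s", MtpDebug::getOperationCodeName(code));
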
diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp
new file mode 100644
index 0000000..fca0142
--- /dev/null
+++ b/media/mtp/MtpDevice.cpp
@@ -0,0 +1,496 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDevice"
+
+#include "MtpDebug.h"
+#include "MtpDevice.h"
+#include "MtpDeviceInfo.h"
+#include "MtpObjectInfo.h"
+#include "MtpProperty.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <endian.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpDevice::MtpDevice(struct usb_device* device, int interface,
+ struct usb_endpoint *ep_in, struct usb_endpoint *ep_out,
+ struct usb_endpoint *ep_intr)
+ : mDevice(device),
+ mInterface(interface),
+ mEndpointIn(ep_in),
+ mEndpointOut(ep_out),
+ mEndpointIntr(ep_intr),
+ mDeviceInfo(NULL),
+ mID(usb_device_get_unique_id(device)),
+ mSessionID(0),
+ mTransactionID(0)
+{
+}
+
+MtpDevice::~MtpDevice() {
+ close();
+ for (int i = 0; i < mDeviceProperties.size(); i++)
+ delete mDeviceProperties[i];
+}
+
+void MtpDevice::initialize() {
+ openSession();
+ mDeviceInfo = getDeviceInfo();
+ if (mDeviceInfo) {
+ mDeviceInfo->print();
+
+ if (mDeviceInfo->mDeviceProperties) {
+ int count = mDeviceInfo->mDeviceProperties->size();
+ for (int i = 0; i < count; i++) {
+ MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i];
+ MtpProperty* property = getDevicePropDesc(propCode);
+ if (property) {
+ property->print();
+ mDeviceProperties.push(property);
+ }
+ }
+ }
+ }
+}
+
+void MtpDevice::close() {
+ if (mDevice) {
+ usb_device_release_interface(mDevice, mInterface);
+ usb_device_close(mDevice);
+ mDevice = NULL;
+ }
+}
+
+const char* MtpDevice::getDeviceName() {
+ if (mDevice)
+ return usb_device_get_name(mDevice);
+ else
+ return "???";
+}
+
+bool MtpDevice::openSession() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mSessionID = 0;
+ mTransactionID = 0;
+ MtpSessionID newSession = 1;
+ mRequest.reset();
+ mRequest.setParameter(1, newSession);
+ if (!sendRequest(MTP_OPERATION_OPEN_SESSION))
+ return false;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_SESSION_ALREADY_OPEN)
+ newSession = mResponse.getParameter(1);
+ else if (ret != MTP_RESPONSE_OK)
+ return false;
+
+ mSessionID = newSession;
+ mTransactionID = 1;
+ return true;
+}
+
+bool MtpDevice::closeSession() {
+ // FIXME
+ return true;
+}
+
+MtpDeviceInfo* MtpDevice::getDeviceInfo() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ if (!sendRequest(MTP_OPERATION_GET_DEVICE_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpDeviceInfo* info = new MtpDeviceInfo;
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+MtpStorageIDList* MtpDevice::getStorageIDs() {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ if (!sendRequest(MTP_OPERATION_GET_STORAGE_IDS))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getAUInt32();
+ }
+ return NULL;
+}
+
+MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, storageID);
+ if (!sendRequest(MTP_OPERATION_GET_STORAGE_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpStorageInfo* info = new MtpStorageInfo(storageID);
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+MtpObjectHandleList* MtpDevice::getObjectHandles(MtpStorageID storageID,
+ MtpObjectFormat format, MtpObjectHandle parent) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, storageID);
+ mRequest.setParameter(2, format);
+ mRequest.setParameter(3, parent);
+ if (!sendRequest(MTP_OPERATION_GET_OBJECT_HANDLES))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getAUInt32();
+ }
+ return NULL;
+}
+
+MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) {
+ Mutex::Autolock autoLock(mMutex);
+
+ // FIXME - we might want to add some caching here
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (!sendRequest(MTP_OPERATION_GET_OBJECT_INFO))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpObjectInfo* info = new MtpObjectInfo(handle);
+ info->read(mData);
+ return info;
+ }
+ return NULL;
+}
+
+void* MtpDevice::getThumbnail(MtpObjectHandle handle, int& outLength) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (sendRequest(MTP_OPERATION_GET_THUMB) && readData()) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ return mData.getData(outLength);
+ }
+ }
+ outLength = 0;
+ return NULL;
+}
+
+MtpObjectHandle MtpDevice::sendObjectInfo(MtpObjectInfo* info) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ MtpObjectHandle parent = info->mParent;
+ if (parent == 0)
+ parent = MTP_PARENT_ROOT;
+
+ mRequest.setParameter(1, info->mStorageID);
+ mRequest.setParameter(2, parent);
+
+ mData.putUInt32(info->mStorageID);
+ mData.putUInt16(info->mFormat);
+ mData.putUInt16(info->mProtectionStatus);
+ mData.putUInt32(info->mCompressedSize);
+ mData.putUInt16(info->mThumbFormat);
+ mData.putUInt32(info->mThumbCompressedSize);
+ mData.putUInt32(info->mThumbPixWidth);
+ mData.putUInt32(info->mThumbPixHeight);
+ mData.putUInt32(info->mImagePixWidth);
+ mData.putUInt32(info->mImagePixHeight);
+ mData.putUInt32(info->mImagePixDepth);
+ mData.putUInt32(info->mParent);
+ mData.putUInt16(info->mAssociationType);
+ mData.putUInt32(info->mAssociationDesc);
+ mData.putUInt32(info->mSequenceNumber);
+ mData.putString(info->mName);
+
+ char created[100], modified[100];
+ formatDateTime(info->mDateCreated, created, sizeof(created));
+ formatDateTime(info->mDateModified, modified, sizeof(modified));
+
+ mData.putString(created);
+ mData.putString(modified);
+ if (info->mKeywords)
+ mData.putString(info->mKeywords);
+ else
+ mData.putEmptyString();
+
+ if (sendRequest(MTP_OPERATION_SEND_OBJECT_INFO) && sendData()) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ info->mStorageID = mResponse.getParameter(1);
+ info->mParent = mResponse.getParameter(2);
+ info->mHandle = mResponse.getParameter(3);
+ return info->mHandle;
+ }
+ }
+ return (MtpObjectHandle)-1;
+}
+
+bool MtpDevice::sendObject(MtpObjectInfo* info, int srcFD) {
+ Mutex::Autolock autoLock(mMutex);
+
+ int remaining = info->mCompressedSize;
+ mRequest.reset();
+ mRequest.setParameter(1, info->mHandle);
+ if (sendRequest(MTP_OPERATION_SEND_OBJECT)) {
+ // send data header
+ writeDataHeader(MTP_OPERATION_SEND_OBJECT, remaining);
+
+ char buffer[65536];
+ while (remaining > 0) {
+ int count = read(srcFD, buffer, sizeof(buffer));
+ if (count > 0) {
+ int written = mData.write(mEndpointOut, buffer, count);
+ // FIXME check error
+ remaining -= count;
+ } else {
+ break;
+ }
+ }
+ }
+ MtpResponseCode ret = readResponse();
+ return (remaining == 0 && ret == MTP_RESPONSE_OK);
+}
+
+bool MtpDevice::deleteObject(MtpObjectHandle handle) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, handle);
+ if (sendRequest(MTP_OPERATION_DELETE_OBJECT)) {
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK)
+ return true;
+ }
+ return false;
+}
+
+MtpObjectHandle MtpDevice::getParent(MtpObjectHandle handle) {
+ MtpObjectInfo* info = getObjectInfo(handle);
+ if (info)
+ return info->mParent;
+ else
+ return -1;
+}
+
+MtpObjectHandle MtpDevice::getStorageID(MtpObjectHandle handle) {
+ MtpObjectInfo* info = getObjectInfo(handle);
+ if (info)
+ return info->mStorageID;
+ else
+ return -1;
+}
+
+MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mRequest.reset();
+ mRequest.setParameter(1, code);
+ if (!sendRequest(MTP_OPERATION_GET_DEVICE_PROP_DESC))
+ return NULL;
+ if (!readData())
+ return NULL;
+ MtpResponseCode ret = readResponse();
+ if (ret == MTP_RESPONSE_OK) {
+ MtpProperty* property = new MtpProperty;
+ property->read(mData);
+ return property;
+ }
+ return NULL;
+}
+
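+// Helper thread for MtpDevice::readObject().  init() issues GetObject and
+// creates a pipe; threadLoop() then streams the remaining object data from the
+// IN endpoint into the write end of the pipe so the caller can consume it
+// incrementally from the read end.  The device mutex taken in readObject() is
+// released once the transfer completes.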
+class ReadObjectThread : public Thread {
+private:
+ MtpDevice* mDevice;
+ MtpObjectHandle mHandle;
+ int mObjectSize;
+ void* mInitialData;
+ int mInitialDataLength;
+ int mFD;
+
+public:
+ ReadObjectThread(MtpDevice* device, MtpObjectHandle handle, int objectSize)
+ : mDevice(device),
+ mHandle(handle),
+ mObjectSize(objectSize),
+ mInitialData(NULL),
+ mInitialDataLength(0),
+ mFD(-1)
+ {
+ }
+
+ virtual ~ReadObjectThread() {
+ if (mFD >= 0)
+ close(mFD);
+ free(mInitialData);
+ }
+
+ // returns file descriptor
+ int init() {
+ mDevice->mRequest.reset();
+ mDevice->mRequest.setParameter(1, mHandle);
+ if (mDevice->sendRequest(MTP_OPERATION_GET_OBJECT)
+ && mDevice->mData.readDataHeader(mDevice->mEndpointIn)) {
+
+ // mData will contain header and possibly the beginning of the object data
+ mInitialData = mDevice->mData.getData(mInitialDataLength);
+
+ // create a pipe for the client to read from
+ int pipefd[2];
+ if (pipe(pipefd) < 0) {
+ LOGE("pipe failed (%s)", strerror(errno));
+ return -1;
+ }
+
+ mFD = pipefd[1];
+ return pipefd[0];
+ } else {
+ return -1;
+ }
+ }
+
+ virtual bool threadLoop() {
+ int remaining = mObjectSize;
+ if (mInitialData) {
+ write(mFD, mInitialData, mInitialDataLength);
+ remaining -= mInitialDataLength;
+ free(mInitialData);
+ mInitialData = NULL;
+ }
+
+ char buffer[16384];
+ while (remaining > 0) {
+ int readSize = (remaining > sizeof(buffer) ? sizeof(buffer) : remaining);
+ int count = mDevice->mData.readData(mDevice->mEndpointIn, buffer, readSize);
+ if (count >= 0) {
+ int written = write(mFD, buffer, count);
+ // FIXME check error
+ remaining -= count;
+ } else {
+ break;
+ }
+ }
+
+ MtpResponseCode ret = mDevice->readResponse();
+ mDevice->mMutex.unlock();
+ return false;
+ }
+};
+
+// returns the file descriptor for a pipe to read the object's data
+int MtpDevice::readObject(MtpObjectHandle handle, int objectSize) {
+ mMutex.lock();
+
+ ReadObjectThread* thread = new ReadObjectThread(this, handle, objectSize);
+ int fd = thread->init();
+ if (fd < 0) {
+ delete thread;
+ mMutex.unlock();
+ } else {
+ thread->run("ReadObjectThread");
+ }
+ return fd;
+}
+
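+// Low-level helpers for the three phases of an MTP transaction: the request
+// (command) phase, the optional data phase and the response phase, all carried
+// over the bulk endpoints.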
+bool MtpDevice::sendRequest(MtpOperationCode operation) {
+ LOGV("sendRequest: %s\n", MtpDebug::getOperationCodeName(operation));
+ mRequest.setOperationCode(operation);
+ if (mTransactionID > 0)
+ mRequest.setTransactionID(mTransactionID++);
+ int ret = mRequest.write(mEndpointOut);
+ mRequest.dump();
+ return (ret > 0);
+}
+
+bool MtpDevice::sendData() {
+ LOGV("sendData\n");
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ int ret = mData.write(mEndpointOut);
+ mData.dump();
+ return (ret > 0);
+}
+
+bool MtpDevice::readData() {
+ mData.reset();
+ int ret = mData.read(mEndpointIn);
+ LOGV("readData returned %d\n", ret);
+ if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+ mData.dump();
+ return true;
+ }
+ else {
+ LOGV("readResponse failed\n");
+ return false;
+ }
+}
+
+bool MtpDevice::writeDataHeader(MtpOperationCode operation, int dataLength) {
+ mData.setOperationCode(operation);
+ mData.setTransactionID(mRequest.getTransactionID());
+ return (!mData.writeDataHeader(mEndpointOut, dataLength));
+}
+
+MtpResponseCode MtpDevice::readResponse() {
+ LOGV("readResponse\n");
+ int ret = mResponse.read(mEndpointIn);
+ if (ret >= MTP_CONTAINER_HEADER_SIZE) {
+ mResponse.dump();
+ return mResponse.getResponseCode();
+ }
+ else {
+ LOGD("readResponse failed\n");
+ return -1;
+ }
+}
+
+} // namespace android
diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h
new file mode 100644
index 0000000..57f492f
--- /dev/null
+++ b/media/mtp/MtpDevice.h
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_H
+#define _MTP_DEVICE_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpTypes.h"
+
+#include <utils/threads.h>
+
+struct usb_device;
+
+namespace android {
+
+class MtpDeviceInfo;
+class MtpObjectInfo;
+class MtpStorageInfo;
+
+class MtpDevice {
+private:
+ struct usb_device* mDevice;
+ int mInterface;
+ struct usb_endpoint* mEndpointIn;
+ struct usb_endpoint* mEndpointOut;
+ struct usb_endpoint* mEndpointIntr;
+ MtpDeviceInfo* mDeviceInfo;
+ MtpPropertyList mDeviceProperties;
+
+ // a unique ID for the device
+ int mID;
+
+ // current session ID
+ MtpSessionID mSessionID;
+ // current transaction ID
+ MtpTransactionID mTransactionID;
+
+ MtpRequestPacket mRequest;
+ MtpDataPacket mData;
+ MtpResponsePacket mResponse;
+
+ // to ensure only one MTP transaction at a time
+ Mutex mMutex;
+
+public:
+ MtpDevice(struct usb_device* device, int interface,
+ struct usb_endpoint *ep_in, struct usb_endpoint *ep_out,
+ struct usb_endpoint *ep_intr);
+ virtual ~MtpDevice();
+
+ inline int getID() const { return mID; }
+
+ void initialize();
+ void close();
+ const char* getDeviceName();
+
+ bool openSession();
+ bool closeSession();
+
+ MtpDeviceInfo* getDeviceInfo();
+ MtpStorageIDList* getStorageIDs();
+ MtpStorageInfo* getStorageInfo(MtpStorageID storageID);
+ MtpObjectHandleList* getObjectHandles(MtpStorageID storageID, MtpObjectFormat format, MtpObjectHandle parent);
+ MtpObjectInfo* getObjectInfo(MtpObjectHandle handle);
+ void* getThumbnail(MtpObjectHandle handle, int& outLength);
+ MtpObjectHandle sendObjectInfo(MtpObjectInfo* info);
+ bool sendObject(MtpObjectInfo* info, int srcFD);
+ bool deleteObject(MtpObjectHandle handle);
+ MtpObjectHandle getParent(MtpObjectHandle handle);
+ MtpObjectHandle getStorageID(MtpObjectHandle handle);
+
+ MtpProperty* getDevicePropDesc(MtpDeviceProperty code);
+
+ // returns the file descriptor for a pipe to read the object's data
+ int readObject(MtpObjectHandle handle, int objectSize);
+
+private:
+ friend class ReadObjectThread;
+
+ bool sendRequest(MtpOperationCode operation);
+ bool sendData();
+ bool readData();
+ bool writeDataHeader(MtpOperationCode operation, int dataLength);
+ MtpResponseCode readResponse();
+
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_H
diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp
new file mode 100644
index 0000000..5a9322e
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpDeviceInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpDeviceInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpDeviceInfo::MtpDeviceInfo()
+ : mStandardVersion(0),
+ mVendorExtensionID(0),
+ mVendorExtensionVersion(0),
+ mVendorExtensionDesc(NULL),
+ mFunctionalCode(0),
+ mOperations(NULL),
+ mEvents(NULL),
+ mDeviceProperties(NULL),
+ mCaptureFormats(NULL),
+ mPlaybackFormats(NULL),
+ mManufacturer(NULL),
+ mModel(NULL),
+ mVersion(NULL),
+ mSerial(NULL)
+{
+}
+
+MtpDeviceInfo::~MtpDeviceInfo() {
+ if (mVendorExtensionDesc)
+ free(mVendorExtensionDesc);
+ delete mOperations;
+ delete mEvents;
+ delete mDeviceProperties;
+ delete mCaptureFormats;
+ delete mPlaybackFormats;
+ if (mManufacturer)
+ free(mManufacturer);
+ if (mModel)
+ free(mModel);
+ if (mVersion)
+ free(mVersion);
+ if (mSerial)
+ free(mSerial);
+}
+
+void MtpDeviceInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+
+ // read the device info
+ mStandardVersion = packet.getUInt16();
+ mVendorExtensionID = packet.getUInt32();
+ mVendorExtensionVersion = packet.getUInt16();
+
+ packet.getString(string);
+ mVendorExtensionDesc = strdup((const char *)string);
+
+ mFunctionalCode = packet.getUInt16();
+ mOperations = packet.getAUInt16();
+ mEvents = packet.getAUInt16();
+ mDeviceProperties = packet.getAUInt16();
+ mCaptureFormats = packet.getAUInt16();
+ mPlaybackFormats = packet.getAUInt16();
+
+ packet.getString(string);
+ mManufacturer = strdup((const char *)string);
+ packet.getString(string);
+ mModel = strdup((const char *)string);
+ packet.getString(string);
+ mVersion = strdup((const char *)string);
+ packet.getString(string);
+ mSerial = strdup((const char *)string);
+}
+
+void MtpDeviceInfo::print() {
+ LOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n",
+ mStandardVersion, mVendorExtensionID, mVendorExtensionVersion);
+ LOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n",
+ mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h
new file mode 100644
index 0000000..2abaa10
--- /dev/null
+++ b/media/mtp/MtpDeviceInfo.h
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_DEVICE_INFO_H
+#define _MTP_DEVICE_INFO_H
+
+struct stat;
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpDeviceInfo {
+public:
+ uint16_t mStandardVersion;
+ uint32_t mVendorExtensionID;
+ uint16_t mVendorExtensionVersion;
+ char* mVendorExtensionDesc;
+ uint16_t mFunctionalCode;
+ UInt16List* mOperations;
+ UInt16List* mEvents;
+ MtpDevicePropertyList* mDeviceProperties;
+ MtpObjectFormatList* mCaptureFormats;
+ MtpObjectFormatList* mPlaybackFormats;
+ char* mManufacturer;
+ char* mModel;
+ char* mVersion;
+ char* mSerial;
+
+public:
+ MtpDeviceInfo();
+ virtual ~MtpDeviceInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_DEVICE_INFO_H
diff --git a/media/mtp/MtpEventPacket.cpp b/media/mtp/MtpEventPacket.cpp
new file mode 100644
index 0000000..fc74542
--- /dev/null
+++ b/media/mtp/MtpEventPacket.cpp
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpEventPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+#include <sys/ioctl.h>
+
+#ifdef MTP_DEVICE
+#include <linux/usb/f_mtp.h>
+#endif
+
+#include "MtpEventPacket.h"
+
+namespace android {
+
+MtpEventPacket::MtpEventPacket()
+ : MtpPacket(512)
+{
+}
+
+MtpEventPacket::~MtpEventPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpEventPacket::write(int fd) {
+ struct mtp_event event;
+
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_EVENT);
+
+ event.data = mBuffer;
+ event.length = mPacketSize;
+ int ret = ::ioctl(fd, MTP_SEND_EVENT, (unsigned long)&event);
+ return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+int MtpEventPacket::read(struct usb_endpoint *ep) {
+ int ret = transfer(ep, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+} // namespace android
+
diff --git a/media/mtp/MtpEventPacket.h b/media/mtp/MtpEventPacket.h
new file mode 100644
index 0000000..30ae869
--- /dev/null
+++ b/media/mtp/MtpEventPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_EVENT_PACKET_H
+#define _MTP_EVENT_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpEventPacket : public MtpPacket {
+
+public:
+ MtpEventPacket();
+ virtual ~MtpEventPacket();
+
+#ifdef MTP_DEVICE
+ // write our data to the given file descriptor
+ int write(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+ int read(struct usb_endpoint *ep);
+#endif
+
+ inline MtpEventCode getEventCode() const { return getContainerCode(); }
+ inline void setEventCode(MtpEventCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_EVENT_PACKET_H
diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp
new file mode 100644
index 0000000..ea68c3b
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpObjectInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpObjectInfo.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpObjectInfo::MtpObjectInfo(MtpObjectHandle handle)
+ : mHandle(handle),
+ mStorageID(0),
+ mFormat(0),
+ mProtectionStatus(0),
+ mCompressedSize(0),
+ mThumbFormat(0),
+ mThumbCompressedSize(0),
+ mThumbPixWidth(0),
+ mThumbPixHeight(0),
+ mImagePixWidth(0),
+ mImagePixHeight(0),
+ mImagePixDepth(0),
+ mParent(0),
+ mAssociationType(0),
+ mAssociationDesc(0),
+ mSequenceNumber(0),
+ mName(NULL),
+ mDateCreated(0),
+ mDateModified(0),
+ mKeywords(NULL)
+{
+}
+
+MtpObjectInfo::~MtpObjectInfo() {
+ if (mName)
+ free(mName);
+ if (mKeywords)
+ free(mKeywords);
+}
+
+void MtpObjectInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+ time_t time;
+
+ mStorageID = packet.getUInt32();
+ mFormat = packet.getUInt16();
+ mProtectionStatus = packet.getUInt16();
+ mCompressedSize = packet.getUInt32();
+ mThumbFormat = packet.getUInt16();
+ mThumbCompressedSize = packet.getUInt32();
+ mThumbPixWidth = packet.getUInt32();
+ mThumbPixHeight = packet.getUInt32();
+ mImagePixWidth = packet.getUInt32();
+ mImagePixHeight = packet.getUInt32();
+ mImagePixDepth = packet.getUInt32();
+ mParent = packet.getUInt32();
+ mAssociationType = packet.getUInt16();
+ mAssociationDesc = packet.getUInt32();
+ mSequenceNumber = packet.getUInt32();
+
+ packet.getString(string);
+ mName = strdup((const char *)string);
+
+ packet.getString(string);
+ if (parseDateTime((const char*)string, time))
+ mDateCreated = time;
+
+ packet.getString(string);
+ if (parseDateTime((const char*)string, time))
+ mDateModified = time;
+
+ packet.getString(string);
+ mKeywords = strdup((const char *)string);
+}
+
+void MtpObjectInfo::print() {
+ LOGD("MtpObject Info %08X: %s\n", mHandle, mName);
+ LOGD(" mStorageID: %08X mFormat: %04X mProtectionStatus: %d\n",
+ mStorageID, mFormat, mProtectionStatus);
+ LOGD(" mCompressedSize: %d mThumbFormat: %04X mThumbCompressedSize: %d\n",
+ mCompressedSize, mThumbFormat, mThumbCompressedSize);
+ LOGD(" mThumbPixWidth: %d mThumbPixHeight: %d\n", mThumbPixWidth, mThumbPixHeight);
+ LOGD(" mImagePixWidth: %d mImagePixHeight: %d mImagePixDepth: %d\n",
+ mImagePixWidth, mImagePixHeight, mImagePixDepth);
+ LOGD(" mParent: %08X mAssociationType: %04X mAssociationDesc: %04X\n",
+ mParent, mAssociationType, mAssociationDesc);
+ LOGD(" mSequenceNumber: %d mDateCreated: %ld mDateModified: %ld mKeywords: %s\n",
+ mSequenceNumber, mDateCreated, mDateModified, mKeywords);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h
new file mode 100644
index 0000000..c7a449c
--- /dev/null
+++ b/media/mtp/MtpObjectInfo.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_OBJECT_INFO_H
+#define _MTP_OBJECT_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpObjectInfo {
+public:
+ MtpObjectHandle mHandle;
+ MtpStorageID mStorageID;
+ MtpObjectFormat mFormat;
+ uint16_t mProtectionStatus;
+ uint32_t mCompressedSize;
+ MtpObjectFormat mThumbFormat;
+ uint32_t mThumbCompressedSize;
+ uint32_t mThumbPixWidth;
+ uint32_t mThumbPixHeight;
+ uint32_t mImagePixWidth;
+ uint32_t mImagePixHeight;
+ uint32_t mImagePixDepth;
+ MtpObjectHandle mParent;
+ uint16_t mAssociationType;
+ uint32_t mAssociationDesc;
+ uint32_t mSequenceNumber;
+ char* mName;
+ time_t mDateCreated;
+ time_t mDateModified;
+ char* mKeywords;
+
+public:
+ MtpObjectInfo(MtpObjectHandle handle);
+ virtual ~MtpObjectInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_OBJECT_INFO_H
diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp
new file mode 100644
index 0000000..42bf8ba
--- /dev/null
+++ b/media/mtp/MtpPacket.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpPacket"
+
+#include "MtpDebug.h"
+#include "MtpPacket.h"
+#include "mtp.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+
+#include <usbhost/usbhost.h>
+
+namespace android {
+
+MtpPacket::MtpPacket(int bufferSize)
+ : mBuffer(NULL),
+ mBufferSize(bufferSize),
+ mAllocationIncrement(bufferSize),
+ mPacketSize(0)
+{
+ mBuffer = (uint8_t *)malloc(bufferSize);
+ if (!mBuffer) {
+ LOGE("out of memory!");
+ abort();
+ }
+}
+
+MtpPacket::~MtpPacket() {
+ if (mBuffer)
+ free(mBuffer);
+}
+
+void MtpPacket::reset() {
+ allocate(MTP_CONTAINER_HEADER_SIZE);
+ mPacketSize = MTP_CONTAINER_HEADER_SIZE;
+ memset(mBuffer, 0, mBufferSize);
+}
+
+void MtpPacket::allocate(int length) {
+ if (length > mBufferSize) {
+ int newLength = length + mAllocationIncrement;
+ mBuffer = (uint8_t *)realloc(mBuffer, newLength);
+ if (!mBuffer) {
+ LOGE("out of memory!");
+ abort();
+ }
+ mBufferSize = newLength;
+ }
+}
+
+void MtpPacket::dump() {
+#define DUMP_BYTES_PER_ROW 16
+ char buffer[500];
+ char* bufptr = buffer;
+
+ for (int i = 0; i < mPacketSize; i++) {
+ sprintf(bufptr, "%02X ", mBuffer[i]);
+ bufptr += strlen(bufptr);
+ if (i % DUMP_BYTES_PER_ROW == (DUMP_BYTES_PER_ROW - 1)) {
+ LOGV("%s", buffer);
+ bufptr = buffer;
+ }
+ }
+ if (bufptr != buffer) {
+ // print last line
+ LOGV("%s", buffer);
+ }
+ LOGV("\n");
+}
+
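+// MTP containers are little-endian on the wire, so multi-byte fields are packed
+// and unpacked a byte at a time rather than relying on host byte order.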
+uint16_t MtpPacket::getUInt16(int offset) const {
+ return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset];
+}
+
+uint32_t MtpPacket::getUInt32(int offset) const {
+ return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) |
+ ((uint32_t)mBuffer[offset + 1] << 8) | (uint32_t)mBuffer[offset];
+}
+
+void MtpPacket::putUInt16(int offset, uint16_t value) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+}
+
+void MtpPacket::putUInt32(int offset, uint32_t value) {
+ mBuffer[offset++] = (uint8_t)(value & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF);
+ mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF);
+}
+
+uint16_t MtpPacket::getContainerCode() const {
+ return getUInt16(MTP_CONTAINER_CODE_OFFSET);
+}
+
+void MtpPacket::setContainerCode(uint16_t code) {
+ putUInt16(MTP_CONTAINER_CODE_OFFSET, code);
+}
+
+MtpTransactionID MtpPacket::getTransactionID() const {
+ return getUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET);
+}
+
+void MtpPacket::setTransactionID(MtpTransactionID id) {
+ putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id);
+}
+
+uint32_t MtpPacket::getParameter(int index) const {
+ if (index < 1 || index > 5) {
+ LOGE("index %d out of range in MtpRequestPacket::getParameter", index);
+ return 0;
+ }
+ return getUInt32(MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t));
+}
+
+void MtpPacket::setParameter(int index, uint32_t value) {
+ if (index < 1 || index > 5) {
+ LOGE("index %d out of range in MtpResponsePacket::setParameter", index);
+ return;
+ }
+ int offset = MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t);
+ if (mPacketSize < offset + sizeof(uint32_t))
+ mPacketSize = offset + sizeof(uint32_t);
+ putUInt32(offset, value);
+}
+
+#ifdef MTP_HOST
+int MtpPacket::transfer(struct usb_endpoint *ep, void* buffer, int length) {
+ if (usb_endpoint_queue(ep, buffer, length)) {
+ LOGE("usb_endpoint_queue failed, errno: %d", errno);
+ return -1;
+ }
+ int ep_num;
+ return usb_endpoint_wait(usb_endpoint_get_device(ep), &ep_num);
+}
+#endif
+
+} // namespace android
diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h
new file mode 100644
index 0000000..9c8d6da
--- /dev/null
+++ b/media/mtp/MtpPacket.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PACKET_H
+#define _MTP_PACKET_H
+
+#include "MtpTypes.h"
+
+struct usb_endpoint;
+
+namespace android {
+
+class MtpPacket {
+
+protected:
+ uint8_t* mBuffer;
+ // current size of the buffer
+ int mBufferSize;
+ // number of bytes to add when resizing the buffer
+ int mAllocationIncrement;
+ // size of the data in the packet
+ int mPacketSize;
+
+public:
+ MtpPacket(int bufferSize);
+ virtual ~MtpPacket();
+
+ // sets packet size to the default container size and sets buffer to zero
+ virtual void reset();
+
+ void allocate(int length);
+ void dump();
+
+ uint16_t getContainerCode() const;
+ void setContainerCode(uint16_t code);
+
+ MtpTransactionID getTransactionID() const;
+ void setTransactionID(MtpTransactionID id);
+
+ uint32_t getParameter(int index) const;
+ void setParameter(int index, uint32_t value);
+
+#ifdef MTP_HOST
+ int transfer(struct usb_endpoint *ep, void* buffer, int length);
+#endif
+
+protected:
+ uint16_t getUInt16(int offset) const;
+ uint32_t getUInt32(int offset) const;
+ void putUInt16(int offset, uint16_t value);
+ void putUInt32(int offset, uint32_t value);
+};
+
+}; // namespace android
+
+#endif // _MTP_PACKET_H
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
new file mode 100644
index 0000000..bbd0237
--- /dev/null
+++ b/media/mtp/MtpProperty.cpp
@@ -0,0 +1,356 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpProperty"
+
+#include "MtpDataPacket.h"
+#include "MtpProperty.h"
+#include "MtpStringBuffer.h"
+#include "MtpUtils.h"
+
+namespace android {
+
+MtpProperty::MtpProperty()
+ : mCode(0),
+ mType(0),
+ mWriteable(false),
+ mDefaultArrayLength(0),
+ mDefaultArrayValues(NULL),
+ mCurrentArrayLength(0),
+ mCurrentArrayValues(NULL),
+ mGroupCode(0),
+ mFormFlag(kFormNone),
+ mEnumLength(0),
+ mEnumValues(NULL)
+{
+ memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+ memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+ memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+ memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+}
+
+MtpProperty::MtpProperty(MtpPropertyCode propCode,
+ MtpDataType type,
+ bool writeable,
+ int defaultValue)
+ : mCode(propCode),
+ mType(type),
+ mWriteable(writeable),
+ mDefaultArrayLength(0),
+ mDefaultArrayValues(NULL),
+ mCurrentArrayLength(0),
+ mCurrentArrayValues(NULL),
+ mGroupCode(0),
+ mFormFlag(kFormNone),
+ mEnumLength(0),
+ mEnumValues(NULL)
+{
+ memset(&mDefaultValue, 0, sizeof(mDefaultValue));
+ memset(&mCurrentValue, 0, sizeof(mCurrentValue));
+ memset(&mMinimumValue, 0, sizeof(mMinimumValue));
+ memset(&mMaximumValue, 0, sizeof(mMaximumValue));
+
+ if (defaultValue) {
+ switch (type) {
+ case MTP_TYPE_INT8:
+ mDefaultValue.u.i8 = defaultValue;
+ break;
+ case MTP_TYPE_UINT8:
+ mDefaultValue.u.u8 = defaultValue;
+ break;
+ case MTP_TYPE_INT16:
+ mDefaultValue.u.i16 = defaultValue;
+ break;
+ case MTP_TYPE_UINT16:
+ mDefaultValue.u.u16 = defaultValue;
+ break;
+ case MTP_TYPE_INT32:
+ mDefaultValue.u.i32 = defaultValue;
+ break;
+ case MTP_TYPE_UINT32:
+ mDefaultValue.u.u32 = defaultValue;
+ break;
+ case MTP_TYPE_INT64:
+ mDefaultValue.u.i64 = defaultValue;
+ break;
+ case MTP_TYPE_UINT64:
+ mDefaultValue.u.u64 = defaultValue;
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::MtpProperty", type);
+ }
+ }
+}
+
+MtpProperty::~MtpProperty() {
+ if (mType == MTP_TYPE_STR) {
+ // free all strings
+ free(mDefaultValue.str);
+ free(mCurrentValue.str);
+ free(mMinimumValue.str);
+ free(mMaximumValue.str);
+ if (mDefaultArrayValues) {
+ for (int i = 0; i < mDefaultArrayLength; i++)
+ free(mDefaultArrayValues[i].str);
+ }
+ if (mCurrentArrayValues) {
+ for (int i = 0; i < mCurrentArrayLength; i++)
+ free(mCurrentArrayValues[i].str);
+ }
+ if (mEnumValues) {
+ for (int i = 0; i < mEnumLength; i++)
+ free(mEnumValues[i].str);
+ }
+ }
+ delete[] mDefaultArrayValues;
+ delete[] mCurrentArrayValues;
+ delete[] mEnumValues;
+}
+
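+// Parses an ObjectPropDesc or DevicePropDesc dataset.  The two layouts differ
+// slightly: a device property carries a current value after the default value,
+// while an object property carries a group code before the form flag.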
+void MtpProperty::read(MtpDataPacket& packet) {
+ bool deviceProp = isDeviceProperty();
+
+ mCode = packet.getUInt16();
+ mType = packet.getUInt16();
+ mWriteable = (packet.getUInt8() == 1);
+ switch (mType) {
+ case MTP_TYPE_AINT8:
+ case MTP_TYPE_AUINT8:
+ case MTP_TYPE_AINT16:
+ case MTP_TYPE_AUINT16:
+ case MTP_TYPE_AINT32:
+ case MTP_TYPE_AUINT32:
+ case MTP_TYPE_AINT64:
+ case MTP_TYPE_AUINT64:
+ case MTP_TYPE_AINT128:
+ case MTP_TYPE_AUINT128:
+ mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength);
+ if (deviceProp)
+ mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength);
+ break;
+ default:
+ readValue(packet, mDefaultValue);
+ if (deviceProp)
+ readValue(packet, mCurrentValue);
+ }
+ if (!deviceProp)
+ mGroupCode = packet.getUInt32();
+ mFormFlag = packet.getUInt8();
+
+ if (mFormFlag == kFormRange) {
+ readValue(packet, mMinimumValue);
+ readValue(packet, mMaximumValue);
+ readValue(packet, mStepSize);
+ } else if (mFormFlag == kFormEnum) {
+ mEnumLength = packet.getUInt16();
+ mEnumValues = new MtpPropertyValue[mEnumLength];
+ for (int i = 0; i < mEnumLength; i++)
+ readValue(packet, mEnumValues[i]);
+ }
+}
+
+void MtpProperty::write(MtpDataPacket& packet) {
+ bool deviceProp = isDeviceProperty();
+
+ packet.putUInt16(mCode);
+ packet.putUInt16(mType);
+ packet.putUInt8(mWriteable ? 1 : 0);
+
+ switch (mType) {
+ case MTP_TYPE_AINT8:
+ case MTP_TYPE_AUINT8:
+ case MTP_TYPE_AINT16:
+ case MTP_TYPE_AUINT16:
+ case MTP_TYPE_AINT32:
+ case MTP_TYPE_AUINT32:
+ case MTP_TYPE_AINT64:
+ case MTP_TYPE_AUINT64:
+ case MTP_TYPE_AINT128:
+ case MTP_TYPE_AUINT128:
+ writeArrayValues(packet, mDefaultArrayValues, mDefaultArrayLength);
+ if (deviceProp)
+ writeArrayValues(packet, mCurrentArrayValues, mCurrentArrayLength);
+ break;
+ default:
+ writeValue(packet, mDefaultValue);
+ if (deviceProp)
+ writeValue(packet, mCurrentValue);
+ }
+ if (!deviceProp)
+ packet.putUInt32(mGroupCode);
+ packet.putUInt8(mFormFlag);
+ if (mFormFlag == kFormRange) {
+ writeValue(packet, mMinimumValue);
+ writeValue(packet, mMaximumValue);
+ writeValue(packet, mStepSize);
+ } else if (mFormFlag == kFormEnum) {
+ packet.putUInt16(mEnumLength);
+ for (int i = 0; i < mEnumLength; i++)
+ writeValue(packet, mEnumValues[i]);
+ }
+}
+
+void MtpProperty::setDefaultValue(const uint16_t* string) {
+ free(mDefaultValue.str);
+ if (string) {
+ MtpStringBuffer buffer(string);
+ mDefaultValue.str = strdup(buffer);
+ }
+ else
+ mDefaultValue.str = NULL;
+}
+
+void MtpProperty::setCurrentValue(const uint16_t* string) {
+ free(mCurrentValue.str);
+ if (string) {
+ MtpStringBuffer buffer(string);
+ mCurrentValue.str = strdup(buffer);
+ }
+ else
+ mCurrentValue.str = NULL;
+}
+
+void MtpProperty::print() {
+ LOGV("MtpProperty %04X\n", mCode);
+ LOGV(" type %04X\n", mType);
+ LOGV(" writeable %s\n", (mWriteable ? "true" : "false"));
+}
+
+void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+ MtpStringBuffer stringBuffer;
+
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ case MTP_TYPE_AINT8:
+ value.u.i8 = packet.getInt8();
+ break;
+ case MTP_TYPE_UINT8:
+ case MTP_TYPE_AUINT8:
+ value.u.u8 = packet.getUInt8();
+ break;
+ case MTP_TYPE_INT16:
+ case MTP_TYPE_AINT16:
+ value.u.i16 = packet.getInt16();
+ break;
+ case MTP_TYPE_UINT16:
+ case MTP_TYPE_AUINT16:
+ value.u.u16 = packet.getUInt16();
+ break;
+ case MTP_TYPE_INT32:
+ case MTP_TYPE_AINT32:
+ value.u.i32 = packet.getInt32();
+ break;
+ case MTP_TYPE_UINT32:
+ case MTP_TYPE_AUINT32:
+ value.u.u32 = packet.getUInt32();
+ break;
+ case MTP_TYPE_INT64:
+ case MTP_TYPE_AINT64:
+ value.u.i64 = packet.getInt64();
+ break;
+ case MTP_TYPE_UINT64:
+ case MTP_TYPE_AUINT64:
+ value.u.u64 = packet.getUInt64();
+ break;
+ case MTP_TYPE_INT128:
+ case MTP_TYPE_AINT128:
+ packet.getInt128(value.u.i128);
+ break;
+ case MTP_TYPE_UINT128:
+ case MTP_TYPE_AUINT128:
+ packet.getUInt128(value.u.u128);
+ break;
+ case MTP_TYPE_STR:
+ packet.getString(stringBuffer);
+ value.str = strdup(stringBuffer);
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::readValue", mType);
+ }
+}
+
+void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
+ MtpStringBuffer stringBuffer;
+
+ switch (mType) {
+ case MTP_TYPE_INT8:
+ case MTP_TYPE_AINT8:
+ packet.putInt8(value.u.i8);
+ break;
+ case MTP_TYPE_UINT8:
+ case MTP_TYPE_AUINT8:
+ packet.putUInt8(value.u.u8);
+ break;
+ case MTP_TYPE_INT16:
+ case MTP_TYPE_AINT16:
+ packet.putInt16(value.u.i16);
+ break;
+ case MTP_TYPE_UINT16:
+ case MTP_TYPE_AUINT16:
+ packet.putUInt16(value.u.u16);
+ break;
+ case MTP_TYPE_INT32:
+ case MTP_TYPE_AINT32:
+ packet.putInt32(value.u.i32);
+ break;
+ case MTP_TYPE_UINT32:
+ case MTP_TYPE_AUINT32:
+ packet.putUInt32(value.u.u32);
+ break;
+ case MTP_TYPE_INT64:
+ case MTP_TYPE_AINT64:
+ packet.putInt64(value.u.i64);
+ break;
+ case MTP_TYPE_UINT64:
+ case MTP_TYPE_AUINT64:
+ packet.putUInt64(value.u.u64);
+ break;
+ case MTP_TYPE_INT128:
+ case MTP_TYPE_AINT128:
+ packet.putInt128(value.u.i128);
+ break;
+ case MTP_TYPE_UINT128:
+ case MTP_TYPE_AUINT128:
+ packet.putUInt128(value.u.u128);
+ break;
+ case MTP_TYPE_STR:
+ if (value.str)
+ packet.putString(value.str);
+ else
+ packet.putEmptyString();
+ break;
+ default:
+ LOGE("unknown type %04X in MtpProperty::writeValue", mType);
+ }
+}
+
+MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
+ length = packet.getUInt32();
+ if (length == 0)
+ return NULL;
+ MtpPropertyValue* result = new MtpPropertyValue[length];
+ for (int i = 0; i < length; i++)
+ readValue(packet, result[i]);
+ return result;
+}
+
+void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) {
+ packet.putUInt32(length);
+ for (int i = 0; i < length; i++)
+ writeValue(packet, values[i]);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h
new file mode 100644
index 0000000..98b465a
--- /dev/null
+++ b/media/mtp/MtpProperty.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_PROPERTY_H
+#define _MTP_PROPERTY_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+struct MtpPropertyValue {
+ union {
+ int8_t i8;
+ uint8_t u8;
+ int16_t i16;
+ uint16_t u16;
+ int32_t i32;
+ uint32_t u32;
+ int64_t i64;
+ uint64_t u64;
+ int128_t i128;
+ uint128_t u128;
+ } u;
+ // string in UTF8 format
+ char* str;
+};
+
+class MtpProperty {
+public:
+ MtpPropertyCode mCode;
+ MtpDataType mType;
+ bool mWriteable;
+ MtpPropertyValue mDefaultValue;
+ MtpPropertyValue mCurrentValue;
+
+ // for array types
+ int mDefaultArrayLength;
+ MtpPropertyValue* mDefaultArrayValues;
+ int mCurrentArrayLength;
+ MtpPropertyValue* mCurrentArrayValues;
+
+ enum {
+ kFormNone = 0,
+ kFormRange = 1,
+ kFormEnum = 2,
+ };
+
+ uint32_t mGroupCode;
+ uint8_t mFormFlag;
+
+ // for range form
+ MtpPropertyValue mMinimumValue;
+ MtpPropertyValue mMaximumValue;
+ MtpPropertyValue mStepSize;
+
+ // for enum form
+ int mEnumLength;
+ MtpPropertyValue* mEnumValues;
+
+public:
+ MtpProperty();
+ MtpProperty(MtpPropertyCode propCode,
+ MtpDataType type,
+ bool writeable = false,
+ int defaultValue = 0);
+ virtual ~MtpProperty();
+
+ inline MtpPropertyCode getPropertyCode() const { return mCode; }
+
+ void read(MtpDataPacket& packet);
+ void write(MtpDataPacket& packet);
+
+ void setDefaultValue(const uint16_t* string);
+ void setCurrentValue(const uint16_t* string);
+
+ void print();
+
+ inline bool isDeviceProperty() const {
+ return ( ((mCode & 0xF000) == 0x5000)
+ || ((mCode & 0xF800) == 0xD000));
+ }
+
+private:
+ void readValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ void writeValue(MtpDataPacket& packet, MtpPropertyValue& value);
+ MtpPropertyValue* readArrayValues(MtpDataPacket& packet, int& length);
+ void writeArrayValues(MtpDataPacket& packet,
+ MtpPropertyValue* values, int length);
+};
+
+}; // namespace android
+
+#endif // _MTP_PROPERTY_H
diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp
new file mode 100644
index 0000000..8ece580
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.cpp
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpRequestPacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpRequestPacket.h"
+
+namespace android {
+
+MtpRequestPacket::MtpRequestPacket()
+ : MtpPacket(512)
+{
+}
+
+MtpRequestPacket::~MtpRequestPacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpRequestPacket::read(int fd) {
+ int ret = ::read(fd, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+#ifdef MTP_HOST
+ // write our buffer to the given endpoint (host mode)
+int MtpRequestPacket::write(struct usb_endpoint *ep)
+{
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_COMMAND);
+ return transfer(ep, mBuffer, mPacketSize);
+}
+#endif
+
+} // namespace android
diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h
new file mode 100644
index 0000000..df518f2
--- /dev/null
+++ b/media/mtp/MtpRequestPacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_REQUEST_PACKET_H
+#define _MTP_REQUEST_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpRequestPacket : public MtpPacket {
+
+public:
+ MtpRequestPacket();
+ virtual ~MtpRequestPacket();
+
+#ifdef MTP_DEVICE
+ // fill our buffer with data from the given file descriptor
+ int read(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // write our buffer to the given endpoint
+ int write(struct usb_endpoint *ep);
+#endif
+
+ inline MtpOperationCode getOperationCode() const { return getContainerCode(); }
+ inline void setOperationCode(MtpOperationCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_REQUEST_PACKET_H
diff --git a/media/mtp/MtpResponsePacket.cpp b/media/mtp/MtpResponsePacket.cpp
new file mode 100644
index 0000000..3ef714e
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpResponsePacket"
+
+#include <stdio.h>
+#include <sys/types.h>
+#include <fcntl.h>
+
+#include "MtpResponsePacket.h"
+
+namespace android {
+
+MtpResponsePacket::MtpResponsePacket()
+ : MtpPacket(512)
+{
+}
+
+MtpResponsePacket::~MtpResponsePacket() {
+}
+
+#ifdef MTP_DEVICE
+int MtpResponsePacket::write(int fd) {
+ putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize);
+ putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_RESPONSE);
+ int ret = ::write(fd, mBuffer, mPacketSize);
+ return (ret < 0 ? ret : 0);
+}
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+int MtpResponsePacket::read(struct usb_endpoint *ep) {
+ int ret = transfer(ep, mBuffer, mBufferSize);
+ if (ret >= 0)
+ mPacketSize = ret;
+ else
+ mPacketSize = 0;
+ return ret;
+}
+#endif
+
+} // namespace android
+
diff --git a/media/mtp/MtpResponsePacket.h b/media/mtp/MtpResponsePacket.h
new file mode 100644
index 0000000..373f8f9
--- /dev/null
+++ b/media/mtp/MtpResponsePacket.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_RESPONSE_PACKET_H
+#define _MTP_RESPONSE_PACKET_H
+
+#include "MtpPacket.h"
+#include "mtp.h"
+
+namespace android {
+
+class MtpResponsePacket : public MtpPacket {
+
+public:
+ MtpResponsePacket();
+ virtual ~MtpResponsePacket();
+
+#ifdef MTP_DEVICE
+ // write our data to the given file descriptor
+ int write(int fd);
+#endif
+
+#ifdef MTP_HOST
+ // read our buffer from the given endpoint
+ int read(struct usb_endpoint *ep);
+#endif
+
+ inline MtpResponseCode getResponseCode() const { return getContainerCode(); }
+ inline void setResponseCode(MtpResponseCode code)
+ { return setContainerCode(code); }
+};
+
+}; // namespace android
+
+#endif // _MTP_RESPONSE_PACKET_H
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
new file mode 100644
index 0000000..f74f395
--- /dev/null
+++ b/media/mtp/MtpServer.cpp
@@ -0,0 +1,797 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <sys/types.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <dirent.h>
+
+#include <cutils/properties.h>
+
+#define LOG_TAG "MtpServer"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpProperty.h"
+#include "MtpServer.h"
+#include "MtpStorage.h"
+#include "MtpStringBuffer.h"
+
+#include <linux/usb/f_mtp.h>
+
+namespace android {
+
+static const MtpOperationCode kSupportedOperationCodes[] = {
+ MTP_OPERATION_GET_DEVICE_INFO,
+ MTP_OPERATION_OPEN_SESSION,
+ MTP_OPERATION_CLOSE_SESSION,
+ MTP_OPERATION_GET_STORAGE_IDS,
+ MTP_OPERATION_GET_STORAGE_INFO,
+ MTP_OPERATION_GET_NUM_OBJECTS,
+ MTP_OPERATION_GET_OBJECT_HANDLES,
+ MTP_OPERATION_GET_OBJECT_INFO,
+ MTP_OPERATION_GET_OBJECT,
+// MTP_OPERATION_GET_THUMB,
+ MTP_OPERATION_DELETE_OBJECT,
+ MTP_OPERATION_SEND_OBJECT_INFO,
+ MTP_OPERATION_SEND_OBJECT,
+// MTP_OPERATION_INITIATE_CAPTURE,
+// MTP_OPERATION_FORMAT_STORE,
+// MTP_OPERATION_RESET_DEVICE,
+// MTP_OPERATION_SELF_TEST,
+// MTP_OPERATION_SET_OBJECT_PROTECTION,
+// MTP_OPERATION_POWER_DOWN,
+ MTP_OPERATION_GET_DEVICE_PROP_DESC,
+ MTP_OPERATION_GET_DEVICE_PROP_VALUE,
+ MTP_OPERATION_SET_DEVICE_PROP_VALUE,
+ MTP_OPERATION_RESET_DEVICE_PROP_VALUE,
+// MTP_OPERATION_TERMINATE_OPEN_CAPTURE,
+// MTP_OPERATION_MOVE_OBJECT,
+// MTP_OPERATION_COPY_OBJECT,
+// MTP_OPERATION_GET_PARTIAL_OBJECT,
+// MTP_OPERATION_INITIATE_OPEN_CAPTURE,
+ MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED,
+ MTP_OPERATION_GET_OBJECT_PROP_DESC,
+ MTP_OPERATION_GET_OBJECT_PROP_VALUE,
+ MTP_OPERATION_SET_OBJECT_PROP_VALUE,
+ MTP_OPERATION_GET_OBJECT_REFERENCES,
+ MTP_OPERATION_SET_OBJECT_REFERENCES,
+// MTP_OPERATION_SKIP,
+};
+
+static const MtpEventCode kSupportedEventCodes[] = {
+ MTP_EVENT_OBJECT_ADDED,
+ MTP_EVENT_OBJECT_REMOVED,
+};
+
+MtpServer::MtpServer(int fd, MtpDatabase* database,
+ int fileGroup, int filePerm, int directoryPerm)
+ : mFD(fd),
+ mDatabase(database),
+ mFileGroup(fileGroup),
+ mFilePermission(filePerm),
+ mDirectoryPermission(directoryPerm),
+ mSessionID(0),
+ mSessionOpen(false),
+ mSendObjectHandle(kInvalidObjectHandle),
+ mSendObjectFormat(0),
+ mSendObjectFileSize(0)
+{
+}
+
+MtpServer::~MtpServer() {
+}
+
+void MtpServer::addStorage(const char* filePath) {
+ int index = mStorages.size() + 1;
+ index |= index << 16; // set high and low part to our index
+ MtpStorage* storage = new MtpStorage(index, filePath, mDatabase);
+ addStorage(storage);
+}
+
+MtpStorage* MtpServer::getStorage(MtpStorageID id) {
+ for (int i = 0; i < mStorages.size(); i++) {
+ MtpStorage* storage = mStorages[i];
+ if (storage->getStorageID() == id)
+ return storage;
+ }
+ return NULL;
+}
+
+void MtpServer::run() {
+ int fd = mFD;
+
+ LOGV("MtpServer::run fd: %d\n", fd);
+
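+ // Main request loop: read a command from the driver, optionally read a data
+ // phase, dispatch it via handleRequest(), then send any reply data followed
+ // by the response.  ECANCELED from the driver means the host cancelled the
+ // current transaction, so we simply wait for the next command.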
+ while (1) {
+ int ret = mRequest.read(fd);
+ if (ret < 0) {
+ LOGE("request read returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ MtpOperationCode operation = mRequest.getOperationCode();
+ MtpTransactionID transaction = mRequest.getTransactionID();
+
+ LOGV("operation: %s", MtpDebug::getOperationCodeName(operation));
+ mRequest.dump();
+
+ // FIXME need to generalize this
+ bool dataIn = (operation == MTP_OPERATION_SEND_OBJECT_INFO
+ || operation == MTP_OPERATION_SET_OBJECT_REFERENCES
+ || operation == MTP_OPERATION_SET_OBJECT_PROP_VALUE
+ || operation == MTP_OPERATION_SET_DEVICE_PROP_VALUE);
+ if (dataIn) {
+ int ret = mData.read(fd);
+ if (ret < 0) {
+ LOGE("data read returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ LOGV("received data:");
+ mData.dump();
+ } else {
+ mData.reset();
+ }
+
+ if (handleRequest()) {
+ if (!dataIn && mData.hasData()) {
+ mData.setOperationCode(operation);
+ mData.setTransactionID(transaction);
+ LOGV("sending data:");
+ mData.dump();
+ ret = mData.write(fd);
+ if (ret < 0) {
+ LOGE("request write returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ }
+
+ mResponse.setTransactionID(transaction);
+ LOGV("sending response %04X", mResponse.getResponseCode());
+ ret = mResponse.write(fd);
+ mResponse.dump();
+ if (ret < 0) {
+ LOGE("request write returned %d, errno: %d", ret, errno);
+ if (errno == ECANCELED) {
+ // return to top of loop and wait for next command
+ continue;
+ }
+ break;
+ }
+ } else {
+ LOGV("skipping response\n");
+ }
+ }
+
+ if (mSessionOpen)
+ mDatabase->sessionEnded();
+}
+
+void MtpServer::sendObjectAdded(MtpObjectHandle handle) {
+ if (mSessionOpen) {
+ LOGD("sendObjectAdded %d\n", handle);
+ mEvent.setEventCode(MTP_EVENT_OBJECT_ADDED);
+ mEvent.setTransactionID(mRequest.getTransactionID());
+ mEvent.setParameter(1, handle);
+ int ret = mEvent.write(mFD);
+ LOGD("mEvent.write returned %d\n", ret);
+ }
+}
+
+void MtpServer::sendObjectRemoved(MtpObjectHandle handle) {
+ if (mSessionOpen) {
+ LOGD("sendObjectRemoved %d\n", handle);
+ mEvent.setEventCode(MTP_EVENT_OBJECT_REMOVED);
+ mEvent.setTransactionID(mRequest.getTransactionID());
+ mEvent.setParameter(1, handle);
+ int ret = mEvent.write(mFD);
+ LOGD("mEvent.write returned %d\n", ret);
+ }
+}
+
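+// Dispatches the current request to the matching handler.  Handlers write any
+// reply payload into mData and return an MTP response code; returning false
+// here (on MTP_RESPONSE_TRANSACTION_CANCELLED) suppresses the response packet.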
+bool MtpServer::handleRequest() {
+ MtpOperationCode operation = mRequest.getOperationCode();
+ MtpResponseCode response;
+
+ mResponse.reset();
+
+ if (mSendObjectHandle != kInvalidObjectHandle && operation != MTP_OPERATION_SEND_OBJECT) {
+ // FIXME - need to delete mSendObjectHandle from the database
+ LOGE("expected SendObject after SendObjectInfo");
+ mSendObjectHandle = kInvalidObjectHandle;
+ }
+
+ switch (operation) {
+ case MTP_OPERATION_GET_DEVICE_INFO:
+ response = doGetDeviceInfo();
+ break;
+ case MTP_OPERATION_OPEN_SESSION:
+ response = doOpenSession();
+ break;
+ case MTP_OPERATION_CLOSE_SESSION:
+ response = doCloseSession();
+ break;
+ case MTP_OPERATION_GET_STORAGE_IDS:
+ response = doGetStorageIDs();
+ break;
+ case MTP_OPERATION_GET_STORAGE_INFO:
+ response = doGetStorageInfo();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED:
+ response = doGetObjectPropsSupported();
+ break;
+ case MTP_OPERATION_GET_OBJECT_HANDLES:
+ response = doGetObjectHandles();
+ break;
+ case MTP_OPERATION_GET_NUM_OBJECTS:
+ response = doGetNumObjects();
+ break;
+ case MTP_OPERATION_GET_OBJECT_REFERENCES:
+ response = doGetObjectReferences();
+ break;
+ case MTP_OPERATION_SET_OBJECT_REFERENCES:
+ response = doSetObjectReferences();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROP_VALUE:
+ response = doGetObjectPropValue();
+ break;
+ case MTP_OPERATION_SET_OBJECT_PROP_VALUE:
+ response = doSetObjectPropValue();
+ break;
+ case MTP_OPERATION_GET_DEVICE_PROP_VALUE:
+ response = doGetDevicePropValue();
+ break;
+ case MTP_OPERATION_SET_DEVICE_PROP_VALUE:
+ response = doSetDevicePropValue();
+ break;
+ case MTP_OPERATION_RESET_DEVICE_PROP_VALUE:
+ response = doResetDevicePropValue();
+ break;
+ case MTP_OPERATION_GET_OBJECT_INFO:
+ response = doGetObjectInfo();
+ break;
+ case MTP_OPERATION_GET_OBJECT:
+ response = doGetObject();
+ break;
+ case MTP_OPERATION_SEND_OBJECT_INFO:
+ response = doSendObjectInfo();
+ break;
+ case MTP_OPERATION_SEND_OBJECT:
+ response = doSendObject();
+ break;
+ case MTP_OPERATION_DELETE_OBJECT:
+ response = doDeleteObject();
+ break;
+ case MTP_OPERATION_GET_OBJECT_PROP_DESC:
+ response = doGetObjectPropDesc();
+ break;
+ case MTP_OPERATION_GET_DEVICE_PROP_DESC:
+ response = doGetDevicePropDesc();
+ break;
+ default:
+ LOGE("got unsupported command %s", MtpDebug::getOperationCodeName(operation));
+ response = MTP_RESPONSE_OPERATION_NOT_SUPPORTED;
+ break;
+ }
+
+ if (response == MTP_RESPONSE_TRANSACTION_CANCELLED)
+ return false;
+ mResponse.setResponseCode(response);
+ return true;
+}
+
+MtpResponseCode MtpServer::doGetDeviceInfo() {
+ MtpStringBuffer string;
+ char prop_value[PROPERTY_VALUE_MAX];
+
+ MtpObjectFormatList* playbackFormats = mDatabase->getSupportedPlaybackFormats();
+ MtpObjectFormatList* captureFormats = mDatabase->getSupportedCaptureFormats();
+ MtpDevicePropertyList* deviceProperties = mDatabase->getSupportedDeviceProperties();
+
+ // fill in device info
+ mData.putUInt16(MTP_STANDARD_VERSION);
+ mData.putUInt32(6); // MTP Vendor Extension ID
+ mData.putUInt16(MTP_STANDARD_VERSION);
+ string.set("microsoft.com: 1.0;");
+ mData.putString(string); // MTP Extensions
+ mData.putUInt16(0); //Functional Mode
+ mData.putAUInt16(kSupportedOperationCodes,
+ sizeof(kSupportedOperationCodes) / sizeof(uint16_t)); // Operations Supported
+ mData.putAUInt16(kSupportedEventCodes,
+ sizeof(kSupportedEventCodes) / sizeof(uint16_t)); // Events Supported
+ mData.putAUInt16(deviceProperties); // Device Properties Supported
+ mData.putAUInt16(captureFormats); // Capture Formats
+ mData.putAUInt16(playbackFormats); // Playback Formats
+ // FIXME
+ string.set("Google, Inc.");
+ mData.putString(string); // Manufacturer
+
+ property_get("ro.product.model", prop_value, "MTP Device");
+ string.set(prop_value);
+ mData.putString(string); // Model
+ string.set("1.0");
+ mData.putString(string); // Device Version
+
+ property_get("ro.serialno", prop_value, "????????");
+ string.set(prop_value);
+ mData.putString(string); // Serial Number
+
+ delete playbackFormats;
+ delete captureFormats;
+ delete deviceProperties;
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doOpenSession() {
+ if (mSessionOpen) {
+ mResponse.setParameter(1, mSessionID);
+ return MTP_RESPONSE_SESSION_ALREADY_OPEN;
+ }
+ mSessionID = mRequest.getParameter(1);
+ mSessionOpen = true;
+
+ mDatabase->sessionStarted();
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doCloseSession() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ mSessionID = 0;
+ mSessionOpen = false;
+ mDatabase->sessionEnded();
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageIDs() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+
+ int count = mStorages.size();
+ mData.putUInt32(count);
+ for (int i = 0; i < count; i++)
+ mData.putUInt32(mStorages[i]->getStorageID());
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetStorageInfo() {
+ MtpStringBuffer string;
+
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID id = mRequest.getParameter(1);
+ MtpStorage* storage = getStorage(id);
+ if (!storage)
+ return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+ mData.putUInt16(storage->getType());
+ mData.putUInt16(storage->getFileSystemType());
+ mData.putUInt16(storage->getAccessCapability());
+ mData.putUInt64(storage->getMaxCapacity());
+ mData.putUInt64(storage->getFreeSpace());
+ mData.putUInt32(1024*1024*1024); // Free Space in Objects
+ string.set(storage->getDescription());
+ mData.putString(string);
+ mData.putEmptyString(); // Volume Identifier
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropsSupported() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpObjectFormat format = mRequest.getParameter(1);
+ MtpDevicePropertyList* properties = mDatabase->getSupportedObjectProperties(format);
+ mData.putAUInt16(properties);
+ delete properties;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetObjectHandles() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
+ MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
+ MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
+ // 0x00000000 for all objects?
+ if (parent == 0xFFFFFFFF)
+ parent = 0;
+
+ MtpObjectHandleList* handles = mDatabase->getObjectList(storageID, format, parent);
+ mData.putAUInt32(handles);
+ delete handles;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetNumObjects() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage
+ MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats
+ MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent
+ // 0x00000000 for all objects?
+ if (parent == 0xFFFFFFFF)
+ parent = 0;
+
+ int count = mDatabase->getNumObjects(storageID, format, parent);
+ if (count >= 0) {
+ mResponse.setParameter(1, count);
+ return MTP_RESPONSE_OK;
+ } else {
+ mResponse.setParameter(1, 0);
+ return MTP_RESPONSE_INVALID_OBJECT_HANDLE;
+ }
+}
+
+MtpResponseCode MtpServer::doGetObjectReferences() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID handle = mRequest.getParameter(1);
+
+ // FIXME - check for invalid object handle
+ MtpObjectHandleList* handles = mDatabase->getObjectReferences(handle);
+ if (handles) {
+ mData.putAUInt32(handles);
+ delete handles;
+ } else {
+ mData.putEmptyArray();
+ }
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSetObjectReferences() {
+ if (!mSessionOpen)
+ return MTP_RESPONSE_SESSION_NOT_OPEN;
+ MtpStorageID handle = mRequest.getParameter(1);
+ MtpObjectHandleList* references = mData.getAUInt32();
+ MtpResponseCode result = mDatabase->setObjectReferences(handle, references);
+ delete references;
+ return result;
+}
+
+MtpResponseCode MtpServer::doGetObjectPropValue() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectProperty property = mRequest.getParameter(2);
+ LOGD("GetObjectPropValue %d %s\n", handle,
+ MtpDebug::getObjectPropCodeName(property));
+
+ return mDatabase->getObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doSetObjectPropValue() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectProperty property = mRequest.getParameter(2);
+ LOGD("SetObjectPropValue %d %s\n", handle,
+ MtpDebug::getObjectPropCodeName(property));
+
+ return mDatabase->setObjectPropertyValue(handle, property, mData);
+}
+
+MtpResponseCode MtpServer::doGetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("GetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->getDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doSetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("SetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->setDevicePropertyValue(property, mData);
+}
+
+MtpResponseCode MtpServer::doResetDevicePropValue() {
+ MtpDeviceProperty property = mRequest.getParameter(1);
+ LOGD("ResetDevicePropValue %s\n",
+ MtpDebug::getDevicePropCodeName(property));
+
+ return mDatabase->resetDeviceProperty(property);
+}
+
+MtpResponseCode MtpServer::doGetObjectInfo() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ return mDatabase->getObjectInfo(handle, mData);
+}
+
+MtpResponseCode MtpServer::doGetObject() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpString pathBuf;
+ int64_t fileLength;
+ int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength);
+ if (result != MTP_RESPONSE_OK)
+ return result;
+
+ const char* filePath = (const char *)pathBuf;
+ mtp_file_range mfr;
+ mfr.fd = open(filePath, O_RDONLY);
+ if (mfr.fd < 0) {
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ mfr.offset = 0;
+ mfr.length = fileLength;
+
+ // send data header
+ mData.setOperationCode(mRequest.getOperationCode());
+ mData.setTransactionID(mRequest.getTransactionID());
+ mData.writeDataHeader(mFD, fileLength + MTP_CONTAINER_HEADER_SIZE);
+
+ // then transfer the file
+ int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr);
+ close(mfr.fd);
+ if (ret < 0) {
+ if (errno == ECANCELED)
+ return MTP_RESPONSE_TRANSACTION_CANCELLED;
+ else
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSendObjectInfo() {
+ MtpString path;
+ MtpStorageID storageID = mRequest.getParameter(1);
+ MtpStorage* storage = getStorage(storageID);
+ MtpObjectHandle parent = mRequest.getParameter(2);
+ if (!storage)
+ return MTP_RESPONSE_INVALID_STORAGE_ID;
+
+ // special case the root
+ if (parent == MTP_PARENT_ROOT) {
+ path = storage->getPath();
+ parent = 0;
+ } else {
+ int64_t dummy;
+ int result = mDatabase->getObjectFilePath(parent, path, dummy);
+ if (result != MTP_RESPONSE_OK)
+ return result;
+ }
+
+ // read only the fields we need
+ mData.getUInt32(); // storage ID
+ MtpObjectFormat format = mData.getUInt16();
+ mData.getUInt16(); // protection status
+ mSendObjectFileSize = mData.getUInt32();
+ mData.getUInt16(); // thumb format
+ mData.getUInt32(); // thumb compressed size
+ mData.getUInt32(); // thumb pix width
+ mData.getUInt32(); // thumb pix height
+ mData.getUInt32(); // image pix width
+ mData.getUInt32(); // image pix height
+ mData.getUInt32(); // image bit depth
+ mData.getUInt32(); // parent
+ uint16_t associationType = mData.getUInt16();
+ uint32_t associationDesc = mData.getUInt32(); // association desc
+ mData.getUInt32(); // sequence number
+ MtpStringBuffer name, created, modified;
+ mData.getString(name); // file name
+ mData.getString(created); // date created
+ mData.getString(modified); // date modified
+ // keywords follow
+
+ time_t modifiedTime;
+ if (!parseDateTime(modified, modifiedTime))
+ modifiedTime = 0;
+
+ if (path[path.size() - 1] != '/')
+ path += "/";
+ path += (const char *)name;
+
+ MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path,
+ format, parent, storageID, mSendObjectFileSize, modifiedTime);
+ if (handle == kInvalidObjectHandle) {
+ return MTP_RESPONSE_GENERAL_ERROR;
+ }
+
+ if (format == MTP_FORMAT_ASSOCIATION) {
+ mode_t mask = umask(0);
+ int ret = mkdir((const char *)path, mDirectoryPermission);
+ umask(mask);
+ // mkdir() returns -1 and sets errno on failure, so test errno for EEXIST
+ if (ret && errno != EEXIST)
+ return MTP_RESPONSE_GENERAL_ERROR;
+ chown((const char *)path, getuid(), mFileGroup);
+ } else {
+ mSendObjectFilePath = path;
+ // save the handle for the SendObject call, which should follow
+ mSendObjectHandle = handle;
+ mSendObjectFormat = format;
+ }
+
+ mResponse.setParameter(1, storageID);
+ mResponse.setParameter(2, parent);
+ mResponse.setParameter(3, handle);
+
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doSendObject() {
+ MtpResponseCode result = MTP_RESPONSE_OK;
+ mode_t mask;
+ int ret;
+
+ if (mSendObjectHandle == kInvalidObjectHandle) {
+ LOGE("Expected SendObjectInfo before SendObject");
+ result = MTP_RESPONSE_NO_VALID_OBJECT_INFO;
+ goto done;
+ }
+
+ // read the header
+ ret = mData.readDataHeader(mFD);
+ // FIXME - check for errors here.
+
+ // reset so we don't attempt to send this back
+ mData.reset();
+
+ mtp_file_range mfr;
+ // O_CREAT requires an explicit mode argument; permissions are adjusted again below
+ mfr.fd = open(mSendObjectFilePath, O_RDWR | O_CREAT | O_TRUNC, mFilePermission);
+ if (mfr.fd < 0) {
+ result = MTP_RESPONSE_GENERAL_ERROR;
+ goto done;
+ }
+ fchown(mfr.fd, getuid(), mFileGroup);
+ // set permissions
+ mask = umask(0);
+ fchmod(mfr.fd, mFilePermission);
+ umask(mask);
+
+ mfr.offset = 0;
+ mfr.length = mSendObjectFileSize;
+
+ // transfer the file
+ ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr);
+ close(mfr.fd);
+
+ LOGV("MTP_RECEIVE_FILE returned %d", ret);
+
+ if (ret < 0) {
+ unlink(mSendObjectFilePath);
+ if (errno == ECANCELED)
+ result = MTP_RESPONSE_TRANSACTION_CANCELLED;
+ else
+ result = MTP_RESPONSE_GENERAL_ERROR;
+ }
+
+done:
+ mDatabase->endSendObject(mSendObjectFilePath, mSendObjectHandle, mSendObjectFormat,
+ result == MTP_RESPONSE_OK);
+ mSendObjectHandle = kInvalidObjectHandle;
+ mSendObjectFormat = 0;
+ return result;
+}
+
+static void deleteRecursive(const char* path) {
+ char pathbuf[PATH_MAX];
+ int pathLength = strlen(path);
+ if (pathLength >= sizeof(pathbuf) - 1) {
+ LOGE("path too long: %s\n", path);
+ }
+ strcpy(pathbuf, path);
+ if (pathbuf[pathLength - 1] != '/') {
+ pathbuf[pathLength++] = '/';
+ }
+ char* fileSpot = pathbuf + pathLength;
+ int pathRemaining = sizeof(pathbuf) - pathLength - 1;
+
+ DIR* dir = opendir(path);
+ if (!dir) {
+ LOGE("opendir %s failed: %s", path, strerror(errno));
+ return;
+ }
+
+ struct dirent* entry;
+ while ((entry = readdir(dir))) {
+ const char* name = entry->d_name;
+
+ // ignore "." and ".."
+ if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' && name[2] == 0))) {
+ continue;
+ }
+
+ int nameLength = strlen(name);
+ if (nameLength > pathRemaining) {
+ LOGE("path %s/%s too long\n", path, name);
+ continue;
+ }
+ strcpy(fileSpot, name);
+
+ int type = entry->d_type;
+ if (entry->d_type == DT_DIR) {
+ deleteRecursive(pathbuf);
+ rmdir(pathbuf);
+ } else {
+ unlink(pathbuf);
+ }
+ }
+}
+
+static void deletePath(const char* path) {
+ struct stat statbuf;
+ if (stat(path, &statbuf) == 0) {
+ if (S_ISDIR(statbuf.st_mode)) {
+ deleteRecursive(path);
+ rmdir(path);
+ } else {
+ unlink(path);
+ }
+ } else {
+ LOGE("deletePath stat failed for %s: %s", path, strerror(errno));
+ }
+}
+
+MtpResponseCode MtpServer::doDeleteObject() {
+ MtpObjectHandle handle = mRequest.getParameter(1);
+ MtpObjectFormat format = mRequest.getParameter(2);
+ // FIXME - support deleting all objects if handle is 0xFFFFFFFF
+ // FIXME - implement deleting objects by format
+
+ MtpString filePath;
+ int64_t fileLength;
+ int result = mDatabase->getObjectFilePath(handle, filePath, fileLength);
+ if (result == MTP_RESPONSE_OK) {
+ LOGV("deleting %s", (const char *)filePath);
+ deletePath((const char *)filePath);
+ return mDatabase->deleteFile(handle);
+ } else {
+ return result;
+ }
+}
+
+MtpResponseCode MtpServer::doGetObjectPropDesc() {
+ MtpObjectProperty propCode = mRequest.getParameter(1);
+ MtpObjectFormat format = mRequest.getParameter(2);
+ LOGD("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode),
+ MtpDebug::getFormatCodeName(format));
+ MtpProperty* property = mDatabase->getObjectPropertyDesc(propCode, format);
+ if (!property)
+ return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED;
+ property->write(mData);
+ delete property;
+ return MTP_RESPONSE_OK;
+}
+
+MtpResponseCode MtpServer::doGetDevicePropDesc() {
+ MtpDeviceProperty propCode = mRequest.getParameter(1);
+ LOGD("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode));
+ MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode);
+ if (!property)
+ return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED;
+ property->write(mData);
+ delete property;
+ return MTP_RESPONSE_OK;
+}
+
+} // namespace android
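Note (reviewer sketch, not part of this change): addStorage() above derives storage IDs by packing the 1-based index into both the high and low 16 bits of the 32-bit MtpStorageID, so the first storage becomes 0x00010001, the second 0x00020002, and so on. A standalone illustration:

#include <stdint.h>
#include <stdio.h>

int main() {
    for (uint32_t index = 1; index <= 3; index++) {
        uint32_t id = index | (index << 16);   // same packing as MtpServer::addStorage()
        printf("storage index %u -> storage ID 0x%08X\n", index, id);
    }
    return 0;
}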
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
new file mode 100644
index 0000000..68a6564
--- /dev/null
+++ b/media/mtp/MtpServer.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_SERVER_H
+#define _MTP_SERVER_H
+
+#include "MtpRequestPacket.h"
+#include "MtpDataPacket.h"
+#include "MtpResponsePacket.h"
+#include "MtpEventPacket.h"
+#include "mtp.h"
+
+#include "MtpUtils.h"
+
+namespace android {
+
+class MtpDatabase;
+class MtpStorage;
+
+class MtpServer {
+
+private:
+ // file descriptor for MTP kernel driver
+ int mFD;
+
+ MtpDatabase* mDatabase;
+
+ // group to own new files and folders
+ int mFileGroup;
+ // permissions for new files and directories
+ int mFilePermission;
+ int mDirectoryPermission;
+
+ // current session ID
+ MtpSessionID mSessionID;
+ // true if we have an open session and mSessionID is valid
+ bool mSessionOpen;
+
+ MtpRequestPacket mRequest;
+ MtpDataPacket mData;
+ MtpResponsePacket mResponse;
+ MtpEventPacket mEvent;
+
+ MtpStorageList mStorages;
+
+ // handle for new object, set by SendObjectInfo and used by SendObject
+ MtpObjectHandle mSendObjectHandle;
+ MtpObjectFormat mSendObjectFormat;
+ MtpString mSendObjectFilePath;
+ size_t mSendObjectFileSize;
+
+public:
+ MtpServer(int fd, MtpDatabase* database,
+ int fileGroup, int filePerm, int directoryPerm);
+ virtual ~MtpServer();
+
+ void addStorage(const char* filePath);
+ inline void addStorage(MtpStorage* storage) { mStorages.push(storage); }
+ MtpStorage* getStorage(MtpStorageID id);
+ void run();
+
+ void sendObjectAdded(MtpObjectHandle handle);
+ void sendObjectRemoved(MtpObjectHandle handle);
+
+private:
+ bool handleRequest();
+
+ MtpResponseCode doGetDeviceInfo();
+ MtpResponseCode doOpenSession();
+ MtpResponseCode doCloseSession();
+ MtpResponseCode doGetStorageIDs();
+ MtpResponseCode doGetStorageInfo();
+ MtpResponseCode doGetObjectPropsSupported();
+ MtpResponseCode doGetObjectHandles();
+ MtpResponseCode doGetNumObjects();
+ MtpResponseCode doGetObjectReferences();
+ MtpResponseCode doSetObjectReferences();
+ MtpResponseCode doGetObjectPropValue();
+ MtpResponseCode doSetObjectPropValue();
+ MtpResponseCode doGetDevicePropValue();
+ MtpResponseCode doSetDevicePropValue();
+ MtpResponseCode doResetDevicePropValue();
+ MtpResponseCode doGetObjectInfo();
+ MtpResponseCode doGetObject();
+ MtpResponseCode doSendObjectInfo();
+ MtpResponseCode doSendObject();
+ MtpResponseCode doDeleteObject();
+ MtpResponseCode doGetObjectPropDesc();
+ MtpResponseCode doGetDevicePropDesc();
+};
+
+}; // namespace android
+
+#endif // _MTP_SERVER_H
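Note (reviewer sketch, not part of this change): a minimal example of driving the MtpServer API declared above. The device node path, group, and permission values are illustrative assumptions, and an MtpDatabase implementation must be supplied by the caller.

#include <fcntl.h>
#include <unistd.h>
#include "MtpServer.h"
#include "MtpDatabase.h"

using namespace android;

// Runs one MTP session over the kernel driver, assuming it is exposed
// at /dev/mtp_usb (hypothetical path) and that 'database' is already set up.
void serveMtp(MtpDatabase* database) {
    int fd = open("/dev/mtp_usb", O_RDWR);
    if (fd < 0)
        return;
    MtpServer server(fd, database, getgid(), 0664, 0775);  // example file/dir permissions
    server.addStorage("/sdcard");                          // registers storage ID 0x00010001
    server.run();                                          // blocks, handling requests until the driver signals an error
    close(fd);
}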
diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp
new file mode 100644
index 0000000..eccf186
--- /dev/null
+++ b/media/mtp/MtpStorage.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorage"
+
+#include "MtpDebug.h"
+#include "MtpDatabase.h"
+#include "MtpStorage.h"
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/statfs.h>
+#include <unistd.h>
+#include <dirent.h>
+#include <errno.h>
+#include <string.h>
+#include <stdio.h>
+#include <limits.h>
+
+namespace android {
+
+MtpStorage::MtpStorage(MtpStorageID id, const char* filePath, MtpDatabase* db)
+ : mStorageID(id),
+ mFilePath(filePath),
+ mDatabase(db),
+ mMaxCapacity(0)
+{
+ LOGD("MtpStorage id: %d path: %s\n", id, filePath);
+}
+
+MtpStorage::~MtpStorage() {
+}
+
+int MtpStorage::getType() const {
+ return MTP_STORAGE_FIXED_RAM;
+}
+
+int MtpStorage::getFileSystemType() const {
+ return MTP_STORAGE_FILESYSTEM_HIERARCHICAL;
+}
+
+int MtpStorage::getAccessCapability() const {
+ return MTP_STORAGE_READ_WRITE;
+}
+
+uint64_t MtpStorage::getMaxCapacity() {
+ if (mMaxCapacity == 0) {
+ struct statfs stat;
+ if (statfs(mFilePath, &stat))
+ return -1;
+ mMaxCapacity = (uint64_t)stat.f_blocks * (uint64_t)stat.f_bsize;
+ }
+ return mMaxCapacity;
+}
+
+uint64_t MtpStorage::getFreeSpace() {
+ struct statfs stat;
+ if (statfs(mFilePath, &stat))
+ return -1;
+ return (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize;
+}
+
+const char* MtpStorage::getDescription() const {
+ return "Device Storage";
+}
+
+} // namespace android
diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h
new file mode 100644
index 0000000..b13b926
--- /dev/null
+++ b/media/mtp/MtpStorage.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_H
+#define _MTP_STORAGE_H
+
+#include "mtp.h"
+
+namespace android {
+
+class MtpDatabase;
+
+class MtpStorage {
+
+private:
+ MtpStorageID mStorageID;
+ const char* mFilePath;
+ MtpDatabase* mDatabase;
+ uint64_t mMaxCapacity;
+
+public:
+ MtpStorage(MtpStorageID id, const char* filePath, MtpDatabase* db);
+ virtual ~MtpStorage();
+
+ inline MtpStorageID getStorageID() const { return mStorageID; }
+ int getType() const;
+ int getFileSystemType() const;
+ int getAccessCapability() const;
+ uint64_t getMaxCapacity();
+ uint64_t getFreeSpace();
+ const char* getDescription() const;
+ inline const char* getPath() const { return mFilePath; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_H
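Note (reviewer sketch, not part of this change): querying a storage unit directly through the accessors above; the path and ID are illustrative.

#include <stdio.h>
#include "MtpStorage.h"
#include "MtpDatabase.h"

using namespace android;

void printStorage(MtpDatabase* db) {
    MtpStorage storage(0x00010001, "/sdcard", db);   // ID packed the same way MtpServer::addStorage() does
    printf("%s: %llu bytes total, %llu bytes free\n",
            storage.getDescription(),
            (unsigned long long)storage.getMaxCapacity(),
            (unsigned long long)storage.getFreeSpace());
}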
diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp
new file mode 100644
index 0000000..ca64ac0
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStorageInfo"
+
+#include "MtpDebug.h"
+#include "MtpDataPacket.h"
+#include "MtpStorageInfo.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStorageInfo::MtpStorageInfo(MtpStorageID id)
+ : mStorageID(id),
+ mStorageType(0),
+ mFileSystemType(0),
+ mAccessCapability(0),
+ mMaxCapacity(0),
+ mFreeSpaceBytes(0),
+ mFreeSpaceObjects(0),
+ mStorageDescription(NULL),
+ mVolumeIdentifier(NULL)
+{
+}
+
+MtpStorageInfo::~MtpStorageInfo() {
+ if (mStorageDescription)
+ free(mStorageDescription);
+ if (mVolumeIdentifier)
+ free(mVolumeIdentifier);
+}
+
+void MtpStorageInfo::read(MtpDataPacket& packet) {
+ MtpStringBuffer string;
+
+ // read the device info
+ mStorageType = packet.getUInt16();
+ mFileSystemType = packet.getUInt16();
+ mAccessCapability = packet.getUInt16();
+ mMaxCapacity = packet.getUInt64();
+ mFreeSpaceBytes = packet.getUInt64();
+ mFreeSpaceObjects = packet.getUInt32();
+
+ packet.getString(string);
+ mStorageDescription = strdup((const char *)string);
+ packet.getString(string);
+ mVolumeIdentifier = strdup((const char *)string);
+}
+
+void MtpStorageInfo::print() {
+ LOGD("Storage Info %08X:\n\tmStorageType: %d\n\tmFileSystemType: %d\n\tmAccessCapability: %d\n",
+ mStorageID, mStorageType, mFileSystemType, mAccessCapability);
+ LOGD("\tmMaxCapacity: %lld\n\tmFreeSpaceBytes: %lld\n\tmFreeSpaceObjects: %d\n",
+ mMaxCapacity, mFreeSpaceBytes, mFreeSpaceObjects);
+ LOGD("\tmStorageDescription: %s\n\tmVolumeIdentifier: %s\n",
+ mStorageDescription, mVolumeIdentifier);
+}
+
+} // namespace android
diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h
new file mode 100644
index 0000000..2cb626e
--- /dev/null
+++ b/media/mtp/MtpStorageInfo.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STORAGE_INFO_H
+#define _MTP_STORAGE_INFO_H
+
+#include "MtpTypes.h"
+
+namespace android {
+
+class MtpDataPacket;
+
+class MtpStorageInfo {
+public:
+ MtpStorageID mStorageID;
+ uint16_t mStorageType;
+ uint16_t mFileSystemType;
+ uint16_t mAccessCapability;
+ uint64_t mMaxCapacity;
+ uint64_t mFreeSpaceBytes;
+ uint32_t mFreeSpaceObjects;
+ char* mStorageDescription;
+ char* mVolumeIdentifier;
+
+public:
+ MtpStorageInfo(MtpStorageID id);
+ virtual ~MtpStorageInfo();
+
+ void read(MtpDataPacket& packet);
+
+ void print();
+};
+
+}; // namespace android
+
+#endif // _MTP_STORAGE_INFO_H
diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp
new file mode 100644
index 0000000..fe8cf04
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.cpp
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpStringBuffer"
+
+#include <string.h>
+
+#include "MtpDataPacket.h"
+#include "MtpStringBuffer.h"
+
+namespace android {
+
+MtpStringBuffer::MtpStringBuffer()
+ : mCharCount(0),
+ mByteCount(1)
+{
+ mBuffer[0] = 0;
+}
+
+MtpStringBuffer::MtpStringBuffer(const char* src)
+ : mCharCount(0),
+ mByteCount(1)
+{
+ set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const uint16_t* src)
+ : mCharCount(0),
+ mByteCount(1)
+{
+ set(src);
+}
+
+MtpStringBuffer::MtpStringBuffer(const MtpStringBuffer& src)
+ : mCharCount(src.mCharCount),
+ mByteCount(src.mByteCount)
+{
+ memcpy(mBuffer, src.mBuffer, mByteCount);
+}
+
+
+MtpStringBuffer::~MtpStringBuffer() {
+}
+
+void MtpStringBuffer::set(const char* src) {
+ int length = strlen(src);
+ if (length >= sizeof(mBuffer))
+ length = sizeof(mBuffer) - 1;
+ memcpy(mBuffer, src, length);
+
+ // count the characters
+ int count = 0;
+ char ch;
+ while ((ch = *src++) != 0) {
+ if ((ch & 0x80) == 0) {
+ // single byte character
+ } else if ((ch & 0xE0) == 0xC0) {
+ // two byte character
+ if (! *src++) {
+ // last character was truncated, so ignore last byte
+ length--;
+ break;
+ }
+ } else if ((ch & 0xF0) == 0xE0) {
+ // 3 byte char
+ if (! *src++) {
+ // last character was truncated, so ignore last byte
+ length--;
+ break;
+ }
+ if (! *src++) {
+ // last character was truncated, so ignore last two bytes
+ length -= 2;
+ break;
+ }
+ }
+ count++;
+ }
+
+ mByteCount = length + 1;
+ mBuffer[length] = 0;
+ mCharCount = count;
+}
+
+void MtpStringBuffer::set(const uint16_t* src) {
+ int count = 0;
+ uint16_t ch;
+ uint8_t* dest = mBuffer;
+
+ while ((ch = *src++) != 0 && count < 255) {
+ if (ch >= 0x0800) {
+ *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+ *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else if (ch >= 0x80) {
+ *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else {
+ *dest++ = ch;
+ }
+ count++;
+ }
+ *dest++ = 0;
+ mCharCount = count;
+ mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) {
+ int count = packet->getUInt8();
+ uint8_t* dest = mBuffer;
+ for (int i = 0; i < count; i++) {
+ uint16_t ch = packet->getUInt16();
+ if (ch >= 0x0800) {
+ *dest++ = (uint8_t)(0xE0 | (ch >> 12));
+ *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else if (ch >= 0x80) {
+ *dest++ = (uint8_t)(0xC0 | (ch >> 6));
+ *dest++ = (uint8_t)(0x80 | (ch & 0x3F));
+ } else {
+ *dest++ = ch;
+ }
+ }
+ *dest++ = 0;
+ mCharCount = count;
+ mByteCount = dest - mBuffer;
+}
+
+void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const {
+ int count = mCharCount;
+ const uint8_t* src = mBuffer;
+ packet->putUInt8(count > 0 ? count + 1 : 0);
+
+ // expand utf8 to 16 bit chars
+ for (int i = 0; i < count; i++) {
+ uint16_t ch;
+ uint16_t ch1 = *src++;
+ if ((ch1 & 0x80) == 0) {
+ // single byte character
+ ch = ch1;
+ } else if ((ch1 & 0xE0) == 0xC0) {
+ // two byte character
+ uint16_t ch2 = *src++;
+ ch = ((ch1 & 0x1F) << 6) | (ch2 & 0x3F);
+ } else {
+ // three byte character
+ uint16_t ch2 = *src++;
+ uint16_t ch3 = *src++;
+ ch = ((ch1 & 0x0F) << 12) | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F);
+ }
+ packet->putUInt16(ch);
+ }
+ // only terminate with zero if string is not empty
+ if (count > 0)
+ packet->putUInt16(0);
+}
+
+} // namespace android
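Note (reviewer sketch, not part of this change): a standalone worked example of the UTF-16 to UTF-8 expansion implemented by set(const uint16_t*) and readFromPacket() above — code units below 0x80 take one byte, below 0x0800 two bytes, and everything else three bytes.

#include <stdint.h>
#include <stdio.h>

static int encodeUtf8(uint16_t ch, uint8_t* out) {
    if (ch >= 0x0800) {
        out[0] = (uint8_t)(0xE0 | (ch >> 12));
        out[1] = (uint8_t)(0x80 | ((ch >> 6) & 0x3F));
        out[2] = (uint8_t)(0x80 | (ch & 0x3F));
        return 3;
    } else if (ch >= 0x80) {
        out[0] = (uint8_t)(0xC0 | (ch >> 6));
        out[1] = (uint8_t)(0x80 | (ch & 0x3F));
        return 2;
    }
    out[0] = (uint8_t)ch;
    return 1;
}

int main() {
    // 'A' (U+0041) -> 41, e-acute (U+00E9) -> C3 A9, euro sign (U+20AC) -> E2 82 AC
    const uint16_t samples[] = { 0x0041, 0x00E9, 0x20AC };
    for (int i = 0; i < 3; i++) {
        uint8_t buf[3];
        int n = encodeUtf8(samples[i], buf);
        printf("U+%04X ->", samples[i]);
        for (int j = 0; j < n; j++)
            printf(" %02X", buf[j]);
        printf("\n");
    }
    return 0;
}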
diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h
new file mode 100644
index 0000000..cbc8307
--- /dev/null
+++ b/media/mtp/MtpStringBuffer.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_STRING_BUFFER_H
+#define _MTP_STRING_BUFFER_H
+
+#include <stdint.h>
+
+namespace android {
+
+class MtpDataPacket;
+
+// Represents a utf8 string, with a maximum of 255 characters
+class MtpStringBuffer {
+
+private:
+ // mBuffer contains string in UTF8 format
+ // maximum 3 bytes/character, with 1 extra for zero termination
+ uint8_t mBuffer[255 * 3 + 1];
+ int mCharCount;
+ int mByteCount;
+
+public:
+ MtpStringBuffer();
+ MtpStringBuffer(const char* src);
+ MtpStringBuffer(const uint16_t* src);
+ MtpStringBuffer(const MtpStringBuffer& src);
+ virtual ~MtpStringBuffer();
+
+ void set(const char* src);
+ void set(const uint16_t* src);
+
+ void readFromPacket(MtpDataPacket* packet);
+ void writeToPacket(MtpDataPacket* packet) const;
+
+ inline int getCharCount() const { return mCharCount; }
+ inline int getByteCount() const { return mByteCount; }
+
+ inline operator const char*() const { return (const char *)mBuffer; }
+};
+
+}; // namespace android
+
+#endif // _MTP_STRING_BUFFER_H
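Note (reviewer sketch, not part of this change): a short usage example of the class above; the sample string is arbitrary.

#include <stdio.h>
#include "MtpStringBuffer.h"

using namespace android;

int main() {
    MtpStringBuffer name("Andr\xC3\xA9");   // "Andre" with an e-acute, in UTF-8
    printf("chars: %d, bytes (incl. terminator): %d, text: %s\n",
            name.getCharCount(), name.getByteCount(), (const char*)name);
    return 0;
}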
diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h
new file mode 100644
index 0000000..720c854
--- /dev/null
+++ b/media/mtp/MtpTypes.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_TYPES_H
+#define _MTP_TYPES_H
+
+#include <stdint.h>
+#include "utils/String8.h"
+#include "utils/Vector.h"
+
+namespace android {
+
+typedef int32_t int128_t[4];
+typedef uint32_t uint128_t[4];
+
+typedef uint16_t MtpOperationCode;
+typedef uint16_t MtpResponseCode;
+typedef uint16_t MtpEventCode;
+typedef uint32_t MtpSessionID;
+typedef uint32_t MtpStorageID;
+typedef uint32_t MtpTransactionID;
+typedef uint16_t MtpPropertyCode;
+typedef uint16_t MtpDataType;
+typedef uint16_t MtpObjectFormat;
+typedef MtpPropertyCode MtpDeviceProperty;
+typedef MtpPropertyCode MtpObjectProperty;
+
+// object handles are unique across all storage but only within a single session.
+// object handles cannot be reused after an object is deleted.
+// values 0x00000000 and 0xFFFFFFFF are reserved for special purposes.
+typedef uint32_t MtpObjectHandle;
+
+// Special values
+#define MTP_PARENT_ROOT 0xFFFFFFFF // parent is root of the storage
+#define kInvalidObjectHandle 0xFFFFFFFF
+
+class MtpStorage;
+class MtpDevice;
+class MtpProperty;
+
+typedef Vector<MtpStorage *> MtpStorageList;
+typedef Vector<MtpDevice*> MtpDeviceList;
+typedef Vector<MtpProperty*> MtpPropertyList;
+
+typedef Vector<uint8_t> UInt8List;
+typedef Vector<uint16_t> UInt16List;
+typedef Vector<uint32_t> UInt32List;
+typedef Vector<uint64_t> UInt64List;
+typedef Vector<int8_t> Int8List;
+typedef Vector<int16_t> Int16List;
+typedef Vector<int32_t> Int32List;
+typedef Vector<int64_t> Int64List;
+
+typedef UInt16List MtpObjectPropertyList;
+typedef UInt16List MtpDevicePropertyList;
+typedef UInt16List MtpObjectFormatList;
+typedef UInt32List MtpObjectHandleList;
+typedef UInt16List MtpObjectPropertyList;
+typedef UInt32List MtpStorageIDList;
+
+typedef String8 MtpString;
+
+}; // namespace android
+
+#endif // _MTP_TYPES_H
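Note (reviewer sketch, not part of this change): the list typedefs above are thin wrappers around android::Vector; for example, an object-handle list such as a database implementation would return can be built like this.

#include <stdio.h>
#include "MtpTypes.h"

using namespace android;

int main() {
    MtpObjectHandleList handles;          // Vector<uint32_t>
    handles.push(1);
    handles.push(2);
    for (size_t i = 0; i < handles.size(); i++)
        printf("handle %u\n", handles[i]);
    return 0;
}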
diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp
new file mode 100644
index 0000000..ab01ef5
--- /dev/null
+++ b/media/mtp/MtpUtils.cpp
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "MtpUtils"
+
+#include <stdio.h>
+#include <time.h>
+
+#include <cutils/tztime.h>
+#include "MtpUtils.h"
+
+namespace android {
+
+/*
+DateTime strings follow a compatible subset of the definition found in ISO 8601, and
+take the form of a Unicode string formatted as: "YYYYMMDDThhmmss.s". In this
+representation, YYYY shall be replaced by the year, MM replaced by the month (01-12),
+DD replaced by the day (01-31), T is a constant character 'T' delimiting time from date,
+hh is replaced by the hour (00-23), mm is replaced by the minute (00-59), and ss by the
+second (00-59). The ".s" is optional, and represents tenths of a second.
+*/
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds) {
+ int year, month, day, hour, minute, second;
+ struct tm tm;
+
+ if (sscanf(dateTime, "%04d%02d%02dT%02d%02d%02d",
+ &year, &month, &day, &hour, &minute, &second) != 6)
+ return false;
+ const char* tail = dateTime + 15;
+ // skip optional tenth of second
+ if (tail[0] == '.' && tail[1])
+ tail += 2;
+ //FIXME - support +/-hhmm
+ bool useUTC = (tail[0] == 'Z');
+
+ // hack to compute timezone
+ time_t dummy;
+ localtime_r(&dummy, &tm);
+
+ tm.tm_sec = second;
+ tm.tm_min = minute;
+ tm.tm_hour = hour;
+ tm.tm_mday = day;
+ tm.tm_mon = month - 1; // struct tm months are 0-11, MTP DateTime months are 01-12
+ tm.tm_year = year - 1900;
+ tm.tm_wday = 0;
+ tm.tm_isdst = -1;
+ if (useUTC)
+ outSeconds = mktime(&tm);
+ else
+ outSeconds = mktime_tz(&tm, tm.tm_zone);
+
+ return true;
+}
+
+void formatDateTime(time_t seconds, char* buffer, int bufferLength) {
+ struct tm tm;
+
+ localtime_r(&seconds, &tm);
+ snprintf(buffer, bufferLength, "%04d%02d%02dT%02d%02d%02d",
+ tm.tm_year + 1900, tm.tm_mon + 1, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec);
+}
+
+} // namespace android
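Note (reviewer sketch, not part of this change): a quick usage example of the helpers above, assuming local time — an MTP DateTime string is parsed to seconds and formatted back.

#include <stdio.h>
#include <time.h>
#include "MtpUtils.h"

using namespace android;

int main() {
    time_t seconds;
    char buffer[32];
    if (parseDateTime("20100630T093015", seconds)) {
        formatDateTime(seconds, buffer, sizeof(buffer));
        printf("parsed to %ld, formatted back to %s\n", (long)seconds, buffer);
    }
    return 0;
}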
diff --git a/media/mtp/MtpUtils.h b/media/mtp/MtpUtils.h
new file mode 100644
index 0000000..61f9055
--- /dev/null
+++ b/media/mtp/MtpUtils.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_UTILS_H
+#define _MTP_UTILS_H
+
+#include <stdint.h>
+
+namespace android {
+
+bool parseDateTime(const char* dateTime, time_t& outSeconds);
+void formatDateTime(time_t seconds, char* buffer, int bufferLength);
+
+}; // namespace android
+
+#endif // _MTP_UTILS_H
diff --git a/media/mtp/mtp.h b/media/mtp/mtp.h
new file mode 100644
index 0000000..b7afa66
--- /dev/null
+++ b/media/mtp/mtp.h
@@ -0,0 +1,475 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _MTP_H
+#define _MTP_H
+
+#include <stdint.h>
+#include <stdlib.h>
+
+#define MTP_STANDARD_VERSION 100
+
+// Container Types
+#define MTP_CONTAINER_TYPE_UNDEFINED 0
+#define MTP_CONTAINER_TYPE_COMMAND 1
+#define MTP_CONTAINER_TYPE_DATA 2
+#define MTP_CONTAINER_TYPE_RESPONSE 3
+#define MTP_CONTAINER_TYPE_EVENT 4
+
+// Container Offsets
+#define MTP_CONTAINER_LENGTH_OFFSET 0
+#define MTP_CONTAINER_TYPE_OFFSET 4
+#define MTP_CONTAINER_CODE_OFFSET 6
+#define MTP_CONTAINER_TRANSACTION_ID_OFFSET 8
+#define MTP_CONTAINER_PARAMETER_OFFSET 12
+#define MTP_CONTAINER_HEADER_SIZE 12
+
+// MTP Types
+#define MTP_TYPE_UNDEFINED 0x0000 // Undefined
+#define MTP_TYPE_INT8 0x0001 // Signed 8-bit integer
+#define MTP_TYPE_UINT8 0x0002 // Unsigned 8-bit integer
+#define MTP_TYPE_INT16 0x0003 // Signed 16-bit integer
+#define MTP_TYPE_UINT16 0x0004 // Unsigned 16-bit integer
+#define MTP_TYPE_INT32 0x0005 // Signed 32-bit integer
+#define MTP_TYPE_UINT32 0x0006 // Unsigned 32-bit integer
+#define MTP_TYPE_INT64 0x0007 // Signed 64-bit integer
+#define MTP_TYPE_UINT64 0x0008 // Unsigned 64-bit integer
+#define MTP_TYPE_INT128 0x0009 // Signed 128-bit integer
+#define MTP_TYPE_UINT128 0x000A // Unsigned 128-bit integer
+#define MTP_TYPE_AINT8 0x4001 // Array of signed 8-bit integers
+#define MTP_TYPE_AUINT8 0x4002 // Array of unsigned 8-bit integers
+#define MTP_TYPE_AINT16 0x4003 // Array of signed 16-bit integers
+#define MTP_TYPE_AUINT16 0x4004 // Array of unsigned 16-bit integers
+#define MTP_TYPE_AINT32 0x4005 // Array of signed 32-bit integers
+#define MTP_TYPE_AUINT32 0x4006 // Array of unsigned 32-bit integers
+#define MTP_TYPE_AINT64 0x4007 // Array of signed 64-bit integers
+#define MTP_TYPE_AUINT64 0x4008 // Array of unsigned 64-bit integers
+#define MTP_TYPE_AINT128 0x4009 // Array of signed 128-bit integers
+#define MTP_TYPE_AUINT128 0x400A // Array of unsigned 128-bit integers
+#define MTP_TYPE_STR 0xFFFF // Variable-length Unicode string
+
+// MTP Format Codes
+#define MTP_FORMAT_UNDEFINED 0x3000 // Undefined object
+#define MTP_FORMAT_ASSOCIATION 0x3001 // Association (for example, a folder)
+#define MTP_FORMAT_SCRIPT 0x3002 // Device model-specific script
+#define MTP_FORMAT_EXECUTABLE 0x3003 // Device model-specific binary executable
+#define MTP_FORMAT_TEXT 0x3004 // Text file
+#define MTP_FORMAT_HTML 0x3005 // Hypertext Markup Language file (text)
+#define MTP_FORMAT_DPOF 0x3006 // Digital Print Order Format file (text)
+#define MTP_FORMAT_AIFF 0x3007 // Audio clip
+#define MTP_FORMAT_WAV 0x3008 // Audio clip
+#define MTP_FORMAT_MP3 0x3009 // Audio clip
+#define MTP_FORMAT_AVI 0x300A // Video clip
+#define MTP_FORMAT_MPEG 0x300B // Video clip
+#define MTP_FORMAT_ASF 0x300C // Microsoft Advanced Streaming Format (video)
+#define MTP_FORMAT_DEFINED 0x3800 // Unknown image object
+#define MTP_FORMAT_EXIF_JPEG 0x3801 // Exchangeable File Format, JEIDA standard
+#define MTP_FORMAT_TIFF_EP 0x3802 // Tag Image File Format for Electronic Photography
+#define MTP_FORMAT_FLASHPIX 0x3803 // Structured Storage Image Format
+#define MTP_FORMAT_BMP 0x3804 // Microsoft Windows Bitmap file
+#define MTP_FORMAT_CIFF 0x3805 // Canon Camera Image File Format
+#define MTP_FORMAT_GIF 0x3807 // Graphics Interchange Format
+#define MTP_FORMAT_JFIF 0x3808 // JPEG File Interchange Format
+#define MTP_FORMAT_CD 0x3809 // PhotoCD Image Pac
+#define MTP_FORMAT_PICT 0x380A // Quickdraw Image Format
+#define MTP_FORMAT_PNG 0x380B // Portable Network Graphics
+#define MTP_FORMAT_TIFF 0x380D // Tag Image File Format
+#define MTP_FORMAT_TIFF_IT 0x380E // Tag Image File Format for Information Technology (graphic arts)
+#define MTP_FORMAT_JP2 0x380F // JPEG2000 Baseline File Format
+#define MTP_FORMAT_JPX 0x3810 // JPEG2000 Extended File Format
+#define MTP_FORMAT_UNDEFINED_FIRMWARE 0xB802
+#define MTP_FORMAT_WINDOWS_IMAGE_FORMAT 0xB881
+#define MTP_FORMAT_UNDEFINED_AUDIO 0xB900
+#define MTP_FORMAT_WMA 0xB901
+#define MTP_FORMAT_OGG 0xB902
+#define MTP_FORMAT_AAC 0xB903
+#define MTP_FORMAT_AUDIBLE 0xB904
+#define MTP_FORMAT_FLAC 0xB906
+#define MTP_FORMAT_UNDEFINED_VIDEO 0xB980
+#define MTP_FORMAT_WMV 0xB981
+#define MTP_FORMAT_MP4_CONTAINER 0xB982 // ISO 14496-1
+#define MTP_FORMAT_MP2 0xB983
+#define MTP_FORMAT_3GP_CONTAINER 0xB984 // 3GPP file format. Details: http://www.3gpp.org/ftp/Specs/html-info/26244.htm (page title - "Transparent end-to-end packet switched streaming service, 3GPP file format").
+#define MTP_FORMAT_UNDEFINED_COLLECTION 0xBA00
+#define MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM 0xBA01
+#define MTP_FORMAT_ABSTRACT_IMAGE_ALBUM 0xBA02
+#define MTP_FORMAT_ABSTRACT_AUDIO_ALBUM 0xBA03
+#define MTP_FORMAT_ABSTRACT_VIDEO_ALBUM 0xBA04
+#define MTP_FORMAT_ABSTRACT_AV_PLAYLIST 0xBA05
+#define MTP_FORMAT_ABSTRACT_CONTACT_GROUP 0xBA06
+#define MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER 0xBA07
+#define MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION 0xBA08
+#define MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST 0xBA09
+#define MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST 0xBA0A
+#define MTP_FORMAT_ABSTRACT_MEDIACAST 0xBA0B // For use with mediacasts; references multimedia enclosures of RSS feeds or episodic content
+#define MTP_FORMAT_WPL_PLAYLIST 0xBA10
+#define MTP_FORMAT_M3U_PLAYLIST 0xBA11
+#define MTP_FORMAT_MPL_PLAYLIST 0xBA12
+#define MTP_FORMAT_ASX_PLAYLIST 0xBA13
+#define MTP_FORMAT_PLS_PLAYLIST 0xBA14
+#define MTP_FORMAT_UNDEFINED_DOCUMENT 0xBA80
+#define MTP_FORMAT_ABSTRACT_DOCUMENT 0xBA81
+#define MTP_FORMAT_XML_DOCUMENT 0xBA82
+#define MTP_FORMAT_MS_WORD_DOCUMENT 0xBA83
+#define MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT 0xBA84
+#define MTP_FORMAT_MS_EXCEL_SPREADSHEET 0xBA85
+#define MTP_FORMAT_MS_POWERPOINT_PRESENTATION 0xBA86
+#define MTP_FORMAT_UNDEFINED_MESSAGE 0xBB00
+#define MTP_FORMAT_ABSTRACT_MESSSAGE 0xBB01
+#define MTP_FORMAT_UNDEFINED_CONTACT 0xBB80
+#define MTP_FORMAT_ABSTRACT_CONTACT 0xBB81
+#define MTP_FORMAT_VCARD_2 0xBB82
+
+// MTP Object Property Codes
+#define MTP_PROPERTY_STORAGE_ID 0xDC01
+#define MTP_PROPERTY_OBJECT_FORMAT 0xDC02
+#define MTP_PROPERTY_PROTECTION_STATUS 0xDC03
+#define MTP_PROPERTY_OBJECT_SIZE 0xDC04
+#define MTP_PROPERTY_ASSOCIATION_TYPE 0xDC05
+#define MTP_PROPERTY_ASSOCIATION_DESC 0xDC06
+#define MTP_PROPERTY_OBJECT_FILE_NAME 0xDC07
+#define MTP_PROPERTY_DATE_CREATED 0xDC08
+#define MTP_PROPERTY_DATE_MODIFIED 0xDC09
+#define MTP_PROPERTY_KEYWORDS 0xDC0A
+#define MTP_PROPERTY_PARENT_OBJECT 0xDC0B
+#define MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS 0xDC0C
+#define MTP_PROPERTY_HIDDEN 0xDC0D
+#define MTP_PROPERTY_SYSTEM_OBJECT 0xDC0E
+#define MTP_PROPERTY_PERSISTENT_UID 0xDC41
+#define MTP_PROPERTY_SYNC_ID 0xDC42
+#define MTP_PROPERTY_PROPERTY_BAG 0xDC43
+#define MTP_PROPERTY_NAME 0xDC44
+#define MTP_PROPERTY_CREATED_BY 0xDC45
+#define MTP_PROPERTY_ARTIST 0xDC46
+#define MTP_PROPERTY_DATE_AUTHORED 0xDC47
+#define MTP_PROPERTY_DESCRIPTION 0xDC48
+#define MTP_PROPERTY_URL_REFERENCE 0xDC49
+#define MTP_PROPERTY_LANGUAGE_LOCALE 0xDC4A
+#define MTP_PROPERTY_COPYRIGHT_INFORMATION 0xDC4B
+#define MTP_PROPERTY_SOURCE 0xDC4C
+#define MTP_PROPERTY_ORIGIN_LOCATION 0xDC4D
+#define MTP_PROPERTY_DATE_ADDED 0xDC4E
+#define MTP_PROPERTY_NON_CONSUMABLE 0xDC4F
+#define MTP_PROPERTY_CORRUPT_UNPLAYABLE 0xDC50
+#define MTP_PROPERTY_PRODUCER_SERIAL_NUMBER 0xDC51
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT 0xDC81
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE 0xDC82
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT 0xDC83
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH 0xDC84
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION 0xDC85
+#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA 0xDC86
+#define MTP_PROPERTY_WIDTH 0xDC87
+#define MTP_PROPERTY_HEIGHT 0xDC88
+#define MTP_PROPERTY_DURATION 0xDC89
+#define MTP_PROPERTY_RATING 0xDC8A
+#define MTP_PROPERTY_TRACK 0xDC8B
+#define MTP_PROPERTY_GENRE 0xDC8C
+#define MTP_PROPERTY_CREDITS 0xDC8D
+#define MTP_PROPERTY_LYRICS 0xDC8E
+#define MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID 0xDC8F
+#define MTP_PROPERTY_PRODUCED_BY 0xDC90
+#define MTP_PROPERTY_USE_COUNT 0xDC91
+#define MTP_PROPERTY_SKIP_COUNT 0xDC92
+#define MTP_PROPERTY_LAST_ACCESSED 0xDC93
+#define MTP_PROPERTY_PARENTAL_RATING 0xDC94
+#define MTP_PROPERTY_META_GENRE 0xDC95
+#define MTP_PROPERTY_COMPOSER 0xDC96
+#define MTP_PROPERTY_EFFECTIVE_RATING 0xDC97
+#define MTP_PROPERTY_SUBTITLE 0xDC98
+#define MTP_PROPERTY_ORIGINAL_RELEASE_DATE 0xDC99
+#define MTP_PROPERTY_ALBUM_NAME 0xDC9A
+#define MTP_PROPERTY_ALBUM_ARTIST 0xDC9B
+#define MTP_PROPERTY_MOOD 0xDC9C
+#define MTP_PROPERTY_DRM_STATUS 0xDC9D
+#define MTP_PROPERTY_SUB_DESCRIPTION 0xDC9E
+#define MTP_PROPERTY_IS_CROPPED 0xDCD1
+#define MTP_PROPERTY_IS_COLOUR_CORRECTED 0xDCD2
+#define MTP_PROPERTY_IMAGE_BIT_DEPTH 0xDCD3
+#define MTP_PROPERTY_F_NUMBER 0xDCD4
+#define MTP_PROPERTY_EXPOSURE_TIME 0xDCD5
+#define MTP_PROPERTY_EXPOSURE_INDEX 0xDCD6
+#define MTP_PROPERTY_TOTAL_BITRATE 0xDE91
+#define MTP_PROPERTY_BITRATE_TYPE 0xDE92
+#define MTP_PROPERTY_SAMPLE_RATE 0xDE93
+#define MTP_PROPERTY_NUMBER_OF_CHANNELS 0xDE94
+#define MTP_PROPERTY_AUDIO_BIT_DEPTH 0xDE95
+#define MTP_PROPERTY_SCAN_TYPE 0xDE97
+#define MTP_PROPERTY_AUDIO_WAVE_CODEC 0xDE99
+#define MTP_PROPERTY_AUDIO_BITRATE 0xDE9A
+#define MTP_PROPERTY_VIDEO_FOURCC_CODEC 0xDE9B
+#define MTP_PROPERTY_VIDEO_BITRATE 0xDE9C
+#define MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS 0xDE9D
+#define MTP_PROPERTY_KEYFRAME_DISTANCE 0xDE9E
+#define MTP_PROPERTY_BUFFER_SIZE 0xDE9F
+#define MTP_PROPERTY_ENCODING_QUALITY 0xDEA0
+#define MTP_PROPERTY_ENCODING_PROFILE 0xDEA1
+#define MTP_PROPERTY_DISPLAY_NAME 0xDCE0
+#define MTP_PROPERTY_BODY_TEXT 0xDCE1
+#define MTP_PROPERTY_SUBJECT 0xDCE2
+#define MTP_PROPERTY_PRIORITY 0xDCE3
+#define MTP_PROPERTY_GIVEN_NAME 0xDD00
+#define MTP_PROPERTY_MIDDLE_NAMES 0xDD01
+#define MTP_PROPERTY_FAMILY_NAME 0xDD02
+#define MTP_PROPERTY_PREFIX 0xDD03
+#define MTP_PROPERTY_SUFFIX 0xDD04
+#define MTP_PROPERTY_PHONETIC_GIVEN_NAME 0xDD05
+#define MTP_PROPERTY_PHONETIC_FAMILY_NAME 0xDD06
+#define MTP_PROPERTY_EMAIL_PRIMARY 0xDD07
+#define MTP_PROPERTY_EMAIL_PERSONAL_1 0xDD08
+#define MTP_PROPERTY_EMAIL_PERSONAL_2 0xDD09
+#define MTP_PROPERTY_EMAIL_BUSINESS_1 0xDD0A
+#define MTP_PROPERTY_EMAIL_BUSINESS_2 0xDD0B
+#define MTP_PROPERTY_EMAIL_OTHERS 0xDD0C
+#define MTP_PROPERTY_PHONE_NUMBER_PRIMARY 0xDD0D
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL 0xDD0E
+#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2 0xDD0F
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS 0xDD10
+#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2 0xDD11
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE 0xDD12
+#define MTP_PROPERTY_PHONE_NUMBER_MOBILE_2 0xDD13
+#define MTP_PROPERTY_FAX_NUMBER_PRIMARY 0xDD14
+#define MTP_PROPERTY_FAX_NUMBER_PERSONAL 0xDD15
+#define MTP_PROPERTY_FAX_NUMBER_BUSINESS 0xDD16
+#define MTP_PROPERTY_PAGER_NUMBER 0xDD17
+#define MTP_PROPERTY_PHONE_NUMBER_OTHERS 0xDD18
+#define MTP_PROPERTY_PRIMARY_WEB_ADDRESS 0xDD19
+#define MTP_PROPERTY_PERSONAL_WEB_ADDRESS 0xDD1A
+#define MTP_PROPERTY_BUSINESS_WEB_ADDRESS 0xDD1B
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS 0xDD1C
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2 0xDD1D
+#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3 0xDD1E
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL 0xDD1F
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1 0xDD20
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2 0xDD21
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY 0xDD22
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION 0xDD23
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE 0xDD24
+#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY 0xDD25
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL 0xDD26
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1 0xDD27
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2 0xDD28
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY 0xDD29
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION 0xDD2A
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE 0xDD2B
+#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY 0xDD2C
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL 0xDD2D
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1 0xDD2E
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2 0xDD2F
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY 0xDD30
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION 0xDD31
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE 0xDD32
+#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY 0xDD33
+#define MTP_PROPERTY_ORGANIZATION_NAME 0xDD34
+#define MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME 0xDD35
+#define MTP_PROPERTY_ROLE 0xDD36
+#define MTP_PROPERTY_BIRTHDATE 0xDD37
+#define MTP_PROPERTY_MESSAGE_TO 0xDD40
+#define MTP_PROPERTY_MESSAGE_CC 0xDD41
+#define MTP_PROPERTY_MESSAGE_BCC 0xDD42
+#define MTP_PROPERTY_MESSAGE_READ 0xDD43
+#define MTP_PROPERTY_MESSAGE_RECEIVED_TIME 0xDD44
+#define MTP_PROPERTY_MESSAGE_SENDER 0xDD45
+#define MTP_PROPERTY_ACTIVITY_BEGIN_TIME 0xDD50
+#define MTP_PROPERTY_ACTIVITY_END_TIME 0xDD51
+#define MTP_PROPERTY_ACTIVITY_LOCATION 0xDD52
+#define MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES 0xDD54
+#define MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES 0xDD55
+#define MTP_PROPERTY_ACTIVITY_RESOURCES 0xDD56
+#define MTP_PROPERTY_ACTIVITY_ACCEPTED 0xDD57
+#define MTP_PROPERTY_ACTIVITY_TENTATIVE 0xDD58
+#define MTP_PROPERTY_ACTIVITY_DECLINED 0xDD59
+#define MTP_PROPERTY_ACTIVITY_REMAINDER_TIME 0xDD5A
+#define MTP_PROPERTY_ACTIVITY_OWNER 0xDD5B
+#define MTP_PROPERTY_ACTIVITY_STATUS 0xDD5C
+#define MTP_PROPERTY_OWNER 0xDD5D
+#define MTP_PROPERTY_EDITOR 0xDD5E
+#define MTP_PROPERTY_WEBMASTER 0xDD5F
+#define MTP_PROPERTY_URL_SOURCE 0xDD60
+#define MTP_PROPERTY_URL_DESTINATION 0xDD61
+#define MTP_PROPERTY_TIME_BOOKMARK 0xDD62
+#define MTP_PROPERTY_OBJECT_BOOKMARK 0xDD63
+#define MTP_PROPERTY_BYTE_BOOKMARK 0xDD64
+#define MTP_PROPERTY_LAST_BUILD_DATE 0xDD70
+#define MTP_PROPERTY_TIME_TO_LIVE 0xDD71
+#define MTP_PROPERTY_MEDIA_GUID 0xDD72
+
+// MTP Device Property Codes
+#define MTP_DEVICE_PROPERTY_UNDEFINED 0x5000
+#define MTP_DEVICE_PROPERTY_BATTERY_LEVEL 0x5001
+#define MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE 0x5002
+#define MTP_DEVICE_PROPERTY_IMAGE_SIZE 0x5003
+#define MTP_DEVICE_PROPERTY_COMPRESSION_SETTING 0x5004
+#define MTP_DEVICE_PROPERTY_WHITE_BALANCE 0x5005
+#define MTP_DEVICE_PROPERTY_RGB_GAIN 0x5006
+#define MTP_DEVICE_PROPERTY_F_NUMBER 0x5007
+#define MTP_DEVICE_PROPERTY_FOCAL_LENGTH 0x5008
+#define MTP_DEVICE_PROPERTY_FOCUS_DISTANCE 0x5009
+#define MTP_DEVICE_PROPERTY_FOCUS_MODE 0x500A
+#define MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE 0x500B
+#define MTP_DEVICE_PROPERTY_FLASH_MODE 0x500C
+#define MTP_DEVICE_PROPERTY_EXPOSURE_TIME 0x500D
+#define MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE 0x500E
+#define MTP_DEVICE_PROPERTY_EXPOSURE_INDEX 0x500F
+#define MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION 0x5010
+#define MTP_DEVICE_PROPERTY_DATETIME 0x5011
+#define MTP_DEVICE_PROPERTY_CAPTURE_DELAY 0x5012
+#define MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE 0x5013
+#define MTP_DEVICE_PROPERTY_CONTRAST 0x5014
+#define MTP_DEVICE_PROPERTY_SHARPNESS 0x5015
+#define MTP_DEVICE_PROPERTY_DIGITAL_ZOOM 0x5016
+#define MTP_DEVICE_PROPERTY_EFFECT_MODE 0x5017
+#define MTP_DEVICE_PROPERTY_BURST_NUMBER 0x5018
+#define MTP_DEVICE_PROPERTY_BURST_INTERVAL 0x5019
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER 0x501A
+#define MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL 0x501B
+#define MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE 0x501C
+#define MTP_DEVICE_PROPERTY_UPLOAD_URL 0x501D
+#define MTP_DEVICE_PROPERTY_ARTIST 0x501E
+#define MTP_DEVICE_PROPERTY_COPYRIGHT_INFO 0x501F
+#define MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER 0xD401
+#define MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME 0xD402
+#define MTP_DEVICE_PROPERTY_VOLUME 0xD403
+#define MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED 0xD404
+#define MTP_DEVICE_PROPERTY_DEVICE_ICON 0xD405
+#define MTP_DEVICE_PROPERTY_PLAYBACK_RATE 0xD410
+#define MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT 0xD411
+#define MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX 0xD412
+#define MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO 0xD406
+#define MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE 0xD407
+
+// MTP Operation Codes
+#define MTP_OPERATION_GET_DEVICE_INFO 0x1001
+#define MTP_OPERATION_OPEN_SESSION 0x1002
+#define MTP_OPERATION_CLOSE_SESSION 0x1003
+#define MTP_OPERATION_GET_STORAGE_IDS 0x1004
+#define MTP_OPERATION_GET_STORAGE_INFO 0x1005
+#define MTP_OPERATION_GET_NUM_OBJECTS 0x1006
+#define MTP_OPERATION_GET_OBJECT_HANDLES 0x1007
+#define MTP_OPERATION_GET_OBJECT_INFO 0x1008
+#define MTP_OPERATION_GET_OBJECT 0x1009
+#define MTP_OPERATION_GET_THUMB 0x100A
+#define MTP_OPERATION_DELETE_OBJECT 0x100B
+#define MTP_OPERATION_SEND_OBJECT_INFO 0x100C
+#define MTP_OPERATION_SEND_OBJECT 0x100D
+#define MTP_OPERATION_INITIATE_CAPTURE 0x100E
+#define MTP_OPERATION_FORMAT_STORE 0x100F
+#define MTP_OPERATION_RESET_DEVICE 0x1010
+#define MTP_OPERATION_SELF_TEST 0x1011
+#define MTP_OPERATION_SET_OBJECT_PROTECTION 0x1012
+#define MTP_OPERATION_POWER_DOWN 0x1013
+#define MTP_OPERATION_GET_DEVICE_PROP_DESC 0x1014
+#define MTP_OPERATION_GET_DEVICE_PROP_VALUE 0x1015
+#define MTP_OPERATION_SET_DEVICE_PROP_VALUE 0x1016
+#define MTP_OPERATION_RESET_DEVICE_PROP_VALUE 0x1017
+#define MTP_OPERATION_TERMINATE_OPEN_CAPTURE 0x1018
+#define MTP_OPERATION_MOVE_OBJECT 0x1019
+#define MTP_OPERATION_COPY_OBJECT 0x101A
+#define MTP_OPERATION_GET_PARTIAL_OBJECT 0x101B
+#define MTP_OPERATION_INITIATE_OPEN_CAPTURE 0x101C
+#define MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED 0x9801
+#define MTP_OPERATION_GET_OBJECT_PROP_DESC 0x9802
+#define MTP_OPERATION_GET_OBJECT_PROP_VALUE 0x9803
+#define MTP_OPERATION_SET_OBJECT_PROP_VALUE 0x9804
+#define MTP_OPERATION_GET_OBJECT_REFERENCES 0x9810
+#define MTP_OPERATION_SET_OBJECT_REFERENCES 0x9811
+#define MTP_OPERATION_SKIP 0x9820
+
+// MTP Response Codes
+#define MTP_RESPONSE_UNDEFINED 0x2000
+#define MTP_RESPONSE_OK 0x2001
+#define MTP_RESPONSE_GENERAL_ERROR 0x2002
+#define MTP_RESPONSE_SESSION_NOT_OPEN 0x2003
+#define MTP_RESPONSE_INVALID_TRANSACTION_ID 0x2004
+#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED 0x2005
+#define MTP_RESPONSE_PARAMETER_NOT_SUPPORTED 0x2006
+#define MTP_RESPONSE_INCOMPLETE_TRANSFER 0x2007
+#define MTP_RESPONSE_INVALID_STORAGE_ID 0x2008
+#define MTP_RESPONSE_INVALID_OBJECT_HANDLE 0x2009
+#define MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED 0x200A
+#define MTP_RESPONSE_INVALID_OBJECT_FORMAT_CODE 0x200B
+#define MTP_RESPONSE_STORAGE_FULL 0x200C
+#define MTP_RESPONSE_OBJECT_WRITE_PROTECTED 0x200D
+#define MTP_RESPONSE_STORE_READ_ONLY 0x200E
+#define MTP_RESPONSE_ACCESS_DENIED 0x200F
+#define MTP_RESPONSE_NO_THUMBNAIL_PRESENT 0x2010
+#define MTP_RESPONSE_SELF_TEST_FAILED 0x2011
+#define MTP_RESPONSE_PARTIAL_DELETION 0x2012
+#define MTP_RESPONSE_STORE_NOT_AVAILABLE 0x2013
+#define MTP_RESPONSE_SPECIFICATION_BY_FORMAT_UNSUPPORTED 0x2014
+#define MTP_RESPONSE_NO_VALID_OBJECT_INFO 0x2015
+#define MTP_RESPONSE_INVALID_CODE_FORMAT 0x2016
+#define MTP_RESPONSE_UNKNOWN_VENDOR_CODE 0x2017
+#define MTP_RESPONSE_CAPTURE_ALREADY_TERMINATED 0x2018
+#define MTP_RESPONSE_DEVICE_BUSY 0x2019
+#define MTP_RESPONSE_INVALID_PARENT_OBJECT 0x201A
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT 0x201B
+#define MTP_RESPONSE_INVALID_DEVICE_PROP_VALUE 0x201C
+#define MTP_RESPONSE_INVALID_PARAMETER 0x201D
+#define MTP_RESPONSE_SESSION_ALREADY_OPEN 0x201E
+#define MTP_RESPONSE_TRANSACTION_CANCELLED 0x201F
+#define MTP_RESPONSE_SPECIFICATION_OF_DESTINATION_UNSUPPORTED 0x2020
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_CODE 0xA801
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT 0xA802
+#define MTP_RESPONSE_INVALID_OBJECT_PROP_VALUE 0xA803
+#define MTP_RESPONSE_INVALID_OBJECT_REFERENCE 0xA804
+#define MTP_RESPONSE_GROUP_NOT_SUPPORTED 0xA805
+#define MTP_RESPONSE_INVALID_DATASET 0xA806
+#define MTP_RESPONSE_SPECIFICATION_BY_GROUP_UNSUPPORTED 0xA807
+#define MTP_RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED 0xA808
+#define MTP_RESPONSE_OBJECT_TOO_LARGE 0xA809
+#define MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED 0xA80A
+
+// MTP Event Codes
+#define MTP_EVENT_UNDEFINED 0x4000
+#define MTP_EVENT_CANCEL_TRANSACTION 0x4001
+#define MTP_EVENT_OBJECT_ADDED 0x4002
+#define MTP_EVENT_OBJECT_REMOVED 0x4003
+#define MTP_EVENT_STORE_ADDED 0x4004
+#define MTP_EVENT_STORE_REMOVED 0x4005
+#define MTP_EVENT_DEVICE_PROP_CHANGED 0x4006
+#define MTP_EVENT_OBJECT_INFO_CHANGED 0x4007
+#define MTP_EVENT_DEVICE_INFO_CHANGED 0x4008
+#define MTP_EVENT_REQUEST_OBJECT_TRANSFER 0x4009
+#define MTP_EVENT_STORE_FULL 0x400A
+#define MTP_EVENT_DEVICE_RESET 0x400B
+#define MTP_EVENT_STORAGE_INFO_CHANGED 0x400C
+#define MTP_EVENT_CAPTURE_COMPLETE 0x400D
+#define MTP_EVENT_UNREPORTED_STATUS 0x400E
+#define MTP_EVENT_OBJECT_PROP_CHANGED 0xC801
+#define MTP_EVENT_OBJECT_PROP_DESC_CHANGED 0xC802
+#define MTP_EVENT_OBJECT_REFERENCES_CHANGED 0xC803
+
+// Storage Type
+#define MTP_STORAGE_FIXED_ROM 0x0001
+#define MTP_STORAGE_REMOVABLE_ROM 0x0002
+#define MTP_STORAGE_FIXED_RAM 0x0003
+#define MTP_STORAGE_REMOVABLE_RAM 0x0004
+
+// Storage File System
+#define MTP_STORAGE_FILESYSTEM_FLAT 0x0001
+#define MTP_STORAGE_FILESYSTEM_HIERARCHICAL 0x0002
+#define MTP_STORAGE_FILESYSTEM_DCF 0x0003
+
+// Storage Access Capability
+#define MTP_STORAGE_READ_WRITE 0x0000
+#define MTP_STORAGE_READ_ONLY_WITHOUT_DELETE 0x0001
+#define MTP_STORAGE_READ_ONLY_WITH_DELETE 0x0002
+
+// Association Type
+#define MTP_ASSOCIATION_TYPE_UNDEFINED 0x0000
+#define MTP_ASSOCIATION_TYPE_GENERIC_FOLDER 0x0001
+
+#endif // _MTP_H
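Editor's note (not part of the patch): the new mtp.h groups its constants into operation, response, event, property, and storage namespaces that an MTP responder matches against incoming request containers. The standalone C++ sketch below only illustrates that dispatch idea; it re-declares a handful of values copied from the header above, and handleRequest() is a hypothetical helper, not an API introduced by this change.

// Illustrative only. Constants mirror values from the mtp.h added above;
// handleRequest() is a hypothetical dispatcher mapping operation codes
// to response codes.
#include <cstdint>
#include <cstdio>

#define MTP_OPERATION_GET_DEVICE_INFO        0x1001
#define MTP_OPERATION_OPEN_SESSION           0x1002
#define MTP_RESPONSE_OK                      0x2001
#define MTP_RESPONSE_SESSION_NOT_OPEN        0x2003
#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED 0x2005

static uint16_t handleRequest(uint16_t operation, bool sessionOpen) {
    switch (operation) {
        case MTP_OPERATION_GET_DEVICE_INFO:   // allowed outside a session
        case MTP_OPERATION_OPEN_SESSION:
            return MTP_RESPONSE_OK;
        default:
            if (!sessionOpen)
                return MTP_RESPONSE_SESSION_NOT_OPEN;
            return MTP_RESPONSE_OPERATION_NOT_SUPPORTED;
    }
}

int main() {
    printf("0x%04X\n", (unsigned)handleRequest(MTP_OPERATION_GET_DEVICE_INFO, false)); // 0x2001
    printf("0x%04X\n", (unsigned)handleRequest(0x9820 /* Skip */, false));             // 0x2003
    return 0;
}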
diff --git a/services/audioflinger/AudioPolicyManagerBase.cpp b/services/audioflinger/AudioPolicyManagerBase.cpp
index 425ca31..65d9ef7 100644
--- a/services/audioflinger/AudioPolicyManagerBase.cpp
+++ b/services/audioflinger/AudioPolicyManagerBase.cpp
@@ -1017,8 +1017,8 @@ AudioPolicyManagerBase::AudioPolicyManagerBase(AudioPolicyClientInterface *clien
#ifdef AUDIO_POLICY_TEST
Thread(false),
#endif //AUDIO_POLICY_TEST
- mPhoneState(AudioSystem::MODE_NORMAL), mRingerMode(0), mMusicStopTime(0),
- mLimitRingtoneVolume(false), mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0)
+ mPhoneState(AudioSystem::MODE_NORMAL), mRingerMode(0), mMusicStopTime(0), mLimitRingtoneVolume(false),
+ mLastVoiceVolume(-1.0f), mTotalEffectsCpuLoad(0), mTotalEffectsMemory(0)
{
mpClientInterface = clientInterface;
@@ -1835,29 +1835,38 @@ status_t AudioPolicyManagerBase::checkAndSetVolume(int stream, int index, audio_
}
float volume = computeVolume(stream, index, output, device);
- // do not set volume if the float value did not change
- if (volume != mOutputs.valueFor(output)->mCurVolume[stream] || force) {
+ // We actually change the volume if:
+ // - the float value returned by computeVolume() changed
+ // - the force flag is set
+ if (volume != mOutputs.valueFor(output)->mCurVolume[stream] ||
+ force) {
mOutputs.valueFor(output)->mCurVolume[stream] = volume;
LOGV("setStreamVolume() for output %d stream %d, volume %f, delay %d", output, stream, volume, delayMs);
if (stream == AudioSystem::VOICE_CALL ||
stream == AudioSystem::DTMF ||
stream == AudioSystem::BLUETOOTH_SCO) {
- float voiceVolume = -1.0;
// offset value to reflect actual hardware volume that never reaches 0
// 1% corresponds roughly to first step in VOICE_CALL stream volume setting (see AudioService.java)
volume = 0.01 + 0.99 * volume;
- if (stream == AudioSystem::VOICE_CALL) {
- voiceVolume = (float)index/(float)mStreams[stream].mIndexMax;
- } else if (stream == AudioSystem::BLUETOOTH_SCO) {
- voiceVolume = 1.0;
- }
- if (voiceVolume >= 0 && output == mHardwareOutput) {
- mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
- }
}
mpClientInterface->setStreamVolume((AudioSystem::stream_type)stream, volume, output, delayMs);
}
+ if (stream == AudioSystem::VOICE_CALL ||
+ stream == AudioSystem::BLUETOOTH_SCO) {
+ float voiceVolume;
+ // Force voice volume to max for bluetooth SCO as volume is managed by the headset
+ if (stream == AudioSystem::VOICE_CALL) {
+ voiceVolume = (float)index/(float)mStreams[stream].mIndexMax;
+ } else {
+ voiceVolume = 1.0;
+ }
+ if (voiceVolume != mLastVoiceVolume && output == mHardwareOutput) {
+ mpClientInterface->setVoiceVolume(voiceVolume, delayMs);
+ mLastVoiceVolume = voiceVolume;
+ }
+ }
+
return NO_ERROR;
}
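Editor's note (not part of the patch): the AudioPolicyManagerBase change moves the voice-volume update out of the "stream volume changed" guard and instead caches the last value pushed to the hardware in the new mLastVoiceVolume member, so setVoiceVolume() is only called when the computed value actually changes. A simplified standalone sketch of that caching pattern follows; setVoiceVolumeHw() is a hypothetical stand-in for mpClientInterface->setVoiceVolume().

// Simplified sketch of the last-applied-value cache used above.
// setVoiceVolumeHw() stands in for mpClientInterface->setVoiceVolume().
#include <cstdio>

static void setVoiceVolumeHw(float v) { printf("HW voice volume -> %.2f\n", v); }

class VoiceVolumeCache {
public:
    VoiceVolumeCache() : mLastVoiceVolume(-1.0f) {}   // -1 means "never applied"
    void apply(float voiceVolume) {
        // Skip the hardware call when the value did not change.
        if (voiceVolume != mLastVoiceVolume) {
            setVoiceVolumeHw(voiceVolume);
            mLastVoiceVolume = voiceVolume;
        }
    }
private:
    float mLastVoiceVolume;
};

int main() {
    VoiceVolumeCache cache;
    cache.apply(0.5f);   // applied
    cache.apply(0.5f);   // skipped, unchanged
    cache.apply(1.0f);   // applied (e.g. Bluetooth SCO forces max)
    return 0;
}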
diff --git a/services/camera/libcameraservice/CameraHardwareStub.cpp b/services/camera/libcameraservice/CameraHardwareStub.cpp
index b3e0ee6..07b5a37 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.cpp
+++ b/services/camera/libcameraservice/CameraHardwareStub.cpp
@@ -101,9 +101,9 @@ CameraHardwareStub::~CameraHardwareStub()
mFakeCamera = 0; // paranoia
}
-sp<IMemoryHeap> CameraHardwareStub::getPreviewHeap() const
+status_t CameraHardwareStub::setPreviewWindow(const sp<ANativeWindow>& buf)
{
- return mPreviewHeap;
+ return NO_ERROR;
}
sp<IMemoryHeap> CameraHardwareStub::getRawHeap() const
diff --git a/services/camera/libcameraservice/CameraHardwareStub.h b/services/camera/libcameraservice/CameraHardwareStub.h
index d3427ba..9b66a76 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.h
+++ b/services/camera/libcameraservice/CameraHardwareStub.h
@@ -29,7 +29,7 @@ namespace android {
class CameraHardwareStub : public CameraHardwareInterface {
public:
- virtual sp<IMemoryHeap> getPreviewHeap() const;
+ virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf);
virtual sp<IMemoryHeap> getRawHeap() const;
virtual void setCallbacks(notify_callback notify_cb,
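Editor's note (not part of the patch): in the stub HAL, getPreviewHeap() is replaced by setPreviewWindow(), so the service now hands the preview target to the HAL instead of pulling a preview IMemoryHeap from it. The minimal sketch below shows that ownership model only; PreviewWindow and the shared_ptr holder are stand-ins for ANativeWindow and sp<>, used here because the real types need the platform headers.

// Minimal sketch of the new preview ownership model: the service hands a
// window to the HAL and clears it by passing null on disconnect.
// PreviewWindow is a stand-in for ANativeWindow.
#include <cstdio>
#include <memory>

struct PreviewWindow { const char* name = "unnamed"; };

class CameraHardwareSketch {
public:
    int setPreviewWindow(std::shared_ptr<PreviewWindow> window) {
        mPreviewWindow = window;            // may be null to release the window
        printf("preview window: %s\n", window ? window->name : "(none)");
        return 0;                           // NO_ERROR
    }
private:
    std::shared_ptr<PreviewWindow> mPreviewWindow;
};

int main() {
    CameraHardwareSketch hal;
    auto win = std::make_shared<PreviewWindow>();
    win->name = "surface-texture";
    hal.setPreviewWindow(win);      // setPreviewDisplay() path
    hal.setPreviewWindow(nullptr);  // disconnect() path
    return 0;
}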
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a64ddcf..535f07f 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -26,6 +26,7 @@
#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <cutils/atomic.h>
+#include <cutils/properties.h>
#include <hardware/hardware.h>
#include <media/AudioSystem.h>
#include <media/mediaplayer.h>
@@ -307,7 +308,6 @@ CameraService::Client::Client(const sp<CameraService>& cameraService,
mClientPid = clientPid;
mUseOverlay = mHardware->useOverlay();
mMsgEnabled = 0;
-
mHardware->setCallbacks(notifyCallback,
dataCallback,
dataCallbackTimestamp,
@@ -324,6 +324,7 @@ CameraService::Client::Client(const sp<CameraService>& cameraService,
mPreviewCallbackFlag = FRAME_CALLBACK_FLAG_NOOP;
mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
mOrientationChanged = false;
+ mPlayShutterSound = true;
cameraService->setCameraBusy(cameraId);
cameraService->loadSound();
LOG1("Client::Client X (pid %d)", callingPid);
@@ -341,18 +342,6 @@ CameraService::Client::~Client() {
int callingPid = getCallingPid();
LOG1("Client::~Client E (pid %d, this %p)", callingPid, this);
- if (mSurface != 0 && !mUseOverlay) {
- pthread_t thr;
- // We unregister the buffers in a different thread because binder does
- // not let us make sychronous transactions in a binder destructor (that
- // is, upon our reaching a refcount of zero.)
- pthread_create(&thr,
- NULL, // attr
- unregister_surface,
- mSurface.get());
- pthread_join(thr, NULL);
- }
-
    // set mClientPid to let disconnect() tear down the hardware
mClientPid = callingPid;
disconnect();
@@ -470,6 +459,11 @@ void CameraService::Client::disconnect() {
if (mUseOverlay) {
mOverlayRef = 0;
}
+ // Release the held ANativeWindow resources.
+ if (mPreviewWindow != 0) {
+ mPreviewWindow = 0;
+ mHardware->setPreviewWindow(mPreviewWindow);
+ }
mHardware.clear();
mCameraService->removeClient(mCameraClient);
@@ -480,8 +474,8 @@ void CameraService::Client::disconnect() {
// ----------------------------------------------------------------------------
-// set the ISurface that the preview will use
-status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
+// set the Surface that the preview will use
+status_t CameraService::Client::setPreviewDisplay(const sp<Surface>& surface) {
LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
Mutex::Autolock lock(mLock);
status_t result = checkPidAndHardware();
@@ -491,7 +485,7 @@ status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
// return if no change in surface.
// asBinder() is safe on NULL (returns NULL)
- if (surface->asBinder() == mSurface->asBinder()) {
+ if (getISurface(surface)->asBinder() == mSurface->asBinder()) {
return result;
}
@@ -502,44 +496,30 @@ status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
sp<Overlay> dummy;
mHardware->setOverlay(dummy);
mOverlayRef = 0;
- } else {
- mSurface->unregisterBuffers();
}
}
- mSurface = surface;
+ if (surface != 0) {
+ mSurface = getISurface(surface);
+ } else {
+ mSurface = 0;
+ }
+ mPreviewWindow = surface;
mOverlayRef = 0;
// If preview has been already started, set overlay or register preview
// buffers now.
if (mHardware->previewEnabled()) {
if (mUseOverlay) {
result = setOverlay();
- } else if (mSurface != 0) {
- result = registerPreviewBuffers();
+ } else if (mPreviewWindow != 0) {
+ native_window_set_buffers_transform(mPreviewWindow.get(),
+ mOrientation);
+ result = mHardware->setPreviewWindow(mPreviewWindow);
}
}
return result;
}
-status_t CameraService::Client::registerPreviewBuffers() {
- int w, h;
- CameraParameters params(mHardware->getParameters());
- params.getPreviewSize(&w, &h);
-
- // FIXME: don't use a hardcoded format here.
- ISurface::BufferHeap buffers(w, h, w, h,
- HAL_PIXEL_FORMAT_YCrCb_420_SP,
- mOrientation,
- 0,
- mHardware->getPreviewHeap());
-
- status_t result = mSurface->registerBuffers(buffers);
- if (result != NO_ERROR) {
- LOGE("registerBuffers failed with status %d", result);
- }
- return result;
-}
-
status_t CameraService::Client::setOverlay() {
int w, h;
CameraParameters params(mHardware->getParameters());
@@ -597,16 +577,10 @@ void CameraService::Client::setPreviewCallbackFlag(int callback_flag) {
if (checkPidAndHardware() != NO_ERROR) return;
mPreviewCallbackFlag = callback_flag;
-
- // If we don't use overlay, we always need the preview frame for display.
- // If we do use overlay, we only need the preview frame if the user
- // wants the data.
- if (mUseOverlay) {
- if(mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
- enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- } else {
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- }
+ if (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+ enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+ } else {
+ disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
}
}
@@ -631,14 +605,14 @@ status_t CameraService::Client::startCameraMode(camera_mode mode) {
switch(mode) {
case CAMERA_PREVIEW_MODE:
- if (mSurface == 0) {
+ if (mSurface == 0 && mPreviewWindow == 0) {
LOG1("mSurface is not set yet.");
// still able to start preview in this case.
}
return startPreviewMode();
case CAMERA_RECORDING_MODE:
- if (mSurface == 0) {
- LOGE("mSurface must be set before startRecordingMode.");
+ if (mSurface == 0 && mPreviewWindow == 0) {
+ LOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
return INVALID_OPERATION;
}
return startRecordingMode();
@@ -664,16 +638,12 @@ status_t CameraService::Client::startPreviewMode() {
if (result != NO_ERROR) return result;
result = mHardware->startPreview();
} else {
- enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- result = mHardware->startPreview();
- if (result != NO_ERROR) return result;
- // If preview display has been set, register preview buffers now.
- if (mSurface != 0) {
- // Unregister here because the surface may be previously registered
- // with the raw (snapshot) heap.
- mSurface->unregisterBuffers();
- result = registerPreviewBuffers();
+ if (mPreviewWindow != 0) {
+ native_window_set_buffers_transform(mPreviewWindow.get(),
+ mOrientation);
}
+ mHardware->setPreviewWindow(mPreviewWindow);
+ result = mHardware->startPreview();
}
return result;
}
@@ -711,13 +681,10 @@ void CameraService::Client::stopPreview() {
Mutex::Autolock lock(mLock);
if (checkPidAndHardware() != NO_ERROR) return;
+
disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
mHardware->stopPreview();
- if (mSurface != 0 && !mUseOverlay) {
- mSurface->unregisterBuffers();
- }
-
mPreviewBuffer.clear();
}
@@ -741,6 +708,30 @@ void CameraService::Client::releaseRecordingFrame(const sp<IMemory>& mem) {
mHardware->releaseRecordingFrame(mem);
}
+int32_t CameraService::Client::getNumberOfVideoBuffers() const {
+ LOG1("getNumberOfVideoBuffers");
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return 0;
+ return mHardware->getNumberOfVideoBuffers();
+}
+
+sp<IMemory> CameraService::Client::getVideoBuffer(int32_t index) const {
+ LOG1("getVideoBuffer: %d", index);
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) return 0;
+ return mHardware->getVideoBuffer(index);
+}
+
+status_t CameraService::Client::storeMetaDataInBuffers(bool enabled)
+{
+ LOG1("storeMetaDataInBuffers: %s", enabled? "true": "false");
+ Mutex::Autolock lock(mLock);
+ if (checkPidAndHardware() != NO_ERROR) {
+ return UNKNOWN_ERROR;
+ }
+ return mHardware->storeMetaDataInBuffers(enabled);
+}
+
bool CameraService::Client::previewEnabled() {
LOG1("previewEnabled (pid %d)", getCallingPid());
@@ -815,6 +806,35 @@ String8 CameraService::Client::getParameters() const {
return params;
}
+// enable shutter sound
+status_t CameraService::Client::enableShutterSound(bool enable) {
+ LOG1("enableShutterSound (pid %d)", getCallingPid());
+
+ status_t result = checkPidAndHardware();
+ if (result != NO_ERROR) return result;
+
+ if (enable) {
+ mPlayShutterSound = true;
+ return OK;
+ }
+
+ // Disabling shutter sound may not be allowed. In that case only
+ // allow the mediaserver process to disable the sound.
+ char value[PROPERTY_VALUE_MAX];
+ property_get("ro.camera.sound.forced", value, "0");
+ if (strcmp(value, "0") != 0) {
+ // Disabling shutter sound is not allowed. Deny if the current
+ // process is not mediaserver.
+ if (getCallingPid() != getpid()) {
+ LOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid());
+ return PERMISSION_DENIED;
+ }
+ }
+
+ mPlayShutterSound = false;
+ return OK;
+}
+
status_t CameraService::Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) {
LOG1("sendCommand (pid %d)", getCallingPid());
int orientation;
@@ -836,6 +856,20 @@ status_t CameraService::Client::sendCommand(int32_t cmd, int32_t arg1, int32_t a
if (mOverlayRef != 0) mOrientationChanged = true;
}
return OK;
+ } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) {
+ switch (arg1) {
+ case 0:
+ enableShutterSound(false);
+ break;
+ case 1:
+ enableShutterSound(true);
+ break;
+ default:
+ return BAD_VALUE;
+ }
+ return OK;
+ } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) {
+ mCameraService->playSound(SOUND_RECORDING);
}
return mHardware->sendCommand(cmd, arg1, arg2);
@@ -996,11 +1030,8 @@ void CameraService::Client::dataCallbackTimestamp(nsecs_t timestamp,
// "size" is the width and height of yuv picture for registerBuffer.
// If it is NULL, use the picture size from parameters.
void CameraService::Client::handleShutter(image_rect_type *size) {
- mCameraService->playSound(SOUND_SHUTTER);
-
- // Screen goes black after the buffer is unregistered.
- if (mSurface != 0 && !mUseOverlay) {
- mSurface->unregisterBuffers();
+ if (mPlayShutterSound) {
+ mCameraService->playSound(SOUND_SHUTTER);
}
sp<ICameraClient> c = mCameraClient;
@@ -1030,7 +1061,6 @@ void CameraService::Client::handleShutter(image_rect_type *size) {
HAL_PIXEL_FORMAT_YCrCb_420_SP, mOrientation, 0,
mHardware->getRawHeap());
- mSurface->registerBuffers(buffers);
IPCThreadState::self()->flushCommands();
}
@@ -1043,12 +1073,6 @@ void CameraService::Client::handlePreviewData(const sp<IMemory>& mem) {
size_t size;
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
- if (!mUseOverlay) {
- if (mSurface != 0) {
- mSurface->postBuffer(offset);
- }
- }
-
// local copy of the callback flags
int flags = mPreviewCallbackFlag;
@@ -1069,9 +1093,7 @@ void CameraService::Client::handlePreviewData(const sp<IMemory>& mem) {
mPreviewCallbackFlag &= ~(FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
FRAME_CALLBACK_FLAG_ENABLE_MASK);
- if (mUseOverlay) {
- disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
- }
+ disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
}
if (c != 0) {
@@ -1108,11 +1130,6 @@ void CameraService::Client::handleRawPicture(const sp<IMemory>& mem) {
size_t size;
sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
- // Put the YUV version of the snapshot in the preview display.
- if (mSurface != 0 && !mUseOverlay) {
- mSurface->postBuffer(offset);
- }
-
sp<ICameraClient> c = mCameraClient;
mLock.unlock();
if (c != 0) {
@@ -1292,4 +1309,12 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) {
return NO_ERROR;
}
+sp<ISurface> CameraService::getISurface(const sp<Surface>& surface) {
+ if (surface != 0) {
+ return surface->getISurface();
+ } else {
+ return sp<ISurface>(0);
+ }
+}
+
}; // namespace android
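Editor's note (not part of the patch): the enableShutterSound() logic added to CameraService.cpp reduces to this policy: enabling the sound is always allowed, while disabling is refused when ro.camera.sound.forced is set to something other than "0" unless the caller is the mediaserver process itself. The sketch below restates that decision in isolation; readForcedSoundProperty() and isMediaserverCaller() are hypothetical stand-ins for property_get("ro.camera.sound.forced", ...) and the getCallingPid() == getpid() comparison.

// Standalone restatement of the shutter-sound policy added above.
#include <cstdio>

enum Status { STATUS_OK = 0, STATUS_PERMISSION_DENIED = -1 };

static bool readForcedSoundProperty() { return false; }  // false == property is "0"
static bool isMediaserverCaller()     { return true;  }  // pretend we are mediaserver

static Status enableShutterSound(bool enable, bool& playShutterSound) {
    if (enable) {
        playShutterSound = true;
        return STATUS_OK;
    }
    // Disabling may be forbidden by ro.camera.sound.forced; in that case
    // only the mediaserver process itself may turn the sound off.
    if (readForcedSoundProperty() && !isMediaserverCaller()) {
        return STATUS_PERMISSION_DENIED;
    }
    playShutterSound = false;
    return STATUS_OK;
}

int main() {
    bool play = true;
    Status s = enableShutterSound(false, play);
    printf("status=%d playShutterSound=%d\n", s, play ? 1 : 0);
    return 0;
}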
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f09773d..60e0d04 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -79,6 +79,12 @@ private:
sp<MediaPlayer> mSoundPlayer[NUM_SOUNDS];
int mSoundRef; // reference count (release all MediaPlayer when 0)
+ // Used by Client objects to extract the ISurface from a Surface object.
+ // This is used because making Client a friend class of Surface would
+ // require including this header in Surface.h since Client is a nested
+ // class.
+ static sp<ISurface> getISurface(const sp<Surface>& surface);
+
class Client : public BnCamera
{
public:
@@ -87,11 +93,14 @@ private:
virtual status_t connect(const sp<ICameraClient>& client);
virtual status_t lock();
virtual status_t unlock();
- virtual status_t setPreviewDisplay(const sp<ISurface>& surface);
+ virtual status_t setPreviewDisplay(const sp<Surface>& surface);
virtual void setPreviewCallbackFlag(int flag);
virtual status_t startPreview();
virtual void stopPreview();
virtual bool previewEnabled();
+ virtual int32_t getNumberOfVideoBuffers() const;
+ virtual sp<IMemory> getVideoBuffer(int32_t index) const;
+ virtual status_t storeMetaDataInBuffers(bool enabled);
virtual status_t startRecording();
virtual void stopRecording();
virtual bool recordingEnabled();
@@ -133,6 +142,9 @@ private:
status_t startPreviewMode();
status_t startRecordingMode();
+ // internal function used by sendCommand to enable/disable shutter sound.
+ status_t enableShutterSound(bool enable);
+
// these are static callback functions
static void notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
static void dataCallback(int32_t msgType, const sp<IMemory>& dataPtr, void* user);
@@ -171,10 +183,12 @@ private:
int mOrientation; // Current display orientation
// True if display orientation has been changed. This is only used in overlay.
int mOrientationChanged;
+ bool mPlayShutterSound;
// Ensures atomicity among the public methods
mutable Mutex mLock;
sp<ISurface> mSurface;
+ sp<ANativeWindow> mPreviewWindow;
    // If the user wants us to return a copy of the preview frame (instead
// of the original one), we allocate mPreviewBuffer and reuse it if possible.