270 files changed, 23757 insertions(+), 4186 deletions(-)
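
Before the per-file hunks, here is a rough illustration of the client-side camera API surface this patch introduces (Camera::setPreviewTexture(), Camera::storeMetaDataInBuffers(), and the new video-size / preferred-preview-size-for-video parameters). This is a hedged sketch, not part of the patch: the include paths, the already-connected Camera instance, and the error handling are assumptions for illustration only; only the calls marked "new" below are taken from the diff.

#include <camera/Camera.h>
#include <camera/CameraParameters.h>
#include <gui/ISurfaceTexture.h>   // header location assumed; the patch links libgui

using namespace android;

// Hypothetical recording setup; assumes a Camera already obtained via
// Camera::connect() and an ISurfaceTexture owned by the caller.
static status_t startMetadataRecording(const sp<Camera>& camera,
                                        const sp<ISurfaceTexture>& texture) {
    CameraParameters params(camera->getParameters());

    // New keys: KEY_VIDEO_SIZE and KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO.
    params.setVideoSize(1280, 720);
    int w = 0, h = 0;
    params.getPreferredPreviewSizeForVideo(&w, &h);   // returns -1x-1 when unset
    if (w > 0 && h > 0) params.setPreviewSize(w, h);
    camera->setParameters(params.flatten());

    // New binder calls: SET_PREVIEW_TEXTURE and STORE_META_DATA_IN_BUFFERS.
    status_t err = camera->setPreviewTexture(texture);
    if (err != NO_ERROR) return err;
    err = camera->storeMetaDataInBuffers(true);
    if (err != NO_ERROR) return err;

    err = camera->startPreview();
    if (err != NO_ERROR) return err;
    return camera->startRecording();
}

The encoder side of this path is exercised end-to-end by the new cmds/stagefright/recordvideo.cpp tool added further down in the diff.
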
diff --git a/camera/Android.mk b/camera/Android.mk index 03ff229..2f16923 100644 --- a/camera/Android.mk +++ b/camera/Android.mk @@ -14,7 +14,8 @@ LOCAL_SHARED_LIBRARIES := \ libbinder \ libhardware \ libsurfaceflinger_client \ - libui + libui \ + libgui LOCAL_MODULE:= libcamera_client diff --git a/camera/Camera.cpp b/camera/Camera.cpp index 743fbb2..907f119 100644 --- a/camera/Camera.cpp +++ b/camera/Camera.cpp @@ -80,8 +80,9 @@ sp<Camera> Camera::create(const sp<ICamera>& camera) c->mStatus = NO_ERROR; c->mCamera = camera; camera->asBinder()->linkToDeath(c); + return c; } - return c; + return 0; } void Camera::init() @@ -167,32 +168,34 @@ status_t Camera::unlock() return c->unlock(); } -// pass the buffered ISurface to the camera service +// pass the buffered Surface to the camera service status_t Camera::setPreviewDisplay(const sp<Surface>& surface) { - LOGV("setPreviewDisplay"); + LOGV("setPreviewDisplay(%p)", surface.get()); sp <ICamera> c = mCamera; if (c == 0) return NO_INIT; if (surface != 0) { - return c->setPreviewDisplay(surface->getISurface()); + return c->setPreviewDisplay(surface); } else { LOGD("app passed NULL surface"); return c->setPreviewDisplay(0); } } -status_t Camera::setPreviewDisplay(const sp<ISurface>& surface) +// pass the buffered ISurfaceTexture to the camera service +status_t Camera::setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture) { - LOGV("setPreviewDisplay"); - if (surface == 0) { - LOGD("app passed NULL surface"); - } + LOGV("setPreviewTexture(%p)", surfaceTexture.get()); sp <ICamera> c = mCamera; if (c == 0) return NO_INIT; - return c->setPreviewDisplay(surface); + if (surfaceTexture != 0) { + return c->setPreviewTexture(surfaceTexture); + } else { + LOGD("app passed NULL surface"); + return c->setPreviewTexture(0); + } } - // start preview mode status_t Camera::startPreview() { @@ -202,6 +205,31 @@ status_t Camera::startPreview() return c->startPreview(); } +int32_t Camera::getNumberOfVideoBuffers() const +{ + LOGV("getNumberOfVideoBuffers"); + sp <ICamera> c = mCamera; + if (c == 0) return 0; + return c->getNumberOfVideoBuffers(); +} + +sp<IMemory> Camera::getVideoBuffer(int32_t index) const +{ + LOGV("getVideoBuffer: %d", index); + sp <ICamera> c = mCamera; + if (c == 0) return 0; + return c->getVideoBuffer(index); +} + +status_t Camera::storeMetaDataInBuffers(bool enabled) +{ + LOGV("storeMetaDataInBuffers: %s", + enabled? 
"true": "false"); + sp <ICamera> c = mCamera; + if (c == 0) return NO_INIT; + return c->storeMetaDataInBuffers(enabled); +} + // start recording mode, must call setPreviewDisplay first status_t Camera::startRecording() { @@ -378,4 +406,3 @@ void Camera::DeathNotifier::binderDied(const wp<IBinder>& who) { } }; // namespace android - diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp index 83e5e57..e9a5f8c 100644 --- a/camera/CameraParameters.cpp +++ b/camera/CameraParameters.cpp @@ -73,6 +73,9 @@ const char CameraParameters::KEY_ZOOM_SUPPORTED[] = "zoom-supported"; const char CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED[] = "smooth-zoom-supported"; const char CameraParameters::KEY_FOCUS_DISTANCES[] = "focus-distances"; const char CameraParameters::KEY_VIDEO_FRAME_FORMAT[] = "video-frame-format"; +const char CameraParameters::KEY_VIDEO_SIZE[] = "video-size"; +const char CameraParameters::KEY_SUPPORTED_VIDEO_SIZES[] = "video-size-values"; +const char CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[] = "preferred-preview-size-for-video"; const char CameraParameters::TRUE[] = "true"; const char CameraParameters::FOCUS_DISTANCE_INFINITY[] = "Infinity"; @@ -129,7 +132,7 @@ const char CameraParameters::SCENE_MODE_PARTY[] = "party"; const char CameraParameters::SCENE_MODE_CANDLELIGHT[] = "candlelight"; const char CameraParameters::SCENE_MODE_BARCODE[] = "barcode"; -// Formats for setPreviewFormat and setPictureFormat. +const char CameraParameters::PIXEL_FORMAT_YUV420P[] = "yuv420p"; const char CameraParameters::PIXEL_FORMAT_YUV422SP[] = "yuv422sp"; const char CameraParameters::PIXEL_FORMAT_YUV420SP[] = "yuv420sp"; const char CameraParameters::PIXEL_FORMAT_YUV422I[] = "yuv422i-yuyv"; @@ -331,12 +334,41 @@ void CameraParameters::getPreviewSize(int *width, int *height) const parse_pair(p, width, height, 'x'); } +void CameraParameters::getPreferredPreviewSizeForVideo(int *width, int *height) const +{ + *width = *height = -1; + const char *p = get(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO); + if (p == 0) return; + parse_pair(p, width, height, 'x'); +} + void CameraParameters::getSupportedPreviewSizes(Vector<Size> &sizes) const { const char *previewSizesStr = get(KEY_SUPPORTED_PREVIEW_SIZES); parseSizesList(previewSizesStr, sizes); } +void CameraParameters::setVideoSize(int width, int height) +{ + char str[32]; + sprintf(str, "%dx%d", width, height); + set(KEY_VIDEO_SIZE, str); +} + +void CameraParameters::getVideoSize(int *width, int *height) const +{ + *width = *height = -1; + const char *p = get(KEY_VIDEO_SIZE); + if (p == 0) return; + parse_pair(p, width, height, 'x'); +} + +void CameraParameters::getSupportedVideoSizes(Vector<Size> &sizes) const +{ + const char *videoSizesStr = get(KEY_SUPPORTED_VIDEO_SIZES); + parseSizesList(videoSizesStr, sizes); +} + void CameraParameters::setPreviewFrameRate(int fps) { set(KEY_PREVIEW_FRAME_RATE, fps); diff --git a/camera/ICamera.cpp b/camera/ICamera.cpp index 13673b5..0881d65 100644 --- a/camera/ICamera.cpp +++ b/camera/ICamera.cpp @@ -28,6 +28,7 @@ namespace android { enum { DISCONNECT = IBinder::FIRST_CALL_TRANSACTION, SET_PREVIEW_DISPLAY, + SET_PREVIEW_TEXTURE, SET_PREVIEW_CALLBACK_FLAG, START_PREVIEW, STOP_PREVIEW, @@ -45,6 +46,9 @@ enum { STOP_RECORDING, RECORDING_ENABLED, RELEASE_RECORDING_FRAME, + GET_NUM_VIDEO_BUFFERS, + GET_VIDEO_BUFFER, + STORE_META_DATA_IN_BUFFERS, }; class BpCamera: public BpInterface<ICamera> @@ -64,17 +68,29 @@ public: remote()->transact(DISCONNECT, data, &reply); } - // pass the buffered ISurface to the 
camera service - status_t setPreviewDisplay(const sp<ISurface>& surface) + // pass the buffered Surface to the camera service + status_t setPreviewDisplay(const sp<Surface>& surface) { LOGV("setPreviewDisplay"); Parcel data, reply; data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); - data.writeStrongBinder(surface->asBinder()); + Surface::writeToParcel(surface, &data); remote()->transact(SET_PREVIEW_DISPLAY, data, &reply); return reply.readInt32(); } + // pass the buffered SurfaceTexture to the camera service + status_t setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture) + { + LOGV("setPreviewTexture"); + Parcel data, reply; + data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); + sp<IBinder> b(surfaceTexture->asBinder()); + data.writeStrongBinder(b); + remote()->transact(SET_PREVIEW_TEXTURE, data, &reply); + return reply.readInt32(); + } + // set the preview callback flag to affect how the received frames from // preview are handled. See Camera.h for details. void setPreviewCallbackFlag(int flag) @@ -133,6 +149,37 @@ public: remote()->transact(RELEASE_RECORDING_FRAME, data, &reply); } + int32_t getNumberOfVideoBuffers() const + { + LOGV("getNumberOfVideoBuffers"); + Parcel data, reply; + data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); + remote()->transact(GET_NUM_VIDEO_BUFFERS, data, &reply); + return reply.readInt32(); + } + + sp<IMemory> getVideoBuffer(int32_t index) const + { + LOGV("getVideoBuffer: %d", index); + Parcel data, reply; + data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); + data.writeInt32(index); + remote()->transact(GET_VIDEO_BUFFER, data, &reply); + sp<IMemory> mem = interface_cast<IMemory>( + reply.readStrongBinder()); + return mem; + } + + status_t storeMetaDataInBuffers(bool enabled) + { + LOGV("storeMetaDataInBuffers: %s", enabled? 
"true": "false"); + Parcel data, reply; + data.writeInterfaceToken(ICamera::getInterfaceDescriptor()); + data.writeInt32(enabled); + remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply); + return reply.readInt32(); + } + // check preview state bool previewEnabled() { @@ -258,10 +305,17 @@ status_t BnCamera::onTransact( case SET_PREVIEW_DISPLAY: { LOGV("SET_PREVIEW_DISPLAY"); CHECK_INTERFACE(ICamera, data, reply); - sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder()); + sp<Surface> surface = Surface::readFromParcel(data); reply->writeInt32(setPreviewDisplay(surface)); return NO_ERROR; } break; + case SET_PREVIEW_TEXTURE: { + LOGV("SET_PREVIEW_TEXTURE"); + CHECK_INTERFACE(ICamera, data, reply); + sp<ISurfaceTexture> st = interface_cast<ISurfaceTexture>(data.readStrongBinder()); + reply->writeInt32(setPreviewTexture(st)); + return NO_ERROR; + } break; case SET_PREVIEW_CALLBACK_FLAG: { LOGV("SET_PREVIEW_CALLBACK_TYPE"); CHECK_INTERFACE(ICamera, data, reply); @@ -300,6 +354,26 @@ status_t BnCamera::onTransact( releaseRecordingFrame(mem); return NO_ERROR; } break; + case GET_NUM_VIDEO_BUFFERS: { + LOGV("GET_NUM_VIDEO_BUFFERS"); + CHECK_INTERFACE(ICamera, data, reply); + reply->writeInt32(getNumberOfVideoBuffers()); + return NO_ERROR; + } break; + case GET_VIDEO_BUFFER: { + LOGV("GET_VIDEO_BUFFER"); + CHECK_INTERFACE(ICamera, data, reply); + int32_t index = data.readInt32(); + reply->writeStrongBinder(getVideoBuffer(index)->asBinder()); + return NO_ERROR; + } break; + case STORE_META_DATA_IN_BUFFERS: { + LOGV("STORE_META_DATA_IN_BUFFERS"); + CHECK_INTERFACE(ICamera, data, reply); + bool enabled = data.readInt32(); + reply->writeInt32(storeMetaDataInBuffers(enabled)); + return NO_ERROR; + } break; case PREVIEW_ENABLED: { LOGV("PREVIEW_ENABLED"); CHECK_INTERFACE(ICamera, data, reply); @@ -376,4 +450,3 @@ status_t BnCamera::onTransact( // ---------------------------------------------------------------------------- }; // namespace android - diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk index 5b74007..178032d 100644 --- a/cmds/stagefright/Android.mk +++ b/cmds/stagefright/Android.mk @@ -7,13 +7,16 @@ LOCAL_SRC_FILES:= \ SineSource.cpp LOCAL_SHARED_LIBRARIES := \ - libstagefright libmedia libutils libbinder libstagefright_foundation + libstagefright libmedia libutils libbinder libstagefright_foundation \ + libskia LOCAL_C_INCLUDES:= \ $(JNI_H_INCLUDE) \ frameworks/base/media/libstagefright \ frameworks/base/media/libstagefright/include \ - $(TOP)/frameworks/base/include/media/stagefright/openmax + $(TOP)/frameworks/base/include/media/stagefright/openmax \ + external/skia/include/core \ + external/skia/include/images \ LOCAL_CFLAGS += -Wno-multichar @@ -53,6 +56,31 @@ include $(CLEAR_VARS) LOCAL_SRC_FILES:= \ SineSource.cpp \ + recordvideo.cpp + +LOCAL_SHARED_LIBRARIES := \ + libstagefright liblog libutils libbinder + +LOCAL_C_INCLUDES:= \ + $(JNI_H_INCLUDE) \ + frameworks/base/media/libstagefright \ + $(TOP)/frameworks/base/include/media/stagefright/openmax + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE_TAGS := debug + +LOCAL_MODULE:= recordvideo + +include $(BUILD_EXECUTABLE) + + +################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + SineSource.cpp \ audioloop.cpp LOCAL_SHARED_LIBRARIES := \ @@ -70,3 +98,53 @@ LOCAL_MODULE_TAGS := debug LOCAL_MODULE:= audioloop include $(BUILD_EXECUTABLE) + 
+################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + stream.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libstagefright liblog libutils libbinder libsurfaceflinger_client \ + libstagefright_foundation libmedia + +LOCAL_C_INCLUDES:= \ + $(JNI_H_INCLUDE) \ + frameworks/base/media/libstagefright \ + $(TOP)/frameworks/base/include/media/stagefright/openmax + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE_TAGS := debug + +LOCAL_MODULE:= stream + +include $(BUILD_EXECUTABLE) + +################################################################################ + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + sf2.cpp \ + +LOCAL_SHARED_LIBRARIES := \ + libstagefright liblog libutils libbinder libstagefright_foundation \ + libmedia libsurfaceflinger_client libcutils libui + +LOCAL_C_INCLUDES:= \ + $(JNI_H_INCLUDE) \ + frameworks/base/media/libstagefright \ + $(TOP)/frameworks/base/include/media/stagefright/openmax + +LOCAL_CFLAGS += -Wno-multichar + +LOCAL_MODULE_TAGS := debug + +LOCAL_MODULE:= sf2 + +include $(BUILD_EXECUTABLE) + + diff --git a/cmds/stagefright/recordvideo.cpp b/cmds/stagefright/recordvideo.cpp new file mode 100644 index 0000000..1264215 --- /dev/null +++ b/cmds/stagefright/recordvideo.cpp @@ -0,0 +1,303 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "SineSource.h" + +#include <binder/ProcessState.h> +#include <media/stagefright/AudioPlayer.h> +#include <media/stagefright/MediaBufferGroup.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/MPEG4Writer.h> +#include <media/stagefright/OMXClient.h> +#include <media/stagefright/OMXCodec.h> +#include <media/MediaPlayerInterface.h> + +using namespace android; + +// Print usage showing how to use this utility to record videos +static void usage(const char *me) { + fprintf(stderr, "usage: %s\n", me); + fprintf(stderr, " -h(elp)\n"); + fprintf(stderr, " -b bit rate in bits per second (default: 300000)\n"); + fprintf(stderr, " -c YUV420 color format: [0] semi planar or [1] planar (default: 1)\n"); + fprintf(stderr, " -f frame rate in frames per second (default: 30)\n"); + fprintf(stderr, " -i I frame interval in seconds (default: 1)\n"); + fprintf(stderr, " -n number of frames to be recorded (default: 300)\n"); + fprintf(stderr, " -w width in pixels (default: 176)\n"); + fprintf(stderr, " -t height in pixels (default: 144)\n"); + fprintf(stderr, " -l encoder level. see omx il header (default: encoder specific)\n"); + fprintf(stderr, " -p encoder profile. 
see omx il header (default: encoder specific)\n"); + fprintf(stderr, " -v video codec: [0] AVC [1] M4V [2] H263 (default: 0)\n"); + fprintf(stderr, "The output file is /sdcard/output.mp4\n"); + exit(1); +} + +class DummySource : public MediaSource { + +public: + DummySource(int width, int height, int nFrames, int fps, int colorFormat) + : mWidth(width), + mHeight(height), + mMaxNumFrames(nFrames), + mFrameRate(fps), + mColorFormat(colorFormat), + mSize((width * height * 3) / 2) { + + mGroup.add_buffer(new MediaBuffer(mSize)); + + // Check the color format to make sure + // that the buffer size mSize it set correctly above. + CHECK(colorFormat == OMX_COLOR_FormatYUV420SemiPlanar || + colorFormat == OMX_COLOR_FormatYUV420Planar); + } + + virtual sp<MetaData> getFormat() { + sp<MetaData> meta = new MetaData; + meta->setInt32(kKeyWidth, mWidth); + meta->setInt32(kKeyHeight, mHeight); + meta->setInt32(kKeyColorFormat, mColorFormat); + meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + + return meta; + } + + virtual status_t start(MetaData *params) { + mNumFramesOutput = 0; + return OK; + } + + virtual status_t stop() { + return OK; + } + + virtual status_t read( + MediaBuffer **buffer, const MediaSource::ReadOptions *options) { + + if (mNumFramesOutput % 10 == 0) { + fprintf(stderr, "."); + } + if (mNumFramesOutput == mMaxNumFrames) { + return ERROR_END_OF_STREAM; + } + + status_t err = mGroup.acquire_buffer(buffer); + if (err != OK) { + return err; + } + + // We don't care about the contents. we just test video encoder + // Also, by skipping the content generation, we can return from + // read() much faster. + //char x = (char)((double)rand() / RAND_MAX * 255); + //memset((*buffer)->data(), x, mSize); + (*buffer)->set_range(0, mSize); + (*buffer)->meta_data()->clear(); + (*buffer)->meta_data()->setInt64( + kKeyTime, (mNumFramesOutput * 1000000) / mFrameRate); + ++mNumFramesOutput; + + return OK; + } + +protected: + virtual ~DummySource() {} + +private: + MediaBufferGroup mGroup; + int mWidth, mHeight; + int mMaxNumFrames; + int mFrameRate; + int mColorFormat; + size_t mSize; + int64_t mNumFramesOutput;; + + DummySource(const DummySource &); + DummySource &operator=(const DummySource &); +}; + +enum { + kYUV420SP = 0, + kYUV420P = 1, +}; + +// returns -1 if mapping of the given color is unsuccessful +// returns an omx color enum value otherwise +static int translateColorToOmxEnumValue(int color) { + switch (color) { + case kYUV420SP: + return OMX_COLOR_FormatYUV420SemiPlanar; + case kYUV420P: + return OMX_COLOR_FormatYUV420Planar; + default: + fprintf(stderr, "Unsupported color: %d\n", color); + return -1; + } +} + +int main(int argc, char **argv) { + + // Default values for the program if not overwritten + int frameRateFps = 30; + int width = 176; + int height = 144; + int bitRateBps = 300000; + int iFramesIntervalSeconds = 1; + int colorFormat = OMX_COLOR_FormatYUV420Planar; + int nFrames = 300; + int level = -1; // Encoder specific default + int profile = -1; // Encoder specific default + int codec = 0; + const char *fileName = "/sdcard/output.mp4"; + + android::ProcessState::self()->startThreadPool(); + int res; + while ((res = getopt(argc, argv, "b:c:f:i:n:w:t:l:p:v:h")) >= 0) { + switch (res) { + case 'b': + { + bitRateBps = atoi(optarg); + break; + } + + case 'c': + { + colorFormat = translateColorToOmxEnumValue(atoi(optarg)); + if (colorFormat == -1) { + usage(argv[0]); + } + break; + } + + case 'f': + { + frameRateFps = atoi(optarg); + break; + } + + case 'i': + { + 
iFramesIntervalSeconds = atoi(optarg); + break; + } + + case 'n': + { + nFrames = atoi(optarg); + break; + } + + case 'w': + { + width = atoi(optarg); + break; + } + + case 't': + { + height = atoi(optarg); + break; + } + + case 'l': + { + level = atoi(optarg); + break; + } + + case 'p': + { + profile = atoi(optarg); + break; + } + + case 'v': + { + codec = atoi(optarg); + if (codec < 0 || codec > 2) { + usage(argv[0]); + } + break; + } + + case 'h': + default: + { + usage(argv[0]); + break; + } + } + } + + OMXClient client; + CHECK_EQ(client.connect(), OK); + + status_t err = OK; + sp<MediaSource> source = + new DummySource(width, height, nFrames, frameRateFps, colorFormat); + + sp<MetaData> enc_meta = new MetaData; + switch (codec) { + case 1: + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4); + break; + case 2: + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263); + break; + default: + enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); + break; + } + enc_meta->setInt32(kKeyWidth, width); + enc_meta->setInt32(kKeyHeight, height); + enc_meta->setInt32(kKeyFrameRate, frameRateFps); + enc_meta->setInt32(kKeyBitRate, bitRateBps); + enc_meta->setInt32(kKeyStride, width); + enc_meta->setInt32(kKeySliceHeight, height); + enc_meta->setInt32(kKeyIFramesInterval, iFramesIntervalSeconds); + enc_meta->setInt32(kKeyColorFormat, colorFormat); + if (level != -1) { + enc_meta->setInt32(kKeyVideoLevel, level); + } + if (profile != -1) { + enc_meta->setInt32(kKeyVideoProfile, profile); + } + + sp<MediaSource> encoder = + OMXCodec::Create( + client.interface(), enc_meta, true /* createEncoder */, source); + + sp<MPEG4Writer> writer = new MPEG4Writer(fileName); + writer->addSource(encoder); + int64_t start = systemTime(); + CHECK_EQ(OK, writer->start()); + while (!writer->reachedEOS()) { + } + err = writer->stop(); + int64_t end = systemTime(); + + fprintf(stderr, "$\n"); + client.disconnect(); + + if (err != OK && err != ERROR_END_OF_STREAM) { + fprintf(stderr, "record failed: %d\n", err); + return 1; + } + fprintf(stderr, "encoding %d frames in %lld us\n", nFrames, (end-start)/1000); + fprintf(stderr, "encoding speed is: %.2f fps\n", (nFrames * 1E9) / (end-start)); + return 0; +} diff --git a/cmds/stagefright/sf2.cpp b/cmds/stagefright/sf2.cpp new file mode 100644 index 0000000..1dc08ea --- /dev/null +++ b/cmds/stagefright/sf2.cpp @@ -0,0 +1,564 @@ +#include <binder/ProcessState.h> + +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <media/stagefright/ACodec.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaExtractor.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/Utils.h> + +#include <surfaceflinger/ISurfaceComposer.h> +#include <surfaceflinger/SurfaceComposerClient.h> + +#include "include/ESDS.h" + +using namespace android; + +struct Controller : public AHandler { + Controller(const char *uri, bool decodeAudio, const sp<Surface> &surface) + : mURI(uri), + mDecodeAudio(decodeAudio), + mSurface(surface), + mCodec(new ACodec) { + CHECK(!mDecodeAudio || mSurface == NULL); + } + + void startAsync() { + (new AMessage(kWhatStart, id()))->post(); + } + +protected: + virtual 
~Controller() { + } + + virtual void onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatStart: + { +#if 1 + mDecodeLooper = looper(); +#else + mDecodeLooper = new ALooper; + mDecodeLooper->setName("sf2 decode looper"); + mDecodeLooper->start(); +#endif + + sp<DataSource> dataSource = + DataSource::CreateFromURI(mURI.c_str()); + + sp<MediaExtractor> extractor = + MediaExtractor::Create(dataSource); + + for (size_t i = 0; i < extractor->countTracks(); ++i) { + sp<MetaData> meta = extractor->getTrackMetaData(i); + + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + + if (!strncasecmp(mDecodeAudio ? "audio/" : "video/", + mime, 6)) { + mSource = extractor->getTrack(i); + break; + } + } + CHECK(mSource != NULL); + + CHECK_EQ(mSource->start(), (status_t)OK); + + mDecodeLooper->registerHandler(mCodec); + + mCodec->setNotificationMessage( + new AMessage(kWhatCodecNotify, id())); + + sp<AMessage> format = makeFormat(mSource->getFormat()); + + if (mSurface != NULL) { + format->setObject("surface", mSurface); + } + + mCodec->initiateSetup(format); + + mCSDIndex = 0; + mStartTimeUs = ALooper::GetNowUs(); + mNumOutputBuffersReceived = 0; + mTotalBytesReceived = 0; + mLeftOverBuffer = NULL; + mFinalResult = OK; + mSeekState = SEEK_NONE; + + // (new AMessage(kWhatSeek, id()))->post(5000000ll); + break; + } + + case kWhatSeek: + { + printf("+"); + fflush(stdout); + + CHECK(mSeekState == SEEK_NONE + || mSeekState == SEEK_FLUSH_COMPLETED); + + if (mLeftOverBuffer != NULL) { + mLeftOverBuffer->release(); + mLeftOverBuffer = NULL; + } + + mSeekState = SEEK_FLUSHING; + mSeekTimeUs = 30000000ll; + + mCodec->signalFlush(); + break; + } + + case kWhatStop: + { + if (mLeftOverBuffer != NULL) { + mLeftOverBuffer->release(); + mLeftOverBuffer = NULL; + } + + CHECK_EQ(mSource->stop(), (status_t)OK); + mSource.clear(); + + mCodec->initiateShutdown(); + break; + } + + case kWhatCodecNotify: + { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + if (what == ACodec::kWhatFillThisBuffer) { + onFillThisBuffer(msg); + } else if (what == ACodec::kWhatDrainThisBuffer) { + if ((mNumOutputBuffersReceived++ % 16) == 0) { + printf("."); + fflush(stdout); + } + + onDrainThisBuffer(msg); + } else if (what == ACodec::kWhatEOS) { + printf("$\n"); + + int64_t delayUs = ALooper::GetNowUs() - mStartTimeUs; + + if (mDecodeAudio) { + printf("%lld bytes received. %.2f KB/sec\n", + mTotalBytesReceived, + mTotalBytesReceived * 1E6 / 1024 / delayUs); + } else { + printf("%d frames decoded, %.2f fps. %lld bytes " + "received. 
%.2f KB/sec\n", + mNumOutputBuffersReceived, + mNumOutputBuffersReceived * 1E6 / delayUs, + mTotalBytesReceived, + mTotalBytesReceived * 1E6 / 1024 / delayUs); + } + + (new AMessage(kWhatStop, id()))->post(); + } else if (what == ACodec::kWhatFlushCompleted) { + mSeekState = SEEK_FLUSH_COMPLETED; + mCodec->signalResume(); + + (new AMessage(kWhatSeek, id()))->post(5000000ll); + } else { + CHECK_EQ(what, (int32_t)ACodec::kWhatShutdownCompleted); + + mDecodeLooper->unregisterHandler(mCodec->id()); + + if (mDecodeLooper != looper()) { + mDecodeLooper->stop(); + } + + looper()->stop(); + } + break; + } + + default: + TRESPASS(); + break; + } + } + +private: + enum { + kWhatStart = 'strt', + kWhatStop = 'stop', + kWhatCodecNotify = 'noti', + kWhatSeek = 'seek', + }; + + sp<ALooper> mDecodeLooper; + + AString mURI; + bool mDecodeAudio; + sp<Surface> mSurface; + sp<ACodec> mCodec; + sp<MediaSource> mSource; + + Vector<sp<ABuffer> > mCSD; + size_t mCSDIndex; + + MediaBuffer *mLeftOverBuffer; + status_t mFinalResult; + + int64_t mStartTimeUs; + int32_t mNumOutputBuffersReceived; + int64_t mTotalBytesReceived; + + enum SeekState { + SEEK_NONE, + SEEK_FLUSHING, + SEEK_FLUSH_COMPLETED, + }; + SeekState mSeekState; + int64_t mSeekTimeUs; + + sp<AMessage> makeFormat(const sp<MetaData> &meta) { + CHECK(mCSD.isEmpty()); + + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + + sp<AMessage> msg = new AMessage; + msg->setString("mime", mime); + + if (!strncasecmp("video/", mime, 6)) { + int32_t width, height; + CHECK(meta->findInt32(kKeyWidth, &width)); + CHECK(meta->findInt32(kKeyHeight, &height)); + + msg->setInt32("width", width); + msg->setInt32("height", height); + } else { + CHECK(!strncasecmp("audio/", mime, 6)); + + int32_t numChannels, sampleRate; + CHECK(meta->findInt32(kKeyChannelCount, &numChannels)); + CHECK(meta->findInt32(kKeySampleRate, &sampleRate)); + + msg->setInt32("channel-count", numChannels); + msg->setInt32("sample-rate", sampleRate); + } + + uint32_t type; + const void *data; + size_t size; + if (meta->findData(kKeyAVCC, &type, &data, &size)) { + // Parse the AVCDecoderConfigurationRecord + + const uint8_t *ptr = (const uint8_t *)data; + + CHECK(size >= 7); + CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1 + uint8_t profile = ptr[1]; + uint8_t level = ptr[3]; + + // There is decodable content out there that fails the following + // assertion, let's be lenient for now... + // CHECK((ptr[4] >> 2) == 0x3f); // reserved + + size_t lengthSize = 1 + (ptr[4] & 3); + + // commented out check below as H264_QVGA_500_NO_AUDIO.3gp + // violates it... 
+ // CHECK((ptr[5] >> 5) == 7); // reserved + + size_t numSeqParameterSets = ptr[5] & 31; + + ptr += 6; + size -= 6; + + sp<ABuffer> buffer = new ABuffer(1024); + buffer->setRange(0, 0); + + for (size_t i = 0; i < numSeqParameterSets; ++i) { + CHECK(size >= 2); + size_t length = U16_AT(ptr); + + ptr += 2; + size -= 2; + + CHECK(size >= length); + + memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4); + memcpy(buffer->data() + buffer->size() + 4, ptr, length); + buffer->setRange(0, buffer->size() + 4 + length); + + ptr += length; + size -= length; + } + + buffer->meta()->setInt32("csd", true); + mCSD.push(buffer); + + buffer = new ABuffer(1024); + buffer->setRange(0, 0); + + CHECK(size >= 1); + size_t numPictureParameterSets = *ptr; + ++ptr; + --size; + + for (size_t i = 0; i < numPictureParameterSets; ++i) { + CHECK(size >= 2); + size_t length = U16_AT(ptr); + + ptr += 2; + size -= 2; + + CHECK(size >= length); + + memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4); + memcpy(buffer->data() + buffer->size() + 4, ptr, length); + buffer->setRange(0, buffer->size() + 4 + length); + + ptr += length; + size -= length; + } + + buffer->meta()->setInt32("csd", true); + mCSD.push(buffer); + + msg->setObject("csd", buffer); + } else if (meta->findData(kKeyESDS, &type, &data, &size)) { + ESDS esds((const char *)data, size); + CHECK_EQ(esds.InitCheck(), (status_t)OK); + + const void *codec_specific_data; + size_t codec_specific_data_size; + esds.getCodecSpecificInfo( + &codec_specific_data, &codec_specific_data_size); + + sp<ABuffer> buffer = new ABuffer(codec_specific_data_size); + + memcpy(buffer->data(), codec_specific_data, + codec_specific_data_size); + + buffer->meta()->setInt32("csd", true); + mCSD.push(buffer); + } + + int32_t maxInputSize; + if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) { + msg->setInt32("max-input-size", maxInputSize); + } + + return msg; + } + + void onFillThisBuffer(const sp<AMessage> &msg) { + sp<AMessage> reply; + CHECK(msg->findMessage("reply", &reply)); + + if (mSeekState == SEEK_FLUSHING) { + reply->post(); + return; + } + + sp<RefBase> obj; + CHECK(msg->findObject("buffer", &obj)); + sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get()); + + if (mCSDIndex < mCSD.size()) { + outBuffer = mCSD.editItemAt(mCSDIndex++); + outBuffer->meta()->setInt64("timeUs", 0); + } else { + size_t sizeLeft = outBuffer->capacity(); + outBuffer->setRange(0, 0); + + int32_t n = 0; + + for (;;) { + MediaBuffer *inBuffer; + + if (mLeftOverBuffer != NULL) { + inBuffer = mLeftOverBuffer; + mLeftOverBuffer = NULL; + } else if (mFinalResult != OK) { + break; + } else { + MediaSource::ReadOptions options; + if (mSeekState == SEEK_FLUSH_COMPLETED) { + options.setSeekTo(mSeekTimeUs); + mSeekState = SEEK_NONE; + } + status_t err = mSource->read(&inBuffer, &options); + + if (err != OK) { + mFinalResult = err; + break; + } + } + + if (inBuffer->range_length() > sizeLeft) { + if (outBuffer->size() == 0) { + LOGE("Unable to fit even a single input buffer of size %d.", + inBuffer->range_length()); + } + CHECK_GT(outBuffer->size(), 0u); + + mLeftOverBuffer = inBuffer; + break; + } + + ++n; + + if (outBuffer->size() == 0) { + int64_t timeUs; + CHECK(inBuffer->meta_data()->findInt64(kKeyTime, &timeUs)); + + outBuffer->meta()->setInt64("timeUs", timeUs); + } + + memcpy(outBuffer->data() + outBuffer->size(), + (const uint8_t *)inBuffer->data() + + inBuffer->range_offset(), + inBuffer->range_length()); + + outBuffer->setRange( + 0, outBuffer->size() + 
inBuffer->range_length()); + + sizeLeft -= inBuffer->range_length(); + + inBuffer->release(); + inBuffer = NULL; + + // break; // Don't coalesce + } + + LOGV("coalesced %d input buffers", n); + + if (outBuffer->size() == 0) { + CHECK_NE(mFinalResult, (status_t)OK); + + reply->setInt32("err", mFinalResult); + reply->post(); + return; + } + } + + reply->setObject("buffer", outBuffer); + reply->post(); + } + + void onDrainThisBuffer(const sp<AMessage> &msg) { + sp<RefBase> obj; + CHECK(msg->findObject("buffer", &obj)); + + sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); + mTotalBytesReceived += buffer->size(); + + sp<AMessage> reply; + CHECK(msg->findMessage("reply", &reply)); + + reply->post(); + } + + DISALLOW_EVIL_CONSTRUCTORS(Controller); +}; + +static void usage(const char *me) { + fprintf(stderr, "usage: %s\n", me); + fprintf(stderr, " -h(elp)\n"); + fprintf(stderr, " -a(udio)\n"); + + fprintf(stderr, + " -s(surface) Allocate output buffers on a surface.\n"); +} + +int main(int argc, char **argv) { + android::ProcessState::self()->startThreadPool(); + + bool decodeAudio = false; + bool useSurface = false; + + int res; + while ((res = getopt(argc, argv, "has")) >= 0) { + switch (res) { + case 'a': + decodeAudio = true; + break; + + case 's': + useSurface = true; + break; + + case '?': + case 'h': + default: + { + usage(argv[0]); + return 1; + } + } + } + + argc -= optind; + argv += optind; + + if (argc != 1) { + usage(argv[-optind]); + return 1; + } + + DataSource::RegisterDefaultSniffers(); + + sp<ALooper> looper = new ALooper; + looper->setName("sf2"); + + sp<SurfaceComposerClient> composerClient; + sp<SurfaceControl> control; + sp<Surface> surface; + + if (!decodeAudio && useSurface) { + composerClient = new SurfaceComposerClient; + CHECK_EQ(composerClient->initCheck(), (status_t)OK); + + control = composerClient->createSurface( + getpid(), + String8("A Surface"), + 0, + 1280, + 800, + PIXEL_FORMAT_RGB_565, + 0); + + CHECK(control != NULL); + CHECK(control->isValid()); + + CHECK_EQ(composerClient->openTransaction(), (status_t)OK); + CHECK_EQ(control->setLayer(30000), (status_t)OK); + CHECK_EQ(control->show(), (status_t)OK); + CHECK_EQ(composerClient->closeTransaction(), (status_t)OK); + + surface = control->getSurface(); + CHECK(surface != NULL); + } + + sp<Controller> controller = new Controller(argv[0], decodeAudio, surface); + looper->registerHandler(controller); + + controller->startAsync(); + + CHECK_EQ(looper->start(true /* runOnCallingThread */), (status_t)OK); + + looper->unregisterHandler(controller->id()); + + if (!decodeAudio && useSurface) { + composerClient->dispose(); + } + + return 0; +} + diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp index ff92431..a43b190 100644 --- a/cmds/stagefright/stagefright.cpp +++ b/cmds/stagefright/stagefright.cpp @@ -31,7 +31,7 @@ #include <media/IMediaPlayerService.h> #include <media/stagefright/foundation/ALooper.h> #include "include/ARTSPController.h" -#include "include/LiveSource.h" +#include "include/LiveSession.h" #include "include/NuCachedSource2.h" #include <media/stagefright/AudioPlayer.h> #include <media/stagefright/DataSource.h> @@ -49,6 +49,10 @@ #include <media/stagefright/MPEG2TSWriter.h> #include <media/stagefright/MPEG4Writer.h> +#include <private/media/VideoFrame.h> +#include <SkBitmap.h> +#include <SkImageEncoder.h> + #include <fcntl.h> using namespace android; @@ -59,6 +63,7 @@ static long gReproduceBug; // if not -1. 
static bool gPreferSoftwareCodec; static bool gPlaybackAudio; static bool gWriteMP4; +static bool gDisplayHistogram; static String8 gWriteMP4Filename; static int64_t getNowUs() { @@ -68,6 +73,58 @@ static int64_t getNowUs() { return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll; } +static int CompareIncreasing(const int64_t *a, const int64_t *b) { + return (*a) < (*b) ? -1 : (*a) > (*b) ? 1 : 0; +} + +static void displayDecodeHistogram(Vector<int64_t> *decodeTimesUs) { + printf("decode times:\n"); + + decodeTimesUs->sort(CompareIncreasing); + + size_t n = decodeTimesUs->size(); + int64_t minUs = decodeTimesUs->itemAt(0); + int64_t maxUs = decodeTimesUs->itemAt(n - 1); + + printf("min decode time %lld us (%.2f secs)\n", minUs, minUs / 1E6); + printf("max decode time %lld us (%.2f secs)\n", maxUs, maxUs / 1E6); + + size_t counts[100]; + for (size_t i = 0; i < 100; ++i) { + counts[i] = 0; + } + + for (size_t i = 0; i < n; ++i) { + int64_t x = decodeTimesUs->itemAt(i); + + size_t slot = ((x - minUs) * 100) / (maxUs - minUs); + if (slot == 100) { slot = 99; } + + ++counts[slot]; + } + + for (size_t i = 0; i < 100; ++i) { + int64_t slotUs = minUs + (i * (maxUs - minUs) / 100); + + double fps = 1E6 / slotUs; + printf("[%.2f fps]: %d\n", fps, counts[i]); + } +} + +static void displayAVCProfileLevelIfPossible(const sp<MetaData>& meta) { + uint32_t type; + const void *data; + size_t size; + if (meta->findData(kKeyAVCC, &type, &data, &size)) { + const uint8_t *ptr = (const uint8_t *)data; + CHECK(size >= 7); + CHECK(ptr[0] == 1); // configurationVersion == 1 + uint8_t profile = ptr[1]; + uint8_t level = ptr[3]; + fprintf(stderr, "AVC video profile %d and level %d\n", profile, level); + } +} + static void playSource(OMXClient *client, sp<MediaSource> &source) { sp<MetaData> meta = source->getFormat(); @@ -87,6 +144,7 @@ static void playSource(OMXClient *client, sp<MediaSource> &source) { fprintf(stderr, "Failed to instantiate decoder for '%s'.\n", mime); return; } + displayAVCProfileLevelIfPossible(meta); } source.clear(); @@ -201,6 +259,8 @@ static void playSource(OMXClient *client, sp<MediaSource> &source) { int64_t sumDecodeUs = 0; int64_t totalBytes = 0; + Vector<int64_t> decodeTimesUs; + while (numIterationsLeft-- > 0) { long numFrames = 0; @@ -224,9 +284,17 @@ static void playSource(OMXClient *client, sp<MediaSource> &source) { break; } - if (buffer->range_length() > 0 && (n++ % 16) == 0) { - printf("."); - fflush(stdout); + if (buffer->range_length() > 0) { + if (gDisplayHistogram && n > 0) { + // Ignore the first time since it includes some setup + // cost. + decodeTimesUs.push(delayDecodeUs); + } + + if ((n++ % 16) == 0) { + printf("."); + fflush(stdout); + } } sumDecodeUs += delayDecodeUs; @@ -266,6 +334,10 @@ static void playSource(OMXClient *client, sp<MediaSource> &source) { (double)sumDecodeUs / n); printf("decoded a total of %d frame(s).\n", n); + + if (gDisplayHistogram) { + displayDecodeHistogram(&decodeTimesUs); + } } else if (!strncasecmp("audio/", mime, 6)) { // Frame count makes less sense for audio, as the output buffer // sizes may be different across decoders. 
@@ -466,6 +538,8 @@ static void usage(const char *me) { fprintf(stderr, " -o playback audio\n"); fprintf(stderr, " -w(rite) filename (write to .mp4 file)\n"); fprintf(stderr, " -k seek test\n"); + fprintf(stderr, " -x display a histogram of decoding times/fps " + "(video only)\n"); } int main(int argc, char **argv) { @@ -482,12 +556,14 @@ int main(int argc, char **argv) { gPreferSoftwareCodec = false; gPlaybackAudio = false; gWriteMP4 = false; + gDisplayHistogram = false; sp<ALooper> looper; sp<ARTSPController> rtspController; + sp<LiveSession> liveSession; int res; - while ((res = getopt(argc, argv, "han:lm:b:ptsow:k")) >= 0) { + while ((res = getopt(argc, argv, "han:lm:b:ptsow:kx")) >= 0) { switch (res) { case 'a': { @@ -560,6 +636,12 @@ int main(int argc, char **argv) { break; } + case 'x': + { + gDisplayHistogram = true; + break; + } + case '?': case 'h': default: @@ -602,6 +684,19 @@ int main(int argc, char **argv) { if (mem != NULL) { printf("getFrameAtTime(%s) => OK\n", filename); + + VideoFrame *frame = (VideoFrame *)mem->pointer(); + + SkBitmap bitmap; + bitmap.setConfig( + SkBitmap::kRGB_565_Config, frame->mWidth, frame->mHeight); + + bitmap.setPixels((uint8_t *)frame + sizeof(VideoFrame)); + + CHECK(SkImageEncoder::EncodeFile( + "/sdcard/out.jpg", bitmap, + SkImageEncoder::kJPEG_Type, + SkImageEncoder::kDefaultQuality)); } else { mem = retriever->extractAlbumArt(); @@ -754,8 +849,15 @@ int main(int argc, char **argv) { String8 uri("http://"); uri.append(filename + 11); - dataSource = new LiveSource(uri.string()); - dataSource = new NuCachedSource2(dataSource); + if (looper == NULL) { + looper = new ALooper; + looper->start(); + } + liveSession = new LiveSession; + looper->registerHandler(liveSession); + + liveSession->connect(uri.string()); + dataSource = liveSession->getDataSource(); extractor = MediaExtractor::Create( diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp new file mode 100644 index 0000000..9246a00 --- /dev/null +++ b/cmds/stagefright/stream.cpp @@ -0,0 +1,190 @@ +#include <binder/ProcessState.h> + +#include <media/IStreamSource.h> +#include <media/mediaplayer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <binder/IServiceManager.h> +#include <media/IMediaPlayerService.h> +#include <surfaceflinger/ISurfaceComposer.h> +#include <surfaceflinger/SurfaceComposerClient.h> + +#include <fcntl.h> + +using namespace android; + +struct MyStreamSource : public BnStreamSource { + // Caller retains ownership of fd. 
+ MyStreamSource(int fd); + + virtual void setListener(const sp<IStreamListener> &listener); + virtual void setBuffers(const Vector<sp<IMemory> > &buffers); + + virtual void onBufferAvailable(size_t index); + +protected: + virtual ~MyStreamSource(); + +private: + int mFd; + off64_t mFileSize; + int64_t mNextSeekTimeUs; + + sp<IStreamListener> mListener; + Vector<sp<IMemory> > mBuffers; + + DISALLOW_EVIL_CONSTRUCTORS(MyStreamSource); +}; + +MyStreamSource::MyStreamSource(int fd) + : mFd(fd), + mFileSize(0), + mNextSeekTimeUs(-1) { // ALooper::GetNowUs() + 5000000ll) { + CHECK_GE(fd, 0); + + mFileSize = lseek64(fd, 0, SEEK_END); + lseek64(fd, 0, SEEK_SET); +} + +MyStreamSource::~MyStreamSource() { +} + +void MyStreamSource::setListener(const sp<IStreamListener> &listener) { + mListener = listener; +} + +void MyStreamSource::setBuffers(const Vector<sp<IMemory> > &buffers) { + mBuffers = buffers; +} + +void MyStreamSource::onBufferAvailable(size_t index) { + CHECK_LT(index, mBuffers.size()); + + if (mNextSeekTimeUs >= 0 && mNextSeekTimeUs <= ALooper::GetNowUs()) { + off64_t offset = (off64_t)(((float)rand() / RAND_MAX) * mFileSize * 0.8); + offset = (offset / 188) * 188; + + lseek(mFd, offset, SEEK_SET); + + mListener->issueCommand( + IStreamListener::DISCONTINUITY, false /* synchronous */); + + mNextSeekTimeUs = -1; + mNextSeekTimeUs = ALooper::GetNowUs() + 5000000ll; + } + + sp<IMemory> mem = mBuffers.itemAt(index); + + ssize_t n = read(mFd, mem->pointer(), mem->size()); + if (n <= 0) { + mListener->issueCommand(IStreamListener::EOS, false /* synchronous */); + } else { + mListener->queueBuffer(index, n); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +struct MyClient : public BnMediaPlayerClient { + MyClient() + : mEOS(false) { + } + + virtual void notify(int msg, int ext1, int ext2) { + Mutex::Autolock autoLock(mLock); + + if (msg == MEDIA_ERROR || msg == MEDIA_PLAYBACK_COMPLETE) { + mEOS = true; + mCondition.signal(); + } + } + + void waitForEOS() { + Mutex::Autolock autoLock(mLock); + while (!mEOS) { + mCondition.wait(mLock); + } + } + +protected: + virtual ~MyClient() { + } + +private: + Mutex mLock; + Condition mCondition; + + bool mEOS; + + DISALLOW_EVIL_CONSTRUCTORS(MyClient); +}; + +int main(int argc, char **argv) { + android::ProcessState::self()->startThreadPool(); + + if (argc != 2) { + fprintf(stderr, "Usage: %s filename\n", argv[0]); + return 1; + } + + sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient; + CHECK_EQ(composerClient->initCheck(), (status_t)OK); + + sp<SurfaceControl> control = + composerClient->createSurface( + getpid(), + String8("A Surface"), + 0, + 1280, + 800, + PIXEL_FORMAT_RGB_565, + 0); + + CHECK(control != NULL); + CHECK(control->isValid()); + + CHECK_EQ(composerClient->openTransaction(), (status_t)OK); + CHECK_EQ(control->setLayer(30000), (status_t)OK); + CHECK_EQ(control->show(), (status_t)OK); + CHECK_EQ(composerClient->closeTransaction(), (status_t)OK); + + sp<Surface> surface = control->getSurface(); + CHECK(surface != NULL); + + sp<IServiceManager> sm = defaultServiceManager(); + sp<IBinder> binder = sm->getService(String16("media.player")); + sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder); + + CHECK(service.get() != NULL); + + int fd = open(argv[1], O_RDONLY); + + if (fd < 0) { + fprintf(stderr, "Failed to open file '%s'.", argv[1]); + return 1; + } + + sp<MyClient> client = new MyClient; + + sp<IMediaPlayer> player = + service->create(getpid(), client, 
new MyStreamSource(fd), 0); + + if (player != NULL) { + player->setVideoSurface(surface); + player->start(); + + client->waitForEOS(); + + player->stop(); + } else { + fprintf(stderr, "failed to instantiate player.\n"); + } + + close(fd); + fd = -1; + + composerClient->dispose(); + + return 0; +} diff --git a/drm/common/DrmEngineBase.cpp b/drm/common/DrmEngineBase.cpp index ac360eb..9b16c36 100644 --- a/drm/common/DrmEngineBase.cpp +++ b/drm/common/DrmEngineBase.cpp @@ -84,7 +84,7 @@ status_t DrmEngineBase::consumeRights( } status_t DrmEngineBase::setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) { + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) { return onSetPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position); } @@ -120,7 +120,7 @@ DrmSupportInfo* DrmEngineBase::getSupportInfo(int uniqueId) { } status_t DrmEngineBase::openDecryptSession( - int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) { + int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) { return onOpenDecryptSession(uniqueId, decryptHandle, fd, offset, length); } @@ -150,7 +150,7 @@ status_t DrmEngineBase::finalizeDecryptUnit( } ssize_t DrmEngineBase::pread( - int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) { + int uniqueId, DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) { return onPread(uniqueId, decryptHandle, buffer, numBytes, offset); } diff --git a/drm/common/IDrmManagerService.cpp b/drm/common/IDrmManagerService.cpp index 723b50e..ddbd220 100644 --- a/drm/common/IDrmManagerService.cpp +++ b/drm/common/IDrmManagerService.cpp @@ -363,7 +363,7 @@ status_t BpDrmManagerService::consumeRights( } status_t BpDrmManagerService::setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) { + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) { LOGV("setPlaybackStatus"); Parcel data, reply; @@ -382,7 +382,7 @@ status_t BpDrmManagerService::setPlaybackStatus( } data.writeInt32(playbackStatus); - data.writeInt32(position); + data.writeInt64(position); remote()->transact(SET_PLAYBACK_STATUS, data, &reply); return reply.readInt32(); @@ -459,7 +459,7 @@ DrmConvertedStatus* BpDrmManagerService::convertData( if (0 != reply.dataAvail()) { //Filling DRM Converted Status const int statusCode = reply.readInt32(); - const int offset = reply.readInt32(); + const off64_t offset = reply.readInt64(); DrmBuffer* convertedData = NULL; if (0 != reply.dataAvail()) { @@ -491,7 +491,7 @@ DrmConvertedStatus* BpDrmManagerService::closeConvertSession(int uniqueId, int c if (0 != reply.dataAvail()) { //Filling DRM Converted Status const int statusCode = reply.readInt32(); - const int offset = reply.readInt32(); + const off64_t offset = reply.readInt64(); DrmBuffer* convertedData = NULL; if (0 != reply.dataAvail()) { @@ -545,15 +545,15 @@ status_t BpDrmManagerService::getAllSupportInfo( } DecryptHandle* BpDrmManagerService::openDecryptSession( - int uniqueId, int fd, int offset, int length) { + int uniqueId, int fd, off64_t offset, off64_t length) { LOGV("Entering BpDrmManagerService::openDecryptSession"); Parcel data, reply; data.writeInterfaceToken(IDrmManagerService::getInterfaceDescriptor()); data.writeInt32(uniqueId); data.writeFileDescriptor(fd); - data.writeInt32(offset); - data.writeInt32(length); + data.writeInt64(offset); + data.writeInt64(length); 
remote()->transact(OPEN_DECRYPT_SESSION, data, &reply); @@ -569,8 +569,6 @@ DecryptHandle* BpDrmManagerService::openDecryptSession( handle->decryptInfo = new DecryptInfo(); handle->decryptInfo->decryptBufferLength = reply.readInt32(); } - } else { - LOGE("no decryptHandle is generated in service side"); } return handle; } @@ -729,7 +727,7 @@ status_t BpDrmManagerService::finalizeDecryptUnit( ssize_t BpDrmManagerService::pread( int uniqueId, DecryptHandle* decryptHandle, void* buffer, - ssize_t numBytes, off_t offset) { + ssize_t numBytes, off64_t offset) { LOGV("read"); Parcel data, reply; int result; @@ -749,7 +747,7 @@ ssize_t BpDrmManagerService::pread( } data.writeInt32(numBytes); - data.writeInt32(offset); + data.writeInt64(offset); remote()->transact(PREAD, data, &reply); result = reply.readInt32(); @@ -1113,7 +1111,7 @@ status_t BnDrmManagerService::onTransact( } const status_t status - = setPlaybackStatus(uniqueId, &handle, data.readInt32(), data.readInt32()); + = setPlaybackStatus(uniqueId, &handle, data.readInt32(), data.readInt64()); reply->writeInt32(status); delete handle.decryptInfo; handle.decryptInfo = NULL; @@ -1185,7 +1183,7 @@ status_t BnDrmManagerService::onTransact( if (NULL != drmConvertedStatus) { //Filling Drm Converted Ststus reply->writeInt32(drmConvertedStatus->statusCode); - reply->writeInt32(drmConvertedStatus->offset); + reply->writeInt64(drmConvertedStatus->offset); if (NULL != drmConvertedStatus->convertedData) { const DrmBuffer* convertedData = drmConvertedStatus->convertedData; @@ -1214,7 +1212,7 @@ status_t BnDrmManagerService::onTransact( if (NULL != drmConvertedStatus) { //Filling Drm Converted Ststus reply->writeInt32(drmConvertedStatus->statusCode); - reply->writeInt32(drmConvertedStatus->offset); + reply->writeInt64(drmConvertedStatus->offset); if (NULL != drmConvertedStatus->convertedData) { const DrmBuffer* convertedData = drmConvertedStatus->convertedData; @@ -1274,7 +1272,7 @@ status_t BnDrmManagerService::onTransact( const int fd = data.readFileDescriptor(); DecryptHandle* handle - = openDecryptSession(uniqueId, fd, data.readInt32(), data.readInt32()); + = openDecryptSession(uniqueId, fd, data.readInt64(), data.readInt64()); if (NULL != handle) { reply->writeInt32(handle->decryptId); @@ -1285,8 +1283,6 @@ status_t BnDrmManagerService::onTransact( reply->writeInt32(handle->decryptInfo->decryptBufferLength); delete handle->decryptInfo; handle->decryptInfo = NULL; } - } else { - LOGE("NULL decryptHandle is returned"); } delete handle; handle = NULL; return DRM_NO_ERROR; @@ -1481,7 +1477,7 @@ status_t BnDrmManagerService::onTransact( const int numBytes = data.readInt32(); char* buffer = new char[numBytes]; - const off_t offset = data.readInt32(); + const off64_t offset = data.readInt64(); ssize_t result = pread(uniqueId, &handle, buffer, numBytes, offset); reply->writeInt32(result); diff --git a/drm/common/ReadWriteUtils.cpp b/drm/common/ReadWriteUtils.cpp index 7ec4fa2..c16214e 100644 --- a/drm/common/ReadWriteUtils.cpp +++ b/drm/common/ReadWriteUtils.cpp @@ -42,7 +42,7 @@ String8 ReadWriteUtils::readBytes(const String8& filePath) { struct stat sb; if (fstat(fd, &sb) == 0 && sb.st_size > 0) { - int length = sb.st_size; + off64_t length = sb.st_size; char* bytes = new char[length]; if (length == read(fd, (void*) bytes, length)) { string.append(bytes, length); @@ -57,7 +57,7 @@ String8 ReadWriteUtils::readBytes(const String8& filePath) { int ReadWriteUtils::readBytes(const String8& filePath, char** buffer) { FILE* file = NULL; file = 
fopen(filePath.string(), "r"); - int length = 0; + off64_t length = 0; if (NULL != file) { int fd = fileno(file); diff --git a/drm/drmserver/DrmManager.cpp b/drm/drmserver/DrmManager.cpp index 49df1c8..b6e0c30 100644 --- a/drm/drmserver/DrmManager.cpp +++ b/drm/drmserver/DrmManager.cpp @@ -272,7 +272,7 @@ status_t DrmManager::consumeRights( } status_t DrmManager::setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) { + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) { status_t result = DRM_ERROR_UNKNOWN; if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) { IDrmEngine* drmEngine = mDecryptSessionMap.valueFor(decryptHandle->decryptId); @@ -384,7 +384,7 @@ status_t DrmManager::getAllSupportInfo( return DRM_NO_ERROR; } -DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, int offset, int length) { +DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) { Mutex::Autolock _l(mDecryptLock); status_t result = DRM_ERROR_CANNOT_HANDLE; Vector<String8> plugInIdList = mPlugInManager.getPlugInIdList(); @@ -407,7 +407,6 @@ DecryptHandle* DrmManager::openDecryptSession(int uniqueId, int fd, int offset, } if (DRM_NO_ERROR != result) { delete handle; handle = NULL; - LOGE("DrmManager::openDecryptSession: no capable plug-in found"); } return handle; } @@ -485,7 +484,7 @@ status_t DrmManager::finalizeDecryptUnit( } ssize_t DrmManager::pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset) { + void* buffer, ssize_t numBytes, off64_t offset) { ssize_t result = DECRYPT_FILE_ERROR; if (mDecryptSessionMap.indexOfKey(decryptHandle->decryptId) != NAME_NOT_FOUND) { diff --git a/drm/drmserver/DrmManagerService.cpp b/drm/drmserver/DrmManagerService.cpp index 4dcfa72..0901a44 100644 --- a/drm/drmserver/DrmManagerService.cpp +++ b/drm/drmserver/DrmManagerService.cpp @@ -162,7 +162,7 @@ status_t DrmManagerService::consumeRights( } status_t DrmManagerService::setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) { + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) { LOGV("Entering setPlaybackStatus"); return mDrmManager->setPlaybackStatus(uniqueId, decryptHandle, playbackStatus, position); } @@ -207,7 +207,7 @@ status_t DrmManagerService::getAllSupportInfo( } DecryptHandle* DrmManagerService::openDecryptSession( - int uniqueId, int fd, int offset, int length) { + int uniqueId, int fd, off64_t offset, off64_t length) { LOGV("Entering DrmManagerService::openDecryptSession"); if (isProtectedCallAllowed()) { return mDrmManager->openDecryptSession(uniqueId, fd, offset, length); @@ -251,7 +251,7 @@ status_t DrmManagerService::finalizeDecryptUnit( } ssize_t DrmManagerService::pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset) { + void* buffer, ssize_t numBytes, off64_t offset) { LOGV("Entering pread"); return mDrmManager->pread(uniqueId, decryptHandle, buffer, numBytes, offset); } diff --git a/drm/libdrmframework/DrmManagerClient.cpp b/drm/libdrmframework/DrmManagerClient.cpp index 8bb00c3..7b51822 100644 --- a/drm/libdrmframework/DrmManagerClient.cpp +++ b/drm/libdrmframework/DrmManagerClient.cpp @@ -83,7 +83,7 @@ status_t DrmManagerClient::consumeRights(DecryptHandle* decryptHandle, int actio } status_t DrmManagerClient::setPlaybackStatus( - DecryptHandle* decryptHandle, int playbackStatus, int 
position) { + DecryptHandle* decryptHandle, int playbackStatus, int64_t position) { return mDrmManagerClientImpl ->setPlaybackStatus(mUniqueId, decryptHandle, playbackStatus, position); } @@ -117,7 +117,7 @@ status_t DrmManagerClient::getAllSupportInfo(int* length, DrmSupportInfo** drmSu return mDrmManagerClientImpl->getAllSupportInfo(mUniqueId, length, drmSupportInfoArray); } -DecryptHandle* DrmManagerClient::openDecryptSession(int fd, int offset, int length) { +DecryptHandle* DrmManagerClient::openDecryptSession(int fd, off64_t offset, off64_t length) { return mDrmManagerClientImpl->openDecryptSession(mUniqueId, fd, offset, length); } @@ -150,7 +150,7 @@ status_t DrmManagerClient::finalizeDecryptUnit(DecryptHandle* decryptHandle, int } ssize_t DrmManagerClient::pread( - DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset) { + DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset) { Mutex::Autolock _l(mDecryptLock); return mDrmManagerClientImpl->pread(mUniqueId, decryptHandle, buffer, numBytes, offset); } diff --git a/drm/libdrmframework/DrmManagerClientImpl.cpp b/drm/libdrmframework/DrmManagerClientImpl.cpp index eea312b..d20de92 100644 --- a/drm/libdrmframework/DrmManagerClientImpl.cpp +++ b/drm/libdrmframework/DrmManagerClientImpl.cpp @@ -179,7 +179,7 @@ status_t DrmManagerClientImpl::consumeRights( } status_t DrmManagerClientImpl::setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) { + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) { status_t status = DRM_ERROR_UNKNOWN; if (NULL != decryptHandle) { status = getDrmManagerService()->setPlaybackStatus( @@ -240,7 +240,7 @@ status_t DrmManagerClientImpl::getAllSupportInfo( } DecryptHandle* DrmManagerClientImpl::openDecryptSession( - int uniqueId, int fd, int offset, int length) { + int uniqueId, int fd, off64_t offset, off64_t length) { return getDrmManagerService()->openDecryptSession(uniqueId, fd, offset, length); } @@ -292,7 +292,7 @@ status_t DrmManagerClientImpl::finalizeDecryptUnit( } ssize_t DrmManagerClientImpl::pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset) { + void* buffer, ssize_t numBytes, off64_t offset) { ssize_t retCode = INVALID_VALUE; if ((NULL != decryptHandle) && (NULL != buffer) && (0 < numBytes)) { retCode = getDrmManagerService()->pread(uniqueId, decryptHandle, buffer, numBytes, offset); diff --git a/drm/libdrmframework/include/DrmManager.h b/drm/libdrmframework/include/DrmManager.h index bc462c2..e05366d 100644 --- a/drm/libdrmframework/include/DrmManager.h +++ b/drm/libdrmframework/include/DrmManager.h @@ -95,7 +95,7 @@ public: status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve); status_t setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position); + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position); bool validateAction( int uniqueId, const String8& path, int action, const ActionDescription& description); @@ -112,7 +112,7 @@ public: status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray); - DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length); + DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length); DecryptHandle* openDecryptSession(int uniqueId, const char* uri); @@ -127,7 +127,7 @@ public: status_t finalizeDecryptUnit(int uniqueId, 
DecryptHandle* decryptHandle, int decryptUnitId); ssize_t pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset); + void* buffer, ssize_t numBytes, off64_t offset); void onInfo(const DrmInfoEvent& event); diff --git a/drm/libdrmframework/include/DrmManagerClientImpl.h b/drm/libdrmframework/include/DrmManagerClientImpl.h index ff84fc7..0a7fcd1 100644 --- a/drm/libdrmframework/include/DrmManagerClientImpl.h +++ b/drm/libdrmframework/include/DrmManagerClientImpl.h @@ -203,7 +203,7 @@ public: * Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure */ status_t setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position); + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position); /** * Validates whether an action on the DRM content is allowed or not. @@ -303,7 +303,7 @@ public: * @return * Handle for the decryption session */ - DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length); + DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length); /** * Open the decrypt session to decrypt the given protected content @@ -381,7 +381,7 @@ public: * @return Number of bytes read. Returns -1 for Failure. */ ssize_t pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset); + void* buffer, ssize_t numBytes, off64_t offset); /** * Notify the event to the registered listener diff --git a/drm/libdrmframework/include/DrmManagerService.h b/drm/libdrmframework/include/DrmManagerService.h index f346356..d0a0db7 100644 --- a/drm/libdrmframework/include/DrmManagerService.h +++ b/drm/libdrmframework/include/DrmManagerService.h @@ -81,7 +81,7 @@ public: status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve); status_t setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position); + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position); bool validateAction(int uniqueId, const String8& path, int action, const ActionDescription& description); @@ -98,7 +98,7 @@ public: status_t getAllSupportInfo(int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray); - DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length); + DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length); DecryptHandle* openDecryptSession(int uniqueId, const char* uri); @@ -113,7 +113,7 @@ public: status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId); ssize_t pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset); + void* buffer, ssize_t numBytes, off64_t offset); private: DrmManager* mDrmManager; diff --git a/drm/libdrmframework/include/IDrmManagerService.h b/drm/libdrmframework/include/IDrmManagerService.h index f1dabd3..2424ea5 100644 --- a/drm/libdrmframework/include/IDrmManagerService.h +++ b/drm/libdrmframework/include/IDrmManagerService.h @@ -120,7 +120,7 @@ public: int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve) = 0; virtual status_t setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0; + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0; virtual bool validateAction( int uniqueId, const String8& path, @@ -140,7 +140,7 @@ public: virtual status_t getAllSupportInfo( int uniqueId, int* length, 
DrmSupportInfo** drmSupportInfoArray) = 0; - virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length) = 0; + virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length) = 0; virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri) = 0; @@ -156,7 +156,7 @@ public: int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId) = 0; virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes,off_t offset) = 0; + void* buffer, ssize_t numBytes,off64_t offset) = 0; }; /** @@ -204,7 +204,7 @@ public: int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve); virtual status_t setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position); + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position); virtual bool validateAction( int uniqueId, const String8& path, int action, const ActionDescription& description); @@ -223,7 +223,7 @@ public: virtual status_t getAllSupportInfo( int uniqueId, int* length, DrmSupportInfo** drmSupportInfoArray); - virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, int offset, int length); + virtual DecryptHandle* openDecryptSession(int uniqueId, int fd, off64_t offset, off64_t length); virtual DecryptHandle* openDecryptSession(int uniqueId, const char* uri); @@ -239,7 +239,7 @@ public: int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId); virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset); + void* buffer, ssize_t numBytes, off64_t offset); }; /** diff --git a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h index 67b6355..b61e3d3 100644 --- a/drm/libdrmframework/plugins/common/include/DrmEngineBase.h +++ b/drm/libdrmframework/plugins/common/include/DrmEngineBase.h @@ -62,7 +62,7 @@ public: status_t consumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve); status_t setPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position); + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position); bool validateAction( int uniqueId, const String8& path, int action, const ActionDescription& description); @@ -80,7 +80,7 @@ public: DrmSupportInfo* getSupportInfo(int uniqueId); status_t openDecryptSession( - int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length); + int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length); status_t openDecryptSession( int uniqueId, DecryptHandle* decryptHandle, const char* uri); @@ -96,7 +96,7 @@ public: status_t finalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId); ssize_t pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset); + void* buffer, ssize_t numBytes, off64_t offset); protected: ///////////////////////////////////////////////////// @@ -268,7 +268,7 @@ protected: * Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure */ virtual status_t onSetPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position) = 0; + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position) = 0; /** * Validates whether an action on the DRM content is allowed or not. 
@@ -369,7 +369,7 @@ protected: * DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success */ virtual status_t onOpenDecryptSession( - int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0; + int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0; /** * Open the decrypt session to decrypt the given protected content @@ -450,7 +450,7 @@ protected: * @return Number of bytes read. Returns -1 for Failure. */ virtual ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset) = 0; + void* buffer, ssize_t numBytes, off64_t offset) = 0; }; }; diff --git a/drm/libdrmframework/plugins/common/include/IDrmEngine.h b/drm/libdrmframework/plugins/common/include/IDrmEngine.h index f839070..d05c24f 100644 --- a/drm/libdrmframework/plugins/common/include/IDrmEngine.h +++ b/drm/libdrmframework/plugins/common/include/IDrmEngine.h @@ -224,7 +224,7 @@ public: * Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure */ virtual status_t setPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle, - int playbackStatus, int position) = 0; + int playbackStatus, int64_t position) = 0; /** * Validates whether an action on the DRM content is allowed or not. @@ -325,7 +325,7 @@ public: * DRM_ERROR_CANNOT_HANDLE for failure and DRM_NO_ERROR for success */ virtual status_t openDecryptSession( - int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) = 0; + int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) = 0; /** * Open the decrypt session to decrypt the given protected content @@ -406,7 +406,7 @@ public: * @return Number of bytes read. Returns -1 for Failure. */ virtual ssize_t pread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset) = 0; + void* buffer, ssize_t numBytes, off64_t offset) = 0; }; }; diff --git a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h index bbcd9ed..f941f70 100644 --- a/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h +++ b/drm/libdrmframework/plugins/passthru/include/DrmPassthruPlugIn.h @@ -56,7 +56,7 @@ protected: status_t onConsumeRights(int uniqueId, DecryptHandle* decryptHandle, int action, bool reserve); status_t onSetPlaybackStatus( - int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int position); + int uniqueId, DecryptHandle* decryptHandle, int playbackStatus, int64_t position); bool onValidateAction( int uniqueId, const String8& path, int action, const ActionDescription& description); @@ -74,7 +74,7 @@ protected: DrmSupportInfo* onGetSupportInfo(int uniqueId); status_t onOpenDecryptSession( - int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length); + int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length); status_t onOpenDecryptSession( int uniqueId, DecryptHandle* decryptHandle, const char* uri); @@ -90,7 +90,7 @@ protected: status_t onFinalizeDecryptUnit(int uniqueId, DecryptHandle* decryptHandle, int decryptUnitId); ssize_t onPread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset); + void* buffer, ssize_t numBytes, off64_t offset); private: DecryptHandle* openDecryptSessionImpl(); diff --git a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp index dee1fdb..976978f 100644 --- 
a/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp +++ b/drm/libdrmframework/plugins/passthru/src/DrmPassthruPlugIn.cpp @@ -187,7 +187,7 @@ status_t DrmPassthruPlugIn::onConsumeRights(int uniqueId, DecryptHandle* decrypt } status_t DrmPassthruPlugIn::onSetPlaybackStatus(int uniqueId, DecryptHandle* decryptHandle, - int playbackStatus, int position) { + int playbackStatus, int64_t position) { LOGD("DrmPassthruPlugIn::onSetPlaybackStatus() : %d", uniqueId); return DRM_NO_ERROR; } @@ -234,7 +234,7 @@ DrmConvertedStatus* DrmPassthruPlugIn::onCloseConvertSession(int uniqueId, int c } status_t DrmPassthruPlugIn::onOpenDecryptSession( - int uniqueId, DecryptHandle* decryptHandle, int fd, int offset, int length) { + int uniqueId, DecryptHandle* decryptHandle, int fd, off64_t offset, off64_t length) { LOGD("DrmPassthruPlugIn::onOpenDecryptSession() : %d", uniqueId); #ifdef ENABLE_PASSTHRU_DECRYPTION @@ -292,7 +292,7 @@ status_t DrmPassthruPlugIn::onFinalizeDecryptUnit( } ssize_t DrmPassthruPlugIn::onPread(int uniqueId, DecryptHandle* decryptHandle, - void* buffer, ssize_t numBytes, off_t offset) { + void* buffer, ssize_t numBytes, off64_t offset) { LOGD("DrmPassthruPlugIn::onPread() : %d", uniqueId); return 0; } diff --git a/include/camera/Camera.h b/include/camera/Camera.h index e734c38..e5f7e62 100644 --- a/include/camera/Camera.h +++ b/include/camera/Camera.h @@ -19,11 +19,10 @@ #include <utils/Timers.h> #include <camera/ICameraClient.h> +#include <gui/ISurfaceTexture.h> namespace android { -class ISurface; - /* * A set of bit masks for specifying how the received preview frames are * handled before the previewCallback() call. @@ -96,11 +95,19 @@ enum { // or CAMERA_MSG_COMPRESSED_IMAGE. This is not allowed to be set during // preview. CAMERA_CMD_SET_DISPLAY_ORIENTATION = 3, + + // cmdType to disable/enable shutter sound. + // In sendCommand passing arg1 = 0 will disable, + // while passing arg1 = 1 will enable the shutter sound. + CAMERA_CMD_ENABLE_SHUTTER_SOUND = 4, + + // cmdType to play recording sound. + CAMERA_CMD_PLAY_RECORDING_SOUND = 5, }; // camera fatal errors enum { - CAMERA_ERROR_UKNOWN = 1, + CAMERA_ERROR_UNKNOWN = 1, CAMERA_ERROR_SERVER_DIED = 100 }; @@ -166,9 +173,11 @@ public: status_t getStatus() { return mStatus; } - // pass the buffered ISurface to the camera service + // pass the buffered Surface to the camera service status_t setPreviewDisplay(const sp<Surface>& surface); - status_t setPreviewDisplay(const sp<ISurface>& surface); + + // pass the buffered ISurfaceTexture to the camera service + status_t setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture); // start preview mode, must call setPreviewDisplay first status_t startPreview(); @@ -209,6 +218,15 @@ public: // send command to camera driver status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2); + // return the total number of available video buffers. + int32_t getNumberOfVideoBuffers() const; + + // return the individual video buffer corresponding to the given index. + sp<IMemory> getVideoBuffer(int32_t index) const; + + // tell camera hal to store meta data or real YUV in video buffers. 
+ status_t storeMetaDataInBuffers(bool enabled); + void setListener(const sp<CameraListener>& listener); void setPreviewCallbackFlags(int preview_callback_flag); diff --git a/include/camera/CameraHardwareInterface.h b/include/camera/CameraHardwareInterface.h index 35c5aa1..86bd849 100644 --- a/include/camera/CameraHardwareInterface.h +++ b/include/camera/CameraHardwareInterface.h @@ -18,15 +18,16 @@ #define ANDROID_HARDWARE_CAMERA_HARDWARE_INTERFACE_H #include <binder/IMemory.h> +#include <ui/egl/android_natives.h> #include <utils/RefBase.h> #include <surfaceflinger/ISurface.h> +#include <ui/android_native_buffer.h> +#include <ui/GraphicBuffer.h> #include <camera/Camera.h> #include <camera/CameraParameters.h> namespace android { -class Overlay; - /** * The size of image for display. */ @@ -86,8 +87,8 @@ class CameraHardwareInterface : public virtual RefBase { public: virtual ~CameraHardwareInterface() { } - /** Return the IMemoryHeap for the preview image heap */ - virtual sp<IMemoryHeap> getPreviewHeap() const = 0; + /** Set the ANativeWindow to which preview frames are sent */ + virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) = 0; /** Return the IMemoryHeap for the raw image heap */ virtual sp<IMemoryHeap> getRawHeap() const = 0; @@ -111,6 +112,13 @@ public: /** * Disable a message, or a set of messages. + * + * Once it has received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera hal + * should not rely on its client to call releaseRecordingFrame() to release + * video recording frames sent out by the camera hal before and after the + * disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera hal clients must not + * modify/access any video recording frame after calling + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). */ virtual void disableMsgType(int32_t msgType) = 0; @@ -127,12 +135,6 @@ public: virtual status_t startPreview() = 0; /** - * Only used if overlays are used for camera preview. - */ - virtual bool useOverlay() {return false;} - virtual status_t setOverlay(const sp<Overlay> &overlay) {return BAD_VALUE;} - - /** * Stop a previously started preview. */ virtual void stopPreview() = 0; @@ -143,9 +145,89 @@ public: virtual bool previewEnabled() = 0; /** + * Retrieve the total number of available buffers from camera hal for passing + * video frame data in a recording session. Must be called again if a new + * recording session is started. + * + * This method should be called after startRecording(), since + * some camera hals may choose to allocate the video buffers only after + * recording is started. + * + * Some camera hals may not implement this method, and 0 can be returned to + * indicate that this feature is not available. + * + * @return the number of video buffers that camera hal makes available. + * Zero (0) is returned to indicate that camera hal does not support + * this feature. + */ + virtual int32_t getNumberOfVideoBuffers() const { return 0; } + + /** + * Retrieve the video buffer corresponding to the given index in a + * recording session. Must be called again if a new recording session + * is started. + * + * It allows a client to retrieve all video buffers that camera hal makes + * available for passing video frame data by calling this method with all + * valid index values. The valid index value ranges from 0 to n, where + * n = getNumberOfVideoBuffers() - 1. With an index outside of the valid + * range, 0 must be returned. This method should be called after + * startRecording(). 
+ * + * The video buffers should NOT be modified/released by camera hal + * until stopRecording() is called and all outstanding video buffers + * previously sent out via CAMERA_MSG_VIDEO_FRAME have been released + * via releaseVideoBuffer(). + * + * @param index an index to retrieve the corresponding video buffer. + * + * @return the video buffer corresponding to the given index. + */ + virtual sp<IMemory> getVideoBuffer(int32_t index) const { return 0; } + + /** + * Request the camera hal to store meta data or real YUV data in + * the video buffers sent out via CAMERA_MSG_VIDEO_FRAME for a + * recording session. If it is not called, the default camera + * hal behavior is to store real YUV data in the video buffers. + * + * This method should be called before startRecording() in order + * to be effective. + * + * If meta data is stored in the video buffers, it is up to the + * receiver of the video buffers to interpret the contents and + * to find the actual frame data with the help of the meta data + * in the buffer. How this is done is outside of the scope of + * this method. + * + * Some camera hals may not support storing meta data in the video + * buffers, but all camera hals should support storing real YUV data + * in the video buffers. If the camera hal does not support storing + * the meta data in the video buffers when it is requested to do + * so, INVALID_OPERATION must be returned. It is very useful for + * the camera hal to pass meta data rather than the actual frame + * data directly to the video encoder, since the amount of the + * uncompressed frame data can be very large if video size is large. + * + * @param enable true to instruct the camera hal to store + * meta data in the video buffers; false to instruct + * the camera hal to store real YUV data in the video + * buffers. + * + * @return OK on success. + */ + virtual status_t storeMetaDataInBuffers(bool enable) { + return enable? INVALID_OPERATION: OK; + } + + /** + * Start record mode. When a record image is available a CAMERA_MSG_VIDEO_FRAME + * message is sent with the corresponding frame. Every record frame must be released - * by calling releaseRecordingFrame(). + * by a camera hal client via releaseRecordingFrame() before the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls + * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's responsibility + * to manage the life-cycle of the video recording frames, and the client must + * not modify/access any video recording frames. */ virtual status_t startRecording() = 0; @@ -161,6 +243,13 @@ public: /** * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. + * + * It is camera hal client's responsibility to release video recording + * frames sent out by the camera hal before the camera hal receives + * a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives + * the call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is camera hal's + * responsibility to manage the life-cycle of the video recording + * frames. 
*/ virtual void releaseRecordingFrame(const sp<IMemory>& mem) = 0; diff --git a/include/camera/CameraParameters.h b/include/camera/CameraParameters.h index 4e770fd..431aaa4 100644 --- a/include/camera/CameraParameters.h +++ b/include/camera/CameraParameters.h @@ -59,6 +59,35 @@ public: void setPreviewSize(int width, int height); void getPreviewSize(int *width, int *height) const; void getSupportedPreviewSizes(Vector<Size> &sizes) const; + + // Set the dimensions in pixels to the given width and height + // for video frames. The given width and height must be one + // of the supported dimensions returned from + // getSupportedVideoSizes(). Must not be called if + // getSupportedVideoSizes() returns an empty Vector of Size. + void setVideoSize(int width, int height); + // Retrieve the current dimensions (width and height) + // in pixels for video frames, which must be one of the + // supported dimensions returned from getSupportedVideoSizes(). + // Must not be called if getSupportedVideoSizes() returns an + // empty Vector of Size. + void getVideoSize(int *width, int *height) const; + // Retrieve a Vector of supported dimensions (width and height) + // in pixels for video frames. If the sizes returned from the method + // are empty, the camera does not support calls to setVideoSize() + // or getVideoSize(). In addition, it also indicates that + // the camera only has a single output, and does not have + // a separate output for video frames and preview frames. + void getSupportedVideoSizes(Vector<Size> &sizes) const; + // Retrieve the preferred preview size (width and height) in pixels + // for video recording. The given width and height must be one of the + // supported preview sizes returned from getSupportedPreviewSizes(). + // Must not be called if getSupportedVideoSizes() returns an empty + // Vector of Size. If getSupportedVideoSizes() returns an empty + // Vector of Size, the width and height returned from this method + // are invalid, and are "-1x-1". + void getPreferredPreviewSizeForVideo(int *width, int *height) const; + void setPreviewFrameRate(int fps); int getPreviewFrameRate() const; void getPreviewFpsRange(int *min_fps, int *max_fps) const; @@ -288,6 +317,31 @@ public: // Example value: "0.95,1.9,Infinity" or "0.049,0.05,0.051". Read only. static const char KEY_FOCUS_DISTANCES[]; + // The current dimensions in pixels (width x height) for video frames. + // The width and height must be one of the supported sizes retrieved + // via KEY_SUPPORTED_VIDEO_SIZES. + // Example value: "1280x720". Read/write. + static const char KEY_VIDEO_SIZE[]; + // A list of the supported dimensions in pixels (width x height) + // for video frames. See CAMERA_MSG_VIDEO_FRAME for details in + // frameworks/base/include/camera/Camera.h. + // Example: "176x144,1280x720". Read only. + static const char KEY_SUPPORTED_VIDEO_SIZES[]; + + // Preferred preview frame size in pixels for video recording. + // The width and height must be one of the supported sizes retrieved + // via KEY_SUPPORTED_PREVIEW_SIZES. This key can be used only when + // getSupportedVideoSizes() does not return an empty Vector of Size. + // Camcorder applications are recommended to set the preview size + // to a value that is not larger than the preferred preview size. + // In other words, the product of the width and height of the + // preview size should not be larger than that of the preferred + // preview size. In addition, we recommend choosing a preview size + // that has the same aspect ratio as the resolution of video to be + // recorded. 
+ // Example value: "800x600". Read only. + static const char KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO[]; + // The image format for video frames. See CAMERA_MSG_VIDEO_FRAME in // frameworks/base/include/camera/Camera.h. // Example value: "yuv420sp" or PIXEL_FORMAT_XXX constants. Read only. @@ -361,7 +415,10 @@ public: // for barcode reading. static const char SCENE_MODE_BARCODE[]; - // Formats for setPreviewFormat and setPictureFormat. + // Pixel color formats for KEY_PREVIEW_FORMAT, KEY_PICTURE_FORMAT, + // and KEY_VIDEO_FRAME_FORMAT + // Planar variant of the YUV420 color format + static const char PIXEL_FORMAT_YUV420P[]; static const char PIXEL_FORMAT_YUV422SP[]; static const char PIXEL_FORMAT_YUV420SP[]; // NV21 static const char PIXEL_FORMAT_YUV422I[]; // YUY2 diff --git a/include/camera/ICamera.h b/include/camera/ICamera.h index 6fcf9e5..b2310a6 100644 --- a/include/camera/ICamera.h +++ b/include/camera/ICamera.h @@ -20,10 +20,11 @@ #include <utils/RefBase.h> #include <binder/IInterface.h> #include <binder/Parcel.h> -#include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> #include <binder/IMemory.h> #include <utils/String8.h> #include <camera/Camera.h> +#include <gui/ISurfaceTexture.h> namespace android { @@ -45,8 +46,12 @@ public: // allow other processes to use this ICamera interface virtual status_t unlock() = 0; - // pass the buffered ISurface to the camera service - virtual status_t setPreviewDisplay(const sp<ISurface>& surface) = 0; + // pass the buffered Surface to the camera service + virtual status_t setPreviewDisplay(const sp<Surface>& surface) = 0; + + // pass the buffered ISurfaceTexture to the camera service + virtual status_t setPreviewTexture( + const sp<ISurfaceTexture>& surfaceTexture) = 0; // set the preview callback flag to affect how the received frames from // preview are handled. @@ -90,6 +95,15 @@ public: // send command to camera driver virtual status_t sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) = 0; + + // return the total number of available video buffers + virtual int32_t getNumberOfVideoBuffers() const = 0; + + // return the individual video buffer corresponding to the given index. + virtual sp<IMemory> getVideoBuffer(int32_t index) const = 0; + + // tell the camera hal to store meta data or real YUV data in video buffers. + virtual status_t storeMetaDataInBuffers(bool enabled) = 0; }; // ---------------------------------------------------------------------------- diff --git a/include/drm/DrmManagerClient.h b/include/drm/DrmManagerClient.h index 004556f..085ebf1 100644 --- a/include/drm/DrmManagerClient.h +++ b/include/drm/DrmManagerClient.h @@ -69,7 +69,7 @@ public: * @return * Handle for the decryption session */ - DecryptHandle* openDecryptSession(int fd, int offset, int length); + DecryptHandle* openDecryptSession(int fd, off64_t offset, off64_t length); /** * Open the decrypt session to decrypt the given protected content @@ -113,7 +113,7 @@ public: * @return status_t * Returns DRM_NO_ERROR for success, DRM_ERROR_UNKNOWN for failure */ - status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int position); + status_t setPlaybackStatus(DecryptHandle* decryptHandle, int playbackStatus, int64_t position); /** * Initialize decryption for the given unit of the protected content @@ -167,7 +167,7 @@ public: * * @return Number of bytes read. Returns -1 for Failure. 
*/ - ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off_t offset); + ssize_t pread(DecryptHandle* decryptHandle, void* buffer, ssize_t numBytes, off64_t offset); /** * Validates whether an action on the DRM content is allowed or not. diff --git a/include/drm/drm_framework_common.h b/include/drm/drm_framework_common.h index c5765a9..1758cdd 100644 --- a/include/drm/drm_framework_common.h +++ b/include/drm/drm_framework_common.h @@ -217,6 +217,10 @@ public: * POSIX based Decrypt API set for container based DRM */ static const int CONTAINER_BASED = 0x02; + /** + * Decrypt API for Widevine streams + */ + static const int WV_BASED = 0x3; }; /** diff --git a/include/media/AudioEffect.h b/include/media/AudioEffect.h index c967efb..cda2be0 100644 --- a/include/media/AudioEffect.h +++ b/include/media/AudioEffect.h @@ -403,7 +403,7 @@ public: static status_t guidToString(const effect_uuid_t *guid, char *str, size_t maxLen); protected: - volatile int32_t mEnabled; // enable state + bool mEnabled; // enable state int32_t mSessionId; // audio session ID int32_t mPriority; // priority for effect control status_t mStatus; // effect status @@ -412,6 +412,7 @@ protected: void* mUserData; // client context for callback function effect_descriptor_t mDescriptor; // effect descriptor int32_t mId; // system wide unique effect engine instance ID + Mutex mLock; // Mutex for mEnabled access private: diff --git a/include/media/AudioRecord.h b/include/media/AudioRecord.h index 38e3d44..5f7cd90 100644 --- a/include/media/AudioRecord.h +++ b/include/media/AudioRecord.h @@ -356,7 +356,7 @@ private: sp<IAudioRecord> mAudioRecord; sp<IMemory> mCblkMemory; sp<ClientRecordThread> mClientRecordThread; - Mutex mRecordThreadLock; + Mutex mLock; uint32_t mFrameCount; diff --git a/include/media/AudioSystem.h b/include/media/AudioSystem.h index 9fd905f..1e29d82 100644 --- a/include/media/AudioSystem.h +++ b/include/media/AudioSystem.h @@ -156,6 +156,7 @@ public: MODE_NORMAL = 0, MODE_RINGTONE, MODE_IN_CALL, + MODE_IN_COMMUNICATION, NUM_MODES // not a valid entry, denotes end-of-list }; @@ -262,11 +263,15 @@ public: DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100, DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200, DEVICE_OUT_AUX_DIGITAL = 0x400, + DEVICE_OUT_ANLG_DOCK_HEADSET = 0x800, + DEVICE_OUT_DGTL_DOCK_HEADSET = 0x1000, DEVICE_OUT_DEFAULT = 0x8000, DEVICE_OUT_ALL = (DEVICE_OUT_EARPIECE | DEVICE_OUT_SPEAKER | DEVICE_OUT_WIRED_HEADSET | DEVICE_OUT_WIRED_HEADPHONE | DEVICE_OUT_BLUETOOTH_SCO | DEVICE_OUT_BLUETOOTH_SCO_HEADSET | DEVICE_OUT_BLUETOOTH_SCO_CARKIT | DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES | - DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL | DEVICE_OUT_DEFAULT), + DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER | DEVICE_OUT_AUX_DIGITAL | + DEVICE_OUT_ANLG_DOCK_HEADSET | DEVICE_OUT_DGTL_DOCK_HEADSET | + DEVICE_OUT_DEFAULT), DEVICE_OUT_ALL_A2DP = (DEVICE_OUT_BLUETOOTH_A2DP | DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES | DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER), @@ -309,6 +314,8 @@ public: FORCE_WIRED_ACCESSORY, FORCE_BT_CAR_DOCK, FORCE_BT_DESK_DOCK, + FORCE_ANALOG_DOCK, + FORCE_DIGITAL_DOCK, NUM_FORCE_CONFIG, FORCE_DEFAULT = FORCE_NONE }; @@ -466,7 +473,7 @@ public: AudioParameter(const String8& keyValuePairs); virtual ~AudioParameter(); - // reserved parameter keys for changeing standard parameters with setParameters() function. + // reserved parameter keys for changing standard parameters with setParameters() function. 
// Using these keys is mandatory for AudioFlinger to properly monitor audio output/input // configuration changes and act accordingly. // keyRouting: to change audio routing, value is an int in AudioSystem::audio_devices @@ -474,11 +481,14 @@ public: // keyFormat: to change audio format, value is an int in AudioSystem::audio_format // keyChannels: to change audio channel configuration, value is an int in AudioSystem::audio_channels // keyFrameCount: to change audio output frame count, value is an int + // keyInputSource: to change audio input source, value is an int in audio_source + // (defined in media/mediarecorder.h) static const char *keyRouting; static const char *keySamplingRate; static const char *keyFormat; static const char *keyChannels; static const char *keyFrameCount; + static const char *keyInputSource; String8 toString(); diff --git a/include/media/AudioTrack.h b/include/media/AudioTrack.h index 4475d4a..813a905 100644 --- a/include/media/AudioTrack.h +++ b/include/media/AudioTrack.h @@ -480,6 +480,7 @@ private: uint32_t mFlags; int mSessionId; int mAuxEffectId; + Mutex mLock; }; diff --git a/include/media/EffectApi.h b/include/media/EffectApi.h index 16fb43c..b97c22e 100644 --- a/include/media/EffectApi.h +++ b/include/media/EffectApi.h @@ -602,9 +602,9 @@ enum audio_device_e { // Audio mode enum audio_mode_e { - AUDIO_MODE_NORMAL, // phone idle - AUDIO_MODE_RINGTONE, // phone ringing - AUDIO_MODE_IN_CALL // phone call connected + AUDIO_MODE_NORMAL, // device idle + AUDIO_MODE_RINGTONE, // device ringing + AUDIO_MODE_IN_CALL // audio call connected (VoIP or telephony) }; // Values for "accessMode" field of buffer_config_t: diff --git a/include/media/IMediaPlayer.h b/include/media/IMediaPlayer.h index af9a7ed..bba7ed7 100644 --- a/include/media/IMediaPlayer.h +++ b/include/media/IMediaPlayer.h @@ -25,6 +25,7 @@ namespace android { class Parcel; class ISurface; +class Surface; class IMediaPlayer: public IInterface { @@ -33,7 +34,7 @@ public: virtual void disconnect() = 0; - virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0; + virtual status_t setVideoSurface(const sp<Surface>& surface) = 0; virtual status_t prepareAsync() = 0; virtual status_t start() = 0; virtual status_t stop() = 0; @@ -46,8 +47,6 @@ public: virtual status_t setAudioStreamType(int type) = 0; virtual status_t setLooping(int loop) = 0; virtual status_t setVolume(float leftVolume, float rightVolume) = 0; - virtual status_t suspend() = 0; - virtual status_t resume() = 0; virtual status_t setAuxEffectSendLevel(float level) = 0; virtual status_t attachAuxEffect(int effectId) = 0; diff --git a/include/media/IMediaPlayerService.h b/include/media/IMediaPlayerService.h index 9416ca1..0bfb166 100644 --- a/include/media/IMediaPlayerService.h +++ b/include/media/IMediaPlayerService.h @@ -32,6 +32,7 @@ namespace android { class IMediaRecorder; class IOMX; +struct IStreamSource; class IMediaPlayerService: public IInterface { @@ -45,6 +46,11 @@ public: int audioSessionId = 0) = 0; virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length, int audioSessionId) = 0; + + virtual sp<IMediaPlayer> create( + pid_t pid, const sp<IMediaPlayerClient> &client, + const sp<IStreamSource> &source, int audioSessionId) = 0; + virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0; virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) = 0; virtual 
sp<IOMX> getOMX() = 0; diff --git a/include/media/IMediaRecorder.h b/include/media/IMediaRecorder.h index 54adca8..28be7c1 100644 --- a/include/media/IMediaRecorder.h +++ b/include/media/IMediaRecorder.h @@ -22,7 +22,7 @@ namespace android { -class ISurface; +class Surface; class ICamera; class IMediaRecorderClient; @@ -32,7 +32,7 @@ public: DECLARE_META_INTERFACE(MediaRecorder); virtual status_t setCamera(const sp<ICamera>& camera) = 0; - virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0; + virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0; virtual status_t setVideoSource(int vs) = 0; virtual status_t setAudioSource(int as) = 0; virtual status_t setOutputFormat(int of) = 0; @@ -40,6 +40,7 @@ public: virtual status_t setAudioEncoder(int ae) = 0; virtual status_t setOutputFile(const char* path) = 0; virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0; + virtual status_t setOutputFileAuxiliary(int fd) = 0; virtual status_t setVideoSize(int width, int height) = 0; virtual status_t setVideoFrameRate(int frames_per_second) = 0; virtual status_t setParameters(const String8& params) = 0; @@ -68,4 +69,3 @@ public: }; // namespace android #endif // ANDROID_IMEDIARECORDER_H - diff --git a/include/media/IOMX.h b/include/media/IOMX.h index f794766..cb36bbb 100644 --- a/include/media/IOMX.h +++ b/include/media/IOMX.h @@ -19,6 +19,7 @@ #define ANDROID_IOMX_H_ #include <binder/IInterface.h> +#include <ui/GraphicBuffer.h> #include <utils/List.h> #include <utils/String8.h> @@ -78,10 +79,20 @@ public: node_id node, OMX_INDEXTYPE index, const void *params, size_t size) = 0; + virtual status_t storeMetaDataInBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0; + + virtual status_t enableGraphicBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable) = 0; + virtual status_t useBuffer( node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, buffer_id *buffer) = 0; + virtual status_t useGraphicBuffer( + node_id node, OMX_U32 port_index, + const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) = 0; + // This API clearly only makes sense if the caller lives in the // same process as the callee, i.e. is the media_server, as the // returned "buffer_data" pointer is just that, a pointer into local @@ -109,33 +120,6 @@ public: node_id node, const char *parameter_name, OMX_INDEXTYPE *index) = 0; - - virtual sp<IOMXRenderer> createRenderer( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees) = 0; - - // Note: These methods are _not_ virtual, it exists as a wrapper around - // the virtual "createRenderer" method above facilitating extraction - // of the ISurface from a regular Surface or a java Surface object. 
- sp<IOMXRenderer> createRenderer( - const sp<Surface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees); - - sp<IOMXRenderer> createRendererFromJavaSurface( - JNIEnv *env, jobject javaSurface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees); }; struct omx_message { @@ -182,13 +166,6 @@ public: virtual void onMessage(const omx_message &msg) = 0; }; -class IOMXRenderer : public IInterface { -public: - DECLARE_META_INTERFACE(OMXRenderer); - - virtual void render(IOMX::buffer_id buffer) = 0; -}; - //////////////////////////////////////////////////////////////////////////////// class BnOMX : public BnInterface<IOMX> { @@ -205,13 +182,6 @@ public: uint32_t flags = 0); }; -class BnOMXRenderer : public BnInterface<IOMXRenderer> { -public: - virtual status_t onTransact( - uint32_t code, const Parcel &data, Parcel *reply, - uint32_t flags = 0); -}; - } // namespace android #endif // ANDROID_IOMX_H_ diff --git a/include/media/IStreamSource.h b/include/media/IStreamSource.h new file mode 100644 index 0000000..4b698e6 --- /dev/null +++ b/include/media/IStreamSource.h @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef ANDROID_ISTREAMSOURCE_H_ + +#define ANDROID_ISTREAMSOURCE_H_ + +#include <binder/IInterface.h> + +namespace android { + +struct AMessage; +struct IMemory; +struct IStreamListener; + +struct IStreamSource : public IInterface { + DECLARE_META_INTERFACE(StreamSource); + + virtual void setListener(const sp<IStreamListener> &listener) = 0; + virtual void setBuffers(const Vector<sp<IMemory> > &buffers) = 0; + + virtual void onBufferAvailable(size_t index) = 0; +}; + +struct IStreamListener : public IInterface { + DECLARE_META_INTERFACE(StreamListener); + + enum Command { + EOS, + DISCONTINUITY, + }; + + virtual void queueBuffer(size_t index, size_t size) = 0; + + virtual void issueCommand( + Command cmd, bool synchronous, const sp<AMessage> &msg = NULL) = 0; +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct BnStreamSource : public BnInterface<IStreamSource> { + virtual status_t onTransact( + uint32_t code, const Parcel &data, Parcel *reply, + uint32_t flags = 0); +}; + +struct BnStreamListener : public BnInterface<IStreamListener> { + virtual status_t onTransact( + uint32_t code, const Parcel &data, Parcel *reply, + uint32_t flags = 0); +}; + +} // namespace android + +#endif // ANDROID_ISTREAMSOURCE_H_ diff --git a/include/media/MediaPlayerInterface.h b/include/media/MediaPlayerInterface.h index 0521709..c0963a6 100644 --- a/include/media/MediaPlayerInterface.h +++ b/include/media/MediaPlayerInterface.h @@ -33,13 +33,15 @@ namespace android { class Parcel; class ISurface; +class Surface; template<typename T> class SortedVector; enum player_type { PV_PLAYER = 1, SONIVOX_PLAYER = 2, - STAGEFRIGHT_PLAYER = 4, + STAGEFRIGHT_PLAYER = 3, + NU_PLAYER = 4, // Test players are available only in the 'test' and 'eng' builds. // The shared library with the test player is passed passed as an // argument to the 'test:' url in the setDataSource call. 
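The IStreamSource / IStreamListener interfaces added above define a push model for streaming playback: the player hands the source a set of shared-memory buffers via setBuffers(), tells it when a buffer may be (re)filled via onBufferAvailable(), and the source queues filled buffers back through IStreamListener::queueBuffer() or signals end-of-stream via issueCommand(). The following is only a rough sketch of a client-side implementation under those assumptions; FdStreamSource and the file-descriptor handling are illustrative and not part of this change.

    #include <unistd.h>
    #include <binder/IMemory.h>
    #include <utils/Vector.h>
    #include <media/IStreamSource.h>

    namespace android {

    // Hypothetical example: push raw stream data from a file descriptor.
    struct FdStreamSource : public BnStreamSource {
        FdStreamSource(int fd) : mFd(fd) {}

        virtual void setListener(const sp<IStreamListener> &listener) {
            mListener = listener;
        }

        virtual void setBuffers(const Vector<sp<IMemory> > &buffers) {
            mBuffers = buffers;
        }

        // Called by the player whenever buffer 'index' may be (re)filled.
        virtual void onBufferAvailable(size_t index) {
            sp<IMemory> mem = mBuffers.itemAt(index);
            ssize_t n = read(mFd, mem->pointer(), mem->size());
            if (n > 0) {
                mListener->queueBuffer(index, n);
            } else {
                mListener->issueCommand(IStreamListener::EOS, false /* synchronous */);
            }
        }

    private:
        int mFd;
        sp<IStreamListener> mListener;
        Vector<sp<IMemory> > mBuffers;
    };

    }  // namespace android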
@@ -105,7 +107,12 @@ public: const KeyedVector<String8, String8> *headers = NULL) = 0; virtual status_t setDataSource(int fd, int64_t offset, int64_t length) = 0; - virtual status_t setVideoSurface(const sp<ISurface>& surface) = 0; + + virtual status_t setDataSource(const sp<IStreamSource> &source) { + return INVALID_OPERATION; + } + + virtual status_t setVideoSurface(const sp<Surface>& surface) = 0; virtual status_t prepare() = 0; virtual status_t prepareAsync() = 0; virtual status_t start() = 0; @@ -118,8 +125,6 @@ public: virtual status_t reset() = 0; virtual status_t setLooping(int loop) = 0; virtual player_type playerType() = 0; - virtual status_t suspend() { return INVALID_OPERATION; } - virtual status_t resume() { return INVALID_OPERATION; } virtual void setNotifyCallback(void* cookie, notify_callback_f notifyFunc) { mCookie = cookie; mNotify = notifyFunc; } diff --git a/include/media/MediaProfiles.h b/include/media/MediaProfiles.h index c3cd361..aa97874 100644 --- a/include/media/MediaProfiles.h +++ b/include/media/MediaProfiles.h @@ -25,7 +25,20 @@ namespace android { enum camcorder_quality { CAMCORDER_QUALITY_LOW = 0, - CAMCORDER_QUALITY_HIGH = 1 + CAMCORDER_QUALITY_HIGH = 1, + CAMCORDER_QUALITY_QCIF = 2, + CAMCORDER_QUALITY_CIF = 3, + CAMCORDER_QUALITY_480P = 4, + CAMCORDER_QUALITY_720P = 5, + CAMCORDER_QUALITY_1080P = 6, + + CAMCORDER_QUALITY_TIME_LAPSE_LOW = 1000, + CAMCORDER_QUALITY_TIME_LAPSE_HIGH = 1001, + CAMCORDER_QUALITY_TIME_LAPSE_QCIF = 1002, + CAMCORDER_QUALITY_TIME_LAPSE_CIF = 1003, + CAMCORDER_QUALITY_TIME_LAPSE_480P = 1004, + CAMCORDER_QUALITY_TIME_LAPSE_720P = 1005, + CAMCORDER_QUALITY_TIME_LAPSE_1080P = 1006 }; enum video_decoder { @@ -68,6 +81,12 @@ public: camcorder_quality quality) const; /** + * Returns true if a profile for the given camera at the given quality exists, + * or false if not. + */ + bool hasCamcorderProfile(int cameraId, camcorder_quality quality) const; + + /** * Returns the output file formats supported. 
*/ Vector<output_format> getOutputFileFormats() const; @@ -252,6 +271,8 @@ private: Vector<int> mLevels; }; + int getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const; + // Debug static void logVideoCodec(const VideoCodec& codec); static void logAudioCodec(const AudioCodec& codec); @@ -281,8 +302,25 @@ private: // If the xml configuration file does not exist, use hard-coded values static MediaProfiles* createDefaultInstance(); - static CamcorderProfile *createDefaultCamcorderLowProfile(); - static CamcorderProfile *createDefaultCamcorderHighProfile(); + + static CamcorderProfile *createDefaultCamcorderQcifProfile(camcorder_quality quality); + static CamcorderProfile *createDefaultCamcorderCifProfile(camcorder_quality quality); + static void createDefaultCamcorderLowProfiles( + MediaProfiles::CamcorderProfile **lowProfile, + MediaProfiles::CamcorderProfile **lowSpecificProfile); + static void createDefaultCamcorderHighProfiles( + MediaProfiles::CamcorderProfile **highProfile, + MediaProfiles::CamcorderProfile **highSpecificProfile); + + static CamcorderProfile *createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality); + static CamcorderProfile *createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality); + static void createDefaultCamcorderTimeLapseLowProfiles( + MediaProfiles::CamcorderProfile **lowTimeLapseProfile, + MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile); + static void createDefaultCamcorderTimeLapseHighProfiles( + MediaProfiles::CamcorderProfile **highTimeLapseProfile, + MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile); + static void createDefaultCamcorderProfiles(MediaProfiles *profiles); static void createDefaultVideoEncoders(MediaProfiles *profiles); static void createDefaultAudioEncoders(MediaProfiles *profiles); diff --git a/include/media/MediaRecorderBase.h b/include/media/MediaRecorderBase.h index 5e9e368..c42346e 100644 --- a/include/media/MediaRecorderBase.h +++ b/include/media/MediaRecorderBase.h @@ -22,7 +22,7 @@ namespace android { -class ISurface; +class Surface; struct MediaRecorderBase { MediaRecorderBase() {} @@ -37,9 +37,10 @@ struct MediaRecorderBase { virtual status_t setVideoSize(int width, int height) = 0; virtual status_t setVideoFrameRate(int frames_per_second) = 0; virtual status_t setCamera(const sp<ICamera>& camera) = 0; - virtual status_t setPreviewSurface(const sp<ISurface>& surface) = 0; + virtual status_t setPreviewSurface(const sp<Surface>& surface) = 0; virtual status_t setOutputFile(const char *path) = 0; virtual status_t setOutputFile(int fd, int64_t offset, int64_t length) = 0; + virtual status_t setOutputFileAuxiliary(int fd) {return INVALID_OPERATION;} virtual status_t setParameters(const String8& params) = 0; virtual status_t setListener(const sp<IMediaRecorderClient>& listener) = 0; virtual status_t prepare() = 0; diff --git a/include/media/PVMediaRecorder.h b/include/media/PVMediaRecorder.h deleted file mode 100644 index c091c39..0000000 --- a/include/media/PVMediaRecorder.h +++ /dev/null @@ -1,69 +0,0 @@ -/* - ** - ** Copyright 2008, The Android Open Source Project - ** - ** Licensed under the Apache License, Version 2.0 (the "License"); - ** you may not use this file except in compliance with the License. 
- ** You may obtain a copy of the License at - ** - ** http://www.apache.org/licenses/LICENSE-2.0 - ** - ** Unless required by applicable law or agreed to in writing, software - ** distributed under the License is distributed on an "AS IS" BASIS, - ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - ** See the License for the specific language governing permissions and - ** limitations under the License. - */ - -#ifndef ANDROID_PVMEDIARECORDER_H -#define ANDROID_PVMEDIARECORDER_H - -#include <media/IMediaRecorderClient.h> -#include <media/MediaRecorderBase.h> - -namespace android { - -class ISurface; -class ICamera; -class AuthorDriverWrapper; - -class PVMediaRecorder : public MediaRecorderBase { -public: - PVMediaRecorder(); - virtual ~PVMediaRecorder(); - - virtual status_t init(); - virtual status_t setAudioSource(audio_source as); - virtual status_t setVideoSource(video_source vs); - virtual status_t setOutputFormat(output_format of); - virtual status_t setAudioEncoder(audio_encoder ae); - virtual status_t setVideoEncoder(video_encoder ve); - virtual status_t setVideoSize(int width, int height); - virtual status_t setVideoFrameRate(int frames_per_second); - virtual status_t setCamera(const sp<ICamera>& camera); - virtual status_t setPreviewSurface(const sp<ISurface>& surface); - virtual status_t setOutputFile(const char *path); - virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); - virtual status_t setParameters(const String8& params); - virtual status_t setListener(const sp<IMediaRecorderClient>& listener); - virtual status_t prepare(); - virtual status_t start(); - virtual status_t stop(); - virtual status_t close(); - virtual status_t reset(); - virtual status_t getMaxAmplitude(int *max); - virtual status_t dump(int fd, const Vector<String16>& args) const; - -private: - status_t doStop(); - - AuthorDriverWrapper* mAuthorDriverWrapper; - - PVMediaRecorder(const PVMediaRecorder &); - PVMediaRecorder &operator=(const PVMediaRecorder &); -}; - -}; // namespace android - -#endif // ANDROID_PVMEDIARECORDER_H - diff --git a/include/media/PVMetadataRetriever.h b/include/media/PVMetadataRetriever.h deleted file mode 100644 index c202dfe..0000000 --- a/include/media/PVMetadataRetriever.h +++ /dev/null @@ -1,51 +0,0 @@ -/* -** -** Copyright (C) 2008 The Android Open Source Project -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. 
-*/ - -#ifndef ANDROID_PVMETADATARETRIEVER_H -#define ANDROID_PVMETADATARETRIEVER_H - -#include <utils/Errors.h> -#include <media/MediaMetadataRetrieverInterface.h> -#include <private/media/VideoFrame.h> - -namespace android { - -class MetadataDriver; - -class PVMetadataRetriever : public MediaMetadataRetrieverInterface -{ -public: - PVMetadataRetriever(); - virtual ~PVMetadataRetriever(); - - virtual status_t setDataSource(const char *url); - virtual status_t setDataSource(int fd, int64_t offset, int64_t length); - virtual status_t setMode(int mode); - virtual status_t getMode(int* mode) const; - virtual VideoFrame* captureFrame(); - virtual MediaAlbumArt* extractAlbumArt(); - virtual const char* extractMetadata(int keyCode); - -private: - mutable Mutex mLock; - MetadataDriver* mMetadataDriver; - char* mDataSourcePath; -}; - -}; // namespace android - -#endif // ANDROID_PVMETADATARETRIEVER_H diff --git a/include/media/PVPlayer.h b/include/media/PVPlayer.h deleted file mode 100644 index df50981..0000000 --- a/include/media/PVPlayer.h +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (C) 2008 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef ANDROID_PVPLAYER_H -#define ANDROID_PVPLAYER_H - -#include <utils/Errors.h> -#include <media/MediaPlayerInterface.h> -#include <media/Metadata.h> - -#define MAX_OPENCORE_INSTANCES 25 - -#ifdef MAX_OPENCORE_INSTANCES -#include <cutils/atomic.h> -#endif - -class PlayerDriver; - -namespace android { - -class PVPlayer : public MediaPlayerInterface -{ -public: - PVPlayer(); - virtual ~PVPlayer(); - - virtual status_t initCheck(); - - virtual status_t setDataSource( - const char *url, const KeyedVector<String8, String8> *headers); - - virtual status_t setDataSource(int fd, int64_t offset, int64_t length); - virtual status_t setVideoSurface(const sp<ISurface>& surface); - virtual status_t prepare(); - virtual status_t prepareAsync(); - virtual status_t start(); - virtual status_t stop(); - virtual status_t pause(); - virtual bool isPlaying(); - virtual status_t seekTo(int msec); - virtual status_t getCurrentPosition(int *msec); - virtual status_t getDuration(int *msec); - virtual status_t reset(); - virtual status_t setLooping(int loop); - virtual player_type playerType() { return PV_PLAYER; } - virtual status_t invoke(const Parcel& request, Parcel *reply); - virtual status_t getMetadata( - const SortedVector<media::Metadata::Type>& ids, - Parcel *records); - - // make available to PlayerDriver - void sendEvent(int msg, int ext1=0, int ext2=0) { MediaPlayerBase::sendEvent(msg, ext1, ext2); } - -private: - static void do_nothing(status_t s, void *cookie, bool cancelled) { } - static void run_init(status_t s, void *cookie, bool cancelled); - static void run_set_video_surface(status_t s, void *cookie, bool cancelled); - static void run_set_audio_output(status_t s, void *cookie, bool cancelled); - static void run_prepare(status_t s, void *cookie, bool cancelled); - static void check_for_live_streaming(status_t 
s, void *cookie, bool cancelled); - - PlayerDriver* mPlayerDriver; - char * mDataSourcePath; - bool mIsDataSourceSet; - sp<ISurface> mSurface; - int mSharedFd; - status_t mInit; - int mDuration; - -#ifdef MAX_OPENCORE_INSTANCES - static volatile int32_t sNumInstances; -#endif -}; - -}; // namespace android - -#endif // ANDROID_PVPLAYER_H diff --git a/include/media/mediaplayer.h b/include/media/mediaplayer.h index 207191d..88b0c3e 100644 --- a/include/media/mediaplayer.h +++ b/include/media/mediaplayer.h @@ -169,8 +169,6 @@ public: status_t invoke(const Parcel& request, Parcel *reply); status_t setMetadataFilter(const Parcel& filter); status_t getMetadata(bool update_only, bool apply_filter, Parcel *metadata); - status_t suspend(); - status_t resume(); status_t setAudioSessionId(int sessionId); int getAudioSessionId(); status_t setAuxEffectSendLevel(float level); diff --git a/include/media/mediarecorder.h b/include/media/mediarecorder.h index 5ab1640..a710546 100644 --- a/include/media/mediarecorder.h +++ b/include/media/mediarecorder.h @@ -44,7 +44,8 @@ enum audio_source { AUDIO_SOURCE_VOICE_CALL = 4, AUDIO_SOURCE_CAMCORDER = 5, AUDIO_SOURCE_VOICE_RECOGNITION = 6, - AUDIO_SOURCE_MAX = AUDIO_SOURCE_VOICE_RECOGNITION, + AUDIO_SOURCE_VOICE_COMMUNICATION = 7, + AUDIO_SOURCE_MAX = AUDIO_SOURCE_VOICE_COMMUNICATION, AUDIO_SOURCE_LIST_END // must be last - used to validate audio source type }; @@ -173,6 +174,7 @@ public: status_t setAudioEncoder(int ae); status_t setOutputFile(const char* path); status_t setOutputFile(int fd, int64_t offset, int64_t length); + status_t setOutputFileAuxiliary(int fd); status_t setVideoSize(int width, int height); status_t setVideoFrameRate(int frames_per_second); status_t setParameters(const String8& params); @@ -199,6 +201,7 @@ private: bool mIsAudioEncoderSet; bool mIsVideoEncoderSet; bool mIsOutputFileSet; + bool mIsAuxiliaryOutputFileSet; Mutex mLock; Mutex mNotifyLock; }; diff --git a/include/media/mediascanner.h b/include/media/mediascanner.h index 0d397ac..df5be32 100644 --- a/include/media/mediascanner.h +++ b/include/media/mediascanner.h @@ -38,8 +38,7 @@ struct MediaScanner { typedef bool (*ExceptionCheck)(void* env); virtual status_t processDirectory( - const char *path, const char *extensions, - MediaScannerClient &client, + const char *path, MediaScannerClient &client, ExceptionCheck exceptionCheck, void *exceptionEnv); void setLocale(const char *locale); @@ -55,9 +54,8 @@ private: char *mLocale; status_t doProcessDirectory( - char *path, int pathRemaining, const char *extensions, - MediaScannerClient &client, ExceptionCheck exceptionCheck, - void *exceptionEnv); + char *path, int pathRemaining, MediaScannerClient &client, + ExceptionCheck exceptionCheck, void *exceptionEnv); MediaScanner(const MediaScanner &); MediaScanner &operator=(const MediaScanner &); @@ -73,7 +71,8 @@ public: bool addStringTag(const char* name, const char* value); void endFile(); - virtual bool scanFile(const char* path, long long lastModified, long long fileSize) = 0; + virtual bool scanFile(const char* path, long long lastModified, + long long fileSize, bool isDirectory) = 0; virtual bool handleStringTag(const char* name, const char* value) = 0; virtual bool setMimeType(const char* mimeType) = 0; virtual bool addNoMediaFolder(const char* path) = 0; diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h new file mode 100644 index 0000000..4599d70 --- /dev/null +++ b/include/media/stagefright/ACodec.h @@ -0,0 +1,157 @@ +#ifndef A_CODEC_H_ + +#define 
A_CODEC_H_ + +#include <stdint.h> +#include <android/native_window.h> +#include <media/IOMX.h> +#include <media/stagefright/foundation/AHierarchicalStateMachine.h> + +namespace android { + +struct ABuffer; +struct MemoryDealer; + +struct ACodec : public AHierarchicalStateMachine { + enum { + kWhatFillThisBuffer = 'fill', + kWhatDrainThisBuffer = 'drai', + kWhatEOS = 'eos ', + kWhatShutdownCompleted = 'scom', + kWhatFlushCompleted = 'fcom', + kWhatOutputFormatChanged = 'outC', + }; + + ACodec(); + + void setNotificationMessage(const sp<AMessage> &msg); + void initiateSetup(const sp<AMessage> &msg); + void signalFlush(); + void signalResume(); + void initiateShutdown(); + +protected: + virtual ~ACodec(); + +private: + struct BaseState; + struct UninitializedState; + struct LoadedToIdleState; + struct IdleToExecutingState; + struct ExecutingState; + struct OutputPortSettingsChangedState; + struct ExecutingToIdleState; + struct IdleToLoadedState; + struct ErrorState; + struct FlushingState; + + enum { + kWhatSetup = 'setu', + kWhatOMXMessage = 'omx ', + kWhatInputBufferFilled = 'inpF', + kWhatOutputBufferDrained = 'outD', + kWhatShutdown = 'shut', + kWhatFlush = 'flus', + kWhatResume = 'resm', + kWhatDrainDeferredMessages = 'drai', + }; + + enum { + kPortIndexInput = 0, + kPortIndexOutput = 1 + }; + + struct BufferInfo { + enum Status { + OWNED_BY_US, + OWNED_BY_COMPONENT, + OWNED_BY_UPSTREAM, + OWNED_BY_DOWNSTREAM, + OWNED_BY_NATIVE_WINDOW, + }; + + IOMX::buffer_id mBufferID; + Status mStatus; + + sp<ABuffer> mData; + sp<GraphicBuffer> mGraphicBuffer; + }; + + sp<AMessage> mNotify; + + sp<UninitializedState> mUninitializedState; + sp<LoadedToIdleState> mLoadedToIdleState; + sp<IdleToExecutingState> mIdleToExecutingState; + sp<ExecutingState> mExecutingState; + sp<OutputPortSettingsChangedState> mOutputPortSettingsChangedState; + sp<ExecutingToIdleState> mExecutingToIdleState; + sp<IdleToLoadedState> mIdleToLoadedState; + sp<ErrorState> mErrorState; + sp<FlushingState> mFlushingState; + + AString mComponentName; + sp<IOMX> mOMX; + IOMX::node_id mNode; + sp<MemoryDealer> mDealer[2]; + + sp<ANativeWindow> mNativeWindow; + + Vector<BufferInfo> mBuffers[2]; + bool mPortEOS[2]; + + List<sp<AMessage> > mDeferredQueue; + + bool mSentFormat; + + status_t allocateBuffersOnPort(OMX_U32 portIndex); + status_t freeBuffersOnPort(OMX_U32 portIndex); + status_t freeBuffer(OMX_U32 portIndex, size_t i); + + status_t allocateOutputBuffersFromNativeWindow(); + status_t cancelBufferToNativeWindow(BufferInfo *info); + status_t freeOutputBuffersOwnedByNativeWindow(); + BufferInfo *dequeueBufferFromNativeWindow(); + + BufferInfo *findBufferByID( + uint32_t portIndex, IOMX::buffer_id bufferID, + ssize_t *index = NULL); + + void setComponentRole(bool isEncoder, const char *mime); + void configureCodec(const char *mime, const sp<AMessage> &msg); + + status_t setVideoPortFormatType( + OMX_U32 portIndex, + OMX_VIDEO_CODINGTYPE compressionFormat, + OMX_COLOR_FORMATTYPE colorFormat); + + status_t setSupportedOutputFormat(); + + status_t setupVideoDecoder( + const char *mime, int32_t width, int32_t height); + + status_t setVideoFormatOnPort( + OMX_U32 portIndex, + int32_t width, int32_t height, + OMX_VIDEO_CODINGTYPE compressionFormat); + + status_t setupAACDecoder(int32_t numChannels, int32_t sampleRate); + status_t setMinBufferSize(OMX_U32 portIndex, size_t size); + + status_t initNativeWindow(); + + // Returns true iff all buffers on the given port have status OWNED_BY_US. 
+ bool allYourBuffersAreBelongToUs(OMX_U32 portIndex); + + bool allYourBuffersAreBelongToUs(); + + void deferMessage(const sp<AMessage> &msg); + void processDeferredMessages(); + + void sendFormatChange(); + + DISALLOW_EVIL_CONSTRUCTORS(ACodec); +}; + +} // namespace android + +#endif // A_CODEC_H_ diff --git a/include/media/stagefright/AMRWriter.h b/include/media/stagefright/AMRWriter.h index aa965e1..62d57b4 100644 --- a/include/media/stagefright/AMRWriter.h +++ b/include/media/stagefright/AMRWriter.h @@ -44,7 +44,7 @@ protected: virtual ~AMRWriter(); private: - FILE *mFile; + int mFd; status_t mInitCheck; sp<MediaSource> mSource; bool mStarted; diff --git a/include/media/stagefright/AudioPlayer.h b/include/media/stagefright/AudioPlayer.h index 37af032..d12ee9c 100644 --- a/include/media/stagefright/AudioPlayer.h +++ b/include/media/stagefright/AudioPlayer.h @@ -65,6 +65,7 @@ public: bool reachedEOS(status_t *finalStatus); private: + friend class VideoEditorAudioPlayer; sp<MediaSource> mSource; AudioTrack *mAudioTrack; diff --git a/include/media/stagefright/CameraSource.h b/include/media/stagefright/CameraSource.h index 3192d03..794355b 100644 --- a/include/media/stagefright/CameraSource.h +++ b/include/media/stagefright/CameraSource.h @@ -20,39 +20,167 @@ #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaSource.h> +#include <camera/ICamera.h> +#include <camera/CameraParameters.h> #include <utils/List.h> #include <utils/RefBase.h> -#include <utils/threads.h> namespace android { -class ICamera; class IMemory; class Camera; +class Surface; class CameraSource : public MediaSource, public MediaBufferObserver { public: + /** + * Factory method to create a new CameraSource using the current + * settings (such as video size, frame rate, color format, etc) + * from the default camera. + * + * @return NULL on error. + */ static CameraSource *Create(); - static CameraSource *CreateFromCamera(const sp<Camera> &camera); + + /** + * Factory method to create a new CameraSource. + * + * @param camera the video input frame data source. If it is NULL, + * we will try to connect to the camera with the given + * cameraId. + * + * @param cameraId the id of the camera that the source will connect + * to if camera is NULL; otherwise ignored. + * + * @param videoSize the dimension (in pixels) of the video frame + * @param frameRate the target frames per second + * @param surface the preview surface for display where preview + * frames are sent to + * @param storeMetaDataInVideoBuffers true to request the camera + * source to store meta data in video buffers; false to + * request the camera source to store real YUV frame data + * in the video buffers. The camera source may not support + * storing meta data in video buffers, if so, a request + * to do that will NOT be honored. To find out whether + * meta data is actually being stored in video buffers + * during recording, call isMetaDataStoredInVideoBuffers(). + * + * @return NULL on error. + */ + static CameraSource *CreateFromCamera(const sp<ICamera> &camera, + int32_t cameraId, + Size videoSize, + int32_t frameRate, + const sp<Surface>& surface, + bool storeMetaDataInVideoBuffers = false); virtual ~CameraSource(); virtual status_t start(MetaData *params = NULL); virtual status_t stop(); + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + /** + * Check whether a CameraSource object is properly initialized. + * Must call this method before stop(). 
+ * @return OK if initialization has successfully completed. + */ + virtual status_t initCheck() const; + + /** + * Returns the MetaData associated with the CameraSource, + * including: + * kKeyColorFormat: YUV color format of the video frames + * kKeyWidth, kKeyHeight: dimension (in pixels) of the video frames + * kKeySampleRate: frame rate in frames per second + * kKeyMIMEType: always fixed to be MEDIA_MIMETYPE_VIDEO_RAW + */ virtual sp<MetaData> getFormat(); - virtual status_t read( - MediaBuffer **buffer, const ReadOptions *options = NULL); + /** + * Retrieve the total number of video buffers available from + * this source. + * + * This method is useful if these video buffers are used + * for passing video frame data to other media components, + * such as OMX video encoders, in order to eliminate the + * memcpy of the data. + * + * @return the total numbner of video buffers. Returns 0 to + * indicate that this source does not make the video + * buffer information availalble. + */ + size_t getNumberOfVideoBuffers() const; + + /** + * Retrieve the individual video buffer available from + * this source. + * + * @param index the index corresponding to the video buffer. + * Valid range of the index is [0, n], where n = + * getNumberOfVideoBuffers() - 1. + * + * @return the video buffer corresponding to the given index. + * If index is out of range, 0 should be returned. + */ + sp<IMemory> getVideoBuffer(size_t index) const; + + /** + * Tell whether this camera source stores meta data or real YUV + * frame data in video buffers. + * + * @return true if meta data is stored in the video + * buffers; false if real YUV data is stored in + * the video buffers. + */ + bool isMetaDataStoredInVideoBuffers() const; virtual void signalBufferReturned(MediaBuffer* buffer); -private: - friend class CameraSourceListener; +protected: + enum CameraFlags { + FLAGS_SET_CAMERA = 1L << 0, + FLAGS_HOT_CAMERA = 1L << 1, + }; + + int32_t mCameraFlags; + Size mVideoSize; + int32_t mVideoFrameRate; + int32_t mColorFormat; + status_t mInitCheck; - sp<Camera> mCamera; + sp<Camera> mCamera; + sp<Surface> mSurface; sp<MetaData> mMeta; + int64_t mStartTimeUs; + int32_t mNumFramesReceived; + int64_t mLastFrameTimestampUs; + bool mStarted; + + CameraSource(const sp<ICamera>& camera, int32_t cameraId, + Size videoSize, int32_t frameRate, + const sp<Surface>& surface, + bool storeMetaDataInVideoBuffers); + + virtual void startCameraRecording(); + virtual void stopCameraRecording(); + virtual void releaseRecordingFrame(const sp<IMemory>& frame); + + // Returns true if need to skip the current frame. + // Called from dataCallbackTimestamp. + virtual bool skipCurrentFrame(int64_t timestampUs) {return false;} + + // Callback called when still camera raw data is available. 
+ virtual void dataCallback(int32_t msgType, const sp<IMemory> &data) {} + + virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, + const sp<IMemory> &data); + +private: + friend class CameraSourceListener; + Mutex mLock; Condition mFrameAvailableCondition; Condition mFrameCompleteCondition; @@ -60,25 +188,35 @@ private: List<sp<IMemory> > mFramesBeingEncoded; List<int64_t> mFrameTimes; - int64_t mStartTimeUs; int64_t mFirstFrameTimeUs; - int64_t mLastFrameTimestampUs; - int32_t mNumFramesReceived; int32_t mNumFramesEncoded; int32_t mNumFramesDropped; int32_t mNumGlitches; int64_t mGlitchDurationThresholdUs; bool mCollectStats; - bool mStarted; - - CameraSource(const sp<Camera> &camera); - - void dataCallbackTimestamp( - int64_t timestampUs, int32_t msgType, const sp<IMemory> &data); + bool mIsMetaDataStoredInVideoBuffers; void releaseQueuedFrames(); void releaseOneRecordingFrame(const sp<IMemory>& frame); + + status_t init(const sp<ICamera>& camera, int32_t cameraId, + Size videoSize, int32_t frameRate, + bool storeMetaDataInVideoBuffers); + status_t isCameraAvailable(const sp<ICamera>& camera, int32_t cameraId); + status_t isCameraColorFormatSupported(const CameraParameters& params); + status_t configureCamera(CameraParameters* params, + int32_t width, int32_t height, + int32_t frameRate); + + status_t checkVideoSize(const CameraParameters& params, + int32_t width, int32_t height); + + status_t checkFrameRate(const CameraParameters& params, + int32_t frameRate); + + void releaseCamera(); + CameraSource(const CameraSource &); CameraSource &operator=(const CameraSource &); }; diff --git a/include/media/stagefright/CameraSourceTimeLapse.h b/include/media/stagefright/CameraSourceTimeLapse.h new file mode 100644 index 0000000..0e5d534 --- /dev/null +++ b/include/media/stagefright/CameraSourceTimeLapse.h @@ -0,0 +1,243 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef CAMERA_SOURCE_TIME_LAPSE_H_ + +#define CAMERA_SOURCE_TIME_LAPSE_H_ + +#include <pthread.h> + +#include <utils/RefBase.h> +#include <utils/threads.h> + +namespace android { + +class ICamera; +class IMemory; +class Camera; + +class CameraSourceTimeLapse : public CameraSource { +public: + static CameraSourceTimeLapse *CreateFromCamera( + const sp<ICamera> &camera, + int32_t cameraId, + Size videoSize, + int32_t videoFrameRate, + const sp<Surface>& surface, + int64_t timeBetweenTimeLapseFrameCaptureUs); + + virtual ~CameraSourceTimeLapse(); + + // If the frame capture interval is large, read will block for a long time. + // Due to the way the mediaRecorder framework works, a stop() call from + // mediaRecorder waits until the read returns, causing a long wait for + // stop() to return. To avoid this, we can make read() return a copy of the + // last read frame with the same time stamp frequently. This keeps the + // read() call from blocking too long. 
Calling this function quickly + // captures another frame, keeps its copy, and enables this mode of read() + // returning quickly. + void startQuickReadReturns(); + +private: + // If true, will use still camera takePicture() for time lapse frames + // If false, will use the videocamera frames instead. + bool mUseStillCameraForTimeLapse; + + // Size of picture taken from still camera. This may be larger than the size + // of the video, as still camera may not support the exact video resolution + // demanded. See setPictureSizeToClosestSupported(). + int32_t mPictureWidth; + int32_t mPictureHeight; + + // size of the encoded video. + int32_t mVideoWidth; + int32_t mVideoHeight; + + // True if we need to crop the still camera image to get the video frame. + bool mNeedCropping; + + // Start location of the cropping rectangle. + int32_t mCropRectStartX; + int32_t mCropRectStartY; + + // Time between capture of two frames during time lapse recording + // Negative value indicates that timelapse is disabled. + int64_t mTimeBetweenTimeLapseFrameCaptureUs; + + // Time between two frames in final video (1/frameRate) + int64_t mTimeBetweenTimeLapseVideoFramesUs; + + // Real timestamp of the last encoded time lapse frame + int64_t mLastTimeLapseFrameRealTimestampUs; + + // Thread id of thread which takes still picture and sleeps in a loop. + pthread_t mThreadTimeLapse; + + // Variable set in dataCallbackTimestamp() to help skipCurrentFrame() + // to know if current frame needs to be skipped. + bool mSkipCurrentFrame; + + // Lock for accessing mCameraIdle + Mutex mCameraIdleLock; + + // Condition variable to wait on if camera is is not yet idle. Once the + // camera gets idle, this variable will be signalled. + Condition mCameraIdleCondition; + + // True if camera is in preview mode and ready for takePicture(). + // False after a call to takePicture() but before the final compressed + // data callback has been called and preview has been restarted. + volatile bool mCameraIdle; + + // True if stop() is waiting for camera to get idle, i.e. for the last + // takePicture() to complete. This is needed so that dataCallbackTimestamp() + // can return immediately. + volatile bool mStopWaitingForIdleCamera; + + // Lock for accessing quick stop variables. + Mutex mQuickStopLock; + + // Condition variable to wake up still picture thread. + Condition mTakePictureCondition; + + // mQuickStop is set to true if we use quick read() returns, otherwise it is set + // to false. Once in this mode read() return a copy of the last read frame + // with the same time stamp. See startQuickReadReturns(). + volatile bool mQuickStop; + + // Forces the next frame passed to dataCallbackTimestamp() to be read + // as a time lapse frame. Used by startQuickReadReturns() so that the next + // frame wakes up any blocking read. + volatile bool mForceRead; + + // Stores a copy of the MediaBuffer read in the last read() call after + // mQuickStop was true. + MediaBuffer* mLastReadBufferCopy; + + // Status code for last read. + status_t mLastReadStatus; + + CameraSourceTimeLapse( + const sp<ICamera> &camera, + int32_t cameraId, + Size videoSize, + int32_t videoFrameRate, + const sp<Surface>& surface, + int64_t timeBetweenTimeLapseFrameCaptureUs); + + // Wrapper over CameraSource::signalBufferReturned() to implement quick stop. + // It only handles the case when mLastReadBufferCopy is signalled. Otherwise + // it calls the base class' function. 
+ virtual void signalBufferReturned(MediaBuffer* buffer); + + // Wrapper over CameraSource::read() to implement quick stop. + virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL); + + // For still camera case starts a thread which calls camera's takePicture() + // in a loop. For video camera case, just starts the camera's video recording. + virtual void startCameraRecording(); + + // For still camera case joins the thread created in startCameraRecording(). + // For video camera case, just stops the camera's video recording. + virtual void stopCameraRecording(); + + // For still camera case don't need to do anything as memory is locally + // allocated with refcounting. + // For video camera case just tell the camera to release the frame. + virtual void releaseRecordingFrame(const sp<IMemory>& frame); + + // mSkipCurrentFrame is set to true in dataCallbackTimestamp() if the current + // frame needs to be skipped and this function just returns the value of mSkipCurrentFrame. + virtual bool skipCurrentFrame(int64_t timestampUs); + + // Handles the callback to handle raw frame data from the still camera. + // Creates a copy of the frame data as the camera can reuse the frame memory + // once this callback returns. The function also sets a new timstamp corresponding + // to one frame time ahead of the last encoded frame's time stamp. It then + // calls dataCallbackTimestamp() of the base class with the copied data and the + // modified timestamp, which will think that it recieved the frame from a video + // camera and proceed as usual. + virtual void dataCallback(int32_t msgType, const sp<IMemory> &data); + + // In the video camera case calls skipFrameAndModifyTimeStamp() to modify + // timestamp and set mSkipCurrentFrame. + // Then it calls the base CameraSource::dataCallbackTimestamp() + virtual void dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, + const sp<IMemory> &data); + + // Convenience function to fill mLastReadBufferCopy from the just read + // buffer. + void fillLastReadBufferCopy(MediaBuffer& sourceBuffer); + + // If the passed in size (width x height) is a supported video/preview size, + // the function sets the camera's video/preview size to it and returns true. + // Otherwise returns false. + bool trySettingVideoSize(int32_t width, int32_t height); + + // The still camera may not support the demanded video width and height. + // We look for the supported picture sizes from the still camera and + // choose the smallest one with either dimensions higher than the corresponding + // video dimensions. The still picture will be cropped to get the video frame. + // The function returns true if the camera supports picture sizes greater than + // or equal to the passed in width and height, and false otherwise. + bool setPictureSizeToClosestSupported(int32_t width, int32_t height); + + // Computes the offset of the rectangle from where to start cropping the + // still image into the video frame. We choose the center of the image to be + // cropped. The offset is stored in (mCropRectStartX, mCropRectStartY). + bool computeCropRectangleOffset(); + + // Crops the source data into a smaller image starting at + // (mCropRectStartX, mCropRectStartY) and of the size of the video frame. + // The data is returned into a newly allocated IMemory. + sp<IMemory> cropYUVImage(const sp<IMemory> &source_data); + + // When video camera is used for time lapse capture, returns true + // until enough time has passed for the next time lapse frame. 
When + // the frame needs to be encoded, it returns false and also modifies + // the time stamp to be one frame time ahead of the last encoded + // frame's time stamp. + bool skipFrameAndModifyTimeStamp(int64_t *timestampUs); + + // Wrapper to enter threadTimeLapseEntry() + static void *ThreadTimeLapseWrapper(void *me); + + // Runs a loop which sleeps until a still picture is required + // and then calls mCamera->takePicture() to take the still picture. + // Used only in the case mUseStillCameraForTimeLapse = true. + void threadTimeLapseEntry(); + + // Wrapper to enter threadStartPreview() + static void *ThreadStartPreviewWrapper(void *me); + + // Starts the camera's preview. + void threadStartPreview(); + + // Starts thread ThreadStartPreviewWrapper() for restarting preview. + // Needs to be done in a thread so that dataCallback() which calls this function + // can return, and the camera can know that takePicture() is done. + void restartPreview(); + + // Creates a copy of source_data into a new memory of final type MemoryBase. + sp<IMemory> createIMemoryCopy(const sp<IMemory> &source_data); + + CameraSourceTimeLapse(const CameraSourceTimeLapse &); + CameraSourceTimeLapse &operator=(const CameraSourceTimeLapse &); +}; + +} // namespace android + +#endif // CAMERA_SOURCE_TIME_LAPSE_H_ diff --git a/include/media/stagefright/ColorConverter.h b/include/media/stagefright/ColorConverter.h index bc3f464..2ae8a5b 100644 --- a/include/media/stagefright/ColorConverter.h +++ b/include/media/stagefright/ColorConverter.h @@ -21,6 +21,7 @@ #include <sys/types.h> #include <stdint.h> +#include <utils/Errors.h> #include <OMX_Video.h> @@ -32,36 +33,48 @@ struct ColorConverter { bool isValid() const; - void convert( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip); + status_t convert( + const void *srcBits, + size_t srcWidth, size_t srcHeight, + size_t srcCropLeft, size_t srcCropTop, + size_t srcCropRight, size_t srcCropBottom, + void *dstBits, + size_t dstWidth, size_t dstHeight, + size_t dstCropLeft, size_t dstCropTop, + size_t dstCropRight, size_t dstCropBottom); private: + struct BitmapParams { + BitmapParams( + void *bits, + size_t width, size_t height, + size_t cropLeft, size_t cropTop, + size_t cropRight, size_t cropBottom); + + size_t cropWidth() const; + size_t cropHeight() const; + + void *mBits; + size_t mWidth, mHeight; + size_t mCropLeft, mCropTop, mCropRight, mCropBottom; + }; + OMX_COLOR_FORMATTYPE mSrcFormat, mDstFormat; uint8_t *mClip; uint8_t *initClip(); - void convertCbYCrY( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip); - - void convertYUV420Planar( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip); - - void convertQCOMYUV420SemiPlanar( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip); - - void convertYUV420SemiPlanar( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip); + status_t convertCbYCrY( + const BitmapParams &src, const BitmapParams &dst); + + status_t convertYUV420Planar( + const BitmapParams &src, const BitmapParams &dst); + + status_t convertQCOMYUV420SemiPlanar( + const BitmapParams &src, const BitmapParams &dst); + + status_t convertYUV420SemiPlanar( + const BitmapParams &src, const BitmapParams &dst); ColorConverter(const ColorConverter &); ColorConverter &operator=(const ColorConverter &); diff --git 
a/include/media/stagefright/DataSource.h b/include/media/stagefright/DataSource.h index d0b9fcd..d4f1733 100644 --- a/include/media/stagefright/DataSource.h +++ b/include/media/stagefright/DataSource.h @@ -48,13 +48,13 @@ public: virtual status_t initCheck() const = 0; - virtual ssize_t readAt(off_t offset, void *data, size_t size) = 0; + virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0; // Convenience methods: - bool getUInt16(off_t offset, uint16_t *x); + bool getUInt16(off64_t offset, uint16_t *x); // May return ERROR_UNSUPPORTED. - virtual status_t getSize(off_t *size); + virtual status_t getSize(off64_t *size); virtual uint32_t flags() { return 0; @@ -80,6 +80,9 @@ public: } virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {}; + virtual String8 getUri() { + return String8(); + } protected: virtual ~DataSource() {} diff --git a/include/media/stagefright/FileSource.h b/include/media/stagefright/FileSource.h index 4307263..72a0403 100644 --- a/include/media/stagefright/FileSource.h +++ b/include/media/stagefright/FileSource.h @@ -34,9 +34,9 @@ public: virtual status_t initCheck() const; - virtual ssize_t readAt(off_t offset, void *data, size_t size); + virtual ssize_t readAt(off64_t offset, void *data, size_t size); - virtual status_t getSize(off_t *size); + virtual status_t getSize(off64_t *size); virtual DecryptHandle* DrmInitialization(DrmManagerClient *client); @@ -46,7 +46,6 @@ protected: virtual ~FileSource(); private: - FILE *mFile; int mFd; int64_t mOffset; int64_t mLength; @@ -59,7 +58,7 @@ private: int64_t mDrmBufSize; unsigned char *mDrmBuf; - ssize_t readAtDRM(off_t offset, void *data, size_t size); + ssize_t readAtDRM(off64_t offset, void *data, size_t size); FileSource(const FileSource &); FileSource &operator=(const FileSource &); diff --git a/include/media/stagefright/HardwareAPI.h b/include/media/stagefright/HardwareAPI.h index 63f11d1..17908b4 100644 --- a/include/media/stagefright/HardwareAPI.h +++ b/include/media/stagefright/HardwareAPI.h @@ -19,28 +19,76 @@ #define HARDWARE_API_H_ #include <media/stagefright/OMXPluginBase.h> -#include <media/stagefright/VideoRenderer.h> -#include <surfaceflinger/ISurface.h> +#include <ui/android_native_buffer.h> #include <utils/RefBase.h> #include <OMX_Component.h> -extern android::VideoRenderer *createRenderer( - const android::sp<android::ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight); - -extern android::VideoRenderer *createRendererWithRotation( - const android::sp<android::ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight, - int32_t rotationDegrees); +namespace android { + +// A pointer to this struct is passed to the OMX_SetParameter when the extension +// index for the 'OMX.google.android.index.enableAndroidNativeBuffers' extension +// is given. +// +// When Android native buffer use is disabled for a port (the default state), +// the OMX node should operate as normal, and expect UseBuffer calls to set its +// buffers. This is the mode that will be used when CPU access to the buffer is +// required. +// +// When Android native buffer use has been enabled for a given port, the video +// color format for the port is to be interpreted as an Android pixel format +// rather than an OMX color format. 
The node should then expect to receive +// UseAndroidNativeBuffer calls (via OMX_SetParameter) rather than UseBuffer +// calls for that port. +struct EnableAndroidNativeBuffersParams { + OMX_U32 nSize; + OMX_VERSIONTYPE nVersion; + OMX_U32 nPortIndex; + OMX_BOOL enable; +}; + +// A pointer to this struct is passed to OMX_SetParameter() when the extension +// index "OMX.google.android.index.storeMetaDataInBuffers" +// is given. +// +// When meta data is stored in the video buffers passed between OMX clients +// and OMX components, interpretation of the buffer data is up to the +// buffer receiver, and the data may or may not be the actual video data, but +// some information helpful for the receiver to locate the actual data. +// The buffer receiver thus needs to know how to interpret what is stored +// in these buffers, with mechanisms pre-determined externally. How to +// interpret the meta data is outside of the scope of this method. +// +// Currently, this is specifically used to pass meta data from video source +// (camera component, for instance) to video encoder to avoid memcpying of +// input video frame data. To do this, bStoreMetaDta is set to OMX_TRUE. +// If bStoreMetaData is set to false, real YUV frame data will be stored +// in the buffers. In addition, if no OMX_SetParameter() call is made +// with the corresponding extension index, real YUV data is stored +// in the buffers. +struct StoreMetaDataInBuffersParams { + OMX_U32 nSize; + OMX_VERSIONTYPE nVersion; + OMX_U32 nPortIndex; + OMX_BOOL bStoreMetaData; +}; + +// A pointer to this struct is passed to OMX_SetParameter when the extension +// index for the 'OMX.google.android.index.useAndroidNativeBuffer' extension is +// given. This call will only be performed if a prior call was made with the +// 'OMX.google.android.index.enableAndroidNativeBuffers' extension index, +// enabling use of Android native buffers. 
+struct UseAndroidNativeBufferParams { + OMX_U32 nSize; + OMX_VERSIONTYPE nVersion; + OMX_U32 nPortIndex; + OMX_PTR pAppPrivate; + OMX_BUFFERHEADERTYPE **bufferHeader; + const sp<android_native_buffer_t>& nativeBuffer; +}; + +} // namespace android extern android::OMXPluginBase *createOMXPlugin(); #endif // HARDWARE_API_H_ - diff --git a/include/media/stagefright/JPEGSource.h b/include/media/stagefright/JPEGSource.h index 9d0a700..1b7e91b 100644 --- a/include/media/stagefright/JPEGSource.h +++ b/include/media/stagefright/JPEGSource.h @@ -42,9 +42,9 @@ private: sp<DataSource> mSource; MediaBufferGroup *mGroup; bool mStarted; - off_t mSize; + off64_t mSize; int32_t mWidth, mHeight; - off_t mOffset; + off64_t mOffset; status_t parseJPEG(); diff --git a/include/media/stagefright/MPEG4Writer.h b/include/media/stagefright/MPEG4Writer.h index 7bf07eb..f7618e9 100644 --- a/include/media/stagefright/MPEG4Writer.h +++ b/include/media/stagefright/MPEG4Writer.h @@ -61,20 +61,21 @@ protected: private: class Track; - FILE *mFile; + int mFd; + status_t mInitCheck; bool mUse4ByteNalLength; bool mUse32BitOffset; bool mIsFileSizeLimitExplicitlyRequested; bool mPaused; bool mStarted; - off_t mOffset; + off64_t mOffset; off_t mMdatOffset; uint8_t *mMoovBoxBuffer; - off_t mMoovBoxBufferOffset; + off64_t mMoovBoxBufferOffset; bool mWriteMoovBoxToMemory; - off_t mFreeBoxOffset; + off64_t mFreeBoxOffset; bool mStreamableFile; - off_t mEstimatedMoovBoxSize; + off64_t mEstimatedMoovBoxSize; uint32_t mInterleaveDurationUs; int32_t mTimeScale; int64_t mStartTimestampUs; @@ -83,7 +84,7 @@ private: List<Track *> mTracks; - List<off_t> mBoxes; + List<off64_t> mBoxes; void setStartTimestampUs(int64_t timeUs); int64_t getStartTimestampUs(); // Not const @@ -145,10 +146,10 @@ private: void unlock(); // Acquire lock before calling these methods - off_t addSample_l(MediaBuffer *buffer); - off_t addLengthPrefixedSample_l(MediaBuffer *buffer); + off64_t addSample_l(MediaBuffer *buffer); + off64_t addLengthPrefixedSample_l(MediaBuffer *buffer); - inline size_t write(const void *ptr, size_t size, size_t nmemb, FILE* stream); + inline size_t write(const void *ptr, size_t size, size_t nmemb); bool exceedsFileSizeLimit(); bool use32BitFileOffset() const; bool exceedsFileDurationLimit(); diff --git a/include/media/stagefright/MediaBuffer.h b/include/media/stagefright/MediaBuffer.h index 339e6fb..c1c4f94 100644 --- a/include/media/stagefright/MediaBuffer.h +++ b/include/media/stagefright/MediaBuffer.h @@ -25,6 +25,7 @@ namespace android { +class GraphicBuffer; class MediaBuffer; class MediaBufferObserver; class MetaData; @@ -48,6 +49,8 @@ public: MediaBuffer(size_t size); + MediaBuffer(const sp<GraphicBuffer>& graphicBuffer); + // Decrements the reference count and returns the buffer to its // associated MediaBufferGroup if the reference count drops to 0. void release(); @@ -63,6 +66,8 @@ public: void set_range(size_t offset, size_t length); + sp<GraphicBuffer> graphicBuffer() const; + sp<MetaData> meta_data(); // Clears meta data and resets the range to the full extent. 
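Editorial sketch (not part of the patch): one way the new GraphicBuffer-backed MediaBuffer constructor and graphicBuffer() accessor added in the hunk above might be exercised. The dimensions, pixel format and usage flags below are placeholders chosen for illustration.

    // Wrap an existing gralloc buffer so it can travel through the
    // MediaSource/MediaBuffer plumbing without copying pixel data.
    // Format and usage flags are illustrative only.
    sp<GraphicBuffer> gb = new GraphicBuffer(
            1280, 720, HAL_PIXEL_FORMAT_RGBA_8888,
            GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN);

    MediaBuffer *mbuf = new MediaBuffer(gb);
    sp<GraphicBuffer> same = mbuf->graphicBuffer();   // accessor added above
    mbuf->release();                                  // no observer set, so this frees the buffer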
@@ -94,6 +99,7 @@ private: void *mData; size_t mSize, mRangeOffset, mRangeLength; + sp<GraphicBuffer> mGraphicBuffer; bool mOwnsData; diff --git a/include/media/stagefright/MediaDefs.h b/include/media/stagefright/MediaDefs.h index 92ce068..2d50ca5 100644 --- a/include/media/stagefright/MediaDefs.h +++ b/include/media/stagefright/MediaDefs.h @@ -44,6 +44,8 @@ extern const char *MEDIA_MIMETYPE_CONTAINER_OGG; extern const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA; extern const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS; +extern const char *MEDIA_MIMETYPE_CONTAINER_WVM; + } // namespace android #endif // MEDIA_DEFS_H_ diff --git a/include/media/stagefright/MediaSource.h b/include/media/stagefright/MediaSource.h index dafc621..a31395e 100644 --- a/include/media/stagefright/MediaSource.h +++ b/include/media/stagefright/MediaSource.h @@ -78,31 +78,18 @@ struct MediaSource : public RefBase { void clearSeekTo(); bool getSeekTo(int64_t *time_us, SeekMode *mode) const; - // Option allows encoder to skip some frames until the specified - // time stamp. - // To prevent from being abused, when the skipFrame timestamp is - // found to be more than 1 second later than the current timestamp, - // an error will be returned from read(). - void clearSkipFrame(); - bool getSkipFrame(int64_t *timeUs) const; - void setSkipFrame(int64_t timeUs); - void setLateBy(int64_t lateness_us); int64_t getLateBy() const; private: enum Options { - // Bit map kSeekTo_Option = 1, - kSkipFrame_Option = 2, }; uint32_t mOptions; int64_t mSeekTimeUs; SeekMode mSeekMode; int64_t mLatenessUs; - - int64_t mSkipFrameUntilTimeUs; }; // Causes this source to suspend pulling data from its upstream source diff --git a/include/media/stagefright/MediaSourceSplitter.h b/include/media/stagefright/MediaSourceSplitter.h new file mode 100644 index 0000000..568f4c2 --- /dev/null +++ b/include/media/stagefright/MediaSourceSplitter.h @@ -0,0 +1,193 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// This class provides a way to split a single media source into multiple sources. +// The constructor takes in the real mediaSource and createClient() can then be +// used to create multiple sources served from this real mediaSource. +// +// Usage: +// - Create MediaSourceSplitter by passing in a real mediaSource from which +// multiple duplicate channels are needed. +// - Create a client using createClient() and use it as any other mediaSource. +// +// Note that multiple clients can be created using createClient() and +// started/stopped in any order. MediaSourceSplitter stops the real source only +// when all clients have been stopped. +// +// If a new client is created/started after some existing clients have already +// started, the new client will start getting its read frames from the current +// time. 
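Editorial sketch of the usage pattern described in the comment block above: one real source feeding two clients through MediaSourceSplitter. The choice of a default CameraSource as the real source, and the variable names, are illustrative assumptions rather than part of this change.

    sp<MediaSource> realSource = CameraSource::Create();   // any MediaSource could serve here
    sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(realSource);

    // Two independent clients backed by the same real source.
    sp<MediaSource> clientA = splitter->createClient();
    sp<MediaSource> clientB = splitter->createClient();

    clientA->start();
    clientB->start();   // the real source starts once and stops only after both clients stop

    MediaBuffer *buffer;
    if (clientA->read(&buffer) == OK) {
        // ... hand the frame to an encoder, preview path, etc. ...
        buffer->release();
    }

    clientA->stop();
    clientB->stop();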
+ +#ifndef MEDIA_SOURCE_SPLITTER_H_ + +#define MEDIA_SOURCE_SPLITTER_H_ + +#include <media/stagefright/MediaSource.h> +#include <utils/threads.h> +#include <utils/Vector.h> +#include <utils/RefBase.h> + +namespace android { + +class MediaBuffer; +class MetaData; + +class MediaSourceSplitter : public RefBase { +public: + // Constructor + // mediaSource: The real mediaSource. The class keeps a reference to it to + // implement the various clients. + MediaSourceSplitter(sp<MediaSource> mediaSource); + + ~MediaSourceSplitter(); + + // Creates a new client of base type MediaSource. Multiple clients can be + // created which get their data through the same real mediaSource. These + // clients can then be used like any other MediaSource, all of which provide + // data from the same real source. + sp<MediaSource> createClient(); + +private: + // Total number of clients created through createClient(). + int32_t mNumberOfClients; + + // reference to the real MediaSource passed to the constructor. + sp<MediaSource> mSource; + + // Stores pointer to the MediaBuffer read from the real MediaSource. + // All clients use this to implement the read() call. + MediaBuffer *mLastReadMediaBuffer; + + // Status code for read from the real MediaSource. All clients return + // this for their read(). + status_t mLastReadStatus; + + // Boolean telling whether the real MediaSource has started. + bool mSourceStarted; + + // List of booleans, one for each client, storing whether the corresponding + // client's start() has been called. + Vector<bool> mClientsStarted; + + // Stores the number of clients which are currently started. + int32_t mNumberOfClientsStarted; + + // Since different clients call read() asynchronously, we need to keep track + // of what data is currently read into the mLastReadMediaBuffer. + // mCurrentReadBit stores the bit for the current read buffer. This bit + // flips each time a new buffer is read from the source. + // mClientsDesiredReadBit stores the bit for the next desired read buffer + // for each client. This bit flips each time read() is completed for this + // client. + bool mCurrentReadBit; + Vector<bool> mClientsDesiredReadBit; + + // Number of clients whose current read has been completed. + int32_t mNumberOfCurrentReads; + + // Boolean telling whether the last read has been completed for all clients. + // The variable is reset to false each time buffer is read from the real + // source. + bool mLastReadCompleted; + + // A global mutex for access to critical sections. + Mutex mLock; + + // Condition variable for waiting on read from source to complete. + Condition mReadFromSourceCondition; + + // Condition variable for waiting on all client's last read to complete. + Condition mAllReadsCompleteCondition; + + // Functions used by Client to implement the MediaSource interface. + + // If the real source has not been started yet by any client, starts it. + status_t start(int clientId, MetaData *params); + + // Stops the real source after all clients have called stop(). + status_t stop(int clientId); + + // returns the real source's getFormat(). + sp<MetaData> getFormat(int clientId); + + // If the client's desired buffer has already been read into + // mLastReadMediaBuffer, points the buffer to that. Otherwise if it is the + // master client, reads the buffer from source or else waits for the master + // client to read the buffer and uses that. + status_t read(int clientId, + MediaBuffer **buffer, const MediaSource::ReadOptions *options = NULL); + + // Not implemented right now. 
+ status_t pause(int clientId); + + // Function which reads a buffer from the real source into + // mLastReadMediaBuffer + void readFromSource_lock(const MediaSource::ReadOptions *options); + + // Waits until read from the real source has been completed. + // _lock means that the function should be called when the thread has already + // obtained the lock for the mutex mLock. + void waitForReadFromSource_lock(int32_t clientId); + + // Waits until all clients have read the current buffer in + // mLastReadCompleted. + void waitForAllClientsLastRead_lock(int32_t clientId); + + // Each client calls this after it completes its read(). Once all clients + // have called this for the current buffer, the function calls + // mAllReadsCompleteCondition.broadcast() to signal the waiting clients. + void signalReadComplete_lock(bool readAborted); + + // Make these constructors private. + MediaSourceSplitter(); + MediaSourceSplitter(const MediaSourceSplitter &); + MediaSourceSplitter &operator=(const MediaSourceSplitter &); + + // This class implements the MediaSource interface. Each client stores a + // reference to the parent MediaSourceSplitter and uses it to complete the + // various calls. + class Client : public MediaSource { + public: + // Constructor stores reference to the parent MediaSourceSplitter and it + // client id. + Client(sp<MediaSourceSplitter> splitter, int32_t clientId); + + // MediaSource interface + virtual status_t start(MetaData *params = NULL); + + virtual status_t stop(); + + virtual sp<MetaData> getFormat(); + + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + + virtual status_t pause(); + + private: + // Refernce to the parent MediaSourceSplitter + sp<MediaSourceSplitter> mSplitter; + + // Id of this client. + int32_t mClientId; + }; + + friend class Client; +}; + +} // namespace android + +#endif // MEDIA_SOURCE_SPLITTER_H_ diff --git a/include/media/stagefright/MetaData.h b/include/media/stagefright/MetaData.h index ea2fa52..18fd90e 100644 --- a/include/media/stagefright/MetaData.h +++ b/include/media/stagefright/MetaData.h @@ -30,14 +30,21 @@ namespace android { // The following keys map to int32_t data unless indicated otherwise. 
enum { kKeyMIMEType = 'mime', // cstring - kKeyWidth = 'widt', // int32_t - kKeyHeight = 'heig', // int32_t + kKeyWidth = 'widt', // int32_t, image pixel + kKeyHeight = 'heig', // int32_t, image pixel + kKeyDisplayWidth = 'dWid', // int32_t, display/presentation + kKeyDisplayHeight = 'dHgt', // int32_t, display/presentation + + // a rectangle, if absent assumed to be (0, 0, width - 1, height - 1) + kKeyCropRect = 'crop', + kKeyRotation = 'rotA', // int32_t (angle in degrees) kKeyIFramesInterval = 'ifiv', // int32_t kKeyStride = 'strd', // int32_t kKeySliceHeight = 'slht', // int32_t kKeyChannelCount = '#chn', // int32_t - kKeySampleRate = 'srte', // int32_t (also video frame rate) + kKeySampleRate = 'srte', // int32_t (audio sampling rate Hz) + kKeyFrameRate = 'frmR', // int32_t (video frame rate fps) kKeyBitRate = 'brte', // int32_t (bps) kKeyESDS = 'esds', // raw data kKeyAVCC = 'avcc', // raw data @@ -94,7 +101,6 @@ enum { // Track authoring progress status // kKeyTrackTimeStatus is used to track progress in elapsed time kKeyTrackTimeStatus = 'tktm', // int64_t - kKeyRotationDegree = 'rdge', // int32_t (clockwise, in degree) kKeyNotRealTime = 'ntrt', // bool (int32_t) @@ -104,6 +110,9 @@ enum { kKeyValidSamples = 'valD', // int32_t kKeyIsUnreadable = 'unre', // bool (int32_t) + + // An indication that a video buffer has been rendered. + kKeyRendered = 'rend', // bool (int32_t) }; enum { @@ -123,6 +132,7 @@ public: TYPE_INT64 = 'in64', TYPE_FLOAT = 'floa', TYPE_POINTER = 'ptr ', + TYPE_RECT = 'rect', }; void clear(); @@ -134,12 +144,22 @@ public: bool setFloat(uint32_t key, float value); bool setPointer(uint32_t key, void *value); + bool setRect( + uint32_t key, + int32_t left, int32_t top, + int32_t right, int32_t bottom); + bool findCString(uint32_t key, const char **value); bool findInt32(uint32_t key, int32_t *value); bool findInt64(uint32_t key, int64_t *value); bool findFloat(uint32_t key, float *value); bool findPointer(uint32_t key, void **value); + bool findRect( + uint32_t key, + int32_t *left, int32_t *top, + int32_t *right, int32_t *bottom); + bool setData(uint32_t key, uint32_t type, const void *data, size_t size); bool findData(uint32_t key, uint32_t *type, @@ -185,6 +205,10 @@ private: } }; + struct Rect { + int32_t mLeft, mTop, mRight, mBottom; + }; + KeyedVector<uint32_t, typed_data> mItems; // MetaData &operator=(const MetaData &); diff --git a/include/media/stagefright/OMXCodec.h b/include/media/stagefright/OMXCodec.h index 8274dfb..3251c28 100644 --- a/include/media/stagefright/OMXCodec.h +++ b/include/media/stagefright/OMXCodec.h @@ -18,6 +18,7 @@ #define OMX_CODEC_H_ +#include <android/native_window.h> #include <media/IOMX.h> #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaSource.h> @@ -38,13 +39,22 @@ struct OMXCodec : public MediaSource, // The client wants to access the output buffer's video // data for example for thumbnail extraction. kClientNeedsFramebuffer = 4, + + // Request for software or hardware codecs. If request + // can not be fullfilled, Create() returns NULL. 
+ kSoftwareCodecsOnly = 8, + kHardwareCodecsOnly = 16, + + // Store meta data in video buffers + kStoreMetaDataInVideoBuffers = 32, }; static sp<MediaSource> Create( const sp<IOMX> &omx, const sp<MetaData> &meta, bool createEncoder, const sp<MediaSource> &source, const char *matchComponentName = NULL, - uint32_t flags = 0); + uint32_t flags = 0, + const sp<ANativeWindow> &nativeWindow = NULL); static void setComponentRole( const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder, @@ -114,12 +124,18 @@ private: kAvoidMemcopyInputRecordingFrames = 2048, kRequiresLargerEncoderOutputBuffer = 4096, kOutputBuffersAreUnreadable = 8192, - kStoreMetaDataInInputVideoBuffers = 16384, + }; + + enum BufferStatus { + OWNED_BY_US, + OWNED_BY_COMPONENT, + OWNED_BY_NATIVE_WINDOW, + OWNED_BY_CLIENT, }; struct BufferInfo { IOMX::buffer_id mBuffer; - bool mOwnedByComponent; + BufferStatus mStatus; sp<IMemory> mMem; size_t mSize; void *mData; @@ -156,7 +172,6 @@ private: int64_t mSeekTimeUs; ReadOptions::SeekMode mSeekMode; int64_t mTargetTimeUs; - int64_t mSkipTimeUs; MediaBuffer *mLeftOverBuffer; @@ -165,13 +180,23 @@ private: bool mPaused; + sp<ANativeWindow> mNativeWindow; + + // The index in each of the mPortBuffers arrays of the buffer that will be + // submitted to OMX next. This only applies when using buffers from a + // native window. + size_t mNextNativeBufferIndex[2]; + // A list of indices into mPortStatus[kPortIndexOutput] filled with data. List<size_t> mFilledBuffers; Condition mBufferFilled; + bool mIsMetaDataStoredInVideoBuffers; + OMXCodec(const sp<IOMX> &omx, IOMX::node_id node, uint32_t quirks, bool isEncoder, const char *mime, const char *componentName, - const sp<MediaSource> &source); + const sp<MediaSource> &source, + const sp<ANativeWindow> &nativeWindow); void addCodecSpecificData(const void *data, size_t size); void clearCodecSpecificData(); @@ -222,13 +247,20 @@ private: status_t allocateBuffers(); status_t allocateBuffersOnPort(OMX_U32 portIndex); + status_t allocateOutputBuffersFromNativeWindow(); + + status_t queueBufferToNativeWindow(BufferInfo *info); + status_t cancelBufferToNativeWindow(BufferInfo *info); + BufferInfo* dequeueBufferFromNativeWindow(); status_t freeBuffersOnPort( OMX_U32 portIndex, bool onlyThoseWeOwn = false); - void drainInputBuffer(IOMX::buffer_id buffer); + status_t freeBuffer(OMX_U32 portIndex, size_t bufIndex); + + bool drainInputBuffer(IOMX::buffer_id buffer); void fillOutputBuffer(IOMX::buffer_id buffer); - void drainInputBuffer(BufferInfo *info); + bool drainInputBuffer(BufferInfo *info); void fillOutputBuffer(BufferInfo *info); void drainInputBuffers(); @@ -256,6 +288,7 @@ private: status_t init(); void initOutputFormat(const sp<MetaData> &inputFormat); + status_t initNativeWindow(); void dumpPortStatus(OMX_U32 portIndex); @@ -270,6 +303,10 @@ private: uint32_t flags, Vector<String8> *matchingCodecs); + void restorePatchedDataPointer(BufferInfo *info); + + status_t applyRotation(); + OMXCodec(const OMXCodec &); OMXCodec &operator=(const OMXCodec &); }; @@ -282,6 +319,7 @@ struct CodecProfileLevel { struct CodecCapabilities { String8 mComponentName; Vector<CodecProfileLevel> mProfileLevels; + Vector<OMX_U32> mColorFormats; }; // Return a vector of componentNames with supported profile/level pairs diff --git a/include/media/stagefright/StagefrightMediaScanner.h b/include/media/stagefright/StagefrightMediaScanner.h index 4437eee..108acb4 100644 --- a/include/media/stagefright/StagefrightMediaScanner.h +++ 
b/include/media/stagefright/StagefrightMediaScanner.h @@ -22,8 +22,6 @@ namespace android { -struct MediaMetadataRetriever; - struct StagefrightMediaScanner : public MediaScanner { StagefrightMediaScanner(); virtual ~StagefrightMediaScanner(); @@ -35,8 +33,6 @@ struct StagefrightMediaScanner : public MediaScanner { virtual char *extractAlbumArt(int fd); private: - sp<MediaMetadataRetriever> mRetriever; - StagefrightMediaScanner(const StagefrightMediaScanner &); StagefrightMediaScanner &operator=(const StagefrightMediaScanner &); }; diff --git a/include/media/stagefright/VideoSourceDownSampler.h b/include/media/stagefright/VideoSourceDownSampler.h new file mode 100644 index 0000000..439918c --- /dev/null +++ b/include/media/stagefright/VideoSourceDownSampler.h @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// VideoSourceDownSampler implements the MediaSource interface, +// downsampling frames provided from a real video source. + +#ifndef VIDEO_SOURCE_DOWN_SAMPLER_H_ + +#define VIDEO_SOURCE_DOWN_SAMPLER_H_ + +#include <media/stagefright/MediaSource.h> +#include <utils/RefBase.h> + +namespace android { + +class IMemory; +class MediaBuffer; +class MetaData; + +class VideoSourceDownSampler : public MediaSource { +public: + virtual ~VideoSourceDownSampler(); + + // Constructor: + // videoSource: The real video source which provides the original frames. + // width, height: The desired width, height. These should be less than or equal + // to those of the real video source. We then downsample the original frames to + // this size. + VideoSourceDownSampler(const sp<MediaSource> &videoSource, + int32_t width, int32_t height); + + // MediaSource interface + virtual status_t start(MetaData *params = NULL); + + virtual status_t stop(); + + virtual sp<MetaData> getFormat(); + + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options = NULL); + + virtual status_t pause(); + +private: + // Reference to the real video source. + sp<MediaSource> mRealVideoSource; + + // Size of frames to be provided by this source. + int32_t mWidth; + int32_t mHeight; + + // Size of frames provided by the real source. + int32_t mRealSourceWidth; + int32_t mRealSourceHeight; + + // Down sampling paramters. + int32_t mDownSampleOffsetX; + int32_t mDownSampleOffsetY; + int32_t mDownSampleSkipX; + int32_t mDownSampleSkipY; + + // True if we need to crop the still video image to get the video frame. + bool mNeedDownSampling; + + // Meta data. This is a copy of the real source except for the width and + // height parameters. + sp<MetaData> mMeta; + + // Computes the offset, skip parameters for downsampling the original frame + // to the desired size. + void computeDownSamplingParameters(); + + // Downsamples the frame in sourceBuffer to size (mWidth x mHeight). A new + // buffer is created which stores the downsampled image. 
+ void downSampleYUVImage(const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const; + + // Disallow these. + VideoSourceDownSampler(const VideoSourceDownSampler &); + VideoSourceDownSampler &operator=(const VideoSourceDownSampler &); +}; + +} // namespace android + +#endif // VIDEO_SOURCE_DOWN_SAMPLER_H_ diff --git a/include/media/stagefright/YUVCanvas.h b/include/media/stagefright/YUVCanvas.h new file mode 100644 index 0000000..ff70923 --- /dev/null +++ b/include/media/stagefright/YUVCanvas.h @@ -0,0 +1,79 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// YUVCanvas holds a reference to a YUVImage on which it can do various +// drawing operations. It provides various utility functions for filling, +// cropping, etc. + + +#ifndef YUV_CANVAS_H_ + +#define YUV_CANVAS_H_ + +#include <stdint.h> + +namespace android { + +class YUVImage; +class Rect; + +class YUVCanvas { +public: + + // Constructor takes in reference to a yuvImage on which it can do + // various drawing opreations. + YUVCanvas(YUVImage &yuvImage); + ~YUVCanvas(); + + // Fills the entire image with the given YUV values. + void FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue); + + // Fills the rectangular region [startX,endX]x[startY,endY] with the given YUV values. + void FillYUVRectangle(const Rect& rect, + uint8_t yValue, uint8_t uValue, uint8_t vValue); + + // Copies the region [startX,endX]x[startY,endY] from srcImage into the + // canvas' target image (mYUVImage) starting at + // (destinationStartX,destinationStartY). + // Note that undefined behavior may occur if srcImage is same as the canvas' + // target image. + void CopyImageRect( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage); + + // Downsamples the srcImage into the canvas' target image (mYUVImage) + // The downsampling copies pixels from the source image starting at + // (srcOffsetX, srcOffsetY) to the target image, starting at (0, 0). + // For each X increment in the target image, skipX pixels are skipped + // in the source image. + // Similarly for each Y increment in the target image, skipY pixels + // are skipped in the source image. + void downsample( + int32_t srcOffsetX, int32_t srcOffsetY, + int32_t skipX, int32_t skipY, + const YUVImage &srcImage); + +private: + YUVImage& mYUVImage; + + YUVCanvas(const YUVCanvas &); + YUVCanvas &operator=(const YUVCanvas &); +}; + +} // namespace android + +#endif // YUV_CANVAS_H_ diff --git a/include/media/stagefright/YUVImage.h b/include/media/stagefright/YUVImage.h new file mode 100644 index 0000000..4e98618 --- /dev/null +++ b/include/media/stagefright/YUVImage.h @@ -0,0 +1,178 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// A container class to hold YUV data and provide various utilities, +// e.g. to set/get pixel values. +// Supported formats: +// - YUV420 Planar +// - YUV420 Semi Planar +// +// Currently does not support variable strides. +// +// Implementation: Two simple abstractions are done to simplify access +// to YUV channels for different formats: +// - initializeYUVPointers() sets up pointers (mYdata, mUdata, mVdata) to +// point to the right start locations of the different channel data depending +// on the format. +// - getOffsets() returns the correct offset for the different channels +// depending on the format. +// Location of any pixel's YUV channels can then be easily computed using these. +// + +#ifndef YUV_IMAGE_H_ + +#define YUV_IMAGE_H_ + +#include <stdint.h> +#include <cstring> + +namespace android { + +class Rect; + +class YUVImage { +public: + // Supported YUV formats + enum YUVFormat { + YUV420Planar, + YUV420SemiPlanar + }; + + // Constructs an image with the given size, format. Also allocates and owns + // the required memory. + YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height); + + // Constructs an image with the given size, format. The memory is provided + // by the caller and we don't own it. + YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer); + + // Destructor to delete the memory if it owns it. + ~YUVImage(); + + // Returns the size of the buffer required to store the YUV data for the given + // format and geometry. Useful when the caller wants to allocate the requisite + // memory. + static size_t bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height); + + int32_t width() const {return mWidth;} + int32_t height() const {return mHeight;} + + // Returns true if pixel is the range [0, width-1] x [0, height-1] + // and false otherwise. + bool validPixel(int32_t x, int32_t y) const; + + // Get the pixel YUV value at pixel (x,y). + // Note that the range of x is [0, width-1] and the range of y is [0, height-1]. + // Returns true if get was successful and false otherwise. + bool getPixelValue(int32_t x, int32_t y, + uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const; + + // Set the pixel YUV value at pixel (x,y). + // Note that the range of x is [0, width-1] and the range of y is [0, height-1]. + // Returns true if set was successful and false otherwise. + bool setPixelValue(int32_t x, int32_t y, + uint8_t yValue, uint8_t uValue, uint8_t vValue); + + // Uses memcpy to copy an entire row of data + static void fastCopyRectangle420Planar( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage); + + // Uses memcpy to copy an entire row of data + static void fastCopyRectangle420SemiPlanar( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage); + + // Tries to use memcopy to copy entire rows of data. + // Returns false if fast copy is not possible for the passed image formats. 
+ static bool fastCopyRectangle( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage); + + // Convert the given YUV value to RGB. + void yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue, + uint8_t *r, uint8_t *g, uint8_t *b) const; + + // Write the image to a human readable PPM file. + // Returns true if write was succesful and false otherwise. + bool writeToPPM(const char *filename) const; + +private: + // YUV Format of the image. + YUVFormat mYUVFormat; + + int32_t mWidth; + int32_t mHeight; + + // Pointer to the memory buffer. + uint8_t *mBuffer; + + // Boolean telling whether we own the memory buffer. + bool mOwnBuffer; + + // Pointer to start of the Y data plane. + uint8_t *mYdata; + + // Pointer to start of the U data plane. Note that in case of interleaved formats like + // YUV420 semiplanar, mUdata points to the start of the U data in the UV plane. + uint8_t *mUdata; + + // Pointer to start of the V data plane. Note that in case of interleaved formats like + // YUV420 semiplanar, mVdata points to the start of the V data in the UV plane. + uint8_t *mVdata; + + // Initialize the pointers mYdata, mUdata, mVdata to point to the right locations for + // the given format and geometry. + // Returns true if initialize was succesful and false otherwise. + bool initializeYUVPointers(); + + // For the given pixel location, this returns the offset of the location of y, u and v + // data from the corresponding base pointers -- mYdata, mUdata, mVdata. + // Note that the range of x is [0, width-1] and the range of y is [0, height-1]. + // Returns true if getting offsets was succesful and false otherwise. + bool getOffsets(int32_t x, int32_t y, + int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const; + + // Returns the offset increments incurred in going from one data row to the next data row + // for the YUV channels. Note that this corresponds to data rows and not pixel rows. + // E.g. depending on formats, U/V channels may have only one data row corresponding + // to two pixel rows. + bool getOffsetIncrementsPerDataRow( + int32_t *yDataOffsetIncrement, + int32_t *uDataOffsetIncrement, + int32_t *vDataOffsetIncrement) const; + + // Given the offset return the address of the corresponding channel's data. + uint8_t* getYAddress(int32_t offset) const; + uint8_t* getUAddress(int32_t offset) const; + uint8_t* getVAddress(int32_t offset) const; + + // Given the pixel location, returns the address of the corresponding channel's data. + // Note that the range of x is [0, width-1] and the range of y is [0, height-1]. + bool getYUVAddresses(int32_t x, int32_t y, + uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const; + + // Disallow implicit casting and copying. 
+ YUVImage(const YUVImage &); + YUVImage &operator=(const YUVImage &); +}; + +} // namespace android + +#endif // YUV_IMAGE_H_ diff --git a/include/media/stagefright/foundation/ADebug.h b/include/media/stagefright/foundation/ADebug.h index 69021d8..450dcfe 100644 --- a/include/media/stagefright/foundation/ADebug.h +++ b/include/media/stagefright/foundation/ADebug.h @@ -32,6 +32,7 @@ namespace android { #define CHECK(condition) \ LOG_ALWAYS_FATAL_IF( \ !(condition), \ + "%s", \ __FILE__ ":" LITERAL_TO_STRING(__LINE__) \ " CHECK(" #condition ") failed.") @@ -58,10 +59,12 @@ MAKE_COMPARATOR(GT,>) do { \ AString ___res = Compare_##suffix(x, y); \ if (!___res.empty()) { \ - LOG_ALWAYS_FATAL( \ - __FILE__ ":" LITERAL_TO_STRING(__LINE__) \ - " CHECK_" #suffix "( " #x "," #y ") failed: %s", \ - ___res.c_str()); \ + AString ___full = \ + __FILE__ ":" LITERAL_TO_STRING(__LINE__) \ + " CHECK_" #suffix "( " #x "," #y ") failed: "; \ + ___full.append(___res); \ + \ + LOG_ALWAYS_FATAL("%s", ___full.c_str()); \ } \ } while (false) @@ -72,7 +75,10 @@ MAKE_COMPARATOR(GT,>) #define CHECK_GE(x,y) CHECK_OP(x,y,GE,>=) #define CHECK_GT(x,y) CHECK_OP(x,y,GT,>) -#define TRESPASS() LOG_ALWAYS_FATAL("Should not be here.") +#define TRESPASS() \ + LOG_ALWAYS_FATAL( \ + __FILE__ ":" LITERAL_TO_STRING(__LINE__) \ + " Should not be here."); } // namespace android diff --git a/include/media/stagefright/foundation/AHierarchicalStateMachine.h b/include/media/stagefright/foundation/AHierarchicalStateMachine.h new file mode 100644 index 0000000..b5786fb --- /dev/null +++ b/include/media/stagefright/foundation/AHierarchicalStateMachine.h @@ -0,0 +1,49 @@ +#ifndef A_HIERARCHICAL_STATE_MACHINE_H_ + +#define A_HIERARCHICAL_STATE_MACHINE_H_ + +#include <media/stagefright/foundation/AHandler.h> + +namespace android { + +struct AState : public RefBase { + AState(const sp<AState> &parentState = NULL); + + sp<AState> parentState(); + +protected: + virtual ~AState(); + + virtual void stateEntered(); + virtual void stateExited(); + + virtual bool onMessageReceived(const sp<AMessage> &msg) = 0; + +private: + friend struct AHierarchicalStateMachine; + + sp<AState> mParentState; + + DISALLOW_EVIL_CONSTRUCTORS(AState); +}; + +struct AHierarchicalStateMachine : public AHandler { + AHierarchicalStateMachine(); + +protected: + virtual ~AHierarchicalStateMachine(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + + // Only to be called in response to a message. 
+ void changeState(const sp<AState> &state); + +private: + sp<AState> mState; + + DISALLOW_EVIL_CONSTRUCTORS(AHierarchicalStateMachine); +}; + +} // namespace android + +#endif // A_HIERARCHICAL_STATE_MACHINE_H_ diff --git a/include/media/stagefright/foundation/AMessage.h b/include/media/stagefright/foundation/AMessage.h index c674cba..72dc730 100644 --- a/include/media/stagefright/foundation/AMessage.h +++ b/include/media/stagefright/foundation/AMessage.h @@ -26,16 +26,22 @@ namespace android { struct AString; +struct Parcel; struct AMessage : public RefBase { AMessage(uint32_t what = 0, ALooper::handler_id target = 0); + static sp<AMessage> FromParcel(const Parcel &parcel); + void writeToParcel(Parcel *parcel) const; + void setWhat(uint32_t what); uint32_t what() const; void setTarget(ALooper::handler_id target); ALooper::handler_id target() const; + void clear(); + void setInt32(const char *name, int32_t value); void setInt64(const char *name, int64_t value); void setSize(const char *name, size_t value); @@ -46,6 +52,10 @@ struct AMessage : public RefBase { void setObject(const char *name, const sp<RefBase> &obj); void setMessage(const char *name, const sp<AMessage> &obj); + void setRect( + const char *name, + int32_t left, int32_t top, int32_t right, int32_t bottom); + bool findInt32(const char *name, int32_t *value) const; bool findInt64(const char *name, int64_t *value) const; bool findSize(const char *name, size_t *value) const; @@ -56,8 +66,15 @@ struct AMessage : public RefBase { bool findObject(const char *name, sp<RefBase> *obj) const; bool findMessage(const char *name, sp<AMessage> *obj) const; + bool findRect( + const char *name, + int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const; + void post(int64_t delayUs = 0); + // Performs a deep-copy of "this", contained messages are in turn "dup'ed". + // Warning: RefBase items, i.e. "objects" are _not_ copied but only have + // their refcount incremented. 
sp<AMessage> dup() const; AString debugString(int32_t indent = 0) const; @@ -76,11 +93,16 @@ private: kTypeString, kTypeObject, kTypeMessage, + kTypeRect, }; uint32_t mWhat; ALooper::handler_id mTarget; + struct Rect { + int32_t mLeft, mTop, mRight, mBottom; + }; + struct Item { union { int32_t int32Value; @@ -91,6 +113,7 @@ private: void *ptrValue; RefBase *refValue; AString *stringValue; + Rect rectValue; } u; const char *mName; Type mType; @@ -102,7 +125,6 @@ private: Item mItems[kMaxNumItems]; size_t mNumItems; - void clear(); Item *allocateItem(const char *name); void freeItem(Item *item); const Item *findItem(const char *name, Type type) const; diff --git a/include/private/surfaceflinger/SharedBufferStack.h b/include/private/surfaceflinger/SharedBufferStack.h index 4ae3cdf..eb599b5 100644 --- a/include/private/surfaceflinger/SharedBufferStack.h +++ b/include/private/surfaceflinger/SharedBufferStack.h @@ -284,6 +284,8 @@ public: uint32_t getTransform(int buffer) const; status_t resize(int newNumBuffers); + status_t grow(int newNumBuffers); + status_t shrink(int newNumBuffers); SharedBufferStack::Statistics getStats() const; @@ -345,6 +347,13 @@ private: int mNumBuffers; BufferList mBufferList; + struct BuffersAvailableCondition : public ConditionBase { + int mNumBuffers; + inline BuffersAvailableCondition(SharedBufferServer* sbs, + int numBuffers); + inline bool operator()() const; + inline const char* name() const { return "BuffersAvailableCondition"; } + }; struct RetireUpdate : public UpdateBase { const int numBuffers; diff --git a/include/private/ui/sw_gralloc_handle.h b/include/private/ui/sw_gralloc_handle.h deleted file mode 100644 index b3d333e..0000000 --- a/include/private/ui/sw_gralloc_handle.h +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright (C) 2008 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H -#define ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H - -#include <stdint.h> -#include <limits.h> -#include <sys/cdefs.h> -#include <hardware/gralloc.h> -#include <errno.h> - -#include <cutils/native_handle.h> - -namespace android { - -/*****************************************************************************/ - -struct sw_gralloc_handle_t : public native_handle -{ - // file-descriptors - int fd; - // ints - int magic; - int size; - int base; - int prot; - int pid; - - static const int sNumInts = 5; - static const int sNumFds = 1; - static const int sMagic = '_sgh'; - - sw_gralloc_handle_t() : - fd(-1), magic(sMagic), size(0), base(0), prot(0), pid(getpid()) - { - version = sizeof(native_handle); - numInts = sNumInts; - numFds = sNumFds; - } - ~sw_gralloc_handle_t() { - magic = 0; - } - - static int validate(const native_handle* h) { - const sw_gralloc_handle_t* hnd = (const sw_gralloc_handle_t*)h; - if (!h || h->version != sizeof(native_handle) || - h->numInts != sNumInts || h->numFds != sNumFds || - hnd->magic != sMagic) - { - return -EINVAL; - } - return 0; - } - - static status_t alloc(uint32_t w, uint32_t h, int format, - int usage, buffer_handle_t* handle, int32_t* stride); - static status_t free(sw_gralloc_handle_t* hnd); - static status_t registerBuffer(sw_gralloc_handle_t* hnd); - static status_t unregisterBuffer(sw_gralloc_handle_t* hnd); - static status_t lock(sw_gralloc_handle_t* hnd, int usage, - int l, int t, int w, int h, void** vaddr); - static status_t unlock(sw_gralloc_handle_t* hnd); -}; - -/*****************************************************************************/ - -}; // namespace android - -#endif /* ANDROID_UI_PRIVATE_SW_GRALLOC_HANDLE_H */ diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk index 48b45ff..e6ff654 100644 --- a/media/libeffects/visualizer/Android.mk +++ b/media/libeffects/visualizer/Android.mk @@ -27,4 +27,4 @@ LOCAL_C_INCLUDES := \ LOCAL_PRELINK_MODULE := false -include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk index 2e5cbe3..74fb531 100644 --- a/media/libmedia/Android.mk +++ b/media/libmedia/Android.mk @@ -15,6 +15,7 @@ LOCAL_SRC_FILES:= \ IMediaRecorderClient.cpp \ IMediaPlayer.cpp \ IMediaRecorder.cpp \ + IStreamSource.cpp \ Metadata.cpp \ mediarecorder.cpp \ IMediaMetadataRetriever.cpp \ @@ -35,7 +36,8 @@ LOCAL_SRC_FILES:= \ fixedfft.cpp.arm LOCAL_SHARED_LIBRARIES := \ - libui libcutils libutils libbinder libsonivox libicuuc libexpat libsurfaceflinger_client libcamera_client + libui libcutils libutils libbinder libsonivox libicuuc libexpat \ + libsurfaceflinger_client libcamera_client libstagefright_foundation LOCAL_MODULE:= libmedia diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp index 88b8c86..aadeba5 100644 --- a/media/libmedia/AudioEffect.cpp +++ b/media/libmedia/AudioEffect.cpp @@ -27,7 +27,6 @@ #include <media/AudioEffect.h> #include <utils/Log.h> -#include <cutils/atomic.h> #include <binder/IPCThreadState.h> @@ -207,18 +206,22 @@ status_t AudioEffect::setEnabled(bool enabled) return INVALID_OPERATION; } - if (enabled) { - LOGV("enable %p", this); - if (android_atomic_or(1, &mEnabled) == 0) { - return mIEffect->enable(); + status_t status = NO_ERROR; + + AutoMutex lock(mLock); + if (enabled != mEnabled) { + if (enabled) { + LOGV("enable %p", this); + status = mIEffect->enable(); + } else { + LOGV("disable %p", this); + status = mIEffect->disable(); } - } else { - LOGV("disable %p", this); - if (android_atomic_and(~1, &mEnabled) == 1) { - return mIEffect->disable(); + if (status == NO_ERROR) { + mEnabled = enabled; } } - return NO_ERROR; + return status; } status_t AudioEffect::command(uint32_t cmdCode, @@ -232,26 +235,26 @@ status_t AudioEffect::command(uint32_t cmdCode, return INVALID_OPERATION; } - if ((cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) && - (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL)) { - return BAD_VALUE; + if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) { + if (mEnabled == (cmdCode == EFFECT_CMD_ENABLE)) { + return NO_ERROR; + } + if (replySize == NULL || *replySize != sizeof(status_t) || replyData == NULL) { + return BAD_VALUE; + } + mLock.lock(); } status_t status = mIEffect->command(cmdCode, cmdSize, cmdData, replySize, replyData); - if (status != NO_ERROR) { - return status; - } if (cmdCode == EFFECT_CMD_ENABLE || cmdCode == EFFECT_CMD_DISABLE) { - status = *(status_t *)replyData; - if (status != NO_ERROR) { - return status; + if (status == NO_ERROR) { + status = *(status_t *)replyData; } - if (cmdCode == EFFECT_CMD_ENABLE) { - android_atomic_or(1, &mEnabled); - } else { - android_atomic_and(~1, &mEnabled); + if (status == NO_ERROR) { + mEnabled = (cmdCode == EFFECT_CMD_ENABLE); } + mLock.unlock(); } return status; @@ -370,11 +373,7 @@ void AudioEffect::enableStatusChanged(bool enabled) { LOGV("enableStatusChanged %p enabled %d mCbf %p", this, enabled, mCbf); if (mStatus == ALREADY_EXISTS) { - if (enabled) { - android_atomic_or(1, &mEnabled); - } else { - android_atomic_and(~1, &mEnabled); - } + mEnabled = enabled; if (mCbf) { mCbf(EVENT_ENABLE_STATUS_CHANGED, mUserData, &enabled); } diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp index a6c515c..1d6ffa0 100644 --- a/media/libmedia/AudioRecord.cpp +++ b/media/libmedia/AudioRecord.cpp @@ -35,7 +35,6 @@ #include <binder/Parcel.h> #include <binder/IPCThreadState.h> 
#include <utils/Timers.h> -#include <cutils/atomic.h> #define LIKELY( exp ) (__builtin_expect( (exp) != 0, true )) #define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false )) @@ -282,7 +281,9 @@ status_t AudioRecord::start() t->mLock.lock(); } - if (android_atomic_or(1, &mActive) == 0) { + AutoMutex lock(mLock); + if (mActive == 0) { + mActive = 1; ret = mAudioRecord->start(); if (ret == DEAD_OBJECT) { LOGV("start() dead IAudioRecord: creating a new one"); @@ -302,8 +303,7 @@ status_t AudioRecord::start() setpriority(PRIO_PROCESS, 0, THREAD_PRIORITY_AUDIO_CLIENT); } } else { - LOGV("start() failed"); - android_atomic_and(~1, &mActive); + mActive = 0; } } @@ -322,9 +322,11 @@ status_t AudioRecord::stop() if (t != 0) { t->mLock.lock(); - } + } - if (android_atomic_and(~1, &mActive) == 1) { + AutoMutex lock(mLock); + if (mActive == 1) { + mActive = 0; mCblk->cv.signal(); mAudioRecord->stop(); // the record head position will reset to 0, so if a marker is set, we need diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp index 9c2a8ba..1a3fcd6 100644 --- a/media/libmedia/AudioSystem.cpp +++ b/media/libmedia/AudioSystem.cpp @@ -835,6 +835,7 @@ const char *AudioParameter::keySamplingRate = "sampling_rate"; const char *AudioParameter::keyFormat = "format"; const char *AudioParameter::keyChannels = "channels"; const char *AudioParameter::keyFrameCount = "frame_count"; +const char *AudioParameter::keyInputSource = "input_source"; AudioParameter::AudioParameter(const String8& keyValuePairs) { diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp index 587c8ff..c1bed59 100644 --- a/media/libmedia/AudioTrack.cpp +++ b/media/libmedia/AudioTrack.cpp @@ -35,7 +35,6 @@ #include <binder/Parcel.h> #include <binder/IPCThreadState.h> #include <utils/Timers.h> -#include <cutils/atomic.h> #define LIKELY( exp ) (__builtin_expect( (exp) != 0, true )) #define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false )) @@ -312,7 +311,9 @@ void AudioTrack::start() t->mLock.lock(); } - if (android_atomic_or(1, &mActive) == 0) { + AutoMutex lock(mLock); + if (mActive == 0) { + mActive = 1; mNewPosition = mCblk->server + mUpdatePeriod; mCblk->bufferTimeoutMs = MAX_STARTUP_TIMEOUT_MS; mCblk->waitTimeMs = 0; @@ -344,7 +345,7 @@ void AudioTrack::start() } if (status != NO_ERROR) { LOGV("start() failed"); - android_atomic_and(~1, &mActive); + mActive = 0; if (t != 0) { t->requestExit(); } else { @@ -367,7 +368,9 @@ void AudioTrack::stop() t->mLock.lock(); } - if (android_atomic_and(~1, &mActive) == 1) { + AutoMutex lock(mLock); + if (mActive == 1) { + mActive = 0; mCblk->cv.signal(); mAudioTrack->stop(); // Cancel loops (If we are in the middle of a loop, playback @@ -407,7 +410,6 @@ void AudioTrack::flush() mMarkerReached = false; mUpdatePeriod = 0; - if (!mActive) { mAudioTrack->flush(); // Release AudioTrack callback thread in case it was waiting for new buffers @@ -419,7 +421,9 @@ void AudioTrack::flush() void AudioTrack::pause() { LOGV("pause"); - if (android_atomic_and(~1, &mActive) == 1) { + AutoMutex lock(mLock); + if (mActive == 1) { + mActive = 0; mAudioTrack->pause(); } } diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp index 0f55b19..c287c0a 100644 --- a/media/libmedia/IMediaPlayer.cpp +++ b/media/libmedia/IMediaPlayer.cpp @@ -22,6 +22,7 @@ #include <media/IMediaPlayer.h> #include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> namespace android { @@ -43,8 +44,6 @@ enum { INVOKE, SET_METADATA_FILTER, GET_METADATA, - 
SUSPEND, - RESUME, SET_AUX_EFFECT_SEND_LEVEL, ATTACH_AUX_EFFECT }; @@ -65,11 +64,11 @@ public: remote()->transact(DISCONNECT, data, &reply); } - status_t setVideoSurface(const sp<ISurface>& surface) + status_t setVideoSurface(const sp<Surface>& surface) { Parcel data, reply; data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); - data.writeStrongBinder(surface->asBinder()); + Surface::writeToParcel(surface, &data); remote()->transact(SET_VIDEO_SURFACE, data, &reply); return reply.readInt32(); } @@ -204,26 +203,6 @@ public: return reply->readInt32(); } - status_t suspend() { - Parcel request; - request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); - - Parcel reply; - remote()->transact(SUSPEND, request, &reply); - - return reply.readInt32(); - } - - status_t resume() { - Parcel request; - request.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor()); - - Parcel reply; - remote()->transact(RESUME, request, &reply); - - return reply.readInt32(); - } - status_t setAuxEffectSendLevel(float level) { Parcel data, reply; @@ -258,7 +237,7 @@ status_t BnMediaPlayer::onTransact( } break; case SET_VIDEO_SURFACE: { CHECK_INTERFACE(IMediaPlayer, data, reply); - sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder()); + sp<Surface> surface = Surface::readFromParcel(data); reply->writeInt32(setVideoSurface(surface)); return NO_ERROR; } break; @@ -341,16 +320,6 @@ status_t BnMediaPlayer::onTransact( reply->writeInt32(setMetadataFilter(data)); return NO_ERROR; } break; - case SUSPEND: { - CHECK_INTERFACE(IMediaPlayer, data, reply); - reply->writeInt32(suspend()); - return NO_ERROR; - } break; - case RESUME: { - CHECK_INTERFACE(IMediaPlayer, data, reply); - reply->writeInt32(resume()); - return NO_ERROR; - } break; case GET_METADATA: { CHECK_INTERFACE(IMediaPlayer, data, reply); const status_t retcode = getMetadata(data.readInt32(), data.readInt32(), reply); diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp index 4abfa75..77199e1 100644 --- a/media/libmedia/IMediaPlayerService.cpp +++ b/media/libmedia/IMediaPlayerService.cpp @@ -23,6 +23,7 @@ #include <media/IMediaPlayerService.h> #include <media/IMediaRecorder.h> #include <media/IOMX.h> +#include <media/IStreamSource.h> #include <utils/Errors.h> // for status_t @@ -31,6 +32,7 @@ namespace android { enum { CREATE_URL = IBinder::FIRST_CALL_TRANSACTION, CREATE_FD, + CREATE_STREAM, DECODE_URL, DECODE_FD, CREATE_MEDIA_RECORDER, @@ -107,6 +109,21 @@ public: return interface_cast<IMediaPlayer>(reply.readStrongBinder());; } + virtual sp<IMediaPlayer> create( + pid_t pid, const sp<IMediaPlayerClient> &client, + const sp<IStreamSource> &source, int audioSessionId) { + Parcel data, reply; + data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor()); + data.writeInt32(static_cast<int32_t>(pid)); + data.writeStrongBinder(client->asBinder()); + data.writeStrongBinder(source->asBinder()); + data.writeInt32(static_cast<int32_t>(audioSessionId)); + + remote()->transact(CREATE_STREAM, data, &reply); + + return interface_cast<IMediaPlayer>(reply.readStrongBinder());; + } + virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat) { Parcel data, reply; @@ -184,6 +201,27 @@ status_t BnMediaPlayerService::onTransact( reply->writeStrongBinder(player->asBinder()); return NO_ERROR; } break; + case CREATE_STREAM: + { + CHECK_INTERFACE(IMediaPlayerService, data, reply); + + pid_t pid = static_cast<pid_t>(data.readInt32()); + + 
sp<IMediaPlayerClient> client = + interface_cast<IMediaPlayerClient>(data.readStrongBinder()); + + sp<IStreamSource> source = + interface_cast<IStreamSource>(data.readStrongBinder()); + + int audioSessionId = static_cast<int>(data.readInt32()); + + sp<IMediaPlayer> player = + create(pid, client, source, audioSessionId); + + reply->writeStrongBinder(player->asBinder()); + return OK; + break; + } case DECODE_URL: { CHECK_INTERFACE(IMediaPlayerService, data, reply); const char* url = data.readCString(); diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp index 947ff34..59cd1b7 100644 --- a/media/libmedia/IMediaRecorder.cpp +++ b/media/libmedia/IMediaRecorder.cpp @@ -19,7 +19,7 @@ #define LOG_TAG "IMediaRecorder" #include <utils/Log.h> #include <binder/Parcel.h> -#include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> #include <camera/ICamera.h> #include <media/IMediaRecorderClient.h> #include <media/IMediaRecorder.h> @@ -43,6 +43,7 @@ enum { SET_AUDIO_ENCODER, SET_OUTPUT_FILE_PATH, SET_OUTPUT_FILE_FD, + SET_OUTPUT_FILE_AUXILIARY_FD, SET_VIDEO_SIZE, SET_VIDEO_FRAMERATE, SET_PARAMETERS, @@ -69,12 +70,12 @@ public: return reply.readInt32(); } - status_t setPreviewSurface(const sp<ISurface>& surface) + status_t setPreviewSurface(const sp<Surface>& surface) { LOGV("setPreviewSurface(%p)", surface.get()); Parcel data, reply; data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); - data.writeStrongBinder(surface->asBinder()); + Surface::writeToParcel(surface, &data); remote()->transact(SET_PREVIEW_SURFACE, data, &reply); return reply.readInt32(); } @@ -159,6 +160,15 @@ public: return reply.readInt32(); } + status_t setOutputFileAuxiliary(int fd) { + LOGV("setOutputFileAuxiliary(%d)", fd); + Parcel data, reply; + data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor()); + data.writeFileDescriptor(fd); + remote()->transact(SET_OUTPUT_FILE_AUXILIARY_FD, data, &reply); + return reply.readInt32(); + } + status_t setVideoSize(int width, int height) { LOGV("setVideoSize(%dx%d)", width, height); @@ -377,6 +387,13 @@ status_t BnMediaRecorder::onTransact( ::close(fd); return NO_ERROR; } break; + case SET_OUTPUT_FILE_AUXILIARY_FD: { + LOGV("SET_OUTPUT_FILE_AUXILIARY_FD"); + CHECK_INTERFACE(IMediaRecorder, data, reply); + int fd = dup(data.readFileDescriptor()); + reply->writeInt32(setOutputFileAuxiliary(fd)); + return NO_ERROR; + } break; case SET_VIDEO_SIZE: { LOGV("SET_VIDEO_SIZE"); CHECK_INTERFACE(IMediaRecorder, data, reply); @@ -409,7 +426,7 @@ status_t BnMediaRecorder::onTransact( case SET_PREVIEW_SURFACE: { LOGV("SET_PREVIEW_SURFACE"); CHECK_INTERFACE(IMediaRecorder, data, reply); - sp<ISurface> surface = interface_cast<ISurface>(data.readStrongBinder()); + sp<Surface> surface = Surface::readFromParcel(data); reply->writeInt32(setPreviewSurface(surface)); return NO_ERROR; } break; diff --git a/media/libmedia/IOMX.cpp b/media/libmedia/IOMX.cpp index ae6c2bf..af67175 100644 --- a/media/libmedia/IOMX.cpp +++ b/media/libmedia/IOMX.cpp @@ -5,6 +5,7 @@ #include <binder/IMemory.h> #include <binder/Parcel.h> #include <media/IOMX.h> +#include <media/stagefright/foundation/ADebug.h> #include <surfaceflinger/ISurface.h> #include <surfaceflinger/Surface.h> @@ -21,59 +22,19 @@ enum { SET_PARAMETER, GET_CONFIG, SET_CONFIG, + ENABLE_GRAPHIC_BUFFERS, USE_BUFFER, + USE_GRAPHIC_BUFFER, + STORE_META_DATA_IN_BUFFERS, ALLOC_BUFFER, ALLOC_BUFFER_WITH_BACKUP, FREE_BUFFER, FILL_BUFFER, EMPTY_BUFFER, GET_EXTENSION_INDEX, - CREATE_RENDERER, OBSERVER_ON_MSG, 
- RENDERER_RENDER, }; -sp<IOMXRenderer> IOMX::createRenderer( - const sp<Surface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees) { - return createRenderer( - surface->getISurface(), - componentName, colorFormat, encodedWidth, encodedHeight, - displayWidth, displayHeight, - rotationDegrees); -} - -sp<IOMXRenderer> IOMX::createRendererFromJavaSurface( - JNIEnv *env, jobject javaSurface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees) { - jclass surfaceClass = env->FindClass("android/view/Surface"); - if (surfaceClass == NULL) { - LOGE("Can't find android/view/Surface"); - return NULL; - } - - jfieldID surfaceID = env->GetFieldID(surfaceClass, ANDROID_VIEW_SURFACE_JNI_ID, "I"); - if (surfaceID == NULL) { - LOGE("Can't find Surface.mSurface"); - return NULL; - } - - sp<Surface> surface = (Surface *)env->GetIntField(javaSurface, surfaceID); - - return createRenderer( - surface, componentName, colorFormat, encodedWidth, - encodedHeight, displayWidth, displayHeight, - rotationDegrees); -} - class BpOMX : public BpInterface<IOMX> { public: BpOMX(const sp<IBinder> &impl) @@ -220,6 +181,19 @@ public: return reply.readInt32(); } + virtual status_t enableGraphicBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeInt32((uint32_t)enable); + remote()->transact(ENABLE_GRAPHIC_BUFFERS, data, &reply); + + status_t err = reply.readInt32(); + return err; + } + virtual status_t useBuffer( node_id node, OMX_U32 port_index, const sp<IMemory> ¶ms, buffer_id *buffer) { @@ -242,6 +216,42 @@ public: return err; } + + virtual status_t useGraphicBuffer( + node_id node, OMX_U32 port_index, + const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.write(*graphicBuffer); + remote()->transact(USE_GRAPHIC_BUFFER, data, &reply); + + status_t err = reply.readInt32(); + if (err != OK) { + *buffer = 0; + + return err; + } + + *buffer = (void*)reply.readIntPtr(); + + return err; + } + + virtual status_t storeMetaDataInBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable) { + Parcel data, reply; + data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); + data.writeIntPtr((intptr_t)node); + data.writeInt32(port_index); + data.writeInt32((uint32_t)enable); + remote()->transact(STORE_META_DATA_IN_BUFFERS, data, &reply); + + status_t err = reply.readInt32(); + return err; + } + virtual status_t allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data) { @@ -347,30 +357,6 @@ public: return err; } - - virtual sp<IOMXRenderer> createRenderer( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees) { - Parcel data, reply; - data.writeInterfaceToken(IOMX::getInterfaceDescriptor()); - - data.writeStrongBinder(surface->asBinder()); - data.writeCString(componentName); - data.writeInt32(colorFormat); - data.writeInt32(encodedWidth); 
- data.writeInt32(encodedHeight); - data.writeInt32(displayWidth); - data.writeInt32(displayHeight); - data.writeInt32(rotationDegrees); - - remote()->transact(CREATE_RENDERER, data, &reply); - - return interface_cast<IOMXRenderer>(reply.readStrongBinder()); - } }; IMPLEMENT_META_INTERFACE(OMX, "android.hardware.IOMX"); @@ -464,6 +450,9 @@ status_t BnOMX::onTransact( } case GET_PARAMETER: + case SET_PARAMETER: + case GET_CONFIG: + case SET_CONFIG: { CHECK_INTERFACE(IOMX, data, reply); @@ -472,15 +461,30 @@ status_t BnOMX::onTransact( size_t size = data.readInt32(); - // XXX I am not happy with this but Parcel::readInplace didn't work. void *params = malloc(size); data.read(params, size); - status_t err = getParameter(node, index, params, size); + status_t err; + switch (code) { + case GET_PARAMETER: + err = getParameter(node, index, params, size); + break; + case SET_PARAMETER: + err = setParameter(node, index, params, size); + break; + case GET_CONFIG: + err = getConfig(node, index, params, size); + break; + case SET_CONFIG: + err = setConfig(node, index, params, size); + break; + default: + TRESPASS(); + } reply->writeInt32(err); - if (err == OK) { + if ((code == GET_PARAMETER || code == GET_CONFIG) && err == OK) { reply->write(params, size); } @@ -490,80 +494,72 @@ status_t BnOMX::onTransact( return NO_ERROR; } - case SET_PARAMETER: + case ENABLE_GRAPHIC_BUFFERS: { CHECK_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); - OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); - - size_t size = data.readInt32(); - void *params = const_cast<void *>(data.readInplace(size)); + OMX_U32 port_index = data.readInt32(); + OMX_BOOL enable = (OMX_BOOL)data.readInt32(); - reply->writeInt32(setParameter(node, index, params, size)); + status_t err = enableGraphicBuffers(node, port_index, enable); + reply->writeInt32(err); return NO_ERROR; } - case GET_CONFIG: + case USE_BUFFER: { CHECK_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); - OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); - - size_t size = data.readInt32(); - - // XXX I am not happy with this but Parcel::readInplace didn't work. 
- void *params = malloc(size); - data.read(params, size); - - status_t err = getConfig(node, index, params, size); + OMX_U32 port_index = data.readInt32(); + sp<IMemory> params = + interface_cast<IMemory>(data.readStrongBinder()); + buffer_id buffer; + status_t err = useBuffer(node, port_index, params, &buffer); reply->writeInt32(err); if (err == OK) { - reply->write(params, size); + reply->writeIntPtr((intptr_t)buffer); } - free(params); - params = NULL; - return NO_ERROR; } - case SET_CONFIG: + case USE_GRAPHIC_BUFFER: { CHECK_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); - OMX_INDEXTYPE index = static_cast<OMX_INDEXTYPE>(data.readInt32()); + OMX_U32 port_index = data.readInt32(); + sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(); + data.read(*graphicBuffer); - size_t size = data.readInt32(); - void *params = const_cast<void *>(data.readInplace(size)); + buffer_id buffer; + status_t err = useGraphicBuffer( + node, port_index, graphicBuffer, &buffer); + reply->writeInt32(err); - reply->writeInt32(setConfig(node, index, params, size)); + if (err == OK) { + reply->writeIntPtr((intptr_t)buffer); + } return NO_ERROR; } - case USE_BUFFER: + case STORE_META_DATA_IN_BUFFERS: { CHECK_INTERFACE(IOMX, data, reply); node_id node = (void*)data.readIntPtr(); OMX_U32 port_index = data.readInt32(); - sp<IMemory> params = - interface_cast<IMemory>(data.readStrongBinder()); + OMX_BOOL enable = (OMX_BOOL)data.readInt32(); - buffer_id buffer; - status_t err = useBuffer(node, port_index, params, &buffer); + status_t err = storeMetaDataInBuffers(node, port_index, enable); reply->writeInt32(err); - if (err == OK) { - reply->writeIntPtr((intptr_t)buffer); - } - return NO_ERROR; } @@ -672,35 +668,6 @@ status_t BnOMX::onTransact( return OK; } - case CREATE_RENDERER: - { - CHECK_INTERFACE(IOMX, data, reply); - - sp<ISurface> isurface = - interface_cast<ISurface>(data.readStrongBinder()); - - const char *componentName = data.readCString(); - - OMX_COLOR_FORMATTYPE colorFormat = - static_cast<OMX_COLOR_FORMATTYPE>(data.readInt32()); - - size_t encodedWidth = (size_t)data.readInt32(); - size_t encodedHeight = (size_t)data.readInt32(); - size_t displayWidth = (size_t)data.readInt32(); - size_t displayHeight = (size_t)data.readInt32(); - int32_t rotationDegrees = data.readInt32(); - - sp<IOMXRenderer> renderer = - createRenderer(isurface, componentName, colorFormat, - encodedWidth, encodedHeight, - displayWidth, displayHeight, - rotationDegrees); - - reply->writeStrongBinder(renderer->asBinder()); - - return OK; - } - default: return BBinder::onTransact(code, data, reply, flags); } @@ -746,44 +713,4 @@ status_t BnOMXObserver::onTransact( } } -//////////////////////////////////////////////////////////////////////////////// - -class BpOMXRenderer : public BpInterface<IOMXRenderer> { -public: - BpOMXRenderer(const sp<IBinder> &impl) - : BpInterface<IOMXRenderer>(impl) { - } - - virtual void render(IOMX::buffer_id buffer) { - Parcel data, reply; - data.writeInterfaceToken(IOMXRenderer::getInterfaceDescriptor()); - data.writeIntPtr((intptr_t)buffer); - - // NOTE: Do NOT make this a ONE_WAY call, it must be synchronous - // so that the caller knows when to recycle the buffer. 
- remote()->transact(RENDERER_RENDER, data, &reply); - } -}; - -IMPLEMENT_META_INTERFACE(OMXRenderer, "android.hardware.IOMXRenderer"); - -status_t BnOMXRenderer::onTransact( - uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { - switch (code) { - case RENDERER_RENDER: - { - CHECK_INTERFACE(IOMXRenderer, data, reply); - - IOMX::buffer_id buffer = (void*)data.readIntPtr(); - - render(buffer); - - return NO_ERROR; - } - - default: - return BBinder::onTransact(code, data, reply, flags); - } -} - } // namespace android diff --git a/media/libmedia/IStreamSource.cpp b/media/libmedia/IStreamSource.cpp new file mode 100644 index 0000000..5069002 --- /dev/null +++ b/media/libmedia/IStreamSource.cpp @@ -0,0 +1,186 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "IStreamSource" +#include <utils/Log.h> + +#include <media/IStreamSource.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <binder/IMemory.h> +#include <binder/Parcel.h> + +namespace android { + +enum { + // IStreamSource + SET_LISTENER = IBinder::FIRST_CALL_TRANSACTION, + SET_BUFFERS, + ON_BUFFER_AVAILABLE, + + // IStreamListener + QUEUE_BUFFER, + ISSUE_COMMAND, +}; + +struct BpStreamSource : public BpInterface<IStreamSource> { + BpStreamSource(const sp<IBinder> &impl) + : BpInterface<IStreamSource>(impl) { + } + + virtual void setListener(const sp<IStreamListener> &listener) { + Parcel data, reply; + data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor()); + data.writeStrongBinder(listener->asBinder()); + remote()->transact(SET_LISTENER, data, &reply); + } + + virtual void setBuffers(const Vector<sp<IMemory> > &buffers) { + Parcel data, reply; + data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor()); + data.writeInt32(static_cast<int32_t>(buffers.size())); + for (size_t i = 0; i < buffers.size(); ++i) { + data.writeStrongBinder(buffers.itemAt(i)->asBinder()); + } + remote()->transact(SET_BUFFERS, data, &reply); + } + + virtual void onBufferAvailable(size_t index) { + Parcel data, reply; + data.writeInterfaceToken(IStreamSource::getInterfaceDescriptor()); + data.writeInt32(static_cast<int32_t>(index)); + remote()->transact( + ON_BUFFER_AVAILABLE, data, &reply, IBinder::FLAG_ONEWAY); + } +}; + +IMPLEMENT_META_INTERFACE(StreamSource, "android.hardware.IStreamSource"); + +status_t BnStreamSource::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { + switch (code) { + case SET_LISTENER: + { + CHECK_INTERFACE(IStreamSource, data, reply); + setListener( + interface_cast<IStreamListener>(data.readStrongBinder())); + break; + } + + case SET_BUFFERS: + { + CHECK_INTERFACE(IStreamSource, data, reply); + size_t n = static_cast<size_t>(data.readInt32()); + Vector<sp<IMemory> > buffers; + for (size_t i = 0; i < n; ++i) { + sp<IMemory> mem = + interface_cast<IMemory>(data.readStrongBinder()); + + buffers.push(mem); + } + setBuffers(buffers); + break; + } + 
+ case ON_BUFFER_AVAILABLE: + { + CHECK_INTERFACE(IStreamSource, data, reply); + onBufferAvailable(static_cast<size_t>(data.readInt32())); + break; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } + + return OK; +} + +//////////////////////////////////////////////////////////////////////////////// + +struct BpStreamListener : public BpInterface<IStreamListener> { + BpStreamListener(const sp<IBinder> &impl) + : BpInterface<IStreamListener>(impl) { + } + + virtual void queueBuffer(size_t index, size_t size) { + Parcel data, reply; + data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor()); + data.writeInt32(static_cast<int32_t>(index)); + data.writeInt32(static_cast<int32_t>(size)); + + remote()->transact(QUEUE_BUFFER, data, &reply, IBinder::FLAG_ONEWAY); + } + + virtual void issueCommand( + Command cmd, bool synchronous, const sp<AMessage> &msg) { + Parcel data, reply; + data.writeInterfaceToken(IStreamListener::getInterfaceDescriptor()); + data.writeInt32(static_cast<int32_t>(cmd)); + data.writeInt32(static_cast<int32_t>(synchronous)); + + if (msg != NULL) { + data.writeInt32(1); + msg->writeToParcel(&data); + } else { + data.writeInt32(0); + } + + remote()->transact(ISSUE_COMMAND, data, &reply, IBinder::FLAG_ONEWAY); + } +}; + +IMPLEMENT_META_INTERFACE(StreamListener, "android.hardware.IStreamListener"); + +status_t BnStreamListener::onTransact( + uint32_t code, const Parcel &data, Parcel *reply, uint32_t flags) { + switch (code) { + case QUEUE_BUFFER: + { + CHECK_INTERFACE(IStreamListener, data, reply); + size_t index = static_cast<size_t>(data.readInt32()); + size_t size = static_cast<size_t>(data.readInt32()); + + queueBuffer(index, size); + break; + } + + case ISSUE_COMMAND: + { + CHECK_INTERFACE(IStreamListener, data, reply); + Command cmd = static_cast<Command>(data.readInt32()); + + bool synchronous = static_cast<bool>(data.readInt32()); + + sp<AMessage> msg; + + if (data.readInt32()) { + msg = AMessage::FromParcel(data); + } + + issueCommand(cmd, synchronous, msg); + break; + } + + default: + return BBinder::onTransact(code, data, reply, flags); + } + + return OK; +} + +} // namespace android diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp index 3869389..9ad63f0 100644 --- a/media/libmedia/MediaProfiles.cpp +++ b/media/libmedia/MediaProfiles.cpp @@ -59,8 +59,21 @@ const MediaProfiles::NameToTagMap MediaProfiles::sAudioDecoderNameMap[] = { }; const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = { + {"low", CAMCORDER_QUALITY_LOW}, {"high", CAMCORDER_QUALITY_HIGH}, - {"low", CAMCORDER_QUALITY_LOW} + {"qcif", CAMCORDER_QUALITY_QCIF}, + {"cif", CAMCORDER_QUALITY_CIF}, + {"480p", CAMCORDER_QUALITY_480P}, + {"720p", CAMCORDER_QUALITY_720P}, + {"1080p", CAMCORDER_QUALITY_1080P}, + + {"timelapselow", CAMCORDER_QUALITY_TIME_LAPSE_LOW}, + {"timelapsehigh", CAMCORDER_QUALITY_TIME_LAPSE_HIGH}, + {"timelapseqcif", CAMCORDER_QUALITY_TIME_LAPSE_QCIF}, + {"timelapsecif", CAMCORDER_QUALITY_TIME_LAPSE_CIF}, + {"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P}, + {"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P}, + {"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P} }; /*static*/ void @@ -411,24 +424,57 @@ MediaProfiles::createDefaultVideoEncoders(MediaProfiles *profiles) } /*static*/ MediaProfiles::CamcorderProfile* -MediaProfiles::createDefaultCamcorderHighProfile() +MediaProfiles::createDefaultCamcorderTimeLapseQcifProfile(camcorder_quality quality) { MediaProfiles::VideoCodec *videoCodec = - 
new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20); + new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 1000000, 176, 144, 20); + + AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1); + CamcorderProfile *profile = new MediaProfiles::CamcorderProfile; + profile->mCameraId = 0; + profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP; + profile->mQuality = quality; + profile->mDuration = 60; + profile->mVideoCodec = videoCodec; + profile->mAudioCodec = audioCodec; + return profile; +} + +/*static*/ MediaProfiles::CamcorderProfile* +MediaProfiles::createDefaultCamcorderTimeLapse480pProfile(camcorder_quality quality) +{ + MediaProfiles::VideoCodec *videoCodec = + new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 20000000, 720, 480, 20); AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1); CamcorderProfile *profile = new MediaProfiles::CamcorderProfile; profile->mCameraId = 0; profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP; - profile->mQuality = CAMCORDER_QUALITY_HIGH; + profile->mQuality = quality; profile->mDuration = 60; profile->mVideoCodec = videoCodec; profile->mAudioCodec = audioCodec; return profile; } +/*static*/ void +MediaProfiles::createDefaultCamcorderTimeLapseLowProfiles( + MediaProfiles::CamcorderProfile **lowTimeLapseProfile, + MediaProfiles::CamcorderProfile **lowSpecificTimeLapseProfile) { + *lowTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_LOW); + *lowSpecificTimeLapseProfile = createDefaultCamcorderTimeLapseQcifProfile(CAMCORDER_QUALITY_TIME_LAPSE_QCIF); +} + +/*static*/ void +MediaProfiles::createDefaultCamcorderTimeLapseHighProfiles( + MediaProfiles::CamcorderProfile **highTimeLapseProfile, + MediaProfiles::CamcorderProfile **highSpecificTimeLapseProfile) { + *highTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_HIGH); + *highSpecificTimeLapseProfile = createDefaultCamcorderTimeLapse480pProfile(CAMCORDER_QUALITY_TIME_LAPSE_480P); +} + /*static*/ MediaProfiles::CamcorderProfile* -MediaProfiles::createDefaultCamcorderLowProfile() +MediaProfiles::createDefaultCamcorderQcifProfile(camcorder_quality quality) { MediaProfiles::VideoCodec *videoCodec = new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 192000, 176, 144, 20); @@ -439,18 +485,72 @@ MediaProfiles::createDefaultCamcorderLowProfile() MediaProfiles::CamcorderProfile *profile = new MediaProfiles::CamcorderProfile; profile->mCameraId = 0; profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP; - profile->mQuality = CAMCORDER_QUALITY_LOW; + profile->mQuality = quality; profile->mDuration = 30; profile->mVideoCodec = videoCodec; profile->mAudioCodec = audioCodec; return profile; } +/*static*/ MediaProfiles::CamcorderProfile* +MediaProfiles::createDefaultCamcorderCifProfile(camcorder_quality quality) +{ + MediaProfiles::VideoCodec *videoCodec = + new MediaProfiles::VideoCodec(VIDEO_ENCODER_H263, 360000, 352, 288, 20); + + AudioCodec *audioCodec = new AudioCodec(AUDIO_ENCODER_AMR_NB, 12200, 8000, 1); + CamcorderProfile *profile = new MediaProfiles::CamcorderProfile; + profile->mCameraId = 0; + profile->mFileFormat = OUTPUT_FORMAT_THREE_GPP; + profile->mQuality = quality; + profile->mDuration = 60; + profile->mVideoCodec = videoCodec; + profile->mAudioCodec = audioCodec; + return profile; +} + +/*static*/ void +MediaProfiles::createDefaultCamcorderLowProfiles( + MediaProfiles::CamcorderProfile **lowProfile, + MediaProfiles::CamcorderProfile **lowSpecificProfile) { + *lowProfile = 
createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_LOW); + *lowSpecificProfile = createDefaultCamcorderQcifProfile(CAMCORDER_QUALITY_QCIF); +} + +/*static*/ void +MediaProfiles::createDefaultCamcorderHighProfiles( + MediaProfiles::CamcorderProfile **highProfile, + MediaProfiles::CamcorderProfile **highSpecificProfile) { + *highProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_HIGH); + *highSpecificProfile = createDefaultCamcorderCifProfile(CAMCORDER_QUALITY_CIF); +} + /*static*/ void MediaProfiles::createDefaultCamcorderProfiles(MediaProfiles *profiles) { - profiles->mCamcorderProfiles.add(createDefaultCamcorderHighProfile()); - profiles->mCamcorderProfiles.add(createDefaultCamcorderLowProfile()); + // low camcorder profiles. + MediaProfiles::CamcorderProfile *lowProfile, *lowSpecificProfile; + createDefaultCamcorderLowProfiles(&lowProfile, &lowSpecificProfile); + profiles->mCamcorderProfiles.add(lowProfile); + profiles->mCamcorderProfiles.add(lowSpecificProfile); + + // high camcorder profiles. + MediaProfiles::CamcorderProfile* highProfile, *highSpecificProfile; + createDefaultCamcorderHighProfiles(&highProfile, &highSpecificProfile); + profiles->mCamcorderProfiles.add(highProfile); + profiles->mCamcorderProfiles.add(highSpecificProfile); + + // low camcorder time lapse profiles. + MediaProfiles::CamcorderProfile *lowTimeLapseProfile, *lowSpecificTimeLapseProfile; + createDefaultCamcorderTimeLapseLowProfiles(&lowTimeLapseProfile, &lowSpecificTimeLapseProfile); + profiles->mCamcorderProfiles.add(lowTimeLapseProfile); + profiles->mCamcorderProfiles.add(lowSpecificTimeLapseProfile); + + // high camcorder time lapse profiles. + MediaProfiles::CamcorderProfile *highTimeLapseProfile, *highSpecificTimeLapseProfile; + createDefaultCamcorderTimeLapseHighProfiles(&highTimeLapseProfile, &highSpecificTimeLapseProfile); + profiles->mCamcorderProfiles.add(highTimeLapseProfile); + profiles->mCamcorderProfiles.add(highSpecificTimeLapseProfile); } /*static*/ void @@ -668,13 +768,8 @@ Vector<audio_decoder> MediaProfiles::getAudioDecoders() const return decoders; // copy out } -int MediaProfiles::getCamcorderProfileParamByName(const char *name, - int cameraId, - camcorder_quality quality) const +int MediaProfiles::getCamcorderProfileIndex(int cameraId, camcorder_quality quality) const { - LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d", - name, cameraId, quality); - int index = -1; for (size_t i = 0, n = mCamcorderProfiles.size(); i < n; ++i) { if (mCamcorderProfiles[i]->mCameraId == cameraId && @@ -683,6 +778,17 @@ int MediaProfiles::getCamcorderProfileParamByName(const char *name, break; } } + return index; +} + +int MediaProfiles::getCamcorderProfileParamByName(const char *name, + int cameraId, + camcorder_quality quality) const +{ + LOGV("getCamcorderProfileParamByName: %s for camera %d, quality %d", + name, cameraId, quality); + + int index = getCamcorderProfileIndex(cameraId, quality); if (index == -1) { LOGE("The given camcorder profile camera %d quality %d is not found", cameraId, quality); @@ -705,6 +811,11 @@ int MediaProfiles::getCamcorderProfileParamByName(const char *name, return -1; } +bool MediaProfiles::hasCamcorderProfile(int cameraId, camcorder_quality quality) const +{ + return (getCamcorderProfileIndex(cameraId, quality) != -1); +} + Vector<int> MediaProfiles::getImageEncodingQualityLevels(int cameraId) const { Vector<int> result; diff --git a/media/libmedia/MediaScanner.cpp b/media/libmedia/MediaScanner.cpp index c5112a5..5ec573e 100644 --- 
a/media/libmedia/MediaScanner.cpp +++ b/media/libmedia/MediaScanner.cpp @@ -48,8 +48,7 @@ const char *MediaScanner::locale() const { } status_t MediaScanner::processDirectory( - const char *path, const char *extensions, - MediaScannerClient &client, + const char *path, MediaScannerClient &client, ExceptionCheck exceptionCheck, void *exceptionEnv) { int pathLength = strlen(path); if (pathLength >= PATH_MAX) { @@ -72,38 +71,20 @@ status_t MediaScanner::processDirectory( status_t result = doProcessDirectory( - pathBuffer, pathRemaining, extensions, client, - exceptionCheck, exceptionEnv); + pathBuffer, pathRemaining, client, exceptionCheck, exceptionEnv); free(pathBuffer); return result; } -static bool fileMatchesExtension(const char* path, const char* extensions) { - const char* extension = strrchr(path, '.'); - if (!extension) return false; - ++extension; // skip the dot - if (extension[0] == 0) return false; - - while (extensions[0]) { - const char* comma = strchr(extensions, ','); - size_t length = (comma ? comma - extensions : strlen(extensions)); - if (length == strlen(extension) && strncasecmp(extension, extensions, length) == 0) return true; - extensions += length; - if (extensions[0] == ',') ++extensions; - } - - return false; -} - status_t MediaScanner::doProcessDirectory( - char *path, int pathRemaining, const char *extensions, - MediaScannerClient &client, ExceptionCheck exceptionCheck, - void *exceptionEnv) { + char *path, int pathRemaining, MediaScannerClient &client, + ExceptionCheck exceptionCheck, void *exceptionEnv) { // place to copy file or directory name char* fileSpot = path + strlen(path); struct dirent* entry; + struct stat statbuf; // ignore directories that contain a ".nomedia" file if (pathRemaining >= 8 /* strlen(".nomedia") */ ) { @@ -133,12 +114,18 @@ status_t MediaScanner::doProcessDirectory( continue; } + int nameLength = strlen(name); + if (nameLength + 1 > pathRemaining) { + // path too long! + continue; + } + strcpy(fileSpot, name); + int type = entry->d_type; if (type == DT_UNKNOWN) { // If the type is unknown, stat() the file instead. // This is sometimes necessary when accessing NFS mounted filesystems, but // could be needed in other cases well. - struct stat statbuf; if (stat(path, &statbuf) == 0) { if (S_ISREG(statbuf.st_mode)) { type = DT_REG; @@ -150,34 +137,29 @@ status_t MediaScanner::doProcessDirectory( } } if (type == DT_REG || type == DT_DIR) { - int nameLength = strlen(name); - bool isDirectory = (type == DT_DIR); - - if (nameLength > pathRemaining || (isDirectory && nameLength + 1 > pathRemaining)) { - // path too long! - continue; - } - - strcpy(fileSpot, name); - if (isDirectory) { + if (type == DT_DIR) { // ignore directories with a name that starts with '.' 
// for example, the Mac ".Trashes" directory if (name[0] == '.') continue; + // report the directory to the client + if (stat(path, &statbuf) == 0) { + client.scanFile(path, statbuf.st_mtime, 0, true); + } + + // and now process its contents strcat(fileSpot, "/"); - int err = doProcessDirectory(path, pathRemaining - nameLength - 1, extensions, client, exceptionCheck, exceptionEnv); + int err = doProcessDirectory(path, pathRemaining - nameLength - 1, client, + exceptionCheck, exceptionEnv); if (err) { // pass exceptions up - ignore other errors if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure; LOGE("Error processing '%s' - skipping\n", path); continue; } - } else if (fileMatchesExtension(path, extensions)) { - struct stat statbuf; + } else { stat(path, &statbuf); - if (statbuf.st_size > 0) { - client.scanFile(path, statbuf.st_mtime, statbuf.st_size); - } + client.scanFile(path, statbuf.st_mtime, statbuf.st_size, false); if (exceptionCheck && exceptionCheck(exceptionEnv)) goto failure; } } diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp index cc41e66..87c8fe4 100644 --- a/media/libmedia/mediaplayer.cpp +++ b/media/libmedia/mediaplayer.cpp @@ -172,16 +172,6 @@ status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply) return INVALID_OPERATION; } -status_t MediaPlayer::suspend() { - Mutex::Autolock _l(mLock); - return mPlayer->suspend(); -} - -status_t MediaPlayer::resume() { - Mutex::Autolock _l(mLock); - return mPlayer->resume(); -} - status_t MediaPlayer::setMetadataFilter(const Parcel& filter) { LOGD("setMetadataFilter"); @@ -207,10 +197,8 @@ status_t MediaPlayer::setVideoSurface(const sp<Surface>& surface) LOGV("setVideoSurface"); Mutex::Autolock _l(mLock); if (mPlayer == 0) return NO_INIT; - if (surface != NULL) - return mPlayer->setVideoSurface(surface->getISurface()); - else - return mPlayer->setVideoSurface(NULL); + + return mPlayer->setVideoSurface(surface); } // must call with lock held @@ -449,6 +437,9 @@ status_t MediaPlayer::reset() } else { mCurrentState = MEDIA_PLAYER_IDLE; } + // setDataSource has to be called again to create a + // new mediaplayer. + mPlayer = 0; return ret; } clear_l(); @@ -616,7 +607,9 @@ void MediaPlayer::notify(int msg, int ext1, int ext2) case MEDIA_INFO: // ext1: Media framework error code. // ext2: Implementation dependant error code. 
- LOGW("info/warning (%d, %d)", ext1, ext2); + if (ext1 != MEDIA_INFO_VIDEO_TRACK_LAGGING) { + LOGW("info/warning (%d, %d)", ext1, ext2); + } break; case MEDIA_SEEK_COMPLETE: LOGV("Received seek complete"); diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp index e20e3ba..fd575fe 100644 --- a/media/libmedia/mediarecorder.cpp +++ b/media/libmedia/mediarecorder.cpp @@ -65,7 +65,7 @@ status_t MediaRecorder::setPreviewSurface(const sp<Surface>& surface) return INVALID_OPERATION; } - status_t ret = mMediaRecorder->setPreviewSurface(surface->getISurface()); + status_t ret = mMediaRecorder->setPreviewSurface(surface); if (OK != ret) { LOGV("setPreviewSurface failed: %d", ret); mCurrentState = MEDIA_RECORDER_ERROR; @@ -308,6 +308,32 @@ status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length) return ret; } +status_t MediaRecorder::setOutputFileAuxiliary(int fd) +{ + LOGV("setOutputFileAuxiliary(%d)", fd); + if(mMediaRecorder == NULL) { + LOGE("media recorder is not initialized yet"); + return INVALID_OPERATION; + } + if (mIsAuxiliaryOutputFileSet) { + LOGE("output file has already been set"); + return INVALID_OPERATION; + } + if (!(mCurrentState & MEDIA_RECORDER_DATASOURCE_CONFIGURED)) { + LOGE("setOutputFile called in an invalid state(%d)", mCurrentState); + return INVALID_OPERATION; + } + + status_t ret = mMediaRecorder->setOutputFileAuxiliary(fd); + if (OK != ret) { + LOGV("setOutputFileAuxiliary failed: %d", ret); + mCurrentState = MEDIA_RECORDER_ERROR; + return ret; + } + mIsAuxiliaryOutputFileSet = true; + return ret; +} + status_t MediaRecorder::setVideoSize(int width, int height) { LOGV("setVideoSize(%d, %d)", width, height); @@ -571,6 +597,7 @@ void MediaRecorder::doCleanUp() mIsAudioEncoderSet = false; mIsVideoEncoderSet = false; mIsOutputFileSet = false; + mIsAuxiliaryOutputFileSet = false; } // Release should be OK in any state @@ -643,4 +670,3 @@ void MediaRecorder::died() } }; // namespace android - diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk index 55846be..f7f0d95 100644 --- a/media/libmediaplayerservice/Android.mk +++ b/media/libmediaplayerservice/Android.mk @@ -31,20 +31,12 @@ LOCAL_SHARED_LIBRARIES := \ libandroid_runtime \ libstagefright \ libstagefright_omx \ - libstagefright_color_conversion \ libstagefright_foundation \ libsurfaceflinger_client LOCAL_STATIC_LIBRARIES := \ - libstagefright_rtsp - -ifneq ($(BUILD_WITHOUT_PV),true) -LOCAL_SHARED_LIBRARIES += \ - libopencore_player \ - libopencore_author -else -LOCAL_CFLAGS += -DNO_OPENCORE -endif + libstagefright_rtsp \ + libstagefright_nuplayer \ ifneq ($(TARGET_SIMULATOR),true) LOCAL_SHARED_LIBRARIES += libdl @@ -56,9 +48,11 @@ LOCAL_C_INCLUDES := \ $(TOP)/frameworks/base/include/media/stagefright/openmax \ $(TOP)/frameworks/base/media/libstagefright/include \ $(TOP)/frameworks/base/media/libstagefright/rtsp \ - $(TOP)/external/tremolo/Tremolo + $(TOP)/external/tremolo/Tremolo \ LOCAL_MODULE:= libmediaplayerservice include $(BUILD_SHARED_LIBRARY) +include $(call all-makefiles-under,$(LOCAL_PATH)) + diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp index c43e9bb..439e4ce 100644 --- a/media/libmediaplayerservice/MediaPlayerService.cpp +++ b/media/libmediaplayerservice/MediaPlayerService.cpp @@ -56,9 +56,9 @@ #include "MetadataRetrieverClient.h" #include "MidiFile.h" -#include <media/PVPlayer.h> #include "TestPlayerStub.h" #include "StagefrightPlayer.h" +#include 
"nuplayer/NuPlayerDriver.h" #include <OMX.h> @@ -196,11 +196,6 @@ extmap FILE_EXTS [] = { {".rtttl", SONIVOX_PLAYER}, {".rtx", SONIVOX_PLAYER}, {".ota", SONIVOX_PLAYER}, -#ifndef NO_OPENCORE - {".wma", PV_PLAYER}, - {".wmv", PV_PLAYER}, - {".asf", PV_PLAYER}, -#endif }; // TODO: Find real cause of Audio/Video delay in PV framework and remove this workaround @@ -284,6 +279,26 @@ sp<IMediaPlayer> MediaPlayerService::create(pid_t pid, const sp<IMediaPlayerClie return c; } +sp<IMediaPlayer> MediaPlayerService::create( + pid_t pid, const sp<IMediaPlayerClient> &client, + const sp<IStreamSource> &source, int audioSessionId) { + int32_t connId = android_atomic_inc(&mNextConnId); + sp<Client> c = new Client(this, pid, connId, client, audioSessionId); + + LOGV("Create new client(%d) from pid %d, audioSessionId=%d", + connId, pid, audioSessionId); + + if (OK != c->setDataSource(source)) { + c.clear(); + } else { + wp<Client> w = c; + Mutex::Autolock lock(mLock); + mClients.add(w); + } + + return c; +} + sp<IOMX> MediaPlayerService::getOMX() { Mutex::Autolock autoLock(mLock); @@ -691,14 +706,6 @@ player_type getPlayerType(int fd, int64_t offset, int64_t length) if (ident == 0x5367674f) // 'OggS' return STAGEFRIGHT_PLAYER; -#ifndef NO_OPENCORE - if (ident == 0x75b22630) { - // The magic number for .asf files, i.e. wmv and wma content. - // These are not currently supported through stagefright. - return PV_PLAYER; - } -#endif - // Some kind of MIDI? EAS_DATA_HANDLE easdata; if (EAS_Init(&easdata) == EAS_SUCCESS) { @@ -725,6 +732,21 @@ player_type getPlayerType(const char* url) return TEST_PLAYER; } + char value[PROPERTY_VALUE_MAX]; + if (!property_get("media.httplive.disable-nuplayer", value, NULL) + || (strcasecmp(value, "true") && strcmp(value, "1"))) { + if (!strncasecmp("http://", url, 7)) { + size_t len = strlen(url); + if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) { + return NU_PLAYER; + } + + if (strstr(url,"m3u8")) { + return NU_PLAYER; + } + } + } + // use MidiFile for MIDI extensions int lenURL = strlen(url); for (int i = 0; i < NELEM(FILE_EXTS); ++i) { @@ -737,16 +759,6 @@ player_type getPlayerType(const char* url) } } - if (!strncasecmp(url, "rtsp://", 7)) { - char value[PROPERTY_VALUE_MAX]; - if (property_get("media.stagefright.enable-rtsp", value, NULL) - && (strcmp(value, "1") && strcasecmp(value, "true"))) { - // For now, we're going to use PV for rtsp-based playback - // by default until we can clear up a few more issues. 
- return PV_PLAYER; - } - } - return getDefaultPlayerType(); } @@ -755,12 +767,6 @@ static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie, { sp<MediaPlayerBase> p; switch (playerType) { -#ifndef NO_OPENCORE - case PV_PLAYER: - LOGV(" create PVPlayer"); - p = new PVPlayer(); - break; -#endif case SONIVOX_PLAYER: LOGV(" create MidiFile"); p = new MidiFile(); @@ -769,10 +775,17 @@ static sp<MediaPlayerBase> createPlayer(player_type playerType, void* cookie, LOGV(" create StagefrightPlayer"); p = new StagefrightPlayer; break; + case NU_PLAYER: + LOGV(" create NuPlayer"); + p = new NuPlayerDriver; + break; case TEST_PLAYER: LOGV("Create Test Player stub"); p = new TestPlayerStub(); break; + default: + LOGE("Unknown player type: %d", playerType); + return NULL; } if (p != NULL) { if (p->initCheck() == NO_ERROR) { @@ -891,7 +904,31 @@ status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64 return mStatus; } -status_t MediaPlayerService::Client::setVideoSurface(const sp<ISurface>& surface) +status_t MediaPlayerService::Client::setDataSource( + const sp<IStreamSource> &source) { + // create the right type of player + sp<MediaPlayerBase> p = createPlayer(NU_PLAYER); + + if (p == NULL) { + return NO_INIT; + } + + if (!p->hardwareOutput()) { + mAudioOutput = new AudioOutput(mAudioSessionId); + static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput); + } + + // now set data source + mStatus = p->setDataSource(source); + + if (mStatus == OK) { + mPlayer = p; + } + + return mStatus; +} + +status_t MediaPlayerService::Client::setVideoSurface(const sp<Surface>& surface) { LOGV("[%d] setVideoSurface(%p)", mConnId, surface.get()); sp<MediaPlayerBase> p = getPlayer(); @@ -966,20 +1003,6 @@ status_t MediaPlayerService::Client::getMetadata( return OK; } -status_t MediaPlayerService::Client::suspend() { - sp<MediaPlayerBase> p = getPlayer(); - if (p == 0) return UNKNOWN_ERROR; - - return p->suspend(); -} - -status_t MediaPlayerService::Client::resume() { - sp<MediaPlayerBase> p = getPlayer(); - if (p == 0) return UNKNOWN_ERROR; - - return p->resume(); -} - status_t MediaPlayerService::Client::prepareAsync() { LOGV("[%d] prepareAsync", mConnId); diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h index 4492e20..62f8ed6 100644 --- a/media/libmediaplayerservice/MediaPlayerService.h +++ b/media/libmediaplayerservice/MediaPlayerService.h @@ -191,6 +191,11 @@ public: const KeyedVector<String8, String8> *headers, int audioSessionId); virtual sp<IMediaPlayer> create(pid_t pid, const sp<IMediaPlayerClient>& client, int fd, int64_t offset, int64_t length, int audioSessionId); + + virtual sp<IMediaPlayer> create( + pid_t pid, const sp<IMediaPlayerClient> &client, + const sp<IStreamSource> &source, int audioSessionId); + virtual sp<IMemory> decode(const char* url, uint32_t *pSampleRate, int* pNumChannels, int* pFormat); virtual sp<IMemory> decode(int fd, int64_t offset, int64_t length, uint32_t *pSampleRate, int* pNumChannels, int* pFormat); virtual sp<IOMX> getOMX(); @@ -206,7 +211,7 @@ private: // IMediaPlayer interface virtual void disconnect(); - virtual status_t setVideoSurface(const sp<ISurface>& surface); + virtual status_t setVideoSurface(const sp<Surface>& surface); virtual status_t prepareAsync(); virtual status_t start(); virtual status_t stop(); @@ -224,8 +229,6 @@ private: virtual status_t getMetadata(bool update_only, bool apply_filter, Parcel *reply); - virtual status_t suspend(); - 
virtual status_t resume(); virtual status_t setAuxEffectSendLevel(float level); virtual status_t attachAuxEffect(int effectId); @@ -236,6 +239,9 @@ private: const KeyedVector<String8, String8> *headers); status_t setDataSource(int fd, int64_t offset, int64_t length); + + status_t setDataSource(const sp<IStreamSource> &source); + static void notify(void* cookie, int msg, int ext1, int ext2); pid_t pid() const { return mPid; } diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp index 19915f1..1a1780c 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.cpp +++ b/media/libmediaplayerservice/MediaRecorderClient.cpp @@ -31,10 +31,6 @@ #include <binder/MemoryHeapBase.h> #include <binder/MemoryBase.h> -#ifndef NO_OPENCORE -#include <media/PVMediaRecorder.h> -#endif - #include <utils/String16.h> #include <media/AudioTrack.h> @@ -70,7 +66,7 @@ status_t MediaRecorderClient::setCamera(const sp<ICamera>& camera) return mRecorder->setCamera(camera); } -status_t MediaRecorderClient::setPreviewSurface(const sp<ISurface>& surface) +status_t MediaRecorderClient::setPreviewSurface(const sp<Surface>& surface) { LOGV("setPreviewSurface"); Mutex::Autolock lock(mLock); @@ -164,6 +160,17 @@ status_t MediaRecorderClient::setOutputFile(int fd, int64_t offset, int64_t leng return mRecorder->setOutputFile(fd, offset, length); } +status_t MediaRecorderClient::setOutputFileAuxiliary(int fd) +{ + LOGV("setOutputFileAuxiliary(%d)", fd); + Mutex::Autolock lock(mLock); + if (mRecorder == NULL) { + LOGE("recorder is not initialized"); + return NO_INIT; + } + return mRecorder->setOutputFileAuxiliary(fd); +} + status_t MediaRecorderClient::setVideoSize(int width, int height) { LOGV("setVideoSize(%dx%d)", width, height); @@ -293,22 +300,7 @@ MediaRecorderClient::MediaRecorderClient(const sp<MediaPlayerService>& service, { LOGV("Client constructor"); mPid = pid; - - char value[PROPERTY_VALUE_MAX]; - if (!property_get("media.stagefright.enable-record", value, NULL) - || !strcmp(value, "1") || !strcasecmp(value, "true")) { - mRecorder = new StagefrightRecorder; - } else -#ifndef NO_OPENCORE - { - mRecorder = new PVMediaRecorder(); - } -#else - { - mRecorder = NULL; - } -#endif - + mRecorder = new StagefrightRecorder; mMediaPlayerService = service; } @@ -337,4 +329,3 @@ status_t MediaRecorderClient::dump(int fd, const Vector<String16>& args) const { } }; // namespace android - diff --git a/media/libmediaplayerservice/MediaRecorderClient.h b/media/libmediaplayerservice/MediaRecorderClient.h index 1d1913d..fded98e 100644 --- a/media/libmediaplayerservice/MediaRecorderClient.h +++ b/media/libmediaplayerservice/MediaRecorderClient.h @@ -29,7 +29,7 @@ class MediaRecorderClient : public BnMediaRecorder { public: virtual status_t setCamera(const sp<ICamera>& camera); - virtual status_t setPreviewSurface(const sp<ISurface>& surface); + virtual status_t setPreviewSurface(const sp<Surface>& surface); virtual status_t setVideoSource(int vs); virtual status_t setAudioSource(int as); virtual status_t setOutputFormat(int of); @@ -37,6 +37,7 @@ public: virtual status_t setAudioEncoder(int ae); virtual status_t setOutputFile(const char* path); virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); + virtual status_t setOutputFileAuxiliary(int fd); virtual status_t setVideoSize(int width, int height); virtual status_t setVideoFrameRate(int frames_per_second); virtual status_t setParameters(const String8& params); @@ -66,4 +67,3 @@ private: }; // namespace 
android #endif // ANDROID_MEDIARECORDERCLIENT_H - diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp index 713e441..5fcf2a7 100644 --- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp +++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp @@ -35,7 +35,6 @@ #include <binder/IServiceManager.h> #include <media/MediaMetadataRetrieverInterface.h> #include <media/MediaPlayerInterface.h> -#include <media/PVMetadataRetriever.h> #include <private/media/VideoFrame.h> #include "MidiMetadataRetriever.h" #include "MetadataRetrieverClient.h" @@ -105,12 +104,6 @@ static sp<MediaMetadataRetrieverBase> createRetriever(player_type playerType) p = new StagefrightMetadataRetriever; break; } -#ifndef NO_OPENCORE - case PV_PLAYER: - LOGV("create pv metadata retriever"); - p = new PVMetadataRetriever(); - break; -#endif case SONIVOX_PLAYER: LOGV("create midi metadata retriever"); p = new MidiMetadataRetriever(); diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h index 4a60ece..aa8f3f0 100644 --- a/media/libmediaplayerservice/MidiFile.h +++ b/media/libmediaplayerservice/MidiFile.h @@ -35,7 +35,7 @@ public: const char* path, const KeyedVector<String8, String8> *headers); virtual status_t setDataSource(int fd, int64_t offset, int64_t length); - virtual status_t setVideoSurface(const sp<ISurface>& surface) { return UNKNOWN_ERROR; } + virtual status_t setVideoSurface(const sp<Surface>& surface) { return UNKNOWN_ERROR; } virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp index 6bded09..da564dc 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.cpp +++ b/media/libmediaplayerservice/StagefrightPlayer.cpp @@ -44,10 +44,14 @@ status_t StagefrightPlayer::setDataSource(int fd, int64_t offset, int64_t length return mPlayer->setDataSource(dup(fd), offset, length); } -status_t StagefrightPlayer::setVideoSurface(const sp<ISurface> &surface) { +status_t StagefrightPlayer::setDataSource(const sp<IStreamSource> &source) { + return mPlayer->setDataSource(source); +} + +status_t StagefrightPlayer::setVideoSurface(const sp<Surface> &surface) { LOGV("setVideoSurface"); - mPlayer->setISurface(surface); + mPlayer->setSurface(surface); return OK; } @@ -140,16 +144,6 @@ player_type StagefrightPlayer::playerType() { return STAGEFRIGHT_PLAYER; } -status_t StagefrightPlayer::suspend() { - LOGV("suspend"); - return mPlayer->suspend(); -} - -status_t StagefrightPlayer::resume() { - LOGV("resume"); - return mPlayer->resume(); -} - status_t StagefrightPlayer::invoke(const Parcel &request, Parcel *reply) { return INVALID_OPERATION; } diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h index 781eb44..fc72bfb 100644 --- a/media/libmediaplayerservice/StagefrightPlayer.h +++ b/media/libmediaplayerservice/StagefrightPlayer.h @@ -35,7 +35,10 @@ public: const char *url, const KeyedVector<String8, String8> *headers); virtual status_t setDataSource(int fd, int64_t offset, int64_t length); - virtual status_t setVideoSurface(const sp<ISurface> &surface); + + virtual status_t setDataSource(const sp<IStreamSource> &source); + + virtual status_t setVideoSurface(const sp<Surface> &surface); virtual status_t prepare(); virtual status_t prepareAsync(); virtual status_t start(); @@ -50,8 +53,6 @@ public: virtual 
player_type playerType(); virtual status_t invoke(const Parcel &request, Parcel *reply); virtual void setAudioSink(const sp<AudioSink> &audioSink); - virtual status_t suspend(); - virtual status_t resume(); virtual status_t getMetadata( const media::Metadata::Filter& ids, Parcel *records); diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp index 3261fe6..65df68c 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.cpp +++ b/media/libmediaplayerservice/StagefrightRecorder.cpp @@ -20,10 +20,12 @@ #include "StagefrightRecorder.h" -#include <binder/IPCThreadState.h> #include <media/stagefright/AudioSource.h> #include <media/stagefright/AMRWriter.h> #include <media/stagefright/CameraSource.h> +#include <media/stagefright/VideoSourceDownSampler.h> +#include <media/stagefright/CameraSourceTimeLapse.h> +#include <media/stagefright/MediaSourceSplitter.h> #include <media/stagefright/MPEG2TSWriter.h> #include <media/stagefright/MPEG4Writer.h> #include <media/stagefright/MediaDebug.h> @@ -33,21 +35,20 @@ #include <media/stagefright/OMXCodec.h> #include <media/MediaProfiles.h> #include <camera/ICamera.h> -#include <camera/Camera.h> #include <camera/CameraParameters.h> -#include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> #include <utils/Errors.h> #include <sys/types.h> -#include <unistd.h> #include <ctype.h> +#include <unistd.h> #include "ARTPWriter.h" namespace android { StagefrightRecorder::StagefrightRecorder() - : mWriter(NULL), - mOutputFd(-1) { + : mWriter(NULL), mWriterAux(NULL), + mOutputFd(-1), mOutputFdAux(-1) { LOGV("Constructor"); reset(); @@ -164,7 +165,8 @@ status_t StagefrightRecorder::setVideoSize(int width, int height) { status_t StagefrightRecorder::setVideoFrameRate(int frames_per_second) { LOGV("setVideoFrameRate: %d", frames_per_second); - if (frames_per_second <= 0 || frames_per_second > 30) { + if ((frames_per_second <= 0 && frames_per_second != -1) || + frames_per_second > 120) { LOGE("Invalid video frame rate: %d", frames_per_second); return BAD_VALUE; } @@ -182,26 +184,11 @@ status_t StagefrightRecorder::setCamera(const sp<ICamera> &camera) { return BAD_VALUE; } - int64_t token = IPCThreadState::self()->clearCallingIdentity(); - mFlags &= ~FLAGS_HOT_CAMERA; - mCamera = Camera::create(camera); - if (mCamera == 0) { - LOGE("Unable to connect to camera"); - IPCThreadState::self()->restoreCallingIdentity(token); - return -EBUSY; - } - - LOGV("Connected to camera"); - if (mCamera->previewEnabled()) { - LOGV("camera is hot"); - mFlags |= FLAGS_HOT_CAMERA; - } - IPCThreadState::self()->restoreCallingIdentity(token); - + mCamera = camera; return OK; } -status_t StagefrightRecorder::setPreviewSurface(const sp<ISurface> &surface) { +status_t StagefrightRecorder::setPreviewSurface(const sp<Surface> &surface) { LOGV("setPreviewSurface: %p", surface.get()); mPreviewSurface = surface; @@ -235,6 +222,24 @@ status_t StagefrightRecorder::setOutputFile(int fd, int64_t offset, int64_t leng return OK; } +status_t StagefrightRecorder::setOutputFileAuxiliary(int fd) { + LOGV("setOutputFileAuxiliary: %d", fd); + + if (fd < 0) { + LOGE("Invalid file descriptor: %d", fd); + return -EBADF; + } + + mCaptureAuxVideo = true; + + if (mOutputFdAux >= 0) { + ::close(mOutputFdAux); + } + mOutputFdAux = dup(fd); + + return OK; +} + // Attempt to parse an int64 literal optionally surrounded by whitespace, // returns true on success, false otherwise. 
static bool safe_strtoi64(const char *s, int64_t *val) { @@ -353,6 +358,8 @@ status_t StagefrightRecorder::setParamVideoRotation(int32_t degrees) { status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) { LOGV("setParamMaxFileDurationUs: %lld us", timeUs); + + // This is meant for backward compatibility for MediaRecorder.java if (timeUs <= 0) { LOGW("Max file duration is not positive: %lld us. Disabling duration limit.", timeUs); timeUs = 0; // Disable the duration limit for zero or negative values. @@ -370,7 +377,13 @@ status_t StagefrightRecorder::setParamMaxFileDurationUs(int64_t timeUs) { status_t StagefrightRecorder::setParamMaxFileSizeBytes(int64_t bytes) { LOGV("setParamMaxFileSizeBytes: %lld bytes", bytes); - if (bytes <= 1024) { // XXX: 1 kB + + // This is meant for backward compatibility for MediaRecorder.java + if (bytes <= 0) { + LOGW("Max file size is not positive: %lld bytes. " + "Disabling file size limit.", bytes); + bytes = 0; // Disable the file size limit for zero or negative values. + } else if (bytes <= 1024) { // XXX: 1 kB LOGE("Max file size is too small: %lld bytes", bytes); return BAD_VALUE; } @@ -493,6 +506,68 @@ status_t StagefrightRecorder::setParamAudioTimeScale(int32_t timeScale) { return OK; } +status_t StagefrightRecorder::setParamTimeLapseEnable(int32_t timeLapseEnable) { + LOGV("setParamTimeLapseEnable: %d", timeLapseEnable); + + if(timeLapseEnable == 0) { + mCaptureTimeLapse = false; + } else if (timeLapseEnable == 1) { + mCaptureTimeLapse = true; + } else { + return BAD_VALUE; + } + return OK; +} + +status_t StagefrightRecorder::setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs) { + LOGV("setParamTimeBetweenTimeLapseFrameCapture: %lld us", timeUs); + + // Not allowing time more than a day + if (timeUs <= 0 || timeUs > 86400*1E6) { + LOGE("Time between time lapse frame capture (%lld) is out of range [0, 1 Day]", timeUs); + return BAD_VALUE; + } + + mTimeBetweenTimeLapseFrameCaptureUs = timeUs; + return OK; +} + +status_t StagefrightRecorder::setParamAuxVideoWidth(int32_t width) { + LOGV("setParamAuxVideoWidth : %d", width); + + if (width <= 0) { + LOGE("Width (%d) is not positive", width); + return BAD_VALUE; + } + + mAuxVideoWidth = width; + return OK; +} + +status_t StagefrightRecorder::setParamAuxVideoHeight(int32_t height) { + LOGV("setParamAuxVideoHeight : %d", height); + + if (height <= 0) { + LOGE("Height (%d) is not positive", height); + return BAD_VALUE; + } + + mAuxVideoHeight = height; + return OK; +} + +status_t StagefrightRecorder::setParamAuxVideoEncodingBitRate(int32_t bitRate) { + LOGV("StagefrightRecorder::setParamAuxVideoEncodingBitRate: %d", bitRate); + + if (bitRate <= 0) { + LOGE("Invalid video encoding bit rate: %d", bitRate); + return BAD_VALUE; + } + + mAuxVideoBitRate = bitRate; + return OK; +} + status_t StagefrightRecorder::setParameter( const String8 &key, const String8 &value) { LOGV("setParameter: key (%s) => value (%s)", key.string(), value.string()); @@ -581,6 +656,32 @@ status_t StagefrightRecorder::setParameter( if (safe_strtoi32(value.string(), &timeScale)) { return setParamVideoTimeScale(timeScale); } + } else if (key == "time-lapse-enable") { + int32_t timeLapseEnable; + if (safe_strtoi32(value.string(), &timeLapseEnable)) { + return setParamTimeLapseEnable(timeLapseEnable); + } + } else if (key == "time-between-time-lapse-frame-capture") { + int64_t timeBetweenTimeLapseFrameCaptureMs; + if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) { + return 
setParamTimeBetweenTimeLapseFrameCapture( + 1000LL * timeBetweenTimeLapseFrameCaptureMs); + } + } else if (key == "video-aux-param-width") { + int32_t auxWidth; + if (safe_strtoi32(value.string(), &auxWidth)) { + return setParamAuxVideoWidth(auxWidth); + } + } else if (key == "video-aux-param-height") { + int32_t auxHeight; + if (safe_strtoi32(value.string(), &auxHeight)) { + return setParamAuxVideoHeight(auxHeight); + } + } else if (key == "video-aux-param-encoding-bitrate") { + int32_t auxVideoBitRate; + if (safe_strtoi32(value.string(), &auxVideoBitRate)) { + return setParamAuxVideoEncodingBitRate(auxVideoBitRate); + } } else { LOGE("setParameter: failed to find key %s", key.string()); } @@ -745,27 +846,12 @@ status_t StagefrightRecorder::startAMRRecording() { mAudioEncoder); return BAD_VALUE; } - if (mSampleRate != 8000) { - LOGE("Invalid sampling rate %d used for AMRNB recording", - mSampleRate); - return BAD_VALUE; - } } else { // mOutputFormat must be OUTPUT_FORMAT_AMR_WB if (mAudioEncoder != AUDIO_ENCODER_AMR_WB) { LOGE("Invlaid encoder %d used for AMRWB recording", mAudioEncoder); return BAD_VALUE; } - if (mSampleRate != 16000) { - LOGE("Invalid sample rate %d used for AMRWB recording", - mSampleRate); - return BAD_VALUE; - } - } - if (mAudioChannels != 1) { - LOGE("Invalid number of audio channels %d used for amr recording", - mAudioChannels); - return BAD_VALUE; } if (mAudioSource >= AUDIO_SOURCE_LIST_END) { @@ -773,13 +859,17 @@ status_t StagefrightRecorder::startAMRRecording() { return BAD_VALUE; } - sp<MediaSource> audioEncoder = createAudioSource(); + status_t status = BAD_VALUE; + if (OK != (status = checkAudioEncoderCapabilities())) { + return status; + } + sp<MediaSource> audioEncoder = createAudioSource(); if (audioEncoder == NULL) { return UNKNOWN_ERROR; } - mWriter = new AMRWriter(dup(mOutputFd)); + mWriter = new AMRWriter(mOutputFd); mWriter->addSource(audioEncoder); if (mMaxFileDurationUs != 0) { @@ -814,13 +904,20 @@ status_t StagefrightRecorder::startRTPRecording() { if (mAudioSource != AUDIO_SOURCE_LIST_END) { source = createAudioSource(); } else { - status_t err = setupVideoEncoder(&source); + + sp<CameraSource> cameraSource; + status_t err = setupCameraSource(&cameraSource); + if (err != OK) { + return err; + } + + err = setupVideoEncoder(cameraSource, mVideoBitRate, &source); if (err != OK) { return err; } } - mWriter = new ARTPWriter(dup(mOutputFd)); + mWriter = new ARTPWriter(mOutputFd); mWriter->addSource(source); mWriter->setListener(mListener); @@ -830,7 +927,7 @@ status_t StagefrightRecorder::startRTPRecording() { status_t StagefrightRecorder::startMPEG2TSRecording() { CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS); - sp<MediaWriter> writer = new MPEG2TSWriter(dup(mOutputFd)); + sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd); if (mAudioSource != AUDIO_SOURCE_LIST_END) { if (mAudioEncoder != AUDIO_ENCODER_AAC) { @@ -850,8 +947,14 @@ status_t StagefrightRecorder::startMPEG2TSRecording() { return ERROR_UNSUPPORTED; } + sp<CameraSource> cameraSource; + status_t err = setupCameraSource(&cameraSource); + if (err != OK) { + return err; + } + sp<MediaSource> encoder; - status_t err = setupVideoEncoder(&encoder); + err = setupVideoEncoder(cameraSource, mVideoBitRate, &encoder); if (err != OK) { return err; @@ -879,7 +982,7 @@ void StagefrightRecorder::clipVideoFrameRate() { "enc.vid.fps.min", mVideoEncoder); int maxFrameRate = mEncoderProfiles->getVideoEncoderParamByName( "enc.vid.fps.max", mVideoEncoder); - if (mFrameRate < minFrameRate) { + if (mFrameRate 
< minFrameRate && mFrameRate != -1) { LOGW("Intended video encoding frame rate (%d fps) is too small" " and will be set to (%d fps)", mFrameRate, minFrameRate); mFrameRate = minFrameRate; @@ -924,58 +1027,89 @@ void StagefrightRecorder::clipVideoFrameWidth() { } } -status_t StagefrightRecorder::setupCameraSource() { - clipVideoBitRate(); - clipVideoFrameRate(); - clipVideoFrameWidth(); - clipVideoFrameHeight(); +status_t StagefrightRecorder::checkVideoEncoderCapabilities() { + if (!mCaptureTimeLapse) { + // Dont clip for time lapse capture as encoder will have enough + // time to encode because of slow capture rate of time lapse. + clipVideoBitRate(); + clipVideoFrameRate(); + clipVideoFrameWidth(); + clipVideoFrameHeight(); + } + return OK; +} - int64_t token = IPCThreadState::self()->clearCallingIdentity(); - if (mCamera == 0) { - mCamera = Camera::connect(mCameraId); - if (mCamera == 0) { - LOGE("Camera connection could not be established."); - return -EBUSY; - } - mFlags &= ~FLAGS_HOT_CAMERA; - mCamera->lock(); - } - - // Set the actual video recording frame size - CameraParameters params(mCamera->getParameters()); - params.setPreviewSize(mVideoWidth, mVideoHeight); - params.setPreviewFrameRate(mFrameRate); - String8 s = params.flatten(); - if (OK != mCamera->setParameters(s)) { - LOGE("Could not change settings." - " Someone else is using camera %d?", mCameraId); - return -EBUSY; - } - CameraParameters newCameraParams(mCamera->getParameters()); - - // Check on video frame size - int frameWidth = 0, frameHeight = 0; - newCameraParams.getPreviewSize(&frameWidth, &frameHeight); - if (frameWidth < 0 || frameWidth != mVideoWidth || - frameHeight < 0 || frameHeight != mVideoHeight) { - LOGE("Failed to set the video frame size to %dx%d", - mVideoWidth, mVideoHeight); - IPCThreadState::self()->restoreCallingIdentity(token); - return UNKNOWN_ERROR; +status_t StagefrightRecorder::checkAudioEncoderCapabilities() { + clipAudioBitRate(); + clipAudioSampleRate(); + clipNumberOfAudioChannels(); + return OK; +} + +void StagefrightRecorder::clipAudioBitRate() { + LOGV("clipAudioBitRate: encoder %d", mAudioEncoder); + + int minAudioBitRate = + mEncoderProfiles->getAudioEncoderParamByName( + "enc.aud.bps.min", mAudioEncoder); + if (mAudioBitRate < minAudioBitRate) { + LOGW("Intended audio encoding bit rate (%d) is too small" + " and will be set to (%d)", mAudioBitRate, minAudioBitRate); + mAudioBitRate = minAudioBitRate; + } + + int maxAudioBitRate = + mEncoderProfiles->getAudioEncoderParamByName( + "enc.aud.bps.max", mAudioEncoder); + if (mAudioBitRate > maxAudioBitRate) { + LOGW("Intended audio encoding bit rate (%d) is too large" + " and will be set to (%d)", mAudioBitRate, maxAudioBitRate); + mAudioBitRate = maxAudioBitRate; } +} - // Check on video frame rate - int frameRate = newCameraParams.getPreviewFrameRate(); - if (frameRate < 0 || (frameRate - mFrameRate) != 0) { - LOGE("Failed to set frame rate to %d fps. The actual " - "frame rate is %d", mFrameRate, frameRate); +void StagefrightRecorder::clipAudioSampleRate() { + LOGV("clipAudioSampleRate: encoder %d", mAudioEncoder); + + int minSampleRate = + mEncoderProfiles->getAudioEncoderParamByName( + "enc.aud.hz.min", mAudioEncoder); + if (mSampleRate < minSampleRate) { + LOGW("Intended audio sample rate (%d) is too small" + " and will be set to (%d)", mSampleRate, minSampleRate); + mSampleRate = minSampleRate; } - // This CHECK is good, since we just passed the lock/unlock - // check earlier by calling mCamera->setParameters(). 
- CHECK_EQ(OK, mCamera->setPreviewDisplay(mPreviewSurface)); - IPCThreadState::self()->restoreCallingIdentity(token); - return OK; + int maxSampleRate = + mEncoderProfiles->getAudioEncoderParamByName( + "enc.aud.hz.max", mAudioEncoder); + if (mSampleRate > maxSampleRate) { + LOGW("Intended audio sample rate (%d) is too large" + " and will be set to (%d)", mSampleRate, maxSampleRate); + mSampleRate = maxSampleRate; + } +} + +void StagefrightRecorder::clipNumberOfAudioChannels() { + LOGV("clipNumberOfAudioChannels: encoder %d", mAudioEncoder); + + int minChannels = + mEncoderProfiles->getAudioEncoderParamByName( + "enc.aud.ch.min", mAudioEncoder); + if (mAudioChannels < minChannels) { + LOGW("Intended number of audio channels (%d) is too small" + " and will be set to (%d)", mAudioChannels, minChannels); + mAudioChannels = minChannels; + } + + int maxChannels = + mEncoderProfiles->getAudioEncoderParamByName( + "enc.aud.ch.max", mAudioEncoder); + if (mAudioChannels > maxChannels) { + LOGW("Intended number of audio channels (%d) is too large" + " and will be set to (%d)", mAudioChannels, maxChannels); + mAudioChannels = maxChannels; + } } void StagefrightRecorder::clipVideoFrameHeight() { @@ -995,18 +1129,64 @@ void StagefrightRecorder::clipVideoFrameHeight() { } } -status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) { - source->clear(); +status_t StagefrightRecorder::setupCameraSource( + sp<CameraSource> *cameraSource) { + status_t err = OK; + if ((err = checkVideoEncoderCapabilities()) != OK) { + return err; + } + Size videoSize; + videoSize.width = mVideoWidth; + videoSize.height = mVideoHeight; + if (mCaptureTimeLapse) { + mCameraSourceTimeLapse = CameraSourceTimeLapse::CreateFromCamera( + mCamera, mCameraId, + videoSize, mFrameRate, mPreviewSurface, + mTimeBetweenTimeLapseFrameCaptureUs); + *cameraSource = mCameraSourceTimeLapse; + } else { + *cameraSource = CameraSource::CreateFromCamera( + mCamera, mCameraId, videoSize, mFrameRate, + mPreviewSurface, true /*storeMetaDataInVideoBuffers*/); + } + if (*cameraSource == NULL) { + return UNKNOWN_ERROR; + } - status_t err = setupCameraSource(); - if (err != OK) return err; + if ((*cameraSource)->initCheck() != OK) { + (*cameraSource).clear(); + *cameraSource = NULL; + return NO_INIT; + } + + // When frame rate is not set, the actual frame rate will be set to + // the current frame rate being used. + if (mFrameRate == -1) { + int32_t frameRate = 0; + CHECK ((*cameraSource)->getFormat()->findInt32( + kKeyFrameRate, &frameRate)); + LOGI("Frame rate is not explicitly set. 
Use the current frame " + "rate (%d fps)", frameRate); + mFrameRate = frameRate; + } - sp<CameraSource> cameraSource = CameraSource::CreateFromCamera(mCamera); - CHECK(cameraSource != NULL); + CHECK(mFrameRate != -1); + + mIsMetaDataStoredInVideoBuffers = + (*cameraSource)->isMetaDataStoredInVideoBuffers(); + + return OK; +} + +status_t StagefrightRecorder::setupVideoEncoder( + sp<MediaSource> cameraSource, + int32_t videoBitRate, + sp<MediaSource> *source) { + source->clear(); sp<MetaData> enc_meta = new MetaData; - enc_meta->setInt32(kKeyBitRate, mVideoBitRate); - enc_meta->setInt32(kKeySampleRate, mFrameRate); + enc_meta->setInt32(kKeyBitRate, videoBitRate); + enc_meta->setInt32(kKeyFrameRate, mFrameRate); switch (mVideoEncoder) { case VIDEO_ENCODER_H263: @@ -1054,10 +1234,21 @@ status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) { OMXClient client; CHECK_EQ(client.connect(), OK); + uint32_t encoder_flags = 0; + if (mIsMetaDataStoredInVideoBuffers) { + encoder_flags |= OMXCodec::kHardwareCodecsOnly; + encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers; + } sp<MediaSource> encoder = OMXCodec::Create( client.interface(), enc_meta, - true /* createEncoder */, cameraSource); + true /* createEncoder */, cameraSource, + NULL, encoder_flags); if (encoder == NULL) { + LOGW("Failed to create the encoder"); + // When the encoder fails to be created, we need + // release the camera source due to the camera's lock + // and unlock mechanism. + cameraSource->stop(); return UNKNOWN_ERROR; } @@ -1067,18 +1258,23 @@ status_t StagefrightRecorder::setupVideoEncoder(sp<MediaSource> *source) { } status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) { - sp<MediaSource> audioEncoder; + status_t status = BAD_VALUE; + if (OK != (status = checkAudioEncoderCapabilities())) { + return status; + } + switch(mAudioEncoder) { case AUDIO_ENCODER_AMR_NB: case AUDIO_ENCODER_AMR_WB: case AUDIO_ENCODER_AAC: - audioEncoder = createAudioSource(); break; + default: LOGE("Unsupported audio encoder: %d", mAudioEncoder); return UNKNOWN_ERROR; } + sp<MediaSource> audioEncoder = createAudioSource(); if (audioEncoder == NULL) { return UNKNOWN_ERROR; } @@ -1087,54 +1283,153 @@ status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) { return OK; } -status_t StagefrightRecorder::startMPEG4Recording() { - int32_t totalBitRate = 0; +status_t StagefrightRecorder::setupMPEG4Recording( + bool useSplitCameraSource, + int outputFd, + int32_t videoWidth, int32_t videoHeight, + int32_t videoBitRate, + int32_t *totalBitRate, + sp<MediaWriter> *mediaWriter) { + mediaWriter->clear(); + *totalBitRate = 0; status_t err = OK; - sp<MediaWriter> writer = new MPEG4Writer(dup(mOutputFd)); + sp<MediaWriter> writer = new MPEG4Writer(outputFd); - // Add audio source first if it exists - if (mAudioSource != AUDIO_SOURCE_LIST_END) { - err = setupAudioEncoder(writer); - if (err != OK) return err; - totalBitRate += mAudioBitRate; - } if (mVideoSource == VIDEO_SOURCE_DEFAULT || mVideoSource == VIDEO_SOURCE_CAMERA) { + + sp<MediaSource> cameraMediaSource; + if (useSplitCameraSource) { + LOGV("Using Split camera source"); + cameraMediaSource = mCameraSourceSplitter->createClient(); + } else { + sp<CameraSource> cameraSource; + err = setupCameraSource(&cameraSource); + cameraMediaSource = cameraSource; + } + if ((videoWidth != mVideoWidth) || (videoHeight != mVideoHeight)) { + // Use downsampling from the original source. 
+ cameraMediaSource = + new VideoSourceDownSampler(cameraMediaSource, videoWidth, videoHeight); + } + if (err != OK) { + return err; + } + sp<MediaSource> encoder; - err = setupVideoEncoder(&encoder); - if (err != OK) return err; + err = setupVideoEncoder(cameraMediaSource, videoBitRate, &encoder); + if (err != OK) { + return err; + } + writer->addSource(encoder); - totalBitRate += mVideoBitRate; + *totalBitRate += videoBitRate; + } + + // Audio source is added at the end if it exists. + // This helps make sure that the "recording" sound is suppressed for + // camcorder applications in the recorded files. + if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_LIST_END)) { + err = setupAudioEncoder(writer); + if (err != OK) return err; + *totalBitRate += mAudioBitRate; } if (mInterleaveDurationUs > 0) { reinterpret_cast<MPEG4Writer *>(writer.get())-> setInterleaveDuration(mInterleaveDurationUs); } - if (mMaxFileDurationUs != 0) { writer->setMaxFileDuration(mMaxFileDurationUs); } if (mMaxFileSizeBytes != 0) { writer->setMaxFileSize(mMaxFileSizeBytes); } - sp<MetaData> meta = new MetaData; - meta->setInt64(kKeyTime, systemTime() / 1000); - meta->setInt32(kKeyFileType, mOutputFormat); - meta->setInt32(kKeyBitRate, totalBitRate); - meta->setInt32(kKey64BitFileOffset, mUse64BitFileOffset); + + writer->setListener(mListener); + *mediaWriter = writer; + return OK; +} + +void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate, + sp<MetaData> *meta) { + (*meta)->setInt64(kKeyTime, startTimeUs); + (*meta)->setInt32(kKeyFileType, mOutputFormat); + (*meta)->setInt32(kKeyBitRate, totalBitRate); + (*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset); if (mMovieTimeScale > 0) { - meta->setInt32(kKeyTimeScale, mMovieTimeScale); + (*meta)->setInt32(kKeyTimeScale, mMovieTimeScale); } if (mTrackEveryTimeDurationUs > 0) { - meta->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs); + (*meta)->setInt64(kKeyTrackTimeStatus, mTrackEveryTimeDurationUs); } if (mRotationDegrees != 0) { - meta->setInt32(kKeyRotationDegree, mRotationDegrees); + (*meta)->setInt32(kKeyRotation, mRotationDegrees); } - writer->setListener(mListener); - mWriter = writer; - return mWriter->start(meta.get()); +} + +status_t StagefrightRecorder::startMPEG4Recording() { + if (mCaptureAuxVideo) { + if (!mCaptureTimeLapse) { + LOGE("Auxiliary video can be captured only in time lapse mode"); + return UNKNOWN_ERROR; + } + LOGV("Creating MediaSourceSplitter"); + sp<CameraSource> cameraSource; + status_t err = setupCameraSource(&cameraSource); + if (err != OK) { + return err; + } + mCameraSourceSplitter = new MediaSourceSplitter(cameraSource); + } else { + mCameraSourceSplitter = NULL; + } + + int32_t totalBitRate; + status_t err = setupMPEG4Recording(mCaptureAuxVideo, + mOutputFd, mVideoWidth, mVideoHeight, + mVideoBitRate, &totalBitRate, &mWriter); + if (err != OK) { + return err; + } + + int64_t startTimeUs = systemTime() / 1000; + sp<MetaData> meta = new MetaData; + setupMPEG4MetaData(startTimeUs, totalBitRate, &meta); + + err = mWriter->start(meta.get()); + if (err != OK) { + return err; + } + + if (mCaptureAuxVideo) { + CHECK(mOutputFdAux >= 0); + if (mWriterAux != NULL) { + LOGE("Auxiliary File writer is not available"); + return UNKNOWN_ERROR; + } + if ((mAuxVideoWidth > mVideoWidth) || (mAuxVideoHeight > mVideoHeight) || + ((mAuxVideoWidth == mVideoWidth) && mAuxVideoHeight == mVideoHeight)) { + LOGE("Auxiliary video size (%d x %d) same or larger than the main video size (%d x %d)", + 
mAuxVideoWidth, mAuxVideoHeight, mVideoWidth, mVideoHeight); + return UNKNOWN_ERROR; + } + + int32_t totalBitrateAux; + err = setupMPEG4Recording(mCaptureAuxVideo, + mOutputFdAux, mAuxVideoWidth, mAuxVideoHeight, + mAuxVideoBitRate, &totalBitrateAux, &mWriterAux); + if (err != OK) { + return err; + } + + sp<MetaData> metaAux = new MetaData; + setupMPEG4MetaData(startTimeUs, totalBitrateAux, &metaAux); + + return mWriterAux->start(metaAux.get()); + } + + return OK; } status_t StagefrightRecorder::pause() { @@ -1143,28 +1438,36 @@ status_t StagefrightRecorder::pause() { return UNKNOWN_ERROR; } mWriter->pause(); + + if (mCaptureAuxVideo) { + if (mWriterAux == NULL) { + return UNKNOWN_ERROR; + } + mWriterAux->pause(); + } + return OK; } status_t StagefrightRecorder::stop() { LOGV("stop"); status_t err = OK; - if (mWriter != NULL) { - err = mWriter->stop(); - mWriter.clear(); + + if (mCaptureTimeLapse && mCameraSourceTimeLapse != NULL) { + mCameraSourceTimeLapse->startQuickReadReturns(); + mCameraSourceTimeLapse = NULL; } - if (mCamera != 0) { - LOGV("Disconnect camera"); - int64_t token = IPCThreadState::self()->clearCallingIdentity(); - if ((mFlags & FLAGS_HOT_CAMERA) == 0) { - LOGV("Camera was cold when we started, stopping preview"); - mCamera->stopPreview(); + if (mCaptureAuxVideo) { + if (mWriterAux != NULL) { + mWriterAux->stop(); + mWriterAux.clear(); } - mCamera->unlock(); - mCamera.clear(); - IPCThreadState::self()->restoreCallingIdentity(token); - mFlags = 0; + } + + if (mWriter != NULL) { + err = mWriter->stop(); + mWriter.clear(); } if (mOutputFd >= 0) { @@ -1172,6 +1475,13 @@ status_t StagefrightRecorder::stop() { mOutputFd = -1; } + if (mCaptureAuxVideo) { + if (mOutputFdAux >= 0) { + ::close(mOutputFdAux); + mOutputFdAux = -1; + } + } + return err; } @@ -1196,8 +1506,11 @@ status_t StagefrightRecorder::reset() { mVideoEncoder = VIDEO_ENCODER_H263; mVideoWidth = 176; mVideoHeight = 144; - mFrameRate = 20; + mAuxVideoWidth = 176; + mAuxVideoHeight = 144; + mFrameRate = -1; mVideoBitRate = 192000; + mAuxVideoBitRate = 192000; mSampleRate = 8000; mAudioChannels = 1; mAudioBitRate = 12200; @@ -1214,11 +1527,17 @@ status_t StagefrightRecorder::reset() { mMaxFileDurationUs = 0; mMaxFileSizeBytes = 0; mTrackEveryTimeDurationUs = 0; - mRotationDegrees = 0; + mCaptureTimeLapse = false; + mTimeBetweenTimeLapseFrameCaptureUs = -1; + mCaptureAuxVideo = false; + mCameraSourceSplitter = NULL; + mCameraSourceTimeLapse = NULL; + mIsMetaDataStoredInVideoBuffers = false; mEncoderProfiles = MediaProfiles::getInstance(); + mRotationDegrees = 0; mOutputFd = -1; - mFlags = 0; + mOutputFdAux = -1; return OK; } @@ -1255,6 +1574,8 @@ status_t StagefrightRecorder::dump( snprintf(buffer, SIZE, " Recorder: %p\n", this); snprintf(buffer, SIZE, " Output file (fd %d):\n", mOutputFd); result.append(buffer); + snprintf(buffer, SIZE, " Output file Auxiliary (fd %d):\n", mOutputFdAux); + result.append(buffer); snprintf(buffer, SIZE, " File format: %d\n", mOutputFormat); result.append(buffer); snprintf(buffer, SIZE, " Max file size (bytes): %lld\n", mMaxFileSizeBytes); @@ -1287,8 +1608,6 @@ status_t StagefrightRecorder::dump( result.append(buffer); snprintf(buffer, SIZE, " Camera Id: %d\n", mCameraId); result.append(buffer); - snprintf(buffer, SIZE, " Camera flags: %d\n", mFlags); - result.append(buffer); snprintf(buffer, SIZE, " Encoder: %d\n", mVideoEncoder); result.append(buffer); snprintf(buffer, SIZE, " Encoder profile: %d\n", mVideoEncoderProfile); @@ -1299,10 +1618,14 @@ status_t StagefrightRecorder::dump( 
result.append(buffer); snprintf(buffer, SIZE, " Frame size (pixels): %dx%d\n", mVideoWidth, mVideoHeight); result.append(buffer); + snprintf(buffer, SIZE, " Aux Frame size (pixels): %dx%d\n", mAuxVideoWidth, mAuxVideoHeight); + result.append(buffer); snprintf(buffer, SIZE, " Frame rate (fps): %d\n", mFrameRate); result.append(buffer); snprintf(buffer, SIZE, " Bit rate (bps): %d\n", mVideoBitRate); result.append(buffer); + snprintf(buffer, SIZE, " Aux Bit rate (bps): %d\n", mAuxVideoBitRate); + result.append(buffer); ::write(fd, result.string(), result.size()); return OK; } diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h index e42df2e..72225db 100644 --- a/media/libmediaplayerservice/StagefrightRecorder.h +++ b/media/libmediaplayerservice/StagefrightRecorder.h @@ -19,13 +19,18 @@ #define STAGEFRIGHT_RECORDER_H_ #include <media/MediaRecorderBase.h> +#include <camera/CameraParameters.h> #include <utils/String8.h> namespace android { class Camera; +class CameraSource; +class CameraSourceTimeLapse; +class MediaSourceSplitter; struct MediaSource; struct MediaWriter; +class MetaData; struct AudioSource; class MediaProfiles; @@ -42,9 +47,10 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t setVideoSize(int width, int height); virtual status_t setVideoFrameRate(int frames_per_second); virtual status_t setCamera(const sp<ICamera>& camera); - virtual status_t setPreviewSurface(const sp<ISurface>& surface); + virtual status_t setPreviewSurface(const sp<Surface>& surface); virtual status_t setOutputFile(const char *path); virtual status_t setOutputFile(int fd, int64_t offset, int64_t length); + virtual status_t setOutputFileAuxiliary(int fd); virtual status_t setParameters(const String8& params); virtual status_t setListener(const sp<IMediaRecorderClient>& listener); virtual status_t prepare(); @@ -57,15 +63,10 @@ struct StagefrightRecorder : public MediaRecorderBase { virtual status_t dump(int fd, const Vector<String16>& args) const; private: - enum CameraFlags { - FLAGS_SET_CAMERA = 1L << 0, - FLAGS_HOT_CAMERA = 1L << 1, - }; - - sp<Camera> mCamera; - sp<ISurface> mPreviewSurface; + sp<ICamera> mCamera; + sp<Surface> mPreviewSurface; sp<IMediaRecorderClient> mListener; - sp<MediaWriter> mWriter; + sp<MediaWriter> mWriter, mWriterAux; sp<AudioSource> mAudioSourceNode; audio_source mAudioSource; @@ -75,8 +76,9 @@ private: video_encoder mVideoEncoder; bool mUse64BitFileOffset; int32_t mVideoWidth, mVideoHeight; + int32_t mAuxVideoWidth, mAuxVideoHeight; int32_t mFrameRate; - int32_t mVideoBitRate; + int32_t mVideoBitRate, mAuxVideoBitRate; int32_t mAudioBitRate; int32_t mAudioChannels; int32_t mSampleRate; @@ -93,21 +95,41 @@ private: int64_t mTrackEveryTimeDurationUs; int32_t mRotationDegrees; // Clockwise + bool mCaptureTimeLapse; + int64_t mTimeBetweenTimeLapseFrameCaptureUs; + bool mCaptureAuxVideo; + sp<MediaSourceSplitter> mCameraSourceSplitter; + sp<CameraSourceTimeLapse> mCameraSourceTimeLapse; + String8 mParams; - int mOutputFd; - int32_t mFlags; + int mOutputFd, mOutputFdAux; + bool mIsMetaDataStoredInVideoBuffers; MediaProfiles *mEncoderProfiles; + status_t setupMPEG4Recording( + bool useSplitCameraSource, + int outputFd, + int32_t videoWidth, int32_t videoHeight, + int32_t videoBitRate, + int32_t *totalBitRate, + sp<MediaWriter> *mediaWriter); + void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate, + sp<MetaData> *meta); status_t startMPEG4Recording(); status_t startAMRRecording(); 
status_t startAACRecording(); status_t startRTPRecording(); status_t startMPEG2TSRecording(); sp<MediaSource> createAudioSource(); - status_t setupCameraSource(); + status_t checkVideoEncoderCapabilities(); + status_t checkAudioEncoderCapabilities(); + status_t setupCameraSource(sp<CameraSource> *cameraSource); status_t setupAudioEncoder(const sp<MediaWriter>& writer); - status_t setupVideoEncoder(sp<MediaSource> *source); + status_t setupVideoEncoder( + sp<MediaSource> cameraSource, + int32_t videoBitRate, + sp<MediaSource> *source); // Encoding parameter handling utilities status_t setParameter(const String8 &key, const String8 &value); @@ -115,6 +137,11 @@ private: status_t setParamAudioNumberOfChannels(int32_t channles); status_t setParamAudioSamplingRate(int32_t sampleRate); status_t setParamAudioTimeScale(int32_t timeScale); + status_t setParamTimeLapseEnable(int32_t timeLapseEnable); + status_t setParamTimeBetweenTimeLapseFrameCapture(int64_t timeUs); + status_t setParamAuxVideoHeight(int32_t height); + status_t setParamAuxVideoWidth(int32_t width); + status_t setParamAuxVideoEncodingBitRate(int32_t bitRate); status_t setParamVideoEncodingBitRate(int32_t bitRate); status_t setParamVideoIFramesInterval(int32_t seconds); status_t setParamVideoEncoderProfile(int32_t profile); @@ -132,6 +159,9 @@ private: void clipVideoFrameRate(); void clipVideoFrameWidth(); void clipVideoFrameHeight(); + void clipAudioBitRate(); + void clipAudioSampleRate(); + void clipNumberOfAudioChannels(); StagefrightRecorder(const StagefrightRecorder &); StagefrightRecorder &operator=(const StagefrightRecorder &); @@ -140,4 +170,3 @@ private: } // namespace android #endif // STAGEFRIGHT_RECORDER_H_ - diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h index 6e6c3cd..6abd8e3 100644 --- a/media/libmediaplayerservice/TestPlayerStub.h +++ b/media/libmediaplayerservice/TestPlayerStub.h @@ -75,7 +75,7 @@ class TestPlayerStub : public MediaPlayerInterface { // All the methods below wrap the mPlayer instance. 
- virtual status_t setVideoSurface(const android::sp<android::ISurface>& s) { + virtual status_t setVideoSurface(const android::sp<android::Surface>& s) { return mPlayer->setVideoSurface(s); } virtual status_t prepare() {return mPlayer->prepare();} diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk new file mode 100644 index 0000000..c20e279 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/Android.mk @@ -0,0 +1,25 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + HTTPLiveSource.cpp \ + NuPlayer.cpp \ + NuPlayerDecoder.cpp \ + NuPlayerDriver.cpp \ + NuPlayerRenderer.cpp \ + NuPlayerStreamListener.cpp \ + DecoderWrapper.cpp \ + StreamingSource.cpp \ + +LOCAL_C_INCLUDES := \ + $(TOP)/frameworks/base/include/media/stagefright/openmax \ + $(TOP)/frameworks/base/media/libstagefright/include \ + $(TOP)/frameworks/base/media/libstagefright/mpeg2ts \ + $(TOP)/frameworks/base/media/libstagefright/httplive \ + +LOCAL_MODULE:= libstagefright_nuplayer + +LOCAL_MODULE_TAGS := eng + +include $(BUILD_STATIC_LIBRARY) + diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp b/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp new file mode 100644 index 0000000..802d1fb --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/DecoderWrapper.cpp @@ -0,0 +1,576 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "DecoderWrapper" +#include <utils/Log.h> + +#include "DecoderWrapper.h" + +#include "AACDecoder.h" + +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/ACodec.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> + +namespace android { + +struct DecoderWrapper::WrapperSource : public MediaSource { + WrapperSource( + const sp<MetaData> &meta, + const sp<AMessage> ¬ify); + + virtual status_t start(MetaData *params); + virtual status_t stop(); + virtual sp<MetaData> getFormat(); + + virtual status_t read( + MediaBuffer **buffer, const ReadOptions *options); + + void queueBuffer(const sp<ABuffer> &buffer); + void queueEOS(status_t finalResult); + void clear(); + +protected: + virtual ~WrapperSource(); + +private: + Mutex mLock; + Condition mCondition; + + sp<MetaData> mMeta; + sp<AMessage> mNotify; + + List<sp<ABuffer> > mQueue; + status_t mFinalResult; + + DISALLOW_EVIL_CONSTRUCTORS(WrapperSource); +}; + +DecoderWrapper::WrapperSource::WrapperSource( + const sp<MetaData> &meta, const sp<AMessage> ¬ify) + : mMeta(meta), + mNotify(notify), + mFinalResult(OK) { +} + +DecoderWrapper::WrapperSource::~WrapperSource() { +} + +status_t DecoderWrapper::WrapperSource::start(MetaData *params) { + return OK; +} + +status_t DecoderWrapper::WrapperSource::stop() { + return OK; +} + +sp<MetaData> DecoderWrapper::WrapperSource::getFormat() { + return mMeta; +} + +status_t DecoderWrapper::WrapperSource::read( + MediaBuffer **out, const ReadOptions *options) { + Mutex::Autolock autoLock(mLock); + + bool requestedBuffer = false; + + while (mQueue.empty() && mFinalResult == OK) { + if (!requestedBuffer) { + mNotify->dup()->post(); + requestedBuffer = true; + } + + mCondition.wait(mLock); + } + + if (mQueue.empty()) { + return mFinalResult; + } + + sp<ABuffer> src = *mQueue.begin(); + mQueue.erase(mQueue.begin()); + + MediaBuffer *dst = new MediaBuffer(src->size()); + memcpy(dst->data(), src->data(), src->size()); + + int64_t timeUs; + CHECK(src->meta()->findInt64("timeUs", &timeUs)); + + dst->meta_data()->setInt64(kKeyTime, timeUs); + + *out = dst; + + return OK; +} + +void DecoderWrapper::WrapperSource::queueBuffer(const sp<ABuffer> &buffer) { + Mutex::Autolock autoLock(mLock); + mQueue.push_back(buffer); + mCondition.broadcast(); +} + +void DecoderWrapper::WrapperSource::queueEOS(status_t finalResult) { + CHECK_NE(finalResult, (status_t)OK); + + Mutex::Autolock autoLock(mLock); + mFinalResult = finalResult; + mCondition.broadcast(); +} + +void DecoderWrapper::WrapperSource::clear() { + Mutex::Autolock autoLock(mLock); + mQueue.clear(); + mFinalResult = OK; +} + +//////////////////////////////////////////////////////////////////////////////// + +struct DecoderWrapper::WrapperReader : public AHandler { + WrapperReader( + const sp<MediaSource> &decoder, + const sp<AMessage> ¬ify); + + void start(); + void stop(); + void readMore(bool flush = false); + +protected: + virtual ~WrapperReader(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + enum { + kWhatRead + }; + + sp<MediaSource> mDecoder; + sp<AMessage> mNotify; + bool mEOS; + bool mSentFormat; + + void sendFormatChange(); + + DISALLOW_EVIL_CONSTRUCTORS(WrapperReader); +}; + 
+DecoderWrapper::WrapperReader::WrapperReader( + const sp<MediaSource> &decoder, const sp<AMessage> ¬ify) + : mDecoder(decoder), + mNotify(notify), + mEOS(false), + mSentFormat(false) { +} + +DecoderWrapper::WrapperReader::~WrapperReader() { +} + +void DecoderWrapper::WrapperReader::start() { + CHECK_EQ(mDecoder->start(), (status_t)OK); + readMore(); +} + +void DecoderWrapper::WrapperReader::stop() { + CHECK_EQ(mDecoder->stop(), (status_t)OK); +} + +void DecoderWrapper::WrapperReader::readMore(bool flush) { + if (!flush && mEOS) { + return; + } + + sp<AMessage> msg = new AMessage(kWhatRead, id()); + msg->setInt32("flush", static_cast<int32_t>(flush)); + msg->post(); +} + +void DecoderWrapper::WrapperReader::onMessageReceived( + const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatRead: + { + int32_t flush; + CHECK(msg->findInt32("flush", &flush)); + + MediaSource::ReadOptions options; + if (flush) { + // Dummy seek + options.setSeekTo(0); + mEOS = false; + } + + CHECK(!mEOS); + + MediaBuffer *src; + status_t err = mDecoder->read(&src, &options); + + if (err == OK) { + if (!mSentFormat) { + sendFormatChange(); + mSentFormat = true; + } + + sp<AMessage> notify = mNotify->dup(); + + sp<AMessage> realNotify; + CHECK(notify->findMessage("real-notify", &realNotify)); + + realNotify->setInt32("what", ACodec::kWhatDrainThisBuffer); + + sp<ABuffer> dst = new ABuffer(src->range_length()); + memcpy(dst->data(), + (const uint8_t *)src->data() + src->range_offset(), + src->range_length()); + + int64_t timeUs; + CHECK(src->meta_data()->findInt64(kKeyTime, &timeUs)); + src->release(); + src = NULL; + + dst->meta()->setInt64("timeUs", timeUs); + + realNotify->setObject("buffer", dst); + + notify->post(); + } else if (err == INFO_FORMAT_CHANGED) { + sendFormatChange(); + + readMore(false /* flush */); + } else { + sp<AMessage> notify = mNotify->dup(); + + sp<AMessage> realNotify; + CHECK(notify->findMessage("real-notify", &realNotify)); + + realNotify->setInt32("what", ACodec::kWhatEOS); + mEOS = true; + + notify->post(); + } + break; + } + + default: + TRESPASS(); + break; + } +} + +void DecoderWrapper::WrapperReader::sendFormatChange() { + sp<AMessage> notify = mNotify->dup(); + + sp<AMessage> realNotify; + CHECK(notify->findMessage("real-notify", &realNotify)); + + realNotify->setInt32("what", ACodec::kWhatOutputFormatChanged); + + sp<MetaData> meta = mDecoder->getFormat(); + + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + + realNotify->setString("mime", mime); + + if (!strncasecmp("audio/", mime, 6)) { + int32_t numChannels; + CHECK(meta->findInt32(kKeyChannelCount, &numChannels)); + + int32_t sampleRate; + CHECK(meta->findInt32(kKeySampleRate, &sampleRate)); + + realNotify->setInt32("channel-count", numChannels); + realNotify->setInt32("sample-rate", sampleRate); + } else { + CHECK(!strncasecmp("video/", mime, 6)); + + int32_t width, height; + CHECK(meta->findInt32(kKeyWidth, &width)); + CHECK(meta->findInt32(kKeyHeight, &height)); + + realNotify->setInt32("width", width); + realNotify->setInt32("height", height); + + int32_t cropLeft, cropTop, cropRight, cropBottom; + if (!meta->findRect( + kKeyCropRect, + &cropLeft, &cropTop, &cropRight, &cropBottom)) { + cropLeft = 0; + cropTop = 0; + cropRight = width - 1; + cropBottom = height - 1; + } + + realNotify->setRect("crop", cropLeft, cropTop, cropRight, cropBottom); + } + + notify->post(); + + mSentFormat = true; +} + +//////////////////////////////////////////////////////////////////////////////// + 
+DecoderWrapper::DecoderWrapper() + : mNumOutstandingInputBuffers(0), + mNumOutstandingOutputBuffers(0), + mNumPendingDecodes(0), + mFlushing(false) { +} + +DecoderWrapper::~DecoderWrapper() { +} + +void DecoderWrapper::setNotificationMessage(const sp<AMessage> &msg) { + mNotify = msg; +} + +void DecoderWrapper::initiateSetup(const sp<AMessage> &msg) { + msg->setWhat(kWhatSetup); + msg->setTarget(id()); + msg->post(); +} + +void DecoderWrapper::initiateShutdown() { + (new AMessage(kWhatShutdown, id()))->post(); +} + +void DecoderWrapper::signalFlush() { + (new AMessage(kWhatFlush, id()))->post(); +} + +void DecoderWrapper::signalResume() { + (new AMessage(kWhatResume, id()))->post(); +} + +void DecoderWrapper::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatSetup: + onSetup(msg); + break; + + case kWhatShutdown: + onShutdown(); + break; + + case kWhatInputDataRequested: + { + postFillBuffer(); + ++mNumOutstandingInputBuffers; + break; + } + + case kWhatInputBufferFilled: + { + CHECK_GT(mNumOutstandingInputBuffers, 0); + --mNumOutstandingInputBuffers; + + if (mFlushing) { + mSource->queueEOS(INFO_DISCONTINUITY); + + completeFlushIfPossible(); + break; + } + + sp<RefBase> obj; + if (!msg->findObject("buffer", &obj)) { + int32_t err = OK; + CHECK(msg->findInt32("err", &err)); + + mSource->queueEOS(err); + break; + } + + sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); + + mSource->queueBuffer(buffer); + break; + } + + case kWhatFillBufferDone: + { + sp<AMessage> notify; + CHECK(msg->findMessage("real-notify", ¬ify)); + + int32_t what; + CHECK(notify->findInt32("what", &what)); + + if (what == ACodec::kWhatDrainThisBuffer) { + CHECK_GT(mNumPendingDecodes, 0); + --mNumPendingDecodes; + + sp<AMessage> reply = + new AMessage(kWhatOutputBufferDrained, id()); + + notify->setMessage("reply", reply); + + ++mNumOutstandingOutputBuffers; + } else if (what == ACodec::kWhatEOS) { + CHECK_GT(mNumPendingDecodes, 0); + --mNumPendingDecodes; + + if (mFlushing) { + completeFlushIfPossible(); + break; + } + } + + notify->post(); + break; + } + + case kWhatOutputBufferDrained: + { + CHECK_GT(mNumOutstandingOutputBuffers, 0); + --mNumOutstandingOutputBuffers; + + if (mFlushing) { + completeFlushIfPossible(); + break; + } + + ++mNumPendingDecodes; + mReader->readMore(); + break; + } + + case kWhatFlush: + { + onFlush(); + break; + } + + case kWhatResume: + { + onResume(); + break; + } + + default: + TRESPASS(); + break; + } +} + +void DecoderWrapper::onSetup(const sp<AMessage> &msg) { + AString mime; + CHECK(msg->findString("mime", &mime)); + + CHECK(!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)); + + int32_t numChannels, sampleRate; + CHECK(msg->findInt32("channel-count", &numChannels)); + CHECK(msg->findInt32("sample-rate", &sampleRate)); + + sp<RefBase> obj; + CHECK(msg->findObject("esds", &obj)); + sp<ABuffer> esds = static_cast<ABuffer *>(obj.get()); + + sp<MetaData> meta = new MetaData; + meta->setCString(kKeyMIMEType, mime.c_str()); + meta->setInt32(kKeySampleRate, sampleRate); + meta->setInt32(kKeyChannelCount, numChannels); + meta->setData(kKeyESDS, 0, esds->data(), esds->size()); + + mSource = new WrapperSource( + meta, new AMessage(kWhatInputDataRequested, id())); + + sp<MediaSource> decoder = new AACDecoder(mSource); + + mReaderLooper = new ALooper; + mReaderLooper->setName("DecoderWrapper looper"); + + mReaderLooper->start( + false, /* runOnCallingThread */ + false, /* canCallJava */ + PRIORITY_AUDIO); + + sp<AMessage> notify = new 
AMessage(kWhatFillBufferDone, id()); + notify->setMessage("real-notify", mNotify); + + mReader = new WrapperReader(decoder, notify); + mReaderLooper->registerHandler(mReader); + + mReader->start(); + ++mNumPendingDecodes; +} + +void DecoderWrapper::onShutdown() { + mReaderLooper->stop(); + mReaderLooper.clear(); + + mReader->stop(); + mReader.clear(); + + mSource.clear(); + + mNumOutstandingInputBuffers = 0; + mNumOutstandingOutputBuffers = 0; + mNumPendingDecodes = 0; + mFlushing = false; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", ACodec::kWhatShutdownCompleted); + notify->post(); +} + +void DecoderWrapper::postFillBuffer() { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFillThisBuffer); + sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id()); + notify->setMessage("reply", reply); + notify->post(); +} + +void DecoderWrapper::onFlush() { + CHECK(!mFlushing); + mFlushing = true; + + completeFlushIfPossible(); +} + +void DecoderWrapper::completeFlushIfPossible() { + CHECK(mFlushing); + + if (mNumOutstandingInputBuffers > 0 + || mNumOutstandingOutputBuffers > 0 + || mNumPendingDecodes > 0) { + return; + } + + mFlushing = false; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFlushCompleted); + notify->post(); +} + +void DecoderWrapper::onResume() { + CHECK(!mFlushing); + + ++mNumPendingDecodes; + + mSource->clear(); + mReader->readMore(true /* flush */); +} + +} // namespace android diff --git a/media/libmediaplayerservice/nuplayer/DecoderWrapper.h b/media/libmediaplayerservice/nuplayer/DecoderWrapper.h new file mode 100644 index 0000000..b9be12c --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/DecoderWrapper.h @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef DECODER_WRAPPER_H_ + +#define DECODER_WRAPPER_H_ + +#include <media/stagefright/foundation/AHandler.h> + +namespace android { + +struct MediaSource; + +struct DecoderWrapper : public AHandler { + DecoderWrapper(); + + void setNotificationMessage(const sp<AMessage> &msg); + void initiateSetup(const sp<AMessage> &msg); + void initiateShutdown(); + void signalFlush(); + void signalResume(); + +protected: + virtual ~DecoderWrapper(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + struct WrapperSource; + struct WrapperReader; + + enum { + kWhatSetup, + kWhatInputBufferFilled, + kWhatOutputBufferDrained, + kWhatShutdown, + kWhatFillBufferDone, + kWhatInputDataRequested, + kWhatFlush, + kWhatResume, + }; + + sp<AMessage> mNotify; + + sp<WrapperSource> mSource; + + sp<ALooper> mReaderLooper; + sp<WrapperReader> mReader; + + int32_t mNumOutstandingInputBuffers; + int32_t mNumOutstandingOutputBuffers; + int32_t mNumPendingDecodes; + bool mFlushing; + + void onSetup(const sp<AMessage> &msg); + void onShutdown(); + void onFlush(); + void onResume(); + + void postFillBuffer(); + void completeFlushIfPossible(); + + DISALLOW_EVIL_CONSTRUCTORS(DecoderWrapper); +}; + +} // namespace android + +#endif // DECODER_WRAPPER_H_ + diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp new file mode 100644 index 0000000..6bf6dd3 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp @@ -0,0 +1,152 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "HTTPLiveSource" +#include <utils/Log.h> + +#include "HTTPLiveSource.h" + +#include "ATSParser.h" +#include "AnotherPacketSource.h" +#include "LiveDataSource.h" +#include "LiveSession.h" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MetaData.h> + +namespace android { + +NuPlayer::HTTPLiveSource::HTTPLiveSource(const char *url) + : mURL(url), + mEOS(false), + mOffset(0) { +} + +NuPlayer::HTTPLiveSource::~HTTPLiveSource() { + mLiveSession->disconnect(); + mLiveLooper->stop(); +} + +void NuPlayer::HTTPLiveSource::start() { + mLiveLooper = new ALooper; + mLiveLooper->setName("http live"); + mLiveLooper->start(); + + mLiveSession = new LiveSession; + mLiveLooper->registerHandler(mLiveSession); + + mLiveSession->connect(mURL.c_str()); + + mTSParser = new ATSParser; +} + +sp<MetaData> NuPlayer::HTTPLiveSource::getFormat(bool audio) { + ATSParser::SourceType type = + audio ? 
ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO; + + sp<AnotherPacketSource> source = + static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get()); + + if (source == NULL) { + return NULL; + } + + return source->getFormat(); +} + +bool NuPlayer::HTTPLiveSource::feedMoreTSData() { + if (mEOS) { + return false; + } + + sp<LiveDataSource> source = + static_cast<LiveDataSource *>(mLiveSession->getDataSource().get()); + + for (int32_t i = 0; i < 50; ++i) { + char buffer[188]; + ssize_t n = source->readAtNonBlocking(mOffset, buffer, sizeof(buffer)); + + if (n == -EWOULDBLOCK) { + break; + } else if (n < 0) { + LOGI("input data EOS reached."); + mTSParser->signalEOS(n); + mEOS = true; + break; + } else { + if (buffer[0] == 0x00) { + // XXX legacy + mTSParser->signalDiscontinuity( + buffer[1] == 0x00 + ? ATSParser::DISCONTINUITY_SEEK + : ATSParser::DISCONTINUITY_FORMATCHANGE); + } else { + mTSParser->feedTSPacket(buffer, sizeof(buffer)); + } + + mOffset += n; + } + } + + return true; +} + +status_t NuPlayer::HTTPLiveSource::dequeueAccessUnit( + bool audio, sp<ABuffer> *accessUnit) { + ATSParser::SourceType type = + audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO; + + sp<AnotherPacketSource> source = + static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get()); + + if (source == NULL) { + return -EWOULDBLOCK; + } + + status_t finalResult; + if (!source->hasBufferAvailable(&finalResult)) { + return finalResult == OK ? -EWOULDBLOCK : finalResult; + } + + return source->dequeueAccessUnit(accessUnit); +} + +status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) { + return mLiveSession->getDuration(durationUs); +} + +status_t NuPlayer::HTTPLiveSource::seekTo(int64_t seekTimeUs) { + // We need to make sure we're not seeking until we have seen the very first + // PTS timestamp in the whole stream (from the beginning of the stream). + while (!mTSParser->PTSTimeDeltaEstablished() && feedMoreTSData()) { + usleep(100000); + } + + mLiveSession->seekTo(seekTimeUs); + + return OK; +} + +bool NuPlayer::HTTPLiveSource::isSeekable() { + return mLiveSession->isSeekable(); +} + +} // namespace android + diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h new file mode 100644 index 0000000..f3f539a --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef HTTP_LIVE_SOURCE_H_ + +#define HTTP_LIVE_SOURCE_H_ + +#include "NuPlayer.h" +#include "NuPlayerSource.h" + +namespace android { + +struct ATSParser; +struct LiveSession; + +struct NuPlayer::HTTPLiveSource : public NuPlayer::Source { + HTTPLiveSource(const char *url); + + virtual void start(); + + // Returns true iff more data was available, false on EOS. 
+ virtual bool feedMoreTSData(); + + virtual sp<MetaData> getFormat(bool audio); + virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit); + + virtual status_t getDuration(int64_t *durationUs); + virtual status_t seekTo(int64_t seekTimeUs); + virtual bool isSeekable(); + +protected: + virtual ~HTTPLiveSource(); + +private: + AString mURL; + bool mEOS; + off64_t mOffset; + sp<ALooper> mLiveLooper; + sp<LiveSession> mLiveSession; + sp<ATSParser> mTSParser; + + DISALLOW_EVIL_CONSTRUCTORS(HTTPLiveSource); +}; + +} // namespace android + +#endif // HTTP_LIVE_SOURCE_H_ diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp new file mode 100644 index 0000000..1fcf92b --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp @@ -0,0 +1,663 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "NuPlayer" +#include <utils/Log.h> + +#include "NuPlayer.h" + +#include "HTTPLiveSource.h" +#include "NuPlayerDecoder.h" +#include "NuPlayerDriver.h" +#include "NuPlayerRenderer.h" +#include "NuPlayerSource.h" +#include "StreamingSource.h" + +#include "ATSParser.h" + +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/ACodec.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MetaData.h> +#include <surfaceflinger/Surface.h> + +namespace android { + +//////////////////////////////////////////////////////////////////////////////// + +NuPlayer::NuPlayer() + : mAudioEOS(false), + mVideoEOS(false), + mScanSourcesPending(false), + mScanSourcesGeneration(0), + mFlushingAudio(NONE), + mFlushingVideo(NONE), + mResetInProgress(false), + mResetPostponed(false) { +} + +NuPlayer::~NuPlayer() { +} + +void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) { + mDriver = driver; +} + +void NuPlayer::setDataSource(const sp<IStreamSource> &source) { + sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + + msg->setObject("source", new StreamingSource(source)); + msg->post(); +} + +void NuPlayer::setDataSource( + const char *url, const KeyedVector<String8, String8> *headers) { + sp<AMessage> msg = new AMessage(kWhatSetDataSource, id()); + + msg->setObject("source", new HTTPLiveSource(url)); + msg->post(); +} + +void NuPlayer::setVideoSurface(const sp<Surface> &surface) { + sp<AMessage> msg = new AMessage(kWhatSetVideoSurface, id()); + msg->setObject("surface", surface); + msg->post(); +} + +void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) { + sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id()); + msg->setObject("sink", sink); + msg->post(); +} + +void NuPlayer::start() { + (new AMessage(kWhatStart, id()))->post(); +} + +void NuPlayer::pause() { + (new AMessage(kWhatPause, id()))->post(); +} + 
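// Illustrative sketch, not part of this change: every NuPlayer entry point
// above (setDataSource(), start(), pause(), seekToAsync(), ...) follows the
// same asynchronous pattern -- the calling thread only builds an AMessage and
// posts it, and all state is touched in onMessageReceived() on the looper
// thread the handler is registered with. A minimal example of that pattern,
// using only foundation calls already used in this file; the PingHandler and
// kWhatPing names are invented for illustration.

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AHandler.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

struct PingHandler : public AHandler {
    enum { kWhatPing };

    // May be called from any thread; it only posts a message.
    void ping(int32_t value) {
        sp<AMessage> msg = new AMessage(kWhatPing, id());
        msg->setInt32("value", value);
        msg->post();
    }

protected:
    virtual void onMessageReceived(const sp<AMessage> &msg) {
        switch (msg->what()) {
            case kWhatPing:
            {
                int32_t value;
                CHECK(msg->findInt32("value", &value));
                // Handler state may be modified here without extra locking,
                // since all messages for this handler run on one looper.
                break;
            }

            default:
                TRESPASS();
                break;
        }
    }
};

}  // namespace android

// Typical wiring, mirroring what NuPlayerDriver does for NuPlayer itself:
//   sp<ALooper> looper = new ALooper;
//   looper->start();
//   sp<PingHandler> handler = new PingHandler;
//   looper->registerHandler(handler);
//   handler->ping(42);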
+void NuPlayer::resume() { + (new AMessage(kWhatResume, id()))->post(); +} + +void NuPlayer::resetAsync() { + (new AMessage(kWhatReset, id()))->post(); +} + +void NuPlayer::seekToAsync(int64_t seekTimeUs) { + sp<AMessage> msg = new AMessage(kWhatSeek, id()); + msg->setInt64("seekTimeUs", seekTimeUs); + msg->post(); +} + +// static +bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) { + switch (state) { + case FLUSHING_DECODER: + if (needShutdown != NULL) { + *needShutdown = false; + } + return true; + + case FLUSHING_DECODER_SHUTDOWN: + if (needShutdown != NULL) { + *needShutdown = true; + } + return true; + + default: + return false; + } +} + +void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatSetDataSource: + { + LOGV("kWhatSetDataSource"); + + CHECK(mSource == NULL); + + sp<RefBase> obj; + CHECK(msg->findObject("source", &obj)); + + mSource = static_cast<Source *>(obj.get()); + break; + } + + case kWhatSetVideoSurface: + { + LOGV("kWhatSetVideoSurface"); + + sp<RefBase> obj; + CHECK(msg->findObject("surface", &obj)); + + mSurface = static_cast<Surface *>(obj.get()); + break; + } + + case kWhatSetAudioSink: + { + LOGV("kWhatSetAudioSink"); + + sp<RefBase> obj; + CHECK(msg->findObject("sink", &obj)); + + mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get()); + break; + } + + case kWhatStart: + { + LOGV("kWhatStart"); + + mAudioEOS = false; + mVideoEOS = false; + + mSource->start(); + + mRenderer = new Renderer( + mAudioSink, + new AMessage(kWhatRendererNotify, id())); + + looper()->registerHandler(mRenderer); + + postScanSources(); + break; + } + + case kWhatScanSources: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation != mScanSourcesGeneration) { + // Drop obsolete msg. + break; + } + + mScanSourcesPending = false; + + LOGV("scanning sources haveAudio=%d, haveVideo=%d", + mAudioDecoder != NULL, mVideoDecoder != NULL); + + instantiateDecoder(false, &mVideoDecoder); + + if (mAudioSink != NULL) { + instantiateDecoder(true, &mAudioDecoder); + } + + if (!mSource->feedMoreTSData()) { + if (mAudioDecoder == NULL && mVideoDecoder == NULL) { + // We're not currently decoding anything (no audio or + // video tracks found) and we just ran out of input data. + notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0); + } + break; + } + + if (mAudioDecoder == NULL || mVideoDecoder == NULL) { + msg->post(100000ll); + mScanSourcesPending = true; + } + break; + } + + case kWhatVideoNotify: + case kWhatAudioNotify: + { + bool audio = msg->what() == kWhatAudioNotify; + + sp<AMessage> codecRequest; + CHECK(msg->findMessage("codec-request", &codecRequest)); + + int32_t what; + CHECK(codecRequest->findInt32("what", &what)); + + if (what == ACodec::kWhatFillThisBuffer) { + status_t err = feedDecoderInputData( + audio, codecRequest); + + if (err == -EWOULDBLOCK) { + if (mSource->feedMoreTSData()) { + msg->post(); + } + } + } else if (what == ACodec::kWhatEOS) { + mRenderer->queueEOS(audio, ERROR_END_OF_STREAM); + } else if (what == ACodec::kWhatFlushCompleted) { + bool needShutdown; + + if (audio) { + CHECK(IsFlushingState(mFlushingAudio, &needShutdown)); + mFlushingAudio = FLUSHED; + } else { + CHECK(IsFlushingState(mFlushingVideo, &needShutdown)); + mFlushingVideo = FLUSHED; + } + + LOGV("decoder %s flush completed", audio ? "audio" : "video"); + + if (needShutdown) { + LOGV("initiating %s decoder shutdown", + audio ? "audio" : "video"); + + (audio ? 
mAudioDecoder : mVideoDecoder)->initiateShutdown(); + + if (audio) { + mFlushingAudio = SHUTTING_DOWN_DECODER; + } else { + mFlushingVideo = SHUTTING_DOWN_DECODER; + } + } + + finishFlushIfPossible(); + } else if (what == ACodec::kWhatOutputFormatChanged) { + if (audio) { + int32_t numChannels; + CHECK(codecRequest->findInt32("channel-count", &numChannels)); + + int32_t sampleRate; + CHECK(codecRequest->findInt32("sample-rate", &sampleRate)); + + LOGV("Audio output format changed to %d Hz, %d channels", + sampleRate, numChannels); + + mAudioSink->close(); + CHECK_EQ(mAudioSink->open(sampleRate, numChannels), (status_t)OK); + mAudioSink->start(); + + mRenderer->signalAudioSinkChanged(); + } else { + // video + + int32_t width, height; + CHECK(codecRequest->findInt32("width", &width)); + CHECK(codecRequest->findInt32("height", &height)); + + int32_t cropLeft, cropTop, cropRight, cropBottom; + CHECK(codecRequest->findRect( + "crop", + &cropLeft, &cropTop, &cropRight, &cropBottom)); + + LOGV("Video output format changed to %d x %d " + "(crop: %d, %d, %d, %d)", + width, height, + cropLeft, cropTop, cropRight, cropBottom); + + notifyListener( + MEDIA_SET_VIDEO_SIZE, + cropRight - cropLeft + 1, + cropBottom - cropTop + 1); + } + } else if (what == ACodec::kWhatShutdownCompleted) { + LOGV("%s shutdown completed", audio ? "audio" : "video"); + if (audio) { + mAudioDecoder.clear(); + + CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER); + mFlushingAudio = SHUT_DOWN; + } else { + mVideoDecoder.clear(); + + CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER); + mFlushingVideo = SHUT_DOWN; + } + + finishFlushIfPossible(); + } else { + CHECK_EQ((int)what, (int)ACodec::kWhatDrainThisBuffer); + + renderBuffer(audio, codecRequest); + } + + break; + } + + case kWhatRendererNotify: + { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + if (what == Renderer::kWhatEOS) { + int32_t audio; + CHECK(msg->findInt32("audio", &audio)); + + if (audio) { + mAudioEOS = true; + } else { + mVideoEOS = true; + } + + LOGV("reached %s EOS", audio ? "audio" : "video"); + + if ((mAudioEOS || mAudioDecoder == NULL) + && (mVideoEOS || mVideoDecoder == NULL)) { + notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0); + } + } else if (what == Renderer::kWhatPosition) { + int64_t positionUs; + CHECK(msg->findInt64("positionUs", &positionUs)); + + if (mDriver != NULL) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyPosition(positionUs); + } + } + } else { + CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete); + + int32_t audio; + CHECK(msg->findInt32("audio", &audio)); + + LOGV("renderer %s flush completed.", audio ? "audio" : "video"); + } + break; + } + + case kWhatMoreDataQueued: + { + break; + } + + case kWhatReset: + { + LOGV("kWhatReset"); + + if (mFlushingAudio != NONE || mFlushingVideo != NONE) { + // We're currently flushing, postpone the reset until that's + // completed. 
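// Aside (illustrative, not part of this change): the bookkeeping this
// postponement relies on is the per-stream FlushStatus declared in NuPlayer.h.
// Condensed, the stream being flushed moves through:
//
//   NONE or AWAITING_DISCONTINUITY
//        -> FLUSHING_DECODER or FLUSHING_DECODER_SHUTDOWN   (flushDecoder())
//        -> FLUSHED                        (kWhatFlushCompleted, no shutdown)
//        -> SHUTTING_DOWN_DECODER          (kWhatFlushCompleted + shutdown)
//        -> SHUT_DOWN                      (kWhatShutdownCompleted)
//
// finishFlushIfPossible() only makes progress once both audio and video pass a
// check equivalent to the sketch below; the enum mirrors the one in NuPlayer.h
// and the helper name is invented.

enum FlushStatusSketch {
    NONE, AWAITING_DISCONTINUITY,
    FLUSHING_DECODER, FLUSHING_DECODER_SHUTDOWN,
    SHUTTING_DOWN_DECODER, FLUSHED, SHUT_DOWN,
};

static bool streamQuiescent(FlushStatusSketch s) {
    // finishFlushIfPossible() returns early unless both streams satisfy this;
    // only then does it resume the decoders or let a pending reset complete.
    return s == FLUSHED || s == SHUT_DOWN;
}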
+ + LOGV("postponing reset"); + + mResetPostponed = true; + break; + } + + if (mAudioDecoder == NULL && mVideoDecoder == NULL) { + finishReset(); + break; + } + + if (mAudioDecoder != NULL) { + flushDecoder(true /* audio */, true /* needShutdown */); + } + + if (mVideoDecoder != NULL) { + flushDecoder(false /* audio */, true /* needShutdown */); + } + + mResetInProgress = true; + break; + } + + case kWhatSeek: + { + int64_t seekTimeUs; + CHECK(msg->findInt64("seekTimeUs", &seekTimeUs)); + + LOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)", + seekTimeUs, seekTimeUs / 1E6); + + mSource->seekTo(seekTimeUs); + + if (mDriver != NULL) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifySeekComplete(); + } + } + + break; + } + + case kWhatPause: + { + CHECK(mRenderer != NULL); + mRenderer->pause(); + break; + } + + case kWhatResume: + { + CHECK(mRenderer != NULL); + mRenderer->resume(); + break; + } + + default: + TRESPASS(); + break; + } +} + +void NuPlayer::finishFlushIfPossible() { + if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) { + return; + } + + if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) { + return; + } + + LOGV("both audio and video are flushed now."); + + mRenderer->signalTimeDiscontinuity(); + + if (mAudioDecoder != NULL) { + mAudioDecoder->signalResume(); + } + + if (mVideoDecoder != NULL) { + mVideoDecoder->signalResume(); + } + + mFlushingAudio = NONE; + mFlushingVideo = NONE; + + if (mResetInProgress) { + LOGV("reset completed"); + + mResetInProgress = false; + finishReset(); + } else if (mResetPostponed) { + (new AMessage(kWhatReset, id()))->post(); + mResetPostponed = false; + } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) { + postScanSources(); + } +} + +void NuPlayer::finishReset() { + CHECK(mAudioDecoder == NULL); + CHECK(mVideoDecoder == NULL); + + mRenderer.clear(); + mSource.clear(); + + if (mDriver != NULL) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyResetComplete(); + } + } +} + +void NuPlayer::postScanSources() { + if (mScanSourcesPending) { + return; + } + + sp<AMessage> msg = new AMessage(kWhatScanSources, id()); + msg->setInt32("generation", mScanSourcesGeneration); + msg->post(); + + mScanSourcesPending = true; +} + +status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) { + if (*decoder != NULL) { + return OK; + } + + sp<MetaData> meta = mSource->getFormat(audio); + + if (meta == NULL) { + return -EWOULDBLOCK; + } + + sp<AMessage> notify = + new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify, + id()); + + *decoder = new Decoder(notify, audio ? 
NULL : mSurface); + looper()->registerHandler(*decoder); + + (*decoder)->configure(meta); + + int64_t durationUs; + if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { + sp<NuPlayerDriver> driver = mDriver.promote(); + if (driver != NULL) { + driver->notifyDuration(durationUs); + } + } + + return OK; +} + +status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) { + sp<AMessage> reply; + CHECK(msg->findMessage("reply", &reply)); + + if ((audio && IsFlushingState(mFlushingAudio)) + || (!audio && IsFlushingState(mFlushingVideo))) { + reply->setInt32("err", INFO_DISCONTINUITY); + reply->post(); + return OK; + } + + sp<ABuffer> accessUnit; + status_t err = mSource->dequeueAccessUnit(audio, &accessUnit); + + if (err == -EWOULDBLOCK) { + return err; + } else if (err != OK) { + if (err == INFO_DISCONTINUITY) { + int32_t type; + CHECK(accessUnit->meta()->findInt32("discontinuity", &type)); + + bool formatChange = + type == ATSParser::DISCONTINUITY_FORMATCHANGE; + + LOGV("%s discontinuity (formatChange=%d)", + audio ? "audio" : "video", formatChange); + + flushDecoder(audio, formatChange); + } + + reply->setInt32("err", err); + reply->post(); + return OK; + } + + // LOGV("returned a valid buffer of %s data", audio ? "audio" : "video"); + +#if 0 + int64_t mediaTimeUs; + CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs)); + LOGV("feeding %s input buffer at media time %.2f secs", + audio ? "audio" : "video", + mediaTimeUs / 1E6); +#endif + + reply->setObject("buffer", accessUnit); + reply->post(); + + return OK; +} + +void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) { + // LOGV("renderBuffer %s", audio ? "audio" : "video"); + + sp<AMessage> reply; + CHECK(msg->findMessage("reply", &reply)); + + sp<RefBase> obj; + CHECK(msg->findObject("buffer", &obj)); + + sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); + + mRenderer->queueBuffer(audio, buffer, reply); +} + +void NuPlayer::notifyListener(int msg, int ext1, int ext2) { + if (mDriver == NULL) { + return; + } + + sp<NuPlayerDriver> driver = mDriver.promote(); + + if (driver == NULL) { + return; + } + + driver->sendEvent(msg, ext1, ext2); +} + +void NuPlayer::flushDecoder(bool audio, bool needShutdown) { + // Make sure we don't continue to scan sources until we finish flushing. + ++mScanSourcesGeneration; + mScanSourcesPending = false; + + (audio ? mAudioDecoder : mVideoDecoder)->signalFlush(); + mRenderer->flush(audio); + + FlushStatus newStatus = + needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER; + + if (audio) { + CHECK(mFlushingAudio == NONE + || mFlushingAudio == AWAITING_DISCONTINUITY); + + mFlushingAudio = newStatus; + + if (mFlushingVideo == NONE) { + mFlushingVideo = (mVideoDecoder != NULL) + ? AWAITING_DISCONTINUITY + : FLUSHED; + } + } else { + CHECK(mFlushingVideo == NONE + || mFlushingVideo == AWAITING_DISCONTINUITY); + + mFlushingVideo = newStatus; + + if (mFlushingAudio == NONE) { + mFlushingAudio = (mAudioDecoder != NULL) + ? AWAITING_DISCONTINUITY + : FLUSHED; + } + } +} + +} // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h new file mode 100644 index 0000000..bb65162 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h @@ -0,0 +1,132 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef NU_PLAYER_H_ + +#define NU_PLAYER_H_ + +#include <media/MediaPlayerInterface.h> +#include <media/stagefright/foundation/AHandler.h> + +namespace android { + +struct ACodec; +struct MetaData; +struct NuPlayerDriver; + +struct NuPlayer : public AHandler { + NuPlayer(); + + void setDriver(const wp<NuPlayerDriver> &driver); + + void setDataSource(const sp<IStreamSource> &source); + + void setDataSource( + const char *url, const KeyedVector<String8, String8> *headers); + + void setVideoSurface(const sp<Surface> &surface); + void setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink); + void start(); + + void pause(); + void resume(); + + // Will notify the driver through "notifyResetComplete" once finished. + void resetAsync(); + + // Will notify the driver through "notifySeekComplete" once finished. + void seekToAsync(int64_t seekTimeUs); + +protected: + virtual ~NuPlayer(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + struct Decoder; + struct HTTPLiveSource; + struct NuPlayerStreamListener; + struct Renderer; + struct Source; + struct StreamingSource; + + enum { + kWhatSetDataSource, + kWhatSetVideoSurface, + kWhatSetAudioSink, + kWhatMoreDataQueued, + kWhatStart, + kWhatScanSources, + kWhatVideoNotify, + kWhatAudioNotify, + kWhatRendererNotify, + kWhatReset, + kWhatSeek, + kWhatPause, + kWhatResume, + }; + + wp<NuPlayerDriver> mDriver; + sp<Source> mSource; + sp<Surface> mSurface; + sp<MediaPlayerBase::AudioSink> mAudioSink; + sp<Decoder> mVideoDecoder; + sp<Decoder> mAudioDecoder; + sp<Renderer> mRenderer; + + bool mAudioEOS; + bool mVideoEOS; + + bool mScanSourcesPending; + int32_t mScanSourcesGeneration; + + enum FlushStatus { + NONE, + AWAITING_DISCONTINUITY, + FLUSHING_DECODER, + FLUSHING_DECODER_SHUTDOWN, + SHUTTING_DOWN_DECODER, + FLUSHED, + SHUT_DOWN, + }; + + FlushStatus mFlushingAudio; + FlushStatus mFlushingVideo; + bool mResetInProgress; + bool mResetPostponed; + + status_t instantiateDecoder(bool audio, sp<Decoder> *decoder); + + status_t feedDecoderInputData(bool audio, const sp<AMessage> &msg); + void renderBuffer(bool audio, const sp<AMessage> &msg); + + void notifyListener(int msg, int ext1, int ext2); + + void finishFlushIfPossible(); + + void flushDecoder(bool audio, bool needShutdown); + + static bool IsFlushingState(FlushStatus state, bool *needShutdown = NULL); + + void finishReset(); + void postScanSources(); + + DISALLOW_EVIL_CONSTRUCTORS(NuPlayer); +}; + +} // namespace android + +#endif // NU_PLAYER_H_ diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp new file mode 100644 index 0000000..761dfa4 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp @@ -0,0 +1,296 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "NuPlayerDecoder" +#include <utils/Log.h> + +#include "NuPlayerDecoder.h" + +#include "DecoderWrapper.h" +#include "ESDS.h" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/ACodec.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/Utils.h> +#include <surfaceflinger/Surface.h> + +namespace android { + +NuPlayer::Decoder::Decoder( + const sp<AMessage> ¬ify, const sp<Surface> &surface) + : mNotify(notify), + mSurface(surface) { +} + +NuPlayer::Decoder::~Decoder() { +} + +void NuPlayer::Decoder::configure(const sp<MetaData> &meta) { + CHECK(mCodec == NULL); + CHECK(mWrapper == NULL); + + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + + sp<AMessage> notifyMsg = + new AMessage(kWhatCodecNotify, id()); + + sp<AMessage> format = makeFormat(meta); + + if (mSurface != NULL) { + format->setObject("surface", mSurface); + } + + if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { + mWrapper = new DecoderWrapper; + looper()->registerHandler(mWrapper); + + mWrapper->setNotificationMessage(notifyMsg); + mWrapper->initiateSetup(format); + } else { + mCodec = new ACodec; + looper()->registerHandler(mCodec); + + mCodec->setNotificationMessage(notifyMsg); + mCodec->initiateSetup(format); + } +} + +void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatCodecNotify: + { + int32_t what; + CHECK(msg->findInt32("what", &what)); + + if (what == ACodec::kWhatFillThisBuffer) { + onFillThisBuffer(msg); + } else { + sp<AMessage> notify = mNotify->dup(); + notify->setMessage("codec-request", msg); + notify->post(); + } + break; + } + + default: + TRESPASS(); + break; + } +} + +sp<AMessage> NuPlayer::Decoder::makeFormat(const sp<MetaData> &meta) { + CHECK(mCSD.isEmpty()); + + const char *mime; + CHECK(meta->findCString(kKeyMIMEType, &mime)); + + sp<AMessage> msg = new AMessage; + msg->setString("mime", mime); + + if (!strncasecmp("video/", mime, 6)) { + int32_t width, height; + CHECK(meta->findInt32(kKeyWidth, &width)); + CHECK(meta->findInt32(kKeyHeight, &height)); + + msg->setInt32("width", width); + msg->setInt32("height", height); + } else { + CHECK(!strncasecmp("audio/", mime, 6)); + + int32_t numChannels, sampleRate; + CHECK(meta->findInt32(kKeyChannelCount, &numChannels)); + CHECK(meta->findInt32(kKeySampleRate, &sampleRate)); + + msg->setInt32("channel-count", numChannels); + msg->setInt32("sample-rate", sampleRate); + } + + int32_t maxInputSize; + if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) { + msg->setInt32("max-input-size", maxInputSize); + } + + mCSDIndex = 0; + + uint32_t type; + const void *data; + size_t size; + if (meta->findData(kKeyAVCC, &type, &data, &size)) { + // Parse the AVCDecoderConfigurationRecord + + const uint8_t *ptr = (const uint8_t *)data; + + CHECK(size >= 7); + CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1 + uint8_t profile = ptr[1]; + 
uint8_t level = ptr[3]; + + // There is decodable content out there that fails the following + // assertion, let's be lenient for now... + // CHECK((ptr[4] >> 2) == 0x3f); // reserved + + size_t lengthSize = 1 + (ptr[4] & 3); + + // commented out check below as H264_QVGA_500_NO_AUDIO.3gp + // violates it... + // CHECK((ptr[5] >> 5) == 7); // reserved + + size_t numSeqParameterSets = ptr[5] & 31; + + ptr += 6; + size -= 6; + + sp<ABuffer> buffer = new ABuffer(1024); + buffer->setRange(0, 0); + + for (size_t i = 0; i < numSeqParameterSets; ++i) { + CHECK(size >= 2); + size_t length = U16_AT(ptr); + + ptr += 2; + size -= 2; + + CHECK(size >= length); + + memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4); + memcpy(buffer->data() + buffer->size() + 4, ptr, length); + buffer->setRange(0, buffer->size() + 4 + length); + + ptr += length; + size -= length; + } + + buffer->meta()->setInt32("csd", true); + mCSD.push(buffer); + + buffer = new ABuffer(1024); + buffer->setRange(0, 0); + + CHECK(size >= 1); + size_t numPictureParameterSets = *ptr; + ++ptr; + --size; + + for (size_t i = 0; i < numPictureParameterSets; ++i) { + CHECK(size >= 2); + size_t length = U16_AT(ptr); + + ptr += 2; + size -= 2; + + CHECK(size >= length); + + memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4); + memcpy(buffer->data() + buffer->size() + 4, ptr, length); + buffer->setRange(0, buffer->size() + 4 + length); + + ptr += length; + size -= length; + } + + buffer->meta()->setInt32("csd", true); + mCSD.push(buffer); + + msg->setObject("csd", buffer); + } else if (meta->findData(kKeyESDS, &type, &data, &size)) { +#if 0 + ESDS esds((const char *)data, size); + CHECK_EQ(esds.InitCheck(), (status_t)OK); + + const void *codec_specific_data; + size_t codec_specific_data_size; + esds.getCodecSpecificInfo( + &codec_specific_data, &codec_specific_data_size); + + sp<ABuffer> buffer = new ABuffer(codec_specific_data_size); + + memcpy(buffer->data(), codec_specific_data, + codec_specific_data_size); + + buffer->meta()->setInt32("csd", true); + mCSD.push(buffer); +#else + sp<ABuffer> buffer = new ABuffer(size); + memcpy(buffer->data(), data, size); + + msg->setObject("esds", buffer); +#endif + } + + return msg; +} + +void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) { + sp<AMessage> reply; + CHECK(msg->findMessage("reply", &reply)); + +#if 0 + sp<RefBase> obj; + CHECK(msg->findObject("buffer", &obj)); + sp<ABuffer> outBuffer = static_cast<ABuffer *>(obj.get()); +#else + sp<ABuffer> outBuffer; +#endif + + if (mCSDIndex < mCSD.size()) { + outBuffer = mCSD.editItemAt(mCSDIndex++); + outBuffer->meta()->setInt64("timeUs", 0); + + reply->setObject("buffer", outBuffer); + reply->post(); + return; + } + + sp<AMessage> notify = mNotify->dup(); + notify->setMessage("codec-request", msg); + notify->post(); +} + +void NuPlayer::Decoder::signalFlush() { + if (mCodec != NULL) { + mCodec->signalFlush(); + } else { + CHECK(mWrapper != NULL); + mWrapper->signalFlush(); + } +} + +void NuPlayer::Decoder::signalResume() { + if (mCodec != NULL) { + mCodec->signalResume(); + } else { + CHECK(mWrapper != NULL); + mWrapper->signalResume(); + } +} + +void NuPlayer::Decoder::initiateShutdown() { + if (mCodec != NULL) { + mCodec->initiateShutdown(); + } else { + CHECK(mWrapper != NULL); + mWrapper->initiateShutdown(); + } +} + +} // namespace android + diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h new file mode 100644 index 0000000..3874cfe --- 
/dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h @@ -0,0 +1,67 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef NUPLAYER_DECODER_H_ + +#define NUPLAYER_DECODER_H_ + +#include "NuPlayer.h" + +#include <media/stagefright/foundation/AHandler.h> + +namespace android { + +struct ABuffer; +struct DecoderWrapper; + +struct NuPlayer::Decoder : public AHandler { + Decoder(const sp<AMessage> ¬ify, const sp<Surface> &surface = NULL); + + void configure(const sp<MetaData> &meta); + + void signalFlush(); + void signalResume(); + void initiateShutdown(); + +protected: + virtual ~Decoder(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + enum { + kWhatCodecNotify, + }; + + sp<AMessage> mNotify; + sp<Surface> mSurface; + + sp<ACodec> mCodec; + sp<DecoderWrapper> mWrapper; + + Vector<sp<ABuffer> > mCSD; + size_t mCSDIndex; + + sp<AMessage> makeFormat(const sp<MetaData> &meta); + + void onFillThisBuffer(const sp<AMessage> &msg); + + DISALLOW_EVIL_CONSTRUCTORS(Decoder); +}; + +} // namespace android + +#endif // NUPLAYER_DECODER_H_ diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp new file mode 100644 index 0000000..ac19a2f --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp @@ -0,0 +1,268 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "NuPlayerDriver" +#include <utils/Log.h> + +#include "NuPlayerDriver.h" + +#include "NuPlayer.h" + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/ALooper.h> + +namespace android { + +NuPlayerDriver::NuPlayerDriver() + : mResetInProgress(false), + mDurationUs(-1), + mPositionUs(-1), + mLooper(new ALooper), + mState(UNINITIALIZED), + mStartupSeekTimeUs(-1) { + mLooper->setName("NuPlayerDriver Looper"); + + mLooper->start( + false, /* runOnCallingThread */ + true, /* canCallJava */ + PRIORITY_AUDIO); + + mPlayer = new NuPlayer; + mLooper->registerHandler(mPlayer); + + mPlayer->setDriver(this); +} + +NuPlayerDriver::~NuPlayerDriver() { + mLooper->stop(); +} + +status_t NuPlayerDriver::initCheck() { + return OK; +} + +status_t NuPlayerDriver::setDataSource( + const char *url, const KeyedVector<String8, String8> *headers) { + CHECK_EQ((int)mState, (int)UNINITIALIZED); + + mPlayer->setDataSource(url, headers); + + mState = STOPPED; + + return OK; +} + +status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) { + return INVALID_OPERATION; +} + +status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) { + CHECK_EQ((int)mState, (int)UNINITIALIZED); + + mPlayer->setDataSource(source); + + mState = STOPPED; + + return OK; +} + +status_t NuPlayerDriver::setVideoSurface(const sp<Surface> &surface) { + mPlayer->setVideoSurface(surface); + + return OK; +} + +status_t NuPlayerDriver::prepare() { + return OK; +} + +status_t NuPlayerDriver::prepareAsync() { + sendEvent(MEDIA_PREPARED); + + return OK; +} + +status_t NuPlayerDriver::start() { + switch (mState) { + case UNINITIALIZED: + return INVALID_OPERATION; + case STOPPED: + { + mPlayer->start(); + + if (mStartupSeekTimeUs >= 0) { + mPlayer->seekToAsync(mStartupSeekTimeUs); + mStartupSeekTimeUs = -1; + } + break; + } + case PLAYING: + return OK; + default: + { + CHECK_EQ((int)mState, (int)PAUSED); + + mPlayer->resume(); + break; + } + } + + mState = PLAYING; + + return OK; +} + +status_t NuPlayerDriver::stop() { + return pause(); +} + +status_t NuPlayerDriver::pause() { + switch (mState) { + case UNINITIALIZED: + return INVALID_OPERATION; + case STOPPED: + return OK; + case PLAYING: + mPlayer->pause(); + break; + default: + { + CHECK_EQ((int)mState, (int)PAUSED); + return OK; + } + } + + mState = PAUSED; + + return OK; +} + +bool NuPlayerDriver::isPlaying() { + return mState == PLAYING; +} + +status_t NuPlayerDriver::seekTo(int msec) { + int64_t seekTimeUs = msec * 1000ll; + + switch (mState) { + case UNINITIALIZED: + return INVALID_OPERATION; + case STOPPED: + { + mStartupSeekTimeUs = seekTimeUs; + break; + } + case PLAYING: + case PAUSED: + { + mPlayer->seekToAsync(seekTimeUs); + break; + } + + default: + TRESPASS(); + break; + } + + return OK; +} + +status_t NuPlayerDriver::getCurrentPosition(int *msec) { + Mutex::Autolock autoLock(mLock); + + if (mPositionUs < 0) { + *msec = 0; + } else { + *msec = (mPositionUs + 500ll) / 1000; + } + + return OK; +} + +status_t NuPlayerDriver::getDuration(int *msec) { + Mutex::Autolock autoLock(mLock); + + if (mDurationUs < 0) { + *msec = 0; + } else { + *msec = (mDurationUs + 500ll) / 1000; + } + + return OK; +} + +status_t NuPlayerDriver::reset() { + Mutex::Autolock autoLock(mLock); + mResetInProgress = true; + + mPlayer->resetAsync(); + + while (mResetInProgress) { + mCondition.wait(mLock); + } + + mDurationUs = -1; + mPositionUs = -1; + mState = UNINITIALIZED; + mStartupSeekTimeUs = -1; + 
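// Aside (illustrative, not part of this change): reset() above is the one
// synchronous call in this driver. It posts resetAsync() to the player's
// looper and then blocks on mCondition until NuPlayer calls back into
// notifyResetComplete() (further below), which clears mResetInProgress and
// broadcasts. The same Mutex/Condition handshake in isolation, with invented
// names:

#include <utils/threads.h>   // android::Mutex, android::Condition

struct BlockingHandshakeExample {
    android::Mutex mLock;
    android::Condition mCondition;
    bool mInProgress;

    BlockingHandshakeExample() : mInProgress(false) {}

    // Caller thread: kick off the asynchronous work, then wait for completion.
    void runAndWait() {
        android::Mutex::Autolock autoLock(mLock);
        mInProgress = true;
        // ... post the asynchronous request here ...
        while (mInProgress) {
            mCondition.wait(mLock);   // releases mLock while waiting
        }
    }

    // Callback thread: mark completion and wake the waiter.
    void notifyComplete() {
        android::Mutex::Autolock autoLock(mLock);
        mInProgress = false;
        mCondition.broadcast();
    }
};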
+ return OK; +} + +status_t NuPlayerDriver::setLooping(int loop) { + return INVALID_OPERATION; +} + +player_type NuPlayerDriver::playerType() { + return NU_PLAYER; +} + +status_t NuPlayerDriver::invoke(const Parcel &request, Parcel *reply) { + return INVALID_OPERATION; +} + +void NuPlayerDriver::setAudioSink(const sp<AudioSink> &audioSink) { + mPlayer->setAudioSink(audioSink); +} + +status_t NuPlayerDriver::getMetadata( + const media::Metadata::Filter& ids, Parcel *records) { + return INVALID_OPERATION; +} + +void NuPlayerDriver::notifyResetComplete() { + Mutex::Autolock autoLock(mLock); + CHECK(mResetInProgress); + mResetInProgress = false; + mCondition.broadcast(); +} + +void NuPlayerDriver::notifyDuration(int64_t durationUs) { + Mutex::Autolock autoLock(mLock); + mDurationUs = durationUs; +} + +void NuPlayerDriver::notifyPosition(int64_t positionUs) { + Mutex::Autolock autoLock(mLock); + mPositionUs = positionUs; +} + +void NuPlayerDriver::notifySeekComplete() { + sendEvent(MEDIA_SEEK_COMPLETE); +} + +} // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h new file mode 100644 index 0000000..e3a5de4 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h @@ -0,0 +1,95 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <media/MediaPlayerInterface.h> + +#include <media/stagefright/foundation/ABase.h> + +namespace android { + +struct ALooper; +struct NuPlayer; + +struct NuPlayerDriver : public MediaPlayerInterface { + NuPlayerDriver(); + + virtual status_t initCheck(); + + virtual status_t setDataSource( + const char *url, const KeyedVector<String8, String8> *headers); + + virtual status_t setDataSource(int fd, int64_t offset, int64_t length); + + virtual status_t setDataSource(const sp<IStreamSource> &source); + + virtual status_t setVideoSurface(const sp<Surface> &surface); + virtual status_t prepare(); + virtual status_t prepareAsync(); + virtual status_t start(); + virtual status_t stop(); + virtual status_t pause(); + virtual bool isPlaying(); + virtual status_t seekTo(int msec); + virtual status_t getCurrentPosition(int *msec); + virtual status_t getDuration(int *msec); + virtual status_t reset(); + virtual status_t setLooping(int loop); + virtual player_type playerType(); + virtual status_t invoke(const Parcel &request, Parcel *reply); + virtual void setAudioSink(const sp<AudioSink> &audioSink); + + virtual status_t getMetadata( + const media::Metadata::Filter& ids, Parcel *records); + + void notifyResetComplete(); + void notifyDuration(int64_t durationUs); + void notifyPosition(int64_t positionUs); + void notifySeekComplete(); + +protected: + virtual ~NuPlayerDriver(); + +private: + Mutex mLock; + Condition mCondition; + + // The following are protected through "mLock" + // >>> + bool mResetInProgress; + int64_t mDurationUs; + int64_t mPositionUs; + // <<< + + sp<ALooper> mLooper; + sp<NuPlayer> mPlayer; + + enum State { + UNINITIALIZED, + STOPPED, + PLAYING, + PAUSED + }; + + State mState; + + int64_t mStartupSeekTimeUs; + + DISALLOW_EVIL_CONSTRUCTORS(NuPlayerDriver); +}; + +} // namespace android + + diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp new file mode 100644 index 0000000..369a3a8 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp @@ -0,0 +1,593 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "NuPlayerRenderer" +#include <utils/Log.h> + +#include "NuPlayerRenderer.h" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +namespace android { + +NuPlayer::Renderer::Renderer( + const sp<MediaPlayerBase::AudioSink> &sink, + const sp<AMessage> ¬ify) + : mAudioSink(sink), + mNotify(notify), + mNumFramesWritten(0), + mDrainAudioQueuePending(false), + mDrainVideoQueuePending(false), + mAudioQueueGeneration(0), + mVideoQueueGeneration(0), + mAnchorTimeMediaUs(-1), + mAnchorTimeRealUs(-1), + mFlushingAudio(false), + mFlushingVideo(false), + mHasAudio(false), + mHasVideo(false), + mSyncQueues(false), + mPaused(false) { +} + +NuPlayer::Renderer::~Renderer() { +} + +void NuPlayer::Renderer::queueBuffer( + bool audio, + const sp<ABuffer> &buffer, + const sp<AMessage> ¬ifyConsumed) { + sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); + msg->setInt32("audio", static_cast<int32_t>(audio)); + msg->setObject("buffer", buffer); + msg->setMessage("notifyConsumed", notifyConsumed); + msg->post(); +} + +void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { + CHECK_NE(finalResult, (status_t)OK); + + sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); + msg->setInt32("audio", static_cast<int32_t>(audio)); + msg->setInt32("finalResult", finalResult); + msg->post(); +} + +void NuPlayer::Renderer::flush(bool audio) { + { + Mutex::Autolock autoLock(mFlushLock); + if (audio) { + CHECK(!mFlushingAudio); + mFlushingAudio = true; + } else { + CHECK(!mFlushingVideo); + mFlushingVideo = true; + } + } + + sp<AMessage> msg = new AMessage(kWhatFlush, id()); + msg->setInt32("audio", static_cast<int32_t>(audio)); + msg->post(); +} + +void NuPlayer::Renderer::signalTimeDiscontinuity() { + CHECK(mAudioQueue.empty()); + CHECK(mVideoQueue.empty()); + mAnchorTimeMediaUs = -1; + mAnchorTimeRealUs = -1; + mSyncQueues = mHasAudio && mHasVideo; +} + +void NuPlayer::Renderer::pause() { + (new AMessage(kWhatPause, id()))->post(); +} + +void NuPlayer::Renderer::resume() { + (new AMessage(kWhatResume, id()))->post(); +} + +void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatDrainAudioQueue: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation != mAudioQueueGeneration) { + break; + } + + mDrainAudioQueuePending = false; + + onDrainAudioQueue(); + + postDrainAudioQueue(); + break; + } + + case kWhatDrainVideoQueue: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + if (generation != mVideoQueueGeneration) { + break; + } + + mDrainVideoQueuePending = false; + + onDrainVideoQueue(); + + postDrainVideoQueue(); + break; + } + + case kWhatQueueBuffer: + { + onQueueBuffer(msg); + break; + } + + case kWhatQueueEOS: + { + onQueueEOS(msg); + break; + } + + case kWhatFlush: + { + onFlush(msg); + break; + } + + case kWhatAudioSinkChanged: + { + onAudioSinkChanged(); + break; + } + + case kWhatPause: + { + onPause(); + break; + } + + case kWhatResume: + { + onResume(); + break; + } + + default: + TRESPASS(); + break; + } +} + +void NuPlayer::Renderer::postDrainAudioQueue() { + if (mDrainAudioQueuePending || mSyncQueues || mPaused) { + return; + } + + if (mAudioQueue.empty()) { + return; + } + + mDrainAudioQueuePending = true; + sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); + msg->setInt32("generation", mAudioQueueGeneration); + 
msg->post(10000); +} + +void NuPlayer::Renderer::signalAudioSinkChanged() { + (new AMessage(kWhatAudioSinkChanged, id()))->post(); +} + +void NuPlayer::Renderer::onDrainAudioQueue() { + uint32_t numFramesPlayed; + CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); + + ssize_t numFramesAvailableToWrite = + mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); + + CHECK_GE(numFramesAvailableToWrite, 0); + + size_t numBytesAvailableToWrite = + numFramesAvailableToWrite * mAudioSink->frameSize(); + + while (numBytesAvailableToWrite > 0) { + if (mAudioQueue.empty()) { + break; + } + + QueueEntry *entry = &*mAudioQueue.begin(); + + if (entry->mBuffer == NULL) { + // EOS + + notifyEOS(true /* audio */); + + mAudioQueue.erase(mAudioQueue.begin()); + entry = NULL; + return; + } + + if (entry->mOffset == 0) { + int64_t mediaTimeUs; + CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + + LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); + + mAnchorTimeMediaUs = mediaTimeUs; + + uint32_t numFramesPlayed; + CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); + + uint32_t numFramesPendingPlayout = + mNumFramesWritten - numFramesPlayed; + + int64_t realTimeOffsetUs = + (mAudioSink->latency() / 2 /* XXX */ + + numFramesPendingPlayout + * mAudioSink->msecsPerFrame()) * 1000ll; + + // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs); + + mAnchorTimeRealUs = + ALooper::GetNowUs() + realTimeOffsetUs; + } + + size_t copy = entry->mBuffer->size() - entry->mOffset; + if (copy > numBytesAvailableToWrite) { + copy = numBytesAvailableToWrite; + } + + CHECK_EQ(mAudioSink->write( + entry->mBuffer->data() + entry->mOffset, copy), + (ssize_t)copy); + + entry->mOffset += copy; + if (entry->mOffset == entry->mBuffer->size()) { + entry->mNotifyConsumed->post(); + mAudioQueue.erase(mAudioQueue.begin()); + entry = NULL; + } + + numBytesAvailableToWrite -= copy; + mNumFramesWritten += copy / mAudioSink->frameSize(); + } + + notifyPosition(); +} + +void NuPlayer::Renderer::postDrainVideoQueue() { + if (mDrainVideoQueuePending || mSyncQueues || mPaused) { + return; + } + + if (mVideoQueue.empty()) { + return; + } + + QueueEntry &entry = *mVideoQueue.begin(); + + sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); + msg->setInt32("generation", mVideoQueueGeneration); + + int64_t delayUs; + + if (entry.mBuffer == NULL) { + // EOS doesn't carry a timestamp. 
+ delayUs = 0; + } else { + int64_t mediaTimeUs; + CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + + if (mAnchorTimeMediaUs < 0) { + delayUs = 0; + + if (!mHasAudio) { + mAnchorTimeMediaUs = mediaTimeUs; + mAnchorTimeRealUs = ALooper::GetNowUs(); + } + } else { + int64_t realTimeUs = + (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs; + + delayUs = realTimeUs - ALooper::GetNowUs(); + } + } + + msg->post(delayUs); + + mDrainVideoQueuePending = true; +} + +void NuPlayer::Renderer::onDrainVideoQueue() { + if (mVideoQueue.empty()) { + return; + } + + QueueEntry *entry = &*mVideoQueue.begin(); + + if (entry->mBuffer == NULL) { + // EOS + + notifyEOS(false /* audio */); + + mVideoQueue.erase(mVideoQueue.begin()); + entry = NULL; + return; + } + +#if 0 + int64_t mediaTimeUs; + CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); + + LOGI("rendering video at media time %.2f secs", mediaTimeUs / 1E6); +#endif + + entry->mNotifyConsumed->setInt32("render", true); + entry->mNotifyConsumed->post(); + mVideoQueue.erase(mVideoQueue.begin()); + entry = NULL; + + notifyPosition(); +} + +void NuPlayer::Renderer::notifyEOS(bool audio) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatEOS); + notify->setInt32("audio", static_cast<int32_t>(audio)); + notify->post(); +} + +void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { + int32_t audio; + CHECK(msg->findInt32("audio", &audio)); + + if (audio) { + mHasAudio = true; + } else { + mHasVideo = true; + } + + if (dropBufferWhileFlushing(audio, msg)) { + return; + } + + sp<RefBase> obj; + CHECK(msg->findObject("buffer", &obj)); + sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); + + sp<AMessage> notifyConsumed; + CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed)); + + QueueEntry entry; + entry.mBuffer = buffer; + entry.mNotifyConsumed = notifyConsumed; + entry.mOffset = 0; + entry.mFinalResult = OK; + + if (audio) { + mAudioQueue.push_back(entry); + postDrainAudioQueue(); + } else { + mVideoQueue.push_back(entry); + postDrainVideoQueue(); + } + + if (mSyncQueues && !mAudioQueue.empty() && !mVideoQueue.empty()) { + int64_t firstAudioTimeUs; + int64_t firstVideoTimeUs; + CHECK((*mAudioQueue.begin()).mBuffer->meta() + ->findInt64("timeUs", &firstAudioTimeUs)); + CHECK((*mVideoQueue.begin()).mBuffer->meta() + ->findInt64("timeUs", &firstVideoTimeUs)); + + int64_t diff = firstVideoTimeUs - firstAudioTimeUs; + + LOGV("queueDiff = %.2f secs", diff / 1E6); + + if (diff > 100000ll) { + // Audio data starts More than 0.1 secs before video. + // Drop some audio. 
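// Aside (illustrative, not part of this change; all numbers invented): with
// the first queued audio buffer stamped 0 us and the first queued video
// buffer stamped 250000 us, diff == 250000 > 100000, so the code below hands
// the leading audio buffer back to its decoder unrendered and waits for more
// buffers before comparing again; once the gap is 100 ms or less,
// syncQueuesDone() lets both queues drain. The same anchor arithmetic drives
// video scheduling in postDrainVideoQueue() above:

int64_t anchorMediaUs = 1000000;  // media time of the buffer that set the anchor
int64_t anchorRealUs  = 2000000;  // ALooper::GetNowUs() when it was rendered
int64_t mediaTimeUs   = 1200000;  // next video buffer's timestamp
int64_t nowUs         = 2150000;  // ALooper::GetNowUs() at scheduling time

int64_t realTimeUs = (mediaTimeUs - anchorMediaUs) + anchorRealUs;  // 2200000
int64_t delayUs    = realTimeUs - nowUs;                            // 50000

// => the kWhatDrainVideoQueue message is posted 50 ms from now; a zero or
//    negative delayUs means the frame is already due and is drained
//    effectively immediately.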
+ + (*mAudioQueue.begin()).mNotifyConsumed->post(); + mAudioQueue.erase(mAudioQueue.begin()); + return; + } + + syncQueuesDone(); + } +} + +void NuPlayer::Renderer::syncQueuesDone() { + if (!mSyncQueues) { + return; + } + + mSyncQueues = false; + + if (!mAudioQueue.empty()) { + postDrainAudioQueue(); + } + + if (!mVideoQueue.empty()) { + postDrainVideoQueue(); + } +} + +void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { + int32_t audio; + CHECK(msg->findInt32("audio", &audio)); + + if (dropBufferWhileFlushing(audio, msg)) { + return; + } + + int32_t finalResult; + CHECK(msg->findInt32("finalResult", &finalResult)); + + QueueEntry entry; + entry.mOffset = 0; + entry.mFinalResult = finalResult; + + if (audio) { + mAudioQueue.push_back(entry); + postDrainAudioQueue(); + } else { + mVideoQueue.push_back(entry); + postDrainVideoQueue(); + } +} + +void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { + int32_t audio; + CHECK(msg->findInt32("audio", &audio)); + + // If we're currently syncing the queues, i.e. dropping audio while + // aligning the first audio/video buffer times and only one of the + // two queues has data, we may starve that queue by not requesting + // more buffers from the decoder. If the other source then encounters + // a discontinuity that leads to flushing, we'll never find the + // corresponding discontinuity on the other queue. + // Therefore we'll stop syncing the queues if at least one of them + // is flushed. + syncQueuesDone(); + + if (audio) { + flushQueue(&mAudioQueue); + + Mutex::Autolock autoLock(mFlushLock); + mFlushingAudio = false; + + mDrainAudioQueuePending = false; + ++mAudioQueueGeneration; + } else { + flushQueue(&mVideoQueue); + + Mutex::Autolock autoLock(mFlushLock); + mFlushingVideo = false; + + mDrainVideoQueuePending = false; + ++mVideoQueueGeneration; + } + + notifyFlushComplete(audio); +} + +void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) { + while (!queue->empty()) { + QueueEntry *entry = &*queue->begin(); + + if (entry->mBuffer != NULL) { + entry->mNotifyConsumed->post(); + } + + queue->erase(queue->begin()); + entry = NULL; + } +} + +void NuPlayer::Renderer::notifyFlushComplete(bool audio) { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatFlushComplete); + notify->setInt32("audio", static_cast<int32_t>(audio)); + notify->post(); +} + +bool NuPlayer::Renderer::dropBufferWhileFlushing( + bool audio, const sp<AMessage> &msg) { + bool flushing = false; + + { + Mutex::Autolock autoLock(mFlushLock); + if (audio) { + flushing = mFlushingAudio; + } else { + flushing = mFlushingVideo; + } + } + + if (!flushing) { + return false; + } + + sp<AMessage> notifyConsumed; + if (msg->findMessage("notifyConsumed", ¬ifyConsumed)) { + notifyConsumed->post(); + } + + return true; +} + +void NuPlayer::Renderer::onAudioSinkChanged() { + CHECK(!mDrainAudioQueuePending); + mNumFramesWritten = 0; +} + +void NuPlayer::Renderer::notifyPosition() { + if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) { + return; + } + + int64_t nowUs = ALooper::GetNowUs(); + int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; + + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatPosition); + notify->setInt64("positionUs", positionUs); + notify->post(); +} + +void NuPlayer::Renderer::onPause() { + CHECK(!mPaused); + + mDrainAudioQueuePending = false; + ++mAudioQueueGeneration; + + mDrainVideoQueuePending = false; + ++mVideoQueueGeneration; + + if (mHasAudio) { + mAudioSink->pause(); + } + + mPaused 
= true; +} + +void NuPlayer::Renderer::onResume() { + CHECK(mPaused); + + if (mHasAudio) { + mAudioSink->start(); + } + + mPaused = false; + + if (!mAudioQueue.empty()) { + postDrainAudioQueue(); + } + + if (!mVideoQueue.empty()) { + postDrainVideoQueue(); + } +} + +} // namespace android + diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h new file mode 100644 index 0000000..703e971 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h @@ -0,0 +1,127 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef NUPLAYER_RENDERER_H_ + +#define NUPLAYER_RENDERER_H_ + +#include "NuPlayer.h" + +namespace android { + +struct ABuffer; + +struct NuPlayer::Renderer : public AHandler { + Renderer(const sp<MediaPlayerBase::AudioSink> &sink, + const sp<AMessage> ¬ify); + + void queueBuffer( + bool audio, + const sp<ABuffer> &buffer, + const sp<AMessage> ¬ifyConsumed); + + void queueEOS(bool audio, status_t finalResult); + + void flush(bool audio); + + void signalTimeDiscontinuity(); + + void signalAudioSinkChanged(); + + void pause(); + void resume(); + + enum { + kWhatEOS, + kWhatFlushComplete, + kWhatPosition, + }; + +protected: + virtual ~Renderer(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + enum { + kWhatDrainAudioQueue, + kWhatDrainVideoQueue, + kWhatQueueBuffer, + kWhatQueueEOS, + kWhatFlush, + kWhatAudioSinkChanged, + kWhatPause, + kWhatResume, + }; + + struct QueueEntry { + sp<ABuffer> mBuffer; + sp<AMessage> mNotifyConsumed; + size_t mOffset; + status_t mFinalResult; + }; + + sp<MediaPlayerBase::AudioSink> mAudioSink; + sp<AMessage> mNotify; + List<QueueEntry> mAudioQueue; + List<QueueEntry> mVideoQueue; + uint32_t mNumFramesWritten; + + bool mDrainAudioQueuePending; + bool mDrainVideoQueuePending; + int32_t mAudioQueueGeneration; + int32_t mVideoQueueGeneration; + + int64_t mAnchorTimeMediaUs; + int64_t mAnchorTimeRealUs; + + Mutex mFlushLock; // protects the following 2 member vars. 
+ bool mFlushingAudio; + bool mFlushingVideo; + + bool mHasAudio; + bool mHasVideo; + bool mSyncQueues; + + bool mPaused; + + void onDrainAudioQueue(); + void postDrainAudioQueue(); + + void onDrainVideoQueue(); + void postDrainVideoQueue(); + + void onQueueBuffer(const sp<AMessage> &msg); + void onQueueEOS(const sp<AMessage> &msg); + void onFlush(const sp<AMessage> &msg); + void onAudioSinkChanged(); + void onPause(); + void onResume(); + + void notifyEOS(bool audio); + void notifyFlushComplete(bool audio); + void notifyPosition(); + + void flushQueue(List<QueueEntry> *queue); + bool dropBufferWhileFlushing(bool audio, const sp<AMessage> &msg); + void syncQueuesDone(); + + DISALLOW_EVIL_CONSTRUCTORS(Renderer); +}; + +} // namespace android + +#endif // NUPLAYER_RENDERER_H_ diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h new file mode 100644 index 0000000..5e55487 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h @@ -0,0 +1,62 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef NUPLAYER_SOURCE_H_ + +#define NUPLAYER_SOURCE_H_ + +#include "NuPlayer.h" + +namespace android { + +struct ABuffer; + +struct NuPlayer::Source : public RefBase { + Source() {} + + virtual void start() = 0; + + // Returns true iff more data was available, false on EOS. + virtual bool feedMoreTSData() = 0; + + virtual sp<MetaData> getFormat(bool audio) = 0; + + virtual status_t dequeueAccessUnit( + bool audio, sp<ABuffer> *accessUnit) = 0; + + virtual status_t getDuration(int64_t *durationUs) { + return INVALID_OPERATION; + } + + virtual status_t seekTo(int64_t seekTimeUs) { + return INVALID_OPERATION; + } + + virtual bool isSeekable() { + return false; + } + +protected: + virtual ~Source() {} + +private: + DISALLOW_EVIL_CONSTRUCTORS(Source); +}; + +} // namespace android + +#endif // NUPLAYER_SOURCE_H_ + diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp new file mode 100644 index 0000000..a23beb7 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.cpp @@ -0,0 +1,159 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "NuPlayerStreamListener" +#include <utils/Log.h> + +#include "NuPlayerStreamListener.h" + +#include <binder/MemoryDealer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaErrors.h> + +namespace android { + +NuPlayer::NuPlayerStreamListener::NuPlayerStreamListener( + const sp<IStreamSource> &source, + ALooper::handler_id id) + : mSource(source), + mTargetID(id), + mEOS(false), + mSendDataNotification(true) { + mSource->setListener(this); + + mMemoryDealer = new MemoryDealer(kNumBuffers * kBufferSize); + for (size_t i = 0; i < kNumBuffers; ++i) { + sp<IMemory> mem = mMemoryDealer->allocate(kBufferSize); + CHECK(mem != NULL); + + mBuffers.push(mem); + } + mSource->setBuffers(mBuffers); +} + +void NuPlayer::NuPlayerStreamListener::start() { + for (size_t i = 0; i < kNumBuffers; ++i) { + mSource->onBufferAvailable(i); + } +} + +void NuPlayer::NuPlayerStreamListener::queueBuffer(size_t index, size_t size) { + QueueEntry entry; + entry.mIsCommand = false; + entry.mIndex = index; + entry.mSize = size; + entry.mOffset = 0; + + Mutex::Autolock autoLock(mLock); + mQueue.push_back(entry); + + if (mSendDataNotification) { + mSendDataNotification = false; + + if (mTargetID != 0) { + (new AMessage(kWhatMoreDataQueued, mTargetID))->post(); + } + } +} + +void NuPlayer::NuPlayerStreamListener::issueCommand( + Command cmd, bool synchronous, const sp<AMessage> &extra) { + CHECK(!synchronous); + + QueueEntry entry; + entry.mIsCommand = true; + entry.mCommand = cmd; + entry.mExtra = extra; + + Mutex::Autolock autoLock(mLock); + mQueue.push_back(entry); + + if (mSendDataNotification) { + mSendDataNotification = false; + + if (mTargetID != 0) { + (new AMessage(kWhatMoreDataQueued, mTargetID))->post(); + } + } +} + +ssize_t NuPlayer::NuPlayerStreamListener::read(void *data, size_t size) { + CHECK_GT(size, 0u); + + Mutex::Autolock autoLock(mLock); + + if (mEOS) { + return 0; + } + + if (mQueue.empty()) { + mSendDataNotification = true; + + return -EWOULDBLOCK; + } + + QueueEntry *entry = &*mQueue.begin(); + + if (entry->mIsCommand) { + switch (entry->mCommand) { + case EOS: + { + mQueue.erase(mQueue.begin()); + entry = NULL; + + mEOS = true; + return 0; + } + + case DISCONTINUITY: + { + mQueue.erase(mQueue.begin()); + entry = NULL; + + return INFO_DISCONTINUITY; + } + + default: + TRESPASS(); + break; + } + } + + size_t copy = entry->mSize; + if (copy > size) { + copy = size; + } + + memcpy(data, + (const uint8_t *)mBuffers.editItemAt(entry->mIndex)->pointer() + + entry->mOffset, + copy); + + entry->mOffset += copy; + entry->mSize -= copy; + + if (entry->mSize == 0) { + mSource->onBufferAvailable(entry->mIndex); + mQueue.erase(mQueue.begin()); + entry = NULL; + } + + return copy; +} + +} // namespace android diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h new file mode 100644 index 0000000..f88e945 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/NuPlayerStreamListener.h @@ -0,0 +1,74 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef NUPLAYER_STREAM_LISTENER_H_ + +#define NUPLAYER_STREAM_LISTENER_H_ + +#include "NuPlayer.h" + +#include <media/IStreamSource.h> + +namespace android { + +struct MemoryDealer; + +struct NuPlayer::NuPlayerStreamListener : public BnStreamListener { + NuPlayerStreamListener( + const sp<IStreamSource> &source, + ALooper::handler_id targetID); + + virtual void queueBuffer(size_t index, size_t size); + + virtual void issueCommand( + Command cmd, bool synchronous, const sp<AMessage> &extra); + + void start(); + ssize_t read(void *data, size_t size); + +private: + enum { + kNumBuffers = 16, + kBufferSize = 188 * 20 + }; + + struct QueueEntry { + bool mIsCommand; + + size_t mIndex; + size_t mSize; + size_t mOffset; + + Command mCommand; + sp<AMessage> mExtra; + }; + + Mutex mLock; + + sp<IStreamSource> mSource; + ALooper::handler_id mTargetID; + sp<MemoryDealer> mMemoryDealer; + Vector<sp<IMemory> > mBuffers; + List<QueueEntry> mQueue; + bool mEOS; + bool mSendDataNotification; + + DISALLOW_EVIL_CONSTRUCTORS(NuPlayerStreamListener); +}; + +} // namespace android + +#endif // NUPLAYER_STREAM_LISTENER_H_ diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.cpp b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp new file mode 100644 index 0000000..b85ac9f --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.cpp @@ -0,0 +1,120 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "StreamingSource" +#include <utils/Log.h> + +#include "StreamingSource.h" + +#include "ATSParser.h" +#include "AnotherPacketSource.h" +#include "NuPlayerStreamListener.h" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MetaData.h> + +namespace android { + +NuPlayer::StreamingSource::StreamingSource(const sp<IStreamSource> &source) + : mSource(source), + mEOS(false) { +} + +NuPlayer::StreamingSource::~StreamingSource() { +} + +void NuPlayer::StreamingSource::start() { + mStreamListener = new NuPlayerStreamListener(mSource, 0); + mTSParser = new ATSParser; + + mStreamListener->start(); +} + +bool NuPlayer::StreamingSource::feedMoreTSData() { + if (mEOS) { + return false; + } + + for (int32_t i = 0; i < 10; ++i) { + char buffer[188]; + ssize_t n = mStreamListener->read(buffer, sizeof(buffer)); + + if (n == 0) { + LOGI("input data EOS reached."); + mTSParser->signalEOS(ERROR_END_OF_STREAM); + mEOS = true; + break; + } else if (n == INFO_DISCONTINUITY) { + mTSParser->signalDiscontinuity(ATSParser::DISCONTINUITY_SEEK); + } else if (n < 0) { + CHECK_EQ(n, -EWOULDBLOCK); + break; + } else { + if (buffer[0] == 0x00) { + // XXX legacy + mTSParser->signalDiscontinuity( + buffer[1] == 0x00 + ? ATSParser::DISCONTINUITY_SEEK + : ATSParser::DISCONTINUITY_FORMATCHANGE); + } else { + mTSParser->feedTSPacket(buffer, sizeof(buffer)); + } + } + } + + return true; +} + +sp<MetaData> NuPlayer::StreamingSource::getFormat(bool audio) { + ATSParser::SourceType type = + audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO; + + sp<AnotherPacketSource> source = + static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get()); + + if (source == NULL) { + return NULL; + } + + return source->getFormat(); +} + +status_t NuPlayer::StreamingSource::dequeueAccessUnit( + bool audio, sp<ABuffer> *accessUnit) { + ATSParser::SourceType type = + audio ? ATSParser::MPEG2ADTS_AUDIO : ATSParser::AVC_VIDEO; + + sp<AnotherPacketSource> source = + static_cast<AnotherPacketSource *>(mTSParser->getSource(type).get()); + + if (source == NULL) { + return -EWOULDBLOCK; + } + + status_t finalResult; + if (!source->hasBufferAvailable(&finalResult)) { + return finalResult == OK ? -EWOULDBLOCK : finalResult; + } + + return source->dequeueAccessUnit(accessUnit); +} + +} // namespace android + diff --git a/media/libmediaplayerservice/nuplayer/StreamingSource.h b/media/libmediaplayerservice/nuplayer/StreamingSource.h new file mode 100644 index 0000000..7abce84 --- /dev/null +++ b/media/libmediaplayerservice/nuplayer/StreamingSource.h @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef STREAMING_SOURCE_H_ + +#define STREAMING_SOURCE_H_ + +#include "NuPlayer.h" +#include "NuPlayerSource.h" + +namespace android { + +struct ABuffer; +struct ATSParser; + +struct NuPlayer::StreamingSource : public NuPlayer::Source { + StreamingSource(const sp<IStreamSource> &source); + + virtual void start(); + + // Returns true iff more data was available, false on EOS. + virtual bool feedMoreTSData(); + + virtual sp<MetaData> getFormat(bool audio); + virtual status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit); + +protected: + virtual ~StreamingSource(); + +private: + sp<IStreamSource> mSource; + bool mEOS; + sp<NuPlayerStreamListener> mStreamListener; + sp<ATSParser> mTSParser; + + DISALLOW_EVIL_CONSTRUCTORS(StreamingSource); +}; + +} // namespace android + +#endif // STREAMING_SOURCE_H_ diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp new file mode 100644 index 0000000..dfb4e00 --- /dev/null +++ b/media/libstagefright/ACodec.cpp @@ -0,0 +1,2208 @@ +//#define LOG_NDEBUG 0 +#define LOG_TAG "ACodec" + +#include <media/stagefright/ACodec.h> + +#include <binder/MemoryDealer.h> + +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> + +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/OMXClient.h> + +#include <surfaceflinger/Surface.h> + +#include <OMX_Component.h> + +namespace android { + +template<class T> +static void InitOMXParams(T *params) { + params->nSize = sizeof(T); + params->nVersion.s.nVersionMajor = 1; + params->nVersion.s.nVersionMinor = 0; + params->nVersion.s.nRevision = 0; + params->nVersion.s.nStep = 0; +} + +struct CodecObserver : public BnOMXObserver { + CodecObserver() {} + + void setNotificationMessage(const sp<AMessage> &msg) { + mNotify = msg; + } + + // from IOMXObserver + virtual void onMessage(const omx_message &omx_msg) { + sp<AMessage> msg = mNotify->dup(); + + msg->setInt32("type", omx_msg.type); + msg->setPointer("node", omx_msg.node); + + switch (omx_msg.type) { + case omx_message::EVENT: + { + msg->setInt32("event", omx_msg.u.event_data.event); + msg->setInt32("data1", omx_msg.u.event_data.data1); + msg->setInt32("data2", omx_msg.u.event_data.data2); + break; + } + + case omx_message::EMPTY_BUFFER_DONE: + { + msg->setPointer("buffer", omx_msg.u.buffer_data.buffer); + break; + } + + case omx_message::FILL_BUFFER_DONE: + { + msg->setPointer( + "buffer", omx_msg.u.extended_buffer_data.buffer); + msg->setInt32( + "range_offset", + omx_msg.u.extended_buffer_data.range_offset); + msg->setInt32( + "range_length", + omx_msg.u.extended_buffer_data.range_length); + msg->setInt32( + "flags", + omx_msg.u.extended_buffer_data.flags); + msg->setInt64( + "timestamp", + omx_msg.u.extended_buffer_data.timestamp); + msg->setPointer( + "platform_private", + omx_msg.u.extended_buffer_data.platform_private); + msg->setPointer( + "data_ptr", + omx_msg.u.extended_buffer_data.data_ptr); + break; + } + + default: + TRESPASS(); + break; + } + + msg->post(); + } + +protected: + virtual ~CodecObserver() {} + +private: + sp<AMessage> mNotify; + + DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::BaseState : public AState { + BaseState(ACodec *codec, const sp<AState> &parentState = NULL); + +protected: + enum PortMode { + KEEP_BUFFERS, + RESUBMIT_BUFFERS, + FREE_BUFFERS, 
+ }; + + ACodec *mCodec; + + virtual PortMode getPortMode(OMX_U32 portIndex); + + virtual bool onMessageReceived(const sp<AMessage> &msg); + + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + + virtual void onOutputBufferDrained(const sp<AMessage> &msg); + virtual void onInputBufferFilled(const sp<AMessage> &msg); + + void postFillThisBuffer(BufferInfo *info); + +private: + bool onOMXMessage(const sp<AMessage> &msg); + + bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID); + + bool onOMXFillBufferDone( + IOMX::buffer_id bufferID, + size_t rangeOffset, size_t rangeLength, + OMX_U32 flags, + int64_t timeUs, + void *platformPrivate, + void *dataPtr); + + void getMoreInputDataIfPossible(); + + DISALLOW_EVIL_CONSTRUCTORS(BaseState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::UninitializedState : public ACodec::BaseState { + UninitializedState(ACodec *codec); + +protected: + virtual bool onMessageReceived(const sp<AMessage> &msg); + +private: + void onSetup(const sp<AMessage> &msg); + + DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::LoadedToIdleState : public ACodec::BaseState { + LoadedToIdleState(ACodec *codec); + +protected: + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + virtual void stateEntered(); + +private: + status_t allocateBuffers(); + + DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::IdleToExecutingState : public ACodec::BaseState { + IdleToExecutingState(ACodec *codec); + +protected: + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + virtual void stateEntered(); + +private: + DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::ExecutingState : public ACodec::BaseState { + ExecutingState(ACodec *codec); + + void submitOutputBuffers(); + + // Submit output buffers to the decoder, submit input buffers to client + // to fill with data. 
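    // Editorial note (added for clarity, not part of the original patch):
    // resume() is reached on two paths -- once when the component first
    // transitions Idle->Executing (see IdleToExecutingState::onOMXEvent),
    // and again whenever the client posts kWhatResume after a flush.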
+ void resume(); + +protected: + virtual PortMode getPortMode(OMX_U32 portIndex); + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual void stateEntered(); + + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + +private: + DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { + OutputPortSettingsChangedState(ACodec *codec); + +protected: + virtual PortMode getPortMode(OMX_U32 portIndex); + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual void stateEntered(); + + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + +private: + DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::ExecutingToIdleState : public ACodec::BaseState { + ExecutingToIdleState(ACodec *codec); + +protected: + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual void stateEntered(); + + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + + virtual void onOutputBufferDrained(const sp<AMessage> &msg); + virtual void onInputBufferFilled(const sp<AMessage> &msg); + +private: + void changeStateIfWeOwnAllBuffers(); + + DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::IdleToLoadedState : public ACodec::BaseState { + IdleToLoadedState(ACodec *codec); + +protected: + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual void stateEntered(); + + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + +private: + DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::ErrorState : public ACodec::BaseState { + ErrorState(ACodec *codec); + +protected: + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual void stateEntered(); + + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + +private: + DISALLOW_EVIL_CONSTRUCTORS(ErrorState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +struct ACodec::FlushingState : public ACodec::BaseState { + FlushingState(ACodec *codec); + +protected: + virtual bool onMessageReceived(const sp<AMessage> &msg); + virtual void stateEntered(); + + virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); + + virtual void onOutputBufferDrained(const sp<AMessage> &msg); + virtual void onInputBufferFilled(const sp<AMessage> &msg); + +private: + bool mFlushComplete[2]; + + void changeStateIfWeOwnAllBuffers(); + + DISALLOW_EVIL_CONSTRUCTORS(FlushingState); +}; + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::ACodec() + : mNode(NULL), + mSentFormat(false) { + mUninitializedState = new UninitializedState(this); + mLoadedToIdleState = new LoadedToIdleState(this); + mIdleToExecutingState = new IdleToExecutingState(this); + mExecutingState = new ExecutingState(this); + + mOutputPortSettingsChangedState = + new OutputPortSettingsChangedState(this); + + mExecutingToIdleState = new ExecutingToIdleState(this); + mIdleToLoadedState = new IdleToLoadedState(this); + mErrorState = new ErrorState(this); + mFlushingState = new FlushingState(this); + + 
mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; + + changeState(mUninitializedState); +} + +ACodec::~ACodec() { +} + +void ACodec::setNotificationMessage(const sp<AMessage> &msg) { + mNotify = msg; +} + +void ACodec::initiateSetup(const sp<AMessage> &msg) { + msg->setWhat(kWhatSetup); + msg->setTarget(id()); + msg->post(); +} + +void ACodec::signalFlush() { + (new AMessage(kWhatFlush, id()))->post(); +} + +void ACodec::signalResume() { + (new AMessage(kWhatResume, id()))->post(); +} + +void ACodec::initiateShutdown() { + (new AMessage(kWhatShutdown, id()))->post(); +} + +status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { + CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); + + CHECK(mDealer[portIndex] == NULL); + CHECK(mBuffers[portIndex].isEmpty()); + + if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { + return allocateOutputBuffersFromNativeWindow(); + } + + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = portIndex; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + if (err != OK) { + return err; + } + + LOGV("[%s] Allocating %lu buffers of size %lu on %s port", + mComponentName.c_str(), + def.nBufferCountActual, def.nBufferSize, + portIndex == kPortIndexInput ? "input" : "output"); + + size_t totalSize = def.nBufferCountActual * def.nBufferSize; + mDealer[portIndex] = new MemoryDealer(totalSize, "OMXCodec"); + + for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) { + sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize); + CHECK(mem.get() != NULL); + + IOMX::buffer_id buffer; +#if 0 + err = mOMX->allocateBufferWithBackup(mNode, portIndex, mem, &buffer); +#else + err = mOMX->useBuffer(mNode, portIndex, mem, &buffer); +#endif + + if (err != OK) { + return err; + } + + BufferInfo info; + info.mBufferID = buffer; + info.mStatus = BufferInfo::OWNED_BY_US; + info.mData = new ABuffer(mem->pointer(), def.nBufferSize); + mBuffers[portIndex].push(info); + } + + return OK; +} + +status_t ACodec::allocateOutputBuffersFromNativeWindow() { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexOutput; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + if (err != OK) { + return err; + } + + err = native_window_set_buffers_geometry( + mNativeWindow.get(), + def.format.video.nFrameWidth, + def.format.video.nFrameHeight, + def.format.video.eColorFormat); + + if (err != 0) { + LOGE("native_window_set_buffers_geometry failed: %s (%d)", + strerror(-err), -err); + return err; + } + + // Increase the buffer count by one to allow for the ANativeWindow to hold + // on to one of the buffers. + def.nBufferCountActual++; + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + if (err != OK) { + return err; + } + + // Set up the native window. + // XXX TODO: Get the gralloc usage flags from the OMX plugin! + err = native_window_set_usage( + mNativeWindow.get(), + GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP); + + if (err != 0) { + LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err); + return err; + } + + err = native_window_set_buffer_count( + mNativeWindow.get(), def.nBufferCountActual); + + if (err != 0) { + LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), + -err); + return err; + } + + // XXX TODO: Do something so the ANativeWindow knows that we'll need to get + // the same set of buffers. 
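    // Editorial sketch (not part of the original patch): rather than the
    // hard-coded "+ 1" above and the "cancel the last two" bookkeeping below,
    // the number of buffers the ANativeWindow keeps could be queried directly,
    // assuming a NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query is available in
    // this tree:
    //
    //     int minUndequeuedBufs = 0;
    //     status_t qerr = mNativeWindow->query(
    //             mNativeWindow.get(),
    //             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
    //             &minUndequeuedBufs);
    //     if (qerr == OK) {
    //         def.nBufferCountActual += minUndequeuedBufs;
    //     }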
+ + LOGV("[%s] Allocating %lu buffers from a native window of size %lu on " + "output port", + mComponentName.c_str(), def.nBufferCountActual, def.nBufferSize); + + // Dequeue buffers and send them to OMX + OMX_U32 i; + for (i = 0; i < def.nBufferCountActual; i++) { + android_native_buffer_t *buf; + err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf); + if (err != 0) { + LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); + break; + } + + sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); + IOMX::buffer_id bufferId; + err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, + &bufferId); + if (err != 0) { + break; + } + + LOGV("[%s] Registered graphic buffer with ID %p (pointer = %p)", + mComponentName.c_str(), + bufferId, graphicBuffer.get()); + + BufferInfo info; + info.mBufferID = bufferId; + info.mStatus = BufferInfo::OWNED_BY_US; + info.mData = new ABuffer(0); + info.mGraphicBuffer = graphicBuffer; + mBuffers[kPortIndexOutput].push(info); + } + + OMX_U32 cancelStart; + OMX_U32 cancelEnd; + + if (err != 0) { + // If an error occurred while dequeuing we need to cancel any buffers + // that were dequeued. + cancelStart = 0; + cancelEnd = i; + } else { + // Return the last two buffers to the native window. + // XXX TODO: The number of buffers the native window owns should + // probably be queried from it when we put the native window in + // fixed buffer pool mode (which needs to be implemented). + // Currently it's hard-coded to 2. + cancelStart = def.nBufferCountActual - 2; + cancelEnd = def.nBufferCountActual; + } + + for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { + BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); + cancelBufferToNativeWindow(info); + } + + return err; +} + +status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); + + LOGV("[%s] Calling cancelBuffer on buffer %p", + mComponentName.c_str(), info->mBufferID); + + int err = mNativeWindow->cancelBuffer( + mNativeWindow.get(), info->mGraphicBuffer.get()); + + CHECK_EQ(err, 0); + + info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; + + return OK; +} + +ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { + android_native_buffer_t *buf; + CHECK_EQ(mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf), 0); + + for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) { + BufferInfo *info = + &mBuffers[kPortIndexOutput].editItemAt(i); + + if (info->mGraphicBuffer->handle == buf->handle) { + CHECK_EQ((int)info->mStatus, + (int)BufferInfo::OWNED_BY_NATIVE_WINDOW); + + info->mStatus = BufferInfo::OWNED_BY_US; + + return info; + } + } + + TRESPASS(); + + return NULL; +} + +status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { + for (size_t i = mBuffers[portIndex].size(); i-- > 0;) { + CHECK_EQ((status_t)OK, freeBuffer(portIndex, i)); + } + + mDealer[portIndex].clear(); + + return OK; +} + +status_t ACodec::freeOutputBuffersOwnedByNativeWindow() { + for (size_t i = mBuffers[kPortIndexOutput].size(); i-- > 0;) { + BufferInfo *info = + &mBuffers[kPortIndexOutput].editItemAt(i); + + if (info->mStatus == + BufferInfo::OWNED_BY_NATIVE_WINDOW) { + CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i)); + } + } + + return OK; +} + +status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { + BufferInfo *info = &mBuffers[portIndex].editItemAt(i); + + CHECK(info->mStatus == BufferInfo::OWNED_BY_US + || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW); + + if (portIndex == kPortIndexOutput && 
mNativeWindow != NULL + && info->mStatus == BufferInfo::OWNED_BY_US) { + CHECK_EQ((status_t)OK, cancelBufferToNativeWindow(info)); + } + + CHECK_EQ(mOMX->freeBuffer( + mNode, portIndex, info->mBufferID), + (status_t)OK); + + mBuffers[portIndex].removeAt(i); + + return OK; +} + +ACodec::BufferInfo *ACodec::findBufferByID( + uint32_t portIndex, IOMX::buffer_id bufferID, + ssize_t *index) { + for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { + BufferInfo *info = &mBuffers[portIndex].editItemAt(i); + + if (info->mBufferID == bufferID) { + if (index != NULL) { + *index = i; + } + return info; + } + } + + TRESPASS(); + + return NULL; +} + +void ACodec::setComponentRole( + bool isEncoder, const char *mime) { + struct MimeToRole { + const char *mime; + const char *decoderRole; + const char *encoderRole; + }; + + static const MimeToRole kMimeToRole[] = { + { MEDIA_MIMETYPE_AUDIO_MPEG, + "audio_decoder.mp3", "audio_encoder.mp3" }, + { MEDIA_MIMETYPE_AUDIO_AMR_NB, + "audio_decoder.amrnb", "audio_encoder.amrnb" }, + { MEDIA_MIMETYPE_AUDIO_AMR_WB, + "audio_decoder.amrwb", "audio_encoder.amrwb" }, + { MEDIA_MIMETYPE_AUDIO_AAC, + "audio_decoder.aac", "audio_encoder.aac" }, + { MEDIA_MIMETYPE_VIDEO_AVC, + "video_decoder.avc", "video_encoder.avc" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, + "video_decoder.mpeg4", "video_encoder.mpeg4" }, + { MEDIA_MIMETYPE_VIDEO_H263, + "video_decoder.h263", "video_encoder.h263" }, + }; + + static const size_t kNumMimeToRole = + sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); + + size_t i; + for (i = 0; i < kNumMimeToRole; ++i) { + if (!strcasecmp(mime, kMimeToRole[i].mime)) { + break; + } + } + + if (i == kNumMimeToRole) { + return; + } + + const char *role = + isEncoder ? kMimeToRole[i].encoderRole + : kMimeToRole[i].decoderRole; + + if (role != NULL) { + OMX_PARAM_COMPONENTROLETYPE roleParams; + InitOMXParams(&roleParams); + + strncpy((char *)roleParams.cRole, + role, OMX_MAX_STRINGNAME_SIZE - 1); + + roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; + + status_t err = mOMX->setParameter( + mNode, OMX_IndexParamStandardComponentRole, + &roleParams, sizeof(roleParams)); + + if (err != OK) { + LOGW("[%s] Failed to set standard component role '%s'.", + mComponentName.c_str(), role); + } + } +} + +void ACodec::configureCodec( + const char *mime, const sp<AMessage> &msg) { + setComponentRole(false /* isEncoder */, mime); + + if (!strncasecmp(mime, "video/", 6)) { + int32_t width, height; + CHECK(msg->findInt32("width", &width)); + CHECK(msg->findInt32("height", &height)); + + CHECK_EQ(setupVideoDecoder(mime, width, height), + (status_t)OK); + } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { + int32_t numChannels, sampleRate; + CHECK(msg->findInt32("channel-count", &numChannels)); + CHECK(msg->findInt32("sample-rate", &sampleRate)); + + CHECK_EQ(setupAACDecoder(numChannels, sampleRate), (status_t)OK); + } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { + } else { + TRESPASS(); + } + + int32_t maxInputSize; + if (msg->findInt32("max-input-size", &maxInputSize)) { + CHECK_EQ(setMinBufferSize(kPortIndexInput, (size_t)maxInputSize), + (status_t)OK); + } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { + CHECK_EQ(setMinBufferSize(kPortIndexInput, 8192), // XXX + (status_t)OK); + } +} + +status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = portIndex; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + 
if (err != OK) { + return err; + } + + if (def.nBufferSize >= size) { + return OK; + } + + def.nBufferSize = size; + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + if (err != OK) { + return err; + } + + err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + if (err != OK) { + return err; + } + + CHECK(def.nBufferSize >= size); + + return OK; +} + +status_t ACodec::setupAACDecoder(int32_t numChannels, int32_t sampleRate) { + OMX_AUDIO_PARAM_AACPROFILETYPE profile; + InitOMXParams(&profile); + profile.nPortIndex = kPortIndexInput; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); + + if (err != OK) { + return err; + } + + profile.nChannels = numChannels; + profile.nSampleRate = sampleRate; + profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4ADTS; + + err = mOMX->setParameter( + mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); + + return err; +} + +status_t ACodec::setVideoPortFormatType( + OMX_U32 portIndex, + OMX_VIDEO_CODINGTYPE compressionFormat, + OMX_COLOR_FORMATTYPE colorFormat) { + OMX_VIDEO_PARAM_PORTFORMATTYPE format; + InitOMXParams(&format); + format.nPortIndex = portIndex; + format.nIndex = 0; + bool found = false; + + OMX_U32 index = 0; + for (;;) { + format.nIndex = index; + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); + + if (err != OK) { + return err; + } + + // The following assertion is violated by TI's video decoder. + // CHECK_EQ(format.nIndex, index); + + if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { + if (portIndex == kPortIndexInput + && colorFormat == format.eColorFormat) { + // eCompressionFormat does not seem right. + found = true; + break; + } + if (portIndex == kPortIndexOutput + && compressionFormat == format.eCompressionFormat) { + // eColorFormat does not seem right. 
+ found = true; + break; + } + } + + if (format.eCompressionFormat == compressionFormat + && format.eColorFormat == colorFormat) { + found = true; + break; + } + + ++index; + } + + if (!found) { + return UNKNOWN_ERROR; + } + + status_t err = mOMX->setParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); + + return err; +} + +status_t ACodec::setSupportedOutputFormat() { + OMX_VIDEO_PARAM_PORTFORMATTYPE format; + InitOMXParams(&format); + format.nPortIndex = kPortIndexOutput; + format.nIndex = 0; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); + CHECK_EQ(err, (status_t)OK); + CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused); + + static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00; + + CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar + || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar + || format.eColorFormat == OMX_COLOR_FormatCbYCrY + || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar); + + return mOMX->setParameter( + mNode, OMX_IndexParamVideoPortFormat, + &format, sizeof(format)); +} + +status_t ACodec::setupVideoDecoder( + const char *mime, int32_t width, int32_t height) { + OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused; + if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) { + compressionFormat = OMX_VIDEO_CodingAVC; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) { + compressionFormat = OMX_VIDEO_CodingMPEG4; + } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) { + compressionFormat = OMX_VIDEO_CodingH263; + } else { + TRESPASS(); + } + + status_t err = setVideoPortFormatType( + kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); + + if (err != OK) { + return err; + } + + err = setSupportedOutputFormat(); + + if (err != OK) { + return err; + } + + err = setVideoFormatOnPort( + kPortIndexInput, width, height, compressionFormat); + + if (err != OK) { + return err; + } + + err = setVideoFormatOnPort( + kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); + + if (err != OK) { + return err; + } + + return OK; +} + +status_t ACodec::setVideoFormatOnPort( + OMX_U32 portIndex, + int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat) { + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = portIndex; + + OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + CHECK_EQ(err, (status_t)OK); + + if (portIndex == kPortIndexInput) { + // XXX Need a (much) better heuristic to compute input buffer sizes. 
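    // Editorial sketch (not part of the original patch): a frame-size-based
    // lower bound is one possible replacement for the flat 64 KiB constant
    // below; the 4:1 ratio against raw YUV420 is purely illustrative:
    //
    //     size_t minCompressedSize = (width * height * 3) / 2 / 4;
    //     if (def.nBufferSize < minCompressedSize) {
    //         def.nBufferSize = minCompressedSize;
    //     }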
+ const size_t X = 64 * 1024; + if (def.nBufferSize < X) { + def.nBufferSize = X; + } + } + + CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); + + video_def->nFrameWidth = width; + video_def->nFrameHeight = height; + + if (portIndex == kPortIndexInput) { + video_def->eCompressionFormat = compressionFormat; + video_def->eColorFormat = OMX_COLOR_FormatUnused; + } + + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + + return err; +} + +status_t ACodec::initNativeWindow() { + if (mNativeWindow != NULL) { + return mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE); + } + + mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_FALSE); + return OK; +} + +bool ACodec::allYourBuffersAreBelongToUs( + OMX_U32 portIndex) { + for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { + BufferInfo *info = &mBuffers[portIndex].editItemAt(i); + + if (info->mStatus != BufferInfo::OWNED_BY_US + && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { + LOGV("[%s] Buffer %p on port %ld still has status %d", + mComponentName.c_str(), + info->mBufferID, portIndex, info->mStatus); + return false; + } + } + + return true; +} + +bool ACodec::allYourBuffersAreBelongToUs() { + return allYourBuffersAreBelongToUs(kPortIndexInput) + && allYourBuffersAreBelongToUs(kPortIndexOutput); +} + +void ACodec::deferMessage(const sp<AMessage> &msg) { + bool wasEmptyBefore = mDeferredQueue.empty(); + mDeferredQueue.push_back(msg); +} + +void ACodec::processDeferredMessages() { + List<sp<AMessage> > queue = mDeferredQueue; + mDeferredQueue.clear(); + + List<sp<AMessage> >::iterator it = queue.begin(); + while (it != queue.end()) { + onMessageReceived(*it++); + } +} + +void ACodec::sendFormatChange() { + sp<AMessage> notify = mNotify->dup(); + notify->setInt32("what", kWhatOutputFormatChanged); + + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexOutput; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)), + (status_t)OK); + + CHECK_EQ((int)def.eDir, (int)OMX_DirOutput); + + switch (def.eDomain) { + case OMX_PortDomainVideo: + { + OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; + + notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); + notify->setInt32("width", videoDef->nFrameWidth); + notify->setInt32("height", videoDef->nFrameHeight); + + OMX_CONFIG_RECTTYPE rect; + InitOMXParams(&rect); + rect.nPortIndex = kPortIndexOutput; + + if (mOMX->getConfig( + mNode, OMX_IndexConfigCommonOutputCrop, + &rect, sizeof(rect)) != OK) { + rect.nLeft = 0; + rect.nTop = 0; + rect.nWidth = videoDef->nFrameWidth; + rect.nHeight = videoDef->nFrameHeight; + } + + CHECK_GE(rect.nLeft, 0); + CHECK_GE(rect.nTop, 0); + CHECK_GE(rect.nWidth, 0u); + CHECK_GE(rect.nHeight, 0u); + CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth); + CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight); + + notify->setRect( + "crop", + rect.nLeft, + rect.nTop, + rect.nLeft + rect.nWidth - 1, + rect.nTop + rect.nHeight - 1); + + if (mNativeWindow != NULL) { + android_native_rect_t crop; + crop.left = rect.nLeft; + crop.top = rect.nTop; + crop.right = rect.nLeft + rect.nWidth - 1; + crop.bottom = rect.nTop + rect.nHeight - 1; + + CHECK_EQ(0, native_window_set_crop( + mNativeWindow.get(), &crop)); + } + break; + } + + case OMX_PortDomainAudio: + { + OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; + CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM); + + OMX_AUDIO_PARAM_PCMMODETYPE params; + 
InitOMXParams(¶ms); + params.nPortIndex = kPortIndexOutput; + + CHECK_EQ(mOMX->getParameter( + mNode, OMX_IndexParamAudioPcm, + ¶ms, sizeof(params)), + (status_t)OK); + + CHECK(params.nChannels == 1 || params.bInterleaved); + CHECK_EQ(params.nBitPerSample, 16u); + CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned); + CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear); + + notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); + notify->setInt32("channel-count", params.nChannels); + notify->setInt32("sample-rate", params.nSamplingRate); + break; + } + + default: + TRESPASS(); + } + + notify->post(); + + mSentFormat = true; +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) + : AState(parentState), + mCodec(codec) { +} + +ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(OMX_U32 portIndex) { + return KEEP_BUFFERS; +} + +bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatInputBufferFilled: + { + onInputBufferFilled(msg); + break; + } + + case kWhatOutputBufferDrained: + { + onOutputBufferDrained(msg); + break; + } + + case ACodec::kWhatOMXMessage: + { + return onOMXMessage(msg); + } + + default: + return false; + } + + return true; +} + +bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { + int32_t type; + CHECK(msg->findInt32("type", &type)); + + IOMX::node_id nodeID; + CHECK(msg->findPointer("node", &nodeID)); + CHECK_EQ(nodeID, mCodec->mNode); + + switch (type) { + case omx_message::EVENT: + { + int32_t event, data1, data2; + CHECK(msg->findInt32("event", &event)); + CHECK(msg->findInt32("data1", &data1)); + CHECK(msg->findInt32("data2", &data2)); + + return onOMXEvent( + static_cast<OMX_EVENTTYPE>(event), + static_cast<OMX_U32>(data1), + static_cast<OMX_U32>(data2)); + } + + case omx_message::EMPTY_BUFFER_DONE: + { + IOMX::buffer_id bufferID; + CHECK(msg->findPointer("buffer", &bufferID)); + + return onOMXEmptyBufferDone(bufferID); + } + + case omx_message::FILL_BUFFER_DONE: + { + IOMX::buffer_id bufferID; + CHECK(msg->findPointer("buffer", &bufferID)); + + int32_t rangeOffset, rangeLength, flags; + int64_t timeUs; + void *platformPrivate; + void *dataPtr; + + CHECK(msg->findInt32("range_offset", &rangeOffset)); + CHECK(msg->findInt32("range_length", &rangeLength)); + CHECK(msg->findInt32("flags", &flags)); + CHECK(msg->findInt64("timestamp", &timeUs)); + CHECK(msg->findPointer("platform_private", &platformPrivate)); + CHECK(msg->findPointer("data_ptr", &dataPtr)); + + return onOMXFillBufferDone( + bufferID, + (size_t)rangeOffset, (size_t)rangeLength, + (OMX_U32)flags, + timeUs, + platformPrivate, + dataPtr); + } + + default: + TRESPASS(); + break; + } +} + +bool ACodec::BaseState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + if (event != OMX_EventError) { + LOGV("[%s] EVENT(%d, 0x%08lx, 0x%08lx)", + mCodec->mComponentName.c_str(), event, data1, data2); + + return false; + } + + LOGE("[%s] ERROR(0x%08lx, 0x%08lx)", + mCodec->mComponentName.c_str(), data1, data2); + + mCodec->changeState(mCodec->mErrorState); + + return true; +} + +bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID) { + BufferInfo *info = + mCodec->findBufferByID(kPortIndexInput, bufferID); + + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); + info->mStatus = BufferInfo::OWNED_BY_US; + + PortMode mode = getPortMode(kPortIndexInput); + + switch (mode) { + 
case KEEP_BUFFERS: + break; + + case RESUBMIT_BUFFERS: + postFillThisBuffer(info); + break; + + default: + { + CHECK_EQ((int)mode, (int)FREE_BUFFERS); + TRESPASS(); // Not currently used + break; + } + } + + return true; +} + +void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { + if (mCodec->mPortEOS[kPortIndexInput]) { + return; + } + + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); + + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFillThisBuffer); + notify->setPointer("buffer-id", info->mBufferID); + + info->mData->meta()->clear(); + notify->setObject("buffer", info->mData); + + sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec->id()); + reply->setPointer("buffer-id", info->mBufferID); + + notify->setMessage("reply", reply); + + notify->post(); + + info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; +} + +void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { + IOMX::buffer_id bufferID; + CHECK(msg->findPointer("buffer-id", &bufferID)); + + sp<RefBase> obj; + int32_t err = OK; + if (!msg->findObject("buffer", &obj)) { + CHECK(msg->findInt32("err", &err)); + + LOGV("[%s] saw error %d instead of an input buffer", + mCodec->mComponentName.c_str(), err); + + obj.clear(); + } + + sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); + + BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_UPSTREAM); + + info->mStatus = BufferInfo::OWNED_BY_US; + + PortMode mode = getPortMode(kPortIndexInput); + + switch (mode) { + case KEEP_BUFFERS: + { + if (buffer == NULL) { + mCodec->mPortEOS[kPortIndexInput] = true; + } + break; + } + + case RESUBMIT_BUFFERS: + { + if (buffer != NULL) { + CHECK(!mCodec->mPortEOS[kPortIndexInput]); + + int64_t timeUs; + CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); + + OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; + + int32_t isCSD; + if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { + flags |= OMX_BUFFERFLAG_CODECCONFIG; + } + + if (buffer != info->mData) { + if (!(flags & OMX_BUFFERFLAG_CODECCONFIG)) { + LOGV("[%s] Needs to copy input data.", + mCodec->mComponentName.c_str()); + } + + CHECK_LE(buffer->size(), info->mData->capacity()); + memcpy(info->mData->data(), buffer->data(), buffer->size()); + } + + CHECK_EQ(mCodec->mOMX->emptyBuffer( + mCodec->mNode, + bufferID, + 0, + buffer->size(), + flags, + timeUs), + (status_t)OK); + + info->mStatus = BufferInfo::OWNED_BY_COMPONENT; + + getMoreInputDataIfPossible(); + } else if (!mCodec->mPortEOS[kPortIndexInput]) { + LOGV("[%s] Signalling EOS on the input port", + mCodec->mComponentName.c_str()); + + CHECK_EQ(mCodec->mOMX->emptyBuffer( + mCodec->mNode, + bufferID, + 0, + 0, + OMX_BUFFERFLAG_EOS, + 0), + (status_t)OK); + + info->mStatus = BufferInfo::OWNED_BY_COMPONENT; + + mCodec->mPortEOS[kPortIndexInput] = true; + } + break; + + default: + CHECK_EQ((int)mode, (int)FREE_BUFFERS); + break; + } + } +} + +void ACodec::BaseState::getMoreInputDataIfPossible() { + if (mCodec->mPortEOS[kPortIndexInput]) { + return; + } + + BufferInfo *eligible = NULL; + + for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { + BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); + +#if 0 + if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { + // There's already a "read" pending. 
+ return; + } +#endif + + if (info->mStatus == BufferInfo::OWNED_BY_US) { + eligible = info; + } + } + + if (eligible == NULL) { + return; + } + + postFillThisBuffer(eligible); +} + +bool ACodec::BaseState::onOMXFillBufferDone( + IOMX::buffer_id bufferID, + size_t rangeOffset, size_t rangeLength, + OMX_U32 flags, + int64_t timeUs, + void *platformPrivate, + void *dataPtr) { + ssize_t index; + BufferInfo *info = + mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); + + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_COMPONENT); + + info->mStatus = BufferInfo::OWNED_BY_US; + + PortMode mode = getPortMode(kPortIndexOutput); + + switch (mode) { + case KEEP_BUFFERS: + break; + + case RESUBMIT_BUFFERS: + { + if (rangeLength == 0) { + if (!(flags & OMX_BUFFERFLAG_EOS)) { + CHECK_EQ(mCodec->mOMX->fillBuffer( + mCodec->mNode, info->mBufferID), + (status_t)OK); + + info->mStatus = BufferInfo::OWNED_BY_COMPONENT; + } + } else { + if (!mCodec->mSentFormat) { + mCodec->sendFormatChange(); + } + + if (mCodec->mNativeWindow == NULL) { + info->mData->setRange(rangeOffset, rangeLength); + } + + info->mData->meta()->setInt64("timeUs", timeUs); + + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatDrainThisBuffer); + notify->setPointer("buffer-id", info->mBufferID); + notify->setObject("buffer", info->mData); + + sp<AMessage> reply = + new AMessage(kWhatOutputBufferDrained, mCodec->id()); + + reply->setPointer("buffer-id", info->mBufferID); + + notify->setMessage("reply", reply); + + notify->post(); + + info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; + } + + if (flags & OMX_BUFFERFLAG_EOS) { + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatEOS); + notify->post(); + + mCodec->mPortEOS[kPortIndexOutput] = true; + } + break; + } + + default: + { + CHECK_EQ((int)mode, (int)FREE_BUFFERS); + + CHECK_EQ((status_t)OK, + mCodec->freeBuffer(kPortIndexOutput, index)); + break; + } + } + + return true; +} + +void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { + IOMX::buffer_id bufferID; + CHECK(msg->findPointer("buffer-id", &bufferID)); + + ssize_t index; + BufferInfo *info = + mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM); + + int32_t render; + if (mCodec->mNativeWindow != NULL + && msg->findInt32("render", &render) && render != 0) { + // The client wants this buffer to be rendered. + + CHECK_EQ(mCodec->mNativeWindow->queueBuffer( + mCodec->mNativeWindow.get(), + info->mGraphicBuffer.get()), + 0); + + info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; + } else { + info->mStatus = BufferInfo::OWNED_BY_US; + } + + PortMode mode = getPortMode(kPortIndexOutput); + + switch (mode) { + case KEEP_BUFFERS: + { + // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? + + if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { + // We cannot resubmit the buffer we just rendered, dequeue + // the spare instead. + + info = mCodec->dequeueBufferFromNativeWindow(); + } + break; + } + + case RESUBMIT_BUFFERS: + { + if (!mCodec->mPortEOS[kPortIndexOutput]) { + if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { + // We cannot resubmit the buffer we just rendered, dequeue + // the spare instead. 
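                // Editorial note (added for clarity, not part of the original
                // patch): with a native window attached, output buffers cycle
                //
                //     OWNED_BY_DOWNSTREAM  --queueBuffer()-->                   OWNED_BY_NATIVE_WINDOW
                //     OWNED_BY_NATIVE_WINDOW --dequeueBufferFromNativeWindow()--> OWNED_BY_US
                //     OWNED_BY_US          --fillBuffer()-->                    OWNED_BY_COMPONENT
                //
                // so the buffer that was just queued for display is swapped
                // for a spare before being handed back to the component.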
+ + info = mCodec->dequeueBufferFromNativeWindow(); + } + + CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), + (status_t)OK); + + info->mStatus = BufferInfo::OWNED_BY_COMPONENT; + } + break; + } + + default: + { + CHECK_EQ((int)mode, (int)FREE_BUFFERS); + + CHECK_EQ((status_t)OK, + mCodec->freeBuffer(kPortIndexOutput, index)); + break; + } + } +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::UninitializedState::UninitializedState(ACodec *codec) + : BaseState(codec) { +} + +bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { + bool handled = false; + + switch (msg->what()) { + case ACodec::kWhatSetup: + { + onSetup(msg); + + handled = true; + break; + } + + case ACodec::kWhatShutdown: + { + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatShutdownCompleted); + notify->post(); + + handled = true; + } + + case ACodec::kWhatFlush: + { + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFlushCompleted); + notify->post(); + + handled = true; + } + + default: + return BaseState::onMessageReceived(msg); + } + + return handled; +} + +void ACodec::UninitializedState::onSetup( + const sp<AMessage> &msg) { + OMXClient client; + CHECK_EQ(client.connect(), (status_t)OK); + + sp<IOMX> omx = client.interface(); + + AString mime; + CHECK(msg->findString("mime", &mime)); + + AString componentName; + + if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) { + componentName = "OMX.Nvidia.h264.decode"; + } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) { + componentName = "OMX.Nvidia.aac.decoder"; + } else if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_AUDIO_MPEG)) { + componentName = "OMX.Nvidia.mp3.decoder"; + } else { + TRESPASS(); + } + + sp<CodecObserver> observer = new CodecObserver; + + IOMX::node_id node; + CHECK_EQ(omx->allocateNode(componentName.c_str(), observer, &node), + (status_t)OK); + + sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id()); + observer->setNotificationMessage(notify); + + mCodec->mComponentName = componentName; + mCodec->mOMX = omx; + mCodec->mNode = node; + + mCodec->configureCodec(mime.c_str(), msg); + + sp<RefBase> obj; + if (msg->findObject("surface", &obj)) { + mCodec->mNativeWindow = static_cast<Surface *>(obj.get()); + } + + CHECK_EQ((status_t)OK, mCodec->initNativeWindow()); + + CHECK_EQ(omx->sendCommand(node, OMX_CommandStateSet, OMX_StateIdle), + (status_t)OK); + + mCodec->changeState(mCodec->mLoadedToIdleState); +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) + : BaseState(codec) { +} + +void ACodec::LoadedToIdleState::stateEntered() { + LOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); + + CHECK_EQ(allocateBuffers(), (status_t)OK); +} + +status_t ACodec::LoadedToIdleState::allocateBuffers() { + status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); + + if (err != OK) { + return err; + } + + return mCodec->allocateBuffersOnPort(kPortIndexOutput); +} + +bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatShutdown: + { + mCodec->deferMessage(msg); + return true; + } + + default: + return BaseState::onMessageReceived(msg); + } +} + +bool ACodec::LoadedToIdleState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + CHECK_EQ(data1, 
(OMX_U32)OMX_CommandStateSet); + CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); + + CHECK_EQ(mCodec->mOMX->sendCommand( + mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting), + (status_t)OK); + + mCodec->changeState(mCodec->mIdleToExecutingState); + + return true; + } + + default: + return BaseState::onOMXEvent(event, data1, data2); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) + : BaseState(codec) { +} + +void ACodec::IdleToExecutingState::stateEntered() { + LOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); +} + +bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatShutdown: + { + mCodec->deferMessage(msg); + return true; + } + + default: + return BaseState::onMessageReceived(msg); + } +} + +bool ACodec::IdleToExecutingState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); + CHECK_EQ(data2, (OMX_U32)OMX_StateExecuting); + + mCodec->mExecutingState->resume(); + mCodec->changeState(mCodec->mExecutingState); + + return true; + } + + default: + return BaseState::onOMXEvent(event, data1, data2); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::ExecutingState::ExecutingState(ACodec *codec) + : BaseState(codec) { +} + +ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( + OMX_U32 portIndex) { + return RESUBMIT_BUFFERS; +} + +void ACodec::ExecutingState::submitOutputBuffers() { + for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { + BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); + + if (mCodec->mNativeWindow != NULL) { + CHECK(info->mStatus == BufferInfo::OWNED_BY_US + || info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW); + + if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { + continue; + } + + status_t err = mCodec->mNativeWindow->lockBuffer( + mCodec->mNativeWindow.get(), + info->mGraphicBuffer.get()); + CHECK_EQ(err, (status_t)OK); + } else { + CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); + } + + CHECK_EQ(mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID), + (status_t)OK); + + info->mStatus = BufferInfo::OWNED_BY_COMPONENT; + } +} + +void ACodec::ExecutingState::resume() { + submitOutputBuffers(); + + // Post the first input buffer. 
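    // Editorial note (added for clarity, not part of the original patch):
    // only the first input buffer is requested here; the remaining input
    // buffers are requested incrementally, either when the component returns
    // one (onOMXEmptyBufferDone) or from getMoreInputDataIfPossible() after
    // an input buffer has been submitted.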
+ CHECK_GT(mCodec->mBuffers[kPortIndexInput].size(), 0u); + BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(0); + + postFillThisBuffer(info); +} + +void ACodec::ExecutingState::stateEntered() { + LOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); + + mCodec->processDeferredMessages(); +} + +bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { + bool handled = false; + + switch (msg->what()) { + case kWhatShutdown: + { + CHECK_EQ(mCodec->mOMX->sendCommand( + mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle), + (status_t)OK); + + mCodec->changeState(mCodec->mExecutingToIdleState); + + handled = true; + break; + } + + case kWhatFlush: + { + CHECK_EQ(mCodec->mOMX->sendCommand( + mCodec->mNode, OMX_CommandFlush, OMX_ALL), + (status_t)OK); + + mCodec->changeState(mCodec->mFlushingState); + + handled = true; + break; + } + + case kWhatResume: + { + resume(); + + handled = true; + break; + } + + default: + handled = BaseState::onMessageReceived(msg); + break; + } + + return handled; +} + +bool ACodec::ExecutingState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventPortSettingsChanged: + { + CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); + + if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { + CHECK_EQ(mCodec->mOMX->sendCommand( + mCodec->mNode, + OMX_CommandPortDisable, kPortIndexOutput), + (status_t)OK); + + if (mCodec->mNativeWindow != NULL) { + CHECK_EQ((status_t)OK, + mCodec->freeOutputBuffersOwnedByNativeWindow()); + } + + mCodec->changeState(mCodec->mOutputPortSettingsChangedState); + } else if (data2 == OMX_IndexConfigCommonOutputCrop) { + mCodec->mSentFormat = false; + } else { + LOGV("[%s] OMX_EventPortSettingsChanged 0x%08lx", + mCodec->mComponentName.c_str(), data2); + } + + return true; + } + + case OMX_EventBufferFlag: + { + return true; + } + + default: + return BaseState::onOMXEvent(event, data1, data2); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( + ACodec *codec) + : BaseState(codec) { +} + +ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( + OMX_U32 portIndex) { + if (portIndex == kPortIndexOutput) { + return FREE_BUFFERS; + } + + CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); + + return RESUBMIT_BUFFERS; +} + +bool ACodec::OutputPortSettingsChangedState::onMessageReceived( + const sp<AMessage> &msg) { + bool handled = false; + + switch (msg->what()) { + case kWhatFlush: + case kWhatShutdown: + { + mCodec->deferMessage(msg); + handled = true; + break; + } + + default: + handled = BaseState::onMessageReceived(msg); + break; + } + + return handled; +} + +void ACodec::OutputPortSettingsChangedState::stateEntered() { + LOGV("[%s] Now handling output port settings change", + mCodec->mComponentName.c_str()); +} + +bool ACodec::OutputPortSettingsChangedState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + if (data1 == (OMX_U32)OMX_CommandPortDisable) { + CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); + + LOGV("[%s] Output port now disabled.", + mCodec->mComponentName.c_str()); + + CHECK(mCodec->mBuffers[kPortIndexOutput].isEmpty()); + mCodec->mDealer[kPortIndexOutput].clear(); + + CHECK_EQ(mCodec->mOMX->sendCommand( + mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput), + (status_t)OK); + + CHECK_EQ(mCodec->allocateBuffersOnPort(kPortIndexOutput), + 
(status_t)OK); + + return true; + } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { + CHECK_EQ(data2, (OMX_U32)kPortIndexOutput); + + mCodec->mSentFormat = false; + + LOGV("[%s] Output port now reenabled.", + mCodec->mComponentName.c_str()); + + mCodec->mExecutingState->submitOutputBuffers(); + mCodec->changeState(mCodec->mExecutingState); + + return true; + } + + return false; + } + + default: + return false; + } +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) + : BaseState(codec) { +} + +bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { + bool handled = false; + + switch (msg->what()) { + case kWhatFlush: + { + // Don't send me a flush request if you previously wanted me + // to shutdown. + TRESPASS(); + break; + } + + case kWhatShutdown: + { + // We're already doing that... + + handled = true; + break; + } + + default: + handled = BaseState::onMessageReceived(msg); + break; + } + + return handled; +} + +void ACodec::ExecutingToIdleState::stateEntered() { + LOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); + + mCodec->mSentFormat = false; +} + +bool ACodec::ExecutingToIdleState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); + CHECK_EQ(data2, (OMX_U32)OMX_StateIdle); + + changeStateIfWeOwnAllBuffers(); + + return true; + } + + default: + return BaseState::onOMXEvent(event, data1, data2); + } +} +void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { + if (mCodec->allYourBuffersAreBelongToUs()) { + CHECK_EQ(mCodec->mOMX->sendCommand( + mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded), + (status_t)OK); + + CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexInput), (status_t)OK); + CHECK_EQ(mCodec->freeBuffersOnPort(kPortIndexOutput), (status_t)OK); + + mCodec->changeState(mCodec->mIdleToLoadedState); + } +} + +void ACodec::ExecutingToIdleState::onInputBufferFilled( + const sp<AMessage> &msg) { + BaseState::onInputBufferFilled(msg); + + changeStateIfWeOwnAllBuffers(); +} + +void ACodec::ExecutingToIdleState::onOutputBufferDrained( + const sp<AMessage> &msg) { + BaseState::onOutputBufferDrained(msg); + + changeStateIfWeOwnAllBuffers(); +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) + : BaseState(codec) { +} + +bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { + bool handled = false; + + switch (msg->what()) { + case kWhatShutdown: + { + // We're already doing that... + + handled = true; + break; + } + + case kWhatFlush: + { + // Don't send me a flush request if you previously wanted me + // to shutdown. 
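+ // TRESPASS() aborts with a fatal log; reaching this point means the caller issued a flush after already requesting shutdown.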
+ TRESPASS(); + break; + } + + default: + handled = BaseState::onMessageReceived(msg); + break; + } + + return handled; +} + +void ACodec::IdleToLoadedState::stateEntered() { + LOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); +} + +bool ACodec::IdleToLoadedState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + CHECK_EQ(data1, (OMX_U32)OMX_CommandStateSet); + CHECK_EQ(data2, (OMX_U32)OMX_StateLoaded); + + LOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); + + CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK); + + mCodec->mNativeWindow.clear(); + mCodec->mNode = NULL; + mCodec->mOMX.clear(); + mCodec->mComponentName.clear(); + + mCodec->changeState(mCodec->mUninitializedState); + + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatShutdownCompleted); + notify->post(); + + return true; + } + + default: + return BaseState::onOMXEvent(event, data1, data2); + } +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::ErrorState::ErrorState(ACodec *codec) + : BaseState(codec) { +} + +bool ACodec::ErrorState::onMessageReceived(const sp<AMessage> &msg) { + return BaseState::onMessageReceived(msg); +} + +void ACodec::ErrorState::stateEntered() { + LOGV("[%s] Now in ErrorState", mCodec->mComponentName.c_str()); +} + +bool ACodec::ErrorState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + LOGV("EVENT(%d, 0x%08lx, 0x%08lx)", event, data1, data2); + return true; +} + +//////////////////////////////////////////////////////////////////////////////// + +ACodec::FlushingState::FlushingState(ACodec *codec) + : BaseState(codec) { +} + +void ACodec::FlushingState::stateEntered() { + LOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); + + mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; +} + +bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { + bool handled = false; + + switch (msg->what()) { + case kWhatShutdown: + { + mCodec->deferMessage(msg); + break; + } + + case kWhatFlush: + { + // We're already doing this right now. 
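+ // The duplicate flush request is absorbed here; a single kWhatFlushCompleted is still sent when the in-progress flush finishes.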
+ handled = true; + break; + } + + default: + handled = BaseState::onMessageReceived(msg); + break; + } + + return handled; +} + +bool ACodec::FlushingState::onOMXEvent( + OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + CHECK_EQ(data1, (OMX_U32)OMX_CommandFlush); + + if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { + CHECK(!mFlushComplete[data2]); + mFlushComplete[data2] = true; + } else { + CHECK_EQ(data2, OMX_ALL); + CHECK(mFlushComplete[kPortIndexInput]); + CHECK(mFlushComplete[kPortIndexOutput]); + + changeStateIfWeOwnAllBuffers(); + } + + return true; + } + + default: + return BaseState::onOMXEvent(event, data1, data2); + } + + return true; +} + +void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { + BaseState::onOutputBufferDrained(msg); + + changeStateIfWeOwnAllBuffers(); +} + +void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { + BaseState::onInputBufferFilled(msg); + + changeStateIfWeOwnAllBuffers(); +} + +void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { + if (mFlushComplete[kPortIndexInput] + && mFlushComplete[kPortIndexOutput] + && mCodec->allYourBuffersAreBelongToUs()) { + sp<AMessage> notify = mCodec->mNotify->dup(); + notify->setInt32("what", ACodec::kWhatFlushCompleted); + notify->post(); + + mCodec->mPortEOS[kPortIndexInput] = + mCodec->mPortEOS[kPortIndexOutput] = false; + + mCodec->changeState(mCodec->mExecutingState); + } +} + +} // namespace android + diff --git a/media/libstagefright/AMRExtractor.cpp b/media/libstagefright/AMRExtractor.cpp index 1b05528..ac87c29 100644 --- a/media/libstagefright/AMRExtractor.cpp +++ b/media/libstagefright/AMRExtractor.cpp @@ -55,7 +55,7 @@ private: size_t mFrameSize; bool mIsWide; - off_t mOffset; + off64_t mOffset; int64_t mCurrentTimeUs; bool mStarted; MediaBufferGroup *mGroup; @@ -115,9 +115,9 @@ AMRExtractor::AMRExtractor(const sp<DataSource> &source) mFrameSize = getFrameSize(mIsWide, FT); - off_t streamSize; + off64_t streamSize; if (mDataSource->getSize(&streamSize) == OK) { - off_t numFrames = streamSize / mFrameSize; + off64_t numFrames = streamSize / mFrameSize; mMeta->setInt64(kKeyDuration, 20000ll * numFrames); } diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp index c0b1abe..0db3d1d 100644 --- a/media/libstagefright/AMRWriter.cpp +++ b/media/libstagefright/AMRWriter.cpp @@ -24,20 +24,28 @@ #include <media/mediarecorder.h> #include <sys/prctl.h> #include <sys/resource.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <fcntl.h> namespace android { AMRWriter::AMRWriter(const char *filename) - : mFile(fopen(filename, "wb")), - mInitCheck(mFile != NULL ? OK : NO_INIT), + : mFd(-1), + mInitCheck(NO_INIT), mStarted(false), mPaused(false), mResumed(false) { + + mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC); + if (mFd >= 0) { + mInitCheck = OK; + } } AMRWriter::AMRWriter(int fd) - : mFile(fdopen(fd, "wb")), - mInitCheck(mFile != NULL ? OK : NO_INIT), + : mFd(dup(fd)), + mInitCheck(mFd < 0? NO_INIT: OK), mStarted(false), mPaused(false), mResumed(false) { @@ -48,9 +56,9 @@ AMRWriter::~AMRWriter() { stop(); } - if (mFile != NULL) { - fclose(mFile); - mFile = NULL; + if (mFd != -1) { + close(mFd); + mFd = -1; } } @@ -90,8 +98,8 @@ status_t AMRWriter::addSource(const sp<MediaSource> &source) { mSource = source; const char *kHeader = isWide ? 
"#!AMR-WB\n" : "#!AMR\n"; - size_t n = strlen(kHeader); - if (fwrite(kHeader, 1, n, mFile) != n) { + ssize_t n = strlen(kHeader); + if (write(mFd, kHeader, n) != n) { return ERROR_IO; } @@ -240,11 +248,9 @@ status_t AMRWriter::threadFunc() { notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0); break; } - ssize_t n = fwrite( - (const uint8_t *)buffer->data() + buffer->range_offset(), - 1, - buffer->range_length(), - mFile); + ssize_t n = write(mFd, + (const uint8_t *)buffer->data() + buffer->range_offset(), + buffer->range_length()); if (n < (ssize_t)buffer->range_length()) { buffer->release(); @@ -266,9 +272,8 @@ status_t AMRWriter::threadFunc() { notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_COMPLETION_STATUS, UNKNOWN_ERROR); } - fflush(mFile); - fclose(mFile); - mFile = NULL; + close(mFd); + mFd = -1; mReachedEOS = true; if (err == ERROR_END_OF_STREAM) { return OK; diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index d674547..2d486e3 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -4,12 +4,15 @@ include $(CLEAR_VARS) include frameworks/base/media/libstagefright/codecs/common/Config.mk LOCAL_SRC_FILES:= \ + ACodec.cpp \ AMRExtractor.cpp \ AMRWriter.cpp \ AudioPlayer.cpp \ AudioSource.cpp \ AwesomePlayer.cpp \ CameraSource.cpp \ + CameraSourceTimeLapse.cpp \ + VideoSourceDownSampler.cpp \ DataSource.cpp \ DRMExtractor.cpp \ ESDS.cpp \ @@ -25,6 +28,7 @@ LOCAL_SRC_FILES:= \ MediaDefs.cpp \ MediaExtractor.cpp \ MediaSource.cpp \ + MediaSourceSplitter.cpp \ MetaData.cpp \ NuCachedSource2.cpp \ NuHTTPDataSource.cpp \ @@ -41,9 +45,11 @@ LOCAL_SRC_FILES:= \ TimeSource.cpp \ TimedEventQueue.cpp \ Utils.cpp \ + VBRISeeker.cpp \ WAVExtractor.cpp \ + WVMExtractor.cpp \ + XINGSeeker.cpp \ avc_utils.cpp \ - string.cpp LOCAL_C_INCLUDES:= \ $(JNI_H_INCLUDE) \ @@ -60,10 +66,13 @@ LOCAL_SHARED_LIBRARIES := \ libsonivox \ libvorbisidec \ libsurfaceflinger_client \ + libstagefright_yuv \ libcamera_client \ - libdrmframework + libdrmframework \ + libcrypto LOCAL_STATIC_LIBRARIES := \ + libstagefright_color_conversion \ libstagefright_aacdec \ libstagefright_aacenc \ libstagefright_amrnbdec \ @@ -90,7 +99,6 @@ LOCAL_SHARED_LIBRARIES += \ libstagefright_enc_common \ libstagefright_avc_common \ libstagefright_foundation \ - libstagefright_color_conversion ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true) LOCAL_LDLIBS += -lpthread -ldl diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp index 29f16d8..f96df18 100644 --- a/media/libstagefright/AudioSource.cpp +++ b/media/libstagefright/AudioSource.cpp @@ -91,14 +91,17 @@ status_t AudioSource::start(MetaData *params) { mStartTimeUs = startTimeUs; } status_t err = mRecord->start(); - if (err == OK) { mGroup = new MediaBufferGroup; mGroup->add_buffer(new MediaBuffer(kMaxBufferSize)); mStarted = true; + } else { + delete mRecord; + mRecord = NULL; } + return err; } @@ -140,38 +143,6 @@ sp<MetaData> AudioSource::getFormat() { return meta; } -/* - * Returns -1 if frame skipping request is too long. - * Returns 0 if there is no need to skip frames. - * Returns 1 if we need to skip frames. - */ -static int skipFrame(int64_t timestampUs, - const MediaSource::ReadOptions *options) { - - int64_t skipFrameUs; - if (!options || !options->getSkipFrame(&skipFrameUs)) { - return 0; - } - - if (skipFrameUs <= timestampUs) { - return 0; - } - - // Safe guard against the abuse of the kSkipFrame_Option. 
- if (skipFrameUs - timestampUs >= 1E6) { - LOGE("Frame skipping requested is way too long: %lld us", - skipFrameUs - timestampUs); - - return -1; - } - - LOGV("skipFrame: %lld us > timestamp: %lld us", - skipFrameUs, timestampUs); - - return 1; - -} - void AudioSource::rampVolume( int32_t startFrame, int32_t rampDurationFrames, uint8_t *data, size_t bytes) { @@ -218,7 +189,7 @@ status_t AudioSource::read( CHECK_EQ(mGroup->acquire_buffer(&buffer), OK); int err = 0; - while (mStarted) { + if (mStarted) { uint32_t numFramesRecorded; mRecord->getPosition(&numFramesRecorded); @@ -268,12 +239,6 @@ status_t AudioSource::read( if (mCollectStats) { mTotalLostFrames += (numLostBytes >> 1); } - if ((err = skipFrame(timestampUs, options)) == -1) { - buffer->release(); - return UNKNOWN_ERROR; - } else if (err != 0) { - continue; - } memset(buffer->data(), 0, numLostBytes); buffer->set_range(0, numLostBytes); if (numFramesRecorded == 0) { @@ -294,12 +259,6 @@ status_t AudioSource::read( int64_t recordDurationUs = (1000000LL * n >> 1) / sampleRate; timestampUs += recordDurationUs; - if ((err = skipFrame(timestampUs, options)) == -1) { - buffer->release(); - return UNKNOWN_ERROR; - } else if (err != 0) { - continue; - } if (mPrevSampleTimeUs - mStartTimeUs < kAutoRampStartUs) { // Mute the initial video recording signal diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index 6dc61c7..11ac56c 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -22,7 +22,6 @@ #include "include/ARTSPController.h" #include "include/AwesomePlayer.h" -#include "include/LiveSource.h" #include "include/SoftwareRenderer.h" #include "include/NuCachedSource2.h" #include "include/ThrottledSource.h" @@ -34,20 +33,26 @@ #include "UDPPusher.h" #include <binder/IPCThreadState.h> +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/AudioPlayer.h> #include <media/stagefright/DataSource.h> #include <media/stagefright/FileSource.h> #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaExtractor.h> -#include <media/stagefright/MediaDebug.h> #include <media/stagefright/MediaSource.h> #include <media/stagefright/MetaData.h> #include <media/stagefright/OMXCodec.h> -#include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> #include <media/stagefright/foundation/ALooper.h> +#include <media/stagefright/foundation/AMessage.h> +#include "include/LiveSession.h" + +#define USE_SURFACE_ALLOC 1 +#define FRAME_DROP_FREQ 7 namespace android { @@ -79,49 +84,10 @@ private: AwesomeEvent &operator=(const AwesomeEvent &); }; -struct AwesomeRemoteRenderer : public AwesomeRenderer { - AwesomeRemoteRenderer(const sp<IOMXRenderer> &target) - : mTarget(target) { - } - - virtual status_t initCheck() const { - return OK; - } - - virtual void render(MediaBuffer *buffer) { - void *id; - if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) { - mTarget->render((IOMX::buffer_id)id); - } - } - -private: - sp<IOMXRenderer> mTarget; - - AwesomeRemoteRenderer(const AwesomeRemoteRenderer &); - AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &); -}; - struct AwesomeLocalRenderer : public AwesomeRenderer { AwesomeLocalRenderer( - bool previewOnly, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t 
decodedHeight, - int32_t rotationDegrees) - : mInitCheck(NO_INIT), - mTarget(NULL), - mLibHandle(NULL) { - mInitCheck = init(previewOnly, componentName, - colorFormat, surface, displayWidth, - displayHeight, decodedWidth, decodedHeight, - rotationDegrees); - } - - virtual status_t initCheck() const { - return mInitCheck; + const sp<Surface> &surface, const sp<MetaData> &meta) + : mTarget(new SoftwareRenderer(surface, meta)) { } virtual void render(MediaBuffer *buffer) { @@ -137,119 +103,77 @@ protected: virtual ~AwesomeLocalRenderer() { delete mTarget; mTarget = NULL; - - if (mLibHandle) { - dlclose(mLibHandle); - mLibHandle = NULL; - } } private: - status_t mInitCheck; - VideoRenderer *mTarget; - void *mLibHandle; - - status_t init( - bool previewOnly, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight, - int32_t rotationDegrees); + SoftwareRenderer *mTarget; AwesomeLocalRenderer(const AwesomeLocalRenderer &); AwesomeLocalRenderer &operator=(const AwesomeLocalRenderer &);; }; -status_t AwesomeLocalRenderer::init( - bool previewOnly, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight, - int32_t rotationDegrees) { - if (!previewOnly) { - // We will stick to the vanilla software-color-converting renderer - // for "previewOnly" mode, to avoid unneccessarily switching overlays - // more often than necessary. - - mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW); - - if (mLibHandle) { - typedef VideoRenderer *(*CreateRendererWithRotationFunc)( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight, - int32_t rotationDegrees); - - typedef VideoRenderer *(*CreateRendererFunc)( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight); - - CreateRendererWithRotationFunc funcWithRotation = - (CreateRendererWithRotationFunc)dlsym( - mLibHandle, - "_Z26createRendererWithRotationRKN7android2spINS_8" - "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji"); - - if (funcWithRotation) { - mTarget = - (*funcWithRotation)( - surface, componentName, colorFormat, - displayWidth, displayHeight, - decodedWidth, decodedHeight, - rotationDegrees); - } else { - if (rotationDegrees != 0) { - LOGW("renderer does not support rotation."); - } +struct AwesomeNativeWindowRenderer : public AwesomeRenderer { + AwesomeNativeWindowRenderer( + const sp<ANativeWindow> &nativeWindow, + int32_t rotationDegrees) + : mNativeWindow(nativeWindow) { + applyRotation(rotationDegrees); + } - CreateRendererFunc func = - (CreateRendererFunc)dlsym( - mLibHandle, - "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20" - "OMX_COLOR_FORMATTYPEjjjj"); - - if (func) { - mTarget = - (*func)(surface, componentName, colorFormat, - displayWidth, displayHeight, - decodedWidth, decodedHeight); - } - } + virtual void render(MediaBuffer *buffer) { + status_t err = mNativeWindow->queueBuffer( + mNativeWindow.get(), buffer->graphicBuffer().get()); + if (err != 0) { + LOGE("queueBuffer failed with error %s (%d)", strerror(-err), + -err); + return; } + + sp<MetaData> metaData = buffer->meta_data(); + metaData->setInt32(kKeyRendered, 1); } - if 
(mTarget != NULL) { - return OK; +protected: + virtual ~AwesomeNativeWindowRenderer() {} + +private: + sp<ANativeWindow> mNativeWindow; + + void applyRotation(int32_t rotationDegrees) { + uint32_t transform; + switch (rotationDegrees) { + case 0: transform = 0; break; + case 90: transform = HAL_TRANSFORM_ROT_90; break; + case 180: transform = HAL_TRANSFORM_ROT_180; break; + case 270: transform = HAL_TRANSFORM_ROT_270; break; + default: transform = 0; break; + } + + if (transform) { + CHECK_EQ(0, native_window_set_buffers_transform( + mNativeWindow.get(), transform)); + } } - mTarget = new SoftwareRenderer( - colorFormat, surface, displayWidth, displayHeight, - decodedWidth, decodedHeight, rotationDegrees); + AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &); + AwesomeNativeWindowRenderer &operator=( + const AwesomeNativeWindowRenderer &); +}; - return ((SoftwareRenderer *)mTarget)->initCheck(); -} +//////////////////////////////////////////////////////////////////////////////// AwesomePlayer::AwesomePlayer() : mQueueStarted(false), mTimeSource(NULL), mVideoRendererIsPreview(false), mAudioPlayer(NULL), + mDisplayWidth(0), + mDisplayHeight(0), mFlags(0), mExtractorFlags(0), - mLastVideoBuffer(NULL), mVideoBuffer(NULL), - mSuspensionState(NULL), mDecryptHandle(NULL) { - CHECK_EQ(mClient.connect(), OK); + CHECK_EQ(mClient.connect(), (status_t)OK); DataSource::RegisterDefaultSniffers(); @@ -259,6 +183,8 @@ AwesomePlayer::AwesomePlayer() mStreamDoneEventPending = false; mBufferingEvent = new AwesomeEvent(this, &AwesomePlayer::onBufferingUpdate); mBufferingEventPending = false; + mVideoLagEvent = new AwesomeEvent(this, &AwesomePlayer::onVideoLagUpdate); + mVideoEventPending = false; mCheckAudioStatusEvent = new AwesomeEvent( this, &AwesomePlayer::onCheckAudioStatus); @@ -285,6 +211,8 @@ void AwesomePlayer::cancelPlayerEvents(bool keepBufferingGoing) { mStreamDoneEventPending = false; mQueue.cancelEvent(mCheckAudioStatusEvent->eventID()); mAudioStatusEventPending = false; + mQueue.cancelEvent(mVideoLagEvent->eventID()); + mVideoLagEventPending = false; if (!keepBufferingGoing) { mQueue.cancelEvent(mBufferingEvent->eventID()); @@ -309,6 +237,17 @@ status_t AwesomePlayer::setDataSource_l( mUri = uri; + if (!strncmp("http://", uri, 7)) { + // Hack to support http live. 
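+ // URIs that look like HTTP live playlists (".m3u8") are rewritten to the "httplive://" scheme so that finishSetDataSource_l() routes them to the LiveSession-based data source below.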
+ + size_t len = strlen(uri); + if (!strcasecmp(&uri[len - 5], ".m3u8") + || strstr(&uri[7], "m3u8") != NULL) { + mUri = "httplive://"; + mUri.append(&uri[7]); + } + } + if (headers) { mUriHeaders = *headers; } @@ -339,6 +278,10 @@ status_t AwesomePlayer::setDataSource( return setDataSource_l(dataSource); } +status_t AwesomePlayer::setDataSource(const sp<IStreamSource> &source) { + return INVALID_OPERATION; +} + status_t AwesomePlayer::setDataSource_l( const sp<DataSource> &dataSource) { sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource); @@ -390,6 +333,18 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) { if (!haveVideo && !strncasecmp(mime, "video/", 6)) { setVideoSource(extractor->getTrack(i)); haveVideo = true; + + // Set the presentation/display size + int32_t displayWidth, displayHeight; + bool success = meta->findInt32(kKeyDisplayWidth, &displayWidth); + if (success) { + success = meta->findInt32(kKeyDisplayHeight, &displayHeight); + } + if (success) { + mDisplayWidth = displayWidth; + mDisplayHeight = displayHeight; + } + } else if (!haveAudio && !strncasecmp(mime, "audio/", 6)) { setAudioSource(extractor->getTrack(i)); haveAudio = true; @@ -423,14 +378,21 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) { } void AwesomePlayer::reset() { + LOGI("reset"); + Mutex::Autolock autoLock(mLock); reset_l(); } void AwesomePlayer::reset_l() { + LOGI("reset_l"); + mDisplayWidth = 0; + mDisplayHeight = 0; + if (mDecryptHandle != NULL) { mDrmManagerClient->setPlaybackStatus(mDecryptHandle, Playback::STOP, 0); + mDrmManagerClient->closeDecryptSession(mDecryptHandle); mDecryptHandle = NULL; mDrmManagerClient = NULL; } @@ -441,6 +403,16 @@ void AwesomePlayer::reset_l() { LOGI("interrupting the connection process"); mConnectingDataSource->disconnect(); } + + if (mFlags & PREPARING_CONNECTED) { + // We are basically done preparing, we're just buffering + // enough data to start playback, we can safely interrupt that. 
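+ // finishAsyncPrepare_l() clears the PREPARING flags, marks the player PREPARED and broadcasts mPreparedCondition, so the wait loop below does not block.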
+ finishAsyncPrepare_l(); + } + } + + if (mFlags & PREPARING) { + LOGI("waiting until preparation is completes."); } while (mFlags & PREPARING) { @@ -466,6 +438,8 @@ void AwesomePlayer::reset_l() { } mAudioSource.clear(); + LOGI("audio source cleared"); + mTimeSource = NULL; delete mAudioPlayer; @@ -473,11 +447,6 @@ void AwesomePlayer::reset_l() { mVideoRenderer.clear(); - if (mLastVideoBuffer) { - mLastVideoBuffer->release(); - mLastVideoBuffer = NULL; - } - if (mVideoBuffer) { mVideoBuffer->release(); mVideoBuffer = NULL; @@ -488,6 +457,11 @@ void AwesomePlayer::reset_l() { mRTSPController.clear(); } + if (mLiveSession != NULL) { + mLiveSession->disconnect(); + mLiveSession.clear(); + } + mRTPPusher.clear(); mRTCPPusher.clear(); mRTPSession.clear(); @@ -506,10 +480,11 @@ void AwesomePlayer::reset_l() { IPCThreadState::self()->flushCommands(); } + LOGI("video source cleared"); + mDurationUs = -1; mFlags = 0; mExtractorFlags = 0; - mVideoWidth = mVideoHeight = -1; mTimeSourceDeltaUs = 0; mVideoTimeUs = 0; @@ -522,10 +497,9 @@ void AwesomePlayer::reset_l() { mFileSource.clear(); - delete mSuspensionState; - mSuspensionState = NULL; - mBitrate = -1; + + LOGI("reset_l completed"); } void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) { @@ -539,7 +513,7 @@ void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) { } bool AwesomePlayer::getBitrate(int64_t *bitrate) { - off_t size; + off64_t size; if (mDurationUs >= 0 && mCachedSource != NULL && mCachedSource->getSize(&size) == OK) { *bitrate = size * 8000000ll / mDurationUs; // in bits/sec @@ -564,14 +538,44 @@ bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) { *durationUs = mRTSPController->getQueueDurationUs(eos); return true; } else if (mCachedSource != NULL && getBitrate(&bitrate)) { - size_t cachedDataRemaining = mCachedSource->approxDataRemaining(eos); + status_t finalStatus; + size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus); *durationUs = cachedDataRemaining * 8000000ll / bitrate; + *eos = (finalStatus != OK); return true; } return false; } +void AwesomePlayer::ensureCacheIsFetching_l() { + if (mCachedSource != NULL) { + mCachedSource->resumeFetchingIfNecessary(); + } +} + +void AwesomePlayer::onVideoLagUpdate() { + Mutex::Autolock autoLock(mLock); + if (!mVideoLagEventPending) { + return; + } + mVideoLagEventPending = false; + + int64_t audioTimeUs = mAudioPlayer->getMediaTimeUs(); + int64_t videoLateByUs = audioTimeUs - mVideoTimeUs; + + if (videoLateByUs > 300000ll) { + LOGV("video late by %lld ms.", videoLateByUs / 1000ll); + + notifyListener_l( + MEDIA_INFO, + MEDIA_INFO_VIDEO_TRACK_LAGGING, + videoLateByUs / 1000ll); + } + + postVideoLagEvent_l(); +} + void AwesomePlayer::onBufferingUpdate() { Mutex::Autolock autoLock(mLock); if (!mBufferingEventPending) { @@ -580,11 +584,14 @@ void AwesomePlayer::onBufferingUpdate() { mBufferingEventPending = false; if (mCachedSource != NULL) { - bool eos; - size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&eos); + status_t finalStatus; + size_t cachedDataRemaining = mCachedSource->approxDataRemaining(&finalStatus); + bool eos = (finalStatus != OK); if (eos) { - notifyListener_l(MEDIA_BUFFERING_UPDATE, 100); + if (finalStatus == ERROR_END_OF_STREAM) { + notifyListener_l(MEDIA_BUFFERING_UPDATE, 100); + } if (mFlags & PREPARING) { LOGV("cache has reached EOS, prepare is done."); finishAsyncPrepare_l(); @@ -611,6 +618,7 @@ void AwesomePlayer::onBufferingUpdate() { kLowWaterMarkBytes); mFlags |= CACHE_UNDERRUN; 
pause_l(); + ensureCacheIsFetching_l(); notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START); } else if (eos || cachedDataRemaining > kHighWaterMarkBytes) { if (mFlags & CACHE_UNDERRUN) { @@ -632,12 +640,16 @@ void AwesomePlayer::onBufferingUpdate() { int64_t cachedDurationUs; bool eos; if (getCachedDuration_l(&cachedDurationUs, &eos)) { + LOGV("cachedDurationUs = %.2f secs, eos=%d", + cachedDurationUs / 1E6, eos); + if ((mFlags & PLAYING) && !eos && (cachedDurationUs < kLowWaterMarkUs)) { LOGI("cache is running low (%.2f secs) , pausing.", cachedDurationUs / 1E6); mFlags |= CACHE_UNDERRUN; pause_l(); + ensureCacheIsFetching_l(); notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START); } else if (eos || cachedDurationUs > kHighWaterMarkUs) { if (mFlags & CACHE_UNDERRUN) { @@ -663,11 +675,6 @@ void AwesomePlayer::partial_reset_l() { mVideoRenderer.clear(); - if (mLastVideoBuffer) { - mLastVideoBuffer->release(); - mLastVideoBuffer = NULL; - } - if (mVideoBuffer) { mVideoBuffer->release(); mVideoBuffer = NULL; @@ -687,7 +694,8 @@ void AwesomePlayer::partial_reset_l() { IPCThreadState::self()->flushCommands(); } - CHECK_EQ(OK, initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData)); + CHECK_EQ((status_t)OK, + initVideoDecoder(OMXCodec::kIgnoreCodecSpecificData)); } void AwesomePlayer::onStreamDone() { @@ -826,6 +834,10 @@ status_t AwesomePlayer::play_l() { if (mVideoSource != NULL) { // Kick off video playback postVideoEvent_l(); + + if (mAudioSource != NULL && mVideoSource != NULL) { + postVideoLagEvent_l(); + } } if (deferredAudioSeek) { @@ -843,9 +855,53 @@ status_t AwesomePlayer::play_l() { return OK; } -status_t AwesomePlayer::initRenderer_l() { - if (mISurface == NULL) { - return OK; +void AwesomePlayer::notifyVideoSize_l() { + sp<MetaData> meta = mVideoSource->getFormat(); + + int32_t cropLeft, cropTop, cropRight, cropBottom; + if (!meta->findRect( + kKeyCropRect, &cropLeft, &cropTop, &cropRight, &cropBottom)) { + int32_t width, height; + CHECK(meta->findInt32(kKeyWidth, &width)); + CHECK(meta->findInt32(kKeyHeight, &height)); + + cropLeft = cropTop = 0; + cropRight = width - 1; + cropBottom = height - 1; + + LOGV("got dimensions only %d x %d", width, height); + } else { + LOGV("got crop rect %d, %d, %d, %d", + cropLeft, cropTop, cropRight, cropBottom); + } + + int32_t usableWidth = cropRight - cropLeft + 1; + int32_t usableHeight = cropBottom - cropTop + 1; + if (mDisplayWidth != 0) { + usableWidth = mDisplayWidth; + } + if (mDisplayHeight != 0) { + usableHeight = mDisplayHeight; + } + + int32_t rotationDegrees; + if (!mVideoTrack->getFormat()->findInt32( + kKeyRotation, &rotationDegrees)) { + rotationDegrees = 0; + } + + if (rotationDegrees == 90 || rotationDegrees == 270) { + notifyListener_l( + MEDIA_SET_VIDEO_SIZE, usableHeight, usableWidth); + } else { + notifyListener_l( + MEDIA_SET_VIDEO_SIZE, usableWidth, usableHeight); + } +} + +void AwesomePlayer::initRenderer_l() { + if (mSurface == NULL) { + return; } sp<MetaData> meta = mVideoSource->getFormat(); @@ -870,37 +926,19 @@ status_t AwesomePlayer::initRenderer_l() { // before creating a new one. IPCThreadState::self()->flushCommands(); - if (!strncmp("OMX.", component, 4)) { - // Our OMX codecs allocate buffers on the media_server side - // therefore they require a remote IOMXRenderer that knows how - // to display them. 
- - sp<IOMXRenderer> native = - mClient.interface()->createRenderer( - mISurface, component, - (OMX_COLOR_FORMATTYPE)format, - decodedWidth, decodedHeight, - mVideoWidth, mVideoHeight, - rotationDegrees); - - if (native == NULL) { - return NO_INIT; - } - - mVideoRenderer = new AwesomeRemoteRenderer(native); + if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) { + // Hardware decoders avoid the CPU color conversion by decoding + // directly to ANativeBuffers, so we must use a renderer that + // just pushes those buffers to the ANativeWindow. + mVideoRenderer = + new AwesomeNativeWindowRenderer(mSurface, rotationDegrees); } else { // Other decoders are instantiated locally and as a consequence - // allocate their buffers in local address space. - mVideoRenderer = new AwesomeLocalRenderer( - false, // previewOnly - component, - (OMX_COLOR_FORMATTYPE)format, - mISurface, - mVideoWidth, mVideoHeight, - decodedWidth, decodedHeight, rotationDegrees); + // allocate their buffers in local address space. This renderer + // then performs a color conversion and copy to get the data + // into the ANativeBuffer. + mVideoRenderer = new AwesomeLocalRenderer(mSurface, meta); } - - return mVideoRenderer->initCheck(); } status_t AwesomePlayer::pause() { @@ -943,10 +981,10 @@ bool AwesomePlayer::isPlaying() const { return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN); } -void AwesomePlayer::setISurface(const sp<ISurface> &isurface) { +void AwesomePlayer::setSurface(const sp<Surface> &surface) { Mutex::Autolock autoLock(mLock); - mISurface = isurface; + mSurface = surface; } void AwesomePlayer::setAudioSink( @@ -1063,20 +1101,6 @@ void AwesomePlayer::seekAudioIfNecessary_l() { } } -status_t AwesomePlayer::getVideoDimensions( - int32_t *width, int32_t *height) const { - Mutex::Autolock autoLock(mLock); - - if (mVideoWidth < 0 || mVideoHeight < 0) { - return UNKNOWN_ERROR; - } - - *width = mVideoWidth; - *height = mVideoHeight; - - return OK; -} - void AwesomePlayer::setAudioSource(sp<MediaSource> source) { CHECK(source != NULL); @@ -1134,7 +1158,7 @@ status_t AwesomePlayer::initVideoDecoder(uint32_t flags) { mClient.interface(), mVideoTrack->getFormat(), false, // createEncoder mVideoTrack, - NULL, flags); + NULL, flags, USE_SURFACE_ALLOC ? mSurface : NULL); if (mVideoSource != NULL) { int64_t durationUs; @@ -1145,9 +1169,6 @@ status_t AwesomePlayer::initVideoDecoder(uint32_t flags) { } } - CHECK(mVideoTrack->getFormat()->findInt32(kKeyWidth, &mVideoWidth)); - CHECK(mVideoTrack->getFormat()->findInt32(kKeyHeight, &mVideoHeight)); - status_t err = mVideoSource->start(); if (err != OK) { @@ -1165,7 +1186,7 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) { } if (mAudioPlayer != NULL) { - LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6); + LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6); // If we don't have a video time, seek audio to the originally // requested seek time instead. 
@@ -1183,6 +1204,13 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) { mFlags |= FIRST_FRAME; mSeeking = false; mSeekNotificationSent = false; + + if (mDecryptHandle != NULL) { + mDrmManagerClient->setPlaybackStatus(mDecryptHandle, + Playback::PAUSE, 0); + mDrmManagerClient->setPlaybackStatus(mDecryptHandle, + Playback::START, videoTimeUs / 1000); + } } void AwesomePlayer::onVideoEvent() { @@ -1195,11 +1223,6 @@ void AwesomePlayer::onVideoEvent() { mVideoEventPending = false; if (mSeeking) { - if (mLastVideoBuffer) { - mLastVideoBuffer->release(); - mLastVideoBuffer = NULL; - } - if (mVideoBuffer) { mVideoBuffer->release(); mVideoBuffer = NULL; @@ -1234,23 +1257,18 @@ void AwesomePlayer::onVideoEvent() { options.clearSeekTo(); if (err != OK) { - CHECK_EQ(mVideoBuffer, NULL); + CHECK(mVideoBuffer == NULL); if (err == INFO_FORMAT_CHANGED) { LOGV("VideoSource signalled format change."); + notifyVideoSize_l(); + if (mVideoRenderer != NULL) { mVideoRendererIsPreview = false; - err = initRenderer_l(); - - if (err == OK) { - continue; - } - - // fall through - } else { - continue; + initRenderer_l(); } + continue; } // So video playback is complete, but we may still have @@ -1292,16 +1310,9 @@ void AwesomePlayer::onVideoEvent() { TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource; - if (mDecryptHandle != NULL) { - mDrmManagerClient->setPlaybackStatus(mDecryptHandle, - Playback::PAUSE, 0); - mDrmManagerClient->setPlaybackStatus(mDecryptHandle, - Playback::START, timeUs / 1000); - } - if (mFlags & FIRST_FRAME) { mFlags &= ~FIRST_FRAME; - + mSinceLastDropped = 0; mTimeSourceDeltaUs = ts->getRealTimeUs() - timeUs; } @@ -1329,12 +1340,16 @@ void AwesomePlayer::onVideoEvent() { if (latenessUs > 40000) { // We're more than 40ms late. 
LOGV("we're late by %lld us (%.2f secs)", latenessUs, latenessUs / 1E6); + if ( mSinceLastDropped > FRAME_DROP_FREQ) + { + LOGV("we're late by %lld us (%.2f secs) dropping one after %d frames", latenessUs, latenessUs / 1E6, mSinceLastDropped); + mSinceLastDropped = 0; + mVideoBuffer->release(); + mVideoBuffer = NULL; - mVideoBuffer->release(); - mVideoBuffer = NULL; - - postVideoEvent_l(); - return; + postVideoEvent_l(); + return; + } } if (latenessUs < -10000) { @@ -1347,26 +1362,15 @@ void AwesomePlayer::onVideoEvent() { if (mVideoRendererIsPreview || mVideoRenderer == NULL) { mVideoRendererIsPreview = false; - status_t err = initRenderer_l(); - - if (err != OK) { - finishSeekIfNecessary(-1); - - mFlags |= VIDEO_AT_EOS; - postStreamDoneEvent_l(err); - return; - } + initRenderer_l(); } if (mVideoRenderer != NULL) { + mSinceLastDropped++; mVideoRenderer->render(mVideoBuffer); } - if (mLastVideoBuffer) { - mLastVideoBuffer->release(); - mLastVideoBuffer = NULL; - } - mLastVideoBuffer = mVideoBuffer; + mVideoBuffer->release(); mVideoBuffer = NULL; postVideoEvent_l(); @@ -1399,6 +1403,14 @@ void AwesomePlayer::postBufferingEvent_l() { mQueue.postEventWithDelay(mBufferingEvent, 1000000ll); } +void AwesomePlayer::postVideoLagEvent_l() { + if (mVideoLagEventPending) { + return; + } + mVideoLagEventPending = true; + mQueue.postEventWithDelay(mVideoLagEvent, 1000000ll); +} + void AwesomePlayer::postCheckAudioStatusEvent_l() { if (mAudioStatusEventPending) { return; @@ -1532,11 +1544,11 @@ status_t AwesomePlayer::finishSetDataSource_l() { mLock.unlock(); for (;;) { - bool eos; + status_t finalStatus; size_t cachedDataRemaining = - mCachedSource->approxDataRemaining(&eos); + mCachedSource->approxDataRemaining(&finalStatus); - if (eos || cachedDataRemaining >= kHighWaterMarkBytes + if (finalStatus != OK || cachedDataRemaining >= kHighWaterMarkBytes || (mFlags & PREPARE_CANCELLED)) { break; } @@ -1554,16 +1566,23 @@ status_t AwesomePlayer::finishSetDataSource_l() { String8 uri("http://"); uri.append(mUri.string() + 11); - sp<LiveSource> liveSource = new LiveSource(uri.string()); + if (mLooper == NULL) { + mLooper = new ALooper; + mLooper->setName("httplive"); + mLooper->start(); + } + + mLiveSession = new LiveSession; + mLooper->registerHandler(mLiveSession); - mCachedSource = new NuCachedSource2(liveSource); - dataSource = mCachedSource; + mLiveSession->connect(uri.string()); + dataSource = mLiveSession->getDataSource(); sp<MediaExtractor> extractor = MediaExtractor::Create(dataSource, MEDIA_MIMETYPE_CONTAINER_MPEG2TS); static_cast<MPEG2TSExtractor *>(extractor.get()) - ->setLiveSource(liveSource); + ->setLiveSession(mLiveSession); return setDataSource_l(extractor); } else if (!strncmp("rtsp://gtalk/", mUri.string(), 13)) { @@ -1711,9 +1730,15 @@ status_t AwesomePlayer::finishSetDataSource_l() { } dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient); - if (mDecryptHandle != NULL - && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) { - notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE); + if (mDecryptHandle != NULL) { + if (RightsStatus::RIGHTS_VALID == mDecryptHandle->status) { + if (DecryptApiType::WV_BASED == mDecryptHandle->decryptApiType) { + LOGD("Setting mCachedSource to NULL for WVM\n"); + mCachedSource.clear(); + } + } else { + notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE); + } } return setDataSource_l(extractor); @@ -1727,7 +1752,7 @@ void AwesomePlayer::abortPrepare(status_t err) { } mPrepareResult = err; - mFlags &= 
~(PREPARING|PREPARE_CANCELLED); + mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED); mAsyncPrepareEvent = NULL; mPreparedCondition.broadcast(); } @@ -1775,6 +1800,8 @@ void AwesomePlayer::onPrepareAsyncEvent() { } } + mFlags |= PREPARING_CONNECTED; + if (mCachedSource != NULL || mRTSPController != NULL) { postBufferingEvent_l(); } else { @@ -1784,175 +1811,22 @@ void AwesomePlayer::onPrepareAsyncEvent() { void AwesomePlayer::finishAsyncPrepare_l() { if (mIsAsyncPrepare) { - if (mVideoWidth < 0 || mVideoHeight < 0) { + if (mVideoSource == NULL) { notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0); } else { - int32_t rotationDegrees; - if (!mVideoTrack->getFormat()->findInt32( - kKeyRotation, &rotationDegrees)) { - rotationDegrees = 0; - } - -#if 1 - if (rotationDegrees == 90 || rotationDegrees == 270) { - notifyListener_l( - MEDIA_SET_VIDEO_SIZE, mVideoHeight, mVideoWidth); - } else -#endif - { - notifyListener_l( - MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight); - } + notifyVideoSize_l(); } notifyListener_l(MEDIA_PREPARED); } mPrepareResult = OK; - mFlags &= ~(PREPARING|PREPARE_CANCELLED); + mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED); mFlags |= PREPARED; mAsyncPrepareEvent = NULL; mPreparedCondition.broadcast(); } -status_t AwesomePlayer::suspend() { - LOGV("suspend"); - Mutex::Autolock autoLock(mLock); - - if (mSuspensionState != NULL) { - if (mLastVideoBuffer == NULL) { - //go into here if video is suspended again - //after resuming without being played between - //them - SuspensionState *state = mSuspensionState; - mSuspensionState = NULL; - reset_l(); - mSuspensionState = state; - return OK; - } - - delete mSuspensionState; - mSuspensionState = NULL; - } - - if (mFlags & PREPARING) { - mFlags |= PREPARE_CANCELLED; - if (mConnectingDataSource != NULL) { - LOGI("interrupting the connection process"); - mConnectingDataSource->disconnect(); - } - } - - while (mFlags & PREPARING) { - mPreparedCondition.wait(mLock); - } - - SuspensionState *state = new SuspensionState; - state->mUri = mUri; - state->mUriHeaders = mUriHeaders; - state->mFileSource = mFileSource; - - state->mFlags = mFlags & (PLAYING | AUTO_LOOPING | LOOPING | AT_EOS); - getPosition(&state->mPositionUs); - - if (mLastVideoBuffer) { - size_t size = mLastVideoBuffer->range_length(); - - if (size) { - int32_t unreadable; - if (!mLastVideoBuffer->meta_data()->findInt32( - kKeyIsUnreadable, &unreadable) - || unreadable == 0) { - state->mLastVideoFrameSize = size; - state->mLastVideoFrame = malloc(size); - memcpy(state->mLastVideoFrame, - (const uint8_t *)mLastVideoBuffer->data() - + mLastVideoBuffer->range_offset(), - size); - - state->mVideoWidth = mVideoWidth; - state->mVideoHeight = mVideoHeight; - - sp<MetaData> meta = mVideoSource->getFormat(); - CHECK(meta->findInt32(kKeyColorFormat, &state->mColorFormat)); - CHECK(meta->findInt32(kKeyWidth, &state->mDecodedWidth)); - CHECK(meta->findInt32(kKeyHeight, &state->mDecodedHeight)); - } else { - LOGV("Unable to save last video frame, we have no access to " - "the decoded video data."); - } - } - } - - reset_l(); - - mSuspensionState = state; - - return OK; -} - -status_t AwesomePlayer::resume() { - LOGV("resume"); - Mutex::Autolock autoLock(mLock); - - if (mSuspensionState == NULL) { - return INVALID_OPERATION; - } - - SuspensionState *state = mSuspensionState; - mSuspensionState = NULL; - - status_t err; - if (state->mFileSource != NULL) { - err = setDataSource_l(state->mFileSource); - - if (err == OK) { - mFileSource = state->mFileSource; - } - } else { - 
err = setDataSource_l(state->mUri, &state->mUriHeaders); - } - - if (err != OK) { - delete state; - state = NULL; - - return err; - } - - seekTo_l(state->mPositionUs); - - mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS); - - if (state->mLastVideoFrame && mISurface != NULL) { - mVideoRenderer = - new AwesomeLocalRenderer( - true, // previewOnly - "", - (OMX_COLOR_FORMATTYPE)state->mColorFormat, - mISurface, - state->mVideoWidth, - state->mVideoHeight, - state->mDecodedWidth, - state->mDecodedHeight, - 0); - - mVideoRendererIsPreview = true; - - ((AwesomeLocalRenderer *)mVideoRenderer.get())->render( - state->mLastVideoFrame, state->mLastVideoFrameSize); - } - - if (state->mFlags & PLAYING) { - play_l(); - } - - mSuspensionState = state; - state = NULL; - - return OK; -} - uint32_t AwesomePlayer::flags() const { return mExtractorFlags; } @@ -1966,4 +1840,3 @@ void AwesomePlayer::postAudioSeekComplete() { } } // namespace android - diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index 89cb135..b1c6b18 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -27,6 +27,7 @@ #include <media/stagefright/MetaData.h> #include <camera/Camera.h> #include <camera/CameraParameters.h> +#include <surfaceflinger/Surface.h> #include <utils/String8.h> #include <cutils/properties.h> @@ -65,6 +66,11 @@ void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) { void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) { LOGV("postData(%d, ptr:%p, size:%d)", msgType, dataPtr->pointer(), dataPtr->size()); + + sp<CameraSource> source = mSource.promote(); + if (source.get() != NULL) { + source->dataCallback(msgType, dataPtr); + } } void CameraSourceListener::postDataTimestamp( @@ -77,6 +83,10 @@ void CameraSourceListener::postDataTimestamp( } static int32_t getColorFormat(const char* colorFormat) { + if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) { + return OMX_COLOR_FormatYUV420Planar; + } + if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) { return OMX_COLOR_FormatYUV422SemiPlanar; } @@ -99,72 +109,409 @@ static int32_t getColorFormat(const char* colorFormat) { CHECK_EQ(0, "Unknown color format"); } -// static CameraSource *CameraSource::Create() { - sp<Camera> camera = Camera::connect(0); + Size size; + size.width = -1; + size.height = -1; - if (camera.get() == NULL) { - return NULL; - } - - return new CameraSource(camera); + sp<ICamera> camera; + return new CameraSource(camera, 0, size, -1, NULL, false); } // static -CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) { - if (camera.get() == NULL) { - return NULL; - } - - return new CameraSource(camera); +CameraSource *CameraSource::CreateFromCamera( + const sp<ICamera>& camera, + int32_t cameraId, + Size videoSize, + int32_t frameRate, + const sp<Surface>& surface, + bool storeMetaDataInVideoBuffers) { + + CameraSource *source = new CameraSource(camera, cameraId, + videoSize, frameRate, surface, + storeMetaDataInVideoBuffers); + return source; } -CameraSource::CameraSource(const sp<Camera> &camera) - : mCamera(camera), - mFirstFrameTimeUs(0), - mLastFrameTimestampUs(0), +CameraSource::CameraSource( + const sp<ICamera>& camera, + int32_t cameraId, + Size videoSize, + int32_t frameRate, + const sp<Surface>& surface, + bool storeMetaDataInVideoBuffers) + : mCameraFlags(0), + mVideoFrameRate(-1), + mCamera(0), + mSurface(surface), mNumFramesReceived(0), + 
mLastFrameTimestampUs(0), + mStarted(false), + mFirstFrameTimeUs(0), mNumFramesEncoded(0), mNumFramesDropped(0), mNumGlitches(0), mGlitchDurationThresholdUs(200000), - mCollectStats(false), - mStarted(false) { + mCollectStats(false) { + + mVideoSize.width = -1; + mVideoSize.height = -1; + + mInitCheck = init(camera, cameraId, + videoSize, frameRate, + storeMetaDataInVideoBuffers); +} + +status_t CameraSource::initCheck() const { + return mInitCheck; +} + +status_t CameraSource::isCameraAvailable( + const sp<ICamera>& camera, int32_t cameraId) { + + if (camera == 0) { + mCamera = Camera::connect(cameraId); + mCameraFlags &= ~FLAGS_HOT_CAMERA; + } else { + mCamera = Camera::create(camera); + mCameraFlags |= FLAGS_HOT_CAMERA; + } + + // Is camera available? + if (mCamera == 0) { + LOGE("Camera connection could not be established."); + return -EBUSY; + } + if (!(mCameraFlags & FLAGS_HOT_CAMERA)) { + mCamera->lock(); + } + return OK; +} + + +/* + * Check to see whether the requested video width and height is one + * of the supported sizes. + * @param width the video frame width in pixels + * @param height the video frame height in pixels + * @param suppportedSizes the vector of sizes that we check against + * @return true if the dimension (width and height) is supported. + */ +static bool isVideoSizeSupported( + int32_t width, int32_t height, + const Vector<Size>& supportedSizes) { + + LOGV("isVideoSizeSupported"); + for (size_t i = 0; i < supportedSizes.size(); ++i) { + if (width == supportedSizes[i].width && + height == supportedSizes[i].height) { + return true; + } + } + return false; +} + +/* + * If the preview and video output is separate, we only set the + * the video size, and applications should set the preview size + * to some proper value, and the recording framework will not + * change the preview size; otherwise, if the video and preview + * output is the same, we need to set the preview to be the same + * as the requested video size. + * + */ +/* + * Query the camera to retrieve the supported video frame sizes + * and also to see whether CameraParameters::setVideoSize() + * is supported or not. + * @param params CameraParameters to retrieve the information + * @@param isSetVideoSizeSupported retunrs whether method + * CameraParameters::setVideoSize() is supported or not. + * @param sizes returns the vector of Size objects for the + * supported video frame sizes advertised by the camera. + */ +static void getSupportedVideoSizes( + const CameraParameters& params, + bool *isSetVideoSizeSupported, + Vector<Size>& sizes) { + + *isSetVideoSizeSupported = true; + params.getSupportedVideoSizes(sizes); + if (sizes.size() == 0) { + LOGD("Camera does not support setVideoSize()"); + params.getSupportedPreviewSizes(sizes); + *isSetVideoSizeSupported = false; + } +} + +/* + * Check whether the camera has the supported color format + * @param params CameraParameters to retrieve the information + * @return OK if no error. + */ +status_t CameraSource::isCameraColorFormatSupported( + const CameraParameters& params) { + mColorFormat = getColorFormat(params.get( + CameraParameters::KEY_VIDEO_FRAME_FORMAT)); + if (mColorFormat == -1) { + return BAD_VALUE; + } + return OK; +} + +/* + * Configure the camera to use the requested video size + * (width and height) and/or frame rate. If both width and + * height are -1, configuration on the video size is skipped. + * if frameRate is -1, configuration on the frame rate + * is skipped. 
Skipping the configuration allows one to + * use the current camera setting without the need to + * actually know the specific values (see Create() method). + * + * @param params the CameraParameters to be configured + * @param width the target video frame width in pixels + * @param height the target video frame height in pixels + * @param frameRate the target frame rate in frames per second. + * @return OK if no error. + */ +status_t CameraSource::configureCamera( + CameraParameters* params, + int32_t width, int32_t height, + int32_t frameRate) { + + Vector<Size> sizes; + bool isSetVideoSizeSupportedByCamera = true; + getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes); + bool isCameraParamChanged = false; + if (width != -1 && height != -1) { + if (!isVideoSizeSupported(width, height, sizes)) { + LOGE("Video dimension (%dx%d) is unsupported", width, height); + releaseCamera(); + return BAD_VALUE; + } + if (isSetVideoSizeSupportedByCamera) { + params->setVideoSize(width, height); + } else { + params->setPreviewSize(width, height); + } + isCameraParamChanged = true; + } else if ((width == -1 && height != -1) || + (width != -1 && height == -1)) { + // If one and only one of the width and height is -1 + // we reject such a request. + LOGE("Requested video size (%dx%d) is not supported", width, height); + releaseCamera(); + return BAD_VALUE; + } else { // width == -1 && height == -1 + // Do not configure the camera. + // Use the current width and height value setting from the camera. + } + + if (frameRate != -1) { + CHECK(frameRate > 0 && frameRate <= 120); + const char* supportedFrameRates = + params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES); + CHECK(supportedFrameRates != NULL); + LOGV("Supported frame rates: %s", supportedFrameRates); + char buf[4]; + snprintf(buf, 4, "%d", frameRate); + if (strstr(supportedFrameRates, buf) == NULL) { + LOGE("Requested frame rate (%d) is not supported: %s", + frameRate, supportedFrameRates); + releaseCamera(); + return BAD_VALUE; + } + + // The frame rate is supported, set the camera to the requested value. + params->setPreviewFrameRate(frameRate); + isCameraParamChanged = true; + } else { // frameRate == -1 + // Do not configure the camera. + // Use the current frame rate value setting from the camera + } + + if (isCameraParamChanged) { + // Either frame rate or frame size needs to be changed. + String8 s = params->flatten(); + if (OK != mCamera->setParameters(s)) { + LOGE("Could not change settings." + " Someone else is using camera %p?", mCamera.get()); + return -EBUSY; + } + } + return OK; +} +/* + * Check whether the requested video frame size + * has been successfully configured or not. If both width and height + * are -1, check on the current width and height value setting + * is performed. + * + * @param params CameraParameters to retrieve the information + * @param the target video frame width in pixels to check against + * @param the target video frame height in pixels to check against + * @return OK if no error + */ +status_t CameraSource::checkVideoSize( + const CameraParameters& params, + int32_t width, int32_t height) { + + // The actual video size is the same as the preview size + // if the camera hal does not support separate video and + // preview output. In this case, we retrieve the video + // size from preview. 
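+ // (An empty result from getSupportedVideoSizes() is what indicates that the HAL has no separate video output; see the check below.)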
+ int32_t frameWidthActual = -1; + int32_t frameHeightActual = -1; + Vector<Size> sizes; + params.getSupportedVideoSizes(sizes); + if (sizes.size() == 0) { + // video size is the same as preview size + params.getPreviewSize(&frameWidthActual, &frameHeightActual); + } else { + // video size may not be the same as preview + params.getVideoSize(&frameWidthActual, &frameHeightActual); + } + if (frameWidthActual < 0 || frameHeightActual < 0) { + LOGE("Failed to retrieve video frame size (%dx%d)", + frameWidthActual, frameHeightActual); + return UNKNOWN_ERROR; + } + + // Check the actual video frame size against the target/requested + // video frame size. + if (width != -1 && height != -1) { + if (frameWidthActual != width || frameHeightActual != height) { + LOGE("Failed to set video frame size to %dx%d. " + "The actual video size is %dx%d ", width, height, + frameWidthActual, frameHeightActual); + return UNKNOWN_ERROR; + } + } + + // Good now. + mVideoSize.width = frameWidthActual; + mVideoSize.height = frameHeightActual; + return OK; +} + +/* + * Check the requested frame rate has been successfully configured or not. + * If the target frameRate is -1, check on the current frame rate value + * setting is performed. + * + * @param params CameraParameters to retrieve the information + * @param the target video frame rate to check against + * @return OK if no error. + */ +status_t CameraSource::checkFrameRate( + const CameraParameters& params, + int32_t frameRate) { + + int32_t frameRateActual = params.getPreviewFrameRate(); + if (frameRateActual < 0) { + LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual); + return UNKNOWN_ERROR; + } + + // Check the actual video frame rate against the target/requested + // video frame rate. + if (frameRate != -1 && (frameRateActual - frameRate) != 0) { + LOGE("Failed to set preview frame rate to %d fps. The actual " + "frame rate is %d", frameRate, frameRateActual); + return UNKNOWN_ERROR; + } + + // Good now. + mVideoFrameRate = frameRateActual; + return OK; +} + +/* + * Initialize the CameraSource to so that it becomes + * ready for providing the video input streams as requested. + * @param camera the camera object used for the video source + * @param cameraId if camera == 0, use camera with this id + * as the video source + * @param videoSize the target video frame size. If both + * width and height in videoSize is -1, use the current + * width and heigth settings by the camera + * @param frameRate the target frame rate in frames per second. + * if it is -1, use the current camera frame rate setting. + * @param storeMetaDataInVideoBuffers request to store meta + * data or real YUV data in video buffers. Request to + * store meta data in video buffers may not be honored + * if the source does not support this feature. + * + * @return OK if no error. + */ +status_t CameraSource::init( + const sp<ICamera>& camera, + int32_t cameraId, + Size videoSize, + int32_t frameRate, + bool storeMetaDataInVideoBuffers) { + + status_t err = OK; int64_t token = IPCThreadState::self()->clearCallingIdentity(); - String8 s = mCamera->getParameters(); - IPCThreadState::self()->restoreCallingIdentity(token); - printf("params: \"%s\"\n", s.string()); + if ((err = isCameraAvailable(camera, cameraId)) != OK) { + return err; + } + CameraParameters params(mCamera->getParameters()); + if ((err = isCameraColorFormatSupported(params)) != OK) { + return err; + } + + // Set the camera to use the requested video frame size + // and/or frame rate. 
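+ // Passing -1 for both dimensions or for the frame rate keeps the camera's current setting (see configureCamera() above).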
+ if ((err = configureCamera(&params, + videoSize.width, videoSize.height, + frameRate))) { + return err; + } + + // Check on video frame size and frame rate. + CameraParameters newCameraParams(mCamera->getParameters()); + if ((err = checkVideoSize(newCameraParams, + videoSize.width, videoSize.height)) != OK) { + return err; + } + if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) { + return err; + } + + // This CHECK is good, since we just passed the lock/unlock + // check earlier by calling mCamera->setParameters(). + CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface)); + + // By default, do not store metadata in video buffers + mIsMetaDataStoredInVideoBuffers = false; + mCamera->storeMetaDataInBuffers(false); + if (storeMetaDataInVideoBuffers) { + if (OK == mCamera->storeMetaDataInBuffers(true)) { + mIsMetaDataStoredInVideoBuffers = true; + } + } - int32_t width, height, stride, sliceHeight; - CameraParameters params(s); - params.getPreviewSize(&width, &height); + IPCThreadState::self()->restoreCallingIdentity(token); - // Calculate glitch duraton threshold based on frame rate - int32_t frameRate = params.getPreviewFrameRate(); - int64_t glitchDurationUs = (1000000LL / frameRate); + int64_t glitchDurationUs = (1000000LL / mVideoFrameRate); if (glitchDurationUs > mGlitchDurationThresholdUs) { mGlitchDurationThresholdUs = glitchDurationUs; } - const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT); - CHECK(colorFormatStr != NULL); - int32_t colorFormat = getColorFormat(colorFormatStr); - // XXX: query camera for the stride and slice height // when the capability becomes available. - stride = width; - sliceHeight = height; - mMeta = new MetaData; - mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); - mMeta->setInt32(kKeyColorFormat, colorFormat); - mMeta->setInt32(kKeyWidth, width); - mMeta->setInt32(kKeyHeight, height); - mMeta->setInt32(kKeyStride, stride); - mMeta->setInt32(kKeySliceHeight, sliceHeight); - + mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); + mMeta->setInt32(kKeyColorFormat, mColorFormat); + mMeta->setInt32(kKeyWidth, mVideoSize.width); + mMeta->setInt32(kKeyHeight, mVideoSize.height); + mMeta->setInt32(kKeyStride, mVideoSize.width); + mMeta->setInt32(kKeySliceHeight, mVideoSize.height); + mMeta->setInt32(kKeyFrameRate, mVideoFrameRate); + return OK; } CameraSource::~CameraSource() { @@ -173,8 +520,17 @@ CameraSource::~CameraSource() { } } +void CameraSource::startCameraRecording() { + CHECK_EQ(OK, mCamera->startRecording()); + CHECK(mCamera->recordingEnabled()); +} + status_t CameraSource::start(MetaData *meta) { CHECK(!mStarted); + if (mInitCheck != OK) { + LOGE("CameraSource is not initialized yet"); + return mInitCheck; + } char value[PROPERTY_VALUE_MAX]; if (property_get("media.stagefright.record-stats", value, NULL) @@ -188,31 +544,51 @@ status_t CameraSource::start(MetaData *meta) { mStartTimeUs = startTimeUs; } + // Call setListener first before calling startCameraRecording() + // to avoid recording frames being dropped.
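One consequence of init() above: the glitch threshold is derived directly from the negotiated frame rate (glitchDurationUs = 1000000 / mVideoFrameRate), so at 30 fps any gap between frames longer than roughly 33 ms counts as a glitch. A self-contained sketch of that bookkeeping follows, with invented timestamps; the start() body continues below.

#include <cstdint>
#include <cstdio>

// Sketch of the glitch rule: a "glitch" is an inter-frame gap larger than one
// nominal frame duration at the configured frame rate. Timestamps are made up.
int main() {
    const int32_t frameRate = 30;                             // frames per second
    const int64_t glitchThresholdUs = 1000000LL / frameRate;  // ~33333 us

    const int64_t timestampsUs[] = {0, 33000, 66000, 150000, 183000};
    int64_t lastUs = timestampsUs[0];
    int numGlitches = 0;
    for (int i = 1; i < 5; ++i) {
        if (timestampsUs[i] - lastUs > glitchThresholdUs) {
            ++numGlitches;   // the 66 ms -> 150 ms jump is the only glitch here
        }
        lastUs = timestampsUs[i];
    }
    std::printf("threshold %lld us, glitches %d\n",
                (long long)glitchThresholdUs, numGlitches);
    return 0;
}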
int64_t token = IPCThreadState::self()->clearCallingIdentity(); mCamera->setListener(new CameraSourceListener(this)); - CHECK_EQ(OK, mCamera->startRecording()); + startCameraRecording(); IPCThreadState::self()->restoreCallingIdentity(token); mStarted = true; return OK; } +void CameraSource::stopCameraRecording() { + mCamera->setListener(NULL); + mCamera->stopRecording(); +} + +void CameraSource::releaseCamera() { + LOGV("releaseCamera"); + if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) { + LOGV("Camera was cold when we started, stopping preview"); + mCamera->stopPreview(); + } + mCamera->unlock(); + mCamera.clear(); + mCamera = 0; + mCameraFlags = 0; +} + status_t CameraSource::stop() { - LOGV("stop"); + LOGD("stop: E"); Mutex::Autolock autoLock(mLock); mStarted = false; mFrameAvailableCondition.signal(); int64_t token = IPCThreadState::self()->clearCallingIdentity(); - mCamera->setListener(NULL); - mCamera->stopRecording(); releaseQueuedFrames(); while (!mFramesBeingEncoded.empty()) { - LOGI("Waiting for outstanding frames being encoded: %d", + if (NO_ERROR != + mFrameCompleteCondition.waitRelative(mLock, 3000000000LL)) { + LOGW("Timed out waiting for outstanding frames being encoded: %d", mFramesBeingEncoded.size()); - mFrameCompleteCondition.wait(mLock); + } } - mCamera = NULL; + stopCameraRecording(); + releaseCamera(); IPCThreadState::self()->restoreCallingIdentity(token); if (mCollectStats) { @@ -221,15 +597,27 @@ status_t CameraSource::stop() { mLastFrameTimestampUs - mFirstFrameTimeUs); } + if (mNumGlitches > 0) { + LOGW("%d long delays between neighboring video frames during", + mNumGlitches); + } + CHECK_EQ(mNumFramesReceived, mNumFramesEncoded + mNumFramesDropped); + LOGD("stop: X"); return OK; } +void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) { + if (mCamera != NULL) { + mCamera->releaseRecordingFrame(frame); + } +} + void CameraSource::releaseQueuedFrames() { List<sp<IMemory> >::iterator it; while (!mFramesReceived.empty()) { it = mFramesReceived.begin(); - mCamera->releaseRecordingFrame(*it); + releaseRecordingFrame(*it); mFramesReceived.erase(it); ++mNumFramesDropped; } @@ -241,7 +629,7 @@ sp<MetaData> CameraSource::getFormat() { void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) { int64_t token = IPCThreadState::self()->clearCallingIdentity(); - mCamera->releaseRecordingFrame(frame); + releaseRecordingFrame(frame); IPCThreadState::self()->restoreCallingIdentity(token); } @@ -251,7 +639,6 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) { for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin(); it != mFramesBeingEncoded.end(); ++it) { if ((*it)->pointer() == buffer->data()) { - releaseOneRecordingFrame((*it)); mFramesBeingEncoded.erase(it); ++mNumFramesEncoded; @@ -281,45 +668,26 @@ status_t CameraSource::read( { Mutex::Autolock autoLock(mLock); - while (mStarted) { - while(mFramesReceived.empty()) { - mFrameAvailableCondition.wait(mLock); - } - - if (!mStarted) { - return OK; - } - - frame = *mFramesReceived.begin(); - mFramesReceived.erase(mFramesReceived.begin()); - - frameTime = *mFrameTimes.begin(); - mFrameTimes.erase(mFrameTimes.begin()); - int64_t skipTimeUs; - if (!options || !options->getSkipFrame(&skipTimeUs)) { - skipTimeUs = frameTime; - } - if (skipTimeUs > frameTime) { - LOGV("skipTimeUs: %lld us > frameTime: %lld us", - skipTimeUs, frameTime); - releaseOneRecordingFrame(frame); - ++mNumFramesDropped; - // Safeguard against the abuse of the kSkipFrame_Option. 
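The reworked stop() above replaces the unbounded wait for in-flight frames with a waitRelative() loop capped at 3 seconds per iteration, so shutdown cannot hang forever on a stuck encoder. A rough standard-C++ equivalent of that bounded-wait pattern is sketched below; the type and member names are invented, not the ones in the patch.

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <list>
#include <mutex>

// Sketch of the bounded wait: instead of blocking forever until every
// in-flight frame has been returned, wait at most 3 seconds per iteration and
// log if frames are still outstanding.
struct FrameTracker {
    std::mutex lock;
    std::condition_variable frameComplete;
    std::list<int> framesBeingEncoded;

    void waitForOutstandingFrames() {
        std::unique_lock<std::mutex> guard(lock);
        while (!framesBeingEncoded.empty()) {
            if (frameComplete.wait_for(guard, std::chrono::seconds(3)) ==
                    std::cv_status::timeout) {
                std::printf("Timed out waiting for %zu outstanding frames\n",
                            framesBeingEncoded.size());
            }
        }
    }
};

int main() {
    FrameTracker tracker;                 // nothing outstanding: returns at once
    tracker.waitForOutstandingFrames();
    return 0;
}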
- if (skipTimeUs - frameTime >= 1E6) { - LOGE("Frame skipping requested is way too long: %lld us", - skipTimeUs - frameTime); - return UNKNOWN_ERROR; - } - } else { - mFramesBeingEncoded.push_back(frame); - *buffer = new MediaBuffer(frame->pointer(), frame->size()); - (*buffer)->setObserver(this); - (*buffer)->add_ref(); - (*buffer)->meta_data()->setInt64(kKeyTime, frameTime); - - return OK; + while (mStarted && mFramesReceived.empty()) { + if (NO_ERROR != + mFrameAvailableCondition.waitRelative(mLock, 3000000000LL)) { + LOGW("Timed out waiting for incoming camera video frames: %lld us", + mLastFrameTimestampUs); } } + if (!mStarted) { + return OK; + } + frame = *mFramesReceived.begin(); + mFramesReceived.erase(mFramesReceived.begin()); + + frameTime = *mFrameTimes.begin(); + mFrameTimes.erase(mFrameTimes.begin()); + mFramesBeingEncoded.push_back(frame); + *buffer = new MediaBuffer(frame->pointer(), frame->size()); + (*buffer)->setObserver(this); + (*buffer)->add_ref(); + (*buffer)->meta_data()->setInt64(kKeyTime, frameTime); } return OK; } @@ -338,11 +706,18 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs, if (mNumFramesReceived > 0 && timestampUs - mLastFrameTimestampUs > mGlitchDurationThresholdUs) { if (mNumGlitches % 10 == 0) { // Don't spam the log - LOGW("Long delay detected in video recording"); + LOGV("Long delay detected in video recording"); } ++mNumGlitches; } + // May need to skip frame or modify timestamp. Currently implemented + // by the subclass CameraSourceTimeLapse. + if (skipCurrentFrame(timestampUs)) { + releaseOneRecordingFrame(data); + return; + } + mLastFrameTimestampUs = timestampUs; if (mNumFramesReceived == 0) { mFirstFrameTimeUs = timestampUs; @@ -367,4 +742,31 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs, mFrameAvailableCondition.signal(); } +size_t CameraSource::getNumberOfVideoBuffers() const { + LOGV("getNumberOfVideoBuffers"); + size_t nBuffers = 0; + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + if (mInitCheck == OK && mCamera != 0) { + nBuffers = mCamera->getNumberOfVideoBuffers(); + } + IPCThreadState::self()->restoreCallingIdentity(token); + return nBuffers; +} + +sp<IMemory> CameraSource::getVideoBuffer(size_t index) const { + LOGV("getVideoBuffer: %d", index); + sp<IMemory> buffer = 0; + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + if (mInitCheck == OK && mCamera != 0) { + buffer = mCamera->getVideoBuffer(index); + } + IPCThreadState::self()->restoreCallingIdentity(token); + return buffer; +} + +bool CameraSource::isMetaDataStoredInVideoBuffers() const { + LOGV("isMetaDataStoredInVideoBuffers"); + return mIsMetaDataStoredInVideoBuffers; +} + } // namespace android diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp new file mode 100644 index 0000000..b58b9d8 --- /dev/null +++ b/media/libstagefright/CameraSourceTimeLapse.cpp @@ -0,0 +1,536 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "CameraSourceTimeLapse" + +#include <binder/IPCThreadState.h> +#include <binder/MemoryBase.h> +#include <binder/MemoryHeapBase.h> +#include <media/stagefright/CameraSource.h> +#include <media/stagefright/CameraSourceTimeLapse.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/YUVImage.h> +#include <media/stagefright/YUVCanvas.h> +#include <camera/Camera.h> +#include <camera/CameraParameters.h> +#include <ui/Rect.h> +#include <utils/String8.h> +#include <utils/Vector.h> +#include "OMX_Video.h" +#include <limits.h> + +namespace android { + +// static +CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera( + const sp<ICamera> &camera, + int32_t cameraId, + Size videoSize, + int32_t videoFrameRate, + const sp<Surface>& surface, + int64_t timeBetweenTimeLapseFrameCaptureUs) { + + CameraSourceTimeLapse *source = new + CameraSourceTimeLapse(camera, cameraId, + videoSize, videoFrameRate, surface, + timeBetweenTimeLapseFrameCaptureUs); + + if (source != NULL) { + if (source->initCheck() != OK) { + delete source; + return NULL; + } + } + return source; +} + +CameraSourceTimeLapse::CameraSourceTimeLapse( + const sp<ICamera>& camera, + int32_t cameraId, + Size videoSize, + int32_t videoFrameRate, + const sp<Surface>& surface, + int64_t timeBetweenTimeLapseFrameCaptureUs) + : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, true), + mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs), + mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate), + mLastTimeLapseFrameRealTimestampUs(0), + mSkipCurrentFrame(false) { + + LOGD("starting time lapse mode: %lld us", mTimeBetweenTimeLapseFrameCaptureUs); + mVideoWidth = videoSize.width; + mVideoHeight = videoSize.height; + + if (trySettingVideoSize(videoSize.width, videoSize.height)) { + mUseStillCameraForTimeLapse = false; + } else { + // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater + // than the fastest rate at which the still camera can take pictures. + mUseStillCameraForTimeLapse = true; + CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height)); + mNeedCropping = computeCropRectangleOffset(); + mMeta->setInt32(kKeyWidth, videoSize.width); + mMeta->setInt32(kKeyHeight, videoSize.height); + } + + // Initialize quick stop variables. + mQuickStop = false; + mForceRead = false; + mLastReadBufferCopy = NULL; + mStopWaitingForIdleCamera = false; +} + +CameraSourceTimeLapse::~CameraSourceTimeLapse() { +} + +void CameraSourceTimeLapse::startQuickReadReturns() { + Mutex::Autolock autoLock(mQuickStopLock); + LOGV("Enabling quick read returns"); + + // Enable quick stop mode. + mQuickStop = true; + + if (mUseStillCameraForTimeLapse) { + // wake up the thread right away. + mTakePictureCondition.signal(); + } else { + // Force dataCallbackTimestamp() coming from the video camera to not skip the + // next frame as we want read() to get a get a frame right away. 
+ mForceRead = true; + } +} + +bool CameraSourceTimeLapse::trySettingVideoSize(int32_t width, int32_t height) { + LOGV("trySettingVideoSize: %dx%d", width, height); + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + String8 s = mCamera->getParameters(); + + CameraParameters params(s); + Vector<Size> supportedSizes; + params.getSupportedVideoSizes(supportedSizes); + bool videoOutputSupported = false; + if (supportedSizes.size() == 0) { + params.getSupportedPreviewSizes(supportedSizes); + } else { + videoOutputSupported = true; + } + + bool videoSizeSupported = false; + for (uint32_t i = 0; i < supportedSizes.size(); ++i) { + int32_t pictureWidth = supportedSizes[i].width; + int32_t pictureHeight = supportedSizes[i].height; + + if ((pictureWidth == width) && (pictureHeight == height)) { + videoSizeSupported = true; + } + } + + bool isSuccessful = false; + if (videoSizeSupported) { + LOGV("Video size (%d, %d) is supported", width, height); + if (videoOutputSupported) { + params.setVideoSize(width, height); + } else { + params.setPreviewSize(width, height); + } + if (mCamera->setParameters(params.flatten()) == OK) { + isSuccessful = true; + } else { + LOGE("Failed to set preview size to %dx%d", width, height); + isSuccessful = false; + } + } + + IPCThreadState::self()->restoreCallingIdentity(token); + return isSuccessful; +} + +bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) { + LOGV("setPictureSizeToClosestSupported: %dx%d", width, height); + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + String8 s = mCamera->getParameters(); + IPCThreadState::self()->restoreCallingIdentity(token); + + CameraParameters params(s); + Vector<Size> supportedSizes; + params.getSupportedPictureSizes(supportedSizes); + + int32_t minPictureSize = INT_MAX; + for (uint32_t i = 0; i < supportedSizes.size(); ++i) { + int32_t pictureWidth = supportedSizes[i].width; + int32_t pictureHeight = supportedSizes[i].height; + + if ((pictureWidth >= width) && (pictureHeight >= height)) { + int32_t pictureSize = pictureWidth*pictureHeight; + if (pictureSize < minPictureSize) { + minPictureSize = pictureSize; + mPictureWidth = pictureWidth; + mPictureHeight = pictureHeight; + } + } + } + LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight); + return (minPictureSize != INT_MAX); +} + +bool CameraSourceTimeLapse::computeCropRectangleOffset() { + if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) { + return false; + } + + CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight)); + + int32_t widthDifference = mPictureWidth - mVideoWidth; + int32_t heightDifference = mPictureHeight - mVideoHeight; + + mCropRectStartX = widthDifference/2; + mCropRectStartY = heightDifference/2; + + LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY); + + return true; +} + +void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) { + Mutex::Autolock autoLock(mQuickStopLock); + if (mQuickStop && (buffer == mLastReadBufferCopy)) { + buffer->setObserver(NULL); + buffer->release(); + } else { + return CameraSource::signalBufferReturned(buffer); + } +} + +void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) { + size_t sourceSize = sourceBuffer.size(); + void* sourcePointer = sourceBuffer.data(); + + (*newBuffer) = new MediaBuffer(sourceSize); + memcpy((*newBuffer)->data(), sourcePointer, sourceSize); + + 
(*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime); +} + +void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) { + int64_t frameTime; + CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime)); + createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy); + mLastReadBufferCopy->add_ref(); + mLastReadBufferCopy->setObserver(this); +} + +status_t CameraSourceTimeLapse::read( + MediaBuffer **buffer, const ReadOptions *options) { + if (mLastReadBufferCopy == NULL) { + mLastReadStatus = CameraSource::read(buffer, options); + + // mQuickStop may have turned to true while read was blocked. Make a copy of + // the buffer in that case. + Mutex::Autolock autoLock(mQuickStopLock); + if (mQuickStop && *buffer) { + fillLastReadBufferCopy(**buffer); + } + return mLastReadStatus; + } else { + (*buffer) = mLastReadBufferCopy; + (*buffer)->add_ref(); + return mLastReadStatus; + } +} + +// static +void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) { + CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); + source->threadTimeLapseEntry(); + return NULL; +} + +void CameraSourceTimeLapse::threadTimeLapseEntry() { + while (mStarted) { + { + Mutex::Autolock autoLock(mCameraIdleLock); + if (!mCameraIdle) { + mCameraIdleCondition.wait(mCameraIdleLock); + } + CHECK(mCameraIdle); + mCameraIdle = false; + } + + // Even if mQuickStop == true we need to take one more picture + // as a read() may be blocked, waiting for a frame to get available. + // After this takePicture, if mQuickStop == true, we can safely exit + // this thread as read() will make a copy of this last frame and keep + // returning it in the quick stop mode. + Mutex::Autolock autoLock(mQuickStopLock); + CHECK_EQ(OK, mCamera->takePicture()); + if (mQuickStop) { + LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true"); + return; + } + mTakePictureCondition.waitRelative(mQuickStopLock, + mTimeBetweenTimeLapseFrameCaptureUs * 1000); + } + LOGV("threadTimeLapseEntry: Exiting due to mStarted = false"); +} + +void CameraSourceTimeLapse::startCameraRecording() { + if (mUseStillCameraForTimeLapse) { + LOGV("start time lapse recording using still camera"); + + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + String8 s = mCamera->getParameters(); + + CameraParameters params(s); + params.setPictureSize(mPictureWidth, mPictureHeight); + mCamera->setParameters(params.flatten()); + mCameraIdle = true; + mStopWaitingForIdleCamera = false; + + // disable shutter sound and play the recording sound. + mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0); + mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0); + IPCThreadState::self()->restoreCallingIdentity(token); + + // create a thread which takes pictures in a loop + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this); + pthread_attr_destroy(&attr); + } else { + LOGV("start time lapse recording using video camera"); + CHECK_EQ(OK, mCamera->startRecording()); + } +} + +void CameraSourceTimeLapse::stopCameraRecording() { + if (mUseStillCameraForTimeLapse) { + void *dummy; + pthread_join(mThreadTimeLapse, &dummy); + + // Last takePicture may still be underway. Wait for the camera to get + // idle. 
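For the still-camera time-lapse path set up above, setPictureSizeToClosestSupported() picks the smallest supported picture size that covers the requested video size, and computeCropRectangleOffset() centers the crop rectangle inside it. The standalone sketch below reproduces both decisions with an invented size list; stopCameraRecording() continues below.

#include <climits>
#include <cstdio>
#include <vector>

// Pick the smallest supported picture size that covers the requested video
// size, then center the crop rectangle inside it. Sizes are examples only.
struct Size { int width; int height; };

static bool pickClosestPictureSize(const std::vector<Size> &supported,
                                   int videoW, int videoH, Size *out) {
    int bestArea = INT_MAX;
    for (const Size &s : supported) {
        if (s.width >= videoW && s.height >= videoH &&
            s.width * s.height < bestArea) {
            bestArea = s.width * s.height;
            *out = s;
        }
    }
    return bestArea != INT_MAX;
}

int main() {
    const std::vector<Size> supported = {{640, 480}, {1280, 960}, {2048, 1536}};
    Size picture = {0, 0};
    if (pickClosestPictureSize(supported, 1280, 720, &picture)) {
        const int cropStartX = (picture.width - 1280) / 2;   // 0
        const int cropStartY = (picture.height - 720) / 2;   // 120
        std::printf("picture %dx%d, crop offset (%d, %d)\n",
                    picture.width, picture.height, cropStartX, cropStartY);
    }
    return 0;
}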
+ Mutex::Autolock autoLock(mCameraIdleLock); + mStopWaitingForIdleCamera = true; + if (!mCameraIdle) { + mCameraIdleCondition.wait(mCameraIdleLock); + } + CHECK(mCameraIdle); + mCamera->setListener(NULL); + + // play the recording sound. + mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0); + } else { + mCamera->setListener(NULL); + mCamera->stopRecording(); + } + if (mLastReadBufferCopy) { + mLastReadBufferCopy->release(); + mLastReadBufferCopy = NULL; + } +} + +void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) { + if (!mUseStillCameraForTimeLapse && + mCamera != NULL) { + mCamera->releaseRecordingFrame(frame); + } +} + +sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) { + size_t source_size = source_data->size(); + void* source_pointer = source_data->pointer(); + + sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size); + sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size); + memcpy(newMemory->pointer(), source_pointer, source_size); + return newMemory; +} + +// Allocates IMemory of final type MemoryBase with the given size. +sp<IMemory> allocateIMemory(size_t size) { + sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size); + sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size); + return newMemory; +} + +// static +void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) { + CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); + source->threadStartPreview(); + return NULL; +} + +void CameraSourceTimeLapse::threadStartPreview() { + CHECK_EQ(OK, mCamera->startPreview()); + Mutex::Autolock autoLock(mCameraIdleLock); + mCameraIdle = true; + mCameraIdleCondition.signal(); +} + +void CameraSourceTimeLapse::restartPreview() { + // Start this in a different thread, so that the dataCallback can return + LOGV("restartPreview"); + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED); + + pthread_t threadPreview; + pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this); + pthread_attr_destroy(&attr); +} + +sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) { + // find the YUV format + int32_t srcFormat; + CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat)); + YUVImage::YUVFormat yuvFormat; + if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + yuvFormat = YUVImage::YUV420SemiPlanar; + } else { + CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar); + yuvFormat = YUVImage::YUV420Planar; + } + + // allocate memory for cropped image and setup a canvas using it. + sp<IMemory> croppedImageMemory = allocateIMemory( + YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight)); + YUVImage yuvImageCropped(yuvFormat, + mVideoWidth, mVideoHeight, + (uint8_t *)croppedImageMemory->pointer()); + YUVCanvas yuvCanvasCrop(yuvImageCropped); + + YUVImage yuvImageSource(yuvFormat, + mPictureWidth, mPictureHeight, + (uint8_t *)source_data->pointer()); + yuvCanvasCrop.CopyImageRect( + Rect(mCropRectStartX, mCropRectStartY, + mCropRectStartX + mVideoWidth, + mCropRectStartY + mVideoHeight), + 0, 0, + yuvImageSource); + + return croppedImageMemory; +} + +void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) { + if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) { + // takePicture will complete after this callback, so restart preview. 
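cropYUVImage() above allocates its destination via YUVImage::bufferSize(); for the 4:2:0 formats involved that works out to width * height * 3 / 2 bytes. A quick worked check with arbitrarily chosen dimensions follows; dataCallback() continues below.

#include <cstdio>

// Arithmetic check for the cropped-image allocation: a YUV 4:2:0 frame
// (planar or semi-planar) needs width*height luma bytes plus two quarter-size
// chroma planes, i.e. width*height*3/2 bytes in total.
int main() {
    const int width = 1280, height = 720;
    const unsigned long lumaBytes = (unsigned long)width * height;        // 921600
    const unsigned long chromaBytes = 2UL * (width / 2) * (height / 2);   // 460800
    std::printf("YUV420 buffer: %lu bytes\n", lumaBytes + chromaBytes);   // 1382400
    return 0;
}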
+ restartPreview(); + return; + } + if (msgType != CAMERA_MSG_RAW_IMAGE) { + return; + } + + LOGV("dataCallback for timelapse still frame"); + CHECK_EQ(true, mUseStillCameraForTimeLapse); + + int64_t timestampUs; + if (mNumFramesReceived == 0) { + timestampUs = mStartTimeUs; + } else { + timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; + } + + if (mNeedCropping) { + sp<IMemory> croppedImageData = cropYUVImage(data); + dataCallbackTimestamp(timestampUs, msgType, croppedImageData); + } else { + sp<IMemory> dataCopy = createIMemoryCopy(data); + dataCallbackTimestamp(timestampUs, msgType, dataCopy); + } +} + +bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { + if (mSkipCurrentFrame) { + mSkipCurrentFrame = false; + return true; + } else { + return false; + } +} + +bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { + if (!mUseStillCameraForTimeLapse) { + if (mLastTimeLapseFrameRealTimestampUs == 0) { + // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs + // to current time (timestampUs) and save frame data. + LOGV("dataCallbackTimestamp timelapse: initial frame"); + + mLastTimeLapseFrameRealTimestampUs = *timestampUs; + return false; + } + + { + Mutex::Autolock autoLock(mQuickStopLock); + + // mForceRead may be set to true by startQuickReadReturns(). In that + // case don't skip this frame. + if (mForceRead) { + LOGV("dataCallbackTimestamp timelapse: forced read"); + mForceRead = false; + *timestampUs = mLastFrameTimestampUs; + return false; + } + } + + if (*timestampUs < + (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) { + // Skip all frames from last encoded frame until + // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed. + // Tell the camera to release its recording frame and return. + LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame"); + return true; + } else { + // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time: + // - Reset mLastTimeLapseFrameRealTimestampUs to current time. + // - Artificially modify timestampUs to be one frame time (1/framerate) ahead + // of the last encoded frame's time stamp. + LOGV("dataCallbackTimestamp timelapse: got timelapse frame"); + + mLastTimeLapseFrameRealTimestampUs = *timestampUs; + *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; + return false; + } + } + return false; +} + +void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, + const sp<IMemory> &data) { + if (!mUseStillCameraForTimeLapse) { + mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs); + } else { + Mutex::Autolock autoLock(mCameraIdleLock); + // If we are using the still camera and stop() has been called, it may + // be waiting for the camera to get idle. In that case return + // immediately. Calling CameraSource::dataCallbackTimestamp() will lead + // to a deadlock since it tries to access CameraSource::mLock which in + // this case is held by CameraSource::stop() currently waiting for the + // camera to get idle. And camera will not get idle until this call + // returns.
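skipFrameAndModifyTimeStamp() above is the heart of video-camera time lapse: a frame is kept only once mTimeBetweenTimeLapseFrameCaptureUs of real time has passed since the last kept frame, and its timestamp is rewritten so that output timestamps advance by exactly one playback frame interval. The self-contained sketch below reproduces that policy with invented numbers (one capture per second, 30 fps playback); the dataCallbackTimestamp() override continues below.

#include <cstdint>
#include <cstdio>
#include <vector>

// Keep one frame every captureIntervalUs of real time, and stamp each kept
// frame exactly one playback frame interval after the previous one, so
// playback runs faster than capture. All numbers are illustrative.
int main() {
    const int64_t captureIntervalUs = 1000000;          // capture 1 frame per second
    const int64_t playbackIntervalUs = 1000000 / 30;    // 30 fps output video

    std::vector<int64_t> cameraTimestampsUs;            // ~3 s of 30 fps input
    for (int i = 0; i < 90; ++i) cameraTimestampsUs.push_back(i * 33333LL);

    bool first = true;
    int64_t lastRealUs = 0;      // real time of the last kept frame
    int64_t lastOutputUs = 0;    // timestamp written for the last kept frame
    int kept = 0;

    for (int64_t realUs : cameraTimestampsUs) {
        if (first) {
            first = false;
            lastRealUs = realUs;
            lastOutputUs = realUs;
            ++kept;
        } else if (realUs < lastRealUs + captureIntervalUs) {
            // skip intermediate frame (the camera's buffer is simply released)
        } else {
            lastRealUs = realUs;                  // reset the real-time anchor
            lastOutputUs += playbackIntervalUs;   // advance output clock by 1/fps
            ++kept;
        }
    }
    std::printf("kept %d of %zu frames, last output timestamp %lld us\n",
                kept, cameraTimestampsUs.size(), (long long)lastOutputUs);
    return 0;
}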
+ if (mStopWaitingForIdleCamera) { + return; + } + } + CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); +} + +} // namespace android diff --git a/media/libstagefright/DRMExtractor.cpp b/media/libstagefright/DRMExtractor.cpp index aa9ad23..3c98932 100644 --- a/media/libstagefright/DRMExtractor.cpp +++ b/media/libstagefright/DRMExtractor.cpp @@ -280,18 +280,23 @@ bool SniffDRM( if (gDrmManagerClient == NULL) { gDrmManagerClient = new DrmManagerClient(); } + + if (gDrmManagerClient == NULL) { + return false; + } } DecryptHandle *decryptHandle = source->DrmInitialization(gDrmManagerClient); if (decryptHandle != NULL) { if (decryptHandle->decryptApiType == DecryptApiType::CONTAINER_BASED) { - *mimeType = String8("drm+container_based+"); + *mimeType = String8("drm+container_based+") + decryptHandle->mimeType; } else if (decryptHandle->decryptApiType == DecryptApiType::ELEMENTARY_STREAM_BASED) { - *mimeType = String8("drm+es_based+"); + *mimeType = String8("drm+es_based+") + decryptHandle->mimeType; + } else if (decryptHandle->decryptApiType == DecryptApiType::WV_BASED) { + *mimeType = MEDIA_MIMETYPE_CONTAINER_WVM; + LOGW("SniffWVM: found match\n"); } - - *mimeType += decryptHandle->mimeType; *confidence = 10.0f; return true; diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp index 0b8997c..ee0d792 100644 --- a/media/libstagefright/DataSource.cpp +++ b/media/libstagefright/DataSource.cpp @@ -36,7 +36,7 @@ namespace android { -bool DataSource::getUInt16(off_t offset, uint16_t *x) { +bool DataSource::getUInt16(off64_t offset, uint16_t *x) { *x = 0; uint8_t byte[2]; @@ -49,7 +49,7 @@ bool DataSource::getUInt16(off_t offset, uint16_t *x) { return true; } -status_t DataSource::getSize(off_t *size) { +status_t DataSource::getSize(off64_t *size) { *size = 0; return ERROR_UNSUPPORTED; diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp index b46d8d0..98d5b50 100644 --- a/media/libstagefright/FileSource.cpp +++ b/media/libstagefright/FileSource.cpp @@ -16,12 +16,16 @@ #include <media/stagefright/FileSource.h> #include <media/stagefright/MediaDebug.h> +#include <sys/types.h> +#include <unistd.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <fcntl.h> namespace android { FileSource::FileSource(const char *filename) - : mFile(fopen(filename, "rb")), - mFd(fileno(mFile)), + : mFd(-1), mOffset(0), mLength(-1), mDecryptHandle(NULL), @@ -29,11 +33,12 @@ FileSource::FileSource(const char *filename) mDrmBufOffset(0), mDrmBufSize(0), mDrmBuf(NULL){ + + mFd = open(filename, O_LARGEFILE | O_RDONLY); } FileSource::FileSource(int fd, int64_t offset, int64_t length) - : mFile(fdopen(fd, "rb")), - mFd(fd), + : mFd(fd), mOffset(offset), mLength(length), mDecryptHandle(NULL), @@ -46,26 +51,23 @@ FileSource::FileSource(int fd, int64_t offset, int64_t length) } FileSource::~FileSource() { - if (mFile != NULL) { - fclose(mFile); - mFile = NULL; + if (mFd >= 0) { + close(mFd); + mFd = -1; } if (mDrmBuf != NULL) { delete[] mDrmBuf; mDrmBuf = NULL; } - if (mDecryptHandle != NULL) { - mDrmManagerClient->closeDecryptSession(mDecryptHandle); - } } status_t FileSource::initCheck() const { - return mFile != NULL ? OK : NO_INIT; + return mFd >= 0 ? 
OK : NO_INIT; } -ssize_t FileSource::readAt(off_t offset, void *data, size_t size) { - if (mFile == NULL) { +ssize_t FileSource::readAt(off64_t offset, void *data, size_t size) { + if (mFd < 0) { return NO_INIT; } @@ -85,18 +87,18 @@ ssize_t FileSource::readAt(off_t offset, void *data, size_t size) { == mDecryptHandle->decryptApiType) { return readAtDRM(offset, data, size); } else { - int err = fseeko(mFile, offset + mOffset, SEEK_SET); - if (err < 0) { + off64_t result = lseek64(mFd, offset + mOffset, SEEK_SET); + if (result == -1) { LOGE("seek to %lld failed", offset + mOffset); return UNKNOWN_ERROR; } - return fread(data, 1, size, mFile); + return ::read(mFd, data, size); } } -status_t FileSource::getSize(off_t *size) { - if (mFile == NULL) { +status_t FileSource::getSize(off64_t *size) { + if (mFd < 0) { return NO_INIT; } @@ -106,14 +108,17 @@ status_t FileSource::getSize(off_t *size) { return OK; } - fseek(mFile, 0, SEEK_END); - *size = ftello(mFile); + *size = lseek64(mFd, 0, SEEK_END); return OK; } DecryptHandle* FileSource::DrmInitialization(DrmManagerClient* client) { + if (client == NULL) { + return NULL; + } mDrmManagerClient = client; + if (mDecryptHandle == NULL) { mDecryptHandle = mDrmManagerClient->openDecryptSession( mFd, mOffset, mLength); @@ -132,7 +137,7 @@ void FileSource::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) { *client = mDrmManagerClient; } -ssize_t FileSource::readAtDRM(off_t offset, void *data, size_t size) { +ssize_t FileSource::readAtDRM(off64_t offset, void *data, size_t size) { size_t DRM_CACHE_SIZE = 1024; if (mDrmBuf == NULL) { mDrmBuf = new unsigned char[DRM_CACHE_SIZE]; diff --git a/media/libstagefright/HTTPStream.cpp b/media/libstagefright/HTTPStream.cpp index 7194614..057868c 100644 --- a/media/libstagefright/HTTPStream.cpp +++ b/media/libstagefright/HTTPStream.cpp @@ -37,7 +37,7 @@ namespace android { // static -const char *HTTPStream::kStatusKey = ":status:"; +const char *HTTPStream::kStatusKey = ":status:"; // MUST be lowercase. 
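The FileSource changes above drop the buffered FILE* in favor of a raw descriptor with 64-bit offsets (open with O_LARGEFILE, lseek64, read), which is what lets extraction address files larger than 2 GB. Below is a rough POSIX sketch of the same readAt()/getSize() shape, using pread instead of an explicit seek; it is illustrative, not the patch's code.

#include <fcntl.h>
#include <unistd.h>
#include <cstdio>

// A raw descriptor with 64-bit offsets instead of a buffered FILE*. pread()
// combines the seek and the read; build 32-bit targets with
// -D_FILE_OFFSET_BITS=64 (the patch itself uses O_LARGEFILE and lseek64)
// so off_t can address files over 2 GB.
static ssize_t readAt(int fd, off_t offset, void *data, size_t size) {
    return pread(fd, data, size, offset);
}

static off_t getSize(int fd) {
    return lseek(fd, 0, SEEK_END);    // -1 on failure
}

int main(int argc, char **argv) {
    if (argc < 2) return 1;
    const int fd = open(argv[1], O_RDONLY);
    if (fd < 0) return 1;
    char buf[16];
    const ssize_t n = readAt(fd, 0, buf, sizeof(buf));
    std::printf("size=%lld bytes, first read=%zd bytes\n",
                (long long)getSize(fd), n);
    close(fd);
    return 0;
}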
HTTPStream::HTTPStream() : mState(READY), @@ -297,7 +297,7 @@ status_t HTTPStream::receive_header(int *http_status) { return err; } - mHeaders.add(string(kStatusKey), string(line)); + mHeaders.add(AString(kStatusKey), AString(line)); char *spacePos = strchr(line, ' '); if (spacePos == NULL) { @@ -341,7 +341,10 @@ status_t HTTPStream::receive_header(int *http_status) { char *colonPos = strchr(line, ':'); if (colonPos == NULL) { - mHeaders.add(string(line), string()); + AString key = line; + key.tolower(); + + mHeaders.add(key, AString()); } else { char *end_of_key = colonPos; while (end_of_key > line && isspace(end_of_key[-1])) { @@ -355,7 +358,10 @@ status_t HTTPStream::receive_header(int *http_status) { *end_of_key = '\0'; - mHeaders.add(string(line), string(start_of_value)); + AString key = line; + key.tolower(); + + mHeaders.add(key, AString(start_of_value)); } } @@ -391,8 +397,11 @@ ssize_t HTTPStream::receive(void *data, size_t size) { return (ssize_t)total; } -bool HTTPStream::find_header_value(const string &key, string *value) const { - ssize_t index = mHeaders.indexOfKey(key); +bool HTTPStream::find_header_value(const AString &key, AString *value) const { + AString key_lower = key; + key_lower.tolower(); + + ssize_t index = mHeaders.indexOfKey(key_lower); if (index < 0) { value->clear(); return false; diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp index ec81097..e818115 100644 --- a/media/libstagefright/JPEGSource.cpp +++ b/media/libstagefright/JPEGSource.cpp @@ -142,7 +142,7 @@ status_t JPEGSource::parseJPEG() { mWidth = 0; mHeight = 0; - off_t i = 0; + off64_t i = 0; uint16_t soi; if (!mSource->getUInt16(i, &soi)) { diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp index b15c720..00a4dd5 100644 --- a/media/libstagefright/MP3Extractor.cpp +++ b/media/libstagefright/MP3Extractor.cpp @@ -21,6 +21,8 @@ #include "include/MP3Extractor.h" #include "include/ID3.h" +#include "include/VBRISeeker.h" +#include "include/XINGSeeker.h" #include <media/stagefright/foundation/AMessage.h> #include <media/stagefright/DataSource.h> @@ -42,10 +44,11 @@ namespace android { // Yes ... there are things that must indeed match... 
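The HTTPStream changes above normalize header keys to lowercase (key.tolower()) both when a response header is stored and when find_header_value() looks one up, making header matching case-insensitive as HTTP requires. A small standard-C++ sketch of the same idea, using std::map in place of the platform's container:

#include <algorithm>
#include <cctype>
#include <cstdio>
#include <map>
#include <string>

// Normalize keys to lowercase on insert and on lookup, so "Content-Length",
// "content-length" and "CONTENT-LENGTH" all resolve to the same entry.
static std::string toLower(std::string s) {
    std::transform(s.begin(), s.end(), s.begin(),
                   [](unsigned char c) { return std::tolower(c); });
    return s;
}

int main() {
    std::map<std::string, std::string> headers;
    headers[toLower("Content-Length")] = "1024";

    const auto it = headers.find(toLower("CONTENT-LENGTH"));
    if (it != headers.end()) {
        std::printf("content-length = %s\n", it->second.c_str());
    }
    return 0;
}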
static const uint32_t kMask = 0xfffe0cc0; -static bool get_mp3_frame_size( +// static +bool MP3Extractor::get_mp3_frame_size( uint32_t header, size_t *frame_size, - int *out_sampling_rate = NULL, int *out_channels = NULL, - int *out_bitrate = NULL) { + int *out_sampling_rate, int *out_channels, + int *out_bitrate) { *frame_size = 0; if (out_sampling_rate) { @@ -178,136 +181,13 @@ static bool get_mp3_frame_size( return true; } -static bool parse_xing_header( - const sp<DataSource> &source, off_t first_frame_pos, - int32_t *frame_number = NULL, int32_t *byte_number = NULL, - char *table_of_contents = NULL, int32_t *quality_indicator = NULL, - int64_t *duration = NULL) { - - if (frame_number) { - *frame_number = 0; - } - if (byte_number) { - *byte_number = 0; - } - if (table_of_contents) { - table_of_contents[0] = 0; - } - if (quality_indicator) { - *quality_indicator = 0; - } - if (duration) { - *duration = 0; - } - - uint8_t buffer[4]; - int offset = first_frame_pos; - if (source->readAt(offset, &buffer, 4) < 4) { // get header - return false; - } - offset += 4; - - uint8_t id, layer, sr_index, mode; - layer = (buffer[1] >> 1) & 3; - id = (buffer[1] >> 3) & 3; - sr_index = (buffer[2] >> 2) & 3; - mode = (buffer[3] >> 6) & 3; - if (layer == 0) { - return false; - } - if (id == 1) { - return false; - } - if (sr_index == 3) { - return false; - } - // determine offset of XING header - if(id&1) { // mpeg1 - if (mode != 3) offset += 32; - else offset += 17; - } else { // mpeg2 - if (mode != 3) offset += 17; - else offset += 9; - } - - if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID - return false; - } - offset += 4; - // Check XING ID - if ((buffer[0] != 'X') || (buffer[1] != 'i') - || (buffer[2] != 'n') || (buffer[3] != 'g')) { - if ((buffer[0] != 'I') || (buffer[1] != 'n') - || (buffer[2] != 'f') || (buffer[3] != 'o')) { - return false; - } - } - - if (source->readAt(offset, &buffer, 4) < 4) { // flags - return false; - } - offset += 4; - uint32_t flags = U32_AT(buffer); - - if (flags & 0x0001) { // Frames field is present - if (source->readAt(offset, buffer, 4) < 4) { - return false; - } - if (frame_number) { - *frame_number = U32_AT(buffer); - } - int32_t frame = U32_AT(buffer); - // Samples per Frame: 1. index = MPEG Version ID, 2. index = Layer - const int samplesPerFrames[2][3] = - { - { 384, 1152, 576 }, // MPEG 2, 2.5: layer1, layer2, layer3 - { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3 - }; - // sampling rates in hertz: 1. index = MPEG Version ID, 2. 
index = sampling rate index - const int samplingRates[4][3] = - { - { 11025, 12000, 8000, }, // MPEG 2.5 - { 0, 0, 0, }, // reserved - { 22050, 24000, 16000, }, // MPEG 2 - { 44100, 48000, 32000, } // MPEG 1 - }; - if (duration) { - *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL - / samplingRates[id][sr_index]; - } - offset += 4; - } - if (flags & 0x0002) { // Bytes field is present - if (byte_number) { - if (source->readAt(offset, buffer, 4) < 4) { - return false; - } - *byte_number = U32_AT(buffer); - } - offset += 4; - } - if (flags & 0x0004) { // TOC field is present - if (table_of_contents) { - if (source->readAt(offset + 1, table_of_contents, 99) < 99) { - return false; - } - } - offset += 100; - } - if (flags & 0x0008) { // Quality indicator field is present - if (quality_indicator) { - if (source->readAt(offset, buffer, 4) < 4) { - return false; - } - *quality_indicator = U32_AT(buffer); - } - } - return true; -} - static bool Resync( const sp<DataSource> &source, uint32_t match_header, - off_t *inout_pos, uint32_t *out_header) { + off64_t *inout_pos, off64_t *post_id3_pos, uint32_t *out_header) { + if (post_id3_pos != NULL) { + *post_id3_pos = 0; + } + if (*inout_pos == 0) { // Skip an optional ID3 header if syncing at the very beginning // of the datasource. @@ -340,34 +220,66 @@ static bool Resync( LOGV("skipped ID3 tag, new starting offset is %ld (0x%08lx)", *inout_pos, *inout_pos); } + + if (post_id3_pos != NULL) { + *post_id3_pos = *inout_pos; + } } - off_t pos = *inout_pos; + off64_t pos = *inout_pos; bool valid = false; + + const size_t kMaxReadBytes = 1024; + const size_t kMaxBytesChecked = 128 * 1024; + uint8_t buf[kMaxReadBytes]; + ssize_t bytesToRead = kMaxReadBytes; + ssize_t totalBytesRead = 0; + ssize_t remainingBytes = 0; + bool reachEOS = false; + uint8_t *tmp = buf; + do { - if (pos >= *inout_pos + 128 * 1024) { + if (pos >= *inout_pos + kMaxBytesChecked) { // Don't scan forever. LOGV("giving up at offset %ld", pos); break; } - uint8_t tmp[4]; - if (source->readAt(pos, tmp, 4) != 4) { - break; + if (remainingBytes < 4) { + if (reachEOS) { + break; + } else { + memcpy(buf, tmp, remainingBytes); + bytesToRead = kMaxReadBytes - remainingBytes; + totalBytesRead = source->readAt(pos, buf + remainingBytes, bytesToRead); + if (totalBytesRead <= 0) { + break; + } + reachEOS = (totalBytesRead != bytesToRead); + totalBytesRead += remainingBytes; + remainingBytes = totalBytesRead; + tmp = buf; + continue; + } } uint32_t header = U32_AT(tmp); if (match_header != 0 && (header & kMask) != (match_header & kMask)) { ++pos; + ++tmp; + --remainingBytes; continue; } size_t frame_size; int sample_rate, num_channels, bitrate; - if (!get_mp3_frame_size(header, &frame_size, - &sample_rate, &num_channels, &bitrate)) { + if (!MP3Extractor::get_mp3_frame_size( + header, &frame_size, + &sample_rate, &num_channels, &bitrate)) { ++pos; + ++tmp; + --remainingBytes; continue; } @@ -376,7 +288,7 @@ static bool Resync( // We found what looks like a valid frame, // now find its successors. 
- off_t test_pos = pos + frame_size; + off64_t test_pos = pos + frame_size; valid = true; for (int j = 0; j < 3; ++j) { @@ -396,7 +308,8 @@ static bool Resync( } size_t test_frame_size; - if (!get_mp3_frame_size(test_header, &test_frame_size)) { + if (!MP3Extractor::get_mp3_frame_size( + test_header, &test_frame_size)) { valid = false; break; } @@ -417,6 +330,8 @@ static bool Resync( } ++pos; + ++tmp; + --remainingBytes; } while (!valid); return valid; @@ -426,8 +341,8 @@ class MP3Source : public MediaSource { public: MP3Source( const sp<MetaData> &meta, const sp<DataSource> &source, - off_t first_frame_pos, uint32_t fixed_header, - int32_t byte_number, const char *table_of_contents); + off64_t first_frame_pos, uint32_t fixed_header, + const sp<MP3Seeker> &seeker); virtual status_t start(MetaData *params = NULL); virtual status_t stop(); @@ -443,14 +358,12 @@ protected: private: sp<MetaData> mMeta; sp<DataSource> mDataSource; - off_t mFirstFramePos; + off64_t mFirstFramePos; uint32_t mFixedHeader; - off_t mCurrentPos; + off64_t mCurrentPos; int64_t mCurrentTimeUs; bool mStarted; - int32_t mByteNumber; // total number of bytes in this MP3 - // TOC entries in XING header. Skip the first one since it's always 0. - char mTableOfContents[99]; + sp<MP3Seeker> mSeeker; MediaBufferGroup *mGroup; MP3Source(const MP3Source &); @@ -462,25 +375,28 @@ MP3Extractor::MP3Extractor( : mInitCheck(NO_INIT), mDataSource(source), mFirstFramePos(-1), - mFixedHeader(0), - mByteNumber(0) { - off_t pos = 0; + mFixedHeader(0) { + off64_t pos = 0; + off64_t post_id3_pos; uint32_t header; bool success; int64_t meta_offset; uint32_t meta_header; + int64_t meta_post_id3_offset; if (meta != NULL && meta->findInt64("offset", &meta_offset) - && meta->findInt32("header", (int32_t *)&meta_header)) { + && meta->findInt32("header", (int32_t *)&meta_header) + && meta->findInt64("post-id3-offset", &meta_post_id3_offset)) { // The sniffer has already done all the hard work for us, simply // accept its judgement. 
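For context on get_mp3_frame_size() and the Resync() successor check above: once a plausible header is found, the scanner computes the frame length from the header fields and expects the following frames to start exactly one frame length apart. For MPEG-1 Layer III that length is 144 * bitrate / sample_rate bytes, plus one padding byte when the padding bit is set. The worked example below uses assumed values (128 kbps, 44.1 kHz); the extractor constructor continues below.

#include <cstdio>

// Frame-length arithmetic for MPEG-1 Layer III. The bitrate and sample rate
// here are example values, not ones taken from the patch.
int main() {
    const int bitrate = 128000;     // bits per second
    const int sampleRate = 44100;   // Hz
    const int padding = 0;          // from the header's padding bit

    const int frameSize = 144 * bitrate / sampleRate + padding;
    std::printf("frame size: %d bytes\n", frameSize);          // 417

    // Each MPEG-1 Layer III frame carries 1152 samples, so at 44.1 kHz one
    // frame covers about 26.1 ms; per-frame durations like this are also what
    // the constant-bitrate duration estimate in the extractor builds on.
    std::printf("frame duration: %.1f ms\n", 1152.0 * 1000.0 / sampleRate);
    return 0;
}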
- pos = (off_t)meta_offset; + pos = (off64_t)meta_offset; header = meta_header; + post_id3_pos = (off64_t)meta_post_id3_offset; success = true; } else { - success = Resync(mDataSource, 0, &pos, &header); + success = Resync(mDataSource, 0, &pos, &post_id3_pos, &header); } if (!success) { @@ -505,21 +421,27 @@ MP3Extractor::MP3Extractor( mMeta->setInt32(kKeyBitRate, bitrate * 1000); mMeta->setInt32(kKeyChannelCount, num_channels); - int64_t duration; - parse_xing_header( - mDataSource, mFirstFramePos, NULL, &mByteNumber, - mTableOfContents, NULL, &duration); - if (duration > 0) { - mMeta->setInt64(kKeyDuration, duration); - } else { - off_t fileSize; + mSeeker = XINGSeeker::CreateFromSource(mDataSource, mFirstFramePos); + + if (mSeeker == NULL) { + mSeeker = VBRISeeker::CreateFromSource(mDataSource, post_id3_pos); + } + + int64_t durationUs; + + if (mSeeker == NULL || !mSeeker->getDuration(&durationUs)) { + off64_t fileSize; if (mDataSource->getSize(&fileSize) == OK) { - mMeta->setInt64( - kKeyDuration, - 8000LL * (fileSize - mFirstFramePos) / bitrate); + durationUs = 8000LL * (fileSize - mFirstFramePos) / bitrate; + } else { + durationUs = -1; } } + if (durationUs >= 0) { + mMeta->setInt64(kKeyDuration, durationUs); + } + mInitCheck = OK; } @@ -534,7 +456,7 @@ sp<MediaSource> MP3Extractor::getTrack(size_t index) { return new MP3Source( mMeta, mDataSource, mFirstFramePos, mFixedHeader, - mByteNumber, mTableOfContents); + mSeeker); } sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) { @@ -549,8 +471,8 @@ sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) { MP3Source::MP3Source( const sp<MetaData> &meta, const sp<DataSource> &source, - off_t first_frame_pos, uint32_t fixed_header, - int32_t byte_number, const char *table_of_contents) + off64_t first_frame_pos, uint32_t fixed_header, + const sp<MP3Seeker> &seeker) : mMeta(meta), mDataSource(source), mFirstFramePos(first_frame_pos), @@ -558,9 +480,8 @@ MP3Source::MP3Source( mCurrentPos(0), mCurrentTimeUs(0), mStarted(false), - mByteNumber(byte_number), + mSeeker(seeker), mGroup(NULL) { - memcpy (mTableOfContents, table_of_contents, sizeof(mTableOfContents)); } MP3Source::~MP3Source() { @@ -607,43 +528,21 @@ status_t MP3Source::read( int64_t seekTimeUs; ReadOptions::SeekMode mode; if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { - int32_t bitrate; - if (!mMeta->findInt32(kKeyBitRate, &bitrate)) { - // bitrate is in bits/sec. - LOGI("no bitrate"); - - return ERROR_UNSUPPORTED; - } - - mCurrentTimeUs = seekTimeUs; - // interpolate in TOC to get file seek point in bytes - int64_t duration; - if ((mByteNumber > 0) && (mTableOfContents[0] > 0) - && mMeta->findInt64(kKeyDuration, &duration)) { - float percent = (float)seekTimeUs * 100 / duration; - float fx; - if( percent <= 0.0f ) { - fx = 0.0f; - } else if( percent >= 100.0f ) { - fx = 256.0f; - } else { - int a = (int)percent; - float fa, fb; - if ( a == 0 ) { - fa = 0.0f; - } else { - fa = (float)mTableOfContents[a-1]; - } - if ( a < 99 ) { - fb = (float)mTableOfContents[a]; - } else { - fb = 256.0f; - } - fx = fa + (fb-fa)*(percent-a); + int64_t actualSeekTimeUs = seekTimeUs; + if (mSeeker == NULL + || !mSeeker->getOffsetForTime(&actualSeekTimeUs, &mCurrentPos)) { + int32_t bitrate; + if (!mMeta->findInt32(kKeyBitRate, &bitrate)) { + // bitrate is in bits/sec. 
+ LOGI("no bitrate"); + + return ERROR_UNSUPPORTED; } - mCurrentPos = mFirstFramePos + (int)((1.0f/256.0f)*fx*mByteNumber); - } else { + + mCurrentTimeUs = seekTimeUs; mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 8000000; + } else { + mCurrentTimeUs = actualSeekTimeUs; } } @@ -667,15 +566,16 @@ status_t MP3Source::read( uint32_t header = U32_AT((const uint8_t *)buffer->data()); if ((header & kMask) == (mFixedHeader & kMask) - && get_mp3_frame_size(header, &frame_size, NULL, NULL, &bitrate)) { + && MP3Extractor::get_mp3_frame_size( + header, &frame_size, NULL, NULL, &bitrate)) { break; } // Lost sync. LOGV("lost sync! header = 0x%08x, old header = 0x%08x\n", header, mFixedHeader); - off_t pos = mCurrentPos; - if (!Resync(mDataSource, mFixedHeader, &pos, NULL)) { + off64_t pos = mCurrentPos; + if (!Resync(mDataSource, mFixedHeader, &pos, NULL, NULL)) { LOGE("Unable to resync. Signalling end of stream."); buffer->release(); @@ -781,15 +681,17 @@ sp<MetaData> MP3Extractor::getMetaData() { bool SniffMP3( const sp<DataSource> &source, String8 *mimeType, float *confidence, sp<AMessage> *meta) { - off_t pos = 0; + off64_t pos = 0; + off64_t post_id3_pos; uint32_t header; - if (!Resync(source, 0, &pos, &header)) { + if (!Resync(source, 0, &pos, &post_id3_pos, &header)) { return false; } *meta = new AMessage; (*meta)->setInt64("offset", pos); (*meta)->setInt32("header", header); + (*meta)->setInt64("post-id3-offset", post_id3_pos); *mimeType = MEDIA_MIMETYPE_AUDIO_MPEG; *confidence = 0.2f; diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp index 81a2b0d..4e4f289 100644 --- a/media/libstagefright/MPEG2TSWriter.cpp +++ b/media/libstagefright/MPEG2TSWriter.cpp @@ -42,12 +42,21 @@ struct MPEG2TSWriter::SourceInfo : public AHandler { unsigned streamType() const; unsigned incrementContinuityCounter(); + void readMore(); + enum { kNotifyStartFailed, kNotifyBuffer, kNotifyReachedEOS, }; + sp<ABuffer> lastAccessUnit(); + int64_t lastAccessUnitTimeUs(); + void setLastAccessUnit(const sp<ABuffer> &accessUnit); + + void setEOSReceived(); + bool eosReceived() const; + protected: virtual void onMessageReceived(const sp<AMessage> &msg); @@ -67,13 +76,16 @@ private: sp<ABuffer> mAACBuffer; + sp<ABuffer> mLastAccessUnit; + bool mEOSReceived; + unsigned mStreamType; unsigned mContinuityCounter; void extractCodecSpecificData(); - void appendAACFrames(MediaBuffer *buffer); - void flushAACFrames(); + bool appendAACFrames(MediaBuffer *buffer); + bool flushAACFrames(); void postAVCFrame(MediaBuffer *buffer); @@ -83,6 +95,7 @@ private: MPEG2TSWriter::SourceInfo::SourceInfo(const sp<MediaSource> &source) : mSource(source), mLooper(new ALooper), + mEOSReceived(false), mStreamType(0), mContinuityCounter(0) { mLooper->setName("MPEG2TSWriter source"); @@ -232,6 +245,7 @@ void MPEG2TSWriter::SourceInfo::extractCodecSpecificData() { sp<AMessage> notify = mNotify->dup(); notify->setInt32("what", kNotifyBuffer); notify->setObject("buffer", out); + notify->setInt32("oob", true); notify->post(); } @@ -260,11 +274,13 @@ void MPEG2TSWriter::SourceInfo::postAVCFrame(MediaBuffer *buffer) { notify->post(); } -void MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) { +bool MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) { + bool accessUnitPosted = false; + if (mAACBuffer != NULL && mAACBuffer->size() + 7 + buffer->range_length() > mAACBuffer->capacity()) { - flushAACFrames(); + accessUnitPosted = flushAACFrames(); } if (mAACBuffer == NULL) { @@ -324,11 +340,13 
@@ void MPEG2TSWriter::SourceInfo::appendAACFrames(MediaBuffer *buffer) { ptr += buffer->range_length(); mAACBuffer->setRange(0, ptr - mAACBuffer->data()); + + return accessUnitPosted; } -void MPEG2TSWriter::SourceInfo::flushAACFrames() { +bool MPEG2TSWriter::SourceInfo::flushAACFrames() { if (mAACBuffer == NULL) { - return; + return false; } sp<AMessage> notify = mNotify->dup(); @@ -337,6 +355,12 @@ void MPEG2TSWriter::SourceInfo::flushAACFrames() { notify->post(); mAACBuffer.clear(); + + return true; +} + +void MPEG2TSWriter::SourceInfo::readMore() { + (new AMessage(kWhatRead, id()))->post(); } void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) { @@ -353,7 +377,7 @@ void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) { extractCodecSpecificData(); - (new AMessage(kWhatRead, id()))->post(); + readMore(); break; } @@ -388,7 +412,9 @@ void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) { buffer->range_length()); } else if (buffer->range_length() > 0) { if (mStreamType == 0x0f) { - appendAACFrames(buffer); + if (!appendAACFrames(buffer)) { + msg->post(); + } } else { postAVCFrame(buffer); } @@ -398,7 +424,7 @@ void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) { buffer = NULL; } - msg->post(); + // Do not read more data until told to. break; } @@ -407,10 +433,39 @@ void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) { } } +sp<ABuffer> MPEG2TSWriter::SourceInfo::lastAccessUnit() { + return mLastAccessUnit; +} + +void MPEG2TSWriter::SourceInfo::setLastAccessUnit( + const sp<ABuffer> &accessUnit) { + mLastAccessUnit = accessUnit; +} + +int64_t MPEG2TSWriter::SourceInfo::lastAccessUnitTimeUs() { + if (mLastAccessUnit == NULL) { + return -1; + } + + int64_t timeUs; + CHECK(mLastAccessUnit->meta()->findInt64("timeUs", &timeUs)); + + return timeUs; +} + +void MPEG2TSWriter::SourceInfo::setEOSReceived() { + CHECK(!mEOSReceived); + mEOSReceived = true; +} + +bool MPEG2TSWriter::SourceInfo::eosReceived() const { + return mEOSReceived; +} + //////////////////////////////////////////////////////////////////////////////// MPEG2TSWriter::MPEG2TSWriter(int fd) - : mFile(fdopen(fd, "wb")), + : mFile(fdopen(dup(fd), "wb")), mStarted(false), mNumSourcesDone(0), mNumTSPacketsWritten(0), @@ -527,15 +582,89 @@ void MPEG2TSWriter::onMessageReceived(const sp<AMessage> &msg) { if (what == SourceInfo::kNotifyReachedEOS || what == SourceInfo::kNotifyStartFailed) { + sp<SourceInfo> source = mSources.editItemAt(sourceIndex); + source->setEOSReceived(); + + sp<ABuffer> buffer = source->lastAccessUnit(); + source->setLastAccessUnit(NULL); + + if (buffer != NULL) { + writeTS(); + writeAccessUnit(sourceIndex, buffer); + } + ++mNumSourcesDone; } else if (what == SourceInfo::kNotifyBuffer) { sp<RefBase> obj; CHECK(msg->findObject("buffer", &obj)); + sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); + + int32_t oob; + if (msg->findInt32("oob", &oob) && oob) { + // This is codec specific data delivered out of band. + // It can be written out immediately. + writeTS(); + writeAccessUnit(sourceIndex, buffer); + break; + } + + // We don't just write out data as we receive it from + // the various sources. That would essentially write them + // out in random order (as the thread scheduler determines + // how the messages are dispatched). + // Instead we gather an access unit for all tracks and + // write out the one with the smallest timestamp, then + // request more data for the written out track. 
+ // Rinse, repeat. + // If we don't have data on any track we don't write + // anything just yet. + + sp<SourceInfo> source = mSources.editItemAt(sourceIndex); + + CHECK(source->lastAccessUnit() == NULL); + source->setLastAccessUnit(buffer); + + LOGV("lastAccessUnitTimeUs[%d] = %.2f secs", + sourceIndex, source->lastAccessUnitTimeUs() / 1E6); + + int64_t minTimeUs = -1; + size_t minIndex = 0; + + for (size_t i = 0; i < mSources.size(); ++i) { + const sp<SourceInfo> &source = mSources.editItemAt(i); + + if (source->eosReceived()) { + continue; + } + + int64_t timeUs = source->lastAccessUnitTimeUs(); + if (timeUs < 0) { + minTimeUs = -1; + break; + } else if (minTimeUs < 0 || timeUs < minTimeUs) { + minTimeUs = timeUs; + minIndex = i; + } + } + + if (minTimeUs < 0) { + LOGV("not a all tracks have valid data."); + break; + } + + LOGV("writing access unit at time %.2f secs (index %d)", + minTimeUs / 1E6, minIndex); + + source = mSources.editItemAt(minIndex); + + buffer = source->lastAccessUnit(); + source->setLastAccessUnit(NULL); + writeTS(); + writeAccessUnit(minIndex, buffer); - sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); - writeAccessUnit(sourceIndex, buffer); + source->readMore(); } break; } diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp index 5497322..e6e98aa 100644 --- a/media/libstagefright/MPEG4Extractor.cpp +++ b/media/libstagefright/MPEG4Extractor.cpp @@ -98,11 +98,11 @@ struct MPEG4DataSource : public DataSource { MPEG4DataSource(const sp<DataSource> &source); virtual status_t initCheck() const; - virtual ssize_t readAt(off_t offset, void *data, size_t size); - virtual status_t getSize(off_t *size); + virtual ssize_t readAt(off64_t offset, void *data, size_t size); + virtual status_t getSize(off64_t *size); virtual uint32_t flags(); - status_t setCachedRange(off_t offset, size_t size); + status_t setCachedRange(off64_t offset, size_t size); protected: virtual ~MPEG4DataSource(); @@ -111,7 +111,7 @@ private: Mutex mLock; sp<DataSource> mSource; - off_t mCachedOffset; + off64_t mCachedOffset; size_t mCachedSize; uint8_t *mCache; @@ -146,7 +146,7 @@ status_t MPEG4DataSource::initCheck() const { return mSource->initCheck(); } -ssize_t MPEG4DataSource::readAt(off_t offset, void *data, size_t size) { +ssize_t MPEG4DataSource::readAt(off64_t offset, void *data, size_t size) { Mutex::Autolock autoLock(mLock); if (offset >= mCachedOffset @@ -158,7 +158,7 @@ ssize_t MPEG4DataSource::readAt(off_t offset, void *data, size_t size) { return mSource->readAt(offset, data, size); } -status_t MPEG4DataSource::getSize(off_t *size) { +status_t MPEG4DataSource::getSize(off64_t *size) { return mSource->getSize(size); } @@ -166,7 +166,7 @@ uint32_t MPEG4DataSource::flags() { return mSource->flags(); } -status_t MPEG4DataSource::setCachedRange(off_t offset, size_t size) { +status_t MPEG4DataSource::setCachedRange(off64_t offset, size_t size) { Mutex::Autolock autoLock(mLock); clearCache(); @@ -247,6 +247,8 @@ static const char *FourCC2MIME(uint32_t fourcc) { return MEDIA_MIMETYPE_VIDEO_MPEG4; case FOURCC('s', '2', '6', '3'): + case FOURCC('h', '2', '6', '3'): + case FOURCC('H', '2', '6', '3'): return MEDIA_MIMETYPE_VIDEO_H263; case FOURCC('a', 'v', 'c', '1'): @@ -363,7 +365,7 @@ status_t MPEG4Extractor::readMetaData() { return OK; } - off_t offset = 0; + off64_t offset = 0; status_t err; while ((err = parseChunk(&offset, 0)) == OK) { } @@ -404,7 +406,7 @@ char* MPEG4Extractor::getDrmTrackInfo(size_t trackID, int *len) { } // Reads an encoded 
integer 7 bits at a time until it encounters the high bit clear. -int32_t readSize(off_t offset, +int32_t readSize(off64_t offset, const sp<DataSource> DataSource, uint8_t *numOfBytes) { uint32_t size = 0; uint8_t data; @@ -424,7 +426,7 @@ int32_t readSize(off_t offset, return size; } -status_t MPEG4Extractor::parseDrmSINF(off_t *offset, off_t data_offset) { +status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) { uint8_t updateIdTag; if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) { return ERROR_IO; @@ -596,14 +598,14 @@ static void convertTimeToDate(int64_t time_1904, String8 *s) { s->setTo(tmp); } -status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { +status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) { uint32_t hdr[2]; if (mDataSource->readAt(*offset, hdr, 8) < 8) { return ERROR_IO; } uint64_t chunk_size = ntohl(hdr[0]); uint32_t chunk_type = ntohl(hdr[1]); - off_t data_offset = *offset + 8; + off64_t data_offset = *offset + 8; if (chunk_size == 1) { if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) { @@ -644,11 +646,11 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { PathAdder autoAdder(&mPath, chunk_type); - off_t chunk_data_size = *offset + chunk_size - data_offset; + off64_t chunk_data_size = *offset + chunk_size - data_offset; if (chunk_type != FOURCC('c', 'p', 'r', 't') && mPath.size() == 5 && underMetaDataPath(mPath)) { - off_t stop_offset = *offset + chunk_size; + off64_t stop_offset = *offset + chunk_size; *offset = data_offset; while (*offset < stop_offset) { status_t err = parseChunk(offset, depth + 1); @@ -715,7 +717,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { track->meta->setCString(kKeyMIMEType, "application/octet-stream"); } - off_t stop_offset = *offset + chunk_size; + off64_t stop_offset = *offset + chunk_size; *offset = data_offset; while (*offset < stop_offset) { status_t err = parseChunk(offset, depth + 1); @@ -788,7 +790,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { return ERROR_IO; } - off_t timescale_offset; + off64_t timescale_offset; if (version == 1) { timescale_offset = data_offset + 4 + 16; @@ -838,7 +840,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { } uint8_t buffer[8]; - if (chunk_data_size < (off_t)sizeof(buffer)) { + if (chunk_data_size < (off64_t)sizeof(buffer)) { return ERROR_MALFORMED; } @@ -862,7 +864,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { break; } - off_t stop_offset = *offset + chunk_size; + off64_t stop_offset = *offset + chunk_size; *offset = data_offset + 8; for (uint32_t i = 0; i < entry_count; ++i) { status_t err = parseChunk(offset, depth + 1); @@ -919,7 +921,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { mLastTrack->meta->setInt32(kKeyChannelCount, num_channels); mLastTrack->meta->setInt32(kKeySampleRate, sample_rate); - off_t stop_offset = *offset + chunk_size; + off64_t stop_offset = *offset + chunk_size; *offset = data_offset + sizeof(buffer); while (*offset < stop_offset) { status_t err = parseChunk(offset, depth + 1); @@ -936,6 +938,8 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { case FOURCC('m', 'p', '4', 'v'): case FOURCC('s', '2', '6', '3'): + case FOURCC('H', '2', '6', '3'): + case FOURCC('h', '2', '6', '3'): case FOURCC('a', 'v', 'c', '1'): { mHasVideo = true; @@ -955,6 +959,13 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { uint16_t width = U16_AT(&buffer[6 + 18]); uint16_t height = 
U16_AT(&buffer[6 + 20]); + // The video sample is not standard-compliant if it has invalid dimensions. + // Use some default width and height values, and + // let the decoder figure out the actual width and height (and thus + // be prepared for INFO_FORMAT_CHANGED event). + if (width == 0) width = 352; + if (height == 0) height = 288; + // printf("*** coding='%s' width=%d height=%d\n", // chunk, width, height); @@ -962,7 +973,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { mLastTrack->meta->setInt32(kKeyWidth, width); mLastTrack->meta->setInt32(kKeyHeight, height); - off_t stop_offset = *offset + chunk_size; + off64_t stop_offset = *offset + chunk_size; *offset = data_offset + sizeof(buffer); while (*offset < stop_offset) { status_t err = parseChunk(offset, depth + 1); @@ -1029,8 +1040,23 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion, // and thus will grow by 2 bytes per fragment. mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2); - *offset += chunk_size; + + // Calculate average frame rate. + const char *mime; + CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime)); + if (!strncasecmp("video/", mime, 6)) { + size_t nSamples = mLastTrack->sampleTable->countSamples(); + int64_t durationUs; + if (mLastTrack->meta->findInt64(kKeyDuration, &durationUs)) { + if (durationUs > 0) { + int32_t frameRate = (nSamples * 1000000LL + + (durationUs >> 1)) / durationUs; + mLastTrack->meta->setInt32(kKeyFrameRate, frameRate); + } + } + } + break; } @@ -1069,7 +1095,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { } uint8_t buffer[256]; - if (chunk_data_size > (off_t)sizeof(buffer)) { + if (chunk_data_size > (off64_t)sizeof(buffer)) { return ERROR_BUFFER_TOO_SMALL; } @@ -1108,7 +1134,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { case FOURCC('a', 'v', 'c', 'C'): { char buffer[256]; - if (chunk_data_size > (off_t)sizeof(buffer)) { + if (chunk_data_size > (off64_t)sizeof(buffer)) { return ERROR_BUFFER_TOO_SMALL; } @@ -1127,7 +1153,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { case FOURCC('m', 'e', 't', 'a'): { uint8_t buffer[4]; - if (chunk_data_size < (off_t)sizeof(buffer)) { + if (chunk_data_size < (off64_t)sizeof(buffer)) { return ERROR_MALFORMED; } @@ -1147,7 +1173,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { return OK; } - off_t stop_offset = *offset + chunk_size; + off64_t stop_offset = *offset + chunk_size; *offset = data_offset + sizeof(buffer); while (*offset < stop_offset) { status_t err = parseChunk(offset, depth + 1); @@ -1232,7 +1258,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) { } status_t MPEG4Extractor::parseTrackHeader( - off_t data_offset, off_t data_size) { + off64_t data_offset, off64_t data_size) { if (data_size < 4) { return ERROR_MALFORMED; } @@ -1246,7 +1272,7 @@ status_t MPEG4Extractor::parseTrackHeader( uint8_t buffer[36 + 60]; - if (data_size != (off_t)dynSize + 60) { + if (data_size != (off64_t)dynSize + 60) { return ERROR_MALFORMED; } @@ -1263,7 +1289,9 @@ status_t MPEG4Extractor::parseTrackHeader( mtime = U64_AT(&buffer[12]); id = U32_AT(&buffer[20]); duration = U64_AT(&buffer[28]); - } else if (version == 0) { + } else { + CHECK_EQ((unsigned)version, 0u); + ctime = U32_AT(&buffer[4]); mtime = U32_AT(&buffer[8]); id = U32_AT(&buffer[12]); @@ -1308,15 +1336,17 @@ status_t MPEG4Extractor::parseTrackHeader( mLastTrack->meta->setInt32(kKeyRotation,
rotationDegrees); } -#if 0 + // Handle presentation display size, which could be different + // from the image size indicated by kKeyWidth and kKeyHeight. uint32_t width = U32_AT(&buffer[dynSize + 52]); uint32_t height = U32_AT(&buffer[dynSize + 56]); -#endif + mLastTrack->meta->setInt32(kKeyDisplayWidth, width >> 16); + mLastTrack->meta->setInt32(kKeyDisplayHeight, height >> 16); return OK; } -status_t MPEG4Extractor::parseMetaData(off_t offset, size_t size) { +status_t MPEG4Extractor::parseMetaData(off64_t offset, size_t size) { if (size < 4) { return ERROR_MALFORMED; } @@ -1816,7 +1846,7 @@ status_t MPEG4Source::read( // fall through } - off_t offset; + off64_t offset; size_t size; uint32_t dts; bool isSyncSample; diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp index 7eb7d46..d1a497f 100644 --- a/media/libstagefright/MPEG4Writer.cpp +++ b/media/libstagefright/MPEG4Writer.cpp @@ -33,6 +33,10 @@ #include <media/stagefright/MediaSource.h> #include <media/stagefright/Utils.h> #include <media/mediarecorder.h> +#include <sys/types.h> +#include <sys/stat.h> +#include <fcntl.h> +#include <unistd.h> #include "include/ESDS.h" @@ -64,7 +68,7 @@ public: bool isAvc() const { return mIsAvc; } bool isAudio() const { return mIsAudio; } bool isMPEG4() const { return mIsMPEG4; } - void addChunkOffset(off_t offset); + void addChunkOffset(off64_t offset); status_t dump(int fd, const Vector<String16>& args) const; private: @@ -74,6 +78,7 @@ private: volatile bool mDone; volatile bool mPaused; volatile bool mResumed; + volatile bool mStarted; bool mIsAvc; bool mIsAudio; bool mIsMPEG4; @@ -99,7 +104,7 @@ private: List<MediaBuffer *> mChunkSamples; size_t mNumStcoTableEntries; - List<off_t> mChunkOffsets; + List<off64_t> mChunkOffsets; size_t mNumStscTableEntries; struct StscTableEntry { @@ -214,7 +219,8 @@ private: }; MPEG4Writer::MPEG4Writer(const char *filename) - : mFile(fopen(filename, "wb")), + : mFd(-1), + mInitCheck(NO_INIT), mUse4ByteNalLength(true), mUse32BitOffset(true), mIsFileSizeLimitExplicitlyRequested(false), @@ -224,11 +230,16 @@ MPEG4Writer::MPEG4Writer(const char *filename) mMdatOffset(0), mEstimatedMoovBoxSize(0), mInterleaveDurationUs(1000000) { - CHECK(mFile != NULL); + + mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC); + if (mFd >= 0) { + mInitCheck = OK; + } } MPEG4Writer::MPEG4Writer(int fd) - : mFile(fdopen(fd, "wb")), + : mFd(dup(fd)), + mInitCheck(mFd < 0? 
NO_INIT: OK), mUse4ByteNalLength(true), mUse32BitOffset(true), mIsFileSizeLimitExplicitlyRequested(false), @@ -238,7 +249,6 @@ MPEG4Writer::MPEG4Writer(int fd) mMdatOffset(0), mEstimatedMoovBoxSize(0), mInterleaveDurationUs(1000000) { - CHECK(mFile != NULL); } MPEG4Writer::~MPEG4Writer() { @@ -368,7 +378,7 @@ int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) { } status_t MPEG4Writer::start(MetaData *param) { - if (mFile == NULL) { + if (mInitCheck != OK) { return UNKNOWN_ERROR; } @@ -459,13 +469,13 @@ status_t MPEG4Writer::start(MetaData *param) { mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate); } CHECK(mEstimatedMoovBoxSize >= 8); - fseeko(mFile, mFreeBoxOffset, SEEK_SET); + lseek64(mFd, mFreeBoxOffset, SEEK_SET); writeInt32(mEstimatedMoovBoxSize); write("free", 4); mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize; mOffset = mMdatOffset; - fseeko(mFile, mMdatOffset, SEEK_SET); + lseek64(mFd, mMdatOffset, SEEK_SET); if (mUse32BitOffset) { write("????mdat", 8); } else { @@ -491,7 +501,7 @@ bool MPEG4Writer::use32BitFileOffset() const { } status_t MPEG4Writer::pause() { - if (mFile == NULL) { + if (mInitCheck != OK) { return OK; } mPaused = true; @@ -507,7 +517,7 @@ status_t MPEG4Writer::pause() { } void MPEG4Writer::stopWriterThread() { - LOGV("stopWriterThread"); + LOGD("Stopping writer thread"); { Mutex::Autolock autolock(mLock); @@ -518,6 +528,7 @@ void MPEG4Writer::stopWriterThread() { void *dummy; pthread_join(mThread, &dummy); + LOGD("Writer thread stopped"); } /* @@ -572,13 +583,15 @@ void MPEG4Writer::writeCompositionMatrix(int degrees) { writeInt32(0x40000000); // w } + status_t MPEG4Writer::stop() { - if (mFile == NULL) { + if (mInitCheck != OK) { return OK; } status_t err = OK; int64_t maxDurationUs = 0; + int64_t minDurationUs = 0x7fffffffffffffffLL; for (List<Track *>::iterator it = mTracks.begin(); it != mTracks.end(); ++it) { status_t status = (*it)->stop(); @@ -590,34 +603,42 @@ status_t MPEG4Writer::stop() { if (durationUs > maxDurationUs) { maxDurationUs = durationUs; } + if (durationUs < minDurationUs) { + minDurationUs = durationUs; + } + } + + if (mTracks.size() > 1) { + LOGD("Duration from tracks range is [%lld, %lld] us", + minDurationUs, maxDurationUs); } stopWriterThread(); // Do not write out movie header on error. if (err != OK) { - fflush(mFile); - fclose(mFile); - mFile = NULL; + close(mFd); + mFd = -1; + mInitCheck = NO_INIT; mStarted = false; return err; } // Fix up the size of the 'mdat' chunk. 
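The hunk that follows patches the 'mdat' size in place through the raw file descriptor with lseek64/::write rather than the old FILE* calls; a minimal standalone sketch of the 32-bit case (hypothetical helper name, error handling omitted):

```cpp
#include <sys/types.h>  // off64_t
#include <unistd.h>     // lseek64, ::write, SEEK_SET
#include <arpa/inet.h>  // htonl
#include <stdint.h>

// Patch the 4-byte size field of a 32-bit 'mdat' box once the total number
// of bytes written so far (endOffset) is known.
static void patchMdatSize32(int fd, off64_t mdatOffset, off64_t endOffset) {
    int32_t size = htonl(static_cast<int32_t>(endOffset - mdatOffset));
    lseek64(fd, mdatOffset, SEEK_SET);  // 64-bit seek keeps files > 2 GiB working
    ::write(fd, &size, 4);
    lseek64(fd, endOffset, SEEK_SET);   // back to the end before the moov box is written
}
```

When 64-bit offsets are in use, the patch instead writes an 8-byte size at mdatOffset + 8, the box's extended-size field.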
if (mUse32BitOffset) { - fseeko(mFile, mMdatOffset, SEEK_SET); + lseek64(mFd, mMdatOffset, SEEK_SET); int32_t size = htonl(static_cast<int32_t>(mOffset - mMdatOffset)); - fwrite(&size, 1, 4, mFile); + ::write(mFd, &size, 4); } else { - fseeko(mFile, mMdatOffset + 8, SEEK_SET); + lseek64(mFd, mMdatOffset + 8, SEEK_SET); int64_t size = mOffset - mMdatOffset; size = hton64(size); - fwrite(&size, 1, 8, mFile); + ::write(mFd, &size, 8); } - fseeko(mFile, mOffset, SEEK_SET); + lseek64(mFd, mOffset, SEEK_SET); time_t now = time(NULL); - const off_t moovOffset = mOffset; + const off64_t moovOffset = mOffset; mWriteMoovBoxToMemory = true; mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize); mMoovBoxBufferOffset = 0; @@ -637,7 +658,7 @@ status_t MPEG4Writer::stop() { writeInt16(0); // reserved writeInt32(0); // reserved writeInt32(0); // reserved - writeCompositionMatrix(0); + writeCompositionMatrix(0); // matrix writeInt32(0); // predefined writeInt32(0); // predefined writeInt32(0); // predefined @@ -659,12 +680,12 @@ status_t MPEG4Writer::stop() { CHECK(mMoovBoxBufferOffset + 8 <= mEstimatedMoovBoxSize); // Moov box - fseeko(mFile, mFreeBoxOffset, SEEK_SET); + lseek64(mFd, mFreeBoxOffset, SEEK_SET); mOffset = mFreeBoxOffset; - write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, mFile); + write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset); // Free box - fseeko(mFile, mOffset, SEEK_SET); + lseek64(mFd, mOffset, SEEK_SET); writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset); write("free", 4); @@ -678,9 +699,9 @@ status_t MPEG4Writer::stop() { CHECK(mBoxes.empty()); - fflush(mFile); - fclose(mFile); - mFile = NULL; + close(mFd); + mFd = -1; + mInitCheck = NO_INIT; mStarted = false; return err; } @@ -698,11 +719,12 @@ void MPEG4Writer::unlock() { mLock.unlock(); } -off_t MPEG4Writer::addSample_l(MediaBuffer *buffer) { - off_t old_offset = mOffset; +off64_t MPEG4Writer::addSample_l(MediaBuffer *buffer) { + off64_t old_offset = mOffset; - fwrite((const uint8_t *)buffer->data() + buffer->range_offset(), - 1, buffer->range_length(), mFile); + ::write(mFd, + (const uint8_t *)buffer->data() + buffer->range_offset(), + buffer->range_length()); mOffset += buffer->range_length(); @@ -723,33 +745,34 @@ static void StripStartcode(MediaBuffer *buffer) { } } -off_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) { - off_t old_offset = mOffset; +off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) { + off64_t old_offset = mOffset; size_t length = buffer->range_length(); if (mUse4ByteNalLength) { uint8_t x = length >> 24; - fwrite(&x, 1, 1, mFile); + ::write(mFd, &x, 1); x = (length >> 16) & 0xff; - fwrite(&x, 1, 1, mFile); + ::write(mFd, &x, 1); x = (length >> 8) & 0xff; - fwrite(&x, 1, 1, mFile); + ::write(mFd, &x, 1); x = length & 0xff; - fwrite(&x, 1, 1, mFile); + ::write(mFd, &x, 1); + + ::write(mFd, + (const uint8_t *)buffer->data() + buffer->range_offset(), + length); - fwrite((const uint8_t *)buffer->data() + buffer->range_offset(), - 1, length, mFile); mOffset += length + 4; } else { CHECK(length < 65536); uint8_t x = length >> 8; - fwrite(&x, 1, 1, mFile); + ::write(mFd, &x, 1); x = length & 0xff; - fwrite(&x, 1, 1, mFile); - fwrite((const uint8_t *)buffer->data() + buffer->range_offset(), - 1, length, mFile); + ::write(mFd, &x, 1); + ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length); mOffset += length + 2; } @@ -757,19 +780,21 @@ off_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) { } size_t MPEG4Writer::write( - const void *ptr, 
size_t size, size_t nmemb, FILE *stream) { + const void *ptr, size_t size, size_t nmemb) { const size_t bytes = size * nmemb; if (mWriteMoovBoxToMemory) { - off_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes; + // This happens only when we write the moov box at the end of + // recording, not for each output video/audio frame we receive. + off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes; if (moovBoxSize > mEstimatedMoovBoxSize) { - for (List<off_t>::iterator it = mBoxes.begin(); + for (List<off64_t>::iterator it = mBoxes.begin(); it != mBoxes.end(); ++it) { (*it) += mOffset; } - fseeko(mFile, mOffset, SEEK_SET); - fwrite(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, stream); - fwrite(ptr, size, nmemb, stream); + lseek64(mFd, mOffset, SEEK_SET); + ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset); + ::write(mFd, ptr, size * nmemb); mOffset += (bytes + mMoovBoxBufferOffset); free(mMoovBoxBuffer); mMoovBoxBuffer = NULL; @@ -781,7 +806,7 @@ size_t MPEG4Writer::write( mMoovBoxBufferOffset += bytes; } } else { - fwrite(ptr, size, nmemb, stream); + ::write(mFd, ptr, size * nmemb); mOffset += bytes; } return bytes; @@ -800,51 +825,51 @@ void MPEG4Writer::beginBox(const char *fourcc) { void MPEG4Writer::endBox() { CHECK(!mBoxes.empty()); - off_t offset = *--mBoxes.end(); + off64_t offset = *--mBoxes.end(); mBoxes.erase(--mBoxes.end()); if (mWriteMoovBoxToMemory) { int32_t x = htonl(mMoovBoxBufferOffset - offset); memcpy(mMoovBoxBuffer + offset, &x, 4); } else { - fseeko(mFile, offset, SEEK_SET); + lseek64(mFd, offset, SEEK_SET); writeInt32(mOffset - offset); mOffset -= 4; - fseeko(mFile, mOffset, SEEK_SET); + lseek64(mFd, mOffset, SEEK_SET); } } void MPEG4Writer::writeInt8(int8_t x) { - write(&x, 1, 1, mFile); + write(&x, 1, 1); } void MPEG4Writer::writeInt16(int16_t x) { x = htons(x); - write(&x, 1, 2, mFile); + write(&x, 1, 2); } void MPEG4Writer::writeInt32(int32_t x) { x = htonl(x); - write(&x, 1, 4, mFile); + write(&x, 1, 4); } void MPEG4Writer::writeInt64(int64_t x) { x = hton64(x); - write(&x, 1, 8, mFile); + write(&x, 1, 8); } void MPEG4Writer::writeCString(const char *s) { size_t n = strlen(s); - write(s, 1, n + 1, mFile); + write(s, 1, n + 1); } void MPEG4Writer::writeFourcc(const char *s) { CHECK_EQ(strlen(s), 4); - write(s, 1, 4, mFile); + write(s, 1, 4); } void MPEG4Writer::write(const void *data, size_t size) { - write(data, 1, size, mFile); + write(data, 1, size); } bool MPEG4Writer::isFileStreamable() const { @@ -927,6 +952,7 @@ MPEG4Writer::Track::Track( mDone(false), mPaused(false), mResumed(false), + mStarted(false), mTrackDurationUs(0), mEstimatedTrackSizeBytes(0), mSamplesHaveSameSize(true), @@ -988,7 +1014,7 @@ void MPEG4Writer::Track::addOneSttsTableEntry( ++mNumSttsTableEntries; } -void MPEG4Writer::Track::addChunkOffset(off_t offset) { +void MPEG4Writer::Track::addChunkOffset(off64_t offset) { ++mNumStcoTableEntries; mChunkOffsets.push_back(offset); } @@ -1103,7 +1129,7 @@ void MPEG4Writer::writeFirstChunk(ChunkInfo* info) { for (List<MediaBuffer *>::iterator it = chunkIt->mSamples.begin(); it != chunkIt->mSamples.end(); ++it) { - off_t offset = info->mTrack->isAvc() + off64_t offset = info->mTrack->isAvc() ? 
addLengthPrefixedSample_l(*it) : addSample_l(*it); if (it == chunkIt->mSamples.begin()) { @@ -1228,7 +1254,7 @@ status_t MPEG4Writer::Track::start(MetaData *params) { } int32_t rotationDegrees; - if (!mIsAudio && params && params->findInt32(kKeyRotationDegree, &rotationDegrees)) { + if (!mIsAudio && params && params->findInt32(kKeyRotation, &rotationDegrees)) { mRotation = rotationDegrees; } @@ -1255,6 +1281,7 @@ status_t MPEG4Writer::Track::start(MetaData *params) { pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); mDone = false; + mStarted = true; mTrackDurationUs = 0; mReachedEOS = false; mEstimatedTrackSizeBytes = 0; @@ -1282,10 +1309,15 @@ status_t MPEG4Writer::Track::pause() { } status_t MPEG4Writer::Track::stop() { + LOGD("Stopping %s track", mIsAudio? "Audio": "Video"); + if (!mStarted) { + LOGE("Stop() called but track is not started"); + return ERROR_END_OF_STREAM; + } + if (mDone) { return OK; } - mDone = true; void *dummy; @@ -1293,6 +1325,7 @@ status_t MPEG4Writer::Track::stop() { status_t err = (status_t) dummy; + LOGD("Stopping %s track source", mIsAudio? "Audio": "Video"); { status_t status = mSource->stop(); if (err == OK && status != OK && status != ERROR_END_OF_STREAM) { @@ -1300,6 +1333,7 @@ status_t MPEG4Writer::Track::stop() { } } + LOGD("%s track stopped", mIsAudio? "Audio": "Video"); return err; } @@ -1716,6 +1750,8 @@ status_t MPEG4Writer::Track::threadEntry() { } else { prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0); } + setpriority(PRIO_PROCESS, 0, ANDROID_PRIORITY_AUDIO); + sp<MetaData> meta_data; mNumSamples = 0; @@ -1924,7 +1960,7 @@ status_t MPEG4Writer::Track::threadEntry() { trackProgressStatus(timestampUs); } if (mOwner->numTracks() == 1) { - off_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy) + off64_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy) : mOwner->addSample_l(copy); if (mChunkOffsets.empty()) { addChunkOffset(offset); @@ -2125,7 +2161,7 @@ void MPEG4Writer::Track::writeTrackHeader( mOwner->writeInt16(mIsAudio ? 0x100 : 0); // volume mOwner->writeInt16(0); // reserved - mOwner->writeCompositionMatrix(mRotation); + mOwner->writeCompositionMatrix(mRotation); // matrix if (mIsAudio) { mOwner->writeInt32(0); @@ -2263,6 +2299,9 @@ void MPEG4Writer::Track::writeTrackHeader( CHECK(mCodecSpecificData); CHECK(mCodecSpecificDataSize > 0); + // Make sure all sizes encode to a single byte. + CHECK(mCodecSpecificDataSize + 23 < 128); + mOwner->writeInt32(0); // version=0, flags=0 mOwner->writeInt8(0x03); // ES_DescrTag mOwner->writeInt8(23 + mCodecSpecificDataSize); @@ -2472,7 +2511,7 @@ void MPEG4Writer::Track::writeTrackHeader( mOwner->beginBox(use32BitOffset? "stco": "co64"); mOwner->writeInt32(0); // version=0, flags=0 mOwner->writeInt32(mNumStcoTableEntries); - for (List<off_t>::iterator it = mChunkOffsets.begin(); + for (List<off64_t>::iterator it = mChunkOffsets.begin(); it != mChunkOffsets.end(); ++it) { if (use32BitOffset) { mOwner->writeInt32(static_cast<int32_t>(*it)); diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp index b973745..a8fadf2 100644 --- a/media/libstagefright/MediaBuffer.cpp +++ b/media/libstagefright/MediaBuffer.cpp @@ -25,15 +25,10 @@ #include <media/stagefright/MediaDebug.h> #include <media/stagefright/MetaData.h> -namespace android { - -// XXX make this truly atomic. 
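The helper being deleted just below only pretended to be atomic; the patch replaces it with bionic's __atomic_inc/__atomic_dec from <sys/atomics.h>, as the add_ref()/release() hunks further down show. A rough sketch of the resulting reference-count pattern, written with the equivalent GCC __sync builtins for illustration:

```cpp
// Sketch only: reference counting with a genuinely atomic decrement. The
// previous value tells us whether this call dropped the last reference.
struct RefCountedBuffer {
    volatile int mRefCount;

    RefCountedBuffer() : mRefCount(0) {}
    virtual ~RefCountedBuffer() {}

    void add_ref() {
        (void) __sync_fetch_and_add(&mRefCount, 1);   // returns the previous value
    }

    void release() {
        int prevCount = __sync_fetch_and_sub(&mRefCount, 1);
        if (prevCount == 1) {
            delete this;   // the last reference just went away
        }
    }
};
```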
-static int atomic_add(int *value, int delta) { - int prev_value = *value; - *value += delta; +#include <ui/GraphicBuffer.h> +#include <sys/atomics.h> - return prev_value; -} +namespace android { MediaBuffer::MediaBuffer(void *data, size_t size) : mObserver(NULL), @@ -61,6 +56,20 @@ MediaBuffer::MediaBuffer(size_t size) mOriginal(NULL) { } +MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer) + : mObserver(NULL), + mNextBuffer(NULL), + mRefCount(0), + mData(NULL), + mSize(1), + mRangeOffset(0), + mRangeLength(mSize), + mGraphicBuffer(graphicBuffer), + mOwnsData(false), + mMetaData(new MetaData), + mOriginal(NULL) { +} + void MediaBuffer::release() { if (mObserver == NULL) { CHECK_EQ(mRefCount, 0); @@ -68,7 +77,7 @@ void MediaBuffer::release() { return; } - int prevCount = atomic_add(&mRefCount, -1); + int prevCount = __atomic_dec(&mRefCount); if (prevCount == 1) { if (mObserver == NULL) { delete this; @@ -88,14 +97,16 @@ void MediaBuffer::claim() { } void MediaBuffer::add_ref() { - atomic_add(&mRefCount, 1); + (void) __atomic_inc(&mRefCount); } void *MediaBuffer::data() const { + CHECK(mGraphicBuffer == NULL); return mData; } size_t MediaBuffer::size() const { + CHECK(mGraphicBuffer == NULL); return mSize; } @@ -108,15 +119,19 @@ size_t MediaBuffer::range_length() const { } void MediaBuffer::set_range(size_t offset, size_t length) { - if (offset + length > mSize) { + if ((mGraphicBuffer == NULL) && (offset + length > mSize)) { LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize); } - CHECK(offset + length <= mSize); + CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize)); mRangeOffset = offset; mRangeLength = length; } +sp<GraphicBuffer> MediaBuffer::graphicBuffer() const { + return mGraphicBuffer; +} + sp<MetaData> MediaBuffer::meta_data() { return mMetaData; } @@ -158,6 +173,8 @@ int MediaBuffer::refcount() const { } MediaBuffer *MediaBuffer::clone() { + CHECK_EQ(mGraphicBuffer, NULL); + MediaBuffer *buffer = new MediaBuffer(mData, mSize); buffer->set_range(mRangeOffset, mRangeLength); buffer->mMetaData = new MetaData(*mMetaData.get()); @@ -169,4 +186,3 @@ MediaBuffer *MediaBuffer::clone() { } } // namespace android - diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp index 7648d42..4599fca 100644 --- a/media/libstagefright/MediaDefs.cpp +++ b/media/libstagefright/MediaDefs.cpp @@ -42,4 +42,6 @@ const char *MEDIA_MIMETYPE_CONTAINER_OGG = "application/ogg"; const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA = "video/x-matroska"; const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts"; +const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm"; + } // namespace android diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp index 965c370..d12ac64 100644 --- a/media/libstagefright/MediaExtractor.cpp +++ b/media/libstagefright/MediaExtractor.cpp @@ -25,6 +25,7 @@ #include "include/OggExtractor.h" #include "include/MPEG2TSExtractor.h" #include "include/DRMExtractor.h" +#include "include/WVMExtractor.h" #include "matroska/MatroskaExtractor.h" @@ -92,6 +93,8 @@ sp<MediaExtractor> MediaExtractor::Create( return new MatroskaExtractor(source); } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) { return new MPEG2TSExtractor(source); + } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WVM)) { + return new WVMExtractor(source); } return NULL; diff --git a/media/libstagefright/MediaSource.cpp b/media/libstagefright/MediaSource.cpp index b4ef338..fd0e79c 100644 --- 
a/media/libstagefright/MediaSource.cpp +++ b/media/libstagefright/MediaSource.cpp @@ -32,7 +32,6 @@ void MediaSource::ReadOptions::reset() { mOptions = 0; mSeekTimeUs = 0; mLatenessUs = 0; - mSkipFrameUntilTimeUs = 0; } void MediaSource::ReadOptions::setSeekTo(int64_t time_us, SeekMode mode) { @@ -54,21 +53,6 @@ bool MediaSource::ReadOptions::getSeekTo( return (mOptions & kSeekTo_Option) != 0; } -void MediaSource::ReadOptions::clearSkipFrame() { - mOptions &= ~kSkipFrame_Option; - mSkipFrameUntilTimeUs = 0; -} - -void MediaSource::ReadOptions::setSkipFrame(int64_t timeUs) { - mOptions |= kSkipFrame_Option; - mSkipFrameUntilTimeUs = timeUs; -} - -bool MediaSource::ReadOptions::getSkipFrame(int64_t *timeUs) const { - *timeUs = mSkipFrameUntilTimeUs; - return (mOptions & kSkipFrame_Option) != 0; -} - void MediaSource::ReadOptions::setLateBy(int64_t lateness_us) { mLatenessUs = lateness_us; } diff --git a/media/libstagefright/MediaSourceSplitter.cpp b/media/libstagefright/MediaSourceSplitter.cpp new file mode 100644 index 0000000..abc7012 --- /dev/null +++ b/media/libstagefright/MediaSourceSplitter.cpp @@ -0,0 +1,234 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "MediaSourceSplitter" +#include <utils/Log.h> + +#include <media/stagefright/MediaSourceSplitter.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MetaData.h> + +namespace android { + +MediaSourceSplitter::MediaSourceSplitter(sp<MediaSource> mediaSource) { + mNumberOfClients = 0; + mSource = mediaSource; + mSourceStarted = false; + + mNumberOfClientsStarted = 0; + mNumberOfCurrentReads = 0; + mCurrentReadBit = 0; + mLastReadCompleted = true; +} + +MediaSourceSplitter::~MediaSourceSplitter() { +} + +sp<MediaSource> MediaSourceSplitter::createClient() { + Mutex::Autolock autoLock(mLock); + + sp<MediaSource> client = new Client(this, mNumberOfClients++); + mClientsStarted.push(false); + mClientsDesiredReadBit.push(0); + return client; +} + +status_t MediaSourceSplitter::start(int clientId, MetaData *params) { + Mutex::Autolock autoLock(mLock); + + LOGV("start client (%d)", clientId); + if (mClientsStarted[clientId]) { + return OK; + } + + mNumberOfClientsStarted++; + + if (!mSourceStarted) { + LOGV("Starting real source from client (%d)", clientId); + status_t err = mSource->start(params); + + if (err == OK) { + mSourceStarted = true; + mClientsStarted.editItemAt(clientId) = true; + mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit; + } + + return err; + } else { + mClientsStarted.editItemAt(clientId) = true; + if (mLastReadCompleted) { + // Last read was completed. So join in the threads for the next read. + mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit; + } else { + // Last read is ongoing. So join in the threads for the current read. 
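In other words, the splitter keeps a one-bit generation counter: the bit flips each time a buffer is pulled from the real source, and a client that starts late either joins the read that is still being consumed or waits for the next one, depending on whether the last read has completed. A toy restatement of that rule (hypothetical names, locking omitted):

```cpp
// Toy illustration of the "desired read bit" rule used in start() above.
struct ReadGeneration {
    bool currentReadBit;     // generation bit of the most recent fetch
    bool lastReadCompleted;  // has every started client consumed that fetch?

    // Which generation should a client that starts right now wait for?
    bool desiredBitForNewClient() const {
        // Last buffer fully consumed: wait for the next fetch.
        // Otherwise: join the read that is still in flight.
        return lastReadCompleted ? !currentReadBit : currentReadBit;
    }
};
```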
+ mClientsDesiredReadBit.editItemAt(clientId) = mCurrentReadBit; + } + return OK; + } +} + +status_t MediaSourceSplitter::stop(int clientId) { + Mutex::Autolock autoLock(mLock); + + LOGV("stop client (%d)", clientId); + CHECK(clientId >= 0 && clientId < mNumberOfClients); + CHECK(mClientsStarted[clientId]); + + if (--mNumberOfClientsStarted == 0) { + LOGV("Stopping real source from client (%d)", clientId); + status_t err = mSource->stop(); + mSourceStarted = false; + mClientsStarted.editItemAt(clientId) = false; + return err; + } else { + mClientsStarted.editItemAt(clientId) = false; + if (!mLastReadCompleted && (mClientsDesiredReadBit[clientId] == mCurrentReadBit)) { + // !mLastReadCompleted implies that buffer has been read from source, but all + // clients haven't read it. + // mClientsDesiredReadBit[clientId] == mCurrentReadBit implies that this + // client would have wanted to read from this buffer. (i.e. it has not yet + // called read() for the current read buffer.) + // Since other threads may be waiting for all the clients' reads to complete, + // signal that this read has been aborted. + signalReadComplete_lock(true); + } + return OK; + } +} + +sp<MetaData> MediaSourceSplitter::getFormat(int clientId) { + Mutex::Autolock autoLock(mLock); + + LOGV("getFormat client (%d)", clientId); + return mSource->getFormat(); +} + +status_t MediaSourceSplitter::read(int clientId, + MediaBuffer **buffer, const MediaSource::ReadOptions *options) { + Mutex::Autolock autoLock(mLock); + + CHECK(clientId >= 0 && clientId < mNumberOfClients); + + LOGV("read client (%d)", clientId); + *buffer = NULL; + + if (!mClientsStarted[clientId]) { + return OK; + } + + if (mCurrentReadBit != mClientsDesiredReadBit[clientId]) { + // Desired buffer has not been read from source yet. + + // If the current client is the special client with clientId = 0 + // then read from source, else wait until the client 0 has finished + // reading from source. + if (clientId == 0) { + // Wait for all client's last read to complete first so as to not + // corrupt the buffer at mLastReadMediaBuffer. + waitForAllClientsLastRead_lock(clientId); + + readFromSource_lock(options); + *buffer = mLastReadMediaBuffer; + } else { + waitForReadFromSource_lock(clientId); + + *buffer = mLastReadMediaBuffer; + (*buffer)->add_ref(); + } + CHECK(mCurrentReadBit == mClientsDesiredReadBit[clientId]); + } else { + // Desired buffer has already been read from source. Use the cached data. 
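Putting the pieces together, a hypothetical two-consumer setup could look like the sketch below; cameraSource and the client names are placeholders, and the two consumers would normally run on separate threads rather than sequentially as shown. Client 0, the first one created, is the one that actually pulls from the underlying source.

```cpp
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MediaSourceSplitter.h>

using namespace android;

// Sketch: fan one MediaSource out to two consumers. Each read() hands back
// the same MediaBuffer, and every client must release its own reference.
static void drainOneFrame(const sp<MediaSource> &client) {
    MediaBuffer *buffer = NULL;
    if (client->read(&buffer) == OK && buffer != NULL) {
        // ... consume buffer->data() + buffer->range_offset() here ...
        buffer->release();
    }
}

static void splitExample(const sp<MediaSource> &cameraSource) {
    sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(cameraSource);
    sp<MediaSource> encoderInput = splitter->createClient();  // client 0
    sp<MediaSource> previewTap = splitter->createClient();    // client 1

    encoderInput->start();
    previewTap->start();

    drainOneFrame(encoderInput);  // client 0 triggers the read from the source
    drainOneFrame(previewTap);    // client 1 gets a reference to the cached buffer

    encoderInput->stop();
    previewTap->stop();
}
```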
+ CHECK(clientId != 0); + + *buffer = mLastReadMediaBuffer; + (*buffer)->add_ref(); + } + + mClientsDesiredReadBit.editItemAt(clientId) = !mClientsDesiredReadBit[clientId]; + signalReadComplete_lock(false); + + return mLastReadStatus; +} + +void MediaSourceSplitter::readFromSource_lock(const MediaSource::ReadOptions *options) { + mLastReadStatus = mSource->read(&mLastReadMediaBuffer , options); + + mCurrentReadBit = !mCurrentReadBit; + mLastReadCompleted = false; + mReadFromSourceCondition.broadcast(); +} + +void MediaSourceSplitter::waitForReadFromSource_lock(int32_t clientId) { + mReadFromSourceCondition.wait(mLock); +} + +void MediaSourceSplitter::waitForAllClientsLastRead_lock(int32_t clientId) { + if (mLastReadCompleted) { + return; + } + mAllReadsCompleteCondition.wait(mLock); + CHECK(mLastReadCompleted); +} + +void MediaSourceSplitter::signalReadComplete_lock(bool readAborted) { + if (!readAborted) { + mNumberOfCurrentReads++; + } + + if (mNumberOfCurrentReads == mNumberOfClientsStarted) { + mLastReadCompleted = true; + mNumberOfCurrentReads = 0; + mAllReadsCompleteCondition.broadcast(); + } +} + +status_t MediaSourceSplitter::pause(int clientId) { + return ERROR_UNSUPPORTED; +} + +// Client + +MediaSourceSplitter::Client::Client( + sp<MediaSourceSplitter> splitter, + int32_t clientId) { + mSplitter = splitter; + mClientId = clientId; +} + +status_t MediaSourceSplitter::Client::start(MetaData *params) { + return mSplitter->start(mClientId, params); +} + +status_t MediaSourceSplitter::Client::stop() { + return mSplitter->stop(mClientId); +} + +sp<MetaData> MediaSourceSplitter::Client::getFormat() { + return mSplitter->getFormat(mClientId); +} + +status_t MediaSourceSplitter::Client::read( + MediaBuffer **buffer, const ReadOptions *options) { + return mSplitter->read(mClientId, buffer, options); +} + +status_t MediaSourceSplitter::Client::pause() { + return mSplitter->pause(mClientId); +} + +} // namespace android diff --git a/media/libstagefright/MetaData.cpp b/media/libstagefright/MetaData.cpp index 63b476e..884f3b4 100644 --- a/media/libstagefright/MetaData.cpp +++ b/media/libstagefright/MetaData.cpp @@ -70,6 +70,19 @@ bool MetaData::setPointer(uint32_t key, void *value) { return setData(key, TYPE_POINTER, &value, sizeof(value)); } +bool MetaData::setRect( + uint32_t key, + int32_t left, int32_t top, + int32_t right, int32_t bottom) { + Rect r; + r.mLeft = left; + r.mTop = top; + r.mRight = right; + r.mBottom = bottom; + + return setData(key, TYPE_RECT, &r, sizeof(r)); +} + bool MetaData::findCString(uint32_t key, const char **value) { uint32_t type; const void *data; @@ -143,6 +156,28 @@ bool MetaData::findPointer(uint32_t key, void **value) { return true; } +bool MetaData::findRect( + uint32_t key, + int32_t *left, int32_t *top, + int32_t *right, int32_t *bottom) { + uint32_t type; + const void *data; + size_t size; + if (!findData(key, &type, &data, &size) || type != TYPE_RECT) { + return false; + } + + CHECK_EQ(size, sizeof(Rect)); + + const Rect *r = (const Rect *)data; + *left = r->mLeft; + *top = r->mTop; + *right = r->mRight; + *bottom = r->mBottom; + + return true; +} + bool MetaData::setData( uint32_t key, uint32_t type, const void *data, size_t size) { bool overwrote_existing = true; diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp index 5b0168b..741aa1c 100644 --- a/media/libstagefright/NuCachedSource2.cpp +++ b/media/libstagefright/NuCachedSource2.cpp @@ -180,8 +180,7 @@ NuCachedSource2::NuCachedSource2(const 
sp<DataSource> &source) mFinalStatus(OK), mLastAccessPos(0), mFetching(true), - mLastFetchTimeUs(-1), - mSuspended(false) { + mLastFetchTimeUs(-1) { mLooper->setName("NuCachedSource2"); mLooper->registerHandler(mReflector); mLooper->start(); @@ -202,7 +201,7 @@ status_t NuCachedSource2::initCheck() const { return mSource->initCheck(); } -status_t NuCachedSource2::getSize(off_t *size) { +status_t NuCachedSource2::getSize(off64_t *size) { return mSource->getSize(size); } @@ -224,12 +223,6 @@ void NuCachedSource2::onMessageReceived(const sp<AMessage> &msg) { break; } - case kWhatSuspend: - { - onSuspend(); - break; - } - default: TRESPASS(); } @@ -271,7 +264,6 @@ void NuCachedSource2::onFetch() { bool keepAlive = !mFetching - && !mSuspended && mFinalStatus == OK && ALooper::GetNowUs() >= mLastFetchTimeUs + kKeepAliveIntervalUs; @@ -288,7 +280,7 @@ void NuCachedSource2::onFetch() { LOGI("Cache full, done prefetching for now"); mFetching = false; } - } else if (!mSuspended) { + } else { Mutex::Autolock autoLock(mLock); restartPrefetcherIfNecessary_l(); } @@ -326,15 +318,17 @@ void NuCachedSource2::onRead(const sp<AMessage> &msg) { mCondition.signal(); } -void NuCachedSource2::restartPrefetcherIfNecessary_l() { - static const size_t kGrayArea = 256 * 1024; +void NuCachedSource2::restartPrefetcherIfNecessary_l( + bool ignoreLowWaterThreshold) { + static const size_t kGrayArea = 1024 * 1024; if (mFetching || mFinalStatus != OK) { return; } - if (mCacheOffset + mCache->totalSize() - mLastAccessPos - >= kLowWaterThreshold) { + if (!ignoreLowWaterThreshold + && mCacheOffset + mCache->totalSize() - mLastAccessPos + >= kLowWaterThreshold) { return; } @@ -352,10 +346,10 @@ void NuCachedSource2::restartPrefetcherIfNecessary_l() { mFetching = true; } -ssize_t NuCachedSource2::readAt(off_t offset, void *data, size_t size) { +ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) { Mutex::Autolock autoSerializer(mSerializer); - LOGV("readAt offset %ld, size %d", offset, size); + LOGV("readAt offset %lld, size %d", offset, size); Mutex::Autolock autoLock(mLock); @@ -400,34 +394,34 @@ size_t NuCachedSource2::cachedSize() { return mCacheOffset + mCache->totalSize(); } -size_t NuCachedSource2::approxDataRemaining(bool *eos) { +size_t NuCachedSource2::approxDataRemaining(status_t *finalStatus) { Mutex::Autolock autoLock(mLock); - return approxDataRemaining_l(eos); + return approxDataRemaining_l(finalStatus); } -size_t NuCachedSource2::approxDataRemaining_l(bool *eos) { - *eos = (mFinalStatus != OK); - off_t lastBytePosCached = mCacheOffset + mCache->totalSize(); +size_t NuCachedSource2::approxDataRemaining_l(status_t *finalStatus) { + *finalStatus = mFinalStatus; + off64_t lastBytePosCached = mCacheOffset + mCache->totalSize(); if (mLastAccessPos < lastBytePosCached) { return lastBytePosCached - mLastAccessPos; } return 0; } -ssize_t NuCachedSource2::readInternal(off_t offset, void *data, size_t size) { - LOGV("readInternal offset %ld size %d", offset, size); +ssize_t NuCachedSource2::readInternal(off64_t offset, void *data, size_t size) { + LOGV("readInternal offset %lld size %d", offset, size); Mutex::Autolock autoLock(mLock); if (offset < mCacheOffset - || offset >= (off_t)(mCacheOffset + mCache->totalSize())) { - static const off_t kPadding = 32768; + || offset >= (off64_t)(mCacheOffset + mCache->totalSize())) { + static const off64_t kPadding = 256 * 1024; // In the presence of multiple decoded streams, once of them will // trigger this seek request, the other one will request data 
"nearby" // soon, adjust the seek position so that that subsequent request // does not trigger another seek. - off_t seekOffset = (offset > kPadding) ? offset - kPadding : 0; + off64_t seekOffset = (offset > kPadding) ? offset - kPadding : 0; seekInternal_l(seekOffset); } @@ -456,15 +450,15 @@ ssize_t NuCachedSource2::readInternal(off_t offset, void *data, size_t size) { return -EAGAIN; } -status_t NuCachedSource2::seekInternal_l(off_t offset) { +status_t NuCachedSource2::seekInternal_l(off64_t offset) { mLastAccessPos = offset; if (offset >= mCacheOffset - && offset <= (off_t)(mCacheOffset + mCache->totalSize())) { + && offset <= (off64_t)(mCacheOffset + mCache->totalSize())) { return OK; } - LOGI("new range: offset= %ld", offset); + LOGI("new range: offset= %lld", offset); mCacheOffset = offset; @@ -477,39 +471,22 @@ status_t NuCachedSource2::seekInternal_l(off_t offset) { return OK; } -void NuCachedSource2::clearCacheAndResume() { - LOGV("clearCacheAndResume"); - +void NuCachedSource2::resumeFetchingIfNecessary() { Mutex::Autolock autoLock(mLock); - CHECK(mSuspended); - - mCacheOffset = 0; - mFinalStatus = OK; - mLastAccessPos = 0; - mLastFetchTimeUs = -1; - - size_t totalSize = mCache->totalSize(); - CHECK_EQ(mCache->releaseFromStart(totalSize), totalSize); - - mFetching = true; - mSuspended = false; + restartPrefetcherIfNecessary_l(true /* ignore low water threshold */); } -void NuCachedSource2::suspend() { - (new AMessage(kWhatSuspend, mReflector->id()))->post(); - - while (!mSuspended) { - usleep(10000); - } +DecryptHandle* NuCachedSource2::DrmInitialization(DrmManagerClient* client) { + return mSource->DrmInitialization(client); } -void NuCachedSource2::onSuspend() { - Mutex::Autolock autoLock(mLock); +void NuCachedSource2::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) { + mSource->getDrmInfo(handle, client); +} - mFetching = false; - mSuspended = true; +String8 NuCachedSource2::getUri() { + return mSource->getUri(); } } // namespace android - diff --git a/media/libstagefright/NuHTTPDataSource.cpp b/media/libstagefright/NuHTTPDataSource.cpp index 04cca47..653c85e 100644 --- a/media/libstagefright/NuHTTPDataSource.cpp +++ b/media/libstagefright/NuHTTPDataSource.cpp @@ -5,6 +5,7 @@ #include "include/NuHTTPDataSource.h" #include <cutils/properties.h> +#include <media/stagefright/foundation/ALooper.h> #include <media/stagefright/MediaDebug.h> #include <media/stagefright/MediaErrors.h> @@ -69,7 +70,12 @@ NuHTTPDataSource::NuHTTPDataSource() mContentLength(0), mContentLengthValid(false), mHasChunkedTransferEncoding(false), - mChunkDataBytesLeft(0) { + mChunkDataBytesLeft(0), + mNumBandwidthHistoryItems(0), + mTotalTransferTimeUs(0), + mTotalTransferBytes(0), + mDecryptHandle(NULL), + mDrmManagerClient(NULL) { } NuHTTPDataSource::~NuHTTPDataSource() { @@ -78,7 +84,7 @@ NuHTTPDataSource::~NuHTTPDataSource() { status_t NuHTTPDataSource::connect( const char *uri, const KeyedVector<String8, String8> *overrides, - off_t offset) { + off64_t offset) { String8 headers; MakeFullHeaders(overrides, &headers); @@ -88,9 +94,12 @@ status_t NuHTTPDataSource::connect( status_t NuHTTPDataSource::connect( const char *uri, const String8 &headers, - off_t offset) { + off64_t offset) { String8 host, path; unsigned port; + + mUri = uri; + if (!ParseURL(uri, &host, &port, &path)) { return ERROR_MALFORMED; } @@ -106,8 +115,8 @@ static bool IsRedirectStatusCode(int httpStatus) { status_t NuHTTPDataSource::connect( const char *host, unsigned port, const char *path, const String8 &headers, - off_t 
offset) { - LOGI("connect to %s:%u%s @%ld", host, port, path, offset); + off64_t offset) { + LOGI("connect to %s:%u%s @%lld", host, port, path, offset); bool needsToReconnect = true; @@ -149,11 +158,14 @@ status_t NuHTTPDataSource::connect( request.append(" HTTP/1.1\r\n"); request.append("Host: "); request.append(mHost); + if (mPort != 80) { + request.append(StringPrintf(":%u", mPort).c_str()); + } request.append("\r\n"); if (offset != 0) { char rangeHeader[128]; - sprintf(rangeHeader, "Range: bytes=%ld-\r\n", offset); + sprintf(rangeHeader, "Range: bytes=%lld-\r\n", offset); request.append(rangeHeader); } @@ -169,7 +181,7 @@ status_t NuHTTPDataSource::connect( } if (IsRedirectStatusCode(httpStatus)) { - string value; + AString value; CHECK(mHTTP.find_header_value("Location", &value)); mState = DISCONNECTED; @@ -189,9 +201,8 @@ status_t NuHTTPDataSource::connect( mHasChunkedTransferEncoding = false; { - string value; - if (mHTTP.find_header_value("Transfer-Encoding", &value) - || mHTTP.find_header_value("Transfer-encoding", &value)) { + AString value; + if (mHTTP.find_header_value("Transfer-Encoding", &value)) { // We don't currently support any transfer encodings but // chunked. @@ -213,11 +224,11 @@ status_t NuHTTPDataSource::connect( applyTimeoutResponse(); if (offset == 0) { - string value; + AString value; unsigned long x; - if (mHTTP.find_header_value(string("Content-Length"), &value) + if (mHTTP.find_header_value(AString("Content-Length"), &value) && ParseSingleUnsignedLong(value.c_str(), &x)) { - mContentLength = (off_t)x; + mContentLength = (off64_t)x; mContentLengthValid = true; } else { LOGW("Server did not give us the content length!"); @@ -230,9 +241,9 @@ status_t NuHTTPDataSource::connect( return ERROR_UNSUPPORTED; } - string value; + AString value; unsigned long x; - if (mHTTP.find_header_value(string("Content-Range"), &value)) { + if (mHTTP.find_header_value(AString("Content-Range"), &value)) { const char *slashPos = strchr(value.c_str(), '/'); if (slashPos != NULL && ParseSingleUnsignedLong(slashPos + 1, &x)) { @@ -322,7 +333,7 @@ ssize_t NuHTTPDataSource::internalRead(void *data, size_t size) { return n; } -ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) { +ssize_t NuHTTPDataSource::readAt(off64_t offset, void *data, size_t size) { LOGV("readAt offset %ld, size %d", offset, size); Mutex::Autolock autoLock(mLock); @@ -349,6 +360,8 @@ ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) { size_t numBytesRead = 0; while (numBytesRead < size) { + int64_t startTimeUs = ALooper::GetNowUs(); + ssize_t n = internalRead((uint8_t *)data + numBytesRead, size - numBytesRead); @@ -356,6 +369,9 @@ ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) { return n; } + int64_t delayUs = ALooper::GetNowUs() - startTimeUs; + addBandwidthMeasurement_l(n, delayUs); + numBytesRead += (size_t)n; if (n == 0) { @@ -374,7 +390,7 @@ ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) { return numBytesRead; } -status_t NuHTTPDataSource::getSize(off_t *size) { +status_t NuHTTPDataSource::getSize(off64_t *size) { *size = 0; if (mState != CONNECTED) { @@ -425,7 +441,7 @@ void NuHTTPDataSource::MakeFullHeaders( } void NuHTTPDataSource::applyTimeoutResponse() { - string timeout; + AString timeout; if (mHTTP.find_header_value("X-SocketTimeout", &timeout)) { const char *s = timeout.c_str(); char *end; @@ -440,4 +456,64 @@ void NuHTTPDataSource::applyTimeoutResponse() { } } +bool NuHTTPDataSource::estimateBandwidth(int32_t 
*bandwidth_bps) { + Mutex::Autolock autoLock(mLock); + + if (mNumBandwidthHistoryItems < 2) { + return false; + } + + *bandwidth_bps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs); + + return true; +} + +void NuHTTPDataSource::addBandwidthMeasurement_l( + size_t numBytes, int64_t delayUs) { + BandwidthEntry entry; + entry.mDelayUs = delayUs; + entry.mNumBytes = numBytes; + mTotalTransferTimeUs += delayUs; + mTotalTransferBytes += numBytes; + + mBandwidthHistory.push_back(entry); + if (++mNumBandwidthHistoryItems > 100) { + BandwidthEntry *entry = &*mBandwidthHistory.begin(); + mTotalTransferTimeUs -= entry->mDelayUs; + mTotalTransferBytes -= entry->mNumBytes; + mBandwidthHistory.erase(mBandwidthHistory.begin()); + --mNumBandwidthHistoryItems; + } +} + +DecryptHandle* NuHTTPDataSource::DrmInitialization(DrmManagerClient* client) { + if (client == NULL) { + return NULL; + } + mDrmManagerClient = client; + + if (mDecryptHandle == NULL) { + /* Note if redirect occurs, mUri is the redirect uri instead of the + * original one + */ + mDecryptHandle = mDrmManagerClient->openDecryptSession(mUri); + } + + if (mDecryptHandle == NULL) { + mDrmManagerClient = NULL; + } + + return mDecryptHandle; +} + +void NuHTTPDataSource::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) { + *handle = mDecryptHandle; + + *client = mDrmManagerClient; +} + +String8 NuHTTPDataSource::getUri() { + return mUri; +} + } // namespace android diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index b5d00bf..94694a3 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -38,10 +38,11 @@ #include <binder/IServiceManager.h> #include <binder/MemoryDealer.h> #include <binder/ProcessState.h> +#include <media/stagefright/foundation/ADebug.h> #include <media/IMediaPlayerService.h> +#include <media/stagefright/HardwareAPI.h> #include <media/stagefright/MediaBuffer.h> #include <media/stagefright/MediaBufferGroup.h> -#include <media/stagefright/MediaDebug.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MediaExtractor.h> #include <media/stagefright/MetaData.h> @@ -53,6 +54,7 @@ #include <OMX_Component.h> #include "include/ThreadedSource.h" +#include "include/avc_utils.h" namespace android { @@ -152,37 +154,37 @@ static sp<MediaSource> InstantiateSoftwareCodec( static const CodecInfo kDecoderInfo[] = { { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" }, +// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" }, // { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" }, { MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" }, -// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" }, // { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" }, +// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" }, { MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" }, -// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" }, +// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" }, { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" }, { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" }, -// { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" }, +// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" }, { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" }, { MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" }, -// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" }, { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" }, { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, 
"OMX.qcom.7x30.video.decoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" }, -// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" }, { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" }, -// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" }, { MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" }, -// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" }, { MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" }, { MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" }, }; @@ -194,25 +196,24 @@ static const CodecInfo kEncoderInfo[] = { { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" }, { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" }, { MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" }, -// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" }, -// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" }, { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" }, -// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" }, { MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" }, -// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" }, }; #undef OPTIONAL @@ -266,39 +267,6 @@ static const char *GetCodec(const CodecInfo *info, size_t numInfos, return NULL; } -enum { - kAVCProfileBaseline = 0x42, - kAVCProfileMain = 0x4d, - kAVCProfileExtended = 0x58, - kAVCProfileHigh = 0x64, - kAVCProfileHigh10 = 0x6e, - kAVCProfileHigh422 = 0x7a, - kAVCProfileHigh444 = 0xf4, - kAVCProfileCAVLC444Intra = 0x2c -}; - -static const char *AVCProfileToString(uint8_t profile) { - switch (profile) { - case kAVCProfileBaseline: - return "Baseline"; - case kAVCProfileMain: - return "Main"; - case kAVCProfileExtended: - return "Extended"; - case kAVCProfileHigh: - return "High"; - case kAVCProfileHigh10: - return "High 10"; - case kAVCProfileHigh422: - return "High 422"; - case kAVCProfileHigh444: - return "High 444"; - case kAVCProfileCAVLC444Intra: - return "CAVLC 444 Intra"; - default: return "Unknown"; - } -} - template<class T> static void InitOMXParams(T *params) { params->nSize = 
sizeof(T); @@ -309,16 +277,15 @@ static void InitOMXParams(T *params) { } static bool IsSoftwareCodec(const char *componentName) { - if (!strncmp("OMX.PV.", componentName, 7)) { - return true; + if (!strncmp("OMX.", componentName, 4)) { + return false; } - return false; + return true; } // A sort order in which non-OMX components are first, -// followed by software codecs, i.e. OMX.PV.*, followed -// by all the others. +// followed by software codecs, and followed by all the others. static int CompareSoftwareCodecsFirst( const String8 *elem1, const String8 *elem2) { bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4); @@ -352,9 +319,13 @@ uint32_t OMXCodec::getComponentQuirks( const char *componentName, bool isEncoder) { uint32_t quirks = 0; - if (!strcmp(componentName, "OMX.PV.avcdec")) { - quirks |= kWantsNALFragments; + if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") || + !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") || + !strcmp(componentName, "OMX.Nvidia.aac.decoder") || + !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) { + quirks |= kDecoderLiesAboutNumberOfChannels; } + if (!strcmp(componentName, "OMX.TI.MP3.decode")) { quirks |= kNeedsFlushBeforeDisable; quirks |= kDecoderLiesAboutNumberOfChannels; @@ -414,13 +385,6 @@ uint32_t OMXCodec::getComponentQuirks( quirks |= kOutputBuffersAreUnreadable; } - if (!strncmp(componentName, "OMX.SEC.", 8) && isEncoder) { - // These input buffers contain meta data (for instance, - // information helps locate the actual YUV data, or - // the physical address of the YUV data). - quirks |= kStoreMetaDataInInputVideoBuffers; - } - return quirks; } @@ -456,7 +420,16 @@ void OMXCodec::findMatchingCodecs( continue; } - matchingCodecs->push(String8(componentName)); + // When requesting software-only codecs, only push software codecs + // When requesting hardware-only codecs, only push hardware codecs + // When there is request neither for software-only nor for + // hardware-only codecs, push all codecs + if (((flags & kSoftwareCodecsOnly) && IsSoftwareCodec(componentName)) || + ((flags & kHardwareCodecsOnly) && !IsSoftwareCodec(componentName)) || + (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) { + + matchingCodecs->push(String8(componentName)); + } } if (flags & kPreferSoftwareCodecs) { @@ -470,7 +443,8 @@ sp<MediaSource> OMXCodec::Create( const sp<MetaData> &meta, bool createEncoder, const sp<MediaSource> &source, const char *matchComponentName, - uint32_t flags) { + uint32_t flags, + const sp<ANativeWindow> &nativeWindow) { const char *mime; bool success = meta->findCString(kKeyMIMEType, &mime); CHECK(success); @@ -526,7 +500,7 @@ sp<MediaSource> OMXCodec::Create( sp<OMXCodec> codec = new OMXCodec( omx, node, quirks, createEncoder, mime, componentName, - source); + source, nativeWindow); observer->setCodec(codec); @@ -544,13 +518,17 @@ sp<MediaSource> OMXCodec::Create( } status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) { + mIsMetaDataStoredInVideoBuffers = false; + if (flags & kStoreMetaDataInVideoBuffers) { + mIsMetaDataStoredInVideoBuffers = true; + } if (!(flags & kIgnoreCodecSpecificData)) { uint32_t type; const void *data; size_t size; if (meta->findData(kKeyESDS, &type, &data, &size)) { ESDS esds((const char *)data, size); - CHECK_EQ(esds.InitCheck(), OK); + CHECK_EQ(esds.InitCheck(), (status_t)OK); const void *codec_specific_data; size_t codec_specific_data_size; @@ -565,7 +543,7 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) { const uint8_t *ptr = (const uint8_t 
*)data; CHECK(size >= 7); - CHECK_EQ(ptr[0], 1); // configurationVersion == 1 + CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1 uint8_t profile = ptr[1]; uint8_t level = ptr[3]; @@ -619,7 +597,7 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) { size -= length; } - CODEC_LOGV( + CODEC_LOGI( "AVC profile = %d (%s), level = %d", (int)profile, AVCProfileToString(profile), level); @@ -734,6 +712,16 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) { mQuirks &= ~kOutputBuffersAreUnreadable; } + if (mNativeWindow != NULL + && !mIsEncoder + && !strncasecmp(mMIME, "video/", 6) + && !strncmp(mComponentName, "OMX.", 4)) { + status_t err = initNativeWindow(); + if (err != OK) { + return err; + } + } + return OK; } @@ -744,7 +732,7 @@ void OMXCodec::setMinBufferSize(OMX_U32 portIndex, OMX_U32 size) { status_t err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); if ((portIndex == kPortIndexInput && (mQuirks & kInputBufferSizesAreBogus)) || (def.nBufferSize < size)) { @@ -753,11 +741,11 @@ void OMXCodec::setMinBufferSize(OMX_U32 portIndex, OMX_U32 size) { err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); // Make sure the setting actually stuck. if (portIndex == kPortIndexInput @@ -917,7 +905,7 @@ void OMXCodec::setVideoInputFormat( int32_t width, height, frameRate, bitRate, stride, sliceHeight; bool success = meta->findInt32(kKeyWidth, &width); success = success && meta->findInt32(kKeyHeight, &height); - success = success && meta->findInt32(kKeySampleRate, &frameRate); + success = success && meta->findInt32(kKeyFrameRate, &frameRate); success = success && meta->findInt32(kKeyBitRate, &bitRate); success = success && meta->findInt32(kKeyStride, &stride); success = success && meta->findInt32(kKeySliceHeight, &sliceHeight); @@ -937,7 +925,7 @@ void OMXCodec::setVideoInputFormat( } OMX_COLOR_FORMATTYPE colorFormat; - CHECK_EQ(OK, findTargetColorFormat(meta, &colorFormat)); + CHECK_EQ((status_t)OK, findTargetColorFormat(meta, &colorFormat)); status_t err; OMX_PARAM_PORTDEFINITIONTYPE def; @@ -946,19 +934,19 @@ void OMXCodec::setVideoInputFormat( //////////////////////// Input port ///////////////////////// CHECK_EQ(setVideoPortFormatType( kPortIndexInput, OMX_VIDEO_CodingUnused, - colorFormat), OK); + colorFormat), (status_t)OK); InitOMXParams(&def); def.nPortIndex = kPortIndexInput; err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); def.nBufferSize = getFrameSize(colorFormat, stride > 0? 
stride: -stride, sliceHeight); - CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); video_def->nFrameWidth = width; video_def->nFrameHeight = height; @@ -970,20 +958,20 @@ void OMXCodec::setVideoInputFormat( err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); //////////////////////// Output port ///////////////////////// CHECK_EQ(setVideoPortFormatType( kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused), - OK); + (status_t)OK); InitOMXParams(&def); def.nPortIndex = kPortIndexOutput; err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); - CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + CHECK_EQ(err, (status_t)OK); + CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); video_def->nFrameWidth = width; video_def->nFrameHeight = height; @@ -998,23 +986,23 @@ void OMXCodec::setVideoInputFormat( err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); /////////////////// Codec-specific //////////////////////// switch (compressionFormat) { case OMX_VIDEO_CodingMPEG4: { - CHECK_EQ(setupMPEG4EncoderParameters(meta), OK); + CHECK_EQ(setupMPEG4EncoderParameters(meta), (status_t)OK); break; } case OMX_VIDEO_CodingH263: - CHECK_EQ(setupH263EncoderParameters(meta), OK); + CHECK_EQ(setupH263EncoderParameters(meta), (status_t)OK); break; case OMX_VIDEO_CodingAVC: { - CHECK_EQ(setupAVCEncoderParameters(meta), OK); + CHECK_EQ(setupAVCEncoderParameters(meta), (status_t)OK); break; } @@ -1073,7 +1061,7 @@ status_t OMXCodec::setupBitRate(int32_t bitRate) { status_t err = mOMX->getParameter( mNode, OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); bitrateType.eControlRate = OMX_Video_ControlRateVariable; bitrateType.nTargetBitrate = bitRate; @@ -1081,7 +1069,7 @@ status_t OMXCodec::setupBitRate(int32_t bitRate) { err = mOMX->setParameter( mNode, OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); return OK; } @@ -1137,7 +1125,7 @@ status_t OMXCodec::getVideoProfileLevel( status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) { int32_t iFramesInterval, frameRate, bitRate; bool success = meta->findInt32(kKeyBitRate, &bitRate); - success = success && meta->findInt32(kKeySampleRate, &frameRate); + success = success && meta->findInt32(kKeyFrameRate, &frameRate); success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval); CHECK(success); OMX_VIDEO_PARAM_H263TYPE h263type; @@ -1146,7 +1134,7 @@ status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) { status_t err = mOMX->getParameter( mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; @@ -1173,10 +1161,10 @@ status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) { err = mOMX->setParameter( mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - CHECK_EQ(setupBitRate(bitRate), OK); - CHECK_EQ(setupErrorCorrectionParameters(), OK); + CHECK_EQ(setupBitRate(bitRate), (status_t)OK); + CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK); return OK; } @@ -1184,7 +1172,7 @@ status_t 
OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) { status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) { int32_t iFramesInterval, frameRate, bitRate; bool success = meta->findInt32(kKeyBitRate, &bitRate); - success = success && meta->findInt32(kKeySampleRate, &frameRate); + success = success && meta->findInt32(kKeyFrameRate, &frameRate); success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval); CHECK(success); OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; @@ -1193,7 +1181,7 @@ status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) { status_t err = mOMX->getParameter( mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); mpeg4type.nSliceHeaderSpacing = 0; mpeg4type.bSVH = OMX_FALSE; @@ -1225,10 +1213,10 @@ status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) { err = mOMX->setParameter( mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - CHECK_EQ(setupBitRate(bitRate), OK); - CHECK_EQ(setupErrorCorrectionParameters(), OK); + CHECK_EQ(setupBitRate(bitRate), (status_t)OK); + CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK); return OK; } @@ -1236,7 +1224,7 @@ status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) { status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) { int32_t iFramesInterval, frameRate, bitRate; bool success = meta->findInt32(kKeyBitRate, &bitRate); - success = success && meta->findInt32(kKeySampleRate, &frameRate); + success = success && meta->findInt32(kKeyFrameRate, &frameRate); success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval); CHECK(success); @@ -1246,7 +1234,7 @@ status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) { status_t err = mOMX->getParameter( mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; @@ -1292,11 +1280,15 @@ status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) { h264type.bMBAFF = OMX_FALSE; h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; + if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) { + h264type.eLevel = OMX_VIDEO_AVCLevelMax; + } + err = mOMX->setParameter( mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - CHECK_EQ(setupBitRate(bitRate), OK); + CHECK_EQ(setupBitRate(bitRate), (status_t)OK); return OK; } @@ -1334,8 +1326,8 @@ status_t OMXCodec::setVideoOutputFormat( status_t err = mOMX->getParameter( mNode, OMX_IndexParamVideoPortFormat, &format, sizeof(format)); - CHECK_EQ(err, OK); - CHECK_EQ(format.eCompressionFormat, OMX_VIDEO_CodingUnused); + CHECK_EQ(err, (status_t)OK); + CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused); static const int OMX_QCOM_COLOR_FormatYVU420SemiPlanar = 0x7FA30C00; @@ -1363,7 +1355,7 @@ status_t OMXCodec::setVideoOutputFormat( err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); #if 1 // XXX Need a (much) better heuristic to compute input buffer sizes. 
@@ -1373,7 +1365,7 @@ status_t OMXCodec::setVideoOutputFormat( } #endif - CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); video_def->nFrameWidth = width; video_def->nFrameHeight = height; @@ -1395,8 +1387,8 @@ status_t OMXCodec::setVideoOutputFormat( err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); - CHECK_EQ(def.eDomain, OMX_PortDomainVideo); + CHECK_EQ(err, (status_t)OK); + CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo); #if 0 def.nBufferSize = @@ -1417,7 +1409,8 @@ OMXCodec::OMXCodec( bool isEncoder, const char *mime, const char *componentName, - const sp<MediaSource> &source) + const sp<MediaSource> &source, + const sp<ANativeWindow> &nativeWindow) : mOMX(omx), mOMXLivesLocally(omx->livesLocally(getpid())), mNode(node), @@ -1435,9 +1428,9 @@ OMXCodec::OMXCodec( mSeekTimeUs(-1), mSeekMode(ReadOptions::SEEK_CLOSEST_SYNC), mTargetTimeUs(-1), - mSkipTimeUs(-1), mLeftOverBuffer(NULL), - mPaused(false) { + mPaused(false), + mNativeWindow(nativeWindow) { mPortStatus[kPortIndexInput] = ENABLED; mPortStatus[kPortIndexOutput] = ENABLED; @@ -1515,10 +1508,10 @@ void OMXCodec::setComponentRole() { OMXCodec::~OMXCodec() { mSource.clear(); - CHECK(mState == LOADED || mState == ERROR); + CHECK(mState == LOADED || mState == ERROR || mState == LOADED_TO_IDLE); status_t err = mOMX->freeNode(mNode); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); mNode = NULL; setState(DEAD); @@ -1535,21 +1528,23 @@ OMXCodec::~OMXCodec() { status_t OMXCodec::init() { // mLock is held. - CHECK_EQ(mState, LOADED); + CHECK_EQ((int)mState, (int)LOADED); status_t err; if (!(mQuirks & kRequiresLoadedToIdleAfterAllocation)) { err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); setState(LOADED_TO_IDLE); } err = allocateBuffers(); - CHECK_EQ(err, OK); + if (err != (status_t)OK) { + return err; + } if (mQuirks & kRequiresLoadedToIdleAfterAllocation) { err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); setState(LOADED_TO_IDLE); } @@ -1581,6 +1576,10 @@ status_t OMXCodec::allocateBuffers() { } status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) { + if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { + return allocateOutputBuffersFromNativeWindow(); + } + OMX_PARAM_PORTDEFINITIONTYPE def; InitOMXParams(&def); def.nPortIndex = portIndex; @@ -1592,6 +1591,14 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) { return err; } + if (mIsMetaDataStoredInVideoBuffers && portIndex == kPortIndexInput) { + err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE); + if (err != OK) { + LOGE("Storing meta data in video buffers is not supported"); + return err; + } + } + CODEC_LOGI("allocating %lu buffers of size %lu on %s port", def.nBufferCountActual, def.nBufferSize, portIndex == kPortIndexInput ? 
"input" : "output"); @@ -1646,7 +1653,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) { } info.mBuffer = buffer; - info.mOwnedByComponent = false; + info.mStatus = OWNED_BY_US; info.mMem = mem; info.mMediaBuffer = NULL; @@ -1673,6 +1680,204 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) { return OK; } +status_t OMXCodec::applyRotation() { + sp<MetaData> meta = mSource->getFormat(); + + int32_t rotationDegrees; + if (!meta->findInt32(kKeyRotation, &rotationDegrees)) { + rotationDegrees = 0; + } + + uint32_t transform; + switch (rotationDegrees) { + case 0: transform = 0; break; + case 90: transform = HAL_TRANSFORM_ROT_90; break; + case 180: transform = HAL_TRANSFORM_ROT_180; break; + case 270: transform = HAL_TRANSFORM_ROT_270; break; + default: transform = 0; break; + } + + status_t err = OK; + + if (transform) { + err = native_window_set_buffers_transform( + mNativeWindow.get(), transform); + } + + return err; +} + +status_t OMXCodec::allocateOutputBuffersFromNativeWindow() { + // Get the number of buffers needed. + OMX_PARAM_PORTDEFINITIONTYPE def; + InitOMXParams(&def); + def.nPortIndex = kPortIndexOutput; + + status_t err = mOMX->getParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + if (err != OK) { + return err; + } + + err = native_window_set_buffers_geometry( + mNativeWindow.get(), + def.format.video.nFrameWidth, + def.format.video.nFrameHeight, + def.format.video.eColorFormat); + + if (err != 0) { + LOGE("native_window_set_buffers_geometry failed: %s (%d)", + strerror(-err), -err); + return err; + } + + // Increase the buffer count by one to allow for the ANativeWindow to hold + // on to one of the buffers. + def.nBufferCountActual++; + err = mOMX->setParameter( + mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); + if (err != OK) { + return err; + } + + err = applyRotation(); + if (err != OK) { + return err; + } + + // Set up the native window. + // XXX TODO: Get the gralloc usage flags from the OMX plugin! + err = native_window_set_usage( + mNativeWindow.get(), GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP); + if (err != 0) { + LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err); + return err; + } + + err = native_window_set_buffer_count( + mNativeWindow.get(), def.nBufferCountActual); + if (err != 0) { + LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), + -err); + return err; + } + + // XXX TODO: Do something so the ANativeWindow knows that we'll need to get + // the same set of buffers. 
+ + CODEC_LOGI("allocating %lu buffers from a native window of size %lu on " + "output port", def.nBufferCountActual, def.nBufferSize); + + // Dequeue buffers and send them to OMX + OMX_U32 i; + for (i = 0; i < def.nBufferCountActual; i++) { + android_native_buffer_t* buf; + err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf); + if (err != 0) { + LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); + break; + } + + sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); + IOMX::buffer_id bufferId; + err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, + &bufferId); + if (err != 0) { + break; + } + + CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)", + bufferId, graphicBuffer.get()); + + BufferInfo info; + info.mData = NULL; + info.mSize = def.nBufferSize; + info.mBuffer = bufferId; + info.mStatus = OWNED_BY_US; + info.mMem = NULL; + info.mMediaBuffer = new MediaBuffer(graphicBuffer); + info.mMediaBuffer->setObserver(this); + + mPortBuffers[kPortIndexOutput].push(info); + } + + OMX_U32 cancelStart; + OMX_U32 cancelEnd; + + if (err != 0) { + // If an error occurred while dequeuing we need to cancel any buffers + // that were dequeued. + cancelStart = 0; + cancelEnd = i; + } else { + // Return the last two buffers to the native window. + // XXX TODO: The number of buffers the native window owns should probably be + // queried from it when we put the native window in fixed buffer pool mode + // (which needs to be implemented). Currently it's hard-coded to 2. + cancelStart = def.nBufferCountActual - 2; + cancelEnd = def.nBufferCountActual; + } + + for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { + BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i); + cancelBufferToNativeWindow(info); + } + + return err; +} + +status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) { + CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US); + CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer); + int err = mNativeWindow->cancelBuffer( + mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get()); + if (err != 0) { + CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err); + + setState(ERROR); + return err; + } + info->mStatus = OWNED_BY_NATIVE_WINDOW; + return OK; +} + +OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() { + // Dequeue the next buffer from the native window. + android_native_buffer_t* buf; + int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf); + if (err != 0) { + CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err); + + setState(ERROR); + return 0; + } + + // Determine which buffer we just dequeued. + Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput]; + BufferInfo *bufInfo = 0; + for (size_t i = 0; i < buffers->size(); i++) { + sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i). + mMediaBuffer->graphicBuffer(); + if (graphicBuffer->handle == buf->handle) { + bufInfo = &buffers->editItemAt(i); + break; + } + } + + if (bufInfo == 0) { + CODEC_LOGE("dequeued unrecognized buffer: %p", buf); + + setState(ERROR); + return 0; + } + + // The native window no longer owns the buffer. 
+ CHECK_EQ((int)bufInfo->mStatus, (int)OWNED_BY_NATIVE_WINDOW); + bufInfo->mStatus = OWNED_BY_US; + + return bufInfo; +} + void OMXCodec::on_message(const omx_message &msg) { switch (msg.type) { case omx_message::EVENT: @@ -1697,32 +1902,35 @@ void OMXCodec::on_message(const omx_message &msg) { } CHECK(i < buffers->size()); - if (!(*buffers)[i].mOwnedByComponent) { + if ((*buffers)[i].mStatus != OWNED_BY_COMPONENT) { LOGW("We already own input buffer %p, yet received " "an EMPTY_BUFFER_DONE.", buffer); } - { - BufferInfo *info = &buffers->editItemAt(i); - info->mOwnedByComponent = false; - if (info->mMediaBuffer != NULL) { - // It is time to release the media buffers storing meta data - info->mMediaBuffer->release(); - info->mMediaBuffer = NULL; + BufferInfo* info = &buffers->editItemAt(i); + info->mStatus = OWNED_BY_US; + + // Buffer could not be released until empty buffer done is called. + if (info->mMediaBuffer != NULL) { + if (mIsEncoder && + (mQuirks & kAvoidMemcopyInputRecordingFrames)) { + // If zero-copy mode is enabled this will send the + // input buffer back to the upstream source. + restorePatchedDataPointer(info); } + + info->mMediaBuffer->release(); + info->mMediaBuffer = NULL; } if (mPortStatus[kPortIndexInput] == DISABLING) { CODEC_LOGV("Port is disabled, freeing buffer %p", buffer); - status_t err = - mOMX->freeBuffer(mNode, kPortIndexInput, buffer); - CHECK_EQ(err, OK); - - buffers->removeAt(i); + status_t err = freeBuffer(kPortIndexInput, i); + CHECK_EQ(err, (status_t)OK); } else if (mState != ERROR && mPortStatus[kPortIndexInput] != SHUTTING_DOWN) { - CHECK_EQ(mPortStatus[kPortIndexInput], ENABLED); + CHECK_EQ((int)mPortStatus[kPortIndexInput], (int)ENABLED); drainInputBuffer(&buffers->editItemAt(i)); } break; @@ -1749,21 +1957,19 @@ void OMXCodec::on_message(const omx_message &msg) { CHECK(i < buffers->size()); BufferInfo *info = &buffers->editItemAt(i); - if (!info->mOwnedByComponent) { + if (info->mStatus != OWNED_BY_COMPONENT) { LOGW("We already own output buffer %p, yet received " "a FILL_BUFFER_DONE.", buffer); } - info->mOwnedByComponent = false; + info->mStatus = OWNED_BY_US; if (mPortStatus[kPortIndexOutput] == DISABLING) { CODEC_LOGV("Port is disabled, freeing buffer %p", buffer); - status_t err = - mOMX->freeBuffer(mNode, kPortIndexOutput, buffer); - CHECK_EQ(err, OK); + status_t err = freeBuffer(kPortIndexOutput, i); + CHECK_EQ(err, (status_t)OK); - buffers->removeAt(i); #if 0 } else if (mPortStatus[kPortIndexOutput] == ENABLED && (flags & OMX_BUFFERFLAG_EOS)) { @@ -1772,7 +1978,7 @@ void OMXCodec::on_message(const omx_message &msg) { mBufferFilled.signal(); #endif } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) { - CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED); + CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED); if (info->mMediaBuffer == NULL) { CHECK(mOMXLivesLocally); @@ -1791,8 +1997,10 @@ void OMXCodec::on_message(const omx_message &msg) { } MediaBuffer *buffer = info->mMediaBuffer; + bool isGraphicBuffer = buffer->graphicBuffer() != NULL; - if (msg.u.extended_buffer_data.range_offset + if (!isGraphicBuffer + && msg.u.extended_buffer_data.range_offset + msg.u.extended_buffer_data.range_length > buffer->size()) { CODEC_LOGE( @@ -1816,7 +2024,7 @@ void OMXCodec::on_message(const omx_message &msg) { buffer->meta_data()->setInt32(kKeyIsCodecConfig, true); } - if (mQuirks & kOutputBuffersAreUnreadable) { + if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) { buffer->meta_data()->setInt32(kKeyIsUnreadable, true); } @@ 
-1855,6 +2063,9 @@ void OMXCodec::on_message(const omx_message &msg) { mFilledBuffers.push_back(i); mBufferFilled.signal(); + if (mIsEncoder) { + sched_yield(); + } } break; @@ -1868,48 +2079,6 @@ void OMXCodec::on_message(const omx_message &msg) { } } -void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { - switch (event) { - case OMX_EventCmdComplete: - { - onCmdComplete((OMX_COMMANDTYPE)data1, data2); - break; - } - - case OMX_EventError: - { - CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2); - - setState(ERROR); - break; - } - - case OMX_EventPortSettingsChanged: - { - onPortSettingsChanged(data1); - break; - } - -#if 0 - case OMX_EventBufferFlag: - { - CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1); - - if (data1 == kPortIndexOutput) { - mNoMoreOutputData = true; - } - break; - } -#endif - - default: - { - CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2); - break; - } - } -} - // Has the format changed in any way that the client would have to be aware of? static bool formatHasNotablyChanged( const sp<MetaData> &from, const sp<MetaData> &to) { @@ -1954,6 +2123,21 @@ static bool formatHasNotablyChanged( if (height_from != height_to) { return true; } + + int32_t left_from, top_from, right_from, bottom_from; + CHECK(from->findRect( + kKeyCropRect, + &left_from, &top_from, &right_from, &bottom_from)); + + int32_t left_to, top_to, right_to, bottom_to; + CHECK(to->findRect( + kKeyCropRect, + &left_to, &top_to, &right_to, &bottom_to)); + + if (left_to != left_from || top_to != top_from + || right_to != right_from || bottom_to != bottom_from) { + return true; + } } else if (!strcasecmp(mime_from, MEDIA_MIMETYPE_AUDIO_RAW)) { int32_t numChannels_from, numChannels_to; CHECK(from->findInt32(kKeyChannelCount, &numChannels_from)); @@ -1975,6 +2159,78 @@ static bool formatHasNotablyChanged( return false; } +void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { + switch (event) { + case OMX_EventCmdComplete: + { + onCmdComplete((OMX_COMMANDTYPE)data1, data2); + break; + } + + case OMX_EventError: + { + CODEC_LOGE("ERROR(0x%08lx, %ld)", data1, data2); + + setState(ERROR); + break; + } + + case OMX_EventPortSettingsChanged: + { + CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)", + data1, data2); + + if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { + onPortSettingsChanged(data1); + } else if (data1 == kPortIndexOutput + && data2 == OMX_IndexConfigCommonOutputCrop) { + + sp<MetaData> oldOutputFormat = mOutputFormat; + initOutputFormat(mSource->getFormat()); + + if (formatHasNotablyChanged(oldOutputFormat, mOutputFormat)) { + mOutputPortSettingsHaveChanged = true; + + if (mNativeWindow != NULL) { + int32_t left, top, right, bottom; + CHECK(mOutputFormat->findRect( + kKeyCropRect, + &left, &top, &right, &bottom)); + + android_native_rect_t crop; + crop.left = left; + crop.top = top; + crop.right = right; + crop.bottom = bottom; + + CHECK_EQ(0, native_window_set_crop( + mNativeWindow.get(), &crop)); + } + } + } + break; + } + +#if 0 + case OMX_EventBufferFlag: + { + CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1); + + if (data1 == kPortIndexOutput) { + mNoMoreOutputData = true; + } + break; + } +#endif + + default: + { + CODEC_LOGV("EVENT(%d, %ld, %ld)", event, data1, data2); + break; + } + } +} + void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) { switch (cmd) { case OMX_CommandStateSet: @@ -1989,13 +2245,13 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) { CODEC_LOGV("PORT_DISABLED(%ld)", portIndex); 
CHECK(mState == EXECUTING || mState == RECONFIGURING); - CHECK_EQ(mPortStatus[portIndex], DISABLING); - CHECK_EQ(mPortBuffers[portIndex].size(), 0); + CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLING); + CHECK_EQ(mPortBuffers[portIndex].size(), 0u); mPortStatus[portIndex] = DISABLED; if (mState == RECONFIGURING) { - CHECK_EQ(portIndex, kPortIndexOutput); + CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput); sp<MetaData> oldOutputFormat = mOutputFormat; initOutputFormat(mSource->getFormat()); @@ -2009,7 +2265,11 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) { enablePortAsync(portIndex); status_t err = allocateBuffersOnPort(portIndex); - CHECK_EQ(err, OK); + + if (err != OK) { + CODEC_LOGE("allocateBuffersOnPort failed (err = %d)", err); + setState(ERROR); + } } break; } @@ -2020,12 +2280,12 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) { CODEC_LOGV("PORT_ENABLED(%ld)", portIndex); CHECK(mState == EXECUTING || mState == RECONFIGURING); - CHECK_EQ(mPortStatus[portIndex], ENABLING); + CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLING); mPortStatus[portIndex] = ENABLED; if (mState == RECONFIGURING) { - CHECK_EQ(portIndex, kPortIndexOutput); + CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput); setState(EXECUTING); @@ -2040,14 +2300,14 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) { CODEC_LOGV("FLUSH_DONE(%ld)", portIndex); - CHECK_EQ(mPortStatus[portIndex], SHUTTING_DOWN); + CHECK_EQ((int)mPortStatus[portIndex], (int)SHUTTING_DOWN); mPortStatus[portIndex] = ENABLED; CHECK_EQ(countBuffersWeOwn(mPortBuffers[portIndex]), mPortBuffers[portIndex].size()); if (mState == RECONFIGURING) { - CHECK_EQ(portIndex, kPortIndexOutput); + CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput); disablePortAsync(portIndex); } else if (mState == EXECUTING_TO_IDLE) { @@ -2061,7 +2321,7 @@ void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) { status_t err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); } } else { // We're flushing both ports in preparation for seeking. 
@@ -2101,11 +2361,11 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) { status_t err = mOMX->sendCommand( mNode, OMX_CommandStateSet, OMX_StateExecuting); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); setState(IDLE_TO_EXECUTING); } else { - CHECK_EQ(mState, EXECUTING_TO_IDLE); + CHECK_EQ((int)mState, (int)EXECUTING_TO_IDLE); CHECK_EQ( countBuffersWeOwn(mPortBuffers[kPortIndexInput]), @@ -2118,13 +2378,13 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) { status_t err = mOMX->sendCommand( mNode, OMX_CommandStateSet, OMX_StateLoaded); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); err = freeBuffersOnPort(kPortIndexInput); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); err = freeBuffersOnPort(kPortIndexOutput); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); mPortStatus[kPortIndexInput] = ENABLED; mPortStatus[kPortIndexOutput] = ENABLED; @@ -2136,7 +2396,7 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) { case OMX_StateExecuting: { - CHECK_EQ(mState, IDLE_TO_EXECUTING); + CHECK_EQ((int)mState, (int)IDLE_TO_EXECUTING); CODEC_LOGV("Now Executing."); @@ -2152,7 +2412,7 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) { case OMX_StateLoaded: { - CHECK_EQ(mState, IDLE_TO_LOADED); + CHECK_EQ((int)mState, (int)IDLE_TO_LOADED); CODEC_LOGV("Now Loaded."); @@ -2178,7 +2438,7 @@ void OMXCodec::onStateChange(OMX_STATETYPE newState) { size_t OMXCodec::countBuffersWeOwn(const Vector<BufferInfo> &buffers) { size_t n = 0; for (size_t i = 0; i < buffers.size(); ++i) { - if (!buffers[i].mOwnedByComponent) { + if (buffers[i].mStatus != OWNED_BY_COMPONENT) { ++n; } } @@ -2195,44 +2455,64 @@ status_t OMXCodec::freeBuffersOnPort( for (size_t i = buffers->size(); i-- > 0;) { BufferInfo *info = &buffers->editItemAt(i); - if (onlyThoseWeOwn && info->mOwnedByComponent) { + if (onlyThoseWeOwn && info->mStatus == OWNED_BY_COMPONENT) { continue; } - CHECK_EQ(info->mOwnedByComponent, false); + CHECK(info->mStatus == OWNED_BY_US + || info->mStatus == OWNED_BY_NATIVE_WINDOW); CODEC_LOGV("freeing buffer %p on port %ld", info->mBuffer, portIndex); - status_t err = - mOMX->freeBuffer(mNode, portIndex, info->mBuffer); + status_t err = freeBuffer(portIndex, i); if (err != OK) { stickyErr = err; } - if (info->mMediaBuffer != NULL) { - info->mMediaBuffer->setObserver(NULL); + } + + CHECK(onlyThoseWeOwn || buffers->isEmpty()); + + return stickyErr; +} + +status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) { + Vector<BufferInfo> *buffers = &mPortBuffers[portIndex]; + + BufferInfo *info = &buffers->editItemAt(bufIndex); + + status_t err = mOMX->freeBuffer(mNode, portIndex, info->mBuffer); - // Make sure nobody but us owns this buffer at this point. - CHECK_EQ(info->mMediaBuffer->refcount(), 0); + if (err == OK && info->mMediaBuffer != NULL) { + CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput); + info->mMediaBuffer->setObserver(NULL); - info->mMediaBuffer->release(); - info->mMediaBuffer = NULL; + // Make sure nobody but us owns this buffer at this point. + CHECK_EQ(info->mMediaBuffer->refcount(), 0); + + // Cancel the buffer if it belongs to an ANativeWindow. 
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer(); + if (info->mStatus == OWNED_BY_US && graphicBuffer != 0) { + err = cancelBufferToNativeWindow(info); } - buffers->removeAt(i); + info->mMediaBuffer->release(); + info->mMediaBuffer = NULL; } - CHECK(onlyThoseWeOwn || buffers->isEmpty()); + if (err == OK) { + buffers->removeAt(bufIndex); + } - return stickyErr; + return err; } void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) { CODEC_LOGV("PORT_SETTINGS_CHANGED(%ld)", portIndex); - CHECK_EQ(mState, EXECUTING); - CHECK_EQ(portIndex, kPortIndexOutput); + CHECK_EQ((int)mState, (int)EXECUTING); + CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput); setState(RECONFIGURING); if (mQuirks & kNeedsFlushBeforeDisable) { @@ -2252,7 +2532,7 @@ bool OMXCodec::flushPortAsync(OMX_U32 portIndex) { portIndex, countBuffersWeOwn(mPortBuffers[portIndex]), mPortBuffers[portIndex].size()); - CHECK_EQ(mPortStatus[portIndex], ENABLED); + CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED); mPortStatus[portIndex] = SHUTTING_DOWN; if ((mQuirks & kRequiresFlushCompleteEmulation) @@ -2266,7 +2546,7 @@ bool OMXCodec::flushPortAsync(OMX_U32 portIndex) { status_t err = mOMX->sendCommand(mNode, OMX_CommandFlush, portIndex); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); return true; } @@ -2274,12 +2554,13 @@ bool OMXCodec::flushPortAsync(OMX_U32 portIndex) { void OMXCodec::disablePortAsync(OMX_U32 portIndex) { CHECK(mState == EXECUTING || mState == RECONFIGURING); - CHECK_EQ(mPortStatus[portIndex], ENABLED); + CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED); mPortStatus[portIndex] = DISABLING; + CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex); status_t err = mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); freeBuffersOnPort(portIndex, true); } @@ -2287,16 +2568,17 @@ void OMXCodec::disablePortAsync(OMX_U32 portIndex) { void OMXCodec::enablePortAsync(OMX_U32 portIndex) { CHECK(mState == EXECUTING || mState == RECONFIGURING); - CHECK_EQ(mPortStatus[portIndex], DISABLED); + CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLED); mPortStatus[portIndex] = ENABLING; + CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex); status_t err = mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); } void OMXCodec::fillOutputBuffers() { - CHECK_EQ(mState, EXECUTING); + CHECK_EQ((int)mState, (int)EXECUTING); // This is a workaround for some decoders not properly reporting // end-of-output-stream. 
If we own all input buffers and also own @@ -2315,7 +2597,10 @@ void OMXCodec::fillOutputBuffers() { Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput]; for (size_t i = 0; i < buffers->size(); ++i) { - fillOutputBuffer(&buffers->editItemAt(i)); + BufferInfo *info = &buffers->editItemAt(i); + if (info->mStatus == OWNED_BY_US) { + fillOutputBuffer(&buffers->editItemAt(i)); + } } } @@ -2324,15 +2609,17 @@ void OMXCodec::drainInputBuffers() { Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput]; for (size_t i = 0; i < buffers->size(); ++i) { - drainInputBuffer(&buffers->editItemAt(i)); + if (!drainInputBuffer(&buffers->editItemAt(i))) { + break; + } } } -void OMXCodec::drainInputBuffer(BufferInfo *info) { - CHECK_EQ(info->mOwnedByComponent, false); +bool OMXCodec::drainInputBuffer(BufferInfo *info) { + CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US); if (mSignalledEOS) { - return; + return false; } if (mCodecSpecificDataIndex < mCodecSpecificData.size()) { @@ -2366,16 +2653,16 @@ void OMXCodec::drainInputBuffer(BufferInfo *info) { mNode, info->mBuffer, 0, size, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG, 0); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - info->mOwnedByComponent = true; + info->mStatus = OWNED_BY_COMPONENT; ++mCodecSpecificDataIndex; - return; + return true; } if (mPaused) { - return; + return false; } status_t err; @@ -2385,17 +2672,16 @@ void OMXCodec::drainInputBuffer(BufferInfo *info) { size_t offset = 0; int32_t n = 0; + for (;;) { MediaBuffer *srcBuffer; - MediaSource::ReadOptions options; - if (mSkipTimeUs >= 0) { - options.setSkipFrame(mSkipTimeUs); - } if (mSeekTimeUs >= 0) { if (mLeftOverBuffer) { mLeftOverBuffer->release(); mLeftOverBuffer = NULL; } + + MediaSource::ReadOptions options; options.setSeekTo(mSeekTimeUs, mSeekMode); mSeekTimeUs = -1; @@ -2420,13 +2706,14 @@ void OMXCodec::drainInputBuffer(BufferInfo *info) { err = OK; } else { - err = mSource->read(&srcBuffer, &options); + err = mSource->read(&srcBuffer); } if (err != OK) { signalEOS = true; mFinalStatus = err; mSignalledEOS = true; + mBufferFilled.signal(); break; } @@ -2443,28 +2730,35 @@ void OMXCodec::drainInputBuffer(BufferInfo *info) { srcBuffer = NULL; setState(ERROR); - return; + return false; } mLeftOverBuffer = srcBuffer; break; } - // Do not release the media buffer if it stores meta data - // instead of YUV data. The release is delayed until - // EMPTY_BUFFER_DONE callback is received. 
bool releaseBuffer = true; if (mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames)) { CHECK(mOMXLivesLocally && offset == 0); - OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *) info->mBuffer; - header->pBuffer = (OMX_U8 *) srcBuffer->data() + srcBuffer->range_offset(); + + OMX_BUFFERHEADERTYPE *header = + (OMX_BUFFERHEADERTYPE *)info->mBuffer; + + CHECK(header->pBuffer == info->mData); + + header->pBuffer = + (OMX_U8 *)srcBuffer->data() + srcBuffer->range_offset(); + + releaseBuffer = false; + info->mMediaBuffer = srcBuffer; } else { - if (mQuirks & kStoreMetaDataInInputVideoBuffers) { + if (mIsMetaDataStoredInVideoBuffers) { releaseBuffer = false; info->mMediaBuffer = srcBuffer; } memcpy((uint8_t *)info->mData + offset, - (const uint8_t *)srcBuffer->data() + srcBuffer->range_offset(), + (const uint8_t *)srcBuffer->data() + + srcBuffer->range_offset(), srcBuffer->range_length()); } @@ -2520,10 +2814,10 @@ void OMXCodec::drainInputBuffer(BufferInfo *info) { if (err != OK) { setState(ERROR); - return; + return false; } - info->mOwnedByComponent = true; + info->mStatus = OWNED_BY_COMPONENT; // This component does not ever signal the EOS flag on output buffers, // Thanks for nothing. @@ -2531,10 +2825,12 @@ void OMXCodec::drainInputBuffer(BufferInfo *info) { mNoMoreOutputData = true; mBufferFilled.signal(); } + + return true; } void OMXCodec::fillOutputBuffer(BufferInfo *info) { - CHECK_EQ(info->mOwnedByComponent, false); + CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US); if (mNoMoreOutputData) { CODEC_LOGV("There is no more output data available, not " @@ -2542,7 +2838,24 @@ void OMXCodec::fillOutputBuffer(BufferInfo *info) { return; } - CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer); + if (info->mMediaBuffer != NULL) { + sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer(); + if (graphicBuffer != 0) { + // When using a native buffer we need to lock the buffer before + // giving it to OMX. 
+ CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer); + int err = mNativeWindow->lockBuffer(mNativeWindow.get(), + graphicBuffer.get()); + if (err != 0) { + CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err); + + setState(ERROR); + return; + } + } + } + + CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer); status_t err = mOMX->fillBuffer(mNode, info->mBuffer); if (err != OK) { @@ -2552,19 +2865,20 @@ void OMXCodec::fillOutputBuffer(BufferInfo *info) { return; } - info->mOwnedByComponent = true; + info->mStatus = OWNED_BY_COMPONENT; } -void OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) { +bool OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) { Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput]; for (size_t i = 0; i < buffers->size(); ++i) { if ((*buffers)[i].mBuffer == buffer) { - drainInputBuffer(&buffers->editItemAt(i)); - return; + return drainInputBuffer(&buffers->editItemAt(i)); } } CHECK(!"should not be here."); + + return false; } void OMXCodec::fillOutputBuffer(IOMX::buffer_id buffer) { @@ -2597,10 +2911,10 @@ void OMXCodec::setRawAudioFormat( def.nPortIndex = portIndex; status_t err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition, - &def, sizeof(def)), OK); + &def, sizeof(def)), (status_t)OK); // pcm param OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; @@ -2610,7 +2924,7 @@ void OMXCodec::setRawAudioFormat( err = mOMX->getParameter( mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); pcmParams.nChannels = numChannels; pcmParams.eNumData = OMX_NumericalDataSigned; @@ -2631,7 +2945,7 @@ void OMXCodec::setRawAudioFormat( err = mOMX->setParameter( mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); } static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(bool isAMRWB, int32_t bps) { @@ -2688,13 +3002,13 @@ void OMXCodec::setAMRFormat(bool isWAMR, int32_t bitRate) { status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitRate); err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); //////////////////////// @@ -2723,33 +3037,33 @@ void OMXCodec::setAACFormat(int32_t numChannels, int32_t sampleRate, int32_t bit status_t err = OMX_ErrorNone; while (OMX_ErrorNone == err) { CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioPortFormat, - &format, sizeof(format)), OK); + &format, sizeof(format)), (status_t)OK); if (format.eEncoding == OMX_AUDIO_CodingAAC) { break; } format.nIndex++; } - CHECK_EQ(OK, err); + CHECK_EQ((status_t)OK, err); CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioPortFormat, - &format, sizeof(format)), OK); + &format, sizeof(format)), (status_t)OK); // port definition OMX_PARAM_PORTDEFINITIONTYPE def; InitOMXParams(&def); def.nPortIndex = kPortIndexOutput; CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, - &def, sizeof(def)), OK); + &def, sizeof(def)), (status_t)OK); def.format.audio.bFlagErrorConcealment = OMX_TRUE; def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition, - &def, sizeof(def)), OK); + 
&def, sizeof(def)), (status_t)OK); // profile OMX_AUDIO_PARAM_AACPROFILETYPE profile; InitOMXParams(&profile); profile.nPortIndex = kPortIndexOutput; CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioAac, - &profile, sizeof(profile)), OK); + &profile, sizeof(profile)), (status_t)OK); profile.nChannels = numChannels; profile.eChannelMode = (numChannels == 1? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo); @@ -2762,7 +3076,7 @@ void OMXCodec::setAACFormat(int32_t numChannels, int32_t sampleRate, int32_t bit profile.eAACProfile = OMX_AUDIO_AACObjectLC; profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioAac, - &profile, sizeof(profile)), OK); + &profile, sizeof(profile)), (status_t)OK); } else { OMX_AUDIO_PARAM_AACPROFILETYPE profile; @@ -2771,7 +3085,7 @@ void OMXCodec::setAACFormat(int32_t numChannels, int32_t sampleRate, int32_t bit status_t err = mOMX->getParameter( mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); profile.nChannels = numChannels; profile.nSampleRate = sampleRate; @@ -2779,7 +3093,7 @@ void OMXCodec::setAACFormat(int32_t numChannels, int32_t sampleRate, int32_t bit err = mOMX->setParameter( mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); } } @@ -2791,10 +3105,10 @@ void OMXCodec::setImageOutputFormat( OMX_INDEXTYPE index; status_t err = mOMX->get_extension_index( mNode, "OMX.TI.JPEG.decode.Config.OutputColorFormat", &index); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); err = mOMX->set_config(mNode, index, &format, sizeof(format)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); #endif OMX_PARAM_PORTDEFINITIONTYPE def; @@ -2803,13 +3117,13 @@ void OMXCodec::setImageOutputFormat( status_t err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - CHECK_EQ(def.eDomain, OMX_PortDomainImage); + CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage); OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image; - CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused); + CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingUnused); imageDef->eColorFormat = format; imageDef->nFrameWidth = width; imageDef->nFrameHeight = height; @@ -2852,7 +3166,7 @@ void OMXCodec::setImageOutputFormat( err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); } void OMXCodec::setJPEGInputFormat( @@ -2863,12 +3177,12 @@ void OMXCodec::setJPEGInputFormat( status_t err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - CHECK_EQ(def.eDomain, OMX_PortDomainImage); + CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage); OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image; - CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingJPEG); + CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingJPEG); imageDef->nFrameWidth = width; imageDef->nFrameHeight = height; @@ -2877,7 +3191,7 @@ void OMXCodec::setJPEGInputFormat( err = mOMX->setParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); } void OMXCodec::addCodecSpecificData(const void *data, size_t size) { @@ -2978,7 +3292,7 @@ status_t OMXCodec::stop() { status_t err = mOMX->sendCommand(mNode, OMX_CommandStateSet, 
OMX_StateIdle); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); } while (mState != LOADED && mState != ERROR) { @@ -3029,12 +3343,6 @@ status_t OMXCodec::read( if (options && options->getSeekTo(&seekTimeUs, &seekMode)) { seeking = true; } - int64_t skipTimeUs; - if (options && options->getSkipFrame(&skipTimeUs)) { - mSkipTimeUs = skipTimeUs; - } else { - mSkipTimeUs = -1; - } if (mInitialBufferSubmit) { mInitialBufferSubmit = false; @@ -3070,7 +3378,7 @@ status_t OMXCodec::read( mFilledBuffers.clear(); - CHECK_EQ(mState, EXECUTING); + CHECK_EQ((int)mState, (int)EXECUTING); bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput); bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput); @@ -3089,7 +3397,15 @@ status_t OMXCodec::read( } while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) { - mBufferFilled.wait(mLock); + if (mIsEncoder) { + if (NO_ERROR != mBufferFilled.waitRelative(mLock, 3000000000LL)) { + LOGW("Timed out waiting for buffers from video encoder: %d/%d", + countBuffersWeOwn(mPortBuffers[kPortIndexInput]), + countBuffersWeOwn(mPortBuffers[kPortIndexOutput])); + } + } else { + mBufferFilled.wait(mLock); + } } if (mState == ERROR) { @@ -3110,6 +3426,9 @@ status_t OMXCodec::read( mFilledBuffers.erase(mFilledBuffers.begin()); BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index); + CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US); + info->mStatus = OWNED_BY_CLIENT; + info->mMediaBuffer->add_ref(); *buffer = info->mMediaBuffer; @@ -3124,8 +3443,37 @@ void OMXCodec::signalBufferReturned(MediaBuffer *buffer) { BufferInfo *info = &buffers->editItemAt(i); if (info->mMediaBuffer == buffer) { - CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED); - fillOutputBuffer(info); + CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED); + CHECK_EQ((int)info->mStatus, (int)OWNED_BY_CLIENT); + + info->mStatus = OWNED_BY_US; + + if (buffer->graphicBuffer() == 0) { + fillOutputBuffer(info); + } else { + sp<MetaData> metaData = info->mMediaBuffer->meta_data(); + int32_t rendered = 0; + if (!metaData->findInt32(kKeyRendered, &rendered)) { + rendered = 0; + } + if (!rendered) { + status_t err = cancelBufferToNativeWindow(info); + if (err < 0) { + return; + } + } + + info->mStatus = OWNED_BY_NATIVE_WINDOW; + + // Dequeue the next buffer from the native window. + BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow(); + if (nextBufInfo == 0) { + return; + } + + // Give the buffer to the OMX node to fill. + fillOutputBuffer(nextBufInfo); + } return; } } @@ -3351,7 +3699,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) { status_t err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); printf("%s Port = {\n", portIndex == kPortIndexInput ? 
"Input" : "Output"); @@ -3417,7 +3765,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) { err = mOMX->getParameter( mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); printf(" nSamplingRate = %ld\n", params.nSamplingRate); printf(" nChannels = %ld\n", params.nChannels); @@ -3436,7 +3784,7 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) { err = mOMX->getParameter( mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); printf(" nChannels = %ld\n", amr.nChannels); printf(" eAMRBandMode = %s\n", @@ -3458,6 +3806,18 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) { printf("}\n"); } +status_t OMXCodec::initNativeWindow() { + // Enable use of a GraphicBuffer as the output for this node. This must + // happen before getting the IndexParamPortDefinition parameter because it + // will affect the pixel format that the node reports. + status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE); + if (err != 0) { + return err; + } + + return OK; +} + void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) { mOutputFormat = new MetaData; mOutputFormat->setCString(kKeyDecoderComponent, mComponentName); @@ -3474,13 +3834,14 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) { status_t err = mOMX->getParameter( mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); switch (def.eDomain) { case OMX_PortDomainImage: { OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image; - CHECK_EQ(imageDef->eCompressionFormat, OMX_IMAGE_CodingUnused); + CHECK_EQ((int)imageDef->eCompressionFormat, + (int)OMX_IMAGE_CodingUnused); mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW); mOutputFormat->setInt32(kKeyColorFormat, imageDef->eColorFormat); @@ -3500,11 +3861,11 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) { err = mOMX->getParameter( mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - CHECK_EQ(params.eNumData, OMX_NumericalDataSigned); - CHECK_EQ(params.nBitPerSample, 16); - CHECK_EQ(params.ePCMMode, OMX_AUDIO_PCMModeLinear); + CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned); + CHECK_EQ(params.nBitPerSample, 16u); + CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear); int32_t numChannels, sampleRate; inputFormat->findInt32(kKeyChannelCount, &numChannels); @@ -3538,9 +3899,9 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) { err = mOMX->getParameter( mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr)); - CHECK_EQ(err, OK); + CHECK_EQ(err, (status_t)OK); - CHECK_EQ(amr.nChannels, 1); + CHECK_EQ(amr.nChannels, 1u); mOutputFormat->setInt32(kKeyChannelCount, 1); if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeNB0 @@ -3592,18 +3953,42 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) { CHECK(!"Unknown compression format."); } - if (!strcmp(mComponentName, "OMX.PV.avcdec")) { - // This component appears to be lying to me. 
- mOutputFormat->setInt32( - kKeyWidth, (video_def->nFrameWidth + 15) & -16); - mOutputFormat->setInt32( - kKeyHeight, (video_def->nFrameHeight + 15) & -16); - } else { - mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth); - mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight); + mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth); + mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight); + mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat); + + if (!mIsEncoder) { + OMX_CONFIG_RECTTYPE rect; + InitOMXParams(&rect); + rect.nPortIndex = kPortIndexOutput; + status_t err = + mOMX->getConfig( + mNode, OMX_IndexConfigCommonOutputCrop, + &rect, sizeof(rect)); + + if (err == OK) { + CHECK_GE(rect.nLeft, 0); + CHECK_GE(rect.nTop, 0); + CHECK_GE(rect.nWidth, 0u); + CHECK_GE(rect.nHeight, 0u); + CHECK_LE(rect.nLeft + rect.nWidth - 1, video_def->nFrameWidth); + CHECK_LE(rect.nTop + rect.nHeight - 1, video_def->nFrameHeight); + + mOutputFormat->setRect( + kKeyCropRect, + rect.nLeft, + rect.nTop, + rect.nLeft + rect.nWidth - 1, + rect.nTop + rect.nHeight - 1); + } else { + mOutputFormat->setRect( + kKeyCropRect, + 0, 0, + video_def->nFrameWidth - 1, + video_def->nFrameHeight - 1); + } } - mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat); break; } @@ -3693,8 +4078,30 @@ status_t QueryCodecs( caps->mProfileLevels.push(profileLevel); } - CHECK_EQ(omx->freeNode(node), OK); + // Color format query + OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; + InitOMXParams(&portFormat); + portFormat.nPortIndex = queryDecoders ? 1 : 0; + for (portFormat.nIndex = 0;; ++portFormat.nIndex) { + err = omx->getParameter( + node, OMX_IndexParamVideoPortFormat, + &portFormat, sizeof(portFormat)); + if (err != OK) { + break; + } + caps->mColorFormats.push(portFormat.eColorFormat); + } + + CHECK_EQ(omx->freeNode(node), (status_t)OK); } } +void OMXCodec::restorePatchedDataPointer(BufferInfo *info) { + CHECK(mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames)); + CHECK(mOMXLivesLocally); + + OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)info->mBuffer; + header->pBuffer = (OMX_U8 *)info->mData; +} + } // namespace android diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp index 0368fb7..cf622af 100644 --- a/media/libstagefright/OggExtractor.cpp +++ b/media/libstagefright/OggExtractor.cpp @@ -73,7 +73,7 @@ struct MyVorbisExtractor { // Returns an approximate bitrate in bits per second. 
uint64_t approxBitrate(); - status_t seekToOffset(off_t offset); + status_t seekToOffset(off64_t offset); status_t readNextPacket(MediaBuffer **buffer); status_t init(); @@ -91,7 +91,7 @@ private: }; sp<DataSource> mSource; - off_t mOffset; + off64_t mOffset; Page mCurrentPage; uint64_t mPrevGranulePosition; size_t mCurrentPageSize; @@ -99,7 +99,7 @@ private: uint64_t mCurrentPageSamples; size_t mNextLaceIndex; - off_t mFirstDataOffset; + off64_t mFirstDataOffset; vorbis_info mVi; vorbis_comment mVc; @@ -107,8 +107,8 @@ private: sp<MetaData> mMeta; sp<MetaData> mFileMeta; - ssize_t readPage(off_t offset, Page *page); - status_t findNextPage(off_t startOffset, off_t *pageOffset); + ssize_t readPage(off64_t offset, Page *page); + status_t findNextPage(off64_t startOffset, off64_t *pageOffset); status_t verifyHeader( MediaBuffer *buffer, uint8_t type); @@ -116,7 +116,7 @@ private: void parseFileMetaData(); void extractAlbumArt(const void *data, size_t size); - uint64_t findPrevGranulePosition(off_t pageOffset); + uint64_t findPrevGranulePosition(off64_t pageOffset); MyVorbisExtractor(const MyVorbisExtractor &); MyVorbisExtractor &operator=(const MyVorbisExtractor &); @@ -162,7 +162,7 @@ status_t OggSource::read( int64_t seekTimeUs; ReadOptions::SeekMode mode; if (options && options->getSeekTo(&seekTimeUs, &mode)) { - off_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll; + off64_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll; LOGV("seeking to offset %ld", pos); if (mExtractor->mImpl->seekToOffset(pos) != OK) { @@ -220,7 +220,7 @@ sp<MetaData> MyVorbisExtractor::getFormat() const { } status_t MyVorbisExtractor::findNextPage( - off_t startOffset, off_t *pageOffset) { + off64_t startOffset, off64_t *pageOffset) { *pageOffset = startOffset; for (;;) { @@ -250,9 +250,9 @@ status_t MyVorbisExtractor::findNextPage( // it (if any) and return its granule position. // To do this we back up from the "current" page's offset until we find any // page preceding it and then scan forward to just before the current page. -uint64_t MyVorbisExtractor::findPrevGranulePosition(off_t pageOffset) { - off_t prevPageOffset = 0; - off_t prevGuess = pageOffset; +uint64_t MyVorbisExtractor::findPrevGranulePosition(off64_t pageOffset) { + off64_t prevPageOffset = 0; + off64_t prevGuess = pageOffset; for (;;) { if (prevGuess >= 5000) { prevGuess -= 5000; @@ -292,14 +292,14 @@ uint64_t MyVorbisExtractor::findPrevGranulePosition(off_t pageOffset) { } } -status_t MyVorbisExtractor::seekToOffset(off_t offset) { +status_t MyVorbisExtractor::seekToOffset(off64_t offset) { if (mFirstDataOffset >= 0 && offset < mFirstDataOffset) { // Once we know where the actual audio data starts (past the headers) // don't ever seek to anywhere before that. 
offset = mFirstDataOffset; } - off_t pageOffset; + off64_t pageOffset; status_t err = findNextPage(offset, &pageOffset); if (err != OK) { @@ -324,7 +324,7 @@ status_t MyVorbisExtractor::seekToOffset(off_t offset) { return OK; } -ssize_t MyVorbisExtractor::readPage(off_t offset, Page *page) { +ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) { uint8_t header[27]; if (mSource->readAt(offset, header, sizeof(header)) < (ssize_t)sizeof(header)) { @@ -410,7 +410,7 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) { } if (mNextLaceIndex < mCurrentPage.mNumSegments) { - off_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments; + off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments; for (size_t j = 0; j < mNextLaceIndex; ++j) { dataOffset += mCurrentPage.mLace[j]; } @@ -609,7 +609,7 @@ status_t MyVorbisExtractor::verifyHeader( LOGV("nominal-bitrate = %ld", mVi.bitrate_nominal); LOGV("window-bitrate = %ld", mVi.bitrate_window); - off_t size; + off64_t size; if (mSource->getSize(&size) == OK) { uint64_t bps = approxBitrate(); diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp index 7155c61..062ab9b 100644 --- a/media/libstagefright/SampleIterator.cpp +++ b/media/libstagefright/SampleIterator.cpp @@ -179,7 +179,7 @@ status_t SampleIterator::findChunkRange(uint32_t sampleIndex) { return OK; } -status_t SampleIterator::getChunkOffset(uint32_t chunk, off_t *offset) { +status_t SampleIterator::getChunkOffset(uint32_t chunk, off64_t *offset) { *offset = 0; if (chunk >= mTable->mNumChunkOffsets) { diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp index 27faf4f..a9163fc 100644 --- a/media/libstagefright/SampleTable.cpp +++ b/media/libstagefright/SampleTable.cpp @@ -76,7 +76,7 @@ SampleTable::~SampleTable() { } status_t SampleTable::setChunkOffsetParams( - uint32_t type, off_t data_offset, size_t data_size) { + uint32_t type, off64_t data_offset, size_t data_size) { if (mChunkOffsetOffset >= 0) { return ERROR_MALFORMED; } @@ -117,7 +117,7 @@ status_t SampleTable::setChunkOffsetParams( } status_t SampleTable::setSampleToChunkParams( - off_t data_offset, size_t data_size) { + off64_t data_offset, size_t data_size) { if (mSampleToChunkOffset >= 0) { return ERROR_MALFORMED; } @@ -168,7 +168,7 @@ status_t SampleTable::setSampleToChunkParams( } status_t SampleTable::setSampleSizeParams( - uint32_t type, off_t data_offset, size_t data_size) { + uint32_t type, off64_t data_offset, size_t data_size) { if (mSampleSizeOffset >= 0) { return ERROR_MALFORMED; } @@ -228,7 +228,7 @@ status_t SampleTable::setSampleSizeParams( } status_t SampleTable::setTimeToSampleParams( - off_t data_offset, size_t data_size) { + off64_t data_offset, size_t data_size) { if (mTimeToSample != NULL || data_size < 8) { return ERROR_MALFORMED; } @@ -260,7 +260,7 @@ status_t SampleTable::setTimeToSampleParams( return OK; } -status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) { +status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size) { if (mSyncSampleOffset >= 0 || data_size < 8) { return ERROR_MALFORMED; } @@ -281,7 +281,7 @@ status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) { mNumSyncSamples = U32_AT(&header[4]); if (mNumSyncSamples < 2) { - LOGW("Table of sync samples is empty or has only a single entry!"); + LOGV("Table of sync samples is empty or has only a single entry!"); } mSyncSamples = new uint32_t[mNumSyncSamples]; @@ -419,8 +419,10 @@ 
status_t SampleTable::findSyncSampleNear( ++left; } + if (left > 0) { + --left; + } - --left; uint32_t x; if (mDataSource->readAt( mSyncSampleOffset + 8 + left * 4, &x, 4) != 4) { @@ -557,7 +559,7 @@ status_t SampleTable::getSampleSize_l( status_t SampleTable::getMetaDataForSample( uint32_t sampleIndex, - off_t *offset, + off64_t *offset, size_t *size, uint32_t *decodingTime, bool *isSyncSample) { diff --git a/media/libstagefright/ShoutcastSource.cpp b/media/libstagefright/ShoutcastSource.cpp index 23b7681..783f2d0 100644 --- a/media/libstagefright/ShoutcastSource.cpp +++ b/media/libstagefright/ShoutcastSource.cpp @@ -14,7 +14,6 @@ * limitations under the License. */ -#include "include/stagefright_string.h" #include "include/HTTPStream.h" #include <stdlib.h> @@ -34,7 +33,7 @@ ShoutcastSource::ShoutcastSource(HTTPStream *http) mBytesUntilMetaData(0), mGroup(NULL), mStarted(false) { - string metaint; + AString metaint; if (mHttp->find_header_value("icy-metaint", &metaint)) { char *end; const char *start = metaint.c_str(); diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp index 7a600d7..be3df7c 100644 --- a/media/libstagefright/StagefrightMediaScanner.cpp +++ b/media/libstagefright/StagefrightMediaScanner.cpp @@ -28,9 +28,7 @@ namespace android { -StagefrightMediaScanner::StagefrightMediaScanner() - : mRetriever(new MediaMetadataRetriever) { -} +StagefrightMediaScanner::StagefrightMediaScanner() {} StagefrightMediaScanner::~StagefrightMediaScanner() {} @@ -39,7 +37,7 @@ static bool FileHasAcceptableExtension(const char *extension) { ".mp3", ".mp4", ".m4a", ".3gp", ".3gpp", ".3g2", ".3gpp2", ".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac", ".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota", - ".mkv", ".mka", ".webm", ".ts" + ".mkv", ".mka", ".webm", ".ts", ".fl" }; static const size_t kNumValidExtensions = sizeof(kValidExtensions) / sizeof(kValidExtensions[0]); @@ -131,37 +129,41 @@ status_t StagefrightMediaScanner::processFile( if (status != OK) { return status; } - } else if (mRetriever->setDataSource(path) == OK) { - const char *value; - if ((value = mRetriever->extractMetadata( - METADATA_KEY_MIMETYPE)) != NULL) { - client.setMimeType(value); - } + } else { + sp<MediaMetadataRetriever> mRetriever(new MediaMetadataRetriever); - struct KeyMap { - const char *tag; - int key; - }; - static const KeyMap kKeyMap[] = { - { "tracknumber", METADATA_KEY_CD_TRACK_NUMBER }, - { "discnumber", METADATA_KEY_DISC_NUMBER }, - { "album", METADATA_KEY_ALBUM }, - { "artist", METADATA_KEY_ARTIST }, - { "albumartist", METADATA_KEY_ALBUMARTIST }, - { "composer", METADATA_KEY_COMPOSER }, - { "genre", METADATA_KEY_GENRE }, - { "title", METADATA_KEY_TITLE }, - { "year", METADATA_KEY_YEAR }, - { "duration", METADATA_KEY_DURATION }, - { "writer", METADATA_KEY_WRITER }, - { "compilation", METADATA_KEY_COMPILATION }, - }; - static const size_t kNumEntries = sizeof(kKeyMap) / sizeof(kKeyMap[0]); - - for (size_t i = 0; i < kNumEntries; ++i) { + if (mRetriever->setDataSource(path) == OK) { const char *value; - if ((value = mRetriever->extractMetadata(kKeyMap[i].key)) != NULL) { - client.addStringTag(kKeyMap[i].tag, value); + if ((value = mRetriever->extractMetadata( + METADATA_KEY_MIMETYPE)) != NULL) { + client.setMimeType(value); + } + + struct KeyMap { + const char *tag; + int key; + }; + static const KeyMap kKeyMap[] = { + { "tracknumber", METADATA_KEY_CD_TRACK_NUMBER }, + { "discnumber", METADATA_KEY_DISC_NUMBER }, + { "album", 
METADATA_KEY_ALBUM }, + { "artist", METADATA_KEY_ARTIST }, + { "albumartist", METADATA_KEY_ALBUMARTIST }, + { "composer", METADATA_KEY_COMPOSER }, + { "genre", METADATA_KEY_GENRE }, + { "title", METADATA_KEY_TITLE }, + { "year", METADATA_KEY_YEAR }, + { "duration", METADATA_KEY_DURATION }, + { "writer", METADATA_KEY_WRITER }, + { "compilation", METADATA_KEY_COMPILATION }, + }; + static const size_t kNumEntries = sizeof(kKeyMap) / sizeof(kKeyMap[0]); + + for (size_t i = 0; i < kNumEntries; ++i) { + const char *value; + if ((value = mRetriever->extractMetadata(kKeyMap[i].key)) != NULL) { + client.addStringTag(kKeyMap[i].tag, value); + } } } } @@ -174,12 +176,13 @@ status_t StagefrightMediaScanner::processFile( char *StagefrightMediaScanner::extractAlbumArt(int fd) { LOGV("extractAlbumArt %d", fd); - off_t size = lseek(fd, 0, SEEK_END); + off64_t size = lseek64(fd, 0, SEEK_END); if (size < 0) { return NULL; } - lseek(fd, 0, SEEK_SET); + lseek64(fd, 0, SEEK_SET); + sp<MediaMetadataRetriever> mRetriever(new MediaMetadataRetriever); if (mRetriever->setDataSource(fd, 0, size) == OK) { sp<IMemory> mem = mRetriever->extractAlbumArt(); diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp index e8f4839..600de7c 100644 --- a/media/libstagefright/StagefrightMetadataRetriever.cpp +++ b/media/libstagefright/StagefrightMetadataRetriever.cpp @@ -144,7 +144,10 @@ static VideoFrame *extractVideoFrameWithCodecFlags( static_cast<MediaSource::ReadOptions::SeekMode>(seekMode); int64_t thumbNailTime; - if (frameTimeUs < 0 && trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) { + if (frameTimeUs < 0) { + if (!trackMeta->findInt64(kKeyThumbnailTime, &thumbNailTime)) { + thumbNailTime = 0; + } options.setSeekTo(thumbNailTime, mode); } else { thumbNailTime = -1; @@ -205,17 +208,26 @@ static VideoFrame *extractVideoFrameWithCodecFlags( CHECK(meta->findInt32(kKeyWidth, &width)); CHECK(meta->findInt32(kKeyHeight, &height)); + int32_t crop_left, crop_top, crop_right, crop_bottom; + if (!meta->findRect( + kKeyCropRect, + &crop_left, &crop_top, &crop_right, &crop_bottom)) { + crop_left = crop_top = 0; + crop_right = width - 1; + crop_bottom = height - 1; + } + int32_t rotationAngle; if (!trackMeta->findInt32(kKeyRotation, &rotationAngle)) { rotationAngle = 0; // By default, no rotation } VideoFrame *frame = new VideoFrame; - frame->mWidth = width; - frame->mHeight = height; - frame->mDisplayWidth = width; - frame->mDisplayHeight = height; - frame->mSize = width * height * 2; + frame->mWidth = crop_right - crop_left + 1; + frame->mHeight = crop_bottom - crop_top + 1; + frame->mDisplayWidth = frame->mWidth; + frame->mDisplayHeight = frame->mHeight; + frame->mSize = frame->mWidth * frame->mHeight * 2; frame->mData = new uint8_t[frame->mSize]; frame->mRotationAngle = rotationAngle; @@ -226,17 +238,27 @@ static VideoFrame *extractVideoFrameWithCodecFlags( (OMX_COLOR_FORMATTYPE)srcFormat, OMX_COLOR_Format16bitRGB565); CHECK(converter.isValid()); - converter.convert( - width, height, + err = converter.convert( (const uint8_t *)buffer->data() + buffer->range_offset(), - 0, - frame->mData, width * 2); + width, height, + crop_left, crop_top, crop_right, crop_bottom, + frame->mData, + frame->mWidth, + frame->mHeight, + 0, 0, frame->mWidth - 1, frame->mHeight - 1); buffer->release(); buffer = NULL; decoder->stop(); + if (err != OK) { + LOGE("Colorconverter failed to convert frame."); + + delete frame; + frame = NULL; + } + return frame; } @@ -419,5 +441,4 @@ 
void StagefrightMetadataRetriever::parseMetaData() { } } - } // namespace android diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp index 4711f7c..88e07b0 100644 --- a/media/libstagefright/ThrottledSource.cpp +++ b/media/libstagefright/ThrottledSource.cpp @@ -41,7 +41,7 @@ status_t ThrottledSource::initCheck() const { return mSource->initCheck(); } -ssize_t ThrottledSource::readAt(off_t offset, void *data, size_t size) { +ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) { Mutex::Autolock autoLock(mLock); ssize_t n = mSource->readAt(offset, data, size); @@ -72,7 +72,7 @@ ssize_t ThrottledSource::readAt(off_t offset, void *data, size_t size) { return n; } -status_t ThrottledSource::getSize(off_t *size) { +status_t ThrottledSource::getSize(off64_t *size) { return mSource->getSize(size); } diff --git a/media/libstagefright/VBRISeeker.cpp b/media/libstagefright/VBRISeeker.cpp new file mode 100644 index 0000000..48bddc2 --- /dev/null +++ b/media/libstagefright/VBRISeeker.cpp @@ -0,0 +1,164 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "VBRISeeker" +#include <utils/Log.h> + +#include "include/VBRISeeker.h" + +#include "include/MP3Extractor.h" + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/Utils.h> + +namespace android { + +static uint32_t U24_AT(const uint8_t *ptr) { + return ptr[0] << 16 | ptr[1] << 8 | ptr[2]; +} + +// static +sp<VBRISeeker> VBRISeeker::CreateFromSource( + const sp<DataSource> &source, off64_t post_id3_pos) { + off64_t pos = post_id3_pos; + + uint8_t header[4]; + ssize_t n = source->readAt(pos, header, sizeof(header)); + if (n < (ssize_t)sizeof(header)) { + return NULL; + } + + uint32_t tmp = U32_AT(&header[0]); + size_t frameSize; + int sampleRate; + if (!MP3Extractor::get_mp3_frame_size(tmp, &frameSize, &sampleRate)) { + return NULL; + } + + // VBRI header follows 32 bytes after the header _ends_. + pos += sizeof(header) + 32; + + uint8_t vbriHeader[26]; + n = source->readAt(pos, vbriHeader, sizeof(vbriHeader)); + if (n < (ssize_t)sizeof(vbriHeader)) { + return NULL; + } + + if (memcmp(vbriHeader, "VBRI", 4)) { + return NULL; + } + + size_t numFrames = U32_AT(&vbriHeader[14]); + + int64_t durationUs = + numFrames * 1000000ll * (sampleRate >= 32000 ? 
1152 : 576) / sampleRate; + + LOGV("duration = %.2f secs", durationUs / 1E6); + + size_t numEntries = U16_AT(&vbriHeader[18]); + size_t entrySize = U16_AT(&vbriHeader[22]); + size_t scale = U16_AT(&vbriHeader[20]); + + LOGV("%d entries, scale=%d, size_per_entry=%d", + numEntries, + scale, + entrySize); + + size_t totalEntrySize = numEntries * entrySize; + uint8_t *buffer = new uint8_t[totalEntrySize]; + + n = source->readAt(pos + sizeof(vbriHeader), buffer, totalEntrySize); + if (n < (ssize_t)totalEntrySize) { + delete[] buffer; + buffer = NULL; + + return NULL; + } + + sp<VBRISeeker> seeker = new VBRISeeker; + seeker->mBasePos = post_id3_pos; + seeker->mDurationUs = durationUs; + + off64_t offset = post_id3_pos; + for (size_t i = 0; i < numEntries; ++i) { + uint32_t numBytes; + switch (entrySize) { + case 1: numBytes = buffer[i]; break; + case 2: numBytes = U16_AT(buffer + 2 * i); break; + case 3: numBytes = U24_AT(buffer + 3 * i); break; + default: + { + CHECK_EQ(entrySize, 4u); + numBytes = U32_AT(buffer + 4 * i); break; + } + } + + numBytes *= scale; + + seeker->mSegments.push(numBytes); + + LOGV("entry #%d: %d offset 0x%08lx", i, numBytes, offset); + offset += numBytes; + } + + delete[] buffer; + buffer = NULL; + + LOGI("Found VBRI header."); + + return seeker; +} + +VBRISeeker::VBRISeeker() + : mDurationUs(-1) { +} + +bool VBRISeeker::getDuration(int64_t *durationUs) { + if (mDurationUs < 0) { + return false; + } + + *durationUs = mDurationUs; + + return true; +} + +bool VBRISeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) { + if (mDurationUs < 0) { + return false; + } + + int64_t segmentDurationUs = mDurationUs / mSegments.size(); + + int64_t nowUs = 0; + *pos = mBasePos; + size_t segmentIndex = 0; + while (segmentIndex < mSegments.size() && nowUs < *timeUs) { + nowUs += segmentDurationUs; + *pos += mSegments.itemAt(segmentIndex++); + } + + LOGV("getOffsetForTime %lld us => 0x%08lx", *timeUs, *pos); + + *timeUs = nowUs; + + return true; +} + +} // namespace android + diff --git a/media/libstagefright/VideoSourceDownSampler.cpp b/media/libstagefright/VideoSourceDownSampler.cpp new file mode 100644 index 0000000..ea7b09a --- /dev/null +++ b/media/libstagefright/VideoSourceDownSampler.cpp @@ -0,0 +1,142 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "VideoSourceDownSampler" + +#include <media/stagefright/VideoSourceDownSampler.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/YUVImage.h> +#include <media/stagefright/YUVCanvas.h> +#include "OMX_Video.h" + +namespace android { + +VideoSourceDownSampler::VideoSourceDownSampler(const sp<MediaSource> &videoSource, + int32_t width, int32_t height) { + LOGV("Construct VideoSourceDownSampler"); + CHECK(width > 0); + CHECK(height > 0); + + mRealVideoSource = videoSource; + mWidth = width; + mHeight = height; + + mMeta = new MetaData(*(mRealVideoSource->getFormat())); + CHECK(mMeta->findInt32(kKeyWidth, &mRealSourceWidth)); + CHECK(mMeta->findInt32(kKeyHeight, &mRealSourceHeight)); + + if ((mWidth != mRealSourceWidth) || (mHeight != mRealSourceHeight)) { + // Change meta data for width and height. + CHECK(mWidth <= mRealSourceWidth); + CHECK(mHeight <= mRealSourceHeight); + + mNeedDownSampling = true; + computeDownSamplingParameters(); + mMeta->setInt32(kKeyWidth, mWidth); + mMeta->setInt32(kKeyHeight, mHeight); + } else { + mNeedDownSampling = false; + } +} + +VideoSourceDownSampler::~VideoSourceDownSampler() { +} + +void VideoSourceDownSampler::computeDownSamplingParameters() { + mDownSampleSkipX = mRealSourceWidth / mWidth; + mDownSampleSkipY = mRealSourceHeight / mHeight; + + mDownSampleOffsetX = mRealSourceWidth - mDownSampleSkipX * mWidth; + mDownSampleOffsetY = mRealSourceHeight - mDownSampleSkipY * mHeight; +} + +void VideoSourceDownSampler::downSampleYUVImage( + const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const { + // find the YUV format + int32_t srcFormat; + CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat)); + YUVImage::YUVFormat yuvFormat; + if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + yuvFormat = YUVImage::YUV420SemiPlanar; + } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) { + yuvFormat = YUVImage::YUV420Planar; + } + + // allocate mediaBuffer for down sampled image and setup a canvas. + *buffer = new MediaBuffer(YUVImage::bufferSize(yuvFormat, mWidth, mHeight)); + YUVImage yuvDownSampledImage(yuvFormat, + mWidth, mHeight, + (uint8_t *)(*buffer)->data()); + YUVCanvas yuvCanvasDownSample(yuvDownSampledImage); + + YUVImage yuvImageSource(yuvFormat, + mRealSourceWidth, mRealSourceHeight, + (uint8_t *)sourceBuffer.data()); + yuvCanvasDownSample.downsample(mDownSampleOffsetX, mDownSampleOffsetY, + mDownSampleSkipX, mDownSampleSkipY, + yuvImageSource); +} + +status_t VideoSourceDownSampler::start(MetaData *params) { + LOGV("start"); + return mRealVideoSource->start(); +} + +status_t VideoSourceDownSampler::stop() { + LOGV("stop"); + return mRealVideoSource->stop(); +} + +sp<MetaData> VideoSourceDownSampler::getFormat() { + LOGV("getFormat"); + return mMeta; +} + +status_t VideoSourceDownSampler::read( + MediaBuffer **buffer, const ReadOptions *options) { + LOGV("read"); + MediaBuffer *realBuffer; + status_t err = mRealVideoSource->read(&realBuffer, options); + + if (mNeedDownSampling) { + downSampleYUVImage(*realBuffer, buffer); + + int64_t frameTime; + realBuffer->meta_data()->findInt64(kKeyTime, &frameTime); + (*buffer)->meta_data()->setInt64(kKeyTime, frameTime); + + // We just want this buffer to be deleted when the encoder releases it. + // So don't add a reference to it and set the observer to NULL. + (*buffer)->setObserver(NULL); + + // The original buffer is no longer required. Release it. 
+ realBuffer->release(); + } else { + *buffer = realBuffer; + } + + return err; +} + +status_t VideoSourceDownSampler::pause() { + LOGV("pause"); + return mRealVideoSource->pause(); +} + +} // namespace android diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp index aff06bc..9332120 100644 --- a/media/libstagefright/WAVExtractor.cpp +++ b/media/libstagefright/WAVExtractor.cpp @@ -51,7 +51,7 @@ struct WAVSource : public MediaSource { const sp<MetaData> &meta, uint16_t waveFormat, int32_t bitsPerSample, - off_t offset, size_t size); + off64_t offset, size_t size); virtual status_t start(MetaData *params = NULL); virtual status_t stop(); @@ -72,11 +72,11 @@ private: int32_t mSampleRate; int32_t mNumChannels; int32_t mBitsPerSample; - off_t mOffset; + off64_t mOffset; size_t mSize; bool mStarted; MediaBufferGroup *mGroup; - off_t mCurrentPos; + off64_t mCurrentPos; WAVSource(const WAVSource &); WAVSource &operator=(const WAVSource &); @@ -139,7 +139,7 @@ status_t WAVExtractor::init() { size_t totalSize = U32_LE_AT(&header[4]); - off_t offset = 12; + off64_t offset = 12; size_t remainingSize = totalSize; while (remainingSize >= 8) { uint8_t chunkHeader[8]; @@ -251,7 +251,7 @@ WAVSource::WAVSource( const sp<MetaData> &meta, uint16_t waveFormat, int32_t bitsPerSample, - off_t offset, size_t size) + off64_t offset, size_t size) : mDataSource(dataSource), mMeta(meta), mWaveFormat(waveFormat), @@ -318,7 +318,7 @@ status_t WAVSource::read( int64_t seekTimeUs; ReadOptions::SeekMode mode; if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) { - int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * 2; + int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3); if (pos > mSize) { pos = mSize; } @@ -335,7 +335,7 @@ status_t WAVSource::read( mBitsPerSample == 8 ? kMaxFrameSize / 2 : kMaxFrameSize; size_t maxBytesAvailable = - (mCurrentPos - mOffset >= (off_t)mSize) + (mCurrentPos - mOffset >= (off64_t)mSize) ? 0 : mSize - (mCurrentPos - mOffset); if (maxBytesToRead > maxBytesAvailable) { diff --git a/media/libstagefright/WVMExtractor.cpp b/media/libstagefright/WVMExtractor.cpp new file mode 100644 index 0000000..7c72852 --- /dev/null +++ b/media/libstagefright/WVMExtractor.cpp @@ -0,0 +1,104 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "WVMExtractor" +#include <utils/Log.h> + +#include "include/WVMExtractor.h" + +#include <arpa/inet.h> +#include <utils/String8.h> +#include <media/stagefright/Utils.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/MediaSource.h> +#include <media/stagefright/MediaDefs.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/MediaErrors.h> +#include <media/stagefright/MediaBuffer.h> +#include <media/stagefright/MediaDebug.h> +#include <dlfcn.h> + +#include <utils/Errors.h> + +/* The extractor lifetime is short - just long enough to get + * the media sources constructed - so the shared lib needs to remain open + * beyond the lifetime of the extractor. So keep the handle as a global + * rather than a member of the extractor + */ +void *gVendorLibHandle = NULL; + +namespace android { + +static Mutex gWVMutex; + +WVMExtractor::WVMExtractor(const sp<DataSource> &source) + : mDataSource(source) { + { + Mutex::Autolock autoLock(gWVMutex); + if (gVendorLibHandle == NULL) { + gVendorLibHandle = dlopen("libwvm.so", RTLD_NOW); + } + + if (gVendorLibHandle == NULL) { + LOGE("Failed to open libwvm.so"); + return; + } + } + + typedef MediaExtractor *(*GetInstanceFunc)(sp<DataSource>); + GetInstanceFunc getInstanceFunc = + (GetInstanceFunc) dlsym(gVendorLibHandle, + "_ZN7android11GetInstanceENS_2spINS_10DataSourceEEE"); + + if (getInstanceFunc) { + LOGD("Calling GetInstanceFunc"); + mImpl = (*getInstanceFunc)(source); + CHECK(mImpl != NULL); + } else { + LOGE("Failed to locate GetInstance in libwvm.so"); + } +} + +WVMExtractor::~WVMExtractor() { +} + +size_t WVMExtractor::countTracks() { + return (mImpl != NULL) ? mImpl->countTracks() : 0; +} + +sp<MediaSource> WVMExtractor::getTrack(size_t index) { + if (mImpl == NULL) { + return NULL; + } + return mImpl->getTrack(index); +} + +sp<MetaData> WVMExtractor::getTrackMetaData(size_t index, uint32_t flags) { + if (mImpl == NULL) { + return NULL; + } + return mImpl->getTrackMetaData(index, flags); +} + +sp<MetaData> WVMExtractor::getMetaData() { + if (mImpl == NULL) { + return NULL; + } + return mImpl->getMetaData(); +} + +} //namespace android + diff --git a/media/libstagefright/XINGSeeker.cpp b/media/libstagefright/XINGSeeker.cpp new file mode 100644 index 0000000..616836c --- /dev/null +++ b/media/libstagefright/XINGSeeker.cpp @@ -0,0 +1,223 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "include/XINGSeeker.h" + +#include <media/stagefright/DataSource.h> +#include <media/stagefright/Utils.h> + +namespace android { + +static bool parse_xing_header( + const sp<DataSource> &source, off64_t first_frame_pos, + int32_t *frame_number = NULL, int32_t *byte_number = NULL, + char *table_of_contents = NULL, int32_t *quality_indicator = NULL, + int64_t *duration = NULL); + +// static +sp<XINGSeeker> XINGSeeker::CreateFromSource( + const sp<DataSource> &source, off64_t first_frame_pos) { + sp<XINGSeeker> seeker = new XINGSeeker; + + seeker->mFirstFramePos = first_frame_pos; + + if (!parse_xing_header( + source, first_frame_pos, + NULL, &seeker->mSizeBytes, seeker->mTableOfContents, + NULL, &seeker->mDurationUs)) { + return NULL; + } + + LOGI("Found XING header."); + + return seeker; +} + +XINGSeeker::XINGSeeker() + : mDurationUs(-1), + mSizeBytes(0) { +} + +bool XINGSeeker::getDuration(int64_t *durationUs) { + if (mDurationUs < 0) { + return false; + } + + *durationUs = mDurationUs; + + return true; +} + +bool XINGSeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) { + if (mSizeBytes == 0 || mTableOfContents[0] <= 0 || mDurationUs < 0) { + return false; + } + + float percent = (float)(*timeUs) * 100 / mDurationUs; + float fx; + if( percent <= 0.0f ) { + fx = 0.0f; + } else if( percent >= 100.0f ) { + fx = 256.0f; + } else { + int a = (int)percent; + float fa, fb; + if ( a == 0 ) { + fa = 0.0f; + } else { + fa = (float)mTableOfContents[a-1]; + } + if ( a < 99 ) { + fb = (float)mTableOfContents[a]; + } else { + fb = 256.0f; + } + fx = fa + (fb-fa)*(percent-a); + } + + *pos = (int)((1.0f/256.0f)*fx*mSizeBytes) + mFirstFramePos; + + return true; +} + +static bool parse_xing_header( + const sp<DataSource> &source, off64_t first_frame_pos, + int32_t *frame_number, int32_t *byte_number, + char *table_of_contents, int32_t *quality_indicator, + int64_t *duration) { + if (frame_number) { + *frame_number = 0; + } + if (byte_number) { + *byte_number = 0; + } + if (table_of_contents) { + table_of_contents[0] = 0; + } + if (quality_indicator) { + *quality_indicator = 0; + } + if (duration) { + *duration = 0; + } + + uint8_t buffer[4]; + int offset = first_frame_pos; + if (source->readAt(offset, &buffer, 4) < 4) { // get header + return false; + } + offset += 4; + + uint8_t id, layer, sr_index, mode; + layer = (buffer[1] >> 1) & 3; + id = (buffer[1] >> 3) & 3; + sr_index = (buffer[2] >> 2) & 3; + mode = (buffer[3] >> 6) & 3; + if (layer == 0) { + return false; + } + if (id == 1) { + return false; + } + if (sr_index == 3) { + return false; + } + // determine offset of XING header + if(id&1) { // mpeg1 + if (mode != 3) offset += 32; + else offset += 17; + } else { // mpeg2 + if (mode != 3) offset += 17; + else offset += 9; + } + + if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID + return false; + } + offset += 4; + // Check XING ID + if ((buffer[0] != 'X') || (buffer[1] != 'i') + || (buffer[2] != 'n') || (buffer[3] != 'g')) { + if ((buffer[0] != 'I') || (buffer[1] != 'n') + || (buffer[2] != 'f') || (buffer[3] != 'o')) { + return false; + } + } + + if (source->readAt(offset, &buffer, 4) < 4) { // flags + return false; + } + offset += 4; + uint32_t flags = U32_AT(buffer); + + if (flags & 0x0001) { // Frames field is present + if (source->readAt(offset, buffer, 4) < 4) { + return false; + } + if (frame_number) { + *frame_number = U32_AT(buffer); + } + int32_t frame = U32_AT(buffer); + // Samples per Frame: 1. index = MPEG Version ID, 2. 
index = Layer + const int samplesPerFrames[2][3] = + { + { 384, 1152, 576 }, // MPEG 2, 2.5: layer1, layer2, layer3 + { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3 + }; + // sampling rates in hertz: 1. index = MPEG Version ID, 2. index = sampling rate index + const int samplingRates[4][3] = + { + { 11025, 12000, 8000, }, // MPEG 2.5 + { 0, 0, 0, }, // reserved + { 22050, 24000, 16000, }, // MPEG 2 + { 44100, 48000, 32000, } // MPEG 1 + }; + if (duration) { + *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL + / samplingRates[id][sr_index]; + } + offset += 4; + } + if (flags & 0x0002) { // Bytes field is present + if (byte_number) { + if (source->readAt(offset, buffer, 4) < 4) { + return false; + } + *byte_number = U32_AT(buffer); + } + offset += 4; + } + if (flags & 0x0004) { // TOC field is present + if (table_of_contents) { + if (source->readAt(offset + 1, table_of_contents, 99) < 99) { + return false; + } + } + offset += 100; + } + if (flags & 0x0008) { // Quality indicator field is present + if (quality_indicator) { + if (source->readAt(offset, buffer, 4) < 4) { + return false; + } + *quality_indicator = U32_AT(buffer); + } + } + return true; +} + +} // namespace android + diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp index 478e40c..fa12cf0 100644 --- a/media/libstagefright/avc_utils.cpp +++ b/media/libstagefright/avc_utils.cpp @@ -14,6 +14,10 @@ * limitations under the License. */ +//#define LOG_NDEBUG 0 +#define LOG_TAG "avc_utils" +#include <utils/Log.h> + #include "include/avc_utils.h" #include <media/stagefright/foundation/ABitReader.h> @@ -218,6 +222,28 @@ static sp<ABuffer> FindNAL( return NULL; } +const char *AVCProfileToString(uint8_t profile) { + switch (profile) { + case kAVCProfileBaseline: + return "Baseline"; + case kAVCProfileMain: + return "Main"; + case kAVCProfileExtended: + return "Extended"; + case kAVCProfileHigh: + return "High"; + case kAVCProfileHigh10: + return "High 10"; + case kAVCProfileHigh422: + return "High 422"; + case kAVCProfileHigh444: + return "High 444"; + case kAVCProfileCAVLC444Intra: + return "CAVLC 444 Intra"; + default: return "Unknown"; + } +} + sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) { const uint8_t *data = accessUnit->data(); size_t size = accessUnit->size(); @@ -244,6 +270,10 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) { *out++ = 0x01; // configurationVersion memcpy(out, seqParamSet->data() + 1, 3); // profile/level... 
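The three bytes copied by the memcpy above are the SPS's profile_idc, compatibility flags, and level_idc, which is why the lines that follow read the profile from out[0] and the level from out[2]. A minimal sketch of how the updated LOGI further down renders a raw level_idc (hypothetical value, illustrative only):

    uint8_t level = 31;        // e.g. a level_idc of 31, as out[2] might hold
    int major = level / 10;    // 3
    int minor = level % 10;    // 1  -> logged as "level 3.1"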
+ + uint8_t profile = out[0]; + uint8_t level = out[2]; + out += 3; *out++ = (0x3f << 2) | 1; // lengthSize == 2 bytes *out++ = 0xe0 | 1; @@ -271,7 +301,8 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) { meta->setInt32(kKeyWidth, width); meta->setInt32(kKeyHeight, height); - LOGI("found AVC codec config (%d x %d)", width, height); + LOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)", + width, height, AVCProfileToString(profile), level / 10, level % 10); return meta; } diff --git a/media/libstagefright/codecs/aacdec/AACDecoder.cpp b/media/libstagefright/codecs/aacdec/AACDecoder.cpp index f58c16d..208431c 100644 --- a/media/libstagefright/codecs/aacdec/AACDecoder.cpp +++ b/media/libstagefright/codecs/aacdec/AACDecoder.cpp @@ -21,8 +21,8 @@ #include "pvmp4audiodecoder_api.h" +#include <media/stagefright/foundation/ADebug.h> #include <media/stagefright/MediaBufferGroup.h> -#include <media/stagefright/MediaDebug.h> #include <media/stagefright/MediaDefs.h> #include <media/stagefright/MetaData.h> @@ -84,7 +84,7 @@ status_t AACDecoder::initCheck() { sp<MetaData> meta = mSource->getFormat(); if (meta->findData(kKeyESDS, &type, &data, &size)) { ESDS esds((const char *)data, size); - CHECK_EQ(esds.InitCheck(), OK); + CHECK_EQ(esds.InitCheck(), (status_t)OK); const void *codec_specific_data; size_t codec_specific_data_size; @@ -197,7 +197,7 @@ status_t AACDecoder::read( } MediaBuffer *buffer; - CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), OK); + CHECK_EQ(mBufferGroup->acquire_buffer(&buffer), (status_t)OK); mConfig->pInputBuffer = (UChar *)mInputBuffer->data() + mInputBuffer->range_offset(); @@ -308,7 +308,7 @@ status_t AACDecoder::read( mAnchorTimeUs + (mNumSamplesOutput * 1000000) / mConfig->samplingRate); - mNumSamplesOutput += mConfig->frameLength; + mNumSamplesOutput += mConfig->frameLength * mUpsamplingFactor; *out = buffer; diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk index d5d8f3e..69e331f 100644 --- a/media/libstagefright/codecs/aacdec/Android.mk +++ b/media/libstagefright/codecs/aacdec/Android.mk @@ -149,6 +149,8 @@ LOCAL_CFLAGS := -DAAC_PLUS -DHQ_SBR -DPARAMETRICSTEREO -DOSCL_IMPORT_REF= -DOSCL LOCAL_C_INCLUDES := frameworks/base/media/libstagefright/include +LOCAL_ARM_MODE := arm + LOCAL_MODULE := libstagefright_aacdec include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp index df9f107..a8b1292 100644 --- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp +++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp @@ -36,6 +36,7 @@ AACEncoder::AACEncoder(const sp<MediaSource> &source, const sp<MetaData> &meta) mStarted(false), mBufferGroup(NULL), mInputBuffer(NULL), + mInputFrame(NULL), mEncoderHandle(NULL), mApiHandle(NULL), mMemOperator(NULL) { @@ -45,6 +46,7 @@ status_t AACEncoder::initCheck() { CHECK(mApiHandle == NULL && mEncoderHandle == NULL); CHECK(mMeta->findInt32(kKeySampleRate, &mSampleRate)); CHECK(mMeta->findInt32(kKeyChannelCount, &mChannels)); + CHECK(mChannels <= 2 && mChannels >= 1); CHECK(mMeta->findInt32(kKeyBitRate, &mBitRate)); mApiHandle = new VO_AUDIO_CODECAPI; @@ -145,7 +147,15 @@ status_t AACEncoder::start(MetaData *params) { mNumInputSamples = 0; mAnchorTimeUs = 0; mFrameCount = 0; - mSource->start(params); + + mInputFrame = new int16_t[mChannels * kNumSamplesPerFrame]; + CHECK(mInputFrame != NULL); + + status_t err = mSource->start(params); + if (err != OK) { + 
LOGE("AudioSource is not available"); + return err; + } mStarted = true; @@ -153,11 +163,6 @@ status_t AACEncoder::start(MetaData *params) { } status_t AACEncoder::stop() { - if (!mStarted) { - LOGW("Call stop() when encoder has not started"); - return OK; - } - if (mInputBuffer) { mInputBuffer->release(); mInputBuffer = NULL; @@ -166,8 +171,17 @@ status_t AACEncoder::stop() { delete mBufferGroup; mBufferGroup = NULL; - mSource->stop(); + if (mInputFrame) { + delete[] mInputFrame; + mInputFrame = NULL; + } + + if (!mStarted) { + LOGW("Call stop() when encoder has not started"); + return ERROR_END_OF_STREAM; + } + mSource->stop(); if (mEncoderHandle) { CHECK_EQ(VO_ERR_NONE, mApiHandle->Uninit(mEncoderHandle)); mEncoderHandle = NULL; @@ -222,7 +236,8 @@ status_t AACEncoder::read( buffer->meta_data()->setInt32(kKeyIsCodecConfig, false); } - while (mNumInputSamples < kNumSamplesPerFrame) { + const int32_t nSamples = mChannels * kNumSamplesPerFrame; + while (mNumInputSamples < nSamples) { if (mInputBuffer == NULL) { if (mSource->read(&mInputBuffer, options) != OK) { if (mNumInputSamples == 0) { @@ -231,7 +246,7 @@ status_t AACEncoder::read( } memset(&mInputFrame[mNumInputSamples], 0, - sizeof(int16_t) * (kNumSamplesPerFrame - mNumInputSamples)); + sizeof(int16_t) * (nSamples - mNumInputSamples)); mNumInputSamples = 0; break; } @@ -250,8 +265,7 @@ status_t AACEncoder::read( } else { readFromSource = false; } - size_t copy = - (kNumSamplesPerFrame - mNumInputSamples) * sizeof(int16_t); + size_t copy = (nSamples - mNumInputSamples) * sizeof(int16_t); if (copy > mInputBuffer->range_length()) { copy = mInputBuffer->range_length(); @@ -271,8 +285,8 @@ status_t AACEncoder::read( mInputBuffer = NULL; } mNumInputSamples += copy / sizeof(int16_t); - if (mNumInputSamples >= kNumSamplesPerFrame) { - mNumInputSamples %= kNumSamplesPerFrame; + if (mNumInputSamples >= nSamples) { + mNumInputSamples %= nSamples; break; } } @@ -280,7 +294,7 @@ status_t AACEncoder::read( VO_CODECBUFFER inputData; memset(&inputData, 0, sizeof(inputData)); inputData.Buffer = (unsigned char*) mInputFrame; - inputData.Length = kNumSamplesPerFrame * sizeof(int16_t); + inputData.Length = nSamples * sizeof(int16_t); CHECK(VO_ERR_NONE == mApiHandle->SetInputData(mEncoderHandle,&inputData)); VO_CODECBUFFER outputData; @@ -289,15 +303,21 @@ status_t AACEncoder::read( memset(&outputInfo, 0, sizeof(outputInfo)); VO_U32 ret = VO_ERR_NONE; - outputData.Buffer = outPtr; - outputData.Length = buffer->size(); - ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo); - CHECK(ret == VO_ERR_NONE || ret == VO_ERR_INPUT_BUFFER_SMALL); - CHECK(outputData.Length != 0); - buffer->set_range(0, outputData.Length); + size_t nOutputBytes = 0; + do { + outputData.Buffer = outPtr; + outputData.Length = buffer->size() - nOutputBytes; + ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo); + if (ret == VO_ERR_NONE) { + outPtr += outputData.Length; + nOutputBytes += outputData.Length; + } + } while (ret != VO_ERR_INPUT_BUFFER_SMALL); + buffer->set_range(0, nOutputBytes); int64_t mediaTimeUs = ((mFrameCount - 1) * 1000000LL * kNumSamplesPerFrame) / mSampleRate; + buffer->meta_data()->setInt64(kKeyTime, mAnchorTimeUs + mediaTimeUs); if (readFromSource && wallClockTimeUs != -1) { buffer->meta_data()->setInt64(kKeyDriftTime, mediaTimeUs - wallClockTimeUs); diff --git a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp index 868c514..5bbba35 100644 --- 
a/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp +++ b/media/libstagefright/codecs/avc/dec/AVCDecoder.cpp @@ -73,6 +73,7 @@ AVCDecoder::AVCDecoder(const sp<MediaSource> &source) CHECK(mSource->getFormat()->findInt32(kKeyHeight, &height)); mFormat->setInt32(kKeyWidth, width); mFormat->setInt32(kKeyHeight, height); + mFormat->setRect(kKeyCropRect, 0, 0, width - 1, height - 1); mFormat->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar); mFormat->setCString(kKeyDecoderComponent, "AVCDecoder"); @@ -418,16 +419,32 @@ status_t AVCDecoder::read( crop_top = crop_left = 0; } - int32_t aligned_width = (crop_right - crop_left + 1 + 15) & ~15; - int32_t aligned_height = (crop_bottom - crop_top + 1 + 15) & ~15; + int32_t prevCropLeft, prevCropTop; + int32_t prevCropRight, prevCropBottom; + if (!mFormat->findRect( + kKeyCropRect, + &prevCropLeft, &prevCropTop, + &prevCropRight, &prevCropBottom)) { + prevCropLeft = prevCropTop = 0; + prevCropRight = width - 1; + prevCropBottom = height - 1; + } int32_t oldWidth, oldHeight; CHECK(mFormat->findInt32(kKeyWidth, &oldWidth)); CHECK(mFormat->findInt32(kKeyHeight, &oldHeight)); - if (oldWidth != aligned_width || oldHeight != aligned_height) { - mFormat->setInt32(kKeyWidth, aligned_width); - mFormat->setInt32(kKeyHeight, aligned_height); + if (oldWidth != width || oldHeight != height + || prevCropLeft != crop_left + || prevCropTop != crop_top + || prevCropRight != crop_right + || prevCropBottom != crop_bottom) { + mFormat->setRect( + kKeyCropRect, + crop_left, crop_top, crop_right, crop_bottom); + + mFormat->setInt32(kKeyWidth, width); + mFormat->setInt32(kKeyHeight, height); err = INFO_FORMAT_CHANGED; } else { diff --git a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp index 52a391f..e3292e6 100644 --- a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp +++ b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp @@ -33,6 +33,80 @@ namespace android { +static status_t ConvertOmxAvcProfileToAvcSpecProfile( + int32_t omxProfile, AVCProfile* pvProfile) { + LOGV("ConvertOmxAvcProfileToAvcSpecProfile: %d", omxProfile); + switch (omxProfile) { + case OMX_VIDEO_AVCProfileBaseline: + *pvProfile = AVC_BASELINE; + return OK; + default: + LOGE("Unsupported omx profile: %d", omxProfile); + } + return BAD_VALUE; +} + +static status_t ConvertOmxAvcLevelToAvcSpecLevel( + int32_t omxLevel, AVCLevel *pvLevel) { + LOGV("ConvertOmxAvcLevelToAvcSpecLevel: %d", omxLevel); + AVCLevel level = AVC_LEVEL5_1; + switch (omxLevel) { + case OMX_VIDEO_AVCLevel1: + level = AVC_LEVEL1_B; + break; + case OMX_VIDEO_AVCLevel1b: + level = AVC_LEVEL1; + break; + case OMX_VIDEO_AVCLevel11: + level = AVC_LEVEL1_1; + break; + case OMX_VIDEO_AVCLevel12: + level = AVC_LEVEL1_2; + break; + case OMX_VIDEO_AVCLevel13: + level = AVC_LEVEL1_3; + break; + case OMX_VIDEO_AVCLevel2: + level = AVC_LEVEL2; + break; + case OMX_VIDEO_AVCLevel21: + level = AVC_LEVEL2_1; + break; + case OMX_VIDEO_AVCLevel22: + level = AVC_LEVEL2_2; + break; + case OMX_VIDEO_AVCLevel3: + level = AVC_LEVEL3; + break; + case OMX_VIDEO_AVCLevel31: + level = AVC_LEVEL3_1; + break; + case OMX_VIDEO_AVCLevel32: + level = AVC_LEVEL3_2; + break; + case OMX_VIDEO_AVCLevel4: + level = AVC_LEVEL4; + break; + case OMX_VIDEO_AVCLevel41: + level = AVC_LEVEL4_1; + break; + case OMX_VIDEO_AVCLevel42: + level = AVC_LEVEL4_2; + break; + case OMX_VIDEO_AVCLevel5: + level = AVC_LEVEL5; + break; + case OMX_VIDEO_AVCLevel51: + level = AVC_LEVEL5_1; + break; + default: + LOGE("Unknown omx 
level: %d", omxLevel); + return BAD_VALUE; + } + *pvLevel = level; + return OK; +} + inline static void ConvertYUV420SemiPlanarToYUV420Planar( uint8_t *inyuv, uint8_t* outyuv, int32_t width, int32_t height) { @@ -104,7 +178,7 @@ AVCEncoder::AVCEncoder( mInputFrameData(NULL), mGroup(NULL) { - LOGV("Construct software AVCEncoder"); + LOGI("Construct software AVCEncoder"); mHandle = new tagAVCHandle; memset(mHandle, 0, sizeof(tagAVCHandle)); @@ -133,7 +207,7 @@ status_t AVCEncoder::initCheck(const sp<MetaData>& meta) { LOGV("initCheck"); CHECK(meta->findInt32(kKeyWidth, &mVideoWidth)); CHECK(meta->findInt32(kKeyHeight, &mVideoHeight)); - CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate)); + CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate)); CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate)); // XXX: Add more color format support @@ -231,10 +305,16 @@ status_t AVCEncoder::initCheck(const sp<MetaData>& meta) { mEncParams->level = AVC_LEVEL3_2; int32_t profile, level; if (meta->findInt32(kKeyVideoProfile, &profile)) { - mEncParams->profile = (AVCProfile) profile; + if (OK != ConvertOmxAvcProfileToAvcSpecProfile( + profile, &mEncParams->profile)) { + return BAD_VALUE; + } } if (meta->findInt32(kKeyVideoLevel, &level)) { - mEncParams->level = (AVCLevel) level; + if (OK != ConvertOmxAvcLevelToAvcSpecLevel( + level, &mEncParams->level)) { + return BAD_VALUE; + } } @@ -242,7 +322,7 @@ status_t AVCEncoder::initCheck(const sp<MetaData>& meta) { mFormat->setInt32(kKeyWidth, mVideoWidth); mFormat->setInt32(kKeyHeight, mVideoHeight); mFormat->setInt32(kKeyBitRate, mVideoBitRate); - mFormat->setInt32(kKeySampleRate, mVideoFrameRate); + mFormat->setInt32(kKeyFrameRate, mVideoFrameRate); mFormat->setInt32(kKeyColorFormat, mVideoColorFormat); mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC); mFormat->setCString(kKeyDecoderComponent, "AVCEncoder"); diff --git a/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp b/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp index dcf129e..2bdb3ef 100644 --- a/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp +++ b/media/libstagefright/codecs/m4v_h263/dec/M4vH263Decoder.cpp @@ -132,7 +132,10 @@ status_t M4vH263Decoder::start(MetaData *) { } MP4DecodingMode actualMode = PVGetDecBitstreamMode(mHandle); - CHECK_EQ((int)mode, (int)actualMode); + if (mode != actualMode) { + PVCleanUpVideoDecoder(mHandle); + return UNKNOWN_ERROR; + } PVSetPostProcType((VideoDecControls *) mHandle, 0); @@ -240,6 +243,8 @@ status_t M4vH263Decoder::read( CHECK_LE(disp_width, buf_width); CHECK_LE(disp_height, buf_height); + mFormat->setRect(kKeyCropRect, 0, 0, disp_width - 1, disp_height - 1); + return INFO_FORMAT_CHANGED; } diff --git a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp index a011137..15ed219 100644 --- a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp +++ b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp @@ -32,6 +32,109 @@ namespace android { +static status_t ConvertOmxProfileLevel( + MP4EncodingMode mode, + int32_t omxProfile, + int32_t omxLevel, + ProfileLevelType* pvProfileLevel) { + LOGV("ConvertOmxProfileLevel: %d/%d/%d", mode, omxProfile, omxLevel); + ProfileLevelType profileLevel; + if (mode == H263_MODE) { + switch (omxProfile) { + case OMX_VIDEO_H263ProfileBaseline: + if (omxLevel > OMX_VIDEO_H263Level45) { + LOGE("Unsupported level (%d) for H263", omxLevel); + return BAD_VALUE; + } else { + LOGW("PV does not support level 
configuration for H263"); + profileLevel = CORE_PROFILE_LEVEL2; + break; + } + break; + default: + LOGE("Unsupported profile (%d) for H263", omxProfile); + return BAD_VALUE; + } + } else { // MPEG4 + switch (omxProfile) { + case OMX_VIDEO_MPEG4ProfileSimple: + switch (omxLevel) { + case OMX_VIDEO_MPEG4Level0b: + profileLevel = SIMPLE_PROFILE_LEVEL0; + break; + case OMX_VIDEO_MPEG4Level1: + profileLevel = SIMPLE_PROFILE_LEVEL1; + break; + case OMX_VIDEO_MPEG4Level2: + profileLevel = SIMPLE_PROFILE_LEVEL2; + break; + case OMX_VIDEO_MPEG4Level3: + profileLevel = SIMPLE_PROFILE_LEVEL3; + break; + default: + LOGE("Unsupported level (%d) for MPEG4 simple profile", + omxLevel); + return BAD_VALUE; + } + break; + case OMX_VIDEO_MPEG4ProfileSimpleScalable: + switch (omxLevel) { + case OMX_VIDEO_MPEG4Level0b: + profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL0; + break; + case OMX_VIDEO_MPEG4Level1: + profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL1; + break; + case OMX_VIDEO_MPEG4Level2: + profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL2; + break; + default: + LOGE("Unsupported level (%d) for MPEG4 simple " + "scalable profile", omxLevel); + return BAD_VALUE; + } + break; + case OMX_VIDEO_MPEG4ProfileCore: + switch (omxLevel) { + case OMX_VIDEO_MPEG4Level1: + profileLevel = CORE_PROFILE_LEVEL1; + break; + case OMX_VIDEO_MPEG4Level2: + profileLevel = CORE_PROFILE_LEVEL2; + break; + default: + LOGE("Unsupported level (%d) for MPEG4 core " + "profile", omxLevel); + return BAD_VALUE; + } + break; + case OMX_VIDEO_MPEG4ProfileCoreScalable: + switch (omxLevel) { + case OMX_VIDEO_MPEG4Level1: + profileLevel = CORE_SCALABLE_PROFILE_LEVEL1; + break; + case OMX_VIDEO_MPEG4Level2: + profileLevel = CORE_SCALABLE_PROFILE_LEVEL2; + break; + case OMX_VIDEO_MPEG4Level3: + profileLevel = CORE_SCALABLE_PROFILE_LEVEL3; + break; + default: + LOGE("Unsupported level (%d) for MPEG4 core " + "scalable profile", omxLevel); + return BAD_VALUE; + } + break; + default: + LOGE("Unsupported MPEG4 profile (%d)", omxProfile); + return BAD_VALUE; + } + } + + *pvProfileLevel = profileLevel; + return OK; +} + inline static void ConvertYUV420SemiPlanarToYUV420Planar( uint8_t *inyuv, uint8_t* outyuv, int32_t width, int32_t height) { @@ -75,7 +178,7 @@ M4vH263Encoder::M4vH263Encoder( mInputFrameData(NULL), mGroup(NULL) { - LOGV("Construct software M4vH263Encoder"); + LOGI("Construct software M4vH263Encoder"); mHandle = new tagvideoEncControls; memset(mHandle, 0, sizeof(tagvideoEncControls)); @@ -97,7 +200,7 @@ status_t M4vH263Encoder::initCheck(const sp<MetaData>& meta) { LOGV("initCheck"); CHECK(meta->findInt32(kKeyWidth, &mVideoWidth)); CHECK(meta->findInt32(kKeyHeight, &mVideoHeight)); - CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate)); + CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate)); CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate)); // XXX: Add more color format support @@ -150,9 +253,14 @@ status_t M4vH263Encoder::initCheck(const sp<MetaData>& meta) { // If profile and level setting is not correct, failure // is reported when the encoder is initialized. 
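A quick usage sketch of the ConvertOmxProfileLevel helper defined above (illustrative call, not taken from this file); for H.263 the level argument is validated but then collapsed to the fixed PV profile/level, as the switch shows:

    ProfileLevelType pl;
    if (ConvertOmxProfileLevel(
            H263_MODE, OMX_VIDEO_H263ProfileBaseline,
            OMX_VIDEO_H263Level45, &pl) == OK) {
        // pl == CORE_PROFILE_LEVEL2 for any accepted H.263 level
    }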
mEncParams->profile_level = CORE_PROFILE_LEVEL2; - int32_t profileLevel; - if (meta->findInt32(kKeyVideoLevel, &profileLevel)) { - mEncParams->profile_level = (ProfileLevelType)profileLevel; + int32_t profile, level; + if (meta->findInt32(kKeyVideoProfile, &profile) && + meta->findInt32(kKeyVideoLevel, &level)) { + if (OK != ConvertOmxProfileLevel( + mEncParams->encMode, profile, level, + &mEncParams->profile_level)) { + return BAD_VALUE; + } } mEncParams->packetSize = 32; @@ -191,7 +299,7 @@ status_t M4vH263Encoder::initCheck(const sp<MetaData>& meta) { mFormat->setInt32(kKeyWidth, mVideoWidth); mFormat->setInt32(kKeyHeight, mVideoHeight); mFormat->setInt32(kKeyBitRate, mVideoBitRate); - mFormat->setInt32(kKeySampleRate, mVideoFrameRate); + mFormat->setInt32(kKeyFrameRate, mVideoFrameRate); mFormat->setInt32(kKeyColorFormat, mVideoColorFormat); mFormat->setCString(kKeyMIMEType, mime); diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk index fb56a93..753500e 100644 --- a/media/libstagefright/codecs/mp3dec/Android.mk +++ b/media/libstagefright/codecs/mp3dec/Android.mk @@ -53,5 +53,7 @@ LOCAL_CFLAGS := \ LOCAL_MODULE := libstagefright_mp3dec +LOCAL_ARM_MODE := arm + include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk index 0dcbd73..62ba40f 100644 --- a/media/libstagefright/colorconversion/Android.mk +++ b/media/libstagefright/colorconversion/Android.mk @@ -6,17 +6,9 @@ LOCAL_SRC_FILES:= \ SoftwareRenderer.cpp LOCAL_C_INCLUDES := \ - $(TOP)/frameworks/base/include/media/stagefright/openmax - -LOCAL_SHARED_LIBRARIES := \ - libbinder \ - libmedia \ - libutils \ - libui \ - libcutils \ - libsurfaceflinger_client\ - libcamera_client + $(TOP)/frameworks/base/include/media/stagefright/openmax \ + $(TOP)/hardware/msm7k LOCAL_MODULE:= libstagefright_color_conversion -include $(BUILD_SHARED_LIBRARY) +include $(BUILD_STATIC_LIBRARY) diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp index 5b16997..d518c97 100644 --- a/media/libstagefright/colorconversion/ColorConverter.cpp +++ b/media/libstagefright/colorconversion/ColorConverter.cpp @@ -16,6 +16,7 @@ #include <media/stagefright/ColorConverter.h> #include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MediaErrors.h> namespace android { @@ -50,31 +51,68 @@ bool ColorConverter::isValid() const { } } -void ColorConverter::convert( +ColorConverter::BitmapParams::BitmapParams( + void *bits, size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip) { - CHECK_EQ(mDstFormat, OMX_COLOR_Format16bitRGB565); + size_t cropLeft, size_t cropTop, + size_t cropRight, size_t cropBottom) + : mBits(bits), + mWidth(width), + mHeight(height), + mCropLeft(cropLeft), + mCropTop(cropTop), + mCropRight(cropRight), + mCropBottom(cropBottom) { +} + +size_t ColorConverter::BitmapParams::cropWidth() const { + return mCropRight - mCropLeft + 1; +} + +size_t ColorConverter::BitmapParams::cropHeight() const { + return mCropBottom - mCropTop + 1; +} + +status_t ColorConverter::convert( + const void *srcBits, + size_t srcWidth, size_t srcHeight, + size_t srcCropLeft, size_t srcCropTop, + size_t srcCropRight, size_t srcCropBottom, + void *dstBits, + size_t dstWidth, size_t dstHeight, + size_t dstCropLeft, size_t dstCropTop, + size_t dstCropRight, size_t dstCropBottom) { + if (mDstFormat != 
OMX_COLOR_Format16bitRGB565) { + return ERROR_UNSUPPORTED; + } + + BitmapParams src( + const_cast<void *>(srcBits), + srcWidth, srcHeight, + srcCropLeft, srcCropTop, srcCropRight, srcCropBottom); + + BitmapParams dst( + dstBits, + dstWidth, dstHeight, + dstCropLeft, dstCropTop, dstCropRight, dstCropBottom); + + status_t err; switch (mSrcFormat) { case OMX_COLOR_FormatYUV420Planar: - convertYUV420Planar( - width, height, srcBits, srcSkip, dstBits, dstSkip); + err = convertYUV420Planar(src, dst); break; case OMX_COLOR_FormatCbYCrY: - convertCbYCrY( - width, height, srcBits, srcSkip, dstBits, dstSkip); + err = convertCbYCrY(src, dst); break; case OMX_QCOM_COLOR_FormatYVU420SemiPlanar: - convertQCOMYUV420SemiPlanar( - width, height, srcBits, srcSkip, dstBits, dstSkip); + err = convertQCOMYUV420SemiPlanar(src, dst); break; case OMX_COLOR_FormatYUV420SemiPlanar: - convertYUV420SemiPlanar( - width, height, srcBits, srcSkip, dstBits, dstSkip); + err = convertYUV420SemiPlanar(src, dst); break; default: @@ -83,28 +121,34 @@ void ColorConverter::convert( break; } } + + return err; } -void ColorConverter::convertCbYCrY( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip) { - CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats. - CHECK(dstSkip >= width * 2); - CHECK((dstSkip & 3) == 0); +status_t ColorConverter::convertCbYCrY( + const BitmapParams &src, const BitmapParams &dst) { + // XXX Untested uint8_t *kAdjustedClip = initClip(); - uint32_t *dst_ptr = (uint32_t *)dstBits; + if (!((src.mCropLeft & 1) == 0 + && src.cropWidth() == dst.cropWidth() + && src.cropHeight() == dst.cropHeight())) { + return ERROR_UNSUPPORTED; + } + + uint32_t *dst_ptr = (uint32_t *)dst.mBits + + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2; - const uint8_t *src = (const uint8_t *)srcBits; + const uint8_t *src_ptr = (const uint8_t *)src.mBits + + (src.mCropTop * dst.mWidth + src.mCropLeft) * 2; - for (size_t y = 0; y < height; ++y) { - for (size_t x = 0; x < width; x += 2) { - signed y1 = (signed)src[2 * x + 1] - 16; - signed y2 = (signed)src[2 * x + 3] - 16; - signed u = (signed)src[2 * x] - 128; - signed v = (signed)src[2 * x + 2] - 128; + for (size_t y = 0; y < src.cropHeight(); ++y) { + for (size_t x = 0; x < src.cropWidth(); x += 2) { + signed y1 = (signed)src_ptr[2 * x + 1] - 16; + signed y2 = (signed)src_ptr[2 * x + 3] - 16; + signed u = (signed)src_ptr[2 * x] - 128; + signed v = (signed)src_ptr[2 * x + 2] - 128; signed u_b = u * 517; signed u_g = -u * 100; @@ -134,32 +178,39 @@ void ColorConverter::convertCbYCrY( dst_ptr[x / 2] = (rgb2 << 16) | rgb1; } - src += width * 2; - dst_ptr += dstSkip / 4; + src_ptr += src.mWidth * 2; + dst_ptr += dst.mWidth / 2; } + + return OK; } -void ColorConverter::convertYUV420Planar( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip) { - CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats. 
- CHECK(dstSkip >= width * 2); - CHECK((dstSkip & 3) == 0); +status_t ColorConverter::convertYUV420Planar( + const BitmapParams &src, const BitmapParams &dst) { + if (!((dst.mWidth & 3) == 0 + && (src.mCropLeft & 1) == 0 + && src.cropWidth() == dst.cropWidth() + && src.cropHeight() == dst.cropHeight())) { + return ERROR_UNSUPPORTED; + } uint8_t *kAdjustedClip = initClip(); - uint32_t *dst_ptr = (uint32_t *)dstBits; - const uint8_t *src_y = (const uint8_t *)srcBits; + uint32_t *dst_ptr = (uint32_t *)dst.mBits + + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2; + + const uint8_t *src_y = + (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft; const uint8_t *src_u = - (const uint8_t *)src_y + width * height; + (const uint8_t *)src_y + src.mWidth * src.mHeight + + src.mCropTop * (src.mWidth / 2) + src.mCropLeft / 2; const uint8_t *src_v = - (const uint8_t *)src_u + (width / 2) * (height / 2); + src_u + (src.mWidth / 2) * (src.mHeight / 2); - for (size_t y = 0; y < height; ++y) { - for (size_t x = 0; x < width; x += 2) { + for (size_t y = 0; y < src.cropHeight(); ++y) { + for (size_t x = 0; x < src.cropWidth(); x += 2) { // B = 1.164 * (Y - 16) + 2.018 * (U - 128) // G = 1.164 * (Y - 16) - 0.813 * (V - 128) - 0.391 * (U - 128) // R = 1.164 * (Y - 16) + 1.596 * (V - 128) @@ -212,35 +263,42 @@ void ColorConverter::convertYUV420Planar( dst_ptr[x / 2] = (rgb2 << 16) | rgb1; } - src_y += width; + src_y += src.mWidth; if (y & 1) { - src_u += width / 2; - src_v += width / 2; + src_u += src.mWidth / 2; + src_v += src.mWidth / 2; } - dst_ptr += dstSkip / 4; + dst_ptr += dst.mWidth / 2; } -} -void ColorConverter::convertQCOMYUV420SemiPlanar( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip) { - CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats. - CHECK(dstSkip >= width * 2); - CHECK((dstSkip & 3) == 0); + return OK; +} +status_t ColorConverter::convertQCOMYUV420SemiPlanar( + const BitmapParams &src, const BitmapParams &dst) { uint8_t *kAdjustedClip = initClip(); - uint32_t *dst_ptr = (uint32_t *)dstBits; - const uint8_t *src_y = (const uint8_t *)srcBits; + if (!((dst.mWidth & 3) == 0 + && (src.mCropLeft & 1) == 0 + && src.cropWidth() == dst.cropWidth() + && src.cropHeight() == dst.cropHeight())) { + return ERROR_UNSUPPORTED; + } + + uint32_t *dst_ptr = (uint32_t *)dst.mBits + + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2; + + const uint8_t *src_y = + (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft; const uint8_t *src_u = - (const uint8_t *)src_y + width * height; + (const uint8_t *)src_y + src.mWidth * src.mHeight + + src.mCropTop * src.mWidth + src.mCropLeft; - for (size_t y = 0; y < height; ++y) { - for (size_t x = 0; x < width; x += 2) { + for (size_t y = 0; y < src.cropHeight(); ++y) { + for (size_t x = 0; x < src.cropWidth(); x += 2) { signed y1 = (signed)src_y[x] - 16; signed y2 = (signed)src_y[x + 1] - 16; @@ -275,34 +333,43 @@ void ColorConverter::convertQCOMYUV420SemiPlanar( dst_ptr[x / 2] = (rgb2 << 16) | rgb1; } - src_y += width; + src_y += src.mWidth; if (y & 1) { - src_u += width; + src_u += src.mWidth; } - dst_ptr += dstSkip / 4; + dst_ptr += dst.mWidth / 2; } + + return OK; } -void ColorConverter::convertYUV420SemiPlanar( - size_t width, size_t height, - const void *srcBits, size_t srcSkip, - void *dstBits, size_t dstSkip) { - CHECK_EQ(srcSkip, 0); // Doesn't really make sense for YUV formats. 
- CHECK(dstSkip >= width * 2); - CHECK((dstSkip & 3) == 0); +status_t ColorConverter::convertYUV420SemiPlanar( + const BitmapParams &src, const BitmapParams &dst) { + // XXX Untested uint8_t *kAdjustedClip = initClip(); - uint32_t *dst_ptr = (uint32_t *)dstBits; - const uint8_t *src_y = (const uint8_t *)srcBits; + if (!((dst.mWidth & 3) == 0 + && (src.mCropLeft & 1) == 0 + && src.cropWidth() == dst.cropWidth() + && src.cropHeight() == dst.cropHeight())) { + return ERROR_UNSUPPORTED; + } + + uint32_t *dst_ptr = (uint32_t *)dst.mBits + + (dst.mCropTop * dst.mWidth + dst.mCropLeft) / 2; + + const uint8_t *src_y = + (const uint8_t *)src.mBits + src.mCropTop * src.mWidth + src.mCropLeft; const uint8_t *src_u = - (const uint8_t *)src_y + width * height; + (const uint8_t *)src_y + src.mWidth * src.mHeight + + src.mCropTop * src.mWidth + src.mCropLeft; - for (size_t y = 0; y < height; ++y) { - for (size_t x = 0; x < width; x += 2) { + for (size_t y = 0; y < src.cropHeight(); ++y) { + for (size_t x = 0; x < src.cropWidth(); x += 2) { signed y1 = (signed)src_y[x] - 16; signed y2 = (signed)src_y[x + 1] - 16; @@ -337,14 +404,16 @@ void ColorConverter::convertYUV420SemiPlanar( dst_ptr[x / 2] = (rgb2 << 16) | rgb1; } - src_y += width; + src_y += src.mWidth; if (y & 1) { - src_u += width; + src_u += src.mWidth; } - dst_ptr += dstSkip / 4; + dst_ptr += dst.mWidth / 2; } + + return OK; } uint8_t *ColorConverter::initClip() { diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp index 93ec79d..70408d7 100644 --- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp +++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp @@ -21,91 +21,129 @@ #include <binder/MemoryHeapBase.h> #include <binder/MemoryHeapPmem.h> -#include <media/stagefright/MediaDebug.h> -#include <surfaceflinger/ISurface.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/MetaData.h> +#include <surfaceflinger/Surface.h> +#include <ui/android_native_buffer.h> +#include <ui/GraphicBufferMapper.h> namespace android { SoftwareRenderer::SoftwareRenderer( - OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight, - int32_t rotationDegrees) - : mInitCheck(NO_INIT), - mColorFormat(colorFormat), - mConverter(colorFormat, OMX_COLOR_Format16bitRGB565), - mISurface(surface), - mDisplayWidth(displayWidth), - mDisplayHeight(displayHeight), - mDecodedWidth(decodedWidth), - mDecodedHeight(decodedHeight), - mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565 - mIndex(0) { - mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize); - if (mMemoryHeap->heapID() < 0) { - LOGI("Creating physical memory heap failed, reverting to regular heap."); - mMemoryHeap = new MemoryHeapBase(2 * mFrameSize); - } else { - sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap); - pmemHeap->slap(); - mMemoryHeap = pmemHeap; + const sp<Surface> &surface, const sp<MetaData> &meta) + : mConverter(NULL), + mYUVMode(None), + mSurface(surface) { + int32_t tmp; + CHECK(meta->findInt32(kKeyColorFormat, &tmp)); + mColorFormat = (OMX_COLOR_FORMATTYPE)tmp; + + CHECK(meta->findInt32(kKeyWidth, &mWidth)); + CHECK(meta->findInt32(kKeyHeight, &mHeight)); + + if (!meta->findRect( + kKeyCropRect, + &mCropLeft, &mCropTop, &mCropRight, &mCropBottom)) { + mCropLeft = mCropTop = 0; + mCropRight = mWidth - 1; + mCropBottom = mHeight - 1; } - CHECK(mISurface.get() 
!= NULL); - CHECK(mDecodedWidth > 0); - CHECK(mDecodedHeight > 0); - CHECK(mMemoryHeap->heapID() >= 0); - CHECK(mConverter.isValid()); + int32_t rotationDegrees; + if (!meta->findInt32(kKeyRotation, &rotationDegrees)) { + rotationDegrees = 0; + } - uint32_t orientation; - switch (rotationDegrees) { - case 0: orientation = ISurface::BufferHeap::ROT_0; break; - case 90: orientation = ISurface::BufferHeap::ROT_90; break; - case 180: orientation = ISurface::BufferHeap::ROT_180; break; - case 270: orientation = ISurface::BufferHeap::ROT_270; break; - default: orientation = ISurface::BufferHeap::ROT_0; break; + int halFormat; + switch (mColorFormat) { + default: + halFormat = HAL_PIXEL_FORMAT_RGB_565; + + mConverter = new ColorConverter( + mColorFormat, OMX_COLOR_Format16bitRGB565); + CHECK(mConverter->isValid()); + break; } - ISurface::BufferHeap bufferHeap( - mDisplayWidth, mDisplayHeight, - mDecodedWidth, mDecodedHeight, - PIXEL_FORMAT_RGB_565, - orientation, 0, - mMemoryHeap); + CHECK(mSurface.get() != NULL); + CHECK(mWidth > 0); + CHECK(mHeight > 0); + CHECK(mConverter == NULL || mConverter->isValid()); + + CHECK_EQ(0, + native_window_set_usage( + mSurface.get(), + GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN + | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP)); - status_t err = mISurface->registerBuffers(bufferHeap); + CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2)); - if (err != OK) { - LOGW("ISurface failed to register buffers (0x%08x)", err); + // Width must be multiple of 32??? + CHECK_EQ(0, native_window_set_buffers_geometry( + mSurface.get(), + mCropRight - mCropLeft + 1, + mCropBottom - mCropTop + 1, + halFormat)); + + uint32_t transform; + switch (rotationDegrees) { + case 0: transform = 0; break; + case 90: transform = HAL_TRANSFORM_ROT_90; break; + case 180: transform = HAL_TRANSFORM_ROT_180; break; + case 270: transform = HAL_TRANSFORM_ROT_270; break; + default: transform = 0; break; } - mInitCheck = err; + if (transform) { + CHECK_EQ(0, native_window_set_buffers_transform( + mSurface.get(), transform)); + } } SoftwareRenderer::~SoftwareRenderer() { - mISurface->unregisterBuffers(); -} - -status_t SoftwareRenderer::initCheck() const { - return mInitCheck; + delete mConverter; + mConverter = NULL; } void SoftwareRenderer::render( const void *data, size_t size, void *platformPrivate) { - if (mInitCheck != OK) { + android_native_buffer_t *buf; + int err; + if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) { + LOGW("Surface::dequeueBuffer returned error %d", err); return; } - size_t offset = mIndex * mFrameSize; - void *dst = (uint8_t *)mMemoryHeap->getBase() + offset; + CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf)); + + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + + Rect bounds(mWidth, mHeight); + + void *dst; + CHECK_EQ(0, mapper.lock( + buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst)); - mConverter.convert( - mDecodedWidth, mDecodedHeight, - data, 0, dst, 2 * mDecodedWidth); + if (mConverter) { + mConverter->convert( + data, + mWidth, mHeight, + mCropLeft, mCropTop, mCropRight, mCropBottom, + dst, + buf->stride, buf->height, + 0, 0, + mCropRight - mCropLeft, + mCropBottom - mCropTop); + } else { + TRESPASS(); + } - mISurface->postBuffer(offset); - mIndex = 1 - mIndex; + CHECK_EQ(0, mapper.unlock(buf->handle)); + + if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) { + LOGW("Surface::queueBuffer returned error %d", err); + } + buf = NULL; } } // namespace android diff --git 
a/media/libstagefright/foundation/AHierarchicalStateMachine.cpp b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp new file mode 100644 index 0000000..30286d8 --- /dev/null +++ b/media/libstagefright/foundation/AHierarchicalStateMachine.cpp @@ -0,0 +1,97 @@ +#include <media/stagefright/foundation/AHierarchicalStateMachine.h> + +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <utils/Vector.h> + +namespace android { + +AState::AState(const sp<AState> &parentState) + : mParentState(parentState) { +} + +AState::~AState() { +} + +sp<AState> AState::parentState() { + return mParentState; +} + +void AState::stateEntered() { +} + +void AState::stateExited() { +} + +//////////////////////////////////////////////////////////////////////////////// + +AHierarchicalStateMachine::AHierarchicalStateMachine() { +} + +AHierarchicalStateMachine::~AHierarchicalStateMachine() { +} + +void AHierarchicalStateMachine::onMessageReceived(const sp<AMessage> &msg) { + sp<AState> save = mState; + + sp<AState> cur = mState; + while (cur != NULL && !cur->onMessageReceived(msg)) { + // If you claim not to have handled the message you shouldn't + // have called setState... + CHECK(save == mState); + + cur = cur->parentState(); + } + + if (cur != NULL) { + return; + } + + LOGW("Warning message %s unhandled in root state.", + msg->debugString().c_str()); +} + +void AHierarchicalStateMachine::changeState(const sp<AState> &state) { + if (state == mState) { + // Quick exit for the easy case. + return; + } + + Vector<sp<AState> > A; + sp<AState> cur = mState; + for (;;) { + A.push(cur); + if (cur == NULL) { + break; + } + cur = cur->parentState(); + } + + Vector<sp<AState> > B; + cur = state; + for (;;) { + B.push(cur); + if (cur == NULL) { + break; + } + cur = cur->parentState(); + } + + // Remove the common tail. 
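    // Walk-through with hypothetical states (illustrative, not part of the
    // change): suppose the machine is in Buffering with ancestor chain
    // Buffering -> Playing -> Connected -> root, and changeState() is called
    // with Seeking, whose chain is Seeking -> Connected -> root. Both vectors
    // end in {Connected, root, NULL}; that common tail is popped below,
    // leaving A = {Buffering, Playing} and B = {Seeking}. The first loop then
    // exits Buffering and Playing (innermost outward) and the second enters
    // Seeking (outermost inward).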
+ while (A.size() > 0 && B.size() > 0 && A.top() == B.top()) { + A.pop(); + B.pop(); + } + + mState = state; + + for (size_t i = 0; i < A.size(); ++i) { + A.editItemAt(i)->stateExited(); + } + + for (size_t i = B.size(); i-- > 0;) { + B.editItemAt(i)->stateEntered(); + } +} + +} // namespace android diff --git a/media/libstagefright/foundation/AMessage.cpp b/media/libstagefright/foundation/AMessage.cpp index 26c6d42..b592c3f 100644 --- a/media/libstagefright/foundation/AMessage.cpp +++ b/media/libstagefright/foundation/AMessage.cpp @@ -23,6 +23,8 @@ #include "ALooperRoster.h" #include "AString.h" +#include <binder/Parcel.h> + namespace android { AMessage::AMessage(uint32_t what, ALooper::handler_id target) @@ -169,6 +171,18 @@ void AMessage::setMessage(const char *name, const sp<AMessage> &obj) { item->u.refValue = obj.get(); } +void AMessage::setRect( + const char *name, + int32_t left, int32_t top, int32_t right, int32_t bottom) { + Item *item = allocateItem(name); + item->mType = kTypeRect; + + item->u.rectValue.mLeft = left; + item->u.rectValue.mTop = top; + item->u.rectValue.mRight = right; + item->u.rectValue.mBottom = bottom; +} + bool AMessage::findString(const char *name, AString *value) const { const Item *item = findItem(name, kTypeString); if (item) { @@ -196,6 +210,22 @@ bool AMessage::findMessage(const char *name, sp<AMessage> *obj) const { return false; } +bool AMessage::findRect( + const char *name, + int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) const { + const Item *item = findItem(name, kTypeRect); + if (item == NULL) { + return false; + } + + *left = item->u.rectValue.mLeft; + *top = item->u.rectValue.mTop; + *right = item->u.rectValue.mRight; + *bottom = item->u.rectValue.mBottom; + + return true; +} + void AMessage::post(int64_t delayUs) { extern ALooperRoster gLooperRoster; @@ -222,13 +252,22 @@ sp<AMessage> AMessage::dup() const { } case kTypeObject: - case kTypeMessage: { to->u.refValue = from->u.refValue; to->u.refValue->incStrong(msg.get()); break; } + case kTypeMessage: + { + sp<AMessage> copy = + static_cast<AMessage *>(from->u.refValue)->dup(); + + to->u.refValue = copy.get(); + to->u.refValue->incStrong(msg.get()); + break; + } + default: { to->u = from->u; @@ -341,4 +380,136 @@ AString AMessage::debugString(int32_t indent) const { return s; } +// static +sp<AMessage> AMessage::FromParcel(const Parcel &parcel) { + int32_t what = parcel.readInt32(); + sp<AMessage> msg = new AMessage(what); + + msg->mNumItems = static_cast<size_t>(parcel.readInt32()); + + for (size_t i = 0; i < msg->mNumItems; ++i) { + Item *item = &msg->mItems[i]; + + item->mName = AAtomizer::Atomize(parcel.readCString()); + item->mType = static_cast<Type>(parcel.readInt32()); + + switch (item->mType) { + case kTypeInt32: + { + item->u.int32Value = parcel.readInt32(); + break; + } + + case kTypeInt64: + { + item->u.int64Value = parcel.readInt64(); + break; + } + + case kTypeSize: + { + item->u.sizeValue = static_cast<size_t>(parcel.readInt32()); + break; + } + + case kTypeFloat: + { + item->u.floatValue = parcel.readFloat(); + break; + } + + case kTypeDouble: + { + item->u.doubleValue = parcel.readDouble(); + break; + } + + case kTypeString: + { + item->u.stringValue = new AString(parcel.readCString()); + break; + } + + case kTypeMessage: + { + sp<AMessage> subMsg = AMessage::FromParcel(parcel); + subMsg->incStrong(msg.get()); + + item->u.refValue = subMsg.get(); + break; + } + + default: + { + LOGE("This type of object cannot cross process boundaries."); + TRESPASS(); + } + } 
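// Editor's note, not part of the patch: only int32, int64, size, float,
// double, string and nested-message items survive a trip through a Parcel;
// object and rect items (and anything else) fall into the TRESPASS() branch
// above. A hedged round-trip sketch, assuming the AMessage setters declared
// in AMessage.h (names and values are illustrative):
//
//     sp<AMessage> msg = new AMessage('stat');
//     msg->setInt32("seq", 42);
//     msg->setString("uri", "http://example.com/live.m3u8");
//
//     Parcel parcel;
//     msg->writeToParcel(&parcel);                       // sender side
//     parcel.setDataPosition(0);
//     sp<AMessage> copy = AMessage::FromParcel(parcel);  // receiver side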
+ } + + return msg; +} + +void AMessage::writeToParcel(Parcel *parcel) const { + parcel->writeInt32(static_cast<int32_t>(mWhat)); + parcel->writeInt32(static_cast<int32_t>(mNumItems)); + + for (size_t i = 0; i < mNumItems; ++i) { + const Item &item = mItems[i]; + + parcel->writeCString(item.mName); + parcel->writeInt32(static_cast<int32_t>(item.mType)); + + switch (item.mType) { + case kTypeInt32: + { + parcel->writeInt32(item.u.int32Value); + break; + } + + case kTypeInt64: + { + parcel->writeInt64(item.u.int64Value); + break; + } + + case kTypeSize: + { + parcel->writeInt32(static_cast<int32_t>(item.u.sizeValue)); + break; + } + + case kTypeFloat: + { + parcel->writeFloat(item.u.floatValue); + break; + } + + case kTypeDouble: + { + parcel->writeDouble(item.u.doubleValue); + break; + } + + case kTypeString: + { + parcel->writeCString(item.u.stringValue->c_str()); + break; + } + + case kTypeMessage: + { + static_cast<AMessage *>(item.u.refValue)->writeToParcel(parcel); + break; + } + + default: + { + LOGE("This type of object cannot cross process boundaries."); + TRESPASS(); + } + } + } +} + } // namespace android diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk index ffa7db0..4e07f6f 100644 --- a/media/libstagefright/foundation/Android.mk +++ b/media/libstagefright/foundation/Android.mk @@ -1,16 +1,17 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) -LOCAL_SRC_FILES:= \ - AAtomizer.cpp \ - ABitReader.cpp \ - ABuffer.cpp \ - AHandler.cpp \ - ALooper.cpp \ - ALooperRoster.cpp \ - AMessage.cpp \ - AString.cpp \ - base64.cpp \ +LOCAL_SRC_FILES:= \ + AAtomizer.cpp \ + ABitReader.cpp \ + ABuffer.cpp \ + AHandler.cpp \ + AHierarchicalStateMachine.cpp \ + ALooper.cpp \ + ALooperRoster.cpp \ + AMessage.cpp \ + AString.cpp \ + base64.cpp \ hexdump.cpp LOCAL_C_INCLUDES:= \ @@ -18,14 +19,7 @@ LOCAL_C_INCLUDES:= \ LOCAL_SHARED_LIBRARIES := \ libbinder \ - libmedia \ libutils \ - libcutils \ - libui \ - libsonivox \ - libvorbisidec \ - libsurfaceflinger_client \ - libcamera_client LOCAL_CFLAGS += -Wno-multichar diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk index cc7dd4f..9225e41 100644 --- a/media/libstagefright/httplive/Android.mk +++ b/media/libstagefright/httplive/Android.mk @@ -2,14 +2,16 @@ LOCAL_PATH:= $(call my-dir) include $(CLEAR_VARS) -LOCAL_SRC_FILES:= \ - LiveSource.cpp \ - M3UParser.cpp \ +LOCAL_SRC_FILES:= \ + LiveDataSource.cpp \ + LiveSession.cpp \ + M3UParser.cpp \ LOCAL_C_INCLUDES:= \ $(JNI_H_INCLUDE) \ $(TOP)/frameworks/base/include/media/stagefright/openmax \ - $(TOP)/frameworks/base/media/libstagefright + $(TOP)/frameworks/base/media/libstagefright \ + $(TOP)/external/openssl/include LOCAL_MODULE:= libstagefright_httplive diff --git a/media/libstagefright/httplive/LiveDataSource.cpp b/media/libstagefright/httplive/LiveDataSource.cpp new file mode 100644 index 0000000..5f5c6d4 --- /dev/null +++ b/media/libstagefright/httplive/LiveDataSource.cpp @@ -0,0 +1,173 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "LiveDataSource" +#include <utils/Log.h> + +#include "LiveDataSource.h" + +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> + +#define SAVE_BACKUP 0 + +namespace android { + +LiveDataSource::LiveDataSource() + : mOffset(0), + mFinalResult(OK), + mBackupFile(NULL) { +#if SAVE_BACKUP + mBackupFile = fopen("/data/misc/backup.ts", "wb"); + CHECK(mBackupFile != NULL); +#endif +} + +LiveDataSource::~LiveDataSource() { + if (mBackupFile != NULL) { + fclose(mBackupFile); + mBackupFile = NULL; + } +} + +status_t LiveDataSource::initCheck() const { + return OK; +} + +size_t LiveDataSource::countQueuedBuffers() { + Mutex::Autolock autoLock(mLock); + + return mBufferQueue.size(); +} + +ssize_t LiveDataSource::readAtNonBlocking( + off64_t offset, void *data, size_t size) { + Mutex::Autolock autoLock(mLock); + + if (offset != mOffset) { + LOGE("Attempt at reading non-sequentially from LiveDataSource."); + return -EPIPE; + } + + size_t totalAvailable = 0; + for (List<sp<ABuffer> >::iterator it = mBufferQueue.begin(); + it != mBufferQueue.end(); ++it) { + sp<ABuffer> buffer = *it; + + totalAvailable += buffer->size(); + + if (totalAvailable >= size) { + break; + } + } + + if (totalAvailable < size) { + return mFinalResult == OK ? -EWOULDBLOCK : mFinalResult; + } + + return readAt_l(offset, data, size); +} + +ssize_t LiveDataSource::readAt(off64_t offset, void *data, size_t size) { + Mutex::Autolock autoLock(mLock); + return readAt_l(offset, data, size); +} + +ssize_t LiveDataSource::readAt_l(off64_t offset, void *data, size_t size) { + if (offset != mOffset) { + LOGE("Attempt at reading non-sequentially from LiveDataSource."); + return -EPIPE; + } + + size_t sizeDone = 0; + + while (sizeDone < size) { + while (mBufferQueue.empty() && mFinalResult == OK) { + mCondition.wait(mLock); + } + + if (mBufferQueue.empty()) { + if (sizeDone > 0) { + mOffset += sizeDone; + return sizeDone; + } + + return mFinalResult; + } + + sp<ABuffer> buffer = *mBufferQueue.begin(); + + size_t copy = size - sizeDone; + + if (copy > buffer->size()) { + copy = buffer->size(); + } + + memcpy((uint8_t *)data + sizeDone, buffer->data(), copy); + + sizeDone += copy; + + buffer->setRange(buffer->offset() + copy, buffer->size() - copy); + + if (buffer->size() == 0) { + mBufferQueue.erase(mBufferQueue.begin()); + } + } + + mOffset += sizeDone; + + return sizeDone; +} + +void LiveDataSource::queueBuffer(const sp<ABuffer> &buffer) { + Mutex::Autolock autoLock(mLock); + + if (mFinalResult != OK) { + return; + } + +#if SAVE_BACKUP + if (mBackupFile != NULL) { + CHECK_EQ(fwrite(buffer->data(), 1, buffer->size(), mBackupFile), + buffer->size()); + } +#endif + + mBufferQueue.push_back(buffer); + mCondition.broadcast(); +} + +void LiveDataSource::queueEOS(status_t finalResult) { + CHECK_NE(finalResult, (status_t)OK); + + Mutex::Autolock autoLock(mLock); + + mFinalResult = finalResult; + mCondition.broadcast(); +} + +void LiveDataSource::reset() { + Mutex::Autolock autoLock(mLock); + + // XXX FIXME: If we've done a partial read and waiting for more buffers, + // we'll mix old and new data... 
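// (Editor's elaboration of the FIXME above, not part of the patch: a reader
//  blocked inside readAt_l() keeps the bytes it has already copied in
//  sizeDone, and reset() neither rewinds mOffset nor wakes that reader, so
//  once fresh post-seek segments are queued the reader resumes and appends
//  the new bytes directly after the stale ones.)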
+ + mFinalResult = OK; + mBufferQueue.clear(); +} + +} // namespace android diff --git a/media/libstagefright/httplive/LiveDataSource.h b/media/libstagefright/httplive/LiveDataSource.h new file mode 100644 index 0000000..b7be637 --- /dev/null +++ b/media/libstagefright/httplive/LiveDataSource.h @@ -0,0 +1,64 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LIVE_DATA_SOURCE_H_ + +#define LIVE_DATA_SOURCE_H_ + +#include <media/stagefright/foundation/ABase.h> +#include <media/stagefright/DataSource.h> +#include <utils/threads.h> +#include <utils/List.h> + +namespace android { + +struct ABuffer; + +struct LiveDataSource : public DataSource { + LiveDataSource(); + + virtual status_t initCheck() const; + + virtual ssize_t readAt(off64_t offset, void *data, size_t size); + ssize_t readAtNonBlocking(off64_t offset, void *data, size_t size); + + void queueBuffer(const sp<ABuffer> &buffer); + void queueEOS(status_t finalResult); + void reset(); + + size_t countQueuedBuffers(); + +protected: + virtual ~LiveDataSource(); + +private: + Mutex mLock; + Condition mCondition; + + off64_t mOffset; + List<sp<ABuffer> > mBufferQueue; + status_t mFinalResult; + + FILE *mBackupFile; + + ssize_t readAt_l(off64_t offset, void *data, size_t size); + + DISALLOW_EVIL_CONSTRUCTORS(LiveDataSource); +}; + +} // namespace android + +#endif // LIVE_DATA_SOURCE_H_ diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp new file mode 100644 index 0000000..c6c36e3 --- /dev/null +++ b/media/libstagefright/httplive/LiveSession.cpp @@ -0,0 +1,721 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "LiveSession" +#include <utils/Log.h> + +#include "include/LiveSession.h" + +#include "LiveDataSource.h" + +#include "include/M3UParser.h" +#include "include/NuHTTPDataSource.h" + +#include <cutils/properties.h> +#include <media/stagefright/foundation/hexdump.h> +#include <media/stagefright/foundation/ABuffer.h> +#include <media/stagefright/foundation/ADebug.h> +#include <media/stagefright/foundation/AMessage.h> +#include <media/stagefright/DataSource.h> +#include <media/stagefright/FileSource.h> +#include <media/stagefright/MediaErrors.h> + +#include <ctype.h> +#include <openssl/aes.h> + +namespace android { + +const int64_t LiveSession::kMaxPlaylistAgeUs = 15000000ll; + +LiveSession::LiveSession() + : mDataSource(new LiveDataSource), + mHTTPDataSource(new NuHTTPDataSource), + mPrevBandwidthIndex(-1), + mLastPlaylistFetchTimeUs(-1), + mSeqNumber(-1), + mSeekTimeUs(-1), + mNumRetries(0), + mDurationUs(-1), + mSeekDone(false), + mMonitorQueueGeneration(0) { +} + +LiveSession::~LiveSession() { +} + +sp<DataSource> LiveSession::getDataSource() { + return mDataSource; +} + +void LiveSession::connect(const char *url) { + sp<AMessage> msg = new AMessage(kWhatConnect, id()); + msg->setString("url", url); + msg->post(); +} + +void LiveSession::disconnect() { + (new AMessage(kWhatDisconnect, id()))->post(); +} + +void LiveSession::seekTo(int64_t timeUs) { + Mutex::Autolock autoLock(mLock); + mSeekDone = false; + + sp<AMessage> msg = new AMessage(kWhatSeek, id()); + msg->setInt64("timeUs", timeUs); + msg->post(); + + while (!mSeekDone) { + mCondition.wait(mLock); + } +} + +void LiveSession::onMessageReceived(const sp<AMessage> &msg) { + switch (msg->what()) { + case kWhatConnect: + onConnect(msg); + break; + + case kWhatDisconnect: + onDisconnect(); + break; + + case kWhatMonitorQueue: + { + int32_t generation; + CHECK(msg->findInt32("generation", &generation)); + + if (generation != mMonitorQueueGeneration) { + // Stale event + break; + } + + onMonitorQueue(); + break; + } + + case kWhatSeek: + onSeek(msg); + break; + + default: + TRESPASS(); + break; + } +} + +// static +int LiveSession::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) { + if (a->mBandwidth < b->mBandwidth) { + return -1; + } else if (a->mBandwidth == b->mBandwidth) { + return 0; + } + + return 1; +} + +void LiveSession::onConnect(const sp<AMessage> &msg) { + AString url; + CHECK(msg->findString("url", &url)); + + LOGI("onConnect '%s'", url.c_str()); + + mMasterURL = url; + + sp<M3UParser> playlist = fetchPlaylist(url.c_str()); + CHECK(playlist != NULL); + + if (playlist->isVariantPlaylist()) { + for (size_t i = 0; i < playlist->size(); ++i) { + BandwidthItem item; + + sp<AMessage> meta; + playlist->itemAt(i, &item.mURI, &meta); + + unsigned long bandwidth; + CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth)); + + mBandwidthItems.push(item); + } + + CHECK_GT(mBandwidthItems.size(), 0u); + + mBandwidthItems.sort(SortByBandwidth); + + char value[PROPERTY_VALUE_MAX]; + if (property_get("media.httplive.disable-nuplayer", value, NULL) + && (!strcasecmp(value, "true") || !strcmp(value, "1"))) { + // The "legacy" player cannot deal with audio format changes, + // some streams use different audio encoding parameters for + // their lowest bandwidth stream. + if (mBandwidthItems.size() > 1) { + // XXX Remove the lowest bitrate stream for now... 
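// (Editor's note, not part of the patch: mBandwidthItems was sorted in
//  ascending order by SortByBandwidth above, so removeAt(0) below drops the
//  lowest-bandwidth variant; the size() > 1 guard keeps at least one stream
//  available.)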
+ mBandwidthItems.removeAt(0); + } + } + } + + postMonitorQueue(); +} + +void LiveSession::onDisconnect() { + LOGI("onDisconnect"); + + mDataSource->queueEOS(ERROR_END_OF_STREAM); +} + +status_t LiveSession::fetchFile(const char *url, sp<ABuffer> *out) { + *out = NULL; + + sp<DataSource> source; + + if (!strncasecmp(url, "file://", 7)) { + source = new FileSource(url + 7); + } else if (strncasecmp(url, "http://", 7)) { + return ERROR_UNSUPPORTED; + } else { + status_t err = mHTTPDataSource->connect(url); + + if (err != OK) { + return err; + } + + source = mHTTPDataSource; + } + + off64_t size; + status_t err = source->getSize(&size); + + if (err != OK) { + size = 65536; + } + + sp<ABuffer> buffer = new ABuffer(size); + buffer->setRange(0, 0); + + for (;;) { + size_t bufferRemaining = buffer->capacity() - buffer->size(); + + if (bufferRemaining == 0) { + bufferRemaining = 32768; + + LOGV("increasing download buffer to %d bytes", + buffer->size() + bufferRemaining); + + sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining); + memcpy(copy->data(), buffer->data(), buffer->size()); + copy->setRange(0, buffer->size()); + + buffer = copy; + } + + ssize_t n = source->readAt( + buffer->size(), buffer->data() + buffer->size(), + bufferRemaining); + + if (n < 0) { + return n; + } + + if (n == 0) { + break; + } + + buffer->setRange(0, buffer->size() + (size_t)n); + } + + *out = buffer; + + return OK; +} + +sp<M3UParser> LiveSession::fetchPlaylist(const char *url) { + sp<ABuffer> buffer; + status_t err = fetchFile(url, &buffer); + + if (err != OK) { + return NULL; + } + + sp<M3UParser> playlist = + new M3UParser(url, buffer->data(), buffer->size()); + + if (playlist->initCheck() != OK) { + return NULL; + } + + return playlist; +} + +static double uniformRand() { + return (double)rand() / RAND_MAX; +} + +size_t LiveSession::getBandwidthIndex() { + if (mBandwidthItems.size() == 0) { + return 0; + } + +#if 1 + int32_t bandwidthBps; + if (mHTTPDataSource != NULL + && mHTTPDataSource->estimateBandwidth(&bandwidthBps)) { + LOGV("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f); + } else { + LOGV("no bandwidth estimate."); + return 0; // Pick the lowest bandwidth stream by default. + } + + char value[PROPERTY_VALUE_MAX]; + if (property_get("media.httplive.max-bw", value, NULL)) { + char *end; + long maxBw = strtoul(value, &end, 10); + if (end > value && *end == '\0') { + if (maxBw > 0 && bandwidthBps > maxBw) { + LOGV("bandwidth capped to %ld bps", maxBw); + bandwidthBps = maxBw; + } + } + } + + // Consider only 80% of the available bandwidth usable. + bandwidthBps = (bandwidthBps * 8) / 10; + + // Pick the highest bandwidth stream below or equal to estimated bandwidth. 
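// (Editor's illustration, not part of the patch: with variants sorted
//  ascending at 200 kbps, 500 kbps, 1 Mbps and 2 Mbps and a raw estimate of
//  1.2 Mbps, the 80% derating above leaves roughly 960 kbps, so the loop
//  below stops at the 500 kbps variant; if even the lowest variant exceeds
//  the usable estimate, index 0 is used as the floor.)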
+ + size_t index = mBandwidthItems.size() - 1; + while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth + > (size_t)bandwidthBps) { + --index; + } +#elif 0 + // Change bandwidth at random() + size_t index = uniformRand() * mBandwidthItems.size(); +#elif 0 + // There's a 50% chance to stay on the current bandwidth and + // a 50% chance to switch to the next higher bandwidth (wrapping around + // to lowest) + const size_t kMinIndex = 0; + + size_t index; + if (mPrevBandwidthIndex < 0) { + index = kMinIndex; + } else if (uniformRand() < 0.5) { + index = (size_t)mPrevBandwidthIndex; + } else { + index = mPrevBandwidthIndex + 1; + if (index == mBandwidthItems.size()) { + index = kMinIndex; + } + } +#elif 0 + // Pick the highest bandwidth stream below or equal to 1.2 Mbit/sec + + size_t index = mBandwidthItems.size() - 1; + while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth > 1200000) { + --index; + } +#else + size_t index = mBandwidthItems.size() - 1; // Highest bandwidth stream +#endif + + return index; +} + +void LiveSession::onDownloadNext() { + size_t bandwidthIndex = getBandwidthIndex(); + +rinse_repeat: + int64_t nowUs = ALooper::GetNowUs(); + + if (mLastPlaylistFetchTimeUs < 0 + || (ssize_t)bandwidthIndex != mPrevBandwidthIndex + || (!mPlaylist->isComplete() + && mLastPlaylistFetchTimeUs + kMaxPlaylistAgeUs <= nowUs)) { + AString url; + if (mBandwidthItems.size() > 0) { + url = mBandwidthItems.editItemAt(bandwidthIndex).mURI; + } else { + url = mMasterURL; + } + + bool firstTime = (mPlaylist == NULL); + + mPlaylist = fetchPlaylist(url.c_str()); + if (mPlaylist == NULL) { + LOGE("failed to load playlist at url '%s'", url.c_str()); + mDataSource->queueEOS(ERROR_IO); + return; + } + + if (firstTime) { + Mutex::Autolock autoLock(mLock); + + int32_t targetDuration; + if (!mPlaylist->isComplete() + || !mPlaylist->meta()->findInt32( + "target-duration", &targetDuration)) { + mDurationUs = -1; + } else { + mDurationUs = 1000000ll * targetDuration * mPlaylist->size(); + } + } + + mLastPlaylistFetchTimeUs = ALooper::GetNowUs(); + } + + int32_t firstSeqNumberInPlaylist; + if (mPlaylist->meta() == NULL || !mPlaylist->meta()->findInt32( + "media-sequence", &firstSeqNumberInPlaylist)) { + firstSeqNumberInPlaylist = 0; + } + + bool explicitDiscontinuity = false; + bool bandwidthChanged = false; + + if (mSeekTimeUs >= 0) { + int32_t targetDuration; + if (mPlaylist->isComplete() && + mPlaylist->meta()->findInt32( + "target-duration", &targetDuration)) { + int64_t seekTimeSecs = (mSeekTimeUs + 500000ll) / 1000000ll; + int64_t index = seekTimeSecs / targetDuration; + + if (index >= 0 && index < mPlaylist->size()) { + int32_t newSeqNumber = firstSeqNumberInPlaylist + index; + + if (newSeqNumber != mSeqNumber) { + LOGI("seeking to seq no %d", newSeqNumber); + + mSeqNumber = newSeqNumber; + + mDataSource->reset(); + + // reseting the data source will have had the + // side effect of discarding any previously queued + // bandwidth change discontinuity. + // Therefore we'll need to treat these explicit + // discontinuities as involving a bandwidth change + // even if they aren't directly. 
+ explicitDiscontinuity = true; + bandwidthChanged = true; + } + } + } + + mSeekTimeUs = -1; + + Mutex::Autolock autoLock(mLock); + mSeekDone = true; + mCondition.broadcast(); + } + + if (mSeqNumber < 0) { + if (mPlaylist->isComplete()) { + mSeqNumber = firstSeqNumberInPlaylist; + } else { + mSeqNumber = firstSeqNumberInPlaylist + mPlaylist->size() / 2; + } + } + + int32_t lastSeqNumberInPlaylist = + firstSeqNumberInPlaylist + (int32_t)mPlaylist->size() - 1; + + if (mSeqNumber < firstSeqNumberInPlaylist + || mSeqNumber > lastSeqNumberInPlaylist) { + if (mPrevBandwidthIndex != (ssize_t)bandwidthIndex) { + // Go back to the previous bandwidth. + + LOGI("new bandwidth does not have the sequence number " + "we're looking for, switching back to previous bandwidth"); + + mLastPlaylistFetchTimeUs = -1; + bandwidthIndex = mPrevBandwidthIndex; + goto rinse_repeat; + } + + if (!mPlaylist->isComplete() + && mSeqNumber > lastSeqNumberInPlaylist + && mNumRetries < kMaxNumRetries) { + ++mNumRetries; + + mLastPlaylistFetchTimeUs = -1; + postMonitorQueue(3000000ll); + return; + } + + LOGE("Cannot find sequence number %d in playlist " + "(contains %d - %d)", + mSeqNumber, firstSeqNumberInPlaylist, + firstSeqNumberInPlaylist + mPlaylist->size() - 1); + + mDataSource->queueEOS(ERROR_END_OF_STREAM); + return; + } + + mNumRetries = 0; + + AString uri; + sp<AMessage> itemMeta; + CHECK(mPlaylist->itemAt( + mSeqNumber - firstSeqNumberInPlaylist, + &uri, + &itemMeta)); + + int32_t val; + if (itemMeta->findInt32("discontinuity", &val) && val != 0) { + explicitDiscontinuity = true; + } + + sp<ABuffer> buffer; + status_t err = fetchFile(uri.c_str(), &buffer); + if (err != OK) { + LOGE("failed to fetch .ts segment at url '%s'", uri.c_str()); + mDataSource->queueEOS(err); + return; + } + + CHECK(buffer != NULL); + + err = decryptBuffer(mSeqNumber - firstSeqNumberInPlaylist, buffer); + + if (err != OK) { + LOGE("decryptBuffer failed w/ error %d", err); + + mDataSource->queueEOS(err); + return; + } + + if (buffer->size() == 0 || buffer->data()[0] != 0x47) { + // Not a transport stream??? + + LOGE("This doesn't look like a transport stream..."); + + mBandwidthItems.removeAt(bandwidthIndex); + + if (mBandwidthItems.isEmpty()) { + mDataSource->queueEOS(ERROR_UNSUPPORTED); + return; + } + + LOGI("Retrying with a different bandwidth stream."); + + mLastPlaylistFetchTimeUs = -1; + bandwidthIndex = getBandwidthIndex(); + mPrevBandwidthIndex = bandwidthIndex; + mSeqNumber = -1; + + goto rinse_repeat; + } + + if ((size_t)mPrevBandwidthIndex != bandwidthIndex) { + bandwidthChanged = true; + } + + if (mPrevBandwidthIndex < 0) { + // Don't signal a bandwidth change at the very beginning of + // playback. + bandwidthChanged = false; + } + + if (explicitDiscontinuity || bandwidthChanged) { + // Signal discontinuity. 
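// (Editor's note, not part of the patch: the marker queued below is a single
//  TS-packet-sized buffer, 188 bytes of zeros with the bandwidthChanged flag
//  in byte 1; since its first byte is 0x00 rather than the 0x47 sync byte,
//  the transport-stream consumer can tell it apart from real packets and
//  treat it as an in-band discontinuity signal.)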
+ + LOGI("queueing discontinuity (explicit=%d, bandwidthChanged=%d)", + explicitDiscontinuity, bandwidthChanged); + + sp<ABuffer> tmp = new ABuffer(188); + memset(tmp->data(), 0, tmp->size()); + tmp->data()[1] = bandwidthChanged; + + mDataSource->queueBuffer(tmp); + } + + mDataSource->queueBuffer(buffer); + + mPrevBandwidthIndex = bandwidthIndex; + ++mSeqNumber; + + postMonitorQueue(); +} + +void LiveSession::onMonitorQueue() { + if (mSeekTimeUs >= 0 + || mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) { + onDownloadNext(); + } else { + postMonitorQueue(1000000ll); + } +} + +status_t LiveSession::decryptBuffer( + size_t playlistIndex, const sp<ABuffer> &buffer) { + sp<AMessage> itemMeta; + bool found = false; + AString method; + + for (ssize_t i = playlistIndex; i >= 0; --i) { + AString uri; + CHECK(mPlaylist->itemAt(i, &uri, &itemMeta)); + + if (itemMeta->findString("cipher-method", &method)) { + found = true; + break; + } + } + + if (!found) { + method = "NONE"; + } + + if (method == "NONE") { + return OK; + } else if (!(method == "AES-128")) { + LOGE("Unsupported cipher method '%s'", method.c_str()); + return ERROR_UNSUPPORTED; + } + + AString keyURI; + if (!itemMeta->findString("cipher-uri", &keyURI)) { + LOGE("Missing key uri"); + return ERROR_MALFORMED; + } + + ssize_t index = mAESKeyForURI.indexOfKey(keyURI); + + sp<ABuffer> key; + if (index >= 0) { + key = mAESKeyForURI.valueAt(index); + } else { + key = new ABuffer(16); + + sp<NuHTTPDataSource> keySource = new NuHTTPDataSource; + status_t err = keySource->connect(keyURI.c_str()); + + if (err == OK) { + size_t offset = 0; + while (offset < 16) { + ssize_t n = keySource->readAt( + offset, key->data() + offset, 16 - offset); + if (n <= 0) { + err = ERROR_IO; + break; + } + + offset += n; + } + } + + if (err != OK) { + LOGE("failed to fetch cipher key from '%s'.", keyURI.c_str()); + return ERROR_IO; + } + + mAESKeyForURI.add(keyURI, key); + } + + AES_KEY aes_key; + if (AES_set_decrypt_key(key->data(), 128, &aes_key) != 0) { + LOGE("failed to set AES decryption key."); + return UNKNOWN_ERROR; + } + + unsigned char aes_ivec[16]; + + AString iv; + if (itemMeta->findString("cipher-iv", &iv)) { + if ((!iv.startsWith("0x") && !iv.startsWith("0X")) + || iv.size() != 16 * 2 + 2) { + LOGE("malformed cipher IV '%s'.", iv.c_str()); + return ERROR_MALFORMED; + } + + memset(aes_ivec, 0, sizeof(aes_ivec)); + for (size_t i = 0; i < 16; ++i) { + char c1 = tolower(iv.c_str()[2 + 2 * i]); + char c2 = tolower(iv.c_str()[3 + 2 * i]); + if (!isxdigit(c1) || !isxdigit(c2)) { + LOGE("malformed cipher IV '%s'.", iv.c_str()); + return ERROR_MALFORMED; + } + uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10; + uint8_t nibble2 = isdigit(c2) ? 
c2 - '0' : c2 - 'a' + 10; + + aes_ivec[i] = nibble1 << 4 | nibble2; + } + } else { + memset(aes_ivec, 0, sizeof(aes_ivec)); + aes_ivec[15] = mSeqNumber & 0xff; + aes_ivec[14] = (mSeqNumber >> 8) & 0xff; + aes_ivec[13] = (mSeqNumber >> 16) & 0xff; + aes_ivec[12] = (mSeqNumber >> 24) & 0xff; + } + + AES_cbc_encrypt( + buffer->data(), buffer->data(), buffer->size(), + &aes_key, aes_ivec, AES_DECRYPT); + + // hexdump(buffer->data(), buffer->size()); + + size_t n = buffer->size(); + CHECK_GT(n, 0u); + + size_t pad = buffer->data()[n - 1]; + + CHECK_GT(pad, 0u); + CHECK_LE(pad, 16u); + CHECK_GE((size_t)n, pad); + for (size_t i = 0; i < pad; ++i) { + CHECK_EQ((unsigned)buffer->data()[n - 1 - i], pad); + } + + n -= pad; + + buffer->setRange(buffer->offset(), n); + + return OK; +} + +void LiveSession::postMonitorQueue(int64_t delayUs) { + sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id()); + msg->setInt32("generation", ++mMonitorQueueGeneration); + msg->post(delayUs); +} + +void LiveSession::onSeek(const sp<AMessage> &msg) { + int64_t timeUs; + CHECK(msg->findInt64("timeUs", &timeUs)); + + mSeekTimeUs = timeUs; + postMonitorQueue(); +} + +status_t LiveSession::getDuration(int64_t *durationUs) { + Mutex::Autolock autoLock(mLock); + *durationUs = mDurationUs; + + return OK; +} + +bool LiveSession::isSeekable() { + int64_t durationUs; + return getDuration(&durationUs) == OK && durationUs >= 0; +} + +} // namespace android + diff --git a/media/libstagefright/httplive/LiveSource.cpp b/media/libstagefright/httplive/LiveSource.cpp deleted file mode 100644 index 29c7b04..0000000 --- a/media/libstagefright/httplive/LiveSource.cpp +++ /dev/null @@ -1,428 +0,0 @@ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -//#define LOG_NDEBUG 0 -#define LOG_TAG "LiveSource" -#include <utils/Log.h> - -#include "include/LiveSource.h" -#include "include/M3UParser.h" -#include "include/NuHTTPDataSource.h" - -#include <media/stagefright/foundation/ABuffer.h> -#include <media/stagefright/FileSource.h> -#include <media/stagefright/MediaDebug.h> - -namespace android { - -LiveSource::LiveSource(const char *url) - : mMasterURL(url), - mInitCheck(NO_INIT), - mDurationUs(-1), - mPlaylistIndex(0), - mLastFetchTimeUs(-1), - mSource(new NuHTTPDataSource), - mSourceSize(0), - mOffsetBias(0), - mSignalDiscontinuity(false), - mPrevBandwidthIndex(-1) { - if (switchToNext()) { - mInitCheck = OK; - - determineSeekability(); - } -} - -LiveSource::~LiveSource() { -} - -status_t LiveSource::initCheck() const { - return mInitCheck; -} - -// static -int LiveSource::SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b) { - if (a->mBandwidth < b->mBandwidth) { - return -1; - } else if (a->mBandwidth == b->mBandwidth) { - return 0; - } - - return 1; -} - -static double uniformRand() { - return (double)rand() / RAND_MAX; -} - -bool LiveSource::loadPlaylist(bool fetchMaster) { - mSignalDiscontinuity = false; - - mPlaylist.clear(); - mPlaylistIndex = 0; - - if (fetchMaster) { - mPrevBandwidthIndex = -1; - - sp<ABuffer> buffer; - status_t err = fetchM3U(mMasterURL.c_str(), &buffer); - - if (err != OK) { - return false; - } - - mPlaylist = new M3UParser( - mMasterURL.c_str(), buffer->data(), buffer->size()); - - if (mPlaylist->initCheck() != OK) { - return false; - } - - if (mPlaylist->isVariantPlaylist()) { - for (size_t i = 0; i < mPlaylist->size(); ++i) { - BandwidthItem item; - - sp<AMessage> meta; - mPlaylist->itemAt(i, &item.mURI, &meta); - - unsigned long bandwidth; - CHECK(meta->findInt32("bandwidth", (int32_t *)&item.mBandwidth)); - - mBandwidthItems.push(item); - } - mPlaylist.clear(); - - // fall through - if (mBandwidthItems.size() == 0) { - return false; - } - - mBandwidthItems.sort(SortByBandwidth); - - for (size_t i = 0; i < mBandwidthItems.size(); ++i) { - const BandwidthItem &item = mBandwidthItems.itemAt(i); - LOGV("item #%d: %s", i, item.mURI.c_str()); - } - } - } - - if (mBandwidthItems.size() > 0) { -#if 0 - // Change bandwidth at random() - size_t index = uniformRand() * mBandwidthItems.size(); -#elif 0 - // There's a 50% chance to stay on the current bandwidth and - // a 50% chance to switch to the next higher bandwidth (wrapping around - // to lowest) - size_t index; - if (uniformRand() < 0.5) { - index = mPrevBandwidthIndex < 0 ? 0 : (size_t)mPrevBandwidthIndex; - } else { - if (mPrevBandwidthIndex < 0) { - index = 0; - } else { - index = mPrevBandwidthIndex + 1; - if (index == mBandwidthItems.size()) { - index = 0; - } - } - } -#else - // Stay on the lowest bandwidth available. - size_t index = mBandwidthItems.size() - 1; // Highest bandwidth stream -#endif - - mURL = mBandwidthItems.editItemAt(index).mURI; - - if (mPrevBandwidthIndex >= 0 && (size_t)mPrevBandwidthIndex != index) { - // If we switched streams because of bandwidth changes, - // we'll signal this discontinuity by inserting a - // special transport stream packet into the stream. 
- mSignalDiscontinuity = true; - } - - mPrevBandwidthIndex = index; - } else { - mURL = mMasterURL; - } - - if (mPlaylist == NULL) { - sp<ABuffer> buffer; - status_t err = fetchM3U(mURL.c_str(), &buffer); - - if (err != OK) { - return false; - } - - mPlaylist = new M3UParser(mURL.c_str(), buffer->data(), buffer->size()); - - if (mPlaylist->initCheck() != OK) { - return false; - } - - if (mPlaylist->isVariantPlaylist()) { - return false; - } - } - - if (!mPlaylist->meta()->findInt32( - "media-sequence", &mFirstItemSequenceNumber)) { - mFirstItemSequenceNumber = 0; - } - - return true; -} - -static int64_t getNowUs() { - struct timeval tv; - gettimeofday(&tv, NULL); - - return (int64_t)tv.tv_usec + tv.tv_sec * 1000000ll; -} - -bool LiveSource::switchToNext() { - mSignalDiscontinuity = false; - - mOffsetBias += mSourceSize; - mSourceSize = 0; - - if (mLastFetchTimeUs < 0 || getNowUs() >= mLastFetchTimeUs + 15000000ll - || mPlaylistIndex == mPlaylist->size()) { - int32_t nextSequenceNumber = - mPlaylistIndex + mFirstItemSequenceNumber; - - if (!loadPlaylist(mLastFetchTimeUs < 0)) { - LOGE("failed to reload playlist"); - return false; - } - - if (mLastFetchTimeUs < 0) { - mPlaylistIndex = 0; - } else { - if (nextSequenceNumber < mFirstItemSequenceNumber - || nextSequenceNumber - >= mFirstItemSequenceNumber + (int32_t)mPlaylist->size()) { - LOGE("Cannot find sequence number %d in new playlist", - nextSequenceNumber); - - return false; - } - - mPlaylistIndex = nextSequenceNumber - mFirstItemSequenceNumber; - } - - mLastFetchTimeUs = getNowUs(); - } - - AString uri; - sp<AMessage> itemMeta; - CHECK(mPlaylist->itemAt(mPlaylistIndex, &uri, &itemMeta)); - LOGV("switching to %s", uri.c_str()); - - if (mSource->connect(uri.c_str()) != OK - || mSource->getSize(&mSourceSize) != OK) { - return false; - } - - int32_t val; - if (itemMeta->findInt32("discontinuity", &val) && val != 0) { - mSignalDiscontinuity = true; - } - - mPlaylistIndex++; - return true; -} - -static const ssize_t kHeaderSize = 188; - -ssize_t LiveSource::readAt(off_t offset, void *data, size_t size) { - CHECK(offset >= mOffsetBias); - offset -= mOffsetBias; - - off_t delta = mSignalDiscontinuity ? kHeaderSize : 0; - - if (offset >= mSourceSize + delta) { - CHECK_EQ(offset, mSourceSize + delta); - - offset -= mSourceSize + delta; - if (!switchToNext()) { - return ERROR_END_OF_STREAM; - } - - if (mSignalDiscontinuity) { - LOGV("switchToNext changed streams"); - } else { - LOGV("switchToNext stayed within the same stream"); - } - - mOffsetBias += delta; - - delta = mSignalDiscontinuity ? 
kHeaderSize : 0; - } - - if (offset < delta) { - size_t avail = delta - offset; - memset(data, 0, avail); - return avail; - } - - size_t numRead = 0; - while (numRead < size) { - ssize_t n = mSource->readAt( - offset + numRead - delta, - (uint8_t *)data + numRead, size - numRead); - - if (n <= 0) { - break; - } - - numRead += n; - } - - return numRead; -} - -status_t LiveSource::fetchM3U(const char *url, sp<ABuffer> *out) { - *out = NULL; - - sp<DataSource> source; - - if (!strncasecmp(url, "file://", 7)) { - source = new FileSource(url + 7); - } else { - CHECK(!strncasecmp(url, "http://", 7)); - - status_t err = mSource->connect(url); - - if (err != OK) { - return err; - } - - source = mSource; - } - - off_t size; - status_t err = source->getSize(&size); - - if (err != OK) { - size = 65536; - } - - sp<ABuffer> buffer = new ABuffer(size); - buffer->setRange(0, 0); - - for (;;) { - size_t bufferRemaining = buffer->capacity() - buffer->size(); - - if (bufferRemaining == 0) { - bufferRemaining = 32768; - - LOGV("increasing download buffer to %d bytes", - buffer->size() + bufferRemaining); - - sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining); - memcpy(copy->data(), buffer->data(), buffer->size()); - copy->setRange(0, buffer->size()); - - buffer = copy; - } - - ssize_t n = source->readAt( - buffer->size(), buffer->data() + buffer->size(), - bufferRemaining); - - if (n < 0) { - return err; - } - - if (n == 0) { - break; - } - - buffer->setRange(0, buffer->size() + (size_t)n); - } - - *out = buffer; - - return OK; -} - -bool LiveSource::seekTo(int64_t seekTimeUs) { - LOGV("seek to %lld us", seekTimeUs); - - if (!mPlaylist->isComplete()) { - return false; - } - - int32_t targetDuration; - if (!mPlaylist->meta()->findInt32("target-duration", &targetDuration)) { - return false; - } - - int64_t seekTimeSecs = (seekTimeUs + 500000ll) / 1000000ll; - - int64_t index = seekTimeSecs / targetDuration; - - if (index < 0 || index >= mPlaylist->size()) { - return false; - } - - size_t newPlaylistIndex = mFirstItemSequenceNumber + index; - - if (newPlaylistIndex == mPlaylistIndex) { - return false; - } - - mPlaylistIndex = newPlaylistIndex; - - switchToNext(); - mOffsetBias = 0; - - LOGV("seeking to index %lld", index); - - return true; -} - -bool LiveSource::getDuration(int64_t *durationUs) const { - if (mDurationUs >= 0) { - *durationUs = mDurationUs; - return true; - } - - *durationUs = 0; - return false; -} - -bool LiveSource::isSeekable() const { - return mDurationUs >= 0; -} - -void LiveSource::determineSeekability() { - mDurationUs = -1; - - if (!mPlaylist->isComplete()) { - return; - } - - int32_t targetDuration; - if (!mPlaylist->meta()->findInt32("target-duration", &targetDuration)) { - return; - } - - mDurationUs = targetDuration * 1000000ll * mPlaylist->size(); -} - -} // namespace android diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index 90f3d6d..95f6741 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -14,6 +14,10 @@ * limitations under the License. 
*/ +//#define LOG_NDEBUG 0 +#define LOG_TAG "M3UParser" +#include <utils/Log.h> + #include "include/M3UParser.h" #include <media/stagefright/foundation/AMessage.h> @@ -80,14 +84,18 @@ static bool MakeURL(const char *baseURL, const char *url, AString *out) { out->clear(); if (strncasecmp("http://", baseURL, 7) + && strncasecmp("https://", baseURL, 8) && strncasecmp("file://", baseURL, 7)) { // Base URL must be absolute return false; } - if (!strncasecmp("http://", url, 7)) { + if (!strncasecmp("http://", url, 7) || !strncasecmp("https://", url, 8)) { // "url" is already an absolute URL, ignore base URL. out->setTo(url); + + LOGV("base:'%s', url:'%s' => '%s'", baseURL, url, out->c_str()); + return true; } @@ -108,6 +116,8 @@ static bool MakeURL(const char *baseURL, const char *url, AString *out) { out->append(url); } + LOGV("base:'%s', url:'%s' => '%s'", baseURL, url, out->c_str()); + return true; } @@ -158,6 +168,11 @@ status_t M3UParser::parse(const void *_data, size_t size) { return ERROR_MALFORMED; } err = parseMetaData(line, &mMeta, "media-sequence"); + } else if (line.startsWith("#EXT-X-KEY")) { + if (mIsVariantPlaylist) { + return ERROR_MALFORMED; + } + err = parseCipherInfo(line, &itemMeta, mBaseURI); } else if (line.startsWith("#EXT-X-ENDLIST")) { mIsComplete = true; } else if (line.startsWith("#EXTINF")) { @@ -292,6 +307,75 @@ status_t M3UParser::parseStreamInf( } // static +status_t M3UParser::parseCipherInfo( + const AString &line, sp<AMessage> *meta, const AString &baseURI) { + ssize_t colonPos = line.find(":"); + + if (colonPos < 0) { + return ERROR_MALFORMED; + } + + size_t offset = colonPos + 1; + + while (offset < line.size()) { + ssize_t end = line.find(",", offset); + if (end < 0) { + end = line.size(); + } + + AString attr(line, offset, end - offset); + attr.trim(); + + offset = end + 1; + + ssize_t equalPos = attr.find("="); + if (equalPos < 0) { + continue; + } + + AString key(attr, 0, equalPos); + key.trim(); + + AString val(attr, equalPos + 1, attr.size() - equalPos - 1); + val.trim(); + + LOGV("key=%s value=%s", key.c_str(), val.c_str()); + + key.tolower(); + + if (key == "method" || key == "uri" || key == "iv") { + if (meta->get() == NULL) { + *meta = new AMessage; + } + + if (key == "uri") { + if (val.size() >= 2 + && val.c_str()[0] == '"' + && val.c_str()[val.size() - 1] == '"') { + // Remove surrounding quotes. 
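// (Editor's illustration, not part of the patch: for a playlist line such as
//      #EXT-X-KEY:METHOD=AES-128,URI="https://example.com/key?id=5",IV=0x0123456789abcdef0123456789abcdef
//  the URI attribute arrives here still wrapped in double quotes; after the
//  quotes are stripped and the value is resolved against the playlist's base
//  URI via MakeURL(), the attributes are stored in the item meta as
//  "cipher-method", "cipher-uri" and "cipher-iv", which
//  LiveSession::decryptBuffer() consumes.)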
+ AString tmp(val, 1, val.size() - 2); + val = tmp; + } + + AString absURI; + if (MakeURL(baseURI.c_str(), val.c_str(), &absURI)) { + val = absURI; + } else { + LOGE("failed to make absolute url for '%s'.", + val.c_str()); + } + } + + key.insert(AString("cipher-"), 0); + + (*meta)->setString(key.c_str(), val.c_str(), val.size()); + } + } + + return OK; +} + +// static status_t M3UParser::ParseInt32(const char *s, int32_t *x) { char *end; long lval = strtol(s, &end, 10); diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp index da340f7..45e018d 100644 --- a/media/libstagefright/id3/ID3.cpp +++ b/media/libstagefright/id3/ID3.cpp @@ -149,7 +149,25 @@ struct id3_header { } if (header.version_major == 4) { - if (!removeUnsynchronizationV2_4()) { + void *copy = malloc(size); + memcpy(copy, mData, size); + + bool success = removeUnsynchronizationV2_4(false /* iTunesHack */); + if (!success) { + memcpy(mData, copy, size); + mSize = size; + + success = removeUnsynchronizationV2_4(true /* iTunesHack */); + + if (success) { + LOGV("Had to apply the iTunes hack to parse this ID3 tag"); + } + } + + free(copy); + copy = NULL; + + if (!success) { free(mData); mData = NULL; @@ -261,7 +279,7 @@ static void WriteSyncsafeInteger(uint8_t *dst, size_t x) { } } -bool ID3::removeUnsynchronizationV2_4() { +bool ID3::removeUnsynchronizationV2_4(bool iTunesHack) { size_t oldSize = mSize; size_t offset = 0; @@ -271,7 +289,9 @@ bool ID3::removeUnsynchronizationV2_4() { } size_t dataSize; - if (!ParseSyncsafeInteger(&mData[offset + 4], &dataSize)) { + if (iTunesHack) { + dataSize = U32_AT(&mData[offset + 4]); + } else if (!ParseSyncsafeInteger(&mData[offset + 4], &dataSize)) { return false; } @@ -308,7 +328,7 @@ bool ID3::removeUnsynchronizationV2_4() { flags &= ~2; } - if (flags != prevFlags) { + if (flags != prevFlags || iTunesHack) { WriteSyncsafeInteger(&mData[offset + 4], dataSize); mData[offset + 8] = flags >> 8; mData[offset + 9] = flags & 0xff; @@ -769,8 +789,8 @@ ID3::getAlbumArt(size_t *length, String8 *mime) const { bool ID3::parseV1(const sp<DataSource> &source) { const size_t V1_TAG_SIZE = 128; - off_t size; - if (source->getSize(&size) != OK || size < (off_t)V1_TAG_SIZE) { + off64_t size; + if (source->getSize(&size) != OK || size < (off64_t)V1_TAG_SIZE) { return false; } diff --git a/media/libstagefright/include/AACEncoder.h b/media/libstagefright/include/AACEncoder.h index ecc533f..3d5fc60 100644 --- a/media/libstagefright/include/AACEncoder.h +++ b/media/libstagefright/include/AACEncoder.h @@ -60,7 +60,7 @@ class AACEncoder: public MediaSource { kNumSamplesPerFrame = 1024, }; - int16_t mInputFrame[kNumSamplesPerFrame]; + int16_t *mInputFrame; uint8_t mAudioSpecificConfigData[2]; // auido specific data void *mEncoderHandle; diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index e352928..41ef181 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -45,10 +45,11 @@ struct UDPPusher; class DrmManagerClinet; class DecryptHandle; +struct LiveSession; + struct AwesomeRenderer : public RefBase { AwesomeRenderer() {} - virtual status_t initCheck() const = 0; virtual void render(MediaBuffer *buffer) = 0; private: @@ -68,6 +69,8 @@ struct AwesomePlayer { status_t setDataSource(int fd, int64_t offset, int64_t length); + status_t setDataSource(const sp<IStreamSource> &source); + void reset(); status_t prepare(); @@ -80,7 +83,7 @@ struct AwesomePlayer { bool isPlaying() 
const; - void setISurface(const sp<ISurface> &isurface); + void setSurface(const sp<Surface> &surface); void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink); status_t setLooping(bool shouldLoop); @@ -89,11 +92,6 @@ struct AwesomePlayer { status_t seekTo(int64_t timeUs); - status_t getVideoDimensions(int32_t *width, int32_t *height) const; - - status_t suspend(); - status_t resume(); - // This is a mask of MediaExtractor::Flags. uint32_t flags() const; @@ -102,6 +100,7 @@ struct AwesomePlayer { private: friend struct AwesomeEvent; + friend struct PreviewPlayer; enum { PLAYING = 1, @@ -115,6 +114,11 @@ private: AUDIO_AT_EOS = 256, VIDEO_AT_EOS = 512, AUTO_LOOPING = 1024, + + // We are basically done preparing but are currently buffering + // sufficient data to begin playback and finish the preparation phase + // for good. + PREPARING_CONNECTED = 2048, }; mutable Mutex mLock; @@ -125,7 +129,7 @@ private: bool mQueueStarted; wp<MediaPlayerBase> mListener; - sp<ISurface> mISurface; + sp<Surface> mSurface; sp<MediaPlayerBase::AudioSink> mAudioSink; SystemTimeSource mSystemTimeSource; @@ -146,10 +150,13 @@ private: AudioPlayer *mAudioPlayer; int64_t mDurationUs; + int32_t mDisplayWidth; + int32_t mDisplayHeight; + uint32_t mFlags; uint32_t mExtractorFlags; + uint32_t mSinceLastDropped; - int32_t mVideoWidth, mVideoHeight; int64_t mTimeSourceDeltaUs; int64_t mVideoTimeUs; @@ -170,6 +177,8 @@ private: bool mBufferingEventPending; sp<TimedEventQueue::Event> mCheckAudioStatusEvent; bool mAudioStatusEventPending; + sp<TimedEventQueue::Event> mVideoLagEvent; + bool mVideoLagEventPending; sp<TimedEventQueue::Event> mAsyncPrepareEvent; Condition mPreparedCondition; @@ -181,9 +190,9 @@ private: void postBufferingEvent_l(); void postStreamDoneEvent_l(status_t status); void postCheckAudioStatusEvent_l(); + void postVideoLagEvent_l(); status_t play_l(); - MediaBuffer *mLastVideoBuffer; MediaBuffer *mVideoBuffer; sp<NuHTTPDataSource> mConnectingDataSource; @@ -194,31 +203,7 @@ private: sp<ARTPSession> mRTPSession; sp<UDPPusher> mRTPPusher, mRTCPPusher; - struct SuspensionState { - String8 mUri; - KeyedVector<String8, String8> mUriHeaders; - sp<DataSource> mFileSource; - - uint32_t mFlags; - int64_t mPositionUs; - - void *mLastVideoFrame; - size_t mLastVideoFrameSize; - int32_t mColorFormat; - int32_t mVideoWidth, mVideoHeight; - int32_t mDecodedWidth, mDecodedHeight; - - SuspensionState() - : mLastVideoFrame(NULL) { - } - - ~SuspensionState() { - if (mLastVideoFrame) { - free(mLastVideoFrame); - mLastVideoFrame = NULL; - } - } - } *mSuspensionState; + sp<LiveSession> mLiveSession; DrmManagerClient *mDrmManagerClient; DecryptHandle *mDecryptHandle; @@ -233,7 +218,8 @@ private: void partial_reset_l(); status_t seekTo_l(int64_t timeUs); status_t pause_l(bool at_eos = false); - status_t initRenderer_l(); + void initRenderer_l(); + void notifyVideoSize_l(); void seekAudioIfNecessary_l(); void cancelPlayerEvents(bool keepBufferingGoing = false); @@ -254,6 +240,7 @@ private: void onPrepareAsyncEvent(); void abortPrepare(status_t err); void finishAsyncPrepare_l(); + void onVideoLagUpdate(); bool getCachedDuration_l(int64_t *durationUs, bool *eos); @@ -267,6 +254,7 @@ private: bool getBitrate(int64_t *bitrate); void finishSeekIfNecessary(int64_t videoTimeUs); + void ensureCacheIsFetching_l(); AwesomePlayer(const AwesomePlayer &); AwesomePlayer &operator=(const AwesomePlayer &); diff --git a/media/libstagefright/include/HTTPStream.h b/media/libstagefright/include/HTTPStream.h index 793798f..545cd0c 100644 
--- a/media/libstagefright/include/HTTPStream.h +++ b/media/libstagefright/include/HTTPStream.h @@ -18,10 +18,9 @@ #define HTTP_STREAM_H_ -#include "stagefright_string.h" - #include <sys/types.h> +#include <media/stagefright/foundation/AString.h> #include <media/stagefright/MediaErrors.h> #include <utils/KeyedVector.h> #include <utils/threads.h> @@ -50,7 +49,7 @@ public: static const char *kStatusKey; bool find_header_value( - const string &key, string *value) const; + const AString &key, AString *value) const; // Pass a negative value to disable the timeout. void setReceiveTimeout(int seconds); @@ -70,7 +69,7 @@ private: Mutex mLock; int mSocket; - KeyedVector<string, string> mHeaders; + KeyedVector<AString, AString> mHeaders; HTTPStream(const HTTPStream &); HTTPStream &operator=(const HTTPStream &); diff --git a/media/libstagefright/include/ID3.h b/media/libstagefright/include/ID3.h index 7ddbb41..98c82a4 100644 --- a/media/libstagefright/include/ID3.h +++ b/media/libstagefright/include/ID3.h @@ -80,7 +80,7 @@ private: bool parseV1(const sp<DataSource> &source); bool parseV2(const sp<DataSource> &source); void removeUnsynchronization(); - bool removeUnsynchronizationV2_4(); + bool removeUnsynchronizationV2_4(bool iTunesHack); static bool ParseSyncsafeInteger(const uint8_t encoded[4], size_t *x); diff --git a/media/libstagefright/include/LiveSession.h b/media/libstagefright/include/LiveSession.h new file mode 100644 index 0000000..41f5ad0 --- /dev/null +++ b/media/libstagefright/include/LiveSession.h @@ -0,0 +1,115 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef LIVE_SESSION_H_ + +#define LIVE_SESSION_H_ + +#include <media/stagefright/foundation/AHandler.h> + +namespace android { + +struct ABuffer; +struct DataSource; +struct LiveDataSource; +struct M3UParser; +struct NuHTTPDataSource; + +struct LiveSession : public AHandler { + LiveSession(); + + sp<DataSource> getDataSource(); + + void connect(const char *url); + void disconnect(); + + // Blocks until seek is complete. 
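// (Editor's note, not part of the patch: the method declared below posts a
//  kWhatSeek message and then waits on an internal condition until
//  onDownloadNext() has serviced the seek, so it must not be called from the
//  session's own looper thread. Typical use, as wired up elsewhere in this
//  patch: register the LiveSession on an ALooper, connect(url), hand
//  getDataSource() to an MPEG2TSExtractor via setLiveSession(), and drive
//  seekTo()/disconnect() from the player thread.)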
+ void seekTo(int64_t timeUs); + + status_t getDuration(int64_t *durationUs); + bool isSeekable(); + +protected: + virtual ~LiveSession(); + + virtual void onMessageReceived(const sp<AMessage> &msg); + +private: + enum { + kMaxNumQueuedFragments = 3, + kMaxNumRetries = 5, + }; + + static const int64_t kMaxPlaylistAgeUs; + + enum { + kWhatConnect = 'conn', + kWhatDisconnect = 'disc', + kWhatMonitorQueue = 'moni', + kWhatSeek = 'seek', + }; + + struct BandwidthItem { + AString mURI; + unsigned long mBandwidth; + }; + + sp<LiveDataSource> mDataSource; + + sp<NuHTTPDataSource> mHTTPDataSource; + + AString mMasterURL; + Vector<BandwidthItem> mBandwidthItems; + + KeyedVector<AString, sp<ABuffer> > mAESKeyForURI; + + ssize_t mPrevBandwidthIndex; + int64_t mLastPlaylistFetchTimeUs; + sp<M3UParser> mPlaylist; + int32_t mSeqNumber; + int64_t mSeekTimeUs; + int32_t mNumRetries; + + Mutex mLock; + Condition mCondition; + int64_t mDurationUs; + bool mSeekDone; + + int32_t mMonitorQueueGeneration; + + void onConnect(const sp<AMessage> &msg); + void onDisconnect(); + void onDownloadNext(); + void onMonitorQueue(); + void onSeek(const sp<AMessage> &msg); + + status_t fetchFile(const char *url, sp<ABuffer> *out); + sp<M3UParser> fetchPlaylist(const char *url); + size_t getBandwidthIndex(); + + status_t decryptBuffer( + size_t playlistIndex, const sp<ABuffer> &buffer); + + void postMonitorQueue(int64_t delayUs = 0); + + static int SortByBandwidth(const BandwidthItem *, const BandwidthItem *); + + DISALLOW_EVIL_CONSTRUCTORS(LiveSession); +}; + +} // namespace android + +#endif // LIVE_SESSION_H_ diff --git a/media/libstagefright/include/LiveSource.h b/media/libstagefright/include/LiveSource.h deleted file mode 100644 index 55dd45e..0000000 --- a/media/libstagefright/include/LiveSource.h +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -#ifndef LIVE_SOURCE_H_ - -#define LIVE_SOURCE_H_ - -#include <media/stagefright/foundation/ABase.h> -#include <media/stagefright/foundation/AString.h> -#include <media/stagefright/DataSource.h> -#include <utils/Vector.h> - -namespace android { - -struct ABuffer; -struct NuHTTPDataSource; -struct M3UParser; - -struct LiveSource : public DataSource { - LiveSource(const char *url); - - virtual status_t initCheck() const; - - virtual ssize_t readAt(off_t offset, void *data, size_t size); - - virtual uint32_t flags() { - return kWantsPrefetching; - } - - bool getDuration(int64_t *durationUs) const; - - bool isSeekable() const; - bool seekTo(int64_t seekTimeUs); - -protected: - virtual ~LiveSource(); - -private: - struct BandwidthItem { - AString mURI; - unsigned long mBandwidth; - }; - Vector<BandwidthItem> mBandwidthItems; - - AString mMasterURL; - AString mURL; - status_t mInitCheck; - int64_t mDurationUs; - - sp<M3UParser> mPlaylist; - int32_t mFirstItemSequenceNumber; - size_t mPlaylistIndex; - int64_t mLastFetchTimeUs; - - sp<NuHTTPDataSource> mSource; - off_t mSourceSize; - off_t mOffsetBias; - - bool mSignalDiscontinuity; - ssize_t mPrevBandwidthIndex; - - status_t fetchM3U(const char *url, sp<ABuffer> *buffer); - - static int SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b); - - bool switchToNext(); - bool loadPlaylist(bool fetchMaster); - void determineSeekability(); - - DISALLOW_EVIL_CONSTRUCTORS(LiveSource); -}; - -} // namespace android - -#endif // LIVE_SOURCE_H_ diff --git a/media/libstagefright/include/M3UParser.h b/media/libstagefright/include/M3UParser.h index bd9eebe..63895b4 100644 --- a/media/libstagefright/include/M3UParser.h +++ b/media/libstagefright/include/M3UParser.h @@ -66,6 +66,9 @@ private: static status_t parseStreamInf( const AString &line, sp<AMessage> *meta); + static status_t parseCipherInfo( + const AString &line, sp<AMessage> *meta, const AString &baseURI); + static status_t ParseInt32(const char *s, int32_t *x); DISALLOW_EVIL_CONSTRUCTORS(M3UParser); diff --git a/media/libstagefright/include/MP3Extractor.h b/media/libstagefright/include/MP3Extractor.h index 30136e7..728980e 100644 --- a/media/libstagefright/include/MP3Extractor.h +++ b/media/libstagefright/include/MP3Extractor.h @@ -24,6 +24,7 @@ namespace android { struct AMessage; class DataSource; +struct MP3Seeker; class String8; class MP3Extractor : public MediaExtractor { @@ -37,15 +38,19 @@ public: virtual sp<MetaData> getMetaData(); + static bool get_mp3_frame_size( + uint32_t header, size_t *frame_size, + int *out_sampling_rate = NULL, int *out_channels = NULL, + int *out_bitrate = NULL); + private: status_t mInitCheck; sp<DataSource> mDataSource; - off_t mFirstFramePos; + off64_t mFirstFramePos; sp<MetaData> mMeta; uint32_t mFixedHeader; - int32_t mByteNumber; // total number of bytes in this MP3 - char mTableOfContents[99]; // TOC entries in XING header + sp<MP3Seeker> mSeeker; MP3Extractor(const MP3Extractor &); MP3Extractor &operator=(const MP3Extractor &); diff --git a/media/libstagefright/include/MP3Seeker.h b/media/libstagefright/include/MP3Seeker.h new file mode 100644 index 0000000..599542e --- /dev/null +++ b/media/libstagefright/include/MP3Seeker.h @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MP3_SEEKER_H_ + +#define MP3_SEEKER_H_ + +#include <media/stagefright/foundation/ABase.h> +#include <utils/RefBase.h> + +namespace android { + +struct MP3Seeker : public RefBase { + MP3Seeker() {} + + virtual bool getDuration(int64_t *durationUs) = 0; + + // Given a request seek time in "*timeUs", find the byte offset closest + // to that position and return it in "*pos". Update "*timeUs" to reflect + // the actual time that seekpoint represents. + virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos) = 0; + +protected: + virtual ~MP3Seeker() {} + +private: + DISALLOW_EVIL_CONSTRUCTORS(MP3Seeker); +}; + +} // namespace android + +#endif // MP3_SEEKER_H_ + diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h index d83b538..efe7496 100644 --- a/media/libstagefright/include/MPEG2TSExtractor.h +++ b/media/libstagefright/include/MPEG2TSExtractor.h @@ -15,7 +15,7 @@ struct ATSParser; struct DataSource; struct MPEG2TSSource; struct String8; -struct LiveSource; +struct LiveSession; struct MPEG2TSExtractor : public MediaExtractor { MPEG2TSExtractor(const sp<DataSource> &source); @@ -28,7 +28,7 @@ struct MPEG2TSExtractor : public MediaExtractor { virtual uint32_t flags() const; - void setLiveSource(const sp<LiveSource> &liveSource); + void setLiveSession(const sp<LiveSession> &liveSession); void seekTo(int64_t seekTimeUs); private: @@ -37,13 +37,13 @@ private: mutable Mutex mLock; sp<DataSource> mDataSource; - sp<LiveSource> mLiveSource; + sp<LiveSession> mLiveSession; sp<ATSParser> mParser; Vector<sp<AnotherPacketSource> > mSourceImpls; - off_t mOffset; + off64_t mOffset; void init(); status_t feedMore(); diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h index bc2e4dc..04e8a6a 100644 --- a/media/libstagefright/include/MPEG4Extractor.h +++ b/media/libstagefright/include/MPEG4Extractor.h @@ -67,8 +67,8 @@ private: Vector<uint32_t> mPath; status_t readMetaData(); - status_t parseChunk(off_t *offset, int depth); - status_t parseMetaData(off_t offset, size_t size); + status_t parseChunk(off64_t *offset, int depth); + status_t parseMetaData(off64_t offset, size_t size); status_t updateAudioTrackInfoFromESDS_MPEG4Audio( const void *esds_data, size_t esds_size); @@ -86,9 +86,9 @@ private: SINF *mFirstSINF; bool mIsDrm; - status_t parseDrmSINF(off_t *offset, off_t data_offset); + status_t parseDrmSINF(off64_t *offset, off64_t data_offset); - status_t parseTrackHeader(off_t data_offset, off_t data_size); + status_t parseTrackHeader(off64_t data_offset, off64_t data_size); MPEG4Extractor(const MPEG4Extractor &); MPEG4Extractor &operator=(const MPEG4Extractor &); diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h index 1fb2088..28840be 100644 --- a/media/libstagefright/include/NuCachedSource2.h +++ b/media/libstagefright/include/NuCachedSource2.h @@ -32,18 +32,20 @@ struct NuCachedSource2 : public DataSource { virtual status_t initCheck() const; - virtual ssize_t readAt(off_t offset, void *data, 
size_t size); + virtual ssize_t readAt(off64_t offset, void *data, size_t size); - virtual status_t getSize(off_t *size); + virtual status_t getSize(off64_t *size); virtual uint32_t flags(); + virtual DecryptHandle* DrmInitialization(DrmManagerClient *client); + virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client); + virtual String8 getUri(); //////////////////////////////////////////////////////////////////////////// size_t cachedSize(); - size_t approxDataRemaining(bool *eos); + size_t approxDataRemaining(status_t *finalStatus); - void suspend(); - void clearCacheAndResume(); + void resumeFetchingIfNecessary(); protected: virtual ~NuCachedSource2(); @@ -53,8 +55,8 @@ private: enum { kPageSize = 65536, - kHighWaterThreshold = 5 * 1024 * 1024, - kLowWaterThreshold = 512 * 1024, + kHighWaterThreshold = 20 * 1024 * 1024, + kLowWaterThreshold = 4 * 1024 * 1024, // Read data after a 15 sec timeout whether we're actively // fetching or not. @@ -64,7 +66,6 @@ private: enum { kWhatFetchMore = 'fetc', kWhatRead = 'read', - kWhatSuspend = 'susp', }; sp<DataSource> mSource; @@ -76,25 +77,23 @@ private: Condition mCondition; PageCache *mCache; - off_t mCacheOffset; + off64_t mCacheOffset; status_t mFinalStatus; - off_t mLastAccessPos; + off64_t mLastAccessPos; sp<AMessage> mAsyncResult; bool mFetching; int64_t mLastFetchTimeUs; - bool mSuspended; void onMessageReceived(const sp<AMessage> &msg); void onFetch(); void onRead(const sp<AMessage> &msg); - void onSuspend(); void fetchInternal(); - ssize_t readInternal(off_t offset, void *data, size_t size); - status_t seekInternal_l(off_t offset); + ssize_t readInternal(off64_t offset, void *data, size_t size); + status_t seekInternal_l(off64_t offset); - size_t approxDataRemaining_l(bool *eos); - void restartPrefetcherIfNecessary_l(); + size_t approxDataRemaining_l(status_t *finalStatus); + void restartPrefetcherIfNecessary_l(bool ignoreLowWaterThreshold = false); DISALLOW_EVIL_CONSTRUCTORS(NuCachedSource2); }; diff --git a/media/libstagefright/include/NuHTTPDataSource.h b/media/libstagefright/include/NuHTTPDataSource.h index 3c88cc2..c8e93be 100644 --- a/media/libstagefright/include/NuHTTPDataSource.h +++ b/media/libstagefright/include/NuHTTPDataSource.h @@ -3,6 +3,7 @@ #define NU_HTTP_DATA_SOURCE_H_ #include <media/stagefright/DataSource.h> +#include <utils/List.h> #include <utils/String8.h> #include <utils/threads.h> @@ -16,16 +17,24 @@ struct NuHTTPDataSource : public DataSource { status_t connect( const char *uri, const KeyedVector<String8, String8> *headers = NULL, - off_t offset = 0); + off64_t offset = 0); void disconnect(); virtual status_t initCheck() const; - virtual ssize_t readAt(off_t offset, void *data, size_t size); - virtual status_t getSize(off_t *size); + virtual ssize_t readAt(off64_t offset, void *data, size_t size); + virtual status_t getSize(off64_t *size); virtual uint32_t flags(); + // Returns true if bandwidth could successfully be estimated, + // false otherwise. 
+ bool estimateBandwidth(int32_t *bandwidth_bps); + + virtual DecryptHandle* DrmInitialization(DrmManagerClient *client); + virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client); + virtual String8 getUri(); + protected: virtual ~NuHTTPDataSource(); @@ -36,6 +45,11 @@ private: CONNECTED }; + struct BandwidthEntry { + int64_t mDelayUs; + size_t mNumBytes; + }; + Mutex mLock; State mState; @@ -44,10 +58,11 @@ private: unsigned mPort; String8 mPath; String8 mHeaders; + String8 mUri; HTTPStream mHTTP; - off_t mOffset; - off_t mContentLength; + off64_t mOffset; + off64_t mContentLength; bool mContentLengthValid; bool mHasChunkedTransferEncoding; @@ -55,18 +70,27 @@ // chunk header (or -1 if no more chunks). ssize_t mChunkDataBytesLeft; + List<BandwidthEntry> mBandwidthHistory; + size_t mNumBandwidthHistoryItems; + int64_t mTotalTransferTimeUs; + size_t mTotalTransferBytes; + + DecryptHandle *mDecryptHandle; + DrmManagerClient *mDrmManagerClient; + status_t connect( - const char *uri, const String8 &headers, off_t offset); + const char *uri, const String8 &headers, off64_t offset); status_t connect( const char *host, unsigned port, const char *path, const String8 &headers, - off_t offset); + off64_t offset); // Read up to "size" bytes of data, respect transfer encoding. ssize_t internalRead(void *data, size_t size); void applyTimeoutResponse(); + void addBandwidthMeasurement_l(size_t numBytes, int64_t delayUs); static void MakeFullHeaders( const KeyedVector<String8, String8> *overrides, diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h index 72ab5aa..5fed98a 100644 --- a/media/libstagefright/include/OMX.h +++ b/media/libstagefright/include/OMX.h @@ -59,10 +59,20 @@ public: node_id node, OMX_INDEXTYPE index, const void *params, size_t size); + virtual status_t enableGraphicBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable); + + virtual status_t storeMetaDataInBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable); + virtual status_t useBuffer( node_id node, OMX_U32 port_index, const sp<IMemory> &params, buffer_id *buffer); + virtual status_t useGraphicBuffer( + node_id node, OMX_U32 port_index, + const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer); + virtual status_t allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data); @@ -87,14 +97,6 @@ public: const char *parameter_name, OMX_INDEXTYPE *index); - virtual sp<IOMXRenderer> createRenderer( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees); - virtual void binderDied(const wp<IBinder> &the_late_who); OMX_ERRORTYPE OnEvent( diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h index b5b31ac..86c102c 100644 --- a/media/libstagefright/include/OMXNodeInstance.h +++ b/media/libstagefright/include/OMXNodeInstance.h @@ -49,10 +49,17 @@ struct OMXNodeInstance { status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size); status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size); + status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable); + status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable); + status_t useBuffer( OMX_U32 portIndex, const sp<IMemory> &params, OMX::buffer_id *buffer); + status_t useGraphicBuffer( + OMX_U32 portIndex, const sp<GraphicBuffer>
&graphicBuffer, + OMX::buffer_id *buffer); + status_t allocateBuffer( OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer, void **buffer_data); @@ -125,4 +132,3 @@ private: } // namespace android #endif // OMX_NODE_INSTANCE_H_ - diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h index a5eaed9..b5a043c 100644 --- a/media/libstagefright/include/SampleIterator.h +++ b/media/libstagefright/include/SampleIterator.h @@ -27,7 +27,7 @@ struct SampleIterator { uint32_t getChunkIndex() const { return mCurrentChunkIndex; } uint32_t getDescIndex() const { return mChunkDesc; } - off_t getSampleOffset() const { return mCurrentSampleOffset; } + off64_t getSampleOffset() const { return mCurrentSampleOffset; } size_t getSampleSize() const { return mCurrentSampleSize; } uint32_t getSampleTime() const { return mCurrentSampleTime; } @@ -48,7 +48,7 @@ private: uint32_t mChunkDesc; uint32_t mCurrentChunkIndex; - off_t mCurrentChunkOffset; + off64_t mCurrentChunkOffset; Vector<size_t> mCurrentChunkSampleSizes; uint32_t mTimeToSampleIndex; @@ -58,13 +58,13 @@ private: uint32_t mTTSDuration; uint32_t mCurrentSampleIndex; - off_t mCurrentSampleOffset; + off64_t mCurrentSampleOffset; size_t mCurrentSampleSize; uint32_t mCurrentSampleTime; void reset(); status_t findChunkRange(uint32_t sampleIndex); - status_t getChunkOffset(uint32_t chunk, off_t *offset); + status_t getChunkOffset(uint32_t chunk, off64_t *offset); status_t findSampleTime(uint32_t sampleIndex, uint32_t *time); SampleIterator(const SampleIterator &); diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h index f830690..c5e8136 100644 --- a/media/libstagefright/include/SampleTable.h +++ b/media/libstagefright/include/SampleTable.h @@ -36,17 +36,17 @@ public: // type can be 'stco' or 'co64'. status_t setChunkOffsetParams( - uint32_t type, off_t data_offset, size_t data_size); + uint32_t type, off64_t data_offset, size_t data_size); - status_t setSampleToChunkParams(off_t data_offset, size_t data_size); + status_t setSampleToChunkParams(off64_t data_offset, size_t data_size); // type can be 'stsz' or 'stz2'. 
status_t setSampleSizeParams( - uint32_t type, off_t data_offset, size_t data_size); + uint32_t type, off64_t data_offset, size_t data_size); - status_t setTimeToSampleParams(off_t data_offset, size_t data_size); + status_t setTimeToSampleParams(off64_t data_offset, size_t data_size); - status_t setSyncSampleParams(off_t data_offset, size_t data_size); + status_t setSyncSampleParams(off64_t data_offset, size_t data_size); //////////////////////////////////////////////////////////////////////////// @@ -58,7 +58,7 @@ public: status_t getMetaDataForSample( uint32_t sampleIndex, - off_t *offset, + off64_t *offset, size_t *size, uint32_t *decodingTime, bool *isSyncSample = NULL); @@ -89,14 +89,14 @@ private: sp<DataSource> mDataSource; Mutex mLock; - off_t mChunkOffsetOffset; + off64_t mChunkOffsetOffset; uint32_t mChunkOffsetType; uint32_t mNumChunkOffsets; - off_t mSampleToChunkOffset; + off64_t mSampleToChunkOffset; uint32_t mNumSampleToChunkOffsets; - off_t mSampleSizeOffset; + off64_t mSampleSizeOffset; uint32_t mSampleSizeFieldSize; uint32_t mDefaultSampleSize; uint32_t mNumSampleSizes; @@ -104,7 +104,7 @@ private: uint32_t mTimeToSampleCount; uint32_t *mTimeToSample; - off_t mSyncSampleOffset; + off64_t mSyncSampleOffset; uint32_t mNumSyncSamples; uint32_t *mSyncSamples; size_t mLastSyncSampleIndex; diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h index 89d7cc4..90d3fe1 100644 --- a/media/libstagefright/include/SoftwareRenderer.h +++ b/media/libstagefright/include/SoftwareRenderer.h @@ -19,40 +19,34 @@ #define SOFTWARE_RENDERER_H_ #include <media/stagefright/ColorConverter.h> -#include <media/stagefright/VideoRenderer.h> #include <utils/RefBase.h> namespace android { -class ISurface; -class MemoryHeapBase; +struct MetaData; +class Surface; -class SoftwareRenderer : public VideoRenderer { +class SoftwareRenderer { public: SoftwareRenderer( - OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight, - int32_t rotationDegrees = 0); + const sp<Surface> &surface, const sp<MetaData> &meta); - virtual ~SoftwareRenderer(); + ~SoftwareRenderer(); - status_t initCheck() const; - - virtual void render( + void render( const void *data, size_t size, void *platformPrivate); private: - status_t mInitCheck; + enum YUVMode { + None, + }; + OMX_COLOR_FORMATTYPE mColorFormat; - ColorConverter mConverter; - sp<ISurface> mISurface; - size_t mDisplayWidth, mDisplayHeight; - size_t mDecodedWidth, mDecodedHeight; - size_t mFrameSize; - sp<MemoryHeapBase> mMemoryHeap; - int mIndex; + ColorConverter *mConverter; + YUVMode mYUVMode; + sp<Surface> mSurface; + int32_t mWidth, mHeight; + int32_t mCropLeft, mCropTop, mCropRight, mCropBottom; SoftwareRenderer(const SoftwareRenderer &); SoftwareRenderer &operator=(const SoftwareRenderer &); diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h index 88164b3..8928a4a 100644 --- a/media/libstagefright/include/ThrottledSource.h +++ b/media/libstagefright/include/ThrottledSource.h @@ -30,9 +30,9 @@ struct ThrottledSource : public DataSource { virtual status_t initCheck() const; - virtual ssize_t readAt(off_t offset, void *data, size_t size); + virtual ssize_t readAt(off64_t offset, void *data, size_t size); - virtual status_t getSize(off_t *size); + virtual status_t getSize(off64_t *size); virtual uint32_t flags(); private: diff --git 
a/media/libstagefright/include/VBRISeeker.h b/media/libstagefright/include/VBRISeeker.h new file mode 100644 index 0000000..1a2bf9f --- /dev/null +++ b/media/libstagefright/include/VBRISeeker.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef VBRI_SEEKER_H_ + +#define VBRI_SEEKER_H_ + +#include "include/MP3Seeker.h" + +#include <utils/Vector.h> + +namespace android { + +struct DataSource; + +struct VBRISeeker : public MP3Seeker { + static sp<VBRISeeker> CreateFromSource( + const sp<DataSource> &source, off64_t post_id3_pos); + + virtual bool getDuration(int64_t *durationUs); + virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos); + +private: + off64_t mBasePos; + int64_t mDurationUs; + Vector<uint32_t> mSegments; + + VBRISeeker(); + + DISALLOW_EVIL_CONSTRUCTORS(VBRISeeker); +}; + +} // namespace android + +#endif // VBRI_SEEKER_H_ + + diff --git a/media/libstagefright/include/WAVExtractor.h b/media/libstagefright/include/WAVExtractor.h index df6d3e7..9de197f 100644 --- a/media/libstagefright/include/WAVExtractor.h +++ b/media/libstagefright/include/WAVExtractor.h @@ -48,7 +48,7 @@ private: uint16_t mNumChannels; uint32_t mSampleRate; uint16_t mBitsPerSample; - off_t mDataOffset; + off64_t mDataOffset; size_t mDataSize; sp<MetaData> mTrackMeta; diff --git a/media/libstagefright/include/WVMExtractor.h b/media/libstagefright/include/WVMExtractor.h new file mode 100644 index 0000000..0da45a8 --- /dev/null +++ b/media/libstagefright/include/WVMExtractor.h @@ -0,0 +1,51 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef WVM_EXTRACTOR_H_ + +#define WVM_EXTRACTOR_H_ + +#include <media/stagefright/MediaExtractor.h> + +namespace android { + +class DataSource; + +class WVMExtractor : public MediaExtractor { +public: + WVMExtractor(const sp<DataSource> &source); + + virtual size_t countTracks(); + virtual sp<MediaSource> getTrack(size_t index); + virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags); + virtual sp<MetaData> getMetaData(); + +protected: + virtual ~WVMExtractor(); + +private: + sp<DataSource> mDataSource; + sp<MediaExtractor> mImpl; + + WVMExtractor(const WVMExtractor &); + WVMExtractor &operator=(const WVMExtractor &); + +}; + +} // namespace android + +#endif // DRM_EXTRACTOR_H_ + diff --git a/media/libstagefright/include/XINGSeeker.h b/media/libstagefright/include/XINGSeeker.h new file mode 100644 index 0000000..d5a484e --- /dev/null +++ b/media/libstagefright/include/XINGSeeker.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef XING_SEEKER_H_ + +#define XING_SEEKER_H_ + +#include "include/MP3Seeker.h" + +namespace android { + +struct DataSource; + +struct XINGSeeker : public MP3Seeker { + static sp<XINGSeeker> CreateFromSource( + const sp<DataSource> &source, off64_t first_frame_pos); + + virtual bool getDuration(int64_t *durationUs); + virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos); + +private: + int64_t mFirstFramePos; + int64_t mDurationUs; + int32_t mSizeBytes; + + // TOC entries in XING header. Skip the first one since it's always 0. 
+ char mTableOfContents[99]; + + XINGSeeker(); + + DISALLOW_EVIL_CONSTRUCTORS(XINGSeeker); +}; + +} // namespace android + +#endif // XING_SEEKER_H_ + diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h index 62cfc36..3aeb07f 100644 --- a/media/libstagefright/include/avc_utils.h +++ b/media/libstagefright/include/avc_utils.h @@ -24,6 +24,17 @@ namespace android { struct ABitReader; +enum { + kAVCProfileBaseline = 0x42, + kAVCProfileMain = 0x4d, + kAVCProfileExtended = 0x58, + kAVCProfileHigh = 0x64, + kAVCProfileHigh10 = 0x6e, + kAVCProfileHigh422 = 0x7a, + kAVCProfileHigh444 = 0xf4, + kAVCProfileCAVLC444Intra = 0x2c +}; + void FindAVCDimensions( const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height); @@ -39,6 +50,8 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit); bool IsIDR(const sp<ABuffer> &accessUnit); +const char *AVCProfileToString(uint8_t profile); + } // namespace android #endif // AVC_UTILS_H_ diff --git a/media/libstagefright/include/stagefright_string.h b/media/libstagefright/include/stagefright_string.h deleted file mode 100644 index 5dc7116..0000000 --- a/media/libstagefright/include/stagefright_string.h +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright (C) 2009 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -#ifndef STRING_H_ - -#define STRING_H_ - -#include <utils/String8.h> - -namespace android { - -class string { -public: - typedef size_t size_type; - static size_type npos; - - string(); - string(const char *s); - string(const char *s, size_t length); - string(const string &from, size_type start, size_type length = npos); - - const char *c_str() const; - size_type size() const; - - void clear(); - void erase(size_type from, size_type length); - - size_type find(char c) const; - - bool operator<(const string &other) const; - bool operator==(const string &other) const; - - string &operator+=(char c); - -private: - String8 mString; -}; - -} // namespace android - -#endif // STRING_H_ diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp index 1661130..733de92 100644 --- a/media/libstagefright/matroska/MatroskaExtractor.cpp +++ b/media/libstagefright/matroska/MatroskaExtractor.cpp @@ -58,7 +58,7 @@ struct DataSourceReader : public mkvparser::IMkvReader { } virtual int Length(long long* total, long long* available) { - off_t size; + off64_t size; if (mSource->getSize(&size) != OK) { return -1; } @@ -707,6 +707,12 @@ void MatroskaExtractor::addTracks() { for (size_t index = 0; index < tracks->GetTracksCount(); ++index) { const mkvparser::Track *track = tracks->GetTrackByIndex(index); + if (track == NULL) { + // Apparently this is currently valid (if unexpected) behaviour + // of the mkv parser lib. 
+ continue; + } + const char *const codecID = track->GetCodecId(); LOGV("codec id = %s", codecID); LOGV("codec name = %s", track->GetCodecNameAsUTF8()); diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp index c88c6c1..4335b99 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.cpp +++ b/media/libstagefright/mpeg2ts/ATSParser.cpp @@ -43,19 +43,25 @@ namespace android { static const size_t kTSPacketSize = 188; struct ATSParser::Program : public RefBase { - Program(unsigned programMapPID); + Program(ATSParser *parser, unsigned programMapPID); bool parsePID( unsigned pid, unsigned payload_unit_start_indicator, ABitReader *br); - void signalDiscontinuity(bool isASeek); + void signalDiscontinuity(DiscontinuityType type); + void signalEOS(status_t finalResult); sp<MediaSource> getSource(SourceType type); int64_t convertPTSToTimestamp(uint64_t PTS); + bool PTSTimeDeltaEstablished() const { + return mFirstPTSValid; + } + private: + ATSParser *mParser; unsigned mProgramMapPID; KeyedVector<unsigned, sp<Stream> > mStreams; bool mFirstPTSValid; @@ -69,11 +75,16 @@ private: struct ATSParser::Stream : public RefBase { Stream(Program *program, unsigned elementaryPID, unsigned streamType); + unsigned type() const { return mStreamType; } + unsigned pid() const { return mElementaryPID; } + void setPID(unsigned pid) { mElementaryPID = pid; } + void parse( unsigned payload_unit_start_indicator, ABitReader *br); - void signalDiscontinuity(bool isASeek); + void signalDiscontinuity(DiscontinuityType type); + void signalEOS(status_t finalResult); sp<MediaSource> getSource(SourceType type); @@ -88,6 +99,7 @@ private: sp<ABuffer> mBuffer; sp<AnotherPacketSource> mSource; bool mPayloadStarted; + DiscontinuityType mPendingDiscontinuity; ElementaryStreamQueue mQueue; @@ -100,13 +112,16 @@ private: void extractAACFrames(const sp<ABuffer> &buffer); + void deferDiscontinuity(DiscontinuityType type); + DISALLOW_EVIL_CONSTRUCTORS(Stream); }; //////////////////////////////////////////////////////////////////////////////// -ATSParser::Program::Program(unsigned programMapPID) - : mProgramMapPID(programMapPID), +ATSParser::Program::Program(ATSParser *parser, unsigned programMapPID) + : mParser(parser), + mProgramMapPID(programMapPID), mFirstPTSValid(false), mFirstPTS(0) { } @@ -135,12 +150,23 @@ bool ATSParser::Program::parsePID( return true; } -void ATSParser::Program::signalDiscontinuity(bool isASeek) { +void ATSParser::Program::signalDiscontinuity(DiscontinuityType type) { for (size_t i = 0; i < mStreams.size(); ++i) { - mStreams.editValueAt(i)->signalDiscontinuity(isASeek); + mStreams.editValueAt(i)->signalDiscontinuity(type); } } +void ATSParser::Program::signalEOS(status_t finalResult) { + for (size_t i = 0; i < mStreams.size(); ++i) { + mStreams.editValueAt(i)->signalEOS(finalResult); + } +} + +struct StreamInfo { + unsigned mType; + unsigned mPID; +}; + void ATSParser::Program::parseProgramMap(ABitReader *br) { unsigned table_id = br->getBits(8); LOGV(" table_id = %u", table_id); @@ -155,7 +181,7 @@ void ATSParser::Program::parseProgramMap(ABitReader *br) { unsigned section_length = br->getBits(12); LOGV(" section_length = %u", section_length); - CHECK((section_length & 0xc00) == 0); + CHECK_EQ(section_length & 0xc00, 0u); CHECK_LE(section_length, 1021u); MY_LOGV(" program_number = %u", br->getBits(16)); @@ -170,10 +196,12 @@ void ATSParser::Program::parseProgramMap(ABitReader *br) { unsigned program_info_length = br->getBits(12); LOGV(" program_info_length = %u", 
program_info_length); - CHECK((program_info_length & 0xc00) == 0); + CHECK_EQ(program_info_length & 0xc00, 0u); br->skipBits(program_info_length * 8); // skip descriptors + Vector<StreamInfo> infos; + // infoBytesRemaining is the number of bytes that make up the // variable length section of ES_infos. It does not include the // final CRC. @@ -194,7 +222,7 @@ void ATSParser::Program::parseProgramMap(ABitReader *br) { unsigned ES_info_length = br->getBits(12); LOGV(" ES_info_length = %u", ES_info_length); - CHECK((ES_info_length & 0xc00) == 0); + CHECK_EQ(ES_info_length & 0xc00, 0u); CHECK_GE(infoBytesRemaining - 5, ES_info_length); @@ -217,31 +245,60 @@ void ATSParser::Program::parseProgramMap(ABitReader *br) { CHECK_EQ(info_bytes_remaining, 0u); #endif - ssize_t index = mStreams.indexOfKey(elementaryPID); -#if 0 // XXX revisit - CHECK_LT(index, 0); - mStreams.add(elementaryPID, - new Stream(this, elementaryPID, streamType)); -#else - if (index < 0) { - mStreams.add(elementaryPID, - new Stream(this, elementaryPID, streamType)); - } -#endif + StreamInfo info; + info.mType = streamType; + info.mPID = elementaryPID; + infos.push(info); infoBytesRemaining -= 5 + ES_info_length; } CHECK_EQ(infoBytesRemaining, 0u); - MY_LOGV(" CRC = 0x%08x", br->getBits(32)); + + bool PIDsChanged = false; + for (size_t i = 0; i < infos.size(); ++i) { + StreamInfo &info = infos.editItemAt(i); + + ssize_t index = mStreams.indexOfKey(info.mPID); + + if (index >= 0 && mStreams.editValueAt(index)->type() != info.mType) { + LOGI("uh oh. stream PIDs have changed."); + PIDsChanged = true; + break; + } + } + + if (PIDsChanged) { + mStreams.clear(); + } + + for (size_t i = 0; i < infos.size(); ++i) { + StreamInfo &info = infos.editItemAt(i); + + ssize_t index = mStreams.indexOfKey(info.mPID); + + if (index < 0) { + sp<Stream> stream = new Stream(this, info.mPID, info.mType); + mStreams.add(info.mPID, stream); + + if (PIDsChanged) { + stream->signalDiscontinuity(DISCONTINUITY_FORMATCHANGE); + } + } + } } sp<MediaSource> ATSParser::Program::getSource(SourceType type) { + size_t index = (type == MPEG2ADTS_AUDIO) ? 0 : 0; + for (size_t i = 0; i < mStreams.size(); ++i) { sp<MediaSource> source = mStreams.editValueAt(i)->getSource(type); if (source != NULL) { - return source; + if (index == 0) { + return source; + } + --index; } } @@ -269,11 +326,14 @@ ATSParser::Stream::Stream( : mProgram(program), mElementaryPID(elementaryPID), mStreamType(streamType), - mBuffer(new ABuffer(128 * 1024)), + mBuffer(new ABuffer(192 * 1024)), mPayloadStarted(false), + mPendingDiscontinuity(DISCONTINUITY_NONE), mQueue(streamType == 0x1b ? 
ElementaryStreamQueue::H264 : ElementaryStreamQueue::AAC) { mBuffer->setRange(0, 0); + + LOGV("new stream PID 0x%02x, type 0x%02x", elementaryPID, streamType); } ATSParser::Stream::~Stream() { @@ -298,7 +358,7 @@ void ATSParser::Stream::parse( } size_t payloadSizeBits = br->numBitsLeft(); - CHECK((payloadSizeBits % 8) == 0); + CHECK_EQ(payloadSizeBits % 8, 0u); CHECK_LE(mBuffer->size() + payloadSizeBits / 8, mBuffer->capacity()); @@ -306,24 +366,57 @@ void ATSParser::Stream::parse( mBuffer->setRange(0, mBuffer->size() + payloadSizeBits / 8); } -void ATSParser::Stream::signalDiscontinuity(bool isASeek) { - LOGV("Stream discontinuity"); +void ATSParser::Stream::signalDiscontinuity(DiscontinuityType type) { mPayloadStarted = false; mBuffer->setRange(0, 0); - mQueue.clear(); + switch (type) { + case DISCONTINUITY_HTTPLIVE: + { + mQueue.clear(true); + + if (mStreamType == 0x1b) { + // Don't signal discontinuities on audio streams. + if (mSource != NULL) { + mSource->queueDiscontinuity(type); + } else { + deferDiscontinuity(type); + } + } + break; + } - if (isASeek) { - // This is only a "minor" discontinuity, we stay within the same - // bitstream. + case DISCONTINUITY_SEEK: + case DISCONTINUITY_FORMATCHANGE: + { + bool isASeek = (type == DISCONTINUITY_SEEK); - mSource->clear(); - return; + mQueue.clear(!isASeek); + + if (mSource != NULL) { + mSource->queueDiscontinuity(type); + } else { + deferDiscontinuity(type); + } + break; + } + + default: + TRESPASS(); + break; + } +} + +void ATSParser::Stream::deferDiscontinuity(DiscontinuityType type) { + if (type > mPendingDiscontinuity) { + // Only upgrade discontinuities. + mPendingDiscontinuity = type; } +} - if (mStreamType == 0x1b && mSource != NULL) { - // Don't signal discontinuities on audio streams. - mSource->queueDiscontinuity(); +void ATSParser::Stream::signalEOS(status_t finalResult) { + if (mSource != NULL) { + mSource->signalEOS(finalResult); } } @@ -468,7 +561,7 @@ void ATSParser::Stream::parsePES(ABitReader *br) { br->data(), br->numBitsLeft() / 8); size_t payloadSizeBits = br->numBitsLeft(); - CHECK((payloadSizeBits % 8) == 0); + CHECK_EQ(payloadSizeBits % 8, 0u); LOGV("There's %d bytes of payload.", payloadSizeBits / 8); } @@ -503,7 +596,10 @@ void ATSParser::Stream::onPayloadData( int64_t timeUs = mProgram->convertPTSToTimestamp(PTS); status_t err = mQueue.appendData(data, size, timeUs); - CHECK_EQ(err, (status_t)OK); + + if (err != OK) { + return; + } sp<ABuffer> accessUnit; while ((accessUnit = mQueue.dequeueAccessUnit()) != NULL) { @@ -513,12 +609,22 @@ void ATSParser::Stream::onPayloadData( if (meta != NULL) { LOGV("created source!"); mSource = new AnotherPacketSource(meta); + + if (mPendingDiscontinuity != DISCONTINUITY_NONE) { + mSource->queueDiscontinuity(mPendingDiscontinuity); + mPendingDiscontinuity = DISCONTINUITY_NONE; + } + mSource->queueAccessUnit(accessUnit); } } else if (mQueue.getFormat() != NULL) { // After a discontinuity we invalidate the queue's format // and won't enqueue any access units to the source until // the queue has reestablished the new format. 
+ + if (mSource->getFormat() == NULL) { + mSource->setFormat(mQueue.getFormat()); + } mSource->queueAccessUnit(accessUnit); } } @@ -548,9 +654,17 @@ void ATSParser::feedTSPacket(const void *data, size_t size) { parseTS(&br); } -void ATSParser::signalDiscontinuity(bool isASeek) { +void ATSParser::signalDiscontinuity(DiscontinuityType type) { + for (size_t i = 0; i < mPrograms.size(); ++i) { + mPrograms.editItemAt(i)->signalDiscontinuity(type); + } +} + +void ATSParser::signalEOS(status_t finalResult) { + CHECK_NE(finalResult, (status_t)OK); + for (size_t i = 0; i < mPrograms.size(); ++i) { - mPrograms.editItemAt(i)->signalDiscontinuity(isASeek); + mPrograms.editItemAt(i)->signalEOS(finalResult); } } @@ -568,7 +682,7 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) { unsigned section_length = br->getBits(12); LOGV(" section_length = %u", section_length); - CHECK((section_length & 0xc00) == 0); + CHECK_EQ(section_length & 0xc00, 0u); MY_LOGV(" transport_stream_id = %u", br->getBits(16)); MY_LOGV(" reserved = %u", br->getBits(2)); @@ -593,7 +707,7 @@ void ATSParser::parseProgramAssociationTable(ABitReader *br) { LOGV(" program_map_PID = 0x%04x", programMapPID); - mPrograms.push(new Program(programMapPID)); + mPrograms.push(new Program(this, programMapPID)); } } @@ -680,4 +794,12 @@ sp<MediaSource> ATSParser::getSource(SourceType type) { return NULL; } +bool ATSParser::PTSTimeDeltaEstablished() { + if (mPrograms.isEmpty()) { + return false; + } + + return mPrograms.editItemAt(0)->PTSTimeDeltaEstablished(); +} + } // namespace android diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h index 11b1de4..ec3be84 100644 --- a/media/libstagefright/mpeg2ts/ATSParser.h +++ b/media/libstagefright/mpeg2ts/ATSParser.h @@ -21,19 +21,29 @@ #include <sys/types.h> #include <media/stagefright/foundation/ABase.h> +#include <media/stagefright/foundation/AMessage.h> #include <utils/Vector.h> #include <utils/RefBase.h> namespace android { struct ABitReader; +struct ABuffer; struct MediaSource; struct ATSParser : public RefBase { + enum DiscontinuityType { + DISCONTINUITY_NONE, + DISCONTINUITY_HTTPLIVE, + DISCONTINUITY_SEEK, + DISCONTINUITY_FORMATCHANGE + }; + ATSParser(); void feedTSPacket(const void *data, size_t size); - void signalDiscontinuity(bool isASeek = false); + void signalDiscontinuity(DiscontinuityType type); + void signalEOS(status_t finalResult); enum SourceType { AVC_VIDEO, @@ -41,6 +51,8 @@ struct ATSParser : public RefBase { }; sp<MediaSource> getSource(SourceType type); + bool PTSTimeDeltaEstablished(); + protected: virtual ~ATSParser(); diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp index ea747c8..0ad883b 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp @@ -33,6 +33,11 @@ AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta) mEOSResult(OK) { } +void AnotherPacketSource::setFormat(const sp<MetaData> &meta) { + CHECK(mFormat == NULL); + mFormat = meta; +} + AnotherPacketSource::~AnotherPacketSource() { } @@ -48,6 +53,34 @@ sp<MetaData> AnotherPacketSource::getFormat() { return mFormat; } +status_t AnotherPacketSource::dequeueAccessUnit(sp<ABuffer> *buffer) { + buffer->clear(); + + Mutex::Autolock autoLock(mLock); + while (mEOSResult == OK && mBuffers.empty()) { + mCondition.wait(mLock); + } + + if (!mBuffers.empty()) { + *buffer = *mBuffers.begin(); + 
mBuffers.erase(mBuffers.begin()); + + int32_t discontinuity; + if ((*buffer)->meta()->findInt32("discontinuity", &discontinuity)) { + + if (discontinuity == ATSParser::DISCONTINUITY_FORMATCHANGE) { + mFormat.clear(); + } + + return INFO_DISCONTINUITY; + } + + return OK; + } + + return mEOSResult; +} + status_t AnotherPacketSource::read( MediaBuffer **out, const ReadOptions *) { *out = NULL; @@ -62,13 +95,15 @@ status_t AnotherPacketSource::read( mBuffers.erase(mBuffers.begin()); int32_t discontinuity; - if (buffer->meta()->findInt32("discontinuity", &discontinuity) - && discontinuity) { + if (buffer->meta()->findInt32("discontinuity", &discontinuity)) { + if (discontinuity == ATSParser::DISCONTINUITY_FORMATCHANGE) { + mFormat.clear(); + } + return INFO_DISCONTINUITY; } else { - uint64_t timeUs; - CHECK(buffer->meta()->findInt64( - "time", (int64_t *)&timeUs)); + int64_t timeUs; + CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); MediaBuffer *mediaBuffer = new MediaBuffer(buffer->size()); mediaBuffer->meta_data()->setInt64(kKeyTime, timeUs); @@ -92,7 +127,7 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) { } int64_t timeUs; - CHECK(buffer->meta()->findInt64("time", &timeUs)); + CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); LOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", timeUs, timeUs / 1E6); Mutex::Autolock autoLock(mLock); @@ -100,22 +135,30 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) { mCondition.signal(); } -void AnotherPacketSource::queueDiscontinuity() { +void AnotherPacketSource::queueDiscontinuity( + ATSParser::DiscontinuityType type) { sp<ABuffer> buffer = new ABuffer(0); - buffer->meta()->setInt32("discontinuity", true); + buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type)); Mutex::Autolock autoLock(mLock); +#if 0 + if (type == ATSParser::DISCONTINUITY_SEEK + || type == ATSParser::DISCONTINUITY_FORMATCHANGE) { + // XXX Fix this: This will also clear any pending discontinuities, + // If there's a pending DISCONTINUITY_FORMATCHANGE and the new + // discontinuity is "just" a DISCONTINUITY_SEEK, this will effectively + // downgrade the type of discontinuity received by the client. + + mBuffers.clear(); + mEOSResult = OK; + } +#endif + mBuffers.push_back(buffer); mCondition.signal(); } -void AnotherPacketSource::clear() { - Mutex::Autolock autoLock(mLock); - mBuffers.clear(); - mEOSResult = OK; -} - void AnotherPacketSource::signalEOS(status_t result) { CHECK(result != OK); @@ -134,4 +177,19 @@ bool AnotherPacketSource::hasBufferAvailable(status_t *finalResult) { return false; } +status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) { + *timeUs = 0; + + Mutex::Autolock autoLock(mLock); + + if (mBuffers.empty()) { + return mEOSResult != OK ? 
mEOSResult : -EWOULDBLOCK; + } + + sp<ABuffer> buffer = *mBuffers.begin(); + CHECK(buffer->meta()->findInt64("timeUs", timeUs)); + + return OK; +} + } // namespace android diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h index 6999175..6fe93f8 100644 --- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h +++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h @@ -23,6 +23,8 @@ #include <utils/threads.h> #include <utils/List.h> +#include "ATSParser.h" + namespace android { struct ABuffer; @@ -30,6 +32,8 @@ struct ABuffer; struct AnotherPacketSource : public MediaSource { AnotherPacketSource(const sp<MetaData> &meta); + void setFormat(const sp<MetaData> &meta); + virtual status_t start(MetaData *params = NULL); virtual status_t stop(); virtual sp<MetaData> getFormat(); @@ -39,11 +43,13 @@ struct AnotherPacketSource : public MediaSource { bool hasBufferAvailable(status_t *finalResult); + status_t nextBufferTime(int64_t *timeUs); + void queueAccessUnit(const sp<ABuffer> &buffer); - void queueDiscontinuity(); + void queueDiscontinuity(ATSParser::DiscontinuityType type); void signalEOS(status_t result); - void clear(); + status_t dequeueAccessUnit(sp<ABuffer> *buffer); protected: virtual ~AnotherPacketSource(); diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp index b0b9e66..73efdfe 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.cpp +++ b/media/libstagefright/mpeg2ts/ESQueue.cpp @@ -40,10 +40,43 @@ sp<MetaData> ElementaryStreamQueue::getFormat() { return mFormat; } -void ElementaryStreamQueue::clear() { - mBuffer->setRange(0, 0); - mTimestamps.clear(); - mFormat.clear(); +void ElementaryStreamQueue::clear(bool clearFormat) { + if (mBuffer != NULL) { + mBuffer->setRange(0, 0); + } + + mRangeInfos.clear(); + + if (clearFormat) { + mFormat.clear(); + } +} + +static bool IsSeeminglyValidADTSHeader(const uint8_t *ptr, size_t size) { + if (size < 3) { + // Not enough data to verify header. + return false; + } + + if (ptr[0] != 0xff || (ptr[1] >> 4) != 0x0f) { + return false; + } + + unsigned layer = (ptr[1] >> 1) & 3; + + if (layer != 0) { + return false; + } + + unsigned ID = (ptr[1] >> 3) & 1; + unsigned profile_ObjectType = ptr[2] >> 6; + + if (ID == 1 && profile_ObjectType == 3) { + // MPEG-2 profile 3 is reserved. 
+ return false; + } + + return true; } status_t ElementaryStreamQueue::appendData( @@ -52,9 +85,34 @@ status_t ElementaryStreamQueue::appendData( switch (mMode) { case H264: { +#if 0 if (size < 4 || memcmp("\x00\x00\x00\x01", data, 4)) { return ERROR_MALFORMED; } +#else + uint8_t *ptr = (uint8_t *)data; + + ssize_t startOffset = -1; + for (size_t i = 0; i + 3 < size; ++i) { + if (!memcmp("\x00\x00\x00\x01", &ptr[i], 4)) { + startOffset = i; + break; + } + } + + if (startOffset < 0) { + return ERROR_MALFORMED; + } + + if (startOffset > 0) { + LOGI("found something resembling an H.264 syncword at " + "offset %ld", + startOffset); + } + + data = &ptr[startOffset]; + size -= startOffset; +#endif break; } @@ -62,9 +120,31 @@ status_t ElementaryStreamQueue::appendData( { uint8_t *ptr = (uint8_t *)data; +#if 0 if (size < 2 || ptr[0] != 0xff || (ptr[1] >> 4) != 0x0f) { return ERROR_MALFORMED; } +#else + ssize_t startOffset = -1; + for (size_t i = 0; i < size; ++i) { + if (IsSeeminglyValidADTSHeader(&ptr[i], size - i)) { + startOffset = i; + break; + } + } + + if (startOffset < 0) { + return ERROR_MALFORMED; + } + + if (startOffset > 0) { + LOGI("found something resembling an AAC syncword at offset %ld", + startOffset); + } + + data = &ptr[startOffset]; + size -= startOffset; +#endif break; } @@ -94,7 +174,17 @@ status_t ElementaryStreamQueue::appendData( memcpy(mBuffer->data() + mBuffer->size(), data, size); mBuffer->setRange(0, mBuffer->size() + size); - mTimestamps.push_back(timeUs); + RangeInfo info; + info.mLength = size; + info.mTimestampUs = timeUs; + mRangeInfos.push_back(info); + +#if 0 + if (mMode == AAC) { + LOGI("size = %d, timeUs = %.2f secs", size, timeUs / 1E6); + hexdump(data, size); + } +#endif return OK; } @@ -109,6 +199,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnit() { } sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { + Vector<size_t> ranges; Vector<size_t> frameOffsets; Vector<size_t> frameSizes; size_t auSize = 0; @@ -134,6 +225,14 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { mFormat = MakeAACCodecSpecificData( profile, sampling_freq_index, channel_configuration); + + int32_t sampleRate; + int32_t numChannels; + CHECK(mFormat->findInt32(kKeySampleRate, &sampleRate)); + CHECK(mFormat->findInt32(kKeyChannelCount, &numChannels)); + + LOGI("found AAC codec config (%d Hz, %d channels)", + sampleRate, numChannels); } else { // profile_ObjectType, sampling_frequency_index, private_bits, // channel_configuration, original_copy, home @@ -162,6 +261,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { size_t headerSize = protection_absent ? 
7 : 9; + ranges.push(aac_frame_length); frameOffsets.push(offset + headerSize); frameSizes.push(aac_frame_length - headerSize); auSize += aac_frame_length - headerSize; @@ -173,11 +273,23 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { return NULL; } + int64_t timeUs = -1; + + for (size_t i = 0; i < ranges.size(); ++i) { + int64_t tmpUs = fetchTimestamp(ranges.itemAt(i)); + + if (i == 0) { + timeUs = tmpUs; + } + } + sp<ABuffer> accessUnit = new ABuffer(auSize); size_t dstOffset = 0; for (size_t i = 0; i < frameOffsets.size(); ++i) { + size_t frameOffset = frameOffsets.itemAt(i); + memcpy(accessUnit->data() + dstOffset, - mBuffer->data() + frameOffsets.itemAt(i), + mBuffer->data() + frameOffset, frameSizes.itemAt(i)); dstOffset += frameSizes.itemAt(i); @@ -187,15 +299,48 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() { mBuffer->size() - offset); mBuffer->setRange(0, mBuffer->size() - offset); - CHECK_GT(mTimestamps.size(), 0u); - int64_t timeUs = *mTimestamps.begin(); - mTimestamps.erase(mTimestamps.begin()); - - accessUnit->meta()->setInt64("time", timeUs); + if (timeUs >= 0) { + accessUnit->meta()->setInt64("timeUs", timeUs); + } else { + LOGW("no time for AAC access unit"); + } return accessUnit; } +int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) { + int64_t timeUs = -1; + bool first = true; + + while (size > 0) { + CHECK(!mRangeInfos.empty()); + + RangeInfo *info = &*mRangeInfos.begin(); + + if (first) { + timeUs = info->mTimestampUs; + first = false; + } + + if (info->mLength > size) { + info->mLength -= size; + + if (first) { + info->mTimestampUs = -1; + } + + size = 0; + } else { + size -= info->mLength; + + mRangeInfos.erase(mRangeInfos.begin()); + info = NULL; + } + } + + return timeUs; +} + // static sp<MetaData> ElementaryStreamQueue::MakeAACCodecSpecificData( unsigned profile, unsigned sampling_freq_index, @@ -333,11 +478,10 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() { mBuffer->setRange(0, mBuffer->size() - nextScan); - CHECK_GT(mTimestamps.size(), 0u); - int64_t timeUs = *mTimestamps.begin(); - mTimestamps.erase(mTimestamps.begin()); + int64_t timeUs = fetchTimestamp(nextScan); + CHECK_GE(timeUs, 0ll); - accessUnit->meta()->setInt64("time", timeUs); + accessUnit->meta()->setInt64("timeUs", timeUs); if (mFormat == NULL) { mFormat = MakeAVCCodecSpecificData(accessUnit); diff --git a/media/libstagefright/mpeg2ts/ESQueue.h b/media/libstagefright/mpeg2ts/ESQueue.h index 9eaf834..5b7957e 100644 --- a/media/libstagefright/mpeg2ts/ESQueue.h +++ b/media/libstagefright/mpeg2ts/ESQueue.h @@ -35,23 +35,32 @@ struct ElementaryStreamQueue { ElementaryStreamQueue(Mode mode); status_t appendData(const void *data, size_t size, int64_t timeUs); - void clear(); + void clear(bool clearFormat); sp<ABuffer> dequeueAccessUnit(); sp<MetaData> getFormat(); private: + struct RangeInfo { + int64_t mTimestampUs; + size_t mLength; + }; + Mode mMode; sp<ABuffer> mBuffer; - List<int64_t> mTimestamps; + List<RangeInfo> mRangeInfos; sp<MetaData> mFormat; sp<ABuffer> dequeueAccessUnitH264(); sp<ABuffer> dequeueAccessUnitAAC(); + // consume a logical (compressed) access unit of size "size", + // returns its timestamp in us (or -1 if no time information). 
+ int64_t fetchTimestamp(size_t size); + static sp<MetaData> MakeAACCodecSpecificData( unsigned profile, unsigned sampling_freq_index, unsigned channel_configuration); diff --git a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp index 3176810..a1f0796 100644 --- a/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp +++ b/media/libstagefright/mpeg2ts/MPEG2TSExtractor.cpp @@ -19,7 +19,7 @@ #include <utils/Log.h> #include "include/MPEG2TSExtractor.h" -#include "include/LiveSource.h" +#include "include/LiveSession.h" #include "include/NuCachedSource2.h" #include <media/stagefright/DataSource.h> @@ -82,8 +82,8 @@ sp<MetaData> MPEG2TSSource::getFormat() { sp<MetaData> meta = mImpl->getFormat(); int64_t durationUs; - if (mExtractor->mLiveSource != NULL - && mExtractor->mLiveSource->getDuration(&durationUs)) { + if (mExtractor->mLiveSession != NULL + && mExtractor->mLiveSession->getDuration(&durationUs) == OK) { meta->setInt64(kKeyDuration, durationUs); } @@ -214,7 +214,7 @@ status_t MPEG2TSExtractor::feedMore() { if (isDiscontinuity(packet, n)) { LOGI("XXX discontinuity detected"); - mParser->signalDiscontinuity(); + mParser->signalDiscontinuity(ATSParser::DISCONTINUITY_HTTPLIVE); } else if (n < (ssize_t)kTSPacketSize) { return (n < 0) ? (status_t)n : ERROR_END_OF_STREAM; } else { @@ -226,32 +226,20 @@ status_t MPEG2TSExtractor::feedMore() { return OK; } -void MPEG2TSExtractor::setLiveSource(const sp<LiveSource> &liveSource) { +void MPEG2TSExtractor::setLiveSession(const sp<LiveSession> &liveSession) { Mutex::Autolock autoLock(mLock); - mLiveSource = liveSource; + mLiveSession = liveSession; } void MPEG2TSExtractor::seekTo(int64_t seekTimeUs) { Mutex::Autolock autoLock(mLock); - if (mLiveSource == NULL) { + if (mLiveSession == NULL) { return; } - if (mDataSource->flags() & DataSource::kIsCachingDataSource) { - static_cast<NuCachedSource2 *>(mDataSource.get())->suspend(); - } - - if (mLiveSource->seekTo(seekTimeUs)) { - mParser->signalDiscontinuity(true /* isSeek */); - mOffset = 0; - } - - if (mDataSource->flags() & DataSource::kIsCachingDataSource) { - static_cast<NuCachedSource2 *>(mDataSource.get()) - ->clearCacheAndResume(); - } + mLiveSession->seekTo(seekTimeUs); } uint32_t MPEG2TSExtractor::flags() const { @@ -259,7 +247,7 @@ uint32_t MPEG2TSExtractor::flags() const { uint32_t flags = CAN_PAUSE; - if (mLiveSource != NULL && mLiveSource->isSeekable()) { + if (mLiveSession != NULL && mLiveSession->isSeekable()) { flags |= CAN_SEEK_FORWARD | CAN_SEEK_BACKWARD | CAN_SEEK; } diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk index ead1675..6e069c8 100644 --- a/media/libstagefright/omx/Android.mk +++ b/media/libstagefright/omx/Android.mk @@ -31,7 +31,6 @@ LOCAL_SHARED_LIBRARIES := \ libutils \ libui \ libcutils \ - libstagefright_color_conversion ifneq ($(BUILD_WITHOUT_PV),true) LOCAL_SHARED_LIBRARIES += \ diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index 2ba63f7..3638f41 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -24,14 +24,11 @@ #include <sys/resource.h> #include "../include/OMX.h" -#include "OMXRenderer.h" #include "../include/OMXNodeInstance.h" -#include "../include/SoftwareRenderer.h" #include <binder/IMemory.h> #include <media/stagefright/MediaDebug.h> -#include <media/stagefright/VideoRenderer.h> #include <utils/threads.h> #include "OMXMaster.h" @@ -295,6 +292,16 @@ status_t OMX::setConfig( index, params, size); } 
+status_t OMX::enableGraphicBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable) { + return findInstance(node)->enableGraphicBuffers(port_index, enable); +} + +status_t OMX::storeMetaDataInBuffers( + node_id node, OMX_U32 port_index, OMX_BOOL enable) { + return findInstance(node)->storeMetaDataInBuffers(port_index, enable); +} + status_t OMX::useBuffer( node_id node, OMX_U32 port_index, const sp<IMemory> &params, buffer_id *buffer) { @@ -302,6 +309,13 @@ status_t OMX::useBuffer( port_index, params, buffer); } +status_t OMX::useGraphicBuffer( + node_id node, OMX_U32 port_index, + const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) { + return findInstance(node)->useGraphicBuffer( + port_index, graphicBuffer, buffer); +} + status_t OMX::allocateBuffer( node_id node, OMX_U32 port_index, size_t size, buffer_id *buffer, void **buffer_data) { @@ -431,135 +445,4 @@ void OMX::invalidateNodeID_l(node_id node) { mNodeIDToInstance.removeItem(node); } -//////////////////////////////////////////////////////////////////////////////// - -struct SharedVideoRenderer : public VideoRenderer { - SharedVideoRenderer(void *libHandle, VideoRenderer *obj) - : mLibHandle(libHandle), - mObj(obj) { - } - - virtual ~SharedVideoRenderer() { - delete mObj; - mObj = NULL; - - dlclose(mLibHandle); - mLibHandle = NULL; - } - - virtual void render( - const void *data, size_t size, void *platformPrivate) { - return mObj->render(data, size, platformPrivate); - } - -private: - void *mLibHandle; - VideoRenderer *mObj; - - SharedVideoRenderer(const SharedVideoRenderer &); - SharedVideoRenderer &operator=(const SharedVideoRenderer &); -}; - -sp<IOMXRenderer> OMX::createRenderer( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t encodedWidth, size_t encodedHeight, - size_t displayWidth, size_t displayHeight, - int32_t rotationDegrees) { - Mutex::Autolock autoLock(mLock); - - VideoRenderer *impl = NULL; - - void *libHandle = dlopen("libstagefrighthw.so", RTLD_NOW); - - if (libHandle) { - typedef VideoRenderer *(*CreateRendererWithRotationFunc)( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight, - int32_t rotationDegrees); - - typedef VideoRenderer *(*CreateRendererFunc)( - const sp<ISurface> &surface, - const char *componentName, - OMX_COLOR_FORMATTYPE colorFormat, - size_t displayWidth, size_t displayHeight, - size_t decodedWidth, size_t decodedHeight); - - CreateRendererWithRotationFunc funcWithRotation = - (CreateRendererWithRotationFunc)dlsym( - libHandle, - "_Z26createRendererWithRotationRKN7android2spINS_8" - "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji"); - - if (funcWithRotation) { - impl = (*funcWithRotation)( - surface, componentName, colorFormat, - displayWidth, displayHeight, encodedWidth, encodedHeight, - rotationDegrees); - } else { - CreateRendererFunc func = - (CreateRendererFunc)dlsym( - libHandle, - "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20" - "OMX_COLOR_FORMATTYPEjjjj"); - - if (func) { - impl = (*func)(surface, componentName, colorFormat, - displayWidth, displayHeight, encodedWidth, encodedHeight); - } - } - - if (impl) { - impl = new SharedVideoRenderer(libHandle, impl); - libHandle = NULL; - } - - if (libHandle) { - dlclose(libHandle); - libHandle = NULL; - } - } - - if (!impl) { - LOGW("Using software renderer."); - impl = new SoftwareRenderer( - colorFormat, - surface, - displayWidth, displayHeight,
- encodedWidth, encodedHeight); - - if (((SoftwareRenderer *)impl)->initCheck() != OK) { - delete impl; - impl = NULL; - - return NULL; - } - } - - return new OMXRenderer(impl); -} - -OMXRenderer::OMXRenderer(VideoRenderer *impl) - : mImpl(impl) { -} - -OMXRenderer::~OMXRenderer() { - delete mImpl; - mImpl = NULL; -} - -void OMXRenderer::render(IOMX::buffer_id buffer) { - OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer; - - mImpl->render( - header->pBuffer + header->nOffset, - header->nFilledLen, - header->pPlatformPrivate); -} - } // namespace android - diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp index 5db516e..9b6d441 100644 --- a/media/libstagefright/omx/OMXNodeInstance.cpp +++ b/media/libstagefright/omx/OMXNodeInstance.cpp @@ -24,6 +24,7 @@ #include <OMX_Component.h> #include <binder/IMemory.h> +#include <media/stagefright/HardwareAPI.h> #include <media/stagefright/MediaDebug.h> #include <media/stagefright/MediaErrors.h> @@ -40,6 +41,11 @@ struct BufferMeta { mIsBackup(false) { } + BufferMeta(const sp<GraphicBuffer> &graphicBuffer) + : mGraphicBuffer(graphicBuffer), + mIsBackup(false) { + } + void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) { if (!mIsBackup) { return; @@ -61,6 +67,7 @@ } private: + sp<GraphicBuffer> mGraphicBuffer; sp<IMemory> mMem; size_t mSize; bool mIsBackup; @@ -240,6 +247,74 @@ return StatusFromOMXError(err); } +status_t OMXNodeInstance::enableGraphicBuffers( + OMX_U32 portIndex, OMX_BOOL enable) { + Mutex::Autolock autoLock(mLock); + + OMX_INDEXTYPE index; + OMX_ERRORTYPE err = OMX_GetExtensionIndex( + mHandle, + const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"), + &index); + + if (err != OMX_ErrorNone) { + LOGE("OMX_GetExtensionIndex failed"); + + return StatusFromOMXError(err); + } + + OMX_VERSIONTYPE ver; + ver.s.nVersionMajor = 1; + ver.s.nVersionMinor = 0; + ver.s.nRevision = 0; + ver.s.nStep = 0; + EnableAndroidNativeBuffersParams params = { + sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable, + }; + + err = OMX_SetParameter(mHandle, index, &params); + + if (err != OMX_ErrorNone) { + LOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)", + err, err); + + return UNKNOWN_ERROR; + } + + return OK; +} + +status_t OMXNodeInstance::storeMetaDataInBuffers( + OMX_U32 portIndex, + OMX_BOOL enable) { + Mutex::Autolock autolock(mLock); + + OMX_INDEXTYPE index; + OMX_STRING name = const_cast<OMX_STRING>( + "OMX.google.android.index.storeMetaDataInBuffers"); + + OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index); + if (err != OMX_ErrorNone) { + LOGE("OMX_GetExtensionIndex %s failed", name); + return StatusFromOMXError(err); + } + + StoreMetaDataInBuffersParams params; + memset(&params, 0, sizeof(params)); + params.nSize = sizeof(params); + + // Version: 1.0.0.0 + params.nVersion.s.nVersionMajor = 1; + + params.nPortIndex = portIndex; + params.bStoreMetaData = enable; + if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) { + LOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err); + return UNKNOWN_ERROR; + } + return err; +} + status_t OMXNodeInstance::useBuffer( OMX_U32 portIndex, const sp<IMemory> &params, OMX::buffer_id *buffer) { @@ -273,6 +348,60 @@ status_t OMXNodeInstance::useBuffer( return OK; } +status_t OMXNodeInstance::useGraphicBuffer( + OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer, + OMX::buffer_id *buffer) { +
Mutex::Autolock autoLock(mLock); + + OMX_INDEXTYPE index; + OMX_ERRORTYPE err = OMX_GetExtensionIndex( + mHandle, + const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"), + &index); + + if (err != OMX_ErrorNone) { + LOGE("OMX_GetExtensionIndex failed"); + + return StatusFromOMXError(err); + } + + BufferMeta *bufferMeta = new BufferMeta(graphicBuffer); + + OMX_BUFFERHEADERTYPE *header; + + OMX_VERSIONTYPE ver; + ver.s.nVersionMajor = 1; + ver.s.nVersionMinor = 0; + ver.s.nRevision = 0; + ver.s.nStep = 0; + UseAndroidNativeBufferParams params = { + sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta, + &header, graphicBuffer, + }; + + err = OMX_SetParameter(mHandle, index, &params); + + if (err != OMX_ErrorNone) { + LOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err, + err); + + delete bufferMeta; + bufferMeta = NULL; + + *buffer = 0; + + return UNKNOWN_ERROR; + } + + CHECK_EQ(header->pAppPrivate, bufferMeta); + + *buffer = header; + + addActiveBuffer(portIndex, *buffer); + + return OK; +} + status_t OMXNodeInstance::allocateBuffer( OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer, void **buffer_data) { @@ -498,4 +627,3 @@ void OMXNodeInstance::freeActiveBuffers() { } } // namespace android - diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp index 10cc88b..f0b858d 100644 --- a/media/libstagefright/rtsp/APacketSource.cpp +++ b/media/libstagefright/rtsp/APacketSource.cpp @@ -373,7 +373,17 @@ static bool ExtractDimensionsFromVOLHeader( br.skipBits(2); // chroma_format br.skipBits(1); // low_delay if (br.getBits(1)) { // vbv_parameters - TRESPASS(); + br.skipBits(15); // first_half_bit_rate + CHECK(br.getBits(1)); // marker_bit + br.skipBits(15); // latter_half_bit_rate + CHECK(br.getBits(1)); // marker_bit + br.skipBits(15); // first_half_vbv_buffer_size + CHECK(br.getBits(1)); // marker_bit + br.skipBits(3); // latter_half_vbv_buffer_size + br.skipBits(11); // first_half_vbv_occupancy + CHECK(br.getBits(1)); // marker_bit + br.skipBits(15); // latter_half_vbv_occupancy + CHECK(br.getBits(1)); // marker_bit } } unsigned video_object_layer_shape = br.getBits(2); diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp index 155fd96..5a033e1 100644 --- a/media/libstagefright/rtsp/ARTPWriter.cpp +++ b/media/libstagefright/rtsp/ARTPWriter.cpp @@ -46,7 +46,7 @@ static int UniformRand(int limit) { ARTPWriter::ARTPWriter(int fd) : mFlags(0), - mFd(fd), + mFd(dup(fd)), mLooper(new ALooper), mReflector(new AHandlerReflector<ARTPWriter>(this)) { CHECK_GE(fd, 0); diff --git a/media/libstagefright/string.cpp b/media/libstagefright/string.cpp deleted file mode 100644 index 8b2c36c..0000000 --- a/media/libstagefright/string.cpp +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright (C) 2009 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License.
- */ - -#include "include/stagefright_string.h" - -#include <media/stagefright/MediaDebug.h> - -namespace android { - -// static -string::size_type string::npos = (string::size_type)-1; - -string::string() { -} - -string::string(const char *s, size_t length) - : mString(s, length) { -} - -string::string(const string &from, size_type start, size_type length) { - CHECK(start <= from.size()); - if (length == npos) { - length = from.size() - start; - } else { - CHECK(start + length <= from.size()); - } - - mString.setTo(from.c_str() + start, length); -} - -string::string(const char *s) - : mString(s) { -} - -const char *string::c_str() const { - return mString.string(); -} - -string::size_type string::size() const { - return mString.length(); -} - -void string::clear() { - mString = String8(); -} - -string::size_type string::find(char c) const { - char s[2]; - s[0] = c; - s[1] = '\0'; - - ssize_t index = mString.find(s); - - return index < 0 ? npos : (size_type)index; -} - -bool string::operator<(const string &other) const { - return mString < other.mString; -} - -bool string::operator==(const string &other) const { - return mString == other.mString; -} - -string &string::operator+=(char c) { - mString.append(&c, 1); - - return *this; -} - -void string::erase(size_t from, size_t length) { - String8 s(mString.string(), from); - s.append(mString.string() + from + length); - - mString = s; -} - -} // namespace android - diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk new file mode 100644 index 0000000..7697e3c --- /dev/null +++ b/media/libstagefright/yuv/Android.mk @@ -0,0 +1,15 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + YUVImage.cpp \ + YUVCanvas.cpp + +LOCAL_SHARED_LIBRARIES := \ + libcutils + +LOCAL_MODULE:= libstagefright_yuv + +LOCAL_PRELINK_MODULE := false + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp new file mode 100644 index 0000000..38aa779 --- /dev/null +++ b/media/libstagefright/yuv/YUVCanvas.cpp @@ -0,0 +1,111 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_NDEBUG 0 +#define LOG_TAG "YUVCanvas" + +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/YUVCanvas.h> +#include <media/stagefright/YUVImage.h> +#include <ui/Rect.h> + +namespace android { + +YUVCanvas::YUVCanvas(YUVImage &yuvImage) + : mYUVImage(yuvImage) { +} + +YUVCanvas::~YUVCanvas() { +} + +void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) { + for (int32_t y = 0; y < mYUVImage.height(); ++y) { + for (int32_t x = 0; x < mYUVImage.width(); ++x) { + mYUVImage.setPixelValue(x, y, yValue, uValue, vValue); + } + } +} + +void YUVCanvas::FillYUVRectangle(const Rect& rect, + uint8_t yValue, uint8_t uValue, uint8_t vValue) { + for (int32_t y = rect.top; y < rect.bottom; ++y) { + for (int32_t x = rect.left; x < rect.right; ++x) { + mYUVImage.setPixelValue(x, y, yValue, uValue, vValue); + } + } +} + +void YUVCanvas::CopyImageRect( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage) { + + // Try fast copy first + if (YUVImage::fastCopyRectangle( + srcRect, + destStartX, destStartY, + srcImage, mYUVImage)) { + return; + } + + int32_t srcStartX = srcRect.left; + int32_t srcStartY = srcRect.top; + for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) { + for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) { + int32_t srcX = srcStartX + offsetX; + int32_t srcY = srcStartY + offsetY; + + int32_t destX = destStartX + offsetX; + int32_t destY = destStartY + offsetY; + + uint8_t yValue; + uint8_t uValue; + uint8_t vValue; + + srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue); + mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue); + } + } +} + +void YUVCanvas::downsample( + int32_t srcOffsetX, int32_t srcOffsetY, + int32_t skipX, int32_t skipY, + const YUVImage &srcImage) { + // TODO: Add a low pass filter for downsampling. + + // Check that srcImage is big enough to fill mYUVImage. + CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width()); + CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height()); + + uint8_t yValue; + uint8_t uValue; + uint8_t vValue; + + int32_t srcY = srcOffsetY; + for (int32_t y = 0; y < mYUVImage.height(); ++y) { + int32_t srcX = srcOffsetX; + for (int32_t x = 0; x < mYUVImage.width(); ++x) { + srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue); + mYUVImage.setPixelValue(x, y, yValue, uValue, vValue); + + srcX += skipX; + } + srcY += skipY; + } +} + +} // namespace android diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp new file mode 100644 index 0000000..b712062 --- /dev/null +++ b/media/libstagefright/yuv/YUVImage.cpp @@ -0,0 +1,413 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_NDEBUG 0 +#define LOG_TAG "YUVImage" + +#include <media/stagefright/YUVImage.h> +#include <ui/Rect.h> +#include <media/stagefright/MediaDebug.h> + +namespace android { + +YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) { + mYUVFormat = yuvFormat; + mWidth = width; + mHeight = height; + + size_t numberOfBytes = bufferSize(yuvFormat, width, height); + uint8_t *buffer = new uint8_t[numberOfBytes]; + mBuffer = buffer; + mOwnBuffer = true; + + initializeYUVPointers(); +} + +YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) { + mYUVFormat = yuvFormat; + mWidth = width; + mHeight = height; + mBuffer = buffer; + mOwnBuffer = false; + + initializeYUVPointers(); +} + +//static +size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) { + int32_t numberOfPixels = width*height; + size_t numberOfBytes = 0; + if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) { + // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each. + numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1)); + } else { + LOGE("Format not supported"); + } + return numberOfBytes; +} + +bool YUVImage::initializeYUVPointers() { + int32_t numberOfPixels = mWidth * mHeight; + + if (mYUVFormat == YUV420Planar) { + mYdata = (uint8_t *)mBuffer; + mUdata = mYdata + numberOfPixels; + mVdata = mUdata + (numberOfPixels >> 2); + } else if (mYUVFormat == YUV420SemiPlanar) { + // U and V channels are interleaved as VUVUVU. + // So V data starts at the end of Y channel and + // U data starts right after V's start. + mYdata = (uint8_t *)mBuffer; + mVdata = mYdata + numberOfPixels; + mUdata = mVdata + 1; + } else { + LOGE("Format not supported"); + return false; + } + return true; +} + +YUVImage::~YUVImage() { + if (mOwnBuffer) delete[] mBuffer; +} + +bool YUVImage::getOffsets(int32_t x, int32_t y, + int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const { + *yOffset = y*mWidth + x; + + int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1); + if (mYUVFormat == YUV420Planar) { + *uOffset = uvOffset; + *vOffset = uvOffset; + } else if (mYUVFormat == YUV420SemiPlanar) { + // Since U and V channels are interleaved, offsets need + // to be doubled. + *uOffset = 2*uvOffset; + *vOffset = 2*uvOffset; + } else { + LOGE("Format not supported"); + return false; + } + + return true; +} + +bool YUVImage::getOffsetIncrementsPerDataRow( + int32_t *yDataOffsetIncrement, + int32_t *uDataOffsetIncrement, + int32_t *vDataOffsetIncrement) const { + *yDataOffsetIncrement = mWidth; + + int32_t uvDataOffsetIncrement = mWidth >> 1; + + if (mYUVFormat == YUV420Planar) { + *uDataOffsetIncrement = uvDataOffsetIncrement; + *vDataOffsetIncrement = uvDataOffsetIncrement; + } else if (mYUVFormat == YUV420SemiPlanar) { + // Since U and V channels are interleaved, offsets need + // to be doubled. 
+ *uDataOffsetIncrement = 2*uvDataOffsetIncrement; + *vDataOffsetIncrement = 2*uvDataOffsetIncrement; + } else { + LOGE("Format not supported"); + return false; + } + + return true; +} + +uint8_t* YUVImage::getYAddress(int32_t offset) const { + return mYdata + offset; +} + +uint8_t* YUVImage::getUAddress(int32_t offset) const { + return mUdata + offset; +} + +uint8_t* YUVImage::getVAddress(int32_t offset) const { + return mVdata + offset; +} + +bool YUVImage::getYUVAddresses(int32_t x, int32_t y, + uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const { + int32_t yOffset; + int32_t uOffset; + int32_t vOffset; + if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false; + + *yAddr = getYAddress(yOffset); + *uAddr = getUAddress(uOffset); + *vAddr = getVAddress(vOffset); + + return true; +} + +bool YUVImage::validPixel(int32_t x, int32_t y) const { + return (x >= 0 && x < mWidth && + y >= 0 && y < mHeight); +} + +bool YUVImage::getPixelValue(int32_t x, int32_t y, + uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const { + CHECK(validPixel(x, y)); + + uint8_t *yAddr; + uint8_t *uAddr; + uint8_t *vAddr; + if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false; + + *yPtr = *yAddr; + *uPtr = *uAddr; + *vPtr = *vAddr; + + return true; +} + +bool YUVImage::setPixelValue(int32_t x, int32_t y, + uint8_t yValue, uint8_t uValue, uint8_t vValue) { + CHECK(validPixel(x, y)); + + uint8_t *yAddr; + uint8_t *uAddr; + uint8_t *vAddr; + if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false; + + *yAddr = yValue; + *uAddr = uValue; + *vAddr = vValue; + + return true; +} + +void YUVImage::fastCopyRectangle420Planar( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage) { + CHECK(srcImage.mYUVFormat == YUV420Planar); + CHECK(destImage.mYUVFormat == YUV420Planar); + + int32_t srcStartX = srcRect.left; + int32_t srcStartY = srcRect.top; + int32_t width = srcRect.width(); + int32_t height = srcRect.height(); + + // Get source and destination start addresses + uint8_t *ySrcAddrBase; + uint8_t *uSrcAddrBase; + uint8_t *vSrcAddrBase; + srcImage.getYUVAddresses(srcStartX, srcStartY, + &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase); + + uint8_t *yDestAddrBase; + uint8_t *uDestAddrBase; + uint8_t *vDestAddrBase; + destImage.getYUVAddresses(destStartX, destStartY, + &yDestAddrBase, &uDestAddrBase, &vDestAddrBase); + + // Get source and destination offset increments incurred in going + // from one data row to next. + int32_t ySrcOffsetIncrement; + int32_t uSrcOffsetIncrement; + int32_t vSrcOffsetIncrement; + srcImage.getOffsetIncrementsPerDataRow( + &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement); + + int32_t yDestOffsetIncrement; + int32_t uDestOffsetIncrement; + int32_t vDestOffsetIncrement; + destImage.getOffsetIncrementsPerDataRow( + &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement); + + // Copy Y + { + size_t numberOfYBytesPerRow = (size_t) width; + uint8_t *ySrcAddr = ySrcAddrBase; + uint8_t *yDestAddr = yDestAddrBase; + for (int32_t offsetY = 0; offsetY < height; ++offsetY) { + memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow); + + ySrcAddr += ySrcOffsetIncrement; + yDestAddr += yDestOffsetIncrement; + } + } + + // Copy U + { + size_t numberOfUBytesPerRow = (size_t) (width >> 1); + uint8_t *uSrcAddr = uSrcAddrBase; + uint8_t *uDestAddr = uDestAddrBase; + // Every other row has an entry for U/V channel values. Hence only + // go half the height. 
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) { + memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow); + + uSrcAddr += uSrcOffsetIncrement; + uDestAddr += uDestOffsetIncrement; + } + } + + // Copy V + { + size_t numberOfVBytesPerRow = (size_t) (width >> 1); + uint8_t *vSrcAddr = vSrcAddrBase; + uint8_t *vDestAddr = vDestAddrBase; + // Every other pixel row has a U/V data row. Hence only go half the height. + for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) { + memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow); + + vSrcAddr += vSrcOffsetIncrement; + vDestAddr += vDestOffsetIncrement; + } + } +} + +void YUVImage::fastCopyRectangle420SemiPlanar( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage) { + CHECK(srcImage.mYUVFormat == YUV420SemiPlanar); + CHECK(destImage.mYUVFormat == YUV420SemiPlanar); + + int32_t srcStartX = srcRect.left; + int32_t srcStartY = srcRect.top; + int32_t width = srcRect.width(); + int32_t height = srcRect.height(); + + // Get source and destination start addresses + uint8_t *ySrcAddrBase; + uint8_t *uSrcAddrBase; + uint8_t *vSrcAddrBase; + srcImage.getYUVAddresses(srcStartX, srcStartY, + &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase); + + uint8_t *yDestAddrBase; + uint8_t *uDestAddrBase; + uint8_t *vDestAddrBase; + destImage.getYUVAddresses(destStartX, destStartY, + &yDestAddrBase, &uDestAddrBase, &vDestAddrBase); + + // Get source and destination offset increments incurred in going + // from one data row to next. + int32_t ySrcOffsetIncrement; + int32_t uSrcOffsetIncrement; + int32_t vSrcOffsetIncrement; + srcImage.getOffsetIncrementsPerDataRow( + &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement); + + int32_t yDestOffsetIncrement; + int32_t uDestOffsetIncrement; + int32_t vDestOffsetIncrement; + destImage.getOffsetIncrementsPerDataRow( + &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement); + + // Copy Y + { + size_t numberOfYBytesPerRow = (size_t) width; + uint8_t *ySrcAddr = ySrcAddrBase; + uint8_t *yDestAddr = yDestAddrBase; + for (int32_t offsetY = 0; offsetY < height; ++offsetY) { + memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow); + + ySrcAddr = ySrcAddr + ySrcOffsetIncrement; + yDestAddr = yDestAddr + yDestOffsetIncrement; + } + } + + // Copy UV + { + // UV are interleaved. So number of UV bytes per row is 2*(width/2). + size_t numberOfUVBytesPerRow = (size_t) width; + uint8_t *vSrcAddr = vSrcAddrBase; + uint8_t *vDestAddr = vDestAddrBase; + // Every other pixel row has a U/V data row. Hence only go half the height. 
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) { + memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow); + + vSrcAddr += vSrcOffsetIncrement; + vDestAddr += vDestOffsetIncrement; + } + } +} + +// static +bool YUVImage::fastCopyRectangle( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage) { + if (srcImage.mYUVFormat == destImage.mYUVFormat) { + if (srcImage.mYUVFormat == YUV420Planar) { + fastCopyRectangle420Planar( + srcRect, + destStartX, destStartY, + srcImage, destImage); + } else if (srcImage.mYUVFormat == YUV420SemiPlanar) { + fastCopyRectangle420SemiPlanar( + srcRect, + destStartX, destStartY, + srcImage, destImage); + } + return true; + } + return false; +} + +uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) { + CHECK(maxValue >= minValue); + + if (v < minValue) return minValue; + else if (v > maxValue) return maxValue; + else return v; +} + +void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue, + uint8_t *r, uint8_t *g, uint8_t *b) const { + *r = yValue + (1.370705 * (vValue-128)); + *g = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128)); + *b = yValue + (1.732446 * (uValue-128)); + + *r = clamp(*r, 0, 255); + *g = clamp(*g, 0, 255); + *b = clamp(*b, 0, 255); +} + +bool YUVImage::writeToPPM(const char *filename) const { + FILE *fp = fopen(filename, "w"); + if (fp == NULL) { + return false; + } + fprintf(fp, "P3\n"); + fprintf(fp, "%d %d\n", mWidth, mHeight); + fprintf(fp, "255\n"); + for (int32_t y = 0; y < mHeight; ++y) { + for (int32_t x = 0; x < mWidth; ++x) { + uint8_t yValue; + uint8_t uValue; + uint8_t vValue; + getPixelValue(x, y, &yValue, &uValue, & vValue); + + uint8_t rValue; + uint8_t gValue; + uint8_t bValue; + yuv2rgb(yValue, uValue, vValue, &rValue, &gValue, &bValue); + + fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue); + } + } + fclose(fp); + return true; +} + +} // namespace android diff --git a/media/mtp/Android.mk b/media/mtp/Android.mk new file mode 100644 index 0000000..70dc340 --- /dev/null +++ b/media/mtp/Android.mk @@ -0,0 +1,78 @@ +# +# Copyright (C) 2010 The Android Open Source Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +LOCAL_PATH:= $(call my-dir) + +ifneq ($(TARGET_SIMULATOR),true) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + MtpClient.cpp \ + MtpDataPacket.cpp \ + MtpDebug.cpp \ + MtpDevice.cpp \ + MtpEventPacket.cpp \ + MtpDeviceInfo.cpp \ + MtpObjectInfo.cpp \ + MtpPacket.cpp \ + MtpProperty.cpp \ + MtpRequestPacket.cpp \ + MtpResponsePacket.cpp \ + MtpServer.cpp \ + MtpStorageInfo.cpp \ + MtpStringBuffer.cpp \ + MtpStorage.cpp \ + MtpUtils.cpp \ + +LOCAL_MODULE:= libmtp + +LOCAL_CFLAGS := -DMTP_DEVICE -DMTP_HOST + +LOCAL_SHARED_LIBRARIES := libutils libcutils libusbhost libbinder + +include $(BUILD_SHARED_LIBRARY) + +endif + +ifeq ($(HOST_OS),linux) + +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + MtpClient.cpp \ + MtpDataPacket.cpp \ + MtpDebug.cpp \ + MtpDevice.cpp \ + MtpEventPacket.cpp \ + MtpDeviceInfo.cpp \ + MtpObjectInfo.cpp \ + MtpPacket.cpp \ + MtpProperty.cpp \ + MtpRequestPacket.cpp \ + MtpResponsePacket.cpp \ + MtpStorageInfo.cpp \ + MtpStringBuffer.cpp \ + MtpStorage.cpp \ + MtpUtils.cpp \ + +LOCAL_MODULE:= libmtp + +LOCAL_CFLAGS := -DMTP_HOST + +include $(BUILD_HOST_STATIC_LIBRARY) + +endif diff --git a/media/mtp/MtpClient.cpp b/media/mtp/MtpClient.cpp new file mode 100644 index 0000000..c830540 --- /dev/null +++ b/media/mtp/MtpClient.cpp @@ -0,0 +1,251 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "MtpClient" + +#include "MtpDebug.h" +#include "MtpClient.h" +#include "MtpDevice.h" + +#include <stdio.h> +#include <stdlib.h> +#include <sys/types.h> +#include <sys/ioctl.h> +#include <sys/stat.h> +#include <fcntl.h> +#include <errno.h> + +#include <usbhost/usbhost.h> + +struct usb_device; + +namespace android { + +static bool isMtpDevice(uint16_t vendor, uint16_t product) { + // Sandisk Sansa Fuze + if (vendor == 0x0781 && product == 0x74c2) + return true; + // Samsung YP-Z5 + if (vendor == 0x04e8 && product == 0x503c) + return true; + return false; +} + +class MtpClientThread : public Thread { +private: + MtpClient* mClient; + +public: + MtpClientThread(MtpClient* client) + : mClient(client) + { + } + + virtual bool threadLoop() { + return mClient->threadLoop(); + } +}; + + +MtpClient::MtpClient() + : mThread(NULL), + mUsbHostContext(NULL), + mDone(false) +{ +} + +MtpClient::~MtpClient() { + usb_host_cleanup(mUsbHostContext); +} + +bool MtpClient::start() { + Mutex::Autolock autoLock(mMutex); + + if (mThread) + return true; + + mUsbHostContext = usb_host_init(); + if (!mUsbHostContext) + return false; + + mThread = new MtpClientThread(this); + mThread->run("MtpClientThread"); + // wait for the thread to do initial device discovery before returning + mThreadStartCondition.wait(mMutex); + + return true; +} + +void MtpClient::stop() { + mDone = true; +} + +MtpDevice* MtpClient::getDevice(int id) { + for (int i = 0; i < mDeviceList.size(); i++) { + MtpDevice* device = mDeviceList[i]; + if (device->getID() == id) + return device; + } + return NULL; +} + +bool MtpClient::usbDeviceAdded(const char *devname) { + struct usb_descriptor_header* desc; + struct usb_descriptor_iter iter; + + struct usb_device *device = usb_device_open(devname); + if (!device) { + LOGE("usb_device_open failed\n"); + return mDone; + } + + usb_descriptor_iter_init(device, &iter); + + while ((desc = usb_descriptor_iter_next(&iter)) != NULL) { + if (desc->bDescriptorType == USB_DT_INTERFACE) { + struct usb_interface_descriptor *interface = (struct usb_interface_descriptor *)desc; + + if (interface->bInterfaceClass == USB_CLASS_STILL_IMAGE && + interface->bInterfaceSubClass == 1 && // Still Image Capture + interface->bInterfaceProtocol == 1) // Picture Transfer Protocol (PIMA 15470) + { + LOGD("Found camera: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device), + usb_device_get_product_name(device)); + } else if (interface->bInterfaceClass == 0xFF && + interface->bInterfaceSubClass == 0xFF && + interface->bInterfaceProtocol == 0) { + char* interfaceName = usb_device_get_string(device, interface->iInterface); + if (!interfaceName || strcmp(interfaceName, "MTP")) + continue; + // Looks like an android style MTP device + LOGD("Found MTP device: \"%s\" \"%s\"\n", usb_device_get_manufacturer_name(device), + usb_device_get_product_name(device)); + } else { + // look for special cased devices based on vendor/product ID + // we are doing this mainly for testing purposes + uint16_t vendor = usb_device_get_vendor_id(device); + uint16_t product = usb_device_get_product_id(device); + if (!isMtpDevice(vendor, product)) { + // not an MTP or PTP device + continue; + } + // request MTP OS string and descriptor + // some music players need to see this before entering MTP mode. 
+ char buffer[256]; + memset(buffer, 0, sizeof(buffer)); + int ret = usb_device_send_control(device, + USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_STANDARD, + USB_REQ_GET_DESCRIPTOR, (USB_DT_STRING << 8) | 0xEE, + 0, sizeof(buffer), buffer); + printf("usb_device_send_control returned %d errno: %d\n", ret, errno); + if (ret > 0) { + printf("got MTP string %s\n", buffer); + ret = usb_device_send_control(device, + USB_DIR_IN|USB_RECIP_DEVICE|USB_TYPE_VENDOR, 1, + 0, 4, sizeof(buffer), buffer); + printf("OS descriptor got %d\n", ret); + } else { + printf("no MTP string\n"); + } + } + + // if we got here, then we have a likely MTP or PTP device + + // interface should be followed by three endpoints + struct usb_endpoint_descriptor *ep; + struct usb_endpoint_descriptor *ep_in_desc = NULL; + struct usb_endpoint_descriptor *ep_out_desc = NULL; + struct usb_endpoint_descriptor *ep_intr_desc = NULL; + for (int i = 0; i < 3; i++) { + ep = (struct usb_endpoint_descriptor *)usb_descriptor_iter_next(&iter); + if (!ep || ep->bDescriptorType != USB_DT_ENDPOINT) { + LOGE("endpoints not found\n"); + return mDone; + } + if (ep->bmAttributes == USB_ENDPOINT_XFER_BULK) { + if (ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK) + ep_in_desc = ep; + else + ep_out_desc = ep; + } else if (ep->bmAttributes == USB_ENDPOINT_XFER_INT && + ep->bEndpointAddress & USB_ENDPOINT_DIR_MASK) { + ep_intr_desc = ep; + } + } + if (!ep_in_desc || !ep_out_desc || !ep_intr_desc) { + LOGE("endpoints not found\n"); + return mDone; + } + + if (usb_device_claim_interface(device, interface->bInterfaceNumber)) { + LOGE("usb_device_claim_interface failed errno: %d\n", errno); + return mDone; + } + + MtpDevice* mtpDevice = new MtpDevice(device, interface->bInterfaceNumber, + ep_in_desc, ep_out_desc, ep_intr_desc); + mDeviceList.add(mtpDevice); + mtpDevice->initialize(); + deviceAdded(mtpDevice); + return mDone; + } + } + + usb_device_close(device); + return mDone; +} + +bool MtpClient::usbDeviceRemoved(const char *devname) { + for (int i = 0; i < mDeviceList.size(); i++) { + MtpDevice* device = mDeviceList[i]; + if (!strcmp(devname, device->getDeviceName())) { + deviceRemoved(device); + mDeviceList.removeAt(i); + delete device; + LOGD("Camera removed!\n"); + break; + } + } + return mDone; +} + +bool MtpClient::usbDiscoveryDone() { + Mutex::Autolock autoLock(mMutex); + mThreadStartCondition.signal(); + return mDone; +} + +bool MtpClient::threadLoop() { + usb_host_run(mUsbHostContext, usb_device_added, usb_device_removed, usb_discovery_done, this); + return false; +} + +int MtpClient::usb_device_added(const char *devname, void* client_data) { + LOGD("usb_device_added %s\n", devname); + return ((MtpClient *)client_data)->usbDeviceAdded(devname); +} + +int MtpClient::usb_device_removed(const char *devname, void* client_data) { + LOGD("usb_device_removed %s\n", devname); + return ((MtpClient *)client_data)->usbDeviceRemoved(devname); +} + +int MtpClient::usb_discovery_done(void* client_data) { + LOGD("usb_discovery_done\n"); + return ((MtpClient *)client_data)->usbDiscoveryDone(); +} + +} // namespace android diff --git a/media/mtp/MtpClient.h b/media/mtp/MtpClient.h new file mode 100644 index 0000000..fa5c527 --- /dev/null +++ b/media/mtp/MtpClient.h @@ -0,0 +1,68 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_CLIENT_H +#define _MTP_CLIENT_H + +#include "MtpTypes.h" + +#include <utils/threads.h> + +struct usb_host_context; + +namespace android { + +class MtpClientThread; + +class MtpClient { +private: + MtpDeviceList mDeviceList; + MtpClientThread* mThread; + Condition mThreadStartCondition; + Mutex mMutex; + struct usb_host_context* mUsbHostContext; + bool mDone; + +public: + MtpClient(); + virtual ~MtpClient(); + + bool start(); + void stop(); + + inline MtpDeviceList& getDeviceList() { return mDeviceList; } + MtpDevice* getDevice(int id); + + + virtual void deviceAdded(MtpDevice *device) = 0; + virtual void deviceRemoved(MtpDevice *device) = 0; + +private: + // these return true if we should stop monitoring USB and clean up + bool usbDeviceAdded(const char *devname); + bool usbDeviceRemoved(const char *devname); + bool usbDiscoveryDone(); + + friend class MtpClientThread; + bool threadLoop(); + static int usb_device_added(const char *devname, void* client_data); + static int usb_device_removed(const char *devname, void* client_data); + static int usb_discovery_done(void* client_data); +}; + +}; // namespace android + +#endif // _MTP_CLIENT_H diff --git a/media/mtp/MtpDataPacket.cpp b/media/mtp/MtpDataPacket.cpp new file mode 100644 index 0000000..801edb0 --- /dev/null +++ b/media/mtp/MtpDataPacket.cpp @@ -0,0 +1,506 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "MtpDataPacket" + +#include <stdio.h> +#include <sys/types.h> +#include <fcntl.h> + +#include <usbhost/usbhost.h> + +#include "MtpDataPacket.h" +#include "MtpStringBuffer.h" + +namespace android { + +MtpDataPacket::MtpDataPacket() + : MtpPacket(512), + mOffset(MTP_CONTAINER_HEADER_SIZE) +{ +} + +MtpDataPacket::~MtpDataPacket() { +} + +void MtpDataPacket::reset() { + MtpPacket::reset(); + mOffset = MTP_CONTAINER_HEADER_SIZE; +} + +void MtpDataPacket::setOperationCode(MtpOperationCode code) { + MtpPacket::putUInt16(MTP_CONTAINER_CODE_OFFSET, code); +} + +void MtpDataPacket::setTransactionID(MtpTransactionID id) { + MtpPacket::putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id); +} + +uint16_t MtpDataPacket::getUInt16() { + int offset = mOffset; + uint16_t result = (uint16_t)mBuffer[offset] | ((uint16_t)mBuffer[offset + 1] << 8); + mOffset += 2; + return result; +} + +uint32_t MtpDataPacket::getUInt32() { + int offset = mOffset; + uint32_t result = (uint32_t)mBuffer[offset] | ((uint32_t)mBuffer[offset + 1] << 8) | + ((uint32_t)mBuffer[offset + 2] << 16) | ((uint32_t)mBuffer[offset + 3] << 24); + mOffset += 4; + return result; +} + +uint64_t MtpDataPacket::getUInt64() { + int offset = mOffset; + uint64_t result = (uint64_t)mBuffer[offset] | ((uint64_t)mBuffer[offset + 1] << 8) | + ((uint64_t)mBuffer[offset + 2] << 16) | ((uint64_t)mBuffer[offset + 3] << 24) | + ((uint64_t)mBuffer[offset + 4] << 32) | ((uint64_t)mBuffer[offset + 5] << 40) | + ((uint64_t)mBuffer[offset + 6] << 48) | ((uint64_t)mBuffer[offset + 7] << 56); + mOffset += 8; + return result; +} + +void MtpDataPacket::getUInt128(uint128_t& value) { + value[0] = getUInt32(); + value[1] = getUInt32(); + value[2] = getUInt32(); + value[3] = getUInt32(); +} + +void MtpDataPacket::getString(MtpStringBuffer& string) +{ + string.readFromPacket(this); +} + +Int8List* MtpDataPacket::getAInt8() { + Int8List* result = new Int8List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getInt8()); + return result; +} + +UInt8List* MtpDataPacket::getAUInt8() { + UInt8List* result = new UInt8List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getUInt8()); + return result; +} + +Int16List* MtpDataPacket::getAInt16() { + Int16List* result = new Int16List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getInt16()); + return result; +} + +UInt16List* MtpDataPacket::getAUInt16() { + UInt16List* result = new UInt16List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getUInt16()); + return result; +} + +Int32List* MtpDataPacket::getAInt32() { + Int32List* result = new Int32List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getInt32()); + return result; +} + +UInt32List* MtpDataPacket::getAUInt32() { + UInt32List* result = new UInt32List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getUInt32()); + return result; +} + +Int64List* MtpDataPacket::getAInt64() { + Int64List* result = new Int64List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getInt64()); + return result; +} + +UInt64List* MtpDataPacket::getAUInt64() { + UInt64List* result = new UInt64List; + int count = getUInt32(); + for (int i = 0; i < count; i++) + result->push(getUInt64()); + return result; +} + +void MtpDataPacket::putInt8(int8_t value) { + allocate(mOffset + 1); + mBuffer[mOffset++] = (uint8_t)value; + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void 
MtpDataPacket::putUInt8(uint8_t value) { + allocate(mOffset + 1); + mBuffer[mOffset++] = (uint8_t)value; + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void MtpDataPacket::putInt16(int16_t value) { + allocate(mOffset + 2); + mBuffer[mOffset++] = (uint8_t)(value & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF); + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void MtpDataPacket::putUInt16(uint16_t value) { + allocate(mOffset + 2); + mBuffer[mOffset++] = (uint8_t)(value & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF); + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void MtpDataPacket::putInt32(int32_t value) { + allocate(mOffset + 4); + mBuffer[mOffset++] = (uint8_t)(value & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF); + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void MtpDataPacket::putUInt32(uint32_t value) { + allocate(mOffset + 4); + mBuffer[mOffset++] = (uint8_t)(value & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF); + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void MtpDataPacket::putInt64(int64_t value) { + allocate(mOffset + 8); + mBuffer[mOffset++] = (uint8_t)(value & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF); + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void MtpDataPacket::putUInt64(uint64_t value) { + allocate(mOffset + 8); + mBuffer[mOffset++] = (uint8_t)(value & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 8) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 16) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 24) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 32) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 40) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 48) & 0xFF); + mBuffer[mOffset++] = (uint8_t)((value >> 56) & 0xFF); + if (mPacketSize < mOffset) + mPacketSize = mOffset; +} + +void MtpDataPacket::putInt128(const int128_t& value) { + putInt32(value[0]); + putInt32(value[1]); + putInt32(value[2]); + putInt32(value[3]); +} + +void MtpDataPacket::putUInt128(const uint128_t& value) { + putUInt32(value[0]); + putUInt32(value[1]); + putUInt32(value[2]); + putUInt32(value[3]); +} + +void MtpDataPacket::putInt128(int64_t value) { + putInt64(value); + putInt64(value < 0 ? 
-1 : 0); +} + +void MtpDataPacket::putUInt128(uint64_t value) { + putUInt64(value); + putUInt64(0); +} + +void MtpDataPacket::putAInt8(const int8_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putInt8(*values++); +} + +void MtpDataPacket::putAUInt8(const uint8_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putUInt8(*values++); +} + +void MtpDataPacket::putAInt16(const int16_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putInt16(*values++); +} + +void MtpDataPacket::putAUInt16(const uint16_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putUInt16(*values++); +} + +void MtpDataPacket::putAUInt16(const UInt16List* values) { + size_t count = (values ? values->size() : 0); + putUInt32(count); + for (size_t i = 0; i < count; i++) + putUInt16((*values)[i]); +} + +void MtpDataPacket::putAInt32(const int32_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putInt32(*values++); +} + +void MtpDataPacket::putAUInt32(const uint32_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putUInt32(*values++); +} + +void MtpDataPacket::putAUInt32(const UInt32List* list) { + if (!list) { + putEmptyArray(); + } else { + size_t size = list->size(); + putUInt32(size); + for (size_t i = 0; i < size; i++) + putUInt32((*list)[i]); + } +} + +void MtpDataPacket::putAInt64(const int64_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putInt64(*values++); +} + +void MtpDataPacket::putAUInt64(const uint64_t* values, int count) { + putUInt32(count); + for (int i = 0; i < count; i++) + putUInt64(*values++); +} + +void MtpDataPacket::putString(const MtpStringBuffer& string) { + string.writeToPacket(this); +} + +void MtpDataPacket::putString(const char* s) { + MtpStringBuffer string(s); + string.writeToPacket(this); +} + +void MtpDataPacket::putString(const uint16_t* string) { + int count = 0; + for (int i = 0; i < 256; i++) { + if (string[i]) + count++; + else + break; + } + putUInt8(count > 0 ? count + 1 : 0); + for (int i = 0; i < count; i++) + putUInt16(string[i]); + // only terminate with zero if string is not empty + if (count > 0) + putUInt16(0); +} + +#ifdef MTP_DEVICE +int MtpDataPacket::read(int fd) { + // first read the header + int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE); + if (ret != MTP_CONTAINER_HEADER_SIZE) + return -1; + // then the following data + int total = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET); + allocate(total); + int remaining = total - MTP_CONTAINER_HEADER_SIZE; + ret = ::read(fd, &mBuffer[0] + MTP_CONTAINER_HEADER_SIZE, remaining); + if (ret != remaining) + return -1; + + mPacketSize = total; + mOffset = MTP_CONTAINER_HEADER_SIZE; + return total; +} + +int MtpDataPacket::readDataHeader(int fd) { + int ret = ::read(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE); + if (ret > 0) + mPacketSize = ret; + else + mPacketSize = 0; + return ret; +} + +int MtpDataPacket::write(int fd) { + MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize); + MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA); + // send header separately from data + int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE); + if (ret == MTP_CONTAINER_HEADER_SIZE) + ret = ::write(fd, mBuffer + MTP_CONTAINER_HEADER_SIZE, + mPacketSize - MTP_CONTAINER_HEADER_SIZE); + return (ret < 0 ? 
ret : 0); +} + +int MtpDataPacket::writeDataHeader(int fd, uint32_t length) { + MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length); + MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA); + int ret = ::write(fd, mBuffer, MTP_CONTAINER_HEADER_SIZE); + return (ret < 0 ? ret : 0); +} +#endif // MTP_DEVICE + +#ifdef MTP_HOST +int MtpDataPacket::read(struct usb_request *request) { + // first read the header + request->buffer = mBuffer; + request->buffer_length = mBufferSize; + int length = transfer(request); + if (length >= MTP_CONTAINER_HEADER_SIZE) { + // look at the length field to see if the data spans multiple packets + uint32_t totalLength = MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET); + while (totalLength > length) { + allocate(length + mAllocationIncrement); + request->buffer = mBuffer + length; + request->buffer_length = mAllocationIncrement; + int ret = transfer(request); + if (ret >= 0) + length += ret; + else { + length = ret; + break; + } + } + } + if (length >= 0) + mPacketSize = length; + return length; +} + +int MtpDataPacket::readData(struct usb_request *request, void* buffer, int length) { + int read = 0; + while (read < length) { + request->buffer = (char *)buffer + read; + request->buffer_length = length - read; + int ret = transfer(request); + if (ret < 0) { + return ret; + } + read += ret; + } + return read; +} + +// Queue a read request. Call readDataWait to wait for result +int MtpDataPacket::readDataAsync(struct usb_request *req) { + if (usb_request_queue(req)) { + LOGE("usb_endpoint_queue failed, errno: %d", errno); + return -1; + } + return 0; +} + +// Wait for result of readDataAsync +int MtpDataPacket::readDataWait(struct usb_device *device) { + struct usb_request *req = usb_request_wait(device); + return (req ? req->actual_length : -1); +} + +int MtpDataPacket::readDataHeader(struct usb_request *request) { + request->buffer = mBuffer; + request->buffer_length = request->max_packet_size; + int length = transfer(request); + if (length >= 0) + mPacketSize = length; + return length; +} + +int MtpDataPacket::writeDataHeader(struct usb_request *request, uint32_t length) { + MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, length); + MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA); + request->buffer = mBuffer; + request->buffer_length = MTP_CONTAINER_HEADER_SIZE; + int ret = transfer(request); + return (ret < 0 ? ret : 0); +} + +int MtpDataPacket::write(struct usb_request *request) { + MtpPacket::putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize); + MtpPacket::putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_DATA); + + // send header separately from data + request->buffer = mBuffer; + request->buffer_length = MTP_CONTAINER_HEADER_SIZE; + int ret = transfer(request); + if (ret == MTP_CONTAINER_HEADER_SIZE) { + request->buffer = mBuffer + MTP_CONTAINER_HEADER_SIZE; + request->buffer_length = mPacketSize - MTP_CONTAINER_HEADER_SIZE; + ret = transfer(request); + } + return (ret < 0 ? ret : 0); +} + +int MtpDataPacket::write(struct usb_request *request, void* buffer, uint32_t length) { + request->buffer = buffer; + request->buffer_length = length; + int ret = transfer(request); + return (ret < 0 ? 
ret : 0); +} + +#endif // MTP_HOST + +void* MtpDataPacket::getData(int& outLength) const { + int length = mPacketSize - MTP_CONTAINER_HEADER_SIZE; + if (length > 0) { + void* result = malloc(length); + if (result) { + memcpy(result, mBuffer + MTP_CONTAINER_HEADER_SIZE, length); + outLength = length; + return result; + } + } + outLength = 0; + return NULL; +} + +} // namespace android diff --git a/media/mtp/MtpDataPacket.h b/media/mtp/MtpDataPacket.h new file mode 100644 index 0000000..577cea1 --- /dev/null +++ b/media/mtp/MtpDataPacket.h @@ -0,0 +1,124 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_DATA_PACKET_H +#define _MTP_DATA_PACKET_H + +#include "MtpPacket.h" +#include "mtp.h" + +struct usb_device; +struct usb_request; + +namespace android { + +class MtpStringBuffer; + +class MtpDataPacket : public MtpPacket { +private: + // current offset for get/put methods + int mOffset; + +public: + MtpDataPacket(); + virtual ~MtpDataPacket(); + + virtual void reset(); + + void setOperationCode(MtpOperationCode code); + void setTransactionID(MtpTransactionID id); + + inline uint8_t getUInt8() { return (uint8_t)mBuffer[mOffset++]; } + inline int8_t getInt8() { return (int8_t)mBuffer[mOffset++]; } + uint16_t getUInt16(); + inline int16_t getInt16() { return (int16_t)getUInt16(); } + uint32_t getUInt32(); + inline int32_t getInt32() { return (int32_t)getUInt32(); } + uint64_t getUInt64(); + inline int64_t getInt64() { return (int64_t)getUInt64(); } + void getUInt128(uint128_t& value); + inline void getInt128(int128_t& value) { getUInt128((uint128_t&)value); } + void getString(MtpStringBuffer& string); + + Int8List* getAInt8(); + UInt8List* getAUInt8(); + Int16List* getAInt16(); + UInt16List* getAUInt16(); + Int32List* getAInt32(); + UInt32List* getAUInt32(); + Int64List* getAInt64(); + UInt64List* getAUInt64(); + + void putInt8(int8_t value); + void putUInt8(uint8_t value); + void putInt16(int16_t value); + void putUInt16(uint16_t value); + void putInt32(int32_t value); + void putUInt32(uint32_t value); + void putInt64(int64_t value); + void putUInt64(uint64_t value); + void putInt128(const int128_t& value); + void putUInt128(const uint128_t& value); + void putInt128(int64_t value); + void putUInt128(uint64_t value); + + void putAInt8(const int8_t* values, int count); + void putAUInt8(const uint8_t* values, int count); + void putAInt16(const int16_t* values, int count); + void putAUInt16(const uint16_t* values, int count); + void putAUInt16(const UInt16List* values); + void putAInt32(const int32_t* values, int count); + void putAUInt32(const uint32_t* values, int count); + void putAUInt32(const UInt32List* list); + void putAInt64(const int64_t* values, int count); + void putAUInt64(const uint64_t* values, int count); + void putString(const MtpStringBuffer& string); + void putString(const char* string); + void putString(const uint16_t* string); + inline void putEmptyString() { putUInt8(0); } + inline void 
putEmptyArray() { putUInt32(0); } + + +#ifdef MTP_DEVICE + // fill our buffer with data from the given file descriptor + int read(int fd); + int readDataHeader(int fd); + + // write our data to the given file descriptor + int write(int fd); + int writeDataHeader(int fd, uint32_t length); +#endif + +#ifdef MTP_HOST + int read(struct usb_request *request); + int readData(struct usb_request *request, void* buffer, int length); + int readDataAsync(struct usb_request *req); + int readDataWait(struct usb_device *device); + int readDataHeader(struct usb_request *ep); + + int writeDataHeader(struct usb_request *ep, uint32_t length); + int write(struct usb_request *ep); + int write(struct usb_request *ep, void* buffer, uint32_t length); +#endif + + inline bool hasData() const { return mPacketSize > MTP_CONTAINER_HEADER_SIZE; } + inline uint32_t getContainerLength() const { return MtpPacket::getUInt32(MTP_CONTAINER_LENGTH_OFFSET); } + void* getData(int& outLength) const; +}; + +}; // namespace android + +#endif // _MTP_DATA_PACKET_H diff --git a/media/mtp/MtpDatabase.h b/media/mtp/MtpDatabase.h new file mode 100644 index 0000000..4d9a1ae --- /dev/null +++ b/media/mtp/MtpDatabase.h @@ -0,0 +1,110 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MTP_DATABASE_H +#define _MTP_DATABASE_H + +#include "MtpTypes.h" + +namespace android { + +class MtpDataPacket; +class MtpProperty; + +class MtpDatabase { +public: + virtual ~MtpDatabase() {} + + // called from SendObjectInfo to reserve a database entry for the incoming file + virtual MtpObjectHandle beginSendObject(const char* path, + MtpObjectFormat format, + MtpObjectHandle parent, + MtpStorageID storage, + uint64_t size, + time_t modified) = 0; + + // called to report success or failure of the SendObject file transfer + // success should signal a notification of the new object's creation, + // failure should remove the database entry created in beginSendObject + virtual void endSendObject(const char* path, + MtpObjectHandle handle, + MtpObjectFormat format, + bool succeeded) = 0; + + virtual MtpObjectHandleList* getObjectList(MtpStorageID storageID, + MtpObjectFormat format, + MtpObjectHandle parent) = 0; + + virtual int getNumObjects(MtpStorageID storageID, + MtpObjectFormat format, + MtpObjectHandle parent) = 0; + + // callee should delete[] the results from these + // results can be NULL + virtual MtpObjectFormatList* getSupportedPlaybackFormats() = 0; + virtual MtpObjectFormatList* getSupportedCaptureFormats() = 0; + virtual MtpObjectPropertyList* getSupportedObjectProperties(MtpObjectFormat format) = 0; + virtual MtpDevicePropertyList* getSupportedDeviceProperties() = 0; + + virtual MtpResponseCode getObjectPropertyValue(MtpObjectHandle handle, + MtpObjectProperty property, + MtpDataPacket& packet) = 0; + + virtual MtpResponseCode setObjectPropertyValue(MtpObjectHandle handle, + MtpObjectProperty property, + MtpDataPacket& packet) = 0; + + virtual MtpResponseCode getDevicePropertyValue(MtpDeviceProperty property, + MtpDataPacket& packet) = 0; + + virtual MtpResponseCode setDevicePropertyValue(MtpDeviceProperty property, + MtpDataPacket& packet) = 0; + + virtual MtpResponseCode resetDeviceProperty(MtpDeviceProperty property) = 0; + + virtual MtpResponseCode getObjectPropertyList(MtpObjectHandle handle, + uint32_t format, uint32_t property, + int groupCode, int depth, + MtpDataPacket& packet) = 0; + + virtual MtpResponseCode getObjectInfo(MtpObjectHandle handle, + MtpDataPacket& packet) = 0; + + virtual MtpResponseCode getObjectFilePath(MtpObjectHandle handle, + MtpString& outFilePath, + int64_t& outFileLength, + MtpObjectFormat& outFormat) = 0; + + virtual MtpResponseCode deleteFile(MtpObjectHandle handle) = 0; + + virtual MtpObjectHandleList* getObjectReferences(MtpObjectHandle handle) = 0; + + virtual MtpResponseCode setObjectReferences(MtpObjectHandle handle, + MtpObjectHandleList* references) = 0; + + virtual MtpProperty* getObjectPropertyDesc(MtpObjectProperty property, + MtpObjectFormat format) = 0; + + virtual MtpProperty* getDevicePropertyDesc(MtpDeviceProperty property) = 0; + + virtual void sessionStarted() = 0; + + virtual void sessionEnded() = 0; +}; + +}; // namespace android + +#endif // _MTP_DATABASE_H diff --git a/media/mtp/MtpDebug.cpp b/media/mtp/MtpDebug.cpp new file mode 100644 index 0000000..1668ecf --- /dev/null +++ b/media/mtp/MtpDebug.cpp @@ -0,0 +1,396 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "MtpDebug.h" + +namespace android { + +struct CodeEntry { + const char* name; + uint16_t code; +}; + +static const CodeEntry sOperationCodes[] = { + { "MTP_OPERATION_GET_DEVICE_INFO", 0x1001 }, + { "MTP_OPERATION_OPEN_SESSION", 0x1002 }, + { "MTP_OPERATION_CLOSE_SESSION", 0x1003 }, + { "MTP_OPERATION_GET_STORAGE_IDS", 0x1004 }, + { "MTP_OPERATION_GET_STORAGE_INFO", 0x1005 }, + { "MTP_OPERATION_GET_NUM_OBJECTS", 0x1006 }, + { "MTP_OPERATION_GET_OBJECT_HANDLES", 0x1007 }, + { "MTP_OPERATION_GET_OBJECT_INFO", 0x1008 }, + { "MTP_OPERATION_GET_OBJECT", 0x1009 }, + { "MTP_OPERATION_GET_THUMB", 0x100A }, + { "MTP_OPERATION_DELETE_OBJECT", 0x100B }, + { "MTP_OPERATION_SEND_OBJECT_INFO", 0x100C }, + { "MTP_OPERATION_SEND_OBJECT", 0x100D }, + { "MTP_OPERATION_INITIATE_CAPTURE", 0x100E }, + { "MTP_OPERATION_FORMAT_STORE", 0x100F }, + { "MTP_OPERATION_RESET_DEVICE", 0x1010 }, + { "MTP_OPERATION_SELF_TEST", 0x1011 }, + { "MTP_OPERATION_SET_OBJECT_PROTECTION", 0x1012 }, + { "MTP_OPERATION_POWER_DOWN", 0x1013 }, + { "MTP_OPERATION_GET_DEVICE_PROP_DESC", 0x1014 }, + { "MTP_OPERATION_GET_DEVICE_PROP_VALUE", 0x1015 }, + { "MTP_OPERATION_SET_DEVICE_PROP_VALUE", 0x1016 }, + { "MTP_OPERATION_RESET_DEVICE_PROP_VALUE", 0x1017 }, + { "MTP_OPERATION_TERMINATE_OPEN_CAPTURE", 0x1018 }, + { "MTP_OPERATION_MOVE_OBJECT", 0x1019 }, + { "MTP_OPERATION_COPY_OBJECT", 0x101A }, + { "MTP_OPERATION_GET_PARTIAL_OBJECT", 0x101B }, + { "MTP_OPERATION_INITIATE_OPEN_CAPTURE", 0x101C }, + { "MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED", 0x9801 }, + { "MTP_OPERATION_GET_OBJECT_PROP_DESC", 0x9802 }, + { "MTP_OPERATION_GET_OBJECT_PROP_VALUE", 0x9803 }, + { "MTP_OPERATION_SET_OBJECT_PROP_VALUE", 0x9804 }, + { "MTP_OPERATION_GET_OBJECT_PROP_LIST", 0x9805 }, + { "MTP_OPERATION_SET_OBJECT_PROP_LIST", 0x9806 }, + { "MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC", 0x9807 }, + { "MTP_OPERATION_SEND_OBJECT_PROP_LIST", 0x9808 }, + { "MTP_OPERATION_GET_OBJECT_REFERENCES", 0x9810 }, + { "MTP_OPERATION_SET_OBJECT_REFERENCES", 0x9811 }, + { "MTP_OPERATION_SKIP", 0x9820 }, + { 0, 0 }, +}; + +static const CodeEntry sFormatCodes[] = { + { "MTP_FORMAT_UNDEFINED", 0x3000 }, + { "MTP_FORMAT_ASSOCIATION", 0x3001 }, + { "MTP_FORMAT_SCRIPT", 0x3002 }, + { "MTP_FORMAT_EXECUTABLE", 0x3003 }, + { "MTP_FORMAT_TEXT", 0x3004 }, + { "MTP_FORMAT_HTML", 0x3005 }, + { "MTP_FORMAT_DPOF", 0x3006 }, + { "MTP_FORMAT_AIFF", 0x3007 }, + { "MTP_FORMAT_WAV", 0x3008 }, + { "MTP_FORMAT_MP3", 0x3009 }, + { "MTP_FORMAT_AVI", 0x300A }, + { "MTP_FORMAT_MPEG", 0x300B }, + { "MTP_FORMAT_ASF", 0x300C }, + { "MTP_FORMAT_DEFINED", 0x3800 }, + { "MTP_FORMAT_EXIF_JPEG", 0x3801 }, + { "MTP_FORMAT_TIFF_EP", 0x3802 }, + { "MTP_FORMAT_FLASHPIX", 0x3803 }, + { "MTP_FORMAT_BMP", 0x3804 }, + { "MTP_FORMAT_CIFF", 0x3805 }, + { "MTP_FORMAT_GIF", 0x3807 }, + { "MTP_FORMAT_JFIF", 0x3808 }, + { "MTP_FORMAT_CD", 0x3809 }, + { "MTP_FORMAT_PICT", 0x380A }, + { "MTP_FORMAT_PNG", 0x380B }, + { "MTP_FORMAT_TIFF", 0x380D }, + { "MTP_FORMAT_TIFF_IT", 0x380E }, + { "MTP_FORMAT_JP2", 0x380F }, + { "MTP_FORMAT_JPX", 0x3810 }, + { "MTP_FORMAT_UNDEFINED_FIRMWARE", 0xB802 }, + { 
"MTP_FORMAT_WINDOWS_IMAGE_FORMAT", 0xB881 }, + { "MTP_FORMAT_UNDEFINED_AUDIO", 0xB900 }, + { "MTP_FORMAT_WMA", 0xB901 }, + { "MTP_FORMAT_OGG", 0xB902 }, + { "MTP_FORMAT_AAC", 0xB903 }, + { "MTP_FORMAT_AUDIBLE", 0xB904 }, + { "MTP_FORMAT_FLAC", 0xB906 }, + { "MTP_FORMAT_UNDEFINED_VIDEO", 0xB980 }, + { "MTP_FORMAT_WMV", 0xB981 }, + { "MTP_FORMAT_MP4_CONTAINER", 0xB982 }, + { "MTP_FORMAT_MP2", 0xB983 }, + { "MTP_FORMAT_3GP_CONTAINER", 0xB984 }, + { "MTP_FORMAT_UNDEFINED_COLLECTION", 0xBA00 }, + { "MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM", 0xBA01 }, + { "MTP_FORMAT_ABSTRACT_IMAGE_ALBUM", 0xBA02 }, + { "MTP_FORMAT_ABSTRACT_AUDIO_ALBUM", 0xBA03 }, + { "MTP_FORMAT_ABSTRACT_VIDEO_ALBUM", 0xBA04 }, + { "MTP_FORMAT_ABSTRACT_AV_PLAYLIST", 0xBA05 }, + { "MTP_FORMAT_ABSTRACT_CONTACT_GROUP", 0xBA06 }, + { "MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER", 0xBA07 }, + { "MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION", 0xBA08 }, + { "MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST", 0xBA09 }, + { "MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST", 0xBA0A }, + { "MTP_FORMAT_ABSTRACT_MEDIACAST", 0xBA0B }, + { "MTP_FORMAT_WPL_PLAYLIST", 0xBA10 }, + { "MTP_FORMAT_M3U_PLAYLIST", 0xBA11 }, + { "MTP_FORMAT_MPL_PLAYLIST", 0xBA12 }, + { "MTP_FORMAT_ASX_PLAYLIST", 0xBA13 }, + { "MTP_FORMAT_PLS_PLAYLIST", 0xBA14 }, + { "MTP_FORMAT_UNDEFINED_DOCUMENT", 0xBA80 }, + { "MTP_FORMAT_ABSTRACT_DOCUMENT", 0xBA81 }, + { "MTP_FORMAT_XML_DOCUMENT", 0xBA82 }, + { "MTP_FORMAT_MS_WORD_DOCUMENT", 0xBA83 }, + { "MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT", 0xBA84 }, + { "MTP_FORMAT_MS_EXCEL_SPREADSHEET", 0xBA85 }, + { "MTP_FORMAT_MS_POWERPOINT_PRESENTATION", 0xBA86 }, + { "MTP_FORMAT_UNDEFINED_MESSAGE", 0xBB00 }, + { "MTP_FORMAT_ABSTRACT_MESSSAGE", 0xBB01 }, + { "MTP_FORMAT_UNDEFINED_CONTACT", 0xBB80 }, + { "MTP_FORMAT_ABSTRACT_CONTACT", 0xBB81 }, + { "MTP_FORMAT_VCARD_2", 0xBB82 }, + { 0, 0 }, +}; + +static const CodeEntry sObjectPropCodes[] = { + { "MTP_PROPERTY_STORAGE_ID", 0xDC01 }, + { "MTP_PROPERTY_OBJECT_FORMAT", 0xDC02 }, + { "MTP_PROPERTY_PROTECTION_STATUS", 0xDC03 }, + { "MTP_PROPERTY_OBJECT_SIZE", 0xDC04 }, + { "MTP_PROPERTY_ASSOCIATION_TYPE", 0xDC05 }, + { "MTP_PROPERTY_ASSOCIATION_DESC", 0xDC06 }, + { "MTP_PROPERTY_OBJECT_FILE_NAME", 0xDC07 }, + { "MTP_PROPERTY_DATE_CREATED", 0xDC08 }, + { "MTP_PROPERTY_DATE_MODIFIED", 0xDC09 }, + { "MTP_PROPERTY_KEYWORDS", 0xDC0A }, + { "MTP_PROPERTY_PARENT_OBJECT", 0xDC0B }, + { "MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS", 0xDC0C }, + { "MTP_PROPERTY_HIDDEN", 0xDC0D }, + { "MTP_PROPERTY_SYSTEM_OBJECT", 0xDC0E }, + { "MTP_PROPERTY_PERSISTENT_UID", 0xDC41 }, + { "MTP_PROPERTY_SYNC_ID", 0xDC42 }, + { "MTP_PROPERTY_PROPERTY_BAG", 0xDC43 }, + { "MTP_PROPERTY_NAME", 0xDC44 }, + { "MTP_PROPERTY_CREATED_BY", 0xDC45 }, + { "MTP_PROPERTY_ARTIST", 0xDC46 }, + { "MTP_PROPERTY_DATE_AUTHORED", 0xDC47 }, + { "MTP_PROPERTY_DESCRIPTION", 0xDC48 }, + { "MTP_PROPERTY_URL_REFERENCE", 0xDC49 }, + { "MTP_PROPERTY_LANGUAGE_LOCALE", 0xDC4A }, + { "MTP_PROPERTY_COPYRIGHT_INFORMATION", 0xDC4B }, + { "MTP_PROPERTY_SOURCE", 0xDC4C }, + { "MTP_PROPERTY_ORIGIN_LOCATION", 0xDC4D }, + { "MTP_PROPERTY_DATE_ADDED", 0xDC4E }, + { "MTP_PROPERTY_NON_CONSUMABLE", 0xDC4F }, + { "MTP_PROPERTY_CORRUPT_UNPLAYABLE", 0xDC50 }, + { "MTP_PROPERTY_PRODUCER_SERIAL_NUMBER", 0xDC51 }, + { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT", 0xDC81 }, + { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE", 0xDC82 }, + { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT", 0xDC83 }, + { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH", 0xDC84 }, + { "MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION", 0xDC85 }, + { 
"MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA", 0xDC86 }, + { "MTP_PROPERTY_WIDTH", 0xDC87 }, + { "MTP_PROPERTY_HEIGHT", 0xDC88 }, + { "MTP_PROPERTY_DURATION", 0xDC89 }, + { "MTP_PROPERTY_RATING", 0xDC8A }, + { "MTP_PROPERTY_TRACK", 0xDC8B }, + { "MTP_PROPERTY_GENRE", 0xDC8C }, + { "MTP_PROPERTY_CREDITS", 0xDC8D }, + { "MTP_PROPERTY_LYRICS", 0xDC8E }, + { "MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID", 0xDC8F }, + { "MTP_PROPERTY_PRODUCED_BY", 0xDC90 }, + { "MTP_PROPERTY_USE_COUNT", 0xDC91 }, + { "MTP_PROPERTY_SKIP_COUNT", 0xDC92 }, + { "MTP_PROPERTY_LAST_ACCESSED", 0xDC93 }, + { "MTP_PROPERTY_PARENTAL_RATING", 0xDC94 }, + { "MTP_PROPERTY_META_GENRE", 0xDC95 }, + { "MTP_PROPERTY_COMPOSER", 0xDC96 }, + { "MTP_PROPERTY_EFFECTIVE_RATING", 0xDC97 }, + { "MTP_PROPERTY_SUBTITLE", 0xDC98 }, + { "MTP_PROPERTY_ORIGINAL_RELEASE_DATE", 0xDC99 }, + { "MTP_PROPERTY_ALBUM_NAME", 0xDC9A }, + { "MTP_PROPERTY_ALBUM_ARTIST", 0xDC9B }, + { "MTP_PROPERTY_MOOD", 0xDC9C }, + { "MTP_PROPERTY_DRM_STATUS", 0xDC9D }, + { "MTP_PROPERTY_SUB_DESCRIPTION", 0xDC9E }, + { "MTP_PROPERTY_IS_CROPPED", 0xDCD1 }, + { "MTP_PROPERTY_IS_COLOUR_CORRECTED", 0xDCD2 }, + { "MTP_PROPERTY_IMAGE_BIT_DEPTH", 0xDCD3 }, + { "MTP_PROPERTY_F_NUMBER", 0xDCD4 }, + { "MTP_PROPERTY_EXPOSURE_TIME", 0xDCD5 }, + { "MTP_PROPERTY_EXPOSURE_INDEX", 0xDCD6 }, + { "MTP_PROPERTY_TOTAL_BITRATE", 0xDE91 }, + { "MTP_PROPERTY_BITRATE_TYPE", 0xDE92 }, + { "MTP_PROPERTY_SAMPLE_RATE", 0xDE93 }, + { "MTP_PROPERTY_NUMBER_OF_CHANNELS", 0xDE94 }, + { "MTP_PROPERTY_AUDIO_BIT_DEPTH", 0xDE95 }, + { "MTP_PROPERTY_SCAN_TYPE", 0xDE97 }, + { "MTP_PROPERTY_AUDIO_WAVE_CODEC", 0xDE99 }, + { "MTP_PROPERTY_AUDIO_BITRATE", 0xDE9A }, + { "MTP_PROPERTY_VIDEO_FOURCC_CODEC", 0xDE9B }, + { "MTP_PROPERTY_VIDEO_BITRATE", 0xDE9C }, + { "MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS", 0xDE9D }, + { "MTP_PROPERTY_KEYFRAME_DISTANCE", 0xDE9E }, + { "MTP_PROPERTY_BUFFER_SIZE", 0xDE9F }, + { "MTP_PROPERTY_ENCODING_QUALITY", 0xDEA0 }, + { "MTP_PROPERTY_ENCODING_PROFILE", 0xDEA1 }, + { "MTP_PROPERTY_DISPLAY_NAME", 0xDCE0 }, + { "MTP_PROPERTY_BODY_TEXT", 0xDCE1 }, + { "MTP_PROPERTY_SUBJECT", 0xDCE2 }, + { "MTP_PROPERTY_PRIORITY", 0xDCE3 }, + { "MTP_PROPERTY_GIVEN_NAME", 0xDD00 }, + { "MTP_PROPERTY_MIDDLE_NAMES", 0xDD01 }, + { "MTP_PROPERTY_FAMILY_NAME", 0xDD02 }, + { "MTP_PROPERTY_PREFIX", 0xDD03 }, + { "MTP_PROPERTY_SUFFIX", 0xDD04 }, + { "MTP_PROPERTY_PHONETIC_GIVEN_NAME", 0xDD05 }, + { "MTP_PROPERTY_PHONETIC_FAMILY_NAME", 0xDD06 }, + { "MTP_PROPERTY_EMAIL_PRIMARY", 0xDD07 }, + { "MTP_PROPERTY_EMAIL_PERSONAL_1", 0xDD08 }, + { "MTP_PROPERTY_EMAIL_PERSONAL_2", 0xDD09 }, + { "MTP_PROPERTY_EMAIL_BUSINESS_1", 0xDD0A }, + { "MTP_PROPERTY_EMAIL_BUSINESS_2", 0xDD0B }, + { "MTP_PROPERTY_EMAIL_OTHERS", 0xDD0C }, + { "MTP_PROPERTY_PHONE_NUMBER_PRIMARY", 0xDD0D }, + { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL", 0xDD0E }, + { "MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2", 0xDD0F }, + { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS", 0xDD10 }, + { "MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2", 0xDD11 }, + { "MTP_PROPERTY_PHONE_NUMBER_MOBILE", 0xDD12 }, + { "MTP_PROPERTY_PHONE_NUMBER_MOBILE_2", 0xDD13 }, + { "MTP_PROPERTY_FAX_NUMBER_PRIMARY", 0xDD14 }, + { "MTP_PROPERTY_FAX_NUMBER_PERSONAL", 0xDD15 }, + { "MTP_PROPERTY_FAX_NUMBER_BUSINESS", 0xDD16 }, + { "MTP_PROPERTY_PAGER_NUMBER", 0xDD17 }, + { "MTP_PROPERTY_PHONE_NUMBER_OTHERS", 0xDD18 }, + { "MTP_PROPERTY_PRIMARY_WEB_ADDRESS", 0xDD19 }, + { "MTP_PROPERTY_PERSONAL_WEB_ADDRESS", 0xDD1A }, + { "MTP_PROPERTY_BUSINESS_WEB_ADDRESS", 0xDD1B }, + { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS", 0xDD1C }, + { 
"MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2", 0xDD1D }, + { "MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3", 0xDD1E }, + { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL", 0xDD1F }, + { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1", 0xDD20 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2", 0xDD21 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY", 0xDD22 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION", 0xDD23 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE", 0xDD24 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY", 0xDD25 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL", 0xDD26 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1", 0xDD27 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2", 0xDD28 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY", 0xDD29 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION", 0xDD2A }, + { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE", 0xDD2B }, + { "MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY", 0xDD2C }, + { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL", 0xDD2D }, + { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1", 0xDD2E }, + { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2", 0xDD2F }, + { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY", 0xDD30 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION", 0xDD31 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE", 0xDD32 }, + { "MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY", 0xDD33 }, + { "MTP_PROPERTY_ORGANIZATION_NAME", 0xDD34 }, + { "MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME", 0xDD35 }, + { "MTP_PROPERTY_ROLE", 0xDD36 }, + { "MTP_PROPERTY_BIRTHDATE", 0xDD37 }, + { "MTP_PROPERTY_MESSAGE_TO", 0xDD40 }, + { "MTP_PROPERTY_MESSAGE_CC", 0xDD41 }, + { "MTP_PROPERTY_MESSAGE_BCC", 0xDD42 }, + { "MTP_PROPERTY_MESSAGE_READ", 0xDD43 }, + { "MTP_PROPERTY_MESSAGE_RECEIVED_TIME", 0xDD44 }, + { "MTP_PROPERTY_MESSAGE_SENDER", 0xDD45 }, + { "MTP_PROPERTY_ACTIVITY_BEGIN_TIME", 0xDD50 }, + { "MTP_PROPERTY_ACTIVITY_END_TIME", 0xDD51 }, + { "MTP_PROPERTY_ACTIVITY_LOCATION", 0xDD52 }, + { "MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES", 0xDD54 }, + { "MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES", 0xDD55 }, + { "MTP_PROPERTY_ACTIVITY_RESOURCES", 0xDD56 }, + { "MTP_PROPERTY_ACTIVITY_ACCEPTED", 0xDD57 }, + { "MTP_PROPERTY_ACTIVITY_TENTATIVE", 0xDD58 }, + { "MTP_PROPERTY_ACTIVITY_DECLINED", 0xDD59 }, + { "MTP_PROPERTY_ACTIVITY_REMAINDER_TIME", 0xDD5A }, + { "MTP_PROPERTY_ACTIVITY_OWNER", 0xDD5B }, + { "MTP_PROPERTY_ACTIVITY_STATUS", 0xDD5C }, + { "MTP_PROPERTY_OWNER", 0xDD5D }, + { "MTP_PROPERTY_EDITOR", 0xDD5E }, + { "MTP_PROPERTY_WEBMASTER", 0xDD5F }, + { "MTP_PROPERTY_URL_SOURCE", 0xDD60 }, + { "MTP_PROPERTY_URL_DESTINATION", 0xDD61 }, + { "MTP_PROPERTY_TIME_BOOKMARK", 0xDD62 }, + { "MTP_PROPERTY_OBJECT_BOOKMARK", 0xDD63 }, + { "MTP_PROPERTY_BYTE_BOOKMARK", 0xDD64 }, + { "MTP_PROPERTY_LAST_BUILD_DATE", 0xDD70 }, + { "MTP_PROPERTY_TIME_TO_LIVE", 0xDD71 }, + { "MTP_PROPERTY_MEDIA_GUID", 0xDD72 }, + { 0, 0 }, +}; + +static const CodeEntry sDevicePropCodes[] = { + { "MTP_DEVICE_PROPERTY_UNDEFINED", 0x5000 }, + { "MTP_DEVICE_PROPERTY_BATTERY_LEVEL", 0x5001 }, + { "MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE", 0x5002 }, + { "MTP_DEVICE_PROPERTY_IMAGE_SIZE", 0x5003 }, + { "MTP_DEVICE_PROPERTY_COMPRESSION_SETTING", 0x5004 }, + { "MTP_DEVICE_PROPERTY_WHITE_BALANCE", 0x5005 }, + { "MTP_DEVICE_PROPERTY_RGB_GAIN", 0x5006 }, + { "MTP_DEVICE_PROPERTY_F_NUMBER", 0x5007 }, + { "MTP_DEVICE_PROPERTY_FOCAL_LENGTH", 0x5008 }, + { "MTP_DEVICE_PROPERTY_FOCUS_DISTANCE", 0x5009 }, + { "MTP_DEVICE_PROPERTY_FOCUS_MODE", 0x500A }, + { "MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE", 
0x500B }, + { "MTP_DEVICE_PROPERTY_FLASH_MODE", 0x500C }, + { "MTP_DEVICE_PROPERTY_EXPOSURE_TIME", 0x500D }, + { "MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE", 0x500E }, + { "MTP_DEVICE_PROPERTY_EXPOSURE_INDEX", 0x500F }, + { "MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION", 0x5010 }, + { "MTP_DEVICE_PROPERTY_DATETIME", 0x5011 }, + { "MTP_DEVICE_PROPERTY_CAPTURE_DELAY", 0x5012 }, + { "MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE", 0x5013 }, + { "MTP_DEVICE_PROPERTY_CONTRAST", 0x5014 }, + { "MTP_DEVICE_PROPERTY_SHARPNESS", 0x5015 }, + { "MTP_DEVICE_PROPERTY_DIGITAL_ZOOM", 0x5016 }, + { "MTP_DEVICE_PROPERTY_EFFECT_MODE", 0x5017 }, + { "MTP_DEVICE_PROPERTY_BURST_NUMBER", 0x5018 }, + { "MTP_DEVICE_PROPERTY_BURST_INTERVAL", 0x5019 }, + { "MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER", 0x501A }, + { "MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL", 0x501B }, + { "MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE", 0x501C }, + { "MTP_DEVICE_PROPERTY_UPLOAD_URL", 0x501D }, + { "MTP_DEVICE_PROPERTY_ARTIST", 0x501E }, + { "MTP_DEVICE_PROPERTY_COPYRIGHT_INFO", 0x501F }, + { "MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER", 0xD401 }, + { "MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME", 0xD402 }, + { "MTP_DEVICE_PROPERTY_VOLUME", 0xD403 }, + { "MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED", 0xD404 }, + { "MTP_DEVICE_PROPERTY_DEVICE_ICON", 0xD405 }, + { "MTP_DEVICE_PROPERTY_PLAYBACK_RATE", 0xD410 }, + { "MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT", 0xD411 }, + { "MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX", 0xD412 }, + { "MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO", 0xD406 }, + { "MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE", 0xD407 }, + { 0, 0 }, +}; + +static const char* getCodeName(uint16_t code, const CodeEntry* table) { + const CodeEntry* entry = table; + while (entry->name) { + if (entry->code == code) + return entry->name; + entry++; + } + return "UNKNOWN"; +} + +const char* MtpDebug::getOperationCodeName(MtpOperationCode code) { + return getCodeName(code, sOperationCodes); +} + +const char* MtpDebug::getFormatCodeName(MtpObjectFormat code) { + if (code == 0) + return "NONE"; + return getCodeName(code, sFormatCodes); +} + +const char* MtpDebug::getObjectPropCodeName(MtpPropertyCode code) { + if (code == 0) + return "NONE"; + return getCodeName(code, sObjectPropCodes); +} + +const char* MtpDebug::getDevicePropCodeName(MtpPropertyCode code) { + if (code == 0) + return "NONE"; + return getCodeName(code, sDevicePropCodes); +} + +} // namespace android diff --git a/media/libstagefright/omx/OMXRenderer.h b/media/mtp/MtpDebug.h index 4d194ce..5b53e31 100644 --- a/media/libstagefright/omx/OMXRenderer.h +++ b/media/mtp/MtpDebug.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009 The Android Open Source Project + * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,31 +14,24 @@ * limitations under the License. */ -#ifndef OMX_RENDERER_H_ +#ifndef _MTP_DEBUG_H +#define _MTP_DEBUG_H -#define OMX_RENDERER_H_ +// #define LOG_NDEBUG 0 +#include <utils/Log.h> -#include <media/IOMX.h> +#include "MtpTypes.h" namespace android { -class VideoRenderer; - -class OMXRenderer : public BnOMXRenderer { +class MtpDebug { public: - // Assumes ownership of "impl". 
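Each table above is a flat array of name/code pairs terminated by a { 0, 0 } sentinel, and getCodeName() walks it linearly, so lookups are O(n) but only run on debug/logging paths. A typical (illustrative) call site might look like:

    // Illustrative logging call; 'operation' is assumed to hold an MtpOperationCode.
    LOGV("handling %s (0x%04X)", MtpDebug::getOperationCodeName(operation), operation);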
- OMXRenderer(VideoRenderer *impl); - virtual ~OMXRenderer(); - - virtual void render(IOMX::buffer_id buffer); - -private: - VideoRenderer *mImpl; - - OMXRenderer(const OMXRenderer &); - OMXRenderer &operator=(const OMXRenderer &); + static const char* getOperationCodeName(MtpOperationCode code); + static const char* getFormatCodeName(MtpObjectFormat code); + static const char* getObjectPropCodeName(MtpPropertyCode code); + static const char* getDevicePropCodeName(MtpPropertyCode code); }; -} // namespace android +}; // namespace android -#endif // OMX_RENDERER_H_ +#endif // _MTP_DEBUG_H diff --git a/media/mtp/MtpDevice.cpp b/media/mtp/MtpDevice.cpp new file mode 100644 index 0000000..d22c72f --- /dev/null +++ b/media/mtp/MtpDevice.cpp @@ -0,0 +1,591 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MtpDevice" + +#include "MtpDebug.h" +#include "MtpDevice.h" +#include "MtpDeviceInfo.h" +#include "MtpObjectInfo.h" +#include "MtpProperty.h" +#include "MtpStorageInfo.h" +#include "MtpStringBuffer.h" +#include "MtpUtils.h" + +#include <stdio.h> +#include <stdlib.h> +#include <sys/types.h> +#include <sys/ioctl.h> +#include <sys/stat.h> +#include <fcntl.h> +#include <errno.h> +#include <endian.h> + +#include <usbhost/usbhost.h> + +namespace android { + +MtpDevice::MtpDevice(struct usb_device* device, int interface, + const struct usb_endpoint_descriptor *ep_in, + const struct usb_endpoint_descriptor *ep_out, + const struct usb_endpoint_descriptor *ep_intr) + : mDevice(device), + mInterface(interface), + mRequestIn1(NULL), + mRequestIn2(NULL), + mRequestOut(NULL), + mRequestIntr(NULL), + mDeviceInfo(NULL), + mID(usb_device_get_unique_id(device)), + mSessionID(0), + mTransactionID(0), + mReceivedResponse(false) +{ + mRequestIn1 = usb_request_new(device, ep_in); + mRequestIn2 = usb_request_new(device, ep_in); + mRequestOut = usb_request_new(device, ep_out); + mRequestIntr = usb_request_new(device, ep_intr); +} + +MtpDevice::~MtpDevice() { + close(); + for (int i = 0; i < mDeviceProperties.size(); i++) + delete mDeviceProperties[i]; + usb_request_free(mRequestIn1); + usb_request_free(mRequestIn2); + usb_request_free(mRequestOut); + usb_request_free(mRequestIntr); +} + +void MtpDevice::initialize() { + openSession(); + mDeviceInfo = getDeviceInfo(); + if (mDeviceInfo) { + if (mDeviceInfo->mDeviceProperties) { + int count = mDeviceInfo->mDeviceProperties->size(); + for (int i = 0; i < count; i++) { + MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i]; + MtpProperty* property = getDevicePropDesc(propCode); + if (property) + mDeviceProperties.push(property); + } + } + } +} + +void MtpDevice::close() { + if (mDevice) { + usb_device_release_interface(mDevice, mInterface); + usb_device_close(mDevice); + mDevice = NULL; + } +} + +void MtpDevice::print() { + if (mDeviceInfo) { + mDeviceInfo->print(); + + if (mDeviceInfo->mDeviceProperties) { + LOGI("***** DEVICE PROPERTIES *****\n"); + int count = 
mDeviceInfo->mDeviceProperties->size(); + for (int i = 0; i < count; i++) { + MtpDeviceProperty propCode = (*mDeviceInfo->mDeviceProperties)[i]; + MtpProperty* property = getDevicePropDesc(propCode); + if (property) { + property->print(); + } + } + } + } + + if (mDeviceInfo->mPlaybackFormats) { + LOGI("***** OBJECT PROPERTIES *****\n"); + int count = mDeviceInfo->mPlaybackFormats->size(); + for (int i = 0; i < count; i++) { + MtpObjectFormat format = (*mDeviceInfo->mPlaybackFormats)[i]; + LOGI("*** FORMAT: %s\n", MtpDebug::getFormatCodeName(format)); + MtpObjectPropertyList* props = getObjectPropsSupported(format); + if (props) { + for (int j = 0; j < props->size(); j++) { + MtpObjectProperty prop = (*props)[j]; + MtpProperty* property = getObjectPropDesc(prop, format); + if (property) + property->print(); + else + LOGE("could not fetch property: %s", + MtpDebug::getObjectPropCodeName(prop)); + } + } + } + } +} + +const char* MtpDevice::getDeviceName() { + if (mDevice) + return usb_device_get_name(mDevice); + else + return "???"; +} + +bool MtpDevice::openSession() { + Mutex::Autolock autoLock(mMutex); + + mSessionID = 0; + mTransactionID = 0; + MtpSessionID newSession = 1; + mRequest.reset(); + mRequest.setParameter(1, newSession); + if (!sendRequest(MTP_OPERATION_OPEN_SESSION)) + return false; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_SESSION_ALREADY_OPEN) + newSession = mResponse.getParameter(1); + else if (ret != MTP_RESPONSE_OK) + return false; + + mSessionID = newSession; + mTransactionID = 1; + return true; +} + +bool MtpDevice::closeSession() { + // FIXME + return true; +} + +MtpDeviceInfo* MtpDevice::getDeviceInfo() { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + if (!sendRequest(MTP_OPERATION_GET_DEVICE_INFO)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + MtpDeviceInfo* info = new MtpDeviceInfo; + info->read(mData); + return info; + } + return NULL; +} + +MtpStorageIDList* MtpDevice::getStorageIDs() { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + if (!sendRequest(MTP_OPERATION_GET_STORAGE_IDS)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + return mData.getAUInt32(); + } + return NULL; +} + +MtpStorageInfo* MtpDevice::getStorageInfo(MtpStorageID storageID) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + mRequest.setParameter(1, storageID); + if (!sendRequest(MTP_OPERATION_GET_STORAGE_INFO)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + MtpStorageInfo* info = new MtpStorageInfo(storageID); + info->read(mData); + return info; + } + return NULL; +} + +MtpObjectHandleList* MtpDevice::getObjectHandles(MtpStorageID storageID, + MtpObjectFormat format, MtpObjectHandle parent) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + mRequest.setParameter(1, storageID); + mRequest.setParameter(2, format); + mRequest.setParameter(3, parent); + if (!sendRequest(MTP_OPERATION_GET_OBJECT_HANDLES)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + return mData.getAUInt32(); + } + return NULL; +} + +MtpObjectInfo* MtpDevice::getObjectInfo(MtpObjectHandle handle) { + Mutex::Autolock autoLock(mMutex); + + // FIXME - we might want to add some caching here + + mRequest.reset(); + mRequest.setParameter(1, handle); + if 
(!sendRequest(MTP_OPERATION_GET_OBJECT_INFO)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + MtpObjectInfo* info = new MtpObjectInfo(handle); + info->read(mData); + return info; + } + return NULL; +} + +void* MtpDevice::getThumbnail(MtpObjectHandle handle, int& outLength) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + mRequest.setParameter(1, handle); + if (sendRequest(MTP_OPERATION_GET_THUMB) && readData()) { + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + return mData.getData(outLength); + } + } + outLength = 0; + return NULL; +} + +MtpObjectHandle MtpDevice::sendObjectInfo(MtpObjectInfo* info) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + MtpObjectHandle parent = info->mParent; + if (parent == 0) + parent = MTP_PARENT_ROOT; + + mRequest.setParameter(1, info->mStorageID); + mRequest.setParameter(2, info->mParent); + + mData.putUInt32(info->mStorageID); + mData.putUInt16(info->mFormat); + mData.putUInt16(info->mProtectionStatus); + mData.putUInt32(info->mCompressedSize); + mData.putUInt16(info->mThumbFormat); + mData.putUInt32(info->mThumbCompressedSize); + mData.putUInt32(info->mThumbPixWidth); + mData.putUInt32(info->mThumbPixHeight); + mData.putUInt32(info->mImagePixWidth); + mData.putUInt32(info->mImagePixHeight); + mData.putUInt32(info->mImagePixDepth); + mData.putUInt32(info->mParent); + mData.putUInt16(info->mAssociationType); + mData.putUInt32(info->mAssociationDesc); + mData.putUInt32(info->mSequenceNumber); + mData.putString(info->mName); + + char created[100], modified[100]; + formatDateTime(info->mDateCreated, created, sizeof(created)); + formatDateTime(info->mDateModified, modified, sizeof(modified)); + + mData.putString(created); + mData.putString(modified); + if (info->mKeywords) + mData.putString(info->mKeywords); + else + mData.putEmptyString(); + + if (sendRequest(MTP_OPERATION_SEND_OBJECT_INFO) && sendData()) { + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + info->mStorageID = mResponse.getParameter(1); + info->mParent = mResponse.getParameter(2); + info->mHandle = mResponse.getParameter(3); + return info->mHandle; + } + } + return (MtpObjectHandle)-1; +} + +bool MtpDevice::sendObject(MtpObjectInfo* info, int srcFD) { + Mutex::Autolock autoLock(mMutex); + + int remaining = info->mCompressedSize; + mRequest.reset(); + mRequest.setParameter(1, info->mHandle); + if (sendRequest(MTP_OPERATION_SEND_OBJECT)) { + // send data header + writeDataHeader(MTP_OPERATION_SEND_OBJECT, remaining); + + char buffer[65536]; + while (remaining > 0) { + int count = read(srcFD, buffer, sizeof(buffer)); + if (count > 0) { + int written = mData.write(mRequestOut, buffer, count); + // FIXME check error + remaining -= count; + } else { + break; + } + } + } + MtpResponseCode ret = readResponse(); + return (remaining == 0 && ret == MTP_RESPONSE_OK); +} + +bool MtpDevice::deleteObject(MtpObjectHandle handle) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + mRequest.setParameter(1, handle); + if (sendRequest(MTP_OPERATION_DELETE_OBJECT)) { + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) + return true; + } + return false; +} + +MtpObjectHandle MtpDevice::getParent(MtpObjectHandle handle) { + MtpObjectInfo* info = getObjectInfo(handle); + if (info) + return info->mParent; + else + return -1; +} + +MtpObjectHandle MtpDevice::getStorageID(MtpObjectHandle handle) { + MtpObjectInfo* info = getObjectInfo(handle); + 
if (info) + return info->mStorageID; + else + return -1; +} + +MtpObjectPropertyList* MtpDevice::getObjectPropsSupported(MtpObjectFormat format) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + mRequest.setParameter(1, format); + if (!sendRequest(MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + return mData.getAUInt16(); + } + return NULL; + +} + +MtpProperty* MtpDevice::getDevicePropDesc(MtpDeviceProperty code) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + mRequest.setParameter(1, code); + if (!sendRequest(MTP_OPERATION_GET_DEVICE_PROP_DESC)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + MtpProperty* property = new MtpProperty; + property->read(mData); + return property; + } + return NULL; +} + +MtpProperty* MtpDevice::getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format) { + Mutex::Autolock autoLock(mMutex); + + mRequest.reset(); + mRequest.setParameter(1, code); + mRequest.setParameter(2, format); + if (!sendRequest(MTP_OPERATION_GET_OBJECT_PROP_DESC)) + return NULL; + if (!readData()) + return NULL; + MtpResponseCode ret = readResponse(); + if (ret == MTP_RESPONSE_OK) { + MtpProperty* property = new MtpProperty; + property->read(mData); + return property; + } + return NULL; +} + +// reads the object's data and writes it to the specified file path +bool MtpDevice::readObject(MtpObjectHandle handle, const char* destPath, int group, int perm) { + LOGD("readObject: %s", destPath); + int fd = ::open(destPath, O_RDWR | O_CREAT | O_TRUNC); + if (fd < 0) { + LOGE("open failed for %s", destPath); + return false; + } + + fchown(fd, getuid(), group); + // set permissions + int mask = umask(0); + fchmod(fd, perm); + umask(mask); + + Mutex::Autolock autoLock(mMutex); + bool result = false; + + mRequest.reset(); + mRequest.setParameter(1, handle); + if (sendRequest(MTP_OPERATION_GET_OBJECT) + && mData.readDataHeader(mRequestIn1)) { + uint32_t length = mData.getContainerLength(); + if (length < MTP_CONTAINER_HEADER_SIZE) + goto fail; + length -= MTP_CONTAINER_HEADER_SIZE; + uint32_t remaining = length; + + int initialDataLength = 0; + void* initialData = mData.getData(initialDataLength); + if (initialData) { + if (initialDataLength > 0) { + if (write(fd, initialData, initialDataLength) != initialDataLength) + goto fail; + remaining -= initialDataLength; + } + free(initialData); + } + + // USB reads greater than 16K don't work + char buffer1[16384], buffer2[16384]; + mRequestIn1->buffer = buffer1; + mRequestIn2->buffer = buffer2; + struct usb_request* req = mRequestIn1; + void* writeBuffer = NULL; + int writeLength = 0; + + while (remaining > 0 || writeBuffer) { + if (remaining > 0) { + // queue up a read request + req->buffer_length = (remaining > sizeof(buffer1) ? 
sizeof(buffer1) : remaining); + if (mData.readDataAsync(req)) { + LOGE("readDataAsync failed"); + goto fail; + } + } else { + req = NULL; + } + + if (writeBuffer) { + // write previous buffer + if (write(fd, writeBuffer, writeLength) != writeLength) { + LOGE("write failed"); + // wait for pending read before failing + if (req) + mData.readDataWait(mDevice); + goto fail; + } + writeBuffer = NULL; + } + + // wait for read to complete + if (req) { + int read = mData.readDataWait(mDevice); + if (read < 0) + goto fail; + + writeBuffer = req->buffer; + writeLength = read; + remaining -= read; + req = (req == mRequestIn1 ? mRequestIn2 : mRequestIn1); + } + } + + MtpResponseCode response = readResponse(); + if (response == MTP_RESPONSE_OK) + result = true; + } + +fail: + ::close(fd); + return result; +} + +bool MtpDevice::sendRequest(MtpOperationCode operation) { + LOGV("sendRequest: %s\n", MtpDebug::getOperationCodeName(operation)); + mReceivedResponse = false; + mRequest.setOperationCode(operation); + if (mTransactionID > 0) + mRequest.setTransactionID(mTransactionID++); + int ret = mRequest.write(mRequestOut); + mRequest.dump(); + return (ret > 0); +} + +bool MtpDevice::sendData() { + LOGV("sendData\n"); + mData.setOperationCode(mRequest.getOperationCode()); + mData.setTransactionID(mRequest.getTransactionID()); + int ret = mData.write(mRequestOut); + mData.dump(); + return (ret > 0); +} + +bool MtpDevice::readData() { + mData.reset(); + int ret = mData.read(mRequestIn1); + LOGV("readData returned %d\n", ret); + if (ret >= MTP_CONTAINER_HEADER_SIZE) { + if (mData.getContainerType() == MTP_CONTAINER_TYPE_RESPONSE) { + LOGD("got response packet instead of data packet"); + // we got a response packet rather than data + // copy it to mResponse + mResponse.copyFrom(mData); + mReceivedResponse = true; + return false; + } + mData.dump(); + return true; + } + else { + LOGV("readResponse failed\n"); + return false; + } +} + +bool MtpDevice::writeDataHeader(MtpOperationCode operation, int dataLength) { + mData.setOperationCode(operation); + mData.setTransactionID(mRequest.getTransactionID()); + return (!mData.writeDataHeader(mRequestOut, dataLength)); +} + +MtpResponseCode MtpDevice::readResponse() { + LOGV("readResponse\n"); + if (mReceivedResponse) { + mReceivedResponse = false; + return mResponse.getResponseCode(); + } + int ret = mResponse.read(mRequestIn1); + if (ret >= MTP_CONTAINER_HEADER_SIZE) { + mResponse.dump(); + return mResponse.getResponseCode(); + } else { + LOGD("readResponse failed\n"); + return -1; + } +} + +} // namespace android diff --git a/media/mtp/MtpDevice.h b/media/mtp/MtpDevice.h new file mode 100644 index 0000000..d0a0fb3 --- /dev/null +++ b/media/mtp/MtpDevice.h @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MTP_DEVICE_H +#define _MTP_DEVICE_H + +#include "MtpRequestPacket.h" +#include "MtpDataPacket.h" +#include "MtpResponsePacket.h" +#include "MtpTypes.h" + +#include <utils/threads.h> + +struct usb_device; +struct usb_request; +struct usb_endpoint_descriptor; + +namespace android { + +class MtpDeviceInfo; +class MtpObjectInfo; +class MtpStorageInfo; + +class MtpDevice { +private: + struct usb_device* mDevice; + int mInterface; + struct usb_request* mRequestIn1; + struct usb_request* mRequestIn2; + struct usb_request* mRequestOut; + struct usb_request* mRequestIntr; + MtpDeviceInfo* mDeviceInfo; + MtpPropertyList mDeviceProperties; + + // a unique ID for the device + int mID; + + // current session ID + MtpSessionID mSessionID; + // current transaction ID + MtpTransactionID mTransactionID; + + MtpRequestPacket mRequest; + MtpDataPacket mData; + MtpResponsePacket mResponse; + // set to true if we received a response packet instead of a data packet + bool mReceivedResponse; + + // to ensure only one MTP transaction at a time + Mutex mMutex; + +public: + MtpDevice(struct usb_device* device, int interface, + const struct usb_endpoint_descriptor *ep_in, + const struct usb_endpoint_descriptor *ep_out, + const struct usb_endpoint_descriptor *ep_intr); + virtual ~MtpDevice(); + + inline int getID() const { return mID; } + + void initialize(); + void close(); + void print(); + const char* getDeviceName(); + + bool openSession(); + bool closeSession(); + + MtpDeviceInfo* getDeviceInfo(); + MtpStorageIDList* getStorageIDs(); + MtpStorageInfo* getStorageInfo(MtpStorageID storageID); + MtpObjectHandleList* getObjectHandles(MtpStorageID storageID, MtpObjectFormat format, + MtpObjectHandle parent); + MtpObjectInfo* getObjectInfo(MtpObjectHandle handle); + void* getThumbnail(MtpObjectHandle handle, int& outLength); + MtpObjectHandle sendObjectInfo(MtpObjectInfo* info); + bool sendObject(MtpObjectInfo* info, int srcFD); + bool deleteObject(MtpObjectHandle handle); + MtpObjectHandle getParent(MtpObjectHandle handle); + MtpObjectHandle getStorageID(MtpObjectHandle handle); + + MtpObjectPropertyList* getObjectPropsSupported(MtpObjectFormat format); + + MtpProperty* getDevicePropDesc(MtpDeviceProperty code); + MtpProperty* getObjectPropDesc(MtpObjectProperty code, MtpObjectFormat format); + + bool readObject(MtpObjectHandle handle, const char* destPath, int group, + int perm); + +private: + bool sendRequest(MtpOperationCode operation); + bool sendData(); + bool readData(); + bool writeDataHeader(MtpOperationCode operation, int dataLength); + MtpResponseCode readResponse(); + +}; + +}; // namespace android + +#endif // _MTP_DEVICE_H diff --git a/media/mtp/MtpDeviceInfo.cpp b/media/mtp/MtpDeviceInfo.cpp new file mode 100644 index 0000000..5a9322e --- /dev/null +++ b/media/mtp/MtpDeviceInfo.cpp @@ -0,0 +1,97 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
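Taken together with MtpDevice.cpp above, the header gives the host-side flow: construct the device around its USB endpoints, call initialize() to open a session and cache device info, then walk storages and object handles. A minimal usage sketch, assuming a hypothetical openMtpDevice() helper that performs the usb_host discovery this change does not include:

    MtpDevice* device = openMtpDevice();              // hypothetical discovery/claim step
    device->initialize();                             // opens the session, caches device properties
    MtpStorageIDList* storages = device->getStorageIDs();
    if (storages) {
        for (size_t i = 0; i < storages->size(); i++) {
            // format 0 means no format filter; MTP_PARENT_ROOT selects top-level objects
            MtpObjectHandleList* handles =
                    device->getObjectHandles((*storages)[i], 0, MTP_PARENT_ROOT);
            // ... call getObjectInfo()/readObject() on each handle as needed ...
            delete handles;
        }
        delete storages;
    }
    device->close();
    delete device;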
+ */ + +#define LOG_TAG "MtpDeviceInfo" + +#include "MtpDebug.h" +#include "MtpDataPacket.h" +#include "MtpDeviceInfo.h" +#include "MtpStringBuffer.h" + +namespace android { + +MtpDeviceInfo::MtpDeviceInfo() + : mStandardVersion(0), + mVendorExtensionID(0), + mVendorExtensionVersion(0), + mVendorExtensionDesc(NULL), + mFunctionalCode(0), + mOperations(NULL), + mEvents(NULL), + mDeviceProperties(NULL), + mCaptureFormats(NULL), + mPlaybackFormats(NULL), + mManufacturer(NULL), + mModel(NULL), + mVersion(NULL), + mSerial(NULL) +{ +} + +MtpDeviceInfo::~MtpDeviceInfo() { + if (mVendorExtensionDesc) + free(mVendorExtensionDesc); + delete mOperations; + delete mEvents; + delete mDeviceProperties; + delete mCaptureFormats; + delete mPlaybackFormats; + if (mManufacturer) + free(mManufacturer); + if (mModel) + free(mModel); + if (mVersion) + free(mVersion); + if (mSerial) + free(mSerial); +} + +void MtpDeviceInfo::read(MtpDataPacket& packet) { + MtpStringBuffer string; + + // read the device info + mStandardVersion = packet.getUInt16(); + mVendorExtensionID = packet.getUInt32(); + mVendorExtensionVersion = packet.getUInt16(); + + packet.getString(string); + mVendorExtensionDesc = strdup((const char *)string); + + mFunctionalCode = packet.getUInt16(); + mOperations = packet.getAUInt16(); + mEvents = packet.getAUInt16(); + mDeviceProperties = packet.getAUInt16(); + mCaptureFormats = packet.getAUInt16(); + mPlaybackFormats = packet.getAUInt16(); + + packet.getString(string); + mManufacturer = strdup((const char *)string); + packet.getString(string); + mModel = strdup((const char *)string); + packet.getString(string); + mVersion = strdup((const char *)string); + packet.getString(string); + mSerial = strdup((const char *)string); +} + +void MtpDeviceInfo::print() { + LOGV("Device Info:\n\tmStandardVersion: %d\n\tmVendorExtensionID: %d\n\tmVendorExtensionVersiony: %d\n", + mStandardVersion, mVendorExtensionID, mVendorExtensionVersion); + LOGV("\tmVendorExtensionDesc: %s\n\tmFunctionalCode: %d\n\tmManufacturer: %s\n\tmModel: %s\n\tmVersion: %s\n\tmSerial: %s\n", + mVendorExtensionDesc, mFunctionalCode, mManufacturer, mModel, mVersion, mSerial); +} + +} // namespace android diff --git a/media/mtp/MtpDeviceInfo.h b/media/mtp/MtpDeviceInfo.h new file mode 100644 index 0000000..2abaa10 --- /dev/null +++ b/media/mtp/MtpDeviceInfo.h @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MTP_DEVICE_INFO_H +#define _MTP_DEVICE_INFO_H + +struct stat; + +namespace android { + +class MtpDataPacket; + +class MtpDeviceInfo { +public: + uint16_t mStandardVersion; + uint32_t mVendorExtensionID; + uint16_t mVendorExtensionVersion; + char* mVendorExtensionDesc; + uint16_t mFunctionalCode; + UInt16List* mOperations; + UInt16List* mEvents; + MtpDevicePropertyList* mDeviceProperties; + MtpObjectFormatList* mCaptureFormats; + MtpObjectFormatList* mPlaybackFormats; + char* mManufacturer; + char* mModel; + char* mVersion; + char* mSerial; + +public: + MtpDeviceInfo(); + virtual ~MtpDeviceInfo(); + + void read(MtpDataPacket& packet); + + void print(); +}; + +}; // namespace android + +#endif // _MTP_DEVICE_INFO_H diff --git a/media/mtp/MtpEventPacket.cpp b/media/mtp/MtpEventPacket.cpp new file mode 100644 index 0000000..d2fca42 --- /dev/null +++ b/media/mtp/MtpEventPacket.cpp @@ -0,0 +1,70 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MtpEventPacket" + +#include <stdio.h> +#include <sys/types.h> +#include <fcntl.h> +#include <sys/ioctl.h> + +#ifdef MTP_DEVICE +#include <linux/usb/f_mtp.h> +#endif + +#include "MtpEventPacket.h" + +#include <usbhost/usbhost.h> + +namespace android { + +MtpEventPacket::MtpEventPacket() + : MtpPacket(512) +{ +} + +MtpEventPacket::~MtpEventPacket() { +} + +#ifdef MTP_DEVICE +int MtpEventPacket::write(int fd) { + struct mtp_event event; + + putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize); + putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_EVENT); + + event.data = mBuffer; + event.length = mPacketSize; + int ret = ::ioctl(fd, MTP_SEND_EVENT, (unsigned long)&event); + return (ret < 0 ? ret : 0); +} +#endif + +#ifdef MTP_HOST +int MtpEventPacket::read(struct usb_request *request) { + request->buffer = mBuffer; + request->buffer_length = mBufferSize; + int ret = transfer(request); + if (ret >= 0) + mPacketSize = ret; + else + mPacketSize = 0; + return ret; +} +#endif + +} // namespace android + diff --git a/media/mtp/MtpEventPacket.h b/media/mtp/MtpEventPacket.h new file mode 100644 index 0000000..660baad --- /dev/null +++ b/media/mtp/MtpEventPacket.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
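MtpEventPacket above is built for both roles: with MTP_DEVICE it hands the event buffer to the f_mtp gadget driver through the MTP_SEND_EVENT ioctl, and with MTP_HOST it reuses the usb_request read path. A device-side sketch, assuming an already-open f_mtp descriptor and the standard MTP_EVENT_OBJECT_ADDED code from mtp.h:

    MtpEventPacket event;
    event.reset();                                // header-sized, zeroed container
    event.setEventCode(MTP_EVENT_OBJECT_ADDED);   // 0x4002 in the MTP spec
    event.setParameter(1, newHandle);             // handle of the freshly created object
    event.write(mtpFd);                           // mtpFd: hypothetical open f_mtp descriptor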
+ */ + +#ifndef _MTP_EVENT_PACKET_H +#define _MTP_EVENT_PACKET_H + +#include "MtpPacket.h" +#include "mtp.h" + +namespace android { + +class MtpEventPacket : public MtpPacket { + +public: + MtpEventPacket(); + virtual ~MtpEventPacket(); + +#ifdef MTP_DEVICE + // write our data to the given file descriptor + int write(int fd); +#endif + +#ifdef MTP_HOST + // read our buffer with the given request + int read(struct usb_request *request); +#endif + + inline MtpEventCode getEventCode() const { return getContainerCode(); } + inline void setEventCode(MtpEventCode code) + { return setContainerCode(code); } +}; + +}; // namespace android + +#endif // _MTP_EVENT_PACKET_H diff --git a/media/mtp/MtpObjectInfo.cpp b/media/mtp/MtpObjectInfo.cpp new file mode 100644 index 0000000..ea68c3b --- /dev/null +++ b/media/mtp/MtpObjectInfo.cpp @@ -0,0 +1,108 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MtpObjectInfo" + +#include "MtpDebug.h" +#include "MtpDataPacket.h" +#include "MtpObjectInfo.h" +#include "MtpStringBuffer.h" +#include "MtpUtils.h" + +namespace android { + +MtpObjectInfo::MtpObjectInfo(MtpObjectHandle handle) + : mHandle(handle), + mStorageID(0), + mFormat(0), + mProtectionStatus(0), + mCompressedSize(0), + mThumbFormat(0), + mThumbCompressedSize(0), + mThumbPixWidth(0), + mThumbPixHeight(0), + mImagePixWidth(0), + mImagePixHeight(0), + mImagePixDepth(0), + mParent(0), + mAssociationType(0), + mAssociationDesc(0), + mSequenceNumber(0), + mName(NULL), + mDateCreated(0), + mDateModified(0), + mKeywords(NULL) +{ +} + +MtpObjectInfo::~MtpObjectInfo() { + if (mName) + free(mName); + if (mKeywords) + free(mKeywords); +} + +void MtpObjectInfo::read(MtpDataPacket& packet) { + MtpStringBuffer string; + time_t time; + + mStorageID = packet.getUInt32(); + mFormat = packet.getUInt16(); + mProtectionStatus = packet.getUInt16(); + mCompressedSize = packet.getUInt32(); + mThumbFormat = packet.getUInt16(); + mThumbCompressedSize = packet.getUInt32(); + mThumbPixWidth = packet.getUInt32(); + mThumbPixHeight = packet.getUInt32(); + mImagePixWidth = packet.getUInt32(); + mImagePixHeight = packet.getUInt32(); + mImagePixDepth = packet.getUInt32(); + mParent = packet.getUInt32(); + mAssociationType = packet.getUInt16(); + mAssociationDesc = packet.getUInt32(); + mSequenceNumber = packet.getUInt32(); + + packet.getString(string); + mName = strdup((const char *)string); + + packet.getString(string); + if (parseDateTime((const char*)string, time)) + mDateCreated = time; + + packet.getString(string); + if (parseDateTime((const char*)string, time)) + mDateModified = time; + + packet.getString(string); + mKeywords = strdup((const char *)string); +} + +void MtpObjectInfo::print() { + LOGD("MtpObject Info %08X: %s\n", mHandle, mName); + LOGD(" mStorageID: %08X mFormat: %04X mProtectionStatus: %d\n", + mStorageID, mFormat, mProtectionStatus); + LOGD(" mCompressedSize: %d mThumbFormat: %04X mThumbCompressedSize: %d\n", + 
mCompressedSize, mFormat, mThumbCompressedSize); + LOGD(" mThumbPixWidth: %d mThumbPixHeight: %d\n", mThumbPixWidth, mThumbPixHeight); + LOGD(" mImagePixWidth: %d mImagePixHeight: %d mImagePixDepth: %d\n", + mImagePixWidth, mImagePixHeight, mImagePixDepth); + LOGD(" mParent: %08X mAssociationType: %04X mAssociationDesc: %04X\n", + mParent, mAssociationType, mAssociationDesc); + LOGD(" mSequenceNumber: %d mDateCreated: %ld mDateModified: %ld mKeywords: %s\n", + mSequenceNumber, mDateCreated, mDateModified, mKeywords); +} + +} // namespace android diff --git a/media/mtp/MtpObjectInfo.h b/media/mtp/MtpObjectInfo.h new file mode 100644 index 0000000..c7a449c --- /dev/null +++ b/media/mtp/MtpObjectInfo.h @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_OBJECT_INFO_H +#define _MTP_OBJECT_INFO_H + +#include "MtpTypes.h" + +namespace android { + +class MtpDataPacket; + +class MtpObjectInfo { +public: + MtpObjectHandle mHandle; + MtpStorageID mStorageID; + MtpObjectFormat mFormat; + uint16_t mProtectionStatus; + uint32_t mCompressedSize; + MtpObjectFormat mThumbFormat; + uint32_t mThumbCompressedSize; + uint32_t mThumbPixWidth; + uint32_t mThumbPixHeight; + uint32_t mImagePixWidth; + uint32_t mImagePixHeight; + uint32_t mImagePixDepth; + MtpObjectHandle mParent; + uint16_t mAssociationType; + uint32_t mAssociationDesc; + uint32_t mSequenceNumber; + char* mName; + time_t mDateCreated; + time_t mDateModified; + char* mKeywords; + +public: + MtpObjectInfo(MtpObjectHandle handle); + virtual ~MtpObjectInfo(); + + void read(MtpDataPacket& packet); + + void print(); +}; + +}; // namespace android + +#endif // _MTP_OBJECT_INFO_H diff --git a/media/mtp/MtpPacket.cpp b/media/mtp/MtpPacket.cpp new file mode 100644 index 0000000..d3f2cb4 --- /dev/null +++ b/media/mtp/MtpPacket.cpp @@ -0,0 +1,165 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
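MtpObjectInfo is also what the host fills in before pushing a file: MtpDevice::sendObjectInfo() serializes these fields, the responder replies with the new handle, and sendObject() then streams the bytes. A hedged sketch (device, storageID, parentHandle, fileSize and srcPath are illustrative placeholders):

    MtpObjectInfo info(0);                        // handle comes back from the responder
    info.mStorageID = storageID;
    info.mParent = parentHandle;                  // parent handle on the device, or 0 for the root
    info.mFormat = MTP_FORMAT_MP3;
    info.mCompressedSize = fileSize;
    info.mName = strdup("track.mp3");             // freed by ~MtpObjectInfo()
    if (device->sendObjectInfo(&info) != (MtpObjectHandle)-1) {
        int fd = ::open(srcPath, O_RDONLY);
        device->sendObject(&info, fd);
        ::close(fd);
    }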
+ */ + +#define LOG_TAG "MtpPacket" + +#include "MtpDebug.h" +#include "MtpPacket.h" +#include "mtp.h" + +#include <stdio.h> +#include <stdlib.h> +#include <stdio.h> + +#include <usbhost/usbhost.h> + +namespace android { + +MtpPacket::MtpPacket(int bufferSize) + : mBuffer(NULL), + mBufferSize(bufferSize), + mAllocationIncrement(bufferSize), + mPacketSize(0) +{ + mBuffer = (uint8_t *)malloc(bufferSize); + if (!mBuffer) { + LOGE("out of memory!"); + abort(); + } +} + +MtpPacket::~MtpPacket() { + if (mBuffer) + free(mBuffer); +} + +void MtpPacket::reset() { + allocate(MTP_CONTAINER_HEADER_SIZE); + mPacketSize = MTP_CONTAINER_HEADER_SIZE; + memset(mBuffer, 0, mBufferSize); +} + +void MtpPacket::allocate(int length) { + if (length > mBufferSize) { + int newLength = length + mAllocationIncrement; + mBuffer = (uint8_t *)realloc(mBuffer, newLength); + if (!mBuffer) { + LOGE("out of memory!"); + abort(); + } + mBufferSize = newLength; + } +} + +void MtpPacket::dump() { +#define DUMP_BYTES_PER_ROW 16 + char buffer[500]; + char* bufptr = buffer; + + for (int i = 0; i < mPacketSize; i++) { + sprintf(bufptr, "%02X ", mBuffer[i]); + bufptr += strlen(bufptr); + if (i % DUMP_BYTES_PER_ROW == (DUMP_BYTES_PER_ROW - 1)) { + LOGV("%s", buffer); + bufptr = buffer; + } + } + if (bufptr != buffer) { + // print last line + LOGV("%s", buffer); + } + LOGV("\n"); +} + +void MtpPacket::copyFrom(const MtpPacket& src) { + int length = src.mPacketSize; + allocate(length); + mPacketSize = length; + memcpy(mBuffer, src.mBuffer, length); +} + +uint16_t MtpPacket::getUInt16(int offset) const { + return ((uint16_t)mBuffer[offset + 1] << 8) | (uint16_t)mBuffer[offset]; +} + +uint32_t MtpPacket::getUInt32(int offset) const { + return ((uint32_t)mBuffer[offset + 3] << 24) | ((uint32_t)mBuffer[offset + 2] << 16) | + ((uint32_t)mBuffer[offset + 1] << 8) | (uint32_t)mBuffer[offset]; +} + +void MtpPacket::putUInt16(int offset, uint16_t value) { + mBuffer[offset++] = (uint8_t)(value & 0xFF); + mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF); +} + +void MtpPacket::putUInt32(int offset, uint32_t value) { + mBuffer[offset++] = (uint8_t)(value & 0xFF); + mBuffer[offset++] = (uint8_t)((value >> 8) & 0xFF); + mBuffer[offset++] = (uint8_t)((value >> 16) & 0xFF); + mBuffer[offset++] = (uint8_t)((value >> 24) & 0xFF); +} + +uint16_t MtpPacket::getContainerCode() const { + return getUInt16(MTP_CONTAINER_CODE_OFFSET); +} + +void MtpPacket::setContainerCode(uint16_t code) { + putUInt16(MTP_CONTAINER_CODE_OFFSET, code); +} + +uint16_t MtpPacket::getContainerType() const { + return getUInt16(MTP_CONTAINER_TYPE_OFFSET); +} + +MtpTransactionID MtpPacket::getTransactionID() const { + return getUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET); +} + +void MtpPacket::setTransactionID(MtpTransactionID id) { + putUInt32(MTP_CONTAINER_TRANSACTION_ID_OFFSET, id); +} + +uint32_t MtpPacket::getParameter(int index) const { + if (index < 1 || index > 5) { + LOGE("index %d out of range in MtpPacket::getParameter", index); + return 0; + } + return getUInt32(MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t)); +} + +void MtpPacket::setParameter(int index, uint32_t value) { + if (index < 1 || index > 5) { + LOGE("index %d out of range in MtpPacket::setParameter", index); + return; + } + int offset = MTP_CONTAINER_PARAMETER_OFFSET + (index - 1) * sizeof(uint32_t); + if (mPacketSize < offset + sizeof(uint32_t)) + mPacketSize = offset + sizeof(uint32_t); + putUInt32(offset, value); +} + +#ifdef MTP_HOST +int MtpPacket::transfer(struct usb_request* request) 
{ + if (usb_request_queue(request)) { + LOGE("usb_endpoint_queue failed, errno: %d", errno); + return -1; + } + request = usb_request_wait(request->dev); + return (request ? request->actual_length : -1); +} +#endif + +} // namespace android diff --git a/media/mtp/MtpPacket.h b/media/mtp/MtpPacket.h new file mode 100644 index 0000000..0ffb1d3 --- /dev/null +++ b/media/mtp/MtpPacket.h @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_PACKET_H +#define _MTP_PACKET_H + +#include "MtpTypes.h" + +struct usb_request; + +namespace android { + +class MtpPacket { + +protected: + uint8_t* mBuffer; + // current size of the buffer + int mBufferSize; + // number of bytes to add when resizing the buffer + int mAllocationIncrement; + // size of the data in the packet + int mPacketSize; + +public: + MtpPacket(int bufferSize); + virtual ~MtpPacket(); + + // sets packet size to the default container size and sets buffer to zero + virtual void reset(); + + void allocate(int length); + void dump(); + void copyFrom(const MtpPacket& src); + + uint16_t getContainerCode() const; + void setContainerCode(uint16_t code); + + uint16_t getContainerType() const; + + MtpTransactionID getTransactionID() const; + void setTransactionID(MtpTransactionID id); + + uint32_t getParameter(int index) const; + void setParameter(int index, uint32_t value); + +#ifdef MTP_HOST + int transfer(struct usb_request* request); +#endif + +protected: + uint16_t getUInt16(int offset) const; + uint32_t getUInt32(int offset) const; + void putUInt16(int offset, uint16_t value); + void putUInt32(int offset, uint32_t value); +}; + +}; // namespace android + +#endif // _MTP_PACKET_H diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp new file mode 100644 index 0000000..8016c35 --- /dev/null +++ b/media/mtp/MtpProperty.cpp @@ -0,0 +1,534 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
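All of MtpPacket's multi-byte accessors above read and write the least-significant byte first, which is the MTP/PTP container byte order, so packets come out the same on any host. A small round-trip illustration using MtpRequestPacket (declared elsewhere in this change) and the public parameter accessors:

    MtpRequestPacket request;
    request.reset();
    request.setOperationCode(MTP_OPERATION_OPEN_SESSION);
    request.setParameter(1, 0x12345678);
    // In the buffer the parameter is stored as bytes 78 56 34 12 (LSB first);
    // getParameter(1) reassembles 0x12345678 regardless of host endianness.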
+ */ + +#define LOG_TAG "MtpProperty" + +#include "MtpDataPacket.h" +#include "MtpDebug.h" +#include "MtpProperty.h" +#include "MtpStringBuffer.h" +#include "MtpUtils.h" + +namespace android { + +MtpProperty::MtpProperty() + : mCode(0), + mType(0), + mWriteable(false), + mDefaultArrayLength(0), + mDefaultArrayValues(NULL), + mCurrentArrayLength(0), + mCurrentArrayValues(NULL), + mGroupCode(0), + mFormFlag(kFormNone), + mEnumLength(0), + mEnumValues(NULL) +{ + memset(&mDefaultValue, 0, sizeof(mDefaultValue)); + memset(&mCurrentValue, 0, sizeof(mCurrentValue)); + memset(&mMinimumValue, 0, sizeof(mMinimumValue)); + memset(&mMaximumValue, 0, sizeof(mMaximumValue)); +} + +MtpProperty::MtpProperty(MtpPropertyCode propCode, + MtpDataType type, + bool writeable, + int defaultValue) + : mCode(propCode), + mType(type), + mWriteable(writeable), + mDefaultArrayLength(0), + mDefaultArrayValues(NULL), + mCurrentArrayLength(0), + mCurrentArrayValues(NULL), + mGroupCode(0), + mFormFlag(kFormNone), + mEnumLength(0), + mEnumValues(NULL) +{ + memset(&mDefaultValue, 0, sizeof(mDefaultValue)); + memset(&mCurrentValue, 0, sizeof(mCurrentValue)); + memset(&mMinimumValue, 0, sizeof(mMinimumValue)); + memset(&mMaximumValue, 0, sizeof(mMaximumValue)); + + if (defaultValue) { + switch (type) { + case MTP_TYPE_INT8: + mDefaultValue.u.i8 = defaultValue; + break; + case MTP_TYPE_UINT8: + mDefaultValue.u.u8 = defaultValue; + break; + case MTP_TYPE_INT16: + mDefaultValue.u.i16 = defaultValue; + break; + case MTP_TYPE_UINT16: + mDefaultValue.u.u16 = defaultValue; + break; + case MTP_TYPE_INT32: + mDefaultValue.u.i32 = defaultValue; + break; + case MTP_TYPE_UINT32: + mDefaultValue.u.u32 = defaultValue; + break; + case MTP_TYPE_INT64: + mDefaultValue.u.i64 = defaultValue; + break; + case MTP_TYPE_UINT64: + mDefaultValue.u.u64 = defaultValue; + break; + default: + LOGE("unknown type %04X in MtpProperty::MtpProperty", type); + } + } +} + +MtpProperty::~MtpProperty() { + if (mType == MTP_TYPE_STR) { + // free all strings + free(mDefaultValue.str); + free(mCurrentValue.str); + free(mMinimumValue.str); + free(mMaximumValue.str); + if (mDefaultArrayValues) { + for (int i = 0; i < mDefaultArrayLength; i++) + free(mDefaultArrayValues[i].str); + } + if (mCurrentArrayValues) { + for (int i = 0; i < mCurrentArrayLength; i++) + free(mCurrentArrayValues[i].str); + } + if (mEnumValues) { + for (int i = 0; i < mEnumLength; i++) + free(mEnumValues[i].str); + } + } + delete[] mDefaultArrayValues; + delete[] mCurrentArrayValues; + delete[] mEnumValues; +} + +void MtpProperty::read(MtpDataPacket& packet) { + mCode = packet.getUInt16(); + bool deviceProp = isDeviceProperty(); + mType = packet.getUInt16(); + mWriteable = (packet.getUInt8() == 1); + switch (mType) { + case MTP_TYPE_AINT8: + case MTP_TYPE_AUINT8: + case MTP_TYPE_AINT16: + case MTP_TYPE_AUINT16: + case MTP_TYPE_AINT32: + case MTP_TYPE_AUINT32: + case MTP_TYPE_AINT64: + case MTP_TYPE_AUINT64: + case MTP_TYPE_AINT128: + case MTP_TYPE_AUINT128: + mDefaultArrayValues = readArrayValues(packet, mDefaultArrayLength); + if (deviceProp) + mCurrentArrayValues = readArrayValues(packet, mCurrentArrayLength); + break; + default: + readValue(packet, mDefaultValue); + if (deviceProp) + readValue(packet, mCurrentValue); + } + if (!deviceProp) + mGroupCode = packet.getUInt32(); + mFormFlag = packet.getUInt8(); + + if (mFormFlag == kFormRange) { + readValue(packet, mMinimumValue); + readValue(packet, mMaximumValue); + readValue(packet, mStepSize); + } else if (mFormFlag == kFormEnum) { + 
mEnumLength = packet.getUInt16(); + mEnumValues = new MtpPropertyValue[mEnumLength]; + for (int i = 0; i < mEnumLength; i++) + readValue(packet, mEnumValues[i]); + } +} + +void MtpProperty::write(MtpDataPacket& packet) { + bool deviceProp = isDeviceProperty(); + + packet.putUInt16(mCode); + packet.putUInt16(mType); + packet.putUInt8(mWriteable ? 1 : 0); + + switch (mType) { + case MTP_TYPE_AINT8: + case MTP_TYPE_AUINT8: + case MTP_TYPE_AINT16: + case MTP_TYPE_AUINT16: + case MTP_TYPE_AINT32: + case MTP_TYPE_AUINT32: + case MTP_TYPE_AINT64: + case MTP_TYPE_AUINT64: + case MTP_TYPE_AINT128: + case MTP_TYPE_AUINT128: + writeArrayValues(packet, mDefaultArrayValues, mDefaultArrayLength); + if (deviceProp) + writeArrayValues(packet, mCurrentArrayValues, mCurrentArrayLength); + break; + default: + writeValue(packet, mDefaultValue); + if (deviceProp) + writeValue(packet, mCurrentValue); + } + // group code is only present for object property descriptions; form flag applies to both (matches read() above) + if (!deviceProp) + packet.putUInt32(mGroupCode); + packet.putUInt8(mFormFlag); + if (mFormFlag == kFormRange) { + writeValue(packet, mMinimumValue); + writeValue(packet, mMaximumValue); + writeValue(packet, mStepSize); + } else if (mFormFlag == kFormEnum) { + packet.putUInt16(mEnumLength); + for (int i = 0; i < mEnumLength; i++) + writeValue(packet, mEnumValues[i]); + } +} + +void MtpProperty::setDefaultValue(const uint16_t* string) { + free(mDefaultValue.str); + if (string) { + MtpStringBuffer buffer(string); + mDefaultValue.str = strdup(buffer); + } + else + mDefaultValue.str = NULL; +} + +void MtpProperty::setCurrentValue(const uint16_t* string) { + free(mCurrentValue.str); + if (string) { + MtpStringBuffer buffer(string); + mCurrentValue.str = strdup(buffer); + } + else + mCurrentValue.str = NULL; +} + +void MtpProperty::setFormRange(int min, int max, int step) { + mFormFlag = kFormRange; + switch (mType) { + case MTP_TYPE_INT8: + mMinimumValue.u.i8 = min; + mMaximumValue.u.i8 = max; + mStepSize.u.i8 = step; + break; + case MTP_TYPE_UINT8: + mMinimumValue.u.u8 = min; + mMaximumValue.u.u8 = max; + mStepSize.u.u8 = step; + break; + case MTP_TYPE_INT16: + mMinimumValue.u.i16 = min; + mMaximumValue.u.i16 = max; + mStepSize.u.i16 = step; + break; + case MTP_TYPE_UINT16: + mMinimumValue.u.u16 = min; + mMaximumValue.u.u16 = max; + mStepSize.u.u16 = step; + break; + case MTP_TYPE_INT32: + mMinimumValue.u.i32 = min; + mMaximumValue.u.i32 = max; + mStepSize.u.i32 = step; + break; + case MTP_TYPE_UINT32: + mMinimumValue.u.u32 = min; + mMaximumValue.u.u32 = max; + mStepSize.u.u32 = step; + break; + case MTP_TYPE_INT64: + mMinimumValue.u.i64 = min; + mMaximumValue.u.i64 = max; + mStepSize.u.i64 = step; + break; + case MTP_TYPE_UINT64: + mMinimumValue.u.u64 = min; + mMaximumValue.u.u64 = max; + mStepSize.u.u64 = step; + break; + default: + LOGE("unsupported type for MtpProperty::setRange"); + break; + } +} + +void MtpProperty::setFormEnum(const int* values, int count) { + mFormFlag = kFormEnum; + delete[] mEnumValues; + mEnumValues = new MtpPropertyValue[count]; + mEnumLength = count; + + for (int i = 0; i < count; i++) { + int value = *values++; + switch (mType) { + case MTP_TYPE_INT8: + mEnumValues[i].u.i8 = value; + break; + case MTP_TYPE_UINT8: + mEnumValues[i].u.u8 = value; + break; + case MTP_TYPE_INT16: + mEnumValues[i].u.i16 = value; + break; + case MTP_TYPE_UINT16: + mEnumValues[i].u.u16 = value; + break; + case MTP_TYPE_INT32: + mEnumValues[i].u.i32 = value; + break; + case MTP_TYPE_UINT32: + mEnumValues[i].u.u32 = value; + break; + case MTP_TYPE_INT64: + mEnumValues[i].u.i64 = value; + break; + case 
MTP_TYPE_UINT64: + mEnumValues[i].u.u64 = value; + break; + default: + LOGE("unsupported type for MtpProperty::setEnum"); + break; + } + } +} + +void MtpProperty::setFormDateTime() { + mFormFlag = kFormDateTime; +} + +void MtpProperty::print() { + MtpString buffer; + bool deviceProp = isDeviceProperty(); + if (deviceProp) + LOGI(" %s (%04X)", MtpDebug::getDevicePropCodeName(mCode), mCode); + else + LOGI(" %s (%04X)", MtpDebug::getObjectPropCodeName(mCode), mCode); + LOGI(" type %04X", mType); + LOGI(" writeable %s", (mWriteable ? "true" : "false")); + buffer = " default value: "; + print(mDefaultValue, buffer); + LOGI("%s", (const char *)buffer); + if (deviceProp) { + buffer = " current value: "; + print(mCurrentValue, buffer); + LOGI("%s", (const char *)buffer); + } + switch (mFormFlag) { + case kFormNone: + break; + case kFormRange: + buffer = " Range ("; + print(mMinimumValue, buffer); + buffer += ", "; + print(mMaximumValue, buffer); + buffer += ", "; + print(mStepSize, buffer); + buffer += ")"; + LOGI("%s", (const char *)buffer); + break; + case kFormEnum: + buffer = " Enum { "; + for (int i = 0; i < mEnumLength; i++) { + print(mEnumValues[i], buffer); + buffer += " "; + } + buffer += "}"; + LOGI("%s", (const char *)buffer); + break; + case kFormDateTime: + LOGI(" DateTime\n"); + break; + default: + LOGI(" form %d\n", mFormFlag); + break; + } +} + +void MtpProperty::print(MtpPropertyValue& value, MtpString& buffer) { + switch (mType) { + case MTP_TYPE_INT8: + buffer.appendFormat("%d", value.u.i8); + break; + case MTP_TYPE_UINT8: + buffer.appendFormat("%d", value.u.u8); + break; + case MTP_TYPE_INT16: + buffer.appendFormat("%d", value.u.i16); + break; + case MTP_TYPE_UINT16: + buffer.appendFormat("%d", value.u.u16); + break; + case MTP_TYPE_INT32: + buffer.appendFormat("%d", value.u.i32); + break; + case MTP_TYPE_UINT32: + buffer.appendFormat("%d", value.u.u32); + break; + case MTP_TYPE_INT64: + buffer.appendFormat("%lld", value.u.i64); + break; + case MTP_TYPE_UINT64: + buffer.appendFormat("%lld", value.u.u64); + break; + case MTP_TYPE_INT128: + buffer.appendFormat("%08X%08X%08X%08X", value.u.i128[0], value.u.i128[1], + value.u.i128[2], value.u.i128[3]); + break; + case MTP_TYPE_UINT128: + buffer.appendFormat("%08X%08X%08X%08X", value.u.u128[0], value.u.u128[1], + value.u.u128[2], value.u.u128[3]); + break; + case MTP_TYPE_STR: + buffer.appendFormat("%s", value.str); + break; + default: + LOGE("unsupported type for MtpProperty::print\n"); + break; + } +} + +void MtpProperty::readValue(MtpDataPacket& packet, MtpPropertyValue& value) { + MtpStringBuffer stringBuffer; + + switch (mType) { + case MTP_TYPE_INT8: + case MTP_TYPE_AINT8: + value.u.i8 = packet.getInt8(); + break; + case MTP_TYPE_UINT8: + case MTP_TYPE_AUINT8: + value.u.u8 = packet.getUInt8(); + break; + case MTP_TYPE_INT16: + case MTP_TYPE_AINT16: + value.u.i16 = packet.getInt16(); + break; + case MTP_TYPE_UINT16: + case MTP_TYPE_AUINT16: + value.u.u16 = packet.getUInt16(); + break; + case MTP_TYPE_INT32: + case MTP_TYPE_AINT32: + value.u.i32 = packet.getInt32(); + break; + case MTP_TYPE_UINT32: + case MTP_TYPE_AUINT32: + value.u.u32 = packet.getUInt32(); + break; + case MTP_TYPE_INT64: + case MTP_TYPE_AINT64: + value.u.i64 = packet.getInt64(); + break; + case MTP_TYPE_UINT64: + case MTP_TYPE_AUINT64: + value.u.u64 = packet.getUInt64(); + break; + case MTP_TYPE_INT128: + case MTP_TYPE_AINT128: + packet.getInt128(value.u.i128); + break; + case MTP_TYPE_UINT128: + case MTP_TYPE_AUINT128: + packet.getUInt128(value.u.u128); + 
break; + case MTP_TYPE_STR: + packet.getString(stringBuffer); + value.str = strdup(stringBuffer); + break; + default: + LOGE("unknown type %04X in MtpProperty::readValue", mType); + } +} + +void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) { + MtpStringBuffer stringBuffer; + + switch (mType) { + case MTP_TYPE_INT8: + case MTP_TYPE_AINT8: + packet.putInt8(value.u.i8); + break; + case MTP_TYPE_UINT8: + case MTP_TYPE_AUINT8: + packet.putUInt8(value.u.u8); + break; + case MTP_TYPE_INT16: + case MTP_TYPE_AINT16: + packet.putInt16(value.u.i16); + break; + case MTP_TYPE_UINT16: + case MTP_TYPE_AUINT16: + packet.putUInt16(value.u.u16); + break; + case MTP_TYPE_INT32: + case MTP_TYPE_AINT32: + packet.putInt32(value.u.i32); + break; + case MTP_TYPE_UINT32: + case MTP_TYPE_AUINT32: + packet.putUInt32(value.u.u32); + break; + case MTP_TYPE_INT64: + case MTP_TYPE_AINT64: + packet.putInt64(value.u.i64); + break; + case MTP_TYPE_UINT64: + case MTP_TYPE_AUINT64: + packet.putUInt64(value.u.u64); + break; + case MTP_TYPE_INT128: + case MTP_TYPE_AINT128: + packet.putInt128(value.u.i128); + break; + case MTP_TYPE_UINT128: + case MTP_TYPE_AUINT128: + packet.putUInt128(value.u.u128); + break; + case MTP_TYPE_STR: + if (value.str) + packet.putString(value.str); + else + packet.putEmptyString(); + break; + default: + LOGE("unknown type %04X in MtpProperty::writeValue", mType); + } +} + +MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) { + length = packet.getUInt32(); + if (length == 0) + return NULL; + MtpPropertyValue* result = new MtpPropertyValue[length]; + for (int i = 0; i < length; i++) + readValue(packet, result[i]); + return result; +} + +void MtpProperty::writeArrayValues(MtpDataPacket& packet, MtpPropertyValue* values, int length) { + packet.putUInt32(length); + for (int i = 0; i < length; i++) + writeValue(packet, values[i]); +} + +} // namespace android diff --git a/media/mtp/MtpProperty.h b/media/mtp/MtpProperty.h new file mode 100644 index 0000000..06ca56e --- /dev/null +++ b/media/mtp/MtpProperty.h @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MTP_PROPERTY_H +#define _MTP_PROPERTY_H + +#include "MtpTypes.h" + +namespace android { + +class MtpDataPacket; + +struct MtpPropertyValue { + union { + int8_t i8; + uint8_t u8; + int16_t i16; + uint16_t u16; + int32_t i32; + uint32_t u32; + int64_t i64; + uint64_t u64; + int128_t i128; + uint128_t u128; + } u; + // string in UTF8 format + char* str; +}; + +class MtpProperty { +public: + MtpPropertyCode mCode; + MtpDataType mType; + bool mWriteable; + MtpPropertyValue mDefaultValue; + MtpPropertyValue mCurrentValue; + + // for array types + int mDefaultArrayLength; + MtpPropertyValue* mDefaultArrayValues; + int mCurrentArrayLength; + MtpPropertyValue* mCurrentArrayValues; + + enum { + kFormNone = 0, + kFormRange = 1, + kFormEnum = 2, + kFormDateTime = 3, + }; + + uint32_t mGroupCode; + uint8_t mFormFlag; + + // for range form + MtpPropertyValue mMinimumValue; + MtpPropertyValue mMaximumValue; + MtpPropertyValue mStepSize; + + // for enum form + int mEnumLength; + MtpPropertyValue* mEnumValues; + +public: + MtpProperty(); + MtpProperty(MtpPropertyCode propCode, + MtpDataType type, + bool writeable = false, + int defaultValue = 0); + virtual ~MtpProperty(); + + inline MtpPropertyCode getPropertyCode() const { return mCode; } + + void read(MtpDataPacket& packet); + void write(MtpDataPacket& packet); + + void setDefaultValue(const uint16_t* string); + void setCurrentValue(const uint16_t* string); + + void setFormRange(int min, int max, int step); + void setFormEnum(const int* values, int count); + void setFormDateTime(); + + void print(); + void print(MtpPropertyValue& value, MtpString& buffer); + + inline bool isDeviceProperty() const { + return ( ((mCode & 0xF000) == 0x5000) + || ((mCode & 0xF800) == 0xD000)); + } + +private: + void readValue(MtpDataPacket& packet, MtpPropertyValue& value); + void writeValue(MtpDataPacket& packet, MtpPropertyValue& value); + MtpPropertyValue* readArrayValues(MtpDataPacket& packet, int& length); + void writeArrayValues(MtpDataPacket& packet, + MtpPropertyValue* values, int length); +}; + +}; // namespace android + +#endif // _MTP_PROPERTY_H diff --git a/media/mtp/MtpRequestPacket.cpp b/media/mtp/MtpRequestPacket.cpp new file mode 100644 index 0000000..0e58e01 --- /dev/null +++ b/media/mtp/MtpRequestPacket.cpp @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "MtpRequestPacket" + +#include <stdio.h> +#include <sys/types.h> +#include <fcntl.h> + +#include "MtpRequestPacket.h" + +#include <usbhost/usbhost.h> + +namespace android { + +MtpRequestPacket::MtpRequestPacket() + : MtpPacket(512) +{ +} + +MtpRequestPacket::~MtpRequestPacket() { +} + +#ifdef MTP_DEVICE +int MtpRequestPacket::read(int fd) { + int ret = ::read(fd, mBuffer, mBufferSize); + if (ret >= 0) + mPacketSize = ret; + else + mPacketSize = 0; + return ret; +} +#endif + +#ifdef MTP_HOST + // write our buffer to the given endpoint (host mode) +int MtpRequestPacket::write(struct usb_request *request) +{ + putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize); + putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_COMMAND); + request->buffer = mBuffer; + request->buffer_length = mPacketSize; + return transfer(request); +} +#endif + +} // namespace android diff --git a/media/mtp/MtpRequestPacket.h b/media/mtp/MtpRequestPacket.h new file mode 100644 index 0000000..1201f11 --- /dev/null +++ b/media/mtp/MtpRequestPacket.h @@ -0,0 +1,50 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_REQUEST_PACKET_H +#define _MTP_REQUEST_PACKET_H + +#include "MtpPacket.h" +#include "mtp.h" + +struct usb_request; + +namespace android { + +class MtpRequestPacket : public MtpPacket { + +public: + MtpRequestPacket(); + virtual ~MtpRequestPacket(); + +#ifdef MTP_DEVICE + // fill our buffer with data from the given file descriptor + int read(int fd); +#endif + +#ifdef MTP_HOST + // write our buffer to the given endpoint + int write(struct usb_request *request); +#endif + + inline MtpOperationCode getOperationCode() const { return getContainerCode(); } + inline void setOperationCode(MtpOperationCode code) + { return setContainerCode(code); } +}; + +}; // namespace android + +#endif // _MTP_REQUEST_PACKET_H diff --git a/media/mtp/MtpResponsePacket.cpp b/media/mtp/MtpResponsePacket.cpp new file mode 100644 index 0000000..c2b41e4 --- /dev/null +++ b/media/mtp/MtpResponsePacket.cpp @@ -0,0 +1,60 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "MtpResponsePacket" + +#include <stdio.h> +#include <sys/types.h> +#include <fcntl.h> + +#include "MtpResponsePacket.h" + +#include <usbhost/usbhost.h> + +namespace android { + +MtpResponsePacket::MtpResponsePacket() + : MtpPacket(512) +{ +} + +MtpResponsePacket::~MtpResponsePacket() { +} + +#ifdef MTP_DEVICE +int MtpResponsePacket::write(int fd) { + putUInt32(MTP_CONTAINER_LENGTH_OFFSET, mPacketSize); + putUInt16(MTP_CONTAINER_TYPE_OFFSET, MTP_CONTAINER_TYPE_RESPONSE); + int ret = ::write(fd, mBuffer, mPacketSize); + return (ret < 0 ? ret : 0); +} +#endif + +#ifdef MTP_HOST +int MtpResponsePacket::read(struct usb_request *request) { + request->buffer = mBuffer; + request->buffer_length = mBufferSize; + int ret = transfer(request); + if (ret >= 0) + mPacketSize = ret; + else + mPacketSize = 0; + return ret; +} +#endif + +} // namespace android + diff --git a/media/mtp/MtpResponsePacket.h b/media/mtp/MtpResponsePacket.h new file mode 100644 index 0000000..592ad4a --- /dev/null +++ b/media/mtp/MtpResponsePacket.h @@ -0,0 +1,48 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_RESPONSE_PACKET_H +#define _MTP_RESPONSE_PACKET_H + +#include "MtpPacket.h" +#include "mtp.h" + +namespace android { + +class MtpResponsePacket : public MtpPacket { + +public: + MtpResponsePacket(); + virtual ~MtpResponsePacket(); + +#ifdef MTP_DEVICE + // write our data to the given file descriptor + int write(int fd); +#endif + +#ifdef MTP_HOST + // read our buffer with the given request + int read(struct usb_request *request); +#endif + + inline MtpResponseCode getResponseCode() const { return getContainerCode(); } + inline void setResponseCode(MtpResponseCode code) + { return setContainerCode(code); } +}; + +}; // namespace android + +#endif // _MTP_RESPONSE_PACKET_H diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp new file mode 100644 index 0000000..be004d2 --- /dev/null +++ b/media/mtp/MtpServer.cpp @@ -0,0 +1,878 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include <stdio.h> +#include <stdlib.h> +#include <sys/types.h> +#include <sys/ioctl.h> +#include <sys/stat.h> +#include <fcntl.h> +#include <errno.h> +#include <sys/stat.h> +#include <dirent.h> + +#include <cutils/properties.h> + +#define LOG_TAG "MtpServer" + +#include "MtpDebug.h" +#include "MtpDatabase.h" +#include "MtpProperty.h" +#include "MtpServer.h" +#include "MtpStorage.h" +#include "MtpStringBuffer.h" + +#include <linux/usb/f_mtp.h> + +namespace android { + +static const MtpOperationCode kSupportedOperationCodes[] = { + MTP_OPERATION_GET_DEVICE_INFO, + MTP_OPERATION_OPEN_SESSION, + MTP_OPERATION_CLOSE_SESSION, + MTP_OPERATION_GET_STORAGE_IDS, + MTP_OPERATION_GET_STORAGE_INFO, + MTP_OPERATION_GET_NUM_OBJECTS, + MTP_OPERATION_GET_OBJECT_HANDLES, + MTP_OPERATION_GET_OBJECT_INFO, + MTP_OPERATION_GET_OBJECT, +// MTP_OPERATION_GET_THUMB, + MTP_OPERATION_DELETE_OBJECT, + MTP_OPERATION_SEND_OBJECT_INFO, + MTP_OPERATION_SEND_OBJECT, +// MTP_OPERATION_INITIATE_CAPTURE, +// MTP_OPERATION_FORMAT_STORE, +// MTP_OPERATION_RESET_DEVICE, +// MTP_OPERATION_SELF_TEST, +// MTP_OPERATION_SET_OBJECT_PROTECTION, +// MTP_OPERATION_POWER_DOWN, + MTP_OPERATION_GET_DEVICE_PROP_DESC, + MTP_OPERATION_GET_DEVICE_PROP_VALUE, + MTP_OPERATION_SET_DEVICE_PROP_VALUE, + MTP_OPERATION_RESET_DEVICE_PROP_VALUE, +// MTP_OPERATION_TERMINATE_OPEN_CAPTURE, +// MTP_OPERATION_MOVE_OBJECT, +// MTP_OPERATION_COPY_OBJECT, + MTP_OPERATION_GET_PARTIAL_OBJECT, +// MTP_OPERATION_INITIATE_OPEN_CAPTURE, + MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED, + MTP_OPERATION_GET_OBJECT_PROP_DESC, + MTP_OPERATION_GET_OBJECT_PROP_VALUE, + MTP_OPERATION_SET_OBJECT_PROP_VALUE, + MTP_OPERATION_GET_OBJECT_PROP_LIST, +// MTP_OPERATION_SET_OBJECT_PROP_LIST, +// MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC, +// MTP_OPERATION_SEND_OBJECT_PROP_LIST, + MTP_OPERATION_GET_OBJECT_REFERENCES, + MTP_OPERATION_SET_OBJECT_REFERENCES, +// MTP_OPERATION_SKIP, +}; + +static const MtpEventCode kSupportedEventCodes[] = { + MTP_EVENT_OBJECT_ADDED, + MTP_EVENT_OBJECT_REMOVED, +}; + +MtpServer::MtpServer(int fd, MtpDatabase* database, + int fileGroup, int filePerm, int directoryPerm) + : mFD(fd), + mDatabase(database), + mFileGroup(fileGroup), + mFilePermission(filePerm), + mDirectoryPermission(directoryPerm), + mSessionID(0), + mSessionOpen(false), + mSendObjectHandle(kInvalidObjectHandle), + mSendObjectFormat(0), + mSendObjectFileSize(0) +{ +} + +MtpServer::~MtpServer() { +} + +void MtpServer::addStorage(const char* filePath, uint64_t reserveSpace) { + int index = mStorages.size() + 1; + index |= index << 16; // set high and low part to our index + MtpStorage* storage = new MtpStorage(index, filePath, reserveSpace); + addStorage(storage); +} + +MtpStorage* MtpServer::getStorage(MtpStorageID id) { + if (id == 0) + return mStorages[0]; + for (int i = 0; i < mStorages.size(); i++) { + MtpStorage* storage = mStorages[i]; + if (storage->getStorageID() == id) + return storage; + } + return NULL; +} + +void MtpServer::run() { + int fd = mFD; + + LOGV("MtpServer::run fd: %d\n", fd); + + while (1) { + int ret = mRequest.read(fd); + if (ret < 0) { + LOGV("request read returned %d, errno: %d", ret, errno); + if (errno == ECANCELED) { + // return to top of loop and wait for next command + continue; + } + break; + } + MtpOperationCode operation = mRequest.getOperationCode(); + MtpTransactionID transaction = mRequest.getTransactionID(); + + LOGV("operation: %s", MtpDebug::getOperationCodeName(operation)); + mRequest.dump(); + + // FIXME need to generalize this + bool dataIn = 
(operation == MTP_OPERATION_SEND_OBJECT_INFO + || operation == MTP_OPERATION_SET_OBJECT_REFERENCES + || operation == MTP_OPERATION_SET_OBJECT_PROP_VALUE + || operation == MTP_OPERATION_SET_DEVICE_PROP_VALUE); + if (dataIn) { + int ret = mData.read(fd); + if (ret < 0) { + LOGE("data read returned %d, errno: %d", ret, errno); + if (errno == ECANCELED) { + // return to top of loop and wait for next command + continue; + } + break; + } + LOGV("received data:"); + mData.dump(); + } else { + mData.reset(); + } + + if (handleRequest()) { + if (!dataIn && mData.hasData()) { + mData.setOperationCode(operation); + mData.setTransactionID(transaction); + LOGV("sending data:"); + mData.dump(); + ret = mData.write(fd); + if (ret < 0) { + LOGE("request write returned %d, errno: %d", ret, errno); + if (errno == ECANCELED) { + // return to top of loop and wait for next command + continue; + } + break; + } + } + + mResponse.setTransactionID(transaction); + LOGV("sending response %04X", mResponse.getResponseCode()); + ret = mResponse.write(fd); + mResponse.dump(); + if (ret < 0) { + LOGE("request write returned %d, errno: %d", ret, errno); + if (errno == ECANCELED) { + // return to top of loop and wait for next command + continue; + } + break; + } + } else { + LOGV("skipping response\n"); + } + } + + if (mSessionOpen) + mDatabase->sessionEnded(); +} + +void MtpServer::sendObjectAdded(MtpObjectHandle handle) { + if (mSessionOpen) { + LOGV("sendObjectAdded %d\n", handle); + mEvent.setEventCode(MTP_EVENT_OBJECT_ADDED); + mEvent.setTransactionID(mRequest.getTransactionID()); + mEvent.setParameter(1, handle); + int ret = mEvent.write(mFD); + LOGV("mEvent.write returned %d\n", ret); + } +} + +void MtpServer::sendObjectRemoved(MtpObjectHandle handle) { + if (mSessionOpen) { + LOGV("sendObjectRemoved %d\n", handle); + mEvent.setEventCode(MTP_EVENT_OBJECT_REMOVED); + mEvent.setTransactionID(mRequest.getTransactionID()); + mEvent.setParameter(1, handle); + int ret = mEvent.write(mFD); + LOGV("mEvent.write returned %d\n", ret); + } +} + +bool MtpServer::handleRequest() { + MtpOperationCode operation = mRequest.getOperationCode(); + MtpResponseCode response; + + mResponse.reset(); + + if (mSendObjectHandle != kInvalidObjectHandle && operation != MTP_OPERATION_SEND_OBJECT) { + // FIXME - need to delete mSendObjectHandle from the database + LOGE("expected SendObject after SendObjectInfo"); + mSendObjectHandle = kInvalidObjectHandle; + } + + switch (operation) { + case MTP_OPERATION_GET_DEVICE_INFO: + response = doGetDeviceInfo(); + break; + case MTP_OPERATION_OPEN_SESSION: + response = doOpenSession(); + break; + case MTP_OPERATION_CLOSE_SESSION: + response = doCloseSession(); + break; + case MTP_OPERATION_GET_STORAGE_IDS: + response = doGetStorageIDs(); + break; + case MTP_OPERATION_GET_STORAGE_INFO: + response = doGetStorageInfo(); + break; + case MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED: + response = doGetObjectPropsSupported(); + break; + case MTP_OPERATION_GET_OBJECT_HANDLES: + response = doGetObjectHandles(); + break; + case MTP_OPERATION_GET_NUM_OBJECTS: + response = doGetNumObjects(); + break; + case MTP_OPERATION_GET_OBJECT_REFERENCES: + response = doGetObjectReferences(); + break; + case MTP_OPERATION_SET_OBJECT_REFERENCES: + response = doSetObjectReferences(); + break; + case MTP_OPERATION_GET_OBJECT_PROP_VALUE: + response = doGetObjectPropValue(); + break; + case MTP_OPERATION_SET_OBJECT_PROP_VALUE: + response = doSetObjectPropValue(); + break; + case MTP_OPERATION_GET_DEVICE_PROP_VALUE: + response = 
doGetDevicePropValue(); + break; + case MTP_OPERATION_SET_DEVICE_PROP_VALUE: + response = doSetDevicePropValue(); + break; + case MTP_OPERATION_RESET_DEVICE_PROP_VALUE: + response = doResetDevicePropValue(); + break; + case MTP_OPERATION_GET_OBJECT_PROP_LIST: + response = doGetObjectPropList(); + break; + case MTP_OPERATION_GET_OBJECT_INFO: + response = doGetObjectInfo(); + break; + case MTP_OPERATION_GET_OBJECT: + response = doGetObject(); + break; + case MTP_OPERATION_GET_PARTIAL_OBJECT: + response = doGetPartialObject(); + break; + case MTP_OPERATION_SEND_OBJECT_INFO: + response = doSendObjectInfo(); + break; + case MTP_OPERATION_SEND_OBJECT: + response = doSendObject(); + break; + case MTP_OPERATION_DELETE_OBJECT: + response = doDeleteObject(); + break; + case MTP_OPERATION_GET_OBJECT_PROP_DESC: + response = doGetObjectPropDesc(); + break; + case MTP_OPERATION_GET_DEVICE_PROP_DESC: + response = doGetDevicePropDesc(); + break; + default: + LOGE("got unsupported command %s", MtpDebug::getOperationCodeName(operation)); + response = MTP_RESPONSE_OPERATION_NOT_SUPPORTED; + break; + } + + if (response == MTP_RESPONSE_TRANSACTION_CANCELLED) + return false; + mResponse.setResponseCode(response); + return true; +} + +MtpResponseCode MtpServer::doGetDeviceInfo() { + MtpStringBuffer string; + char prop_value[PROPERTY_VALUE_MAX]; + + MtpObjectFormatList* playbackFormats = mDatabase->getSupportedPlaybackFormats(); + MtpObjectFormatList* captureFormats = mDatabase->getSupportedCaptureFormats(); + MtpDevicePropertyList* deviceProperties = mDatabase->getSupportedDeviceProperties(); + + // fill in device info + mData.putUInt16(MTP_STANDARD_VERSION); + mData.putUInt32(6); // MTP Vendor Extension ID + mData.putUInt16(MTP_STANDARD_VERSION); + string.set("microsoft.com: 1.0;"); + mData.putString(string); // MTP Extensions + mData.putUInt16(0); //Functional Mode + mData.putAUInt16(kSupportedOperationCodes, + sizeof(kSupportedOperationCodes) / sizeof(uint16_t)); // Operations Supported + mData.putAUInt16(kSupportedEventCodes, + sizeof(kSupportedEventCodes) / sizeof(uint16_t)); // Events Supported + mData.putAUInt16(deviceProperties); // Device Properties Supported + mData.putAUInt16(captureFormats); // Capture Formats + mData.putAUInt16(playbackFormats); // Playback Formats + // FIXME + string.set("Google, Inc."); + mData.putString(string); // Manufacturer + + property_get("ro.product.model", prop_value, "MTP Device"); + string.set(prop_value); + mData.putString(string); // Model + string.set("1.0"); + mData.putString(string); // Device Version + + property_get("ro.serialno", prop_value, "????????"); + string.set(prop_value); + mData.putString(string); // Serial Number + + delete playbackFormats; + delete captureFormats; + delete deviceProperties; + + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doOpenSession() { + if (mSessionOpen) { + mResponse.setParameter(1, mSessionID); + return MTP_RESPONSE_SESSION_ALREADY_OPEN; + } + mSessionID = mRequest.getParameter(1); + mSessionOpen = true; + + mDatabase->sessionStarted(); + + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doCloseSession() { + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + mSessionID = 0; + mSessionOpen = false; + mDatabase->sessionEnded(); + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doGetStorageIDs() { + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + + int count = mStorages.size(); + mData.putUInt32(count); + for (int i = 0; i < count; i++) + 
mData.putUInt32(mStorages[i]->getStorageID()); + + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doGetStorageInfo() { + MtpStringBuffer string; + + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + MtpStorageID id = mRequest.getParameter(1); + MtpStorage* storage = getStorage(id); + if (!storage) + return MTP_RESPONSE_INVALID_STORAGE_ID; + + mData.putUInt16(storage->getType()); + mData.putUInt16(storage->getFileSystemType()); + mData.putUInt16(storage->getAccessCapability()); + mData.putUInt64(storage->getMaxCapacity()); + mData.putUInt64(storage->getFreeSpace()); + mData.putUInt32(1024*1024*1024); // Free Space in Objects + string.set(storage->getDescription()); + mData.putString(string); + mData.putEmptyString(); // Volume Identifier + + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doGetObjectPropsSupported() { + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + MtpObjectFormat format = mRequest.getParameter(1); + MtpObjectPropertyList* properties = mDatabase->getSupportedObjectProperties(format); + mData.putAUInt16(properties); + delete properties; + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doGetObjectHandles() { + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage + MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats + MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent + // 0x00000000 for all objects? + if (parent == 0xFFFFFFFF) + parent = 0; + + MtpObjectHandleList* handles = mDatabase->getObjectList(storageID, format, parent); + mData.putAUInt32(handles); + delete handles; + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doGetNumObjects() { + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + MtpStorageID storageID = mRequest.getParameter(1); // 0xFFFFFFFF for all storage + MtpObjectFormat format = mRequest.getParameter(2); // 0 for all formats + MtpObjectHandle parent = mRequest.getParameter(3); // 0xFFFFFFFF for objects with no parent + // 0x00000000 for all objects? 
+ if (parent == 0xFFFFFFFF) + parent = 0; + + int count = mDatabase->getNumObjects(storageID, format, parent); + if (count >= 0) { + mResponse.setParameter(1, count); + return MTP_RESPONSE_OK; + } else { + mResponse.setParameter(1, 0); + return MTP_RESPONSE_INVALID_OBJECT_HANDLE; + } +} + +MtpResponseCode MtpServer::doGetObjectReferences() { + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + MtpStorageID handle = mRequest.getParameter(1); + + // FIXME - check for invalid object handle + MtpObjectHandleList* handles = mDatabase->getObjectReferences(handle); + if (handles) { + mData.putAUInt32(handles); + delete handles; + } else { + mData.putEmptyArray(); + } + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doSetObjectReferences() { + if (!mSessionOpen) + return MTP_RESPONSE_SESSION_NOT_OPEN; + MtpStorageID handle = mRequest.getParameter(1); + MtpObjectHandleList* references = mData.getAUInt32(); + MtpResponseCode result = mDatabase->setObjectReferences(handle, references); + delete references; + return result; +} + +MtpResponseCode MtpServer::doGetObjectPropValue() { + MtpObjectHandle handle = mRequest.getParameter(1); + MtpObjectProperty property = mRequest.getParameter(2); + LOGV("GetObjectPropValue %d %s\n", handle, + MtpDebug::getObjectPropCodeName(property)); + + return mDatabase->getObjectPropertyValue(handle, property, mData); +} + +MtpResponseCode MtpServer::doSetObjectPropValue() { + MtpObjectHandle handle = mRequest.getParameter(1); + MtpObjectProperty property = mRequest.getParameter(2); + LOGV("SetObjectPropValue %d %s\n", handle, + MtpDebug::getObjectPropCodeName(property)); + + return mDatabase->setObjectPropertyValue(handle, property, mData); +} + +MtpResponseCode MtpServer::doGetDevicePropValue() { + MtpDeviceProperty property = mRequest.getParameter(1); + LOGV("GetDevicePropValue %s\n", + MtpDebug::getDevicePropCodeName(property)); + + return mDatabase->getDevicePropertyValue(property, mData); +} + +MtpResponseCode MtpServer::doSetDevicePropValue() { + MtpDeviceProperty property = mRequest.getParameter(1); + LOGV("SetDevicePropValue %s\n", + MtpDebug::getDevicePropCodeName(property)); + + return mDatabase->setDevicePropertyValue(property, mData); +} + +MtpResponseCode MtpServer::doResetDevicePropValue() { + MtpDeviceProperty property = mRequest.getParameter(1); + LOGV("ResetDevicePropValue %s\n", + MtpDebug::getDevicePropCodeName(property)); + + return mDatabase->resetDeviceProperty(property); +} + +MtpResponseCode MtpServer::doGetObjectPropList() { + + MtpObjectHandle handle = mRequest.getParameter(1); + // use uint32_t so we can support 0xFFFFFFFF + uint32_t format = mRequest.getParameter(2); + uint32_t property = mRequest.getParameter(3); + int groupCode = mRequest.getParameter(4); + int depth = mRequest.getParameter(5); + LOGV("GetObjectPropList %d format: %s property: %s group: %d depth: %d\n", + handle, MtpDebug::getFormatCodeName(format), + MtpDebug::getObjectPropCodeName(property), groupCode, depth); + + return mDatabase->getObjectPropertyList(handle, format, property, groupCode, depth, mData); +} + +MtpResponseCode MtpServer::doGetObjectInfo() { + MtpObjectHandle handle = mRequest.getParameter(1); + return mDatabase->getObjectInfo(handle, mData); +} + +MtpResponseCode MtpServer::doGetObject() { + MtpObjectHandle handle = mRequest.getParameter(1); + MtpString pathBuf; + int64_t fileLength; + MtpObjectFormat format; + int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format); + if (result != MTP_RESPONSE_OK) + return 
result; + + const char* filePath = (const char *)pathBuf; + mtp_file_range mfr; + mfr.fd = open(filePath, O_RDONLY); + if (mfr.fd < 0) { + return MTP_RESPONSE_GENERAL_ERROR; + } + mfr.offset = 0; + mfr.length = fileLength; + + // send data header + mData.setOperationCode(mRequest.getOperationCode()); + mData.setTransactionID(mRequest.getTransactionID()); + mData.writeDataHeader(mFD, fileLength + MTP_CONTAINER_HEADER_SIZE); + + // then transfer the file + int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr); + close(mfr.fd); + if (ret < 0) { + if (errno == ECANCELED) + return MTP_RESPONSE_TRANSACTION_CANCELLED; + else + return MTP_RESPONSE_GENERAL_ERROR; + } + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doGetPartialObject() { + MtpObjectHandle handle = mRequest.getParameter(1); + uint32_t offset = mRequest.getParameter(2); + uint32_t length = mRequest.getParameter(3); + MtpString pathBuf; + int64_t fileLength; + MtpObjectFormat format; + int result = mDatabase->getObjectFilePath(handle, pathBuf, fileLength, format); + if (result != MTP_RESPONSE_OK) + return result; + if (offset + length > fileLength) + length = fileLength - offset; + + const char* filePath = (const char *)pathBuf; + mtp_file_range mfr; + mfr.fd = open(filePath, O_RDONLY); + if (mfr.fd < 0) { + return MTP_RESPONSE_GENERAL_ERROR; + } + mfr.offset = offset; + mfr.length = length; + mResponse.setParameter(1, length); + + // send data header + mData.setOperationCode(mRequest.getOperationCode()); + mData.setTransactionID(mRequest.getTransactionID()); + mData.writeDataHeader(mFD, length + MTP_CONTAINER_HEADER_SIZE); + + // then transfer the file + int ret = ioctl(mFD, MTP_SEND_FILE, (unsigned long)&mfr); + close(mfr.fd); + if (ret < 0) { + if (errno == ECANCELED) + return MTP_RESPONSE_TRANSACTION_CANCELLED; + else + return MTP_RESPONSE_GENERAL_ERROR; + } + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doSendObjectInfo() { + MtpString path; + MtpStorageID storageID = mRequest.getParameter(1); + MtpStorage* storage = getStorage(storageID); + MtpObjectHandle parent = mRequest.getParameter(2); + if (!storage) + return MTP_RESPONSE_INVALID_STORAGE_ID; + + // special case the root + if (parent == MTP_PARENT_ROOT) { + path = storage->getPath(); + parent = 0; + } else { + int64_t length; + MtpObjectFormat format; + int result = mDatabase->getObjectFilePath(parent, path, length, format); + if (result != MTP_RESPONSE_OK) + return result; + if (format != MTP_FORMAT_ASSOCIATION) + return MTP_RESPONSE_INVALID_PARENT_OBJECT; + } + + // read only the fields we need + mData.getUInt32(); // storage ID + MtpObjectFormat format = mData.getUInt16(); + mData.getUInt16(); // protection status + mSendObjectFileSize = mData.getUInt32(); + mData.getUInt16(); // thumb format + mData.getUInt32(); // thumb compressed size + mData.getUInt32(); // thumb pix width + mData.getUInt32(); // thumb pix height + mData.getUInt32(); // image pix width + mData.getUInt32(); // image pix height + mData.getUInt32(); // image bit depth + mData.getUInt32(); // parent + uint16_t associationType = mData.getUInt16(); + uint32_t associationDesc = mData.getUInt32(); // association desc + mData.getUInt32(); // sequence number + MtpStringBuffer name, created, modified; + mData.getString(name); // file name + mData.getString(created); // date created + mData.getString(modified); // date modified + // keywords follow + + LOGV("name: %s format: %04X\n", (const char *)name, format); + time_t modifiedTime; + if (!parseDateTime(modified, modifiedTime)) + 
modifiedTime = 0; + + if (path[path.size() - 1] != '/') + path += "/"; + path += (const char *)name; + + // check space first + if (mSendObjectFileSize > storage->getFreeSpace()) + return MTP_RESPONSE_STORAGE_FULL; + + MtpObjectHandle handle = mDatabase->beginSendObject((const char*)path, + format, parent, storageID, mSendObjectFileSize, modifiedTime); + if (handle == kInvalidObjectHandle) { + return MTP_RESPONSE_GENERAL_ERROR; + } + + if (format == MTP_FORMAT_ASSOCIATION) { + mode_t mask = umask(0); + int ret = mkdir((const char *)path, mDirectoryPermission); + umask(mask); + if (ret && errno != EEXIST) + return MTP_RESPONSE_GENERAL_ERROR; + chown((const char *)path, getuid(), mFileGroup); + + // SendObject does not get sent for directories, so call endSendObject here instead + mDatabase->endSendObject(path, handle, MTP_FORMAT_ASSOCIATION, MTP_RESPONSE_OK); + } else { + mSendObjectFilePath = path; + // save the handle for the SendObject call, which should follow + mSendObjectHandle = handle; + mSendObjectFormat = format; + } + + mResponse.setParameter(1, storageID); + mResponse.setParameter(2, parent); + mResponse.setParameter(3, handle); + + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doSendObject() { + MtpResponseCode result = MTP_RESPONSE_OK; + mode_t mask; + int ret; + + if (mSendObjectHandle == kInvalidObjectHandle) { + LOGE("Expected SendObjectInfo before SendObject"); + result = MTP_RESPONSE_NO_VALID_OBJECT_INFO; + goto done; + } + + // read the header + ret = mData.readDataHeader(mFD); + // FIXME - check for errors here. + + // reset so we don't attempt to send this back + mData.reset(); + + mtp_file_range mfr; + mfr.fd = open(mSendObjectFilePath, O_RDWR | O_CREAT | O_TRUNC, mFilePermission); + if (mfr.fd < 0) { + result = MTP_RESPONSE_GENERAL_ERROR; + goto done; + } + fchown(mfr.fd, getuid(), mFileGroup); + // set permissions + mask = umask(0); + fchmod(mfr.fd, mFilePermission); + umask(mask); + + mfr.offset = 0; + mfr.length = mSendObjectFileSize; + + LOGV("receiving %s\n", (const char *)mSendObjectFilePath); + // transfer the file + ret = ioctl(mFD, MTP_RECEIVE_FILE, (unsigned long)&mfr); + close(mfr.fd); + + LOGV("MTP_RECEIVE_FILE returned %d", ret); + + if (ret < 0) { + unlink(mSendObjectFilePath); + if (errno == ECANCELED) + result = MTP_RESPONSE_TRANSACTION_CANCELLED; + else + result = MTP_RESPONSE_GENERAL_ERROR; + } + +done: + mDatabase->endSendObject(mSendObjectFilePath, mSendObjectHandle, mSendObjectFormat, + result == MTP_RESPONSE_OK); + mSendObjectHandle = kInvalidObjectHandle; + mSendObjectFormat = 0; + return result; +} + +static void deleteRecursive(const char* path) { + char pathbuf[PATH_MAX]; + int pathLength = strlen(path); + if (pathLength >= sizeof(pathbuf) - 1) { + LOGE("path too long: %s\n", path); + // bail out before the strcpy below overflows pathbuf + return; + } + strcpy(pathbuf, path); + if (pathbuf[pathLength - 1] != '/') { + pathbuf[pathLength++] = '/'; + } + char* fileSpot = pathbuf + pathLength; + int pathRemaining = sizeof(pathbuf) - pathLength - 1; + + DIR* dir = opendir(path); + if (!dir) { + LOGE("opendir %s failed: %s", path, strerror(errno)); + return; + } + + struct dirent* entry; + while ((entry = readdir(dir))) { + const char* name = entry->d_name; + + // ignore "." and ".." + if (name[0] == '.' && (name[1] == 0 || (name[1] == '.' 
&& name[2] == 0))) { + continue; + } + + int nameLength = strlen(name); + if (nameLength > pathRemaining) { + LOGE("path %s/%s too long\n", path, name); + continue; + } + strcpy(fileSpot, name); + + int type = entry->d_type; + if (entry->d_type == DT_DIR) { + deleteRecursive(pathbuf); + rmdir(pathbuf); + } else { + unlink(pathbuf); + } + } + closedir(dir); +} + +static void deletePath(const char* path) { + struct stat statbuf; + if (stat(path, &statbuf) == 0) { + if (S_ISDIR(statbuf.st_mode)) { + deleteRecursive(path); + rmdir(path); + } else { + unlink(path); + } + } else { + LOGE("deletePath stat failed for %s: %s", path, strerror(errno)); + } +} + +MtpResponseCode MtpServer::doDeleteObject() { + MtpObjectHandle handle = mRequest.getParameter(1); + MtpObjectFormat format = mRequest.getParameter(2); + // FIXME - support deleting all objects if handle is 0xFFFFFFFF + // FIXME - implement deleting objects by format + + MtpString filePath; + int64_t fileLength; + int result = mDatabase->getObjectFilePath(handle, filePath, fileLength, format); + if (result == MTP_RESPONSE_OK) { + LOGV("deleting %s", (const char *)filePath); + deletePath((const char *)filePath); + return mDatabase->deleteFile(handle); + } else { + return result; + } +} + +MtpResponseCode MtpServer::doGetObjectPropDesc() { + MtpObjectProperty propCode = mRequest.getParameter(1); + MtpObjectFormat format = mRequest.getParameter(2); + LOGV("GetObjectPropDesc %s %s\n", MtpDebug::getObjectPropCodeName(propCode), + MtpDebug::getFormatCodeName(format)); + MtpProperty* property = mDatabase->getObjectPropertyDesc(propCode, format); + if (!property) + return MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED; + property->write(mData); + delete property; + return MTP_RESPONSE_OK; +} + +MtpResponseCode MtpServer::doGetDevicePropDesc() { + MtpDeviceProperty propCode = mRequest.getParameter(1); + LOGV("GetDevicePropDesc %s\n", MtpDebug::getDevicePropCodeName(propCode)); + MtpProperty* property = mDatabase->getDevicePropertyDesc(propCode); + if (!property) + return MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED; + property->write(mData); + delete property; + return MTP_RESPONSE_OK; +} + +} // namespace android diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h new file mode 100644 index 0000000..605d5a2 --- /dev/null +++ b/media/mtp/MtpServer.h @@ -0,0 +1,109 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MTP_SERVER_H +#define _MTP_SERVER_H + +#include "MtpRequestPacket.h" +#include "MtpDataPacket.h" +#include "MtpResponsePacket.h" +#include "MtpEventPacket.h" +#include "mtp.h" + +#include "MtpUtils.h" + +namespace android { + +class MtpDatabase; +class MtpStorage; + +class MtpServer { + +private: + // file descriptor for MTP kernel driver + int mFD; + + MtpDatabase* mDatabase; + + // group to own new files and folders + int mFileGroup; + // permissions for new files and directories + int mFilePermission; + int mDirectoryPermission; + + // current session ID + MtpSessionID mSessionID; + // true if we have an open session and mSessionID is valid + bool mSessionOpen; + + MtpRequestPacket mRequest; + MtpDataPacket mData; + MtpResponsePacket mResponse; + MtpEventPacket mEvent; + + MtpStorageList mStorages; + + // handle for new object, set by SendObjectInfo and used by SendObject + MtpObjectHandle mSendObjectHandle; + MtpObjectFormat mSendObjectFormat; + MtpString mSendObjectFilePath; + size_t mSendObjectFileSize; + +public: + MtpServer(int fd, MtpDatabase* database, + int fileGroup, int filePerm, int directoryPerm); + virtual ~MtpServer(); + + void addStorage(const char* filePath, uint64_t reserveSpace); + inline void addStorage(MtpStorage* storage) { mStorages.push(storage); } + MtpStorage* getStorage(MtpStorageID id); + void run(); + + void sendObjectAdded(MtpObjectHandle handle); + void sendObjectRemoved(MtpObjectHandle handle); + +private: + bool handleRequest(); + + MtpResponseCode doGetDeviceInfo(); + MtpResponseCode doOpenSession(); + MtpResponseCode doCloseSession(); + MtpResponseCode doGetStorageIDs(); + MtpResponseCode doGetStorageInfo(); + MtpResponseCode doGetObjectPropsSupported(); + MtpResponseCode doGetObjectHandles(); + MtpResponseCode doGetNumObjects(); + MtpResponseCode doGetObjectReferences(); + MtpResponseCode doSetObjectReferences(); + MtpResponseCode doGetObjectPropValue(); + MtpResponseCode doSetObjectPropValue(); + MtpResponseCode doGetDevicePropValue(); + MtpResponseCode doSetDevicePropValue(); + MtpResponseCode doResetDevicePropValue(); + MtpResponseCode doGetObjectPropList(); + MtpResponseCode doGetObjectInfo(); + MtpResponseCode doGetObject(); + MtpResponseCode doGetPartialObject(); + MtpResponseCode doSendObjectInfo(); + MtpResponseCode doSendObject(); + MtpResponseCode doDeleteObject(); + MtpResponseCode doGetObjectPropDesc(); + MtpResponseCode doGetDevicePropDesc(); +}; + +}; // namespace android + +#endif // _MTP_SERVER_H diff --git a/media/mtp/MtpStorage.cpp b/media/mtp/MtpStorage.cpp new file mode 100644 index 0000000..2fbbc51 --- /dev/null +++ b/media/mtp/MtpStorage.cpp @@ -0,0 +1,81 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_TAG "MtpStorage" + +#include "MtpDebug.h" +#include "MtpDatabase.h" +#include "MtpStorage.h" + +#include <sys/types.h> +#include <sys/stat.h> +#include <sys/statfs.h> +#include <unistd.h> +#include <dirent.h> +#include <errno.h> +#include <string.h> +#include <stdio.h> +#include <limits.h> + +namespace android { + +MtpStorage::MtpStorage(MtpStorageID id, const char* filePath, uint64_t reserveSpace) + : mStorageID(id), + mFilePath(filePath), + mMaxCapacity(0), + mReserveSpace(reserveSpace) +{ + LOGV("MtpStorage id: %d path: %s\n", id, filePath); +} + +MtpStorage::~MtpStorage() { +} + +int MtpStorage::getType() const { + return MTP_STORAGE_FIXED_RAM; +} + +int MtpStorage::getFileSystemType() const { + return MTP_STORAGE_FILESYSTEM_HIERARCHICAL; +} + +int MtpStorage::getAccessCapability() const { + return MTP_STORAGE_READ_WRITE; +} + +uint64_t MtpStorage::getMaxCapacity() { + if (mMaxCapacity == 0) { + struct statfs stat; + if (statfs(mFilePath, &stat)) + return -1; + mMaxCapacity = (uint64_t)stat.f_blocks * (uint64_t)stat.f_bsize; + } + return mMaxCapacity; +} + +uint64_t MtpStorage::getFreeSpace() { + struct statfs stat; + if (statfs(mFilePath, &stat)) + return -1; + uint64_t freeSpace = (uint64_t)stat.f_bavail * (uint64_t)stat.f_bsize; + return (freeSpace > mReserveSpace ? freeSpace - mReserveSpace : 0); +} + +const char* MtpStorage::getDescription() const { + return "Device Storage"; +} + +} // namespace android diff --git a/media/mtp/MtpStorage.h b/media/mtp/MtpStorage.h new file mode 100644 index 0000000..ace720b --- /dev/null +++ b/media/mtp/MtpStorage.h @@ -0,0 +1,52 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_STORAGE_H +#define _MTP_STORAGE_H + +#include "mtp.h" + +namespace android { + +class MtpDatabase; + +class MtpStorage { + +private: + MtpStorageID mStorageID; + const char* mFilePath; + uint64_t mMaxCapacity; + // amount of free space to leave unallocated + uint64_t mReserveSpace; + +public: + MtpStorage(MtpStorageID id, const char* filePath, + uint64_t reserveSpace); + virtual ~MtpStorage(); + + inline MtpStorageID getStorageID() const { return mStorageID; } + int getType() const; + int getFileSystemType() const; + int getAccessCapability() const; + uint64_t getMaxCapacity(); + uint64_t getFreeSpace(); + const char* getDescription() const; + inline const char* getPath() const { return mFilePath; } +}; + +}; // namespace android + +#endif // _MTP_STORAGE_H diff --git a/media/mtp/MtpStorageInfo.cpp b/media/mtp/MtpStorageInfo.cpp new file mode 100644 index 0000000..ca64ac0 --- /dev/null +++ b/media/mtp/MtpStorageInfo.cpp @@ -0,0 +1,72 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MtpStorageInfo" + +#include "MtpDebug.h" +#include "MtpDataPacket.h" +#include "MtpStorageInfo.h" +#include "MtpStringBuffer.h" + +namespace android { + +MtpStorageInfo::MtpStorageInfo(MtpStorageID id) + : mStorageID(id), + mStorageType(0), + mFileSystemType(0), + mAccessCapability(0), + mMaxCapacity(0), + mFreeSpaceBytes(0), + mFreeSpaceObjects(0), + mStorageDescription(NULL), + mVolumeIdentifier(NULL) +{ +} + +MtpStorageInfo::~MtpStorageInfo() { + if (mStorageDescription) + free(mStorageDescription); + if (mVolumeIdentifier) + free(mVolumeIdentifier); +} + +void MtpStorageInfo::read(MtpDataPacket& packet) { + MtpStringBuffer string; + + // read the device info + mStorageType = packet.getUInt16(); + mFileSystemType = packet.getUInt16(); + mAccessCapability = packet.getUInt16(); + mMaxCapacity = packet.getUInt64(); + mFreeSpaceBytes = packet.getUInt64(); + mFreeSpaceObjects = packet.getUInt32(); + + packet.getString(string); + mStorageDescription = strdup((const char *)string); + packet.getString(string); + mVolumeIdentifier = strdup((const char *)string); +} + +void MtpStorageInfo::print() { + LOGD("Storage Info %08X:\n\tmStorageType: %d\n\tmFileSystemType: %d\n\tmAccessCapability: %d\n", + mStorageID, mStorageType, mFileSystemType, mAccessCapability); + LOGD("\tmMaxCapacity: %lld\n\tmFreeSpaceBytes: %lld\n\tmFreeSpaceObjects: %d\n", + mMaxCapacity, mFreeSpaceBytes, mFreeSpaceObjects); + LOGD("\tmStorageDescription: %s\n\tmVolumeIdentifier: %s\n", + mStorageDescription, mVolumeIdentifier); +} + +} // namespace android diff --git a/media/mtp/MtpStorageInfo.h b/media/mtp/MtpStorageInfo.h new file mode 100644 index 0000000..2cb626e --- /dev/null +++ b/media/mtp/MtpStorageInfo.h @@ -0,0 +1,49 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MTP_STORAGE_INFO_H +#define _MTP_STORAGE_INFO_H + +#include "MtpTypes.h" + +namespace android { + +class MtpDataPacket; + +class MtpStorageInfo { +public: + MtpStorageID mStorageID; + uint16_t mStorageType; + uint16_t mFileSystemType; + uint16_t mAccessCapability; + uint64_t mMaxCapacity; + uint64_t mFreeSpaceBytes; + uint32_t mFreeSpaceObjects; + char* mStorageDescription; + char* mVolumeIdentifier; + +public: + MtpStorageInfo(MtpStorageID id); + virtual ~MtpStorageInfo(); + + void read(MtpDataPacket& packet); + + void print(); +}; + +}; // namespace android + +#endif // _MTP_STORAGE_INFO_H diff --git a/media/mtp/MtpStringBuffer.cpp b/media/mtp/MtpStringBuffer.cpp new file mode 100644 index 0000000..fe8cf04 --- /dev/null +++ b/media/mtp/MtpStringBuffer.cpp @@ -0,0 +1,171 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MtpStringBuffer" + +#include <string.h> + +#include "MtpDataPacket.h" +#include "MtpStringBuffer.h" + +namespace android { + +MtpStringBuffer::MtpStringBuffer() + : mCharCount(0), + mByteCount(1) +{ + mBuffer[0] = 0; +} + +MtpStringBuffer::MtpStringBuffer(const char* src) + : mCharCount(0), + mByteCount(1) +{ + set(src); +} + +MtpStringBuffer::MtpStringBuffer(const uint16_t* src) + : mCharCount(0), + mByteCount(1) +{ + set(src); +} + +MtpStringBuffer::MtpStringBuffer(const MtpStringBuffer& src) + : mCharCount(src.mCharCount), + mByteCount(src.mByteCount) +{ + memcpy(mBuffer, src.mBuffer, mByteCount); +} + + +MtpStringBuffer::~MtpStringBuffer() { +} + +void MtpStringBuffer::set(const char* src) { + int length = strlen(src); + if (length >= sizeof(mBuffer)) + length = sizeof(mBuffer) - 1; + memcpy(mBuffer, src, length); + + // count the characters + int count = 0; + char ch; + while ((ch = *src++) != 0) { + if ((ch & 0x80) == 0) { + // single byte character + } else if ((ch & 0xE0) == 0xC0) { + // two byte character + if (! *src++) { + // last character was truncated, so ignore last byte + length--; + break; + } + } else if ((ch & 0xF0) == 0xE0) { + // 3 byte char + if (! *src++) { + // last character was truncated, so ignore last byte + length--; + break; + } + if (! 
*src++) { + // last character was truncated, so ignore last two bytes + length -= 2; + break; + } + } + count++; + } + + mByteCount = length + 1; + mBuffer[length] = 0; + mCharCount = count; +} + +void MtpStringBuffer::set(const uint16_t* src) { + int count = 0; + uint16_t ch; + uint8_t* dest = mBuffer; + + while ((ch = *src++) != 0 && count < 255) { + if (ch >= 0x0800) { + *dest++ = (uint8_t)(0xE0 | (ch >> 12)); + *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F)); + *dest++ = (uint8_t)(0x80 | (ch & 0x3F)); + } else if (ch >= 0x80) { + *dest++ = (uint8_t)(0xC0 | (ch >> 6)); + *dest++ = (uint8_t)(0x80 | (ch & 0x3F)); + } else { + *dest++ = ch; + } + count++; + } + *dest++ = 0; + mCharCount = count; + mByteCount = dest - mBuffer; +} + +void MtpStringBuffer::readFromPacket(MtpDataPacket* packet) { + int count = packet->getUInt8(); + uint8_t* dest = mBuffer; + for (int i = 0; i < count; i++) { + uint16_t ch = packet->getUInt16(); + if (ch >= 0x0800) { + *dest++ = (uint8_t)(0xE0 | (ch >> 12)); + *dest++ = (uint8_t)(0x80 | ((ch >> 6) & 0x3F)); + *dest++ = (uint8_t)(0x80 | (ch & 0x3F)); + } else if (ch >= 0x80) { + *dest++ = (uint8_t)(0xC0 | (ch >> 6)); + *dest++ = (uint8_t)(0x80 | (ch & 0x3F)); + } else { + *dest++ = ch; + } + } + *dest++ = 0; + mCharCount = count; + mByteCount = dest - mBuffer; +} + +void MtpStringBuffer::writeToPacket(MtpDataPacket* packet) const { + int count = mCharCount; + const uint8_t* src = mBuffer; + packet->putUInt8(count > 0 ? count + 1 : 0); + + // expand utf8 to 16 bit chars + for (int i = 0; i < count; i++) { + uint16_t ch; + uint16_t ch1 = *src++; + if ((ch1 & 0x80) == 0) { + // single byte character + ch = ch1; + } else if ((ch1 & 0xE0) == 0xC0) { + // two byte character + uint16_t ch2 = *src++; + ch = ((ch1 & 0x1F) << 6) | (ch2 & 0x3F); + } else { + // three byte character + uint16_t ch2 = *src++; + uint16_t ch3 = *src++; + ch = ((ch1 & 0x0F) << 12) | ((ch2 & 0x3F) << 6) | (ch3 & 0x3F); + } + packet->putUInt16(ch); + } + // only terminate with zero if string is not empty + if (count > 0) + packet->putUInt16(0); +} + +} // namespace android diff --git a/media/mtp/MtpStringBuffer.h b/media/mtp/MtpStringBuffer.h new file mode 100644 index 0000000..cbc8307 --- /dev/null +++ b/media/mtp/MtpStringBuffer.h @@ -0,0 +1,57 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
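A minimal sketch of the round trip implemented above (illustrative only, not part of this change): a UTF-16 string from the wire is stored as UTF-8, and the two counters report characters and encoded bytes separately. The sample string and the example() wrapper are assumptions.

    #include "MtpStringBuffer.h"

    using namespace android;

    static void example() {
        // "München": 7 characters, the u-umlaut needs two UTF-8 bytes
        static const uint16_t utf16[] = { 'M', 0x00FC, 'n', 'c', 'h', 'e', 'n', 0 };
        MtpStringBuffer s(utf16);       // converted to UTF-8 internally
        const char* utf8 = s;           // implicit operator const char*()
        int chars = s.getCharCount();   // 7
        int bytes = s.getByteCount();   // 9: 8 UTF-8 bytes plus the terminating zero
        (void)utf8; (void)chars; (void)bytes;
    }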
+ */ + +#ifndef _MTP_STRING_BUFFER_H +#define _MTP_STRING_BUFFER_H + +#include <stdint.h> + +namespace android { + +class MtpDataPacket; + +// Represents a utf8 string, with a maximum of 255 characters +class MtpStringBuffer { + +private: + // mBuffer contains string in UTF8 format + // maximum 3 bytes/character, with 1 extra for zero termination + uint8_t mBuffer[255 * 3 + 1]; + int mCharCount; + int mByteCount; + +public: + MtpStringBuffer(); + MtpStringBuffer(const char* src); + MtpStringBuffer(const uint16_t* src); + MtpStringBuffer(const MtpStringBuffer& src); + virtual ~MtpStringBuffer(); + + void set(const char* src); + void set(const uint16_t* src); + + void readFromPacket(MtpDataPacket* packet); + void writeToPacket(MtpDataPacket* packet) const; + + inline int getCharCount() const { return mCharCount; } + inline int getByteCount() const { return mByteCount; } + + inline operator const char*() const { return (const char *)mBuffer; } +}; + +}; // namespace android + +#endif // _MTP_STRING_BUFFER_H diff --git a/media/mtp/MtpTypes.h b/media/mtp/MtpTypes.h new file mode 100644 index 0000000..720c854 --- /dev/null +++ b/media/mtp/MtpTypes.h @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MTP_TYPES_H +#define _MTP_TYPES_H + +#include <stdint.h> +#include "utils/String8.h" +#include "utils/Vector.h" + +namespace android { + +typedef int32_t int128_t[4]; +typedef uint32_t uint128_t[4]; + +typedef uint16_t MtpOperationCode; +typedef uint16_t MtpResponseCode; +typedef uint16_t MtpEventCode; +typedef uint32_t MtpSessionID; +typedef uint32_t MtpStorageID; +typedef uint32_t MtpTransactionID; +typedef uint16_t MtpPropertyCode; +typedef uint16_t MtpDataType; +typedef uint16_t MtpObjectFormat; +typedef MtpPropertyCode MtpDeviceProperty; +typedef MtpPropertyCode MtpObjectProperty; + +// object handles are unique across all storage but only within a single session. +// object handles cannot be reused after an object is deleted. +// values 0x00000000 and 0xFFFFFFFF are reserved for special purposes. 
+typedef uint32_t MtpObjectHandle; + +// Special values +#define MTP_PARENT_ROOT 0xFFFFFFFF // parent is root of the storage +#define kInvalidObjectHandle 0xFFFFFFFF + +class MtpStorage; +class MtpDevice; +class MtpProperty; + +typedef Vector<MtpStorage *> MtpStorageList; +typedef Vector<MtpDevice*> MtpDeviceList; +typedef Vector<MtpProperty*> MtpPropertyList; + +typedef Vector<uint8_t> UInt8List; +typedef Vector<uint16_t> UInt16List; +typedef Vector<uint32_t> UInt32List; +typedef Vector<uint64_t> UInt64List; +typedef Vector<int8_t> Int8List; +typedef Vector<int16_t> Int16List; +typedef Vector<int32_t> Int32List; +typedef Vector<int64_t> Int64List; + +typedef UInt16List MtpObjectPropertyList; +typedef UInt16List MtpDevicePropertyList; +typedef UInt16List MtpObjectFormatList; +typedef UInt32List MtpObjectHandleList; +typedef UInt16List MtpObjectPropertyList; +typedef UInt32List MtpStorageIDList; + +typedef String8 MtpString; + +}; // namespace android + +#endif // _MTP_TYPES_H diff --git a/media/mtp/MtpUtils.cpp b/media/mtp/MtpUtils.cpp new file mode 100644 index 0000000..ab01ef5 --- /dev/null +++ b/media/mtp/MtpUtils.cpp @@ -0,0 +1,78 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define LOG_TAG "MtpUtils" + +#include <stdio.h> +#include <time.h> + +#include <cutils/tztime.h> +#include "MtpUtils.h" + +namespace android { + +/* +DateTime strings follow a compatible subset of the definition found in ISO 8601, and +take the form of a Unicode string formatted as: "YYYYMMDDThhmmss.s". In this +representation, YYYY shall be replaced by the year, MM replaced by the month (01-12), +DD replaced by the day (01-31), T is a constant character 'T' delimiting time from date, +hh is replaced by the hour (00-23), mm is replaced by the minute (00-59), and ss by the +second (00-59). The ".s" is optional, and represents tenths of a second. +*/ + +bool parseDateTime(const char* dateTime, time_t& outSeconds) { + int year, month, day, hour, minute, second; + struct tm tm; + + if (sscanf(dateTime, "%04d%02d%02dT%02d%02d%02d", + &year, &month, &day, &hour, &minute, &second) != 6) + return false; + const char* tail = dateTime + 15; + // skip optional tenth of second + if (tail[0] == '.' 
&& tail[1]) + tail += 2; + //FIXME - support +/-hhmm + bool useUTC = (tail[0] == 'Z'); + + // hack to compute timezone + time_t dummy; + localtime_r(&dummy, &tm); + + tm.tm_sec = second; + tm.tm_min = minute; + tm.tm_hour = hour; + tm.tm_mday = day; + tm.tm_mon = month; + tm.tm_year = year - 1900; + tm.tm_wday = 0; + tm.tm_isdst = -1; + if (useUTC) + outSeconds = mktime(&tm); + else + outSeconds = mktime_tz(&tm, tm.tm_zone); + + return true; +} + +void formatDateTime(time_t seconds, char* buffer, int bufferLength) { + struct tm tm; + + localtime_r(&seconds, &tm); + snprintf(buffer, bufferLength, "%04d%02d%02dT%02d%02d%02d", + tm.tm_year + 1900, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min, tm.tm_sec); +} + +} // namespace android diff --git a/include/media/stagefright/VideoRenderer.h b/media/mtp/MtpUtils.h index f80b277..61f9055 100644 --- a/include/media/stagefright/VideoRenderer.h +++ b/media/mtp/MtpUtils.h @@ -1,5 +1,5 @@ /* - * Copyright (C) 2009 The Android Open Source Project + * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -14,28 +14,16 @@ * limitations under the License. */ -#ifndef VIDEO_RENDERER_H_ +#ifndef _MTP_UTILS_H +#define _MTP_UTILS_H -#define VIDEO_RENDERER_H_ - -#include <sys/types.h> +#include <stdint.h> namespace android { -class VideoRenderer { -public: - virtual ~VideoRenderer() {} - - virtual void render( - const void *data, size_t size, void *platformPrivate) = 0; - -protected: - VideoRenderer() {} - - VideoRenderer(const VideoRenderer &); - VideoRenderer &operator=(const VideoRenderer &); -}; +bool parseDateTime(const char* dateTime, time_t& outSeconds); +void formatDateTime(time_t seconds, char* buffer, int bufferLength); -} // namespace android +}; // namespace android -#endif // VIDEO_RENDERER_H_ +#endif // _MTP_UTILS_H diff --git a/media/mtp/mtp.h b/media/mtp/mtp.h new file mode 100644 index 0000000..8bc2e22 --- /dev/null +++ b/media/mtp/mtp.h @@ -0,0 +1,479 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
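A short usage sketch for the two date helpers above (illustrative only, not part of this change); the buffer size and the example() wrapper are arbitrary assumptions.

    #include <time.h>
    #include "MtpUtils.h"

    using namespace android;

    static void example() {
        char buf[32];
        time_t now = time(NULL);
        formatDateTime(now, buf, sizeof(buf));   // produces the "YYYYMMDDThhmmss" form described above
        time_t parsed;
        if (parseDateTime(buf, parsed)) {
            // parsed holds the seconds value recovered from the string
        }
    }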
+ */ + +#ifndef _MTP_H +#define _MTP_H + +#include <stdint.h> +#include <stdlib.h> + +#define MTP_STANDARD_VERSION 100 + +// Container Types +#define MTP_CONTAINER_TYPE_UNDEFINED 0 +#define MTP_CONTAINER_TYPE_COMMAND 1 +#define MTP_CONTAINER_TYPE_DATA 2 +#define MTP_CONTAINER_TYPE_RESPONSE 3 +#define MTP_CONTAINER_TYPE_EVENT 4 + +// Container Offsets +#define MTP_CONTAINER_LENGTH_OFFSET 0 +#define MTP_CONTAINER_TYPE_OFFSET 4 +#define MTP_CONTAINER_CODE_OFFSET 6 +#define MTP_CONTAINER_TRANSACTION_ID_OFFSET 8 +#define MTP_CONTAINER_PARAMETER_OFFSET 12 +#define MTP_CONTAINER_HEADER_SIZE 12 + +// MTP Data Types +#define MTP_TYPE_UNDEFINED 0x0000 // Undefined +#define MTP_TYPE_INT8 0x0001 // Signed 8-bit integer +#define MTP_TYPE_UINT8 0x0002 // Unsigned 8-bit integer +#define MTP_TYPE_INT16 0x0003 // Signed 16-bit integer +#define MTP_TYPE_UINT16 0x0004 // Unsigned 16-bit integer +#define MTP_TYPE_INT32 0x0005 // Signed 32-bit integer +#define MTP_TYPE_UINT32 0x0006 // Unsigned 32-bit integer +#define MTP_TYPE_INT64 0x0007 // Signed 64-bit integer +#define MTP_TYPE_UINT64 0x0008 // Unsigned 64-bit integer +#define MTP_TYPE_INT128 0x0009 // Signed 128-bit integer +#define MTP_TYPE_UINT128 0x000A // Unsigned 128-bit integer +#define MTP_TYPE_AINT8 0x4001 // Array of signed 8-bit integers +#define MTP_TYPE_AUINT8 0x4002 // Array of unsigned 8-bit integers +#define MTP_TYPE_AINT16 0x4003 // Array of signed 16-bit integers +#define MTP_TYPE_AUINT16 0x4004 // Array of unsigned 16-bit integers +#define MTP_TYPE_AINT32 0x4005 // Array of signed 32-bit integers +#define MTP_TYPE_AUINT32 0x4006 // Array of unsigned 32-bit integers +#define MTP_TYPE_AINT64 0x4007 // Array of signed 64-bit integers +#define MTP_TYPE_AUINT64 0x4008 // Array of unsigned 64-bit integers +#define MTP_TYPE_AINT128 0x4009 // Array of signed 128-bit integers +#define MTP_TYPE_AUINT128 0x400A // Array of unsigned 128-bit integers +#define MTP_TYPE_STR 0xFFFF // Variable-length Unicode string + +// MTP Format Codes +#define MTP_FORMAT_UNDEFINED 0x3000 // Undefined object +#define MTP_FORMAT_ASSOCIATION 0x3001 // Association (for example, a folder) +#define MTP_FORMAT_SCRIPT 0x3002 // Device model-specific script +#define MTP_FORMAT_EXECUTABLE 0x3003 // Device model-specific binary executable +#define MTP_FORMAT_TEXT 0x3004 // Text file +#define MTP_FORMAT_HTML 0x3005 // Hypertext Markup Language file (text) +#define MTP_FORMAT_DPOF 0x3006 // Digital Print Order Format file (text) +#define MTP_FORMAT_AIFF 0x3007 // Audio clip +#define MTP_FORMAT_WAV 0x3008 // Audio clip +#define MTP_FORMAT_MP3 0x3009 // Audio clip +#define MTP_FORMAT_AVI 0x300A // Video clip +#define MTP_FORMAT_MPEG 0x300B // Video clip +#define MTP_FORMAT_ASF 0x300C // Microsoft Advanced Streaming Format (video) +#define MTP_FORMAT_DEFINED 0x3800 // Unknown image object +#define MTP_FORMAT_EXIF_JPEG 0x3801 // Exchangeable File Format, JEIDA standard +#define MTP_FORMAT_TIFF_EP 0x3802 // Tag Image File Format for Electronic Photography +#define MTP_FORMAT_FLASHPIX 0x3803 // Structured Storage Image Format +#define MTP_FORMAT_BMP 0x3804 // Microsoft Windows Bitmap file +#define MTP_FORMAT_CIFF 0x3805 // Canon Camera Image File Format +#define MTP_FORMAT_GIF 0x3807 // Graphics Interchange Format +#define MTP_FORMAT_JFIF 0x3808 // JPEG File Interchange Format +#define MTP_FORMAT_CD 0x3809 // PhotoCD Image Pac +#define MTP_FORMAT_PICT 0x380A // Quickdraw Image Format +#define MTP_FORMAT_PNG 0x380B // Portable Network Graphics +#define MTP_FORMAT_TIFF 0x380D // Tag 
Image File Format +#define MTP_FORMAT_TIFF_IT 0x380E // Tag Image File Format for Information Technology (graphic arts) +#define MTP_FORMAT_JP2 0x380F // JPEG2000 Baseline File Format +#define MTP_FORMAT_JPX 0x3810 // JPEG2000 Extended File Format +#define MTP_FORMAT_UNDEFINED_FIRMWARE 0xB802 +#define MTP_FORMAT_WINDOWS_IMAGE_FORMAT 0xB881 +#define MTP_FORMAT_UNDEFINED_AUDIO 0xB900 +#define MTP_FORMAT_WMA 0xB901 +#define MTP_FORMAT_OGG 0xB902 +#define MTP_FORMAT_AAC 0xB903 +#define MTP_FORMAT_AUDIBLE 0xB904 +#define MTP_FORMAT_FLAC 0xB906 +#define MTP_FORMAT_UNDEFINED_VIDEO 0xB980 +#define MTP_FORMAT_WMV 0xB981 +#define MTP_FORMAT_MP4_CONTAINER 0xB982 // ISO 14496-1 +#define MTP_FORMAT_MP2 0xB983 +#define MTP_FORMAT_3GP_CONTAINER 0xB984 // 3GPP file format. Details: http://www.3gpp.org/ftp/Specs/html-info/26244.htm (page title - \u201cTransparent end-to-end packet switched streaming service, 3GPP file format\u201d). +#define MTP_FORMAT_UNDEFINED_COLLECTION 0xBA00 +#define MTP_FORMAT_ABSTRACT_MULTIMEDIA_ALBUM 0xBA01 +#define MTP_FORMAT_ABSTRACT_IMAGE_ALBUM 0xBA02 +#define MTP_FORMAT_ABSTRACT_AUDIO_ALBUM 0xBA03 +#define MTP_FORMAT_ABSTRACT_VIDEO_ALBUM 0xBA04 +#define MTP_FORMAT_ABSTRACT_AV_PLAYLIST 0xBA05 +#define MTP_FORMAT_ABSTRACT_CONTACT_GROUP 0xBA06 +#define MTP_FORMAT_ABSTRACT_MESSAGE_FOLDER 0xBA07 +#define MTP_FORMAT_ABSTRACT_CHAPTERED_PRODUCTION 0xBA08 +#define MTP_FORMAT_ABSTRACT_AUDIO_PLAYLIST 0xBA09 +#define MTP_FORMAT_ABSTRACT_VIDEO_PLAYLIST 0xBA0A +#define MTP_FORMAT_ABSTRACT_MEDIACAST 0xBA0B // For use with mediacasts; references multimedia enclosures of RSS feeds or episodic content +#define MTP_FORMAT_WPL_PLAYLIST 0xBA10 +#define MTP_FORMAT_M3U_PLAYLIST 0xBA11 +#define MTP_FORMAT_MPL_PLAYLIST 0xBA12 +#define MTP_FORMAT_ASX_PLAYLIST 0xBA13 +#define MTP_FORMAT_PLS_PLAYLIST 0xBA14 +#define MTP_FORMAT_UNDEFINED_DOCUMENT 0xBA80 +#define MTP_FORMAT_ABSTRACT_DOCUMENT 0xBA81 +#define MTP_FORMAT_XML_DOCUMENT 0xBA82 +#define MTP_FORMAT_MS_WORD_DOCUMENT 0xBA83 +#define MTP_FORMAT_MHT_COMPILED_HTML_DOCUMENT 0xBA84 +#define MTP_FORMAT_MS_EXCEL_SPREADSHEET 0xBA85 +#define MTP_FORMAT_MS_POWERPOINT_PRESENTATION 0xBA86 +#define MTP_FORMAT_UNDEFINED_MESSAGE 0xBB00 +#define MTP_FORMAT_ABSTRACT_MESSSAGE 0xBB01 +#define MTP_FORMAT_UNDEFINED_CONTACT 0xBB80 +#define MTP_FORMAT_ABSTRACT_CONTACT 0xBB81 +#define MTP_FORMAT_VCARD_2 0xBB82 + +// MTP Object Property Codes +#define MTP_PROPERTY_STORAGE_ID 0xDC01 +#define MTP_PROPERTY_OBJECT_FORMAT 0xDC02 +#define MTP_PROPERTY_PROTECTION_STATUS 0xDC03 +#define MTP_PROPERTY_OBJECT_SIZE 0xDC04 +#define MTP_PROPERTY_ASSOCIATION_TYPE 0xDC05 +#define MTP_PROPERTY_ASSOCIATION_DESC 0xDC06 +#define MTP_PROPERTY_OBJECT_FILE_NAME 0xDC07 +#define MTP_PROPERTY_DATE_CREATED 0xDC08 +#define MTP_PROPERTY_DATE_MODIFIED 0xDC09 +#define MTP_PROPERTY_KEYWORDS 0xDC0A +#define MTP_PROPERTY_PARENT_OBJECT 0xDC0B +#define MTP_PROPERTY_ALLOWED_FOLDER_CONTENTS 0xDC0C +#define MTP_PROPERTY_HIDDEN 0xDC0D +#define MTP_PROPERTY_SYSTEM_OBJECT 0xDC0E +#define MTP_PROPERTY_PERSISTENT_UID 0xDC41 +#define MTP_PROPERTY_SYNC_ID 0xDC42 +#define MTP_PROPERTY_PROPERTY_BAG 0xDC43 +#define MTP_PROPERTY_NAME 0xDC44 +#define MTP_PROPERTY_CREATED_BY 0xDC45 +#define MTP_PROPERTY_ARTIST 0xDC46 +#define MTP_PROPERTY_DATE_AUTHORED 0xDC47 +#define MTP_PROPERTY_DESCRIPTION 0xDC48 +#define MTP_PROPERTY_URL_REFERENCE 0xDC49 +#define MTP_PROPERTY_LANGUAGE_LOCALE 0xDC4A +#define MTP_PROPERTY_COPYRIGHT_INFORMATION 0xDC4B +#define MTP_PROPERTY_SOURCE 0xDC4C +#define MTP_PROPERTY_ORIGIN_LOCATION 0xDC4D +#define 
MTP_PROPERTY_DATE_ADDED 0xDC4E +#define MTP_PROPERTY_NON_CONSUMABLE 0xDC4F +#define MTP_PROPERTY_CORRUPT_UNPLAYABLE 0xDC50 +#define MTP_PROPERTY_PRODUCER_SERIAL_NUMBER 0xDC51 +#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_FORMAT 0xDC81 +#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_SIZE 0xDC82 +#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_HEIGHT 0xDC83 +#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_WIDTH 0xDC84 +#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DURATION 0xDC85 +#define MTP_PROPERTY_REPRESENTATIVE_SAMPLE_DATA 0xDC86 +#define MTP_PROPERTY_WIDTH 0xDC87 +#define MTP_PROPERTY_HEIGHT 0xDC88 +#define MTP_PROPERTY_DURATION 0xDC89 +#define MTP_PROPERTY_RATING 0xDC8A +#define MTP_PROPERTY_TRACK 0xDC8B +#define MTP_PROPERTY_GENRE 0xDC8C +#define MTP_PROPERTY_CREDITS 0xDC8D +#define MTP_PROPERTY_LYRICS 0xDC8E +#define MTP_PROPERTY_SUBSCRIPTION_CONTENT_ID 0xDC8F +#define MTP_PROPERTY_PRODUCED_BY 0xDC90 +#define MTP_PROPERTY_USE_COUNT 0xDC91 +#define MTP_PROPERTY_SKIP_COUNT 0xDC92 +#define MTP_PROPERTY_LAST_ACCESSED 0xDC93 +#define MTP_PROPERTY_PARENTAL_RATING 0xDC94 +#define MTP_PROPERTY_META_GENRE 0xDC95 +#define MTP_PROPERTY_COMPOSER 0xDC96 +#define MTP_PROPERTY_EFFECTIVE_RATING 0xDC97 +#define MTP_PROPERTY_SUBTITLE 0xDC98 +#define MTP_PROPERTY_ORIGINAL_RELEASE_DATE 0xDC99 +#define MTP_PROPERTY_ALBUM_NAME 0xDC9A +#define MTP_PROPERTY_ALBUM_ARTIST 0xDC9B +#define MTP_PROPERTY_MOOD 0xDC9C +#define MTP_PROPERTY_DRM_STATUS 0xDC9D +#define MTP_PROPERTY_SUB_DESCRIPTION 0xDC9E +#define MTP_PROPERTY_IS_CROPPED 0xDCD1 +#define MTP_PROPERTY_IS_COLOUR_CORRECTED 0xDCD2 +#define MTP_PROPERTY_IMAGE_BIT_DEPTH 0xDCD3 +#define MTP_PROPERTY_F_NUMBER 0xDCD4 +#define MTP_PROPERTY_EXPOSURE_TIME 0xDCD5 +#define MTP_PROPERTY_EXPOSURE_INDEX 0xDCD6 +#define MTP_PROPERTY_TOTAL_BITRATE 0xDE91 +#define MTP_PROPERTY_BITRATE_TYPE 0xDE92 +#define MTP_PROPERTY_SAMPLE_RATE 0xDE93 +#define MTP_PROPERTY_NUMBER_OF_CHANNELS 0xDE94 +#define MTP_PROPERTY_AUDIO_BIT_DEPTH 0xDE95 +#define MTP_PROPERTY_SCAN_TYPE 0xDE97 +#define MTP_PROPERTY_AUDIO_WAVE_CODEC 0xDE99 +#define MTP_PROPERTY_AUDIO_BITRATE 0xDE9A +#define MTP_PROPERTY_VIDEO_FOURCC_CODEC 0xDE9B +#define MTP_PROPERTY_VIDEO_BITRATE 0xDE9C +#define MTP_PROPERTY_FRAMES_PER_THOUSAND_SECONDS 0xDE9D +#define MTP_PROPERTY_KEYFRAME_DISTANCE 0xDE9E +#define MTP_PROPERTY_BUFFER_SIZE 0xDE9F +#define MTP_PROPERTY_ENCODING_QUALITY 0xDEA0 +#define MTP_PROPERTY_ENCODING_PROFILE 0xDEA1 +#define MTP_PROPERTY_DISPLAY_NAME 0xDCE0 +#define MTP_PROPERTY_BODY_TEXT 0xDCE1 +#define MTP_PROPERTY_SUBJECT 0xDCE2 +#define MTP_PROPERTY_PRIORITY 0xDCE3 +#define MTP_PROPERTY_GIVEN_NAME 0xDD00 +#define MTP_PROPERTY_MIDDLE_NAMES 0xDD01 +#define MTP_PROPERTY_FAMILY_NAME 0xDD02 +#define MTP_PROPERTY_PREFIX 0xDD03 +#define MTP_PROPERTY_SUFFIX 0xDD04 +#define MTP_PROPERTY_PHONETIC_GIVEN_NAME 0xDD05 +#define MTP_PROPERTY_PHONETIC_FAMILY_NAME 0xDD06 +#define MTP_PROPERTY_EMAIL_PRIMARY 0xDD07 +#define MTP_PROPERTY_EMAIL_PERSONAL_1 0xDD08 +#define MTP_PROPERTY_EMAIL_PERSONAL_2 0xDD09 +#define MTP_PROPERTY_EMAIL_BUSINESS_1 0xDD0A +#define MTP_PROPERTY_EMAIL_BUSINESS_2 0xDD0B +#define MTP_PROPERTY_EMAIL_OTHERS 0xDD0C +#define MTP_PROPERTY_PHONE_NUMBER_PRIMARY 0xDD0D +#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL 0xDD0E +#define MTP_PROPERTY_PHONE_NUMBER_PERSONAL_2 0xDD0F +#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS 0xDD10 +#define MTP_PROPERTY_PHONE_NUMBER_BUSINESS_2 0xDD11 +#define MTP_PROPERTY_PHONE_NUMBER_MOBILE 0xDD12 +#define MTP_PROPERTY_PHONE_NUMBER_MOBILE_2 0xDD13 +#define MTP_PROPERTY_FAX_NUMBER_PRIMARY 0xDD14 +#define 
MTP_PROPERTY_FAX_NUMBER_PERSONAL 0xDD15 +#define MTP_PROPERTY_FAX_NUMBER_BUSINESS 0xDD16 +#define MTP_PROPERTY_PAGER_NUMBER 0xDD17 +#define MTP_PROPERTY_PHONE_NUMBER_OTHERS 0xDD18 +#define MTP_PROPERTY_PRIMARY_WEB_ADDRESS 0xDD19 +#define MTP_PROPERTY_PERSONAL_WEB_ADDRESS 0xDD1A +#define MTP_PROPERTY_BUSINESS_WEB_ADDRESS 0xDD1B +#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS 0xDD1C +#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_2 0xDD1D +#define MTP_PROPERTY_INSTANT_MESSANGER_ADDRESS_3 0xDD1E +#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_FULL 0xDD1F +#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_1 0xDD20 +#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_LINE_2 0xDD21 +#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_CITY 0xDD22 +#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_REGION 0xDD23 +#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_POSTAL_CODE 0xDD24 +#define MTP_PROPERTY_POSTAL_ADDRESS_PERSONAL_COUNTRY 0xDD25 +#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_FULL 0xDD26 +#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_1 0xDD27 +#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_LINE_2 0xDD28 +#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_CITY 0xDD29 +#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_REGION 0xDD2A +#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_POSTAL_CODE 0xDD2B +#define MTP_PROPERTY_POSTAL_ADDRESS_BUSINESS_COUNTRY 0xDD2C +#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_FULL 0xDD2D +#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_1 0xDD2E +#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_LINE_2 0xDD2F +#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_CITY 0xDD30 +#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_REGION 0xDD31 +#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_POSTAL_CODE 0xDD32 +#define MTP_PROPERTY_POSTAL_ADDRESS_OTHER_COUNTRY 0xDD33 +#define MTP_PROPERTY_ORGANIZATION_NAME 0xDD34 +#define MTP_PROPERTY_PHONETIC_ORGANIZATION_NAME 0xDD35 +#define MTP_PROPERTY_ROLE 0xDD36 +#define MTP_PROPERTY_BIRTHDATE 0xDD37 +#define MTP_PROPERTY_MESSAGE_TO 0xDD40 +#define MTP_PROPERTY_MESSAGE_CC 0xDD41 +#define MTP_PROPERTY_MESSAGE_BCC 0xDD42 +#define MTP_PROPERTY_MESSAGE_READ 0xDD43 +#define MTP_PROPERTY_MESSAGE_RECEIVED_TIME 0xDD44 +#define MTP_PROPERTY_MESSAGE_SENDER 0xDD45 +#define MTP_PROPERTY_ACTIVITY_BEGIN_TIME 0xDD50 +#define MTP_PROPERTY_ACTIVITY_END_TIME 0xDD51 +#define MTP_PROPERTY_ACTIVITY_LOCATION 0xDD52 +#define MTP_PROPERTY_ACTIVITY_REQUIRED_ATTENDEES 0xDD54 +#define MTP_PROPERTY_ACTIVITY_OPTIONAL_ATTENDEES 0xDD55 +#define MTP_PROPERTY_ACTIVITY_RESOURCES 0xDD56 +#define MTP_PROPERTY_ACTIVITY_ACCEPTED 0xDD57 +#define MTP_PROPERTY_ACTIVITY_TENTATIVE 0xDD58 +#define MTP_PROPERTY_ACTIVITY_DECLINED 0xDD59 +#define MTP_PROPERTY_ACTIVITY_REMAINDER_TIME 0xDD5A +#define MTP_PROPERTY_ACTIVITY_OWNER 0xDD5B +#define MTP_PROPERTY_ACTIVITY_STATUS 0xDD5C +#define MTP_PROPERTY_OWNER 0xDD5D +#define MTP_PROPERTY_EDITOR 0xDD5E +#define MTP_PROPERTY_WEBMASTER 0xDD5F +#define MTP_PROPERTY_URL_SOURCE 0xDD60 +#define MTP_PROPERTY_URL_DESTINATION 0xDD61 +#define MTP_PROPERTY_TIME_BOOKMARK 0xDD62 +#define MTP_PROPERTY_OBJECT_BOOKMARK 0xDD63 +#define MTP_PROPERTY_BYTE_BOOKMARK 0xDD64 +#define MTP_PROPERTY_LAST_BUILD_DATE 0xDD70 +#define MTP_PROPERTY_TIME_TO_LIVE 0xDD71 +#define MTP_PROPERTY_MEDIA_GUID 0xDD72 + +// MTP Device Property Codes +#define MTP_DEVICE_PROPERTY_UNDEFINED 0x5000 +#define MTP_DEVICE_PROPERTY_BATTERY_LEVEL 0x5001 +#define MTP_DEVICE_PROPERTY_FUNCTIONAL_MODE 0x5002 +#define MTP_DEVICE_PROPERTY_IMAGE_SIZE 0x5003 +#define MTP_DEVICE_PROPERTY_COMPRESSION_SETTING 0x5004 +#define MTP_DEVICE_PROPERTY_WHITE_BALANCE 0x5005 
+#define MTP_DEVICE_PROPERTY_RGB_GAIN 0x5006 +#define MTP_DEVICE_PROPERTY_F_NUMBER 0x5007 +#define MTP_DEVICE_PROPERTY_FOCAL_LENGTH 0x5008 +#define MTP_DEVICE_PROPERTY_FOCUS_DISTANCE 0x5009 +#define MTP_DEVICE_PROPERTY_FOCUS_MODE 0x500A +#define MTP_DEVICE_PROPERTY_EXPOSURE_METERING_MODE 0x500B +#define MTP_DEVICE_PROPERTY_FLASH_MODE 0x500C +#define MTP_DEVICE_PROPERTY_EXPOSURE_TIME 0x500D +#define MTP_DEVICE_PROPERTY_EXPOSURE_PROGRAM_MODE 0x500E +#define MTP_DEVICE_PROPERTY_EXPOSURE_INDEX 0x500F +#define MTP_DEVICE_PROPERTY_EXPOSURE_BIAS_COMPENSATION 0x5010 +#define MTP_DEVICE_PROPERTY_DATETIME 0x5011 +#define MTP_DEVICE_PROPERTY_CAPTURE_DELAY 0x5012 +#define MTP_DEVICE_PROPERTY_STILL_CAPTURE_MODE 0x5013 +#define MTP_DEVICE_PROPERTY_CONTRAST 0x5014 +#define MTP_DEVICE_PROPERTY_SHARPNESS 0x5015 +#define MTP_DEVICE_PROPERTY_DIGITAL_ZOOM 0x5016 +#define MTP_DEVICE_PROPERTY_EFFECT_MODE 0x5017 +#define MTP_DEVICE_PROPERTY_BURST_NUMBER 0x5018 +#define MTP_DEVICE_PROPERTY_BURST_INTERVAL 0x5019 +#define MTP_DEVICE_PROPERTY_TIMELAPSE_NUMBER 0x501A +#define MTP_DEVICE_PROPERTY_TIMELAPSE_INTERVAL 0x501B +#define MTP_DEVICE_PROPERTY_FOCUS_METERING_MODE 0x501C +#define MTP_DEVICE_PROPERTY_UPLOAD_URL 0x501D +#define MTP_DEVICE_PROPERTY_ARTIST 0x501E +#define MTP_DEVICE_PROPERTY_COPYRIGHT_INFO 0x501F +#define MTP_DEVICE_PROPERTY_SYNCHRONIZATION_PARTNER 0xD401 +#define MTP_DEVICE_PROPERTY_DEVICE_FRIENDLY_NAME 0xD402 +#define MTP_DEVICE_PROPERTY_VOLUME 0xD403 +#define MTP_DEVICE_PROPERTY_SUPPORTED_FORMATS_ORDERED 0xD404 +#define MTP_DEVICE_PROPERTY_DEVICE_ICON 0xD405 +#define MTP_DEVICE_PROPERTY_PLAYBACK_RATE 0xD410 +#define MTP_DEVICE_PROPERTY_PLAYBACK_OBJECT 0xD411 +#define MTP_DEVICE_PROPERTY_PLAYBACK_CONTAINER_INDEX 0xD412 +#define MTP_DEVICE_PROPERTY_SESSION_INITIATOR_VERSION_INFO 0xD406 +#define MTP_DEVICE_PROPERTY_PERCEIVED_DEVICE_TYPE 0xD407 + +// MTP Operation Codes +#define MTP_OPERATION_GET_DEVICE_INFO 0x1001 +#define MTP_OPERATION_OPEN_SESSION 0x1002 +#define MTP_OPERATION_CLOSE_SESSION 0x1003 +#define MTP_OPERATION_GET_STORAGE_IDS 0x1004 +#define MTP_OPERATION_GET_STORAGE_INFO 0x1005 +#define MTP_OPERATION_GET_NUM_OBJECTS 0x1006 +#define MTP_OPERATION_GET_OBJECT_HANDLES 0x1007 +#define MTP_OPERATION_GET_OBJECT_INFO 0x1008 +#define MTP_OPERATION_GET_OBJECT 0x1009 +#define MTP_OPERATION_GET_THUMB 0x100A +#define MTP_OPERATION_DELETE_OBJECT 0x100B +#define MTP_OPERATION_SEND_OBJECT_INFO 0x100C +#define MTP_OPERATION_SEND_OBJECT 0x100D +#define MTP_OPERATION_INITIATE_CAPTURE 0x100E +#define MTP_OPERATION_FORMAT_STORE 0x100F +#define MTP_OPERATION_RESET_DEVICE 0x1010 +#define MTP_OPERATION_SELF_TEST 0x1011 +#define MTP_OPERATION_SET_OBJECT_PROTECTION 0x1012 +#define MTP_OPERATION_POWER_DOWN 0x1013 +#define MTP_OPERATION_GET_DEVICE_PROP_DESC 0x1014 +#define MTP_OPERATION_GET_DEVICE_PROP_VALUE 0x1015 +#define MTP_OPERATION_SET_DEVICE_PROP_VALUE 0x1016 +#define MTP_OPERATION_RESET_DEVICE_PROP_VALUE 0x1017 +#define MTP_OPERATION_TERMINATE_OPEN_CAPTURE 0x1018 +#define MTP_OPERATION_MOVE_OBJECT 0x1019 +#define MTP_OPERATION_COPY_OBJECT 0x101A +#define MTP_OPERATION_GET_PARTIAL_OBJECT 0x101B +#define MTP_OPERATION_INITIATE_OPEN_CAPTURE 0x101C +#define MTP_OPERATION_GET_OBJECT_PROPS_SUPPORTED 0x9801 +#define MTP_OPERATION_GET_OBJECT_PROP_DESC 0x9802 +#define MTP_OPERATION_GET_OBJECT_PROP_VALUE 0x9803 +#define MTP_OPERATION_SET_OBJECT_PROP_VALUE 0x9804 +#define MTP_OPERATION_GET_OBJECT_PROP_LIST 0x9805 +#define MTP_OPERATION_SET_OBJECT_PROP_LIST 0x9806 +#define MTP_OPERATION_GET_INTERDEPENDENT_PROP_DESC 
0x9807 +#define MTP_OPERATION_SEND_OBJECT_PROP_LIST 0x9808 +#define MTP_OPERATION_GET_OBJECT_REFERENCES 0x9810 +#define MTP_OPERATION_SET_OBJECT_REFERENCES 0x9811 +#define MTP_OPERATION_SKIP 0x9820 + +// MTP Response Codes +#define MTP_RESPONSE_UNDEFINED 0x2000 +#define MTP_RESPONSE_OK 0x2001 +#define MTP_RESPONSE_GENERAL_ERROR 0x2002 +#define MTP_RESPONSE_SESSION_NOT_OPEN 0x2003 +#define MTP_RESPONSE_INVALID_TRANSACTION_ID 0x2004 +#define MTP_RESPONSE_OPERATION_NOT_SUPPORTED 0x2005 +#define MTP_RESPONSE_PARAMETER_NOT_SUPPORTED 0x2006 +#define MTP_RESPONSE_INCOMPLETE_TRANSFER 0x2007 +#define MTP_RESPONSE_INVALID_STORAGE_ID 0x2008 +#define MTP_RESPONSE_INVALID_OBJECT_HANDLE 0x2009 +#define MTP_RESPONSE_DEVICE_PROP_NOT_SUPPORTED 0x200A +#define MTP_RESPONSE_INVALID_OBJECT_FORMAT_CODE 0x200B +#define MTP_RESPONSE_STORAGE_FULL 0x200C +#define MTP_RESPONSE_OBJECT_WRITE_PROTECTED 0x200D +#define MTP_RESPONSE_STORE_READ_ONLY 0x200E +#define MTP_RESPONSE_ACCESS_DENIED 0x200F +#define MTP_RESPONSE_NO_THUMBNAIL_PRESENT 0x2010 +#define MTP_RESPONSE_SELF_TEST_FAILED 0x2011 +#define MTP_RESPONSE_PARTIAL_DELETION 0x2012 +#define MTP_RESPONSE_STORE_NOT_AVAILABLE 0x2013 +#define MTP_RESPONSE_SPECIFICATION_BY_FORMAT_UNSUPPORTED 0x2014 +#define MTP_RESPONSE_NO_VALID_OBJECT_INFO 0x2015 +#define MTP_RESPONSE_INVALID_CODE_FORMAT 0x2016 +#define MTP_RESPONSE_UNKNOWN_VENDOR_CODE 0x2017 +#define MTP_RESPONSE_CAPTURE_ALREADY_TERMINATED 0x2018 +#define MTP_RESPONSE_DEVICE_BUSY 0x2019 +#define MTP_RESPONSE_INVALID_PARENT_OBJECT 0x201A +#define MTP_RESPONSE_INVALID_DEVICE_PROP_FORMAT 0x201B +#define MTP_RESPONSE_INVALID_DEVICE_PROP_VALUE 0x201C +#define MTP_RESPONSE_INVALID_PARAMETER 0x201D +#define MTP_RESPONSE_SESSION_ALREADY_OPEN 0x201E +#define MTP_RESPONSE_TRANSACTION_CANCELLED 0x201F +#define MTP_RESPONSE_SPECIFICATION_OF_DESTINATION_UNSUPPORTED 0x2020 +#define MTP_RESPONSE_INVALID_OBJECT_PROP_CODE 0xA801 +#define MTP_RESPONSE_INVALID_OBJECT_PROP_FORMAT 0xA802 +#define MTP_RESPONSE_INVALID_OBJECT_PROP_VALUE 0xA803 +#define MTP_RESPONSE_INVALID_OBJECT_REFERENCE 0xA804 +#define MTP_RESPONSE_GROUP_NOT_SUPPORTED 0xA805 +#define MTP_RESPONSE_INVALID_DATASET 0xA806 +#define MTP_RESPONSE_SPECIFICATION_BY_GROUP_UNSUPPORTED 0xA807 +#define MTP_RESPONSE_SPECIFICATION_BY_DEPTH_UNSUPPORTED 0xA808 +#define MTP_RESPONSE_OBJECT_TOO_LARGE 0xA809 +#define MTP_RESPONSE_OBJECT_PROP_NOT_SUPPORTED 0xA80A + +// MTP Event Codes +#define MTP_EVENT_UNDEFINED 0x4000 +#define MTP_EVENT_CANCEL_TRANSACTION 0x4001 +#define MTP_EVENT_OBJECT_ADDED 0x4002 +#define MTP_EVENT_OBJECT_REMOVED 0x4003 +#define MTP_EVENT_STORE_ADDED 0x4004 +#define MTP_EVENT_STORE_REMOVED 0x4005 +#define MTP_EVENT_DEVICE_PROP_CHANGED 0x4006 +#define MTP_EVENT_OBJECT_INFO_CHANGED 0x4007 +#define MTP_EVENT_DEVICE_INFO_CHANGED 0x4008 +#define MTP_EVENT_REQUEST_OBJECT_TRANSFER 0x4009 +#define MTP_EVENT_STORE_FULL 0x400A +#define MTP_EVENT_DEVICE_RESET 0x400B +#define MTP_EVENT_STORAGE_INFO_CHANGED 0x400C +#define MTP_EVENT_CAPTURE_COMPLETE 0x400D +#define MTP_EVENT_UNREPORTED_STATUS 0x400E +#define MTP_EVENT_OBJECT_PROP_CHANGED 0xC801 +#define MTP_EVENT_OBJECT_PROP_DESC_CHANGED 0xC802 +#define MTP_EVENT_OBJECT_REFERENCES_CHANGED 0xC803 + +// Storage Type +#define MTP_STORAGE_FIXED_ROM 0x0001 +#define MTP_STORAGE_REMOVABLE_ROM 0x0002 +#define MTP_STORAGE_FIXED_RAM 0x0003 +#define MTP_STORAGE_REMOVABLE_RAM 0x0004 + +// Storage File System +#define MTP_STORAGE_FILESYSTEM_FLAT 0x0001 +#define MTP_STORAGE_FILESYSTEM_HIERARCHICAL 0x0002 +#define MTP_STORAGE_FILESYSTEM_DCF 
0x0003 + +// Storage Access Capability +#define MTP_STORAGE_READ_WRITE 0x0000 +#define MTP_STORAGE_READ_ONLY_WITHOUT_DELETE 0x0001 +#define MTP_STORAGE_READ_ONLY_WITH_DELETE 0x0002 + +// Association Type +#define MTP_ASSOCIATION_TYPE_UNDEFINED 0x0000 +#define MTP_ASSOCIATION_TYPE_GENERIC_FOLDER 0x0001 + +#endif // _MTP_H diff --git a/services/audioflinger/A2dpAudioInterface.cpp b/services/audioflinger/A2dpAudioInterface.cpp index 995e31c..d926cb1 100644 --- a/services/audioflinger/A2dpAudioInterface.cpp +++ b/services/audioflinger/A2dpAudioInterface.cpp @@ -23,10 +23,13 @@ #include "A2dpAudioInterface.h" #include "audio/liba2dp.h" - +#include <hardware_legacy/power.h> namespace android { +static const char *sA2dpWakeLock = "A2dpOutputStream"; +#define MAX_WRITE_RETRIES 5 + // ---------------------------------------------------------------------------- //AudioHardwareInterface* A2dpAudioInterface::createA2dpInterface() @@ -257,52 +260,74 @@ status_t A2dpAudioInterface::A2dpAudioStreamOut::set( if (pRate) *pRate = lRate; mDevice = device; + mBufferDurationUs = ((bufferSize() * 1000 )/ frameSize() / sampleRate()) * 1000; return NO_ERROR; } A2dpAudioInterface::A2dpAudioStreamOut::~A2dpAudioStreamOut() { LOGV("A2dpAudioStreamOut destructor"); - standby(); close(); LOGV("A2dpAudioStreamOut destructor returning from close()"); } ssize_t A2dpAudioInterface::A2dpAudioStreamOut::write(const void* buffer, size_t bytes) { - Mutex::Autolock lock(mLock); - - size_t remaining = bytes; status_t status = -1; + { + Mutex::Autolock lock(mLock); - if (!mBluetoothEnabled || mClosing || mSuspended) { - LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \ - mBluetoothEnabled %d, mClosing %d, mSuspended %d", - mBluetoothEnabled, mClosing, mSuspended); - goto Error; - } - - status = init(); - if (status < 0) - goto Error; + size_t remaining = bytes; - while (remaining > 0) { - status = a2dp_write(mData, buffer, remaining); - if (status <= 0) { - LOGE("a2dp_write failed err: %d\n", status); + if (!mBluetoothEnabled || mClosing || mSuspended) { + LOGV("A2dpAudioStreamOut::write(), but bluetooth disabled \ + mBluetoothEnabled %d, mClosing %d, mSuspended %d", + mBluetoothEnabled, mClosing, mSuspended); goto Error; } - remaining -= status; - buffer = ((char *)buffer) + status; - } - mStandby = false; + if (mStandby) { + acquire_wake_lock (PARTIAL_WAKE_LOCK, sA2dpWakeLock); + mStandby = false; + mLastWriteTime = systemTime(); + } + + status = init(); + if (status < 0) + goto Error; + + int retries = MAX_WRITE_RETRIES; + while (remaining > 0 && retries) { + status = a2dp_write(mData, buffer, remaining); + if (status < 0) { + LOGE("a2dp_write failed err: %d\n", status); + goto Error; + } + if (status == 0) { + retries--; + } + remaining -= status; + buffer = (char *)buffer + status; + } - return bytes; + // if A2DP sink runs abnormally fast, sleep a little so that audioflinger mixer thread + // does no spin and starve other threads. 
+ // NOTE: It is likely that the A2DP headset is being disconnected + nsecs_t now = systemTime(); + if ((uint32_t)ns2us(now - mLastWriteTime) < (mBufferDurationUs >> 2)) { + LOGV("A2DP sink runs too fast"); + usleep(mBufferDurationUs - (uint32_t)ns2us(now - mLastWriteTime)); + } + mLastWriteTime = now; + return bytes; + } Error: + + standby(); + // Simulate audio output timing in case of error - usleep(((bytes * 1000 )/ frameSize() / sampleRate()) * 1000); + usleep(mBufferDurationUs); return status; } @@ -324,19 +349,22 @@ status_t A2dpAudioInterface::A2dpAudioStreamOut::init() status_t A2dpAudioInterface::A2dpAudioStreamOut::standby() { - int result = 0; - - if (mClosing) { - LOGV("Ignore standby, closing"); - return result; - } - Mutex::Autolock lock(mLock); + return standby_l(); +} + +status_t A2dpAudioInterface::A2dpAudioStreamOut::standby_l() +{ + int result = NO_ERROR; if (!mStandby) { - result = a2dp_stop(mData); - if (result == 0) - mStandby = true; + LOGV_IF(mClosing || !mBluetoothEnabled, "Standby skip stop: closing %d enabled %d", + mClosing, mBluetoothEnabled); + if (!mClosing && mBluetoothEnabled) { + result = a2dp_stop(mData); + } + release_wake_lock(sA2dpWakeLock); + mStandby = true; } return result; @@ -362,6 +390,9 @@ status_t A2dpAudioInterface::A2dpAudioStreamOut::setParameters(const String8& ke key = String8("closing"); if (param.get(key, value) == NO_ERROR) { mClosing = (value == "true"); + if (mClosing) { + standby(); + } param.remove(key); } key = AudioParameter::keyRouting; @@ -444,6 +475,7 @@ status_t A2dpAudioInterface::A2dpAudioStreamOut::close() status_t A2dpAudioInterface::A2dpAudioStreamOut::close_l() { + standby_l(); if (mData) { LOGV("A2dpAudioStreamOut::close_l() calling a2dp_cleanup(mData)"); a2dp_cleanup(mData); diff --git a/services/audioflinger/A2dpAudioInterface.h b/services/audioflinger/A2dpAudioInterface.h index 48154f9..dbe2c6a 100644 --- a/services/audioflinger/A2dpAudioInterface.h +++ b/services/audioflinger/A2dpAudioInterface.h @@ -103,6 +103,7 @@ private: status_t setAddress(const char* address); status_t setBluetoothEnabled(bool enabled); status_t setSuspended(bool onOff); + status_t standby_l(); private: int mFd; @@ -116,6 +117,8 @@ private: uint32_t mDevice; bool mClosing; bool mSuspended; + nsecs_t mLastWriteTime; + uint32_t mBufferDurationUs; }; friend class A2dpAudioStreamOut; diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp index 5071555..11ad4e4 100644 --- a/services/audioflinger/AudioFlinger.cpp +++ b/services/audioflinger/AudioFlinger.cpp @@ -131,18 +131,21 @@ AudioFlinger::AudioFlinger() : BnAudioFlinger(), mAudioHardware(0), mMasterVolume(1.0f), mMasterMute(false), mNextUniqueId(1) { + Mutex::Autolock _l(mLock); + mHardwareStatus = AUDIO_HW_IDLE; mAudioHardware = AudioHardwareInterface::create(); mHardwareStatus = AUDIO_HW_INIT; if (mAudioHardware->initCheck() == NO_ERROR) { - // open 16-bit output stream for s/w mixer + AutoMutex lock(mHardwareLock); mMode = AudioSystem::MODE_NORMAL; - setMode(mMode); - - setMasterVolume(1.0f); - setMasterMute(false); + mHardwareStatus = AUDIO_HW_SET_MODE; + mAudioHardware->setMode(mMode); + mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME; + mAudioHardware->setMasterVolume(1.0f); + mHardwareStatus = AUDIO_HW_IDLE; } else { LOGE("Couldn't even initialize the stubbed audio hardware!"); } @@ -343,7 +346,7 @@ sp<IAudioTrack> AudioFlinger::createTrack( lSessionId = *sessionId; } else { // if no audio session id is provided, create one here - lSessionId = 
nextUniqueId(); + lSessionId = nextUniqueId_l(); if (sessionId != NULL) { *sessionId = lSessionId; } @@ -440,13 +443,16 @@ status_t AudioFlinger::setMasterVolume(float value) } // when hw supports master volume, don't scale in sw mixer - AutoMutex lock(mHardwareLock); - mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME; - if (mAudioHardware->setMasterVolume(value) == NO_ERROR) { - value = 1.0f; + { // scope for the lock + AutoMutex lock(mHardwareLock); + mHardwareStatus = AUDIO_HW_SET_MASTER_VOLUME; + if (mAudioHardware->setMasterVolume(value) == NO_ERROR) { + value = 1.0f; + } + mHardwareStatus = AUDIO_HW_IDLE; } - mHardwareStatus = AUDIO_HW_IDLE; + Mutex::Autolock _l(mLock); mMasterVolume = value; for (uint32_t i = 0; i < mPlaybackThreads.size(); i++) mPlaybackThreads.valueAt(i)->setMasterVolume(value); @@ -517,6 +523,7 @@ status_t AudioFlinger::setMasterMute(bool muted) return PERMISSION_DENIED; } + Mutex::Autolock _l(mLock); mMasterMute = muted; for (uint32_t i = 0; i < mPlaybackThreads.size(); i++) mPlaybackThreads.valueAt(i)->setMasterMute(muted); @@ -579,6 +586,7 @@ status_t AudioFlinger::setStreamMute(int stream, bool muted) return BAD_VALUE; } + AutoMutex lock(mLock); mStreamTypes[stream].mute = muted; for (uint32_t i = 0; i < mPlaybackThreads.size(); i++) mPlaybackThreads.valueAt(i)->setStreamMute(stream, muted); @@ -3699,7 +3707,7 @@ sp<IAudioRecord> AudioFlinger::openRecord( if (sessionId != NULL && *sessionId != AudioSystem::SESSION_OUTPUT_MIX) { lSessionId = *sessionId; } else { - lSessionId = nextUniqueId(); + lSessionId = nextUniqueId_l(); if (sessionId != NULL) { *sessionId = lSessionId; } @@ -4300,7 +4308,7 @@ int AudioFlinger::openOutput(uint32_t *pDevices, mHardwareStatus = AUDIO_HW_IDLE; if (output != 0) { - int id = nextUniqueId(); + int id = nextUniqueId_l(); if ((flags & AudioSystem::OUTPUT_FLAG_DIRECT) || (format != AudioSystem::PCM_16_BIT) || (channels != AudioSystem::CHANNEL_OUT_STEREO)) { @@ -4348,7 +4356,7 @@ int AudioFlinger::openDuplicateOutput(int output1, int output2) return 0; } - int id = nextUniqueId(); + int id = nextUniqueId_l(); DuplicatingThread *thread = new DuplicatingThread(this, thread1, id); thread->addOutputTrack(thread2); mPlaybackThreads.add(id, thread); @@ -4473,7 +4481,7 @@ int AudioFlinger::openInput(uint32_t *pDevices, } if (input != 0) { - int id = nextUniqueId(); + int id = nextUniqueId_l(); // Start record thread thread = new RecordThread(this, input, reqSamplingRate, reqChannels, id); mRecordThreads.add(id, thread); @@ -4543,7 +4551,8 @@ status_t AudioFlinger::setStreamOutput(uint32_t stream, int output) int AudioFlinger::newAudioSessionId() { - return nextUniqueId(); + AutoMutex _l(mLock); + return nextUniqueId_l(); } // checkPlaybackThread_l() must be called with AudioFlinger::mLock held @@ -4578,9 +4587,10 @@ AudioFlinger::RecordThread *AudioFlinger::checkRecordThread_l(int input) const return thread; } -int AudioFlinger::nextUniqueId() +// nextUniqueId_l() must be called with AudioFlinger::mLock held +int AudioFlinger::nextUniqueId_l() { - return android_atomic_inc(&mNextUniqueId); + return mNextUniqueId++; } // ---------------------------------------------------------------------------- @@ -4967,7 +4977,7 @@ sp<AudioFlinger::EffectHandle> AudioFlinger::PlaybackThread::createEffect_l( LOGV("createEffect_l() got effect %p on chain %p", effect == 0 ? 
0 : effect.get(), chain.get()); if (effect == 0) { - int id = mAudioFlinger->nextUniqueId(); + int id = mAudioFlinger->nextUniqueId_l(); // Check CPU and memory usage lStatus = AudioSystem::registerEffect(desc, mId, chain->strategy(), sessionId, id); if (lStatus != NO_ERROR) { @@ -5810,7 +5820,8 @@ uint32_t AudioFlinger::EffectModule::deviceAudioSystemToEffectApi(uint32_t devic const uint32_t AudioFlinger::EffectModule::sModeConvTable[] = { AUDIO_MODE_NORMAL, // AudioSystem::MODE_NORMAL AUDIO_MODE_RINGTONE, // AudioSystem::MODE_RINGTONE - AUDIO_MODE_IN_CALL // AudioSystem::MODE_IN_CALL + AUDIO_MODE_IN_CALL, // AudioSystem::MODE_IN_CALL + AUDIO_MODE_IN_CALL // AudioSystem::MODE_IN_COMMUNICATION, same conversion as for MODE_IN_CALL }; int AudioFlinger::EffectModule::modeAudioSystemToEffectApi(uint32_t mode) diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h index 5917632..f0ef867 100644 --- a/services/audioflinger/AudioFlinger.h +++ b/services/audioflinger/AudioFlinger.h @@ -785,7 +785,7 @@ private: float streamVolumeInternal(int stream) const { return mStreamTypes[stream].volume; } void audioConfigChanged_l(int event, int ioHandle, void *param2); - int nextUniqueId(); + int nextUniqueId_l(); status_t moveEffectChain_l(int session, AudioFlinger::PlaybackThread *srcThread, AudioFlinger::PlaybackThread *dstThread, diff --git a/services/audioflinger/AudioHardwareInterface.cpp b/services/audioflinger/AudioHardwareInterface.cpp index 9a4a7f9..f58e4c0 100644 --- a/services/audioflinger/AudioHardwareInterface.cpp +++ b/services/audioflinger/AudioHardwareInterface.cpp @@ -48,14 +48,15 @@ static const char* routingModeStrings[] = "CURRENT", "NORMAL", "RINGTONE", - "IN_CALL" + "IN_CALL", + "IN_COMMUNICATION" }; static const char* routeNone = "NONE"; static const char* displayMode(int mode) { - if ((mode < -2) || (mode > 2)) + if ((mode < AudioSystem::MODE_INVALID) || (mode >= AudioSystem::NUM_MODES)) return routingModeStrings[0]; return routingModeStrings[mode+3]; } diff --git a/services/audioflinger/AudioPolicyManagerBase.cpp b/services/audioflinger/AudioPolicyManagerBase.cpp index 8d16ab4..e4086c4 100644 --- a/services/audioflinger/AudioPolicyManagerBase.cpp +++ b/services/audioflinger/AudioPolicyManagerBase.cpp @@ -235,7 +235,7 @@ void AudioPolicyManagerBase::setPhoneState(int state) // if leaving call state, handle special case of active streams // pertaining to sonification strategy see handleIncallSonification() - if (mPhoneState == AudioSystem::MODE_IN_CALL) { + if (isInCall()) { LOGV("setPhoneState() in call state management: new state is %d", state); for (int stream = 0; stream < AudioSystem::NUM_STREAM_TYPES; stream++) { handleIncallSonification(stream, false, true); @@ -248,16 +248,21 @@ void AudioPolicyManagerBase::setPhoneState(int state) bool force = false; // are we entering or starting a call - if ((oldState != AudioSystem::MODE_IN_CALL) && (state == AudioSystem::MODE_IN_CALL)) { + if (!isStateInCall(oldState) && isStateInCall(state)) { LOGV(" Entering call in setPhoneState()"); // force routing command to audio hardware when starting a call // even if no device change is needed force = true; - } else if ((oldState == AudioSystem::MODE_IN_CALL) && (state != AudioSystem::MODE_IN_CALL)) { + } else if (isStateInCall(oldState) && !isStateInCall(state)) { LOGV(" Exiting call in setPhoneState()"); // force routing command to audio hardware when exiting a call // even if no device change is needed force = true; + } else if (isStateInCall(state) && (state 
!= oldState)) { + LOGV(" Switching between telephony and VoIP in setPhoneState()"); + // force routing command to audio hardware when switching between telephony and VoIP + // even if no device change is needed + force = true; } // check for device and output changes triggered by new phone state @@ -272,7 +277,7 @@ void AudioPolicyManagerBase::setPhoneState(int state) // force routing command to audio hardware when ending call // even if no device change is needed - if (oldState == AudioSystem::MODE_IN_CALL && newDevice == 0) { + if (isStateInCall(oldState) && newDevice == 0) { newDevice = hwOutputDesc->device(); } @@ -280,7 +285,7 @@ void AudioPolicyManagerBase::setPhoneState(int state) // immediately and delay the route change to avoid sending the ring tone // tail into the earpiece or headset. int delayMs = 0; - if (state == AudioSystem::MODE_IN_CALL && oldState == AudioSystem::MODE_RINGTONE) { + if (isStateInCall(state) && oldState == AudioSystem::MODE_RINGTONE) { // delay the device change command by twice the output latency to have some margin // and be sure that audio buffers not yet affected by the mute are out when // we actually apply the route change @@ -293,7 +298,7 @@ void AudioPolicyManagerBase::setPhoneState(int state) // if entering in call state, handle special case of active streams // pertaining to sonification strategy see handleIncallSonification() - if (state == AudioSystem::MODE_IN_CALL) { + if (isStateInCall(state)) { LOGV("setPhoneState() in call state management: new state is %d", state); // unmute the ringing tone after a sufficient delay if it was muted before // setting output device above @@ -338,7 +343,9 @@ void AudioPolicyManagerBase::setForceUse(AudioSystem::force_use usage, AudioSyst break; case AudioSystem::FOR_MEDIA: if (config != AudioSystem::FORCE_HEADPHONES && config != AudioSystem::FORCE_BT_A2DP && - config != AudioSystem::FORCE_WIRED_ACCESSORY && config != AudioSystem::FORCE_NONE) { + config != AudioSystem::FORCE_WIRED_ACCESSORY && + config != AudioSystem::FORCE_ANALOG_DOCK && + config != AudioSystem::FORCE_DIGITAL_DOCK && config != AudioSystem::FORCE_NONE) { LOGW("setForceUse() invalid config %d for FOR_MEDIA", config); return; } @@ -354,7 +361,10 @@ void AudioPolicyManagerBase::setForceUse(AudioSystem::force_use usage, AudioSyst break; case AudioSystem::FOR_DOCK: if (config != AudioSystem::FORCE_NONE && config != AudioSystem::FORCE_BT_CAR_DOCK && - config != AudioSystem::FORCE_BT_DESK_DOCK && config != AudioSystem::FORCE_WIRED_ACCESSORY) { + config != AudioSystem::FORCE_BT_DESK_DOCK && + config != AudioSystem::FORCE_WIRED_ACCESSORY && + config != AudioSystem::FORCE_ANALOG_DOCK && + config != AudioSystem::FORCE_DIGITAL_DOCK) { LOGW("setForceUse() invalid config %d for FOR_DOCK", config); } forceVolumeReeval = true; @@ -564,7 +574,7 @@ status_t AudioPolicyManagerBase::startOutput(audio_io_handle_t output, setOutputDevice(output, getNewDevice(output)); // handle special case for sonification while in call - if (mPhoneState == AudioSystem::MODE_IN_CALL) { + if (isInCall()) { handleIncallSonification(stream, true, false); } @@ -589,7 +599,7 @@ status_t AudioPolicyManagerBase::stopOutput(audio_io_handle_t output, routing_strategy strategy = getStrategy((AudioSystem::stream_type)stream); // handle special case for sonification while in call - if (mPhoneState == AudioSystem::MODE_IN_CALL) { + if (isInCall()) { handleIncallSonification(stream, false, false); } @@ -738,10 +748,8 @@ status_t AudioPolicyManagerBase::startInput(audio_io_handle_t input) 
AudioParameter param = AudioParameter(); param.addInt(String8(AudioParameter::keyRouting), (int)inputDesc->mDevice); - // use Voice Recognition mode or not for this input based on input source - int vr_enabled = inputDesc->mInputSource == AUDIO_SOURCE_VOICE_RECOGNITION ? 1 : 0; - param.addInt(String8("vr_mode"), vr_enabled); - LOGV("AudioPolicyManager::startInput(%d), setting vr_mode to %d", inputDesc->mInputSource, vr_enabled); + param.addInt(String8(AudioParameter::keyInputSource), (int)inputDesc->mInputSource); + LOGV("AudioPolicyManager::startInput() input source = %d", inputDesc->mInputSource); mpClientInterface->setParameters(input, param.toString()); @@ -1044,25 +1052,27 @@ AudioPolicyManagerBase::AudioPolicyManagerBase(AudioPolicyClientInterface *clien updateDeviceForStrategy(); #ifdef AUDIO_POLICY_TEST - AudioParameter outputCmd = AudioParameter(); - outputCmd.addInt(String8("set_id"), 0); - mpClientInterface->setParameters(mHardwareOutput, outputCmd.toString()); - - mTestDevice = AudioSystem::DEVICE_OUT_SPEAKER; - mTestSamplingRate = 44100; - mTestFormat = AudioSystem::PCM_16_BIT; - mTestChannels = AudioSystem::CHANNEL_OUT_STEREO; - mTestLatencyMs = 0; - mCurOutput = 0; - mDirectOutput = false; - for (int i = 0; i < NUM_TEST_OUTPUTS; i++) { - mTestOutputs[i] = 0; - } + if (mHardwareOutput != 0) { + AudioParameter outputCmd = AudioParameter(); + outputCmd.addInt(String8("set_id"), 0); + mpClientInterface->setParameters(mHardwareOutput, outputCmd.toString()); + + mTestDevice = AudioSystem::DEVICE_OUT_SPEAKER; + mTestSamplingRate = 44100; + mTestFormat = AudioSystem::PCM_16_BIT; + mTestChannels = AudioSystem::CHANNEL_OUT_STEREO; + mTestLatencyMs = 0; + mCurOutput = 0; + mDirectOutput = false; + for (int i = 0; i < NUM_TEST_OUTPUTS; i++) { + mTestOutputs[i] = 0; + } - const size_t SIZE = 256; - char buffer[SIZE]; - snprintf(buffer, SIZE, "AudioPolicyManagerTest"); - run(buffer, ANDROID_PRIORITY_AUDIO); + const size_t SIZE = 256; + char buffer[SIZE]; + snprintf(buffer, SIZE, "AudioPolicyManagerTest"); + run(buffer, ANDROID_PRIORITY_AUDIO); + } #endif //AUDIO_POLICY_TEST } @@ -1083,6 +1093,11 @@ AudioPolicyManagerBase::~AudioPolicyManagerBase() mInputs.clear(); } +status_t AudioPolicyManagerBase::initCheck() +{ + return (mHardwareOutput == 0) ? 
NO_INIT : NO_ERROR; +} + #ifdef AUDIO_POLICY_TEST bool AudioPolicyManagerBase::threadLoop() { @@ -1344,6 +1359,7 @@ status_t AudioPolicyManagerBase::handleA2dpDisconnection(AudioSystem::audio_devi void AudioPolicyManagerBase::closeA2dpOutputs() { + LOGV("setDeviceConnectionState() closing A2DP and duplicated output!"); if (mDuplicatedOutput != 0) { @@ -1494,7 +1510,7 @@ uint32_t AudioPolicyManagerBase::getNewDevice(audio_io_handle_t output, bool fro // use device for strategy media // 4: the strategy DTMF is active on the hardware output: // use device for strategy DTMF - if (mPhoneState == AudioSystem::MODE_IN_CALL || + if (isInCall() || outputDesc->isUsedByStrategy(STRATEGY_PHONE)) { device = getDeviceForStrategy(STRATEGY_PHONE, fromCache); } else if (outputDesc->isUsedByStrategy(STRATEGY_SONIFICATION)) { @@ -1549,7 +1565,7 @@ uint32_t AudioPolicyManagerBase::getDeviceForStrategy(routing_strategy strategy, switch (strategy) { case STRATEGY_DTMF: - if (mPhoneState != AudioSystem::MODE_IN_CALL) { + if (!isInCall()) { // when off call, DTMF strategy follows the same rules as MEDIA strategy device = getDeviceForStrategy(STRATEGY_MEDIA, false); break; @@ -1562,7 +1578,7 @@ uint32_t AudioPolicyManagerBase::getDeviceForStrategy(routing_strategy strategy, // of priority switch (mForceUse[AudioSystem::FOR_COMMUNICATION]) { case AudioSystem::FORCE_BT_SCO: - if (mPhoneState != AudioSystem::MODE_IN_CALL || strategy != STRATEGY_DTMF) { + if (!isInCall() || strategy != STRATEGY_DTMF) { device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT; if (device) break; } @@ -1578,9 +1594,15 @@ uint32_t AudioPolicyManagerBase::getDeviceForStrategy(routing_strategy strategy, if (device) break; device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET; if (device) break; + device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL; + if (device) break; + device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET; + if (device) break; + device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET; + if (device) break; #ifdef WITH_A2DP // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to A2DP - if (mPhoneState != AudioSystem::MODE_IN_CALL) { + if (!isInCall()) { device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP; if (device) break; device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES; @@ -1594,14 +1616,16 @@ uint32_t AudioPolicyManagerBase::getDeviceForStrategy(routing_strategy strategy, break; case AudioSystem::FORCE_SPEAKER: - if (mPhoneState != AudioSystem::MODE_IN_CALL || strategy != STRATEGY_DTMF) { - device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_SCO_CARKIT; - if (device) break; - } + device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL; + if (device) break; + device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET; + if (device) break; + device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET; + if (device) break; #ifdef WITH_A2DP // when not in a phone call, phone strategy should route STREAM_VOICE_CALL to // A2DP speaker when forcing to speaker output - if (mPhoneState != AudioSystem::MODE_IN_CALL) { + if (!isInCall()) { device = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER; if (device) break; } @@ -1618,7 +1642,7 @@ uint32_t AudioPolicyManagerBase::getDeviceForStrategy(routing_strategy strategy, // If incall, just select the 
STRATEGY_PHONE device: The rest of the behavior is handled by // handleIncallSonification(). - if (mPhoneState == AudioSystem::MODE_IN_CALL) { + if (isInCall()) { device = getDeviceForStrategy(STRATEGY_PHONE, false); break; } @@ -1630,12 +1654,18 @@ uint32_t AudioPolicyManagerBase::getDeviceForStrategy(routing_strategy strategy, // FALL THROUGH case STRATEGY_MEDIA: { - uint32_t device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL; + uint32_t device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADPHONE; if (device2 == 0) { - device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADPHONE; + device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET; } if (device2 == 0) { - device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_WIRED_HEADSET; + device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_AUX_DIGITAL; + } + if (device2 == 0) { + device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET; + } + if (device2 == 0) { + device2 = mAvailableOutputDevices & AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET; } #ifdef WITH_A2DP if (mA2dpOutput != 0) { @@ -1739,6 +1769,7 @@ uint32_t AudioPolicyManagerBase::getDeviceForInputSource(int inputSource) case AUDIO_SOURCE_DEFAULT: case AUDIO_SOURCE_MIC: case AUDIO_SOURCE_VOICE_RECOGNITION: + case AUDIO_SOURCE_VOICE_COMMUNICATION: if (mForceUse[AudioSystem::FOR_RECORD] == AudioSystem::FORCE_BT_SCO && mAvailableInputDevices & AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET) { device = AudioSystem::DEVICE_IN_BLUETOOTH_SCO_HEADSET; @@ -1801,7 +1832,9 @@ float AudioPolicyManagerBase::computeVolume(int stream, int index, audio_io_hand (AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP | AudioSystem::DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES | AudioSystem::DEVICE_OUT_WIRED_HEADSET | - AudioSystem::DEVICE_OUT_WIRED_HEADPHONE)) && + AudioSystem::DEVICE_OUT_WIRED_HEADPHONE | + AudioSystem::DEVICE_OUT_ANLG_DOCK_HEADSET | + AudioSystem::DEVICE_OUT_DGTL_DOCK_HEADSET)) && (getStrategy((AudioSystem::stream_type)stream) == STRATEGY_SONIFICATION) && streamDesc.mCanBeMuted) { volume *= SONIFICATION_HEADSET_VOLUME_FACTOR; @@ -1960,6 +1993,16 @@ void AudioPolicyManagerBase::handleIncallSonification(int stream, bool starting, } } +bool AudioPolicyManagerBase::isInCall() +{ + return isStateInCall(mPhoneState); +} + +bool AudioPolicyManagerBase::isStateInCall(int state) { + return ((state == AudioSystem::MODE_IN_CALL) || + (state == AudioSystem::MODE_IN_COMMUNICATION)); +} + bool AudioPolicyManagerBase::needsDirectOuput(AudioSystem::stream_type stream, uint32_t samplingRate, uint32_t format, diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp index f24e08e..46a01ad 100644 --- a/services/audioflinger/AudioPolicyService.cpp +++ b/services/audioflinger/AudioPolicyService.cpp @@ -68,6 +68,8 @@ AudioPolicyService::AudioPolicyService() { char value[PROPERTY_VALUE_MAX]; + Mutex::Autolock _l(mLock); + // start tone playback thread mTonePlaybackThread = new AudioCommandThread(String8("")); // start audio commands thread @@ -88,9 +90,18 @@ AudioPolicyService::AudioPolicyService() } #endif - // load properties - property_get("ro.camera.sound.forced", value, "0"); - mpPolicyManager->setSystemProperty("ro.camera.sound.forced", value); + if ((mpPolicyManager != NULL) && (mpPolicyManager->initCheck() != NO_ERROR)) { + delete mpPolicyManager; + mpPolicyManager = NULL; + } + + if (mpPolicyManager == NULL) { + LOGE("Could not create AudioPolicyManager"); + } else { + // load 
diff --git a/services/audioflinger/AudioPolicyService.cpp b/services/audioflinger/AudioPolicyService.cpp
index f24e08e..46a01ad 100644
--- a/services/audioflinger/AudioPolicyService.cpp
+++ b/services/audioflinger/AudioPolicyService.cpp
@@ -68,6 +68,8 @@ AudioPolicyService::AudioPolicyService()
 {
     char value[PROPERTY_VALUE_MAX];

+    Mutex::Autolock _l(mLock);
+
     // start tone playback thread
     mTonePlaybackThread = new AudioCommandThread(String8(""));
     // start audio commands thread
@@ -88,9 +90,18 @@ AudioPolicyService::AudioPolicyService()
     }
 #endif

-    // load properties
-    property_get("ro.camera.sound.forced", value, "0");
-    mpPolicyManager->setSystemProperty("ro.camera.sound.forced", value);
+    if ((mpPolicyManager != NULL) && (mpPolicyManager->initCheck() != NO_ERROR)) {
+        delete mpPolicyManager;
+        mpPolicyManager = NULL;
+    }
+
+    if (mpPolicyManager == NULL) {
+        LOGE("Could not create AudioPolicyManager");
+    } else {
+        // load properties
+        property_get("ro.camera.sound.forced", value, "0");
+        mpPolicyManager->setSystemProperty("ro.camera.sound.forced", value);
+    }
 }

 AudioPolicyService::~AudioPolicyService()
diff --git a/services/camera/libcameraservice/Android.mk b/services/camera/libcameraservice/Android.mk
index 87975af..b52fc69 100644
--- a/services/camera/libcameraservice/Android.mk
+++ b/services/camera/libcameraservice/Android.mk
@@ -49,7 +49,8 @@ LOCAL_SHARED_LIBRARIES:= \
     libcutils \
     libmedia \
     libcamera_client \
-    libsurfaceflinger_client
+    libsurfaceflinger_client \
+    libgui

 LOCAL_MODULE:= libcameraservice
diff --git a/services/camera/libcameraservice/CameraHardwareStub.cpp b/services/camera/libcameraservice/CameraHardwareStub.cpp
index b3e0ee6..07b5a37 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.cpp
+++ b/services/camera/libcameraservice/CameraHardwareStub.cpp
@@ -101,9 +101,9 @@ CameraHardwareStub::~CameraHardwareStub()
     mFakeCamera = 0; // paranoia
 }

-sp<IMemoryHeap> CameraHardwareStub::getPreviewHeap() const
+status_t CameraHardwareStub::setPreviewWindow(const sp<ANativeWindow>& buf)
 {
-    return mPreviewHeap;
+    return NO_ERROR;
 }

 sp<IMemoryHeap> CameraHardwareStub::getRawHeap() const
diff --git a/services/camera/libcameraservice/CameraHardwareStub.h b/services/camera/libcameraservice/CameraHardwareStub.h
index d3427ba..9b66a76 100644
--- a/services/camera/libcameraservice/CameraHardwareStub.h
+++ b/services/camera/libcameraservice/CameraHardwareStub.h
@@ -29,7 +29,7 @@ namespace android {

 class CameraHardwareStub : public CameraHardwareInterface {
 public:
-    virtual sp<IMemoryHeap> getPreviewHeap() const;
+    virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf);
     virtual sp<IMemoryHeap> getRawHeap() const;

     virtual void setCallbacks(notify_callback notify_cb,
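With getPreviewHeap() gone from the stub HAL, preview frames no longer travel through an IMemoryHeap that the service posts to an ISurface; the HAL is instead handed an ANativeWindow via setPreviewWindow() and dequeues/queues buffers on it itself. The stub above simply accepts the window. As a hedged sketch of what a fuller HAL might do with it (the native_window_* helpers are assumed to be available in this platform version, and mPreviewWindow/mPreviewWidth/mPreviewHeight are hypothetical members, not part of the stub):

    // Hypothetical HAL-side handling of the preview window (error checks elided).
    status_t CameraHardwareFake::setPreviewWindow(const sp<ANativeWindow>& window) {
        mPreviewWindow = window;
        if (mPreviewWindow == 0) {
            return NO_ERROR;            // preview target torn down
        }
        // Describe how buffers will be filled before the preview loop
        // starts dequeueing and enqueueing them.
        native_window_set_usage(mPreviewWindow.get(), GRALLOC_USAGE_SW_WRITE_OFTEN);
        native_window_set_buffer_count(mPreviewWindow.get(), 4);
        native_window_set_buffers_geometry(mPreviewWindow.get(),
                mPreviewWidth, mPreviewHeight, HAL_PIXEL_FORMAT_YCrCb_420_SP);
        return NO_ERROR;
    }
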
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index a64ddcf..3d8ca7a 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -26,11 +26,12 @@
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <cutils/atomic.h>
+#include <cutils/properties.h>
+#include <gui/SurfaceTextureClient.h>
 #include <hardware/hardware.h>
 #include <media/AudioSystem.h>
 #include <media/mediaplayer.h>
 #include <surfaceflinger/ISurface.h>
-#include <ui/Overlay.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
@@ -305,9 +306,9 @@ CameraService::Client::Client(const sp<CameraService>& cameraService,
     mCameraId = cameraId;
     mCameraFacing = cameraFacing;
     mClientPid = clientPid;
-    mUseOverlay = mHardware->useOverlay();
     mMsgEnabled = 0;
-
+    mSurface = 0;
+    mPreviewWindow = 0;
     mHardware->setCallbacks(notifyCallback,
                             dataCallback,
                             dataCallbackTimestamp,
@@ -317,42 +318,21 @@ CameraService::Client::Client(const sp<CameraService>& cameraService,
     enableMsgType(CAMERA_MSG_ERROR | CAMERA_MSG_ZOOM | CAMERA_MSG_FOCUS);
-    mOverlayW = 0;
-    mOverlayH = 0;
     // Callback is disabled by default
     mPreviewCallbackFlag = FRAME_CALLBACK_FLAG_NOOP;
     mOrientation = getOrientation(0, mCameraFacing == CAMERA_FACING_FRONT);
-    mOrientationChanged = false;
+    mPlayShutterSound = true;
     cameraService->setCameraBusy(cameraId);
     cameraService->loadSound();
     LOG1("Client::Client X (pid %d)", callingPid);
 }

-static void *unregister_surface(void *arg) {
-    ISurface *surface = (ISurface *)arg;
-    surface->unregisterBuffers();
-    IPCThreadState::self()->flushCommands();
-    return NULL;
-}
-
 // tear down the client
 CameraService::Client::~Client() {
     int callingPid = getCallingPid();
     LOG1("Client::~Client E (pid %d, this %p)", callingPid, this);

-    if (mSurface != 0 && !mUseOverlay) {
-        pthread_t thr;
-        // We unregister the buffers in a different thread because binder does
-        // not let us make sychronous transactions in a binder destructor (that
-        // is, upon our reaching a refcount of zero.)
-        pthread_create(&thr,
-                       NULL,  // attr
-                       unregister_surface,
-                       mSurface.get());
-        pthread_join(thr, NULL);
-    }
-
     // set mClientPid to let disconnet() tear down the hardware
     mClientPid = callingPid;
     disconnect();
@@ -466,9 +446,11 @@ void CameraService::Client::disconnect() {
         mHardware->cancelPicture();
         // Release the hardware resources.
         mHardware->release();
-        // Release the held overlay resources.
-        if (mUseOverlay) {
-            mOverlayRef = 0;
+
+        // Release the held ANativeWindow resources.
+        if (mPreviewWindow != 0) {
+            mPreviewWindow = 0;
+            mHardware->setPreviewWindow(mPreviewWindow);
         }
         mHardware.clear();
@@ -480,8 +462,8 @@ void CameraService::Client::disconnect() {

 // ----------------------------------------------------------------------------

-// set the ISurface that the preview will use
-status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {
+// set the Surface that the preview will use
+status_t CameraService::Client::setPreviewDisplay(const sp<Surface>& surface) {
     LOG1("setPreviewDisplay(%p) (pid %d)", surface.get(), getCallingPid());
     Mutex::Autolock lock(mLock);
     status_t result = checkPidAndHardware();
@@ -491,100 +473,64 @@ status_t CameraService::Client::setPreviewDisplay(const sp<ISurface>& surface) {

     // return if no change in surface.
     // asBinder() is safe on NULL (returns NULL)
-    if (surface->asBinder() == mSurface->asBinder()) {
+    if (getISurface(surface)->asBinder() == mSurface) {
         return result;
     }

     if (mSurface != 0) {
         LOG1("clearing old preview surface %p", mSurface.get());
-        if (mUseOverlay) {
-            // Force the destruction of any previous overlay
-            sp<Overlay> dummy;
-            mHardware->setOverlay(dummy);
-            mOverlayRef = 0;
-        } else {
-            mSurface->unregisterBuffers();
-        }
     }
-    mSurface = surface;
-    mOverlayRef = 0;
-    // If preview has been already started, set overlay or register preview
+    mSurface = getISurface(surface)->asBinder();
+    mPreviewWindow = surface;
+
+    // If preview has been already started, register preview
     // buffers now.
     if (mHardware->previewEnabled()) {
-        if (mUseOverlay) {
-            result = setOverlay();
-        } else if (mSurface != 0) {
-            result = registerPreviewBuffers();
+        if (mPreviewWindow != 0) {
+            native_window_set_buffers_transform(mPreviewWindow.get(),
+                                                mOrientation);
+            result = mHardware->setPreviewWindow(mPreviewWindow);
         }
     }

     return result;
 }
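setPreviewDisplay() now takes an sp<Surface>, records only its ISurface binder for "same target set twice" detection, and hands the surface straight to the HAL as an ANativeWindow after applying the display orientation with native_window_set_buffers_transform(). From an application's point of view the call order is unchanged; a minimal, hypothetical client-side sketch (error handling elided, 'surface' is an sp<Surface> the application already owns):

    // Hypothetical app-side usage of the native camera client.
    sp<Camera> camera = Camera::connect(0 /* camera id */);
    if (camera != 0 && surface != 0) {
        camera->setPreviewDisplay(surface);   // service stores the binder and the window
        camera->startPreview();               // HAL receives the window and starts streaming
    }
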
-status_t CameraService::Client::registerPreviewBuffers() {
-    int w, h;
-    CameraParameters params(mHardware->getParameters());
-    params.getPreviewSize(&w, &h);
-
-    // FIXME: don't use a hardcoded format here.
-    ISurface::BufferHeap buffers(w, h, w, h,
-                                 HAL_PIXEL_FORMAT_YCrCb_420_SP,
-                                 mOrientation,
-                                 0,
-                                 mHardware->getPreviewHeap());
+// set the SurfaceTexture that the preview will use
+status_t CameraService::Client::setPreviewTexture(
+        const sp<ISurfaceTexture>& surfaceTexture) {
+    LOG1("setPreviewTexture(%p) (pid %d)", surfaceTexture.get(),
+            getCallingPid());
+    Mutex::Autolock lock(mLock);
+    status_t result = checkPidAndHardware();
+    if (result != NO_ERROR) return result;

-    status_t result = mSurface->registerBuffers(buffers);
-    if (result != NO_ERROR) {
-        LOGE("registerBuffers failed with status %d", result);
+    // return if no change in surface.
+    // asBinder() is safe on NULL (returns NULL)
+    if (surfaceTexture->asBinder() == mSurface) {
+        return result;
     }
-    return result;
-}
-
-status_t CameraService::Client::setOverlay() {
-    int w, h;
-    CameraParameters params(mHardware->getParameters());
-    params.getPreviewSize(&w, &h);
-    if (w != mOverlayW || h != mOverlayH || mOrientationChanged) {
-        // Force the destruction of any previous overlay
-        sp<Overlay> dummy;
-        mHardware->setOverlay(dummy);
-        mOverlayRef = 0;
-        mOrientationChanged = false;
+    if (mSurface != 0) {
+        LOG1("clearing old preview surface %p", mSurface.get());
     }
-
-    status_t result = NO_ERROR;
-    if (mSurface == 0) {
-        result = mHardware->setOverlay(NULL);
+    mSurface = surfaceTexture->asBinder();
+    if (surfaceTexture != 0) {
+        mPreviewWindow = new SurfaceTextureClient(surfaceTexture);
     } else {
-        if (mOverlayRef == 0) {
-            // FIXME:
-            // Surfaceflinger may hold onto the previous overlay reference for some
-            // time after we try to destroy it. retry a few times. In the future, we
-            // should make the destroy call block, or possibly specify that we can
-            // wait in the createOverlay call if the previous overlay is in the
-            // process of being destroyed.
-            for (int retry = 0; retry < 50; ++retry) {
-                mOverlayRef = mSurface->createOverlay(w, h, OVERLAY_FORMAT_DEFAULT,
-                                                      mOrientation);
-                if (mOverlayRef != 0) break;
-                LOGW("Overlay create failed - retrying");
-                usleep(20000);
-            }
-            if (mOverlayRef == 0) {
-                LOGE("Overlay Creation Failed!");
-                return -EINVAL;
-            }
-            result = mHardware->setOverlay(new Overlay(mOverlayRef));
-        }
-    }
-    if (result != NO_ERROR) {
-        LOGE("mHardware->setOverlay() failed with status %d\n", result);
-        return result;
+        mPreviewWindow = 0;
     }
-    mOverlayW = w;
-    mOverlayH = h;
+    // If preview has been already started, set overlay or register preview
+    // buffers now.
+    if (mHardware->previewEnabled()) {
+        // XXX: What if the new preview window is 0?
+        if (mPreviewWindow != 0) {
+            native_window_set_buffers_transform(mPreviewWindow.get(),
+                                                mOrientation);
+            result = mHardware->setPreviewWindow(mPreviewWindow);
+        }
+    }

     return result;
 }
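setPreviewTexture() mirrors setPreviewDisplay() but wraps the incoming ISurfaceTexture in a SurfaceTextureClient, so from this point on the HAL only ever sees an ANativeWindow regardless of which binder type the application supplied. A condensed sketch of that normalization (assuming <gui/SurfaceTextureClient.h>, as included above):

    // Both preview targets reduce to the same window type before reaching the HAL.
    static sp<ANativeWindow> windowFor(const sp<ISurfaceTexture>& texture) {
        if (texture == 0) {
            return 0;                               // clears the preview target
        }
        return new SurfaceTextureClient(texture);   // ANativeWindow over ISurfaceTexture
    }
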
@@ -597,16 +543,10 @@ void CameraService::Client::setPreviewCallbackFlag(int callback_flag) {
     if (checkPidAndHardware() != NO_ERROR) return;

     mPreviewCallbackFlag = callback_flag;
-
-    // If we don't use overlay, we always need the preview frame for display.
-    // If we do use overlay, we only need the preview frame if the user
-    // wants the data.
-    if (mUseOverlay) {
-        if(mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
-            enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        } else {
-            disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        }
+    if (mPreviewCallbackFlag & FRAME_CALLBACK_FLAG_ENABLE_MASK) {
+        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
+    } else {
+        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     }
 }

@@ -631,14 +571,14 @@ status_t CameraService::Client::startCameraMode(camera_mode mode) {

     switch(mode) {
         case CAMERA_PREVIEW_MODE:
-            if (mSurface == 0) {
+            if (mSurface == 0 && mPreviewWindow == 0) {
                 LOG1("mSurface is not set yet.");
                 // still able to start preview in this case.
             }
             return startPreviewMode();
         case CAMERA_RECORDING_MODE:
-            if (mSurface == 0) {
-                LOGE("mSurface must be set before startRecordingMode.");
+            if (mSurface == 0 && mPreviewWindow == 0) {
+                LOGE("mSurface or mPreviewWindow must be set before startRecordingMode.");
                 return INVALID_OPERATION;
             }
             return startRecordingMode();
@@ -656,25 +596,13 @@ status_t CameraService::Client::startPreviewMode() {
         return NO_ERROR;
     }

-    if (mUseOverlay) {
-        // If preview display has been set, set overlay now.
-        if (mSurface != 0) {
-            result = setOverlay();
-        }
-        if (result != NO_ERROR) return result;
-        result = mHardware->startPreview();
-    } else {
-        enableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        result = mHardware->startPreview();
-        if (result != NO_ERROR) return result;
-        // If preview display has been set, register preview buffers now.
-        if (mSurface != 0) {
-            // Unregister here because the surface may be previously registered
-            // with the raw (snapshot) heap.
-            mSurface->unregisterBuffers();
-            result = registerPreviewBuffers();
-        }
+    if (mPreviewWindow != 0) {
+        native_window_set_buffers_transform(mPreviewWindow.get(),
+                mOrientation);
     }
+    mHardware->setPreviewWindow(mPreviewWindow);
+    result = mHardware->startPreview();
+
     return result;
 }

@@ -711,13 +639,10 @@ void CameraService::Client::stopPreview() {
     Mutex::Autolock lock(mLock);
     if (checkPidAndHardware() != NO_ERROR) return;

+    disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     mHardware->stopPreview();

-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->unregisterBuffers();
-    }
-
     mPreviewBuffer.clear();
 }
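The next hunk adds the recording-side plumbing: getNumberOfVideoBuffers() and getVideoBuffer() expose the HAL's pre-allocated frame memory, and storeMetaDataInBuffers() lets a recorder request metadata (buffer references) instead of copied YUV data. A hypothetical recorder-side sequence, assuming the matching Camera client calls and eliding error handling:

    // Sketch only: prefer metadata mode, otherwise enumerate the real video buffers.
    status_t prepareVideoSource(const sp<Camera>& camera) {
        if (camera->storeMetaDataInBuffers(true) != NO_ERROR) {
            int32_t count = camera->getNumberOfVideoBuffers();
            for (int32_t i = 0; i < count; ++i) {
                sp<IMemory> frame = camera->getVideoBuffer(i);
                // ... register 'frame' with the encoder's input buffer pool ...
            }
        }
        return camera->startRecording();
    }
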
"true": "false"); + Mutex::Autolock lock(mLock); + if (checkPidAndHardware() != NO_ERROR) { + return UNKNOWN_ERROR; + } + return mHardware->storeMetaDataInBuffers(enabled); +} + bool CameraService::Client::previewEnabled() { LOG1("previewEnabled (pid %d)", getCallingPid()); @@ -815,6 +764,35 @@ String8 CameraService::Client::getParameters() const { return params; } +// enable shutter sound +status_t CameraService::Client::enableShutterSound(bool enable) { + LOG1("enableShutterSound (pid %d)", getCallingPid()); + + status_t result = checkPidAndHardware(); + if (result != NO_ERROR) return result; + + if (enable) { + mPlayShutterSound = true; + return OK; + } + + // Disabling shutter sound may not be allowed. In that case only + // allow the mediaserver process to disable the sound. + char value[PROPERTY_VALUE_MAX]; + property_get("ro.camera.sound.forced", value, "0"); + if (strcmp(value, "0") != 0) { + // Disabling shutter sound is not allowed. Deny if the current + // process is not mediaserver. + if (getCallingPid() != getpid()) { + LOGE("Failed to disable shutter sound. Permission denied (pid %d)", getCallingPid()); + return PERMISSION_DENIED; + } + } + + mPlayShutterSound = false; + return OK; +} + status_t CameraService::Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { LOG1("sendCommand (pid %d)", getCallingPid()); int orientation; @@ -833,9 +811,22 @@ status_t CameraService::Client::sendCommand(int32_t cmd, int32_t arg1, int32_t a if (mOrientation != orientation) { mOrientation = orientation; - if (mOverlayRef != 0) mOrientationChanged = true; } return OK; + } else if (cmd == CAMERA_CMD_ENABLE_SHUTTER_SOUND) { + switch (arg1) { + case 0: + enableShutterSound(false); + break; + case 1: + enableShutterSound(true); + break; + default: + return BAD_VALUE; + } + return OK; + } else if (cmd == CAMERA_CMD_PLAY_RECORDING_SOUND) { + mCameraService->playSound(SOUND_RECORDING); } return mHardware->sendCommand(cmd, arg1, arg2); @@ -996,11 +987,8 @@ void CameraService::Client::dataCallbackTimestamp(nsecs_t timestamp, // "size" is the width and height of yuv picture for registerBuffer. // If it is NULL, use the picture size from parameters. void CameraService::Client::handleShutter(image_rect_type *size) { - mCameraService->playSound(SOUND_SHUTTER); - - // Screen goes black after the buffer is unregistered. - if (mSurface != 0 && !mUseOverlay) { - mSurface->unregisterBuffers(); + if (mPlayShutterSound) { + mCameraService->playSound(SOUND_SHUTTER); } sp<ICameraClient> c = mCameraClient; @@ -1011,29 +999,6 @@ void CameraService::Client::handleShutter(image_rect_type *size) { } disableMsgType(CAMERA_MSG_SHUTTER); - // It takes some time before yuvPicture callback to be called. - // Register the buffer for raw image here to reduce latency. 
@@ -996,11 +987,8 @@ void CameraService::Client::dataCallbackTimestamp(nsecs_t timestamp,
 // "size" is the width and height of yuv picture for registerBuffer.
 // If it is NULL, use the picture size from parameters.
 void CameraService::Client::handleShutter(image_rect_type *size) {
-    mCameraService->playSound(SOUND_SHUTTER);
-
-    // Screen goes black after the buffer is unregistered.
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->unregisterBuffers();
+    if (mPlayShutterSound) {
+        mCameraService->playSound(SOUND_SHUTTER);
     }

     sp<ICameraClient> c = mCameraClient;
@@ -1011,29 +999,6 @@ void CameraService::Client::handleShutter(image_rect_type *size) {
     }
     disableMsgType(CAMERA_MSG_SHUTTER);

-    // It takes some time before yuvPicture callback to be called.
-    // Register the buffer for raw image here to reduce latency.
-    if (mSurface != 0 && !mUseOverlay) {
-        int w, h;
-        CameraParameters params(mHardware->getParameters());
-        if (size == NULL) {
-            params.getPictureSize(&w, &h);
-        } else {
-            w = size->width;
-            h = size->height;
-            w &= ~1;
-            h &= ~1;
-            LOG1("Snapshot image width=%d, height=%d", w, h);
-        }
-        // FIXME: don't use hardcoded format constants here
-        ISurface::BufferHeap buffers(w, h, w, h,
-            HAL_PIXEL_FORMAT_YCrCb_420_SP, mOrientation, 0,
-            mHardware->getRawHeap());
-
-        mSurface->registerBuffers(buffers);
-        IPCThreadState::self()->flushCommands();
-    }
-
     mLock.unlock();
 }

@@ -1043,12 +1008,6 @@ void CameraService::Client::handlePreviewData(const sp<IMemory>& mem) {
     size_t size;
     sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);

-    if (!mUseOverlay) {
-        if (mSurface != 0) {
-            mSurface->postBuffer(offset);
-        }
-    }
-
     // local copy of the callback flags
     int flags = mPreviewCallbackFlag;

@@ -1069,9 +1028,7 @@ void CameraService::Client::handlePreviewData(const sp<IMemory>& mem) {
         mPreviewCallbackFlag &= ~(FRAME_CALLBACK_FLAG_ONE_SHOT_MASK |
                                   FRAME_CALLBACK_FLAG_COPY_OUT_MASK |
                                   FRAME_CALLBACK_FLAG_ENABLE_MASK);
-        if (mUseOverlay) {
-            disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
-        }
+        disableMsgType(CAMERA_MSG_PREVIEW_FRAME);
     }

     if (c != 0) {
@@ -1108,11 +1065,6 @@ void CameraService::Client::handleRawPicture(const sp<IMemory>& mem) {
     size_t size;
     sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);

-    // Put the YUV version of the snapshot in the preview display.
-    if (mSurface != 0 && !mUseOverlay) {
-        mSurface->postBuffer(offset);
-    }
-
     sp<ICameraClient> c = mCameraClient;
     mLock.unlock();
     if (c != 0) {
@@ -1292,4 +1244,12 @@ status_t CameraService::dump(int fd, const Vector<String16>& args) {
     return NO_ERROR;
 }

+sp<ISurface> CameraService::getISurface(const sp<Surface>& surface) {
+    if (surface != 0) {
+        return surface->getISurface();
+    } else {
+        return sp<ISurface>(0);
+    }
+}
+
 }; // namespace android
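getISurface() exists only so that the nested Client class can obtain a Surface's ISurface binder without Surface having to befriend a type declared in the service's header; Client then keeps just that binder in mSurface and uses it for "same target set twice" detection. Roughly, as a hypothetical fragment mirroring the check in setPreviewDisplay() above (not literally compilable outside CameraService, since the helper is private):

    // mSurface holds only an identity token; comparisons are against binders,
    // never against the concrete Surface/SurfaceTexture objects themselves.
    bool sameAsCurrentTarget(const sp<IBinder>& current, const sp<Surface>& surface) {
        return CameraService::getISurface(surface)->asBinder() == current;
    }
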
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index f09773d..ccb9cf7 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -79,6 +79,12 @@ private:
     sp<MediaPlayer>     mSoundPlayer[NUM_SOUNDS];
     int                 mSoundRef;  // reference count (release all MediaPlayer when 0)

+    // Used by Client objects to extract the ISurface from a Surface object.
+    // This is used because making Client a friend class of Surface would
+    // require including this header in Surface.h since Client is a nested
+    // class.
+    static sp<ISurface> getISurface(const sp<Surface>& surface);
+
     class Client : public BnCamera {
     public:
@@ -87,11 +93,15 @@ private:
         virtual status_t        connect(const sp<ICameraClient>& client);
         virtual status_t        lock();
         virtual status_t        unlock();
-        virtual status_t        setPreviewDisplay(const sp<ISurface>& surface);
+        virtual status_t        setPreviewDisplay(const sp<Surface>& surface);
+        virtual status_t        setPreviewTexture(const sp<ISurfaceTexture>& surfaceTexture);
         virtual void            setPreviewCallbackFlag(int flag);
         virtual status_t        startPreview();
         virtual void            stopPreview();
         virtual bool            previewEnabled();
+        virtual int32_t         getNumberOfVideoBuffers() const;
+        virtual sp<IMemory>     getVideoBuffer(int32_t index) const;
+        virtual status_t        storeMetaDataInBuffers(bool enabled);
         virtual status_t        startRecording();
         virtual void            stopRecording();
        virtual bool            recordingEnabled();
@@ -121,7 +131,6 @@ private:

         // these are internal functions used to set up preview buffers
         status_t                registerPreviewBuffers();
-        status_t                setOverlay();

         // camera operation mode
         enum camera_mode {
@@ -133,6 +142,9 @@ private:
         status_t                startPreviewMode();
         status_t                startRecordingMode();

+        // internal function used by sendCommand to enable/disable shutter sound.
+        status_t                enableShutterSound(bool enable);
+
         // these are static callback functions
         static void             notifyCallback(int32_t msgType, int32_t ext1, int32_t ext2, void* user);
         static void             dataCallback(int32_t msgType, const sp<IMemory>& dataPtr, void* user);
@@ -163,18 +175,15 @@ private:
         int                     mCameraFacing;    // immutable after constructor
         pid_t                   mClientPid;
         sp<CameraHardwareInterface>     mHardware;  // cleared after disconnect()
-        bool                    mUseOverlay;      // immutable after constructor
-        sp<OverlayRef>          mOverlayRef;
-        int                     mOverlayW;
-        int                     mOverlayH;
         int                     mPreviewCallbackFlag;
         int                     mOrientation;     // Current display orientation
-        // True if display orientation has been changed. This is only used in overlay.
-        int                     mOrientationChanged;
+        bool                    mPlayShutterSound;

         // Ensures atomicity among the public methods
         mutable Mutex           mLock;
-        sp<ISurface>            mSurface;
+        // This is a binder of Surface or SurfaceTexture.
+        sp<IBinder>             mSurface;
+        sp<ANativeWindow>       mPreviewWindow;

         // If the user want us to return a copy of the preview frame (instead
         // of the original one), we allocate mPreviewBuffer and reuse it if possible.
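Taken together, the header changes reduce the overlay-era per-client preview state to three fields. A condensed view of how the remaining fields are used, with comments summarizing the .cpp changes earlier in this patch (the grouping is editorial; the declarations themselves mirror the hunk above):

    sp<IBinder>       mSurface;          // identity of the last preview target (Surface or SurfaceTexture binder)
    sp<ANativeWindow> mPreviewWindow;    // what is actually handed to CameraHardwareInterface::setPreviewWindow()
    bool              mPlayShutterSound; // toggled through CAMERA_CMD_ENABLE_SHUTTER_SOUND
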