diff options
Diffstat (limited to 'media/libstagefright')
-rw-r--r-- | media/libstagefright/Android.mk | 2 | ||||
-rw-r--r-- | media/libstagefright/AwesomePlayer.cpp | 65 | ||||
-rw-r--r-- | media/libstagefright/CameraSource.cpp | 42 | ||||
-rw-r--r-- | media/libstagefright/CameraSourceTimeLapse.cpp | 350 | ||||
-rw-r--r-- | media/libstagefright/NuHTTPDataSource.cpp | 2 | ||||
-rw-r--r-- | media/libstagefright/OMXCodec.cpp | 23 | ||||
-rw-r--r-- | media/libstagefright/colorconversion/Android.mk | 8 | ||||
-rw-r--r-- | media/libstagefright/colorconversion/SoftwareRenderer.cpp | 173 | ||||
-rw-r--r-- | media/libstagefright/httplive/M3UParser.cpp | 2 | ||||
-rw-r--r-- | media/libstagefright/include/AwesomePlayer.h | 3 | ||||
-rw-r--r-- | media/libstagefright/include/SoftwareRenderer.h | 18 | ||||
-rw-r--r-- | media/libstagefright/omx/OMX.cpp | 5 | ||||
-rw-r--r-- | media/libstagefright/rtsp/ARTSPConnection.cpp | 2 | ||||
-rw-r--r-- | media/libstagefright/rtsp/ASessionDescription.cpp | 2 | ||||
-rw-r--r-- | media/libstagefright/rtsp/MyHandler.h | 2 | ||||
-rw-r--r-- | media/libstagefright/yuv/Android.mk | 13 | ||||
-rw-r--r-- | media/libstagefright/yuv/YUVCanvas.cpp | 111 | ||||
-rw-r--r-- | media/libstagefright/yuv/YUVImage.cpp | 413 |
18 files changed, 1159 insertions, 77 deletions
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk index 86fa668..b5a6327 100644 --- a/media/libstagefright/Android.mk +++ b/media/libstagefright/Android.mk @@ -10,6 +10,7 @@ LOCAL_SRC_FILES:= \ AudioSource.cpp \ AwesomePlayer.cpp \ CameraSource.cpp \ + CameraSourceTimeLapse.cpp \ DataSource.cpp \ ESDS.cpp \ FileSource.cpp \ @@ -58,6 +59,7 @@ LOCAL_SHARED_LIBRARIES := \ libsonivox \ libvorbisidec \ libsurfaceflinger_client \ + libstagefright_yuv \ libcamera_client LOCAL_STATIC_LIBRARIES := \ diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp index f2653cf..b7beb6b 100644 --- a/media/libstagefright/AwesomePlayer.cpp +++ b/media/libstagefright/AwesomePlayer.cpp @@ -44,7 +44,7 @@ #include <media/stagefright/MetaData.h> #include <media/stagefright/OMXCodec.h> -#include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> #include <media/stagefright/foundation/ALooper.h> @@ -97,13 +97,14 @@ struct AwesomeLocalRenderer : public AwesomeRenderer { bool previewOnly, const char *componentName, OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, + const sp<ISurface> &isurface, + const sp<Surface> &surface, size_t displayWidth, size_t displayHeight, size_t decodedWidth, size_t decodedHeight) : mTarget(NULL), mLibHandle(NULL) { init(previewOnly, componentName, - colorFormat, surface, displayWidth, + colorFormat, isurface, surface, displayWidth, displayHeight, decodedWidth, decodedHeight); } @@ -135,7 +136,8 @@ private: bool previewOnly, const char *componentName, OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, + const sp<ISurface> &isurface, + const sp<Surface> &surface, size_t displayWidth, size_t displayHeight, size_t decodedWidth, size_t decodedHeight); @@ -147,7 +149,8 @@ void AwesomeLocalRenderer::init( bool previewOnly, const char *componentName, OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, + const sp<ISurface> &isurface, + const sp<Surface> 
&surface, size_t displayWidth, size_t displayHeight, size_t decodedWidth, size_t decodedHeight) { if (!previewOnly) { @@ -173,7 +176,7 @@ void AwesomeLocalRenderer::init( if (func) { mTarget = - (*func)(surface, componentName, colorFormat, + (*func)(isurface, componentName, colorFormat, displayWidth, displayHeight, decodedWidth, decodedHeight); } @@ -619,8 +622,18 @@ status_t AwesomePlayer::play_l() { return OK; } +void AwesomePlayer::notifyVideoSize_l() { + sp<MetaData> meta = mVideoSource->getFormat(); + + int32_t decodedWidth, decodedHeight; + CHECK(meta->findInt32(kKeyWidth, &decodedWidth)); + CHECK(meta->findInt32(kKeyHeight, &decodedHeight)); + + notifyListener_l(MEDIA_SET_VIDEO_SIZE, decodedWidth, decodedHeight); +} + void AwesomePlayer::initRenderer_l() { - if (mISurface != NULL) { + if (mSurface != NULL || mISurface != NULL) { sp<MetaData> meta = mVideoSource->getFormat(); int32_t format; @@ -637,17 +650,7 @@ void AwesomePlayer::initRenderer_l() { // before creating a new one. IPCThreadState::self()->flushCommands(); - if (!strncmp("OMX.", component, 4)) { - // Our OMX codecs allocate buffers on the media_server side - // therefore they require a remote IOMXRenderer that knows how - // to display them. - mVideoRenderer = new AwesomeRemoteRenderer( - mClient.interface()->createRenderer( - mISurface, component, - (OMX_COLOR_FORMATTYPE)format, - decodedWidth, decodedHeight, - mVideoWidth, mVideoHeight)); - } else { + if (mSurface != NULL) { // Other decoders are instantiated locally and as a consequence // allocate their buffers in local address space. mVideoRenderer = new AwesomeLocalRenderer( @@ -655,8 +658,19 @@ void AwesomePlayer::initRenderer_l() { component, (OMX_COLOR_FORMATTYPE)format, mISurface, + mSurface, mVideoWidth, mVideoHeight, decodedWidth, decodedHeight); + } else { + // Our OMX codecs allocate buffers on the media_server side + // therefore they require a remote IOMXRenderer that knows how + // to display them. 
+ mVideoRenderer = new AwesomeRemoteRenderer( + mClient.interface()->createRenderer( + mISurface, component, + (OMX_COLOR_FORMATTYPE)format, + decodedWidth, decodedHeight, + mVideoWidth, mVideoHeight)); } } } @@ -695,6 +709,12 @@ void AwesomePlayer::setISurface(const sp<ISurface> &isurface) { mISurface = isurface; } +void AwesomePlayer::setSurface(const sp<Surface> &surface) { + Mutex::Autolock autoLock(mLock); + + mSurface = surface; +} + void AwesomePlayer::setAudioSink( const sp<MediaPlayerBase::AudioSink> &audioSink) { Mutex::Autolock autoLock(mLock); @@ -937,6 +957,8 @@ void AwesomePlayer::onVideoEvent() { if (err == INFO_FORMAT_CHANGED) { LOGV("VideoSource signalled format change."); + notifyVideoSize_l(); + if (mVideoRenderer != NULL) { mVideoRendererIsPreview = false; initRenderer_l(); @@ -1422,10 +1444,10 @@ void AwesomePlayer::onPrepareAsyncEvent() { Mutex::Autolock autoLock(mLock); if (mIsAsyncPrepare) { - if (mVideoWidth < 0 || mVideoHeight < 0) { + if (mVideoSource == NULL) { notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0); } else { - notifyListener_l(MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight); + notifyVideoSize_l(); } notifyListener_l(MEDIA_PREPARED); @@ -1540,13 +1562,14 @@ status_t AwesomePlayer::resume() { mFlags = state->mFlags & (LOOPING | AT_EOS); - if (state->mLastVideoFrame && mISurface != NULL) { + if (state->mLastVideoFrame && (mSurface != NULL || mISurface != NULL)) { mVideoRenderer = new AwesomeLocalRenderer( true, // previewOnly "", (OMX_COLOR_FORMATTYPE)state->mColorFormat, mISurface, + mSurface, state->mVideoWidth, state->mVideoHeight, state->mDecodedWidth, diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp index 3e31d61..9ccd140 100644 --- a/media/libstagefright/CameraSource.cpp +++ b/media/libstagefright/CameraSource.cpp @@ -65,6 +65,11 @@ void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) { void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> 
&dataPtr) { LOGV("postData(%d, ptr:%p, size:%d)", msgType, dataPtr->pointer(), dataPtr->size()); + + sp<CameraSource> source = mSource.promote(); + if (source.get() != NULL) { + source->dataCallback(msgType, dataPtr); + } } void CameraSourceListener::postDataTimestamp( @@ -118,15 +123,15 @@ CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) { CameraSource::CameraSource(const sp<Camera> &camera) : mCamera(camera), - mFirstFrameTimeUs(0), - mLastFrameTimestampUs(0), mNumFramesReceived(0), + mLastFrameTimestampUs(0), + mStarted(false), + mFirstFrameTimeUs(0), mNumFramesEncoded(0), mNumFramesDropped(0), mNumGlitches(0), mGlitchDurationThresholdUs(200000), - mCollectStats(false), - mStarted(false) { + mCollectStats(false) { int64_t token = IPCThreadState::self()->clearCallingIdentity(); String8 s = mCamera->getParameters(); @@ -161,7 +166,6 @@ CameraSource::CameraSource(const sp<Camera> &camera) mMeta->setInt32(kKeyHeight, height); mMeta->setInt32(kKeyStride, stride); mMeta->setInt32(kKeySliceHeight, sliceHeight); - } CameraSource::~CameraSource() { @@ -170,6 +174,10 @@ CameraSource::~CameraSource() { } } +void CameraSource::startCameraRecording() { + CHECK_EQ(OK, mCamera->startRecording()); +} + status_t CameraSource::start(MetaData *meta) { CHECK(!mStarted); @@ -187,13 +195,17 @@ status_t CameraSource::start(MetaData *meta) { int64_t token = IPCThreadState::self()->clearCallingIdentity(); mCamera->setListener(new CameraSourceListener(this)); - CHECK_EQ(OK, mCamera->startRecording()); + startCameraRecording(); IPCThreadState::self()->restoreCallingIdentity(token); mStarted = true; return OK; } +void CameraSource::stopCameraRecording() { + mCamera->stopRecording(); +} + status_t CameraSource::stop() { LOGV("stop"); Mutex::Autolock autoLock(mLock); @@ -202,7 +214,7 @@ status_t CameraSource::stop() { int64_t token = IPCThreadState::self()->clearCallingIdentity(); mCamera->setListener(NULL); - mCamera->stopRecording(); + stopCameraRecording(); 
releaseQueuedFrames(); while (!mFramesBeingEncoded.empty()) { LOGI("Waiting for outstanding frames being encoded: %d", @@ -222,11 +234,15 @@ status_t CameraSource::stop() { return OK; } +void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) { + mCamera->releaseRecordingFrame(frame); +} + void CameraSource::releaseQueuedFrames() { List<sp<IMemory> >::iterator it; while (!mFramesReceived.empty()) { it = mFramesReceived.begin(); - mCamera->releaseRecordingFrame(*it); + releaseRecordingFrame(*it); mFramesReceived.erase(it); ++mNumFramesDropped; } @@ -238,7 +254,7 @@ sp<MetaData> CameraSource::getFormat() { void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) { int64_t token = IPCThreadState::self()->clearCallingIdentity(); - mCamera->releaseRecordingFrame(frame); + releaseRecordingFrame(frame); IPCThreadState::self()->restoreCallingIdentity(token); } @@ -248,7 +264,6 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) { for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin(); it != mFramesBeingEncoded.end(); ++it) { if ((*it)->pointer() == buffer->data()) { - releaseOneRecordingFrame((*it)); mFramesBeingEncoded.erase(it); ++mNumFramesEncoded; @@ -340,6 +355,13 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs, ++mNumGlitches; } + // May need to skip frame or modify timestamp. Currently implemented + // by the subclass CameraSourceTimeLapse. 
+ if(skipCurrentFrame(timestampUs)) { + releaseOneRecordingFrame(data); + return; + } + mLastFrameTimestampUs = timestampUs; if (mNumFramesReceived == 0) { mFirstFrameTimeUs = timestampUs; diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp new file mode 100644 index 0000000..ba99501 --- /dev/null +++ b/media/libstagefright/CameraSourceTimeLapse.cpp @@ -0,0 +1,350 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +//#define LOG_NDEBUG 0 +#define LOG_TAG "CameraSourceTimeLapse" + +#include <binder/IPCThreadState.h> +#include <binder/MemoryBase.h> +#include <binder/MemoryHeapBase.h> +#include <media/stagefright/CameraSource.h> +#include <media/stagefright/CameraSourceTimeLapse.h> +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/MetaData.h> +#include <media/stagefright/YUVImage.h> +#include <media/stagefright/YUVCanvas.h> +#include <camera/Camera.h> +#include <camera/CameraParameters.h> +#include <ui/Rect.h> +#include <utils/String8.h> +#include <utils/Vector.h> +#include "OMX_Video.h" +#include <limits.h> + +namespace android { + +// static +CameraSourceTimeLapse *CameraSourceTimeLapse::Create(bool useStillCameraForTimeLapse, + int64_t timeBetweenTimeLapseFrameCaptureUs, + int32_t width, int32_t height, + int32_t videoFrameRate) { + sp<Camera> camera = Camera::connect(0); + + if (camera.get() == NULL) { + return NULL; + } + + return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse, + timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate); +} + +// static +CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(const sp<Camera> &camera, + bool useStillCameraForTimeLapse, + int64_t timeBetweenTimeLapseFrameCaptureUs, + int32_t width, int32_t height, + int32_t videoFrameRate) { + if (camera.get() == NULL) { + return NULL; + } + + return new CameraSourceTimeLapse(camera, useStillCameraForTimeLapse, + timeBetweenTimeLapseFrameCaptureUs, width, height, videoFrameRate); +} + +CameraSourceTimeLapse::CameraSourceTimeLapse(const sp<Camera> &camera, + bool useStillCameraForTimeLapse, + int64_t timeBetweenTimeLapseFrameCaptureUs, + int32_t width, int32_t height, + int32_t videoFrameRate) + : CameraSource(camera), + mUseStillCameraForTimeLapse(useStillCameraForTimeLapse), + mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs), + mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate), + 
mLastTimeLapseFrameRealTimestampUs(0), + mSkipCurrentFrame(false) { + + LOGV("starting time lapse mode"); + mVideoWidth = width; + mVideoHeight = height; + if (mUseStillCameraForTimeLapse) { + CHECK(setPictureSizeToClosestSupported(width, height)); + mNeedCropping = computeCropRectangleOffset(); + mMeta->setInt32(kKeyWidth, width); + mMeta->setInt32(kKeyHeight, height); + } +} + +CameraSourceTimeLapse::~CameraSourceTimeLapse() { +} + +bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) { + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + String8 s = mCamera->getParameters(); + IPCThreadState::self()->restoreCallingIdentity(token); + + CameraParameters params(s); + Vector<Size> supportedSizes; + params.getSupportedPictureSizes(supportedSizes); + + int32_t minPictureSize = INT_MAX; + for (uint32_t i = 0; i < supportedSizes.size(); ++i) { + int32_t pictureWidth = supportedSizes[i].width; + int32_t pictureHeight = supportedSizes[i].height; + + if ((pictureWidth >= width) && (pictureHeight >= height)) { + int32_t pictureSize = pictureWidth*pictureHeight; + if (pictureSize < minPictureSize) { + minPictureSize = pictureSize; + mPictureWidth = pictureWidth; + mPictureHeight = pictureHeight; + } + } + } + LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight); + return (minPictureSize != INT_MAX); +} + +bool CameraSourceTimeLapse::computeCropRectangleOffset() { + if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) { + return false; + } + + CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight)); + + int32_t widthDifference = mPictureWidth - mVideoWidth; + int32_t heightDifference = mPictureHeight - mVideoHeight; + + mCropRectStartX = widthDifference/2; + mCropRectStartY = heightDifference/2; + + LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY); + + return true; +} + +// static +void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) { 
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); + source->threadTimeLapseEntry(); + return NULL; +} + +void CameraSourceTimeLapse::threadTimeLapseEntry() { + while(mStarted) { + if (mCameraIdle) { + LOGV("threadTimeLapseEntry: taking picture"); + CHECK_EQ(OK, mCamera->takePicture()); + mCameraIdle = false; + usleep(mTimeBetweenTimeLapseFrameCaptureUs); + } else { + LOGV("threadTimeLapseEntry: camera busy with old takePicture. Sleeping a little."); + usleep(1E4); + } + } +} + +void CameraSourceTimeLapse::startCameraRecording() { + if (mUseStillCameraForTimeLapse) { + LOGV("start time lapse recording using still camera"); + + int64_t token = IPCThreadState::self()->clearCallingIdentity(); + String8 s = mCamera->getParameters(); + IPCThreadState::self()->restoreCallingIdentity(token); + + CameraParameters params(s); + params.setPictureSize(mPictureWidth, mPictureHeight); + mCamera->setParameters(params.flatten()); + mCameraIdle = true; + + // create a thread which takes pictures in a loop + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); + + pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this); + pthread_attr_destroy(&attr); + } else { + LOGV("start time lapse recording using video camera"); + CHECK_EQ(OK, mCamera->startRecording()); + } +} + +void CameraSourceTimeLapse::stopCameraRecording() { + if (mUseStillCameraForTimeLapse) { + void *dummy; + pthread_join(mThreadTimeLapse, &dummy); + CHECK_EQ(OK, mCamera->startPreview()); + } else { + mCamera->stopRecording(); + } +} + +void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) { + if (!mUseStillCameraForTimeLapse) { + mCamera->releaseRecordingFrame(frame); + } +} + +sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) { + size_t source_size = source_data->size(); + void* source_pointer = source_data->pointer(); + + sp<MemoryHeapBase> newMemoryHeap = new 
MemoryHeapBase(source_size); + sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size); + memcpy(newMemory->pointer(), source_pointer, source_size); + return newMemory; +} + +// Allocates IMemory of final type MemoryBase with the given size. +sp<IMemory> allocateIMemory(size_t size) { + sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size); + sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size); + return newMemory; +} + +// static +void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) { + CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me); + source->threadStartPreview(); + return NULL; +} + +void CameraSourceTimeLapse::threadStartPreview() { + CHECK_EQ(OK, mCamera->startPreview()); + mCameraIdle = true; +} + +void CameraSourceTimeLapse::restartPreview() { + // Start this in a different thread, so that the dataCallback can return + LOGV("restartPreview"); + pthread_attr_t attr; + pthread_attr_init(&attr); + pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED); + + pthread_t threadPreview; + pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this); + pthread_attr_destroy(&attr); +} + +sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) { + // find the YUV format + int32_t srcFormat; + CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat)); + YUVImage::YUVFormat yuvFormat; + if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) { + yuvFormat = YUVImage::YUV420SemiPlanar; + } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) { + yuvFormat = YUVImage::YUV420Planar; + } + + // allocate memory for cropped image and setup a canvas using it. 
+ sp<IMemory> croppedImageMemory = allocateIMemory( + YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight)); + YUVImage yuvImageCropped(yuvFormat, + mVideoWidth, mVideoHeight, + (uint8_t *)croppedImageMemory->pointer()); + YUVCanvas yuvCanvasCrop(yuvImageCropped); + + YUVImage yuvImageSource(yuvFormat, + mPictureWidth, mPictureHeight, + (uint8_t *)source_data->pointer()); + yuvCanvasCrop.CopyImageRect( + Rect(mCropRectStartX, mCropRectStartY, + mCropRectStartX + mVideoWidth, + mCropRectStartY + mVideoHeight), + 0, 0, + yuvImageSource); + + return croppedImageMemory; +} + +void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) { + if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) { + // takePicture will complete after this callback, so restart preview. + restartPreview(); + return; + } + if (msgType != CAMERA_MSG_RAW_IMAGE) { + return; + } + + LOGV("dataCallback for timelapse still frame"); + CHECK_EQ(true, mUseStillCameraForTimeLapse); + + int64_t timestampUs; + if (mNumFramesReceived == 0) { + timestampUs = mStartTimeUs; + } else { + timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; + } + + if (mNeedCropping) { + sp<IMemory> croppedImageData = cropYUVImage(data); + dataCallbackTimestamp(timestampUs, msgType, croppedImageData); + } else { + sp<IMemory> dataCopy = createIMemoryCopy(data); + dataCallbackTimestamp(timestampUs, msgType, dataCopy); + } +} + +bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) { + if (mSkipCurrentFrame) { + mSkipCurrentFrame = false; + return true; + } else { + return false; + } +} + +bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) { + if (!mUseStillCameraForTimeLapse) { + if (mLastTimeLapseFrameRealTimestampUs == 0) { + // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs + // to current time (timestampUs) and save frame data. 
+ LOGV("dataCallbackTimestamp timelapse: initial frame"); + + mLastTimeLapseFrameRealTimestampUs = *timestampUs; + } else if (*timestampUs < + (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) { + // Skip all frames from last encoded frame until + // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed. + // Tell the camera to release its recording frame and return. + LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame"); + return true; + } else { + // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time: + // - Reset mLastTimeLapseFrameRealTimestampUs to current time. + // - Artificially modify timestampUs to be one frame time (1/framerate) ahead + // of the last encoded frame's time stamp. + LOGV("dataCallbackTimestamp timelapse: got timelapse frame"); + + mLastTimeLapseFrameRealTimestampUs = *timestampUs; + *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs; + } + } + return false; +} + +void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType, + const sp<IMemory> &data) { + if (!mUseStillCameraForTimeLapse) { + mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs); + } + CameraSource::dataCallbackTimestamp(timestampUs, msgType, data); +} + +} // namespace android diff --git a/media/libstagefright/NuHTTPDataSource.cpp b/media/libstagefright/NuHTTPDataSource.cpp index ab9285d..332bab3 100644 --- a/media/libstagefright/NuHTTPDataSource.cpp +++ b/media/libstagefright/NuHTTPDataSource.cpp @@ -42,7 +42,7 @@ static bool ParseURL( path->setTo(slashPos); } - char *colonPos = strchr(host->string(), ':'); + const char *colonPos = strchr(host->string(), ':'); if (colonPos != NULL) { unsigned long x; diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp index 4741b1d..b39157e 100644 --- a/media/libstagefright/OMXCodec.cpp +++ b/media/libstagefright/OMXCodec.cpp @@ -146,29 +146,36 @@ static sp<MediaSource>
InstantiateSoftwareCodec( static const CodecInfo kDecoderInfo[] = { { MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" }, + { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" }, // { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" }, { MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" }, // { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" }, // { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" }, + { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" }, { MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" }, // { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" }, + { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" }, { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" }, { MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" }, // { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" }, + { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" }, { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" }, { MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" }, // { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" }, { MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" }, { MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" }, + { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" }, { MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" }, // { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" }, + { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" }, { MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" }, { MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" }, // { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" }, @@ -199,6 +206,7 @@ static const CodecInfo 
kEncoderInfo[] = { { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" }, { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" }, + { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" }, { MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" }, // { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" }, }; @@ -337,6 +345,13 @@ static int CompareSoftwareCodecsFirst( uint32_t OMXCodec::getComponentQuirks(const char *componentName) { uint32_t quirks = 0; + if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") || + !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") || + !strcmp(componentName, "OMX.Nvidia.aac.decoder") || + !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) { + quirks |= kDecoderLiesAboutNumberOfChannels; + } + if (!strcmp(componentName, "OMX.PV.avcdec")) { quirks |= kWantsNALFragments; } @@ -854,6 +869,10 @@ void OMXCodec::setVideoInputFormat( OMX_COLOR_FORMATTYPE colorFormat; CHECK_EQ(OK, findTargetColorFormat(meta, &colorFormat)); + if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) { + colorFormat = OMX_COLOR_FormatYUV420Planar; + } + status_t err; OMX_PARAM_PORTDEFINITIONTYPE def; OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; @@ -1193,6 +1212,10 @@ status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) { h264type.bMBAFF = OMX_FALSE; h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; + if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) { + h264type.eLevel = OMX_VIDEO_AVCLevelMax; + } + err = mOMX->setParameter( mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); CHECK_EQ(err, OK); diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk index b9ba1be..2b63235 100644 --- a/media/libstagefright/colorconversion/Android.mk +++ b/media/libstagefright/colorconversion/Android.mk @@ -6,7 +6,8 @@ LOCAL_SRC_FILES:= \ SoftwareRenderer.cpp LOCAL_C_INCLUDES := \ - 
$(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include + $(TOP)/external/opencore/extern_libs_v2/khronos/openmax/include \ + $(TOP)/hardware/msm7k LOCAL_SHARED_LIBRARIES := \ libbinder \ @@ -17,6 +18,11 @@ LOCAL_SHARED_LIBRARIES := \ libsurfaceflinger_client\ libcamera_client +# ifeq ($(TARGET_BOARD_PLATFORM),msm7k) +ifeq ($(TARGET_PRODUCT),passion) + LOCAL_CFLAGS += -DHAS_YCBCR420_SP_ADRENO +endif + LOCAL_MODULE:= libstagefright_color_conversion include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp index a6dbf69..507fa5a 100644 --- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp +++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp @@ -22,65 +22,172 @@ #include <binder/MemoryHeapBase.h> #include <binder/MemoryHeapPmem.h> #include <media/stagefright/MediaDebug.h> -#include <surfaceflinger/ISurface.h> +#include <surfaceflinger/Surface.h> +#include <ui/android_native_buffer.h> +#include <ui/GraphicBufferMapper.h> + +// XXX: Temporary hack to allow referencing the _ADRENO pixel format here. 
+#include <libgralloc-qsd8k/gralloc_priv.h> namespace android { SoftwareRenderer::SoftwareRenderer( OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, + const sp<Surface> &surface, size_t displayWidth, size_t displayHeight, size_t decodedWidth, size_t decodedHeight) : mColorFormat(colorFormat), - mConverter(colorFormat, OMX_COLOR_Format16bitRGB565), - mISurface(surface), + mConverter(NULL), + mYUVMode(None), + mSurface(surface), mDisplayWidth(displayWidth), mDisplayHeight(displayHeight), mDecodedWidth(decodedWidth), - mDecodedHeight(decodedHeight), - mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565 - mIndex(0) { - mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize); - if (mMemoryHeap->heapID() < 0) { - LOGI("Creating physical memory heap failed, reverting to regular heap."); - mMemoryHeap = new MemoryHeapBase(2 * mFrameSize); - } else { - sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap); - pmemHeap->slap(); - mMemoryHeap = pmemHeap; + mDecodedHeight(decodedHeight) { + LOGI("input format = %d", mColorFormat); + LOGI("display = %d x %d, decoded = %d x %d", + mDisplayWidth, mDisplayHeight, mDecodedWidth, mDecodedHeight); + + int halFormat; + switch (mColorFormat) { +#if HAS_YCBCR420_SP_ADRENO + case OMX_COLOR_FormatYUV420Planar: + { + halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; + mYUVMode = YUV420ToYUV420sp; + break; + } + + case 0x7fa30c00: + { + halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; + mYUVMode = YUV420spToYUV420sp; + break; + } +#endif + + default: + halFormat = HAL_PIXEL_FORMAT_RGB_565; + + mConverter = new ColorConverter( + mColorFormat, OMX_COLOR_Format16bitRGB565); + CHECK(mConverter->isValid()); + break; } - CHECK(mISurface.get() != NULL); + CHECK(mSurface.get() != NULL); CHECK(mDecodedWidth > 0); CHECK(mDecodedHeight > 0); - CHECK(mMemoryHeap->heapID() >= 0); - CHECK(mConverter.isValid()); + CHECK(mConverter == NULL || mConverter->isValid()); - ISurface::BufferHeap bufferHeap( - 
mDisplayWidth, mDisplayHeight, - mDecodedWidth, mDecodedHeight, - PIXEL_FORMAT_RGB_565, - mMemoryHeap); + CHECK_EQ(0, + native_window_set_usage( + mSurface.get(), + GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN + | GRALLOC_USAGE_HW_TEXTURE)); - status_t err = mISurface->registerBuffers(bufferHeap); - CHECK_EQ(err, OK); + CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2)); + + // Width must be multiple of 32??? + CHECK_EQ(0, native_window_set_buffers_geometry( + mSurface.get(), mDecodedWidth, mDecodedHeight, + halFormat)); } SoftwareRenderer::~SoftwareRenderer() { - mISurface->unregisterBuffers(); + delete mConverter; + mConverter = NULL; +} + +static inline size_t ALIGN(size_t x, size_t alignment) { + return (x + alignment - 1) & ~(alignment - 1); } void SoftwareRenderer::render( const void *data, size_t size, void *platformPrivate) { - size_t offset = mIndex * mFrameSize; - void *dst = (uint8_t *)mMemoryHeap->getBase() + offset; + android_native_buffer_t *buf; + CHECK_EQ(0, mSurface->dequeueBuffer(mSurface.get(), &buf)); + CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf)); + + GraphicBufferMapper &mapper = GraphicBufferMapper::get(); + + Rect bounds(mDecodedWidth, mDecodedHeight); + + void *dst; + CHECK_EQ(0, mapper.lock( + buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst)); + + if (mConverter) { + mConverter->convert( + mDecodedWidth, mDecodedHeight, + data, 0, dst, buf->stride * 2); + } else if (mYUVMode == YUV420spToYUV420sp) { + // Input and output are both YUV420sp, but the alignment requirements + // are different. 
+ size_t srcYStride = mDecodedWidth; + const uint8_t *srcY = (const uint8_t *)data; + uint8_t *dstY = (uint8_t *)dst; + for (size_t i = 0; i < mDecodedHeight; ++i) { + memcpy(dstY, srcY, mDecodedWidth); + srcY += srcYStride; + dstY += buf->stride; + } + + size_t srcUVStride = (mDecodedWidth + 1) & ~1; + size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2; + + const uint8_t *srcUV = (const uint8_t *)data + + mDecodedHeight * mDecodedWidth; + + size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096); + uint8_t *dstUV = (uint8_t *)dst + dstUVOffset; + + for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) { + memcpy(dstUV, srcUV, (mDecodedWidth + 1) & ~1); + srcUV += srcUVStride; + dstUV += dstUVStride; + } + } else if (mYUVMode == YUV420ToYUV420sp) { + // Input is YUV420 planar, output is YUV420sp, adhere to proper + // alignment requirements. + size_t srcYStride = mDecodedWidth; + const uint8_t *srcY = (const uint8_t *)data; + uint8_t *dstY = (uint8_t *)dst; + for (size_t i = 0; i < mDecodedHeight; ++i) { + memcpy(dstY, srcY, mDecodedWidth); + srcY += srcYStride; + dstY += buf->stride; + } + + size_t srcUVStride = (mDecodedWidth + 1) / 2; + size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2; + + const uint8_t *srcU = (const uint8_t *)data + + mDecodedHeight * mDecodedWidth; + + const uint8_t *srcV = + srcU + ((mDecodedWidth + 1) / 2) * ((mDecodedHeight + 1) / 2); + + size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096); + uint8_t *dstUV = (uint8_t *)dst + dstUVOffset; + + for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) { + for (size_t j = 0; j < (mDecodedWidth + 1) / 2; ++j) { + dstUV[2 * j + 1] = srcU[j]; + dstUV[2 * j] = srcV[j]; + } + srcU += srcUVStride; + srcV += srcUVStride; + dstUV += dstUVStride; + } + } else { + memcpy(dst, data, size); + } - mConverter.convert( - mDecodedWidth, mDecodedHeight, - data, 0, dst, 2 * mDecodedWidth); + CHECK_EQ(0, mapper.unlock(buf->handle)); - mISurface->postBuffer(offset); - 
mIndex = 1 - mIndex; + CHECK_EQ(0, mSurface->queueBuffer(mSurface.get(), buf)); + buf = NULL; } } // namespace android diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp index edd8648..17771c4 100644 --- a/media/libstagefright/httplive/M3UParser.cpp +++ b/media/libstagefright/httplive/M3UParser.cpp @@ -90,7 +90,7 @@ static bool MakeURL(const char *baseURL, const char *url, AString *out) { out->setTo(baseURL); out->append(url); } else { - char *slashPos = strrchr(baseURL, '/'); + const char *slashPos = strrchr(baseURL, '/'); if (slashPos > &baseURL[6]) { out->setTo(baseURL, slashPos - baseURL); diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h index 55e2c36..f34eb45 100644 --- a/media/libstagefright/include/AwesomePlayer.h +++ b/media/libstagefright/include/AwesomePlayer.h @@ -76,6 +76,7 @@ struct AwesomePlayer { bool isPlaying() const; void setISurface(const sp<ISurface> &isurface); + void setSurface(const sp<Surface> &surface); void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink); status_t setLooping(bool shouldLoop); @@ -117,6 +118,7 @@ private: wp<MediaPlayerBase> mListener; sp<ISurface> mISurface; + sp<Surface> mSurface; sp<MediaPlayerBase::AudioSink> mAudioSink; SystemTimeSource mSystemTimeSource; @@ -219,6 +221,7 @@ private: status_t seekTo_l(int64_t timeUs); status_t pause_l(); void initRenderer_l(); + void notifyVideoSize_l(); void seekAudioIfNecessary_l(); void cancelPlayerEvents(bool keepBufferingGoing = false); diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h index 9eed089..8d58056 100644 --- a/media/libstagefright/include/SoftwareRenderer.h +++ b/media/libstagefright/include/SoftwareRenderer.h @@ -24,14 +24,14 @@ namespace android { -class ISurface; +class Surface; class MemoryHeapBase; class SoftwareRenderer : public VideoRenderer { public: SoftwareRenderer( 
OMX_COLOR_FORMATTYPE colorFormat, - const sp<ISurface> &surface, + const sp<Surface> &surface, size_t displayWidth, size_t displayHeight, size_t decodedWidth, size_t decodedHeight); @@ -41,14 +41,18 @@ public: const void *data, size_t size, void *platformPrivate); private: + enum YUVMode { + None, + YUV420ToYUV420sp, + YUV420spToYUV420sp, + }; + OMX_COLOR_FORMATTYPE mColorFormat; - ColorConverter mConverter; - sp<ISurface> mISurface; + ColorConverter *mConverter; + YUVMode mYUVMode; + sp<Surface> mSurface; size_t mDisplayWidth, mDisplayHeight; size_t mDecodedWidth, mDecodedHeight; - size_t mFrameSize; - sp<MemoryHeapBase> mMemoryHeap; - int mIndex; SoftwareRenderer(const SoftwareRenderer &); SoftwareRenderer &operator=(const SoftwareRenderer &); diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp index 6de761f..88b9605 100644 --- a/media/libstagefright/omx/OMX.cpp +++ b/media/libstagefright/omx/OMX.cpp @@ -495,12 +495,17 @@ sp<IOMXRenderer> OMX::createRenderer( } if (!impl) { +#if 0 LOGW("Using software renderer."); impl = new SoftwareRenderer( colorFormat, surface, displayWidth, displayHeight, encodedWidth, encodedHeight); +#else + CHECK(!"Should not be here."); + return NULL; +#endif } return new OMXRenderer(impl); diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp index 5f8f5fd..5345218 100644 --- a/media/libstagefright/rtsp/ARTSPConnection.cpp +++ b/media/libstagefright/rtsp/ARTSPConnection.cpp @@ -116,7 +116,7 @@ bool ARTSPConnection::ParseURL( path->setTo(slashPos); } - char *colonPos = strchr(host->c_str(), ':'); + const char *colonPos = strchr(host->c_str(), ':'); if (colonPos != NULL) { unsigned long x; diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp index 4ea7fda..4a8cce8 100644 --- a/media/libstagefright/rtsp/ASessionDescription.cpp +++ b/media/libstagefright/rtsp/ASessionDescription.cpp @@ -182,7 +182,7 
@@ void ASessionDescription::getFormatType( AString format; getFormat(index, &format); - char *lastSpacePos = strrchr(format.c_str(), ' '); + const char *lastSpacePos = strrchr(format.c_str(), ' '); CHECK(lastSpacePos != NULL); char *end; diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h index 90070c9..b2419bf 100644 --- a/media/libstagefright/rtsp/MyHandler.h +++ b/media/libstagefright/rtsp/MyHandler.h @@ -462,7 +462,7 @@ private: out->setTo(baseURL); out->append(url); } else { - char *slashPos = strrchr(baseURL, '/'); + const char *slashPos = strrchr(baseURL, '/'); if (slashPos > &baseURL[6]) { out->setTo(baseURL, slashPos - baseURL); diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk new file mode 100644 index 0000000..0794ad1 --- /dev/null +++ b/media/libstagefright/yuv/Android.mk @@ -0,0 +1,13 @@ +LOCAL_PATH:= $(call my-dir) +include $(CLEAR_VARS) + +LOCAL_SRC_FILES:= \ + YUVImage.cpp \ + YUVCanvas.cpp + +LOCAL_SHARED_LIBRARIES := \ + libcutils + +LOCAL_MODULE:= libstagefright_yuv + +include $(BUILD_SHARED_LIBRARY) diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp new file mode 100644 index 0000000..38aa779 --- /dev/null +++ b/media/libstagefright/yuv/YUVCanvas.cpp @@ -0,0 +1,111 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_NDEBUG 0 +#define LOG_TAG "YUVCanvas" + +#include <media/stagefright/MediaDebug.h> +#include <media/stagefright/YUVCanvas.h> +#include <media/stagefright/YUVImage.h> +#include <ui/Rect.h> + +namespace android { + +YUVCanvas::YUVCanvas(YUVImage &yuvImage) + : mYUVImage(yuvImage) { +} + +YUVCanvas::~YUVCanvas() { +} + +void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) { + for (int32_t y = 0; y < mYUVImage.height(); ++y) { + for (int32_t x = 0; x < mYUVImage.width(); ++x) { + mYUVImage.setPixelValue(x, y, yValue, uValue, vValue); + } + } +} + +void YUVCanvas::FillYUVRectangle(const Rect& rect, + uint8_t yValue, uint8_t uValue, uint8_t vValue) { + for (int32_t y = rect.top; y < rect.bottom; ++y) { + for (int32_t x = rect.left; x < rect.right; ++x) { + mYUVImage.setPixelValue(x, y, yValue, uValue, vValue); + } + } +} + +void YUVCanvas::CopyImageRect( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage) { + + // Try fast copy first + if (YUVImage::fastCopyRectangle( + srcRect, + destStartX, destStartY, + srcImage, mYUVImage)) { + return; + } + + int32_t srcStartX = srcRect.left; + int32_t srcStartY = srcRect.top; + for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) { + for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) { + int32_t srcX = srcStartX + offsetX; + int32_t srcY = srcStartY + offsetY; + + int32_t destX = destStartX + offsetX; + int32_t destY = destStartY + offsetY; + + uint8_t yValue; + uint8_t uValue; + uint8_t vValue; + + srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue); + mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue); + } + } +} + +void YUVCanvas::downsample( + int32_t srcOffsetX, int32_t srcOffsetY, + int32_t skipX, int32_t skipY, + const YUVImage &srcImage) { + // TODO: Add a low pass filter for downsampling. + + // Check that srcImage is big enough to fill mYUVImage. 
+ CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width()); + CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height()); + + uint8_t yValue; + uint8_t uValue; + uint8_t vValue; + + int32_t srcY = srcOffsetY; + for (int32_t y = 0; y < mYUVImage.height(); ++y) { + int32_t srcX = srcOffsetX; + for (int32_t x = 0; x < mYUVImage.width(); ++x) { + srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue); + mYUVImage.setPixelValue(x, y, yValue, uValue, vValue); + + srcX += skipX; + } + srcY += skipY; + } +} + +} // namespace android diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp new file mode 100644 index 0000000..b712062 --- /dev/null +++ b/media/libstagefright/yuv/YUVImage.cpp @@ -0,0 +1,413 @@ +/* + * Copyright (C) 2010 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define LOG_NDEBUG 0 +#define LOG_TAG "YUVImage" + +#include <media/stagefright/YUVImage.h> +#include <ui/Rect.h> +#include <media/stagefright/MediaDebug.h> + +namespace android { + +YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) { + mYUVFormat = yuvFormat; + mWidth = width; + mHeight = height; + + size_t numberOfBytes = bufferSize(yuvFormat, width, height); + uint8_t *buffer = new uint8_t[numberOfBytes]; + mBuffer = buffer; + mOwnBuffer = true; + + initializeYUVPointers(); +} + +YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) { + mYUVFormat = yuvFormat; + mWidth = width; + mHeight = height; + mBuffer = buffer; + mOwnBuffer = false; + + initializeYUVPointers(); +} + +//static +size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) { + int32_t numberOfPixels = width*height; + size_t numberOfBytes = 0; + if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) { + // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each. + numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1)); + } else { + LOGE("Format not supported"); + } + return numberOfBytes; +} + +bool YUVImage::initializeYUVPointers() { + int32_t numberOfPixels = mWidth * mHeight; + + if (mYUVFormat == YUV420Planar) { + mYdata = (uint8_t *)mBuffer; + mUdata = mYdata + numberOfPixels; + mVdata = mUdata + (numberOfPixels >> 2); + } else if (mYUVFormat == YUV420SemiPlanar) { + // U and V channels are interleaved as VUVUVU. + // So V data starts at the end of Y channel and + // U data starts right after V's start. 
+ mYdata = (uint8_t *)mBuffer; + mVdata = mYdata + numberOfPixels; + mUdata = mVdata + 1; + } else { + LOGE("Format not supported"); + return false; + } + return true; +} + +YUVImage::~YUVImage() { + if (mOwnBuffer) delete[] mBuffer; +} + +bool YUVImage::getOffsets(int32_t x, int32_t y, + int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const { + *yOffset = y*mWidth + x; + + int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1); + if (mYUVFormat == YUV420Planar) { + *uOffset = uvOffset; + *vOffset = uvOffset; + } else if (mYUVFormat == YUV420SemiPlanar) { + // Since U and V channels are interleaved, offsets need + // to be doubled. + *uOffset = 2*uvOffset; + *vOffset = 2*uvOffset; + } else { + LOGE("Format not supported"); + return false; + } + + return true; +} + +bool YUVImage::getOffsetIncrementsPerDataRow( + int32_t *yDataOffsetIncrement, + int32_t *uDataOffsetIncrement, + int32_t *vDataOffsetIncrement) const { + *yDataOffsetIncrement = mWidth; + + int32_t uvDataOffsetIncrement = mWidth >> 1; + + if (mYUVFormat == YUV420Planar) { + *uDataOffsetIncrement = uvDataOffsetIncrement; + *vDataOffsetIncrement = uvDataOffsetIncrement; + } else if (mYUVFormat == YUV420SemiPlanar) { + // Since U and V channels are interleaved, offsets need + // to be doubled. 
+ *uDataOffsetIncrement = 2*uvDataOffsetIncrement; + *vDataOffsetIncrement = 2*uvDataOffsetIncrement; + } else { + LOGE("Format not supported"); + return false; + } + + return true; +} + +uint8_t* YUVImage::getYAddress(int32_t offset) const { + return mYdata + offset; +} + +uint8_t* YUVImage::getUAddress(int32_t offset) const { + return mUdata + offset; +} + +uint8_t* YUVImage::getVAddress(int32_t offset) const { + return mVdata + offset; +} + +bool YUVImage::getYUVAddresses(int32_t x, int32_t y, + uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const { + int32_t yOffset; + int32_t uOffset; + int32_t vOffset; + if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false; + + *yAddr = getYAddress(yOffset); + *uAddr = getUAddress(uOffset); + *vAddr = getVAddress(vOffset); + + return true; +} + +bool YUVImage::validPixel(int32_t x, int32_t y) const { + return (x >= 0 && x < mWidth && + y >= 0 && y < mHeight); +} + +bool YUVImage::getPixelValue(int32_t x, int32_t y, + uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const { + CHECK(validPixel(x, y)); + + uint8_t *yAddr; + uint8_t *uAddr; + uint8_t *vAddr; + if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false; + + *yPtr = *yAddr; + *uPtr = *uAddr; + *vPtr = *vAddr; + + return true; +} + +bool YUVImage::setPixelValue(int32_t x, int32_t y, + uint8_t yValue, uint8_t uValue, uint8_t vValue) { + CHECK(validPixel(x, y)); + + uint8_t *yAddr; + uint8_t *uAddr; + uint8_t *vAddr; + if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false; + + *yAddr = yValue; + *uAddr = uValue; + *vAddr = vValue; + + return true; +} + +void YUVImage::fastCopyRectangle420Planar( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage) { + CHECK(srcImage.mYUVFormat == YUV420Planar); + CHECK(destImage.mYUVFormat == YUV420Planar); + + int32_t srcStartX = srcRect.left; + int32_t srcStartY = srcRect.top; + int32_t width = srcRect.width(); + int32_t height = 
srcRect.height(); + + // Get source and destination start addresses + uint8_t *ySrcAddrBase; + uint8_t *uSrcAddrBase; + uint8_t *vSrcAddrBase; + srcImage.getYUVAddresses(srcStartX, srcStartY, + &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase); + + uint8_t *yDestAddrBase; + uint8_t *uDestAddrBase; + uint8_t *vDestAddrBase; + destImage.getYUVAddresses(destStartX, destStartY, + &yDestAddrBase, &uDestAddrBase, &vDestAddrBase); + + // Get source and destination offset increments incurred in going + // from one data row to next. + int32_t ySrcOffsetIncrement; + int32_t uSrcOffsetIncrement; + int32_t vSrcOffsetIncrement; + srcImage.getOffsetIncrementsPerDataRow( + &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement); + + int32_t yDestOffsetIncrement; + int32_t uDestOffsetIncrement; + int32_t vDestOffsetIncrement; + destImage.getOffsetIncrementsPerDataRow( + &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement); + + // Copy Y + { + size_t numberOfYBytesPerRow = (size_t) width; + uint8_t *ySrcAddr = ySrcAddrBase; + uint8_t *yDestAddr = yDestAddrBase; + for (int32_t offsetY = 0; offsetY < height; ++offsetY) { + memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow); + + ySrcAddr += ySrcOffsetIncrement; + yDestAddr += yDestOffsetIncrement; + } + } + + // Copy U + { + size_t numberOfUBytesPerRow = (size_t) (width >> 1); + uint8_t *uSrcAddr = uSrcAddrBase; + uint8_t *uDestAddr = uDestAddrBase; + // Every other row has an entry for U/V channel values. Hence only + // go half the height. + for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) { + memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow); + + uSrcAddr += uSrcOffsetIncrement; + uDestAddr += uDestOffsetIncrement; + } + } + + // Copy V + { + size_t numberOfVBytesPerRow = (size_t) (width >> 1); + uint8_t *vSrcAddr = vSrcAddrBase; + uint8_t *vDestAddr = vDestAddrBase; + // Every other pixel row has a U/V data row. Hence only go half the height. 
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) { + memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow); + + vSrcAddr += vSrcOffsetIncrement; + vDestAddr += vDestOffsetIncrement; + } + } +} + +void YUVImage::fastCopyRectangle420SemiPlanar( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage) { + CHECK(srcImage.mYUVFormat == YUV420SemiPlanar); + CHECK(destImage.mYUVFormat == YUV420SemiPlanar); + + int32_t srcStartX = srcRect.left; + int32_t srcStartY = srcRect.top; + int32_t width = srcRect.width(); + int32_t height = srcRect.height(); + + // Get source and destination start addresses + uint8_t *ySrcAddrBase; + uint8_t *uSrcAddrBase; + uint8_t *vSrcAddrBase; + srcImage.getYUVAddresses(srcStartX, srcStartY, + &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase); + + uint8_t *yDestAddrBase; + uint8_t *uDestAddrBase; + uint8_t *vDestAddrBase; + destImage.getYUVAddresses(destStartX, destStartY, + &yDestAddrBase, &uDestAddrBase, &vDestAddrBase); + + // Get source and destination offset increments incurred in going + // from one data row to next. + int32_t ySrcOffsetIncrement; + int32_t uSrcOffsetIncrement; + int32_t vSrcOffsetIncrement; + srcImage.getOffsetIncrementsPerDataRow( + &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement); + + int32_t yDestOffsetIncrement; + int32_t uDestOffsetIncrement; + int32_t vDestOffsetIncrement; + destImage.getOffsetIncrementsPerDataRow( + &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement); + + // Copy Y + { + size_t numberOfYBytesPerRow = (size_t) width; + uint8_t *ySrcAddr = ySrcAddrBase; + uint8_t *yDestAddr = yDestAddrBase; + for (int32_t offsetY = 0; offsetY < height; ++offsetY) { + memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow); + + ySrcAddr = ySrcAddr + ySrcOffsetIncrement; + yDestAddr = yDestAddr + yDestOffsetIncrement; + } + } + + // Copy UV + { + // UV are interleaved. So number of UV bytes per row is 2*(width/2). 
+ size_t numberOfUVBytesPerRow = (size_t) width; + uint8_t *vSrcAddr = vSrcAddrBase; + uint8_t *vDestAddr = vDestAddrBase; + // Every other pixel row has a U/V data row. Hence only go half the height. + for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) { + memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow); + + vSrcAddr += vSrcOffsetIncrement; + vDestAddr += vDestOffsetIncrement; + } + } +} + +// static +bool YUVImage::fastCopyRectangle( + const Rect& srcRect, + int32_t destStartX, int32_t destStartY, + const YUVImage &srcImage, YUVImage &destImage) { + if (srcImage.mYUVFormat == destImage.mYUVFormat) { + if (srcImage.mYUVFormat == YUV420Planar) { + fastCopyRectangle420Planar( + srcRect, + destStartX, destStartY, + srcImage, destImage); + } else if (srcImage.mYUVFormat == YUV420SemiPlanar) { + fastCopyRectangle420SemiPlanar( + srcRect, + destStartX, destStartY, + srcImage, destImage); + } + return true; + } + return false; +} + +uint8_t clamp(uint8_t v, uint8_t minValue, uint8_t maxValue) { + CHECK(maxValue >= minValue); + + if (v < minValue) return minValue; + else if (v > maxValue) return maxValue; + else return v; +} + +void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue, + uint8_t *r, uint8_t *g, uint8_t *b) const { + *r = yValue + (1.370705 * (vValue-128)); + *g = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128)); + *b = yValue + (1.732446 * (uValue-128)); + + *r = clamp(*r, 0, 255); + *g = clamp(*g, 0, 255); + *b = clamp(*b, 0, 255); +} + +bool YUVImage::writeToPPM(const char *filename) const { + FILE *fp = fopen(filename, "w"); + if (fp == NULL) { + return false; + } + fprintf(fp, "P3\n"); + fprintf(fp, "%d %d\n", mWidth, mHeight); + fprintf(fp, "255\n"); + for (int32_t y = 0; y < mHeight; ++y) { + for (int32_t x = 0; x < mWidth; ++x) { + uint8_t yValue; + uint8_t uValue; + uint8_t vValue; + getPixelValue(x, y, &yValue, &uValue, & vValue); + + uint8_t rValue; + uint8_t gValue; + uint8_t bValue; + yuv2rgb(yValue, 
uValue, vValue, &rValue, &gValue, &bValue); + + fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue); + } + } + fclose(fp); + return true; +} + +} // namespace android |