Diffstat (limited to 'media/libstagefright/omx')
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.cpp             147
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.h                 36
-rw-r--r--  media/libstagefright/omx/OMX.cpp                               11
-rw-r--r--  media/libstagefright/omx/OMXMaster.cpp                          2
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp                   62
-rw-r--r--  media/libstagefright/omx/SoftOMXPlugin.cpp                      2
-rw-r--r--  media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp    126
-rw-r--r--  media/libstagefright/omx/tests/Android.mk                       2
-rw-r--r--  media/libstagefright/omx/tests/OMXHarness.cpp                   6
9 files changed, 334 insertions, 60 deletions
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 3fe9c23..fad6c33 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -29,6 +29,8 @@
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
+#include <inttypes.h>
+
namespace android {
static const bool EXTRA_CHECK = true;
@@ -43,16 +45,21 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
mNumFramesAvailable(0),
mEndOfStream(false),
mEndOfStreamSent(false),
- mRepeatAfterUs(-1ll),
mMaxTimestampGapUs(-1ll),
mPrevOriginalTimeUs(-1ll),
mPrevModifiedTimeUs(-1ll),
+ mSkipFramesBeforeNs(-1ll),
+ mRepeatAfterUs(-1ll),
mRepeatLastFrameGeneration(0),
mRepeatLastFrameTimestamp(-1ll),
mLatestSubmittedBufferId(-1),
mLatestSubmittedBufferFrameNum(0),
mLatestSubmittedBufferUseCount(0),
- mRepeatBufferDeferred(false) {
+ mRepeatBufferDeferred(false),
+ mTimePerCaptureUs(-1ll),
+ mTimePerFrameUs(-1ll),
+ mPrevCaptureUs(-1ll),
+ mPrevFrameUs(-1ll) {
ALOGV("GraphicBufferSource w=%u h=%u c=%u",
bufferWidth, bufferHeight, bufferCount);
@@ -65,13 +72,12 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
String8 name("GraphicBufferSource");
- mBufferQueue = new BufferQueue();
- mBufferQueue->setConsumerName(name);
- mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
- mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
- GRALLOC_USAGE_HW_TEXTURE);
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setConsumerName(name);
+ mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER);
- mInitCheck = mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+ mInitCheck = mConsumer->setMaxAcquiredBufferCount(bufferCount);
if (mInitCheck != NO_ERROR) {
ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
bufferCount, mInitCheck);
@@ -85,7 +91,7 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this);
sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
- mInitCheck = mBufferQueue->consumerConnect(proxy, false);
+ mInitCheck = mConsumer->consumerConnect(proxy, false);
if (mInitCheck != NO_ERROR) {
ALOGE("Error connecting to BufferQueue: %s (%d)",
strerror(-mInitCheck), mInitCheck);
@@ -97,8 +103,8 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
GraphicBufferSource::~GraphicBufferSource() {
ALOGV("~GraphicBufferSource");
- if (mBufferQueue != NULL) {
- status_t err = mBufferQueue->consumerDisconnect();
+ if (mConsumer != NULL) {
+ status_t err = mConsumer->consumerDisconnect();
if (err != NO_ERROR) {
ALOGW("consumerDisconnect failed: %d", err);
}
@@ -107,7 +113,7 @@ GraphicBufferSource::~GraphicBufferSource() {
void GraphicBufferSource::omxExecuting() {
Mutex::Autolock autoLock(mMutex);
- ALOGV("--> executing; avail=%d, codec vec size=%zd",
+ ALOGV("--> executing; avail=%zu, codec vec size=%zd",
mNumFramesAvailable, mCodecBuffers.size());
CHECK(!mExecuting);
mExecuting = true;
@@ -129,7 +135,7 @@ void GraphicBufferSource::omxExecuting() {
}
}
- ALOGV("done loading initial frames, avail=%d", mNumFramesAvailable);
+ ALOGV("done loading initial frames, avail=%zu", mNumFramesAvailable);
// If EOS has already been signaled, and there are no more frames to
// submit, try to send EOS now as well.
@@ -181,7 +187,7 @@ void GraphicBufferSource::omxLoaded(){
mLooper.clear();
}
- ALOGV("--> loaded; avail=%d eos=%d eosSent=%d",
+ ALOGV("--> loaded; avail=%zu eos=%d eosSent=%d",
mNumFramesAvailable, mEndOfStream, mEndOfStreamSent);
// Codec is no longer executing. Discard all codec-related state.
@@ -270,7 +276,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
if (id == mLatestSubmittedBufferId) {
CHECK_GT(mLatestSubmittedBufferUseCount--, 0);
} else {
- mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber,
+ mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
}
} else {
@@ -284,7 +290,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
if (mNumFramesAvailable) {
// Fill this codec buffer.
CHECK(!mEndOfStreamSent);
- ALOGV("buffer freed, %d frames avail (eos=%d)",
+ ALOGV("buffer freed, %zu frames avail (eos=%d)",
mNumFramesAvailable, mEndOfStream);
fillCodecBuffer_l();
} else if (mEndOfStream) {
@@ -313,7 +319,8 @@ void GraphicBufferSource::codecBufferFilled(OMX_BUFFERHEADERTYPE* header) {
ssize_t index = mOriginalTimeUs.indexOfKey(header->nTimeStamp);
if (index >= 0) {
ALOGV("OUT timestamp: %lld -> %lld",
- header->nTimeStamp, mOriginalTimeUs[index]);
+ static_cast<long long>(header->nTimeStamp),
+ static_cast<long long>(mOriginalTimeUs[index]));
header->nTimeStamp = mOriginalTimeUs[index];
mOriginalTimeUs.removeItemsAt(index);
} else {
@@ -324,7 +331,7 @@ void GraphicBufferSource::codecBufferFilled(OMX_BUFFERHEADERTYPE* header) {
}
if (mOriginalTimeUs.size() > BufferQueue::NUM_BUFFER_SLOTS) {
// something terribly wrong must have happened, giving up...
- ALOGE("mOriginalTimeUs has too many entries (%d)",
+ ALOGE("mOriginalTimeUs has too many entries (%zu)",
mOriginalTimeUs.size());
mMaxTimestampGapUs = -1ll;
}
@@ -339,7 +346,7 @@ void GraphicBufferSource::suspend(bool suspend) {
while (mNumFramesAvailable > 0) {
BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// shouldn't happen.
@@ -352,7 +359,7 @@ void GraphicBufferSource::suspend(bool suspend) {
--mNumFramesAvailable;
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
}
return;
@@ -381,15 +388,15 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
int cbi = findAvailableCodecBuffer_l();
if (cbi < 0) {
// No buffers available, bail.
- ALOGV("fillCodecBuffer_l: no codec buffers, avail now %d",
+ ALOGV("fillCodecBuffer_l: no codec buffers, avail now %zu",
mNumFramesAvailable);
return false;
}
- ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d",
+ ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%zu",
mNumFramesAvailable);
BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// shouldn't happen
ALOGW("fillCodecBuffer_l: frame was not available");
@@ -416,10 +423,21 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
mBufferSlot[item.mBuf] = item.mGraphicBuffer;
}
- err = submitBuffer_l(item, cbi);
+ err = UNKNOWN_ERROR;
+
+ // only submit sample if start time is unspecified, or sample
+ // is queued after the specified start time
+ if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) {
+ // if start time is set, offset time stamp by start time
+ if (mSkipFramesBeforeNs > 0) {
+ item.mTimestamp -= mSkipFramesBeforeNs;
+ }
+ err = submitBuffer_l(item, cbi);
+ }
+
if (err != OK) {
ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf);
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
} else {
ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
@@ -442,7 +460,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
//
// To be on the safe side we try to release the buffer.
ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL");
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
mLatestSubmittedBufferId,
mLatestSubmittedBufferFrameNum,
EGL_NO_DISPLAY,
@@ -496,7 +514,7 @@ void GraphicBufferSource::setLatestSubmittedBuffer_l(
if (mLatestSubmittedBufferId >= 0) {
if (mLatestSubmittedBufferUseCount == 0) {
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
mLatestSubmittedBufferId,
mLatestSubmittedBufferFrameNum,
EGL_NO_DISPLAY,
@@ -522,7 +540,7 @@ void GraphicBufferSource::setLatestSubmittedBuffer_l(
status_t GraphicBufferSource::signalEndOfInputStream() {
Mutex::Autolock autoLock(mMutex);
- ALOGV("signalEndOfInputStream: exec=%d avail=%d eos=%d",
+ ALOGV("signalEndOfInputStream: exec=%d avail=%zu eos=%d",
mExecuting, mNumFramesAvailable, mEndOfStream);
if (mEndOfStream) {
@@ -550,7 +568,32 @@ status_t GraphicBufferSource::signalEndOfInputStream() {
int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) {
int64_t timeUs = item.mTimestamp / 1000;
- if (mMaxTimestampGapUs > 0ll) {
+ if (mTimePerCaptureUs > 0ll) {
+ // Time lapse or slow motion mode
+ if (mPrevCaptureUs < 0ll) {
+ // first capture
+ mPrevCaptureUs = timeUs;
+ mPrevFrameUs = timeUs;
+ } else {
+ // snap to nearest capture point
+ int64_t nFrames = (timeUs + mTimePerCaptureUs / 2 - mPrevCaptureUs)
+ / mTimePerCaptureUs;
+ if (nFrames <= 0) {
+ // skip this frame as it's too close to previous capture
+ ALOGV("skipping frame, timeUs %lld", static_cast<long long>(timeUs));
+ return -1;
+ }
+ mPrevCaptureUs = mPrevCaptureUs + nFrames * mTimePerCaptureUs;
+ mPrevFrameUs += mTimePerFrameUs * nFrames;
+ }
+
+ ALOGV("timeUs %lld, captureUs %lld, frameUs %lld",
+ static_cast<long long>(timeUs),
+ static_cast<long long>(mPrevCaptureUs),
+ static_cast<long long>(mPrevFrameUs));
+
+ return mPrevFrameUs;
+ } else if (mMaxTimestampGapUs > 0ll) {
/* Cap timestamp gap between adjacent frames to specified max
*
* In the scenario of cast mirroring, encoding could be suspended for
@@ -574,7 +617,9 @@ int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) {
mPrevOriginalTimeUs = originalTimeUs;
mPrevModifiedTimeUs = timeUs;
mOriginalTimeUs.add(timeUs, originalTimeUs);
- ALOGV("IN timestamp: %lld -> %lld", originalTimeUs, timeUs);
+ ALOGV("IN timestamp: %lld -> %lld",
+ static_cast<long long>(originalTimeUs),
+ static_cast<long long>(timeUs));
}
return timeUs;
@@ -682,7 +727,7 @@ int GraphicBufferSource::findMatchingCodecBuffer_l(
void GraphicBufferSource::onFrameAvailable() {
Mutex::Autolock autoLock(mMutex);
- ALOGV("onFrameAvailable exec=%d avail=%d",
+ ALOGV("onFrameAvailable exec=%d avail=%zu",
mExecuting, mNumFramesAvailable);
if (mEndOfStream || mSuspended) {
@@ -696,15 +741,15 @@ void GraphicBufferSource::onFrameAvailable() {
}
BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == OK) {
// If this is the first time we're seeing this buffer, add it to our
// slot table.
if (item.mGraphicBuffer != NULL) {
- ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf);
+ ALOGV("onFrameAvailable: setting mBufferSlot %d", item.mBuf);
mBufferSlot[item.mBuf] = item.mGraphicBuffer;
}
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
}
return;
@@ -724,13 +769,13 @@ void GraphicBufferSource::onFrameAvailable() {
void GraphicBufferSource::onBuffersReleased() {
Mutex::Autolock lock(mMutex);
- uint32_t slotMask;
- if (mBufferQueue->getReleasedBuffers(&slotMask) != NO_ERROR) {
+ uint64_t slotMask;
+ if (mConsumer->getReleasedBuffers(&slotMask) != NO_ERROR) {
ALOGW("onBuffersReleased: unable to get released buffer set");
- slotMask = 0xffffffff;
+ slotMask = 0xffffffffffffffffULL;
}
- ALOGV("onBuffersReleased: 0x%08x", slotMask);
+ ALOGV("onBuffersReleased: 0x%016" PRIx64, slotMask);
for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
if ((slotMask & 0x01) != 0) {
@@ -740,6 +785,11 @@ void GraphicBufferSource::onBuffersReleased() {
}
}
+// BufferQueue::ConsumerListener callback
+void GraphicBufferSource::onSidebandStreamChanged() {
+ ALOG_ASSERT(false, "GraphicBufferSource can't consume sideband streams");
+}
+
status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs(
int64_t repeatAfterUs) {
Mutex::Autolock autoLock(mMutex);
@@ -764,6 +814,27 @@ status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) {
return OK;
}
+
+void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mSkipFramesBeforeNs =
+ (skipFramesBeforeUs > 0) ? (skipFramesBeforeUs * 1000) : -1ll;
+}
+
+status_t GraphicBufferSource::setTimeLapseUs(int64_t* data) {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mExecuting || data[0] <= 0ll || data[1] <= 0ll) {
+ return INVALID_OPERATION;
+ }
+
+ mTimePerFrameUs = data[0];
+ mTimePerCaptureUs = data[1];
+
+ return OK;
+}
+
void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatRepeatLastFrame:
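
The time-lapse branch added to getTimestamp() above snaps each incoming frame to the nearest capture point and re-stamps it at the playback frame rate. A minimal standalone sketch of the same arithmetic follows; it is illustrative only and not part of the patch (the real code keeps this state in the mPrevCaptureUs/mPrevFrameUs/mTimePer* members under mMutex):

    #include <cstdint>
    #include <cstdio>

    struct TimeLapseState {
        int64_t timePerCaptureUs;  // interval between captured frames (us)
        int64_t timePerFrameUs;    // interval between frames at playback rate (us)
        int64_t prevCaptureUs;     // last accepted capture point, -1 if none
        int64_t prevFrameUs;       // last emitted playback timestamp, -1 if none
    };

    // Returns the playback timestamp for a frame captured at timeUs, or -1 if
    // the frame falls within half a capture interval of the previous capture.
    static int64_t snapToCapturePoint(TimeLapseState &s, int64_t timeUs) {
        if (s.prevCaptureUs < 0) {            // first capture anchors both clocks
            s.prevCaptureUs = timeUs;
            s.prevFrameUs = timeUs;
            return s.prevFrameUs;
        }
        int64_t nFrames =
            (timeUs + s.timePerCaptureUs / 2 - s.prevCaptureUs) / s.timePerCaptureUs;
        if (nFrames <= 0) {
            return -1;                        // too close to the previous capture
        }
        s.prevCaptureUs += nFrames * s.timePerCaptureUs;
        s.prevFrameUs   += nFrames * s.timePerFrameUs;
        return s.prevFrameUs;
    }

    int main() {
        TimeLapseState s{250000, 33333, -1, -1};  // capture every 250 ms, ~30 fps playback
        printf("%lld\n", (long long)snapToCapturePoint(s, 1000000));  // 1000000
        printf("%lld\n", (long long)snapToCapturePoint(s, 1240000));  // 1033333
        printf("%lld\n", (long long)snapToCapturePoint(s, 1260000));  // -1 (dropped)
        return 0;
    }
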
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 3b0e454..a70cc1a 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -61,7 +61,7 @@ public:
// Returns the handle to the producer side of the BufferQueue. Buffers
// queued on this will be received by GraphicBufferSource.
sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
- return mBufferQueue;
+ return mProducer;
}
// This is called when OMX transitions to OMX_StateExecuting, which means
@@ -118,6 +118,17 @@ public:
// of suspension on input.
status_t setMaxTimestampGapUs(int64_t maxGapUs);
+ // Sets the time lapse (or slow motion) parameters.
+ // data[0] is the time (us) between two frames for playback
+ // data[1] is the time (us) between two frames for capture
+ // When set, the sample's timestamp will be modified to playback framerate,
+ // and capture timestamp will be modified to capture rate.
+ status_t setTimeLapseUs(int64_t* data);
+
+ // Sets the start time us (in system time), samples before which should
+ // be dropped and not submitted to encoder
+ void setSkipFramesBeforeUs(int64_t startTimeUs);
+
protected:
// BufferQueue::ConsumerListener interface, called when a new frame of
// data is available. If we're executing and a codec buffer is
@@ -132,6 +143,11 @@ protected:
// set of mBufferSlot entries.
virtual void onBuffersReleased();
+ // BufferQueue::ConsumerListener interface, called when the client has
+ // changed the sideband stream. GraphicBufferSource doesn't handle sideband
+ // streams so this is a no-op (and should never be called).
+ virtual void onSidebandStreamChanged();
+
private:
// Keep track of codec input buffers. They may either be available
// (mGraphicBuffer == NULL) or in use by the codec.
@@ -194,8 +210,11 @@ private:
bool mSuspended;
- // We consume graphic buffers from this.
- sp<BufferQueue> mBufferQueue;
+ // Our BufferQueue interfaces. mProducer is passed to the producer through
+ // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
+ // the buffers queued by the producer.
+ sp<IGraphicBufferProducer> mProducer;
+ sp<IGraphicBufferConsumer> mConsumer;
// Number of frames pending in BufferQueue that haven't yet been
// forwarded to the codec.
@@ -223,16 +242,17 @@ private:
enum {
kRepeatLastFrameCount = 10,
};
- int64_t mRepeatAfterUs;
- int64_t mMaxTimestampGapUs;
KeyedVector<int64_t, int64_t> mOriginalTimeUs;
+ int64_t mMaxTimestampGapUs;
int64_t mPrevOriginalTimeUs;
int64_t mPrevModifiedTimeUs;
+ int64_t mSkipFramesBeforeNs;
sp<ALooper> mLooper;
sp<AHandlerReflector<GraphicBufferSource> > mReflector;
+ int64_t mRepeatAfterUs;
int32_t mRepeatLastFrameGeneration;
int64_t mRepeatLastFrameTimestamp;
int32_t mRepeatLastFrameCount;
@@ -245,6 +265,12 @@ private:
// no codec buffer was available at the time.
bool mRepeatBufferDeferred;
+ // Time lapse / slow motion configuration
+ int64_t mTimePerCaptureUs;
+ int64_t mTimePerFrameUs;
+ int64_t mPrevCaptureUs;
+ int64_t mPrevFrameUs;
+
void onMessageReceived(const sp<AMessage> &msg);
DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 7819fc3..41407e4 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -233,7 +233,7 @@ status_t OMX::allocateNode(
instance, &handle);
if (err != OMX_ErrorNone) {
- ALOGV("FAILED to allocate omx component '%s'", name);
+ ALOGE("FAILED to allocate omx component '%s'", name);
instance->onGetHandleFailed();
@@ -342,6 +342,13 @@ status_t OMX::prepareForAdaptivePlayback(
portIndex, enable, maxFrameWidth, maxFrameHeight);
}
+status_t OMX::configureVideoTunnelMode(
+ node_id node, OMX_U32 portIndex, OMX_BOOL tunneled,
+ OMX_U32 audioHwSync, native_handle_t **sidebandHandle) {
+ return findInstance(node)->configureVideoTunnelMode(
+ portIndex, tunneled, audioHwSync, sidebandHandle);
+}
+
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -472,8 +479,6 @@ OMX_ERRORTYPE OMX::OnFillBufferDone(
msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
msg.u.extended_buffer_data.flags = pBuffer->nFlags;
msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
- msg.u.extended_buffer_data.platform_private = pBuffer->pPlatformPrivate;
- msg.u.extended_buffer_data.data_ptr = pBuffer->pBuffer;
findDispatcher(node)->post(msg);
diff --git a/media/libstagefright/omx/OMXMaster.cpp b/media/libstagefright/omx/OMXMaster.cpp
index 6b6d0ab..ae3cb33 100644
--- a/media/libstagefright/omx/OMXMaster.cpp
+++ b/media/libstagefright/omx/OMXMaster.cpp
@@ -91,7 +91,7 @@ void OMXMaster::addPlugin(OMXPluginBase *plugin) {
}
if (err != OMX_ErrorNoMore) {
- ALOGE("OMX plugin failed w/ error 0x%08x after registering %d "
+ ALOGE("OMX plugin failed w/ error 0x%08x after registering %zu "
"components", err, mPluginByComponentName.size());
}
}
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index c64dcf0..efb27f5 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -460,6 +460,49 @@ status_t OMXNodeInstance::prepareForAdaptivePlayback(
return err;
}
+status_t OMXNodeInstance::configureVideoTunnelMode(
+ OMX_U32 portIndex, OMX_BOOL tunneled, OMX_U32 audioHwSync,
+ native_handle_t **sidebandHandle) {
+ Mutex::Autolock autolock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.configureVideoTunnelMode");
+
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+ if (err != OMX_ErrorNone) {
+ ALOGE("configureVideoTunnelMode extension is missing!");
+ return StatusFromOMXError(err);
+ }
+
+ ConfigureVideoTunnelModeParams tunnelParams;
+ tunnelParams.nSize = sizeof(tunnelParams);
+ tunnelParams.nVersion.s.nVersionMajor = 1;
+ tunnelParams.nVersion.s.nVersionMinor = 0;
+ tunnelParams.nVersion.s.nRevision = 0;
+ tunnelParams.nVersion.s.nStep = 0;
+
+ tunnelParams.nPortIndex = portIndex;
+ tunnelParams.bTunneled = tunneled;
+ tunnelParams.nAudioHwSync = audioHwSync;
+ err = OMX_SetParameter(mHandle, index, &tunnelParams);
+ if (err != OMX_ErrorNone) {
+ ALOGE("configureVideoTunnelMode failed! (err %d).", err);
+ return UNKNOWN_ERROR;
+ }
+
+ err = OMX_GetParameter(mHandle, index, &tunnelParams);
+ if (err != OMX_ErrorNone) {
+ ALOGE("GetVideoTunnelWindow failed! (err %d).", err);
+ return UNKNOWN_ERROR;
+ }
+ if (sidebandHandle) {
+ *sidebandHandle = (native_handle_t*)tunnelParams.pSidebandWindow;
+ }
+
+ return err;
+}
+
status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer) {
@@ -855,6 +898,8 @@ status_t OMXNodeInstance::setInternalOption(
case IOMX::INTERNAL_OPTION_SUSPEND:
case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP:
+ case IOMX::INTERNAL_OPTION_START_TIME:
+ case IOMX::INTERNAL_OPTION_TIME_LAPSE:
{
const sp<GraphicBufferSource> &bufferSource =
getGraphicBufferSource();
@@ -879,7 +924,8 @@ status_t OMXNodeInstance::setInternalOption(
int64_t delayUs = *(int64_t *)data;
return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
- } else {
+ } else if (type ==
+ IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP){
if (size != sizeof(int64_t)) {
return INVALID_OPERATION;
}
@@ -887,6 +933,20 @@ status_t OMXNodeInstance::setInternalOption(
int64_t maxGapUs = *(int64_t *)data;
return bufferSource->setMaxTimestampGapUs(maxGapUs);
+ } else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
+ if (size != sizeof(int64_t)) {
+ return INVALID_OPERATION;
+ }
+
+ int64_t skipFramesBeforeUs = *(int64_t *)data;
+
+ bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
+ } else { // IOMX::INTERNAL_OPTION_TIME_LAPSE
+ if (size != sizeof(int64_t) * 2) {
+ return INVALID_OPERATION;
+ }
+
+ bufferSource->setTimeLapseUs((int64_t *)data);
}
return OK;
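
The setInternalOption() hunk above adds payload validation for the two new options: INTERNAL_OPTION_START_TIME carries a single int64_t (the start time in us), while INTERNAL_OPTION_TIME_LAPSE carries exactly two int64_t values, where data[0] is the time per frame at playback rate and data[1] is the time per capture. The following standalone sketch of that packing and size checking uses illustrative stand-ins, not the framework types:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    enum InternalOption {           // stand-ins for IOMX::INTERNAL_OPTION_*
        kOptionStartTime,
        kOptionTimeLapse,
    };

    static bool applyInternalOption(InternalOption type, const void *data, size_t size) {
        switch (type) {
        case kOptionStartTime: {
            if (size != sizeof(int64_t)) {
                return false;                   // INVALID_OPERATION in the patch
            }
            int64_t skipFramesBeforeUs;
            memcpy(&skipFramesBeforeUs, data, sizeof(skipFramesBeforeUs));
            printf("skip frames before %lld us\n", (long long)skipFramesBeforeUs);
            return true;
        }
        case kOptionTimeLapse: {
            if (size != sizeof(int64_t) * 2) {  // exactly two int64_t values
                return false;
            }
            int64_t params[2];
            memcpy(params, data, sizeof(params));
            // params[0]: time between frames at playback rate (us)
            // params[1]: time between captured frames (us)
            printf("playback %lld us/frame, capture %lld us/frame\n",
                   (long long)params[0], (long long)params[1]);
            return true;
        }
        }
        return false;
    }

    int main() {
        int64_t start = 500000;                // start encoding at t = 0.5 s
        applyInternalOption(kOptionStartTime, &start, sizeof(start));

        int64_t lapse[2] = { 33333, 250000 };  // ~30 fps playback, 4 fps capture
        applyInternalOption(kOptionTimeLapse, lapse, sizeof(lapse));
        return 0;
    }
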
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index d49e50b..9b6958a 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -42,6 +42,7 @@ static const struct {
{ "OMX.google.amrwb.encoder", "amrwbenc", "audio_encoder.amrwb" },
{ "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" },
{ "OMX.google.h264.encoder", "h264enc", "video_encoder.avc" },
+ { "OMX.google.hevc.decoder", "hevcdec", "video_decoder.hevc" },
{ "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
{ "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
{ "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },
@@ -50,6 +51,7 @@ static const struct {
{ "OMX.google.mpeg4.encoder", "mpeg4enc", "video_encoder.mpeg4" },
{ "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" },
{ "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" },
+ { "OMX.google.opus.decoder", "opusdec", "audio_decoder.opus" },
{ "OMX.google.vp8.decoder", "vpxdec", "video_decoder.vp8" },
{ "OMX.google.vp9.decoder", "vpxdec", "video_decoder.vp9" },
{ "OMX.google.vp8.encoder", "vpxenc", "video_encoder.vp8" },
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index eb9fcf7..e533fdd 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -22,6 +22,7 @@
#include "include/SoftVideoDecoderOMXComponent.h"
+#include <media/hardware/HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -50,6 +51,9 @@ SoftVideoDecoderOMXComponent::SoftVideoDecoderOMXComponent(
OMX_PTR appData,
OMX_COMPONENTTYPE **component)
: SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mIsAdaptive(false),
+ mAdaptiveMaxWidth(0),
+ mAdaptiveMaxHeight(0),
mWidth(width),
mHeight(height),
mCropLeft(0),
@@ -119,16 +123,18 @@ void SoftVideoDecoderOMXComponent::initPorts(
updatePortDefinitions();
}
-void SoftVideoDecoderOMXComponent::updatePortDefinitions() {
+void SoftVideoDecoderOMXComponent::updatePortDefinitions(bool updateCrop) {
OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
def->format.video.nFrameWidth = mWidth;
def->format.video.nFrameHeight = mHeight;
def->format.video.nStride = def->format.video.nFrameWidth;
def->format.video.nSliceHeight = def->format.video.nFrameHeight;
+ def->nBufferSize = def->format.video.nFrameWidth * def->format.video.nFrameHeight * 3 / 2;
+
def = &editPortInfo(kOutputPortIndex)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
+ def->format.video.nFrameWidth = mIsAdaptive ? mAdaptiveMaxWidth : mWidth;
+ def->format.video.nFrameHeight = mIsAdaptive ? mAdaptiveMaxHeight : mHeight;
def->format.video.nStride = def->format.video.nFrameWidth;
def->format.video.nSliceHeight = def->format.video.nFrameHeight;
@@ -136,10 +142,77 @@ void SoftVideoDecoderOMXComponent::updatePortDefinitions() {
(def->format.video.nFrameWidth *
def->format.video.nFrameHeight * 3) / 2;
- mCropLeft = 0;
- mCropTop = 0;
- mCropWidth = mWidth;
- mCropHeight = mHeight;
+ if (updateCrop) {
+ mCropLeft = 0;
+ mCropTop = 0;
+ mCropWidth = mWidth;
+ mCropHeight = mHeight;
+ }
+}
+
+void SoftVideoDecoderOMXComponent::handlePortSettingsChange(
+ bool *portWillReset, uint32_t width, uint32_t height, bool cropChanged) {
+ *portWillReset = false;
+ bool sizeChanged = (width != mWidth || height != mHeight);
+
+ if (sizeChanged || cropChanged) {
+ mWidth = width;
+ mHeight = height;
+
+ bool updateCrop = !cropChanged;
+ if ((sizeChanged && !mIsAdaptive)
+ || width > mAdaptiveMaxWidth
+ || height > mAdaptiveMaxHeight) {
+ if (mIsAdaptive) {
+ if (width > mAdaptiveMaxWidth) {
+ mAdaptiveMaxWidth = width;
+ }
+ if (height > mAdaptiveMaxHeight) {
+ mAdaptiveMaxHeight = height;
+ }
+ }
+ updatePortDefinitions(updateCrop);
+ notify(OMX_EventPortSettingsChanged, kOutputPortIndex, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ *portWillReset = true;
+ } else {
+ updatePortDefinitions(updateCrop);
+ notify(OMX_EventPortSettingsChanged, kOutputPortIndex,
+ OMX_IndexConfigCommonOutputCrop, NULL);
+ }
+ }
+}
+
+void SoftVideoDecoderOMXComponent::copyYV12FrameToOutputBuffer(
+ uint8_t *dst, const uint8_t *srcY, const uint8_t *srcU, const uint8_t *srcV,
+ size_t srcYStride, size_t srcUStride, size_t srcVStride) {
+ size_t dstYStride = mIsAdaptive ? mAdaptiveMaxWidth : mWidth;
+ size_t dstUVStride = dstYStride / 2;
+ size_t dstHeight = mIsAdaptive ? mAdaptiveMaxHeight : mHeight;
+
+ for (size_t i = 0; i < dstHeight; ++i) {
+ if (i < mHeight) {
+ memcpy(dst, srcY, mWidth);
+ srcY += srcYStride;
+ }
+ dst += dstYStride;
+ }
+
+ for (size_t i = 0; i < dstHeight / 2; ++i) {
+ if (i < mHeight / 2) {
+ memcpy(dst, srcU, mWidth / 2);
+ srcU += srcUStride;
+ }
+ dst += dstUVStride;
+ }
+
+ for (size_t i = 0; i < dstHeight / 2; ++i) {
+ if (i < mHeight / 2) {
+ memcpy(dst, srcV, mWidth / 2);
+ srcV += srcVStride;
+ }
+ dst += dstUVStride;
+ }
}
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
@@ -183,12 +256,12 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
return OMX_ErrorUnsupportedIndex;
}
- if (index >= mNumProfileLevels) {
+ if (profileLevel->nProfileIndex >= mNumProfileLevels) {
return OMX_ErrorNoMore;
}
- profileLevel->eProfile = mProfileLevels[index].mProfile;
- profileLevel->eLevel = mProfileLevels[index].mLevel;
+ profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
+ profileLevel->eLevel = mProfileLevels[profileLevel->nProfileIndex].mLevel;
return OMX_ErrorNone;
}
@@ -199,7 +272,10 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
OMX_INDEXTYPE index, const OMX_PTR params) {
- switch (index) {
+ // Include extension index OMX_INDEXEXTTYPE.
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
case OMX_IndexParamStandardComponentRole:
{
const OMX_PARAM_COMPONENTROLETYPE *roleParams =
@@ -230,6 +306,24 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalSetParameter(
return OMX_ErrorNone;
}
+ case kPrepareForAdaptivePlaybackIndex:
+ {
+ const PrepareForAdaptivePlaybackParams* adaptivePlaybackParams =
+ (const PrepareForAdaptivePlaybackParams *)params;
+ mIsAdaptive = adaptivePlaybackParams->bEnable;
+ if (mIsAdaptive) {
+ mAdaptiveMaxWidth = adaptivePlaybackParams->nMaxFrameWidth;
+ mAdaptiveMaxHeight = adaptivePlaybackParams->nMaxFrameHeight;
+ mWidth = mAdaptiveMaxWidth;
+ mHeight = mAdaptiveMaxHeight;
+ } else {
+ mAdaptiveMaxWidth = 0;
+ mAdaptiveMaxHeight = 0;
+ }
+ updatePortDefinitions();
+ return OMX_ErrorNone;
+ }
+
default:
return SimpleSoftOMXComponent::internalSetParameter(index, params);
}
@@ -259,6 +353,16 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getConfig(
}
}
+OMX_ERRORTYPE SoftVideoDecoderOMXComponent::getExtensionIndex(
+ const char *name, OMX_INDEXTYPE *index) {
+ if (!strcmp(name, "OMX.google.android.index.prepareForAdaptivePlayback")) {
+ *(int32_t*)index = kPrepareForAdaptivePlaybackIndex;
+ return OMX_ErrorNone;
+ }
+
+ return SimpleSoftOMXComponent::getExtensionIndex(name, index);
+}
+
void SoftVideoDecoderOMXComponent::onReset() {
mOutputPortSettingsChange = NONE;
}
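
copyYV12FrameToOutputBuffer() above copies a decoded frame of mWidth x mHeight into an output buffer laid out for the adaptive maximum size, so every destination row advances by the full destination stride while only the rows and columns inside the decoded frame receive data. A standalone sketch of that plane-copy pattern, with illustrative sizes and not part of the patch:

    #include <cstdint>
    #include <cstring>
    #include <vector>

    static void copyPlane(uint8_t *dst, size_t dstStride, size_t dstRows,
                          const uint8_t *src, size_t srcStride,
                          size_t copyWidth, size_t copyRows) {
        for (size_t i = 0; i < dstRows; ++i) {
            if (i < copyRows) {
                memcpy(dst, src, copyWidth);   // valid picture data
                src += srcStride;
            }
            dst += dstStride;                  // rows beyond copyRows stay untouched
        }
    }

    int main() {
        const size_t width = 320, height = 240;        // decoded frame
        const size_t dstWidth = 640, dstHeight = 480;  // adaptive max size
        std::vector<uint8_t> srcY(width * height, 0x80);
        std::vector<uint8_t> srcU((width / 2) * (height / 2), 0x80);
        std::vector<uint8_t> srcV((width / 2) * (height / 2), 0x80);
        std::vector<uint8_t> dst(dstWidth * dstHeight * 3 / 2, 0);

        uint8_t *out = dst.data();
        copyPlane(out, dstWidth, dstHeight, srcY.data(), width, width, height);
        out += dstWidth * dstHeight;
        copyPlane(out, dstWidth / 2, dstHeight / 2, srcU.data(), width / 2,
                  width / 2, height / 2);
        out += (dstWidth / 2) * (dstHeight / 2);
        copyPlane(out, dstWidth / 2, dstHeight / 2, srcV.data(), width / 2,
                  width / 2, height / 2);
        return 0;
    }
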
diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk
index e368134..447b29e 100644
--- a/media/libstagefright/omx/tests/Android.mk
+++ b/media/libstagefright/omx/tests/Android.mk
@@ -11,6 +11,8 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := omx_tests
LOCAL_MODULE_TAGS := tests
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 44e4f9d..f4dfd6b 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -26,6 +26,7 @@
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -242,7 +243,8 @@ private:
};
static sp<MediaExtractor> CreateExtractorFromURI(const char *uri) {
- sp<DataSource> source = DataSource::CreateFromURI(uri);
+ sp<DataSource> source =
+ DataSource::CreateFromURI(NULL /* httpService */, uri);
if (source == NULL) {
return NULL;
@@ -461,6 +463,7 @@ static const char *GetMimeFromComponentRole(const char *componentRole) {
{ "audio_decoder.aac", "audio/mp4a-latm" },
{ "audio_decoder.mp3", "audio/mpeg" },
{ "audio_decoder.vorbis", "audio/vorbis" },
+ { "audio_decoder.opus", "audio/opus" },
{ "audio_decoder.g711alaw", MEDIA_MIMETYPE_AUDIO_G711_ALAW },
{ "audio_decoder.g711mlaw", MEDIA_MIMETYPE_AUDIO_G711_MLAW },
};
@@ -493,6 +496,7 @@ static const char *GetURLForMime(const char *mime) {
{ "audio/mpeg",
"file:///sdcard/media_api/music/MP3_48KHz_128kbps_s_1_17_CBR.mp3" },
{ "audio/vorbis", NULL },
+ { "audio/opus", NULL },
{ "video/x-vnd.on2.vp8",
"file:///sdcard/media_api/video/big-buck-bunny_trailer.webm" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "file:///sdcard/M1F1-Alaw-AFsp.wav" },