Diffstat (limited to 'media')
-rw-r--r--  media/img_utils/include/img_utils/DngUtils.h        37
-rw-r--r--  media/img_utils/src/DngUtils.cpp                     97
-rw-r--r--  media/libmedia/AudioTrack.cpp                        19
-rw-r--r--  media/libmedia/IMediaHTTPConnection.cpp               9
-rwxr-xr-x  media/libstagefright/codecs/avcdec/SoftAVCDec.cpp     2
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.cpp       2
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.cpp    104
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.h        8
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp         20
9 files changed, 221 insertions, 77 deletions
diff --git a/media/img_utils/include/img_utils/DngUtils.h b/media/img_utils/include/img_utils/DngUtils.h
index 4389b02..3dcedc5 100644
--- a/media/img_utils/include/img_utils/DngUtils.h
+++ b/media/img_utils/include/img_utils/DngUtils.h
@@ -31,6 +31,7 @@ namespace android {
namespace img_utils {
#define NELEMS(x) ((int) (sizeof(x) / sizeof((x)[0])))
+#define CLAMP(x, low, high) (((x) > (high)) ? (high) : (((x) < (low)) ? (low) : (x)))
/**
* Utility class for building values for the OpcodeList tags specified
@@ -107,13 +108,49 @@ class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
uint32_t mapPlanes,
const float* mapGains);
+ /**
+ * Add WarpRectilinear opcode for the given metadata parameters.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addWarpRectilinearForMetadata(const float* kCoeffs,
+ uint32_t activeArrayWidth,
+ uint32_t activeArrayHeight,
+ float opticalCenterX,
+ float opticalCenterY);
+
+ /**
+ * Add a WarpRectilinear opcode.
+ *
+ * numPlanes - Number of planes included in this opcode.
+     * opticalCenterX, opticalCenterY - Normalized x,y coordinates of the sensor optical
+     *     center relative to the top-left pixel of the produced images (e.g. [0.5, 0.5]
+     *     gives a sensor optical center at the image center).
+ * kCoeffs - A list of coefficients for the polynomial equation representing the distortion
+ * correction. For each plane, 6 coefficients must be included:
+ * {k_r0, k_r1, k_r2, k_r3, k_t0, k_t1}. See the DNG 1.4 specification for an
+ * outline of the polynomial used here.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addWarpRectilinear(uint32_t numPlanes,
+ double opticalCenterX,
+ double opticalCenterY,
+ const double* kCoeffs);
+
// TODO: Add other Opcode methods
protected:
static const uint32_t FLAG_OPTIONAL = 0x1u;
static const uint32_t FLAG_OPTIONAL_FOR_PREVIEW = 0x2u;
+ // Opcode IDs
enum {
+ WARP_RECTILINEAR_ID = 1,
GAIN_MAP_ID = 9,
+ };
+
+ // LSM mosaic indices
+ enum {
LSM_R_IND = 0,
LSM_GE_IND = 1,
LSM_GO_IND = 2,
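A minimal usage sketch for the API added above — the wrapper function, coefficient values, and sensor geometry below are hypothetical and not part of this patch:

#include <img_utils/DngUtils.h>

using namespace android;
using namespace android::img_utils;

// Six Camera2-style distortion coefficients: four radial terms (k_r0..k_r3)
// followed by two tangential terms (k_t0, k_t1). Values are illustrative only.
static const float kDistortion[6] = { 1.0f, 0.05f, -0.01f, 0.002f, 0.0003f, -0.0001f };

static status_t addDistortionOpcode() {
    sp<OpcodeListBuilder> builder = new OpcodeListBuilder();
    // The optical center is given in active-array pixel coordinates; the builder
    // normalizes it against (width - 1, height - 1) and clamps it to [0, 1], as
    // shown in DngUtils.cpp below.
    return builder->addWarpRectilinearForMetadata(
            kDistortion,
            /*activeArrayWidth*/  4000,
            /*activeArrayHeight*/ 3000,
            /*opticalCenterX*/    2000.0f,
            /*opticalCenterY*/    1500.0f);
}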
diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp
index d3b4a35..b213403 100644
--- a/media/img_utils/src/DngUtils.cpp
+++ b/media/img_utils/src/DngUtils.cpp
@@ -16,6 +16,10 @@
#include <img_utils/DngUtils.h>
+#include <inttypes.h>
+
+#include <math.h>
+
namespace android {
namespace img_utils {
@@ -229,7 +233,7 @@ status_t OpcodeListBuilder::addGainMap(uint32_t top,
err = mEndianOut.write(version, 0, NELEMS(version));
if (err != OK) return err;
- // Do not include optional flag for preview, as this can have a large effect on the output.
+ // Allow this opcode to be skipped if not supported
uint32_t flags = FLAG_OPTIONAL;
err = mEndianOut.write(&flags, 0, 1);
@@ -278,5 +282,96 @@ status_t OpcodeListBuilder::addGainMap(uint32_t top,
return OK;
}
+status_t OpcodeListBuilder::addWarpRectilinearForMetadata(const float* kCoeffs,
+ uint32_t activeArrayWidth,
+ uint32_t activeArrayHeight,
+ float opticalCenterX,
+ float opticalCenterY) {
+ if (activeArrayWidth <= 1 || activeArrayHeight <= 1) {
+ ALOGE("%s: Cannot add opcode for active array with dimensions w=%" PRIu32 ", h=%" PRIu32,
+ __FUNCTION__, activeArrayWidth, activeArrayHeight);
+ return BAD_VALUE;
+ }
+
+ double normalizedOCX = opticalCenterX / static_cast<double>(activeArrayWidth - 1);
+ double normalizedOCY = opticalCenterY / static_cast<double>(activeArrayHeight - 1);
+
+ normalizedOCX = CLAMP(normalizedOCX, 0, 1);
+ normalizedOCY = CLAMP(normalizedOCY, 0, 1);
+
+    // Conversion factors from the Camera2 K factors to the DNG spec's K factors:
+ //
+ // Note: these are necessary because our unit system assumes a
+ // normalized max radius of sqrt(2), whereas the DNG spec's
+ // WarpRectilinear opcode assumes a normalized max radius of 1.
+ // Thus, each K coefficient must include the domain scaling
+ // factor (the DNG domain is scaled by sqrt(2) to emulate the
+ // domain used by the Camera2 specification).
+
+ const double c_0 = sqrt(2);
+ const double c_1 = 2 * sqrt(2);
+ const double c_2 = 4 * sqrt(2);
+ const double c_3 = 8 * sqrt(2);
+ const double c_4 = 2;
+ const double c_5 = 2;
+
+ const double coeffs[] = { c_0 * kCoeffs[0],
+ c_1 * kCoeffs[1],
+ c_2 * kCoeffs[2],
+ c_3 * kCoeffs[3],
+ c_4 * kCoeffs[4],
+ c_5 * kCoeffs[5] };
+
+
+ return addWarpRectilinear(/*numPlanes*/1,
+ /*opticalCenterX*/normalizedOCX,
+ /*opticalCenterY*/normalizedOCY,
+ coeffs);
+}
+
+status_t OpcodeListBuilder::addWarpRectilinear(uint32_t numPlanes,
+ double opticalCenterX,
+ double opticalCenterY,
+ const double* kCoeffs) {
+
+ uint32_t opcodeId = WARP_RECTILINEAR_ID;
+
+ status_t err = mEndianOut.write(&opcodeId, 0, 1);
+ if (err != OK) return err;
+
+ uint8_t version[] = {1, 3, 0, 0};
+ err = mEndianOut.write(version, 0, NELEMS(version));
+ if (err != OK) return err;
+
+ // Allow this opcode to be skipped if not supported
+ uint32_t flags = FLAG_OPTIONAL;
+
+ err = mEndianOut.write(&flags, 0, 1);
+ if (err != OK) return err;
+
+ const uint32_t NUMBER_CENTER_ARGS = 2;
+ const uint32_t NUMBER_COEFFS = numPlanes * 6;
+ uint32_t totalSize = (NUMBER_CENTER_ARGS + NUMBER_COEFFS) * sizeof(double) + sizeof(uint32_t);
+
+ err = mEndianOut.write(&totalSize, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&numPlanes, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(kCoeffs, 0, NUMBER_COEFFS);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&opticalCenterX, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&opticalCenterY, 0, 1);
+ if (err != OK) return err;
+
+ mCount++;
+
+ return OK;
+}
+
} /*namespace img_utils*/
} /*namespace android*/
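For readability, the stream written by addWarpRectilinear() above amounts to the following field sequence (a descriptive summary of the code, with byte order left to mEndianOut):

//   uint32_t  opcodeId      = 1                    (WARP_RECTILINEAR_ID)
//   uint8_t   version[4]    = {1, 3, 0, 0}
//   uint32_t  flags         = FLAG_OPTIONAL
//   uint32_t  totalSize     = (2 + 6 * numPlanes) * sizeof(double) + sizeof(uint32_t)
//   uint32_t  numPlanes
//   double    kCoeffs[6 * numPlanes]               // {k_r0, k_r1, k_r2, k_r3, k_t0, k_t1} per plane
//   double    opticalCenterX, opticalCenterY       // normalized to [0, 1]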
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 81ae6d7..b5d7614 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -986,15 +986,18 @@ status_t AudioTrack::getPosition(uint32_t *position)
}
if (mOutput != AUDIO_IO_HANDLE_NONE) {
- uint32_t halFrames;
- AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames);
+ uint32_t halFrames; // actually unused
+ (void) AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames);
+ // FIXME: on getRenderPosition() error, we return OK with frame position 0.
}
// FIXME: dspFrames may not be zero in (mState == STATE_STOPPED || mState == STATE_FLUSHED)
// due to hardware latency. We leave this behavior for now.
*position = dspFrames;
} else {
if (mCblk->mFlags & CBLK_INVALID) {
- restoreTrack_l("getPosition");
+ (void) restoreTrack_l("getPosition");
+ // FIXME: for compatibility with the Java API we ignore the restoreTrack_l()
+ // error here (e.g. DEAD_OBJECT) and return OK with the last recorded server position.
}
// IAudioTrack::stop() isn't synchronous; we don't know when presentation completes
@@ -2080,7 +2083,8 @@ status_t AudioTrack::restoreTrack_l(const char *from)
AudioSystem::clearAudioConfigCache();
if (isOffloadedOrDirect_l() || mDoNotReconnect) {
- // FIXME re-creation of offloaded tracks is not yet implemented
+ // FIXME re-creation of offloaded and direct tracks is not yet implemented;
+ // reconsider enabling for linear PCM encodings when position can be preserved.
return DEAD_OBJECT;
}
@@ -2203,7 +2207,12 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)
}
if (mCblk->mFlags & CBLK_INVALID) {
- restoreTrack_l("getTimestamp");
+ const status_t status = restoreTrack_l("getTimestamp");
+ if (status != OK) {
+ // per getTimestamp() API doc in header, we return DEAD_OBJECT here,
+ // recommending that the track be recreated.
+ return DEAD_OBJECT;
+ }
}
// The presented frame count must always lag behind the consumed frame count.
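A sketch of how a native client might handle the stricter getTimestamp() result above; the function is hypothetical and the recovery policy is application-specific:

#include <media/AudioTrack.h>

using namespace android;

static void pollTimestamp(const sp<AudioTrack>& track) {
    AudioTimestamp ts;
    const status_t res = track->getTimestamp(ts);
    if (res == OK) {
        // ts.mPosition and ts.mTime are valid and can be used for A/V sync.
    } else if (res == DEAD_OBJECT) {
        // restoreTrack_l() failed inside getTimestamp(); the track cannot be
        // recovered in place and should be torn down and recreated.
    }
    // Other results (e.g. WOULD_BLOCK before any frames have been presented)
    // are transient and can simply be retried later.
}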
diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp
index 7e89d7f..0dda0be 100644
--- a/media/libmedia/IMediaHTTPConnection.cpp
+++ b/media/libmedia/IMediaHTTPConnection.cpp
@@ -107,7 +107,14 @@ struct BpMediaHTTPConnection : public BpInterface<IMediaHTTPConnection> {
return UNKNOWN_ERROR;
}
- size_t len = reply.readInt32();
+ int32_t lenOrErrorCode = reply.readInt32();
+
+ // Negative values are error codes
+ if (lenOrErrorCode < 0) {
+ return lenOrErrorCode;
+ }
+
+ size_t len = lenOrErrorCode;
if (len > size) {
ALOGE("requested %zu, got %zu", size, len);
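To spell out what the check above prevents: before this change a negative status written by the remote side was converted straight to a size_t (sketch with an assumed error value):

#include <cstddef>
#include <cstdint>

const int32_t lenOrErrorCode = -1;                        // some negative status from the peer
const size_t len = static_cast<size_t>(lenOrErrorCode);   // becomes SIZE_MAX, not an error
// The bogus length then trips the 'len > size' sanity check as an oversized read
// instead of the original error code being returned to the caller.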
diff --git a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
index 8ac337a..f3af777 100755
--- a/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
+++ b/media/libstagefright/codecs/avcdec/SoftAVCDec.cpp
@@ -787,7 +787,7 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
}
if (s_dec_op.u4_output_present) {
- outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
+ outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
mTimeStampsValid[s_dec_op.u4_ts] = false;
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index a35909e..e161fb8 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -146,8 +146,8 @@ bool SoftVPX::outputBuffers(bool flushDecoder, bool display, bool eos, bool *por
}
outHeader->nOffset = 0;
- outHeader->nFilledLen = (width * height * 3) / 2;
outHeader->nFlags = 0;
+ outHeader->nFilledLen = (outputBufferWidth() * outputBufferHeight() * 3) / 2;
outHeader->nTimeStamp = *(OMX_TICKS *)mImg->user_priv;
uint8_t *dst = outHeader->pBuffer;
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index ac6bf0d..31c6975 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -118,6 +118,7 @@ GraphicBufferSource::GraphicBufferSource(
mIsPersistent(false),
mConsumer(consumer),
mNumFramesAvailable(0),
+ mNumBufferAcquired(0),
mEndOfStream(false),
mEndOfStreamSent(false),
mMaxTimestampGapUs(-1ll),
@@ -185,7 +186,14 @@ GraphicBufferSource::GraphicBufferSource(
}
GraphicBufferSource::~GraphicBufferSource() {
- ALOGV("~GraphicBufferSource");
+ if (mLatestBufferId >= 0) {
+ releaseBuffer(
+ mLatestBufferId, mLatestBufferFrameNum,
+ mBufferSlot[mLatestBufferId], mLatestBufferFence);
+ }
+ if (mNumBufferAcquired != 0) {
+ ALOGW("potential buffer leak (acquired %d)", mNumBufferAcquired);
+ }
if (mConsumer != NULL && !mIsPersistent) {
status_t err = mConsumer->consumerDisconnect();
if (err != NO_ERROR) {
@@ -377,17 +385,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header, int f
if (id == mLatestBufferId) {
CHECK_GT(mLatestBufferUseCount--, 0);
} else {
- if (mIsPersistent) {
- mConsumer->detachBuffer(id);
- int outSlot;
- mConsumer->attachBuffer(&outSlot, mBufferSlot[id]);
- mConsumer->releaseBuffer(outSlot, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
- mBufferSlot[id] = NULL;
- } else {
- mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
- }
+ releaseBuffer(id, codecBuffer.mFrameNumber, mBufferSlot[id], fence);
}
} else {
ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d",
@@ -468,18 +466,11 @@ void GraphicBufferSource::suspend(bool suspend) {
break;
}
+ ++mNumBufferAcquired;
--mNumFramesAvailable;
- if (mIsPersistent) {
- mConsumer->detachBuffer(item.mBuf);
- mBufferSlot[item.mBuf] = NULL;
- mConsumer->attachBuffer(&item.mBuf, item.mGraphicBuffer);
- mConsumer->releaseBuffer(item.mBuf, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
- } else {
- mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
- }
+ releaseBuffer(item.mBuf, item.mFrameNumber,
+ item.mGraphicBuffer, item.mFence);
}
return;
}
@@ -526,6 +517,7 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
return false;
}
+ mNumBufferAcquired++;
mNumFramesAvailable--;
// If this is the first time we're seeing this buffer, add it to our
@@ -559,17 +551,7 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
if (err != OK) {
ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf);
- if (mIsPersistent) {
- mConsumer->detachBuffer(item.mBuf);
- mBufferSlot[item.mBuf] = NULL;
- mConsumer->attachBuffer(&item.mBuf, item.mGraphicBuffer);
- mConsumer->releaseBuffer(item.mBuf, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
- } else {
- mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
- }
- // item.mFence is released at the end of this method
+ releaseBuffer(item.mBuf, item.mFrameNumber, item.mGraphicBuffer, item.mFence);
} else {
ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
setLatestBuffer_l(item, dropped);
@@ -647,19 +629,8 @@ void GraphicBufferSource::setLatestBuffer_l(
if (mLatestBufferId >= 0) {
if (mLatestBufferUseCount == 0) {
- if (mIsPersistent) {
- mConsumer->detachBuffer(mLatestBufferId);
-
- int outSlot;
- mConsumer->attachBuffer(&outSlot, mBufferSlot[mLatestBufferId]);
- mConsumer->releaseBuffer(outSlot, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, mLatestBufferFence);
- mBufferSlot[mLatestBufferId] = NULL;
- } else {
- mConsumer->releaseBuffer(
- mLatestBufferId, mLatestBufferFrameNum,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, mLatestBufferFence);
- }
+ releaseBuffer(mLatestBufferId, mLatestBufferFrameNum,
+ mBufferSlot[mLatestBufferId], mLatestBufferFence);
// mLatestBufferFence will be set to new fence just below
}
}
@@ -848,6 +819,33 @@ int GraphicBufferSource::findMatchingCodecBuffer_l(
return -1;
}
+/*
+ * Releases an acquired buffer back to the consumer for either persistent
+ * or non-persistent surfaces.
+ *
+ * id: buffer slot to release (in persistent case the id might be changed)
+ * frameNum: frame number of the frame being released
+ * buffer: GraphicBuffer to release (note: passed by value, not by reference,
+ *     because in the persistent case the original mBufferSlot entry is cleared
+ *     before the buffer is reattached)
+ * fence: fence of the frame being released
+ */
+void GraphicBufferSource::releaseBuffer(
+ int &id, uint64_t frameNum,
+ const sp<GraphicBuffer> buffer, const sp<Fence> &fence) {
+ if (mIsPersistent) {
+ mConsumer->detachBuffer(id);
+ mBufferSlot[id] = NULL;
+
+ mConsumer->attachBuffer(&id, buffer);
+ mConsumer->releaseBuffer(
+ id, 0, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
+ } else {
+ mConsumer->releaseBuffer(
+ id, frameNum, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, fence);
+ }
+ mNumBufferAcquired--;
+}
+
// BufferQueue::ConsumerListener callback
void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {
Mutex::Autolock autoLock(mMutex);
@@ -868,6 +866,8 @@ void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {
BufferItem item;
status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == OK) {
+ mNumBufferAcquired++;
+
// If this is the first time we're seeing this buffer, add it to our
// slot table.
if (item.mGraphicBuffer != NULL) {
@@ -875,16 +875,8 @@ void GraphicBufferSource::onFrameAvailable(const BufferItem& /*item*/) {
mBufferSlot[item.mBuf] = item.mGraphicBuffer;
}
- if (mIsPersistent) {
- mConsumer->detachBuffer(item.mBuf);
- mBufferSlot[item.mBuf] = NULL;
- mConsumer->attachBuffer(&item.mBuf, item.mGraphicBuffer);
- mConsumer->releaseBuffer(item.mBuf, 0,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
- } else {
- mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
- EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
- }
+ releaseBuffer(item.mBuf, item.mFrameNumber,
+ item.mGraphicBuffer, item.mFence);
}
return;
}
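The mNumBufferAcquired bookkeeping threaded through this file follows one invariant, summarized here as comments only (not additional code in the commit):

// Every successful mConsumer->acquireBuffer() -- in suspend(), fillCodecBuffer_l()
// and onFrameAvailable() -- increments mNumBufferAcquired, and every call to the
// new releaseBuffer() helper decrements it. The destructor first releases the
// still-held "latest" buffer, so a non-zero count at that point can only mean an
// acquired buffer was never handed back:
//
//     if (mNumBufferAcquired != 0) {
//         ALOGW("potential buffer leak (acquired %d)", mNumBufferAcquired);
//     }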
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 2a8c218..3f64088 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -228,6 +228,11 @@ private:
// doing anything if we don't have a codec buffer available.
void submitEndOfInputStream_l();
+ // Release buffer to the consumer
+ void releaseBuffer(
+ int &id, uint64_t frameNum,
+ const sp<GraphicBuffer> buffer, const sp<Fence> &fence);
+
void setLatestBuffer_l(const BufferItem &item, bool dropped);
bool repeatLatestBuffer_l();
int64_t getTimestamp(const BufferItem &item);
@@ -257,6 +262,9 @@ private:
// forwarded to the codec.
size_t mNumFramesAvailable;
+ // Number of frames acquired from consumer (debug only)
+ int32_t mNumBufferAcquired;
+
// Set to true if we want to send end-of-stream after we run out of
// frames in BufferQueue.
bool mEndOfStream;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index c121163..6ee1a77 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -121,10 +121,9 @@ struct BufferMeta {
return;
}
- // check component returns proper range
- sp<ABuffer> codec = getBuffer(header, false /* backup */, true /* limit */);
-
- memcpy((OMX_U8 *)mMem->pointer() + header->nOffset, codec->data(), codec->size());
+ memcpy((OMX_U8 *)mMem->pointer() + header->nOffset,
+ header->pBuffer + header->nOffset,
+ header->nFilledLen);
}
void CopyToOMX(const OMX_BUFFERHEADERTYPE *header) {
@@ -138,16 +137,14 @@ struct BufferMeta {
}
// return either the codec or the backup buffer
- sp<ABuffer> getBuffer(const OMX_BUFFERHEADERTYPE *header, bool backup, bool limit) {
+ sp<ABuffer> getBuffer(const OMX_BUFFERHEADERTYPE *header, bool backup) {
sp<ABuffer> buf;
if (backup && mMem != NULL) {
buf = new ABuffer(mMem->pointer(), mMem->size());
} else {
buf = new ABuffer(header->pBuffer, header->nAllocLen);
}
- if (limit) {
- buf->setRange(header->nOffset, header->nFilledLen);
- }
+ buf->setRange(header->nOffset, header->nFilledLen);
return buf;
}
@@ -1092,11 +1089,10 @@ status_t OMXNodeInstance::emptyBuffer(
OMX_BUFFERHEADERTYPE *header = findBufferHeader(buffer);
BufferMeta *buffer_meta =
static_cast<BufferMeta *>(header->pAppPrivate);
- sp<ABuffer> backup = buffer_meta->getBuffer(header, true /* backup */, false /* limit */);
- sp<ABuffer> codec = buffer_meta->getBuffer(header, false /* backup */, false /* limit */);
+ sp<ABuffer> backup = buffer_meta->getBuffer(header, true /* backup */);
+ sp<ABuffer> codec = buffer_meta->getBuffer(header, false /* backup */);
// convert incoming ANW meta buffers if component is configured for gralloc metadata mode
- // ignore rangeOffset in this case
if (mMetadataType[kPortIndexInput] == kMetadataBufferTypeGrallocSource
&& backup->capacity() >= sizeof(VideoNativeMetadata)
&& codec->capacity() >= sizeof(VideoGrallocMetadata)
@@ -1106,7 +1102,7 @@ status_t OMXNodeInstance::emptyBuffer(
VideoGrallocMetadata &codecMeta = *(VideoGrallocMetadata *)codec->base();
CLOG_BUFFER(emptyBuffer, "converting ANWB %p to handle %p",
backupMeta.pBuffer, backupMeta.pBuffer->handle);
- codecMeta.pHandle = backupMeta.pBuffer != NULL ? backupMeta.pBuffer->handle : NULL;
+ codecMeta.pHandle = backupMeta.pBuffer->handle;
codecMeta.eType = kMetadataBufferTypeGrallocSource;
header->nFilledLen = rangeLength ? sizeof(codecMeta) : 0;
header->nOffset = 0;