author     Pannag Sanketi <psanketi@google.com>   2011-07-14 14:37:47 -0700
committer  Pannag Sanketi <psanketi@google.com>   2011-07-22 19:33:55 -0700
commit     a361483bb5dbd3bbf132c5b99b2df7d197c3fc50 (patch)
tree       412284fb2abbc8afd9255cd01d218cb63fe60e4c
parent     b33f3407bab0970a7f9241680723a1140b177c50 (diff)
Adding Metadata mode to SurfaceMediaSource
SurfaceMediaSource operates in metadata mode only, i.e. only the metadata is stored in the video buffers. SurfaceMediaSource passes the Gralloc buffer handle, along with a 4-byte 'type' field (indicating that the metadata is of type GrallocSource), to the encoder, as opposed to the Gralloc buffer itself.

Related to bug id: 4529323

Change-Id: I83aebc0dd10f317658cdf70be5802dfc35a1e72d
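For illustration, the two-part payload described above can be written out as a small standalone sketch (packGrallocMetadata is a hypothetical helper, not code from this change, and the include paths are best-effort assumptions):

#include <stdint.h>
#include <string.h>

#include <hardware/gralloc.h>                      // buffer_handle_t (assumed path)
#include <media/stagefright/MetadataBufferType.h>  // kMetadataBufferTypeGrallocSource

namespace android {

// Hypothetical sketch: build the metadata payload handed to the encoder in
// metadata mode. 'dst' must point to at least 4 + sizeof(buffer_handle_t)
// writable bytes.
static void packGrallocMetadata(void *dst, buffer_handle_t handle) {
    uint32_t type = kMetadataBufferTypeGrallocSource;
    memcpy(dst, &type, 4);                             // bytes [0, 4): type tag
    memcpy((char *)dst + 4, &handle, sizeof(handle));  // bytes [4, ..): gralloc handle
}

}  // namespace android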
-rw-r--r--  include/media/stagefright/MetadataBufferType.h          | 10
-rw-r--r--  include/media/stagefright/SurfaceMediaSource.h          |  6
-rw-r--r--  media/libstagefright/SurfaceMediaSource.cpp             | 50
-rw-r--r--  media/libstagefright/tests/SurfaceMediaSource_test.cpp  | 13
4 files changed, 64 insertions(+), 15 deletions(-)
diff --git a/include/media/stagefright/MetadataBufferType.h b/include/media/stagefright/MetadataBufferType.h
index 52a3257..4eaf8ac 100644
--- a/include/media/stagefright/MetadataBufferType.h
+++ b/include/media/stagefright/MetadataBufferType.h
@@ -69,6 +69,16 @@ typedef enum {
* kMetadataBufferTypeGrallocSource is used to indicate that
* the payload of the metadata buffers can be interpreted as
* a buffer_handle_t.
+ * So in this case, the metadata that the encoder receives
+ * will have a byte stream that consists of two parts:
+ * 1. First, there is an integer indicating that it is a GRAlloc
+ * source (kMetadataBufferTypeGrallocSource)
+ * 2. This is followed by the buffer_handle_t that is a handle to the
+ * GRalloc buffer. The encoder needs to interpret this GRalloc handle
+ * and encode the frames.
+ * --------------------------------------------------------------
+ * | kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
+ * --------------------------------------------------------------
*/
kMetadataBufferTypeGrallocSource = 1,
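The comment above leaves the consumer side implicit; a hedged sketch of how an encoder could validate and unpack such a metadata buffer before mapping the gralloc buffer (unpackGrallocMetadata is a hypothetical helper, not part of this change):

#include <stdint.h>
#include <string.h>

#include <hardware/gralloc.h>                      // buffer_handle_t (assumed path)
#include <media/stagefright/MetadataBufferType.h>  // kMetadataBufferTypeGrallocSource

namespace android {

// Hypothetical sketch: check the 4-byte type tag, then copy out the
// buffer_handle_t that follows it.
static bool unpackGrallocMetadata(const void *data, size_t size,
                                  buffer_handle_t *outHandle) {
    if (data == NULL || size < 4 + sizeof(buffer_handle_t)) {
        return false;                                   // payload too small
    }
    uint32_t type;
    memcpy(&type, data, 4);                             // bytes [0, 4): type tag
    if (type != kMetadataBufferTypeGrallocSource) {
        return false;                                   // not a gralloc-source buffer
    }
    memcpy(outHandle, (const char *)data + 4,           // bytes [4, ..): gralloc handle
           sizeof(buffer_handle_t));
    return true;
}

}  // namespace android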
diff --git a/include/media/stagefright/SurfaceMediaSource.h b/include/media/stagefright/SurfaceMediaSource.h
index d772701..4c1358a 100644
--- a/include/media/stagefright/SurfaceMediaSource.h
+++ b/include/media/stagefright/SurfaceMediaSource.h
@@ -63,6 +63,10 @@ public:
MediaBuffer **buffer, const ReadOptions *options = NULL);
virtual sp<MetaData> getFormat();
+ // Pass the metadata over to the buffer; call only while holding the lock
+ void passMetadataBufferLocked(MediaBuffer **buffer);
+ bool checkBufferMatchesSlot(int slot, MediaBuffer *buffer);
+
// Get / Set the frame rate used for encoding. Default fps = 30
status_t setFrameRate(int32_t fps) ;
int32_t getFrameRate( ) const;
@@ -152,7 +156,7 @@ public:
status_t setBufferCountServer(int bufferCount);
// getTimestamp retrieves the timestamp associated with the image
- // set by the most recent call to updateFrameInfoLocked().
+ // set by the most recent call to read()
//
// The timestamp is in nanoseconds, and is monotonically increasing. Its
// other semantics (zero point, etc) are source-dependent and should be
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index ecacf64..1e682c3 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -23,6 +23,7 @@
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/openmax/OMX_IVCommon.h>
+#include <media/stagefright/MetadataBufferType.h>
#include <surfaceflinger/ISurfaceComposer.h>
#include <surfaceflinger/SurfaceComposerClient.h>
@@ -715,9 +716,9 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer,
mCurrentBuf = mSlots[mCurrentSlot].mGraphicBuffer;
mCurrentTimestamp = mSlots[mCurrentSlot].mTimestamp;
- // Pass the data to the MediaBuffer
- // TODO: Change later to pass in only the metadata
- *buffer = new MediaBuffer(mCurrentBuf);
+ // Pass the data to the MediaBuffer. Pass in only the metadata
+ passMetadataBufferLocked(buffer);
+
(*buffer)->setObserver(this);
(*buffer)->add_ref();
(*buffer)->meta_data()->setInt64(kKeyTime, mCurrentTimestamp);
@@ -725,6 +726,34 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer,
return OK;
}
+// Pass the data to the MediaBuffer. Pass in only the metadata
+// The metadata passed consists of two parts:
+// 1. First, there is an integer indicating that it is a GRAlloc
+// source (kMetadataBufferTypeGrallocSource)
+// 2. This is followed by the buffer_handle_t that is a handle to the
+// GRalloc buffer. The encoder needs to interpret this GRalloc handle
+// and encode the frames.
+// --------------------------------------------------------------
+// | kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) |
+// --------------------------------------------------------------
+// Note: Call only when you have the lock
+void SurfaceMediaSource::passMetadataBufferLocked(MediaBuffer **buffer) {
+ LOGV("passMetadataBuffer");
+ // MediaBuffer allocates and owns this data
+ MediaBuffer *tempBuffer =
+ new MediaBuffer(4 + sizeof(buffer_handle_t));
+ char *data = (char *)tempBuffer->data();
+ if (data == NULL) {
+ LOGE("Cannot allocate memory for passing buffer metadata!");
+ return;
+ }
+ OMX_U32 type = kMetadataBufferTypeGrallocSource;
+ memcpy(data, &type, 4);
+ memcpy(data + 4, &(mCurrentBuf->handle), sizeof(buffer_handle_t));
+ *buffer = tempBuffer;
+}
+
+
void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
LOGV("signalBufferReturned");
@@ -732,14 +761,13 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
Mutex::Autolock autoLock(mMutex);
if (!mStarted) {
- LOGV("started = false. Nothing to do");
+ LOGW("signalBufferReturned: mStarted = false! Nothing to do!");
return;
}
for (Fifo::iterator it = mQueue.begin(); it != mQueue.end(); ++it) {
- if (mSlots[*it].mGraphicBuffer == buffer->graphicBuffer()) {
- LOGV("Buffer %d returned. Setting it 'FREE'. New Queue size = %d",
- *it, mQueue.size()-1);
+ CHECK(mSlots[*it].mGraphicBuffer != NULL);
+ if (checkBufferMatchesSlot(*it, buffer)) {
mSlots[*it].mBufferState = BufferSlot::FREE;
mQueue.erase(it);
buffer->setObserver(0);
@@ -756,6 +784,14 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
}
}
+bool SurfaceMediaSource::checkBufferMatchesSlot(int slot, MediaBuffer *buffer) {
+ LOGV("Check if Buffer matches slot");
+ // need to convert to char* for pointer arithmetic and then
+ // copy the byte stream into our handle
+ buffer_handle_t bufferHandle;
+ memcpy(&bufferHandle, (char *)(buffer->data()) + 4, sizeof(buffer_handle_t));
+ return mSlots[slot].mGraphicBuffer->handle == bufferHandle;
+}
} // end of namespace android
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index ce10812..dc6f2c9 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -71,8 +71,8 @@ protected:
mANW.clear();
}
- const int mYuvTexWidth;// = 64;
- const int mYuvTexHeight;// = 66;
+ const int mYuvTexWidth;
+ const int mYuvTexHeight;
sp<SurfaceMediaSource> mSMS;
sp<SurfaceTextureClient> mSTC;
@@ -124,7 +124,6 @@ sp<MPEG4Writer> SurfaceMediaSourceTest::setUpWriter(OMXClient &client ) {
// TODO: overwriting the colorformat since the format set by GRAlloc
// could be wrong or not be read by OMX
enc_meta->setInt32(kKeyColorFormat, OMX_COLOR_FormatYUV420Planar);
- // colorFormat);
sp<MediaSource> encoder =
@@ -225,7 +224,6 @@ TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotOneBufferPass)
ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
0, 0, HAL_PIXEL_FORMAT_YV12));
- // OMX_COLOR_FormatYUV420Planar)); // ));
ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
@@ -239,7 +237,6 @@ TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotWrongSizeBuffe
// setting the client side buffer size different than the server size
ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
10, 10, HAL_PIXEL_FORMAT_YV12));
- // OMX_COLOR_FormatYUV420Planar)); // ));
ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
@@ -258,6 +255,7 @@ TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotMultiBufferPa
0, 0, HAL_PIXEL_FORMAT_YV12));
ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+
SimpleDummyRecorder writer(mSMS);
writer.start();
@@ -276,10 +274,12 @@ TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotMultiBufferPa
// A dummy writer is used to simulate actual MPEG4Writer
TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotMultiBufferPassLag) {
LOGV("Testing MultiBufferPass, Dummy Recorder Lagging **************");
+
ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
0, 0, HAL_PIXEL_FORMAT_YV12));
ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
+
SimpleDummyRecorder writer(mSMS);
writer.start();
@@ -322,10 +322,9 @@ TEST_F(SurfaceMediaSourceTest, EncodingFromCpuFilledYV12BufferNpotMultiBufferPas
TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuFilledYV12BufferNpotWrite) {
LOGV("Testing the whole pipeline with actual Recorder");
ASSERT_EQ(NO_ERROR, native_window_set_buffers_geometry(mANW.get(),
- 0, 0, HAL_PIXEL_FORMAT_YV12)); // OMX_COLOR_FormatYUV420Planar)); // ));
+ 0, 0, HAL_PIXEL_FORMAT_YV12));
ASSERT_EQ(NO_ERROR, native_window_set_usage(mANW.get(),
GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_SW_WRITE_OFTEN));
-
OMXClient client;
CHECK_EQ(OK, client.connect());