Diffstat (limited to 'media/libstagefright')
-rw-r--r--  media/libstagefright/AMRExtractor.cpp | 6
-rw-r--r--  media/libstagefright/AMRWriter.cpp | 39
-rw-r--r--  media/libstagefright/Android.mk | 11
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 365
-rw-r--r--  media/libstagefright/CameraSource.cpp | 505
-rw-r--r--  media/libstagefright/CameraSourceTimeLapse.cpp | 518
-rw-r--r--  media/libstagefright/DRMExtractor.cpp | 13
-rw-r--r--  media/libstagefright/DataSource.cpp | 4
-rw-r--r--  media/libstagefright/FileSource.cpp | 47
-rw-r--r--  media/libstagefright/HTTPStream.cpp | 21
-rw-r--r--  media/libstagefright/JPEGSource.cpp | 2
-rw-r--r--  media/libstagefright/MP3Extractor.cpp | 283
-rw-r--r--  media/libstagefright/MPEG2TSWriter.cpp | 2
-rw-r--r--  media/libstagefright/MPEG4Extractor.cpp | 60
-rw-r--r--  media/libstagefright/MPEG4Writer.cpp | 155
-rw-r--r--  media/libstagefright/MediaBuffer.cpp | 29
-rw-r--r--  media/libstagefright/MediaDefs.cpp | 2
-rw-r--r--  media/libstagefright/MediaExtractor.cpp | 3
-rw-r--r--  media/libstagefright/MediaSourceSplitter.cpp | 234
-rw-r--r--  media/libstagefright/NuCachedSource2.cpp | 45
-rw-r--r--  media/libstagefright/NuHTTPDataSource.cpp | 205
-rw-r--r--  media/libstagefright/OMXCodec.cpp | 531
-rw-r--r--  media/libstagefright/OggExtractor.cpp | 32
-rw-r--r--  media/libstagefright/SampleIterator.cpp | 2
-rw-r--r--  media/libstagefright/SampleTable.cpp | 14
-rw-r--r--  media/libstagefright/ShoutcastSource.cpp | 3
-rw-r--r--  media/libstagefright/StagefrightMediaScanner.cpp | 6
-rw-r--r--  media/libstagefright/ThrottledSource.cpp | 4
-rw-r--r--  media/libstagefright/VBRISeeker.cpp | 164
-rw-r--r--  media/libstagefright/VideoSourceDownSampler.cpp | 142
-rw-r--r--  media/libstagefright/WAVExtractor.cpp | 12
-rw-r--r--  media/libstagefright/WVMExtractor.cpp | 104
-rw-r--r--  media/libstagefright/XINGSeeker.cpp | 223
-rw-r--r--  media/libstagefright/avc_utils.cpp | 29
-rw-r--r--  media/libstagefright/codecs/aacenc/AACEncoder.cpp | 42
-rw-r--r--  media/libstagefright/codecs/avc/enc/AVCEncoder.cpp | 88
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp | 118
-rw-r--r--  media/libstagefright/colorconversion/Android.mk | 8
-rw-r--r--  media/libstagefright/colorconversion/SoftwareRenderer.cpp | 203
-rw-r--r--  media/libstagefright/httplive/Android.mk | 3
-rw-r--r--  media/libstagefright/httplive/LiveSource.cpp | 356
-rw-r--r--  media/libstagefright/httplive/M3UParser.cpp | 56
-rw-r--r--  media/libstagefright/id3/ID3.cpp | 4
-rw-r--r--  media/libstagefright/include/AACEncoder.h | 2
-rw-r--r--  media/libstagefright/include/AwesomePlayer.h | 11
-rw-r--r--  media/libstagefright/include/HTTPStream.h | 15
-rw-r--r--  media/libstagefright/include/LiveSource.h | 18
-rw-r--r--  media/libstagefright/include/M3UParser.h | 3
-rw-r--r--  media/libstagefright/include/MP3Extractor.h | 11
-rw-r--r--  media/libstagefright/include/MP3Seeker.h | 46
-rw-r--r--  media/libstagefright/include/MPEG2TSExtractor.h | 2
-rw-r--r--  media/libstagefright/include/MPEG4Extractor.h | 8
-rw-r--r--  media/libstagefright/include/NuCachedSource2.h | 21
-rw-r--r--  media/libstagefright/include/NuHTTPDataSource.h | 46
-rw-r--r--  media/libstagefright/include/OMX.h | 18
-rw-r--r--  media/libstagefright/include/OMXNodeInstance.h | 8
-rw-r--r--  media/libstagefright/include/SampleIterator.h | 8
-rw-r--r--  media/libstagefright/include/SampleTable.h | 20
-rw-r--r--  media/libstagefright/include/SoftwareRenderer.h | 28
-rw-r--r--  media/libstagefright/include/ThrottledSource.h | 4
-rw-r--r--  media/libstagefright/include/VBRISeeker.h (renamed from media/libstagefright/omx/OMXRenderer.h) | 36
-rw-r--r--  media/libstagefright/include/WAVExtractor.h | 2
-rw-r--r--  media/libstagefright/include/WVMExtractor.h | 51
-rw-r--r--  media/libstagefright/include/XINGSeeker.h | 50
-rw-r--r--  media/libstagefright/include/avc_utils.h | 13
-rw-r--r--  media/libstagefright/include/stagefright_string.h | 54
-rw-r--r--  media/libstagefright/matroska/MatroskaExtractor.cpp | 127
-rw-r--r--  media/libstagefright/matroska/mkvparser.cpp | 7614
-rw-r--r--  media/libstagefright/matroska/mkvparser.hpp | 982
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.cpp | 9
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.cpp | 5
-rw-r--r--  media/libstagefright/omx/OMX.cpp | 144
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp | 130
-rw-r--r--  media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp | 384
-rw-r--r--  media/libstagefright/rtsp/AMPEG4AudioAssembler.h | 13
-rw-r--r--  media/libstagefright/rtsp/ARTPSource.cpp | 2
-rw-r--r--  media/libstagefright/rtsp/ARTPWriter.cpp | 2
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.cpp | 263
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.h | 20
-rw-r--r--  media/libstagefright/rtsp/ASessionDescription.cpp | 19
-rw-r--r--  media/libstagefright/rtsp/Android.mk | 1
-rw-r--r--  media/libstagefright/rtsp/MyHandler.h | 25
-rw-r--r--  media/libstagefright/string.cpp | 92
-rw-r--r--  media/libstagefright/yuv/Android.mk | 13
-rw-r--r--  media/libstagefright/yuv/YUVCanvas.cpp | 111
-rw-r--r--  media/libstagefright/yuv/YUVImage.cpp | 413
86 files changed, 10654 insertions, 4883 deletions
diff --git a/media/libstagefright/AMRExtractor.cpp b/media/libstagefright/AMRExtractor.cpp
index 1b05528..ac87c29 100644
--- a/media/libstagefright/AMRExtractor.cpp
+++ b/media/libstagefright/AMRExtractor.cpp
@@ -55,7 +55,7 @@ private:
size_t mFrameSize;
bool mIsWide;
- off_t mOffset;
+ off64_t mOffset;
int64_t mCurrentTimeUs;
bool mStarted;
MediaBufferGroup *mGroup;
@@ -115,9 +115,9 @@ AMRExtractor::AMRExtractor(const sp<DataSource> &source)
mFrameSize = getFrameSize(mIsWide, FT);
- off_t streamSize;
+ off64_t streamSize;
if (mDataSource->getSize(&streamSize) == OK) {
- off_t numFrames = streamSize / mFrameSize;
+ off64_t numFrames = streamSize / mFrameSize;
mMeta->setInt64(kKeyDuration, 20000ll * numFrames);
}
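
The AMRExtractor change above widens the stream offset and size to off64_t so files larger than 2 GiB are handled; the duration estimate itself is unchanged: every AMR frame carries 20 ms of audio. A minimal standalone sketch of that arithmetic (the 4 GiB stream and 32-byte frame size are assumed example values, not taken from the patch):

    #include <cstdint>
    #include <cstdio>

    // Duration of a constant-frame-size AMR stream: each frame is 20 ms,
    // i.e. 20000 microseconds. 64-bit math keeps >2 GiB streams safe.
    static int64_t amrDurationUs(int64_t streamSizeBytes, int64_t frameSizeBytes) {
        int64_t numFrames = streamSizeBytes / frameSizeBytes;
        return 20000ll * numFrames;
    }

    int main() {
        // Assumed example: a 4 GiB stream of 32-byte frames.
        int64_t durationUs = amrDurationUs(4ll << 30, 32);
        printf("duration: %lld us (%.1f hours)\n",
               (long long)durationUs, durationUs / 3.6e9);
        return 0;
    }
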
diff --git a/media/libstagefright/AMRWriter.cpp b/media/libstagefright/AMRWriter.cpp
index c0b1abe..0db3d1d 100644
--- a/media/libstagefright/AMRWriter.cpp
+++ b/media/libstagefright/AMRWriter.cpp
@@ -24,20 +24,28 @@
#include <media/mediarecorder.h>
#include <sys/prctl.h>
#include <sys/resource.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
namespace android {
AMRWriter::AMRWriter(const char *filename)
- : mFile(fopen(filename, "wb")),
- mInitCheck(mFile != NULL ? OK : NO_INIT),
+ : mFd(-1),
+ mInitCheck(NO_INIT),
mStarted(false),
mPaused(false),
mResumed(false) {
+
+ mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC);
+ if (mFd >= 0) {
+ mInitCheck = OK;
+ }
}
AMRWriter::AMRWriter(int fd)
- : mFile(fdopen(fd, "wb")),
- mInitCheck(mFile != NULL ? OK : NO_INIT),
+ : mFd(dup(fd)),
+ mInitCheck(mFd < 0? NO_INIT: OK),
mStarted(false),
mPaused(false),
mResumed(false) {
@@ -48,9 +56,9 @@ AMRWriter::~AMRWriter() {
stop();
}
- if (mFile != NULL) {
- fclose(mFile);
- mFile = NULL;
+ if (mFd != -1) {
+ close(mFd);
+ mFd = -1;
}
}
@@ -90,8 +98,8 @@ status_t AMRWriter::addSource(const sp<MediaSource> &source) {
mSource = source;
const char *kHeader = isWide ? "#!AMR-WB\n" : "#!AMR\n";
- size_t n = strlen(kHeader);
- if (fwrite(kHeader, 1, n, mFile) != n) {
+ ssize_t n = strlen(kHeader);
+ if (write(mFd, kHeader, n) != n) {
return ERROR_IO;
}
@@ -240,11 +248,9 @@ status_t AMRWriter::threadFunc() {
notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_MAX_DURATION_REACHED, 0);
break;
}
- ssize_t n = fwrite(
- (const uint8_t *)buffer->data() + buffer->range_offset(),
- 1,
- buffer->range_length(),
- mFile);
+ ssize_t n = write(mFd,
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ buffer->range_length());
if (n < (ssize_t)buffer->range_length()) {
buffer->release();
@@ -266,9 +272,8 @@ status_t AMRWriter::threadFunc() {
notify(MEDIA_RECORDER_EVENT_INFO, MEDIA_RECORDER_INFO_COMPLETION_STATUS, UNKNOWN_ERROR);
}
- fflush(mFile);
- fclose(mFile);
- mFile = NULL;
+ close(mFd);
+ mFd = -1;
mReachedEOS = true;
if (err == ERROR_END_OF_STREAM) {
return OK;
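
AMRWriter moves from buffered stdio (fopen/fwrite/fclose) to a raw file descriptor (open/write/close) with O_LARGEFILE. A minimal sketch of that pattern follows; note that the O_WRONLY flag and the 0644 creation mode are additions of this sketch (POSIX open() normally expects them alongside O_CREAT), while the patch itself passes only O_CREAT | O_LARGEFILE | O_TRUNC:

    #include <fcntl.h>
    #include <unistd.h>
    #include <cerrno>
    #include <cstring>
    #include <cstdio>

    #ifndef O_LARGEFILE
    #define O_LARGEFILE 0   // already implied on 64-bit hosts
    #endif

    // Sketch of the fd-based writing pattern AMRWriter switches to above.
    static int writeAmrHeader(const char *path, bool wide) {
        const char *header = wide ? "#!AMR-WB\n" : "#!AMR\n";
        int fd = open(path, O_WRONLY | O_CREAT | O_LARGEFILE | O_TRUNC, 0644);
        if (fd < 0) {
            fprintf(stderr, "open failed: %s\n", strerror(errno));
            return -1;
        }
        ssize_t n = (ssize_t)strlen(header);
        int err = (write(fd, header, n) == n) ? 0 : -1;  // short write => error
        close(fd);
        return err;
    }

    int main() {
        return writeAmrHeader("/tmp/test.amr", false) == 0 ? 0 : 1;
    }
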
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index d674547..4ad1eb4 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -10,6 +10,8 @@ LOCAL_SRC_FILES:= \
AudioSource.cpp \
AwesomePlayer.cpp \
CameraSource.cpp \
+ CameraSourceTimeLapse.cpp \
+ VideoSourceDownSampler.cpp \
DataSource.cpp \
DRMExtractor.cpp \
ESDS.cpp \
@@ -25,6 +27,7 @@ LOCAL_SRC_FILES:= \
MediaDefs.cpp \
MediaExtractor.cpp \
MediaSource.cpp \
+ MediaSourceSplitter.cpp \
MetaData.cpp \
NuCachedSource2.cpp \
NuHTTPDataSource.cpp \
@@ -41,9 +44,11 @@ LOCAL_SRC_FILES:= \
TimeSource.cpp \
TimedEventQueue.cpp \
Utils.cpp \
+ VBRISeeker.cpp \
WAVExtractor.cpp \
+ WVMExtractor.cpp \
+ XINGSeeker.cpp \
avc_utils.cpp \
- string.cpp
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
@@ -60,8 +65,10 @@ LOCAL_SHARED_LIBRARIES := \
libsonivox \
libvorbisidec \
libsurfaceflinger_client \
+ libstagefright_yuv \
libcamera_client \
- libdrmframework
+ libdrmframework \
+ libcrypto
LOCAL_STATIC_LIBRARIES := \
libstagefright_aacdec \
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index e65c943..922aaa8 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -45,10 +45,12 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
#include <media/stagefright/foundation/ALooper.h>
+#define USE_SURFACE_ALLOC 1
+
namespace android {
static int64_t kLowWaterMarkUs = 2000000ll; // 2secs
@@ -77,39 +79,17 @@ private:
AwesomeEvent &operator=(const AwesomeEvent &);
};
-struct AwesomeRemoteRenderer : public AwesomeRenderer {
- AwesomeRemoteRenderer(const sp<IOMXRenderer> &target)
- : mTarget(target) {
- }
-
- virtual void render(MediaBuffer *buffer) {
- void *id;
- if (buffer->meta_data()->findPointer(kKeyBufferID, &id)) {
- mTarget->render((IOMX::buffer_id)id);
- }
- }
-
-private:
- sp<IOMXRenderer> mTarget;
-
- AwesomeRemoteRenderer(const AwesomeRemoteRenderer &);
- AwesomeRemoteRenderer &operator=(const AwesomeRemoteRenderer &);
-};
-
struct AwesomeLocalRenderer : public AwesomeRenderer {
AwesomeLocalRenderer(
- bool previewOnly,
- const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight,
int32_t rotationDegrees)
- : mTarget(NULL),
- mLibHandle(NULL) {
- init(previewOnly, componentName,
- colorFormat, surface, displayWidth,
- displayHeight, decodedWidth, decodedHeight,
+ : mTarget(NULL) {
+ init(colorFormat, surface,
+ displayWidth, displayHeight,
+ decodedWidth, decodedHeight,
rotationDegrees);
}
@@ -126,22 +106,14 @@ protected:
virtual ~AwesomeLocalRenderer() {
delete mTarget;
mTarget = NULL;
-
- if (mLibHandle) {
- dlclose(mLibHandle);
- mLibHandle = NULL;
- }
}
private:
- VideoRenderer *mTarget;
- void *mLibHandle;
+ SoftwareRenderer *mTarget;
void init(
- bool previewOnly,
- const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight,
int32_t rotationDegrees);
@@ -151,76 +123,63 @@ private:
};
void AwesomeLocalRenderer::init(
- bool previewOnly,
- const char *componentName,
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight,
int32_t rotationDegrees) {
- if (!previewOnly) {
- // We will stick to the vanilla software-color-converting renderer
- // for "previewOnly" mode, to avoid unneccessarily switching overlays
- // more often than necessary.
-
- mLibHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
-
- if (mLibHandle) {
- typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees);
-
- typedef VideoRenderer *(*CreateRendererFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight);
-
- CreateRendererWithRotationFunc funcWithRotation =
- (CreateRendererWithRotationFunc)dlsym(
- mLibHandle,
- "_Z26createRendererWithRotationRKN7android2spINS_8"
- "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
-
- if (funcWithRotation) {
- mTarget =
- (*funcWithRotation)(
- surface, componentName, colorFormat,
- displayWidth, displayHeight,
- decodedWidth, decodedHeight,
- rotationDegrees);
- } else {
- if (rotationDegrees != 0) {
- LOGW("renderer does not support rotation.");
- }
+ mTarget = new SoftwareRenderer(
+ colorFormat, surface, displayWidth, displayHeight,
+ decodedWidth, decodedHeight, rotationDegrees);
+}
- CreateRendererFunc func =
- (CreateRendererFunc)dlsym(
- mLibHandle,
- "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
- "OMX_COLOR_FORMATTYPEjjjj");
-
- if (func) {
- mTarget =
- (*func)(surface, componentName, colorFormat,
- displayWidth, displayHeight,
- decodedWidth, decodedHeight);
- }
- }
+struct AwesomeNativeWindowRenderer : public AwesomeRenderer {
+ AwesomeNativeWindowRenderer(
+ const sp<ANativeWindow> &nativeWindow,
+ int32_t rotationDegrees)
+ : mNativeWindow(nativeWindow) {
+ applyRotation(rotationDegrees);
+ }
+
+ virtual void render(MediaBuffer *buffer) {
+ status_t err = mNativeWindow->queueBuffer(
+ mNativeWindow.get(), buffer->graphicBuffer().get());
+ if (err != 0) {
+ LOGE("queueBuffer failed with error %s (%d)", strerror(-err),
+ -err);
+ return;
}
+
+ sp<MetaData> metaData = buffer->meta_data();
+ metaData->setInt32(kKeyRendered, 1);
}
- if (mTarget == NULL) {
- mTarget = new SoftwareRenderer(
- colorFormat, surface, displayWidth, displayHeight,
- decodedWidth, decodedHeight, rotationDegrees);
+protected:
+ virtual ~AwesomeNativeWindowRenderer() {}
+
+private:
+ sp<ANativeWindow> mNativeWindow;
+
+ void applyRotation(int32_t rotationDegrees) {
+ uint32_t transform;
+ switch (rotationDegrees) {
+ case 0: transform = 0; break;
+ case 90: transform = HAL_TRANSFORM_ROT_90; break;
+ case 180: transform = HAL_TRANSFORM_ROT_180; break;
+ case 270: transform = HAL_TRANSFORM_ROT_270; break;
+ default: transform = 0; break;
+ }
+
+ if (transform) {
+ CHECK_EQ(0, native_window_set_buffers_transform(
+ mNativeWindow.get(), transform));
+ }
}
-}
+
+ AwesomeNativeWindowRenderer(const AwesomeNativeWindowRenderer &);
+ AwesomeNativeWindowRenderer &operator=(
+ const AwesomeNativeWindowRenderer &);
+};
AwesomePlayer::AwesomePlayer()
: mQueueStarted(false),
@@ -293,6 +252,16 @@ status_t AwesomePlayer::setDataSource_l(
mUri = uri;
+ if (!strncmp("http://", uri, 7)) {
+ // Hack to support http live.
+
+ size_t len = strlen(uri);
+ if (!strcasecmp(&uri[len - 5], ".m3u8")) {
+ mUri = "httplive://";
+ mUri.append(&uri[7]);
+ }
+ }
+
if (headers) {
mUriHeaders = *headers;
}
@@ -415,6 +384,7 @@ void AwesomePlayer::reset_l() {
if (mDecryptHandle != NULL) {
mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
Playback::STOP, 0);
+ mDrmManagerClient->closeDecryptSession(mDecryptHandle);
mDecryptHandle = NULL;
mDrmManagerClient = NULL;
}
@@ -425,6 +395,12 @@ void AwesomePlayer::reset_l() {
LOGI("interrupting the connection process");
mConnectingDataSource->disconnect();
}
+
+ if (mFlags & PREPARING_CONNECTED) {
+ // We are basically done preparing, we're just buffering
+ // enough data to start playback, we can safely interrupt that.
+ finishAsyncPrepare_l();
+ }
}
while (mFlags & PREPARING) {
@@ -523,7 +499,7 @@ void AwesomePlayer::notifyListener_l(int msg, int ext1, int ext2) {
}
bool AwesomePlayer::getBitrate(int64_t *bitrate) {
- off_t size;
+ off64_t size;
if (mDurationUs >= 0 && mCachedSource != NULL
&& mCachedSource->getSize(&size) == OK) {
*bitrate = size * 8000000ll / mDurationUs; // in bits/sec
@@ -556,6 +532,12 @@ bool AwesomePlayer::getCachedDuration_l(int64_t *durationUs, bool *eos) {
return false;
}
+void AwesomePlayer::ensureCacheIsFetching_l() {
+ if (mCachedSource != NULL) {
+ mCachedSource->resumeFetchingIfNecessary();
+ }
+}
+
void AwesomePlayer::onBufferingUpdate() {
Mutex::Autolock autoLock(mLock);
if (!mBufferingEventPending) {
@@ -598,6 +580,7 @@ void AwesomePlayer::onBufferingUpdate() {
kLowWaterMarkBytes);
mFlags |= CACHE_UNDERRUN;
pause_l();
+ ensureCacheIsFetching_l();
notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
} else if (eos || cachedDataRemaining > kHighWaterMarkBytes) {
if (mFlags & CACHE_UNDERRUN) {
@@ -619,12 +602,16 @@ void AwesomePlayer::onBufferingUpdate() {
int64_t cachedDurationUs;
bool eos;
if (getCachedDuration_l(&cachedDurationUs, &eos)) {
+ LOGV("cachedDurationUs = %.2f secs, eos=%d",
+ cachedDurationUs / 1E6, eos);
+
if ((mFlags & PLAYING) && !eos
&& (cachedDurationUs < kLowWaterMarkUs)) {
LOGI("cache is running low (%.2f secs) , pausing.",
cachedDurationUs / 1E6);
mFlags |= CACHE_UNDERRUN;
pause_l();
+ ensureCacheIsFetching_l();
notifyListener_l(MEDIA_INFO, MEDIA_INFO_BUFFERING_START);
} else if (eos || cachedDurationUs > kHighWaterMarkUs) {
if (mFlags & CACHE_UNDERRUN) {
@@ -825,52 +812,72 @@ status_t AwesomePlayer::play_l() {
return OK;
}
+void AwesomePlayer::notifyVideoSize_l() {
+ sp<MetaData> meta = mVideoSource->getFormat();
+
+ int32_t decodedWidth, decodedHeight;
+ CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
+ CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
+
+ int32_t rotationDegrees;
+ if (!mVideoTrack->getFormat()->findInt32(
+ kKeyRotation, &rotationDegrees)) {
+ rotationDegrees = 0;
+ }
+
+ if (rotationDegrees == 90 || rotationDegrees == 270) {
+ notifyListener_l(
+ MEDIA_SET_VIDEO_SIZE, decodedHeight, decodedWidth);
+ } else {
+ notifyListener_l(
+ MEDIA_SET_VIDEO_SIZE, decodedWidth, decodedHeight);
+ }
+}
+
void AwesomePlayer::initRenderer_l() {
- if (mISurface != NULL) {
- sp<MetaData> meta = mVideoSource->getFormat();
-
- int32_t format;
- const char *component;
- int32_t decodedWidth, decodedHeight;
- CHECK(meta->findInt32(kKeyColorFormat, &format));
- CHECK(meta->findCString(kKeyDecoderComponent, &component));
- CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
- CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
-
- int32_t rotationDegrees;
- if (!mVideoTrack->getFormat()->findInt32(
- kKeyRotation, &rotationDegrees)) {
- rotationDegrees = 0;
- }
+ if (mSurface == NULL) {
+ return;
+ }
- mVideoRenderer.clear();
+ sp<MetaData> meta = mVideoSource->getFormat();
- // Must ensure that mVideoRenderer's destructor is actually executed
- // before creating a new one.
- IPCThreadState::self()->flushCommands();
+ int32_t format;
+ const char *component;
+ int32_t decodedWidth, decodedHeight;
+ CHECK(meta->findInt32(kKeyColorFormat, &format));
+ CHECK(meta->findCString(kKeyDecoderComponent, &component));
+ CHECK(meta->findInt32(kKeyWidth, &decodedWidth));
+ CHECK(meta->findInt32(kKeyHeight, &decodedHeight));
- if (!strncmp("OMX.", component, 4)) {
- // Our OMX codecs allocate buffers on the media_server side
- // therefore they require a remote IOMXRenderer that knows how
- // to display them.
- mVideoRenderer = new AwesomeRemoteRenderer(
- mClient.interface()->createRenderer(
- mISurface, component,
- (OMX_COLOR_FORMATTYPE)format,
- decodedWidth, decodedHeight,
- mVideoWidth, mVideoHeight,
- rotationDegrees));
- } else {
- // Other decoders are instantiated locally and as a consequence
- // allocate their buffers in local address space.
- mVideoRenderer = new AwesomeLocalRenderer(
- false, // previewOnly
- component,
- (OMX_COLOR_FORMATTYPE)format,
- mISurface,
- mVideoWidth, mVideoHeight,
- decodedWidth, decodedHeight, rotationDegrees);
- }
+ int32_t rotationDegrees;
+ if (!mVideoTrack->getFormat()->findInt32(
+ kKeyRotation, &rotationDegrees)) {
+ rotationDegrees = 0;
+ }
+
+ mVideoRenderer.clear();
+
+ // Must ensure that mVideoRenderer's destructor is actually executed
+ // before creating a new one.
+ IPCThreadState::self()->flushCommands();
+
+ if (USE_SURFACE_ALLOC && strncmp(component, "OMX.", 4) == 0) {
+ // Hardware decoders avoid the CPU color conversion by decoding
+ // directly to ANativeBuffers, so we must use a renderer that
+ // just pushes those buffers to the ANativeWindow.
+ mVideoRenderer =
+ new AwesomeNativeWindowRenderer(mSurface, rotationDegrees);
+ } else {
+ // Other decoders are instantiated locally and as a consequence
+ // allocate their buffers in local address space. This renderer
+ // then performs a color conversion and copy to get the data
+ // into the ANativeBuffer.
+ mVideoRenderer = new AwesomeLocalRenderer(
+ (OMX_COLOR_FORMATTYPE)format,
+ mSurface,
+ mVideoWidth, mVideoHeight,
+ decodedWidth, decodedHeight,
+ rotationDegrees);
}
}
@@ -914,10 +921,10 @@ bool AwesomePlayer::isPlaying() const {
return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}
-void AwesomePlayer::setISurface(const sp<ISurface> &isurface) {
+void AwesomePlayer::setSurface(const sp<Surface> &surface) {
Mutex::Autolock autoLock(mLock);
- mISurface = isurface;
+ mSurface = surface;
}
void AwesomePlayer::setAudioSink(
@@ -1105,7 +1112,7 @@ status_t AwesomePlayer::initVideoDecoder(uint32_t flags) {
mClient.interface(), mVideoTrack->getFormat(),
false, // createEncoder
mVideoTrack,
- NULL, flags);
+ NULL, flags, USE_SURFACE_ALLOC ? mSurface : NULL);
if (mVideoSource != NULL) {
int64_t durationUs;
@@ -1136,7 +1143,7 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
}
if (mAudioPlayer != NULL) {
- LOGV("seeking audio to %lld us (%.2f secs).", timeUs, timeUs / 1E6);
+ LOGV("seeking audio to %lld us (%.2f secs).", videoTimeUs, videoTimeUs / 1E6);
// If we don't have a video time, seek audio to the originally
// requested seek time instead.
@@ -1154,6 +1161,13 @@ void AwesomePlayer::finishSeekIfNecessary(int64_t videoTimeUs) {
mFlags |= FIRST_FRAME;
mSeeking = false;
mSeekNotificationSent = false;
+
+ if (mDecryptHandle != NULL) {
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+ Playback::PAUSE, 0);
+ mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
+ Playback::START, videoTimeUs / 1000);
+ }
}
void AwesomePlayer::onVideoEvent() {
@@ -1210,6 +1224,8 @@ void AwesomePlayer::onVideoEvent() {
if (err == INFO_FORMAT_CHANGED) {
LOGV("VideoSource signalled format change.");
+ notifyVideoSize_l();
+
if (mVideoRenderer != NULL) {
mVideoRendererIsPreview = false;
initRenderer_l();
@@ -1251,17 +1267,11 @@ void AwesomePlayer::onVideoEvent() {
mVideoTimeUs = timeUs;
}
+ bool wasSeeking = mSeeking;
finishSeekIfNecessary(timeUs);
TimeSource *ts = (mFlags & AUDIO_AT_EOS) ? &mSystemTimeSource : mTimeSource;
- if (mDecryptHandle != NULL) {
- mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
- Playback::PAUSE, 0);
- mDrmManagerClient->setPlaybackStatus(mDecryptHandle,
- Playback::START, timeUs / 1000);
- }
-
if (mFlags & FIRST_FRAME) {
mFlags &= ~FIRST_FRAME;
@@ -1278,6 +1288,11 @@ void AwesomePlayer::onVideoEvent() {
int64_t latenessUs = nowUs - timeUs;
+ if (wasSeeking) {
+ // Let's display the first frame after seeking right away.
+ latenessUs = 0;
+ }
+
if (mRTPSession != NULL) {
// We'll completely ignore timestamps for gtalk videochat
// and we'll play incoming video as fast as we get it.
@@ -1633,9 +1648,15 @@ status_t AwesomePlayer::finishSetDataSource_l() {
}
dataSource->getDrmInfo(&mDecryptHandle, &mDrmManagerClient);
- if (mDecryptHandle != NULL
- && RightsStatus::RIGHTS_VALID != mDecryptHandle->status) {
- notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+ if (mDecryptHandle != NULL) {
+ if (RightsStatus::RIGHTS_VALID == mDecryptHandle->status) {
+ if (DecryptApiType::WV_BASED == mDecryptHandle->decryptApiType) {
+ LOGD("Setting mCachedSource to NULL for WVM\n");
+ mCachedSource.clear();
+ }
+ } else {
+ notifyListener_l(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, ERROR_NO_LICENSE);
+ }
}
return setDataSource_l(extractor);
@@ -1649,7 +1670,7 @@ void AwesomePlayer::abortPrepare(status_t err) {
}
mPrepareResult = err;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
}
@@ -1697,6 +1718,8 @@ void AwesomePlayer::onPrepareAsyncEvent() {
}
}
+ mFlags |= PREPARING_CONNECTED;
+
if (mCachedSource != NULL || mRTSPController != NULL) {
postBufferingEvent_l();
} else {
@@ -1706,32 +1729,17 @@ void AwesomePlayer::onPrepareAsyncEvent() {
void AwesomePlayer::finishAsyncPrepare_l() {
if (mIsAsyncPrepare) {
- if (mVideoWidth < 0 || mVideoHeight < 0) {
+ if (mVideoSource == NULL) {
notifyListener_l(MEDIA_SET_VIDEO_SIZE, 0, 0);
} else {
- int32_t rotationDegrees;
- if (!mVideoTrack->getFormat()->findInt32(
- kKeyRotation, &rotationDegrees)) {
- rotationDegrees = 0;
- }
-
-#if 1
- if (rotationDegrees == 90 || rotationDegrees == 270) {
- notifyListener_l(
- MEDIA_SET_VIDEO_SIZE, mVideoHeight, mVideoWidth);
- } else
-#endif
- {
- notifyListener_l(
- MEDIA_SET_VIDEO_SIZE, mVideoWidth, mVideoHeight);
- }
+ notifyVideoSize_l();
}
notifyListener_l(MEDIA_PREPARED);
}
mPrepareResult = OK;
- mFlags &= ~(PREPARING|PREPARE_CANCELLED);
+ mFlags &= ~(PREPARING|PREPARE_CANCELLED|PREPARING_CONNECTED);
mFlags |= PREPARED;
mAsyncPrepareEvent = NULL;
mPreparedCondition.broadcast();
@@ -1846,13 +1854,11 @@ status_t AwesomePlayer::resume() {
mFlags = state->mFlags & (AUTO_LOOPING | LOOPING | AT_EOS);
- if (state->mLastVideoFrame && mISurface != NULL) {
+ if (state->mLastVideoFrame && mSurface != NULL) {
mVideoRenderer =
new AwesomeLocalRenderer(
- true, // previewOnly
- "",
(OMX_COLOR_FORMATTYPE)state->mColorFormat,
- mISurface,
+ mSurface,
state->mVideoWidth,
state->mVideoHeight,
state->mDecodedWidth,
@@ -1888,4 +1894,3 @@ void AwesomePlayer::postAudioSeekComplete() {
}
} // namespace android
-
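
The onBufferingUpdate() hunks above add ensureCacheIsFetching_l() to the existing watermark logic: playback pauses when the cached duration falls below a low watermark and resumes once the cache refills past a high watermark. A simplified sketch of that hysteresis; only kLowWaterMarkUs (2 secs) is taken from the patch, the high-watermark value here is an assumed placeholder:

    #include <cstdint>
    #include <cstdio>

    // Simplified cache-watermark hysteresis: pause on underrun, keep the
    // cache fetching, and resume only after the high watermark is reached.
    struct BufferingController {
        static const int64_t kLowWaterMarkUs  = 2000000ll;   // 2 secs (from the patch)
        static const int64_t kHighWaterMarkUs = 20000000ll;  // assumed placeholder

        bool underrun = false;

        // Returns true if playback should be running for this cache state.
        bool onCacheUpdate(int64_t cachedDurationUs, bool eos) {
            if (!eos && cachedDurationUs < kLowWaterMarkUs) {
                underrun = true;      // pause_l() + ensureCacheIsFetching_l()
            } else if (eos || cachedDurationUs > kHighWaterMarkUs) {
                underrun = false;     // play_l() once enough data is cached
            }
            return !underrun;
        }
    };

    int main() {
        BufferingController c;
        printf("%d %d %d\n",
               c.onCacheUpdate(1000000, false),    // below low mark  -> pause
               c.onCacheUpdate(5000000, false),    // between marks   -> stay paused
               c.onCacheUpdate(25000000, false));  // above high mark -> resume
        return 0;
    }
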
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 89cb135..d9ff723 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -27,6 +27,7 @@
#include <media/stagefright/MetaData.h>
#include <camera/Camera.h>
#include <camera/CameraParameters.h>
+#include <surfaceflinger/Surface.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -65,6 +66,11 @@ void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
void CameraSourceListener::postData(int32_t msgType, const sp<IMemory> &dataPtr) {
LOGV("postData(%d, ptr:%p, size:%d)",
msgType, dataPtr->pointer(), dataPtr->size());
+
+ sp<CameraSource> source = mSource.promote();
+ if (source.get() != NULL) {
+ source->dataCallback(msgType, dataPtr);
+ }
}
void CameraSourceListener::postDataTimestamp(
@@ -77,6 +83,10 @@ void CameraSourceListener::postDataTimestamp(
}
static int32_t getColorFormat(const char* colorFormat) {
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
+ return OMX_COLOR_FormatYUV420Planar;
+ }
+
if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
return OMX_COLOR_FormatYUV422SemiPlanar;
}
@@ -99,72 +109,410 @@ static int32_t getColorFormat(const char* colorFormat) {
CHECK_EQ(0, "Unknown color format");
}
-// static
CameraSource *CameraSource::Create() {
- sp<Camera> camera = Camera::connect(0);
-
- if (camera.get() == NULL) {
- return NULL;
- }
+ Size size;
+ size.width = -1;
+ size.height = -1;
- return new CameraSource(camera);
+ sp<ICamera> camera;
+ return new CameraSource(camera, 0, size, -1, NULL, false);
}
// static
-CameraSource *CameraSource::CreateFromCamera(const sp<Camera> &camera) {
- if (camera.get() == NULL) {
- return NULL;
+CameraSource *CameraSource::CreateFromCamera(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers) {
+
+ CameraSource *source = new CameraSource(camera, cameraId,
+ videoSize, frameRate, surface,
+ storeMetaDataInVideoBuffers);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
}
-
- return new CameraSource(camera);
+ return source;
}
-CameraSource::CameraSource(const sp<Camera> &camera)
- : mCamera(camera),
- mFirstFrameTimeUs(0),
- mLastFrameTimestampUs(0),
+CameraSource::CameraSource(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ const sp<Surface>& surface,
+ bool storeMetaDataInVideoBuffers)
+ : mCameraFlags(0),
+ mVideoFrameRate(-1),
+ mCamera(0),
+ mSurface(surface),
mNumFramesReceived(0),
+ mLastFrameTimestampUs(0),
+ mStarted(false),
+ mFirstFrameTimeUs(0),
mNumFramesEncoded(0),
mNumFramesDropped(0),
mNumGlitches(0),
mGlitchDurationThresholdUs(200000),
- mCollectStats(false),
- mStarted(false) {
+ mCollectStats(false) {
+
+ mVideoSize.width = -1;
+ mVideoSize.height = -1;
+
+ mInitCheck = init(camera, cameraId,
+ videoSize, frameRate,
+ storeMetaDataInVideoBuffers);
+}
+
+status_t CameraSource::initCheck() const {
+ return mInitCheck;
+}
+
+status_t CameraSource::isCameraAvailable(
+ const sp<ICamera>& camera, int32_t cameraId) {
+
+ if (camera == 0) {
+ mCamera = Camera::connect(cameraId);
+ mCameraFlags &= ~FLAGS_HOT_CAMERA;
+ } else {
+ mCamera = Camera::create(camera);
+ mCameraFlags |= FLAGS_HOT_CAMERA;
+ }
+
+ // Is camera available?
+ if (mCamera == 0) {
+ LOGE("Camera connection could not be established.");
+ return -EBUSY;
+ }
+ if (!(mCameraFlags & FLAGS_HOT_CAMERA)) {
+ mCamera->lock();
+ }
+ return OK;
+}
+
+
+/*
+ * Check to see whether the requested video width and height is one
+ * of the supported sizes.
+ * @param width the video frame width in pixels
+ * @param height the video frame height in pixels
+ * @param supportedSizes the vector of sizes that we check against
+ * @return true if the dimension (width and height) is supported.
+ */
+static bool isVideoSizeSupported(
+ int32_t width, int32_t height,
+ const Vector<Size>& supportedSizes) {
+
+ LOGV("isVideoSizeSupported");
+ for (size_t i = 0; i < supportedSizes.size(); ++i) {
+ if (width == supportedSizes[i].width &&
+ height == supportedSizes[i].height) {
+ return true;
+ }
+ }
+ return false;
+}
+
+/*
+ * If the preview and video output is separate, we only set the
+ * the video size, and applications should set the preview size
+ * to some proper value, and the recording framework will not
+ * change the preview size; otherwise, if the video and preview
+ * output is the same, we need to set the preview to be the same
+ * as the requested video size.
+ *
+ */
+/*
+ * Query the camera to retrieve the supported video frame sizes
+ * and also to see whether CameraParameters::setVideoSize()
+ * is supported or not.
+ * @param params CameraParameters to retrieve the information
+ * @param isSetVideoSizeSupported returns whether method
+ * CameraParameters::setVideoSize() is supported or not.
+ * @param sizes returns the vector of Size objects for the
+ * supported video frame sizes advertised by the camera.
+ */
+static void getSupportedVideoSizes(
+ const CameraParameters& params,
+ bool *isSetVideoSizeSupported,
+ Vector<Size>& sizes) {
+
+ *isSetVideoSizeSupported = true;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ LOGD("Camera does not support setVideoSize()");
+ params.getSupportedPreviewSizes(sizes);
+ *isSetVideoSizeSupported = false;
+ }
+}
+
+/*
+ * Check whether the camera has the supported color format
+ * @param params CameraParameters to retrieve the information
+ * @return OK if no error.
+ */
+status_t CameraSource::isCameraColorFormatSupported(
+ const CameraParameters& params) {
+ mColorFormat = getColorFormat(params.get(
+ CameraParameters::KEY_VIDEO_FRAME_FORMAT));
+ if (mColorFormat == -1) {
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+/*
+ * Configure the camera to use the requested video size
+ * (width and height) and/or frame rate. If both width and
+ * height are -1, configuration on the video size is skipped.
+ * if frameRate is -1, configuration on the frame rate
+ * is skipped. Skipping the configuration allows one to
+ * use the current camera setting without the need to
+ * actually know the specific values (see Create() method).
+ *
+ * @param params the CameraParameters to be configured
+ * @param width the target video frame width in pixels
+ * @param height the target video frame height in pixels
+ * @param frameRate the target frame rate in frames per second.
+ * @return OK if no error.
+ */
+status_t CameraSource::configureCamera(
+ CameraParameters* params,
+ int32_t width, int32_t height,
+ int32_t frameRate) {
+
+ Vector<Size> sizes;
+ bool isSetVideoSizeSupportedByCamera = true;
+ getSupportedVideoSizes(*params, &isSetVideoSizeSupportedByCamera, sizes);
+ bool isCameraParamChanged = false;
+ if (width != -1 && height != -1) {
+ if (!isVideoSizeSupported(width, height, sizes)) {
+ LOGE("Video dimension (%dx%d) is unsupported", width, height);
+ return BAD_VALUE;
+ }
+ if (isSetVideoSizeSupportedByCamera) {
+ params->setVideoSize(width, height);
+ } else {
+ params->setPreviewSize(width, height);
+ }
+ isCameraParamChanged = true;
+ } else if ((width == -1 && height != -1) ||
+ (width != -1 && height == -1)) {
+ // If one and only one of the width and height is -1
+ // we reject such a request.
+ LOGE("Requested video size (%dx%d) is not supported", width, height);
+ return BAD_VALUE;
+ } else { // width == -1 && height == -1
+ // Do not configure the camera.
+ // Use the current width and height value setting from the camera.
+ }
+
+ if (frameRate != -1) {
+ CHECK(frameRate > 0 && frameRate <= 120);
+ const char* supportedFrameRates =
+ params->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
+ CHECK(supportedFrameRates != NULL);
+ LOGV("Supported frame rates: %s", supportedFrameRates);
+ char buf[4];
+ snprintf(buf, 4, "%d", frameRate);
+ if (strstr(supportedFrameRates, buf) == NULL) {
+ LOGE("Requested frame rate (%d) is not supported: %s",
+ frameRate, supportedFrameRates);
+ return BAD_VALUE;
+ }
+
+ // The frame rate is supported, set the camera to the requested value.
+ params->setPreviewFrameRate(frameRate);
+ isCameraParamChanged = true;
+ } else { // frameRate == -1
+ // Do not configure the camera.
+ // Use the current frame rate value setting from the camera
+ }
+
+ if (isCameraParamChanged) {
+ // Either frame rate or frame size needs to be changed.
+ String8 s = params->flatten();
+ if (OK != mCamera->setParameters(s)) {
+ LOGE("Could not change settings."
+ " Someone else is using camera %p?", mCamera.get());
+ return -EBUSY;
+ }
+ }
+ return OK;
+}
+
+/*
+ * Check whether the requested video frame size
+ * has been successfully configured or not. If both width and height
+ * are -1, check on the current width and height value setting
+ * is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param the target video frame width in pixels to check against
+ * @param the target video frame height in pixels to check against
+ * @return OK if no error
+ */
+status_t CameraSource::checkVideoSize(
+ const CameraParameters& params,
+ int32_t width, int32_t height) {
+
+ // The actual video size is the same as the preview size
+ // if the camera hal does not support separate video and
+ // preview output. In this case, we retrieve the video
+ // size from preview.
+ int32_t frameWidthActual = -1;
+ int32_t frameHeightActual = -1;
+ Vector<Size> sizes;
+ params.getSupportedVideoSizes(sizes);
+ if (sizes.size() == 0) {
+ // video size is the same as preview size
+ params.getPreviewSize(&frameWidthActual, &frameHeightActual);
+ } else {
+ // video size may not be the same as preview
+ params.getVideoSize(&frameWidthActual, &frameHeightActual);
+ }
+ if (frameWidthActual < 0 || frameHeightActual < 0) {
+ LOGE("Failed to retrieve video frame size (%dx%d)",
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame size against the target/requested
+ // video frame size.
+ if (width != -1 && height != -1) {
+ if (frameWidthActual != width || frameHeightActual != height) {
+ LOGE("Failed to set video frame size to %dx%d. "
+ "The actual video size is %dx%d ", width, height,
+ frameWidthActual, frameHeightActual);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Good now.
+ mVideoSize.width = frameWidthActual;
+ mVideoSize.height = frameHeightActual;
+ return OK;
+}
+
+/*
+ * Check the requested frame rate has been successfully configured or not.
+ * If the target frameRate is -1, check on the current frame rate value
+ * setting is performed.
+ *
+ * @param params CameraParameters to retrieve the information
+ * @param the target video frame rate to check against
+ * @return OK if no error.
+ */
+status_t CameraSource::checkFrameRate(
+ const CameraParameters& params,
+ int32_t frameRate) {
+
+ int32_t frameRateActual = params.getPreviewFrameRate();
+ if (frameRateActual < 0) {
+ LOGE("Failed to retrieve preview frame rate (%d)", frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+
+ // Check the actual video frame rate against the target/requested
+ // video frame rate.
+ if (frameRate != -1 && (frameRateActual - frameRate) != 0) {
+ LOGE("Failed to set preview frame rate to %d fps. The actual "
+ "frame rate is %d", frameRate, frameRateActual);
+ return UNKNOWN_ERROR;
+ }
+ // Good now.
+ mVideoFrameRate = frameRateActual;
+ return OK;
+}
+
+/*
+ * Initialize the CameraSource so that it becomes
+ * ready for providing the video input streams as requested.
+ * @param camera the camera object used for the video source
+ * @param cameraId if camera == 0, use camera with this id
+ * as the video source
+ * @param videoSize the target video frame size. If both
+ * width and height in videoSize is -1, use the current
+ * width and height settings of the camera
+ * @param frameRate the target frame rate in frames per second.
+ * if it is -1, use the current camera frame rate setting.
+ * @param storeMetaDataInVideoBuffers request to store meta
+ * data or real YUV data in video buffers. Request to
+ * store meta data in video buffers may not be honored
+ * if the source does not support this feature.
+ *
+ * @return OK if no error.
+ */
+status_t CameraSource::init(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t frameRate,
+ bool storeMetaDataInVideoBuffers) {
+
+ status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- String8 s = mCamera->getParameters();
- IPCThreadState::self()->restoreCallingIdentity(token);
- printf("params: \"%s\"\n", s.string());
+ if ((err = isCameraAvailable(camera, cameraId)) != OK) {
+ return err;
+ }
+ CameraParameters params(mCamera->getParameters());
+ if ((err = isCameraColorFormatSupported(params)) != OK) {
+ return err;
+ }
- int32_t width, height, stride, sliceHeight;
- CameraParameters params(s);
- params.getPreviewSize(&width, &height);
+ // Set the camera to use the requested video frame size
+ // and/or frame rate.
+ if ((err = configureCamera(&params,
+ videoSize.width, videoSize.height,
+ frameRate))) {
+ return err;
+ }
+
+ // Check on video frame size and frame rate.
+ CameraParameters newCameraParams(mCamera->getParameters());
+ if ((err = checkVideoSize(newCameraParams,
+ videoSize.width, videoSize.height)) != OK) {
+ return err;
+ }
+ if ((err = checkFrameRate(newCameraParams, frameRate)) != OK) {
+ return err;
+ }
- // Calculate glitch duraton threshold based on frame rate
- int32_t frameRate = params.getPreviewFrameRate();
- int64_t glitchDurationUs = (1000000LL / frameRate);
+ // This CHECK is good, since we just passed the lock/unlock
+ // check earlier by calling mCamera->setParameters().
+ CHECK_EQ(OK, mCamera->setPreviewDisplay(mSurface));
+
+ mIsMetaDataStoredInVideoBuffers = false;
+ if (storeMetaDataInVideoBuffers &&
+ OK == mCamera->storeMetaDataInBuffers(true)) {
+ mIsMetaDataStoredInVideoBuffers = true;
+ }
+
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ int64_t glitchDurationUs = (1000000LL / mVideoFrameRate);
if (glitchDurationUs > mGlitchDurationThresholdUs) {
mGlitchDurationThresholdUs = glitchDurationUs;
}
- const char *colorFormatStr = params.get(CameraParameters::KEY_VIDEO_FRAME_FORMAT);
- CHECK(colorFormatStr != NULL);
- int32_t colorFormat = getColorFormat(colorFormatStr);
-
// XXX: query camera for the stride and slice height
// when the capability becomes available.
- stride = width;
- sliceHeight = height;
-
mMeta = new MetaData;
- mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
- mMeta->setInt32(kKeyColorFormat, colorFormat);
- mMeta->setInt32(kKeyWidth, width);
- mMeta->setInt32(kKeyHeight, height);
- mMeta->setInt32(kKeyStride, stride);
- mMeta->setInt32(kKeySliceHeight, sliceHeight);
-
+ mMeta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
+ mMeta->setInt32(kKeyColorFormat, mColorFormat);
+ mMeta->setInt32(kKeyWidth, mVideoSize.width);
+ mMeta->setInt32(kKeyHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyStride, mVideoSize.width);
+ mMeta->setInt32(kKeySliceHeight, mVideoSize.height);
+ mMeta->setInt32(kKeyFrameRate, mVideoFrameRate);
+ return OK;
}
CameraSource::~CameraSource() {
@@ -173,8 +521,17 @@ CameraSource::~CameraSource() {
}
}
+void CameraSource::startCameraRecording() {
+ CHECK_EQ(OK, mCamera->startRecording());
+ CHECK(mCamera->recordingEnabled());
+}
+
status_t CameraSource::start(MetaData *meta) {
CHECK(!mStarted);
+ if (mInitCheck != OK) {
+ LOGE("CameraSource is not initialized yet");
+ return mInitCheck;
+ }
char value[PROPERTY_VALUE_MAX];
if (property_get("media.stagefright.record-stats", value, NULL)
@@ -188,15 +545,22 @@ status_t CameraSource::start(MetaData *meta) {
mStartTimeUs = startTimeUs;
}
+ // Call setListener first before calling startCameraRecording()
+ // to avoid recording frames being dropped.
int64_t token = IPCThreadState::self()->clearCallingIdentity();
mCamera->setListener(new CameraSourceListener(this));
- CHECK_EQ(OK, mCamera->startRecording());
+ startCameraRecording();
IPCThreadState::self()->restoreCallingIdentity(token);
mStarted = true;
return OK;
}
+void CameraSource::stopCameraRecording() {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+}
+
status_t CameraSource::stop() {
LOGV("stop");
Mutex::Autolock autoLock(mLock);
@@ -204,15 +568,23 @@ status_t CameraSource::stop() {
mFrameAvailableCondition.signal();
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->setListener(NULL);
- mCamera->stopRecording();
+ stopCameraRecording();
releaseQueuedFrames();
while (!mFramesBeingEncoded.empty()) {
LOGI("Waiting for outstanding frames being encoded: %d",
mFramesBeingEncoded.size());
mFrameCompleteCondition.wait(mLock);
}
- mCamera = NULL;
+
+ LOGV("Disconnect camera");
+ if ((mCameraFlags & FLAGS_HOT_CAMERA) == 0) {
+ LOGV("Camera was cold when we started, stopping preview");
+ mCamera->stopPreview();
+ }
+ mCamera->unlock();
+ mCamera.clear();
+ mCamera = 0;
+ mCameraFlags = 0;
IPCThreadState::self()->restoreCallingIdentity(token);
if (mCollectStats) {
@@ -225,11 +597,15 @@ status_t CameraSource::stop() {
return OK;
}
+void CameraSource::releaseRecordingFrame(const sp<IMemory>& frame) {
+ mCamera->releaseRecordingFrame(frame);
+}
+
void CameraSource::releaseQueuedFrames() {
List<sp<IMemory> >::iterator it;
while (!mFramesReceived.empty()) {
it = mFramesReceived.begin();
- mCamera->releaseRecordingFrame(*it);
+ releaseRecordingFrame(*it);
mFramesReceived.erase(it);
++mNumFramesDropped;
}
@@ -241,7 +617,7 @@ sp<MetaData> CameraSource::getFormat() {
void CameraSource::releaseOneRecordingFrame(const sp<IMemory>& frame) {
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- mCamera->releaseRecordingFrame(frame);
+ releaseRecordingFrame(frame);
IPCThreadState::self()->restoreCallingIdentity(token);
}
@@ -251,7 +627,6 @@ void CameraSource::signalBufferReturned(MediaBuffer *buffer) {
for (List<sp<IMemory> >::iterator it = mFramesBeingEncoded.begin();
it != mFramesBeingEncoded.end(); ++it) {
if ((*it)->pointer() == buffer->data()) {
-
releaseOneRecordingFrame((*it));
mFramesBeingEncoded.erase(it);
++mNumFramesEncoded;
@@ -343,6 +718,13 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
++mNumGlitches;
}
+ // May need to skip frame or modify timestamp. Currently implemented
+ // by the subclass CameraSourceTimeLapse.
+ if(skipCurrentFrame(timestampUs)) {
+ releaseOneRecordingFrame(data);
+ return;
+ }
+
mLastFrameTimestampUs = timestampUs;
if (mNumFramesReceived == 0) {
mFirstFrameTimeUs = timestampUs;
@@ -367,4 +749,31 @@ void CameraSource::dataCallbackTimestamp(int64_t timestampUs,
mFrameAvailableCondition.signal();
}
+size_t CameraSource::getNumberOfVideoBuffers() const {
+ LOGV("getNumberOfVideoBuffers");
+ size_t nBuffers = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ nBuffers = mCamera->getNumberOfVideoBuffers();
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return nBuffers;
+}
+
+sp<IMemory> CameraSource::getVideoBuffer(size_t index) const {
+ LOGV("getVideoBuffer: %d", index);
+ sp<IMemory> buffer = 0;
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ if (mInitCheck == OK && mCamera != 0) {
+ buffer = mCamera->getVideoBuffer(index);
+ }
+ IPCThreadState::self()->restoreCallingIdentity(token);
+ return buffer;
+}
+
+bool CameraSource::isMetaDataStoredInVideoBuffers() const {
+ LOGV("isMetaDataStoredInVideoBuffers");
+ return mIsMetaDataStoredInVideoBuffers;
+}
+
} // namespace android
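
The refactored CameraSource above replaces the old Camera-only constructor with a parameterized CreateFromCamera() factory gated by initCheck(). A hypothetical usage sketch of that new API (the function name, header paths beyond those used in the patch, and the concrete size/frame-rate values are illustrative assumptions):

    #include <camera/CameraParameters.h>       // for Size
    #include <camera/ICamera.h>
    #include <surfaceflinger/Surface.h>
    #include <media/stagefright/CameraSource.h>

    using namespace android;

    // Returns NULL when CameraSource::initCheck() failed, e.g. when the
    // requested size is not among the camera's supported video sizes.
    sp<CameraSource> makeRecordingSource(
            const sp<ICamera> &camera, const sp<Surface> &previewSurface) {
        Size videoSize;
        videoSize.width = 640;    // example value
        videoSize.height = 480;   // example value

        return CameraSource::CreateFromCamera(
                camera,
                0 /* cameraId, only used when camera == 0 */,
                videoSize,
                30 /* frameRate */,
                previewSurface,
                false /* storeMetaDataInVideoBuffers */);
    }
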
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
new file mode 100644
index 0000000..6fd1825
--- /dev/null
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -0,0 +1,518 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CameraSourceTimeLapse"
+
+#include <binder/IPCThreadState.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/stagefright/CameraSource.h>
+#include <media/stagefright/CameraSourceTimeLapse.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <ui/Rect.h>
+#include <utils/String8.h>
+#include <utils/Vector.h>
+#include "OMX_Video.h"
+#include <limits.h>
+
+namespace android {
+
+// static
+CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
+ const sp<ICamera> &camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs) {
+
+ CameraSourceTimeLapse *source = new
+ CameraSourceTimeLapse(camera, cameraId,
+ videoSize, videoFrameRate, surface,
+ timeBetweenTimeLapseFrameCaptureUs);
+
+ if (source != NULL) {
+ if (source->initCheck() != OK) {
+ delete source;
+ return NULL;
+ }
+ }
+ return source;
+}
+
+CameraSourceTimeLapse::CameraSourceTimeLapse(
+ const sp<ICamera>& camera,
+ int32_t cameraId,
+ Size videoSize,
+ int32_t videoFrameRate,
+ const sp<Surface>& surface,
+ int64_t timeBetweenTimeLapseFrameCaptureUs)
+ : CameraSource(camera, cameraId, videoSize, videoFrameRate, surface, false),
+ mTimeBetweenTimeLapseFrameCaptureUs(timeBetweenTimeLapseFrameCaptureUs),
+ mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
+ mLastTimeLapseFrameRealTimestampUs(0),
+ mSkipCurrentFrame(false) {
+
+ LOGV("starting time lapse mode");
+ mVideoWidth = videoSize.width;
+ mVideoHeight = videoSize.height;
+
+ if (trySettingPreviewSize(videoSize.width, videoSize.height)) {
+ mUseStillCameraForTimeLapse = false;
+ } else {
+ // TODO: Add a check to see that mTimeBetweenTimeLapseFrameCaptureUs is greater
+ // than the fastest rate at which the still camera can take pictures.
+ mUseStillCameraForTimeLapse = true;
+ CHECK(setPictureSizeToClosestSupported(videoSize.width, videoSize.height));
+ mNeedCropping = computeCropRectangleOffset();
+ mMeta->setInt32(kKeyWidth, videoSize.width);
+ mMeta->setInt32(kKeyHeight, videoSize.height);
+ }
+
+ // Initialize quick stop variables.
+ mQuickStop = false;
+ mForceRead = false;
+ mLastReadBufferCopy = NULL;
+ mStopWaitingForIdleCamera = false;
+}
+
+CameraSourceTimeLapse::~CameraSourceTimeLapse() {
+}
+
+void CameraSourceTimeLapse::startQuickReadReturns() {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ LOGV("Enabling quick read returns");
+
+ // Enable quick stop mode.
+ mQuickStop = true;
+
+ if (mUseStillCameraForTimeLapse) {
+ // wake up the thread right away.
+ mTakePictureCondition.signal();
+ } else {
+ // Force dataCallbackTimestamp() coming from the video camera to not skip the
+        // next frame, as we want read() to get a frame right away.
+ mForceRead = true;
+ }
+}
+
+bool CameraSourceTimeLapse::trySettingPreviewSize(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPreviewSizes(supportedSizes);
+
+ bool previewSizeSupported = false;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth == width) && (pictureHeight == height)) {
+ previewSizeSupported = true;
+ }
+ }
+
+ if (previewSizeSupported) {
+ LOGV("Video size (%d, %d) is a supported preview size", width, height);
+ params.setPreviewSize(width, height);
+ CHECK(mCamera->setParameters(params.flatten()));
+ return true;
+ }
+
+ return false;
+}
+
+bool CameraSourceTimeLapse::setPictureSizeToClosestSupported(int32_t width, int32_t height) {
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ Vector<Size> supportedSizes;
+ params.getSupportedPictureSizes(supportedSizes);
+
+ int32_t minPictureSize = INT_MAX;
+ for (uint32_t i = 0; i < supportedSizes.size(); ++i) {
+ int32_t pictureWidth = supportedSizes[i].width;
+ int32_t pictureHeight = supportedSizes[i].height;
+
+ if ((pictureWidth >= width) && (pictureHeight >= height)) {
+ int32_t pictureSize = pictureWidth*pictureHeight;
+ if (pictureSize < minPictureSize) {
+ minPictureSize = pictureSize;
+ mPictureWidth = pictureWidth;
+ mPictureHeight = pictureHeight;
+ }
+ }
+ }
+ LOGV("Picture size = (%d, %d)", mPictureWidth, mPictureHeight);
+ return (minPictureSize != INT_MAX);
+}
+
+bool CameraSourceTimeLapse::computeCropRectangleOffset() {
+ if ((mPictureWidth == mVideoWidth) && (mPictureHeight == mVideoHeight)) {
+ return false;
+ }
+
+ CHECK((mPictureWidth > mVideoWidth) && (mPictureHeight > mVideoHeight));
+
+ int32_t widthDifference = mPictureWidth - mVideoWidth;
+ int32_t heightDifference = mPictureHeight - mVideoHeight;
+
+ mCropRectStartX = widthDifference/2;
+ mCropRectStartY = heightDifference/2;
+
+ LOGV("setting crop rectangle offset to (%d, %d)", mCropRectStartX, mCropRectStartY);
+
+ return true;
+}
+
+void CameraSourceTimeLapse::signalBufferReturned(MediaBuffer* buffer) {
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && (buffer == mLastReadBufferCopy)) {
+ buffer->setObserver(NULL);
+ buffer->release();
+ } else {
+ return CameraSource::signalBufferReturned(buffer);
+ }
+}
+
+void createMediaBufferCopy(const MediaBuffer& sourceBuffer, int64_t frameTime, MediaBuffer **newBuffer) {
+ size_t sourceSize = sourceBuffer.size();
+ void* sourcePointer = sourceBuffer.data();
+
+ (*newBuffer) = new MediaBuffer(sourceSize);
+ memcpy((*newBuffer)->data(), sourcePointer, sourceSize);
+
+ (*newBuffer)->meta_data()->setInt64(kKeyTime, frameTime);
+}
+
+void CameraSourceTimeLapse::fillLastReadBufferCopy(MediaBuffer& sourceBuffer) {
+ int64_t frameTime;
+ CHECK(sourceBuffer.meta_data()->findInt64(kKeyTime, &frameTime));
+ createMediaBufferCopy(sourceBuffer, frameTime, &mLastReadBufferCopy);
+ mLastReadBufferCopy->add_ref();
+ mLastReadBufferCopy->setObserver(this);
+}
+
+status_t CameraSourceTimeLapse::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ if (mLastReadBufferCopy == NULL) {
+ mLastReadStatus = CameraSource::read(buffer, options);
+
+ // mQuickStop may have turned to true while read was blocked. Make a copy of
+ // the buffer in that case.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ if (mQuickStop && *buffer) {
+ fillLastReadBufferCopy(**buffer);
+ }
+ return mLastReadStatus;
+ } else {
+ (*buffer) = mLastReadBufferCopy;
+ (*buffer)->add_ref();
+ return mLastReadStatus;
+ }
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadTimeLapseWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadTimeLapseEntry();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadTimeLapseEntry() {
+ while (mStarted) {
+ {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCameraIdle = false;
+ }
+
+ // Even if mQuickStop == true we need to take one more picture
+ // as a read() may be blocked, waiting for a frame to get available.
+ // After this takePicture, if mQuickStop == true, we can safely exit
+ // this thread as read() will make a copy of this last frame and keep
+ // returning it in the quick stop mode.
+ Mutex::Autolock autoLock(mQuickStopLock);
+ CHECK_EQ(OK, mCamera->takePicture());
+ if (mQuickStop) {
+ LOGV("threadTimeLapseEntry: Exiting due to mQuickStop = true");
+ return;
+ }
+ mTakePictureCondition.waitRelative(mQuickStopLock,
+ mTimeBetweenTimeLapseFrameCaptureUs * 1000);
+ }
+ LOGV("threadTimeLapseEntry: Exiting due to mStarted = false");
+}
+
+void CameraSourceTimeLapse::startCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ LOGV("start time lapse recording using still camera");
+
+ int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ String8 s = mCamera->getParameters();
+ IPCThreadState::self()->restoreCallingIdentity(token);
+
+ CameraParameters params(s);
+ params.setPictureSize(mPictureWidth, mPictureHeight);
+ mCamera->setParameters(params.flatten());
+ mCameraIdle = true;
+ mStopWaitingForIdleCamera = false;
+
+ // disable shutter sound and play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_ENABLE_SHUTTER_SOUND, 0, 0);
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+
+ // create a thread which takes pictures in a loop
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+
+ pthread_create(&mThreadTimeLapse, &attr, ThreadTimeLapseWrapper, this);
+ pthread_attr_destroy(&attr);
+ } else {
+ LOGV("start time lapse recording using video camera");
+ CHECK_EQ(OK, mCamera->startRecording());
+ }
+}
+
+void CameraSourceTimeLapse::stopCameraRecording() {
+ if (mUseStillCameraForTimeLapse) {
+ void *dummy;
+ pthread_join(mThreadTimeLapse, &dummy);
+
+ // Last takePicture may still be underway. Wait for the camera to get
+ // idle.
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mStopWaitingForIdleCamera = true;
+ if (!mCameraIdle) {
+ mCameraIdleCondition.wait(mCameraIdleLock);
+ }
+ CHECK(mCameraIdle);
+ mCamera->setListener(NULL);
+
+ // play the recording sound.
+ mCamera->sendCommand(CAMERA_CMD_PLAY_RECORDING_SOUND, 0, 0);
+ } else {
+ mCamera->setListener(NULL);
+ mCamera->stopRecording();
+ }
+ if (mLastReadBufferCopy) {
+ mLastReadBufferCopy->release();
+ mLastReadBufferCopy = NULL;
+ }
+}
+
+void CameraSourceTimeLapse::releaseRecordingFrame(const sp<IMemory>& frame) {
+ if (!mUseStillCameraForTimeLapse) {
+ mCamera->releaseRecordingFrame(frame);
+ }
+}
+
+sp<IMemory> CameraSourceTimeLapse::createIMemoryCopy(const sp<IMemory> &source_data) {
+ size_t source_size = source_data->size();
+ void* source_pointer = source_data->pointer();
+
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(source_size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, source_size);
+ memcpy(newMemory->pointer(), source_pointer, source_size);
+ return newMemory;
+}
+
+// Allocates IMemory of final type MemoryBase with the given size.
+sp<IMemory> allocateIMemory(size_t size) {
+ sp<MemoryHeapBase> newMemoryHeap = new MemoryHeapBase(size);
+ sp<MemoryBase> newMemory = new MemoryBase(newMemoryHeap, 0, size);
+ return newMemory;
+}
+
+// static
+void *CameraSourceTimeLapse::ThreadStartPreviewWrapper(void *me) {
+ CameraSourceTimeLapse *source = static_cast<CameraSourceTimeLapse *>(me);
+ source->threadStartPreview();
+ return NULL;
+}
+
+void CameraSourceTimeLapse::threadStartPreview() {
+ CHECK_EQ(OK, mCamera->startPreview());
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ mCameraIdle = true;
+ mCameraIdleCondition.signal();
+}
+
+void CameraSourceTimeLapse::restartPreview() {
+ // Start this in a different thread, so that the dataCallback can return
+ LOGV("restartPreview");
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+
+ pthread_t threadPreview;
+ pthread_create(&threadPreview, &attr, ThreadStartPreviewWrapper, this);
+ pthread_attr_destroy(&attr);
+}
+
+sp<IMemory> CameraSourceTimeLapse::cropYUVImage(const sp<IMemory> &source_data) {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else {
+ CHECK_EQ(srcFormat, OMX_COLOR_FormatYUV420Planar);
+ yuvFormat = YUVImage::YUV420Planar;
+ }
+
+ // allocate memory for the cropped image and set up a canvas using it.
+ sp<IMemory> croppedImageMemory = allocateIMemory(
+ YUVImage::bufferSize(yuvFormat, mVideoWidth, mVideoHeight));
+ YUVImage yuvImageCropped(yuvFormat,
+ mVideoWidth, mVideoHeight,
+ (uint8_t *)croppedImageMemory->pointer());
+ YUVCanvas yuvCanvasCrop(yuvImageCropped);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mPictureWidth, mPictureHeight,
+ (uint8_t *)source_data->pointer());
+ yuvCanvasCrop.CopyImageRect(
+ Rect(mCropRectStartX, mCropRectStartY,
+ mCropRectStartX + mVideoWidth,
+ mCropRectStartY + mVideoHeight),
+ 0, 0,
+ yuvImageSource);
+
+ return croppedImageMemory;
+}
+
+void CameraSourceTimeLapse::dataCallback(int32_t msgType, const sp<IMemory> &data) {
+ if (msgType == CAMERA_MSG_COMPRESSED_IMAGE) {
+ // takePicture will complete after this callback, so restart preview.
+ restartPreview();
+ return;
+ }
+ if (msgType != CAMERA_MSG_RAW_IMAGE) {
+ return;
+ }
+
+ LOGV("dataCallback for timelapse still frame");
+ CHECK_EQ(true, mUseStillCameraForTimeLapse);
+
+ int64_t timestampUs;
+ if (mNumFramesReceived == 0) {
+ timestampUs = mStartTimeUs;
+ } else {
+ timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ }
+
+ if (mNeedCropping) {
+ sp<IMemory> croppedImageData = cropYUVImage(data);
+ dataCallbackTimestamp(timestampUs, msgType, croppedImageData);
+ } else {
+ sp<IMemory> dataCopy = createIMemoryCopy(data);
+ dataCallbackTimestamp(timestampUs, msgType, dataCopy);
+ }
+}
+
+bool CameraSourceTimeLapse::skipCurrentFrame(int64_t timestampUs) {
+ if (mSkipCurrentFrame) {
+ mSkipCurrentFrame = false;
+ return true;
+ } else {
+ return false;
+ }
+}
+
+bool CameraSourceTimeLapse::skipFrameAndModifyTimeStamp(int64_t *timestampUs) {
+ if (!mUseStillCameraForTimeLapse) {
+ if (mLastTimeLapseFrameRealTimestampUs == 0) {
+ // First time lapse frame. Initialize mLastTimeLapseFrameRealTimestampUs
+ // to current time (timestampUs) and save frame data.
+ LOGV("dataCallbackTimestamp timelapse: initial frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ return false;
+ }
+
+ {
+ Mutex::Autolock autoLock(mQuickStopLock);
+
+ // mForceRead may be set to true by startQuickReadReturns(). In that
+ // case don't skip this frame.
+ if (mForceRead) {
+ LOGV("dataCallbackTimestamp timelapse: forced read");
+ mForceRead = false;
+ *timestampUs = mLastFrameTimestampUs;
+ return false;
+ }
+ }
+
+ if (*timestampUs <
+ (mLastTimeLapseFrameRealTimestampUs + mTimeBetweenTimeLapseFrameCaptureUs)) {
+ // Skip all frames from last encoded frame until
+ // sufficient time (mTimeBetweenTimeLapseFrameCaptureUs) has passed.
+ // Tell the camera to release its recording frame and return.
+ LOGV("dataCallbackTimestamp timelapse: skipping intermediate frame");
+ return true;
+ } else {
+ // Desired frame has arrived after mTimeBetweenTimeLapseFrameCaptureUs time:
+ // - Reset mLastTimeLapseFrameRealTimestampUs to current time.
+ // - Artificially modify timestampUs to be one frame time (1/framerate) ahead
+ // of the last encoded frame's time stamp.
+ LOGV("dataCallbackTimestamp timelapse: got timelapse frame");
+
+ mLastTimeLapseFrameRealTimestampUs = *timestampUs;
+ *timestampUs = mLastFrameTimestampUs + mTimeBetweenTimeLapseVideoFramesUs;
+ return false;
+ }
+ }
+ return false;
+}
+
+void CameraSourceTimeLapse::dataCallbackTimestamp(int64_t timestampUs, int32_t msgType,
+ const sp<IMemory> &data) {
+ if (!mUseStillCameraForTimeLapse) {
+ mSkipCurrentFrame = skipFrameAndModifyTimeStamp(&timestampUs);
+ } else {
+ Mutex::Autolock autoLock(mCameraIdleLock);
+ // If we are using the still camera and stop() has been called, it may
+ // be waiting for the camera to get idle. In that case return
+ // immediately. Calling CameraSource::dataCallbackTimestamp() will lead
+ // to a deadlock since it tries to access CameraSource::mLock which in
+ // this case is held by CameraSource::stop() currently waiting for the
+ // camera to get idle. And camera will not get idle until this call
+ // returns.
+ if (mStopWaitingForIdleCamera) {
+ return;
+ }
+ }
+ CameraSource::dataCallbackTimestamp(timestampUs, msgType, data);
+}
+
+} // namespace android
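The core of the frame-skipping path above is skipFrameAndModifyTimeStamp(): incoming frames are dropped until mTimeBetweenTimeLapseFrameCaptureUs of real time has elapsed, and each kept frame is stamped one video-frame interval after the previous one, which is what compresses time. The following standalone sketch of that remapping is illustrative only; the TimeLapsePolicy name and the sample numbers are not part of the patch.

#include <cstdint>
#include <cstdio>

// Illustrative sketch of the keep/skip and timestamp-remapping rule used by
// skipFrameAndModifyTimeStamp() above.
struct TimeLapsePolicy {
    int64_t captureIntervalUs;  // wall-clock spacing between kept frames
    int64_t videoFrameUs;       // 1,000,000 / encoder frame rate
    int64_t lastRealUs;         // real timestamp of the last kept frame
    int64_t lastVideoUs;        // video timestamp of the last kept frame
    bool first;

    // Returns true if the frame should be kept; on keep, *videoUs is the
    // timestamp to hand to the encoder.
    bool keep(int64_t realUs, int64_t *videoUs) {
        if (first) {
            first = false;
            lastRealUs = realUs;
            *videoUs = lastVideoUs;
            return true;
        }
        if (realUs < lastRealUs + captureIntervalUs) {
            return false;                 // skip this intermediate frame
        }
        lastRealUs = realUs;              // reset the real-time reference
        lastVideoUs += videoFrameUs;      // advance video time by one frame
        *videoUs = lastVideoUs;
        return true;
    }
};

int main() {
    TimeLapsePolicy p = {5000000, 33333, 0, 0, true};  // capture every 5 s, ~30 fps output
    int64_t videoUs;
    for (int64_t realUs = 0; realUs <= 20000000; realUs += 1000000) {
        if (p.keep(realUs, &videoUs)) {
            printf("real %lld us -> video %lld us\n",
                   (long long)realUs, (long long)videoUs);
        }
    }
    return 0;
}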
diff --git a/media/libstagefright/DRMExtractor.cpp b/media/libstagefright/DRMExtractor.cpp
index aa9ad23..3c98932 100644
--- a/media/libstagefright/DRMExtractor.cpp
+++ b/media/libstagefright/DRMExtractor.cpp
@@ -280,18 +280,23 @@ bool SniffDRM(
if (gDrmManagerClient == NULL) {
gDrmManagerClient = new DrmManagerClient();
}
+
+ if (gDrmManagerClient == NULL) {
+ return false;
+ }
}
DecryptHandle *decryptHandle = source->DrmInitialization(gDrmManagerClient);
if (decryptHandle != NULL) {
if (decryptHandle->decryptApiType == DecryptApiType::CONTAINER_BASED) {
- *mimeType = String8("drm+container_based+");
+ *mimeType = String8("drm+container_based+") + decryptHandle->mimeType;
} else if (decryptHandle->decryptApiType == DecryptApiType::ELEMENTARY_STREAM_BASED) {
- *mimeType = String8("drm+es_based+");
+ *mimeType = String8("drm+es_based+") + decryptHandle->mimeType;
+ } else if (decryptHandle->decryptApiType == DecryptApiType::WV_BASED) {
+ *mimeType = MEDIA_MIMETYPE_CONTAINER_WVM;
+ LOGW("SniffWVM: found match\n");
}
-
- *mimeType += decryptHandle->mimeType;
*confidence = 10.0f;
return true;
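For reference, the mime selection SniffDRM now performs can be written as a small self-contained helper. ApiType and drmMimeFor below are illustrative stand-ins for DecryptApiType and the inline code above, and "video/wvm" is the value MEDIA_MIMETYPE_CONTAINER_WVM gains later in this change.

#include <string>

// Illustrative only: the mime-selection rule used by SniffDRM above,
// expressed with std::string instead of String8.
enum class ApiType { ContainerBased, EsBased, WvBased };

static std::string drmMimeFor(ApiType type, const std::string &innerMime) {
    switch (type) {
        case ApiType::ContainerBased: return "drm+container_based+" + innerMime;
        case ApiType::EsBased:        return "drm+es_based+" + innerMime;
        case ApiType::WvBased:        return "video/wvm";  // MEDIA_MIMETYPE_CONTAINER_WVM
    }
    return {};
}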
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 0b8997c..ee0d792 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -36,7 +36,7 @@
namespace android {
-bool DataSource::getUInt16(off_t offset, uint16_t *x) {
+bool DataSource::getUInt16(off64_t offset, uint16_t *x) {
*x = 0;
uint8_t byte[2];
@@ -49,7 +49,7 @@ bool DataSource::getUInt16(off_t offset, uint16_t *x) {
return true;
}
-status_t DataSource::getSize(off_t *size) {
+status_t DataSource::getSize(off64_t *size) {
*size = 0;
return ERROR_UNSUPPORTED;
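The off_t to off64_t change matters because off_t is 32 bits on these builds, so offsets beyond 2 GiB would wrap. The decode itself is unchanged; below is a sketch of the big-endian conversion getUInt16() applies to the two bytes filled by readAt(), assuming the usual implementation.

#include <cstdint>

// Sketch: big-endian 16-bit decode of the two bytes readAt() fills, as
// assumed for DataSource::getUInt16(). Only the offset type is now 64-bit.
static uint16_t beUint16(const uint8_t byte[2]) {
    return static_cast<uint16_t>((byte[0] << 8) | byte[1]);
}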
diff --git a/media/libstagefright/FileSource.cpp b/media/libstagefright/FileSource.cpp
index b46d8d0..98d5b50 100644
--- a/media/libstagefright/FileSource.cpp
+++ b/media/libstagefright/FileSource.cpp
@@ -16,12 +16,16 @@
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaDebug.h>
+#include <sys/types.h>
+#include <unistd.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
namespace android {
FileSource::FileSource(const char *filename)
- : mFile(fopen(filename, "rb")),
- mFd(fileno(mFile)),
+ : mFd(-1),
mOffset(0),
mLength(-1),
mDecryptHandle(NULL),
@@ -29,11 +33,12 @@ FileSource::FileSource(const char *filename)
mDrmBufOffset(0),
mDrmBufSize(0),
mDrmBuf(NULL){
+
+ mFd = open(filename, O_LARGEFILE | O_RDONLY);
}
FileSource::FileSource(int fd, int64_t offset, int64_t length)
- : mFile(fdopen(fd, "rb")),
- mFd(fd),
+ : mFd(fd),
mOffset(offset),
mLength(length),
mDecryptHandle(NULL),
@@ -46,26 +51,23 @@ FileSource::FileSource(int fd, int64_t offset, int64_t length)
}
FileSource::~FileSource() {
- if (mFile != NULL) {
- fclose(mFile);
- mFile = NULL;
+ if (mFd >= 0) {
+ close(mFd);
+ mFd = -1;
}
if (mDrmBuf != NULL) {
delete[] mDrmBuf;
mDrmBuf = NULL;
}
- if (mDecryptHandle != NULL) {
- mDrmManagerClient->closeDecryptSession(mDecryptHandle);
- }
}
status_t FileSource::initCheck() const {
- return mFile != NULL ? OK : NO_INIT;
+ return mFd >= 0 ? OK : NO_INIT;
}
-ssize_t FileSource::readAt(off_t offset, void *data, size_t size) {
- if (mFile == NULL) {
+ssize_t FileSource::readAt(off64_t offset, void *data, size_t size) {
+ if (mFd < 0) {
return NO_INIT;
}
@@ -85,18 +87,18 @@ ssize_t FileSource::readAt(off_t offset, void *data, size_t size) {
== mDecryptHandle->decryptApiType) {
return readAtDRM(offset, data, size);
} else {
- int err = fseeko(mFile, offset + mOffset, SEEK_SET);
- if (err < 0) {
+ off64_t result = lseek64(mFd, offset + mOffset, SEEK_SET);
+ if (result == -1) {
LOGE("seek to %lld failed", offset + mOffset);
return UNKNOWN_ERROR;
}
- return fread(data, 1, size, mFile);
+ return ::read(mFd, data, size);
}
}
-status_t FileSource::getSize(off_t *size) {
- if (mFile == NULL) {
+status_t FileSource::getSize(off64_t *size) {
+ if (mFd < 0) {
return NO_INIT;
}
@@ -106,14 +108,17 @@ status_t FileSource::getSize(off_t *size) {
return OK;
}
- fseek(mFile, 0, SEEK_END);
- *size = ftello(mFile);
+ *size = lseek64(mFd, 0, SEEK_END);
return OK;
}
DecryptHandle* FileSource::DrmInitialization(DrmManagerClient* client) {
+ if (client == NULL) {
+ return NULL;
+ }
mDrmManagerClient = client;
+
if (mDecryptHandle == NULL) {
mDecryptHandle = mDrmManagerClient->openDecryptSession(
mFd, mOffset, mLength);
@@ -132,7 +137,7 @@ void FileSource::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
*client = mDrmManagerClient;
}
-ssize_t FileSource::readAtDRM(off_t offset, void *data, size_t size) {
+ssize_t FileSource::readAtDRM(off64_t offset, void *data, size_t size) {
size_t DRM_CACHE_SIZE = 1024;
if (mDrmBuf == NULL) {
mDrmBuf = new unsigned char[DRM_CACHE_SIZE];
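FileSource now bypasses stdio entirely: readAt() seeks with lseek64() and reads from the raw descriptor, so positions past 2 GiB work on 32-bit builds. A minimal sketch of that pattern follows (error handling trimmed; readAtFd and the example path are illustrative, and large-file macros may be needed on non-bionic libcs).

#include <fcntl.h>
#include <sys/types.h>
#include <unistd.h>

// Sketch: positional read via lseek64 + read, the pattern FileSource::readAt
// now uses instead of fseeko/fread.
static ssize_t readAtFd(int fd, off64_t offset, void *data, size_t size) {
    if (lseek64(fd, offset, SEEK_SET) == -1) {
        return -1;                         // seek failed
    }
    return ::read(fd, data, size);
}

// Hypothetical usage:
//   int fd = open("/sdcard/clip.mp4", O_LARGEFILE | O_RDONLY);
//   char buf[4096];
//   ssize_t n = readAtFd(fd, 3LL << 30, buf, sizeof(buf));  // offset beyond 2 GiB
//   close(fd);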
diff --git a/media/libstagefright/HTTPStream.cpp b/media/libstagefright/HTTPStream.cpp
index ccc6a34..e7f00aa 100644
--- a/media/libstagefright/HTTPStream.cpp
+++ b/media/libstagefright/HTTPStream.cpp
@@ -36,7 +36,7 @@
namespace android {
// static
-const char *HTTPStream::kStatusKey = ":status:";
+const char *HTTPStream::kStatusKey = ":status:"; // MUST be lowercase.
HTTPStream::HTTPStream()
: mState(READY),
@@ -220,7 +220,7 @@ status_t HTTPStream::receive_header(int *http_status) {
return err;
}
- mHeaders.add(string(kStatusKey), string(line));
+ mHeaders.add(AString(kStatusKey), AString(line));
char *spacePos = strchr(line, ' ');
if (spacePos == NULL) {
@@ -264,7 +264,10 @@ status_t HTTPStream::receive_header(int *http_status) {
char *colonPos = strchr(line, ':');
if (colonPos == NULL) {
- mHeaders.add(string(line), string());
+ AString key = line;
+ key.tolower();
+
+ mHeaders.add(key, AString());
} else {
char *end_of_key = colonPos;
while (end_of_key > line && isspace(end_of_key[-1])) {
@@ -278,7 +281,10 @@ status_t HTTPStream::receive_header(int *http_status) {
*end_of_key = '\0';
- mHeaders.add(string(line), string(start_of_value));
+ AString key = line;
+ key.tolower();
+
+ mHeaders.add(key, AString(start_of_value));
}
}
@@ -314,8 +320,11 @@ ssize_t HTTPStream::receive(void *data, size_t size) {
return (ssize_t)total;
}
-bool HTTPStream::find_header_value(const string &key, string *value) const {
- ssize_t index = mHeaders.indexOfKey(key);
+bool HTTPStream::find_header_value(const AString &key, AString *value) const {
+ AString key_lower = key;
+ key_lower.tolower();
+
+ ssize_t index = mHeaders.indexOfKey(key_lower);
if (index < 0) {
value->clear();
return false;
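Header keys are now stored lowercased and the lookup key is lowercased too, which makes find_header_value() case-insensitive as HTTP requires. A self-contained sketch of the same idea, using std::map and std::string as stand-ins for KeyedVector and AString:

#include <algorithm>
#include <cctype>
#include <map>
#include <string>

// Stand-in sketch for the lowercased-key header table used above.
static std::string toLower(std::string s) {
    std::transform(s.begin(), s.end(), s.begin(),
                   [](unsigned char c) { return std::tolower(c); });
    return s;
}

struct Headers {
    std::map<std::string, std::string> table;

    void add(const std::string &key, const std::string &value) {
        table[toLower(key)] = value;        // store lowercased, as receive_header() does
    }

    bool find(const std::string &key, std::string *value) const {
        auto it = table.find(toLower(key)); // lowercase the query key too
        if (it == table.end()) {
            value->clear();
            return false;
        }
        *value = it->second;
        return true;
    }
};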
diff --git a/media/libstagefright/JPEGSource.cpp b/media/libstagefright/JPEGSource.cpp
index ec81097..e818115 100644
--- a/media/libstagefright/JPEGSource.cpp
+++ b/media/libstagefright/JPEGSource.cpp
@@ -142,7 +142,7 @@ status_t JPEGSource::parseJPEG() {
mWidth = 0;
mHeight = 0;
- off_t i = 0;
+ off64_t i = 0;
uint16_t soi;
if (!mSource->getUInt16(i, &soi)) {
diff --git a/media/libstagefright/MP3Extractor.cpp b/media/libstagefright/MP3Extractor.cpp
index 82c0426..9610f90 100644
--- a/media/libstagefright/MP3Extractor.cpp
+++ b/media/libstagefright/MP3Extractor.cpp
@@ -21,6 +21,8 @@
#include "include/MP3Extractor.h"
#include "include/ID3.h"
+#include "include/VBRISeeker.h"
+#include "include/XINGSeeker.h"
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/DataSource.h>
@@ -42,10 +44,11 @@ namespace android {
// Yes ... there are things that must indeed match...
static const uint32_t kMask = 0xfffe0cc0;
-static bool get_mp3_frame_size(
+// static
+bool MP3Extractor::get_mp3_frame_size(
uint32_t header, size_t *frame_size,
- int *out_sampling_rate = NULL, int *out_channels = NULL,
- int *out_bitrate = NULL) {
+ int *out_sampling_rate, int *out_channels,
+ int *out_bitrate) {
*frame_size = 0;
if (out_sampling_rate) {
@@ -178,136 +181,13 @@ static bool get_mp3_frame_size(
return true;
}
-static bool parse_xing_header(
- const sp<DataSource> &source, off_t first_frame_pos,
- int32_t *frame_number = NULL, int32_t *byte_number = NULL,
- char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
- int64_t *duration = NULL) {
-
- if (frame_number) {
- *frame_number = 0;
- }
- if (byte_number) {
- *byte_number = 0;
- }
- if (table_of_contents) {
- table_of_contents[0] = 0;
- }
- if (quality_indicator) {
- *quality_indicator = 0;
- }
- if (duration) {
- *duration = 0;
- }
-
- uint8_t buffer[4];
- int offset = first_frame_pos;
- if (source->readAt(offset, &buffer, 4) < 4) { // get header
- return false;
- }
- offset += 4;
-
- uint8_t id, layer, sr_index, mode;
- layer = (buffer[1] >> 1) & 3;
- id = (buffer[1] >> 3) & 3;
- sr_index = (buffer[2] >> 2) & 3;
- mode = (buffer[3] >> 6) & 3;
- if (layer == 0) {
- return false;
- }
- if (id == 1) {
- return false;
- }
- if (sr_index == 3) {
- return false;
- }
- // determine offset of XING header
- if(id&1) { // mpeg1
- if (mode != 3) offset += 32;
- else offset += 17;
- } else { // mpeg2
- if (mode != 3) offset += 17;
- else offset += 9;
- }
-
- if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
- return false;
- }
- offset += 4;
- // Check XING ID
- if ((buffer[0] != 'X') || (buffer[1] != 'i')
- || (buffer[2] != 'n') || (buffer[3] != 'g')) {
- if ((buffer[0] != 'I') || (buffer[1] != 'n')
- || (buffer[2] != 'f') || (buffer[3] != 'o')) {
- return false;
- }
- }
-
- if (source->readAt(offset, &buffer, 4) < 4) { // flags
- return false;
- }
- offset += 4;
- uint32_t flags = U32_AT(buffer);
-
- if (flags & 0x0001) { // Frames field is present
- if (source->readAt(offset, buffer, 4) < 4) {
- return false;
- }
- if (frame_number) {
- *frame_number = U32_AT(buffer);
- }
- int32_t frame = U32_AT(buffer);
- // Samples per Frame: 1. index = MPEG Version ID, 2. index = Layer
- const int samplesPerFrames[2][3] =
- {
- { 384, 1152, 576 }, // MPEG 2, 2.5: layer1, layer2, layer3
- { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
- };
- // sampling rates in hertz: 1. index = MPEG Version ID, 2. index = sampling rate index
- const int samplingRates[4][3] =
- {
- { 11025, 12000, 8000, }, // MPEG 2.5
- { 0, 0, 0, }, // reserved
- { 22050, 24000, 16000, }, // MPEG 2
- { 44100, 48000, 32000, } // MPEG 1
- };
- if (duration) {
- *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
- / samplingRates[id][sr_index];
- }
- offset += 4;
- }
- if (flags & 0x0002) { // Bytes field is present
- if (byte_number) {
- if (source->readAt(offset, buffer, 4) < 4) {
- return false;
- }
- *byte_number = U32_AT(buffer);
- }
- offset += 4;
- }
- if (flags & 0x0004) { // TOC field is present
- if (table_of_contents) {
- if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
- return false;
- }
- }
- offset += 100;
- }
- if (flags & 0x0008) { // Quality indicator field is present
- if (quality_indicator) {
- if (source->readAt(offset, buffer, 4) < 4) {
- return false;
- }
- *quality_indicator = U32_AT(buffer);
- }
- }
- return true;
-}
-
static bool Resync(
const sp<DataSource> &source, uint32_t match_header,
- off_t *inout_pos, uint32_t *out_header) {
+ off64_t *inout_pos, off64_t *post_id3_pos, uint32_t *out_header) {
+ if (post_id3_pos != NULL) {
+ *post_id3_pos = 0;
+ }
+
if (*inout_pos == 0) {
// Skip an optional ID3 header if syncing at the very beginning
// of the datasource.
@@ -340,9 +220,13 @@ static bool Resync(
LOGV("skipped ID3 tag, new starting offset is %ld (0x%08lx)",
*inout_pos, *inout_pos);
}
+
+ if (post_id3_pos != NULL) {
+ *post_id3_pos = *inout_pos;
+ }
}
- off_t pos = *inout_pos;
+ off64_t pos = *inout_pos;
bool valid = false;
do {
if (pos >= *inout_pos + 128 * 1024) {
@@ -365,8 +249,9 @@ static bool Resync(
size_t frame_size;
int sample_rate, num_channels, bitrate;
- if (!get_mp3_frame_size(header, &frame_size,
- &sample_rate, &num_channels, &bitrate)) {
+ if (!MP3Extractor::get_mp3_frame_size(
+ header, &frame_size,
+ &sample_rate, &num_channels, &bitrate)) {
++pos;
continue;
}
@@ -376,7 +261,7 @@ static bool Resync(
// We found what looks like a valid frame,
// now find its successors.
- off_t test_pos = pos + frame_size;
+ off64_t test_pos = pos + frame_size;
valid = true;
for (int j = 0; j < 3; ++j) {
@@ -396,7 +281,8 @@ static bool Resync(
}
size_t test_frame_size;
- if (!get_mp3_frame_size(test_header, &test_frame_size)) {
+ if (!MP3Extractor::get_mp3_frame_size(
+ test_header, &test_frame_size)) {
valid = false;
break;
}
@@ -426,8 +312,8 @@ class MP3Source : public MediaSource {
public:
MP3Source(
const sp<MetaData> &meta, const sp<DataSource> &source,
- off_t first_frame_pos, uint32_t fixed_header,
- int32_t byte_number, const char *table_of_contents);
+ off64_t first_frame_pos, uint32_t fixed_header,
+ const sp<MP3Seeker> &seeker);
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
@@ -443,14 +329,12 @@ protected:
private:
sp<MetaData> mMeta;
sp<DataSource> mDataSource;
- off_t mFirstFramePos;
+ off64_t mFirstFramePos;
uint32_t mFixedHeader;
- off_t mCurrentPos;
+ off64_t mCurrentPos;
int64_t mCurrentTimeUs;
bool mStarted;
- int32_t mByteNumber; // total number of bytes in this MP3
- // TOC entries in XING header. Skip the first one since it's always 0.
- char mTableOfContents[99];
+ sp<MP3Seeker> mSeeker;
MediaBufferGroup *mGroup;
MP3Source(const MP3Source &);
@@ -462,25 +346,28 @@ MP3Extractor::MP3Extractor(
: mInitCheck(NO_INIT),
mDataSource(source),
mFirstFramePos(-1),
- mFixedHeader(0),
- mByteNumber(0) {
- off_t pos = 0;
+ mFixedHeader(0) {
+ off64_t pos = 0;
+ off64_t post_id3_pos;
uint32_t header;
bool success;
int64_t meta_offset;
uint32_t meta_header;
+ int64_t meta_post_id3_offset;
if (meta != NULL
&& meta->findInt64("offset", &meta_offset)
- && meta->findInt32("header", (int32_t *)&meta_header)) {
+ && meta->findInt32("header", (int32_t *)&meta_header)
+ && meta->findInt64("post-id3-offset", &meta_post_id3_offset)) {
// The sniffer has already done all the hard work for us, simply
// accept its judgement.
- pos = (off_t)meta_offset;
+ pos = (off64_t)meta_offset;
header = meta_header;
+ post_id3_pos = (off64_t)meta_post_id3_offset;
success = true;
} else {
- success = Resync(mDataSource, 0, &pos, &header);
+ success = Resync(mDataSource, 0, &pos, &post_id3_pos, &header);
}
if (!success) {
@@ -505,21 +392,27 @@ MP3Extractor::MP3Extractor(
mMeta->setInt32(kKeyBitRate, bitrate * 1000);
mMeta->setInt32(kKeyChannelCount, num_channels);
- int64_t duration;
- parse_xing_header(
- mDataSource, mFirstFramePos, NULL, &mByteNumber,
- mTableOfContents, NULL, &duration);
- if (duration > 0) {
- mMeta->setInt64(kKeyDuration, duration);
- } else {
- off_t fileSize;
+ mSeeker = XINGSeeker::CreateFromSource(mDataSource, mFirstFramePos);
+
+ if (mSeeker == NULL) {
+ mSeeker = VBRISeeker::CreateFromSource(mDataSource, post_id3_pos);
+ }
+
+ int64_t durationUs;
+
+ if (mSeeker == NULL || !mSeeker->getDuration(&durationUs)) {
+ off64_t fileSize;
if (mDataSource->getSize(&fileSize) == OK) {
- mMeta->setInt64(
- kKeyDuration,
- 8000LL * (fileSize - mFirstFramePos) / bitrate);
+ durationUs = 8000LL * (fileSize - mFirstFramePos) / bitrate;
+ } else {
+ durationUs = -1;
}
}
+ if (durationUs >= 0) {
+ mMeta->setInt64(kKeyDuration, durationUs);
+ }
+
mInitCheck = OK;
}
@@ -534,7 +427,7 @@ sp<MediaSource> MP3Extractor::getTrack(size_t index) {
return new MP3Source(
mMeta, mDataSource, mFirstFramePos, mFixedHeader,
- mByteNumber, mTableOfContents);
+ mSeeker);
}
sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) {
@@ -549,8 +442,8 @@ sp<MetaData> MP3Extractor::getTrackMetaData(size_t index, uint32_t flags) {
MP3Source::MP3Source(
const sp<MetaData> &meta, const sp<DataSource> &source,
- off_t first_frame_pos, uint32_t fixed_header,
- int32_t byte_number, const char *table_of_contents)
+ off64_t first_frame_pos, uint32_t fixed_header,
+ const sp<MP3Seeker> &seeker)
: mMeta(meta),
mDataSource(source),
mFirstFramePos(first_frame_pos),
@@ -558,9 +451,8 @@ MP3Source::MP3Source(
mCurrentPos(0),
mCurrentTimeUs(0),
mStarted(false),
- mByteNumber(byte_number),
+ mSeeker(seeker),
mGroup(NULL) {
- memcpy (mTableOfContents, table_of_contents, sizeof(mTableOfContents));
}
MP3Source::~MP3Source() {
@@ -607,43 +499,21 @@ status_t MP3Source::read(
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
- int32_t bitrate;
- if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
- // bitrate is in bits/sec.
- LOGI("no bitrate");
-
- return ERROR_UNSUPPORTED;
- }
-
- mCurrentTimeUs = seekTimeUs;
- // interpolate in TOC to get file seek point in bytes
- int64_t duration;
- if ((mByteNumber > 0) && (mTableOfContents[0] > 0)
- && mMeta->findInt64(kKeyDuration, &duration)) {
- float percent = (float)seekTimeUs * 100 / duration;
- float fx;
- if( percent <= 0.0f ) {
- fx = 0.0f;
- } else if( percent >= 100.0f ) {
- fx = 256.0f;
- } else {
- int a = (int)percent;
- float fa, fb;
- if ( a == 0 ) {
- fa = 0.0f;
- } else {
- fa = (float)mTableOfContents[a-1];
- }
- if ( a < 99 ) {
- fb = (float)mTableOfContents[a];
- } else {
- fb = 256.0f;
- }
- fx = fa + (fb-fa)*(percent-a);
+ int64_t actualSeekTimeUs = seekTimeUs;
+ if (mSeeker == NULL
+ || !mSeeker->getOffsetForTime(&actualSeekTimeUs, &mCurrentPos)) {
+ int32_t bitrate;
+ if (!mMeta->findInt32(kKeyBitRate, &bitrate)) {
+ // bitrate is in bits/sec.
+ LOGI("no bitrate");
+
+ return ERROR_UNSUPPORTED;
}
- mCurrentPos = mFirstFramePos + (int)((1.0f/256.0f)*fx*mByteNumber);
- } else {
+
+ mCurrentTimeUs = seekTimeUs;
mCurrentPos = mFirstFramePos + seekTimeUs * bitrate / 8000000;
+ } else {
+ mCurrentTimeUs = actualSeekTimeUs;
}
}
@@ -667,15 +537,16 @@ status_t MP3Source::read(
uint32_t header = U32_AT((const uint8_t *)buffer->data());
if ((header & kMask) == (mFixedHeader & kMask)
- && get_mp3_frame_size(header, &frame_size, NULL, NULL, &bitrate)) {
+ && MP3Extractor::get_mp3_frame_size(
+ header, &frame_size, NULL, NULL, &bitrate)) {
break;
}
// Lost sync.
LOGV("lost sync! header = 0x%08x, old header = 0x%08x\n", header, mFixedHeader);
- off_t pos = mCurrentPos;
- if (!Resync(mDataSource, mFixedHeader, &pos, NULL)) {
+ off64_t pos = mCurrentPos;
+ if (!Resync(mDataSource, mFixedHeader, &pos, NULL, NULL)) {
LOGE("Unable to resync. Signalling end of stream.");
buffer->release();
@@ -780,15 +651,17 @@ sp<MetaData> MP3Extractor::getMetaData() {
bool SniffMP3(
const sp<DataSource> &source, String8 *mimeType,
float *confidence, sp<AMessage> *meta) {
- off_t pos = 0;
+ off64_t pos = 0;
+ off64_t post_id3_pos;
uint32_t header;
- if (!Resync(source, 0, &pos, &header)) {
+ if (!Resync(source, 0, &pos, &post_id3_pos, &header)) {
return false;
}
*meta = new AMessage;
(*meta)->setInt64("offset", pos);
(*meta)->setInt32("header", header);
+ (*meta)->setInt64("post-id3-offset", post_id3_pos);
*mimeType = MEDIA_MIMETYPE_AUDIO_MPEG;
*confidence = 0.2f;
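When neither a XING nor a VBRI header yields a seeker, the extractor falls back to constant-bitrate arithmetic: duration is 8000 * payloadBytes / bitrateKbps microseconds, and a seek maps time back to a byte offset using bits per second. A worked example with made-up numbers:

#include <cstdint>
#include <cstdio>

// Worked example of the CBR fallback used when no XING/VBRI seeker exists.
int main() {
    int64_t fileSize      = 4000000;   // bytes
    int64_t firstFramePos = 417;       // after the ID3 tag / first sync
    int32_t bitrateKbps   = 128;       // from the frame header

    // Duration: (payload bytes * 8000) / kbps == microseconds (~250 s here).
    int64_t durationUs = 8000LL * (fileSize - firstFramePos) / bitrateKbps;

    // Seeking: map a time back to a byte offset with bits/sec.
    int64_t seekTimeUs = 60000000;     // 60 s
    int64_t bitrateBps = bitrateKbps * 1000LL;
    int64_t seekPos    = firstFramePos + seekTimeUs * bitrateBps / 8000000;

    printf("duration ~%lld us, 60 s maps to byte %lld\n",
           (long long)durationUs, (long long)seekPos);
    return 0;
}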
diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index 81a2b0d..4d8165e 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -410,7 +410,7 @@ void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) {
////////////////////////////////////////////////////////////////////////////////
MPEG2TSWriter::MPEG2TSWriter(int fd)
- : mFile(fdopen(fd, "wb")),
+ : mFile(fdopen(dup(fd), "wb")),
mStarted(false),
mNumSourcesDone(0),
mNumTSPacketsWritten(0),
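Wrapping a dup() of the caller's descriptor gives the writer its own fd, so the eventual fclose() cannot pull the descriptor out from under the caller. A sketch of that ownership pattern (adoptFd is an illustrative name, not stagefright API):

#include <cstdio>
#include <unistd.h>

// Sketch: take private ownership of a caller-supplied fd by duplicating it
// before handing it to stdio, as MPEG2TSWriter now does.
static FILE *adoptFd(int fd, const char *mode) {
    int ownedFd = dup(fd);               // private copy; the caller keeps theirs
    if (ownedFd < 0) {
        return nullptr;
    }
    FILE *f = fdopen(ownedFd, mode);
    if (f == nullptr) {
        close(ownedFd);                  // don't leak on failure
    }
    return f;                            // fclose(f) closes only ownedFd
}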
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 03682e2..bbe99d3 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -98,11 +98,11 @@ struct MPEG4DataSource : public DataSource {
MPEG4DataSource(const sp<DataSource> &source);
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
- status_t setCachedRange(off_t offset, size_t size);
+ status_t setCachedRange(off64_t offset, size_t size);
protected:
virtual ~MPEG4DataSource();
@@ -111,7 +111,7 @@ private:
Mutex mLock;
sp<DataSource> mSource;
- off_t mCachedOffset;
+ off64_t mCachedOffset;
size_t mCachedSize;
uint8_t *mCache;
@@ -146,7 +146,7 @@ status_t MPEG4DataSource::initCheck() const {
return mSource->initCheck();
}
-ssize_t MPEG4DataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t MPEG4DataSource::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoLock(mLock);
if (offset >= mCachedOffset
@@ -158,7 +158,7 @@ ssize_t MPEG4DataSource::readAt(off_t offset, void *data, size_t size) {
return mSource->readAt(offset, data, size);
}
-status_t MPEG4DataSource::getSize(off_t *size) {
+status_t MPEG4DataSource::getSize(off64_t *size) {
return mSource->getSize(size);
}
@@ -166,7 +166,7 @@ uint32_t MPEG4DataSource::flags() {
return mSource->flags();
}
-status_t MPEG4DataSource::setCachedRange(off_t offset, size_t size) {
+status_t MPEG4DataSource::setCachedRange(off64_t offset, size_t size) {
Mutex::Autolock autoLock(mLock);
clearCache();
@@ -363,7 +363,7 @@ status_t MPEG4Extractor::readMetaData() {
return OK;
}
- off_t offset = 0;
+ off64_t offset = 0;
status_t err;
while ((err = parseChunk(&offset, 0)) == OK) {
}
@@ -404,7 +404,7 @@ char* MPEG4Extractor::getDrmTrackInfo(size_t trackID, int *len) {
}
// Reads an encoded integer 7 bits at a time until it encounters the high bit clear.
-int32_t readSize(off_t offset,
+int32_t readSize(off64_t offset,
const sp<DataSource> DataSource, uint8_t *numOfBytes) {
uint32_t size = 0;
uint8_t data;
@@ -424,7 +424,7 @@ int32_t readSize(off_t offset,
return size;
}
-status_t MPEG4Extractor::parseDrmSINF(off_t *offset, off_t data_offset) {
+status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
uint8_t updateIdTag;
if (mDataSource->readAt(data_offset, &updateIdTag, 1) < 1) {
return ERROR_IO;
@@ -596,14 +596,14 @@ static void convertTimeToDate(int64_t time_1904, String8 *s) {
s->setTo(tmp);
}
-status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
+status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
uint32_t hdr[2];
if (mDataSource->readAt(*offset, hdr, 8) < 8) {
return ERROR_IO;
}
uint64_t chunk_size = ntohl(hdr[0]);
uint32_t chunk_type = ntohl(hdr[1]);
- off_t data_offset = *offset + 8;
+ off64_t data_offset = *offset + 8;
if (chunk_size == 1) {
if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) {
@@ -644,11 +644,11 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
PathAdder autoAdder(&mPath, chunk_type);
- off_t chunk_data_size = *offset + chunk_size - data_offset;
+ off64_t chunk_data_size = *offset + chunk_size - data_offset;
if (chunk_type != FOURCC('c', 'p', 'r', 't')
&& mPath.size() == 5 && underMetaDataPath(mPath)) {
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -715,7 +715,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
track->meta->setCString(kKeyMIMEType, "application/octet-stream");
}
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset;
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -788,7 +788,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
return ERROR_IO;
}
- off_t timescale_offset;
+ off64_t timescale_offset;
if (version == 1) {
timescale_offset = data_offset + 4 + 16;
@@ -838,7 +838,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
}
uint8_t buffer[8];
- if (chunk_data_size < (off_t)sizeof(buffer)) {
+ if (chunk_data_size < (off64_t)sizeof(buffer)) {
return ERROR_MALFORMED;
}
@@ -862,7 +862,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
break;
}
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + 8;
for (uint32_t i = 0; i < entry_count; ++i) {
status_t err = parseChunk(offset, depth + 1);
@@ -919,7 +919,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + sizeof(buffer);
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -962,7 +962,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyWidth, width);
mLastTrack->meta->setInt32(kKeyHeight, height);
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + sizeof(buffer);
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -1069,7 +1069,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
}
uint8_t buffer[256];
- if (chunk_data_size > (off_t)sizeof(buffer)) {
+ if (chunk_data_size > (off64_t)sizeof(buffer)) {
return ERROR_BUFFER_TOO_SMALL;
}
@@ -1108,7 +1108,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
case FOURCC('a', 'v', 'c', 'C'):
{
char buffer[256];
- if (chunk_data_size > (off_t)sizeof(buffer)) {
+ if (chunk_data_size > (off64_t)sizeof(buffer)) {
return ERROR_BUFFER_TOO_SMALL;
}
@@ -1127,7 +1127,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
case FOURCC('m', 'e', 't', 'a'):
{
uint8_t buffer[4];
- if (chunk_data_size < (off_t)sizeof(buffer)) {
+ if (chunk_data_size < (off64_t)sizeof(buffer)) {
return ERROR_MALFORMED;
}
@@ -1147,7 +1147,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
return OK;
}
- off_t stop_offset = *offset + chunk_size;
+ off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + sizeof(buffer);
while (*offset < stop_offset) {
status_t err = parseChunk(offset, depth + 1);
@@ -1232,7 +1232,7 @@ status_t MPEG4Extractor::parseChunk(off_t *offset, int depth) {
}
status_t MPEG4Extractor::parseTrackHeader(
- off_t data_offset, off_t data_size) {
+ off64_t data_offset, off64_t data_size) {
if (data_size < 4) {
return ERROR_MALFORMED;
}
@@ -1246,7 +1246,7 @@ status_t MPEG4Extractor::parseTrackHeader(
uint8_t buffer[36 + 60];
- if (data_size != (off_t)dynSize + 60) {
+ if (data_size != (off64_t)dynSize + 60) {
return ERROR_MALFORMED;
}
@@ -1263,7 +1263,9 @@ status_t MPEG4Extractor::parseTrackHeader(
mtime = U64_AT(&buffer[12]);
id = U32_AT(&buffer[20]);
duration = U64_AT(&buffer[28]);
- } else if (version == 0) {
+ } else {
+ CHECK_EQ((unsigned)version, 0u);
+
ctime = U32_AT(&buffer[4]);
mtime = U32_AT(&buffer[8]);
id = U32_AT(&buffer[12]);
@@ -1316,7 +1318,7 @@ status_t MPEG4Extractor::parseTrackHeader(
return OK;
}
-status_t MPEG4Extractor::parseMetaData(off_t offset, size_t size) {
+status_t MPEG4Extractor::parseMetaData(off64_t offset, size_t size) {
if (size < 4) {
return ERROR_MALFORMED;
}
@@ -1805,7 +1807,7 @@ status_t MPEG4Source::read(
// fall through
}
- off_t offset;
+ off64_t offset;
size_t size;
uint32_t dts;
bool isSyncSample;
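parseChunk() keeps its handling of the ISO base-media "largesize" form while moving to 64-bit offsets: a 32-bit box size of 1 means the real size follows the fourcc as a 64-bit field. A self-contained sketch of that header rule over an in-memory buffer (parseBoxHeader and BoxHeader are illustrative names, not stagefright API):

#include <cstdint>
#include <cstring>
#include <arpa/inet.h>   // ntohl
#include <endian.h>      // be64toh (glibc/bionic)

// Sketch of the box-header rule parseChunk() applies: a 32-bit size of 1
// means a 64-bit "largesize" follows the fourcc (ISO/IEC 14496-12).
struct BoxHeader {
    uint64_t size;         // total box size, header included
    uint32_t type;         // fourcc
    uint64_t dataOffset;   // where the payload starts
};

static bool parseBoxHeader(const uint8_t *buf, size_t avail, uint64_t offset,
                           BoxHeader *out) {
    if (offset + 8 > avail) {
        return false;
    }
    uint32_t hdr[2];
    memcpy(hdr, buf + offset, 8);
    out->size = ntohl(hdr[0]);
    out->type = ntohl(hdr[1]);
    out->dataOffset = offset + 8;

    if (out->size == 1) {              // extended size follows the fourcc
        if (offset + 16 > avail) {
            return false;
        }
        uint64_t bigSize;
        memcpy(&bigSize, buf + offset + 8, 8);
        out->size = be64toh(bigSize);
        out->dataOffset += 8;
    }
    return true;
}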
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index cbb1604..6760707 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -33,6 +33,10 @@
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/Utils.h>
#include <media/mediarecorder.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <unistd.h>
#include "include/ESDS.h"
@@ -64,7 +68,7 @@ public:
bool isAvc() const { return mIsAvc; }
bool isAudio() const { return mIsAudio; }
bool isMPEG4() const { return mIsMPEG4; }
- void addChunkOffset(off_t offset);
+ void addChunkOffset(off64_t offset);
status_t dump(int fd, const Vector<String16>& args) const;
private:
@@ -99,7 +103,7 @@ private:
List<MediaBuffer *> mChunkSamples;
size_t mNumStcoTableEntries;
- List<off_t> mChunkOffsets;
+ List<off64_t> mChunkOffsets;
size_t mNumStscTableEntries;
struct StscTableEntry {
@@ -214,7 +218,8 @@ private:
};
MPEG4Writer::MPEG4Writer(const char *filename)
- : mFile(fopen(filename, "wb")),
+ : mFd(-1),
+ mInitCheck(NO_INIT),
mUse4ByteNalLength(true),
mUse32BitOffset(true),
mIsFileSizeLimitExplicitlyRequested(false),
@@ -224,11 +229,16 @@ MPEG4Writer::MPEG4Writer(const char *filename)
mMdatOffset(0),
mEstimatedMoovBoxSize(0),
mInterleaveDurationUs(1000000) {
- CHECK(mFile != NULL);
+
+ mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, 0644);
+ if (mFd >= 0) {
+ mInitCheck = OK;
+ }
}
MPEG4Writer::MPEG4Writer(int fd)
- : mFile(fdopen(fd, "wb")),
+ : mFd(dup(fd)),
+ mInitCheck(mFd < 0? NO_INIT: OK),
mUse4ByteNalLength(true),
mUse32BitOffset(true),
mIsFileSizeLimitExplicitlyRequested(false),
@@ -238,7 +248,6 @@ MPEG4Writer::MPEG4Writer(int fd)
mMdatOffset(0),
mEstimatedMoovBoxSize(0),
mInterleaveDurationUs(1000000) {
- CHECK(mFile != NULL);
}
MPEG4Writer::~MPEG4Writer() {
@@ -368,7 +377,7 @@ int64_t MPEG4Writer::estimateMoovBoxSize(int32_t bitRate) {
}
status_t MPEG4Writer::start(MetaData *param) {
- if (mFile == NULL) {
+ if (mInitCheck != OK) {
return UNKNOWN_ERROR;
}
@@ -459,13 +468,13 @@ status_t MPEG4Writer::start(MetaData *param) {
mEstimatedMoovBoxSize = estimateMoovBoxSize(bitRate);
}
CHECK(mEstimatedMoovBoxSize >= 8);
- fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+ lseek64(mFd, mFreeBoxOffset, SEEK_SET);
writeInt32(mEstimatedMoovBoxSize);
write("free", 4);
mMdatOffset = mFreeBoxOffset + mEstimatedMoovBoxSize;
mOffset = mMdatOffset;
- fseeko(mFile, mMdatOffset, SEEK_SET);
+ lseek64(mFd, mMdatOffset, SEEK_SET);
if (mUse32BitOffset) {
write("????mdat", 8);
} else {
@@ -491,7 +500,7 @@ bool MPEG4Writer::use32BitFileOffset() const {
}
status_t MPEG4Writer::pause() {
- if (mFile == NULL) {
+ if (mInitCheck != OK) {
return OK;
}
mPaused = true;
@@ -507,7 +516,7 @@ status_t MPEG4Writer::pause() {
}
void MPEG4Writer::stopWriterThread() {
- LOGV("stopWriterThread");
+ LOGD("Stopping writer thread");
{
Mutex::Autolock autolock(mLock);
@@ -518,6 +527,7 @@ void MPEG4Writer::stopWriterThread() {
void *dummy;
pthread_join(mThread, &dummy);
+ LOGD("Writer thread stopped");
}
/*
@@ -572,8 +582,9 @@ void MPEG4Writer::writeCompositionMatrix(int degrees) {
writeInt32(0x40000000); // w
}
+
status_t MPEG4Writer::stop() {
- if (mFile == NULL) {
+ if (mInitCheck != OK) {
return OK;
}
@@ -596,28 +607,28 @@ status_t MPEG4Writer::stop() {
// Do not write out movie header on error.
if (err != OK) {
- fflush(mFile);
- fclose(mFile);
- mFile = NULL;
+ close(mFd);
+ mFd = -1;
+ mInitCheck = NO_INIT;
mStarted = false;
return err;
}
// Fix up the size of the 'mdat' chunk.
if (mUse32BitOffset) {
- fseeko(mFile, mMdatOffset, SEEK_SET);
+ lseek64(mFd, mMdatOffset, SEEK_SET);
int32_t size = htonl(static_cast<int32_t>(mOffset - mMdatOffset));
- fwrite(&size, 1, 4, mFile);
+ ::write(mFd, &size, 4);
} else {
- fseeko(mFile, mMdatOffset + 8, SEEK_SET);
+ lseek64(mFd, mMdatOffset + 8, SEEK_SET);
int64_t size = mOffset - mMdatOffset;
size = hton64(size);
- fwrite(&size, 1, 8, mFile);
+ ::write(mFd, &size, 8);
}
- fseeko(mFile, mOffset, SEEK_SET);
+ lseek64(mFd, mOffset, SEEK_SET);
time_t now = time(NULL);
- const off_t moovOffset = mOffset;
+ const off64_t moovOffset = mOffset;
mWriteMoovBoxToMemory = true;
mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
mMoovBoxBufferOffset = 0;
@@ -637,7 +648,7 @@ status_t MPEG4Writer::stop() {
writeInt16(0); // reserved
writeInt32(0); // reserved
writeInt32(0); // reserved
- writeCompositionMatrix(0);
+ writeCompositionMatrix(0); // matrix
writeInt32(0); // predefined
writeInt32(0); // predefined
writeInt32(0); // predefined
@@ -659,12 +670,12 @@ status_t MPEG4Writer::stop() {
CHECK(mMoovBoxBufferOffset + 8 <= mEstimatedMoovBoxSize);
// Moov box
- fseeko(mFile, mFreeBoxOffset, SEEK_SET);
+ lseek64(mFd, mFreeBoxOffset, SEEK_SET);
mOffset = mFreeBoxOffset;
- write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, mFile);
+ write(mMoovBoxBuffer, 1, mMoovBoxBufferOffset);
// Free box
- fseeko(mFile, mOffset, SEEK_SET);
+ lseek64(mFd, mOffset, SEEK_SET);
writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset);
write("free", 4);
@@ -678,9 +689,9 @@ status_t MPEG4Writer::stop() {
CHECK(mBoxes.empty());
- fflush(mFile);
- fclose(mFile);
- mFile = NULL;
+ close(mFd);
+ mFd = -1;
+ mInitCheck = NO_INIT;
mStarted = false;
return err;
}
@@ -698,11 +709,12 @@ void MPEG4Writer::unlock() {
mLock.unlock();
}
-off_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
- off_t old_offset = mOffset;
+off64_t MPEG4Writer::addSample_l(MediaBuffer *buffer) {
+ off64_t old_offset = mOffset;
- fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
- 1, buffer->range_length(), mFile);
+ ::write(mFd,
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ buffer->range_length());
mOffset += buffer->range_length();
@@ -723,33 +735,34 @@ static void StripStartcode(MediaBuffer *buffer) {
}
}
-off_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
- off_t old_offset = mOffset;
+off64_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
+ off64_t old_offset = mOffset;
size_t length = buffer->range_length();
if (mUse4ByteNalLength) {
uint8_t x = length >> 24;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = (length >> 16) & 0xff;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = (length >> 8) & 0xff;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = length & 0xff;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
+
+ ::write(mFd,
+ (const uint8_t *)buffer->data() + buffer->range_offset(),
+ length);
- fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
- 1, length, mFile);
mOffset += length + 4;
} else {
CHECK(length < 65536);
uint8_t x = length >> 8;
- fwrite(&x, 1, 1, mFile);
+ ::write(mFd, &x, 1);
x = length & 0xff;
- fwrite(&x, 1, 1, mFile);
- fwrite((const uint8_t *)buffer->data() + buffer->range_offset(),
- 1, length, mFile);
+ ::write(mFd, &x, 1);
+ ::write(mFd, (const uint8_t *)buffer->data() + buffer->range_offset(), length);
mOffset += length + 2;
}
@@ -757,19 +770,21 @@ off_t MPEG4Writer::addLengthPrefixedSample_l(MediaBuffer *buffer) {
}
size_t MPEG4Writer::write(
- const void *ptr, size_t size, size_t nmemb, FILE *stream) {
+ const void *ptr, size_t size, size_t nmemb) {
const size_t bytes = size * nmemb;
if (mWriteMoovBoxToMemory) {
- off_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
+ // This happens only when we write the moov box at the end of
+ // recording, not for each output video/audio frame we receive.
+ off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
if (moovBoxSize > mEstimatedMoovBoxSize) {
- for (List<off_t>::iterator it = mBoxes.begin();
+ for (List<off64_t>::iterator it = mBoxes.begin();
it != mBoxes.end(); ++it) {
(*it) += mOffset;
}
- fseeko(mFile, mOffset, SEEK_SET);
- fwrite(mMoovBoxBuffer, 1, mMoovBoxBufferOffset, stream);
- fwrite(ptr, size, nmemb, stream);
+ lseek64(mFd, mOffset, SEEK_SET);
+ ::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset);
+ ::write(mFd, ptr, size * nmemb);
mOffset += (bytes + mMoovBoxBufferOffset);
free(mMoovBoxBuffer);
mMoovBoxBuffer = NULL;
@@ -781,7 +796,7 @@ size_t MPEG4Writer::write(
mMoovBoxBufferOffset += bytes;
}
} else {
- fwrite(ptr, size, nmemb, stream);
+ ::write(mFd, ptr, size * nmemb);
mOffset += bytes;
}
return bytes;
@@ -800,51 +815,51 @@ void MPEG4Writer::beginBox(const char *fourcc) {
void MPEG4Writer::endBox() {
CHECK(!mBoxes.empty());
- off_t offset = *--mBoxes.end();
+ off64_t offset = *--mBoxes.end();
mBoxes.erase(--mBoxes.end());
if (mWriteMoovBoxToMemory) {
int32_t x = htonl(mMoovBoxBufferOffset - offset);
memcpy(mMoovBoxBuffer + offset, &x, 4);
} else {
- fseeko(mFile, offset, SEEK_SET);
+ lseek64(mFd, offset, SEEK_SET);
writeInt32(mOffset - offset);
mOffset -= 4;
- fseeko(mFile, mOffset, SEEK_SET);
+ lseek64(mFd, mOffset, SEEK_SET);
}
}
void MPEG4Writer::writeInt8(int8_t x) {
- write(&x, 1, 1, mFile);
+ write(&x, 1, 1);
}
void MPEG4Writer::writeInt16(int16_t x) {
x = htons(x);
- write(&x, 1, 2, mFile);
+ write(&x, 1, 2);
}
void MPEG4Writer::writeInt32(int32_t x) {
x = htonl(x);
- write(&x, 1, 4, mFile);
+ write(&x, 1, 4);
}
void MPEG4Writer::writeInt64(int64_t x) {
x = hton64(x);
- write(&x, 1, 8, mFile);
+ write(&x, 1, 8);
}
void MPEG4Writer::writeCString(const char *s) {
size_t n = strlen(s);
- write(s, 1, n + 1, mFile);
+ write(s, 1, n + 1);
}
void MPEG4Writer::writeFourcc(const char *s) {
CHECK_EQ(strlen(s), 4);
- write(s, 1, 4, mFile);
+ write(s, 1, 4);
}
void MPEG4Writer::write(const void *data, size_t size) {
- write(data, 1, size, mFile);
+ write(data, 1, size);
}
bool MPEG4Writer::isFileStreamable() const {
@@ -985,7 +1000,7 @@ void MPEG4Writer::Track::addOneSttsTableEntry(
++mNumSttsTableEntries;
}
-void MPEG4Writer::Track::addChunkOffset(off_t offset) {
+void MPEG4Writer::Track::addChunkOffset(off64_t offset) {
++mNumStcoTableEntries;
mChunkOffsets.push_back(offset);
}
@@ -1100,7 +1115,7 @@ void MPEG4Writer::writeFirstChunk(ChunkInfo* info) {
for (List<MediaBuffer *>::iterator it = chunkIt->mSamples.begin();
it != chunkIt->mSamples.end(); ++it) {
- off_t offset = info->mTrack->isAvc()
+ off64_t offset = info->mTrack->isAvc()
? addLengthPrefixedSample_l(*it)
: addSample_l(*it);
if (it == chunkIt->mSamples.begin()) {
@@ -1225,7 +1240,7 @@ status_t MPEG4Writer::Track::start(MetaData *params) {
}
int32_t rotationDegrees;
- if (!mIsAudio && params && params->findInt32(kKeyRotationDegree, &rotationDegrees)) {
+ if (!mIsAudio && params && params->findInt32(kKeyRotation, &rotationDegrees)) {
mRotation = rotationDegrees;
}
@@ -1279,6 +1294,7 @@ status_t MPEG4Writer::Track::pause() {
}
status_t MPEG4Writer::Track::stop() {
+ LOGD("Stopping %s track", mIsAudio? "Audio": "Video");
if (mDone) {
return OK;
}
@@ -1290,6 +1306,7 @@ status_t MPEG4Writer::Track::stop() {
status_t err = (status_t) dummy;
+ LOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
{
status_t status = mSource->stop();
if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
@@ -1297,6 +1314,7 @@ status_t MPEG4Writer::Track::stop() {
}
}
+ LOGD("%s track stopped", mIsAudio? "Audio": "Video");
return err;
}
@@ -1921,7 +1939,7 @@ status_t MPEG4Writer::Track::threadEntry() {
trackProgressStatus(timestampUs);
}
if (mOwner->numTracks() == 1) {
- off_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
+ off64_t offset = mIsAvc? mOwner->addLengthPrefixedSample_l(copy)
: mOwner->addSample_l(copy);
if (mChunkOffsets.empty()) {
addChunkOffset(offset);
@@ -2122,7 +2140,7 @@ void MPEG4Writer::Track::writeTrackHeader(
mOwner->writeInt16(mIsAudio ? 0x100 : 0); // volume
mOwner->writeInt16(0); // reserved
- mOwner->writeCompositionMatrix(mRotation);
+ mOwner->writeCompositionMatrix(mRotation); // matrix
if (mIsAudio) {
mOwner->writeInt32(0);
@@ -2260,6 +2278,9 @@ void MPEG4Writer::Track::writeTrackHeader(
CHECK(mCodecSpecificData);
CHECK(mCodecSpecificDataSize > 0);
+ // Make sure all sizes encode to a single byte.
+ CHECK(mCodecSpecificDataSize + 23 < 128);
+
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt8(0x03); // ES_DescrTag
mOwner->writeInt8(23 + mCodecSpecificDataSize);
@@ -2469,7 +2490,7 @@ void MPEG4Writer::Track::writeTrackHeader(
mOwner->beginBox(use32BitOffset? "stco": "co64");
mOwner->writeInt32(0); // version=0, flags=0
mOwner->writeInt32(mNumStcoTableEntries);
- for (List<off_t>::iterator it = mChunkOffsets.begin();
+ for (List<off64_t>::iterator it = mChunkOffsets.begin();
it != mChunkOffsets.end(); ++it) {
if (use32BitOffset) {
mOwner->writeInt32(static_cast<int32_t>(*it));
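With the move to raw file descriptors, the 'mdat' placeholder fix-up in stop() becomes an lseek64() back to mMdatOffset followed by a 4-byte (or, for co64, 8-byte) ::write(). A sketch of the 32-bit case (fixupMdatSize32 is an illustrative name):

#include <cstdint>
#include <sys/types.h>
#include <unistd.h>
#include <arpa/inet.h>   // htonl

// Sketch: rewrite the 32-bit 'mdat' size in place once writing is done,
// mirroring the lseek64/::write sequence in MPEG4Writer::stop().
static bool fixupMdatSize32(int fd, off64_t mdatOffset, off64_t endOffset) {
    int32_t size = htonl(static_cast<int32_t>(endOffset - mdatOffset));
    if (lseek64(fd, mdatOffset, SEEK_SET) == -1) {
        return false;
    }
    return ::write(fd, &size, 4) == 4;
}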
diff --git a/media/libstagefright/MediaBuffer.cpp b/media/libstagefright/MediaBuffer.cpp
index b973745..cbccd31 100644
--- a/media/libstagefright/MediaBuffer.cpp
+++ b/media/libstagefright/MediaBuffer.cpp
@@ -25,6 +25,8 @@
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MetaData.h>
+#include <ui/GraphicBuffer.h>
+
namespace android {
// XXX make this truly atomic.
@@ -61,6 +63,20 @@ MediaBuffer::MediaBuffer(size_t size)
mOriginal(NULL) {
}
+MediaBuffer::MediaBuffer(const sp<GraphicBuffer>& graphicBuffer)
+ : mObserver(NULL),
+ mNextBuffer(NULL),
+ mRefCount(0),
+ mData(NULL),
+ mSize(1),
+ mRangeOffset(0),
+ mRangeLength(mSize),
+ mGraphicBuffer(graphicBuffer),
+ mOwnsData(false),
+ mMetaData(new MetaData),
+ mOriginal(NULL) {
+}
+
void MediaBuffer::release() {
if (mObserver == NULL) {
CHECK_EQ(mRefCount, 0);
@@ -92,10 +108,12 @@ void MediaBuffer::add_ref() {
}
void *MediaBuffer::data() const {
+ CHECK(mGraphicBuffer == NULL);
return mData;
}
size_t MediaBuffer::size() const {
+ CHECK(mGraphicBuffer == NULL);
return mSize;
}
@@ -108,15 +126,19 @@ size_t MediaBuffer::range_length() const {
}
void MediaBuffer::set_range(size_t offset, size_t length) {
- if (offset + length > mSize) {
+ if ((mGraphicBuffer == NULL) && (offset + length > mSize)) {
LOGE("offset = %d, length = %d, mSize = %d", offset, length, mSize);
}
- CHECK(offset + length <= mSize);
+ CHECK((mGraphicBuffer != NULL) || (offset + length <= mSize));
mRangeOffset = offset;
mRangeLength = length;
}
+sp<GraphicBuffer> MediaBuffer::graphicBuffer() const {
+ return mGraphicBuffer;
+}
+
sp<MetaData> MediaBuffer::meta_data() {
return mMetaData;
}
@@ -158,6 +180,8 @@ int MediaBuffer::refcount() const {
}
MediaBuffer *MediaBuffer::clone() {
+ CHECK_EQ(mGraphicBuffer, NULL);
+
MediaBuffer *buffer = new MediaBuffer(mData, mSize);
buffer->set_range(mRangeOffset, mRangeLength);
buffer->mMetaData = new MetaData(*mMetaData.get());
@@ -169,4 +193,3 @@ MediaBuffer *MediaBuffer::clone() {
}
} // namespace android
-
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 7648d42..4599fca 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -42,4 +42,6 @@ const char *MEDIA_MIMETYPE_CONTAINER_OGG = "application/ogg";
const char *MEDIA_MIMETYPE_CONTAINER_MATROSKA = "video/x-matroska";
const char *MEDIA_MIMETYPE_CONTAINER_MPEG2TS = "video/mp2ts";
+const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
+
} // namespace android
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index 965c370..d12ac64 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -25,6 +25,7 @@
#include "include/OggExtractor.h"
#include "include/MPEG2TSExtractor.h"
#include "include/DRMExtractor.h"
+#include "include/WVMExtractor.h"
#include "matroska/MatroskaExtractor.h"
@@ -92,6 +93,8 @@ sp<MediaExtractor> MediaExtractor::Create(
return new MatroskaExtractor(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG2TS)) {
return new MPEG2TSExtractor(source);
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_WVM)) {
+ return new WVMExtractor(source);
}
return NULL;
diff --git a/media/libstagefright/MediaSourceSplitter.cpp b/media/libstagefright/MediaSourceSplitter.cpp
new file mode 100644
index 0000000..abc7012
--- /dev/null
+++ b/media/libstagefright/MediaSourceSplitter.cpp
@@ -0,0 +1,234 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSourceSplitter"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaSourceSplitter.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MetaData.h>
+
+namespace android {
+
+MediaSourceSplitter::MediaSourceSplitter(sp<MediaSource> mediaSource) {
+ mNumberOfClients = 0;
+ mSource = mediaSource;
+ mSourceStarted = false;
+
+ mNumberOfClientsStarted = 0;
+ mNumberOfCurrentReads = 0;
+ mCurrentReadBit = 0;
+ mLastReadCompleted = true;
+}
+
+MediaSourceSplitter::~MediaSourceSplitter() {
+}
+
+sp<MediaSource> MediaSourceSplitter::createClient() {
+ Mutex::Autolock autoLock(mLock);
+
+ sp<MediaSource> client = new Client(this, mNumberOfClients++);
+ mClientsStarted.push(false);
+ mClientsDesiredReadBit.push(0);
+ return client;
+}
+
+status_t MediaSourceSplitter::start(int clientId, MetaData *params) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("start client (%d)", clientId);
+ if (mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ mNumberOfClientsStarted++;
+
+ if (!mSourceStarted) {
+ LOGV("Starting real source from client (%d)", clientId);
+ status_t err = mSource->start(params);
+
+ if (err == OK) {
+ mSourceStarted = true;
+ mClientsStarted.editItemAt(clientId) = true;
+ mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+ }
+
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = true;
+ if (mLastReadCompleted) {
+ // Last read was completed. So join in the threads for the next read.
+ mClientsDesiredReadBit.editItemAt(clientId) = !mCurrentReadBit;
+ } else {
+ // Last read is ongoing. So join in the threads for the current read.
+ mClientsDesiredReadBit.editItemAt(clientId) = mCurrentReadBit;
+ }
+ return OK;
+ }
+}
+
+status_t MediaSourceSplitter::stop(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("stop client (%d)", clientId);
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+ CHECK(mClientsStarted[clientId]);
+
+ if (--mNumberOfClientsStarted == 0) {
+ LOGV("Stopping real source from client (%d)", clientId);
+ status_t err = mSource->stop();
+ mSourceStarted = false;
+ mClientsStarted.editItemAt(clientId) = false;
+ return err;
+ } else {
+ mClientsStarted.editItemAt(clientId) = false;
+ if (!mLastReadCompleted && (mClientsDesiredReadBit[clientId] == mCurrentReadBit)) {
+ // !mLastReadCompleted implies that a buffer has been read from the source,
+ // but not all clients have read it yet.
+ // mClientsDesiredReadBit[clientId] == mCurrentReadBit implies that this
+ // client would have wanted to read from this buffer. (i.e. it has not yet
+ // called read() for the current read buffer.)
+ // Since other threads may be waiting for all the clients' reads to complete,
+ // signal that this read has been aborted.
+ signalReadComplete_lock(true);
+ }
+ return OK;
+ }
+}
+
+sp<MetaData> MediaSourceSplitter::getFormat(int clientId) {
+ Mutex::Autolock autoLock(mLock);
+
+ LOGV("getFormat client (%d)", clientId);
+ return mSource->getFormat();
+}
+
+status_t MediaSourceSplitter::read(int clientId,
+ MediaBuffer **buffer, const MediaSource::ReadOptions *options) {
+ Mutex::Autolock autoLock(mLock);
+
+ CHECK(clientId >= 0 && clientId < mNumberOfClients);
+
+ LOGV("read client (%d)", clientId);
+ *buffer = NULL;
+
+ if (!mClientsStarted[clientId]) {
+ return OK;
+ }
+
+ if (mCurrentReadBit != mClientsDesiredReadBit[clientId]) {
+ // Desired buffer has not been read from source yet.
+
+ // If the current client is the special client with clientId = 0,
+ // read from the source; otherwise wait until client 0 has finished
+ // reading from the source.
+ if (clientId == 0) {
+ // Wait for all clients' last read to complete first, so as not to
+ // corrupt the buffer at mLastReadMediaBuffer.
+ waitForAllClientsLastRead_lock(clientId);
+
+ readFromSource_lock(options);
+ *buffer = mLastReadMediaBuffer;
+ } else {
+ waitForReadFromSource_lock(clientId);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+ CHECK(mCurrentReadBit == mClientsDesiredReadBit[clientId]);
+ } else {
+ // Desired buffer has already been read from source. Use the cached data.
+ CHECK(clientId != 0);
+
+ *buffer = mLastReadMediaBuffer;
+ (*buffer)->add_ref();
+ }
+
+ mClientsDesiredReadBit.editItemAt(clientId) = !mClientsDesiredReadBit[clientId];
+ signalReadComplete_lock(false);
+
+ return mLastReadStatus;
+}
+
+void MediaSourceSplitter::readFromSource_lock(const MediaSource::ReadOptions *options) {
+ mLastReadStatus = mSource->read(&mLastReadMediaBuffer, options);
+
+ mCurrentReadBit = !mCurrentReadBit;
+ mLastReadCompleted = false;
+ mReadFromSourceCondition.broadcast();
+}
+
+void MediaSourceSplitter::waitForReadFromSource_lock(int32_t clientId) {
+ mReadFromSourceCondition.wait(mLock);
+}
+
+void MediaSourceSplitter::waitForAllClientsLastRead_lock(int32_t clientId) {
+ if (mLastReadCompleted) {
+ return;
+ }
+ mAllReadsCompleteCondition.wait(mLock);
+ CHECK(mLastReadCompleted);
+}
+
+void MediaSourceSplitter::signalReadComplete_lock(bool readAborted) {
+ if (!readAborted) {
+ mNumberOfCurrentReads++;
+ }
+
+ if (mNumberOfCurrentReads == mNumberOfClientsStarted) {
+ mLastReadCompleted = true;
+ mNumberOfCurrentReads = 0;
+ mAllReadsCompleteCondition.broadcast();
+ }
+}
+
+status_t MediaSourceSplitter::pause(int clientId) {
+ return ERROR_UNSUPPORTED;
+}
+
+// Client
+
+MediaSourceSplitter::Client::Client(
+ sp<MediaSourceSplitter> splitter,
+ int32_t clientId) {
+ mSplitter = splitter;
+ mClientId = clientId;
+}
+
+status_t MediaSourceSplitter::Client::start(MetaData *params) {
+ return mSplitter->start(mClientId, params);
+}
+
+status_t MediaSourceSplitter::Client::stop() {
+ return mSplitter->stop(mClientId);
+}
+
+sp<MetaData> MediaSourceSplitter::Client::getFormat() {
+ return mSplitter->getFormat(mClientId);
+}
+
+status_t MediaSourceSplitter::Client::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ return mSplitter->read(mClientId, buffer, options);
+}
+
+status_t MediaSourceSplitter::Client::pause() {
+ return mSplitter->pause(mClientId);
+}
+
+} // namespace android
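Taken together, the splitter lets one camera or encoder source feed several consumers: client 0 performs the real read, later clients wait and then add_ref() the same MediaBuffer, and the per-client read bits track who still has to consume the current buffer. A hedged usage sketch against the interfaces used above (not buildable standalone; the variable names and getVideoSource() helper are illustrative):

// Hedged usage sketch only -- relies on the stagefright types above.
sp<MediaSource> realSource = getVideoSource();          // e.g. a CameraSource (assumed helper)
sp<MediaSourceSplitter> splitter = new MediaSourceSplitter(realSource);

sp<MediaSource> clientA = splitter->createClient();     // clientId 0: does the real read
sp<MediaSource> clientB = splitter->createClient();     // clientId 1: shares the buffer

clientA->start();
clientB->start();

MediaBuffer *bufferA, *bufferB;
clientA->read(&bufferA);     // pulls one frame from the real source
clientB->read(&bufferB);     // same frame, with an extra reference added

// ... hand bufferA and bufferB to two different consumers ...
bufferA->release();
bufferB->release();

clientA->stop();
clientB->stop();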
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index b67002d..829ab20 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -201,7 +201,7 @@ status_t NuCachedSource2::initCheck() const {
return mSource->initCheck();
}
-status_t NuCachedSource2::getSize(off_t *size) {
+status_t NuCachedSource2::getSize(off64_t *size) {
return mSource->getSize(size);
}
@@ -325,15 +325,17 @@ void NuCachedSource2::onRead(const sp<AMessage> &msg) {
mCondition.signal();
}
-void NuCachedSource2::restartPrefetcherIfNecessary_l() {
+void NuCachedSource2::restartPrefetcherIfNecessary_l(
+ bool ignoreLowWaterThreshold) {
static const size_t kGrayArea = 256 * 1024;
if (mFetching || mFinalStatus != OK) {
return;
}
- if (mCacheOffset + mCache->totalSize() - mLastAccessPos
- >= kLowWaterThreshold) {
+ if (!ignoreLowWaterThreshold
+ && mCacheOffset + mCache->totalSize() - mLastAccessPos
+ >= kLowWaterThreshold) {
return;
}
@@ -351,7 +353,7 @@ void NuCachedSource2::restartPrefetcherIfNecessary_l() {
mFetching = true;
}
-ssize_t NuCachedSource2::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuCachedSource2::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoSerializer(mSerializer);
LOGV("readAt offset %ld, size %d", offset, size);
@@ -406,27 +408,27 @@ size_t NuCachedSource2::approxDataRemaining(bool *eos) {
size_t NuCachedSource2::approxDataRemaining_l(bool *eos) {
*eos = (mFinalStatus != OK);
- off_t lastBytePosCached = mCacheOffset + mCache->totalSize();
+ off64_t lastBytePosCached = mCacheOffset + mCache->totalSize();
if (mLastAccessPos < lastBytePosCached) {
return lastBytePosCached - mLastAccessPos;
}
return 0;
}
-ssize_t NuCachedSource2::readInternal(off_t offset, void *data, size_t size) {
+ssize_t NuCachedSource2::readInternal(off64_t offset, void *data, size_t size) {
LOGV("readInternal offset %ld size %d", offset, size);
Mutex::Autolock autoLock(mLock);
if (offset < mCacheOffset
- || offset >= (off_t)(mCacheOffset + mCache->totalSize())) {
- static const off_t kPadding = 32768;
+ || offset >= (off64_t)(mCacheOffset + mCache->totalSize())) {
+ static const off64_t kPadding = 32768;
 // In the presence of multiple decoded streams, one of them will
 // trigger this seek request, the other one will request data "nearby"
 // soon; adjust the seek position so that the subsequent request
 // does not trigger another seek.
- off_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
+ off64_t seekOffset = (offset > kPadding) ? offset - kPadding : 0;
seekInternal_l(seekOffset);
}
@@ -455,15 +457,15 @@ ssize_t NuCachedSource2::readInternal(off_t offset, void *data, size_t size) {
return -EAGAIN;
}
-status_t NuCachedSource2::seekInternal_l(off_t offset) {
+status_t NuCachedSource2::seekInternal_l(off64_t offset) {
mLastAccessPos = offset;
if (offset >= mCacheOffset
- && offset <= (off_t)(mCacheOffset + mCache->totalSize())) {
+ && offset <= (off64_t)(mCacheOffset + mCache->totalSize())) {
return OK;
}
- LOGI("new range: offset= %ld", offset);
+ LOGI("new range: offset= %lld", offset);
mCacheOffset = offset;
@@ -510,5 +512,22 @@ void NuCachedSource2::onSuspend() {
mSuspended = true;
}
+void NuCachedSource2::resumeFetchingIfNecessary() {
+ Mutex::Autolock autoLock(mLock);
+
+ restartPrefetcherIfNecessary_l(true /* ignore low water threshold */);
+}
+
+DecryptHandle* NuCachedSource2::DrmInitialization(DrmManagerClient* client) {
+ return mSource->DrmInitialization(client);
+}
+
+void NuCachedSource2::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+ mSource->getDrmInfo(handle, client);
+}
+
+String8 NuCachedSource2::getUri() {
+ return mSource->getUri();
+}
} // namespace android
diff --git a/media/libstagefright/NuHTTPDataSource.cpp b/media/libstagefright/NuHTTPDataSource.cpp
index 2743b2f..269b233 100644
--- a/media/libstagefright/NuHTTPDataSource.cpp
+++ b/media/libstagefright/NuHTTPDataSource.cpp
@@ -5,6 +5,7 @@
#include "include/NuHTTPDataSource.h"
#include <cutils/properties.h>
+#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaErrors.h>
@@ -67,7 +68,14 @@ NuHTTPDataSource::NuHTTPDataSource()
mPort(0),
mOffset(0),
mContentLength(0),
- mContentLengthValid(false) {
+ mContentLengthValid(false),
+ mHasChunkedTransferEncoding(false),
+ mChunkDataBytesLeft(0),
+ mNumBandwidthHistoryItems(0),
+ mTotalTransferTimeUs(0),
+ mTotalTransferBytes(0),
+ mDecryptHandle(NULL),
+ mDrmManagerClient(NULL) {
}
NuHTTPDataSource::~NuHTTPDataSource() {
@@ -76,7 +84,7 @@ NuHTTPDataSource::~NuHTTPDataSource() {
status_t NuHTTPDataSource::connect(
const char *uri,
const KeyedVector<String8, String8> *overrides,
- off_t offset) {
+ off64_t offset) {
String8 headers;
MakeFullHeaders(overrides, &headers);
@@ -86,9 +94,12 @@ status_t NuHTTPDataSource::connect(
status_t NuHTTPDataSource::connect(
const char *uri,
const String8 &headers,
- off_t offset) {
+ off64_t offset) {
String8 host, path;
unsigned port;
+
+ mUri = uri;
+
if (!ParseURL(uri, &host, &port, &path)) {
return ERROR_MALFORMED;
}
@@ -104,8 +115,8 @@ static bool IsRedirectStatusCode(int httpStatus) {
status_t NuHTTPDataSource::connect(
const char *host, unsigned port, const char *path,
const String8 &headers,
- off_t offset) {
- LOGI("connect to %s:%u%s @%ld", host, port, path, offset);
+ off64_t offset) {
+ LOGI("connect to %s:%u%s @%lld", host, port, path, offset);
bool needsToReconnect = true;
@@ -151,7 +162,7 @@ status_t NuHTTPDataSource::connect(
if (offset != 0) {
char rangeHeader[128];
- sprintf(rangeHeader, "Range: bytes=%ld-\r\n", offset);
+ sprintf(rangeHeader, "Range: bytes=%lld-\r\n", offset);
request.append(rangeHeader);
}
@@ -167,7 +178,7 @@ status_t NuHTTPDataSource::connect(
}
if (IsRedirectStatusCode(httpStatus)) {
- string value;
+ AString value;
CHECK(mHTTP.find_header_value("Location", &value));
mState = DISCONNECTED;
@@ -184,20 +195,52 @@ status_t NuHTTPDataSource::connect(
return ERROR_IO;
}
+ mHasChunkedTransferEncoding = false;
+
+ {
+ AString value;
+ if (mHTTP.find_header_value("Transfer-Encoding", &value)) {
+ // We don't currently support any transfer encodings but
+ // chunked.
+
+ if (!strcasecmp(value.c_str(), "chunked")) {
+ LOGI("Chunked transfer encoding applied.");
+ mHasChunkedTransferEncoding = true;
+ mChunkDataBytesLeft = 0;
+ } else {
+ mState = DISCONNECTED;
+ mHTTP.disconnect();
+
+ LOGE("We don't support '%s' transfer encoding.", value.c_str());
+
+ return ERROR_UNSUPPORTED;
+ }
+ }
+ }
+
applyTimeoutResponse();
if (offset == 0) {
- string value;
+ AString value;
unsigned long x;
- if (mHTTP.find_header_value(string("Content-Length"), &value)
+ if (mHTTP.find_header_value(AString("Content-Length"), &value)
&& ParseSingleUnsignedLong(value.c_str(), &x)) {
- mContentLength = (off_t)x;
+ mContentLength = (off64_t)x;
mContentLengthValid = true;
+ } else {
+ LOGW("Server did not give us the content length!");
}
} else {
- string value;
+ if (httpStatus != 206 /* Partial Content */) {
+ // We requested a range but the server didn't support that.
+ LOGE("We requested a range but the server didn't "
+ "support that.");
+ return ERROR_UNSUPPORTED;
+ }
+
+ AString value;
unsigned long x;
- if (mHTTP.find_header_value(string("Content-Range"), &value)) {
+ if (mHTTP.find_header_value(AString("Content-Range"), &value)) {
const char *slashPos = strchr(value.c_str(), '/');
if (slashPos != NULL
&& ParseSingleUnsignedLong(slashPos + 1, &x)) {
@@ -222,7 +265,72 @@ status_t NuHTTPDataSource::initCheck() const {
return mState == CONNECTED ? OK : NO_INIT;
}
-ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t NuHTTPDataSource::internalRead(void *data, size_t size) {
+ if (!mHasChunkedTransferEncoding) {
+ return mHTTP.receive(data, size);
+ }
+
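+ // Chunked transfer-encoding state: mChunkDataBytesLeft > 0 means that many
+ // payload bytes remain in the current chunk, 0 means the next chunk-size
+ // line has to be parsed, and a negative value means the final zero-length
+ // chunk has already been seen (end of stream).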
+ if (mChunkDataBytesLeft < 0) {
+ return 0;
+ } else if (mChunkDataBytesLeft == 0) {
+ char line[1024];
+ status_t err = mHTTP.receive_line(line, sizeof(line));
+
+ if (err != OK) {
+ return err;
+ }
+
+ LOGV("line = '%s'", line);
+
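+ // A chunk-size line is a hexadecimal byte count, optionally followed by
+ // a ';' and chunk extensions, which we ignore.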
+ char *end;
+ unsigned long n = strtoul(line, &end, 16);
+
+ if (end == line || (*end != ';' && *end != '\0')) {
+ LOGE("malformed HTTP chunk '%s'", line);
+ return ERROR_MALFORMED;
+ }
+
+ mChunkDataBytesLeft = n;
+ LOGV("chunk data size = %lu", n);
+
+ if (mChunkDataBytesLeft == 0) {
+ mChunkDataBytesLeft = -1;
+ return 0;
+ }
+
+ // fall through
+ }
+
+ if (size > (size_t)mChunkDataBytesLeft) {
+ size = mChunkDataBytesLeft;
+ }
+
+ ssize_t n = mHTTP.receive(data, size);
+
+ if (n < 0) {
+ return n;
+ }
+
+ mChunkDataBytesLeft -= (size_t)n;
+
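+ // Once the chunk payload has been fully consumed, it must be followed by
+ // an empty line before the next chunk-size line.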
+ if (mChunkDataBytesLeft == 0) {
+ char line[1024];
+ status_t err = mHTTP.receive_line(line, sizeof(line));
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (line[0] != '\0') {
+ LOGE("missing HTTP chunk terminator.");
+ return ERROR_MALFORMED;
+ }
+ }
+
+ return n;
+}
+
+ssize_t NuHTTPDataSource::readAt(off64_t offset, void *data, size_t size) {
LOGV("readAt offset %ld, size %d", offset, size);
Mutex::Autolock autoLock(mLock);
@@ -249,13 +357,18 @@ ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) {
size_t numBytesRead = 0;
while (numBytesRead < size) {
+ int64_t startTimeUs = ALooper::GetNowUs();
+
ssize_t n =
- mHTTP.receive((uint8_t *)data + numBytesRead, size - numBytesRead);
+ internalRead((uint8_t *)data + numBytesRead, size - numBytesRead);
if (n < 0) {
return n;
}
+ int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+ addBandwidthMeasurement_l(n, delayUs);
+
numBytesRead += (size_t)n;
if (n == 0) {
@@ -274,7 +387,7 @@ ssize_t NuHTTPDataSource::readAt(off_t offset, void *data, size_t size) {
return numBytesRead;
}
-status_t NuHTTPDataSource::getSize(off_t *size) {
+status_t NuHTTPDataSource::getSize(off64_t *size) {
*size = 0;
if (mState != CONNECTED) {
@@ -325,7 +438,7 @@ void NuHTTPDataSource::MakeFullHeaders(
}
void NuHTTPDataSource::applyTimeoutResponse() {
- string timeout;
+ AString timeout;
if (mHTTP.find_header_value("X-SocketTimeout", &timeout)) {
const char *s = timeout.c_str();
char *end;
@@ -340,4 +453,64 @@ void NuHTTPDataSource::applyTimeoutResponse() {
}
}
+bool NuHTTPDataSource::estimateBandwidth(int32_t *bandwidth_bps) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (mNumBandwidthHistoryItems < 10) {
+ return false;
+ }
+
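+ // mTotalTransferTimeUs is in microseconds, so the factor of 8E6 converts
+ // bytes per microsecond into bits per second.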
+ *bandwidth_bps = ((double)mTotalTransferBytes * 8E6 / mTotalTransferTimeUs);
+
+ return true;
+}
+
+void NuHTTPDataSource::addBandwidthMeasurement_l(
+ size_t numBytes, int64_t delayUs) {
+ BandwidthEntry entry;
+ entry.mDelayUs = delayUs;
+ entry.mNumBytes = numBytes;
+ mTotalTransferTimeUs += delayUs;
+ mTotalTransferBytes += numBytes;
+
+ mBandwidthHistory.push_back(entry);
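+ // Keep only the most recent 100 measurements; drop the oldest entry and
+ // subtract its contribution from the running totals.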
+ if (++mNumBandwidthHistoryItems > 100) {
+ BandwidthEntry *entry = &*mBandwidthHistory.begin();
+ mTotalTransferTimeUs -= entry->mDelayUs;
+ mTotalTransferBytes -= entry->mNumBytes;
+ mBandwidthHistory.erase(mBandwidthHistory.begin());
+ --mNumBandwidthHistoryItems;
+ }
+}
+
+DecryptHandle* NuHTTPDataSource::DrmInitialization(DrmManagerClient* client) {
+ if (client == NULL) {
+ return NULL;
+ }
+ mDrmManagerClient = client;
+
+ if (mDecryptHandle == NULL) {
+ /* Note: if a redirect occurred, mUri is the redirect URI instead of
+ * the original one.
+ */
+ mDecryptHandle = mDrmManagerClient->openDecryptSession(mUri);
+ }
+
+ if (mDecryptHandle == NULL) {
+ mDrmManagerClient = NULL;
+ }
+
+ return mDecryptHandle;
+}
+
+void NuHTTPDataSource::getDrmInfo(DecryptHandle **handle, DrmManagerClient **client) {
+ *handle = mDecryptHandle;
+
+ *client = mDrmManagerClient;
+}
+
+String8 NuHTTPDataSource::getUri() {
+ return mUri;
+}
+
} // namespace android
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 980da77..6ca0f4f 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -39,6 +39,7 @@
#include <binder/MemoryDealer.h>
#include <binder/ProcessState.h>
#include <media/IMediaPlayerService.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
@@ -53,6 +54,7 @@
#include <OMX_Component.h>
#include "include/ThreadedSource.h"
+#include "include/avc_utils.h"
namespace android {
@@ -152,37 +154,37 @@ static sp<MediaSource> InstantiateSoftwareCodec(
static const CodecInfo kDecoderInfo[] = {
{ MEDIA_MIMETYPE_IMAGE_JPEG, "OMX.TI.JPEG.decode" },
+// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.Nvidia.mp3.decoder" },
// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.TI.MP3.decode" },
{ MEDIA_MIMETYPE_AUDIO_MPEG, "MP3Decoder" },
-// { MEDIA_MIMETYPE_AUDIO_MPEG, "OMX.PV.mp3dec" },
// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.TI.AMR.decode" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amr.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_NB, "AMRNBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AMR_NB, "OMX.Nvidia.amrwb.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.TI.WBAMR.decode" },
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AMR_WB, "OMX.PV.amrdec" },
+// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.Nvidia.aac.decoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.decode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACDecoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacdec" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "G711Decoder" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW, "G711Decoder" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.decode" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.decoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4dec" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.decode" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.decoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Decoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263dec" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.decode" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.decoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Decoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCDecoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcdec" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, "VorbisDecoder" },
{ MEDIA_MIMETYPE_VIDEO_VPX, "VPXDecoder" },
};
@@ -194,25 +196,24 @@ static const CodecInfo kEncoderInfo[] = {
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, "AMRWBEncoder" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "OMX.TI.AAC.encode" },
{ MEDIA_MIMETYPE_AUDIO_AAC, "AACEncoder" },
-// { MEDIA_MIMETYPE_AUDIO_AAC, "OMX.PV.aacenc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.7x30.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.qcom.video.encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.Nvidia.mp4.encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.SEC.MPEG4.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_MPEG4, "OMX.PV.mpeg4enc" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.7x30.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.qcom.video.encoder.h263" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_H263, "OMX.Nvidia.h263.encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "OMX.SEC.H263.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_H263, "M4vH263Encoder" },
-// { MEDIA_MIMETYPE_VIDEO_H263, "OMX.PV.h263enc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.7x30.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.qcom.video.encoder.avc" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.TI.Video.encoder" },
+ { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.Nvidia.h264.encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "OMX.SEC.AVC.Encoder" },
{ MEDIA_MIMETYPE_VIDEO_AVC, "AVCEncoder" },
-// { MEDIA_MIMETYPE_VIDEO_AVC, "OMX.PV.avcenc" },
};
#undef OPTIONAL
@@ -264,39 +265,6 @@ static const char *GetCodec(const CodecInfo *info, size_t numInfos,
return NULL;
}
-enum {
- kAVCProfileBaseline = 0x42,
- kAVCProfileMain = 0x4d,
- kAVCProfileExtended = 0x58,
- kAVCProfileHigh = 0x64,
- kAVCProfileHigh10 = 0x6e,
- kAVCProfileHigh422 = 0x7a,
- kAVCProfileHigh444 = 0xf4,
- kAVCProfileCAVLC444Intra = 0x2c
-};
-
-static const char *AVCProfileToString(uint8_t profile) {
- switch (profile) {
- case kAVCProfileBaseline:
- return "Baseline";
- case kAVCProfileMain:
- return "Main";
- case kAVCProfileExtended:
- return "Extended";
- case kAVCProfileHigh:
- return "High";
- case kAVCProfileHigh10:
- return "High 10";
- case kAVCProfileHigh422:
- return "High 422";
- case kAVCProfileHigh444:
- return "High 444";
- case kAVCProfileCAVLC444Intra:
- return "CAVLC 444 Intra";
- default: return "Unknown";
- }
-}
-
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -307,16 +275,15 @@ static void InitOMXParams(T *params) {
}
static bool IsSoftwareCodec(const char *componentName) {
- if (!strncmp("OMX.PV.", componentName, 7)) {
- return true;
+ if (!strncmp("OMX.", componentName, 4)) {
+ return false;
}
- return false;
+ return true;
}
// A sort order in which non-OMX components are first,
-// followed by software codecs, i.e. OMX.PV.*, followed
-// by all the others.
+// followed by software codecs, and followed by all the others.
static int CompareSoftwareCodecsFirst(
const String8 *elem1, const String8 *elem2) {
bool isNotOMX1 = strncmp(elem1->string(), "OMX.", 4);
@@ -350,9 +317,13 @@ uint32_t OMXCodec::getComponentQuirks(
const char *componentName, bool isEncoder) {
uint32_t quirks = 0;
- if (!strcmp(componentName, "OMX.PV.avcdec")) {
- quirks |= kWantsNALFragments;
+ if (!strcmp(componentName, "OMX.Nvidia.amr.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.amrwb.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.aac.decoder") ||
+ !strcmp(componentName, "OMX.Nvidia.mp3.decoder")) {
+ quirks |= kDecoderLiesAboutNumberOfChannels;
}
+
if (!strcmp(componentName, "OMX.TI.MP3.decode")) {
quirks |= kNeedsFlushBeforeDisable;
quirks |= kDecoderLiesAboutNumberOfChannels;
@@ -412,13 +383,6 @@ uint32_t OMXCodec::getComponentQuirks(
quirks |= kOutputBuffersAreUnreadable;
}
- if (!strncmp(componentName, "OMX.SEC.", 8) && isEncoder) {
- // These input buffers contain meta data (for instance,
- // information helps locate the actual YUV data, or
- // the physical address of the YUV data).
- quirks |= kStoreMetaDataInInputVideoBuffers;
- }
-
return quirks;
}
@@ -454,7 +418,16 @@ void OMXCodec::findMatchingCodecs(
continue;
}
- matchingCodecs->push(String8(componentName));
+ // When requesting software-only codecs, only push software codecs.
+ // When requesting hardware-only codecs, only push hardware codecs.
+ // When neither software-only nor hardware-only codecs are requested,
+ // push all codecs.
+ if (((flags & kSoftwareCodecsOnly) && IsSoftwareCodec(componentName)) ||
+ ((flags & kHardwareCodecsOnly) && !IsSoftwareCodec(componentName)) ||
+ (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
+
+ matchingCodecs->push(String8(componentName));
+ }
}
if (flags & kPreferSoftwareCodecs) {
@@ -468,7 +441,8 @@ sp<MediaSource> OMXCodec::Create(
const sp<MetaData> &meta, bool createEncoder,
const sp<MediaSource> &source,
const char *matchComponentName,
- uint32_t flags) {
+ uint32_t flags,
+ const sp<ANativeWindow> &nativeWindow) {
const char *mime;
bool success = meta->findCString(kKeyMIMEType, &mime);
CHECK(success);
@@ -524,7 +498,7 @@ sp<MediaSource> OMXCodec::Create(
sp<OMXCodec> codec = new OMXCodec(
omx, node, quirks,
createEncoder, mime, componentName,
- source);
+ source, nativeWindow);
observer->setCodec(codec);
@@ -542,6 +516,10 @@ sp<MediaSource> OMXCodec::Create(
}
status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
+ mIsMetaDataStoredInVideoBuffers = false;
+ if (flags & kStoreMetaDataInVideoBuffers) {
+ mIsMetaDataStoredInVideoBuffers = true;
+ }
if (!(flags & kIgnoreCodecSpecificData)) {
uint32_t type;
const void *data;
@@ -617,7 +595,7 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
size -= length;
}
- CODEC_LOGV(
+ CODEC_LOGI(
"AVC profile = %d (%s), level = %d",
(int)profile, AVCProfileToString(profile), level);
@@ -732,6 +710,16 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta, uint32_t flags) {
mQuirks &= ~kOutputBuffersAreUnreadable;
}
+ if (mNativeWindow != NULL
+ && !mIsEncoder
+ && !strncasecmp(mMIME, "video/", 6)
+ && !strncmp(mComponentName, "OMX.", 4)) {
+ status_t err = initNativeWindow();
+ if (err != OK) {
+ return err;
+ }
+ }
+
return OK;
}
@@ -915,7 +903,7 @@ void OMXCodec::setVideoInputFormat(
int32_t width, height, frameRate, bitRate, stride, sliceHeight;
bool success = meta->findInt32(kKeyWidth, &width);
success = success && meta->findInt32(kKeyHeight, &height);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyBitRate, &bitRate);
success = success && meta->findInt32(kKeyStride, &stride);
success = success && meta->findInt32(kKeySliceHeight, &sliceHeight);
@@ -1135,7 +1123,7 @@ status_t OMXCodec::getVideoProfileLevel(
status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
int32_t iFramesInterval, frameRate, bitRate;
bool success = meta->findInt32(kKeyBitRate, &bitRate);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
CHECK(success);
OMX_VIDEO_PARAM_H263TYPE h263type;
@@ -1182,7 +1170,7 @@ status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
int32_t iFramesInterval, frameRate, bitRate;
bool success = meta->findInt32(kKeyBitRate, &bitRate);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
CHECK(success);
OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
@@ -1234,7 +1222,7 @@ status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
int32_t iFramesInterval, frameRate, bitRate;
bool success = meta->findInt32(kKeyBitRate, &bitRate);
- success = success && meta->findInt32(kKeySampleRate, &frameRate);
+ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
CHECK(success);
@@ -1290,6 +1278,10 @@ status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
h264type.bMBAFF = OMX_FALSE;
h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
+ if (!strcasecmp("OMX.Nvidia.h264.encoder", mComponentName)) {
+ h264type.eLevel = OMX_VIDEO_AVCLevelMax;
+ }
+
err = mOMX->setParameter(
mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
CHECK_EQ(err, OK);
@@ -1415,7 +1407,8 @@ OMXCodec::OMXCodec(
bool isEncoder,
const char *mime,
const char *componentName,
- const sp<MediaSource> &source)
+ const sp<MediaSource> &source,
+ const sp<ANativeWindow> &nativeWindow)
: mOMX(omx),
mOMXLivesLocally(omx->livesLocally(getpid())),
mNode(node),
@@ -1435,7 +1428,8 @@ OMXCodec::OMXCodec(
mTargetTimeUs(-1),
mSkipTimeUs(-1),
mLeftOverBuffer(NULL),
- mPaused(false) {
+ mPaused(false),
+ mNativeWindow(nativeWindow) {
mPortStatus[kPortIndexInput] = ENABLED;
mPortStatus[kPortIndexOutput] = ENABLED;
@@ -1579,6 +1573,10 @@ status_t OMXCodec::allocateBuffers() {
}
status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
+ if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
+ return allocateOutputBuffersFromNativeWindow();
+ }
+
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
def.nPortIndex = portIndex;
@@ -1590,6 +1588,14 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
return err;
}
+ if (mIsMetaDataStoredInVideoBuffers && portIndex == kPortIndexInput) {
+ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
+ if (err != OK) {
+ LOGE("Storing meta data in video buffers is not supported");
+ return err;
+ }
+ }
+
CODEC_LOGI("allocating %lu buffers of size %lu on %s port",
def.nBufferCountActual, def.nBufferSize,
portIndex == kPortIndexInput ? "input" : "output");
@@ -1645,6 +1651,7 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
info.mBuffer = buffer;
info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
info.mMem = mem;
info.mMediaBuffer = NULL;
@@ -1671,6 +1678,173 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
return OK;
}
+status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
+ // Get the number of buffers needed.
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = kPortIndexOutput;
+
+ status_t err = mOMX->getParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ err = native_window_set_buffers_geometry(
+ mNativeWindow.get(),
+ def.format.video.nFrameWidth,
+ def.format.video.nFrameHeight,
+ def.format.video.eColorFormat);
+
+ if (err != 0) {
+ LOGE("native_window_set_buffers_geometry failed: %s (%d)",
+ strerror(-err), -err);
+ return err;
+ }
+
+ // Increase the buffer count by one to allow for the ANativeWindow to hold
+ // on to one of the buffers.
+ def.nBufferCountActual++;
+ err = mOMX->setParameter(
+ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
+ if (err != OK) {
+ return err;
+ }
+
+ // Set up the native window.
+ // XXX TODO: Get the gralloc usage flags from the OMX plugin!
+ err = native_window_set_usage(
+ mNativeWindow.get(), GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
+ if (err != 0) {
+ LOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
+ return err;
+ }
+
+ err = native_window_set_buffer_count(
+ mNativeWindow.get(), def.nBufferCountActual);
+ if (err != 0) {
+ LOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
+ -err);
+ return err;
+ }
+
+ // XXX TODO: Do something so the ANativeWindow knows that we'll need to get
+ // the same set of buffers.
+
+ CODEC_LOGI("allocating %lu buffers from a native window of size %lu on "
+ "output port", def.nBufferCountActual, def.nBufferSize);
+
+ // Dequeue buffers and send them to OMX
+ OMX_U32 i;
+ for (i = 0; i < def.nBufferCountActual; i++) {
+ android_native_buffer_t* buf;
+ err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ LOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
+ break;
+ }
+
+ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
+ IOMX::buffer_id bufferId;
+ err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
+ &bufferId);
+ if (err != 0) {
+ break;
+ }
+
+ CODEC_LOGV("registered graphic buffer with ID %p (pointer = %p)",
+ bufferId, graphicBuffer.get());
+
+ BufferInfo info;
+ info.mData = NULL;
+ info.mSize = def.nBufferSize;
+ info.mBuffer = bufferId;
+ info.mOwnedByComponent = false;
+ info.mOwnedByNativeWindow = false;
+ info.mMem = NULL;
+ info.mMediaBuffer = new MediaBuffer(graphicBuffer);
+ info.mMediaBuffer->setObserver(this);
+
+ mPortBuffers[kPortIndexOutput].push(info);
+ }
+
+ OMX_U32 cancelStart;
+ OMX_U32 cancelEnd;
+
+ if (err != 0) {
+ // If an error occurred while dequeuing we need to cancel any buffers
+ // that were dequeued.
+ cancelStart = 0;
+ cancelEnd = i;
+ } else {
+ // Return the last two buffers to the native window.
+ // XXX TODO: The number of buffers the native window owns should probably be
+ // queried from it when we put the native window in fixed buffer pool mode
+ // (which needs to be implemented). Currently it's hard-coded to 2.
+ cancelStart = def.nBufferCountActual - 2;
+ cancelEnd = def.nBufferCountActual;
+ }
+
+ for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
+ BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
+ cancelBufferToNativeWindow(info);
+ }
+
+ return err;
+}
+
+status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling cancelBuffer on buffer %p", info->mBuffer);
+ int err = mNativeWindow->cancelBuffer(
+ mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get());
+ if (err != 0) {
+ CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return err;
+ }
+ info->mOwnedByNativeWindow = true;
+ return OK;
+}
+
+OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
+ // Dequeue the next buffer from the native window.
+ android_native_buffer_t* buf;
+ int err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf);
+ if (err != 0) {
+ CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // Determine which buffer we just dequeued.
+ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
+ BufferInfo *bufInfo = 0;
+ for (size_t i = 0; i < buffers->size(); i++) {
+ sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
+ mMediaBuffer->graphicBuffer();
+ if (graphicBuffer->handle == buf->handle) {
+ bufInfo = &buffers->editItemAt(i);
+ break;
+ }
+ }
+
+ if (bufInfo == 0) {
+ CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
+
+ setState(ERROR);
+ return 0;
+ }
+
+ // The native window no longer owns the buffer.
+ CHECK(bufInfo->mOwnedByNativeWindow);
+ bufInfo->mOwnedByNativeWindow = false;
+
+ return bufInfo;
+}
+
void OMXCodec::on_message(const omx_message &msg) {
Mutex::Autolock autoLock(mLock);
@@ -1702,24 +1876,27 @@ void OMXCodec::on_message(const omx_message &msg) {
"an EMPTY_BUFFER_DONE.", buffer);
}
- {
- BufferInfo *info = &buffers->editItemAt(i);
- info->mOwnedByComponent = false;
- if (info->mMediaBuffer != NULL) {
- // It is time to release the media buffers storing meta data
- info->mMediaBuffer->release();
- info->mMediaBuffer = NULL;
+ BufferInfo* info = &buffers->editItemAt(i);
+ info->mOwnedByComponent = false;
+
+ // The buffer cannot be released until EMPTY_BUFFER_DONE is received.
+ if (info->mMediaBuffer != NULL) {
+ if (mIsEncoder &&
+ (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
+ // If zero-copy mode is enabled this will send the
+ // input buffer back to the upstream source.
+ restorePatchedDataPointer(info);
}
+
+ info->mMediaBuffer->release();
+ info->mMediaBuffer = NULL;
}
if (mPortStatus[kPortIndexInput] == DISABLING) {
CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
- status_t err =
- mOMX->freeBuffer(mNode, kPortIndexInput, buffer);
+ status_t err = freeBuffer(kPortIndexInput, i);
CHECK_EQ(err, OK);
-
- buffers->removeAt(i);
} else if (mState != ERROR
&& mPortStatus[kPortIndexInput] != SHUTTING_DOWN) {
CHECK_EQ(mPortStatus[kPortIndexInput], ENABLED);
@@ -1759,11 +1936,9 @@ void OMXCodec::on_message(const omx_message &msg) {
if (mPortStatus[kPortIndexOutput] == DISABLING) {
CODEC_LOGV("Port is disabled, freeing buffer %p", buffer);
- status_t err =
- mOMX->freeBuffer(mNode, kPortIndexOutput, buffer);
+ status_t err = freeBuffer(kPortIndexOutput, i);
CHECK_EQ(err, OK);
- buffers->removeAt(i);
#if 0
} else if (mPortStatus[kPortIndexOutput] == ENABLED
&& (flags & OMX_BUFFERFLAG_EOS)) {
@@ -1791,8 +1966,10 @@ void OMXCodec::on_message(const omx_message &msg) {
}
MediaBuffer *buffer = info->mMediaBuffer;
+ bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
- if (msg.u.extended_buffer_data.range_offset
+ if (!isGraphicBuffer
+ && msg.u.extended_buffer_data.range_offset
+ msg.u.extended_buffer_data.range_length
> buffer->size()) {
CODEC_LOGE(
@@ -1816,7 +1993,7 @@ void OMXCodec::on_message(const omx_message &msg) {
buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
}
- if (mQuirks & kOutputBuffersAreUnreadable) {
+ if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
}
@@ -1886,7 +2063,43 @@ void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
case OMX_EventPortSettingsChanged:
{
- onPortSettingsChanged(data1);
+ CODEC_LOGV("OMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)",
+ data1, data2);
+
+ if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
+ onPortSettingsChanged(data1);
+ } else if (data1 == kPortIndexOutput
+ && data2 == OMX_IndexConfigCommonOutputCrop) {
+
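+ // The component reported a new output crop rectangle; query it and
+ // forward it to the native window so that only the valid region of
+ // the decoded frame is displayed.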
+ OMX_CONFIG_RECTTYPE rect;
+ rect.nPortIndex = kPortIndexOutput;
+ InitOMXParams(&rect);
+
+ status_t err =
+ mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect));
+
+ if (err == OK) {
+ CODEC_LOGV(
+ "output crop (%ld, %ld, %ld, %ld)",
+ rect.nLeft, rect.nTop, rect.nWidth, rect.nHeight);
+
+ if (mNativeWindow != NULL) {
+ android_native_rect_t crop;
+ crop.left = rect.nLeft;
+ crop.top = rect.nTop;
+ crop.right = crop.left + rect.nWidth - 1;
+ crop.bottom = crop.top + rect.nHeight - 1;
+
+ CHECK_EQ(0, native_window_set_crop(
+ mNativeWindow.get(), &crop));
+ }
+ } else {
+ CODEC_LOGE("getConfig(OMX_IndexConfigCommonOutputCrop) "
+ "returned error 0x%08x", err);
+ }
+ }
break;
}
@@ -2203,29 +2416,48 @@ status_t OMXCodec::freeBuffersOnPort(
CODEC_LOGV("freeing buffer %p on port %ld", info->mBuffer, portIndex);
- status_t err =
- mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
+ status_t err = freeBuffer(portIndex, i);
if (err != OK) {
stickyErr = err;
}
- if (info->mMediaBuffer != NULL) {
- info->mMediaBuffer->setObserver(NULL);
+ }
+
+ CHECK(onlyThoseWeOwn || buffers->isEmpty());
+
+ return stickyErr;
+}
+
+status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {
+ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
+
+ BufferInfo *info = &buffers->editItemAt(bufIndex);
+
+ status_t err = mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
- // Make sure nobody but us owns this buffer at this point.
- CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+ if (err == OK && info->mMediaBuffer != NULL) {
+ CHECK_EQ(portIndex, kPortIndexOutput);
+ info->mMediaBuffer->setObserver(NULL);
- info->mMediaBuffer->release();
- info->mMediaBuffer = NULL;
+ // Make sure nobody but us owns this buffer at this point.
+ CHECK_EQ(info->mMediaBuffer->refcount(), 0);
+
+ // Cancel the buffer if it belongs to an ANativeWindow.
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (!info->mOwnedByNativeWindow && graphicBuffer != 0) {
+ err = cancelBufferToNativeWindow(info);
}
- buffers->removeAt(i);
+ info->mMediaBuffer->release();
+ info->mMediaBuffer = NULL;
}
- CHECK(onlyThoseWeOwn || buffers->isEmpty());
+ if (err == OK) {
+ buffers->removeAt(bufIndex);
+ }
- return stickyErr;
+ return err;
}
void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
@@ -2277,6 +2509,7 @@ void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
CHECK_EQ(mPortStatus[portIndex], ENABLED);
mPortStatus[portIndex] = DISABLING;
+ CODEC_LOGV("sending OMX_CommandPortDisable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
CHECK_EQ(err, OK);
@@ -2290,6 +2523,7 @@ void OMXCodec::enablePortAsync(OMX_U32 portIndex) {
CHECK_EQ(mPortStatus[portIndex], DISABLED);
mPortStatus[portIndex] = ENABLING;
+ CODEC_LOGV("sending OMX_CommandPortEnable(%ld)", portIndex);
status_t err =
mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
CHECK_EQ(err, OK);
@@ -2315,7 +2549,10 @@ void OMXCodec::fillOutputBuffers() {
Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
for (size_t i = 0; i < buffers->size(); ++i) {
- fillOutputBuffer(&buffers->editItemAt(i));
+ BufferInfo *info = &buffers->editItemAt(i);
+ if (!info->mOwnedByNativeWindow) {
+ fillOutputBuffer(&buffers->editItemAt(i));
+ }
}
}
@@ -2450,16 +2687,16 @@ void OMXCodec::drainInputBuffer(BufferInfo *info) {
break;
}
- // Do not release the media buffer if it stores meta data
- // instead of YUV data. The release is delayed until
- // EMPTY_BUFFER_DONE callback is received.
bool releaseBuffer = true;
if (mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames)) {
CHECK(mOMXLivesLocally && offset == 0);
OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *) info->mBuffer;
+ CHECK(header->pBuffer == info->mData);
header->pBuffer = (OMX_U8 *) srcBuffer->data() + srcBuffer->range_offset();
+ releaseBuffer = false;
+ info->mMediaBuffer = srcBuffer;
} else {
- if (mQuirks & kStoreMetaDataInInputVideoBuffers) {
+ if (mIsMetaDataStoredInVideoBuffers) {
releaseBuffer = false;
info->mMediaBuffer = srcBuffer;
}
@@ -2542,7 +2779,23 @@ void OMXCodec::fillOutputBuffer(BufferInfo *info) {
return;
}
- CODEC_LOGV("Calling fill_buffer on buffer %p", info->mBuffer);
+ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
+ if (graphicBuffer != 0) {
+ // When using a native buffer we need to lock the buffer before giving
+ // it to OMX.
+ CHECK(!info->mOwnedByNativeWindow);
+ CODEC_LOGV("Calling lockBuffer on %p", info->mBuffer);
+ int err = mNativeWindow->lockBuffer(mNativeWindow.get(),
+ graphicBuffer.get());
+ if (err != 0) {
+ CODEC_LOGE("lockBuffer failed w/ error 0x%08x", err);
+
+ setState(ERROR);
+ return;
+ }
+ }
+
+ CODEC_LOGV("Calling fillBuffer on buffer %p", info->mBuffer);
status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
if (err != OK) {
@@ -3125,7 +3378,32 @@ void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
if (info->mMediaBuffer == buffer) {
CHECK_EQ(mPortStatus[kPortIndexOutput], ENABLED);
- fillOutputBuffer(info);
+ if (buffer->graphicBuffer() == 0) {
+ fillOutputBuffer(info);
+ } else {
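+ // For graphic buffers: if the renderer already queued the buffer to
+ // the native window (kKeyRendered set), just note that the window owns
+ // it; otherwise cancel it back to the window. Then dequeue a
+ // replacement buffer and hand that one to the component.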
+ sp<MetaData> metaData = info->mMediaBuffer->meta_data();
+ int32_t rendered = 0;
+ if (!metaData->findInt32(kKeyRendered, &rendered)) {
+ rendered = 0;
+ }
+ if (!rendered) {
+ status_t err = cancelBufferToNativeWindow(info);
+ if (err < 0) {
+ return;
+ }
+ } else {
+ info->mOwnedByNativeWindow = true;
+ }
+
+ // Dequeue the next buffer from the native window.
+ BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
+ if (nextBufInfo == 0) {
+ return;
+ }
+
+ // Give the buffer to the OMX node to fill.
+ fillOutputBuffer(nextBufInfo);
+ }
return;
}
}
@@ -3458,6 +3736,18 @@ void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
printf("}\n");
}
+status_t OMXCodec::initNativeWindow() {
+ // Enable use of a GraphicBuffer as the output for this node. This must
+ // happen before getting the IndexParamPortDefinition parameter because it
+ // will affect the pixel format that the node reports.
+ status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
+ if (err != 0) {
+ return err;
+ }
+
+ return OK;
+}
+
void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
mOutputFormat = new MetaData;
mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
@@ -3592,17 +3882,8 @@ void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
CHECK(!"Unknown compression format.");
}
- if (!strcmp(mComponentName, "OMX.PV.avcdec")) {
- // This component appears to be lying to me.
- mOutputFormat->setInt32(
- kKeyWidth, (video_def->nFrameWidth + 15) & -16);
- mOutputFormat->setInt32(
- kKeyHeight, (video_def->nFrameHeight + 15) & -16);
- } else {
- mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
- mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
- }
-
+ mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
+ mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
break;
}
@@ -3693,8 +3974,30 @@ status_t QueryCodecs(
caps->mProfileLevels.push(profileLevel);
}
+ // Color format query
+ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
+ InitOMXParams(&portFormat);
+ portFormat.nPortIndex = queryDecoders ? 1 : 0;
+ for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
+ err = omx->getParameter(
+ node, OMX_IndexParamVideoPortFormat,
+ &portFormat, sizeof(portFormat));
+ if (err != OK) {
+ break;
+ }
+ caps->mColorFormats.push(portFormat.eColorFormat);
+ }
+
CHECK_EQ(omx->freeNode(node), OK);
}
}
+void OMXCodec::restorePatchedDataPointer(BufferInfo *info) {
+ CHECK(mIsEncoder && (mQuirks & kAvoidMemcopyInputRecordingFrames));
+ CHECK(mOMXLivesLocally);
+
+ OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)info->mBuffer;
+ header->pBuffer = (OMX_U8 *)info->mData;
+}
+
} // namespace android
diff --git a/media/libstagefright/OggExtractor.cpp b/media/libstagefright/OggExtractor.cpp
index 43938b2..4b8a014 100644
--- a/media/libstagefright/OggExtractor.cpp
+++ b/media/libstagefright/OggExtractor.cpp
@@ -73,7 +73,7 @@ struct MyVorbisExtractor {
// Returns an approximate bitrate in bits per second.
uint64_t approxBitrate();
- status_t seekToOffset(off_t offset);
+ status_t seekToOffset(off64_t offset);
status_t readNextPacket(MediaBuffer **buffer);
status_t init();
@@ -91,7 +91,7 @@ private:
};
sp<DataSource> mSource;
- off_t mOffset;
+ off64_t mOffset;
Page mCurrentPage;
uint64_t mPrevGranulePosition;
size_t mCurrentPageSize;
@@ -99,7 +99,7 @@ private:
uint64_t mCurrentPageSamples;
size_t mNextLaceIndex;
- off_t mFirstDataOffset;
+ off64_t mFirstDataOffset;
vorbis_info mVi;
vorbis_comment mVc;
@@ -107,8 +107,8 @@ private:
sp<MetaData> mMeta;
sp<MetaData> mFileMeta;
- ssize_t readPage(off_t offset, Page *page);
- status_t findNextPage(off_t startOffset, off_t *pageOffset);
+ ssize_t readPage(off64_t offset, Page *page);
+ status_t findNextPage(off64_t startOffset, off64_t *pageOffset);
status_t verifyHeader(
MediaBuffer *buffer, uint8_t type);
@@ -116,7 +116,7 @@ private:
void parseFileMetaData();
void extractAlbumArt(const void *data, size_t size);
- uint64_t findPrevGranulePosition(off_t pageOffset);
+ uint64_t findPrevGranulePosition(off64_t pageOffset);
MyVorbisExtractor(const MyVorbisExtractor &);
MyVorbisExtractor &operator=(const MyVorbisExtractor &);
@@ -162,7 +162,7 @@ status_t OggSource::read(
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options && options->getSeekTo(&seekTimeUs, &mode)) {
- off_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll;
+ off64_t pos = seekTimeUs * mExtractor->mImpl->approxBitrate() / 8000000ll;
LOGV("seeking to offset %ld", pos);
if (mExtractor->mImpl->seekToOffset(pos) != OK) {
@@ -220,7 +220,7 @@ sp<MetaData> MyVorbisExtractor::getFormat() const {
}
status_t MyVorbisExtractor::findNextPage(
- off_t startOffset, off_t *pageOffset) {
+ off64_t startOffset, off64_t *pageOffset) {
*pageOffset = startOffset;
for (;;) {
@@ -250,9 +250,9 @@ status_t MyVorbisExtractor::findNextPage(
// it (if any) and return its granule position.
// To do this we back up from the "current" page's offset until we find any
// page preceding it and then scan forward to just before the current page.
-uint64_t MyVorbisExtractor::findPrevGranulePosition(off_t pageOffset) {
- off_t prevPageOffset = 0;
- off_t prevGuess = pageOffset;
+uint64_t MyVorbisExtractor::findPrevGranulePosition(off64_t pageOffset) {
+ off64_t prevPageOffset = 0;
+ off64_t prevGuess = pageOffset;
for (;;) {
if (prevGuess >= 5000) {
prevGuess -= 5000;
@@ -292,14 +292,14 @@ uint64_t MyVorbisExtractor::findPrevGranulePosition(off_t pageOffset) {
}
}
-status_t MyVorbisExtractor::seekToOffset(off_t offset) {
+status_t MyVorbisExtractor::seekToOffset(off64_t offset) {
if (mFirstDataOffset >= 0 && offset < mFirstDataOffset) {
// Once we know where the actual audio data starts (past the headers)
// don't ever seek to anywhere before that.
offset = mFirstDataOffset;
}
- off_t pageOffset;
+ off64_t pageOffset;
status_t err = findNextPage(offset, &pageOffset);
if (err != OK) {
@@ -324,7 +324,7 @@ status_t MyVorbisExtractor::seekToOffset(off_t offset) {
return OK;
}
-ssize_t MyVorbisExtractor::readPage(off_t offset, Page *page) {
+ssize_t MyVorbisExtractor::readPage(off64_t offset, Page *page) {
uint8_t header[27];
if (mSource->readAt(offset, header, sizeof(header))
< (ssize_t)sizeof(header)) {
@@ -410,7 +410,7 @@ status_t MyVorbisExtractor::readNextPacket(MediaBuffer **out) {
}
if (mNextLaceIndex < mCurrentPage.mNumSegments) {
- off_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
+ off64_t dataOffset = mOffset + 27 + mCurrentPage.mNumSegments;
for (size_t j = 0; j < mNextLaceIndex; ++j) {
dataOffset += mCurrentPage.mLace[j];
}
@@ -609,7 +609,7 @@ status_t MyVorbisExtractor::verifyHeader(
LOGV("nominal-bitrate = %ld", mVi.bitrate_nominal);
LOGV("window-bitrate = %ld", mVi.bitrate_window);
- off_t size;
+ off64_t size;
if (mSource->getSize(&size) == OK) {
uint64_t bps = approxBitrate();
diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp
index 7155c61..062ab9b 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/libstagefright/SampleIterator.cpp
@@ -179,7 +179,7 @@ status_t SampleIterator::findChunkRange(uint32_t sampleIndex) {
return OK;
}
-status_t SampleIterator::getChunkOffset(uint32_t chunk, off_t *offset) {
+status_t SampleIterator::getChunkOffset(uint32_t chunk, off64_t *offset) {
*offset = 0;
if (chunk >= mTable->mNumChunkOffsets) {
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index 27faf4f..e922c73 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -76,7 +76,7 @@ SampleTable::~SampleTable() {
}
status_t SampleTable::setChunkOffsetParams(
- uint32_t type, off_t data_offset, size_t data_size) {
+ uint32_t type, off64_t data_offset, size_t data_size) {
if (mChunkOffsetOffset >= 0) {
return ERROR_MALFORMED;
}
@@ -117,7 +117,7 @@ status_t SampleTable::setChunkOffsetParams(
}
status_t SampleTable::setSampleToChunkParams(
- off_t data_offset, size_t data_size) {
+ off64_t data_offset, size_t data_size) {
if (mSampleToChunkOffset >= 0) {
return ERROR_MALFORMED;
}
@@ -168,7 +168,7 @@ status_t SampleTable::setSampleToChunkParams(
}
status_t SampleTable::setSampleSizeParams(
- uint32_t type, off_t data_offset, size_t data_size) {
+ uint32_t type, off64_t data_offset, size_t data_size) {
if (mSampleSizeOffset >= 0) {
return ERROR_MALFORMED;
}
@@ -228,7 +228,7 @@ status_t SampleTable::setSampleSizeParams(
}
status_t SampleTable::setTimeToSampleParams(
- off_t data_offset, size_t data_size) {
+ off64_t data_offset, size_t data_size) {
if (mTimeToSample != NULL || data_size < 8) {
return ERROR_MALFORMED;
}
@@ -260,7 +260,7 @@ status_t SampleTable::setTimeToSampleParams(
return OK;
}
-status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) {
+status_t SampleTable::setSyncSampleParams(off64_t data_offset, size_t data_size) {
if (mSyncSampleOffset >= 0 || data_size < 8) {
return ERROR_MALFORMED;
}
@@ -281,7 +281,7 @@ status_t SampleTable::setSyncSampleParams(off_t data_offset, size_t data_size) {
mNumSyncSamples = U32_AT(&header[4]);
if (mNumSyncSamples < 2) {
- LOGW("Table of sync samples is empty or has only a single entry!");
+ LOGV("Table of sync samples is empty or has only a single entry!");
}
mSyncSamples = new uint32_t[mNumSyncSamples];
@@ -557,7 +557,7 @@ status_t SampleTable::getSampleSize_l(
status_t SampleTable::getMetaDataForSample(
uint32_t sampleIndex,
- off_t *offset,
+ off64_t *offset,
size_t *size,
uint32_t *decodingTime,
bool *isSyncSample) {
diff --git a/media/libstagefright/ShoutcastSource.cpp b/media/libstagefright/ShoutcastSource.cpp
index 23b7681..783f2d0 100644
--- a/media/libstagefright/ShoutcastSource.cpp
+++ b/media/libstagefright/ShoutcastSource.cpp
@@ -14,7 +14,6 @@
* limitations under the License.
*/
-#include "include/stagefright_string.h"
#include "include/HTTPStream.h"
#include <stdlib.h>
@@ -34,7 +33,7 @@ ShoutcastSource::ShoutcastSource(HTTPStream *http)
mBytesUntilMetaData(0),
mGroup(NULL),
mStarted(false) {
- string metaint;
+ AString metaint;
if (mHttp->find_header_value("icy-metaint", &metaint)) {
char *end;
const char *start = metaint.c_str();
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index 8426ee7..86e0e73 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -39,7 +39,7 @@ static bool FileHasAcceptableExtension(const char *extension) {
".mp3", ".mp4", ".m4a", ".3gp", ".3gpp", ".3g2", ".3gpp2",
".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
- ".mkv", ".mka", ".webm", ".ts"
+ ".mkv", ".mka", ".webm", ".ts", ".fl"
};
static const size_t kNumValidExtensions =
sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
@@ -175,11 +175,11 @@ status_t StagefrightMediaScanner::processFile(
char *StagefrightMediaScanner::extractAlbumArt(int fd) {
LOGV("extractAlbumArt %d", fd);
- off_t size = lseek(fd, 0, SEEK_END);
+ off64_t size = lseek64(fd, 0, SEEK_END);
if (size < 0) {
return NULL;
}
- lseek(fd, 0, SEEK_SET);
+ lseek64(fd, 0, SEEK_SET);
if (mRetriever->setDataSource(fd, 0, size) == OK
&& mRetriever->setMode(
diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp
index 4711f7c..88e07b0 100644
--- a/media/libstagefright/ThrottledSource.cpp
+++ b/media/libstagefright/ThrottledSource.cpp
@@ -41,7 +41,7 @@ status_t ThrottledSource::initCheck() const {
return mSource->initCheck();
}
-ssize_t ThrottledSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoLock(mLock);
ssize_t n = mSource->readAt(offset, data, size);
@@ -72,7 +72,7 @@ ssize_t ThrottledSource::readAt(off_t offset, void *data, size_t size) {
return n;
}
-status_t ThrottledSource::getSize(off_t *size) {
+status_t ThrottledSource::getSize(off64_t *size) {
return mSource->getSize(size);
}
diff --git a/media/libstagefright/VBRISeeker.cpp b/media/libstagefright/VBRISeeker.cpp
new file mode 100644
index 0000000..48bddc2
--- /dev/null
+++ b/media/libstagefright/VBRISeeker.cpp
@@ -0,0 +1,164 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VBRISeeker"
+#include <utils/Log.h>
+
+#include "include/VBRISeeker.h"
+
+#include "include/MP3Extractor.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static uint32_t U24_AT(const uint8_t *ptr) {
+ return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
+}
+
+// static
+sp<VBRISeeker> VBRISeeker::CreateFromSource(
+ const sp<DataSource> &source, off64_t post_id3_pos) {
+ off64_t pos = post_id3_pos;
+
+ uint8_t header[4];
+ ssize_t n = source->readAt(pos, header, sizeof(header));
+ if (n < (ssize_t)sizeof(header)) {
+ return NULL;
+ }
+
+ uint32_t tmp = U32_AT(&header[0]);
+ size_t frameSize;
+ int sampleRate;
+ if (!MP3Extractor::get_mp3_frame_size(tmp, &frameSize, &sampleRate)) {
+ return NULL;
+ }
+
+ // VBRI header follows 32 bytes after the header _ends_.
+ pos += sizeof(header) + 32;
+
+ uint8_t vbriHeader[26];
+ n = source->readAt(pos, vbriHeader, sizeof(vbriHeader));
+ if (n < (ssize_t)sizeof(vbriHeader)) {
+ return NULL;
+ }
+
+ if (memcmp(vbriHeader, "VBRI", 4)) {
+ return NULL;
+ }
+
+ size_t numFrames = U32_AT(&vbriHeader[14]);
+
+ int64_t durationUs =
+ numFrames * 1000000ll * (sampleRate >= 32000 ? 1152 : 576) / sampleRate;
+
+ LOGV("duration = %.2f secs", durationUs / 1E6);
+
+ size_t numEntries = U16_AT(&vbriHeader[18]);
+ size_t entrySize = U16_AT(&vbriHeader[22]);
+ size_t scale = U16_AT(&vbriHeader[20]);
+
+ LOGV("%d entries, scale=%d, size_per_entry=%d",
+ numEntries,
+ scale,
+ entrySize);
+
+ size_t totalEntrySize = numEntries * entrySize;
+ uint8_t *buffer = new uint8_t[totalEntrySize];
+
+ n = source->readAt(pos + sizeof(vbriHeader), buffer, totalEntrySize);
+ if (n < (ssize_t)totalEntrySize) {
+ delete[] buffer;
+ buffer = NULL;
+
+ return NULL;
+ }
+
+ sp<VBRISeeker> seeker = new VBRISeeker;
+ seeker->mBasePos = post_id3_pos;
+ seeker->mDurationUs = durationUs;
+
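+ // Each TOC entry holds the byte size of one segment of the stream,
+ // scaled by the 'scale' factor from the header; entry widths of 1, 2,
+ // 3 or 4 bytes are supported.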
+ off64_t offset = post_id3_pos;
+ for (size_t i = 0; i < numEntries; ++i) {
+ uint32_t numBytes;
+ switch (entrySize) {
+ case 1: numBytes = buffer[i]; break;
+ case 2: numBytes = U16_AT(buffer + 2 * i); break;
+ case 3: numBytes = U24_AT(buffer + 3 * i); break;
+ default:
+ {
+ CHECK_EQ(entrySize, 4u);
+ numBytes = U32_AT(buffer + 4 * i); break;
+ }
+ }
+
+ numBytes *= scale;
+
+ seeker->mSegments.push(numBytes);
+
+ LOGV("entry #%d: %d offset 0x%08lx", i, numBytes, offset);
+ offset += numBytes;
+ }
+
+ delete[] buffer;
+ buffer = NULL;
+
+ LOGI("Found VBRI header.");
+
+ return seeker;
+}
+
+VBRISeeker::VBRISeeker()
+ : mDurationUs(-1) {
+}
+
+bool VBRISeeker::getDuration(int64_t *durationUs) {
+ if (mDurationUs < 0) {
+ return false;
+ }
+
+ *durationUs = mDurationUs;
+
+ return true;
+}
+
+bool VBRISeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+ if (mDurationUs < 0) {
+ return false;
+ }
+
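+ // The TOC segments cover equal slices of the total duration, so walk
+ // the table accumulating byte sizes until the requested time is reached.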
+ int64_t segmentDurationUs = mDurationUs / mSegments.size();
+
+ int64_t nowUs = 0;
+ *pos = mBasePos;
+ size_t segmentIndex = 0;
+ while (segmentIndex < mSegments.size() && nowUs < *timeUs) {
+ nowUs += segmentDurationUs;
+ *pos += mSegments.itemAt(segmentIndex++);
+ }
+
+ LOGV("getOffsetForTime %lld us => 0x%08lx", *timeUs, *pos);
+
+ *timeUs = nowUs;
+
+ return true;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/VideoSourceDownSampler.cpp b/media/libstagefright/VideoSourceDownSampler.cpp
new file mode 100644
index 0000000..ea7b09a
--- /dev/null
+++ b/media/libstagefright/VideoSourceDownSampler.cpp
@@ -0,0 +1,142 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoSourceDownSampler"
+
+#include <media/stagefright/VideoSourceDownSampler.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/YUVImage.h>
+#include <media/stagefright/YUVCanvas.h>
+#include "OMX_Video.h"
+
+namespace android {
+
+VideoSourceDownSampler::VideoSourceDownSampler(const sp<MediaSource> &videoSource,
+ int32_t width, int32_t height) {
+ LOGV("Construct VideoSourceDownSampler");
+ CHECK(width > 0);
+ CHECK(height > 0);
+
+ mRealVideoSource = videoSource;
+ mWidth = width;
+ mHeight = height;
+
+ mMeta = new MetaData(*(mRealVideoSource->getFormat()));
+ CHECK(mMeta->findInt32(kKeyWidth, &mRealSourceWidth));
+ CHECK(mMeta->findInt32(kKeyHeight, &mRealSourceHeight));
+
+ if ((mWidth != mRealSourceWidth) || (mHeight != mRealSourceHeight)) {
+ // Change meta data for width and height.
+ CHECK(mWidth <= mRealSourceWidth);
+ CHECK(mHeight <= mRealSourceHeight);
+
+ mNeedDownSampling = true;
+ computeDownSamplingParameters();
+ mMeta->setInt32(kKeyWidth, mWidth);
+ mMeta->setInt32(kKeyHeight, mHeight);
+ } else {
+ mNeedDownSampling = false;
+ }
+}
+
+VideoSourceDownSampler::~VideoSourceDownSampler() {
+}
+
+void VideoSourceDownSampler::computeDownSamplingParameters() {
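+ // Keep every mDownSampleSkipX-th / mDownSampleSkipY-th pixel; the offsets
+ // are the leftover pixels when the source dimensions are not exact
+ // multiples of the target dimensions.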
+ mDownSampleSkipX = mRealSourceWidth / mWidth;
+ mDownSampleSkipY = mRealSourceHeight / mHeight;
+
+ mDownSampleOffsetX = mRealSourceWidth - mDownSampleSkipX * mWidth;
+ mDownSampleOffsetY = mRealSourceHeight - mDownSampleSkipY * mHeight;
+}
+
+void VideoSourceDownSampler::downSampleYUVImage(
+ const MediaBuffer &sourceBuffer, MediaBuffer **buffer) const {
+ // find the YUV format
+ int32_t srcFormat;
+ CHECK(mMeta->findInt32(kKeyColorFormat, &srcFormat));
+ YUVImage::YUVFormat yuvFormat;
+ if (srcFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ yuvFormat = YUVImage::YUV420SemiPlanar;
+ } else if (srcFormat == OMX_COLOR_FormatYUV420Planar) {
+ yuvFormat = YUVImage::YUV420Planar;
+ }
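+ // Only the two YUV420 formats above are handled; any other source color
+ // format would leave yuvFormat uninitialized.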
+
+ // Allocate a MediaBuffer for the down-sampled image and set up a canvas.
+ *buffer = new MediaBuffer(YUVImage::bufferSize(yuvFormat, mWidth, mHeight));
+ YUVImage yuvDownSampledImage(yuvFormat,
+ mWidth, mHeight,
+ (uint8_t *)(*buffer)->data());
+ YUVCanvas yuvCanvasDownSample(yuvDownSampledImage);
+
+ YUVImage yuvImageSource(yuvFormat,
+ mRealSourceWidth, mRealSourceHeight,
+ (uint8_t *)sourceBuffer.data());
+ yuvCanvasDownSample.downsample(mDownSampleOffsetX, mDownSampleOffsetY,
+ mDownSampleSkipX, mDownSampleSkipY,
+ yuvImageSource);
+}
+
+status_t VideoSourceDownSampler::start(MetaData *params) {
+ LOGV("start");
+ return mRealVideoSource->start();
+}
+
+status_t VideoSourceDownSampler::stop() {
+ LOGV("stop");
+ return mRealVideoSource->stop();
+}
+
+sp<MetaData> VideoSourceDownSampler::getFormat() {
+ LOGV("getFormat");
+ return mMeta;
+}
+
+status_t VideoSourceDownSampler::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ LOGV("read");
+ MediaBuffer *realBuffer;
+ status_t err = mRealVideoSource->read(&realBuffer, options);
+
+ if (mNeedDownSampling) {
+ downSampleYUVImage(*realBuffer, buffer);
+
+ int64_t frameTime;
+ realBuffer->meta_data()->findInt64(kKeyTime, &frameTime);
+ (*buffer)->meta_data()->setInt64(kKeyTime, frameTime);
+
+ // We just want this buffer to be deleted when the encoder releases it.
+ // So don't add a reference to it, and set its observer to NULL.
+ (*buffer)->setObserver(NULL);
+
+ // The original buffer is no longer required. Release it.
+ realBuffer->release();
+ } else {
+ *buffer = realBuffer;
+ }
+
+ return err;
+}
+
+status_t VideoSourceDownSampler::pause() {
+ LOGV("pause");
+ return mRealVideoSource->pause();
+}
+
+} // namespace android
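A quick sanity check of computeDownSamplingParameters() above, as a standalone sketch; the 640x480 to 176x144 geometry is assumed for illustration and is not taken from the patch:

    #include <cstdio>

    // Mirrors the skip/offset computation in VideoSourceDownSampler for one
    // illustrative geometry; all names and values are local to this sketch.
    int main() {
        const int srcW = 640, srcH = 480;   // assumed source size
        const int dstW = 176, dstH = 144;   // assumed target size

        const int skipX = srcW / dstW;            // 3: sample every 3rd column
        const int skipY = srcH / dstH;            // 3: sample every 3rd row
        const int offX  = srcW - skipX * dstW;    // 112: left-over columns
        const int offY  = srcH - skipY * dstH;    // 48: left-over rows

        printf("skip=(%d,%d) offset=(%d,%d)\n", skipX, skipY, offX, offY);
        return 0;
    }

With these numbers the canvas samples every third pixel in both directions, and the offsets account for the 112 columns and 48 rows that the integer skip factors leave over.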
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index aff06bc..446021c 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -51,7 +51,7 @@ struct WAVSource : public MediaSource {
const sp<MetaData> &meta,
uint16_t waveFormat,
int32_t bitsPerSample,
- off_t offset, size_t size);
+ off64_t offset, size_t size);
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
@@ -72,11 +72,11 @@ private:
int32_t mSampleRate;
int32_t mNumChannels;
int32_t mBitsPerSample;
- off_t mOffset;
+ off64_t mOffset;
size_t mSize;
bool mStarted;
MediaBufferGroup *mGroup;
- off_t mCurrentPos;
+ off64_t mCurrentPos;
WAVSource(const WAVSource &);
WAVSource &operator=(const WAVSource &);
@@ -139,7 +139,7 @@ status_t WAVExtractor::init() {
size_t totalSize = U32_LE_AT(&header[4]);
- off_t offset = 12;
+ off64_t offset = 12;
size_t remainingSize = totalSize;
while (remainingSize >= 8) {
uint8_t chunkHeader[8];
@@ -251,7 +251,7 @@ WAVSource::WAVSource(
const sp<MetaData> &meta,
uint16_t waveFormat,
int32_t bitsPerSample,
- off_t offset, size_t size)
+ off64_t offset, size_t size)
: mDataSource(dataSource),
mMeta(meta),
mWaveFormat(waveFormat),
@@ -335,7 +335,7 @@ status_t WAVSource::read(
mBitsPerSample == 8 ? kMaxFrameSize / 2 : kMaxFrameSize;
size_t maxBytesAvailable =
- (mCurrentPos - mOffset >= (off_t)mSize)
+ (mCurrentPos - mOffset >= (off64_t)mSize)
? 0 : mSize - (mCurrentPos - mOffset);
if (maxBytesToRead > maxBytesAvailable) {
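The off_t to off64_t changes here and in the headers below matter on builds where off_t is a 32-bit signed type (as it typically was on 32-bit Android at the time): any chunk or sample offset at or beyond 2 GiB would wrap negative. A minimal sketch of the failure mode, independent of the patch:

    #include <cstdint>
    #include <cstdio>

    int main() {
        const int64_t realOffset = 3LL * 1024 * 1024 * 1024;      // 3 GiB into a file
        const int32_t narrow = static_cast<int32_t>(realOffset);  // what a 32-bit off_t would hold

        // 3 GiB does not fit in a signed 32-bit value, so in practice the offset
        // wraps negative and any seek/readAt using it fails or reads wrong data.
        printf("64-bit offset: %lld, truncated to 32 bits: %d\n",
               (long long)realOffset, narrow);
        return 0;
    }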
diff --git a/media/libstagefright/WVMExtractor.cpp b/media/libstagefright/WVMExtractor.cpp
new file mode 100644
index 0000000..7c72852
--- /dev/null
+++ b/media/libstagefright/WVMExtractor.cpp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "WVMExtractor"
+#include <utils/Log.h>
+
+#include "include/WVMExtractor.h"
+
+#include <arpa/inet.h>
+#include <utils/String8.h>
+#include <media/stagefright/Utils.h>
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaDebug.h>
+#include <dlfcn.h>
+
+#include <utils/Errors.h>
+
+/* The extractor's lifetime is short - just long enough to get
+ * the media sources constructed - so the shared library needs to remain
+ * open beyond the lifetime of the extractor. Keep the handle as a global
+ * rather than as a member of the extractor.
+ */
+void *gVendorLibHandle = NULL;
+
+namespace android {
+
+static Mutex gWVMutex;
+
+WVMExtractor::WVMExtractor(const sp<DataSource> &source)
+ : mDataSource(source) {
+ {
+ Mutex::Autolock autoLock(gWVMutex);
+ if (gVendorLibHandle == NULL) {
+ gVendorLibHandle = dlopen("libwvm.so", RTLD_NOW);
+ }
+
+ if (gVendorLibHandle == NULL) {
+ LOGE("Failed to open libwvm.so");
+ return;
+ }
+ }
+
+ typedef MediaExtractor *(*GetInstanceFunc)(sp<DataSource>);
+ GetInstanceFunc getInstanceFunc =
+ (GetInstanceFunc) dlsym(gVendorLibHandle,
+ "_ZN7android11GetInstanceENS_2spINS_10DataSourceEEE");
+
+ if (getInstanceFunc) {
+ LOGD("Calling GetInstanceFunc");
+ mImpl = (*getInstanceFunc)(source);
+ CHECK(mImpl != NULL);
+ } else {
+ LOGE("Failed to locate GetInstance in libwvm.so");
+ }
+}
+
+WVMExtractor::~WVMExtractor() {
+}
+
+size_t WVMExtractor::countTracks() {
+ return (mImpl != NULL) ? mImpl->countTracks() : 0;
+}
+
+sp<MediaSource> WVMExtractor::getTrack(size_t index) {
+ if (mImpl == NULL) {
+ return NULL;
+ }
+ return mImpl->getTrack(index);
+}
+
+sp<MetaData> WVMExtractor::getTrackMetaData(size_t index, uint32_t flags) {
+ if (mImpl == NULL) {
+ return NULL;
+ }
+ return mImpl->getTrackMetaData(index, flags);
+}
+
+sp<MetaData> WVMExtractor::getMetaData() {
+ if (mImpl == NULL) {
+ return NULL;
+ }
+ return mImpl->getMetaData();
+}
+
+} // namespace android
+
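The constructor above resolves a C++ factory through dlopen/dlsym using its mangled symbol name, and deliberately keeps the library handle in a global so objects created by the library can outlive the extractor. A generic sketch of that pattern follows; the library name, symbol and signature are placeholders, not the real libwvm interface:

    #include <dlfcn.h>
    #include <cstdio>

    // Hypothetical factory signature; the real vendor entry point differs.
    typedef void *(*FactoryFunc)(void *cookie);

    int main() {
        // Keep the handle for the life of the process if objects created by the
        // library may outlive the code that loaded it (same reasoning as the
        // global gVendorLibHandle above).
        void *handle = dlopen("libvendor_example.so", RTLD_NOW);
        if (handle == NULL) {
            fprintf(stderr, "dlopen failed: %s\n", dlerror());
            return 1;
        }

        // For a C++ symbol, dlsym needs the mangled name (or an extern "C" shim).
        FactoryFunc factory = (FactoryFunc)dlsym(handle, "_Z14CreateInstancePv");
        if (factory == NULL) {
            fprintf(stderr, "dlsym failed: %s\n", dlerror());
            return 1;
        }

        void *instance = factory(NULL);
        (void)instance;
        return 0;
    }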
diff --git a/media/libstagefright/XINGSeeker.cpp b/media/libstagefright/XINGSeeker.cpp
new file mode 100644
index 0000000..616836c
--- /dev/null
+++ b/media/libstagefright/XINGSeeker.cpp
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "include/XINGSeeker.h"
+
+#include <media/stagefright/DataSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static bool parse_xing_header(
+ const sp<DataSource> &source, off64_t first_frame_pos,
+ int32_t *frame_number = NULL, int32_t *byte_number = NULL,
+ char *table_of_contents = NULL, int32_t *quality_indicator = NULL,
+ int64_t *duration = NULL);
+
+// static
+sp<XINGSeeker> XINGSeeker::CreateFromSource(
+ const sp<DataSource> &source, off64_t first_frame_pos) {
+ sp<XINGSeeker> seeker = new XINGSeeker;
+
+ seeker->mFirstFramePos = first_frame_pos;
+
+ if (!parse_xing_header(
+ source, first_frame_pos,
+ NULL, &seeker->mSizeBytes, seeker->mTableOfContents,
+ NULL, &seeker->mDurationUs)) {
+ return NULL;
+ }
+
+ LOGI("Found XING header.");
+
+ return seeker;
+}
+
+XINGSeeker::XINGSeeker()
+ : mDurationUs(-1),
+ mSizeBytes(0) {
+}
+
+bool XINGSeeker::getDuration(int64_t *durationUs) {
+ if (mDurationUs < 0) {
+ return false;
+ }
+
+ *durationUs = mDurationUs;
+
+ return true;
+}
+
+bool XINGSeeker::getOffsetForTime(int64_t *timeUs, off64_t *pos) {
+ if (mSizeBytes == 0 || mTableOfContents[0] <= 0 || mDurationUs < 0) {
+ return false;
+ }
+
+ float percent = (float)(*timeUs) * 100 / mDurationUs;
+ float fx;
+ if( percent <= 0.0f ) {
+ fx = 0.0f;
+ } else if( percent >= 100.0f ) {
+ fx = 256.0f;
+ } else {
+ int a = (int)percent;
+ float fa, fb;
+ if ( a == 0 ) {
+ fa = 0.0f;
+ } else {
+ fa = (float)mTableOfContents[a-1];
+ }
+ if ( a < 99 ) {
+ fb = (float)mTableOfContents[a];
+ } else {
+ fb = 256.0f;
+ }
+ fx = fa + (fb-fa)*(percent-a);
+ }
+
+ *pos = (int)((1.0f/256.0f)*fx*mSizeBytes) + mFirstFramePos;
+
+ return true;
+}
+
+static bool parse_xing_header(
+ const sp<DataSource> &source, off64_t first_frame_pos,
+ int32_t *frame_number, int32_t *byte_number,
+ char *table_of_contents, int32_t *quality_indicator,
+ int64_t *duration) {
+ if (frame_number) {
+ *frame_number = 0;
+ }
+ if (byte_number) {
+ *byte_number = 0;
+ }
+ if (table_of_contents) {
+ table_of_contents[0] = 0;
+ }
+ if (quality_indicator) {
+ *quality_indicator = 0;
+ }
+ if (duration) {
+ *duration = 0;
+ }
+
+ uint8_t buffer[4];
+ off64_t offset = first_frame_pos;
+ if (source->readAt(offset, &buffer, 4) < 4) { // get header
+ return false;
+ }
+ offset += 4;
+
+ uint8_t id, layer, sr_index, mode;
+ layer = (buffer[1] >> 1) & 3;
+ id = (buffer[1] >> 3) & 3;
+ sr_index = (buffer[2] >> 2) & 3;
+ mode = (buffer[3] >> 6) & 3;
+ if (layer == 0) {
+ return false;
+ }
+ if (id == 1) {
+ return false;
+ }
+ if (sr_index == 3) {
+ return false;
+ }
+ // determine offset of XING header
+ if(id&1) { // mpeg1
+ if (mode != 3) offset += 32;
+ else offset += 17;
+ } else { // mpeg2
+ if (mode != 3) offset += 17;
+ else offset += 9;
+ }
+
+ if (source->readAt(offset, &buffer, 4) < 4) { // XING header ID
+ return false;
+ }
+ offset += 4;
+ // Check XING ID
+ if ((buffer[0] != 'X') || (buffer[1] != 'i')
+ || (buffer[2] != 'n') || (buffer[3] != 'g')) {
+ if ((buffer[0] != 'I') || (buffer[1] != 'n')
+ || (buffer[2] != 'f') || (buffer[3] != 'o')) {
+ return false;
+ }
+ }
+
+ if (source->readAt(offset, &buffer, 4) < 4) { // flags
+ return false;
+ }
+ offset += 4;
+ uint32_t flags = U32_AT(buffer);
+
+ if (flags & 0x0001) { // Frames field is present
+ if (source->readAt(offset, buffer, 4) < 4) {
+ return false;
+ }
+ if (frame_number) {
+ *frame_number = U32_AT(buffer);
+ }
+ int32_t frame = U32_AT(buffer);
+ // Samples per frame: first index = MPEG version (0: MPEG-2/2.5, 1: MPEG-1), second index = layer
+ const int samplesPerFrames[2][3] =
+ {
+ { 384, 1152, 576 }, // MPEG 2, 2.5: layer1, layer2, layer3
+ { 384, 1152, 1152 }, // MPEG 1: layer1, layer2, layer3
+ };
+ // Sampling rates in hertz: first index = MPEG version ID, second index = sampling rate index
+ const int samplingRates[4][3] =
+ {
+ { 11025, 12000, 8000, }, // MPEG 2.5
+ { 0, 0, 0, }, // reserved
+ { 22050, 24000, 16000, }, // MPEG 2
+ { 44100, 48000, 32000, } // MPEG 1
+ };
+ if (duration) {
+ *duration = (int64_t)frame * samplesPerFrames[id&1][3-layer] * 1000000LL
+ / samplingRates[id][sr_index];
+ }
+ offset += 4;
+ }
+ if (flags & 0x0002) { // Bytes field is present
+ if (byte_number) {
+ if (source->readAt(offset, buffer, 4) < 4) {
+ return false;
+ }
+ *byte_number = U32_AT(buffer);
+ }
+ offset += 4;
+ }
+ if (flags & 0x0004) { // TOC field is present
+ if (table_of_contents) {
+ if (source->readAt(offset + 1, table_of_contents, 99) < 99) {
+ return false;
+ }
+ }
+ offset += 100;
+ }
+ if (flags & 0x0008) { // Quality indicator field is present
+ if (quality_indicator) {
+ if (source->readAt(offset, buffer, 4) < 4) {
+ return false;
+ }
+ *quality_indicator = U32_AT(buffer);
+ }
+ }
+ return true;
+}
+
+} // namespace android
+
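getOffsetForTime() above maps a seek time to a byte position by linearly interpolating between two of the 99 XING TOC entries. The same arithmetic as a standalone sketch with a fabricated table and assumed duration/size; the clamping of percent to [0, 100] is omitted because the example stays inside that range:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint8_t toc[99];
        for (int i = 0; i < 99; ++i) {
            toc[i] = (uint8_t)((i + 1) * 256 / 100);  // pretend-linear TOC
        }
        const int64_t durationUs = 180 * 1000000LL;   // 3 minute file (assumed)
        const int64_t sizeBytes  = 4 * 1024 * 1024;   // 4 MiB of audio data (assumed)
        const int64_t seekUs     = 60 * 1000000LL;    // seek to 1:00

        float percent = (float)seekUs * 100 / durationUs;            // ~33.3
        int a = (int)percent;
        float fa = (a == 0) ? 0.0f : (float)toc[a - 1];
        float fb = (a < 99) ? (float)toc[a] : 256.0f;
        float fx = fa + (fb - fa) * (percent - a);                   // interpolate

        int64_t pos = (int64_t)((1.0f / 256.0f) * fx * sizeBytes);
        printf("seek to %lld us -> byte offset ~%lld\n",
               (long long)seekUs, (long long)pos);
        return 0;
    }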
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index 478e40c..2fe5e18 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -218,6 +218,28 @@ static sp<ABuffer> FindNAL(
return NULL;
}
+const char *AVCProfileToString(uint8_t profile) {
+ switch (profile) {
+ case kAVCProfileBaseline:
+ return "Baseline";
+ case kAVCProfileMain:
+ return "Main";
+ case kAVCProfileExtended:
+ return "Extended";
+ case kAVCProfileHigh:
+ return "High";
+ case kAVCProfileHigh10:
+ return "High 10";
+ case kAVCProfileHigh422:
+ return "High 422";
+ case kAVCProfileHigh444:
+ return "High 444";
+ case kAVCProfileCAVLC444Intra:
+ return "CAVLC 444 Intra";
+ default: return "Unknown";
+ }
+}
+
sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
const uint8_t *data = accessUnit->data();
size_t size = accessUnit->size();
@@ -244,6 +266,10 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
*out++ = 0x01; // configurationVersion
memcpy(out, seqParamSet->data() + 1, 3); // profile/level...
+
+ uint8_t profile = out[0];
+ uint8_t level = out[2];
+
out += 3;
*out++ = (0x3f << 2) | 1; // lengthSize == 2 bytes
*out++ = 0xe0 | 1;
@@ -271,7 +297,8 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
meta->setInt32(kKeyWidth, width);
meta->setInt32(kKeyHeight, height);
- LOGI("found AVC codec config (%d x %d)", width, height);
+ LOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
+ width, height, AVCProfileToString(profile), level / 10, level % 10);
return meta;
}
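The profile and level bytes captured above follow the avcC layout: the first byte copied from the sequence parameter set is profile_idc and the third is level_idc, where level_idc is ten times the level number. A tiny illustration of how the new log line decodes them, using assumed values:

    #include <cstdint>
    #include <cstdio>

    int main() {
        // Assumed SPS bytes: Baseline profile (profile_idc 66), level_idc 31 -> level 3.1.
        const uint8_t profile_idc = 66;
        const uint8_t level_idc = 31;

        const char *profileName = (profile_idc == 66) ? "Baseline"
                                : (profile_idc == 77) ? "Main"
                                : (profile_idc == 100) ? "High"
                                : "Other";

        printf("%s-profile level %d.%d\n",
               profileName, level_idc / 10, level_idc % 10);
        return 0;
    }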
diff --git a/media/libstagefright/codecs/aacenc/AACEncoder.cpp b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
index df9f107..9524884 100644
--- a/media/libstagefright/codecs/aacenc/AACEncoder.cpp
+++ b/media/libstagefright/codecs/aacenc/AACEncoder.cpp
@@ -36,6 +36,7 @@ AACEncoder::AACEncoder(const sp<MediaSource> &source, const sp<MetaData> &meta)
mStarted(false),
mBufferGroup(NULL),
mInputBuffer(NULL),
+ mInputFrame(NULL),
mEncoderHandle(NULL),
mApiHandle(NULL),
mMemOperator(NULL) {
@@ -45,6 +46,7 @@ status_t AACEncoder::initCheck() {
CHECK(mApiHandle == NULL && mEncoderHandle == NULL);
CHECK(mMeta->findInt32(kKeySampleRate, &mSampleRate));
CHECK(mMeta->findInt32(kKeyChannelCount, &mChannels));
+ CHECK(mChannels <= 2 && mChannels >= 1);
CHECK(mMeta->findInt32(kKeyBitRate, &mBitRate));
mApiHandle = new VO_AUDIO_CODECAPI;
@@ -145,6 +147,10 @@ status_t AACEncoder::start(MetaData *params) {
mNumInputSamples = 0;
mAnchorTimeUs = 0;
mFrameCount = 0;
+
+ mInputFrame = new int16_t[mChannels * kNumSamplesPerFrame];
+ CHECK(mInputFrame != NULL);
+
mSource->start(params);
mStarted = true;
@@ -176,6 +182,10 @@ status_t AACEncoder::stop() {
mApiHandle = NULL;
mStarted = false;
+ if (mInputFrame) {
+ delete[] mInputFrame;
+ mInputFrame = NULL;
+ }
return OK;
}
@@ -222,7 +232,8 @@ status_t AACEncoder::read(
buffer->meta_data()->setInt32(kKeyIsCodecConfig, false);
}
- while (mNumInputSamples < kNumSamplesPerFrame) {
+ const int32_t nSamples = mChannels * kNumSamplesPerFrame;
+ while (mNumInputSamples < nSamples) {
if (mInputBuffer == NULL) {
if (mSource->read(&mInputBuffer, options) != OK) {
if (mNumInputSamples == 0) {
@@ -231,7 +242,7 @@ status_t AACEncoder::read(
}
memset(&mInputFrame[mNumInputSamples],
0,
- sizeof(int16_t) * (kNumSamplesPerFrame - mNumInputSamples));
+ sizeof(int16_t) * (nSamples - mNumInputSamples));
mNumInputSamples = 0;
break;
}
@@ -250,8 +261,7 @@ status_t AACEncoder::read(
} else {
readFromSource = false;
}
- size_t copy =
- (kNumSamplesPerFrame - mNumInputSamples) * sizeof(int16_t);
+ size_t copy = (nSamples - mNumInputSamples) * sizeof(int16_t);
if (copy > mInputBuffer->range_length()) {
copy = mInputBuffer->range_length();
@@ -271,8 +281,8 @@ status_t AACEncoder::read(
mInputBuffer = NULL;
}
mNumInputSamples += copy / sizeof(int16_t);
- if (mNumInputSamples >= kNumSamplesPerFrame) {
- mNumInputSamples %= kNumSamplesPerFrame;
+ if (mNumInputSamples >= nSamples) {
+ mNumInputSamples %= nSamples;
break;
}
}
@@ -280,7 +290,7 @@ status_t AACEncoder::read(
VO_CODECBUFFER inputData;
memset(&inputData, 0, sizeof(inputData));
inputData.Buffer = (unsigned char*) mInputFrame;
- inputData.Length = kNumSamplesPerFrame * sizeof(int16_t);
+ inputData.Length = nSamples * sizeof(int16_t);
CHECK(VO_ERR_NONE == mApiHandle->SetInputData(mEncoderHandle,&inputData));
VO_CODECBUFFER outputData;
@@ -289,15 +299,21 @@ status_t AACEncoder::read(
memset(&outputInfo, 0, sizeof(outputInfo));
VO_U32 ret = VO_ERR_NONE;
- outputData.Buffer = outPtr;
- outputData.Length = buffer->size();
- ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
- CHECK(ret == VO_ERR_NONE || ret == VO_ERR_INPUT_BUFFER_SMALL);
- CHECK(outputData.Length != 0);
- buffer->set_range(0, outputData.Length);
+ size_t nOutputBytes = 0;
+ do {
+ outputData.Buffer = outPtr;
+ outputData.Length = buffer->size() - nOutputBytes;
+ ret = mApiHandle->GetOutputData(mEncoderHandle, &outputData, &outputInfo);
+ if (ret == VO_ERR_NONE) {
+ outPtr += outputData.Length;
+ nOutputBytes += outputData.Length;
+ }
+ } while (ret != VO_ERR_INPUT_BUFFER_SMALL);
+ buffer->set_range(0, nOutputBytes);
int64_t mediaTimeUs =
((mFrameCount - 1) * 1000000LL * kNumSamplesPerFrame) / mSampleRate;
+
buffer->meta_data()->setInt64(kKeyTime, mAnchorTimeUs + mediaTimeUs);
if (readFromSource && wallClockTimeUs != -1) {
buffer->meta_data()->setInt64(kKeyDriftTime, mediaTimeUs - wallClockTimeUs);
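The switch from a fixed kNumSamplesPerFrame input buffer to mChannels * kNumSamplesPerFrame reflects that the PCM feed is interleaved: a stereo AAC frame still spans 1024 samples per channel but consumes 2048 int16_t values. A small bookkeeping sketch, detached from the VO_* encoder API; the 512-sample read size is an assumption:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
        const int kNumSamplesPerFrame = 1024;  // AAC frame length per channel
        const int channels = 2;                // assumed stereo input
        const int nSamples = channels * kNumSamplesPerFrame;  // 2048 interleaved values

        std::vector<int16_t> inputFrame(nSamples);

        // Pretend we pull PCM in 512-value chunks from an upstream source and
        // accumulate until one full frame's worth of interleaved samples exists.
        int numInputSamples = 0;
        int readsNeeded = 0;
        while (numInputSamples < nSamples) {
            numInputSamples += 512;
            ++readsNeeded;
        }

        printf("stereo frame needs %d int16 values (%d reads of 512)\n",
               nSamples, readsNeeded);
        printf("buffer bytes: %zu\n", inputFrame.size() * sizeof(int16_t));
        return 0;
    }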
diff --git a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
index 52a391f..e6a0976 100644
--- a/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/AVCEncoder.cpp
@@ -33,6 +33,80 @@
namespace android {
+static status_t ConvertOmxAvcProfileToAvcSpecProfile(
+ int32_t omxProfile, AVCProfile* pvProfile) {
+ LOGV("ConvertOmxAvcProfileToAvcSpecProfile: %d", omxProfile);
+ switch (omxProfile) {
+ case OMX_VIDEO_AVCProfileBaseline:
+ *pvProfile = AVC_BASELINE;
+ return OK;
+ default:
+ LOGE("Unsupported omx profile: %d", omxProfile);
+ }
+ return BAD_VALUE;
+}
+
+static status_t ConvertOmxAvcLevelToAvcSpecLevel(
+ int32_t omxLevel, AVCLevel *pvLevel) {
+ LOGV("ConvertOmxAvcLevelToAvcSpecLevel: %d", omxLevel);
+ AVCLevel level = AVC_LEVEL5_1;
+ switch (omxLevel) {
+ case OMX_VIDEO_AVCLevel1:
+ level = AVC_LEVEL1_B;
+ break;
+ case OMX_VIDEO_AVCLevel1b:
+ level = AVC_LEVEL1;
+ break;
+ case OMX_VIDEO_AVCLevel11:
+ level = AVC_LEVEL1_1;
+ break;
+ case OMX_VIDEO_AVCLevel12:
+ level = AVC_LEVEL1_2;
+ break;
+ case OMX_VIDEO_AVCLevel13:
+ level = AVC_LEVEL1_3;
+ break;
+ case OMX_VIDEO_AVCLevel2:
+ level = AVC_LEVEL2;
+ break;
+ case OMX_VIDEO_AVCLevel21:
+ level = AVC_LEVEL2_1;
+ break;
+ case OMX_VIDEO_AVCLevel22:
+ level = AVC_LEVEL2_2;
+ break;
+ case OMX_VIDEO_AVCLevel3:
+ level = AVC_LEVEL3;
+ break;
+ case OMX_VIDEO_AVCLevel31:
+ level = AVC_LEVEL3_1;
+ break;
+ case OMX_VIDEO_AVCLevel32:
+ level = AVC_LEVEL3_2;
+ break;
+ case OMX_VIDEO_AVCLevel4:
+ level = AVC_LEVEL4;
+ break;
+ case OMX_VIDEO_AVCLevel41:
+ level = AVC_LEVEL4_1;
+ break;
+ case OMX_VIDEO_AVCLevel42:
+ level = AVC_LEVEL4_2;
+ break;
+ case OMX_VIDEO_AVCLevel5:
+ level = AVC_LEVEL5;
+ break;
+ case OMX_VIDEO_AVCLevel51:
+ level = AVC_LEVEL5_1;
+ break;
+ default:
+ LOGE("Unknown omx level: %d", omxLevel);
+ return BAD_VALUE;
+ }
+ *pvLevel = level;
+ return OK;
+}
+
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
uint8_t *inyuv, uint8_t* outyuv,
int32_t width, int32_t height) {
@@ -133,7 +207,7 @@ status_t AVCEncoder::initCheck(const sp<MetaData>& meta) {
LOGV("initCheck");
CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
- CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+ CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
// XXX: Add more color format support
@@ -231,10 +305,16 @@ status_t AVCEncoder::initCheck(const sp<MetaData>& meta) {
mEncParams->level = AVC_LEVEL3_2;
int32_t profile, level;
if (meta->findInt32(kKeyVideoProfile, &profile)) {
- mEncParams->profile = (AVCProfile) profile;
+ if (OK != ConvertOmxAvcProfileToAvcSpecProfile(
+ profile, &mEncParams->profile)) {
+ return BAD_VALUE;
+ }
}
if (meta->findInt32(kKeyVideoLevel, &level)) {
- mEncParams->level = (AVCLevel) level;
+ if (OK != ConvertOmxAvcLevelToAvcSpecLevel(
+ level, &mEncParams->level)) {
+ return BAD_VALUE;
+ }
}
@@ -242,7 +322,7 @@ status_t AVCEncoder::initCheck(const sp<MetaData>& meta) {
mFormat->setInt32(kKeyWidth, mVideoWidth);
mFormat->setInt32(kKeyHeight, mVideoHeight);
mFormat->setInt32(kKeyBitRate, mVideoBitRate);
- mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+ mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
mFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
mFormat->setCString(kKeyDecoderComponent, "AVCEncoder");
diff --git a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
index a011137..c7a475b 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/M4vH263Encoder.cpp
@@ -32,6 +32,109 @@
namespace android {
+static status_t ConvertOmxProfileLevel(
+ MP4EncodingMode mode,
+ int32_t omxProfile,
+ int32_t omxLevel,
+ ProfileLevelType* pvProfileLevel) {
+ LOGV("ConvertOmxProfileLevel: %d/%d/%d", mode, omxProfile, omxLevel);
+ ProfileLevelType profileLevel;
+ if (mode == H263_MODE) {
+ switch (omxProfile) {
+ case OMX_VIDEO_H263ProfileBaseline:
+ if (omxLevel > OMX_VIDEO_H263Level45) {
+ LOGE("Unsupported level (%d) for H263", omxLevel);
+ return BAD_VALUE;
+ } else {
+ LOGW("PV does not support level configuration for H263");
+ profileLevel = CORE_PROFILE_LEVEL2;
+ break;
+ }
+ break;
+ default:
+ LOGE("Unsupported profile (%d) for H263", omxProfile);
+ return BAD_VALUE;
+ }
+ } else { // MPEG4
+ switch (omxProfile) {
+ case OMX_VIDEO_MPEG4ProfileSimple:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level0b:
+ profileLevel = SIMPLE_PROFILE_LEVEL0;
+ break;
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = SIMPLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = SIMPLE_PROFILE_LEVEL2;
+ break;
+ case OMX_VIDEO_MPEG4Level3:
+ profileLevel = SIMPLE_PROFILE_LEVEL3;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 simple profile",
+ omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ case OMX_VIDEO_MPEG4ProfileSimpleScalable:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level0b:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL0;
+ break;
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = SIMPLE_SCALABLE_PROFILE_LEVEL2;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 simple "
+ "scalable profile", omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ case OMX_VIDEO_MPEG4ProfileCore:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = CORE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = CORE_PROFILE_LEVEL2;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 core "
+ "profile", omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ case OMX_VIDEO_MPEG4ProfileCoreScalable:
+ switch (omxLevel) {
+ case OMX_VIDEO_MPEG4Level1:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL1;
+ break;
+ case OMX_VIDEO_MPEG4Level2:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL2;
+ break;
+ case OMX_VIDEO_MPEG4Level3:
+ profileLevel = CORE_SCALABLE_PROFILE_LEVEL3;
+ break;
+ default:
+ LOGE("Unsupported level (%d) for MPEG4 core "
+ "scalable profile", omxLevel);
+ return BAD_VALUE;
+ }
+ break;
+ default:
+ LOGE("Unsupported MPEG4 profile (%d)", omxProfile);
+ return BAD_VALUE;
+ }
+ }
+
+ *pvProfileLevel = profileLevel;
+ return OK;
+}
+
inline static void ConvertYUV420SemiPlanarToYUV420Planar(
uint8_t *inyuv, uint8_t* outyuv,
int32_t width, int32_t height) {
@@ -97,7 +200,7 @@ status_t M4vH263Encoder::initCheck(const sp<MetaData>& meta) {
LOGV("initCheck");
CHECK(meta->findInt32(kKeyWidth, &mVideoWidth));
CHECK(meta->findInt32(kKeyHeight, &mVideoHeight));
- CHECK(meta->findInt32(kKeySampleRate, &mVideoFrameRate));
+ CHECK(meta->findInt32(kKeyFrameRate, &mVideoFrameRate));
CHECK(meta->findInt32(kKeyBitRate, &mVideoBitRate));
// XXX: Add more color format support
@@ -150,9 +253,14 @@ status_t M4vH263Encoder::initCheck(const sp<MetaData>& meta) {
// If profile and level setting is not correct, failure
// is reported when the encoder is initialized.
mEncParams->profile_level = CORE_PROFILE_LEVEL2;
- int32_t profileLevel;
- if (meta->findInt32(kKeyVideoLevel, &profileLevel)) {
- mEncParams->profile_level = (ProfileLevelType)profileLevel;
+ int32_t profile, level;
+ if (meta->findInt32(kKeyVideoProfile, &profile) &&
+ meta->findInt32(kKeyVideoLevel, &level)) {
+ if (OK != ConvertOmxProfileLevel(
+ mEncParams->encMode, profile, level,
+ &mEncParams->profile_level)) {
+ return BAD_VALUE;
+ }
}
mEncParams->packetSize = 32;
@@ -191,7 +299,7 @@ status_t M4vH263Encoder::initCheck(const sp<MetaData>& meta) {
mFormat->setInt32(kKeyWidth, mVideoWidth);
mFormat->setInt32(kKeyHeight, mVideoHeight);
mFormat->setInt32(kKeyBitRate, mVideoBitRate);
- mFormat->setInt32(kKeySampleRate, mVideoFrameRate);
+ mFormat->setInt32(kKeyFrameRate, mVideoFrameRate);
mFormat->setInt32(kKeyColorFormat, mVideoColorFormat);
mFormat->setCString(kKeyMIMEType, mime);
diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
index 0dcbd73..ef2dba0 100644
--- a/media/libstagefright/colorconversion/Android.mk
+++ b/media/libstagefright/colorconversion/Android.mk
@@ -6,7 +6,8 @@ LOCAL_SRC_FILES:= \
SoftwareRenderer.cpp
LOCAL_C_INCLUDES := \
- $(TOP)/frameworks/base/include/media/stagefright/openmax
+ $(TOP)/frameworks/base/include/media/stagefright/openmax \
+ $(TOP)/hardware/msm7k
LOCAL_SHARED_LIBRARIES := \
libbinder \
@@ -17,6 +18,11 @@ LOCAL_SHARED_LIBRARIES := \
libsurfaceflinger_client\
libcamera_client
+# ifeq ($(TARGET_BOARD_PLATFORM),msm7k)
+ifeq ($(TARGET_PRODUCT),passion)
+ LOCAL_CFLAGS += -DHAS_YCBCR420_SP_ADRENO
+endif
+
LOCAL_MODULE:= libstagefright_color_conversion
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 86ad85b..acbea05 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -22,76 +22,197 @@
#include <binder/MemoryHeapBase.h>
#include <binder/MemoryHeapPmem.h>
#include <media/stagefright/MediaDebug.h>
-#include <surfaceflinger/ISurface.h>
+#include <surfaceflinger/Surface.h>
+#include <ui/android_native_buffer.h>
+#include <ui/GraphicBufferMapper.h>
+
+// XXX: Temporary hack to allow referencing the _ADRENO pixel format here.
+#include <libgralloc-qsd8k/gralloc_priv.h>
namespace android {
SoftwareRenderer::SoftwareRenderer(
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight,
int32_t rotationDegrees)
: mColorFormat(colorFormat),
- mConverter(colorFormat, OMX_COLOR_Format16bitRGB565),
- mISurface(surface),
+ mConverter(NULL),
+ mYUVMode(None),
+ mSurface(surface),
mDisplayWidth(displayWidth),
mDisplayHeight(displayHeight),
mDecodedWidth(decodedWidth),
- mDecodedHeight(decodedHeight),
- mFrameSize(mDecodedWidth * mDecodedHeight * 2), // RGB565
- mIndex(0) {
- mMemoryHeap = new MemoryHeapBase("/dev/pmem_adsp", 2 * mFrameSize);
- if (mMemoryHeap->heapID() < 0) {
- LOGI("Creating physical memory heap failed, reverting to regular heap.");
- mMemoryHeap = new MemoryHeapBase(2 * mFrameSize);
- } else {
- sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(mMemoryHeap);
- pmemHeap->slap();
- mMemoryHeap = pmemHeap;
+ mDecodedHeight(decodedHeight) {
+ LOGI("input format = %d", mColorFormat);
+ LOGI("display = %d x %d, decoded = %d x %d",
+ mDisplayWidth, mDisplayHeight, mDecodedWidth, mDecodedHeight);
+
+ mDecodedWidth = mDisplayWidth;
+ mDecodedHeight = mDisplayHeight;
+
+ int halFormat;
+ switch (mColorFormat) {
+#if HAS_YCBCR420_SP_ADRENO
+ case OMX_COLOR_FormatYUV420Planar:
+ {
+ halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+ mYUVMode = YUV420ToYUV420sp;
+ break;
+ }
+
+ case 0x7fa30c00:
+ {
+ halFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+ mYUVMode = YUV420spToYUV420sp;
+ break;
+ }
+#endif
+
+ default:
+ halFormat = HAL_PIXEL_FORMAT_RGB_565;
+
+ mConverter = new ColorConverter(
+ mColorFormat, OMX_COLOR_Format16bitRGB565);
+ CHECK(mConverter->isValid());
+ break;
}
- CHECK(mISurface.get() != NULL);
+ CHECK(mSurface.get() != NULL);
CHECK(mDecodedWidth > 0);
CHECK(mDecodedHeight > 0);
- CHECK(mMemoryHeap->heapID() >= 0);
- CHECK(mConverter.isValid());
+ CHECK(mConverter == NULL || mConverter->isValid());
+
+ CHECK_EQ(0,
+ native_window_set_usage(
+ mSurface.get(),
+ GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
+ | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
- uint32_t orientation;
+ CHECK_EQ(0, native_window_set_buffer_count(mSurface.get(), 2));
+
+ // Width must be multiple of 32???
+ CHECK_EQ(0, native_window_set_buffers_geometry(
+ mSurface.get(), mDecodedWidth, mDecodedHeight,
+ halFormat));
+
+ uint32_t transform;
switch (rotationDegrees) {
- case 0: orientation = ISurface::BufferHeap::ROT_0; break;
- case 90: orientation = ISurface::BufferHeap::ROT_90; break;
- case 180: orientation = ISurface::BufferHeap::ROT_180; break;
- case 270: orientation = ISurface::BufferHeap::ROT_270; break;
- default: orientation = ISurface::BufferHeap::ROT_0; break;
+ case 0: transform = 0; break;
+ case 90: transform = HAL_TRANSFORM_ROT_90; break;
+ case 180: transform = HAL_TRANSFORM_ROT_180; break;
+ case 270: transform = HAL_TRANSFORM_ROT_270; break;
+ default: transform = 0; break;
}
- ISurface::BufferHeap bufferHeap(
- mDisplayWidth, mDisplayHeight,
- mDecodedWidth, mDecodedHeight,
- PIXEL_FORMAT_RGB_565,
- orientation, 0,
- mMemoryHeap);
-
- status_t err = mISurface->registerBuffers(bufferHeap);
- CHECK_EQ(err, OK);
+ if (transform) {
+ CHECK_EQ(0, native_window_set_buffers_transform(
+ mSurface.get(), transform));
+ }
}
SoftwareRenderer::~SoftwareRenderer() {
- mISurface->unregisterBuffers();
+ delete mConverter;
+ mConverter = NULL;
+}
+
+static inline size_t ALIGN(size_t x, size_t alignment) {
+ return (x + alignment - 1) & ~(alignment - 1);
}
void SoftwareRenderer::render(
const void *data, size_t size, void *platformPrivate) {
- size_t offset = mIndex * mFrameSize;
- void *dst = (uint8_t *)mMemoryHeap->getBase() + offset;
+ android_native_buffer_t *buf;
+ int err;
+ if ((err = mSurface->dequeueBuffer(mSurface.get(), &buf)) != 0) {
+ LOGW("Surface::dequeueBuffer returned error %d", err);
+ return;
+ }
+
+ CHECK_EQ(0, mSurface->lockBuffer(mSurface.get(), buf));
- mConverter.convert(
- mDecodedWidth, mDecodedHeight,
- data, 0, dst, 2 * mDecodedWidth);
+ GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- mISurface->postBuffer(offset);
- mIndex = 1 - mIndex;
+ Rect bounds(mDecodedWidth, mDecodedHeight);
+
+ void *dst;
+ CHECK_EQ(0, mapper.lock(
+ buf->handle, GRALLOC_USAGE_SW_WRITE_OFTEN, bounds, &dst));
+
+ if (mConverter) {
+ mConverter->convert(
+ mDecodedWidth, mDecodedHeight,
+ data, 0, dst, buf->stride * 2);
+ } else if (mYUVMode == YUV420spToYUV420sp) {
+ // Input and output are both YUV420sp, but the alignment requirements
+ // are different.
+ size_t srcYStride = mDecodedWidth;
+ const uint8_t *srcY = (const uint8_t *)data;
+ uint8_t *dstY = (uint8_t *)dst;
+ for (size_t i = 0; i < mDecodedHeight; ++i) {
+ memcpy(dstY, srcY, mDecodedWidth);
+ srcY += srcYStride;
+ dstY += buf->stride;
+ }
+
+ size_t srcUVStride = (mDecodedWidth + 1) & ~1;
+ size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2;
+
+ const uint8_t *srcUV = (const uint8_t *)data
+ + mDecodedHeight * mDecodedWidth;
+
+ size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096);
+ uint8_t *dstUV = (uint8_t *)dst + dstUVOffset;
+
+ for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) {
+ memcpy(dstUV, srcUV, (mDecodedWidth + 1) & ~1);
+ srcUV += srcUVStride;
+ dstUV += dstUVStride;
+ }
+ } else if (mYUVMode == YUV420ToYUV420sp) {
+ // Input is YUV420 planar, output is YUV420sp, adhere to proper
+ // alignment requirements.
+ size_t srcYStride = mDecodedWidth;
+ const uint8_t *srcY = (const uint8_t *)data;
+ uint8_t *dstY = (uint8_t *)dst;
+ for (size_t i = 0; i < mDecodedHeight; ++i) {
+ memcpy(dstY, srcY, mDecodedWidth);
+ srcY += srcYStride;
+ dstY += buf->stride;
+ }
+
+ size_t srcUVStride = (mDecodedWidth + 1) / 2;
+ size_t dstUVStride = ALIGN(mDecodedWidth / 2, 32) * 2;
+
+ const uint8_t *srcU = (const uint8_t *)data
+ + mDecodedHeight * mDecodedWidth;
+
+ const uint8_t *srcV =
+ srcU + ((mDecodedWidth + 1) / 2) * ((mDecodedHeight + 1) / 2);
+
+ size_t dstUVOffset = ALIGN(ALIGN(mDecodedHeight, 32) * buf->stride, 4096);
+ uint8_t *dstUV = (uint8_t *)dst + dstUVOffset;
+
+ for (size_t i = 0; i < (mDecodedHeight + 1) / 2; ++i) {
+ for (size_t j = 0; j < (mDecodedWidth + 1) / 2; ++j) {
+ dstUV[2 * j + 1] = srcU[j];
+ dstUV[2 * j] = srcV[j];
+ }
+ srcU += srcUVStride;
+ srcV += srcUVStride;
+ dstUV += dstUVStride;
+ }
+ } else {
+ memcpy(dst, data, size);
+ }
+
+ CHECK_EQ(0, mapper.unlock(buf->handle));
+
+ if ((err = mSurface->queueBuffer(mSurface.get(), buf)) != 0) {
+ LOGW("Surface::queueBuffer returned error %d", err);
+ }
+ buf = NULL;
}
} // namespace android
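The Adreno YUV420sp destination layout assumed above pads each plane: luma rows use the gralloc buffer's stride, the chroma plane begins at a 4096-byte-aligned offset past a 32-row-aligned luma block, and chroma rows are padded to 32-pixel pairs. The same arithmetic as a standalone sketch; the 320x240 geometry and the stride of 384 are assumptions, in practice both come from the decoder and the dequeued buffer:

    #include <cstddef>
    #include <cstdio>

    static inline size_t ALIGN(size_t x, size_t alignment) {
        return (x + alignment - 1) & ~(alignment - 1);
    }

    int main() {
        const size_t width = 320, height = 240;  // assumed decoded geometry
        const size_t bufStride = 384;            // hypothetical gralloc stride

        const size_t dstUVStride = ALIGN(width / 2, 32) * 2;
        const size_t dstUVOffset = ALIGN(ALIGN(height, 32) * bufStride, 4096);

        printf("UV row stride   : %zu bytes\n", dstUVStride);
        printf("UV plane offset : %zu bytes\n", dstUVOffset);
        return 0;
    }

For this geometry that yields a 320-byte chroma row stride and a chroma plane starting 98304 bytes into the buffer.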
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index cc7dd4f..3aabf5f 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -9,7 +9,8 @@ LOCAL_SRC_FILES:= \
LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
- $(TOP)/frameworks/base/media/libstagefright
+ $(TOP)/frameworks/base/media/libstagefright \
+ $(TOP)/external/openssl/include
LOCAL_MODULE:= libstagefright_httplive
diff --git a/media/libstagefright/httplive/LiveSource.cpp b/media/libstagefright/httplive/LiveSource.cpp
index 4124571..4b4c3d2 100644
--- a/media/libstagefright/httplive/LiveSource.cpp
+++ b/media/libstagefright/httplive/LiveSource.cpp
@@ -22,9 +22,14 @@
#include "include/M3UParser.h"
#include "include/NuHTTPDataSource.h"
+#include <cutils/properties.h>
+#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/FileSource.h>
-#include <media/stagefright/MediaDebug.h>
+
+#include <ctype.h>
+#include <openssl/aes.h>
namespace android {
@@ -38,7 +43,9 @@ LiveSource::LiveSource(const char *url)
mSourceSize(0),
mOffsetBias(0),
mSignalDiscontinuity(false),
- mPrevBandwidthIndex(-1) {
+ mPrevBandwidthIndex(-1),
+ mAESKey((AES_KEY *)malloc(sizeof(AES_KEY))),
+ mStreamEncrypted(false) {
if (switchToNext()) {
mInitCheck = OK;
@@ -47,6 +54,8 @@ LiveSource::LiveSource(const char *url)
}
LiveSource::~LiveSource() {
+ free(mAESKey);
+ mAESKey = NULL;
}
status_t LiveSource::initCheck() const {
@@ -68,7 +77,77 @@ static double uniformRand() {
return (double)rand() / RAND_MAX;
}
-bool LiveSource::loadPlaylist(bool fetchMaster) {
+size_t LiveSource::getBandwidthIndex() {
+ if (mBandwidthItems.size() == 0) {
+ return 0;
+ }
+
+#if 1
+ int32_t bandwidthBps;
+ if (mSource != NULL && mSource->estimateBandwidth(&bandwidthBps)) {
+ LOGI("bandwidth estimated at %.2f kbps", bandwidthBps / 1024.0f);
+ } else {
+ LOGI("no bandwidth estimate.");
+ return 0; // Pick the lowest bandwidth stream by default.
+ }
+
+ char value[PROPERTY_VALUE_MAX];
+ if (property_get("media.httplive.max-bw", value, NULL)) {
+ char *end;
+ long maxBw = strtoul(value, &end, 10);
+ if (end > value && *end == '\0') {
+ if (maxBw > 0 && bandwidthBps > maxBw) {
+ LOGV("bandwidth capped to %ld bps", maxBw);
+ bandwidthBps = maxBw;
+ }
+ }
+ }
+
+ // Consider only 80% of the available bandwidth usable.
+ bandwidthBps = (bandwidthBps * 8) / 10;
+
+ // Pick the highest bandwidth stream below or equal to estimated bandwidth.
+
+ size_t index = mBandwidthItems.size() - 1;
+ while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth
+ > (size_t)bandwidthBps) {
+ --index;
+ }
+#elif 0
+ // Change bandwidth at random.
+ size_t index = uniformRand() * mBandwidthItems.size();
+#elif 0
+ // There's a 50% chance to stay on the current bandwidth and
+ // a 50% chance to switch to the next higher bandwidth (wrapping around
+ // to lowest)
+ const size_t kMinIndex = 0;
+
+ size_t index;
+ if (mPrevBandwidthIndex < 0) {
+ index = kMinIndex;
+ } else if (uniformRand() < 0.5) {
+ index = (size_t)mPrevBandwidthIndex;
+ } else {
+ index = mPrevBandwidthIndex + 1;
+ if (index == mBandwidthItems.size()) {
+ index = kMinIndex;
+ }
+ }
+#elif 0
+ // Pick the highest bandwidth stream below or equal to 1.2 Mbit/sec
+
+ size_t index = mBandwidthItems.size() - 1;
+ while (index > 0 && mBandwidthItems.itemAt(index).mBandwidth > 1200000) {
+ --index;
+ }
+#else
+ size_t index = mBandwidthItems.size() - 1; // Highest bandwidth stream
+#endif
+
+ return index;
+}
+
+bool LiveSource::loadPlaylist(bool fetchMaster, size_t bandwidthIndex) {
mSignalDiscontinuity = false;
mPlaylist.clear();
@@ -112,49 +191,35 @@ bool LiveSource::loadPlaylist(bool fetchMaster) {
mBandwidthItems.sort(SortByBandwidth);
+#if 1 // XXX
+ if (mBandwidthItems.size() > 1) {
+ // Remove the lowest bandwidth stream; this is sometimes
+ // an AAC program stream, which we don't support at this point.
+ mBandwidthItems.removeItemsAt(0);
+ }
+#endif
+
for (size_t i = 0; i < mBandwidthItems.size(); ++i) {
const BandwidthItem &item = mBandwidthItems.itemAt(i);
LOGV("item #%d: %s", i, item.mURI.c_str());
}
+
+ bandwidthIndex = getBandwidthIndex();
}
}
if (mBandwidthItems.size() > 0) {
-#if 0
- // Change bandwidth at random()
- size_t index = uniformRand() * mBandwidthItems.size();
-#elif 0
- // There's a 50% chance to stay on the current bandwidth and
- // a 50% chance to switch to the next higher bandwidth (wrapping around
- // to lowest)
- size_t index;
- if (uniformRand() < 0.5) {
- index = mPrevBandwidthIndex < 0 ? 0 : (size_t)mPrevBandwidthIndex;
- } else {
- if (mPrevBandwidthIndex < 0) {
- index = 0;
- } else {
- index = mPrevBandwidthIndex + 1;
- if (index == mBandwidthItems.size()) {
- index = 0;
- }
- }
- }
-#else
- // Stay on the lowest bandwidth available.
- size_t index = mBandwidthItems.size() - 1; // Highest bandwidth stream
-#endif
+ mURL = mBandwidthItems.editItemAt(bandwidthIndex).mURI;
- mURL = mBandwidthItems.editItemAt(index).mURI;
-
- if (mPrevBandwidthIndex >= 0 && (size_t)mPrevBandwidthIndex != index) {
+ if (mPrevBandwidthIndex >= 0
+ && (size_t)mPrevBandwidthIndex != bandwidthIndex) {
// If we switched streams because of bandwidth changes,
// we'll signal this discontinuity by inserting a
// special transport stream packet into the stream.
mSignalDiscontinuity = true;
}
- mPrevBandwidthIndex = index;
+ mPrevBandwidthIndex = bandwidthIndex;
} else {
mURL = mMasterURL;
}
@@ -199,18 +264,33 @@ bool LiveSource::switchToNext() {
mOffsetBias += mSourceSize;
mSourceSize = 0;
+ size_t bandwidthIndex = getBandwidthIndex();
+
if (mLastFetchTimeUs < 0 || getNowUs() >= mLastFetchTimeUs + 15000000ll
- || mPlaylistIndex == mPlaylist->size()) {
+ || mPlaylistIndex == mPlaylist->size()
+ || (ssize_t)bandwidthIndex != mPrevBandwidthIndex) {
int32_t nextSequenceNumber =
mPlaylistIndex + mFirstItemSequenceNumber;
- if (!loadPlaylist(mLastFetchTimeUs < 0)) {
+ if (!loadPlaylist(mLastFetchTimeUs < 0, bandwidthIndex)) {
LOGE("failed to reload playlist");
return false;
}
if (mLastFetchTimeUs < 0) {
- mPlaylistIndex = 0;
+ if (isSeekable()) {
+ mPlaylistIndex = 0;
+ } else {
+ // This is live streamed content; the first seqnum in the
+ // various bandwidth streams may be slightly off, so don't
+ // start at the very first entry.
+ // With a segment duration of 6-10 secs, this only delays
+ // playback by up to 30 secs compared to real time.
+ mPlaylistIndex = 3;
+ if (mPlaylistIndex >= mPlaylist->size()) {
+ mPlaylistIndex = mPlaylist->size() - 1;
+ }
+ }
} else {
if (nextSequenceNumber < mFirstItemSequenceNumber
|| nextSequenceNumber
@@ -227,6 +307,10 @@ bool LiveSource::switchToNext() {
mLastFetchTimeUs = getNowUs();
}
+ if (!setupCipher()) {
+ return false;
+ }
+
AString uri;
sp<AMessage> itemMeta;
CHECK(mPlaylist->itemAt(mPlaylistIndex, &uri, &itemMeta));
@@ -243,16 +327,131 @@ bool LiveSource::switchToNext() {
}
mPlaylistIndex++;
+
+ return true;
+}
+
+bool LiveSource::setupCipher() {
+ sp<AMessage> itemMeta;
+ bool found = false;
+ AString method;
+
+ for (ssize_t i = mPlaylistIndex; i >= 0; --i) {
+ AString uri;
+ CHECK(mPlaylist->itemAt(i, &uri, &itemMeta));
+
+ if (itemMeta->findString("cipher-method", &method)) {
+ found = true;
+ break;
+ }
+ }
+
+ if (!found) {
+ method = "NONE";
+ }
+
+ mStreamEncrypted = false;
+
+ if (method == "AES-128") {
+ AString keyURI;
+ if (!itemMeta->findString("cipher-uri", &keyURI)) {
+ LOGE("Missing key uri");
+ return false;
+ }
+
+ if (keyURI.size() >= 2
+ && keyURI.c_str()[0] == '"'
+ && keyURI.c_str()[keyURI.size() - 1] == '"') {
+ // Remove surrounding quotes.
+ AString tmp(keyURI, 1, keyURI.size() - 2);
+ keyURI = tmp;
+ }
+
+ ssize_t index = mAESKeyForURI.indexOfKey(keyURI);
+
+ sp<ABuffer> key;
+ if (index >= 0) {
+ key = mAESKeyForURI.valueAt(index);
+ } else {
+ key = new ABuffer(16);
+
+ sp<NuHTTPDataSource> keySource = new NuHTTPDataSource;
+ status_t err = keySource->connect(keyURI.c_str());
+
+ if (err == OK) {
+ size_t offset = 0;
+ while (offset < 16) {
+ ssize_t n = keySource->readAt(
+ offset, key->data() + offset, 16 - offset);
+ if (n <= 0) {
+ err = ERROR_IO;
+ break;
+ }
+
+ offset += n;
+ }
+ }
+
+ if (err != OK) {
+ LOGE("failed to fetch cipher key from '%s'.", keyURI.c_str());
+ return false;
+ }
+
+ mAESKeyForURI.add(keyURI, key);
+ }
+
+ if (AES_set_decrypt_key(key->data(), 128, (AES_KEY *)mAESKey) != 0) {
+ LOGE("failed to set AES decryption key.");
+ return false;
+ }
+
+ AString iv;
+ if (itemMeta->findString("cipher-iv", &iv)) {
+ if ((!iv.startsWith("0x") && !iv.startsWith("0X"))
+ || iv.size() != 16 * 2 + 2) {
+ LOGE("malformed cipher IV '%s'.", iv.c_str());
+ return false;
+ }
+
+ memset(mAESIVec, 0, sizeof(mAESIVec));
+ for (size_t i = 0; i < 16; ++i) {
+ char c1 = tolower(iv.c_str()[2 + 2 * i]);
+ char c2 = tolower(iv.c_str()[3 + 2 * i]);
+ if (!isxdigit(c1) || !isxdigit(c2)) {
+ LOGE("malformed cipher IV '%s'.", iv.c_str());
+ return false;
+ }
+ uint8_t nibble1 = isdigit(c1) ? c1 - '0' : c1 - 'a' + 10;
+ uint8_t nibble2 = isdigit(c2) ? c2 - '0' : c2 - 'a' + 10;
+
+ mAESIVec[i] = nibble1 << 4 | nibble2;
+ }
+ } else {
+ size_t seqNum = mPlaylistIndex + mFirstItemSequenceNumber;
+
+ memset(mAESIVec, 0, sizeof(mAESIVec));
+ mAESIVec[15] = seqNum & 0xff;
+ mAESIVec[14] = (seqNum >> 8) & 0xff;
+ mAESIVec[13] = (seqNum >> 16) & 0xff;
+ mAESIVec[12] = (seqNum >> 24) & 0xff;
+ }
+
+ mStreamEncrypted = true;
+ } else if (!(method == "NONE")) {
+ LOGE("Unsupported cipher method '%s'", method.c_str());
+ return false;
+ }
+
return true;
}
static const ssize_t kHeaderSize = 188;
-ssize_t LiveSource::readAt(off_t offset, void *data, size_t size) {
+ssize_t LiveSource::readAt(off64_t offset, void *data, size_t size) {
CHECK(offset >= mOffsetBias);
offset -= mOffsetBias;
- off_t delta = mSignalDiscontinuity ? kHeaderSize : 0;
+ off64_t delta = mSignalDiscontinuity ? kHeaderSize : 0;
if (offset >= mSourceSize + delta) {
CHECK_EQ(offset, mSourceSize + delta);
@@ -279,6 +478,7 @@ ssize_t LiveSource::readAt(off_t offset, void *data, size_t size) {
return avail;
}
+ bool done = false;
size_t numRead = 0;
while (numRead < size) {
ssize_t n = mSource->readAt(
@@ -289,7 +489,44 @@ ssize_t LiveSource::readAt(off_t offset, void *data, size_t size) {
break;
}
+ if (mStreamEncrypted) {
+ size_t nmod = n % 16;
+ CHECK(nmod == 0);
+
+ sp<ABuffer> tmp = new ABuffer(n);
+
+ AES_cbc_encrypt((const unsigned char *)data + numRead,
+ tmp->data(),
+ n,
+ (const AES_KEY *)mAESKey,
+ mAESIVec,
+ AES_DECRYPT);
+
+ if (mSourceSize == (off64_t)(offset + numRead - delta + n)) {
+ // check for padding at the end of the file.
+
+ size_t pad = tmp->data()[n - 1];
+ CHECK_GT(pad, 0u);
+ CHECK_LE(pad, 16u);
+ CHECK_GE((size_t)n, pad);
+ for (size_t i = 0; i < pad; ++i) {
+ CHECK_EQ((unsigned)tmp->data()[n - 1 - i], pad);
+ }
+
+ n -= pad;
+ mSourceSize -= pad;
+
+ done = true;
+ }
+
+ memcpy((uint8_t *)data + numRead, tmp->data(), n);
+ }
+
numRead += n;
+
+ if (done) {
+ break;
+ }
}
return numRead;
@@ -314,24 +551,45 @@ status_t LiveSource::fetchM3U(const char *url, sp<ABuffer> *out) {
source = mSource;
}
- off_t size;
+ off64_t size;
status_t err = source->getSize(&size);
if (err != OK) {
- return err;
+ size = 65536;
}
sp<ABuffer> buffer = new ABuffer(size);
- size_t offset = 0;
- while (offset < (size_t)size) {
+ buffer->setRange(0, 0);
+
+ for (;;) {
+ size_t bufferRemaining = buffer->capacity() - buffer->size();
+
+ if (bufferRemaining == 0) {
+ bufferRemaining = 32768;
+
+ LOGV("increasing download buffer to %d bytes",
+ buffer->size() + bufferRemaining);
+
+ sp<ABuffer> copy = new ABuffer(buffer->size() + bufferRemaining);
+ memcpy(copy->data(), buffer->data(), buffer->size());
+ copy->setRange(0, buffer->size());
+
+ buffer = copy;
+ }
+
ssize_t n = source->readAt(
- offset, buffer->data() + offset, size - offset);
+ buffer->size(), buffer->data() + buffer->size(),
+ bufferRemaining);
- if (n <= 0) {
- return ERROR_IO;
+ if (n < 0) {
+ return ERROR_IO;
}
- offset += n;
+ if (n == 0) {
+ break;
+ }
+
+ buffer->setRange(0, buffer->size() + (size_t)n);
}
*out = buffer;
@@ -359,19 +617,17 @@ bool LiveSource::seekTo(int64_t seekTimeUs) {
return false;
}
- size_t newPlaylistIndex = mFirstItemSequenceNumber + index;
-
- if (newPlaylistIndex == mPlaylistIndex) {
+ if (index == mPlaylistIndex) {
return false;
}
- mPlaylistIndex = newPlaylistIndex;
+ mPlaylistIndex = index;
+
+ LOGV("seeking to index %lld", index);
switchToNext();
mOffsetBias = 0;
- LOGV("seeking to index %lld", index);
-
return true;
}
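When a segment carries no explicit IV, setupCipher() above derives one from the media sequence number, stored big-endian in the last four bytes of a zeroed 16-byte block, per the HTTP Live Streaming draft. The derivation in isolation, without the OpenSSL decrypt call; the sequence number is made up:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
        const uint32_t seqNum = 0x0001e240;  // assumed media sequence number (123456)

        unsigned char iv[16];
        memset(iv, 0, sizeof(iv));
        iv[15] = seqNum & 0xff;
        iv[14] = (seqNum >> 8) & 0xff;
        iv[13] = (seqNum >> 16) & 0xff;
        iv[12] = (seqNum >> 24) & 0xff;

        printf("IV = ");
        for (size_t i = 0; i < sizeof(iv); ++i) {
            printf("%02x", iv[i]);
        }
        printf("\n");  // prints 24 hex zeros followed by 0001e240
        return 0;
    }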
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 90f3d6d..b166cc3 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -158,6 +158,11 @@ status_t M3UParser::parse(const void *_data, size_t size) {
return ERROR_MALFORMED;
}
err = parseMetaData(line, &mMeta, "media-sequence");
+ } else if (line.startsWith("#EXT-X-KEY")) {
+ if (mIsVariantPlaylist) {
+ return ERROR_MALFORMED;
+ }
+ err = parseCipherInfo(line, &itemMeta);
} else if (line.startsWith("#EXT-X-ENDLIST")) {
mIsComplete = true;
} else if (line.startsWith("#EXTINF")) {
@@ -292,6 +297,57 @@ status_t M3UParser::parseStreamInf(
}
// static
+status_t M3UParser::parseCipherInfo(
+ const AString &line, sp<AMessage> *meta) {
+ ssize_t colonPos = line.find(":");
+
+ if (colonPos < 0) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t offset = colonPos + 1;
+
+ while (offset < line.size()) {
+ ssize_t end = line.find(",", offset);
+ if (end < 0) {
+ end = line.size();
+ }
+
+ AString attr(line, offset, end - offset);
+ attr.trim();
+
+ offset = end + 1;
+
+ ssize_t equalPos = attr.find("=");
+ if (equalPos < 0) {
+ continue;
+ }
+
+ AString key(attr, 0, equalPos);
+ key.trim();
+
+ AString val(attr, equalPos + 1, attr.size() - equalPos - 1);
+ val.trim();
+
+ LOGV("key=%s value=%s", key.c_str(), val.c_str());
+
+ key.tolower();
+
+ if (key == "method" || key == "uri" || key == "iv") {
+ if (meta->get() == NULL) {
+ *meta = new AMessage;
+ }
+
+ key.insert(AString("cipher-"), 0);
+
+ (*meta)->setString(key.c_str(), val.c_str(), val.size());
+ }
+ }
+
+ return OK;
+}
+
+// static
status_t M3UParser::ParseInt32(const char *s, int32_t *x) {
char *end;
long lval = strtol(s, &end, 10);
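parseCipherInfo() splits the attribute list after the colon on commas and keeps the METHOD/URI/IV attributes under "cipher-"-prefixed keys. The same split expressed with the standard library on a made-up tag line; like the parser above, this naive split assumes no commas inside quoted attribute values:

    #include <cctype>
    #include <cstdio>
    #include <map>
    #include <string>

    int main() {
        // Hypothetical playlist line of the kind parseCipherInfo() handles.
        const std::string line =
            "#EXT-X-KEY:METHOD=AES-128,URI=\"https://example.com/key\","
            "IV=0x0123456789abcdef0123456789abcdef";

        std::map<std::string, std::string> meta;

        size_t offset = line.find(':') + 1;
        while (offset < line.size()) {
            size_t end = line.find(',', offset);
            if (end == std::string::npos) {
                end = line.size();
            }

            const std::string attr = line.substr(offset, end - offset);
            offset = end + 1;

            const size_t eq = attr.find('=');
            if (eq == std::string::npos) {
                continue;
            }

            std::string key = attr.substr(0, eq);
            const std::string value = attr.substr(eq + 1);
            for (size_t i = 0; i < key.size(); ++i) {
                key[i] = (char)tolower(key[i]);
            }

            // Only the attributes the playlist parser cares about are kept.
            if (key == "method" || key == "uri" || key == "iv") {
                meta["cipher-" + key] = value;
            }
        }

        for (std::map<std::string, std::string>::const_iterator it = meta.begin();
                it != meta.end(); ++it) {
            printf("%s = %s\n", it->first.c_str(), it->second.c_str());
        }
        return 0;
    }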
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index da340f7..e9131a6 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -769,8 +769,8 @@ ID3::getAlbumArt(size_t *length, String8 *mime) const {
bool ID3::parseV1(const sp<DataSource> &source) {
const size_t V1_TAG_SIZE = 128;
- off_t size;
- if (source->getSize(&size) != OK || size < (off_t)V1_TAG_SIZE) {
+ off64_t size;
+ if (source->getSize(&size) != OK || size < (off64_t)V1_TAG_SIZE) {
return false;
}
diff --git a/media/libstagefright/include/AACEncoder.h b/media/libstagefright/include/AACEncoder.h
index ecc533f..3d5fc60 100644
--- a/media/libstagefright/include/AACEncoder.h
+++ b/media/libstagefright/include/AACEncoder.h
@@ -60,7 +60,7 @@ class AACEncoder: public MediaSource {
kNumSamplesPerFrame = 1024,
};
- int16_t mInputFrame[kNumSamplesPerFrame];
+ int16_t *mInputFrame;
uint8_t mAudioSpecificConfigData[2]; // audio specific data
void *mEncoderHandle;
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 4526bf1..4e63b7a 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -79,7 +79,7 @@ struct AwesomePlayer {
bool isPlaying() const;
- void setISurface(const sp<ISurface> &isurface);
+ void setSurface(const sp<Surface> &surface);
void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
status_t setLooping(bool shouldLoop);
@@ -114,6 +114,11 @@ private:
AUDIO_AT_EOS = 256,
VIDEO_AT_EOS = 512,
AUTO_LOOPING = 1024,
+
+ // We are basically done preparing but are currently buffering
+ // sufficient data to begin playback and finish the preparation phase
+ // for good.
+ PREPARING_CONNECTED = 2048,
};
mutable Mutex mLock;
@@ -124,7 +129,7 @@ private:
bool mQueueStarted;
wp<MediaPlayerBase> mListener;
- sp<ISurface> mISurface;
+ sp<Surface> mSurface;
sp<MediaPlayerBase::AudioSink> mAudioSink;
SystemTimeSource mSystemTimeSource;
@@ -233,6 +238,7 @@ private:
status_t seekTo_l(int64_t timeUs);
status_t pause_l(bool at_eos = false);
void initRenderer_l();
+ void notifyVideoSize_l();
void seekAudioIfNecessary_l();
void cancelPlayerEvents(bool keepBufferingGoing = false);
@@ -266,6 +272,7 @@ private:
bool getBitrate(int64_t *bitrate);
void finishSeekIfNecessary(int64_t videoTimeUs);
+ void ensureCacheIsFetching_l();
AwesomePlayer(const AwesomePlayer &);
AwesomePlayer &operator=(const AwesomePlayer &);
diff --git a/media/libstagefright/include/HTTPStream.h b/media/libstagefright/include/HTTPStream.h
index 35b0865..545cd0c 100644
--- a/media/libstagefright/include/HTTPStream.h
+++ b/media/libstagefright/include/HTTPStream.h
@@ -18,10 +18,9 @@
#define HTTP_STREAM_H_
-#include "stagefright_string.h"
-
#include <sys/types.h>
+#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/MediaErrors.h>
#include <utils/KeyedVector.h>
#include <utils/threads.h>
@@ -50,11 +49,15 @@ public:
static const char *kStatusKey;
bool find_header_value(
- const string &key, string *value) const;
+ const AString &key, AString *value) const;
// Pass a negative value to disable the timeout.
void setReceiveTimeout(int seconds);
+ // Receive a line of data terminated by CRLF, line will be '\0' terminated
+ // _excluding_ the termianting CRLF.
+ status_t receive_line(char *line, size_t size);
+
private:
enum State {
READY,
@@ -66,11 +69,7 @@ private:
Mutex mLock;
int mSocket;
- KeyedVector<string, string> mHeaders;
-
- // Receive a line of data terminated by CRLF, line will be '\0' terminated
- // _excluding_ the termianting CRLF.
- status_t receive_line(char *line, size_t size);
+ KeyedVector<AString, AString> mHeaders;
HTTPStream(const HTTPStream &);
HTTPStream &operator=(const HTTPStream &);
diff --git a/media/libstagefright/include/LiveSource.h b/media/libstagefright/include/LiveSource.h
index 55dd45e..38fe328 100644
--- a/media/libstagefright/include/LiveSource.h
+++ b/media/libstagefright/include/LiveSource.h
@@ -21,6 +21,7 @@
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/DataSource.h>
+#include <utils/KeyedVector.h>
#include <utils/Vector.h>
namespace android {
@@ -34,7 +35,7 @@ struct LiveSource : public DataSource {
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
virtual uint32_t flags() {
return kWantsPrefetching;
@@ -66,20 +67,29 @@ private:
int64_t mLastFetchTimeUs;
sp<NuHTTPDataSource> mSource;
- off_t mSourceSize;
- off_t mOffsetBias;
+ off64_t mSourceSize;
+ off64_t mOffsetBias;
bool mSignalDiscontinuity;
ssize_t mPrevBandwidthIndex;
+ void *mAESKey;
+ unsigned char mAESIVec[16];
+ bool mStreamEncrypted;
+
+ KeyedVector<AString, sp<ABuffer> > mAESKeyForURI;
+
status_t fetchM3U(const char *url, sp<ABuffer> *buffer);
static int SortByBandwidth(const BandwidthItem *a, const BandwidthItem *b);
bool switchToNext();
- bool loadPlaylist(bool fetchMaster);
+ bool loadPlaylist(bool fetchMaster, size_t bandwidthIndex);
void determineSeekability();
+ size_t getBandwidthIndex();
+ bool setupCipher();
+
DISALLOW_EVIL_CONSTRUCTORS(LiveSource);
};
diff --git a/media/libstagefright/include/M3UParser.h b/media/libstagefright/include/M3UParser.h
index bd9eebe..531d184 100644
--- a/media/libstagefright/include/M3UParser.h
+++ b/media/libstagefright/include/M3UParser.h
@@ -66,6 +66,9 @@ private:
static status_t parseStreamInf(
const AString &line, sp<AMessage> *meta);
+ static status_t parseCipherInfo(
+ const AString &line, sp<AMessage> *meta);
+
static status_t ParseInt32(const char *s, int32_t *x);
DISALLOW_EVIL_CONSTRUCTORS(M3UParser);
diff --git a/media/libstagefright/include/MP3Extractor.h b/media/libstagefright/include/MP3Extractor.h
index 30136e7..728980e 100644
--- a/media/libstagefright/include/MP3Extractor.h
+++ b/media/libstagefright/include/MP3Extractor.h
@@ -24,6 +24,7 @@ namespace android {
struct AMessage;
class DataSource;
+struct MP3Seeker;
class String8;
class MP3Extractor : public MediaExtractor {
@@ -37,15 +38,19 @@ public:
virtual sp<MetaData> getMetaData();
+ static bool get_mp3_frame_size(
+ uint32_t header, size_t *frame_size,
+ int *out_sampling_rate = NULL, int *out_channels = NULL,
+ int *out_bitrate = NULL);
+
private:
status_t mInitCheck;
sp<DataSource> mDataSource;
- off_t mFirstFramePos;
+ off64_t mFirstFramePos;
sp<MetaData> mMeta;
uint32_t mFixedHeader;
- int32_t mByteNumber; // total number of bytes in this MP3
- char mTableOfContents[99]; // TOC entries in XING header
+ sp<MP3Seeker> mSeeker;
MP3Extractor(const MP3Extractor &);
MP3Extractor &operator=(const MP3Extractor &);
diff --git a/media/libstagefright/include/MP3Seeker.h b/media/libstagefright/include/MP3Seeker.h
new file mode 100644
index 0000000..599542e
--- /dev/null
+++ b/media/libstagefright/include/MP3Seeker.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MP3_SEEKER_H_
+
+#define MP3_SEEKER_H_
+
+#include <media/stagefright/foundation/ABase.h>
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct MP3Seeker : public RefBase {
+ MP3Seeker() {}
+
+ virtual bool getDuration(int64_t *durationUs) = 0;
+
+ // Given a requested seek time in "*timeUs", find the byte offset closest
+ // to that position and return it in "*pos". Update "*timeUs" to reflect
+ // the actual time that seek point represents.
+ virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos) = 0;
+
+protected:
+ virtual ~MP3Seeker() {}
+
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(MP3Seeker);
+};
+
+} // namespace android
+
+#endif // MP3_SEEKER_H_
+
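MP3Seeker pins down just the two queries a seek needs, with XINGSeeker above as one concrete implementation. Purely as an illustration of the contract, a hypothetical constant-bitrate seeker could look like the following standalone sketch; it mirrors the interface rather than deriving from it, and uses int64_t where the real interface uses off64_t:

    #include <stdint.h>
    #include <stdio.h>

    // Hypothetical constant-bitrate seeker mirroring the MP3Seeker contract.
    struct CBRSeeker {
        CBRSeeker(int64_t firstFramePos, int64_t fileSize, int32_t bitrateBps)
            : mFirstFramePos(firstFramePos),
              mAudioBytes(fileSize - firstFramePos),
              mBitrateBps(bitrateBps) {
        }

        bool getDuration(int64_t *durationUs) {
            if (mBitrateBps <= 0 || mAudioBytes <= 0) {
                return false;
            }
            *durationUs = 8000000LL * mAudioBytes / mBitrateBps;
            return true;
        }

        // Map *timeUs to a byte position in *pos and snap *timeUs back to the
        // time that position actually represents, as the interface asks.
        bool getOffsetForTime(int64_t *timeUs, int64_t *pos) {
            if (mBitrateBps <= 0) {
                return false;
            }
            *pos = mFirstFramePos + (*timeUs * mBitrateBps) / 8000000LL;
            *timeUs = 8000000LL * (*pos - mFirstFramePos) / mBitrateBps;
            return true;
        }

    private:
        int64_t mFirstFramePos;
        int64_t mAudioBytes;
        int32_t mBitrateBps;
    };

    int main() {
        CBRSeeker seeker(417 /* assumed first frame offset */,
                         4 * 1024 * 1024 /* assumed file size */,
                         128000 /* assumed bitrate */);

        int64_t durationUs;
        if (seeker.getDuration(&durationUs)) {
            printf("duration: %lld us\n", (long long)durationUs);
        }

        int64_t timeUs = 60 * 1000000LL, pos;
        if (seeker.getOffsetForTime(&timeUs, &pos)) {
            printf("seek to %lld us -> offset %lld\n",
                   (long long)timeUs, (long long)pos);
        }
        return 0;
    }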
diff --git a/media/libstagefright/include/MPEG2TSExtractor.h b/media/libstagefright/include/MPEG2TSExtractor.h
index d83b538..58401d4 100644
--- a/media/libstagefright/include/MPEG2TSExtractor.h
+++ b/media/libstagefright/include/MPEG2TSExtractor.h
@@ -43,7 +43,7 @@ private:
Vector<sp<AnotherPacketSource> > mSourceImpls;
- off_t mOffset;
+ off64_t mOffset;
void init();
status_t feedMore();
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index bc2e4dc..04e8a6a 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -67,8 +67,8 @@ private:
Vector<uint32_t> mPath;
status_t readMetaData();
- status_t parseChunk(off_t *offset, int depth);
- status_t parseMetaData(off_t offset, size_t size);
+ status_t parseChunk(off64_t *offset, int depth);
+ status_t parseMetaData(off64_t offset, size_t size);
status_t updateAudioTrackInfoFromESDS_MPEG4Audio(
const void *esds_data, size_t esds_size);
@@ -86,9 +86,9 @@ private:
SINF *mFirstSINF;
bool mIsDrm;
- status_t parseDrmSINF(off_t *offset, off_t data_offset);
+ status_t parseDrmSINF(off64_t *offset, off64_t data_offset);
- status_t parseTrackHeader(off_t data_offset, off_t data_size);
+ status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
MPEG4Extractor(const MPEG4Extractor &);
MPEG4Extractor &operator=(const MPEG4Extractor &);
diff --git a/media/libstagefright/include/NuCachedSource2.h b/media/libstagefright/include/NuCachedSource2.h
index 1fb2088..f0f7daf 100644
--- a/media/libstagefright/include/NuCachedSource2.h
+++ b/media/libstagefright/include/NuCachedSource2.h
@@ -32,11 +32,14 @@ struct NuCachedSource2 : public DataSource {
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
+ virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
+ virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+ virtual String8 getUri();
////////////////////////////////////////////////////////////////////////////
size_t cachedSize();
@@ -45,6 +48,8 @@ struct NuCachedSource2 : public DataSource {
void suspend();
void clearCacheAndResume();
+ void resumeFetchingIfNecessary();
+
protected:
virtual ~NuCachedSource2();
@@ -54,7 +59,7 @@ private:
enum {
kPageSize = 65536,
kHighWaterThreshold = 5 * 1024 * 1024,
- kLowWaterThreshold = 512 * 1024,
+ kLowWaterThreshold = 1024 * 1024,
// Read data after a 15 sec timeout whether we're actively
// fetching or not.
@@ -76,9 +81,9 @@ private:
Condition mCondition;
PageCache *mCache;
- off_t mCacheOffset;
+ off64_t mCacheOffset;
status_t mFinalStatus;
- off_t mLastAccessPos;
+ off64_t mLastAccessPos;
sp<AMessage> mAsyncResult;
bool mFetching;
int64_t mLastFetchTimeUs;
@@ -90,11 +95,11 @@ private:
void onSuspend();
void fetchInternal();
- ssize_t readInternal(off_t offset, void *data, size_t size);
- status_t seekInternal_l(off_t offset);
+ ssize_t readInternal(off64_t offset, void *data, size_t size);
+ status_t seekInternal_l(off64_t offset);
size_t approxDataRemaining_l(bool *eos);
- void restartPrefetcherIfNecessary_l();
+ void restartPrefetcherIfNecessary_l(bool ignoreLowWaterThreshold = false);
DISALLOW_EVIL_CONSTRUCTORS(NuCachedSource2);
};
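
This change raises kLowWaterThreshold from 512 KiB to 1 MiB and lets callers bypass it via restartPrefetcherIfNecessary_l(bool ignoreLowWaterThreshold). The sketch below illustrates the kind of water-mark decision such a prefetching cache makes; the constants mirror the header above, but the decision logic itself is an assumption rather than a copy of NuCachedSource2.cpp.

// Illustrative only: a standalone sketch of a water-mark test for a
// prefetching cache. Not part of this patch.
#include <cstddef>

enum {
    kHighWaterThresholdSketch = 5 * 1024 * 1024,  // stop fetching above this
    kLowWaterThresholdSketch  = 1024 * 1024       // resume fetching below this
};

// Returns true if the prefetcher should be (re)started given how much
// unread data is still cached ahead of the reader.
static bool shouldRestartPrefetcher(
        size_t cachedBytesRemaining,
        bool fetching,
        bool ignoreLowWaterThreshold) {
    if (fetching) {
        // Already fetching; pausing at the high water mark is a separate
        // decision, so there is nothing to restart here.
        return false;
    }
    if (ignoreLowWaterThreshold) {
        // e.g. right after a seek the reader wants data immediately.
        return true;
    }
    return cachedBytesRemaining < kLowWaterThresholdSketch;
}
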
diff --git a/media/libstagefright/include/NuHTTPDataSource.h b/media/libstagefright/include/NuHTTPDataSource.h
index 8593a91..c8e93be 100644
--- a/media/libstagefright/include/NuHTTPDataSource.h
+++ b/media/libstagefright/include/NuHTTPDataSource.h
@@ -3,6 +3,7 @@
#define NU_HTTP_DATA_SOURCE_H_
#include <media/stagefright/DataSource.h>
+#include <utils/List.h>
#include <utils/String8.h>
#include <utils/threads.h>
@@ -16,16 +17,24 @@ struct NuHTTPDataSource : public DataSource {
status_t connect(
const char *uri,
const KeyedVector<String8, String8> *headers = NULL,
- off_t offset = 0);
+ off64_t offset = 0);
void disconnect();
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
+ // Returns true if bandwidth could successfully be estimated,
+ // false otherwise.
+ bool estimateBandwidth(int32_t *bandwidth_bps);
+
+ virtual DecryptHandle* DrmInitialization(DrmManagerClient *client);
+ virtual void getDrmInfo(DecryptHandle **handle, DrmManagerClient **client);
+ virtual String8 getUri();
+
protected:
virtual ~NuHTTPDataSource();
@@ -36,6 +45,11 @@ private:
CONNECTED
};
+ struct BandwidthEntry {
+ int64_t mDelayUs;
+ size_t mNumBytes;
+ };
+
Mutex mLock;
State mState;
@@ -44,21 +58,39 @@ private:
unsigned mPort;
String8 mPath;
String8 mHeaders;
+ String8 mUri;
HTTPStream mHTTP;
- off_t mOffset;
- off_t mContentLength;
+ off64_t mOffset;
+ off64_t mContentLength;
bool mContentLengthValid;
+ bool mHasChunkedTransferEncoding;
+
+ // The number of data bytes in the current chunk before any subsequent
+ // chunk header (or -1 if no more chunks).
+ ssize_t mChunkDataBytesLeft;
+
+ List<BandwidthEntry> mBandwidthHistory;
+ size_t mNumBandwidthHistoryItems;
+ int64_t mTotalTransferTimeUs;
+ size_t mTotalTransferBytes;
+
+ DecryptHandle *mDecryptHandle;
+ DrmManagerClient *mDrmManagerClient;
status_t connect(
- const char *uri, const String8 &headers, off_t offset);
+ const char *uri, const String8 &headers, off64_t offset);
status_t connect(
const char *host, unsigned port, const char *path,
const String8 &headers,
- off_t offset);
+ off64_t offset);
+
+ // Read up to "size" bytes of data, respecting the transfer encoding.
+ ssize_t internalRead(void *data, size_t size);
void applyTimeoutResponse();
+ void addBandwidthMeasurement_l(size_t numBytes, int64_t delayUs);
static void MakeFullHeaders(
const KeyedVector<String8, String8> *overrides,
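
The new bandwidth-estimation members (mBandwidthHistory, mNumBandwidthHistoryItems, mTotalTransferTimeUs, mTotalTransferBytes, addBandwidthMeasurement_l, estimateBandwidth) suggest a sliding window of {delay, bytes} samples with running totals. The standalone sketch below shows one plausible way to maintain such a window and derive bits per second from it; the window size and arithmetic are assumptions, not taken from NuHTTPDataSource.cpp.

// Illustrative only: a standalone sliding-window bandwidth estimator
// consistent with the fields declared above. Not part of this patch.
#include <cstdint>
#include <cstddef>
#include <list>

struct BandwidthSample {
    int64_t mDelayUs;
    size_t mNumBytes;
};

struct BandwidthEstimatorSketch {
    std::list<BandwidthSample> mHistory;
    int64_t mTotalTransferTimeUs = 0;
    size_t mTotalTransferBytes = 0;

    // Record one completed read of numBytes that took delayUs, keeping
    // only the most recent kMaxEntries measurements.
    void addMeasurement(size_t numBytes, int64_t delayUs) {
        static const size_t kMaxEntries = 100;  // assumed window size

        mHistory.push_back({delayUs, numBytes});
        mTotalTransferTimeUs += delayUs;
        mTotalTransferBytes += numBytes;

        if (mHistory.size() > kMaxEntries) {
            const BandwidthSample &oldest = mHistory.front();
            mTotalTransferTimeUs -= oldest.mDelayUs;
            mTotalTransferBytes -= oldest.mNumBytes;
            mHistory.pop_front();
        }
    }

    // Returns true and fills *bandwidth_bps if there is anything to
    // average over, false otherwise.
    bool estimateBandwidth(int32_t *bandwidth_bps) const {
        if (mTotalTransferTimeUs <= 0) {
            return false;
        }
        // bytes * 8 bits * 1,000,000 us/s / elapsed us = bits per second.
        *bandwidth_bps = static_cast<int32_t>(
                (mTotalTransferBytes * 8000000ll) / mTotalTransferTimeUs);
        return true;
    }
};
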
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 72ab5aa..5fed98a 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -59,10 +59,20 @@ public:
node_id node, OMX_INDEXTYPE index,
const void *params, size_t size);
+ virtual status_t enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
+ virtual status_t storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable);
+
virtual status_t useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer);
+ virtual status_t useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data);
@@ -87,14 +97,6 @@ public:
const char *parameter_name,
OMX_INDEXTYPE *index);
- virtual sp<IOMXRenderer> createRenderer(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees);
-
virtual void binderDied(const wp<IBinder> &the_late_who);
OMX_ERRORTYPE OnEvent(
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index b5b31ac..86c102c 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -49,10 +49,17 @@ struct OMXNodeInstance {
status_t getConfig(OMX_INDEXTYPE index, void *params, size_t size);
status_t setConfig(OMX_INDEXTYPE index, const void *params, size_t size);
+ status_t enableGraphicBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+ status_t storeMetaDataInBuffers(OMX_U32 portIndex, OMX_BOOL enable);
+
status_t useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer);
+ status_t useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
+ OMX::buffer_id *buffer);
+
status_t allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data);
@@ -125,4 +132,3 @@ private:
} // namespace android
#endif // OMX_NODE_INSTANCE_H_
-
diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h
index a5eaed9..b5a043c 100644
--- a/media/libstagefright/include/SampleIterator.h
+++ b/media/libstagefright/include/SampleIterator.h
@@ -27,7 +27,7 @@ struct SampleIterator {
uint32_t getChunkIndex() const { return mCurrentChunkIndex; }
uint32_t getDescIndex() const { return mChunkDesc; }
- off_t getSampleOffset() const { return mCurrentSampleOffset; }
+ off64_t getSampleOffset() const { return mCurrentSampleOffset; }
size_t getSampleSize() const { return mCurrentSampleSize; }
uint32_t getSampleTime() const { return mCurrentSampleTime; }
@@ -48,7 +48,7 @@ private:
uint32_t mChunkDesc;
uint32_t mCurrentChunkIndex;
- off_t mCurrentChunkOffset;
+ off64_t mCurrentChunkOffset;
Vector<size_t> mCurrentChunkSampleSizes;
uint32_t mTimeToSampleIndex;
@@ -58,13 +58,13 @@ private:
uint32_t mTTSDuration;
uint32_t mCurrentSampleIndex;
- off_t mCurrentSampleOffset;
+ off64_t mCurrentSampleOffset;
size_t mCurrentSampleSize;
uint32_t mCurrentSampleTime;
void reset();
status_t findChunkRange(uint32_t sampleIndex);
- status_t getChunkOffset(uint32_t chunk, off_t *offset);
+ status_t getChunkOffset(uint32_t chunk, off64_t *offset);
status_t findSampleTime(uint32_t sampleIndex, uint32_t *time);
SampleIterator(const SampleIterator &);
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index f830690..c5e8136 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -36,17 +36,17 @@ public:
// type can be 'stco' or 'co64'.
status_t setChunkOffsetParams(
- uint32_t type, off_t data_offset, size_t data_size);
+ uint32_t type, off64_t data_offset, size_t data_size);
- status_t setSampleToChunkParams(off_t data_offset, size_t data_size);
+ status_t setSampleToChunkParams(off64_t data_offset, size_t data_size);
// type can be 'stsz' or 'stz2'.
status_t setSampleSizeParams(
- uint32_t type, off_t data_offset, size_t data_size);
+ uint32_t type, off64_t data_offset, size_t data_size);
- status_t setTimeToSampleParams(off_t data_offset, size_t data_size);
+ status_t setTimeToSampleParams(off64_t data_offset, size_t data_size);
- status_t setSyncSampleParams(off_t data_offset, size_t data_size);
+ status_t setSyncSampleParams(off64_t data_offset, size_t data_size);
////////////////////////////////////////////////////////////////////////////
@@ -58,7 +58,7 @@ public:
status_t getMetaDataForSample(
uint32_t sampleIndex,
- off_t *offset,
+ off64_t *offset,
size_t *size,
uint32_t *decodingTime,
bool *isSyncSample = NULL);
@@ -89,14 +89,14 @@ private:
sp<DataSource> mDataSource;
Mutex mLock;
- off_t mChunkOffsetOffset;
+ off64_t mChunkOffsetOffset;
uint32_t mChunkOffsetType;
uint32_t mNumChunkOffsets;
- off_t mSampleToChunkOffset;
+ off64_t mSampleToChunkOffset;
uint32_t mNumSampleToChunkOffsets;
- off_t mSampleSizeOffset;
+ off64_t mSampleSizeOffset;
uint32_t mSampleSizeFieldSize;
uint32_t mDefaultSampleSize;
uint32_t mNumSampleSizes;
@@ -104,7 +104,7 @@ private:
uint32_t mTimeToSampleCount;
uint32_t *mTimeToSample;
- off_t mSyncSampleOffset;
+ off64_t mSyncSampleOffset;
uint32_t mNumSyncSamples;
uint32_t *mSyncSamples;
size_t mLastSyncSampleIndex;
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 25c9df7..9cafc68 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -19,37 +19,39 @@
#define SOFTWARE_RENDERER_H_
#include <media/stagefright/ColorConverter.h>
-#include <media/stagefright/VideoRenderer.h>
#include <utils/RefBase.h>
namespace android {
-class ISurface;
-class MemoryHeapBase;
+class Surface;
-class SoftwareRenderer : public VideoRenderer {
+class SoftwareRenderer {
public:
SoftwareRenderer(
OMX_COLOR_FORMATTYPE colorFormat,
- const sp<ISurface> &surface,
+ const sp<Surface> &surface,
size_t displayWidth, size_t displayHeight,
size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees = 0);
+ int32_t rotationDegrees);
- virtual ~SoftwareRenderer();
+ ~SoftwareRenderer();
- virtual void render(
+ void render(
const void *data, size_t size, void *platformPrivate);
private:
+ enum YUVMode {
+ None,
+ YUV420ToYUV420sp,
+ YUV420spToYUV420sp,
+ };
+
OMX_COLOR_FORMATTYPE mColorFormat;
- ColorConverter mConverter;
- sp<ISurface> mISurface;
+ ColorConverter *mConverter;
+ YUVMode mYUVMode;
+ sp<Surface> mSurface;
size_t mDisplayWidth, mDisplayHeight;
size_t mDecodedWidth, mDecodedHeight;
- size_t mFrameSize;
- sp<MemoryHeapBase> mMemoryHeap;
- int mIndex;
SoftwareRenderer(const SoftwareRenderer &);
SoftwareRenderer &operator=(const SoftwareRenderer &);
diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h
index 88164b3..8928a4a 100644
--- a/media/libstagefright/include/ThrottledSource.h
+++ b/media/libstagefright/include/ThrottledSource.h
@@ -30,9 +30,9 @@ struct ThrottledSource : public DataSource {
virtual status_t initCheck() const;
- virtual ssize_t readAt(off_t offset, void *data, size_t size);
+ virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off_t *size);
+ virtual status_t getSize(off64_t *size);
virtual uint32_t flags();
private:
diff --git a/media/libstagefright/omx/OMXRenderer.h b/media/libstagefright/include/VBRISeeker.h
index 4d194ce..1a2bf9f 100644
--- a/media/libstagefright/omx/OMXRenderer.h
+++ b/media/libstagefright/include/VBRISeeker.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2009 The Android Open Source Project
+ * Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,31 +14,37 @@
* limitations under the License.
*/
-#ifndef OMX_RENDERER_H_
+#ifndef VBRI_SEEKER_H_
-#define OMX_RENDERER_H_
+#define VBRI_SEEKER_H_
-#include <media/IOMX.h>
+#include "include/MP3Seeker.h"
+
+#include <utils/Vector.h>
namespace android {
-class VideoRenderer;
+struct DataSource;
-class OMXRenderer : public BnOMXRenderer {
-public:
- // Assumes ownership of "impl".
- OMXRenderer(VideoRenderer *impl);
- virtual ~OMXRenderer();
+struct VBRISeeker : public MP3Seeker {
+ static sp<VBRISeeker> CreateFromSource(
+ const sp<DataSource> &source, off64_t post_id3_pos);
- virtual void render(IOMX::buffer_id buffer);
+ virtual bool getDuration(int64_t *durationUs);
+ virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
private:
- VideoRenderer *mImpl;
+ off64_t mBasePos;
+ int64_t mDurationUs;
+ Vector<uint32_t> mSegments;
+
+ VBRISeeker();
- OMXRenderer(const OMXRenderer &);
- OMXRenderer &operator=(const OMXRenderer &);
+ DISALLOW_EVIL_CONSTRUCTORS(VBRISeeker);
};
} // namespace android
-#endif // OMX_RENDERER_H_
+#endif // VBRI_SEEKER_H_
+
+
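
VBRISeeker keeps a base position, a duration, and a vector of segment sizes (mSegments). In the VBRI scheme each table entry is the byte size of one segment, and the segments are commonly treated as equal slices of the total duration, so a seek walks the table accumulating bytes until the requested time is reached. The sketch below illustrates that general approach only; it is not the code in VBRISeeker.cpp.

// Illustrative only: generic VBRI table-of-contents seek math.
// Not part of this patch.
#include <cstdint>
#include <vector>

static int64_t offsetForTimeVBRI(
        const std::vector<uint32_t> &segments,  // byte size per segment
        int64_t basePos,                        // offset of first segment
        int64_t durationUs,
        int64_t *timeUs) {
    if (segments.empty() || durationUs <= 0) {
        return basePos;
    }

    const int64_t segmentDurationUs = durationUs / segments.size();

    int64_t pos = basePos;
    int64_t nowUs = 0;
    for (size_t i = 0; i < segments.size(); ++i) {
        if (nowUs + segmentDurationUs > *timeUs) {
            break;
        }
        pos += segments[i];
        nowUs += segmentDurationUs;
    }

    *timeUs = nowUs;  // report the time the chosen seek point represents
    return pos;
}
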
diff --git a/media/libstagefright/include/WAVExtractor.h b/media/libstagefright/include/WAVExtractor.h
index df6d3e7..9de197f 100644
--- a/media/libstagefright/include/WAVExtractor.h
+++ b/media/libstagefright/include/WAVExtractor.h
@@ -48,7 +48,7 @@ private:
uint16_t mNumChannels;
uint32_t mSampleRate;
uint16_t mBitsPerSample;
- off_t mDataOffset;
+ off64_t mDataOffset;
size_t mDataSize;
sp<MetaData> mTrackMeta;
diff --git a/media/libstagefright/include/WVMExtractor.h b/media/libstagefright/include/WVMExtractor.h
new file mode 100644
index 0000000..0da45a8
--- /dev/null
+++ b/media/libstagefright/include/WVMExtractor.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WVM_EXTRACTOR_H_
+
+#define WVM_EXTRACTOR_H_
+
+#include <media/stagefright/MediaExtractor.h>
+
+namespace android {
+
+class DataSource;
+
+class WVMExtractor : public MediaExtractor {
+public:
+ WVMExtractor(const sp<DataSource> &source);
+
+ virtual size_t countTracks();
+ virtual sp<MediaSource> getTrack(size_t index);
+ virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
+ virtual sp<MetaData> getMetaData();
+
+protected:
+ virtual ~WVMExtractor();
+
+private:
+ sp<DataSource> mDataSource;
+ sp<MediaExtractor> mImpl;
+
+ WVMExtractor(const WVMExtractor &);
+ WVMExtractor &operator=(const WVMExtractor &);
+
+};
+
+} // namespace android
+
+#endif // WVM_EXTRACTOR_H_
+
diff --git a/media/libstagefright/include/XINGSeeker.h b/media/libstagefright/include/XINGSeeker.h
new file mode 100644
index 0000000..d5a484e
--- /dev/null
+++ b/media/libstagefright/include/XINGSeeker.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef XING_SEEKER_H_
+
+#define XING_SEEKER_H_
+
+#include "include/MP3Seeker.h"
+
+namespace android {
+
+struct DataSource;
+
+struct XINGSeeker : public MP3Seeker {
+ static sp<XINGSeeker> CreateFromSource(
+ const sp<DataSource> &source, off64_t first_frame_pos);
+
+ virtual bool getDuration(int64_t *durationUs);
+ virtual bool getOffsetForTime(int64_t *timeUs, off64_t *pos);
+
+private:
+ int64_t mFirstFramePos;
+ int64_t mDurationUs;
+ int32_t mSizeBytes;
+
+ // TOC entries in XING header. Skip the first one since it's always 0.
+ char mTableOfContents[99];
+
+ XINGSeeker();
+
+ DISALLOW_EVIL_CONSTRUCTORS(XINGSeeker);
+};
+
+} // namespace android
+
+#endif // XING_SEEKER_H_
+
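
XINGSeeker stores 99 TOC bytes (entry 0 of the 100-entry XING table is always 0, as the comment notes) plus the stream size and duration. The conventional XING seek converts the requested time to a percentage, interpolates between adjacent TOC entries, and scales the result by the stream size in 1/256 units. The sketch below shows that conventional lookup as a standalone function; it is an illustration, not the code in XINGSeeker.cpp.

// Illustrative only: the conventional XING TOC lookup. Not part of
// this patch.
#include <cstdint>

static int64_t offsetForTimeXING(
        const unsigned char toc[100],  // toc[0] == 0 by definition
        int64_t firstFramePos,
        int64_t sizeBytes,
        int64_t durationUs,
        int64_t timeUs) {
    double percent = (100.0 * timeUs) / durationUs;
    if (percent < 0.0) percent = 0.0;
    if (percent > 99.0) percent = 99.0;

    const int p = static_cast<int>(percent);

    // Interpolate between toc[p] and toc[p + 1] (or 256 past the end);
    // each TOC value is a file position in 1/256 units.
    const double fa = toc[p];
    const double fb = (p < 99) ? toc[p + 1] : 256.0;
    const double fx = fa + (fb - fa) * (percent - p);

    return firstFramePos + static_cast<int64_t>((fx / 256.0) * sizeBytes);
}
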
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index 62cfc36..3aeb07f 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -24,6 +24,17 @@ namespace android {
struct ABitReader;
+enum {
+ kAVCProfileBaseline = 0x42,
+ kAVCProfileMain = 0x4d,
+ kAVCProfileExtended = 0x58,
+ kAVCProfileHigh = 0x64,
+ kAVCProfileHigh10 = 0x6e,
+ kAVCProfileHigh422 = 0x7a,
+ kAVCProfileHigh444 = 0xf4,
+ kAVCProfileCAVLC444Intra = 0x2c
+};
+
void FindAVCDimensions(
const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height);
@@ -39,6 +50,8 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit);
bool IsIDR(const sp<ABuffer> &accessUnit);
+const char *AVCProfileToString(uint8_t profile);
+
} // namespace android
#endif // AVC_UTILS_H_
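
AVCProfileToString() pairs with the profile_idc constants added above. A plausible implementation is a plain switch over those values; avc_utils.cpp is touched by this patch but not shown here, so the strings below are illustrative only.

// Illustrative only: one plausible mapping from the profile constants
// above to readable names. Not part of this patch.
#include <cstdint>

static const char *AVCProfileToStringSketch(uint8_t profile) {
    switch (profile) {
        case 0x42: return "Baseline";           // kAVCProfileBaseline
        case 0x4d: return "Main";               // kAVCProfileMain
        case 0x58: return "Extended";           // kAVCProfileExtended
        case 0x64: return "High";               // kAVCProfileHigh
        case 0x6e: return "High 10";            // kAVCProfileHigh10
        case 0x7a: return "High 4:2:2";         // kAVCProfileHigh422
        case 0xf4: return "High 4:4:4";         // kAVCProfileHigh444
        case 0x2c: return "CAVLC 4:4:4 Intra";  // kAVCProfileCAVLC444Intra
        default:   return "Unknown";
    }
}
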
diff --git a/media/libstagefright/include/stagefright_string.h b/media/libstagefright/include/stagefright_string.h
deleted file mode 100644
index 5dc7116..0000000
--- a/media/libstagefright/include/stagefright_string.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef STRING_H_
-
-#define STRING_H_
-
-#include <utils/String8.h>
-
-namespace android {
-
-class string {
-public:
- typedef size_t size_type;
- static size_type npos;
-
- string();
- string(const char *s);
- string(const char *s, size_t length);
- string(const string &from, size_type start, size_type length = npos);
-
- const char *c_str() const;
- size_type size() const;
-
- void clear();
- void erase(size_type from, size_type length);
-
- size_type find(char c) const;
-
- bool operator<(const string &other) const;
- bool operator==(const string &other) const;
-
- string &operator+=(char c);
-
-private:
- String8 mString;
-};
-
-} // namespace android
-
-#endif // STRING_H_
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 7c7d69e..e0ac49f 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -22,13 +22,15 @@
#include "mkvparser.hpp"
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/Utils.h>
#include <utils/String8.h>
namespace android {
@@ -56,7 +58,7 @@ struct DataSourceReader : public mkvparser::IMkvReader {
}
virtual int Length(long long* total, long long* available) {
- off_t size;
+ off64_t size;
if (mSource->getSize(&size) != OK) {
return -1;
}
@@ -81,46 +83,6 @@ private:
////////////////////////////////////////////////////////////////////////////////
-#include <ctype.h>
-static void hexdump(const void *_data, size_t size) {
- const uint8_t *data = (const uint8_t *)_data;
- size_t offset = 0;
- while (offset < size) {
- printf("0x%04x ", offset);
-
- size_t n = size - offset;
- if (n > 16) {
- n = 16;
- }
-
- for (size_t i = 0; i < 16; ++i) {
- if (i == 8) {
- printf(" ");
- }
-
- if (offset + i < size) {
- printf("%02x ", data[offset + i]);
- } else {
- printf(" ");
- }
- }
-
- printf(" ");
-
- for (size_t i = 0; i < n; ++i) {
- if (isprint(data[offset + i])) {
- printf("%c", data[offset + i]);
- } else {
- printf(".");
- }
- }
-
- printf("\n");
-
- offset += 16;
- }
-}
-
struct BlockIterator {
BlockIterator(mkvparser::Segment *segment, unsigned long trackNum);
@@ -167,6 +129,7 @@ private:
size_t mTrackIndex;
Type mType;
BlockIterator mBlockIter;
+ size_t mNALSizeLen; // for type AVC
status_t advance();
@@ -180,13 +143,26 @@ MatroskaSource::MatroskaSource(
mTrackIndex(index),
mType(OTHER),
mBlockIter(mExtractor->mSegment,
- mExtractor->mTracks.itemAt(index).mTrackNum) {
+ mExtractor->mTracks.itemAt(index).mTrackNum),
+ mNALSizeLen(0) {
+ sp<MetaData> meta = mExtractor->mTracks.itemAt(index).mMeta;
+
const char *mime;
- CHECK(mExtractor->mTracks.itemAt(index).mMeta->
- findCString(kKeyMIMEType, &mime));
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
mType = AVC;
+
+ uint32_t dummy;
+ const uint8_t *avcc;
+ size_t avccSize;
+ CHECK(meta->findData(
+ kKeyAVCC, &dummy, (const void **)&avcc, &avccSize));
+
+ CHECK_GE(avccSize, 5u);
+
+ mNALSizeLen = 1 + (avcc[4] & 3);
+ LOGV("mNALSizeLen = %d", mNALSizeLen);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
mType = AAC;
}
@@ -252,7 +228,7 @@ void BlockIterator::reset() {
}
void BlockIterator::seek(int64_t seekTimeUs) {
- mCluster = mSegment->GetCluster(seekTimeUs * 1000ll);
+ mCluster = mSegment->FindCluster(seekTimeUs * 1000ll);
mBlockEntry = mCluster != NULL ? mCluster->GetFirst() : NULL;
while (!eos() && block()->GetTrackNumber() != mTrackNum) {
@@ -276,6 +252,10 @@ int64_t BlockIterator::blockTimeUs() const {
////////////////////////////////////////////////////////////////////////////////
+static unsigned U24_AT(const uint8_t *ptr) {
+ return ptr[0] << 16 | ptr[1] << 8 | ptr[2];
+}
+
status_t MatroskaSource::read(
MediaBuffer **out, const ReadOptions *options) {
*out = NULL;
@@ -286,6 +266,7 @@ status_t MatroskaSource::read(
mBlockIter.seek(seekTimeUs);
}
+again:
if (mBlockIter.eos()) {
return ERROR_END_OF_STREAM;
}
@@ -294,38 +275,70 @@ status_t MatroskaSource::read(
size_t size = block->GetSize();
int64_t timeUs = mBlockIter.blockTimeUs();
- MediaBuffer *buffer = new MediaBuffer(size + 2);
+ // In the case of AVC content, each NAL unit is prefixed by
+ // mNALSizeLen bytes of length. We want to prefix the data with
+ // a four-byte 0x00000001 startcode instead of the length prefix.
+ // mNALSizeLen ranges from 1 through 4 bytes, so add an extra
+ // 3 bytes of padding to the buffer start.
+ static const size_t kPadding = 3;
+
+ MediaBuffer *buffer = new MediaBuffer(size + kPadding);
buffer->meta_data()->setInt64(kKeyTime, timeUs);
buffer->meta_data()->setInt32(kKeyIsSyncFrame, block->IsKey());
long res = block->Read(
- mExtractor->mReader, (unsigned char *)buffer->data() + 2);
+ mExtractor->mReader, (unsigned char *)buffer->data() + kPadding);
if (res != 0) {
return ERROR_END_OF_STREAM;
}
- buffer->set_range(2, size);
+ buffer->set_range(kPadding, size);
if (mType == AVC) {
- CHECK(size >= 2);
+ CHECK_GE(size, mNALSizeLen);
uint8_t *data = (uint8_t *)buffer->data();
- unsigned NALsize = data[2] << 8 | data[3];
- CHECK_EQ(size, NALsize + 2);
+ size_t NALsize;
+ switch (mNALSizeLen) {
+ case 1: NALsize = data[kPadding]; break;
+ case 2: NALsize = U16_AT(&data[kPadding]); break;
+ case 3: NALsize = U24_AT(&data[kPadding]); break;
+ case 4: NALsize = U32_AT(&data[kPadding]); break;
+ default:
+ TRESPASS();
+ }
- memcpy(data, "\x00\x00\x00\x01", 4);
- buffer->set_range(0, size + 2);
+ CHECK_GE(size, NALsize + mNALSizeLen);
+ if (size > NALsize + mNALSizeLen) {
+ LOGW("discarding %d bytes of data.", size - NALsize - mNALSizeLen);
+ }
+
+ // actual data starts at &data[kPadding + mNALSizeLen]
+
+ memcpy(&data[mNALSizeLen - 1], "\x00\x00\x00\x01", 4);
+ buffer->set_range(mNALSizeLen - 1, NALsize + 4);
} else if (mType == AAC) {
// There's strange junk at the beginning...
- const uint8_t *data = (const uint8_t *)buffer->data() + 2;
+ const uint8_t *data = (const uint8_t *)buffer->data() + kPadding;
+
+ // hexdump(data, size);
+
size_t offset = 0;
while (offset < size && data[offset] != 0x21) {
++offset;
}
- buffer->set_range(2 + offset, size - offset);
+
+ if (size == offset) {
+ buffer->release();
+
+ mBlockIter.advance();
+ goto again;
+ }
+
+ buffer->set_range(kPadding + offset, size - offset);
}
*out = buffer;
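
The AVC branch above converts a length-prefixed NAL unit into a startcode-prefixed one in place: mNALSizeLen comes from the avcC lengthSizeMinusOne field (1 + (avcc[4] & 3)), the block payload is read at offset kPadding, and the four startcode bytes are written so that they end exactly where the NAL payload begins. Worked through for one length size, the offsets look like this (a standalone illustration, not part of the patch):

// Illustrative only: the buffer layout the code above sets up, worked
// through for mNALSizeLen == 2. Offsets are relative to buffer->data().
#include <cassert>
#include <cstddef>

static void nalLayoutExample() {
    const size_t kPadding = 3;
    const size_t mNALSizeLen = 2;  // e.g. 2-byte NAL length fields

    // Block data is read at kPadding, so the NAL payload (after its
    // length prefix) begins here:
    const size_t payloadStart = kPadding + mNALSizeLen;  // == 5

    // The 4-byte startcode must end exactly where the payload begins,
    // which is why the memcpy destination is mNALSizeLen - 1:
    const size_t startcodeStart = mNALSizeLen - 1;        // == 1
    assert(startcodeStart + 4 == payloadStart);

    // ...and why the returned range is [mNALSizeLen - 1, NALsize + 4).
}
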
@@ -476,7 +489,7 @@ void MatroskaExtractor::addTracks() {
size_t codecPrivateSize;
const unsigned char *codecPrivate =
- track->GetCodecPrivate(&codecPrivateSize);
+ track->GetCodecPrivate(codecPrivateSize);
enum { VIDEO_TRACK = 1, AUDIO_TRACK = 2 };
diff --git a/media/libstagefright/matroska/mkvparser.cpp b/media/libstagefright/matroska/mkvparser.cpp
index 4e51004..455b1d6 100644
--- a/media/libstagefright/matroska/mkvparser.cpp
+++ b/media/libstagefright/matroska/mkvparser.cpp
@@ -1,3103 +1,4511 @@
-#include "mkvparser.hpp"
-#include <cassert>
-#include <cstring>
-
-mkvparser::IMkvReader::~IMkvReader()
-{
-}
-
-long long mkvparser::ReadUInt(IMkvReader* pReader, long long pos, long& len)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(pos < available);
- assert((available - pos) >= 1); //assume here max u-int len is 8
-
- unsigned char b;
-
- hr = pReader->Read(pos, 1, &b);
- if (hr < 0)
- return hr;
-
- assert(hr == 0L);
-
- if (b & 0x80) //1000 0000
- {
- len = 1;
- b &= 0x7F; //0111 1111
- }
- else if (b & 0x40) //0100 0000
- {
- len = 2;
- b &= 0x3F; //0011 1111
- }
- else if (b & 0x20) //0010 0000
- {
- len = 3;
- b &= 0x1F; //0001 1111
- }
- else if (b & 0x10) //0001 0000
- {
- len = 4;
- b &= 0x0F; //0000 1111
- }
- else if (b & 0x08) //0000 1000
- {
- len = 5;
- b &= 0x07; //0000 0111
- }
- else if (b & 0x04) //0000 0100
- {
- len = 6;
- b &= 0x03; //0000 0011
- }
- else if (b & 0x02) //0000 0010
- {
- len = 7;
- b &= 0x01; //0000 0001
- }
- else
- {
- assert(b & 0x01); //0000 0001
- len = 8;
- b = 0; //0000 0000
- }
-
- assert((available - pos) >= len);
-
- long long result = b;
- ++pos;
- for (long i = 1; i < len; ++i)
- {
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- assert(hr == 0L);
-
- result <<= 8;
- result |= b;
-
- ++pos;
- }
-
- return result;
-}
-
-
-long long mkvparser::GetUIntLength(
- IMkvReader* pReader,
- long long pos,
- long& len)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- if (pos >= available)
- return pos; //too few bytes available
-
- unsigned char b;
-
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- assert(hr == 0L);
-
- if (b == 0) //we can't handle u-int values larger than 8 bytes
- return E_FILE_FORMAT_INVALID;
-
- unsigned char m = 0x80;
- len = 1;
-
- while (!(b & m))
- {
- m >>= 1;
- ++len;
- }
-
- return 0; //success
-}
-
-
-long long mkvparser::SyncReadUInt(
- IMkvReader* pReader,
- long long pos,
- long long stop,
- long& len)
-{
- assert(pReader);
-
- if (pos >= stop)
- return E_FILE_FORMAT_INVALID;
-
- unsigned char b;
-
- long hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (hr != 0L)
- return E_BUFFER_NOT_FULL;
-
- if (b == 0) //we can't handle u-int values larger than 8 bytes
- return E_FILE_FORMAT_INVALID;
-
- unsigned char m = 0x80;
- len = 1;
-
- while (!(b & m))
- {
- m >>= 1;
- ++len;
- }
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- long long result = b & (~m);
- ++pos;
-
- for (int i = 1; i < len; ++i)
- {
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (hr != 0L)
- return E_BUFFER_NOT_FULL;
-
- result <<= 8;
- result |= b;
-
- ++pos;
- }
-
- return result;
-}
-
-
-long long mkvparser::UnserializeUInt(
- IMkvReader* pReader,
- long long pos,
- long long size)
-{
- assert(pReader);
- assert(pos >= 0);
- assert(size > 0);
- assert(size <= 8);
-
- long long result = 0;
-
- for (long long i = 0; i < size; ++i)
- {
- unsigned char b;
-
- const long hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
- result <<= 8;
- result |= b;
-
- ++pos;
- }
-
- return result;
-}
-
-
-float mkvparser::Unserialize4Float(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
- assert((pos + 4) <= available);
-
- float result;
-
- unsigned char* const p = (unsigned char*)&result;
- unsigned char* q = p + 4;
-
- for (;;)
- {
- hr = pReader->Read(pos, 1, --q);
- assert(hr == 0L);
-
- if (q == p)
- break;
-
- ++pos;
- }
-
- return result;
-}
-
-
-double mkvparser::Unserialize8Double(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- double result;
-
- unsigned char* const p = (unsigned char*)&result;
- unsigned char* q = p + 8;
-
- for (;;)
- {
- const long hr = pReader->Read(pos, 1, --q);
- assert(hr == 0L);
-
- if (q == p)
- break;
-
- ++pos;
- }
-
- return result;
-}
-
-signed char mkvparser::Unserialize1SInt(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr == 0);
- assert(available <= total);
- assert(pos < available);
-
- signed char result;
-
- hr = pReader->Read(pos, 1, (unsigned char*)&result);
- assert(hr == 0);
-
- return result;
-}
-
-short mkvparser::Unserialize2SInt(
- IMkvReader* pReader,
- long long pos)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
- assert((pos + 2) <= available);
-
- short result;
-
- unsigned char* const p = (unsigned char*)&result;
- unsigned char* q = p + 2;
-
- for (;;)
- {
- hr = pReader->Read(pos, 1, --q);
- assert(hr == 0L);
-
- if (q == p)
- break;
-
- ++pos;
- }
-
- return result;
-}
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- long long& val)
-
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0);
- assert(size <= 8);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
-
- val = UnserializeUInt(pReader, pos, size);
- assert(val >= 0);
-
- pos += size; //consume size of payload
-
- return true;
-}
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- char*& val)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size_ = ReadUInt(pReader, pos, len);
- assert(size_ >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
- assert((pos + size_) <= available);
-
- const size_t size = static_cast<size_t>(size_);
- val = new char[size+1];
-
- for (size_t i = 0; i < size; ++i)
- {
- char c;
-
- hr = pReader->Read(pos + i, 1, (unsigned char*)&c);
- assert(hr == 0L);
-
- val[i] = c;
-
- if (c == '\0')
- break;
-
- }
-
- val[size] = '\0';
- pos += size_; //consume size of payload
-
- return true;
-}
-
-#if 0
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id,
- wchar_t*& val)
-{
- char* str;
-
- if (!Match(pReader, pos, id, str))
- return false;
-
- const size_t size = mbstowcs(NULL, str, 0);
-
- if (size == 0)
- val = NULL;
- else
- {
- val = new wchar_t[size+1];
- mbstowcs(val, str, size);
- val[size] = L'\0';
- }
-
- delete[] str;
- return true;
-}
-#endif
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- unsigned char*& val,
- size_t *optionalSize)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size_ = ReadUInt(pReader, pos, len);
- assert(size_ >= 0);
- assert(len > 0);
- assert(len <= 8);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
- assert((pos + size_) <= available);
-
- const size_t size = static_cast<size_t>(size_);
- val = new unsigned char[size];
-
- if (optionalSize) {
- *optionalSize = size;
- }
-
- for (size_t i = 0; i < size; ++i)
- {
- unsigned char b;
-
- hr = pReader->Read(pos + i, 1, &b);
- assert(hr == 0L);
-
- val[i] = b;
- }
-
- pos += size_; //consume size of payload
- return true;
-}
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- double& val)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
- long idlen;
- const long long id = ReadUInt(pReader, pos, idlen);
- assert(id >= 0); //TODO
-
- if ((unsigned long)id != id_)
- return false;
-
- long sizelen;
- const long long size = ReadUInt(pReader, pos + idlen, sizelen);
-
- switch (size)
- {
- case 4:
- case 8:
- break;
- default:
- return false;
- }
-
- pos += idlen + sizelen; //consume id and size fields
- assert((pos + size) <= available);
-
- if (size == 4)
- val = Unserialize4Float(pReader, pos);
- else
- {
- assert(size == 8);
- val = Unserialize8Double(pReader, pos);
- }
-
- pos += size; //consume size of payload
-
- return true;
-}
-
-
-bool mkvparser::Match(
- IMkvReader* pReader,
- long long& pos,
- unsigned long id_,
- short& val)
-{
- assert(pReader);
- assert(pos >= 0);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert((pos + len) <= available);
-
- if ((unsigned long)id != id_)
- return false;
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size <= 2);
- assert((pos + len) <= available);
-
- pos += len; //consume length of size of payload
- assert((pos + size) <= available);
-
- //TODO:
- // Generalize this to work for any size signed int
- if (size == 1)
- val = Unserialize1SInt(pReader, pos);
- else
- val = Unserialize2SInt(pReader, pos);
-
- pos += size; //consume size of payload
-
- return true;
-}
-
-
-namespace mkvparser
-{
-
-EBMLHeader::EBMLHeader():
- m_docType(NULL)
-{
-}
-
-EBMLHeader::~EBMLHeader()
-{
- delete[] m_docType;
-}
-
-long long EBMLHeader::Parse(
- IMkvReader* pReader,
- long long& pos)
-{
- assert(pReader);
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
-
- if (hr < 0)
- return hr;
-
- pos = 0;
- long long end = (1024 < available)? 1024: available;
-
- for (;;)
- {
- unsigned char b = 0;
-
- while (pos < end)
- {
- hr = pReader->Read(pos, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (b == 0x1A)
- break;
-
- ++pos;
- }
-
- if (b != 0x1A)
- {
- if ((pos >= 1024) ||
- (available >= total) ||
- ((total - available) < 5))
- return -1;
-
- return available + 5; //5 = 4-byte ID + 1st byte of size
- }
-
- if ((total - pos) < 5)
- return E_FILE_FORMAT_INVALID;
-
- if ((available - pos) < 5)
- return pos + 5; //try again later
-
- long len;
-
- const long long result = ReadUInt(pReader, pos, len);
-
- if (result < 0) //error
- return result;
-
- if (result == 0x0A45DFA3) //ReadId masks-off length indicator bits
- {
- assert(len == 4);
- pos += len;
- break;
- }
-
- ++pos; //throw away just the 0x1A byte, and try again
- }
-
- long len;
- long long result = GetUIntLength(pReader, pos, len);
-
- if (result < 0) //error
- return result;
-
- if (result > 0) //need more data
- return result;
-
- assert(len > 0);
- assert(len <= 8);
-
- if ((total - pos) < len)
- return E_FILE_FORMAT_INVALID;
- if ((available - pos) < len)
- return pos + len; //try again later
-
- result = ReadUInt(pReader, pos, len);
-
- if (result < 0) //error
- return result;
-
- pos += len; //consume u-int
-
- if ((total - pos) < result)
- return E_FILE_FORMAT_INVALID;
-
- if ((available - pos) < result)
- return pos + result;
-
- end = pos + result;
-
- m_version = 1;
- m_readVersion = 1;
- m_maxIdLength = 4;
- m_maxSizeLength = 8;
- m_docTypeVersion = 1;
- m_docTypeReadVersion = 1;
-
- while (pos < end)
- {
- if (Match(pReader, pos, 0x0286, m_version))
- ;
- else if (Match(pReader, pos, 0x02F7, m_readVersion))
- ;
- else if (Match(pReader, pos, 0x02F2, m_maxIdLength))
- ;
- else if (Match(pReader, pos, 0x02F3, m_maxSizeLength))
- ;
- else if (Match(pReader, pos, 0x0282, m_docType))
- ;
- else if (Match(pReader, pos, 0x0287, m_docTypeVersion))
- ;
- else if (Match(pReader, pos, 0x0285, m_docTypeReadVersion))
- ;
- else
- {
- result = ReadUInt(pReader, pos, len);
- assert(result > 0);
- assert(len > 0);
- assert(len <= 8);
-
- pos += len;
- assert(pos < end);
-
- result = ReadUInt(pReader, pos, len);
- assert(result >= 0);
- assert(len > 0);
- assert(len <= 8);
-
- pos += len + result;
- assert(pos <= end);
- }
- }
-
- assert(pos == end);
-
- return 0;
-}
-
-
-Segment::Segment(
- IMkvReader* pReader,
- long long start,
- long long size) :
- m_pReader(pReader),
- m_start(start),
- m_size(size),
- m_pos(start),
- m_pInfo(NULL),
- m_pTracks(NULL),
- m_clusterCount(0)
- //m_clusterNumber(0)
-{
-}
-
-
-Segment::~Segment()
-{
- Cluster** i = m_clusters;
- Cluster** j = m_clusters + m_clusterCount;
-
- while (i != j)
- {
- Cluster* p = *i++;
- assert(p);
- delete p;
- }
-
- delete[] m_clusters;
-
- delete m_pTracks;
- delete m_pInfo;
-}
-
-
-long long Segment::CreateInstance(
- IMkvReader* pReader,
- long long pos,
- Segment*& pSegment)
-{
- assert(pReader);
- assert(pos >= 0);
-
- pSegment = NULL;
-
- long long total, available;
-
- long hr = pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- //I would assume that in practice this loop would execute
- //exactly once, but we allow for other elements (e.g. Void)
- //to immediately follow the EBML header. This is fine for
- //the source filter case (since the entire file is available),
- //but in the splitter case over a network we should probably
- //just give up early. We could for example decide only to
- //execute this loop a maximum of, say, 10 times.
-
- while (pos < total)
- {
- //Read ID
-
- long len;
- long long result = GetUIntLength(pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > total)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- //TODO: if we liberalize the behavior of ReadUInt, we can
- //probably eliminate having to use GetUIntLength here.
- const long long id = ReadUInt(pReader, pos, len);
-
- if (id < 0) //error
- return id;
-
- pos += len; //consume ID
-
- //Read Size
-
- result = GetUIntLength(pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > total)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- //TODO: if we liberalize the behavior of ReadUInt, we can
- //probably eliminate having to use GetUIntLength here.
- const long long size = ReadUInt(pReader, pos, len);
-
- if (size < 0)
- return size;
-
- pos += len; //consume length of size of element
-
- //Pos now points to start of payload
-
- if ((pos + size) > total)
- return E_FILE_FORMAT_INVALID;
-
- if (id == 0x08538067) //Segment ID
- {
- pSegment = new Segment(pReader, pos, size);
- assert(pSegment); //TODO
-
- return 0; //success
- }
-
- pos += size; //consume payload
- }
-
- assert(pos == total);
-
- pSegment = new Segment(pReader, pos, 0);
- assert(pSegment); //TODO
-
- return 0; //success (sort of)
-}
-
-
-long long Segment::ParseHeaders()
-{
- //Outermost (level 0) segment object has been constructed,
- //and pos designates start of payload. We need to find the
- //inner (level 1) elements.
- long long total, available;
-
- long hr = m_pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available <= total);
-
- const long long stop = m_start + m_size;
- assert(stop <= total);
- assert(m_pos <= stop);
-
- bool bQuit = false;
- while ((m_pos < stop) && !bQuit)
- {
- long long pos = m_pos;
-
- long len;
- long long result = GetUIntLength(m_pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- const long long idpos = pos;
- const long long id = ReadUInt(m_pReader, idpos, len);
-
- if (id < 0) //error
- return id;
-
- pos += len; //consume ID
-
- //Read Size
- result = GetUIntLength(m_pReader, pos, len);
-
- if (result) //error, or too few available bytes
- return result;
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- if ((pos + len) > available)
- return pos + len;
-
- const long long size = ReadUInt(m_pReader, pos, len);
-
- if (size < 0)
- return size;
-
- pos += len; //consume length of size of element
-
- //Pos now points to start of payload
-
- if ((pos + size) > stop)
- return E_FILE_FORMAT_INVALID;
-
- //We read EBML elements either in total or nothing at all.
-
- if ((pos + size) > available)
- return pos + size;
-
- if (id == 0x0549A966) //Segment Info ID
- {
- assert(m_pInfo == NULL);
- m_pInfo = new SegmentInfo(this, pos, size);
- assert(m_pInfo); //TODO
-
- if (m_pTracks)
- bQuit = true;
- }
- else if (id == 0x0654AE6B) //Tracks ID
- {
- assert(m_pTracks == NULL);
- m_pTracks = new Tracks(this, pos, size);
- assert(m_pTracks); //TODO
-
- if (m_pInfo)
- bQuit = true;
- }
- else if (id == 0x0F43B675) //Cluster ID
- {
-#if 0
- if (m_pInfo == NULL) //TODO: liberalize
- ;
- else if (m_pTracks == NULL)
- ;
- else
- //ParseCluster(idpos, pos, size);
- Cluster::Parse(this, m_clusters, pos, size);
-#endif
- bQuit = true;
- }
-
- m_pos = pos + size; //consume payload
- }
-
- assert(m_pos <= stop);
-
- return 0; //success
-}
-
-
-long Segment::ParseCluster(Cluster*& pCluster, long long& pos_) const
-{
- pCluster = NULL;
- pos_ = -1;
-
- const long long stop = m_start + m_size;
- assert(m_pos <= stop);
-
- long long pos = m_pos;
- long long off = -1;
-
-
- while (pos < stop)
- {
- long len;
- const long long idpos = pos;
-
- const long long id = SyncReadUInt(m_pReader, pos, stop, len);
-
- if (id < 0) //error
- return static_cast<long>(id);
-
- if (id == 0)
- return E_FILE_FORMAT_INVALID;
-
- pos += len; //consume id
- assert(pos < stop);
-
- const long long size = SyncReadUInt(m_pReader, pos, stop, len);
-
- if (size < 0) //error
- return static_cast<long>(size);
-
- pos += len; //consume size
- assert(pos <= stop);
-
- if (size == 0) //weird
- continue;
-
- //pos now points to start of payload
-
- pos += size; //consume payload
- assert(pos <= stop);
-
- if (off >= 0)
- {
- pos_ = idpos;
- break;
- }
-
- if (id == 0x0F43B675) //Cluster ID
- off = idpos - m_start;
- }
-
- Segment* const this_ = const_cast<Segment*>(this);
- const size_t idx = m_clusterCount;
-
- if (pos >= stop)
- {
- pos_ = stop;
-
-#if 0
- if (off < 0)
- {
- pCluster = Cluster::CreateEndOfStream(this_, idx);
- return 1L;
- }
-#else
- if (off < 0)
- return 1L;
-#endif
-
- //Reading 0 bytes at pos might work too -- it would depend
- //on how the reader is implemented.
-
- unsigned char b;
-
- const long hr = m_pReader->Read(pos - 1, 1, &b);
-
- if (hr < 0)
- return hr;
-
- if (hr != 0L)
- return E_BUFFER_NOT_FULL;
- }
-
- assert(off >= 0);
- assert(pos_ >= m_start);
- assert(pos_ <= stop);
-
- pCluster = Cluster::Parse(this_, idx, off);
- return 0L;
-}
-
-
-bool Segment::AddCluster(Cluster* pCluster, long long pos)
-{
- assert(pos >= m_start);
-
- const long long stop = m_start + m_size;
- assert(pos <= stop);
-
- if (pCluster)
- m_clusters[pos] = pCluster;
-
- m_pos = pos; //m_pos >= stop is now we know we have all clusters
-
- return (pos >= stop);
-}
-
-
-long Segment::Load()
-{
- //Outermost (level 0) segment object has been constructed,
- //and pos designates start of payload. We need to find the
- //inner (level 1) elements.
- const long long stop = m_start + m_size;
-#ifdef _DEBUG
- {
- long long total, available;
-
- long hr = m_pReader->Length(&total, &available);
- assert(hr >= 0);
- assert(available >= total);
- assert(stop <= total);
- }
-#endif
- long long index = m_pos;
-
- m_clusterCount = 0;
-
- while (index < stop)
- {
- long len = 0;
-
- long long result = GetUIntLength(m_pReader, index, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((index + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long idpos = index;
- const long long id = ReadUInt(m_pReader, idpos, len);
-
- if (id < 0) //error
- return static_cast<long>(id);
-
- index += len; //consume ID
-
- //Read Size
- result = GetUIntLength(m_pReader, index, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((index + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long size = ReadUInt(m_pReader, index, len);
-
- if (size < 0) //error
- return static_cast<long>(size);
-
- index += len; //consume length of size of element
-
- if (id == 0x0F43B675) // Cluster ID
- break;
-
- if (id == 0x014D9B74) // SeekHead ID
- {
- ParseSeekHead(index, size, NULL);
- break;
- }
- index += size;
- }
-
- if (m_clusterCount == 0)
- return -1L;
-
- while (m_pos < stop)
- {
- long long pos = m_pos;
-
- long len;
-
- long long result = GetUIntLength(m_pReader, pos, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long idpos = pos;
- const long long id = ReadUInt(m_pReader, idpos, len);
-
- if (id < 0) //error
- return static_cast<long>(id);
-
- pos += len; //consume ID
-
- //Read Size
- result = GetUIntLength(m_pReader, pos, len);
-
- if (result < 0) //error
- return static_cast<long>(result);
-
- if ((pos + len) > stop)
- return E_FILE_FORMAT_INVALID;
-
- const long long size = ReadUInt(m_pReader, pos, len);
-
- if (size < 0) //error
- return static_cast<long>(size);
-
- pos += len; //consume length of size of element
-
- //Pos now points to start of payload
-
- if ((pos + size) > stop)
- return E_FILE_FORMAT_INVALID;
-
- if (id == 0x0F43B675) //Cluster ID
- break;
-
- if (id == 0x014D9B74) //SeekHead ID
- {
- m_clusters = new Cluster*[m_clusterCount];
- size_t index = 0;
-
- ParseSeekHead(pos, size, &index);
- assert(index == m_clusterCount);
- }
- else if (id == 0x0549A966) //Segment Info ID
- {
- assert(m_pInfo == NULL);
- m_pInfo = new SegmentInfo(this, pos, size);
- assert(m_pInfo); //TODO
- }
- else if (id == 0x0654AE6B) //Tracks ID
- {
- assert(m_pTracks == NULL);
- m_pTracks = new Tracks(this, pos, size);
- assert(m_pTracks); //TODO
- }
-
- m_pos = pos + size; //consume payload
- }
-
- assert(m_clusters);
-
- //TODO: see notes above. This check is here (temporarily) to ensure
- //that the first seekhead has entries for the clusters (because that's
- //when they're loaded). In case we are given a file that lists the
- //clusters in a second seekhead, the worst thing that happens is that
- //we treat this as an invalid file (which is better then simply
- //asserting somewhere). But that's only a work-around. What we need
- //to do is be able to handle having multiple seekheads, and having
- //clusters listed somewhere besides the first seekhead.
- //
- //if (m_clusters == NULL)
- // return E_FILE_FORMAT_INVALID;
-
- //NOTE: we stop parsing when we reach the first cluster, under the
- //assumption all clusters are named in some SeekHead. Clusters
- //will have been (pre)loaded, so we indicate that we have all clusters
- //by adjusting the parse position:
- m_pos = stop; //means "we have all clusters"
-
- return 0L;
-}
-
-
-void Segment::ParseSeekHead(long long start, long long size_, size_t* pIndex)
-{
- long long pos = start;
- const long long stop = start + size_;
- while (pos < stop)
- {
- long len;
-
- const long long id = ReadUInt(m_pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume ID
-
- const long long size = ReadUInt(m_pReader, pos, len);
- assert(size >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume Size field
- assert((pos + size) <= stop);
-
- if (id == 0x0DBB) //SeekEntry ID
- ParseSeekEntry(pos, size, pIndex);
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
-
- assert(pos == stop);
-}
-
-
-void Segment::ParseSecondarySeekHead(long long off, size_t* pIndex)
-{
- assert(off >= 0);
- assert(off < m_size);
-
- long long pos = m_start + off;
- const long long stop = m_start + m_size;
-
- long len;
-
- long long result = GetUIntLength(m_pReader, pos, len);
- assert(result == 0);
- assert((pos + len) <= stop);
-
- const long long idpos = pos;
-
- const long long id = ReadUInt(m_pReader, idpos, len);
- assert(id == 0x014D9B74); //SeekHead ID
-
- pos += len; //consume ID
- assert(pos < stop);
-
- //Read Size
-
- result = GetUIntLength(m_pReader, pos, len);
- assert(result == 0);
- assert((pos + len) <= stop);
-
- const long long size = ReadUInt(m_pReader, pos, len);
- assert(size >= 0);
-
- pos += len; //consume length of size of element
- assert((pos + size) <= stop);
-
- //Pos now points to start of payload
-
- ParseSeekHead(pos, size, pIndex);
-}
-
-
-void Segment::ParseSeekEntry(long long start, long long size_, size_t* pIndex)
-{
- long long pos = start;
-
- const long long stop = start + size_;
-
- long len;
-
- const long long seekIdId = ReadUInt(m_pReader, pos, len);
- //seekIdId;
- assert(seekIdId == 0x13AB); //SeekID ID
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long seekIdSize = ReadUInt(m_pReader, pos, len);
- assert(seekIdSize >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- const long long seekId = ReadUInt(m_pReader, pos, len); //payload
- assert(seekId >= 0);
- assert(len == seekIdSize);
- assert((pos + len) <= stop);
-
- pos += seekIdSize; //consume payload
-
- const long long seekPosId = ReadUInt(m_pReader, pos, len);
- //seekPosId;
- assert(seekPosId == 0x13AC); //SeekPos ID
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long seekPosSize = ReadUInt(m_pReader, pos, len);
- assert(seekPosSize >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume size
- assert((pos + seekPosSize) <= stop);
-
- const long long seekOff = UnserializeUInt(m_pReader, pos, seekPosSize);
- assert(seekOff >= 0);
- assert(seekOff < m_size);
-
- pos += seekPosSize; //consume payload
- assert(pos == stop);
-
- const long long seekPos = m_start + seekOff;
- assert(seekPos < (m_start + m_size));
-
- if (seekId == 0x0F43B675) //cluster id
- {
- if (pIndex == NULL)
- ++m_clusterCount;
- else
- {
- assert(m_clusters);
- assert(m_clusterCount > 0);
-
- size_t& index = *pIndex;
- assert(index < m_clusterCount);
-
- Cluster*& pCluster = m_clusters[index];
-
- pCluster = Cluster::Parse(this, index, seekOff);
- assert(pCluster); //TODO
-
- ++index;
- }
- }
- else if (seekId == 0x014D9B74) //SeekHead ID
- {
- ParseSecondarySeekHead(seekOff, pIndex);
- }
-}
-
-
-long long Segment::Unparsed() const
-{
- const long long stop = m_start + m_size;
-
- const long long result = stop - m_pos;
- assert(result >= 0);
-
- return result;
-}
-
-
-#if 0 //NOTE: too inefficient
-long long Segment::Load(long long time_ns)
-{
- if (Unparsed() <= 0)
- return 0;
-
- while (m_clusters.empty())
- {
- const long long result = Parse();
-
- if (result) //error, or not enough bytes available
- return result;
-
- if (Unparsed() <= 0)
- return 0;
- }
-
- while (m_clusters.back()->GetTime() < time_ns)
- {
- const long long result = Parse();
-
- if (result) //error, or not enough bytes available
- return result;
-
- if (Unparsed() <= 0)
- return 0;
- }
-
- return 0;
-}
-#endif
-
-
-Cluster* Segment::GetFirst()
-{
- if ((m_clusters == NULL) || (m_clusterCount <= 0))
- return &m_eos;
-
- Cluster* const pCluster = m_clusters[0];
- assert(pCluster);
-
- return pCluster;
-}
-
-
-Cluster* Segment::GetLast()
-{
- if ((m_clusters == NULL) || (m_clusterCount <= 0))
- return &m_eos;
-
- const size_t idx = m_clusterCount - 1;
- Cluster* const pCluster = m_clusters[idx];
- assert(pCluster);
-
- return pCluster;
-}
-
-
-unsigned long Segment::GetCount() const
-{
- //TODO: m_clusterCount should not be long long.
- return static_cast<unsigned long>(m_clusterCount);
-}
-
-
-Cluster* Segment::GetNext(const Cluster* pCurr)
-{
- assert(pCurr);
- assert(pCurr != &m_eos);
- assert(m_clusters);
- assert(m_clusterCount > 0);
-
- size_t idx = pCurr->m_index;
- assert(idx < m_clusterCount);
- assert(pCurr == m_clusters[idx]);
-
- idx++;
-
- if (idx >= m_clusterCount)
- return &m_eos;
-
- Cluster* const pNext = m_clusters[idx];
- assert(pNext);
-
- return pNext;
-}
-
-
-Cluster* Segment::GetCluster(long long time_ns)
-{
- if ((m_clusters == NULL) || (m_clusterCount <= 0))
- return &m_eos;
-
- {
- Cluster* const pCluster = m_clusters[0];
- assert(pCluster);
- assert(pCluster->m_index == 0);
-
- if (time_ns <= pCluster->GetTime())
- return pCluster;
- }
-
- //Binary search of cluster array
-
- size_t i = 0;
- size_t j = m_clusterCount;
-
- while (i < j)
- {
- //INVARIANT:
- //[0, i) <= time_ns
- //[i, j) ?
- //[j, m_clusterCount) > time_ns
-
- const size_t k = i + (j - i) / 2;
- assert(k < m_clusterCount);
-
- Cluster* const pCluster = m_clusters[k];
- assert(pCluster);
- assert(pCluster->m_index == k);
-
- const long long t = pCluster->GetTime();
-
- if (t <= time_ns)
- i = k + 1;
- else
- j = k;
-
- assert(i <= j);
- }
-
- assert(i == j);
- assert(i > 0);
- assert(i <= m_clusterCount);
-
- const size_t k = i - 1;
-
- Cluster* const pCluster = m_clusters[k];
- assert(pCluster);
- assert(pCluster->m_index == k);
- assert(pCluster->GetTime() <= time_ns);
-
- return pCluster;
-}
-
-
-Tracks* Segment::GetTracks() const
-{
- return m_pTracks;
-}
-
-
-const SegmentInfo* const Segment::GetInfo() const
-{
- return m_pInfo;
-}
-
-
-long long Segment::GetDuration() const
-{
- assert(m_pInfo);
- return m_pInfo->GetDuration();
-}
-
-
-SegmentInfo::SegmentInfo(Segment* pSegment, long long start, long long size_) :
- m_pSegment(pSegment),
- m_start(start),
- m_size(size_),
- m_pMuxingAppAsUTF8(NULL),
- m_pWritingAppAsUTF8(NULL),
- m_pTitleAsUTF8(NULL)
-{
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos = start;
- const long long stop = start + size_;
-
- m_timecodeScale = 1000000;
- m_duration = 0;
-
-
- while (pos < stop)
- {
- if (Match(pReader, pos, 0x0AD7B1, m_timecodeScale))
- assert(m_timecodeScale > 0);
-
- else if (Match(pReader, pos, 0x0489, m_duration))
- assert(m_duration >= 0);
-
- else if (Match(pReader, pos, 0x0D80, m_pMuxingAppAsUTF8)) //[4D][80]
- assert(m_pMuxingAppAsUTF8);
-
- else if (Match(pReader, pos, 0x1741, m_pWritingAppAsUTF8)) //[57][41]
- assert(m_pWritingAppAsUTF8);
-
- else if (Match(pReader, pos, 0x3BA9, m_pTitleAsUTF8)) //[7B][A9]
- assert(m_pTitleAsUTF8);
-
- else
- {
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- //id;
- assert(id >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume id
- assert((stop - pos) > 0);
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0);
- assert((pos + len) <= stop);
-
- pos += len + size; //consume size and payload
- assert(pos <= stop);
- }
- }
-
- assert(pos == stop);
-}
-
-SegmentInfo::~SegmentInfo()
-{
- if (m_pMuxingAppAsUTF8)
- {
- delete[] m_pMuxingAppAsUTF8;
- m_pMuxingAppAsUTF8 = NULL;
- }
-
- if (m_pWritingAppAsUTF8)
- {
- delete[] m_pWritingAppAsUTF8;
- m_pWritingAppAsUTF8 = NULL;
- }
-
- if (m_pTitleAsUTF8)
- {
- delete[] m_pTitleAsUTF8;
- m_pTitleAsUTF8 = NULL;
- }
-}
-
-long long SegmentInfo::GetTimeCodeScale() const
-{
- return m_timecodeScale;
-}
-
-
-long long SegmentInfo::GetDuration() const
-{
- assert(m_duration >= 0);
- assert(m_timecodeScale >= 1);
-
- const double dd = double(m_duration) * double(m_timecodeScale);
- const long long d = static_cast<long long>(dd);
-
- return d;
-}
-
-const char* SegmentInfo::GetMuxingAppAsUTF8() const
-{
- return m_pMuxingAppAsUTF8;
-}
-
-const char* SegmentInfo::GetWritingAppAsUTF8() const
-{
- return m_pWritingAppAsUTF8;
-}
-
-const char* SegmentInfo::GetTitleAsUTF8() const
-{
- return m_pTitleAsUTF8;
-}
-
-Track::Track(Segment* pSegment, const Info& i) :
- m_pSegment(pSegment),
- m_info(i)
-{
-}
-
-Track::~Track()
-{
- Info& info = const_cast<Info&>(m_info);
- info.Clear();
-}
-
-Track::Info::Info():
- type(-1),
- number(-1),
- uid(-1),
- nameAsUTF8(NULL),
- codecId(NULL),
- codecPrivate(NULL),
- codecPrivateSize(0),
- codecNameAsUTF8(NULL)
-{
-}
-
-void Track::Info::Clear()
-{
- delete[] nameAsUTF8;
- nameAsUTF8 = NULL;
-
- delete[] codecId;
- codecId = NULL;
-
- delete[] codecPrivate;
- codecPrivate = NULL;
-
- delete[] codecNameAsUTF8;
- codecNameAsUTF8 = NULL;
-}
-
-const BlockEntry* Track::GetEOS() const
-{
- return &m_eos;
-}
-
-long long Track::GetType() const
-{
- const unsigned long result = static_cast<unsigned long>(m_info.type);
- return result;
-}
-
-unsigned long Track::GetNumber() const
-{
- assert(m_info.number >= 0);
- const unsigned long result = static_cast<unsigned long>(m_info.number);
- return result;
-}
-
-const char* Track::GetNameAsUTF8() const
-{
- return m_info.nameAsUTF8;
-}
-
-const char* Track::GetCodecNameAsUTF8() const
-{
- return m_info.codecNameAsUTF8;
-}
-
-
-const char* Track::GetCodecId() const
-{
- return m_info.codecId;
-}
-
-
-const unsigned char* Track::GetCodecPrivate(size_t *optionalSize) const
-{
- if (optionalSize) {
- *optionalSize = m_info.codecPrivateSize;
- }
- return m_info.codecPrivate;
-}
-
-
-long Track::GetFirst(const BlockEntry*& pBlockEntry) const
-{
- Cluster* const pCluster = m_pSegment->GetFirst();
-
- //If Segment::GetFirst returns NULL, then this must be a network
- //download, and we haven't loaded any clusters yet. In this case,
- //returning NULL from Track::GetFirst means the same thing.
-
- if ((pCluster == NULL) || pCluster->EOS())
- {
- pBlockEntry = NULL;
- return E_BUFFER_NOT_FULL; //return 1L instead?
- }
-
- pBlockEntry = pCluster->GetFirst();
-
- while (pBlockEntry)
- {
- const Block* const pBlock = pBlockEntry->GetBlock();
- assert(pBlock);
-
- if (pBlock->GetTrackNumber() == (unsigned long)m_info.number)
- return 0L;
-
- pBlockEntry = pCluster->GetNext(pBlockEntry);
- }
-
- //NOTE: if we get here, it means that we didn't find a block with
- //a matching track number. We interpret that as an error (which
- //might be too conservative).
-
- pBlockEntry = GetEOS(); //so we can return a non-NULL value
- return 1L;
-}
-
-
-long Track::GetNext(const BlockEntry* pCurrEntry, const BlockEntry*& pNextEntry) const
-{
- assert(pCurrEntry);
- assert(!pCurrEntry->EOS()); //?
- assert(pCurrEntry->GetBlock()->GetTrackNumber() == (unsigned long)m_info.number);
-
- const Cluster* const pCurrCluster = pCurrEntry->GetCluster();
- assert(pCurrCluster);
- assert(!pCurrCluster->EOS());
-
- pNextEntry = pCurrCluster->GetNext(pCurrEntry);
-
- while (pNextEntry)
- {
- const Block* const pNextBlock = pNextEntry->GetBlock();
- assert(pNextBlock);
-
- if (pNextBlock->GetTrackNumber() == (unsigned long)m_info.number)
- return 0L;
-
- pNextEntry = pCurrCluster->GetNext(pNextEntry);
- }
-
- Segment* pSegment = pCurrCluster->m_pSegment;
- Cluster* const pNextCluster = pSegment->GetNext(pCurrCluster);
-
- if ((pNextCluster == NULL) || pNextCluster->EOS())
- {
- if (pSegment->Unparsed() <= 0) //all clusters have been loaded
- {
- pNextEntry = GetEOS();
- return 1L;
- }
-
- pNextEntry = NULL;
- return E_BUFFER_NOT_FULL;
- }
-
- pNextEntry = pNextCluster->GetFirst();
-
- while (pNextEntry)
- {
- const Block* const pNextBlock = pNextEntry->GetBlock();
- assert(pNextBlock);
-
- if (pNextBlock->GetTrackNumber() == (unsigned long)m_info.number)
- return 0L;
-
- pNextEntry = pNextCluster->GetNext(pNextEntry);
- }
-
- //TODO: what has happened here is that we did not find a block
- //with a matching track number on the next cluster. It might
- //be the case that some cluster beyond the next cluster
- //contains a block having a matching track number, but for
- //now we terminate the search immediately. We do this so that
- //we don't end up searching the entire file looking for the
- //next block. Another possibility is to try searching for the next
- //block in a small, fixed number of clusters (instead of searching
- //just the next one), or to terminate the search when there is a
- //large gap in time, or a large gap in file position. It
- //might very well be the case that the approach we use here is
- //unnecessarily conservative.
-
- //TODO: again, here's a case where we need to return the special
- //EOS block. Or something. It's OK if pNext is NULL, because
- //we only need it to set the stop time of the media sample.
- //(The start time is determined from pCurr, which is non-NULL
- //and non-EOS.) The problem is when we set pCurr=pNext; when
- //pCurr has the value NULL we interpret that to mean that we
- //haven't fully initialized pCurr and we attempt to set it to
- //point to the first block for this track. But that's not what
- //we want at all; we want the next call to PopulateSample to
- //return end-of-stream, not (re)start from the beginning.
- //
- //One work-around is to send EOS immediately. We would send
- //the EOS the next pass anyway, so maybe it's no great loss. The
- //only problem arises if the stream really does end one cluster
- //early (relative to other tracks), or if the last frame happens
- //to be a keyframe ("CanSeekToEnd").
- //
- //The problem is that we need a way to mark a stream as
- //"at end of stream" without actually being at end of stream.
- //We need to give pCurr some value that means "you've reached EOS".
- //We can't synthesize the special EOS Cluster immediately
- //(when we first open the file, say), because we use the existence
- //of that special cluster value to mean that we've read all of
- //the clusters (this is a network download, so we can't know a priori
- //how many we have).
- //
- //Or, we could return E_FAIL, and set another bit in the stream
- //object itself, to indicate that it should send EOS earlier
- //than when (pCurr=pStop).
- //
- //Or, probably the best solution, when we actually load the
- //blocks into a cluster: if we notice that there's no block
- //for a track, we synthesize a nonce EOS block for that track.
- //That way we always have something to return. But that will
- //only work for sequential scan???
-
- //pNext = NULL;
- //return E_FAIL;
- pNextEntry = GetEOS();
- return 1L;
-}
-
-
-Track::EOSBlock::EOSBlock()
-{
-}
-
-
-bool Track::EOSBlock::EOS() const
-{
- return true;
-}
-
-
-Cluster* Track::EOSBlock::GetCluster() const
-{
- return NULL;
-}
-
-
-size_t Track::EOSBlock::GetIndex() const
-{
- return 0;
-}
-
-
-const Block* Track::EOSBlock::GetBlock() const
-{
- return NULL;
-}
-
-
-bool Track::EOSBlock::IsBFrame() const
-{
- return false;
-}
-
-
-VideoTrack::VideoTrack(Segment* pSegment, const Info& i) :
- Track(pSegment, i),
- m_width(-1),
- m_height(-1),
- m_rate(-1)
-{
- assert(i.type == 1);
- assert(i.number > 0);
-
- IMkvReader* const pReader = pSegment->m_pReader;
-
- const Settings& s = i.settings;
- assert(s.start >= 0);
- assert(s.size >= 0);
-
- long long pos = s.start;
- assert(pos >= 0);
-
- const long long stop = pos + s.size;
-
- while (pos < stop)
- {
-#ifdef _DEBUG
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-#endif
- if (Match(pReader, pos, 0x30, m_width))
- ;
- else if (Match(pReader, pos, 0x3A, m_height))
- ;
- else if (Match(pReader, pos, 0x0383E3, m_rate))
- ;
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
- assert((pos + size) <= stop);
-
- //pos now designates start of payload
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- return;
-}
-
-
-bool VideoTrack::VetEntry(const BlockEntry* pBlockEntry) const
-{
- assert(pBlockEntry);
-
- const Block* const pBlock = pBlockEntry->GetBlock();
- assert(pBlock);
- assert(pBlock->GetTrackNumber() == (unsigned long)m_info.number);
-
- return pBlock->IsKey();
-}
-
-
-
-long long VideoTrack::GetWidth() const
-{
- return m_width;
-}
-
-
-long long VideoTrack::GetHeight() const
-{
- return m_height;
-}
-
-
-double VideoTrack::GetFrameRate() const
-{
- return m_rate;
-}
-
-
-AudioTrack::AudioTrack(Segment* pSegment, const Info& i) :
- Track(pSegment, i)
-{
- assert(i.type == 2);
- assert(i.number > 0);
-
- IMkvReader* const pReader = pSegment->m_pReader;
-
- const Settings& s = i.settings;
- assert(s.start >= 0);
- assert(s.size >= 0);
-
- long long pos = s.start;
- assert(pos >= 0);
-
- const long long stop = pos + s.size;
-
- while (pos < stop)
- {
-#ifdef _DEBUG
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-#endif
- if (Match(pReader, pos, 0x35, m_rate))
- ;
- else if (Match(pReader, pos, 0x1F, m_channels))
- ;
- else if (Match(pReader, pos, 0x2264, m_bitDepth))
- ;
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
- assert((pos + size) <= stop);
-
- //pos now designates start of payload
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- return;
-}
-
-bool AudioTrack::VetEntry(const BlockEntry* pBlockEntry) const
-{
- assert(pBlockEntry);
-
- const Block* const pBlock = pBlockEntry->GetBlock();
- assert(pBlock);
- assert(pBlock->GetTrackNumber() == (unsigned long)m_info.number);
-
- return true;
-}
-
-
-double AudioTrack::GetSamplingRate() const
-{
- return m_rate;
-}
-
-
-long long AudioTrack::GetChannels() const
-{
- return m_channels;
-}
-
-long long AudioTrack::GetBitDepth() const
-{
- return m_bitDepth;
-}
-
-Tracks::Tracks(Segment* pSegment, long long start, long long size_) :
- m_pSegment(pSegment),
- m_start(start),
- m_size(size_),
- m_trackEntries(NULL),
- m_trackEntriesEnd(NULL)
-{
- long long stop = m_start + m_size;
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos1 = m_start;
- int count = 0;
-
- while (pos1 < stop)
- {
- long len;
- const long long id = ReadUInt(pReader, pos1, len);
- assert(id >= 0);
- assert((pos1 + len) <= stop);
-
- pos1 += len; //consume id
-
- const long long size = ReadUInt(pReader, pos1, len);
- assert(size >= 0);
- assert((pos1 + len) <= stop);
-
- pos1 += len; //consume length of size
-
- //pos now designates start of element
- if (id == 0x2E) //TrackEntry ID
- ++count;
-
- pos1 += size; //consume payload
- assert(pos1 <= stop);
- }
-
- if (count <= 0)
- return;
-
- m_trackEntries = new Track*[count];
- m_trackEntriesEnd = m_trackEntries;
-
- long long pos = m_start;
-
- while (pos < stop)
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size1 = ReadUInt(pReader, pos, len);
- assert(size1 >= 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
-
- //pos now designates start of element
-
- if (id == 0x2E) //TrackEntry ID
- ParseTrackEntry(pos, size1, *m_trackEntriesEnd++);
-
- pos += size1; //consume payload
- assert(pos <= stop);
- }
-}
-
-unsigned long Tracks::GetTracksCount() const
-{
- const ptrdiff_t result = m_trackEntriesEnd - m_trackEntries;
- assert(result >= 0);
-
- return static_cast<unsigned long>(result);
-}
-
-
-void Tracks::ParseTrackEntry(
- long long start,
- long long size,
- Track*& pTrack)
-{
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos = start;
- const long long stop = start + size;
-
- Track::Info i;
-
- Track::Settings videoSettings;
- videoSettings.start = -1;
-
- Track::Settings audioSettings;
- audioSettings.start = -1;
-
- while (pos < stop)
- {
-#ifdef _DEBUG
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- len;
- id;
-#endif
- if (Match(pReader, pos, 0x57, i.number))
- assert(i.number > 0);
-
- else if (Match(pReader, pos, 0x33C5, i.uid))
- ;
-
- else if (Match(pReader, pos, 0x03, i.type))
- ;
-
- else if (Match(pReader, pos, 0x136E, i.nameAsUTF8))
- assert(i.nameAsUTF8);
-
- else if (Match(pReader, pos, 0x06, i.codecId))
- ;
-
- else if (Match(pReader, pos, 0x23A2, i.codecPrivate, &i.codecPrivateSize))
- ;
-
- else if (Match(pReader, pos, 0x058688, i.codecNameAsUTF8))
- assert(i.codecNameAsUTF8);
-
- else
- {
- long len;
-
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO: handle error case
- assert((pos + len) <= stop);
-
- pos += len; //consume length of size
- const long long start = pos;
-
- pos += size; //consume payload
- assert(pos <= stop);
-
- if (id == 0x60)
- {
- videoSettings.start = start;
- videoSettings.size = size;
- }
- else if (id == 0x61)
- {
- audioSettings.start = start;
- audioSettings.size = size;
- }
- }
- }
-
- assert(pos == stop);
- //TODO: properly vet info.number, to ensure both its existence,
- //and that it is unique among all tracks.
- assert(i.number > 0);
-
- //TODO: vet settings, to ensure that video settings (0x60)
- //were specified when type = 1, and that audio settings (0x61)
- //were specified when type = 2.
- if (i.type == 1) //video
- {
- assert(audioSettings.start < 0);
- assert(videoSettings.start >= 0);
-
- i.settings = videoSettings;
-
- VideoTrack* const t = new VideoTrack(m_pSegment, i);
- assert(t); //TODO
- pTrack = t;
- }
- else if (i.type == 2) //audio
- {
- assert(videoSettings.start < 0);
- assert(audioSettings.start >= 0);
-
- i.settings = audioSettings;
-
- AudioTrack* const t = new AudioTrack(m_pSegment, i);
- assert(t); //TODO
- pTrack = t;
- }
- else
- {
- // we do not yet support other track types.
- // TODO: support other track types
- i.Clear();
-
- pTrack = NULL;
- }
-
- return;
-}
-
-
-Tracks::~Tracks()
-{
- Track** i = m_trackEntries;
- Track** const j = m_trackEntriesEnd;
-
- while (i != j)
- {
- Track* pTrack = *i++;
- delete pTrack;
- pTrack = NULL;
- }
-
- delete[] m_trackEntries;
-}
-
-
-Track* Tracks::GetTrackByNumber(unsigned long tn) const
-{
- Track** i = m_trackEntries;
- Track** const j = m_trackEntriesEnd;
-
- while (i != j)
- {
- Track* const pTrack = *i++;
-
- if (pTrack == NULL)
- continue;
-
- if (tn == pTrack->GetNumber())
- return pTrack;
- }
-
- return NULL; //not found
-}
-
-
-Track* Tracks::GetTrackByIndex(unsigned long idx) const
-{
- const ptrdiff_t count = m_trackEntriesEnd - m_trackEntries;
-
- if (idx >= static_cast<unsigned long>(count))
- return NULL;
-
- return m_trackEntries[idx];
-}
-
-
-void Cluster::Load()
-{
- assert(m_pSegment);
-
- if (m_start > 0)
- {
- assert(m_size > 0);
- assert(m_timecode >= 0);
- return;
- }
-
- assert(m_size == 0);
- assert(m_timecode < 0);
-
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- const long long off = -m_start; //relative to segment
- long long pos = m_pSegment->m_start + off; //absolute
-
- long len;
-
- const long long id_ = ReadUInt(pReader, pos, len);
- assert(id_ >= 0);
- assert(id_ == 0x0F43B675); //Cluster ID
-
- pos += len; //consume id
-
- const long long size_ = ReadUInt(pReader, pos, len);
- assert(size_ >= 0);
-
- pos += len; //consume size
-
- m_start = pos;
- m_size = size_;
-
- const long long stop = m_start + size_;
-
- long long timecode = -1;
-
- while (pos < stop)
- {
- if (Match(pReader, pos, 0x67, timecode))
- break;
- else
- {
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- if (id == 0x20) //BlockGroup ID
- break;
-
- if (id == 0x23) //SimpleBlock ID
- break;
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- assert(pos <= stop);
- assert(timecode >= 0);
-
- m_timecode = timecode;
-}
-
-
-Cluster* Cluster::Parse(
- Segment* pSegment,
- size_t idx,
- long long off)
-{
- assert(pSegment);
- assert(off >= 0);
- assert(off < pSegment->m_size);
- Cluster* const pCluster = new Cluster(pSegment, idx, -off);
- assert(pCluster);
-
- return pCluster;
-}
-
-
-Cluster::Cluster() :
- m_pSegment(NULL),
- m_index(0),
- m_start(0),
- m_size(0),
- m_timecode(0),
- m_pEntries(NULL),
- m_entriesCount(0)
-{
-}
-
-Cluster::Cluster(
- Segment* pSegment,
- size_t idx,
- long long off) :
- m_pSegment(pSegment),
- m_index(idx),
- m_start(off),
- m_size(0),
- m_timecode(-1),
- m_pEntries(NULL),
- m_entriesCount(0)
-{
-}
-
-
-Cluster::~Cluster()
-{
-#if 0
- while (!m_pEntries.empty())
- {
- BlockEntry* pBlockEntry = m_pEntries.front();
- assert(pBlockEntry);
-
- m_pEntries.pop_front();
- delete pBlockEntry;
- }
-#else
- BlockEntry** i = m_pEntries;
- BlockEntry** const j = m_pEntries + m_entriesCount;
- while (i != j)
- {
- BlockEntry* p = *i++;
-
- assert(p);
- delete p;
- }
-
- delete[] m_pEntries;
-#endif
-
-}
-
-bool Cluster::EOS() const
-{
- return (m_pSegment == 0);
-}
-
-
-void Cluster::LoadBlockEntries()
-{
- if (m_pEntries)
- return;
-
- Load();
- assert(m_timecode >= 0);
- assert(m_start > 0);
- assert(m_size > 0);
-
- IMkvReader* const pReader = m_pSegment->m_pReader;
-
- long long pos = m_start;
- const long long stop = m_start + m_size;
- long long timecode = -1;
-
- long long idx = pos;
-
- m_entriesCount = 0;
-
- while (idx < stop)
- {
- if (Match(pReader, idx, 0x67, timecode))
- assert(timecode == m_timecode);
- else
- {
- long len;
-
- const long long id = ReadUInt(pReader, idx, len);
- assert(id >= 0); //TODO
- assert((idx + len) <= stop);
-
- idx += len; //consume id
-
- const long long size = ReadUInt(pReader, idx, len);
- assert(size >= 0); //TODO
- assert((idx + len) <= stop);
-
- idx += len; //consume size
-
- if (id == 0x20) //BlockGroup ID
- ++m_entriesCount;
- else if (id == 0x23) //SimpleBlock ID
- ++m_entriesCount;
-
- idx += size; //consume payload
-
- assert(idx <= stop);
- }
- }
-
- if (m_entriesCount == 0)
- return;
-
- m_pEntries = new BlockEntry*[m_entriesCount];
- size_t index = 0;
-
- while (pos < stop)
- {
- if (Match(pReader, pos, 0x67, timecode))
- assert(timecode == m_timecode);
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume id
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- if (id == 0x20) //BlockGroup ID
- ParseBlockGroup(pos, size, index++);
- else if (id == 0x23) //SimpleBlock ID
- ParseSimpleBlock(pos, size, index++);
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- assert(pos == stop);
- assert(timecode >= 0);
- assert(index == m_entriesCount);
-}
-
-
-
-long long Cluster::GetTimeCode()
-{
- Load();
- return m_timecode;
-}
-
-
-long long Cluster::GetTime()
-{
- const long long tc = GetTimeCode();
- assert(tc >= 0);
-
- const SegmentInfo* const pInfo = m_pSegment->GetInfo();
- assert(pInfo);
-
- const long long scale = pInfo->GetTimeCodeScale();
- assert(scale >= 1);
-
- const long long t = m_timecode * scale;
-
- return t;
-}
-
-
-void Cluster::ParseBlockGroup(long long start, long long size, size_t index)
-{
- assert(m_pEntries);
- assert(m_entriesCount);
- assert(index < m_entriesCount);
-
- BlockGroup* const pGroup = new BlockGroup(this, index, start, size);
- assert(pGroup); //TODO
-
- m_pEntries[index] = pGroup;
-}
-
-
-
-void Cluster::ParseSimpleBlock(long long start, long long size, size_t index)
-{
- assert(m_pEntries);
- assert(m_entriesCount);
- assert(index < m_entriesCount);
-
- SimpleBlock* const pSimpleBlock = new SimpleBlock(this, index, start, size);
- assert(pSimpleBlock); //TODO
-
- m_pEntries[index] = pSimpleBlock;
-}
-
-
-const BlockEntry* Cluster::GetFirst()
-{
- LoadBlockEntries();
-
- return m_pEntries[0];
-}
-
-
-const BlockEntry* Cluster::GetLast()
-{
- if (m_entriesCount == 0)
- return m_pEntries[0];
-
- return m_pEntries[m_entriesCount-1];
-}
-
-
-const BlockEntry* Cluster::GetNext(const BlockEntry* pEntry) const
-{
- assert(pEntry);
-
- size_t idx = pEntry->GetIndex();
-
- ++idx;
-
- if (idx == m_entriesCount)
- return NULL;
-
- return m_pEntries[idx];
-
-}
-
-
-const BlockEntry* Cluster::GetEntry(const Track* pTrack)
-{
-
- assert(pTrack);
-
- if (m_pSegment == NULL) //EOS
- return pTrack->GetEOS();
-
- LoadBlockEntries();
-
- BlockEntry** i = m_pEntries;
- BlockEntry** const j = m_pEntries + m_entriesCount;
- while (i != j)
- {
- BlockEntry* const pEntry = *i++;
- assert(pEntry);
- assert(!pEntry->EOS());
-
- const Block* const pBlock = pEntry->GetBlock();
- assert(pBlock);
-
- if (pBlock->GetTrackNumber() != pTrack->GetNumber())
- continue;
-
- if (pTrack->VetEntry(pEntry))
- return pEntry;
- }
-
- return pTrack->GetEOS(); //no satisfactory block found
-}
-
-
-BlockEntry::BlockEntry()
-{
-}
-
-
-BlockEntry::~BlockEntry()
-{
-}
-
-
-
-SimpleBlock::SimpleBlock(
- Cluster* pCluster,
- size_t idx,
- long long start,
- long long size) :
- m_pCluster(pCluster),
- m_index(idx),
- m_block(start, size, pCluster->m_pSegment->m_pReader)
-{
-}
-
-
-bool SimpleBlock::EOS() const
-{
- return false;
-}
-
-
-Cluster* SimpleBlock::GetCluster() const
-{
- return m_pCluster;
-}
-
-
-size_t SimpleBlock::GetIndex() const
-{
- return m_index;
-}
-
-
-const Block* SimpleBlock::GetBlock() const
-{
- return &m_block;
-}
-
-
-bool SimpleBlock::IsBFrame() const
-{
- return false;
-}
-
-
-BlockGroup::BlockGroup(
- Cluster* pCluster,
- size_t idx,
- long long start,
- long long size_) :
- m_pCluster(pCluster),
- m_index(idx),
- m_prevTimeCode(0),
- m_nextTimeCode(0),
- m_pBlock(NULL) //TODO: accept multiple blocks within a block group
-{
- IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
-
- long long pos = start;
- const long long stop = start + size_;
-
- bool bSimpleBlock = false;
-
- while (pos < stop)
- {
- short t;
-
- if (Match(pReader, pos, 0x7B, t))
- {
- if (t < 0)
- m_prevTimeCode = t;
- else if (t > 0)
- m_nextTimeCode = t;
- else
- assert(false);
- }
- else
- {
- long len;
- const long long id = ReadUInt(pReader, pos, len);
- assert(id >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume ID
-
- const long long size = ReadUInt(pReader, pos, len);
- assert(size >= 0); //TODO
- assert((pos + len) <= stop);
-
- pos += len; //consume size
-
- switch (id)
- {
- case 0x23: //SimpleBlock ID
- bSimpleBlock = true;
- //YES, FALL THROUGH TO NEXT CASE
-
- case 0x21: //Block ID
- ParseBlock(pos, size);
- break;
-
- default:
- break;
- }
-
- pos += size; //consume payload
- assert(pos <= stop);
- }
- }
-
- assert(pos == stop);
- assert(m_pBlock);
-
- if (!bSimpleBlock)
- m_pBlock->SetKey(m_prevTimeCode >= 0);
-}
-
-
-BlockGroup::~BlockGroup()
-{
- delete m_pBlock;
-}
-
-
-void BlockGroup::ParseBlock(long long start, long long size)
-{
- IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
-
- Block* const pBlock = new Block(start, size, pReader);
- assert(pBlock); //TODO
-
- //TODO: the Matroska spec says you can have multiple blocks within
- //the same block group, with blocks ranked by priority (the flag bits).
- //I haven't ever seen such a file (mkvmux certainly doesn't make
- //one), so until then I'll just assume block groups contain a single
- //block.
-#if 0
- m_blocks.push_back(pBlock);
-#else
- assert(m_pBlock == NULL);
- m_pBlock = pBlock;
-#endif
-
-#if 0
- Track* const pTrack = pBlock->GetTrack();
- assert(pTrack);
-
- pTrack->Insert(pBlock);
-#endif
-}
-
-
-bool BlockGroup::EOS() const
-{
- return false;
-}
-
-
-Cluster* BlockGroup::GetCluster() const
-{
- return m_pCluster;
-}
-
-
-size_t BlockGroup::GetIndex() const
-{
- return m_index;
-}
-
-
-const Block* BlockGroup::GetBlock() const
-{
- return m_pBlock;
-}
-
-
-short BlockGroup::GetPrevTimeCode() const
-{
- return m_prevTimeCode;
-}
-
-
-short BlockGroup::GetNextTimeCode() const
-{
- return m_nextTimeCode;
-}
-
-
-bool BlockGroup::IsBFrame() const
-{
- return (m_nextTimeCode > 0);
-}
-
-
-
-Block::Block(long long start, long long size_, IMkvReader* pReader) :
- m_start(start),
- m_size(size_)
-{
- long long pos = start;
- const long long stop = start + size_;
-
- long len;
-
- m_track = ReadUInt(pReader, pos, len);
- assert(m_track > 0);
- assert((pos + len) <= stop);
-
- pos += len; //consume track number
- assert((stop - pos) >= 2);
-
- m_timecode = Unserialize2SInt(pReader, pos);
-
- pos += 2;
- assert((stop - pos) >= 1);
-
- const long hr = pReader->Read(pos, 1, &m_flags);
- assert(hr == 0L);
-
- ++pos;
- assert(pos <= stop);
-
- m_frameOff = pos;
-
- const long long frame_size = stop - pos;
-
- assert(frame_size <= 2147483647L);
-
- m_frameSize = static_cast<long>(frame_size);
-}
-
-
-long long Block::GetTimeCode(Cluster* pCluster) const
-{
- assert(pCluster);
-
- const long long tc0 = pCluster->GetTimeCode();
- assert(tc0 >= 0);
-
- const long long tc = tc0 + static_cast<long long>(m_timecode);
- assert(tc >= 0);
-
- return tc; //unscaled timecode units
-}
-
-
-long long Block::GetTime(Cluster* pCluster) const
-{
- assert(pCluster);
-
- const long long tc = GetTimeCode(pCluster);
-
- const Segment* const pSegment = pCluster->m_pSegment;
- const SegmentInfo* const pInfo = pSegment->GetInfo();
- assert(pInfo);
-
- const long long scale = pInfo->GetTimeCodeScale();
- assert(scale >= 1);
-
- const long long ns = tc * scale;
-
- return ns;
-}
-
-
-unsigned long Block::GetTrackNumber() const
-{
- assert(m_track > 0);
-
- return static_cast<unsigned long>(m_track);
-}
-
-
-bool Block::IsKey() const
-{
- return ((m_flags & static_cast<unsigned char>(1 << 7)) != 0);
-}
-
-
-void Block::SetKey(bool bKey)
-{
- if (bKey)
- m_flags |= static_cast<unsigned char>(1 << 7);
- else
- m_flags &= 0x7F;
-}
-
-
-long Block::GetSize() const
-{
- return m_frameSize;
-}
-
-
-long Block::Read(IMkvReader* pReader, unsigned char* buf) const
-{
-
- assert(pReader);
- assert(buf);
-
- const long hr = pReader->Read(m_frameOff, m_frameSize, buf);
-
- return hr;
-}
-
-
-} //end namespace mkvparser
+// Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+
+#include "mkvparser.hpp"
+#include <cassert>
+#include <cstring>
+#include <new>
+//#include <windows.h>
+//#include "odbgstream.hpp"
+//using std::endl;
+
+mkvparser::IMkvReader::~IMkvReader()
+{
+}
+
+
+void mkvparser::GetVersion(int& major, int& minor, int& build, int& revision)
+{
+ major = 1;
+ minor = 0;
+ build = 0;
+ revision = 4;
+}
+
+
+long long mkvparser::ReadUInt(IMkvReader* pReader, long long pos, long& len)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(pos < available);
+ assert((available - pos) >= 1); //assume here max u-int len is 8
+
+ unsigned char b;
+
+ hr = pReader->Read(pos, 1, &b);
+ if (hr < 0)
+ return hr;
+
+ assert(hr == 0L);
+
+ if (b & 0x80) //1000 0000
+ {
+ len = 1;
+ b &= 0x7F; //0111 1111
+ }
+ else if (b & 0x40) //0100 0000
+ {
+ len = 2;
+ b &= 0x3F; //0011 1111
+ }
+ else if (b & 0x20) //0010 0000
+ {
+ len = 3;
+ b &= 0x1F; //0001 1111
+ }
+ else if (b & 0x10) //0001 0000
+ {
+ len = 4;
+ b &= 0x0F; //0000 1111
+ }
+ else if (b & 0x08) //0000 1000
+ {
+ len = 5;
+ b &= 0x07; //0000 0111
+ }
+ else if (b & 0x04) //0000 0100
+ {
+ len = 6;
+ b &= 0x03; //0000 0011
+ }
+ else if (b & 0x02) //0000 0010
+ {
+ len = 7;
+ b &= 0x01; //0000 0001
+ }
+ else
+ {
+ assert(b & 0x01); //0000 0001
+ len = 8;
+ b = 0; //0000 0000
+ }
+
+ assert((available - pos) >= len);
+
+ long long result = b;
+ ++pos;
+ for (long i = 1; i < len; ++i)
+ {
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ assert(hr == 0L);
+
+ result <<= 8;
+ result |= b;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+long long mkvparser::GetUIntLength(
+ IMkvReader* pReader,
+ long long pos,
+ long& len)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ if (pos >= available)
+ return pos; //too few bytes available
+
+ unsigned char b;
+
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ assert(hr == 0L);
+
+ if (b == 0) //we can't handle u-int values larger than 8 bytes
+ return E_FILE_FORMAT_INVALID;
+
+ unsigned char m = 0x80;
+ len = 1;
+
+ while (!(b & m))
+ {
+ m >>= 1;
+ ++len;
+ }
+
+ return 0; //success
+}
+
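A minimal sketch (not part of this change) of the EBML variable-length-integer
coding that ReadUInt and GetUIntLength implement above, applied to a plain
in-memory buffer; the helper name DecodeEbmlUInt is illustrative only.

#include <cassert>

//Illustrative only: the number of leading zero bits in the first byte
//(plus one) gives the total length; the marker bit is then stripped,
//exactly as ReadUInt does via its chain of mask tests.
static long long DecodeEbmlUInt(const unsigned char* buf, long& len)
{
    unsigned char b = buf[0];
    unsigned char m = 0x80;
    len = 1;

    while (m && !(b & m)) //locate the marker bit
    {
        m >>= 1;
        ++len;
    }

    assert(m); //a zero first byte (length > 8) is not handled in this sketch

    long long result = b & ~m; //strip the marker bit

    for (long i = 1; i < len; ++i)
    {
        result <<= 8;
        result |= buf[i];
    }

    return result;
}

static void EbmlVintExample()
{
    //The EBML header ID 0x1A45DFA3 is a 4-byte vint (its first byte 0x1A
    //carries marker bit 0x10); with the marker stripped it reads back as
    //0x0A45DFA3, the value EBMLHeader::Parse compares against further below.
    const unsigned char id[4] = { 0x1A, 0x45, 0xDF, 0xA3 };

    long len;
    const long long value = DecodeEbmlUInt(id, len);

    assert(len == 4);
    assert(value == 0x0A45DFA3LL);
}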
+
+long long mkvparser::SyncReadUInt(
+ IMkvReader* pReader,
+ long long pos,
+ long long stop,
+ long& len)
+{
+ assert(pReader);
+
+ if (pos >= stop)
+ return E_FILE_FORMAT_INVALID;
+
+ unsigned char b;
+
+ long hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ if (hr != 0L)
+ return E_BUFFER_NOT_FULL;
+
+ if (b == 0) //we can't handle u-int values larger than 8 bytes
+ return E_FILE_FORMAT_INVALID;
+
+ unsigned char m = 0x80;
+ len = 1;
+
+ while (!(b & m))
+ {
+ m >>= 1;
+ ++len;
+ }
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ long long result = b & (~m);
+ ++pos;
+
+ for (int i = 1; i < len; ++i)
+ {
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ if (hr != 0L)
+ return E_BUFFER_NOT_FULL;
+
+ result <<= 8;
+ result |= b;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+long long mkvparser::UnserializeUInt(
+ IMkvReader* pReader,
+ long long pos,
+ long long size)
+{
+ assert(pReader);
+ assert(pos >= 0);
+ assert(size > 0);
+ assert(size <= 8);
+
+ long long result = 0;
+
+ for (long long i = 0; i < size; ++i)
+ {
+ unsigned char b;
+
+ const long hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+ result <<= 8;
+ result |= b;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+float mkvparser::Unserialize4Float(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+ assert((pos + 4) <= available);
+
+ float result;
+
+ unsigned char* const p = (unsigned char*)&result;
+ unsigned char* q = p + 4;
+
+ for (;;)
+ {
+ hr = pReader->Read(pos, 1, --q);
+ assert(hr == 0L);
+
+ if (q == p)
+ break;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+double mkvparser::Unserialize8Double(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ double result;
+
+ unsigned char* const p = (unsigned char*)&result;
+ unsigned char* q = p + 8;
+
+ for (;;)
+ {
+ const long hr = pReader->Read(pos, 1, --q);
+ assert(hr == 0L);
+
+ if (q == p)
+ break;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+signed char mkvparser::Unserialize1SInt(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr == 0);
+ assert(available <= total);
+ assert(pos < available);
+
+ signed char result;
+
+ hr = pReader->Read(pos, 1, (unsigned char*)&result);
+ assert(hr == 0);
+
+ return result;
+}
+
+short mkvparser::Unserialize2SInt(
+ IMkvReader* pReader,
+ long long pos)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+ assert((pos + 2) <= available);
+
+ short result;
+
+ unsigned char* const p = (unsigned char*)&result;
+ unsigned char* q = p + 2;
+
+ for (;;)
+ {
+ hr = pReader->Read(pos, 1, --q);
+ assert(hr == 0L);
+
+ if (q == p)
+ break;
+
+ ++pos;
+ }
+
+ return result;
+}
+
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ long long& val)
+
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert(size <= 8);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+
+ val = UnserializeUInt(pReader, pos, size);
+ assert(val >= 0);
+
+ pos += size; //consume size of payload
+
+ return true;
+}
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ char*& val)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size_ = ReadUInt(pReader, pos, len);
+ assert(size_ >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+ assert((pos + size_) <= available);
+
+ const size_t size = static_cast<size_t>(size_);
+ val = new char[size+1];
+
+ for (size_t i = 0; i < size; ++i)
+ {
+ char c;
+
+ hr = pReader->Read(pos + i, 1, (unsigned char*)&c);
+ assert(hr == 0L);
+
+ val[i] = c;
+
+ if (c == '\0')
+ break;
+
+ }
+
+ val[size] = '\0';
+ pos += size_; //consume size of payload
+
+ return true;
+}
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ unsigned char*& buf,
+ size_t& buflen)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size_ = ReadUInt(pReader, pos, len);
+ assert(size_ >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+ assert((pos + size_) <= available);
+
+ const long buflen_ = static_cast<long>(size_);
+
+ buf = new (std::nothrow) unsigned char[buflen_];
+ assert(buf); //TODO
+
+ hr = pReader->Read(pos, buflen_, buf);
+ assert(hr == 0L);
+
+ buflen = buflen_;
+
+ pos += size_; //consume size of payload
+ return true;
+}
+
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ double& val)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+ long idlen;
+ const long long id = ReadUInt(pReader, pos, idlen);
+ assert(id >= 0); //TODO
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ long sizelen;
+ const long long size = ReadUInt(pReader, pos + idlen, sizelen);
+
+ switch (size)
+ {
+ case 4:
+ case 8:
+ break;
+ default:
+ return false;
+ }
+
+ pos += idlen + sizelen; //consume id and size fields
+ assert((pos + size) <= available);
+
+ if (size == 4)
+ val = Unserialize4Float(pReader, pos);
+ else
+ {
+ assert(size == 8);
+ val = Unserialize8Double(pReader, pos);
+ }
+
+ pos += size; //consume size of payload
+
+ return true;
+}
+
+
+bool mkvparser::Match(
+ IMkvReader* pReader,
+ long long& pos,
+ unsigned long id_,
+ short& val)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert((pos + len) <= available);
+
+ if ((unsigned long)id != id_)
+ return false;
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size <= 2);
+ assert((pos + len) <= available);
+
+ pos += len; //consume length of size of payload
+ assert((pos + size) <= available);
+
+ //TODO:
+ // Generalize this to work for any size signed int
+ if (size == 1)
+ val = Unserialize1SInt(pReader, pos);
+ else
+ val = Unserialize2SInt(pReader, pos);
+
+ pos += size; //consume size of payload
+
+ return true;
+}
+
+
+namespace mkvparser
+{
+
+EBMLHeader::EBMLHeader():
+ m_docType(NULL)
+{
+}
+
+EBMLHeader::~EBMLHeader()
+{
+ delete[] m_docType;
+}
+
+long long EBMLHeader::Parse(
+ IMkvReader* pReader,
+ long long& pos)
+{
+ assert(pReader);
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+
+ if (hr < 0)
+ return hr;
+
+ pos = 0;
+ long long end = (1024 < available)? 1024: available;
+
+ for (;;)
+ {
+ unsigned char b = 0;
+
+ while (pos < end)
+ {
+ hr = pReader->Read(pos, 1, &b);
+
+ if (hr < 0)
+ return hr;
+
+ if (b == 0x1A)
+ break;
+
+ ++pos;
+ }
+
+ if (b != 0x1A)
+ {
+ if ((pos >= 1024) ||
+ (available >= total) ||
+ ((total - available) < 5))
+ return -1;
+
+ return available + 5; //5 = 4-byte ID + 1st byte of size
+ }
+
+ if ((total - pos) < 5)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((available - pos) < 5)
+ return pos + 5; //try again later
+
+ long len;
+
+ const long long result = ReadUInt(pReader, pos, len);
+
+ if (result < 0) //error
+ return result;
+
+ if (result == 0x0A45DFA3) //ReadUInt masks off the length-indicator bits
+ {
+ assert(len == 4);
+ pos += len;
+ break;
+ }
+
+ ++pos; //throw away just the 0x1A byte, and try again
+ }
+
+ long len;
+ long long result = GetUIntLength(pReader, pos, len);
+
+ if (result < 0) //error
+ return result;
+
+ if (result > 0) //need more data
+ return result;
+
+ assert(len > 0);
+ assert(len <= 8);
+
+ if ((total - pos) < len)
+ return E_FILE_FORMAT_INVALID;
+ if ((available - pos) < len)
+ return pos + len; //try again later
+
+ result = ReadUInt(pReader, pos, len);
+
+ if (result < 0) //error
+ return result;
+
+ pos += len; //consume u-int
+
+ if ((total - pos) < result)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((available - pos) < result)
+ return pos + result;
+
+ end = pos + result;
+
+ m_version = 1;
+ m_readVersion = 1;
+ m_maxIdLength = 4;
+ m_maxSizeLength = 8;
+ m_docTypeVersion = 1;
+ m_docTypeReadVersion = 1;
+
+ while (pos < end)
+ {
+ if (Match(pReader, pos, 0x0286, m_version))
+ ;
+ else if (Match(pReader, pos, 0x02F7, m_readVersion))
+ ;
+ else if (Match(pReader, pos, 0x02F2, m_maxIdLength))
+ ;
+ else if (Match(pReader, pos, 0x02F3, m_maxSizeLength))
+ ;
+ else if (Match(pReader, pos, 0x0282, m_docType))
+ ;
+ else if (Match(pReader, pos, 0x0287, m_docTypeVersion))
+ ;
+ else if (Match(pReader, pos, 0x0285, m_docTypeReadVersion))
+ ;
+ else
+ {
+ result = ReadUInt(pReader, pos, len);
+ assert(result > 0);
+ assert(len > 0);
+ assert(len <= 8);
+
+ pos += len;
+ assert(pos < end);
+
+ result = ReadUInt(pReader, pos, len);
+ assert(result >= 0);
+ assert(len > 0);
+ assert(len <= 8);
+
+ pos += len + result;
+ assert(pos <= end);
+ }
+ }
+
+ assert(pos == end);
+
+ return 0;
+}
+
+
+Segment::Segment(
+ IMkvReader* pReader,
+ long long start,
+ long long size) :
+ m_pReader(pReader),
+ m_start(start),
+ m_size(size),
+ m_pos(start),
+ m_pInfo(NULL),
+ m_pTracks(NULL),
+ m_pCues(NULL),
+ m_clusters(NULL),
+ m_clusterCount(0),
+ m_clusterPreloadCount(0),
+ m_clusterSize(0)
+{
+}
+
+
+Segment::~Segment()
+{
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ Cluster** i = m_clusters;
+ Cluster** j = m_clusters + count;
+
+ while (i != j)
+ {
+ Cluster* const p = *i++;
+ assert(p);
+
+ delete p;
+ }
+
+ delete[] m_clusters;
+
+ delete m_pTracks;
+ delete m_pInfo;
+ delete m_pCues;
+}
+
+
+long long Segment::CreateInstance(
+ IMkvReader* pReader,
+ long long pos,
+ Segment*& pSegment)
+{
+ assert(pReader);
+ assert(pos >= 0);
+
+ pSegment = NULL;
+
+ long long total, available;
+
+ long hr = pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ //I would assume that in practice this loop would execute
+ //exactly once, but we allow for other elements (e.g. Void)
+ //to immediately follow the EBML header. This is fine for
+ //the source filter case (since the entire file is available),
+ //but in the splitter case over a network we should probably
+ //just give up early. We could for example decide only to
+ //execute this loop a maximum of, say, 10 times.
+
+ while (pos < total)
+ {
+ //Read ID
+
+ long len;
+ long long result = GetUIntLength(pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > total)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ //TODO: if we liberalize the behavior of ReadUInt, we can
+ //probably eliminate having to use GetUIntLength here.
+ const long long id = ReadUInt(pReader, pos, len);
+
+ if (id < 0) //error
+ return id;
+
+ pos += len; //consume ID
+
+ //Read Size
+
+ result = GetUIntLength(pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > total)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ //TODO: if we liberalize the behavior of ReadUInt, we can
+ //probably eliminate having to use GetUIntLength here.
+ const long long size = ReadUInt(pReader, pos, len);
+
+ if (size < 0)
+ return size;
+
+ pos += len; //consume length of size of element
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > total)
+ return E_FILE_FORMAT_INVALID;
+
+ if (id == 0x08538067) //Segment ID
+ {
+ pSegment = new Segment(pReader, pos, size);
+ assert(pSegment); //TODO
+
+ return 0; //success
+ }
+
+ pos += size; //consume payload
+ }
+
+ assert(pos == total);
+
+ pSegment = new Segment(pReader, pos, 0);
+ assert(pSegment); //TODO
+
+ return 0; //success (sort of)
+}
+
+
+long long Segment::ParseHeaders()
+{
+ //Outermost (level 0) segment object has been constructed,
+ //and pos designates start of payload. We need to find the
+ //inner (level 1) elements.
+ long long total, available;
+
+ long hr = m_pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available <= total);
+
+ const long long stop = m_start + m_size;
+ assert(stop <= total);
+ assert(m_pos <= stop);
+
+ bool bQuit = false;
+
+ while ((m_pos < stop) && !bQuit)
+ {
+ long long pos = m_pos;
+
+ long len;
+ long long result = GetUIntLength(m_pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ const long long idpos = pos;
+ const long long id = ReadUInt(m_pReader, idpos, len);
+
+ if (id < 0) //error
+ return id;
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+
+ if (result) //error, or too few available bytes
+ return result;
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if ((pos + len) > available)
+ return pos + len;
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+
+ if (size < 0)
+ return size;
+
+ pos += len; //consume length of size of element
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ //We read EBML elements either in total or nothing at all.
+
+ if ((pos + size) > available)
+ return pos + size;
+
+ if (id == 0x0549A966) //Segment Info ID
+ {
+ assert(m_pInfo == NULL);
+
+ m_pInfo = new SegmentInfo(this, pos, size);
+ assert(m_pInfo); //TODO
+ }
+ else if (id == 0x0654AE6B) //Tracks ID
+ {
+ assert(m_pTracks == NULL);
+
+ m_pTracks = new Tracks(this, pos, size);
+ assert(m_pTracks); //TODO
+ }
+ else if (id == 0x0C53BB6B) //Cues ID
+ {
+ if (m_pCues == NULL)
+ {
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+ }
+ }
+ else if (id == 0x014D9B74) //SeekHead ID
+ {
+ ParseSeekHead(pos, size);
+ }
+ else if (id == 0x0F43B675) //Cluster ID
+ {
+ bQuit = true;
+ }
+
+ if (!bQuit)
+ m_pos = pos + size; //consume payload
+ }
+
+ assert(m_pos <= stop);
+
+ if (m_pInfo == NULL) //TODO: liberalize this behavior
+ return E_FILE_FORMAT_INVALID;
+
+ if (m_pTracks == NULL)
+ return E_FILE_FORMAT_INVALID;
+
+ return 0; //success
+}
+
+
+#if 0
+long Segment::ParseCluster(Cluster*& pCluster, long long& pos_) const
+{
+ pCluster = NULL;
+ pos_ = -1;
+
+ const long long stop = m_start + m_size;
+ assert(m_pos <= stop);
+
+ long long pos = m_pos;
+ long long off = -1;
+
+ while (pos < stop)
+ {
+ long len;
+ const long long idpos = pos;
+
+ const long long id = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ if (id == 0)
+ return E_FILE_FORMAT_INVALID;
+
+ pos += len; //consume id
+ assert(pos < stop);
+
+ const long long size = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos += len; //consume size
+ assert(pos <= stop);
+
+ if (size == 0) //weird
+ continue;
+
+ //pos now points to start of payload
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+
+ if (id == 0x0F43B675) //Cluster ID
+ {
+ off = idpos - m_start; // >= 0 means we found a cluster
+ break;
+ }
+ }
+
+ assert(pos <= stop);
+
+ //Indicate to caller how much of file has been consumed. This is
+ //used later in AddCluster to adjust the current parse position
+ //(the value cached in the segment object itself) to the
+ //file position value just past the cluster we parsed.
+
+ if (off < 0) //we did not find any more clusters
+ {
+ pos_ = stop; //pos_ >= 0 here means EOF (cluster is NULL)
+ return 0; //TODO: confirm this return value
+ }
+
+ //We found a cluster. Now read something, to ensure that it is
+ //fully loaded in the network cache.
+
+ if (pos >= stop) //we parsed the entire segment
+ {
+ //We did find a cluster, but it was the very last element in the
+ //segment. Our preference is that the loop above runs 1 1/2 times:
+ //the first pass finds the cluster, and the second pass
+ //finds the element that follows the cluster. In this case, however,
+ //we reached the end of the file without finding another element,
+ //so we didn't actually read anything yet associated with "end of the
+ //cluster". And we must perform an actual read, in order
+ //to guarantee that all of the data that belongs to this
+ //cluster has been loaded into the network cache. So instead
+ //of reading the next element that follows the cluster, we
+ //read the last byte of the cluster (which is also the last
+ //byte in the file).
+
+ //Read the last byte of the file. (Reading 0 bytes at pos
+ //might work too -- it would depend on how the reader is
+ //implemented. Here we take the more conservative approach,
+ //since this makes fewer assumptions about the network
+ //reader abstraction.)
+
+ unsigned char b;
+
+ const int result = m_pReader->Read(pos - 1, 1, &b);
+ assert(result == 0);
+
+ pos_ = stop;
+ }
+ else
+ {
+ long len;
+ const long long idpos = pos;
+
+ const long long id = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ if (id == 0)
+ return E_BUFFER_NOT_FULL;
+
+ pos += len; //consume id
+ assert(pos < stop);
+
+ const long long size = SyncReadUInt(m_pReader, pos, stop, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos_ = idpos;
+ }
+
+ //We found a cluster, and it has been completely loaded into the
+ //network cache. (We can guarantee this because we actually read
+ //the EBML tag that follows the cluster, or, if we reached EOF,
+ //because we actually read the last byte of the cluster).
+
+ Segment* const this_ = const_cast<Segment*>(this);
+
+ pCluster = Cluster::Parse(this_, m_clusterCount, off);
+ assert(pCluster);
+ assert(pCluster->m_index == m_clusterCount);
+
+ return 0;
+}
+
+
+bool Segment::AddCluster(Cluster* pCluster, long long pos)
+{
+ assert(pos >= m_start);
+
+ const long long stop = m_start + m_size;
+ assert(pos <= stop);
+
+ if (pCluster)
+ {
+ AppendCluster(pCluster);
+ assert(m_clusters);
+ assert(m_clusterSize > pCluster->m_index);
+ assert(m_clusters[pCluster->m_index] == pCluster);
+ }
+
+ m_pos = pos; //m_pos >= stop now means we have all the clusters
+
+ return (pos >= stop);
+}
+#endif
+
+
+long Segment::LoadCluster()
+{
+ const long long stop = m_start + m_size;
+
+ while (m_pos < stop)
+ {
+ long long pos = m_pos;
+
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long idpos = pos;
+ const long long id = ReadUInt(m_pReader, idpos, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos += len; //consume length of size of element
+
+ if (size == 0) //weird
+ {
+ m_pos = pos;
+ continue;
+ }
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if (id == 0x0C53BB6B) //Cues ID
+ {
+ if (m_pCues == NULL)
+ {
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+ }
+
+ m_pos = pos + size; //consume payload
+ continue;
+ }
+
+ if (id != 0x0F43B675) //Cluster ID
+ {
+ m_pos = pos + size; //consume payload
+ continue;
+ }
+
+ const long idx = m_clusterCount;
+ const long long idoff = idpos - m_start;
+
+ if (m_clusterPreloadCount > 0)
+ {
+ assert(idx < m_clusterSize);
+
+ Cluster* const pCluster = m_clusters[idx];
+ assert(pCluster);
+ assert(pCluster->m_index < 0);
+
+ const long long off_ = pCluster->m_pos;
+ assert(off_);
+
+ const long long off = off_ * ((off_ >= 0) ? 1 : -1);
+ assert(idoff <= off);
+
+ if (idoff == off) //cluster has been preloaded already
+ {
+ pCluster->m_index = idx;
+ ++m_clusterCount;
+ --m_clusterPreloadCount;
+
+ m_pos = pos + size; //consume payload
+ break;
+ }
+ }
+
+ Cluster* const pCluster = Cluster::Parse(this, idx, idoff);
+ assert(pCluster);
+ assert(pCluster->m_index == idx);
+
+ AppendCluster(pCluster);
+ assert(m_clusters);
+ assert(idx < m_clusterSize);
+ assert(m_clusters[idx] == pCluster);
+
+ m_pos = pos + size; //consume payload
+ break;
+ }
+
+ assert(m_pos <= stop);
+ return 0;
+}
+
+
+void Segment::AppendCluster(Cluster* pCluster)
+{
+ assert(pCluster);
+ assert(pCluster->m_index >= 0);
+
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ long& size = m_clusterSize;
+ assert(size >= count);
+
+ const long idx = pCluster->m_index;
+ assert(idx == m_clusterCount);
+
+ if (count >= size)
+ {
+ long n;
+
+ if (size > 0)
+ n = 2 * size;
+ else if (m_pInfo == 0)
+ n = 2048;
+ else
+ {
+ const long long ns = m_pInfo->GetDuration();
+
+ if (ns <= 0)
+ n = 2048;
+ else
+ {
+ const long long sec = (ns + 999999999LL) / 1000000000LL;
+ n = static_cast<long>(sec);
+ }
+ }
+
+ Cluster** const qq = new Cluster*[n];
+ Cluster** q = qq;
+
+ Cluster** p = m_clusters;
+ Cluster** const pp = p + count;
+
+ while (p != pp)
+ *q++ = *p++;
+
+ delete[] m_clusters;
+
+ m_clusters = qq;
+ size = n;
+ }
+
+ if (m_clusterPreloadCount > 0)
+ {
+ assert(m_clusters);
+
+ Cluster** const p = m_clusters + m_clusterCount;
+ assert(*p);
+ assert((*p)->m_index < 0);
+
+ Cluster** q = p + m_clusterPreloadCount;
+ assert(q < (m_clusters + size));
+
+ for (;;)
+ {
+ Cluster** const qq = q - 1;
+ assert((*qq)->m_index < 0);
+
+ *q = *qq;
+ q = qq;
+
+ if (q == p)
+ break;
+ }
+ }
+
+ m_clusters[idx] = pCluster;
+ ++m_clusterCount;
+}
+
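A quick sketch (not part of this change) of the capacity rule above: when the
cluster array is first allocated and a duration is known, the duration is
rounded up to whole seconds, i.e. roughly one preallocated slot per second of
media. The helper name below is illustrative only.

#include <cassert>

//Illustrative only: mirrors the initial-capacity branch of AppendCluster
//and PreloadCluster (the doubling branch is used on later growth).
static long InitialClusterCapacity(long long duration_ns)
{
    if (duration_ns <= 0)
        return 2048; //no usable duration: fall back to a fixed guess

    const long long sec = (duration_ns + 999999999LL) / 1000000000LL; //round up
    return static_cast<long>(sec);
}

static void CapacityExample()
{
    //A 9.5-second segment reserves room for 10 clusters up front.
    assert(InitialClusterCapacity(9500000000LL) == 10);
}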
+
+void Segment::PreloadCluster(Cluster* pCluster, ptrdiff_t idx)
+{
+ assert(pCluster);
+ assert(pCluster->m_index < 0);
+ assert(idx >= m_clusterCount);
+
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ long& size = m_clusterSize;
+ assert(size >= count);
+
+ if (count >= size)
+ {
+ long n;
+
+ if (size > 0)
+ n = 2 * size;
+ else if (m_pInfo == 0)
+ n = 2048;
+ else
+ {
+ const long long ns = m_pInfo->GetDuration();
+
+ if (ns <= 0)
+ n = 2048;
+ else
+ {
+ const long long sec = (ns + 999999999LL) / 1000000000LL;
+ n = static_cast<long>(sec);
+ }
+ }
+
+ Cluster** const qq = new Cluster*[n];
+ Cluster** q = qq;
+
+ Cluster** p = m_clusters;
+ Cluster** const pp = p + count;
+
+ while (p != pp)
+ *q++ = *p++;
+
+ delete[] m_clusters;
+
+ m_clusters = qq;
+ size = n;
+ }
+
+ assert(m_clusters);
+
+ Cluster** const p = m_clusters + idx;
+
+ Cluster** q = m_clusters + count;
+ assert(q >= p);
+ assert(q < (m_clusters + size));
+
+ while (q > p)
+ {
+ Cluster** const qq = q - 1;
+ assert((*qq)->m_index < 0);
+
+ *q = *qq;
+ q = qq;
+ }
+
+ m_clusters[idx] = pCluster;
+ ++m_clusterPreloadCount;
+}
+
+
+long Segment::Load()
+{
+ assert(m_clusters == NULL);
+ assert(m_clusterSize == 0);
+ assert(m_clusterCount == 0);
+
+ //Outermost (level 0) segment object has been constructed,
+ //and pos designates start of payload. We need to find the
+ //inner (level 1) elements.
+ const long long stop = m_start + m_size;
+
+#ifdef _DEBUG //TODO: this is really Microsoft-specific
+ {
+ long long total, available;
+
+ long hr = m_pReader->Length(&total, &available);
+ assert(hr >= 0);
+ assert(available >= total);
+ assert(stop <= total);
+ }
+#endif
+
+ while (m_pos < stop)
+ {
+ long long pos = m_pos;
+
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long idpos = pos;
+ const long long id = ReadUInt(m_pReader, idpos, len);
+
+ if (id < 0) //error
+ return static_cast<long>(id);
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+
+ if (result < 0) //error
+ return static_cast<long>(result);
+
+ if ((pos + len) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+
+ if (size < 0) //error
+ return static_cast<long>(size);
+
+ pos += len; //consume length of size of element
+
+ //Pos now points to start of payload
+
+ if ((pos + size) > stop)
+ return E_FILE_FORMAT_INVALID;
+
+ if (id == 0x0F43B675) //Cluster ID
+ {
+ const long idx = m_clusterCount;
+ const long long off = idpos - m_start;
+
+ Cluster* const pCluster = Cluster::Parse(this, idx, off);
+ assert(pCluster);
+ assert(pCluster->m_index == idx);
+
+ AppendCluster(pCluster);
+ assert(m_clusters);
+ assert(m_clusterSize > idx);
+ assert(m_clusters[idx] == pCluster);
+ }
+ else if (id == 0x0C53BB6B) //Cues ID
+ {
+ assert(m_pCues == NULL);
+
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+ }
+ else if (id == 0x0549A966) //SegmentInfo ID
+ {
+ assert(m_pInfo == NULL);
+
+ m_pInfo = new SegmentInfo(this, pos, size);
+ assert(m_pInfo);
+ }
+ else if (id == 0x0654AE6B) //Tracks ID
+ {
+ assert(m_pTracks == NULL);
+
+ m_pTracks = new Tracks(this, pos, size);
+ assert(m_pTracks); //TODO
+ }
+
+ m_pos = pos + size; //consume payload
+ }
+
+ assert(m_pos >= stop);
+
+ if (m_pInfo == NULL)
+ return E_FILE_FORMAT_INVALID; //TODO: ignore this case?
+
+ if (m_pTracks == NULL)
+ return E_FILE_FORMAT_INVALID;
+
+ if (m_clusters == NULL) //TODO: ignore this case?
+ return E_FILE_FORMAT_INVALID;
+
+ //TODO: decide whether we require Cues element
+ //if (m_pCues == NULL)
+ // return E_FILE_FORMAT_INVALID;
+
+ return 0;
+}
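+
+//Note: Load() and most of the parsing routines below walk EBML
+//elements with the same basic pattern, sketched here for reference
+//(variable names as used above):
+//
+//    long len;
+//    const long long id = ReadUInt(m_pReader, pos, len);   //element ID
+//    pos += len;                                           //consume ID
+//    const long long size = ReadUInt(m_pReader, pos, len); //payload size
+//    pos += len;                                           //consume size field
+//    //[pos, pos + size) is the element payload
+//    pos += size;                                          //consume payload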
+
+
+void Segment::ParseSeekHead(long long start, long long size_)
+{
+ long long pos = start;
+ const long long stop = start + size_;
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(m_pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x0DBB) //SeekEntry ID
+ ParseSeekEntry(pos, size);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(pos == stop);
+}
+
+
+void Segment::ParseCues(long long off)
+{
+ if (m_pCues)
+ return;
+
+ //odbgstream os;
+ //os << "Segment::ParseCues (begin)" << endl;
+
+ long long pos = m_start + off;
+ const long long stop = m_start + m_size;
+
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0);
+ assert((pos + len) <= stop);
+
+ const long long idpos = pos;
+
+ const long long id = ReadUInt(m_pReader, idpos, len);
+ assert(id == 0x0C53BB6B); //Cues ID
+
+ pos += len; //consume ID
+ assert(pos < stop);
+
+ //Read Size
+
+ result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0);
+ assert((pos + len) <= stop);
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size >= 0);
+
+ pos += len; //consume length of size of element
+ assert((pos + size) <= stop);
+
+ //Pos now points to start of payload
+
+ m_pCues = new Cues(this, pos, size);
+ assert(m_pCues); //TODO
+
+ //os << "Segment::ParseCues (end)" << endl;
+}
+
+
+void Segment::ParseSeekEntry(
+ long long start,
+ long long size_)
+{
+ long long pos = start;
+
+ const long long stop = start + size_;
+
+ long len;
+
+ const long long seekIdId = ReadUInt(m_pReader, pos, len);
+ //seekIdId;
+ assert(seekIdId == 0x13AB); //SeekID ID
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long seekIdSize = ReadUInt(m_pReader, pos, len);
+ assert(seekIdSize >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ const long long seekId = ReadUInt(m_pReader, pos, len); //payload
+ assert(seekId >= 0);
+ assert(len == seekIdSize);
+ assert((pos + len) <= stop);
+
+ pos += seekIdSize; //consume payload
+
+ const long long seekPosId = ReadUInt(m_pReader, pos, len);
+ //seekPosId;
+ assert(seekPosId == 0x13AC); //SeekPos ID
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long seekPosSize = ReadUInt(m_pReader, pos, len);
+ assert(seekPosSize >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+ assert((pos + seekPosSize) <= stop);
+
+ const long long seekOff = UnserializeUInt(m_pReader, pos, seekPosSize);
+ assert(seekOff >= 0);
+ assert(seekOff < m_size);
+
+ pos += seekPosSize; //consume payload
+ assert(pos == stop);
+
+ const long long seekPos = m_start + seekOff;
+ assert(seekPos < (m_start + m_size));
+
+ if (seekId == 0x0C53BB6B) //Cues ID
+ ParseCues(seekOff);
+}
+
+
+Cues::Cues(Segment* pSegment, long long start_, long long size_) :
+ m_pSegment(pSegment),
+ m_start(start_),
+ m_size(size_),
+ m_cue_points(NULL),
+ m_count(0),
+ m_preload_count(0),
+ m_pos(start_)
+{
+}
+
+
+Cues::~Cues()
+{
+ const size_t n = m_count + m_preload_count;
+
+ CuePoint** p = m_cue_points;
+ CuePoint** const q = p + n;
+
+ while (p != q)
+ {
+ CuePoint* const pCP = *p++;
+ assert(pCP);
+
+ delete pCP;
+ }
+
+ delete[] m_cue_points;
+}
+
+
+void Cues::Init() const
+{
+ if (m_cue_points)
+ return;
+
+ assert(m_count == 0);
+ assert(m_preload_count == 0);
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ const long long stop = m_start + m_size;
+ long long pos = m_start;
+
+ size_t cue_points_size = 0;
+
+ while (pos < stop)
+ {
+ const long long idpos = pos;
+
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x3B) //CuePoint ID
+ PreloadCuePoint(cue_points_size, idpos);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+}
+
+
+void Cues::PreloadCuePoint(
+ size_t& cue_points_size,
+ long long pos) const
+{
+ assert(m_count == 0);
+
+ if (m_preload_count >= cue_points_size)
+ {
+ size_t n;
+
+ if (cue_points_size > 0)
+ n = static_cast<size_t>(2 * cue_points_size);
+ else
+ {
+ const SegmentInfo* const pInfo = m_pSegment->GetInfo();
+
+ if (pInfo == NULL)
+ n = 2048;
+ else
+ {
+ const long long ns = pInfo->GetDuration();
+
+ if (ns <= 0)
+ n = 2048;
+ else
+ {
+ const long long sec = (ns + 999999999LL) / 1000000000LL;
+ n = static_cast<size_t>(sec);
+ }
+ }
+ }
+
+ CuePoint** const qq = new CuePoint*[n];
+ CuePoint** q = qq; //beginning of target
+
+ CuePoint** p = m_cue_points; //beginning of source
+ CuePoint** const pp = p + m_preload_count; //end of source
+
+ while (p != pp)
+ *q++ = *p++;
+
+ delete[] m_cue_points;
+
+ m_cue_points = qq;
+ cue_points_size = n;
+ }
+
+ CuePoint* const pCP = new CuePoint(m_preload_count, pos);
+ m_cue_points[m_preload_count++] = pCP;
+}
+
+
+bool Cues::LoadCuePoint() const
+{
+ //odbgstream os;
+ //os << "Cues::LoadCuePoint" << endl;
+
+ const long long stop = m_start + m_size;
+
+ if (m_pos >= stop)
+ return false; //nothing else to do
+
+ Init();
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ while (m_pos < stop)
+ {
+ const long long idpos = m_pos;
+
+ long len;
+
+ const long long id = ReadUInt(pReader, m_pos, len);
+ assert(id >= 0); //TODO
+ assert((m_pos + len) <= stop);
+
+ m_pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, m_pos, len);
+ assert(size >= 0);
+ assert((m_pos + len) <= stop);
+
+ m_pos += len; //consume Size field
+ assert((m_pos + size) <= stop);
+
+ if (id != 0x3B) //CuePoint ID
+ {
+ m_pos += size; //consume payload
+ assert(m_pos <= stop);
+
+ continue;
+ }
+
+ assert(m_preload_count > 0);
+
+ CuePoint* const pCP = m_cue_points[m_count];
+ assert(pCP);
+ assert((pCP->GetTimeCode() >= 0) || (-pCP->GetTimeCode() == idpos));
+
+ pCP->Load(pReader);
+ ++m_count;
+ --m_preload_count;
+
+ m_pos += size; //consume payload
+ assert(m_pos <= stop);
+
+ break;
+ }
+
+ return (m_pos < stop);
+}
+
+
+bool Cues::Find(
+ long long time_ns,
+ const Track* pTrack,
+ const CuePoint*& pCP,
+ const CuePoint::TrackPosition*& pTP) const
+{
+ assert(time_ns >= 0);
+ assert(pTrack);
+
+ LoadCuePoint();
+
+ assert(m_cue_points);
+ assert(m_count > 0);
+
+ CuePoint** const ii = m_cue_points;
+ CuePoint** i = ii;
+
+ CuePoint** const jj = ii + m_count + m_preload_count;
+ CuePoint** j = jj;
+
+ pCP = *i;
+ assert(pCP);
+
+ if (time_ns <= pCP->GetTime(m_pSegment))
+ {
+ pTP = pCP->Find(pTrack);
+ return (pTP != NULL);
+ }
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[ii, i) <= time_ns
+ //[i, j) ?
+ //[j, jj) > time_ns
+
+ CuePoint** const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ CuePoint* const pCP = *k;
+ assert(pCP);
+
+ pCP->Load(pReader);
+
+ const long long t = pCP->GetTime(m_pSegment);
+
+ if (t <= time_ns)
+ i = k + 1;
+ else
+ j = k;
+
+ assert(i <= j);
+ }
+
+ assert(i == j);
+ assert(i <= jj);
+ assert(i > ii);
+
+ pCP = *--i;
+ assert(pCP);
+ assert(pCP->GetTime(m_pSegment) <= time_ns);
+
+ //TODO: here and elsewhere, it's probably not correct to search
+ //for the cue point with this time, and then search for a matching
+ //track. In principle, the matching track could be on some earlier
+ //cue point, and with our current algorithm, we'd miss it. To make
+ //this bullet-proof, we'd need to create a secondary structure,
+ //with a list of cue points that apply to a track, and then search
+ //that track-based structure for a matching cue point.
+
+ pTP = pCP->Find(pTrack);
+ return (pTP != NULL);
+}
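+
+//Note: a sketch of typical use (pCues, pTrack, time_ns and pEntry are
+//the hypothetical caller's own variables):
+//
+//    const CuePoint* pCP;
+//    const CuePoint::TrackPosition* pTP;
+//    const BlockEntry* pEntry = NULL;
+//
+//    if (pCues->Find(time_ns, pTrack, pCP, pTP))
+//        pEntry = pCues->GetBlock(pCP, pTP);
+//
+//Find binary-searches the cue-point array (loaded entries plus
+//preloaded placeholders), calling CuePoint::Load on demand so that
+//GetTime can be evaluated during the search.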
+
+
+#if 0
+bool Cues::FindNext(
+ long long time_ns,
+ const Track* pTrack,
+ const CuePoint*& pCP,
+ const CuePoint::TrackPosition*& pTP) const
+{
+ pCP = 0;
+ pTP = 0;
+
+ if (m_count == 0)
+ return false;
+
+ assert(m_cue_points);
+
+ const CuePoint* const* const ii = m_cue_points;
+ const CuePoint* const* i = ii;
+
+ const CuePoint* const* const jj = ii + m_count;
+ const CuePoint* const* j = jj;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[ii, i) <= time_ns
+ //[i, j) ?
+ //[j, jj) > time_ns
+
+ const CuePoint* const* const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ pCP = *k;
+ assert(pCP);
+
+ const long long t = pCP->GetTime(m_pSegment);
+
+ if (t <= time_ns)
+ i = k + 1;
+ else
+ j = k;
+
+ assert(i <= j);
+ }
+
+ assert(i == j);
+ assert(i <= jj);
+
+ if (i >= jj) //time_ns is greater than max cue point
+ return false;
+
+ pCP = *i;
+ assert(pCP);
+ assert(pCP->GetTime(m_pSegment) > time_ns);
+
+ pTP = pCP->Find(pTrack);
+ return (pTP != NULL);
+}
+#endif
+
+
+const CuePoint* Cues::GetFirst() const
+{
+ LoadCuePoint(); //init cues
+
+ const size_t count = m_count + m_preload_count;
+
+ if (count == 0) //weird
+ return NULL;
+
+ CuePoint* const* const pp = m_cue_points;
+ assert(pp);
+
+ CuePoint* const pCP = pp[0];
+ assert(pCP);
+ assert(pCP->GetTimeCode() >= 0);
+
+ return pCP;
+}
+
+
+const CuePoint* Cues::GetLast() const
+{
+ LoadCuePoint(); //init cues
+
+ const size_t count = m_count + m_preload_count;
+
+ if (count == 0) //weird
+ return NULL;
+
+ const size_t index = count - 1;
+
+ CuePoint* const* const pp = m_cue_points;
+ assert(pp);
+
+ CuePoint* const pCP = pp[index];
+ assert(pCP);
+
+ pCP->Load(m_pSegment->m_pReader);
+ assert(pCP->GetTimeCode() >= 0);
+
+ return pCP;
+}
+
+
+const CuePoint* Cues::GetNext(const CuePoint* pCurr) const
+{
+ if (pCurr == NULL)
+ return NULL;
+
+ assert(pCurr->GetTimeCode() >= 0);
+ assert(m_cue_points);
+ assert(m_count >= 1);
+
+ const size_t count = m_count + m_preload_count;
+
+ size_t index = pCurr->m_index;
+ assert(index < count);
+
+ CuePoint* const* const pp = m_cue_points;
+ assert(pp);
+ assert(pp[index] == pCurr);
+
+ ++index;
+
+ if (index >= count)
+ return NULL;
+
+ CuePoint* const pNext = pp[index];
+ assert(pNext);
+
+ pNext->Load(m_pSegment->m_pReader);
+
+ return pNext;
+}
+
+
+const BlockEntry* Cues::GetBlock(
+ const CuePoint* pCP,
+ const CuePoint::TrackPosition* pTP) const
+{
+ if (pCP == NULL)
+ return NULL;
+
+ if (pTP == NULL)
+ return NULL;
+
+ return m_pSegment->GetBlock(*pCP, *pTP);
+}
+
+
+const BlockEntry* Segment::GetBlock(
+ const CuePoint& cp,
+ const CuePoint::TrackPosition& tp)
+{
+ Cluster** const ii = m_clusters;
+ Cluster** i = ii;
+
+ const long count = m_clusterCount + m_clusterPreloadCount;
+
+ Cluster** const jj = ii + count;
+ Cluster** j = jj;
+
+ while (i < j)
+ {
+ //INVARIANT:
+        //[ii, i) < tp.m_pos
+        //[i, j) ?
+        //[j, jj) > tp.m_pos
+
+ Cluster** const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ Cluster* const pCluster = *k;
+ assert(pCluster);
+
+ const long long pos_ = pCluster->m_pos;
+ assert(pos_);
+
+ const long long pos = pos_ * ((pos_ < 0) ? -1 : 1);
+
+ if (pos < tp.m_pos)
+ i = k + 1;
+ else if (pos > tp.m_pos)
+ j = k;
+ else
+ return pCluster->GetEntry(cp, tp);
+ }
+
+ assert(i == j);
+
+ Cluster* const pCluster = Cluster::Parse(this, -1, tp.m_pos);
+ const ptrdiff_t idx = i - m_clusters;
+
+ PreloadCluster(pCluster, idx);
+ assert(m_clusters);
+ assert(m_clusterPreloadCount > 0);
+ assert(m_clusters[idx] == pCluster);
+
+ return pCluster->GetEntry(cp, tp);
+}
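+
+//Note: the binary search above compares segment-relative cluster
+//offsets.  A cluster that has not yet been loaded stores its offset
+//negated in m_pos (see Cluster::Parse and Cluster::Load), so the
+//absolute value is taken before comparing against tp.m_pos.  On a
+//miss, a new preloaded cluster is inserted at the computed index,
+//keeping the array sorted by file offset.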
+
+
+
+CuePoint::CuePoint(size_t idx, long long pos) :
+ m_index(idx),
+ m_timecode(-1 * pos),
+ m_track_positions(NULL),
+ m_track_positions_count(0)
+{
+ assert(pos > 0);
+}
+
+
+CuePoint::~CuePoint()
+{
+ delete[] m_track_positions;
+}
+
+
+void CuePoint::Load(IMkvReader* pReader)
+{
+ //odbgstream os;
+ //os << "CuePoint::Load(begin): timecode=" << m_timecode << endl;
+
+ if (m_timecode >= 0) //already loaded
+ return;
+
+ assert(m_track_positions == NULL);
+ assert(m_track_positions_count == 0);
+
+ long long pos_ = -m_timecode;
+
+ long long stop;
+
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos_, len);
+ assert(id == 0x3B); //CuePoint ID
+ //assert((pos + len) <= stop);
+
+ pos_ += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos_, len);
+ assert(size >= 0);
+ //assert((pos + len) <= stop);
+
+ pos_ += len; //consume Size field
+ //assert((pos + size) <= stop);
+
+ //pos_ now points to start of payload
+
+ stop = pos_ + size;
+ }
+
+ long long pos = pos_;
+
+ //First count number of track positions
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x33) //CueTime ID
+ m_timecode = UnserializeUInt(pReader, pos, size);
+
+ else if (id == 0x37) //CueTrackPosition(s) ID
+ ++m_track_positions_count;
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(m_timecode >= 0);
+ assert(m_track_positions_count > 0);
+
+ //os << "CuePoint::Load(cont'd): idpos=" << idpos
+ // << " timecode=" << m_timecode
+ // << endl;
+
+ m_track_positions = new TrackPosition[m_track_positions_count];
+
+ //Now parse track positions
+
+ TrackPosition* p = m_track_positions;
+ pos = pos_;
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x37) //CueTrackPosition(s) ID
+ {
+ TrackPosition& tp = *p++;
+ tp.Parse(pReader, pos, size);
+ }
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(size_t(p - m_track_positions) == m_track_positions_count);
+}
+
+
+
+void CuePoint::TrackPosition::Parse(
+ IMkvReader* pReader,
+ long long start_,
+ long long size_)
+{
+ const long long stop = start_ + size_;
+ long long pos = start_;
+
+ m_track = -1;
+ m_pos = -1;
+ m_block = 1; //default
+
+ while (pos < stop)
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume Size field
+ assert((pos + size) <= stop);
+
+ if (id == 0x77) //CueTrack ID
+ m_track = UnserializeUInt(pReader, pos, size);
+
+ else if (id == 0x71) //CueClusterPos ID
+ m_pos = UnserializeUInt(pReader, pos, size);
+
+ else if (id == 0x1378) //CueBlockNumber
+ m_block = UnserializeUInt(pReader, pos, size);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+
+ assert(m_pos >= 0);
+ //assert(m_track > 0);
+ //assert(m_block > 0);
+}
+
+
+const CuePoint::TrackPosition* CuePoint::Find(const Track* pTrack) const
+{
+ assert(pTrack);
+
+ const long long n = pTrack->GetNumber();
+
+ const TrackPosition* i = m_track_positions;
+ const TrackPosition* const j = i + m_track_positions_count;
+
+ while (i != j)
+ {
+ const TrackPosition& p = *i++;
+
+ if (p.m_track == n)
+ return &p;
+ }
+
+ return NULL; //no matching track number found
+}
+
+
+long long CuePoint::GetTimeCode() const
+{
+ return m_timecode;
+}
+
+long long CuePoint::GetTime(Segment* pSegment) const
+{
+ assert(pSegment);
+ assert(m_timecode >= 0);
+
+ const SegmentInfo* const pInfo = pSegment->GetInfo();
+ assert(pInfo);
+
+ const long long scale = pInfo->GetTimeCodeScale();
+ assert(scale >= 1);
+
+ const long long time = scale * m_timecode;
+
+ return time;
+}
+
+
+long long Segment::Unparsed() const
+{
+ const long long stop = m_start + m_size;
+
+ const long long result = stop - m_pos;
+ assert(result >= 0);
+
+ return result;
+}
+
+
+Cluster* Segment::GetFirst()
+{
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return &m_eos;
+
+ Cluster* const pCluster = m_clusters[0];
+ assert(pCluster);
+
+ return pCluster;
+}
+
+
+Cluster* Segment::GetLast()
+{
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return &m_eos;
+
+ const long idx = m_clusterCount - 1;
+
+ Cluster* const pCluster = m_clusters[idx];
+ assert(pCluster);
+
+ return pCluster;
+}
+
+
+unsigned long Segment::GetCount() const
+{
+ return m_clusterCount;
+}
+
+
+Cluster* Segment::GetNext(const Cluster* pCurr)
+{
+ assert(pCurr);
+ assert(pCurr != &m_eos);
+ assert(m_clusters);
+
+ long idx = pCurr->m_index;
+
+ if (idx >= 0)
+ {
+ assert(m_clusterCount > 0);
+ assert(idx < m_clusterCount);
+ assert(pCurr == m_clusters[idx]);
+
+ ++idx;
+
+ if (idx >= m_clusterCount)
+ return &m_eos; //caller will LoadCluster as desired
+
+ Cluster* const pNext = m_clusters[idx];
+ assert(pNext);
+ assert(pNext->m_index >= 0);
+ assert(pNext->m_index == idx);
+
+ return pNext;
+ }
+
+ assert(m_clusterPreloadCount > 0);
+
+ const long long off_ = pCurr->m_pos;
+ const long long off = off_ * ((off_ < 0) ? -1 : 1);
+
+ long long pos = m_start + off;
+ const long long stop = m_start + m_size; //end of segment
+
+ {
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long id = ReadUInt(m_pReader, pos, len);
+ assert(id == 0x0F43B675); //Cluster ID //TODO
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size > 0); //TODO
+ assert((pCurr->m_size <= 0) || (pCurr->m_size == size));
+
+ pos += len; //consume length of size of element
+ assert((pos + size) <= stop); //TODO
+
+ //Pos now points to start of payload
+
+ pos += size; //consume payload
+ }
+
+ long long off_next = 0;
+
+ while (pos < stop)
+ {
+ long len;
+
+ long long result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long idpos = pos; //pos of next (potential) cluster
+
+ const long long id = ReadUInt(m_pReader, idpos, len);
+ assert(id > 0); //TODO
+
+ pos += len; //consume ID
+
+ //Read Size
+ result = GetUIntLength(m_pReader, pos, len);
+ assert(result == 0); //TODO
+ assert((pos + len) <= stop); //TODO
+
+ const long long size = ReadUInt(m_pReader, pos, len);
+ assert(size >= 0); //TODO
+
+ pos += len; //consume length of size of element
+ assert((pos + size) <= stop); //TODO
+
+ //Pos now points to start of payload
+
+ if (size == 0) //weird
+ continue;
+
+ if (id == 0x0F43B675) //Cluster ID
+ {
+ off_next = idpos - m_start;
+ break;
+ }
+
+ pos += size; //consume payload
+ }
+
+ if (off_next <= 0)
+ return 0;
+
+ Cluster** const ii = m_clusters + m_clusterCount;
+ Cluster** i = ii;
+
+ Cluster** const jj = ii + m_clusterPreloadCount;
+ Cluster** j = jj;
+
+ while (i < j)
+ {
+ //INVARIANT:
+        //[ii, i) < off_next
+        //[i, j) ?
+        //[j, jj) > off_next
+
+ Cluster** const k = i + (j - i) / 2;
+ assert(k < jj);
+
+ Cluster* const pNext = *k;
+ assert(pNext);
+ assert(pNext->m_index < 0);
+
+ const long long pos_ = pNext->m_pos;
+ assert(pos_);
+
+ pos = pos_ * ((pos_ < 0) ? -1 : 1);
+
+ if (pos < off_next)
+ i = k + 1;
+ else if (pos > off_next)
+ j = k;
+ else
+ return pNext;
+ }
+
+ assert(i == j);
+
+ Cluster* const pNext = Cluster::Parse(this, -1, off_next);
+ const ptrdiff_t idx_next = i - m_clusters; //insertion position
+
+ PreloadCluster(pNext, idx_next);
+ assert(m_clusters);
+ assert(idx_next < m_clusterSize);
+ assert(m_clusters[idx_next] == pNext);
+
+ return pNext;
+}
+
+
+Cluster* Segment::FindCluster(long long time_ns)
+{
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return &m_eos;
+
+ {
+ Cluster* const pCluster = m_clusters[0];
+ assert(pCluster);
+ assert(pCluster->m_index == 0);
+
+ if (time_ns <= pCluster->GetTime())
+ return pCluster;
+ }
+
+ //Binary search of cluster array
+
+ long i = 0;
+ long j = m_clusterCount;
+
+ while (i < j)
+ {
+ //INVARIANT:
+ //[0, i) <= time_ns
+ //[i, j) ?
+ //[j, m_clusterCount) > time_ns
+
+ const long k = i + (j - i) / 2;
+ assert(k < m_clusterCount);
+
+ Cluster* const pCluster = m_clusters[k];
+ assert(pCluster);
+ assert(pCluster->m_index == k);
+
+ const long long t = pCluster->GetTime();
+
+ if (t <= time_ns)
+ i = k + 1;
+ else
+ j = k;
+
+ assert(i <= j);
+ }
+
+ assert(i == j);
+ assert(i > 0);
+ assert(i <= m_clusterCount);
+
+ const long k = i - 1;
+
+ Cluster* const pCluster = m_clusters[k];
+ assert(pCluster);
+ assert(pCluster->m_index == k);
+ assert(pCluster->GetTime() <= time_ns);
+
+ return pCluster;
+}
+
+
+const BlockEntry* Segment::Seek(
+ long long time_ns,
+ const Track* pTrack)
+{
+ assert(pTrack);
+
+ if ((m_clusters == NULL) || (m_clusterCount <= 0))
+ return pTrack->GetEOS();
+
+ Cluster** const i = m_clusters;
+ assert(i);
+
+ {
+ Cluster* const pCluster = *i;
+ assert(pCluster);
+ assert(pCluster->m_index == 0); //m_clusterCount > 0
+ assert(pCluster->m_pSegment == this);
+
+ if (time_ns <= pCluster->GetTime())
+ return pCluster->GetEntry(pTrack);
+ }
+
+ Cluster** const j = i + m_clusterCount;
+
+ if (pTrack->GetType() == 2) //audio
+ {
+ //TODO: we could decide to use cues for this, as we do for video.
+ //But we only use it for video because looking around for a keyframe
+ //can get expensive. Audio doesn't require anything special so a
+ //straight cluster search is good enough (we assume).
+
+ Cluster** lo = i;
+ Cluster** hi = j;
+
+ while (lo < hi)
+ {
+ //INVARIANT:
+ //[i, lo) <= time_ns
+ //[lo, hi) ?
+ //[hi, j) > time_ns
+
+ Cluster** const mid = lo + (hi - lo) / 2;
+ assert(mid < hi);
+
+ Cluster* const pCluster = *mid;
+ assert(pCluster);
+ assert(pCluster->m_index == long(mid - m_clusters));
+ assert(pCluster->m_pSegment == this);
+
+ const long long t = pCluster->GetTime();
+
+ if (t <= time_ns)
+ lo = mid + 1;
+ else
+ hi = mid;
+
+ assert(lo <= hi);
+ }
+
+ assert(lo == hi);
+ assert(lo > i);
+ assert(lo <= j);
+
+ Cluster* const pCluster = *--lo;
+ assert(pCluster);
+ assert(pCluster->GetTime() <= time_ns);
+
+ return pCluster->GetEntry(pTrack);
+ }
+
+ assert(pTrack->GetType() == 1); //video
+
+ Cluster** lo = i;
+ Cluster** hi = j;
+
+ while (lo < hi)
+ {
+ //INVARIANT:
+ //[i, lo) <= time_ns
+ //[lo, hi) ?
+ //[hi, j) > time_ns
+
+ Cluster** const mid = lo + (hi - lo) / 2;
+ assert(mid < hi);
+
+ Cluster* const pCluster = *mid;
+ assert(pCluster);
+
+ const long long t = pCluster->GetTime();
+
+ if (t <= time_ns)
+ lo = mid + 1;
+ else
+ hi = mid;
+
+ assert(lo <= hi);
+ }
+
+ assert(lo == hi);
+ assert(lo > i);
+ assert(lo <= j);
+
+ Cluster* pCluster = *--lo;
+ assert(pCluster);
+ assert(pCluster->GetTime() <= time_ns);
+
+ {
+ const BlockEntry* const pBlockEntry = pCluster->GetEntry(pTrack);
+ assert(pBlockEntry);
+
+ if (!pBlockEntry->EOS()) //found a keyframe
+ {
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+
+ //TODO: this isn't necessarily the keyframe we want,
+            //since there might be another keyframe on this same
+            //cluster with a greater timecode, but one that is
+ //still less than the requested time. For now we
+ //simply return the first keyframe we find.
+
+ if (pBlock->GetTime(pCluster) <= time_ns)
+ return pBlockEntry;
+ }
+ }
+
+ const VideoTrack* const pVideo = static_cast<const VideoTrack*>(pTrack);
+
+ while (lo != i)
+ {
+ pCluster = *--lo;
+ assert(pCluster);
+ assert(pCluster->GetTime() <= time_ns);
+
+ const BlockEntry* const pBlockEntry = pCluster->GetMaxKey(pVideo);
+ assert(pBlockEntry);
+
+ if (!pBlockEntry->EOS())
+ return pBlockEntry;
+ }
+
+ //weird: we're on the first cluster, but no keyframe found
+ //should never happen but we must return something anyway
+
+ return pTrack->GetEOS();
+}
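+
+//Note: for video, the cluster located by the binary search above is
+//only a starting point.  If its first matching entry is not a usable
+//keyframe at or before time_ns, Seek walks backward one cluster at a
+//time, using GetMaxKey to find the last keyframe in each cluster,
+//until a suitable entry (or the first cluster) is reached.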
+
+
+#if 0
+bool Segment::SearchCues(
+ long long time_ns,
+ Track* pTrack,
+ Cluster*& pCluster,
+ const BlockEntry*& pBlockEntry,
+ const CuePoint*& pCP,
+ const CuePoint::TrackPosition*& pTP)
+{
+ if (pTrack->GetType() != 1) //not video
+ return false; //TODO: for now, just handle video stream
+
+ if (m_pCues == NULL)
+ return false;
+
+ if (!m_pCues->Find(time_ns, pTrack, pCP, pTP))
+ return false; //weird
+
+ assert(pCP);
+ assert(pTP);
+ assert(pTP->m_track == pTrack->GetNumber());
+
+ //We have the cue point and track position we want,
+ //so we now need to search for the cluster having
+ //the indicated position.
+
+ return GetCluster(pCP, pTP, pCluster, pBlockEntry);
+}
+#endif
+
+
+Tracks* Segment::GetTracks() const
+{
+ return m_pTracks;
+}
+
+
+const SegmentInfo* Segment::GetInfo() const
+{
+ return m_pInfo;
+}
+
+
+const Cues* Segment::GetCues() const
+{
+ return m_pCues;
+}
+
+
+long long Segment::GetDuration() const
+{
+ assert(m_pInfo);
+ return m_pInfo->GetDuration();
+}
+
+
+SegmentInfo::SegmentInfo(Segment* pSegment, long long start, long long size_) :
+ m_pSegment(pSegment),
+ m_start(start),
+ m_size(size_),
+ m_pMuxingAppAsUTF8(NULL),
+ m_pWritingAppAsUTF8(NULL),
+ m_pTitleAsUTF8(NULL)
+{
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ long long pos = start;
+ const long long stop = start + size_;
+
+ m_timecodeScale = 1000000;
+ m_duration = -1;
+
+ while (pos < stop)
+ {
+ if (Match(pReader, pos, 0x0AD7B1, m_timecodeScale))
+ assert(m_timecodeScale > 0);
+
+ else if (Match(pReader, pos, 0x0489, m_duration))
+ assert(m_duration >= 0);
+
+ else if (Match(pReader, pos, 0x0D80, m_pMuxingAppAsUTF8)) //[4D][80]
+ assert(m_pMuxingAppAsUTF8);
+
+ else if (Match(pReader, pos, 0x1741, m_pWritingAppAsUTF8)) //[57][41]
+ assert(m_pWritingAppAsUTF8);
+
+ else if (Match(pReader, pos, 0x3BA9, m_pTitleAsUTF8)) //[7B][A9]
+ assert(m_pTitleAsUTF8);
+
+ else
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ //id;
+ assert(id >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+ assert((stop - pos) > 0);
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len + size; //consume size and payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos == stop);
+}
+
+SegmentInfo::~SegmentInfo()
+{
+ if (m_pMuxingAppAsUTF8)
+ {
+ delete[] m_pMuxingAppAsUTF8;
+ m_pMuxingAppAsUTF8 = NULL;
+ }
+
+ if (m_pWritingAppAsUTF8)
+ {
+ delete[] m_pWritingAppAsUTF8;
+ m_pWritingAppAsUTF8 = NULL;
+ }
+
+ if (m_pTitleAsUTF8)
+ {
+ delete[] m_pTitleAsUTF8;
+ m_pTitleAsUTF8 = NULL;
+ }
+}
+
+long long SegmentInfo::GetTimeCodeScale() const
+{
+ return m_timecodeScale;
+}
+
+
+long long SegmentInfo::GetDuration() const
+{
+ if (m_duration < 0)
+ return -1;
+
+ assert(m_timecodeScale >= 1);
+
+ const double dd = double(m_duration) * double(m_timecodeScale);
+ const long long d = static_cast<long long>(dd);
+
+ return d;
+}
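+
+//For example, with the default timecode scale of 1000000 ns per tick
+//(set in the constructor above), a stored duration of 5000 ticks
+//yields 5000 * 1000000 = 5000000000 ns, i.e. 5 seconds.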
+
+const char* SegmentInfo::GetMuxingAppAsUTF8() const
+{
+ return m_pMuxingAppAsUTF8;
+}
+
+
+const char* SegmentInfo::GetWritingAppAsUTF8() const
+{
+ return m_pWritingAppAsUTF8;
+}
+
+const char* SegmentInfo::GetTitleAsUTF8() const
+{
+ return m_pTitleAsUTF8;
+}
+
+Track::Track(Segment* pSegment, const Info& i) :
+ m_pSegment(pSegment),
+ m_info(i)
+{
+}
+
+Track::~Track()
+{
+ Info& info = const_cast<Info&>(m_info);
+ info.Clear();
+}
+
+Track::Info::Info():
+ type(-1),
+ number(-1),
+ uid(-1),
+ nameAsUTF8(NULL),
+ codecId(NULL),
+ codecPrivate(NULL),
+ codecPrivateSize(0),
+ codecNameAsUTF8(NULL)
+{
+}
+
+
+void Track::Info::Clear()
+{
+ delete[] nameAsUTF8;
+ nameAsUTF8 = NULL;
+
+ delete[] codecId;
+ codecId = NULL;
+
+ delete[] codecPrivate;
+ codecPrivate = NULL;
+
+ codecPrivateSize = 0;
+
+ delete[] codecNameAsUTF8;
+ codecNameAsUTF8 = NULL;
+}
+
+const BlockEntry* Track::GetEOS() const
+{
+ return &m_eos;
+}
+
+long long Track::GetType() const
+{
+ return m_info.type;
+}
+
+long long Track::GetNumber() const
+{
+ return m_info.number;
+}
+
+const char* Track::GetNameAsUTF8() const
+{
+ return m_info.nameAsUTF8;
+}
+
+const char* Track::GetCodecNameAsUTF8() const
+{
+ return m_info.codecNameAsUTF8;
+}
+
+
+const char* Track::GetCodecId() const
+{
+ return m_info.codecId;
+}
+
+const unsigned char* Track::GetCodecPrivate(size_t& size) const
+{
+ size = m_info.codecPrivateSize;
+ return m_info.codecPrivate;
+}
+
+
+long Track::GetFirst(const BlockEntry*& pBlockEntry) const
+{
+ Cluster* pCluster = m_pSegment->GetFirst();
+
+    //Segment::GetFirst returns the special EOS cluster when no
+    //clusters have been loaded yet (e.g. during a network download
+    //that is still in progress).  In that case we either report
+    //end-of-stream (if the segment has been fully parsed) or return
+    //E_BUFFER_NOT_FULL so the caller can load more data and retry.
+
+ for (int i = 0; i < 100; ++i) //arbitrary upper bound
+ {
+ if (pCluster == NULL)
+ {
+ pBlockEntry = GetEOS();
+ return 1;
+ }
+
+ if (pCluster->EOS())
+ {
+ if (m_pSegment->Unparsed() <= 0) //all clusters have been loaded
+ {
+ pBlockEntry = GetEOS();
+ return 1;
+ }
+
+ pBlockEntry = 0;
+ return E_BUFFER_NOT_FULL;
+ }
+
+ pBlockEntry = pCluster->GetFirst();
+
+ while (pBlockEntry)
+ {
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() == m_info.number)
+ return 0;
+
+ pBlockEntry = pCluster->GetNext(pBlockEntry);
+ }
+
+ pCluster = m_pSegment->GetNext(pCluster);
+ }
+
+ //NOTE: if we get here, it means that we didn't find a block with
+ //a matching track number. We interpret that as an error (which
+ //might be too conservative).
+
+ pBlockEntry = GetEOS(); //so we can return a non-NULL value
+ return 1;
+}
+
+
+long Track::GetNext(
+ const BlockEntry* pCurrEntry,
+ const BlockEntry*& pNextEntry) const
+{
+ assert(pCurrEntry);
+ assert(!pCurrEntry->EOS()); //?
+
+ const Block* const pCurrBlock = pCurrEntry->GetBlock();
+ assert(pCurrBlock->GetTrackNumber() == m_info.number);
+
+ Cluster* pCluster = pCurrEntry->GetCluster();
+ assert(pCluster);
+ assert(!pCluster->EOS());
+
+ pNextEntry = pCluster->GetNext(pCurrEntry);
+
+ for (int i = 0; i < 100; ++i) //arbitrary upper bound to search
+ {
+ while (pNextEntry)
+ {
+ const Block* const pNextBlock = pNextEntry->GetBlock();
+ assert(pNextBlock);
+
+ if (pNextBlock->GetTrackNumber() == m_info.number)
+ return 0;
+
+ pNextEntry = pCluster->GetNext(pNextEntry);
+ }
+
+ pCluster = m_pSegment->GetNext(pCluster);
+
+ if (pCluster == NULL)
+ {
+ pNextEntry = GetEOS();
+ return 1;
+ }
+
+ if (pCluster->EOS())
+ {
+ if (m_pSegment->Unparsed() <= 0) //all clusters have been loaded
+ {
+ pNextEntry = GetEOS();
+ return 1;
+ }
+
+ //TODO: there is a potential O(n^2) problem here: we tell the
+ //caller to (pre)load another cluster, which he does, but then he
+ //calls GetNext again, which repeats the same search. This is
+ //a pathological case, since the only way it can happen is if
+ //there exists a long sequence of clusters none of which contain a
+            //block from this track. One way around this problem is for the
+ //caller to be smarter when he loads another cluster: don't call
+ //us back until you have a cluster that contains a block from this
+ //track. (Of course, that's not cheap either, since our caller
+            //would have to scan each cluster as it's loaded, so that
+ //would just push back the problem.)
+
+ pNextEntry = NULL;
+ return E_BUFFER_NOT_FULL;
+ }
+
+ pNextEntry = pCluster->GetFirst();
+ }
+
+ //NOTE: if we get here, it means that we didn't find a block with
+ //a matching track number after lots of searching, so we give
+ //up trying.
+
+ pNextEntry = GetEOS(); //so we can return a non-NULL value
+ return 1;
+}
+
+
+Track::EOSBlock::EOSBlock()
+{
+}
+
+
+bool Track::EOSBlock::EOS() const
+{
+ return true;
+}
+
+
+Cluster* Track::EOSBlock::GetCluster() const
+{
+ return NULL;
+}
+
+
+size_t Track::EOSBlock::GetIndex() const
+{
+ return 0;
+}
+
+
+const Block* Track::EOSBlock::GetBlock() const
+{
+ return NULL;
+}
+
+
+bool Track::EOSBlock::IsBFrame() const
+{
+ return false;
+}
+
+
+VideoTrack::VideoTrack(Segment* pSegment, const Info& i) :
+ Track(pSegment, i),
+ m_width(-1),
+ m_height(-1),
+ m_rate(-1)
+{
+ assert(i.type == 1);
+ assert(i.number > 0);
+
+ IMkvReader* const pReader = pSegment->m_pReader;
+
+ const Settings& s = i.settings;
+ assert(s.start >= 0);
+ assert(s.size >= 0);
+
+ long long pos = s.start;
+ assert(pos >= 0);
+
+ const long long stop = pos + s.size;
+
+ while (pos < stop)
+ {
+#ifdef _DEBUG
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+#endif
+ if (Match(pReader, pos, 0x30, m_width))
+ ;
+ else if (Match(pReader, pos, 0x3A, m_height))
+ ;
+ else if (Match(pReader, pos, 0x0383E3, m_rate))
+ ;
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+ assert((pos + size) <= stop);
+
+ //pos now designates start of payload
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ return;
+}
+
+
+bool VideoTrack::VetEntry(const BlockEntry* pBlockEntry) const
+{
+ assert(pBlockEntry);
+
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+ assert(pBlock->GetTrackNumber() == m_info.number);
+
+ return pBlock->IsKey();
+}
+
+
+long long VideoTrack::GetWidth() const
+{
+ return m_width;
+}
+
+
+long long VideoTrack::GetHeight() const
+{
+ return m_height;
+}
+
+
+double VideoTrack::GetFrameRate() const
+{
+ return m_rate;
+}
+
+
+AudioTrack::AudioTrack(Segment* pSegment, const Info& i) :
+ Track(pSegment, i),
+ m_rate(0.0),
+ m_channels(0),
+ m_bitDepth(-1)
+{
+ assert(i.type == 2);
+ assert(i.number > 0);
+
+ IMkvReader* const pReader = pSegment->m_pReader;
+
+ const Settings& s = i.settings;
+ assert(s.start >= 0);
+ assert(s.size >= 0);
+
+ long long pos = s.start;
+ assert(pos >= 0);
+
+ const long long stop = pos + s.size;
+
+ while (pos < stop)
+ {
+#ifdef _DEBUG
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+#endif
+ if (Match(pReader, pos, 0x35, m_rate))
+ ;
+ else if (Match(pReader, pos, 0x1F, m_channels))
+ ;
+ else if (Match(pReader, pos, 0x2264, m_bitDepth))
+ ;
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+ assert((pos + size) <= stop);
+
+ //pos now designates start of payload
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ return;
+}
+
+
+bool AudioTrack::VetEntry(const BlockEntry* pBlockEntry) const
+{
+ assert(pBlockEntry);
+
+ const Block* const pBlock = pBlockEntry->GetBlock();
+ assert(pBlock);
+ assert(pBlock->GetTrackNumber() == m_info.number);
+
+ return true;
+}
+
+
+double AudioTrack::GetSamplingRate() const
+{
+ return m_rate;
+}
+
+
+long long AudioTrack::GetChannels() const
+{
+ return m_channels;
+}
+
+long long AudioTrack::GetBitDepth() const
+{
+ return m_bitDepth;
+}
+
+Tracks::Tracks(Segment* pSegment, long long start, long long size_) :
+ m_pSegment(pSegment),
+ m_start(start),
+ m_size(size_),
+ m_trackEntries(NULL),
+ m_trackEntriesEnd(NULL)
+{
+ long long stop = m_start + m_size;
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ long long pos1 = m_start;
+ int count = 0;
+
+ while (pos1 < stop)
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos1, len);
+ assert(id >= 0);
+ assert((pos1 + len) <= stop);
+
+ pos1 += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos1, len);
+ assert(size >= 0);
+ assert((pos1 + len) <= stop);
+
+ pos1 += len; //consume length of size
+
+        //pos1 now designates start of payload
+ if (id == 0x2E) //TrackEntry ID
+ ++count;
+
+ pos1 += size; //consume payload
+ assert(pos1 <= stop);
+ }
+
+ if (count <= 0)
+ return;
+
+ m_trackEntries = new Track*[count];
+ m_trackEntriesEnd = m_trackEntries;
+
+ long long pos = m_start;
+
+ while (pos < stop)
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size1 = ReadUInt(pReader, pos, len);
+ assert(size1 >= 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+
+        //pos now designates start of payload
+
+ if (id == 0x2E) //TrackEntry ID
+ ParseTrackEntry(pos, size1, *m_trackEntriesEnd++);
+
+ pos += size1; //consume payload
+ assert(pos <= stop);
+ }
+}
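+
+//Note: the constructor above makes two passes over the Tracks payload:
+//the first pass only counts TrackEntry (0x2E) children so the Track*
+//array can be allocated with an exact size, and the second pass parses
+//each entry via ParseTrackEntry.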
+
+
+unsigned long Tracks::GetTracksCount() const
+{
+ const ptrdiff_t result = m_trackEntriesEnd - m_trackEntries;
+ assert(result >= 0);
+
+ return static_cast<unsigned long>(result);
+}
+
+
+void Tracks::ParseTrackEntry(
+ long long start,
+ long long size,
+ Track*& pTrack)
+{
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ long long pos = start;
+ const long long stop = start + size;
+
+ Track::Info i;
+
+ Track::Settings videoSettings;
+ videoSettings.start = -1;
+
+ Track::Settings audioSettings;
+ audioSettings.start = -1;
+
+ while (pos < stop)
+ {
+#ifdef _DEBUG
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ len;
+ id;
+#endif
+ if (Match(pReader, pos, 0x57, i.number))
+ assert(i.number > 0);
+ else if (Match(pReader, pos, 0x33C5, i.uid))
+ ;
+ else if (Match(pReader, pos, 0x03, i.type))
+ ;
+ else if (Match(pReader, pos, 0x136E, i.nameAsUTF8))
+ assert(i.nameAsUTF8);
+ else if (Match(pReader, pos, 0x06, i.codecId))
+ ;
+ else if (Match(pReader,
+ pos,
+ 0x23A2,
+ i.codecPrivate,
+ i.codecPrivateSize))
+ ;
+ else if (Match(pReader, pos, 0x058688, i.codecNameAsUTF8))
+ assert(i.codecNameAsUTF8);
+ else
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO: handle error case
+ assert((pos + len) <= stop);
+
+ pos += len; //consume length of size
+ const long long start = pos;
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+
+ if (id == 0x60)
+ {
+ videoSettings.start = start;
+ videoSettings.size = size;
+ }
+ else if (id == 0x61)
+ {
+ audioSettings.start = start;
+ audioSettings.size = size;
+ }
+ }
+ }
+
+ assert(pos == stop);
+    //TODO: properly vet i.number, to ensure both its existence,
+ //and that it is unique among all tracks.
+ assert(i.number > 0);
+
+ //TODO: vet settings, to ensure that video settings (0x60)
+ //were specified when type = 1, and that audio settings (0x61)
+ //were specified when type = 2.
+ if (i.type == 1) //video
+ {
+ assert(audioSettings.start < 0);
+ assert(videoSettings.start >= 0);
+
+ i.settings = videoSettings;
+
+ VideoTrack* const t = new VideoTrack(m_pSegment, i);
+ assert(t); //TODO
+ pTrack = t;
+ }
+ else if (i.type == 2) //audio
+ {
+ assert(videoSettings.start < 0);
+ assert(audioSettings.start >= 0);
+
+ i.settings = audioSettings;
+
+ AudioTrack* const t = new AudioTrack(m_pSegment, i);
+ assert(t); //TODO
+ pTrack = t;
+ }
+ else
+ {
+        //for now we do not support other track types
+        //TODO: support other track types
+ i.Clear();
+
+ pTrack = NULL;
+ }
+
+ return;
+}
+
+
+Tracks::~Tracks()
+{
+ Track** i = m_trackEntries;
+ Track** const j = m_trackEntriesEnd;
+
+ while (i != j)
+ {
+ Track* const pTrack = *i++;
+ delete pTrack;
+ }
+
+ delete[] m_trackEntries;
+}
+
+
+Track* Tracks::GetTrackByNumber(unsigned long tn_) const
+{
+ const long long tn = tn_;
+
+ Track** i = m_trackEntries;
+ Track** const j = m_trackEntriesEnd;
+
+ while (i != j)
+ {
+ Track* const pTrack = *i++;
+
+ if (pTrack == NULL)
+ continue;
+
+ if (tn == pTrack->GetNumber())
+ return pTrack;
+ }
+
+ return NULL; //not found
+}
+
+
+Track* Tracks::GetTrackByIndex(unsigned long idx) const
+{
+ const ptrdiff_t count = m_trackEntriesEnd - m_trackEntries;
+
+ if (idx >= static_cast<unsigned long>(count))
+ return NULL;
+
+ return m_trackEntries[idx];
+}
+
+
+void Cluster::Load()
+{
+ assert(m_pSegment);
+ assert(m_pos);
+ assert(m_size);
+
+ if (m_pos > 0) //loaded
+ {
+ assert(m_size > 0);
+ assert(m_timecode >= 0);
+ return;
+ }
+
+ assert(m_pos < 0); //not loaded yet
+ assert(m_size < 0);
+ assert(m_timecode < 0);
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ m_pos *= -1; //relative to segment
+ long long pos = m_pSegment->m_start + m_pos; //absolute
+
+ long len;
+
+ const long long id_ = ReadUInt(pReader, pos, len);
+ assert(id_ >= 0);
+ assert(id_ == 0x0F43B675); //Cluster ID
+
+ pos += len; //consume id
+
+ const long long size_ = ReadUInt(pReader, pos, len);
+ assert(size_ >= 0);
+
+ pos += len; //consume size
+
+ m_size = size_;
+ const long long stop = pos + size_;
+
+ long long timecode = -1;
+
+ while (pos < stop)
+ {
+ if (Match(pReader, pos, 0x67, timecode))
+ break;
+ else
+ {
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ if (id == 0x20) //BlockGroup ID
+ break;
+
+ if (id == 0x23) //SimpleBlock ID
+ break;
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos <= stop);
+ assert(timecode >= 0);
+
+ m_timecode = timecode;
+}
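+
+//Note: Cluster::Parse (below) records the cluster's segment-relative
+//offset negated in m_pos, marking it as not yet loaded.  Load() flips
+//the sign on first use, verifies the Cluster ID, records the size,
+//and scans just far enough to pick up the cluster timecode; the block
+//entries themselves are parsed lazily by LoadBlockEntries().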
+
+
+Cluster* Cluster::Parse(
+ Segment* pSegment,
+ long idx,
+ long long off)
+{
+ assert(pSegment);
+ assert(off >= 0);
+ assert(off < pSegment->m_size);
+
+ Cluster* const pCluster = new Cluster(pSegment, idx, -off);
+ assert(pCluster);
+
+ return pCluster;
+}
+
+
+Cluster::Cluster() :
+ m_pSegment(NULL),
+ m_index(0),
+ m_pos(0),
+ m_size(0),
+ m_timecode(0),
+ m_entries(NULL),
+ m_entriesCount(0)
+{
+}
+
+
+Cluster::Cluster(
+ Segment* pSegment,
+ long idx,
+ long long off) :
+ m_pSegment(pSegment),
+ m_index(idx),
+ m_pos(off),
+ m_size(-1),
+ m_timecode(-1),
+ m_entries(NULL),
+ m_entriesCount(0)
+{
+}
+
+
+Cluster::~Cluster()
+{
+ BlockEntry** i = m_entries;
+ BlockEntry** const j = m_entries + m_entriesCount;
+
+ while (i != j)
+ {
+ BlockEntry* p = *i++;
+ assert(p);
+
+ delete p;
+ }
+
+ delete[] m_entries;
+}
+
+
+bool Cluster::EOS() const
+{
+ return (m_pSegment == NULL);
+}
+
+
+void Cluster::LoadBlockEntries()
+{
+ if (m_entries)
+ return;
+
+ assert(m_pSegment);
+ assert(m_pos);
+ assert(m_size);
+ assert(m_entriesCount == 0);
+
+ IMkvReader* const pReader = m_pSegment->m_pReader;
+
+ if (m_pos < 0)
+ m_pos *= -1; //relative to segment
+
+ long long pos = m_pSegment->m_start + m_pos; //absolute
+
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, pos, len);
+ id;
+ assert(id >= 0);
+ assert(id == 0x0F43B675); //Cluster ID
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size > 0);
+
+ pos += len; //consume size
+
+ //pos now points to start of payload
+
+ if (m_size >= 0)
+ assert(size == m_size);
+ else
+ m_size = size;
+ }
+
+ const long long stop = pos + m_size;
+ long long timecode = -1; //of cluster itself
+
+ //First count the number of entries
+
+ long long idx = pos; //points to start of payload
+ m_entriesCount = 0;
+
+ while (idx < stop)
+ {
+ if (Match(pReader, idx, 0x67, timecode))
+ {
+ if (m_timecode >= 0)
+ assert(timecode == m_timecode);
+ else
+ m_timecode = timecode;
+ }
+ else
+ {
+ long len;
+
+ const long long id = ReadUInt(pReader, idx, len);
+ assert(id >= 0); //TODO
+ assert((idx + len) <= stop);
+
+ idx += len; //consume id
+
+ const long long size = ReadUInt(pReader, idx, len);
+ assert(size >= 0); //TODO
+ assert((idx + len) <= stop);
+
+ idx += len; //consume size
+
+ if (id == 0x20) //BlockGroup ID
+ ++m_entriesCount;
+ else if (id == 0x23) //SimpleBlock ID
+ ++m_entriesCount;
+
+ idx += size; //consume payload
+ assert(idx <= stop);
+ }
+ }
+
+ assert(idx == stop);
+ assert(m_timecode >= 0);
+
+ if (m_entriesCount == 0) //TODO: handle empty clusters
+ return;
+
+ m_entries = new BlockEntry*[m_entriesCount];
+ size_t index = 0;
+
+ while (pos < stop)
+ {
+ if (Match(pReader, pos, 0x67, timecode))
+ assert(timecode == m_timecode);
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume id
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ if (id == 0x20) //BlockGroup ID
+ ParseBlockGroup(pos, size, index++);
+ else if (id == 0x23) //SimpleBlock ID
+ ParseSimpleBlock(pos, size, index++);
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos == stop);
+ assert(timecode >= 0);
+ assert(index == m_entriesCount);
+}
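+
+//Note: LoadBlockEntries uses the same two-pass scheme as the Tracks
+//constructor: the first pass counts BlockGroup (0x20) and SimpleBlock
+//(0x23) children in order to size m_entries, and the second pass
+//constructs a BlockGroup or SimpleBlock entry for each one.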
+
+
+
+long long Cluster::GetTimeCode()
+{
+ Load();
+ return m_timecode;
+}
+
+
+long long Cluster::GetTime()
+{
+ const long long tc = GetTimeCode();
+ assert(tc >= 0);
+
+ const SegmentInfo* const pInfo = m_pSegment->GetInfo();
+ assert(pInfo);
+
+ const long long scale = pInfo->GetTimeCodeScale();
+ assert(scale >= 1);
+
+ const long long t = m_timecode * scale;
+
+ return t;
+}
+
+
+long long Cluster::GetFirstTime()
+{
+ const BlockEntry* const pEntry = GetFirst();
+
+ if (pEntry == NULL) //empty cluster
+ return GetTime();
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ return pBlock->GetTime(this);
+}
+
+
+long long Cluster::GetLastTime()
+{
+ const BlockEntry* const pEntry = GetLast();
+
+ if (pEntry == NULL) //empty cluster
+ return GetTime();
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ return pBlock->GetTime(this);
+}
+
+
+void Cluster::ParseBlockGroup(long long start, long long size, size_t index)
+{
+ assert(m_entries);
+ assert(m_entriesCount);
+ assert(index < m_entriesCount);
+
+ BlockGroup* const pGroup =
+ new (std::nothrow) BlockGroup(this, index, start, size);
+ assert(pGroup); //TODO
+
+ m_entries[index] = pGroup;
+}
+
+
+
+void Cluster::ParseSimpleBlock(long long start, long long size, size_t index)
+{
+ assert(m_entries);
+ assert(m_entriesCount);
+ assert(index < m_entriesCount);
+
+ SimpleBlock* const pSimpleBlock =
+ new (std::nothrow) SimpleBlock(this, index, start, size);
+ assert(pSimpleBlock); //TODO
+
+ m_entries[index] = pSimpleBlock;
+}
+
+
+const BlockEntry* Cluster::GetFirst()
+{
+ LoadBlockEntries();
+ //assert(m_entries);
+ //assert(m_entriesCount >= 1);
+
+ if ((m_entries == NULL) || (m_entriesCount == 0))
+ return NULL;
+
+ const BlockEntry* const pFirst = m_entries[0];
+ assert(pFirst);
+
+ return pFirst;
+}
+
+
+const BlockEntry* Cluster::GetLast()
+{
+ LoadBlockEntries();
+ //assert(m_entries);
+ //assert(m_entriesCount >= 1);
+
+ if ((m_entries == NULL) || (m_entriesCount == 0))
+ return NULL;
+
+ const size_t idx = m_entriesCount - 1;
+
+ const BlockEntry* const pLast = m_entries[idx];
+ assert(pLast);
+
+ return pLast;
+}
+
+
+const BlockEntry* Cluster::GetNext(const BlockEntry* pEntry) const
+{
+ assert(pEntry);
+ assert(m_entries);
+ assert(m_entriesCount);
+
+ size_t idx = pEntry->GetIndex();
+ assert(idx < m_entriesCount);
+ assert(m_entries[idx] == pEntry);
+
+ ++idx;
+
+ if (idx >= m_entriesCount)
+ return NULL;
+
+ return m_entries[idx];
+}
+
+
+const BlockEntry* Cluster::GetEntry(const Track* pTrack)
+{
+ assert(pTrack);
+
+ if (m_pSegment == NULL) //EOS
+ return pTrack->GetEOS();
+
+ LoadBlockEntries();
+
+ if ((m_entries == NULL) || (m_entriesCount == 0))
+ return NULL;
+
+ BlockEntry** i = m_entries;
+ assert(i);
+
+ BlockEntry** const j = i + m_entriesCount;
+
+ while (i != j)
+ {
+ const BlockEntry* const pEntry = *i++;
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() != pTrack->GetNumber())
+ continue;
+
+ if (pTrack->VetEntry(pEntry))
+ return pEntry;
+ }
+
+ return pTrack->GetEOS(); //no satisfactory block found
+}
+
+
+const BlockEntry*
+Cluster::GetEntry(
+ const CuePoint& cp,
+ const CuePoint::TrackPosition& tp)
+{
+ assert(m_pSegment);
+
+ LoadBlockEntries();
+
+ if (m_entries == NULL)
+ return NULL;
+
+ const long long count = m_entriesCount;
+
+ if (count <= 0)
+ return NULL;
+
+ const long long tc = cp.GetTimeCode();
+
+ if ((tp.m_block > 0) && (tp.m_block <= count))
+ {
+ const size_t block = static_cast<size_t>(tp.m_block);
+ const size_t index = block - 1;
+
+ const BlockEntry* const pEntry = m_entries[index];
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if ((pBlock->GetTrackNumber() == tp.m_track) &&
+ (pBlock->GetTimeCode(this) == tc))
+ {
+ return pEntry;
+ }
+ }
+
+ const BlockEntry* const* i = m_entries;
+ const BlockEntry* const* const j = i + count;
+
+ while (i != j)
+ {
+ const BlockEntry* const pEntry = *i++;
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() != tp.m_track)
+ continue;
+
+ const long long tc_ = pBlock->GetTimeCode(this);
+
+ if (tc_ < tc)
+ continue;
+
+ if (tc_ > tc)
+ return NULL;
+
+ const Tracks* const pTracks = m_pSegment->GetTracks();
+ assert(pTracks);
+
+ const long tn = static_cast<long>(tp.m_track);
+ const Track* const pTrack = pTracks->GetTrackByNumber(tn);
+
+ if (pTrack == NULL)
+ return NULL;
+
+ const long long type = pTrack->GetType();
+
+ if (type == 2) //audio
+ return pEntry;
+
+ if (type != 1) //not video
+ return NULL;
+
+ if (!pBlock->IsKey())
+ return NULL;
+
+ return pEntry;
+ }
+
+ return NULL;
+}
+
+
+const BlockEntry* Cluster::GetMaxKey(const VideoTrack* pTrack)
+{
+ assert(pTrack);
+
+ if (m_pSegment == NULL) //EOS
+ return pTrack->GetEOS();
+
+ LoadBlockEntries();
+ //assert(m_entries);
+
+ BlockEntry** i = m_entries + m_entriesCount;
+ BlockEntry** const j = m_entries;
+
+ while (i != j)
+ {
+ const BlockEntry* const pEntry = *--i;
+ assert(pEntry);
+ assert(!pEntry->EOS());
+
+ const Block* const pBlock = pEntry->GetBlock();
+ assert(pBlock);
+
+ if (pBlock->GetTrackNumber() != pTrack->GetNumber())
+ continue;
+
+ if (pBlock->IsKey())
+ return pEntry;
+ }
+
+ return pTrack->GetEOS(); //no satisfactory block found
+}
+
+
+
+BlockEntry::BlockEntry()
+{
+}
+
+
+BlockEntry::~BlockEntry()
+{
+}
+
+
+SimpleBlock::SimpleBlock(
+ Cluster* pCluster,
+ size_t idx,
+ long long start,
+ long long size) :
+ m_pCluster(pCluster),
+ m_index(idx),
+ m_block(start, size, pCluster->m_pSegment->m_pReader)
+{
+}
+
+
+bool SimpleBlock::EOS() const
+{
+ return false;
+}
+
+
+Cluster* SimpleBlock::GetCluster() const
+{
+ return m_pCluster;
+}
+
+
+size_t SimpleBlock::GetIndex() const
+{
+ return m_index;
+}
+
+
+const Block* SimpleBlock::GetBlock() const
+{
+ return &m_block;
+}
+
+
+bool SimpleBlock::IsBFrame() const
+{
+ return false;
+}
+
+
+BlockGroup::BlockGroup(
+ Cluster* pCluster,
+ size_t idx,
+ long long start,
+ long long size_) :
+ m_pCluster(pCluster),
+ m_index(idx),
+ m_prevTimeCode(0),
+ m_nextTimeCode(0),
+ m_pBlock(NULL) //TODO: accept multiple blocks within a block group
+{
+ IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
+
+ long long pos = start;
+ const long long stop = start + size_;
+
+ bool bSimpleBlock = false;
+ bool bReferenceBlock = false;
+
+ while (pos < stop)
+ {
+ short t;
+
+ if (Match(pReader, pos, 0x7B, t))
+ {
+ if (t < 0)
+ m_prevTimeCode = t;
+ else if (t > 0)
+ m_nextTimeCode = t;
+ else
+ assert(false);
+
+ bReferenceBlock = true;
+ }
+ else
+ {
+ long len;
+ const long long id = ReadUInt(pReader, pos, len);
+ assert(id >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume ID
+
+ const long long size = ReadUInt(pReader, pos, len);
+ assert(size >= 0); //TODO
+ assert((pos + len) <= stop);
+
+ pos += len; //consume size
+
+ switch (id)
+ {
+ case 0x23: //SimpleBlock ID
+ bSimpleBlock = true;
+ //YES, FALL THROUGH TO NEXT CASE
+
+ case 0x21: //Block ID
+ ParseBlock(pos, size);
+ break;
+
+ default:
+ break;
+ }
+
+ pos += size; //consume payload
+ assert(pos <= stop);
+ }
+ }
+
+ assert(pos == stop);
+ assert(m_pBlock);
+
+ if (!bSimpleBlock)
+ m_pBlock->SetKey(!bReferenceBlock);
+}
+
+
+BlockGroup::~BlockGroup()
+{
+ delete m_pBlock;
+}
+
+
+void BlockGroup::ParseBlock(long long start, long long size)
+{
+ IMkvReader* const pReader = m_pCluster->m_pSegment->m_pReader;
+
+ Block* const pBlock = new Block(start, size, pReader);
+ assert(pBlock); //TODO
+
+    //TODO: the Matroska spec says you can have multiple blocks within the
+ //same block group, with blocks ranked by priority (the flag bits).
+
+ assert(m_pBlock == NULL);
+ m_pBlock = pBlock;
+}
+
+
+bool BlockGroup::EOS() const
+{
+ return false;
+}
+
+
+Cluster* BlockGroup::GetCluster() const
+{
+ return m_pCluster;
+}
+
+
+size_t BlockGroup::GetIndex() const
+{
+ return m_index;
+}
+
+
+const Block* BlockGroup::GetBlock() const
+{
+ return m_pBlock;
+}
+
+
+short BlockGroup::GetPrevTimeCode() const
+{
+ return m_prevTimeCode;
+}
+
+
+short BlockGroup::GetNextTimeCode() const
+{
+ return m_nextTimeCode;
+}
+
+
+bool BlockGroup::IsBFrame() const
+{
+ return (m_nextTimeCode > 0);
+}
+
+
+
+Block::Block(long long start, long long size_, IMkvReader* pReader) :
+ m_start(start),
+ m_size(size_)
+{
+ long long pos = start;
+ const long long stop = start + size_;
+
+ long len;
+
+ m_track = ReadUInt(pReader, pos, len);
+ assert(m_track > 0);
+ assert((pos + len) <= stop);
+
+ pos += len; //consume track number
+ assert((stop - pos) >= 2);
+
+ m_timecode = Unserialize2SInt(pReader, pos);
+
+ pos += 2;
+ assert((stop - pos) >= 1);
+
+ const long hr = pReader->Read(pos, 1, &m_flags);
+ assert(hr == 0L);
+
+ ++pos;
+ assert(pos <= stop);
+
+ m_frameOff = pos;
+
+ const long long frame_size = stop - pos;
+
+ assert(frame_size <= 2147483647L);
+
+ m_frameSize = static_cast<long>(frame_size);
+}
+
+
+long long Block::GetTimeCode(Cluster* pCluster) const
+{
+ assert(pCluster);
+
+ const long long tc0 = pCluster->GetTimeCode();
+ assert(tc0 >= 0);
+
+ const long long tc = tc0 + static_cast<long long>(m_timecode);
+ assert(tc >= 0);
+
+ return tc; //unscaled timecode units
+}
+
+
+long long Block::GetTime(Cluster* pCluster) const
+{
+ assert(pCluster);
+
+ const long long tc = GetTimeCode(pCluster);
+
+ const Segment* const pSegment = pCluster->m_pSegment;
+ const SegmentInfo* const pInfo = pSegment->GetInfo();
+ assert(pInfo);
+
+ const long long scale = pInfo->GetTimeCodeScale();
+ assert(scale >= 1);
+
+ const long long ns = tc * scale;
+
+ return ns;
+}
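+
+//For example, if the enclosing cluster's timecode is 1000 ticks, the
+//block's (signed, 16-bit) relative timecode is -20, and the timecode
+//scale is the default 1000000 ns per tick, then GetTimeCode returns
+//980 and GetTime returns 980 * 1000000 = 980000000 ns.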
+
+
+long long Block::GetTrackNumber() const
+{
+ return m_track;
+}
+
+
+bool Block::IsKey() const
+{
+ return ((m_flags & static_cast<unsigned char>(1 << 7)) != 0);
+}
+
+
+void Block::SetKey(bool bKey)
+{
+ if (bKey)
+ m_flags |= static_cast<unsigned char>(1 << 7);
+ else
+ m_flags &= 0x7F;
+}
+
+
+long long Block::GetOffset() const
+{
+ return m_frameOff;
+}
+
+
+long Block::GetSize() const
+{
+ return m_frameSize;
+}
+
+
+long Block::Read(IMkvReader* pReader, unsigned char* buf) const
+{
+
+ assert(pReader);
+ assert(buf);
+
+ const long hr = pReader->Read(m_frameOff, m_frameSize, buf);
+
+ return hr;
+}
+
+
+} //end namespace mkvparser
diff --git a/media/libstagefright/matroska/mkvparser.hpp b/media/libstagefright/matroska/mkvparser.hpp
index 4d311b4..c46d349 100644
--- a/media/libstagefright/matroska/mkvparser.hpp
+++ b/media/libstagefright/matroska/mkvparser.hpp
@@ -1,428 +1,554 @@
-#ifndef MKVPARSER_HPP
-#define MKVPARSER_HPP
-
-#include <cstdlib>
-#include <cstdio>
-
-namespace mkvparser
-{
-
-const int E_FILE_FORMAT_INVALID = -2;
-const int E_BUFFER_NOT_FULL = -3;
-
-class IMkvReader
-{
-public:
- virtual int Read(long long position, long length, unsigned char* buffer) = 0;
- virtual int Length(long long* total, long long* available) = 0;
-protected:
- virtual ~IMkvReader();
-};
-
-long long GetUIntLength(IMkvReader*, long long, long&);
-long long ReadUInt(IMkvReader*, long long, long&);
-long long SyncReadUInt(IMkvReader*, long long pos, long long stop, long&);
-long long UnserializeUInt(IMkvReader*, long long pos, long long size);
-float Unserialize4Float(IMkvReader*, long long);
-double Unserialize8Double(IMkvReader*, long long);
-short Unserialize2SInt(IMkvReader*, long long);
-signed char Unserialize1SInt(IMkvReader*, long long);
-bool Match(IMkvReader*, long long&, unsigned long, long long&);
-bool Match(IMkvReader*, long long&, unsigned long, char*&);
-bool Match(IMkvReader*, long long&, unsigned long,unsigned char*&,
- size_t *optionalSize = NULL);
-bool Match(IMkvReader*, long long&, unsigned long, double&);
-bool Match(IMkvReader*, long long&, unsigned long, short&);
-
-
-struct EBMLHeader
-{
- EBMLHeader();
- ~EBMLHeader();
- long long m_version;
- long long m_readVersion;
- long long m_maxIdLength;
- long long m_maxSizeLength;
- char* m_docType;
- long long m_docTypeVersion;
- long long m_docTypeReadVersion;
-
- long long Parse(IMkvReader*, long long&);
-};
-
-
-class Segment;
-class Track;
-class Cluster;
-
-class Block
-{
- Block(const Block&);
- Block& operator=(const Block&);
-
-public:
- const long long m_start;
- const long long m_size;
-
- Block(long long start, long long size, IMkvReader*);
-
- unsigned long GetTrackNumber() const;
-
- long long GetTimeCode(Cluster*) const; //absolute, but not scaled
- long long GetTime(Cluster*) const; //absolute, and scaled (nanosecond units)
- bool IsKey() const;
- void SetKey(bool);
-
- long GetSize() const;
- long Read(IMkvReader*, unsigned char*) const;
-
-private:
- long long m_track; //Track::Number()
- short m_timecode; //relative to cluster
- unsigned char m_flags;
- long long m_frameOff;
- long m_frameSize;
-
-};
-
-
-class BlockEntry
-{
- BlockEntry(const BlockEntry&);
- BlockEntry& operator=(const BlockEntry&);
-
-public:
- virtual ~BlockEntry();
- virtual bool EOS() const = 0;
- virtual Cluster* GetCluster() const = 0;
- virtual size_t GetIndex() const = 0;
- virtual const Block* GetBlock() const = 0;
- virtual bool IsBFrame() const = 0;
-
-protected:
- BlockEntry();
-
-};
-
-
-class SimpleBlock : public BlockEntry
-{
- SimpleBlock(const SimpleBlock&);
- SimpleBlock& operator=(const SimpleBlock&);
-
-public:
- SimpleBlock(Cluster*, size_t, long long start, long long size);
-
- bool EOS() const;
- Cluster* GetCluster() const;
- size_t GetIndex() const;
- const Block* GetBlock() const;
- bool IsBFrame() const;
-
-protected:
- Cluster* const m_pCluster;
- const size_t m_index;
- Block m_block;
-
-};
-
-
-class BlockGroup : public BlockEntry
-{
- BlockGroup(const BlockGroup&);
- BlockGroup& operator=(const BlockGroup&);
-
-public:
- BlockGroup(Cluster*, size_t, long long, long long);
- ~BlockGroup();
-
- bool EOS() const;
- Cluster* GetCluster() const;
- size_t GetIndex() const;
- const Block* GetBlock() const;
- bool IsBFrame() const;
-
- short GetPrevTimeCode() const; //relative to block's time
- short GetNextTimeCode() const; //as above
-
-protected:
- Cluster* const m_pCluster;
- const size_t m_index;
-
-private:
- BlockGroup(Cluster*, size_t, unsigned long);
- void ParseBlock(long long start, long long size);
-
- short m_prevTimeCode;
- short m_nextTimeCode;
-
- //TODO: the Matroska spec says you can have multiple blocks within the
- //same block group, with blocks ranked by priority (the flag bits).
- //For now we just cache a single block.
-#if 0
- typedef std::deque<Block*> blocks_t;
- blocks_t m_blocks; //In practice should contain only a single element.
-#else
- Block* m_pBlock;
-#endif
-
-};
-
-
-class Track
-{
- Track(const Track&);
- Track& operator=(const Track&);
-
-public:
- Segment* const m_pSegment;
- virtual ~Track();
-
- long long GetType() const;
- unsigned long GetNumber() const;
- const char* GetNameAsUTF8() const;
- const char* GetCodecNameAsUTF8() const;
- const char* GetCodecId() const;
- const unsigned char* GetCodecPrivate(
- size_t *optionalSize = NULL) const;
-
- const BlockEntry* GetEOS() const;
-
- struct Settings
- {
- long long start;
- long long size;
- };
-
- struct Info
- {
- long long type;
- long long number;
- long long uid;
- char* nameAsUTF8;
- char* codecId;
- unsigned char* codecPrivate;
- size_t codecPrivateSize;
- char* codecNameAsUTF8;
- Settings settings;
- Info();
- void Clear();
- };
-
- long GetFirst(const BlockEntry*&) const;
- long GetNext(const BlockEntry* pCurr, const BlockEntry*& pNext) const;
- virtual bool VetEntry(const BlockEntry*) const = 0;
-
-protected:
- Track(Segment*, const Info&);
- const Info m_info;
-
- class EOSBlock : public BlockEntry
- {
- public:
- EOSBlock();
-
- bool EOS() const;
- Cluster* GetCluster() const;
- size_t GetIndex() const;
- const Block* GetBlock() const;
- bool IsBFrame() const;
- };
-
- EOSBlock m_eos;
-
-};
-
-
-class VideoTrack : public Track
-{
- VideoTrack(const VideoTrack&);
- VideoTrack& operator=(const VideoTrack&);
-
-public:
- VideoTrack(Segment*, const Info&);
- long long GetWidth() const;
- long long GetHeight() const;
- double GetFrameRate() const;
-
- bool VetEntry(const BlockEntry*) const;
-
-private:
- long long m_width;
- long long m_height;
- double m_rate;
-
-};
-
-
-class AudioTrack : public Track
-{
- AudioTrack(const AudioTrack&);
- AudioTrack& operator=(const AudioTrack&);
-
-public:
- AudioTrack(Segment*, const Info&);
- double GetSamplingRate() const;
- long long GetChannels() const;
- long long GetBitDepth() const;
- bool VetEntry(const BlockEntry*) const;
-
-private:
- double m_rate;
- long long m_channels;
- long long m_bitDepth;
-};
-
-
-class Tracks
-{
- Tracks(const Tracks&);
- Tracks& operator=(const Tracks&);
-
-public:
- Segment* const m_pSegment;
- const long long m_start;
- const long long m_size;
-
- Tracks(Segment*, long long start, long long size);
- virtual ~Tracks();
-
- Track* GetTrackByNumber(unsigned long tn) const;
- Track* GetTrackByIndex(unsigned long idx) const;
-
-private:
- Track** m_trackEntries;
- Track** m_trackEntriesEnd;
-
- void ParseTrackEntry(long long, long long, Track*&);
-
-public:
- unsigned long GetTracksCount() const;
-};
-
-
-class SegmentInfo
-{
- SegmentInfo(const SegmentInfo&);
- SegmentInfo& operator=(const SegmentInfo&);
-
-public:
- Segment* const m_pSegment;
- const long long m_start;
- const long long m_size;
-
- SegmentInfo(Segment*, long long start, long long size);
- ~SegmentInfo();
- long long GetTimeCodeScale() const;
- long long GetDuration() const; //scaled
- const char* GetMuxingAppAsUTF8() const;
- const char* GetWritingAppAsUTF8() const;
- const char* GetTitleAsUTF8() const;
-
-private:
- long long m_timecodeScale;
- double m_duration;
- char* m_pMuxingAppAsUTF8;
- char* m_pWritingAppAsUTF8;
- char* m_pTitleAsUTF8;
-};
-
-
-class Cluster
-{
- Cluster(const Cluster&);
- Cluster& operator=(const Cluster&);
-
-public:
- Segment* const m_pSegment;
- const size_t m_index;
-
-public:
- static Cluster* Parse(Segment*, size_t, long long off);
-
- Cluster(); //EndOfStream
- ~Cluster();
-
- bool EOS() const;
-
- long long GetTimeCode(); //absolute, but not scaled
- long long GetTime(); //absolute, and scaled (nanosecond units)
-
- const BlockEntry* GetFirst();
- const BlockEntry* GetLast();
- const BlockEntry* GetNext(const BlockEntry*) const;
- const BlockEntry* GetEntry(const Track*);
-protected:
- Cluster(Segment*, size_t, long long off);
-
-private:
- long long m_start;
- long long m_size;
- long long m_timecode;
- BlockEntry** m_pEntries;
- size_t m_entriesCount;
-
- void Load();
- void LoadBlockEntries();
- void ParseBlockGroup(long long, long long, size_t);
- void ParseSimpleBlock(long long, long long, size_t);
-
-};
-
-
-class Segment
-{
- Segment(const Segment&);
- Segment& operator=(const Segment&);
-
-private:
- Segment(IMkvReader*, long long pos, long long size);
-
-public:
- IMkvReader* const m_pReader;
- const long long m_start; //posn of segment payload
- const long long m_size; //size of segment payload
- Cluster m_eos; //TODO: make private?
-
- static long long CreateInstance(IMkvReader*, long long, Segment*&);
- ~Segment();
-
- //for big-bang loading (source filter)
- long Load();
-
- //for incremental loading (splitter)
- long long Unparsed() const;
- long long ParseHeaders();
- long ParseCluster(Cluster*&, long long& newpos) const;
- bool AddCluster(Cluster*, long long);
-
- Tracks* GetTracks() const;
- const SegmentInfo* const GetInfo() const;
- long long GetDuration() const;
-
- //NOTE: this turned out to be too inefficient.
- //long long Load(long long time_nanoseconds);
-
- Cluster* GetFirst();
- Cluster* GetLast();
- unsigned long GetCount() const;
-
- Cluster* GetNext(const Cluster*);
- Cluster* GetCluster(long long time_nanoseconds);
-
-private:
- long long m_pos; //absolute file posn; what has been consumed so far
- SegmentInfo* m_pInfo;
- Tracks* m_pTracks;
- Cluster** m_clusters;
- size_t m_clusterCount;
-
- void ParseSeekHead(long long pos, long long size, size_t*);
- void ParseSeekEntry(long long pos, long long size, size_t*);
- void ParseSecondarySeekHead(long long off, size_t*);
-};
-
-
-} //end namespace mkvparser
-
-#endif //MKVPARSER_HPP
+// Copyright (c) 2010 The WebM project authors. All Rights Reserved.
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+
+#ifndef MKVPARSER_HPP
+#define MKVPARSER_HPP
+
+#include <cstdlib>
+#include <cstdio>
+
+namespace mkvparser
+{
+
+const int E_FILE_FORMAT_INVALID = -2;
+const int E_BUFFER_NOT_FULL = -3;
+
+class IMkvReader
+{
+public:
+ virtual int Read(long long pos, long len, unsigned char* buf) = 0;
+ virtual int Length(long long* total, long long* available) = 0;
+protected:
+ virtual ~IMkvReader();
+};
+
+long long GetUIntLength(IMkvReader*, long long, long&);
+long long ReadUInt(IMkvReader*, long long, long&);
+long long SyncReadUInt(IMkvReader*, long long pos, long long stop, long&);
+long long UnserializeUInt(IMkvReader*, long long pos, long long size);
+float Unserialize4Float(IMkvReader*, long long);
+double Unserialize8Double(IMkvReader*, long long);
+short Unserialize2SInt(IMkvReader*, long long);
+signed char Unserialize1SInt(IMkvReader*, long long);
+bool Match(IMkvReader*, long long&, unsigned long, long long&);
+bool Match(IMkvReader*, long long&, unsigned long, char*&);
+bool Match(IMkvReader*, long long&, unsigned long,unsigned char*&, size_t&);
+bool Match(IMkvReader*, long long&, unsigned long, double&);
+bool Match(IMkvReader*, long long&, unsigned long, short&);
+
+void GetVersion(int& major, int& minor, int& build, int& revision);
+
+struct EBMLHeader
+{
+ EBMLHeader();
+ ~EBMLHeader();
+ long long m_version;
+ long long m_readVersion;
+ long long m_maxIdLength;
+ long long m_maxSizeLength;
+ char* m_docType;
+ long long m_docTypeVersion;
+ long long m_docTypeReadVersion;
+
+ long long Parse(IMkvReader*, long long&);
+};
+
+
+class Segment;
+class Track;
+class Cluster;
+
+class Block
+{
+ Block(const Block&);
+ Block& operator=(const Block&);
+
+public:
+ const long long m_start;
+ const long long m_size;
+
+ Block(long long start, long long size, IMkvReader*);
+
+ long long GetTrackNumber() const;
+ long long GetTimeCode(Cluster*) const; //absolute, but not scaled
+ long long GetTime(Cluster*) const; //absolute, and scaled (ns units)
+ bool IsKey() const;
+ void SetKey(bool);
+
+ long long GetOffset() const;
+ long GetSize() const;
+ long Read(IMkvReader*, unsigned char*) const;
+
+private:
+ long long m_track; //Track::Number()
+ short m_timecode; //relative to cluster
+ unsigned char m_flags;
+ long long m_frameOff;
+ long m_frameSize;
+
+};
+
+
+class BlockEntry
+{
+ BlockEntry(const BlockEntry&);
+ BlockEntry& operator=(const BlockEntry&);
+
+public:
+ virtual ~BlockEntry();
+ virtual bool EOS() const = 0;
+ virtual Cluster* GetCluster() const = 0;
+ virtual size_t GetIndex() const = 0;
+ virtual const Block* GetBlock() const = 0;
+ virtual bool IsBFrame() const = 0;
+
+protected:
+ BlockEntry();
+
+};
+
+
+class SimpleBlock : public BlockEntry
+{
+ SimpleBlock(const SimpleBlock&);
+ SimpleBlock& operator=(const SimpleBlock&);
+
+public:
+ SimpleBlock(Cluster*, size_t, long long start, long long size);
+
+ bool EOS() const;
+ Cluster* GetCluster() const;
+ size_t GetIndex() const;
+ const Block* GetBlock() const;
+ bool IsBFrame() const;
+
+protected:
+ Cluster* const m_pCluster;
+ const size_t m_index;
+ Block m_block;
+
+};
+
+
+class BlockGroup : public BlockEntry
+{
+ BlockGroup(const BlockGroup&);
+ BlockGroup& operator=(const BlockGroup&);
+
+public:
+ BlockGroup(Cluster*, size_t, long long, long long);
+ ~BlockGroup();
+
+ bool EOS() const;
+ Cluster* GetCluster() const;
+ size_t GetIndex() const;
+ const Block* GetBlock() const;
+ bool IsBFrame() const;
+
+ short GetPrevTimeCode() const; //relative to block's time
+ short GetNextTimeCode() const; //as above
+
+protected:
+ Cluster* const m_pCluster;
+ const size_t m_index;
+
+private:
+ BlockGroup(Cluster*, size_t, unsigned long);
+ void ParseBlock(long long start, long long size);
+
+ short m_prevTimeCode;
+ short m_nextTimeCode;
+
+ //TODO: the Matroska spec says you can have multiple blocks within the
+ //same block group, with blocks ranked by priority (the flag bits).
+ //For now we just cache a single block.
+#if 0
+ typedef std::deque<Block*> blocks_t;
+ blocks_t m_blocks; //In practice should contain only a single element.
+#else
+ Block* m_pBlock;
+#endif
+
+};
+
+
+class Track
+{
+ Track(const Track&);
+ Track& operator=(const Track&);
+
+public:
+ Segment* const m_pSegment;
+ virtual ~Track();
+
+ long long GetType() const;
+ long long GetNumber() const;
+ const char* GetNameAsUTF8() const;
+ const char* GetCodecNameAsUTF8() const;
+ const char* GetCodecId() const;
+ const unsigned char* GetCodecPrivate(size_t&) const;
+
+ const BlockEntry* GetEOS() const;
+
+ struct Settings
+ {
+ long long start;
+ long long size;
+ };
+
+ struct Info
+ {
+ long long type;
+ long long number;
+ long long uid;
+ char* nameAsUTF8;
+ char* codecId;
+ unsigned char* codecPrivate;
+ size_t codecPrivateSize;
+ char* codecNameAsUTF8;
+ Settings settings;
+ Info();
+ void Clear();
+ };
+
+ long GetFirst(const BlockEntry*&) const;
+ long GetNext(const BlockEntry* pCurr, const BlockEntry*& pNext) const;
+ virtual bool VetEntry(const BlockEntry*) const = 0;
+
+protected:
+ Track(Segment*, const Info&);
+ const Info m_info;
+
+ class EOSBlock : public BlockEntry
+ {
+ public:
+ EOSBlock();
+
+ bool EOS() const;
+ Cluster* GetCluster() const;
+ size_t GetIndex() const;
+ const Block* GetBlock() const;
+ bool IsBFrame() const;
+ };
+
+ EOSBlock m_eos;
+
+};
+
+
+class VideoTrack : public Track
+{
+ VideoTrack(const VideoTrack&);
+ VideoTrack& operator=(const VideoTrack&);
+
+public:
+ VideoTrack(Segment*, const Info&);
+ long long GetWidth() const;
+ long long GetHeight() const;
+ double GetFrameRate() const;
+
+ bool VetEntry(const BlockEntry*) const;
+
+private:
+ long long m_width;
+ long long m_height;
+ double m_rate;
+
+};
+
+
+class AudioTrack : public Track
+{
+ AudioTrack(const AudioTrack&);
+ AudioTrack& operator=(const AudioTrack&);
+
+public:
+ AudioTrack(Segment*, const Info&);
+ double GetSamplingRate() const;
+ long long GetChannels() const;
+ long long GetBitDepth() const;
+ bool VetEntry(const BlockEntry*) const;
+
+private:
+ double m_rate;
+ long long m_channels;
+ long long m_bitDepth;
+};
+
+
+class Tracks
+{
+ Tracks(const Tracks&);
+ Tracks& operator=(const Tracks&);
+
+public:
+ Segment* const m_pSegment;
+ const long long m_start;
+ const long long m_size;
+
+ Tracks(Segment*, long long start, long long size);
+ virtual ~Tracks();
+
+ Track* GetTrackByNumber(unsigned long tn) const;
+ Track* GetTrackByIndex(unsigned long idx) const;
+
+private:
+ Track** m_trackEntries;
+ Track** m_trackEntriesEnd;
+
+ void ParseTrackEntry(long long, long long, Track*&);
+
+public:
+ unsigned long GetTracksCount() const;
+};
+
+
+class SegmentInfo
+{
+ SegmentInfo(const SegmentInfo&);
+ SegmentInfo& operator=(const SegmentInfo&);
+
+public:
+ Segment* const m_pSegment;
+ const long long m_start;
+ const long long m_size;
+
+ SegmentInfo(Segment*, long long start, long long size);
+ ~SegmentInfo();
+ long long GetTimeCodeScale() const;
+ long long GetDuration() const; //scaled
+ const char* GetMuxingAppAsUTF8() const;
+ const char* GetWritingAppAsUTF8() const;
+ const char* GetTitleAsUTF8() const;
+
+private:
+ long long m_timecodeScale;
+ double m_duration;
+ char* m_pMuxingAppAsUTF8;
+ char* m_pWritingAppAsUTF8;
+ char* m_pTitleAsUTF8;
+};
+
+class Cues;
+class CuePoint
+{
+ friend class Cues;
+
+ CuePoint(size_t, long long);
+ ~CuePoint();
+
+ CuePoint(const CuePoint&);
+ CuePoint& operator=(const CuePoint&);
+
+public:
+ void Load(IMkvReader*);
+
+ long long GetTimeCode() const; //absolute but unscaled
+ long long GetTime(Segment*) const; //absolute and scaled (ns units)
+
+ struct TrackPosition
+ {
+ long long m_track;
+ long long m_pos; //of cluster
+ long long m_block;
+ //codec_state //defaults to 0
+ //reference = clusters containing req'd referenced blocks
+ // reftime = timecode of the referenced block
+
+ void Parse(IMkvReader*, long long, long long);
+ };
+
+ const TrackPosition* Find(const Track*) const;
+
+private:
+ const size_t m_index;
+ long long m_timecode;
+ TrackPosition* m_track_positions;
+ size_t m_track_positions_count;
+
+};
+
+
+class Cues
+{
+ friend class Segment;
+
+ Cues(Segment*, long long start, long long size);
+ ~Cues();
+
+ Cues(const Cues&);
+ Cues& operator=(const Cues&);
+
+public:
+ Segment* const m_pSegment;
+ const long long m_start;
+ const long long m_size;
+
+ bool Find( //lower bound of time_ns
+ long long time_ns,
+ const Track*,
+ const CuePoint*&,
+ const CuePoint::TrackPosition*&) const;
+
+#if 0
+ bool FindNext( //upper_bound of time_ns
+ long long time_ns,
+ const Track*,
+ const CuePoint*&,
+ const CuePoint::TrackPosition*&) const;
+#endif
+
+ const CuePoint* GetFirst() const;
+ const CuePoint* GetLast() const;
+
+ const CuePoint* GetNext(const CuePoint*) const;
+
+ const BlockEntry* GetBlock(
+ const CuePoint*,
+ const CuePoint::TrackPosition*) const;
+
+private:
+ void Init() const;
+ bool LoadCuePoint() const;
+ void PreloadCuePoint(size_t&, long long) const;
+
+ mutable CuePoint** m_cue_points;
+ mutable size_t m_count;
+ mutable size_t m_preload_count;
+ mutable long long m_pos;
+
+};
+
+
+class Cluster
+{
+ Cluster(const Cluster&);
+ Cluster& operator=(const Cluster&);
+
+public:
+ Segment* const m_pSegment;
+
+public:
+ static Cluster* Parse(Segment*, long, long long off);
+
+ Cluster(); //EndOfStream
+ ~Cluster();
+
+ bool EOS() const;
+
+ long long GetTimeCode(); //absolute, but not scaled
+ long long GetTime(); //absolute, and scaled (nanosecond units)
+ long long GetFirstTime(); //time (ns) of first (earliest) block
+ long long GetLastTime(); //time (ns) of last (latest) block
+
+ const BlockEntry* GetFirst();
+ const BlockEntry* GetLast();
+ const BlockEntry* GetNext(const BlockEntry*) const;
+ const BlockEntry* GetEntry(const Track*);
+ const BlockEntry* GetEntry(
+ const CuePoint&,
+ const CuePoint::TrackPosition&);
+ const BlockEntry* GetMaxKey(const VideoTrack*);
+
+protected:
+ Cluster(Segment*, long, long long off);
+
+public:
+ //TODO: these should all be private, with public selector functions
+ long m_index;
+ long long m_pos;
+ long long m_size;
+
+private:
+ long long m_timecode;
+ BlockEntry** m_entries;
+ size_t m_entriesCount;
+
+ void Load();
+ void LoadBlockEntries();
+ void ParseBlockGroup(long long, long long, size_t);
+ void ParseSimpleBlock(long long, long long, size_t);
+
+};
+
+
+class Segment
+{
+ friend class Cues;
+
+ Segment(const Segment&);
+ Segment& operator=(const Segment&);
+
+private:
+ Segment(IMkvReader*, long long pos, long long size);
+
+public:
+ IMkvReader* const m_pReader;
+ const long long m_start; //posn of segment payload
+ const long long m_size; //size of segment payload
+ Cluster m_eos; //TODO: make private?
+
+ static long long CreateInstance(IMkvReader*, long long, Segment*&);
+ ~Segment();
+
+ long Load(); //loads headers and all clusters
+
+ //for incremental loading (splitter)
+ long long Unparsed() const;
+ long long ParseHeaders(); //stops when first cluster is found
+ long LoadCluster(); //loads one cluster
+
+#if 0
+ //This pair parses one cluster, but only changes the state of the
+ //segment object when the cluster is actually added to the index.
+ long ParseCluster(Cluster*&, long long& newpos) const;
+ bool AddCluster(Cluster*, long long);
+#endif
+
+ Tracks* GetTracks() const;
+ const SegmentInfo* GetInfo() const;
+ const Cues* GetCues() const;
+
+ long long GetDuration() const;
+
+ unsigned long GetCount() const;
+ Cluster* GetFirst();
+ Cluster* GetLast();
+ Cluster* GetNext(const Cluster*);
+
+ Cluster* FindCluster(long long time_nanoseconds);
+ const BlockEntry* Seek(long long time_nanoseconds, const Track*);
+
+private:
+
+ long long m_pos; //absolute file posn; what has been consumed so far
+ SegmentInfo* m_pInfo;
+ Tracks* m_pTracks;
+ Cues* m_pCues;
+ Cluster** m_clusters;
+ long m_clusterCount; //number of entries for which m_index >= 0
+ long m_clusterPreloadCount; //number of entries for which m_index < 0
+ long m_clusterSize; //array size
+
+ void AppendCluster(Cluster*);
+ void PreloadCluster(Cluster*, ptrdiff_t);
+
+ void ParseSeekHead(long long pos, long long size);
+ void ParseSeekEntry(long long pos, long long size);
+ void ParseCues(long long);
+
+ const BlockEntry* GetBlock(
+ const CuePoint&,
+ const CuePoint::TrackPosition&);
+
+};
+
+
+} //end namespace mkvparser
+
+#endif //MKVPARSER_HPP
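
For orientation, a minimal sketch (not the MatroskaExtractor's actual code) of how a client might drive the API declared above. The FileReader class is an assumed stand-in for whatever IMkvReader implementation the caller supplies, and the error checks assume the parser's usual convention that negative return values signal failure.

    #include <cstdio>
    #include "mkvparser.hpp"

    // Assumed stand-in IMkvReader backed by a stdio FILE*.
    class FileReader : public mkvparser::IMkvReader {
    public:
        explicit FileReader(FILE* f) : m_file(f) {}

        virtual int Read(long long pos, long len, unsigned char* buf) {
            if (fseek(m_file, static_cast<long>(pos), SEEK_SET) != 0)
                return -1;
            return fread(buf, 1, len, m_file) == static_cast<size_t>(len) ? 0 : -1;
        }

        virtual int Length(long long* total, long long* available) {
            fseek(m_file, 0, SEEK_END);
            *total = *available = ftell(m_file);
            return 0;
        }

    private:
        FILE* m_file;
    };

    int main(int argc, char** argv) {
        if (argc < 2) return 1;
        FILE* f = fopen(argv[1], "rb");
        if (f == NULL) return 1;
        FileReader reader(f);

        long long pos = 0;
        mkvparser::EBMLHeader ebml;
        if (ebml.Parse(&reader, pos) < 0) return 1;  // pos now points at the Segment

        mkvparser::Segment* segment;
        if (mkvparser::Segment::CreateInstance(&reader, pos, segment) < 0) return 1;
        if (segment->Load() < 0) return 1;  // "big-bang" load of headers and clusters

        for (mkvparser::Cluster* c = segment->GetFirst();
                c != NULL && !c->EOS(); c = segment->GetNext(c)) {
            for (const mkvparser::BlockEntry* e = c->GetFirst();
                    e != NULL; e = c->GetNext(e)) {
                const mkvparser::Block* b = e->GetBlock();
                printf("track %lld, time %lld ns, %ld bytes\n",
                       b->GetTrackNumber(), b->GetTime(c), b->GetSize());
            }
        }

        delete segment;
        fclose(f);
        return 0;
    }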
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index c88c6c1..f06a1bb 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -274,6 +274,8 @@ ATSParser::Stream::Stream(
mQueue(streamType == 0x1b
? ElementaryStreamQueue::H264 : ElementaryStreamQueue::AAC) {
mBuffer->setRange(0, 0);
+
+ LOGV("new stream PID 0x%02x, type 0x%02x", elementaryPID, streamType);
}
ATSParser::Stream::~Stream() {
@@ -307,7 +309,8 @@ void ATSParser::Stream::parse(
}
void ATSParser::Stream::signalDiscontinuity(bool isASeek) {
- LOGV("Stream discontinuity");
+ isASeek = false; // Always signal a "real" discontinuity
+
mPayloadStarted = false;
mBuffer->setRange(0, 0);
@@ -317,7 +320,9 @@ void ATSParser::Stream::signalDiscontinuity(bool isASeek) {
// This is only a "minor" discontinuity, we stay within the same
// bitstream.
- mSource->clear();
+ if (mSource != NULL) {
+ mSource->clear();
+ }
return;
}
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index b0b9e66..f11b3c3 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -41,7 +41,10 @@ sp<MetaData> ElementaryStreamQueue::getFormat() {
}
void ElementaryStreamQueue::clear() {
- mBuffer->setRange(0, 0);
+ if (mBuffer != NULL) {
+ mBuffer->setRange(0, 0);
+ }
+
mTimestamps.clear();
mFormat.clear();
}
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 63af26a..4e9920b 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -24,14 +24,11 @@
#include <sys/resource.h>
#include "../include/OMX.h"
-#include "OMXRenderer.h"
#include "../include/OMXNodeInstance.h"
-#include "../include/SoftwareRenderer.h"
#include <binder/IMemory.h>
#include <media/stagefright/MediaDebug.h>
-#include <media/stagefright/VideoRenderer.h>
#include <utils/threads.h>
#include "OMXMaster.h"
@@ -289,6 +286,16 @@ status_t OMX::setConfig(
index, params, size);
}
+status_t OMX::enableGraphicBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->enableGraphicBuffers(port_index, enable);
+}
+
+status_t OMX::storeMetaDataInBuffers(
+ node_id node, OMX_U32 port_index, OMX_BOOL enable) {
+ return findInstance(node)->storeMetaDataInBuffers(port_index, enable);
+}
+
status_t OMX::useBuffer(
node_id node, OMX_U32 port_index, const sp<IMemory> &params,
buffer_id *buffer) {
@@ -296,6 +303,13 @@ status_t OMX::useBuffer(
port_index, params, buffer);
}
+status_t OMX::useGraphicBuffer(
+ node_id node, OMX_U32 port_index,
+ const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer) {
+ return findInstance(node)->useGraphicBuffer(
+ port_index, graphicBuffer, buffer);
+}
+
status_t OMX::allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -425,128 +439,4 @@ void OMX::invalidateNodeID_l(node_id node) {
mNodeIDToInstance.removeItem(node);
}
-////////////////////////////////////////////////////////////////////////////////
-
-struct SharedVideoRenderer : public VideoRenderer {
- SharedVideoRenderer(void *libHandle, VideoRenderer *obj)
- : mLibHandle(libHandle),
- mObj(obj) {
- }
-
- virtual ~SharedVideoRenderer() {
- delete mObj;
- mObj = NULL;
-
- dlclose(mLibHandle);
- mLibHandle = NULL;
- }
-
- virtual void render(
- const void *data, size_t size, void *platformPrivate) {
- return mObj->render(data, size, platformPrivate);
- }
-
-private:
- void *mLibHandle;
- VideoRenderer *mObj;
-
- SharedVideoRenderer(const SharedVideoRenderer &);
- SharedVideoRenderer &operator=(const SharedVideoRenderer &);
-};
-
-sp<IOMXRenderer> OMX::createRenderer(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t encodedWidth, size_t encodedHeight,
- size_t displayWidth, size_t displayHeight,
- int32_t rotationDegrees) {
- Mutex::Autolock autoLock(mLock);
-
- VideoRenderer *impl = NULL;
-
- void *libHandle = dlopen("libstagefrighthw.so", RTLD_NOW);
-
- if (libHandle) {
- typedef VideoRenderer *(*CreateRendererWithRotationFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight,
- int32_t rotationDegrees);
-
- typedef VideoRenderer *(*CreateRendererFunc)(
- const sp<ISurface> &surface,
- const char *componentName,
- OMX_COLOR_FORMATTYPE colorFormat,
- size_t displayWidth, size_t displayHeight,
- size_t decodedWidth, size_t decodedHeight);
-
- CreateRendererWithRotationFunc funcWithRotation =
- (CreateRendererWithRotationFunc)dlsym(
- libHandle,
- "_Z26createRendererWithRotationRKN7android2spINS_8"
- "ISurfaceEEEPKc20OMX_COLOR_FORMATTYPEjjjji");
-
- if (funcWithRotation) {
- impl = (*funcWithRotation)(
- surface, componentName, colorFormat,
- displayWidth, displayHeight, encodedWidth, encodedHeight,
- rotationDegrees);
- } else {
- CreateRendererFunc func =
- (CreateRendererFunc)dlsym(
- libHandle,
- "_Z14createRendererRKN7android2spINS_8ISurfaceEEEPKc20"
- "OMX_COLOR_FORMATTYPEjjjj");
-
- if (func) {
- impl = (*func)(surface, componentName, colorFormat,
- displayWidth, displayHeight, encodedWidth, encodedHeight);
- }
- }
-
- if (impl) {
- impl = new SharedVideoRenderer(libHandle, impl);
- libHandle = NULL;
- }
-
- if (libHandle) {
- dlclose(libHandle);
- libHandle = NULL;
- }
- }
-
- if (!impl) {
- LOGW("Using software renderer.");
- impl = new SoftwareRenderer(
- colorFormat,
- surface,
- displayWidth, displayHeight,
- encodedWidth, encodedHeight);
- }
-
- return new OMXRenderer(impl);
-}
-
-OMXRenderer::OMXRenderer(VideoRenderer *impl)
- : mImpl(impl) {
-}
-
-OMXRenderer::~OMXRenderer() {
- delete mImpl;
- mImpl = NULL;
-}
-
-void OMXRenderer::render(IOMX::buffer_id buffer) {
- OMX_BUFFERHEADERTYPE *header = (OMX_BUFFERHEADERTYPE *)buffer;
-
- mImpl->render(
- header->pBuffer + header->nOffset,
- header->nFilledLen,
- header->pPlatformPrivate);
-}
-
} // namespace android
-
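
The renderer factory deleted above used a common plugin-loading pattern: dlopen() a vendor library, dlsym() a factory symbol, and fall back to a software path when either step fails. A generic stand-alone sketch of that pattern follows; the library name, symbol name and factory signature are placeholders, not the real libstagefrighthw entry points.

    #include <dlfcn.h>
    #include <cstdio>

    typedef int (*FactoryFunc)();  // placeholder factory signature

    int main() {
        int result = -1;

        void* handle = dlopen("libvendorplugin.so", RTLD_NOW);  // placeholder name
        if (handle != NULL) {
            FactoryFunc factory = reinterpret_cast<FactoryFunc>(
                    dlsym(handle, "createPluginInstance"));     // placeholder symbol
            if (factory != NULL) {
                result = factory();  // use the vendor-provided implementation
            }
            dlclose(handle);
        }

        if (result < 0) {
            printf("falling back to the software implementation\n");
        }
        return 0;
    }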
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 5db516e..9b6d441 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -24,6 +24,7 @@
#include <OMX_Component.h>
#include <binder/IMemory.h>
+#include <media/stagefright/HardwareAPI.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaErrors.h>
@@ -40,6 +41,11 @@ struct BufferMeta {
mIsBackup(false) {
}
+ BufferMeta(const sp<GraphicBuffer> &graphicBuffer)
+ : mGraphicBuffer(graphicBuffer),
+ mIsBackup(false) {
+ }
+
void CopyFromOMX(const OMX_BUFFERHEADERTYPE *header) {
if (!mIsBackup) {
return;
@@ -61,6 +67,7 @@ struct BufferMeta {
}
private:
+ sp<GraphicBuffer> mGraphicBuffer;
sp<IMemory> mMem;
size_t mSize;
bool mIsBackup;
@@ -240,6 +247,74 @@ status_t OMXNodeInstance::setConfig(
return StatusFromOMXError(err);
}
+status_t OMXNodeInstance::enableGraphicBuffers(
+ OMX_U32 portIndex, OMX_BOOL enable) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ EnableAndroidNativeBuffersParams params = {
+ sizeof(EnableAndroidNativeBuffersParams), ver, portIndex, enable,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_EnableAndroidNativeBuffers failed with error %d (0x%08x)",
+ err, err);
+
+ return UNKNOWN_ERROR;
+ }
+
+ return OK;
+}
+
+status_t OMXNodeInstance::storeMetaDataInBuffers(
+ OMX_U32 portIndex,
+ OMX_BOOL enable) {
+ Mutex::Autolock autolock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.storeMetaDataInBuffers");
+
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex %s failed", name);
+ return StatusFromOMXError(err);
+ }
+
+ StoreMetaDataInBuffersParams params;
+ memset(&params, 0, sizeof(params));
+ params.nSize = sizeof(params);
+
+ // Version: 1.0.0.0
+ params.nVersion.s.nVersionMajor = 1;
+
+ params.nPortIndex = portIndex;
+ params.bStoreMetaData = enable;
+ if ((err = OMX_SetParameter(mHandle, index, &params)) != OMX_ErrorNone) {
+ LOGE("OMX_SetParameter() failed for StoreMetaDataInBuffers: 0x%08x", err);
+ return UNKNOWN_ERROR;
+ }
+ return err;
+}
+
status_t OMXNodeInstance::useBuffer(
OMX_U32 portIndex, const sp<IMemory> &params,
OMX::buffer_id *buffer) {
@@ -273,6 +348,60 @@ status_t OMXNodeInstance::useBuffer(
return OK;
}
+status_t OMXNodeInstance::useGraphicBuffer(
+ OMX_U32 portIndex, const sp<GraphicBuffer>& graphicBuffer,
+ OMX::buffer_id *buffer) {
+ Mutex::Autolock autoLock(mLock);
+
+ OMX_INDEXTYPE index;
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(
+ mHandle,
+ const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
+ &index);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_GetExtensionIndex failed");
+
+ return StatusFromOMXError(err);
+ }
+
+ BufferMeta *bufferMeta = new BufferMeta(graphicBuffer);
+
+ OMX_BUFFERHEADERTYPE *header;
+
+ OMX_VERSIONTYPE ver;
+ ver.s.nVersionMajor = 1;
+ ver.s.nVersionMinor = 0;
+ ver.s.nRevision = 0;
+ ver.s.nStep = 0;
+ UseAndroidNativeBufferParams params = {
+ sizeof(UseAndroidNativeBufferParams), ver, portIndex, bufferMeta,
+ &header, graphicBuffer,
+ };
+
+ err = OMX_SetParameter(mHandle, index, &params);
+
+ if (err != OMX_ErrorNone) {
+ LOGE("OMX_UseAndroidNativeBuffer failed with error %d (0x%08x)", err,
+ err);
+
+ delete bufferMeta;
+ bufferMeta = NULL;
+
+ *buffer = 0;
+
+ return UNKNOWN_ERROR;
+ }
+
+ CHECK_EQ(header->pAppPrivate, bufferMeta);
+
+ *buffer = header;
+
+ addActiveBuffer(portIndex, *buffer);
+
+ return OK;
+}
+
status_t OMXNodeInstance::allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data) {
@@ -498,4 +627,3 @@ void OMXNodeInstance::freeActiveBuffers() {
}
} // namespace android
-
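
Both new OMXNodeInstance methods use the same OpenMAX IL vendor-extension handshake: resolve an extension string to an OMX_INDEXTYPE at runtime, fill in a zero-initialized, versioned parameter struct, and push it with OMX_SetParameter. A condensed sketch of that handshake, reusing the storeMetaDataInBuffers extension shown above and assuming an already-initialized component handle plus the struct/namespace layout implied by the HardwareAPI.h include:

    #include <string.h>
    #include <OMX_Core.h>
    #include <media/stagefright/HardwareAPI.h>

    static OMX_ERRORTYPE SetStoreMetaDataMode(
            OMX_HANDLETYPE hComponent, OMX_U32 portIndex, OMX_BOOL enable) {
        // Step 1: ask the component for the index behind the extension string.
        OMX_INDEXTYPE index;
        OMX_ERRORTYPE err = OMX_GetExtensionIndex(
                hComponent,
                const_cast<OMX_STRING>(
                    "OMX.google.android.index.storeMetaDataInBuffers"),
                &index);
        if (err != OMX_ErrorNone) {
            return err;  // the component does not implement the extension
        }

        // Step 2: fill a zero-initialized, versioned parameter struct.
        android::StoreMetaDataInBuffersParams params;
        memset(&params, 0, sizeof(params));
        params.nSize = sizeof(params);
        params.nVersion.s.nVersionMajor = 1;  // version 1.0.0.0
        params.nPortIndex = portIndex;
        params.bStoreMetaData = enable;

        // Step 3: hand it to the component under the resolved index.
        return OMX_SetParameter(hComponent, index, &params);
    }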
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
index b0d2c64..bbde516 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.cpp
@@ -18,18 +18,381 @@
#include "ARTPSource.h"
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaErrors.h>
+
+#include <ctype.h>
namespace android {
-AMPEG4AudioAssembler::AMPEG4AudioAssembler(const sp<AMessage> &notify)
+static bool GetAttribute(const char *s, const char *key, AString *value) {
+ value->clear();
+
+ size_t keyLen = strlen(key);
+
+ for (;;) {
+ while (isspace(*s)) {
+ ++s;
+ }
+
+ const char *colonPos = strchr(s, ';');
+
+ size_t len =
+ (colonPos == NULL) ? strlen(s) : colonPos - s;
+
+ if (len >= keyLen + 1 && s[keyLen] == '=' && !strncmp(s, key, keyLen)) {
+ value->setTo(&s[keyLen + 1], len - keyLen - 1);
+ return true;
+ }
+
+ if (colonPos == NULL) {
+ return false;
+ }
+
+ s = colonPos + 1;
+ }
+}
+
+static sp<ABuffer> decodeHex(const AString &s) {
+ if ((s.size() % 2) != 0) {
+ return NULL;
+ }
+
+ size_t outLen = s.size() / 2;
+ sp<ABuffer> buffer = new ABuffer(outLen);
+ uint8_t *out = buffer->data();
+
+ uint8_t accum = 0;
+ for (size_t i = 0; i < s.size(); ++i) {
+ char c = s.c_str()[i];
+ unsigned value;
+ if (c >= '0' && c <= '9') {
+ value = c - '0';
+ } else if (c >= 'a' && c <= 'f') {
+ value = c - 'a' + 10;
+ } else if (c >= 'A' && c <= 'F') {
+ value = c - 'A' + 10;
+ } else {
+ return NULL;
+ }
+
+ accum = (accum << 4) | value;
+
+ if (i & 1) {
+ *out++ = accum;
+
+ accum = 0;
+ }
+ }
+
+ return buffer;
+}
+
+static status_t parseAudioObjectType(
+ ABitReader *bits, unsigned *audioObjectType) {
+ *audioObjectType = bits->getBits(5);
+ if ((*audioObjectType) == 31) {
+ *audioObjectType = 32 + bits->getBits(6);
+ }
+
+ return OK;
+}
+
+static status_t parseGASpecificConfig(
+ ABitReader *bits,
+ unsigned audioObjectType, unsigned channelConfiguration) {
+ unsigned frameLengthFlag = bits->getBits(1);
+ unsigned dependsOnCoreCoder = bits->getBits(1);
+ if (dependsOnCoreCoder) {
+ /* unsigned coreCoderDelay = */bits->getBits(1);
+ }
+ unsigned extensionFlag = bits->getBits(1);
+
+ if (!channelConfiguration) {
+ // program_config_element
+ return ERROR_UNSUPPORTED; // XXX to be implemented
+ }
+
+ if (audioObjectType == 6 || audioObjectType == 20) {
+ /* unsigned layerNr = */bits->getBits(3);
+ }
+
+ if (extensionFlag) {
+ if (audioObjectType == 22) {
+ /* unsigned numOfSubFrame = */bits->getBits(5);
+ /* unsigned layerLength = */bits->getBits(11);
+ } else if (audioObjectType == 17 || audioObjectType == 19
+ || audioObjectType == 20 || audioObjectType == 23) {
+ /* unsigned aacSectionDataResilienceFlag = */bits->getBits(1);
+ /* unsigned aacScalefactorDataResilienceFlag = */bits->getBits(1);
+ /* unsigned aacSpectralDataResilienceFlag = */bits->getBits(1);
+ }
+
+ unsigned extensionFlag3 = bits->getBits(1);
+ CHECK_EQ(extensionFlag3, 0u); // TBD in version 3
+ }
+
+ return OK;
+}
+
+static status_t parseAudioSpecificConfig(ABitReader *bits) {
+ unsigned audioObjectType;
+ CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+
+ unsigned samplingFreqIndex = bits->getBits(4);
+ if (samplingFreqIndex == 0x0f) {
+ /* unsigned samplingFrequency = */bits->getBits(24);
+ }
+
+ unsigned channelConfiguration = bits->getBits(4);
+
+ unsigned extensionAudioObjectType = 0;
+ unsigned sbrPresent = 0;
+
+ if (audioObjectType == 5) {
+ extensionAudioObjectType = audioObjectType;
+ sbrPresent = 1;
+ unsigned extensionSamplingFreqIndex = bits->getBits(4);
+ if (extensionSamplingFreqIndex == 0x0f) {
+ /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+ }
+ CHECK_EQ(parseAudioObjectType(bits, &audioObjectType), (status_t)OK);
+ }
+
+ CHECK((audioObjectType >= 1 && audioObjectType <= 4)
+ || (audioObjectType >= 6 && audioObjectType <= 7)
+ || audioObjectType == 17
+ || (audioObjectType >= 19 && audioObjectType <= 23));
+
+ CHECK_EQ(parseGASpecificConfig(
+ bits, audioObjectType, channelConfiguration), (status_t)OK);
+
+ if (audioObjectType == 17
+ || (audioObjectType >= 19 && audioObjectType <= 27)) {
+ unsigned epConfig = bits->getBits(2);
+ if (epConfig == 2 || epConfig == 3) {
+ // ErrorProtectionSpecificConfig
+ return ERROR_UNSUPPORTED; // XXX to be implemented
+
+ if (epConfig == 3) {
+ unsigned directMapping = bits->getBits(1);
+ CHECK_EQ(directMapping, 1u);
+ }
+ }
+ }
+
+#if 0
+ // This is not supported here as the upper layers did not explicitly
+ // signal the length of AudioSpecificConfig.
+
+ if (extensionAudioObjectType != 5 && bits->numBitsLeft() >= 16) {
+ unsigned syncExtensionType = bits->getBits(11);
+ if (syncExtensionType == 0x2b7) {
+ CHECK_EQ(parseAudioObjectType(bits, &extensionAudioObjectType),
+ (status_t)OK);
+
+ sbrPresent = bits->getBits(1);
+
+ if (sbrPresent == 1) {
+ unsigned extensionSamplingFreqIndex = bits->getBits(4);
+ if (extensionSamplingFreqIndex == 0x0f) {
+ /* unsigned extensionSamplingFrequency = */bits->getBits(24);
+ }
+ }
+ }
+ }
+#endif
+
+ return OK;
+}
+
+static status_t parseStreamMuxConfig(
+ ABitReader *bits,
+ unsigned *numSubFrames,
+ unsigned *frameLengthType,
+ bool *otherDataPresent,
+ unsigned *otherDataLenBits) {
+ unsigned audioMuxVersion = bits->getBits(1);
+
+ unsigned audioMuxVersionA = 0;
+ if (audioMuxVersion == 1) {
+ audioMuxVersionA = bits->getBits(1);
+ }
+
+ CHECK_EQ(audioMuxVersionA, 0u); // otherwise future spec
+
+ if (audioMuxVersion != 0) {
+ return ERROR_UNSUPPORTED; // XXX to be implemented;
+ }
+ CHECK_EQ(audioMuxVersion, 0u); // XXX to be implemented
+
+ unsigned allStreamsSameTimeFraming = bits->getBits(1);
+ CHECK_EQ(allStreamsSameTimeFraming, 1u); // There's only one stream.
+
+ *numSubFrames = bits->getBits(6);
+ unsigned numProgram = bits->getBits(4);
+ CHECK_EQ(numProgram, 0u); // disabled in RTP LATM
+
+ unsigned numLayer = bits->getBits(3);
+ CHECK_EQ(numLayer, 0u); // disabled in RTP LATM
+
+ if (audioMuxVersion == 0) {
+ // AudioSpecificConfig
+ CHECK_EQ(parseAudioSpecificConfig(bits), (status_t)OK);
+ } else {
+ TRESPASS(); // XXX to be implemented
+ }
+
+ *frameLengthType = bits->getBits(3);
+ switch (*frameLengthType) {
+ case 0:
+ {
+ /* unsigned bufferFullness = */bits->getBits(8);
+
+ // The "coreFrameOffset" does not apply since there's only
+ // a single layer.
+ break;
+ }
+
+ case 1:
+ {
+ /* unsigned frameLength = */bits->getBits(9);
+ break;
+ }
+
+ case 3:
+ case 4:
+ case 5:
+ {
+ /* unsigned CELPframeLengthTableIndex = */bits->getBits(6);
+ break;
+ }
+
+ case 6:
+ case 7:
+ {
+ /* unsigned HVXCframeLengthTableIndex = */bits->getBits(1);
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ *otherDataPresent = bits->getBits(1);
+ *otherDataLenBits = 0;
+ if (*otherDataPresent) {
+ if (audioMuxVersion == 1) {
+ TRESPASS(); // XXX to be implemented
+ } else {
+ *otherDataLenBits = 0;
+
+ unsigned otherDataLenEsc;
+ do {
+ (*otherDataLenBits) <<= 8;
+ otherDataLenEsc = bits->getBits(1);
+ unsigned otherDataLenTmp = bits->getBits(8);
+ (*otherDataLenBits) += otherDataLenTmp;
+ } while (otherDataLenEsc);
+ }
+ }
+
+ unsigned crcCheckPresent = bits->getBits(1);
+ if (crcCheckPresent) {
+ /* unsigned crcCheckSum = */bits->getBits(8);
+ }
+
+ return OK;
+}
+
+sp<ABuffer> AMPEG4AudioAssembler::removeLATMFraming(const sp<ABuffer> &buffer) {
+ CHECK(!mMuxConfigPresent); // XXX to be implemented
+
+ sp<ABuffer> out = new ABuffer(buffer->size());
+ out->setRange(0, 0);
+
+ size_t offset = 0;
+ uint8_t *ptr = buffer->data();
+
+ for (size_t i = 0; i <= mNumSubFrames; ++i) {
+ // parse PayloadLengthInfo
+
+ unsigned payloadLength = 0;
+
+ switch (mFrameLengthType) {
+ case 0:
+ {
+ unsigned muxSlotLengthBytes = 0;
+ unsigned tmp;
+ do {
+ CHECK_LT(offset, buffer->size());
+ tmp = ptr[offset++];
+ muxSlotLengthBytes += tmp;
+ } while (tmp == 0xff);
+
+ payloadLength = muxSlotLengthBytes;
+ break;
+ }
+
+ default:
+ TRESPASS(); // XXX to be implemented
+ break;
+ }
+
+ CHECK_LE(offset + payloadLength, buffer->size());
+
+ memcpy(out->data() + out->size(), &ptr[offset], payloadLength);
+ out->setRange(0, out->size() + payloadLength);
+
+ offset += payloadLength;
+
+ if (mOtherDataPresent) {
+ // We want to stay byte-aligned.
+
+ CHECK((mOtherDataLenBits % 8) == 0);
+ CHECK_LE(offset + (mOtherDataLenBits / 8), buffer->size());
+ offset += mOtherDataLenBits / 8;
+ }
+ }
+
+ CHECK_EQ(offset, buffer->size());
+
+ return out;
+}
+
+AMPEG4AudioAssembler::AMPEG4AudioAssembler(
+ const sp<AMessage> &notify, const AString &params)
: mNotifyMsg(notify),
+ mMuxConfigPresent(false),
mAccessUnitRTPTime(0),
mNextExpectedSeqNoValid(false),
mNextExpectedSeqNo(0),
mAccessUnitDamaged(false) {
+ AString val;
+ if (!GetAttribute(params.c_str(), "cpresent", &val)) {
+ mMuxConfigPresent = true;
+ } else if (val == "0") {
+ mMuxConfigPresent = false;
+ } else {
+ CHECK(val == "1");
+ mMuxConfigPresent = true;
+ }
+
+ CHECK(GetAttribute(params.c_str(), "config", &val));
+
+ sp<ABuffer> config = decodeHex(val);
+ CHECK(config != NULL);
+
+ ABitReader bits(config->data(), config->size());
+ status_t err = parseStreamMuxConfig(
+ &bits, &mNumSubFrames, &mFrameLengthType,
+ &mOtherDataPresent, &mOtherDataLenBits);
+
+ CHECK_EQ(err, (status_t)NO_ERROR);
}
AMPEG4AudioAssembler::~AMPEG4AudioAssembler() {
@@ -108,13 +471,7 @@ void AMPEG4AudioAssembler::submitAccessUnit() {
while (it != mPackets.end()) {
const sp<ABuffer> &unit = *it;
- size_t n = 0;
- while (unit->data()[n] == 0xff) {
- ++n;
- }
- ++n;
-
- totalSize += unit->size() - n;
+ totalSize += unit->size();
++it;
}
@@ -124,20 +481,13 @@ void AMPEG4AudioAssembler::submitAccessUnit() {
while (it != mPackets.end()) {
const sp<ABuffer> &unit = *it;
- size_t n = 0;
- while (unit->data()[n] == 0xff) {
- ++n;
- }
- ++n;
-
memcpy((uint8_t *)accessUnit->data() + offset,
- unit->data() + n, unit->size() - n);
-
- offset += unit->size() - n;
+ unit->data(), unit->size());
++it;
}
+ accessUnit = removeLATMFraming(accessUnit);
CopyTimes(accessUnit, *mPackets.begin());
#if 0
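
removeLATMFraming() above strips the LATM PayloadLengthInfo for frameLengthType 0, where the payload length is the sum of bytes read until a byte other than 0xff terminates the run. A stand-alone sketch of just that length decoding, with made-up input bytes:

    #include <cstdio>
    #include <stdint.h>
    #include <stddef.h>

    // Sum bytes until a byte other than 0xff terminates the run.
    static size_t ParsePayloadLength(const uint8_t* data, size_t size,
                                     size_t* offset) {
        size_t length = 0;
        uint8_t tmp;
        do {
            tmp = data[(*offset)++];
            length += tmp;
        } while (tmp == 0xff && *offset < size);
        return length;
    }

    int main() {
        // 0xff 0xff 0x10 => 255 + 255 + 16 = 526 bytes of payload follow.
        const uint8_t header[] = { 0xff, 0xff, 0x10 };
        size_t offset = 0;
        const size_t length = ParsePayloadLength(header, sizeof(header), &offset);
        printf("payload length = %zu, header bytes consumed = %zu\n",
               length, offset);
        return 0;
    }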
diff --git a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
index bf9f204..9cef94c 100644
--- a/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
+++ b/media/libstagefright/rtsp/AMPEG4AudioAssembler.h
@@ -27,9 +27,11 @@
namespace android {
struct AMessage;
+struct AString;
struct AMPEG4AudioAssembler : public ARTPAssembler {
- AMPEG4AudioAssembler(const sp<AMessage> &notify);
+ AMPEG4AudioAssembler(
+ const sp<AMessage> &notify, const AString &params);
protected:
virtual ~AMPEG4AudioAssembler();
@@ -40,6 +42,13 @@ protected:
private:
sp<AMessage> mNotifyMsg;
+
+ bool mMuxConfigPresent;
+ unsigned mNumSubFrames;
+ unsigned mFrameLengthType;
+ bool mOtherDataPresent;
+ unsigned mOtherDataLenBits;
+
uint32_t mAccessUnitRTPTime;
bool mNextExpectedSeqNoValid;
uint32_t mNextExpectedSeqNo;
@@ -49,6 +58,8 @@ private:
AssemblyStatus addPacket(const sp<ARTPSource> &source);
void submitAccessUnit();
+ sp<ABuffer> removeLATMFraming(const sp<ABuffer> &buffer);
+
DISALLOW_EVIL_CONSTRUCTORS(AMPEG4AudioAssembler);
};
diff --git a/media/libstagefright/rtsp/ARTPSource.cpp b/media/libstagefright/rtsp/ARTPSource.cpp
index 2518264..5aae4e7 100644
--- a/media/libstagefright/rtsp/ARTPSource.cpp
+++ b/media/libstagefright/rtsp/ARTPSource.cpp
@@ -57,7 +57,7 @@ ARTPSource::ARTPSource(
mAssembler = new AAVCAssembler(notify);
mIssueFIRRequests = true;
} else if (!strncmp(desc.c_str(), "MP4A-LATM/", 10)) {
- mAssembler = new AMPEG4AudioAssembler(notify);
+ mAssembler = new AMPEG4AudioAssembler(notify, params);
} else if (!strncmp(desc.c_str(), "H263-1998/", 10)
|| !strncmp(desc.c_str(), "H263-2000/", 10)) {
mAssembler = new AH263Assembler(notify);
diff --git a/media/libstagefright/rtsp/ARTPWriter.cpp b/media/libstagefright/rtsp/ARTPWriter.cpp
index 155fd96..5a033e1 100644
--- a/media/libstagefright/rtsp/ARTPWriter.cpp
+++ b/media/libstagefright/rtsp/ARTPWriter.cpp
@@ -46,7 +46,7 @@ static int UniformRand(int limit) {
ARTPWriter::ARTPWriter(int fd)
: mFlags(0),
- mFd(fd),
+ mFd(dup(fd)),
mLooper(new ALooper),
mReflector(new AHandlerReflector<ARTPWriter>(this)) {
CHECK_GE(fd, 0);
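
The one-line change above, mFd(dup(fd)), gives the writer its own copy of the file descriptor, so its lifetime no longer depends on the caller keeping the original fd open. A generic sketch of that ownership pattern; the class and member names are illustrative, not ARTPWriter's:

    #include <unistd.h>

    // The owner duplicates the caller's descriptor and closes only its copy.
    class FdOwner {
    public:
        explicit FdOwner(int fd) : mFd(dup(fd)) {}
        ~FdOwner() { if (mFd >= 0) close(mFd); }

        int fd() const { return mFd; }

    private:
        int mFd;

        FdOwner(const FdOwner&);             // non-copyable
        FdOwner& operator=(const FdOwner&);
    };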
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index f928c06..e936923 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -23,11 +23,13 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/MediaErrors.h>
#include <arpa/inet.h>
#include <fcntl.h>
#include <netdb.h>
+#include <openssl/md5.h>
#include <sys/socket.h>
namespace android {
@@ -37,6 +39,7 @@ const int64_t ARTSPConnection::kSelectTimeoutUs = 1000ll;
ARTSPConnection::ARTSPConnection()
: mState(DISCONNECTED),
+ mAuthType(NONE),
mSocket(-1),
mConnectionID(0),
mNextCSeq(0),
@@ -114,10 +117,13 @@ void ARTSPConnection::onMessageReceived(const sp<AMessage> &msg) {
// static
bool ARTSPConnection::ParseURL(
- const char *url, AString *host, unsigned *port, AString *path) {
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass) {
host->clear();
*port = 0;
path->clear();
+ user->clear();
+ pass->clear();
if (strncasecmp("rtsp://", url, 7)) {
return false;
@@ -133,6 +139,24 @@ bool ARTSPConnection::ParseURL(
path->setTo(slashPos);
}
+ ssize_t atPos = host->find("@");
+
+ if (atPos >= 0) {
+ // Split off user:pass@ from hostname.
+
+ AString userPass(*host, 0, atPos);
+ host->erase(0, atPos + 1);
+
+ ssize_t colonPos = userPass.find(":");
+
+ if (colonPos < 0) {
+ *user = userPass;
+ } else {
+ user->setTo(userPass, 0, colonPos);
+ pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
+ }
+ }
+
const char *colonPos = strchr(host->c_str(), ':');
if (colonPos != NULL) {
@@ -187,7 +211,12 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
AString host, path;
unsigned port;
- if (!ParseURL(url.c_str(), &host, &port, &path)) {
+ if (!ParseURL(url.c_str(), &host, &port, &path, &mUser, &mPass)
+ || (mUser.size() > 0 && mPass.size() == 0)) {
+ // If we have a user name but no password we have to give up
+ // right here, since we currently have no way of asking the user
+ // for this information.
+
LOGE("Malformed rtsp url %s", url.c_str());
reply->setInt32("result", ERROR_MALFORMED);
@@ -197,6 +226,10 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
return;
}
+ if (mUser.size() > 0) {
+ LOGV("user = '%s', pass = '%s'", mUser.c_str(), mPass.c_str());
+ }
+
struct hostent *ent = gethostbyname(host.c_str());
if (ent == NULL) {
LOGE("Unknown host %s", host.c_str());
@@ -262,6 +295,11 @@ void ARTSPConnection::onDisconnect(const sp<AMessage> &msg) {
reply->setInt32("result", OK);
mState = DISCONNECTED;
+ mUser.clear();
+ mPass.clear();
+ mAuthType = NONE;
+ mNonce.clear();
+
reply->post();
}
@@ -335,6 +373,12 @@ void ARTSPConnection::onSendRequest(const sp<AMessage> &msg) {
AString request;
CHECK(msg->findString("request", &request));
+ // Just in case we need to re-issue the request with proper authentication
+ // later, stash it away.
+ reply->setString("original-request", request.c_str(), request.size());
+
+ addAuthentication(&request);
+
// Find the boundary between headers and the body.
ssize_t i = request.find("\r\n\r\n");
CHECK_GE(i, 0);
@@ -347,7 +391,7 @@ void ARTSPConnection::onSendRequest(const sp<AMessage> &msg) {
request.insert(cseqHeader, i + 2);
- LOGV("%s", request.c_str());
+ LOGV("request: '%s'", request.c_str());
size_t numBytesSent = 0;
while (numBytesSent < request.size()) {
@@ -612,6 +656,30 @@ bool ARTSPConnection::receiveRTSPReponse() {
}
}
+ if (response->mStatusCode == 401) {
+ if (mAuthType == NONE && mUser.size() > 0
+ && parseAuthMethod(response)) {
+ ssize_t i;
+ CHECK_EQ((status_t)OK, findPendingRequest(response, &i));
+ CHECK_GE(i, 0);
+
+ sp<AMessage> reply = mPendingRequests.valueAt(i);
+ mPendingRequests.removeItemsAt(i);
+
+ AString request;
+ CHECK(reply->findString("original-request", &request));
+
+ sp<AMessage> msg = new AMessage(kWhatSendRequest, id());
+ msg->setMessage("reply", reply);
+ msg->setString("request", request.c_str(), request.size());
+
+ LOGI("re-sending request with authentication headers...");
+ onSendRequest(msg);
+
+ return true;
+ }
+ }
+
return notifyResponseListener(response);
}
@@ -628,26 +696,47 @@ bool ARTSPConnection::ParseSingleUnsignedLong(
return true;
}
-bool ARTSPConnection::notifyResponseListener(
- const sp<ARTSPResponse> &response) {
+status_t ARTSPConnection::findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const {
+ *index = 0;
+
ssize_t i = response->mHeaders.indexOfKey("cseq");
if (i < 0) {
- return true;
+ // This is an unsolicited server->client message.
+ return OK;
}
AString value = response->mHeaders.valueAt(i);
unsigned long cseq;
if (!ParseSingleUnsignedLong(value.c_str(), &cseq)) {
- return false;
+ return ERROR_MALFORMED;
}
i = mPendingRequests.indexOfKey(cseq);
if (i < 0) {
- // Unsolicited response?
- TRESPASS();
+ return -ENOENT;
+ }
+
+ *index = i;
+
+ return OK;
+}
+
+bool ARTSPConnection::notifyResponseListener(
+ const sp<ARTSPResponse> &response) {
+ ssize_t i;
+ status_t err = findPendingRequest(response, &i);
+
+ if (err == OK && i < 0) {
+ // An unsolicited server response is not a problem.
+ return true;
+ }
+
+ if (err != OK) {
+ return false;
}
sp<AMessage> reply = mPendingRequests.valueAt(i);
@@ -660,4 +749,160 @@ bool ARTSPConnection::notifyResponseListener(
return true;
}
+bool ARTSPConnection::parseAuthMethod(const sp<ARTSPResponse> &response) {
+ ssize_t i = response->mHeaders.indexOfKey("www-authenticate");
+
+ if (i < 0) {
+ return false;
+ }
+
+ AString value = response->mHeaders.valueAt(i);
+
+ if (!strncmp(value.c_str(), "Basic", 5)) {
+ mAuthType = BASIC;
+ } else {
+#if !defined(HAVE_ANDROID_OS)
+ // We don't have access to the MD5 implementation on the simulator,
+ // so we won't support digest authentication.
+ return false;
+#endif
+
+ CHECK(!strncmp(value.c_str(), "Digest", 6));
+ mAuthType = DIGEST;
+
+ i = value.find("nonce=");
+ CHECK_GE(i, 0);
+ CHECK_EQ(value.c_str()[i + 6], '\"');
+ ssize_t j = value.find("\"", i + 7);
+ CHECK_GE(j, 0);
+
+ mNonce.setTo(value, i + 7, j - i - 7);
+ }
+
+ return true;
+}
+
+#if defined(HAVE_ANDROID_OS)
+static void H(const AString &s, AString *out) {
+ out->clear();
+
+ MD5_CTX m;
+ MD5_Init(&m);
+ MD5_Update(&m, s.c_str(), s.size());
+
+ uint8_t key[16];
+ MD5_Final(key, &m);
+
+ for (size_t i = 0; i < 16; ++i) {
+ char nibble = key[i] >> 4;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+
+ nibble = key[i] & 0x0f;
+ if (nibble <= 9) {
+ nibble += '0';
+ } else {
+ nibble += 'a' - 10;
+ }
+ out->append(&nibble, 1);
+ }
+}
+#endif
+
+static void GetMethodAndURL(
+ const AString &request, AString *method, AString *url) {
+ ssize_t space1 = request.find(" ");
+ CHECK_GE(space1, 0);
+
+ ssize_t space2 = request.find(" ", space1 + 1);
+ CHECK_GE(space2, 0);
+
+ method->setTo(request, 0, space1);
+ url->setTo(request, space1 + 1, space2 - space1);
+}
+
+void ARTSPConnection::addAuthentication(AString *request) {
+ if (mAuthType == NONE) {
+ return;
+ }
+
+ // Find the boundary between headers and the body.
+ ssize_t i = request->find("\r\n\r\n");
+ CHECK_GE(i, 0);
+
+ if (mAuthType == BASIC) {
+ AString tmp;
+ tmp.append(mUser);
+ tmp.append(":");
+ tmp.append(mPass);
+
+ AString out;
+ encodeBase64(tmp.c_str(), tmp.size(), &out);
+
+ AString fragment;
+ fragment.append("Authorization: Basic ");
+ fragment.append(out);
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+
+ return;
+ }
+
+#if defined(HAVE_ANDROID_OS)
+ CHECK_EQ((int)mAuthType, (int)DIGEST);
+
+ AString method, url;
+ GetMethodAndURL(*request, &method, &url);
+
+ AString A1;
+ A1.append(mUser);
+ A1.append(":");
+ A1.append("Streaming Server");
+ A1.append(":");
+ A1.append(mPass);
+
+ AString A2;
+ A2.append(method);
+ A2.append(":");
+ A2.append(url);
+
+ AString HA1, HA2;
+ H(A1, &HA1);
+ H(A2, &HA2);
+
+ AString tmp;
+ tmp.append(HA1);
+ tmp.append(":");
+ tmp.append(mNonce);
+ tmp.append(":");
+ tmp.append(HA2);
+
+ AString digest;
+ H(tmp, &digest);
+
+ AString fragment;
+ fragment.append("Authorization: Digest ");
+ fragment.append("nonce=\"");
+ fragment.append(mNonce);
+ fragment.append("\", ");
+ fragment.append("username=\"");
+ fragment.append(mUser);
+ fragment.append("\", ");
+ fragment.append("uri=\"");
+ fragment.append(url);
+ fragment.append("\", ");
+ fragment.append("response=\"");
+ fragment.append(digest);
+ fragment.append("\"");
+ fragment.append("\r\n");
+
+ request->insert(fragment, i + 2);
+#endif
+}
+
} // namespace android
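
For reference, a stand-alone sketch of the Digest computation that addAuthentication() performs above: response = MD5(MD5(user:realm:pass) ":" nonce ":" MD5(method:uri)), with the realm hard-coded to "Streaming Server" as in the code. The credentials, nonce and URI below are made-up test values, and std::string stands in for AString.

    #include <openssl/md5.h>
    #include <cstdio>
    #include <string>

    // Hex-encoded MD5 of a string, equivalent to the H() helper above.
    static std::string H(const std::string& s) {
        unsigned char key[16];
        MD5(reinterpret_cast<const unsigned char*>(s.data()), s.size(), key);

        static const char kHex[] = "0123456789abcdef";
        std::string out;
        for (int i = 0; i < 16; ++i) {
            out += kHex[key[i] >> 4];
            out += kHex[key[i] & 0x0f];
        }
        return out;
    }

    int main() {
        const std::string user = "alice", pass = "secret";
        const std::string nonce = "0123456789abcdef";         // made-up
        const std::string method = "DESCRIBE";
        const std::string uri = "rtsp://example.com/stream";  // made-up

        const std::string ha1 = H(user + ":Streaming Server:" + pass);
        const std::string ha2 = H(method + ":" + uri);
        const std::string response = H(ha1 + ":" + nonce + ":" + ha2);

        printf("Authorization: Digest nonce=\"%s\", username=\"%s\", "
               "uri=\"%s\", response=\"%s\"\r\n",
               nonce.c_str(), user.c_str(), uri.c_str(), response.c_str());
        return 0;
    }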
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 96e0d5b..19be2a6 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -42,6 +42,10 @@ struct ARTSPConnection : public AHandler {
void observeBinaryData(const sp<AMessage> &reply);
+ static bool ParseURL(
+ const char *url, AString *host, unsigned *port, AString *path,
+ AString *user, AString *pass);
+
protected:
virtual ~ARTSPConnection();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -62,9 +66,18 @@ private:
kWhatObserveBinaryData = 'obin',
};
+ enum AuthType {
+ NONE,
+ BASIC,
+ DIGEST
+ };
+
static const int64_t kSelectTimeoutUs;
State mState;
+ AString mUser, mPass;
+ AuthType mAuthType;
+ AString mNonce;
int mSocket;
int32_t mConnectionID;
int32_t mNextCSeq;
@@ -90,8 +103,11 @@ private:
sp<ABuffer> receiveBinaryData();
bool notifyResponseListener(const sp<ARTSPResponse> &response);
- static bool ParseURL(
- const char *url, AString *host, unsigned *port, AString *path);
+ bool parseAuthMethod(const sp<ARTSPResponse> &response);
+ void addAuthentication(AString *request);
+
+ status_t findPendingRequest(
+ const sp<ARTSPResponse> &response, ssize_t *index) const;
static bool ParseSingleUnsignedLong(
const char *from, unsigned long *x);
diff --git a/media/libstagefright/rtsp/ASessionDescription.cpp b/media/libstagefright/rtsp/ASessionDescription.cpp
index 612caff..547fbab 100644
--- a/media/libstagefright/rtsp/ASessionDescription.cpp
+++ b/media/libstagefright/rtsp/ASessionDescription.cpp
@@ -53,21 +53,30 @@ bool ASessionDescription::parse(const void *data, size_t size) {
mFormats.push(AString("[root]"));
AString desc((const char *)data, size);
- LOGI("%s", desc.c_str());
size_t i = 0;
for (;;) {
- ssize_t eolPos = desc.find("\r\n", i);
+ ssize_t eolPos = desc.find("\n", i);
+
if (eolPos < 0) {
break;
}
- AString line(desc, i, eolPos - i);
+ AString line;
+ if ((size_t)eolPos > i && desc.c_str()[eolPos - 1] == '\r') {
+ // We accept both '\n' and '\r\n' line endings; if it's
+ // the latter, strip the '\r' as well.
+ line.setTo(desc, i, eolPos - i - 1);
+ } else {
+ line.setTo(desc, i, eolPos - i);
+ }
if (line.size() < 2 || line.c_str()[1] != '=') {
return false;
}
+ LOGI("%s", line.c_str());
+
switch (line.c_str()[0]) {
case 'v':
{
@@ -141,7 +150,7 @@ bool ASessionDescription::parse(const void *data, size_t size) {
}
}
- i = eolPos + 2;
+ i = eolPos + 1;
}
return true;
@@ -245,7 +254,7 @@ bool ASessionDescription::getDurationUs(int64_t *durationUs) const {
return false;
}
- if (value == "npt=now-") {
+ if (value == "npt=now-" || value == "npt=0-") {
return false;
}
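
The parser above now accepts both '\n' and '\r\n' terminated SDP lines by locating each '\n' and stripping a preceding '\r' if present. A stand-alone sketch of that splitting logic over a made-up SDP fragment:

    #include <cstdio>
    #include <string>

    int main() {
        const std::string desc = "v=0\r\no=- 0 0 IN IP4 127.0.0.1\ns=Test\r\n";

        size_t i = 0;
        while (true) {
            const size_t eol = desc.find('\n', i);
            if (eol == std::string::npos) break;

            // Strip a trailing '\r' so "\r\n" and bare "\n" both work.
            const size_t end = (eol > i && desc[eol - 1] == '\r') ? eol - 1 : eol;
            printf("line: '%s'\n", desc.substr(i, end - i).c_str());

            i = eol + 1;  // advance past the '\n' only
        }
        return 0;
    }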
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 081ae32..0bbadc1 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -23,6 +23,7 @@ LOCAL_C_INCLUDES:= \
$(JNI_H_INCLUDE) \
$(TOP)/frameworks/base/include/media/stagefright/openmax \
$(TOP)/frameworks/base/media/libstagefright/include \
+ $(TOP)/external/openssl/include
LOCAL_MODULE:= libstagefright_rtsp
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index 6943608..9bb8c46 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -96,6 +96,7 @@ struct MyHandler : public AHandler {
mNetLooper(new ALooper),
mConn(new ARTSPConnection),
mRTPConn(new ARTPConnection),
+ mOriginalSessionURL(url),
mSessionURL(url),
mSetupTracksSuccessful(false),
mSeekPending(false),
@@ -113,6 +114,23 @@ struct MyHandler : public AHandler {
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
PRIORITY_HIGHEST);
+
+ // Strip any authentication info from the session URL; we don't
+ // want to transmit user/pass in cleartext.
+ AString host, path, user, pass;
+ unsigned port;
+ if (ARTSPConnection::ParseURL(
+ mSessionURL.c_str(), &host, &port, &path, &user, &pass)
+ && user.size() > 0) {
+ mSessionURL.clear();
+ mSessionURL.append("rtsp://");
+ mSessionURL.append(host);
+ mSessionURL.append(":");
+ mSessionURL.append(StringPrintf("%u", port));
+ mSessionURL.append(path);
+
+ LOGI("rewritten session url: '%s'", mSessionURL.c_str());
+ }
}
void connect(const sp<AMessage> &doneMsg) {
@@ -126,7 +144,7 @@ struct MyHandler : public AHandler {
mConn->observeBinaryData(notify);
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
}
void disconnect(const sp<AMessage> &doneMsg) {
@@ -312,7 +330,7 @@ struct MyHandler : public AHandler {
int32_t reconnect;
if (msg->findInt32("reconnect", &reconnect) && reconnect) {
sp<AMessage> reply = new AMessage('conn', id());
- mConn->connect(mSessionURL.c_str(), reply);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
} else {
(new AMessage('quit', id()))->post();
}
@@ -922,7 +940,7 @@ struct MyHandler : public AHandler {
CHECK(GetAttribute(range.c_str(), "npt", &val));
float npt1, npt2;
- if (val == "now-") {
+ if (val == "now-" || val == "0-") {
// This is a live stream and therefore not seekable.
return;
} else {
@@ -992,6 +1010,7 @@ private:
sp<ARTSPConnection> mConn;
sp<ARTPConnection> mRTPConn;
sp<ASessionDescription> mSessionDesc;
+ AString mOriginalSessionURL; // This one still has user:pass@
AString mSessionURL;
AString mBaseURL;
AString mSessionID;
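
MyHandler now keeps two URLs: mOriginalSessionURL (possibly still containing user:pass@) is used only when connecting and reconnecting, while mSessionURL is rewritten to the credential-free form that appears in request lines. The rewrite in the constructor amounts to reassembling rtsp://host:port/path from the parsed pieces, roughly as follows (sketch only; std::string instead of AString, invented helper name):

    #include <cstdio>
    #include <string>

    // Sketch: rebuild a credential-free session URL from parsed components.
    static std::string MakeCleanSessionUrl(
            const std::string &host, unsigned port, const std::string &path) {
        char portStr[16];
        snprintf(portStr, sizeof(portStr), "%u", port);
        return "rtsp://" + host + ":" + portStr + path;
    }
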
diff --git a/media/libstagefright/string.cpp b/media/libstagefright/string.cpp
deleted file mode 100644
index 8b2c36c..0000000
--- a/media/libstagefright/string.cpp
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "include/stagefright_string.h"
-
-#include <media/stagefright/MediaDebug.h>
-
-namespace android {
-
-// static
-string::size_type string::npos = (string::size_type)-1;
-
-string::string() {
-}
-
-string::string(const char *s, size_t length)
- : mString(s, length) {
-}
-
-string::string(const string &from, size_type start, size_type length) {
- CHECK(start <= from.size());
- if (length == npos) {
- length = from.size() - start;
- } else {
- CHECK(start + length <= from.size());
- }
-
- mString.setTo(from.c_str() + start, length);
-}
-
-string::string(const char *s)
- : mString(s) {
-}
-
-const char *string::c_str() const {
- return mString.string();
-}
-
-string::size_type string::size() const {
- return mString.length();
-}
-
-void string::clear() {
- mString = String8();
-}
-
-string::size_type string::find(char c) const {
- char s[2];
- s[0] = c;
- s[1] = '\0';
-
- ssize_t index = mString.find(s);
-
- return index < 0 ? npos : (size_type)index;
-}
-
-bool string::operator<(const string &other) const {
- return mString < other.mString;
-}
-
-bool string::operator==(const string &other) const {
- return mString == other.mString;
-}
-
-string &string::operator+=(char c) {
- mString.append(&c, 1);
-
- return *this;
-}
-
-void string::erase(size_t from, size_t length) {
- String8 s(mString.string(), from);
- s.append(mString.string() + from + length);
-
- mString = s;
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
new file mode 100644
index 0000000..0794ad1
--- /dev/null
+++ b/media/libstagefright/yuv/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ YUVImage.cpp \
+ YUVCanvas.cpp
+
+LOCAL_SHARED_LIBRARIES := \
+ libcutils
+
+LOCAL_MODULE:= libstagefright_yuv
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVCanvas.cpp b/media/libstagefright/yuv/YUVCanvas.cpp
new file mode 100644
index 0000000..38aa779
--- /dev/null
+++ b/media/libstagefright/yuv/YUVCanvas.cpp
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVCanvas"
+
+#include <media/stagefright/MediaDebug.h>
+#include <media/stagefright/YUVCanvas.h>
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+
+namespace android {
+
+YUVCanvas::YUVCanvas(YUVImage &yuvImage)
+ : mYUVImage(yuvImage) {
+}
+
+YUVCanvas::~YUVCanvas() {
+}
+
+void YUVCanvas::FillYUV(uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::FillYUVRectangle(const Rect& rect,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ for (int32_t y = rect.top; y < rect.bottom; ++y) {
+ for (int32_t x = rect.left; x < rect.right; ++x) {
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::CopyImageRect(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage) {
+
+ // Try fast copy first
+ if (YUVImage::fastCopyRectangle(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, mYUVImage)) {
+ return;
+ }
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ for (int32_t offsetY = 0; offsetY < srcRect.height(); ++offsetY) {
+ for (int32_t offsetX = 0; offsetX < srcRect.width(); ++offsetX) {
+ int32_t srcX = srcStartX + offsetX;
+ int32_t srcY = srcStartY + offsetY;
+
+ int32_t destX = destStartX + offsetX;
+ int32_t destY = destStartY + offsetY;
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(destX, destY, yValue, uValue, vValue);
+ }
+ }
+}
+
+void YUVCanvas::downsample(
+ int32_t srcOffsetX, int32_t srcOffsetY,
+ int32_t skipX, int32_t skipY,
+ const YUVImage &srcImage) {
+ // TODO: Add a low pass filter for downsampling.
+
+ // Check that srcImage is big enough to fill mYUVImage.
+ CHECK((srcOffsetX + (mYUVImage.width() - 1) * skipX) < srcImage.width());
+ CHECK((srcOffsetY + (mYUVImage.height() - 1) * skipY) < srcImage.height());
+
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+
+ int32_t srcY = srcOffsetY;
+ for (int32_t y = 0; y < mYUVImage.height(); ++y) {
+ int32_t srcX = srcOffsetX;
+ for (int32_t x = 0; x < mYUVImage.width(); ++x) {
+ srcImage.getPixelValue(srcX, srcY, &yValue, &uValue, &vValue);
+ mYUVImage.setPixelValue(x, y, yValue, uValue, vValue);
+
+ srcX += skipX;
+ }
+ srcY += skipY;
+ }
+}
+
+} // namespace android
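
Taken together, YUVImage and YUVCanvas provide a small software path for filling, copying, and decimating YUV420 frames. The usage sketch below is based only on the interfaces visible in this patch (the constructor forms and the YUVImage::YUV420Planar enum spelling are assumptions about the headers, and downsampleByTwo is an invented name): it decimates a 640x480 planar frame to 320x240 by keeping every second pixel in each direction.

    #include <stdint.h>

    #include <media/stagefright/YUVCanvas.h>
    #include <media/stagefright/YUVImage.h>

    using namespace android;

    // Sketch: wrap caller-owned buffers and decimate by 2 in each direction.
    void downsampleByTwo(uint8_t *srcBuffer,   // 640x480, YUV420 planar
                         uint8_t *dstBuffer) { // 320x240, YUV420 planar
        YUVImage srcImage(YUVImage::YUV420Planar, 640, 480, srcBuffer);
        YUVImage dstImage(YUVImage::YUV420Planar, 320, 240, dstBuffer);

        YUVCanvas canvas(dstImage);
        // Start at (0, 0) in the source and take every 2nd pixel/row; the
        // CHECKs in downsample() hold since 0 + 319*2 < 640 and 0 + 239*2 < 480.
        canvas.downsample(0, 0, 2, 2, srcImage);
    }
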
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
new file mode 100644
index 0000000..b712062
--- /dev/null
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -0,0 +1,413 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_NDEBUG 0
+#define LOG_TAG "YUVImage"
+
+#include <media/stagefright/YUVImage.h>
+#include <ui/Rect.h>
+#include <media/stagefright/MediaDebug.h>
+
+namespace android {
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+
+ size_t numberOfBytes = bufferSize(yuvFormat, width, height);
+ uint8_t *buffer = new uint8_t[numberOfBytes];
+ mBuffer = buffer;
+ mOwnBuffer = true;
+
+ initializeYUVPointers();
+}
+
+YUVImage::YUVImage(YUVFormat yuvFormat, int32_t width, int32_t height, uint8_t *buffer) {
+ mYUVFormat = yuvFormat;
+ mWidth = width;
+ mHeight = height;
+ mBuffer = buffer;
+ mOwnBuffer = false;
+
+ initializeYUVPointers();
+}
+
+// static
+size_t YUVImage::bufferSize(YUVFormat yuvFormat, int32_t width, int32_t height) {
+ int32_t numberOfPixels = width*height;
+ size_t numberOfBytes = 0;
+ if (yuvFormat == YUV420Planar || yuvFormat == YUV420SemiPlanar) {
+ // Y takes numberOfPixels bytes and U/V take numberOfPixels/4 bytes each.
+ numberOfBytes = (size_t)(numberOfPixels + (numberOfPixels >> 1));
+ } else {
+ LOGE("Format not supported");
+ }
+ return numberOfBytes;
+}
+
+bool YUVImage::initializeYUVPointers() {
+ int32_t numberOfPixels = mWidth * mHeight;
+
+ if (mYUVFormat == YUV420Planar) {
+ mYdata = (uint8_t *)mBuffer;
+ mUdata = mYdata + numberOfPixels;
+ mVdata = mUdata + (numberOfPixels >> 2);
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // U and V channels are interleaved as VUVUVU.
+ // So V data starts at the end of the Y channel and
+ // U data starts right after V's start.
+ mYdata = (uint8_t *)mBuffer;
+ mVdata = mYdata + numberOfPixels;
+ mUdata = mVdata + 1;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+ return true;
+}
+
+YUVImage::~YUVImage() {
+ if (mOwnBuffer) delete[] mBuffer;
+}
+
+bool YUVImage::getOffsets(int32_t x, int32_t y,
+ int32_t *yOffset, int32_t *uOffset, int32_t *vOffset) const {
+ *yOffset = y*mWidth + x;
+
+ int32_t uvOffset = (y >> 1) * (mWidth >> 1) + (x >> 1);
+ if (mYUVFormat == YUV420Planar) {
+ *uOffset = uvOffset;
+ *vOffset = uvOffset;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uOffset = 2*uvOffset;
+ *vOffset = 2*uvOffset;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+bool YUVImage::getOffsetIncrementsPerDataRow(
+ int32_t *yDataOffsetIncrement,
+ int32_t *uDataOffsetIncrement,
+ int32_t *vDataOffsetIncrement) const {
+ *yDataOffsetIncrement = mWidth;
+
+ int32_t uvDataOffsetIncrement = mWidth >> 1;
+
+ if (mYUVFormat == YUV420Planar) {
+ *uDataOffsetIncrement = uvDataOffsetIncrement;
+ *vDataOffsetIncrement = uvDataOffsetIncrement;
+ } else if (mYUVFormat == YUV420SemiPlanar) {
+ // Since U and V channels are interleaved, offsets need
+ // to be doubled.
+ *uDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ *vDataOffsetIncrement = 2*uvDataOffsetIncrement;
+ } else {
+ LOGE("Format not supported");
+ return false;
+ }
+
+ return true;
+}
+
+uint8_t* YUVImage::getYAddress(int32_t offset) const {
+ return mYdata + offset;
+}
+
+uint8_t* YUVImage::getUAddress(int32_t offset) const {
+ return mUdata + offset;
+}
+
+uint8_t* YUVImage::getVAddress(int32_t offset) const {
+ return mVdata + offset;
+}
+
+bool YUVImage::getYUVAddresses(int32_t x, int32_t y,
+ uint8_t **yAddr, uint8_t **uAddr, uint8_t **vAddr) const {
+ int32_t yOffset;
+ int32_t uOffset;
+ int32_t vOffset;
+ if (!getOffsets(x, y, &yOffset, &uOffset, &vOffset)) return false;
+
+ *yAddr = getYAddress(yOffset);
+ *uAddr = getUAddress(uOffset);
+ *vAddr = getVAddress(vOffset);
+
+ return true;
+}
+
+bool YUVImage::validPixel(int32_t x, int32_t y) const {
+ return (x >= 0 && x < mWidth &&
+ y >= 0 && y < mHeight);
+}
+
+bool YUVImage::getPixelValue(int32_t x, int32_t y,
+ uint8_t *yPtr, uint8_t *uPtr, uint8_t *vPtr) const {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yPtr = *yAddr;
+ *uPtr = *uAddr;
+ *vPtr = *vAddr;
+
+ return true;
+}
+
+bool YUVImage::setPixelValue(int32_t x, int32_t y,
+ uint8_t yValue, uint8_t uValue, uint8_t vValue) {
+ CHECK(validPixel(x, y));
+
+ uint8_t *yAddr;
+ uint8_t *uAddr;
+ uint8_t *vAddr;
+ if (!getYUVAddresses(x, y, &yAddr, &uAddr, &vAddr)) return false;
+
+ *yAddr = yValue;
+ *uAddr = uValue;
+ *vAddr = vValue;
+
+ return true;
+}
+
+void YUVImage::fastCopyRectangle420Planar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420Planar);
+ CHECK(destImage.mYUVFormat == YUV420Planar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to the next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr += ySrcOffsetIncrement;
+ yDestAddr += yDestOffsetIncrement;
+ }
+ }
+
+ // Copy U
+ {
+ size_t numberOfUBytesPerRow = (size_t) (width >> 1);
+ uint8_t *uSrcAddr = uSrcAddrBase;
+ uint8_t *uDestAddr = uDestAddrBase;
+ // Every other row has an entry for U/V channel values. Hence only
+ // go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(uDestAddr, uSrcAddr, numberOfUBytesPerRow);
+
+ uSrcAddr += uSrcOffsetIncrement;
+ uDestAddr += uDestOffsetIncrement;
+ }
+ }
+
+ // Copy V
+ {
+ size_t numberOfVBytesPerRow = (size_t) (width >> 1);
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+void YUVImage::fastCopyRectangle420SemiPlanar(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ CHECK(srcImage.mYUVFormat == YUV420SemiPlanar);
+ CHECK(destImage.mYUVFormat == YUV420SemiPlanar);
+
+ int32_t srcStartX = srcRect.left;
+ int32_t srcStartY = srcRect.top;
+ int32_t width = srcRect.width();
+ int32_t height = srcRect.height();
+
+ // Get source and destination start addresses
+ uint8_t *ySrcAddrBase;
+ uint8_t *uSrcAddrBase;
+ uint8_t *vSrcAddrBase;
+ srcImage.getYUVAddresses(srcStartX, srcStartY,
+ &ySrcAddrBase, &uSrcAddrBase, &vSrcAddrBase);
+
+ uint8_t *yDestAddrBase;
+ uint8_t *uDestAddrBase;
+ uint8_t *vDestAddrBase;
+ destImage.getYUVAddresses(destStartX, destStartY,
+ &yDestAddrBase, &uDestAddrBase, &vDestAddrBase);
+
+ // Get source and destination offset increments incurred in going
+ // from one data row to the next.
+ int32_t ySrcOffsetIncrement;
+ int32_t uSrcOffsetIncrement;
+ int32_t vSrcOffsetIncrement;
+ srcImage.getOffsetIncrementsPerDataRow(
+ &ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
+
+ int32_t yDestOffsetIncrement;
+ int32_t uDestOffsetIncrement;
+ int32_t vDestOffsetIncrement;
+ destImage.getOffsetIncrementsPerDataRow(
+ &yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
+
+ // Copy Y
+ {
+ size_t numberOfYBytesPerRow = (size_t) width;
+ uint8_t *ySrcAddr = ySrcAddrBase;
+ uint8_t *yDestAddr = yDestAddrBase;
+ for (int32_t offsetY = 0; offsetY < height; ++offsetY) {
+ memcpy(yDestAddr, ySrcAddr, numberOfYBytesPerRow);
+
+ ySrcAddr = ySrcAddr + ySrcOffsetIncrement;
+ yDestAddr = yDestAddr + yDestOffsetIncrement;
+ }
+ }
+
+ // Copy UV
+ {
+ // UV are interleaved. So number of UV bytes per row is 2*(width/2).
+ size_t numberOfUVBytesPerRow = (size_t) width;
+ uint8_t *vSrcAddr = vSrcAddrBase;
+ uint8_t *vDestAddr = vDestAddrBase;
+ // Every other pixel row has a U/V data row. Hence only go half the height.
+ for (int32_t offsetY = 0; offsetY < (height >> 1); ++offsetY) {
+ memcpy(vDestAddr, vSrcAddr, numberOfUVBytesPerRow);
+
+ vSrcAddr += vSrcOffsetIncrement;
+ vDestAddr += vDestOffsetIncrement;
+ }
+ }
+}
+
+// static
+bool YUVImage::fastCopyRectangle(
+ const Rect& srcRect,
+ int32_t destStartX, int32_t destStartY,
+ const YUVImage &srcImage, YUVImage &destImage) {
+ if (srcImage.mYUVFormat == destImage.mYUVFormat) {
+ if (srcImage.mYUVFormat == YUV420Planar) {
+ fastCopyRectangle420Planar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ } else if (srcImage.mYUVFormat == YUV420SemiPlanar) {
+ fastCopyRectangle420SemiPlanar(
+ srcRect,
+ destStartX, destStartY,
+ srcImage, destImage);
+ }
+ return true;
+ }
+ return false;
+}
+
+static uint8_t clamp(int32_t v, int32_t minValue, int32_t maxValue) {
+ CHECK(maxValue >= minValue);
+
+ if (v < minValue) return minValue;
+ else if (v > maxValue) return maxValue;
+ else return v;
+}
+
+void YUVImage::yuv2rgb(uint8_t yValue, uint8_t uValue, uint8_t vValue,
+ uint8_t *r, uint8_t *g, uint8_t *b) const {
+ // Compute in a wider type so out-of-range results are clamped rather
+ // than wrapping around when assigned to uint8_t.
+ int32_t rTmp = yValue + (1.370705 * (vValue-128));
+ int32_t gTmp = yValue - (0.698001 * (vValue-128)) - (0.337633 * (uValue-128));
+ int32_t bTmp = yValue + (1.732446 * (uValue-128));
+
+ *r = clamp(rTmp, 0, 255);
+ *g = clamp(gTmp, 0, 255);
+ *b = clamp(bTmp, 0, 255);
+}
+
+bool YUVImage::writeToPPM(const char *filename) const {
+ FILE *fp = fopen(filename, "w");
+ if (fp == NULL) {
+ return false;
+ }
+ fprintf(fp, "P3\n");
+ fprintf(fp, "%d %d\n", mWidth, mHeight);
+ fprintf(fp, "255\n");
+ for (int32_t y = 0; y < mHeight; ++y) {
+ for (int32_t x = 0; x < mWidth; ++x) {
+ uint8_t yValue;
+ uint8_t uValue;
+ uint8_t vValue;
+ getPixelValue(x, y, &yValue, &uValue, &vValue);
+
+ uint8_t rValue;
+ uint8_t gValue;
+ uint8_t bValue;
+ yuv2rgb(yValue, uValue, vValue, &rValue, &gValue, &bValue);
+
+ fprintf(fp, "%d %d %d\n", (int32_t)rValue, (int32_t)gValue, (int32_t)bValue);
+ }
+ }
+ fclose(fp);
+ return true;
+}
+
+} // namespace android
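
For reference, the offset arithmetic in bufferSize()/initializeYUVPointers()/getOffsets() above works out as follows for a tiny 4x4 frame. This is a stand-alone worked example, not library code:

    #include <stdint.h>
    #include <stdio.h>

    // Worked example of the YUV420 offset math for a 4x4 frame.
    int main() {
        const int32_t width = 4, height = 4;
        const int32_t numberOfPixels = width * height;                     // 16
        const size_t bufferSize = numberOfPixels + (numberOfPixels >> 1);  // 24 bytes: Y + U + V

        // Pixel (3, 2): luma offset and shared chroma offset.
        const int32_t x = 3, y = 2;
        const int32_t yOffset = y * width + x;                        // 11
        const int32_t uvOffset = (y >> 1) * (width >> 1) + (x >> 1);  // 3

        // YUV420Planar: U plane follows Y, V plane follows U.
        const int32_t planarU = numberOfPixels + uvOffset;                          // 19
        const int32_t planarV = numberOfPixels + (numberOfPixels >> 2) + uvOffset;  // 23

        // YUV420SemiPlanar (VUVU...): interleaved chroma follows Y, offsets doubled.
        const int32_t semiV = numberOfPixels + 2 * uvOffset;  // 22
        const int32_t semiU = semiV + 1;                      // 23

        printf("size=%zu yOffset=%d planarU=%d planarV=%d semiV=%d semiU=%d\n",
               bufferSize, yOffset, planarU, planarV, semiV, semiU);
        return 0;
    }
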