Diffstat (limited to 'media/libstagefright')
-rw-r--r--  media/libstagefright/ACodec.cpp | 598
-rw-r--r--  media/libstagefright/Android.mk | 4
-rw-r--r--  media/libstagefright/AudioSource.cpp | 2
-rw-r--r--  media/libstagefright/AwesomePlayer.cpp | 11
-rw-r--r-- [-rwxr-xr-x]  media/libstagefright/CameraSource.cpp | 33
-rw-r--r--  media/libstagefright/CameraSourceTimeLapse.cpp | 12
-rw-r--r--  media/libstagefright/DataSource.cpp | 41
-rw-r--r--  media/libstagefright/FragmentedMP4Extractor.cpp | 460
-rw-r--r--  media/libstagefright/HTTPBase.cpp | 10
-rw-r--r--  media/libstagefright/MPEG4Extractor.cpp | 1344
-rw-r--r-- [-rwxr-xr-x]  media/libstagefright/MPEG4Writer.cpp | 174
-rw-r--r--  media/libstagefright/MediaAdapter.cpp | 126
-rw-r--r--  media/libstagefright/MediaCodec.cpp | 278
-rw-r--r--  media/libstagefright/MediaDefs.cpp | 1
-rw-r--r--  media/libstagefright/MediaExtractor.cpp | 8
-rw-r--r--  media/libstagefright/MediaMuxer.cpp | 173
-rw-r--r--  media/libstagefright/NuMediaExtractor.cpp | 28
-rw-r--r--  media/libstagefright/OMXClient.cpp | 20
-rw-r--r-- [-rwxr-xr-x]  media/libstagefright/OMXCodec.cpp | 20
-rw-r--r-- [-rwxr-xr-x]  media/libstagefright/SkipCutBuffer.cpp | 0
-rw-r--r--  media/libstagefright/StagefrightMediaScanner.cpp | 2
-rw-r--r--  media/libstagefright/SurfaceMediaSource.cpp | 4
-rw-r--r--  media/libstagefright/ThrottledSource.cpp | 12
-rw-r--r--  media/libstagefright/Utils.cpp | 41
-rw-r--r--  media/libstagefright/WAVExtractor.cpp | 61
-rw-r--r--  media/libstagefright/avc_utils.cpp | 76
-rw-r--r--  media/libstagefright/chromium_http/Android.mk | 1
-rw-r--r--  media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp | 6
-rw-r--r--  media/libstagefright/chromium_http/chromium_http_stub.cpp | 5
-rw-r--r--  media/libstagefright/chromium_http/support.cpp | 55
-rw-r--r--  media/libstagefright/chromium_http/support.h | 14
-rw-r--r--  media/libstagefright/chromium_http_stub.cpp | 21
-rw-r--r--  media/libstagefright/codecs/aacdec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/aacdec/SoftAAC2.cpp | 116
-rw-r--r--  media/libstagefright/codecs/aacdec/SoftAAC2.h | 4
-rw-r--r--  media/libstagefright/codecs/aacenc/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp | 2
-rw-r--r--  media/libstagefright/codecs/amrnb/dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/amrnb/enc/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp | 2
-rw-r--r--  media/libstagefright/codecs/amrwbenc/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/avc/enc/Android.mk | 1
-rw-r--r--  media/libstagefright/codecs/flac/enc/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp | 21
-rw-r--r--  media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h | 1
-rw-r--r--  media/libstagefright/codecs/g711/dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/gsm/Android.mk | 4
-rw-r--r--  media/libstagefright/codecs/gsm/dec/Android.mk | 21
-rw-r--r--  media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 | 0
-rw-r--r--  media/libstagefright/codecs/gsm/dec/NOTICE | 190
-rw-r--r--  media/libstagefright/codecs/gsm/dec/SoftGSM.cpp | 269
-rw-r--r--  media/libstagefright/codecs/gsm/dec/SoftGSM.h | 65
-rw-r--r--  media/libstagefright/codecs/m4v_h263/dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp | 8
-rw-r--r--  media/libstagefright/codecs/m4v_h263/enc/Android.mk | 1
-rw-r--r--  media/libstagefright/codecs/mp3dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/mp3dec/SoftMP3.cpp | 20
-rw-r--r--  media/libstagefright/codecs/mp3dec/SoftMP3.h | 1
-rw-r--r--  media/libstagefright/codecs/on2/dec/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/on2/dec/SoftVPX.cpp | 2
-rw-r--r--  media/libstagefright/codecs/on2/enc/Android.mk | 24
-rw-r--r--  media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 | 0
-rw-r--r--  media/libstagefright/codecs/on2/enc/NOTICE | 190
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp | 686
-rw-r--r--  media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h | 203
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/Android.mk | 3
-rw-r--r--  media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp | 10
-rwxr-xr-x  media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c | 6
-rw-r--r--  media/libstagefright/codecs/raw/Android.mk | 2
-rw-r--r--  media/libstagefright/codecs/vorbis/dec/Android.mk | 3
-rw-r--r--  media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp | 16
-rw-r--r--  media/libstagefright/codecs/vorbis/dec/SoftVorbis.h | 1
-rw-r--r--  media/libstagefright/colorconversion/SoftwareRenderer.cpp | 2
-rw-r--r--  media/libstagefright/foundation/ALooperRoster.cpp | 3
-rw-r--r--  media/libstagefright/foundation/Android.mk | 1
-rw-r--r--  media/libstagefright/httplive/LiveSession.cpp | 77
-rw-r--r--  media/libstagefright/id3/Android.mk | 2
-rw-r--r--  media/libstagefright/include/AwesomePlayer.h | 4
-rw-r--r--  media/libstagefright/include/ChromiumHTTPDataSource.h | 3
-rw-r--r--  media/libstagefright/include/FragmentedMP4Extractor.h | 70
-rw-r--r--  media/libstagefright/include/FragmentedMP4Parser.h | 2
-rw-r--r--  media/libstagefright/include/HTTPBase.h | 3
-rw-r--r--  media/libstagefright/include/LiveSession.h | 15
-rw-r--r--  media/libstagefright/include/MPEG4Extractor.h | 25
-rw-r--r--  media/libstagefright/include/OMX.h | 6
-rw-r--r--  media/libstagefright/include/OMXNodeInstance.h | 24
-rw-r--r--  media/libstagefright/include/SDPLoader.h | 70
-rw-r--r--  media/libstagefright/include/SimpleSoftOMXComponent.h | 1
-rw-r--r--  media/libstagefright/include/ThrottledSource.h | 36
-rw-r--r--  media/libstagefright/include/avc_utils.h | 5
-rw-r--r--  media/libstagefright/include/chromium_http_stub.h | 4
-rw-r--r--  media/libstagefright/matroska/MatroskaExtractor.cpp | 78
-rw-r--r--  media/libstagefright/mp4/FragmentedMP4Parser.cpp | 18
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.cpp | 31
-rw-r--r--  media/libstagefright/mpeg2ts/ATSParser.h | 4
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.cpp | 20
-rw-r--r--  media/libstagefright/mpeg2ts/AnotherPacketSource.h | 3
-rw-r--r--  media/libstagefright/mpeg2ts/ESQueue.cpp | 63
-rw-r--r--  media/libstagefright/omx/Android.mk | 3
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.cpp | 467
-rw-r--r--  media/libstagefright/omx/GraphicBufferSource.h | 177
-rw-r--r--  media/libstagefright/omx/OMX.cpp | 14
-rw-r--r--  media/libstagefright/omx/OMXNodeInstance.cpp | 181
-rw-r--r--  media/libstagefright/omx/SimpleSoftOMXComponent.cpp | 8
-rw-r--r--  media/libstagefright/omx/SoftOMXPlugin.cpp | 2
-rw-r--r--  media/libstagefright/omx/tests/Android.mk | 2
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.cpp | 25
-rw-r--r--  media/libstagefright/rtsp/ARTSPConnection.h | 6
-rw-r--r--  media/libstagefright/rtsp/Android.mk | 1
-rw-r--r--  media/libstagefright/rtsp/MyHandler.h | 382
-rw-r--r--  media/libstagefright/rtsp/SDPLoader.cpp | 154
-rw-r--r--  media/libstagefright/tests/Android.mk | 1
-rw-r--r--  media/libstagefright/tests/SurfaceMediaSource_test.cpp | 24
-rw-r--r--  media/libstagefright/timedtext/TimedTextSRTSource.cpp | 7
-rw-r--r--  media/libstagefright/wifi-display/ANetworkSession.cpp | 245
-rw-r--r--  media/libstagefright/wifi-display/ANetworkSession.h | 4
-rw-r--r--  media/libstagefright/wifi-display/Android.mk | 33
-rw-r--r--  media/libstagefright/wifi-display/MediaSender.cpp | 474
-rw-r--r--  media/libstagefright/wifi-display/MediaSender.h | 131
-rw-r--r--  media/libstagefright/wifi-display/Parameters.cpp | 4
-rw-r--r--  media/libstagefright/wifi-display/TimeSeries.cpp | 67
-rw-r--r--  media/libstagefright/wifi-display/VideoFormats.cpp | 419
-rw-r--r--  media/libstagefright/wifi-display/VideoFormats.h | 106
-rw-r--r--  media/libstagefright/wifi-display/rtp/RTPBase.h (renamed from media/libstagefright/wifi-display/TimeSeries.h) | 39
-rw-r--r--  media/libstagefright/wifi-display/rtp/RTPSender.cpp | 795
-rw-r--r--  media/libstagefright/wifi-display/rtp/RTPSender.h | 120
-rw-r--r--  media/libstagefright/wifi-display/sink/LinearRegression.cpp | 110
-rw-r--r--  media/libstagefright/wifi-display/sink/LinearRegression.h | 52
-rw-r--r--  media/libstagefright/wifi-display/sink/RTPSink.cpp | 806
-rw-r--r--  media/libstagefright/wifi-display/sink/RTPSink.h | 98
-rw-r--r--  media/libstagefright/wifi-display/sink/TunnelRenderer.cpp | 396
-rw-r--r--  media/libstagefright/wifi-display/sink/TunnelRenderer.h | 84
-rw-r--r--  media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp | 644
-rw-r--r--  media/libstagefright/wifi-display/sink/WifiDisplaySink.h | 147
-rw-r--r--  media/libstagefright/wifi-display/source/Converter.cpp | 112
-rw-r--r--  media/libstagefright/wifi-display/source/Converter.h | 13
-rw-r--r--  media/libstagefright/wifi-display/source/PlaybackSession.cpp | 700
-rw-r--r--  media/libstagefright/wifi-display/source/PlaybackSession.h | 82
-rw-r--r--  media/libstagefright/wifi-display/source/RepeaterSource.cpp | 19
-rw-r--r--  media/libstagefright/wifi-display/source/RepeaterSource.h | 5
-rw-r--r--  media/libstagefright/wifi-display/source/Sender.cpp | 870
-rw-r--r--  media/libstagefright/wifi-display/source/Sender.h | 169
-rw-r--r--  media/libstagefright/wifi-display/source/TSPacketizer.cpp | 290
-rw-r--r--  media/libstagefright/wifi-display/source/TSPacketizer.h | 11
-rw-r--r--  media/libstagefright/wifi-display/source/WifiDisplaySource.cpp | 329
-rw-r--r--  media/libstagefright/wifi-display/source/WifiDisplaySource.h | 23
-rw-r--r--  media/libstagefright/wifi-display/udptest.cpp | 355
-rw-r--r--  media/libstagefright/wifi-display/wfd.cpp | 140
-rw-r--r--  media/libstagefright/yuv/Android.mk | 3
149 files changed, 9716 insertions, 5738 deletions
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index a01d03f..64e3885 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -26,6 +26,7 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/NativeWindowWrapper.h>
@@ -165,6 +166,24 @@ private:
////////////////////////////////////////////////////////////////////////////////
+struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
+ DeathNotifier(const sp<AMessage> &notify)
+ : mNotify(notify) {
+ }
+
+ virtual void binderDied(const wp<IBinder> &) {
+ mNotify->post();
+ }
+
+protected:
+ virtual ~DeathNotifier() {}
+
+private:
+ sp<AMessage> mNotify;
+
+ DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
+};
+
struct ACodec::UninitializedState : public ACodec::BaseState {
UninitializedState(ACodec *codec);
@@ -176,6 +195,8 @@ private:
void onSetup(const sp<AMessage> &msg);
bool onAllocateComponent(const sp<AMessage> &msg);
+ sp<DeathNotifier> mDeathNotifier;
+
DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};
@@ -192,6 +213,7 @@ private:
friend struct ACodec::UninitializedState;
bool onConfigureComponent(const sp<AMessage> &msg);
+ void onCreateInputSurface(const sp<AMessage> &msg);
void onStart();
void onShutdown(bool keepComponentAllocated);
@@ -374,6 +396,12 @@ void ACodec::initiateSetup(const sp<AMessage> &msg) {
msg->post();
}
+void ACodec::signalSetParameters(const sp<AMessage> &params) {
+ sp<AMessage> msg = new AMessage(kWhatSetParameters, id());
+ msg->setMessage("params", params);
+ msg->post();
+}
+
void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
msg->setWhat(kWhatAllocateComponent);
msg->setTarget(id());
@@ -386,6 +414,14 @@ void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
msg->post();
}
+void ACodec::initiateCreateInputSurface() {
+ (new AMessage(kWhatCreateInputSurface, id()))->post();
+}
+
+void ACodec::signalEndOfInputStream() {
+ (new AMessage(kWhatSignalEndOfInputStream, id()))->post();
+}
+
void ACodec::initiateStart() {
(new AMessage(kWhatStart, id()))->post();
}
@@ -612,7 +648,7 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {
sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
BufferInfo info;
info.mStatus = BufferInfo::OWNED_BY_US;
- info.mData = new ABuffer(0);
+ info.mData = new ABuffer(NULL /* data */, def.nBufferSize /* capacity */);
info.mGraphicBuffer = graphicBuffer;
mBuffers[kPortIndexOutput].push(info);
@@ -712,12 +748,10 @@ status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
BufferInfo *info =
&mBuffers[kPortIndexOutput].editItemAt(i);
- if (info->mStatus !=
- BufferInfo::OWNED_BY_COMPONENT) {
- // We shouldn't have sent out any buffers to the client at this
- // point.
- CHECK_NE((int)info->mStatus, (int)BufferInfo::OWNED_BY_DOWNSTREAM);
-
+ // At this time some buffers may still be with the component
+ // or being drained.
+ if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
+ info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
CHECK_EQ((status_t)OK, freeBuffer(kPortIndexOutput, i));
}
}
@@ -803,6 +837,8 @@ status_t ACodec::setComponentRole(
"audio_decoder.raw", "audio_encoder.raw" },
{ MEDIA_MIMETYPE_AUDIO_FLAC,
"audio_decoder.flac", "audio_encoder.flac" },
+ { MEDIA_MIMETYPE_AUDIO_MSGSM,
+ "audio_decoder.gsm", "audio_encoder.gsm" },
};
static const size_t kNumMimeToRole =
@@ -922,6 +958,19 @@ status_t ACodec::configureCodec(
err = setupVideoDecoder(mime, width, height);
}
}
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
+ int32_t numChannels, sampleRate;
+ if (!msg->findInt32("channel-count", &numChannels)
+ || !msg->findInt32("sample-rate", &sampleRate)) {
+ // Since we did not always check for these, leave them optional
+ // and have the decoder figure it all out.
+ err = OK;
+ } else {
+ err = setupRawAudioFormat(
+ encoder ? kPortIndexInput : kPortIndexOutput,
+ sampleRate,
+ numChannels);
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
int32_t numChannels, sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
@@ -964,17 +1013,23 @@ status_t ACodec::configureCodec(
err = INVALID_OPERATION;
} else {
if (encoder) {
- if (!msg->findInt32("flac-compression-level", &compressionLevel)) {
+ if (!msg->findInt32(
+ "flac-compression-level", &compressionLevel)) {
compressionLevel = 5;// default FLAC compression level
} else if (compressionLevel < 0) {
- ALOGW("compression level %d outside [0..8] range, using 0", compressionLevel);
+ ALOGW("compression level %d outside [0..8] range, "
+ "using 0",
+ compressionLevel);
compressionLevel = 0;
} else if (compressionLevel > 8) {
- ALOGW("compression level %d outside [0..8] range, using 8", compressionLevel);
+ ALOGW("compression level %d outside [0..8] range, "
+ "using 8",
+ compressionLevel);
compressionLevel = 8;
}
}
- err = setupFlacCodec(encoder, numChannels, sampleRate, compressionLevel);
+ err = setupFlacCodec(
+ encoder, numChannels, sampleRate, compressionLevel);
}
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
int32_t numChannels, sampleRate;
@@ -1408,36 +1463,52 @@ status_t ACodec::setSupportedOutputFormat() {
CHECK_EQ(err, (status_t)OK);
CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
- CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar
- || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
- || format.eColorFormat == OMX_COLOR_FormatCbYCrY
- || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
- || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar
- || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka);
-
return mOMX->setParameter(
mNode, OMX_IndexParamVideoPortFormat,
&format, sizeof(format));
}
+static const struct VideoCodingMapEntry {
+ const char *mMime;
+ OMX_VIDEO_CODINGTYPE mVideoCodingType;
+} kVideoCodingMapEntry[] = {
+ { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
+ { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
+ { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
+ { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
+ { MEDIA_MIMETYPE_VIDEO_VPX, OMX_VIDEO_CodingVPX },
+};
+
static status_t GetVideoCodingTypeFromMime(
const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
- if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
- *codingType = OMX_VIDEO_CodingAVC;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
- *codingType = OMX_VIDEO_CodingMPEG4;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
- *codingType = OMX_VIDEO_CodingH263;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) {
- *codingType = OMX_VIDEO_CodingMPEG2;
- } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VPX, mime)) {
- *codingType = OMX_VIDEO_CodingVPX;
- } else {
- *codingType = OMX_VIDEO_CodingUnused;
- return ERROR_UNSUPPORTED;
+ for (size_t i = 0;
+ i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+ ++i) {
+ if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
+ *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
+ return OK;
+ }
}
- return OK;
+ *codingType = OMX_VIDEO_CodingUnused;
+
+ return ERROR_UNSUPPORTED;
+}
+
+static status_t GetMimeTypeForVideoCoding(
+ OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
+ for (size_t i = 0;
+ i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
+ ++i) {
+ if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
+ *mime = kVideoCodingMapEntry[i].mMime;
+ return OK;
+ }
+ }
+
+ mime->clear();
+
+ return ERROR_UNSUPPORTED;
}
status_t ACodec::setupVideoDecoder(
@@ -2085,6 +2156,42 @@ size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
return n;
}
+size_t ACodec::countBuffersOwnedByNativeWindow() const {
+ size_t n = 0;
+
+ for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
+ const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);
+
+ if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
+ ++n;
+ }
+ }
+
+ return n;
+}
+
+void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
+ if (mNativeWindow == NULL) {
+ return;
+ }
+
+ int minUndequeuedBufs = 0;
+ status_t err = mNativeWindow->query(
+ mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &minUndequeuedBufs);
+
+ if (err != OK) {
+ ALOGE("[%s] NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
+ mComponentName.c_str(), strerror(-err), -err);
+
+ minUndequeuedBufs = 0;
+ }
+
+ while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs
+ && dequeueBufferFromNativeWindow() != NULL) {
+ }
+}
+
bool ACodec::allYourBuffersAreBelongToUs(
OMX_U32 portIndex) {
for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
@@ -2141,49 +2248,61 @@ void ACodec::sendFormatChange() {
{
OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
- notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+ AString mime;
+ if (!mIsEncoder) {
+ notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+ } else if (GetMimeTypeForVideoCoding(
+ videoDef->eCompressionFormat, &mime) != OK) {
+ notify->setString("mime", "application/octet-stream");
+ } else {
+ notify->setString("mime", mime.c_str());
+ }
+
notify->setInt32("width", videoDef->nFrameWidth);
notify->setInt32("height", videoDef->nFrameHeight);
- notify->setInt32("stride", videoDef->nStride);
- notify->setInt32("slice-height", videoDef->nSliceHeight);
- notify->setInt32("color-format", videoDef->eColorFormat);
-
- OMX_CONFIG_RECTTYPE rect;
- InitOMXParams(&rect);
- rect.nPortIndex = kPortIndexOutput;
-
- if (mOMX->getConfig(
- mNode, OMX_IndexConfigCommonOutputCrop,
- &rect, sizeof(rect)) != OK) {
- rect.nLeft = 0;
- rect.nTop = 0;
- rect.nWidth = videoDef->nFrameWidth;
- rect.nHeight = videoDef->nFrameHeight;
- }
- CHECK_GE(rect.nLeft, 0);
- CHECK_GE(rect.nTop, 0);
- CHECK_GE(rect.nWidth, 0u);
- CHECK_GE(rect.nHeight, 0u);
- CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
- CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
-
- notify->setRect(
- "crop",
- rect.nLeft,
- rect.nTop,
- rect.nLeft + rect.nWidth - 1,
- rect.nTop + rect.nHeight - 1);
-
- if (mNativeWindow != NULL) {
- android_native_rect_t crop;
- crop.left = rect.nLeft;
- crop.top = rect.nTop;
- crop.right = rect.nLeft + rect.nWidth;
- crop.bottom = rect.nTop + rect.nHeight;
-
- CHECK_EQ(0, native_window_set_crop(
- mNativeWindow.get(), &crop));
+ if (!mIsEncoder) {
+ notify->setInt32("stride", videoDef->nStride);
+ notify->setInt32("slice-height", videoDef->nSliceHeight);
+ notify->setInt32("color-format", videoDef->eColorFormat);
+
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = kPortIndexOutput;
+
+ if (mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect)) != OK) {
+ rect.nLeft = 0;
+ rect.nTop = 0;
+ rect.nWidth = videoDef->nFrameWidth;
+ rect.nHeight = videoDef->nFrameHeight;
+ }
+
+ CHECK_GE(rect.nLeft, 0);
+ CHECK_GE(rect.nTop, 0);
+ CHECK_GE(rect.nWidth, 0u);
+ CHECK_GE(rect.nHeight, 0u);
+ CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
+ CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
+
+ notify->setRect(
+ "crop",
+ rect.nLeft,
+ rect.nTop,
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+
+ if (mNativeWindow != NULL) {
+ android_native_rect_t crop;
+ crop.left = rect.nLeft;
+ crop.top = rect.nTop;
+ crop.right = rect.nLeft + rect.nWidth;
+ crop.bottom = rect.nTop + rect.nHeight;
+
+ CHECK_EQ(0, native_window_set_crop(
+ mNativeWindow.get(), &crop));
+ }
}
break;
}
@@ -2191,41 +2310,108 @@ void ACodec::sendFormatChange() {
case OMX_PortDomainAudio:
{
OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
- CHECK_EQ((int)audioDef->eEncoding, (int)OMX_AUDIO_CodingPCM);
- OMX_AUDIO_PARAM_PCMMODETYPE params;
- InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ switch (audioDef->eEncoding) {
+ case OMX_AUDIO_CodingPCM:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
- CHECK_EQ(mOMX->getParameter(
- mNode, OMX_IndexParamAudioPcm,
- &params, sizeof(params)),
- (status_t)OK);
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioPcm,
+ &params, sizeof(params)),
+ (status_t)OK);
- CHECK(params.nChannels == 1 || params.bInterleaved);
- CHECK_EQ(params.nBitPerSample, 16u);
- CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
- CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
-
- notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
- notify->setInt32("channel-count", params.nChannels);
- notify->setInt32("sample-rate", params.nSamplingRate);
- if (mEncoderDelay + mEncoderPadding) {
- size_t frameSize = params.nChannels * sizeof(int16_t);
- if (mSkipCutBuffer != NULL) {
- size_t prevbufsize = mSkipCutBuffer->size();
- if (prevbufsize != 0) {
- ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize);
+ CHECK(params.nChannels == 1 || params.bInterleaved);
+ CHECK_EQ(params.nBitPerSample, 16u);
+ CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
+ CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSamplingRate);
+ if (mEncoderDelay + mEncoderPadding) {
+ size_t frameSize = params.nChannels * sizeof(int16_t);
+ if (mSkipCutBuffer != NULL) {
+ size_t prevbufsize = mSkipCutBuffer->size();
+ if (prevbufsize != 0) {
+ ALOGW("Replacing SkipCutBuffer holding %d bytes", prevbufsize);
+ }
+ }
+ mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize,
+ mEncoderPadding * frameSize);
}
+
+ if (mChannelMaskPresent) {
+ notify->setInt32("channel-mask", mChannelMask);
+ }
+ break;
}
- mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay * frameSize,
- mEncoderPadding * frameSize);
- }
- if (mChannelMaskPresent) {
- notify->setInt32("channel-mask", mChannelMask);
- }
+ case OMX_AUDIO_CodingAAC:
+ {
+ OMX_AUDIO_PARAM_AACPROFILETYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAac,
+ &params, sizeof(params)),
+ (status_t)OK);
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingAMR:
+ {
+ OMX_AUDIO_PARAM_AMRTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioAmr,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setInt32("channel-count", 1);
+ if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
+ notify->setString(
+ "mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
+
+ notify->setInt32("sample-rate", 16000);
+ } else {
+ notify->setString(
+ "mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
+
+ notify->setInt32("sample-rate", 8000);
+ }
+ break;
+ }
+
+ case OMX_AUDIO_CodingFLAC:
+ {
+ OMX_AUDIO_PARAM_FLACTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = kPortIndexOutput;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioFlac,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
break;
}
@@ -2454,6 +2640,21 @@ bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
return onOMXMessage(msg);
}
+ case ACodec::kWhatCreateInputSurface:
+ case ACodec::kWhatSignalEndOfInputStream:
+ {
+ ALOGE("Message 0x%x was not handled", msg->what());
+ mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION);
+ return true;
+ }
+
+ case ACodec::kWhatOMXDied:
+ {
+ ALOGE("OMX/mediaserver died, signalling error!");
+ mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT);
+ break;
+ }
+
default:
return false;
}
@@ -2856,19 +3057,18 @@ bool ACodec::BaseState::onOMXFillBufferDone(
break;
}
- if (!mCodec->mIsEncoder && !mCodec->mSentFormat) {
+ if (!mCodec->mSentFormat) {
mCodec->sendFormatChange();
}
- if (mCodec->mNativeWindow == NULL) {
- info->mData->setRange(rangeOffset, rangeLength);
-
+ info->mData->setRange(rangeOffset, rangeLength);
#if 0
+ if (mCodec->mNativeWindow == NULL) {
if (IsIDR(info->mData)) {
ALOGI("IDR frame");
}
-#endif
}
+#endif
if (mCodec->mSkipCutBuffer != NULL) {
mCodec->mSkipCutBuffer->submit(info->mData);
@@ -2929,7 +3129,8 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
int32_t render;
if (mCodec->mNativeWindow != NULL
- && msg->findInt32("render", &render) && render != 0) {
+ && msg->findInt32("render", &render) && render != 0
+ && (info->mData == NULL || info->mData->size() != 0)) {
// The client wants this buffer to be rendered.
status_t err;
@@ -3003,6 +3204,18 @@ ACodec::UninitializedState::UninitializedState(ACodec *codec)
void ACodec::UninitializedState::stateEntered() {
ALOGV("Now uninitialized");
+
+ if (mDeathNotifier != NULL) {
+ mCodec->mOMX->asBinder()->unlinkToDeath(mDeathNotifier);
+ mDeathNotifier.clear();
+ }
+
+ mCodec->mNativeWindow.clear();
+ mCodec->mNode = NULL;
+ mCodec->mOMX.clear();
+ mCodec->mQuirks = 0;
+ mCodec->mFlags = 0;
+ mCodec->mComponentName.clear();
}
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
@@ -3074,6 +3287,15 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
sp<IOMX> omx = client.interface();
+ sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec->id());
+
+ mDeathNotifier = new DeathNotifier(notify);
+ if (omx->asBinder()->linkToDeath(mDeathNotifier) != OK) {
+ // This was a local binder, if it dies so do we, we won't care
+ // about any notifications in the afterlife.
+ mDeathNotifier.clear();
+ }
+
Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;
AString mime;
@@ -3138,7 +3360,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
return false;
}
- sp<AMessage> notify = new AMessage(kWhatOMXMessage, mCodec->id());
+ notify = new AMessage(kWhatOMXMessage, mCodec->id());
observer->setNotificationMessage(notify);
mCodec->mComponentName = componentName;
@@ -3152,11 +3374,6 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
mCodec->mOMX = omx;
mCodec->mNode = node;
- mCodec->mPortEOS[kPortIndexInput] =
- mCodec->mPortEOS[kPortIndexOutput] = false;
-
- mCodec->mInputEOSResult = OK;
-
{
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", ACodec::kWhatComponentAllocated);
@@ -3178,6 +3395,11 @@ ACodec::LoadedState::LoadedState(ACodec *codec)
void ACodec::LoadedState::stateEntered() {
ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());
+ mCodec->mPortEOS[kPortIndexInput] =
+ mCodec->mPortEOS[kPortIndexOutput] = false;
+
+ mCodec->mInputEOSResult = OK;
+
if (mCodec->mShutdownInProgress) {
bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
@@ -3192,13 +3414,6 @@ void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
if (!keepComponentAllocated) {
CHECK_EQ(mCodec->mOMX->freeNode(mCodec->mNode), (status_t)OK);
- mCodec->mNativeWindow.clear();
- mCodec->mNode = NULL;
- mCodec->mOMX.clear();
- mCodec->mQuirks = 0;
- mCodec->mFlags = 0;
- mCodec->mComponentName.clear();
-
mCodec->changeState(mCodec->mUninitializedState);
}
@@ -3218,6 +3433,13 @@ bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case ACodec::kWhatCreateInputSurface:
+ {
+ onCreateInputSurface(msg);
+ handled = true;
+ break;
+ }
+
case ACodec::kWhatStart:
{
onStart();
@@ -3296,6 +3518,32 @@ bool ACodec::LoadedState::onConfigureComponent(
return true;
}
+void ACodec::LoadedState::onCreateInputSurface(
+ const sp<AMessage> &msg) {
+ ALOGV("onCreateInputSurface");
+
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatInputSurfaceCreated);
+
+ sp<IGraphicBufferProducer> bufferProducer;
+ status_t err;
+
+ err = mCodec->mOMX->createInputSurface(mCodec->mNode, kPortIndexInput,
+ &bufferProducer);
+ if (err == OK) {
+ notify->setObject("input-surface",
+ new BufferProducerWrapper(bufferProducer));
+ } else {
+ // Can't use mCodec->signalError() here -- MediaCodec won't forward
+ // the error through because it's in the "configured" state. We
+ // send a kWhatInputSurfaceCreated with an error value instead.
+ ALOGE("[%s] onCreateInputSurface returning error %d",
+ mCodec->mComponentName.c_str(), err);
+ notify->setInt32("err", err);
+ }
+ notify->post();
+}
+
void ACodec::LoadedState::onStart() {
ALOGV("onStart");
@@ -3345,6 +3593,27 @@ bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
return true;
}
+ case kWhatSignalEndOfInputStream:
+ {
+ mCodec->onSignalEndOfInputStream();
+ return true;
+ }
+
+ case kWhatResume:
+ {
+ // We'll be active soon enough.
+ return true;
+ }
+
+ case kWhatFlush:
+ {
+ // We haven't even started yet, so we're flushed alright...
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->post();
+ return true;
+ }
+
default:
return BaseState::onMessageReceived(msg);
}
@@ -3390,6 +3659,28 @@ bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
return true;
}
+ case kWhatResume:
+ {
+ // We'll be active soon enough.
+ return true;
+ }
+
+ case kWhatFlush:
+ {
+ // We haven't even started yet, so we're flushed alright...
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatFlushCompleted);
+ notify->post();
+
+ return true;
+ }
+
+ case kWhatSignalEndOfInputStream:
+ {
+ mCodec->onSignalEndOfInputStream();
+ return true;
+ }
+
default:
return BaseState::onMessageReceived(msg);
}
@@ -3518,7 +3809,6 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
(status_t)OK);
mCodec->changeState(mCodec->mFlushingState);
-
handled = true;
break;
}
@@ -3542,6 +3832,30 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSetParameters:
+ {
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ status_t err = mCodec->setParameters(params);
+
+ sp<AMessage> reply;
+ if (msg->findMessage("reply", &reply)) {
+ reply->setInt32("err", err);
+ reply->post();
+ }
+
+ handled = true;
+ break;
+ }
+
+ case ACodec::kWhatSignalEndOfInputStream:
+ {
+ mCodec->onSignalEndOfInputStream();
+ handled = true;
+ break;
+ }
+
default:
handled = BaseState::onMessageReceived(msg);
break;
@@ -3550,6 +3864,42 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
return handled;
}
+status_t ACodec::setParameters(const sp<AMessage> &params) {
+ int32_t videoBitrate;
+ if (params->findInt32("videoBitrate", &videoBitrate)) {
+ OMX_VIDEO_CONFIG_BITRATETYPE configParams;
+ InitOMXParams(&configParams);
+ configParams.nPortIndex = kPortIndexOutput;
+ configParams.nEncodeBitrate = videoBitrate;
+
+ status_t err = mOMX->setConfig(
+ mNode,
+ OMX_IndexConfigVideoBitrate,
+ &configParams,
+ sizeof(configParams));
+
+ if (err != OK) {
+ ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
+ videoBitrate, err);
+
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+void ACodec::onSignalEndOfInputStream() {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatSignaledInputEOS);
+
+ status_t err = mOMX->signalEndOfInputStream(mNode);
+ if (err != OK) {
+ notify->setInt32("err", err);
+ }
+ notify->post();
+}
+
bool ACodec::ExecutingState::onOMXEvent(
OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
switch (event) {
@@ -3964,6 +4314,10 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
if (mFlushComplete[kPortIndexInput]
&& mFlushComplete[kPortIndexOutput]
&& mCodec->allYourBuffersAreBelongToUs()) {
+ // We now own all buffers except possibly those still queued with
+ // the native window for rendering. Let's get those back as well.
+ mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();
+
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", ACodec::kWhatFlushCompleted);
notify->post();
@@ -3973,6 +4327,10 @@ void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
mCodec->mInputEOSResult = OK;
+ if (mCodec->mSkipCutBuffer != NULL) {
+ mCodec->mSkipCutBuffer->clear();
+ }
+
mCodec->changeState(mCodec->mExecutingState);
}
}
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index cc0581e..acc3abf 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -19,19 +19,20 @@ LOCAL_SRC_FILES:= \
ESDS.cpp \
FileSource.cpp \
FLACExtractor.cpp \
- FragmentedMP4Extractor.cpp \
HTTPBase.cpp \
JPEGSource.cpp \
MP3Extractor.cpp \
MPEG2TSWriter.cpp \
MPEG4Extractor.cpp \
MPEG4Writer.cpp \
+ MediaAdapter.cpp \
MediaBuffer.cpp \
MediaBufferGroup.cpp \
MediaCodec.cpp \
MediaCodecList.cpp \
MediaDefs.cpp \
MediaExtractor.cpp \
+ MediaMuxer.cpp \
MediaSource.cpp \
MetaData.cpp \
NuCachedSource2.cpp \
@@ -78,7 +79,6 @@ LOCAL_SHARED_LIBRARIES := \
libicuuc \
liblog \
libmedia \
- libmedia_native \
libsonivox \
libssl \
libstagefright_omx \
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index 861aebe..3cf4d5c 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -58,7 +58,7 @@ AudioSource::AudioSource(
ALOGV("sampleRate: %d, channelCount: %d", sampleRate, channelCount);
CHECK(channelCount == 1 || channelCount == 2);
- int minFrameCount;
+ size_t minFrameCount;
status_t status = AudioRecord::getMinFrameCount(&minFrameCount,
sampleRate,
AUDIO_FORMAT_PCM_16_BIT,
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index 1e2625a..bd28118 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -48,8 +48,8 @@
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
-#include <gui/ISurfaceTexture.h>
-#include <gui/SurfaceTextureClient.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -1178,12 +1178,12 @@ bool AwesomePlayer::isPlaying() const {
return (mFlags & PLAYING) || (mFlags & CACHE_UNDERRUN);
}
-status_t AwesomePlayer::setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
+status_t AwesomePlayer::setSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer) {
Mutex::Autolock autoLock(mLock);
status_t err;
- if (surfaceTexture != NULL) {
- err = setNativeWindow_l(new SurfaceTextureClient(surfaceTexture));
+ if (bufferProducer != NULL) {
+ err = setNativeWindow_l(new Surface(bufferProducer));
} else {
err = setNativeWindow_l(NULL);
}
@@ -2511,6 +2511,7 @@ status_t AwesomePlayer::setVideoScalingMode_l(int32_t mode) {
if (err != OK) {
ALOGW("Failed to set scaling mode: %d", err);
}
+ return err;
}
return OK;
}
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index efd7af7..5a26b06 100755..100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -121,13 +121,14 @@ static int32_t getColorFormat(const char* colorFormat) {
CHECK(!"Unknown color format");
}
-CameraSource *CameraSource::Create() {
+CameraSource *CameraSource::Create(const String16 &clientName) {
Size size;
size.width = -1;
size.height = -1;
sp<ICamera> camera;
- return new CameraSource(camera, NULL, 0, size, -1, NULL, false);
+ return new CameraSource(camera, NULL, 0, clientName, -1,
+ size, -1, NULL, false);
}
// static
@@ -135,14 +136,16 @@ CameraSource *CameraSource::CreateFromCamera(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
Size videoSize,
int32_t frameRate,
- const sp<Surface>& surface,
+ const sp<IGraphicBufferProducer>& surface,
bool storeMetaDataInVideoBuffers) {
CameraSource *source = new CameraSource(camera, proxy, cameraId,
- videoSize, frameRate, surface,
- storeMetaDataInVideoBuffers);
+ clientName, clientUid, videoSize, frameRate, surface,
+ storeMetaDataInVideoBuffers);
return source;
}
@@ -150,9 +153,11 @@ CameraSource::CameraSource(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
Size videoSize,
int32_t frameRate,
- const sp<Surface>& surface,
+ const sp<IGraphicBufferProducer>& surface,
bool storeMetaDataInVideoBuffers)
: mCameraFlags(0),
mNumInputBuffers(0),
@@ -173,6 +178,7 @@ CameraSource::CameraSource(
mVideoSize.height = -1;
mInitCheck = init(camera, proxy, cameraId,
+ clientName, clientUid,
videoSize, frameRate,
storeMetaDataInVideoBuffers);
if (mInitCheck != OK) releaseCamera();
@@ -184,10 +190,10 @@ status_t CameraSource::initCheck() const {
status_t CameraSource::isCameraAvailable(
const sp<ICamera>& camera, const sp<ICameraRecordingProxy>& proxy,
- int32_t cameraId) {
+ int32_t cameraId, const String16& clientName, uid_t clientUid) {
if (camera == 0) {
- mCamera = Camera::connect(cameraId);
+ mCamera = Camera::connect(cameraId, clientName, clientUid);
if (mCamera == 0) return -EBUSY;
mCameraFlags &= ~FLAGS_HOT_CAMERA;
} else {
@@ -469,6 +475,8 @@ status_t CameraSource::init(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
Size videoSize,
int32_t frameRate,
bool storeMetaDataInVideoBuffers) {
@@ -476,7 +484,7 @@ status_t CameraSource::init(
ALOGV("init");
status_t err = OK;
int64_t token = IPCThreadState::self()->clearCallingIdentity();
- err = initWithCameraAccess(camera, proxy, cameraId,
+ err = initWithCameraAccess(camera, proxy, cameraId, clientName, clientUid,
videoSize, frameRate,
storeMetaDataInVideoBuffers);
IPCThreadState::self()->restoreCallingIdentity(token);
@@ -487,13 +495,16 @@ status_t CameraSource::initWithCameraAccess(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
Size videoSize,
int32_t frameRate,
bool storeMetaDataInVideoBuffers) {
ALOGV("initWithCameraAccess");
status_t err = OK;
- if ((err = isCameraAvailable(camera, proxy, cameraId)) != OK) {
+ if ((err = isCameraAvailable(camera, proxy, cameraId,
+ clientName, clientUid)) != OK) {
ALOGE("Camera connection could not be established.");
return err;
}
@@ -525,7 +536,7 @@ status_t CameraSource::initWithCameraAccess(
if (mSurface != NULL) {
// This CHECK is good, since we just passed the lock/unlock
// check earlier by calling mCamera->setParameters().
- CHECK_EQ((status_t)OK, mCamera->setPreviewDisplay(mSurface));
+ CHECK_EQ((status_t)OK, mCamera->setPreviewTexture(mSurface));
}
// By default, do not store metadata in video buffers
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 26ce7ae..20214e8 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -36,13 +36,16 @@ CameraSourceTimeLapse *CameraSourceTimeLapse::CreateFromCamera(
const sp<ICamera> &camera,
const sp<ICameraRecordingProxy> &proxy,
int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
Size videoSize,
int32_t videoFrameRate,
- const sp<Surface>& surface,
+ const sp<IGraphicBufferProducer>& surface,
int64_t timeBetweenFrameCaptureUs) {
CameraSourceTimeLapse *source = new
CameraSourceTimeLapse(camera, proxy, cameraId,
+ clientName, clientUid,
videoSize, videoFrameRate, surface,
timeBetweenFrameCaptureUs);
@@ -59,11 +62,14 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
const sp<ICamera>& camera,
const sp<ICameraRecordingProxy>& proxy,
int32_t cameraId,
+ const String16& clientName,
+ uid_t clientUid,
Size videoSize,
int32_t videoFrameRate,
- const sp<Surface>& surface,
+ const sp<IGraphicBufferProducer>& surface,
int64_t timeBetweenFrameCaptureUs)
- : CameraSource(camera, proxy, cameraId, videoSize, videoFrameRate, surface, true),
+ : CameraSource(camera, proxy, cameraId, clientName, clientUid,
+ videoSize, videoFrameRate, surface, true),
mTimeBetweenTimeLapseVideoFramesUs(1E6/videoFrameRate),
mLastTimeLapseFrameRealTimestampUs(0),
mSkipCurrentFrame(false) {
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 9d0eea2..fc6fd9c 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -23,7 +23,6 @@
#include "include/AACExtractor.h"
#include "include/DRMExtractor.h"
#include "include/FLACExtractor.h"
-#include "include/FragmentedMP4Extractor.h"
#include "include/HTTPBase.h"
#include "include/MP3Extractor.h"
#include "include/MPEG2PSExtractor.h"
@@ -59,6 +58,45 @@ bool DataSource::getUInt16(off64_t offset, uint16_t *x) {
return true;
}
+bool DataSource::getUInt24(off64_t offset, uint32_t *x) {
+ *x = 0;
+
+ uint8_t byte[3];
+ if (readAt(offset, byte, 3) != 3) {
+ return false;
+ }
+
+ *x = (byte[0] << 16) | (byte[1] << 8) | byte[2];
+
+ return true;
+}
+
+bool DataSource::getUInt32(off64_t offset, uint32_t *x) {
+ *x = 0;
+
+ uint32_t tmp;
+ if (readAt(offset, &tmp, 4) != 4) {
+ return false;
+ }
+
+ *x = ntohl(tmp);
+
+ return true;
+}
+
+bool DataSource::getUInt64(off64_t offset, uint64_t *x) {
+ *x = 0;
+
+ uint64_t tmp;
+ if (readAt(offset, &tmp, 8) != 8) {
+ return false;
+ }
+
+ *x = ntoh64(tmp);
+
+ return true;
+}
+
status_t DataSource::getSize(off64_t *size) {
*size = 0;
@@ -111,7 +149,6 @@ void DataSource::RegisterSniffer(SnifferFunc func) {
// static
void DataSource::RegisterDefaultSniffers() {
RegisterSniffer(SniffMPEG4);
- RegisterSniffer(SniffFragmentedMP4);
RegisterSniffer(SniffMatroska);
RegisterSniffer(SniffOgg);
RegisterSniffer(SniffWAV);
diff --git a/media/libstagefright/FragmentedMP4Extractor.cpp b/media/libstagefright/FragmentedMP4Extractor.cpp
deleted file mode 100644
index 82712ef..0000000
--- a/media/libstagefright/FragmentedMP4Extractor.cpp
+++ /dev/null
@@ -1,460 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "FragmentedMP4Extractor"
-#include <utils/Log.h>
-
-#include "include/FragmentedMP4Extractor.h"
-#include "include/SampleTable.h"
-#include "include/ESDS.h"
-
-#include <arpa/inet.h>
-
-#include <ctype.h>
-#include <stdint.h>
-#include <stdlib.h>
-#include <string.h>
-
-#include <cutils/properties.h> // for property_get
-
-#include <media/stagefright/foundation/ABitReader.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
-#include <media/stagefright/MediaBuffer.h>
-#include <media/stagefright/MediaBufferGroup.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaSource.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
-#include <utils/String8.h>
-
-namespace android {
-
-class FragmentedMPEG4Source : public MediaSource {
-public:
- // Caller retains ownership of the Parser
- FragmentedMPEG4Source(bool audio,
- const sp<MetaData> &format,
- const sp<FragmentedMP4Parser> &parser,
- const sp<FragmentedMP4Extractor> &extractor);
-
- virtual status_t start(MetaData *params = NULL);
- virtual status_t stop();
-
- virtual sp<MetaData> getFormat();
-
- virtual status_t read(
- MediaBuffer **buffer, const ReadOptions *options = NULL);
-
-protected:
- virtual ~FragmentedMPEG4Source();
-
-private:
- Mutex mLock;
-
- sp<MetaData> mFormat;
- sp<FragmentedMP4Parser> mParser;
- sp<FragmentedMP4Extractor> mExtractor;
- bool mIsAudioTrack;
- uint32_t mCurrentSampleIndex;
-
- bool mIsAVC;
- size_t mNALLengthSize;
-
- bool mStarted;
-
- MediaBufferGroup *mGroup;
-
- bool mWantsNALFragments;
-
- uint8_t *mSrcBuffer;
-
- FragmentedMPEG4Source(const FragmentedMPEG4Source &);
- FragmentedMPEG4Source &operator=(const FragmentedMPEG4Source &);
-};
-
-
-FragmentedMP4Extractor::FragmentedMP4Extractor(const sp<DataSource> &source)
- : mLooper(new ALooper),
- mParser(new FragmentedMP4Parser()),
- mDataSource(source),
- mInitCheck(NO_INIT),
- mFileMetaData(new MetaData) {
- ALOGV("FragmentedMP4Extractor");
- mLooper->registerHandler(mParser);
- mLooper->start(false /* runOnCallingThread */);
- mParser->start(mDataSource);
-
- bool hasVideo = mParser->getFormat(false /* audio */, true /* synchronous */) != NULL;
- bool hasAudio = mParser->getFormat(true /* audio */, true /* synchronous */) != NULL;
-
- ALOGV("number of tracks: %d", countTracks());
-
- if (hasVideo) {
- mFileMetaData->setCString(
- kKeyMIMEType, MEDIA_MIMETYPE_CONTAINER_MPEG4);
- } else if (hasAudio) {
- mFileMetaData->setCString(kKeyMIMEType, "audio/mp4");
- } else {
- ALOGE("no audio and no video, no idea what file type this is");
- }
- // tracks are numbered such that video track is first, audio track is second
- if (hasAudio && hasVideo) {
- mTrackCount = 2;
- mAudioTrackIndex = 1;
- } else if (hasAudio) {
- mTrackCount = 1;
- mAudioTrackIndex = 0;
- } else if (hasVideo) {
- mTrackCount = 1;
- mAudioTrackIndex = -1;
- } else {
- mTrackCount = 0;
- mAudioTrackIndex = -1;
- }
-}
-
-FragmentedMP4Extractor::~FragmentedMP4Extractor() {
- ALOGV("~FragmentedMP4Extractor");
- mLooper->stop();
-}
-
-uint32_t FragmentedMP4Extractor::flags() const {
- return CAN_PAUSE |
- (mParser->isSeekable() ? (CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK) : 0);
-}
-
-sp<MetaData> FragmentedMP4Extractor::getMetaData() {
- return mFileMetaData;
-}
-
-size_t FragmentedMP4Extractor::countTracks() {
- return mTrackCount;
-}
-
-
-sp<MetaData> FragmentedMP4Extractor::getTrackMetaData(
- size_t index, uint32_t flags) {
- if (index >= countTracks()) {
- return NULL;
- }
-
- sp<AMessage> msg = mParser->getFormat(index == mAudioTrackIndex, true /* synchronous */);
-
- if (msg == NULL) {
- ALOGV("got null format for track %d", index);
- return NULL;
- }
-
- sp<MetaData> meta = new MetaData();
- convertMessageToMetaData(msg, meta);
- return meta;
-}
-
-static void MakeFourCCString(uint32_t x, char *s) {
- s[0] = x >> 24;
- s[1] = (x >> 16) & 0xff;
- s[2] = (x >> 8) & 0xff;
- s[3] = x & 0xff;
- s[4] = '\0';
-}
-
-sp<MediaSource> FragmentedMP4Extractor::getTrack(size_t index) {
- if (index >= countTracks()) {
- return NULL;
- }
- return new FragmentedMPEG4Source(index == mAudioTrackIndex, getTrackMetaData(index, 0), mParser, this);
-}
-
-
-////////////////////////////////////////////////////////////////////////////////
-
-FragmentedMPEG4Source::FragmentedMPEG4Source(
- bool audio,
- const sp<MetaData> &format,
- const sp<FragmentedMP4Parser> &parser,
- const sp<FragmentedMP4Extractor> &extractor)
- : mFormat(format),
- mParser(parser),
- mExtractor(extractor),
- mIsAudioTrack(audio),
- mStarted(false),
- mGroup(NULL),
- mWantsNALFragments(false),
- mSrcBuffer(NULL) {
-}
-
-FragmentedMPEG4Source::~FragmentedMPEG4Source() {
- if (mStarted) {
- stop();
- }
-}
-
-status_t FragmentedMPEG4Source::start(MetaData *params) {
- Mutex::Autolock autoLock(mLock);
-
- CHECK(!mStarted);
-
- int32_t val;
- if (params && params->findInt32(kKeyWantsNALFragments, &val)
- && val != 0) {
- mWantsNALFragments = true;
- } else {
- mWantsNALFragments = false;
- }
- ALOGV("caller wants NAL fragments: %s", mWantsNALFragments ? "yes" : "no");
-
- mGroup = new MediaBufferGroup;
-
- int32_t max_size = 65536;
- // XXX CHECK(mFormat->findInt32(kKeyMaxInputSize, &max_size));
-
- mGroup->add_buffer(new MediaBuffer(max_size));
-
- mSrcBuffer = new uint8_t[max_size];
-
- mStarted = true;
-
- return OK;
-}
-
-status_t FragmentedMPEG4Source::stop() {
- Mutex::Autolock autoLock(mLock);
-
- CHECK(mStarted);
-
- delete[] mSrcBuffer;
- mSrcBuffer = NULL;
-
- delete mGroup;
- mGroup = NULL;
-
- mStarted = false;
- mCurrentSampleIndex = 0;
-
- return OK;
-}
-
-sp<MetaData> FragmentedMPEG4Source::getFormat() {
- Mutex::Autolock autoLock(mLock);
-
- return mFormat;
-}
-
-
-status_t FragmentedMPEG4Source::read(
- MediaBuffer **out, const ReadOptions *options) {
- int64_t seekTimeUs;
- ReadOptions::SeekMode mode;
- if (options && options->getSeekTo(&seekTimeUs, &mode)) {
- mParser->seekTo(mIsAudioTrack, seekTimeUs);
- }
- MediaBuffer *buffer = NULL;
- mGroup->acquire_buffer(&buffer);
- sp<ABuffer> parseBuffer;
-
- status_t ret = mParser->dequeueAccessUnit(mIsAudioTrack, &parseBuffer, true /* synchronous */);
- if (ret != OK) {
- buffer->release();
- ALOGV("returning %d", ret);
- return ret;
- }
- sp<AMessage> meta = parseBuffer->meta();
- int64_t timeUs;
- CHECK(meta->findInt64("timeUs", &timeUs));
- buffer->meta_data()->setInt64(kKeyTime, timeUs);
- buffer->set_range(0, parseBuffer->size());
- memcpy(buffer->data(), parseBuffer->data(), parseBuffer->size());
- *out = buffer;
- return OK;
-}
-
-
-static bool isCompatibleBrand(uint32_t fourcc) {
- static const uint32_t kCompatibleBrands[] = {
- FOURCC('i', 's', 'o', 'm'),
- FOURCC('i', 's', 'o', '2'),
- FOURCC('a', 'v', 'c', '1'),
- FOURCC('3', 'g', 'p', '4'),
- FOURCC('m', 'p', '4', '1'),
- FOURCC('m', 'p', '4', '2'),
-
- // Won't promise that the following file types can be played.
- // Just give these file types a chance.
- FOURCC('q', 't', ' ', ' '), // Apple's QuickTime
- FOURCC('M', 'S', 'N', 'V'), // Sony's PSP
-
- FOURCC('3', 'g', '2', 'a'), // 3GPP2
- FOURCC('3', 'g', '2', 'b'),
- };
-
- for (size_t i = 0;
- i < sizeof(kCompatibleBrands) / sizeof(kCompatibleBrands[0]);
- ++i) {
- if (kCompatibleBrands[i] == fourcc) {
- return true;
- }
- }
-
- return false;
-}
-
-// Attempt to actually parse the 'ftyp' atom and determine if a suitable
-// compatible brand is present.
-// Also try to identify where this file's metadata ends
-// (end of the 'moov' atom) and report it to the caller as part of
-// the metadata.
-static bool Sniff(
- const sp<DataSource> &source, String8 *mimeType, float *confidence,
- sp<AMessage> *meta) {
- // We scan up to 128k bytes to identify this file as an MP4.
- static const off64_t kMaxScanOffset = 128ll * 1024ll;
-
- off64_t offset = 0ll;
- bool foundGoodFileType = false;
- bool isFragmented = false;
- off64_t moovAtomEndOffset = -1ll;
- bool done = false;
-
- while (!done && offset < kMaxScanOffset) {
- uint32_t hdr[2];
- if (source->readAt(offset, hdr, 8) < 8) {
- return false;
- }
-
- uint64_t chunkSize = ntohl(hdr[0]);
- uint32_t chunkType = ntohl(hdr[1]);
- off64_t chunkDataOffset = offset + 8;
-
- if (chunkSize == 1) {
- if (source->readAt(offset + 8, &chunkSize, 8) < 8) {
- return false;
- }
-
- chunkSize = ntoh64(chunkSize);
- chunkDataOffset += 8;
-
- if (chunkSize < 16) {
- // The smallest valid chunk is 16 bytes long in this case.
- return false;
- }
- } else if (chunkSize < 8) {
- // The smallest valid chunk is 8 bytes long.
- return false;
- }
-
- off64_t chunkDataSize = offset + chunkSize - chunkDataOffset;
-
- char chunkstring[5];
- MakeFourCCString(chunkType, chunkstring);
- ALOGV("saw chunk type %s, size %lld @ %lld", chunkstring, chunkSize, offset);
- switch (chunkType) {
- case FOURCC('f', 't', 'y', 'p'):
- {
- if (chunkDataSize < 8) {
- return false;
- }
-
- uint32_t numCompatibleBrands = (chunkDataSize - 8) / 4;
- for (size_t i = 0; i < numCompatibleBrands + 2; ++i) {
- if (i == 1) {
- // Skip this index, it refers to the minorVersion,
- // not a brand.
- continue;
- }
-
- uint32_t brand;
- if (source->readAt(
- chunkDataOffset + 4 * i, &brand, 4) < 4) {
- return false;
- }
-
- brand = ntohl(brand);
- char brandstring[5];
- MakeFourCCString(brand, brandstring);
- ALOGV("Brand: %s", brandstring);
-
- if (isCompatibleBrand(brand)) {
- foundGoodFileType = true;
- break;
- }
- }
-
- if (!foundGoodFileType) {
- return false;
- }
-
- break;
- }
-
- case FOURCC('m', 'o', 'o', 'v'):
- {
- moovAtomEndOffset = offset + chunkSize;
- break;
- }
-
- case FOURCC('m', 'o', 'o', 'f'):
- {
- // this is kind of broken, since we might not actually find a
- // moof box in the first 128k.
- isFragmented = true;
- done = true;
- break;
- }
-
- default:
- break;
- }
-
- offset += chunkSize;
- }
-
- if (!foundGoodFileType || !isFragmented) {
- return false;
- }
-
- *mimeType = MEDIA_MIMETYPE_CONTAINER_MPEG4;
- *confidence = 0.5f; // slightly more than MPEG4Extractor
-
- if (moovAtomEndOffset >= 0) {
- *meta = new AMessage;
- (*meta)->setInt64("meta-data-size", moovAtomEndOffset);
- (*meta)->setInt32("fragmented", 1); // tell MediaExtractor what to instantiate
-
- ALOGV("found metadata size: %lld", moovAtomEndOffset);
- }
-
- return true;
-}
-
-// used by DataSource::RegisterDefaultSniffers
-bool SniffFragmentedMP4(
- const sp<DataSource> &source, String8 *mimeType, float *confidence,
- sp<AMessage> *meta) {
- ALOGV("SniffFragmentedMP4");
- char prop[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.use-fragmp4", prop, NULL)
- && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) {
- return Sniff(source, mimeType, confidence, meta);
- }
-
- return false;
-}
-
-} // namespace android
diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp
index 40bfc55..d2cc6c2 100644
--- a/media/libstagefright/HTTPBase.cpp
+++ b/media/libstagefright/HTTPBase.cpp
@@ -58,6 +58,16 @@ sp<HTTPBase> HTTPBase::Create(uint32_t flags) {
}
}
+// static
+status_t HTTPBase::UpdateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList) {
+#if CHROMIUM_AVAILABLE
+ return UpdateChromiumHTTPDataSourceProxyConfig(host, port, exclusionList);
+#else
+ return INVALID_OPERATION;
+#endif
+}
+
void HTTPBase::addBandwidthMeasurement(
size_t numBytes, int64_t delayUs) {
Mutex::Autolock autoLock(mLock);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 1a62f9d..145869e 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -22,8 +22,6 @@
#include "include/SampleTable.h"
#include "include/ESDS.h"
-#include <arpa/inet.h>
-
#include <ctype.h>
#include <stdint.h>
#include <stdlib.h>
@@ -33,13 +31,11 @@
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
-#include <media/stagefright/Utils.h>
#include <utils/String8.h>
namespace android {
@@ -50,15 +46,17 @@ public:
MPEG4Source(const sp<MetaData> &format,
const sp<DataSource> &dataSource,
int32_t timeScale,
- const sp<SampleTable> &sampleTable);
+ const sp<SampleTable> &sampleTable,
+ Vector<SidxEntry> &sidx,
+ off64_t firstMoofOffset);
virtual status_t start(MetaData *params = NULL);
virtual status_t stop();
virtual sp<MetaData> getFormat();
- virtual status_t read(
- MediaBuffer **buffer, const ReadOptions *options = NULL);
+ virtual status_t read(MediaBuffer **buffer, const ReadOptions *options = NULL);
+ virtual status_t fragmentedRead(MediaBuffer **buffer, const ReadOptions *options = NULL);
protected:
virtual ~MPEG4Source();
@@ -71,6 +69,27 @@ private:
int32_t mTimescale;
sp<SampleTable> mSampleTable;
uint32_t mCurrentSampleIndex;
+ uint32_t mCurrentFragmentIndex;
+ Vector<SidxEntry> &mSegments;
+ off64_t mFirstMoofOffset;
+ off64_t mCurrentMoofOffset;
+ off64_t mNextMoofOffset;
+ uint32_t mCurrentTime;
+ int32_t mLastParsedTrackId;
+ int32_t mTrackId;
+
+ int32_t mCryptoMode; // passed in from extractor
+ int32_t mDefaultIVSize; // passed in from extractor
+ uint8_t mCryptoKey[16]; // passed in from extractor
+ uint32_t mCurrentAuxInfoType;
+ uint32_t mCurrentAuxInfoTypeParameter;
+ int32_t mCurrentDefaultSampleInfoSize;
+ uint32_t mCurrentSampleInfoCount;
+ uint32_t mCurrentSampleInfoAllocSize;
+ uint8_t* mCurrentSampleInfoSizes;
+ uint32_t mCurrentSampleInfoOffsetCount;
+ uint32_t mCurrentSampleInfoOffsetsAllocSize;
+ uint64_t* mCurrentSampleInfoOffsets;
bool mIsAVC;
size_t mNALLengthSize;
@@ -86,6 +105,43 @@ private:
uint8_t *mSrcBuffer;
size_t parseNALSize(const uint8_t *data) const;
+ status_t parseChunk(off64_t *offset);
+ status_t parseTrackFragmentHeader(off64_t offset, off64_t size);
+ status_t parseTrackFragmentRun(off64_t offset, off64_t size);
+ status_t parseSampleAuxiliaryInformationSizes(off64_t offset, off64_t size);
+ status_t parseSampleAuxiliaryInformationOffsets(off64_t offset, off64_t size);
+
+ struct TrackFragmentHeaderInfo {
+ enum Flags {
+ kBaseDataOffsetPresent = 0x01,
+ kSampleDescriptionIndexPresent = 0x02,
+ kDefaultSampleDurationPresent = 0x08,
+ kDefaultSampleSizePresent = 0x10,
+ kDefaultSampleFlagsPresent = 0x20,
+ kDurationIsEmpty = 0x10000,
+ };
+
+ uint32_t mTrackID;
+ uint32_t mFlags;
+ uint64_t mBaseDataOffset;
+ uint32_t mSampleDescriptionIndex;
+ uint32_t mDefaultSampleDuration;
+ uint32_t mDefaultSampleSize;
+ uint32_t mDefaultSampleFlags;
+
+ uint64_t mDataOffset;
+ };
+ TrackFragmentHeaderInfo mTrackFragmentHeaderInfo;
+
+ struct Sample {
+ off64_t offset;
+ size_t size;
+ uint32_t duration;
+ uint8_t iv[16];
+ Vector<size_t> clearsizes;
+ Vector<size_t> encryptedsizes;
+ };
+ Vector<Sample> mCurrentSamples;
MPEG4Source(const MPEG4Source &);
MPEG4Source &operator=(const MPEG4Source &);
@@ -264,8 +320,25 @@ static const char *FourCC2MIME(uint32_t fourcc) {
}
}
+static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t *rate) {
+ if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, FourCC2MIME(fourcc))) {
+ // AMR NB audio is always mono, 8kHz
+ *channels = 1;
+ *rate = 8000;
+ return true;
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, FourCC2MIME(fourcc))) {
+ // AMR WB audio is always mono, 16kHz
+ *channels = 1;
+ *rate = 16000;
+ return true;
+ }
+ return false;
+}
+
MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source)
- : mDataSource(source),
+ : mSidxDuration(0),
+ mMoofOffset(0),
+ mDataSource(source),
mInitCheck(NO_INIT),
mHasVideo(false),
mFirstTrack(NULL),
@@ -293,6 +366,16 @@ MPEG4Extractor::~MPEG4Extractor() {
sinf = next;
}
mFirstSINF = NULL;
+
+ for (size_t i = 0; i < mPssh.size(); i++) {
+ delete [] mPssh[i].data;
+ }
+}
+
+uint32_t MPEG4Extractor::flags() const {
+ return CAN_PAUSE |
+ ((mMoofOffset == 0 || mSidxEntries.size() != 0) ?
+ (CAN_SEEK_BACKWARD | CAN_SEEK_FORWARD | CAN_SEEK) : 0);
}
sp<MetaData> MPEG4Extractor::getMetaData() {
@@ -307,6 +390,7 @@ sp<MetaData> MPEG4Extractor::getMetaData() {
size_t MPEG4Extractor::countTracks() {
status_t err;
if ((err = readMetaData()) != OK) {
+ ALOGV("MPEG4Extractor::countTracks: no tracks");
return 0;
}
@@ -317,6 +401,7 @@ size_t MPEG4Extractor::countTracks() {
track = track->next;
}
+ ALOGV("MPEG4Extractor::countTracks: %d tracks", n);
return n;
}
@@ -348,15 +433,24 @@ sp<MetaData> MPEG4Extractor::getTrackMetaData(
const char *mime;
CHECK(track->meta->findCString(kKeyMIMEType, &mime));
if (!strncasecmp("video/", mime, 6)) {
- uint32_t sampleIndex;
- uint32_t sampleTime;
- if (track->sampleTable->findThumbnailSample(&sampleIndex) == OK
- && track->sampleTable->getMetaDataForSample(
- sampleIndex, NULL /* offset */, NULL /* size */,
- &sampleTime) == OK) {
- track->meta->setInt64(
- kKeyThumbnailTime,
- ((int64_t)sampleTime * 1000000) / track->timescale);
+ if (mMoofOffset > 0) {
+ int64_t duration;
+ if (track->meta->findInt64(kKeyDuration, &duration)) {
+ // nothing fancy, just pick a frame near 1/4th of the duration
+ track->meta->setInt64(
+ kKeyThumbnailTime, duration / 4);
+ }
+ } else {
+ uint32_t sampleIndex;
+ uint32_t sampleTime;
+ if (track->sampleTable->findThumbnailSample(&sampleIndex) == OK
+ && track->sampleTable->getMetaDataForSample(
+ sampleIndex, NULL /* offset */, NULL /* size */,
+ &sampleTime) == OK) {
+ track->meta->setInt64(
+ kKeyThumbnailTime,
+ ((int64_t)sampleTime * 1000000) / track->timescale);
+ }
}
}
}
@@ -364,6 +458,14 @@ sp<MetaData> MPEG4Extractor::getTrackMetaData(
return track->meta;
}
+static void MakeFourCCString(uint32_t x, char *s) {
+ s[0] = x >> 24;
+ s[1] = (x >> 16) & 0xff;
+ s[2] = (x >> 8) & 0xff;
+ s[3] = x & 0xff;
+ s[4] = '\0';
+}
+
status_t MPEG4Extractor::readMetaData() {
if (mInitCheck != NO_INIT) {
return mInitCheck;
@@ -371,7 +473,25 @@ status_t MPEG4Extractor::readMetaData() {
off64_t offset = 0;
status_t err;
- while ((err = parseChunk(&offset, 0)) == OK) {
+ while (true) {
+ err = parseChunk(&offset, 0);
+ if (err == OK) {
+ continue;
+ }
+
+ uint32_t hdr[2];
+ if (mDataSource->readAt(offset, hdr, 8) < 8) {
+ break;
+ }
+ uint32_t chunk_type = ntohl(hdr[1]);
+ if (chunk_type == FOURCC('s', 'i', 'd', 'x')) {
+ // parse the sidx box too
+ continue;
+ } else if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ // store the offset of the first segment
+ mMoofOffset = offset;
+ }
+ break;
}
if (mInitCheck == OK) {
@@ -388,6 +508,23 @@ status_t MPEG4Extractor::readMetaData() {
}
CHECK_NE(err, (status_t)NO_INIT);
+
+ // copy pssh data into file metadata
+ int psshsize = 0;
+ for (size_t i = 0; i < mPssh.size(); i++) {
+ psshsize += 20 + mPssh[i].datalen;
+ }
+ if (psshsize) {
+ char *buf = (char*)malloc(psshsize);
+ char *ptr = buf;
+ for (size_t i = 0; i < mPssh.size(); i++) {
+ memcpy(ptr, mPssh[i].uuid, 20); // uuid + length
+ memcpy(ptr + 20, mPssh[i].data, mPssh[i].datalen);
+ ptr += (20 + mPssh[i].datalen);
+ }
+ mFileMetaData->setData(kKeyPssh, 'pssh', buf, psshsize);
+ free(buf);
+ }
return mInitCheck;
}
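The loop above flattens every pssh box into a single kKeyPssh blob: each record is the 16-byte system uuid, the 4-byte data length as stored in PsshInfo (host order after the ntohl in the parser), and then the data itself. A sketch of how a consumer could walk that blob; the helper name and logging are illustrative:

    // Walk the packed kKeyPssh blob: {uuid[16], uint32 length, uint8 data[length]}*
    static void dumpPsshRecords(const uint8_t *blob, size_t size) {
        size_t off = 0;
        while (off + 20 <= size) {
            uint32_t len;
            memcpy(&len, blob + off + 16, sizeof(len));
            if (off + 20 + len > size) {
                break;                              // truncated record
            }
            ALOGV("pssh record: %u bytes of data for uuid starting %02x%02x",
                  len, blob[off], blob[off + 1]);
            off += 20 + len;
        }
    }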
@@ -559,14 +696,6 @@ status_t MPEG4Extractor::parseDrmSINF(off64_t *offset, off64_t data_offset) {
return UNKNOWN_ERROR; // Return a dummy error.
}
-static void MakeFourCCString(uint32_t x, char *s) {
- s[0] = x >> 24;
- s[1] = (x >> 16) & 0xff;
- s[2] = (x >> 8) & 0xff;
- s[3] = x & 0xff;
- s[4] = '\0';
-}
-
struct PathAdder {
PathAdder(Vector<uint32_t> *path, uint32_t chunkType)
: mPath(path) {
@@ -630,7 +759,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
char chunk[5];
MakeFourCCString(chunk_type, chunk);
- ALOGV("chunk: %s @ %lld", chunk, *offset);
+ ALOGV("chunk: %s @ %lld, %d", chunk, *offset, depth);
#if 0
static const char kWhitespace[] = " ";
@@ -686,6 +815,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('m', 'f', 'r', 'a'):
case FOURCC('u', 'd', 't', 'a'):
case FOURCC('i', 'l', 's', 't'):
+ case FOURCC('s', 'i', 'n', 'f'):
+ case FOURCC('s', 'c', 'h', 'i'):
{
if (chunk_type == FOURCC('s', 't', 'b', 'l')) {
ALOGV("sampleTable chunk is %d bytes long.", (size_t)chunk_size);
@@ -773,6 +904,75 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
break;
}
+ case FOURCC('f', 'r', 'm', 'a'):
+ {
+ uint32_t original_fourcc;
+ if (mDataSource->readAt(data_offset, &original_fourcc, 4) < 4) {
+ return ERROR_IO;
+ }
+ original_fourcc = ntohl(original_fourcc);
+ ALOGV("read original format: %d", original_fourcc);
+ mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(original_fourcc));
+ uint32_t num_channels = 0;
+ uint32_t sample_rate = 0;
+ if (AdjustChannelsAndRate(original_fourcc, &num_channels, &sample_rate)) {
+ mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
+ mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
+ }
+ *offset += chunk_size;
+ break;
+ }
+
+ case FOURCC('t', 'e', 'n', 'c'):
+ {
+ if (chunk_size < 32) {
+ return ERROR_MALFORMED;
+ }
+
+ // tenc box contains 1 byte version, 3 byte flags, 3 byte default algorithm id, one byte
+ // default IV size, 16 bytes default KeyID
+ // (ISO 23001-7)
+ char buf[4];
+ memset(buf, 0, 4);
+ if (mDataSource->readAt(data_offset + 4, buf + 1, 3) < 3) {
+ return ERROR_IO;
+ }
+ uint32_t defaultAlgorithmId = ntohl(*((int32_t*)buf));
+ if (defaultAlgorithmId > 1) {
+ // only 0 (clear) and 1 (AES-128) are valid
+ return ERROR_MALFORMED;
+ }
+
+ memset(buf, 0, 4);
+ if (mDataSource->readAt(data_offset + 7, buf + 3, 1) < 1) {
+ return ERROR_IO;
+ }
+ uint32_t defaultIVSize = ntohl(*((int32_t*)buf));
+
+ if ((defaultAlgorithmId == 0 && defaultIVSize != 0) ||
+ (defaultAlgorithmId != 0 && defaultIVSize == 0)) {
+ // IV size must be 0 for clear (unencrypted) content and non-zero for encrypted content
+ return ERROR_MALFORMED;
+ } else if (defaultIVSize != 0 &&
+ defaultIVSize != 8 &&
+ defaultIVSize != 16) {
+ // only supported sizes are 0, 8 and 16
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t defaultKeyId[16];
+
+ if (mDataSource->readAt(data_offset + 8, &defaultKeyId, 16) < 16) {
+ return ERROR_IO;
+ }
+
+ mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId);
+ mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize);
+ mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16);
+ *offset += chunk_size;
+ break;
+ }
+
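For reference, the readAt() offsets used in the 'tenc' case above correspond to the following fixed payload layout from ISO/IEC 23001-7; the struct is only an illustration of those offsets, not code from this change:

    struct TencPayloadLayout {                // relative to data_offset
        uint8_t version;                      // +0
        uint8_t flags[3];                     // +1 .. +3
        uint8_t default_algorithm_id[3];      // +4 .. +6   0 = clear, 1 = AES-CTR
        uint8_t default_iv_size;              // +7         0, 8 or 16
        uint8_t default_kid[16];              // +8 .. +23  default KeyID
    };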
case FOURCC('t', 'k', 'h', 'd'):
{
status_t err;
@@ -784,6 +984,37 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
break;
}
+ case FOURCC('p', 's', 's', 'h'):
+ {
+ PsshInfo pssh;
+
+ if (mDataSource->readAt(data_offset + 4, &pssh.uuid, 16) < 16) {
+ return ERROR_IO;
+ }
+
+ uint32_t psshdatalen = 0;
+ if (mDataSource->readAt(data_offset + 20, &psshdatalen, 4) < 4) {
+ return ERROR_IO;
+ }
+ pssh.datalen = ntohl(psshdatalen);
+ ALOGV("pssh data size: %d", pssh.datalen);
+ if (pssh.datalen + 20 > chunk_size) {
+ // pssh data length exceeds size of containing box
+ return ERROR_MALFORMED;
+ }
+
+ pssh.data = new uint8_t[pssh.datalen];
+ ALOGV("allocated pssh @ %p", pssh.data);
+ ssize_t requested = (ssize_t) pssh.datalen;
+ if (mDataSource->readAt(data_offset + 24, pssh.data, requested) < requested) {
+ return ERROR_IO;
+ }
+ mPssh.push_back(pssh);
+
+ *offset += chunk_size;
+ break;
+ }
+
case FOURCC('m', 'd', 'h', 'd'):
{
if (chunk_data_size < 4) {
@@ -816,7 +1047,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->timescale = ntohl(timescale);
- int64_t duration;
+ int64_t duration = 0;
if (version == 1) {
if (mDataSource->readAt(
timescale_offset + 4, &duration, sizeof(duration))
@@ -825,13 +1056,16 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
duration = ntoh64(duration);
} else {
- int32_t duration32;
+ uint32_t duration32;
if (mDataSource->readAt(
timescale_offset + 4, &duration32, sizeof(duration32))
< (ssize_t)sizeof(duration32)) {
return ERROR_IO;
}
- duration = ntohl(duration32);
+ // ffmpeg sets duration to -1, which is incorrect.
+ if (duration32 != 0xffffffff) {
+ duration = ntohl(duration32);
+ }
}
mLastTrack->meta->setInt64(
kKeyDuration, (duration * 1000000) / mLastTrack->timescale);
@@ -894,16 +1128,17 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
// For 3GPP timed text, there could be multiple tx3g boxes contain
// multiple text display formats. These formats will be used to
// display the timed text.
+ // For encrypted files, there may also be more than one entry.
const char *mime;
CHECK(mLastTrack->meta->findCString(kKeyMIMEType, &mime));
- if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP)) {
+ if (strcasecmp(mime, MEDIA_MIMETYPE_TEXT_3GPP) &&
+ strcasecmp(mime, "application/octet-stream")) {
// For now we only support a single type of media per track.
mLastTrack->skipTrack = true;
*offset += chunk_size;
break;
}
}
-
off64_t stop_offset = *offset + chunk_size;
*offset = data_offset + 8;
for (uint32_t i = 0; i < entry_count; ++i) {
@@ -920,6 +1155,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
case FOURCC('m', 'p', '4', 'a'):
+ case FOURCC('e', 'n', 'c', 'a'):
case FOURCC('s', 'a', 'm', 'r'):
case FOURCC('s', 'a', 'w', 'b'):
{
@@ -935,29 +1171,18 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
uint16_t data_ref_index = U16_AT(&buffer[6]);
- uint16_t num_channels = U16_AT(&buffer[16]);
+ uint32_t num_channels = U16_AT(&buffer[16]);
uint16_t sample_size = U16_AT(&buffer[18]);
uint32_t sample_rate = U32_AT(&buffer[24]) >> 16;
- if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB,
- FourCC2MIME(chunk_type))) {
- // AMR NB audio is always mono, 8kHz
- num_channels = 1;
- sample_rate = 8000;
- } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB,
- FourCC2MIME(chunk_type))) {
- // AMR WB audio is always mono, 16kHz
- num_channels = 1;
- sample_rate = 16000;
+ if (chunk_type != FOURCC('e', 'n', 'c', 'a')) {
+ // if the chunk type is enca, we'll get the type from the sinf/frma box later
+ mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
+ AdjustChannelsAndRate(chunk_type, &num_channels, &sample_rate);
}
-
-#if 0
- printf("*** coding='%s' %d channels, size %d, rate %d\n",
+ ALOGV("*** coding='%s' %d channels, size %d, rate %d\n",
chunk, num_channels, sample_size, sample_rate);
-#endif
-
- mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
@@ -977,6 +1202,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
case FOURCC('m', 'p', '4', 'v'):
+ case FOURCC('e', 'n', 'c', 'v'):
case FOURCC('s', '2', '6', '3'):
case FOURCC('H', '2', '6', '3'):
case FOURCC('h', '2', '6', '3'):
@@ -999,7 +1225,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
uint16_t width = U16_AT(&buffer[6 + 18]);
uint16_t height = U16_AT(&buffer[6 + 20]);
- // The video sample is not stand-compliant if it has invalid dimension.
+ // The video sample is not standard-compliant if it has invalid dimension.
// Use some default width and height value, and
// let the decoder figure out the actual width and height (and thus
// be prepared for INFO_FOMRAT_CHANGED event).
@@ -1009,7 +1235,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
// printf("*** coding='%s' width=%d height=%d\n",
// chunk, width, height);
- mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
+ if (chunk_type != FOURCC('e', 'n', 'c', 'v')) {
+ // if the chunk type is encv, we'll get the type from the sinf/frma box later
+ mLastTrack->meta->setCString(kKeyMIMEType, FourCC2MIME(chunk_type));
+ }
mLastTrack->meta->setInt32(kKeyWidth, width);
mLastTrack->meta->setInt32(kKeyHeight, height);
@@ -1075,11 +1304,23 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return err;
}
- // Assume that a given buffer only contains at most 10 fragments,
- // each fragment originally prefixed with a 2 byte length will
- // have a 4 byte header (0x00 0x00 0x00 0x01) after conversion,
- // and thus will grow by 2 bytes per fragment.
- mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2);
+ if (max_size != 0) {
+ // Assume that a given buffer only contains at most 10 chunks;
+ // each chunk, originally prefixed with a 2-byte length, will
+ // have a 4-byte header (0x00 0x00 0x00 0x01) after conversion
+ // and thus will grow by 2 bytes per chunk.
+ mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size + 10 * 2);
+ } else {
+ // No size was specified. Pick a conservatively large size.
+ int32_t width, height;
+ if (mLastTrack->meta->findInt32(kKeyWidth, &width) &&
+ mLastTrack->meta->findInt32(kKeyHeight, &height)) {
+ mLastTrack->meta->setInt32(kKeyMaxInputSize, width * height * 3 / 2);
+ } else {
+ ALOGE("No width or height, assuming worst case 1080p");
+ mLastTrack->meta->setInt32(kKeyMaxInputSize, 3110400);
+ }
+ }
*offset += chunk_size;
// Calculate average frame rate.
@@ -1354,6 +1595,7 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('m', 'd', 'a', 't'):
{
+ ALOGV("mdat chunk, drm: %d", mIsDrm);
if (!mIsDrm) {
*offset += chunk_size;
break;
@@ -1448,6 +1690,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
break;
}
+ case FOURCC('s', 'i', 'd', 'x'):
+ {
+ parseSegmentIndex(data_offset, chunk_data_size);
+ *offset += chunk_size;
+ return UNKNOWN_ERROR; // stop parsing after sidx
+ }
+
default:
{
*offset += chunk_size;
@@ -1458,6 +1707,125 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return OK;
}
+status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
+ ALOGV("MPEG4Extractor::parseSegmentIndex");
+
+ if (size < 12) {
+ return -EINVAL;
+ }
+
+ uint32_t flags;
+ if (!mDataSource->getUInt32(offset, &flags)) {
+ return ERROR_MALFORMED;
+ }
+
+ uint32_t version = flags >> 24;
+ flags &= 0xffffff;
+
+ ALOGV("sidx version %d", version);
+
+ uint32_t referenceId;
+ if (!mDataSource->getUInt32(offset + 4, &referenceId)) {
+ return ERROR_MALFORMED;
+ }
+
+ uint32_t timeScale;
+ if (!mDataSource->getUInt32(offset + 8, &timeScale)) {
+ return ERROR_MALFORMED;
+ }
+ ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale);
+
+ uint64_t earliestPresentationTime;
+ uint64_t firstOffset;
+
+ offset += 12;
+ size -= 12;
+
+ if (version == 0) {
+ if (size < 8) {
+ return -EINVAL;
+ }
+ uint32_t tmp;
+ if (!mDataSource->getUInt32(offset, &tmp)) {
+ return ERROR_MALFORMED;
+ }
+ earliestPresentationTime = tmp;
+ if (!mDataSource->getUInt32(offset + 4, &tmp)) {
+ return ERROR_MALFORMED;
+ }
+ firstOffset = tmp;
+ offset += 8;
+ size -= 8;
+ } else {
+ if (size < 16) {
+ return -EINVAL;
+ }
+ if (!mDataSource->getUInt64(offset, &earliestPresentationTime)) {
+ return ERROR_MALFORMED;
+ }
+ if (!mDataSource->getUInt64(offset + 8, &firstOffset)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 16;
+ size -= 16;
+ }
+ ALOGV("sidx pres/off: %Ld/%Ld", earliestPresentationTime, firstOffset);
+
+ if (size < 4) {
+ return -EINVAL;
+ }
+
+ uint16_t referenceCount;
+ if (!mDataSource->getUInt16(offset + 2, &referenceCount)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ size -= 4;
+ ALOGV("refcount: %d", referenceCount);
+
+ if (size < referenceCount * 12) {
+ return -EINVAL;
+ }
+
+ uint64_t total_duration = 0;
+ for (unsigned int i = 0; i < referenceCount; i++) {
+ uint32_t d1, d2, d3;
+
+ if (!mDataSource->getUInt32(offset, &d1) || // size
+ !mDataSource->getUInt32(offset + 4, &d2) || // duration
+ !mDataSource->getUInt32(offset + 8, &d3)) { // flags
+ return ERROR_MALFORMED;
+ }
+
+ if (d1 & 0x80000000) {
+ ALOGW("sub-sidx boxes not supported yet");
+ }
+ bool sap = d3 & 0x80000000;
+ uint32_t saptype = (d3 >> 28) & 7;
+ if (!sap || saptype > 2) {
+ ALOGW("not a stream access point, or unsupported type");
+ }
+ total_duration += d2;
+ offset += 12;
+ ALOGV(" item %d, %08x %08x %08x", i, d1, d2, d3);
+ SidxEntry se;
+ se.mSize = d1 & 0x7fffffff;
+ se.mDurationUs = 1000000LL * d2 / timeScale;
+ mSidxEntries.add(se);
+ }
+
+ mSidxDuration = total_duration * 1000000 / timeScale;
+ ALOGV("duration: %lld", mSidxDuration);
+
+ int64_t metaDuration;
+ if (!mLastTrack->meta->findInt64(kKeyDuration, &metaDuration) || metaDuration == 0) {
+ mLastTrack->meta->setInt64(kKeyDuration, mSidxDuration);
+ }
+ return OK;
+}
+
+
+
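Each 12-byte reference entry read in the loop above packs three 32-bit words; the masks and shifts in parseSegmentIndex() assume the 'sidx' layout from ISO/IEC 14496-12, roughly:

    // d1: reference_type (1 bit)  | referenced_size (31 bits)
    // d2: subsegment_duration (32 bits), in 'timeScale' units
    // d3: starts_with_SAP (1 bit) | SAP_type (3 bits) | SAP_delta_time (28 bits)
    bool     isSubSidx  = (d1 & 0x80000000) != 0;
    uint32_t refSize    = d1 & 0x7fffffff;
    int64_t  durationUs = 1000000LL * d2 / timeScale;  // what goes into SidxEntry::mDurationUs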
status_t MPEG4Extractor::parseTrackHeader(
off64_t data_offset, off64_t data_size) {
if (data_size < 4) {
@@ -1754,8 +2122,11 @@ sp<MediaSource> MPEG4Extractor::getTrack(size_t index) {
return NULL;
}
+ ALOGV("getTrack called, pssh: %d", mPssh.size());
+
return new MPEG4Source(
- track->meta, mDataSource, track->timescale, track->sampleTable);
+ track->meta, mDataSource, track->timescale, track->sampleTable,
+ mSidxEntries, mMoofOffset);
}
// static
@@ -1852,17 +2223,30 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
sampleRate = br.getBits(24);
numChannels = br.getBits(4);
} else {
- static uint32_t kSamplingRate[] = {
- 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
- 16000, 12000, 11025, 8000, 7350
- };
-
- if (freqIndex == 13 || freqIndex == 14) {
- return ERROR_MALFORMED;
+ numChannels = br.getBits(4);
+ if (objectType == 5) {
+ // SBR specific config per 14496-3 table 1.13
+ freqIndex = br.getBits(4);
+ if (freqIndex == 15) {
+ if (csd_size < 8) {
+ return ERROR_MALFORMED;
+ }
+ sampleRate = br.getBits(24);
+ }
}
- sampleRate = kSamplingRate[freqIndex];
- numChannels = br.getBits(4);
+ if (sampleRate == 0) {
+ static uint32_t kSamplingRate[] = {
+ 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
+ 16000, 12000, 11025, 8000, 7350
+ };
+
+ if (freqIndex == 13 || freqIndex == 14) {
+ return ERROR_MALFORMED;
+ }
+
+ sampleRate = kSamplingRate[freqIndex];
+ }
}
if (numChannels == 0) {
@@ -1898,12 +2282,23 @@ MPEG4Source::MPEG4Source(
const sp<MetaData> &format,
const sp<DataSource> &dataSource,
int32_t timeScale,
- const sp<SampleTable> &sampleTable)
+ const sp<SampleTable> &sampleTable,
+ Vector<SidxEntry> &sidx,
+ off64_t firstMoofOffset)
: mFormat(format),
mDataSource(dataSource),
mTimescale(timeScale),
mSampleTable(sampleTable),
mCurrentSampleIndex(0),
+ mCurrentFragmentIndex(0),
+ mSegments(sidx),
+ mFirstMoofOffset(firstMoofOffset),
+ mCurrentMoofOffset(firstMoofOffset),
+ mCurrentTime(0),
+ mCurrentSampleInfoAllocSize(0),
+ mCurrentSampleInfoSizes(NULL),
+ mCurrentSampleInfoOffsetsAllocSize(0),
+ mCurrentSampleInfoOffsets(NULL),
mIsAVC(false),
mNALLengthSize(0),
mStarted(false),
@@ -1911,6 +2306,19 @@ MPEG4Source::MPEG4Source(
mBuffer(NULL),
mWantsNALFragments(false),
mSrcBuffer(NULL) {
+
+ mFormat->findInt32(kKeyCryptoMode, &mCryptoMode);
+ mDefaultIVSize = 0;
+ mFormat->findInt32(kKeyCryptoDefaultIVSize, &mDefaultIVSize);
+ uint32_t keytype;
+ const void *key;
+ size_t keysize;
+ if (mFormat->findData(kKeyCryptoKey, &keytype, &key, &keysize)) {
+ CHECK(keysize <= 16);
+ memset(mCryptoKey, 0, 16);
+ memcpy(mCryptoKey, key, keysize);
+ }
+
const char *mime;
bool success = mFormat->findCString(kKeyMIMEType, &mime);
CHECK(success);
@@ -1931,12 +2339,21 @@ MPEG4Source::MPEG4Source(
// The number of bytes used to encode the length of a NAL unit.
mNALLengthSize = 1 + (ptr[4] & 3);
}
+
+ CHECK(format->findInt32(kKeyTrackID, &mTrackId));
+
+ if (mFirstMoofOffset != 0) {
+ off64_t offset = mFirstMoofOffset;
+ parseChunk(&offset);
+ }
}
MPEG4Source::~MPEG4Source() {
if (mStarted) {
stop();
}
+ free(mCurrentSampleInfoSizes);
+ free(mCurrentSampleInfoOffsets);
}
status_t MPEG4Source::start(MetaData *params) {
@@ -1988,6 +2405,529 @@ status_t MPEG4Source::stop() {
return OK;
}
+status_t MPEG4Source::parseChunk(off64_t *offset) {
+ uint32_t hdr[2];
+ if (mDataSource->readAt(*offset, hdr, 8) < 8) {
+ return ERROR_IO;
+ }
+ uint64_t chunk_size = ntohl(hdr[0]);
+ uint32_t chunk_type = ntohl(hdr[1]);
+ off64_t data_offset = *offset + 8;
+
+ if (chunk_size == 1) {
+ if (mDataSource->readAt(*offset + 8, &chunk_size, 8) < 8) {
+ return ERROR_IO;
+ }
+ chunk_size = ntoh64(chunk_size);
+ data_offset += 8;
+
+ if (chunk_size < 16) {
+ // The smallest valid chunk is 16 bytes long in this case.
+ return ERROR_MALFORMED;
+ }
+ } else if (chunk_size < 8) {
+ // The smallest valid chunk is 8 bytes long.
+ return ERROR_MALFORMED;
+ }
+
+ char chunk[5];
+ MakeFourCCString(chunk_type, chunk);
+ ALOGV("MPEG4Source chunk %s @ %llx", chunk, *offset);
+
+ off64_t chunk_data_size = *offset + chunk_size - data_offset;
+
+ switch(chunk_type) {
+
+ case FOURCC('t', 'r', 'a', 'f'):
+ case FOURCC('m', 'o', 'o', 'f'): {
+ off64_t stop_offset = *offset + chunk_size;
+ *offset = data_offset;
+ while (*offset < stop_offset) {
+ status_t err = parseChunk(offset);
+ if (err != OK) {
+ return err;
+ }
+ }
+ if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ // *offset points to the mdat box following this moof
+ parseChunk(offset); // doesn't actually parse it, just updates offset
+ mNextMoofOffset = *offset;
+ }
+ break;
+ }
+
+ case FOURCC('t', 'f', 'h', 'd'): {
+ status_t err;
+ if ((err = parseTrackFragmentHeader(data_offset, chunk_data_size)) != OK) {
+ return err;
+ }
+ *offset += chunk_size;
+ break;
+ }
+
+ case FOURCC('t', 'r', 'u', 'n'): {
+ status_t err;
+ if (mLastParsedTrackId == mTrackId) {
+ if ((err = parseTrackFragmentRun(data_offset, chunk_data_size)) != OK) {
+ return err;
+ }
+ }
+
+ *offset += chunk_size;
+ break;
+ }
+
+ case FOURCC('s', 'a', 'i', 'z'): {
+ status_t err;
+ if ((err = parseSampleAuxiliaryInformationSizes(data_offset, chunk_data_size)) != OK) {
+ return err;
+ }
+ *offset += chunk_size;
+ break;
+ }
+ case FOURCC('s', 'a', 'i', 'o'): {
+ status_t err;
+ if ((err = parseSampleAuxiliaryInformationOffsets(data_offset, chunk_data_size)) != OK) {
+ return err;
+ }
+ *offset += chunk_size;
+ break;
+ }
+
+ case FOURCC('m', 'd', 'a', 't'): {
+ // parse DRM info if present
+ ALOGV("MPEG4Source::parseChunk mdat");
+ // if saiz/saio was previously observed, do something with the sampleinfos
+ *offset += chunk_size;
+ break;
+ }
+
+ default: {
+ *offset += chunk_size;
+ break;
+ }
+ }
+ return OK;
+}
+
+status_t MPEG4Source::parseSampleAuxiliaryInformationSizes(off64_t offset, off64_t size) {
+ ALOGV("parseSampleAuxiliaryInformationSizes");
+ // 14496-12 8.7.12
+ uint8_t version;
+ if (mDataSource->readAt(
+ offset, &version, sizeof(version))
+ < (ssize_t)sizeof(version)) {
+ return ERROR_IO;
+ }
+
+ if (version != 0) {
+ return ERROR_UNSUPPORTED;
+ }
+ offset++;
+
+ uint32_t flags;
+ if (!mDataSource->getUInt24(offset, &flags)) {
+ return ERROR_IO;
+ }
+ offset += 3;
+
+ if (flags & 1) {
+ uint32_t tmp;
+ if (!mDataSource->getUInt32(offset, &tmp)) {
+ return ERROR_MALFORMED;
+ }
+ mCurrentAuxInfoType = tmp;
+ offset += 4;
+ if (!mDataSource->getUInt32(offset, &tmp)) {
+ return ERROR_MALFORMED;
+ }
+ mCurrentAuxInfoTypeParameter = tmp;
+ offset += 4;
+ }
+
+ uint8_t defsize;
+ if (mDataSource->readAt(offset, &defsize, 1) != 1) {
+ return ERROR_MALFORMED;
+ }
+ mCurrentDefaultSampleInfoSize = defsize;
+ offset++;
+
+ uint32_t smplcnt;
+ if (!mDataSource->getUInt32(offset, &smplcnt)) {
+ return ERROR_MALFORMED;
+ }
+ mCurrentSampleInfoCount = smplcnt;
+ offset += 4;
+
+ if (mCurrentDefaultSampleInfoSize != 0) {
+ ALOGV("@@@@ using default sample info size of %d", mCurrentDefaultSampleInfoSize);
+ return OK;
+ }
+ if (smplcnt > mCurrentSampleInfoAllocSize) {
+ mCurrentSampleInfoSizes = (uint8_t*) realloc(mCurrentSampleInfoSizes, smplcnt);
+ mCurrentSampleInfoAllocSize = smplcnt;
+ }
+
+ mDataSource->readAt(offset, mCurrentSampleInfoSizes, smplcnt);
+ return OK;
+}
+
+status_t MPEG4Source::parseSampleAuxiliaryInformationOffsets(off64_t offset, off64_t size) {
+ ALOGV("parseSampleAuxiliaryInformationOffsets");
+ // 14496-12 8.7.13
+ uint8_t version;
+ if (mDataSource->readAt(offset, &version, sizeof(version)) != 1) {
+ return ERROR_IO;
+ }
+ offset++;
+
+ uint32_t flags;
+ if (!mDataSource->getUInt24(offset, &flags)) {
+ return ERROR_IO;
+ }
+ offset += 3;
+
+ uint32_t entrycount;
+ if (!mDataSource->getUInt32(offset, &entrycount)) {
+ return ERROR_IO;
+ }
+ offset += 4;
+
+ if (entrycount > mCurrentSampleInfoOffsetsAllocSize) {
+ mCurrentSampleInfoOffsets = (uint64_t*) realloc(mCurrentSampleInfoOffsets, entrycount * 8);
+ mCurrentSampleInfoOffsetsAllocSize = entrycount;
+ }
+ mCurrentSampleInfoOffsetCount = entrycount;
+
+ for (size_t i = 0; i < entrycount; i++) {
+ if (version == 0) {
+ uint32_t tmp;
+ if (!mDataSource->getUInt32(offset, &tmp)) {
+ return ERROR_IO;
+ }
+ mCurrentSampleInfoOffsets[i] = tmp;
+ offset += 4;
+ } else {
+ uint64_t tmp;
+ if (!mDataSource->getUInt64(offset, &tmp)) {
+ return ERROR_IO;
+ }
+ mCurrentSampleInfoOffsets[i] = tmp;
+ offset += 8;
+ }
+ }
+
+ // parse clear/encrypted data
+
+ off64_t drmoffset = mCurrentSampleInfoOffsets[0]; // from moof
+
+ drmoffset += mCurrentMoofOffset;
+ int ivlength;
+ CHECK(mFormat->findInt32(kKeyCryptoDefaultIVSize, &ivlength));
+
+ // read CencSampleAuxiliaryDataFormats
+ for (size_t i = 0; i < mCurrentSampleInfoCount; i++) {
+ Sample *smpl = &mCurrentSamples.editItemAt(i);
+
+ memset(smpl->iv, 0, 16);
+ if (mDataSource->readAt(drmoffset, smpl->iv, ivlength) != ivlength) {
+ return ERROR_IO;
+ }
+
+ drmoffset += ivlength;
+
+ int32_t smplinfosize = mCurrentDefaultSampleInfoSize;
+ if (smplinfosize == 0) {
+ smplinfosize = mCurrentSampleInfoSizes[i];
+ }
+ if (smplinfosize > ivlength) {
+ uint16_t numsubsamples;
+ if (!mDataSource->getUInt16(drmoffset, &numsubsamples)) {
+ return ERROR_IO;
+ }
+ drmoffset += 2;
+ for (size_t j = 0; j < numsubsamples; j++) {
+ uint16_t numclear;
+ uint32_t numencrypted;
+ if (!mDataSource->getUInt16(drmoffset, &numclear)) {
+ return ERROR_IO;
+ }
+ drmoffset += 2;
+ if (!mDataSource->getUInt32(drmoffset, &numencrypted)) {
+ return ERROR_IO;
+ }
+ drmoffset += 4;
+ smpl->clearsizes.add(numclear);
+ smpl->encryptedsizes.add(numencrypted);
+ }
+ } else {
+ smpl->clearsizes.add(0);
+ smpl->encryptedsizes.add(smpl->size);
+ }
+ }
+
+
+ return OK;
+}
+
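The drmoffset arithmetic above walks CencSampleAuxiliaryDataFormat records (ISO/IEC 23001-7): each sample has an IV of the default IV size and, when the sample info size says there is more, a subsample map. A comment-style sketch of the assumed layout:

    // For each sample, at the position given by the saio offset (moof-relative here):
    //     uint8  iv[default_iv_size];                       // 8 or 16 bytes
    //     if (sample_info_size > default_iv_size) {
    //         uint16 subsample_count;
    //         struct {
    //             uint16 BytesOfClearData;
    //             uint32 BytesOfProtectedData;
    //         } subsamples[subsample_count];
    //     }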
+status_t MPEG4Source::parseTrackFragmentHeader(off64_t offset, off64_t size) {
+
+ if (size < 8) {
+ return -EINVAL;
+ }
+
+ uint32_t flags;
+ if (!mDataSource->getUInt32(offset, &flags)) { // actually version + flags
+ return ERROR_MALFORMED;
+ }
+
+ if (flags & 0xff000000) {
+ return -EINVAL;
+ }
+
+ if (!mDataSource->getUInt32(offset + 4, (uint32_t*)&mLastParsedTrackId)) {
+ return ERROR_MALFORMED;
+ }
+
+ if (mLastParsedTrackId != mTrackId) {
+ // this is not the right track, skip it
+ return OK;
+ }
+
+ mTrackFragmentHeaderInfo.mFlags = flags;
+ mTrackFragmentHeaderInfo.mTrackID = mLastParsedTrackId;
+ offset += 8;
+ size -= 8;
+
+ ALOGV("fragment header: %08x %08x", flags, mTrackFragmentHeaderInfo.mTrackID);
+
+ if (flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent) {
+ if (size < 8) {
+ return -EINVAL;
+ }
+
+ if (!mDataSource->getUInt64(offset, &mTrackFragmentHeaderInfo.mBaseDataOffset)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 8;
+ size -= 8;
+ }
+
+ if (flags & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent) {
+ if (size < 4) {
+ return -EINVAL;
+ }
+
+ if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mSampleDescriptionIndex)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ size -= 4;
+ }
+
+ if (flags & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
+ if (size < 4) {
+ return -EINVAL;
+ }
+
+ if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleDuration)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ size -= 4;
+ }
+
+ if (flags & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) {
+ if (size < 4) {
+ return -EINVAL;
+ }
+
+ if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleSize)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ size -= 4;
+ }
+
+ if (flags & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) {
+ if (size < 4) {
+ return -EINVAL;
+ }
+
+ if (!mDataSource->getUInt32(offset, &mTrackFragmentHeaderInfo.mDefaultSampleFlags)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ size -= 4;
+ }
+
+ if (!(flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent)) {
+ mTrackFragmentHeaderInfo.mBaseDataOffset = mCurrentMoofOffset;
+ }
+
+ mTrackFragmentHeaderInfo.mDataOffset = 0;
+ return OK;
+}
+
+status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
+
+ ALOGV("MPEG4Extractor::parseTrackFragmentRun");
+ if (size < 8) {
+ return -EINVAL;
+ }
+
+ enum {
+ kDataOffsetPresent = 0x01,
+ kFirstSampleFlagsPresent = 0x04,
+ kSampleDurationPresent = 0x100,
+ kSampleSizePresent = 0x200,
+ kSampleFlagsPresent = 0x400,
+ kSampleCompositionTimeOffsetPresent = 0x800,
+ };
+
+ uint32_t flags;
+ if (!mDataSource->getUInt32(offset, &flags)) {
+ return ERROR_MALFORMED;
+ }
+ ALOGV("fragment run flags: %08x", flags);
+
+ if (flags & 0xff000000) {
+ return -EINVAL;
+ }
+
+ if ((flags & kFirstSampleFlagsPresent) && (flags & kSampleFlagsPresent)) {
+ // These two shall not be used together.
+ return -EINVAL;
+ }
+
+ uint32_t sampleCount;
+ if (!mDataSource->getUInt32(offset + 4, &sampleCount)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 8;
+ size -= 8;
+
+ uint64_t dataOffset = mTrackFragmentHeaderInfo.mDataOffset;
+
+ uint32_t firstSampleFlags = 0;
+
+ if (flags & kDataOffsetPresent) {
+ if (size < 4) {
+ return -EINVAL;
+ }
+
+ int32_t dataOffsetDelta;
+ if (!mDataSource->getUInt32(offset, (uint32_t*)&dataOffsetDelta)) {
+ return ERROR_MALFORMED;
+ }
+
+ dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta;
+
+ offset += 4;
+ size -= 4;
+ }
+
+ if (flags & kFirstSampleFlagsPresent) {
+ if (size < 4) {
+ return -EINVAL;
+ }
+
+ if (!mDataSource->getUInt32(offset, &firstSampleFlags)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ size -= 4;
+ }
+
+ uint32_t sampleDuration = 0, sampleSize = 0, sampleFlags = 0,
+ sampleCtsOffset = 0;
+
+ size_t bytesPerSample = 0;
+ if (flags & kSampleDurationPresent) {
+ bytesPerSample += 4;
+ } else if (mTrackFragmentHeaderInfo.mFlags
+ & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
+ sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
+ } else {
+ sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
+ }
+
+ if (flags & kSampleSizePresent) {
+ bytesPerSample += 4;
+ } else if (mTrackFragmentHeaderInfo.mFlags
+ & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) {
+ sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize;
+ } else {
+ sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize;
+ }
+
+ if (flags & kSampleFlagsPresent) {
+ bytesPerSample += 4;
+ } else if (mTrackFragmentHeaderInfo.mFlags
+ & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) {
+ sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags;
+ } else {
+ sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags;
+ }
+
+ if (flags & kSampleCompositionTimeOffsetPresent) {
+ bytesPerSample += 4;
+ } else {
+ sampleCtsOffset = 0;
+ }
+
+ if (size < sampleCount * bytesPerSample) {
+ return -EINVAL;
+ }
+
+ Sample tmp;
+ for (uint32_t i = 0; i < sampleCount; ++i) {
+ if (flags & kSampleDurationPresent) {
+ if (!mDataSource->getUInt32(offset, &sampleDuration)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ }
+
+ if (flags & kSampleSizePresent) {
+ if (!mDataSource->getUInt32(offset, &sampleSize)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ }
+
+ if (flags & kSampleFlagsPresent) {
+ if (!mDataSource->getUInt32(offset, &sampleFlags)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ }
+
+ if (flags & kSampleCompositionTimeOffsetPresent) {
+ if (!mDataSource->getUInt32(offset, &sampleCtsOffset)) {
+ return ERROR_MALFORMED;
+ }
+ offset += 4;
+ }
+
+ ALOGV("adding sample %d at offset 0x%08llx, size %u, duration %u, "
+ " flags 0x%08x", i + 1,
+ dataOffset, sampleSize, sampleDuration,
+ (flags & kFirstSampleFlagsPresent) && i == 0
+ ? firstSampleFlags : sampleFlags);
+ tmp.offset = dataOffset;
+ tmp.size = sampleSize;
+ tmp.duration = sampleDuration;
+ mCurrentSamples.add(tmp);
+
+ dataOffset += sampleSize;
+ }
+
+ mTrackFragmentHeaderInfo.mDataOffset = dataOffset;
+
+ return OK;
+}
+
sp<MetaData> MPEG4Source::getFormat() {
Mutex::Autolock autoLock(mLock);
@@ -2019,6 +2959,10 @@ status_t MPEG4Source::read(
CHECK(mStarted);
+ if (mFirstMoofOffset > 0) {
+ return fragmentedRead(out, options);
+ }
+
*out = NULL;
int64_t targetSampleTimeUs = -1;
@@ -2076,6 +3020,7 @@ status_t MPEG4Source::read(
// we had seeked to the end of stream, ending normally.
err = ERROR_END_OF_STREAM;
}
+ ALOGV("end of stream");
return err;
}
@@ -2286,6 +3231,268 @@ status_t MPEG4Source::read(
}
}
+status_t MPEG4Source::fragmentedRead(
+ MediaBuffer **out, const ReadOptions *options) {
+
+ ALOGV("MPEG4Source::fragmentedRead");
+
+ CHECK(mStarted);
+
+ *out = NULL;
+
+ int64_t targetSampleTimeUs = -1;
+
+ int64_t seekTimeUs;
+ ReadOptions::SeekMode mode;
+ if (options && options->getSeekTo(&seekTimeUs, &mode)) {
+
+ int numSidxEntries = mSegments.size();
+ if (numSidxEntries != 0) {
+ int64_t totalTime = 0;
+ off64_t totalOffset = mFirstMoofOffset;
+ for (int i = 0; i < numSidxEntries; i++) {
+ const SidxEntry *se = &mSegments[i];
+ if (totalTime + se->mDurationUs > seekTimeUs) {
+ // The requested time is somewhere in this segment
+ if ((mode == ReadOptions::SEEK_NEXT_SYNC) ||
+ (mode == ReadOptions::SEEK_CLOSEST_SYNC &&
+ (seekTimeUs - totalTime) > (totalTime + se->mDurationUs - seekTimeUs))) {
+ // requested next sync, or closest sync and it was closer to the end of
+ // this segment
+ totalTime += se->mDurationUs;
+ totalOffset += se->mSize;
+ }
+ break;
+ }
+ totalTime += se->mDurationUs;
+ totalOffset += se->mSize;
+ }
+ mCurrentMoofOffset = totalOffset;
+ mCurrentSamples.clear();
+ mCurrentSampleIndex = 0;
+ parseChunk(&totalOffset);
+ mCurrentTime = totalTime * mTimescale / 1000000ll;
+ }
+
+ if (mBuffer != NULL) {
+ mBuffer->release();
+ mBuffer = NULL;
+ }
+
+ // fall through
+ }
+
+ off64_t offset = 0;
+ size_t size;
+ uint32_t cts = 0;
+ bool isSyncSample = false;
+ bool newBuffer = false;
+ if (mBuffer == NULL) {
+ newBuffer = true;
+
+ if (mCurrentSampleIndex >= mCurrentSamples.size()) {
+ // move to next fragment
+ Sample lastSample = mCurrentSamples[mCurrentSamples.size() - 1];
+ off64_t nextMoof = mNextMoofOffset; // lastSample.offset + lastSample.size;
+ mCurrentMoofOffset = nextMoof;
+ mCurrentSamples.clear();
+ mCurrentSampleIndex = 0;
+ parseChunk(&nextMoof);
+ if (mCurrentSampleIndex >= mCurrentSamples.size()) {
+ return ERROR_END_OF_STREAM;
+ }
+ }
+
+ const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex];
+ offset = smpl->offset;
+ size = smpl->size;
+ cts = mCurrentTime;
+ mCurrentTime += smpl->duration;
+ isSyncSample = (mCurrentSampleIndex == 0); // XXX
+
+ status_t err = mGroup->acquire_buffer(&mBuffer);
+
+ if (err != OK) {
+ CHECK(mBuffer == NULL);
+ ALOGV("acquire_buffer returned %d", err);
+ return err;
+ }
+ }
+
+ const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex];
+ const sp<MetaData> bufmeta = mBuffer->meta_data();
+ bufmeta->clear();
+ if (smpl->encryptedsizes.size()) {
+ // store clear/encrypted lengths in metadata
+ bufmeta->setData(kKeyPlainSizes, 0,
+ smpl->clearsizes.array(), smpl->clearsizes.size() * 4);
+ bufmeta->setData(kKeyEncryptedSizes, 0,
+ smpl->encryptedsizes.array(), smpl->encryptedsizes.size() * 4);
+ bufmeta->setData(kKeyCryptoIV, 0, smpl->iv, 16); // use 16 or the actual size?
+ bufmeta->setInt32(kKeyCryptoDefaultIVSize, mDefaultIVSize);
+ bufmeta->setInt32(kKeyCryptoMode, mCryptoMode);
+ bufmeta->setData(kKeyCryptoKey, 0, mCryptoKey, 16);
+ }
+
+ if (!mIsAVC || mWantsNALFragments) {
+ if (newBuffer) {
+ ssize_t num_bytes_read =
+ mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size);
+
+ if (num_bytes_read < (ssize_t)size) {
+ mBuffer->release();
+ mBuffer = NULL;
+
+ ALOGV("i/o error");
+ return ERROR_IO;
+ }
+
+ CHECK(mBuffer != NULL);
+ mBuffer->set_range(0, size);
+ mBuffer->meta_data()->setInt64(
+ kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+
+ if (targetSampleTimeUs >= 0) {
+ mBuffer->meta_data()->setInt64(
+ kKeyTargetTime, targetSampleTimeUs);
+ }
+
+ if (isSyncSample) {
+ mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
+ }
+
+ ++mCurrentSampleIndex;
+ }
+
+ if (!mIsAVC) {
+ *out = mBuffer;
+ mBuffer = NULL;
+
+ return OK;
+ }
+
+ // Each NAL unit is split up into its constituent fragments and
+ // each one of them returned in its own buffer.
+
+ CHECK(mBuffer->range_length() >= mNALLengthSize);
+
+ const uint8_t *src =
+ (const uint8_t *)mBuffer->data() + mBuffer->range_offset();
+
+ size_t nal_size = parseNALSize(src);
+ if (mBuffer->range_length() < mNALLengthSize + nal_size) {
+ ALOGE("incomplete NAL unit.");
+
+ mBuffer->release();
+ mBuffer = NULL;
+
+ return ERROR_MALFORMED;
+ }
+
+ MediaBuffer *clone = mBuffer->clone();
+ CHECK(clone != NULL);
+ clone->set_range(mBuffer->range_offset() + mNALLengthSize, nal_size);
+
+ CHECK(mBuffer != NULL);
+ mBuffer->set_range(
+ mBuffer->range_offset() + mNALLengthSize + nal_size,
+ mBuffer->range_length() - mNALLengthSize - nal_size);
+
+ if (mBuffer->range_length() == 0) {
+ mBuffer->release();
+ mBuffer = NULL;
+ }
+
+ *out = clone;
+
+ return OK;
+ } else {
+ ALOGV("whole NAL");
+ // Whole NAL units are returned but each fragment is prefixed by
+ // the start code (0x00 00 00 01).
+ ssize_t num_bytes_read = 0;
+ int32_t drm = 0;
+ bool usesDRM = (mFormat->findInt32(kKeyIsDRM, &drm) && drm != 0);
+ if (usesDRM) {
+ num_bytes_read =
+ mDataSource->readAt(offset, (uint8_t*)mBuffer->data(), size);
+ } else {
+ num_bytes_read = mDataSource->readAt(offset, mSrcBuffer, size);
+ }
+
+ if (num_bytes_read < (ssize_t)size) {
+ mBuffer->release();
+ mBuffer = NULL;
+
+ ALOGV("i/o error");
+ return ERROR_IO;
+ }
+
+ if (usesDRM) {
+ CHECK(mBuffer != NULL);
+ mBuffer->set_range(0, size);
+
+ } else {
+ uint8_t *dstData = (uint8_t *)mBuffer->data();
+ size_t srcOffset = 0;
+ size_t dstOffset = 0;
+
+ while (srcOffset < size) {
+ bool isMalFormed = (srcOffset + mNALLengthSize > size);
+ size_t nalLength = 0;
+ if (!isMalFormed) {
+ nalLength = parseNALSize(&mSrcBuffer[srcOffset]);
+ srcOffset += mNALLengthSize;
+ isMalFormed = srcOffset + nalLength > size;
+ }
+
+ if (isMalFormed) {
+ ALOGE("Video is malformed");
+ mBuffer->release();
+ mBuffer = NULL;
+ return ERROR_MALFORMED;
+ }
+
+ if (nalLength == 0) {
+ continue;
+ }
+
+ CHECK(dstOffset + 4 <= mBuffer->size());
+
+ dstData[dstOffset++] = 0;
+ dstData[dstOffset++] = 0;
+ dstData[dstOffset++] = 0;
+ dstData[dstOffset++] = 1;
+ memcpy(&dstData[dstOffset], &mSrcBuffer[srcOffset], nalLength);
+ srcOffset += nalLength;
+ dstOffset += nalLength;
+ }
+ CHECK_EQ(srcOffset, size);
+ CHECK(mBuffer != NULL);
+ mBuffer->set_range(0, dstOffset);
+ }
+
+ mBuffer->meta_data()->setInt64(
+ kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+
+ if (targetSampleTimeUs >= 0) {
+ mBuffer->meta_data()->setInt64(
+ kKeyTargetTime, targetSampleTimeUs);
+ }
+
+ if (isSyncSample) {
+ mBuffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
+ }
+
+ ++mCurrentSampleIndex;
+
+ *out = mBuffer;
+ mBuffer = NULL;
+
+ return OK;
+ }
+}
+
MPEG4Extractor::Track *MPEG4Extractor::findTrackByMimePrefix(
const char *mimePrefix) {
for (Track *track = mFirstTrack; track != NULL; track = track->next) {
@@ -2398,6 +3605,9 @@ static bool BetterSniffMPEG4(
off64_t chunkDataSize = offset + chunkSize - chunkDataOffset;
+ char chunkstring[5];
+ MakeFourCCString(chunkType, chunkstring);
+ ALOGV("saw chunk type %s, size %lld @ %lld", chunkstring, chunkSize, offset);
switch (chunkType) {
case FOURCC('f', 't', 'y', 'p'):
{
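The seek handling in fragmentedRead() maps a target time onto a moof offset by accumulating segment durations and sizes from the sidx table. The same walk can be written as a small standalone helper; this is a sketch that, like the code, assumes the first segment starts at the first moof, and it ignores the SEEK_NEXT_SYNC/SEEK_CLOSEST_SYNC adjustment the real code applies:

    // Illustrative only: find the byte offset of the media segment containing timeUs.
    static off64_t segmentOffsetForTime(const Vector<SidxEntry> &segments,
                                        off64_t firstMoofOffset, int64_t timeUs) {
        int64_t t = 0;
        off64_t off = firstMoofOffset;
        for (size_t i = 0; i < segments.size(); ++i) {
            if (t + segments[i].mDurationUs > timeUs) {
                break;                              // timeUs falls inside this segment
            }
            t += segments[i].mDurationUs;
            off += segments[i].mSize;
        }
        return off;
    }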
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 326930f..a0f17b5 100755..100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -212,7 +212,6 @@ private:
int64_t mTrackDurationUs;
int64_t mMaxChunkDurationUs;
- bool mIsRealTimeRecording;
int64_t mEstimatedTrackSizeBytes;
int64_t mMdatSizeBytes;
int32_t mTimeScale;
@@ -335,6 +334,7 @@ private:
MPEG4Writer::MPEG4Writer(const char *filename)
: mFd(-1),
mInitCheck(NO_INIT),
+ mIsRealTimeRecording(true),
mUse4ByteNalLength(true),
mUse32BitOffset(true),
mIsFileSizeLimitExplicitlyRequested(false),
@@ -359,6 +359,7 @@ MPEG4Writer::MPEG4Writer(const char *filename)
MPEG4Writer::MPEG4Writer(int fd)
: mFd(dup(fd)),
mInitCheck(mFd < 0? NO_INIT: OK),
+ mIsRealTimeRecording(true),
mUse4ByteNalLength(true),
mUse32BitOffset(true),
mIsFileSizeLimitExplicitlyRequested(false),
@@ -428,6 +429,42 @@ status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {
ALOGE("Attempt to add source AFTER recording is started");
return UNKNOWN_ERROR;
}
+
+ // At most 2 tracks can be supported.
+ if (mTracks.size() >= 2) {
+ ALOGE("Too many tracks (%d) to add", mTracks.size());
+ return ERROR_UNSUPPORTED;
+ }
+
+ CHECK(source.get() != NULL);
+
+ // A track of type other than video or audio is not supported.
+ const char *mime;
+ source->getFormat()->findCString(kKeyMIMEType, &mime);
+ bool isAudio = !strncasecmp(mime, "audio/", 6);
+ bool isVideo = !strncasecmp(mime, "video/", 6);
+ if (!isAudio && !isVideo) {
+ ALOGE("Track (%s) other than video or audio is not supported",
+ mime);
+ return ERROR_UNSUPPORTED;
+ }
+
+ // At this point, we know the track to be added is either
+ // video or audio. Thus, we only need to check whether it
+ // is an audio track or not (if it is not, then it must be
+ // a video track).
+
+ // No more than one video or one audio track is supported.
+ for (List<Track*>::iterator it = mTracks.begin();
+ it != mTracks.end(); ++it) {
+ if ((*it)->isAudio() == isAudio) {
+ ALOGE("%s track already exists", isAudio? "Audio": "Video");
+ return ERROR_UNSUPPORTED;
+ }
+ }
+
+ // This is the first track of either audio or video.
+ // Go ahead to add the track.
Track *track = new Track(this, source, 1 + mTracks.size());
mTracks.push_back(track);
@@ -435,6 +472,11 @@ status_t MPEG4Writer::addSource(const sp<MediaSource> &source) {
}
status_t MPEG4Writer::startTracks(MetaData *params) {
+ if (mTracks.empty()) {
+ ALOGE("No source added");
+ return INVALID_OPERATION;
+ }
+
for (List<Track *>::iterator it = mTracks.begin();
it != mTracks.end(); ++it) {
status_t err = (*it)->start(params);
@@ -555,6 +597,11 @@ status_t MPEG4Writer::start(MetaData *param) {
mUse4ByteNalLength = false;
}
+ int32_t isRealTimeRecording;
+ if (param && param->findInt32(kKeyRealTimeRecording, &isRealTimeRecording)) {
+ mIsRealTimeRecording = isRealTimeRecording;
+ }
+
mStartTimestampUs = -1;
if (mStarted) {
@@ -575,13 +622,50 @@ status_t MPEG4Writer::start(MetaData *param) {
/*
* When the requested file size limit is small, the priority
* is to meet the file size limit requirement, rather than
- * to make the file streamable.
+ * to make the file streamable. mStreamableFile does not tell
+ * whether the actual recorded file is streamable or not.
*/
mStreamableFile =
(mMaxFileSizeLimitBytes != 0 &&
mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes);
- mWriteMoovBoxToMemory = mStreamableFile;
+ /*
+ * mWriteMoovBoxToMemory is true if the amount of data in moov box is
+ * smaller than the reserved free space at the beginning of a file, AND
+ * when the content of moov box is constructed. Note that video/audio
+ * frame data is always written to the file but not in the memory.
+ *
+ * Before stop()/reset() is called, mWriteMoovBoxToMemory is always
+ * false. When reset() is called at the end of a recording session,
+ * Moov box needs to be constructed.
+ *
+ * 1) Right before the moov box is constructed, mWriteMoovBoxToMemory
+ * is set to mStreamableFile, so that if
+ * the file is intended to be streamable, it is set to true;
+ * otherwise, it is set to false. When the value is set to false,
+ * all the content of the moov box is written immediately to
+ * the end of the file. When the value is set to true, all the
+ * content of the moov box is written to an in-memory cache,
+ * mMoovBoxBuffer, until the following condition happens. Note
+ * that the size of the in-memory cache is the same as the
+ * reserved free space at the beginning of the file.
+ *
+ * 2) While the data of the moov box is written to an in-memory
+ * cache, the data size is checked against the reserved space.
+ * If the data size surpasses the reserved space, subsequent moov
+ * data can no longer be held in the in-memory cache. This also
+ * indicates that the reserved space was too small. At this point,
+ * _all_ moov data must be written to the end of the file.
+ * mWriteMoovBoxToMemory must be set to false to direct the write
+ * to the file.
+ *
+ * 3) If the data size in moov box is smaller than the reserved
+ * space after moov box is completely constructed, the in-memory
+ * cache copy of the moov box is written to the reserved free
+ * space. Thus, immediately after the moov is completely
+ * constructed, mWriteMoovBoxToMemory is always set to false.
+ */
+ mWriteMoovBoxToMemory = false;
mMoovBoxBuffer = NULL;
mMoovBoxBufferOffset = 0;
@@ -786,15 +870,25 @@ status_t MPEG4Writer::reset() {
}
lseek64(mFd, mOffset, SEEK_SET);
- const off64_t moovOffset = mOffset;
- mWriteMoovBoxToMemory = mStreamableFile;
- mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
+ // Construct moov box now
mMoovBoxBufferOffset = 0;
- CHECK(mMoovBoxBuffer != NULL);
+ mWriteMoovBoxToMemory = mStreamableFile;
+ if (mWriteMoovBoxToMemory) {
+ // There is no need to allocate in-memory cache
+ // for moov box if the file is not streamable.
+
+ mMoovBoxBuffer = (uint8_t *) malloc(mEstimatedMoovBoxSize);
+ CHECK(mMoovBoxBuffer != NULL);
+ }
writeMoovBox(maxDurationUs);
- mWriteMoovBoxToMemory = false;
- if (mStreamableFile) {
+ // mWriteMoovBoxToMemory could be set to false in
+ // MPEG4Writer::write() method
+ if (mWriteMoovBoxToMemory) {
+ mWriteMoovBoxToMemory = false;
+ // Content of the moov box is saved in the cache, and the in-memory
+ // moov box needs to be written to the file in a single shot.
+
CHECK_LE(mMoovBoxBufferOffset + 8, mEstimatedMoovBoxSize);
// Moov box
@@ -806,13 +900,15 @@ status_t MPEG4Writer::reset() {
lseek64(mFd, mOffset, SEEK_SET);
writeInt32(mEstimatedMoovBoxSize - mMoovBoxBufferOffset);
write("free", 4);
+ } else {
+ ALOGI("The mp4 file will not be streamable.");
+ }
- // Free temp memory
+ // Free in-memory cache for moov box
+ if (mMoovBoxBuffer != NULL) {
free(mMoovBoxBuffer);
mMoovBoxBuffer = NULL;
mMoovBoxBufferOffset = 0;
- } else {
- ALOGI("The mp4 file will not be streamable.");
}
CHECK(mBoxes.empty());
@@ -994,23 +1090,28 @@ size_t MPEG4Writer::write(
const size_t bytes = size * nmemb;
if (mWriteMoovBoxToMemory) {
- // This happens only when we write the moov box at the end of
- // recording, not for each output video/audio frame we receive.
+
off64_t moovBoxSize = 8 + mMoovBoxBufferOffset + bytes;
if (moovBoxSize > mEstimatedMoovBoxSize) {
+ // The reserved moov box at the beginning of the file
+ // is not big enough. Moov box should be written to
+ // the end of the file from now on, but not to the
+ // in-memory cache.
+
+ // We write partial moov box that is in the memory to
+ // the file first.
for (List<off64_t>::iterator it = mBoxes.begin();
it != mBoxes.end(); ++it) {
(*it) += mOffset;
}
lseek64(mFd, mOffset, SEEK_SET);
::write(mFd, mMoovBoxBuffer, mMoovBoxBufferOffset);
- ::write(mFd, ptr, size * nmemb);
+ ::write(mFd, ptr, bytes);
mOffset += (bytes + mMoovBoxBufferOffset);
- free(mMoovBoxBuffer);
- mMoovBoxBuffer = NULL;
- mMoovBoxBufferOffset = 0;
+
+ // All subsequent moov box content will be written
+ // to the end of the file.
mWriteMoovBoxToMemory = false;
- mStreamableFile = false;
} else {
memcpy(mMoovBoxBuffer + mMoovBoxBufferOffset, ptr, bytes);
mMoovBoxBufferOffset += bytes;
@@ -1545,12 +1646,18 @@ void MPEG4Writer::threadFunc() {
mChunkReadyCondition.wait(mLock);
}
- // Actual write without holding the lock in order to
- // reduce the blocking time for media track threads.
+ // In real time recording mode, write without holding the lock in order
+ // to reduce the blocking time for media track threads.
+ // Otherwise, hold the lock until the existing chunks get written to the
+ // file.
if (chunkFound) {
- mLock.unlock();
+ if (mIsRealTimeRecording) {
+ mLock.unlock();
+ }
writeChunkToFile(&chunk);
- mLock.lock();
+ if (mIsRealTimeRecording) {
+ mLock.lock();
+ }
}
}
@@ -1600,18 +1707,10 @@ status_t MPEG4Writer::Track::start(MetaData *params) {
mRotation = rotationDegrees;
}
- mIsRealTimeRecording = true;
- {
- int32_t isNotRealTime;
- if (params && params->findInt32(kKeyNotRealTime, &isNotRealTime)) {
- mIsRealTimeRecording = (isNotRealTime == 0);
- }
- }
-
initTrackingProgressStatus(params);
sp<MetaData> meta = new MetaData;
- if (mIsRealTimeRecording && mOwner->numTracks() > 1) {
+ if (mOwner->isRealTimeRecording() && mOwner->numTracks() > 1) {
/*
* This extra delay of accepting incoming audio/video signals
* helps to align a/v start time at the beginning of a recording
@@ -1989,7 +2088,10 @@ status_t MPEG4Writer::Track::threadEntry() {
} else {
prctl(PR_SET_NAME, (unsigned long)"VideoTrackEncoding", 0, 0, 0);
}
- androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO);
+
+ if (mOwner->isRealTimeRecording()) {
+ androidSetThreadPriority(0, ANDROID_PRIORITY_AUDIO);
+ }
sp<MetaData> meta_data;
@@ -2150,7 +2252,7 @@ status_t MPEG4Writer::Track::threadEntry() {
}
- if (mIsRealTimeRecording) {
+ if (mOwner->isRealTimeRecording()) {
if (mIsAudio) {
updateDriftTime(meta_data);
}
@@ -2436,6 +2538,10 @@ int64_t MPEG4Writer::getDriftTimeUs() {
return mDriftTimeUs;
}
+bool MPEG4Writer::isRealTimeRecording() const {
+ return mIsRealTimeRecording;
+}
+
bool MPEG4Writer::useNalLengthFour() {
return mUse4ByteNalLength;
}
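With the per-track kKeyNotRealTime parameter replaced by a writer-wide kKeyRealTimeRecording flag, an offline (non-real-time) muxing session would be set up roughly as follows; the writer and source construction is illustrative:

    sp<MPEG4Writer> writer = new MPEG4Writer(fd);
    writer->addSource(videoSource);                     // at most one video and one audio track
    sp<MetaData> params = new MetaData;
    params->setInt32(kKeyRealTimeRecording, false);     // offline: hold the lock while writing chunks,
                                                        // and skip the audio-priority thread boost
    status_t err = writer->start(params.get());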
diff --git a/media/libstagefright/MediaAdapter.cpp b/media/libstagefright/MediaAdapter.cpp
new file mode 100644
index 0000000..2484212
--- /dev/null
+++ b/media/libstagefright/MediaAdapter.cpp
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaAdapter"
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaAdapter.h>
+#include <media/stagefright/MediaBuffer.h>
+
+namespace android {
+
+MediaAdapter::MediaAdapter(const sp<MetaData> &meta)
+ : mCurrentMediaBuffer(NULL),
+ mStarted(false),
+ mOutputFormat(meta) {
+}
+
+MediaAdapter::~MediaAdapter() {
+ Mutex::Autolock autoLock(mAdapterLock);
+ mOutputFormat.clear();
+ CHECK(mCurrentMediaBuffer == NULL);
+}
+
+status_t MediaAdapter::start(MetaData *params) {
+ Mutex::Autolock autoLock(mAdapterLock);
+ if (!mStarted) {
+ mStarted = true;
+ }
+ return OK;
+}
+
+status_t MediaAdapter::stop() {
+ Mutex::Autolock autoLock(mAdapterLock);
+ if (mStarted) {
+ mStarted = false;
+ // If stop() happens immediately after a pushBuffer(), we should
+ // clean up the mCurrentMediaBuffer
+ if (mCurrentMediaBuffer != NULL) {
+ mCurrentMediaBuffer->release();
+ mCurrentMediaBuffer = NULL;
+ }
+ // While read() is still waiting, we should signal it to finish.
+ mBufferReadCond.signal();
+ }
+ return OK;
+}
+
+sp<MetaData> MediaAdapter::getFormat() {
+ Mutex::Autolock autoLock(mAdapterLock);
+ return mOutputFormat;
+}
+
+void MediaAdapter::signalBufferReturned(MediaBuffer *buffer) {
+ Mutex::Autolock autoLock(mAdapterLock);
+ CHECK(buffer != NULL);
+ buffer->setObserver(0);
+ buffer->release();
+ ALOGV("buffer returned %p", buffer);
+ mBufferReturnedCond.signal();
+}
+
+status_t MediaAdapter::read(
+ MediaBuffer **buffer, const ReadOptions *options) {
+ Mutex::Autolock autoLock(mAdapterLock);
+ if (!mStarted) {
+ ALOGV("Read before even started!");
+ return ERROR_END_OF_STREAM;
+ }
+
+ while (mCurrentMediaBuffer == NULL && mStarted) {
+ ALOGV("waiting @ read()");
+ mBufferReadCond.wait(mAdapterLock);
+ }
+
+ if (!mStarted) {
+ ALOGV("read interrupted after stop");
+ CHECK(mCurrentMediaBuffer == NULL);
+ return ERROR_END_OF_STREAM;
+ }
+
+ CHECK(mCurrentMediaBuffer != NULL);
+
+ *buffer = mCurrentMediaBuffer;
+ mCurrentMediaBuffer = NULL;
+ (*buffer)->setObserver(this);
+
+ return OK;
+}
+
+status_t MediaAdapter::pushBuffer(MediaBuffer *buffer) {
+ if (buffer == NULL) {
+ ALOGE("pushBuffer get an NULL buffer");
+ return -EINVAL;
+ }
+
+ Mutex::Autolock autoLock(mAdapterLock);
+ if (!mStarted) {
+ ALOGE("pushBuffer called before start");
+ return INVALID_OPERATION;
+ }
+ mCurrentMediaBuffer = buffer;
+ mBufferReadCond.signal();
+
+ ALOGV("wait for the buffer returned @ pushBuffer! %p", buffer);
+ mBufferReturnedCond.wait(mAdapterLock);
+
+ return OK;
+}
+
+} // namespace android
+
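MediaAdapter turns the pull-based MediaSource interface into a push interface: pushBuffer() hands exactly one buffer to a blocked read() and then waits until the consumer returns it through signalBufferReturned(). A producer-side sketch of the intended use with an MP4 writer; the wiring is illustrative:

    sp<MediaAdapter> adapter = new MediaAdapter(trackFormat);  // trackFormat: sp<MetaData> with mime etc.
    writer->addSource(adapter);
    writer->start();                          // the track start()s the adapter

    MediaBuffer *buffer = new MediaBuffer(sampleSize);
    memcpy(buffer->data(), sampleData, sampleSize);
    buffer->meta_data()->setInt64(kKeyTime, sampleTimeUs);
    adapter->pushBuffer(buffer);              // blocks until the writer is done with the buffer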
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index cb8a651..f412dc8 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -22,7 +22,7 @@
#include "include/SoftwareRenderer.h"
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -30,10 +30,14 @@
#include <media/stagefright/foundation/AString.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/ACodec.h>
+#include <media/stagefright/BufferProducerWrapper.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/NativeWindowWrapper.h>
+#include "include/avc_utils.h"
+
namespace android {
// static
@@ -62,12 +66,14 @@ MediaCodec::MediaCodec(const sp<ALooper> &looper)
: mState(UNINITIALIZED),
mLooper(looper),
mCodec(new ACodec),
+ mReplyID(0),
mFlags(0),
mSoftRenderer(NULL),
mDequeueInputTimeoutGeneration(0),
mDequeueInputReplyID(0),
mDequeueOutputTimeoutGeneration(0),
- mDequeueOutputReplyID(0) {
+ mDequeueOutputReplyID(0),
+ mHaveInputSurface(false) {
}
MediaCodec::~MediaCodec() {
@@ -132,7 +138,7 @@ status_t MediaCodec::init(const char *name, bool nameIsType, bool encoder) {
status_t MediaCodec::configure(
const sp<AMessage> &format,
- const sp<SurfaceTextureClient> &nativeWindow,
+ const sp<Surface> &nativeWindow,
const sp<ICrypto> &crypto,
uint32_t flags) {
sp<AMessage> msg = new AMessage(kWhatConfigure, id());
@@ -154,6 +160,26 @@ status_t MediaCodec::configure(
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::createInputSurface(
+ sp<IGraphicBufferProducer>* bufferProducer) {
+ sp<AMessage> msg = new AMessage(kWhatCreateInputSurface, id());
+
+ sp<AMessage> response;
+ status_t err = PostAndAwaitResponse(msg, &response);
+ if (err == NO_ERROR) {
+ // unwrap the sp<IGraphicBufferProducer>
+ sp<RefBase> obj;
+ bool found = response->findObject("input-surface", &obj);
+ CHECK(found);
+ sp<BufferProducerWrapper> wrapper(
+ static_cast<BufferProducerWrapper*>(obj.get()));
+ *bufferProducer = wrapper->getBufferProducer();
+ } else {
+ ALOGW("createInputSurface failed, err=%d", err);
+ }
+ return err;
+}
+
status_t MediaCodec::start() {
sp<AMessage> msg = new AMessage(kWhatStart, id());
@@ -288,6 +314,13 @@ status_t MediaCodec::releaseOutputBuffer(size_t index) {
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::signalEndOfInputStream() {
+ sp<AMessage> msg = new AMessage(kWhatSignalEndOfInputStream, id());
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
sp<AMessage> msg = new AMessage(kWhatGetOutputFormat, id());
@@ -476,6 +509,11 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
"(omx error 0x%08x, internalError %d)",
omxError, internalError);
+ if (omxError == OMX_ErrorResourcesLost
+ && internalError == DEAD_OBJECT) {
+ mFlags |= kFlagSawMediaServerDie;
+ }
+
bool sendErrorReponse = true;
switch (mState) {
@@ -504,6 +542,19 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// the shutdown complete notification.
sendErrorReponse = false;
+
+ if (mFlags & kFlagSawMediaServerDie) {
+ // MediaServer died, there definitely won't
+ // be a shutdown complete notification after
+ // all.
+
+ // note that we're directly going from
+ // STOPPING->UNINITIALIZED, instead of the
+ // usual STOPPING->INITIALIZED state.
+ setState(UNINITIALIZED);
+
+ (new AMessage)->postReply(mReplyID);
+ }
break;
}
@@ -571,10 +622,44 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
CHECK_EQ(mState, CONFIGURING);
setState(CONFIGURED);
+ // reset input surface flag
+ mHaveInputSurface = false;
+
(new AMessage)->postReply(mReplyID);
break;
}
+ case ACodec::kWhatInputSurfaceCreated:
+ {
+ // response to ACodec::kWhatCreateInputSurface
+ status_t err = NO_ERROR;
+ sp<AMessage> response = new AMessage();
+ if (!msg->findInt32("err", &err)) {
+ sp<RefBase> obj;
+ msg->findObject("input-surface", &obj);
+ CHECK(obj != NULL);
+ response->setObject("input-surface", obj);
+ mHaveInputSurface = true;
+ } else {
+ response->setInt32("err", err);
+ }
+ response->postReply(mReplyID);
+ break;
+ }
+
+ case ACodec::kWhatSignaledInputEOS:
+ {
+ // response to ACodec::kWhatSignalEndOfInputStream
+ sp<AMessage> response = new AMessage();
+ status_t err;
+ if (msg->findInt32("err", &err)) {
+ response->setInt32("err", err);
+ }
+ response->postReply(mReplyID);
+ break;
+ }
+
+
case ACodec::kWhatBuffersAllocated:
{
int32_t portIndex;
@@ -659,8 +744,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
}
mOutputFormat = msg;
- mFlags |= kFlagOutputFormatChanged;
- postActivityNotificationIfPossible();
+
+ if (mFlags & kFlagIsEncoder) {
+ // Before we announce the format change we should
+ // collect codec specific data and amend the output
+ // format as necessary.
+ mFlags |= kFlagGatherCodecSpecificData;
+ } else {
+ mFlags |= kFlagOutputFormatChanged;
+ postActivityNotificationIfPossible();
+ }
break;
}
@@ -730,6 +823,25 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
buffer->meta()->setInt32("omxFlags", omxFlags);
+ if (mFlags & kFlagGatherCodecSpecificData) {
+ // This is the very first output buffer after a
+ // format change was signalled, it'll either contain
+ // the one piece of codec specific data we can expect
+ // or there won't be codec specific data.
+ if (omxFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ status_t err =
+ amendOutputFormatWithCodecSpecificData(buffer);
+
+ if (err != OK) {
+ ALOGE("Codec spit out malformed codec "
+ "specific data!");
+ }
+ }
+
+ mFlags &= ~kFlagGatherCodecSpecificData;
+ mFlags |= kFlagOutputFormatChanged;
+ }
+
if (mFlags & kFlagDequeueOutputPending) {
CHECK(handleDequeueOutputBuffer(mDequeueOutputReplyID));
@@ -873,6 +985,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
if (flags & CONFIGURE_FLAG_ENCODE) {
format->setInt32("encoder", true);
+ mFlags |= kFlagIsEncoder;
}
extractCSD(format);
@@ -881,11 +994,12 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
- case kWhatStart:
+ case kWhatCreateInputSurface:
{
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ // Must be configured, but can't have been started yet.
if (mState != CONFIGURED) {
sp<AMessage> response = new AMessage;
response->setInt32("err", INVALID_OPERATION);
@@ -895,19 +1009,16 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
}
mReplyID = replyID;
- setState(STARTING);
-
- mCodec->initiateStart();
+ mCodec->initiateCreateInputSurface();
break;
}
- case kWhatStop:
+ case kWhatStart:
{
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if (mState != INITIALIZED
- && mState != CONFIGURED && mState != STARTED) {
+ if (mState != CONFIGURED) {
sp<AMessage> response = new AMessage;
response->setInt32("err", INVALID_OPERATION);
@@ -916,31 +1027,53 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
}
mReplyID = replyID;
- setState(STOPPING);
+ setState(STARTING);
- mCodec->initiateShutdown(true /* keepComponentAllocated */);
- returnBuffersToCodec();
+ mCodec->initiateStart();
break;
}
+ case kWhatStop:
case kWhatRelease:
{
+ State targetState =
+ (msg->what() == kWhatStop) ? INITIALIZED : UNINITIALIZED;
+
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
if (mState != INITIALIZED
&& mState != CONFIGURED && mState != STARTED) {
+ // We may be in "UNINITIALIZED" state already without the
+ // client being aware of this if media server died while
+ // we were being stopped. The client would assume that
+ // after stop() returned it is safe to call release(), and it
+ // should be: there is no harm in allowing a release() when
+ // we're already uninitialized.
+ // Similarly stopping a stopped MediaCodec should be benign.
sp<AMessage> response = new AMessage;
- response->setInt32("err", INVALID_OPERATION);
+ response->setInt32(
+ "err",
+ mState == targetState ? OK : INVALID_OPERATION);
response->postReply(replyID);
break;
}
+ if (mFlags & kFlagSawMediaServerDie) {
+ // It's dead, Jim. Don't expect initiateShutdown to yield
+ // any useful results now...
+ setState(UNINITIALIZED);
+ (new AMessage)->postReply(replyID);
+ break;
+ }
+
mReplyID = replyID;
- setState(RELEASING);
+ setState(msg->what() == kWhatStop ? STOPPING : RELEASING);
+
+ mCodec->initiateShutdown(
+ msg->what() == kWhatStop /* keepComponentAllocated */);
- mCodec->initiateShutdown();
returnBuffersToCodec();
break;
}
@@ -950,6 +1083,14 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ if (mHaveInputSurface) {
+ ALOGE("dequeueInputBuffer can't be used with input surface");
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+ response->postReply(replyID);
+ break;
+ }
+
if (handleDequeueInputBuffer(replyID, true /* new request */)) {
break;
}
@@ -1093,6 +1234,24 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSignalEndOfInputStream:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (mState != STARTED || (mFlags & kFlagStickyError)) {
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", INVALID_OPERATION);
+
+ response->postReply(replyID);
+ break;
+ }
+
+ mReplyID = replyID;
+ mCodec->signalEndOfInputStream();
+ break;
+ }
+
case kWhatGetBuffers:
{
uint32_t replyID;
@@ -1203,6 +1362,23 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatSetParameters:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<AMessage> params;
+ CHECK(msg->findMessage("params", &params));
+
+ status_t err = onSetParameters(params);
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+
+ response->postReply(replyID);
+ break;
+ }
+
default:
TRESPASS();
}
@@ -1268,12 +1444,19 @@ void MediaCodec::setState(State newState) {
mFlags &= ~kFlagOutputFormatChanged;
mFlags &= ~kFlagOutputBuffersChanged;
mFlags &= ~kFlagStickyError;
+ mFlags &= ~kFlagIsEncoder;
+ mFlags &= ~kFlagGatherCodecSpecificData;
mActivityNotify.clear();
}
if (newState == UNINITIALIZED) {
mComponentName.clear();
+
+ // The component is gone; mediaserver is probably back up already,
+ // and will certainly be back up by the time we try to instantiate
+ // another component... and the cycle continues.
+ mFlags &= ~kFlagSawMediaServerDie;
}
mState = newState;
@@ -1473,7 +1656,7 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
return -EACCES;
}
- if (render) {
+ if (render && (info->mData == NULL || info->mData->size() != 0)) {
info->mNotify->setInt32("render", true);
if (mSoftRenderer != NULL) {
@@ -1509,7 +1692,7 @@ ssize_t MediaCodec::dequeuePortBuffer(int32_t portIndex) {
}
status_t MediaCodec::setNativeWindow(
- const sp<SurfaceTextureClient> &surfaceTextureClient) {
+ const sp<Surface> &surfaceTextureClient) {
status_t err;
if (mNativeWindow != NULL) {
@@ -1556,4 +1739,59 @@ void MediaCodec::postActivityNotificationIfPossible() {
}
}
+status_t MediaCodec::setParameters(const sp<AMessage> &params) {
+ sp<AMessage> msg = new AMessage(kWhatSetParameters, id());
+ msg->setMessage("params", params);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
+status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
+ mCodec->signalSetParameters(params);
+
+ return OK;
+}
+
+status_t MediaCodec::amendOutputFormatWithCodecSpecificData(
+ const sp<ABuffer> &buffer) {
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+
+ if (!strcasecmp(mime.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)) {
+ // Codec specific data should be SPS and PPS in a single buffer,
+ // each prefixed by a startcode (0x00 0x00 0x00 0x01).
+ // We separate the two and put them into the output format
+ // under the keys "csd-0" and "csd-1".
+
+ unsigned csdIndex = 0;
+
+ const uint8_t *data = buffer->data();
+ size_t size = buffer->size();
+
+ const uint8_t *nalStart;
+ size_t nalSize;
+ while (getNextNALUnit(&data, &size, &nalStart, &nalSize, true) == OK) {
+ sp<ABuffer> csd = new ABuffer(nalSize + 4);
+ memcpy(csd->data(), "\x00\x00\x00\x01", 4);
+ memcpy(csd->data() + 4, nalStart, nalSize);
+
+ mOutputFormat->setBuffer(
+ StringPrintf("csd-%u", csdIndex).c_str(), csd);
+
+ ++csdIndex;
+ }
+
+ if (csdIndex != 2) {
+ return ERROR_MALFORMED;
+ }
+ } else {
+ // For everything else we just stash the codec specific data into
+ // the output format as a single piece of csd under "csd-0".
+ mOutputFormat->setBuffer("csd-0", buffer);
+ }
+
+ return OK;
+}
+
} // namespace android
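
For illustration, a minimal sketch (not part of the change) of the call order
the new encoder input-surface path expects, matching the state checks above:
createInputSurface() is only accepted in the CONFIGURED state and
signalEndOfInputStream() only while STARTED. 'looper' and 'format' are assumed
to be set up by the caller.

    // Hypothetical sketch of the surface-input flow for a video encoder.
    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, "video/avc", true /* encoder */);
    codec->configure(format, NULL /* nativeWindow */, NULL /* crypto */,
                     MediaCodec::CONFIGURE_FLAG_ENCODE);

    sp<IGraphicBufferProducer> producer;
    CHECK_EQ(codec->createInputSurface(&producer), (status_t)OK);
    codec->start();

    // Input now arrives by rendering into the surface backed by 'producer';
    // dequeueInputBuffer() is rejected while an input surface is in use.

    codec->signalEndOfInputStream();  // instead of queueing a BUFFER_FLAG_EOS input buffer
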
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index e7b5903..5d8029c 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -40,6 +40,7 @@ const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw";
const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw";
const char *MEDIA_MIMETYPE_AUDIO_FLAC = "audio/flac";
const char *MEDIA_MIMETYPE_AUDIO_AAC_ADTS = "audio/aac-adts";
+const char *MEDIA_MIMETYPE_AUDIO_MSGSM = "audio/gsm";
const char *MEDIA_MIMETYPE_CONTAINER_MPEG4 = "video/mp4";
const char *MEDIA_MIMETYPE_CONTAINER_WAV = "audio/x-wav";
diff --git a/media/libstagefright/MediaExtractor.cpp b/media/libstagefright/MediaExtractor.cpp
index b18c916..9ab6611 100644
--- a/media/libstagefright/MediaExtractor.cpp
+++ b/media/libstagefright/MediaExtractor.cpp
@@ -21,7 +21,6 @@
#include "include/AMRExtractor.h"
#include "include/MP3Extractor.h"
#include "include/MPEG4Extractor.h"
-#include "include/FragmentedMP4Extractor.h"
#include "include/WAVExtractor.h"
#include "include/OggExtractor.h"
#include "include/MPEG2PSExtractor.h"
@@ -94,12 +93,7 @@ sp<MediaExtractor> MediaExtractor::Create(
MediaExtractor *ret = NULL;
if (!strcasecmp(mime, MEDIA_MIMETYPE_CONTAINER_MPEG4)
|| !strcasecmp(mime, "audio/mp4")) {
- int fragmented = 0;
- if (meta != NULL && meta->findInt32("fragmented", &fragmented) && fragmented) {
- ret = new FragmentedMP4Extractor(source);
- } else {
- ret = new MPEG4Extractor(source);
- }
+ ret = new MPEG4Extractor(source);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
ret = new MP3Extractor(source, meta);
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
new file mode 100644
index 0000000..94ce5de
--- /dev/null
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -0,0 +1,173 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaMuxer"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaMuxer.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaAdapter.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MPEG4Writer.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+MediaMuxer::MediaMuxer(const char *path, OutputFormat format)
+ : mState(UNINITIALIZED) {
+ if (format == OUTPUT_FORMAT_MPEG_4) {
+ mWriter = new MPEG4Writer(path);
+ mFileMeta = new MetaData;
+ mState = INITIALIZED;
+ }
+
+}
+
+MediaMuxer::MediaMuxer(int fd, OutputFormat format)
+ : mState(UNINITIALIZED) {
+ if (format == OUTPUT_FORMAT_MPEG_4) {
+ mWriter = new MPEG4Writer(fd);
+ mFileMeta = new MetaData;
+ mState = INITIALIZED;
+ }
+}
+
+MediaMuxer::~MediaMuxer() {
+ Mutex::Autolock autoLock(mMuxerLock);
+
+ // Clean up all the internal resources.
+ mFileMeta.clear();
+ mWriter.clear();
+ mTrackList.clear();
+}
+
+ssize_t MediaMuxer::addTrack(const sp<AMessage> &format) {
+ Mutex::Autolock autoLock(mMuxerLock);
+
+ if (format.get() == NULL) {
+ ALOGE("addTrack() get a null format");
+ return -EINVAL;
+ }
+
+ if (mState != INITIALIZED) {
+ ALOGE("addTrack() must be called after constructor and before start().");
+ return INVALID_OPERATION;
+ }
+
+ sp<MetaData> trackMeta = new MetaData;
+ convertMessageToMetaData(format, trackMeta);
+
+ sp<MediaAdapter> newTrack = new MediaAdapter(trackMeta);
+ status_t result = mWriter->addSource(newTrack);
+ if (result == OK) {
+ return mTrackList.add(newTrack);
+ }
+ return -1;
+}
+
+status_t MediaMuxer::setOrientationHint(int degrees) {
+ Mutex::Autolock autoLock(mMuxerLock);
+ if (mState != INITIALIZED) {
+ ALOGE("setOrientationHint() must be called before start().");
+ return INVALID_OPERATION;
+ }
+
+ if (degrees != 0 && degrees != 90 && degrees != 180 && degrees != 270) {
+ ALOGE("setOrientationHint() get invalid degrees");
+ return -EINVAL;
+ }
+
+ mFileMeta->setInt32(kKeyRotation, degrees);
+ return OK;
+}
+
+status_t MediaMuxer::start() {
+ Mutex::Autolock autoLock(mMuxerLock);
+ if (mState == INITIALIZED) {
+ mState = STARTED;
+ mFileMeta->setInt32(kKeyRealTimeRecording, false);
+ return mWriter->start(mFileMeta.get());
+ } else {
+ ALOGE("start() is called in invalid state %d", mState);
+ return INVALID_OPERATION;
+ }
+}
+
+status_t MediaMuxer::stop() {
+ Mutex::Autolock autoLock(mMuxerLock);
+
+ if (mState == STARTED) {
+ mState = STOPPED;
+ for (size_t i = 0; i < mTrackList.size(); i++) {
+ if (mTrackList[i]->stop() != OK) {
+ return INVALID_OPERATION;
+ }
+ }
+ return mWriter->stop();
+ } else {
+ ALOGE("stop() is called in invalid state %d", mState);
+ return INVALID_OPERATION;
+ }
+}
+
+status_t MediaMuxer::writeSampleData(const sp<ABuffer> &buffer, size_t trackIndex,
+ int64_t timeUs, uint32_t flags) {
+ Mutex::Autolock autoLock(mMuxerLock);
+
+ if (buffer.get() == NULL) {
+ ALOGE("WriteSampleData() get an NULL buffer.");
+ return -EINVAL;
+ }
+
+ if (mState != STARTED) {
+ ALOGE("WriteSampleData() is called in invalid state %d", mState);
+ return INVALID_OPERATION;
+ }
+
+ if (trackIndex >= mTrackList.size()) {
+ ALOGE("WriteSampleData() get an invalid index %d", trackIndex);
+ return -EINVAL;
+ }
+
+ MediaBuffer* mediaBuffer = new MediaBuffer(buffer);
+
+ mediaBuffer->add_ref(); // Released in MediaAdapter::signalBufferReturned().
+ mediaBuffer->set_range(buffer->offset(), buffer->size());
+
+ sp<MetaData> sampleMetaData = mediaBuffer->meta_data();
+ sampleMetaData->setInt64(kKeyTime, timeUs);
+ // Just set the kKeyDecodingTime as the presentation time for now.
+ sampleMetaData->setInt64(kKeyDecodingTime, timeUs);
+
+ if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
+ sampleMetaData->setInt32(kKeyIsSyncFrame, true);
+ }
+
+ sp<MediaAdapter> currentTrack = mTrackList[trackIndex];
+ // This pushBuffer will wait until the mediaBuffer is consumed.
+ return currentTrack->pushBuffer(mediaBuffer);
+}
+
+} // namespace android
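
For illustration, a minimal sketch (not part of the change) of the muxer
lifecycle implied by the state checks above: addTrack() and
setOrientationHint() only while INITIALIZED, writeSampleData() only while
STARTED. 'fd', 'videoFormat' and the per-sample arguments are assumed to come
from the caller, e.g. a MediaCodec encoding loop.

    // Hypothetical sketch: mux one encoded sample into an MP4 file.
    static void muxOneSample(int fd, const sp<AMessage> &videoFormat,
                             const sp<ABuffer> &sample, int64_t timeUs, bool sync) {
        sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
        ssize_t track = muxer->addTrack(videoFormat);  // must precede start()
        muxer->setOrientationHint(90);                 // optional, INITIALIZED state only
        CHECK_EQ(muxer->start(), (status_t)OK);
        muxer->writeSampleData(sample, track, timeUs,  // blocks until the sample is consumed
                sync ? MediaCodec::BUFFER_FLAG_SYNCFRAME : 0);
        muxer->stop();
    }
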
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 404fa94..7bc7da2 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -228,6 +228,34 @@ status_t NuMediaExtractor::getTrackFormat(
return convertMetaDataToMessage(meta, format);
}
+status_t NuMediaExtractor::getFileFormat(sp<AMessage> *format) const {
+ Mutex::Autolock autoLock(mLock);
+
+ *format = NULL;
+
+ if (mImpl == NULL) {
+ return -EINVAL;
+ }
+
+ sp<MetaData> meta = mImpl->getMetaData();
+
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+ *format = new AMessage();
+ (*format)->setString("mime", mime);
+
+ uint32_t type;
+ const void *pssh;
+ size_t psshsize;
+ if (meta->findData(kKeyPssh, &type, &pssh, &psshsize)) {
+ sp<ABuffer> buf = new ABuffer(psshsize);
+ memcpy(buf->data(), pssh, psshsize);
+ (*format)->setBuffer("pssh", buf);
+ }
+
+ return OK;
+}
+
status_t NuMediaExtractor::selectTrack(size_t index) {
Mutex::Autolock autoLock(mLock);
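
For illustration, a minimal sketch (not part of the change) of consuming the
new getFileFormat() result: "mime" is always present, "pssh" only when the
container carried one. 'extractor' is assumed to be an already-configured
NuMediaExtractor.

    // Hypothetical sketch: inspect the container-level format.
    sp<AMessage> fileFormat;
    if (extractor->getFileFormat(&fileFormat) == OK) {
        AString mime;
        CHECK(fileFormat->findString("mime", &mime));

        sp<ABuffer> pssh;
        if (fileFormat->findBuffer("pssh", &pssh)) {
            ALOGV("container %s carries %zu bytes of pssh data",
                  mime.c_str(), pssh->size());
        }
    }
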
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 7cdb793..1822f07 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -32,7 +32,7 @@ struct MuxOMX : public IOMX {
MuxOMX(const sp<IOMX> &remoteOMX);
virtual ~MuxOMX();
- virtual IBinder *onAsBinder() { return NULL; }
+ virtual IBinder *onAsBinder() { return mRemoteOMX->asBinder().get(); }
virtual bool livesLocally(node_id node, pid_t pid);
@@ -83,6 +83,12 @@ struct MuxOMX : public IOMX {
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+ virtual status_t createInputSurface(
+ node_id node, OMX_U32 port_index,
+ sp<IGraphicBufferProducer> *bufferProducer);
+
+ virtual status_t signalEndOfInputStream(node_id node);
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data);
@@ -274,6 +280,18 @@ status_t MuxOMX::useGraphicBuffer(
node, port_index, graphicBuffer, buffer);
}
+status_t MuxOMX::createInputSurface(
+ node_id node, OMX_U32 port_index,
+ sp<IGraphicBufferProducer> *bufferProducer) {
+ status_t err = getOMX(node)->createInputSurface(
+ node, port_index, bufferProducer);
+ return err;
+}
+
+status_t MuxOMX::signalEndOfInputStream(node_id node) {
+ return getOMX(node)->signalEndOfInputStream(node);
+}
+
status_t MuxOMX::allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 70de174..9d349a1 100755..100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -522,6 +522,17 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta) {
CODEC_LOGE("setAACFormat() failed (err = %d)", err);
return err;
}
+ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_MPEG, mMIME)) {
+ int32_t numChannels, sampleRate;
+ if (meta->findInt32(kKeyChannelCount, &numChannels)
+ && meta->findInt32(kKeySampleRate, &sampleRate)) {
+ // Since we did not always check for these, leave them optional
+ // and have the decoder figure it all out.
+ setRawAudioFormat(
+ mIsEncoder ? kPortIndexInput : kPortIndexOutput,
+ sampleRate,
+ numChannels);
+ }
} else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_ALAW, mMIME)
|| !strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_MLAW, mMIME)) {
// These are PCM-like formats with a fixed sample rate but
@@ -1213,13 +1224,6 @@ status_t OMXCodec::setVideoOutputFormat(
CHECK_EQ(err, (status_t)OK);
CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
- CHECK(format.eColorFormat == OMX_COLOR_FormatYUV420Planar
- || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
- || format.eColorFormat == OMX_COLOR_FormatCbYCrY
- || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
- || format.eColorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar
- || format.eColorFormat == OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka);
-
int32_t colorFormat;
if (meta->findInt32(kKeyColorFormat, &colorFormat)
&& colorFormat != OMX_COLOR_FormatUnused
@@ -1390,6 +1394,8 @@ void OMXCodec::setComponentRole(
"audio_decoder.raw", "audio_encoder.raw" },
{ MEDIA_MIMETYPE_AUDIO_FLAC,
"audio_decoder.flac", "audio_encoder.flac" },
+ { MEDIA_MIMETYPE_AUDIO_MSGSM,
+ "audio_decoder.gsm", "audio_encoder.gsm" },
};
static const size_t kNumMimeToRole =
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index 773854f..773854f 100755..100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index bccffd8..af8186c 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -42,7 +42,7 @@ static bool FileHasAcceptableExtension(const char *extension) {
".mpeg", ".ogg", ".mid", ".smf", ".imy", ".wma", ".aac",
".wav", ".amr", ".midi", ".xmf", ".rtttl", ".rtx", ".ota",
".mkv", ".mka", ".webm", ".ts", ".fl", ".flac", ".mxmf",
- ".avi", ".mpeg", ".mpg", ".mpga"
+ ".avi", ".mpeg", ".mpg", ".awb", ".mpga"
};
static const size_t kNumValidExtensions =
sizeof(kValidExtensions) / sizeof(kValidExtensions[0]);
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index 3c002fc..409038a 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -298,6 +298,10 @@ status_t SurfaceMediaSource::read( MediaBuffer **buffer,
// wait for a buffer to be queued
mFrameAvailableCondition.wait(mMutex);
} else if (err == OK) {
+ err = item.mFence->waitForever("SurfaceMediaSource::read");
+ if (err) {
+ ALOGW("read: failed to wait for buffer fence: %d", err);
+ }
// First time seeing the buffer? Added it to the SMS slot
if (item.mGraphicBuffer != NULL) {
diff --git a/media/libstagefright/ThrottledSource.cpp b/media/libstagefright/ThrottledSource.cpp
index 348a9d3..7496752 100644
--- a/media/libstagefright/ThrottledSource.cpp
+++ b/media/libstagefright/ThrottledSource.cpp
@@ -31,10 +31,6 @@ ThrottledSource::ThrottledSource(
CHECK(mBandwidthLimitBytesPerSecond > 0);
}
-status_t ThrottledSource::initCheck() const {
- return mSource->initCheck();
-}
-
ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) {
Mutex::Autolock autoLock(mLock);
@@ -62,17 +58,9 @@ ssize_t ThrottledSource::readAt(off64_t offset, void *data, size_t size) {
if (whenUs > nowUs) {
usleep(whenUs - nowUs);
}
-
return n;
}
-status_t ThrottledSource::getSize(off64_t *size) {
- return mSource->getSize(size);
-}
-
-uint32_t ThrottledSource::flags() {
- return mSource->flags();
-}
} // namespace android
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 74e9222..b0df379 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -21,7 +21,7 @@
#include "include/ESDS.h"
#include <arpa/inet.h>
-
+#include <cutils/properties.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -78,6 +78,11 @@ status_t convertMetaDataToMessage(
msg->setInt64("durationUs", durationUs);
}
+ int32_t isSync;
+ if (meta->findInt32(kKeyIsSyncFrame, &isSync) && isSync != 0) {
+ msg->setInt32("is-sync-frame", 1);
+ }
+
if (!strncasecmp("video/", mime, 6)) {
int32_t width, height;
CHECK(meta->findInt32(kKeyWidth, &width));
@@ -85,6 +90,13 @@ status_t convertMetaDataToMessage(
msg->setInt32("width", width);
msg->setInt32("height", height);
+
+ int32_t sarWidth, sarHeight;
+ if (meta->findInt32(kKeySARWidth, &sarWidth)
+ && meta->findInt32(kKeySARHeight, &sarHeight)) {
+ msg->setInt32("sar-width", sarWidth);
+ msg->setInt32("sar-height", sarHeight);
+ }
} else if (!strncasecmp("audio/", mime, 6)) {
int32_t numChannels, sampleRate;
CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
@@ -363,6 +375,11 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
meta->setInt64(kKeyDuration, durationUs);
}
+ int32_t isSync;
+ if (msg->findInt32("is-sync-frame", &isSync) && isSync != 0) {
+ meta->setInt32(kKeyIsSyncFrame, 1);
+ }
+
if (mime.startsWith("video/")) {
int32_t width;
int32_t height;
@@ -372,6 +389,13 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
} else {
ALOGW("did not find width and/or height");
}
+
+ int32_t sarWidth, sarHeight;
+ if (msg->findInt32("sar-width", &sarWidth)
+ && msg->findInt32("sar-height", &sarHeight)) {
+ meta->setInt32(kKeySARWidth, sarWidth);
+ meta->setInt32(kKeySARHeight, sarHeight);
+ }
} else if (mime.startsWith("audio/")) {
int32_t numChannels;
if (msg->findInt32("channel-count", &numChannels)) {
@@ -431,6 +455,21 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
#endif
}
+AString MakeUserAgent() {
+ AString ua;
+ ua.append("stagefright/1.2 (Linux;Android ");
+
+#if (PROPERTY_VALUE_MAX < 8)
+#error "PROPERTY_VALUE_MAX must be at least 8"
+#endif
+
+ char value[PROPERTY_VALUE_MAX];
+ property_get("ro.build.version.release", value, "Unknown");
+ ua.append(value);
+ ua.append(")");
+
+ return ua;
+}
} // namespace android
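
For illustration (not part of the change): on a device whose
ro.build.version.release is, say, "4.2.2", the new shared helper yields

    stagefright/1.2 (Linux;Android 4.2.2)

which is the same string SfRequestContext used to assemble by itself; the
chromium_http change further down now calls MakeUserAgent() instead.
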
diff --git a/media/libstagefright/WAVExtractor.cpp b/media/libstagefright/WAVExtractor.cpp
index 2a7f628..22af6fb 100644
--- a/media/libstagefright/WAVExtractor.cpp
+++ b/media/libstagefright/WAVExtractor.cpp
@@ -38,6 +38,7 @@ enum {
WAVE_FORMAT_PCM = 0x0001,
WAVE_FORMAT_ALAW = 0x0006,
WAVE_FORMAT_MULAW = 0x0007,
+ WAVE_FORMAT_MSGSM = 0x0031,
WAVE_FORMAT_EXTENSIBLE = 0xFFFE
};
@@ -178,6 +179,7 @@ status_t WAVExtractor::init() {
if (mWaveFormat != WAVE_FORMAT_PCM
&& mWaveFormat != WAVE_FORMAT_ALAW
&& mWaveFormat != WAVE_FORMAT_MULAW
+ && mWaveFormat != WAVE_FORMAT_MSGSM
&& mWaveFormat != WAVE_FORMAT_EXTENSIBLE) {
return ERROR_UNSUPPORTED;
}
@@ -216,6 +218,10 @@ status_t WAVExtractor::init() {
&& mBitsPerSample != 24) {
return ERROR_UNSUPPORTED;
}
+ } else if (mWaveFormat == WAVE_FORMAT_MSGSM) {
+ if (mBitsPerSample != 0) {
+ return ERROR_UNSUPPORTED;
+ }
} else {
CHECK(mWaveFormat == WAVE_FORMAT_MULAW
|| mWaveFormat == WAVE_FORMAT_ALAW);
@@ -283,6 +289,10 @@ status_t WAVExtractor::init() {
mTrackMeta->setCString(
kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_G711_ALAW);
break;
+ case WAVE_FORMAT_MSGSM:
+ mTrackMeta->setCString(
+ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MSGSM);
+ break;
default:
CHECK_EQ(mWaveFormat, (uint16_t)WAVE_FORMAT_MULAW);
mTrackMeta->setCString(
@@ -294,11 +304,17 @@ status_t WAVExtractor::init() {
mTrackMeta->setInt32(kKeyChannelMask, mChannelMask);
mTrackMeta->setInt32(kKeySampleRate, mSampleRate);
- size_t bytesPerSample = mBitsPerSample >> 3;
-
- int64_t durationUs =
- 1000000LL * (mDataSize / (mNumChannels * bytesPerSample))
- / mSampleRate;
+ int64_t durationUs = 0;
+ if (mWaveFormat == WAVE_FORMAT_MSGSM) {
+ // 65 bytes decode to 320 8kHz samples
+ durationUs =
+ 1000000LL * (mDataSize / 65 * 320) / 8000;
+ } else {
+ size_t bytesPerSample = mBitsPerSample >> 3;
+ durationUs =
+ 1000000LL * (mDataSize / (mNumChannels * bytesPerSample))
+ / mSampleRate;
+ }
mTrackMeta->setInt64(kKeyDuration, durationUs);
@@ -388,7 +404,16 @@ status_t WAVSource::read(
int64_t seekTimeUs;
ReadOptions::SeekMode mode;
if (options != NULL && options->getSeekTo(&seekTimeUs, &mode)) {
- int64_t pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3);
+ int64_t pos = 0;
+
+ if (mWaveFormat == WAVE_FORMAT_MSGSM) {
+ // 65 bytes decode to 320 8kHz samples
+ int64_t samplenumber = (seekTimeUs * mSampleRate) / 1000000;
+ int64_t framenumber = samplenumber / 320;
+ pos = framenumber * 65;
+ } else {
+ pos = (seekTimeUs * mSampleRate) / 1000000 * mNumChannels * (mBitsPerSample >> 3);
+ }
if (pos > mSize) {
pos = mSize;
}
@@ -414,6 +439,15 @@ status_t WAVSource::read(
maxBytesToRead = maxBytesAvailable;
}
+ if (mWaveFormat == WAVE_FORMAT_MSGSM) {
+ // Microsoft packs 2 frames into 65 bytes, rather than using separate 33-byte frames,
+ // so read multiples of 65, and use smaller buffers to account for ~10:1 expansion ratio
+ if (maxBytesToRead > 1024) {
+ maxBytesToRead = 1024;
+ }
+ maxBytesToRead = (maxBytesToRead / 65) * 65;
+ }
+
ssize_t n = mDataSource->readAt(
mCurrentPos, buffer->data(),
maxBytesToRead);
@@ -470,12 +504,17 @@ status_t WAVSource::read(
}
}
- size_t bytesPerSample = mBitsPerSample >> 3;
+ int64_t timeStampUs = 0;
+
+ if (mWaveFormat == WAVE_FORMAT_MSGSM) {
+ timeStampUs = 1000000LL * (mCurrentPos - mOffset) * 320 / 65 / mSampleRate;
+ } else {
+ size_t bytesPerSample = mBitsPerSample >> 3;
+ timeStampUs = 1000000LL * (mCurrentPos - mOffset)
+ / (mNumChannels * bytesPerSample) / mSampleRate;
+ }
- buffer->meta_data()->setInt64(
- kKeyTime,
- 1000000LL * (mCurrentPos - mOffset)
- / (mNumChannels * bytesPerSample) / mSampleRate);
+ buffer->meta_data()->setInt64(kKeyTime, timeStampUs);
buffer->meta_data()->setInt32(kKeyIsSyncFrame, 1);
mCurrentPos += n;
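
For illustration, the MS-GSM bookkeeping introduced above in one place: each
65-byte block decodes to 320 samples at 8 kHz, i.e. 40 ms per block, so the
duration, seek and timestamp math all scale by 320/65 (hypothetical helper,
not part of the change):

    // Hypothetical sketch: duration of an MS-GSM data chunk.
    static int64_t msgsmDurationUs(off64_t dataSize) {
        // e.g. 65000 bytes -> 1000 blocks -> 320000 samples -> 40 seconds
        return 1000000LL * (dataSize / 65 * 320) / 8000;
    }
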
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index a141752..b822868 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -22,6 +22,7 @@
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -41,7 +42,9 @@ unsigned parseUE(ABitReader *br) {
// Determine video dimensions from the sequence parameterset.
void FindAVCDimensions(
- const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height) {
+ const sp<ABuffer> &seqParamSet,
+ int32_t *width, int32_t *height,
+ int32_t *sarWidth, int32_t *sarHeight) {
ABitReader br(seqParamSet->data() + 1, seqParamSet->size() - 1);
unsigned profile_idc = br.getBits(8);
@@ -129,6 +132,48 @@ void FindAVCDimensions(
*height -=
(frame_crop_top_offset + frame_crop_bottom_offset) * cropUnitY;
}
+
+ if (sarWidth != NULL) {
+ *sarWidth = 0;
+ }
+
+ if (sarHeight != NULL) {
+ *sarHeight = 0;
+ }
+
+ if (br.getBits(1)) { // vui_parameters_present_flag
+ unsigned sar_width = 0, sar_height = 0;
+
+ if (br.getBits(1)) { // aspect_ratio_info_present_flag
+ unsigned aspect_ratio_idc = br.getBits(8);
+
+ if (aspect_ratio_idc == 255 /* extendedSAR */) {
+ sar_width = br.getBits(16);
+ sar_height = br.getBits(16);
+ } else if (aspect_ratio_idc > 0 && aspect_ratio_idc < 14) {
+ static const int32_t kFixedSARWidth[] = {
+ 1, 12, 10, 16, 40, 24, 20, 32, 80, 18, 15, 64, 160
+ };
+
+ static const int32_t kFixedSARHeight[] = {
+ 1, 11, 11, 11, 33, 11, 11, 11, 33, 11, 11, 33, 99
+ };
+
+ sar_width = kFixedSARWidth[aspect_ratio_idc - 1];
+ sar_height = kFixedSARHeight[aspect_ratio_idc - 1];
+ }
+ }
+
+ ALOGV("sample aspect ratio = %u : %u", sar_width, sar_height);
+
+ if (sarWidth != NULL) {
+ *sarWidth = sar_width;
+ }
+
+ if (sarHeight != NULL) {
+ *sarHeight = sar_height;
+ }
+ }
}
status_t getNextNALUnit(
@@ -254,7 +299,9 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
}
int32_t width, height;
- FindAVCDimensions(seqParamSet, &width, &height);
+ int32_t sarWidth, sarHeight;
+ FindAVCDimensions(
+ seqParamSet, &width, &height, &sarWidth, &sarHeight);
size_t stopOffset;
sp<ABuffer> picParamSet = FindNAL(data, size, 8, &stopOffset);
@@ -301,8 +348,29 @@ sp<MetaData> MakeAVCCodecSpecificData(const sp<ABuffer> &accessUnit) {
meta->setInt32(kKeyWidth, width);
meta->setInt32(kKeyHeight, height);
- ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
- width, height, AVCProfileToString(profile), level / 10, level % 10);
+ if (sarWidth > 1 || sarHeight > 1) {
+ // We treat 0:0 (unspecified) as 1:1.
+
+ meta->setInt32(kKeySARWidth, sarWidth);
+ meta->setInt32(kKeySARHeight, sarHeight);
+
+ ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d) "
+ "SAR %d : %d",
+ width,
+ height,
+ AVCProfileToString(profile),
+ level / 10,
+ level % 10,
+ sarWidth,
+ sarHeight);
+ } else {
+ ALOGI("found AVC codec config (%d x %d, %s-profile level %d.%d)",
+ width,
+ height,
+ AVCProfileToString(profile),
+ level / 10,
+ level % 10);
+ }
return meta;
}
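
For illustration, a minimal sketch (not part of the change) of how a consumer
of the new kKeySARWidth/kKeySARHeight keys might turn the coded size into a
display size, assuming the usual convention of scaling the horizontal
dimension by the sample aspect ratio; 'meta' is an output of
MakeAVCCodecSpecificData() above.

    // Hypothetical sketch: derive a display width from coded size plus SAR.
    int32_t width, height, sarWidth, sarHeight;
    if (meta->findInt32(kKeyWidth, &width)
            && meta->findInt32(kKeyHeight, &height)
            && meta->findInt32(kKeySARWidth, &sarWidth)
            && meta->findInt32(kKeySARHeight, &sarHeight)
            && sarWidth > 0 && sarHeight > 0) {
        int32_t displayWidth = (width * sarWidth) / sarHeight;
        ALOGV("coded %d x %d, display %d x %d", width, height, displayWidth, height);
    }
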
diff --git a/media/libstagefright/chromium_http/Android.mk b/media/libstagefright/chromium_http/Android.mk
index 2c6d84c..f26f386 100644
--- a/media/libstagefright/chromium_http/Android.mk
+++ b/media/libstagefright/chromium_http/Android.mk
@@ -22,6 +22,7 @@ LOCAL_SHARED_LIBRARIES += \
libchromium_net \
libutils \
libcutils \
+ liblog \
libstagefright_foundation \
libstagefright \
libdrmframework
diff --git a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
index 91ce175..a862d8b 100644
--- a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
+++ b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
@@ -340,5 +340,11 @@ status_t ChromiumHTTPDataSource::reconnectAtOffset(off64_t offset) {
return err;
}
+// static
+status_t ChromiumHTTPDataSource::UpdateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList) {
+ return SfDelegate::UpdateProxyConfig(host, port, exclusionList);
+}
+
} // namespace android
diff --git a/media/libstagefright/chromium_http/chromium_http_stub.cpp b/media/libstagefright/chromium_http/chromium_http_stub.cpp
index 560a61f..289f6de 100644
--- a/media/libstagefright/chromium_http/chromium_http_stub.cpp
+++ b/media/libstagefright/chromium_http/chromium_http_stub.cpp
@@ -26,6 +26,11 @@ HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
return new ChromiumHTTPDataSource(flags);
}
+status_t UpdateChromiumHTTPDataSourceProxyConfig(
+ const char *host, int32_t port, const char *exclusionList) {
+ return ChromiumHTTPDataSource::UpdateProxyConfig(host, port, exclusionList);
+}
+
DataSource *createDataUriSource(const char *uri) {
return new DataUriSource(uri);
}
diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp
index 13ae3df..741cb1d 100644
--- a/media/libstagefright/chromium_http/support.cpp
+++ b/media/libstagefright/chromium_http/support.cpp
@@ -36,15 +36,15 @@
#include "include/ChromiumHTTPDataSource.h"
#include <cutils/log.h>
-#include <cutils/properties.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
#include <string>
namespace android {
static Mutex gNetworkThreadLock;
static base::Thread *gNetworkThread = NULL;
-static scoped_refptr<net::URLRequestContext> gReqContext;
+static scoped_refptr<SfRequestContext> gReqContext;
static scoped_ptr<net::NetworkChangeNotifier> gNetworkChangeNotifier;
bool logMessageHandler(
@@ -156,19 +156,7 @@ net::NetLog::LogLevel SfNetLog::GetLogLevel() const {
////////////////////////////////////////////////////////////////////////////////
SfRequestContext::SfRequestContext() {
- AString ua;
- ua.append("stagefright/1.2 (Linux;Android ");
-
-#if (PROPERTY_VALUE_MAX < 8)
-#error "PROPERTY_VALUE_MAX must be at least 8"
-#endif
-
- char value[PROPERTY_VALUE_MAX];
- property_get("ro.build.version.release", value, "Unknown");
- ua.append(value);
- ua.append(")");
-
- mUserAgent = ua.c_str();
+ mUserAgent = MakeUserAgent().c_str();
set_net_log(new SfNetLog());
@@ -181,8 +169,10 @@ SfRequestContext::SfRequestContext() {
set_ssl_config_service(
net::SSLConfigService::CreateSystemSSLConfigService());
+ mProxyConfigService = new net::ProxyConfigServiceAndroid;
+
set_proxy_service(net::ProxyService::CreateWithoutProxyResolver(
- new net::ProxyConfigServiceAndroid, net_log()));
+ mProxyConfigService, net_log()));
set_http_transaction_factory(new net::HttpCache(
host_resolver(),
@@ -203,6 +193,31 @@ const std::string &SfRequestContext::GetUserAgent(const GURL &url) const {
return mUserAgent;
}
+status_t SfRequestContext::updateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList) {
+ Mutex::Autolock autoLock(mProxyConfigLock);
+
+ if (host == NULL || *host == '\0') {
+ MY_LOGV("updateProxyConfig NULL");
+
+ std::string proxy;
+ std::string exList;
+ mProxyConfigService->UpdateProxySettings(proxy, exList);
+ } else {
+#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
+ LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG,
+ "updateProxyConfig %s:%d, exclude '%s'",
+ host, port, exclusionList);
+#endif
+
+ std::string proxy = StringPrintf("%s:%d", host, port).c_str();
+ std::string exList = exclusionList;
+ mProxyConfigService->UpdateProxySettings(proxy, exList);
+ }
+
+ return OK;
+}
+
////////////////////////////////////////////////////////////////////////////////
SfNetworkLibrary::SfNetworkLibrary() {}
@@ -231,6 +246,14 @@ SfDelegate::~SfDelegate() {
CHECK(mURLRequest == NULL);
}
+// static
+status_t SfDelegate::UpdateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList) {
+ InitializeNetworkThreadIfNecessary();
+
+ return gReqContext->updateProxyConfig(host, port, exclusionList);
+}
+
void SfDelegate::setOwner(ChromiumHTTPDataSource *owner) {
mOwner = owner;
}
diff --git a/media/libstagefright/chromium_http/support.h b/media/libstagefright/chromium_http/support.h
index d2c5bc0..975a1d3 100644
--- a/media/libstagefright/chromium_http/support.h
+++ b/media/libstagefright/chromium_http/support.h
@@ -27,8 +27,13 @@
#include "net/base/io_buffer.h"
#include <utils/KeyedVector.h>
+#include <utils/Mutex.h>
#include <utils/String8.h>
+namespace net {
+ struct ProxyConfigServiceAndroid;
+};
+
namespace android {
struct SfNetLog : public net::NetLog {
@@ -55,8 +60,14 @@ struct SfRequestContext : public net::URLRequestContext {
virtual const std::string &GetUserAgent(const GURL &url) const;
+ status_t updateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList);
+
private:
+ Mutex mProxyConfigLock;
+
std::string mUserAgent;
+ net::ProxyConfigServiceAndroid *mProxyConfigService;
DISALLOW_EVIL_CONSTRUCTORS(SfRequestContext);
};
@@ -120,6 +131,9 @@ struct SfDelegate : public net::URLRequest::Delegate {
virtual void OnReadCompleted(net::URLRequest *request, int bytes_read);
+ static status_t UpdateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList);
+
private:
typedef Delegate inherited;
diff --git a/media/libstagefright/chromium_http_stub.cpp b/media/libstagefright/chromium_http_stub.cpp
index cbd8796..ed8a878 100644
--- a/media/libstagefright/chromium_http_stub.cpp
+++ b/media/libstagefright/chromium_http_stub.cpp
@@ -30,6 +30,9 @@ static Mutex gLibMutex;
HTTPBase *(*gLib_createChromiumHTTPDataSource)(uint32_t flags);
DataSource *(*gLib_createDataUriSource)(const char *uri);
+status_t (*gLib_UpdateChromiumHTTPDataSourceProxyConfig)(
+ const char *host, int32_t port, const char *exclusionList);
+
static bool load_libstagefright_chromium_http() {
Mutex::Autolock autoLock(gLibMutex);
void *sym;
@@ -59,6 +62,14 @@ static bool load_libstagefright_chromium_http() {
}
gLib_createDataUriSource = (DataSource *(*)(const char *))sym;
+ sym = dlsym(gHandle, "UpdateChromiumHTTPDataSourceProxyConfig");
+ if (sym == NULL) {
+ gHandle = NULL;
+ return false;
+ }
+ gLib_UpdateChromiumHTTPDataSourceProxyConfig =
+ (status_t (*)(const char *, int32_t, const char *))sym;
+
return true;
}
@@ -70,6 +81,16 @@ HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
return gLib_createChromiumHTTPDataSource(flags);
}
+status_t UpdateChromiumHTTPDataSourceProxyConfig(
+ const char *host, int32_t port, const char *exclusionList) {
+ if (!load_libstagefright_chromium_http()) {
+ return INVALID_OPERATION;
+ }
+
+ return gLib_UpdateChromiumHTTPDataSourceProxyConfig(
+ host, port, exclusionList);
+}
+
DataSource *createDataUriSource(const char *uri) {
if (!load_libstagefright_chromium_http()) {
return NULL;
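
For illustration, a minimal sketch (not part of the change) of driving the new
stub entry point; like createChromiumHTTPDataSource() it is resolved with
dlsym() on first use and forwarded into libstagefright_chromium_http. The
host, port and exclusion list below are made-up values.

    // Hypothetical sketch: push updated proxy settings into the HTTP layer.
    status_t err = UpdateChromiumHTTPDataSourceProxyConfig(
            "proxy.example.com", 8080, "localhost,127.0.0.1" /* exclusion list */);
    if (err != OK) {
        ALOGW("failed to update proxy config (err=%d)", err);
    }
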
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index 4dc38a8..ffa64f9 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -20,7 +20,7 @@ LOCAL_CFLAGS :=
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
- libstagefright_omx libstagefright_foundation libutils libcutils
+ libstagefright_omx libstagefright_foundation libutils libcutils liblog
LOCAL_MODULE := libstagefright_soft_aacdec
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index d88813e..cf50dc9 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -29,6 +29,7 @@
#define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */
+#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum DRC boost factor for mobile conf */
#define MAX_CHANNEL_COUNT 6 /* maximum number of audio channels that can be decoded */
// names of properties that can be used to override the default DRC settings
#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
@@ -118,7 +119,7 @@ status_t SoftAAC2::initDecoder() {
status = OK;
}
}
- mIsFirst = true;
+ mDecoderHasData = false;
// for streams that contain metadata, use the mobile profile DRC settings unless overridden
// by platform properties:
@@ -146,6 +147,8 @@ status_t SoftAAC2::initDecoder() {
unsigned boost = atoi(value);
ALOGV("AAC decoder using AAC_DRC_BOOST_FACTOR of %d", boost);
aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, boost);
+ } else {
+ aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
}
return status;
@@ -327,6 +330,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
return;
}
+
inQueue.erase(inQueue.begin());
info->mOwnedByUs = false;
notifyEmptyBufferDone(header);
@@ -358,7 +362,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
inInfo->mOwnedByUs = false;
notifyEmptyBufferDone(inHeader);
- if (!mIsFirst) {
+ if (mDecoderHasData) {
// flush out the decoder's delayed data by calling DecodeFrame
// one more time, with the AACDEC_FLUSH flag set
INT_PCM *outBuffer =
@@ -370,6 +374,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
outBuffer,
outHeader->nAllocLen,
AACDEC_FLUSH);
+ mDecoderHasData = false;
if (decoderErr != AAC_DEC_OK) {
mSignalledError = true;
@@ -385,9 +390,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
* sizeof(int16_t)
* mStreamInfo->numChannels;
} else {
- // Since we never discarded frames from the start, we won't have
- // to add any padding at the end either.
-
+ // we never submitted any data to the decoder, so there's nothing to flush out
outHeader->nFilledLen = 0;
}
@@ -473,6 +476,7 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
inBuffer,
inBufferLength,
bytesValid);
+ mDecoderHasData = true;
decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
outBuffer,
@@ -484,6 +488,35 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
}
}
+ size_t numOutBytes =
+ mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+
+ if (decoderErr == AAC_DEC_OK) {
+ UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
+ inHeader->nFilledLen -= inBufferUsedLength;
+ inHeader->nOffset += inBufferUsedLength;
+ } else {
+ ALOGW("AAC decoder returned error %d, substituting silence",
+ decoderErr);
+
+ memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
+
+ // Discard input buffer.
+ inHeader->nFilledLen = 0;
+
+ aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+
+ // fall through
+ }
+
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+
/*
* AAC+/eAAC+ streams can be signalled in two ways: either explicitly
* or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
@@ -502,15 +535,9 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
if (mStreamInfo->sampleRate != prevSampleRate ||
mStreamInfo->numChannels != prevNumChannels) {
maybeConfigureDownmix();
- ALOGI("Reconfiguring decoder: %d Hz, %d channels",
- mStreamInfo->sampleRate,
- mStreamInfo->numChannels);
-
- // We're going to want to revisit this input buffer, but
- // may have already advanced the offset. Undo that if
- // necessary.
- inHeader->nOffset -= adtsHeaderSize;
- inHeader->nFilledLen += adtsHeaderSize;
+ ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+ prevSampleRate, mStreamInfo->sampleRate,
+ prevNumChannels, mStreamInfo->numChannels);
notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
mOutputPortSettingsChange = AWAITING_DISABLED;
@@ -523,38 +550,10 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
return;
}
- size_t numOutBytes =
- mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
-
- if (decoderErr == AAC_DEC_OK) {
- UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
- inHeader->nFilledLen -= inBufferUsedLength;
- inHeader->nOffset += inBufferUsedLength;
- } else {
- ALOGW("AAC decoder returned error %d, substituting silence",
- decoderErr);
-
- memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
-
- // Discard input buffer.
- inHeader->nFilledLen = 0;
-
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
-
- // fall through
- }
-
if (decoderErr == AAC_DEC_OK || mNumSamplesOutput > 0) {
// We'll only output data if we successfully decoded it or
// we've previously decoded valid data, in the latter case
// (decode failed) we'll output a silent frame.
- if (mIsFirst) {
- mIsFirst = false;
- // the first decoded frame should be discarded to account
- // for decoder delay
- numOutBytes = 0;
- }
-
outHeader->nFilledLen = numOutBytes;
outHeader->nFlags = 0;
@@ -571,14 +570,6 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
outHeader = NULL;
}
- if (inHeader->nFilledLen == 0) {
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
- }
-
if (decoderErr == AAC_DEC_OK) {
++mInputBufferCount;
}
@@ -589,11 +580,32 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
if (portIndex == 0) {
// Make sure that the next buffer output does not still
// depend on fragments from the last one decoded.
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
- mIsFirst = true;
+ // drain all existing data
+ drainDecoder();
}
}
+void SoftAAC2::drainDecoder() {
+ // a buffer big enough for 6 channels of decoded HE-AAC
+ short buf [2048*6];
+ aacDecoder_DecodeFrame(mAACDecoder,
+ buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
+ aacDecoder_DecodeFrame(mAACDecoder,
+ buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
+ aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+ mDecoderHasData = false;
+}
+
+void SoftAAC2::onReset() {
+ drainDecoder();
+ // reset the "configured" state
+ mInputBufferCount = 0;
+ mNumSamplesOutput = 0;
+ // To make the codec behave the same before and after a reset, we need to invalidate the
+ // streaminfo struct. This does that:
+ mStreamInfo->sampleRate = 0;
+}
+
void SoftAAC2::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
if (portIndex != 1) {
return;
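
For illustration (not part of the change): the scratch buffer in drainDecoder()
above, "short buf[2048*6]", holds 12288 16-bit samples, i.e. 2048 samples per
frame (the HE-AAC maximum) for each of the 6 channels MAX_CHANNEL_COUNT
allows, for a total of 24576 bytes.
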
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index 0353196..2d960ab 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -41,6 +41,7 @@ protected:
virtual void onQueueFilled(OMX_U32 portIndex);
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+ virtual void onReset();
private:
enum {
@@ -51,7 +52,7 @@ private:
HANDLE_AACDECODER mAACDecoder;
CStreamInfo *mStreamInfo;
bool mIsADTS;
- bool mIsFirst;
+ bool mDecoderHasData;
size_t mInputBufferCount;
bool mSignalledError;
int64_t mAnchorTimeUs;
@@ -67,6 +68,7 @@ private:
status_t initDecoder();
bool isConfigured() const;
void maybeConfigureDownmix() const;
+ void drainDecoder();
DISALLOW_EVIL_CONSTRUCTORS(SoftAAC2);
};
diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk
index 820734d..057c69b 100644
--- a/media/libstagefright/codecs/aacenc/Android.mk
+++ b/media/libstagefright/codecs/aacenc/Android.mk
@@ -109,7 +109,7 @@ ifeq ($(AAC_LIBRARY), fraunhofer)
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
- libstagefright_omx libstagefright_foundation libutils
+ libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_aacenc
LOCAL_MODULE_TAGS := optional
@@ -132,7 +132,7 @@ else # visualon
libstagefright_aacenc
LOCAL_SHARED_LIBRARIES := \
- libstagefright_omx libstagefright_foundation libutils \
+ libstagefright_omx libstagefright_foundation libutils liblog \
libstagefright_enc_common
LOCAL_MODULE := libstagefright_soft_aacenc
diff --git a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
index 7719435..5749733 100644
--- a/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
+++ b/media/libstagefright/codecs/aacenc/SoftAACEncoder2.cpp
@@ -481,7 +481,7 @@ void SoftAACEncoder2::onQueueFilled(OMX_U32 portIndex) {
void* inBuffer[] = { (unsigned char *)mInputFrame };
INT inBufferIds[] = { IN_AUDIO_DATA };
- INT inBufferSize[] = { numBytesPerInputFrame };
+ INT inBufferSize[] = { (INT)numBytesPerInputFrame };
INT inBufferElSize[] = { sizeof(int16_t) };
AACENC_BufDesc inBufDesc;
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk
index b48a459..8d6c6f8 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.mk
+++ b/media/libstagefright/codecs/amrnb/dec/Android.mk
@@ -72,7 +72,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbdec libstagefright_amrwbdec
LOCAL_SHARED_LIBRARIES := \
- libstagefright_omx libstagefright_foundation libutils \
+ libstagefright_omx libstagefright_foundation libutils liblog \
libstagefright_amrnb_common
LOCAL_MODULE := libstagefright_soft_amrdec
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk
index 457656a..f4e467a 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.mk
+++ b/media/libstagefright/codecs/amrnb/enc/Android.mk
@@ -92,7 +92,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbenc
LOCAL_SHARED_LIBRARIES := \
- libstagefright_omx libstagefright_foundation libutils \
+ libstagefright_omx libstagefright_foundation libutils liblog \
libstagefright_amrnb_common
LOCAL_MODULE := libstagefright_soft_amrnbenc
diff --git a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
index 07f8b4f..50b739c 100644
--- a/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
+++ b/media/libstagefright/codecs/amrnb/enc/SoftAMRNBEncoder.cpp
@@ -257,7 +257,7 @@ OMX_ERRORTYPE SoftAMRNBEncoder::internalSetParameter(
}
if (pcmParams->nChannels != 1
- || pcmParams->nSamplingRate != kSampleRate) {
+ || pcmParams->nSamplingRate != (OMX_U32)kSampleRate) {
return OMX_ErrorUndefined;
}
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index edfd7b7..c5b8e0c 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -130,7 +130,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_amrwbenc
LOCAL_SHARED_LIBRARIES := \
- libstagefright_omx libstagefright_foundation libutils \
+ libstagefright_omx libstagefright_foundation libutils liblog \
libstagefright_enc_common
LOCAL_MODULE := libstagefright_soft_amrwbenc
diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk
index cffe469..7d17c2a 100644
--- a/media/libstagefright/codecs/avc/enc/Android.mk
+++ b/media/libstagefright/codecs/avc/enc/Android.mk
@@ -62,6 +62,7 @@ LOCAL_SHARED_LIBRARIES := \
libstagefright_foundation \
libstagefright_omx \
libutils \
+ liblog \
libui
diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk
index 546a357..f01d605 100644
--- a/media/libstagefright/codecs/flac/enc/Android.mk
+++ b/media/libstagefright/codecs/flac/enc/Android.mk
@@ -10,7 +10,7 @@ LOCAL_C_INCLUDES := \
external/flac/include
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_STATIC_LIBRARIES := \
libFLAC \
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index 233aed3..e64fe72 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -109,7 +109,7 @@ void SoftFlacEncoder::initPorts() {
def.eDir = OMX_DirInput;
def.nBufferCountMin = kNumBuffers;// TODO verify that 1 is enough
def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2;
+ def.nBufferSize = kMaxInputBufferSize;
def.bEnabled = OMX_TRUE;
def.bPopulated = OMX_FALSE;
def.eDomain = OMX_PortDomainAudio;
@@ -234,6 +234,22 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter(
return OMX_ErrorNone;
}
+ case OMX_IndexParamPortDefinition:
+ {
+ OMX_PARAM_PORTDEFINITIONTYPE *defParams =
+ (OMX_PARAM_PORTDEFINITIONTYPE *)params;
+
+ if (defParams->nPortIndex == 0) {
+ if (defParams->nBufferSize > kMaxInputBufferSize) {
+ ALOGE("Input buffer size must be at most %zu bytes",
+ kMaxInputBufferSize);
+ return OMX_ErrorUnsupportedSetting;
+ }
+ }
+
+ // fall through
+ }
+
default:
ALOGV("SoftFlacEncoder::internalSetParameter(default)");
return SimpleSoftOMXComponent::internalSetParameter(index, params);
@@ -273,7 +289,7 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
return;
}
- if (inHeader->nFilledLen > kMaxNumSamplesPerFrame * sizeof(FLAC__int32) * 2) {
+ if (inHeader->nFilledLen > kMaxInputBufferSize) {
ALOGE("input buffer too large (%ld).", inHeader->nFilledLen);
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
@@ -290,6 +306,7 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
const unsigned nbInputSamples = inHeader->nFilledLen / 2;
const OMX_S16 * const pcm16 = reinterpret_cast<OMX_S16 *>(inHeader->pBuffer);
+ CHECK_LE(nbInputSamples, 2 * kMaxNumSamplesPerFrame);
for (unsigned i=0 ; i < nbInputSamples ; i++) {
mInputBufferPcm32[i] = (FLAC__int32) pcm16[i];
}
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
index 1e0148a..97361fa 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.h
@@ -52,6 +52,7 @@ private:
enum {
kNumBuffers = 2,
kMaxNumSamplesPerFrame = 1152,
+ kMaxInputBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t) * 2,
kMaxOutputBufferSize = 65536, //TODO check if this can be reduced
};
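
Note on the FLAC encoder hunks above (the sketch below is not part of the patch): the ad-hoc size expressions are folded into a single kMaxInputBufferSize constant, 1152 samples × 2 bytes × 2 channels = 4608 bytes, and internalSetParameter now rejects any input-port definition that asks for a larger buffer. A minimal standalone version of that guard, with every name other than the two constants invented for illustration:

    // Minimal sketch of the size guard introduced above; not the AOSP code.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    constexpr size_t kMaxNumSamplesPerFrame = 1152;
    constexpr size_t kMaxInputBufferSize =
            kMaxNumSamplesPerFrame * sizeof(int16_t) * 2;   // 4608 bytes

    // Mirrors the OMX_ErrorUnsupportedSetting path: anything larger than the
    // maximum stereo 16-bit frame is refused.
    bool acceptInputBufferSize(size_t requested) {
        if (requested > kMaxInputBufferSize) {
            std::fprintf(stderr, "input buffer size must be at most %zu bytes\n",
                         kMaxInputBufferSize);
            return false;
        }
        return true;
    }

    int main() {
        std::printf("4608 ok: %d, 8192 ok: %d\n",
                    acceptInputBufferSize(4608), acceptInputBufferSize(8192));
        return 0;
    }
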
diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk
index 28be646..4c80da6 100644
--- a/media/libstagefright/codecs/g711/dec/Android.mk
+++ b/media/libstagefright/codecs/g711/dec/Android.mk
@@ -9,7 +9,7 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_g711dec
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/codecs/gsm/Android.mk b/media/libstagefright/codecs/gsm/Android.mk
new file mode 100644
index 0000000..2e43120
--- /dev/null
+++ b/media/libstagefright/codecs/gsm/Android.mk
@@ -0,0 +1,4 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk
new file mode 100644
index 0000000..71613d2
--- /dev/null
+++ b/media/libstagefright/codecs/gsm/dec/Android.mk
@@ -0,0 +1,21 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftGSM.cpp
+
+LOCAL_C_INCLUDES := \
+ frameworks/av/media/libstagefright/include \
+ frameworks/native/include/media/openmax \
+ external/libgsm/inc
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog
+
+LOCAL_STATIC_LIBRARIES := \
+ libgsm
+
+LOCAL_MODULE := libstagefright_soft_gsmdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libstagefright/codecs/gsm/dec/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/gsm/dec/NOTICE b/media/libstagefright/codecs/gsm/dec/NOTICE
new file mode 100644
index 0000000..c5b1efa
--- /dev/null
+++ b/media/libstagefright/codecs/gsm/dec/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2008, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
new file mode 100644
index 0000000..00e0c85
--- /dev/null
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.cpp
@@ -0,0 +1,269 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftGSM"
+#include <utils/Log.h>
+
+#include "SoftGSM.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftGSM::SoftGSM(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mSignalledError(false) {
+
+ CHECK(!strcmp(name, "OMX.google.gsm.decoder"));
+
+ mGsm = gsm_create();
+ CHECK(mGsm);
+ int msopt = 1;
+ gsm_option(mGsm, GSM_OPT_WAV49, &msopt);
+
+ initPorts();
+}
+
+SoftGSM::~SoftGSM() {
+ gsm_destroy(mGsm);
+}
+
+void SoftGSM::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = sizeof(gsm_frame);
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_AUDIO_MSGSM);
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingGSMFR;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = kMaxNumSamplesPerFrame * sizeof(int16_t);
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+OMX_ERRORTYPE SoftGSM::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+
+ pcmParams->nChannels = 1;
+ pcmParams->nSamplingRate = 8000;
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftGSM::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch (index) {
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 0 && pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (pcmParams->nChannels != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ if (pcmParams->nSamplingRate != 8000) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.gsm",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+void SoftGSM::onQueueFilled(OMX_U32 portIndex) {
+ if (mSignalledError) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+ if (inHeader->nFilledLen > kMaxNumSamplesPerFrame) {
+ ALOGE("input buffer too large (%ld).", inHeader->nFilledLen);
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ }
+
+ if(((inHeader->nFilledLen / 65) * 65) != inHeader->nFilledLen) {
+ ALOGE("input buffer not multiple of 65 (%ld).", inHeader->nFilledLen);
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ mSignalledError = true;
+ }
+
+ uint8_t *inputptr = inHeader->pBuffer + inHeader->nOffset;
+
+ int n = mSignalledError ? 0 : DecodeGSM(mGsm,
+ reinterpret_cast<int16_t *>(outHeader->pBuffer), inputptr, inHeader->nFilledLen);
+
+ outHeader->nTimeStamp = inHeader->nTimeStamp;
+ outHeader->nOffset = 0;
+ outHeader->nFilledLen = n * sizeof(int16_t);
+ outHeader->nFlags = 0;
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+}
+
+
+// static
+int SoftGSM::DecodeGSM(gsm handle,
+ int16_t *out, uint8_t *in, size_t inSize) {
+
+ int ret = 0;
+ while (inSize > 0) {
+ gsm_decode(handle, in, out);
+ in += 33;
+ inSize -= 33;
+ out += 160;
+ ret += 160;
+ gsm_decode(handle, in, out);
+ in += 32;
+ inSize -= 32;
+ out += 160;
+ ret += 160;
+ }
+ return ret;
+}
+
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftGSM(name, callbacks, appData, component);
+}
+
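
The new GSM decoder above enables GSM_OPT_WAV49 (Microsoft/WAV49 framing): input must arrive as whole 65-byte frame pairs, a 33-byte frame followed by a 32-byte frame, and each pair decodes to 2 × 160 = 320 mono 8 kHz samples, which is exactly the loop DecodeGSM runs. A standalone sketch (not part of the patch) of the same size bookkeeping, with the helper name invented for illustration:

    // Standalone sketch of the WAV49 size arithmetic used by DecodeGSM above.
    #include <cstddef>
    #include <cstdio>

    constexpr size_t kWav49FramePairBytes = 65;   // 33-byte frame + 32-byte frame
    constexpr size_t kSamplesPerFramePair = 320;  // 2 * 160 PCM samples

    // Returns 0 for input that is not a whole number of frame pairs, matching
    // the "not multiple of 65" rejection in onQueueFilled above.
    size_t decodedSampleCount(size_t inputBytes) {
        if (inputBytes % kWav49FramePairBytes != 0) {
            return 0;
        }
        return (inputBytes / kWav49FramePairBytes) * kSamplesPerFramePair;
    }

    int main() {
        // 650 input bytes -> 10 frame pairs -> 3200 samples (6400 PCM bytes).
        std::printf("%zu samples\n", decodedSampleCount(650));
        return 0;
    }
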
diff --git a/media/libstagefright/codecs/gsm/dec/SoftGSM.h b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
new file mode 100644
index 0000000..8ab6116
--- /dev/null
+++ b/media/libstagefright/codecs/gsm/dec/SoftGSM.h
@@ -0,0 +1,65 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_GSM_H_
+
+#define SOFT_GSM_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+extern "C" {
+#include "gsm.h"
+}
+
+namespace android {
+
+struct SoftGSM : public SimpleSoftOMXComponent {
+ SoftGSM(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftGSM();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kMaxNumSamplesPerFrame = 16384,
+ };
+
+ bool mSignalledError;
+ gsm mGsm;
+
+ void initPorts();
+
+ static int DecodeGSM(gsm handle, int16_t *out, uint8_t *in, size_t inSize);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftGSM);
+};
+
+} // namespace android
+
+#endif // SOFT_GSM_H_
+
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
index a6b1edc..a3d5779 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
@@ -67,7 +67,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_m4vh263dec
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_mpeg4dec
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
index d527fde..020cc0a 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
+++ b/media/libstagefright/codecs/m4v_h263/dec/SoftMPEG4.cpp
@@ -326,7 +326,7 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
OMX_BUFFERHEADERTYPE *outHeader =
port->mBuffers.editItemAt(mNumSamplesOutput & 1).mHeader;
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ if ((inHeader->nFlags & OMX_BUFFERFLAG_EOS) && inHeader->nFilledLen == 0) {
inQueue.erase(inQueue.begin());
inInfo->mOwnedByUs = false;
notifyEmptyBufferDone(inHeader);
@@ -445,6 +445,11 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
inHeader->nOffset += bufferSize;
inHeader->nFilledLen = 0;
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ } else {
+ outHeader->nFlags = 0;
+ }
if (inHeader->nFilledLen == 0) {
inInfo->mOwnedByUs = false;
@@ -458,7 +463,6 @@ void SoftMPEG4::onQueueFilled(OMX_U32 portIndex) {
outHeader->nOffset = 0;
outHeader->nFilledLen = (mWidth * mHeight * 3) / 2;
- outHeader->nFlags = 0;
List<BufferInfo *>::iterator it = outQueue.begin();
while ((*it)->mHeader != outHeader) {
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
index 865cc9c..83a2dd2 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
@@ -65,6 +65,7 @@ LOCAL_SHARED_LIBRARIES := \
libstagefright_foundation \
libstagefright_omx \
libutils \
+ liblog \
libui
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index ec8d7ec..135c715 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -70,7 +70,7 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_STATIC_LIBRARIES := \
libstagefright_mp3dec
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index fb1135c..9f25536 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -166,6 +166,21 @@ OMX_ERRORTYPE SoftMP3::internalSetParameter(
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioPcm:
+ {
+ const OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (const OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ mNumChannels = pcmParams->nChannels;
+ mSamplingRate = pcmParams->nSamplingRate;
+
+ return OMX_ErrorNone;
+ }
+
default:
return SimpleSoftOMXComponent::internalSetParameter(index, params);
}
@@ -343,6 +358,11 @@ void SoftMP3::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
}
}
+void SoftMP3::onReset() {
+ pvmp3_InitDecoder(mConfig, mDecoderBuf);
+ mIsFirst = true;
+}
+
} // namespace android
android::SoftOMXComponent *createSoftOMXComponent(
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.h b/media/libstagefright/codecs/mp3dec/SoftMP3.h
index 3a05466..4af91ea 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.h
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.h
@@ -42,6 +42,7 @@ protected:
virtual void onQueueFilled(OMX_U32 portIndex);
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+ virtual void onReset();
private:
enum {
diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk
index 0082d7c..7f2c46d 100644
--- a/media/libstagefright/codecs/on2/dec/Android.mk
+++ b/media/libstagefright/codecs/on2/dec/Android.mk
@@ -15,7 +15,7 @@ LOCAL_STATIC_LIBRARIES := \
libvpx
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_vpxdec
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
index bf9ab3a..a400b4c 100644
--- a/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
+++ b/media/libstagefright/codecs/on2/dec/SoftVPX.cpp
@@ -66,7 +66,7 @@ void SoftVPX::initPorts() {
def.eDir = OMX_DirInput;
def.nBufferCountMin = kNumBuffers;
def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = 256 * 1024;
+ def.nBufferSize = 768 * 1024;
def.bEnabled = OMX_TRUE;
def.bPopulated = OMX_FALSE;
def.eDomain = OMX_PortDomainVideo;
diff --git a/media/libstagefright/codecs/on2/enc/Android.mk b/media/libstagefright/codecs/on2/enc/Android.mk
new file mode 100644
index 0000000..a92d376
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/Android.mk
@@ -0,0 +1,24 @@
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftVPXEncoder.cpp
+
+LOCAL_C_INCLUDES := \
+ $(TOP)/external/libvpx/libvpx \
+ $(TOP)/external/openssl/include \
+ $(TOP)/external/libvpx/libvpx/vpx_codec \
+ $(TOP)/external/libvpx/libvpx/vpx_ports \
+ frameworks/av/media/libstagefright/include \
+ frameworks/native/include/media/openmax \
+
+LOCAL_STATIC_LIBRARIES := \
+ libvpx
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog \
+
+LOCAL_MODULE := libstagefright_soft_vpxenc
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2 b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/MODULE_LICENSE_APACHE2
diff --git a/media/libstagefright/codecs/on2/enc/NOTICE b/media/libstagefright/codecs/on2/enc/NOTICE
new file mode 100644
index 0000000..faed58a
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/NOTICE
@@ -0,0 +1,190 @@
+
+ Copyright (c) 2005-2013, The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
new file mode 100644
index 0000000..e25637a
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -0,0 +1,686 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "SoftVPXEncoder"
+#include "SoftVPXEncoder.h"
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+namespace android {
+
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ // OMX IL 1.1.2
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 1;
+ params->nVersion.s.nRevision = 2;
+ params->nVersion.s.nStep = 0;
+}
+
+
+static int GetCPUCoreCount() {
+ int cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK_GE(cpuCoreCount, 1);
+ return cpuCoreCount;
+}
+
+
+// This color conversion utility is copied from SoftMPEG4Encoder.cpp
+inline static void ConvertSemiPlanarToPlanar(uint8_t *inyuv,
+ uint8_t* outyuv,
+ int32_t width,
+ int32_t height) {
+ int32_t outYsize = width * height;
+ uint32_t *outy = (uint32_t *) outyuv;
+ uint16_t *outcb = (uint16_t *) (outyuv + outYsize);
+ uint16_t *outcr = (uint16_t *) (outyuv + outYsize + (outYsize >> 2));
+
+ /* Y copying */
+ memcpy(outy, inyuv, outYsize);
+
+ /* U & V copying */
+ uint32_t *inyuv_4 = (uint32_t *) (inyuv + outYsize);
+ for (int32_t i = height >> 1; i > 0; --i) {
+ for (int32_t j = width >> 2; j > 0; --j) {
+ uint32_t temp = *inyuv_4++;
+ uint32_t tempU = temp & 0xFF;
+ tempU = tempU | ((temp >> 8) & 0xFF00);
+
+ uint32_t tempV = (temp >> 8) & 0xFF;
+ tempV = tempV | ((temp >> 16) & 0xFF00);
+
+ // Flip U and V
+ *outcb++ = tempV;
+ *outcr++ = tempU;
+ }
+ }
+}
+
+
+SoftVPXEncoder::SoftVPXEncoder(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mCodecContext(NULL),
+ mCodecConfiguration(NULL),
+ mCodecInterface(NULL),
+ mWidth(176),
+ mHeight(144),
+ mBitrate(192000), // in bps
+ mBitrateControlMode(VPX_VBR), // variable bitrate
+ mFrameDurationUs(33333), // Defaults to 30 fps
+ mDCTPartitions(0),
+ mErrorResilience(OMX_FALSE),
+ mColorFormat(OMX_COLOR_FormatYUV420Planar),
+ mLevel(OMX_VIDEO_VP8Level_Version0),
+ mConversionBuffer(NULL) {
+
+ initPorts();
+}
+
+
+SoftVPXEncoder::~SoftVPXEncoder() {
+ releaseEncoder();
+}
+
+
+void SoftVPXEncoder::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE inputPort;
+ OMX_PARAM_PORTDEFINITIONTYPE outputPort;
+
+ InitOMXParams(&inputPort);
+ InitOMXParams(&outputPort);
+
+ inputPort.nBufferCountMin = kNumBuffers;
+ inputPort.nBufferCountActual = inputPort.nBufferCountMin;
+ inputPort.bEnabled = OMX_TRUE;
+ inputPort.bPopulated = OMX_FALSE;
+ inputPort.eDomain = OMX_PortDomainVideo;
+ inputPort.bBuffersContiguous = OMX_FALSE;
+ inputPort.format.video.pNativeRender = NULL;
+ inputPort.format.video.nFrameWidth = mWidth;
+ inputPort.format.video.nFrameHeight = mHeight;
+ inputPort.format.video.nStride = inputPort.format.video.nFrameWidth;
+ inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight;
+ inputPort.format.video.nBitrate = 0;
+ // xFramerate is the reciprocal of the frame duration (in microseconds),
+ // expressed in Q16 fixed-point format.
+ inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16;
+ inputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
+ inputPort.nPortIndex = kInputPortIndex;
+ inputPort.eDir = OMX_DirInput;
+ inputPort.nBufferAlignment = kInputBufferAlignment;
+ inputPort.format.video.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
+ inputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
+ inputPort.format.video.eColorFormat = mColorFormat;
+ inputPort.format.video.pNativeWindow = NULL;
+ inputPort.nBufferSize =
+ (inputPort.format.video.nStride *
+ inputPort.format.video.nSliceHeight * 3) / 2;
+
+ addPort(inputPort);
+
+ outputPort.nBufferCountMin = kNumBuffers;
+ outputPort.nBufferCountActual = outputPort.nBufferCountMin;
+ outputPort.bEnabled = OMX_TRUE;
+ outputPort.bPopulated = OMX_FALSE;
+ outputPort.eDomain = OMX_PortDomainVideo;
+ outputPort.bBuffersContiguous = OMX_FALSE;
+ outputPort.format.video.pNativeRender = NULL;
+ outputPort.format.video.nFrameWidth = mWidth;
+ outputPort.format.video.nFrameHeight = mHeight;
+ outputPort.format.video.nStride = outputPort.format.video.nFrameWidth;
+ outputPort.format.video.nSliceHeight = outputPort.format.video.nFrameHeight;
+ outputPort.format.video.nBitrate = mBitrate;
+ outputPort.format.video.xFramerate = 0;
+ outputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
+ outputPort.nPortIndex = kOutputPortIndex;
+ outputPort.eDir = OMX_DirOutput;
+ outputPort.nBufferAlignment = kOutputBufferAlignment;
+ outputPort.format.video.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_VIDEO_VPX);
+ outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVPX;
+ outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused;
+ outputPort.format.video.pNativeWindow = NULL;
+ outputPort.nBufferSize = 256 * 1024; // arbitrary
+
+ addPort(outputPort);
+}
+
+
+status_t SoftVPXEncoder::initEncoder() {
+ vpx_codec_err_t codec_return;
+
+ mCodecContext = new vpx_codec_ctx_t;
+ mCodecConfiguration = new vpx_codec_enc_cfg_t;
+ mCodecInterface = vpx_codec_vp8_cx();
+
+ if (mCodecInterface == NULL) {
+ return UNKNOWN_ERROR;
+ }
+
+ codec_return = vpx_codec_enc_config_default(mCodecInterface,
+ mCodecConfiguration,
+ 0); // Codec specific flags
+
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error populating default configuration for vpx encoder.");
+ return UNKNOWN_ERROR;
+ }
+
+ mCodecConfiguration->g_w = mWidth;
+ mCodecConfiguration->g_h = mHeight;
+ mCodecConfiguration->g_threads = GetCPUCoreCount();
+ mCodecConfiguration->g_error_resilient = mErrorResilience;
+
+ switch (mLevel) {
+ case OMX_VIDEO_VP8Level_Version0:
+ mCodecConfiguration->g_profile = 0;
+ break;
+
+ case OMX_VIDEO_VP8Level_Version1:
+ mCodecConfiguration->g_profile = 1;
+ break;
+
+ case OMX_VIDEO_VP8Level_Version2:
+ mCodecConfiguration->g_profile = 2;
+ break;
+
+ case OMX_VIDEO_VP8Level_Version3:
+ mCodecConfiguration->g_profile = 3;
+ break;
+
+ default:
+ mCodecConfiguration->g_profile = 0;
+ }
+
+ // The OMX timebase unit is the microsecond;
+ // g_timebase is expressed in seconds, so 1/1000000 gives microsecond ticks.
+ mCodecConfiguration->g_timebase.num = 1;
+ mCodecConfiguration->g_timebase.den = 1000000;
+ // rc_target_bitrate is in kbps, mBitrate in bps
+ mCodecConfiguration->rc_target_bitrate = mBitrate/1000;
+ mCodecConfiguration->rc_end_usage = mBitrateControlMode;
+
+ codec_return = vpx_codec_enc_init(mCodecContext,
+ mCodecInterface,
+ mCodecConfiguration,
+ 0); // flags
+
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error initializing vpx encoder");
+ return UNKNOWN_ERROR;
+ }
+
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_TOKEN_PARTITIONS,
+ mDCTPartitions);
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error setting dct partitions for vpx encoder.");
+ return UNKNOWN_ERROR;
+ }
+
+ if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ if (mConversionBuffer == NULL) {
+ mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
+ if (mConversionBuffer == NULL) {
+ ALOGE("Allocating conversion buffer failed.");
+ return UNKNOWN_ERROR;
+ }
+ }
+ }
+ return OK;
+}
+
+
+status_t SoftVPXEncoder::releaseEncoder() {
+ if (mCodecContext != NULL) {
+ vpx_codec_destroy(mCodecContext);
+ delete mCodecContext;
+ mCodecContext = NULL;
+ }
+
+ if (mCodecConfiguration != NULL) {
+ delete mCodecConfiguration;
+ mCodecConfiguration = NULL;
+ }
+
+ if (mConversionBuffer != NULL) {
+ delete mConversionBuffer;
+ mConversionBuffer = NULL;
+ }
+
+ // this one is not allocated by us
+ mCodecInterface = NULL;
+
+ return OK;
+}
+
+
+OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
+ OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamVideoPortFormat: {
+ OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
+ (OMX_VIDEO_PARAM_PORTFORMATTYPE *)param;
+
+ if (formatParams->nPortIndex == kInputPortIndex) {
+ if (formatParams->nIndex >= kNumberOfSupportedColorFormats) {
+ return OMX_ErrorNoMore;
+ }
+
+ // Color formats, in order of preference
+ if (formatParams->nIndex == 0) {
+ formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
+ } else if (formatParams->nIndex == 1) {
+ formatParams->eColorFormat =
+ OMX_COLOR_FormatYUV420SemiPlanar;
+ } else {
+ formatParams->eColorFormat = OMX_COLOR_FormatAndroidOpaque;
+ }
+
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
+ // Converting from microseconds
+ // Also converting to Q16 format
+ formatParams->xFramerate = (1000000/mFrameDurationUs) << 16;
+ return OMX_ErrorNone;
+ } else if (formatParams->nPortIndex == kOutputPortIndex) {
+ formatParams->eCompressionFormat = OMX_VIDEO_CodingVPX;
+ formatParams->eColorFormat = OMX_COLOR_FormatUnused;
+ formatParams->xFramerate = 0;
+ return OMX_ErrorNone;
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+ }
+
+ case OMX_IndexParamVideoBitrate: {
+ OMX_VIDEO_PARAM_BITRATETYPE *bitrate =
+ (OMX_VIDEO_PARAM_BITRATETYPE *)param;
+
+ if (bitrate->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ bitrate->nTargetBitrate = mBitrate;
+
+ if (mBitrateControlMode == VPX_VBR) {
+ bitrate->eControlRate = OMX_Video_ControlRateVariable;
+ } else if (mBitrateControlMode == VPX_CBR) {
+ bitrate->eControlRate = OMX_Video_ControlRateConstant;
+ } else {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ return OMX_ErrorNone;
+ }
+
+ // VP8 specific parameters that use extension headers
+ case OMX_IndexParamVideoVp8: {
+ OMX_VIDEO_PARAM_VP8TYPE *vp8Params =
+ (OMX_VIDEO_PARAM_VP8TYPE *)param;
+
+ if (vp8Params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ vp8Params->eProfile = OMX_VIDEO_VP8ProfileMain;
+ vp8Params->eLevel = mLevel;
+ vp8Params->nDCTPartitions = mDCTPartitions;
+ vp8Params->bErrorResilientMode = mErrorResilience;
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamVideoProfileLevelQuerySupported: {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
+ (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
+
+ if (profileAndLevel->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ switch (profileAndLevel->nProfileIndex) {
+ case 0:
+ profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version0;
+ break;
+
+ case 1:
+ profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version1;
+ break;
+
+ case 2:
+ profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version2;
+ break;
+
+ case 3:
+ profileAndLevel->eLevel = OMX_VIDEO_VP8Level_Version3;
+ break;
+
+ default:
+ return OMX_ErrorNoMore;
+ }
+
+ profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamVideoProfileLevelCurrent: {
+ OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
+ (OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
+
+ if (profileAndLevel->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ profileAndLevel->eLevel = mLevel;
+ profileAndLevel->eProfile = OMX_VIDEO_VP8ProfileMain;
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, param);
+ }
+}
+
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
+ const OMX_PTR param) {
+ // can include extension index OMX_INDEXEXTTYPE
+ const int32_t indexFull = index;
+
+ switch (indexFull) {
+ case OMX_IndexParamStandardComponentRole:
+ return internalSetRoleParams(
+ (const OMX_PARAM_COMPONENTROLETYPE *)param);
+
+ case OMX_IndexParamVideoBitrate:
+ return internalSetBitrateParams(
+ (const OMX_VIDEO_PARAM_BITRATETYPE *)param);
+
+ case OMX_IndexParamPortDefinition:
+ return internalSetPortParams(
+ (const OMX_PARAM_PORTDEFINITIONTYPE *)param);
+
+ case OMX_IndexParamVideoPortFormat:
+ return internalSetFormatParams(
+ (const OMX_VIDEO_PARAM_PORTFORMATTYPE *)param);
+
+ case OMX_IndexParamVideoVp8:
+ return internalSetVp8Params(
+ (const OMX_VIDEO_PARAM_VP8TYPE *)param);
+
+ case OMX_IndexParamVideoProfileLevelCurrent:
+ return internalSetProfileLevel(
+ (const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param);
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, param);
+ }
+}
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetProfileLevel(
+ const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel) {
+ if (profileAndLevel->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ if (profileAndLevel->eProfile != OMX_VIDEO_VP8ProfileMain) {
+ return OMX_ErrorBadParameter;
+ }
+
+ if (profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version0 ||
+ profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version1 ||
+ profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version2 ||
+ profileAndLevel->eLevel == OMX_VIDEO_VP8Level_Version3) {
+ mLevel = (OMX_VIDEO_VP8LEVELTYPE)profileAndLevel->eLevel;
+ } else {
+ return OMX_ErrorBadParameter;
+ }
+
+ return OMX_ErrorNone;
+}
+
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
+ const OMX_VIDEO_PARAM_VP8TYPE* vp8Params) {
+ if (vp8Params->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ if (vp8Params->eProfile != OMX_VIDEO_VP8ProfileMain) {
+ return OMX_ErrorBadParameter;
+ }
+
+ if (vp8Params->eLevel == OMX_VIDEO_VP8Level_Version0 ||
+ vp8Params->eLevel == OMX_VIDEO_VP8Level_Version1 ||
+ vp8Params->eLevel == OMX_VIDEO_VP8Level_Version2 ||
+ vp8Params->eLevel == OMX_VIDEO_VP8Level_Version3) {
+ mLevel = vp8Params->eLevel;
+ } else {
+ return OMX_ErrorBadParameter;
+ }
+
+ if (vp8Params->nDCTPartitions <= kMaxDCTPartitions) {
+ mDCTPartitions = vp8Params->nDCTPartitions;
+ } else {
+ return OMX_ErrorBadParameter;
+ }
+
+ mErrorResilience = vp8Params->bErrorResilientMode;
+ return OMX_ErrorNone;
+}
+
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams(
+ const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) {
+ if (format->nPortIndex == kInputPortIndex) {
+ if (format->eColorFormat == OMX_COLOR_FormatYUV420Planar ||
+ format->eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+ format->eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ mColorFormat = format->eColorFormat;
+ return OMX_ErrorNone;
+ } else {
+ ALOGE("Unsupported color format %i", format->eColorFormat);
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else if (format->nPortIndex == kOutputPortIndex) {
+ if (format->eCompressionFormat == OMX_VIDEO_CodingVPX) {
+ return OMX_ErrorNone;
+ } else {
+ return OMX_ErrorUnsupportedSetting;
+ }
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+}
+
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetRoleParams(
+ const OMX_PARAM_COMPONENTROLETYPE* role) {
+ const char* roleText = (const char*)role->cRole;
+ const size_t roleTextMaxSize = OMX_MAX_STRINGNAME_SIZE - 1;
+
+ if (strncmp(roleText, "video_encoder.vpx", roleTextMaxSize)) {
+ ALOGE("Unsupported component role");
+ return OMX_ErrorBadParameter;
+ }
+
+ return OMX_ErrorNone;
+}
+
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
+ const OMX_PARAM_PORTDEFINITIONTYPE* port) {
+ if (port->nPortIndex == kInputPortIndex) {
+ mWidth = port->format.video.nFrameWidth;
+ mHeight = port->format.video.nFrameHeight;
+
+ // xFramerate comes in Q16 format, in frames per second unit
+ const uint32_t framerate = port->format.video.xFramerate >> 16;
+ // frame duration is in microseconds
+ mFrameDurationUs = (1000000/framerate);
+
+ if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
+ port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
+ port->format.video.eColorFormat == OMX_COLOR_FormatAndroidOpaque) {
+ mColorFormat = port->format.video.eColorFormat;
+ } else {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ return OMX_ErrorNone;
+ } else if (port->nPortIndex == kOutputPortIndex) {
+ mBitrate = port->format.video.nBitrate;
+ return OMX_ErrorNone;
+ } else {
+ return OMX_ErrorBadPortIndex;
+ }
+}
+
+
+OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
+ const OMX_VIDEO_PARAM_BITRATETYPE* bitrate) {
+ if (bitrate->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ mBitrate = bitrate->nTargetBitrate;
+
+ if (bitrate->eControlRate == OMX_Video_ControlRateVariable) {
+ mBitrateControlMode = VPX_VBR;
+ } else if (bitrate->eControlRate == OMX_Video_ControlRateConstant) {
+ mBitrateControlMode = VPX_CBR;
+ } else {
+ return OMX_ErrorUnsupportedSetting;
+ }
+
+ return OMX_ErrorNone;
+}
+
+
+void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
+ // Initialize encoder if not already
+ if (mCodecContext == NULL) {
+ if (OK != initEncoder()) {
+ ALOGE("Failed to initialize encoder");
+ notify(OMX_EventError,
+ OMX_ErrorUndefined,
+ 0, // Extra notification data
+ NULL); // Notification data pointer
+ return;
+ }
+ }
+
+ vpx_codec_err_t codec_return;
+ List<BufferInfo *> &inputBufferInfoQueue = getPortQueue(kInputPortIndex);
+ List<BufferInfo *> &outputBufferInfoQueue = getPortQueue(kOutputPortIndex);
+
+ while (!inputBufferInfoQueue.empty() && !outputBufferInfoQueue.empty()) {
+ BufferInfo *inputBufferInfo = *inputBufferInfoQueue.begin();
+ OMX_BUFFERHEADERTYPE *inputBufferHeader = inputBufferInfo->mHeader;
+
+ BufferInfo *outputBufferInfo = *outputBufferInfoQueue.begin();
+ OMX_BUFFERHEADERTYPE *outputBufferHeader = outputBufferInfo->mHeader;
+
+ if (inputBufferHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inputBufferInfoQueue.erase(inputBufferInfoQueue.begin());
+ inputBufferInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inputBufferHeader);
+
+ outputBufferHeader->nFilledLen = 0;
+ outputBufferHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outputBufferInfoQueue.erase(outputBufferInfoQueue.begin());
+ outputBufferInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outputBufferHeader);
+ return;
+ }
+
+ uint8_t* source = inputBufferHeader->pBuffer + inputBufferHeader->nOffset;
+
+ // NOTE: Although nothing is known about the color format
+ // when it is denoted as AndroidOpaque, it is at least
+ // assumed to be planar.
+ if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar) {
+ ConvertSemiPlanarToPlanar(source, mConversionBuffer, mWidth, mHeight);
+ source = mConversionBuffer;
+ }
+ vpx_image_t raw_frame;
+ vpx_img_wrap(&raw_frame, VPX_IMG_FMT_I420, mWidth, mHeight,
+ kInputBufferAlignment, source);
+ codec_return = vpx_codec_encode(mCodecContext,
+ &raw_frame,
+ inputBufferHeader->nTimeStamp, // in timebase units
+ mFrameDurationUs, // frame duration in timebase units
+ 0, // frame flags
+ VPX_DL_REALTIME); // encoding deadline
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("vpx encoder failed to encode frame");
+ notify(OMX_EventError,
+ OMX_ErrorUndefined,
+ 0, // Extra notification data
+ NULL); // Notification data pointer
+ return;
+ }
+
+ vpx_codec_iter_t encoded_packet_iterator = NULL;
+ const vpx_codec_cx_pkt_t* encoded_packet;
+
+ while ((encoded_packet = vpx_codec_get_cx_data(
+ mCodecContext, &encoded_packet_iterator))) {
+ if (encoded_packet->kind == VPX_CODEC_CX_FRAME_PKT) {
+ outputBufferHeader->nTimeStamp = encoded_packet->data.frame.pts;
+ outputBufferHeader->nFlags = 0;
+ outputBufferHeader->nOffset = 0;
+ outputBufferHeader->nFilledLen = encoded_packet->data.frame.sz;
+ memcpy(outputBufferHeader->pBuffer,
+ encoded_packet->data.frame.buf,
+ encoded_packet->data.frame.sz);
+ outputBufferInfo->mOwnedByUs = false;
+ outputBufferInfoQueue.erase(outputBufferInfoQueue.begin());
+ notifyFillBufferDone(outputBufferHeader);
+ }
+ }
+
+ inputBufferInfo->mOwnedByUs = false;
+ inputBufferInfoQueue.erase(inputBufferInfoQueue.begin());
+ notifyEmptyBufferDone(inputBufferHeader);
+ }
+}
+} // namespace android
+
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftVPXEncoder(name, callbacks, appData, component);
+}
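For reference, a minimal stand-alone sketch of what the ConvertSemiPlanarToPlanar() call in onQueueFilled() is expected to do: repack the interleaved chroma of a semi-planar frame into separate I420 planes before the frame is wrapped for libvpx. The function below is illustrative only; the real helper lives elsewhere in libstagefright, and the assumption that U precedes V in the interleaved plane is mine, not taken from this change.

#include <stdint.h>
#include <string.h>

// Illustrative only: repack interleaved chroma (semi-planar) into separate
// U and V planes (planar). Buffer sizes are assumed to be width*height*3/2.
static void convertSemiPlanarToPlanarSketch(
        const uint8_t *src, uint8_t *dst, int32_t width, int32_t height) {
    const size_t ySize = (size_t)width * height;
    const size_t uvSize = ySize / 4;

    // The luma plane is identical in both layouts.
    memcpy(dst, src, ySize);

    const uint8_t *srcUV = src + ySize;    // interleaved U/V pairs (U first assumed)
    uint8_t *dstU = dst + ySize;           // planar U
    uint8_t *dstV = dst + ySize + uvSize;  // planar V

    for (size_t i = 0; i < uvSize; ++i) {
        dstU[i] = srcUV[2 * i];
        dstV[i] = srcUV[2 * i + 1];
    }
}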
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
new file mode 100644
index 0000000..3bc05c0
--- /dev/null
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -0,0 +1,203 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_VPX_ENCODER_H_
+
+#define SOFT_VPX_ENCODER_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+#include <OMX_VideoExt.h>
+#include <OMX_IndexExt.h>
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vpx_codec.h"
+#include "vpx/vp8cx.h"
+
+namespace android {
+
+// Exposes a vpx encoder as an OMX Component
+//
+// Boilerplate for the callback bindings is taken care of
+// by the base class SimpleSoftOMXComponent and its
+// parent SoftOMXComponent.
+//
+// Only the following encoder settings are available:
+// - target bitrate
+// - rate control (constant / variable)
+// - frame rate
+// - error resilience
+// - token partitioning
+// - reconstruction & loop filters (g_profile)
+//
+// Only the following color formats are recognized:
+// - YUV420Planar
+// - YUV420SemiPlanar
+// - AndroidOpaque
+//
+// The following settings are not configurable by the client:
+// - the encoding deadline is realtime
+// - multithreaded encoding utilizes a number of threads equal
+// to the number of online CPUs available
+// - the algorithm interface for the encoder is vp8
+// - the fractional part of the frame rate is discarded
+// - OMX timestamps are in microseconds, therefore
+// the encoder timebase is fixed to 1/1000000
+
+class SoftVPXEncoder : public SimpleSoftOMXComponent {
+ public:
+ SoftVPXEncoder(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+ protected:
+ virtual ~SoftVPXEncoder();
+
+ // Returns current values for requested OMX
+ // parameters
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR param);
+
+ // Validates, extracts and stores relevant OMX
+ // parameters
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR param);
+
+ // OMX callback when buffers are available.
+ // Note that both an input and an output buffer
+ // are expected to be available to carry out
+ // encoding of the frame
+ virtual void onQueueFilled(OMX_U32 portIndex);
+
+ private:
+ // number of buffers allocated per port
+ static const uint32_t kNumBuffers = 4;
+
+ // OMX port indexes that refer to input and
+ // output ports respectively
+ static const uint32_t kInputPortIndex = 0;
+ static const uint32_t kOutputPortIndex = 1;
+
+ // Byte-alignment required for buffers
+ static const uint32_t kInputBufferAlignment = 1;
+ static const uint32_t kOutputBufferAlignment = 2;
+
+ // Max value supported for DCT partitions
+ static const uint32_t kMaxDCTPartitions = 3;
+
+ // Number of supported input color formats
+ static const uint32_t kNumberOfSupportedColorFormats = 3;
+
+ // vpx specific opaque data structure that
+ // stores encoder state
+ vpx_codec_ctx_t* mCodecContext;
+
+ // vpx specific data structure that
+ // stores encoder configuration
+ vpx_codec_enc_cfg_t* mCodecConfiguration;
+
+ // vpx specific read-only data structure
+ // that specifies algorithm interface (e.g. vp8)
+ vpx_codec_iface_t* mCodecInterface;
+
+ // Width of the input frames
+ int32_t mWidth;
+
+ // Height of the input frames
+ int32_t mHeight;
+
+ // Target bitrate set for the encoder, in bits per second.
+ int32_t mBitrate;
+
+ // Bitrate control mode, either constant or variable
+ vpx_rc_mode mBitrateControlMode;
+
+ // Frame duration is the reciprocal of the frame rate,
+ // expressed in microseconds
+ uint64_t mFrameDurationUs;
+
+ // vp8 specific configuration parameter
+ // that enables token partitioning of
+ // the stream into substreams
+ int32_t mDCTPartitions;
+
+ // Parameter that denotes whether error resilience
+ // is enabled in the encoder
+ OMX_BOOL mErrorResilience;
+
+ // Color format for the input port
+ OMX_COLOR_FORMATTYPE mColorFormat;
+
+ // Encoder profile corresponding to OMX level parameter
+ //
+ // The inconsistency in the naming is caused by the
+ // OMX spec referring to vpx profiles (g_profile)
+ // as "levels" while using the name "profile" for
+ // something else.
+ OMX_VIDEO_VP8LEVELTYPE mLevel;
+
+ // A conversion buffer is needed to convert semi-planar
+ // yuv420 to the planar format.
+ // It is only allocated if the input format is
+ // indeed YUV420SemiPlanar.
+ uint8_t* mConversionBuffer;
+
+ // Initializes input and output OMX ports with sensible
+ // default values.
+ void initPorts();
+
+ // Initializes vpx encoder with available settings.
+ status_t initEncoder();
+
+ // Releases the vpx encoder instance, along with its
+ // associated data structures.
+ //
+ // Unless called earlier, this is handled by the
+ // dtor.
+ status_t releaseEncoder();
+
+ // Handles port changes with respect to color formats
+ OMX_ERRORTYPE internalSetFormatParams(
+ const OMX_VIDEO_PARAM_PORTFORMATTYPE* format);
+
+ // Verifies that the component role being set on this OMX component is
+ // strictly video_encoder.vpx
+ OMX_ERRORTYPE internalSetRoleParams(
+ const OMX_PARAM_COMPONENTROLETYPE* role);
+
+ // Updates bitrate to reflect port settings.
+ OMX_ERRORTYPE internalSetBitrateParams(
+ const OMX_VIDEO_PARAM_BITRATETYPE* bitrate);
+
+ // Handles port definition changes.
+ OMX_ERRORTYPE internalSetPortParams(
+ const OMX_PARAM_PORTDEFINITIONTYPE* port);
+
+ // Handles vp8 specific parameters.
+ OMX_ERRORTYPE internalSetVp8Params(
+ const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
+
+ // Updates encoder profile
+ OMX_ERRORTYPE internalSetProfileLevel(
+ const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel);
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftVPXEncoder);
+};
+
+} // namespace android
+
+#endif // SOFT_VPX_ENCODER_H_
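To make the timebase convention described in the class comment concrete, here is a small example of the arithmetic internalSetPortParams() performs: xFramerate arrives in Q16 fixed point, the fractional part is discarded, and the resulting per-frame duration in microseconds lines up with a vpx timebase of 1/1000000. All names in the snippet are local to this sketch, not part of the component.

#include <stdint.h>
#include <stdio.h>

int main() {
    uint32_t xFramerateQ16 = 30 << 16;         // 30 fps in Q16, as OMX delivers it
    uint32_t framerate = xFramerateQ16 >> 16;  // integer part only; fraction discarded
    uint64_t frameDurationUs = 1000000 / framerate;

    // With the vpx timebase fixed to 1/1000000, OMX's microsecond timestamps
    // and frameDurationUs can be handed to vpx_codec_encode() unchanged.
    printf("framerate=%u fps, frame duration=%llu us\n",
           framerate, (unsigned long long)frameDurationUs);
    return 0;
}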
diff --git a/media/libstagefright/codecs/on2/h264dec/Android.mk b/media/libstagefright/codecs/on2/h264dec/Android.mk
index 772fd60..2539f98 100644
--- a/media/libstagefright/codecs/on2/h264dec/Android.mk
+++ b/media/libstagefright/codecs/on2/h264dec/Android.mk
@@ -97,7 +97,7 @@ ifeq ($(ARCH_ARM_HAVE_NEON),true)
endif
LOCAL_SHARED_LIBRARIES := \
- libstagefright libstagefright_omx libstagefright_foundation libutils \
+ libstagefright libstagefright_omx libstagefright_foundation libutils liblog \
LOCAL_MODULE := libstagefright_soft_h264dec
@@ -124,4 +124,3 @@ LOCAL_MODULE_TAGS := debug
LOCAL_MODULE := decoder
include $(BUILD_EXECUTABLE)
-
diff --git a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
index 6c3f834..6e36651 100644
--- a/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
+++ b/media/libstagefright/codecs/on2/h264dec/SoftAVC.cpp
@@ -311,18 +311,14 @@ void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
BufferInfo *inInfo = *inQueue.begin();
OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
++mPicId;
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
- mEOSStatus = INPUT_EOS_SEEN;
- continue;
- }
OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
header->nTimeStamp = inHeader->nTimeStamp;
header->nFlags = inHeader->nFlags;
+ if (header->nFlags & OMX_BUFFERFLAG_EOS) {
+ mEOSStatus = INPUT_EOS_SEEN;
+ }
mPicToHeaderMap.add(mPicId, header);
inQueue.erase(inQueue.begin());
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c
index 53b2fd8..cc838fd 100755
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_util.c
@@ -220,7 +220,7 @@ u32 h264bsdNextMbAddress(u32 *pSliceGroupMap, u32 picSizeInMbs, u32 currMbAddr)
/* Variables */
- u32 i, sliceGroup, tmp;
+ u32 i, sliceGroup;
/* Code */
@@ -231,11 +231,9 @@ u32 h264bsdNextMbAddress(u32 *pSliceGroupMap, u32 picSizeInMbs, u32 currMbAddr)
sliceGroup = pSliceGroupMap[currMbAddr];
i = currMbAddr + 1;
- tmp = pSliceGroupMap[i];
- while ((i < picSizeInMbs) && (tmp != sliceGroup))
+ while ((i < picSizeInMbs) && (pSliceGroupMap[i] != sliceGroup))
{
i++;
- tmp = pSliceGroupMap[i];
}
if (i == picSizeInMbs)
diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk
index 285c747..fe90a03 100644
--- a/media/libstagefright/codecs/raw/Android.mk
+++ b/media/libstagefright/codecs/raw/Android.mk
@@ -9,7 +9,7 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax
LOCAL_SHARED_LIBRARIES := \
- libstagefright_omx libstagefright_foundation libutils
+ libstagefright_omx libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_rawdec
LOCAL_MODULE_TAGS := optional
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk
index 395dd6b..2232353 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.mk
+++ b/media/libstagefright/codecs/vorbis/dec/Android.mk
@@ -11,10 +11,9 @@ LOCAL_C_INCLUDES := \
LOCAL_SHARED_LIBRARIES := \
libvorbisidec libstagefright libstagefright_omx \
- libstagefright_foundation libutils
+ libstagefright_foundation libutils liblog
LOCAL_MODULE := libstagefright_soft_vorbisdec
LOCAL_MODULE_TAGS := optional
include $(BUILD_SHARED_LIBRARY)
-
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index fab0b0c..4115324 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -410,6 +410,22 @@ void SoftVorbis::onPortFlushCompleted(OMX_U32 portIndex) {
}
}
+void SoftVorbis::onReset() {
+ mInputBufferCount = 0;
+ mNumFramesOutput = 0;
+ if (mState != NULL) {
+ vorbis_dsp_clear(mState);
+ delete mState;
+ mState = NULL;
+ }
+
+ if (mVi != NULL) {
+ vorbis_info_clear(mVi);
+ delete mVi;
+ mVi = NULL;
+ }
+}
+
void SoftVorbis::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
if (portIndex != 1) {
return;
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
index e252f55..cb628a0 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.h
@@ -43,6 +43,7 @@ protected:
virtual void onQueueFilled(OMX_U32 portIndex);
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+ virtual void onReset();
private:
enum {
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 2704a37..77f21b7 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -24,7 +24,7 @@
#include <media/stagefright/MetaData.h>
#include <system/window.h>
#include <ui/GraphicBufferMapper.h>
-#include <gui/ISurfaceTexture.h>
+#include <gui/IGraphicBufferProducer.h>
namespace android {
diff --git a/media/libstagefright/foundation/ALooperRoster.cpp b/media/libstagefright/foundation/ALooperRoster.cpp
index dff931d..ad10d2b 100644
--- a/media/libstagefright/foundation/ALooperRoster.cpp
+++ b/media/libstagefright/foundation/ALooperRoster.cpp
@@ -82,7 +82,8 @@ status_t ALooperRoster::postMessage_l(
ssize_t index = mHandlers.indexOfKey(msg->target());
if (index < 0) {
- ALOGW("failed to post message. Target handler not registered.");
+ ALOGW("failed to post message '%s'. Target handler not registered.",
+ msg->debugString().c_str());
return -ENOENT;
}
diff --git a/media/libstagefright/foundation/Android.mk b/media/libstagefright/foundation/Android.mk
index b7577d6..d65e213 100644
--- a/media/libstagefright/foundation/Android.mk
+++ b/media/libstagefright/foundation/Android.mk
@@ -20,6 +20,7 @@ LOCAL_C_INCLUDES:= \
LOCAL_SHARED_LIBRARIES := \
libbinder \
libutils \
+ liblog
LOCAL_CFLAGS += -Wno-multichar
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 733753b..505bdb3 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -40,10 +40,13 @@
namespace android {
-LiveSession::LiveSession(uint32_t flags, bool uidValid, uid_t uid)
- : mFlags(flags),
+LiveSession::LiveSession(
+ const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+ : mNotify(notify),
+ mFlags(flags),
mUIDValid(uidValid),
mUID(uid),
+ mInPreparationPhase(true),
mDataSource(new LiveDataSource),
mHTTPDataSource(
HTTPBase::Create(
@@ -179,7 +182,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
if (playlist == NULL) {
ALOGE("unable to fetch master playlist '%s'.", url.c_str());
- mDataSource->queueEOS(ERROR_IO);
+ signalEOS(ERROR_IO);
return;
}
@@ -207,7 +210,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
void LiveSession::onDisconnect() {
ALOGI("onDisconnect");
- mDataSource->queueEOS(ERROR_END_OF_STREAM);
+ signalEOS(ERROR_END_OF_STREAM);
Mutex::Autolock autoLock(mLock);
mDisconnectPending = false;
@@ -561,7 +564,8 @@ rinse_repeat:
// unchanged from the last time we tried.
} else {
ALOGE("failed to load playlist at url '%s'", url.c_str());
- mDataSource->queueEOS(ERROR_IO);
+ signalEOS(ERROR_IO);
+
return;
}
} else {
@@ -627,22 +631,20 @@ rinse_repeat:
if (index < mPlaylist->size()) {
int32_t newSeqNumber = firstSeqNumberInPlaylist + index;
- if (newSeqNumber != mSeqNumber) {
- ALOGI("seeking to seq no %d", newSeqNumber);
+ ALOGI("seeking to seq no %d", newSeqNumber);
- mSeqNumber = newSeqNumber;
+ mSeqNumber = newSeqNumber;
- mDataSource->reset();
+ mDataSource->reset();
- // reseting the data source will have had the
- // side effect of discarding any previously queued
- // bandwidth change discontinuity.
- // Therefore we'll need to treat these seek
- // discontinuities as involving a bandwidth change
- // even if they aren't directly.
- seekDiscontinuity = true;
- bandwidthChanged = true;
- }
+ // reseting the data source will have had the
+ // side effect of discarding any previously queued
+ // bandwidth change discontinuity.
+ // Therefore we'll need to treat these seek
+ // discontinuities as involving a bandwidth change
+ // even if they aren't directly.
+ seekDiscontinuity = true;
+ bandwidthChanged = true;
}
}
@@ -704,7 +706,7 @@ rinse_repeat:
mSeqNumber, firstSeqNumberInPlaylist,
firstSeqNumberInPlaylist + mPlaylist->size() - 1);
- mDataSource->queueEOS(ERROR_END_OF_STREAM);
+ signalEOS(ERROR_END_OF_STREAM);
return;
}
}
@@ -737,7 +739,7 @@ rinse_repeat:
status_t err = fetchFile(uri.c_str(), &buffer, range_offset, range_length);
if (err != OK) {
ALOGE("failed to fetch .ts segment at url '%s'", uri.c_str());
- mDataSource->queueEOS(err);
+ signalEOS(err);
return;
}
@@ -748,7 +750,7 @@ rinse_repeat:
if (err != OK) {
ALOGE("decryptBuffer failed w/ error %d", err);
- mDataSource->queueEOS(err);
+ signalEOS(err);
return;
}
@@ -760,7 +762,7 @@ rinse_repeat:
mBandwidthItems.removeAt(bandwidthIndex);
if (mBandwidthItems.isEmpty()) {
- mDataSource->queueEOS(ERROR_UNSUPPORTED);
+ signalEOS(ERROR_UNSUPPORTED);
return;
}
@@ -824,11 +826,42 @@ rinse_repeat:
postMonitorQueue();
}
+void LiveSession::signalEOS(status_t err) {
+ if (mInPreparationPhase && mNotify != NULL) {
+ sp<AMessage> notify = mNotify->dup();
+
+ notify->setInt32(
+ "what",
+ err == ERROR_END_OF_STREAM
+ ? kWhatPrepared : kWhatPreparationFailed);
+
+ if (err != ERROR_END_OF_STREAM) {
+ notify->setInt32("err", err);
+ }
+
+ notify->post();
+
+ mInPreparationPhase = false;
+ }
+
+ mDataSource->queueEOS(err);
+}
+
void LiveSession::onMonitorQueue() {
if (mSeekTimeUs >= 0
|| mDataSource->countQueuedBuffers() < kMaxNumQueuedFragments) {
onDownloadNext();
} else {
+ if (mInPreparationPhase) {
+ if (mNotify != NULL) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatPrepared);
+ notify->post();
+ }
+
+ mInPreparationPhase = false;
+ }
+
postMonitorQueue(1000000ll);
}
}
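A hypothetical sketch of the receiving side of the new notification: whoever creates the LiveSession passes in a notify AMessage and then watches for kWhatPrepared / kWhatPreparationFailed. The field names ("what", "err") follow the code above; the free function and how the result is surfaced to the client are assumptions for illustration only.

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include "include/LiveSession.h"

// Hypothetical handler body; how the result reaches the client is left as a
// comment because that part is not defined by this change.
static void handleLiveSessionNotify(const android::sp<android::AMessage> &msg) {
    using namespace android;

    int32_t what;
    CHECK(msg->findInt32("what", &what));

    if (what == LiveSession::kWhatPrepared) {
        // Preparation succeeded; report MEDIA_PREPARED (or equivalent) upstream.
    } else if (what == LiveSession::kWhatPreparationFailed) {
        int32_t err;
        CHECK(msg->findInt32("err", &err));
        (void)err;  // surface this as the prepare() failure code
    }
}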
diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk
index ff35d4a..80a1a3a 100644
--- a/media/libstagefright/id3/Android.mk
+++ b/media/libstagefright/id3/Android.mk
@@ -16,7 +16,7 @@ LOCAL_SRC_FILES := \
testid3.cpp
LOCAL_SHARED_LIBRARIES := \
- libstagefright libutils libbinder libstagefright_foundation
+ libstagefright libutils liblog libbinder libstagefright_foundation
LOCAL_STATIC_LIBRARIES := \
libstagefright_id3
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 1422687..2306f31 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -36,7 +36,7 @@ struct MediaBuffer;
struct MediaExtractor;
struct MediaSource;
struct NuCachedSource2;
-struct ISurfaceTexture;
+struct IGraphicBufferProducer;
class DrmManagerClinet;
class DecryptHandle;
@@ -81,7 +81,7 @@ struct AwesomePlayer {
bool isPlaying() const;
- status_t setSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture);
+ status_t setSurfaceTexture(const sp<IGraphicBufferProducer> &bufferProducer);
void setAudioSink(const sp<MediaPlayerBase::AudioSink> &audioSink);
status_t setLooping(bool shouldLoop);
diff --git a/media/libstagefright/include/ChromiumHTTPDataSource.h b/media/libstagefright/include/ChromiumHTTPDataSource.h
index 82e08fd..785f939 100644
--- a/media/libstagefright/include/ChromiumHTTPDataSource.h
+++ b/media/libstagefright/include/ChromiumHTTPDataSource.h
@@ -53,6 +53,9 @@ struct ChromiumHTTPDataSource : public HTTPBase {
virtual status_t reconnectAtOffset(off64_t offset);
+ static status_t UpdateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList);
+
protected:
virtual ~ChromiumHTTPDataSource();
diff --git a/media/libstagefright/include/FragmentedMP4Extractor.h b/media/libstagefright/include/FragmentedMP4Extractor.h
deleted file mode 100644
index 763cd3a..0000000
--- a/media/libstagefright/include/FragmentedMP4Extractor.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef FRAGMENTED_MP4_EXTRACTOR_H_
-
-#define FRAGMENTED_MP4_EXTRACTOR_H_
-
-#include "include/FragmentedMP4Parser.h"
-
-#include <media/stagefright/MediaExtractor.h>
-#include <utils/Vector.h>
-#include <utils/String8.h>
-
-namespace android {
-
-struct AMessage;
-class DataSource;
-class SampleTable;
-class String8;
-
-class FragmentedMP4Extractor : public MediaExtractor {
-public:
- // Extractor assumes ownership of "source".
- FragmentedMP4Extractor(const sp<DataSource> &source);
-
- virtual size_t countTracks();
- virtual sp<MediaSource> getTrack(size_t index);
- virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
- virtual sp<MetaData> getMetaData();
- virtual uint32_t flags() const;
-
-protected:
- virtual ~FragmentedMP4Extractor();
-
-private:
- sp<ALooper> mLooper;
- sp<FragmentedMP4Parser> mParser;
- sp<DataSource> mDataSource;
- status_t mInitCheck;
- size_t mAudioTrackIndex;
- size_t mTrackCount;
-
- sp<MetaData> mFileMetaData;
-
- Vector<uint32_t> mPath;
-
- FragmentedMP4Extractor(const FragmentedMP4Extractor &);
- FragmentedMP4Extractor &operator=(const FragmentedMP4Extractor &);
-};
-
-bool SniffFragmentedMP4(
- const sp<DataSource> &source, String8 *mimeType, float *confidence,
- sp<AMessage> *);
-
-} // namespace android
-
-#endif // MPEG4_EXTRACTOR_H_
diff --git a/media/libstagefright/include/FragmentedMP4Parser.h b/media/libstagefright/include/FragmentedMP4Parser.h
index 0edafb9..dbe02b8 100644
--- a/media/libstagefright/include/FragmentedMP4Parser.h
+++ b/media/libstagefright/include/FragmentedMP4Parser.h
@@ -263,7 +263,7 @@ private:
void copyBuffer(
sp<ABuffer> *dst,
- size_t offset, uint64_t size, size_t extra = 0) const;
+ size_t offset, uint64_t size) const;
DISALLOW_EVIL_CONSTRUCTORS(FragmentedMP4Parser);
};
diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h
index b8e10f7..c2dc351 100644
--- a/media/libstagefright/include/HTTPBase.h
+++ b/media/libstagefright/include/HTTPBase.h
@@ -48,6 +48,9 @@ struct HTTPBase : public DataSource {
virtual status_t setBandwidthStatCollectFreq(int32_t freqMs);
+ static status_t UpdateProxyConfig(
+ const char *host, int32_t port, const char *exclusionList);
+
void setUID(uid_t uid);
bool getUID(uid_t *uid) const;
diff --git a/media/libstagefright/include/LiveSession.h b/media/libstagefright/include/LiveSession.h
index f329cc9..db44a33 100644
--- a/media/libstagefright/include/LiveSession.h
+++ b/media/libstagefright/include/LiveSession.h
@@ -35,7 +35,9 @@ struct LiveSession : public AHandler {
// Don't log any URLs.
kFlagIncognito = 1,
};
- LiveSession(uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+ LiveSession(
+ const sp<AMessage> &notify,
+ uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
sp<DataSource> getDataSource();
@@ -53,6 +55,12 @@ struct LiveSession : public AHandler {
bool isSeekable() const;
bool hasDynamicDuration() const;
+ // Posted notification's "what" field will carry one of the following:
+ enum {
+ kWhatPrepared,
+ kWhatPreparationFailed,
+ };
+
protected:
virtual ~LiveSession();
@@ -76,10 +84,13 @@ private:
unsigned long mBandwidth;
};
+ sp<AMessage> mNotify;
uint32_t mFlags;
bool mUIDValid;
uid_t mUID;
+ bool mInPreparationPhase;
+
sp<LiveDataSource> mDataSource;
sp<HTTPBase> mHTTPDataSource;
@@ -144,6 +155,8 @@ private:
// This is computed by summing the durations of all segments before it.
int64_t getSegmentStartTimeUs(int32_t seqNumber) const;
+ void signalEOS(status_t err);
+
DISALLOW_EVIL_CONSTRUCTORS(LiveSession);
};
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index 5c549e0..35eff96 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -18,7 +18,12 @@
#define MPEG4_EXTRACTOR_H_
+#include <arpa/inet.h>
+
+#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/Utils.h>
+#include <utils/List.h>
#include <utils/Vector.h>
#include <utils/String8.h>
@@ -29,6 +34,11 @@ class DataSource;
class SampleTable;
class String8;
+struct SidxEntry {
+ size_t mSize;
+ uint32_t mDurationUs;
+};
+
class MPEG4Extractor : public MediaExtractor {
public:
// Extractor assumes ownership of "source".
@@ -39,6 +49,7 @@ public:
virtual sp<MetaData> getTrackMetaData(size_t index, uint32_t flags);
virtual sp<MetaData> getMetaData();
+ virtual uint32_t flags() const;
// for DRM
virtual char* getDrmTrackInfo(size_t trackID, int *len);
@@ -47,6 +58,12 @@ protected:
virtual ~MPEG4Extractor();
private:
+
+ struct PsshInfo {
+ uint8_t uuid[16];
+ uint32_t datalen;
+ uint8_t *data;
+ };
struct Track {
Track *next;
sp<MetaData> meta;
@@ -56,6 +73,12 @@ private:
bool skipTrack;
};
+ Vector<SidxEntry> mSidxEntries;
+ uint64_t mSidxDuration;
+ off64_t mMoofOffset;
+
+ Vector<PsshInfo> mPssh;
+
sp<DataSource> mDataSource;
status_t mInitCheck;
bool mHasVideo;
@@ -93,6 +116,8 @@ private:
status_t parseTrackHeader(off64_t data_offset, off64_t data_size);
+ status_t parseSegmentIndex(off64_t data_offset, size_t data_size);
+
Track *findTrackByMimePrefix(const char *mimePrefix);
MPEG4Extractor(const MPEG4Extractor &);
diff --git a/media/libstagefright/include/OMX.h b/media/libstagefright/include/OMX.h
index 2c87b34..24b8d98 100644
--- a/media/libstagefright/include/OMX.h
+++ b/media/libstagefright/include/OMX.h
@@ -79,6 +79,12 @@ public:
node_id node, OMX_U32 port_index,
const sp<GraphicBuffer> &graphicBuffer, buffer_id *buffer);
+ virtual status_t createInputSurface(
+ node_id node, OMX_U32 port_index,
+ sp<IGraphicBufferProducer> *bufferProducer);
+
+ virtual status_t signalEndOfInputStream(node_id node);
+
virtual status_t allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data);
diff --git a/media/libstagefright/include/OMXNodeInstance.h b/media/libstagefright/include/OMXNodeInstance.h
index 47ca579..67aba6b 100644
--- a/media/libstagefright/include/OMXNodeInstance.h
+++ b/media/libstagefright/include/OMXNodeInstance.h
@@ -27,6 +27,7 @@ namespace android {
class IOMXObserver;
struct OMXMaster;
+struct GraphicBufferSource;
struct OMXNodeInstance {
OMXNodeInstance(
@@ -65,6 +66,11 @@ struct OMXNodeInstance {
OMX_U32 portIndex, const sp<GraphicBuffer> &graphicBuffer,
OMX::buffer_id *buffer);
+ status_t createInputSurface(
+ OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer);
+
+ status_t signalEndOfInputStream();
+
status_t allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data);
@@ -82,12 +88,18 @@ struct OMXNodeInstance {
OMX_U32 rangeOffset, OMX_U32 rangeLength,
OMX_U32 flags, OMX_TICKS timestamp);
+ status_t emptyDirectBuffer(
+ OMX_BUFFERHEADERTYPE *header,
+ OMX_U32 rangeOffset, OMX_U32 rangeLength,
+ OMX_U32 flags, OMX_TICKS timestamp);
+
status_t getExtensionIndex(
const char *parameterName, OMX_INDEXTYPE *index);
void onMessage(const omx_message &msg);
void onObserverDied(OMXMaster *master);
void onGetHandleFailed();
+ void onEvent(OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2);
static OMX_CALLBACKTYPE kCallbacks;
@@ -100,6 +112,13 @@ private:
sp<IOMXObserver> mObserver;
bool mDying;
+ // Lock only covers mGraphicBufferSource. We can't always use mLock
+ // because of rare instances where we'd end up locking it recursively.
+ Mutex mGraphicBufferSourceLock;
+ // Access this through getGraphicBufferSource().
+ sp<GraphicBufferSource> mGraphicBufferSource;
+
+
struct ActiveBuffer {
OMX_U32 mPortIndex;
OMX::buffer_id mID;
@@ -132,6 +151,11 @@ private:
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE *pBuffer);
+ status_t storeMetaDataInBuffers_l(OMX_U32 portIndex, OMX_BOOL enable);
+
+ sp<GraphicBufferSource> getGraphicBufferSource();
+ void setGraphicBufferSource(const sp<GraphicBufferSource>& bufferSource);
+
OMXNodeInstance(const OMXNodeInstance &);
OMXNodeInstance &operator=(const OMXNodeInstance &);
};
diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h
new file mode 100644
index 0000000..ca59dc0
--- /dev/null
+++ b/media/libstagefright/include/SDPLoader.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SDP_LOADER_H_
+
+#define SDP_LOADER_H_
+
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <utils/String8.h>
+
+namespace android {
+
+struct HTTPBase;
+
+struct SDPLoader : public AHandler {
+ enum Flags {
+ // Don't log any URLs.
+ kFlagIncognito = 1,
+ };
+ enum {
+ kWhatSDPLoaded = 'sdpl'
+ };
+ SDPLoader(const sp<AMessage> &notify, uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+
+ void load(const char* url, const KeyedVector<String8, String8> *headers);
+
+ void cancel();
+
+protected:
+ virtual ~SDPLoader() {}
+
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum {
+ kWhatLoad = 'load',
+ };
+
+ void onLoad(const sp<AMessage> &msg);
+
+ sp<AMessage> mNotify;
+ const char* mUrl;
+ uint32_t mFlags;
+ bool mUIDValid;
+ uid_t mUID;
+ sp<ALooper> mNetLooper;
+ bool mCancelled;
+
+ sp<HTTPBase> mHTTPDataSource;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SDPLoader);
+};
+
+} // namespace android
+
+#endif // SDP_LOADER_H_
diff --git a/media/libstagefright/include/SimpleSoftOMXComponent.h b/media/libstagefright/include/SimpleSoftOMXComponent.h
index 50cd275..f8c61eb 100644
--- a/media/libstagefright/include/SimpleSoftOMXComponent.h
+++ b/media/libstagefright/include/SimpleSoftOMXComponent.h
@@ -71,6 +71,7 @@ protected:
virtual void onPortFlushCompleted(OMX_U32 portIndex);
virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+ virtual void onReset();
PortInfo *editPortInfo(OMX_U32 portIndex);
diff --git a/media/libstagefright/include/ThrottledSource.h b/media/libstagefright/include/ThrottledSource.h
index 7fe7c06..673268b 100644
--- a/media/libstagefright/include/ThrottledSource.h
+++ b/media/libstagefright/include/ThrottledSource.h
@@ -28,18 +28,44 @@ struct ThrottledSource : public DataSource {
const sp<DataSource> &source,
int32_t bandwidthLimitBytesPerSecond);
- virtual status_t initCheck() const;
-
+ // implementation of readAt() that sleeps to achieve the desired max throughput
virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off64_t *size);
- virtual uint32_t flags();
+ // returns an empty string to prevent callers from using the Uri to construct a new datasource
+ virtual String8 getUri() {
+ return String8();
+ }
+
+ // The following methods all call through to the wrapped DataSource's methods
+
+ status_t initCheck() const {
+ return mSource->initCheck();
+ }
+
+ virtual status_t getSize(off64_t *size) {
+ return mSource->getSize(size);
+ }
+
+ virtual uint32_t flags() {
+ return mSource->flags();
+ }
+
+ virtual status_t reconnectAtOffset(off64_t offset) {
+ return mSource->reconnectAtOffset(offset);
+ }
+
+ virtual sp<DecryptHandle> DrmInitialization(const char *mime = NULL) {
+ return mSource->DrmInitialization(mime);
+ }
+
+ virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client) {
+ mSource->getDrmInfo(handle, client);
+ };
virtual String8 getMIMEType() const {
return mSource->getMIMEType();
}
-
private:
Mutex mLock;
diff --git a/media/libstagefright/include/avc_utils.h b/media/libstagefright/include/avc_utils.h
index e418822..d517320 100644
--- a/media/libstagefright/include/avc_utils.h
+++ b/media/libstagefright/include/avc_utils.h
@@ -36,8 +36,11 @@ enum {
kAVCProfileCAVLC444Intra = 0x2c
};
+// Optionally returns sample aspect ratio as well.
void FindAVCDimensions(
- const sp<ABuffer> &seqParamSet, int32_t *width, int32_t *height);
+ const sp<ABuffer> &seqParamSet,
+ int32_t *width, int32_t *height,
+ int32_t *sarWidth = NULL, int32_t *sarHeight = NULL);
unsigned parseUE(ABitReader *br);
diff --git a/media/libstagefright/include/chromium_http_stub.h b/media/libstagefright/include/chromium_http_stub.h
index 869d4ac..e0651a4 100644
--- a/media/libstagefright/include/chromium_http_stub.h
+++ b/media/libstagefright/include/chromium_http_stub.h
@@ -23,6 +23,10 @@
namespace android {
extern "C" {
HTTPBase *createChromiumHTTPDataSource(uint32_t flags);
+
+status_t UpdateChromiumHTTPDataSourceProxyConfig(
+ const char *host, int32_t port, const char *exclusionList);
+
DataSource *createDataUriSource(const char *uri);
}
}
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 8f7d12b..b304749 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -263,8 +263,8 @@ void BlockIterator::advance_l() {
mCluster, nextCluster, pos, len);
ALOGV("ParseNext returned %ld", res);
- if (res > 0) {
- // EOF
+ if (res != 0) {
+ // EOF or error
mCluster = NULL;
break;
@@ -758,31 +758,69 @@ static void addESDSFromCodecPrivate(
esds = NULL;
}
-void addVorbisCodecInfo(
+status_t addVorbisCodecInfo(
const sp<MetaData> &meta,
const void *_codecPrivate, size_t codecPrivateSize) {
- // printf("vorbis private data follows:\n");
// hexdump(_codecPrivate, codecPrivateSize);
- CHECK(codecPrivateSize >= 3);
+ if (codecPrivateSize < 1) {
+ return ERROR_MALFORMED;
+ }
const uint8_t *codecPrivate = (const uint8_t *)_codecPrivate;
- CHECK(codecPrivate[0] == 0x02);
- size_t len1 = codecPrivate[1];
- size_t len2 = codecPrivate[2];
+ if (codecPrivate[0] != 0x02) {
+ return ERROR_MALFORMED;
+ }
- CHECK(codecPrivateSize > 3 + len1 + len2);
+ // codecInfo starts with two lengths, len1 and len2, that are
+ // "Xiph-style-lacing encoded"...
- CHECK(codecPrivate[3] == 0x01);
- meta->setData(kKeyVorbisInfo, 0, &codecPrivate[3], len1);
+ size_t offset = 1;
+ size_t len1 = 0;
+ while (offset < codecPrivateSize && codecPrivate[offset] == 0xff) {
+ len1 += 0xff;
+ ++offset;
+ }
+ if (offset >= codecPrivateSize) {
+ return ERROR_MALFORMED;
+ }
+ len1 += codecPrivate[offset++];
- CHECK(codecPrivate[len1 + 3] == 0x03);
+ size_t len2 = 0;
+ while (offset < codecPrivateSize && codecPrivate[offset] == 0xff) {
+ len2 += 0xff;
+ ++offset;
+ }
+ if (offset >= codecPrivateSize) {
+ return ERROR_MALFORMED;
+ }
+ len2 += codecPrivate[offset++];
+
+ if (codecPrivateSize < offset + len1 + len2) {
+ return ERROR_MALFORMED;
+ }
+
+ if (codecPrivate[offset] != 0x01) {
+ return ERROR_MALFORMED;
+ }
+ meta->setData(kKeyVorbisInfo, 0, &codecPrivate[offset], len1);
+
+ offset += len1;
+ if (codecPrivate[offset] != 0x03) {
+ return ERROR_MALFORMED;
+ }
+
+ offset += len2;
+ if (codecPrivate[offset] != 0x05) {
+ return ERROR_MALFORMED;
+ }
- CHECK(codecPrivate[len1 + len2 + 3] == 0x05);
meta->setData(
- kKeyVorbisBooks, 0, &codecPrivate[len1 + len2 + 3],
- codecPrivateSize - len1 - len2 - 3);
+ kKeyVorbisBooks, 0, &codecPrivate[offset],
+ codecPrivateSize - offset);
+
+ return OK;
}
void MatroskaExtractor::addTracks() {
@@ -809,6 +847,8 @@ void MatroskaExtractor::addTracks() {
sp<MetaData> meta = new MetaData;
+ status_t err = OK;
+
switch (track->GetType()) {
case VIDEO_TRACK:
{
@@ -855,7 +895,8 @@ void MatroskaExtractor::addTracks() {
} else if (!strcmp("A_VORBIS", codecID)) {
meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);
- addVorbisCodecInfo(meta, codecPrivate, codecPrivateSize);
+ err = addVorbisCodecInfo(
+ meta, codecPrivate, codecPrivateSize);
} else if (!strcmp("A_MPEG/L3", codecID)) {
meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
} else {
@@ -872,6 +913,11 @@ void MatroskaExtractor::addTracks() {
continue;
}
+ if (err != OK) {
+ ALOGE("skipping track, codec specific data was malformed.");
+ continue;
+ }
+
long long durationNs = mSegment->GetDuration();
meta->setInt64(kKeyDuration, (durationNs + 500) / 1000);
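The "Xiph-style lacing" mentioned in the addVorbisCodecInfo() comment can be summarized with a small self-contained helper: each length is the sum of any leading 0xff bytes plus one terminating byte below 0xff. This is a sketch of the same decoding the loops above perform, not code from the change itself, and the function name is made up.

#include <stdint.h>
#include <stddef.h>

// Each laced length is the sum of any number of 0xff bytes plus one final
// byte smaller than 0xff. Returns false on truncated data.
static bool readXiphLacedLength(
        const uint8_t *data, size_t size, size_t *offset, size_t *length) {
    size_t len = 0;
    while (*offset < size && data[*offset] == 0xff) {
        len += 0xff;
        ++(*offset);
    }
    if (*offset >= size) {
        return false;
    }
    len += data[(*offset)++];
    *length = len;
    return true;
}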
diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp
index 451c837..0102656 100644
--- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp
+++ b/media/libstagefright/mp4/FragmentedMP4Parser.cpp
@@ -18,6 +18,7 @@
#define LOG_TAG "FragmentedMP4Parser"
#include <utils/Log.h>
+#include "include/avc_utils.h"
#include "include/ESDS.h"
#include "include/FragmentedMP4Parser.h"
#include "TrackFragment.h"
@@ -323,8 +324,7 @@ status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) {
off_t totalOffset = mFirstMoofOffset;
for (int i = 0; i < numSidxEntries; i++) {
const SidxEntry *se = &info->mSidx[i];
- totalTime += se->mDurationUs;
- if (totalTime > position) {
+ if (totalTime + se->mDurationUs > position) {
mBuffer->setRange(0,0);
mBufferPos = totalOffset;
if (mFinalResult == ERROR_END_OF_STREAM) {
@@ -333,9 +333,10 @@ status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) {
resumeIfNecessary();
}
info->mFragments.clear();
- info->mDecodingTime = position * info->mMediaTimeScale / 1000000ll;
+ info->mDecodingTime = totalTime * info->mMediaTimeScale / 1000000ll;
return OK;
}
+ totalTime += se->mDurationUs;
totalOffset += se->mSize;
}
}
@@ -965,6 +966,10 @@ status_t FragmentedMP4Parser::makeAccessUnit(
sample.mSize);
(*accessUnit)->meta()->setInt64("timeUs", presentationTimeUs);
+ if (IsIDR(*accessUnit)) {
+ (*accessUnit)->meta()->setInt32("is-sync-frame", 1);
+ }
+
return OK;
}
@@ -1007,6 +1012,9 @@ status_t FragmentedMP4Parser::makeAccessUnit(
"timeUs", presentationTimeUs);
}
}
+ if (IsIDR(*accessUnit)) {
+ (*accessUnit)->meta()->setInt32("is-sync-frame", 1);
+ }
return OK;
}
@@ -1975,8 +1983,8 @@ status_t FragmentedMP4Parser::parseTrackFragmentRun(
}
void FragmentedMP4Parser::copyBuffer(
- sp<ABuffer> *dst, size_t offset, uint64_t size, size_t extra) const {
- sp<ABuffer> buf = new ABuffer(size + extra);
+ sp<ABuffer> *dst, size_t offset, uint64_t size) const {
+ sp<ABuffer> buf = new ABuffer(size);
memcpy(buf->data(), mBuffer->data() + offset, size);
*dst = buf;
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index 4f6c4b2..9850a46 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -452,6 +452,10 @@ int64_t ATSParser::Program::convertPTSToTimestamp(uint64_t PTS) {
timeUs += mParser->mAbsoluteTimeAnchorUs;
}
+ if (mParser->mTimeOffsetValid) {
+ timeUs += mParser->mTimeOffsetUs;
+ }
+
return timeUs;
}
@@ -534,6 +538,16 @@ status_t ATSParser::Stream::parse(
mBuffer->setRange(0, 0);
mExpectedContinuityCounter = -1;
+#if 0
+ // Uncomment this if you'd rather see no corruption whatsoever on
+ // screen and suspend updates until we come across another IDR frame.
+
+ if (mStreamType == STREAMTYPE_H264) {
+ ALOGI("clearing video queue");
+ mQueue->clear(true /* clearFormat */);
+ }
+#endif
+
return OK;
}
@@ -920,6 +934,8 @@ sp<MediaSource> ATSParser::Stream::getSource(SourceType type) {
ATSParser::ATSParser(uint32_t flags)
: mFlags(flags),
mAbsoluteTimeAnchorUs(-1ll),
+ mTimeOffsetValid(false),
+ mTimeOffsetUs(0ll),
mNumTSPacketsParsed(0),
mNumPCRs(0) {
mPSISections.add(0 /* PID */, new PSISection);
@@ -950,6 +966,13 @@ void ATSParser::signalDiscontinuity(
CHECK(mPrograms.empty());
mAbsoluteTimeAnchorUs = timeUs;
return;
+ } else if (type == DISCONTINUITY_TIME_OFFSET) {
+ int64_t offset;
+ CHECK(extra->findInt64("offset", &offset));
+
+ mTimeOffsetValid = true;
+ mTimeOffsetUs = offset;
+ return;
}
for (size_t i = 0; i < mPrograms.size(); ++i) {
@@ -1036,7 +1059,7 @@ status_t ATSParser::parsePID(
ssize_t sectionIndex = mPSISections.indexOfKey(PID);
if (sectionIndex >= 0) {
- const sp<PSISection> &section = mPSISections.valueAt(sectionIndex);
+ sp<PSISection> section = mPSISections.valueAt(sectionIndex);
if (payload_unit_start_indicator) {
CHECK(section->isEmpty());
@@ -1045,7 +1068,6 @@ status_t ATSParser::parsePID(
br->skipBits(skip * 8);
}
-
CHECK((br->numBitsLeft() % 8) == 0);
status_t err = section->append(br->data(), br->numBitsLeft() / 8);
@@ -1080,10 +1102,13 @@ status_t ATSParser::parsePID(
if (!handled) {
mPSISections.removeItem(PID);
+ section.clear();
}
}
- section->clear();
+ if (section != NULL) {
+ section->clear();
+ }
return OK;
}
diff --git a/media/libstagefright/mpeg2ts/ATSParser.h b/media/libstagefright/mpeg2ts/ATSParser.h
index 46edc45..a10edc9 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.h
+++ b/media/libstagefright/mpeg2ts/ATSParser.h
@@ -39,6 +39,7 @@ struct ATSParser : public RefBase {
DISCONTINUITY_AUDIO_FORMAT = 2,
DISCONTINUITY_VIDEO_FORMAT = 4,
DISCONTINUITY_ABSOLUTE_TIME = 8,
+ DISCONTINUITY_TIME_OFFSET = 16,
DISCONTINUITY_SEEK = DISCONTINUITY_TIME,
@@ -106,6 +107,9 @@ private:
int64_t mAbsoluteTimeAnchorUs;
+ bool mTimeOffsetValid;
+ int64_t mTimeOffsetUs;
+
size_t mNumTSPacketsParsed;
void parseProgramAssociationTable(ABitReader *br);
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index a605a05..3de3a61 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -28,9 +28,12 @@
namespace android {
+const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs
+
AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)
: mIsAudio(false),
mFormat(meta),
+ mLastQueuedTimeUs(0),
mEOSResult(OK) {
const char *mime;
CHECK(meta->findCString(kKeyMIMEType, &mime));
@@ -141,9 +144,8 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
return;
}
- int64_t timeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", timeUs, timeUs / 1E6);
+ CHECK(buffer->meta()->findInt64("timeUs", &mLastQueuedTimeUs));
+ ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
Mutex::Autolock autoLock(mLock);
mBuffers.push_back(buffer);
@@ -171,6 +173,7 @@ void AnotherPacketSource::queueDiscontinuity(
}
mEOSResult = OK;
+ mLastQueuedTimeUs = 0;
sp<ABuffer> buffer = new ABuffer(0);
buffer->meta()->setInt32("discontinuity", static_cast<int32_t>(type));
@@ -247,4 +250,15 @@ status_t AnotherPacketSource::nextBufferTime(int64_t *timeUs) {
return OK;
}
+bool AnotherPacketSource::isFinished(int64_t duration) const {
+ if (duration > 0) {
+ int64_t diff = duration - mLastQueuedTimeUs;
+ if (diff < kNearEOSMarkUs && diff > -kNearEOSMarkUs) {
+ ALOGV("Detecting EOS due to near end");
+ return true;
+ }
+ }
+ return (mEOSResult != OK);
+}
+
} // namespace android
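A compact restatement of the isFinished() heuristic above, as a stand-alone sketch: when the total duration is known, a source whose last queued timestamp lies within kNearEOSMarkUs (2 seconds) of the end is treated as finished even before an explicit EOS is queued. The helper name is made up for this example.

#include <stdint.h>

static bool nearEndOfStream(int64_t durationUs, int64_t lastQueuedTimeUs) {
    static const int64_t kNearEOSMarkUs = 2000000ll;  // 2 seconds
    if (durationUs <= 0) {
        return false;  // unknown duration: rely on an explicit EOS instead
    }
    int64_t diff = durationUs - lastQueuedTimeUs;
    return diff < kNearEOSMarkUs && diff > -kNearEOSMarkUs;
}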
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.h b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
index d685b98..1db4068 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.h
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.h
@@ -58,6 +58,8 @@ struct AnotherPacketSource : public MediaSource {
status_t dequeueAccessUnit(sp<ABuffer> *buffer);
+ bool isFinished(int64_t duration) const;
+
protected:
virtual ~AnotherPacketSource();
@@ -67,6 +69,7 @@ private:
bool mIsAudio;
sp<MetaData> mFormat;
+ int64_t mLastQueuedTimeUs;
List<sp<ABuffer> > mBuffers;
status_t mEOSResult;
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index 82fb637..9f3b19c 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -147,9 +147,9 @@ status_t ElementaryStreamQueue::appendData(
}
if (startOffset > 0) {
- ALOGI("found something resembling an H.264/MPEG syncword at "
- "offset %ld",
- startOffset);
+ ALOGI("found something resembling an H.264/MPEG syncword "
+ "at offset %d",
+ startOffset);
}
data = &ptr[startOffset];
@@ -180,9 +180,9 @@ status_t ElementaryStreamQueue::appendData(
}
if (startOffset > 0) {
- ALOGI("found something resembling an H.264/MPEG syncword at "
- "offset %ld",
- startOffset);
+ ALOGI("found something resembling an H.264/MPEG syncword "
+ "at offset %d",
+ startOffset);
}
data = &ptr[startOffset];
@@ -213,8 +213,9 @@ status_t ElementaryStreamQueue::appendData(
}
if (startOffset > 0) {
- ALOGI("found something resembling an AAC syncword at offset %ld",
- startOffset);
+ ALOGI("found something resembling an AAC syncword at "
+ "offset %d",
+ startOffset);
}
data = &ptr[startOffset];
@@ -241,8 +242,8 @@ status_t ElementaryStreamQueue::appendData(
if (startOffset > 0) {
ALOGI("found something resembling an MPEG audio "
- "syncword at offset %ld",
- startOffset);
+ "syncword at offset %d",
+ startOffset);
}
data = &ptr[startOffset];
@@ -394,10 +395,30 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitPCMAudio() {
}
sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
- int64_t timeUs;
+ if (mBuffer->size() == 0) {
+ return NULL;
+ }
+
+ CHECK(!mRangeInfos.empty());
+ const RangeInfo &info = *mRangeInfos.begin();
+ if (mBuffer->size() < info.mLength) {
+ return NULL;
+ }
+
+ CHECK_GE(info.mTimestampUs, 0ll);
+
+ // The idea here is to consume all AAC frames starting at offsets before
+ // info.mLength so we can assign a meaningful timestamp without
+ // having to interpolate.
+ // The final AAC frame may well extend into the next RangeInfo but
+ // that's ok.
size_t offset = 0;
- while (offset + 7 <= mBuffer->size()) {
+ while (offset < info.mLength) {
+ if (offset + 7 > mBuffer->size()) {
+ return NULL;
+ }
+
ABitReader bits(mBuffer->data() + offset, mBuffer->size() - offset);
// adts_fixed_header
@@ -450,24 +471,15 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitAAC() {
}
if (offset + aac_frame_length > mBuffer->size()) {
- break;
+ return NULL;
}
size_t headerSize = protection_absent ? 7 : 9;
- int64_t tmpUs = fetchTimestamp(aac_frame_length);
- CHECK_GE(tmpUs, 0ll);
-
- if (offset == 0) {
- timeUs = tmpUs;
- }
-
offset += aac_frame_length;
}
- if (offset == 0) {
- return NULL;
- }
+ int64_t timeUs = fetchTimestamp(offset);
sp<ABuffer> accessUnit = new ABuffer(offset);
memcpy(accessUnit->data(), mBuffer->data(), offset);
@@ -492,7 +504,6 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {
if (first) {
timeUs = info->mTimestampUs;
- first = false;
}
if (info->mLength > size) {
@@ -509,6 +520,8 @@ int64_t ElementaryStreamQueue::fetchTimestamp(size_t size) {
mRangeInfos.erase(mRangeInfos.begin());
info = NULL;
}
+
+ first = false;
}
if (timeUs == 0ll) {
@@ -536,7 +549,7 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
size_t nalSize;
bool foundSlice = false;
while ((err = getNextNALUnit(&data, &size, &nalStart, &nalSize)) == OK) {
- CHECK_GT(nalSize, 0u);
+ if (nalSize == 0) continue;
unsigned nalType = nalStart[0] & 0x1f;
bool flush = false;
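As background for the reworked dequeueAccessUnitAAC() above, here is a simplified sketch of the fetchTimestamp() bookkeeping it now relies on: byte ranges queued with their own timestamps are consumed front to back, and the timestamp of the first range touched is the one reported for the dequeued access unit. The types and function name below are stand-ins, not the actual ElementaryStreamQueue structures.

#include <stdint.h>
#include <stddef.h>
#include <list>

struct Range {
    size_t length;        // bytes queued with this timestamp
    int64_t timestampUs;
};

// Consume 'size' bytes worth of ranges and report the timestamp of the first
// range touched, mirroring how the access unit takes its timestamp from the
// RangeInfo it starts in.
static int64_t consumeAndFetchTimestamp(std::list<Range> &ranges, size_t size) {
    int64_t timeUs = -1;
    bool first = true;

    while (size > 0 && !ranges.empty()) {
        Range &info = ranges.front();

        if (first) {
            timeUs = info.timestampUs;
        }

        if (info.length > size) {
            info.length -= size;
            size = 0;
        } else {
            size -= info.length;
            ranges.pop_front();
        }

        first = false;
    }

    return timeUs;
}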
diff --git a/media/libstagefright/omx/Android.mk b/media/libstagefright/omx/Android.mk
index d7fbbbe..a8b4939 100644
--- a/media/libstagefright/omx/Android.mk
+++ b/media/libstagefright/omx/Android.mk
@@ -2,6 +2,7 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
+ GraphicBufferSource.cpp \
OMX.cpp \
OMXMaster.cpp \
OMXNodeInstance.cpp \
@@ -18,7 +19,9 @@ LOCAL_SHARED_LIBRARIES := \
libbinder \
libmedia \
libutils \
+ liblog \
libui \
+ libgui \
libcutils \
libstagefright_foundation \
libdl
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
new file mode 100644
index 0000000..ef27879
--- /dev/null
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -0,0 +1,467 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "GraphicBufferSource"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+#include <GraphicBufferSource.h>
+
+#include <OMX_Core.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <MetadataBufferType.h>
+#include <ui/GraphicBuffer.h>
+
+namespace android {
+
+static const bool EXTRA_CHECK = true;
+
+
+GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
+ uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount) :
+ mInitCheck(UNKNOWN_ERROR),
+ mNodeInstance(nodeInstance),
+ mExecuting(false),
+ mNumFramesAvailable(0),
+ mEndOfStream(false),
+ mEndOfStreamSent(false) {
+
+ ALOGV("GraphicBufferSource w=%u h=%u c=%u",
+ bufferWidth, bufferHeight, bufferCount);
+
+ if (bufferWidth == 0 || bufferHeight == 0) {
+ ALOGE("Invalid dimensions %ux%u", bufferWidth, bufferHeight);
+ mInitCheck = BAD_VALUE;
+ return;
+ }
+
+ String8 name("GraphicBufferSource");
+
+ mBufferQueue = new BufferQueue(true);
+ mBufferQueue->setConsumerName(name);
+ mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mBufferQueue->setSynchronousMode(true);
+ mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
+ GRALLOC_USAGE_HW_TEXTURE);
+
+ mInitCheck = mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+ if (mInitCheck != NO_ERROR) {
+ ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
+ bufferCount, mInitCheck);
+ return;
+ }
+
+ // Note that we can't create an sp<...>(this) in a ctor that will not keep a
+ // reference once the ctor ends, as that would cause the refcount of 'this'
+ // to drop to 0 at the end of the ctor. Since all we need is a wp<...>,
+ // that's what we create.
+ wp<BufferQueue::ConsumerListener> listener;
+ listener = static_cast<BufferQueue::ConsumerListener*>(this);
+
+ sp<BufferQueue::ConsumerListener> proxy;
+ proxy = new BufferQueue::ProxyConsumerListener(listener);
+
+ mInitCheck = mBufferQueue->consumerConnect(proxy);
+ if (mInitCheck != NO_ERROR) {
+ ALOGE("Error connecting to BufferQueue: %s (%d)",
+ strerror(-mInitCheck), mInitCheck);
+ return;
+ }
+
+ CHECK(mInitCheck == NO_ERROR);
+}
+
+GraphicBufferSource::~GraphicBufferSource() {
+ ALOGV("~GraphicBufferSource");
+ if (mBufferQueue != NULL) {
+ status_t err = mBufferQueue->consumerDisconnect();
+ if (err != NO_ERROR) {
+ ALOGW("consumerDisconnect failed: %d", err);
+ }
+ }
+}
+
+void GraphicBufferSource::omxExecuting() {
+ Mutex::Autolock autoLock(mMutex);
+ ALOGV("--> executing; avail=%d, codec vec size=%zd",
+ mNumFramesAvailable, mCodecBuffers.size());
+ CHECK(!mExecuting);
+ mExecuting = true;
+
+ // Start by loading up as many buffers as possible. We want to do this,
+ // rather than just submit the first buffer, to avoid a degenerate case:
+ // if all BQ buffers arrive before we start executing, and we only submit
+ // one here, the other BQ buffers will just sit until we get notified
+ // that the codec buffer has been released. We'd then acquire and
+ // submit a single additional buffer, repeatedly, never using more than
+ // one codec buffer simultaneously. (We could instead try to submit
+ // all BQ buffers whenever any codec buffer is freed, but if we get the
+ // initial conditions right that will never be useful.)
+ while (mNumFramesAvailable) {
+ if (!fillCodecBuffer_l()) {
+ ALOGV("stop load with frames available (codecAvail=%d)",
+ isCodecBufferAvailable_l());
+ break;
+ }
+ }
+
+ ALOGV("done loading initial frames, avail=%d", mNumFramesAvailable);
+
+ // If EOS has already been signaled, and there are no more frames to
+ // submit, try to send EOS now as well.
+ if (mEndOfStream && mNumFramesAvailable == 0) {
+ submitEndOfInputStream_l();
+ }
+}
+
+void GraphicBufferSource::omxLoaded(){
+ Mutex::Autolock autoLock(mMutex);
+ ALOGV("--> loaded");
+ CHECK(mExecuting);
+
+ ALOGV("Dropped down to loaded, avail=%d eos=%d eosSent=%d",
+ mNumFramesAvailable, mEndOfStream, mEndOfStreamSent);
+
+ // Codec is no longer executing. Discard all codec-related state.
+ mCodecBuffers.clear();
+ // TODO: scan mCodecBuffers to verify that all mGraphicBuffer entries
+ // are null; complain if not
+
+ mExecuting = false;
+}
+
+void GraphicBufferSource::addCodecBuffer(OMX_BUFFERHEADERTYPE* header) {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mExecuting) {
+ // This should never happen -- buffers can only be allocated when
+ // transitioning from "loaded" to "idle".
+ ALOGE("addCodecBuffer: buffer added while executing");
+ return;
+ }
+
+ ALOGV("addCodecBuffer h=%p size=%lu p=%p",
+ header, header->nAllocLen, header->pBuffer);
+ CodecBuffer codecBuffer;
+ codecBuffer.mHeader = header;
+ mCodecBuffers.add(codecBuffer);
+}
+
+void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
+ Mutex::Autolock autoLock(mMutex);
+
+ CHECK(mExecuting); // could this happen if app stop()s early?
+
+ int cbi = findMatchingCodecBuffer_l(header);
+ if (cbi < 0) {
+ // This should never happen.
+ ALOGE("codecBufferEmptied: buffer not recognized (h=%p)", header);
+ return;
+ }
+
+ ALOGV("codecBufferEmptied h=%p size=%lu filled=%lu p=%p",
+ header, header->nAllocLen, header->nFilledLen,
+ header->pBuffer);
+ CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
+
+ // header->nFilledLen may not be the original value, so we can't compare
+ // that to zero to see if this was the EOS buffer. Instead we just
+ // see if the GraphicBuffer reference was null, which should only ever
+ // happen for EOS.
+ if (codecBuffer.mGraphicBuffer == NULL) {
+ CHECK(mEndOfStream && mEndOfStreamSent);
+ // No GraphicBuffer to deal with, no additional input or output is
+ // expected, so just return.
+ return;
+ }
+
+ if (EXTRA_CHECK) {
+ // Pull the graphic buffer handle back out of the buffer, and confirm
+ // that it matches expectations.
+ OMX_U8* data = header->pBuffer;
+ buffer_handle_t bufferHandle;
+ memcpy(&bufferHandle, data + 4, sizeof(buffer_handle_t));
+ if (bufferHandle != codecBuffer.mGraphicBuffer->handle) {
+ // should never happen
+ ALOGE("codecBufferEmptied: buffer's handle is %p, expected %p",
+ bufferHandle, codecBuffer.mGraphicBuffer->handle);
+ CHECK(!"codecBufferEmptied: mismatched buffer");
+ }
+ }
+
+ // Find matching entry in our cached copy of the BufferQueue slots.
+ // If we find a match, release that slot. If we don't, the BufferQueue
+ // has dropped that GraphicBuffer, and there's nothing for us to release.
+ //
+ // (We could store "id" in CodecBuffer and avoid the slot search.)
+ int id;
+ for (id = 0; id < BufferQueue::NUM_BUFFER_SLOTS; id++) {
+ if (mBufferSlot[id] == NULL) {
+ continue;
+ }
+
+ if (mBufferSlot[id]->handle == codecBuffer.mGraphicBuffer->handle) {
+ ALOGV("cbi %d matches bq slot %d, handle=%p",
+ cbi, id, mBufferSlot[id]->handle);
+
+ mBufferQueue->releaseBuffer(id, EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
+ Fence::NO_FENCE);
+ break;
+ }
+ }
+ if (id == BufferQueue::NUM_BUFFER_SLOTS) {
+ ALOGV("codecBufferEmptied: no match for emptied buffer in cbi %d",
+ cbi);
+ }
+
+ // Mark the codec buffer as available by clearing the GraphicBuffer ref.
+ codecBuffer.mGraphicBuffer = NULL;
+
+ if (mNumFramesAvailable) {
+ // Fill this codec buffer.
+ CHECK(!mEndOfStreamSent);
+ ALOGV("buffer freed, %d frames avail (eos=%d)",
+ mNumFramesAvailable, mEndOfStream);
+ fillCodecBuffer_l();
+ } else if (mEndOfStream) {
+ // No frames available, but EOS is pending, so use this buffer to
+ // send that.
+ ALOGV("buffer freed, EOS pending");
+ submitEndOfInputStream_l();
+ }
+ return;
+}
+
+bool GraphicBufferSource::fillCodecBuffer_l() {
+ CHECK(mExecuting && mNumFramesAvailable > 0);
+
+ int cbi = findAvailableCodecBuffer_l();
+ if (cbi < 0) {
+ // No buffers available, bail.
+ ALOGV("fillCodecBuffer_l: no codec buffers, avail now %d",
+ mNumFramesAvailable);
+ return false;
+ }
+
+ ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d",
+ mNumFramesAvailable);
+ BufferQueue::BufferItem item;
+ status_t err = mBufferQueue->acquireBuffer(&item);
+ if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
+ // shouldn't happen
+ ALOGW("fillCodecBuffer_l: frame was not available");
+ return false;
+ } else if (err != OK) {
+ // now what? fake end-of-stream?
+ ALOGW("fillCodecBuffer_l: acquireBuffer returned err=%d", err);
+ return false;
+ }
+
+ mNumFramesAvailable--;
+
+ // Wait for it to become available.
+ err = item.mFence->waitForever("GraphicBufferSource::fillCodecBuffer_l");
+ if (err != OK) {
+ ALOGW("failed to wait for buffer fence: %d", err);
+ // keep going
+ }
+
+ // If this is the first time we're seeing this buffer, add it to our
+ // slot table.
+ if (item.mGraphicBuffer != NULL) {
+ ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf);
+ mBufferSlot[item.mBuf] = item.mGraphicBuffer;
+ }
+
+ err = submitBuffer_l(mBufferSlot[item.mBuf], item.mTimestamp / 1000, cbi);
+ if (err != OK) {
+ ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf);
+ mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY,
+ EGL_NO_SYNC_KHR, Fence::NO_FENCE);
+ } else {
+ ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
+ }
+
+ return true;
+}
+
+status_t GraphicBufferSource::signalEndOfInputStream() {
+ Mutex::Autolock autoLock(mMutex);
+ ALOGV("signalEndOfInputStream: exec=%d avail=%d eos=%d",
+ mExecuting, mNumFramesAvailable, mEndOfStream);
+
+ if (mEndOfStream) {
+ ALOGE("EOS was already signaled");
+ return INVALID_OPERATION;
+ }
+
+ // Set the end-of-stream flag. If no frames are pending from the
+ // BufferQueue, and a codec buffer is available, and we're executing,
+ // we initiate the EOS from here. Otherwise, we'll let
+ // codecBufferEmptied() (or omxExecuting) do it.
+ //
+ // Note: if there are no pending frames and all codec buffers are
+ // available, we *must* submit the EOS from here or we'll just
+ // stall since no future events are expected.
+ mEndOfStream = true;
+
+ if (mExecuting && mNumFramesAvailable == 0) {
+ submitEndOfInputStream_l();
+ }
+
+ return OK;
+}
+
+status_t GraphicBufferSource::submitBuffer_l(sp<GraphicBuffer>& graphicBuffer,
+ int64_t timestampUsec, int cbi) {
+ ALOGV("submitBuffer_l cbi=%d", cbi);
+ CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
+ codecBuffer.mGraphicBuffer = graphicBuffer;
+
+ OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
+ CHECK(header->nAllocLen >= 4 + sizeof(buffer_handle_t));
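+    // Metadata layout consumed by the component: a 4-byte
+    // kMetadataBufferTypeGrallocSource tag followed by the
+    // buffer_handle_t of the GraphicBuffer being passed.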
+ OMX_U8* data = header->pBuffer;
+ const OMX_U32 type = kMetadataBufferTypeGrallocSource;
+ buffer_handle_t handle = codecBuffer.mGraphicBuffer->handle;
+ memcpy(data, &type, 4);
+ memcpy(data + 4, &handle, sizeof(buffer_handle_t));
+
+ status_t err = mNodeInstance->emptyDirectBuffer(header, 0,
+ 4 + sizeof(buffer_handle_t), OMX_BUFFERFLAG_ENDOFFRAME,
+ timestampUsec);
+ if (err != OK) {
+ ALOGW("WARNING: emptyDirectBuffer failed: 0x%x", err);
+ codecBuffer.mGraphicBuffer = NULL;
+ return err;
+ }
+
+ ALOGV("emptyDirectBuffer succeeded, h=%p p=%p bufhandle=%p",
+ header, header->pBuffer, handle);
+ return OK;
+}
+
+void GraphicBufferSource::submitEndOfInputStream_l() {
+ CHECK(mEndOfStream);
+ if (mEndOfStreamSent) {
+ ALOGV("EOS already sent");
+ return;
+ }
+
+ int cbi = findAvailableCodecBuffer_l();
+ if (cbi < 0) {
+ ALOGV("submitEndOfInputStream_l: no codec buffers available");
+ return;
+ }
+
+ // We reject any additional incoming graphic buffers, so there's no need
+ // to stick a placeholder into codecBuffer.mGraphicBuffer to mark it as
+ // in-use.
+ CodecBuffer& codecBuffer(mCodecBuffers.editItemAt(cbi));
+
+ OMX_BUFFERHEADERTYPE* header = codecBuffer.mHeader;
+ if (EXTRA_CHECK) {
+ // Guard against implementations that don't check nFilledLen.
+ size_t fillLen = 4 + sizeof(buffer_handle_t);
+ CHECK(header->nAllocLen >= fillLen);
+ OMX_U8* data = header->pBuffer;
+ memset(data, 0xcd, fillLen);
+ }
+
+ uint64_t timestamp = 0; // does this matter?
+
+ status_t err = mNodeInstance->emptyDirectBuffer(header, /*offset*/ 0,
+ /*length*/ 0, OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS,
+ timestamp);
+ if (err != OK) {
+ ALOGW("emptyDirectBuffer EOS failed: 0x%x", err);
+ } else {
+ ALOGV("submitEndOfInputStream_l: buffer submitted, header=%p cbi=%d",
+ header, cbi);
+ mEndOfStreamSent = true;
+ }
+}
+
+int GraphicBufferSource::findAvailableCodecBuffer_l() {
+ CHECK(mCodecBuffers.size() > 0);
+
+    for (int i = (int)mCodecBuffers.size() - 1; i >= 0; --i) {
+ if (mCodecBuffers[i].mGraphicBuffer == NULL) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+int GraphicBufferSource::findMatchingCodecBuffer_l(
+ const OMX_BUFFERHEADERTYPE* header) {
+    for (int i = (int)mCodecBuffers.size() - 1; i >= 0; --i) {
+ if (mCodecBuffers[i].mHeader == header) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+// BufferQueue::ConsumerListener callback
+void GraphicBufferSource::onFrameAvailable() {
+ Mutex::Autolock autoLock(mMutex);
+
+ ALOGV("onFrameAvailable exec=%d avail=%d",
+ mExecuting, mNumFramesAvailable);
+
+ if (mEndOfStream) {
+ // This should only be possible if a new buffer was queued after
+ // EOS was signaled, i.e. the app is misbehaving.
+ ALOGW("onFrameAvailable: EOS is set, ignoring frame");
+
+ BufferQueue::BufferItem item;
+ status_t err = mBufferQueue->acquireBuffer(&item);
+ if (err == OK) {
+ mBufferQueue->releaseBuffer(item.mBuf, EGL_NO_DISPLAY,
+ EGL_NO_SYNC_KHR, item.mFence);
+ }
+ return;
+ }
+
+ mNumFramesAvailable++;
+
+ if (mExecuting) {
+ fillCodecBuffer_l();
+ }
+}
+
+// BufferQueue::ConsumerListener callback
+void GraphicBufferSource::onBuffersReleased() {
+ Mutex::Autolock lock(mMutex);
+
+ uint32_t slotMask;
+ if (mBufferQueue->getReleasedBuffers(&slotMask) != NO_ERROR) {
+ ALOGW("onBuffersReleased: unable to get released buffer set");
+ slotMask = 0xffffffff;
+ }
+
+ ALOGV("onBuffersReleased: 0x%08x", slotMask);
+
+ for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
+ if ((slotMask & 0x01) != 0) {
+ mBufferSlot[i] = NULL;
+ }
+ slotMask >>= 1;
+ }
+}
+
+} // namespace android
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
new file mode 100644
index 0000000..562d342
--- /dev/null
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GRAPHIC_BUFFER_SOURCE_H_
+
+#define GRAPHIC_BUFFER_SOURCE_H_
+
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/BufferQueue.h>
+#include <utils/RefBase.h>
+
+#include <OMX_Core.h>
+#include "../include/OMXNodeInstance.h"
+#include <media/stagefright/foundation/ABase.h>
+
+namespace android {
+
+/*
+ * This class is used to feed OMX codecs from a Surface via BufferQueue.
+ *
+ * Instances of the class don't run on a dedicated thread. Instead,
+ * various events trigger data movement:
+ *
+ * - Availability of a new frame of data from the BufferQueue (notified
+ * via the onFrameAvailable callback).
+ * - The return of a codec buffer (via OnEmptyBufferDone).
+ * - Application signaling end-of-stream.
+ * - Transition to or from "executing" state.
+ *
+ * Frames of data (and, perhaps, the end-of-stream indication) can arrive
+ * before the codec is in the "executing" state, so we need to queue
+ * things up until we're ready to go.
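+ *
+ * A minimal, illustrative usage sketch from the owning OMXNodeInstance's
+ * point of view (error handling and the surrounding OMX plumbing are
+ * elided; variable names are placeholders):
+ *
+ *   sp<GraphicBufferSource> source = new GraphicBufferSource(
+ *           nodeInstance, width, height, bufferCount);
+ *   if (source->initCheck() != OK) { ... }           // constructor failed
+ *   sp<IGraphicBufferProducer> producer =
+ *           source->getIGraphicBufferProducer();     // hand to the producer
+ *   // ... frames get queued, the codec transitions to "executing" ...
+ *   source->signalEndOfInputStream();                // after the last frame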
+ */
+class GraphicBufferSource : public BufferQueue::ConsumerListener {
+public:
+ GraphicBufferSource(OMXNodeInstance* nodeInstance,
+ uint32_t bufferWidth, uint32_t bufferHeight, uint32_t bufferCount);
+ virtual ~GraphicBufferSource();
+
+ // We can't throw an exception if the constructor fails, so we just set
+ // this and require that the caller test the value.
+ status_t initCheck() const {
+ return mInitCheck;
+ }
+
+ // Returns the handle to the producer side of the BufferQueue. Buffers
+ // queued on this will be received by GraphicBufferSource.
+ sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
+ return mBufferQueue;
+ }
+
+ // This is called when OMX transitions to OMX_StateExecuting, which means
+ // we can start handing it buffers. If we already have buffers of data
+ // sitting in the BufferQueue, this will send them to the codec.
+ void omxExecuting();
+
+ // This is called when OMX transitions to OMX_StateLoaded, indicating that
+ // we are shutting down.
+ void omxLoaded();
+
+ // A "codec buffer", i.e. a buffer that can be used to pass data into
+ // the encoder, has been allocated. (This call does not call back into
+ // OMXNodeInstance.)
+ void addCodecBuffer(OMX_BUFFERHEADERTYPE* header);
+
+ // Called from OnEmptyBufferDone. If we have a BQ buffer available,
+ // fill it with a new frame of data; otherwise, just mark it as available.
+ void codecBufferEmptied(OMX_BUFFERHEADERTYPE* header);
+
+ // This is called after the last input frame has been submitted. We
+ // need to submit an empty buffer with the EOS flag set. If we don't
+ // have a codec buffer ready, we just set the mEndOfStream flag.
+ status_t signalEndOfInputStream();
+
+protected:
+ // BufferQueue::ConsumerListener interface, called when a new frame of
+ // data is available. If we're executing and a codec buffer is
+ // available, we acquire the buffer, copy the GraphicBuffer reference
+ // into the codec buffer, and call Empty[This]Buffer. If we're not yet
+ // executing or there's no codec buffer available, we just increment
+ // mNumFramesAvailable and return.
+ virtual void onFrameAvailable();
+
+ // BufferQueue::ConsumerListener interface, called when the client has
+ // released one or more GraphicBuffers. We clear out the appropriate
+ // set of mBufferSlot entries.
+ virtual void onBuffersReleased();
+
+private:
+ // Keep track of codec input buffers. They may either be available
+ // (mGraphicBuffer == NULL) or in use by the codec.
+ struct CodecBuffer {
+ OMX_BUFFERHEADERTYPE* mHeader;
+ sp<GraphicBuffer> mGraphicBuffer;
+ };
+
+ // Returns the index of an available codec buffer. If none are
+ // available, returns -1. Mutex must be held by caller.
+ int findAvailableCodecBuffer_l();
+
+ // Returns true if a codec buffer is available.
+ bool isCodecBufferAvailable_l() {
+ return findAvailableCodecBuffer_l() >= 0;
+ }
+
+ // Finds the mCodecBuffers entry that matches. Returns -1 if not found.
+ int findMatchingCodecBuffer_l(const OMX_BUFFERHEADERTYPE* header);
+
+ // Fills a codec buffer with a frame from the BufferQueue. This must
+ // only be called when we know that a frame of data is ready (i.e. we're
+ // in the onFrameAvailable callback, or if we're in codecBufferEmptied
+ // and mNumFramesAvailable is nonzero). Returns without doing anything if
+ // we don't have a codec buffer available.
+ //
+ // Returns true if we successfully filled a codec buffer with a BQ buffer.
+ bool fillCodecBuffer_l();
+
+ // Marks the mCodecBuffers entry as in-use, copies the GraphicBuffer
+ // reference into the codec buffer, and submits the data to the codec.
+ status_t submitBuffer_l(sp<GraphicBuffer>& graphicBuffer,
+ int64_t timestampUsec, int cbi);
+
+ // Submits an empty buffer, with the EOS flag set. Returns without
+ // doing anything if we don't have a codec buffer available.
+ void submitEndOfInputStream_l();
+
+ // Lock, covers all member variables.
+ mutable Mutex mMutex;
+
+ // Used to report constructor failure.
+ status_t mInitCheck;
+
+ // Pointer back to the object that contains us. We send buffers here.
+ OMXNodeInstance* mNodeInstance;
+
+    // Set by omxExecuting() / omxLoaded().
+ bool mExecuting;
+
+ // We consume graphic buffers from this.
+ sp<BufferQueue> mBufferQueue;
+
+ // Number of frames pending in BufferQueue that haven't yet been
+ // forwarded to the codec.
+ size_t mNumFramesAvailable;
+
+ // Set to true if we want to send end-of-stream after we run out of
+ // frames in BufferQueue.
+ bool mEndOfStream;
+ bool mEndOfStreamSent;
+
+ // Cache of GraphicBuffers from the buffer queue. When the codec
+ // is done processing a GraphicBuffer, we can use this to map back
+ // to a slot number.
+ sp<GraphicBuffer> mBufferSlot[BufferQueue::NUM_BUFFER_SLOTS];
+
+ // Tracks codec buffers.
+ Vector<CodecBuffer> mCodecBuffers;
+
+ DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
+};
+
+} // namespace android
+
+#endif // GRAPHIC_BUFFER_SOURCE_H_
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 29bc733..3987ead 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -345,6 +345,17 @@ status_t OMX::useGraphicBuffer(
port_index, graphicBuffer, buffer);
}
+status_t OMX::createInputSurface(
+ node_id node, OMX_U32 port_index,
+ sp<IGraphicBufferProducer> *bufferProducer) {
+ return findInstance(node)->createInputSurface(
+ port_index, bufferProducer);
+}
+
+status_t OMX::signalEndOfInputStream(node_id node) {
+ return findInstance(node)->signalEndOfInputStream();
+}
+
status_t OMX::allocateBuffer(
node_id node, OMX_U32 port_index, size_t size,
buffer_id *buffer, void **buffer_data) {
@@ -393,6 +404,9 @@ OMX_ERRORTYPE OMX::OnEvent(
OMX_IN OMX_PTR pEventData) {
ALOGV("OnEvent(%d, %ld, %ld)", eEvent, nData1, nData2);
+ // Forward to OMXNodeInstance.
+ findInstance(node)->onEvent(eEvent, nData1, nData2);
+
omx_message msg;
msg.type = omx_message::EVENT;
msg.node = node;
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index bff3def..a9eb94f 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -20,14 +20,18 @@
#include "../include/OMXNodeInstance.h"
#include "OMXMaster.h"
+#include "GraphicBufferSource.h"
#include <OMX_Component.h>
#include <binder/IMemory.h>
+#include <gui/BufferQueue.h>
#include <HardwareAPI.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaErrors.h>
+static const OMX_U32 kPortIndexInput = 0;
+
namespace android {
struct BufferMeta {
@@ -100,6 +104,17 @@ void OMXNodeInstance::setHandle(OMX::node_id node_id, OMX_HANDLETYPE handle) {
mHandle = handle;
}
+sp<GraphicBufferSource> OMXNodeInstance::getGraphicBufferSource() {
+ Mutex::Autolock autoLock(mGraphicBufferSourceLock);
+ return mGraphicBufferSource;
+}
+
+void OMXNodeInstance::setGraphicBufferSource(
+ const sp<GraphicBufferSource>& bufferSource) {
+ Mutex::Autolock autoLock(mGraphicBufferSourceLock);
+ mGraphicBufferSource = bufferSource;
+}
+
OMX *OMXNodeInstance::owner() {
return mOwner;
}
@@ -277,15 +292,16 @@ status_t OMXNodeInstance::getState(OMX_STATETYPE* state) {
status_t OMXNodeInstance::enableGraphicBuffers(
OMX_U32 portIndex, OMX_BOOL enable) {
Mutex::Autolock autoLock(mLock);
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.enableAndroidNativeBuffers");
OMX_INDEXTYPE index;
- OMX_ERRORTYPE err = OMX_GetExtensionIndex(
- mHandle,
- const_cast<OMX_STRING>("OMX.google.android.index.enableAndroidNativeBuffers"),
- &index);
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex failed");
+ if (enable) {
+ ALOGE("OMX_GetExtensionIndex %s failed", name);
+ }
return StatusFromOMXError(err);
}
@@ -316,14 +332,12 @@ status_t OMXNodeInstance::getGraphicBufferUsage(
Mutex::Autolock autoLock(mLock);
OMX_INDEXTYPE index;
- OMX_ERRORTYPE err = OMX_GetExtensionIndex(
- mHandle,
- const_cast<OMX_STRING>(
- "OMX.google.android.index.getAndroidNativeBufferUsage"),
- &index);
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.getAndroidNativeBufferUsage");
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex failed");
+ ALOGE("OMX_GetExtensionIndex %s failed", name);
return StatusFromOMXError(err);
}
@@ -354,7 +368,12 @@ status_t OMXNodeInstance::storeMetaDataInBuffers(
OMX_U32 portIndex,
OMX_BOOL enable) {
Mutex::Autolock autolock(mLock);
+ return storeMetaDataInBuffers_l(portIndex, enable);
+}
+status_t OMXNodeInstance::storeMetaDataInBuffers_l(
+ OMX_U32 portIndex,
+ OMX_BOOL enable) {
OMX_INDEXTYPE index;
OMX_STRING name = const_cast<OMX_STRING>(
"OMX.google.android.index.storeMetaDataInBuffers");
@@ -362,6 +381,7 @@ status_t OMXNodeInstance::storeMetaDataInBuffers(
OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
ALOGE("OMX_GetExtensionIndex %s failed", name);
+
return StatusFromOMXError(err);
}
@@ -411,6 +431,11 @@ status_t OMXNodeInstance::useBuffer(
addActiveBuffer(portIndex, *buffer);
+ sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
+ if (bufferSource != NULL && portIndex == kPortIndexInput) {
+ bufferSource->addCodecBuffer(header);
+ }
+
return OK;
}
@@ -482,13 +507,12 @@ status_t OMXNodeInstance::useGraphicBuffer(
return useGraphicBuffer2_l(portIndex, graphicBuffer, buffer);
}
- OMX_ERRORTYPE err = OMX_GetExtensionIndex(
- mHandle,
- const_cast<OMX_STRING>("OMX.google.android.index.useAndroidNativeBuffer"),
- &index);
+ OMX_STRING name = const_cast<OMX_STRING>(
+ "OMX.google.android.index.useAndroidNativeBuffer");
+ OMX_ERRORTYPE err = OMX_GetExtensionIndex(mHandle, name, &index);
if (err != OMX_ErrorNone) {
- ALOGE("OMX_GetExtensionIndex failed");
+ ALOGE("OMX_GetExtensionIndex %s failed", name);
return StatusFromOMXError(err);
}
@@ -530,6 +554,65 @@ status_t OMXNodeInstance::useGraphicBuffer(
return OK;
}
+status_t OMXNodeInstance::createInputSurface(
+ OMX_U32 portIndex, sp<IGraphicBufferProducer> *bufferProducer) {
+ Mutex::Autolock autolock(mLock);
+ status_t err;
+
+ const sp<GraphicBufferSource>& surfaceCheck = getGraphicBufferSource();
+ if (surfaceCheck != NULL) {
+ return ALREADY_EXISTS;
+ }
+
+ // Input buffers will hold meta-data (gralloc references).
+ err = storeMetaDataInBuffers_l(portIndex, OMX_TRUE);
+ if (err != OK) {
+ return err;
+ }
+
+ // Retrieve the width and height of the graphic buffer, set when the
+ // codec was configured.
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ def.nSize = sizeof(def);
+ def.nVersion.s.nVersionMajor = 1;
+ def.nVersion.s.nVersionMinor = 0;
+ def.nVersion.s.nRevision = 0;
+ def.nVersion.s.nStep = 0;
+ def.nPortIndex = portIndex;
+ OMX_ERRORTYPE oerr = OMX_GetParameter(
+ mHandle, OMX_IndexParamPortDefinition, &def);
+ CHECK(oerr == OMX_ErrorNone);
+
+ if (def.format.video.eColorFormat != OMX_COLOR_FormatAndroidOpaque) {
+ ALOGE("createInputSurface requires AndroidOpaque color format");
+ return INVALID_OPERATION;
+ }
+
+ GraphicBufferSource* bufferSource = new GraphicBufferSource(
+ this, def.format.video.nFrameWidth, def.format.video.nFrameHeight,
+ def.nBufferCountActual);
+ if ((err = bufferSource->initCheck()) != OK) {
+ delete bufferSource;
+ return err;
+ }
+ setGraphicBufferSource(bufferSource);
+
+ *bufferProducer = bufferSource->getIGraphicBufferProducer();
+ return OK;
+}
+
+status_t OMXNodeInstance::signalEndOfInputStream() {
+ // For non-Surface input, the MediaCodec should convert the call to a
+ // pair of requests (dequeue input buffer, queue input buffer with EOS
+ // flag set). Seems easier than doing the equivalent from here.
+ sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
+ if (bufferSource == NULL) {
+ ALOGW("signalEndOfInputStream can only be used with Surface input");
+ return INVALID_OPERATION;
+    }
+ return bufferSource->signalEndOfInputStream();
+}
+
status_t OMXNodeInstance::allocateBuffer(
OMX_U32 portIndex, size_t size, OMX::buffer_id *buffer,
void **buffer_data) {
@@ -560,6 +643,11 @@ status_t OMXNodeInstance::allocateBuffer(
addActiveBuffer(portIndex, *buffer);
+ sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
+ if (bufferSource != NULL && portIndex == kPortIndexInput) {
+ bufferSource->addCodecBuffer(header);
+ }
+
return OK;
}
@@ -592,6 +680,11 @@ status_t OMXNodeInstance::allocateBufferWithBackup(
addActiveBuffer(portIndex, *buffer);
+ sp<GraphicBufferSource> bufferSource(getGraphicBufferSource());
+ if (bufferSource != NULL && portIndex == kPortIndexInput) {
+ bufferSource->addCodecBuffer(header);
+ }
+
return OK;
}
@@ -646,6 +739,26 @@ status_t OMXNodeInstance::emptyBuffer(
return StatusFromOMXError(err);
}
+// like emptyBuffer, but the data is already in header->pBuffer
+status_t OMXNodeInstance::emptyDirectBuffer(
+ OMX_BUFFERHEADERTYPE *header,
+ OMX_U32 rangeOffset, OMX_U32 rangeLength,
+ OMX_U32 flags, OMX_TICKS timestamp) {
+ Mutex::Autolock autoLock(mLock);
+
+ header->nFilledLen = rangeLength;
+ header->nOffset = rangeOffset;
+ header->nFlags = flags;
+ header->nTimeStamp = timestamp;
+
+ OMX_ERRORTYPE err = OMX_EmptyThisBuffer(mHandle, header);
+ if (err != OMX_ErrorNone) {
+ ALOGW("emptyDirectBuffer failed, OMX err=0x%x", err);
+ }
+
+ return StatusFromOMXError(err);
+}
+
status_t OMXNodeInstance::getExtensionIndex(
const char *parameterName, OMX_INDEXTYPE *index) {
Mutex::Autolock autoLock(mLock);
@@ -666,6 +779,23 @@ void OMXNodeInstance::onMessage(const omx_message &msg) {
static_cast<BufferMeta *>(buffer->pAppPrivate);
buffer_meta->CopyFromOMX(buffer);
+ } else if (msg.type == omx_message::EMPTY_BUFFER_DONE) {
+ const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
+
+ if (bufferSource != NULL) {
+ // This is one of the buffers used exclusively by
+ // GraphicBufferSource.
+ // Don't dispatch a message back to ACodec, since it doesn't
+ // know that anyone asked to have the buffer emptied and will
+ // be very confused.
+
+ OMX_BUFFERHEADERTYPE *buffer =
+ static_cast<OMX_BUFFERHEADERTYPE *>(
+ msg.u.buffer_data.buffer);
+
+ bufferSource->codecBufferEmptied(buffer);
+ return;
+ }
}
mObserver->onMessage(msg);
@@ -682,6 +812,25 @@ void OMXNodeInstance::onGetHandleFailed() {
delete this;
}
+// OMXNodeInstance::OnEvent calls OMX::OnEvent, which then calls here.
+// Don't try to acquire mLock here -- in rare circumstances this will hang.
+void OMXNodeInstance::onEvent(
+ OMX_EVENTTYPE event, OMX_U32 arg1, OMX_U32 arg2) {
+ const sp<GraphicBufferSource>& bufferSource(getGraphicBufferSource());
+
+ if (bufferSource != NULL && event == OMX_EventCmdComplete &&
+ arg1 == OMX_CommandStateSet) {
+ if (arg2 == OMX_StateExecuting) {
+ bufferSource->omxExecuting();
+ } else if (arg2 == OMX_StateLoaded) {
+ // Must be shutting down -- won't have a GraphicBufferSource
+ // on the way up.
+ bufferSource->omxLoaded();
+ setGraphicBufferSource(NULL);
+ }
+ }
+}
+
// static
OMX_ERRORTYPE OMXNodeInstance::OnEvent(
OMX_IN OMX_HANDLETYPE hComponent,
diff --git a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
index c79e01f..4999663 100644
--- a/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
+++ b/media/libstagefright/omx/SimpleSoftOMXComponent.cpp
@@ -450,6 +450,10 @@ void SimpleSoftOMXComponent::onChangeState(OMX_STATETYPE state) {
checkTransitions();
}
+void SimpleSoftOMXComponent::onReset() {
+ // no-op
+}
+
void SimpleSoftOMXComponent::onPortEnable(OMX_U32 portIndex, bool enable) {
CHECK_LT(portIndex, mPorts.size());
@@ -581,6 +585,10 @@ void SimpleSoftOMXComponent::checkTransitions() {
if (transitionComplete) {
mState = mTargetState;
+ if (mState == OMX_StateLoaded) {
+ onReset();
+ }
+
notify(OMX_EventCmdComplete, OMX_CommandStateSet, mState, NULL);
}
}
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index 3747b3b..b3fe98e 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -51,8 +51,10 @@ static const struct {
{ "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" },
{ "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" },
{ "OMX.google.vpx.decoder", "vpxdec", "video_decoder.vpx" },
+ { "OMX.google.vpx.encoder", "vpxenc", "video_encoder.vpx" },
{ "OMX.google.raw.decoder", "rawdec", "audio_decoder.raw" },
{ "OMX.google.flac.encoder", "flacenc", "audio_encoder.flac" },
+ { "OMX.google.gsm.decoder", "gsmdec", "audio_decoder.gsm" },
};
static const size_t kNumComponents =
diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk
index 04441ca..1061c39 100644
--- a/media/libstagefright/omx/tests/Android.mk
+++ b/media/libstagefright/omx/tests/Android.mk
@@ -5,7 +5,7 @@ LOCAL_SRC_FILES = \
OMXHarness.cpp \
LOCAL_SHARED_LIBRARIES := \
- libstagefright libbinder libmedia libutils libstagefright_foundation
+ libstagefright libbinder libmedia libutils liblog libstagefright_foundation
LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright \
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index 161bd4f..3068541 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -20,13 +20,12 @@
#include "ARTSPConnection.h"
-#include <cutils/properties.h>
-
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/base64.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
#include <arpa/inet.h>
#include <fcntl.h>
@@ -41,6 +40,10 @@ namespace android {
// static
const int64_t ARTSPConnection::kSelectTimeoutUs = 1000ll;
+// static
+const AString ARTSPConnection::sUserAgent =
+ StringPrintf("User-Agent: %s\r\n", MakeUserAgent().c_str());
+
ARTSPConnection::ARTSPConnection(bool uidValid, uid_t uid)
: mUIDValid(uidValid),
mUID(uid),
@@ -50,7 +53,6 @@ ARTSPConnection::ARTSPConnection(bool uidValid, uid_t uid)
mConnectionID(0),
mNextCSeq(0),
mReceiveResponseEventPending(false) {
- MakeUserAgent(&mUserAgent);
}
ARTSPConnection::~ARTSPConnection() {
@@ -1032,27 +1034,12 @@ void ARTSPConnection::addAuthentication(AString *request) {
#endif
}
-// static
-void ARTSPConnection::MakeUserAgent(AString *userAgent) {
- userAgent->clear();
- userAgent->setTo("User-Agent: stagefright/1.1 (Linux;Android ");
-
-#if (PROPERTY_VALUE_MAX < 8)
-#error "PROPERTY_VALUE_MAX must be at least 8"
-#endif
-
- char value[PROPERTY_VALUE_MAX];
- property_get("ro.build.version.release", value, "Unknown");
- userAgent->append(value);
- userAgent->append(")\r\n");
-}
-
void ARTSPConnection::addUserAgent(AString *request) const {
// Find the boundary between headers and the body.
ssize_t i = request->find("\r\n\r\n");
CHECK_GE(i, 0);
- request->insert(mUserAgent, i + 2);
+ request->insert(sUserAgent, i + 2);
}
} // namespace android
diff --git a/media/libstagefright/rtsp/ARTSPConnection.h b/media/libstagefright/rtsp/ARTSPConnection.h
index 68f2d59..1fe9c99 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.h
+++ b/media/libstagefright/rtsp/ARTSPConnection.h
@@ -74,6 +74,8 @@ private:
static const int64_t kSelectTimeoutUs;
+ static const AString sUserAgent;
+
bool mUIDValid;
uid_t mUID;
State mState;
@@ -89,8 +91,6 @@ private:
sp<AMessage> mObserveBinaryMessage;
- AString mUserAgent;
-
void performDisconnect();
void onConnect(const sp<AMessage> &msg);
@@ -122,8 +122,6 @@ private:
static bool ParseSingleUnsignedLong(
const char *from, unsigned long *x);
- static void MakeUserAgent(AString *userAgent);
-
DISALLOW_EVIL_CONSTRUCTORS(ARTSPConnection);
};
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 49e2daf..9e2724d 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -17,6 +17,7 @@ LOCAL_SRC_FILES:= \
ARTPWriter.cpp \
ARTSPConnection.cpp \
ASessionDescription.cpp \
+ SDPLoader.cpp \
LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/media/libstagefright/include \
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index b2f0e5e..e067e20 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -28,13 +28,13 @@
#include "ASessionDescription.h"
#include <ctype.h>
-#include <cutils/properties.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
#include <arpa/inet.h>
#include <sys/socket.h>
@@ -52,20 +52,9 @@ static int64_t kStartupTimeoutUs = 10000000ll;
static int64_t kDefaultKeepAliveTimeoutUs = 60000000ll;
-namespace android {
-
-static void MakeUserAgentString(AString *s) {
- s->setTo("stagefright/1.1 (Linux;Android ");
-
-#if (PROPERTY_VALUE_MAX < 8)
-#error "PROPERTY_VALUE_MAX must be at least 8"
-#endif
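+// Pause requests are deferred by this long so that a resume() or seek()
+// issued shortly afterwards can supersede them (see mPauseGeneration).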
+static int64_t kPauseDelayUs = 3000000ll;
- char value[PROPERTY_VALUE_MAX];
- property_get("ro.build.version.release", value, "Unknown");
- s->append(value);
- s->append(")");
-}
+namespace android {
static bool GetAttribute(const char *s, const char *key, AString *value) {
value->clear();
@@ -129,13 +118,16 @@ struct MyHandler : public AHandler {
mNumAccessUnitsReceived(0),
mCheckPending(false),
mCheckGeneration(0),
+ mCheckTimeoutGeneration(0),
mTryTCPInterleaving(false),
mTryFakeRTCP(false),
mReceivedFirstRTCPPacket(false),
mReceivedFirstRTPPacket(false),
- mSeekable(false),
+ mSeekable(true),
mKeepAliveTimeoutUs(kDefaultKeepAliveTimeoutUs),
- mKeepAliveGeneration(0) {
+ mKeepAliveGeneration(0),
+ mPausing(false),
+ mPauseGeneration(0) {
mNetLooper->setName("rtsp net");
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
@@ -173,6 +165,39 @@ struct MyHandler : public AHandler {
mConn->connect(mOriginalSessionURL.c_str(), reply);
}
+ void loadSDP(const sp<ASessionDescription>& desc) {
+ looper()->registerHandler(mConn);
+ (1 ? mNetLooper : looper())->registerHandler(mRTPConn);
+
+ sp<AMessage> notify = new AMessage('biny', id());
+ mConn->observeBinaryData(notify);
+
+ sp<AMessage> reply = new AMessage('sdpl', id());
+ reply->setObject("description", desc);
+ mConn->connect(mOriginalSessionURL.c_str(), reply);
+ }
+
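+    // Resolves the session-level "a=control" attribute: "*" means the base
+    // URL itself, any other value is combined with the base URL, and with
+    // no attribute at all we fall back to the session URL.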
+ AString getControlURL(sp<ASessionDescription> desc) {
+ AString sessionLevelControlURL;
+ if (mSessionDesc->findAttribute(
+ 0,
+ "a=control",
+ &sessionLevelControlURL)) {
+ if (sessionLevelControlURL.compare("*") == 0) {
+ return mBaseURL;
+ } else {
+ AString controlURL;
+ CHECK(MakeURL(
+ mBaseURL.c_str(),
+ sessionLevelControlURL.c_str(),
+ &controlURL));
+ return controlURL;
+ }
+ } else {
+ return mSessionURL;
+ }
+ }
+
void disconnect() {
(new AMessage('abor', id()))->post();
}
@@ -180,6 +205,24 @@ struct MyHandler : public AHandler {
void seek(int64_t timeUs) {
sp<AMessage> msg = new AMessage('seek', id());
msg->setInt64("time", timeUs);
+ mPauseGeneration++;
+ msg->post();
+ }
+
+ bool isSeekable() const {
+ return mSeekable;
+ }
+
+ void pause() {
+ sp<AMessage> msg = new AMessage('paus', id());
+ mPauseGeneration++;
+ msg->setInt32("pausecheck", mPauseGeneration);
+ msg->post(kPauseDelayUs);
+ }
+
+ void resume() {
+ sp<AMessage> msg = new AMessage('resu', id());
+ mPauseGeneration++;
msg->post();
}
@@ -223,8 +266,7 @@ struct MyHandler : public AHandler {
data[offset++] = 6; // TOOL
- AString tool;
- MakeUserAgentString(&tool);
+ AString tool = MakeUserAgent();
data[offset++] = tool.size();
@@ -348,6 +390,39 @@ struct MyHandler : public AHandler {
return true;
}
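+    // Heuristic: treat the session as live when the "a=LiveStream" SDP
+    // attribute evaluates to 1 (the part after any ';' is used), or when
+    // the description advertises no duration at all.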
+ static bool isLiveStream(const sp<ASessionDescription> &desc) {
+ AString attrLiveStream;
+ if (desc->findAttribute(0, "a=LiveStream", &attrLiveStream)) {
+ ssize_t semicolonPos = attrLiveStream.find(";", 2);
+
+ const char* liveStreamValue;
+ if (semicolonPos < 0) {
+ liveStreamValue = attrLiveStream.c_str();
+ } else {
+ AString valString;
+ valString.setTo(attrLiveStream,
+ semicolonPos + 1,
+ attrLiveStream.size() - semicolonPos - 1);
+ liveStreamValue = valString.c_str();
+ }
+
+ uint32_t value = strtoul(liveStreamValue, NULL, 10);
+ if (value == 1) {
+ ALOGV("found live stream");
+ return true;
+ }
+ } else {
+ // It is a live stream if no duration is returned
+ int64_t durationUs;
+ if (!desc->getDurationUs(&durationUs)) {
+ ALOGV("No duration found, assume live stream");
+ return true;
+ }
+ }
+
+ return false;
+ }
+
virtual void onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case 'conn':
@@ -448,6 +523,8 @@ struct MyHandler : public AHandler {
}
}
+ mSeekable = !isLiveStream(mSessionDesc);
+
if (!mBaseURL.startsWith("rtsp://")) {
// Some misbehaving servers specify a relative
// URL in one of the locations above, combine
@@ -467,6 +544,8 @@ struct MyHandler : public AHandler {
mBaseURL = tmp;
}
+ mControlURL = getControlURL(mSessionDesc);
+
if (mSessionDesc->countTracks() < 2) {
// There's no actual tracks in this session.
// The first "track" is merely session meta
@@ -489,6 +568,51 @@ struct MyHandler : public AHandler {
break;
}
+ case 'sdpl':
+ {
+ int32_t result;
+ CHECK(msg->findInt32("result", &result));
+
+ ALOGI("SDP connection request completed with result %d (%s)",
+ result, strerror(-result));
+
+ if (result == OK) {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("description", &obj));
+ mSessionDesc =
+ static_cast<ASessionDescription *>(obj.get());
+
+ if (!mSessionDesc->isValid()) {
+ ALOGE("Failed to parse session description.");
+ result = ERROR_MALFORMED;
+ } else {
+ mBaseURL = mSessionURL;
+
+ mSeekable = !isLiveStream(mSessionDesc);
+
+ mControlURL = getControlURL(mSessionDesc);
+
+ if (mSessionDesc->countTracks() < 2) {
+                        // There are no actual tracks in this session.
+ // The first "track" is merely session meta
+ // data.
+
+ ALOGW("Session doesn't contain any playable "
+ "tracks. Aborting.");
+ result = ERROR_UNSUPPORTED;
+ } else {
+ setupTrack(1);
+ }
+ }
+ }
+
+ if (result != OK) {
+ sp<AMessage> reply = new AMessage('disc', id());
+ mConn->disconnect(reply);
+ }
+ break;
+ }
+
case 'setu':
{
size_t index;
@@ -606,7 +730,7 @@ struct MyHandler : public AHandler {
postKeepAlive();
AString request = "PLAY ";
- request.append(mSessionURL);
+ request.append(mControlURL);
request.append(" RTSP/1.0\r\n");
request.append("Session: ");
@@ -644,6 +768,8 @@ struct MyHandler : public AHandler {
parsePlayResponse(response);
sp<AMessage> timeout = new AMessage('tiou', id());
+ mCheckTimeoutGeneration++;
+ timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
timeout->post(kStartupTimeoutUs);
}
}
@@ -733,7 +859,8 @@ struct MyHandler : public AHandler {
mNumAccessUnitsReceived = 0;
mReceivedFirstRTCPPacket = false;
mReceivedFirstRTPPacket = false;
- mSeekable = false;
+ mPausing = false;
+ mSeekable = true;
sp<AMessage> reply = new AMessage('tear', id());
@@ -854,9 +981,16 @@ struct MyHandler : public AHandler {
int32_t eos;
if (msg->findInt32("eos", &eos)) {
ALOGI("received BYE on track index %d", trackIndex);
-#if 0
- track->mPacketSource->signalEOS(ERROR_END_OF_STREAM);
-#endif
+ if (!mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+ ALOGI("No time established => fake existing data");
+
+ track->mEOSReceived = true;
+ mTryFakeRTCP = true;
+ mReceivedFirstRTCPPacket = true;
+ fakeTimestamps();
+ } else {
+ postQueueEOS(trackIndex, ERROR_END_OF_STREAM);
+ }
return;
}
@@ -884,6 +1018,115 @@ struct MyHandler : public AHandler {
break;
}
+ case 'paus':
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("pausecheck", &generation));
+ if (generation != mPauseGeneration) {
+ ALOGV("Ignoring outdated pause message.");
+ break;
+ }
+
+ if (!mSeekable) {
+ ALOGW("This is a live stream, ignoring pause request.");
+ break;
+ }
+ mCheckPending = true;
+ ++mCheckGeneration;
+ mPausing = true;
+
+ AString request = "PAUSE ";
+ request.append(mControlURL);
+ request.append(" RTSP/1.0\r\n");
+
+ request.append("Session: ");
+ request.append(mSessionID);
+ request.append("\r\n");
+
+ request.append("\r\n");
+
+ sp<AMessage> reply = new AMessage('pau2', id());
+ mConn->sendRequest(request.c_str(), reply);
+ break;
+ }
+
+ case 'pau2':
+ {
+ int32_t result;
+ CHECK(msg->findInt32("result", &result));
+ mCheckTimeoutGeneration++;
+
+ ALOGI("PAUSE completed with result %d (%s)",
+ result, strerror(-result));
+ break;
+ }
+
+ case 'resu':
+ {
+ if (mPausing && mSeekPending) {
+                // If seeking, PLAY will be sent from 'see1' instead
+ break;
+ }
+
+ if (!mPausing) {
+                // Don't send PLAY if we have not paused
+ break;
+ }
+ AString request = "PLAY ";
+ request.append(mControlURL);
+ request.append(" RTSP/1.0\r\n");
+
+ request.append("Session: ");
+ request.append(mSessionID);
+ request.append("\r\n");
+
+ request.append("\r\n");
+
+ sp<AMessage> reply = new AMessage('res2', id());
+ mConn->sendRequest(request.c_str(), reply);
+ break;
+ }
+
+ case 'res2':
+ {
+ int32_t result;
+ CHECK(msg->findInt32("result", &result));
+
+ ALOGI("PLAY completed with result %d (%s)",
+ result, strerror(-result));
+
+ mCheckPending = false;
+ postAccessUnitTimeoutCheck();
+
+ if (result == OK) {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("response", &obj));
+ sp<ARTSPResponse> response =
+ static_cast<ARTSPResponse *>(obj.get());
+
+ if (response->mStatusCode != 200) {
+ result = UNKNOWN_ERROR;
+ } else {
+ parsePlayResponse(response);
+
+ // Post new timeout in order to make sure to use
+ // fake timestamps if no new Sender Reports arrive
+ sp<AMessage> timeout = new AMessage('tiou', id());
+ mCheckTimeoutGeneration++;
+ timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
+ timeout->post(kStartupTimeoutUs);
+ }
+ }
+
+ if (result != OK) {
+ ALOGE("resume failed, aborting.");
+ (new AMessage('abor', id()))->post();
+ }
+
+ mPausing = false;
+ break;
+ }
+
case 'seek':
{
if (!mSeekable) {
@@ -905,8 +1148,17 @@ struct MyHandler : public AHandler {
mCheckPending = true;
++mCheckGeneration;
+ sp<AMessage> reply = new AMessage('see1', id());
+ reply->setInt64("time", timeUs);
+
+ if (mPausing) {
+ // PAUSE already sent
+ ALOGI("Pause already sent");
+ reply->post();
+ break;
+ }
AString request = "PAUSE ";
- request.append(mSessionURL);
+ request.append(mControlURL);
request.append(" RTSP/1.0\r\n");
request.append("Session: ");
@@ -915,8 +1167,6 @@ struct MyHandler : public AHandler {
request.append("\r\n");
- sp<AMessage> reply = new AMessage('see1', id());
- reply->setInt64("time", timeUs);
mConn->sendRequest(request.c_str(), reply);
break;
}
@@ -928,6 +1178,7 @@ struct MyHandler : public AHandler {
TrackInfo *info = &mTracks.editItemAt(i);
postQueueSeekDiscontinuity(i);
+ info->mEOSReceived = false;
info->mRTPAnchor = 0;
info->mNTPAnchorUs = -1;
@@ -936,11 +1187,18 @@ struct MyHandler : public AHandler {
mAllTracksHaveTime = false;
mNTPAnchorUs = -1;
+            // Start a new timeout generation to avoid getting a timeout
+            // before the PLAY response arrives
+ sp<AMessage> timeout = new AMessage('tiou', id());
+ mCheckTimeoutGeneration++;
+ timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
+ timeout->post(kStartupTimeoutUs);
+
int64_t timeUs;
CHECK(msg->findInt64("time", &timeUs));
AString request = "PLAY ";
- request.append(mSessionURL);
+ request.append(mControlURL);
request.append(" RTSP/1.0\r\n");
request.append("Session: ");
@@ -960,7 +1218,10 @@ struct MyHandler : public AHandler {
case 'see2':
{
- CHECK(mSeekPending);
+ if (mTracks.size() == 0) {
+                // We have already hit 'abor', break
+ break;
+ }
int32_t result;
CHECK(msg->findInt32("result", &result));
@@ -982,6 +1243,13 @@ struct MyHandler : public AHandler {
} else {
parsePlayResponse(response);
+ // Post new timeout in order to make sure to use
+ // fake timestamps if no new Sender Reports arrive
+ sp<AMessage> timeout = new AMessage('tiou', id());
+ mCheckTimeoutGeneration++;
+ timeout->setInt32("tioucheck", mCheckTimeoutGeneration);
+ timeout->post(kStartupTimeoutUs);
+
ssize_t i = response->mHeaders.indexOfKey("rtp-info");
CHECK_GE(i, 0);
@@ -996,6 +1264,7 @@ struct MyHandler : public AHandler {
(new AMessage('abor', id()))->post();
}
+ mPausing = false;
mSeekPending = false;
sp<AMessage> msg = mNotify->dup();
@@ -1018,8 +1287,17 @@ struct MyHandler : public AHandler {
case 'tiou':
{
+ int32_t timeoutGenerationCheck;
+ CHECK(msg->findInt32("tioucheck", &timeoutGenerationCheck));
+ if (timeoutGenerationCheck != mCheckTimeoutGeneration) {
+ // This is an outdated message. Ignore.
+ // This typically happens if a lot of seeks are
+                // performed, since new timeout messages are now
+                // posted on seek as well.
+ break;
+ }
if (!mReceivedFirstRTCPPacket) {
- if (mReceivedFirstRTPPacket && !mTryFakeRTCP) {
+ if (dataReceivedOnAllChannels() && !mTryFakeRTCP) {
ALOGW("We received RTP packets but no RTCP packets, "
"using fake timestamps.");
@@ -1093,7 +1371,6 @@ struct MyHandler : public AHandler {
}
void parsePlayResponse(const sp<ARTSPResponse> &response) {
- mSeekable = false;
if (mTracks.size() == 0) {
ALOGV("parsePlayResponse: late packets ignored.");
return;
@@ -1168,8 +1445,6 @@ struct MyHandler : public AHandler {
++n;
}
-
- mSeekable = true;
}
sp<MetaData> getTrackFormat(size_t index, int32_t *timeScale) {
@@ -1199,6 +1474,7 @@ private:
uint32_t mRTPAnchor;
int64_t mNTPAnchorUs;
int32_t mTimeScale;
+ bool mEOSReceived;
uint32_t mNormalPlayTimeRTP;
int64_t mNormalPlayTimeUs;
@@ -1221,6 +1497,7 @@ private:
AString mSessionURL;
AString mSessionHost;
AString mBaseURL;
+ AString mControlURL;
AString mSessionID;
bool mSetupTracksSuccessful;
bool mSeekPending;
@@ -1234,6 +1511,7 @@ private:
int64_t mNumAccessUnitsReceived;
bool mCheckPending;
int32_t mCheckGeneration;
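+    // Bumped whenever a new 'tiou' timeout is posted (on play, resume and
+    // seek) so that stale timeout messages can be recognized and ignored.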
+ int32_t mCheckTimeoutGeneration;
bool mTryTCPInterleaving;
bool mTryFakeRTCP;
bool mReceivedFirstRTCPPacket;
@@ -1241,6 +1519,8 @@ private:
bool mSeekable;
int64_t mKeepAliveTimeoutUs;
int32_t mKeepAliveGeneration;
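+    // Bumped by pause(), resume() and seek() so that a delayed 'paus'
+    // message can detect that it has been superseded.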
+ bool mPausing;
+ int32_t mPauseGeneration;
Vector<TrackInfo> mTracks;
@@ -1287,6 +1567,7 @@ private:
formatDesc.c_str(), &timescale, &numChannels);
info->mTimeScale = timescale;
+ info->mEOSReceived = false;
ALOGV("track #%d URL=%s", mTracks.size(), trackURL.c_str());
@@ -1379,6 +1660,17 @@ private:
}
}
+ bool dataReceivedOnAllChannels() {
+ TrackInfo *track;
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ track = &mTracks.editItemAt(i);
+ if (track->mPackets.empty()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
void onTimeUpdate(int32_t trackIndex, uint32_t rtpTime, uint64_t ntpTime) {
ALOGV("onTimeUpdate track %d, rtpTime = 0x%08x, ntpTime = 0x%016llx",
trackIndex, rtpTime, ntpTime);
@@ -1409,6 +1701,27 @@ private:
ALOGI("Time now established for all tracks.");
}
}
+ if (mAllTracksHaveTime && dataReceivedOnAllChannels()) {
+            // Time is now established, let's start timestamping immediately
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ TrackInfo *trackInfo = &mTracks.editItemAt(i);
+ while (!trackInfo->mPackets.empty()) {
+ sp<ABuffer> accessUnit = *trackInfo->mPackets.begin();
+ trackInfo->mPackets.erase(trackInfo->mPackets.begin());
+
+ if (addMediaTimestamp(i, trackInfo, accessUnit)) {
+ postQueueAccessUnit(i, accessUnit);
+ }
+ }
+ }
+ for (size_t i = 0; i < mTracks.size(); ++i) {
+ TrackInfo *trackInfo = &mTracks.editItemAt(i);
+ if (trackInfo->mEOSReceived) {
+ postQueueEOS(i, ERROR_END_OF_STREAM);
+ trackInfo->mEOSReceived = false;
+ }
+ }
+ }
}
void onAccessUnitComplete(
@@ -1453,6 +1766,11 @@ private:
if (addMediaTimestamp(trackIndex, track, accessUnit)) {
postQueueAccessUnit(trackIndex, accessUnit);
}
+
+ if (track->mEOSReceived) {
+ postQueueEOS(trackIndex, ERROR_END_OF_STREAM);
+ track->mEOSReceived = false;
+ }
}
bool addMediaTimestamp(
diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp
new file mode 100644
index 0000000..ed3fa7e
--- /dev/null
+++ b/media/libstagefright/rtsp/SDPLoader.cpp
@@ -0,0 +1,154 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SDPLoader"
+#include <utils/Log.h>
+
+#include "SDPLoader.h"
+
+#include "ASessionDescription.h"
+#include "HTTPBase.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#define DEFAULT_SDP_SIZE 100000
+
+namespace android {
+
+SDPLoader::SDPLoader(const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+ : mNotify(notify),
+ mFlags(flags),
+ mUIDValid(uidValid),
+ mUID(uid),
+ mNetLooper(new ALooper),
+ mCancelled(false),
+ mHTTPDataSource(
+ HTTPBase::Create(
+ (mFlags & kFlagIncognito)
+ ? HTTPBase::kFlagIncognito
+ : 0)) {
+ if (mUIDValid) {
+ mHTTPDataSource->setUID(mUID);
+ }
+
+ mNetLooper->setName("sdp net");
+ mNetLooper->start(false /* runOnCallingThread */,
+ false /* canCallJava */,
+ PRIORITY_HIGHEST);
+}
+
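+// Kicks off an asynchronous fetch of the SDP; the outcome is delivered
+// through the notify message as kWhatSDPLoaded with "result" and
+// "description" entries (see onLoad() below).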
+void SDPLoader::load(const char *url, const KeyedVector<String8, String8> *headers) {
+ mNetLooper->registerHandler(this);
+
+ sp<AMessage> msg = new AMessage(kWhatLoad, id());
+ msg->setString("url", url);
+
+ if (headers != NULL) {
+ msg->setPointer(
+ "headers",
+ new KeyedVector<String8, String8>(*headers));
+ }
+
+ msg->post();
+}
+
+void SDPLoader::cancel() {
+ mCancelled = true;
+ sp<HTTPBase> HTTPDataSource = mHTTPDataSource;
+ HTTPDataSource->disconnect();
+}
+
+void SDPLoader::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatLoad:
+ onLoad(msg);
+ break;
+
+ default:
+ TRESPASS();
+ break;
+ }
+}
+
+void SDPLoader::onLoad(const sp<AMessage> &msg) {
+ status_t err = OK;
+ sp<ASessionDescription> desc = NULL;
+ AString url;
+ CHECK(msg->findString("url", &url));
+
+ KeyedVector<String8, String8> *headers = NULL;
+ msg->findPointer("headers", (void **)&headers);
+
+ if (!(mFlags & kFlagIncognito)) {
+ ALOGI("onLoad '%s'", url.c_str());
+ } else {
+ ALOGI("onLoad <URL suppressed>");
+ }
+
+ if (!mCancelled) {
+ err = mHTTPDataSource->connect(url.c_str(), headers);
+
+ if (err != OK) {
+ ALOGE("connect() returned %d", err);
+ }
+ }
+
+ if (headers != NULL) {
+ delete headers;
+ headers = NULL;
+ }
+
+    off64_t sdpSize = 0;
+ if (err == OK && !mCancelled) {
+ err = mHTTPDataSource->getSize(&sdpSize);
+
+ if (err != OK) {
+            // We did not get the size of the SDP file; default to a large value
+ sdpSize = DEFAULT_SDP_SIZE;
+ err = OK;
+ }
+ }
+
+ sp<ABuffer> buffer = new ABuffer(sdpSize);
+
+ if (err == OK && !mCancelled) {
+ ssize_t readSize = mHTTPDataSource->readAt(0, buffer->data(), sdpSize);
+
+ if (readSize < 0) {
+ ALOGE("Failed to read SDP, error code = %ld", readSize);
+ err = UNKNOWN_ERROR;
+ } else {
+ desc = new ASessionDescription;
+
+ if (desc == NULL || !desc->setTo(buffer->data(), (size_t)readSize)) {
+ err = UNKNOWN_ERROR;
+ ALOGE("Failed to parse SDP");
+ }
+ }
+ }
+
+ mHTTPDataSource.clear();
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatSDPLoaded);
+ notify->setInt32("result", err);
+ notify->setObject("description", desc);
+ notify->post();
+}
+
+} // namespace android
diff --git a/media/libstagefright/tests/Android.mk b/media/libstagefright/tests/Android.mk
index 57fff0b..06ce16b 100644
--- a/media/libstagefright/tests/Android.mk
+++ b/media/libstagefright/tests/Android.mk
@@ -26,6 +26,7 @@ LOCAL_SHARED_LIBRARIES := \
libsync \
libui \
libutils \
+ liblog
LOCAL_STATIC_LIBRARIES := \
libgtest \
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index a61d6a2..a5459fe 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -27,7 +27,7 @@
#include <media/mediarecorder.h>
#include <ui/GraphicBuffer.h>
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
#include <gui/ISurfaceComposer.h>
#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
@@ -107,9 +107,9 @@ protected:
window.get(), NULL);
} else {
ALOGV("No actual display. Choosing EGLSurface based on SurfaceMediaSource");
- sp<ISurfaceTexture> sms = (new SurfaceMediaSource(
+ sp<IGraphicBufferProducer> sms = (new SurfaceMediaSource(
getSurfaceWidth(), getSurfaceHeight()))->getBufferQueue();
- sp<SurfaceTextureClient> stc = new SurfaceTextureClient(sms);
+ sp<Surface> stc = new Surface(sms);
sp<ANativeWindow> window = stc;
mEglSurface = eglCreateWindowSurface(mEglDisplay, mGlConfig,
@@ -361,7 +361,7 @@ protected:
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
// Manual cast is required to avoid constructor ambiguity
- mSTC = new SurfaceTextureClient(static_cast<sp<ISurfaceTexture> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
mANW = mSTC;
}
@@ -375,7 +375,7 @@ protected:
const int mYuvTexHeight;
sp<SurfaceMediaSource> mSMS;
- sp<SurfaceTextureClient> mSTC;
+ sp<Surface> mSTC;
sp<ANativeWindow> mANW;
};
@@ -396,7 +396,7 @@ protected:
ALOGV("SMS-GLTest::SetUp()");
android::ProcessState::self()->startThreadPool();
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
- mSTC = new SurfaceTextureClient(static_cast<sp<ISurfaceTexture> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
mANW = mSTC;
// Doing the setup related to the GL Side
@@ -416,7 +416,7 @@ protected:
const int mYuvTexHeight;
sp<SurfaceMediaSource> mSMS;
- sp<SurfaceTextureClient> mSTC;
+ sp<Surface> mSTC;
sp<ANativeWindow> mANW;
};
@@ -482,8 +482,8 @@ sp<MediaRecorder> SurfaceMediaSourceGLTest::setUpMediaRecorder(int fd, int video
// query the mediarecorder for a surfacemediasource and create an egl surface with that
void SurfaceMediaSourceGLTest::setUpEGLSurfaceFromMediaRecorder(sp<MediaRecorder>& mr) {
- sp<ISurfaceTexture> iST = mr->querySurfaceMediaSourceFromMediaServer();
- mSTC = new SurfaceTextureClient(iST);
+ sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
+ mSTC = new Surface(iST);
mANW = mSTC;
if (mEglSurface != EGL_NO_SURFACE) {
@@ -749,8 +749,8 @@ TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaS
mYuvTexHeight, 30);
// get the reference to the surfacemediasource living in
// mediaserver that is created by stagefrightrecorder
- sp<ISurfaceTexture> iST = mr->querySurfaceMediaSourceFromMediaServer();
- mSTC = new SurfaceTextureClient(iST);
+ sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
+ mSTC = new Surface(iST);
mANW = mSTC;
ASSERT_EQ(NO_ERROR, native_window_api_connect(mANW.get(), NATIVE_WINDOW_API_CPU));
ASSERT_EQ(NO_ERROR, native_window_set_buffers_format(mANW.get(),
@@ -781,7 +781,7 @@ TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) {
ALOGV("Verify creating a surface w/ right config + dummy writer*********");
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
- mSTC = new SurfaceTextureClient(static_cast<sp<ISurfaceTexture> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
mANW = mSTC;
DummyRecorder writer(mSMS);
diff --git a/media/libstagefright/timedtext/TimedTextSRTSource.cpp b/media/libstagefright/timedtext/TimedTextSRTSource.cpp
index eac23ba..2ac1e72 100644
--- a/media/libstagefright/timedtext/TimedTextSRTSource.cpp
+++ b/media/libstagefright/timedtext/TimedTextSRTSource.cpp
@@ -36,6 +36,9 @@ TimedTextSRTSource::TimedTextSRTSource(const sp<DataSource>& dataSource)
: mSource(dataSource),
mMetaData(new MetaData),
mIndex(0) {
+ // TODO: Need to detect the language, because SRT doesn't give language
+ // information explicitly.
+ mMetaData->setCString(kKeyMediaLanguage, "und");
}
TimedTextSRTSource::~TimedTextSRTSource() {
@@ -46,14 +49,10 @@ status_t TimedTextSRTSource::start() {
if (err != OK) {
reset();
}
- // TODO: Need to detect the language, because SRT doesn't give language
- // information explicitly.
- mMetaData->setCString(kKeyMediaLanguage, "");
return err;
}
void TimedTextSRTSource::reset() {
- mMetaData->clear();
mTextVector.clear();
mIndex = 0;
}
diff --git a/media/libstagefright/wifi-display/ANetworkSession.cpp b/media/libstagefright/wifi-display/ANetworkSession.cpp
index 62a6e7f..938d601 100644
--- a/media/libstagefright/wifi-display/ANetworkSession.cpp
+++ b/media/libstagefright/wifi-display/ANetworkSession.cpp
@@ -23,9 +23,11 @@
#include <arpa/inet.h>
#include <fcntl.h>
+#include <linux/tcp.h>
#include <net/if.h>
#include <netdb.h>
#include <netinet/in.h>
+#include <sys/ioctl.h>
#include <sys/socket.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -37,6 +39,7 @@
namespace android {
static const size_t kMaxUDPSize = 1500;
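+// Transient UDP send/recv failures are retried up to this many times (the
+// counter resets after a success) before an error is reported to the owner.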
+static const int32_t kMaxUDPRetries = 200;
struct ANetworkSession::NetworkThread : public Thread {
NetworkThread(ANetworkSession *session);
@@ -79,7 +82,8 @@ struct ANetworkSession::Session : public RefBase {
status_t readMore();
status_t writeMore();
- status_t sendRequest(const void *data, ssize_t size);
+ status_t sendRequest(
+ const void *data, ssize_t size, bool timeValid, int64_t timeUs);
void setIsRTSPConnection(bool yesno);
@@ -87,24 +91,34 @@ protected:
virtual ~Session();
private:
+ enum {
+ FRAGMENT_FLAG_TIME_VALID = 1,
+ };
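+    // A queued outgoing buffer. When FRAGMENT_FLAG_TIME_VALID is set,
+    // mTimeUs carries a timestamp that dumpFragmentStats() compares with
+    // the current time to report how long the data waited to be sent.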
+ struct Fragment {
+ uint32_t mFlags;
+ int64_t mTimeUs;
+ sp<ABuffer> mBuffer;
+ };
+
int32_t mSessionID;
State mState;
bool mIsRTSPConnection;
int mSocket;
sp<AMessage> mNotify;
bool mSawReceiveFailure, mSawSendFailure;
+ int32_t mUDPRetries;
- // for TCP / stream data
- AString mOutBuffer;
-
- // for UDP / datagrams
- List<sp<ABuffer> > mOutDatagrams;
+ List<Fragment> mOutFragments;
AString mInBuffer;
+ int64_t mLastStallReportUs;
+
void notifyError(bool send, status_t err, const char *detail);
void notify(NotificationReason reason);
+ void dumpFragmentStats(const Fragment &frag);
+
DISALLOW_EVIL_CONSTRUCTORS(Session);
};
////////////////////////////////////////////////////////////////////////////////
@@ -135,7 +149,9 @@ ANetworkSession::Session::Session(
mSocket(s),
mNotify(notify),
mSawReceiveFailure(false),
- mSawSendFailure(false) {
+ mSawSendFailure(false),
+ mUDPRetries(kMaxUDPRetries),
+ mLastStallReportUs(-1ll) {
if (mState == CONNECTED) {
struct sockaddr_in localAddr;
socklen_t localAddrLen = sizeof(localAddr);
@@ -216,8 +232,8 @@ bool ANetworkSession::Session::wantsToRead() {
bool ANetworkSession::Session::wantsToWrite() {
return !mSawSendFailure
&& (mState == CONNECTING
- || (mState == CONNECTED && !mOutBuffer.empty())
- || (mState == DATAGRAM && !mOutDatagrams.empty()));
+ || (mState == CONNECTED && !mOutFragments.empty())
+ || (mState == DATAGRAM && !mOutFragments.empty()));
}
status_t ANetworkSession::Session::readMore() {
@@ -273,8 +289,17 @@ status_t ANetworkSession::Session::readMore() {
}
if (err != OK) {
- notifyError(false /* send */, err, "Recvfrom failed.");
- mSawReceiveFailure = true;
+ if (!mUDPRetries) {
+ notifyError(false /* send */, err, "Recvfrom failed.");
+ mSawReceiveFailure = true;
+ } else {
+ mUDPRetries--;
+ ALOGE("Recvfrom failed, %d/%d retries left",
+ mUDPRetries, kMaxUDPRetries);
+ err = OK;
+ }
+ } else {
+ mUDPRetries = kMaxUDPRetries;
}
return err;
@@ -314,6 +339,9 @@ status_t ANetworkSession::Session::readMore() {
sp<ABuffer> packet = new ABuffer(packetSize);
memcpy(packet->data(), mInBuffer.c_str() + 2, packetSize);
+ int64_t nowUs = ALooper::GetNowUs();
+ packet->meta()->setInt64("arrivalTimeUs", nowUs);
+
sp<AMessage> notify = mNotify->dup();
notify->setInt32("sessionID", mSessionID);
notify->setInt32("reason", kWhatDatagram);
@@ -399,31 +427,41 @@ status_t ANetworkSession::Session::readMore() {
return err;
}
-status_t ANetworkSession::Session::writeMore() {
- if (mState == DATAGRAM) {
- CHECK(!mOutDatagrams.empty());
+void ANetworkSession::Session::dumpFragmentStats(const Fragment &frag) {
+#if 0
+ int64_t nowUs = ALooper::GetNowUs();
+ int64_t delayMs = (nowUs - frag.mTimeUs) / 1000ll;
- status_t err;
- do {
- const sp<ABuffer> &datagram = *mOutDatagrams.begin();
+ static const int64_t kMinDelayMs = 0;
+ static const int64_t kMaxDelayMs = 300;
- uint8_t *data = datagram->data();
- if (data[0] == 0x80 && (data[1] & 0x7f) == 33) {
- int64_t nowUs = ALooper::GetNowUs();
+ const char *kPattern = "########################################";
+ size_t kPatternSize = strlen(kPattern);
- uint32_t prevRtpTime = U32_AT(&data[4]);
+ int n = (kPatternSize * (delayMs - kMinDelayMs))
+ / (kMaxDelayMs - kMinDelayMs);
- // 90kHz time scale
- uint32_t rtpTime = (nowUs * 9ll) / 100ll;
- int32_t diffTime = (int32_t)rtpTime - (int32_t)prevRtpTime;
+ if (n < 0) {
+ n = 0;
+ } else if ((size_t)n > kPatternSize) {
+ n = kPatternSize;
+ }
- ALOGV("correcting rtpTime by %.0f ms", diffTime / 90.0);
+ ALOGI("[%lld]: (%4lld ms) %s\n",
+ frag.mTimeUs / 1000,
+ delayMs,
+ kPattern + kPatternSize - n);
+#endif
+}
- data[4] = rtpTime >> 24;
- data[5] = (rtpTime >> 16) & 0xff;
- data[6] = (rtpTime >> 8) & 0xff;
- data[7] = rtpTime & 0xff;
- }
+status_t ANetworkSession::Session::writeMore() {
+ if (mState == DATAGRAM) {
+ CHECK(!mOutFragments.empty());
+
+ status_t err;
+ do {
+ const Fragment &frag = *mOutFragments.begin();
+ const sp<ABuffer> &datagram = frag.mBuffer;
int n;
do {
@@ -433,24 +471,37 @@ status_t ANetworkSession::Session::writeMore() {
err = OK;
if (n > 0) {
- mOutDatagrams.erase(mOutDatagrams.begin());
+ if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) {
+ dumpFragmentStats(frag);
+ }
+
+ mOutFragments.erase(mOutFragments.begin());
} else if (n < 0) {
err = -errno;
} else if (n == 0) {
err = -ECONNRESET;
}
- } while (err == OK && !mOutDatagrams.empty());
+ } while (err == OK && !mOutFragments.empty());
if (err == -EAGAIN) {
- if (!mOutDatagrams.empty()) {
- ALOGI("%d datagrams remain queued.", mOutDatagrams.size());
+ if (!mOutFragments.empty()) {
+ ALOGI("%d datagrams remain queued.", mOutFragments.size());
}
err = OK;
}
if (err != OK) {
- notifyError(true /* send */, err, "Send datagram failed.");
- mSawSendFailure = true;
+ if (!mUDPRetries) {
+ notifyError(true /* send */, err, "Send datagram failed.");
+ mSawSendFailure = true;
+ } else {
+ mUDPRetries--;
+ ALOGE("Send datagram failed, %d/%d retries left",
+ mUDPRetries, kMaxUDPRetries);
+ err = OK;
+ }
+ } else {
+ mUDPRetries = kMaxUDPRetries;
}
return err;
@@ -476,23 +527,37 @@ status_t ANetworkSession::Session::writeMore() {
}
CHECK_EQ(mState, CONNECTED);
- CHECK(!mOutBuffer.empty());
+ CHECK(!mOutFragments.empty());
ssize_t n;
- do {
- n = send(mSocket, mOutBuffer.c_str(), mOutBuffer.size(), 0);
- } while (n < 0 && errno == EINTR);
+ while (!mOutFragments.empty()) {
+ const Fragment &frag = *mOutFragments.begin();
- status_t err = OK;
+ do {
+ n = send(mSocket, frag.mBuffer->data(), frag.mBuffer->size(), 0);
+ } while (n < 0 && errno == EINTR);
- if (n > 0) {
-#if 0
- ALOGI("out:");
- hexdump(mOutBuffer.c_str(), n);
-#endif
+ if (n <= 0) {
+ break;
+ }
- mOutBuffer.erase(0, n);
- } else if (n < 0) {
+ frag.mBuffer->setRange(
+ frag.mBuffer->offset() + n, frag.mBuffer->size() - n);
+
+ if (frag.mBuffer->size() > 0) {
+ break;
+ }
+
+ if (frag.mFlags & FRAGMENT_FLAG_TIME_VALID) {
+ dumpFragmentStats(frag);
+ }
+
+ mOutFragments.erase(mOutFragments.begin());
+ }
+
+ status_t err = OK;
+
+ if (n < 0) {
err = -errno;
} else if (n == 0) {
err = -ECONNRESET;
@@ -503,35 +568,69 @@ status_t ANetworkSession::Session::writeMore() {
mSawSendFailure = true;
}
+#if 0
+ int numBytesQueued;
+ int res = ioctl(mSocket, SIOCOUTQ, &numBytesQueued);
+ if (res == 0 && numBytesQueued > 50 * 1024) {
+ if (numBytesQueued > 409600) {
+ ALOGW("!!! numBytesQueued = %d", numBytesQueued);
+ }
+
+ int64_t nowUs = ALooper::GetNowUs();
+
+ if (mLastStallReportUs < 0ll
+ || nowUs > mLastStallReportUs + 100000ll) {
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("sessionID", mSessionID);
+ msg->setInt32("reason", kWhatNetworkStall);
+ msg->setSize("numBytesQueued", numBytesQueued);
+ msg->post();
+
+ mLastStallReportUs = nowUs;
+ }
+ }
+#endif
+
return err;
}
-status_t ANetworkSession::Session::sendRequest(const void *data, ssize_t size) {
+status_t ANetworkSession::Session::sendRequest(
+ const void *data, ssize_t size, bool timeValid, int64_t timeUs) {
CHECK(mState == CONNECTED || mState == DATAGRAM);
- if (mState == DATAGRAM) {
- CHECK_GE(size, 0);
-
- sp<ABuffer> datagram = new ABuffer(size);
- memcpy(datagram->data(), data, size);
+ if (size < 0) {
+ size = strlen((const char *)data);
+ }
- mOutDatagrams.push_back(datagram);
+ if (size == 0) {
return OK;
}
+ sp<ABuffer> buffer;
+
if (mState == CONNECTED && !mIsRTSPConnection) {
CHECK_LE(size, 65535);
- uint8_t prefix[2];
- prefix[0] = size >> 8;
- prefix[1] = size & 0xff;
+ buffer = new ABuffer(size + 2);
+ buffer->data()[0] = size >> 8;
+ buffer->data()[1] = size & 0xff;
+ memcpy(buffer->data() + 2, data, size);
+ } else {
+ buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+ }
+
+ Fragment frag;
- mOutBuffer.append((const char *)prefix, sizeof(prefix));
+ frag.mFlags = 0;
+ if (timeValid) {
+ frag.mFlags = FRAGMENT_FLAG_TIME_VALID;
+ frag.mTimeUs = timeUs;
}
- mOutBuffer.append(
- (const char *)data,
- (size >= 0) ? size : strlen((const char *)data));
+ frag.mBuffer = buffer;
+
+ mOutFragments.push_back(frag);
return OK;
}
@@ -770,6 +869,22 @@ status_t ANetworkSession::createClientOrServer(
err = -errno;
goto bail2;
}
+ } else if (mode == kModeCreateTCPDatagramSessionActive) {
+ int flag = 1;
+ res = setsockopt(s, IPPROTO_TCP, TCP_NODELAY, &flag, sizeof(flag));
+
+ if (res < 0) {
+ err = -errno;
+ goto bail2;
+ }
+
+ int tos = 224; // VOICE
+ res = setsockopt(s, IPPROTO_IP, IP_TOS, &tos, sizeof(tos));
+
+ if (res < 0) {
+ err = -errno;
+ goto bail2;
+ }
}
err = MakeSocketNonBlocking(s);
@@ -946,7 +1061,8 @@ status_t ANetworkSession::connectUDPSession(
}
status_t ANetworkSession::sendRequest(
- int32_t sessionID, const void *data, ssize_t size) {
+ int32_t sessionID, const void *data, ssize_t size,
+ bool timeValid, int64_t timeUs) {
Mutex::Autolock autoLock(mLock);
ssize_t index = mSessions.indexOfKey(sessionID);
@@ -957,7 +1073,7 @@ status_t ANetworkSession::sendRequest(
const sp<Session> session = mSessions.valueAt(index);
- status_t err = session->sendRequest(data, size);
+ status_t err = session->sendRequest(data, size, timeValid, timeUs);
interrupt();
@@ -1091,7 +1207,6 @@ void ANetworkSession::threadLoop() {
clientSocket);
sp<Session> clientSession =
- // using socket sd as sessionID
new Session(
mNextSessionID++,
Session::CONNECTED,
diff --git a/media/libstagefright/wifi-display/ANetworkSession.h b/media/libstagefright/wifi-display/ANetworkSession.h
index c1acdcc..7c62b29 100644
--- a/media/libstagefright/wifi-display/ANetworkSession.h
+++ b/media/libstagefright/wifi-display/ANetworkSession.h
@@ -74,7 +74,8 @@ struct ANetworkSession : public RefBase {
status_t destroySession(int32_t sessionID);
status_t sendRequest(
- int32_t sessionID, const void *data, ssize_t size = -1);
+ int32_t sessionID, const void *data, ssize_t size = -1,
+ bool timeValid = false, int64_t timeUs = -1ll);
enum NotificationReason {
kWhatError,
@@ -83,6 +84,7 @@ struct ANetworkSession : public RefBase {
kWhatData,
kWhatDatagram,
kWhatBinaryData,
+ kWhatNetworkStall,
};
protected:
diff --git a/media/libstagefright/wifi-display/Android.mk b/media/libstagefright/wifi-display/Android.mk
index 75098f1..061ae89 100644
--- a/media/libstagefright/wifi-display/Android.mk
+++ b/media/libstagefright/wifi-display/Android.mk
@@ -4,20 +4,17 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES:= \
ANetworkSession.cpp \
+ MediaSender.cpp \
Parameters.cpp \
ParsedMessage.cpp \
- sink/LinearRegression.cpp \
- sink/RTPSink.cpp \
- sink/TunnelRenderer.cpp \
- sink/WifiDisplaySink.cpp \
+ rtp/RTPSender.cpp \
source/Converter.cpp \
source/MediaPuller.cpp \
source/PlaybackSession.cpp \
source/RepeaterSource.cpp \
- source/Sender.cpp \
source/TSPacketizer.cpp \
source/WifiDisplaySource.cpp \
- TimeSeries.cpp \
+ VideoFormats.cpp \
LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/media/libstagefright \
@@ -27,6 +24,7 @@ LOCAL_C_INCLUDES:= \
LOCAL_SHARED_LIBRARIES:= \
libbinder \
libcutils \
+ liblog \
libgui \
libmedia \
libstagefright \
@@ -55,31 +53,10 @@ LOCAL_SHARED_LIBRARIES:= \
libstagefright_foundation \
libstagefright_wfd \
libutils \
+ liblog \
LOCAL_MODULE:= wfd
LOCAL_MODULE_TAGS := debug
include $(BUILD_EXECUTABLE)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- udptest.cpp \
-
-LOCAL_SHARED_LIBRARIES:= \
- libbinder \
- libgui \
- libmedia \
- libstagefright \
- libstagefright_foundation \
- libstagefright_wfd \
- libutils \
-
-LOCAL_MODULE:= udptest
-
-LOCAL_MODULE_TAGS := debug
-
-include $(BUILD_EXECUTABLE)
diff --git a/media/libstagefright/wifi-display/MediaSender.cpp b/media/libstagefright/wifi-display/MediaSender.cpp
new file mode 100644
index 0000000..8a3566f
--- /dev/null
+++ b/media/libstagefright/wifi-display/MediaSender.cpp
@@ -0,0 +1,474 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaSender"
+#include <utils/Log.h>
+
+#include "MediaSender.h"
+
+#include "ANetworkSession.h"
+#include "rtp/RTPSender.h"
+#include "source/TSPacketizer.h"
+
+#include "include/avc_utils.h"
+
+#include <media/IHDCP.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+MediaSender::MediaSender(
+ const sp<ANetworkSession> &netSession,
+ const sp<AMessage> &notify)
+ : mNetSession(netSession),
+ mNotify(notify),
+ mMode(MODE_UNDEFINED),
+ mGeneration(0),
+ mPrevTimeUs(-1ll),
+ mInitDoneCount(0),
+ mLogFile(NULL) {
+ // mLogFile = fopen("/data/misc/log.ts", "wb");
+}
+
+MediaSender::~MediaSender() {
+ if (mLogFile != NULL) {
+ fclose(mLogFile);
+ mLogFile = NULL;
+ }
+}
+
+status_t MediaSender::setHDCP(const sp<IHDCP> &hdcp) {
+ if (mMode != MODE_UNDEFINED) {
+ return INVALID_OPERATION;
+ }
+
+ mHDCP = hdcp;
+
+ return OK;
+}
+
+ssize_t MediaSender::addTrack(const sp<AMessage> &format, uint32_t flags) {
+ if (mMode != MODE_UNDEFINED) {
+ return INVALID_OPERATION;
+ }
+
+ TrackInfo info;
+ info.mFormat = format;
+ info.mFlags = flags;
+ info.mPacketizerTrackIndex = -1;
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+ info.mIsAudio = !strncasecmp("audio/", mime.c_str(), 6);
+
+ size_t index = mTrackInfos.size();
+ mTrackInfos.push_back(info);
+
+ return index;
+}
+
+status_t MediaSender::initAsync(
+ ssize_t trackIndex,
+ const char *remoteHost,
+ int32_t remoteRTPPort,
+ RTPSender::TransportMode rtpMode,
+ int32_t remoteRTCPPort,
+ RTPSender::TransportMode rtcpMode,
+ int32_t *localRTPPort) {
+ if (trackIndex < 0) {
+ if (mMode != MODE_UNDEFINED) {
+ return INVALID_OPERATION;
+ }
+
+ uint32_t flags = 0;
+ if (mHDCP != NULL) {
+ // XXX Determine proper HDCP version.
+ flags |= TSPacketizer::EMIT_HDCP20_DESCRIPTOR;
+ }
+ mTSPacketizer = new TSPacketizer(flags);
+
+ status_t err = OK;
+ for (size_t i = 0; i < mTrackInfos.size(); ++i) {
+ TrackInfo *info = &mTrackInfos.editItemAt(i);
+
+ ssize_t packetizerTrackIndex =
+ mTSPacketizer->addTrack(info->mFormat);
+
+ if (packetizerTrackIndex < 0) {
+ err = packetizerTrackIndex;
+ break;
+ }
+
+ info->mPacketizerTrackIndex = packetizerTrackIndex;
+ }
+
+ if (err == OK) {
+ sp<AMessage> notify = new AMessage(kWhatSenderNotify, id());
+ notify->setInt32("generation", mGeneration);
+ mTSSender = new RTPSender(mNetSession, notify);
+ looper()->registerHandler(mTSSender);
+
+ err = mTSSender->initAsync(
+ remoteHost,
+ remoteRTPPort,
+ rtpMode,
+ remoteRTCPPort,
+ rtcpMode,
+ localRTPPort);
+
+ if (err != OK) {
+ looper()->unregisterHandler(mTSSender->id());
+ mTSSender.clear();
+ }
+ }
+
+ if (err != OK) {
+ for (size_t i = 0; i < mTrackInfos.size(); ++i) {
+ TrackInfo *info = &mTrackInfos.editItemAt(i);
+ info->mPacketizerTrackIndex = -1;
+ }
+
+ mTSPacketizer.clear();
+ return err;
+ }
+
+ mMode = MODE_TRANSPORT_STREAM;
+ mInitDoneCount = 1;
+
+ return OK;
+ }
+
+ if (mMode == MODE_TRANSPORT_STREAM) {
+ return INVALID_OPERATION;
+ }
+
+ if ((size_t)trackIndex >= mTrackInfos.size()) {
+ return -ERANGE;
+ }
+
+ TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
+
+ if (info->mSender != NULL) {
+ return INVALID_OPERATION;
+ }
+
+ sp<AMessage> notify = new AMessage(kWhatSenderNotify, id());
+ notify->setInt32("generation", mGeneration);
+ notify->setSize("trackIndex", trackIndex);
+
+ info->mSender = new RTPSender(mNetSession, notify);
+ looper()->registerHandler(info->mSender);
+
+ status_t err = info->mSender->initAsync(
+ remoteHost,
+ remoteRTPPort,
+ rtpMode,
+ remoteRTCPPort,
+ rtcpMode,
+ localRTPPort);
+
+ if (err != OK) {
+ looper()->unregisterHandler(info->mSender->id());
+ info->mSender.clear();
+
+ return err;
+ }
+
+ if (mMode == MODE_UNDEFINED) {
+ mInitDoneCount = mTrackInfos.size();
+ }
+
+ mMode = MODE_ELEMENTARY_STREAMS;
+
+ return OK;
+}
+
+status_t MediaSender::queueAccessUnit(
+ size_t trackIndex, const sp<ABuffer> &accessUnit) {
+ if (mMode == MODE_UNDEFINED) {
+ return INVALID_OPERATION;
+ }
+
+ if (trackIndex >= mTrackInfos.size()) {
+ return -ERANGE;
+ }
+
+ if (mMode == MODE_TRANSPORT_STREAM) {
+ TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
+ info->mAccessUnits.push_back(accessUnit);
+
+ mTSPacketizer->extractCSDIfNecessary(info->mPacketizerTrackIndex);
+
+ for (;;) {
+ ssize_t minTrackIndex = -1;
+ int64_t minTimeUs = -1ll;
+
+ for (size_t i = 0; i < mTrackInfos.size(); ++i) {
+ const TrackInfo &info = mTrackInfos.itemAt(i);
+
+ if (info.mAccessUnits.empty()) {
+ minTrackIndex = -1;
+ minTimeUs = -1ll;
+ break;
+ }
+
+ int64_t timeUs;
+ const sp<ABuffer> &accessUnit = *info.mAccessUnits.begin();
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ if (minTrackIndex < 0 || timeUs < minTimeUs) {
+ minTrackIndex = i;
+ minTimeUs = timeUs;
+ }
+ }
+
+ if (minTrackIndex < 0) {
+ return OK;
+ }
+
+ TrackInfo *info = &mTrackInfos.editItemAt(minTrackIndex);
+ sp<ABuffer> accessUnit = *info->mAccessUnits.begin();
+ info->mAccessUnits.erase(info->mAccessUnits.begin());
+
+ sp<ABuffer> tsPackets;
+ status_t err = packetizeAccessUnit(
+ minTrackIndex, accessUnit, &tsPackets);
+
+ if (err == OK) {
+ if (mLogFile != NULL) {
+ fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile);
+ }
+
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+ tsPackets->meta()->setInt64("timeUs", timeUs);
+
+ err = mTSSender->queueBuffer(
+ tsPackets,
+ 33 /* packetType */,
+ RTPSender::PACKETIZATION_TRANSPORT_STREAM);
+ }
+
+ if (err != OK) {
+ return err;
+ }
+ }
+ }
+
+ TrackInfo *info = &mTrackInfos.editItemAt(trackIndex);
+
+ return info->mSender->queueBuffer(
+ accessUnit,
+ info->mIsAudio ? 96 : 97 /* packetType */,
+ info->mIsAudio
+ ? RTPSender::PACKETIZATION_AAC : RTPSender::PACKETIZATION_H264);
+}
+
+void MediaSender::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatSenderNotify:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ if (generation != mGeneration) {
+ break;
+ }
+
+ onSenderNotify(msg);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+void MediaSender::onSenderNotify(const sp<AMessage> &msg) {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ switch (what) {
+ case RTPSender::kWhatInitDone:
+ {
+ --mInitDoneCount;
+
+ int32_t err;
+ CHECK(msg->findInt32("err", &err));
+
+ if (err != OK) {
+ notifyInitDone(err);
+ ++mGeneration;
+ break;
+ }
+
+ if (mInitDoneCount == 0) {
+ notifyInitDone(OK);
+ }
+ break;
+ }
+
+ case RTPSender::kWhatError:
+ {
+ int32_t err;
+ CHECK(msg->findInt32("err", &err));
+
+ notifyError(err);
+ break;
+ }
+
+ case kWhatNetworkStall:
+ {
+ size_t numBytesQueued;
+ CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
+
+ notifyNetworkStall(numBytesQueued);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+void MediaSender::notifyInitDone(status_t err) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatInitDone);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+void MediaSender::notifyError(status_t err) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+void MediaSender::notifyNetworkStall(size_t numBytesQueued) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatNetworkStall);
+ notify->setSize("numBytesQueued", numBytesQueued);
+ notify->post();
+}
+
+status_t MediaSender::packetizeAccessUnit(
+ size_t trackIndex,
+ sp<ABuffer> accessUnit,
+ sp<ABuffer> *tsPackets) {
+ const TrackInfo &info = mTrackInfos.itemAt(trackIndex);
+
+ uint32_t flags = 0;
+
+ bool isHDCPEncrypted = false;
+ uint64_t inputCTR;
+ uint8_t HDCP_private_data[16];
+
+ bool manuallyPrependSPSPPS =
+ !info.mIsAudio
+ && (info.mFlags & FLAG_MANUALLY_PREPEND_SPS_PPS)
+ && IsIDR(accessUnit);
+
+ if (mHDCP != NULL && !info.mIsAudio) {
+ isHDCPEncrypted = true;
+
+ if (manuallyPrependSPSPPS) {
+ accessUnit = mTSPacketizer->prependCSD(
+ info.mPacketizerTrackIndex, accessUnit);
+ }
+
+ status_t err = mHDCP->encrypt(
+ accessUnit->data(), accessUnit->size(),
+ trackIndex /* streamCTR */,
+ &inputCTR,
+ accessUnit->data());
+
+ if (err != OK) {
+ ALOGE("Failed to HDCP-encrypt media data (err %d)",
+ err);
+
+ return err;
+ }
+
+ HDCP_private_data[0] = 0x00;
+
+ HDCP_private_data[1] =
+ (((trackIndex >> 30) & 3) << 1) | 1;
+
+ HDCP_private_data[2] = (trackIndex >> 22) & 0xff;
+
+ HDCP_private_data[3] =
+ (((trackIndex >> 15) & 0x7f) << 1) | 1;
+
+ HDCP_private_data[4] = (trackIndex >> 7) & 0xff;
+
+ HDCP_private_data[5] =
+ ((trackIndex & 0x7f) << 1) | 1;
+
+ HDCP_private_data[6] = 0x00;
+
+ HDCP_private_data[7] =
+ (((inputCTR >> 60) & 0x0f) << 1) | 1;
+
+ HDCP_private_data[8] = (inputCTR >> 52) & 0xff;
+
+ HDCP_private_data[9] =
+ (((inputCTR >> 45) & 0x7f) << 1) | 1;
+
+ HDCP_private_data[10] = (inputCTR >> 37) & 0xff;
+
+ HDCP_private_data[11] =
+ (((inputCTR >> 30) & 0x7f) << 1) | 1;
+
+ HDCP_private_data[12] = (inputCTR >> 22) & 0xff;
+
+ HDCP_private_data[13] =
+ (((inputCTR >> 15) & 0x7f) << 1) | 1;
+
+ HDCP_private_data[14] = (inputCTR >> 7) & 0xff;
+
+ HDCP_private_data[15] =
+ ((inputCTR & 0x7f) << 1) | 1;
+
+ flags |= TSPacketizer::IS_ENCRYPTED;
+ } else if (manuallyPrependSPSPPS) {
+ flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES;
+ }
+
+ int64_t timeUs = ALooper::GetNowUs();
+ if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) {
+ flags |= TSPacketizer::EMIT_PCR;
+ flags |= TSPacketizer::EMIT_PAT_AND_PMT;
+
+ mPrevTimeUs = timeUs;
+ }
+
+ mTSPacketizer->packetize(
+ info.mPacketizerTrackIndex,
+ accessUnit,
+ tsPackets,
+ flags,
+ !isHDCPEncrypted ? NULL : HDCP_private_data,
+ !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data),
+ info.mIsAudio ? 2 : 0 /* numStuffingBytes */);
+
+ return OK;
+}
+
+} // namespace android
+
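The 16-byte HDCP_private_data descriptor built in packetizeAccessUnit() above splits the 32-bit streamCTR and the 64-bit inputCTR across alternating bytes, setting the low bit of every odd-indexed byte as a marker. As a reading aid only, here is a hypothetical receiver-side helper (not part of this change) that undoes that packing; it assumes exactly the bit layout used above.

    // Sketch: recover streamCTR / inputCTR from the HDCP_private_data layout
    // written by MediaSender::packetizeAccessUnit(). Marker bits (the low bit
    // of every odd-indexed byte) are discarded.
    static void ParseHDCPPrivateData(
            const uint8_t data[16], uint32_t *streamCTR, uint64_t *inputCTR) {
        *streamCTR =
              ((uint32_t)(data[1] >> 1) & 3) << 30
            | (uint32_t)data[2] << 22
            | ((uint32_t)(data[3] >> 1) & 0x7f) << 15
            | (uint32_t)data[4] << 7
            | ((uint32_t)(data[5] >> 1) & 0x7f);

        *inputCTR =
              ((uint64_t)(data[7] >> 1) & 0x0f) << 60
            | (uint64_t)data[8] << 52
            | ((uint64_t)(data[9] >> 1) & 0x7f) << 45
            | (uint64_t)data[10] << 37
            | ((uint64_t)(data[11] >> 1) & 0x7f) << 30
            | (uint64_t)data[12] << 22
            | ((uint64_t)(data[13] >> 1) & 0x7f) << 15
            | (uint64_t)data[14] << 7
            | ((uint64_t)(data[15] >> 1) & 0x7f);
    }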
diff --git a/media/libstagefright/wifi-display/MediaSender.h b/media/libstagefright/wifi-display/MediaSender.h
new file mode 100644
index 0000000..64722c5
--- /dev/null
+++ b/media/libstagefright/wifi-display/MediaSender.h
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef MEDIA_SENDER_H_
+
+#define MEDIA_SENDER_H_
+
+#include "rtp/RTPSender.h"
+
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <utils/Errors.h>
+#include <utils/Vector.h>
+
+namespace android {
+
+struct ABuffer;
+struct ANetworkSession;
+struct AMessage;
+struct IHDCP;
+struct TSPacketizer;
+
+// This class facilitates sending of data from one or more media tracks
+// through one or more RTP channels, either providing a 1:1 mapping from
+// track to RTP channel or muxing all tracks into a single RTP channel and
+// using transport stream encapsulation.
+// Optionally the (video) data is encrypted using the provided hdcp object.
+struct MediaSender : public AHandler {
+ enum {
+ kWhatInitDone,
+ kWhatError,
+ kWhatNetworkStall,
+ };
+
+ MediaSender(
+ const sp<ANetworkSession> &netSession,
+ const sp<AMessage> &notify);
+
+ status_t setHDCP(const sp<IHDCP> &hdcp);
+
+ enum FlagBits {
+ FLAG_MANUALLY_PREPEND_SPS_PPS = 1,
+ };
+ ssize_t addTrack(const sp<AMessage> &format, uint32_t flags);
+
+ // If trackIndex == -1, initialize for transport stream muxing.
+ status_t initAsync(
+ ssize_t trackIndex,
+ const char *remoteHost,
+ int32_t remoteRTPPort,
+ RTPSender::TransportMode rtpMode,
+ int32_t remoteRTCPPort,
+ RTPSender::TransportMode rtcpMode,
+ int32_t *localRTPPort);
+
+ status_t queueAccessUnit(
+ size_t trackIndex, const sp<ABuffer> &accessUnit);
+
+protected:
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+ virtual ~MediaSender();
+
+private:
+ enum {
+ kWhatSenderNotify,
+ };
+
+ enum Mode {
+ MODE_UNDEFINED,
+ MODE_TRANSPORT_STREAM,
+ MODE_ELEMENTARY_STREAMS,
+ };
+
+ struct TrackInfo {
+ sp<AMessage> mFormat;
+ uint32_t mFlags;
+ sp<RTPSender> mSender;
+ List<sp<ABuffer> > mAccessUnits;
+ ssize_t mPacketizerTrackIndex;
+ bool mIsAudio;
+ };
+
+ sp<ANetworkSession> mNetSession;
+ sp<AMessage> mNotify;
+
+ sp<IHDCP> mHDCP;
+
+ Mode mMode;
+ int32_t mGeneration;
+
+ Vector<TrackInfo> mTrackInfos;
+
+ sp<TSPacketizer> mTSPacketizer;
+ sp<RTPSender> mTSSender;
+ int64_t mPrevTimeUs;
+
+ size_t mInitDoneCount;
+
+ FILE *mLogFile;
+
+ void onSenderNotify(const sp<AMessage> &msg);
+
+ void notifyInitDone(status_t err);
+ void notifyError(status_t err);
+ void notifyNetworkStall(size_t numBytesQueued);
+
+ status_t packetizeAccessUnit(
+ size_t trackIndex,
+ sp<ABuffer> accessUnit,
+ sp<ABuffer> *tsPackets);
+
+ DISALLOW_EVIL_CONSTRUCTORS(MediaSender);
+};
+
+} // namespace android
+
+#endif // MEDIA_SENDER_H_
+
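The class comment above describes the two operating modes. A minimal caller-side sketch (hypothetical; assumes an already-started ANetworkSession, an ALooper the handler runs on, and caller-defined names such as kWhatMediaSenderNotify, videoFormat and remoteHost) drives the transport-stream mode like this:

    // Sketch: mux all tracks into one MPEG-TS/RTP stream sent over UDP.
    sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, id());
    sp<MediaSender> sender = new MediaSender(netSession, notify);
    looper()->registerHandler(sender);

    ssize_t videoTrack = sender->addTrack(
            videoFormat, MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS);
    ssize_t audioTrack = sender->addTrack(audioFormat, 0 /* flags */);

    int32_t localRTPPort;
    status_t err = sender->initAsync(
            -1 /* trackIndex, -1 selects transport stream muxing */,
            remoteHost,
            remoteRTPPort,
            RTPSender::TRANSPORT_UDP,
            -1 /* remoteRTCPPort, none */,
            RTPSender::TRANSPORT_NONE,
            &localRTPPort);

    // Once the kWhatInitDone notification reports OK, feed access units:
    sender->queueAccessUnit(videoTrack, videoAccessUnit);
    sender->queueAccessUnit(audioTrack, audioAccessUnit);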
diff --git a/media/libstagefright/wifi-display/Parameters.cpp b/media/libstagefright/wifi-display/Parameters.cpp
index f7118b3..d2a61ea 100644
--- a/media/libstagefright/wifi-display/Parameters.cpp
+++ b/media/libstagefright/wifi-display/Parameters.cpp
@@ -65,7 +65,9 @@ status_t Parameters::parse(const char *data, size_t size) {
mDict.add(name, value);
- i += 2;
+ while (i + 1 < size && data[i] == '\r' && data[i + 1] == '\n') {
+ i += 2;
+ }
}
return OK;
diff --git a/media/libstagefright/wifi-display/TimeSeries.cpp b/media/libstagefright/wifi-display/TimeSeries.cpp
deleted file mode 100644
index d882d98..0000000
--- a/media/libstagefright/wifi-display/TimeSeries.cpp
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "TimeSeries.h"
-
-#include <math.h>
-#include <string.h>
-
-namespace android {
-
-TimeSeries::TimeSeries()
- : mCount(0),
- mSum(0.0) {
-}
-
-void TimeSeries::add(double val) {
- if (mCount < kHistorySize) {
- mValues[mCount++] = val;
- mSum += val;
- } else {
- mSum -= mValues[0];
- memmove(&mValues[0], &mValues[1], (kHistorySize - 1) * sizeof(double));
- mValues[kHistorySize - 1] = val;
- mSum += val;
- }
-}
-
-double TimeSeries::mean() const {
- if (mCount < 1) {
- return 0.0;
- }
-
- return mSum / mCount;
-}
-
-double TimeSeries::sdev() const {
- if (mCount < 1) {
- return 0.0;
- }
-
- double m = mean();
-
- double sum = 0.0;
- for (size_t i = 0; i < mCount; ++i) {
- double tmp = mValues[i] - m;
- tmp *= tmp;
-
- sum += tmp;
- }
-
- return sqrt(sum / mCount);
-}
-
-} // namespace android
diff --git a/media/libstagefright/wifi-display/VideoFormats.cpp b/media/libstagefright/wifi-display/VideoFormats.cpp
new file mode 100644
index 0000000..458b163
--- /dev/null
+++ b/media/libstagefright/wifi-display/VideoFormats.cpp
@@ -0,0 +1,419 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "VideoFormats"
+#include <utils/Log.h>
+
+#include "VideoFormats.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+VideoFormats::config_t VideoFormats::mConfigs[][32] = {
+ {
+ // CEA Resolutions
+ { 640, 480, 60, false, 0, 0},
+ { 720, 480, 60, false, 0, 0},
+ { 720, 480, 60, true, 0, 0},
+ { 720, 576, 50, false, 0, 0},
+ { 720, 576, 50, true, 0, 0},
+ { 1280, 720, 30, false, 0, 0},
+ { 1280, 720, 60, false, 0, 0},
+ { 1920, 1080, 30, false, 0, 0},
+ { 1920, 1080, 60, false, 0, 0},
+ { 1920, 1080, 60, true, 0, 0},
+ { 1280, 720, 25, false, 0, 0},
+ { 1280, 720, 50, false, 0, 0},
+ { 1920, 1080, 25, false, 0, 0},
+ { 1920, 1080, 50, false, 0, 0},
+ { 1920, 1080, 50, true, 0, 0},
+ { 1280, 720, 24, false, 0, 0},
+ { 1920, 1080, 24, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ },
+ {
+ // VESA Resolutions
+ { 800, 600, 30, false, 0, 0},
+ { 800, 600, 60, false, 0, 0},
+ { 1024, 768, 30, false, 0, 0},
+ { 1024, 768, 60, false, 0, 0},
+ { 1152, 864, 30, false, 0, 0},
+ { 1152, 864, 60, false, 0, 0},
+ { 1280, 768, 30, false, 0, 0},
+ { 1280, 768, 60, false, 0, 0},
+ { 1280, 800, 30, false, 0, 0},
+ { 1280, 800, 60, false, 0, 0},
+ { 1360, 768, 30, false, 0, 0},
+ { 1360, 768, 60, false, 0, 0},
+ { 1366, 768, 30, false, 0, 0},
+ { 1366, 768, 60, false, 0, 0},
+ { 1280, 1024, 30, false, 0, 0},
+ { 1280, 1024, 60, false, 0, 0},
+ { 1400, 1050, 30, false, 0, 0},
+ { 1400, 1050, 60, false, 0, 0},
+ { 1440, 900, 30, false, 0, 0},
+ { 1440, 900, 60, false, 0, 0},
+ { 1600, 900, 30, false, 0, 0},
+ { 1600, 900, 60, false, 0, 0},
+ { 1600, 1200, 30, false, 0, 0},
+ { 1600, 1200, 60, false, 0, 0},
+ { 1680, 1024, 30, false, 0, 0},
+ { 1680, 1024, 60, false, 0, 0},
+ { 1680, 1050, 30, false, 0, 0},
+ { 1680, 1050, 60, false, 0, 0},
+ { 1920, 1200, 30, false, 0, 0},
+ { 1920, 1200, 60, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ },
+ {
+ // HH Resolutions
+ { 800, 480, 30, false, 0, 0},
+ { 800, 480, 60, false, 0, 0},
+ { 854, 480, 30, false, 0, 0},
+ { 854, 480, 60, false, 0, 0},
+ { 864, 480, 30, false, 0, 0},
+ { 864, 480, 60, false, 0, 0},
+ { 640, 360, 30, false, 0, 0},
+ { 640, 360, 60, false, 0, 0},
+ { 960, 540, 30, false, 0, 0},
+ { 960, 540, 60, false, 0, 0},
+ { 848, 480, 30, false, 0, 0},
+ { 848, 480, 60, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ { 0, 0, 0, false, 0, 0},
+ }
+};
+
+VideoFormats::VideoFormats() {
+ for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+ mResolutionEnabled[i] = 0;
+ }
+
+ setNativeResolution(RESOLUTION_CEA, 0); // default to 640x480 p60
+}
+
+void VideoFormats::setNativeResolution(ResolutionType type, size_t index) {
+ CHECK_LT(type, kNumResolutionTypes);
+ CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+ mNativeType = type;
+ mNativeIndex = index;
+
+ setResolutionEnabled(type, index);
+}
+
+void VideoFormats::getNativeResolution(
+ ResolutionType *type, size_t *index) const {
+ *type = mNativeType;
+ *index = mNativeIndex;
+}
+
+void VideoFormats::disableAll() {
+ for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+ mResolutionEnabled[i] = 0;
+ for (size_t j = 0; j < 32; j++) {
+ mConfigs[i][j].profile = mConfigs[i][j].level = 0;
+ }
+ }
+}
+
+void VideoFormats::enableAll() {
+ for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+ mResolutionEnabled[i] = 0xffffffff;
+ for (size_t j = 0; j < 32; j++) {
+ mConfigs[i][j].profile = (1ul << PROFILE_CBP);
+ mConfigs[i][j].level = (1ul << LEVEL_31);
+ }
+ }
+}
+
+void VideoFormats::setResolutionEnabled(
+ ResolutionType type, size_t index, bool enabled) {
+ CHECK_LT(type, kNumResolutionTypes);
+ CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+ if (enabled) {
+ mResolutionEnabled[type] |= (1ul << index);
+ } else {
+ mResolutionEnabled[type] &= ~(1ul << index);
+ }
+}
+
+bool VideoFormats::isResolutionEnabled(
+ ResolutionType type, size_t index) const {
+ CHECK_LT(type, kNumResolutionTypes);
+ CHECK(GetConfiguration(type, index, NULL, NULL, NULL, NULL));
+
+ return mResolutionEnabled[type] & (1ul << index);
+}
+
+// static
+bool VideoFormats::GetConfiguration(
+ ResolutionType type,
+ size_t index,
+ size_t *width, size_t *height, size_t *framesPerSecond,
+ bool *interlaced) {
+ CHECK_LT(type, kNumResolutionTypes);
+
+ if (index >= 32) {
+ return false;
+ }
+
+ const config_t *config = &mConfigs[type][index];
+
+ if (config->width == 0) {
+ return false;
+ }
+
+ if (width) {
+ *width = config->width;
+ }
+
+ if (height) {
+ *height = config->height;
+ }
+
+ if (framesPerSecond) {
+ *framesPerSecond = config->framesPerSecond;
+ }
+
+ if (interlaced) {
+ *interlaced = config->interlaced;
+ }
+
+ return true;
+}
+
+bool VideoFormats::parseH264Codec(const char *spec) {
+ unsigned profile, level, res[3];
+
+ if (sscanf(
+ spec,
+ "%02x %02x %08X %08X %08X",
+ &profile,
+ &level,
+ &res[0],
+ &res[1],
+ &res[2]) != 5) {
+ return false;
+ }
+
+ for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+ for (size_t j = 0; j < 32; ++j) {
+ if (res[i] & (1ul << j)){
+ mResolutionEnabled[i] |= (1ul << j);
+ if (profile > mConfigs[i][j].profile) {
+ mConfigs[i][j].profile = profile;
+ if (level > mConfigs[i][j].level)
+ mConfigs[i][j].level = level;
+ }
+ }
+ }
+ }
+
+ return true;
+}
+
+bool VideoFormats::parseFormatSpec(const char *spec) {
+ CHECK_EQ(kNumResolutionTypes, 3);
+
+ unsigned native, dummy;
+ unsigned res[3];
+ size_t size = strlen(spec);
+ size_t offset = 0;
+
+ if (sscanf(spec, "%02x %02x ", &native, &dummy) != 2) {
+ return false;
+ }
+
+ offset += 6; // skip native and preferred-display-mode-supported
+ CHECK_LE(offset + 58, size);
+ while (offset < size) {
+ parseH264Codec(spec + offset);
+ offset += 60; // skip H.264-codec + ", "
+ }
+
+ mNativeIndex = native >> 3;
+ mNativeType = (ResolutionType)(native & 7);
+
+ bool success;
+ if (mNativeType >= kNumResolutionTypes) {
+ success = false;
+ } else {
+ success = GetConfiguration(
+ mNativeType, mNativeIndex, NULL, NULL, NULL, NULL);
+ }
+
+ if (!success) {
+ ALOGW("sink advertised an illegal native resolution, fortunately "
+ "this value is ignored for the time being...");
+ }
+
+ return true;
+}
+
+AString VideoFormats::getFormatSpec(bool forM4Message) const {
+ CHECK_EQ(kNumResolutionTypes, 3);
+
+ // wfd_video_formats:
+ // 1 byte "native"
+ // 1 byte "preferred-display-mode-supported" 0 or 1
+ // one or more avc codec structures
+ // 1 byte profile
+ // 1 byte level
+ // 4 byte CEA mask
+ // 4 byte VESA mask
+ // 4 byte HH mask
+ // 1 byte latency
+ // 2 byte min-slice-size
+ // 2 byte slice-enc-params
+ // 1 byte framerate-control-support
+ // max-hres (none or 2 byte)
+ // max-vres (none or 2 byte)
+
+ return StringPrintf(
+ "%02x 00 02 02 %08x %08x %08x 00 0000 0000 00 none none",
+ forM4Message ? 0x00 : ((mNativeIndex << 3) | mNativeType),
+ mResolutionEnabled[0],
+ mResolutionEnabled[1],
+ mResolutionEnabled[2]);
+}
+
+// static
+bool VideoFormats::PickBestFormat(
+ const VideoFormats &sinkSupported,
+ const VideoFormats &sourceSupported,
+ ResolutionType *chosenType,
+ size_t *chosenIndex) {
+#if 0
+ // Support for the native format is a great idea; the spec includes
+ // these features, but nobody supports it and the tests don't validate it.
+
+ ResolutionType nativeType;
+ size_t nativeIndex;
+ sinkSupported.getNativeResolution(&nativeType, &nativeIndex);
+ if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+ if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+ ALOGI("Choosing sink's native resolution");
+ *chosenType = nativeType;
+ *chosenIndex = nativeIndex;
+ return true;
+ }
+ } else {
+ ALOGW("Sink advertised native resolution that it doesn't "
+ "actually support... ignoring");
+ }
+
+ sourceSupported.getNativeResolution(&nativeType, &nativeIndex);
+ if (sourceSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+ if (sinkSupported.isResolutionEnabled(nativeType, nativeIndex)) {
+ ALOGI("Choosing source's native resolution");
+ *chosenType = nativeType;
+ *chosenIndex = nativeIndex;
+ return true;
+ }
+ } else {
+ ALOGW("Source advertised native resolution that it doesn't "
+ "actually support... ignoring");
+ }
+#endif
+
+ bool first = true;
+ uint32_t bestScore = 0;
+ size_t bestType = 0;
+ size_t bestIndex = 0;
+ for (size_t i = 0; i < kNumResolutionTypes; ++i) {
+ for (size_t j = 0; j < 32; ++j) {
+ size_t width, height, framesPerSecond;
+ bool interlaced;
+ if (!GetConfiguration(
+ (ResolutionType)i,
+ j,
+ &width, &height, &framesPerSecond, &interlaced)) {
+ break;
+ }
+
+ if (!sinkSupported.isResolutionEnabled((ResolutionType)i, j)
+ || !sourceSupported.isResolutionEnabled(
+ (ResolutionType)i, j)) {
+ continue;
+ }
+
+ ALOGV("type %u, index %u, %u x %u %c%u supported",
+ i, j, width, height, interlaced ? 'i' : 'p', framesPerSecond);
+
+ uint32_t score = width * height * framesPerSecond;
+ if (!interlaced) {
+ score *= 2;
+ }
+
+ if (first || score > bestScore) {
+ bestScore = score;
+ bestType = i;
+ bestIndex = j;
+
+ first = false;
+ }
+ }
+ }
+
+ if (first) {
+ return false;
+ }
+
+ *chosenType = (ResolutionType)bestType;
+ *chosenIndex = bestIndex;
+
+ return true;
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/VideoFormats.h b/media/libstagefright/wifi-display/VideoFormats.h
new file mode 100644
index 0000000..01de246
--- /dev/null
+++ b/media/libstagefright/wifi-display/VideoFormats.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VIDEO_FORMATS_H_
+
+#define VIDEO_FORMATS_H_
+
+#include <media/stagefright/foundation/ABase.h>
+
+#include <stdint.h>
+
+namespace android {
+
+struct AString;
+
+// This class encapsulates the video resolution capabilities of a wfd source
+// or sink as outlined in the wfd specs. Currently three sets of resolutions
+// are specified, each of which supports up to 32 resolutions.
+// In addition to its capabilities each sink/source also publishes its
+// "native" resolution, presumably one that is preferred among all others
+// because it wouldn't require any scaling and directly corresponds to the
+// display capabilities/pixels.
+struct VideoFormats {
+ VideoFormats();
+
+ struct config_t {
+ size_t width, height, framesPerSecond;
+ bool interlaced;
+ unsigned char profile, level;
+ };
+
+ enum ProfileType {
+ PROFILE_CBP = 0,
+ PROFILE_CHP,
+ kNumProfileTypes,
+ };
+
+ enum LevelType {
+ LEVEL_31 = 0,
+ LEVEL_32,
+ LEVEL_40,
+ LEVEL_41,
+ LEVEL_42,
+ kNumLevelTypes,
+ };
+
+ enum ResolutionType {
+ RESOLUTION_CEA,
+ RESOLUTION_VESA,
+ RESOLUTION_HH,
+ kNumResolutionTypes,
+ };
+
+ void setNativeResolution(ResolutionType type, size_t index);
+ void getNativeResolution(ResolutionType *type, size_t *index) const;
+
+ void disableAll();
+ void enableAll();
+
+ void setResolutionEnabled(
+ ResolutionType type, size_t index, bool enabled = true);
+
+ bool isResolutionEnabled(ResolutionType type, size_t index) const;
+
+ static bool GetConfiguration(
+ ResolutionType type, size_t index,
+ size_t *width, size_t *height, size_t *framesPerSecond,
+ bool *interlaced);
+
+ bool parseFormatSpec(const char *spec);
+ AString getFormatSpec(bool forM4Message = false) const;
+
+ static bool PickBestFormat(
+ const VideoFormats &sinkSupported,
+ const VideoFormats &sourceSupported,
+ ResolutionType *chosenType,
+ size_t *chosenIndex);
+
+private:
+ bool parseH264Codec(const char *spec);
+ ResolutionType mNativeType;
+ size_t mNativeIndex;
+
+ uint32_t mResolutionEnabled[kNumResolutionTypes];
+ static config_t mConfigs[kNumResolutionTypes][32];
+
+ DISALLOW_EVIL_CONSTRUCTORS(VideoFormats);
+};
+
+} // namespace android
+
+#endif // VIDEO_FORMATS_H_
+
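To make the negotiation flow concrete, here is a hypothetical sketch (not part of the change; the wfd_video_formats string is illustrative only) of how a source might intersect its own capabilities with a sink's M3 response:

    // Sketch: parse the sink's wfd_video_formats value and pick the best
    // resolution both sides support.
    VideoFormats sinkSupported, sourceSupported;

    // "28" = native CEA index 5 (1280x720p30); one H.264 codec structure
    // advertising CBP / level 3.1 and a CEA bitmask.
    CHECK(sinkSupported.parseFormatSpec(
            "28 00 01 01 00008c7f 00000000 00000000 00 0000 0000 00 none none"));

    sourceSupported.disableAll();
    sourceSupported.setResolutionEnabled(VideoFormats::RESOLUTION_CEA, 5);  // 720p30
    sourceSupported.setResolutionEnabled(VideoFormats::RESOLUTION_CEA, 7);  // 1080p30

    VideoFormats::ResolutionType chosenType;
    size_t chosenIndex;
    if (VideoFormats::PickBestFormat(
                sinkSupported, sourceSupported, &chosenType, &chosenIndex)) {
        size_t width, height, fps;
        bool interlaced;
        CHECK(VideoFormats::GetConfiguration(
                chosenType, chosenIndex, &width, &height, &fps, &interlaced));
        ALOGI("negotiated %d x %d %c%d",
              (int)width, (int)height, interlaced ? 'i' : 'p', (int)fps);
    }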
diff --git a/media/libstagefright/wifi-display/TimeSeries.h b/media/libstagefright/wifi-display/rtp/RTPBase.h
index c818d51..6178f00 100644
--- a/media/libstagefright/wifi-display/TimeSeries.h
+++ b/media/libstagefright/wifi-display/rtp/RTPBase.h
@@ -1,5 +1,5 @@
/*
- * Copyright 2012, The Android Open Source Project
+ * Copyright 2013, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,33 +14,38 @@
* limitations under the License.
*/
-#ifndef TIME_SERIES_H_
+#ifndef RTP_BASE_H_
-#define TIME_SERIES_H_
-
-#include <sys/types.h>
+#define RTP_BASE_H_
namespace android {
-struct TimeSeries {
- TimeSeries();
-
- void add(double val);
+struct RTPBase {
+ enum PacketizationMode {
+ PACKETIZATION_TRANSPORT_STREAM,
+ PACKETIZATION_H264,
+ PACKETIZATION_AAC,
+ PACKETIZATION_NONE,
+ };
- double mean() const;
- double sdev() const;
+ enum TransportMode {
+ TRANSPORT_UNDEFINED,
+ TRANSPORT_NONE,
+ TRANSPORT_UDP,
+ TRANSPORT_TCP,
+ TRANSPORT_TCP_INTERLEAVED,
+ };
-private:
enum {
- kHistorySize = 20
+ // Really UDP _payload_ size
+ kMaxUDPPacketSize = 1472, // 1472 good, 1473 bad on Android@Home
};
- double mValues[kHistorySize];
- size_t mCount;
- double mSum;
+ static int32_t PickRandomRTPPort();
};
} // namespace android
-#endif // TIME_SERIES_H_
+#endif // RTP_BASE_H_
+
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.cpp b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
new file mode 100644
index 0000000..095fd97
--- /dev/null
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.cpp
@@ -0,0 +1,795 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "RTPSender"
+#include <utils/Log.h>
+
+#include "RTPSender.h"
+
+#include "ANetworkSession.h"
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/hexdump.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
+
+#include "include/avc_utils.h"
+
+namespace android {
+
+RTPSender::RTPSender(
+ const sp<ANetworkSession> &netSession,
+ const sp<AMessage> &notify)
+ : mNetSession(netSession),
+ mNotify(notify),
+ mRTPMode(TRANSPORT_UNDEFINED),
+ mRTCPMode(TRANSPORT_UNDEFINED),
+ mRTPSessionID(0),
+ mRTCPSessionID(0),
+ mRTPConnected(false),
+ mRTCPConnected(false),
+ mLastNTPTime(0),
+ mLastRTPTime(0),
+ mNumRTPSent(0),
+ mNumRTPOctetsSent(0),
+ mNumSRsSent(0),
+ mRTPSeqNo(0),
+ mHistorySize(0) {
+}
+
+RTPSender::~RTPSender() {
+ if (mRTCPSessionID != 0) {
+ mNetSession->destroySession(mRTCPSessionID);
+ mRTCPSessionID = 0;
+ }
+
+ if (mRTPSessionID != 0) {
+ mNetSession->destroySession(mRTPSessionID);
+ mRTPSessionID = 0;
+ }
+}
+
+// static
+int32_t RTPBase::PickRandomRTPPort() {
+ // Pick an even integer in range [1024, 65534)
+
+ static const size_t kRange = (65534 - 1024) / 2;
+
+ return (int32_t)(((float)(kRange + 1) * rand()) / RAND_MAX) * 2 + 1024;
+}
+
+status_t RTPSender::initAsync(
+ const char *remoteHost,
+ int32_t remoteRTPPort,
+ TransportMode rtpMode,
+ int32_t remoteRTCPPort,
+ TransportMode rtcpMode,
+ int32_t *outLocalRTPPort) {
+ if (mRTPMode != TRANSPORT_UNDEFINED
+ || rtpMode == TRANSPORT_UNDEFINED
+ || rtpMode == TRANSPORT_NONE
+ || rtcpMode == TRANSPORT_UNDEFINED) {
+ return INVALID_OPERATION;
+ }
+
+ CHECK_NE(rtpMode, TRANSPORT_TCP_INTERLEAVED);
+ CHECK_NE(rtcpMode, TRANSPORT_TCP_INTERLEAVED);
+
+ if ((rtcpMode == TRANSPORT_NONE && remoteRTCPPort >= 0)
+ || (rtcpMode != TRANSPORT_NONE && remoteRTCPPort < 0)) {
+ return INVALID_OPERATION;
+ }
+
+ sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
+
+ sp<AMessage> rtcpNotify;
+ if (remoteRTCPPort >= 0) {
+ rtcpNotify = new AMessage(kWhatRTCPNotify, id());
+ }
+
+ CHECK_EQ(mRTPSessionID, 0);
+ CHECK_EQ(mRTCPSessionID, 0);
+
+ int32_t localRTPPort;
+
+ for (;;) {
+ localRTPPort = PickRandomRTPPort();
+
+ status_t err;
+ if (rtpMode == TRANSPORT_UDP) {
+ err = mNetSession->createUDPSession(
+ localRTPPort,
+ remoteHost,
+ remoteRTPPort,
+ rtpNotify,
+ &mRTPSessionID);
+ } else {
+ CHECK_EQ(rtpMode, TRANSPORT_TCP);
+ err = mNetSession->createTCPDatagramSession(
+ localRTPPort,
+ remoteHost,
+ remoteRTPPort,
+ rtpNotify,
+ &mRTPSessionID);
+ }
+
+ if (err != OK) {
+ continue;
+ }
+
+ if (remoteRTCPPort < 0) {
+ break;
+ }
+
+ if (rtcpMode == TRANSPORT_UDP) {
+ err = mNetSession->createUDPSession(
+ localRTPPort + 1,
+ remoteHost,
+ remoteRTCPPort,
+ rtcpNotify,
+ &mRTCPSessionID);
+ } else {
+ CHECK_EQ(rtcpMode, TRANSPORT_TCP);
+ err = mNetSession->createTCPDatagramSession(
+ localRTPPort + 1,
+ remoteHost,
+ remoteRTCPPort,
+ rtcpNotify,
+ &mRTCPSessionID);
+ }
+
+ if (err == OK) {
+ break;
+ }
+
+ mNetSession->destroySession(mRTPSessionID);
+ mRTPSessionID = 0;
+ }
+
+ if (rtpMode == TRANSPORT_UDP) {
+ mRTPConnected = true;
+ }
+
+ if (rtcpMode == TRANSPORT_UDP) {
+ mRTCPConnected = true;
+ }
+
+ mRTPMode = rtpMode;
+ mRTCPMode = rtcpMode;
+ *outLocalRTPPort = localRTPPort;
+
+ if (mRTPMode == TRANSPORT_UDP
+ && (mRTCPMode == TRANSPORT_UDP || mRTCPMode == TRANSPORT_NONE)) {
+ notifyInitDone(OK);
+ }
+
+ return OK;
+}
+
+status_t RTPSender::queueBuffer(
+ const sp<ABuffer> &buffer, uint8_t packetType, PacketizationMode mode) {
+ status_t err;
+
+ switch (mode) {
+ case PACKETIZATION_NONE:
+ err = queueRawPacket(buffer, packetType);
+ break;
+
+ case PACKETIZATION_TRANSPORT_STREAM:
+ err = queueTSPackets(buffer, packetType);
+ break;
+
+ case PACKETIZATION_H264:
+ err = queueAVCBuffer(buffer, packetType);
+ break;
+
+ default:
+ TRESPASS();
+ }
+
+ return err;
+}
+
+status_t RTPSender::queueRawPacket(
+ const sp<ABuffer> &packet, uint8_t packetType) {
+ CHECK_LE(packet->size(), kMaxUDPPacketSize - 12);
+
+ int64_t timeUs;
+ CHECK(packet->meta()->findInt64("timeUs", &timeUs));
+
+ sp<ABuffer> udpPacket = new ABuffer(12 + packet->size());
+
+ udpPacket->setInt32Data(mRTPSeqNo);
+
+ uint8_t *rtp = udpPacket->data();
+ rtp[0] = 0x80;
+ rtp[1] = packetType;
+
+ rtp[2] = (mRTPSeqNo >> 8) & 0xff;
+ rtp[3] = mRTPSeqNo & 0xff;
+ ++mRTPSeqNo;
+
+ uint32_t rtpTime = (timeUs * 9) / 100ll;
+
+ rtp[4] = rtpTime >> 24;
+ rtp[5] = (rtpTime >> 16) & 0xff;
+ rtp[6] = (rtpTime >> 8) & 0xff;
+ rtp[7] = rtpTime & 0xff;
+
+ rtp[8] = kSourceID >> 24;
+ rtp[9] = (kSourceID >> 16) & 0xff;
+ rtp[10] = (kSourceID >> 8) & 0xff;
+ rtp[11] = kSourceID & 0xff;
+
+ memcpy(&rtp[12], packet->data(), packet->size());
+
+ return sendRTPPacket(
+ udpPacket,
+ true /* storeInHistory */,
+ true /* timeValid */,
+ ALooper::GetNowUs());
+}
+
+status_t RTPSender::queueTSPackets(
+ const sp<ABuffer> &tsPackets, uint8_t packetType) {
+ CHECK_EQ(0, tsPackets->size() % 188);
+
+ int64_t timeUs;
+ CHECK(tsPackets->meta()->findInt64("timeUs", &timeUs));
+
+ const size_t numTSPackets = tsPackets->size() / 188;
+
+ size_t srcOffset = 0;
+ while (srcOffset < tsPackets->size()) {
+ sp<ABuffer> udpPacket =
+ new ABuffer(12 + kMaxNumTSPacketsPerRTPPacket * 188);
+
+ udpPacket->setInt32Data(mRTPSeqNo);
+
+ uint8_t *rtp = udpPacket->data();
+ rtp[0] = 0x80;
+ rtp[1] = packetType;
+
+ rtp[2] = (mRTPSeqNo >> 8) & 0xff;
+ rtp[3] = mRTPSeqNo & 0xff;
+ ++mRTPSeqNo;
+
+ int64_t nowUs = ALooper::GetNowUs();
+ uint32_t rtpTime = (nowUs * 9) / 100ll;
+
+ rtp[4] = rtpTime >> 24;
+ rtp[5] = (rtpTime >> 16) & 0xff;
+ rtp[6] = (rtpTime >> 8) & 0xff;
+ rtp[7] = rtpTime & 0xff;
+
+ rtp[8] = kSourceID >> 24;
+ rtp[9] = (kSourceID >> 16) & 0xff;
+ rtp[10] = (kSourceID >> 8) & 0xff;
+ rtp[11] = kSourceID & 0xff;
+
+ size_t numTSPackets = (tsPackets->size() - srcOffset) / 188;
+ if (numTSPackets > kMaxNumTSPacketsPerRTPPacket) {
+ numTSPackets = kMaxNumTSPacketsPerRTPPacket;
+ }
+
+ memcpy(&rtp[12], tsPackets->data() + srcOffset, numTSPackets * 188);
+
+ udpPacket->setRange(0, 12 + numTSPackets * 188);
+
+ srcOffset += numTSPackets * 188;
+ bool isLastPacket = (srcOffset == tsPackets->size());
+
+ status_t err = sendRTPPacket(
+ udpPacket,
+ true /* storeInHistory */,
+ isLastPacket /* timeValid */,
+ timeUs);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t RTPSender::queueAVCBuffer(
+ const sp<ABuffer> &accessUnit, uint8_t packetType) {
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ uint32_t rtpTime = (timeUs * 9 / 100ll);
+
+ List<sp<ABuffer> > packets;
+
+ sp<ABuffer> out = new ABuffer(kMaxUDPPacketSize);
+ size_t outBytesUsed = 12; // Placeholder for RTP header.
+
+ const uint8_t *data = accessUnit->data();
+ size_t size = accessUnit->size();
+ const uint8_t *nalStart;
+ size_t nalSize;
+ while (getNextNALUnit(
+ &data, &size, &nalStart, &nalSize,
+ true /* startCodeFollows */) == OK) {
+ size_t bytesNeeded = nalSize + 2;
+ if (outBytesUsed == 12) {
+ ++bytesNeeded;
+ }
+
+ if (outBytesUsed + bytesNeeded > out->capacity()) {
+ bool emitSingleNALPacket = false;
+
+ if (outBytesUsed == 12
+ && outBytesUsed + nalSize <= out->capacity()) {
+ // We haven't emitted anything into the current packet yet and
+ // this NAL unit fits into a single-NAL-unit-packet while
+ // it wouldn't have fit as part of a STAP-A packet.
+
+ memcpy(out->data() + outBytesUsed, nalStart, nalSize);
+ outBytesUsed += nalSize;
+
+ emitSingleNALPacket = true;
+ }
+
+ if (outBytesUsed > 12) {
+ out->setRange(0, outBytesUsed);
+ packets.push_back(out);
+ out = new ABuffer(kMaxUDPPacketSize);
+ outBytesUsed = 12; // Placeholder for RTP header
+ }
+
+ if (emitSingleNALPacket) {
+ continue;
+ }
+ }
+
+ if (outBytesUsed + bytesNeeded <= out->capacity()) {
+ uint8_t *dst = out->data() + outBytesUsed;
+
+ if (outBytesUsed == 12) {
+ *dst++ = 24; // STAP-A header
+ }
+
+ *dst++ = (nalSize >> 8) & 0xff;
+ *dst++ = nalSize & 0xff;
+ memcpy(dst, nalStart, nalSize);
+
+ outBytesUsed += bytesNeeded;
+ continue;
+ }
+
+ // This single NAL unit does not fit into a single RTP packet,
+ // we need to emit an FU-A.
+
+ CHECK_EQ(outBytesUsed, 12u);
+
+ uint8_t nalType = nalStart[0] & 0x1f;
+ uint8_t nri = (nalStart[0] >> 5) & 3;
+
+ size_t srcOffset = 1;
+ while (srcOffset < nalSize) {
+ size_t copy = out->capacity() - outBytesUsed - 2;
+ if (copy > nalSize - srcOffset) {
+ copy = nalSize - srcOffset;
+ }
+
+ uint8_t *dst = out->data() + outBytesUsed;
+ dst[0] = (nri << 5) | 28;
+
+ dst[1] = nalType;
+
+ if (srcOffset == 1) {
+ dst[1] |= 0x80;
+ }
+
+ if (srcOffset + copy == nalSize) {
+ dst[1] |= 0x40;
+ }
+
+ memcpy(&dst[2], nalStart + srcOffset, copy);
+ srcOffset += copy;
+
+ out->setRange(0, outBytesUsed + copy + 2);
+
+ packets.push_back(out);
+ out = new ABuffer(kMaxUDPPacketSize);
+ outBytesUsed = 12; // Placeholder for RTP header
+ }
+ }
+
+ if (outBytesUsed > 12) {
+ out->setRange(0, outBytesUsed);
+ packets.push_back(out);
+ }
+
+ while (!packets.empty()) {
+ sp<ABuffer> out = *packets.begin();
+ packets.erase(packets.begin());
+
+ out->setInt32Data(mRTPSeqNo);
+
+ bool last = packets.empty();
+
+ uint8_t *dst = out->data();
+
+ dst[0] = 0x80;
+
+ dst[1] = packetType;
+ if (last) {
+ dst[1] |= 1 << 7; // M-bit
+ }
+
+ dst[2] = (mRTPSeqNo >> 8) & 0xff;
+ dst[3] = mRTPSeqNo & 0xff;
+ ++mRTPSeqNo;
+
+ dst[4] = rtpTime >> 24;
+ dst[5] = (rtpTime >> 16) & 0xff;
+ dst[6] = (rtpTime >> 8) & 0xff;
+ dst[7] = rtpTime & 0xff;
+ dst[8] = kSourceID >> 24;
+ dst[9] = (kSourceID >> 16) & 0xff;
+ dst[10] = (kSourceID >> 8) & 0xff;
+ dst[11] = kSourceID & 0xff;
+
+ status_t err = sendRTPPacket(out, true /* storeInHistory */);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t RTPSender::sendRTPPacket(
+ const sp<ABuffer> &buffer, bool storeInHistory,
+ bool timeValid, int64_t timeUs) {
+ CHECK(mRTPConnected);
+
+ status_t err = mNetSession->sendRequest(
+ mRTPSessionID, buffer->data(), buffer->size(),
+ timeValid, timeUs);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mLastNTPTime = GetNowNTP();
+ mLastRTPTime = U32_AT(buffer->data() + 4);
+
+ ++mNumRTPSent;
+ mNumRTPOctetsSent += buffer->size() - 12;
+
+ if (storeInHistory) {
+ if (mHistorySize == kMaxHistorySize) {
+ mHistory.erase(mHistory.begin());
+ } else {
+ ++mHistorySize;
+ }
+ mHistory.push_back(buffer);
+ }
+
+ return OK;
+}
+
+// static
+uint64_t RTPSender::GetNowNTP() {
+ struct timeval tv;
+ gettimeofday(&tv, NULL /* timezone */);
+
+ uint64_t nowUs = tv.tv_sec * 1000000ll + tv.tv_usec;
+
+ nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
+
+ uint64_t hi = nowUs / 1000000ll;
+ uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll;
+
+ return (hi << 32) | lo;
+}
+
+void RTPSender::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatRTPNotify:
+ case kWhatRTCPNotify:
+ onNetNotify(msg->what() == kWhatRTPNotify, msg);
+ break;
+
+ default:
+ TRESPASS();
+ }
+}
+
+void RTPSender::onNetNotify(bool isRTP, const sp<AMessage> &msg) {
+ int32_t reason;
+ CHECK(msg->findInt32("reason", &reason));
+
+ switch (reason) {
+ case ANetworkSession::kWhatError:
+ {
+ int32_t sessionID;
+ CHECK(msg->findInt32("sessionID", &sessionID));
+
+ int32_t err;
+ CHECK(msg->findInt32("err", &err));
+
+ int32_t errorOccuredDuringSend;
+ CHECK(msg->findInt32("send", &errorOccuredDuringSend));
+
+ AString detail;
+ CHECK(msg->findString("detail", &detail));
+
+ ALOGE("An error occurred during %s in session %d "
+ "(%d, '%s' (%s)).",
+ errorOccuredDuringSend ? "send" : "receive",
+ sessionID,
+ err,
+ detail.c_str(),
+ strerror(-err));
+
+ mNetSession->destroySession(sessionID);
+
+ if (sessionID == mRTPSessionID) {
+ mRTPSessionID = 0;
+ } else if (sessionID == mRTCPSessionID) {
+ mRTCPSessionID = 0;
+ }
+
+ if (!mRTPConnected
+ || (mRTPMode != TRANSPORT_NONE && !mRTCPConnected)) {
+ // We haven't completed initialization, attach the error
+ // to the notification instead.
+ notifyInitDone(err);
+ break;
+ }
+
+ notifyError(err);
+ break;
+ }
+
+ case ANetworkSession::kWhatDatagram:
+ {
+ sp<ABuffer> data;
+ CHECK(msg->findBuffer("data", &data));
+
+ if (isRTP) {
+ ALOGW("Huh? Received data on RTP connection...");
+ } else {
+ onRTCPData(data);
+ }
+ break;
+ }
+
+ case ANetworkSession::kWhatConnected:
+ {
+ int32_t sessionID;
+ CHECK(msg->findInt32("sessionID", &sessionID));
+
+ if (isRTP) {
+ CHECK_EQ(mRTPMode, TRANSPORT_TCP);
+ CHECK_EQ(sessionID, mRTPSessionID);
+ mRTPConnected = true;
+ } else {
+ CHECK_EQ(mRTCPMode, TRANSPORT_TCP);
+ CHECK_EQ(sessionID, mRTCPSessionID);
+ mRTCPConnected = true;
+ }
+
+ if (mRTPConnected
+ && (mRTCPMode == TRANSPORT_NONE || mRTCPConnected)) {
+ notifyInitDone(OK);
+ }
+ break;
+ }
+
+ case ANetworkSession::kWhatNetworkStall:
+ {
+ size_t numBytesQueued;
+ CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
+
+ notifyNetworkStall(numBytesQueued);
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+status_t RTPSender::onRTCPData(const sp<ABuffer> &buffer) {
+ const uint8_t *data = buffer->data();
+ size_t size = buffer->size();
+
+ while (size > 0) {
+ if (size < 8) {
+ // Too short to be a valid RTCP header
+ return ERROR_MALFORMED;
+ }
+
+ if ((data[0] >> 6) != 2) {
+ // Unsupported version.
+ return ERROR_UNSUPPORTED;
+ }
+
+ if (data[0] & 0x20) {
+ // Padding present.
+
+ size_t paddingLength = data[size - 1];
+
+ if (paddingLength + 12 > size) {
+ // If we removed this much padding we'd end up with something
+ // that's too short to be a valid RTCP header.
+ return ERROR_MALFORMED;
+ }
+
+ size -= paddingLength;
+ }
+
+ size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4;
+
+ if (size < headerLength) {
+ // Only received a partial packet?
+ return ERROR_MALFORMED;
+ }
+
+ switch (data[1]) {
+ case 200:
+ case 201: // RR
+ parseReceiverReport(data, headerLength);
+ break;
+
+ case 202: // SDES
+ case 203:
+ break;
+
+ case 204: // APP
+ parseAPP(data, headerLength);
+ break;
+
+ case 205: // TSFB (transport layer specific feedback)
+ parseTSFB(data, headerLength);
+ break;
+
+ case 206: // PSFB (payload specific feedback)
+ // hexdump(data, headerLength);
+ break;
+
+ default:
+ {
+ ALOGW("Unknown RTCP packet type %u of size %d",
+ (unsigned)data[1], headerLength);
+ break;
+ }
+ }
+
+ data += headerLength;
+ size -= headerLength;
+ }
+
+ return OK;
+}
+
+status_t RTPSender::parseReceiverReport(const uint8_t *data, size_t size) {
+ // hexdump(data, size);
+
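+ // Byte 12 holds the "fraction lost" field of the first report block, an
+ // 8-bit fixed-point fraction.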
+ float fractionLost = data[12] / 256.0f;
+
+ ALOGI("lost %.2f %% of packets during report interval.",
+ 100.0f * fractionLost);
+
+ return OK;
+}
+
+status_t RTPSender::parseTSFB(const uint8_t *data, size_t size) {
+ if ((data[0] & 0x1f) != 1) {
+ return ERROR_UNSUPPORTED; // We only support NACK for now.
+ }
+
+ uint32_t srcId = U32_AT(&data[8]);
+ if (srcId != kSourceID) {
+ return ERROR_MALFORMED;
+ }
+
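+ // Each FCI entry of a generic NACK is a 16-bit packet ID (PID) followed by a
+ // 16-bit bitmask (BLP) flagging up to 16 subsequently lost packets.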
+ for (size_t i = 12; i < size; i += 4) {
+ uint16_t seqNo = U16_AT(&data[i]);
+ uint16_t blp = U16_AT(&data[i + 2]);
+
+ List<sp<ABuffer> >::iterator it = mHistory.begin();
+ bool foundSeqNo = false;
+ while (it != mHistory.end()) {
+ const sp<ABuffer> &buffer = *it;
+
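+ // The packet's RTP sequence number is stored in the low 16 bits of its
+ // int32Data().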
+ uint16_t bufferSeqNo = buffer->int32Data() & 0xffff;
+
+ bool retransmit = false;
+ if (bufferSeqNo == seqNo) {
+ retransmit = true;
+ } else if (blp != 0) {
+ for (size_t i = 0; i < 16; ++i) {
+ if ((blp & (1 << i))
+ && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) {
+ blp &= ~(1 << i);
+ retransmit = true;
+ }
+ }
+ }
+
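+ // Resend the packet from the history buffer; retransmissions are not stored
+ // in the history again.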
+ if (retransmit) {
+ ALOGV("retransmitting seqNo %d", bufferSeqNo);
+
+ CHECK_EQ((status_t)OK,
+ sendRTPPacket(buffer, false /* storeInHistory */));
+
+ if (bufferSeqNo == seqNo) {
+ foundSeqNo = true;
+ }
+
+ if (foundSeqNo && blp == 0) {
+ break;
+ }
+ }
+
+ ++it;
+ }
+
+ if (!foundSeqNo || blp != 0) {
+ ALOGI("Some sequence numbers were no longer available for "
+ "retransmission (seqNo = %d, foundSeqNo = %d, blp = 0x%04x)",
+ seqNo, foundSeqNo, blp);
+
+ if (!mHistory.empty()) {
+ int32_t earliest = (*mHistory.begin())->int32Data() & 0xffff;
+ int32_t latest = (*--mHistory.end())->int32Data() & 0xffff;
+
+ ALOGI("have seq numbers from %d - %d", earliest, latest);
+ }
+ }
+ }
+
+ return OK;
+}
+
+status_t RTPSender::parseAPP(const uint8_t *data, size_t size) {
+ return OK;
+}
+
+void RTPSender::notifyInitDone(status_t err) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatInitDone);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+void RTPSender::notifyError(status_t err) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
+void RTPSender::notifyNetworkStall(size_t numBytesQueued) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatNetworkStall);
+ notify->setSize("numBytesQueued", numBytesQueued);
+ notify->post();
+}
+
+} // namespace android
+
diff --git a/media/libstagefright/wifi-display/rtp/RTPSender.h b/media/libstagefright/wifi-display/rtp/RTPSender.h
new file mode 100644
index 0000000..7dc138a
--- /dev/null
+++ b/media/libstagefright/wifi-display/rtp/RTPSender.h
@@ -0,0 +1,120 @@
+/*
+ * Copyright 2013, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef RTP_SENDER_H_
+
+#define RTP_SENDER_H_
+
+#include "RTPBase.h"
+
+#include <media/stagefright/foundation/AHandler.h>
+
+namespace android {
+
+struct ABuffer;
+struct ANetworkSession;
+
+// An object of this class facilitates sending of media data over an RTP
+// channel. The channel is established over a UDP or TCP connection depending
+// on which "TransportMode" was chosen. In addition, different RTP packetization
+// schemes are supported, such as "Transport Stream Packets over RTP"
+// or "AVC/H.264 encapsulation as specified in RFC 3984 (non-interleaved mode)".
+struct RTPSender : public RTPBase, public AHandler {
+ enum {
+ kWhatInitDone,
+ kWhatError,
+ kWhatNetworkStall,
+ };
+ RTPSender(
+ const sp<ANetworkSession> &netSession,
+ const sp<AMessage> &notify);
+
+ status_t initAsync(
+ const char *remoteHost,
+ int32_t remoteRTPPort,
+ TransportMode rtpMode,
+ int32_t remoteRTCPPort,
+ TransportMode rtcpMode,
+ int32_t *outLocalRTPPort);
+
+ status_t queueBuffer(
+ const sp<ABuffer> &buffer,
+ uint8_t packetType,
+ PacketizationMode mode);
+
+protected:
+ virtual ~RTPSender();
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+
+private:
+ enum {
+ kWhatRTPNotify,
+ kWhatRTCPNotify,
+ };
+
+ enum {
+ kMaxNumTSPacketsPerRTPPacket = (kMaxUDPPacketSize - 12) / 188,
+ kMaxHistorySize = 1024,
+ kSourceID = 0xdeadbeef,
+ };
+
+ sp<ANetworkSession> mNetSession;
+ sp<AMessage> mNotify;
+ TransportMode mRTPMode;
+ TransportMode mRTCPMode;
+ int32_t mRTPSessionID;
+ int32_t mRTCPSessionID;
+ bool mRTPConnected;
+ bool mRTCPConnected;
+
+ uint64_t mLastNTPTime;
+ uint32_t mLastRTPTime;
+ uint32_t mNumRTPSent;
+ uint32_t mNumRTPOctetsSent;
+ uint32_t mNumSRsSent;
+
+ uint32_t mRTPSeqNo;
+
+ List<sp<ABuffer> > mHistory;
+ size_t mHistorySize;
+
+ static uint64_t GetNowNTP();
+
+ status_t queueRawPacket(const sp<ABuffer> &tsPackets, uint8_t packetType);
+ status_t queueTSPackets(const sp<ABuffer> &tsPackets, uint8_t packetType);
+ status_t queueAVCBuffer(const sp<ABuffer> &accessUnit, uint8_t packetType);
+
+ status_t sendRTPPacket(
+ const sp<ABuffer> &packet, bool storeInHistory,
+ bool timeValid = false, int64_t timeUs = -1ll);
+
+ void onNetNotify(bool isRTP, const sp<AMessage> &msg);
+
+ status_t onRTCPData(const sp<ABuffer> &data);
+ status_t parseReceiverReport(const uint8_t *data, size_t size);
+ status_t parseTSFB(const uint8_t *data, size_t size);
+ status_t parseAPP(const uint8_t *data, size_t size);
+
+ void notifyInitDone(status_t err);
+ void notifyError(status_t err);
+ void notifyNetworkStall(size_t numBytesQueued);
+
+ DISALLOW_EVIL_CONSTRUCTORS(RTPSender);
+};
+
+} // namespace android
+
+#endif // RTP_SENDER_H_
diff --git a/media/libstagefright/wifi-display/sink/LinearRegression.cpp b/media/libstagefright/wifi-display/sink/LinearRegression.cpp
deleted file mode 100644
index 8cfce37..0000000
--- a/media/libstagefright/wifi-display/sink/LinearRegression.cpp
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "LinearRegression"
-#include <utils/Log.h>
-
-#include "LinearRegression.h"
-
-#include <math.h>
-#include <string.h>
-
-namespace android {
-
-LinearRegression::LinearRegression(size_t historySize)
- : mHistorySize(historySize),
- mCount(0),
- mHistory(new Point[mHistorySize]),
- mSumX(0.0),
- mSumY(0.0) {
-}
-
-LinearRegression::~LinearRegression() {
- delete[] mHistory;
- mHistory = NULL;
-}
-
-void LinearRegression::addPoint(float x, float y) {
- if (mCount == mHistorySize) {
- const Point &oldest = mHistory[0];
-
- mSumX -= oldest.mX;
- mSumY -= oldest.mY;
-
- memmove(&mHistory[0], &mHistory[1], (mHistorySize - 1) * sizeof(Point));
- --mCount;
- }
-
- Point *newest = &mHistory[mCount++];
- newest->mX = x;
- newest->mY = y;
-
- mSumX += x;
- mSumY += y;
-}
-
-bool LinearRegression::approxLine(float *n1, float *n2, float *b) const {
- static const float kEpsilon = 1.0E-4;
-
- if (mCount < 2) {
- return false;
- }
-
- float sumX2 = 0.0f;
- float sumY2 = 0.0f;
- float sumXY = 0.0f;
-
- float meanX = mSumX / (float)mCount;
- float meanY = mSumY / (float)mCount;
-
- for (size_t i = 0; i < mCount; ++i) {
- const Point &p = mHistory[i];
-
- float x = p.mX - meanX;
- float y = p.mY - meanY;
-
- sumX2 += x * x;
- sumY2 += y * y;
- sumXY += x * y;
- }
-
- float T = sumX2 + sumY2;
- float D = sumX2 * sumY2 - sumXY * sumXY;
- float root = sqrt(T * T * 0.25 - D);
-
- float L1 = T * 0.5 - root;
-
- if (fabs(sumXY) > kEpsilon) {
- *n1 = 1.0;
- *n2 = (2.0 * L1 - sumX2) / sumXY;
-
- float mag = sqrt((*n1) * (*n1) + (*n2) * (*n2));
-
- *n1 /= mag;
- *n2 /= mag;
- } else {
- *n1 = 0.0;
- *n2 = 1.0;
- }
-
- *b = (*n1) * meanX + (*n2) * meanY;
-
- return true;
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/wifi-display/sink/LinearRegression.h b/media/libstagefright/wifi-display/sink/LinearRegression.h
deleted file mode 100644
index ca6f5a1..0000000
--- a/media/libstagefright/wifi-display/sink/LinearRegression.h
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef LINEAR_REGRESSION_H_
-
-#define LINEAR_REGRESSION_H_
-
-#include <sys/types.h>
-#include <media/stagefright/foundation/ABase.h>
-
-namespace android {
-
-// Helper class to fit a line to a set of points minimizing the sum of
-// squared (orthogonal) distances from line to individual points.
-struct LinearRegression {
- LinearRegression(size_t historySize);
- ~LinearRegression();
-
- void addPoint(float x, float y);
-
- bool approxLine(float *n1, float *n2, float *b) const;
-
-private:
- struct Point {
- float mX, mY;
- };
-
- size_t mHistorySize;
- size_t mCount;
- Point *mHistory;
-
- float mSumX, mSumY;
-
- DISALLOW_EVIL_CONSTRUCTORS(LinearRegression);
-};
-
-} // namespace android
-
-#endif // LINEAR_REGRESSION_H_
diff --git a/media/libstagefright/wifi-display/sink/RTPSink.cpp b/media/libstagefright/wifi-display/sink/RTPSink.cpp
deleted file mode 100644
index 0918034..0000000
--- a/media/libstagefright/wifi-display/sink/RTPSink.cpp
+++ /dev/null
@@ -1,806 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "RTPSink"
-#include <utils/Log.h>
-
-#include "RTPSink.h"
-
-#include "ANetworkSession.h"
-#include "TunnelRenderer.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-struct RTPSink::Source : public RefBase {
- Source(uint16_t seq, const sp<ABuffer> &buffer,
- const sp<AMessage> queueBufferMsg);
-
- bool updateSeq(uint16_t seq, const sp<ABuffer> &buffer);
-
- void addReportBlock(uint32_t ssrc, const sp<ABuffer> &buf);
-
-protected:
- virtual ~Source();
-
-private:
- static const uint32_t kMinSequential = 2;
- static const uint32_t kMaxDropout = 3000;
- static const uint32_t kMaxMisorder = 100;
- static const uint32_t kRTPSeqMod = 1u << 16;
-
- sp<AMessage> mQueueBufferMsg;
-
- uint16_t mMaxSeq;
- uint32_t mCycles;
- uint32_t mBaseSeq;
- uint32_t mBadSeq;
- uint32_t mProbation;
- uint32_t mReceived;
- uint32_t mExpectedPrior;
- uint32_t mReceivedPrior;
-
- void initSeq(uint16_t seq);
- void queuePacket(const sp<ABuffer> &buffer);
-
- DISALLOW_EVIL_CONSTRUCTORS(Source);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPSink::Source::Source(
- uint16_t seq, const sp<ABuffer> &buffer,
- const sp<AMessage> queueBufferMsg)
- : mQueueBufferMsg(queueBufferMsg),
- mProbation(kMinSequential) {
- initSeq(seq);
- mMaxSeq = seq - 1;
-
- buffer->setInt32Data(mCycles | seq);
- queuePacket(buffer);
-}
-
-RTPSink::Source::~Source() {
-}
-
-void RTPSink::Source::initSeq(uint16_t seq) {
- mMaxSeq = seq;
- mCycles = 0;
- mBaseSeq = seq;
- mBadSeq = kRTPSeqMod + 1;
- mReceived = 0;
- mExpectedPrior = 0;
- mReceivedPrior = 0;
-}
-
-bool RTPSink::Source::updateSeq(uint16_t seq, const sp<ABuffer> &buffer) {
- uint16_t udelta = seq - mMaxSeq;
-
- if (mProbation) {
- // Startup phase
-
- if (seq == mMaxSeq + 1) {
- buffer->setInt32Data(mCycles | seq);
- queuePacket(buffer);
-
- --mProbation;
- mMaxSeq = seq;
- if (mProbation == 0) {
- initSeq(seq);
- ++mReceived;
-
- return true;
- }
- } else {
- // Packet out of sequence, restart startup phase
-
- mProbation = kMinSequential - 1;
- mMaxSeq = seq;
-
-#if 0
- mPackets.clear();
- mTotalBytesQueued = 0;
- ALOGI("XXX cleared packets");
-#endif
-
- buffer->setInt32Data(mCycles | seq);
- queuePacket(buffer);
- }
-
- return false;
- }
-
- if (udelta < kMaxDropout) {
- // In order, with permissible gap.
-
- if (seq < mMaxSeq) {
- // Sequence number wrapped - count another 64K cycle
- mCycles += kRTPSeqMod;
- }
-
- mMaxSeq = seq;
- } else if (udelta <= kRTPSeqMod - kMaxMisorder) {
- // The sequence number made a very large jump
-
- if (seq == mBadSeq) {
- // Two sequential packets -- assume that the other side
- // restarted without telling us so just re-sync
- // (i.e. pretend this was the first packet)
-
- initSeq(seq);
- } else {
- mBadSeq = (seq + 1) & (kRTPSeqMod - 1);
-
- return false;
- }
- } else {
- // Duplicate or reordered packet.
- }
-
- ++mReceived;
-
- buffer->setInt32Data(mCycles | seq);
- queuePacket(buffer);
-
- return true;
-}
-
-void RTPSink::Source::queuePacket(const sp<ABuffer> &buffer) {
- sp<AMessage> msg = mQueueBufferMsg->dup();
- msg->setBuffer("buffer", buffer);
- msg->post();
-}
-
-void RTPSink::Source::addReportBlock(
- uint32_t ssrc, const sp<ABuffer> &buf) {
- uint32_t extMaxSeq = mMaxSeq | mCycles;
- uint32_t expected = extMaxSeq - mBaseSeq + 1;
-
- int64_t lost = (int64_t)expected - (int64_t)mReceived;
- if (lost > 0x7fffff) {
- lost = 0x7fffff;
- } else if (lost < -0x800000) {
- lost = -0x800000;
- }
-
- uint32_t expectedInterval = expected - mExpectedPrior;
- mExpectedPrior = expected;
-
- uint32_t receivedInterval = mReceived - mReceivedPrior;
- mReceivedPrior = mReceived;
-
- int64_t lostInterval = expectedInterval - receivedInterval;
-
- uint8_t fractionLost;
- if (expectedInterval == 0 || lostInterval <=0) {
- fractionLost = 0;
- } else {
- fractionLost = (lostInterval << 8) / expectedInterval;
- }
-
- uint8_t *ptr = buf->data() + buf->size();
-
- ptr[0] = ssrc >> 24;
- ptr[1] = (ssrc >> 16) & 0xff;
- ptr[2] = (ssrc >> 8) & 0xff;
- ptr[3] = ssrc & 0xff;
-
- ptr[4] = fractionLost;
-
- ptr[5] = (lost >> 16) & 0xff;
- ptr[6] = (lost >> 8) & 0xff;
- ptr[7] = lost & 0xff;
-
- ptr[8] = extMaxSeq >> 24;
- ptr[9] = (extMaxSeq >> 16) & 0xff;
- ptr[10] = (extMaxSeq >> 8) & 0xff;
- ptr[11] = extMaxSeq & 0xff;
-
- // XXX TODO:
-
- ptr[12] = 0x00; // interarrival jitter
- ptr[13] = 0x00;
- ptr[14] = 0x00;
- ptr[15] = 0x00;
-
- ptr[16] = 0x00; // last SR
- ptr[17] = 0x00;
- ptr[18] = 0x00;
- ptr[19] = 0x00;
-
- ptr[20] = 0x00; // delay since last SR
- ptr[21] = 0x00;
- ptr[22] = 0x00;
- ptr[23] = 0x00;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-RTPSink::RTPSink(
- const sp<ANetworkSession> &netSession,
- const sp<ISurfaceTexture> &surfaceTex)
- : mNetSession(netSession),
- mSurfaceTex(surfaceTex),
- mRTPPort(0),
- mRTPSessionID(0),
- mRTCPSessionID(0),
- mFirstArrivalTimeUs(-1ll),
- mNumPacketsReceived(0ll),
- mRegression(1000),
- mMaxDelayMs(-1ll) {
-}
-
-RTPSink::~RTPSink() {
- if (mRTCPSessionID != 0) {
- mNetSession->destroySession(mRTCPSessionID);
- }
-
- if (mRTPSessionID != 0) {
- mNetSession->destroySession(mRTPSessionID);
- }
-}
-
-status_t RTPSink::init(bool useTCPInterleaving) {
- if (useTCPInterleaving) {
- return OK;
- }
-
- int clientRtp;
-
- sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
- sp<AMessage> rtcpNotify = new AMessage(kWhatRTCPNotify, id());
- for (clientRtp = 15550;; clientRtp += 2) {
- int32_t rtpSession;
- status_t err = mNetSession->createUDPSession(
- clientRtp, rtpNotify, &rtpSession);
-
- if (err != OK) {
- ALOGI("failed to create RTP socket on port %d", clientRtp);
- continue;
- }
-
- int32_t rtcpSession;
- err = mNetSession->createUDPSession(
- clientRtp + 1, rtcpNotify, &rtcpSession);
-
- if (err == OK) {
- mRTPPort = clientRtp;
- mRTPSessionID = rtpSession;
- mRTCPSessionID = rtcpSession;
- break;
- }
-
- ALOGI("failed to create RTCP socket on port %d", clientRtp + 1);
- mNetSession->destroySession(rtpSession);
- }
-
- if (mRTPPort == 0) {
- return UNKNOWN_ERROR;
- }
-
- return OK;
-}
-
-int32_t RTPSink::getRTPPort() const {
- return mRTPPort;
-}
-
-void RTPSink::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatRTPNotify:
- case kWhatRTCPNotify:
- {
- int32_t reason;
- CHECK(msg->findInt32("reason", &reason));
-
- switch (reason) {
- case ANetworkSession::kWhatError:
- {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- int32_t err;
- CHECK(msg->findInt32("err", &err));
-
- AString detail;
- CHECK(msg->findString("detail", &detail));
-
- ALOGE("An error occurred in session %d (%d, '%s/%s').",
- sessionID,
- err,
- detail.c_str(),
- strerror(-err));
-
- mNetSession->destroySession(sessionID);
-
- if (sessionID == mRTPSessionID) {
- mRTPSessionID = 0;
- } else if (sessionID == mRTCPSessionID) {
- mRTCPSessionID = 0;
- }
- break;
- }
-
- case ANetworkSession::kWhatDatagram:
- {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- sp<ABuffer> data;
- CHECK(msg->findBuffer("data", &data));
-
- status_t err;
- if (msg->what() == kWhatRTPNotify) {
- err = parseRTP(data);
- } else {
- err = parseRTCP(data);
- }
- break;
- }
-
- default:
- TRESPASS();
- }
- break;
- }
-
- case kWhatSendRR:
- {
- onSendRR();
- break;
- }
-
- case kWhatPacketLost:
- {
- onPacketLost(msg);
- break;
- }
-
- case kWhatInject:
- {
- int32_t isRTP;
- CHECK(msg->findInt32("isRTP", &isRTP));
-
- sp<ABuffer> buffer;
- CHECK(msg->findBuffer("buffer", &buffer));
-
- status_t err;
- if (isRTP) {
- err = parseRTP(buffer);
- } else {
- err = parseRTCP(buffer);
- }
- break;
- }
-
- default:
- TRESPASS();
- }
-}
-
-status_t RTPSink::injectPacket(bool isRTP, const sp<ABuffer> &buffer) {
- sp<AMessage> msg = new AMessage(kWhatInject, id());
- msg->setInt32("isRTP", isRTP);
- msg->setBuffer("buffer", buffer);
- msg->post();
-
- return OK;
-}
-
-status_t RTPSink::parseRTP(const sp<ABuffer> &buffer) {
- size_t size = buffer->size();
- if (size < 12) {
- // Too short to be a valid RTP header.
- return ERROR_MALFORMED;
- }
-
- const uint8_t *data = buffer->data();
-
- if ((data[0] >> 6) != 2) {
- // Unsupported version.
- return ERROR_UNSUPPORTED;
- }
-
- if (data[0] & 0x20) {
- // Padding present.
-
- size_t paddingLength = data[size - 1];
-
- if (paddingLength + 12 > size) {
- // If we removed this much padding we'd end up with something
- // that's too short to be a valid RTP header.
- return ERROR_MALFORMED;
- }
-
- size -= paddingLength;
- }
-
- int numCSRCs = data[0] & 0x0f;
-
- size_t payloadOffset = 12 + 4 * numCSRCs;
-
- if (size < payloadOffset) {
- // Not enough data to fit the basic header and all the CSRC entries.
- return ERROR_MALFORMED;
- }
-
- if (data[0] & 0x10) {
- // Header eXtension present.
-
- if (size < payloadOffset + 4) {
- // Not enough data to fit the basic header, all CSRC entries
- // and the first 4 bytes of the extension header.
-
- return ERROR_MALFORMED;
- }
-
- const uint8_t *extensionData = &data[payloadOffset];
-
- size_t extensionLength =
- 4 * (extensionData[2] << 8 | extensionData[3]);
-
- if (size < payloadOffset + 4 + extensionLength) {
- return ERROR_MALFORMED;
- }
-
- payloadOffset += 4 + extensionLength;
- }
-
- uint32_t srcId = U32_AT(&data[8]);
- uint32_t rtpTime = U32_AT(&data[4]);
- uint16_t seqNo = U16_AT(&data[2]);
-
- int64_t arrivalTimeUs;
- CHECK(buffer->meta()->findInt64("arrivalTimeUs", &arrivalTimeUs));
-
- if (mFirstArrivalTimeUs < 0ll) {
- mFirstArrivalTimeUs = arrivalTimeUs;
- }
- arrivalTimeUs -= mFirstArrivalTimeUs;
-
- int64_t arrivalTimeMedia = (arrivalTimeUs * 9ll) / 100ll;
-
- ALOGV("seqNo: %d, SSRC 0x%08x, diff %lld",
- seqNo, srcId, rtpTime - arrivalTimeMedia);
-
- mRegression.addPoint((float)rtpTime, (float)arrivalTimeMedia);
-
- ++mNumPacketsReceived;
-
- float n1, n2, b;
- if (mRegression.approxLine(&n1, &n2, &b)) {
- ALOGV("Line %lld: %.2f %.2f %.2f, slope %.2f",
- mNumPacketsReceived, n1, n2, b, -n1 / n2);
-
- float expectedArrivalTimeMedia = (b - n1 * (float)rtpTime) / n2;
- float latenessMs = (arrivalTimeMedia - expectedArrivalTimeMedia) / 90.0;
-
- if (mMaxDelayMs < 0ll || latenessMs > mMaxDelayMs) {
- mMaxDelayMs = latenessMs;
- ALOGI("packet was %.2f ms late", latenessMs);
- }
- }
-
- sp<AMessage> meta = buffer->meta();
- meta->setInt32("ssrc", srcId);
- meta->setInt32("rtp-time", rtpTime);
- meta->setInt32("PT", data[1] & 0x7f);
- meta->setInt32("M", data[1] >> 7);
-
- buffer->setRange(payloadOffset, size - payloadOffset);
-
- ssize_t index = mSources.indexOfKey(srcId);
- if (index < 0) {
- if (mRenderer == NULL) {
- sp<AMessage> notifyLost = new AMessage(kWhatPacketLost, id());
- notifyLost->setInt32("ssrc", srcId);
-
- mRenderer = new TunnelRenderer(notifyLost, mSurfaceTex);
- looper()->registerHandler(mRenderer);
- }
-
- sp<AMessage> queueBufferMsg =
- new AMessage(TunnelRenderer::kWhatQueueBuffer, mRenderer->id());
-
- sp<Source> source = new Source(seqNo, buffer, queueBufferMsg);
- mSources.add(srcId, source);
- } else {
- mSources.valueAt(index)->updateSeq(seqNo, buffer);
- }
-
- return OK;
-}
-
-status_t RTPSink::parseRTCP(const sp<ABuffer> &buffer) {
- const uint8_t *data = buffer->data();
- size_t size = buffer->size();
-
- while (size > 0) {
- if (size < 8) {
- // Too short to be a valid RTCP header
- return ERROR_MALFORMED;
- }
-
- if ((data[0] >> 6) != 2) {
- // Unsupported version.
- return ERROR_UNSUPPORTED;
- }
-
- if (data[0] & 0x20) {
- // Padding present.
-
- size_t paddingLength = data[size - 1];
-
- if (paddingLength + 12 > size) {
- // If we removed this much padding we'd end up with something
- // that's too short to be a valid RTP header.
- return ERROR_MALFORMED;
- }
-
- size -= paddingLength;
- }
-
- size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4;
-
- if (size < headerLength) {
- // Only received a partial packet?
- return ERROR_MALFORMED;
- }
-
- switch (data[1]) {
- case 200:
- {
- parseSR(data, headerLength);
- break;
- }
-
- case 201: // RR
- case 202: // SDES
- case 204: // APP
- break;
-
- case 205: // TSFB (transport layer specific feedback)
- case 206: // PSFB (payload specific feedback)
- // hexdump(data, headerLength);
- break;
-
- case 203:
- {
- parseBYE(data, headerLength);
- break;
- }
-
- default:
- {
- ALOGW("Unknown RTCP packet type %u of size %d",
- (unsigned)data[1], headerLength);
- break;
- }
- }
-
- data += headerLength;
- size -= headerLength;
- }
-
- return OK;
-}
-
-status_t RTPSink::parseBYE(const uint8_t *data, size_t size) {
- size_t SC = data[0] & 0x3f;
-
- if (SC == 0 || size < (4 + SC * 4)) {
- // Packet too short for the minimal BYE header.
- return ERROR_MALFORMED;
- }
-
- uint32_t id = U32_AT(&data[4]);
-
- return OK;
-}
-
-status_t RTPSink::parseSR(const uint8_t *data, size_t size) {
- size_t RC = data[0] & 0x1f;
-
- if (size < (7 + RC * 6) * 4) {
- // Packet too short for the minimal SR header.
- return ERROR_MALFORMED;
- }
-
- uint32_t id = U32_AT(&data[4]);
- uint64_t ntpTime = U64_AT(&data[8]);
- uint32_t rtpTime = U32_AT(&data[16]);
-
- ALOGV("SR: ssrc 0x%08x, ntpTime 0x%016llx, rtpTime 0x%08x",
- id, ntpTime, rtpTime);
-
- return OK;
-}
-
-status_t RTPSink::connect(
- const char *host, int32_t remoteRtpPort, int32_t remoteRtcpPort) {
- ALOGI("connecting RTP/RTCP sockets to %s:{%d,%d}",
- host, remoteRtpPort, remoteRtcpPort);
-
- status_t err =
- mNetSession->connectUDPSession(mRTPSessionID, host, remoteRtpPort);
-
- if (err != OK) {
- return err;
- }
-
- err = mNetSession->connectUDPSession(mRTCPSessionID, host, remoteRtcpPort);
-
- if (err != OK) {
- return err;
- }
-
-#if 0
- sp<ABuffer> buf = new ABuffer(1500);
- memset(buf->data(), 0, buf->size());
-
- mNetSession->sendRequest(
- mRTPSessionID, buf->data(), buf->size());
-
- mNetSession->sendRequest(
- mRTCPSessionID, buf->data(), buf->size());
-#endif
-
- scheduleSendRR();
-
- return OK;
-}
-
-void RTPSink::scheduleSendRR() {
- (new AMessage(kWhatSendRR, id()))->post(2000000ll);
-}
-
-void RTPSink::addSDES(const sp<ABuffer> &buffer) {
- uint8_t *data = buffer->data() + buffer->size();
- data[0] = 0x80 | 1;
- data[1] = 202; // SDES
- data[4] = 0xde; // SSRC
- data[5] = 0xad;
- data[6] = 0xbe;
- data[7] = 0xef;
-
- size_t offset = 8;
-
- data[offset++] = 1; // CNAME
-
- AString cname = "stagefright@somewhere";
- data[offset++] = cname.size();
-
- memcpy(&data[offset], cname.c_str(), cname.size());
- offset += cname.size();
-
- data[offset++] = 6; // TOOL
-
- AString tool = "stagefright/1.0";
- data[offset++] = tool.size();
-
- memcpy(&data[offset], tool.c_str(), tool.size());
- offset += tool.size();
-
- data[offset++] = 0;
-
- if ((offset % 4) > 0) {
- size_t count = 4 - (offset % 4);
- switch (count) {
- case 3:
- data[offset++] = 0;
- case 2:
- data[offset++] = 0;
- case 1:
- data[offset++] = 0;
- }
- }
-
- size_t numWords = (offset / 4) - 1;
- data[2] = numWords >> 8;
- data[3] = numWords & 0xff;
-
- buffer->setRange(buffer->offset(), buffer->size() + offset);
-}
-
-void RTPSink::onSendRR() {
- sp<ABuffer> buf = new ABuffer(1500);
- buf->setRange(0, 0);
-
- uint8_t *ptr = buf->data();
- ptr[0] = 0x80 | 0;
- ptr[1] = 201; // RR
- ptr[2] = 0;
- ptr[3] = 1;
- ptr[4] = 0xde; // SSRC
- ptr[5] = 0xad;
- ptr[6] = 0xbe;
- ptr[7] = 0xef;
-
- buf->setRange(0, 8);
-
- size_t numReportBlocks = 0;
- for (size_t i = 0; i < mSources.size(); ++i) {
- uint32_t ssrc = mSources.keyAt(i);
- sp<Source> source = mSources.valueAt(i);
-
- if (numReportBlocks > 31 || buf->size() + 24 > buf->capacity()) {
- // Cannot fit another report block.
- break;
- }
-
- source->addReportBlock(ssrc, buf);
- ++numReportBlocks;
- }
-
- ptr[0] |= numReportBlocks; // 5 bit
-
- size_t sizeInWordsMinus1 = 1 + 6 * numReportBlocks;
- ptr[2] = sizeInWordsMinus1 >> 8;
- ptr[3] = sizeInWordsMinus1 & 0xff;
-
- buf->setRange(0, (sizeInWordsMinus1 + 1) * 4);
-
- addSDES(buf);
-
- mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-
- scheduleSendRR();
-}
-
-void RTPSink::onPacketLost(const sp<AMessage> &msg) {
- uint32_t srcId;
- CHECK(msg->findInt32("ssrc", (int32_t *)&srcId));
-
- int32_t seqNo;
- CHECK(msg->findInt32("seqNo", &seqNo));
-
- int32_t blp = 0;
-
- sp<ABuffer> buf = new ABuffer(1500);
- buf->setRange(0, 0);
-
- uint8_t *ptr = buf->data();
- ptr[0] = 0x80 | 1; // generic NACK
- ptr[1] = 205; // RTPFB
- ptr[2] = 0;
- ptr[3] = 3;
- ptr[4] = 0xde; // sender SSRC
- ptr[5] = 0xad;
- ptr[6] = 0xbe;
- ptr[7] = 0xef;
- ptr[8] = (srcId >> 24) & 0xff;
- ptr[9] = (srcId >> 16) & 0xff;
- ptr[10] = (srcId >> 8) & 0xff;
- ptr[11] = (srcId & 0xff);
- ptr[12] = (seqNo >> 8) & 0xff;
- ptr[13] = (seqNo & 0xff);
- ptr[14] = (blp >> 8) & 0xff;
- ptr[15] = (blp & 0xff);
-
- buf->setRange(0, 16);
-
- mNetSession->sendRequest(mRTCPSessionID, buf->data(), buf->size());
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/wifi-display/sink/RTPSink.h b/media/libstagefright/wifi-display/sink/RTPSink.h
deleted file mode 100644
index a1d127d..0000000
--- a/media/libstagefright/wifi-display/sink/RTPSink.h
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef RTP_SINK_H_
-
-#define RTP_SINK_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-#include "LinearRegression.h"
-
-#include <gui/Surface.h>
-
-namespace android {
-
-struct ABuffer;
-struct ANetworkSession;
-struct TunnelRenderer;
-
-// Creates a pair of sockets for RTP/RTCP traffic, instantiates a renderer
-// for incoming transport stream data and occasionally sends statistics over
-// the RTCP channel.
-struct RTPSink : public AHandler {
- RTPSink(const sp<ANetworkSession> &netSession,
- const sp<ISurfaceTexture> &surfaceTex);
-
- // If TCP interleaving is used, no UDP sockets are created, instead
- // incoming RTP/RTCP packets (arriving on the RTSP control connection)
- // are manually injected by WifiDisplaySink.
- status_t init(bool useTCPInterleaving);
-
- status_t connect(
- const char *host, int32_t remoteRtpPort, int32_t remoteRtcpPort);
-
- int32_t getRTPPort() const;
-
- status_t injectPacket(bool isRTP, const sp<ABuffer> &buffer);
-
-protected:
- virtual void onMessageReceived(const sp<AMessage> &msg);
- virtual ~RTPSink();
-
-private:
- enum {
- kWhatRTPNotify,
- kWhatRTCPNotify,
- kWhatSendRR,
- kWhatPacketLost,
- kWhatInject,
- };
-
- struct Source;
- struct StreamSource;
-
- sp<ANetworkSession> mNetSession;
- sp<ISurfaceTexture> mSurfaceTex;
- KeyedVector<uint32_t, sp<Source> > mSources;
-
- int32_t mRTPPort;
- int32_t mRTPSessionID;
- int32_t mRTCPSessionID;
-
- int64_t mFirstArrivalTimeUs;
- int64_t mNumPacketsReceived;
- LinearRegression mRegression;
- int64_t mMaxDelayMs;
-
- sp<TunnelRenderer> mRenderer;
-
- status_t parseRTP(const sp<ABuffer> &buffer);
- status_t parseRTCP(const sp<ABuffer> &buffer);
- status_t parseBYE(const uint8_t *data, size_t size);
- status_t parseSR(const uint8_t *data, size_t size);
-
- void addSDES(const sp<ABuffer> &buffer);
- void onSendRR();
- void onPacketLost(const sp<AMessage> &msg);
- void scheduleSendRR();
-
- DISALLOW_EVIL_CONSTRUCTORS(RTPSink);
-};
-
-} // namespace android
-
-#endif // RTP_SINK_H_
diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp b/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp
deleted file mode 100644
index bc35aef..0000000
--- a/media/libstagefright/wifi-display/sink/TunnelRenderer.cpp
+++ /dev/null
@@ -1,396 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "TunnelRenderer"
-#include <utils/Log.h>
-
-#include "TunnelRenderer.h"
-
-#include "ATSParser.h"
-
-#include <binder/IMemory.h>
-#include <binder/IServiceManager.h>
-#include <gui/SurfaceComposerClient.h>
-#include <media/IMediaPlayerService.h>
-#include <media/IStreamSource.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <ui/DisplayInfo.h>
-
-namespace android {
-
-struct TunnelRenderer::PlayerClient : public BnMediaPlayerClient {
- PlayerClient() {}
-
- virtual void notify(int msg, int ext1, int ext2, const Parcel *obj) {
- ALOGI("notify %d, %d, %d", msg, ext1, ext2);
- }
-
-protected:
- virtual ~PlayerClient() {}
-
-private:
- DISALLOW_EVIL_CONSTRUCTORS(PlayerClient);
-};
-
-struct TunnelRenderer::StreamSource : public BnStreamSource {
- StreamSource(TunnelRenderer *owner);
-
- virtual void setListener(const sp<IStreamListener> &listener);
- virtual void setBuffers(const Vector<sp<IMemory> > &buffers);
-
- virtual void onBufferAvailable(size_t index);
-
- virtual uint32_t flags() const;
-
- void doSomeWork();
-
-protected:
- virtual ~StreamSource();
-
-private:
- mutable Mutex mLock;
-
- TunnelRenderer *mOwner;
-
- sp<IStreamListener> mListener;
-
- Vector<sp<IMemory> > mBuffers;
- List<size_t> mIndicesAvailable;
-
- size_t mNumDeqeued;
-
- DISALLOW_EVIL_CONSTRUCTORS(StreamSource);
-};
-
-////////////////////////////////////////////////////////////////////////////////
-
-TunnelRenderer::StreamSource::StreamSource(TunnelRenderer *owner)
- : mOwner(owner),
- mNumDeqeued(0) {
-}
-
-TunnelRenderer::StreamSource::~StreamSource() {
-}
-
-void TunnelRenderer::StreamSource::setListener(
- const sp<IStreamListener> &listener) {
- mListener = listener;
-}
-
-void TunnelRenderer::StreamSource::setBuffers(
- const Vector<sp<IMemory> > &buffers) {
- mBuffers = buffers;
-}
-
-void TunnelRenderer::StreamSource::onBufferAvailable(size_t index) {
- CHECK_LT(index, mBuffers.size());
-
- {
- Mutex::Autolock autoLock(mLock);
- mIndicesAvailable.push_back(index);
- }
-
- doSomeWork();
-}
-
-uint32_t TunnelRenderer::StreamSource::flags() const {
- return kFlagAlignedVideoData;
-}
-
-void TunnelRenderer::StreamSource::doSomeWork() {
- Mutex::Autolock autoLock(mLock);
-
- while (!mIndicesAvailable.empty()) {
- sp<ABuffer> srcBuffer = mOwner->dequeueBuffer();
- if (srcBuffer == NULL) {
- break;
- }
-
- ++mNumDeqeued;
-
- if (mNumDeqeued == 1) {
- ALOGI("fixing real time now.");
-
- sp<AMessage> extra = new AMessage;
-
- extra->setInt32(
- IStreamListener::kKeyDiscontinuityMask,
- ATSParser::DISCONTINUITY_ABSOLUTE_TIME);
-
- extra->setInt64("timeUs", ALooper::GetNowUs());
-
- mListener->issueCommand(
- IStreamListener::DISCONTINUITY,
- false /* synchronous */,
- extra);
- }
-
- ALOGV("dequeue TS packet of size %d", srcBuffer->size());
-
- size_t index = *mIndicesAvailable.begin();
- mIndicesAvailable.erase(mIndicesAvailable.begin());
-
- sp<IMemory> mem = mBuffers.itemAt(index);
- CHECK_LE(srcBuffer->size(), mem->size());
- CHECK_EQ((srcBuffer->size() % 188), 0u);
-
- memcpy(mem->pointer(), srcBuffer->data(), srcBuffer->size());
- mListener->queueBuffer(index, srcBuffer->size());
- }
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-TunnelRenderer::TunnelRenderer(
- const sp<AMessage> &notifyLost,
- const sp<ISurfaceTexture> &surfaceTex)
- : mNotifyLost(notifyLost),
- mSurfaceTex(surfaceTex),
- mTotalBytesQueued(0ll),
- mLastDequeuedExtSeqNo(-1),
- mFirstFailedAttemptUs(-1ll),
- mRequestedRetransmission(false) {
-}
-
-TunnelRenderer::~TunnelRenderer() {
- destroyPlayer();
-}
-
-void TunnelRenderer::queueBuffer(const sp<ABuffer> &buffer) {
- Mutex::Autolock autoLock(mLock);
-
- mTotalBytesQueued += buffer->size();
-
- if (mPackets.empty()) {
- mPackets.push_back(buffer);
- return;
- }
-
- int32_t newExtendedSeqNo = buffer->int32Data();
-
- List<sp<ABuffer> >::iterator firstIt = mPackets.begin();
- List<sp<ABuffer> >::iterator it = --mPackets.end();
- for (;;) {
- int32_t extendedSeqNo = (*it)->int32Data();
-
- if (extendedSeqNo == newExtendedSeqNo) {
- // Duplicate packet.
- return;
- }
-
- if (extendedSeqNo < newExtendedSeqNo) {
- // Insert new packet after the one at "it".
- mPackets.insert(++it, buffer);
- return;
- }
-
- if (it == firstIt) {
- // Insert new packet before the first existing one.
- mPackets.insert(it, buffer);
- return;
- }
-
- --it;
- }
-}
-
-sp<ABuffer> TunnelRenderer::dequeueBuffer() {
- Mutex::Autolock autoLock(mLock);
-
- sp<ABuffer> buffer;
- int32_t extSeqNo;
- while (!mPackets.empty()) {
- buffer = *mPackets.begin();
- extSeqNo = buffer->int32Data();
-
- if (mLastDequeuedExtSeqNo < 0 || extSeqNo > mLastDequeuedExtSeqNo) {
- break;
- }
-
- // This is a retransmission of a packet we've already returned.
-
- mTotalBytesQueued -= buffer->size();
- buffer.clear();
- extSeqNo = -1;
-
- mPackets.erase(mPackets.begin());
- }
-
- if (mPackets.empty()) {
- if (mFirstFailedAttemptUs < 0ll) {
- mFirstFailedAttemptUs = ALooper::GetNowUs();
- mRequestedRetransmission = false;
- } else {
- ALOGV("no packets available for %.2f secs",
- (ALooper::GetNowUs() - mFirstFailedAttemptUs) / 1E6);
- }
-
- return NULL;
- }
-
- if (mLastDequeuedExtSeqNo < 0 || extSeqNo == mLastDequeuedExtSeqNo + 1) {
- if (mRequestedRetransmission) {
- ALOGI("Recovered after requesting retransmission of %d",
- extSeqNo);
- }
-
- mLastDequeuedExtSeqNo = extSeqNo;
- mFirstFailedAttemptUs = -1ll;
- mRequestedRetransmission = false;
-
- mPackets.erase(mPackets.begin());
-
- mTotalBytesQueued -= buffer->size();
-
- return buffer;
- }
-
- if (mFirstFailedAttemptUs < 0ll) {
- mFirstFailedAttemptUs = ALooper::GetNowUs();
-
- ALOGI("failed to get the correct packet the first time.");
- return NULL;
- }
-
- if (mFirstFailedAttemptUs + 50000ll > ALooper::GetNowUs()) {
- // We're willing to wait a little while to get the right packet.
-
- if (!mRequestedRetransmission) {
- ALOGI("requesting retransmission of seqNo %d",
- (mLastDequeuedExtSeqNo + 1) & 0xffff);
-
- sp<AMessage> notify = mNotifyLost->dup();
- notify->setInt32("seqNo", (mLastDequeuedExtSeqNo + 1) & 0xffff);
- notify->post();
-
- mRequestedRetransmission = true;
- } else {
- ALOGI("still waiting for the correct packet to arrive.");
- }
-
- return NULL;
- }
-
- ALOGI("dropping packet. extSeqNo %d didn't arrive in time",
- mLastDequeuedExtSeqNo + 1);
-
- // Permanent failure, we never received the packet.
- mLastDequeuedExtSeqNo = extSeqNo;
- mFirstFailedAttemptUs = -1ll;
- mRequestedRetransmission = false;
-
- mTotalBytesQueued -= buffer->size();
-
- mPackets.erase(mPackets.begin());
-
- return buffer;
-}
-
-void TunnelRenderer::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatQueueBuffer:
- {
- sp<ABuffer> buffer;
- CHECK(msg->findBuffer("buffer", &buffer));
-
- queueBuffer(buffer);
-
- if (mStreamSource == NULL) {
- if (mTotalBytesQueued > 0ll) {
- initPlayer();
- } else {
- ALOGI("Have %lld bytes queued...", mTotalBytesQueued);
- }
- } else {
- mStreamSource->doSomeWork();
- }
- break;
- }
-
- default:
- TRESPASS();
- }
-}
-
-void TunnelRenderer::initPlayer() {
- if (mSurfaceTex == NULL) {
- mComposerClient = new SurfaceComposerClient;
- CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
-
- DisplayInfo info;
- SurfaceComposerClient::getDisplayInfo(0, &info);
- ssize_t displayWidth = info.w;
- ssize_t displayHeight = info.h;
-
- mSurfaceControl =
- mComposerClient->createSurface(
- String8("A Surface"),
- displayWidth,
- displayHeight,
- PIXEL_FORMAT_RGB_565,
- 0);
-
- CHECK(mSurfaceControl != NULL);
- CHECK(mSurfaceControl->isValid());
-
- SurfaceComposerClient::openGlobalTransaction();
- CHECK_EQ(mSurfaceControl->setLayer(INT_MAX), (status_t)OK);
- CHECK_EQ(mSurfaceControl->show(), (status_t)OK);
- SurfaceComposerClient::closeGlobalTransaction();
-
- mSurface = mSurfaceControl->getSurface();
- CHECK(mSurface != NULL);
- }
-
- sp<IServiceManager> sm = defaultServiceManager();
- sp<IBinder> binder = sm->getService(String16("media.player"));
- sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
- CHECK(service.get() != NULL);
-
- mStreamSource = new StreamSource(this);
-
- mPlayerClient = new PlayerClient;
-
- mPlayer = service->create(getpid(), mPlayerClient, 0);
- CHECK(mPlayer != NULL);
- CHECK_EQ(mPlayer->setDataSource(mStreamSource), (status_t)OK);
-
- mPlayer->setVideoSurfaceTexture(
- mSurfaceTex != NULL ? mSurfaceTex : mSurface->getSurfaceTexture());
-
- mPlayer->start();
-}
-
-void TunnelRenderer::destroyPlayer() {
- mStreamSource.clear();
-
- mPlayer->stop();
- mPlayer.clear();
-
- if (mSurfaceTex == NULL) {
- mSurface.clear();
- mSurfaceControl.clear();
-
- mComposerClient->dispose();
- mComposerClient.clear();
- }
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/wifi-display/sink/TunnelRenderer.h b/media/libstagefright/wifi-display/sink/TunnelRenderer.h
deleted file mode 100644
index c9597e0..0000000
--- a/media/libstagefright/wifi-display/sink/TunnelRenderer.h
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TUNNEL_RENDERER_H_
-
-#define TUNNEL_RENDERER_H_
-
-#include <gui/Surface.h>
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ABuffer;
-struct SurfaceComposerClient;
-struct SurfaceControl;
-struct Surface;
-struct IMediaPlayer;
-struct IStreamListener;
-
-// This class reassembles incoming RTP packets into the correct order
-// and sends the resulting transport stream to a mediaplayer instance
-// for playback.
-struct TunnelRenderer : public AHandler {
- TunnelRenderer(
- const sp<AMessage> &notifyLost,
- const sp<ISurfaceTexture> &surfaceTex);
-
- sp<ABuffer> dequeueBuffer();
-
- enum {
- kWhatQueueBuffer,
- };
-
-protected:
- virtual void onMessageReceived(const sp<AMessage> &msg);
- virtual ~TunnelRenderer();
-
-private:
- struct PlayerClient;
- struct StreamSource;
-
- mutable Mutex mLock;
-
- sp<AMessage> mNotifyLost;
- sp<ISurfaceTexture> mSurfaceTex;
-
- List<sp<ABuffer> > mPackets;
- int64_t mTotalBytesQueued;
-
- sp<SurfaceComposerClient> mComposerClient;
- sp<SurfaceControl> mSurfaceControl;
- sp<Surface> mSurface;
- sp<PlayerClient> mPlayerClient;
- sp<IMediaPlayer> mPlayer;
- sp<StreamSource> mStreamSource;
-
- int32_t mLastDequeuedExtSeqNo;
- int64_t mFirstFailedAttemptUs;
- bool mRequestedRetransmission;
-
- void initPlayer();
- void destroyPlayer();
-
- void queueBuffer(const sp<ABuffer> &buffer);
-
- DISALLOW_EVIL_CONSTRUCTORS(TunnelRenderer);
-};
-
-} // namespace android
-
-#endif // TUNNEL_RENDERER_H_
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp b/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
deleted file mode 100644
index fcd20d4..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.cpp
+++ /dev/null
@@ -1,644 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "WifiDisplaySink"
-#include <utils/Log.h>
-
-#include "WifiDisplaySink.h"
-#include "ParsedMessage.h"
-#include "RTPSink.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaErrors.h>
-
-namespace android {
-
-WifiDisplaySink::WifiDisplaySink(
- const sp<ANetworkSession> &netSession,
- const sp<ISurfaceTexture> &surfaceTex)
- : mState(UNDEFINED),
- mNetSession(netSession),
- mSurfaceTex(surfaceTex),
- mSessionID(0),
- mNextCSeq(1) {
-}
-
-WifiDisplaySink::~WifiDisplaySink() {
-}
-
-void WifiDisplaySink::start(const char *sourceHost, int32_t sourcePort) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setString("sourceHost", sourceHost);
- msg->setInt32("sourcePort", sourcePort);
- msg->post();
-}
-
-void WifiDisplaySink::start(const char *uri) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setString("setupURI", uri);
- msg->post();
-}
-
-// static
-bool WifiDisplaySink::ParseURL(
- const char *url, AString *host, int32_t *port, AString *path,
- AString *user, AString *pass) {
- host->clear();
- *port = 0;
- path->clear();
- user->clear();
- pass->clear();
-
- if (strncasecmp("rtsp://", url, 7)) {
- return false;
- }
-
- const char *slashPos = strchr(&url[7], '/');
-
- if (slashPos == NULL) {
- host->setTo(&url[7]);
- path->setTo("/");
- } else {
- host->setTo(&url[7], slashPos - &url[7]);
- path->setTo(slashPos);
- }
-
- ssize_t atPos = host->find("@");
-
- if (atPos >= 0) {
- // Split of user:pass@ from hostname.
-
- AString userPass(*host, 0, atPos);
- host->erase(0, atPos + 1);
-
- ssize_t colonPos = userPass.find(":");
-
- if (colonPos < 0) {
- *user = userPass;
- } else {
- user->setTo(userPass, 0, colonPos);
- pass->setTo(userPass, colonPos + 1, userPass.size() - colonPos - 1);
- }
- }
-
- const char *colonPos = strchr(host->c_str(), ':');
-
- if (colonPos != NULL) {
- char *end;
- unsigned long x = strtoul(colonPos + 1, &end, 10);
-
- if (end == colonPos + 1 || *end != '\0' || x >= 65536) {
- return false;
- }
-
- *port = x;
-
- size_t colonOffset = colonPos - host->c_str();
- size_t trailing = host->size() - colonOffset;
- host->erase(colonOffset, trailing);
- } else {
- *port = 554;
- }
-
- return true;
-}
-
-void WifiDisplaySink::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatStart:
- {
- int32_t sourcePort;
-
- if (msg->findString("setupURI", &mSetupURI)) {
- AString path, user, pass;
- CHECK(ParseURL(
- mSetupURI.c_str(),
- &mRTSPHost, &sourcePort, &path, &user, &pass)
- && user.empty() && pass.empty());
- } else {
- CHECK(msg->findString("sourceHost", &mRTSPHost));
- CHECK(msg->findInt32("sourcePort", &sourcePort));
- }
-
- sp<AMessage> notify = new AMessage(kWhatRTSPNotify, id());
-
- status_t err = mNetSession->createRTSPClient(
- mRTSPHost.c_str(), sourcePort, notify, &mSessionID);
- CHECK_EQ(err, (status_t)OK);
-
- mState = CONNECTING;
- break;
- }
-
- case kWhatRTSPNotify:
- {
- int32_t reason;
- CHECK(msg->findInt32("reason", &reason));
-
- switch (reason) {
- case ANetworkSession::kWhatError:
- {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- int32_t err;
- CHECK(msg->findInt32("err", &err));
-
- AString detail;
- CHECK(msg->findString("detail", &detail));
-
- ALOGE("An error occurred in session %d (%d, '%s/%s').",
- sessionID,
- err,
- detail.c_str(),
- strerror(-err));
-
- if (sessionID == mSessionID) {
- ALOGI("Lost control connection.");
-
- // The control connection is dead now.
- mNetSession->destroySession(mSessionID);
- mSessionID = 0;
-
- looper()->stop();
- }
- break;
- }
-
- case ANetworkSession::kWhatConnected:
- {
- ALOGI("We're now connected.");
- mState = CONNECTED;
-
- if (!mSetupURI.empty()) {
- status_t err =
- sendDescribe(mSessionID, mSetupURI.c_str());
-
- CHECK_EQ(err, (status_t)OK);
- }
- break;
- }
-
- case ANetworkSession::kWhatData:
- {
- onReceiveClientData(msg);
- break;
- }
-
- case ANetworkSession::kWhatBinaryData:
- {
- CHECK(sUseTCPInterleaving);
-
- int32_t channel;
- CHECK(msg->findInt32("channel", &channel));
-
- sp<ABuffer> data;
- CHECK(msg->findBuffer("data", &data));
-
- mRTPSink->injectPacket(channel == 0 /* isRTP */, data);
- break;
- }
-
- default:
- TRESPASS();
- }
- break;
- }
-
- case kWhatStop:
- {
- looper()->stop();
- break;
- }
-
- default:
- TRESPASS();
- }
-}
-
-void WifiDisplaySink::registerResponseHandler(
- int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func) {
- ResponseID id;
- id.mSessionID = sessionID;
- id.mCSeq = cseq;
- mResponseHandlers.add(id, func);
-}
-
-status_t WifiDisplaySink::sendM2(int32_t sessionID) {
- AString request = "OPTIONS * RTSP/1.0\r\n";
- AppendCommonResponse(&request, mNextCSeq);
-
- request.append(
- "Require: org.wfa.wfd1.0\r\n"
- "\r\n");
-
- status_t err =
- mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
- if (err != OK) {
- return err;
- }
-
- registerResponseHandler(
- sessionID, mNextCSeq, &WifiDisplaySink::onReceiveM2Response);
-
- ++mNextCSeq;
-
- return OK;
-}
-
-status_t WifiDisplaySink::onReceiveM2Response(
- int32_t sessionID, const sp<ParsedMessage> &msg) {
- int32_t statusCode;
- if (!msg->getStatusCode(&statusCode)) {
- return ERROR_MALFORMED;
- }
-
- if (statusCode != 200) {
- return ERROR_UNSUPPORTED;
- }
-
- return OK;
-}
-
-status_t WifiDisplaySink::onReceiveDescribeResponse(
- int32_t sessionID, const sp<ParsedMessage> &msg) {
- int32_t statusCode;
- if (!msg->getStatusCode(&statusCode)) {
- return ERROR_MALFORMED;
- }
-
- if (statusCode != 200) {
- return ERROR_UNSUPPORTED;
- }
-
- return sendSetup(sessionID, mSetupURI.c_str());
-}
-
-status_t WifiDisplaySink::onReceiveSetupResponse(
- int32_t sessionID, const sp<ParsedMessage> &msg) {
- int32_t statusCode;
- if (!msg->getStatusCode(&statusCode)) {
- return ERROR_MALFORMED;
- }
-
- if (statusCode != 200) {
- return ERROR_UNSUPPORTED;
- }
-
- if (!msg->findString("session", &mPlaybackSessionID)) {
- return ERROR_MALFORMED;
- }
-
- if (!ParsedMessage::GetInt32Attribute(
- mPlaybackSessionID.c_str(),
- "timeout",
- &mPlaybackSessionTimeoutSecs)) {
- mPlaybackSessionTimeoutSecs = -1;
- }
-
- ssize_t colonPos = mPlaybackSessionID.find(";");
- if (colonPos >= 0) {
- // Strip any options from the returned session id.
- mPlaybackSessionID.erase(
- colonPos, mPlaybackSessionID.size() - colonPos);
- }
-
- status_t err = configureTransport(msg);
-
- if (err != OK) {
- return err;
- }
-
- mState = PAUSED;
-
- return sendPlay(
- sessionID,
- !mSetupURI.empty()
- ? mSetupURI.c_str() : "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-}
-
-status_t WifiDisplaySink::configureTransport(const sp<ParsedMessage> &msg) {
- if (sUseTCPInterleaving) {
- return OK;
- }
-
- AString transport;
- if (!msg->findString("transport", &transport)) {
- ALOGE("Missing 'transport' field in SETUP response.");
- return ERROR_MALFORMED;
- }
-
- AString sourceHost;
- if (!ParsedMessage::GetAttribute(
- transport.c_str(), "source", &sourceHost)) {
- sourceHost = mRTSPHost;
- }
-
- AString serverPortStr;
- if (!ParsedMessage::GetAttribute(
- transport.c_str(), "server_port", &serverPortStr)) {
- ALOGE("Missing 'server_port' in Transport field.");
- return ERROR_MALFORMED;
- }
-
- int rtpPort, rtcpPort;
- if (sscanf(serverPortStr.c_str(), "%d-%d", &rtpPort, &rtcpPort) != 2
- || rtpPort <= 0 || rtpPort > 65535
- || rtcpPort <=0 || rtcpPort > 65535
- || rtcpPort != rtpPort + 1) {
- ALOGE("Invalid server_port description '%s'.",
- serverPortStr.c_str());
-
- return ERROR_MALFORMED;
- }
-
- if (rtpPort & 1) {
- ALOGW("Server picked an odd numbered RTP port.");
- }
-
- return mRTPSink->connect(sourceHost.c_str(), rtpPort, rtcpPort);
-}
-
-status_t WifiDisplaySink::onReceivePlayResponse(
- int32_t sessionID, const sp<ParsedMessage> &msg) {
- int32_t statusCode;
- if (!msg->getStatusCode(&statusCode)) {
- return ERROR_MALFORMED;
- }
-
- if (statusCode != 200) {
- return ERROR_UNSUPPORTED;
- }
-
- mState = PLAYING;
-
- return OK;
-}
-
-void WifiDisplaySink::onReceiveClientData(const sp<AMessage> &msg) {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- sp<RefBase> obj;
- CHECK(msg->findObject("data", &obj));
-
- sp<ParsedMessage> data =
- static_cast<ParsedMessage *>(obj.get());
-
- ALOGV("session %d received '%s'",
- sessionID, data->debugString().c_str());
-
- AString method;
- AString uri;
- data->getRequestField(0, &method);
-
- int32_t cseq;
- if (!data->findInt32("cseq", &cseq)) {
- sendErrorResponse(sessionID, "400 Bad Request", -1 /* cseq */);
- return;
- }
-
- if (method.startsWith("RTSP/")) {
- // This is a response.
-
- ResponseID id;
- id.mSessionID = sessionID;
- id.mCSeq = cseq;
-
- ssize_t index = mResponseHandlers.indexOfKey(id);
-
- if (index < 0) {
- ALOGW("Received unsolicited server response, cseq %d", cseq);
- return;
- }
-
- HandleRTSPResponseFunc func = mResponseHandlers.valueAt(index);
- mResponseHandlers.removeItemsAt(index);
-
- status_t err = (this->*func)(sessionID, data);
- CHECK_EQ(err, (status_t)OK);
- } else {
- AString version;
- data->getRequestField(2, &version);
- if (!(version == AString("RTSP/1.0"))) {
- sendErrorResponse(sessionID, "505 RTSP Version not supported", cseq);
- return;
- }
-
- if (method == "OPTIONS") {
- onOptionsRequest(sessionID, cseq, data);
- } else if (method == "GET_PARAMETER") {
- onGetParameterRequest(sessionID, cseq, data);
- } else if (method == "SET_PARAMETER") {
- onSetParameterRequest(sessionID, cseq, data);
- } else {
- sendErrorResponse(sessionID, "405 Method Not Allowed", cseq);
- }
- }
-}
-
-void WifiDisplaySink::onOptionsRequest(
- int32_t sessionID,
- int32_t cseq,
- const sp<ParsedMessage> &data) {
- AString response = "RTSP/1.0 200 OK\r\n";
- AppendCommonResponse(&response, cseq);
- response.append("Public: org.wfa.wfd1.0, GET_PARAMETER, SET_PARAMETER\r\n");
- response.append("\r\n");
-
- status_t err = mNetSession->sendRequest(sessionID, response.c_str());
- CHECK_EQ(err, (status_t)OK);
-
- err = sendM2(sessionID);
- CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::onGetParameterRequest(
- int32_t sessionID,
- int32_t cseq,
- const sp<ParsedMessage> &data) {
- AString body =
- "wfd_video_formats: xxx\r\n"
- "wfd_audio_codecs: xxx\r\n"
- "wfd_client_rtp_ports: RTP/AVP/UDP;unicast xxx 0 mode=play\r\n";
-
- AString response = "RTSP/1.0 200 OK\r\n";
- AppendCommonResponse(&response, cseq);
- response.append("Content-Type: text/parameters\r\n");
- response.append(StringPrintf("Content-Length: %d\r\n", body.size()));
- response.append("\r\n");
- response.append(body);
-
- status_t err = mNetSession->sendRequest(sessionID, response.c_str());
- CHECK_EQ(err, (status_t)OK);
-}
-
-status_t WifiDisplaySink::sendDescribe(int32_t sessionID, const char *uri) {
- uri = "rtsp://xwgntvx.is.livestream-api.com/livestreamiphone/wgntv";
- uri = "rtsp://v2.cache6.c.youtube.com/video.3gp?cid=e101d4bf280055f9&fmt=18";
-
- AString request = StringPrintf("DESCRIBE %s RTSP/1.0\r\n", uri);
- AppendCommonResponse(&request, mNextCSeq);
-
- request.append("Accept: application/sdp\r\n");
- request.append("\r\n");
-
- status_t err = mNetSession->sendRequest(
- sessionID, request.c_str(), request.size());
-
- if (err != OK) {
- return err;
- }
-
- registerResponseHandler(
- sessionID, mNextCSeq, &WifiDisplaySink::onReceiveDescribeResponse);
-
- ++mNextCSeq;
-
- return OK;
-}
-
-status_t WifiDisplaySink::sendSetup(int32_t sessionID, const char *uri) {
- mRTPSink = new RTPSink(mNetSession, mSurfaceTex);
- looper()->registerHandler(mRTPSink);
-
- status_t err = mRTPSink->init(sUseTCPInterleaving);
-
- if (err != OK) {
- looper()->unregisterHandler(mRTPSink->id());
- mRTPSink.clear();
- return err;
- }
-
- AString request = StringPrintf("SETUP %s RTSP/1.0\r\n", uri);
-
- AppendCommonResponse(&request, mNextCSeq);
-
- if (sUseTCPInterleaving) {
- request.append("Transport: RTP/AVP/TCP;interleaved=0-1\r\n");
- } else {
- int32_t rtpPort = mRTPSink->getRTPPort();
-
- request.append(
- StringPrintf(
- "Transport: RTP/AVP/UDP;unicast;client_port=%d-%d\r\n",
- rtpPort, rtpPort + 1));
- }
-
- request.append("\r\n");
-
- ALOGV("request = '%s'", request.c_str());
-
- err = mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
- if (err != OK) {
- return err;
- }
-
- registerResponseHandler(
- sessionID, mNextCSeq, &WifiDisplaySink::onReceiveSetupResponse);
-
- ++mNextCSeq;
-
- return OK;
-}
-
-status_t WifiDisplaySink::sendPlay(int32_t sessionID, const char *uri) {
- AString request = StringPrintf("PLAY %s RTSP/1.0\r\n", uri);
-
- AppendCommonResponse(&request, mNextCSeq);
-
- request.append(StringPrintf("Session: %s\r\n", mPlaybackSessionID.c_str()));
- request.append("\r\n");
-
- status_t err =
- mNetSession->sendRequest(sessionID, request.c_str(), request.size());
-
- if (err != OK) {
- return err;
- }
-
- registerResponseHandler(
- sessionID, mNextCSeq, &WifiDisplaySink::onReceivePlayResponse);
-
- ++mNextCSeq;
-
- return OK;
-}
-
-void WifiDisplaySink::onSetParameterRequest(
- int32_t sessionID,
- int32_t cseq,
- const sp<ParsedMessage> &data) {
- const char *content = data->getContent();
-
- if (strstr(content, "wfd_trigger_method: SETUP\r\n") != NULL) {
- status_t err =
- sendSetup(
- sessionID,
- "rtsp://x.x.x.x:x/wfd1.0/streamid=0");
-
- CHECK_EQ(err, (status_t)OK);
- }
-
- AString response = "RTSP/1.0 200 OK\r\n";
- AppendCommonResponse(&response, cseq);
- response.append("\r\n");
-
- status_t err = mNetSession->sendRequest(sessionID, response.c_str());
- CHECK_EQ(err, (status_t)OK);
-}
-
-void WifiDisplaySink::sendErrorResponse(
- int32_t sessionID,
- const char *errorDetail,
- int32_t cseq) {
- AString response;
- response.append("RTSP/1.0 ");
- response.append(errorDetail);
- response.append("\r\n");
-
- AppendCommonResponse(&response, cseq);
-
- response.append("\r\n");
-
- status_t err = mNetSession->sendRequest(sessionID, response.c_str());
- CHECK_EQ(err, (status_t)OK);
-}
-
-// static
-void WifiDisplaySink::AppendCommonResponse(AString *response, int32_t cseq) {
- time_t now = time(NULL);
- struct tm *now2 = gmtime(&now);
- char buf[128];
- strftime(buf, sizeof(buf), "%a, %d %b %Y %H:%M:%S %z", now2);
-
- response->append("Date: ");
- response->append(buf);
- response->append("\r\n");
-
- response->append("User-Agent: stagefright/1.1 (Linux;Android 4.1)\r\n");
-
- if (cseq >= 0) {
- response->append(StringPrintf("CSeq: %d\r\n", cseq));
- }
-}
-
-} // namespace android
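Note: the sink implementation removed above dispatched RTSP responses through a map keyed by (session ID, CSeq): each outgoing request registered a pointer-to-member handler, and the receive path looked the handler up, removed it, and invoked it, treating anything without a registered handler as unsolicited. A minimal standalone sketch of that pattern, using plain std::map instead of the Android KeyedVector (names here are illustrative, not from the patch):

    #include <cstdint>
    #include <map>
    #include <string>
    #include <utility>

    struct Sink {
        // Handler invoked when the response matching (sessionID, cseq) arrives.
        typedef int (Sink::*ResponseHandler)(int32_t sessionID,
                                             const std::string &response);

        std::map<std::pair<int32_t, int32_t>, ResponseHandler> mHandlers;

        void registerResponseHandler(int32_t sessionID, int32_t cseq,
                                     ResponseHandler handler) {
            mHandlers[{sessionID, cseq}] = handler;
        }

        void onResponse(int32_t sessionID, int32_t cseq,
                        const std::string &response) {
            auto it = mHandlers.find({sessionID, cseq});
            if (it == mHandlers.end()) {
                return;  // unsolicited response; the removed sink logged a warning
            }
            ResponseHandler handler = it->second;
            mHandlers.erase(it);         // one-shot: each CSeq is answered once
            (this->*handler)(sessionID, response);
        }

        int onReceiveDescribeResponse(int32_t, const std::string &) { return 0; }
    };

Each send*() method above paired such a registration with ++mNextCSeq, so every request had exactly one pending handler.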
diff --git a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h b/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
deleted file mode 100644
index f886ee5..0000000
--- a/media/libstagefright/wifi-display/sink/WifiDisplaySink.h
+++ /dev/null
@@ -1,147 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef WIFI_DISPLAY_SINK_H_
-
-#define WIFI_DISPLAY_SINK_H_
-
-#include "ANetworkSession.h"
-
-#include <gui/Surface.h>
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-struct ParsedMessage;
-struct RTPSink;
-
-// Represents the RTSP client acting as a wifi display sink.
-// Connects to a wifi display source and renders the incoming
-// transport stream using a MediaPlayer instance.
-struct WifiDisplaySink : public AHandler {
- WifiDisplaySink(
- const sp<ANetworkSession> &netSession,
- const sp<ISurfaceTexture> &surfaceTex = NULL);
-
- void start(const char *sourceHost, int32_t sourcePort);
- void start(const char *uri);
-
-protected:
- virtual ~WifiDisplaySink();
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
- enum State {
- UNDEFINED,
- CONNECTING,
- CONNECTED,
- PAUSED,
- PLAYING,
- };
-
- enum {
- kWhatStart,
- kWhatRTSPNotify,
- kWhatStop,
- };
-
- struct ResponseID {
- int32_t mSessionID;
- int32_t mCSeq;
-
- bool operator<(const ResponseID &other) const {
- return mSessionID < other.mSessionID
- || (mSessionID == other.mSessionID
- && mCSeq < other.mCSeq);
- }
- };
-
- typedef status_t (WifiDisplaySink::*HandleRTSPResponseFunc)(
- int32_t sessionID, const sp<ParsedMessage> &msg);
-
- static const bool sUseTCPInterleaving = false;
-
- State mState;
- sp<ANetworkSession> mNetSession;
- sp<ISurfaceTexture> mSurfaceTex;
- AString mSetupURI;
- AString mRTSPHost;
- int32_t mSessionID;
-
- int32_t mNextCSeq;
-
- KeyedVector<ResponseID, HandleRTSPResponseFunc> mResponseHandlers;
-
- sp<RTPSink> mRTPSink;
- AString mPlaybackSessionID;
- int32_t mPlaybackSessionTimeoutSecs;
-
- status_t sendM2(int32_t sessionID);
- status_t sendDescribe(int32_t sessionID, const char *uri);
- status_t sendSetup(int32_t sessionID, const char *uri);
- status_t sendPlay(int32_t sessionID, const char *uri);
-
- status_t onReceiveM2Response(
- int32_t sessionID, const sp<ParsedMessage> &msg);
-
- status_t onReceiveDescribeResponse(
- int32_t sessionID, const sp<ParsedMessage> &msg);
-
- status_t onReceiveSetupResponse(
- int32_t sessionID, const sp<ParsedMessage> &msg);
-
- status_t configureTransport(const sp<ParsedMessage> &msg);
-
- status_t onReceivePlayResponse(
- int32_t sessionID, const sp<ParsedMessage> &msg);
-
- void registerResponseHandler(
- int32_t sessionID, int32_t cseq, HandleRTSPResponseFunc func);
-
- void onReceiveClientData(const sp<AMessage> &msg);
-
- void onOptionsRequest(
- int32_t sessionID,
- int32_t cseq,
- const sp<ParsedMessage> &data);
-
- void onGetParameterRequest(
- int32_t sessionID,
- int32_t cseq,
- const sp<ParsedMessage> &data);
-
- void onSetParameterRequest(
- int32_t sessionID,
- int32_t cseq,
- const sp<ParsedMessage> &data);
-
- void sendErrorResponse(
- int32_t sessionID,
- const char *errorDetail,
- int32_t cseq);
-
- static void AppendCommonResponse(AString *response, int32_t cseq);
-
- bool ParseURL(
- const char *url, AString *host, int32_t *port, AString *path,
- AString *user, AString *pass);
-
- DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySink);
-};
-
-} // namespace android
-
-#endif // WIFI_DISPLAY_SINK_H_
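Note: the ResponseID comparator in the header removed above defines a strict weak ordering, first by session ID and then by CSeq, which is what KeyedVector needs for lookups. A hedged equivalent written with std::tie (illustrative only, not part of the patch):

    #include <cstdint>
    #include <tuple>

    struct ResponseID {
        int32_t mSessionID;
        int32_t mCSeq;

        // Lexicographic ordering: session ID first, then CSeq; equivalent to
        // the hand-written comparison in the removed header.
        bool operator<(const ResponseID &other) const {
            return std::tie(mSessionID, mCSeq)
                    < std::tie(other.mSessionID, other.mCSeq);
        }
    };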
diff --git a/media/libstagefright/wifi-display/source/Converter.cpp b/media/libstagefright/wifi-display/source/Converter.cpp
index 7a87444..5344623 100644
--- a/media/libstagefright/wifi-display/source/Converter.cpp
+++ b/media/libstagefright/wifi-display/source/Converter.cpp
@@ -23,7 +23,7 @@
#include "MediaPuller.h"
#include <cutils/properties.h>
-#include <gui/SurfaceTextureClient.h>
+#include <gui/Surface.h>
#include <media/ICrypto.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -54,6 +54,8 @@ Converter::Converter(
,mFirstSilentFrameUs(-1ll)
,mInSilentMode(false)
#endif
+ ,mPrevVideoBitrate(-1)
+ ,mNumFramesToDrop(0)
{
AString mime;
CHECK(mInputFormat->findString("mime", &mime));
@@ -67,11 +69,45 @@ Converter::Converter(
mInitCheck = initEncoder();
if (mInitCheck != OK) {
- if (mEncoder != NULL) {
- mEncoder->release();
- mEncoder.clear();
- }
+ releaseEncoder();
+ }
+}
+
+static void ReleaseMediaBufferReference(const sp<ABuffer> &accessUnit) {
+ void *mbuf;
+ if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf)
+ && mbuf != NULL) {
+ ALOGV("releasing mbuf %p", mbuf);
+
+ accessUnit->meta()->setPointer("mediaBuffer", NULL);
+
+ static_cast<MediaBuffer *>(mbuf)->release();
+ mbuf = NULL;
+ }
+}
+
+void Converter::releaseEncoder() {
+ if (mEncoder == NULL) {
+ return;
+ }
+
+ mEncoder->release();
+ mEncoder.clear();
+
+ while (!mInputBufferQueue.empty()) {
+ sp<ABuffer> accessUnit = *mInputBufferQueue.begin();
+ mInputBufferQueue.erase(mInputBufferQueue.begin());
+
+ ReleaseMediaBufferReference(accessUnit);
}
+
+ for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) {
+ sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i);
+ ReleaseMediaBufferReference(accessUnit);
+ }
+
+ mEncoderInputBuffers.clear();
+ mEncoderOutputBuffers.clear();
}
Converter::~Converter() {
@@ -99,7 +135,9 @@ bool Converter::needToManuallyPrependSPSPPS() const {
return mNeedToManuallyPrependSPSPPS;
}
-static int32_t getBitrate(const char *propName, int32_t defaultValue) {
+// static
+int32_t Converter::GetInt32Property(
+ const char *propName, int32_t defaultValue) {
char val[PROPERTY_VALUE_MAX];
if (property_get(propName, val, NULL)) {
char *end;
@@ -149,8 +187,9 @@ status_t Converter::initEncoder() {
mOutputFormat->setString("mime", outputMIME.c_str());
- int32_t audioBitrate = getBitrate("media.wfd.audio-bitrate", 128000);
- int32_t videoBitrate = getBitrate("media.wfd.video-bitrate", 5000000);
+ int32_t audioBitrate = GetInt32Property("media.wfd.audio-bitrate", 128000);
+ int32_t videoBitrate = GetInt32Property("media.wfd.video-bitrate", 5000000);
+ mPrevVideoBitrate = videoBitrate;
ALOGI("using audio bitrate of %d bps, video bitrate of %d bps",
audioBitrate, videoBitrate);
@@ -274,16 +313,7 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) {
sp<ABuffer> accessUnit;
CHECK(msg->findBuffer("accessUnit", &accessUnit));
- void *mbuf;
- if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf)
- && mbuf != NULL) {
- ALOGV("releasing mbuf %p", mbuf);
-
- accessUnit->meta()->setPointer("mediaBuffer", NULL);
-
- static_cast<MediaBuffer *>(mbuf)->release();
- mbuf = NULL;
- }
+ ReleaseMediaBufferReference(accessUnit);
}
break;
}
@@ -300,6 +330,13 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) {
sp<ABuffer> accessUnit;
CHECK(msg->findBuffer("accessUnit", &accessUnit));
+ if (mIsVideo && mNumFramesToDrop) {
+ --mNumFramesToDrop;
+ ALOGI("dropping frame.");
+ ReleaseMediaBufferReference(accessUnit);
+ break;
+ }
+
#if 0
void *mbuf;
if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf)
@@ -385,12 +422,9 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) {
case kWhatShutdown:
{
- ALOGI("shutting down encoder");
+ ALOGI("shutting down %s encoder", mIsVideo ? "video" : "audio");
- if (mEncoder != NULL) {
- mEncoder->release();
- mEncoder.clear();
- }
+ releaseEncoder();
AString mime;
CHECK(mInputFormat->findString("mime", &mime));
@@ -398,6 +432,12 @@ void Converter::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatDropAFrame:
+ {
+ ++mNumFramesToDrop;
+ break;
+ }
+
default:
TRESPASS();
}
@@ -609,6 +649,13 @@ status_t Converter::doMoreWork() {
&bufferIndex, &offset, &size, &timeUs, &flags);
if (err != OK) {
+ if (err == INFO_FORMAT_CHANGED) {
+ continue;
+ } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+ mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+ continue;
+ }
+
if (err == -EAGAIN) {
err = OK;
}
@@ -654,4 +701,23 @@ void Converter::requestIDRFrame() {
(new AMessage(kWhatRequestIDRFrame, id()))->post();
}
+void Converter::dropAFrame() {
+ (new AMessage(kWhatDropAFrame, id()))->post();
+}
+
+int32_t Converter::getVideoBitrate() const {
+ return mPrevVideoBitrate;
+}
+
+void Converter::setVideoBitrate(int32_t bitRate) {
+ if (mIsVideo && mEncoder != NULL && bitRate != mPrevVideoBitrate) {
+ sp<AMessage> params = new AMessage;
+ params->setInt32("videoBitrate", bitRate);
+
+ mEncoder->setParameters(params);
+
+ mPrevVideoBitrate = bitRate;
+ }
+}
+
} // namespace android
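Note: the Converter changes above centralize encoder teardown in releaseEncoder(), release MediaBuffer references held by queued access units, drop video frames on request (kWhatDropAFrame, driven by the network-stall feedback added in PlaybackSession below), and rename getBitrate() to the publicly usable GetInt32Property(). A sketch of that property helper, assuming the usual parse-with-fallback body (the hunk only shows the signature change and the property_get call):

    #include <stdlib.h>
    #include <cutils/properties.h>

    // Read a decimal int32 from a system property, falling back to a default
    // when the property is unset or does not parse cleanly.
    static int32_t getInt32Property(const char *propName, int32_t defaultValue) {
        char val[PROPERTY_VALUE_MAX];
        if (property_get(propName, val, NULL) > 0) {
            char *end;
            long num = strtol(val, &end, 10);
            if (end > val && *end == '\0') {
                return (int32_t)num;
            }
        }
        return defaultValue;
    }

    // e.g. getInt32Property("media.wfd.video-bitrate", 5000000);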
diff --git a/media/libstagefright/wifi-display/source/Converter.h b/media/libstagefright/wifi-display/source/Converter.h
index 0665eea..ba297c4 100644
--- a/media/libstagefright/wifi-display/source/Converter.h
+++ b/media/libstagefright/wifi-display/source/Converter.h
@@ -51,6 +51,8 @@ struct Converter : public AHandler {
void requestIDRFrame();
+ void dropAFrame();
+
enum {
kWhatAccessUnit,
kWhatEOS,
@@ -63,10 +65,16 @@ struct Converter : public AHandler {
kWhatShutdown,
kWhatMediaPullerNotify,
kWhatEncoderActivity,
+ kWhatDropAFrame,
};
void shutdownAsync();
+ int32_t getVideoBitrate() const;
+ void setVideoBitrate(int32_t bitrate);
+
+ static int32_t GetInt32Property(const char *propName, int32_t defaultValue);
+
protected:
virtual ~Converter();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -100,7 +108,12 @@ private:
sp<ABuffer> mPartialAudioAU;
+ int32_t mPrevVideoBitrate;
+
+ int32_t mNumFramesToDrop;
+
status_t initEncoder();
+ void releaseEncoder();
status_t feedEncoderInputBuffers();
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 916f797..3d7b865 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -23,14 +23,11 @@
#include "Converter.h"
#include "MediaPuller.h"
#include "RepeaterSource.h"
-#include "Sender.h"
-#include "TSPacketizer.h"
#include "include/avc_utils.h"
#include "WifiDisplaySource.h"
#include <binder/IServiceManager.h>
-#include <gui/ISurfaceComposer.h>
-#include <gui/SurfaceComposerClient.h>
+#include <cutils/properties.h>
#include <media/IHDCP.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
@@ -41,10 +38,9 @@
#include <media/stagefright/DataSource.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MediaExtractor.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
-#include <media/stagefright/MPEG2TSWriter.h>
+#include <media/stagefright/NuMediaExtractor.h>
#include <media/stagefright/SurfaceMediaSource.h>
#include <media/stagefright/Utils.h>
@@ -63,15 +59,18 @@ struct WifiDisplaySource::PlaybackSession::Track : public AHandler {
const sp<MediaPuller> &mediaPuller,
const sp<Converter> &converter);
+ Track(const sp<AMessage> &notify, const sp<AMessage> &format);
+
void setRepeaterSource(const sp<RepeaterSource> &source);
sp<AMessage> getFormat();
bool isAudio() const;
const sp<Converter> &converter() const;
- ssize_t packetizerTrackIndex() const;
+ const sp<RepeaterSource> &repeaterSource() const;
- void setPacketizerTrackIndex(size_t index);
+ ssize_t mediaSenderTrackIndex() const;
+ void setMediaSenderTrackIndex(size_t index);
status_t start();
void stopAsync();
@@ -110,8 +109,9 @@ private:
sp<ALooper> mCodecLooper;
sp<MediaPuller> mMediaPuller;
sp<Converter> mConverter;
+ sp<AMessage> mFormat;
bool mStarted;
- ssize_t mPacketizerTrackIndex;
+ ssize_t mMediaSenderTrackIndex;
bool mIsAudio;
List<sp<ABuffer> > mQueuedAccessUnits;
sp<RepeaterSource> mRepeaterSource;
@@ -135,11 +135,19 @@ WifiDisplaySource::PlaybackSession::Track::Track(
mMediaPuller(mediaPuller),
mConverter(converter),
mStarted(false),
- mPacketizerTrackIndex(-1),
mIsAudio(IsAudioFormat(mConverter->getOutputFormat())),
mLastOutputBufferQueuedTimeUs(-1ll) {
}
+WifiDisplaySource::PlaybackSession::Track::Track(
+ const sp<AMessage> &notify, const sp<AMessage> &format)
+ : mNotify(notify),
+ mFormat(format),
+ mStarted(false),
+ mIsAudio(IsAudioFormat(format)),
+ mLastOutputBufferQueuedTimeUs(-1ll) {
+}
+
WifiDisplaySource::PlaybackSession::Track::~Track() {
CHECK(!mStarted);
}
@@ -154,7 +162,7 @@ bool WifiDisplaySource::PlaybackSession::Track::IsAudioFormat(
}
sp<AMessage> WifiDisplaySource::PlaybackSession::Track::getFormat() {
- return mConverter->getOutputFormat();
+ return mFormat != NULL ? mFormat : mConverter->getOutputFormat();
}
bool WifiDisplaySource::PlaybackSession::Track::isAudio() const {
@@ -165,13 +173,19 @@ const sp<Converter> &WifiDisplaySource::PlaybackSession::Track::converter() cons
return mConverter;
}
-ssize_t WifiDisplaySource::PlaybackSession::Track::packetizerTrackIndex() const {
- return mPacketizerTrackIndex;
+const sp<RepeaterSource> &
+WifiDisplaySource::PlaybackSession::Track::repeaterSource() const {
+ return mRepeaterSource;
+}
+
+ssize_t WifiDisplaySource::PlaybackSession::Track::mediaSenderTrackIndex() const {
+ CHECK_GE(mMediaSenderTrackIndex, 0);
+ return mMediaSenderTrackIndex;
}
-void WifiDisplaySource::PlaybackSession::Track::setPacketizerTrackIndex(size_t index) {
- CHECK_LT(mPacketizerTrackIndex, 0);
- mPacketizerTrackIndex = index;
+void WifiDisplaySource::PlaybackSession::Track::setMediaSenderTrackIndex(
+ size_t index) {
+ mMediaSenderTrackIndex = index;
}
status_t WifiDisplaySource::PlaybackSession::Track::start() {
@@ -195,7 +209,9 @@ status_t WifiDisplaySource::PlaybackSession::Track::start() {
void WifiDisplaySource::PlaybackSession::Track::stopAsync() {
ALOGV("Track::stopAsync isAudio=%d", mIsAudio);
- mConverter->shutdownAsync();
+ if (mConverter != NULL) {
+ mConverter->shutdownAsync();
+ }
sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, id());
@@ -207,6 +223,7 @@ void WifiDisplaySource::PlaybackSession::Track::stopAsync() {
mMediaPuller->stopAsync(msg);
} else {
+ mStarted = false;
msg->post();
}
}
@@ -330,45 +347,68 @@ WifiDisplaySource::PlaybackSession::PlaybackSession(
const sp<ANetworkSession> &netSession,
const sp<AMessage> &notify,
const in_addr &interfaceAddr,
- const sp<IHDCP> &hdcp)
+ const sp<IHDCP> &hdcp,
+ const char *path)
: mNetSession(netSession),
mNotify(notify),
mInterfaceAddr(interfaceAddr),
mHDCP(hdcp),
+ mLocalRTPPort(-1),
mWeAreDead(false),
mPaused(false),
mLastLifesignUs(),
mVideoTrackIndex(-1),
mPrevTimeUs(-1ll),
- mAllTracksHavePacketizerIndex(false) {
+ mPullExtractorPending(false),
+ mPullExtractorGeneration(0),
+ mFirstSampleTimeRealUs(-1ll),
+ mFirstSampleTimeUs(-1ll) {
+ if (path != NULL) {
+ mMediaPath.setTo(path);
+ }
}
status_t WifiDisplaySource::PlaybackSession::init(
- const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
- Sender::TransportMode transportMode,
- bool usePCMAudio) {
- status_t err = setupPacketizer(usePCMAudio);
+ const char *clientIP,
+ int32_t clientRtp,
+ RTPSender::TransportMode rtpMode,
+ int32_t clientRtcp,
+ RTPSender::TransportMode rtcpMode,
+ bool enableAudio,
+ bool usePCMAudio,
+ bool enableVideo,
+ VideoFormats::ResolutionType videoResolutionType,
+ size_t videoResolutionIndex) {
+ sp<AMessage> notify = new AMessage(kWhatMediaSenderNotify, id());
+ mMediaSender = new MediaSender(mNetSession, notify);
+ looper()->registerHandler(mMediaSender);
+
+ mMediaSender->setHDCP(mHDCP);
+
+ status_t err = setupPacketizer(
+ enableAudio,
+ usePCMAudio,
+ enableVideo,
+ videoResolutionType,
+ videoResolutionIndex);
- if (err != OK) {
- return err;
+ if (err == OK) {
+ err = mMediaSender->initAsync(
+ -1 /* trackIndex */,
+ clientIP,
+ clientRtp,
+ rtpMode,
+ clientRtcp,
+ rtcpMode,
+ &mLocalRTPPort);
}
- sp<AMessage> notify = new AMessage(kWhatSenderNotify, id());
- mSender = new Sender(mNetSession, notify);
-
- mSenderLooper = new ALooper;
- mSenderLooper->setName("sender_looper");
-
- mSenderLooper->start(
- false /* runOnCallingThread */,
- false /* canCallJava */,
- PRIORITY_AUDIO);
-
- mSenderLooper->registerHandler(mSender);
+ if (err != OK) {
+ mLocalRTPPort = -1;
- err = mSender->init(clientIP, clientRtp, clientRtcp, transportMode);
+ looper()->unregisterHandler(mMediaSender->id());
+ mMediaSender.clear();
- if (err != OK) {
return err;
}
@@ -381,7 +421,7 @@ WifiDisplaySource::PlaybackSession::~PlaybackSession() {
}
int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const {
- return mSender->getRTPPort();
+ return mLocalRTPPort;
}
int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const {
@@ -400,19 +440,7 @@ status_t WifiDisplaySource::PlaybackSession::play() {
return OK;
}
-status_t WifiDisplaySource::PlaybackSession::finishPlay() {
- // XXX Give the dongle a second to bind its sockets.
- (new AMessage(kWhatFinishPlay, id()))->post(1000000ll);
- return OK;
-}
-
-status_t WifiDisplaySource::PlaybackSession::onFinishPlay() {
- return mSender->finishInit();
-}
-
-status_t WifiDisplaySource::PlaybackSession::onFinishPlay2() {
- mSender->scheduleSendSR();
-
+status_t WifiDisplaySource::PlaybackSession::onMediaSenderInitialized() {
for (size_t i = 0; i < mTracks.size(); ++i) {
CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start());
}
@@ -459,44 +487,18 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(
CHECK(msg->findSize("trackIndex", &trackIndex));
if (what == Converter::kWhatAccessUnit) {
- const sp<Track> &track = mTracks.valueFor(trackIndex);
-
- ssize_t packetizerTrackIndex = track->packetizerTrackIndex();
-
- if (packetizerTrackIndex < 0) {
- sp<AMessage> trackFormat = track->getFormat()->dup();
- if (mHDCP != NULL && !track->isAudio()) {
- // HDCP2.0 _and_ HDCP 2.1 specs say to set the version
- // inside the HDCP descriptor to 0x20!!!
- trackFormat->setInt32("hdcp-version", 0x20);
- }
- packetizerTrackIndex = mPacketizer->addTrack(trackFormat);
-
- CHECK_GE(packetizerTrackIndex, 0);
-
- track->setPacketizerTrackIndex(packetizerTrackIndex);
-
- if (allTracksHavePacketizerIndex()) {
- status_t err = packetizeQueuedAccessUnits();
-
- if (err != OK) {
- notifySessionDead();
- break;
- }
- }
- }
-
sp<ABuffer> accessUnit;
CHECK(msg->findBuffer("accessUnit", &accessUnit));
- if (!allTracksHavePacketizerIndex()) {
- track->queueAccessUnit(accessUnit);
- break;
- }
+ const sp<Track> &track = mTracks.valueFor(trackIndex);
- track->queueOutputBuffer(accessUnit);
+ status_t err = mMediaSender->queueAccessUnit(
+ track->mediaSenderTrackIndex(),
+ accessUnit);
- drainAccessUnits();
+ if (err != OK) {
+ notifySessionDead();
+ }
break;
} else if (what == Converter::kWhatEOS) {
CHECK_EQ(what, Converter::kWhatEOS);
@@ -528,25 +530,38 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(
break;
}
- case kWhatSenderNotify:
+ case kWhatMediaSenderNotify:
{
int32_t what;
CHECK(msg->findInt32("what", &what));
- if (what == Sender::kWhatInitDone) {
- onFinishPlay2();
- } else if (what == Sender::kWhatSessionDead) {
+ if (what == MediaSender::kWhatInitDone) {
+ status_t err;
+ CHECK(msg->findInt32("err", &err));
+
+ if (err == OK) {
+ onMediaSenderInitialized();
+ } else {
+ notifySessionDead();
+ }
+ } else if (what == MediaSender::kWhatError) {
notifySessionDead();
+ } else if (what == MediaSender::kWhatNetworkStall) {
+ size_t numBytesQueued;
+ CHECK(msg->findSize("numBytesQueued", &numBytesQueued));
+
+ if (mVideoTrackIndex >= 0) {
+ const sp<Track> &videoTrack =
+ mTracks.valueFor(mVideoTrackIndex);
+
+ sp<Converter> converter = videoTrack->converter();
+ if (converter != NULL) {
+ converter->dropAFrame();
+ }
+ }
} else {
TRESPASS();
}
-
- break;
- }
-
- case kWhatFinishPlay:
- {
- onFinishPlay();
break;
}
@@ -571,11 +586,8 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(
break;
}
- mSenderLooper->unregisterHandler(mSender->id());
- mSender.clear();
- mSenderLooper.clear();
-
- mPacketizer.clear();
+ looper()->unregisterHandler(mMediaSender->id());
+ mMediaSender.clear();
sp<AMessage> notify = mNotify->dup();
notify->setInt32("what", kWhatSessionDestroyed);
@@ -584,30 +596,14 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(
break;
}
- case kWhatPacketize:
+ case kWhatPause:
{
- size_t trackIndex;
- CHECK(msg->findSize("trackIndex", &trackIndex));
-
- sp<ABuffer> accessUnit;
- CHECK(msg->findBuffer("accessUnit", &accessUnit));
-
-#if 0
- if ((ssize_t)trackIndex == mVideoTrackIndex) {
- int64_t nowUs = ALooper::GetNowUs();
- static int64_t prevNowUs = 0ll;
-
- ALOGI("sending AU, dNowUs=%lld us", nowUs - prevNowUs);
-
- prevNowUs = nowUs;
+ if (mExtractor != NULL) {
+ ++mPullExtractorGeneration;
+ mFirstSampleTimeRealUs = -1ll;
+ mFirstSampleTimeUs = -1ll;
}
-#endif
- break;
- }
-
- case kWhatPause:
- {
if (mPaused) {
break;
}
@@ -622,6 +618,10 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(
case kWhatResume:
{
+ if (mExtractor != NULL) {
+ schedulePullExtractor();
+ }
+
if (!mPaused) {
break;
}
@@ -634,20 +634,177 @@ void WifiDisplaySource::PlaybackSession::onMessageReceived(
break;
}
+ case kWhatPullExtractorSample:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mPullExtractorGeneration) {
+ break;
+ }
+
+ mPullExtractorPending = false;
+
+ onPullExtractor();
+ break;
+ }
+
default:
TRESPASS();
}
}
-status_t WifiDisplaySource::PlaybackSession::setupPacketizer(bool usePCMAudio) {
- mPacketizer = new TSPacketizer;
+status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
+ bool enableAudio, bool enableVideo) {
+ DataSource::RegisterDefaultSniffers();
- status_t err = addVideoSource();
+ mExtractor = new NuMediaExtractor;
+
+ status_t err = mExtractor->setDataSource(mMediaPath.c_str());
if (err != OK) {
return err;
}
+ size_t n = mExtractor->countTracks();
+ bool haveAudio = false;
+ bool haveVideo = false;
+ for (size_t i = 0; i < n; ++i) {
+ sp<AMessage> format;
+ err = mExtractor->getTrackFormat(i, &format);
+
+ if (err != OK) {
+ continue;
+ }
+
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
+ bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
+
+ if (isAudio && enableAudio && !haveAudio) {
+ haveAudio = true;
+ } else if (isVideo && enableVideo && !haveVideo) {
+ haveVideo = true;
+ } else {
+ continue;
+ }
+
+ err = mExtractor->selectTrack(i);
+
+ size_t trackIndex = mTracks.size();
+
+ sp<AMessage> notify = new AMessage(kWhatTrackNotify, id());
+ notify->setSize("trackIndex", trackIndex);
+
+ sp<Track> track = new Track(notify, format);
+ looper()->registerHandler(track);
+
+ mTracks.add(trackIndex, track);
+
+ mExtractorTrackToInternalTrack.add(i, trackIndex);
+
+ if (isVideo) {
+ mVideoTrackIndex = trackIndex;
+ }
+
+ uint32_t flags = MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;
+
+ ssize_t mediaSenderTrackIndex =
+ mMediaSender->addTrack(format, flags);
+ CHECK_GE(mediaSenderTrackIndex, 0);
+
+ track->setMediaSenderTrackIndex(mediaSenderTrackIndex);
+
+ if ((haveAudio || !enableAudio) && (haveVideo || !enableVideo)) {
+ break;
+ }
+ }
+
+ return OK;
+}
+
+void WifiDisplaySource::PlaybackSession::schedulePullExtractor() {
+ if (mPullExtractorPending) {
+ return;
+ }
+
+ int64_t sampleTimeUs;
+ status_t err = mExtractor->getSampleTime(&sampleTimeUs);
+
+ int64_t nowUs = ALooper::GetNowUs();
+
+ if (mFirstSampleTimeRealUs < 0ll) {
+ mFirstSampleTimeRealUs = nowUs;
+ mFirstSampleTimeUs = sampleTimeUs;
+ }
+
+ int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstSampleTimeRealUs;
+
+ sp<AMessage> msg = new AMessage(kWhatPullExtractorSample, id());
+ msg->setInt32("generation", mPullExtractorGeneration);
+ msg->post(whenUs - nowUs);
+
+ mPullExtractorPending = true;
+}
+
+void WifiDisplaySource::PlaybackSession::onPullExtractor() {
+ sp<ABuffer> accessUnit = new ABuffer(1024 * 1024);
+ status_t err = mExtractor->readSampleData(accessUnit);
+ if (err != OK) {
+ // EOS.
+ return;
+ }
+
+ int64_t timeUs;
+ CHECK_EQ((status_t)OK, mExtractor->getSampleTime(&timeUs));
+
+ accessUnit->meta()->setInt64(
+ "timeUs", mFirstSampleTimeRealUs + timeUs - mFirstSampleTimeUs);
+
+ size_t trackIndex;
+ CHECK_EQ((status_t)OK, mExtractor->getSampleTrackIndex(&trackIndex));
+
+ sp<AMessage> msg = new AMessage(kWhatConverterNotify, id());
+
+ msg->setSize(
+ "trackIndex", mExtractorTrackToInternalTrack.valueFor(trackIndex));
+
+ msg->setInt32("what", Converter::kWhatAccessUnit);
+ msg->setBuffer("accessUnit", accessUnit);
+ msg->post();
+
+ mExtractor->advance();
+
+ schedulePullExtractor();
+}
+
+status_t WifiDisplaySource::PlaybackSession::setupPacketizer(
+ bool enableAudio,
+ bool usePCMAudio,
+ bool enableVideo,
+ VideoFormats::ResolutionType videoResolutionType,
+ size_t videoResolutionIndex) {
+ CHECK(enableAudio || enableVideo);
+
+ if (!mMediaPath.empty()) {
+ return setupMediaPacketizer(enableAudio, enableVideo);
+ }
+
+ if (enableVideo) {
+ status_t err = addVideoSource(
+ videoResolutionType, videoResolutionIndex);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ if (!enableAudio) {
+ return OK;
+ }
+
return addAudioSource(usePCMAudio);
}
@@ -732,30 +889,44 @@ status_t WifiDisplaySource::PlaybackSession::addSource(
mVideoTrackIndex = trackIndex;
}
+ uint32_t flags = 0;
+ if (converter->needToManuallyPrependSPSPPS()) {
+ flags |= MediaSender::FLAG_MANUALLY_PREPEND_SPS_PPS;
+ }
+
+ ssize_t mediaSenderTrackIndex =
+ mMediaSender->addTrack(converter->getOutputFormat(), flags);
+ CHECK_GE(mediaSenderTrackIndex, 0);
+
+ track->setMediaSenderTrackIndex(mediaSenderTrackIndex);
+
return OK;
}
-status_t WifiDisplaySource::PlaybackSession::addVideoSource() {
- sp<SurfaceMediaSource> source = new SurfaceMediaSource(width(), height());
+status_t WifiDisplaySource::PlaybackSession::addVideoSource(
+ VideoFormats::ResolutionType videoResolutionType,
+ size_t videoResolutionIndex) {
+ size_t width, height, framesPerSecond;
+ bool interlaced;
+ CHECK(VideoFormats::GetConfiguration(
+ videoResolutionType,
+ videoResolutionIndex,
+ &width,
+ &height,
+ &framesPerSecond,
+ &interlaced));
+
+ sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height);
source->setUseAbsoluteTimestamps();
-#if 1
sp<RepeaterSource> videoSource =
- new RepeaterSource(source, 30.0 /* rateHz */);
-#endif
+ new RepeaterSource(source, framesPerSecond);
-#if 1
size_t numInputBuffers;
status_t err = addSource(
true /* isVideo */, videoSource, true /* isRepeaterSource */,
false /* usePCMAudio */, &numInputBuffers);
-#else
- size_t numInputBuffers;
- status_t err = addSource(
- true /* isVideo */, source, false /* isRepeaterSource */,
- false /* usePCMAudio */, &numInputBuffers);
-#endif
if (err != OK) {
return err;
@@ -786,26 +957,10 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {
return OK;
}
-sp<ISurfaceTexture> WifiDisplaySource::PlaybackSession::getSurfaceTexture() {
+sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() {
return mBufferQueue;
}
-int32_t WifiDisplaySource::PlaybackSession::width() const {
-#if USE_1080P
- return 1920;
-#else
- return 1280;
-#endif
-}
-
-int32_t WifiDisplaySource::PlaybackSession::height() const {
-#if USE_1080P
- return 1080;
-#else
- return 720;
-#endif
-}
-
void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
for (size_t i = 0; i < mTracks.size(); ++i) {
const sp<Track> &track = mTracks.valueAt(i);
@@ -814,168 +969,6 @@ void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
}
}
-bool WifiDisplaySource::PlaybackSession::allTracksHavePacketizerIndex() {
- if (mAllTracksHavePacketizerIndex) {
- return true;
- }
-
- for (size_t i = 0; i < mTracks.size(); ++i) {
- if (mTracks.valueAt(i)->packetizerTrackIndex() < 0) {
- return false;
- }
- }
-
- mAllTracksHavePacketizerIndex = true;
-
- return true;
-}
-
-status_t WifiDisplaySource::PlaybackSession::packetizeAccessUnit(
- size_t trackIndex, sp<ABuffer> accessUnit,
- sp<ABuffer> *packets) {
- const sp<Track> &track = mTracks.valueFor(trackIndex);
-
- uint32_t flags = 0;
-
- bool isHDCPEncrypted = false;
- uint64_t inputCTR;
- uint8_t HDCP_private_data[16];
-
- bool manuallyPrependSPSPPS =
- !track->isAudio()
- && track->converter()->needToManuallyPrependSPSPPS()
- && IsIDR(accessUnit);
-
- if (mHDCP != NULL && !track->isAudio()) {
- isHDCPEncrypted = true;
-
- if (manuallyPrependSPSPPS) {
- accessUnit = mPacketizer->prependCSD(
- track->packetizerTrackIndex(), accessUnit);
- }
-
- status_t err = mHDCP->encrypt(
- accessUnit->data(), accessUnit->size(),
- trackIndex /* streamCTR */,
- &inputCTR,
- accessUnit->data());
-
- if (err != OK) {
- ALOGE("Failed to HDCP-encrypt media data (err %d)",
- err);
-
- return err;
- }
-
- HDCP_private_data[0] = 0x00;
-
- HDCP_private_data[1] =
- (((trackIndex >> 30) & 3) << 1) | 1;
-
- HDCP_private_data[2] = (trackIndex >> 22) & 0xff;
-
- HDCP_private_data[3] =
- (((trackIndex >> 15) & 0x7f) << 1) | 1;
-
- HDCP_private_data[4] = (trackIndex >> 7) & 0xff;
-
- HDCP_private_data[5] =
- ((trackIndex & 0x7f) << 1) | 1;
-
- HDCP_private_data[6] = 0x00;
-
- HDCP_private_data[7] =
- (((inputCTR >> 60) & 0x0f) << 1) | 1;
-
- HDCP_private_data[8] = (inputCTR >> 52) & 0xff;
-
- HDCP_private_data[9] =
- (((inputCTR >> 45) & 0x7f) << 1) | 1;
-
- HDCP_private_data[10] = (inputCTR >> 37) & 0xff;
-
- HDCP_private_data[11] =
- (((inputCTR >> 30) & 0x7f) << 1) | 1;
-
- HDCP_private_data[12] = (inputCTR >> 22) & 0xff;
-
- HDCP_private_data[13] =
- (((inputCTR >> 15) & 0x7f) << 1) | 1;
-
- HDCP_private_data[14] = (inputCTR >> 7) & 0xff;
-
- HDCP_private_data[15] =
- ((inputCTR & 0x7f) << 1) | 1;
-
-#if 0
- ALOGI("HDCP_private_data:");
- hexdump(HDCP_private_data, sizeof(HDCP_private_data));
-
- ABitReader br(HDCP_private_data, sizeof(HDCP_private_data));
- CHECK_EQ(br.getBits(13), 0);
- CHECK_EQ(br.getBits(2), (trackIndex >> 30) & 3);
- CHECK_EQ(br.getBits(1), 1u);
- CHECK_EQ(br.getBits(15), (trackIndex >> 15) & 0x7fff);
- CHECK_EQ(br.getBits(1), 1u);
- CHECK_EQ(br.getBits(15), trackIndex & 0x7fff);
- CHECK_EQ(br.getBits(1), 1u);
- CHECK_EQ(br.getBits(11), 0);
- CHECK_EQ(br.getBits(4), (inputCTR >> 60) & 0xf);
- CHECK_EQ(br.getBits(1), 1u);
- CHECK_EQ(br.getBits(15), (inputCTR >> 45) & 0x7fff);
- CHECK_EQ(br.getBits(1), 1u);
- CHECK_EQ(br.getBits(15), (inputCTR >> 30) & 0x7fff);
- CHECK_EQ(br.getBits(1), 1u);
- CHECK_EQ(br.getBits(15), (inputCTR >> 15) & 0x7fff);
- CHECK_EQ(br.getBits(1), 1u);
- CHECK_EQ(br.getBits(15), inputCTR & 0x7fff);
- CHECK_EQ(br.getBits(1), 1u);
-#endif
-
- flags |= TSPacketizer::IS_ENCRYPTED;
- } else if (manuallyPrependSPSPPS) {
- flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES;
- }
-
- int64_t timeUs = ALooper::GetNowUs();
- if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) {
- flags |= TSPacketizer::EMIT_PCR;
- flags |= TSPacketizer::EMIT_PAT_AND_PMT;
-
- mPrevTimeUs = timeUs;
- }
-
- mPacketizer->packetize(
- track->packetizerTrackIndex(), accessUnit, packets, flags,
- !isHDCPEncrypted ? NULL : HDCP_private_data,
- !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data),
- track->isAudio() ? 2 : 0 /* numStuffingBytes */);
-
- return OK;
-}
-
-status_t WifiDisplaySource::PlaybackSession::packetizeQueuedAccessUnits() {
- for (;;) {
- bool gotMoreData = false;
- for (size_t i = 0; i < mTracks.size(); ++i) {
- size_t trackIndex = mTracks.keyAt(i);
- const sp<Track> &track = mTracks.valueAt(i);
-
- sp<ABuffer> accessUnit = track->dequeueAccessUnit();
- if (accessUnit != NULL) {
- track->queueOutputBuffer(accessUnit);
- gotMoreData = true;
- }
- }
-
- if (!gotMoreData) {
- break;
- }
- }
-
- return OK;
-}
-
void WifiDisplaySource::PlaybackSession::notifySessionDead() {
// Inform WifiDisplaySource of our premature death (wish).
sp<AMessage> notify = mNotify->dup();
@@ -985,78 +978,5 @@ void WifiDisplaySource::PlaybackSession::notifySessionDead() {
mWeAreDead = true;
}
-void WifiDisplaySource::PlaybackSession::drainAccessUnits() {
- ALOGV("audio/video has %d/%d buffers ready.",
- mTracks.valueFor(1)->countQueuedOutputBuffers(),
- mTracks.valueFor(0)->countQueuedOutputBuffers());
-
- while (drainAccessUnit()) {
- }
-}
-
-bool WifiDisplaySource::PlaybackSession::drainAccessUnit() {
- ssize_t minTrackIndex = -1;
- int64_t minTimeUs = -1ll;
-
- for (size_t i = 0; i < mTracks.size(); ++i) {
- const sp<Track> &track = mTracks.valueAt(i);
-
- int64_t timeUs;
- if (track->hasOutputBuffer(&timeUs)) {
- if (minTrackIndex < 0 || timeUs < minTimeUs) {
- minTrackIndex = mTracks.keyAt(i);
- minTimeUs = timeUs;
- }
- }
-#if SUSPEND_VIDEO_IF_IDLE
- else if (!track->isSuspended()) {
- // We still consider this track "live", so it should keep
- // delivering output data whose time stamps we'll have to
- // consider for proper interleaving.
- return false;
- }
-#else
- else {
- // We need access units available on all tracks to be able to
- // dequeue the earliest one.
- return false;
- }
-#endif
- }
-
- if (minTrackIndex < 0) {
- return false;
- }
-
- const sp<Track> &track = mTracks.valueFor(minTrackIndex);
- sp<ABuffer> accessUnit = track->dequeueOutputBuffer();
-
- sp<ABuffer> packets;
- status_t err = packetizeAccessUnit(minTrackIndex, accessUnit, &packets);
-
- if (err != OK) {
- notifySessionDead();
- return false;
- }
-
- if ((ssize_t)minTrackIndex == mVideoTrackIndex) {
- packets->meta()->setInt32("isVideo", 1);
- }
- mSender->queuePackets(minTimeUs, packets);
-
-#if 0
- if (minTrackIndex == mVideoTrackIndex) {
- int64_t nowUs = ALooper::GetNowUs();
-
- // Latency from "data acquired" to "ready to send if we wanted to".
- ALOGI("[%s] latencyUs = %lld ms",
- minTrackIndex == mVideoTrackIndex ? "video" : "audio",
- (nowUs - minTimeUs) / 1000ll);
- }
-#endif
-
- return true;
-}
-
} // namespace android
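Note: beyond swapping the in-process Sender/TSPacketizer for MediaSender, the PlaybackSession changes above add an optional file-backed source: when a media path is supplied, NuMediaExtractor samples are pulled on a schedule that pins the first sample's media timestamp to the wall clock and offsets every later sample by the same amount. A minimal sketch of that pacing rule (plain struct, names illustrative):

    #include <cstdint>

    struct SamplePacer {
        int64_t mFirstSampleTimeUs = -1;  // media time of the first sample
        int64_t mFirstRealTimeUs = -1;    // wall-clock time when it was pulled

        // Microseconds to wait before delivering a sample whose media timestamp
        // is sampleTimeUs, given the current wall-clock time nowUs. Mirrors the
        // arithmetic in schedulePullExtractor(); a result <= 0 means "now".
        int64_t delayUs(int64_t sampleTimeUs, int64_t nowUs) {
            if (mFirstRealTimeUs < 0) {
                mFirstRealTimeUs = nowUs;
                mFirstSampleTimeUs = sampleTimeUs;
            }
            int64_t whenUs = sampleTimeUs - mFirstSampleTimeUs + mFirstRealTimeUs;
            return whenUs - nowUs;
        }
    };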
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
index b9d193b..39086a1 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.h
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.h
@@ -18,7 +18,8 @@
#define PLAYBACK_SESSION_H_
-#include "Sender.h"
+#include "MediaSender.h"
+#include "VideoFormats.h"
#include "WifiDisplaySource.h"
namespace android {
@@ -26,10 +27,11 @@ namespace android {
struct ABuffer;
struct BufferQueue;
struct IHDCP;
-struct ISurfaceTexture;
+struct IGraphicBufferProducer;
struct MediaPuller;
struct MediaSource;
-struct TSPacketizer;
+struct MediaSender;
+struct NuMediaExtractor;
// Encapsulates the state of an RTP/RTCP session in the context of wifi
// display.
@@ -38,12 +40,20 @@ struct WifiDisplaySource::PlaybackSession : public AHandler {
const sp<ANetworkSession> &netSession,
const sp<AMessage> &notify,
const struct in_addr &interfaceAddr,
- const sp<IHDCP> &hdcp);
+ const sp<IHDCP> &hdcp,
+ const char *path = NULL);
status_t init(
- const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
- Sender::TransportMode transportMode,
- bool usePCMAudio);
+ const char *clientIP,
+ int32_t clientRtp,
+ RTPSender::TransportMode rtpMode,
+ int32_t clientRtcp,
+ RTPSender::TransportMode rtcpMode,
+ bool enableAudio,
+ bool usePCMAudio,
+ bool enableVideo,
+ VideoFormats::ResolutionType videoResolutionType,
+ size_t videoResolutionIndex);
void destroyAsync();
@@ -56,9 +66,7 @@ struct WifiDisplaySource::PlaybackSession : public AHandler {
status_t finishPlay();
status_t pause();
- sp<ISurfaceTexture> getSurfaceTexture();
- int32_t width() const;
- int32_t height() const;
+ sp<IGraphicBufferProducer> getSurfaceTexture();
void requestIDRFrame();
@@ -80,26 +88,27 @@ private:
kWhatMediaPullerNotify,
kWhatConverterNotify,
kWhatTrackNotify,
- kWhatSenderNotify,
kWhatUpdateSurface,
- kWhatFinishPlay,
- kWhatPacketize,
kWhatPause,
kWhatResume,
+ kWhatMediaSenderNotify,
+ kWhatPullExtractorSample,
};
sp<ANetworkSession> mNetSession;
- sp<Sender> mSender;
- sp<ALooper> mSenderLooper;
sp<AMessage> mNotify;
in_addr mInterfaceAddr;
sp<IHDCP> mHDCP;
+ AString mMediaPath;
+
+ sp<MediaSender> mMediaSender;
+ int32_t mLocalRTPPort;
+
bool mWeAreDead;
bool mPaused;
int64_t mLastLifesignUs;
- sp<TSPacketizer> mPacketizer;
sp<BufferQueue> mBufferQueue;
KeyedVector<size_t, sp<Track> > mTracks;
@@ -107,9 +116,21 @@ private:
int64_t mPrevTimeUs;
- bool mAllTracksHavePacketizerIndex;
+ sp<NuMediaExtractor> mExtractor;
+ KeyedVector<size_t, size_t> mExtractorTrackToInternalTrack;
+ bool mPullExtractorPending;
+ int32_t mPullExtractorGeneration;
+ int64_t mFirstSampleTimeRealUs;
+ int64_t mFirstSampleTimeUs;
- status_t setupPacketizer(bool usePCMAudio);
+ status_t setupMediaPacketizer(bool enableAudio, bool enableVideo);
+
+ status_t setupPacketizer(
+ bool enableAudio,
+ bool usePCMAudio,
+ bool enableVideo,
+ VideoFormats::ResolutionType videoResolutionType,
+ size_t videoResolutionIndex);
status_t addSource(
bool isVideo,
@@ -118,29 +139,20 @@ private:
bool usePCMAudio,
size_t *numInputBuffers);
- status_t addVideoSource();
- status_t addAudioSource(bool usePCMAudio);
-
- ssize_t appendTSData(
- const void *data, size_t size, bool timeDiscontinuity, bool flush);
-
- status_t onFinishPlay();
- status_t onFinishPlay2();
+ status_t addVideoSource(
+ VideoFormats::ResolutionType videoResolutionType,
+ size_t videoResolutionIndex);
- bool allTracksHavePacketizerIndex();
-
- status_t packetizeAccessUnit(
- size_t trackIndex, sp<ABuffer> accessUnit,
- sp<ABuffer> *packets);
+ status_t addAudioSource(bool usePCMAudio);
- status_t packetizeQueuedAccessUnits();
+ status_t onMediaSenderInitialized();
void notifySessionDead();
- void drainAccessUnits();
+ void schedulePullExtractor();
+ void onPullExtractor();
- // Returns true iff an access unit was successfully drained.
- bool drainAccessUnit();
+ void onSinkFeedback(const sp<AMessage> &msg);
DISALLOW_EVIL_CONSTRUCTORS(PlaybackSession);
};
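Note: pausing the extractor path relies on the generation counter declared above (mPullExtractorGeneration): a pause bumps the counter, so any already-posted kWhatPullExtractorSample message carries a stale generation and is ignored when it arrives. A single-threaded stand-in for that pattern (illustrative only, not the AMessage machinery):

    #include <cstdint>
    #include <functional>
    #include <queue>
    #include <utility>

    struct DeferredQueue {
        int32_t mGeneration = 0;
        std::queue<std::pair<int32_t, std::function<void()> > > mPending;

        void post(std::function<void()> work) {
            mPending.push(std::make_pair(mGeneration, std::move(work)));
        }

        void cancelPending() { ++mGeneration; }  // e.g. on pause

        void drain() {
            while (!mPending.empty()) {
                std::pair<int32_t, std::function<void()> > entry = mPending.front();
                mPending.pop();
                if (entry.first == mGeneration) {
                    entry.second();              // stale entries are dropped
                }
            }
        }
    };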
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
index 72be927..cc8dee3 100644
--- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
@@ -27,6 +27,25 @@ RepeaterSource::~RepeaterSource() {
CHECK(!mStarted);
}
+double RepeaterSource::getFrameRate() const {
+ return mRateHz;
+}
+
+void RepeaterSource::setFrameRate(double rateHz) {
+ Mutex::Autolock autoLock(mLock);
+
+ if (rateHz == mRateHz) {
+ return;
+ }
+
+ if (mStartTimeUs >= 0ll) {
+ int64_t nextTimeUs = mStartTimeUs + (mFrameCount * 1000000ll) / mRateHz;
+ mStartTimeUs = nextTimeUs;
+ mFrameCount = 0;
+ }
+ mRateHz = rateHz;
+}
+
status_t RepeaterSource::start(MetaData *params) {
CHECK(!mStarted);
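Note: setFrameRate() added above rebases the timestamp sequence when the rate changes: the start time is moved to where the next frame would have landed under the old rate and the frame counter resets, so generated timestamps stay monotonic. A compact sketch of that arithmetic, assuming frames are stamped startTimeUs + frameCount * 1e6 / rateHz as the rebasing line implies:

    #include <cstdint>

    struct FrameClock {
        int64_t mStartTimeUs = -1;
        int64_t mFrameCount = 0;
        double mRateHz = 30.0;

        int64_t nextTimestampUs() const {
            return mStartTimeUs + (int64_t)((mFrameCount * 1000000ll) / mRateHz);
        }

        void setRate(double rateHz) {
            if (rateHz == mRateHz) {
                return;
            }
            if (mStartTimeUs >= 0) {
                // Rebase: continue from the next frame slot under the old rate.
                mStartTimeUs = nextTimestampUs();
                mFrameCount = 0;
            }
            mRateHz = rateHz;
        }
    };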
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.h b/media/libstagefright/wifi-display/source/RepeaterSource.h
index a13973c..8d414fd 100644
--- a/media/libstagefright/wifi-display/source/RepeaterSource.h
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.h
@@ -6,7 +6,7 @@
#include <media/stagefright/foundation/AHandlerReflector.h>
#include <media/stagefright/MediaSource.h>
-#define SUSPEND_VIDEO_IF_IDLE 1
+#define SUSPEND_VIDEO_IF_IDLE 0
namespace android {
@@ -28,6 +28,9 @@ struct RepeaterSource : public MediaSource {
// send updates in a while, this is its wakeup call.
void wakeUp();
+ double getFrameRate() const;
+ void setFrameRate(double rateHz);
+
protected:
virtual ~RepeaterSource();
diff --git a/media/libstagefright/wifi-display/source/Sender.cpp b/media/libstagefright/wifi-display/source/Sender.cpp
deleted file mode 100644
index 9048691..0000000
--- a/media/libstagefright/wifi-display/source/Sender.cpp
+++ /dev/null
@@ -1,870 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "Sender"
-#include <utils/Log.h>
-
-#include "Sender.h"
-
-#include "ANetworkSession.h"
-#include "TimeSeries.h"
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-static size_t kMaxRTPPacketSize = 1500;
-static size_t kMaxNumTSPacketsPerRTPPacket = (kMaxRTPPacketSize - 12) / 188;
-
-Sender::Sender(
- const sp<ANetworkSession> &netSession,
- const sp<AMessage> &notify)
- : mNetSession(netSession),
- mNotify(notify),
- mTransportMode(TRANSPORT_UDP),
- mRTPChannel(0),
- mRTCPChannel(0),
- mRTPPort(0),
- mRTPSessionID(0),
- mRTCPSessionID(0),
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- mRTPRetransmissionSessionID(0),
- mRTCPRetransmissionSessionID(0),
-#endif
- mClientRTPPort(0),
- mClientRTCPPort(0),
- mRTPConnected(false),
- mRTCPConnected(false),
- mFirstOutputBufferReadyTimeUs(-1ll),
- mFirstOutputBufferSentTimeUs(-1ll),
- mRTPSeqNo(0),
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- mRTPRetransmissionSeqNo(0),
-#endif
- mLastNTPTime(0),
- mLastRTPTime(0),
- mNumRTPSent(0),
- mNumRTPOctetsSent(0),
- mNumSRsSent(0),
- mSendSRPending(false)
-#if ENABLE_RETRANSMISSION
- ,mHistoryLength(0)
-#endif
-#if TRACK_BANDWIDTH
- ,mFirstPacketTimeUs(-1ll)
- ,mTotalBytesSent(0ll)
-#endif
-#if LOG_TRANSPORT_STREAM
- ,mLogFile(NULL)
-#endif
-{
-#if LOG_TRANSPORT_STREAM
- mLogFile = fopen("/system/etc/log.ts", "wb");
-#endif
-}
-
-Sender::~Sender() {
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- if (mRTCPRetransmissionSessionID != 0) {
- mNetSession->destroySession(mRTCPRetransmissionSessionID);
- }
-
- if (mRTPRetransmissionSessionID != 0) {
- mNetSession->destroySession(mRTPRetransmissionSessionID);
- }
-#endif
-
- if (mRTCPSessionID != 0) {
- mNetSession->destroySession(mRTCPSessionID);
- }
-
- if (mRTPSessionID != 0) {
- mNetSession->destroySession(mRTPSessionID);
- }
-
-#if LOG_TRANSPORT_STREAM
- if (mLogFile != NULL) {
- fclose(mLogFile);
- mLogFile = NULL;
- }
-#endif
-}
-
-status_t Sender::init(
- const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
- TransportMode transportMode) {
- mClientIP = clientIP;
- mTransportMode = transportMode;
-
- if (transportMode == TRANSPORT_TCP_INTERLEAVED) {
- mRTPChannel = clientRtp;
- mRTCPChannel = clientRtcp;
- mRTPPort = 0;
- mRTPSessionID = 0;
- mRTCPSessionID = 0;
- return OK;
- }
-
- mRTPChannel = 0;
- mRTCPChannel = 0;
-
- if (mTransportMode == TRANSPORT_TCP) {
- // XXX This is wrong, we need to allocate sockets here, we only
- // need to do this because the dongles are not establishing their
- // end until after PLAY instead of before SETUP.
- mRTPPort = 20000;
- mRTPSessionID = 0;
- mRTCPSessionID = 0;
- mClientRTPPort = clientRtp;
- mClientRTCPPort = clientRtcp;
- return OK;
- }
-
- int serverRtp;
-
- sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
- sp<AMessage> rtcpNotify = new AMessage(kWhatRTCPNotify, id());
-
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- sp<AMessage> rtpRetransmissionNotify =
- new AMessage(kWhatRTPRetransmissionNotify, id());
-
- sp<AMessage> rtcpRetransmissionNotify =
- new AMessage(kWhatRTCPRetransmissionNotify, id());
-#endif
-
- status_t err;
- for (serverRtp = 15550;; serverRtp += 2) {
- int32_t rtpSession;
- if (mTransportMode == TRANSPORT_UDP) {
- err = mNetSession->createUDPSession(
- serverRtp, clientIP, clientRtp,
- rtpNotify, &rtpSession);
- } else {
- err = mNetSession->createTCPDatagramSession(
- serverRtp, clientIP, clientRtp,
- rtpNotify, &rtpSession);
- }
-
- if (err != OK) {
- ALOGI("failed to create RTP socket on port %d", serverRtp);
- continue;
- }
-
- int32_t rtcpSession = 0;
-
- if (clientRtcp >= 0) {
- if (mTransportMode == TRANSPORT_UDP) {
- err = mNetSession->createUDPSession(
- serverRtp + 1, clientIP, clientRtcp,
- rtcpNotify, &rtcpSession);
- } else {
- err = mNetSession->createTCPDatagramSession(
- serverRtp + 1, clientIP, clientRtcp,
- rtcpNotify, &rtcpSession);
- }
-
- if (err != OK) {
- ALOGI("failed to create RTCP socket on port %d", serverRtp + 1);
-
- mNetSession->destroySession(rtpSession);
- continue;
- }
- }
-
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- if (mTransportMode == TRANSPORT_UDP) {
- int32_t rtpRetransmissionSession;
-
- err = mNetSession->createUDPSession(
- serverRtp + kRetransmissionPortOffset,
- clientIP,
- clientRtp + kRetransmissionPortOffset,
- rtpRetransmissionNotify,
- &rtpRetransmissionSession);
-
- if (err != OK) {
- mNetSession->destroySession(rtcpSession);
- mNetSession->destroySession(rtpSession);
- continue;
- }
-
- CHECK_GE(clientRtcp, 0);
-
- int32_t rtcpRetransmissionSession;
- err = mNetSession->createUDPSession(
- serverRtp + 1 + kRetransmissionPortOffset,
- clientIP,
- clientRtp + 1 + kRetransmissionPortOffset,
- rtcpRetransmissionNotify,
- &rtcpRetransmissionSession);
-
- if (err != OK) {
- mNetSession->destroySession(rtpRetransmissionSession);
- mNetSession->destroySession(rtcpSession);
- mNetSession->destroySession(rtpSession);
- continue;
- }
-
- mRTPRetransmissionSessionID = rtpRetransmissionSession;
- mRTCPRetransmissionSessionID = rtcpRetransmissionSession;
-
- ALOGI("rtpRetransmissionSessionID = %d, "
- "rtcpRetransmissionSessionID = %d",
- rtpRetransmissionSession, rtcpRetransmissionSession);
- }
-#endif
-
- mRTPPort = serverRtp;
- mRTPSessionID = rtpSession;
- mRTCPSessionID = rtcpSession;
-
- ALOGI("rtpSessionID = %d, rtcpSessionID = %d", rtpSession, rtcpSession);
- break;
- }
-
- if (mRTPPort == 0) {
- return UNKNOWN_ERROR;
- }
-
- return OK;
-}
-
-status_t Sender::finishInit() {
- if (mTransportMode != TRANSPORT_TCP) {
- notifyInitDone();
- return OK;
- }
-
- sp<AMessage> rtpNotify = new AMessage(kWhatRTPNotify, id());
-
- status_t err = mNetSession->createTCPDatagramSession(
- mRTPPort, mClientIP.c_str(), mClientRTPPort,
- rtpNotify, &mRTPSessionID);
-
- if (err != OK) {
- return err;
- }
-
- if (mClientRTCPPort >= 0) {
- sp<AMessage> rtcpNotify = new AMessage(kWhatRTCPNotify, id());
-
- err = mNetSession->createTCPDatagramSession(
- mRTPPort + 1, mClientIP.c_str(), mClientRTCPPort,
- rtcpNotify, &mRTCPSessionID);
-
- if (err != OK) {
- return err;
- }
- }
-
- return OK;
-}
-
-int32_t Sender::getRTPPort() const {
- return mRTPPort;
-}
-
-void Sender::queuePackets(
- int64_t timeUs, const sp<ABuffer> &tsPackets) {
- const size_t numTSPackets = tsPackets->size() / 188;
-
- const size_t numRTPPackets =
- (numTSPackets + kMaxNumTSPacketsPerRTPPacket - 1)
- / kMaxNumTSPacketsPerRTPPacket;
-
- sp<ABuffer> udpPackets = new ABuffer(
- numRTPPackets * (12 + kMaxNumTSPacketsPerRTPPacket * 188));
-
- udpPackets->meta()->setInt64("timeUs", timeUs);
-
- size_t dstOffset = 0;
- for (size_t i = 0; i < numTSPackets; ++i) {
- if ((i % kMaxNumTSPacketsPerRTPPacket) == 0) {
- static const bool kMarkerBit = false;
-
- uint8_t *rtp = udpPackets->data() + dstOffset;
- rtp[0] = 0x80;
- rtp[1] = 33 | (kMarkerBit ? (1 << 7) : 0); // M-bit
- rtp[2] = (mRTPSeqNo >> 8) & 0xff;
- rtp[3] = mRTPSeqNo & 0xff;
- rtp[4] = 0x00; // rtp time to be filled in later.
- rtp[5] = 0x00;
- rtp[6] = 0x00;
- rtp[7] = 0x00;
- rtp[8] = kSourceID >> 24;
- rtp[9] = (kSourceID >> 16) & 0xff;
- rtp[10] = (kSourceID >> 8) & 0xff;
- rtp[11] = kSourceID & 0xff;
-
- ++mRTPSeqNo;
-
- dstOffset += 12;
- }
-
- memcpy(udpPackets->data() + dstOffset,
- tsPackets->data() + 188 * i,
- 188);
-
- dstOffset += 188;
- }
-
- udpPackets->setRange(0, dstOffset);
-
- sp<AMessage> msg = new AMessage(kWhatDrainQueue, id());
- msg->setBuffer("udpPackets", udpPackets);
- msg->post();
-
-#if LOG_TRANSPORT_STREAM
- if (mLogFile != NULL) {
- fwrite(tsPackets->data(), 1, tsPackets->size(), mLogFile);
- }
-#endif
-}
-
-void Sender::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatRTPNotify:
- case kWhatRTCPNotify:
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- case kWhatRTPRetransmissionNotify:
- case kWhatRTCPRetransmissionNotify:
-#endif
- {
- int32_t reason;
- CHECK(msg->findInt32("reason", &reason));
-
- switch (reason) {
- case ANetworkSession::kWhatError:
- {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- int32_t err;
- CHECK(msg->findInt32("err", &err));
-
- int32_t errorOccuredDuringSend;
- CHECK(msg->findInt32("send", &errorOccuredDuringSend));
-
- AString detail;
- CHECK(msg->findString("detail", &detail));
-
- if ((msg->what() == kWhatRTPNotify
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- || msg->what() == kWhatRTPRetransmissionNotify
-#endif
- ) && !errorOccuredDuringSend) {
- // This is ok, we don't expect to receive anything on
- // the RTP socket.
- break;
- }
-
- ALOGE("An error occurred during %s in session %d "
- "(%d, '%s' (%s)).",
- errorOccuredDuringSend ? "send" : "receive",
- sessionID,
- err,
- detail.c_str(),
- strerror(-err));
-
- mNetSession->destroySession(sessionID);
-
- if (sessionID == mRTPSessionID) {
- mRTPSessionID = 0;
- } else if (sessionID == mRTCPSessionID) {
- mRTCPSessionID = 0;
- }
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- else if (sessionID == mRTPRetransmissionSessionID) {
- mRTPRetransmissionSessionID = 0;
- } else if (sessionID == mRTCPRetransmissionSessionID) {
- mRTCPRetransmissionSessionID = 0;
- }
-#endif
-
- notifySessionDead();
- break;
- }
-
- case ANetworkSession::kWhatDatagram:
- {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- sp<ABuffer> data;
- CHECK(msg->findBuffer("data", &data));
-
- status_t err;
- if (msg->what() == kWhatRTCPNotify
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- || msg->what() == kWhatRTCPRetransmissionNotify
-#endif
- )
- {
- err = parseRTCP(data);
- }
- break;
- }
-
- case ANetworkSession::kWhatConnected:
- {
- CHECK_EQ(mTransportMode, TRANSPORT_TCP);
-
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- if (sessionID == mRTPSessionID) {
- CHECK(!mRTPConnected);
- mRTPConnected = true;
- ALOGI("RTP Session now connected.");
- } else if (sessionID == mRTCPSessionID) {
- CHECK(!mRTCPConnected);
- mRTCPConnected = true;
- ALOGI("RTCP Session now connected.");
- } else {
- TRESPASS();
- }
-
- if (mRTPConnected
- && (mClientRTCPPort < 0 || mRTCPConnected)) {
- notifyInitDone();
- }
- break;
- }
-
- default:
- TRESPASS();
- }
- break;
- }
-
- case kWhatDrainQueue:
- {
- sp<ABuffer> udpPackets;
- CHECK(msg->findBuffer("udpPackets", &udpPackets));
-
- onDrainQueue(udpPackets);
- break;
- }
-
- case kWhatSendSR:
- {
- mSendSRPending = false;
-
- if (mRTCPSessionID == 0) {
- break;
- }
-
- onSendSR();
-
- scheduleSendSR();
- break;
- }
- }
-}
-
-void Sender::scheduleSendSR() {
- if (mSendSRPending || mRTCPSessionID == 0) {
- return;
- }
-
- mSendSRPending = true;
- (new AMessage(kWhatSendSR, id()))->post(kSendSRIntervalUs);
-}
-
-void Sender::addSR(const sp<ABuffer> &buffer) {
- uint8_t *data = buffer->data() + buffer->size();
-
- // TODO: Use macros/utility functions to clean up all the bitshifts below.
-
- data[0] = 0x80 | 0;
- data[1] = 200; // SR
- data[2] = 0;
- data[3] = 6;
- data[4] = kSourceID >> 24;
- data[5] = (kSourceID >> 16) & 0xff;
- data[6] = (kSourceID >> 8) & 0xff;
- data[7] = kSourceID & 0xff;
-
- data[8] = mLastNTPTime >> (64 - 8);
- data[9] = (mLastNTPTime >> (64 - 16)) & 0xff;
- data[10] = (mLastNTPTime >> (64 - 24)) & 0xff;
- data[11] = (mLastNTPTime >> 32) & 0xff;
- data[12] = (mLastNTPTime >> 24) & 0xff;
- data[13] = (mLastNTPTime >> 16) & 0xff;
- data[14] = (mLastNTPTime >> 8) & 0xff;
- data[15] = mLastNTPTime & 0xff;
-
- data[16] = (mLastRTPTime >> 24) & 0xff;
- data[17] = (mLastRTPTime >> 16) & 0xff;
- data[18] = (mLastRTPTime >> 8) & 0xff;
- data[19] = mLastRTPTime & 0xff;
-
- data[20] = mNumRTPSent >> 24;
- data[21] = (mNumRTPSent >> 16) & 0xff;
- data[22] = (mNumRTPSent >> 8) & 0xff;
- data[23] = mNumRTPSent & 0xff;
-
- data[24] = mNumRTPOctetsSent >> 24;
- data[25] = (mNumRTPOctetsSent >> 16) & 0xff;
- data[26] = (mNumRTPOctetsSent >> 8) & 0xff;
- data[27] = mNumRTPOctetsSent & 0xff;
-
- buffer->setRange(buffer->offset(), buffer->size() + 28);
-}
-
-void Sender::addSDES(const sp<ABuffer> &buffer) {
- uint8_t *data = buffer->data() + buffer->size();
- data[0] = 0x80 | 1;
- data[1] = 202; // SDES
- data[4] = kSourceID >> 24;
- data[5] = (kSourceID >> 16) & 0xff;
- data[6] = (kSourceID >> 8) & 0xff;
- data[7] = kSourceID & 0xff;
-
- size_t offset = 8;
-
- data[offset++] = 1; // CNAME
-
- static const char *kCNAME = "someone@somewhere";
- data[offset++] = strlen(kCNAME);
-
- memcpy(&data[offset], kCNAME, strlen(kCNAME));
- offset += strlen(kCNAME);
-
- data[offset++] = 7; // NOTE
-
- static const char *kNOTE = "Hell's frozen over.";
- data[offset++] = strlen(kNOTE);
-
- memcpy(&data[offset], kNOTE, strlen(kNOTE));
- offset += strlen(kNOTE);
-
- data[offset++] = 0;
-
- if ((offset % 4) > 0) {
- size_t count = 4 - (offset % 4);
- switch (count) {
- case 3:
- data[offset++] = 0;
- case 2:
- data[offset++] = 0;
- case 1:
- data[offset++] = 0;
- }
- }
-
- size_t numWords = (offset / 4) - 1;
- data[2] = numWords >> 8;
- data[3] = numWords & 0xff;
-
- buffer->setRange(buffer->offset(), buffer->size() + offset);
-}
-
-// static
-uint64_t Sender::GetNowNTP() {
- uint64_t nowUs = ALooper::GetNowUs();
-
- nowUs += ((70ll * 365 + 17) * 24) * 60 * 60 * 1000000ll;
-
- uint64_t hi = nowUs / 1000000ll;
- uint64_t lo = ((1ll << 32) * (nowUs % 1000000ll)) / 1000000ll;
-
- return (hi << 32) | lo;
-}
-
-void Sender::onSendSR() {
- sp<ABuffer> buffer = new ABuffer(1500);
- buffer->setRange(0, 0);
-
- addSR(buffer);
- addSDES(buffer);
-
- if (mTransportMode == TRANSPORT_TCP_INTERLEAVED) {
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatBinaryData);
- notify->setInt32("channel", mRTCPChannel);
- notify->setBuffer("data", buffer);
- notify->post();
- } else {
- sendPacket(mRTCPSessionID, buffer->data(), buffer->size());
- }
-
- ++mNumSRsSent;
-}
-
-#if ENABLE_RETRANSMISSION
-status_t Sender::parseTSFB(
- const uint8_t *data, size_t size) {
- if ((data[0] & 0x1f) != 1) {
- return ERROR_UNSUPPORTED; // We only support NACK for now.
- }
-
- uint32_t srcId = U32_AT(&data[8]);
- if (srcId != kSourceID) {
- return ERROR_MALFORMED;
- }
-
- for (size_t i = 12; i < size; i += 4) {
- uint16_t seqNo = U16_AT(&data[i]);
- uint16_t blp = U16_AT(&data[i + 2]);
-
- List<sp<ABuffer> >::iterator it = mHistory.begin();
- bool foundSeqNo = false;
- while (it != mHistory.end()) {
- const sp<ABuffer> &buffer = *it;
-
- uint16_t bufferSeqNo = buffer->int32Data() & 0xffff;
-
- bool retransmit = false;
- if (bufferSeqNo == seqNo) {
- retransmit = true;
- } else if (blp != 0) {
- for (size_t i = 0; i < 16; ++i) {
- if ((blp & (1 << i))
- && (bufferSeqNo == ((seqNo + i + 1) & 0xffff))) {
- blp &= ~(1 << i);
- retransmit = true;
- }
- }
- }
-
- if (retransmit) {
- ALOGI("retransmitting seqNo %d", bufferSeqNo);
-
-#if RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- sp<ABuffer> retransRTP = new ABuffer(2 + buffer->size());
- uint8_t *rtp = retransRTP->data();
- memcpy(rtp, buffer->data(), 12);
- rtp[2] = (mRTPRetransmissionSeqNo >> 8) & 0xff;
- rtp[3] = mRTPRetransmissionSeqNo & 0xff;
- rtp[12] = (bufferSeqNo >> 8) & 0xff;
- rtp[13] = bufferSeqNo & 0xff;
- memcpy(&rtp[14], buffer->data() + 12, buffer->size() - 12);
-
- ++mRTPRetransmissionSeqNo;
-
- sendPacket(
- mRTPRetransmissionSessionID,
- retransRTP->data(), retransRTP->size());
-#else
- sendPacket(
- mRTPSessionID, buffer->data(), buffer->size());
-#endif
-
- if (bufferSeqNo == seqNo) {
- foundSeqNo = true;
- }
-
- if (foundSeqNo && blp == 0) {
- break;
- }
- }
-
- ++it;
- }
-
- if (!foundSeqNo || blp != 0) {
- ALOGI("Some sequence numbers were no longer available for "
- "retransmission");
- }
- }
-
- return OK;
-}
-#endif
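
parseTSFB() handles Generic NACK feedback: each 4-byte FCI entry carries a packet ID (the first lost sequence number) plus a 16-bit bitmask (BLP) marking which of the following 16 sequence numbers were also lost, and the loop above retransmits every history entry that matches. A small sketch of that expansion in isolation (generic RFC 4585 semantics, not lifted from the deleted file):

    #include <cstdint>
    #include <vector>

    // Expand one NACK FCI entry (PID, BLP) into the full list of sequence
    // numbers the receiver reports as lost.
    static std::vector<uint16_t> expandNACKEntry(uint16_t pid, uint16_t blp) {
        std::vector<uint16_t> lost;
        lost.push_back(pid);
        for (int i = 0; i < 16; ++i) {
            if (blp & (1 << i)) {
                lost.push_back(static_cast<uint16_t>((pid + i + 1) & 0xffff));
            }
        }
        return lost;
    }
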
-
-status_t Sender::parseRTCP(
- const sp<ABuffer> &buffer) {
- const uint8_t *data = buffer->data();
- size_t size = buffer->size();
-
- while (size > 0) {
- if (size < 8) {
- // Too short to be a valid RTCP header
- return ERROR_MALFORMED;
- }
-
- if ((data[0] >> 6) != 2) {
- // Unsupported version.
- return ERROR_UNSUPPORTED;
- }
-
- if (data[0] & 0x20) {
- // Padding present.
-
- size_t paddingLength = data[size - 1];
-
- if (paddingLength + 12 > size) {
- // If we removed this much padding we'd end up with something
- // that's too short to be a valid RTP header.
- return ERROR_MALFORMED;
- }
-
- size -= paddingLength;
- }
-
- size_t headerLength = 4 * (data[2] << 8 | data[3]) + 4;
-
- if (size < headerLength) {
- // Only received a partial packet?
- return ERROR_MALFORMED;
- }
-
- switch (data[1]) {
- case 200:
- case 201: // RR
- case 202: // SDES
- case 203:
- case 204: // APP
- break;
-
-#if ENABLE_RETRANSMISSION
- case 205: // TSFB (transport layer specific feedback)
- parseTSFB(data, headerLength);
- break;
-#endif
-
- case 206: // PSFB (payload specific feedback)
- hexdump(data, headerLength);
- break;
-
- default:
- {
- ALOGW("Unknown RTCP packet type %u of size %d",
- (unsigned)data[1], headerLength);
- break;
- }
- }
-
- data += headerLength;
- size -= headerLength;
- }
-
- return OK;
-}
-
-status_t Sender::sendPacket(
- int32_t sessionID, const void *data, size_t size) {
- return mNetSession->sendRequest(sessionID, data, size);
-}
-
-void Sender::notifyInitDone() {
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatInitDone);
- notify->post();
-}
-
-void Sender::notifySessionDead() {
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatSessionDead);
- notify->post();
-}
-
-void Sender::onDrainQueue(const sp<ABuffer> &udpPackets) {
- static const size_t kFullRTPPacketSize =
- 12 + 188 * kMaxNumTSPacketsPerRTPPacket;
-
- size_t srcOffset = 0;
- while (srcOffset < udpPackets->size()) {
- uint8_t *rtp = udpPackets->data() + srcOffset;
-
- size_t rtpPacketSize = udpPackets->size() - srcOffset;
- if (rtpPacketSize > kFullRTPPacketSize) {
- rtpPacketSize = kFullRTPPacketSize;
- }
-
- int64_t nowUs = ALooper::GetNowUs();
- mLastNTPTime = GetNowNTP();
-
- // 90kHz time scale
- uint32_t rtpTime = (nowUs * 9ll) / 100ll;
-
- rtp[4] = rtpTime >> 24;
- rtp[5] = (rtpTime >> 16) & 0xff;
- rtp[6] = (rtpTime >> 8) & 0xff;
- rtp[7] = rtpTime & 0xff;
-
- ++mNumRTPSent;
- mNumRTPOctetsSent += rtpPacketSize - 12;
-
- mLastRTPTime = rtpTime;
-
- if (mTransportMode == TRANSPORT_TCP_INTERLEAVED) {
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatBinaryData);
-
- sp<ABuffer> data = new ABuffer(rtpPacketSize);
- memcpy(data->data(), rtp, rtpPacketSize);
-
- notify->setInt32("channel", mRTPChannel);
- notify->setBuffer("data", data);
- notify->post();
- } else {
- sendPacket(mRTPSessionID, rtp, rtpPacketSize);
-
-#if TRACK_BANDWIDTH
- mTotalBytesSent += rtpPacketSize->size();
- int64_t delayUs = ALooper::GetNowUs() - mFirstPacketTimeUs;
-
- if (delayUs > 0ll) {
- ALOGI("approx. net bandwidth used: %.2f Mbit/sec",
- mTotalBytesSent * 8.0 / delayUs);
- }
-#endif
- }
-
-#if ENABLE_RETRANSMISSION
- addToHistory(rtp, rtpPacketSize);
-#endif
-
- srcOffset += rtpPacketSize;
- }
-
-#if 0
- int64_t timeUs;
- CHECK(udpPackets->meta()->findInt64("timeUs", &timeUs));
-
- ALOGI("dTimeUs = %lld us", ALooper::GetNowUs() - timeUs);
-#endif
-}
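
The RTP timestamps written above use the 90 kHz media clock required for MPEG-2 TS over RTP, hence the factor of 9/100 applied to microseconds: for example, 1,000,000 us of wall-clock time advances the RTP timestamp by 90,000 ticks.
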
-
-#if ENABLE_RETRANSMISSION
-void Sender::addToHistory(const uint8_t *rtp, size_t rtpPacketSize) {
- sp<ABuffer> packet = new ABuffer(rtpPacketSize);
- memcpy(packet->data(), rtp, rtpPacketSize);
-
- unsigned rtpSeqNo = U16_AT(&rtp[2]);
- packet->setInt32Data(rtpSeqNo);
-
- mHistory.push_back(packet);
- ++mHistoryLength;
-
- if (mHistoryLength > kMaxHistoryLength) {
- mHistory.erase(mHistory.begin());
- --mHistoryLength;
- }
-}
-#endif
-
-} // namespace android
-
diff --git a/media/libstagefright/wifi-display/source/Sender.h b/media/libstagefright/wifi-display/source/Sender.h
deleted file mode 100644
index 66951f7..0000000
--- a/media/libstagefright/wifi-display/source/Sender.h
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SENDER_H_
-
-#define SENDER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-
-namespace android {
-
-#define LOG_TRANSPORT_STREAM 0
-#define TRACK_BANDWIDTH 0
-
-#define ENABLE_RETRANSMISSION 1
-
-// If retransmission is enabled the following define determines what
-// kind we support, if RETRANSMISSION_ACCORDING_TO_RFC_XXXX is 0
-// we'll send NACKs on the original RTCP channel and retransmit packets
-// on the original RTP channel, otherwise a separate channel pair is used
-// for this purpose.
-#define RETRANSMISSION_ACCORDING_TO_RFC_XXXX 0
-
-struct ABuffer;
-struct ANetworkSession;
-
-struct Sender : public AHandler {
- Sender(const sp<ANetworkSession> &netSession, const sp<AMessage> &notify);
-
- enum {
- kWhatInitDone,
- kWhatSessionDead,
- kWhatBinaryData,
- };
-
- enum TransportMode {
- TRANSPORT_UDP,
- TRANSPORT_TCP_INTERLEAVED,
- TRANSPORT_TCP,
- };
- status_t init(
- const char *clientIP, int32_t clientRtp, int32_t clientRtcp,
- TransportMode transportMode);
-
- status_t finishInit();
-
- int32_t getRTPPort() const;
-
- void queuePackets(int64_t timeUs, const sp<ABuffer> &tsPackets);
- void scheduleSendSR();
-
-protected:
- virtual ~Sender();
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
- enum {
- kWhatDrainQueue,
- kWhatSendSR,
- kWhatRTPNotify,
- kWhatRTCPNotify,
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- kWhatRTPRetransmissionNotify,
- kWhatRTCPRetransmissionNotify,
-#endif
- };
-
- static const int64_t kSendSRIntervalUs = 10000000ll;
-
- static const uint32_t kSourceID = 0xdeadbeef;
- static const size_t kMaxHistoryLength = 128;
-
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- static const size_t kRetransmissionPortOffset = 120;
-#endif
-
- sp<ANetworkSession> mNetSession;
- sp<AMessage> mNotify;
-
- TransportMode mTransportMode;
- AString mClientIP;
-
- // in TCP mode
- int32_t mRTPChannel;
- int32_t mRTCPChannel;
-
- // in UDP mode
- int32_t mRTPPort;
- int32_t mRTPSessionID;
- int32_t mRTCPSessionID;
-
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- int32_t mRTPRetransmissionSessionID;
- int32_t mRTCPRetransmissionSessionID;
-#endif
-
- int32_t mClientRTPPort;
- int32_t mClientRTCPPort;
- bool mRTPConnected;
- bool mRTCPConnected;
-
- int64_t mFirstOutputBufferReadyTimeUs;
- int64_t mFirstOutputBufferSentTimeUs;
-
- uint32_t mRTPSeqNo;
-#if ENABLE_RETRANSMISSION && RETRANSMISSION_ACCORDING_TO_RFC_XXXX
- uint32_t mRTPRetransmissionSeqNo;
-#endif
-
- uint64_t mLastNTPTime;
- uint32_t mLastRTPTime;
- uint32_t mNumRTPSent;
- uint32_t mNumRTPOctetsSent;
- uint32_t mNumSRsSent;
-
- bool mSendSRPending;
-
-#if ENABLE_RETRANSMISSION
- List<sp<ABuffer> > mHistory;
- size_t mHistoryLength;
-#endif
-
-#if TRACK_BANDWIDTH
- int64_t mFirstPacketTimeUs;
- uint64_t mTotalBytesSent;
-#endif
-
-#if LOG_TRANSPORT_STREAM
- FILE *mLogFile;
-#endif
-
- void onSendSR();
- void addSR(const sp<ABuffer> &buffer);
- void addSDES(const sp<ABuffer> &buffer);
- static uint64_t GetNowNTP();
-
-#if ENABLE_RETRANSMISSION
- status_t parseTSFB(const uint8_t *data, size_t size);
- void addToHistory(const uint8_t *rtp, size_t rtpPacketSize);
-#endif
-
- status_t parseRTCP(const sp<ABuffer> &buffer);
-
- status_t sendPacket(int32_t sessionID, const void *data, size_t size);
-
- void notifyInitDone();
- void notifySessionDead();
-
- void onDrainQueue(const sp<ABuffer> &udpPackets);
-
- DISALLOW_EVIL_CONSTRUCTORS(Sender);
-};
-
-} // namespace android
-
-#endif // SENDER_H_
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.cpp b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
index ef57a4d..2c4a373 100644
--- a/media/libstagefright/wifi-display/source/TSPacketizer.cpp
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.cpp
@@ -58,6 +58,7 @@ struct TSPacketizer::Track : public RefBase {
sp<ABuffer> descriptorAt(size_t index) const;
void finalize();
+ void extractCSDIfNecessary();
protected:
virtual ~Track();
@@ -77,6 +78,7 @@ private:
bool mAudioLacksATDSHeaders;
bool mFinalized;
+ bool mExtractedCSD;
DISALLOW_EVIL_CONSTRUCTORS(Track);
};
@@ -90,14 +92,21 @@ TSPacketizer::Track::Track(
mStreamID(streamID),
mContinuityCounter(0),
mAudioLacksATDSHeaders(false),
- mFinalized(false) {
+ mFinalized(false),
+ mExtractedCSD(false) {
CHECK(format->findString("mime", &mMIME));
+}
+
+void TSPacketizer::Track::extractCSDIfNecessary() {
+ if (mExtractedCSD) {
+ return;
+ }
if (!strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_VIDEO_AVC)
|| !strcasecmp(mMIME.c_str(), MEDIA_MIMETYPE_AUDIO_AAC)) {
for (size_t i = 0;; ++i) {
sp<ABuffer> csd;
- if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
+ if (!mFormat->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
break;
}
@@ -111,6 +120,8 @@ TSPacketizer::Track::Track(
}
}
}
+
+ mExtractedCSD = true;
}
TSPacketizer::Track::~Track() {
@@ -250,7 +261,7 @@ void TSPacketizer::Track::finalize() {
data[0] = 40; // descriptor_tag
data[1] = 4; // descriptor_length
- CHECK_EQ(mCSD.size(), 1u);
+ CHECK_GE(mCSD.size(), 1u);
const sp<ABuffer> &sps = mCSD.itemAt(0);
CHECK(!memcmp("\x00\x00\x00\x01", sps->data(), 4));
CHECK_GE(sps->size(), 7u);
@@ -314,12 +325,31 @@ void TSPacketizer::Track::finalize() {
mDescriptors.push_back(descriptor);
}
- int32_t hdcpVersion;
- if (mFormat->findInt32("hdcp-version", &hdcpVersion)) {
- // HDCP descriptor
+ mFinalized = true;
+}
+
+////////////////////////////////////////////////////////////////////////////////
- CHECK(hdcpVersion == 0x20 || hdcpVersion == 0x21);
+TSPacketizer::TSPacketizer(uint32_t flags)
+ : mFlags(flags),
+ mPATContinuityCounter(0),
+ mPMTContinuityCounter(0) {
+ initCrcTable();
+ if (flags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR)) {
+ int32_t hdcpVersion;
+ if (flags & EMIT_HDCP20_DESCRIPTOR) {
+ CHECK(!(flags & EMIT_HDCP21_DESCRIPTOR));
+ hdcpVersion = 0x20;
+ } else {
+ CHECK(!(flags & EMIT_HDCP20_DESCRIPTOR));
+
+ // HDCP2.0 _and_ HDCP 2.1 specs say to set the version
+ // inside the HDCP descriptor to 0x20!!!
+ hdcpVersion = 0x20;
+ }
+
+ // HDCP descriptor
sp<ABuffer> descriptor = new ABuffer(7);
uint8_t *data = descriptor->data();
data[0] = 0x05; // descriptor_tag
@@ -330,18 +360,8 @@ void TSPacketizer::Track::finalize() {
data[5] = 'P';
data[6] = hdcpVersion;
- mDescriptors.push_back(descriptor);
+ mProgramInfoDescriptors.push_back(descriptor);
}
-
- mFinalized = true;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-TSPacketizer::TSPacketizer()
- : mPATContinuityCounter(0),
- mPMTContinuityCounter(0) {
- initCrcTable();
}
TSPacketizer::~TSPacketizer() {
@@ -407,6 +427,17 @@ ssize_t TSPacketizer::addTrack(const sp<AMessage> &format) {
return mTracks.add(track);
}
+status_t TSPacketizer::extractCSDIfNecessary(size_t trackIndex) {
+ if (trackIndex >= mTracks.size()) {
+ return -ERANGE;
+ }
+
+ const sp<Track> &track = mTracks.itemAt(trackIndex);
+ track->extractCSDIfNecessary();
+
+ return OK;
+}
+
status_t TSPacketizer::packetize(
size_t trackIndex,
const sp<ABuffer> &_accessUnit,
@@ -471,16 +502,121 @@ status_t TSPacketizer::packetize(
// reserved = b1
// the first fragment of "buffer" follows
+ // Each transport packet (except for the last one contributing to the PES
+ // payload) must contain a multiple of 16 bytes of payload per HDCP spec.
+ bool alignPayload =
+ (mFlags & (EMIT_HDCP20_DESCRIPTOR | EMIT_HDCP21_DESCRIPTOR));
+
+ /*
+ a) The very first PES transport stream packet contains
+
+ 4 bytes of TS header
+ ... padding
+ 14 bytes of static PES header
+ PES_private_data_len + 1 bytes (only if PES_private_data_len > 0)
+ numStuffingBytes bytes
+
+ followed by the payload
+
+ b) Subsequent PES transport stream packets contain
+
+ 4 bytes of TS header
+ ... padding
+
+ followed by the payload
+ */
+
size_t PES_packet_length = accessUnit->size() + 8 + numStuffingBytes;
if (PES_private_data_len > 0) {
PES_packet_length += PES_private_data_len + 1;
}
- size_t numTSPackets;
- if (PES_packet_length <= 178) {
- numTSPackets = 1;
- } else {
- numTSPackets = 1 + ((PES_packet_length - 178) + 183) / 184;
+ size_t numTSPackets = 1;
+
+ {
+ // Make sure the PES header fits into a single TS packet:
+ size_t PES_header_size = 14 + numStuffingBytes;
+ if (PES_private_data_len > 0) {
+ PES_header_size += PES_private_data_len + 1;
+ }
+
+ CHECK_LE(PES_header_size, 188u - 4u);
+
+ size_t sizeAvailableForPayload = 188 - 4 - PES_header_size;
+ size_t numBytesOfPayload = accessUnit->size();
+
+ if (numBytesOfPayload > sizeAvailableForPayload) {
+ numBytesOfPayload = sizeAvailableForPayload;
+
+ if (alignPayload && numBytesOfPayload > 16) {
+ numBytesOfPayload -= (numBytesOfPayload % 16);
+ }
+ }
+
+ // size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload;
+ ALOGV("packet 1 contains %zd padding bytes and %zd bytes of payload",
+ numPaddingBytes, numBytesOfPayload);
+
+ size_t numBytesOfPayloadRemaining = accessUnit->size() - numBytesOfPayload;
+
+#if 0
+ // The following hopefully illustrates the logic that led to the
+ // more efficient computation in the #else block...
+
+ while (numBytesOfPayloadRemaining > 0) {
+ size_t sizeAvailableForPayload = 188 - 4;
+
+ size_t numBytesOfPayload = numBytesOfPayloadRemaining;
+
+ if (numBytesOfPayload > sizeAvailableForPayload) {
+ numBytesOfPayload = sizeAvailableForPayload;
+
+ if (alignPayload && numBytesOfPayload > 16) {
+ numBytesOfPayload -= (numBytesOfPayload % 16);
+ }
+ }
+
+ size_t numPaddingBytes = sizeAvailableForPayload - numBytesOfPayload;
+ ALOGI("packet %zd contains %zd padding bytes and %zd bytes of payload",
+ numTSPackets + 1, numPaddingBytes, numBytesOfPayload);
+
+ numBytesOfPayloadRemaining -= numBytesOfPayload;
+ ++numTSPackets;
+ }
+#else
+ // This is how many bytes of payload each subsequent TS packet
+ // can contain at most.
+ sizeAvailableForPayload = 188 - 4;
+ size_t sizeAvailableForAlignedPayload = sizeAvailableForPayload;
+ if (alignPayload) {
+ // We're only going to use a subset of the available space
+ // since we need to make each fragment a multiple of 16 in size.
+ sizeAvailableForAlignedPayload -=
+ (sizeAvailableForAlignedPayload % 16);
+ }
+
+ size_t numFullTSPackets =
+ numBytesOfPayloadRemaining / sizeAvailableForAlignedPayload;
+
+ numTSPackets += numFullTSPackets;
+
+ numBytesOfPayloadRemaining -=
+ numFullTSPackets * sizeAvailableForAlignedPayload;
+
+ // numBytesOfPayloadRemaining < sizeAvailableForAlignedPayload
+ if (numFullTSPackets == 0 && numBytesOfPayloadRemaining > 0) {
+ // There wasn't enough payload left to form a full aligned payload,
+ // the last packet doesn't have to be aligned.
+ ++numTSPackets;
+ } else if (numFullTSPackets > 0
+ && numBytesOfPayloadRemaining
+ + sizeAvailableForAlignedPayload > sizeAvailableForPayload) {
+ // The last packet emitted had a full aligned payload and together
+ // with the bytes remaining does exceed the unaligned payload
+ // size, so we need another packet.
+ ++numTSPackets;
+ }
+#endif
}
if (flags & EMIT_PAT_AND_PMT) {
@@ -583,8 +719,9 @@ status_t TSPacketizer::packetize(
// reserved = b111
// PCR_PID = kPCR_PID (13 bits)
// reserved = b1111
- // program_info_length = 0x000
- // one or more elementary stream descriptions follow:
+ // program_info_length = 0x???
+ // program_info_descriptors follow
+ // one or more elementary stream descriptions follow:
// stream_type = 0x??
// reserved = b111
// elementary_PID = b? ???? ???? ???? (13 bits)
@@ -616,8 +753,21 @@ status_t TSPacketizer::packetize(
*ptr++ = 0x00;
*ptr++ = 0xe0 | (kPID_PCR >> 8);
*ptr++ = kPID_PCR & 0xff;
- *ptr++ = 0xf0;
- *ptr++ = 0x00;
+
+ size_t program_info_length = 0;
+ for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) {
+ program_info_length += mProgramInfoDescriptors.itemAt(i)->size();
+ }
+
+ CHECK_LT(program_info_length, 0x400);
+ *ptr++ = 0xf0 | (program_info_length >> 8);
+ *ptr++ = (program_info_length & 0xff);
+
+ for (size_t i = 0; i < mProgramInfoDescriptors.size(); ++i) {
+ const sp<ABuffer> &desc = mProgramInfoDescriptors.itemAt(i);
+ memcpy(ptr, desc->data(), desc->size());
+ ptr += desc->size();
+ }
for (size_t i = 0; i < mTracks.size(); ++i) {
const sp<Track> &track = mTracks.itemAt(i);
@@ -710,8 +860,6 @@ status_t TSPacketizer::packetize(
uint64_t PTS = (timeUs * 9ll) / 100ll;
- bool padding = (PES_packet_length < (188 - 10));
-
if (PES_packet_length >= 65536) {
// This really should only happen for video.
CHECK(track->isVideo());
@@ -720,19 +868,37 @@ status_t TSPacketizer::packetize(
PES_packet_length = 0;
}
+ size_t sizeAvailableForPayload = 188 - 4 - 14 - numStuffingBytes;
+ if (PES_private_data_len > 0) {
+ sizeAvailableForPayload -= PES_private_data_len + 1;
+ }
+
+ size_t copy = accessUnit->size();
+
+ if (copy > sizeAvailableForPayload) {
+ copy = sizeAvailableForPayload;
+
+ if (alignPayload && copy > 16) {
+ copy -= (copy % 16);
+ }
+ }
+
+ size_t numPaddingBytes = sizeAvailableForPayload - copy;
+
uint8_t *ptr = packetDataStart;
*ptr++ = 0x47;
*ptr++ = 0x40 | (track->PID() >> 8);
*ptr++ = track->PID() & 0xff;
- *ptr++ = (padding ? 0x30 : 0x10) | track->incrementContinuityCounter();
- if (padding) {
- size_t paddingSize = 188 - 10 - PES_packet_length;
- *ptr++ = paddingSize - 1;
- if (paddingSize >= 2) {
+ *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10)
+ | track->incrementContinuityCounter();
+
+ if (numPaddingBytes > 0) {
+ *ptr++ = numPaddingBytes - 1;
+ if (numPaddingBytes >= 2) {
*ptr++ = 0x00;
- memset(ptr, 0xff, paddingSize - 2);
- ptr += paddingSize - 2;
+ memset(ptr, 0xff, numPaddingBytes - 2);
+ ptr += numPaddingBytes - 2;
}
}
@@ -768,25 +934,14 @@ status_t TSPacketizer::packetize(
*ptr++ = 0xff;
}
- // 18 bytes of TS/PES header leave 188 - 18 = 170 bytes for the payload
-
- size_t sizeLeft = packetDataStart + 188 - ptr;
- size_t copy = accessUnit->size();
- if (copy > sizeLeft) {
- copy = sizeLeft;
- }
-
memcpy(ptr, accessUnit->data(), copy);
ptr += copy;
- CHECK_EQ(sizeLeft, copy);
- memset(ptr, 0xff, sizeLeft - copy);
+ CHECK_EQ(ptr, packetDataStart + 188);
packetDataStart += 188;
size_t offset = copy;
while (offset < accessUnit->size()) {
- bool padding = (accessUnit->size() - offset) < (188 - 4);
-
// for subsequent fragments of "buffer":
// 0x47
// transport_error_indicator = b0
@@ -798,35 +953,40 @@ status_t TSPacketizer::packetize(
// continuity_counter = b????
// the fragment of "buffer" follows.
+ size_t sizeAvailableForPayload = 188 - 4;
+
+ size_t copy = accessUnit->size() - offset;
+
+ if (copy > sizeAvailableForPayload) {
+ copy = sizeAvailableForPayload;
+
+ if (alignPayload && copy > 16) {
+ copy -= (copy % 16);
+ }
+ }
+
+ size_t numPaddingBytes = sizeAvailableForPayload - copy;
+
uint8_t *ptr = packetDataStart;
*ptr++ = 0x47;
*ptr++ = 0x00 | (track->PID() >> 8);
*ptr++ = track->PID() & 0xff;
- *ptr++ = (padding ? 0x30 : 0x10) | track->incrementContinuityCounter();
+ *ptr++ = (numPaddingBytes > 0 ? 0x30 : 0x10)
+ | track->incrementContinuityCounter();
- if (padding) {
- size_t paddingSize = 188 - 4 - (accessUnit->size() - offset);
- *ptr++ = paddingSize - 1;
- if (paddingSize >= 2) {
+ if (numPaddingBytes > 0) {
+ *ptr++ = numPaddingBytes - 1;
+ if (numPaddingBytes >= 2) {
*ptr++ = 0x00;
- memset(ptr, 0xff, paddingSize - 2);
- ptr += paddingSize - 2;
+ memset(ptr, 0xff, numPaddingBytes - 2);
+ ptr += numPaddingBytes - 2;
}
}
- // 4 bytes of TS header leave 188 - 4 = 184 bytes for the payload
-
- size_t sizeLeft = packetDataStart + 188 - ptr;
- size_t copy = accessUnit->size() - offset;
- if (copy > sizeLeft) {
- copy = sizeLeft;
- }
-
memcpy(ptr, accessUnit->data() + offset, copy);
ptr += copy;
- CHECK_EQ(sizeLeft, copy);
- memset(ptr, 0xff, sizeLeft - copy);
+ CHECK_EQ(ptr, packetDataStart + 188);
offset += copy;
packetDataStart += 188;
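
The alignment logic introduced above has one subtle consequence for packet counting: when an HDCP descriptor is emitted, every completely filled TS packet must carry a multiple of 16 payload bytes, and only the final, partially filled packet may be unaligned. A simplified sketch of the packet-count computation, following the straightforward loop from the "#if 0" block rather than the closed-form branch (names and signature are illustrative, not from the patch):

    #include <cstddef>

    // Count the 188-byte TS packets needed for one PES payload when every
    // full packet must carry a multiple of 16 payload bytes.
    static size_t countTSPackets(size_t payloadSize, size_t pesHeaderSize, bool align) {
        size_t room = 188 - 4 - pesHeaderSize;   // first packet: TS header + PES header
        size_t copy = payloadSize;
        if (copy > room) {
            copy = room;
            if (align && copy > 16) copy -= copy % 16;
        }
        size_t remaining = payloadSize - copy;
        size_t numPackets = 1;

        while (remaining > 0) {                  // subsequent packets: TS header only
            room = 188 - 4;
            copy = remaining;
            if (copy > room) {
                copy = room;
                if (align && copy > 16) copy -= copy % 16;
            }
            remaining -= copy;
            ++numPackets;
        }
        return numPackets;
    }

The #else branch in the patch arrives at the same count without a loop, by dividing the remaining payload by the aligned per-packet capacity and then checking whether one extra packet is needed for the tail.
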
diff --git a/media/libstagefright/wifi-display/source/TSPacketizer.h b/media/libstagefright/wifi-display/source/TSPacketizer.h
index a37917d..4a664ee 100644
--- a/media/libstagefright/wifi-display/source/TSPacketizer.h
+++ b/media/libstagefright/wifi-display/source/TSPacketizer.h
@@ -32,7 +32,11 @@ struct AMessage;
// Emits metadata tables (PAT and PMT) and timestamp stream (PCR) based
// on flags.
struct TSPacketizer : public RefBase {
- TSPacketizer();
+ enum {
+ EMIT_HDCP20_DESCRIPTOR = 1,
+ EMIT_HDCP21_DESCRIPTOR = 2,
+ };
+ TSPacketizer(uint32_t flags);
// Returns trackIndex or error.
ssize_t addTrack(const sp<AMessage> &format);
@@ -50,6 +54,8 @@ struct TSPacketizer : public RefBase {
const uint8_t *PES_private_data, size_t PES_private_data_len,
size_t numStuffingBytes = 0);
+ status_t extractCSDIfNecessary(size_t trackIndex);
+
// XXX to be removed once encoder config option takes care of this for
// encrypted mode.
sp<ABuffer> prependCSD(
@@ -66,8 +72,11 @@ private:
struct Track;
+ uint32_t mFlags;
Vector<sp<Track> > mTracks;
+ Vector<sp<ABuffer> > mProgramInfoDescriptors;
+
unsigned mPATContinuityCounter;
unsigned mPMTContinuityCounter;
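
With the constructor now taking flags, callers opt into the HDCP program-info descriptor at construction time. A minimal usage sketch (hypothetical call site, assuming Android's sp<> smart pointer; not from this patch):

    uint32_t flags = 0;
    if (usingHDCP) {  // usingHDCP is a hypothetical caller-side variable
        flags |= TSPacketizer::EMIT_HDCP20_DESCRIPTOR;  // or EMIT_HDCP21_DESCRIPTOR
    }
    sp<TSPacketizer> packetizer = new TSPacketizer(flags);

The same flags also switch on the 16-byte payload alignment in packetize().
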
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
index 08f67f9..22dd0b1 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.cpp
@@ -22,10 +22,10 @@
#include "PlaybackSession.h"
#include "Parameters.h"
#include "ParsedMessage.h"
-#include "Sender.h"
+#include "rtp/RTPSender.h"
#include <binder/IServiceManager.h>
-#include <gui/ISurfaceTexture.h>
+#include <gui/IGraphicBufferProducer.h>
#include <media/IHDCP.h>
#include <media/IMediaPlayerService.h>
#include <media/IRemoteDisplayClient.h>
@@ -33,6 +33,7 @@
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/Utils.h>
#include <arpa/inet.h>
#include <cutils/properties.h>
@@ -41,9 +42,13 @@
namespace android {
+// static
+const AString WifiDisplaySource::sUserAgent = MakeUserAgent();
+
WifiDisplaySource::WifiDisplaySource(
const sp<ANetworkSession> &netSession,
- const sp<IRemoteDisplayClient> &client)
+ const sp<IRemoteDisplayClient> &client,
+ const char *path)
: mState(INITIALIZED),
mNetSession(netSession),
mClient(client),
@@ -58,8 +63,16 @@ WifiDisplaySource::WifiDisplaySource(
mIsHDCP2_0(false),
mHDCPPort(0),
mHDCPInitializationComplete(false),
- mSetupTriggerDeferred(false)
-{
+ mSetupTriggerDeferred(false),
+ mPlaybackSessionEstablished(false) {
+ if (path != NULL) {
+ mMediaPath.setTo(path);
+ }
+
+ mSupportedSourceVideoFormats.disableAll();
+
+ mSupportedSourceVideoFormats.setNativeResolution(
+ VideoFormats::RESOLUTION_CEA, 5); // 1280x720 p30
}
WifiDisplaySource::~WifiDisplaySource() {
@@ -151,9 +164,7 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
} else {
err = -EINVAL;
}
- }
- if (err == OK) {
mState = AWAITING_CLIENT_CONNECTION;
}
@@ -253,7 +264,8 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
if (!strcasecmp(val, "pause") && mState == PLAYING) {
mState = PLAYING_TO_PAUSED;
sendTrigger(mClientSessionID, TRIGGER_PAUSE);
- } else if (!strcasecmp(val, "play") && mState == PAUSED) {
+ } else if (!strcasecmp(val, "play")
+ && mState == PAUSED) {
mState = PAUSED_TO_PLAYING;
sendTrigger(mClientSessionID, TRIGGER_PLAY);
}
@@ -262,6 +274,11 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case ANetworkSession::kWhatNetworkStall:
+ {
+ break;
+ }
+
default:
TRESPASS();
}
@@ -374,16 +391,41 @@ void WifiDisplaySource::onMessageReceived(const sp<AMessage> &msg) {
mClient->onDisplayError(
IRemoteDisplayClient::kDisplayErrorUnknown);
} else if (what == PlaybackSession::kWhatSessionEstablished) {
+ mPlaybackSessionEstablished = true;
+
if (mClient != NULL) {
- mClient->onDisplayConnected(
- mClientInfo.mPlaybackSession->getSurfaceTexture(),
- mClientInfo.mPlaybackSession->width(),
- mClientInfo.mPlaybackSession->height(),
- mUsingHDCP
- ? IRemoteDisplayClient::kDisplayFlagSecure
- : 0);
+ if (!mSinkSupportsVideo) {
+ mClient->onDisplayConnected(
+ NULL, // SurfaceTexture
+ 0, // width,
+ 0, // height,
+ mUsingHDCP
+ ? IRemoteDisplayClient::kDisplayFlagSecure
+ : 0);
+ } else {
+ size_t width, height;
+
+ CHECK(VideoFormats::GetConfiguration(
+ mChosenVideoResolutionType,
+ mChosenVideoResolutionIndex,
+ &width,
+ &height,
+ NULL /* framesPerSecond */,
+ NULL /* interlaced */));
+
+ mClient->onDisplayConnected(
+ mClientInfo.mPlaybackSession
+ ->getSurfaceTexture(),
+ width,
+ height,
+ mUsingHDCP
+ ? IRemoteDisplayClient::kDisplayFlagSecure
+ : 0);
+ }
}
+ finishPlay();
+
if (mState == ABOUT_TO_PLAY) {
mState = PLAYING;
}
@@ -564,55 +606,38 @@ status_t WifiDisplaySource::sendM3(int32_t sessionID) {
}
status_t WifiDisplaySource::sendM4(int32_t sessionID) {
- // wfd_video_formats:
- // 1 byte "native"
- // 1 byte "preferred-display-mode-supported" 0 or 1
- // one or more avc codec structures
- // 1 byte profile
- // 1 byte level
- // 4 byte CEA mask
- // 4 byte VESA mask
- // 4 byte HH mask
- // 1 byte latency
- // 2 byte min-slice-size
- // 2 byte slice-enc-params
- // 1 byte framerate-control-support
- // max-hres (none or 2 byte)
- // max-vres (none or 2 byte)
-
CHECK_EQ(sessionID, mClientSessionID);
- AString transportString = "UDP";
-
- char val[PROPERTY_VALUE_MAX];
- if (property_get("media.wfd.enable-tcp", val, NULL)
- && (!strcasecmp("true", val) || !strcmp("1", val))) {
- ALOGI("Using TCP transport.");
- transportString = "TCP";
- }
-
- // For 720p60:
- // use "30 00 02 02 00000040 00000000 00000000 00 0000 0000 00 none none\r\n"
- // For 720p30:
- // use "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n"
- // For 720p24:
- // use "78 00 02 02 00008000 00000000 00000000 00 0000 0000 00 none none\r\n"
- // For 1080p30:
- // use "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n"
- AString body = StringPrintf(
- "wfd_video_formats: "
-#if USE_1080P
- "38 00 02 02 00000080 00000000 00000000 00 0000 0000 00 none none\r\n"
-#else
- "28 00 02 02 00000020 00000000 00000000 00 0000 0000 00 none none\r\n"
-#endif
- "wfd_audio_codecs: %s\r\n"
- "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n"
- "wfd_client_rtp_ports: RTP/AVP/%s;unicast %d 0 mode=play\r\n",
- (mUsingPCMAudio
- ? "LPCM 00000002 00" // 2 ch PCM 48kHz
- : "AAC 00000001 00"), // 2 ch AAC 48kHz
- mClientInfo.mLocalIP.c_str(), transportString.c_str(), mChosenRTPPort);
+ AString body;
+
+ if (mSinkSupportsVideo) {
+ body.append("wfd_video_formats: ");
+
+ VideoFormats chosenVideoFormat;
+ chosenVideoFormat.disableAll();
+ chosenVideoFormat.setNativeResolution(
+ mChosenVideoResolutionType, mChosenVideoResolutionIndex);
+
+ body.append(chosenVideoFormat.getFormatSpec(true /* forM4Message */));
+ body.append("\r\n");
+ }
+
+ if (mSinkSupportsAudio) {
+ body.append(
+ StringPrintf("wfd_audio_codecs: %s\r\n",
+ (mUsingPCMAudio
+ ? "LPCM 00000002 00" // 2 ch PCM 48kHz
+ : "AAC 00000001 00"))); // 2 ch AAC 48kHz
+ }
+
+ body.append(
+ StringPrintf(
+ "wfd_presentation_URL: rtsp://%s/wfd1.0/streamid=0 none\r\n",
+ mClientInfo.mLocalIP.c_str()));
+
+ body.append(
+ StringPrintf(
+ "wfd_client_rtp_ports: %s\r\n", mWfdClientRtpPorts.c_str()));
AString request = "SET_PARAMETER rtsp://localhost/wfd1.0 RTSP/1.0\r\n";
AppendCommonResponse(&request, mNextCSeq);
@@ -775,53 +800,115 @@ status_t WifiDisplaySource::onReceiveM3Response(
return ERROR_MALFORMED;
}
- unsigned port0, port1;
+ unsigned port0 = 0, port1 = 0;
if (sscanf(value.c_str(),
"RTP/AVP/UDP;unicast %u %u mode=play",
&port0,
- &port1) != 2
- || port0 == 0 || port0 > 65535 || port1 != 0) {
- ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)",
+ &port1) == 2
+ || sscanf(value.c_str(),
+ "RTP/AVP/TCP;unicast %u %u mode=play",
+ &port0,
+ &port1) == 2) {
+ if (port0 == 0 || port0 > 65535 || port1 != 0) {
+ ALOGE("Sink chose its wfd_client_rtp_ports poorly (%s)",
+ value.c_str());
+
+ return ERROR_MALFORMED;
+ }
+ } else if (strcmp(value.c_str(), "RTP/AVP/TCP;interleaved mode=play")) {
+ ALOGE("Unsupported value for wfd_client_rtp_ports (%s)",
value.c_str());
- return ERROR_MALFORMED;
+ return ERROR_UNSUPPORTED;
}
+ mWfdClientRtpPorts = value;
mChosenRTPPort = port0;
+ if (!params->findParameter("wfd_video_formats", &value)) {
+ ALOGE("Sink doesn't report its choice of wfd_video_formats.");
+ return ERROR_MALFORMED;
+ }
+
+ mSinkSupportsVideo = false;
+
+ if (!(value == "none")) {
+ mSinkSupportsVideo = true;
+ if (!mSupportedSinkVideoFormats.parseFormatSpec(value.c_str())) {
+ ALOGE("Failed to parse sink provided wfd_video_formats (%s)",
+ value.c_str());
+
+ return ERROR_MALFORMED;
+ }
+
+ if (!VideoFormats::PickBestFormat(
+ mSupportedSinkVideoFormats,
+ mSupportedSourceVideoFormats,
+ &mChosenVideoResolutionType,
+ &mChosenVideoResolutionIndex)) {
+ ALOGE("Sink and source share no commonly supported video "
+ "formats.");
+
+ return ERROR_UNSUPPORTED;
+ }
+
+ size_t width, height, framesPerSecond;
+ bool interlaced;
+ CHECK(VideoFormats::GetConfiguration(
+ mChosenVideoResolutionType,
+ mChosenVideoResolutionIndex,
+ &width,
+ &height,
+ &framesPerSecond,
+ &interlaced));
+
+ ALOGI("Picked video resolution %u x %u %c%u",
+ width, height, interlaced ? 'i' : 'p', framesPerSecond);
+ } else {
+ ALOGI("Sink doesn't support video at all.");
+ }
+
if (!params->findParameter("wfd_audio_codecs", &value)) {
ALOGE("Sink doesn't report its choice of wfd_audio_codecs.");
return ERROR_MALFORMED;
}
- if (value == "none") {
- ALOGE("Sink doesn't support audio at all.");
- return ERROR_UNSUPPORTED;
- }
+ mSinkSupportsAudio = false;
- uint32_t modes;
- GetAudioModes(value.c_str(), "AAC", &modes);
+ if (!(value == "none")) {
+ mSinkSupportsAudio = true;
- bool supportsAAC = (modes & 1) != 0; // AAC 2ch 48kHz
+ uint32_t modes;
+ GetAudioModes(value.c_str(), "AAC", &modes);
- GetAudioModes(value.c_str(), "LPCM", &modes);
+ bool supportsAAC = (modes & 1) != 0; // AAC 2ch 48kHz
- bool supportsPCM = (modes & 2) != 0; // LPCM 2ch 48kHz
+ GetAudioModes(value.c_str(), "LPCM", &modes);
- char val[PROPERTY_VALUE_MAX];
- if (supportsPCM
- && property_get("media.wfd.use-pcm-audio", val, NULL)
- && (!strcasecmp("true", val) || !strcmp("1", val))) {
- ALOGI("Using PCM audio.");
- mUsingPCMAudio = true;
- } else if (supportsAAC) {
- ALOGI("Using AAC audio.");
- mUsingPCMAudio = false;
- } else if (supportsPCM) {
- ALOGI("Using PCM audio.");
- mUsingPCMAudio = true;
+ bool supportsPCM = (modes & 2) != 0; // LPCM 2ch 48kHz
+
+ char val[PROPERTY_VALUE_MAX];
+ if (supportsPCM
+ && property_get("media.wfd.use-pcm-audio", val, NULL)
+ && (!strcasecmp("true", val) || !strcmp("1", val))) {
+ ALOGI("Using PCM audio.");
+ mUsingPCMAudio = true;
+ } else if (supportsAAC) {
+ ALOGI("Using AAC audio.");
+ mUsingPCMAudio = false;
+ } else if (supportsPCM) {
+ ALOGI("Using PCM audio.");
+ mUsingPCMAudio = true;
+ } else {
+ ALOGI("Sink doesn't support an audio format we do.");
+ return ERROR_UNSUPPORTED;
+ }
} else {
- ALOGI("Sink doesn't support an audio format we do.");
+ ALOGI("Sink doesn't support audio at all.");
+ }
+
+ if (!mSinkSupportsVideo && !mSinkSupportsAudio) {
+ ALOGE("Sink supports neither video nor audio...");
return ERROR_UNSUPPORTED;
}
@@ -1065,7 +1152,7 @@ status_t WifiDisplaySource::onSetupRequest(
return ERROR_MALFORMED;
}
- Sender::TransportMode transportMode = Sender::TRANSPORT_UDP;
+ RTPSender::TransportMode rtpMode = RTPSender::TRANSPORT_UDP;
int clientRtp, clientRtcp;
if (transport.startsWith("RTP/AVP/TCP;")) {
@@ -1074,7 +1161,7 @@ status_t WifiDisplaySource::onSetupRequest(
transport.c_str(), "interleaved", &interleaved)
&& sscanf(interleaved.c_str(), "%d-%d",
&clientRtp, &clientRtcp) == 2) {
- transportMode = Sender::TRANSPORT_TCP_INTERLEAVED;
+ rtpMode = RTPSender::TRANSPORT_TCP_INTERLEAVED;
} else {
bool badRequest = false;
@@ -1096,7 +1183,7 @@ status_t WifiDisplaySource::onSetupRequest(
return ERROR_MALFORMED;
}
- transportMode = Sender::TRANSPORT_TCP;
+ rtpMode = RTPSender::TRANSPORT_TCP;
}
} else if (transport.startsWith("RTP/AVP;unicast;")
|| transport.startsWith("RTP/AVP/UDP;unicast;")) {
@@ -1138,7 +1225,7 @@ status_t WifiDisplaySource::onSetupRequest(
sp<PlaybackSession> playbackSession =
new PlaybackSession(
- mNetSession, notify, mInterfaceAddr, mHDCP);
+ mNetSession, notify, mInterfaceAddr, mHDCP, mMediaPath.c_str());
looper()->registerHandler(playbackSession);
@@ -1155,12 +1242,22 @@ status_t WifiDisplaySource::onSetupRequest(
return ERROR_MALFORMED;
}
+ RTPSender::TransportMode rtcpMode = RTPSender::TRANSPORT_UDP;
+ if (clientRtcp < 0) {
+ rtcpMode = RTPSender::TRANSPORT_NONE;
+ }
+
status_t err = playbackSession->init(
mClientInfo.mRemoteIP.c_str(),
clientRtp,
+ rtpMode,
clientRtcp,
- transportMode,
- mUsingPCMAudio);
+ rtcpMode,
+ mSinkSupportsAudio,
+ mUsingPCMAudio,
+ mSinkSupportsVideo,
+ mChosenVideoResolutionType,
+ mChosenVideoResolutionIndex);
if (err != OK) {
looper()->unregisterHandler(playbackSession->id());
@@ -1184,7 +1281,7 @@ status_t WifiDisplaySource::onSetupRequest(
AString response = "RTSP/1.0 200 OK\r\n";
AppendCommonResponse(&response, cseq, playbackSessionID);
- if (transportMode == Sender::TRANSPORT_TCP_INTERLEAVED) {
+ if (rtpMode == RTPSender::TRANSPORT_TCP_INTERLEAVED) {
response.append(
StringPrintf(
"Transport: RTP/AVP/TCP;interleaved=%d-%d;",
@@ -1193,7 +1290,7 @@ status_t WifiDisplaySource::onSetupRequest(
int32_t serverRtp = playbackSession->getRTPPort();
AString transportString = "UDP";
- if (transportMode == Sender::TRANSPORT_TCP) {
+ if (rtpMode == RTPSender::TRANSPORT_TCP) {
transportString = "TCP";
}
@@ -1243,17 +1340,28 @@ status_t WifiDisplaySource::onPlayRequest(
return ERROR_MALFORMED;
}
- ALOGI("Received PLAY request.");
+ if (mState != AWAITING_CLIENT_PLAY) {
+ ALOGW("Received PLAY request but we're in state %d", mState);
- status_t err = playbackSession->play();
- CHECK_EQ(err, (status_t)OK);
+ sendErrorResponse(
+ sessionID, "455 Method Not Valid in This State", cseq);
+
+ return INVALID_OPERATION;
+ }
+
+ ALOGI("Received PLAY request.");
+ if (mPlaybackSessionEstablished) {
+ finishPlay();
+ } else {
+ ALOGI("deferring PLAY request until session established.");
+ }
AString response = "RTSP/1.0 200 OK\r\n";
AppendCommonResponse(&response, cseq, playbackSessionID);
response.append("Range: npt=now-\r\n");
response.append("\r\n");
- err = mNetSession->sendRequest(sessionID, response.c_str());
+ status_t err = mNetSession->sendRequest(sessionID, response.c_str());
if (err != OK) {
return err;
@@ -1264,14 +1372,20 @@ status_t WifiDisplaySource::onPlayRequest(
return OK;
}
- playbackSession->finishPlay();
-
CHECK_EQ(mState, AWAITING_CLIENT_PLAY);
mState = ABOUT_TO_PLAY;
return OK;
}
+void WifiDisplaySource::finishPlay() {
+ const sp<PlaybackSession> &playbackSession =
+ mClientInfo.mPlaybackSession;
+
+ status_t err = playbackSession->play();
+ CHECK_EQ(err, (status_t)OK);
+}
+
status_t WifiDisplaySource::onPauseRequest(
int32_t sessionID,
int32_t cseq,
@@ -1447,7 +1561,7 @@ void WifiDisplaySource::AppendCommonResponse(
response->append(buf);
response->append("\r\n");
- response->append("Server: Mine/1.0\r\n");
+ response->append(StringPrintf("Server: %s\r\n", sUserAgent.c_str()));
if (cseq >= 0) {
response->append(StringPrintf("CSeq: %d\r\n", cseq));
@@ -1557,10 +1671,13 @@ void WifiDisplaySource::HDCPObserver::notify(
status_t WifiDisplaySource::makeHDCP() {
sp<IServiceManager> sm = defaultServiceManager();
sp<IBinder> binder = sm->getService(String16("media.player"));
- sp<IMediaPlayerService> service = interface_cast<IMediaPlayerService>(binder);
+
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
CHECK(service != NULL);
- mHDCP = service->makeHDCP();
+ mHDCP = service->makeHDCP(true /* createEncryptionModule */);
if (mHDCP == NULL) {
return ERROR_UNSUPPORTED;
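
For context on the reworked negotiation: the source's M3 exchange (an RTSP GET_PARAMETER in the WFD spec) asks the sink for wfd_video_formats, wfd_audio_codecs and wfd_client_rtp_ports. onReceiveM3Response() now tolerates "none" for either media type, intersects the sink's video formats with mSupportedSourceVideoFormats via VideoFormats::PickBestFormat(), and remembers the sink's RTP transport string; sendM4() then issues a SET_PARAMETER that echoes back only the capabilities both sides actually share.
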
diff --git a/media/libstagefright/wifi-display/source/WifiDisplaySource.h b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
index 974e070..44d3e4d 100644
--- a/media/libstagefright/wifi-display/source/WifiDisplaySource.h
+++ b/media/libstagefright/wifi-display/source/WifiDisplaySource.h
@@ -19,6 +19,7 @@
#define WIFI_DISPLAY_SOURCE_H_
#include "ANetworkSession.h"
+#include "VideoFormats.h"
#include <media/stagefright/foundation/AHandler.h>
@@ -26,8 +27,6 @@
namespace android {
-#define USE_1080P 0
-
struct IHDCP;
struct IRemoteDisplayClient;
struct ParsedMessage;
@@ -39,7 +38,8 @@ struct WifiDisplaySource : public AHandler {
WifiDisplaySource(
const sp<ANetworkSession> &netSession,
- const sp<IRemoteDisplayClient> &client);
+ const sp<IRemoteDisplayClient> &client,
+ const char *path = NULL);
status_t start(const char *iface);
status_t stop();
@@ -111,16 +111,29 @@ private:
static const int64_t kPlaybackSessionTimeoutUs =
kPlaybackSessionTimeoutSecs * 1000000ll;
+ static const AString sUserAgent;
+
State mState;
+ VideoFormats mSupportedSourceVideoFormats;
sp<ANetworkSession> mNetSession;
sp<IRemoteDisplayClient> mClient;
+ AString mMediaPath;
struct in_addr mInterfaceAddr;
int32_t mSessionID;
uint32_t mStopReplyID;
+ AString mWfdClientRtpPorts;
int32_t mChosenRTPPort; // extracted from "wfd_client_rtp_ports"
+ bool mSinkSupportsVideo;
+ VideoFormats mSupportedSinkVideoFormats;
+
+ VideoFormats::ResolutionType mChosenVideoResolutionType;
+ size_t mChosenVideoResolutionIndex;
+
+ bool mSinkSupportsAudio;
+
bool mUsingPCMAudio;
int32_t mClientSessionID;
@@ -149,6 +162,8 @@ private:
bool mHDCPInitializationComplete;
bool mSetupTriggerDeferred;
+ bool mPlaybackSessionEstablished;
+
status_t makeHDCP();
// <<<< HDCP specific section
@@ -245,6 +260,8 @@ private:
void finishStopAfterDisconnectingClient();
void finishStop2();
+ void finishPlay();
+
DISALLOW_EVIL_CONSTRUCTORS(WifiDisplaySource);
};
diff --git a/media/libstagefright/wifi-display/udptest.cpp b/media/libstagefright/wifi-display/udptest.cpp
deleted file mode 100644
index 1cd82c3..0000000
--- a/media/libstagefright/wifi-display/udptest.cpp
+++ /dev/null
@@ -1,355 +0,0 @@
-/*
- * Copyright 2012, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "udptest"
-#include <utils/Log.h>
-
-#include "ANetworkSession.h"
-
-#include <binder/ProcessState.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/Utils.h>
-
-namespace android {
-
-struct TestHandler : public AHandler {
- TestHandler(const sp<ANetworkSession> &netSession);
-
- void startServer(unsigned localPort);
- void startClient(const char *remoteHost, unsigned remotePort);
-
-protected:
- virtual ~TestHandler();
-
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-private:
- enum {
- kWhatStartServer,
- kWhatStartClient,
- kWhatUDPNotify,
- kWhatSendPacket,
- };
-
- sp<ANetworkSession> mNetSession;
-
- bool mIsServer;
- bool mConnected;
- int32_t mUDPSession;
- uint32_t mSeqNo;
- double mTotalTimeUs;
- int32_t mCount;
-
- void postSendPacket(int64_t delayUs = 0ll);
-
- DISALLOW_EVIL_CONSTRUCTORS(TestHandler);
-};
-
-TestHandler::TestHandler(const sp<ANetworkSession> &netSession)
- : mNetSession(netSession),
- mIsServer(false),
- mConnected(false),
- mUDPSession(0),
- mSeqNo(0),
- mTotalTimeUs(0.0),
- mCount(0) {
-}
-
-TestHandler::~TestHandler() {
-}
-
-void TestHandler::startServer(unsigned localPort) {
- sp<AMessage> msg = new AMessage(kWhatStartServer, id());
- msg->setInt32("localPort", localPort);
- msg->post();
-}
-
-void TestHandler::startClient(const char *remoteHost, unsigned remotePort) {
- sp<AMessage> msg = new AMessage(kWhatStartClient, id());
- msg->setString("remoteHost", remoteHost);
- msg->setInt32("remotePort", remotePort);
- msg->post();
-}
-
-void TestHandler::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatStartClient:
- {
- AString remoteHost;
- CHECK(msg->findString("remoteHost", &remoteHost));
-
- int32_t remotePort;
- CHECK(msg->findInt32("remotePort", &remotePort));
-
- sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
- CHECK_EQ((status_t)OK,
- mNetSession->createUDPSession(
- 0 /* localPort */,
- remoteHost.c_str(),
- remotePort,
- notify,
- &mUDPSession));
-
- postSendPacket();
- break;
- }
-
- case kWhatStartServer:
- {
- mIsServer = true;
-
- int32_t localPort;
- CHECK(msg->findInt32("localPort", &localPort));
-
- sp<AMessage> notify = new AMessage(kWhatUDPNotify, id());
-
- CHECK_EQ((status_t)OK,
- mNetSession->createUDPSession(
- localPort, notify, &mUDPSession));
-
- break;
- }
-
- case kWhatSendPacket:
- {
- char buffer[12];
- memset(buffer, 0, sizeof(buffer));
-
- buffer[0] = mSeqNo >> 24;
- buffer[1] = (mSeqNo >> 16) & 0xff;
- buffer[2] = (mSeqNo >> 8) & 0xff;
- buffer[3] = mSeqNo & 0xff;
- ++mSeqNo;
-
- int64_t nowUs = ALooper::GetNowUs();
- buffer[4] = nowUs >> 56;
- buffer[5] = (nowUs >> 48) & 0xff;
- buffer[6] = (nowUs >> 40) & 0xff;
- buffer[7] = (nowUs >> 32) & 0xff;
- buffer[8] = (nowUs >> 24) & 0xff;
- buffer[9] = (nowUs >> 16) & 0xff;
- buffer[10] = (nowUs >> 8) & 0xff;
- buffer[11] = nowUs & 0xff;
-
- CHECK_EQ((status_t)OK,
- mNetSession->sendRequest(
- mUDPSession, buffer, sizeof(buffer)));
-
- postSendPacket(20000ll);
- break;
- }
-
- case kWhatUDPNotify:
- {
- int32_t reason;
- CHECK(msg->findInt32("reason", &reason));
-
- switch (reason) {
- case ANetworkSession::kWhatError:
- {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- int32_t err;
- CHECK(msg->findInt32("err", &err));
-
- AString detail;
- CHECK(msg->findString("detail", &detail));
-
- ALOGE("An error occurred in session %d (%d, '%s/%s').",
- sessionID,
- err,
- detail.c_str(),
- strerror(-err));
-
- mNetSession->destroySession(sessionID);
- break;
- }
-
- case ANetworkSession::kWhatDatagram:
- {
- int32_t sessionID;
- CHECK(msg->findInt32("sessionID", &sessionID));
-
- sp<ABuffer> data;
- CHECK(msg->findBuffer("data", &data));
-
- if (mIsServer) {
- if (!mConnected) {
- AString fromAddr;
- CHECK(msg->findString("fromAddr", &fromAddr));
-
- int32_t fromPort;
- CHECK(msg->findInt32("fromPort", &fromPort));
-
- CHECK_EQ((status_t)OK,
- mNetSession->connectUDPSession(
- mUDPSession, fromAddr.c_str(), fromPort));
-
- mConnected = true;
- }
-
- int64_t nowUs = ALooper::GetNowUs();
-
- sp<ABuffer> buffer = new ABuffer(data->size() + 8);
- memcpy(buffer->data(), data->data(), data->size());
-
- uint8_t *ptr = buffer->data() + data->size();
-
- *ptr++ = nowUs >> 56;
- *ptr++ = (nowUs >> 48) & 0xff;
- *ptr++ = (nowUs >> 40) & 0xff;
- *ptr++ = (nowUs >> 32) & 0xff;
- *ptr++ = (nowUs >> 24) & 0xff;
- *ptr++ = (nowUs >> 16) & 0xff;
- *ptr++ = (nowUs >> 8) & 0xff;
- *ptr++ = nowUs & 0xff;
-
- CHECK_EQ((status_t)OK,
- mNetSession->sendRequest(
- mUDPSession, buffer->data(), buffer->size()));
- } else {
- CHECK_EQ(data->size(), 20u);
-
- uint32_t seqNo = U32_AT(data->data());
- int64_t t1 = U64_AT(data->data() + 4);
- int64_t t2 = U64_AT(data->data() + 12);
-
- int64_t t3;
- CHECK(data->meta()->findInt64("arrivalTimeUs", &t3));
-
-#if 0
- printf("roundtrip seqNo %u, time = %lld us\n",
- seqNo, t3 - t1);
-#else
- mTotalTimeUs += t3 - t1;
- ++mCount;
- printf("avg. roundtrip time %.2f us\n", mTotalTimeUs / mCount);
-#endif
- }
- break;
- }
-
- default:
- TRESPASS();
- }
-
- break;
- }
-
- default:
- TRESPASS();
- }
-}
-
-void TestHandler::postSendPacket(int64_t delayUs) {
- (new AMessage(kWhatSendPacket, id()))->post(delayUs);
-}
-
-} // namespace android
-
-static void usage(const char *me) {
- fprintf(stderr,
- "usage: %s -c host[:port]\tconnect to test server\n"
- " -l \tcreate a test server\n",
- me);
-}
-
-int main(int argc, char **argv) {
- using namespace android;
-
- ProcessState::self()->startThreadPool();
-
- int32_t localPort = -1;
- int32_t connectToPort = -1;
- AString connectToHost;
-
- int res;
- while ((res = getopt(argc, argv, "hc:l:")) >= 0) {
- switch (res) {
- case 'c':
- {
- const char *colonPos = strrchr(optarg, ':');
-
- if (colonPos == NULL) {
- connectToHost = optarg;
- connectToPort = 49152;
- } else {
- connectToHost.setTo(optarg, colonPos - optarg);
-
- char *end;
- connectToPort = strtol(colonPos + 1, &end, 10);
-
- if (*end != '\0' || end == colonPos + 1
- || connectToPort < 1 || connectToPort > 65535) {
- fprintf(stderr, "Illegal port specified.\n");
- exit(1);
- }
- }
- break;
- }
-
- case 'l':
- {
- char *end;
- localPort = strtol(optarg, &end, 10);
-
- if (*end != '\0' || end == optarg
- || localPort < 1 || localPort > 65535) {
- fprintf(stderr, "Illegal port specified.\n");
- exit(1);
- }
- break;
- }
-
- case '?':
- case 'h':
- usage(argv[0]);
- exit(1);
- }
- }
-
- if (localPort < 0 && connectToPort < 0) {
- fprintf(stderr,
- "You need to select either client or server mode.\n");
- exit(1);
- }
-
- sp<ANetworkSession> netSession = new ANetworkSession;
- netSession->start();
-
- sp<ALooper> looper = new ALooper;
-
- sp<TestHandler> handler = new TestHandler(netSession);
- looper->registerHandler(handler);
-
- if (localPort >= 0) {
- handler->startServer(localPort);
- } else {
- handler->startClient(connectToHost.c_str(), connectToPort);
- }
-
- looper->start(true /* runOnCallingThread */);
-
- return 0;
-}
-
diff --git a/media/libstagefright/wifi-display/wfd.cpp b/media/libstagefright/wifi-display/wfd.cpp
index 03a1123..c947765 100644
--- a/media/libstagefright/wifi-display/wfd.cpp
+++ b/media/libstagefright/wifi-display/wfd.cpp
@@ -18,11 +18,11 @@
#define LOG_TAG "wfd"
#include <utils/Log.h>
-#include "sink/WifiDisplaySink.h"
#include "source/WifiDisplaySource.h"
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
+#include <gui/ISurfaceComposer.h>
#include <gui/SurfaceComposerClient.h>
#include <media/AudioSystem.h>
#include <media/IMediaPlayerService.h>
@@ -30,16 +30,16 @@
#include <media/IRemoteDisplayClient.h>
#include <media/stagefright/DataSource.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <ui/DisplayInfo.h>
namespace android {
static void usage(const char *me) {
fprintf(stderr,
"usage:\n"
- " %s -c host[:port]\tconnect to wifi source\n"
- " -u uri \tconnect to an rtsp uri\n"
- " -l ip[:port] \tlisten on the specified port "
- "(create a sink)\n",
+ " %s -l iface[:port]\tcreate a wifi display source\n"
+ " -f(ilename) \tstream media\n",
me);
}
@@ -47,7 +47,7 @@ struct RemoteDisplayClient : public BnRemoteDisplayClient {
RemoteDisplayClient();
virtual void onDisplayConnected(
- const sp<ISurfaceTexture> &surfaceTexture,
+ const sp<IGraphicBufferProducer> &bufferProducer,
uint32_t width,
uint32_t height,
uint32_t flags);
@@ -67,7 +67,7 @@ private:
bool mDone;
sp<SurfaceComposerClient> mComposerClient;
- sp<ISurfaceTexture> mSurfaceTexture;
+ sp<IGraphicBufferProducer> mSurfaceTexture;
sp<IBinder> mDisplayBinder;
DISALLOW_EVIL_CONSTRUCTORS(RemoteDisplayClient);
@@ -83,29 +83,31 @@ RemoteDisplayClient::~RemoteDisplayClient() {
}
void RemoteDisplayClient::onDisplayConnected(
- const sp<ISurfaceTexture> &surfaceTexture,
+ const sp<IGraphicBufferProducer> &bufferProducer,
uint32_t width,
uint32_t height,
uint32_t flags) {
ALOGI("onDisplayConnected width=%u, height=%u, flags = 0x%08x",
width, height, flags);
- mSurfaceTexture = surfaceTexture;
- mDisplayBinder = mComposerClient->createDisplay(
- String8("foo"), false /* secure */);
+ if (bufferProducer != NULL) {
+ mSurfaceTexture = bufferProducer;
+ mDisplayBinder = mComposerClient->createDisplay(
+ String8("foo"), false /* secure */);
- SurfaceComposerClient::openGlobalTransaction();
- mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture);
+ SurfaceComposerClient::openGlobalTransaction();
+ mComposerClient->setDisplaySurface(mDisplayBinder, mSurfaceTexture);
- Rect layerStackRect(1280, 720); // XXX fix this.
- Rect displayRect(1280, 720);
+ Rect layerStackRect(1280, 720); // XXX fix this.
+ Rect displayRect(1280, 720);
- mComposerClient->setDisplayProjection(
- mDisplayBinder, 0 /* 0 degree rotation */,
- layerStackRect,
- displayRect);
+ mComposerClient->setDisplayProjection(
+ mDisplayBinder, 0 /* 0 degree rotation */,
+ layerStackRect,
+ displayRect);
- SurfaceComposerClient::closeGlobalTransaction();
+ SurfaceComposerClient::closeGlobalTransaction();
+ }
}
void RemoteDisplayClient::onDisplayDisconnected() {
@@ -178,6 +180,26 @@ static void createSource(const AString &addr, int32_t port) {
enableAudioSubmix(false /* enable */);
}
+static void createFileSource(
+ const AString &addr, int32_t port, const char *path) {
+ sp<ANetworkSession> session = new ANetworkSession;
+ session->start();
+
+ sp<ALooper> looper = new ALooper;
+ looper->start();
+
+ sp<RemoteDisplayClient> client = new RemoteDisplayClient;
+ sp<WifiDisplaySource> source = new WifiDisplaySource(session, client, path);
+ looper->registerHandler(source);
+
+ AString iface = StringPrintf("%s:%d", addr.c_str(), port);
+ CHECK_EQ((status_t)OK, source->start(iface.c_str()));
+
+ client->waitUntilDone();
+
+ source->stop();
+}
+
} // namespace android
int main(int argc, char **argv) {
@@ -187,41 +209,17 @@ int main(int argc, char **argv) {
DataSource::RegisterDefaultSniffers();
- AString connectToHost;
- int32_t connectToPort = -1;
- AString uri;
-
AString listenOnAddr;
int32_t listenOnPort = -1;
+ AString path;
+
int res;
- while ((res = getopt(argc, argv, "hc:l:u:")) >= 0) {
+ while ((res = getopt(argc, argv, "hl:f:")) >= 0) {
switch (res) {
- case 'c':
- {
- const char *colonPos = strrchr(optarg, ':');
-
- if (colonPos == NULL) {
- connectToHost = optarg;
- connectToPort = WifiDisplaySource::kWifiDisplayDefaultPort;
- } else {
- connectToHost.setTo(optarg, colonPos - optarg);
-
- char *end;
- connectToPort = strtol(colonPos + 1, &end, 10);
-
- if (*end != '\0' || end == colonPos + 1
- || connectToPort < 1 || connectToPort > 65535) {
- fprintf(stderr, "Illegal port specified.\n");
- exit(1);
- }
- }
- break;
- }
-
- case 'u':
+ case 'f':
{
- uri = optarg;
+ path = optarg;
break;
}
@@ -255,47 +253,17 @@ int main(int argc, char **argv) {
}
}
- if (connectToPort >= 0 && listenOnPort >= 0) {
- fprintf(stderr,
- "You can connect to a source or create one, "
- "but not both at the same time.\n");
- exit(1);
- }
-
if (listenOnPort >= 0) {
- createSource(listenOnAddr, listenOnPort);
- exit(0);
- }
-
- if (connectToPort < 0 && uri.empty()) {
- fprintf(stderr,
- "You need to select either source host or uri.\n");
-
- exit(1);
- }
-
- if (connectToPort >= 0 && !uri.empty()) {
- fprintf(stderr,
- "You need to either connect to a wfd host or an rtsp url, "
- "not both.\n");
- exit(1);
- }
-
- sp<ANetworkSession> session = new ANetworkSession;
- session->start();
-
- sp<ALooper> looper = new ALooper;
-
- sp<WifiDisplaySink> sink = new WifiDisplaySink(session);
- looper->registerHandler(sink);
+ if (path.empty()) {
+ createSource(listenOnAddr, listenOnPort);
+ } else {
+ createFileSource(listenOnAddr, listenOnPort, path.c_str());
+ }
- if (connectToPort >= 0) {
- sink->start(connectToHost.c_str(), connectToPort);
- } else {
- sink->start(uri.c_str());
+ exit(0);
}
- looper->start(true /* runOnCallingThread */);
+ usage(argv[0]);
return 0;
}
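
With the sink-mode options removed, the tool now only creates a source: -l names the interface (and optional port) to listen on, and the new -f option streams a media file through WifiDisplaySource instead of mirroring the display, e.g. "wfd -l wlan0:7236 -f /sdcard/clip.mp4" (interface, port and path here are illustrative).
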
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
index a4253f6..b3f7b1b 100644
--- a/media/libstagefright/yuv/Android.mk
+++ b/media/libstagefright/yuv/Android.mk
@@ -6,7 +6,8 @@ LOCAL_SRC_FILES:= \
YUVCanvas.cpp
LOCAL_SHARED_LIBRARIES := \
- libcutils
+ libcutils \
+ liblog
LOCAL_MODULE:= libstagefright_yuv