Diffstat (limited to 'media')
-rw-r--r--media/img_utils/Android.mk15
-rw-r--r--media/img_utils/include/img_utils/ByteArrayOutput.h82
-rw-r--r--media/img_utils/include/img_utils/DngUtils.h132
-rw-r--r--media/img_utils/include/img_utils/EndianUtils.h250
-rw-r--r--media/img_utils/include/img_utils/FileInput.h76
-rw-r--r--media/img_utils/include/img_utils/FileOutput.h46
-rw-r--r--media/img_utils/include/img_utils/Input.h63
-rw-r--r--media/img_utils/include/img_utils/Orderable.h57
-rw-r--r--media/img_utils/include/img_utils/Output.h61
-rw-r--r--media/img_utils/include/img_utils/Pair.h (renamed from media/libstagefright/chromium_http/chromium_http_stub.cpp)38
-rw-r--r--media/img_utils/include/img_utils/SortedEntryVector.h53
-rw-r--r--media/img_utils/include/img_utils/TagDefinitions.h1187
-rw-r--r--media/img_utils/include/img_utils/TiffEntry.h129
-rw-r--r--media/img_utils/include/img_utils/TiffEntryImpl.h190
-rw-r--r--media/img_utils/include/img_utils/TiffHelpers.h132
-rw-r--r--media/img_utils/include/img_utils/TiffIfd.h124
-rw-r--r--media/img_utils/include/img_utils/TiffWritable.h60
-rw-r--r--media/img_utils/include/img_utils/TiffWriter.h267
-rw-r--r--media/img_utils/src/Android.mk61
-rw-r--r--media/img_utils/src/ByteArrayOutput.cpp54
-rw-r--r--media/img_utils/src/DngUtils.cpp280
-rw-r--r--media/img_utils/src/EndianUtils.cpp83
-rw-r--r--media/img_utils/src/FileInput.cpp80
-rw-r--r--media/img_utils/src/FileOutput.cpp79
-rw-r--r--media/img_utils/src/Input.cpp29
-rw-r--r--media/img_utils/src/Orderable.cpp (renamed from media/libstagefright/include/chromium_http_stub.h)31
-rw-r--r--media/img_utils/src/Output.cpp28
-rw-r--r--media/img_utils/src/SortedEntryVector.cpp44
-rw-r--r--media/img_utils/src/TiffEntry.cpp238
-rw-r--r--media/img_utils/src/TiffEntryImpl.cpp44
-rw-r--r--media/img_utils/src/TiffIfd.cpp182
-rw-r--r--media/img_utils/src/TiffWritable.cpp31
-rw-r--r--media/img_utils/src/TiffWriter.cpp232
-rw-r--r--media/libeffects/downmix/Android.mk6
-rw-r--r--media/libeffects/downmix/EffectDownmix.c95
-rw-r--r--media/libeffects/downmix/EffectDownmix.h1
-rw-r--r--media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp29
-rw-r--r--media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp29
-rw-r--r--media/libeffects/preprocessing/Android.mk5
-rw-r--r--media/libeffects/preprocessing/PreProcessing.cpp6
-rw-r--r--media/libeffects/visualizer/Android.mk1
-rw-r--r--media/libeffects/visualizer/EffectVisualizer.cpp3
-rw-r--r--media/libmedia/Android.mk31
-rw-r--r--media/libmedia/AudioEffect.cpp4
-rw-r--r--media/libmedia/AudioRecord.cpp337
-rw-r--r--media/libmedia/AudioSystem.cpp231
-rw-r--r--media/libmedia/AudioTrack.cpp705
-rw-r--r--media/libmedia/AudioTrackShared.cpp51
-rw-r--r--media/libmedia/CharacterEncodingDetector.cpp441
-rw-r--r--media/libmedia/CharacterEncodingDetector.h63
-rw-r--r--media/libmedia/CharacterEncodingDetectorTables.h2092
-rw-r--r--media/libmedia/IAudioFlinger.cpp341
-rw-r--r--media/libmedia/IAudioPolicyService.cpp311
-rw-r--r--media/libmedia/IAudioPolicyServiceClient.cpp83
-rw-r--r--media/libmedia/IAudioRecord.cpp19
-rw-r--r--media/libmedia/IAudioTrack.cpp6
-rw-r--r--media/libmedia/IEffect.cpp3
-rw-r--r--media/libmedia/IMediaDeathNotifier.cpp2
-rw-r--r--media/libmedia/IMediaHTTPConnection.cpp182
-rw-r--r--media/libmedia/IMediaHTTPService.cpp58
-rw-r--r--media/libmedia/IMediaMetadataRetriever.cpp25
-rw-r--r--media/libmedia/IMediaPlayer.cpp19
-rw-r--r--media/libmedia/IMediaPlayerService.cpp68
-rw-r--r--media/libmedia/IMediaRecorder.cpp8
-rw-r--r--media/libmedia/JetPlayer.cpp2
-rw-r--r--media/libmedia/MediaProfiles.cpp28
-rw-r--r--media/libmedia/MediaScannerClient.cpp202
-rw-r--r--media/libmedia/SoundPool.cpp36
-rw-r--r--media/libmedia/ToneGenerator.cpp2
-rw-r--r--media/libmedia/autodetect.cpp885
-rw-r--r--media/libmedia/autodetect.h37
-rw-r--r--media/libmedia/mediametadataretriever.cpp15
-rw-r--r--media/libmedia/mediaplayer.cpp57
-rw-r--r--media/libmedia/mediarecorder.cpp5
-rw-r--r--media/libmediaplayerservice/Android.mk1
-rw-r--r--media/libmediaplayerservice/HDCP.cpp6
-rw-r--r--media/libmediaplayerservice/MediaPlayerFactory.cpp8
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.cpp55
-rw-r--r--media/libmediaplayerservice/MediaPlayerService.h25
-rw-r--r--media/libmediaplayerservice/MediaRecorderClient.cpp3
-rw-r--r--media/libmediaplayerservice/MetadataRetrieverClient.cpp11
-rw-r--r--media/libmediaplayerservice/MetadataRetrieverClient.h5
-rw-r--r--media/libmediaplayerservice/MidiFile.cpp4
-rw-r--r--media/libmediaplayerservice/MidiFile.h4
-rw-r--r--media/libmediaplayerservice/MidiMetadataRetriever.cpp8
-rw-r--r--media/libmediaplayerservice/MidiMetadataRetriever.h4
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.cpp6
-rw-r--r--media/libmediaplayerservice/StagefrightPlayer.h4
-rw-r--r--media/libmediaplayerservice/StagefrightRecorder.cpp389
-rw-r--r--media/libmediaplayerservice/StagefrightRecorder.h37
-rw-r--r--media/libmediaplayerservice/TestPlayerStub.cpp6
-rw-r--r--media/libmediaplayerservice/TestPlayerStub.h4
-rw-r--r--media/libmediaplayerservice/nuplayer/Android.mk1
-rw-r--r--media/libmediaplayerservice/nuplayer/GenericSource.cpp15
-rw-r--r--media/libmediaplayerservice/nuplayer/GenericSource.h5
-rw-r--r--media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp19
-rw-r--r--media/libmediaplayerservice/nuplayer/HTTPLiveSource.h11
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayer.cpp291
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayer.h15
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp741
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h76
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp10
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerDriver.h4
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp12
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h4
-rw-r--r--media/libmediaplayerservice/nuplayer/NuPlayerSource.h8
-rw-r--r--media/libmediaplayerservice/nuplayer/RTSPSource.cpp5
-rw-r--r--media/libmediaplayerservice/nuplayer/RTSPSource.h2
-rw-r--r--media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp144
-rw-r--r--media/libmediaplayerservice/nuplayer/mp4/MP4Source.h53
-rw-r--r--media/libnbaio/Android.mk7
-rw-r--r--media/libnbaio/AudioBufferProviderSource.cpp8
-rw-r--r--media/libnbaio/AudioStreamInSource.cpp23
-rw-r--r--media/libnbaio/AudioStreamOutSink.cpp21
-rw-r--r--media/libnbaio/MonoPipe.cpp42
-rw-r--r--media/libnbaio/MonoPipeReader.cpp4
-rw-r--r--media/libnbaio/NBAIO.cpp134
-rw-r--r--media/libnbaio/NBLog.cpp6
-rw-r--r--media/libnbaio/Pipe.cpp15
-rw-r--r--media/libnbaio/PipeReader.cpp6
-rw-r--r--media/libnbaio/SourceAudioBufferProvider.cpp10
-rw-r--r--media/libstagefright/ACodec.cpp520
-rw-r--r--media/libstagefright/Android.mk14
-rw-r--r--media/libstagefright/AudioPlayer.cpp3
-rw-r--r--media/libstagefright/AudioSource.cpp8
-rw-r--r--media/libstagefright/AwesomePlayer.cpp77
-rw-r--r--media/libstagefright/CameraSource.cpp36
-rw-r--r--media/libstagefright/CameraSourceTimeLapse.cpp3
-rw-r--r--media/libstagefright/DataSource.cpp19
-rw-r--r--media/libstagefright/DataURISource.cpp109
-rw-r--r--media/libstagefright/HTTPBase.cpp48
-rw-r--r--media/libstagefright/MPEG4Extractor.cpp551
-rw-r--r--media/libstagefright/MPEG4Writer.cpp92
-rw-r--r--media/libstagefright/MediaCodec.cpp59
-rw-r--r--media/libstagefright/MediaCodecList.cpp136
-rw-r--r--media/libstagefright/MediaCodecSource.cpp881
-rw-r--r--media/libstagefright/MediaDefs.cpp4
-rw-r--r--media/libstagefright/MediaMuxer.cpp27
-rw-r--r--media/libstagefright/NuCachedSource2.cpp9
-rw-r--r--media/libstagefright/NuMediaExtractor.cpp6
-rw-r--r--media/libstagefright/OMXClient.cpp12
-rw-r--r--media/libstagefright/OMXCodec.cpp155
-rw-r--r--media/libstagefright/SampleIterator.cpp9
-rw-r--r--media/libstagefright/SampleTable.cpp7
-rw-r--r--media/libstagefright/SkipCutBuffer.cpp3
-rw-r--r--media/libstagefright/StagefrightMediaScanner.cpp3
-rw-r--r--media/libstagefright/StagefrightMetadataRetriever.cpp7
-rw-r--r--media/libstagefright/SurfaceMediaSource.cpp29
-rw-r--r--media/libstagefright/Utils.cpp99
-rw-r--r--media/libstagefright/avc_utils.cpp38
-rw-r--r--media/libstagefright/chromium_http/Android.mk39
-rw-r--r--media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp355
-rw-r--r--media/libstagefright/chromium_http/DataUriSource.cpp68
-rw-r--r--media/libstagefright/chromium_http/support.cpp659
-rw-r--r--media/libstagefright/chromium_http/support.h178
-rw-r--r--media/libstagefright/chromium_http_stub.cpp102
-rw-r--r--media/libstagefright/codecs/aacdec/Android.mk5
-rw-r--r--media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp372
-rw-r--r--media/libstagefright/codecs/aacdec/DrcPresModeWrap.h62
-rw-r--r--media/libstagefright/codecs/aacdec/SoftAAC2.cpp729
-rw-r--r--media/libstagefright/codecs/aacdec/SoftAAC2.h27
-rw-r--r--media/libstagefright/codecs/aacenc/Android.mk6
-rw-r--r--media/libstagefright/codecs/aacenc/basic_op/oper_32b.c4
-rw-r--r--media/libstagefright/codecs/aacenc/src/aacenc.c8
-rw-r--r--media/libstagefright/codecs/aacenc/src/bitenc.c3
-rw-r--r--media/libstagefright/codecs/aacenc/src/psy_main.c6
-rw-r--r--media/libstagefright/codecs/aacenc/src/qc_main.c8
-rw-r--r--media/libstagefright/codecs/aacenc/src/tns.c4
-rw-r--r--media/libstagefright/codecs/amrnb/common/Android.mk2
-rw-r--r--media/libstagefright/codecs/amrnb/dec/Android.mk4
-rw-r--r--media/libstagefright/codecs/amrnb/enc/Android.mk4
-rw-r--r--media/libstagefright/codecs/amrwb/Android.mk2
-rw-r--r--media/libstagefright/codecs/amrwbenc/Android.mk4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/autocorr.c4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/convolve.c4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/pitch_f4.c3
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/syn_filt.c4
-rw-r--r--media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c4
-rw-r--r--media/libstagefright/codecs/avc/common/Android.mk2
-rw-r--r--media/libstagefright/codecs/avc/enc/Android.mk4
-rw-r--r--media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp15
-rw-r--r--media/libstagefright/codecs/common/Android.mk2
-rw-r--r--media/libstagefright/codecs/flac/enc/Android.mk2
-rw-r--r--media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp25
-rw-r--r--media/libstagefright/codecs/g711/dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/gsm/dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/hevcdec/Android.mk26
-rw-r--r--media/libstagefright/codecs/hevcdec/SoftHEVC.cpp710
-rw-r--r--media/libstagefright/codecs/hevcdec/SoftHEVC.h117
-rw-r--r--media/libstagefright/codecs/m4v_h263/dec/Android.mk4
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/Android.mk4
-rw-r--r--media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp2
-rw-r--r--media/libstagefright/codecs/mp3dec/Android.mk4
-rw-r--r--media/libstagefright/codecs/mp3dec/SoftMP3.cpp20
-rw-r--r--media/libstagefright/codecs/on2/dec/Android.mk2
-rw-r--r--media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp333
-rw-r--r--media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h85
-rw-r--r--media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c3
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c5
-rwxr-xr-xmedia/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c3
-rw-r--r--media/libstagefright/codecs/opus/Android.mk4
-rw-r--r--media/libstagefright/codecs/opus/dec/Android.mk19
-rw-r--r--media/libstagefright/codecs/opus/dec/SoftOpus.cpp540
-rw-r--r--media/libstagefright/codecs/opus/dec/SoftOpus.h94
-rw-r--r--media/libstagefright/codecs/raw/Android.mk2
-rw-r--r--media/libstagefright/codecs/vorbis/dec/Android.mk2
-rw-r--r--media/libstagefright/colorconversion/SoftwareRenderer.cpp7
-rw-r--r--media/libstagefright/data/media_codecs_google_audio.xml36
-rw-r--r--media/libstagefright/data/media_codecs_google_telephony.xml21
-rw-r--r--media/libstagefright/data/media_codecs_google_video.xml33
-rw-r--r--media/libstagefright/foundation/AString.cpp8
-rw-r--r--media/libstagefright/foundation/base64.cpp6
-rw-r--r--media/libstagefright/http/Android.mk28
-rw-r--r--media/libstagefright/http/HTTPHelper.cpp70
-rw-r--r--media/libstagefright/http/HTTPHelper.h31
-rw-r--r--media/libstagefright/http/MediaHTTP.cpp205
-rw-r--r--media/libstagefright/httplive/Android.mk2
-rw-r--r--media/libstagefright/httplive/LiveSession.cpp37
-rw-r--r--media/libstagefright/httplive/LiveSession.h10
-rw-r--r--media/libstagefright/httplive/M3UParser.cpp84
-rw-r--r--media/libstagefright/httplive/M3UParser.h3
-rw-r--r--media/libstagefright/httplive/PlaylistFetcher.cpp65
-rw-r--r--media/libstagefright/httplive/PlaylistFetcher.h3
-rw-r--r--media/libstagefright/id3/Android.mk4
-rw-r--r--media/libstagefright/id3/ID3.cpp79
-rw-r--r--media/libstagefright/include/AwesomePlayer.h3
-rw-r--r--media/libstagefright/include/ChromiumHTTPDataSource.h125
-rw-r--r--media/libstagefright/include/FragmentedMP4Parser.h274
-rw-r--r--media/libstagefright/include/HTTPBase.h11
-rw-r--r--media/libstagefright/include/MPEG4Extractor.h11
-rw-r--r--media/libstagefright/include/SDPLoader.h8
-rw-r--r--media/libstagefright/include/SampleIterator.h4
-rw-r--r--media/libstagefright/include/SampleTable.h3
-rw-r--r--media/libstagefright/include/SoftwareRenderer.h2
-rw-r--r--media/libstagefright/include/StagefrightMetadataRetriever.h1
-rw-r--r--media/libstagefright/matroska/MatroskaExtractor.cpp29
-rw-r--r--media/libstagefright/matroska/MatroskaExtractor.h1
-rw-r--r--media/libstagefright/mp4/FragmentedMP4Parser.cpp1993
-rw-r--r--media/libstagefright/mp4/TrackFragment.cpp364
-rw-r--r--media/libstagefright/mp4/TrackFragment.h122
-rw-r--r--media/libstagefright/mpeg2ts/ATSParser.cpp4
-rw-r--r--media/libstagefright/mpeg2ts/Android.mk2
-rw-r--r--media/libstagefright/mpeg2ts/AnotherPacketSource.cpp4
-rw-r--r--media/libstagefright/mpeg2ts/ESQueue.cpp10
-rw-r--r--media/libstagefright/omx/GraphicBufferSource.cpp119
-rw-r--r--media/libstagefright/omx/GraphicBufferSource.h36
-rw-r--r--media/libstagefright/omx/OMX.cpp2
-rw-r--r--media/libstagefright/omx/OMXNodeInstance.cpp19
-rw-r--r--media/libstagefright/omx/SoftOMXPlugin.cpp2
-rw-r--r--media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp6
-rw-r--r--media/libstagefright/omx/tests/Android.mk2
-rw-r--r--media/libstagefright/omx/tests/OMXHarness.cpp6
-rw-r--r--media/libstagefright/rtsp/APacketSource.cpp2
-rw-r--r--media/libstagefright/rtsp/ARTSPConnection.cpp4
-rw-r--r--media/libstagefright/rtsp/Android.mk4
-rw-r--r--media/libstagefright/rtsp/MyHandler.h18
-rw-r--r--media/libstagefright/rtsp/SDPLoader.cpp30
-rw-r--r--media/libstagefright/tests/SurfaceMediaSource_test.cpp23
-rw-r--r--media/libstagefright/timedtext/TimedTextDriver.cpp7
-rw-r--r--media/libstagefright/timedtext/test/Android.mk8
-rw-r--r--media/libstagefright/webm/Android.mk23
-rw-r--r--media/libstagefright/webm/EbmlUtil.cpp108
-rw-r--r--media/libstagefright/webm/EbmlUtil.h50
-rw-r--r--media/libstagefright/webm/LinkedBlockingQueue.h79
-rw-r--r--media/libstagefright/webm/WebmConstants.h133
-rw-r--r--media/libstagefright/webm/WebmElement.cpp367
-rw-r--r--media/libstagefright/webm/WebmElement.h127
-rw-r--r--media/libstagefright/webm/WebmFrame.cpp83
-rw-r--r--media/libstagefright/webm/WebmFrame.h46
-rw-r--r--media/libstagefright/webm/WebmFrameThread.cpp399
-rw-r--r--media/libstagefright/webm/WebmFrameThread.h160
-rw-r--r--media/libstagefright/webm/WebmWriter.cpp551
-rw-r--r--media/libstagefright/webm/WebmWriter.h130
-rw-r--r--media/libstagefright/wifi-display/source/PlaybackSession.cpp8
-rw-r--r--media/libstagefright/wifi-display/source/PlaybackSession.h3
-rw-r--r--media/libstagefright/wifi-display/source/RepeaterSource.cpp3
-rw-r--r--media/libstagefright/yuv/Android.mk2
-rw-r--r--media/libstagefright/yuv/YUVImage.cpp12
-rw-r--r--media/mediaserver/Android.mk10
-rw-r--r--media/mediaserver/main_mediaserver.cpp4
-rw-r--r--media/mtp/MtpProperty.cpp13
-rw-r--r--media/mtp/MtpServer.cpp6
-rw-r--r--media/mtp/MtpServer.h1
-rw-r--r--media/ndk/Android.mk52
-rw-r--r--media/ndk/NdkMediaCodec.cpp503
-rw-r--r--media/ndk/NdkMediaCrypto.cpp121
-rw-r--r--media/ndk/NdkMediaCryptoPriv.h41
-rw-r--r--media/ndk/NdkMediaDrm.cpp728
-rw-r--r--media/ndk/NdkMediaExtractor.cpp355
-rw-r--r--media/ndk/NdkMediaFormat.cpp259
-rw-r--r--media/ndk/NdkMediaFormatPriv.h44
-rw-r--r--media/ndk/NdkMediaMuxer.cpp107
292 files changed, 21469 insertions, 7971 deletions
diff --git a/media/img_utils/Android.mk b/media/img_utils/Android.mk
new file mode 100644
index 0000000..1cd00bd
--- /dev/null
+++ b/media/img_utils/Android.mk
@@ -0,0 +1,15 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+include $(call all-subdir-makefiles)
diff --git a/media/img_utils/include/img_utils/ByteArrayOutput.h b/media/img_utils/include/img_utils/ByteArrayOutput.h
new file mode 100644
index 0000000..ba73977
--- /dev/null
+++ b/media/img_utils/include/img_utils/ByteArrayOutput.h
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_BYTE_ARRAY_OUTPUT_H
+#define IMG_UTILS_BYTE_ARRAY_OUTPUT_H
+
+#include <img_utils/Output.h>
+
+#include <utils/Errors.h>
+#include <utils/Vector.h>
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class that accumulates written bytes into a buffer.
+ */
+class ANDROID_API ByteArrayOutput : public Output {
+ public:
+
+ ByteArrayOutput();
+
+ virtual ~ByteArrayOutput();
+
+ /**
+ * Open this ByteArrayOutput.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Write bytes from the given buffer. The number of bytes given in the count
+ * argument will be written. Bytes will be written from the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
+
+ /**
+ * Close this ByteArrayOutput.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+
+ /**
+ * Get current size of the array of bytes written.
+ */
+ virtual size_t getSize() const;
+
+ /**
+ * Get pointer to array of bytes written. It is not valid to use this pointer if
+ * open, write, or close is called after this method.
+ */
+ virtual const uint8_t* getArray() const;
+
+ protected:
+ Vector<uint8_t> mByteArray;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_BYTE_ARRAY_OUTPUT_H*/
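
As a quick orientation, a hypothetical usage sketch (not part of this patch) that exercises only the methods declared above: open the output, append a few bytes, then read the accumulated buffer back.

    #include <img_utils/ByteArrayOutput.h>

    using namespace android;
    using namespace android::img_utils;

    status_t writeSomeBytes() {
        ByteArrayOutput out;
        status_t err = out.open();
        if (err != OK) return err;

        const uint8_t msg[] = {'D', 'N', 'G'};
        // Write all 3 bytes of msg, starting at index 0 of the source buffer.
        err = out.write(msg, /*offset*/0, /*count*/3);
        if (err != OK) return err;

        // Only valid until the next open/write/close call on out.
        const uint8_t* bytes = out.getArray();
        size_t len = out.getSize();            // == 3 here
        (void) bytes;
        (void) len;

        return out.close();
    }
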
diff --git a/media/img_utils/include/img_utils/DngUtils.h b/media/img_utils/include/img_utils/DngUtils.h
new file mode 100644
index 0000000..4389b02
--- /dev/null
+++ b/media/img_utils/include/img_utils/DngUtils.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_DNG_UTILS_H
+#define IMG_UTILS_DNG_UTILS_H
+
+#include <img_utils/ByteArrayOutput.h>
+#include <img_utils/EndianUtils.h>
+
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <utils/RefBase.h>
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+#define NELEMS(x) ((int) (sizeof(x) / sizeof((x)[0])))
+
+/**
+ * Utility class for building values for the OpcodeList tags specified
+ * in the Adobe DNG 1.4 spec.
+ */
+class ANDROID_API OpcodeListBuilder : public LightRefBase<OpcodeListBuilder> {
+ public:
+ enum CfaLayout {
+ CFA_RGGB = 0,
+ CFA_GRBG,
+ CFA_GBRG,
+ CFA_BGGR,
+ };
+
+ OpcodeListBuilder();
+ virtual ~OpcodeListBuilder();
+
+ /**
+ * Get the total size of this opcode list in bytes.
+ */
+ virtual size_t getSize() const;
+
+ /**
+ * Get the number of opcodes defined in this list.
+ */
+ virtual uint32_t getCount() const;
+
+ /**
+ * Write the opcode list into the given buffer. This buffer
+ * must be able to hold at least as many elements as returned
+ * by calling the getSize() method.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t buildOpList(/*out*/ uint8_t* buf) const;
+
+ /**
+ * Add GainMap opcode(s) for the given metadata parameters. The given
+ * CFA layout must match the layout of the shading map passed into the
+ * lensShadingMap parameter.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addGainMapsForMetadata(uint32_t lsmWidth,
+ uint32_t lsmHeight,
+ uint32_t activeAreaTop,
+ uint32_t activeAreaLeft,
+ uint32_t activeAreaBottom,
+ uint32_t activeAreaRight,
+ CfaLayout cfa,
+ const float* lensShadingMap);
+
+
+ /**
+ * Add a GainMap opcode with the given fields. The mapGains array
+ * must have mapPointsV * mapPointsH * mapPlanes elements.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t addGainMap(uint32_t top,
+ uint32_t left,
+ uint32_t bottom,
+ uint32_t right,
+ uint32_t plane,
+ uint32_t planes,
+ uint32_t rowPitch,
+ uint32_t colPitch,
+ uint32_t mapPointsV,
+ uint32_t mapPointsH,
+ double mapSpacingV,
+ double mapSpacingH,
+ double mapOriginV,
+ double mapOriginH,
+ uint32_t mapPlanes,
+ const float* mapGains);
+
+ // TODO: Add other Opcode methods
+ protected:
+ static const uint32_t FLAG_OPTIONAL = 0x1u;
+ static const uint32_t FLAG_OPTIONAL_FOR_PREVIEW = 0x2u;
+
+ enum {
+ GAIN_MAP_ID = 9,
+ LSM_R_IND = 0,
+ LSM_GE_IND = 1,
+ LSM_GO_IND = 2,
+ LSM_B_IND = 3,
+ };
+
+ uint32_t mCount;
+ ByteArrayOutput mOpList;
+ EndianOutput mEndianOut;
+
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_DNG_UTILS_H*/
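
A hypothetical sketch (not part of this patch) of how the builder above is meant to be driven: add gain-map opcodes for a lens shading map, then serialize the opcode list into a buffer sized by getSize(). The map and active-area dimensions below are made up for illustration.

    #include <img_utils/DngUtils.h>

    #include <vector>

    using namespace android;
    using namespace android::img_utils;

    // lensShadingMap is assumed to hold lsmWidth * lsmHeight * 4 floats, RGGB order.
    status_t buildShadingOpcodes(const float* lensShadingMap) {
        OpcodeListBuilder builder;

        status_t err = builder.addGainMapsForMetadata(
                /*lsmWidth*/ 17, /*lsmHeight*/ 13,
                /*activeAreaTop*/ 0, /*activeAreaLeft*/ 0,
                /*activeAreaBottom*/ 3000, /*activeAreaRight*/ 4000,
                OpcodeListBuilder::CFA_RGGB, lensShadingMap);
        if (err != OK) return err;

        // Serialize into a buffer of exactly getSize() bytes; this is the kind of
        // value that would back an OpcodeList2/OpcodeList3 tag.
        std::vector<uint8_t> opList(builder.getSize());
        return builder.buildOpList(opList.data());
    }
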
diff --git a/media/img_utils/include/img_utils/EndianUtils.h b/media/img_utils/include/img_utils/EndianUtils.h
new file mode 100644
index 0000000..e99be1a
--- /dev/null
+++ b/media/img_utils/include/img_utils/EndianUtils.h
@@ -0,0 +1,250 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_ENDIAN_UTILS
+#define IMG_UTILS_ENDIAN_UTILS
+
+#include <img_utils/Output.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+#include <endian.h>
+#include <assert.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Endianness types supported.
+ */
+enum ANDROID_API Endianness {
+ UNDEFINED_ENDIAN, // Default endianness will be used.
+ BIG,
+ LITTLE
+};
+
+/**
+ * Convert from the native device endianness to big endian.
+ */
+template<typename T>
+T convertToBigEndian(T in);
+
+/**
+ * Convert from the native device endianness to little endian.
+ */
+template<typename T>
+T convertToLittleEndian(T in);
+
+/**
+ * A utility class for writing to an Output with the given endianness.
+ */
+class ANDROID_API EndianOutput : public Output {
+ public:
+ /**
+ * Wrap the given Output. Calling write methods will result in
+ * writes to this output.
+ */
+ EndianOutput(Output* out, Endianness end=LITTLE);
+
+ virtual ~EndianOutput();
+
+ /**
+ * Call open on the wrapped output.
+ */
+ virtual status_t open();
+
+ /**
+ * Call close on the wrapped output.
+ */
+ virtual status_t close();
+
+ /**
+ * Set the endianness to use when writing.
+ */
+ virtual void setEndianness(Endianness end);
+
+ /**
+ * Get the currently configured endianness.
+ */
+ virtual Endianness getEndianness() const;
+
+ /**
+ * Get the current number of bytes written by this EndianOutput.
+ */
+ virtual uint32_t getCurrentOffset() const;
+
+
+ // TODO: switch write methods to uint32_t instead of size_t,
+ // the max size of a TIFF file is bounded
+
+ /**
+ * The following methods will write elements from given input buffer to the output.
+ * Count elements in the buffer will be written with the endianness set for this
+ * EndianOutput. If the given offset is greater than zero, that many elements will
+ * be skipped in the buffer before writing.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int8_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const uint16_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int16_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const uint32_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int32_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const uint64_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const int64_t* buf, size_t offset, size_t count);
+
+ virtual status_t write(const float* buf, size_t offset, size_t count);
+
+ virtual status_t write(const double* buf, size_t offset, size_t count);
+
+ protected:
+ template<typename T>
+ inline status_t writeHelper(const T* buf, size_t offset, size_t count);
+
+ uint32_t mOffset;
+ Output* mOutput;
+ Endianness mEndian;
+};
+
+template<typename T>
+inline status_t EndianOutput::writeHelper(const T* buf, size_t offset, size_t count) {
+ assert(offset <= count);
+ status_t res = OK;
+ size_t size = sizeof(T);
+ switch(mEndian) {
+ case BIG: {
+ for (size_t i = offset; i < count; ++i) {
+ T tmp = convertToBigEndian<T>(buf[offset + i]);
+ if ((res = mOutput->write(reinterpret_cast<uint8_t*>(&tmp), 0, size))
+ != OK) {
+ return res;
+ }
+ mOffset += size;
+ }
+ break;
+ }
+ case LITTLE: {
+ for (size_t i = offset; i < count; ++i) {
+ T tmp = convertToLittleEndian<T>(buf[offset + i]);
+ if ((res = mOutput->write(reinterpret_cast<uint8_t*>(&tmp), 0, size))
+ != OK) {
+ return res;
+ }
+ mOffset += size;
+ }
+ break;
+ }
+ default: {
+ return BAD_VALUE;
+ }
+ }
+ return res;
+}
+
+template<>
+inline uint8_t convertToBigEndian(uint8_t in) {
+ return in;
+}
+
+template<>
+inline int8_t convertToBigEndian(int8_t in) {
+ return in;
+}
+
+template<>
+inline uint16_t convertToBigEndian(uint16_t in) {
+ return htobe16(in);
+}
+
+template<>
+inline int16_t convertToBigEndian(int16_t in) {
+ return htobe16(in);
+}
+
+template<>
+inline uint32_t convertToBigEndian(uint32_t in) {
+ return htobe32(in);
+}
+
+template<>
+inline int32_t convertToBigEndian(int32_t in) {
+ return htobe32(in);
+}
+
+template<>
+inline uint64_t convertToBigEndian(uint64_t in) {
+ return htobe64(in);
+}
+
+template<>
+inline int64_t convertToBigEndian(int64_t in) {
+ return htobe64(in);
+}
+
+template<>
+inline uint8_t convertToLittleEndian(uint8_t in) {
+ return in;
+}
+
+template<>
+inline int8_t convertToLittleEndian(int8_t in) {
+ return in;
+}
+
+template<>
+inline uint16_t convertToLittleEndian(uint16_t in) {
+ return htole16(in);
+}
+
+template<>
+inline int16_t convertToLittleEndian(int16_t in) {
+ return htole16(in);
+}
+
+template<>
+inline uint32_t convertToLittleEndian(uint32_t in) {
+ return htole32(in);
+}
+
+template<>
+inline int32_t convertToLittleEndian(int32_t in) {
+ return htole32(in);
+}
+
+template<>
+inline uint64_t convertToLittleEndian(uint64_t in) {
+ return htole64(in);
+}
+
+template<>
+inline int64_t convertToLittleEndian(int64_t in) {
+ return htole64(in);
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_ENDIAN_UTILS*/
+
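
A hypothetical sketch (not part of this patch) of the intended wrapping pattern: an EndianOutput in front of a ByteArrayOutput, writing 16-bit values that are converted to little endian as needed before reaching the underlying buffer.

    #include <img_utils/ByteArrayOutput.h>
    #include <img_utils/EndianUtils.h>

    using namespace android;
    using namespace android::img_utils;

    status_t writeShortsLittleEndian() {
        ByteArrayOutput byteOut;
        EndianOutput out(&byteOut, LITTLE);
        status_t err = out.open();
        if (err != OK) return err;

        const uint16_t values[] = {0x1234, 0xABCD};
        // Writes 2 uint16_t elements starting at index 0; on a big-endian host
        // each element goes through convertToLittleEndian() first.
        err = out.write(values, /*offset*/0, /*count*/2);
        if (err != OK) return err;

        // out.getCurrentOffset() == 4 here; byteOut holds 0x34 0x12 0xCD 0xAB.
        return out.close();
    }
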
diff --git a/media/img_utils/include/img_utils/FileInput.h b/media/img_utils/include/img_utils/FileInput.h
new file mode 100644
index 0000000..d3c5ec1
--- /dev/null
+++ b/media/img_utils/include/img_utils/FileInput.h
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_FILE_INPUT_H
+#define IMG_UTILS_FILE_INPUT_H
+
+#include <img_utils/Input.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/String8.h>
+#include <stdio.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class for reading from a file.
+ */
+class ANDROID_API FileInput : public Input {
+ public:
+ /**
+ * Create a file input for the given path.
+ */
+ FileInput(String8 path);
+
+ virtual ~FileInput();
+
+ /**
+ * Open a file descriptor to the path given in the constructor.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Read bytes from the file into the given buffer. At most, the number
+ * of bytes given in the count argument will be read. Bytes will be written
+ * into the given buffer starting at the index given in the offset argument.
+ *
+ * Returns the number of bytes read. If an error has occurred, the value pointed
+ * to by the given status_t pointer will be set to a negative error code.
+ */
+ virtual size_t read(uint8_t* buf, size_t offset, size_t count, status_t* err);
+
+ /**
+ * Close the file descriptor to the path given in the constructor.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+ private:
+ FILE *mFp;
+ String8 mPath;
+ bool mOpen;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+
+#endif /*IMG_UTILS_INPUT_H*/
diff --git a/media/img_utils/include/img_utils/FileOutput.h b/media/img_utils/include/img_utils/FileOutput.h
new file mode 100644
index 0000000..fd5be27
--- /dev/null
+++ b/media/img_utils/include/img_utils/FileOutput.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_FILE_OUTPUT_H
+#define IMG_UTILS_FILE_OUTPUT_H
+
+#include <img_utils/Output.h>
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/String8.h>
+#include <stdio.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+class ANDROID_API FileOutput : public Output {
+ public:
+ FileOutput(String8 path);
+ virtual ~FileOutput();
+ virtual status_t open();
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count);
+ virtual status_t close();
+ private:
+ FILE *mFp;
+ String8 mPath;
+ bool mOpen;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_FILE_OUTPUT_H*/
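
A hypothetical round trip (not part of this patch) through FileOutput and FileInput; the on-device path below is an arbitrary example.

    #include <img_utils/FileInput.h>
    #include <img_utils/FileOutput.h>

    using namespace android;
    using namespace android::img_utils;

    status_t roundTrip() {
        String8 path("/data/local/tmp/img_utils_example.bin");  // hypothetical path

        FileOutput out(path);
        if (out.open() != OK) return UNKNOWN_ERROR;
        const uint8_t data[] = {1, 2, 3, 4};
        out.write(data, /*offset*/0, /*count*/4);
        out.close();

        FileInput in(path);
        if (in.open() != OK) return UNKNOWN_ERROR;
        uint8_t readBack[4] = {0};
        status_t err = OK;
        // read() returns the number of bytes actually read; err reports failures.
        size_t n = in.read(readBack, /*offset*/0, /*count*/4, &err);
        in.close();

        return (err == OK && n == 4) ? OK : UNKNOWN_ERROR;
    }
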
diff --git a/media/img_utils/include/img_utils/Input.h b/media/img_utils/include/img_utils/Input.h
new file mode 100644
index 0000000..2166601
--- /dev/null
+++ b/media/img_utils/include/img_utils/Input.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_INPUT_H
+#define IMG_UTILS_INPUT_H
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class used as a source of bytes.
+ */
+class ANDROID_API Input {
+ public:
+ virtual ~Input();
+
+ /**
+ * Open this Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Read bytes into the given buffer. At most, the number of bytes given in the
+ * count argument will be read. Bytes will be written into the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns the number of bytes read. If an error has occurred, the value pointed
+ * to by the given status_t pointer will be set to a negative error code.
+ */
+ virtual size_t read(uint8_t* buf, size_t offset, size_t count, status_t* err) = 0;
+
+ /**
+ * Close the Input. It is not valid to call open on a previously closed Input.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+
+#endif /*IMG_UTILS_INPUT_H*/
diff --git a/media/img_utils/include/img_utils/Orderable.h b/media/img_utils/include/img_utils/Orderable.h
new file mode 100644
index 0000000..87253a4
--- /dev/null
+++ b/media/img_utils/include/img_utils/Orderable.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_ORDERABLE
+#define IMG_UTILS_ORDERABLE
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+#define COMPARE_DEF(op) \
+inline bool operator op (const Orderable& orderable) const;
+
+/**
+ * Subclasses of Orderable can be compared and sorted. This is
+ * intended to be used to create sorted arrays of TIFF entries
+ * and IFDs.
+ */
+class ANDROID_API Orderable {
+ public:
+ virtual ~Orderable();
+
+ /**
+ * Comparison operators are based on the value returned
+ * from this method.
+ */
+ virtual uint32_t getComparableValue() const = 0;
+
+ COMPARE_DEF(>)
+ COMPARE_DEF(<)
+ COMPARE_DEF(>=)
+ COMPARE_DEF(<=)
+ COMPARE_DEF(==)
+ COMPARE_DEF(!=)
+};
+
+#undef COMPARE_DEF
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_ORDERABLE*/
diff --git a/media/img_utils/include/img_utils/Output.h b/media/img_utils/include/img_utils/Output.h
new file mode 100644
index 0000000..35fae23
--- /dev/null
+++ b/media/img_utils/include/img_utils/Output.h
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_OUTPUT_H
+#define IMG_UTILS_OUTPUT_H
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Utility class used to output bytes.
+ */
+class ANDROID_API Output {
+ public:
+ virtual ~Output();
+
+ /**
+ * Open this Output.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t open();
+
+ /**
+ * Write bytes from the given buffer. The number of bytes given in the count
+ * argument will be written. Bytes will be written from the given buffer starting
+ * at the index given in the offset argument.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t write(const uint8_t* buf, size_t offset, size_t count) = 0;
+
+ /**
+ * Close this Output. It is not valid to call open on a previously closed Output.
+ *
+ * Returns OK on success, or a negative error code.
+ */
+ virtual status_t close();
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_OUTPUT_H*/
diff --git a/media/libstagefright/chromium_http/chromium_http_stub.cpp b/media/img_utils/include/img_utils/Pair.h
index 289f6de..d651cac 100644
--- a/media/libstagefright/chromium_http/chromium_http_stub.cpp
+++ b/media/img_utils/include/img_utils/Pair.h
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,25 +14,31 @@
* limitations under the License.
*/
-#include <dlfcn.h>
+#ifndef IMG_UTILS_PAIR_H
+#define IMG_UTILS_PAIR_H
-#include <include/chromium_http_stub.h>
-#include <include/ChromiumHTTPDataSource.h>
-#include <include/DataUriSource.h>
+#include <cutils/compiler.h>
namespace android {
+namespace img_utils {
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
- return new ChromiumHTTPDataSource(flags);
-}
+/**
+ * Generic pair utility class. Nothing special here.
+ */
+template<typename F, typename S>
+class ANDROID_API Pair {
+ public:
+ F first;
+ S second;
+
+ Pair() {}
+
+ Pair(const Pair& o) : first(o.first), second(o.second) {}
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- return ChromiumHTTPDataSource::UpdateProxyConfig(host, port, exclusionList);
-}
+ Pair(const F& f, const S& s) : first(f), second(s) {}
+};
-DataSource *createDataUriSource(const char *uri) {
- return new DataUriSource(uri);
-}
+} /*namespace img_utils*/
+} /*namespace android*/
-}
+#endif /*IMG_UTILS_PAIR_H*/
diff --git a/media/img_utils/include/img_utils/SortedEntryVector.h b/media/img_utils/include/img_utils/SortedEntryVector.h
new file mode 100644
index 0000000..f059a82
--- /dev/null
+++ b/media/img_utils/include/img_utils/SortedEntryVector.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_SORTED_ENTRY_VECTOR_H
+#define IMG_UTILS_SORTED_ENTRY_VECTOR_H
+
+#include <img_utils/TiffEntry.h>
+
+#include <utils/StrongPointer.h>
+#include <utils/SortedVector.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Subclass of SortedVector that has been extended to
+ * do comparisons/lookups based on the tag ID of the entries.
+ */
+class SortedEntryVector : public SortedVector<sp<TiffEntry> > {
+ public:
+ virtual ~SortedEntryVector();
+
+ /**
+ * Returns the index of the entry with the given tag ID, or
+ * -1 if none exists.
+ */
+ ssize_t indexOfTag(uint16_t tag) const;
+
+ protected:
+ /**
+ * Compare tag ID.
+ */
+ virtual int do_compare(const void* lhs, const void* rhs) const;
+};
+
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_SORTED_ENTRY_VECTOR_H*/
diff --git a/media/img_utils/include/img_utils/TagDefinitions.h b/media/img_utils/include/img_utils/TagDefinitions.h
new file mode 100644
index 0000000..6cc42b2
--- /dev/null
+++ b/media/img_utils/include/img_utils/TagDefinitions.h
@@ -0,0 +1,1187 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_TAG_DEFINITION_H
+#define IMG_UTILS_TIFF_TAG_DEFINITION_H
+
+#include <img_utils/TiffEntry.h>
+#include <img_utils/Output.h>
+#include <img_utils/TiffHelpers.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * Tag definitions contain information about standard TIFF compatible tags.
+ */
+typedef struct TagDefinition {
+ // The specified tag ID.
+ uint16_t tagId;
+ // The default type for this tag. This must be a valid TIFF type.
+ TagType defaultType;
+ // The default Image File Directory (IFD) for this tag.
+ uint32_t defaultIfd;
+ // The valid count for this tag, or 0 if the count is not fixed.
+ uint32_t fixedCount;
+ // The endianness of the tag value, or UNDEFINED_ENDIAN if there is no fixed endian
+ Endianness fixedEndian;
+} TagDefinition_t;
+
+/**
+ * Convenience defines for tag ids.
+ */
+enum {
+ TAG_RAWTOPREVIEWGAIN = 0xC7A8u,
+ TAG_NEWRAWIMAGEDIGEST = 0xC7A7u,
+ TAG_ORIGINALDEFAULTCROPSIZE = 0xC793u,
+ TAG_ORIGINALBESTQUALITYFINALSIZE = 0xC792u,
+ TAG_ORIGINALDEFAULTFINALSIZE = 0xC791u,
+ TAG_PROFILEHUESATMAPENCODING = 0xC7A3u,
+ TAG_PROFILELOOKTABLEENCODING = 0xC7A4u,
+ TAG_BASELINEEXPOSUREOFFSET = 0xC7A5u,
+ TAG_DEFAULTBLACKRENDER = 0xC7A6u,
+ TAG_DEFAULTUSERCROP = 0xC7B5u,
+ TAG_NOISEPROFILE = 0xC761u,
+ TAG_OPCODELIST3 = 0xC74Eu,
+ TAG_OPCODELIST2 = 0xC741u,
+ TAG_OPCODELIST1 = 0xC740u,
+ TAG_PROFILELOOKTABLEDATA = 0xC726u,
+ TAG_PROFILELOOKTABLEDIMS = 0xC725u,
+ TAG_ROWINTERLEAVEFACTOR = 0xC71Fu,
+ TAG_SUBTILEBLOCKSIZE = 0xC71Eu,
+ TAG_ORIGINALRAWFILEDIGEST = 0xC71Du,
+ TAG_RAWIMAGEDIGEST = 0xC71Cu,
+ TAG_PREVIEWDATETIME = 0xC71Bu,
+ TAG_PREVIEWCOLORSPACE = 0xC71Au,
+ TAG_PREVIEWSETTINGSDIGEST = 0xC719u,
+ TAG_PREVIEWSETTINGSNAME = 0xC718u,
+ TAG_PREVIEWAPPLICATIONVERSION = 0xC717u,
+ TAG_PREVIEWAPPLICATIONNAME = 0xC716u,
+ TAG_FORWARDMATRIX2 = 0xC715u,
+ TAG_FORWARDMATRIX1 = 0xC714u,
+ TAG_PROFILECOPYRIGHT = 0xC6FEu,
+ TAG_PROFILEEMBEDPOLICY = 0xC6FDu,
+ TAG_PROFILETONECURVE = 0xC6FCu,
+ TAG_PROFILEHUESATMAPDATA2 = 0xC6FBu,
+ TAG_PROFILEHUESATMAPDATA1 = 0xC6FAu,
+ TAG_PROFILEHUESATMAPDIMS = 0xC6F9u,
+ TAG_PROFILENAME = 0xC6F8u,
+ TAG_NOISEREDUCTIONAPPLIED = 0xC6F7u,
+ TAG_ASSHOTPROFILENAME = 0xC6F6u,
+ TAG_EXTRACAMERAPROFILES = 0xC6F5u,
+ TAG_PROFILECALIBRATIONSIGNATURE = 0xC6F4u,
+ TAG_CAMERACALIBRATIONSIGNATURE = 0xC6F3u,
+ TAG_COLORIMETRICREFERENCE = 0xC6BFu,
+ TAG_CURRENTPREPROFILEMATRIX = 0xC692u,
+ TAG_CURRENTICCPROFILE = 0xC691u,
+ TAG_ASSHOTPREPROFILEMATRIX = 0xC690u,
+ TAG_ASSHOTICCPROFILE = 0xC68Fu,
+ TAG_MASKEDAREAS = 0xC68Eu,
+ TAG_ACTIVEAREA = 0xC68Du,
+ TAG_ORIGINALRAWFILEDATA = 0xC68Cu,
+ TAG_ORIGINALRAWFILENAME = 0xC68Bu,
+ TAG_RAWDATAUNIQUEID = 0xC65Du,
+ TAG_MAKERNOTESAFETY = 0xC635u,
+ TAG_DNGPRIVATEDATA = 0xC634u,
+ TAG_SHADOWSCALE = 0xC633u,
+ TAG_ANTIALIASSTRENGTH = 0xC632u,
+ TAG_CHROMABLURRADIUS = 0xC631u,
+ TAG_LENSINFO = 0xC630u,
+ TAG_CAMERASERIALNUMBER = 0xC62Fu,
+ TAG_LINEARRESPONSELIMIT = 0xC62Eu,
+ TAG_BAYERGREENSPLIT = 0xC62Du,
+ TAG_BASELINESHARPNESS = 0xC62Cu,
+ TAG_BASELINENOISE = 0xC62Bu,
+ TAG_BASELINEEXPOSURE = 0xC62Au,
+ TAG_ASSHOTWHITEXY = 0xC629u,
+ TAG_ASSHOTNEUTRAL = 0xC628u,
+ TAG_ANALOGBALANCE = 0xC627u,
+ TAG_REDUCTIONMATRIX2 = 0xC626u,
+ TAG_REDUCTIONMATRIX1 = 0xC625u,
+ TAG_CAMERACALIBRATION2 = 0xC624u,
+ TAG_CAMERACALIBRATION1 = 0xC623u,
+ TAG_COLORMATRIX2 = 0xC622u,
+ TAG_COLORMATRIX1 = 0xC621u,
+ TAG_CALIBRATIONILLUMINANT2 = 0xC65Bu,
+ TAG_CALIBRATIONILLUMINANT1 = 0xC65Au,
+ TAG_DEFAULTCROPSIZE = 0xC620u,
+ TAG_DEFAULTCROPORIGIN = 0xC61Fu,
+ TAG_BESTQUALITYSCALE = 0xC65Cu,
+ TAG_DEFAULTSCALE = 0xC61Eu,
+ TAG_WHITELEVEL = 0xC61Du,
+ TAG_BLACKLEVELDELTAV = 0xC61Cu,
+ TAG_BLACKLEVELDELTAH = 0xC61Bu,
+ TAG_BLACKLEVEL = 0xC61Au,
+ TAG_BLACKLEVELREPEATDIM = 0xC619u,
+ TAG_LINEARIZATIONTABLE = 0xC618u,
+ TAG_CFALAYOUT = 0xC617u,
+ TAG_CFAPLANECOLOR = 0xC616u,
+ TAG_LOCALIZEDCAMERAMODEL = 0xC615u,
+ TAG_UNIQUECAMERAMODEL = 0xC614u,
+ TAG_DNGBACKWARDVERSION = 0xC613u,
+ TAG_DNGVERSION = 0xC612u,
+ TAG_SUBFILETYPE = 0x00FFu,
+ TAG_YRESOLUTION = 0x011Bu,
+ TAG_XRESOLUTION = 0x011Au,
+ TAG_THRESHHOLDING = 0x0107u,
+ TAG_STRIPOFFSETS = 0x0111u,
+ TAG_STRIPBYTECOUNTS = 0x0117u,
+ TAG_SOFTWARE = 0x0131u,
+ TAG_SAMPLESPERPIXEL = 0x0115u,
+ TAG_ROWSPERSTRIP = 0x0116u,
+ TAG_RESOLUTIONUNIT = 0x0128u,
+ TAG_PLANARCONFIGURATION = 0x011Cu,
+ TAG_PHOTOMETRICINTERPRETATION = 0x0106u,
+ TAG_ORIENTATION = 0x0112u,
+ TAG_NEWSUBFILETYPE = 0x00FEu,
+ TAG_MODEL = 0x0110u,
+ TAG_MINSAMPLEVALUE = 0x0118u,
+ TAG_MAXSAMPLEVALUE = 0x0119u,
+ TAG_MAKE = 0x010Fu,
+ TAG_IMAGEWIDTH = 0x0100u,
+ TAG_IMAGELENGTH = 0x0101u,
+ TAG_IMAGEDESCRIPTION = 0x010Eu,
+ TAG_HOSTCOMPUTER = 0x013Cu,
+ TAG_GRAYRESPONSEUNIT = 0x0122u,
+ TAG_GRAYRESPONSECURVE = 0x0123u,
+ TAG_FREEOFFSETS = 0x0120u,
+ TAG_FREEBYTECOUNTS = 0x0121u,
+ TAG_FILLORDER = 0x010Au,
+ TAG_EXTRASAMPLES = 0x0152u,
+ TAG_DATETIME = 0x0132u,
+ TAG_COPYRIGHT = 0x8298u,
+ TAG_COMPRESSION = 0x0103u,
+ TAG_COLORMAP = 0x0140u,
+ TAG_CELLWIDTH = 0x0108u,
+ TAG_CELLLENGTH = 0x0109u,
+ TAG_BITSPERSAMPLE = 0x0102u,
+ TAG_ARTIST = 0x013Bu,
+ TAG_EXIFVERSION = 0x9000u,
+ TAG_CFAREPEATPATTERNDIM = 0x828Du,
+ TAG_DATETIMEORIGINAL = 0x9003u,
+ TAG_CFAPATTERN = 0x828Eu,
+ TAG_SUBIFDS = 0x014Au,
+ TAG_TIFFEPSTANDARDID = 0x9216u,
+ TAG_EXPOSURETIME = 0x829Au,
+ TAG_ISOSPEEDRATINGS = 0x8827u,
+ TAG_FOCALLENGTH = 0x920Au,
+ TAG_FNUMBER = 0x829Du,
+};
+
+/**
+ * TIFF_EP_TAG_DEFINITIONS contains tags defined in the TIFF EP spec
+ */
+const TagDefinition_t TIFF_EP_TAG_DEFINITIONS[] = {
+ { // PhotometricInterpretation
+ 0x0106u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // SubIfds
+ 0x014Au,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFAPattern
+ 0x828Eu,
+ BYTE,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFARepeatPatternDim
+ 0x828Du,
+ SHORT,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // DateTimeOriginal
+ 0x9003u,
+ ASCII,
+ IFD_0,
+ 20,
+ UNDEFINED_ENDIAN
+ },
+ { // Tiff/EPStandardID
+ 0x9216u,
+ BYTE,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // ExposureTime
+ 0x829Au,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ISOSpeedRatings
+ 0x8827u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // FocalLength
+ 0x920Au,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // FNumber
+ 0x829Du,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ /*TODO: Remaining TIFF EP tags*/
+};
+
+/**
+ * EXIF_2_3_TAG_DEFINITIONS contains tags defined in the Jeita EXIF 2.3 spec
+ */
+const TagDefinition_t EXIF_2_3_TAG_DEFINITIONS[] = {
+ { // ExifVersion
+ 0x9000u,
+ UNDEFINED,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ /*TODO: Remaining EXIF 2.3 tags*/
+};
+
+/**
+ * TIFF_6_TAG_DEFINITIONS contains tags defined in the TIFF 6.0 spec
+ */
+const TagDefinition_t TIFF_6_TAG_DEFINITIONS[] = {
+ { // SubFileType
+ 0x00FFu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Artist
+ 0x013Bu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BitsPerSample
+ 0x0102u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CellLength
+ 0x0109u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CellWidth
+ 0x0108u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorMap
+ 0x0140u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // Compression
+ 0x0103u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Copyright
+ 0x8298u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // DateTime
+ 0x0132u,
+ ASCII,
+ IFD_0,
+ 20,
+ UNDEFINED_ENDIAN
+ },
+ { // ExtraSamples
+ 0x0152u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // FillOrder
+ 0x010Au,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // FreeByteCounts
+ 0x0121u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // FreeOffsets
+ 0x0120u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // GrayResponseCurve
+ 0x0123u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // GrayResponseUnit
+ 0x0122u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // HostComputer
+ 0x013Cu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ImageDescription
+ 0x010Eu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ImageLength
+ 0x0101u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ImageWidth
+ 0x0100u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Make
+ 0x010Fu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // MaxSampleValue
+ 0x0119u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // MinSampleValue
+ 0x0118u,
+ SHORT,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // Model
+ 0x0110u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // NewSubfileType
+ 0x00FEu,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Orientation
+ 0x0112u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // PhotoMetricInterpretation
+ 0x0106u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // PlanarConfiguration
+ 0x011Cu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ResolutionUnit
+ 0x0128u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // RowsPerStrip
+ 0x0116u,
+ LONG,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // SamplesPerPixel
+ 0x0115u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Software
+ 0x0131u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // StripByteCounts
+ 0x0117u,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // StripOffsets
+ 0x0111u,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // SubfileType
+ 0x00FFu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // Threshholding
+ 0x0107u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // XResolution
+ 0x011Au,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // YResolution
+ 0x011Bu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // YResolution
+ 0x011Bu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ }
+};
+
+/**
+ * DNG_TAG_DEFINITIONS contains tags defined in the DNG 1.4 spec
+ */
+const TagDefinition_t DNG_TAG_DEFINITIONS[] = {
+ { // DNGVersion
+ 0xC612u,
+ BYTE,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // DNGBackwardVersion
+ 0xC613u,
+ BYTE,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // UniqueCameraModel
+ 0xC614u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // LocalizedCameraModel
+ 0xC615u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFAPlaneColor
+ 0xC616u,
+ BYTE,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CFALayout
+ 0xC617u,
+ SHORT,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // LinearizationTable
+ 0xC618u,
+ SHORT,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevelRepeatDim
+ 0xC619u,
+ SHORT,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevel
+ 0xC61Au,
+ LONG,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevelDeltaH
+ 0xC61Bu,
+ SRATIONAL,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // BlackLevelDeltaV
+ 0xC61Cu,
+ SRATIONAL,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // WhiteLevel
+ 0xC61Du,
+ LONG,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultScale
+ 0xC61Eu,
+ RATIONAL,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // BestQualityScale
+ 0xC65Cu,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultCropOrigin
+ 0xC61Fu,
+ LONG,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultCropSize
+ 0xC620u,
+ LONG,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // CalibrationIlluminant1
+ 0xC65Au,
+ SHORT,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CalibrationIlluminant2
+ 0xC65Bu,
+ SHORT,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorMatrix1
+ 0xC621u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorMatrix2
+ 0xC622u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraCalibration1
+ 0xC623u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraCalibration2
+ 0xC624u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ReductionMatrix1
+ 0xC625u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ReductionMatrix2
+ 0xC626u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AnalogBalance
+ 0xC627u,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotNeutral
+ 0xC628u,
+ RATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotWhiteXY
+ 0xC629u,
+ RATIONAL,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineExposure
+ 0xC62Au,
+ SRATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineNoise
+ 0xC62Bu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineSharpness
+ 0xC62Cu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BayerGreenSplit
+ 0xC62Du,
+ LONG,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // LinearResponseLimit
+ 0xC62Eu,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraSerialNumber
+ 0xC62Fu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // LensInfo
+ 0xC630u,
+ RATIONAL,
+ IFD_0,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // ChromaBlurRadius
+ 0xC631u,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // AntiAliasStrength
+ 0xC632u,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ShadowScale
+ 0xC633u,
+ RATIONAL,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // DNGPrivateData
+ 0xC634u,
+ BYTE,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // MakerNoteSafety
+ 0xC635u,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // RawDataUniqueID
+ 0xC65Du,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalRawFileName
+ 0xC68Bu,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalRawFileData
+ 0xC68Cu,
+ UNDEFINED,
+ IFD_0,
+ 0,
+ BIG
+ },
+ { // ActiveArea
+ 0xC68Du,
+ LONG,
+ RAW_IFD,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // MaskedAreas
+ 0xC68Eu,
+ LONG,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotICCProfile
+ 0xC68Fu,
+ UNDEFINED,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotPreProfileMatrix
+ 0xC690u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CurrentICCProfile
+ 0xC691u,
+ UNDEFINED,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CurrentICCProfile
+ 0xC691u,
+ UNDEFINED,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // CurrentPreProfileMatrix
+ 0xC692u,
+ SRATIONAL,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ColorimetricReference
+ 0xC6BFu,
+ SHORT,
+ IFD_0,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // CameraCalibrationSignature
+ 0xC6F3u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileCalibrationSignature
+ 0xC6F4u,
+ ASCII,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ExtraCameraProfiles
+ 0xC6F5u,
+ LONG,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // AsShotProfileName
+ 0xC6F6u,
+ ASCII,
+ IFD_0,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // NoiseReductionApplied
+ 0xC6F7u,
+ RATIONAL,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileName
+ 0xC6F8u,
+ ASCII,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapDims
+ 0xC6F9u,
+ LONG,
+ PROFILE_IFD,
+ 3,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapData1
+ 0xC6FAu,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapData2
+ 0xC6FBu,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileToneCurve
+ 0xC6FCu,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileEmbedPolicy
+ 0xC6FDu,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileCopyright
+ 0xC6FEu,
+ ASCII,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ForwardMatrix1
+ 0xC714u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // ForwardMatrix2
+ 0xC715u,
+ SRATIONAL,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewApplicationName
+ 0xC716u,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewApplicationVersion
+ 0xC717u,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewSettingsName
+ 0xC718u,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewSettingsDigest
+ 0xC719u,
+ BYTE,
+ PREVIEW_IFD,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewColorSpace
+ 0xC71Au,
+ LONG,
+ PREVIEW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // PreviewDateTime
+ 0xC71Bu,
+ ASCII,
+ PREVIEW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // RawImageDigest
+ 0xC71Cu,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalRawFileDigest
+ 0xC71Du,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // SubTileBlockSize
+ 0xC71Eu,
+ LONG,
+ RAW_IFD,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // RowInterleaveFactor
+ 0xC71Fu,
+ LONG,
+ RAW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileLookTableDims
+ 0xC725u,
+ LONG,
+ PROFILE_IFD,
+ 3,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileLookTableData
+ 0xC726u,
+ FLOAT,
+ PROFILE_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // OpcodeList1
+ 0xC740u,
+ UNDEFINED,
+ RAW_IFD,
+ 0,
+ BIG
+ },
+ { // OpcodeList2
+ 0xC741u,
+ UNDEFINED,
+ RAW_IFD,
+ 0,
+ BIG
+ },
+ { // OpcodeList3
+ 0xC74Eu,
+ UNDEFINED,
+ RAW_IFD,
+ 0,
+ BIG
+ },
+ { // NoiseProfile
+ 0xC761u,
+ DOUBLE,
+ RAW_IFD,
+ 0,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultUserCrop
+ 0xC7B5u,
+ RATIONAL,
+ RAW_IFD,
+ 4,
+ UNDEFINED_ENDIAN
+ },
+ { // DefaultBlackRender
+ 0xC7A6u,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // BaselineExposureOffset
+ 0xC7A5u,
+ RATIONAL,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileLookTableEncoding
+ 0xC7A4u,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // ProfileHueSatMapEncoding
+ 0xC7A3u,
+ LONG,
+ PROFILE_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalDefaultFinalSize
+ 0xC791u,
+ LONG,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalBestQualityFinalSize
+ 0xC792u,
+ LONG,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // OriginalDefaultCropSize
+ 0xC793u,
+ LONG,
+ IFD_0,
+ 2,
+ UNDEFINED_ENDIAN
+ },
+ { // NewRawImageDigest
+ 0xC7A7u,
+ BYTE,
+ IFD_0,
+ 16,
+ UNDEFINED_ENDIAN
+ },
+ { // RawToPreviewGain
+ 0xC7A8u,
+ DOUBLE,
+ PREVIEW_IFD,
+ 1,
+ UNDEFINED_ENDIAN
+ },
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_TAG_DEFINITION_H*/
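
Each row in the table above packs five fields for a tag: the tag ID, its default TIFF type, the IFD it belongs in by default, a fixed element count (0 meaning the count is variable), and a forced byte order (UNDEFINED_ENDIAN means the file's byte order is used; the OpcodeList tags force BIG per the DNG spec). A hedged sketch, not part of this patch, of how one row is consumed by TiffWriter::buildEntry defined later in the patch; the writer object and sample value are illustrative only:

#include <img_utils/TiffWriter.h>

using namespace android;
using namespace android::img_utils;

// Hedged sketch: consuming the WhiteLevel row above (0xC61Du, LONG, RAW_IFD, count 0).
void whiteLevelExample() {
    sp<TiffWriter> writer = new TiffWriter();     // default tag maps include this table
    uint32_t whiteLevel[] = { 1023 };             // made-up value, one sample plane
    sp<TiffEntry> entry;
    // fixedCount is 0 in the table, so any count passes the check in buildEntry;
    // the LONG type requires uint32_t data, otherwise buildEntry returns BAD_TYPE.
    status_t err = writer->buildEntry<uint32_t>(0xC61Du, 1, whiteLevel, &entry);
    (void) err;
}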
diff --git a/media/img_utils/include/img_utils/TiffEntry.h b/media/img_utils/include/img_utils/TiffEntry.h
new file mode 100644
index 0000000..cd01640
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffEntry.h
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_ENTRY
+#define IMG_UTILS_TIFF_ENTRY
+
+#include <img_utils/TiffWritable.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/EndianUtils.h>
+
+#include <cutils/compiler.h>
+#include <utils/String8.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+#define COMPARE_DEF(op) \
+inline bool operator op (const TiffEntry& entry) const;
+
+/**
+ * This class holds a single TIFF IFD entry.
+ */
+class ANDROID_API TiffEntry : public TiffWritable {
+ public:
+ // TODO: Copy constructor/equals here.
+ virtual ~TiffEntry();
+
+ /**
+ * Write the 12-byte IFD entry to the output. The given offset will be
+ * set as the tag value if the size of the tag value exceeds the max
+ * size for the TIFF Value field (4 bytes), and should be word aligned.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const = 0;
+
+ /**
+ * Get the count set for this entry. This corresponds to the TIFF Count
+ * field.
+ */
+ virtual uint32_t getCount() const = 0;
+
+ /**
+ * Get the tag id set for this entry. This corresponds to the TIFF Tag
+ * field.
+ */
+ virtual uint16_t getTag() const = 0;
+
+ /**
+ * Get the type set for this entry. This corresponds to the TIFF Type
+ * field.
+ */
+ virtual TagType getType() const = 0;
+
+ /**
+ * Get the defined endianness for this entry. If this is defined,
+ * the tag value will be written with the given byte order.
+ */
+ virtual Endianness getEndianness() const = 0;
+
+ /**
+ * Get the value for this entry. This corresponds to the TIFF Value
+ * field.
+ *
+ * Returns NULL if the value is NULL, or if the type used does not
+ * match the type of this tag.
+ */
+ template<typename T>
+ const T* getData() const;
+
+ String8 toString() const;
+
+ /**
+ * Check that the given value pointer is valid for the given TIFF type.
+ *
+ * Returns NULL if the given value is NULL, or if the type given does
+ * not match the type of the value given.
+ */
+ template<typename T>
+ static const T* forceValidType(TagType type, const T* value);
+
+ virtual const void* getDataHelper() const = 0;
+
+ COMPARE_DEF(>)
+ COMPARE_DEF(<)
+
+ protected:
+ enum {
+ MAX_PRINT_STRING_LENGTH = 256
+ };
+};
+
+#define COMPARE(op) \
+bool TiffEntry::operator op (const TiffEntry& entry) const { \
+ return getComparableValue() op entry.getComparableValue(); \
+}
+
+COMPARE(>)
+COMPARE(<)
+
+
+template<typename T>
+const T* TiffEntry::getData() const {
+ const T* value = reinterpret_cast<const T*>(getDataHelper());
+ return forceValidType<T>(getType(), value);
+}
+
+#undef COMPARE
+#undef COMPARE_DEF
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_ENTRY*/
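
Because getData<T>() routes through forceValidType, it returns a non-NULL pointer only when T is compatible with the entry's TIFF type (the per-type specializations appear in TiffEntry.cpp later in this patch). A hedged sketch, not part of the patch, of reading a SHORT entry back; the caller is assumed to have obtained the entry from TiffIfd::getEntry or TiffWriter::getEntry:

#define LOG_TAG "TiffEntryExample"
#include <img_utils/TiffEntry.h>
#include <utils/Log.h>

// Hedged sketch: type-checked read-back of an entry's value.
static void logShortEntry(const android::sp<android::img_utils::TiffEntry>& entry) {
    const uint16_t* value = entry->getData<uint16_t>();   // non-NULL only for SHORT entries
    if (value == NULL) {
        ALOGE("Entry 0x%x does not hold SHORT data", entry->getTag());
        return;
    }
    ALOGI("Entry 0x%x[0] = %u", entry->getTag(), value[0]);
}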
diff --git a/media/img_utils/include/img_utils/TiffEntryImpl.h b/media/img_utils/include/img_utils/TiffEntryImpl.h
new file mode 100644
index 0000000..cbe0e9a
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffEntryImpl.h
@@ -0,0 +1,190 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_ENTRY_IMPL
+#define IMG_UTILS_TIFF_ENTRY_IMPL
+
+#include <img_utils/TiffEntry.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/Output.h>
+#include <img_utils/EndianUtils.h>
+
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+template<typename T>
+class TiffEntryImpl : public TiffEntry {
+ public:
+ // TODO: Copy constructor/equals here.
+ TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end, const T* data);
+ virtual ~TiffEntryImpl();
+
+ status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const;
+ status_t writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const;
+
+ uint32_t getCount() const;
+ uint16_t getTag() const;
+ TagType getType() const;
+ Endianness getEndianness() const;
+ size_t getSize() const;
+ uint32_t getComparableValue() const;
+
+ protected:
+ const void* getDataHelper() const;
+ uint32_t getActualSize() const;
+
+ uint16_t mTag;
+ uint16_t mType;
+ uint32_t mCount;
+ Endianness mEnd;
+ T* mData;
+
+};
+
+template<typename T>
+TiffEntryImpl<T>::TiffEntryImpl(uint16_t tag, TagType type, uint32_t count, Endianness end,
+ const T* data)
+ : mTag(tag), mType(static_cast<uint16_t>(type)), mCount(count), mEnd(end) {
+ count = (type == RATIONAL || type == SRATIONAL) ? count * 2 : count;
+ mData = new T[count]();
+ for (uint32_t i = 0; i < count; ++i) {
+ mData[i] = data[i];
+ }
+}
+
+template<typename T>
+TiffEntryImpl<T>::~TiffEntryImpl() {
+ if (mData) {
+ delete[] mData;
+ }
+}
+
+template<typename T>
+uint32_t TiffEntryImpl<T>::getCount() const {
+ return mCount;
+}
+
+template<typename T>
+uint16_t TiffEntryImpl<T>::getTag() const {
+ return mTag;
+}
+
+template<typename T>
+TagType TiffEntryImpl<T>::getType() const {
+ return static_cast<TagType>(mType);
+}
+
+template<typename T>
+const void* TiffEntryImpl<T>::getDataHelper() const {
+ return reinterpret_cast<const void*>(mData);
+}
+
+template<typename T>
+size_t TiffEntryImpl<T>::getSize() const {
+ uint32_t total = getActualSize();
+ WORD_ALIGN(total)
+ return (total <= OFFSET_SIZE) ? 0 : total;
+}
+
+template<typename T>
+uint32_t TiffEntryImpl<T>::getActualSize() const {
+ uint32_t total = sizeof(T) * mCount;
+ if (getType() == RATIONAL || getType() == SRATIONAL) {
+ // 2 ints stored for each rational, multiply by 2
+ total <<= 1;
+ }
+ return total;
+}
+
+template<typename T>
+Endianness TiffEntryImpl<T>::getEndianness() const {
+ return mEnd;
+}
+
+template<typename T>
+uint32_t TiffEntryImpl<T>::getComparableValue() const {
+ return mTag;
+}
+
+template<typename T>
+status_t TiffEntryImpl<T>::writeTagInfo(uint32_t offset, /*out*/EndianOutput* out) const {
+ assert((offset % TIFF_WORD_SIZE) == 0);
+ status_t ret = OK;
+ BAIL_ON_FAIL(out->write(&mTag, 0, 1), ret);
+ BAIL_ON_FAIL(out->write(&mType, 0, 1), ret);
+ BAIL_ON_FAIL(out->write(&mCount, 0, 1), ret);
+
+ uint32_t dataSize = getActualSize();
+ if (dataSize > OFFSET_SIZE) {
+ BAIL_ON_FAIL(out->write(&offset, 0, 1), ret);
+ } else {
+ uint32_t count = mCount;
+ if (getType() == RATIONAL || getType() == SRATIONAL) {
+ /**
+ * Rationals are stored as an array of ints. Each
+ * rational is represented by 2 ints. To recover the
+ * size of the array here, multiply the count by 2.
+ */
+ count <<= 1;
+ }
+ BAIL_ON_FAIL(out->write(mData, 0, count), ret);
+ ZERO_TILL_WORD(out, dataSize, ret);
+ }
+ return ret;
+}
+
+template<typename T>
+status_t TiffEntryImpl<T>::writeData(uint32_t offset, EndianOutput* out) const {
+ status_t ret = OK;
+
+ // Some tags have fixed-endian value output
+ Endianness tmp = UNDEFINED_ENDIAN;
+ if (mEnd != UNDEFINED_ENDIAN) {
+ tmp = out->getEndianness();
+ out->setEndianness(mEnd);
+ }
+
+ uint32_t count = mCount;
+ if (getType() == RATIONAL || getType() == SRATIONAL) {
+ /**
+ * Rationals are stored as an array of ints. Each
+ * rational is represented by 2 ints. To recover the
+ * size of the array here, multiply the count by 2.
+ */
+ count <<= 1;
+ }
+
+ BAIL_ON_FAIL(out->write(mData, 0, count), ret);
+
+ if (mEnd != UNDEFINED_ENDIAN) {
+ out->setEndianness(tmp);
+ }
+
+ // Write to next word alignment
+ ZERO_TILL_WORD(out, sizeof(T) * count, ret);
+ return ret;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_ENTRY_IMPL*/
+
+
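
The inline-versus-offset behaviour of writeTagInfo() above follows directly from getActualSize() and OFFSET_SIZE. A small worked sketch under those definitions, not part of this patch; the two EXIF tag IDs are illustrative and not taken from the table in this patch:

#include <img_utils/TiffEntryImpl.h>

using namespace android::img_utils;

// Hedged sketch: sizes implied by getSize()/writeTagInfo() for two common cases.
void entrySizeExample() {
    uint16_t program = 2;                    // SHORT, count 1: 2 bytes of value data
    TiffEntryImpl<uint16_t> inlineEntry(0x8822u, SHORT, 1, UNDEFINED_ENDIAN, &program);
    // 2 bytes <= OFFSET_SIZE, so writeTagInfo() packs the value into the 12-byte entry
    // and zero-pads to the word size; getSize() reports 0 out-of-line bytes.

    uint32_t exposure[] = { 1, 60 };         // RATIONAL, count 1: numerator/denominator
    TiffEntryImpl<uint32_t> offsetEntry(0x829Au, RATIONAL, 1, UNDEFINED_ENDIAN, exposure);
    // 8 bytes > OFFSET_SIZE, so writeTagInfo() writes the 4-byte offset instead and
    // getSize() reports 8 bytes to be written later by writeData().
}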
diff --git a/media/img_utils/include/img_utils/TiffHelpers.h b/media/img_utils/include/img_utils/TiffHelpers.h
new file mode 100644
index 0000000..fd0ea7a
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffHelpers.h
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_HELPERS_H
+#define IMG_UTILS_TIFF_HELPERS_H
+
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+const uint8_t ZERO_WORD[] = {0, 0, 0, 0};
+
+#define BAIL_ON_FAIL(x, flag) \
+ if ((flag = (x)) != OK) return flag;
+
+#define BYTES_TILL_WORD(index) \
+ ((TIFF_WORD_SIZE - ((index) % TIFF_WORD_SIZE)) % TIFF_WORD_SIZE)
+
+#define WORD_ALIGN(count) \
+ count += BYTES_TILL_WORD(count);
+
+#define ZERO_TILL_WORD(output, index, ret) \
+ { \
+ size_t remaining = BYTES_TILL_WORD(index); \
+ if (remaining > 0) { \
+ BAIL_ON_FAIL(output->write(ZERO_WORD, 0, remaining), ret); \
+ } \
+ }
+
+/**
+ * Basic TIFF header constants.
+ */
+enum {
+ BAD_OFFSET = 0,
+ TIFF_WORD_SIZE = 4, // Size in bytes
+ IFD_HEADER_SIZE = 2, // Size in bytes
+ IFD_FOOTER_SIZE = 4, // Size in bytes
+ TIFF_ENTRY_SIZE = 12, // Size in bytes
+ MAX_IFD_ENTRIES = UINT16_MAX,
+ FILE_HEADER_SIZE = 8, // Size in bytes
+ ENDIAN_MARKER_SIZE = 2, // Size in bytes
+ TIFF_MARKER_SIZE = 2, // Size in bytes
+ OFFSET_MARKER_SIZE = 4, // Size in bytes
+ TIFF_FILE_MARKER = 42,
+ BIG_ENDIAN_MARKER = 0x4D4Du,
+ LITTLE_ENDIAN_MARKER = 0x4949u
+};
+
+/**
+ * Constants for the TIFF tag types.
+ */
+enum TagType {
+ UNKNOWN_TAGTYPE = 0,
+ BYTE=1,
+ ASCII,
+ SHORT,
+ LONG,
+ RATIONAL,
+ SBYTE,
+ UNDEFINED,
+ SSHORT,
+ SLONG,
+ SRATIONAL,
+ FLOAT,
+ DOUBLE
+};
+
+/**
+ * Sizes of the TIFF entry fields (in bytes).
+ */
+enum {
+ TAG_SIZE = 2,
+ TYPE_SIZE = 2,
+ COUNT_SIZE = 4,
+ OFFSET_SIZE = 4
+};
+
+/**
+ * Convenience IFD id constants.
+ */
+enum {
+ IFD_0 = 0,
+ RAW_IFD,
+ PROFILE_IFD,
+ PREVIEW_IFD
+};
+
+inline size_t getTypeSize(TagType type) {
+ switch(type) {
+ case UNDEFINED:
+ case ASCII:
+ case BYTE:
+ case SBYTE:
+ return 1;
+ case SHORT:
+ case SSHORT:
+ return 2;
+ case LONG:
+ case SLONG:
+ case FLOAT:
+ return 4;
+ case RATIONAL:
+ case SRATIONAL:
+ case DOUBLE:
+ return 8;
+ default:
+ return 0;
+ }
+}
+
+inline uint32_t calculateIfdSize(size_t numberOfEntries) {
+ return IFD_HEADER_SIZE + IFD_FOOTER_SIZE + TIFF_ENTRY_SIZE * numberOfEntries;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_HELPERS_H*/
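
The padding macros and calculateIfdSize() above do simple modular arithmetic against the 4-byte TIFF word size; a quick sanity-check sketch, not part of this patch:

#include <img_utils/TiffHelpers.h>

using namespace android::img_utils;

// Sanity-check sketch of the alignment helpers above.
void alignmentExample() {
    size_t offset = 5;
    // BYTES_TILL_WORD(5) == (4 - (5 % 4)) % 4 == 3, so three zero bytes reach offset 8.
    WORD_ALIGN(offset)            // the macro supplies its own trailing ';'
    // offset == 8

    // An IFD with 10 entries occupies 2 + 12 * 10 + 4 == 126 bytes, excluding values.
    uint32_t ifdBytes = calculateIfdSize(10);
    (void) ifdBytes;
}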
diff --git a/media/img_utils/include/img_utils/TiffIfd.h b/media/img_utils/include/img_utils/TiffIfd.h
new file mode 100644
index 0000000..9400456
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffIfd.h
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_IFD_H
+#define IMG_UTILS_TIFF_IFD_H
+
+#include <img_utils/TiffWritable.h>
+#include <img_utils/TiffEntry.h>
+#include <img_utils/Output.h>
+#include <img_utils/SortedEntryVector.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/String8.h>
+#include <utils/SortedVector.h>
+#include <utils/StrongPointer.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * This class holds a single TIFF Image File Directory (IFD) structure.
+ *
+ * This maps to the TIFF IFD structure that is logically composed of:
+ * - A 2-byte field listing the number of entries.
+ * - A list of 12-byte TIFF entries.
+ * - A 4-byte offset to the next IFD.
+ */
+class ANDROID_API TiffIfd : public TiffWritable {
+ public:
+ // TODO: Copy constructor/equals here - needed for SubIfds.
+ TiffIfd(uint32_t ifdId);
+ virtual ~TiffIfd();
+
+ /**
+ * Add a TiffEntry to this IFD or replace an existing entry with the
+ * same tag ID. No validation is done.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t addEntry(const sp<TiffEntry>& entry);
+
+ /**
+ * Set the pointer to the next IFD. This is used to create a linked
+ * list of IFDs as defined by the TIFF 6.0 spec., and is not included
+ * when calculating the size of IFD and entries for the getSize()
+ * method (unlike SubIFDs).
+ */
+ virtual void setNextIfd(const sp<TiffIfd>& ifd);
+
+ /**
+ * Get the pointer to the next IFD, or NULL if none exists.
+ */
+ virtual sp<TiffIfd> getNextIfd() const;
+
+ /**
+ * Write the IFD data. This includes the IFD header, entries, footer,
+ * and the corresponding values for each entry (recursively including
+ * sub-IFDs). The written amount should end on a word boundary, and
+ * the given offset should be word aligned.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const;
+
+ /**
+ * Get the size of the IFD. This includes the IFD header, entries, footer,
+ * and the corresponding values for each entry (recursively including
+ * any sub-IFDs).
+ */
+ virtual size_t getSize() const;
+
+ /**
+ * Get the id of this IFD.
+ */
+ virtual uint32_t getId() const;
+
+ /**
+ * Get an entry with the given tag ID.
+ *
+ * Returns a strong pointer to the entry if it exists, or an empty strong
+ * pointer.
+ */
+ virtual sp<TiffEntry> getEntry(uint16_t tag) const;
+
+ /**
+ * Get a formatted string representing this IFD.
+ */
+ String8 toString() const;
+
+ /**
+ * Print a formatted string representing this IFD to logcat.
+ */
+ void log() const;
+
+ /**
+ * Get value used to determine sort order.
+ */
+ virtual uint32_t getComparableValue() const;
+ protected:
+ virtual uint32_t checkAndGetOffset(uint32_t offset) const;
+ SortedEntryVector mEntries;
+ sp<TiffIfd> mNextIfd;
+ uint32_t mIfdId;
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_IFD_H*/
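
setNextIfd() builds the top-level IFD chain while leaving getSize() untouched, so linking a preview IFD after the raw IFD does not inflate the raw IFD's reported size. A hedged sketch, not part of this patch; TiffWriter performs this linking itself via findLastIfd()/addIfd(), so direct use is mainly illustrative:

#include <img_utils/TiffIfd.h>

using namespace android;
using namespace android::img_utils;

// Hedged sketch: chaining two top-level IFDs by hand.
void chainIfds() {
    sp<TiffIfd> raw = new TiffIfd(RAW_IFD);
    sp<TiffIfd> preview = new TiffIfd(PREVIEW_IFD);
    raw->setNextIfd(preview);     // raw's 4-byte footer will point at preview
    // raw->getSize() still excludes preview: the next-IFD link is not a sub-IFD.
}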
diff --git a/media/img_utils/include/img_utils/TiffWritable.h b/media/img_utils/include/img_utils/TiffWritable.h
new file mode 100644
index 0000000..a72cecc
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffWritable.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_WRITABLE
+#define IMG_UTILS_TIFF_WRITABLE
+
+#include <img_utils/Orderable.h>
+#include <img_utils/EndianUtils.h>
+#include <img_utils/Output.h>
+
+#include <cutils/compiler.h>
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+/**
+ * TiffWritable subclasses represent TIFF metadata objects that can be written
+ * to an EndianOutput object. This is used for TIFF entries and IFDs.
+ */
+class ANDROID_API TiffWritable : public Orderable, public LightRefBase<TiffWritable> {
+ public:
+ TiffWritable();
+ virtual ~TiffWritable();
+
+ /**
+ * Write the data to the output. The given offset is used to calculate
+ * the offsets for any values written out-of-line. It is defined
+ * relative to the beginning of the TIFF header, and must be word aligned.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t writeData(uint32_t offset, /*out*/EndianOutput* out) const = 0;
+
+ /**
+ * Get the size of the data to write.
+ */
+ virtual size_t getSize() const = 0;
+
+};
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+#endif /*IMG_UTILS_TIFF_WRITABLE*/
diff --git a/media/img_utils/include/img_utils/TiffWriter.h b/media/img_utils/include/img_utils/TiffWriter.h
new file mode 100644
index 0000000..ec27fc3
--- /dev/null
+++ b/media/img_utils/include/img_utils/TiffWriter.h
@@ -0,0 +1,267 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef IMG_UTILS_TIFF_WRITER_H
+#define IMG_UTILS_TIFF_WRITER_H
+
+#include <img_utils/EndianUtils.h>
+#include <img_utils/TiffEntryImpl.h>
+#include <img_utils/TagDefinitions.h>
+
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <utils/StrongPointer.h>
+#include <utils/KeyedVector.h>
+#include <utils/Vector.h>
+
+#include <cutils/compiler.h>
+#include <stdint.h>
+
+namespace android {
+namespace img_utils {
+
+class TiffEntry;
+class TiffIfd;
+class Output;
+
+/**
+ * This class holds a collection of TIFF IFDs that can be written as a
+ * complete DNG file header.
+ *
+ * This maps to the TIFF header structure that is logically composed of:
+ * - An 8-byte file header containing an endianness indicator, the TIFF
+ * file marker, and the offset to the first IFD.
+ * - A list of TIFF IFD structures.
+ */
+class ANDROID_API TiffWriter : public LightRefBase<TiffWriter> {
+ public:
+
+ /**
+ * Constructs a TiffWriter with the default tag mappings. This enables
+ * all of the tags defined in TagDefinitions.h, and uses the following
+ * mapping precedence to resolve collisions:
+ * (highest precedence) TIFF/EP > DNG > EXIF 2.3 > TIFF 6.0
+ */
+ TiffWriter();
+
+ /**
+ * Constructs a TiffWriter with the given tag mappings. The mapping
+ * precedence will be in the order that the definition maps are given,
+ * where the lower index map gets precedence.
+ *
+ * This can be used with user-defined definitions, or definitions from
+ * TagDefinitions.h.
+ *
+ * The enabledDefinitions mapping object is owned by the caller, and must
+ * stay alive for the lifespan of the constructed TiffWriter object.
+ */
+ TiffWriter(KeyedVector<uint16_t, const TagDefinition_t*>* enabledDefinitions,
+ size_t length);
+
+ virtual ~TiffWriter();
+
+ /**
+ * Write a TIFF header containing each IFD set. This will recursively
+ * write all SubIFDs and tags.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t write(Output* out, Endianness end = LITTLE);
+
+ /**
+ * Get the total size in bytes of the TIFF header. This includes all
+ * IFDs, tags, and values set for this TiffWriter.
+ */
+ virtual uint32_t getTotalSize() const;
+
+ /**
+ * Add the given entry to its default IFD. If that IFD does not
+ * exist, it will be created.
+ */
+ virtual status_t addEntry(const sp<TiffEntry>& entry);
+
+ /**
+ * Build an entry for a known tag. This tag must be one of the tags
+ * defined in one of the definition vectors this TIFF writer was constructed
+ * with. The count and type are validated. If this succeeds, the resulting
+ * entry will be placed in the outEntry pointer.
+ *
+ * Returns OK on success, or a negative error code on failure. Valid
+ * error codes for this method are:
+ * - BAD_INDEX - The given tag doesn't exist.
+ * - BAD_VALUE - The given count doesn't match the required count for
+ * this tag.
+ * - BAD_TYPE - The type of the given data isn't compatible with the
+ * type required for this tag.
+ */
+ template<typename T>
+ status_t buildEntry(uint16_t tag, uint32_t count, const T* data,
+ /*out*/sp<TiffEntry>* outEntry) const;
+
+ /**
+ * Build an entry for a known tag and add it to the IFD with the given ID.
+ * This tag must be defined in one of the definition vectors this TIFF writer
+ * was constructed with. The count and type are validated. If this succeeds,
+ * the resulting entry will be placed in the outEntry pointer.
+ *
+ * Returns OK on success, or a negative error code on failure. Valid
+ * error codes for this method are:
+ * - BAD_INDEX - The given tag doesn't exist.
+ * - BAD_VALUE - The given count doesn't match the required count for
+ * this tag.
+ * - BAD_TYPE - The type of the given data isn't compatible with the
+ * type required for this tag.
+ * - NAME_NOT_FOUND - No IFD exists with the given ID.
+ */
+ template<typename T>
+ status_t addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd);
+
+ /**
+ * Return the TIFF entry with the given tag ID in the IFD with the given ID,
+ * or an empty pointer if none exists.
+ */
+ virtual sp<TiffEntry> getEntry(uint16_t tag, uint32_t ifd) const;
+
+ /**
+ * Add the given IFD to the end of the top-level IFD chain. No
+ * validation is done.
+ *
+ * Returns OK on success, or a negative error code on failure.
+ */
+ virtual status_t uncheckedAddIfd(const sp<TiffIfd>& ifd);
+
+ /**
+ * Create an empty IFD with the given ID and add it to the end of the
+ * list of IFDs.
+ */
+ virtual status_t addIfd(uint32_t ifd);
+
+ /**
+ * Build an entry. No validation is done.
+ *
+ * WARNING: Using this method can result in creating poorly formatted
+ * TIFF files.
+ *
+ * Returns a TiffEntry with the given tag, type, count, endianness,
+ * and data.
+ */
+ template<typename T>
+ static sp<TiffEntry> uncheckedBuildEntry(uint16_t tag, TagType type,
+ uint32_t count, Endianness end, const T* data);
+
+ /**
+ * Utility function to build a tag-to-definition mapping from a given
+ * array of tag definitions.
+ */
+ static KeyedVector<uint16_t, const TagDefinition_t*> buildTagMap(
+ const TagDefinition_t* definitions, size_t length);
+
+ /**
+ * Returns the default type for the given tag ID.
+ */
+ virtual TagType getDefaultType(uint16_t tag) const;
+
+ /**
+ * Returns the default count for a given tag ID, or 0 if this
+ * tag normally has a variable count.
+ */
+ virtual uint32_t getDefaultCount(uint16_t tag) const;
+
+ /**
+ * Returns true if a definition exists for the given tag ID.
+ */
+ virtual bool checkIfDefined(uint16_t tag) const;
+
+ /**
+ * Print the currently configured IFDs and entries to logcat.
+ */
+ virtual void log() const;
+
+ protected:
+ enum {
+ DEFAULT_NUM_TAG_MAPS = 4,
+ };
+
+ sp<TiffIfd> findLastIfd();
+ status_t writeFileHeader(EndianOutput& out);
+ const TagDefinition_t* lookupDefinition(uint16_t tag) const;
+ status_t calculateOffsets();
+
+ sp<TiffIfd> mIfd;
+ KeyedVector<uint32_t, sp<TiffIfd> > mNamedIfds;
+ KeyedVector<uint16_t, const TagDefinition_t*>* mTagMaps;
+ size_t mNumTagMaps;
+
+ static KeyedVector<uint16_t, const TagDefinition_t*> sTagMaps[];
+};
+
+template<typename T>
+status_t TiffWriter::buildEntry(uint16_t tag, uint32_t count, const T* data,
+ /*out*/sp<TiffEntry>* outEntry) const {
+ const TagDefinition_t* definition = lookupDefinition(tag);
+
+ if (definition == NULL) {
+ ALOGE("%s: No such tag exists for id %x.", __FUNCTION__, tag);
+ return BAD_INDEX;
+ }
+
+ uint32_t fixedCount = definition->fixedCount;
+ if (fixedCount > 0 && fixedCount != count) {
+ ALOGE("%s: Invalid count %d for tag %x (expects %d).", __FUNCTION__, count, tag,
+ fixedCount);
+ return BAD_VALUE;
+ }
+
+ TagType fixedType = definition->defaultType;
+ if (TiffEntry::forceValidType(fixedType, data) == NULL) {
+ ALOGE("%s: Invalid type used for tag value for tag %x.", __FUNCTION__, tag);
+ return BAD_TYPE;
+ }
+
+ *outEntry = new TiffEntryImpl<T>(tag, fixedType, count,
+ definition->fixedEndian, data);
+
+ return OK;
+}
+
+template<typename T>
+status_t TiffWriter::addEntry(uint16_t tag, uint32_t count, const T* data, uint32_t ifd) {
+ sp<TiffEntry> outEntry;
+ status_t ret = buildEntry<T>(tag, count, data, &outEntry);
+ if (ret != OK) {
+ ALOGE("%s: Could not build entry for tag %x.", __FUNCTION__, tag);
+ return ret;
+ }
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index < 0) {
+ ALOGE("%s: No IFD %d set for this writer.", __FUNCTION__, ifd);
+ return NAME_NOT_FOUND;
+ }
+ return mNamedIfds[index]->addEntry(outEntry);
+}
+
+template<typename T>
+sp<TiffEntry> TiffWriter::uncheckedBuildEntry(uint16_t tag, TagType type, uint32_t count,
+ Endianness end, const T* data) {
+ TiffEntryImpl<T>* entry = new TiffEntryImpl<T>(tag, type, count, end, data);
+ return sp<TiffEntry>(entry);
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
+
+#endif /*IMG_UTILS_TIFF_WRITER_H*/
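
Putting the pieces of this header together, a minimal end-to-end sketch (not part of this patch): the writer is populated with an IFD and a couple of entries, then streamed to a FileOutput (added later in the patch). The ImageWidth/ImageLength tag IDs (0x0100/0x0101) are standard TIFF 6.0 tags assumed to be present in the default definition maps; error handling is reduced to early returns:

#include <img_utils/TiffWriter.h>
#include <img_utils/FileOutput.h>

using namespace android;
using namespace android::img_utils;

// Hedged usage sketch of TiffWriter; tag availability depends on TagDefinitions.h.
status_t writeTinyHeader(const String8& path) {
    sp<TiffWriter> writer = new TiffWriter();               // default tag maps

    if (writer->addIfd(IFD_0) != OK) return UNKNOWN_ERROR;

    uint32_t width = 4000, height = 3000;
    if (writer->addEntry(0x0100u, 1, &width, IFD_0) != OK) return UNKNOWN_ERROR;
    if (writer->addEntry(0x0101u, 1, &height, IFD_0) != OK) return UNKNOWN_ERROR;

    FileOutput out(path);
    if (out.open() != OK) return UNKNOWN_ERROR;
    status_t ret = writer->write(&out, LITTLE);              // LITTLE is also the default
    out.close();
    return ret;
}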
diff --git a/media/img_utils/src/Android.mk b/media/img_utils/src/Android.mk
new file mode 100644
index 0000000..80893be
--- /dev/null
+++ b/media/img_utils/src/Android.mk
@@ -0,0 +1,61 @@
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ EndianUtils.cpp \
+ FileInput.cpp \
+ FileOutput.cpp \
+ SortedEntryVector.cpp \
+ Input.cpp \
+ Output.cpp \
+ Orderable.cpp \
+ TiffIfd.cpp \
+ TiffWritable.cpp \
+ TiffWriter.cpp \
+ TiffEntry.cpp \
+ TiffEntryImpl.cpp \
+ ByteArrayOutput.cpp \
+ DngUtils.cpp \
+
+LOCAL_SHARED_LIBRARIES := \
+ libexpat \
+ libutils \
+ libcutils \
+ libcamera_metadata \
+ libcamera_client
+
+LOCAL_C_INCLUDES += \
+ $(LOCAL_PATH)/../include \
+ system/media/camera/include
+
+LOCAL_CFLAGS += \
+ -Wall \
+ -Wextra \
+ -Werror \
+ -fvisibility=hidden
+
+ifneq ($(filter userdebug eng,$(TARGET_BUILD_VARIANT)),)
+ # Enable assert() in userdebug and eng builds
+ LOCAL_CFLAGS += -UNDEBUG -DLOG_NDEBUG=1
+endif
+
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/../include
+
+LOCAL_MODULE := libimg_utils
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/media/img_utils/src/ByteArrayOutput.cpp b/media/img_utils/src/ByteArrayOutput.cpp
new file mode 100644
index 0000000..db2d248
--- /dev/null
+++ b/media/img_utils/src/ByteArrayOutput.cpp
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/ByteArrayOutput.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+ByteArrayOutput::ByteArrayOutput() {}
+
+ByteArrayOutput::~ByteArrayOutput() {}
+
+status_t ByteArrayOutput::open() {
+ return OK;
+}
+
+status_t ByteArrayOutput::write(const uint8_t* buf, size_t offset, size_t count) {
+ if (mByteArray.appendArray(buf + offset, count) < 0) {
+ ALOGE("%s: Failed to write to ByteArrayOutput.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t ByteArrayOutput::close() {
+ mByteArray.clear();
+ return OK;
+}
+
+size_t ByteArrayOutput::getSize() const {
+ return mByteArray.size();
+}
+
+const uint8_t* ByteArrayOutput::getArray() const {
+ return mByteArray.array();
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/DngUtils.cpp b/media/img_utils/src/DngUtils.cpp
new file mode 100644
index 0000000..14b31ec
--- /dev/null
+++ b/media/img_utils/src/DngUtils.cpp
@@ -0,0 +1,280 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/DngUtils.h>
+
+namespace android {
+namespace img_utils {
+
+OpcodeListBuilder::OpcodeListBuilder() : mCount(0), mOpList(), mEndianOut(&mOpList, BIG) {
+ if(mEndianOut.open() != OK) {
+ ALOGE("%s: Open failed.", __FUNCTION__);
+ }
+}
+
+OpcodeListBuilder::~OpcodeListBuilder() {
+ if(mEndianOut.close() != OK) {
+ ALOGE("%s: Close failed.", __FUNCTION__);
+ }
+}
+
+size_t OpcodeListBuilder::getSize() const {
+ return mOpList.getSize() + sizeof(mCount);
+}
+
+uint32_t OpcodeListBuilder::getCount() const {
+ return mCount;
+}
+
+status_t OpcodeListBuilder::buildOpList(uint8_t* buf) const {
+ uint32_t count = convertToBigEndian(mCount);
+ memcpy(buf, &count, sizeof(count));
+ memcpy(buf + sizeof(count), mOpList.getArray(), mOpList.getSize());
+ return OK;
+}
+
+status_t OpcodeListBuilder::addGainMapsForMetadata(uint32_t lsmWidth,
+ uint32_t lsmHeight,
+ uint32_t activeAreaTop,
+ uint32_t activeAreaLeft,
+ uint32_t activeAreaBottom,
+ uint32_t activeAreaRight,
+ CfaLayout cfa,
+ const float* lensShadingMap) {
+ uint32_t activeAreaWidth = activeAreaRight - activeAreaLeft;
+ uint32_t activeAreaHeight = activeAreaBottom - activeAreaTop;
+ double spacingV = 1.0 / lsmHeight;
+ double spacingH = 1.0 / lsmWidth;
+
+ float redMap[lsmWidth * lsmHeight];
+ float greenEvenMap[lsmWidth * lsmHeight];
+ float greenOddMap[lsmWidth * lsmHeight];
+ float blueMap[lsmWidth * lsmHeight];
+
+ size_t lsmMapSize = lsmWidth * lsmHeight * 4;
+
+ // Split lens shading map channels into separate arrays
+ size_t j = 0;
+ for (size_t i = 0; i < lsmMapSize; i += 4, ++j) {
+ redMap[j] = lensShadingMap[i + LSM_R_IND];
+ greenEvenMap[j] = lensShadingMap[i + LSM_GE_IND];
+ greenOddMap[j] = lensShadingMap[i + LSM_GO_IND];
+ blueMap[j] = lensShadingMap[i + LSM_B_IND];
+ }
+
+ uint32_t redTop = 0;
+ uint32_t redLeft = 0;
+ uint32_t greenEvenTop = 0;
+ uint32_t greenEvenLeft = 1;
+ uint32_t greenOddTop = 1;
+ uint32_t greenOddLeft = 0;
+ uint32_t blueTop = 1;
+ uint32_t blueLeft = 1;
+
+ switch(cfa) {
+ case CFA_RGGB:
+ redTop = 0;
+ redLeft = 0;
+ greenEvenTop = 0;
+ greenEvenLeft = 1;
+ greenOddTop = 1;
+ greenOddLeft = 0;
+ blueTop = 1;
+ blueLeft = 1;
+ break;
+ case CFA_GRBG:
+ redTop = 0;
+ redLeft = 1;
+ greenEvenTop = 0;
+ greenEvenLeft = 0;
+ greenOddTop = 1;
+ greenOddLeft = 1;
+ blueTop = 1;
+ blueLeft = 0;
+ break;
+ case CFA_GBRG:
+ redTop = 1;
+ redLeft = 0;
+ greenEvenTop = 0;
+ greenEvenLeft = 0;
+ greenOddTop = 1;
+ greenOddLeft = 1;
+ blueTop = 0;
+ blueLeft = 1;
+ break;
+ case CFA_BGGR:
+ redTop = 1;
+ redLeft = 1;
+ greenEvenTop = 0;
+ greenEvenLeft = 1;
+ greenOddTop = 1;
+ greenOddLeft = 0;
+ blueTop = 0;
+ blueLeft = 0;
+ break;
+ default:
+ ALOGE("%s: Unknown CFA layout %d", __FUNCTION__, cfa);
+ return BAD_VALUE;
+ }
+
+ status_t err = addGainMap(/*top*/redTop,
+ /*left*/redLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/redMap);
+ if (err != OK) return err;
+
+ err = addGainMap(/*top*/greenEvenTop,
+ /*left*/greenEvenLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/greenEvenMap);
+ if (err != OK) return err;
+
+ err = addGainMap(/*top*/greenOddTop,
+ /*left*/greenOddLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/greenOddMap);
+ if (err != OK) return err;
+
+ err = addGainMap(/*top*/blueTop,
+ /*left*/blueLeft,
+ /*bottom*/activeAreaHeight - 1,
+ /*right*/activeAreaWidth - 1,
+ /*plane*/0,
+ /*planes*/1,
+ /*rowPitch*/2,
+ /*colPitch*/2,
+ /*mapPointsV*/lsmHeight,
+ /*mapPointsH*/lsmWidth,
+ /*mapSpacingV*/spacingV,
+ /*mapSpacingH*/spacingH,
+ /*mapOriginV*/0,
+ /*mapOriginH*/0,
+ /*mapPlanes*/1,
+ /*mapGains*/blueMap);
+ return err;
+}
+
+status_t OpcodeListBuilder::addGainMap(uint32_t top,
+ uint32_t left,
+ uint32_t bottom,
+ uint32_t right,
+ uint32_t plane,
+ uint32_t planes,
+ uint32_t rowPitch,
+ uint32_t colPitch,
+ uint32_t mapPointsV,
+ uint32_t mapPointsH,
+ double mapSpacingV,
+ double mapSpacingH,
+ double mapOriginV,
+ double mapOriginH,
+ uint32_t mapPlanes,
+ const float* mapGains) {
+
+ uint32_t opcodeId = GAIN_MAP_ID;
+
+ status_t err = mEndianOut.write(&opcodeId, 0, 1);
+ if (err != OK) return err;
+
+ uint8_t version[] = {1, 3, 0, 0};
+ err = mEndianOut.write(version, 0, NELEMS(version));
+ if (err != OK) return err;
+
+ uint32_t flags = FLAG_OPTIONAL | FLAG_OPTIONAL_FOR_PREVIEW;
+ err = mEndianOut.write(&flags, 0, 1);
+ if (err != OK) return err;
+
+ const uint32_t NUMBER_INT_ARGS = 11;
+ const uint32_t NUMBER_DOUBLE_ARGS = 4;
+
+ uint32_t totalSize = NUMBER_INT_ARGS * sizeof(uint32_t) + NUMBER_DOUBLE_ARGS * sizeof(double) +
+ mapPointsV * mapPointsH * mapPlanes * sizeof(float);
+
+ err = mEndianOut.write(&totalSize, 0, 1);
+ if (err != OK) return err;
+
+ // Batch writes as much as possible
+ uint32_t settings1[] = { top,
+ left,
+ bottom,
+ right,
+ plane,
+ planes,
+ rowPitch,
+ colPitch,
+ mapPointsV,
+ mapPointsH };
+
+ err = mEndianOut.write(settings1, 0, NELEMS(settings1));
+ if (err != OK) return err;
+
+ double settings2[] = { mapSpacingV,
+ mapSpacingH,
+ mapOriginV,
+ mapOriginH };
+
+ err = mEndianOut.write(settings2, 0, NELEMS(settings2));
+ if (err != OK) return err;
+
+ err = mEndianOut.write(&mapPlanes, 0, 1);
+ if (err != OK) return err;
+
+ err = mEndianOut.write(mapGains, 0, mapPointsV * mapPointsH * mapPlanes);
+ if (err != OK) return err;
+
+ mCount++;
+
+ return OK;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
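
The GainMap opcodes built above are meant to land in the OpcodeList2 tag (0xC741u, UNDEFINED, forced big-endian in the tag table earlier in this patch). A hedged sketch of that hand-off, not part of the patch; the lens-shading-map layout, the active-area bounds, and the nested CfaLayout spelling (OpcodeListBuilder::CFA_RGGB) are assumptions about the surrounding camera code, and the writer is assumed to already contain a RAW_IFD:

#include <img_utils/DngUtils.h>
#include <img_utils/TiffWriter.h>

using namespace android;
using namespace android::img_utils;

// Hedged sketch: serialize a lens shading map as GainMap opcodes and attach it to 0xC741u.
status_t addShadingMap(const sp<TiffWriter>& writer, const float* lensShadingMap,
        uint32_t lsmWidth, uint32_t lsmHeight, uint32_t aaTop, uint32_t aaLeft,
        uint32_t aaBottom, uint32_t aaRight) {
    OpcodeListBuilder builder;
    status_t err = builder.addGainMapsForMetadata(lsmWidth, lsmHeight, aaTop, aaLeft,
            aaBottom, aaRight, OpcodeListBuilder::CFA_RGGB, lensShadingMap);
    if (err != OK) return err;

    size_t listSize = builder.getSize();                 // 4-byte count + opcode payloads
    uint8_t* opcodeList = new uint8_t[listSize];
    err = builder.buildOpList(opcodeList);
    if (err == OK) {
        // OpcodeList2 is variable-count UNDEFINED data; the count is the byte length.
        err = writer->addEntry(0xC741u, static_cast<uint32_t>(listSize), opcodeList, RAW_IFD);
    }
    delete[] opcodeList;                                 // TiffEntryImpl copies the data
    return err;
}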
diff --git a/media/img_utils/src/EndianUtils.cpp b/media/img_utils/src/EndianUtils.cpp
new file mode 100644
index 0000000..8681cbe
--- /dev/null
+++ b/media/img_utils/src/EndianUtils.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/EndianUtils.h>
+
+namespace android {
+namespace img_utils {
+
+EndianOutput::EndianOutput(Output* out, Endianness end)
+ : mOffset(0), mOutput(out), mEndian(end) {}
+
+EndianOutput::~EndianOutput() {}
+
+status_t EndianOutput::open() {
+ mOffset = 0;
+ return mOutput->open();
+}
+
+status_t EndianOutput::close() {
+ return mOutput->close();
+}
+
+void EndianOutput::setEndianness(Endianness end) {
+ mEndian = end;
+}
+
+uint32_t EndianOutput::getCurrentOffset() const {
+ return mOffset;
+}
+
+Endianness EndianOutput::getEndianness() const {
+ return mEndian;
+}
+
+status_t EndianOutput::write(const uint8_t* buf, size_t offset, size_t count) {
+ status_t res = OK;
+ if((res = mOutput->write(buf, offset, count)) == OK) {
+ mOffset += count;
+ }
+ return res;
+}
+
+status_t EndianOutput::write(const int8_t* buf, size_t offset, size_t count) {
+ return write(reinterpret_cast<const uint8_t*>(buf), offset, count);
+}
+
+#define DEFINE_WRITE(_type_) \
+status_t EndianOutput::write(const _type_* buf, size_t offset, size_t count) { \
+ return writeHelper<_type_>(buf, offset, count); \
+}
+
+DEFINE_WRITE(uint16_t)
+DEFINE_WRITE(int16_t)
+DEFINE_WRITE(uint32_t)
+DEFINE_WRITE(int32_t)
+DEFINE_WRITE(uint64_t)
+DEFINE_WRITE(int64_t)
+
+status_t EndianOutput::write(const float* buf, size_t offset, size_t count) {
+ assert(sizeof(float) == sizeof(uint32_t));
+ return writeHelper<uint32_t>(reinterpret_cast<const uint32_t*>(buf), offset, count);
+}
+
+status_t EndianOutput::write(const double* buf, size_t offset, size_t count) {
+ assert(sizeof(double) == sizeof(uint64_t));
+ return writeHelper<uint64_t>(reinterpret_cast<const uint64_t*>(buf), offset, count);
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/FileInput.cpp b/media/img_utils/src/FileInput.cpp
new file mode 100644
index 0000000..e43fd53
--- /dev/null
+++ b/media/img_utils/src/FileInput.cpp
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/FileInput.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+FileInput::FileInput(String8 path) : mFp(NULL), mPath(path), mOpen(false) {}
+
+FileInput::~FileInput() {
+ if (mOpen) {
+ ALOGE("%s: FileInput destroyed without calling close!", __FUNCTION__);
+ close();
+ }
+
+}
+
+status_t FileInput::open() {
+ if (mOpen) {
+ ALOGW("%s: Open called when file %s already open.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+ mFp = ::fopen(mPath, "rb");
+ if (!mFp) {
+ ALOGE("%s: Could not open file %s", __FUNCTION__, mPath.string());
+ return BAD_VALUE;
+ }
+ mOpen = true;
+ return OK;
+}
+
+size_t FileInput::read(uint8_t* buf, size_t offset, size_t count, status_t* err) {
+ if (!mOpen) {
+ ALOGE("%s: Could not read file %s, file not open.", __FUNCTION__, mPath.string());
+ if (err != NULL) *err = BAD_VALUE;
+ return 0;
+ }
+
+ size_t bytesRead = ::fread(buf + offset, sizeof(uint8_t), count, mFp);
+ int error = ::ferror(mFp);
+ if (error != 0) {
+ ALOGE("%s: Error %d occurred while reading file %s.", __FUNCTION__, error, mPath.string());
+ if (err != NULL) *err = BAD_VALUE;
+ }
+ return bytesRead;
+}
+
+status_t FileInput::close() {
+ if(!mOpen) {
+ ALOGW("%s: Close called when file %s already close.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+
+ status_t ret = OK;
+ if(::fclose(mFp) != 0) {
+ ALOGE("%s: Failed to close file %s.", __FUNCTION__, mPath.string());
+ ret = BAD_VALUE;
+ }
+ mOpen = false;
+ return ret;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/FileOutput.cpp b/media/img_utils/src/FileOutput.cpp
new file mode 100644
index 0000000..ce763ff
--- /dev/null
+++ b/media/img_utils/src/FileOutput.cpp
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/FileOutput.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+FileOutput::FileOutput(String8 path) : mFp(NULL), mPath(path), mOpen(false) {}
+
+FileOutput::~FileOutput() {
+ if (mOpen) {
+ ALOGW("%s: Destructor called with %s still open.", __FUNCTION__, mPath.string());
+ close();
+ }
+}
+
+status_t FileOutput::open() {
+ if (mOpen) {
+ ALOGW("%s: Open called when file %s already open.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+ mFp = ::fopen(mPath, "wb");
+ if (!mFp) {
+ ALOGE("%s: Could not open file %s", __FUNCTION__, mPath.string());
+ return BAD_VALUE;
+ }
+ mOpen = true;
+ return OK;
+}
+
+status_t FileOutput::write(const uint8_t* buf, size_t offset, size_t count) {
+ if (!mOpen) {
+ ALOGE("%s: Could not write file %s, file not open.", __FUNCTION__, mPath.string());
+ return BAD_VALUE;
+ }
+
+ ::fwrite(buf + offset, sizeof(uint8_t), count, mFp);
+
+ int error = ::ferror(mFp);
+ if (error != 0) {
+ ALOGE("%s: Error %d occurred while writing file %s.", __FUNCTION__, error, mPath.string());
+ return BAD_VALUE;
+ }
+ return OK;
+}
+
+status_t FileOutput::close() {
+ if(!mOpen) {
+ ALOGW("%s: Close called when file %s already close.", __FUNCTION__, mPath.string());
+ return OK;
+ }
+
+ status_t ret = OK;
+ if(::fclose(mFp) != 0) {
+ ALOGE("%s: Failed to close file %s.", __FUNCTION__, mPath.string());
+ ret = BAD_VALUE;
+ }
+ mOpen = false;
+ return ret;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/Input.cpp b/media/img_utils/src/Input.cpp
new file mode 100644
index 0000000..1e51e10
--- /dev/null
+++ b/media/img_utils/src/Input.cpp
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/Input.h>
+
+namespace android {
+namespace img_utils {
+
+Input::~Input() {}
+status_t Input::open() { return OK; }
+status_t Input::close() { return OK; }
+
+
+} /*namespace img_utils*/
+} /*namespace android*/
+
diff --git a/media/libstagefright/include/chromium_http_stub.h b/media/img_utils/src/Orderable.cpp
index e0651a4..300f122 100644
--- a/media/libstagefright/include/chromium_http_stub.h
+++ b/media/img_utils/src/Orderable.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2012 The Android Open Source Project
+ * Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -14,21 +14,26 @@
* limitations under the License.
*/
-#ifndef CHROMIUM_HTTP_STUB_H_
-#define CHROMIUM_HTTP_STUB_H_
+#include <img_utils/Orderable.h>
-#include <include/HTTPBase.h>
-#include <media/stagefright/DataSource.h>
+#include <utils/Log.h>
namespace android {
-extern "C" {
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags);
+namespace img_utils {
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
-DataSource *createDataUriSource(const char *uri);
-}
+#define COMPARE(op) \
+bool Orderable::operator op (const Orderable& orderable) const { \
+ return getComparableValue() op orderable.getComparableValue(); \
}
-#endif
+COMPARE(>)
+COMPARE(<)
+COMPARE(>=)
+COMPARE(<=)
+COMPARE(==)
+COMPARE(!=)
+
+Orderable::~Orderable() {}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/Output.cpp b/media/img_utils/src/Output.cpp
new file mode 100644
index 0000000..0e395b9
--- /dev/null
+++ b/media/img_utils/src/Output.cpp
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <img_utils/Output.h>
+
+namespace android {
+namespace img_utils {
+
+Output::~Output() {}
+status_t Output::open() { return OK; }
+status_t Output::close() { return OK; }
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/SortedEntryVector.cpp b/media/img_utils/src/SortedEntryVector.cpp
new file mode 100644
index 0000000..f0e1fa1
--- /dev/null
+++ b/media/img_utils/src/SortedEntryVector.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/SortedEntryVector.h>
+
+#include <utils/TypeHelpers.h>
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+SortedEntryVector::~SortedEntryVector() {}
+
+ssize_t SortedEntryVector::indexOfTag(uint16_t tag) const {
+ // TODO: Use binary search here.
+ for (size_t i = 0; i < size(); ++i) {
+ if (itemAt(i)->getTag() == tag) {
+ return i;
+ }
+ }
+ return -1;
+}
+
+int SortedEntryVector::do_compare(const void* lhs, const void* rhs) const {
+ const sp<TiffEntry>* lEntry = reinterpret_cast<const sp<TiffEntry>*>(lhs);
+ const sp<TiffEntry>* rEntry = reinterpret_cast<const sp<TiffEntry>*>(rhs);
+ return compare_type(**lEntry, **rEntry);
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffEntry.cpp b/media/img_utils/src/TiffEntry.cpp
new file mode 100644
index 0000000..e028827
--- /dev/null
+++ b/media/img_utils/src/TiffEntry.cpp
@@ -0,0 +1,238 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/TiffEntry.h>
+
+#include <utils/Errors.h>
+#include <utils/StrongPointer.h>
+#include <utils/Vector.h>
+
+namespace android {
+namespace img_utils {
+
+TiffEntry::~TiffEntry() {}
+
+/**
+ * Specialize for each valid type, including sub-IFDs.
+ *
+ * Values with types other than the ones given here should not compile.
+ */
+template<>
+const Vector<sp<TiffIfd> >* TiffEntry::forceValidType<Vector<sp<TiffIfd> > >(TagType type,
+ const Vector<sp<TiffIfd> >* value) {
+ if (type == LONG) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'ifd vector' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const sp<TiffIfd>* TiffEntry::forceValidType<sp<TiffIfd> >(TagType type, const sp<TiffIfd>* value) {
+ if (type == LONG) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'ifd' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const uint8_t* TiffEntry::forceValidType<uint8_t>(TagType type, const uint8_t* value) {
+ if (type == BYTE || type == ASCII || type == UNDEFINED) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'uint8_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const int8_t* TiffEntry::forceValidType<int8_t>(TagType type, const int8_t* value) {
+ if (type == SBYTE || type == ASCII || type == UNDEFINED) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'int8_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const uint16_t* TiffEntry::forceValidType<uint16_t>(TagType type, const uint16_t* value) {
+ if (type == SHORT) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'uint16_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const int16_t* TiffEntry::forceValidType<int16_t>(TagType type, const int16_t* value) {
+ if (type == SSHORT) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'int16_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const uint32_t* TiffEntry::forceValidType<uint32_t>(TagType type, const uint32_t* value) {
+ if (type == LONG || type == RATIONAL) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'uint32_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const int32_t* TiffEntry::forceValidType<int32_t>(TagType type, const int32_t* value) {
+ if (type == SLONG || type == SRATIONAL) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'int32_t' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const double* TiffEntry::forceValidType<double>(TagType type, const double* value) {
+ if (type == DOUBLE) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'double' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+template<>
+const float* TiffEntry::forceValidType<float>(TagType type, const float* value) {
+ if (type == FLOAT) {
+ return value;
+ }
+ ALOGE("%s: Value of type 'float' is not valid for tag with TIFF type %d.",
+ __FUNCTION__, type);
+ return NULL;
+}
+
+String8 TiffEntry::toString() const {
+ String8 output;
+ uint32_t count = getCount();
+ output.appendFormat("[id: %x, type: %d, count: %u, value: '", getTag(), getType(), count);
+
+ size_t cappedCount = count;
+ if (count > MAX_PRINT_STRING_LENGTH) {
+ cappedCount = MAX_PRINT_STRING_LENGTH;
+ }
+
+ TagType type = getType();
+ switch (type) {
+ case UNDEFINED:
+ case BYTE: {
+ const uint8_t* typed_data = getData<uint8_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%u ", typed_data[i]);
+ }
+ break;
+ }
+ case ASCII: {
+ const char* typed_data = reinterpret_cast<const char*>(getData<uint8_t>());
+ size_t len = count;
+ if (count > MAX_PRINT_STRING_LENGTH) {
+ len = MAX_PRINT_STRING_LENGTH;
+ }
+ output.append(typed_data, len);
+ break;
+ }
+ case SHORT: {
+ const uint16_t* typed_data = getData<uint16_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%u ", typed_data[i]);
+ }
+ break;
+ }
+ case LONG: {
+ const uint32_t* typed_data = getData<uint32_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%u ", typed_data[i]);
+ }
+ break;
+ }
+ case RATIONAL: {
+ const uint32_t* typed_data = getData<uint32_t>();
+ cappedCount <<= 1;
+ for (size_t i = 0; i < cappedCount; i+=2) {
+ output.appendFormat("%u/%u ", typed_data[i], typed_data[i + 1]);
+ }
+ break;
+ }
+ case SBYTE: {
+ const int8_t* typed_data = getData<int8_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%d ", typed_data[i]);
+ }
+ break;
+ }
+ case SSHORT: {
+ const int16_t* typed_data = getData<int16_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%d ", typed_data[i]);
+ }
+ break;
+ }
+ case SLONG: {
+ const int32_t* typed_data = getData<int32_t>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%d ", typed_data[i]);
+ }
+ break;
+ }
+ case SRATIONAL: {
+ const int32_t* typed_data = getData<int32_t>();
+ cappedCount <<= 1;
+ for (size_t i = 0; i < cappedCount; i+=2) {
+ output.appendFormat("%d/%d ", typed_data[i], typed_data[i + 1]);
+ }
+ break;
+ }
+ case FLOAT: {
+ const float* typed_data = getData<float>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%f ", typed_data[i]);
+ }
+ break;
+ }
+ case DOUBLE: {
+ const double* typed_data = getData<double>();
+ for (size_t i = 0; i < cappedCount; ++i) {
+ output.appendFormat("%f ", typed_data[i]);
+ }
+ break;
+ }
+ default: {
+ output.append("unknown type ");
+ break;
+ }
+ }
+
+ if (count > MAX_PRINT_STRING_LENGTH) {
+ output.append("...");
+ }
+ output.append("']");
+ return output;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
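
The specializations above make getData<T>() return NULL (after logging an error) when the requested C++ type does not match the entry's TIFF type, instead of reinterpreting the raw bytes. A minimal sketch of how a caller is expected to check this; illustrative only, not part of the patch:

    #include <img_utils/TiffEntry.h>
    #include <utils/Log.h>
    #include <utils/StrongPointer.h>

    // Illustrative sketch: safely reading a SHORT entry.
    static void dumpShortEntry(const android::sp<android::img_utils::TiffEntry>& entry) {
        const uint16_t* values = entry->getData<uint16_t>();
        if (values == NULL) {
            // Type mismatch; forceValidType<uint16_t>() already logged the error.
            return;
        }
        for (uint32_t i = 0; i < entry->getCount(); ++i) {
            ALOGI("value[%u] = %u", i, values[i]);
        }
    }
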
diff --git a/media/img_utils/src/TiffEntryImpl.cpp b/media/img_utils/src/TiffEntryImpl.cpp
new file mode 100644
index 0000000..6efa458
--- /dev/null
+++ b/media/img_utils/src/TiffEntryImpl.cpp
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/TiffEntryImpl.h>
+#include <img_utils/TiffIfd.h>
+
+#include <utils/Vector.h>
+
+namespace android {
+namespace img_utils {
+
+template<>
+size_t TiffEntryImpl<TiffIfd>::getSize() const {
+ uint32_t total = 0;
+ for (uint32_t i = 0; i < mCount; ++i) {
+ total += mData[i].getSize();
+ }
+ return total;
+}
+
+template<>
+status_t TiffEntryImpl<TiffIfd>::writeData(uint32_t offset, EndianOutput* out) const {
+ status_t ret = OK;
+ for (uint32_t i = 0; i < mCount; ++i) {
+ BAIL_ON_FAIL(mData[i].writeData(offset, out), ret);
+ }
+ return ret;
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffIfd.cpp b/media/img_utils/src/TiffIfd.cpp
new file mode 100644
index 0000000..1b3b40d
--- /dev/null
+++ b/media/img_utils/src/TiffIfd.cpp
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffHelpers.h>
+
+#include <utils/Log.h>
+
+namespace android {
+namespace img_utils {
+
+TiffIfd::TiffIfd(uint32_t ifdId)
+ : mNextIfd(), mIfdId(ifdId) {}
+
+TiffIfd::~TiffIfd() {}
+
+status_t TiffIfd::addEntry(const sp<TiffEntry>& entry) {
+ size_t size = mEntries.size();
+ if (size >= MAX_IFD_ENTRIES) {
+ ALOGW("%s: Failed to add entry for tag 0x%x to IFD %u, too many entries in IFD!",
+ __FUNCTION__, entry->getTag(), mIfdId);
+ return BAD_INDEX;
+ }
+
+ if (mEntries.add(entry) < 0) {
+ ALOGW("%s: Failed to add entry for tag 0x%x to ifd %u.", __FUNCTION__, entry->getTag(),
+ mIfdId);
+ return BAD_INDEX;
+ }
+ return OK;
+}
+
+sp<TiffEntry> TiffIfd::getEntry(uint16_t tag) const {
+ ssize_t index = mEntries.indexOfTag(tag);
+ if (index < 0) {
+ ALOGW("%s: No entry for tag 0x%x in ifd %u.", __FUNCTION__, tag, mIfdId);
+ return NULL;
+ }
+ return mEntries[index];
+}
+
+void TiffIfd::setNextIfd(const sp<TiffIfd>& ifd) {
+ mNextIfd = ifd;
+}
+
+sp<TiffIfd> TiffIfd::getNextIfd() const {
+ return mNextIfd;
+}
+
+uint32_t TiffIfd::checkAndGetOffset(uint32_t offset) const {
+ size_t size = mEntries.size();
+
+ if (size > MAX_IFD_ENTRIES) {
+ ALOGW("%s: Could not calculate IFD offsets, IFD %u contains too many entries.",
+ __FUNCTION__, mIfdId);
+ return BAD_OFFSET;
+ }
+
+ if (size <= 0) {
+ ALOGW("%s: Could not calculate IFD offsets, IFD %u contains no entries.", __FUNCTION__,
+ mIfdId);
+ return BAD_OFFSET;
+ }
+
+ if (offset == BAD_OFFSET) {
+ ALOGW("%s: Could not calculate IFD offsets, IFD %u had a bad initial offset.",
+ __FUNCTION__, mIfdId);
+ return BAD_OFFSET;
+ }
+
+ uint32_t ifdSize = calculateIfdSize(size);
+ WORD_ALIGN(ifdSize);
+ return offset + ifdSize;
+}
+
+status_t TiffIfd::writeData(uint32_t offset, /*out*/EndianOutput* out) const {
+ assert((offset % TIFF_WORD_SIZE) == 0);
+ status_t ret = OK;
+
+ ALOGV("%s: IFD %u written to offset %u", __FUNCTION__, mIfdId, offset );
+ uint32_t valueOffset = checkAndGetOffset(offset);
+ if (valueOffset == 0) {
+ return BAD_VALUE;
+ }
+
+ size_t size = mEntries.size();
+
+ // Write IFD header (2 bytes, number of entries).
+ uint16_t header = static_cast<uint16_t>(size);
+ BAIL_ON_FAIL(out->write(&header, 0, 1), ret);
+
+ // Write tag entries
+ for (size_t i = 0; i < size; ++i) {
+ BAIL_ON_FAIL(mEntries[i]->writeTagInfo(valueOffset, out), ret);
+ valueOffset += mEntries[i]->getSize();
+ }
+
+ // Write IFD footer (4 bytes, offset to next IFD).
+ uint32_t footer = (mNextIfd != NULL) ? offset + getSize() : 0;
+ BAIL_ON_FAIL(out->write(&footer, 0, 1), ret);
+
+ assert(out->getCurrentOffset() == offset + calculateIfdSize(size));
+
+ // Write zeroes till word aligned
+ ZERO_TILL_WORD(out, calculateIfdSize(size), ret);
+
+ // Write values for each tag entry
+ for (size_t i = 0; i < size; ++i) {
+ size_t last = out->getCurrentOffset();
+ // Only write values that are too large to fit in the 12-byte TIFF entry
+ if (mEntries[i]->getSize() > OFFSET_SIZE) {
+ BAIL_ON_FAIL(mEntries[i]->writeData(out->getCurrentOffset(), out), ret);
+ }
+ size_t next = out->getCurrentOffset();
+ size_t diff = (next - last);
+ size_t actual = mEntries[i]->getSize();
+ if (diff != actual) {
+ ALOGW("Sizes do not match for tag %x. Expected %zu, received %zu",
+ mEntries[i]->getTag(), actual, diff);
+ }
+ }
+
+ assert(out->getCurrentOffset() == offset + getSize());
+
+ return ret;
+}
+
+size_t TiffIfd::getSize() const {
+ size_t size = mEntries.size();
+ uint32_t total = calculateIfdSize(size);
+ WORD_ALIGN(total);
+ for (size_t i = 0; i < size; ++i) {
+ total += mEntries[i]->getSize();
+ }
+ return total;
+}
+
+uint32_t TiffIfd::getId() const {
+ return mIfdId;
+}
+
+uint32_t TiffIfd::getComparableValue() const {
+ return mIfdId;
+}
+
+String8 TiffIfd::toString() const {
+ size_t s = mEntries.size();
+ String8 output;
+ output.appendFormat("[ifd: %x, num_entries: %zu, entries:\n", getId(), s);
+ for(size_t i = 0; i < mEntries.size(); ++i) {
+ output.append("\t");
+ output.append(mEntries[i]->toString());
+ output.append("\n");
+ }
+ output.append(", next_ifd: %x]", ((mNextIfd != NULL) ? mNextIfd->getId() : 0));
+ return output;
+}
+
+void TiffIfd::log() const {
+ size_t s = mEntries.size();
+ ALOGI("[ifd: %x, num_entries: %zu, entries:\n", getId(), s);
+ for(size_t i = 0; i < s; ++i) {
+ ALOGI("\t%s", mEntries[i]->toString().string());
+ }
+ ALOGI(", next_ifd: %x]", ((mNextIfd != NULL) ? mNextIfd->getId() : 0));
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
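
writeData() above lays an IFD out as a 2-byte entry count, one 12-byte record per tag, and a 4-byte offset to the next IFD, followed by word-aligned out-of-line values. A small worked example of the sizing, assuming calculateIfdSize() in TiffHelpers.h follows that standard TIFF layout (illustrative only, not part of the patch):

    // Illustrative only: size of the fixed part of an IFD holding 8 entries.
    size_t numEntries = 8;
    size_t ifdFixedBytes = 2 + numEntries * 12 + 4;   // = 102 bytes
    // WORD_ALIGN() then pads this to the next word boundary before the
    // out-of-line values (entries larger than OFFSET_SIZE) are written.
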
diff --git a/media/img_utils/src/TiffWritable.cpp b/media/img_utils/src/TiffWritable.cpp
new file mode 100644
index 0000000..f8d7de7
--- /dev/null
+++ b/media/img_utils/src/TiffWritable.cpp
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+#include <img_utils/TiffWritable.h>
+#include <img_utils/TiffHelpers.h>
+
+#include <assert.h>
+
+namespace android {
+namespace img_utils {
+
+TiffWritable::TiffWritable() {}
+
+TiffWritable::~TiffWritable() {}
+
+} /*namespace img_utils*/
+} /*namespace android*/
diff --git a/media/img_utils/src/TiffWriter.cpp b/media/img_utils/src/TiffWriter.cpp
new file mode 100644
index 0000000..2439033
--- /dev/null
+++ b/media/img_utils/src/TiffWriter.cpp
@@ -0,0 +1,232 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <img_utils/TiffIfd.h>
+#include <img_utils/TiffHelpers.h>
+#include <img_utils/TiffWriter.h>
+#include <img_utils/TagDefinitions.h>
+
+#include <assert.h>
+
+namespace android {
+namespace img_utils {
+
+KeyedVector<uint16_t, const TagDefinition_t*> TiffWriter::buildTagMap(
+ const TagDefinition_t* definitions, size_t length) {
+ KeyedVector<uint16_t, const TagDefinition_t*> map;
+ for(size_t i = 0; i < length; ++i) {
+ map.add(definitions[i].tagId, definitions + i);
+ }
+ return map;
+}
+
+#define COMPARE(op) \
+bool Orderable::operator op (const Orderable& orderable) const { \
+ return getComparableValue() op orderable.getComparableValue(); \
+}
+
+#define ARRAY_SIZE(array) \
+ (sizeof(array) / sizeof(array[0]))
+
+KeyedVector<uint16_t, const TagDefinition_t*> TiffWriter::sTagMaps[] = {
+ buildTagMap(TIFF_EP_TAG_DEFINITIONS, ARRAY_SIZE(TIFF_EP_TAG_DEFINITIONS)),
+ buildTagMap(DNG_TAG_DEFINITIONS, ARRAY_SIZE(DNG_TAG_DEFINITIONS)),
+ buildTagMap(EXIF_2_3_TAG_DEFINITIONS, ARRAY_SIZE(EXIF_2_3_TAG_DEFINITIONS)),
+ buildTagMap(TIFF_6_TAG_DEFINITIONS, ARRAY_SIZE(TIFF_6_TAG_DEFINITIONS))
+};
+
+TiffWriter::TiffWriter() : mTagMaps(sTagMaps), mNumTagMaps(DEFAULT_NUM_TAG_MAPS) {}
+
+TiffWriter::TiffWriter(KeyedVector<uint16_t, const TagDefinition_t*>* enabledDefinitions,
+ size_t length) : mTagMaps(enabledDefinitions), mNumTagMaps(length) {}
+
+TiffWriter::~TiffWriter() {}
+
+status_t TiffWriter::write(Output* out, Endianness end) {
+ status_t ret = OK;
+ EndianOutput endOut(out, end);
+
+ if (mIfd == NULL) {
+ ALOGE("%s: Tiff header is empty.", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ BAIL_ON_FAIL(writeFileHeader(endOut), ret);
+
+ uint32_t offset = FILE_HEADER_SIZE;
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ BAIL_ON_FAIL(ifd->writeData(offset, &endOut), ret);
+ offset += ifd->getSize();
+ ifd = ifd->getNextIfd();
+ }
+ return ret;
+}
+
+
+const TagDefinition_t* TiffWriter::lookupDefinition(uint16_t tag) const {
+ const TagDefinition_t* definition = NULL;
+ for (size_t i = 0; i < mNumTagMaps; ++i) {
+ ssize_t index = mTagMaps[i].indexOfKey(tag);
+ if (index >= 0) {
+ definition = mTagMaps[i][index];
+ break;
+ }
+ }
+
+ if (definition == NULL) {
+ ALOGE("%s: No definition exists for tag with id %x.", __FUNCTION__, tag);
+ }
+ return definition;
+}
+
+sp<TiffEntry> TiffWriter::getEntry(uint16_t tag, uint32_t ifd) const {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index < 0) {
+ ALOGE("%s: No IFD %d set for this writer.", __FUNCTION__, ifd);
+ return NULL;
+ }
+ return mNamedIfds[index]->getEntry(tag);
+}
+
+
+// TODO: Fix this to handle IFD position in chain/sub-IFD tree
+status_t TiffWriter::addEntry(const sp<TiffEntry>& entry) {
+ uint16_t tag = entry->getTag();
+
+ const TagDefinition_t* definition = lookupDefinition(tag);
+
+ if (definition == NULL) {
+ return BAD_INDEX;
+ }
+ uint32_t ifdId = 0; // TODO: all in IFD0 for now.
+
+ ssize_t index = mNamedIfds.indexOfKey(ifdId);
+
+ // Add a new IFD if necessary
+ if (index < 0) {
+ sp<TiffIfd> ifdEntry = new TiffIfd(ifdId);
+ if (mIfd == NULL) {
+ mIfd = ifdEntry;
+ }
+ index = mNamedIfds.add(ifdId, ifdEntry);
+ assert(index >= 0);
+ }
+
+ sp<TiffIfd> selectedIfd = mNamedIfds[index];
+ return selectedIfd->addEntry(entry);
+}
+
+status_t TiffWriter::uncheckedAddIfd(const sp<TiffIfd>& ifd) {
+ mNamedIfds.add(ifd->getId(), ifd);
+ sp<TiffIfd> last = findLastIfd();
+ if (last == NULL) {
+ mIfd = ifd;
+ } else {
+ last->setNextIfd(ifd);
+ }
+ last = ifd->getNextIfd();
+ while (last != NULL) {
+ mNamedIfds.add(last->getId(), last);
+ last = last->getNextIfd();
+ }
+ return OK;
+}
+
+status_t TiffWriter::addIfd(uint32_t ifd) {
+ ssize_t index = mNamedIfds.indexOfKey(ifd);
+ if (index >= 0) {
+ ALOGE("%s: Ifd with ID 0x%x already exists.", __FUNCTION__, ifd);
+ return BAD_VALUE;
+ }
+ sp<TiffIfd> newIfd = new TiffIfd(ifd);
+ if (mIfd == NULL) {
+ mIfd = newIfd;
+ } else {
+ sp<TiffIfd> last = findLastIfd();
+ last->setNextIfd(newIfd);
+ }
+ mNamedIfds.add(ifd, newIfd);
+ return OK;
+}
+
+TagType TiffWriter::getDefaultType(uint16_t tag) const {
+ const TagDefinition_t* definition = lookupDefinition(tag);
+ if (definition == NULL) {
+ ALOGE("%s: Could not find definition for tag %x", __FUNCTION__, tag);
+ return UNKNOWN_TAGTYPE;
+ }
+ return definition->defaultType;
+}
+
+uint32_t TiffWriter::getDefaultCount(uint16_t tag) const {
+ const TagDefinition_t* definition = lookupDefinition(tag);
+ if (definition == NULL) {
+ ALOGE("%s: Could not find definition for tag %x", __FUNCTION__, tag);
+ return 0;
+ }
+ return definition->fixedCount;
+}
+
+bool TiffWriter::checkIfDefined(uint16_t tag) const {
+ return lookupDefinition(tag) != NULL;
+}
+
+sp<TiffIfd> TiffWriter::findLastIfd() {
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ sp<TiffIfd> nextIfd = ifd->getNextIfd();
+ if (nextIfd == NULL) {
+ break;
+ }
+ ifd = nextIfd;
+ }
+ return ifd;
+}
+
+status_t TiffWriter::writeFileHeader(EndianOutput& out) {
+ status_t ret = OK;
+ uint16_t endMarker = (out.getEndianness() == BIG) ? BIG_ENDIAN_MARKER : LITTLE_ENDIAN_MARKER;
+ BAIL_ON_FAIL(out.write(&endMarker, 0, 1), ret);
+
+ uint16_t tiffMarker = TIFF_FILE_MARKER;
+ BAIL_ON_FAIL(out.write(&tiffMarker, 0, 1), ret);
+
+ uint32_t offsetMarker = FILE_HEADER_SIZE;
+ BAIL_ON_FAIL(out.write(&offsetMarker, 0, 1), ret);
+ return ret;
+}
+
+uint32_t TiffWriter::getTotalSize() const {
+ uint32_t totalSize = FILE_HEADER_SIZE;
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ totalSize += ifd->getSize();
+ ifd = ifd->getNextIfd();
+ }
+ return totalSize;
+}
+
+void TiffWriter::log() const {
+ ALOGI("%s: TiffWriter:", __FUNCTION__);
+ sp<TiffIfd> ifd = mIfd;
+ while(ifd != NULL) {
+ ifd->log();
+ ifd = ifd->getNextIfd();
+ }
+}
+
+} /*namespace img_utils*/
+} /*namespace android*/
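
A minimal usage sketch for the writer, illustrative only and not part of the patch; it assumes 'out' is a concrete Output implementation from img_utils, that the entries have already been built as sp<TiffEntry> objects, and that BIG is a valid Endianness value from EndianUtils.h:

    #include <img_utils/Output.h>
    #include <img_utils/TiffEntry.h>
    #include <img_utils/TiffWriter.h>
    #include <utils/Errors.h>
    #include <utils/Vector.h>

    using namespace android;
    using namespace android::img_utils;

    // Illustrative only: 'out' and 'entries' are assumed to be built elsewhere.
    static status_t writeTiff(Output* out, const Vector<sp<TiffEntry> >& entries) {
        TiffWriter writer;                    // default tag maps: TIFF/EP, DNG, EXIF 2.3, TIFF 6.0
        for (size_t i = 0; i < entries.size(); ++i) {
            status_t err = writer.addEntry(entries[i]);  // all entries land in IFD 0 for now
            if (err != OK) {
                return err;
            }
        }
        return writer.write(out, BIG);        // file header, IFD chain, then out-of-line values
    }
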
diff --git a/media/libeffects/downmix/Android.mk b/media/libeffects/downmix/Android.mk
index 2bb6dbe..e0ca8af 100644
--- a/media/libeffects/downmix/Android.mk
+++ b/media/libeffects/downmix/Android.mk
@@ -15,16 +15,10 @@ LOCAL_MODULE_TAGS := optional
LOCAL_MODULE_RELATIVE_PATH := soundfx
-ifeq ($(TARGET_OS)-$(TARGET_SIMULATOR),linux-true)
-LOCAL_LDLIBS += -ldl
-endif
-
LOCAL_C_INCLUDES := \
$(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
-LOCAL_PRELINK_MODULE := false
-
LOCAL_CFLAGS += -fvisibility=hidden
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/downmix/EffectDownmix.c b/media/libeffects/downmix/EffectDownmix.c
index 1663d47..6686f27 100644
--- a/media/libeffects/downmix/EffectDownmix.c
+++ b/media/libeffects/downmix/EffectDownmix.c
@@ -30,25 +30,13 @@
#define MINUS_3_DB_IN_Q19_12 2896 // -3dB = 0.707 * 2^12 = 2896
+// subset of possible audio_channel_mask_t values, and AUDIO_CHANNEL_OUT_* renamed to CHANNEL_MASK_*
typedef enum {
- CHANNEL_MASK_SURROUND = AUDIO_CHANNEL_OUT_SURROUND,
- CHANNEL_MASK_QUAD_BACK = AUDIO_CHANNEL_OUT_QUAD,
- // like AUDIO_CHANNEL_OUT_QUAD with *_SIDE_* instead of *_BACK_*, same channel order
- CHANNEL_MASK_QUAD_SIDE =
- AUDIO_CHANNEL_OUT_FRONT_LEFT |
- AUDIO_CHANNEL_OUT_FRONT_RIGHT |
- AUDIO_CHANNEL_OUT_SIDE_LEFT |
- AUDIO_CHANNEL_OUT_SIDE_RIGHT,
- CHANNEL_MASK_5POINT1_BACK = AUDIO_CHANNEL_OUT_5POINT1,
- // like AUDIO_CHANNEL_OUT_5POINT1 with *_SIDE_* instead of *_BACK_*, same channel order
- CHANNEL_MASK_5POINT1_SIDE =
- AUDIO_CHANNEL_OUT_FRONT_LEFT |
- AUDIO_CHANNEL_OUT_FRONT_RIGHT |
- AUDIO_CHANNEL_OUT_FRONT_CENTER |
- AUDIO_CHANNEL_OUT_LOW_FREQUENCY |
- AUDIO_CHANNEL_OUT_SIDE_LEFT |
- AUDIO_CHANNEL_OUT_SIDE_RIGHT,
- CHANNEL_MASK_7POINT1_SIDE_BACK = AUDIO_CHANNEL_OUT_7POINT1,
+ CHANNEL_MASK_QUAD_BACK = AUDIO_CHANNEL_OUT_QUAD_BACK,
+ CHANNEL_MASK_QUAD_SIDE = AUDIO_CHANNEL_OUT_QUAD_SIDE,
+ CHANNEL_MASK_5POINT1_BACK = AUDIO_CHANNEL_OUT_5POINT1_BACK,
+ CHANNEL_MASK_5POINT1_SIDE = AUDIO_CHANNEL_OUT_5POINT1_SIDE,
+ CHANNEL_MASK_7POINT1 = AUDIO_CHANNEL_OUT_7POINT1,
} downmix_input_channel_mask_t;
// effect_handle_t interface implementation for downmix effect
@@ -130,7 +118,7 @@ void Downmix_testIndexComputation(uint32_t mask) {
hasBacks = true;
}
- const int numChan = popcount(mask);
+ const int numChan = audio_channel_count_from_out_mask(mask);
const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
const bool hasLFE =
((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
@@ -340,14 +328,11 @@ static int Downmix_Process(effect_handle_t self,
case CHANNEL_MASK_QUAD_SIDE:
Downmix_foldFromQuad(pSrc, pDst, numFrames, accumulate);
break;
- case CHANNEL_MASK_SURROUND:
- Downmix_foldFromSurround(pSrc, pDst, numFrames, accumulate);
- break;
case CHANNEL_MASK_5POINT1_BACK:
case CHANNEL_MASK_5POINT1_SIDE:
Downmix_foldFrom5Point1(pSrc, pDst, numFrames, accumulate);
break;
- case CHANNEL_MASK_7POINT1_SIDE_BACK:
+ case CHANNEL_MASK_7POINT1:
Downmix_foldFrom7Point1(pSrc, pDst, numFrames, accumulate);
break;
default:
@@ -644,7 +629,8 @@ int Downmix_Configure(downmix_module_t *pDwmModule, effect_config_t *pConfig, bo
ALOGE("Downmix_Configure error: input channel mask can't be 0");
return -EINVAL;
}
- pDownmixer->input_channel_count = popcount(pConfig->inputCfg.channels);
+ pDownmixer->input_channel_count =
+ audio_channel_count_from_out_mask(pConfig->inputCfg.channels);
}
Downmix_Reset(pDownmixer, init);
@@ -828,65 +814,6 @@ void Downmix_foldFromQuad(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool ac
/*----------------------------------------------------------------------------
- * Downmix_foldFromSurround()
- *----------------------------------------------------------------------------
- * Purpose:
- * downmix a "surround sound" (mono rear) signal to stereo
- *
- * Inputs:
- * pSrc surround signal to downmix
- * numFrames the number of surround frames to downmix
- * accumulate whether to mix (when true) the result of the downmix with the contents of pDst,
- * or overwrite pDst (when false)
- *
- * Outputs:
- * pDst downmixed stereo audio samples
- *
- *----------------------------------------------------------------------------
- */
-void Downmix_foldFromSurround(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate) {
- int32_t lt, rt, centerPlusRearContrib; // samples in Q19.12 format
- // sample at index 0 is FL
- // sample at index 1 is FR
- // sample at index 2 is FC
- // sample at index 3 is RC
- // code is mostly duplicated between the two values of accumulate to avoid repeating the test
- // for every sample
- if (accumulate) {
- while (numFrames) {
- // centerPlusRearContrib = FC(-3dB) + RC(-3dB)
- centerPlusRearContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12) + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
- // FL + centerPlusRearContrib
- lt = (pSrc[0] << 12) + centerPlusRearContrib;
- // FR + centerPlusRearContrib
- rt = (pSrc[1] << 12) + centerPlusRearContrib;
- // accumulate in destination
- pDst[0] = clamp16(pDst[0] + (lt >> 13));
- pDst[1] = clamp16(pDst[1] + (rt >> 13));
- pSrc += 4;
- pDst += 2;
- numFrames--;
- }
- } else { // same code as above but without adding and clamping pDst[i] to itself
- while (numFrames) {
- // centerPlusRearContrib = FC(-3dB) + RC(-3dB)
- centerPlusRearContrib = (pSrc[2] * MINUS_3_DB_IN_Q19_12) + (pSrc[3] * MINUS_3_DB_IN_Q19_12);
- // FL + centerPlusRearContrib
- lt = (pSrc[0] << 12) + centerPlusRearContrib;
- // FR + centerPlusRearContrib
- rt = (pSrc[1] << 12) + centerPlusRearContrib;
- // store in destination
- pDst[0] = clamp16(lt >> 13); // differs from when accumulate is true above
- pDst[1] = clamp16(rt >> 13); // differs from when accumulate is true above
- pSrc += 4;
- pDst += 2;
- numFrames--;
- }
- }
-}
-
-
-/*----------------------------------------------------------------------------
* Downmix_foldFrom5Point1()
*----------------------------------------------------------------------------
* Purpose:
@@ -1071,7 +998,7 @@ bool Downmix_foldGeneric(
hasBacks = true;
}
- const int numChan = popcount(mask);
+ const int numChan = audio_channel_count_from_out_mask(mask);
const bool hasFC = ((mask & AUDIO_CHANNEL_OUT_FRONT_CENTER) == AUDIO_CHANNEL_OUT_FRONT_CENTER);
const bool hasLFE =
((mask & AUDIO_CHANNEL_OUT_LOW_FREQUENCY) == AUDIO_CHANNEL_OUT_LOW_FREQUENCY);
diff --git a/media/libeffects/downmix/EffectDownmix.h b/media/libeffects/downmix/EffectDownmix.h
index fcb3c9e..2399abd 100644
--- a/media/libeffects/downmix/EffectDownmix.h
+++ b/media/libeffects/downmix/EffectDownmix.h
@@ -97,7 +97,6 @@ int Downmix_setParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t s
int Downmix_getParameter(downmix_object_t *pDownmixer, int32_t param, uint32_t *pSize, void *pValue);
void Downmix_foldFromQuad(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
-void Downmix_foldFromSurround(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
void Downmix_foldFrom5Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
void Downmix_foldFrom7Point1(int16_t *pSrc, int16_t*pDst, size_t numFrames, bool accumulate);
bool Downmix_foldGeneric(
diff --git a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
index db5c78f..695767d 100644
--- a/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
+++ b/media/libeffects/lvm/wrapper/Bundle/EffectBundle.cpp
@@ -19,11 +19,13 @@
#define ARRAY_SIZE(array) (sizeof array / sizeof array[0])
//#define LOG_NDEBUG 0
-#include <cutils/log.h>
#include <assert.h>
+#include <inttypes.h>
+#include <new>
#include <stdlib.h>
#include <string.h>
-#include <new>
+
+#include <cutils/log.h>
#include "EffectBundle.h"
@@ -560,11 +562,12 @@ int LvmBundle_init(EffectContext *pContext){
MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %ld bytes "
- "for region %u\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u\n", MemTab.Region[i].Size, i );
bMallocFailure = LVM_TRUE;
}else{
- ALOGV("\tLvmBundle_init CreateInstance allocated %ld bytes for region %u at %p\n",
+ ALOGV("\tLvmBundle_init CreateInstance allocated %" PRIu32
+ " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
@@ -576,11 +579,11 @@ int LvmBundle_init(EffectContext *pContext){
if(bMallocFailure == LVM_TRUE){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %ld bytes "
- "for region %u Not freeing\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u Not freeing\n", MemTab.Region[i].Size, i );
}else{
- ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated %ld bytes "
- "for region %u at %p- free\n",
+ ALOGV("\tLVM_ERROR :LvmBundle_init CreateInstance Failed: but allocated %" PRIu32
+ " bytes for region %u at %p- free\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
}
@@ -889,16 +892,16 @@ void LvmEffect_free(EffectContext *pContext){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].Size != 0){
if (MemTab.Region[i].pBaseAddress != NULL){
- ALOGV("\tLvmEffect_free - START freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tLvmEffect_free - START freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
- ALOGV("\tLvmEffect_free - END freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tLvmEffect_free - END freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}else{
- ALOGV("\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer %ld bytes "
- "for region %u at %p ERROR\n",
+ ALOGV("\tLVM_ERROR : LvmEffect_free - trying to free with NULL pointer %" PRIu32
+ " bytes for region %u at %p ERROR\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
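
The %ld specifiers above did not match the 32-bit region sizes, which matters on 64-bit builds; the PRIu32 macro from <inttypes.h> expands to the correct conversion for a 32-bit unsigned argument on every ABI. A self-contained sketch (illustrative only, not part of the patch):

    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>

    int main() {
        std::uint32_t size = 4096;
        // PRIu32 expands to the platform's specifier for uint32_t, so the format
        // string matches the argument on 32- and 64-bit builds alike.
        std::printf("allocated %" PRIu32 " bytes\n", size);
        return 0;
    }
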
diff --git a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
index c6d3759..13f1a0d 100644
--- a/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/EffectReverb.cpp
@@ -19,11 +19,13 @@
#define ARRAY_SIZE(array) (sizeof array / sizeof array[0])
//#define LOG_NDEBUG 0
-#include <cutils/log.h>
#include <assert.h>
+#include <inttypes.h>
+#include <new>
#include <stdlib.h>
#include <string.h>
-#include <new>
+
+#include <cutils/log.h>
#include "EffectReverb.h"
// from Reverb/lib
#include "LVREV.h"
@@ -269,7 +271,7 @@ extern "C" int EffectCreate(const effect_uuid_t *uuid,
pContext->InFrames32 = (LVM_INT32 *)malloc(LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2);
pContext->OutFrames32 = (LVM_INT32 *)malloc(LVREV_MAX_FRAME_SIZE * sizeof(LVM_INT32) * 2);
- ALOGV("\tEffectCreate %p, size %d", pContext, sizeof(ReverbContext));
+ ALOGV("\tEffectCreate %p, size %zu", pContext, sizeof(ReverbContext));
ALOGV("\tEffectCreate end\n");
return 0;
} /* end EffectCreate */
@@ -570,15 +572,15 @@ void Reverb_free(ReverbContext *pContext){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].Size != 0){
if (MemTab.Region[i].pBaseAddress != NULL){
- ALOGV("\tfree() - START freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tfree() - START freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
- ALOGV("\tfree() - END freeing %ld bytes for region %u at %p\n",
+ ALOGV("\tfree() - END freeing %" PRIu32 " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}else{
- ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %ld bytes "
+ ALOGV("\tLVM_ERROR : free() - trying to free with NULL pointer %" PRIu32 " bytes "
"for region %u at %p ERROR\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
@@ -771,11 +773,12 @@ int Reverb_init(ReverbContext *pContext){
MemTab.Region[i].pBaseAddress = malloc(MemTab.Region[i].Size);
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %ld "
- "bytes for region %u\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVREV_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u\n", MemTab.Region[i].Size, i );
bMallocFailure = LVM_TRUE;
}else{
- ALOGV("\tReverb_init CreateInstance allocate %ld bytes for region %u at %p\n",
+ ALOGV("\tReverb_init CreateInstance allocate %" PRIu32
+ " bytes for region %u at %p\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
}
}
@@ -787,11 +790,11 @@ int Reverb_init(ReverbContext *pContext){
if(bMallocFailure == LVM_TRUE){
for (int i=0; i<LVM_NR_MEMORY_REGIONS; i++){
if (MemTab.Region[i].pBaseAddress == LVM_NULL){
- ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %ld bytes "
- "for region %u - Not freeing\n", MemTab.Region[i].Size, i );
+ ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed to allocate %" PRIu32
+ " bytes for region %u - Not freeing\n", MemTab.Region[i].Size, i );
}else{
- ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %ld bytes "
- "for region %u at %p- free\n",
+ ALOGV("\tLVM_ERROR :Reverb_init CreateInstance Failed: but allocated %" PRIu32
+ " bytes for region %u at %p- free\n",
MemTab.Region[i].Size, i, MemTab.Region[i].pBaseAddress);
free(MemTab.Region[i].pBaseAddress);
}
diff --git a/media/libeffects/preprocessing/Android.mk b/media/libeffects/preprocessing/Android.mk
index 9e8cb83..ea3c59d 100644
--- a/media/libeffects/preprocessing/Android.mk
+++ b/media/libeffects/preprocessing/Android.mk
@@ -24,12 +24,7 @@ LOCAL_SHARED_LIBRARIES := \
libutils \
liblog
-ifeq ($(TARGET_SIMULATOR),true)
-LOCAL_LDLIBS += -ldl
-else
LOCAL_SHARED_LIBRARIES += libdl
-endif
-
LOCAL_CFLAGS += -fvisibility=hidden
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libeffects/preprocessing/PreProcessing.cpp b/media/libeffects/preprocessing/PreProcessing.cpp
index a96a703..cf98f56 100644
--- a/media/libeffects/preprocessing/PreProcessing.cpp
+++ b/media/libeffects/preprocessing/PreProcessing.cpp
@@ -879,8 +879,8 @@ int Session_ReleaseEffect(preproc_session_t *session,
int Session_SetConfig(preproc_session_t *session, effect_config_t *config)
{
uint32_t sr;
- uint32_t inCnl = popcount(config->inputCfg.channels);
- uint32_t outCnl = popcount(config->outputCfg.channels);
+ uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
+ uint32_t outCnl = audio_channel_count_from_out_mask(config->outputCfg.channels);
if (config->inputCfg.samplingRate != config->outputCfg.samplingRate ||
config->inputCfg.format != config->outputCfg.format ||
@@ -1035,7 +1035,7 @@ int Session_SetReverseConfig(preproc_session_t *session, effect_config_t *config
config->inputCfg.format != AUDIO_FORMAT_PCM_16_BIT) {
return -EINVAL;
}
- uint32_t inCnl = popcount(config->inputCfg.channels);
+ uint32_t inCnl = audio_channel_count_from_out_mask(config->inputCfg.channels);
int status = session->apm->set_num_reverse_channels(inCnl);
if (status < 0) {
return -EINVAL;
diff --git a/media/libeffects/visualizer/Android.mk b/media/libeffects/visualizer/Android.mk
index dd2d306..c92c543 100644
--- a/media/libeffects/visualizer/Android.mk
+++ b/media/libeffects/visualizer/Android.mk
@@ -17,7 +17,6 @@ LOCAL_MODULE_RELATIVE_PATH := soundfx
LOCAL_MODULE:= libvisualizer
LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg) \
$(call include-path-for, audio-effects)
diff --git a/media/libeffects/visualizer/EffectVisualizer.cpp b/media/libeffects/visualizer/EffectVisualizer.cpp
index 47cab62..e5089da 100644
--- a/media/libeffects/visualizer/EffectVisualizer.cpp
+++ b/media/libeffects/visualizer/EffectVisualizer.cpp
@@ -207,7 +207,8 @@ int Visualizer_init(VisualizerContext *pContext)
pContext->mScalingMode = VISUALIZER_SCALING_MODE_NORMALIZED;
// measurement initialization
- pContext->mChannelCount = popcount(pContext->mConfig.inputCfg.channels);
+ pContext->mChannelCount =
+ audio_channel_count_from_out_mask(pContext->mConfig.inputCfg.channels);
pContext->mMeasurementMode = MEASUREMENT_MODE_NONE;
pContext->mMeasurementWindowSizeInBuffers = MEASUREMENT_WINDOW_MAX_SIZE_IN_BUFFERS;
pContext->mMeasurementBufferIdx = 0;
diff --git a/media/libmedia/Android.mk b/media/libmedia/Android.mk
index 56e7787..69eead3 100644
--- a/media/libmedia/Android.mk
+++ b/media/libmedia/Android.mk
@@ -25,6 +25,8 @@ LOCAL_SRC_FILES:= \
AudioRecord.cpp \
AudioSystem.cpp \
mediaplayer.cpp \
+ IMediaHTTPConnection.cpp \
+ IMediaHTTPService.cpp \
IMediaLogService.cpp \
IMediaPlayerService.cpp \
IMediaPlayerClient.cpp \
@@ -42,9 +44,10 @@ LOCAL_SRC_FILES:= \
JetPlayer.cpp \
IOMX.cpp \
IAudioPolicyService.cpp \
+ IAudioPolicyServiceClient.cpp \
MediaScanner.cpp \
MediaScannerClient.cpp \
- autodetect.cpp \
+ CharacterEncodingDetector.cpp \
IMediaDeathNotifier.cpp \
MediaProfiles.cpp \
IEffect.cpp \
@@ -58,26 +61,34 @@ LOCAL_SRC_FILES:= \
LOCAL_SRC_FILES += ../libnbaio/roundup.c
-# for <cutils/atomic-inline.h>
-LOCAL_CFLAGS += -DANDROID_SMP=$(if $(findstring true,$(TARGET_CPU_SMP)),1,0)
-LOCAL_SRC_FILES += SingleStateQueue.cpp
-LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"'
-# Consider a separate a library for SingleStateQueueInstantiations.
-
LOCAL_SHARED_LIBRARIES := \
- libui liblog libcutils libutils libbinder libsonivox libicuuc libexpat \
+ libui liblog libcutils libutils libbinder libsonivox libicuuc libicui18n libexpat \
libcamera_client libstagefright_foundation \
- libgui libdl libaudioutils
+ libgui libdl libaudioutils libnbaio
+
+LOCAL_STATIC_LIBRARIES += libinstantssq
LOCAL_WHOLE_STATIC_LIBRARY := libmedia_helper
LOCAL_MODULE:= libmedia
LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg) \
$(TOP)/frameworks/native/include/media/openmax \
external/icu4c/common \
+ external/icu4c/i18n \
$(call include-path-for, audio-effects) \
$(call include-path-for, audio-utils)
include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+
+# for <cutils/atomic-inline.h>
+LOCAL_CFLAGS += -DANDROID_SMP=$(if $(findstring true,$(TARGET_CPU_SMP)),1,0)
+LOCAL_SRC_FILES += SingleStateQueue.cpp
+LOCAL_CFLAGS += -DSINGLE_STATE_QUEUE_INSTANTIATIONS='"SingleStateQueueInstantiations.cpp"'
+
+LOCAL_MODULE := libinstantssq
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libmedia/AudioEffect.cpp b/media/libmedia/AudioEffect.cpp
index 8dfffb3..35f6557 100644
--- a/media/libmedia/AudioEffect.cpp
+++ b/media/libmedia/AudioEffect.cpp
@@ -380,9 +380,9 @@ void AudioEffect::enableStatusChanged(bool enabled)
}
void AudioEffect::commandExecuted(uint32_t cmdCode,
- uint32_t cmdSize,
+ uint32_t cmdSize __unused,
void *cmdData,
- uint32_t replySize,
+ uint32_t replySize __unused,
void *replyData)
{
if (cmdData == NULL || replyData == NULL) {
diff --git a/media/libmedia/AudioRecord.cpp b/media/libmedia/AudioRecord.cpp
index ccbc5a3..f865d38 100644
--- a/media/libmedia/AudioRecord.cpp
+++ b/media/libmedia/AudioRecord.cpp
@@ -18,7 +18,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioRecord"
+#include <inttypes.h>
#include <sys/resource.h>
+
#include <binder/IPCThreadState.h>
#include <media/AudioRecord.h>
#include <utils/Log.h>
@@ -41,37 +43,30 @@ status_t AudioRecord::getMinFrameCount(
return BAD_VALUE;
}
- // default to 0 in case of error
- *frameCount = 0;
-
- size_t size = 0;
+ size_t size;
status_t status = AudioSystem::getInputBufferSize(sampleRate, format, channelMask, &size);
if (status != NO_ERROR) {
- ALOGE("AudioSystem could not query the input buffer size; status %d", status);
- return NO_INIT;
+ ALOGE("AudioSystem could not query the input buffer size for sampleRate %u, format %#x, "
+ "channelMask %#x; status %d", sampleRate, format, channelMask, status);
+ return status;
}
- if (size == 0) {
- ALOGE("Unsupported configuration: sampleRate %u, format %d, channelMask %#x",
+ // We double the size of input buffer for ping pong use of record buffer.
+ // Assumes audio_is_linear_pcm(format)
+ if ((*frameCount = (size * 2) / (audio_channel_count_from_in_mask(channelMask) *
+ audio_bytes_per_sample(format))) == 0) {
+ ALOGE("Unsupported configuration: sampleRate %u, format %#x, channelMask %#x",
sampleRate, format, channelMask);
return BAD_VALUE;
}
- // We double the size of input buffer for ping pong use of record buffer.
- size <<= 1;
-
- // Assumes audio_is_linear_pcm(format)
- uint32_t channelCount = popcount(channelMask);
- size /= channelCount * audio_bytes_per_sample(format);
-
- *frameCount = size;
return NO_ERROR;
}
// ---------------------------------------------------------------------------
AudioRecord::AudioRecord()
- : mStatus(NO_INIT), mSessionId(0),
+ : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE),
mPreviousPriority(ANDROID_PRIORITY_NORMAL), mPreviousSchedulingGroup(SP_DEFAULT)
{
}
@@ -81,20 +76,20 @@ AudioRecord::AudioRecord(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCount,
+ size_t frameCount,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
int sessionId,
transfer_type transferType,
audio_input_flags_t flags)
- : mStatus(NO_INIT), mSessionId(0),
+ : mStatus(NO_INIT), mSessionId(AUDIO_SESSION_ALLOCATE),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
mPreviousSchedulingGroup(SP_DEFAULT),
mProxy(NULL)
{
mStatus = set(inputSource, sampleRate, format, channelMask, frameCount, cbf, user,
- notificationFrames, false /*threadCanCallJava*/, sessionId, transferType);
+ notificationFrames, false /*threadCanCallJava*/, sessionId, transferType, flags);
}
AudioRecord::~AudioRecord()
@@ -110,12 +105,12 @@ AudioRecord::~AudioRecord()
mAudioRecordThread->requestExitAndWait();
mAudioRecordThread.clear();
}
- if (mAudioRecord != 0) {
- mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
- mAudioRecord.clear();
- }
+ mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
+ mAudioRecord.clear();
+ mCblkMemory.clear();
+ mBufferMemory.clear();
IPCThreadState::self()->flushCommands();
- AudioSystem::releaseAudioSessionId(mSessionId);
+ AudioSystem::releaseAudioSessionId(mSessionId, -1);
}
}
@@ -124,15 +119,20 @@ status_t AudioRecord::set(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCountInt,
+ size_t frameCount,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
bool threadCanCallJava,
int sessionId,
transfer_type transferType,
audio_input_flags_t flags)
{
+ ALOGV("set(): inputSource %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
+ "notificationFrames %u, sessionId %d, transferType %d, flags %#x",
+ inputSource, sampleRate, format, channelMask, frameCount, notificationFrames,
+ sessionId, transferType, flags);
+
switch (transferType) {
case TRANSFER_DEFAULT:
if (cbf == NULL || threadCanCallJava) {
@@ -156,23 +156,15 @@ status_t AudioRecord::set(
}
mTransfer = transferType;
- // FIXME "int" here is legacy and will be replaced by size_t later
- if (frameCountInt < 0) {
- ALOGE("Invalid frame count %d", frameCountInt);
- return BAD_VALUE;
- }
- size_t frameCount = frameCountInt;
-
- ALOGV("set(): sampleRate %u, channelMask %#x, frameCount %u", sampleRate, channelMask,
- frameCount);
-
AutoMutex lock(mLock);
+ // invariant that mAudioRecord != 0 is true only after set() returns successfully
if (mAudioRecord != 0) {
ALOGE("Track already in use");
return INVALID_OPERATION;
}
+ // handle default values first.
if (inputSource == AUDIO_SOURCE_DEFAULT) {
inputSource = AUDIO_SOURCE_MIC;
}
@@ -191,12 +183,12 @@ status_t AudioRecord::set(
// validate parameters
if (!audio_is_valid_format(format)) {
- ALOGE("Invalid format %d", format);
+ ALOGE("Invalid format %#x", format);
return BAD_VALUE;
}
// Temporary restriction: AudioFlinger currently supports 16-bit PCM only
if (format != AUDIO_FORMAT_PCM_16_BIT) {
- ALOGE("Format %d is not supported", format);
+ ALOGE("Format %#x is not supported", format);
return BAD_VALUE;
}
mFormat = format;
@@ -206,34 +198,22 @@ status_t AudioRecord::set(
return BAD_VALUE;
}
mChannelMask = channelMask;
- uint32_t channelCount = popcount(channelMask);
+ uint32_t channelCount = audio_channel_count_from_in_mask(channelMask);
mChannelCount = channelCount;
- // Assumes audio_is_linear_pcm(format), else sizeof(uint8_t)
- mFrameSize = channelCount * audio_bytes_per_sample(format);
-
- // validate framecount
- size_t minFrameCount = 0;
- status_t status = AudioRecord::getMinFrameCount(&minFrameCount,
- sampleRate, format, channelMask);
- if (status != NO_ERROR) {
- ALOGE("getMinFrameCount() failed; status %d", status);
- return status;
+ if (audio_is_linear_pcm(format)) {
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
+ } else {
+ mFrameSize = sizeof(uint8_t);
}
- ALOGV("AudioRecord::set() minFrameCount = %d", minFrameCount);
- if (frameCount == 0) {
- frameCount = minFrameCount;
- } else if (frameCount < minFrameCount) {
- ALOGE("frameCount %u < minFrameCount %u", frameCount, minFrameCount);
- return BAD_VALUE;
- }
- mFrameCount = frameCount;
+ // mFrameCount is initialized in openRecord_l
+ mReqFrameCount = frameCount;
mNotificationFramesReq = notificationFrames;
mNotificationFramesAct = 0;
- if (sessionId == 0 ) {
+ if (sessionId == AUDIO_SESSION_ALLOCATE) {
mSessionId = AudioSystem::newAudioSessionId();
} else {
mSessionId = sessionId;
@@ -241,26 +221,27 @@ status_t AudioRecord::set(
ALOGV("set(): mSessionId %d", mSessionId);
mFlags = flags;
-
- // create the IAudioRecord
- status = openRecord_l(0 /*epoch*/);
- if (status) {
- return status;
- }
+ mCbf = cbf;
if (cbf != NULL) {
mAudioRecordThread = new AudioRecordThread(*this, threadCanCallJava);
mAudioRecordThread->run("AudioRecord", ANDROID_PRIORITY_AUDIO);
}
- mStatus = NO_ERROR;
+ // create the IAudioRecord
+ status_t status = openRecord_l(0 /*epoch*/);
- // Update buffer size in case it has been limited by AudioFlinger during track creation
- mFrameCount = mCblk->frameCount_;
+ if (status != NO_ERROR) {
+ if (mAudioRecordThread != 0) {
+ mAudioRecordThread->requestExit(); // see comment in AudioRecord.h
+ mAudioRecordThread->requestExitAndWait();
+ mAudioRecordThread.clear();
+ }
+ return status;
+ }
+ mStatus = NO_ERROR;
mActive = false;
- mCbf = cbf;
- mRefreshRemaining = true;
mUserData = user;
// TODO: add audio hardware input latency here
mLatency = (1000*mFrameCount) / sampleRate;
@@ -268,7 +249,7 @@ status_t AudioRecord::set(
mMarkerReached = false;
mNewPosition = 0;
mUpdatePeriod = 0;
- AudioSystem::acquireAudioSessionId(mSessionId);
+ AudioSystem::acquireAudioSessionId(mSessionId, -1);
mSequence = 1;
mObservedSequence = mSequence;
mInOverrun = false;
@@ -289,6 +270,9 @@ status_t AudioRecord::start(AudioSystem::sync_event_t event, int triggerSession)
// reset current position as seen by client to 0
mProxy->setEpoch(mProxy->getEpoch() - mProxy->getPosition());
+ // force refresh of remaining frames by processAudioBuffer() as last
+ // read before stop could be partial.
+ mRefreshRemaining = true;
mNewPosition = mProxy->getPosition() + mUpdatePeriod;
int32_t flags = android_atomic_acquire_load(&mCblk->mFlags);
@@ -352,6 +336,7 @@ bool AudioRecord::stopped() const
status_t AudioRecord::setMarkerPosition(uint32_t marker)
{
+ // The only purpose of setting marker position is to get a callback
if (mCbf == NULL) {
return INVALID_OPERATION;
}
@@ -377,6 +362,7 @@ status_t AudioRecord::getMarkerPosition(uint32_t *marker) const
status_t AudioRecord::setPositionUpdatePeriod(uint32_t updatePeriod)
{
+ // The only purpose of setting position update period is to get a callback
if (mCbf == NULL) {
return INVALID_OPERATION;
}
@@ -412,7 +398,7 @@ status_t AudioRecord::getPosition(uint32_t *position) const
return NO_ERROR;
}
-unsigned int AudioRecord::getInputFramesLost() const
+uint32_t AudioRecord::getInputFramesLost() const
{
// no need to check mActive, because if inactive this will return 0, which is what we want
return AudioSystem::getInputFramesLost(getInput());
@@ -430,56 +416,109 @@ status_t AudioRecord::openRecord_l(size_t epoch)
return NO_INIT;
}
- IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT;
- pid_t tid = -1;
+ // Fast tracks must be at the primary _output_ [sic] sampling rate,
+ // because there is currently no concept of a primary input sampling rate
+ uint32_t afSampleRate = AudioSystem::getPrimaryOutputSamplingRate();
+ if (afSampleRate == 0) {
+ ALOGW("getPrimaryOutputSamplingRate failed");
+ }
// Client can only express a preference for FAST. Server will perform additional tests.
- // The only supported use case for FAST is callback transfer mode.
+ if ((mFlags & AUDIO_INPUT_FLAG_FAST) && !(
+ // use case: callback transfer mode
+ (mTransfer == TRANSFER_CALLBACK) &&
+ // matching sample rate
+ (mSampleRate == afSampleRate))) {
+ ALOGW("AUDIO_INPUT_FLAG_FAST denied by client");
+ // once denied, do not request again if IAudioRecord is re-created
+ mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
+ }
+
+ IAudioFlinger::track_flags_t trackFlags = IAudioFlinger::TRACK_DEFAULT;
+
+ pid_t tid = -1;
if (mFlags & AUDIO_INPUT_FLAG_FAST) {
- if ((mTransfer != TRANSFER_CALLBACK) || (mAudioRecordThread == 0)) {
- ALOGW("AUDIO_INPUT_FLAG_FAST denied by client");
- // once denied, do not request again if IAudioRecord is re-created
- mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
- } else {
- trackFlags |= IAudioFlinger::TRACK_FAST;
+ trackFlags |= IAudioFlinger::TRACK_FAST;
+ if (mAudioRecordThread != 0) {
tid = mAudioRecordThread->getTid();
}
}
+ // FIXME Assume double buffering, because we don't know the true HAL sample rate
+ const uint32_t nBuffering = 2;
+
mNotificationFramesAct = mNotificationFramesReq;
+ size_t frameCount = mReqFrameCount;
if (!(mFlags & AUDIO_INPUT_FLAG_FAST)) {
+ // validate framecount
+ // If fast track was not requested, this preserves
+ // the old behavior of validating on client side.
+ // FIXME Eventually the validation should be done on server side
+ // regardless of whether it's a fast or normal track. It's debatable
+ // whether to account for the input latency to provision buffers appropriately.
+ size_t minFrameCount;
+ status = AudioRecord::getMinFrameCount(&minFrameCount,
+ mSampleRate, mFormat, mChannelMask);
+ if (status != NO_ERROR) {
+ ALOGE("getMinFrameCount() failed for sampleRate %u, format %#x, channelMask %#x; "
+ "status %d",
+ mSampleRate, mFormat, mChannelMask, status);
+ return status;
+ }
+
+ if (frameCount == 0) {
+ frameCount = minFrameCount;
+ } else if (frameCount < minFrameCount) {
+ ALOGE("frameCount %zu < minFrameCount %zu", frameCount, minFrameCount);
+ return BAD_VALUE;
+ }
+
// Make sure that application is notified with sufficient margin before overrun
- if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount/2) {
- mNotificationFramesAct = mFrameCount/2;
+ if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/2) {
+ mNotificationFramesAct = frameCount/2;
}
}
audio_io_handle_t input = AudioSystem::getInput(mInputSource, mSampleRate, mFormat,
mChannelMask, mSessionId);
- if (input == 0) {
- ALOGE("Could not get audio input for record source %d", mInputSource);
+ if (input == AUDIO_IO_HANDLE_NONE) {
+ ALOGE("Could not get audio input for record source %d, sample rate %u, format %#x, "
+ "channel mask %#x, session %d",
+ mInputSource, mSampleRate, mFormat, mChannelMask, mSessionId);
return BAD_VALUE;
}
+ {
+ // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
+ // we must release it ourselves if anything goes wrong.
+ size_t temp = frameCount; // temp may be replaced by a revised value of frameCount,
+ // but we will still need the original value also
int originalSessionId = mSessionId;
+ sp<IMemory> iMem; // for cblk
+ sp<IMemory> bufferMem;
sp<IAudioRecord> record = audioFlinger->openRecord(input,
mSampleRate, mFormat,
mChannelMask,
- mFrameCount,
+ &temp,
&trackFlags,
tid,
&mSessionId,
+ iMem,
+ bufferMem,
&status);
- ALOGE_IF(originalSessionId != 0 && mSessionId != originalSessionId,
+ ALOGE_IF(originalSessionId != AUDIO_SESSION_ALLOCATE && mSessionId != originalSessionId,
"session ID changed from %d to %d", originalSessionId, mSessionId);
- if (record == 0 || status != NO_ERROR) {
+ if (status != NO_ERROR) {
ALOGE("AudioFlinger could not create record track, status: %d", status);
- AudioSystem::releaseInput(input);
- return status;
+ goto release;
}
- sp<IMemory> iMem = record->getCblk();
+ ALOG_ASSERT(record != 0);
+
+ // AudioFlinger now owns the reference to the I/O handle,
+ // so we are no longer responsible for releasing it.
+
if (iMem == 0) {
ALOGE("Could not get control block");
return NO_INIT;
@@ -489,37 +528,67 @@ status_t AudioRecord::openRecord_l(size_t epoch)
ALOGE("Could not get control block pointer");
return NO_INIT;
}
+ audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
+
+ // Starting address of buffers in shared memory.
+ // The buffers are either immediately after the control block,
+ // or in a separate area at discretion of server.
+ void *buffers;
+ if (bufferMem == 0) {
+ buffers = cblk + 1;
+ } else {
+ buffers = bufferMem->pointer();
+ if (buffers == NULL) {
+ ALOGE("Could not get buffer pointer");
+ return NO_INIT;
+ }
+ }
+
+ // invariant that mAudioRecord != 0 is true only after set() returns successfully
if (mAudioRecord != 0) {
mAudioRecord->asBinder()->unlinkToDeath(mDeathNotifier, this);
mDeathNotifier.clear();
}
- mInput = input;
mAudioRecord = record;
mCblkMemory = iMem;
- audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
+ mBufferMemory = bufferMem;
+ IPCThreadState::self()->flushCommands();
+
mCblk = cblk;
- // FIXME missing fast track frameCount logic
+ // note that temp is the (possibly revised) value of frameCount
+ if (temp < frameCount || (frameCount == 0 && temp == 0)) {
+ ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
+ }
+ frameCount = temp;
+
mAwaitBoost = false;
if (mFlags & AUDIO_INPUT_FLAG_FAST) {
if (trackFlags & IAudioFlinger::TRACK_FAST) {
- ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %u", mFrameCount);
+ ALOGV("AUDIO_INPUT_FLAG_FAST successful; frameCount %zu", frameCount);
mAwaitBoost = true;
- // double-buffering is not required for fast tracks, due to tighter scheduling
- if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount) {
- mNotificationFramesAct = mFrameCount;
- }
} else {
- ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %u", mFrameCount);
+ ALOGV("AUDIO_INPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
// once denied, do not request again if IAudioRecord is re-created
mFlags = (audio_input_flags_t) (mFlags & ~AUDIO_INPUT_FLAG_FAST);
- if (mNotificationFramesAct == 0 || mNotificationFramesAct > mFrameCount/2) {
- mNotificationFramesAct = mFrameCount/2;
- }
+ }
+ // Theoretically double-buffering is not required for fast tracks,
+ // due to tighter scheduling. But in practice, to accommodate kernels with
+ // scheduling jitter, and apps with computation jitter, we use double-buffering.
+ if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
+ mNotificationFramesAct = frameCount/nBuffering;
}
}
- // starting address of buffers in shared memory
- void *buffers = (char*)cblk + sizeof(audio_track_cblk_t);
+ // We retain a copy of the I/O handle, but don't own the reference
+ mInput = input;
+ mRefreshRemaining = true;
+
+ mFrameCount = frameCount;
+ // If IAudioRecord is re-created, don't let the requested frameCount
+ // decrease. This can confuse clients that cache frameCount().
+ if (frameCount > mReqFrameCount) {
+ mReqFrameCount = frameCount;
+ }
// update proxy
mProxy = new AudioRecordClientProxy(cblk, buffers, mFrameCount, mFrameSize);
@@ -530,6 +599,14 @@ status_t AudioRecord::openRecord_l(size_t epoch)
mAudioRecord->asBinder()->linkToDeath(mDeathNotifier, this);
return NO_ERROR;
+ }
+
+release:
+ AudioSystem::releaseInput(input);
+ if (status == NO_ERROR) {
+ status = NO_INIT;
+ }
+ return status;
}
status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
@@ -580,6 +657,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r
// keep them from going away if another thread re-creates the track during obtainBuffer()
sp<AudioRecordClientProxy> proxy;
sp<IMemory> iMem;
+ sp<IMemory> bufferMem;
{
// start of lock scope
AutoMutex lock(mLock);
@@ -591,6 +669,9 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r
if (newSequence == oldSequence) {
status = restoreRecord_l("obtainBuffer");
if (status != NO_ERROR) {
+ buffer.mFrameCount = 0;
+ buffer.mRaw = NULL;
+ buffer.mNonContig = 0;
break;
}
}
@@ -600,6 +681,7 @@ status_t AudioRecord::obtainBuffer(Buffer* audioBuffer, const struct timespec *r
// Keep the extra references
proxy = mProxy;
iMem = mCblkMemory;
+ bufferMem = mBufferMemory;
// Non-blocking if track is stopped
if (!mActive) {
@@ -660,7 +742,7 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)
if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
// sanity-check. user is most-likely passing an error code, and it would
// make the return value ambiguous (actualSize vs error).
- ALOGE("AudioRecord::read(buffer=%p, size=%u (%d)", buffer, userSize, userSize);
+ ALOGE("AudioRecord::read(buffer=%p, size=%zu (%zu)", buffer, userSize, userSize);
return BAD_VALUE;
}
@@ -692,7 +774,7 @@ ssize_t AudioRecord::read(void* buffer, size_t userSize)
// -------------------------------------------------------------------------
-nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
+nsecs_t AudioRecord::processAudioBuffer()
{
mLock.lock();
if (mAwaitBoost) {
@@ -760,17 +842,17 @@ nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
}
// Cache other fields that will be needed soon
- size_t notificationFrames = mNotificationFramesAct;
+ uint32_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
mRefreshRemaining = false;
mRemainingFrames = notificationFrames;
mRetryOnPartialBuffer = false;
}
size_t misalignment = mProxy->getMisalignment();
- int32_t sequence = mSequence;
+ uint32_t sequence = mSequence;
// These fields don't need to be cached, because they are assigned only by set():
- // mTransfer, mCbf, mUserData, mSampleRate
+ // mTransfer, mCbf, mUserData, mSampleRate, mFrameSize
mLock.unlock();
@@ -841,11 +923,11 @@ nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
size_t nonContig;
status_t err = obtainBuffer(&audioBuffer, requested, NULL, &nonContig);
LOG_ALWAYS_FATAL_IF((err != NO_ERROR) != (audioBuffer.frameCount == 0),
- "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount);
+ "obtainBuffer() err=%d frameCount=%zu", err, audioBuffer.frameCount);
requested = &ClientProxy::kNonBlocking;
size_t avail = audioBuffer.frameCount + nonContig;
- ALOGV("obtainBuffer(%u) returned %u = %u + %u",
- mRemainingFrames, avail, audioBuffer.frameCount, nonContig);
+ ALOGV("obtainBuffer(%u) returned %zu = %zu + %zu err %d",
+ mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
if (err != NO_ERROR) {
if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR) {
break;
@@ -872,8 +954,8 @@ nsecs_t AudioRecord::processAudioBuffer(const sp<AudioRecordThread>& thread)
// Sanity check on returned size
if (ssize_t(readSize) < 0 || readSize > reqSize) {
- ALOGE("EVENT_MORE_DATA requested %u bytes but callback returned %d bytes",
- reqSize, (int) readSize);
+ ALOGE("EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
+ reqSize, ssize_t(readSize));
return NS_NEVER;
}
@@ -932,7 +1014,7 @@ status_t AudioRecord::restoreRecord_l(const char *from)
status_t result;
// if the new IAudioRecord is created, openRecord_l() will modify the
- // following member variables: mAudioRecord, mCblkMemory and mCblk.
+ // following member variables: mAudioRecord, mCblkMemory, mCblk, mBufferMemory.
// It will also delete the strong references on previous IAudioRecord and IMemory
size_t position = mProxy->getPosition();
mNewPosition = position + mUpdatePeriod;
@@ -954,7 +1036,7 @@ status_t AudioRecord::restoreRecord_l(const char *from)
// =========================================================================
-void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who)
+void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
{
sp<AudioRecord> audioRecord = mAudioRecord.promote();
if (audioRecord != 0) {
@@ -966,7 +1048,8 @@ void AudioRecord::DeathNotifier::binderDied(const wp<IBinder>& who)
// =========================================================================
AudioRecord::AudioRecordThread::AudioRecordThread(AudioRecord& receiver, bool bCanCallJava)
- : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL)
+ : Thread(bCanCallJava), mReceiver(receiver), mPaused(true), mPausedInt(false), mPausedNs(0LL),
+ mIgnoreNextPausedInt(false)
{
}
@@ -983,6 +1066,10 @@ bool AudioRecord::AudioRecordThread::threadLoop()
// caller will check for exitPending()
return true;
}
+ if (mIgnoreNextPausedInt) {
+ mIgnoreNextPausedInt = false;
+ mPausedInt = false;
+ }
if (mPausedInt) {
if (mPausedNs > 0) {
(void) mMyCond.waitRelative(mMyLock, mPausedNs);
@@ -993,7 +1080,7 @@ bool AudioRecord::AudioRecordThread::threadLoop()
return true;
}
}
- nsecs_t ns = mReceiver.processAudioBuffer(this);
+ nsecs_t ns = mReceiver.processAudioBuffer();
switch (ns) {
case 0:
return true;
@@ -1007,7 +1094,7 @@ bool AudioRecord::AudioRecordThread::threadLoop()
ns = 1000000000LL;
// fall through
default:
- LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
+ LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
pauseInternal(ns);
return true;
}
@@ -1017,12 +1104,7 @@ void AudioRecord::AudioRecordThread::requestExit()
{
// must be in this order to avoid a race condition
Thread::requestExit();
- AutoMutex _l(mMyLock);
- if (mPaused || mPausedInt) {
- mPaused = false;
- mPausedInt = false;
- mMyCond.signal();
- }
+ resume();
}
void AudioRecord::AudioRecordThread::pause()
@@ -1034,6 +1116,7 @@ void AudioRecord::AudioRecordThread::pause()
void AudioRecord::AudioRecordThread::resume()
{
AutoMutex _l(mMyLock);
+ mIgnoreNextPausedInt = true;
if (mPaused || mPausedInt) {
mPaused = false;
mPausedInt = false;
diff --git a/media/libmedia/AudioSystem.cpp b/media/libmedia/AudioSystem.cpp
index cc5b810..a47d45c 100644
--- a/media/libmedia/AudioSystem.cpp
+++ b/media/libmedia/AudioSystem.cpp
@@ -35,16 +35,17 @@ Mutex AudioSystem::gLock;
sp<IAudioFlinger> AudioSystem::gAudioFlinger;
sp<AudioSystem::AudioFlingerClient> AudioSystem::gAudioFlingerClient;
audio_error_callback AudioSystem::gAudioErrorCallback = NULL;
-// Cached values
-DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(0);
+// Cached values for output handles
+DefaultKeyedVector<audio_io_handle_t, AudioSystem::OutputDescriptor *> AudioSystem::gOutputs(NULL);
// Cached values for recording queries, all protected by gLock
-uint32_t AudioSystem::gPrevInSamplingRate = 16000;
-audio_format_t AudioSystem::gPrevInFormat = AUDIO_FORMAT_PCM_16_BIT;
-audio_channel_mask_t AudioSystem::gPrevInChannelMask = AUDIO_CHANNEL_IN_MONO;
-size_t AudioSystem::gInBuffSize = 0;
+uint32_t AudioSystem::gPrevInSamplingRate;
+audio_format_t AudioSystem::gPrevInFormat;
+audio_channel_mask_t AudioSystem::gPrevInChannelMask;
+size_t AudioSystem::gInBuffSize = 0; // zero indicates cache is invalid
+sp<AudioSystem::AudioPortCallback> AudioSystem::gAudioPortCallback;
// establish binder interface to AudioFlinger service
const sp<IAudioFlinger>& AudioSystem::get_audio_flinger()
@@ -84,13 +85,15 @@ const sp<IAudioFlinger>& AudioSystem::get_audio_flinger()
return DEAD_OBJECT;
}
-status_t AudioSystem::muteMicrophone(bool state) {
+status_t AudioSystem::muteMicrophone(bool state)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setMicMute(state);
}
-status_t AudioSystem::isMicrophoneMuted(bool* state) {
+status_t AudioSystem::isMicrophoneMuted(bool* state)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
*state = af->getMicMute();
@@ -175,13 +178,15 @@ status_t AudioSystem::setMode(audio_mode_t mode)
return af->setMode(mode);
}
-status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs) {
+status_t AudioSystem::setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
return af->setParameters(ioHandle, keyValuePairs);
}
-String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys) {
+String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& keys)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
String8 result = String8("");
if (af == 0) return result;
@@ -190,6 +195,16 @@ String8 AudioSystem::getParameters(audio_io_handle_t ioHandle, const String8& ke
return result;
}
+status_t AudioSystem::setParameters(const String8& keyValuePairs)
+{
+ return setParameters(AUDIO_IO_HANDLE_NONE, keyValuePairs);
+}
+
+String8 AudioSystem::getParameters(const String8& keys)
+{
+ return getParameters(AUDIO_IO_HANDLE_NONE, keys);
+}
+
// convert volume steps to natural log scale
// change this value to change volume scaling
@@ -227,11 +242,23 @@ status_t AudioSystem::getOutputSamplingRate(uint32_t* samplingRate, audio_stream
return PERMISSION_DENIED;
}
- return getSamplingRate(output, streamType, samplingRate);
+ return getSamplingRate(output, samplingRate);
+}
+
+status_t AudioSystem::getOutputSamplingRateForAttr(uint32_t* samplingRate,
+ const audio_attributes_t *attr)
+{
+ if (attr == NULL) {
+ return BAD_VALUE;
+ }
+ audio_io_handle_t output = getOutputForAttr(attr);
+ if (output == 0) {
+ return PERMISSION_DENIED;
+ }
+ return getSamplingRate(output, samplingRate);
}
status_t AudioSystem::getSamplingRate(audio_io_handle_t output,
- audio_stream_type_t streamType,
uint32_t* samplingRate)
{
OutputDescriptor *outputDesc;
@@ -249,9 +276,12 @@ status_t AudioSystem::getSamplingRate(audio_io_handle_t output,
*samplingRate = outputDesc->samplingRate;
gLock.unlock();
}
+ if (*samplingRate == 0) {
+ ALOGE("AudioSystem::getSamplingRate failed for output %d", output);
+ return BAD_VALUE;
+ }
- ALOGV("getSamplingRate() streamType %d, output %d, sampling rate %u", streamType, output,
- *samplingRate);
+ ALOGV("getSamplingRate() output %d, sampling rate %u", output, *samplingRate);
return NO_ERROR;
}
@@ -265,15 +295,14 @@ status_t AudioSystem::getOutputFrameCount(size_t* frameCount, audio_stream_type_
}
output = getOutput(streamType);
- if (output == 0) {
+ if (output == AUDIO_IO_HANDLE_NONE) {
return PERMISSION_DENIED;
}
- return getFrameCount(output, streamType, frameCount);
+ return getFrameCount(output, frameCount);
}
status_t AudioSystem::getFrameCount(audio_io_handle_t output,
- audio_stream_type_t streamType,
size_t* frameCount)
{
OutputDescriptor *outputDesc;
@@ -289,9 +318,12 @@ status_t AudioSystem::getFrameCount(audio_io_handle_t output,
*frameCount = outputDesc->frameCount;
gLock.unlock();
}
+ if (*frameCount == 0) {
+ ALOGE("AudioSystem::getFrameCount failed for output %d", output);
+ return BAD_VALUE;
+ }
- ALOGV("getFrameCount() streamType %d, output %d, frameCount %d", streamType, output,
- *frameCount);
+ ALOGV("getFrameCount() output %d, frameCount %zu", output, *frameCount);
return NO_ERROR;
}
@@ -305,15 +337,14 @@ status_t AudioSystem::getOutputLatency(uint32_t* latency, audio_stream_type_t st
}
output = getOutput(streamType);
- if (output == 0) {
+ if (output == AUDIO_IO_HANDLE_NONE) {
return PERMISSION_DENIED;
}
- return getLatency(output, streamType, latency);
+ return getLatency(output, latency);
}
status_t AudioSystem::getLatency(audio_io_handle_t output,
- audio_stream_type_t streamType,
uint32_t* latency)
{
OutputDescriptor *outputDesc;
@@ -330,7 +361,7 @@ status_t AudioSystem::getLatency(audio_io_handle_t output,
gLock.unlock();
}
- ALOGV("getLatency() streamType %d, output %d, latency %d", streamType, output, *latency);
+ ALOGV("getLatency() output %d, latency %d", output, *latency);
return NO_ERROR;
}
@@ -349,6 +380,12 @@ status_t AudioSystem::getInputBufferSize(uint32_t sampleRate, audio_format_t for
return PERMISSION_DENIED;
}
inBuffSize = af->getInputBufferSize(sampleRate, format, channelMask);
+ if (inBuffSize == 0) {
+ ALOGE("AudioSystem::getInputBufferSize failed sampleRate %d format %#x channelMask %x",
+ sampleRate, format, channelMask);
+ return BAD_VALUE;
+ }
+ // A benign race is possible here: we could overwrite a fresher cache entry
gLock.lock();
// save the request params
gPrevInSamplingRate = sampleRate;
@@ -371,55 +408,52 @@ status_t AudioSystem::setVoiceVolume(float value)
}
status_t AudioSystem::getRenderPosition(audio_io_handle_t output, uint32_t *halFrames,
- uint32_t *dspFrames, audio_stream_type_t stream)
+ uint32_t *dspFrames)
{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af == 0) return PERMISSION_DENIED;
- if (stream == AUDIO_STREAM_DEFAULT) {
- stream = AUDIO_STREAM_MUSIC;
- }
-
- if (output == 0) {
- output = getOutput(stream);
- }
-
return af->getRenderPosition(halFrames, dspFrames, output);
}
-size_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle) {
+uint32_t AudioSystem::getInputFramesLost(audio_io_handle_t ioHandle)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- unsigned int result = 0;
+ uint32_t result = 0;
if (af == 0) return result;
- if (ioHandle == 0) return result;
+ if (ioHandle == AUDIO_IO_HANDLE_NONE) return result;
result = af->getInputFramesLost(ioHandle);
return result;
}
-int AudioSystem::newAudioSessionId() {
+int AudioSystem::newAudioSessionId()
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
- if (af == 0) return 0;
+ if (af == 0) return AUDIO_SESSION_ALLOCATE;
return af->newAudioSessionId();
}
-void AudioSystem::acquireAudioSessionId(int audioSession) {
+void AudioSystem::acquireAudioSessionId(int audioSession, pid_t pid)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af != 0) {
- af->acquireAudioSessionId(audioSession);
+ af->acquireAudioSessionId(audioSession, pid);
}
}
-void AudioSystem::releaseAudioSessionId(int audioSession) {
+void AudioSystem::releaseAudioSessionId(int audioSession, pid_t pid)
+{
const sp<IAudioFlinger>& af = AudioSystem::get_audio_flinger();
if (af != 0) {
- af->releaseAudioSessionId(audioSession);
+ af->releaseAudioSessionId(audioSession, pid);
}
}
// ---------------------------------------------------------------------------
-void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who) {
+void AudioSystem::AudioFlingerClient::binderDied(const wp<IBinder>& who __unused)
+{
Mutex::Autolock _l(AudioSystem::gLock);
AudioSystem::gAudioFlinger.clear();
@@ -438,7 +472,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
const OutputDescriptor *desc;
audio_stream_type_t stream;
- if (ioHandle == 0) return;
+ if (ioHandle == AUDIO_IO_HANDLE_NONE) return;
Mutex::Autolock _l(AudioSystem::gLock);
@@ -455,7 +489,7 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
OutputDescriptor *outputDesc = new OutputDescriptor(*desc);
gOutputs.add(ioHandle, outputDesc);
- ALOGV("ioConfigChanged() new output samplingRate %u, format %d channel mask %#x frameCount %u "
+ ALOGV("ioConfigChanged() new output samplingRate %u, format %#x channel mask %#x frameCount %zu "
"latency %d",
outputDesc->samplingRate, outputDesc->format, outputDesc->channelMask,
outputDesc->frameCount, outputDesc->latency);
@@ -479,8 +513,8 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
if (param2 == NULL) break;
desc = (const OutputDescriptor *)param2;
- ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %d channel mask %#x "
- "frameCount %d latency %d",
+ ALOGV("ioConfigChanged() new config for output %d samplingRate %u, format %#x channel mask %#x "
+ "frameCount %zu latency %d",
ioHandle, desc->samplingRate, desc->format,
desc->channelMask, desc->frameCount, desc->latency);
OutputDescriptor *outputDesc = gOutputs.valueAt(index);
@@ -496,12 +530,15 @@ void AudioSystem::AudioFlingerClient::ioConfigChanged(int event, audio_io_handle
}
}
-void AudioSystem::setErrorCallback(audio_error_callback cb) {
+void AudioSystem::setErrorCallback(audio_error_callback cb)
+{
Mutex::Autolock _l(gLock);
gAudioErrorCallback = cb;
}
-bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType) {
+
+bool AudioSystem::routedToA2dpOutput(audio_stream_type_t streamType)
+{
switch (streamType) {
case AUDIO_STREAM_MUSIC:
case AUDIO_STREAM_VOICE_CALL:
@@ -538,6 +575,7 @@ const sp<IAudioPolicyService>& AudioSystem::get_audio_policy_service()
}
binder->linkToDeath(gAudioPolicyServiceClient);
gAudioPolicyService = interface_cast<IAudioPolicyService>(binder);
+ gAudioPolicyService->registerClient(gAudioPolicyServiceClient);
gLock.unlock();
} else {
gLock.unlock();
@@ -608,6 +646,19 @@ audio_io_handle_t AudioSystem::getOutput(audio_stream_type_t stream,
return aps->getOutput(stream, samplingRate, format, channelMask, flags, offloadInfo);
}
+audio_io_handle_t AudioSystem::getOutputForAttr(const audio_attributes_t *attr,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ const audio_offload_info_t *offloadInfo)
+{
+ if (attr == NULL) return 0;
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return 0;
+ return aps->getOutputForAttr(attr, samplingRate, format, channelMask, flags, offloadInfo);
+}
+
status_t AudioSystem::startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session)
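A hypothetical caller of the getOutputForAttr() helper added in the hunk above; the attribute values, sample rate, format and mask are only an example and are not taken from this patch:

    audio_attributes_t attr;
    memset(&attr, 0, sizeof(attr));
    attr.usage = AUDIO_USAGE_MEDIA;
    attr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
    audio_io_handle_t out = AudioSystem::getOutputForAttr(&attr, 48000, AUDIO_FORMAT_PCM_16_BIT,
            AUDIO_CHANNEL_OUT_STEREO, AUDIO_OUTPUT_FLAG_NONE, NULL /*offloadInfo*/);
    if (out == AUDIO_IO_HANDLE_NONE) {
        // NULL attributes, no policy service, or no suitable output: the helper returns 0
    }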
@@ -702,14 +753,15 @@ uint32_t AudioSystem::getStrategyForStream(audio_stream_type_t stream)
audio_devices_t AudioSystem::getDevicesForStream(audio_stream_type_t stream)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return (audio_devices_t)0;
+ if (aps == 0) return AUDIO_DEVICE_NONE;
return aps->getDevicesForStream(stream);
}
audio_io_handle_t AudioSystem::getOutputForEffect(const effect_descriptor_t *desc)
{
const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
- if (aps == 0) return PERMISSION_DENIED;
+ // FIXME change return type to status_t, and return PERMISSION_DENIED here
+ if (aps == 0) return AUDIO_IO_HANDLE_NONE;
return aps->getOutputForEffect(desc);
}
@@ -802,13 +854,88 @@ bool AudioSystem::isOffloadSupported(const audio_offload_info_t& info)
return aps->isOffloadSupported(info);
}
+status_t AudioSystem::listAudioPorts(audio_port_role_t role,
+ audio_port_type_t type,
+ unsigned int *num_ports,
+ struct audio_port *ports,
+ unsigned int *generation)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->listAudioPorts(role, type, num_ports, ports, generation);
+}
+
+status_t AudioSystem::getAudioPort(struct audio_port *port)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->getAudioPort(port);
+}
+
+status_t AudioSystem::createAudioPatch(const struct audio_patch *patch,
+ audio_patch_handle_t *handle)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->createAudioPatch(patch, handle);
+}
+
+status_t AudioSystem::releaseAudioPatch(audio_patch_handle_t handle)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->releaseAudioPatch(handle);
+}
+
+status_t AudioSystem::listAudioPatches(unsigned int *num_patches,
+ struct audio_patch *patches,
+ unsigned int *generation)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->listAudioPatches(num_patches, patches, generation);
+}
+
+status_t AudioSystem::setAudioPortConfig(const struct audio_port_config *config)
+{
+ const sp<IAudioPolicyService>& aps = AudioSystem::get_audio_policy_service();
+ if (aps == 0) return PERMISSION_DENIED;
+ return aps->setAudioPortConfig(config);
+}
+
+void AudioSystem::setAudioPortCallback(sp<AudioPortCallback> callBack)
+{
+ Mutex::Autolock _l(gLock);
+ gAudioPortCallback = callBack;
+}
+
// ---------------------------------------------------------------------------
-void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who) {
- Mutex::Autolock _l(AudioSystem::gLock);
+void AudioSystem::AudioPolicyServiceClient::binderDied(const wp<IBinder>& who __unused)
+{
+ Mutex::Autolock _l(gLock);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onServiceDied();
+ }
AudioSystem::gAudioPolicyService.clear();
ALOGW("AudioPolicyService server died!");
}
+void AudioSystem::AudioPolicyServiceClient::onAudioPortListUpdate()
+{
+ Mutex::Autolock _l(gLock);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onAudioPortListUpdate();
+ }
+}
+
+void AudioSystem::AudioPolicyServiceClient::onAudioPatchListUpdate()
+{
+ Mutex::Autolock _l(gLock);
+ if (gAudioPortCallback != 0) {
+ gAudioPortCallback->onAudioPatchListUpdate();
+ }
+}
+
}; // namespace android
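The registerClient()/AudioPortCallback plumbing added to AudioSystem above can be exercised roughly as follows. The listener class is hypothetical, and this assumes AudioPortCallback is a RefBase subclass that declares these three methods as virtuals:

    struct MyPortListener : public AudioSystem::AudioPortCallback {
        virtual void onAudioPortListUpdate()  { /* re-query AudioSystem::listAudioPorts() */ }
        virtual void onAudioPatchListUpdate() { /* re-query AudioSystem::listAudioPatches() */ }
        virtual void onServiceDied()          { /* audiopolicy restarted; cached ports/patches are stale */ }
    };
    AudioSystem::setAudioPortCallback(new MyPortListener());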
diff --git a/media/libmedia/AudioTrack.cpp b/media/libmedia/AudioTrack.cpp
index 3f3a88c..898d58d 100644
--- a/media/libmedia/AudioTrack.cpp
+++ b/media/libmedia/AudioTrack.cpp
@@ -15,11 +15,13 @@
** limitations under the License.
*/
-
//#define LOG_NDEBUG 0
#define LOG_TAG "AudioTrack"
+#include <inttypes.h>
+#include <math.h>
#include <sys/resource.h>
+
#include <audio_utils/primitives.h>
#include <binder/IPCThreadState.h>
#include <media/AudioTrack.h>
@@ -44,9 +46,6 @@ status_t AudioTrack::getMinFrameCount(
return BAD_VALUE;
}
- // default to 0 in case of error
- *frameCount = 0;
-
// FIXME merge with similar code in createTrack_l(), except we're missing
// some information here that is available in createTrack_l():
// audio_io_handle_t output
@@ -54,16 +53,26 @@ status_t AudioTrack::getMinFrameCount(
// audio_channel_mask_t channelMask
// audio_output_flags_t flags
uint32_t afSampleRate;
- if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
- return NO_INIT;
+ status_t status;
+ status = AudioSystem::getOutputSamplingRate(&afSampleRate, streamType);
+ if (status != NO_ERROR) {
+ ALOGE("Unable to query output sample rate for stream type %d; status %d",
+ streamType, status);
+ return status;
}
size_t afFrameCount;
- if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) {
- return NO_INIT;
+ status = AudioSystem::getOutputFrameCount(&afFrameCount, streamType);
+ if (status != NO_ERROR) {
+ ALOGE("Unable to query output frame count for stream type %d; status %d",
+ streamType, status);
+ return status;
}
uint32_t afLatency;
- if (AudioSystem::getOutputLatency(&afLatency, streamType) != NO_ERROR) {
- return NO_INIT;
+ status = AudioSystem::getOutputLatency(&afLatency, streamType);
+ if (status != NO_ERROR) {
+ ALOGE("Unable to query output latency for stream type %d; status %d",
+ streamType, status);
+ return status;
}
// Ensure that buffer depth covers at least audio hardware latency
@@ -74,7 +83,14 @@ status_t AudioTrack::getMinFrameCount(
*frameCount = (sampleRate == 0) ? afFrameCount * minBufCount :
afFrameCount * minBufCount * sampleRate / afSampleRate;
- ALOGV("getMinFrameCount=%d: afFrameCount=%d, minBufCount=%d, afSampleRate=%d, afLatency=%d",
+ // The formula above should always produce a non-zero value, but return an error
+ // in the unlikely event that it does not, as that's part of the API contract.
+ if (*frameCount == 0) {
+ ALOGE("AudioTrack::getMinFrameCount failed for streamType %d, sampleRate %d",
+ streamType, sampleRate);
+ return BAD_VALUE;
+ }
+ ALOGV("getMinFrameCount=%zu: afFrameCount=%zu, minBufCount=%d, afSampleRate=%d, afLatency=%d",
*frameCount, afFrameCount, minBufCount, afSampleRate, afLatency);
return NO_ERROR;
}
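For concreteness, one worked instance of the sizing formula above, assuming minBufCount is derived from afLatency the same way createTrack_l() does further down (the numbers are illustrative):

    // afSampleRate = 48000 Hz, afFrameCount = 960 frames   -> one hardware buffer covers 20 ms
    // afLatency    = 80 ms                                  -> minBufCount = 80 / 20 = 4
    // requested sampleRate = 44100 Hz
    size_t minFrames = 960 * 4 * 44100 / 48000;              // = 3528 frames returned in *frameCount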
@@ -88,6 +104,10 @@ AudioTrack::AudioTrack()
mPreviousSchedulingGroup(SP_DEFAULT),
mPausedPosition(0)
{
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_UNKNOWN;
+ mAttributes.usage = AUDIO_USAGE_UNKNOWN;
+ mAttributes.flags = 0x0;
+ strcpy(mAttributes.tags, "");
}
AudioTrack::AudioTrack(
@@ -95,15 +115,16 @@ AudioTrack::AudioTrack(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCount,
+ size_t frameCount,
audio_output_flags_t flags,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
int sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid)
+ int uid,
+ pid_t pid)
: mStatus(NO_INIT),
mIsTimed(false),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -113,7 +134,7 @@ AudioTrack::AudioTrack(
mStatus = set(streamType, sampleRate, format, channelMask,
frameCount, flags, cbf, user, notificationFrames,
0 /*sharedBuffer*/, false /*threadCanCallJava*/, sessionId, transferType,
- offloadInfo, uid);
+ offloadInfo, uid, pid, NULL /*no audio attributes*/);
}
AudioTrack::AudioTrack(
@@ -125,11 +146,12 @@ AudioTrack::AudioTrack(
audio_output_flags_t flags,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
int sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid)
+ int uid,
+ pid_t pid)
: mStatus(NO_INIT),
mIsTimed(false),
mPreviousPriority(ANDROID_PRIORITY_NORMAL),
@@ -138,7 +160,8 @@ AudioTrack::AudioTrack(
{
mStatus = set(streamType, sampleRate, format, channelMask,
0 /*frameCount*/, flags, cbf, user, notificationFrames,
- sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo, uid);
+ sharedBuffer, false /*threadCanCallJava*/, sessionId, transferType, offloadInfo,
+ uid, pid, NULL /*no audio attributes*/);
}
AudioTrack::~AudioTrack()
@@ -156,8 +179,12 @@ AudioTrack::~AudioTrack()
}
mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
mAudioTrack.clear();
+ mCblkMemory.clear();
+ mSharedBuffer.clear();
IPCThreadState::self()->flushCommands();
- AudioSystem::releaseAudioSessionId(mSessionId);
+ ALOGV("~AudioTrack, releasing session id from %d on behalf of %d",
+ IPCThreadState::self()->getCallingPid(), mClientPid);
+ AudioSystem::releaseAudioSessionId(mSessionId, mClientPid);
}
}
@@ -166,18 +193,25 @@ status_t AudioTrack::set(
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- int frameCountInt,
+ size_t frameCount,
audio_output_flags_t flags,
callback_t cbf,
void* user,
- int notificationFrames,
+ uint32_t notificationFrames,
const sp<IMemory>& sharedBuffer,
bool threadCanCallJava,
int sessionId,
transfer_type transferType,
const audio_offload_info_t *offloadInfo,
- int uid)
+ int uid,
+ pid_t pid,
+ audio_attributes_t* pAttributes)
{
+ ALOGV("set(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
+ "flags #%x, notificationFrames %u, sessionId %d, transferType %d",
+ streamType, sampleRate, format, channelMask, frameCount, flags, notificationFrames,
+ sessionId, transferType);
+
switch (transferType) {
case TRANSFER_DEFAULT:
if (sharedBuffer != 0) {
@@ -211,19 +245,13 @@ status_t AudioTrack::set(
ALOGE("Invalid transfer type %d", transferType);
return BAD_VALUE;
}
+ mSharedBuffer = sharedBuffer;
mTransfer = transferType;
- // FIXME "int" here is legacy and will be replaced by size_t later
- if (frameCountInt < 0) {
- ALOGE("Invalid frame count %d", frameCountInt);
- return BAD_VALUE;
- }
- size_t frameCount = frameCountInt;
-
ALOGV_IF(sharedBuffer != 0, "sharedBuffer: %p, size: %d", sharedBuffer->pointer(),
sharedBuffer->size());
- ALOGV("set() streamType %d frameCount %u flags %04x", streamType, frameCount, flags);
+ ALOGV("set() streamType %d frameCount %zu flags %04x", streamType, frameCount, flags);
AutoMutex lock(mLock);
@@ -233,19 +261,39 @@ status_t AudioTrack::set(
return INVALID_OPERATION;
}
- mOutput = 0;
-
// handle default values first.
if (streamType == AUDIO_STREAM_DEFAULT) {
streamType = AUDIO_STREAM_MUSIC;
}
+ if (pAttributes == NULL) {
+ if (uint32_t(streamType) >= AUDIO_STREAM_CNT) {
+ ALOGE("Invalid stream type %d", streamType);
+ return BAD_VALUE;
+ }
+ setAttributesFromStreamType(streamType);
+ mStreamType = streamType;
+ } else {
+ if (!isValidAttributes(pAttributes)) {
+ ALOGE("Invalid attributes: usage=%d content=%d flags=0x%x tags=[%s]",
+ pAttributes->usage, pAttributes->content_type, pAttributes->flags,
+ pAttributes->tags);
+ }
+ // stream type shouldn't be looked at, this track has audio attributes
+ memcpy(&mAttributes, pAttributes, sizeof(audio_attributes_t));
+ setStreamTypeFromAttributes(mAttributes);
+ ALOGV("Building AudioTrack with attributes: usage=%d content=%d flags=0x%x tags=[%s]",
+ mAttributes.usage, mAttributes.content_type, mAttributes.flags, mAttributes.tags);
+ }
+
+ status_t status;
if (sampleRate == 0) {
- uint32_t afSampleRate;
- if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
- return NO_INIT;
+ status = AudioSystem::getOutputSamplingRateForAttr(&sampleRate, &mAttributes);
+ if (status != NO_ERROR) {
+ ALOGE("Could not get output sample rate for stream type %d; status %d",
+ mStreamType, status);
+ return status;
}
- sampleRate = afSampleRate;
}
mSampleRate = sampleRate;
@@ -253,15 +301,21 @@ status_t AudioTrack::set(
if (format == AUDIO_FORMAT_DEFAULT) {
format = AUDIO_FORMAT_PCM_16_BIT;
}
- if (channelMask == 0) {
- channelMask = AUDIO_CHANNEL_OUT_STEREO;
- }
// validate parameters
if (!audio_is_valid_format(format)) {
- ALOGE("Invalid format %d", format);
+ ALOGE("Invalid format %#x", format);
return BAD_VALUE;
}
+ mFormat = format;
+
+ if (!audio_is_output_channel(channelMask)) {
+ ALOGE("Invalid channel mask %#x", channelMask);
+ return BAD_VALUE;
+ }
+ mChannelMask = channelMask;
+ uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
+ mChannelCount = channelCount;
// AudioFlinger does not currently support 8-bit data in shared memory
if (format == AUDIO_FORMAT_PCM_8_BIT && sharedBuffer != 0) {
@@ -281,50 +335,56 @@ status_t AudioTrack::set(
((flags | AUDIO_OUTPUT_FLAG_DIRECT) & ~AUDIO_OUTPUT_FLAG_FAST);
}
// only allow deep buffering for music stream type
- if (streamType != AUDIO_STREAM_MUSIC) {
+ if (mStreamType != AUDIO_STREAM_MUSIC) {
flags = (audio_output_flags_t)(flags &~AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
}
- if (!audio_is_output_channel(channelMask)) {
- ALOGE("Invalid channel mask %#x", channelMask);
- return BAD_VALUE;
- }
- mChannelMask = channelMask;
- uint32_t channelCount = popcount(channelMask);
- mChannelCount = channelCount;
-
- if (audio_is_linear_pcm(format)) {
+ if (flags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ if (audio_is_linear_pcm(format)) {
+ mFrameSize = channelCount * audio_bytes_per_sample(format);
+ } else {
+ mFrameSize = sizeof(uint8_t);
+ }
+ mFrameSizeAF = mFrameSize;
+ } else {
+ ALOG_ASSERT(audio_is_linear_pcm(format));
mFrameSize = channelCount * audio_bytes_per_sample(format);
- mFrameSizeAF = channelCount * sizeof(int16_t);
+ mFrameSizeAF = channelCount * audio_bytes_per_sample(
+ format == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : format);
+ // createTrack will return an error if PCM format is not supported by server,
+ // so no need to check for specific PCM formats here
+ }
+
+ // Make copy of input parameter offloadInfo so that in the future:
+ // (a) createTrack_l doesn't need it as an input parameter
+ // (b) we can support re-creation of offloaded tracks
+ if (offloadInfo != NULL) {
+ mOffloadInfoCopy = *offloadInfo;
+ mOffloadInfo = &mOffloadInfoCopy;
} else {
- mFrameSize = sizeof(uint8_t);
- mFrameSizeAF = sizeof(uint8_t);
+ mOffloadInfo = NULL;
}
- audio_io_handle_t output = AudioSystem::getOutput(
- streamType,
- sampleRate, format, channelMask,
- flags,
- offloadInfo);
-
- if (output == 0) {
- ALOGE("Could not get audio output for stream type %d", streamType);
- return BAD_VALUE;
- }
-
- mVolume[LEFT] = 1.0f;
- mVolume[RIGHT] = 1.0f;
+ mVolume[AUDIO_INTERLEAVE_LEFT] = 1.0f;
+ mVolume[AUDIO_INTERLEAVE_RIGHT] = 1.0f;
mSendLevel = 0.0f;
- mFrameCount = frameCount;
+ // mFrameCount is initialized in createTrack_l
mReqFrameCount = frameCount;
mNotificationFramesReq = notificationFrames;
mNotificationFramesAct = 0;
mSessionId = sessionId;
- if (uid == -1 || (IPCThreadState::self()->getCallingPid() != getpid())) {
+ int callingpid = IPCThreadState::self()->getCallingPid();
+ int mypid = getpid();
+ if (uid == -1 || (callingpid != mypid)) {
mClientUid = IPCThreadState::self()->getCallingUid();
} else {
mClientUid = uid;
}
+ if (pid == -1 || (callingpid != mypid)) {
+ mClientPid = callingpid;
+ } else {
+ mClientPid = pid;
+ }
mAuxEffectId = 0;
mFlags = flags;
mCbf = cbf;
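To make the mFrameSize / mFrameSizeAF split earlier in this hunk concrete, a small sketch for a two-channel track (the configurations are examples; byte sizes come from audio_bytes_per_sample()):

    uint32_t ch = 2;
    size_t frameSize16 = ch * audio_bytes_per_sample(AUDIO_FORMAT_PCM_16_BIT);  // 4 bytes, client and AudioFlinger agree
    size_t frameSize8  = ch * audio_bytes_per_sample(AUDIO_FORMAT_PCM_8_BIT);   // 2 bytes on the client side only
    size_t frameSizeAF = ch * audio_bytes_per_sample(AUDIO_FORMAT_PCM_16_BIT);  // 4 bytes: 8-bit is promoted for AudioFlinger
    // on a DIRECT output carrying non-PCM data, both sizes collapse to 1 byte (a frame is a byte)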
@@ -335,14 +395,7 @@ status_t AudioTrack::set(
}
// create the IAudioTrack
- status_t status = createTrack_l(streamType,
- sampleRate,
- format,
- frameCount,
- flags,
- sharedBuffer,
- output,
- 0 /*epoch*/);
+ status = createTrack_l(0 /*epoch*/);
if (status != NO_ERROR) {
if (mAudioTrackThread != 0) {
@@ -350,17 +403,10 @@ status_t AudioTrack::set(
mAudioTrackThread->requestExitAndWait();
mAudioTrackThread.clear();
}
- //Use of direct and offloaded output streams is ref counted by audio policy manager.
- // As getOutput was called above and resulted in an output stream to be opened,
- // we need to release it.
- AudioSystem::releaseOutput(output);
return status;
}
mStatus = NO_ERROR;
- mStreamType = streamType;
- mFormat = format;
- mSharedBuffer = sharedBuffer;
mState = STATE_STOPPED;
mUserData = user;
mLoopPeriod = 0;
@@ -368,11 +414,10 @@ status_t AudioTrack::set(
mMarkerReached = false;
mNewPosition = 0;
mUpdatePeriod = 0;
- AudioSystem::acquireAudioSessionId(mSessionId);
+ AudioSystem::acquireAudioSessionId(mSessionId, mClientPid);
mSequence = 1;
mObservedSequence = mSequence;
mInUnderrun = false;
- mOutput = output;
return NO_ERROR;
}
@@ -448,12 +493,11 @@ status_t AudioTrack::start()
void AudioTrack::stop()
{
AutoMutex lock(mLock);
- // FIXME pause then stop should not be a nop
- if (mState != STATE_ACTIVE) {
+ if (mState != STATE_ACTIVE && mState != STATE_PAUSED) {
return;
}
- if (isOffloaded()) {
+ if (isOffloaded_l()) {
mState = STATE_STOPPING;
} else {
mState = STATE_STOPPED;
@@ -475,7 +519,7 @@ void AudioTrack::stop()
sp<AudioTrackThread> t = mAudioTrackThread;
if (t != 0) {
- if (!isOffloaded()) {
+ if (!isOffloaded_l()) {
t->pause();
}
} else {
@@ -513,7 +557,7 @@ void AudioTrack::flush_l()
mRefreshRemaining = true;
mState = STATE_FLUSHED;
- if (isOffloaded()) {
+ if (isOffloaded_l()) {
mProxy->interrupt();
}
mProxy->flush();
@@ -533,8 +577,8 @@ void AudioTrack::pause()
mProxy->interrupt();
mAudioTrack->pause();
- if (isOffloaded()) {
- if (mOutput != 0) {
+ if (isOffloaded_l()) {
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
uint32_t halFrames;
// OffloadThread sends HAL pause in its threadLoop.. time saved
// here can be slightly off
@@ -546,17 +590,19 @@ void AudioTrack::pause()
status_t AudioTrack::setVolume(float left, float right)
{
- if (left < 0.0f || left > 1.0f || right < 0.0f || right > 1.0f) {
+ // This duplicates a test by AudioTrack JNI, but that is not the only caller
+ if (isnanf(left) || left < GAIN_FLOAT_ZERO || left > GAIN_FLOAT_UNITY ||
+ isnanf(right) || right < GAIN_FLOAT_ZERO || right > GAIN_FLOAT_UNITY) {
return BAD_VALUE;
}
AutoMutex lock(mLock);
- mVolume[LEFT] = left;
- mVolume[RIGHT] = right;
+ mVolume[AUDIO_INTERLEAVE_LEFT] = left;
+ mVolume[AUDIO_INTERLEAVE_RIGHT] = right;
- mProxy->setVolumeLR((uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000));
+ mProxy->setVolumeLR(gain_minifloat_pack(gain_from_float(left), gain_from_float(right)));
- if (isOffloaded()) {
+ if (isOffloaded_l()) {
mAudioTrack->signal();
}
return NO_ERROR;
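Side by side, the volume encoding change made above; the minifloat helpers come from audio_utils, and the packed typedef name is assumed here:

    // removed: 4.12 fixed-point, two unsigned 16-bit halves in one word
    uint32_t legacy = (uint32_t(uint16_t(right * 0x1000)) << 16) | uint16_t(left * 0x1000);
    // added: each gain converted to a compact minifloat, then packed into one 32-bit word
    gain_minifloat_packed_t packed = gain_minifloat_pack(gain_from_float(left), gain_from_float(right));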
@@ -569,7 +615,8 @@ status_t AudioTrack::setVolume(float volume)
status_t AudioTrack::setAuxEffectSendLevel(float level)
{
- if (level < 0.0f || level > 1.0f) {
+ // This duplicates a test by AudioTrack JNI, but that is not the only caller
+ if (isnanf(level) || level < GAIN_FLOAT_ZERO || level > GAIN_FLOAT_UNITY) {
return BAD_VALUE;
}
@@ -589,12 +636,12 @@ void AudioTrack::getAuxEffectSendLevel(float* level) const
status_t AudioTrack::setSampleRate(uint32_t rate)
{
- if (mIsTimed || isOffloaded()) {
+ if (mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
uint32_t afSamplingRate;
- if (AudioSystem::getOutputSamplingRate(&afSamplingRate, mStreamType) != NO_ERROR) {
+ if (AudioSystem::getOutputSamplingRateForAttr(&afSamplingRate, &mAttributes) != NO_ERROR) {
return NO_INIT;
}
// Resampler implementation limits input sampling rate to 2 x output sampling rate.
@@ -620,10 +667,10 @@ uint32_t AudioTrack::getSampleRate() const
// sample rate can be updated during playback by the offloaded decoder so we need to
// query the HAL and update if needed.
// FIXME use Proxy return channel to update the rate from server and avoid polling here
- if (isOffloaded()) {
- if (mOutput != 0) {
+ if (isOffloadedOrDirect_l()) {
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
uint32_t sampleRate = 0;
- status_t status = AudioSystem::getSamplingRate(mOutput, mStreamType, &sampleRate);
+ status_t status = AudioSystem::getSamplingRate(mOutput, &sampleRate);
if (status == NO_ERROR) {
mSampleRate = sampleRate;
}
@@ -634,7 +681,7 @@ uint32_t AudioTrack::getSampleRate() const
status_t AudioTrack::setLoop(uint32_t loopStart, uint32_t loopEnd, int loopCount)
{
- if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) {
+ if (mSharedBuffer == 0 || mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
@@ -668,7 +715,7 @@ void AudioTrack::setLoop_l(uint32_t loopStart, uint32_t loopEnd, int loopCount)
status_t AudioTrack::setMarkerPosition(uint32_t marker)
{
// The only purpose of setting marker position is to get a callback
- if (mCbf == NULL || isOffloaded()) {
+ if (mCbf == NULL || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
@@ -681,7 +728,7 @@ status_t AudioTrack::setMarkerPosition(uint32_t marker)
status_t AudioTrack::getMarkerPosition(uint32_t *marker) const
{
- if (isOffloaded()) {
+ if (isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
if (marker == NULL) {
@@ -697,19 +744,20 @@ status_t AudioTrack::getMarkerPosition(uint32_t *marker) const
status_t AudioTrack::setPositionUpdatePeriod(uint32_t updatePeriod)
{
// The only purpose of setting position update period is to get a callback
- if (mCbf == NULL || isOffloaded()) {
+ if (mCbf == NULL || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
AutoMutex lock(mLock);
mNewPosition = mProxy->getPosition() + updatePeriod;
mUpdatePeriod = updatePeriod;
+
return NO_ERROR;
}
status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const
{
- if (isOffloaded()) {
+ if (isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
if (updatePeriod == NULL) {
@@ -724,7 +772,7 @@ status_t AudioTrack::getPositionUpdatePeriod(uint32_t *updatePeriod) const
status_t AudioTrack::setPosition(uint32_t position)
{
- if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) {
+ if (mSharedBuffer == 0 || mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
if (position > mFrameCount) {
@@ -757,16 +805,16 @@ status_t AudioTrack::getPosition(uint32_t *position) const
}
AutoMutex lock(mLock);
- if (isOffloaded()) {
+ if (isOffloadedOrDirect_l()) {
uint32_t dspFrames = 0;
- if ((mState == STATE_PAUSED) || (mState == STATE_PAUSED_STOPPING)) {
+ if (isOffloaded_l() && ((mState == STATE_PAUSED) || (mState == STATE_PAUSED_STOPPING))) {
ALOGV("getPosition called in paused state, return cached position %u", mPausedPosition);
*position = mPausedPosition;
return NO_ERROR;
}
- if (mOutput != 0) {
+ if (mOutput != AUDIO_IO_HANDLE_NONE) {
uint32_t halFrames;
AudioSystem::getRenderPosition(mOutput, &halFrames, &dspFrames);
}
@@ -795,7 +843,7 @@ status_t AudioTrack::getBufferPosition(uint32_t *position)
status_t AudioTrack::reload()
{
- if (mSharedBuffer == 0 || mIsTimed || isOffloaded()) {
+ if (mSharedBuffer == 0 || mIsTimed || isOffloadedOrDirect()) {
return INVALID_OPERATION;
}
@@ -812,23 +860,12 @@ status_t AudioTrack::reload()
return NO_ERROR;
}
-audio_io_handle_t AudioTrack::getOutput()
+audio_io_handle_t AudioTrack::getOutput() const
{
AutoMutex lock(mLock);
return mOutput;
}
-// must be called with mLock held
-audio_io_handle_t AudioTrack::getOutput_l()
-{
- if (mOutput) {
- return mOutput;
- } else {
- return AudioSystem::getOutput(mStreamType,
- mSampleRate, mFormat, mChannelMask, mFlags);
- }
-}
-
status_t AudioTrack::attachAuxEffect(int effectId)
{
AutoMutex lock(mLock);
@@ -842,15 +879,7 @@ status_t AudioTrack::attachAuxEffect(int effectId)
// -------------------------------------------------------------------------
// must be called with mLock held
-status_t AudioTrack::createTrack_l(
- audio_stream_type_t streamType,
- uint32_t sampleRate,
- audio_format_t format,
- size_t frameCount,
- audio_output_flags_t flags,
- const sp<IMemory>& sharedBuffer,
- audio_io_handle_t output,
- size_t epoch)
+status_t AudioTrack::createTrack_l(size_t epoch)
{
status_t status;
const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
@@ -859,50 +888,57 @@ status_t AudioTrack::createTrack_l(
return NO_INIT;
}
+ audio_io_handle_t output = AudioSystem::getOutputForAttr(&mAttributes, mSampleRate, mFormat,
+ mChannelMask, mFlags, mOffloadInfo);
+ if (output == AUDIO_IO_HANDLE_NONE) {
+ ALOGE("Could not get audio output for stream type %d, usage %d, sample rate %u, format %#x,"
+ " channel mask %#x, flags %#x",
+ mStreamType, mAttributes.usage, mSampleRate, mFormat, mChannelMask, mFlags);
+ return BAD_VALUE;
+ }
+ {
+ // Now that we have a reference to an I/O handle and have not yet handed it off to AudioFlinger,
+ // we must release it ourselves if anything goes wrong.
+
// Not all of these values are needed under all conditions, but it is easier to get them all
uint32_t afLatency;
- status = AudioSystem::getLatency(output, streamType, &afLatency);
+ status = AudioSystem::getLatency(output, &afLatency);
if (status != NO_ERROR) {
ALOGE("getLatency(%d) failed status %d", output, status);
- return NO_INIT;
+ goto release;
}
size_t afFrameCount;
- status = AudioSystem::getFrameCount(output, streamType, &afFrameCount);
+ status = AudioSystem::getFrameCount(output, &afFrameCount);
if (status != NO_ERROR) {
- ALOGE("getFrameCount(output=%d, streamType=%d) status %d", output, streamType, status);
- return NO_INIT;
+ ALOGE("getFrameCount(output=%d) status %d", output, status);
+ goto release;
}
uint32_t afSampleRate;
- status = AudioSystem::getSamplingRate(output, streamType, &afSampleRate);
+ status = AudioSystem::getSamplingRate(output, &afSampleRate);
if (status != NO_ERROR) {
- ALOGE("getSamplingRate(output=%d, streamType=%d) status %d", output, streamType, status);
- return NO_INIT;
+ ALOGE("getSamplingRate(output=%d) status %d", output, status);
+ goto release;
}
// Client decides whether the track is TIMED (see below), but can only express a preference
// for FAST. Server will perform additional tests.
- if ((flags & AUDIO_OUTPUT_FLAG_FAST) && !(
+ if ((mFlags & AUDIO_OUTPUT_FLAG_FAST) && !((
// either of these use cases:
// use case 1: shared buffer
- (sharedBuffer != 0) ||
- // use case 2: callback handler
- (mCbf != NULL))) {
+ (mSharedBuffer != 0) ||
+ // use case 2: callback transfer mode
+ (mTransfer == TRANSFER_CALLBACK)) &&
+ // matching sample rate
+ (mSampleRate == afSampleRate))) {
ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client");
// once denied, do not request again if IAudioTrack is re-created
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
- mFlags = flags;
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
}
ALOGV("createTrack_l() output %d afLatency %d", output, afLatency);
- if ((flags & AUDIO_OUTPUT_FLAG_FAST) && sampleRate != afSampleRate) {
- ALOGW("AUDIO_OUTPUT_FLAG_FAST denied by client due to mismatching sample rate (%d vs %d)",
- sampleRate, afSampleRate);
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
- }
-
// The client's AudioTrack buffer is divided into n parts for purpose of wakeup by server, where
// n = 1 fast track with single buffering; nBuffering is ignored
// n = 2 fast track with double buffering
@@ -910,64 +946,70 @@ status_t AudioTrack::createTrack_l(
// n = 3 normal track, with sample rate conversion
// (pessimistic; some non-1:1 conversion ratios don't actually need triple-buffering)
// n > 3 very high latency or very small notification interval; nBuffering is ignored
- const uint32_t nBuffering = (sampleRate == afSampleRate) ? 2 : 3;
+ const uint32_t nBuffering = (mSampleRate == afSampleRate) ? 2 : 3;
mNotificationFramesAct = mNotificationFramesReq;
- if (!audio_is_linear_pcm(format)) {
- if (sharedBuffer != 0) {
+ size_t frameCount = mReqFrameCount;
+ if (!audio_is_linear_pcm(mFormat)) {
+ if (mSharedBuffer != 0) {
// Same comment as below about ignoring frameCount parameter for set()
- frameCount = sharedBuffer->size();
+ frameCount = mSharedBuffer->size();
} else if (frameCount == 0) {
frameCount = afFrameCount;
}
if (mNotificationFramesAct != frameCount) {
mNotificationFramesAct = frameCount;
}
- } else if (sharedBuffer != 0) {
+ } else if (mSharedBuffer != 0) {
// Ensure that buffer alignment matches channel count
// 8-bit data in shared memory is not currently supported by AudioFlinger
- size_t alignment = /* format == AUDIO_FORMAT_PCM_8_BIT ? 1 : */ 2;
+ size_t alignment = audio_bytes_per_sample(
+ mFormat == AUDIO_FORMAT_PCM_8_BIT ? AUDIO_FORMAT_PCM_16_BIT : mFormat);
+ if (alignment & 1) {
+ alignment = 1;
+ }
if (mChannelCount > 1) {
// More than 2 channels does not require stronger alignment than stereo
alignment <<= 1;
}
- if (((uintptr_t)sharedBuffer->pointer() & (alignment - 1)) != 0) {
+ if (((uintptr_t)mSharedBuffer->pointer() & (alignment - 1)) != 0) {
ALOGE("Invalid buffer alignment: address %p, channel count %u",
- sharedBuffer->pointer(), mChannelCount);
- return BAD_VALUE;
+ mSharedBuffer->pointer(), mChannelCount);
+ status = BAD_VALUE;
+ goto release;
}
// When initializing a shared buffer AudioTrack via constructors,
// there's no frameCount parameter.
// But when initializing a shared buffer AudioTrack via set(),
// there _is_ a frameCount parameter. We silently ignore it.
- frameCount = sharedBuffer->size()/mChannelCount/sizeof(int16_t);
+ frameCount = mSharedBuffer->size() / mFrameSizeAF;
- } else if (!(flags & AUDIO_OUTPUT_FLAG_FAST)) {
+ } else if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
// FIXME move these calculations and associated checks to server
// Ensure that buffer depth covers at least audio hardware latency
uint32_t minBufCount = afLatency / ((1000 * afFrameCount)/afSampleRate);
- ALOGV("afFrameCount=%d, minBufCount=%d, afSampleRate=%u, afLatency=%d",
+ ALOGV("afFrameCount=%zu, minBufCount=%d, afSampleRate=%u, afLatency=%d",
afFrameCount, minBufCount, afSampleRate, afLatency);
if (minBufCount <= nBuffering) {
minBufCount = nBuffering;
}
- size_t minFrameCount = (afFrameCount*sampleRate*minBufCount)/afSampleRate;
- ALOGV("minFrameCount: %u, afFrameCount=%d, minBufCount=%d, sampleRate=%u, afSampleRate=%u"
+ size_t minFrameCount = (afFrameCount*mSampleRate*minBufCount)/afSampleRate;
+ ALOGV("minFrameCount: %zu, afFrameCount=%zu, minBufCount=%d, sampleRate=%u, afSampleRate=%u"
", afLatency=%d",
- minFrameCount, afFrameCount, minBufCount, sampleRate, afSampleRate, afLatency);
+ minFrameCount, afFrameCount, minBufCount, mSampleRate, afSampleRate, afLatency);
if (frameCount == 0) {
frameCount = minFrameCount;
} else if (frameCount < minFrameCount) {
// not ALOGW because it happens all the time when playing key clicks over A2DP
- ALOGV("Minimum buffer size corrected from %d to %d",
+ ALOGV("Minimum buffer size corrected from %zu to %zu",
frameCount, minFrameCount);
frameCount = minFrameCount;
}
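A worked pass through the shared-buffer checks above (the configurations are illustrative; byte sizes follow from audio_bytes_per_sample()):

    // stereo 16-bit: alignment = 2 bytes/sample, doubled for >1 channel -> sharedBuffer must be 4-byte aligned
    // mono 16-bit:   alignment stays at 2 bytes
    // 5.1 16-bit:    still 4 bytes; more than 2 channels does not require stronger alignment than stereo
    // an odd bytes-per-sample value (e.g. packed 24-bit) collapses the alignment requirement to 1 byte
    // the frame count is now derived from the AudioFlinger-side frame size:
    //     a 16384-byte stereo 16-bit shared buffer -> frameCount = 16384 / mFrameSizeAF = 16384 / 4 = 4096 frames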
@@ -986,42 +1028,58 @@ status_t AudioTrack::createTrack_l(
}
pid_t tid = -1;
- if (flags & AUDIO_OUTPUT_FLAG_FAST) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
trackFlags |= IAudioFlinger::TRACK_FAST;
if (mAudioTrackThread != 0) {
tid = mAudioTrackThread->getTid();
}
}
- if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
trackFlags |= IAudioFlinger::TRACK_OFFLOAD;
}
- sp<IAudioTrack> track = audioFlinger->createTrack(streamType,
- sampleRate,
+ if (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ trackFlags |= IAudioFlinger::TRACK_DIRECT;
+ }
+
+ size_t temp = frameCount; // temp may be replaced by a revised value of frameCount,
+ // but we will still need the original value also
+ sp<IAudioTrack> track = audioFlinger->createTrack(mStreamType,
+ mSampleRate,
// AudioFlinger only sees 16-bit PCM
- format == AUDIO_FORMAT_PCM_8_BIT ?
- AUDIO_FORMAT_PCM_16_BIT : format,
+ mFormat == AUDIO_FORMAT_PCM_8_BIT &&
+ !(mFlags & AUDIO_OUTPUT_FLAG_DIRECT) ?
+ AUDIO_FORMAT_PCM_16_BIT : mFormat,
mChannelMask,
- frameCount,
+ &temp,
&trackFlags,
- sharedBuffer,
+ mSharedBuffer,
output,
tid,
&mSessionId,
- mName,
mClientUid,
&status);
- if (track == 0) {
+ if (status != NO_ERROR) {
ALOGE("AudioFlinger could not create track, status: %d", status);
- return status;
+ goto release;
}
+ ALOG_ASSERT(track != 0);
+
+ // AudioFlinger now owns the reference to the I/O handle,
+ // so we are no longer responsible for releasing it.
+
sp<IMemory> iMem = track->getCblk();
if (iMem == 0) {
ALOGE("Could not get control block");
return NO_INIT;
}
+ void *iMemPointer = iMem->pointer();
+ if (iMemPointer == NULL) {
+ ALOGE("Could not get control block pointer");
+ return NO_INIT;
+ }
// invariant that mAudioTrack != 0 is true only after set() returns successfully
if (mAudioTrack != 0) {
mAudioTrack->asBinder()->unlinkToDeath(mDeathNotifier, this);
@@ -1029,22 +1087,25 @@ status_t AudioTrack::createTrack_l(
}
mAudioTrack = track;
mCblkMemory = iMem;
- audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMem->pointer());
+ IPCThreadState::self()->flushCommands();
+
+ audio_track_cblk_t* cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
mCblk = cblk;
- size_t temp = cblk->frameCount_;
+ // note that temp is the (possibly revised) value of frameCount
if (temp < frameCount || (frameCount == 0 && temp == 0)) {
// In current design, AudioTrack client checks and ensures frame count validity before
// passing it to AudioFlinger so AudioFlinger should not return a different value except
// for fast track as it uses a special method of assigning frame count.
- ALOGW("Requested frameCount %u but received frameCount %u", frameCount, temp);
+ ALOGW("Requested frameCount %zu but received frameCount %zu", frameCount, temp);
}
frameCount = temp;
+
mAwaitBoost = false;
- if (flags & AUDIO_OUTPUT_FLAG_FAST) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_FAST) {
if (trackFlags & IAudioFlinger::TRACK_FAST) {
- ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %u", frameCount);
+ ALOGV("AUDIO_OUTPUT_FLAG_FAST successful; frameCount %zu", frameCount);
mAwaitBoost = true;
- if (sharedBuffer == 0) {
+ if (mSharedBuffer == 0) {
// Theoretically double-buffering is not required for fast tracks,
// due to tighter scheduling. But in practice, to accommodate kernels with
// scheduling jitter, and apps with computation jitter, we use double-buffering.
@@ -1053,28 +1114,39 @@ status_t AudioTrack::createTrack_l(
}
}
} else {
- ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %u", frameCount);
+ ALOGV("AUDIO_OUTPUT_FLAG_FAST denied by server; frameCount %zu", frameCount);
// once denied, do not request again if IAudioTrack is re-created
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_FAST);
- mFlags = flags;
- if (sharedBuffer == 0) {
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_FAST);
+ if (mSharedBuffer == 0) {
if (mNotificationFramesAct == 0 || mNotificationFramesAct > frameCount/nBuffering) {
mNotificationFramesAct = frameCount/nBuffering;
}
}
}
}
- if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+ if (mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
if (trackFlags & IAudioFlinger::TRACK_OFFLOAD) {
ALOGV("AUDIO_OUTPUT_FLAG_OFFLOAD successful");
} else {
ALOGW("AUDIO_OUTPUT_FLAG_OFFLOAD denied by server");
- flags = (audio_output_flags_t) (flags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
- mFlags = flags;
- return NO_INIT;
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+ // FIXME This is a warning, not an error, so don't return error status
+ //return NO_INIT;
+ }
+ }
+ if (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) {
+ if (trackFlags & IAudioFlinger::TRACK_DIRECT) {
+ ALOGV("AUDIO_OUTPUT_FLAG_DIRECT successful");
+ } else {
+ ALOGW("AUDIO_OUTPUT_FLAG_DIRECT denied by server");
+ mFlags = (audio_output_flags_t) (mFlags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+ // FIXME This is a warning, not an error, so don't return error status
+ //return NO_INIT;
}
}
+ // We retain a copy of the I/O handle, but don't own the reference
+ mOutput = output;
mRefreshRemaining = true;
// Starting address of buffers in shared memory. If there is a shared buffer, buffers
@@ -1082,15 +1154,16 @@ status_t AudioTrack::createTrack_l(
// immediately after the control block. This address is for the mapping within client
// address space. AudioFlinger::TrackBase::mBuffer is for the server address space.
void* buffers;
- if (sharedBuffer == 0) {
+ if (mSharedBuffer == 0) {
buffers = (char*)cblk + sizeof(audio_track_cblk_t);
} else {
- buffers = sharedBuffer->pointer();
+ buffers = mSharedBuffer->pointer();
}
mAudioTrack->attachAuxEffect(mAuxEffectId);
// FIXME don't believe this lie
- mLatency = afLatency + (1000*frameCount) / sampleRate;
+ mLatency = afLatency + (1000*frameCount) / mSampleRate;
+
mFrameCount = frameCount;
// If IAudioTrack is re-created, don't let the requested frameCount
// decrease. This can confuse clients that cache frameCount().
@@ -1099,15 +1172,14 @@ status_t AudioTrack::createTrack_l(
}
// update proxy
- if (sharedBuffer == 0) {
+ if (mSharedBuffer == 0) {
mStaticProxy.clear();
mProxy = new AudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
} else {
mStaticProxy = new StaticAudioTrackClientProxy(cblk, buffers, frameCount, mFrameSizeAF);
mProxy = mStaticProxy;
}
- mProxy->setVolumeLR((uint32_t(uint16_t(mVolume[RIGHT] * 0x1000)) << 16) |
- uint16_t(mVolume[LEFT] * 0x1000));
+ mProxy->setVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY);
mProxy->setSendLevel(mSendLevel);
mProxy->setSampleRate(mSampleRate);
mProxy->setEpoch(epoch);
@@ -1117,6 +1189,14 @@ status_t AudioTrack::createTrack_l(
mAudioTrack->asBinder()->linkToDeath(mDeathNotifier, this);
return NO_ERROR;
+ }
+
+release:
+ AudioSystem::releaseOutput(output);
+ if (status == NO_ERROR) {
+ status = NO_INIT;
+ }
+ return status;
}
status_t AudioTrack::obtainBuffer(Buffer* audioBuffer, int32_t waitCount)
@@ -1244,8 +1324,7 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer)
if (mState == STATE_ACTIVE) {
audio_track_cblk_t* cblk = mCblk;
if (android_atomic_and(~CBLK_DISABLED, &cblk->mFlags) & CBLK_DISABLED) {
- ALOGW("releaseBuffer() track %p name=%s disabled due to previous underrun, restarting",
- this, mName.string());
+ ALOGW("releaseBuffer() track %p disabled due to previous underrun, restarting", this);
// FIXME ignoring status
mAudioTrack->start();
}
@@ -1254,12 +1333,22 @@ void AudioTrack::releaseBuffer(Buffer* audioBuffer)
// -------------------------------------------------------------------------
-ssize_t AudioTrack::write(const void* buffer, size_t userSize)
+ssize_t AudioTrack::write(const void* buffer, size_t userSize, bool blocking)
{
if (mTransfer != TRANSFER_SYNC || mIsTimed) {
return INVALID_OPERATION;
}
+ if (isDirect()) {
+ AutoMutex lock(mLock);
+ int32_t flags = android_atomic_and(
+ ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END),
+ &mCblk->mFlags);
+ if (flags & CBLK_INVALID) {
+ return DEAD_OBJECT;
+ }
+ }
+
if (ssize_t(userSize) < 0 || (buffer == NULL && userSize != 0)) {
// Sanity-check: user is most-likely passing an error code, and it would
// make the return value ambiguous (actualSize vs error).
@@ -1273,7 +1362,8 @@ ssize_t AudioTrack::write(const void* buffer, size_t userSize)
while (userSize >= mFrameSize) {
audioBuffer.frameCount = userSize / mFrameSize;
- status_t err = obtainBuffer(&audioBuffer, &ClientProxy::kForever);
+ status_t err = obtainBuffer(&audioBuffer,
+ blocking ? &ClientProxy::kForever : &ClientProxy::kNonBlocking);
if (err < 0) {
if (written > 0) {
break;
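A hypothetical caller of the extended write() above; track, data and bytes are placeholder names, and the short-count / WOULD_BLOCK outcomes follow from the kNonBlocking obtainBuffer() path selected when blocking is false:

    ssize_t n = track->write(data, bytes, false /*blocking*/);
    if (n == WOULD_BLOCK) {
        // client-side buffer currently full and we chose not to wait; retry later
    } else if (n >= 0 && (size_t) n < bytes) {
        // partial write: only n bytes fit without blocking
    }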
@@ -1369,7 +1459,7 @@ status_t TimedAudioTrack::setMediaTimeTransform(const LinearTransform& xform,
// -------------------------------------------------------------------------
-nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
+nsecs_t AudioTrack::processAudioBuffer()
{
// Currently the AudioTrack thread is not created if there are no callbacks.
// Would it ever make sense to run the thread, even without callbacks?
@@ -1407,7 +1497,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
// for offloaded tracks restoreTrack_l() will just update the sequence and clear
// AudioSystem cache. We should not exit here but after calling the callback so
// that the upper layers can recreate the track
- if (!isOffloaded() || (mSequence == mObservedSequence)) {
+ if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) {
status_t status = restoreTrack_l("processAudioBuffer");
mLock.unlock();
// Run again immediately, but with a new IAudioTrack
@@ -1462,7 +1552,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
// Cache other fields that will be needed soon
uint32_t loopPeriod = mLoopPeriod;
uint32_t sampleRate = mSampleRate;
- size_t notificationFrames = mNotificationFramesAct;
+ uint32_t notificationFrames = mNotificationFramesAct;
if (mRefreshRemaining) {
mRefreshRemaining = false;
mRemainingFrames = notificationFrames;
@@ -1533,7 +1623,7 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
mObservedSequence = sequence;
mCbf(EVENT_NEW_IAUDIOTRACK, mUserData, NULL);
// for offloaded tracks, just wait for the upper layers to recreate the track
- if (isOffloaded()) {
+ if (isOffloadedOrDirect()) {
return NS_INACTIVE;
}
}
@@ -1591,10 +1681,10 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
size_t nonContig;
status_t err = obtainBuffer(&audioBuffer, requested, NULL, &nonContig);
LOG_ALWAYS_FATAL_IF((err != NO_ERROR) != (audioBuffer.frameCount == 0),
- "obtainBuffer() err=%d frameCount=%u", err, audioBuffer.frameCount);
+ "obtainBuffer() err=%d frameCount=%zu", err, audioBuffer.frameCount);
requested = &ClientProxy::kNonBlocking;
size_t avail = audioBuffer.frameCount + nonContig;
- ALOGV("obtainBuffer(%u) returned %u = %u + %u err %d",
+ ALOGV("obtainBuffer(%u) returned %zu = %zu + %zu err %d",
mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
if (err != NO_ERROR) {
if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR ||
@@ -1626,12 +1716,11 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
size_t reqSize = audioBuffer.size;
mCbf(EVENT_MORE_DATA, mUserData, &audioBuffer);
size_t writtenSize = audioBuffer.size;
- size_t writtenFrames = writtenSize / mFrameSize;
// Sanity check on returned size
if (ssize_t(writtenSize) < 0 || writtenSize > reqSize) {
- ALOGE("EVENT_MORE_DATA requested %u bytes but callback returned %d bytes",
- reqSize, (int) writtenSize);
+ ALOGE("EVENT_MORE_DATA requested %zu bytes but callback returned %zd bytes",
+ reqSize, ssize_t(writtenSize));
return NS_NEVER;
}
@@ -1692,22 +1781,19 @@ nsecs_t AudioTrack::processAudioBuffer(const sp<AudioTrackThread>& thread)
status_t AudioTrack::restoreTrack_l(const char *from)
{
ALOGW("dead IAudioTrack, %s, creating a new one from %s()",
- isOffloaded() ? "Offloaded" : "PCM", from);
+ isOffloadedOrDirect_l() ? "Offloaded or Direct" : "PCM", from);
++mSequence;
status_t result;
// refresh the audio configuration cache in this process to make sure we get new
- // output parameters in getOutput_l() and createTrack_l()
+ // output parameters in createTrack_l()
AudioSystem::clearAudioConfigCache();
- if (isOffloaded()) {
+ if (isOffloadedOrDirect_l()) {
+ // FIXME re-creation of offloaded tracks is not yet implemented
return DEAD_OBJECT;
}
- // force new output query from audio policy manager;
- mOutput = 0;
- audio_io_handle_t output = getOutput_l();
-
// if the new IAudioTrack is created, createTrack_l() will modify the
// following member variables: mAudioTrack, mCblkMemory and mCblk.
// It will also delete the strong references on previous IAudioTrack and IMemory
@@ -1715,14 +1801,7 @@ status_t AudioTrack::restoreTrack_l(const char *from)
// take the frames that will be lost by track recreation into account in saved position
size_t position = mProxy->getPosition() + mProxy->getFramesFilled();
size_t bufferPosition = mStaticProxy != NULL ? mStaticProxy->getBufferPosition() : 0;
- result = createTrack_l(mStreamType,
- mSampleRate,
- mFormat,
- mReqFrameCount, // so that frame count never goes down
- mFlags,
- mSharedBuffer,
- output,
- position /*epoch*/);
+ result = createTrack_l(position /*epoch*/);
if (result == NO_ERROR) {
// continue playback from last known position, but
@@ -1750,10 +1829,6 @@ status_t AudioTrack::restoreTrack_l(const char *from)
}
}
if (result != NO_ERROR) {
- //Use of direct and offloaded output streams is ref counted by audio policy manager.
- // As getOutput was called above and resulted in an output stream to be opened,
- // we need to release it.
- AudioSystem::releaseOutput(output);
ALOGW("restoreTrack_l() failed status %d", result);
mState = STATE_STOPPED;
}
@@ -1786,14 +1861,34 @@ status_t AudioTrack::getTimestamp(AudioTimestamp& timestamp)
String8 AudioTrack::getParameters(const String8& keys)
{
- if (mOutput) {
- return AudioSystem::getParameters(mOutput, keys);
+ audio_io_handle_t output = getOutput();
+ if (output != AUDIO_IO_HANDLE_NONE) {
+ return AudioSystem::getParameters(output, keys);
} else {
return String8::empty();
}
}
-status_t AudioTrack::dump(int fd, const Vector<String16>& args) const
+bool AudioTrack::isOffloaded() const
+{
+ AutoMutex lock(mLock);
+ return isOffloaded_l();
+}
+
+bool AudioTrack::isDirect() const
+{
+ AutoMutex lock(mLock);
+ return isDirect_l();
+}
+
+bool AudioTrack::isOffloadedOrDirect() const
+{
+ AutoMutex lock(mLock);
+ return isOffloadedOrDirect_l();
+}
+
+
+status_t AudioTrack::dump(int fd, const Vector<String16>& args __unused) const
{
const size_t SIZE = 256;
@@ -1802,7 +1897,7 @@ status_t AudioTrack::dump(int fd, const Vector<String16>& args) const
result.append(" AudioTrack::dump\n");
snprintf(buffer, 255, " stream type(%d), left - right volume(%f, %f)\n", mStreamType,
- mVolume[0], mVolume[1]);
+ mVolume[AUDIO_INTERLEAVE_LEFT], mVolume[AUDIO_INTERLEAVE_RIGHT]);
result.append(buffer);
snprintf(buffer, 255, " format(%d), channel count(%d), frame count(%zu)\n", mFormat,
mChannelCount, mFrameCount);
@@ -1821,9 +1916,139 @@ uint32_t AudioTrack::getUnderrunFrames() const
return mProxy->getUnderrunFrames();
}
+void AudioTrack::setAttributesFromStreamType(audio_stream_type_t streamType) {
+ mAttributes.flags = 0x0;
+
+ switch(streamType) {
+ case AUDIO_STREAM_DEFAULT:
+ case AUDIO_STREAM_MUSIC:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+ mAttributes.usage = AUDIO_USAGE_MEDIA;
+ break;
+ case AUDIO_STREAM_VOICE_CALL:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+ mAttributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+ break;
+ case AUDIO_STREAM_ENFORCED_AUDIBLE:
+ mAttributes.flags |= AUDIO_FLAG_AUDIBILITY_ENFORCED;
+ // intended fall through, attributes in common with STREAM_SYSTEM
+ case AUDIO_STREAM_SYSTEM:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ mAttributes.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+ break;
+ case AUDIO_STREAM_RING:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ mAttributes.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+ break;
+ case AUDIO_STREAM_ALARM:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ mAttributes.usage = AUDIO_USAGE_ALARM;
+ break;
+ case AUDIO_STREAM_NOTIFICATION:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ mAttributes.usage = AUDIO_USAGE_NOTIFICATION;
+ break;
+ case AUDIO_STREAM_BLUETOOTH_SCO:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+ mAttributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+ mAttributes.flags |= AUDIO_FLAG_SCO;
+ break;
+ case AUDIO_STREAM_DTMF:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+ mAttributes.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+ break;
+ case AUDIO_STREAM_TTS:
+ mAttributes.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+ mAttributes.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+ break;
+ default:
+ ALOGE("invalid stream type %d when converting to attributes", streamType);
+ }
+}
+
+void AudioTrack::setStreamTypeFromAttributes(audio_attributes_t& aa) {
+ // flags to stream type mapping
+ if ((aa.flags & AUDIO_FLAG_AUDIBILITY_ENFORCED) == AUDIO_FLAG_AUDIBILITY_ENFORCED) {
+ mStreamType = AUDIO_STREAM_ENFORCED_AUDIBLE;
+ return;
+ }
+ if ((aa.flags & AUDIO_FLAG_SCO) == AUDIO_FLAG_SCO) {
+ mStreamType = AUDIO_STREAM_BLUETOOTH_SCO;
+ return;
+ }
+
+ // usage to stream type mapping
+ switch (aa.usage) {
+ case AUDIO_USAGE_MEDIA:
+ case AUDIO_USAGE_GAME:
+ case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+ case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+ mStreamType = AUDIO_STREAM_MUSIC;
+ return;
+ case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
+ mStreamType = AUDIO_STREAM_SYSTEM;
+ return;
+ case AUDIO_USAGE_VOICE_COMMUNICATION:
+ mStreamType = AUDIO_STREAM_VOICE_CALL;
+ return;
+
+ case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+ mStreamType = AUDIO_STREAM_DTMF;
+ return;
+
+ case AUDIO_USAGE_ALARM:
+ mStreamType = AUDIO_STREAM_ALARM;
+ return;
+ case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
+ mStreamType = AUDIO_STREAM_RING;
+ return;
+
+ case AUDIO_USAGE_NOTIFICATION:
+ case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+ case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+ case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+ case AUDIO_USAGE_NOTIFICATION_EVENT:
+ mStreamType = AUDIO_STREAM_NOTIFICATION;
+ return;
+
+ case AUDIO_USAGE_UNKNOWN:
+ default:
+ mStreamType = AUDIO_STREAM_MUSIC;
+ }
+}
+
+bool AudioTrack::isValidAttributes(const audio_attributes_t *paa) {
+ // has flags that map to a strategy?
+ if ((paa->flags & (AUDIO_FLAG_AUDIBILITY_ENFORCED | AUDIO_FLAG_SCO)) != 0) {
+ return true;
+ }
+
+ // has known usage?
+ switch (paa->usage) {
+ case AUDIO_USAGE_UNKNOWN:
+ case AUDIO_USAGE_MEDIA:
+ case AUDIO_USAGE_VOICE_COMMUNICATION:
+ case AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING:
+ case AUDIO_USAGE_ALARM:
+ case AUDIO_USAGE_NOTIFICATION:
+ case AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE:
+ case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+ case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+ case AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+ case AUDIO_USAGE_NOTIFICATION_EVENT:
+ case AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY:
+ case AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+ case AUDIO_USAGE_ASSISTANCE_SONIFICATION:
+ case AUDIO_USAGE_GAME:
+ break;
+ default:
+ return false;
+ }
+ return true;
+}
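setStreamTypeFromAttributes() above consults flags before usage, so an attribute set carrying AUDIO_FLAG_SCO or AUDIO_FLAG_AUDIBILITY_ENFORCED always wins over its usage field. A minimal standalone sketch of that precedence, using simplified stand-in enums rather than the real system/audio.h types:

#include <cstdio>

// Simplified stand-ins for the platform enums; the real code uses
// audio_attributes_t / audio_stream_type_t from system/audio.h.
enum Flags  { FLAG_AUDIBILITY_ENFORCED = 1 << 0, FLAG_SCO = 1 << 2 };
enum Usage  { USAGE_UNKNOWN, USAGE_MEDIA, USAGE_ALARM, USAGE_VOICE_COMMUNICATION };
enum Stream { STREAM_MUSIC, STREAM_ALARM, STREAM_VOICE_CALL,
              STREAM_ENFORCED_AUDIBLE, STREAM_BLUETOOTH_SCO };

struct Attributes { int flags; Usage usage; };

// Mirrors the precedence in setStreamTypeFromAttributes(): flags win over usage.
Stream streamFromAttributes(const Attributes& aa) {
    if (aa.flags & FLAG_AUDIBILITY_ENFORCED) return STREAM_ENFORCED_AUDIBLE;
    if (aa.flags & FLAG_SCO)                 return STREAM_BLUETOOTH_SCO;
    switch (aa.usage) {
        case USAGE_ALARM:               return STREAM_ALARM;
        case USAGE_VOICE_COMMUNICATION: return STREAM_VOICE_CALL;
        default:                        return STREAM_MUSIC;   // UNKNOWN, MEDIA, ...
    }
}

int main() {
    Attributes alarmOverSco = { FLAG_SCO, USAGE_ALARM };
    // Flags take precedence, so this resolves to the SCO stream, not the alarm stream.
    printf("%d\n", (int) streamFromAttributes(alarmOverSco));  // prints 4 (STREAM_BLUETOOTH_SCO)
    return 0;
}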
// =========================================================================
-void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who)
+void AudioTrack::DeathNotifier::binderDied(const wp<IBinder>& who __unused)
{
sp<AudioTrack> audioTrack = mAudioTrack.promote();
if (audioTrack != 0) {
@@ -1867,7 +2092,7 @@ bool AudioTrack::AudioTrackThread::threadLoop()
return true;
}
}
- nsecs_t ns = mReceiver.processAudioBuffer(this);
+ nsecs_t ns = mReceiver.processAudioBuffer();
switch (ns) {
case 0:
return true;
@@ -1881,7 +2106,7 @@ bool AudioTrack::AudioTrackThread::threadLoop()
ns = 1000000000LL;
// fall through
default:
- LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %lld", ns);
+ LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
pauseInternal(ns);
return true;
}
diff --git a/media/libmedia/AudioTrackShared.cpp b/media/libmedia/AudioTrackShared.cpp
index d5b0b07..eec025e 100644
--- a/media/libmedia/AudioTrackShared.cpp
+++ b/media/libmedia/AudioTrackShared.cpp
@@ -26,8 +26,8 @@
namespace android {
audio_track_cblk_t::audio_track_cblk_t()
- : mServer(0), frameCount_(0), mFutex(0), mMinimum(0),
- mVolumeLR(0x10001000), mSampleRate(0), mSendLevel(0), mFlags(0)
+ : mServer(0), mFutex(0), mMinimum(0),
+ mVolumeLR(GAIN_MINIFLOAT_PACKED_UNITY), mSampleRate(0), mSendLevel(0), mFlags(0)
{
memset(&u, 0, sizeof(u));
}
@@ -134,10 +134,17 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques
ssize_t filled = rear - front;
// pipe should not be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
- mIsShutdown = true;
- status = NO_INIT;
- goto end;
+ if (mIsOut) {
+ ALOGE("Shared memory control block is corrupt (filled=%zd, mFrameCount=%zu); "
+ "shutting down", filled, mFrameCount);
+ mIsShutdown = true;
+ status = NO_INIT;
+ goto end;
+ }
+ // for input, sync up on overrun
+ filled = 0;
+ cblk->u.mStreaming.mFront = rear;
+ (void) android_atomic_or(CBLK_OVERRUN, &cblk->mFlags);
}
// don't allow filling pipe beyond the nominal size
size_t avail = mIsOut ? mFrameCount - filled : filled;
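The corruption check above is deliberately asymmetric: a playback (output) proxy treats an out-of-range fill level as fatal and shuts down, while a capture (input) proxy resynchronizes by discarding the pipe contents and raising CBLK_OVERRUN. A minimal standalone sketch of that policy, with simplified stand-in types in place of audio_track_cblk_t:

#include <cstdint>
#include <cstdio>
#include <sys/types.h>   // ssize_t

struct Pipe {
    int32_t front;       // consumer index
    int32_t rear;        // producer index
    size_t  frameCount;
    bool    isOut;       // true for playback (AudioTrack), false for capture (AudioRecord)
    bool    shutdown;
    bool    overrun;
};

// Mirrors the policy in the hunk above: a playback pipe with an impossible fill
// level shuts down; a capture pipe drops its contents and flags an overrun.
ssize_t sanityCheckedFill(Pipe& p) {
    ssize_t filled = p.rear - p.front;                 // difference of producer/consumer indices
    if (!(0 <= filled && (size_t) filled <= p.frameCount)) {
        if (p.isOut) {
            p.shutdown = true;                         // corrupt control block: give up
            return -1;
        }
        p.front = p.rear;                              // capture: resync and report overrun
        p.overrun = true;
        filled = 0;
    }
    return filled;
}

int main() {
    Pipe capture = { /* front */ 1000, /* rear */ 100, /* frameCount */ 256,
                     /* isOut */ false, /* shutdown */ false, /* overrun */ false };
    printf("filled=%zd overrun=%d\n", sanityCheckedFill(capture), (int) capture.overrun);
    return 0;
}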
@@ -200,7 +207,7 @@ status_t ClientProxy::obtainBuffer(Buffer* buffer, const struct timespec *reques
ts = &remaining;
break;
default:
- LOG_FATAL("obtainBuffer() timeout=%d", timeout);
+ LOG_ALWAYS_FATAL("obtainBuffer() timeout=%d", timeout);
ts = NULL;
break;
}
@@ -331,7 +338,7 @@ size_t ClientProxy::getFramesFilled() {
ssize_t filled = rear - front;
// pipe should not be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
+ ALOGE("Shared memory control block is corrupt (filled=%zd); shutting down", filled);
return 0;
}
return (size_t)filled;
@@ -429,7 +436,7 @@ status_t AudioTrackClientProxy::waitStreamEndDone(const struct timespec *request
ts = &remaining;
break;
default:
- LOG_FATAL("waitStreamEndDone() timeout=%d", timeout);
+ LOG_ALWAYS_FATAL("waitStreamEndDone() timeout=%d", timeout);
ts = NULL;
break;
}
@@ -470,7 +477,7 @@ StaticAudioTrackClientProxy::StaticAudioTrackClientProxy(audio_track_cblk_t* cbl
void StaticAudioTrackClientProxy::flush()
{
- LOG_FATAL("static flush");
+ LOG_ALWAYS_FATAL("static flush");
}
void StaticAudioTrackClientProxy::setLoop(size_t loopStart, size_t loopEnd, int loopCount)
@@ -548,7 +555,7 @@ status_t ServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
ssize_t filled = rear - front;
// pipe should not already be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
+ ALOGE("Shared memory control block is corrupt (filled=%zd); shutting down", filled);
mIsShutdown = true;
}
if (mIsShutdown) {
@@ -621,7 +628,7 @@ void ServerProxy::releaseBuffer(Buffer* buffer)
android_atomic_release_store(stepCount + rear, &cblk->u.mStreaming.mRear);
}
- mCblk->mServer += stepCount;
+ cblk->mServer += stepCount;
size_t half = mFrameCount / 2;
if (half == 0) {
@@ -635,7 +642,7 @@ void ServerProxy::releaseBuffer(Buffer* buffer)
}
// FIXME AudioRecord wakeup needs to be optimized; it currently wakes up client every time
if (!mIsOut || (mAvailToClient + stepCount >= minimum)) {
- ALOGV("mAvailToClient=%u stepCount=%u minimum=%u", mAvailToClient, stepCount, minimum);
+ ALOGV("mAvailToClient=%zu stepCount=%zu minimum=%zu", mAvailToClient, stepCount, minimum);
int32_t old = android_atomic_or(CBLK_FUTEX_WAKE, &cblk->mFutex);
if (!(old & CBLK_FUTEX_WAKE)) {
(void) syscall(__NR_futex, &cblk->mFutex,
@@ -668,7 +675,7 @@ size_t AudioTrackServerProxy::framesReady()
ssize_t filled = rear - cblk->u.mStreaming.mFront;
// pipe should not already be overfull
if (!(0 <= filled && (size_t) filled <= mFrameCount)) {
- ALOGE("Shared memory control block is corrupt (filled=%d); shutting down", filled);
+ ALOGE("Shared memory control block is corrupt (filled=%zd); shutting down", filled);
mIsShutdown = true;
return 0;
}
@@ -679,10 +686,11 @@ size_t AudioTrackServerProxy::framesReady()
}
bool AudioTrackServerProxy::setStreamEndDone() {
+ audio_track_cblk_t* cblk = mCblk;
bool old =
- (android_atomic_or(CBLK_STREAM_END_DONE, &mCblk->mFlags) & CBLK_STREAM_END_DONE) != 0;
+ (android_atomic_or(CBLK_STREAM_END_DONE, &cblk->mFlags) & CBLK_STREAM_END_DONE) != 0;
if (!old) {
- (void) syscall(__NR_futex, &mCblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
+ (void) syscall(__NR_futex, &cblk->mFutex, mClientInServer ? FUTEX_WAKE_PRIVATE : FUTEX_WAKE,
1);
}
return old;
@@ -690,10 +698,11 @@ bool AudioTrackServerProxy::setStreamEndDone() {
void AudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
{
- mCblk->u.mStreaming.mUnderrunFrames += frameCount;
+ audio_track_cblk_t* cblk = mCblk;
+ cblk->u.mStreaming.mUnderrunFrames += frameCount;
// FIXME also wake futex so that underrun is noticed more quickly
- (void) android_atomic_or(CBLK_UNDERRUN, &mCblk->mFlags);
+ (void) android_atomic_or(CBLK_UNDERRUN, &cblk->mFlags);
}
// ---------------------------------------------------------------------------
@@ -771,7 +780,7 @@ ssize_t StaticAudioTrackServerProxy::pollPosition()
return (ssize_t) position;
}
-status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush)
+status_t StaticAudioTrackServerProxy::obtainBuffer(Buffer* buffer, bool ackFlush __unused)
{
if (mIsShutdown) {
buffer->mFrameCount = 0;
@@ -825,7 +834,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
size_t newPosition = position + stepCount;
int32_t setFlags = 0;
if (!(position <= newPosition && newPosition <= mFrameCount)) {
- ALOGW("%s newPosition %u outside [%u, %u]", __func__, newPosition, position, mFrameCount);
+ ALOGW("%s newPosition %zu outside [%zu, %zu]", __func__, newPosition, position, mFrameCount);
newPosition = mFrameCount;
} else if (mState.mLoopCount != 0 && newPosition == mState.mLoopEnd) {
if (mState.mLoopCount == -1 || --mState.mLoopCount != 0) {
@@ -854,7 +863,7 @@ void StaticAudioTrackServerProxy::releaseBuffer(Buffer* buffer)
buffer->mNonContig = 0;
}
-void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount)
+void StaticAudioTrackServerProxy::tallyUnderrunFrames(uint32_t frameCount __unused)
{
// Unlike AudioTrackServerProxy::tallyUnderrunFrames() used for streaming tracks,
// we don't have a location to count underrun frames. The underrun frame counter
diff --git a/media/libmedia/CharacterEncodingDetector.cpp b/media/libmedia/CharacterEncodingDetector.cpp
new file mode 100644
index 0000000..7d1ddfd
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetector.cpp
@@ -0,0 +1,441 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CharacterEncodingDector"
+#include <utils/Log.h>
+
+#include "CharacterEncodingDetector.h"
+#include "CharacterEncodingDetectorTables.h"
+
+#include "utils/Vector.h"
+#include "StringArray.h"
+
+#include "unicode/ucnv.h"
+#include "unicode/ucsdet.h"
+#include "unicode/ustring.h"
+
+namespace android {
+
+CharacterEncodingDetector::CharacterEncodingDetector() {
+
+ UErrorCode status = U_ZERO_ERROR;
+ mUtf8Conv = ucnv_open("UTF-8", &status);
+ if (U_FAILURE(status)) {
+ ALOGE("could not create UConverter for UTF-8");
+ mUtf8Conv = NULL;
+ }
+}
+
+CharacterEncodingDetector::~CharacterEncodingDetector() {
+ ucnv_close(mUtf8Conv);
+}
+
+void CharacterEncodingDetector::addTag(const char *name, const char *value) {
+ mNames.push_back(name);
+ mValues.push_back(value);
+}
+
+size_t CharacterEncodingDetector::size() {
+ return mNames.size();
+}
+
+status_t CharacterEncodingDetector::getTag(int index, const char **name, const char**value) {
+ if (index >= mNames.size()) {
+ return BAD_VALUE;
+ }
+
+ *name = mNames.getEntry(index);
+ *value = mValues.getEntry(index);
+ return OK;
+}
+
+static bool isPrintableAscii(const char *value, size_t len) {
+ for (size_t i = 0; i < len; i++) {
+ if ((value[i] & 0x80) || value[i] < 0x20 || value[i] == 0x7f) {
+ return false;
+ }
+ }
+ return true;
+}
+
+void CharacterEncodingDetector::detectAndConvert() {
+
+ int size = mNames.size();
+ ALOGV("%d tags before conversion", size);
+ for (int i = 0; i < size; i++) {
+ ALOGV("%s: %s", mNames.getEntry(i), mValues.getEntry(i));
+ }
+
+ if (size && mUtf8Conv) {
+
+ UErrorCode status = U_ZERO_ERROR;
+ UCharsetDetector *csd = ucsdet_open(&status);
+ const UCharsetMatch *ucm;
+
+ // try combined detection of artist/album/title etc.
+ char buf[1024];
+ buf[0] = 0;
+ int idx;
+ bool allprintable = true;
+ for (int i = 0; i < size; i++) {
+ const char *name = mNames.getEntry(i);
+ const char *value = mValues.getEntry(i);
+ if (!isPrintableAscii(value, strlen(value)) && (
+ !strcmp(name, "artist") ||
+ !strcmp(name, "albumartist") ||
+ !strcmp(name, "composer") ||
+ !strcmp(name, "genre") ||
+ !strcmp(name, "album") ||
+ !strcmp(name, "title"))) {
+ strlcat(buf, value, sizeof(buf));
+ // separate tags by space so ICU's ngram detector can do its job
+ strlcat(buf, " ", sizeof(buf));
+ allprintable = false;
+ }
+ }
+
+ const char *combinedenc = "UTF-8";
+ if (allprintable) {
+ // since 'buf' is empty, ICU would return a UTF-8 matcher with low confidence, so
+ // no need to even call it
+ ALOGV("all tags are printable, assuming ascii (%zu)", strlen(buf));
+ } else {
+ ucsdet_setText(csd, buf, strlen(buf), &status);
+ int32_t matches;
+ const UCharsetMatch** ucma = ucsdet_detectAll(csd, &matches, &status);
+ bool goodmatch = true;
+ const UCharsetMatch* bestCombinedMatch = getPreferred(buf, strlen(buf),
+ ucma, matches, &goodmatch);
+
+ if (!goodmatch && strlen(buf) < 20) {
+ ALOGV("not a good match, trying with more data");
+ // This string might be too short for ICU to do anything useful with.
+ // (real world example: "Björk" in ISO-8859-1 might be detected as GB18030, because
+ // the ISO detector reports a confidence of 0, while the GB18030 detector reports
+ // a confidence of 10 with no invalid characters)
+ // Append artist, album and title if they were previously omitted because they
+ // were printable ascii.
+ bool added = false;
+ for (int i = 0; i < size; i++) {
+ const char *name = mNames.getEntry(i);
+ const char *value = mValues.getEntry(i);
+ if (isPrintableAscii(value, strlen(value)) && (
+ !strcmp(name, "artist") ||
+ !strcmp(name, "album") ||
+ !strcmp(name, "title"))) {
+ strlcat(buf, value, sizeof(buf));
+ strlcat(buf, " ", sizeof(buf));
+ added = true;
+ }
+ }
+ if (added) {
+ ucsdet_setText(csd, buf, strlen(buf), &status);
+ ucma = ucsdet_detectAll(csd, &matches, &status);
+ bestCombinedMatch = getPreferred(buf, strlen(buf),
+ ucma, matches, &goodmatch);
+ if (!goodmatch) {
+ ALOGV("still not a good match after adding printable tags");
+ }
+ } else {
+ ALOGV("no printable tags to add");
+ }
+ }
+
+ if (bestCombinedMatch != NULL) {
+ combinedenc = ucsdet_getName(bestCombinedMatch, &status);
+ }
+ }
+
+ for (int i = 0; i < size; i++) {
+ const char *name = mNames.getEntry(i);
+ uint8_t* src = (uint8_t *)mValues.getEntry(i);
+ int len = strlen((char *)src);
+ uint8_t* dest = src;
+
+ ALOGV("@@@ checking %s", name);
+ const char *s = mValues.getEntry(i);
+ int32_t inputLength = strlen(s);
+ const char *enc;
+
+ if (!allprintable && (!strcmp(name, "artist") ||
+ !strcmp(name, "albumartist") ||
+ !strcmp(name, "composer") ||
+ !strcmp(name, "genre") ||
+ !strcmp(name, "album") ||
+ !strcmp(name, "title"))) {
+ // use encoding determined from the combination of artist/album/title etc.
+ enc = combinedenc;
+ } else {
+ if (isPrintableAscii(s, inputLength)) {
+ enc = "UTF-8";
+ ALOGV("@@@@ %s is ascii", mNames.getEntry(i));
+ } else {
+ ucsdet_setText(csd, s, inputLength, &status);
+ ucm = ucsdet_detect(csd, &status);
+ if (!ucm) {
+ mValues.setEntry(i, "???");
+ continue;
+ }
+ enc = ucsdet_getName(ucm, &status);
+ ALOGV("@@@@ recognized charset: %s for %s confidence %d",
+ enc, mNames.getEntry(i), ucsdet_getConfidence(ucm, &status));
+ }
+ }
+
+ if (strcmp(enc,"UTF-8") != 0) {
+ // only convert if the source encoding isn't already UTF-8
+ ALOGV("@@@ using converter %s for %s", enc, mNames.getEntry(i));
+ UConverter *conv = ucnv_open(enc, &status);
+ if (U_FAILURE(status)) {
+ ALOGE("could not create UConverter for %s", enc);
+ continue;
+ }
+
+ // convert from native encoding to UTF-8
+ const char* source = mValues.getEntry(i);
+ int targetLength = len * 3 + 1;
+ char* buffer = new char[targetLength];
+ // don't normally check for NULL, but in this case targetLength may be large
+ if (!buffer)
+ break;
+ char* target = buffer;
+
+ ucnv_convertEx(mUtf8Conv, conv, &target, target + targetLength,
+ &source, source + strlen(source),
+ NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
+
+ if (U_FAILURE(status)) {
+ ALOGE("ucnv_convertEx failed: %d", status);
+ mValues.setEntry(i, "???");
+ } else {
+ // zero terminate
+ *target = 0;
+ mValues.setEntry(i, buffer);
+ }
+
+ delete[] buffer;
+
+ ucnv_close(conv);
+ }
+ }
+
+ for (int i = size - 1; i >= 0; --i) {
+ if (strlen(mValues.getEntry(i)) == 0) {
+ ALOGV("erasing %s because entry is empty", mNames.getEntry(i));
+ mNames.erase(i);
+ mValues.erase(i);
+ }
+ }
+
+ ucsdet_close(csd);
+ }
+}
+
+/*
+ * When ICU detects multiple encoding matches, apply additional heuristics to determine
+ * which one is the best match, since ICU can't always be trusted to make the right choice.
+ *
+ * What this method does is:
+ * - decode the input using each of the matches found
+ * - recalculate the starting confidence level for multibyte encodings using a different
+ * algorithm and larger frequent character lists than ICU
+ * - devalue encodings whose conversion contains unlikely characters (symbols, reserved, etc.)
+ * - pick the highest match
+ * - signal to the caller whether this match is considered good: confidence > 15, and confidence
+ * delta with the next runner up > 15
+ */
+const UCharsetMatch *CharacterEncodingDetector::getPreferred(
+ const char *input, size_t len,
+ const UCharsetMatch** ucma, size_t nummatches,
+ bool *goodmatch) {
+
+ *goodmatch = false;
+ Vector<const UCharsetMatch*> matches;
+ UErrorCode status = U_ZERO_ERROR;
+
+ ALOGV("%zu matches", nummatches);
+ for (size_t i = 0; i < nummatches; i++) {
+ const char *encname = ucsdet_getName(ucma[i], &status);
+ int confidence = ucsdet_getConfidence(ucma[i], &status);
+ ALOGV("%zu: %s %d", i, encname, confidence);
+ matches.push_back(ucma[i]);
+ }
+
+ size_t num = matches.size();
+ if (num == 0) {
+ return NULL;
+ }
+ if (num == 1) {
+ int confidence = ucsdet_getConfidence(matches[0], &status);
+ if (confidence > 15) {
+ *goodmatch = true;
+ }
+ return matches[0];
+ }
+
+ ALOGV("considering %zu matches", num);
+
+ // keep track of how many "special" characters result when converting the input using each
+ // encoding
+ Vector<int> newconfidence;
+ for (size_t i = 0; i < num; i++) {
+ const uint16_t *freqdata = NULL;
+ float freqcoverage = 0;
+ status = U_ZERO_ERROR;
+ const char *encname = ucsdet_getName(matches[i], &status);
+ int confidence = ucsdet_getConfidence(matches[i], &status);
+ if (!strcmp("GB18030", encname)) {
+ freqdata = frequent_zhCN;
+ freqcoverage = frequent_zhCN_coverage;
+ } else if (!strcmp("Big5", encname)) {
+ freqdata = frequent_zhTW;
+ freqcoverage = frequent_zhTW_coverage;
+ } else if (!strcmp("EUC-KR", encname)) {
+ freqdata = frequent_ko;
+ freqcoverage = frequent_ko_coverage;
+ } else if (!strcmp("EUC-JP", encname)) {
+ freqdata = frequent_ja;
+ freqcoverage = frequent_ja_coverage;
+ } else if (!strcmp("Shift_JIS", encname)) {
+ freqdata = frequent_ja;
+ freqcoverage = frequent_ja_coverage;
+ }
+
+ ALOGV("%zu: %s %d", i, encname, confidence);
+ UConverter *conv = ucnv_open(encname, &status);
+ const char *source = input;
+ const char *sourceLimit = input + len;
+ status = U_ZERO_ERROR;
+ int demerit = 0;
+ int frequentchars = 0;
+ int totalchars = 0;
+ while (true) {
+ // demerit the current encoding for each "special" character found after conversion.
+ // The amount of demerit is somewhat arbitrarily chosen.
+ int inchar;
+ if (source != sourceLimit) {
+ inchar = (source[0] << 8) + source[1];
+ }
+ UChar32 c = ucnv_getNextUChar(conv, &source, sourceLimit, &status);
+ if (!U_SUCCESS(status)) {
+ break;
+ }
+ if (c < 0x20 || (c >= 0x7f && c <= 0x009f)) {
+ ALOGV("control character %x", c);
+ demerit += 100;
+ } else if ((c >= 0xa0 && c <= 0xbe) // symbols, superscripts
+ || (c == 0xd7) || (c == 0xf7) // multiplication and division signs
+ || (c >= 0x2000 && c <= 0x209f)) { // punctuation, superscripts
+ ALOGV("unlikely character %x", c);
+ demerit += 10;
+ } else if (c >= 0xe000 && c <= 0xf8ff) {
+ ALOGV("private use character %x", c);
+ demerit += 30;
+ } else if (c >= 0x2190 && c <= 0x2bff) {
+ // this range comprises various symbol ranges that are unlikely to appear in
+ // music file metadata.
+ ALOGV("symbol %x", c);
+ demerit += 10;
+ } else if (c == 0xfffd) {
+ ALOGV("replacement character");
+ demerit += 50;
+ } else if (c >= 0xfff0 && c <= 0xfffc) {
+ ALOGV("unicode special %x", c);
+ demerit += 50;
+ } else if (freqdata != NULL) {
+ totalchars++;
+ if (isFrequent(freqdata, c)) {
+ frequentchars++;
+ }
+ }
+ }
+ if (freqdata != NULL && totalchars != 0) {
+ int myconfidence = 10 + float((100 * frequentchars) / totalchars) / freqcoverage;
+ ALOGV("ICU confidence: %d, my confidence: %d (%d %d)", confidence, myconfidence,
+ totalchars, frequentchars);
+ if (myconfidence > 100) myconfidence = 100;
+ if (myconfidence < 0) myconfidence = 0;
+ confidence = myconfidence;
+ }
+ ALOGV("%d-%d=%d", confidence, demerit, confidence - demerit);
+ newconfidence.push_back(confidence - demerit);
+ ucnv_close(conv);
+ if (i == 0 && (confidence - demerit) == 100) {
+ // no need to check any further, we'll end up using this match anyway
+ break;
+ }
+ }
+
+ // find match with highest confidence after adjusting for unlikely characters
+ int highest = newconfidence[0];
+ size_t highestidx = 0;
+ int runnerup = -10000;
+ int runnerupidx = -10000;
+ num = newconfidence.size();
+ for (size_t i = 1; i < num; i++) {
+ if (newconfidence[i] > highest) {
+ runnerup = highest;
+ runnerupidx = highestidx;
+ highest = newconfidence[i];
+ highestidx = i;
+ } else if (newconfidence[i] > runnerup){
+ runnerup = newconfidence[i];
+ runnerupidx = i;
+ }
+ }
+ status = U_ZERO_ERROR;
+ ALOGV("selecting: '%s' w/ %d confidence",
+ ucsdet_getName(matches[highestidx], &status), highest);
+ if (runnerupidx < 0) {
+ ALOGV("no runner up");
+ if (highest > 15) {
+ *goodmatch = true;
+ }
+ } else {
+ ALOGV("runner up: '%s' w/ %d confidence",
+ ucsdet_getName(matches[runnerupidx], &status), runnerup);
+ if ((highest - runnerup) > 15) {
+ *goodmatch = true;
+ }
+ }
+ return matches[highestidx];
+}
+
+
+bool CharacterEncodingDetector::isFrequent(const uint16_t *values, uint32_t c) {
+
+ int start = 0;
+ int end = 511; // All the tables have 512 entries
+ int mid = (start+end)/2;
+
+ while(start <= end) {
+ if(c == values[mid]) {
+ return true;
+ } else if (c > values[mid]) {
+ start = mid + 1;
+ } else {
+ end = mid - 1;
+ }
+
+ mid = (start + end) / 2;
+ }
+
+ return false;
+}
+
+
+} // namespace android
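Taken together, the detector is driven through a small tag-oriented API (declared in the header that follows): feed it raw metadata byte strings with addTag(), run detectAndConvert() once, then read back UTF-8 values with getTag(). A minimal usage sketch with made-up tag bytes (the ISO-8859-1 "Björk" example from the comments above):

// Sketch only; assumes the CharacterEncodingDetector API shown in this file and
// in the header below. Tag values are illustrative, not real metadata.
#include "CharacterEncodingDetector.h"   // libmedia-internal header (see below)
#include <stdio.h>

void convertMetadataTags() {
    android::CharacterEncodingDetector detector;
    detector.addTag("artist", "Bj\xf6rk");          // ISO-8859-1 bytes, not valid UTF-8
    detector.addTag("album",  "Debut");
    detector.addTag("title",  "Human Behaviour");

    detector.detectAndConvert();                    // detect a shared encoding, convert to UTF-8

    for (int i = 0; i < (int) detector.size(); i++) {
        const char *name;
        const char *value;
        if (detector.getTag(i, &name, &value) == android::OK) {
            printf("%s: %s\n", name, value);        // values are UTF-8 from here on
        }
    }
}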
diff --git a/media/libmedia/CharacterEncodingDetector.h b/media/libmedia/CharacterEncodingDetector.h
new file mode 100644
index 0000000..7b5ed86
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetector.h
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _CHARACTER_ENCODING_DETECTOR_H
+#define _CHARACTER_ENCODING_DETECTOR_H
+
+#include <media/mediascanner.h>
+
+#include "StringArray.h"
+
+#include "unicode/ucnv.h"
+#include "unicode/ucsdet.h"
+#include "unicode/ustring.h"
+
+namespace android {
+
+class CharacterEncodingDetector {
+
+ public:
+ CharacterEncodingDetector();
+ ~CharacterEncodingDetector();
+
+ void addTag(const char *name, const char *value);
+ size_t size();
+
+ void detectAndConvert();
+ status_t getTag(int index, const char **name, const char**value);
+
+ private:
+ const UCharsetMatch *getPreferred(
+ const char *input, size_t len,
+ const UCharsetMatch** ucma, size_t matches,
+ bool *goodmatch);
+
+ bool isFrequent(const uint16_t *values, uint32_t c);
+
+ // cached name and value strings, for native encoding support.
+ // TODO: replace these with byte blob arrays that don't require the data to be
+ // single-null-byte terminated
+ StringArray mNames;
+ StringArray mValues;
+
+ UConverter* mUtf8Conv;
+};
+
+
+
+}; // namespace android
+
+#endif
diff --git a/media/libmedia/CharacterEncodingDetectorTables.h b/media/libmedia/CharacterEncodingDetectorTables.h
new file mode 100644
index 0000000..1fe1137
--- /dev/null
+++ b/media/libmedia/CharacterEncodingDetectorTables.h
@@ -0,0 +1,2092 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// The 512 most frequently occurring characters for the zhCN language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_zhCN[] = {
+ 0x4E00, // 一, #2
+ 0x4E07, // 万, #306
+ 0x4E09, // 三, #138
+ 0x4E0A, // 上, #16
+ 0x4E0B, // 下, #25
+ 0x4E0D, // 不, #7
+ 0x4E0E, // 与, #133
+ 0x4E13, // 专, #151
+ 0x4E16, // 世, #346
+ 0x4E1A, // 业, #39
+ 0x4E1C, // 东, #197
+ 0x4E24, // 两, #376
+ 0x4E2A, // 个, #23
+ 0x4E2D, // 中, #4
+ 0x4E3A, // 为, #31
+ 0x4E3B, // 主, #95
+ 0x4E3E, // 举, #418
+ 0x4E48, // 么, #93
+ 0x4E4B, // 之, #131
+ 0x4E50, // 乐, #130
+ 0x4E5F, // 也, #145
+ 0x4E66, // 书, #283
+ 0x4E70, // 买, #483
+ 0x4E86, // 了, #13
+ 0x4E8B, // 事, #168
+ 0x4E8C, // 二, #218
+ 0x4E8E, // 于, #64
+ 0x4E94, // 五, #430
+ 0x4E9A, // 亚, #468
+ 0x4E9B, // 些, #366
+ 0x4EA4, // 交, #243
+ 0x4EA7, // 产, #86
+ 0x4EAB, // 享, #345
+ 0x4EAC, // 京, #206
+ 0x4EBA, // 人, #3
+ 0x4EC0, // 什, #287
+ 0x4ECB, // 介, #478
+ 0x4ECE, // 从, #381
+ 0x4ED6, // 他, #129
+ 0x4EE3, // 代, #241
+ 0x4EE5, // 以, #51
+ 0x4EEC, // 们, #83
+ 0x4EF6, // 件, #141
+ 0x4EF7, // 价, #140
+ 0x4EFB, // 任, #383
+ 0x4F01, // 企, #439
+ 0x4F18, // 优, #374
+ 0x4F1A, // 会, #29
+ 0x4F20, // 传, #222
+ 0x4F46, // 但, #451
+ 0x4F4D, // 位, #208
+ 0x4F53, // 体, #98
+ 0x4F55, // 何, #339
+ 0x4F5C, // 作, #44
+ 0x4F60, // 你, #76
+ 0x4F7F, // 使, #272
+ 0x4F9B, // 供, #375
+ 0x4FDD, // 保, #180
+ 0x4FE1, // 信, #84
+ 0x4FEE, // 修, #437
+ 0x503C, // 值, #450
+ 0x505A, // 做, #368
+ 0x5065, // 健, #484
+ 0x50CF, // 像, #487
+ 0x513F, // 儿, #326
+ 0x5143, // 元, #202
+ 0x5148, // 先, #485
+ 0x5149, // 光, #254
+ 0x514B, // 克, #503
+ 0x514D, // 免, #349
+ 0x5165, // 入, #156
+ 0x5168, // 全, #47
+ 0x516C, // 公, #35
+ 0x5171, // 共, #448
+ 0x5173, // 关, #49
+ 0x5176, // 其, #195
+ 0x5177, // 具, #329
+ 0x5185, // 内, #109
+ 0x518C, // 册, #225
+ 0x519B, // 军, #466
+ 0x51FA, // 出, #53
+ 0x51FB, // 击, #359
+ 0x5206, // 分, #22
+ 0x5217, // 列, #410
+ 0x521B, // 创, #399
+ 0x5229, // 利, #296
+ 0x522B, // 别, #372
+ 0x5230, // 到, #33
+ 0x5236, // 制, #192
+ 0x524D, // 前, #117
+ 0x529B, // 力, #173
+ 0x529E, // 办, #436
+ 0x529F, // 功, #455
+ 0x52A0, // 加, #97
+ 0x52A1, // 务, #100
+ 0x52A8, // 动, #46
+ 0x52A9, // 助, #365
+ 0x5305, // 包, #331
+ 0x5316, // 化, #155
+ 0x5317, // 北, #194
+ 0x533A, // 区, #105
+ 0x533B, // 医, #234
+ 0x5341, // 十, #294
+ 0x534E, // 华, #205
+ 0x5355, // 单, #259
+ 0x5357, // 南, #182
+ 0x535A, // 博, #153
+ 0x5361, // 卡, #332
+ 0x539F, // 原, #271
+ 0x53BB, // 去, #282
+ 0x53C2, // 参, #500
+ 0x53CA, // 及, #255
+ 0x53CB, // 友, #186
+ 0x53CD, // 反, #422
+ 0x53D1, // 发, #15
+ 0x53D7, // 受, #507
+ 0x53D8, // 变, #395
+ 0x53E3, // 口, #293
+ 0x53EA, // 只, #340
+ 0x53EF, // 可, #45
+ 0x53F0, // 台, #267
+ 0x53F7, // 号, #121
+ 0x53F8, // 司, #150
+ 0x5404, // 各, #491
+ 0x5408, // 合, #115
+ 0x540C, // 同, #189
+ 0x540D, // 名, #127
+ 0x540E, // 后, #75
+ 0x5411, // 向, #459
+ 0x5427, // 吧, #353
+ 0x544A, // 告, #318
+ 0x5458, // 员, #232
+ 0x5468, // 周, #347
+ 0x548C, // 和, #43
+ 0x54C1, // 品, #36
+ 0x5546, // 商, #148
+ 0x5668, // 器, #228
+ 0x56DB, // 四, #352
+ 0x56DE, // 回, #38
+ 0x56E0, // 因, #355
+ 0x56E2, // 团, #412
+ 0x56ED, // 园, #470
+ 0x56FD, // 国, #12
+ 0x56FE, // 图, #32
+ 0x5728, // 在, #10
+ 0x5730, // 地, #30
+ 0x573A, // 场, #177
+ 0x575B, // 坛, #364
+ 0x578B, // 型, #274
+ 0x57CE, // 城, #172
+ 0x57FA, // 基, #315
+ 0x58EB, // 士, #434
+ 0x58F0, // 声, #397
+ 0x5904, // 处, #416
+ 0x5907, // 备, #270
+ 0x590D, // 复, #122
+ 0x5916, // 外, #190
+ 0x591A, // 多, #40
+ 0x5927, // 大, #8
+ 0x5929, // 天, #52
+ 0x592A, // 太, #456
+ 0x5934, // 头, #258
+ 0x5973, // 女, #65
+ 0x597D, // 好, #62
+ 0x5982, // 如, #135
+ 0x5A31, // 娱, #452
+ 0x5B50, // 子, #37
+ 0x5B57, // 字, #285
+ 0x5B66, // 学, #19
+ 0x5B89, // 安, #144
+ 0x5B8C, // 完, #469
+ 0x5B9A, // 定, #179
+ 0x5B9D, // 宝, #188
+ 0x5B9E, // 实, #154
+ 0x5BA2, // 客, #174
+ 0x5BB6, // 家, #26
+ 0x5BB9, // 容, #307
+ 0x5BC6, // 密, #471
+ 0x5BF9, // 对, #90
+ 0x5BFC, // 导, #348
+ 0x5C06, // 将, #265
+ 0x5C0F, // 小, #28
+ 0x5C11, // 少, #379
+ 0x5C14, // 尔, #490
+ 0x5C31, // 就, #101
+ 0x5C55, // 展, #291
+ 0x5C71, // 山, #239
+ 0x5DDE, // 州, #227
+ 0x5DE5, // 工, #73
+ 0x5DF1, // 己, #480
+ 0x5DF2, // 已, #310
+ 0x5E02, // 市, #78
+ 0x5E03, // 布, #350
+ 0x5E08, // 师, #277
+ 0x5E16, // 帖, #396
+ 0x5E26, // 带, #449
+ 0x5E2E, // 帮, #461
+ 0x5E38, // 常, #319
+ 0x5E73, // 平, #217
+ 0x5E74, // 年, #20
+ 0x5E76, // 并, #440
+ 0x5E7F, // 广, #166
+ 0x5E93, // 库, #446
+ 0x5E94, // 应, #187
+ 0x5E97, // 店, #320
+ 0x5EA6, // 度, #114
+ 0x5EB7, // 康, #499
+ 0x5EFA, // 建, #211
+ 0x5F00, // 开, #72
+ 0x5F0F, // 式, #207
+ 0x5F15, // 引, #495
+ 0x5F20, // 张, #385
+ 0x5F3A, // 强, #404
+ 0x5F53, // 当, #233
+ 0x5F55, // 录, #146
+ 0x5F62, // 形, #494
+ 0x5F69, // 彩, #356
+ 0x5F71, // 影, #214
+ 0x5F88, // 很, #300
+ 0x5F97, // 得, #193
+ 0x5FAE, // 微, #245
+ 0x5FC3, // 心, #70
+ 0x5FEB, // 快, #324
+ 0x6001, // 态, #508
+ 0x600E, // 怎, #370
+ 0x6027, // 性, #99
+ 0x603B, // 总, #398
+ 0x606F, // 息, #176
+ 0x60A8, // 您, #251
+ 0x60C5, // 情, #87
+ 0x60F3, // 想, #290
+ 0x610F, // 意, #184
+ 0x611F, // 感, #253
+ 0x620F, // 戏, #237
+ 0x6210, // 成, #71
+ 0x6211, // 我, #11
+ 0x6216, // 或, #321
+ 0x6218, // 战, #369
+ 0x6237, // 户, #215
+ 0x623F, // 房, #236
+ 0x6240, // 所, #147
+ 0x624B, // 手, #55
+ 0x624D, // 才, #407
+ 0x6253, // 打, #281
+ 0x6280, // 技, #203
+ 0x6295, // 投, #408
+ 0x62A4, // 护, #502
+ 0x62A5, // 报, #113
+ 0x62DB, // 招, #363
+ 0x6301, // 持, #403
+ 0x6307, // 指, #414
+ 0x636E, // 据, #409
+ 0x6392, // 排, #377
+ 0x63A5, // 接, #266
+ 0x63A8, // 推, #244
+ 0x63D0, // 提, #181
+ 0x641C, // 搜, #301
+ 0x64AD, // 播, #401
+ 0x652F, // 支, #400
+ 0x6536, // 收, #158
+ 0x653E, // 放, #317
+ 0x653F, // 政, #380
+ 0x6548, // 效, #496
+ 0x6559, // 教, #170
+ 0x6570, // 数, #136
+ 0x6587, // 文, #21
+ 0x6599, // 料, #295
+ 0x65AF, // 斯, #473
+ 0x65B0, // 新, #14
+ 0x65B9, // 方, #68
+ 0x65C5, // 旅, #457
+ 0x65E0, // 无, #164
+ 0x65E5, // 日, #50
+ 0x65F6, // 时, #18
+ 0x660E, // 明, #132
+ 0x6613, // 易, #428
+ 0x661F, // 星, #240
+ 0x662F, // 是, #6
+ 0x663E, // 显, #486
+ 0x66F4, // 更, #103
+ 0x6700, // 最, #61
+ 0x6708, // 月, #80
+ 0x6709, // 有, #5
+ 0x670D, // 服, #94
+ 0x671F, // 期, #139
+ 0x672C, // 本, #56
+ 0x672F, // 术, #216
+ 0x673A, // 机, #27
+ 0x6743, // 权, #250
+ 0x6761, // 条, #309
+ 0x6765, // 来, #42
+ 0x677F, // 板, #505
+ 0x6797, // 林, #475
+ 0x679C, // 果, #212
+ 0x67E5, // 查, #165
+ 0x6807, // 标, #269
+ 0x6821, // 校, #462
+ 0x6837, // 样, #314
+ 0x683C, // 格, #238
+ 0x6848, // 案, #378
+ 0x697C, // 楼, #342
+ 0x6A21, // 模, #413
+ 0x6B21, // 次, #263
+ 0x6B22, // 欢, #443
+ 0x6B3E, // 款, #358
+ 0x6B63, // 正, #219
+ 0x6B64, // 此, #362
+ 0x6BD4, // 比, #298
+ 0x6C11, // 民, #279
+ 0x6C14, // 气, #303
+ 0x6C34, // 水, #163
+ 0x6C42, // 求, #373
+ 0x6C5F, // 江, #336
+ 0x6CA1, // 没, #229
+ 0x6CBB, // 治, #425
+ 0x6CD5, // 法, #85
+ 0x6CE8, // 注, #119
+ 0x6D3B, // 活, #231
+ 0x6D41, // 流, #280
+ 0x6D4B, // 测, #460
+ 0x6D77, // 海, #124
+ 0x6D88, // 消, #415
+ 0x6DF1, // 深, #477
+ 0x6E05, // 清, #311
+ 0x6E38, // 游, #81
+ 0x6E90, // 源, #325
+ 0x706B, // 火, #498
+ 0x70B9, // 点, #58
+ 0x70ED, // 热, #183
+ 0x7136, // 然, #308
+ 0x7167, // 照, #431
+ 0x7231, // 爱, #223
+ 0x7247, // 片, #128
+ 0x7248, // 版, #91
+ 0x724C, // 牌, #429
+ 0x7269, // 物, #169
+ 0x7279, // 特, #224
+ 0x738B, // 王, #351
+ 0x73A9, // 玩, #476
+ 0x73B0, // 现, #125
+ 0x7403, // 球, #367
+ 0x7406, // 理, #69
+ 0x751F, // 生, #24
+ 0x7528, // 用, #17
+ 0x7531, // 由, #441
+ 0x7535, // 电, #34
+ 0x7537, // 男, #275
+ 0x754C, // 界, #419
+ 0x75C5, // 病, #371
+ 0x767B, // 登, #204
+ 0x767D, // 白, #338
+ 0x767E, // 百, #157
+ 0x7684, // 的, #1
+ 0x76D8, // 盘, #493
+ 0x76EE, // 目, #261
+ 0x76F4, // 直, #391
+ 0x76F8, // 相, #143
+ 0x7701, // 省, #464
+ 0x770B, // 看, #54
+ 0x771F, // 真, #249
+ 0x7740, // 着, #302
+ 0x77E5, // 知, #142
+ 0x7801, // 码, #257
+ 0x7814, // 研, #387
+ 0x793A, // 示, #334
+ 0x793E, // 社, #343
+ 0x795E, // 神, #330
+ 0x798F, // 福, #509
+ 0x79BB, // 离, #454
+ 0x79CD, // 种, #278
+ 0x79D1, // 科, #126
+ 0x79EF, // 积, #390
+ 0x7A0B, // 程, #209
+ 0x7A76, // 究, #504
+ 0x7A7A, // 空, #312
+ 0x7ACB, // 立, #393
+ 0x7AD9, // 站, #107
+ 0x7AE0, // 章, #304
+ 0x7B2C, // 第, #96
+ 0x7B49, // 等, #210
+ 0x7B54, // 答, #256
+ 0x7B80, // 简, #474
+ 0x7BA1, // 管, #221
+ 0x7C7B, // 类, #246
+ 0x7CBE, // 精, #226
+ 0x7CFB, // 系, #89
+ 0x7D22, // 索, #354
+ 0x7EA2, // 红, #417
+ 0x7EA7, // 级, #178
+ 0x7EBF, // 线, #108
+ 0x7EC4, // 组, #389
+ 0x7EC6, // 细, #442
+ 0x7ECF, // 经, #74
+ 0x7ED3, // 结, #333
+ 0x7ED9, // 给, #384
+ 0x7EDC, // 络, #472
+ 0x7EDF, // 统, #344
+ 0x7F16, // 编, #424
+ 0x7F51, // 网, #9
+ 0x7F6E, // 置, #411
+ 0x7F8E, // 美, #60
+ 0x8001, // 老, #292
+ 0x8003, // 考, #288
+ 0x8005, // 者, #106
+ 0x800C, // 而, #297
+ 0x8054, // 联, #159
+ 0x80B2, // 育, #327
+ 0x80FD, // 能, #59
+ 0x81EA, // 自, #77
+ 0x8272, // 色, #198
+ 0x8282, // 节, #361
+ 0x82B1, // 花, #299
+ 0x82F1, // 英, #316
+ 0x8350, // 荐, #402
+ 0x836F, // 药, #481
+ 0x8425, // 营, #394
+ 0x85CF, // 藏, #337
+ 0x884C, // 行, #41
+ 0x8868, // 表, #104
+ 0x88AB, // 被, #289
+ 0x88C5, // 装, #161
+ 0x897F, // 西, #199
+ 0x8981, // 要, #48
+ 0x89C1, // 见, #360
+ 0x89C2, // 观, #423
+ 0x89C4, // 规, #453
+ 0x89C6, // 视, #120
+ 0x89E3, // 解, #264
+ 0x8A00, // 言, #433
+ 0x8BA1, // 计, #191
+ 0x8BA4, // 认, #482
+ 0x8BA9, // 让, #421
+ 0x8BAE, // 议, #427
+ 0x8BAF, // 讯, #388
+ 0x8BB0, // 记, #273
+ 0x8BBA, // 论, #66
+ 0x8BBE, // 设, #162
+ 0x8BC1, // 证, #201
+ 0x8BC4, // 评, #111
+ 0x8BC6, // 识, #463
+ 0x8BD5, // 试, #323
+ 0x8BDD, // 话, #247
+ 0x8BE2, // 询, #432
+ 0x8BE5, // 该, #447
+ 0x8BE6, // 详, #497
+ 0x8BED, // 语, #268
+ 0x8BF4, // 说, #112
+ 0x8BF7, // 请, #213
+ 0x8BFB, // 读, #341
+ 0x8C03, // 调, #438
+ 0x8D22, // 财, #488
+ 0x8D28, // 质, #386
+ 0x8D2D, // 购, #260
+ 0x8D34, // 贴, #510
+ 0x8D39, // 费, #242
+ 0x8D44, // 资, #116
+ 0x8D77, // 起, #220
+ 0x8D85, // 超, #406
+ 0x8DEF, // 路, #235
+ 0x8EAB, // 身, #262
+ 0x8F66, // 车, #82
+ 0x8F6C, // 转, #322
+ 0x8F7D, // 载, #175
+ 0x8FBE, // 达, #435
+ 0x8FC7, // 过, #118
+ 0x8FD0, // 运, #357
+ 0x8FD1, // 近, #492
+ 0x8FD8, // 还, #171
+ 0x8FD9, // 这, #57
+ 0x8FDB, // 进, #160
+ 0x8FDE, // 连, #489
+ 0x9009, // 选, #328
+ 0x901A, // 通, #137
+ 0x901F, // 速, #458
+ 0x9020, // 造, #511
+ 0x9053, // 道, #79
+ 0x90A3, // 那, #305
+ 0x90E8, // 部, #102
+ 0x90FD, // 都, #167
+ 0x914D, // 配, #479
+ 0x9152, // 酒, #444
+ 0x91CC, // 里, #196
+ 0x91CD, // 重, #230
+ 0x91CF, // 量, #248
+ 0x91D1, // 金, #134
+ 0x9500, // 销, #465
+ 0x957F, // 长, #152
+ 0x95E8, // 门, #185
+ 0x95EE, // 问, #92
+ 0x95F4, // 间, #88
+ 0x95FB, // 闻, #313
+ 0x9605, // 阅, #467
+ 0x9633, // 阳, #420
+ 0x9645, // 际, #501
+ 0x9650, // 限, #286
+ 0x9662, // 院, #276
+ 0x96C6, // 集, #284
+ 0x9700, // 需, #405
+ 0x9762, // 面, #123
+ 0x97F3, // 音, #335
+ 0x9875, // 页, #63
+ 0x9879, // 项, #506
+ 0x9891, // 频, #200
+ 0x9898, // 题, #110
+ 0x98CE, // 风, #252
+ 0x98DF, // 食, #445
+ 0x9996, // 首, #149
+ 0x9999, // 香, #512
+ 0x9A6C, // 马, #392
+ 0x9A8C, // 验, #382
+ 0x9AD8, // 高, #67
+ 0x9F99, // 龙, #426
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_zhCN_coverage=0.718950369339973;
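The coverage constant feeds the confidence recomputation in getPreferred() above: the fraction of converted characters found in this table is scaled by the table's coverage of the sampled language, then clamped to [0, 100]. A worked example with hypothetical counts (40 of 50 converted characters present in the table):

#include <cstdio>

// Worked example of the confidence recomputation in getPreferred(), using
// hypothetical counts; the formula is copied from that function.
int adjustedConfidence(int frequentchars, int totalchars, float freqcoverage) {
    int myconfidence = 10 + float((100 * frequentchars) / totalchars) / freqcoverage;
    if (myconfidence > 100) myconfidence = 100;
    if (myconfidence < 0) myconfidence = 0;
    return myconfidence;
}

int main() {
    // (100 * 40) / 50 = 80;  80 / 0.718950... ≈ 111.3;  10 + 111.3 truncates to 121,
    // which the clamp then caps at 100.
    printf("%d\n", adjustedConfidence(40, 50, 0.718950369339973f));
    return 0;
}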
+
+// The 512 most frequently occurring characters for the zhTW language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_zhTW[] = {
+ 0x4E00, // 一, #2
+ 0x4E09, // 三, #131
+ 0x4E0A, // 上, #12
+ 0x4E0B, // 下, #37
+ 0x4E0D, // 不, #6
+ 0x4E16, // 世, #312
+ 0x4E26, // 並, #434
+ 0x4E2D, // 中, #9
+ 0x4E3B, // 主, #97
+ 0x4E4B, // 之, #55
+ 0x4E5F, // 也, #95
+ 0x4E86, // 了, #19
+ 0x4E8B, // 事, #128
+ 0x4E8C, // 二, #187
+ 0x4E94, // 五, #339
+ 0x4E9B, // 些, #435
+ 0x4E9E, // 亞, #432
+ 0x4EA4, // 交, #264
+ 0x4EAB, // 享, #160
+ 0x4EBA, // 人, #3
+ 0x4EC0, // 什, #483
+ 0x4ECA, // 今, #380
+ 0x4ECB, // 介, #468
+ 0x4ED6, // 他, #65
+ 0x4EE3, // 代, #284
+ 0x4EE5, // 以, #26
+ 0x4EF6, // 件, #234
+ 0x4EFB, // 任, #381
+ 0x4EFD, // 份, #447
+ 0x4F46, // 但, #281
+ 0x4F4D, // 位, #202
+ 0x4F4F, // 住, #471
+ 0x4F55, // 何, #334
+ 0x4F5C, // 作, #56
+ 0x4F60, // 你, #64
+ 0x4F7F, // 使, #236
+ 0x4F86, // 來, #38
+ 0x4F9B, // 供, #397
+ 0x4FBF, // 便, #440
+ 0x4FC2, // 係, #506
+ 0x4FDD, // 保, #161
+ 0x4FE1, // 信, #268
+ 0x4FEE, // 修, #473
+ 0x500B, // 個, #27
+ 0x5011, // 們, #109
+ 0x505A, // 做, #383
+ 0x5065, // 健, #415
+ 0x5099, // 備, #461
+ 0x50B3, // 傳, #277
+ 0x50CF, // 像, #403
+ 0x50F9, // 價, #93
+ 0x512A, // 優, #396
+ 0x5143, // 元, #158
+ 0x5148, // 先, #382
+ 0x5149, // 光, #216
+ 0x514D, // 免, #321
+ 0x5152, // 兒, #374
+ 0x5165, // 入, #58
+ 0x5167, // 內, #106
+ 0x5168, // 全, #67
+ 0x5169, // 兩, #322
+ 0x516C, // 公, #53
+ 0x516D, // 六, #493
+ 0x5171, // 共, #456
+ 0x5176, // 其, #148
+ 0x5177, // 具, #328
+ 0x518A, // 冊, #360
+ 0x518D, // 再, #311
+ 0x51FA, // 出, #44
+ 0x5206, // 分, #15
+ 0x5217, // 列, #259
+ 0x5225, // 別, #361
+ 0x5229, // 利, #251
+ 0x5230, // 到, #29
+ 0x5247, // 則, #511
+ 0x524D, // 前, #82
+ 0x5275, // 創, #409
+ 0x529B, // 力, #176
+ 0x529F, // 功, #430
+ 0x52A0, // 加, #87
+ 0x52A9, // 助, #465
+ 0x52D5, // 動, #48
+ 0x52D9, // 務, #102
+ 0x5305, // 包, #248
+ 0x5316, // 化, #223
+ 0x5317, // 北, #145
+ 0x5340, // 區, #60
+ 0x5341, // 十, #242
+ 0x5357, // 南, #261
+ 0x535A, // 博, #484
+ 0x5361, // 卡, #327
+ 0x5370, // 印, #498
+ 0x5373, // 即, #351
+ 0x539F, // 原, #237
+ 0x53BB, // 去, #190
+ 0x53C3, // 參, #444
+ 0x53C8, // 又, #426
+ 0x53CA, // 及, #136
+ 0x53CB, // 友, #142
+ 0x53D6, // 取, #422
+ 0x53D7, // 受, #410
+ 0x53E3, // 口, #357
+ 0x53EA, // 只, #250
+ 0x53EF, // 可, #35
+ 0x53F0, // 台, #34
+ 0x53F8, // 司, #226
+ 0x5403, // 吃, #362
+ 0x5404, // 各, #454
+ 0x5408, // 合, #147
+ 0x540C, // 同, #173
+ 0x540D, // 名, #108
+ 0x544A, // 告, #186
+ 0x548C, // 和, #130
+ 0x54C1, // 品, #23
+ 0x54E1, // 員, #150
+ 0x5546, // 商, #75
+ 0x554F, // 問, #120
+ 0x559C, // 喜, #502
+ 0x55AE, // 單, #210
+ 0x55CE, // 嗎, #443
+ 0x5668, // 器, #305
+ 0x56DB, // 四, #318
+ 0x56DE, // 回, #59
+ 0x56E0, // 因, #253
+ 0x570B, // 國, #21
+ 0x5712, // 園, #345
+ 0x5716, // 圖, #73
+ 0x5718, // 團, #338
+ 0x5728, // 在, #11
+ 0x5730, // 地, #50
+ 0x578B, // 型, #270
+ 0x57CE, // 城, #466
+ 0x57FA, // 基, #349
+ 0x5831, // 報, #127
+ 0x5834, // 場, #165
+ 0x58EB, // 士, #372
+ 0x5916, // 外, #152
+ 0x591A, // 多, #54
+ 0x5927, // 大, #8
+ 0x5929, // 天, #43
+ 0x592A, // 太, #343
+ 0x5947, // 奇, #325
+ 0x5973, // 女, #85
+ 0x5979, // 她, #420
+ 0x597D, // 好, #22
+ 0x5982, // 如, #144
+ 0x5B50, // 子, #46
+ 0x5B57, // 字, #275
+ 0x5B78, // 學, #49
+ 0x5B89, // 安, #239
+ 0x5B8C, // 完, #320
+ 0x5B9A, // 定, #159
+ 0x5BA2, // 客, #188
+ 0x5BB6, // 家, #31
+ 0x5BB9, // 容, #244
+ 0x5BE6, // 實, #198
+ 0x5BF6, // 寶, #367
+ 0x5C07, // 將, #232
+ 0x5C08, // 專, #133
+ 0x5C0B, // 尋, #352
+ 0x5C0D, // 對, #126
+ 0x5C0E, // 導, #418
+ 0x5C0F, // 小, #20
+ 0x5C11, // 少, #368
+ 0x5C31, // 就, #63
+ 0x5C55, // 展, #341
+ 0x5C71, // 山, #273
+ 0x5DE5, // 工, #121
+ 0x5DF1, // 己, #402
+ 0x5DF2, // 已, #299
+ 0x5E02, // 市, #81
+ 0x5E2B, // 師, #262
+ 0x5E36, // 帶, #470
+ 0x5E38, // 常, #303
+ 0x5E73, // 平, #297
+ 0x5E74, // 年, #30
+ 0x5E97, // 店, #171
+ 0x5EA6, // 度, #220
+ 0x5EB7, // 康, #441
+ 0x5EE3, // 廣, #279
+ 0x5EFA, // 建, #254
+ 0x5F0F, // 式, #155
+ 0x5F15, // 引, #346
+ 0x5F35, // 張, #366
+ 0x5F37, // 強, #437
+ 0x5F71, // 影, #94
+ 0x5F88, // 很, #177
+ 0x5F8C, // 後, #66
+ 0x5F97, // 得, #113
+ 0x5F9E, // 從, #436
+ 0x5FC3, // 心, #57
+ 0x5FEB, // 快, #292
+ 0x6027, // 性, #175
+ 0x606F, // 息, #378
+ 0x60A8, // 您, #252
+ 0x60C5, // 情, #123
+ 0x60F3, // 想, #178
+ 0x610F, // 意, #168
+ 0x611B, // 愛, #125
+ 0x611F, // 感, #211
+ 0x61C9, // 應, #164
+ 0x6210, // 成, #86
+ 0x6211, // 我, #7
+ 0x6216, // 或, #199
+ 0x6230, // 戰, #438
+ 0x6232, // 戲, #309
+ 0x6236, // 戶, #497
+ 0x623F, // 房, #274
+ 0x6240, // 所, #79
+ 0x624B, // 手, #68
+ 0x624D, // 才, #400
+ 0x6253, // 打, #278
+ 0x627E, // 找, #449
+ 0x6280, // 技, #332
+ 0x6295, // 投, #425
+ 0x62C9, // 拉, #500
+ 0x62CD, // 拍, #398
+ 0x6307, // 指, #407
+ 0x6392, // 排, #458
+ 0x63A5, // 接, #326
+ 0x63A8, // 推, #153
+ 0x63D0, // 提, #235
+ 0x641C, // 搜, #314
+ 0x6469, // 摩, #472
+ 0x6536, // 收, #249
+ 0x6539, // 改, #508
+ 0x653E, // 放, #331
+ 0x653F, // 政, #295
+ 0x6559, // 教, #184
+ 0x6574, // 整, #394
+ 0x6578, // 數, #134
+ 0x6587, // 文, #16
+ 0x6599, // 料, #167
+ 0x65AF, // 斯, #476
+ 0x65B0, // 新, #10
+ 0x65B9, // 方, #96
+ 0x65BC, // 於, #70
+ 0x65C5, // 旅, #289
+ 0x65E5, // 日, #18
+ 0x660E, // 明, #118
+ 0x6613, // 易, #482
+ 0x661F, // 星, #205
+ 0x662F, // 是, #5
+ 0x6642, // 時, #13
+ 0x66F4, // 更, #149
+ 0x66F8, // 書, #209
+ 0x6700, // 最, #51
+ 0x6703, // 會, #14
+ 0x6708, // 月, #25
+ 0x6709, // 有, #4
+ 0x670D, // 服, #99
+ 0x671F, // 期, #139
+ 0x672A, // 未, #404
+ 0x672C, // 本, #45
+ 0x6771, // 東, #221
+ 0x677F, // 板, #364
+ 0x6797, // 林, #330
+ 0x679C, // 果, #179
+ 0x67E5, // 查, #283
+ 0x683C, // 格, #157
+ 0x6848, // 案, #392
+ 0x689D, // 條, #406
+ 0x696D, // 業, #103
+ 0x6A02, // 樂, #116
+ 0x6A13, // 樓, #411
+ 0x6A19, // 標, #384
+ 0x6A23, // 樣, #306
+ 0x6A5F, // 機, #40
+ 0x6AA2, // 檢, #359
+ 0x6B0A, // 權, #228
+ 0x6B21, // 次, #227
+ 0x6B3E, // 款, #276
+ 0x6B4C, // 歌, #496
+ 0x6B61, // 歡, #427
+ 0x6B63, // 正, #206
+ 0x6B64, // 此, #247
+ 0x6BCF, // 每, #391
+ 0x6BD4, // 比, #257
+ 0x6C11, // 民, #230
+ 0x6C23, // 氣, #200
+ 0x6C34, // 水, #140
+ 0x6C42, // 求, #501
+ 0x6C92, // 沒, #162
+ 0x6CD5, // 法, #89
+ 0x6D3B, // 活, #124
+ 0x6D41, // 流, #315
+ 0x6D77, // 海, #258
+ 0x6D88, // 消, #342
+ 0x6E05, // 清, #329
+ 0x6E2F, // 港, #293
+ 0x6F14, // 演, #491
+ 0x7063, // 灣, #195
+ 0x70BA, // 為, #39
+ 0x7121, // 無, #107
+ 0x7136, // 然, #215
+ 0x7167, // 照, #376
+ 0x71B1, // 熱, #245
+ 0x7247, // 片, #90
+ 0x7248, // 版, #112
+ 0x724C, // 牌, #467
+ 0x7269, // 物, #110
+ 0x7279, // 特, #183
+ 0x738B, // 王, #287
+ 0x73A9, // 玩, #354
+ 0x73FE, // 現, #143
+ 0x7403, // 球, #350
+ 0x7406, // 理, #105
+ 0x751F, // 生, #24
+ 0x7522, // 產, #201
+ 0x7528, // 用, #17
+ 0x7531, // 由, #288
+ 0x7537, // 男, #298
+ 0x754C, // 界, #399
+ 0x7559, // 留, #218
+ 0x756B, // 畫, #412
+ 0x7576, // 當, #185
+ 0x767B, // 登, #138
+ 0x767C, // 發, #28
+ 0x767D, // 白, #377
+ 0x767E, // 百, #393
+ 0x7684, // 的, #1
+ 0x76EE, // 目, #271
+ 0x76F4, // 直, #379
+ 0x76F8, // 相, #98
+ 0x770B, // 看, #52
+ 0x771F, // 真, #180
+ 0x773C, // 眼, #433
+ 0x77E5, // 知, #170
+ 0x78BC, // 碼, #481
+ 0x793A, // 示, #353
+ 0x793E, // 社, #333
+ 0x795E, // 神, #304
+ 0x7968, // 票, #477
+ 0x798F, // 福, #494
+ 0x79C1, // 私, #507
+ 0x79D1, // 科, #280
+ 0x7A0B, // 程, #272
+ 0x7A2E, // 種, #337
+ 0x7A4D, // 積, #385
+ 0x7A7A, // 空, #324
+ 0x7ACB, // 立, #286
+ 0x7AD9, // 站, #117
+ 0x7AE0, // 章, #141
+ 0x7B2C, // 第, #135
+ 0x7B49, // 等, #240
+ 0x7BA1, // 管, #340
+ 0x7BC0, // 節, #431
+ 0x7BC7, // 篇, #479
+ 0x7C21, // 簡, #499
+ 0x7CBE, // 精, #213
+ 0x7CFB, // 系, #212
+ 0x7D04, // 約, #462
+ 0x7D05, // 紅, #452
+ 0x7D1A, // 級, #267
+ 0x7D30, // 細, #486
+ 0x7D44, // 組, #335
+ 0x7D50, // 結, #243
+ 0x7D66, // 給, #355
+ 0x7D71, // 統, #375
+ 0x7D93, // 經, #111
+ 0x7DB2, // 網, #32
+ 0x7DDA, // 線, #151
+ 0x7E23, // 縣, #439
+ 0x7E3D, // 總, #370
+ 0x7F8E, // 美, #41
+ 0x7FA9, // 義, #504
+ 0x8001, // 老, #290
+ 0x8003, // 考, #428
+ 0x8005, // 者, #92
+ 0x800C, // 而, #217
+ 0x805E, // 聞, #181
+ 0x806F, // 聯, #310
+ 0x8072, // 聲, #413
+ 0x80A1, // 股, #390
+ 0x80B2, // 育, #453
+ 0x80FD, // 能, #71
+ 0x8166, // 腦, #408
+ 0x81EA, // 自, #61
+ 0x81F3, // 至, #344
+ 0x8207, // 與, #84
+ 0x8209, // 舉, #463
+ 0x8272, // 色, #192
+ 0x82B1, // 花, #255
+ 0x82F1, // 英, #348
+ 0x83EF, // 華, #196
+ 0x842C, // 萬, #316
+ 0x843D, // 落, #308
+ 0x8457, // 著, #233
+ 0x85A6, // 薦, #401
+ 0x85CF, // 藏, #503
+ 0x85DD, // 藝, #488
+ 0x8655, // 處, #419
+ 0x865F, // 號, #191
+ 0x884C, // 行, #47
+ 0x8853, // 術, #395
+ 0x8868, // 表, #77
+ 0x88AB, // 被, #291
+ 0x88DD, // 裝, #256
+ 0x88E1, // 裡, #369
+ 0x88FD, // 製, #510
+ 0x897F, // 西, #300
+ 0x8981, // 要, #36
+ 0x898B, // 見, #307
+ 0x8996, // 視, #204
+ 0x89BA, // 覺, #450
+ 0x89BD, // 覽, #387
+ 0x89C0, // 觀, #365
+ 0x89E3, // 解, #323
+ 0x8A00, // 言, #169
+ 0x8A02, // 訂, #423
+ 0x8A08, // 計, #225
+ 0x8A0A, // 訊, #156
+ 0x8A0E, // 討, #373
+ 0x8A18, // 記, #222
+ 0x8A2D, // 設, #174
+ 0x8A3B, // 註, #356
+ 0x8A55, // 評, #246
+ 0x8A66, // 試, #448
+ 0x8A71, // 話, #229
+ 0x8A72, // 該, #446
+ 0x8A8D, // 認, #464
+ 0x8A9E, // 語, #371
+ 0x8AAA, // 說, #91
+ 0x8ABF, // 調, #509
+ 0x8ACB, // 請, #119
+ 0x8AD6, // 論, #114
+ 0x8B1D, // 謝, #389
+ 0x8B49, // 證, #429
+ 0x8B58, // 識, #416
+ 0x8B70, // 議, #485
+ 0x8B77, // 護, #475
+ 0x8B80, // 讀, #386
+ 0x8B8A, // 變, #388
+ 0x8B93, // 讓, #336
+ 0x8CA8, // 貨, #313
+ 0x8CB7, // 買, #260
+ 0x8CBB, // 費, #203
+ 0x8CC7, // 資, #62
+ 0x8CE3, // 賣, #294
+ 0x8CEA, // 質, #457
+ 0x8CFC, // 購, #189
+ 0x8D77, // 起, #214
+ 0x8D85, // 超, #296
+ 0x8DDF, // 跟, #489
+ 0x8DEF, // 路, #137
+ 0x8EAB, // 身, #197
+ 0x8ECA, // 車, #76
+ 0x8F09, // 載, #301
+ 0x8F49, // 轉, #282
+ 0x8FD1, // 近, #414
+ 0x9001, // 送, #363
+ 0x9019, // 這, #42
+ 0x901A, // 通, #207
+ 0x901F, // 速, #495
+ 0x9020, // 造, #455
+ 0x9023, // 連, #285
+ 0x9032, // 進, #231
+ 0x904A, // 遊, #132
+ 0x904B, // 運, #219
+ 0x904E, // 過, #101
+ 0x9053, // 道, #146
+ 0x9054, // 達, #417
+ 0x9078, // 選, #182
+ 0x9084, // 還, #154
+ 0x908A, // 邊, #487
+ 0x90A3, // 那, #269
+ 0x90E8, // 部, #78
+ 0x90FD, // 都, #104
+ 0x914D, // 配, #421
+ 0x9152, // 酒, #512
+ 0x91AB, // 醫, #358
+ 0x91CD, // 重, #224
+ 0x91CF, // 量, #319
+ 0x91D1, // 金, #115
+ 0x9304, // 錄, #302
+ 0x9577, // 長, #172
+ 0x9580, // 門, #193
+ 0x958B, // 開, #72
+ 0x9593, // 間, #80
+ 0x95B1, // 閱, #405
+ 0x95DC, // 關, #74
+ 0x963F, // 阿, #460
+ 0x9650, // 限, #265
+ 0x9662, // 院, #474
+ 0x9664, // 除, #478
+ 0x969B, // 際, #459
+ 0x96C6, // 集, #347
+ 0x96E2, // 離, #442
+ 0x96FB, // 電, #33
+ 0x9700, // 需, #445
+ 0x975E, // 非, #451
+ 0x9762, // 面, #129
+ 0x97F3, // 音, #194
+ 0x9801, // 頁, #83
+ 0x982D, // 頭, #238
+ 0x984C, // 題, #122
+ 0x985E, // 類, #163
+ 0x98A8, // 風, #266
+ 0x98DF, // 食, #208
+ 0x9910, // 餐, #469
+ 0x9928, // 館, #424
+ 0x9996, // 首, #166
+ 0x9999, // 香, #263
+ 0x99AC, // 馬, #317
+ 0x9A57, // 驗, #492
+ 0x9AD4, // 體, #100
+ 0x9AD8, // 高, #88
+ 0x9EBC, // 麼, #241
+ 0x9EC3, // 黃, #480
+ 0x9ED1, // 黑, #490
+ 0x9EDE, // 點, #69
+ 0x9F8D, // 龍, #505
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_zhTW_coverage=0.704841200026877;
+
+// The 512 most frequently occurring characters for the ja language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_ja[] = {
+ 0x3005, // 々, #352
+ 0x3041, // ぁ, #486
+ 0x3042, // あ, #50
+ 0x3044, // い, #2
+ 0x3046, // う, #33
+ 0x3048, // え, #83
+ 0x304A, // お, #37
+ 0x304B, // か, #21
+ 0x304C, // が, #17
+ 0x304D, // き, #51
+ 0x304E, // ぎ, #324
+ 0x304F, // く, #38
+ 0x3050, // ぐ, #334
+ 0x3051, // け, #60
+ 0x3052, // げ, #296
+ 0x3053, // こ, #34
+ 0x3054, // ご, #100
+ 0x3055, // さ, #31
+ 0x3056, // ざ, #378
+ 0x3057, // し, #4
+ 0x3058, // じ, #121
+ 0x3059, // す, #12
+ 0x305A, // ず, #215
+ 0x305B, // せ, #86
+ 0x305D, // そ, #68
+ 0x305F, // た, #11
+ 0x3060, // だ, #42
+ 0x3061, // ち, #67
+ 0x3063, // っ, #23
+ 0x3064, // つ, #73
+ 0x3066, // て, #7
+ 0x3067, // で, #6
+ 0x3068, // と, #14
+ 0x3069, // ど, #75
+ 0x306A, // な, #8
+ 0x306B, // に, #5
+ 0x306D, // ね, #123
+ 0x306E, // の, #1
+ 0x306F, // は, #16
+ 0x3070, // ば, #150
+ 0x3071, // ぱ, #259
+ 0x3072, // ひ, #364
+ 0x3073, // び, #266
+ 0x3075, // ふ, #484
+ 0x3076, // ぶ, #330
+ 0x3078, // へ, #146
+ 0x3079, // べ, #207
+ 0x307B, // ほ, #254
+ 0x307E, // ま, #18
+ 0x307F, // み, #74
+ 0x3080, // む, #285
+ 0x3081, // め, #78
+ 0x3082, // も, #32
+ 0x3083, // ゃ, #111
+ 0x3084, // や, #85
+ 0x3086, // ゆ, #392
+ 0x3087, // ょ, #224
+ 0x3088, // よ, #63
+ 0x3089, // ら, #29
+ 0x308A, // り, #28
+ 0x308B, // る, #9
+ 0x308C, // れ, #35
+ 0x308D, // ろ, #127
+ 0x308F, // わ, #88
+ 0x3092, // を, #19
+ 0x3093, // ん, #22
+ 0x30A1, // ァ, #193
+ 0x30A2, // ア, #27
+ 0x30A3, // ィ, #70
+ 0x30A4, // イ, #15
+ 0x30A6, // ウ, #89
+ 0x30A7, // ェ, #134
+ 0x30A8, // エ, #81
+ 0x30A9, // ォ, #225
+ 0x30AA, // オ, #76
+ 0x30AB, // カ, #52
+ 0x30AC, // ガ, #147
+ 0x30AD, // キ, #66
+ 0x30AE, // ギ, #246
+ 0x30AF, // ク, #25
+ 0x30B0, // グ, #39
+ 0x30B1, // ケ, #137
+ 0x30B2, // ゲ, #200
+ 0x30B3, // コ, #46
+ 0x30B4, // ゴ, #183
+ 0x30B5, // サ, #64
+ 0x30B6, // ザ, #221
+ 0x30B7, // シ, #48
+ 0x30B8, // ジ, #55
+ 0x30B9, // ス, #13
+ 0x30BA, // ズ, #103
+ 0x30BB, // セ, #109
+ 0x30BC, // ゼ, #499
+ 0x30BD, // ソ, #175
+ 0x30BF, // タ, #45
+ 0x30C0, // ダ, #104
+ 0x30C1, // チ, #71
+ 0x30C3, // ッ, #20
+ 0x30C4, // ツ, #119
+ 0x30C6, // テ, #59
+ 0x30C7, // デ, #82
+ 0x30C8, // ト, #10
+ 0x30C9, // ド, #44
+ 0x30CA, // ナ, #102
+ 0x30CB, // ニ, #72
+ 0x30CD, // ネ, #117
+ 0x30CE, // ノ, #192
+ 0x30CF, // ハ, #164
+ 0x30D0, // バ, #62
+ 0x30D1, // パ, #90
+ 0x30D2, // ヒ, #398
+ 0x30D3, // ビ, #77
+ 0x30D4, // ピ, #135
+ 0x30D5, // フ, #47
+ 0x30D6, // ブ, #56
+ 0x30D7, // プ, #43
+ 0x30D8, // ヘ, #268
+ 0x30D9, // ベ, #157
+ 0x30DA, // ペ, #125
+ 0x30DB, // ホ, #155
+ 0x30DC, // ボ, #168
+ 0x30DD, // ポ, #114
+ 0x30DE, // マ, #57
+ 0x30DF, // ミ, #97
+ 0x30E0, // ム, #69
+ 0x30E1, // メ, #53
+ 0x30E2, // モ, #142
+ 0x30E3, // ャ, #93
+ 0x30E4, // ヤ, #258
+ 0x30E5, // ュ, #79
+ 0x30E6, // ユ, #405
+ 0x30E7, // ョ, #98
+ 0x30E9, // ラ, #26
+ 0x30EA, // リ, #30
+ 0x30EB, // ル, #24
+ 0x30EC, // レ, #41
+ 0x30ED, // ロ, #40
+ 0x30EF, // ワ, #144
+ 0x30F3, // ン, #3
+ 0x30F4, // ヴ, #483
+ 0x30FD, // ヽ, #501
+ 0x4E00, // 一, #84
+ 0x4E07, // 万, #337
+ 0x4E09, // 三, #323
+ 0x4E0A, // 上, #133
+ 0x4E0B, // 下, #180
+ 0x4E0D, // 不, #277
+ 0x4E16, // 世, #385
+ 0x4E2D, // 中, #87
+ 0x4E3B, // 主, #432
+ 0x4E88, // 予, #326
+ 0x4E8B, // 事, #95
+ 0x4E8C, // 二, #394
+ 0x4E95, // 井, #468
+ 0x4EA4, // 交, #410
+ 0x4EAC, // 京, #260
+ 0x4EBA, // 人, #61
+ 0x4ECA, // 今, #184
+ 0x4ECB, // 介, #358
+ 0x4ED5, // 仕, #391
+ 0x4ED6, // 他, #256
+ 0x4ED8, // 付, #243
+ 0x4EE3, // 代, #280
+ 0x4EE5, // 以, #216
+ 0x4EF6, // 件, #190
+ 0x4F1A, // 会, #105
+ 0x4F4D, // 位, #177
+ 0x4F4F, // 住, #376
+ 0x4F53, // 体, #223
+ 0x4F55, // 何, #294
+ 0x4F5C, // 作, #154
+ 0x4F7F, // 使, #233
+ 0x4F9B, // 供, #503
+ 0x4FA1, // 価, #217
+ 0x4FBF, // 便, #511
+ 0x4FDD, // 保, #279
+ 0x4FE1, // 信, #271
+ 0x500B, // 個, #415
+ 0x50CF, // 像, #178
+ 0x512A, // 優, #403
+ 0x5143, // 元, #384
+ 0x5148, // 先, #311
+ 0x5149, // 光, #488
+ 0x5165, // 入, #115
+ 0x5168, // 全, #173
+ 0x516C, // 公, #287
+ 0x5177, // 具, #447
+ 0x5185, // 内, #169
+ 0x5186, // 円, #131
+ 0x5199, // 写, #275
+ 0x51FA, // 出, #110
+ 0x5206, // 分, #130
+ 0x5207, // 切, #401
+ 0x521D, // 初, #319
+ 0x5225, // 別, #290
+ 0x5229, // 利, #226
+ 0x5236, // 制, #507
+ 0x524D, // 前, #124
+ 0x529B, // 力, #272
+ 0x52A0, // 加, #249
+ 0x52D5, // 動, #120
+ 0x52D9, // 務, #421
+ 0x52DF, // 募, #476
+ 0x5316, // 化, #308
+ 0x5317, // 北, #341
+ 0x533A, // 区, #348
+ 0x539F, // 原, #321
+ 0x53C2, // 参, #452
+ 0x53CB, // 友, #451
+ 0x53D6, // 取, #237
+ 0x53D7, // 受, #354
+ 0x53E3, // 口, #289
+ 0x53E4, // 古, #339
+ 0x53EF, // 可, #298
+ 0x53F0, // 台, #439
+ 0x53F7, // 号, #361
+ 0x5408, // 合, #118
+ 0x540C, // 同, #263
+ 0x540D, // 名, #65
+ 0x5411, // 向, #434
+ 0x544A, // 告, #386
+ 0x5468, // 周, #393
+ 0x5473, // 味, #299
+ 0x548C, // 和, #350
+ 0x54C1, // 品, #96
+ 0x54E1, // 員, #293
+ 0x5546, // 商, #198
+ 0x554F, // 問, #158
+ 0x55B6, // 営, #438
+ 0x5668, // 器, #366
+ 0x56DE, // 回, #143
+ 0x56F3, // 図, #444
+ 0x56FD, // 国, #153
+ 0x5712, // 園, #435
+ 0x571F, // 土, #239
+ 0x5728, // 在, #351
+ 0x5730, // 地, #163
+ 0x578B, // 型, #430
+ 0x5831, // 報, #112
+ 0x5834, // 場, #139
+ 0x58F2, // 売, #232
+ 0x5909, // 変, #306
+ 0x5916, // 外, #222
+ 0x591A, // 多, #336
+ 0x5927, // 大, #80
+ 0x5929, // 天, #278
+ 0x5973, // 女, #161
+ 0x597D, // 好, #349
+ 0x5A5A, // 婚, #479
+ 0x5B50, // 子, #113
+ 0x5B57, // 字, #492
+ 0x5B66, // 学, #132
+ 0x5B89, // 安, #295
+ 0x5B9A, // 定, #145
+ 0x5B9F, // 実, #220
+ 0x5BA4, // 室, #482
+ 0x5BAE, // 宮, #487
+ 0x5BB6, // 家, #211
+ 0x5BB9, // 容, #333
+ 0x5BFE, // 対, #252
+ 0x5C02, // 専, #474
+ 0x5C0F, // 小, #212
+ 0x5C11, // 少, #377
+ 0x5C4B, // 屋, #284
+ 0x5C71, // 山, #206
+ 0x5CA1, // 岡, #429
+ 0x5CF6, // 島, #297
+ 0x5DDD, // 川, #253
+ 0x5DE5, // 工, #374
+ 0x5E02, // 市, #159
+ 0x5E2F, // 帯, #416
+ 0x5E38, // 常, #437
+ 0x5E73, // 平, #390
+ 0x5E74, // 年, #54
+ 0x5E83, // 広, #367
+ 0x5E97, // 店, #149
+ 0x5EA6, // 度, #269
+ 0x5EAB, // 庫, #380
+ 0x5F0F, // 式, #265
+ 0x5F15, // 引, #345
+ 0x5F37, // 強, #446
+ 0x5F53, // 当, #240
+ 0x5F62, // 形, #502
+ 0x5F8C, // 後, #230
+ 0x5F97, // 得, #490
+ 0x5FC3, // 心, #307
+ 0x5FC5, // 必, #422
+ 0x5FDC, // 応, #356
+ 0x601D, // 思, #189
+ 0x6027, // 性, #201
+ 0x6075, // 恵, #400
+ 0x60C5, // 情, #140
+ 0x60F3, // 想, #477
+ 0x610F, // 意, #305
+ 0x611B, // 愛, #273
+ 0x611F, // 感, #257
+ 0x6210, // 成, #262
+ 0x6226, // 戦, #365
+ 0x6240, // 所, #236
+ 0x624B, // 手, #160
+ 0x6295, // 投, #129
+ 0x6301, // 持, #355
+ 0x6307, // 指, #425
+ 0x63A2, // 探, #369
+ 0x63B2, // 掲, #399
+ 0x643A, // 携, #459
+ 0x652F, // 支, #512
+ 0x653E, // 放, #469
+ 0x6559, // 教, #270
+ 0x6570, // 数, #181
+ 0x6587, // 文, #202
+ 0x6599, // 料, #106
+ 0x65B0, // 新, #99
+ 0x65B9, // 方, #126
+ 0x65C5, // 旅, #445
+ 0x65E5, // 日, #36
+ 0x660E, // 明, #300
+ 0x6620, // 映, #418
+ 0x6642, // 時, #107
+ 0x66F4, // 更, #359
+ 0x66F8, // 書, #174
+ 0x6700, // 最, #152
+ 0x6708, // 月, #49
+ 0x6709, // 有, #302
+ 0x671F, // 期, #332
+ 0x6728, // 木, #203
+ 0x672C, // 本, #92
+ 0x6750, // 材, #489
+ 0x6751, // 村, #466
+ 0x6765, // 来, #267
+ 0x6771, // 東, #191
+ 0x677F, // 板, #411
+ 0x679C, // 果, #441
+ 0x6821, // 校, #327
+ 0x682A, // 株, #412
+ 0x683C, // 格, #228
+ 0x691C, // 検, #179
+ 0x696D, // 業, #166
+ 0x697D, // 楽, #172
+ 0x69D8, // 様, #255
+ 0x6A5F, // 機, #235
+ 0x6B21, // 次, #318
+ 0x6B62, // 止, #475
+ 0x6B63, // 正, #312
+ 0x6C17, // 気, #116
+ 0x6C34, // 水, #165
+ 0x6C42, // 求, #465
+ 0x6C7A, // 決, #370
+ 0x6CBB, // 治, #505
+ 0x6CC1, // 況, #462
+ 0x6CD5, // 法, #227
+ 0x6CE8, // 注, #372
+ 0x6D3B, // 活, #303
+ 0x6D41, // 流, #480
+ 0x6D77, // 海, #274
+ 0x6E08, // 済, #417
+ 0x6F14, // 演, #504
+ 0x706B, // 火, #264
+ 0x70B9, // 点, #331
+ 0x7121, // 無, #58
+ 0x7248, // 版, #409
+ 0x7269, // 物, #170
+ 0x7279, // 特, #242
+ 0x72B6, // 状, #458
+ 0x73FE, // 現, #322
+ 0x7406, // 理, #162
+ 0x751F, // 生, #122
+ 0x7523, // 産, #320
+ 0x7528, // 用, #94
+ 0x7530, // 田, #195
+ 0x7537, // 男, #373
+ 0x753A, // 町, #314
+ 0x753B, // 画, #91
+ 0x754C, // 界, #436
+ 0x756A, // 番, #261
+ 0x75C5, // 病, #428
+ 0x767A, // 発, #194
+ 0x767B, // 登, #231
+ 0x767D, // 白, #419
+ 0x7684, // 的, #251
+ 0x76EE, // 目, #197
+ 0x76F4, // 直, #497
+ 0x76F8, // 相, #286
+ 0x770C, // 県, #199
+ 0x771F, // 真, #219
+ 0x7740, // 着, #283
+ 0x77E5, // 知, #185
+ 0x77F3, // 石, #500
+ 0x78BA, // 確, #383
+ 0x793A, // 示, #241
+ 0x793E, // 社, #167
+ 0x795E, // 神, #315
+ 0x798F, // 福, #423
+ 0x79C1, // 私, #347
+ 0x79D1, // 科, #420
+ 0x7A0E, // 税, #368
+ 0x7A2E, // 種, #455
+ 0x7A3F, // 稿, #148
+ 0x7A7A, // 空, #427
+ 0x7ACB, // 立, #309
+ 0x7B11, // 笑, #454
+ 0x7B2C, // 第, #317
+ 0x7B49, // 等, #457
+ 0x7B54, // 答, #426
+ 0x7BA1, // 管, #481
+ 0x7CFB, // 系, #408
+ 0x7D04, // 約, #276
+ 0x7D20, // 素, #407
+ 0x7D22, // 索, #214
+ 0x7D30, // 細, #381
+ 0x7D39, // 紹, #471
+ 0x7D42, // 終, #456
+ 0x7D44, // 組, #424
+ 0x7D4C, // 経, #360
+ 0x7D50, // 結, #291
+ 0x7D9A, // 続, #357
+ 0x7DCF, // 総, #467
+ 0x7DDA, // 線, #338
+ 0x7DE8, // 編, #453
+ 0x7F8E, // 美, #204
+ 0x8003, // 考, #387
+ 0x8005, // 者, #151
+ 0x805E, // 聞, #463
+ 0x8077, // 職, #363
+ 0x80B2, // 育, #433
+ 0x80FD, // 能, #250
+ 0x8179, // 腹, #396
+ 0x81EA, // 自, #156
+ 0x826F, // 良, #329
+ 0x8272, // 色, #402
+ 0x82B1, // 花, #440
+ 0x82B8, // 芸, #413
+ 0x82F1, // 英, #485
+ 0x8449, // 葉, #472
+ 0x884C, // 行, #128
+ 0x8853, // 術, #460
+ 0x8868, // 表, #209
+ 0x88FD, // 製, #431
+ 0x897F, // 西, #406
+ 0x8981, // 要, #313
+ 0x898B, // 見, #101
+ 0x898F, // 規, #375
+ 0x89A7, // 覧, #171
+ 0x89E3, // 解, #388
+ 0x8A00, // 言, #210
+ 0x8A08, // 計, #343
+ 0x8A18, // 記, #136
+ 0x8A2D, // 設, #292
+ 0x8A71, // 話, #213
+ 0x8A73, // 詳, #371
+ 0x8A8D, // 認, #404
+ 0x8A9E, // 語, #234
+ 0x8AAC, // 説, #494
+ 0x8AAD, // 読, #301
+ 0x8ABF, // 調, #443
+ 0x8AC7, // 談, #448
+ 0x8B77, // 護, #509
+ 0x8C37, // 谷, #506
+ 0x8CA9, // 販, #362
+ 0x8CB7, // 買, #346
+ 0x8CC7, // 資, #473
+ 0x8CEA, // 質, #281
+ 0x8CFC, // 購, #495
+ 0x8EAB, // 身, #470
+ 0x8ECA, // 車, #205
+ 0x8EE2, // 転, #335
+ 0x8F09, // 載, #342
+ 0x8FBC, // 込, #229
+ 0x8FD1, // 近, #304
+ 0x8FD4, // 返, #461
+ 0x8FFD, // 追, #379
+ 0x9001, // 送, #186
+ 0x901A, // 通, #182
+ 0x901F, // 速, #340
+ 0x9023, // 連, #244
+ 0x904B, // 運, #382
+ 0x904E, // 過, #498
+ 0x9053, // 道, #282
+ 0x9054, // 達, #450
+ 0x9055, // 違, #414
+ 0x9078, // 選, #288
+ 0x90E8, // 部, #208
+ 0x90FD, // 都, #344
+ 0x914D, // 配, #389
+ 0x91CD, // 重, #478
+ 0x91CE, // 野, #245
+ 0x91D1, // 金, #138
+ 0x9332, // 録, #238
+ 0x9577, // 長, #247
+ 0x9580, // 門, #508
+ 0x958B, // 開, #248
+ 0x9593, // 間, #141
+ 0x95A2, // 関, #188
+ 0x962A, // 阪, #496
+ 0x9650, // 限, #395
+ 0x9662, // 院, #449
+ 0x9664, // 除, #510
+ 0x969B, // 際, #493
+ 0x96C6, // 集, #196
+ 0x96D1, // 雑, #442
+ 0x96FB, // 電, #187
+ 0x9762, // 面, #328
+ 0x97F3, // 音, #325
+ 0x984C, // 題, #310
+ 0x985E, // 類, #491
+ 0x98A8, // 風, #353
+ 0x98DF, // 食, #218
+ 0x9928, // 館, #464
+ 0x99C5, // 駅, #316
+ 0x9A13, // 験, #397
+ 0x9AD8, // 高, #176
+ 0xFF57, // ｗ, #108
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_ja_coverage=0.880569589120162;
+
+// The 512 most frequently occurring characters for the ko language in a sample of the Internet.
+// Ordered by codepoint, comment shows character and ranking by frequency
+const uint16_t frequent_ko[] = {
+ 0x314B, // ㅋ, #148
+ 0x314E, // ㅎ, #390
+ 0x3160, // ㅠ, #354
+ 0x318D, // ㆍ, #439
+ 0xAC00, // 가, #6
+ 0xAC01, // 각, #231
+ 0xAC04, // 간, #106
+ 0xAC08, // 갈, #362
+ 0xAC10, // 감, #122
+ 0xAC11, // 갑, #493
+ 0xAC15, // 강, #155
+ 0xAC19, // 같, #264
+ 0xAC1C, // 개, #87
+ 0xAC1D, // 객, #198
+ 0xAC24, // 갤, #457
+ 0xAC70, // 거, #91
+ 0xAC74, // 건, #161
+ 0xAC78, // 걸, #338
+ 0xAC80, // 검, #184
+ 0xAC83, // 것, #116
+ 0xAC8C, // 게, #36
+ 0xACA0, // 겠, #233
+ 0xACA8, // 겨, #341
+ 0xACA9, // 격, #245
+ 0xACAC, // 견, #413
+ 0xACB0, // 결, #202
+ 0xACBD, // 경, #62
+ 0xACC4, // 계, #142
+ 0xACE0, // 고, #12
+ 0xACE1, // 곡, #444
+ 0xACE8, // 골, #379
+ 0xACF3, // 곳, #388
+ 0xACF5, // 공, #59
+ 0xACFC, // 과, #69
+ 0xAD00, // 관, #95
+ 0xAD11, // 광, #235
+ 0xAD50, // 교, #128
+ 0xAD6C, // 구, #52
+ 0xAD6D, // 국, #85
+ 0xAD70, // 군, #293
+ 0xAD74, // 굴, #487
+ 0xAD81, // 궁, #441
+ 0xAD8C, // 권, #192
+ 0xADC0, // 귀, #386
+ 0xADDC, // 규, #367
+ 0xADF8, // 그, #30
+ 0xADF9, // 극, #424
+ 0xADFC, // 근, #241
+ 0xAE00, // 글, #61
+ 0xAE08, // 금, #138
+ 0xAE09, // 급, #269
+ 0xAE30, // 기, #3
+ 0xAE34, // 긴, #465
+ 0xAE38, // 길, #297
+ 0xAE40, // 김, #205
+ 0xAE4C, // 까, #171
+ 0xAED8, // 께, #273
+ 0xAF43, // 꽃, #475
+ 0xB05D, // 끝, #505
+ 0xB07C, // 끼, #490
+ 0xB098, // 나, #39
+ 0xB09C, // 난, #274
+ 0xB0A0, // 날, #292
+ 0xB0A8, // 남, #139
+ 0xB0B4, // 내, #56
+ 0xB108, // 너, #272
+ 0xB110, // 널, #476
+ 0xB118, // 넘, #492
+ 0xB124, // 네, #100
+ 0xB137, // 넷, #329
+ 0xB140, // 녀, #288
+ 0xB144, // 년, #151
+ 0xB178, // 노, #149
+ 0xB17C, // 논, #491
+ 0xB180, // 놀, #464
+ 0xB18D, // 농, #442
+ 0xB204, // 누, #319
+ 0xB208, // 눈, #383
+ 0xB274, // 뉴, #173
+ 0xB290, // 느, #368
+ 0xB294, // 는, #5
+ 0xB298, // 늘, #322
+ 0xB2A5, // 능, #190
+ 0xB2C8, // 니, #16
+ 0xB2D8, // 님, #153
+ 0xB2E4, // 다, #2
+ 0xB2E8, // 단, #134
+ 0xB2EB, // 닫, #195
+ 0xB2EC, // 달, #243
+ 0xB2F4, // 담, #254
+ 0xB2F5, // 답, #287
+ 0xB2F9, // 당, #159
+ 0xB300, // 대, #33
+ 0xB313, // 댓, #303
+ 0xB354, // 더, #140
+ 0xB358, // 던, #252
+ 0xB367, // 덧, #463
+ 0xB370, // 데, #104
+ 0xB378, // 델, #429
+ 0xB3C4, // 도, #25
+ 0xB3C5, // 독, #301
+ 0xB3CC, // 돌, #309
+ 0xB3D9, // 동, #58
+ 0xB418, // 되, #82
+ 0xB41C, // 된, #189
+ 0xB420, // 될, #408
+ 0xB429, // 됩, #332
+ 0xB450, // 두, #199
+ 0xB4A4, // 뒤, #496
+ 0xB4DC, // 드, #40
+ 0xB4E0, // 든, #283
+ 0xB4E4, // 들, #54
+ 0xB4EF, // 듯, #478
+ 0xB4F1, // 등, #90
+ 0xB514, // 디, #133
+ 0xB529, // 딩, #462
+ 0xB530, // 따, #333
+ 0xB54C, // 때, #240
+ 0xB610, // 또, #313
+ 0xB77C, // 라, #42
+ 0xB77D, // 락, #355
+ 0xB780, // 란, #290
+ 0xB78C, // 람, #246
+ 0xB78D, // 랍, #420
+ 0xB791, // 랑, #270
+ 0xB798, // 래, #174
+ 0xB799, // 랙, #381
+ 0xB79C, // 랜, #357
+ 0xB7A8, // 램, #359
+ 0xB7A9, // 랩, #402
+ 0xB7C9, // 량, #346
+ 0xB7EC, // 러, #130
+ 0xB7F0, // 런, #312
+ 0xB7FC, // 럼, #327
+ 0xB7FD, // 럽, #447
+ 0xB807, // 렇, #412
+ 0xB808, // 레, #114
+ 0xB80C, // 렌, #395
+ 0xB824, // 려, #158
+ 0xB825, // 력, #194
+ 0xB828, // 련, #326
+ 0xB839, // 령, #389
+ 0xB85C, // 로, #4
+ 0xB85D, // 록, #84
+ 0xB860, // 론, #366
+ 0xB8CC, // 료, #154
+ 0xB8E8, // 루, #236
+ 0xB958, // 류, #265
+ 0xB974, // 르, #212
+ 0xB978, // 른, #250
+ 0xB97C, // 를, #35
+ 0xB984, // 름, #276
+ 0xB9AC, // 리, #19
+ 0xB9AD, // 릭, #394
+ 0xB9B0, // 린, #259
+ 0xB9B4, // 릴, #485
+ 0xB9BC, // 림, #305
+ 0xB9BD, // 립, #217
+ 0xB9C1, // 링, #351
+ 0xB9C8, // 마, #67
+ 0xB9C9, // 막, #310
+ 0xB9CC, // 만, #65
+ 0xB9CE, // 많, #257
+ 0xB9D0, // 말, #188
+ 0xB9DB, // 맛, #397
+ 0xB9DD, // 망, #370
+ 0xB9DE, // 맞, #399
+ 0xB9E4, // 매, #125
+ 0xB9E8, // 맨, #422
+ 0xBA38, // 머, #311
+ 0xBA39, // 먹, #377
+ 0xBA3C, // 먼, #469
+ 0xBA54, // 메, #147
+ 0xBA70, // 며, #191
+ 0xBA74, // 면, #72
+ 0xBA85, // 명, #131
+ 0xBAA8, // 모, #73
+ 0xBAA9, // 목, #157
+ 0xBAB0, // 몰, #401
+ 0xBAB8, // 몸, #437
+ 0xBABB, // 못, #336
+ 0xBB34, // 무, #80
+ 0xBB38, // 문, #57
+ 0xBB3C, // 물, #94
+ 0xBBA4, // 뮤, #431
+ 0xBBF8, // 미, #76
+ 0xBBFC, // 민, #200
+ 0xBC00, // 밀, #308
+ 0xBC0F, // 및, #249
+ 0xBC14, // 바, #89
+ 0xBC15, // 박, #226
+ 0xBC18, // 반, #175
+ 0xBC1B, // 받, #248
+ 0xBC1C, // 발, #164
+ 0xBC29, // 방, #92
+ 0xBC30, // 배, #162
+ 0xBC31, // 백, #256
+ 0xBC84, // 버, #111
+ 0xBC88, // 번, #167
+ 0xBC8C, // 벌, #423
+ 0xBC94, // 범, #427
+ 0xBC95, // 법, #207
+ 0xBCA0, // 베, #281
+ 0xBCA4, // 벤, #378
+ 0xBCA8, // 벨, #387
+ 0xBCC0, // 변, #253
+ 0xBCC4, // 별, #262
+ 0xBCD1, // 병, #340
+ 0xBCF4, // 보, #20
+ 0xBCF5, // 복, #204
+ 0xBCF8, // 본, #182
+ 0xBCFC, // 볼, #385
+ 0xBD09, // 봉, #405
+ 0xBD80, // 부, #46
+ 0xBD81, // 북, #261
+ 0xBD84, // 분, #105
+ 0xBD88, // 불, #225
+ 0xBDF0, // 뷰, #350
+ 0xBE0C, // 브, #214
+ 0xBE14, // 블, #99
+ 0xBE44, // 비, #55
+ 0xBE4C, // 빌, #510
+ 0xBE60, // 빠, #398
+ 0xC0AC, // 사, #14
+ 0xC0AD, // 삭, #342
+ 0xC0B0, // 산, #121
+ 0xC0B4, // 살, #279
+ 0xC0BC, // 삼, #348
+ 0xC0C1, // 상, #41
+ 0xC0C8, // 새, #282
+ 0xC0C9, // 색, #181
+ 0xC0DD, // 생, #109
+ 0xC11C, // 서, #21
+ 0xC11D, // 석, #234
+ 0xC120, // 선, #107
+ 0xC124, // 설, #170
+ 0xC131, // 성, #50
+ 0xC138, // 세, #60
+ 0xC139, // 섹, #456
+ 0xC13C, // 센, #267
+ 0xC154, // 셔, #455
+ 0xC158, // 션, #237
+ 0xC15C, // 셜, #448
+ 0xC168, // 셨, #421
+ 0xC18C, // 소, #51
+ 0xC18D, // 속, #219
+ 0xC190, // 손, #323
+ 0xC1A1, // 송, #203
+ 0xC1C4, // 쇄, #501
+ 0xC1FC, // 쇼, #364
+ 0xC218, // 수, #27
+ 0xC219, // 숙, #467
+ 0xC21C, // 순, #258
+ 0xC220, // 술, #302
+ 0xC26C, // 쉬, #511
+ 0xC288, // 슈, #384
+ 0xC2A4, // 스, #11
+ 0xC2AC, // 슬, #438
+ 0xC2B4, // 슴, #504
+ 0xC2B5, // 습, #77
+ 0xC2B9, // 승, #299
+ 0xC2DC, // 시, #13
+ 0xC2DD, // 식, #137
+ 0xC2E0, // 신, #47
+ 0xC2E4, // 실, #132
+ 0xC2EC, // 심, #196
+ 0xC2ED, // 십, #482
+ 0xC2F6, // 싶, #352
+ 0xC2F8, // 싸, #419
+ 0xC4F0, // 쓰, #278
+ 0xC528, // 씨, #360
+ 0xC544, // 아, #23
+ 0xC545, // 악, #296
+ 0xC548, // 안, #71
+ 0xC54A, // 않, #209
+ 0xC54C, // 알, #222
+ 0xC554, // 암, #460
+ 0xC558, // 았, #349
+ 0xC559, // 앙, #473
+ 0xC55E, // 앞, #434
+ 0xC560, // 애, #271
+ 0xC561, // 액, #415
+ 0xC571, // 앱, #477
+ 0xC57C, // 야, #124
+ 0xC57D, // 약, #229
+ 0xC591, // 양, #177
+ 0xC5B4, // 어, #24
+ 0xC5B5, // 억, #407
+ 0xC5B8, // 언, #294
+ 0xC5BC, // 얼, #356
+ 0xC5C4, // 엄, #426
+ 0xC5C5, // 업, #118
+ 0xC5C6, // 없, #178
+ 0xC5C8, // 었, #165
+ 0xC5D0, // 에, #9
+ 0xC5D4, // 엔, #375
+ 0xC5D8, // 엘, #506
+ 0xC5EC, // 여, #66
+ 0xC5ED, // 역, #186
+ 0xC5EE, // 엮, #488
+ 0xC5F0, // 연, #96
+ 0xC5F4, // 열, #266
+ 0xC5FC, // 염, #449
+ 0xC600, // 였, #374
+ 0xC601, // 영, #83
+ 0xC608, // 예, #168
+ 0xC624, // 오, #75
+ 0xC628, // 온, #300
+ 0xC62C, // 올, #306
+ 0xC640, // 와, #119
+ 0xC644, // 완, #361
+ 0xC654, // 왔, #489
+ 0xC655, // 왕, #418
+ 0xC678, // 외, #218
+ 0xC694, // 요, #43
+ 0xC695, // 욕, #479
+ 0xC6A9, // 용, #48
+ 0xC6B0, // 우, #64
+ 0xC6B1, // 욱, #503
+ 0xC6B4, // 운, #108
+ 0xC6B8, // 울, #223
+ 0xC6C0, // 움, #317
+ 0xC6C3, // 웃, #404
+ 0xC6CC, // 워, #280
+ 0xC6D0, // 원, #45
+ 0xC6D4, // 월, #150
+ 0xC6E8, // 웨, #446
+ 0xC6F9, // 웹, #500
+ 0xC704, // 위, #78
+ 0xC720, // 유, #81
+ 0xC721, // 육, #321
+ 0xC724, // 윤, #416
+ 0xC73C, // 으, #49
+ 0xC740, // 은, #31
+ 0xC744, // 을, #17
+ 0xC74C, // 음, #112
+ 0xC751, // 응, #461
+ 0xC758, // 의, #8
+ 0xC774, // 이, #1
+ 0xC775, // 익, #403
+ 0xC778, // 인, #18
+ 0xC77C, // 일, #28
+ 0xC784, // 임, #160
+ 0xC785, // 입, #93
+ 0xC788, // 있, #44
+ 0xC790, // 자, #22
+ 0xC791, // 작, #88
+ 0xC798, // 잘, #347
+ 0xC7A1, // 잡, #372
+ 0xC7A5, // 장, #53
+ 0xC7AC, // 재, #120
+ 0xC7C1, // 쟁, #483
+ 0xC800, // 저, #98
+ 0xC801, // 적, #97
+ 0xC804, // 전, #34
+ 0xC808, // 절, #320
+ 0xC810, // 점, #201
+ 0xC811, // 접, #331
+ 0xC815, // 정, #26
+ 0xC81C, // 제, #29
+ 0xC838, // 져, #414
+ 0xC870, // 조, #86
+ 0xC871, // 족, #373
+ 0xC874, // 존, #432
+ 0xC880, // 좀, #470
+ 0xC885, // 종, #208
+ 0xC88B, // 좋, #239
+ 0xC8E0, // 죠, #451
+ 0xC8FC, // 주, #38
+ 0xC8FD, // 죽, #471
+ 0xC900, // 준, #286
+ 0xC904, // 줄, #392
+ 0xC911, // 중, #103
+ 0xC988, // 즈, #255
+ 0xC98C, // 즌, #507
+ 0xC990, // 즐, #371
+ 0xC99D, // 증, #260
+ 0xC9C0, // 지, #10
+ 0xC9C1, // 직, #216
+ 0xC9C4, // 진, #79
+ 0xC9C8, // 질, #238
+ 0xC9D1, // 집, #206
+ 0xC9DC, // 짜, #411
+ 0xC9F8, // 째, #494
+ 0xCABD, // 쪽, #435
+ 0xCC28, // 차, #146
+ 0xCC29, // 착, #443
+ 0xCC2C, // 찬, #481
+ 0xCC30, // 찰, #440
+ 0xCC38, // 참, #343
+ 0xCC3D, // 창, #304
+ 0xCC3E, // 찾, #335
+ 0xCC44, // 채, #284
+ 0xCC45, // 책, #298
+ 0xCC98, // 처, #242
+ 0xCC9C, // 천, #143
+ 0xCCA0, // 철, #380
+ 0xCCA8, // 첨, #452
+ 0xCCAB, // 첫, #484
+ 0xCCAD, // 청, #197
+ 0xCCB4, // 체, #126
+ 0xCCD0, // 쳐, #472
+ 0xCD08, // 초, #220
+ 0xCD1D, // 총, #406
+ 0xCD5C, // 최, #179
+ 0xCD94, // 추, #136
+ 0xCD95, // 축, #337
+ 0xCD9C, // 출, #166
+ 0xCDA9, // 충, #369
+ 0xCDE8, // 취, #210
+ 0xCE20, // 츠, #215
+ 0xCE21, // 측, #468
+ 0xCE35, // 층, #512
+ 0xCE58, // 치, #102
+ 0xCE5C, // 친, #325
+ 0xCE68, // 침, #263
+ 0xCE74, // 카, #115
+ 0xCE7C, // 칼, #466
+ 0xCE90, // 캐, #454
+ 0xCEE4, // 커, #285
+ 0xCEE8, // 컨, #328
+ 0xCEF4, // 컴, #417
+ 0xCF00, // 케, #339
+ 0xCF13, // 켓, #509
+ 0xCF1C, // 켜, #508
+ 0xCF54, // 코, #193
+ 0xCF58, // 콘, #391
+ 0xCFE0, // 쿠, #393
+ 0xD035, // 퀵, #453
+ 0xD06C, // 크, #101
+ 0xD070, // 큰, #495
+ 0xD074, // 클, #289
+ 0xD0A4, // 키, #230
+ 0xD0C0, // 타, #127
+ 0xD0C1, // 탁, #314
+ 0xD0C4, // 탄, #450
+ 0xD0C8, // 탈, #436
+ 0xD0DC, // 태, #221
+ 0xD0DD, // 택, #275
+ 0xD130, // 터, #70
+ 0xD14C, // 테, #213
+ 0xD150, // 텐, #324
+ 0xD154, // 텔, #430
+ 0xD15C, // 템, #382
+ 0xD1A0, // 토, #145
+ 0xD1B5, // 통, #156
+ 0xD22C, // 투, #227
+ 0xD2B8, // 트, #37
+ 0xD2B9, // 특, #247
+ 0xD2F0, // 티, #187
+ 0xD305, // 팅, #410
+ 0xD30C, // 파, #141
+ 0xD310, // 판, #163
+ 0xD314, // 팔, #499
+ 0xD328, // 패, #307
+ 0xD32C, // 팬, #459
+ 0xD338, // 팸, #433
+ 0xD37C, // 퍼, #344
+ 0xD398, // 페, #172
+ 0xD3B8, // 편, #251
+ 0xD3C9, // 평, #291
+ 0xD3EC, // 포, #68
+ 0xD3ED, // 폭, #445
+ 0xD3F0, // 폰, #318
+ 0xD45C, // 표, #232
+ 0xD480, // 풀, #497
+ 0xD488, // 품, #113
+ 0xD48D, // 풍, #425
+ 0xD504, // 프, #110
+ 0xD508, // 픈, #498
+ 0xD50C, // 플, #211
+ 0xD53C, // 피, #169
+ 0xD544, // 필, #295
+ 0xD551, // 핑, #376
+ 0xD558, // 하, #7
+ 0xD559, // 학, #129
+ 0xD55C, // 한, #15
+ 0xD560, // 할, #144
+ 0xD568, // 함, #152
+ 0xD569, // 합, #123
+ 0xD56D, // 항, #268
+ 0xD574, // 해, #32
+ 0xD588, // 했, #180
+ 0xD589, // 행, #135
+ 0xD5A5, // 향, #345
+ 0xD5C8, // 허, #396
+ 0xD5D8, // 험, #316
+ 0xD5E4, // 헤, #474
+ 0xD604, // 현, #185
+ 0xD611, // 협, #315
+ 0xD615, // 형, #244
+ 0xD61C, // 혜, #428
+ 0xD638, // 호, #117
+ 0xD63C, // 혼, #358
+ 0xD648, // 홈, #330
+ 0xD64D, // 홍, #363
+ 0xD654, // 화, #63
+ 0xD655, // 확, #183
+ 0xD658, // 환, #224
+ 0xD65C, // 활, #277
+ 0xD669, // 황, #353
+ 0xD68C, // 회, #74
+ 0xD68D, // 획, #458
+ 0xD69F, // 횟, #409
+ 0xD6A8, // 효, #400
+ 0xD6C4, // 후, #176
+ 0xD6C8, // 훈, #486
+ 0xD734, // 휴, #365
+ 0xD754, // 흔, #480
+ 0xD76C, // 희, #334
+ 0xD788, // 히, #228
+ 0xD798, // 힘, #502
+};
+// the percentage of the sample covered by the above characters
+static const float frequent_ko_coverage=0.948157021464184;
+
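The detector that consumes these tables is not part of this hunk, but since each table is sorted by codepoint and paired with a coverage constant, membership testing can be a plain binary search. A minimal sketch, assuming it lives in a translation unit that already includes the table definitions above (the function name is invented):

    #include <algorithm>
    #include <cstddef>
    #include <stdint.h>

    // Number of entries in the table defined above (512 per the comment).
    static const size_t kFrequentKoSize = sizeof(frequent_ko) / sizeof(frequent_ko[0]);

    // Fraction of the given UTF-16 code units that appear in the table.
    // The table is ordered by codepoint, so membership is a binary search.
    static float scoreAgainstKoTable(const uint16_t* text, size_t len) {
        if (len == 0) {
            return 0.0f;
        }
        size_t hits = 0;
        for (size_t i = 0; i < len; ++i) {
            if (std::binary_search(frequent_ko, frequent_ko + kFrequentKoSize, text[i])) {
                ++hits;
            }
        }
        // Text drawn from the same distribution as the sample should score close
        // to frequent_ko_coverage (~0.948); a much lower ratio argues against ko.
        return static_cast<float>(hits) / static_cast<float>(len);
    }
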
diff --git a/media/libmedia/IAudioFlinger.cpp b/media/libmedia/IAudioFlinger.cpp
index e02107f..687fa76 100644
--- a/media/libmedia/IAudioFlinger.cpp
+++ b/media/libmedia/IAudioFlinger.cpp
@@ -58,7 +58,7 @@ enum {
RESTORE_OUTPUT,
OPEN_INPUT,
CLOSE_INPUT,
- SET_STREAM_OUTPUT,
+ INVALIDATE_STREAM,
SET_VOICE_VOLUME,
GET_RENDER_POSITION,
GET_INPUT_FRAMES_LOST,
@@ -74,6 +74,12 @@ enum {
GET_PRIMARY_OUTPUT_SAMPLING_RATE,
GET_PRIMARY_OUTPUT_FRAME_COUNT,
SET_LOW_RAM_DEVICE,
+ LIST_AUDIO_PORTS,
+ GET_AUDIO_PORT,
+ CREATE_AUDIO_PATCH,
+ RELEASE_AUDIO_PATCH,
+ LIST_AUDIO_PATCHES,
+ SET_AUDIO_PORT_CONFIG
};
class BpAudioFlinger : public BpInterface<IAudioFlinger>
@@ -89,13 +95,12 @@ public:
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- size_t frameCount,
+ size_t *pFrameCount,
track_flags_t *flags,
const sp<IMemory>& sharedBuffer,
audio_io_handle_t output,
pid_t tid,
int *sessionId,
- String8& name,
int clientUid,
status_t *status)
{
@@ -106,9 +111,11 @@ public:
data.writeInt32(sampleRate);
data.writeInt32(format);
data.writeInt32(channelMask);
+ size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
data.writeInt64(frameCount);
track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
data.writeInt32(lFlags);
+ // haveSharedBuffer
if (sharedBuffer != 0) {
data.writeInt32(true);
data.writeStrongBinder(sharedBuffer->asBinder());
@@ -117,7 +124,7 @@ public:
}
data.writeInt32((int32_t) output);
data.writeInt32((int32_t) tid);
- int lSessionId = 0;
+ int lSessionId = AUDIO_SESSION_ALLOCATE;
if (sessionId != NULL) {
lSessionId = *sessionId;
}
@@ -127,6 +134,10 @@ public:
if (lStatus != NO_ERROR) {
ALOGE("createTrack error: %s", strerror(-lStatus));
} else {
+ frameCount = reply.readInt64();
+ if (pFrameCount != NULL) {
+ *pFrameCount = frameCount;
+ }
lFlags = reply.readInt32();
if (flags != NULL) {
*flags = lFlags;
@@ -135,11 +146,21 @@ public:
if (sessionId != NULL) {
*sessionId = lSessionId;
}
- name = reply.readString8();
lStatus = reply.readInt32();
track = interface_cast<IAudioTrack>(reply.readStrongBinder());
+ if (lStatus == NO_ERROR) {
+ if (track == 0) {
+ ALOGE("createTrack should have returned an IAudioTrack");
+ lStatus = UNKNOWN_ERROR;
+ }
+ } else {
+ if (track != 0) {
+ ALOGE("createTrack returned an IAudioTrack but with status %d", lStatus);
+ track.clear();
+ }
+ }
}
- if (status) {
+ if (status != NULL) {
*status = lStatus;
}
return track;
@@ -150,10 +171,12 @@ public:
uint32_t sampleRate,
audio_format_t format,
audio_channel_mask_t channelMask,
- size_t frameCount,
+ size_t *pFrameCount,
track_flags_t *flags,
pid_t tid,
int *sessionId,
+ sp<IMemory>& cblk,
+ sp<IMemory>& buffers,
status_t *status)
{
Parcel data, reply;
@@ -163,19 +186,26 @@ public:
data.writeInt32(sampleRate);
data.writeInt32(format);
data.writeInt32(channelMask);
+ size_t frameCount = pFrameCount != NULL ? *pFrameCount : 0;
data.writeInt64(frameCount);
track_flags_t lFlags = flags != NULL ? *flags : (track_flags_t) TRACK_DEFAULT;
data.writeInt32(lFlags);
data.writeInt32((int32_t) tid);
- int lSessionId = 0;
+ int lSessionId = AUDIO_SESSION_ALLOCATE;
if (sessionId != NULL) {
lSessionId = *sessionId;
}
data.writeInt32(lSessionId);
+ cblk.clear();
+ buffers.clear();
status_t lStatus = remote()->transact(OPEN_RECORD, data, &reply);
if (lStatus != NO_ERROR) {
ALOGE("openRecord error: %s", strerror(-lStatus));
} else {
+ frameCount = reply.readInt64();
+ if (pFrameCount != NULL) {
+ *pFrameCount = frameCount;
+ }
lFlags = reply.readInt32();
if (flags != NULL) {
*flags = lFlags;
@@ -186,19 +216,36 @@ public:
}
lStatus = reply.readInt32();
record = interface_cast<IAudioRecord>(reply.readStrongBinder());
+ cblk = interface_cast<IMemory>(reply.readStrongBinder());
+ if (cblk != 0 && cblk->pointer() == NULL) {
+ cblk.clear();
+ }
+ buffers = interface_cast<IMemory>(reply.readStrongBinder());
+ if (buffers != 0 && buffers->pointer() == NULL) {
+ buffers.clear();
+ }
if (lStatus == NO_ERROR) {
if (record == 0) {
ALOGE("openRecord should have returned an IAudioRecord");
lStatus = UNKNOWN_ERROR;
+ } else if (cblk == 0) {
+ ALOGE("openRecord should have returned a cblk");
+ lStatus = NO_MEMORY;
}
+ // buffers is permitted to be 0
} else {
- if (record != 0) {
- ALOGE("openRecord returned an IAudioRecord but with status %d", lStatus);
- record.clear();
+ if (record != 0 || cblk != 0 || buffers != 0) {
+ ALOGE("openRecord returned an IAudioRecord, cblk, "
+ "or buffers but with status %d", lStatus);
}
}
+ if (lStatus != NO_ERROR) {
+ record.clear();
+ cblk.clear();
+ buffers.clear();
+ }
}
- if (status) {
+ if (status != NULL) {
*status = lStatus;
}
return record;
@@ -391,7 +438,7 @@ public:
const audio_offload_info_t *offloadInfo)
{
Parcel data, reply;
- audio_devices_t devices = pDevices != NULL ? *pDevices : (audio_devices_t)0;
+ audio_devices_t devices = pDevices != NULL ? *pDevices : AUDIO_DEVICE_NONE;
uint32_t samplingRate = pSamplingRate != NULL ? *pSamplingRate : 0;
audio_format_t format = pFormat != NULL ? *pFormat : AUDIO_FORMAT_DEFAULT;
audio_channel_mask_t channelMask = pChannelMask != NULL ?
@@ -405,6 +452,7 @@ public:
data.writeInt32(channelMask);
data.writeInt32(latency);
data.writeInt32((int32_t) flags);
+ // hasOffloadInfo
if (offloadInfo == NULL) {
data.writeInt32(0);
} else {
@@ -415,15 +463,25 @@ public:
audio_io_handle_t output = (audio_io_handle_t) reply.readInt32();
ALOGV("openOutput() returned output, %d", output);
devices = (audio_devices_t)reply.readInt32();
- if (pDevices != NULL) *pDevices = devices;
+ if (pDevices != NULL) {
+ *pDevices = devices;
+ }
samplingRate = reply.readInt32();
- if (pSamplingRate != NULL) *pSamplingRate = samplingRate;
+ if (pSamplingRate != NULL) {
+ *pSamplingRate = samplingRate;
+ }
format = (audio_format_t) reply.readInt32();
- if (pFormat != NULL) *pFormat = format;
+ if (pFormat != NULL) {
+ *pFormat = format;
+ }
channelMask = (audio_channel_mask_t)reply.readInt32();
- if (pChannelMask != NULL) *pChannelMask = channelMask;
+ if (pChannelMask != NULL) {
+ *pChannelMask = channelMask;
+ }
latency = reply.readInt32();
- if (pLatencyMs != NULL) *pLatencyMs = latency;
+ if (pLatencyMs != NULL) {
+ *pLatencyMs = latency;
+ }
return output;
}
@@ -472,7 +530,7 @@ public:
audio_channel_mask_t *pChannelMask)
{
Parcel data, reply;
- audio_devices_t devices = pDevices != NULL ? *pDevices : (audio_devices_t)0;
+ audio_devices_t devices = pDevices != NULL ? *pDevices : AUDIO_DEVICE_NONE;
uint32_t samplingRate = pSamplingRate != NULL ? *pSamplingRate : 0;
audio_format_t format = pFormat != NULL ? *pFormat : AUDIO_FORMAT_DEFAULT;
audio_channel_mask_t channelMask = pChannelMask != NULL ?
@@ -487,13 +545,21 @@ public:
remote()->transact(OPEN_INPUT, data, &reply);
audio_io_handle_t input = (audio_io_handle_t) reply.readInt32();
devices = (audio_devices_t)reply.readInt32();
- if (pDevices != NULL) *pDevices = devices;
+ if (pDevices != NULL) {
+ *pDevices = devices;
+ }
samplingRate = reply.readInt32();
- if (pSamplingRate != NULL) *pSamplingRate = samplingRate;
+ if (pSamplingRate != NULL) {
+ *pSamplingRate = samplingRate;
+ }
format = (audio_format_t) reply.readInt32();
- if (pFormat != NULL) *pFormat = format;
+ if (pFormat != NULL) {
+ *pFormat = format;
+ }
channelMask = (audio_channel_mask_t)reply.readInt32();
- if (pChannelMask != NULL) *pChannelMask = channelMask;
+ if (pChannelMask != NULL) {
+ *pChannelMask = channelMask;
+ }
return input;
}
@@ -506,13 +572,12 @@ public:
return reply.readInt32();
}
- virtual status_t setStreamOutput(audio_stream_type_t stream, audio_io_handle_t output)
+ virtual status_t invalidateStream(audio_stream_type_t stream)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32((int32_t) stream);
- data.writeInt32((int32_t) output);
- remote()->transact(SET_STREAM_OUTPUT, data, &reply);
+ remote()->transact(INVALIDATE_STREAM, data, &reply);
return reply.readInt32();
}
@@ -535,11 +600,11 @@ public:
status_t status = reply.readInt32();
if (status == NO_ERROR) {
uint32_t tmp = reply.readInt32();
- if (halFrames) {
+ if (halFrames != NULL) {
*halFrames = tmp;
}
tmp = reply.readInt32();
- if (dspFrames) {
+ if (dspFrames != NULL) {
*dspFrames = tmp;
}
}
@@ -551,8 +616,11 @@ public:
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32((int32_t) ioHandle);
- remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
- return reply.readInt32();
+ status_t status = remote()->transact(GET_INPUT_FRAMES_LOST, data, &reply);
+ if (status != NO_ERROR) {
+ return 0;
+ }
+ return (uint32_t) reply.readInt32();
}
virtual int newAudioSessionId()
@@ -560,26 +628,28 @@ public:
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
status_t status = remote()->transact(NEW_AUDIO_SESSION_ID, data, &reply);
- int id = 0;
+ int id = AUDIO_SESSION_ALLOCATE;
if (status == NO_ERROR) {
id = reply.readInt32();
}
return id;
}
- virtual void acquireAudioSessionId(int audioSession)
+ virtual void acquireAudioSessionId(int audioSession, int pid)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(audioSession);
+ data.writeInt32(pid);
remote()->transact(ACQUIRE_AUDIO_SESSION_ID, data, &reply);
}
- virtual void releaseAudioSessionId(int audioSession)
+ virtual void releaseAudioSessionId(int audioSession, int pid)
{
Parcel data, reply;
data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
data.writeInt32(audioSession);
+ data.writeInt32(pid);
remote()->transact(RELEASE_AUDIO_SESSION_ID, data, &reply);
}
@@ -657,7 +727,7 @@ public:
if (pDesc == NULL) {
return effect;
- if (status) {
+ if (status != NULL) {
*status = BAD_VALUE;
}
}
@@ -675,7 +745,7 @@ public:
} else {
lStatus = reply.readInt32();
int tmp = reply.readInt32();
- if (id) {
+ if (id != NULL) {
*id = tmp;
}
tmp = reply.readInt32();
@@ -685,7 +755,7 @@ public:
effect = interface_cast<IEffect>(reply.readStrongBinder());
reply.read(pDesc, sizeof(effect_descriptor_t));
}
- if (status) {
+ if (status != NULL) {
*status = lStatus;
}
@@ -737,7 +807,101 @@ public:
remote()->transact(SET_LOW_RAM_DEVICE, data, &reply);
return reply.readInt32();
}
-
+ virtual status_t listAudioPorts(unsigned int *num_ports,
+ struct audio_port *ports)
+ {
+ if (num_ports == NULL || *num_ports == 0 || ports == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32(*num_ports);
+ status_t status = remote()->transact(LIST_AUDIO_PORTS, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ *num_ports = (unsigned int)reply.readInt32();
+ reply.read(ports, *num_ports * sizeof(struct audio_port));
+ return status;
+ }
+ virtual status_t getAudioPort(struct audio_port *port)
+ {
+ if (port == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(port, sizeof(struct audio_port));
+ status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(port, sizeof(struct audio_port));
+ return status;
+ }
+ virtual status_t createAudioPatch(const struct audio_patch *patch,
+ audio_patch_handle_t *handle)
+ {
+ if (patch == NULL || handle == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(patch, sizeof(struct audio_patch));
+ data.write(handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(CREATE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(handle, sizeof(audio_patch_handle_t));
+ return status;
+ }
+ virtual status_t releaseAudioPatch(audio_patch_handle_t handle)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(&handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(RELEASE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+ virtual status_t listAudioPatches(unsigned int *num_patches,
+ struct audio_patch *patches)
+ {
+ if (num_patches == NULL || *num_patches == 0 || patches == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.writeInt32(*num_patches);
+ status_t status = remote()->transact(LIST_AUDIO_PATCHES, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ *num_patches = (unsigned int)reply.readInt32();
+ reply.read(patches, *num_patches * sizeof(struct audio_patch));
+ return status;
+ }
+ virtual status_t setAudioPortConfig(const struct audio_port_config *config)
+ {
+ if (config == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioFlinger::getInterfaceDescriptor());
+ data.write(config, sizeof(struct audio_port_config));
+ status_t status = remote()->transact(SET_AUDIO_PORT_CONFIG, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
};
IMPLEMENT_META_INTERFACE(AudioFlinger, "android.media.IAudioFlinger");
@@ -765,7 +929,6 @@ status_t BnAudioFlinger::onTransact(
pid_t tid = (pid_t) data.readInt32();
int sessionId = data.readInt32();
int clientUid = data.readInt32();
- String8 name;
status_t status;
sp<IAudioTrack> track;
if ((haveSharedBuffer && (buffer == 0)) ||
@@ -775,12 +938,13 @@ status_t BnAudioFlinger::onTransact(
} else {
track = createTrack(
(audio_stream_type_t) streamType, sampleRate, format,
- channelMask, frameCount, &flags, buffer, output, tid,
- &sessionId, name, clientUid, &status);
+ channelMask, &frameCount, &flags, buffer, output, tid,
+ &sessionId, clientUid, &status);
+ LOG_ALWAYS_FATAL_IF((track != 0) != (status == NO_ERROR));
}
+ reply->writeInt64(frameCount);
reply->writeInt32(flags);
reply->writeInt32(sessionId);
- reply->writeString8(name);
reply->writeInt32(status);
reply->writeStrongBinder(track->asBinder());
return NO_ERROR;
@@ -795,14 +959,20 @@ status_t BnAudioFlinger::onTransact(
track_flags_t flags = (track_flags_t) data.readInt32();
pid_t tid = (pid_t) data.readInt32();
int sessionId = data.readInt32();
+ sp<IMemory> cblk;
+ sp<IMemory> buffers;
status_t status;
sp<IAudioRecord> record = openRecord(input,
- sampleRate, format, channelMask, frameCount, &flags, tid, &sessionId, &status);
+ sampleRate, format, channelMask, &frameCount, &flags, tid, &sessionId,
+ cblk, buffers, &status);
LOG_ALWAYS_FATAL_IF((record != 0) != (status == NO_ERROR));
+ reply->writeInt64(frameCount);
reply->writeInt32(flags);
reply->writeInt32(sessionId);
reply->writeInt32(status);
reply->writeStrongBinder(record->asBinder());
+ reply->writeStrongBinder(cblk->asBinder());
+ reply->writeStrongBinder(buffers->asBinder());
return NO_ERROR;
} break;
case SAMPLE_RATE: {
@@ -941,7 +1111,7 @@ status_t BnAudioFlinger::onTransact(
&latency,
flags,
hasOffloadInfo ? &offloadInfo : NULL);
- ALOGV("OPEN_OUTPUT output, %p", output);
+ ALOGV("OPEN_OUTPUT output, %d", output);
reply->writeInt32((int32_t) output);
reply->writeInt32(devices);
reply->writeInt32(samplingRate);
@@ -997,11 +1167,10 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32(closeInput((audio_io_handle_t) data.readInt32()));
return NO_ERROR;
} break;
- case SET_STREAM_OUTPUT: {
+ case INVALIDATE_STREAM: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
- uint32_t stream = data.readInt32();
- audio_io_handle_t output = (audio_io_handle_t) data.readInt32();
- reply->writeInt32(setStreamOutput((audio_stream_type_t) stream, output));
+ audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
+ reply->writeInt32(invalidateStream(stream));
return NO_ERROR;
} break;
case SET_VOICE_VOLUME: {
@@ -1026,7 +1195,7 @@ status_t BnAudioFlinger::onTransact(
case GET_INPUT_FRAMES_LOST: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
audio_io_handle_t ioHandle = (audio_io_handle_t) data.readInt32();
- reply->writeInt32(getInputFramesLost(ioHandle));
+ reply->writeInt32((int32_t) getInputFramesLost(ioHandle));
return NO_ERROR;
} break;
case NEW_AUDIO_SESSION_ID: {
@@ -1037,13 +1206,15 @@ status_t BnAudioFlinger::onTransact(
case ACQUIRE_AUDIO_SESSION_ID: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int audioSession = data.readInt32();
- acquireAudioSessionId(audioSession);
+ int pid = data.readInt32();
+ acquireAudioSessionId(audioSession, pid);
return NO_ERROR;
} break;
case RELEASE_AUDIO_SESSION_ID: {
CHECK_INTERFACE(IAudioFlinger, data, reply);
int audioSession = data.readInt32();
- releaseAudioSessionId(audioSession);
+ int pid = data.readInt32();
+ releaseAudioSessionId(audioSession, pid);
return NO_ERROR;
} break;
case QUERY_NUM_EFFECTS: {
@@ -1128,6 +1299,76 @@ status_t BnAudioFlinger::onTransact(
reply->writeInt32(setLowRamDevice(isLowRamDevice));
return NO_ERROR;
} break;
+ case LIST_AUDIO_PORTS: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ unsigned int num_ports = data.readInt32();
+ struct audio_port *ports =
+ (struct audio_port *)calloc(num_ports,
+ sizeof(struct audio_port));
+ status_t status = listAudioPorts(&num_ports, ports);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeInt32(num_ports);
+ reply->write(ports, num_ports * sizeof(struct audio_port));
+ }
+ free(ports);
+ return NO_ERROR;
+ } break;
+ case GET_AUDIO_PORT: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ struct audio_port port;
+ data.read(&port, sizeof(struct audio_port));
+ status_t status = getAudioPort(&port);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&port, sizeof(struct audio_port));
+ }
+ return NO_ERROR;
+ } break;
+ case CREATE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ struct audio_patch patch;
+ data.read(&patch, sizeof(struct audio_patch));
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = createAudioPatch(&patch, &handle);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&handle, sizeof(audio_patch_handle_t));
+ }
+ return NO_ERROR;
+ } break;
+ case RELEASE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = releaseAudioPatch(handle);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ } break;
+ case LIST_AUDIO_PATCHES: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ unsigned int num_patches = data.readInt32();
+ struct audio_patch *patches =
+ (struct audio_patch *)calloc(num_patches,
+ sizeof(struct audio_patch));
+ status_t status = listAudioPatches(&num_patches, patches);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->writeInt32(num_patches);
+ reply->write(patches, num_patches * sizeof(struct audio_patch));
+ }
+ free(patches);
+ return NO_ERROR;
+ } break;
+ case SET_AUDIO_PORT_CONFIG: {
+ CHECK_INTERFACE(IAudioFlinger, data, reply);
+ struct audio_port_config config;
+ data.read(&config, sizeof(struct audio_port_config));
+ status_t status = setAudioPortConfig(&config);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ } break;
default:
return BBinder::onTransact(code, data, reply, flags);
}
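
The createTrack()/openRecord() changes above make frameCount an in/out parameter and hand the control block (and optionally the data buffers) back as IMemory out-parameters, with null-pointer checks on the shared memory. A hedged caller-side sketch of the new openRecord() contract; the function name and the -1 tid are illustrative only, and the real caller (AudioRecord.cpp) is updated elsewhere in this change:

    // Assumes <media/IAudioFlinger.h> and friends are included.
    static sp<IAudioRecord> openRecordSketch(const sp<IAudioFlinger>& af,
                                             audio_io_handle_t input,
                                             uint32_t sampleRate,
                                             audio_format_t format,
                                             audio_channel_mask_t channelMask)
    {
        size_t frameCount = 0;                       // 0 asks AudioFlinger to choose
        IAudioFlinger::track_flags_t flags = IAudioFlinger::TRACK_DEFAULT;
        int sessionId = AUDIO_SESSION_ALLOCATE;
        sp<IMemory> cblk;
        sp<IMemory> buffers;
        status_t status;
        sp<IAudioRecord> record = af->openRecord(input, sampleRate, format, channelMask,
                &frameCount, &flags, -1 /* tid: no callback thread in this sketch */,
                &sessionId, cblk, buffers, &status);
        if (status != NO_ERROR || record == 0 || cblk == 0) {
            return 0;
        }
        // frameCount now holds the count AudioFlinger actually granted; buffers may
        // legitimately be 0 when the data path lives inside the cblk allocation.
        return record;
    }
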
diff --git a/media/libmedia/IAudioPolicyService.cpp b/media/libmedia/IAudioPolicyService.cpp
index 4be3c09..41a9065 100644
--- a/media/libmedia/IAudioPolicyService.cpp
+++ b/media/libmedia/IAudioPolicyService.cpp
@@ -57,7 +57,15 @@ enum {
QUERY_DEFAULT_PRE_PROCESSING,
SET_EFFECT_ENABLED,
IS_STREAM_ACTIVE_REMOTELY,
- IS_OFFLOAD_SUPPORTED
+ IS_OFFLOAD_SUPPORTED,
+ LIST_AUDIO_PORTS,
+ GET_AUDIO_PORT,
+ CREATE_AUDIO_PATCH,
+ RELEASE_AUDIO_PATCH,
+ LIST_AUDIO_PATCHES,
+ SET_AUDIO_PORT_CONFIG,
+ REGISTER_CLIENT,
+ GET_OUTPUT_FOR_ATTR
};
class BpAudioPolicyService : public BpInterface<IAudioPolicyService>
@@ -137,6 +145,7 @@ public:
data.writeInt32(static_cast <uint32_t>(format));
data.writeInt32(channelMask);
data.writeInt32(static_cast <uint32_t>(flags));
+ // hasOffloadInfo
if (offloadInfo == NULL) {
data.writeInt32(0);
} else {
@@ -147,6 +156,36 @@ public:
return static_cast <audio_io_handle_t> (reply.readInt32());
}
+ virtual audio_io_handle_t getOutputForAttr(
+ const audio_attributes_t *attr,
+ uint32_t samplingRate,
+ audio_format_t format,
+ audio_channel_mask_t channelMask,
+ audio_output_flags_t flags,
+ const audio_offload_info_t *offloadInfo)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ if (attr == NULL) {
+ ALOGE("Writing NULL audio attributes - shouldn't happen");
+ return (audio_io_handle_t) 0;
+ }
+ data.write(attr, sizeof(audio_attributes_t));
+ data.writeInt32(samplingRate);
+ data.writeInt32(static_cast <uint32_t>(format));
+ data.writeInt32(channelMask);
+ data.writeInt32(static_cast <uint32_t>(flags));
+ // hasOffloadInfo
+ if (offloadInfo == NULL) {
+ data.writeInt32(0);
+ } else {
+ data.writeInt32(1);
+ data.write(offloadInfo, sizeof(audio_offload_info_t));
+ }
+ remote()->transact(GET_OUTPUT_FOR_ATTR, data, &reply);
+ return static_cast <audio_io_handle_t> (reply.readInt32());
+ }
+
virtual status_t startOutput(audio_io_handle_t output,
audio_stream_type_t stream,
int session)
@@ -389,7 +428,140 @@ public:
data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
data.write(&info, sizeof(audio_offload_info_t));
remote()->transact(IS_OFFLOAD_SUPPORTED, data, &reply);
- return reply.readInt32(); }
+ return reply.readInt32();
+ }
+
+ virtual status_t listAudioPorts(audio_port_role_t role,
+ audio_port_type_t type,
+ unsigned int *num_ports,
+ struct audio_port *ports,
+ unsigned int *generation)
+ {
+ if (num_ports == NULL || (*num_ports != 0 && ports == NULL) ||
+ generation == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ unsigned int numPortsReq = (ports == NULL) ? 0 : *num_ports;
+ data.writeInt32(role);
+ data.writeInt32(type);
+ data.writeInt32(numPortsReq);
+ status_t status = remote()->transact(LIST_AUDIO_PORTS, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ *num_ports = (unsigned int)reply.readInt32();
+ }
+ if (status == NO_ERROR) {
+ if (numPortsReq > *num_ports) {
+ numPortsReq = *num_ports;
+ }
+ if (numPortsReq > 0) {
+ reply.read(ports, numPortsReq * sizeof(struct audio_port));
+ }
+ *generation = reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t getAudioPort(struct audio_port *port)
+ {
+ if (port == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(port, sizeof(struct audio_port));
+ status_t status = remote()->transact(GET_AUDIO_PORT, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(port, sizeof(struct audio_port));
+ return status;
+ }
+
+ virtual status_t createAudioPatch(const struct audio_patch *patch,
+ audio_patch_handle_t *handle)
+ {
+ if (patch == NULL || handle == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(patch, sizeof(struct audio_patch));
+ data.write(handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(CREATE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR ||
+ (status = (status_t)reply.readInt32()) != NO_ERROR) {
+ return status;
+ }
+ reply.read(handle, sizeof(audio_patch_handle_t));
+ return status;
+ }
+
+ virtual status_t releaseAudioPatch(audio_patch_handle_t handle)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(&handle, sizeof(audio_patch_handle_t));
+ status_t status = remote()->transact(RELEASE_AUDIO_PATCH, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t listAudioPatches(unsigned int *num_patches,
+ struct audio_patch *patches,
+ unsigned int *generation)
+ {
+ if (num_patches == NULL || (*num_patches != 0 && patches == NULL) ||
+ generation == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ unsigned int numPatchesReq = (patches == NULL) ? 0 : *num_patches;
+ data.writeInt32(numPatchesReq);
+ status_t status = remote()->transact(LIST_AUDIO_PATCHES, data, &reply);
+ if (status == NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ *num_patches = (unsigned int)reply.readInt32();
+ }
+ if (status == NO_ERROR) {
+ if (numPatchesReq > *num_patches) {
+ numPatchesReq = *num_patches;
+ }
+ if (numPatchesReq > 0) {
+ reply.read(patches, numPatchesReq * sizeof(struct audio_patch));
+ }
+ *generation = reply.readInt32();
+ }
+ return status;
+ }
+
+ virtual status_t setAudioPortConfig(const struct audio_port_config *config)
+ {
+ if (config == NULL) {
+ return BAD_VALUE;
+ }
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.write(config, sizeof(struct audio_port_config));
+ status_t status = remote()->transact(SET_AUDIO_PORT_CONFIG, data, &reply);
+ if (status != NO_ERROR) {
+ status = (status_t)reply.readInt32();
+ }
+ return status;
+ }
+ virtual void registerClient(const sp<IAudioPolicyServiceClient>& client)
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyService::getInterfaceDescriptor());
+ data.writeStrongBinder(client->asBinder());
+ remote()->transact(REGISTER_CLIENT, data, &reply);
+ }
};
IMPLEMENT_META_INTERFACE(AudioPolicyService, "android.media.IAudioPolicyService");
@@ -473,13 +645,38 @@ status_t BnAudioPolicyService::onTransact(
return NO_ERROR;
} break;
+ case GET_OUTPUT_FOR_ATTR: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_attributes_t *attr = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
+ data.read(attr, sizeof(audio_attributes_t));
+ uint32_t samplingRate = data.readInt32();
+ audio_format_t format = (audio_format_t) data.readInt32();
+ audio_channel_mask_t channelMask = data.readInt32();
+ audio_output_flags_t flags =
+ static_cast <audio_output_flags_t>(data.readInt32());
+ bool hasOffloadInfo = data.readInt32() != 0;
+ audio_offload_info_t offloadInfo;
+ if (hasOffloadInfo) {
+ data.read(&offloadInfo, sizeof(audio_offload_info_t));
+ }
+ audio_io_handle_t output = getOutputForAttr(attr,
+ samplingRate,
+ format,
+ channelMask,
+ flags,
+ hasOffloadInfo ? &offloadInfo : NULL);
+ free(attr);
+ reply->writeInt32(static_cast <int>(output));
+ return NO_ERROR;
+ } break;
+
case START_OUTPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
- uint32_t stream = data.readInt32();
+ audio_stream_type_t stream =
+ static_cast <audio_stream_type_t>(data.readInt32());
int session = data.readInt32();
reply->writeInt32(static_cast <uint32_t>(startOutput(output,
- (audio_stream_type_t)stream,
+ stream,
session)));
return NO_ERROR;
} break;
@@ -487,10 +684,11 @@ status_t BnAudioPolicyService::onTransact(
case STOP_OUTPUT: {
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_io_handle_t output = static_cast <audio_io_handle_t>(data.readInt32());
- uint32_t stream = data.readInt32();
+ audio_stream_type_t stream =
+ static_cast <audio_stream_type_t>(data.readInt32());
int session = data.readInt32();
reply->writeInt32(static_cast <uint32_t>(stopOutput(output,
- (audio_stream_type_t)stream,
+ stream,
session)));
return NO_ERROR;
} break;
@@ -633,7 +831,7 @@ status_t BnAudioPolicyService::onTransact(
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
uint32_t inPastMs = (uint32_t)data.readInt32();
- reply->writeInt32( isStreamActive((audio_stream_type_t) stream, inPastMs) );
+ reply->writeInt32( isStreamActive(stream, inPastMs) );
return NO_ERROR;
} break;
@@ -641,7 +839,7 @@ status_t BnAudioPolicyService::onTransact(
CHECK_INTERFACE(IAudioPolicyService, data, reply);
audio_stream_type_t stream = (audio_stream_type_t) data.readInt32();
uint32_t inPastMs = (uint32_t)data.readInt32();
- reply->writeInt32( isStreamActiveRemotely((audio_stream_type_t) stream, inPastMs) );
+ reply->writeInt32( isStreamActiveRemotely(stream, inPastMs) );
return NO_ERROR;
} break;
@@ -684,6 +882,103 @@ status_t BnAudioPolicyService::onTransact(
return NO_ERROR;
}
+ case LIST_AUDIO_PORTS: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_port_role_t role = (audio_port_role_t)data.readInt32();
+ audio_port_type_t type = (audio_port_type_t)data.readInt32();
+ unsigned int numPortsReq = data.readInt32();
+ unsigned int numPorts = numPortsReq;
+ unsigned int generation;
+ struct audio_port *ports =
+ (struct audio_port *)calloc(numPortsReq, sizeof(struct audio_port));
+ status_t status = listAudioPorts(role, type, &numPorts, ports, &generation);
+ reply->writeInt32(status);
+ reply->writeInt32(numPorts);
+
+ if (status == NO_ERROR) {
+ if (numPortsReq > numPorts) {
+ numPortsReq = numPorts;
+ }
+ reply->write(ports, numPortsReq * sizeof(struct audio_port));
+ reply->writeInt32(generation);
+ }
+ free(ports);
+ return NO_ERROR;
+ }
+
+ case GET_AUDIO_PORT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ struct audio_port port;
+ data.read(&port, sizeof(struct audio_port));
+ status_t status = getAudioPort(&port);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&port, sizeof(struct audio_port));
+ }
+ return NO_ERROR;
+ }
+
+ case CREATE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ struct audio_patch patch;
+ data.read(&patch, sizeof(struct audio_patch));
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = createAudioPatch(&patch, &handle);
+ reply->writeInt32(status);
+ if (status == NO_ERROR) {
+ reply->write(&handle, sizeof(audio_patch_handle_t));
+ }
+ return NO_ERROR;
+ }
+
+ case RELEASE_AUDIO_PATCH: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ audio_patch_handle_t handle;
+ data.read(&handle, sizeof(audio_patch_handle_t));
+ status_t status = releaseAudioPatch(handle);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+
+ case LIST_AUDIO_PATCHES: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ unsigned int numPatchesReq = data.readInt32();
+ unsigned int numPatches = numPatchesReq;
+ unsigned int generation;
+ struct audio_patch *patches =
+ (struct audio_patch *)calloc(numPatchesReq,
+ sizeof(struct audio_patch));
+ status_t status = listAudioPatches(&numPatches, patches, &generation);
+ reply->writeInt32(status);
+ reply->writeInt32(numPatches);
+ if (status == NO_ERROR) {
+ if (numPatchesReq > numPatches) {
+ numPatchesReq = numPatches;
+ }
+ reply->write(patches, numPatchesReq * sizeof(struct audio_patch));
+ reply->writeInt32(generation);
+ }
+ free(patches);
+ return NO_ERROR;
+ }
+
+ case SET_AUDIO_PORT_CONFIG: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ struct audio_port_config config;
+ data.read(&config, sizeof(struct audio_port_config));
+ status_t status = setAudioPortConfig(&config);
+ reply->writeInt32(status);
+ return NO_ERROR;
+ }
+ case REGISTER_CLIENT: {
+ CHECK_INTERFACE(IAudioPolicyService, data, reply);
+ sp<IAudioPolicyServiceClient> client = interface_cast<IAudioPolicyServiceClient>(
+ data.readStrongBinder());
+ registerClient(client);
+ return NO_ERROR;
+ } break;
+
default:
return BBinder::onTransact(code, data, reply, flags);
}
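
The policy-service variant of listAudioPorts()/listAudioPatches() above accepts a zero-sized query and returns a generation counter, which supports a count-then-fetch calling pattern. A sketch under those assumptions; the helper name is invented, and the AUDIO_PORT_ROLE_NONE / AUDIO_PORT_TYPE_NONE selectors are assumed from the system audio headers:

    // Assumes <media/IAudioPolicyService.h> is included; error handling trimmed.
    static status_t fetchAllAudioPorts(const sp<IAudioPolicyService>& aps) {
        unsigned int numPorts = 0;
        unsigned int gen1, gen2;
        // Pass 1: ports == NULL with *num_ports == 0 is accepted and only returns the count.
        status_t status = aps->listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE,
                                              &numPorts, NULL, &gen1);
        if (status != NO_ERROR || numPorts == 0) {
            return status;
        }
        struct audio_port *ports =
                (struct audio_port *)calloc(numPorts, sizeof(struct audio_port));
        // Pass 2: fetch up to numPorts entries; a changed generation means the list
        // moved underneath us and the caller should retry.
        status = aps->listAudioPorts(AUDIO_PORT_ROLE_NONE, AUDIO_PORT_TYPE_NONE,
                                     &numPorts, ports, &gen2);
        if (status == NO_ERROR && gen1 != gen2) {
            ALOGW("port list changed between calls, caller should retry");
        }
        // ... consume ports[0..numPorts-1] ...
        free(ports);
        return status;
    }
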
diff --git a/media/libmedia/IAudioPolicyServiceClient.cpp b/media/libmedia/IAudioPolicyServiceClient.cpp
new file mode 100644
index 0000000..e802277
--- /dev/null
+++ b/media/libmedia/IAudioPolicyServiceClient.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "IAudioPolicyServiceClient"
+#include <utils/Log.h>
+
+#include <stdint.h>
+#include <sys/types.h>
+
+#include <binder/Parcel.h>
+
+#include <media/IAudioPolicyServiceClient.h>
+#include <media/AudioSystem.h>
+
+namespace android {
+
+enum {
+ PORT_LIST_UPDATE = IBinder::FIRST_CALL_TRANSACTION,
+ PATCH_LIST_UPDATE
+};
+
+class BpAudioPolicyServiceClient : public BpInterface<IAudioPolicyServiceClient>
+{
+public:
+ BpAudioPolicyServiceClient(const sp<IBinder>& impl)
+ : BpInterface<IAudioPolicyServiceClient>(impl)
+ {
+ }
+
+ void onAudioPortListUpdate()
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
+ remote()->transact(PORT_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+
+ void onAudioPatchListUpdate()
+ {
+ Parcel data, reply;
+ data.writeInterfaceToken(IAudioPolicyServiceClient::getInterfaceDescriptor());
+ remote()->transact(PATCH_LIST_UPDATE, data, &reply, IBinder::FLAG_ONEWAY);
+ }
+};
+
+IMPLEMENT_META_INTERFACE(AudioPolicyServiceClient, "android.media.IAudioPolicyServiceClient");
+
+// ----------------------------------------------------------------------
+
+status_t BnAudioPolicyServiceClient::onTransact(
+ uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
+{
+ switch (code) {
+ case PORT_LIST_UPDATE: {
+ CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
+ onAudioPortListUpdate();
+ return NO_ERROR;
+ } break;
+ case PATCH_LIST_UPDATE: {
+ CHECK_INTERFACE(IAudioPolicyServiceClient, data, reply);
+ onAudioPatchListUpdate();
+ return NO_ERROR;
+ } break;
+ default:
+ return BBinder::onTransact(code, data, reply, flags);
+ }
+}
+
+// ----------------------------------------------------------------------------
+
+}; // namespace android
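
Both new one-way callbacks simply notify the client that a list changed; the client is expected to re-query through IAudioPolicyService. A minimal consumer sketch with an invented class name (the real client object that AudioSystem registers is not part of this hunk):

    // Assumes <media/IAudioPolicyServiceClient.h> is included.
    class MyPolicyClient : public BnAudioPolicyServiceClient {
    public:
        virtual void onAudioPortListUpdate() {
            // Cached port list is stale; re-query with IAudioPolicyService::listAudioPorts().
            ALOGV("audio port list changed");
        }
        virtual void onAudioPatchListUpdate() {
            // Likewise for listAudioPatches().
            ALOGV("audio patch list changed");
        }
    };

    // Registration pairs with the REGISTER_CLIENT transaction added above, e.g.:
    //     sp<IAudioPolicyServiceClient> client = new MyPolicyClient();
    //     aps->registerClient(client);
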
diff --git a/media/libmedia/IAudioRecord.cpp b/media/libmedia/IAudioRecord.cpp
index 4a7de65..8a4a383 100644
--- a/media/libmedia/IAudioRecord.cpp
+++ b/media/libmedia/IAudioRecord.cpp
@@ -29,7 +29,7 @@
namespace android {
enum {
- GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
+ UNUSED_WAS_GET_CBLK = IBinder::FIRST_CALL_TRANSACTION,
START,
STOP
};
@@ -42,18 +42,6 @@ public:
{
}
- virtual sp<IMemory> getCblk() const
- {
- Parcel data, reply;
- sp<IMemory> cblk;
- data.writeInterfaceToken(IAudioRecord::getInterfaceDescriptor());
- status_t status = remote()->transact(GET_CBLK, data, &reply);
- if (status == NO_ERROR) {
- cblk = interface_cast<IMemory>(reply.readStrongBinder());
- }
- return cblk;
- }
-
virtual status_t start(int /*AudioSystem::sync_event_t*/ event, int triggerSession)
{
Parcel data, reply;
@@ -86,11 +74,6 @@ status_t BnAudioRecord::onTransact(
uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags)
{
switch (code) {
- case GET_CBLK: {
- CHECK_INTERFACE(IAudioRecord, data, reply);
- reply->writeStrongBinder(getCblk()->asBinder());
- return NO_ERROR;
- } break;
case START: {
CHECK_INTERFACE(IAudioRecord, data, reply);
int /*AudioSystem::sync_event_t*/ event = data.readInt32();
diff --git a/media/libmedia/IAudioTrack.cpp b/media/libmedia/IAudioTrack.cpp
index e9df704..265bb1b 100644
--- a/media/libmedia/IAudioTrack.cpp
+++ b/media/libmedia/IAudioTrack.cpp
@@ -60,6 +60,9 @@ public:
status_t status = remote()->transact(GET_CBLK, data, &reply);
if (status == NO_ERROR) {
cblk = interface_cast<IMemory>(reply.readStrongBinder());
+ if (cblk != 0 && cblk->pointer() == NULL) {
+ cblk.clear();
+ }
}
return cblk;
}
@@ -122,6 +125,9 @@ public:
status = reply.readInt32();
if (status == NO_ERROR) {
*buffer = interface_cast<IMemory>(reply.readStrongBinder());
+ if (*buffer != 0 && (*buffer)->pointer() == NULL) {
+ (*buffer).clear();
+ }
}
}
return status;
diff --git a/media/libmedia/IEffect.cpp b/media/libmedia/IEffect.cpp
index a303a8f..b94012a 100644
--- a/media/libmedia/IEffect.cpp
+++ b/media/libmedia/IEffect.cpp
@@ -117,6 +117,9 @@ public:
status_t status = remote()->transact(GET_CBLK, data, &reply);
if (status == NO_ERROR) {
cblk = interface_cast<IMemory>(reply.readStrongBinder());
+ if (cblk != 0 && cblk->pointer() == NULL) {
+ cblk.clear();
+ }
}
return cblk;
}
diff --git a/media/libmedia/IMediaDeathNotifier.cpp b/media/libmedia/IMediaDeathNotifier.cpp
index 9db5b1b..10b4934 100644
--- a/media/libmedia/IMediaDeathNotifier.cpp
+++ b/media/libmedia/IMediaDeathNotifier.cpp
@@ -75,7 +75,7 @@ IMediaDeathNotifier::removeObitRecipient(const wp<IMediaDeathNotifier>& recipien
}
void
-IMediaDeathNotifier::DeathNotifier::binderDied(const wp<IBinder>& who) {
+IMediaDeathNotifier::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
ALOGW("media server died");
// Need to do this with the lock held
diff --git a/media/libmedia/IMediaHTTPConnection.cpp b/media/libmedia/IMediaHTTPConnection.cpp
new file mode 100644
index 0000000..7e26ee6
--- /dev/null
+++ b/media/libmedia/IMediaHTTPConnection.cpp
@@ -0,0 +1,182 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IMediaHTTPConnection"
+#include <utils/Log.h>
+
+#include <media/IMediaHTTPConnection.h>
+
+#include <binder/IMemory.h>
+#include <binder/Parcel.h>
+#include <utils/String8.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+namespace android {
+
+enum {
+ CONNECT = IBinder::FIRST_CALL_TRANSACTION,
+ DISCONNECT,
+ READ_AT,
+ GET_SIZE,
+ GET_MIME_TYPE,
+ GET_URI
+};
+
+struct BpMediaHTTPConnection : public BpInterface<IMediaHTTPConnection> {
+ BpMediaHTTPConnection(const sp<IBinder> &impl)
+ : BpInterface<IMediaHTTPConnection>(impl) {
+ }
+
+ virtual bool connect(
+ const char *uri, const KeyedVector<String8, String8> *headers) {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ String16 tmp(uri);
+ data.writeString16(tmp);
+
+ tmp = String16("");
+ if (headers != NULL) {
+ for (size_t i = 0; i < headers->size(); ++i) {
+ String16 key(headers->keyAt(i).string());
+ String16 val(headers->valueAt(i).string());
+
+ tmp.append(key);
+ tmp.append(String16(": "));
+ tmp.append(val);
+ tmp.append(String16("\r\n"));
+ }
+ }
+ data.writeString16(tmp);
+
+ remote()->transact(CONNECT, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ sp<IBinder> binder = reply.readStrongBinder();
+ mMemory = interface_cast<IMemory>(binder);
+
+ return mMemory != NULL;
+ }
+
+ virtual void disconnect() {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(DISCONNECT, data, &reply);
+ }
+
+ virtual ssize_t readAt(off64_t offset, void *buffer, size_t size) {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ data.writeInt64(offset);
+ data.writeInt32(size);
+
+ status_t err = remote()->transact(READ_AT, data, &reply);
+ if (err != OK) {
+ ALOGE("remote readAt failed");
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ int32_t len = reply.readInt32();
+
+ if (len > 0) {
+ memcpy(buffer, mMemory->pointer(), len);
+ }
+
+ return len;
+ }
+
+ virtual off64_t getSize() {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(GET_SIZE, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ return reply.readInt64();
+ }
+
+ virtual status_t getMIMEType(String8 *mimeType) {
+ *mimeType = String8("");
+
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(GET_MIME_TYPE, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ *mimeType = String8(reply.readString16());
+
+ return OK;
+ }
+
+ virtual status_t getUri(String8 *uri) {
+ *uri = String8("");
+
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPConnection::getInterfaceDescriptor());
+
+ remote()->transact(GET_URI, data, &reply);
+
+ int32_t exceptionCode = reply.readExceptionCode();
+
+ if (exceptionCode) {
+ return UNKNOWN_ERROR;
+ }
+
+ *uri = String8(reply.readString16());
+
+ return OK;
+ }
+
+private:
+ sp<IMemory> mMemory;
+};
+
+IMPLEMENT_META_INTERFACE(
+ MediaHTTPConnection, "android.media.IMediaHTTPConnection");
+
+} // namespace android
+
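
A minimal sketch of how a native caller might drive this new proxy, assuming it already holds an IMediaHTTPConnection binder; the helper name, URL handling, and buffer size are illustrative only:

    #include <media/IMediaHTTPConnection.h>

    using namespace android;

    // Fetch the first bytes of a resource through the remote HTTP connection.
    static ssize_t fetchPrefix(const sp<IMediaHTTPConnection> &conn,
                               const char *url, void *buf, size_t cap) {
        if (!conn->connect(url, NULL /* headers */)) {
            return -1;  // connect() returns false when no shared memory arrives
        }
        ssize_t n = conn->readAt(0 /* offset */, buf, cap);
        conn->disconnect();
        return n;
    }
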
diff --git a/media/libmedia/IMediaHTTPService.cpp b/media/libmedia/IMediaHTTPService.cpp
new file mode 100644
index 0000000..1260582
--- /dev/null
+++ b/media/libmedia/IMediaHTTPService.cpp
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "IMediaHTTPService"
+#include <utils/Log.h>
+
+#include <media/IMediaHTTPService.h>
+
+#include <binder/Parcel.h>
+#include <media/IMediaHTTPConnection.h>
+
+namespace android {
+
+enum {
+ MAKE_HTTP = IBinder::FIRST_CALL_TRANSACTION,
+};
+
+struct BpMediaHTTPService : public BpInterface<IMediaHTTPService> {
+ BpMediaHTTPService(const sp<IBinder> &impl)
+ : BpInterface<IMediaHTTPService>(impl) {
+ }
+
+ virtual sp<IMediaHTTPConnection> makeHTTPConnection() {
+ Parcel data, reply;
+ data.writeInterfaceToken(
+ IMediaHTTPService::getInterfaceDescriptor());
+
+ remote()->transact(MAKE_HTTP, data, &reply);
+
+ status_t err = reply.readInt32();
+
+ if (err != OK) {
+ return NULL;
+ }
+
+ return interface_cast<IMediaHTTPConnection>(reply.readStrongBinder());
+ }
+};
+
+IMPLEMENT_META_INTERFACE(
+ MediaHTTPService, "android.media.IMediaHTTPService");
+
+} // namespace android
+
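
Correspondingly, a caller obtains connections from the app-supplied service; a hedged sketch with an invented helper name and trimmed error handling:

    #include <media/IMediaHTTPService.h>
    #include <media/IMediaHTTPConnection.h>

    using namespace android;

    // Ask the app-provided HTTP service for a fresh connection object;
    // a NULL service simply means no caller-supplied HTTP stack.
    static sp<IMediaHTTPConnection> openConnection(
            const sp<IMediaHTTPService> &service) {
        if (service == NULL) {
            return NULL;
        }
        return service->makeHTTPConnection();  // may also be NULL on failure
    }
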
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index a91ad49..38f717c 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -15,9 +15,12 @@
** limitations under the License.
*/
+#include <inttypes.h>
#include <stdint.h>
#include <sys/types.h>
+
#include <binder/Parcel.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaMetadataRetriever.h>
#include <utils/String8.h>
#include <utils/KeyedVector.h>
@@ -84,10 +87,16 @@ public:
}
status_t setDataSource(
- const char *srcUrl, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *srcUrl,
+ const KeyedVector<String8, String8> *headers)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
+ data.writeInt32(httpService != NULL);
+ if (httpService != NULL) {
+ data.writeStrongBinder(httpService->asBinder());
+ }
data.writeCString(srcUrl);
if (headers == NULL) {
@@ -118,7 +127,7 @@ public:
sp<IMemory> getFrameAtTime(int64_t timeUs, int option)
{
- ALOGV("getTimeAtTime: time(%lld us) and option(%d)", timeUs, option);
+ ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
Parcel data, reply;
data.writeInterfaceToken(IMediaMetadataRetriever::getInterfaceDescriptor());
data.writeInt64(timeUs);
@@ -195,6 +204,13 @@ status_t BnMediaMetadataRetriever::onTransact(
} break;
case SET_DATA_SOURCE_URL: {
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
+
+ sp<IMediaHTTPService> httpService;
+ if (data.readInt32()) {
+ httpService =
+ interface_cast<IMediaHTTPService>(data.readStrongBinder());
+ }
+
const char* srcUrl = data.readCString();
KeyedVector<String8, String8> headers;
@@ -206,7 +222,8 @@ status_t BnMediaMetadataRetriever::onTransact(
}
reply->writeInt32(
- setDataSource(srcUrl, numHeaders > 0 ? &headers : NULL));
+ setDataSource(
+ httpService, srcUrl, numHeaders > 0 ? &headers : NULL));
return NO_ERROR;
} break;
@@ -222,7 +239,7 @@ status_t BnMediaMetadataRetriever::onTransact(
CHECK_INTERFACE(IMediaMetadataRetriever, data, reply);
int64_t timeUs = data.readInt64();
int option = data.readInt32();
- ALOGV("getTimeAtTime: time(%lld us) and option(%d)", timeUs, option);
+ ALOGV("getTimeAtTime: time(%" PRId64 " us) and option(%d)", timeUs, option);
#ifndef DISABLE_GROUP_SCHEDULE_HACK
setSchedPolicy(data);
#endif
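
The optional-binder convention used here (a presence flag followed by the binder itself) is symmetric on the proxy and stub sides; the pair of fragments below condenses the pattern exactly as this patch applies it:

    // Proxy side: serialize an optional IMediaHTTPService.
    data.writeInt32(httpService != NULL);
    if (httpService != NULL) {
        data.writeStrongBinder(httpService->asBinder());
    }

    // Stub side: deserialize it, leaving the sp<> NULL when it was absent.
    sp<IMediaHTTPService> httpService;
    if (data.readInt32()) {
        httpService = interface_cast<IMediaHTTPService>(data.readStrongBinder());
    }
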
diff --git a/media/libmedia/IMediaPlayer.cpp b/media/libmedia/IMediaPlayer.cpp
index e79bcd2..d778d05 100644
--- a/media/libmedia/IMediaPlayer.cpp
+++ b/media/libmedia/IMediaPlayer.cpp
@@ -21,6 +21,7 @@
#include <binder/Parcel.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayer.h>
#include <media/IStreamSource.h>
@@ -75,11 +76,17 @@ public:
remote()->transact(DISCONNECT, data, &reply);
}
- status_t setDataSource(const char* url,
+ status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
const KeyedVector<String8, String8>* headers)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayer::getInterfaceDescriptor());
+ data.writeInt32(httpService != NULL);
+ if (httpService != NULL) {
+ data.writeStrongBinder(httpService->asBinder());
+ }
data.writeCString(url);
if (headers == NULL) {
data.writeInt32(0);
@@ -355,6 +362,13 @@ status_t BnMediaPlayer::onTransact(
} break;
case SET_DATA_SOURCE_URL: {
CHECK_INTERFACE(IMediaPlayer, data, reply);
+
+ sp<IMediaHTTPService> httpService;
+ if (data.readInt32()) {
+ httpService =
+ interface_cast<IMediaHTTPService>(data.readStrongBinder());
+ }
+
const char* url = data.readCString();
KeyedVector<String8, String8> headers;
int32_t numHeaders = data.readInt32();
@@ -363,7 +377,8 @@ status_t BnMediaPlayer::onTransact(
String8 value = data.readString8();
headers.add(key, value);
}
- reply->writeInt32(setDataSource(url, numHeaders > 0 ? &headers : NULL));
+ reply->writeInt32(setDataSource(
+ httpService, url, numHeaders > 0 ? &headers : NULL));
return NO_ERROR;
} break;
case SET_DATA_SOURCE_FD: {
diff --git a/media/libmedia/IMediaPlayerService.cpp b/media/libmedia/IMediaPlayerService.cpp
index 3c22b4c..d116b14 100644
--- a/media/libmedia/IMediaPlayerService.cpp
+++ b/media/libmedia/IMediaPlayerService.cpp
@@ -23,6 +23,7 @@
#include <media/ICrypto.h>
#include <media/IDrm.h>
#include <media/IHDCP.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/IMediaRecorder.h>
#include <media/IOMX.h>
@@ -48,7 +49,6 @@ enum {
ADD_BATTERY_DATA,
PULL_BATTERY_DATA,
LISTEN_FOR_REMOTE_DISPLAY,
- UPDATE_PROXY_CONFIG,
};
class BpMediaPlayerService: public BpInterface<IMediaPlayerService>
@@ -86,12 +86,21 @@ public:
return interface_cast<IMediaRecorder>(reply.readStrongBinder());
}
- virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
+ virtual status_t decode(
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
+ uint32_t *pSampleRate,
+ int* pNumChannels,
+ audio_format_t* pFormat,
+ const sp<IMemoryHeap>& heap,
+ size_t *pSize)
{
Parcel data, reply;
data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
+ data.writeInt32(httpService != NULL);
+ if (httpService != NULL) {
+ data.writeStrongBinder(httpService->asBinder());
+ }
data.writeCString(url);
data.writeStrongBinder(heap->asBinder());
status_t status = remote()->transact(DECODE_URL, data, &reply);
@@ -182,25 +191,6 @@ public:
remote()->transact(LISTEN_FOR_REMOTE_DISPLAY, data, &reply);
return interface_cast<IRemoteDisplay>(reply.readStrongBinder());
}
-
- virtual status_t updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- Parcel data, reply;
-
- data.writeInterfaceToken(IMediaPlayerService::getInterfaceDescriptor());
- if (host == NULL) {
- data.writeInt32(0);
- } else {
- data.writeInt32(1);
- data.writeCString(host);
- data.writeInt32(port);
- data.writeCString(exclusionList);
- }
-
- remote()->transact(UPDATE_PROXY_CONFIG, data, &reply);
-
- return reply.readInt32();
- }
};
IMPLEMENT_META_INTERFACE(MediaPlayerService, "android.media.IMediaPlayerService");
@@ -222,13 +212,25 @@ status_t BnMediaPlayerService::onTransact(
} break;
case DECODE_URL: {
CHECK_INTERFACE(IMediaPlayerService, data, reply);
+ sp<IMediaHTTPService> httpService;
+ if (data.readInt32()) {
+ httpService =
+ interface_cast<IMediaHTTPService>(data.readStrongBinder());
+ }
const char* url = data.readCString();
sp<IMemoryHeap> heap = interface_cast<IMemoryHeap>(data.readStrongBinder());
uint32_t sampleRate;
int numChannels;
audio_format_t format;
size_t size;
- status_t status = decode(url, &sampleRate, &numChannels, &format, heap, &size);
+ status_t status =
+ decode(httpService,
+ url,
+ &sampleRate,
+ &numChannels,
+ &format,
+ heap,
+ &size);
reply->writeInt32(status);
if (status == NO_ERROR) {
reply->writeInt32(sampleRate);
@@ -316,24 +318,6 @@ status_t BnMediaPlayerService::onTransact(
reply->writeStrongBinder(display->asBinder());
return NO_ERROR;
} break;
- case UPDATE_PROXY_CONFIG:
- {
- CHECK_INTERFACE(IMediaPlayerService, data, reply);
-
- const char *host = NULL;
- int32_t port = 0;
- const char *exclusionList = NULL;
-
- if (data.readInt32()) {
- host = data.readCString();
- port = data.readInt32();
- exclusionList = data.readCString();
- }
-
- reply->writeInt32(updateProxyConfig(host, port, exclusionList));
-
- return OK;
- }
default:
return BBinder::onTransact(code, data, reply, flags);
}
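
Because these transaction codes are assigned by enumerating from IBinder::FIRST_CALL_TRANSACTION, removing UPDATE_PROXY_CONFIG from the end of the enum leaves every remaining code unchanged; a generic sketch of that convention (names invented for illustration):

    enum {
        FIRST_OP = IBinder::FIRST_CALL_TRANSACTION,  // code 1
        SECOND_OP,                                   // code 2
        LAST_OP                                      // code 3; only a final
    };                                               // enumerator can be removed
                                                     // without renumbering the rest
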
diff --git a/media/libmedia/IMediaRecorder.cpp b/media/libmedia/IMediaRecorder.cpp
index 8e58162..95af006 100644
--- a/media/libmedia/IMediaRecorder.cpp
+++ b/media/libmedia/IMediaRecorder.cpp
@@ -17,6 +17,10 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "IMediaRecorder"
+
+#include <inttypes.h>
+#include <unistd.h>
+
#include <utils/Log.h>
#include <binder/Parcel.h>
#include <camera/ICamera.h>
@@ -24,8 +28,6 @@
#include <media/IMediaRecorder.h>
#include <gui/Surface.h>
#include <gui/IGraphicBufferProducer.h>
-#include <unistd.h>
-
namespace android {
@@ -167,7 +169,7 @@ public:
}
status_t setOutputFile(int fd, int64_t offset, int64_t length) {
- ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
Parcel data, reply;
data.writeInterfaceToken(IMediaRecorder::getInterfaceDescriptor());
data.writeFileDescriptor(fd);
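
The PRId64 conversions above follow the standard <inttypes.h> idiom for printing int64_t portably across 32- and 64-bit builds; a minimal standalone example:

    #include <inttypes.h>
    #include <stdio.h>

    int main() {
        int64_t offset = 1LL << 40;
        // "%lld" assumes long long; PRId64 expands to whatever length modifier
        // matches int64_t on the target ABI.
        printf("offset=%" PRId64 "\n", offset);
        return 0;
    }
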
diff --git a/media/libmedia/JetPlayer.cpp b/media/libmedia/JetPlayer.cpp
index e914b34..f0f1832 100644
--- a/media/libmedia/JetPlayer.cpp
+++ b/media/libmedia/JetPlayer.cpp
@@ -90,7 +90,7 @@ int JetPlayer::init()
pLibConfig->sampleRate,
AUDIO_FORMAT_PCM_16_BIT,
audio_channel_out_mask_from_count(pLibConfig->numChannels),
- mTrackBufferSize,
+ (size_t) mTrackBufferSize,
AUDIO_OUTPUT_FLAG_NONE);
// create render and playback thread
diff --git a/media/libmedia/MediaProfiles.cpp b/media/libmedia/MediaProfiles.cpp
index 8319cd7..e9e453b 100644
--- a/media/libmedia/MediaProfiles.cpp
+++ b/media/libmedia/MediaProfiles.cpp
@@ -69,6 +69,7 @@ const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
{"480p", CAMCORDER_QUALITY_480P},
{"720p", CAMCORDER_QUALITY_720P},
{"1080p", CAMCORDER_QUALITY_1080P},
+ {"2160p", CAMCORDER_QUALITY_2160P},
{"qvga", CAMCORDER_QUALITY_QVGA},
{"timelapselow", CAMCORDER_QUALITY_TIME_LAPSE_LOW},
@@ -78,11 +79,18 @@ const MediaProfiles::NameToTagMap MediaProfiles::sCamcorderQualityNameMap[] = {
{"timelapse480p", CAMCORDER_QUALITY_TIME_LAPSE_480P},
{"timelapse720p", CAMCORDER_QUALITY_TIME_LAPSE_720P},
{"timelapse1080p", CAMCORDER_QUALITY_TIME_LAPSE_1080P},
+ {"timelapse2160p", CAMCORDER_QUALITY_TIME_LAPSE_2160P},
{"timelapseqvga", CAMCORDER_QUALITY_TIME_LAPSE_QVGA},
};
+#if LOG_NDEBUG
+#define UNUSED __unused
+#else
+#define UNUSED
+#endif
+
/*static*/ void
-MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec)
+MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec UNUSED)
{
ALOGV("video codec:");
ALOGV("codec = %d", codec.mCodec);
@@ -93,7 +101,7 @@ MediaProfiles::logVideoCodec(const MediaProfiles::VideoCodec& codec)
}
/*static*/ void
-MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec)
+MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec UNUSED)
{
ALOGV("audio codec:");
ALOGV("codec = %d", codec.mCodec);
@@ -103,7 +111,7 @@ MediaProfiles::logAudioCodec(const MediaProfiles::AudioCodec& codec)
}
/*static*/ void
-MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap)
+MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap UNUSED)
{
ALOGV("video encoder cap:");
ALOGV("codec = %d", cap.mCodec);
@@ -114,7 +122,7 @@ MediaProfiles::logVideoEncoderCap(const MediaProfiles::VideoEncoderCap& cap)
}
/*static*/ void
-MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap)
+MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap UNUSED)
{
ALOGV("audio encoder cap:");
ALOGV("codec = %d", cap.mCodec);
@@ -124,21 +132,21 @@ MediaProfiles::logAudioEncoderCap(const MediaProfiles::AudioEncoderCap& cap)
}
/*static*/ void
-MediaProfiles::logVideoDecoderCap(const MediaProfiles::VideoDecoderCap& cap)
+MediaProfiles::logVideoDecoderCap(const MediaProfiles::VideoDecoderCap& cap UNUSED)
{
ALOGV("video decoder cap:");
ALOGV("codec = %d", cap.mCodec);
}
/*static*/ void
-MediaProfiles::logAudioDecoderCap(const MediaProfiles::AudioDecoderCap& cap)
+MediaProfiles::logAudioDecoderCap(const MediaProfiles::AudioDecoderCap& cap UNUSED)
{
ALOGV("audio codec cap:");
ALOGV("codec = %d", cap.mCodec);
}
/*static*/ void
-MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap)
+MediaProfiles::logVideoEditorCap(const MediaProfiles::VideoEditorCap& cap UNUSED)
{
ALOGV("videoeditor cap:");
ALOGV("mMaxInputFrameWidth = %d", cap.mMaxInputFrameWidth);
@@ -467,7 +475,7 @@ static bool isTimelapseProfile(camcorder_quality quality) {
}
void MediaProfiles::initRequiredProfileRefs(const Vector<int>& cameraIds) {
- ALOGV("Number of camera ids: %d", cameraIds.size());
+ ALOGV("Number of camera ids: %zu", cameraIds.size());
CHECK(cameraIds.size() > 0);
mRequiredProfileRefs = new RequiredProfiles[cameraIds.size()];
for (size_t i = 0, n = cameraIds.size(); i < n; ++i) {
@@ -594,14 +602,14 @@ void MediaProfiles::checkAndAddRequiredProfilesIfNecessary() {
int index = getCamcorderProfileIndex(cameraId, profile->mQuality);
if (index != -1) {
- ALOGV("Profile quality %d for camera %d already exists",
+ ALOGV("Profile quality %d for camera %zu already exists",
profile->mQuality, cameraId);
CHECK(index == refIndex);
continue;
}
// Insert the new profile
- ALOGV("Add a profile: quality %d=>%d for camera %d",
+ ALOGV("Add a profile: quality %d=>%d for camera %zu",
mCamcorderProfiles[info->mRefProfileIndex]->mQuality,
profile->mQuality, cameraId);
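
The UNUSED macro introduced above marks parameters __unused only when ALOGV compiles to nothing (LOG_NDEBUG set), so release builds stay warning-free while debug builds still reference the argument; the same pattern in isolation, with an invented function name:

    #define LOG_TAG "UnusedExample"
    #include <utils/Log.h>

    #if LOG_NDEBUG
    #define UNUSED __unused
    #else
    #define UNUSED
    #endif

    // With LOG_NDEBUG=1 the ALOGV below expands to nothing, leaving the
    // parameter otherwise unreferenced; UNUSED then expands to __unused.
    static void logWidth(int width UNUSED) {
        ALOGV("width = %d", width);
    }
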
diff --git a/media/libmedia/MediaScannerClient.cpp b/media/libmedia/MediaScannerClient.cpp
index 93a4a4c..1661f04 100644
--- a/media/libmedia/MediaScannerClient.cpp
+++ b/media/libmedia/MediaScannerClient.cpp
@@ -14,217 +14,57 @@
* limitations under the License.
*/
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaScannerClient"
+#include <utils/Log.h>
+
#include <media/mediascanner.h>
+#include "CharacterEncodingDetector.h"
#include "StringArray.h"
-#include "autodetect.h"
-#include "unicode/ucnv.h"
-#include "unicode/ustring.h"
-
namespace android {
MediaScannerClient::MediaScannerClient()
- : mNames(NULL),
- mValues(NULL),
- mLocaleEncoding(kEncodingNone)
+ : mEncodingDetector(NULL)
{
}
MediaScannerClient::~MediaScannerClient()
{
- delete mNames;
- delete mValues;
+ delete mEncodingDetector;
}
void MediaScannerClient::setLocale(const char* locale)
{
- if (!locale) return;
-
- if (!strncmp(locale, "ja", 2))
- mLocaleEncoding = kEncodingShiftJIS;
- else if (!strncmp(locale, "ko", 2))
- mLocaleEncoding = kEncodingEUCKR;
- else if (!strncmp(locale, "zh", 2)) {
- if (!strcmp(locale, "zh_CN")) {
- // simplified chinese for mainland China
- mLocaleEncoding = kEncodingGBK;
- } else {
- // assume traditional for non-mainland Chinese locales (Taiwan, Hong Kong, Singapore)
- mLocaleEncoding = kEncodingBig5;
- }
- }
+ mLocale = locale; // not currently used
}
void MediaScannerClient::beginFile()
{
- mNames = new StringArray;
- mValues = new StringArray;
+ delete mEncodingDetector;
+ mEncodingDetector = new CharacterEncodingDetector();
}
status_t MediaScannerClient::addStringTag(const char* name, const char* value)
{
- if (mLocaleEncoding != kEncodingNone) {
- // don't bother caching strings that are all ASCII.
- // call handleStringTag directly instead.
- // check to see if value (which should be utf8) has any non-ASCII characters
- bool nonAscii = false;
- const char* chp = value;
- char ch;
- while ((ch = *chp++)) {
- if (ch & 0x80) {
- nonAscii = true;
- break;
- }
- }
-
- if (nonAscii) {
- // save the strings for later so they can be used for native encoding detection
- mNames->push_back(name);
- mValues->push_back(value);
- return OK;
- }
- // else fall through
- }
-
- // autodetection is not necessary, so no need to cache the values
- // pass directly to the client instead
- return handleStringTag(name, value);
-}
-
-static uint32_t possibleEncodings(const char* s)
-{
- uint32_t result = kEncodingAll;
- // if s contains a native encoding, then it was mistakenly encoded in utf8 as if it were latin-1
- // so we need to reverse the latin-1 -> utf8 conversion to get the native chars back
- uint8_t ch1, ch2;
- uint8_t* chp = (uint8_t *)s;
-
- while ((ch1 = *chp++)) {
- if (ch1 & 0x80) {
- ch2 = *chp++;
- ch1 = ((ch1 << 6) & 0xC0) | (ch2 & 0x3F);
- // ch1 is now the first byte of the potential native char
-
- ch2 = *chp++;
- if (ch2 & 0x80)
- ch2 = ((ch2 << 6) & 0xC0) | (*chp++ & 0x3F);
- // ch2 is now the second byte of the potential native char
- int ch = (int)ch1 << 8 | (int)ch2;
- result &= findPossibleEncodings(ch);
- }
- // else ASCII character, which could be anything
- }
-
- return result;
-}
-
-void MediaScannerClient::convertValues(uint32_t encoding)
-{
- const char* enc = NULL;
- switch (encoding) {
- case kEncodingShiftJIS:
- enc = "shift-jis";
- break;
- case kEncodingGBK:
- enc = "gbk";
- break;
- case kEncodingBig5:
- enc = "Big5";
- break;
- case kEncodingEUCKR:
- enc = "EUC-KR";
- break;
- }
-
- if (enc) {
- UErrorCode status = U_ZERO_ERROR;
-
- UConverter *conv = ucnv_open(enc, &status);
- if (U_FAILURE(status)) {
- ALOGE("could not create UConverter for %s", enc);
- return;
- }
- UConverter *utf8Conv = ucnv_open("UTF-8", &status);
- if (U_FAILURE(status)) {
- ALOGE("could not create UConverter for UTF-8");
- ucnv_close(conv);
- return;
- }
-
- // for each value string, convert from native encoding to UTF-8
- for (int i = 0; i < mNames->size(); i++) {
- // first we need to untangle the utf8 and convert it back to the original bytes
- // since we are reducing the length of the string, we can do this in place
- uint8_t* src = (uint8_t *)mValues->getEntry(i);
- int len = strlen((char *)src);
- uint8_t* dest = src;
-
- uint8_t uch;
- while ((uch = *src++)) {
- if (uch & 0x80)
- *dest++ = ((uch << 6) & 0xC0) | (*src++ & 0x3F);
- else
- *dest++ = uch;
- }
- *dest = 0;
-
- // now convert from native encoding to UTF-8
- const char* source = mValues->getEntry(i);
- int targetLength = len * 3 + 1;
- char* buffer = new char[targetLength];
- // don't normally check for NULL, but in this case targetLength may be large
- if (!buffer)
- break;
- char* target = buffer;
-
- ucnv_convertEx(utf8Conv, conv, &target, target + targetLength,
- &source, (const char *)dest, NULL, NULL, NULL, NULL, TRUE, TRUE, &status);
- if (U_FAILURE(status)) {
- ALOGE("ucnv_convertEx failed: %d", status);
- mValues->setEntry(i, "???");
- } else {
- // zero terminate
- *target = 0;
- mValues->setEntry(i, buffer);
- }
-
- delete[] buffer;
- }
-
- ucnv_close(conv);
- ucnv_close(utf8Conv);
- }
+ mEncodingDetector->addTag(name, value);
+ return OK;
}
void MediaScannerClient::endFile()
{
- if (mLocaleEncoding != kEncodingNone) {
- int size = mNames->size();
- uint32_t encoding = kEncodingAll;
-
- // compute a bit mask containing all possible encodings
- for (int i = 0; i < mNames->size(); i++)
- encoding &= possibleEncodings(mValues->getEntry(i));
-
- // if the locale encoding matches, then assume we have a native encoding.
- if (encoding & mLocaleEncoding)
- convertValues(mLocaleEncoding);
-
- // finally, push all name/value pairs to the client
- for (int i = 0; i < mNames->size(); i++) {
- status_t status = handleStringTag(mNames->getEntry(i), mValues->getEntry(i));
- if (status) {
- break;
- }
+ mEncodingDetector->detectAndConvert();
+
+ int size = mEncodingDetector->size();
+ if (size) {
+ for (int i = 0; i < size; i++) {
+ const char *name;
+ const char *value;
+ mEncodingDetector->getTag(i, &name, &value);
+ handleStringTag(name, value);
}
}
- // else addStringTag() has done all the work so we have nothing to do
-
- delete mNames;
- delete mValues;
- mNames = NULL;
- mValues = NULL;
}
} // namespace android
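
The rewritten client now funnels every tag through CharacterEncodingDetector and converts once per file; a condensed sketch of the per-file sequence, using only the detector methods visible in this patch (rawArtist and rawTitle are placeholder byte strings, and the header is libmedia-internal):

    #include "CharacterEncodingDetector.h"

    CharacterEncodingDetector detector;

    // 1. Collect the raw, possibly mis-encoded name/value pairs for one file.
    detector.addTag("artist", rawArtist);
    detector.addTag("title", rawTitle);

    // 2. Guess the shared source encoding and convert everything to UTF-8.
    detector.detectAndConvert();

    // 3. Read the converted pairs back, as endFile() does before calling
    //    handleStringTag() on each one.
    for (int i = 0; i < detector.size(); i++) {
        const char *name;
        const char *value;
        detector.getTag(i, &name, &value);
    }
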
diff --git a/media/libmedia/SoundPool.cpp b/media/libmedia/SoundPool.cpp
index 22e9fad..2aa0592 100644
--- a/media/libmedia/SoundPool.cpp
+++ b/media/libmedia/SoundPool.cpp
@@ -16,11 +16,15 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoundPool"
+
+#include <inttypes.h>
+
#include <utils/Log.h>
#define USE_SHARED_MEM_BUFFER
#include <media/AudioTrack.h>
+#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
#include <media/SoundPool.h>
#include "SoundPoolThread.h"
@@ -199,7 +203,7 @@ SoundChannel* SoundPool::findNextChannel(int channelID)
return NULL;
}
-int SoundPool::load(const char* path, int priority)
+int SoundPool::load(const char* path, int priority __unused)
{
ALOGV("load: path=%s, priority=%d", path, priority);
Mutex::Autolock lock(&mLock);
@@ -209,9 +213,9 @@ int SoundPool::load(const char* path, int priority)
return sample->sampleID();
}
-int SoundPool::load(int fd, int64_t offset, int64_t length, int priority)
+int SoundPool::load(int fd, int64_t offset, int64_t length, int priority __unused)
{
- ALOGV("load: fd=%d, offset=%lld, length=%lld, priority=%d",
+ ALOGV("load: fd=%d, offset=%" PRId64 ", length=%" PRId64 ", priority=%d",
fd, offset, length, priority);
Mutex::Autolock lock(&mLock);
sp<Sample> sample = new Sample(++mNextSampleID, fd, offset, length);
@@ -461,7 +465,8 @@ Sample::Sample(int sampleID, int fd, int64_t offset, int64_t length)
mFd = dup(fd);
mOffset = offset;
mLength = length;
- ALOGV("create sampleID=%d, fd=%d, offset=%lld, length=%lld", mSampleID, mFd, mLength, mOffset);
+ ALOGV("create sampleID=%d, fd=%d, offset=%" PRId64 " length=%" PRId64,
+ mSampleID, mFd, mLength, mOffset);
}
void Sample::init()
@@ -496,7 +501,14 @@ status_t Sample::doLoad()
ALOGV("Start decode");
if (mUrl) {
- status = MediaPlayer::decode(mUrl, &sampleRate, &numChannels, &format, mHeap, &mSize);
+ status = MediaPlayer::decode(
+ NULL /* httpService */,
+ mUrl,
+ &sampleRate,
+ &numChannels,
+ &format,
+ mHeap,
+ &mSize);
} else {
status = MediaPlayer::decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format,
mHeap, &mSize);
@@ -508,7 +520,7 @@ status_t Sample::doLoad()
ALOGE("Unable to load sample: %s", mUrl);
goto error;
}
- ALOGV("pointer = %p, size = %u, sampleRate = %u, numChannels = %d",
+ ALOGV("pointer = %p, size = %zu, sampleRate = %u, numChannels = %d",
mHeap->getBase(), mSize, sampleRate, numChannels);
if (sampleRate > kMaxSampleRate) {
@@ -579,7 +591,7 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
uint32_t sampleRate = uint32_t(float(sample->sampleRate()) * rate + 0.5);
uint32_t totalFrames = (kDefaultBufferCount * afFrameCount * sampleRate) / afSampleRate;
uint32_t bufferFrames = (totalFrames + (kDefaultBufferCount - 1)) / kDefaultBufferCount;
- uint32_t frameCount = 0;
+ size_t frameCount = 0;
if (loop) {
frameCount = sample->size()/numChannels/
@@ -600,16 +612,15 @@ void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftV
// wrong audio audio buffer size (mAudioBufferSize)
unsigned long toggle = mToggle ^ 1;
void *userData = (void *)((unsigned long)this | toggle);
- uint32_t channels = (numChannels == 2) ?
- AUDIO_CHANNEL_OUT_STEREO : AUDIO_CHANNEL_OUT_MONO;
+ audio_channel_mask_t channelMask = audio_channel_out_mask_from_count(numChannels);
// do not create a new audio track if current track is compatible with sample parameters
#ifdef USE_SHARED_MEM_BUFFER
newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channels, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
+ channelMask, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData);
#else
newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channels, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
+ channelMask, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
bufferFrames);
#endif
oldTrack = mAudioTrack;
@@ -730,7 +741,8 @@ void SoundChannel::process(int event, void *info, unsigned long toggle)
count = b->size;
}
memcpy(q, p, count);
-// ALOGV("fill: q=%p, p=%p, mPos=%u, b->size=%u, count=%d", q, p, mPos, b->size, count);
+// ALOGV("fill: q=%p, p=%p, mPos=%u, b->size=%u, count=%d", q, p, mPos, b->size,
+// count);
} else if (mPos < mAudioBufferSize) {
count = mAudioBufferSize - mPos;
if (count > b->size) {
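
Replacing the hand-rolled mono/stereo ternary with audio_channel_out_mask_from_count() generalizes the channel handling to any count; a brief sketch (the count value is illustrative):

    #include <system/audio.h>

    // Derive the output channel mask directly from the channel count instead
    // of special-casing mono and stereo by hand.
    audio_channel_mask_t mask = audio_channel_out_mask_from_count(2 /* channels */);
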
diff --git a/media/libmedia/ToneGenerator.cpp b/media/libmedia/ToneGenerator.cpp
index adef3be..61b6d36 100644
--- a/media/libmedia/ToneGenerator.cpp
+++ b/media/libmedia/ToneGenerator.cpp
@@ -1057,7 +1057,7 @@ bool ToneGenerator::initAudioTrack() {
0, // notificationFrames
0, // sharedBuffer
mThreadCanCallJava,
- 0, // sessionId
+ AUDIO_SESSION_ALLOCATE,
AudioTrack::TRANSFER_CALLBACK);
if (mpAudioTrack->initCheck() != NO_ERROR) {
diff --git a/media/libmedia/autodetect.cpp b/media/libmedia/autodetect.cpp
deleted file mode 100644
index be5c3b2..0000000
--- a/media/libmedia/autodetect.cpp
+++ /dev/null
@@ -1,885 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "autodetect.h"
-
-struct CharRange {
- uint16_t first;
- uint16_t last;
-};
-
-#define ARRAY_SIZE(x) (sizeof(x) / sizeof(*x))
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP932.TXT
-static const CharRange kShiftJISRanges[] = {
- { 0x8140, 0x817E },
- { 0x8180, 0x81AC },
- { 0x81B8, 0x81BF },
- { 0x81C8, 0x81CE },
- { 0x81DA, 0x81E8 },
- { 0x81F0, 0x81F7 },
- { 0x81FC, 0x81FC },
- { 0x824F, 0x8258 },
- { 0x8260, 0x8279 },
- { 0x8281, 0x829A },
- { 0x829F, 0x82F1 },
- { 0x8340, 0x837E },
- { 0x8380, 0x8396 },
- { 0x839F, 0x83B6 },
- { 0x83BF, 0x83D6 },
- { 0x8440, 0x8460 },
- { 0x8470, 0x847E },
- { 0x8480, 0x8491 },
- { 0x849F, 0x84BE },
- { 0x8740, 0x875D },
- { 0x875F, 0x8775 },
- { 0x877E, 0x877E },
- { 0x8780, 0x879C },
- { 0x889F, 0x88FC },
- { 0x8940, 0x897E },
- { 0x8980, 0x89FC },
- { 0x8A40, 0x8A7E },
- { 0x8A80, 0x8AFC },
- { 0x8B40, 0x8B7E },
- { 0x8B80, 0x8BFC },
- { 0x8C40, 0x8C7E },
- { 0x8C80, 0x8CFC },
- { 0x8D40, 0x8D7E },
- { 0x8D80, 0x8DFC },
- { 0x8E40, 0x8E7E },
- { 0x8E80, 0x8EFC },
- { 0x8F40, 0x8F7E },
- { 0x8F80, 0x8FFC },
- { 0x9040, 0x907E },
- { 0x9080, 0x90FC },
- { 0x9140, 0x917E },
- { 0x9180, 0x91FC },
- { 0x9240, 0x927E },
- { 0x9280, 0x92FC },
- { 0x9340, 0x937E },
- { 0x9380, 0x93FC },
- { 0x9440, 0x947E },
- { 0x9480, 0x94FC },
- { 0x9540, 0x957E },
- { 0x9580, 0x95FC },
- { 0x9640, 0x967E },
- { 0x9680, 0x96FC },
- { 0x9740, 0x977E },
- { 0x9780, 0x97FC },
- { 0x9840, 0x9872 },
- { 0x989F, 0x98FC },
- { 0x9940, 0x997E },
- { 0x9980, 0x99FC },
- { 0x9A40, 0x9A7E },
- { 0x9A80, 0x9AFC },
- { 0x9B40, 0x9B7E },
- { 0x9B80, 0x9BFC },
- { 0x9C40, 0x9C7E },
- { 0x9C80, 0x9CFC },
- { 0x9D40, 0x9D7E },
- { 0x9D80, 0x9DFC },
- { 0x9E40, 0x9E7E },
- { 0x9E80, 0x9EFC },
- { 0x9F40, 0x9F7E },
- { 0x9F80, 0x9FFC },
- { 0xE040, 0xE07E },
- { 0xE080, 0xE0FC },
- { 0xE140, 0xE17E },
- { 0xE180, 0xE1FC },
- { 0xE240, 0xE27E },
- { 0xE280, 0xE2FC },
- { 0xE340, 0xE37E },
- { 0xE380, 0xE3FC },
- { 0xE440, 0xE47E },
- { 0xE480, 0xE4FC },
- { 0xE540, 0xE57E },
- { 0xE580, 0xE5FC },
- { 0xE640, 0xE67E },
- { 0xE680, 0xE6FC },
- { 0xE740, 0xE77E },
- { 0xE780, 0xE7FC },
- { 0xE840, 0xE87E },
- { 0xE880, 0xE8FC },
- { 0xE940, 0xE97E },
- { 0xE980, 0xE9FC },
- { 0xEA40, 0xEA7E },
- { 0xEA80, 0xEAA4 },
- { 0xED40, 0xED7E },
- { 0xED80, 0xEDFC },
- { 0xEE40, 0xEE7E },
- { 0xEE80, 0xEEEC },
- { 0xEEEF, 0xEEFC },
- { 0xFA40, 0xFA7E },
- { 0xFA80, 0xFAFC },
- { 0xFB40, 0xFB7E },
- { 0xFB80, 0xFBFC },
- { 0xFC40, 0xFC4B },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP936.TXT
-static const CharRange kGBKRanges[] = {
- { 0x8140, 0x817E },
- { 0x8180, 0x81FE },
- { 0x8240, 0x827E },
- { 0x8280, 0x82FE },
- { 0x8340, 0x837E },
- { 0x8380, 0x83FE },
- { 0x8440, 0x847E },
- { 0x8480, 0x84FE },
- { 0x8540, 0x857E },
- { 0x8580, 0x85FE },
- { 0x8640, 0x867E },
- { 0x8680, 0x86FE },
- { 0x8740, 0x877E },
- { 0x8780, 0x87FE },
- { 0x8840, 0x887E },
- { 0x8880, 0x88FE },
- { 0x8940, 0x897E },
- { 0x8980, 0x89FE },
- { 0x8A40, 0x8A7E },
- { 0x8A80, 0x8AFE },
- { 0x8B40, 0x8B7E },
- { 0x8B80, 0x8BFE },
- { 0x8C40, 0x8C7E },
- { 0x8C80, 0x8CFE },
- { 0x8D40, 0x8D7E },
- { 0x8D80, 0x8DFE },
- { 0x8E40, 0x8E7E },
- { 0x8E80, 0x8EFE },
- { 0x8F40, 0x8F7E },
- { 0x8F80, 0x8FFE },
- { 0x9040, 0x907E },
- { 0x9080, 0x90FE },
- { 0x9140, 0x917E },
- { 0x9180, 0x91FE },
- { 0x9240, 0x927E },
- { 0x9280, 0x92FE },
- { 0x9340, 0x937E },
- { 0x9380, 0x93FE },
- { 0x9440, 0x947E },
- { 0x9480, 0x94FE },
- { 0x9540, 0x957E },
- { 0x9580, 0x95FE },
- { 0x9640, 0x967E },
- { 0x9680, 0x96FE },
- { 0x9740, 0x977E },
- { 0x9780, 0x97FE },
- { 0x9840, 0x987E },
- { 0x9880, 0x98FE },
- { 0x9940, 0x997E },
- { 0x9980, 0x99FE },
- { 0x9A40, 0x9A7E },
- { 0x9A80, 0x9AFE },
- { 0x9B40, 0x9B7E },
- { 0x9B80, 0x9BFE },
- { 0x9C40, 0x9C7E },
- { 0x9C80, 0x9CFE },
- { 0x9D40, 0x9D7E },
- { 0x9D80, 0x9DFE },
- { 0x9E40, 0x9E7E },
- { 0x9E80, 0x9EFE },
- { 0x9F40, 0x9F7E },
- { 0x9F80, 0x9FFE },
- { 0xA040, 0xA07E },
- { 0xA080, 0xA0FE },
- { 0xA1A1, 0xA1FE },
- { 0xA2A1, 0xA2AA },
- { 0xA2B1, 0xA2E2 },
- { 0xA2E5, 0xA2EE },
- { 0xA2F1, 0xA2FC },
- { 0xA3A1, 0xA3FE },
- { 0xA4A1, 0xA4F3 },
- { 0xA5A1, 0xA5F6 },
- { 0xA6A1, 0xA6B8 },
- { 0xA6C1, 0xA6D8 },
- { 0xA6E0, 0xA6EB },
- { 0xA6EE, 0xA6F2 },
- { 0xA6F4, 0xA6F5 },
- { 0xA7A1, 0xA7C1 },
- { 0xA7D1, 0xA7F1 },
- { 0xA840, 0xA87E },
- { 0xA880, 0xA895 },
- { 0xA8A1, 0xA8BB },
- { 0xA8BD, 0xA8BE },
- { 0xA8C0, 0xA8C0 },
- { 0xA8C5, 0xA8E9 },
- { 0xA940, 0xA957 },
- { 0xA959, 0xA95A },
- { 0xA95C, 0xA95C },
- { 0xA960, 0xA97E },
- { 0xA980, 0xA988 },
- { 0xA996, 0xA996 },
- { 0xA9A4, 0xA9EF },
- { 0xAA40, 0xAA7E },
- { 0xAA80, 0xAAA0 },
- { 0xAB40, 0xAB7E },
- { 0xAB80, 0xABA0 },
- { 0xAC40, 0xAC7E },
- { 0xAC80, 0xACA0 },
- { 0xAD40, 0xAD7E },
- { 0xAD80, 0xADA0 },
- { 0xAE40, 0xAE7E },
- { 0xAE80, 0xAEA0 },
- { 0xAF40, 0xAF7E },
- { 0xAF80, 0xAFA0 },
- { 0xB040, 0xB07E },
- { 0xB080, 0xB0FE },
- { 0xB140, 0xB17E },
- { 0xB180, 0xB1FE },
- { 0xB240, 0xB27E },
- { 0xB280, 0xB2FE },
- { 0xB340, 0xB37E },
- { 0xB380, 0xB3FE },
- { 0xB440, 0xB47E },
- { 0xB480, 0xB4FE },
- { 0xB540, 0xB57E },
- { 0xB580, 0xB5FE },
- { 0xB640, 0xB67E },
- { 0xB680, 0xB6FE },
- { 0xB740, 0xB77E },
- { 0xB780, 0xB7FE },
- { 0xB840, 0xB87E },
- { 0xB880, 0xB8FE },
- { 0xB940, 0xB97E },
- { 0xB980, 0xB9FE },
- { 0xBA40, 0xBA7E },
- { 0xBA80, 0xBAFE },
- { 0xBB40, 0xBB7E },
- { 0xBB80, 0xBBFE },
- { 0xBC40, 0xBC7E },
- { 0xBC80, 0xBCFE },
- { 0xBD40, 0xBD7E },
- { 0xBD80, 0xBDFE },
- { 0xBE40, 0xBE7E },
- { 0xBE80, 0xBEFE },
- { 0xBF40, 0xBF7E },
- { 0xBF80, 0xBFFE },
- { 0xC040, 0xC07E },
- { 0xC080, 0xC0FE },
- { 0xC140, 0xC17E },
- { 0xC180, 0xC1FE },
- { 0xC240, 0xC27E },
- { 0xC280, 0xC2FE },
- { 0xC340, 0xC37E },
- { 0xC380, 0xC3FE },
- { 0xC440, 0xC47E },
- { 0xC480, 0xC4FE },
- { 0xC540, 0xC57E },
- { 0xC580, 0xC5FE },
- { 0xC640, 0xC67E },
- { 0xC680, 0xC6FE },
- { 0xC740, 0xC77E },
- { 0xC780, 0xC7FE },
- { 0xC840, 0xC87E },
- { 0xC880, 0xC8FE },
- { 0xC940, 0xC97E },
- { 0xC980, 0xC9FE },
- { 0xCA40, 0xCA7E },
- { 0xCA80, 0xCAFE },
- { 0xCB40, 0xCB7E },
- { 0xCB80, 0xCBFE },
- { 0xCC40, 0xCC7E },
- { 0xCC80, 0xCCFE },
- { 0xCD40, 0xCD7E },
- { 0xCD80, 0xCDFE },
- { 0xCE40, 0xCE7E },
- { 0xCE80, 0xCEFE },
- { 0xCF40, 0xCF7E },
- { 0xCF80, 0xCFFE },
- { 0xD040, 0xD07E },
- { 0xD080, 0xD0FE },
- { 0xD140, 0xD17E },
- { 0xD180, 0xD1FE },
- { 0xD240, 0xD27E },
- { 0xD280, 0xD2FE },
- { 0xD340, 0xD37E },
- { 0xD380, 0xD3FE },
- { 0xD440, 0xD47E },
- { 0xD480, 0xD4FE },
- { 0xD540, 0xD57E },
- { 0xD580, 0xD5FE },
- { 0xD640, 0xD67E },
- { 0xD680, 0xD6FE },
- { 0xD740, 0xD77E },
- { 0xD780, 0xD7F9 },
- { 0xD840, 0xD87E },
- { 0xD880, 0xD8FE },
- { 0xD940, 0xD97E },
- { 0xD980, 0xD9FE },
- { 0xDA40, 0xDA7E },
- { 0xDA80, 0xDAFE },
- { 0xDB40, 0xDB7E },
- { 0xDB80, 0xDBFE },
- { 0xDC40, 0xDC7E },
- { 0xDC80, 0xDCFE },
- { 0xDD40, 0xDD7E },
- { 0xDD80, 0xDDFE },
- { 0xDE40, 0xDE7E },
- { 0xDE80, 0xDEFE },
- { 0xDF40, 0xDF7E },
- { 0xDF80, 0xDFFE },
- { 0xE040, 0xE07E },
- { 0xE080, 0xE0FE },
- { 0xE140, 0xE17E },
- { 0xE180, 0xE1FE },
- { 0xE240, 0xE27E },
- { 0xE280, 0xE2FE },
- { 0xE340, 0xE37E },
- { 0xE380, 0xE3FE },
- { 0xE440, 0xE47E },
- { 0xE480, 0xE4FE },
- { 0xE540, 0xE57E },
- { 0xE580, 0xE5FE },
- { 0xE640, 0xE67E },
- { 0xE680, 0xE6FE },
- { 0xE740, 0xE77E },
- { 0xE780, 0xE7FE },
- { 0xE840, 0xE87E },
- { 0xE880, 0xE8FE },
- { 0xE940, 0xE97E },
- { 0xE980, 0xE9FE },
- { 0xEA40, 0xEA7E },
- { 0xEA80, 0xEAFE },
- { 0xEB40, 0xEB7E },
- { 0xEB80, 0xEBFE },
- { 0xEC40, 0xEC7E },
- { 0xEC80, 0xECFE },
- { 0xED40, 0xED7E },
- { 0xED80, 0xEDFE },
- { 0xEE40, 0xEE7E },
- { 0xEE80, 0xEEFE },
- { 0xEF40, 0xEF7E },
- { 0xEF80, 0xEFFE },
- { 0xF040, 0xF07E },
- { 0xF080, 0xF0FE },
- { 0xF140, 0xF17E },
- { 0xF180, 0xF1FE },
- { 0xF240, 0xF27E },
- { 0xF280, 0xF2FE },
- { 0xF340, 0xF37E },
- { 0xF380, 0xF3FE },
- { 0xF440, 0xF47E },
- { 0xF480, 0xF4FE },
- { 0xF540, 0xF57E },
- { 0xF580, 0xF5FE },
- { 0xF640, 0xF67E },
- { 0xF680, 0xF6FE },
- { 0xF740, 0xF77E },
- { 0xF780, 0xF7FE },
- { 0xF840, 0xF87E },
- { 0xF880, 0xF8A0 },
- { 0xF940, 0xF97E },
- { 0xF980, 0xF9A0 },
- { 0xFA40, 0xFA7E },
- { 0xFA80, 0xFAA0 },
- { 0xFB40, 0xFB7E },
- { 0xFB80, 0xFBA0 },
- { 0xFC40, 0xFC7E },
- { 0xFC80, 0xFCA0 },
- { 0xFD40, 0xFD7E },
- { 0xFD80, 0xFDA0 },
- { 0xFE40, 0xFE4F },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP949.TXT
-static const CharRange kEUCKRRanges[] = {
- { 0x8141, 0x815A },
- { 0x8161, 0x817A },
- { 0x8181, 0x81FE },
- { 0x8241, 0x825A },
- { 0x8261, 0x827A },
- { 0x8281, 0x82FE },
- { 0x8341, 0x835A },
- { 0x8361, 0x837A },
- { 0x8381, 0x83FE },
- { 0x8441, 0x845A },
- { 0x8461, 0x847A },
- { 0x8481, 0x84FE },
- { 0x8541, 0x855A },
- { 0x8561, 0x857A },
- { 0x8581, 0x85FE },
- { 0x8641, 0x865A },
- { 0x8661, 0x867A },
- { 0x8681, 0x86FE },
- { 0x8741, 0x875A },
- { 0x8761, 0x877A },
- { 0x8781, 0x87FE },
- { 0x8841, 0x885A },
- { 0x8861, 0x887A },
- { 0x8881, 0x88FE },
- { 0x8941, 0x895A },
- { 0x8961, 0x897A },
- { 0x8981, 0x89FE },
- { 0x8A41, 0x8A5A },
- { 0x8A61, 0x8A7A },
- { 0x8A81, 0x8AFE },
- { 0x8B41, 0x8B5A },
- { 0x8B61, 0x8B7A },
- { 0x8B81, 0x8BFE },
- { 0x8C41, 0x8C5A },
- { 0x8C61, 0x8C7A },
- { 0x8C81, 0x8CFE },
- { 0x8D41, 0x8D5A },
- { 0x8D61, 0x8D7A },
- { 0x8D81, 0x8DFE },
- { 0x8E41, 0x8E5A },
- { 0x8E61, 0x8E7A },
- { 0x8E81, 0x8EFE },
- { 0x8F41, 0x8F5A },
- { 0x8F61, 0x8F7A },
- { 0x8F81, 0x8FFE },
- { 0x9041, 0x905A },
- { 0x9061, 0x907A },
- { 0x9081, 0x90FE },
- { 0x9141, 0x915A },
- { 0x9161, 0x917A },
- { 0x9181, 0x91FE },
- { 0x9241, 0x925A },
- { 0x9261, 0x927A },
- { 0x9281, 0x92FE },
- { 0x9341, 0x935A },
- { 0x9361, 0x937A },
- { 0x9381, 0x93FE },
- { 0x9441, 0x945A },
- { 0x9461, 0x947A },
- { 0x9481, 0x94FE },
- { 0x9541, 0x955A },
- { 0x9561, 0x957A },
- { 0x9581, 0x95FE },
- { 0x9641, 0x965A },
- { 0x9661, 0x967A },
- { 0x9681, 0x96FE },
- { 0x9741, 0x975A },
- { 0x9761, 0x977A },
- { 0x9781, 0x97FE },
- { 0x9841, 0x985A },
- { 0x9861, 0x987A },
- { 0x9881, 0x98FE },
- { 0x9941, 0x995A },
- { 0x9961, 0x997A },
- { 0x9981, 0x99FE },
- { 0x9A41, 0x9A5A },
- { 0x9A61, 0x9A7A },
- { 0x9A81, 0x9AFE },
- { 0x9B41, 0x9B5A },
- { 0x9B61, 0x9B7A },
- { 0x9B81, 0x9BFE },
- { 0x9C41, 0x9C5A },
- { 0x9C61, 0x9C7A },
- { 0x9C81, 0x9CFE },
- { 0x9D41, 0x9D5A },
- { 0x9D61, 0x9D7A },
- { 0x9D81, 0x9DFE },
- { 0x9E41, 0x9E5A },
- { 0x9E61, 0x9E7A },
- { 0x9E81, 0x9EFE },
- { 0x9F41, 0x9F5A },
- { 0x9F61, 0x9F7A },
- { 0x9F81, 0x9FFE },
- { 0xA041, 0xA05A },
- { 0xA061, 0xA07A },
- { 0xA081, 0xA0FE },
- { 0xA141, 0xA15A },
- { 0xA161, 0xA17A },
- { 0xA181, 0xA1FE },
- { 0xA241, 0xA25A },
- { 0xA261, 0xA27A },
- { 0xA281, 0xA2E7 },
- { 0xA341, 0xA35A },
- { 0xA361, 0xA37A },
- { 0xA381, 0xA3FE },
- { 0xA441, 0xA45A },
- { 0xA461, 0xA47A },
- { 0xA481, 0xA4FE },
- { 0xA541, 0xA55A },
- { 0xA561, 0xA57A },
- { 0xA581, 0xA5AA },
- { 0xA5B0, 0xA5B9 },
- { 0xA5C1, 0xA5D8 },
- { 0xA5E1, 0xA5F8 },
- { 0xA641, 0xA65A },
- { 0xA661, 0xA67A },
- { 0xA681, 0xA6E4 },
- { 0xA741, 0xA75A },
- { 0xA761, 0xA77A },
- { 0xA781, 0xA7EF },
- { 0xA841, 0xA85A },
- { 0xA861, 0xA87A },
- { 0xA881, 0xA8A4 },
- { 0xA8A6, 0xA8A6 },
- { 0xA8A8, 0xA8AF },
- { 0xA8B1, 0xA8FE },
- { 0xA941, 0xA95A },
- { 0xA961, 0xA97A },
- { 0xA981, 0xA9FE },
- { 0xAA41, 0xAA5A },
- { 0xAA61, 0xAA7A },
- { 0xAA81, 0xAAF3 },
- { 0xAB41, 0xAB5A },
- { 0xAB61, 0xAB7A },
- { 0xAB81, 0xABF6 },
- { 0xAC41, 0xAC5A },
- { 0xAC61, 0xAC7A },
- { 0xAC81, 0xACC1 },
- { 0xACD1, 0xACF1 },
- { 0xAD41, 0xAD5A },
- { 0xAD61, 0xAD7A },
- { 0xAD81, 0xADA0 },
- { 0xAE41, 0xAE5A },
- { 0xAE61, 0xAE7A },
- { 0xAE81, 0xAEA0 },
- { 0xAF41, 0xAF5A },
- { 0xAF61, 0xAF7A },
- { 0xAF81, 0xAFA0 },
- { 0xB041, 0xB05A },
- { 0xB061, 0xB07A },
- { 0xB081, 0xB0FE },
- { 0xB141, 0xB15A },
- { 0xB161, 0xB17A },
- { 0xB181, 0xB1FE },
- { 0xB241, 0xB25A },
- { 0xB261, 0xB27A },
- { 0xB281, 0xB2FE },
- { 0xB341, 0xB35A },
- { 0xB361, 0xB37A },
- { 0xB381, 0xB3FE },
- { 0xB441, 0xB45A },
- { 0xB461, 0xB47A },
- { 0xB481, 0xB4FE },
- { 0xB541, 0xB55A },
- { 0xB561, 0xB57A },
- { 0xB581, 0xB5FE },
- { 0xB641, 0xB65A },
- { 0xB661, 0xB67A },
- { 0xB681, 0xB6FE },
- { 0xB741, 0xB75A },
- { 0xB761, 0xB77A },
- { 0xB781, 0xB7FE },
- { 0xB841, 0xB85A },
- { 0xB861, 0xB87A },
- { 0xB881, 0xB8FE },
- { 0xB941, 0xB95A },
- { 0xB961, 0xB97A },
- { 0xB981, 0xB9FE },
- { 0xBA41, 0xBA5A },
- { 0xBA61, 0xBA7A },
- { 0xBA81, 0xBAFE },
- { 0xBB41, 0xBB5A },
- { 0xBB61, 0xBB7A },
- { 0xBB81, 0xBBFE },
- { 0xBC41, 0xBC5A },
- { 0xBC61, 0xBC7A },
- { 0xBC81, 0xBCFE },
- { 0xBD41, 0xBD5A },
- { 0xBD61, 0xBD7A },
- { 0xBD81, 0xBDFE },
- { 0xBE41, 0xBE5A },
- { 0xBE61, 0xBE7A },
- { 0xBE81, 0xBEFE },
- { 0xBF41, 0xBF5A },
- { 0xBF61, 0xBF7A },
- { 0xBF81, 0xBFFE },
- { 0xC041, 0xC05A },
- { 0xC061, 0xC07A },
- { 0xC081, 0xC0FE },
- { 0xC141, 0xC15A },
- { 0xC161, 0xC17A },
- { 0xC181, 0xC1FE },
- { 0xC241, 0xC25A },
- { 0xC261, 0xC27A },
- { 0xC281, 0xC2FE },
- { 0xC341, 0xC35A },
- { 0xC361, 0xC37A },
- { 0xC381, 0xC3FE },
- { 0xC441, 0xC45A },
- { 0xC461, 0xC47A },
- { 0xC481, 0xC4FE },
- { 0xC541, 0xC55A },
- { 0xC561, 0xC57A },
- { 0xC581, 0xC5FE },
- { 0xC641, 0xC652 },
- { 0xC6A1, 0xC6FE },
- { 0xC7A1, 0xC7FE },
- { 0xC8A1, 0xC8FE },
- { 0xCAA1, 0xCAFE },
- { 0xCBA1, 0xCBFE },
- { 0xCCA1, 0xCCFE },
- { 0xCDA1, 0xCDFE },
- { 0xCEA1, 0xCEFE },
- { 0xCFA1, 0xCFFE },
- { 0xD0A1, 0xD0FE },
- { 0xD1A1, 0xD1FE },
- { 0xD2A1, 0xD2FE },
- { 0xD3A1, 0xD3FE },
- { 0xD4A1, 0xD4FE },
- { 0xD5A1, 0xD5FE },
- { 0xD6A1, 0xD6FE },
- { 0xD7A1, 0xD7FE },
- { 0xD8A1, 0xD8FE },
- { 0xD9A1, 0xD9FE },
- { 0xDAA1, 0xDAFE },
- { 0xDBA1, 0xDBFE },
- { 0xDCA1, 0xDCFE },
- { 0xDDA1, 0xDDFE },
- { 0xDEA1, 0xDEFE },
- { 0xDFA1, 0xDFFE },
- { 0xE0A1, 0xE0FE },
- { 0xE1A1, 0xE1FE },
- { 0xE2A1, 0xE2FE },
- { 0xE3A1, 0xE3FE },
- { 0xE4A1, 0xE4FE },
- { 0xE5A1, 0xE5FE },
- { 0xE6A1, 0xE6FE },
- { 0xE7A1, 0xE7FE },
- { 0xE8A1, 0xE8FE },
- { 0xE9A1, 0xE9FE },
- { 0xEAA1, 0xEAFE },
- { 0xEBA1, 0xEBFE },
- { 0xECA1, 0xECFE },
- { 0xEDA1, 0xEDFE },
- { 0xEEA1, 0xEEFE },
- { 0xEFA1, 0xEFFE },
- { 0xF0A1, 0xF0FE },
- { 0xF1A1, 0xF1FE },
- { 0xF2A1, 0xF2FE },
- { 0xF3A1, 0xF3FE },
- { 0xF4A1, 0xF4FE },
- { 0xF5A1, 0xF5FE },
- { 0xF6A1, 0xF6FE },
- { 0xF7A1, 0xF7FE },
- { 0xF8A1, 0xF8FE },
- { 0xF9A1, 0xF9FE },
- { 0xFAA1, 0xFAFE },
- { 0xFBA1, 0xFBFE },
- { 0xFCA1, 0xFCFE },
- { 0xFDA1, 0xFDFE },
-};
-
-// generated from http://unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
-static const CharRange kBig5Ranges[] = {
- { 0xA140, 0xA17E },
- { 0xA1A1, 0xA1FE },
- { 0xA240, 0xA27E },
- { 0xA2A1, 0xA2FE },
- { 0xA340, 0xA37E },
- { 0xA3A1, 0xA3BF },
- { 0xA3E1, 0xA3E1 },
- { 0xA440, 0xA47E },
- { 0xA4A1, 0xA4FE },
- { 0xA540, 0xA57E },
- { 0xA5A1, 0xA5FE },
- { 0xA640, 0xA67E },
- { 0xA6A1, 0xA6FE },
- { 0xA740, 0xA77E },
- { 0xA7A1, 0xA7FE },
- { 0xA840, 0xA87E },
- { 0xA8A1, 0xA8FE },
- { 0xA940, 0xA97E },
- { 0xA9A1, 0xA9FE },
- { 0xAA40, 0xAA7E },
- { 0xAAA1, 0xAAFE },
- { 0xAB40, 0xAB7E },
- { 0xABA1, 0xABFE },
- { 0xAC40, 0xAC7E },
- { 0xACA1, 0xACFE },
- { 0xAD40, 0xAD7E },
- { 0xADA1, 0xADFE },
- { 0xAE40, 0xAE7E },
- { 0xAEA1, 0xAEFE },
- { 0xAF40, 0xAF7E },
- { 0xAFA1, 0xAFFE },
- { 0xB040, 0xB07E },
- { 0xB0A1, 0xB0FE },
- { 0xB140, 0xB17E },
- { 0xB1A1, 0xB1FE },
- { 0xB240, 0xB27E },
- { 0xB2A1, 0xB2FE },
- { 0xB340, 0xB37E },
- { 0xB3A1, 0xB3FE },
- { 0xB440, 0xB47E },
- { 0xB4A1, 0xB4FE },
- { 0xB540, 0xB57E },
- { 0xB5A1, 0xB5FE },
- { 0xB640, 0xB67E },
- { 0xB6A1, 0xB6FE },
- { 0xB740, 0xB77E },
- { 0xB7A1, 0xB7FE },
- { 0xB840, 0xB87E },
- { 0xB8A1, 0xB8FE },
- { 0xB940, 0xB97E },
- { 0xB9A1, 0xB9FE },
- { 0xBA40, 0xBA7E },
- { 0xBAA1, 0xBAFE },
- { 0xBB40, 0xBB7E },
- { 0xBBA1, 0xBBFE },
- { 0xBC40, 0xBC7E },
- { 0xBCA1, 0xBCFE },
- { 0xBD40, 0xBD7E },
- { 0xBDA1, 0xBDFE },
- { 0xBE40, 0xBE7E },
- { 0xBEA1, 0xBEFE },
- { 0xBF40, 0xBF7E },
- { 0xBFA1, 0xBFFE },
- { 0xC040, 0xC07E },
- { 0xC0A1, 0xC0FE },
- { 0xC140, 0xC17E },
- { 0xC1A1, 0xC1FE },
- { 0xC240, 0xC27E },
- { 0xC2A1, 0xC2FE },
- { 0xC340, 0xC37E },
- { 0xC3A1, 0xC3FE },
- { 0xC440, 0xC47E },
- { 0xC4A1, 0xC4FE },
- { 0xC540, 0xC57E },
- { 0xC5A1, 0xC5FE },
- { 0xC640, 0xC67E },
- { 0xC940, 0xC97E },
- { 0xC9A1, 0xC9FE },
- { 0xCA40, 0xCA7E },
- { 0xCAA1, 0xCAFE },
- { 0xCB40, 0xCB7E },
- { 0xCBA1, 0xCBFE },
- { 0xCC40, 0xCC7E },
- { 0xCCA1, 0xCCFE },
- { 0xCD40, 0xCD7E },
- { 0xCDA1, 0xCDFE },
- { 0xCE40, 0xCE7E },
- { 0xCEA1, 0xCEFE },
- { 0xCF40, 0xCF7E },
- { 0xCFA1, 0xCFFE },
- { 0xD040, 0xD07E },
- { 0xD0A1, 0xD0FE },
- { 0xD140, 0xD17E },
- { 0xD1A1, 0xD1FE },
- { 0xD240, 0xD27E },
- { 0xD2A1, 0xD2FE },
- { 0xD340, 0xD37E },
- { 0xD3A1, 0xD3FE },
- { 0xD440, 0xD47E },
- { 0xD4A1, 0xD4FE },
- { 0xD540, 0xD57E },
- { 0xD5A1, 0xD5FE },
- { 0xD640, 0xD67E },
- { 0xD6A1, 0xD6FE },
- { 0xD740, 0xD77E },
- { 0xD7A1, 0xD7FE },
- { 0xD840, 0xD87E },
- { 0xD8A1, 0xD8FE },
- { 0xD940, 0xD97E },
- { 0xD9A1, 0xD9FE },
- { 0xDA40, 0xDA7E },
- { 0xDAA1, 0xDAFE },
- { 0xDB40, 0xDB7E },
- { 0xDBA1, 0xDBFE },
- { 0xDC40, 0xDC7E },
- { 0xDCA1, 0xDCFE },
- { 0xDD40, 0xDD7E },
- { 0xDDA1, 0xDDFE },
- { 0xDE40, 0xDE7E },
- { 0xDEA1, 0xDEFE },
- { 0xDF40, 0xDF7E },
- { 0xDFA1, 0xDFFE },
- { 0xE040, 0xE07E },
- { 0xE0A1, 0xE0FE },
- { 0xE140, 0xE17E },
- { 0xE1A1, 0xE1FE },
- { 0xE240, 0xE27E },
- { 0xE2A1, 0xE2FE },
- { 0xE340, 0xE37E },
- { 0xE3A1, 0xE3FE },
- { 0xE440, 0xE47E },
- { 0xE4A1, 0xE4FE },
- { 0xE540, 0xE57E },
- { 0xE5A1, 0xE5FE },
- { 0xE640, 0xE67E },
- { 0xE6A1, 0xE6FE },
- { 0xE740, 0xE77E },
- { 0xE7A1, 0xE7FE },
- { 0xE840, 0xE87E },
- { 0xE8A1, 0xE8FE },
- { 0xE940, 0xE97E },
- { 0xE9A1, 0xE9FE },
- { 0xEA40, 0xEA7E },
- { 0xEAA1, 0xEAFE },
- { 0xEB40, 0xEB7E },
- { 0xEBA1, 0xEBFE },
- { 0xEC40, 0xEC7E },
- { 0xECA1, 0xECFE },
- { 0xED40, 0xED7E },
- { 0xEDA1, 0xEDFE },
- { 0xEE40, 0xEE7E },
- { 0xEEA1, 0xEEFE },
- { 0xEF40, 0xEF7E },
- { 0xEFA1, 0xEFFE },
- { 0xF040, 0xF07E },
- { 0xF0A1, 0xF0FE },
- { 0xF140, 0xF17E },
- { 0xF1A1, 0xF1FE },
- { 0xF240, 0xF27E },
- { 0xF2A1, 0xF2FE },
- { 0xF340, 0xF37E },
- { 0xF3A1, 0xF3FE },
- { 0xF440, 0xF47E },
- { 0xF4A1, 0xF4FE },
- { 0xF540, 0xF57E },
- { 0xF5A1, 0xF5FE },
- { 0xF640, 0xF67E },
- { 0xF6A1, 0xF6FE },
- { 0xF740, 0xF77E },
- { 0xF7A1, 0xF7FE },
- { 0xF840, 0xF87E },
- { 0xF8A1, 0xF8FE },
- { 0xF940, 0xF97E },
- { 0xF9A1, 0xF9FE },
-};
-
-static bool charMatchesEncoding(int ch, const CharRange* encodingRanges, int rangeCount) {
- // Use binary search to see if the character is contained in the encoding
- int low = 0;
- int high = rangeCount;
-
- while (low < high) {
- int i = (low + high) / 2;
- const CharRange* range = &encodingRanges[i];
- if (ch >= range->first && ch <= range->last)
- return true;
- if (ch > range->last)
- low = i + 1;
- else
- high = i;
- }
-
- return false;
-}
-
-extern uint32_t findPossibleEncodings(int ch)
-{
- // ASCII matches everything
- if (ch < 256) return kEncodingAll;
-
- int result = kEncodingNone;
-
- if (charMatchesEncoding(ch, kShiftJISRanges, ARRAY_SIZE(kShiftJISRanges)))
- result |= kEncodingShiftJIS;
- if (charMatchesEncoding(ch, kGBKRanges, ARRAY_SIZE(kGBKRanges)))
- result |= kEncodingGBK;
- if (charMatchesEncoding(ch, kBig5Ranges, ARRAY_SIZE(kBig5Ranges)))
- result |= kEncodingBig5;
- if (charMatchesEncoding(ch, kEUCKRRanges, ARRAY_SIZE(kEUCKRRanges)))
- result |= kEncodingEUCKR;
-
- return result;
-}
diff --git a/media/libmedia/autodetect.h b/media/libmedia/autodetect.h
deleted file mode 100644
index 9675db3..0000000
--- a/media/libmedia/autodetect.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2008 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef AUTODETECT_H
-#define AUTODETECT_H
-
-#include <inttypes.h>
-
-// flags used for native encoding detection
-enum {
- kEncodingNone = 0,
- kEncodingShiftJIS = (1 << 0),
- kEncodingGBK = (1 << 1),
- kEncodingBig5 = (1 << 2),
- kEncodingEUCKR = (1 << 3),
-
- kEncodingAll = (kEncodingShiftJIS | kEncodingGBK | kEncodingBig5 | kEncodingEUCKR),
-};
-
-
-// returns a bitfield containing the possible native encodings for the given character
-extern uint32_t findPossibleEncodings(int ch);
-
-#endif // AUTODETECT_H
diff --git a/media/libmedia/mediametadataretriever.cpp b/media/libmedia/mediametadataretriever.cpp
index 110b94c..39a239d 100644
--- a/media/libmedia/mediametadataretriever.cpp
+++ b/media/libmedia/mediametadataretriever.cpp
@@ -18,9 +18,12 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaMetadataRetriever"
+#include <inttypes.h>
+
#include <binder/IServiceManager.h>
#include <binder/IPCThreadState.h>
#include <media/mediametadataretriever.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <utils/Log.h>
#include <dlfcn.h>
@@ -93,7 +96,9 @@ void MediaMetadataRetriever::disconnect()
}
status_t MediaMetadataRetriever::setDataSource(
- const char *srcUrl, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *srcUrl,
+ const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource");
Mutex::Autolock _l(mLock);
@@ -106,12 +111,12 @@ status_t MediaMetadataRetriever::setDataSource(
return UNKNOWN_ERROR;
}
ALOGV("data source (%s)", srcUrl);
- return mRetriever->setDataSource(srcUrl, headers);
+ return mRetriever->setDataSource(httpService, srcUrl, headers);
}
status_t MediaMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
{
- ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
Mutex::Autolock _l(mLock);
if (mRetriever == 0) {
ALOGE("retriever is not initialized");
@@ -126,7 +131,7 @@ status_t MediaMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t l
sp<IMemory> MediaMetadataRetriever::getFrameAtTime(int64_t timeUs, int option)
{
- ALOGV("getFrameAtTime: time(%lld us) option(%d)", timeUs, option);
+ ALOGV("getFrameAtTime: time(%" PRId64 " us) option(%d)", timeUs, option);
Mutex::Autolock _l(mLock);
if (mRetriever == 0) {
ALOGE("retriever is not initialized");
@@ -157,7 +162,7 @@ sp<IMemory> MediaMetadataRetriever::extractAlbumArt()
return mRetriever->extractAlbumArt();
}
-void MediaMetadataRetriever::DeathNotifier::binderDied(const wp<IBinder>& who) {
+void MediaMetadataRetriever::DeathNotifier::binderDied(const wp<IBinder>& who __unused) {
Mutex::Autolock lock(MediaMetadataRetriever::sServiceLock);
MediaMetadataRetriever::sService.clear();
ALOGW("MediaMetadataRetriever server died!");
diff --git a/media/libmedia/mediaplayer.cpp b/media/libmedia/mediaplayer.cpp
index 7a6f31d..406f9f2 100644
--- a/media/libmedia/mediaplayer.cpp
+++ b/media/libmedia/mediaplayer.cpp
@@ -17,12 +17,14 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaPlayer"
-#include <utils/Log.h>
-#include <sys/types.h>
+#include <fcntl.h>
+#include <inttypes.h>
#include <sys/stat.h>
+#include <sys/types.h>
#include <unistd.h>
-#include <fcntl.h>
+
+#include <utils/Log.h>
#include <binder/IServiceManager.h>
#include <binder/IPCThreadState.h>
@@ -58,7 +60,7 @@ MediaPlayer::MediaPlayer()
mVideoWidth = mVideoHeight = 0;
mLockThreadId = 0;
mAudioSessionId = AudioSystem::newAudioSessionId();
- AudioSystem::acquireAudioSessionId(mAudioSessionId);
+ AudioSystem::acquireAudioSessionId(mAudioSessionId, -1);
mSendLevel = 0;
mRetransmitEndpointValid = false;
}
@@ -66,7 +68,7 @@ MediaPlayer::MediaPlayer()
MediaPlayer::~MediaPlayer()
{
ALOGV("destructor");
- AudioSystem::releaseAudioSessionId(mAudioSessionId);
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
disconnect();
IPCThreadState::self()->flushCommands();
}
@@ -136,6 +138,7 @@ status_t MediaPlayer::attachNewPlayer(const sp<IMediaPlayer>& player)
}
status_t MediaPlayer::setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url, const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
@@ -145,7 +148,7 @@ status_t MediaPlayer::setDataSource(
if (service != 0) {
sp<IMediaPlayer> player(service->create(this, mAudioSessionId));
if ((NO_ERROR != doSetRetransmitEndpoint(player)) ||
- (NO_ERROR != player->setDataSource(url, headers))) {
+ (NO_ERROR != player->setDataSource(httpService, url, headers))) {
player.clear();
}
err = attachNewPlayer(player);
@@ -156,7 +159,7 @@ status_t MediaPlayer::setDataSource(
status_t MediaPlayer::setDataSource(int fd, int64_t offset, int64_t length)
{
- ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setDataSource(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
status_t err = UNKNOWN_ERROR;
const sp<IMediaPlayerService>& service(getMediaPlayerService());
if (service != 0) {
@@ -193,7 +196,7 @@ status_t MediaPlayer::invoke(const Parcel& request, Parcel *reply)
(mCurrentState != MEDIA_PLAYER_STATE_ERROR) &&
((mCurrentState & MEDIA_PLAYER_IDLE) != MEDIA_PLAYER_IDLE);
if ((mPlayer != NULL) && hasBeenInitialized) {
- ALOGV("invoke %d", request.dataSize());
+ ALOGV("invoke %zu", request.dataSize());
return mPlayer->invoke(request, reply);
}
ALOGE("invoke failed: wrong state %X", mCurrentState);
@@ -530,6 +533,14 @@ status_t MediaPlayer::setAudioStreamType(audio_stream_type_t type)
return OK;
}
+status_t MediaPlayer::getAudioStreamType(audio_stream_type_t *type)
+{
+ ALOGV("getAudioStreamType");
+ Mutex::Autolock _l(mLock);
+ *type = mStreamType;
+ return OK;
+}
+
status_t MediaPlayer::setLooping(int loop)
{
ALOGV("MediaPlayer::setLooping");
@@ -575,8 +586,8 @@ status_t MediaPlayer::setAudioSessionId(int sessionId)
return BAD_VALUE;
}
if (sessionId != mAudioSessionId) {
- AudioSystem::acquireAudioSessionId(sessionId);
- AudioSystem::releaseAudioSessionId(mAudioSessionId);
+ AudioSystem::acquireAudioSessionId(sessionId, -1);
+ AudioSystem::releaseAudioSessionId(mAudioSessionId, -1);
mAudioSessionId = sessionId;
}
return NO_ERROR;
@@ -776,15 +787,20 @@ void MediaPlayer::notify(int msg, int ext1, int ext2, const Parcel *obj)
}
}
-/*static*/ status_t MediaPlayer::decode(const char* url, uint32_t *pSampleRate,
- int* pNumChannels, audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
+/*static*/ status_t MediaPlayer::decode(
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
+ uint32_t *pSampleRate,
+ int* pNumChannels,
+ audio_format_t* pFormat,
+ const sp<IMemoryHeap>& heap,
+ size_t *pSize)
{
ALOGV("decode(%s)", url);
status_t status;
const sp<IMediaPlayerService>& service = getMediaPlayerService();
if (service != 0) {
- status = service->decode(url, pSampleRate, pNumChannels, pFormat, heap, pSize);
+ status = service->decode(httpService, url, pSampleRate, pNumChannels, pFormat, heap, pSize);
} else {
ALOGE("Unable to locate media service");
status = DEAD_OBJECT;
@@ -804,7 +820,7 @@ void MediaPlayer::died()
audio_format_t* pFormat,
const sp<IMemoryHeap>& heap, size_t *pSize)
{
- ALOGV("decode(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("decode(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
status_t status;
const sp<IMediaPlayerService>& service = getMediaPlayerService();
if (service != 0) {
@@ -832,15 +848,4 @@ status_t MediaPlayer::setNextMediaPlayer(const sp<MediaPlayer>& next) {
return mPlayer->setNextPlayer(next == NULL ? NULL : next->mPlayer);
}
-status_t MediaPlayer::updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- const sp<IMediaPlayerService>& service = getMediaPlayerService();
-
- if (service != NULL) {
- return service->updateProxyConfig(host, port, exclusionList);
- }
-
- return INVALID_OPERATION;
-}
-
}; // namespace android
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 3710e46..c8192e9 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -17,6 +17,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaRecorder"
+
+#include <inttypes.h>
+
#include <utils/Log.h>
#include <media/mediarecorder.h>
#include <binder/IServiceManager.h>
@@ -286,7 +289,7 @@ status_t MediaRecorder::setOutputFile(const char* path)
status_t MediaRecorder::setOutputFile(int fd, int64_t offset, int64_t length)
{
- ALOGV("setOutputFile(%d, %lld, %lld)", fd, offset, length);
+ ALOGV("setOutputFile(%d, %" PRId64 ", %" PRId64 ")", fd, offset, length);
if (mMediaRecorder == NULL) {
ALOGE("media recorder is not initialized yet");
return INVALID_OPERATION;
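
The format-specifier hunks above replace non-portable %lld/%d conversions with the <inttypes.h> macros and %zu. A minimal standalone sketch of the same idiom, using plain printf instead of ALOGV (not part of the change):

    // Standalone sketch: PRId64 for int64_t, %zu for size_t.
    #include <cinttypes>
    #include <cstdio>

    int main() {
        int64_t offset = 1234567890123LL;  // e.g. a file offset
        size_t payload = 42;               // e.g. a Parcel data size
        std::printf("setDataSource(fd, %" PRId64 ")\n", offset);
        std::printf("invoke %zu\n", payload);
        return 0;
    }
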
diff --git a/media/libmediaplayerservice/Android.mk b/media/libmediaplayerservice/Android.mk
index a645f13..48d44c1 100644
--- a/media/libmediaplayerservice/Android.mk
+++ b/media/libmediaplayerservice/Android.mk
@@ -46,7 +46,6 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_rtsp \
LOCAL_C_INCLUDES := \
- $(call include-path-for, graphics corecg) \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/av/media/libstagefright/rtsp \
$(TOP)/frameworks/av/media/libstagefright/wifi-display \
diff --git a/media/libmediaplayerservice/HDCP.cpp b/media/libmediaplayerservice/HDCP.cpp
index c2ac1a3..afe3936 100644
--- a/media/libmediaplayerservice/HDCP.cpp
+++ b/media/libmediaplayerservice/HDCP.cpp
@@ -107,11 +107,7 @@ uint32_t HDCP::getCaps() {
return NO_INIT;
}
- // TO-DO:
- // Only support HDCP_CAPS_ENCRYPT (byte-array to byte-array) for now.
- // use mHDCPModule->getCaps() when the HDCP libraries get updated.
- //return mHDCPModule->getCaps();
- return HDCPModule::HDCP_CAPS_ENCRYPT;
+ return mHDCPModule->getCaps();
}
status_t HDCP::encrypt(
diff --git a/media/libmediaplayerservice/MediaPlayerFactory.cpp b/media/libmediaplayerservice/MediaPlayerFactory.cpp
index 90aed39..9b239b1 100644
--- a/media/libmediaplayerservice/MediaPlayerFactory.cpp
+++ b/media/libmediaplayerservice/MediaPlayerFactory.cpp
@@ -67,6 +67,12 @@ player_type MediaPlayerFactory::getDefaultPlayerType() {
return NU_PLAYER;
}
+ // TODO: remove this EXPERIMENTAL developer settings property
+ if (property_get("persist.sys.media.use-nuplayer", value, NULL)
+ && !strcasecmp("true", value)) {
+ return NU_PLAYER;
+ }
+
return STAGEFRIGHT_PLAYER;
}
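
For reference, a hedged sketch of the property_get() pattern the hunk above introduces for the persist.sys.media.use-nuplayer opt-in; the helper name is illustrative only:

    // Illustrative helper (not in the patch). property_get() returns the
    // length of the value read, or 0 if the property is unset.
    #include <cutils/properties.h>
    #include <strings.h>

    static bool useNuPlayerOptIn() {
        char value[PROPERTY_VALUE_MAX];
        return property_get("persist.sys.media.use-nuplayer", value, NULL) > 0
                && strcasecmp("true", value) == 0;
    }
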
@@ -180,7 +186,7 @@ class StagefrightPlayerFactory :
read(fd, buf, sizeof(buf));
lseek(fd, offset, SEEK_SET);
- long ident = *((long*)buf);
+ uint32_t ident = *((uint32_t*)buf);
// Ogg vorbis?
if (ident == 0x5367674f) // 'OggS'
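
The hunk above narrows the magic-number read from long (8 bytes on LP64) to a fixed-width uint32_t. A standalone sketch of the same check, assuming a little-endian host as the original cast also does:

    // Sketch only: compares the first four bytes against 'OggS'
    // (0x5367674f when the bytes "OggS" are read little-endian).
    #include <cstdint>
    #include <cstring>

    static bool looksLikeOgg(const char buf[4]) {
        uint32_t ident;
        std::memcpy(&ident, buf, sizeof(ident));  // memcpy avoids the aliasing cast
        return ident == 0x5367674f;
    }
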
diff --git a/media/libmediaplayerservice/MediaPlayerService.cpp b/media/libmediaplayerservice/MediaPlayerService.cpp
index a392b76..76632a7 100644
--- a/media/libmediaplayerservice/MediaPlayerService.cpp
+++ b/media/libmediaplayerservice/MediaPlayerService.cpp
@@ -44,6 +44,7 @@
#include <utils/SystemClock.h>
#include <utils/Vector.h>
+#include <media/IMediaHTTPService.h>
#include <media/IRemoteDisplay.h>
#include <media/IRemoteDisplayClient.h>
#include <media/MediaPlayerInterface.h>
@@ -306,12 +307,7 @@ sp<IRemoteDisplay> MediaPlayerService::listenForRemoteDisplay(
return new RemoteDisplay(client, iface.string());
}
-status_t MediaPlayerService::updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- return HTTPBase::UpdateProxyConfig(host, port, exclusionList);
-}
-
-status_t MediaPlayerService::AudioCache::dump(int fd, const Vector<String16>& args) const
+status_t MediaPlayerService::AudioCache::dump(int fd, const Vector<String16>& /*args*/) const
{
const size_t SIZE = 256;
char buffer[SIZE];
@@ -590,7 +586,8 @@ sp<MediaPlayerBase> MediaPlayerService::Client::setDataSource_pre(
}
if (!p->hardwareOutput()) {
- mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid());
+ mAudioOutput = new AudioOutput(mAudioSessionId, IPCThreadState::self()->getCallingUid(),
+ mPid);
static_cast<MediaPlayerInterface*>(p.get())->setAudioSink(mAudioOutput);
}
@@ -622,7 +619,9 @@ void MediaPlayerService::Client::setDataSource_post(
}
status_t MediaPlayerService::Client::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
if (url == NULL)
@@ -657,7 +656,7 @@ status_t MediaPlayerService::Client::setDataSource(
return NO_INIT;
}
- setDataSource_post(p, p->setDataSource(url, headers));
+ setDataSource_post(p, p->setDataSource(httpService, url, headers));
return mStatus;
}
}
@@ -674,8 +673,8 @@ status_t MediaPlayerService::Client::setDataSource(int fd, int64_t offset, int64
ALOGV("st_dev = %llu", sb.st_dev);
ALOGV("st_mode = %u", sb.st_mode);
- ALOGV("st_uid = %lu", sb.st_uid);
- ALOGV("st_gid = %lu", sb.st_gid);
+ ALOGV("st_uid = %lu", static_cast<unsigned long>(sb.st_uid));
+ ALOGV("st_gid = %lu", static_cast<unsigned long>(sb.st_gid));
ALOGV("st_size = %llu", sb.st_size);
if (offset >= sb.st_size) {
@@ -804,7 +803,7 @@ status_t MediaPlayerService::Client::setMetadataFilter(const Parcel& filter)
}
status_t MediaPlayerService::Client::getMetadata(
- bool update_only, bool apply_filter, Parcel *reply)
+ bool update_only, bool /*apply_filter*/, Parcel *reply)
{
sp<MediaPlayerBase> player = getPlayer();
if (player == 0) return UNKNOWN_ERROR;
@@ -1176,9 +1175,14 @@ int Antagonizer::callbackThread(void* user)
}
#endif
-status_t MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize)
+status_t MediaPlayerService::decode(
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
+ uint32_t *pSampleRate,
+ int* pNumChannels,
+ audio_format_t* pFormat,
+ const sp<IMemoryHeap>& heap,
+ size_t *pSize)
{
ALOGV("decode(%s)", url);
sp<MediaPlayerBase> player;
@@ -1206,7 +1210,7 @@ status_t MediaPlayerService::decode(const char* url, uint32_t *pSampleRate, int*
static_cast<MediaPlayerInterface*>(player.get())->setAudioSink(cache);
// set data source
- if (player->setDataSource(url) != NO_ERROR) goto Exit;
+ if (player->setDataSource(httpService, url) != NO_ERROR) goto Exit;
ALOGV("prepare");
player->prepareAsync();
@@ -1296,13 +1300,14 @@ Exit:
#undef LOG_TAG
#define LOG_TAG "AudioSink"
-MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid)
+MediaPlayerService::AudioOutput::AudioOutput(int sessionId, int uid, int pid)
: mCallback(NULL),
mCallbackCookie(NULL),
mCallbackData(NULL),
mBytesWritten(0),
mSessionId(sessionId),
mUid(uid),
+ mPid(pid),
mFlags(AUDIO_OUTPUT_FLAG_NONE) {
ALOGV("AudioOutput(%d)", sessionId);
mStreamType = AUDIO_STREAM_MUSIC;
@@ -1450,7 +1455,7 @@ status_t MediaPlayerService::AudioOutput::open(
format, bufferCount, mSessionId, flags);
uint32_t afSampleRate;
size_t afFrameCount;
- uint32_t frameCount;
+ size_t frameCount;
// offloading is only supported in callback mode for now.
// offloadInfo must be present if offload flag is set
@@ -1551,7 +1556,8 @@ status_t MediaPlayerService::AudioOutput::open(
mSessionId,
AudioTrack::TRANSFER_CALLBACK,
offloadInfo,
- mUid);
+ mUid,
+ mPid);
} else {
t = new AudioTrack(
mStreamType,
@@ -1566,7 +1572,8 @@ status_t MediaPlayerService::AudioOutput::open(
mSessionId,
AudioTrack::TRANSFER_DEFAULT,
NULL, // offload info
- mUid);
+ mUid,
+ mPid);
}
if ((t == 0) || (t->initCheck() != NO_ERROR)) {
@@ -1672,7 +1679,7 @@ void MediaPlayerService::AudioOutput::switchToNextOutput() {
ssize_t MediaPlayerService::AudioOutput::write(const void* buffer, size_t size)
{
- LOG_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
+ LOG_ALWAYS_FATAL_IF(mCallback != NULL, "Don't call write if supplying a callback.");
//ALOGV("write(%p, %u)", buffer, size);
if (mTrack != 0) {
@@ -1919,8 +1926,8 @@ bool CallbackThread::threadLoop() {
status_t MediaPlayerService::AudioCache::open(
uint32_t sampleRate, int channelCount, audio_channel_mask_t channelMask,
audio_format_t format, int bufferCount,
- AudioCallback cb, void *cookie, audio_output_flags_t flags,
- const audio_offload_info_t *offloadInfo)
+ AudioCallback cb, void *cookie, audio_output_flags_t /*flags*/,
+ const audio_offload_info_t* /*offloadInfo*/)
{
ALOGV("open(%u, %d, 0x%x, %d, %d)", sampleRate, channelCount, channelMask, format, bufferCount);
if (mHeap->getHeapID() < 0) {
@@ -1987,7 +1994,7 @@ status_t MediaPlayerService::AudioCache::wait()
}
void MediaPlayerService::AudioCache::notify(
- void* cookie, int msg, int ext1, int ext2, const Parcel *obj)
+ void* cookie, int msg, int ext1, int ext2, const Parcel* /*obj*/)
{
ALOGV("notify(%p, %d, %d, %d)", cookie, msg, ext1, ext2);
AudioCache* p = static_cast<AudioCache*>(cookie);
diff --git a/media/libmediaplayerservice/MediaPlayerService.h b/media/libmediaplayerservice/MediaPlayerService.h
index 9c084e1..448f27a 100644
--- a/media/libmediaplayerservice/MediaPlayerService.h
+++ b/media/libmediaplayerservice/MediaPlayerService.h
@@ -72,7 +72,7 @@ class MediaPlayerService : public BnMediaPlayerService
class CallbackData;
public:
- AudioOutput(int sessionId, int uid);
+ AudioOutput(int sessionId, int uid, int pid);
virtual ~AudioOutput();
virtual bool ready() const { return mTrack != 0; }
@@ -140,6 +140,7 @@ class MediaPlayerService : public BnMediaPlayerService
float mMsecsPerFrame;
int mSessionId;
int mUid;
+ int mPid;
float mSendLevel;
int mAuxEffectId;
static bool mIsOnEmulator;
@@ -211,12 +212,12 @@ class MediaPlayerService : public BnMediaPlayerService
virtual void flush() {}
virtual void pause() {}
virtual void close() {}
- void setAudioStreamType(audio_stream_type_t streamType) {}
+ void setAudioStreamType(audio_stream_type_t streamType __unused) {}
// stream type is not used for AudioCache
virtual audio_stream_type_t getAudioStreamType() const { return AUDIO_STREAM_DEFAULT; }
- void setVolume(float left, float right) {}
- virtual status_t setPlaybackRatePermille(int32_t ratePermille) { return INVALID_OPERATION; }
+ void setVolume(float left __unused, float right __unused) {}
+ virtual status_t setPlaybackRatePermille(int32_t ratePermille __unused) { return INVALID_OPERATION; }
uint32_t sampleRate() const { return mSampleRate; }
audio_format_t format() const { return mFormat; }
size_t size() const { return mSize; }
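
Two idioms are used above to silence -Wunused-parameter without changing behaviour: commenting out the parameter name and the __unused attribute. A small sketch (the Sink type is hypothetical):

    #include <sys/cdefs.h>   // provides __unused on bionic; an assumption elsewhere

    struct Sink {
        virtual ~Sink() {}
        // Name commented out: the signature stays documented, the warning goes away.
        virtual void setAudioStreamType(int /*streamType*/) {}
        // __unused attribute: the name remains visible to readers and debuggers.
        virtual void setVolume(float left __unused, float right __unused) {}
    };
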
@@ -256,9 +257,15 @@ public:
virtual sp<IMediaPlayer> create(const sp<IMediaPlayerClient>& client, int audioSessionId);
- virtual status_t decode(const char* url, uint32_t *pSampleRate, int* pNumChannels,
- audio_format_t* pFormat,
- const sp<IMemoryHeap>& heap, size_t *pSize);
+ virtual status_t decode(
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
+ uint32_t *pSampleRate,
+ int* pNumChannels,
+ audio_format_t* pFormat,
+ const sp<IMemoryHeap>& heap,
+ size_t *pSize);
+
virtual status_t decode(int fd, int64_t offset, int64_t length,
uint32_t *pSampleRate, int* pNumChannels,
audio_format_t* pFormat,
@@ -272,9 +279,6 @@ public:
const String8& iface);
virtual status_t dump(int fd, const Vector<String16>& args);
- virtual status_t updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
void removeClient(wp<Client> client);
// For battery usage tracking purpose
@@ -356,6 +360,7 @@ private:
sp<MediaPlayerBase> createPlayer(player_type playerType);
virtual status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers);
diff --git a/media/libmediaplayerservice/MediaRecorderClient.cpp b/media/libmediaplayerservice/MediaRecorderClient.cpp
index a9820e0..194abbb 100644
--- a/media/libmediaplayerservice/MediaRecorderClient.cpp
+++ b/media/libmediaplayerservice/MediaRecorderClient.cpp
@@ -95,7 +95,8 @@ status_t MediaRecorderClient::setPreviewSurface(const sp<IGraphicBufferProducer>
status_t MediaRecorderClient::setVideoSource(int vs)
{
ALOGV("setVideoSource(%d)", vs);
- if (!checkPermission(cameraPermission)) {
+ // Check camera permission for sources other than SURFACE
+ if (vs != VIDEO_SOURCE_SURFACE && !checkPermission(cameraPermission)) {
return PERMISSION_DENIED;
}
Mutex::Autolock lock(mLock);
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.cpp b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
index 5c9bd8f..a91b0e5 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.cpp
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.cpp
@@ -31,6 +31,7 @@
#include <binder/MemoryHeapBase.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
+#include <media/IMediaHTTPService.h>
#include <media/MediaMetadataRetrieverInterface.h>
#include <media/MediaPlayerInterface.h>
#include <private/media/VideoFrame.h>
@@ -106,7 +107,9 @@ static sp<MediaMetadataRetrieverBase> createRetriever(player_type playerType)
}
status_t MetadataRetrieverClient::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource(%s)", url);
Mutex::Autolock lock(mLock);
@@ -127,7 +130,7 @@ status_t MetadataRetrieverClient::setDataSource(
ALOGV("player type = %d", playerType);
sp<MediaMetadataRetrieverBase> p = createRetriever(playerType);
if (p == NULL) return NO_INIT;
- status_t ret = p->setDataSource(url, headers);
+ status_t ret = p->setDataSource(httpService, url, headers);
if (ret == NO_ERROR) mRetriever = p;
return ret;
}
@@ -144,8 +147,8 @@ status_t MetadataRetrieverClient::setDataSource(int fd, int64_t offset, int64_t
}
ALOGV("st_dev = %llu", sb.st_dev);
ALOGV("st_mode = %u", sb.st_mode);
- ALOGV("st_uid = %lu", sb.st_uid);
- ALOGV("st_gid = %lu", sb.st_gid);
+ ALOGV("st_uid = %lu", static_cast<unsigned long>(sb.st_uid));
+ ALOGV("st_gid = %lu", static_cast<unsigned long>(sb.st_gid));
ALOGV("st_size = %llu", sb.st_size);
if (offset >= sb.st_size) {
diff --git a/media/libmediaplayerservice/MetadataRetrieverClient.h b/media/libmediaplayerservice/MetadataRetrieverClient.h
index f08f933..9d3fbe9 100644
--- a/media/libmediaplayerservice/MetadataRetrieverClient.h
+++ b/media/libmediaplayerservice/MetadataRetrieverClient.h
@@ -30,6 +30,7 @@
namespace android {
+struct IMediaHTTPService;
class IMediaPlayerService;
class MemoryDealer;
@@ -43,7 +44,9 @@ public:
virtual void disconnect();
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
virtual sp<IMemory> getFrameAtTime(int64_t timeUs, int option);
diff --git a/media/libmediaplayerservice/MidiFile.cpp b/media/libmediaplayerservice/MidiFile.cpp
index 0a6aa90..749ef96 100644
--- a/media/libmediaplayerservice/MidiFile.cpp
+++ b/media/libmediaplayerservice/MidiFile.cpp
@@ -114,7 +114,9 @@ MidiFile::~MidiFile() {
}
status_t MidiFile::setDataSource(
- const char* path, const KeyedVector<String8, String8> *) {
+ const sp<IMediaHTTPService> & /*httpService*/,
+ const char* path,
+ const KeyedVector<String8, String8> *) {
ALOGV("MidiFile::setDataSource url=%s", path);
Mutex::Autolock lock(mMutex);
diff --git a/media/libmediaplayerservice/MidiFile.h b/media/libmediaplayerservice/MidiFile.h
index 24d59b4..12802ba 100644
--- a/media/libmediaplayerservice/MidiFile.h
+++ b/media/libmediaplayerservice/MidiFile.h
@@ -32,7 +32,9 @@ public:
virtual status_t initCheck();
virtual status_t setDataSource(
- const char* path, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char* path,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
virtual status_t setVideoSurfaceTexture(
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.cpp b/media/libmediaplayerservice/MidiMetadataRetriever.cpp
index 465209f..f3cf6ef 100644
--- a/media/libmediaplayerservice/MidiMetadataRetriever.cpp
+++ b/media/libmediaplayerservice/MidiMetadataRetriever.cpp
@@ -22,6 +22,8 @@
#include "MidiMetadataRetriever.h"
#include <media/mediametadataretriever.h>
+#include <media/IMediaHTTPService.h>
+
namespace android {
static status_t ERROR_NOT_OPEN = -1;
@@ -36,7 +38,9 @@ void MidiMetadataRetriever::clearMetadataValues()
}
status_t MidiMetadataRetriever::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers)
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers)
{
ALOGV("setDataSource: %s", url? url: "NULL pointer");
Mutex::Autolock lock(mLock);
@@ -44,7 +48,7 @@ status_t MidiMetadataRetriever::setDataSource(
if (mMidiPlayer == 0) {
mMidiPlayer = new MidiFile();
}
- return mMidiPlayer->setDataSource(url, headers);
+ return mMidiPlayer->setDataSource(httpService, url, headers);
}
status_t MidiMetadataRetriever::setDataSource(int fd, int64_t offset, int64_t length)
diff --git a/media/libmediaplayerservice/MidiMetadataRetriever.h b/media/libmediaplayerservice/MidiMetadataRetriever.h
index 4cee42d..b8214ee 100644
--- a/media/libmediaplayerservice/MidiMetadataRetriever.h
+++ b/media/libmediaplayerservice/MidiMetadataRetriever.h
@@ -32,7 +32,9 @@ public:
~MidiMetadataRetriever() {}
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
virtual const char* extractMetadata(int keyCode);
diff --git a/media/libmediaplayerservice/StagefrightPlayer.cpp b/media/libmediaplayerservice/StagefrightPlayer.cpp
index 42b7766..b37aee3 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.cpp
+++ b/media/libmediaplayerservice/StagefrightPlayer.cpp
@@ -54,8 +54,10 @@ status_t StagefrightPlayer::setUID(uid_t uid) {
}
status_t StagefrightPlayer::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
- return mPlayer->setDataSource(url, headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
+ return mPlayer->setDataSource(httpService, url, headers);
}
// Warning: The filedescriptor passed into this method will only be valid until
diff --git a/media/libmediaplayerservice/StagefrightPlayer.h b/media/libmediaplayerservice/StagefrightPlayer.h
index 600945e..e6c30ff 100644
--- a/media/libmediaplayerservice/StagefrightPlayer.h
+++ b/media/libmediaplayerservice/StagefrightPlayer.h
@@ -34,7 +34,9 @@ public:
virtual status_t setUID(uid_t uid);
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 4da74e1..bfc075c 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -25,8 +25,10 @@
#include <binder/IServiceManager.h>
#include <media/IMediaPlayerService.h>
-#include <media/openmax/OMX_Audio.h>
+#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ALooper.h>
#include <media/stagefright/AudioSource.h>
#include <media/stagefright/AMRWriter.h>
#include <media/stagefright/AACWriter.h>
@@ -36,13 +38,12 @@
#include <media/stagefright/MPEG4Writer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaCodecSource.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
-#include <media/stagefright/SurfaceMediaSource.h>
#include <media/MediaProfiles.h>
#include <camera/ICamera.h>
#include <camera/CameraParameters.h>
-#include <gui/Surface.h>
#include <utils/Errors.h>
#include <sys/types.h>
@@ -72,8 +73,7 @@ StagefrightRecorder::StagefrightRecorder()
mAudioSource(AUDIO_SOURCE_CNT),
mVideoSource(VIDEO_SOURCE_LIST_END),
mCaptureTimeLapse(false),
- mStarted(false),
- mSurfaceMediaSource(NULL) {
+ mStarted(false) {
ALOGV("Constructor");
reset();
@@ -82,10 +82,19 @@ StagefrightRecorder::StagefrightRecorder()
StagefrightRecorder::~StagefrightRecorder() {
ALOGV("Destructor");
stop();
+
+ if (mLooper != NULL) {
+ mLooper->stop();
+ }
}
status_t StagefrightRecorder::init() {
ALOGV("init");
+
+ mLooper = new ALooper;
+ mLooper->setName("recorder_looper");
+ mLooper->start();
+
return OK;
}
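
A reduced sketch of the ALooper lifecycle the recorder now owns: started once in init(), stopped in the destructor before members are torn down (names mirror the patch; this is not a drop-in):

    #include <media/stagefright/foundation/ALooper.h>

    struct LooperOwner {                            // illustrative holder only
        android::sp<android::ALooper> mLooper;

        void init() {
            mLooper = new android::ALooper;
            mLooper->setName("recorder_looper");
            mLooper->start();                       // spawns the looper thread
        }

        ~LooperOwner() {
            if (mLooper != NULL) {
                mLooper->stop();                    // joins the thread on teardown
            }
        }
    };
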
@@ -94,7 +103,7 @@ status_t StagefrightRecorder::init() {
// while encoding GL Frames
sp<IGraphicBufferProducer> StagefrightRecorder::querySurfaceMediaSource() const {
ALOGV("Get SurfaceMediaSource");
- return mSurfaceMediaSource->getBufferQueue();
+ return mGraphicBufferProducer;
}
status_t StagefrightRecorder::setAudioSource(audio_source_t as) {
@@ -234,7 +243,7 @@ status_t StagefrightRecorder::setPreviewSurface(const sp<IGraphicBufferProducer>
return OK;
}
-status_t StagefrightRecorder::setOutputFile(const char *path) {
+status_t StagefrightRecorder::setOutputFile(const char * /* path */) {
ALOGE("setOutputFile(const char*) must not be called");
// We don't actually support this at all, as the media_server process
// no longer has permissions to create files.
@@ -681,10 +690,10 @@ status_t StagefrightRecorder::setParameter(
return setParamTimeLapseEnable(timeLapseEnable);
}
} else if (key == "time-between-time-lapse-frame-capture") {
- int64_t timeBetweenTimeLapseFrameCaptureMs;
- if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureMs)) {
+ int64_t timeBetweenTimeLapseFrameCaptureUs;
+ if (safe_strtoi64(value.string(), &timeBetweenTimeLapseFrameCaptureUs)) {
return setParamTimeBetweenTimeLapseFrameCapture(
- 1000LL * timeBetweenTimeLapseFrameCaptureMs);
+ timeBetweenTimeLapseFrameCaptureUs);
}
} else {
ALOGE("setParameter: failed to find key %s", key.string());
@@ -739,19 +748,15 @@ status_t StagefrightRecorder::setClientName(const String16& clientName) {
return OK;
}
-status_t StagefrightRecorder::prepare() {
- return OK;
-}
-
-status_t StagefrightRecorder::start() {
- CHECK_GE(mOutputFd, 0);
+status_t StagefrightRecorder::prepareInternal() {
+ ALOGV("prepare");
+ if (mOutputFd < 0) {
+ ALOGE("Output file descriptor is invalid");
+ return INVALID_OPERATION;
+ }
// Get UID here for permission checking
mClientUid = IPCThreadState::self()->getCallingUid();
- if (mWriter != NULL) {
- ALOGE("File writer is not avaialble");
- return UNKNOWN_ERROR;
- }
status_t status = OK;
@@ -759,25 +764,25 @@ status_t StagefrightRecorder::start() {
case OUTPUT_FORMAT_DEFAULT:
case OUTPUT_FORMAT_THREE_GPP:
case OUTPUT_FORMAT_MPEG_4:
- status = startMPEG4Recording();
+ status = setupMPEG4Recording();
break;
case OUTPUT_FORMAT_AMR_NB:
case OUTPUT_FORMAT_AMR_WB:
- status = startAMRRecording();
+ status = setupAMRRecording();
break;
case OUTPUT_FORMAT_AAC_ADIF:
case OUTPUT_FORMAT_AAC_ADTS:
- status = startAACRecording();
+ status = setupAACRecording();
break;
case OUTPUT_FORMAT_RTP_AVP:
- status = startRTPRecording();
+ status = setupRTPRecording();
break;
case OUTPUT_FORMAT_MPEG2TS:
- status = startMPEG2TSRecording();
+ status = setupMPEG2TSRecording();
break;
default:
@@ -786,6 +791,72 @@ status_t StagefrightRecorder::start() {
break;
}
+ return status;
+}
+
+status_t StagefrightRecorder::prepare() {
+ if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+ return prepareInternal();
+ }
+ return OK;
+}
+
+status_t StagefrightRecorder::start() {
+ ALOGV("start");
+ if (mOutputFd < 0) {
+ ALOGE("Output file descriptor is invalid");
+ return INVALID_OPERATION;
+ }
+
+ status_t status = OK;
+
+ if (mVideoSource != VIDEO_SOURCE_SURFACE) {
+ status = prepareInternal();
+ if (status != OK) {
+ return status;
+ }
+ }
+
+ if (mWriter == NULL) {
+ ALOGE("File writer is not avaialble");
+ return UNKNOWN_ERROR;
+ }
+
+ switch (mOutputFormat) {
+ case OUTPUT_FORMAT_DEFAULT:
+ case OUTPUT_FORMAT_THREE_GPP:
+ case OUTPUT_FORMAT_MPEG_4:
+ {
+ sp<MetaData> meta = new MetaData;
+ setupMPEG4MetaData(&meta);
+ status = mWriter->start(meta.get());
+ break;
+ }
+
+ case OUTPUT_FORMAT_AMR_NB:
+ case OUTPUT_FORMAT_AMR_WB:
+ case OUTPUT_FORMAT_AAC_ADIF:
+ case OUTPUT_FORMAT_AAC_ADTS:
+ case OUTPUT_FORMAT_RTP_AVP:
+ case OUTPUT_FORMAT_MPEG2TS:
+ {
+ status = mWriter->start();
+ break;
+ }
+
+ default:
+ {
+ ALOGE("Unsupported output file format: %d", mOutputFormat);
+ status = UNKNOWN_ERROR;
+ break;
+ }
+ }
+
+ if (status != OK) {
+ mWriter.clear();
+ mWriter = NULL;
+ }
+
if ((status == OK) && (!mStarted)) {
mStarted = true;
@@ -817,58 +888,58 @@ sp<MediaSource> StagefrightRecorder::createAudioSource() {
return NULL;
}
- sp<MetaData> encMeta = new MetaData;
+ sp<AMessage> format = new AMessage;
const char *mime;
switch (mAudioEncoder) {
case AUDIO_ENCODER_AMR_NB:
case AUDIO_ENCODER_DEFAULT:
- mime = MEDIA_MIMETYPE_AUDIO_AMR_NB;
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
break;
case AUDIO_ENCODER_AMR_WB:
- mime = MEDIA_MIMETYPE_AUDIO_AMR_WB;
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
break;
case AUDIO_ENCODER_AAC:
- mime = MEDIA_MIMETYPE_AUDIO_AAC;
- encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectLC);
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ format->setInt32("aac-profile", OMX_AUDIO_AACObjectLC);
break;
case AUDIO_ENCODER_HE_AAC:
- mime = MEDIA_MIMETYPE_AUDIO_AAC;
- encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectHE);
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ format->setInt32("aac-profile", OMX_AUDIO_AACObjectHE);
break;
case AUDIO_ENCODER_AAC_ELD:
- mime = MEDIA_MIMETYPE_AUDIO_AAC;
- encMeta->setInt32(kKeyAACProfile, OMX_AUDIO_AACObjectELD);
+ format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
+ format->setInt32("aac-profile", OMX_AUDIO_AACObjectELD);
break;
default:
ALOGE("Unknown audio encoder: %d", mAudioEncoder);
return NULL;
}
- encMeta->setCString(kKeyMIMEType, mime);
int32_t maxInputSize;
CHECK(audioSource->getFormat()->findInt32(
kKeyMaxInputSize, &maxInputSize));
- encMeta->setInt32(kKeyMaxInputSize, maxInputSize);
- encMeta->setInt32(kKeyChannelCount, mAudioChannels);
- encMeta->setInt32(kKeySampleRate, mSampleRate);
- encMeta->setInt32(kKeyBitRate, mAudioBitRate);
+ format->setInt32("max-input-size", maxInputSize);
+ format->setInt32("channel-count", mAudioChannels);
+ format->setInt32("sample-rate", mSampleRate);
+ format->setInt32("bitrate", mAudioBitRate);
if (mAudioTimeScale > 0) {
- encMeta->setInt32(kKeyTimeScale, mAudioTimeScale);
+ format->setInt32("time-scale", mAudioTimeScale);
}
- OMXClient client;
- CHECK_EQ(client.connect(), (status_t)OK);
sp<MediaSource> audioEncoder =
- OMXCodec::Create(client.interface(), encMeta,
- true /* createEncoder */, audioSource);
+ MediaCodecSource::Create(mLooper, format, audioSource);
mAudioSourceNode = audioSource;
+ if (audioEncoder == NULL) {
+ ALOGE("Failed to create audio encoder");
+ }
+
return audioEncoder;
}
-status_t StagefrightRecorder::startAACRecording() {
+status_t StagefrightRecorder::setupAACRecording() {
// FIXME:
// Add support for OUTPUT_FORMAT_AAC_ADIF
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_AAC_ADTS);
@@ -879,16 +950,10 @@ status_t StagefrightRecorder::startAACRecording() {
CHECK(mAudioSource != AUDIO_SOURCE_CNT);
mWriter = new AACWriter(mOutputFd);
- status_t status = startRawAudioRecording();
- if (status != OK) {
- mWriter.clear();
- mWriter = NULL;
- }
-
- return status;
+ return setupRawAudioRecording();
}
-status_t StagefrightRecorder::startAMRRecording() {
+status_t StagefrightRecorder::setupAMRRecording() {
CHECK(mOutputFormat == OUTPUT_FORMAT_AMR_NB ||
mOutputFormat == OUTPUT_FORMAT_AMR_WB);
@@ -908,15 +973,10 @@ status_t StagefrightRecorder::startAMRRecording() {
}
mWriter = new AMRWriter(mOutputFd);
- status_t status = startRawAudioRecording();
- if (status != OK) {
- mWriter.clear();
- mWriter = NULL;
- }
- return status;
+ return setupRawAudioRecording();
}
-status_t StagefrightRecorder::startRawAudioRecording() {
+status_t StagefrightRecorder::setupRawAudioRecording() {
if (mAudioSource >= AUDIO_SOURCE_CNT) {
ALOGE("Invalid audio source: %d", mAudioSource);
return BAD_VALUE;
@@ -942,12 +1002,11 @@ status_t StagefrightRecorder::startRawAudioRecording() {
mWriter->setMaxFileSize(mMaxFileSizeBytes);
}
mWriter->setListener(mListener);
- mWriter->start();
return OK;
}
-status_t StagefrightRecorder::startRTPRecording() {
+status_t StagefrightRecorder::setupRTPRecording() {
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_RTP_AVP);
if ((mAudioSource != AUDIO_SOURCE_CNT
@@ -974,7 +1033,7 @@ status_t StagefrightRecorder::startRTPRecording() {
return err;
}
- err = setupVideoEncoder(mediaSource, mVideoBitRate, &source);
+ err = setupVideoEncoder(mediaSource, &source);
if (err != OK) {
return err;
}
@@ -984,10 +1043,10 @@ status_t StagefrightRecorder::startRTPRecording() {
mWriter->addSource(source);
mWriter->setListener(mListener);
- return mWriter->start();
+ return OK;
}
-status_t StagefrightRecorder::startMPEG2TSRecording() {
+status_t StagefrightRecorder::setupMPEG2TSRecording() {
CHECK_EQ(mOutputFormat, OUTPUT_FORMAT_MPEG2TS);
sp<MediaWriter> writer = new MPEG2TSWriter(mOutputFd);
@@ -1018,7 +1077,7 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
}
sp<MediaSource> encoder;
- err = setupVideoEncoder(mediaSource, mVideoBitRate, &encoder);
+ err = setupVideoEncoder(mediaSource, &encoder);
if (err != OK) {
return err;
@@ -1037,7 +1096,7 @@ status_t StagefrightRecorder::startMPEG2TSRecording() {
mWriter = writer;
- return mWriter->start();
+ return OK;
}
void StagefrightRecorder::clipVideoFrameRate() {
@@ -1278,49 +1337,14 @@ status_t StagefrightRecorder::setupMediaSource(
return err;
}
*mediaSource = cameraSource;
- } else if (mVideoSource == VIDEO_SOURCE_GRALLOC_BUFFER) {
- // If using GRAlloc buffers, setup surfacemediasource.
- // Later a handle to that will be passed
- // to the client side when queried
- status_t err = setupSurfaceMediaSource();
- if (err != OK) {
- return err;
- }
- *mediaSource = mSurfaceMediaSource;
+ } else if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+ *mediaSource = NULL;
} else {
return INVALID_OPERATION;
}
return OK;
}
-// setupSurfaceMediaSource creates a source with the given
-// width and height and framerate.
-// TODO: This could go in a static function inside SurfaceMediaSource
-// similar to that in CameraSource
-status_t StagefrightRecorder::setupSurfaceMediaSource() {
- status_t err = OK;
- mSurfaceMediaSource = new SurfaceMediaSource(mVideoWidth, mVideoHeight);
- if (mSurfaceMediaSource == NULL) {
- return NO_INIT;
- }
-
- if (mFrameRate == -1) {
- int32_t frameRate = 0;
- CHECK (mSurfaceMediaSource->getFormat()->findInt32(
- kKeyFrameRate, &frameRate));
- ALOGI("Frame rate is not explicitly set. Use the current frame "
- "rate (%d fps)", frameRate);
- mFrameRate = frameRate;
- } else {
- err = mSurfaceMediaSource->setFrameRate(mFrameRate);
- }
- CHECK(mFrameRate != -1);
-
- mIsMetaDataStoredInVideoBuffers =
- mSurfaceMediaSource->isMetaDataStoredInVideoBuffers();
- return err;
-}
-
status_t StagefrightRecorder::setupCameraSource(
sp<CameraSource> *cameraSource) {
status_t err = OK;
@@ -1384,25 +1408,22 @@ status_t StagefrightRecorder::setupCameraSource(
status_t StagefrightRecorder::setupVideoEncoder(
sp<MediaSource> cameraSource,
- int32_t videoBitRate,
sp<MediaSource> *source) {
source->clear();
- sp<MetaData> enc_meta = new MetaData;
- enc_meta->setInt32(kKeyBitRate, videoBitRate);
- enc_meta->setInt32(kKeyFrameRate, mFrameRate);
+ sp<AMessage> format = new AMessage();
switch (mVideoEncoder) {
case VIDEO_ENCODER_H263:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_H263);
break;
case VIDEO_ENCODER_MPEG_4_SP:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_MPEG4);
break;
case VIDEO_ENCODER_H264:
- enc_meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+ format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
break;
default:
@@ -1410,59 +1431,80 @@ status_t StagefrightRecorder::setupVideoEncoder(
break;
}
- sp<MetaData> meta = cameraSource->getFormat();
+ if (cameraSource != NULL) {
+ sp<MetaData> meta = cameraSource->getFormat();
+
+ int32_t width, height, stride, sliceHeight, colorFormat;
+ CHECK(meta->findInt32(kKeyWidth, &width));
+ CHECK(meta->findInt32(kKeyHeight, &height));
+ CHECK(meta->findInt32(kKeyStride, &stride));
+ CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
+ CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
- int32_t width, height, stride, sliceHeight, colorFormat;
- CHECK(meta->findInt32(kKeyWidth, &width));
- CHECK(meta->findInt32(kKeyHeight, &height));
- CHECK(meta->findInt32(kKeyStride, &stride));
- CHECK(meta->findInt32(kKeySliceHeight, &sliceHeight));
- CHECK(meta->findInt32(kKeyColorFormat, &colorFormat));
+ format->setInt32("width", width);
+ format->setInt32("height", height);
+ format->setInt32("stride", stride);
+ format->setInt32("slice-height", sliceHeight);
+ format->setInt32("color-format", colorFormat);
+ } else {
+ format->setInt32("width", mVideoWidth);
+ format->setInt32("height", mVideoHeight);
+ format->setInt32("stride", mVideoWidth);
+ format->setInt32("slice-height", mVideoWidth);
+ format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
+
+ // set up time lapse/slow motion for surface source
+ if (mCaptureTimeLapse) {
+ if (mTimeBetweenTimeLapseFrameCaptureUs <= 0) {
+ ALOGE("Invalid mTimeBetweenTimeLapseFrameCaptureUs value: %lld",
+ mTimeBetweenTimeLapseFrameCaptureUs);
+ return BAD_VALUE;
+ }
+ format->setInt64("time-lapse",
+ mTimeBetweenTimeLapseFrameCaptureUs);
+ }
+ }
+
+ format->setInt32("bitrate", mVideoBitRate);
+ format->setInt32("frame-rate", mFrameRate);
+ format->setInt32("i-frame-interval", mIFramesIntervalSec);
- enc_meta->setInt32(kKeyWidth, width);
- enc_meta->setInt32(kKeyHeight, height);
- enc_meta->setInt32(kKeyIFramesInterval, mIFramesIntervalSec);
- enc_meta->setInt32(kKeyStride, stride);
- enc_meta->setInt32(kKeySliceHeight, sliceHeight);
- enc_meta->setInt32(kKeyColorFormat, colorFormat);
if (mVideoTimeScale > 0) {
- enc_meta->setInt32(kKeyTimeScale, mVideoTimeScale);
+ format->setInt32("time-scale", mVideoTimeScale);
}
if (mVideoEncoderProfile != -1) {
- enc_meta->setInt32(kKeyVideoProfile, mVideoEncoderProfile);
+ format->setInt32("profile", mVideoEncoderProfile);
}
if (mVideoEncoderLevel != -1) {
- enc_meta->setInt32(kKeyVideoLevel, mVideoEncoderLevel);
+ format->setInt32("level", mVideoEncoderLevel);
}
- OMXClient client;
- CHECK_EQ(client.connect(), (status_t)OK);
-
- uint32_t encoder_flags = 0;
+ uint32_t flags = 0;
if (mIsMetaDataStoredInVideoBuffers) {
- encoder_flags |= OMXCodec::kStoreMetaDataInVideoBuffers;
+ flags |= MediaCodecSource::FLAG_USE_METADATA_INPUT;
}
- // Do not wait for all the input buffers to become available.
- // This give timelapse video recording faster response in
- // receiving output from video encoder component.
- if (mCaptureTimeLapse) {
- encoder_flags |= OMXCodec::kOnlySubmitOneInputBufferAtOneTime;
+ if (cameraSource == NULL) {
+ flags |= MediaCodecSource::FLAG_USE_SURFACE_INPUT;
}
- sp<MediaSource> encoder = OMXCodec::Create(
- client.interface(), enc_meta,
- true /* createEncoder */, cameraSource,
- NULL, encoder_flags);
+ sp<MediaCodecSource> encoder =
+ MediaCodecSource::Create(mLooper, format, cameraSource, flags);
if (encoder == NULL) {
- ALOGW("Failed to create the encoder");
+ ALOGE("Failed to create video encoder");
// When the encoder fails to be created, we need
// release the camera source due to the camera's lock
// and unlock mechanism.
- cameraSource->stop();
+ if (cameraSource != NULL) {
+ cameraSource->stop();
+ }
return UNKNOWN_ERROR;
}
+ if (cameraSource == NULL) {
+ mGraphicBufferProducer = encoder->getGraphicBufferProducer();
+ }
+
*source = encoder;
return OK;
@@ -1496,16 +1538,12 @@ status_t StagefrightRecorder::setupAudioEncoder(const sp<MediaWriter>& writer) {
return OK;
}
-status_t StagefrightRecorder::setupMPEG4Recording(
- int outputFd,
- int32_t videoWidth, int32_t videoHeight,
- int32_t videoBitRate,
- int32_t *totalBitRate,
- sp<MediaWriter> *mediaWriter) {
- mediaWriter->clear();
- *totalBitRate = 0;
+status_t StagefrightRecorder::setupMPEG4Recording() {
+ mWriter.clear();
+ mTotalBitRate = 0;
+
status_t err = OK;
- sp<MediaWriter> writer = new MPEG4Writer(outputFd);
+ sp<MediaWriter> writer = new MPEG4Writer(mOutputFd);
if (mVideoSource < VIDEO_SOURCE_LIST_END) {
@@ -1516,13 +1554,13 @@ status_t StagefrightRecorder::setupMPEG4Recording(
}
sp<MediaSource> encoder;
- err = setupVideoEncoder(mediaSource, videoBitRate, &encoder);
+ err = setupVideoEncoder(mediaSource, &encoder);
if (err != OK) {
return err;
}
writer->addSource(encoder);
- *totalBitRate += videoBitRate;
+ mTotalBitRate += mVideoBitRate;
}
// Audio source is added at the end if it exists.
@@ -1531,7 +1569,7 @@ status_t StagefrightRecorder::setupMPEG4Recording(
if (!mCaptureTimeLapse && (mAudioSource != AUDIO_SOURCE_CNT)) {
err = setupAudioEncoder(writer);
if (err != OK) return err;
- *totalBitRate += mAudioBitRate;
+ mTotalBitRate += mAudioBitRate;
}
if (mInterleaveDurationUs > 0) {
@@ -1549,22 +1587,28 @@ status_t StagefrightRecorder::setupMPEG4Recording(
writer->setMaxFileSize(mMaxFileSizeBytes);
}
- mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+ if (mVideoSource == VIDEO_SOURCE_DEFAULT
+ || mVideoSource == VIDEO_SOURCE_CAMERA) {
+ mStartTimeOffsetMs = mEncoderProfiles->getStartTimeOffsetMs(mCameraId);
+ } else if (mVideoSource == VIDEO_SOURCE_SURFACE) {
+ // surface source doesn't need large initial delay
+ mStartTimeOffsetMs = 200;
+ }
if (mStartTimeOffsetMs > 0) {
reinterpret_cast<MPEG4Writer *>(writer.get())->
setStartTimeOffsetMs(mStartTimeOffsetMs);
}
writer->setListener(mListener);
- *mediaWriter = writer;
+ mWriter = writer;
return OK;
}
-void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
- sp<MetaData> *meta) {
+void StagefrightRecorder::setupMPEG4MetaData(sp<MetaData> *meta) {
+ int64_t startTimeUs = systemTime() / 1000;
(*meta)->setInt64(kKeyTime, startTimeUs);
(*meta)->setInt32(kKeyFileType, mOutputFormat);
- (*meta)->setInt32(kKeyBitRate, totalBitRate);
+ (*meta)->setInt32(kKeyBitRate, mTotalBitRate);
(*meta)->setInt32(kKey64BitFileOffset, mUse64BitFileOffset);
if (mMovieTimeScale > 0) {
(*meta)->setInt32(kKeyTimeScale, mMovieTimeScale);
@@ -1577,27 +1621,6 @@ void StagefrightRecorder::setupMPEG4MetaData(int64_t startTimeUs, int32_t totalB
}
}
-status_t StagefrightRecorder::startMPEG4Recording() {
- int32_t totalBitRate;
- status_t err = setupMPEG4Recording(
- mOutputFd, mVideoWidth, mVideoHeight,
- mVideoBitRate, &totalBitRate, &mWriter);
- if (err != OK) {
- return err;
- }
-
- int64_t startTimeUs = systemTime() / 1000;
- sp<MetaData> meta = new MetaData;
- setupMPEG4MetaData(startTimeUs, totalBitRate, &meta);
-
- err = mWriter->start(meta.get());
- if (err != OK) {
- return err;
- }
-
- return OK;
-}
-
status_t StagefrightRecorder::pause() {
ALOGV("pause");
if (mWriter == NULL) {
@@ -1637,6 +1660,8 @@ status_t StagefrightRecorder::stop() {
mWriter.clear();
}
+ mGraphicBufferProducer.clear();
+
if (mOutputFd >= 0) {
::close(mOutputFd);
mOutputFd = -1;
@@ -1656,7 +1681,6 @@ status_t StagefrightRecorder::stop() {
addBatteryData(params);
}
-
return err;
}
@@ -1708,6 +1732,7 @@ status_t StagefrightRecorder::reset() {
mRotationDegrees = 0;
mLatitudex10000 = -3600000;
mLongitudex10000 = -3600000;
+ mTotalBitRate = 0;
mOutputFd = -1;
diff --git a/media/libmediaplayerservice/StagefrightRecorder.h b/media/libmediaplayerservice/StagefrightRecorder.h
index 31f09e0..377d168 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.h
+++ b/media/libmediaplayerservice/StagefrightRecorder.h
@@ -37,6 +37,7 @@ struct AudioSource;
class MediaProfiles;
class IGraphicBufferProducer;
class SurfaceMediaSource;
+class ALooper;
struct StagefrightRecorder : public MediaRecorderBase {
StagefrightRecorder();
@@ -106,6 +107,7 @@ private:
int32_t mLatitudex10000;
int32_t mLongitudex10000;
int32_t mStartTimeOffsetMs;
+ int32_t mTotalBitRate;
bool mCaptureTimeLapse;
int64_t mTimeBetweenTimeLapseFrameCaptureUs;
@@ -122,22 +124,17 @@ private:
// An <IGraphicBufferProducer> pointer
// will be sent to the client side using which the
// frame buffers will be queued and dequeued
- sp<SurfaceMediaSource> mSurfaceMediaSource;
-
- status_t setupMPEG4Recording(
- int outputFd,
- int32_t videoWidth, int32_t videoHeight,
- int32_t videoBitRate,
- int32_t *totalBitRate,
- sp<MediaWriter> *mediaWriter);
- void setupMPEG4MetaData(int64_t startTimeUs, int32_t totalBitRate,
- sp<MetaData> *meta);
- status_t startMPEG4Recording();
- status_t startAMRRecording();
- status_t startAACRecording();
- status_t startRawAudioRecording();
- status_t startRTPRecording();
- status_t startMPEG2TSRecording();
+ sp<IGraphicBufferProducer> mGraphicBufferProducer;
+ sp<ALooper> mLooper;
+
+ status_t prepareInternal();
+ status_t setupMPEG4Recording();
+ void setupMPEG4MetaData(sp<MetaData> *meta);
+ status_t setupAMRRecording();
+ status_t setupAACRecording();
+ status_t setupRawAudioRecording();
+ status_t setupRTPRecording();
+ status_t setupMPEG2TSRecording();
sp<MediaSource> createAudioSource();
status_t checkVideoEncoderCapabilities(
bool *supportsCameraSourceMetaDataMode);
@@ -147,14 +144,8 @@ private:
// depending on the videosource type
status_t setupMediaSource(sp<MediaSource> *mediaSource);
status_t setupCameraSource(sp<CameraSource> *cameraSource);
- // setup the surfacemediasource for the encoder
- status_t setupSurfaceMediaSource();
-
status_t setupAudioEncoder(const sp<MediaWriter>& writer);
- status_t setupVideoEncoder(
- sp<MediaSource> cameraSource,
- int32_t videoBitRate,
- sp<MediaSource> *source);
+ status_t setupVideoEncoder(sp<MediaSource> cameraSource, sp<MediaSource> *source);
// Encoding parameter handling utilities
status_t setParameter(const String8 &key, const String8 &value);
diff --git a/media/libmediaplayerservice/TestPlayerStub.cpp b/media/libmediaplayerservice/TestPlayerStub.cpp
index 5d9728a..5795773 100644
--- a/media/libmediaplayerservice/TestPlayerStub.cpp
+++ b/media/libmediaplayerservice/TestPlayerStub.cpp
@@ -113,7 +113,9 @@ status_t TestPlayerStub::parseUrl()
// Create the test player.
// Call setDataSource on the test player with the url in param.
status_t TestPlayerStub::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
if (!isTestUrl(url) || NULL != mHandle) {
return INVALID_OPERATION;
}
@@ -162,7 +164,7 @@ status_t TestPlayerStub::setDataSource(
}
mPlayer = (*mNewPlayer)();
- return mPlayer->setDataSource(mContentUrl, headers);
+ return mPlayer->setDataSource(httpService, mContentUrl, headers);
}
// Internal cleanup.
diff --git a/media/libmediaplayerservice/TestPlayerStub.h b/media/libmediaplayerservice/TestPlayerStub.h
index a3802eb..55bf2c8 100644
--- a/media/libmediaplayerservice/TestPlayerStub.h
+++ b/media/libmediaplayerservice/TestPlayerStub.h
@@ -66,7 +66,9 @@ class TestPlayerStub : public MediaPlayerInterface {
// @param url Should be a test url. See class comment.
virtual status_t setDataSource(
- const char* url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char* url,
+ const KeyedVector<String8, String8> *headers);
// Test player for a file descriptor source is not supported.
virtual status_t setDataSource(int, int64_t, int64_t) {
diff --git a/media/libmediaplayerservice/nuplayer/Android.mk b/media/libmediaplayerservice/nuplayer/Android.mk
index f946c1c..f97ba57 100644
--- a/media/libmediaplayerservice/nuplayer/Android.mk
+++ b/media/libmediaplayerservice/nuplayer/Android.mk
@@ -11,7 +11,6 @@ LOCAL_SRC_FILES:= \
NuPlayerStreamListener.cpp \
RTSPSource.cpp \
StreamingSource.cpp \
- mp4/MP4Source.cpp \
LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright/httplive \
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.cpp b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
index b04e7a6..5cf9238 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.cpp
@@ -33,17 +33,16 @@ namespace android {
NuPlayer::GenericSource::GenericSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid,
- uid_t uid)
+ const KeyedVector<String8, String8> *headers)
: Source(notify),
mDurationUs(0ll),
mAudioIsVorbis(false) {
DataSource::RegisterDefaultSniffers();
sp<DataSource> dataSource =
- DataSource::CreateFromURI(url, headers);
+ DataSource::CreateFromURI(httpService, url, headers);
CHECK(dataSource != NULL);
initFromDataSource(dataSource);
@@ -68,6 +67,14 @@ void NuPlayer::GenericSource::initFromDataSource(
CHECK(extractor != NULL);
+ sp<MetaData> fileMeta = extractor->getMetaData();
+ if (fileMeta != NULL) {
+ int64_t duration;
+ if (fileMeta->findInt64(kKeyDuration, &duration)) {
+ mDurationUs = duration;
+ }
+ }
+
for (size_t i = 0; i < extractor->countTracks(); ++i) {
sp<MetaData> meta = extractor->getTrackMetaData(i);
diff --git a/media/libmediaplayerservice/nuplayer/GenericSource.h b/media/libmediaplayerservice/nuplayer/GenericSource.h
index 2da680c..20d597e 100644
--- a/media/libmediaplayerservice/nuplayer/GenericSource.h
+++ b/media/libmediaplayerservice/nuplayer/GenericSource.h
@@ -33,10 +33,9 @@ struct MediaSource;
struct NuPlayer::GenericSource : public NuPlayer::Source {
GenericSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid = false,
- uid_t uid = 0);
+ const KeyedVector<String8, String8> *headers);
GenericSource(
const sp<AMessage> &notify,
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
index 510dcc9..e8431e9 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.cpp
@@ -24,6 +24,7 @@
#include "LiveDataSource.h"
#include "LiveSession.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -34,13 +35,12 @@ namespace android {
NuPlayer::HTTPLiveSource::HTTPLiveSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid, uid_t uid)
+ const KeyedVector<String8, String8> *headers)
: Source(notify),
+ mHTTPService(httpService),
mURL(url),
- mUIDValid(uidValid),
- mUID(uid),
mFlags(0),
mFinalResult(OK),
mOffset(0),
@@ -79,8 +79,7 @@ void NuPlayer::HTTPLiveSource::prepareAsync() {
mLiveSession = new LiveSession(
notify,
(mFlags & kFlagIncognito) ? LiveSession::kFlagIncognito : 0,
- mUIDValid,
- mUID);
+ mHTTPService);
mLiveLooper->registerHandler(mLiveSession);
@@ -121,8 +120,12 @@ status_t NuPlayer::HTTPLiveSource::getDuration(int64_t *durationUs) {
return mLiveSession->getDuration(durationUs);
}
-status_t NuPlayer::HTTPLiveSource::getTrackInfo(Parcel *reply) const {
- return mLiveSession->getTrackInfo(reply);
+size_t NuPlayer::HTTPLiveSource::getTrackCount() const {
+ return mLiveSession->getTrackCount();
+}
+
+sp<AMessage> NuPlayer::HTTPLiveSource::getTrackInfo(size_t trackIndex) const {
+ return mLiveSession->getTrackInfo(trackIndex);
}
status_t NuPlayer::HTTPLiveSource::selectTrack(size_t trackIndex, bool select) {
diff --git a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
index bcc3f8b..6b5f6af 100644
--- a/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
+++ b/media/libmediaplayerservice/nuplayer/HTTPLiveSource.h
@@ -28,10 +28,9 @@ struct LiveSession;
struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
HTTPLiveSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
- const KeyedVector<String8, String8> *headers,
- bool uidValid = false,
- uid_t uid = 0);
+ const KeyedVector<String8, String8> *headers);
virtual void prepareAsync();
virtual void start();
@@ -41,7 +40,8 @@ struct NuPlayer::HTTPLiveSource : public NuPlayer::Source {
virtual status_t feedMoreTSData();
virtual status_t getDuration(int64_t *durationUs);
- virtual status_t getTrackInfo(Parcel *reply) const;
+ virtual size_t getTrackCount() const;
+ virtual sp<AMessage> getTrackInfo(size_t trackIndex) const;
virtual status_t selectTrack(size_t trackIndex, bool select);
virtual status_t seekTo(int64_t seekTimeUs);
@@ -61,10 +61,9 @@ private:
kWhatFetchSubtitleData,
};
+ sp<IMediaHTTPService> mHTTPService;
AString mURL;
KeyedVector<String8, String8> mExtraHeaders;
- bool mUIDValid;
- uid_t mUID;
uint32_t mFlags;
status_t mFinalResult;
off64_t mOffset;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
index 25d55a3..b333043 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.cpp
@@ -28,18 +28,13 @@
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"
-#include "mp4/MP4Source.h"
#include "ATSParser.h"
-#include "SoftwareRenderer.h"
-
-#include <cutils/properties.h> // for property_get
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
@@ -148,7 +143,6 @@ NuPlayer::NuPlayer()
: mUIDValid(false),
mSourceFlags(0),
mVideoIsAVC(false),
- mNeedsSwRenderer(false),
mAudioEOS(false),
mVideoEOS(false),
mScanSourcesPending(false),
@@ -183,14 +177,7 @@ void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) {
sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());
- char prop[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.use-mp4source", prop, NULL)
- && (!strcmp(prop, "1") || !strcasecmp(prop, "true"))) {
- msg->setObject("source", new MP4Source(notify, source));
- } else {
- msg->setObject("source", new StreamingSource(notify, source));
- }
-
+ msg->setObject("source", new StreamingSource(notify, source));
msg->post();
}
@@ -212,7 +199,9 @@ static bool IsHTTPLiveURL(const char *url) {
}
void NuPlayer::setDataSourceAsync(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
size_t len = strlen(url);
@@ -220,16 +209,18 @@ void NuPlayer::setDataSourceAsync(
sp<Source> source;
if (IsHTTPLiveURL(url)) {
- source = new HTTPLiveSource(notify, url, headers, mUIDValid, mUID);
+ source = new HTTPLiveSource(notify, httpService, url, headers);
} else if (!strncasecmp(url, "rtsp://", 7)) {
- source = new RTSPSource(notify, url, headers, mUIDValid, mUID);
+ source = new RTSPSource(
+ notify, httpService, url, headers, mUIDValid, mUID);
} else if ((!strncasecmp(url, "http://", 7)
|| !strncasecmp(url, "https://", 8))
&& ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
|| strstr(url, ".sdp?"))) {
- source = new RTSPSource(notify, url, headers, mUIDValid, mUID, true);
+ source = new RTSPSource(
+ notify, httpService, url, headers, mUIDValid, mUID, true);
} else {
- source = new GenericSource(notify, url, headers, mUIDValid, mUID);
+ source = new GenericSource(notify, httpService, url, headers);
}
msg->setObject("source", source);
@@ -314,6 +305,34 @@ bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
}
}
+void NuPlayer::writeTrackInfo(
+ Parcel* reply, const sp<AMessage> format) const {
+ int32_t trackType;
+ CHECK(format->findInt32("type", &trackType));
+
+ AString lang;
+ CHECK(format->findString("language", &lang));
+
+ reply->writeInt32(2); // write something non-zero
+ reply->writeInt32(trackType);
+ reply->writeString16(String16(lang.c_str()));
+
+ if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+
+ int32_t isAuto, isDefault, isForced;
+ CHECK(format->findInt32("auto", &isAuto));
+ CHECK(format->findInt32("default", &isDefault));
+ CHECK(format->findInt32("forced", &isForced));
+
+ reply->writeString16(String16(mime.c_str()));
+ reply->writeInt32(isAuto);
+ reply->writeInt32(isDefault);
+ reply->writeInt32(isForced);
+ }
+}
+
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatSetDataSource:
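
A companion sketch of how a reader would consume the Parcel layout writeTrackInfo() produces: a field count, the track type, the language string, and, for subtitle tracks, the mime plus three flags. Constant values here are assumptions, not taken from the patch:

    #include <binder/Parcel.h>
    #include <utils/String16.h>

    using namespace android;

    static void readOneTrack(const Parcel &reply) {
        int32_t fieldCount = reply.readInt32();      // writeTrackInfo writes 2
        int32_t trackType  = reply.readInt32();
        String16 language  = reply.readString16();
        if (trackType == 4 /* MEDIA_TRACK_TYPE_SUBTITLE, assumed value */) {
            String16 mime     = reply.readString16();
            int32_t isAuto    = reply.readInt32();
            int32_t isDefault = reply.readInt32();
            int32_t isForced  = reply.readInt32();
            (void)mime; (void)isAuto; (void)isDefault; (void)isForced;
        }
        (void)fieldCount; (void)language;
    }
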
@@ -348,16 +367,33 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- status_t err = INVALID_OPERATION;
+ Parcel* reply;
+ CHECK(msg->findPointer("reply", (void**)&reply));
+
+ size_t inbandTracks = 0;
if (mSource != NULL) {
- Parcel* reply;
- CHECK(msg->findPointer("reply", (void**)&reply));
- err = mSource->getTrackInfo(reply);
+ inbandTracks = mSource->getTrackCount();
}
- sp<AMessage> response = new AMessage;
- response->setInt32("err", err);
+ size_t ccTracks = 0;
+ if (mCCDecoder != NULL) {
+ ccTracks = mCCDecoder->getTrackCount();
+ }
+
+ // total track count
+ reply->writeInt32(inbandTracks + ccTracks);
+
+ // write inband tracks
+ for (size_t i = 0; i < inbandTracks; ++i) {
+ writeTrackInfo(reply, mSource->getTrackInfo(i));
+ }
+
+ // write CC track
+ for (size_t i = 0; i < ccTracks; ++i) {
+ writeTrackInfo(reply, mCCDecoder->getTrackInfo(i));
+ }
+ sp<AMessage> response = new AMessage;
response->postReply(replyID);
break;
}
@@ -367,13 +403,30 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
+ size_t trackIndex;
+ int32_t select;
+ CHECK(msg->findSize("trackIndex", &trackIndex));
+ CHECK(msg->findInt32("select", &select));
+
status_t err = INVALID_OPERATION;
+
+ size_t inbandTracks = 0;
if (mSource != NULL) {
- size_t trackIndex;
- int32_t select;
- CHECK(msg->findSize("trackIndex", &trackIndex));
- CHECK(msg->findInt32("select", &select));
+ inbandTracks = mSource->getTrackCount();
+ }
+ size_t ccTracks = 0;
+ if (mCCDecoder != NULL) {
+ ccTracks = mCCDecoder->getTrackCount();
+ }
+
+ if (trackIndex < inbandTracks) {
err = mSource->selectTrack(trackIndex, select);
+ } else {
+ trackIndex -= inbandTracks;
+
+ if (trackIndex < ccTracks) {
+ err = mCCDecoder->selectTrack(trackIndex, select);
+ }
}
sp<AMessage> response = new AMessage;
@@ -447,7 +500,6 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
ALOGV("kWhatStart");
mVideoIsAVC = false;
- mNeedsSwRenderer = false;
mAudioEOS = false;
mVideoEOS = false;
mSkipRenderingAudioUntilMediaTimeUs = -1;
@@ -538,24 +590,21 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
{
bool audio = msg->what() == kWhatAudioNotify;
- sp<AMessage> codecRequest;
- CHECK(msg->findMessage("codec-request", &codecRequest));
-
int32_t what;
- CHECK(codecRequest->findInt32("what", &what));
+ CHECK(msg->findInt32("what", &what));
- if (what == ACodec::kWhatFillThisBuffer) {
+ if (what == Decoder::kWhatFillThisBuffer) {
status_t err = feedDecoderInputData(
- audio, codecRequest);
+ audio, msg);
if (err == -EWOULDBLOCK) {
if (mSource->feedMoreTSData() == OK) {
msg->post(10000ll);
}
}
- } else if (what == ACodec::kWhatEOS) {
+ } else if (what == Decoder::kWhatEOS) {
int32_t err;
- CHECK(codecRequest->findInt32("err", &err));
+ CHECK(msg->findInt32("err", &err));
if (err == ERROR_END_OF_STREAM) {
ALOGV("got %s decoder EOS", audio ? "audio" : "video");
@@ -566,7 +615,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
mRenderer->queueEOS(audio, err);
- } else if (what == ACodec::kWhatFlushCompleted) {
+ } else if (what == Decoder::kWhatFlushCompleted) {
bool needShutdown;
if (audio) {
@@ -595,14 +644,17 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
finishFlushIfPossible();
- } else if (what == ACodec::kWhatOutputFormatChanged) {
+ } else if (what == Decoder::kWhatOutputFormatChanged) {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+
if (audio) {
int32_t numChannels;
- CHECK(codecRequest->findInt32(
+ CHECK(format->findInt32(
"channel-count", &numChannels));
int32_t sampleRate;
- CHECK(codecRequest->findInt32("sample-rate", &sampleRate));
+ CHECK(format->findInt32("sample-rate", &sampleRate));
ALOGV("Audio output format changed to %d Hz, %d channels",
sampleRate, numChannels);
@@ -626,7 +678,7 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
int32_t channelMask;
- if (!codecRequest->findInt32("channel-mask", &channelMask)) {
+ if (!format->findInt32("channel-mask", &channelMask)) {
channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
}
@@ -647,11 +699,11 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
// video
int32_t width, height;
- CHECK(codecRequest->findInt32("width", &width));
- CHECK(codecRequest->findInt32("height", &height));
+ CHECK(format->findInt32("width", &width));
+ CHECK(format->findInt32("height", &height));
int32_t cropLeft, cropTop, cropRight, cropBottom;
- CHECK(codecRequest->findRect(
+ CHECK(format->findRect(
"crop",
&cropLeft, &cropTop, &cropRight, &cropBottom));
@@ -684,22 +736,8 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
notifyListener(
MEDIA_SET_VIDEO_SIZE, displayWidth, displayHeight);
-
- if (mNeedsSwRenderer && mNativeWindow != NULL) {
- int32_t colorFormat;
- CHECK(codecRequest->findInt32("color-format", &colorFormat));
-
- sp<MetaData> meta = new MetaData;
- meta->setInt32(kKeyWidth, width);
- meta->setInt32(kKeyHeight, height);
- meta->setRect(kKeyCropRect, cropLeft, cropTop, cropRight, cropBottom);
- meta->setInt32(kKeyColorFormat, colorFormat);
-
- mRenderer->setSoftRenderer(
- new SoftwareRenderer(mNativeWindow->getNativeWindow(), meta));
- }
}
- } else if (what == ACodec::kWhatShutdownCompleted) {
+ } else if (what == Decoder::kWhatShutdownCompleted) {
ALOGV("%s shutdown completed", audio ? "audio" : "video");
if (audio) {
mAudioDecoder.clear();
@@ -714,22 +752,15 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
}
finishFlushIfPossible();
- } else if (what == ACodec::kWhatError) {
+ } else if (what == Decoder::kWhatError) {
ALOGE("Received error from %s decoder, aborting playback.",
audio ? "audio" : "video");
mRenderer->queueEOS(audio, UNKNOWN_ERROR);
- } else if (what == ACodec::kWhatDrainThisBuffer) {
- renderBuffer(audio, codecRequest);
- } else if (what == ACodec::kWhatComponentAllocated) {
- if (!audio) {
- AString name;
- CHECK(codecRequest->findString("componentName", &name));
- mNeedsSwRenderer = name.startsWith("OMX.google.");
- }
- } else if (what != ACodec::kWhatComponentConfigured
- && what != ACodec::kWhatBuffersAllocated) {
- ALOGV("Unhandled codec notification %d '%c%c%c%c'.",
+ } else if (what == Decoder::kWhatDrainThisBuffer) {
+ renderBuffer(audio, msg);
+ } else {
+ ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
what,
what >> 24,
(what >> 16) & 0xff,
@@ -859,6 +890,12 @@ void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatClosedCaptionNotify:
+ {
+ onClosedCaptionNotify(msg);
+ break;
+ }
+
default:
TRESPASS();
break;
@@ -922,6 +959,9 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
AString mime;
CHECK(format->findString("mime", &mime));
mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());
+
+ sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id());
+ mCCDecoder = new CCDecoder(ccNotify);
}
sp<AMessage> notify =
@@ -930,8 +970,7 @@ status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
*decoder = audio ? new Decoder(notify) :
new Decoder(notify, mNativeWindow);
- looper()->registerHandler(*decoder);
-
+ (*decoder)->init();
(*decoder)->configure(format);
return OK;
@@ -1063,6 +1102,10 @@ status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
mediaTimeUs / 1E6);
#endif
+ if (!audio) {
+ mCCDecoder->decode(accessUnit);
+ }
+
reply->setBuffer("buffer", accessUnit);
reply->post();
@@ -1091,14 +1134,15 @@ void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
+ int64_t mediaTimeUs;
+ CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
+
int64_t &skipUntilMediaTimeUs =
audio
? mSkipRenderingAudioUntilMediaTimeUs
: mSkipRenderingVideoUntilMediaTimeUs;
if (skipUntilMediaTimeUs >= 0) {
- int64_t mediaTimeUs;
- CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));
if (mediaTimeUs < skipUntilMediaTimeUs) {
ALOGV("dropping %s buffer at time %lld as requested.",
@@ -1112,6 +1156,10 @@ void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
skipUntilMediaTimeUs = -1;
}
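+ // give the CC decoder a chance to emit caption data timed at this video buffer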
+ if (!audio && mCCDecoder->isSelected()) {
+ mCCDecoder->display(mediaTimeUs);
+ }
+
mRenderer->queueBuffer(audio, buffer, reply);
}
@@ -1219,6 +1267,14 @@ status_t NuPlayer::selectTrack(size_t trackIndex, bool select) {
sp<AMessage> response;
status_t err = msg->postAndAwaitResponse(&response);
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
return err;
}
@@ -1408,16 +1464,15 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
sp<NuPlayerDriver> driver = mDriver.promote();
if (driver != NULL) {
- driver->notifyPrepareCompleted(err);
- }
-
- int64_t durationUs;
- if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
- sp<NuPlayerDriver> driver = mDriver.promote();
- if (driver != NULL) {
+ // notify duration first, so that it's definitely set when
+ // the app receives the "prepare complete" callback.
+ int64_t durationUs;
+ if (mSource->getDuration(&durationUs) == OK) {
driver->notifyDuration(durationUs);
}
+ driver->notifyPrepareCompleted(err);
}
+
break;
}
@@ -1471,21 +1526,7 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
sp<ABuffer> buffer;
CHECK(msg->findBuffer("buffer", &buffer));
- int32_t trackIndex;
- int64_t timeUs, durationUs;
- CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
- CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
- CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
-
- Parcel in;
- in.writeInt32(trackIndex);
- in.writeInt64(timeUs);
- in.writeInt64(durationUs);
- in.writeInt32(buffer->size());
- in.writeInt32(buffer->size());
- in.write(buffer->data(), buffer->size());
-
- notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
+ sendSubtitleData(buffer, 0 /* baseIndex */);
break;
}
@@ -1507,6 +1548,56 @@ void NuPlayer::onSourceNotify(const sp<AMessage> &msg) {
}
}
+void NuPlayer::onClosedCaptionNotify(const sp<AMessage> &msg) {
+ int32_t what;
+ CHECK(msg->findInt32("what", &what));
+
+ switch (what) {
+ case NuPlayer::CCDecoder::kWhatClosedCaptionData:
+ {
+ sp<ABuffer> buffer;
+ CHECK(msg->findBuffer("buffer", &buffer));
+
+ size_t inbandTracks = 0;
+ if (mSource != NULL) {
+ inbandTracks = mSource->getTrackCount();
+ }
+
+ sendSubtitleData(buffer, inbandTracks);
+ break;
+ }
+
+ case NuPlayer::CCDecoder::kWhatTrackAdded:
+ {
+ notifyListener(MEDIA_INFO, MEDIA_INFO_METADATA_UPDATE, 0);
+
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+void NuPlayer::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) {
+ int32_t trackIndex;
+ int64_t timeUs, durationUs;
+ CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex));
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+ CHECK(buffer->meta()->findInt64("durationUs", &durationUs));
+
+ Parcel in;
+ in.writeInt32(trackIndex + baseIndex);
+ in.writeInt64(timeUs);
+ in.writeInt64(durationUs);
+ in.writeInt32(buffer->size());
+ in.writeInt32(buffer->size());
+ in.write(buffer->data(), buffer->size());
+
+ notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in);
+}
////////////////////////////////////////////////////////////////////////////////
void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) {
@@ -1531,7 +1622,7 @@ void NuPlayer::Source::notifyPrepared(status_t err) {
notify->post();
}
-void NuPlayer::Source::onMessageReceived(const sp<AMessage> &msg) {
+void NuPlayer::Source::onMessageReceived(const sp<AMessage> & /* msg */) {
TRESPASS();
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayer.h b/media/libmediaplayerservice/nuplayer/NuPlayer.h
index 590e1f2..5be71fb 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayer.h
@@ -24,7 +24,7 @@
namespace android {
-struct ACodec;
+struct ABuffer;
struct MetaData;
struct NuPlayerDriver;
@@ -38,7 +38,9 @@ struct NuPlayer : public AHandler {
void setDataSourceAsync(const sp<IStreamSource> &source);
void setDataSourceAsync(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
void setDataSourceAsync(int fd, int64_t offset, int64_t length);
@@ -74,6 +76,7 @@ public:
private:
struct Decoder;
+ struct CCDecoder;
struct GenericSource;
struct HTTPLiveSource;
struct Renderer;
@@ -96,6 +99,7 @@ private:
kWhatScanSources = 'scan',
kWhatVideoNotify = 'vidN',
kWhatAudioNotify = 'audN',
+ kWhatClosedCaptionNotify = 'capN',
kWhatRendererNotify = 'renN',
kWhatReset = 'rset',
kWhatSeek = 'seek',
@@ -116,8 +120,8 @@ private:
sp<MediaPlayerBase::AudioSink> mAudioSink;
sp<Decoder> mVideoDecoder;
bool mVideoIsAVC;
- bool mNeedsSwRenderer;
sp<Decoder> mAudioDecoder;
+ sp<CCDecoder> mCCDecoder;
sp<Renderer> mRenderer;
List<sp<Action> > mDeferredActions;
@@ -185,10 +189,15 @@ private:
void performSetSurface(const sp<NativeWindowWrapper> &wrapper);
void onSourceNotify(const sp<AMessage> &msg);
+ void onClosedCaptionNotify(const sp<AMessage> &msg);
void queueDecoderShutdown(
bool audio, bool video, const sp<AMessage> &reply);
+ void sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex);
+
+ void writeTrackInfo(Parcel* reply, const sp<AMessage> format) const;
+
DISALLOW_EVIL_CONSTRUCTORS(NuPlayer);
};
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index 2423fd5..5abfb71 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -17,14 +17,18 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerDecoder"
#include <utils/Log.h>
+#include <inttypes.h>
#include "NuPlayerDecoder.h"
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/ACodec.h>
+#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/MediaErrors.h>
namespace android {
@@ -32,122 +36,441 @@ NuPlayer::Decoder::Decoder(
const sp<AMessage> &notify,
const sp<NativeWindowWrapper> &nativeWindow)
: mNotify(notify),
- mNativeWindow(nativeWindow) {
+ mNativeWindow(nativeWindow),
+ mBufferGeneration(0),
+ mPaused(true),
+ mComponentName("decoder") {
+ // Every decoder has its own looper because MediaCodec operations
+ // are blocking, but NuPlayer needs asynchronous operations.
+ mDecoderLooper = new ALooper;
+ mDecoderLooper->setName("NuPlayerDecoder");
+ mDecoderLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+
+ mCodecLooper = new ALooper;
+ mCodecLooper->setName("NuPlayerDecoder-MC");
+ mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
}
NuPlayer::Decoder::~Decoder() {
}
-void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
+void NuPlayer::Decoder::onConfigure(const sp<AMessage> &format) {
CHECK(mCodec == NULL);
+ ++mBufferGeneration;
+
AString mime;
CHECK(format->findString("mime", &mime));
- sp<AMessage> notifyMsg =
- new AMessage(kWhatCodecNotify, id());
+ sp<Surface> surface = NULL;
+ if (mNativeWindow != NULL) {
+ surface = mNativeWindow->getSurfaceTextureClient();
+ }
- mCSDIndex = 0;
- for (size_t i = 0;; ++i) {
- sp<ABuffer> csd;
- if (!format->findBuffer(StringPrintf("csd-%d", i).c_str(), &csd)) {
- break;
- }
+ mComponentName = mime;
+ mComponentName.append(" decoder");
+ ALOGV("[%s] onConfigure (surface=%p)", mComponentName.c_str(), surface.get());
- mCSD.push(csd);
+ mCodec = MediaCodec::CreateByType(mCodecLooper, mime.c_str(), false /* encoder */);
+ if (mCodec == NULL) {
+ ALOGE("Failed to create %s decoder", mime.c_str());
+ handleError(UNKNOWN_ERROR);
+ return;
}
+ mCodec->getName(&mComponentName);
+
if (mNativeWindow != NULL) {
- format->setObject("native-window", mNativeWindow);
+ // disconnect from surface as MediaCodec will reconnect
+ CHECK_EQ((int)NO_ERROR,
+ native_window_api_disconnect(
+ surface.get(),
+ NATIVE_WINDOW_API_MEDIA));
+ }
+ status_t err = mCodec->configure(
+ format, surface, NULL /* crypto */, 0 /* flags */);
+ if (err != OK) {
+ ALOGE("Failed to configure %s decoder (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ return;
+ }
+ // the following should work in the configured state
+ CHECK_EQ((status_t)OK, mCodec->getOutputFormat(&mOutputFormat));
+ CHECK_EQ((status_t)OK, mCodec->getInputFormat(&mInputFormat));
+
+ err = mCodec->start();
+ if (err != OK) {
+ ALOGE("Failed to start %s decoder (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ return;
}
- // Current video decoders do not return from OMX_FillThisBuffer
- // quickly, violating the OpenMAX specs, until that is remedied
- // we need to invest in an extra looper to free the main event
- // queue.
- bool needDedicatedLooper = !strncasecmp(mime.c_str(), "video/", 6);
+ // the following should work after start
+ CHECK_EQ((status_t)OK, mCodec->getInputBuffers(&mInputBuffers));
+ CHECK_EQ((status_t)OK, mCodec->getOutputBuffers(&mOutputBuffers));
+ ALOGV("[%s] got %zu input and %zu output buffers",
+ mComponentName.c_str(),
+ mInputBuffers.size(),
+ mOutputBuffers.size());
- mFormat = format;
- mCodec = new ACodec;
+ requestCodecNotification();
+ mPaused = false;
+}
- if (needDedicatedLooper && mCodecLooper == NULL) {
- mCodecLooper = new ALooper;
- mCodecLooper->setName("NuPlayerDecoder");
- mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
+void NuPlayer::Decoder::requestCodecNotification() {
+ if (mCodec != NULL) {
+ sp<AMessage> reply = new AMessage(kWhatCodecNotify, id());
+ reply->setInt32("generation", mBufferGeneration);
+ mCodec->requestActivityNotification(reply);
}
+}
- (needDedicatedLooper ? mCodecLooper : looper())->registerHandler(mCodec);
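+// mBufferGeneration is bumped on configure, flush and shutdown; replies carrying
+// an older generation refer to buffers that no longer exist and are ignored.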
+bool NuPlayer::Decoder::isStaleReply(const sp<AMessage> &msg) {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+ return generation != mBufferGeneration;
+}
- mCodec->setNotificationMessage(notifyMsg);
- mCodec->initiateSetup(format);
+void NuPlayer::Decoder::init() {
+ mDecoderLooper->registerHandler(this);
}
-void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatCodecNotify:
- {
- int32_t what;
- CHECK(msg->findInt32("what", &what));
+void NuPlayer::Decoder::configure(const sp<AMessage> &format) {
+ sp<AMessage> msg = new AMessage(kWhatConfigure, id());
+ msg->setMessage("format", format);
+ msg->post();
+}
- if (what == ACodec::kWhatFillThisBuffer) {
- onFillThisBuffer(msg);
- } else {
- sp<AMessage> notify = mNotify->dup();
- notify->setMessage("codec-request", msg);
- notify->post();
- }
- break;
+void NuPlayer::Decoder::handleError(int32_t err)
+{
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatError);
+ notify->setInt32("err", err);
+ notify->post();
+}
+
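+// Dequeue one input buffer from MediaCodec and ask NuPlayer (via mNotify) to
+// fill it; returns false when no input buffer is currently available.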
+bool NuPlayer::Decoder::handleAnInputBuffer() {
+ size_t bufferIx = -1;
+ status_t res = mCodec->dequeueInputBuffer(&bufferIx);
+ ALOGV("[%s] dequeued input: %d",
+ mComponentName.c_str(), res == OK ? (int)bufferIx : res);
+ if (res != OK) {
+ if (res != -EAGAIN) {
+ handleError(res);
}
+ return false;
+ }
- default:
- TRESPASS();
- break;
+ CHECK_LT(bufferIx, mInputBuffers.size());
+
+ sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, id());
+ reply->setSize("buffer-ix", bufferIx);
+ reply->setInt32("generation", mBufferGeneration);
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFillThisBuffer);
+ notify->setBuffer("buffer", mInputBuffers[bufferIx]);
+ notify->setMessage("reply", reply);
+ notify->post();
+ return true;
+}
+
+void android::NuPlayer::Decoder::onInputBufferFilled(const sp<AMessage> &msg) {
+ size_t bufferIx;
+ CHECK(msg->findSize("buffer-ix", &bufferIx));
+ CHECK_LT(bufferIx, mInputBuffers.size());
+ sp<ABuffer> codecBuffer = mInputBuffers[bufferIx];
+
+ sp<ABuffer> buffer;
+ bool hasBuffer = msg->findBuffer("buffer", &buffer);
+ if (buffer == NULL /* includes !hasBuffer */) {
+ int32_t streamErr = ERROR_END_OF_STREAM;
+ CHECK(msg->findInt32("err", &streamErr) || !hasBuffer);
+
+ if (streamErr == OK) {
+ /* no data and no error: the buffer is simply returned for us to hold on to */
+ return;
+ }
+
+ // attempt to queue EOS
+ status_t err = mCodec->queueInputBuffer(
+ bufferIx,
+ 0,
+ 0,
+ 0,
+ MediaCodec::BUFFER_FLAG_EOS);
+ if (streamErr == ERROR_END_OF_STREAM && err != OK) {
+ streamErr = err;
+ // err will not be ERROR_END_OF_STREAM
+ }
+
+ if (streamErr != ERROR_END_OF_STREAM) {
+ handleError(streamErr);
+ }
+ } else {
+ int64_t timeUs = 0;
+ uint32_t flags = 0;
+ CHECK(buffer->meta()->findInt64("timeUs", &timeUs));
+
+ int32_t eos;
+ // we do not expect CODECCONFIG or SYNCFRAME for decoder
+ if (buffer->meta()->findInt32("eos", &eos) && eos) {
+ flags |= MediaCodec::BUFFER_FLAG_EOS;
+ }
+
+ // copy into codec buffer
+ if (buffer != codecBuffer) {
+ CHECK_LE(buffer->size(), codecBuffer->capacity());
+ codecBuffer->setRange(0, buffer->size());
+ memcpy(codecBuffer->data(), buffer->data(), buffer->size());
+ }
+
+ status_t err = mCodec->queueInputBuffer(
+ bufferIx,
+ codecBuffer->offset(),
+ codecBuffer->size(),
+ timeUs,
+ flags);
+ if (err != OK) {
+ ALOGE("Failed to queue input buffer for %s (err=%d)",
+ mComponentName.c_str(), err);
+ handleError(err);
+ }
}
}
-void NuPlayer::Decoder::onFillThisBuffer(const sp<AMessage> &msg) {
- sp<AMessage> reply;
- CHECK(msg->findMessage("reply", &reply));
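+// Dequeue one output buffer or INFO event from MediaCodec; format changes and
+// decoded buffers are forwarded to NuPlayer, and false is returned when nothing
+// more is pending.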
+bool NuPlayer::Decoder::handleAnOutputBuffer() {
+ size_t bufferIx = -1;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ uint32_t flags;
+ status_t res = mCodec->dequeueOutputBuffer(
+ &bufferIx, &offset, &size, &timeUs, &flags);
+
+ if (res != OK) {
+ ALOGV("[%s] dequeued output: %d", mComponentName.c_str(), res);
+ } else {
+ ALOGV("[%s] dequeued output: %d (time=%lld flags=%" PRIu32 ")",
+ mComponentName.c_str(), (int)bufferIx, timeUs, flags);
+ }
-#if 0
- sp<ABuffer> outBuffer;
- CHECK(msg->findBuffer("buffer", &outBuffer));
-#else
- sp<ABuffer> outBuffer;
-#endif
+ if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
+ res = mCodec->getOutputBuffers(&mOutputBuffers);
+ if (res != OK) {
+ ALOGE("Failed to get output buffers for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
+ // NuPlayer ignores this
+ return true;
+ } else if (res == INFO_FORMAT_CHANGED) {
+ sp<AMessage> format = new AMessage();
+ res = mCodec->getOutputFormat(&format);
+ if (res != OK) {
+ ALOGE("Failed to get output format for %s after INFO event (err=%d)",
+ mComponentName.c_str(), res);
+ handleError(res);
+ return false;
+ }
- if (mCSDIndex < mCSD.size()) {
- outBuffer = mCSD.editItemAt(mCSDIndex++);
- outBuffer->meta()->setInt64("timeUs", 0);
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatOutputFormatChanged);
+ notify->setMessage("format", format);
+ notify->post();
+ return true;
+ } else if (res == INFO_DISCONTINUITY) {
+ // nothing to do
+ return true;
+ } else if (res != OK) {
+ if (res != -EAGAIN) {
+ handleError(res);
+ }
+ return false;
+ }
- reply->setBuffer("buffer", outBuffer);
- reply->post();
- return;
+ CHECK_LT(bufferIx, mOutputBuffers.size());
+ sp<ABuffer> buffer = mOutputBuffers[bufferIx];
+ buffer->setRange(offset, size);
+ buffer->meta()->clear();
+ buffer->meta()->setInt64("timeUs", timeUs);
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ buffer->meta()->setInt32("eos", true);
}
+ // we do not expect CODECCONFIG or SYNCFRAME for decoder
+
+ sp<AMessage> reply = new AMessage(kWhatRenderBuffer, id());
+ reply->setSize("buffer-ix", bufferIx);
+ reply->setInt32("generation", mBufferGeneration);
sp<AMessage> notify = mNotify->dup();
- notify->setMessage("codec-request", msg);
+ notify->setInt32("what", kWhatDrainThisBuffer);
+ notify->setBuffer("buffer", buffer);
+ notify->setMessage("reply", reply);
notify->post();
+
+ // FIXME: This should be handled after rendering is complete,
+ // but Renderer needs it now
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ ALOGV("queueing eos [%s]", mComponentName.c_str());
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatEOS);
+ notify->setInt32("err", ERROR_END_OF_STREAM);
+ notify->post();
+ }
+ return true;
}
-void NuPlayer::Decoder::signalFlush() {
- if (mCodec != NULL) {
- mCodec->signalFlush();
+void NuPlayer::Decoder::onRenderBuffer(const sp<AMessage> &msg) {
+ status_t err;
+ int32_t render;
+ size_t bufferIx;
+ CHECK(msg->findSize("buffer-ix", &bufferIx));
+ if (msg->findInt32("render", &render) && render) {
+ err = mCodec->renderOutputBufferAndRelease(bufferIx);
+ } else {
+ err = mCodec->releaseOutputBuffer(bufferIx);
+ }
+ if (err != OK) {
+ ALOGE("failed to release output buffer for %s (err=%d)",
+ mComponentName.c_str(), err);
+ handleError(err);
}
}
-void NuPlayer::Decoder::signalResume() {
+void NuPlayer::Decoder::onFlush() {
+ status_t err = OK;
if (mCodec != NULL) {
- mCodec->signalResume();
+ err = mCodec->flush();
+ ++mBufferGeneration;
}
+
+ if (err != OK) {
+ ALOGE("failed to flush %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ return;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatFlushCompleted);
+ notify->post();
+ mPaused = true;
}
-void NuPlayer::Decoder::initiateShutdown() {
+void NuPlayer::Decoder::onResume() {
+ mPaused = false;
+}
+
+void NuPlayer::Decoder::onShutdown() {
+ status_t err = OK;
if (mCodec != NULL) {
- mCodec->initiateShutdown();
+ err = mCodec->release();
+ mCodec = NULL;
+ ++mBufferGeneration;
+
+ if (mNativeWindow != NULL) {
+ // reconnect to surface as MediaCodec disconnected from it
+ CHECK_EQ((int)NO_ERROR,
+ native_window_api_connect(
+ mNativeWindow->getNativeWindow().get(),
+ NATIVE_WINDOW_API_MEDIA));
+ }
+ mComponentName = "decoder";
+ }
+
+ if (err != OK) {
+ ALOGE("failed to release %s (err=%d)", mComponentName.c_str(), err);
+ handleError(err);
+ return;
+ }
+
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatShutdownCompleted);
+ notify->post();
+ mPaused = true;
+}
+
+void NuPlayer::Decoder::onMessageReceived(const sp<AMessage> &msg) {
+ ALOGV("[%s] onMessage: %s", mComponentName.c_str(), msg->debugString().c_str());
+
+ switch (msg->what()) {
+ case kWhatConfigure:
+ {
+ sp<AMessage> format;
+ CHECK(msg->findMessage("format", &format));
+ onConfigure(format);
+ break;
+ }
+
+ case kWhatCodecNotify:
+ {
+ if (!isStaleReply(msg)) {
+ if (!mPaused) {
+ while (handleAnInputBuffer()) {
+ }
+ }
+
+ while (handleAnOutputBuffer()) {
+ }
+ }
+
+ requestCodecNotification();
+ break;
+ }
+
+ case kWhatInputBufferFilled:
+ {
+ if (!isStaleReply(msg)) {
+ onInputBufferFilled(msg);
+ }
+ break;
+ }
+
+ case kWhatRenderBuffer:
+ {
+ if (!isStaleReply(msg)) {
+ onRenderBuffer(msg);
+ }
+ break;
+ }
+
+ case kWhatFlush:
+ {
+ onFlush();
+ break;
+ }
+
+ case kWhatResume:
+ {
+ onResume();
+ break;
+ }
+
+ case kWhatShutdown:
+ {
+ onShutdown();
+ break;
+ }
+
+ default:
+ TRESPASS();
+ break;
}
}
+void NuPlayer::Decoder::signalFlush() {
+ (new AMessage(kWhatFlush, id()))->post();
+}
+
+void NuPlayer::Decoder::signalResume() {
+ (new AMessage(kWhatResume, id()))->post();
+}
+
+void NuPlayer::Decoder::initiateShutdown() {
+ (new AMessage(kWhatShutdown, id()))->post();
+}
+
bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const {
if (targetFormat == NULL) {
return true;
@@ -163,14 +486,16 @@ bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &ta
const char * keys[] = { "channel-count", "sample-rate", "is-adts" };
for (unsigned int i = 0; i < sizeof(keys) / sizeof(keys[0]); i++) {
int32_t oldVal, newVal;
- if (!mFormat->findInt32(keys[i], &oldVal) || !targetFormat->findInt32(keys[i], &newVal)
- || oldVal != newVal) {
+ if (!mOutputFormat->findInt32(keys[i], &oldVal) ||
+ !targetFormat->findInt32(keys[i], &newVal) ||
+ oldVal != newVal) {
return false;
}
}
sp<ABuffer> oldBuf, newBuf;
- if (mFormat->findBuffer("csd-0", &oldBuf) && targetFormat->findBuffer("csd-0", &newBuf)) {
+ if (mOutputFormat->findBuffer("csd-0", &oldBuf) &&
+ targetFormat->findBuffer("csd-0", &newBuf)) {
if (oldBuf->size() != newBuf->size()) {
return false;
}
@@ -181,7 +506,7 @@ bool NuPlayer::Decoder::supportsSeamlessAudioFormatChange(const sp<AMessage> &ta
}
bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetFormat) const {
- if (mFormat == NULL) {
+ if (mOutputFormat == NULL) {
return false;
}
@@ -190,7 +515,7 @@ bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetF
}
AString oldMime, newMime;
- if (!mFormat->findString("mime", &oldMime)
+ if (!mOutputFormat->findString("mime", &oldMime)
|| !targetFormat->findString("mime", &newMime)
|| !(oldMime == newMime)) {
return false;
@@ -201,12 +526,282 @@ bool NuPlayer::Decoder::supportsSeamlessFormatChange(const sp<AMessage> &targetF
if (audio) {
seamless = supportsSeamlessAudioFormatChange(targetFormat);
} else {
- seamless = mCodec != NULL && mCodec->isConfiguredForAdaptivePlayback();
+ int32_t isAdaptive;
+ seamless = (mCodec != NULL &&
+ mInputFormat->findInt32("adaptive-playback", &isAdaptive) &&
+ isAdaptive);
}
ALOGV("%s seamless support for %s", seamless ? "yes" : "no", oldMime.c_str());
return seamless;
}
+struct NuPlayer::CCDecoder::CCData {
+ CCData(uint8_t type, uint8_t data1, uint8_t data2)
+ : mType(type), mData1(data1), mData2(data2) {
+ }
+
+ uint8_t mType;
+ uint8_t mData1;
+ uint8_t mData2;
+};
+
+NuPlayer::CCDecoder::CCDecoder(const sp<AMessage> &notify)
+ : mNotify(notify),
+ mTrackCount(0),
+ mSelectedTrack(-1) {
+}
+
+size_t NuPlayer::CCDecoder::getTrackCount() const {
+ return mTrackCount;
+}
+
+sp<AMessage> NuPlayer::CCDecoder::getTrackInfo(size_t index) const {
+ CHECK(index == 0);
+
+ sp<AMessage> format = new AMessage();
+
+ format->setInt32("type", MEDIA_TRACK_TYPE_SUBTITLE);
+ format->setString("language", "und");
+ format->setString("mime", MEDIA_MIMETYPE_TEXT_CEA_608);
+ format->setInt32("auto", 1);
+ format->setInt32("default", 1);
+ format->setInt32("forced", 0);
+
+ return format;
+}
+
+status_t NuPlayer::CCDecoder::selectTrack(size_t index, bool select) {
+ CHECK(index < mTrackCount);
+
+ if (select) {
+ if (mSelectedTrack == (ssize_t)index) {
+ ALOGE("track %zu already selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("selected track %zu", index);
+ mSelectedTrack = index;
+ } else {
+ if (mSelectedTrack != (ssize_t)index) {
+ ALOGE("track %zu is not selected", index);
+ return BAD_VALUE;
+ }
+ ALOGV("unselected track %zu", index);
+ mSelectedTrack = -1;
+ }
+
+ return OK;
+}
+
+bool NuPlayer::CCDecoder::isSelected() const {
+ return mSelectedTrack >= 0 && mSelectedTrack < (int32_t)mTrackCount;
+}
+
+bool NuPlayer::CCDecoder::isNullPad(CCData *cc) const {
+ return cc->mData1 < 0x10 && cc->mData2 < 0x10;
+}
+
+void NuPlayer::CCDecoder::dumpBytePair(const sp<ABuffer> &ccBuf) const {
+ size_t offset = 0;
+ AString out;
+
+ while (offset < ccBuf->size()) {
+ char tmp[128];
+
+ CCData *cc = (CCData *) (ccBuf->data() + offset);
+
+ if (isNullPad(cc)) {
+ // 1 null pad or XDS metadata, ignore
+ offset += sizeof(CCData);
+ continue;
+ }
+
+ if (cc->mData1 >= 0x20 && cc->mData1 <= 0x7f) {
+ // 2 basic chars
+ sprintf(tmp, "[%d]Basic: %c %c", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x30 && cc->mData2 <= 0x3f) {
+ // 1 special char
+ sprintf(tmp, "[%d]Special: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x12 || cc->mData1 == 0x1A)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Spanish/French char
+ sprintf(tmp, "[%d]Spanish: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x13 || cc->mData1 == 0x1B)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x3f){
+ // 1 Portuguese/German/Danish char
+ sprintf(tmp, "[%d]German: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 == 0x11 || cc->mData1 == 0x19)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f){
+ // Mid-Row Codes (Table 69)
+ sprintf(tmp, "[%d]Mid-row: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if (((cc->mData1 == 0x14 || cc->mData1 == 0x1c)
+ && cc->mData2 >= 0x20 && cc->mData2 <= 0x2f)
+ ||
+ ((cc->mData1 == 0x17 || cc->mData1 == 0x1f)
+ && cc->mData2 >= 0x21 && cc->mData2 <= 0x23)){
+ // Misc Control Codes (Table 70)
+ sprintf(tmp, "[%d]Ctrl: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else if ((cc->mData1 & 0x70) == 0x10
+ && (cc->mData2 & 0x40) == 0x40
+ && ((cc->mData1 & 0x07) || !(cc->mData2 & 0x20)) ) {
+ // Preamble Address Codes (Table 71)
+ sprintf(tmp, "[%d]PAC: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ } else {
+ sprintf(tmp, "[%d]Invalid: %02x %02x", cc->mType, cc->mData1, cc->mData2);
+ }
+
+ if (out.size() > 0) {
+ out.append(", ");
+ }
+
+ out.append(tmp);
+
+ offset += sizeof(CCData);
+ }
+
+ ALOGI("%s", out.c_str());
+}
+
+bool NuPlayer::CCDecoder::extractFromSEI(const sp<ABuffer> &accessUnit) {
+ int64_t timeUs;
+ CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
+
+ sp<ABuffer> sei;
+ if (!accessUnit->meta()->findBuffer("sei", &sei) || sei == NULL) {
+ return false;
+ }
+
+ bool hasCC = false;
+
+ ABitReader br(sei->data() + 1, sei->size() - 1);
+ // sei_message()
+ while (br.numBitsLeft() >= 16) { // at least 16-bit for sei_message()
+ uint32_t payload_type = 0;
+ size_t payload_size = 0;
+ uint8_t last_byte;
+
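+ // payload type and size use the SEI ff-escape coding: each 0xFF byte
+ // adds 255 and the first non-0xFF byte terminates the value.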
+ do {
+ last_byte = br.getBits(8);
+ payload_type += last_byte;
+ } while (last_byte == 0xFF);
+
+ do {
+ last_byte = br.getBits(8);
+ payload_size += last_byte;
+ } while (last_byte == 0xFF);
+
+ // sei_payload()
+ if (payload_type == 4) {
+ // user_data_registered_itu_t_t35()
+
+ // ATSC A/72: 6.4.2
+ uint8_t itu_t_t35_country_code = br.getBits(8);
+ uint16_t itu_t_t35_provider_code = br.getBits(16);
+ uint32_t user_identifier = br.getBits(32);
+ uint8_t user_data_type_code = br.getBits(8);
+
+ payload_size -= 1 + 2 + 4 + 1;
+
+ if (itu_t_t35_country_code == 0xB5
+ && itu_t_t35_provider_code == 0x0031
+ && user_identifier == 'GA94'
+ && user_data_type_code == 0x3) {
+ hasCC = true;
+
+ // MPEG_cc_data()
+ // ATSC A/53 Part 4: 6.2.3.1
+ br.skipBits(1); //process_em_data_flag
+ bool process_cc_data_flag = br.getBits(1);
+ br.skipBits(1); //additional_data_flag
+ size_t cc_count = br.getBits(5);
+ br.skipBits(8); // em_data;
+ payload_size -= 2;
+
+ if (process_cc_data_flag) {
+ AString out;
+
+ sp<ABuffer> ccBuf = new ABuffer(cc_count * sizeof(CCData));
+ ccBuf->setRange(0, 0);
+
+ for (size_t i = 0; i < cc_count; i++) {
+ uint8_t marker = br.getBits(5);
+ CHECK_EQ(marker, 0x1f);
+
+ bool cc_valid = br.getBits(1);
+ uint8_t cc_type = br.getBits(2);
+ // remove odd parity bit
+ uint8_t cc_data_1 = br.getBits(8) & 0x7f;
+ uint8_t cc_data_2 = br.getBits(8) & 0x7f;
+
+ if (cc_valid
+ && (cc_type == 0 || cc_type == 1)) {
+ CCData cc(cc_type, cc_data_1, cc_data_2);
+ if (!isNullPad(&cc)) {
+ memcpy(ccBuf->data() + ccBuf->size(),
+ (void *)&cc, sizeof(cc));
+ ccBuf->setRange(0, ccBuf->size() + sizeof(CCData));
+ }
+ }
+ }
+ payload_size -= cc_count * 3;
+
+ mCCMap.add(timeUs, ccBuf);
+ break;
+ }
+ } else {
+ ALOGV("Malformed SEI payload type 4");
+ }
+ } else {
+ ALOGV("Unsupported SEI payload type %d", payload_type);
+ }
+
+ // skipping remaining bits of this payload
+ br.skipBits(payload_size * 8);
+ }
+
+ return hasCC;
+}
+
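+// Called for every video access unit; CEA-608 data embedded in AVC SEI NALs is
+// extracted, and a caption track is announced the first time CC data is seen.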
+void NuPlayer::CCDecoder::decode(const sp<ABuffer> &accessUnit) {
+ if (extractFromSEI(accessUnit) && mTrackCount == 0) {
+ mTrackCount++;
+
+ ALOGI("Found CEA-608 track");
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatTrackAdded);
+ msg->post();
+ }
+ // TODO: extract CC from other sources
+}
+
+void NuPlayer::CCDecoder::display(int64_t timeUs) {
+ ssize_t index = mCCMap.indexOfKey(timeUs);
+ if (index < 0) {
+ ALOGV("cc for timestamp %" PRId64 " not found", timeUs);
+ return;
+ }
+
+ sp<ABuffer> &ccBuf = mCCMap.editValueAt(index);
+
+ if (ccBuf->size() > 0) {
+#if 0
+ dumpBytePair(ccBuf);
+#endif
+
+ ccBuf->meta()->setInt32("trackIndex", mSelectedTrack);
+ ccBuf->meta()->setInt64("timeUs", timeUs);
+ ccBuf->meta()->setInt64("durationUs", 0ll);
+
+ sp<AMessage> msg = mNotify->dup();
+ msg->setInt32("what", kWhatClosedCaptionData);
+ msg->setBuffer("buffer", ccBuf);
+ msg->post();
+ }
+
+ // remove all entries before timeUs
+ mCCMap.removeItemsAt(0, index + 1);
+}
+
} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
index 78ea74a..1a4f4ab 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.h
@@ -25,12 +25,14 @@
namespace android {
struct ABuffer;
+struct MediaCodec;
struct NuPlayer::Decoder : public AHandler {
Decoder(const sp<AMessage> &notify,
const sp<NativeWindowWrapper> &nativeWindow = NULL);
void configure(const sp<AMessage> &format);
+ void init();
void signalFlush();
void signalResume();
@@ -38,7 +40,18 @@ struct NuPlayer::Decoder : public AHandler {
bool supportsSeamlessFormatChange(const sp<AMessage> &to) const;
+ enum {
+ kWhatFillThisBuffer = 'flTB',
+ kWhatDrainThisBuffer = 'drTB',
+ kWhatOutputFormatChanged = 'fmtC',
+ kWhatFlushCompleted = 'flsC',
+ kWhatShutdownCompleted = 'shDC',
+ kWhatEOS = 'eos ',
+ kWhatError = 'err ',
+ };
+
protected:
+
virtual ~Decoder();
virtual void onMessageReceived(const sp<AMessage> &msg);
@@ -46,27 +59,78 @@ protected:
private:
enum {
kWhatCodecNotify = 'cdcN',
+ kWhatConfigure = 'conf',
+ kWhatInputBufferFilled = 'inpF',
+ kWhatRenderBuffer = 'rndr',
+ kWhatFlush = 'flus',
+ kWhatShutdown = 'shuD',
};
sp<AMessage> mNotify;
sp<NativeWindowWrapper> mNativeWindow;
- sp<AMessage> mFormat;
- sp<ACodec> mCodec;
+ sp<AMessage> mInputFormat;
+ sp<AMessage> mOutputFormat;
+ sp<MediaCodec> mCodec;
sp<ALooper> mCodecLooper;
+ sp<ALooper> mDecoderLooper;
+
+ Vector<sp<ABuffer> > mInputBuffers;
+ Vector<sp<ABuffer> > mOutputBuffers;
- Vector<sp<ABuffer> > mCSD;
- size_t mCSDIndex;
+ void handleError(int32_t err);
+ bool handleAnInputBuffer();
+ bool handleAnOutputBuffer();
- sp<AMessage> makeFormat(const sp<MetaData> &meta);
+ void requestCodecNotification();
+ bool isStaleReply(const sp<AMessage> &msg);
- void onFillThisBuffer(const sp<AMessage> &msg);
+ void onConfigure(const sp<AMessage> &format);
+ void onFlush();
+ void onResume();
+ void onInputBufferFilled(const sp<AMessage> &msg);
+ void onRenderBuffer(const sp<AMessage> &msg);
+ void onShutdown();
+
+ int32_t mBufferGeneration;
+ bool mPaused;
+ AString mComponentName;
bool supportsSeamlessAudioFormatChange(const sp<AMessage> &targetFormat) const;
DISALLOW_EVIL_CONSTRUCTORS(Decoder);
};
+struct NuPlayer::CCDecoder : public RefBase {
+ enum {
+ kWhatClosedCaptionData,
+ kWhatTrackAdded,
+ };
+
+ CCDecoder(const sp<AMessage> &notify);
+
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
+ status_t selectTrack(size_t index, bool select);
+ bool isSelected() const;
+ void decode(const sp<ABuffer> &accessUnit);
+ void display(int64_t timeUs);
+
+private:
+ struct CCData;
+
+ sp<AMessage> mNotify;
+ KeyedVector<int64_t, sp<ABuffer> > mCCMap;
+ size_t mTrackCount;
+ int32_t mSelectedTrack;
+
+ bool isNullPad(CCData *cc) const;
+ void dumpBytePair(const sp<ABuffer> &ccBuf) const;
+ bool extractFromSEI(const sp<ABuffer> &accessUnit);
+
+ DISALLOW_EVIL_CONSTRUCTORS(CCDecoder);
+};
+
} // namespace android
#endif // NUPLAYER_DECODER_H_
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index b9651a1..280b5af 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -71,7 +71,9 @@ status_t NuPlayerDriver::setUID(uid_t uid) {
}
status_t NuPlayerDriver::setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers) {
Mutex::Autolock autoLock(mLock);
if (mState != STATE_IDLE) {
@@ -80,7 +82,7 @@ status_t NuPlayerDriver::setDataSource(
mState = STATE_SET_DATASOURCE_PENDING;
- mPlayer->setDataSourceAsync(url, headers);
+ mPlayer->setDataSourceAsync(httpService, url, headers);
while (mState == STATE_SET_DATASOURCE_PENDING) {
mCondition.wait(mLock);
@@ -282,6 +284,10 @@ status_t NuPlayerDriver::seekTo(int msec) {
case STATE_PREPARED:
{
mStartupSeekTimeUs = seekTimeUs;
+ // pretend that the seek completed. It will actually happen when starting playback.
+ // TODO: actually perform the seek here, so the player is ready to go at the new
+ // location
+ notifySeekComplete();
break;
}
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
index 99f72a6..0148fb1 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.h
@@ -31,7 +31,9 @@ struct NuPlayerDriver : public MediaPlayerInterface {
virtual status_t setUID(uid_t uid);
virtual status_t setDataSource(
- const char *url, const KeyedVector<String8, String8> *headers);
+ const sp<IMediaHTTPService> &httpService,
+ const char *url,
+ const KeyedVector<String8, String8> *headers);
virtual status_t setDataSource(int fd, int64_t offset, int64_t length);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index bf5271e..a070c1a 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -20,8 +20,6 @@
#include "NuPlayerRenderer.h"
-#include "SoftwareRenderer.h"
-
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
@@ -36,7 +34,6 @@ NuPlayer::Renderer::Renderer(
const sp<AMessage> &notify,
uint32_t flags)
: mAudioSink(sink),
- mSoftRenderer(NULL),
mNotify(notify),
mFlags(flags),
mNumFramesWritten(0),
@@ -60,12 +57,6 @@ NuPlayer::Renderer::Renderer(
}
NuPlayer::Renderer::~Renderer() {
- delete mSoftRenderer;
-}
-
-void NuPlayer::Renderer::setSoftRenderer(SoftwareRenderer *softRenderer) {
- delete mSoftRenderer;
- mSoftRenderer = softRenderer;
}
void NuPlayer::Renderer::queueBuffer(
@@ -425,9 +416,6 @@ void NuPlayer::Renderer::onDrainVideoQueue() {
ALOGV("rendering video at media time %.2f secs",
(mFlags & FLAG_REAL_TIME ? realTimeUs :
(realTimeUs + mAnchorTimeMediaUs - mAnchorTimeRealUs)) / 1E6);
- if (mSoftRenderer != NULL) {
- mSoftRenderer->render(entry->mBuffer->data(), entry->mBuffer->size(), NULL);
- }
}
entry->mNotifyConsumed->setInt32("render", !tooLate);
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
index 9124e03..94a05ea 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.h
@@ -23,7 +23,6 @@
namespace android {
struct ABuffer;
-class SoftwareRenderer;
struct NuPlayer::Renderer : public AHandler {
enum Flags {
@@ -57,8 +56,6 @@ struct NuPlayer::Renderer : public AHandler {
kWhatMediaRenderingStart = 'mdrd',
};
- void setSoftRenderer(SoftwareRenderer *softRenderer);
-
protected:
virtual ~Renderer();
@@ -86,7 +83,6 @@ private:
static const int64_t kMinPositionUpdateDelayUs;
sp<MediaPlayerBase::AudioSink> mAudioSink;
- SoftwareRenderer *mSoftRenderer;
sp<AMessage> mNotify;
uint32_t mFlags;
List<QueueEntry> mAudioQueue;
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
index 11279fc..f5a1d6d 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerSource.h
@@ -72,8 +72,12 @@ struct NuPlayer::Source : public AHandler {
return INVALID_OPERATION;
}
- virtual status_t getTrackInfo(Parcel* /* reply */) const {
- return INVALID_OPERATION;
+ virtual size_t getTrackCount() const {
+ return 0;
+ }
+
+ virtual sp<AMessage> getTrackInfo(size_t /* trackIndex */) const {
+ return NULL;
}
virtual status_t selectTrack(size_t /* trackIndex */, bool /* select */) {
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
index 18cf6d1..94800ba 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.cpp
@@ -24,6 +24,7 @@
#include "MyHandler.h"
#include "SDPLoader.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MetaData.h>
@@ -33,12 +34,14 @@ const int64_t kNearEOSTimeoutUs = 2000000ll; // 2 secs
NuPlayer::RTSPSource::RTSPSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid,
uid_t uid,
bool isSDP)
: Source(notify),
+ mHTTPService(httpService),
mURL(url),
mUIDValid(uidValid),
mUID(uid),
@@ -92,7 +95,7 @@ void NuPlayer::RTSPSource::prepareAsync() {
if (mIsSDP) {
mSDPLoader = new SDPLoader(notify,
(mFlags & kFlagIncognito) ? SDPLoader::kFlagIncognito : 0,
- mUIDValid, mUID);
+ mHTTPService);
mSDPLoader->load(
mURL.c_str(), mExtraHeaders.isEmpty() ? NULL : &mExtraHeaders);
diff --git a/media/libmediaplayerservice/nuplayer/RTSPSource.h b/media/libmediaplayerservice/nuplayer/RTSPSource.h
index 8cf34a0..3718bf9 100644
--- a/media/libmediaplayerservice/nuplayer/RTSPSource.h
+++ b/media/libmediaplayerservice/nuplayer/RTSPSource.h
@@ -34,6 +34,7 @@ struct SDPLoader;
struct NuPlayer::RTSPSource : public NuPlayer::Source {
RTSPSource(
const sp<AMessage> &notify,
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers,
bool uidValid = false,
@@ -88,6 +89,7 @@ private:
bool mNPTMappingValid;
};
+ sp<IMediaHTTPService> mHTTPService;
AString mURL;
KeyedVector<String8, String8> mExtraHeaders;
bool mUIDValid;
diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp
deleted file mode 100644
index 2aae4dd..0000000
--- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.cpp
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "MP4Source.h"
-
-#include "FragmentedMP4Parser.h"
-#include "../NuPlayerStreamListener.h"
-
-#include <media/IStreamSource.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/MetaData.h>
-
-namespace android {
-
-struct StreamSource : public FragmentedMP4Parser::Source {
- StreamSource(const sp<IStreamSource> &source)
- : mListener(new NuPlayer::NuPlayerStreamListener(source, 0)),
- mPosition(0) {
- mListener->start();
- }
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- if (offset < mPosition) {
- return -EPIPE;
- }
-
- while (offset > mPosition) {
- char buffer[1024];
- off64_t skipBytes = offset - mPosition;
- if (skipBytes > sizeof(buffer)) {
- skipBytes = sizeof(buffer);
- }
-
- sp<AMessage> extra;
- ssize_t n;
- for (;;) {
- n = mListener->read(buffer, skipBytes, &extra);
-
- if (n == -EWOULDBLOCK) {
- usleep(10000);
- continue;
- }
-
- break;
- }
-
- ALOGV("skipped %ld bytes at offset %lld", n, mPosition);
-
- if (n < 0) {
- return n;
- }
-
- mPosition += n;
- }
-
- sp<AMessage> extra;
- size_t total = 0;
- while (total < size) {
- ssize_t n = mListener->read(
- (uint8_t *)data + total, size - total, &extra);
-
- if (n == -EWOULDBLOCK) {
- usleep(10000);
- continue;
- } else if (n == 0) {
- break;
- } else if (n < 0) {
- mPosition += total;
- return n;
- }
-
- total += n;
- }
-
- ALOGV("read %ld bytes at offset %lld", total, mPosition);
-
- mPosition += total;
-
- return total;
- }
-
- bool isSeekable() {
- return false;
- }
-
-private:
- sp<NuPlayer::NuPlayerStreamListener> mListener;
- off64_t mPosition;
-
- DISALLOW_EVIL_CONSTRUCTORS(StreamSource);
-};
-
-MP4Source::MP4Source(
- const sp<AMessage> &notify, const sp<IStreamSource> &source)
- : Source(notify),
- mSource(source),
- mLooper(new ALooper),
- mParser(new FragmentedMP4Parser),
- mEOS(false) {
- mLooper->registerHandler(mParser);
-}
-
-MP4Source::~MP4Source() {
-}
-
-void MP4Source::prepareAsync() {
- notifyVideoSizeChanged(0, 0);
- notifyFlagsChanged(0);
- notifyPrepared();
-}
-
-void MP4Source::start() {
- mLooper->start(false /* runOnCallingThread */);
- mParser->start(new StreamSource(mSource));
-}
-
-status_t MP4Source::feedMoreTSData() {
- return mEOS ? ERROR_END_OF_STREAM : (status_t)OK;
-}
-
-sp<AMessage> MP4Source::getFormat(bool audio) {
- return mParser->getFormat(audio);
-}
-
-status_t MP4Source::dequeueAccessUnit(
- bool audio, sp<ABuffer> *accessUnit) {
- return mParser->dequeueAccessUnit(audio, accessUnit);
-}
-
-} // namespace android
diff --git a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h b/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h
deleted file mode 100644
index a6ef622..0000000
--- a/media/libmediaplayerservice/nuplayer/mp4/MP4Source.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef MP4_SOURCE_H
-#define MP4_SOURCE_H
-
-#include "NuPlayerSource.h"
-
-namespace android {
-
-struct FragmentedMP4Parser;
-
-struct MP4Source : public NuPlayer::Source {
- MP4Source(const sp<AMessage> &notify, const sp<IStreamSource> &source);
-
- virtual void prepareAsync();
- virtual void start();
-
- virtual status_t feedMoreTSData();
-
- virtual sp<AMessage> getFormat(bool audio);
-
- virtual status_t dequeueAccessUnit(
- bool audio, sp<ABuffer> *accessUnit);
-
-protected:
- virtual ~MP4Source();
-
-private:
- sp<IStreamSource> mSource;
- sp<ALooper> mLooper;
- sp<FragmentedMP4Parser> mParser;
- bool mEOS;
-
- DISALLOW_EVIL_CONSTRUCTORS(MP4Source);
-};
-
-} // namespace android
-
-#endif // MP4_SOURCE_H
diff --git a/media/libnbaio/Android.mk b/media/libnbaio/Android.mk
index 69c75b8..9707c4a 100644
--- a/media/libnbaio/Android.mk
+++ b/media/libnbaio/Android.mk
@@ -31,9 +31,8 @@ LOCAL_SHARED_LIBRARIES := \
libcommon_time_client \
libcutils \
libutils \
- liblog \
- libmedia
-# This dependency on libmedia is for SingleStateQueueInstantiations.
-# Consider a separate a library for SingleStateQueueInstantiations.
+ liblog
+
+LOCAL_STATIC_LIBRARIES += libinstantssq
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libnbaio/AudioBufferProviderSource.cpp b/media/libnbaio/AudioBufferProviderSource.cpp
index 74a6fdb..551f516 100644
--- a/media/libnbaio/AudioBufferProviderSource.cpp
+++ b/media/libnbaio/AudioBufferProviderSource.cpp
@@ -24,11 +24,11 @@
namespace android {
AudioBufferProviderSource::AudioBufferProviderSource(AudioBufferProvider *provider,
- NBAIO_Format format) :
+ const NBAIO_Format& format) :
NBAIO_Source(format), mProvider(provider), mConsumed(0)
{
ALOG_ASSERT(provider != NULL);
- ALOG_ASSERT(format != Format_Invalid);
+ ALOG_ASSERT(Format_isValid(format));
}
AudioBufferProviderSource::~AudioBufferProviderSource()
@@ -68,7 +68,7 @@ ssize_t AudioBufferProviderSource::read(void *buffer,
}
// count could be zero, either because count was zero on entry or
// available is zero, but both are unlikely so don't check for that
- memcpy(buffer, (char *) mBuffer.raw + (mConsumed << mBitShift), count << mBitShift);
+ memcpy(buffer, (char *) mBuffer.raw + (mConsumed * mFrameSize), count * mFrameSize);
if (CC_UNLIKELY((mConsumed += count) >= mBuffer.frameCount)) {
mProvider->releaseBuffer(&mBuffer);
mBuffer.raw = NULL;
@@ -120,7 +120,7 @@ ssize_t AudioBufferProviderSource::readVia(readVia_t via, size_t total, void *us
count = available;
}
if (CC_LIKELY(count > 0)) {
- char* readTgt = (char *) mBuffer.raw + (mConsumed << mBitShift);
+ char* readTgt = (char *) mBuffer.raw + (mConsumed * mFrameSize);
ssize_t ret = via(user, readTgt, count, readPTS);
if (CC_UNLIKELY(ret <= 0)) {
if (CC_LIKELY(accumulator > 0)) {
diff --git a/media/libnbaio/AudioStreamInSource.cpp b/media/libnbaio/AudioStreamInSource.cpp
index 05273f6..6aab48a 100644
--- a/media/libnbaio/AudioStreamInSource.cpp
+++ b/media/libnbaio/AudioStreamInSource.cpp
@@ -40,16 +40,15 @@ AudioStreamInSource::~AudioStreamInSource()
ssize_t AudioStreamInSource::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
- if (mFormat == Format_Invalid) {
+ if (!Format_isValid(mFormat)) {
mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
- if (streamFormat == AUDIO_FORMAT_PCM_16_BIT) {
- uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
- audio_channel_mask_t channelMask =
- (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
- mFormat = Format_from_SR_C(sampleRate, popcount(channelMask));
- mBitShift = Format_frameBitShift(mFormat);
- }
+ uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
+ audio_channel_mask_t channelMask =
+ (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
+ mFormat = Format_from_SR_C(sampleRate,
+ audio_channel_count_from_in_mask(channelMask), streamFormat);
+ mFrameSize = Format_frameSize(mFormat);
}
return NBAIO_Source::negotiate(offers, numOffers, counterOffers, numCounterOffers);
}
@@ -65,14 +64,14 @@ size_t AudioStreamInSource::framesOverrun()
return mFramesOverrun;
}
-ssize_t AudioStreamInSource::read(void *buffer, size_t count)
+ssize_t AudioStreamInSource::read(void *buffer, size_t count, int64_t readPTS __unused)
{
- if (CC_UNLIKELY(mFormat == Format_Invalid)) {
+ if (CC_UNLIKELY(!Format_isValid(mFormat))) {
return NEGOTIATE;
}
- ssize_t bytesRead = mStream->read(mStream, buffer, count << mBitShift);
+ ssize_t bytesRead = mStream->read(mStream, buffer, count * mFrameSize);
if (bytesRead > 0) {
- size_t framesRead = bytesRead >> mBitShift;
+ size_t framesRead = bytesRead / mFrameSize;
mFramesRead += framesRead;
return framesRead;
} else {
diff --git a/media/libnbaio/AudioStreamOutSink.cpp b/media/libnbaio/AudioStreamOutSink.cpp
index e4341d7..0d5f935 100644
--- a/media/libnbaio/AudioStreamOutSink.cpp
+++ b/media/libnbaio/AudioStreamOutSink.cpp
@@ -37,16 +37,15 @@ AudioStreamOutSink::~AudioStreamOutSink()
ssize_t AudioStreamOutSink::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
- if (mFormat == Format_Invalid) {
+ if (!Format_isValid(mFormat)) {
mStreamBufferSizeBytes = mStream->common.get_buffer_size(&mStream->common);
audio_format_t streamFormat = mStream->common.get_format(&mStream->common);
- if (streamFormat == AUDIO_FORMAT_PCM_16_BIT) {
- uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
- audio_channel_mask_t channelMask =
- (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
- mFormat = Format_from_SR_C(sampleRate, popcount(channelMask));
- mBitShift = Format_frameBitShift(mFormat);
- }
+ uint32_t sampleRate = mStream->common.get_sample_rate(&mStream->common);
+ audio_channel_mask_t channelMask =
+ (audio_channel_mask_t) mStream->common.get_channels(&mStream->common);
+ mFormat = Format_from_SR_C(sampleRate,
+ audio_channel_count_from_out_mask(channelMask), streamFormat);
+ mFrameSize = Format_frameSize(mFormat);
}
return NBAIO_Sink::negotiate(offers, numOffers, counterOffers, numCounterOffers);
}
@@ -56,10 +55,10 @@ ssize_t AudioStreamOutSink::write(const void *buffer, size_t count)
if (!mNegotiated) {
return NEGOTIATE;
}
- ALOG_ASSERT(mFormat != Format_Invalid);
- ssize_t ret = mStream->write(mStream, buffer, count << mBitShift);
+ ALOG_ASSERT(Format_isValid(mFormat));
+ ssize_t ret = mStream->write(mStream, buffer, count * mFrameSize);
if (ret > 0) {
- ret >>= mBitShift;
+ ret /= mFrameSize;
mFramesWritten += ret;
} else {
// FIXME verify HAL implementations are returning the correct error codes e.g. WOULD_BLOCK
diff --git a/media/libnbaio/MonoPipe.cpp b/media/libnbaio/MonoPipe.cpp
index 3c61b60..0b65861 100644
--- a/media/libnbaio/MonoPipe.cpp
+++ b/media/libnbaio/MonoPipe.cpp
@@ -14,6 +14,8 @@
* limitations under the License.
*/
+#include <inttypes.h>
+
#define LOG_TAG "MonoPipe"
//#define LOG_NDEBUG 0
@@ -30,7 +32,24 @@
namespace android {
-MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) :
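+// The local-time frequency from CCHelper is queried once per process via
+// pthread_once and reused by every MonoPipe constructor.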
+static uint64_t cacheN; // output of CCHelper::getLocalFreq()
+static bool cacheValid; // whether cacheN is valid
+static pthread_once_t cacheOnceControl = PTHREAD_ONCE_INIT;
+
+static void cacheOnceInit()
+{
+ CCHelper tmpHelper;
+ status_t res;
+ if (OK != (res = tmpHelper.getLocalFreq(&cacheN))) {
+ ALOGE("Failed to fetch local time frequency when constructing a"
+ " MonoPipe (res = %d). getNextWriteTimestamp calls will be"
+ " non-functional", res);
+ return;
+ }
+ cacheValid = true;
+}
+
+MonoPipe::MonoPipe(size_t reqFrames, const NBAIO_Format& format, bool writeCanBlock) :
NBAIO_Sink(format),
mUpdateSeq(0),
mReqFrames(reqFrames),
@@ -47,8 +66,6 @@ MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) :
mTimestampMutator(&mTimestampShared),
mTimestampObserver(&mTimestampShared)
{
- CCHelper tmpHelper;
- status_t res;
uint64_t N, D;
mNextRdPTS = AudioBufferProvider::kInvalidPTS;
@@ -59,19 +76,20 @@ MonoPipe::MonoPipe(size_t reqFrames, NBAIO_Format format, bool writeCanBlock) :
mSamplesToLocalTime.a_to_b_denom = 0;
D = Format_sampleRate(format);
- if (OK != (res = tmpHelper.getLocalFreq(&N))) {
- ALOGE("Failed to fetch local time frequency when constructing a"
- " MonoPipe (res = %d). getNextWriteTimestamp calls will be"
- " non-functional", res);
+
+ (void) pthread_once(&cacheOnceControl, cacheOnceInit);
+ if (!cacheValid) {
+ // log has already been done
return;
}
+ N = cacheN;
LinearTransform::reduce(&N, &D);
static const uint64_t kSignedHiBitsMask = ~(0x7FFFFFFFull);
static const uint64_t kUnsignedHiBitsMask = ~(0xFFFFFFFFull);
if ((N & kSignedHiBitsMask) || (D & kUnsignedHiBitsMask)) {
ALOGE("Cannot reduce sample rate to local clock frequency ratio to fit"
- " in a 32/32 bit rational. (max reduction is 0x%016llx/0x%016llx"
+ " in a 32/32 bit rational. (max reduction is 0x%016" PRIx64 "/0x%016" PRIx64
"). getNextWriteTimestamp calls will be non-functional", N, D);
return;
}
@@ -115,11 +133,11 @@ ssize_t MonoPipe::write(const void *buffer, size_t count)
part1 = written;
}
if (CC_LIKELY(part1 > 0)) {
- memcpy((char *) mBuffer + (rear << mBitShift), buffer, part1 << mBitShift);
+ memcpy((char *) mBuffer + (rear * mFrameSize), buffer, part1 * mFrameSize);
if (CC_UNLIKELY(rear + part1 == mMaxFrames)) {
size_t part2 = written - part1;
if (CC_LIKELY(part2 > 0)) {
- memcpy(mBuffer, (char *) buffer + (part1 << mBitShift), part2 << mBitShift);
+ memcpy(mBuffer, (char *) buffer + (part1 * mFrameSize), part2 * mFrameSize);
}
}
android_atomic_release_store(written + mRear, &mRear);
@@ -129,7 +147,7 @@ ssize_t MonoPipe::write(const void *buffer, size_t count)
break;
}
count -= written;
- buffer = (char *) buffer + (written << mBitShift);
+ buffer = (char *) buffer + (written * mFrameSize);
// Simulate blocking I/O by sleeping at different rates, depending on a throttle.
// The throttle tries to keep the mean pipe depth near the setpoint, with a slight jitter.
uint32_t ns;
@@ -292,7 +310,7 @@ int64_t MonoPipe::offsetTimestampByAudioFrames(int64_t ts, size_t audFrames)
// error, but then zero out the ratio in the linear transform so
// that we don't try to do any conversions from now on. This
// MonoPipe's getNextWriteTimestamp is now broken for good.
- ALOGE("Overflow when attempting to convert %d audio frames to"
+ ALOGE("Overflow when attempting to convert %zu audio frames to"
" duration in local time. getNextWriteTimestamp will fail from"
" now on.", audFrames);
mSamplesToLocalTime.a_to_b_numer = 0;
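
[Editor's note] For reference, a minimal standalone sketch of the once-per-process caching pattern the MonoPipe constructor adopts above: an expensive query is performed exactly once via pthread_once and its result is reused by every later constructor. queryLocalFreq() and the frequency value below are placeholders, not the AOSP CCHelper API.

    #include <pthread.h>
    #include <stdint.h>
    #include <stdio.h>

    static uint64_t gCachedFreq;                       // result of the one-time query
    static bool     gCacheValid = false;               // stays false if the query fails
    static pthread_once_t gOnce = PTHREAD_ONCE_INIT;

    // Stand-in for CCHelper::getLocalFreq(); returns a fixed value here.
    static uint64_t queryLocalFreq() { return 19200000ULL; }

    static void onceInit() {
        gCachedFreq = queryLocalFreq();                // real code checks for and logs errors
        gCacheValid = true;
    }

    // Every caller pays the query cost at most once, no matter how many
    // objects are constructed on how many threads.
    static uint64_t localFreqOrZero() {
        (void) pthread_once(&gOnce, onceInit);
        return gCacheValid ? gCachedFreq : 0;
    }

    int main() {
        printf("local clock frequency: %llu Hz\n", (unsigned long long) localFreqOrZero());
        return 0;
    }
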
diff --git a/media/libnbaio/MonoPipeReader.cpp b/media/libnbaio/MonoPipeReader.cpp
index 851341a..de82229 100644
--- a/media/libnbaio/MonoPipeReader.cpp
+++ b/media/libnbaio/MonoPipeReader.cpp
@@ -73,11 +73,11 @@ ssize_t MonoPipeReader::read(void *buffer, size_t count, int64_t readPTS)
part1 = red;
}
if (CC_LIKELY(part1 > 0)) {
- memcpy(buffer, (char *) mPipe->mBuffer + (front << mBitShift), part1 << mBitShift);
+ memcpy(buffer, (char *) mPipe->mBuffer + (front * mFrameSize), part1 * mFrameSize);
if (CC_UNLIKELY(front + part1 == mPipe->mMaxFrames)) {
size_t part2 = red - part1;
if (CC_LIKELY(part2 > 0)) {
- memcpy((char *) buffer + (part1 << mBitShift), mPipe->mBuffer, part2 << mBitShift);
+ memcpy((char *) buffer + (part1 * mFrameSize), mPipe->mBuffer, part2 * mFrameSize);
}
}
mPipe->updateFrontAndNRPTS(red + mPipe->mFront, nextReadPTS);
diff --git a/media/libnbaio/NBAIO.cpp b/media/libnbaio/NBAIO.cpp
index e0d2c21..d641e74 100644
--- a/media/libnbaio/NBAIO.cpp
+++ b/media/libnbaio/NBAIO.cpp
@@ -22,119 +22,42 @@
namespace android {
-size_t Format_frameSize(NBAIO_Format format)
+size_t Format_frameSize(const NBAIO_Format& format)
{
- return Format_channelCount(format) * sizeof(short);
+ return format.mFrameSize;
}
-size_t Format_frameBitShift(NBAIO_Format format)
-{
- // sizeof(short) == 2, so frame size == 1 << channels
- return Format_channelCount(format);
-}
-
-enum {
- Format_SR_8000,
- Format_SR_11025,
- Format_SR_16000,
- Format_SR_22050,
- Format_SR_24000,
- Format_SR_32000,
- Format_SR_44100,
- Format_SR_48000,
- Format_SR_Mask = 7
-};
-
-enum {
- Format_C_1 = 0x08,
- Format_C_2 = 0x10,
- Format_C_Mask = 0x18
-};
+const NBAIO_Format Format_Invalid = { 0, 0, AUDIO_FORMAT_INVALID, 0 };
-unsigned Format_sampleRate(NBAIO_Format format)
+unsigned Format_sampleRate(const NBAIO_Format& format)
{
- if (format == Format_Invalid) {
- return 0;
- }
- switch (format & Format_SR_Mask) {
- case Format_SR_8000:
- return 8000;
- case Format_SR_11025:
- return 11025;
- case Format_SR_16000:
- return 16000;
- case Format_SR_22050:
- return 22050;
- case Format_SR_24000:
- return 24000;
- case Format_SR_32000:
- return 32000;
- case Format_SR_44100:
- return 44100;
- case Format_SR_48000:
- return 48000;
- default:
+ if (!Format_isValid(format)) {
return 0;
}
+ return format.mSampleRate;
}
-unsigned Format_channelCount(NBAIO_Format format)
+unsigned Format_channelCount(const NBAIO_Format& format)
{
- if (format == Format_Invalid) {
- return 0;
- }
- switch (format & Format_C_Mask) {
- case Format_C_1:
- return 1;
- case Format_C_2:
- return 2;
- default:
+ if (!Format_isValid(format)) {
return 0;
}
+ return format.mChannelCount;
}
-NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount)
+NBAIO_Format Format_from_SR_C(unsigned sampleRate, unsigned channelCount,
+ audio_format_t format)
{
- NBAIO_Format format;
- switch (sampleRate) {
- case 8000:
- format = Format_SR_8000;
- break;
- case 11025:
- format = Format_SR_11025;
- break;
- case 16000:
- format = Format_SR_16000;
- break;
- case 22050:
- format = Format_SR_22050;
- break;
- case 24000:
- format = Format_SR_24000;
- break;
- case 32000:
- format = Format_SR_32000;
- break;
- case 44100:
- format = Format_SR_44100;
- break;
- case 48000:
- format = Format_SR_48000;
- break;
- default:
+ if (sampleRate == 0 || channelCount == 0 || !audio_is_valid_format(format)) {
return Format_Invalid;
}
- switch (channelCount) {
- case 1:
- format |= Format_C_1;
- break;
- case 2:
- format |= Format_C_2;
- break;
- default:
- return Format_Invalid;
- }
- return format;
+ NBAIO_Format ret;
+ ret.mSampleRate = sampleRate;
+ ret.mChannelCount = channelCount;
+ ret.mFormat = format;
+ ret.mFrameSize = audio_is_linear_pcm(format) ?
+ channelCount * audio_bytes_per_sample(format) : sizeof(uint8_t);
+ return ret;
}
// This is a default implementation; it is expected that subclasses will optimize this.
@@ -214,11 +137,11 @@ ssize_t NBAIO_Source::readVia(readVia_t via, size_t total, void *user,
ssize_t NBAIO_Port::negotiate(const NBAIO_Format offers[], size_t numOffers,
NBAIO_Format counterOffers[], size_t& numCounterOffers)
{
- ALOGV("negotiate offers=%p numOffers=%u countersOffers=%p numCounterOffers=%u",
+ ALOGV("negotiate offers=%p numOffers=%zu countersOffers=%p numCounterOffers=%zu",
offers, numOffers, counterOffers, numCounterOffers);
- if (mFormat != Format_Invalid) {
+ if (Format_isValid(mFormat)) {
for (size_t i = 0; i < numOffers; ++i) {
- if (offers[i] == mFormat) {
+ if (Format_isEqual(offers[i], mFormat)) {
mNegotiated = true;
return i;
}
@@ -233,4 +156,17 @@ ssize_t NBAIO_Port::negotiate(const NBAIO_Format offers[], size_t numOffers,
return (ssize_t) NEGOTIATE;
}
+bool Format_isValid(const NBAIO_Format& format)
+{
+ return format.mSampleRate != 0 && format.mChannelCount != 0 &&
+ format.mFormat != AUDIO_FORMAT_INVALID && format.mFrameSize != 0;
+}
+
+bool Format_isEqual(const NBAIO_Format& format1, const NBAIO_Format& format2)
+{
+ return format1.mSampleRate == format2.mSampleRate &&
+ format1.mChannelCount == format2.mChannelCount && format1.mFormat == format2.mFormat &&
+ format1.mFrameSize == format2.mFrameSize;
+}
+
} // namespace android
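
[Editor's note] A small sketch of the arithmetic change that runs through the libnbaio diffs above: with NBAIO_Format now carrying an explicit frame size, byte offsets come from a multiply rather than the old power-of-two bit shift, so formats other than 16-bit stereo PCM work. The constants below are illustrative only, not taken from the patch.

    #include <cstddef>
    #include <cstdio>

    int main() {
        const unsigned channelCount   = 2;
        const size_t   bytesPerSample = 4;   // e.g. float PCM; any linear PCM size works now
        const size_t   frameSize      = channelCount * bytesPerSample;

        const size_t rear   = 37;            // frame index into a pipe buffer
        const size_t frames = 128;           // frames to copy

        // old scheme: offset = rear << bitShift (only valid for 16-bit samples)
        // new scheme: offset = rear * frameSize, size = frames * frameSize
        std::printf("offset = %zu bytes, copy = %zu bytes\n",
                    rear * frameSize, frames * frameSize);
        return 0;
    }
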
diff --git a/media/libnbaio/NBLog.cpp b/media/libnbaio/NBLog.cpp
index 4f5762f..4d14904 100644
--- a/media/libnbaio/NBLog.cpp
+++ b/media/libnbaio/NBLog.cpp
@@ -341,8 +341,8 @@ void NBLog::Reader::dump(int fd, size_t indent)
mFd = fd;
mIndent = indent;
String8 timestamp, body;
- if (i > 0) {
- lost += i;
+ lost += i;
+ if (lost > 0) {
body.appendFormat("warning: lost %zu bytes worth of events", lost);
// TODO timestamp empty here, only other choice to wait for the first timestamp event in the
// log to push it out. Consider keeping the timestamp/body between calls to readAt().
@@ -447,7 +447,7 @@ void NBLog::Reader::dumpLine(const String8& timestamp, String8& body)
bool NBLog::Reader::isIMemory(const sp<IMemory>& iMemory) const
{
- return iMemory.get() == mIMemory.get();
+ return iMemory != 0 && mIMemory != 0 && iMemory->pointer() == mIMemory->pointer();
}
} // namespace android
diff --git a/media/libnbaio/Pipe.cpp b/media/libnbaio/Pipe.cpp
index 1c21f9c..6e0ec8c 100644
--- a/media/libnbaio/Pipe.cpp
+++ b/media/libnbaio/Pipe.cpp
@@ -25,19 +25,22 @@
namespace android {
-Pipe::Pipe(size_t maxFrames, NBAIO_Format format) :
+Pipe::Pipe(size_t maxFrames, const NBAIO_Format& format, void *buffer) :
NBAIO_Sink(format),
mMaxFrames(roundup(maxFrames)),
- mBuffer(malloc(mMaxFrames * Format_frameSize(format))),
+ mBuffer(buffer == NULL ? malloc(mMaxFrames * Format_frameSize(format)) : buffer),
mRear(0),
- mReaders(0)
+ mReaders(0),
+ mFreeBufferInDestructor(buffer == NULL)
{
}
Pipe::~Pipe()
{
ALOG_ASSERT(android_atomic_acquire_load(&mReaders) == 0);
- free(mBuffer);
+ if (mFreeBufferInDestructor) {
+ free(mBuffer);
+ }
}
ssize_t Pipe::write(const void *buffer, size_t count)
@@ -52,13 +55,13 @@ ssize_t Pipe::write(const void *buffer, size_t count)
if (CC_LIKELY(written > count)) {
written = count;
}
- memcpy((char *) mBuffer + (rear << mBitShift), buffer, written << mBitShift);
+ memcpy((char *) mBuffer + (rear * mFrameSize), buffer, written * mFrameSize);
if (CC_UNLIKELY(rear + written == mMaxFrames)) {
if (CC_UNLIKELY((count -= written) > rear)) {
count = rear;
}
if (CC_LIKELY(count > 0)) {
- memcpy(mBuffer, (char *) buffer + (written << mBitShift), count << mBitShift);
+ memcpy(mBuffer, (char *) buffer + (written * mFrameSize), count * mFrameSize);
written += count;
}
}
diff --git a/media/libnbaio/PipeReader.cpp b/media/libnbaio/PipeReader.cpp
index d786b84..c8e4953 100644
--- a/media/libnbaio/PipeReader.cpp
+++ b/media/libnbaio/PipeReader.cpp
@@ -59,7 +59,7 @@ ssize_t PipeReader::availableToRead()
return avail;
}
-ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS)
+ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS __unused)
{
ssize_t avail = availableToRead();
if (CC_UNLIKELY(avail <= 0)) {
@@ -76,14 +76,14 @@ ssize_t PipeReader::read(void *buffer, size_t count, int64_t readPTS)
red = count;
}
// In particular, an overrun during the memcpy will result in reading corrupt data
- memcpy(buffer, (char *) mPipe.mBuffer + (front << mBitShift), red << mBitShift);
+ memcpy(buffer, (char *) mPipe.mBuffer + (front * mFrameSize), red * mFrameSize);
// We could re-read the rear pointer here to detect the corruption, but why bother?
if (CC_UNLIKELY(front + red == mPipe.mMaxFrames)) {
if (CC_UNLIKELY((count -= red) > front)) {
count = front;
}
if (CC_LIKELY(count > 0)) {
- memcpy((char *) buffer + (red << mBitShift), mPipe.mBuffer, count << mBitShift);
+ memcpy((char *) buffer + (red * mFrameSize), mPipe.mBuffer, count * mFrameSize);
red += count;
}
}
diff --git a/media/libnbaio/SourceAudioBufferProvider.cpp b/media/libnbaio/SourceAudioBufferProvider.cpp
index 062fa0f..e21ef48 100644
--- a/media/libnbaio/SourceAudioBufferProvider.cpp
+++ b/media/libnbaio/SourceAudioBufferProvider.cpp
@@ -24,7 +24,7 @@ namespace android {
SourceAudioBufferProvider::SourceAudioBufferProvider(const sp<NBAIO_Source>& source) :
mSource(source),
- // mFrameBitShiftFormat below
+ // mFrameSize below
mAllocated(NULL), mSize(0), mOffset(0), mRemaining(0), mGetCount(0), mFramesReleased(0)
{
ALOG_ASSERT(source != 0);
@@ -37,7 +37,7 @@ SourceAudioBufferProvider::SourceAudioBufferProvider(const sp<NBAIO_Source>& sou
numCounterOffers = 0;
index = source->negotiate(counterOffers, 1, NULL, numCounterOffers);
ALOG_ASSERT(index == 0);
- mFrameBitShift = Format_frameBitShift(source->format());
+ mFrameSize = Format_frameSize(source->format());
}
SourceAudioBufferProvider::~SourceAudioBufferProvider()
@@ -54,14 +54,14 @@ status_t SourceAudioBufferProvider::getNextBuffer(Buffer *buffer, int64_t pts)
if (mRemaining < buffer->frameCount) {
buffer->frameCount = mRemaining;
}
- buffer->raw = (char *) mAllocated + (mOffset << mFrameBitShift);
+ buffer->raw = (char *) mAllocated + (mOffset * mFrameSize);
mGetCount = buffer->frameCount;
return OK;
}
// do we need to reallocate?
if (buffer->frameCount > mSize) {
free(mAllocated);
- mAllocated = malloc(buffer->frameCount << mFrameBitShift);
+ mAllocated = malloc(buffer->frameCount * mFrameSize);
mSize = buffer->frameCount;
}
// read from source
@@ -84,7 +84,7 @@ status_t SourceAudioBufferProvider::getNextBuffer(Buffer *buffer, int64_t pts)
void SourceAudioBufferProvider::releaseBuffer(Buffer *buffer)
{
ALOG_ASSERT((buffer != NULL) &&
- (buffer->raw == (char *) mAllocated + (mOffset << mFrameBitShift)) &&
+ (buffer->raw == (char *) mAllocated + (mOffset * mFrameSize)) &&
(buffer->frameCount <= mGetCount) &&
(mGetCount <= mRemaining) &&
(mOffset + mRemaining <= mSize));
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index f2d960a..9ab1417 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -17,6 +17,10 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
#include <inttypes.h>
#include <utils/Trace.h>
@@ -39,6 +43,7 @@
#include <media/hardware/HardwareAPI.h>
#include <OMX_AudioExt.h>
+#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
@@ -100,12 +105,6 @@ struct CodecObserver : public BnOMXObserver {
msg->setInt64(
"timestamp",
omx_msg.u.extended_buffer_data.timestamp);
- msg->setPointer(
- "platform_private",
- omx_msg.u.extended_buffer_data.platform_private);
- msg->setPointer(
- "data_ptr",
- omx_msg.u.extended_buffer_data.data_ptr);
break;
}
@@ -160,9 +159,7 @@ private:
IOMX::buffer_id bufferID,
size_t rangeOffset, size_t rangeLength,
OMX_U32 flags,
- int64_t timeUs,
- void *platformPrivate,
- void *dataPtr);
+ int64_t timeUs);
void getMoreInputDataIfPossible();
@@ -368,7 +365,7 @@ ACodec::ACodec()
mIsEncoder(false),
mUseMetadataOnEncoderOutput(false),
mShutdownInProgress(false),
- mIsConfiguredForAdaptivePlayback(false),
+ mExplicitShutdown(false),
mEncoderDelay(0),
mEncoderPadding(0),
mChannelMaskPresent(false),
@@ -377,7 +374,10 @@ ACodec::ACodec()
mStoreMetaDataInOutputBuffers(false),
mMetaDataBuffersToSubmit(0),
mRepeatFrameDelayUs(-1ll),
- mMaxPtsGapUs(-1ll) {
+ mMaxPtsGapUs(-1ll),
+ mTimePerFrameUs(-1ll),
+ mTimePerCaptureUs(-1ll),
+ mCreateInputBuffersSuspended(false) {
mUninitializedState = new UninitializedState(this);
mLoadedState = new LoadedState(this);
mLoadedToIdleState = new LoadedToIdleState(this);
@@ -643,11 +643,21 @@ status_t ACodec::configureOutputBuffersFromNativeWindow(
return err;
}
- // XXX: Is this the right logic to use? It's not clear to me what the OMX
- // buffer counts refer to - how do they account for the renderer holding on
- // to buffers?
- if (def.nBufferCountActual < def.nBufferCountMin + *minUndequeuedBuffers) {
- OMX_U32 newBufferCount = def.nBufferCountMin + *minUndequeuedBuffers;
+ // FIXME: assume that surface is controlled by app (native window
+ // returns the number for the case when surface is not controlled by app)
+ // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
+ // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
+
+ // Use conservative allocation while also trying to reduce starvation
+ //
+ // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
+ // minimum needed for the consumer to be able to work
+ // 2. try to allocate two (2) additional buffers to reduce starvation from
+ // the consumer
+ // plus an extra buffer to account for incorrect minUndequeuedBufs
+ for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
+ OMX_U32 newBufferCount =
+ def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
def.nBufferCountActual = newBufferCount;
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
@@ -685,6 +695,7 @@ status_t ACodec::allocateOutputBuffersFromNativeWindow() {
&bufferCount, &bufferSize, &minUndequeuedBuffers);
if (err != 0)
return err;
+ mNumUndequeuedBuffers = minUndequeuedBuffers;
ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
"output port",
@@ -750,6 +761,7 @@ status_t ACodec::allocateOutputMetaDataBuffers() {
&bufferCount, &bufferSize, &minUndequeuedBuffers);
if (err != 0)
return err;
+ mNumUndequeuedBuffers = minUndequeuedBuffers;
ALOGV("[%s] Allocating %u meta buffers on output port",
mComponentName.c_str(), bufferCount);
@@ -970,12 +982,16 @@ status_t ACodec::setComponentRole(
"audio_decoder.aac", "audio_encoder.aac" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS,
"audio_decoder.vorbis", "audio_encoder.vorbis" },
+ { MEDIA_MIMETYPE_AUDIO_OPUS,
+ "audio_decoder.opus", "audio_encoder.opus" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW,
"audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW,
"audio_decoder.g711alaw", "audio_encoder.g711alaw" },
{ MEDIA_MIMETYPE_VIDEO_AVC,
"video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_HEVC,
+ "video_decoder.hevc", "video_encoder.hevc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4,
"video_decoder.mpeg4", "video_encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_H263,
@@ -1045,6 +1061,9 @@ status_t ACodec::configureCodec(
encoder = false;
}
+ sp<AMessage> inputFormat = new AMessage();
+ sp<AMessage> outputFormat = new AMessage();
+
mIsEncoder = encoder;
status_t err = setComponentRole(encoder /* isEncoder */, mime);
@@ -1127,7 +1146,17 @@ status_t ACodec::configureCodec(
}
if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
- mMaxPtsGapUs = -1l;
+ mMaxPtsGapUs = -1ll;
+ }
+
+ if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
+ mTimePerCaptureUs = -1ll;
+ }
+
+ if (!msg->findInt32(
+ "create-input-buffers-suspended",
+ (int32_t*)&mCreateInputBuffersSuspended)) {
+ mCreateInputBuffersSuspended = false;
}
}
@@ -1136,7 +1165,9 @@ status_t ACodec::configureCodec(
int32_t haveNativeWindow = msg->findObject("native-window", &obj) &&
obj != NULL;
mStoreMetaDataInOutputBuffers = false;
- mIsConfiguredForAdaptivePlayback = false;
+ if (video && !encoder) {
+ inputFormat->setInt32("adaptive-playback", false);
+ }
if (!encoder && video && haveNativeWindow) {
err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, OMX_TRUE);
if (err != OK) {
@@ -1181,14 +1212,19 @@ status_t ACodec::configureCodec(
ALOGW_IF(err != OK,
"[%s] prepareForAdaptivePlayback failed w/ err %d",
mComponentName.c_str(), err);
- mIsConfiguredForAdaptivePlayback = (err == OK);
+
+ if (err == OK) {
+ inputFormat->setInt32("max-width", maxWidth);
+ inputFormat->setInt32("max-height", maxHeight);
+ inputFormat->setInt32("adaptive-playback", true);
+ }
}
// allow failure
err = OK;
} else {
ALOGV("[%s] storeMetaDataInBuffers succeeded", mComponentName.c_str());
mStoreMetaDataInOutputBuffers = true;
- mIsConfiguredForAdaptivePlayback = true;
+ inputFormat->setInt32("adaptive-playback", true);
}
int32_t push;
@@ -1328,6 +1364,11 @@ status_t ACodec::configureCodec(
err = setMinBufferSize(kPortIndexInput, 8192); // XXX
}
+ CHECK_EQ(getPortFormat(kPortIndexInput, inputFormat), (status_t)OK);
+ CHECK_EQ(getPortFormat(kPortIndexOutput, outputFormat), (status_t)OK);
+ mInputFormat = inputFormat;
+ mOutputFormat = outputFormat;
+
return err;
}
@@ -1772,6 +1813,7 @@ static const struct VideoCodingMapEntry {
OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
{ MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
+ { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
{ MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
{ MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
{ MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
@@ -1918,6 +1960,7 @@ status_t ACodec::setupVideoEncoder(const char *mime, const sp<AMessage> &msg) {
return INVALID_OPERATION;
}
frameRate = (float)tmp;
+ mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
}
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
@@ -2330,12 +2373,81 @@ status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
int32_t bitrate;
+ int32_t iFrameInterval = 0;
+ size_t tsLayers = 0;
+ OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
+ OMX_VIDEO_VPXTemporalLayerPatternNone;
+ static const uint32_t kVp8LayerRateAlloction
+ [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
+ [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
+ {100, 100, 100}, // 1 layer
+ { 60, 100, 100}, // 2 layers {60%, 40%}
+ { 40, 60, 100}, // 3 layers {40%, 20%, 40%}
+ };
if (!msg->findInt32("bitrate", &bitrate)) {
return INVALID_OPERATION;
}
+ msg->findInt32("i-frame-interval", &iFrameInterval);
OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
+ float frameRate;
+ if (!msg->findFloat("frame-rate", &frameRate)) {
+ int32_t tmp;
+ if (!msg->findInt32("frame-rate", &tmp)) {
+ return INVALID_OPERATION;
+ }
+ frameRate = (float)tmp;
+ }
+
+ AString tsSchema;
+ if (msg->findString("ts-schema", &tsSchema)) {
+ if (tsSchema == "webrtc.vp8.1-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 1;
+ } else if (tsSchema == "webrtc.vp8.2-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 2;
+ } else if (tsSchema == "webrtc.vp8.3-layer") {
+ pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
+ tsLayers = 3;
+ } else {
+ ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str());
+ }
+ }
+
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
+ InitOMXParams(&vp8type);
+ vp8type.nPortIndex = kPortIndexOutput;
+ status_t err = mOMX->getParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type, sizeof(vp8type));
+
+ if (err == OK) {
+ if (iFrameInterval > 0) {
+ vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);
+ }
+ vp8type.eTemporalPattern = pattern;
+ vp8type.nTemporalLayerCount = tsLayers;
+ if (tsLayers > 0) {
+ for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
+ vp8type.nTemporalLayerBitrateRatio[i] =
+ kVp8LayerRateAlloction[tsLayers - 1][i];
+ }
+ }
+ if (bitrateMode == OMX_Video_ControlRateConstant) {
+ vp8type.nMinQuantizer = 2;
+ vp8type.nMaxQuantizer = 63;
+ }
+
+ err = mOMX->setParameter(
+ mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
+ &vp8type, sizeof(vp8type));
+ if (err != OK) {
+ ALOGW("Extended VP8 parameters set failed: %d", err);
+ }
+ }
+
return configureBitrate(bitrate, bitrateMode);
}
@@ -2491,19 +2603,7 @@ void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
return;
}
- int minUndequeuedBufs = 0;
- status_t err = mNativeWindow->query(
- mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
- &minUndequeuedBufs);
-
- if (err != OK) {
- ALOGE("[%s] NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
- mComponentName.c_str(), strerror(-err), -err);
-
- minUndequeuedBufs = 0;
- }
-
- while (countBuffersOwnedByNativeWindow() > (size_t)minUndequeuedBufs
+ while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
&& dequeueBufferFromNativeWindow() != NULL) {
// these buffers will be submitted as regular buffers; account for this
if (mStoreMetaDataInOutputBuffers && mMetaDataBuffersToSubmit > 0) {
@@ -2549,79 +2649,78 @@ void ACodec::processDeferredMessages() {
}
}
-void ACodec::sendFormatChange(const sp<AMessage> &reply) {
- sp<AMessage> notify = mNotify->dup();
- notify->setInt32("what", kWhatOutputFormatChanged);
-
+status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
+ // TODO: catch errors and return them instead of using CHECK
OMX_PARAM_PORTDEFINITIONTYPE def;
InitOMXParams(&def);
- def.nPortIndex = kPortIndexOutput;
+ def.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)),
(status_t)OK);
- CHECK_EQ((int)def.eDir, (int)OMX_DirOutput);
+ CHECK_EQ((int)def.eDir,
+ (int)(portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput));
switch (def.eDomain) {
case OMX_PortDomainVideo:
{
OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
+ switch ((int)videoDef->eCompressionFormat) {
+ case OMX_VIDEO_CodingUnused:
+ {
+ CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
+ notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
+
+ notify->setInt32("stride", videoDef->nStride);
+ notify->setInt32("slice-height", videoDef->nSliceHeight);
+ notify->setInt32("color-format", videoDef->eColorFormat);
+
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = kPortIndexOutput;
+
+ if (mOMX->getConfig(
+ mNode, OMX_IndexConfigCommonOutputCrop,
+ &rect, sizeof(rect)) != OK) {
+ rect.nLeft = 0;
+ rect.nTop = 0;
+ rect.nWidth = videoDef->nFrameWidth;
+ rect.nHeight = videoDef->nFrameHeight;
+ }
- AString mime;
- if (!mIsEncoder) {
- notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
- } else if (GetMimeTypeForVideoCoding(
- videoDef->eCompressionFormat, &mime) != OK) {
- notify->setString("mime", "application/octet-stream");
- } else {
- notify->setString("mime", mime.c_str());
- }
-
- notify->setInt32("width", videoDef->nFrameWidth);
- notify->setInt32("height", videoDef->nFrameHeight);
+ CHECK_GE(rect.nLeft, 0);
+ CHECK_GE(rect.nTop, 0);
+ CHECK_GE(rect.nWidth, 0u);
+ CHECK_GE(rect.nHeight, 0u);
+ CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
+ CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
- if (!mIsEncoder) {
- notify->setInt32("stride", videoDef->nStride);
- notify->setInt32("slice-height", videoDef->nSliceHeight);
- notify->setInt32("color-format", videoDef->eColorFormat);
-
- OMX_CONFIG_RECTTYPE rect;
- InitOMXParams(&rect);
- rect.nPortIndex = kPortIndexOutput;
-
- if (mOMX->getConfig(
- mNode, OMX_IndexConfigCommonOutputCrop,
- &rect, sizeof(rect)) != OK) {
- rect.nLeft = 0;
- rect.nTop = 0;
- rect.nWidth = videoDef->nFrameWidth;
- rect.nHeight = videoDef->nFrameHeight;
- }
-
- CHECK_GE(rect.nLeft, 0);
- CHECK_GE(rect.nTop, 0);
- CHECK_GE(rect.nWidth, 0u);
- CHECK_GE(rect.nHeight, 0u);
- CHECK_LE(rect.nLeft + rect.nWidth - 1, videoDef->nFrameWidth);
- CHECK_LE(rect.nTop + rect.nHeight - 1, videoDef->nFrameHeight);
-
- notify->setRect(
- "crop",
- rect.nLeft,
- rect.nTop,
- rect.nLeft + rect.nWidth - 1,
- rect.nTop + rect.nHeight - 1);
-
- if (mNativeWindow != NULL) {
- reply->setRect(
+ notify->setRect(
"crop",
rect.nLeft,
rect.nTop,
- rect.nLeft + rect.nWidth,
- rect.nTop + rect.nHeight);
+ rect.nLeft + rect.nWidth - 1,
+ rect.nTop + rect.nHeight - 1);
+
+ break;
+ }
+ default:
+ {
+ CHECK(mIsEncoder ^ (portIndex == kPortIndexInput));
+ AString mime;
+ if (GetMimeTypeForVideoCoding(
+ videoDef->eCompressionFormat, &mime) != OK) {
+ notify->setString("mime", "application/octet-stream");
+ } else {
+ notify->setString("mime", mime.c_str());
+ }
+ break;
}
}
+
+ notify->setInt32("width", videoDef->nFrameWidth);
+ notify->setInt32("height", videoDef->nFrameHeight);
break;
}
@@ -2634,7 +2733,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PARAM_PCMMODETYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioPcm,
@@ -2654,20 +2753,6 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
notify->setInt32("channel-count", params.nChannels);
notify->setInt32("sample-rate", params.nSamplingRate);
- if (mEncoderDelay + mEncoderPadding) {
- size_t frameSize = params.nChannels * sizeof(int16_t);
- if (mSkipCutBuffer != NULL) {
- size_t prevbufsize = mSkipCutBuffer->size();
- if (prevbufsize != 0) {
- ALOGW("Replacing SkipCutBuffer holding %d "
- "bytes",
- prevbufsize);
- }
- }
- mSkipCutBuffer = new SkipCutBuffer(
- mEncoderDelay * frameSize,
- mEncoderPadding * frameSize);
- }
if (mChannelMaskPresent) {
notify->setInt32("channel-mask", mChannelMask);
@@ -2679,7 +2764,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PARAM_AACPROFILETYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioAac,
@@ -2696,7 +2781,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PARAM_AMRTYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioAmr,
@@ -2722,7 +2807,7 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
{
OMX_AUDIO_PARAM_FLACTYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ(mOMX->getParameter(
mNode, OMX_IndexParamAudioFlac,
@@ -2735,11 +2820,45 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
break;
}
+ case OMX_AUDIO_CodingMP3:
+ {
+ OMX_AUDIO_PARAM_MP3TYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioMp3,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
+ case OMX_AUDIO_CodingVORBIS:
+ {
+ OMX_AUDIO_PARAM_VORBISTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ(mOMX->getParameter(
+ mNode, OMX_IndexParamAudioVorbis,
+ &params, sizeof(params)),
+ (status_t)OK);
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
case OMX_AUDIO_CodingAndroidAC3:
{
OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
InitOMXParams(&params);
- params.nPortIndex = kPortIndexOutput;
+ params.nPortIndex = portIndex;
CHECK_EQ((status_t)OK, mOMX->getParameter(
mNode,
@@ -2753,7 +2872,26 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
break;
}
+ case OMX_AUDIO_CodingAndroidOPUS:
+ {
+ OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
+ InitOMXParams(&params);
+ params.nPortIndex = portIndex;
+
+ CHECK_EQ((status_t)OK, mOMX->getParameter(
+ mNode,
+ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
+ &params,
+ sizeof(params)));
+
+ notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
+ notify->setInt32("channel-count", params.nChannels);
+ notify->setInt32("sample-rate", params.nSampleRate);
+ break;
+ }
+
default:
+ ALOGE("UNKNOWN AUDIO CODING: %d\n", audioDef->eEncoding);
TRESPASS();
}
break;
@@ -2763,6 +2901,43 @@ void ACodec::sendFormatChange(const sp<AMessage> &reply) {
TRESPASS();
}
+ return OK;
+}
+
+void ACodec::sendFormatChange(const sp<AMessage> &reply) {
+ sp<AMessage> notify = mNotify->dup();
+ notify->setInt32("what", kWhatOutputFormatChanged);
+
+ CHECK_EQ(getPortFormat(kPortIndexOutput, notify), (status_t)OK);
+
+ AString mime;
+ CHECK(notify->findString("mime", &mime));
+
+ int32_t left, top, right, bottom;
+ if (mime == MEDIA_MIMETYPE_VIDEO_RAW &&
+ mNativeWindow != NULL &&
+ notify->findRect("crop", &left, &top, &right, &bottom)) {
+ // notify renderer of the crop change
+ // NOTE: native window uses extended right-bottom coordinate
+ reply->setRect("crop", left, top, right + 1, bottom + 1);
+ } else if (mime == MEDIA_MIMETYPE_AUDIO_RAW &&
+ (mEncoderDelay || mEncoderPadding)) {
+ int32_t channelCount;
+ CHECK(notify->findInt32("channel-count", &channelCount));
+ size_t frameSize = channelCount * sizeof(int16_t);
+ if (mSkipCutBuffer != NULL) {
+ size_t prevbufsize = mSkipCutBuffer->size();
+ if (prevbufsize != 0) {
+ ALOGW("Replacing SkipCutBuffer holding %d "
+ "bytes",
+ prevbufsize);
+ }
+ }
+ mSkipCutBuffer = new SkipCutBuffer(
+ mEncoderDelay * frameSize,
+ mEncoderPadding * frameSize);
+ }
+
notify->post();
mSentFormat = true;
@@ -2969,7 +3144,8 @@ ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
mCodec(codec) {
}
-ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(OMX_U32 portIndex) {
+ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
+ OMX_U32 /* portIndex */) {
return KEEP_BUFFERS;
}
@@ -3018,6 +3194,14 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
int32_t type;
CHECK(msg->findInt32("type", &type));
+ // there is a possibility that this is an outstanding message for a
+ // codec that we have already destroyed
+ if (mCodec->mNode == NULL) {
+ ALOGI("ignoring message as already freed component: %s",
+ msg->debugString().c_str());
+ return true;
+ }
+
IOMX::node_id nodeID;
CHECK(msg->findInt32("node", (int32_t*)&nodeID));
CHECK_EQ(nodeID, mCodec->mNode);
@@ -3062,23 +3246,17 @@ bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
int32_t rangeOffset, rangeLength, flags;
int64_t timeUs;
- void *platformPrivate;
- void *dataPtr;
CHECK(msg->findInt32("range_offset", &rangeOffset));
CHECK(msg->findInt32("range_length", &rangeLength));
CHECK(msg->findInt32("flags", &flags));
CHECK(msg->findInt64("timestamp", &timeUs));
- CHECK(msg->findPointer("platform_private", &platformPrivate));
- CHECK(msg->findPointer("data_ptr", &dataPtr));
return onOMXFillBufferDone(
bufferID,
(size_t)rangeOffset, (size_t)rangeLength,
(OMX_U32)flags,
- timeUs,
- platformPrivate,
- dataPtr);
+ timeUs);
}
default:
@@ -3376,10 +3554,8 @@ bool ACodec::BaseState::onOMXFillBufferDone(
IOMX::buffer_id bufferID,
size_t rangeOffset, size_t rangeLength,
OMX_U32 flags,
- int64_t timeUs,
- void *platformPrivate,
- void *dataPtr) {
- ALOGV("[%s] onOMXFillBufferDone %p time %lld us, flags = 0x%08lx",
+ int64_t timeUs) {
+ ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
mCodec->mComponentName.c_str(), bufferID, timeUs, flags);
ssize_t index;
@@ -3430,7 +3606,7 @@ bool ACodec::BaseState::onOMXFillBufferDone(
sp<AMessage> reply =
new AMessage(kWhatOutputBufferDrained, mCodec->id());
- if (!mCodec->mSentFormat) {
+ if (!mCodec->mSentFormat && rangeLength > 0) {
mCodec->sendFormatChange(reply);
}
@@ -3518,7 +3694,26 @@ void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
ATRACE_NAME("render");
// The client wants this buffer to be rendered.
+ int64_t timestampNs = 0;
+ if (!msg->findInt64("timestampNs", &timestampNs)) {
+ // TODO: it seems like we should use the timestamp
+ // in the (media)buffer as it potentially came from
+ // an input surface, but we did not propagate it prior to
+ // API 20. Perhaps check for target SDK version.
+#if 0
+ if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
+ ALOGV("using buffer PTS of %" PRId64, timestampNs);
+ timestampNs *= 1000;
+ }
+#endif
+ }
+
status_t err;
+ err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
+ if (err != OK) {
+ ALOGW("failed to set buffer timestamp: %d", err);
+ }
+
if ((err = mCodec->mNativeWindow->queueBuffer(
mCodec->mNativeWindow.get(),
info->mGraphicBuffer.get(), -1)) == OK) {
@@ -3632,7 +3827,8 @@ bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
int32_t keepComponentAllocated;
CHECK(msg->findInt32(
"keepComponentAllocated", &keepComponentAllocated));
- CHECK(!keepComponentAllocated);
+ ALOGW_IF(keepComponentAllocated,
+ "cannot keep component allocated on shutdown in Uninitialized state");
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", ACodec::kWhatShutdownCompleted);
@@ -3692,6 +3888,7 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
AString componentName;
uint32_t quirks = 0;
+ int32_t encoder = false;
if (msg->findString("componentName", &componentName)) {
ssize_t index = matchingCodecs.add();
OMXCodec::CodecNameAndQuirks *entry = &matchingCodecs.editItemAt(index);
@@ -3704,7 +3901,6 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
} else {
CHECK(msg->findString("mime", &mime));
- int32_t encoder;
if (!msg->findInt32("encoder", &encoder)) {
encoder = false;
}
@@ -3740,10 +3936,10 @@ bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
if (node == NULL) {
if (!mime.empty()) {
- ALOGE("Unable to instantiate a decoder for type '%s'.",
- mime.c_str());
+ ALOGE("Unable to instantiate a %scoder for type '%s'.",
+ encoder ? "en" : "de", mime.c_str());
} else {
- ALOGE("Unable to instantiate decoder '%s'.", componentName.c_str());
+ ALOGE("Unable to instantiate codec '%s'.", componentName.c_str());
}
mCodec->signalError(OMX_ErrorComponentNotFound);
@@ -3794,7 +3990,8 @@ void ACodec::LoadedState::stateEntered() {
mCodec->mDequeueCounter = 0;
mCodec->mMetaDataBuffersToSubmit = 0;
mCodec->mRepeatFrameDelayUs = -1ll;
- mCodec->mIsConfiguredForAdaptivePlayback = false;
+ mCodec->mInputFormat.clear();
+ mCodec->mOutputFormat.clear();
if (mCodec->mShutdownInProgress) {
bool keepComponentAllocated = mCodec->mKeepComponentAllocated;
@@ -3804,6 +4001,7 @@ void ACodec::LoadedState::stateEntered() {
onShutdown(keepComponentAllocated);
}
+ mCodec->mExplicitShutdown = false;
}
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
@@ -3813,9 +4011,12 @@ void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
mCodec->changeState(mCodec->mUninitializedState);
}
- sp<AMessage> notify = mCodec->mNotify->dup();
- notify->setInt32("what", ACodec::kWhatShutdownCompleted);
- notify->post();
+ if (mCodec->mExplicitShutdown) {
+ sp<AMessage> notify = mCodec->mNotify->dup();
+ notify->setInt32("what", ACodec::kWhatShutdownCompleted);
+ notify->post();
+ mCodec->mExplicitShutdown = false;
+ }
}
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
@@ -3849,6 +4050,7 @@ bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
CHECK(msg->findInt32(
"keepComponentAllocated", &keepComponentAllocated));
+ mCodec->mExplicitShutdown = true;
onShutdown(keepComponentAllocated);
handled = true;
@@ -3908,6 +4110,8 @@ bool ACodec::LoadedState::onConfigureComponent(
{
sp<AMessage> notify = mCodec->mNotify->dup();
notify->setInt32("what", ACodec::kWhatComponentConfigured);
+ notify->setMessage("input-format", mCodec->mInputFormat);
+ notify->setMessage("output-format", mCodec->mOutputFormat);
notify->post();
}
@@ -3915,7 +4119,7 @@ bool ACodec::LoadedState::onConfigureComponent(
}
void ACodec::LoadedState::onCreateInputSurface(
- const sp<AMessage> &msg) {
+ const sp<AMessage> & /* msg */) {
ALOGV("onCreateInputSurface");
sp<AMessage> notify = mCodec->mNotify->dup();
@@ -3943,7 +4147,7 @@ void ACodec::LoadedState::onCreateInputSurface(
}
}
- if (err == OK && mCodec->mMaxPtsGapUs > 0l) {
+ if (err == OK && mCodec->mMaxPtsGapUs > 0ll) {
err = mCodec->mOMX->setInternalOption(
mCodec->mNode,
kPortIndexInput,
@@ -3953,6 +4157,41 @@ void ACodec::LoadedState::onCreateInputSurface(
if (err != OK) {
ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ }
+ }
+
+ if (err == OK && mCodec->mTimePerCaptureUs > 0ll
+ && mCodec->mTimePerFrameUs > 0ll) {
+ int64_t timeLapse[2];
+ timeLapse[0] = mCodec->mTimePerFrameUs;
+ timeLapse[1] = mCodec->mTimePerCaptureUs;
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_TIME_LAPSE,
+ &timeLapse[0],
+ sizeof(timeLapse));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure time lapse (err %d)",
+ mCodec->mComponentName.c_str(),
+ err);
+ }
+ }
+
+ if (err == OK && mCodec->mCreateInputBuffersSuspended) {
+ bool suspend = true;
+ err = mCodec->mOMX->setInternalOption(
+ mCodec->mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_SUSPEND,
+ &suspend,
+ sizeof(suspend));
+
+ if (err != OK) {
+ ALOGE("[%s] Unable to configure option to suspend (err %d)",
mCodec->mComponentName.c_str(),
err);
}
@@ -4015,6 +4254,7 @@ status_t ACodec::LoadedToIdleState::allocateBuffers() {
bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatSetParameters:
case kWhatShutdown:
{
mCodec->deferMessage(msg);
@@ -4081,6 +4321,7 @@ void ACodec::IdleToExecutingState::stateEntered() {
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
+ case kWhatSetParameters:
case kWhatShutdown:
{
mCodec->deferMessage(msg);
@@ -4141,7 +4382,7 @@ ACodec::ExecutingState::ExecutingState(ACodec *codec)
}
ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
- OMX_U32 portIndex) {
+ OMX_U32 /* portIndex */) {
return RESUBMIT_BUFFERS;
}
@@ -4229,6 +4470,7 @@ bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
"keepComponentAllocated", &keepComponentAllocated));
mCodec->mShutdownInProgress = true;
+ mCodec->mExplicitShutdown = true;
mCodec->mKeepComponentAllocated = keepComponentAllocated;
mActive = false;
@@ -4350,6 +4592,22 @@ status_t ACodec::setParameters(const sp<AMessage> &params) {
}
}
+ int64_t skipFramesBeforeUs;
+ if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
+ status_t err =
+ mOMX->setInternalOption(
+ mNode,
+ kPortIndexInput,
+ IOMX::INTERNAL_OPTION_START_TIME,
+ &skipFramesBeforeUs,
+ sizeof(skipFramesBeforeUs));
+
+ if (err != OK) {
+ ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
+ return err;
+ }
+ }
+
int32_t dropInputFrames;
if (params->findInt32("drop-input-frames", &dropInputFrames)) {
bool suspend = dropInputFrames != 0;
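
[Editor's note] As a rough illustration of the buffer-count strategy introduced in configureOutputBuffersFromNativeWindow above (allocate the minimum plus a couple of extra buffers, retrying with fewer extras if the component refuses), here is a hedged standalone sketch; setBufferCount() and the numbers are stand-ins, not OMX calls.

    #include <cstdio>

    // Pretend the component accepts at most 6 output buffers.
    static bool setBufferCount(unsigned n) { return n <= 6; }

    int main() {
        const unsigned bufferCountMin = 4;
        const unsigned minUndequeued  = 2;

        unsigned chosen = 0;
        for (unsigned extra = 2 + 1; ; --extra) {   // 2 against starvation + 1 safety margin
            const unsigned want = bufferCountMin + minUndequeued + extra;
            if (setBufferCount(want)) { chosen = want; break; }   // component accepted the count
            if (extra == 0) break;                                // even the bare minimum failed
        }
        std::printf("allocated %u output buffers\n", chosen);
        return 0;
    }
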
diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
index 3619821..d9e39ff 100644
--- a/media/libstagefright/Android.mk
+++ b/media/libstagefright/Android.mk
@@ -16,6 +16,7 @@ LOCAL_SRC_FILES:= \
CameraSourceTimeLapse.cpp \
ClockEstimator.cpp \
DataSource.cpp \
+ DataURISource.cpp \
DRMExtractor.cpp \
ESDS.cpp \
FileSource.cpp \
@@ -31,8 +32,10 @@ LOCAL_SRC_FILES:= \
MediaBufferGroup.cpp \
MediaCodec.cpp \
MediaCodecList.cpp \
+ MediaCodecSource.cpp \
MediaDefs.cpp \
MediaExtractor.cpp \
+ http/MediaHTTP.cpp \
MediaMuxer.cpp \
MediaSource.cpp \
MetaData.cpp \
@@ -56,8 +59,6 @@ LOCAL_SRC_FILES:= \
WVMExtractor.cpp \
XINGSeeker.cpp \
avc_utils.cpp \
- mp4/FragmentedMP4Parser.cpp \
- mp4/TrackFragment.cpp \
LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/include/media/stagefright/timedtext \
@@ -81,6 +82,7 @@ LOCAL_SHARED_LIBRARIES := \
libicuuc \
liblog \
libmedia \
+ libopus \
libsonivox \
libssl \
libstagefright_omx \
@@ -96,6 +98,7 @@ LOCAL_STATIC_LIBRARIES := \
libstagefright_color_conversion \
libstagefright_aacenc \
libstagefright_matroska \
+ libstagefright_webm \
libstagefright_timedtext \
libvpx \
libwebm \
@@ -104,13 +107,6 @@ LOCAL_STATIC_LIBRARIES := \
libFLAC \
libmedia_helper
-LOCAL_SRC_FILES += \
- chromium_http_stub.cpp
-LOCAL_CPPFLAGS += -DCHROMIUM_AVAILABLE=1
-
-LOCAL_SHARED_LIBRARIES += libstlport
-include external/stlport/libstlport.mk
-
LOCAL_SHARED_LIBRARIES += \
libstagefright_enc_common \
libstagefright_avc_common \
diff --git a/media/libstagefright/AudioPlayer.cpp b/media/libstagefright/AudioPlayer.cpp
index 8623100..2669849 100644
--- a/media/libstagefright/AudioPlayer.cpp
+++ b/media/libstagefright/AudioPlayer.cpp
@@ -221,7 +221,8 @@ status_t AudioPlayer::start(bool sourceAlreadyStarted) {
mAudioTrack = new AudioTrack(
AUDIO_STREAM_MUSIC, mSampleRate, AUDIO_FORMAT_PCM_16_BIT, audioMask,
- 0, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this, 0);
+ 0 /*frameCount*/, AUDIO_OUTPUT_FLAG_NONE, &AudioCallback, this,
+ 0 /*notificationFrames*/);
if ((err = mAudioTrack->initCheck()) != OK) {
mAudioTrack.clear();
diff --git a/media/libstagefright/AudioSource.cpp b/media/libstagefright/AudioSource.cpp
index e68a710..d0e0e8e 100644
--- a/media/libstagefright/AudioSource.cpp
+++ b/media/libstagefright/AudioSource.cpp
@@ -65,7 +65,7 @@ AudioSource::AudioSource(
if (status == OK) {
// make sure that the AudioRecord callback never returns more than the maximum
// buffer size
- int frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
+ uint32_t frameCount = kMaxBufferSize / sizeof(int16_t) / channelCount;
// make sure that the AudioRecord total buffer size is large enough
size_t bufCount = 2;
@@ -76,10 +76,10 @@ AudioSource::AudioSource(
mRecord = new AudioRecord(
inputSource, sampleRate, AUDIO_FORMAT_PCM_16_BIT,
audio_channel_in_mask_from_count(channelCount),
- bufCount * frameCount,
+ (size_t) (bufCount * frameCount),
AudioRecordCallbackFunction,
this,
- frameCount);
+ frameCount /*notificationFrames*/);
mInitCheck = mRecord->initCheck();
} else {
mInitCheck = status;
@@ -278,7 +278,7 @@ status_t AudioSource::dataCallback(const AudioRecord::Buffer& audioBuffer) {
// Drop retrieved and previously lost audio data.
if (mNumFramesReceived == 0 && timeUs < mStartTimeUs) {
- mRecord->getInputFramesLost();
+ (void) mRecord->getInputFramesLost();
ALOGV("Drop audio data at %lld/%lld us", timeUs, mStartTimeUs);
return OK;
}
diff --git a/media/libstagefright/AwesomePlayer.cpp b/media/libstagefright/AwesomePlayer.cpp
index ea6c15b..f35a5b1 100644
--- a/media/libstagefright/AwesomePlayer.cpp
+++ b/media/libstagefright/AwesomePlayer.cpp
@@ -35,6 +35,8 @@
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -46,6 +48,7 @@
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaExtractor.h>
+#include <media/stagefright/MediaHTTP.h>
#include <media/stagefright/MediaSource.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/OMXCodec.h>
@@ -103,12 +106,15 @@ struct AwesomeLocalRenderer : public AwesomeRenderer {
}
virtual void render(MediaBuffer *buffer) {
+ int64_t timeUs;
+ CHECK(buffer->meta_data()->findInt64(kKeyTime, &timeUs));
+
render((const uint8_t *)buffer->data() + buffer->range_offset(),
- buffer->range_length());
+ buffer->range_length(), timeUs * 1000);
}
- void render(const void *data, size_t size) {
- mTarget->render(data, size, NULL);
+ void render(const void *data, size_t size, int64_t timestampNs) {
+ mTarget->render(data, size, timestampNs, NULL);
}
protected:
@@ -280,15 +286,20 @@ void AwesomePlayer::setUID(uid_t uid) {
}
status_t AwesomePlayer::setDataSource(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers) {
Mutex::Autolock autoLock(mLock);
- return setDataSource_l(uri, headers);
+ return setDataSource_l(httpService, uri, headers);
}
status_t AwesomePlayer::setDataSource_l(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers) {
reset_l();
+ mHTTPService = httpService;
mUri = uri;
if (headers) {
@@ -305,7 +316,7 @@ status_t AwesomePlayer::setDataSource_l(
}
}
- ALOGI("setDataSource_l(URL suppressed)");
+ ALOGI("setDataSource_l(%s)", uriDebugString(mUri, mFlags & INCOGNITO).c_str());
// The actual work will be done during preparation in the call to
// ::finishSetDataSource_l to avoid blocking the calling thread in
@@ -397,6 +408,13 @@ status_t AwesomePlayer::setDataSource_l(const sp<MediaExtractor> &extractor) {
totalBitRate += bitrate;
}
+ sp<MetaData> fileMeta = mExtractor->getMetaData();
+ if (fileMeta != NULL) {
+ int64_t duration;
+ if (fileMeta->findInt64(kKeyDuration, &duration)) {
+ mDurationUs = duration;
+ }
+ }
mBitrate = totalBitRate;
@@ -585,6 +603,7 @@ void AwesomePlayer::reset_l() {
mSeekNotificationSent = true;
mSeekTimeUs = 0;
+ mHTTPService.clear();
mUri.setTo("");
mUriHeaders.clear();
@@ -712,11 +731,9 @@ void AwesomePlayer::onBufferingUpdate() {
finishAsyncPrepare_l();
}
} else {
- int64_t bitrate;
- if (getBitrate(&bitrate)) {
- size_t cachedSize = mCachedSource->cachedSize();
- int64_t cachedDurationUs = cachedSize * 8000000ll / bitrate;
-
+ bool eos2;
+ int64_t cachedDurationUs;
+ if (getCachedDuration_l(&cachedDurationUs, &eos2) && mDurationUs > 0) {
int percentage = 100.0 * (double)cachedDurationUs / mDurationUs;
if (percentage > 100) {
percentage = 100;
@@ -724,7 +741,7 @@ void AwesomePlayer::onBufferingUpdate() {
notifyListener_l(MEDIA_BUFFERING_UPDATE, percentage);
} else {
- // We don't know the bitrate of the stream, use absolute size
+ // We don't know the bitrate/duration of the stream, use absolute size
// limits to maintain the cache.
if ((mFlags & PLAYING) && !eos
@@ -1486,7 +1503,7 @@ void AwesomePlayer::addTextSource_l(size_t trackIndex, const sp<MediaSource>& so
CHECK(source != NULL);
if (mTextDriver == NULL) {
- mTextDriver = new TimedTextDriver(mListener);
+ mTextDriver = new TimedTextDriver(mListener, mHTTPService);
}
mTextDriver->addInBandTextSource(trackIndex, source);
@@ -2218,15 +2235,14 @@ status_t AwesomePlayer::finishSetDataSource_l() {
if (!strncasecmp("http://", mUri.string(), 7)
|| !strncasecmp("https://", mUri.string(), 8)
|| isWidevineStreaming) {
- mConnectingDataSource = HTTPBase::Create(
- (mFlags & INCOGNITO)
- ? HTTPBase::kFlagIncognito
- : 0);
-
- if (mUIDValid) {
- mConnectingDataSource->setUID(mUID);
+ if (mHTTPService == NULL) {
+ ALOGE("Attempt to play media from http URI without HTTP service.");
+ return UNKNOWN_ERROR;
}
+ sp<IMediaHTTPConnection> conn = mHTTPService->makeHTTPConnection();
+ mConnectingDataSource = new MediaHTTP(conn);
+
String8 cacheConfig;
bool disconnectAtHighwatermark;
NuCachedSource2::RemoveCacheSpecificHeaders(
@@ -2234,6 +2250,10 @@ status_t AwesomePlayer::finishSetDataSource_l() {
mLock.unlock();
status_t err = mConnectingDataSource->connect(mUri, &mUriHeaders);
+ // force connection at this point, to avoid a race condition between getMIMEType and the
+ // caching datasource constructed below, which could result in multiple requests to the
+ // server, and/or failed connections.
+ String8 contentType = mConnectingDataSource->getMIMEType();
mLock.lock();
if (err != OK) {
@@ -2264,8 +2284,6 @@ status_t AwesomePlayer::finishSetDataSource_l() {
mConnectingDataSource.clear();
- String8 contentType = dataSource->getMIMEType();
-
if (strncasecmp(contentType.string(), "audio/", 6)) {
// We're not doing this for streams that appear to be audio-only
// streams to ensure that even low bandwidth streams start
@@ -2342,7 +2360,8 @@ status_t AwesomePlayer::finishSetDataSource_l() {
}
}
} else {
- dataSource = DataSource::CreateFromURI(mUri.string(), &mUriHeaders);
+ dataSource = DataSource::CreateFromURI(
+ mHTTPService, mUri.string(), &mUriHeaders);
}
if (dataSource == NULL) {
@@ -2784,7 +2803,7 @@ status_t AwesomePlayer::invoke(const Parcel &request, Parcel *reply) {
{
Mutex::Autolock autoLock(mLock);
if (mTextDriver == NULL) {
- mTextDriver = new TimedTextDriver(mListener);
+ mTextDriver = new TimedTextDriver(mListener, mHTTPService);
}
// String values written in Parcel are UTF-16 values.
String8 uri(request.readString16());
@@ -2796,7 +2815,7 @@ status_t AwesomePlayer::invoke(const Parcel &request, Parcel *reply) {
{
Mutex::Autolock autoLock(mLock);
if (mTextDriver == NULL) {
- mTextDriver = new TimedTextDriver(mListener);
+ mTextDriver = new TimedTextDriver(mListener, mHTTPService);
}
int fd = request.readFileDescriptor();
off64_t offset = request.readInt64();
@@ -2837,7 +2856,7 @@ status_t AwesomePlayer::dump(
fprintf(out, " AwesomePlayer\n");
if (mStats.mFd < 0) {
- fprintf(out, " URI(suppressed)");
+ fprintf(out, " URI(%s)", uriDebugString(mUri, mFlags & INCOGNITO).c_str());
} else {
fprintf(out, " fd(%d)", mStats.mFd);
}
@@ -2926,6 +2945,8 @@ void AwesomePlayer::onAudioTearDownEvent() {
// get current position so we can start recreated stream from here
getPosition(&mAudioTearDownPosition);
+ sp<IMediaHTTPService> savedHTTPService = mHTTPService;
+
// Reset and recreate
reset_l();
@@ -2935,7 +2956,7 @@ void AwesomePlayer::onAudioTearDownEvent() {
mFileSource = fileSource;
err = setDataSource_l(fileSource);
} else {
- err = setDataSource_l(uri, &uriHeaders);
+ err = setDataSource_l(savedHTTPService, uri, &uriHeaders);
}
mFlags |= PREPARING;
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 5b41f30..b31e9e8 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -31,6 +31,12 @@
#include <utils/String8.h>
#include <cutils/properties.h>
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
namespace android {
static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
@@ -63,6 +69,9 @@ CameraSourceListener::~CameraSourceListener() {
}
void CameraSourceListener::notify(int32_t msgType, int32_t ext1, int32_t ext2) {
+ UNUSED_UNLESS_VERBOSE(msgType);
+ UNUSED_UNLESS_VERBOSE(ext1);
+ UNUSED_UNLESS_VERBOSE(ext2);
ALOGV("notify(%d, %d, %d)", msgType, ext1, ext2);
}
@@ -577,14 +586,15 @@ CameraSource::~CameraSource() {
}
}
-void CameraSource::startCameraRecording() {
+status_t CameraSource::startCameraRecording() {
ALOGV("startCameraRecording");
// Reset the identity to the current thread because media server owns the
// camera and recording is started by the applications. The applications
// will connect to the camera in ICameraRecordingProxy::startRecording.
int64_t token = IPCThreadState::self()->clearCallingIdentity();
+ status_t err;
if (mNumInputBuffers > 0) {
- status_t err = mCamera->sendCommand(
+ err = mCamera->sendCommand(
CAMERA_CMD_SET_VIDEO_BUFFER_COUNT, mNumInputBuffers, 0);
// This could happen for CameraHAL1 clients; thus the failure is
@@ -595,17 +605,25 @@ void CameraSource::startCameraRecording() {
}
}
+ err = OK;
if (mCameraFlags & FLAGS_HOT_CAMERA) {
mCamera->unlock();
mCamera.clear();
- CHECK_EQ((status_t)OK,
- mCameraRecordingProxy->startRecording(new ProxyListener(this)));
+ if ((err = mCameraRecordingProxy->startRecording(
+ new ProxyListener(this))) != OK) {
+ ALOGE("Failed to start recording, received error: %s (%d)",
+ strerror(-err), err);
+ }
} else {
mCamera->setListener(new CameraSourceListener(this));
mCamera->startRecording();
- CHECK(mCamera->recordingEnabled());
+ if (!mCamera->recordingEnabled()) {
+ err = -EINVAL;
+ ALOGE("Failed to start recording");
+ }
}
IPCThreadState::self()->restoreCallingIdentity(token);
+ return err;
}
status_t CameraSource::start(MetaData *meta) {
@@ -637,10 +655,12 @@ status_t CameraSource::start(MetaData *meta) {
}
}
- startCameraRecording();
+ status_t err;
+ if ((err = startCameraRecording()) == OK) {
+ mStarted = true;
+ }
- mStarted = true;
- return OK;
+ return err;
}
void CameraSource::stopCameraRecording() {
diff --git a/media/libstagefright/CameraSourceTimeLapse.cpp b/media/libstagefright/CameraSourceTimeLapse.cpp
index 591daac..15ba967 100644
--- a/media/libstagefright/CameraSourceTimeLapse.cpp
+++ b/media/libstagefright/CameraSourceTimeLapse.cpp
@@ -85,7 +85,8 @@ CameraSourceTimeLapse::CameraSourceTimeLapse(
mVideoWidth = videoSize.width;
mVideoHeight = videoSize.height;
- if (!trySettingVideoSize(videoSize.width, videoSize.height)) {
+ if (OK == mInitCheck && !trySettingVideoSize(videoSize.width, videoSize.height)) {
+ releaseCamera();
mInitCheck = NO_INIT;
}
diff --git a/media/libstagefright/DataSource.cpp b/media/libstagefright/DataSource.cpp
index 97987e2..6e0f37a 100644
--- a/media/libstagefright/DataSource.cpp
+++ b/media/libstagefright/DataSource.cpp
@@ -16,10 +16,6 @@
#include "include/AMRExtractor.h"
-#if CHROMIUM_AVAILABLE
-#include "include/chromium_http_stub.h"
-#endif
-
#include "include/AACExtractor.h"
#include "include/DRMExtractor.h"
#include "include/FLACExtractor.h"
@@ -35,10 +31,14 @@
#include "matroska/MatroskaExtractor.h"
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/DataSource.h>
+#include <media/stagefright/DataURISource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaHTTP.h>
#include <utils/String8.h>
#include <cutils/properties.h>
@@ -180,7 +180,9 @@ void DataSource::RegisterDefaultSniffers() {
// static
sp<DataSource> DataSource::CreateFromURI(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers) {
bool isWidevine = !strncasecmp("widevine://", uri, 11);
sp<DataSource> source;
@@ -189,7 +191,7 @@ sp<DataSource> DataSource::CreateFromURI(
} else if (!strncasecmp("http://", uri, 7)
|| !strncasecmp("https://", uri, 8)
|| isWidevine) {
- sp<HTTPBase> httpSource = HTTPBase::Create();
+ sp<HTTPBase> httpSource = new MediaHTTP(httpService->makeHTTPConnection());
String8 tmp;
if (isWidevine) {
@@ -220,11 +222,8 @@ sp<DataSource> DataSource::CreateFromURI(
// in the widevine:// case.
source = httpSource;
}
-
-# if CHROMIUM_AVAILABLE
} else if (!strncasecmp("data:", uri, 5)) {
- source = createDataUriSource(uri);
-#endif
+ source = DataURISource::Create(uri);
} else {
// Assume it's a filename.
source = new FileSource(uri);
diff --git a/media/libstagefright/DataURISource.cpp b/media/libstagefright/DataURISource.cpp
new file mode 100644
index 0000000..377bc85
--- /dev/null
+++ b/media/libstagefright/DataURISource.cpp
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/stagefright/DataURISource.h>
+
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/foundation/base64.h>
+
+namespace android {
+
+// static
+sp<DataURISource> DataURISource::Create(const char *uri) {
+ if (strncasecmp("data:", uri, 5)) {
+ return NULL;
+ }
+
+ char *commaPos = strrchr(uri, ',');
+
+ if (commaPos == NULL) {
+ return NULL;
+ }
+
+ sp<ABuffer> buffer;
+
+ AString tmp(&uri[5], commaPos - &uri[5]);
+
+ if (tmp.endsWith(";base64")) {
+ AString encoded(commaPos + 1);
+
+ // Strip CR and LF...
+ for (size_t i = encoded.size(); i-- > 0;) {
+ if (encoded.c_str()[i] == '\r' || encoded.c_str()[i] == '\n') {
+ encoded.erase(i, 1);
+ }
+ }
+
+ buffer = decodeBase64(encoded);
+
+ if (buffer == NULL) {
+ ALOGE("Malformed base64 encoded content found.");
+ return NULL;
+ }
+ } else {
+#if 0
+ size_t dataLen = strlen(uri) - tmp.size() - 6;
+ buffer = new ABuffer(dataLen);
+ memcpy(buffer->data(), commaPos + 1, dataLen);
+
+ // unescape
+#else
+ // MediaPlayer doesn't care for this right now as we don't
+ // play any text-based media.
+ return NULL;
+#endif
+ }
+
+ // We don't really care about charset or mime type.
+
+ return new DataURISource(buffer);
+}
+
+DataURISource::DataURISource(const sp<ABuffer> &buffer)
+ : mBuffer(buffer) {
+}
+
+DataURISource::~DataURISource() {
+}
+
+status_t DataURISource::initCheck() const {
+ return OK;
+}
+
+ssize_t DataURISource::readAt(off64_t offset, void *data, size_t size) {
+ if (offset >= mBuffer->size()) {
+ return 0;
+ }
+
+ size_t copy = mBuffer->size() - offset;
+ if (copy > size) {
+ copy = size;
+ }
+
+ memcpy(data, mBuffer->data() + offset, copy);
+
+ return copy;
+}
+
+status_t DataURISource::getSize(off64_t *size) {
+ *size = mBuffer->size();
+
+ return OK;
+}
+
+} // namespace android
+
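
Note (not part of the patch): DataURISource::Create() above splits the URI at the last ',' and only accepts the ';base64' form, stripping CR/LF before decoding. A standalone sketch of that header/payload split, using std::string instead of AString so it builds outside the tree:

// Standalone sketch of the data: URI split performed by Create().
#include <cstring>
#include <string>
#include <strings.h>

// Fills mediaType/base64Payload for "data:<type>;base64,<payload>".
static bool splitDataUri(const char *uri, std::string *mediaType, std::string *base64Payload) {
    if (strncasecmp("data:", uri, 5)) return false;
    const char *comma = strrchr(uri, ',');
    if (comma == NULL) return false;

    std::string header(uri + 5, comma - (uri + 5));          // e.g. "audio/mp4;base64"
    static const char kSuffix[] = ";base64";
    const size_t suffixLen = sizeof(kSuffix) - 1;
    if (header.size() < suffixLen ||
        header.compare(header.size() - suffixLen, suffixLen, kSuffix) != 0) {
        return false;                                         // plain (non-base64) payloads are rejected
    }

    *mediaType = header.substr(0, header.size() - suffixLen);
    *base64Payload = comma + 1;
    // Strip CR/LF, which are legal line breaks in long base64 payloads.
    for (size_t i = base64Payload->size(); i-- > 0;) {
        char c = (*base64Payload)[i];
        if (c == '\r' || c == '\n') base64Payload->erase(i, 1);
    }
    return true;
}
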
diff --git a/media/libstagefright/HTTPBase.cpp b/media/libstagefright/HTTPBase.cpp
index 5fa4b6f..ca68c3d 100644
--- a/media/libstagefright/HTTPBase.cpp
+++ b/media/libstagefright/HTTPBase.cpp
@@ -20,10 +20,6 @@
#include "include/HTTPBase.h"
-#if CHROMIUM_AVAILABLE
-#include "include/chromium_http_stub.h"
-#endif
-
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -40,34 +36,7 @@ HTTPBase::HTTPBase()
mTotalTransferBytes(0),
mPrevBandwidthMeasureTimeUs(0),
mPrevEstimatedBandWidthKbps(0),
- mBandWidthCollectFreqMs(5000),
- mUIDValid(false),
- mUID(0) {
-}
-
-// static
-sp<HTTPBase> HTTPBase::Create(uint32_t flags) {
-#if CHROMIUM_AVAILABLE
- HTTPBase *dataSource = createChromiumHTTPDataSource(flags);
- if (dataSource) {
- return dataSource;
- }
-#endif
- {
- TRESPASS();
-
- return NULL;
- }
-}
-
-// static
-status_t HTTPBase::UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
-#if CHROMIUM_AVAILABLE
- return UpdateChromiumHTTPDataSourceProxyConfig(host, port, exclusionList);
-#else
- return INVALID_OPERATION;
-#endif
+ mBandWidthCollectFreqMs(5000) {
}
void HTTPBase::addBandwidthMeasurement(
@@ -135,21 +104,6 @@ status_t HTTPBase::setBandwidthStatCollectFreq(int32_t freqMs) {
return OK;
}
-void HTTPBase::setUID(uid_t uid) {
- mUIDValid = true;
- mUID = uid;
-}
-
-bool HTTPBase::getUID(uid_t *uid) const {
- if (!mUIDValid) {
- return false;
- }
-
- *uid = mUID;
-
- return true;
-}
-
// static
void HTTPBase::RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag) {
int res = qtaguid_tagSocket(sockfd, kTag, uid);
diff --git a/media/libstagefright/MPEG4Extractor.cpp b/media/libstagefright/MPEG4Extractor.cpp
index 362cd6b..1b9551f 100644
--- a/media/libstagefright/MPEG4Extractor.cpp
+++ b/media/libstagefright/MPEG4Extractor.cpp
@@ -51,6 +51,7 @@ public:
int32_t timeScale,
const sp<SampleTable> &sampleTable,
Vector<SidxEntry> &sidx,
+ const Trex *trex,
off64_t firstMoofOffset);
virtual status_t start(MetaData *params = NULL);
@@ -74,6 +75,7 @@ private:
uint32_t mCurrentSampleIndex;
uint32_t mCurrentFragmentIndex;
Vector<SidxEntry> &mSegments;
+ const Trex *mTrex;
off64_t mFirstMoofOffset;
off64_t mCurrentMoofOffset;
off64_t mNextMoofOffset;
@@ -95,6 +97,7 @@ private:
uint64_t* mCurrentSampleInfoOffsets;
bool mIsAVC;
+ bool mIsHEVC;
size_t mNALLengthSize;
bool mStarted;
@@ -140,6 +143,7 @@ private:
off64_t offset;
size_t size;
uint32_t duration;
+ int32_t compositionOffset;
uint8_t iv[16];
Vector<size_t> clearsizes;
Vector<size_t> encryptedsizes;
@@ -317,6 +321,9 @@ static const char *FourCC2MIME(uint32_t fourcc) {
case FOURCC('a', 'v', 'c', '1'):
return MEDIA_MIMETYPE_VIDEO_AVC;
+ case FOURCC('h', 'v', 'c', '1'):
+ case FOURCC('h', 'e', 'v', '1'):
+ return MEDIA_MIMETYPE_VIDEO_HEVC;
default:
CHECK(!"should not be here.");
return NULL;
@@ -339,8 +346,7 @@ static bool AdjustChannelsAndRate(uint32_t fourcc, uint32_t *channels, uint32_t
}
MPEG4Extractor::MPEG4Extractor(const sp<DataSource> &source)
- : mSidxDuration(0),
- mMoofOffset(0),
+ : mMoofOffset(0),
mDataSource(source),
mInitCheck(NO_INIT),
mHasVideo(false),
@@ -365,7 +371,7 @@ MPEG4Extractor::~MPEG4Extractor() {
SINF *sinf = mFirstSINF;
while (sinf) {
SINF *next = sinf->next;
- delete sinf->IPMPData;
+ delete[] sinf->IPMPData;
delete sinf;
sinf = next;
}
@@ -478,22 +484,31 @@ status_t MPEG4Extractor::readMetaData() {
off64_t offset = 0;
status_t err;
while (true) {
+ off64_t orig_offset = offset;
err = parseChunk(&offset, 0);
- if (err == OK) {
+
+ if (offset <= orig_offset) {
+ // only continue parsing if the offset was advanced,
+ // otherwise we might end up in an infinite loop
+ ALOGE("did not advance: 0x%lld->0x%lld", orig_offset, offset);
+ err = ERROR_MALFORMED;
+ break;
+ } else if (err == OK) {
continue;
+ } else if (err != UNKNOWN_ERROR) {
+ break;
}
-
uint32_t hdr[2];
if (mDataSource->readAt(offset, hdr, 8) < 8) {
break;
}
uint32_t chunk_type = ntohl(hdr[1]);
- if (chunk_type == FOURCC('s', 'i', 'd', 'x')) {
- // parse the sidx box too
- continue;
- } else if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
// store the offset of the first segment
mMoofOffset = offset;
+ } else if (chunk_type != FOURCC('m', 'd', 'a', 't')) {
+ // keep parsing until we get to the data
+ continue;
}
break;
}
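
Note (not part of the patch): the hunk above replaces "keep going while parseChunk() returns OK" with an explicit progress check, because a malformed box that never advances *offset would otherwise spin forever. The same guard pattern, reduced to a standalone sketch with a hypothetical parseBox() callback:

// Standalone sketch of the "must make forward progress" loop guard.
#include <cstdint>
#include <functional>

enum Status { STATUS_OK, STATUS_DONE, STATUS_MALFORMED };

// parseBox is expected to advance *offset on success; this wrapper refuses to
// trust it and bails out the moment an iteration fails to move forward.
static Status parseAll(int64_t fileSize, const std::function<Status(int64_t *)> &parseBox) {
    int64_t offset = 0;
    while (offset < fileSize) {
        const int64_t before = offset;
        Status err = parseBox(&offset);
        if (offset <= before) {
            return STATUS_MALFORMED;   // no progress: would loop forever otherwise
        }
        if (err != STATUS_OK) {
            return err;
        }
    }
    return STATUS_DONE;
}
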
@@ -505,8 +520,6 @@ status_t MPEG4Extractor::readMetaData() {
} else {
mFileMetaData->setCString(kKeyMIMEType, "audio/mp4");
}
-
- mInitCheck = OK;
} else {
mInitCheck = err;
}
@@ -683,7 +696,10 @@ status_t MPEG4Extractor::parseDrmSINF(
return ERROR_MALFORMED;
}
sinf->len = dataLen - 3;
- sinf->IPMPData = new char[sinf->len];
+ sinf->IPMPData = new (std::nothrow) char[sinf->len];
+ if (sinf->IPMPData == NULL) {
+ return ERROR_MALFORMED;
+ }
data_offset += 2;
if (mDataSource->readAt(data_offset, sinf->IPMPData, sinf->len) < sinf->len) {
@@ -758,8 +774,25 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
// The smallest valid chunk is 16 bytes long in this case.
return ERROR_MALFORMED;
}
+ } else if (chunk_size == 0) {
+ if (depth == 0) {
+ // atom extends to end of file
+ off64_t sourceSize;
+ if (mDataSource->getSize(&sourceSize) == OK) {
+ chunk_size = (sourceSize - *offset);
+ } else {
+ // XXX could we just pick a "sufficiently large" value here?
+ ALOGE("atom size is 0, and data source has no size");
+ return ERROR_MALFORMED;
+ }
+ } else {
+ // not allowed for non-toplevel atoms, skip it
+ *offset += 4;
+ return OK;
+ }
} else if (chunk_size < 8) {
// The smallest valid chunk is 8 bytes long.
+ ALOGE("invalid chunk size: %d", int(chunk_size));
return ERROR_MALFORMED;
}
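
Note (not part of the patch): the new chunk_size == 0 branch implements the ISO/IEC 14496-12 rule that a size of 0 is only legal on the last top-level box, where it means "extends to the end of the file"; a size of 1 means the real size follows as a 64-bit largesize, which is the 16-byte minimum mentioned above. A standalone sketch of the size decoding, with kMalformed as a hypothetical error value:

// Standalone sketch of ISO BMFF box-size decoding (14496-12, box header rules).
#include <cstdint>

static const int64_t kMalformed = -1;

// size32: the 32-bit size field; largesize: 64-bit field that follows when size32 == 1.
// Returns the effective box size in bytes, or kMalformed.
static int64_t effectiveBoxSize(uint32_t size32, uint64_t largesize,
                                int64_t boxOffset, int64_t fileSize, int depth) {
    if (size32 == 1) {
        // 64-bit size: the header alone is 16 bytes, so anything smaller is bogus.
        return (largesize >= 16 && largesize <= (uint64_t)(fileSize - boxOffset))
                ? (int64_t)largesize : kMalformed;
    }
    if (size32 == 0) {
        // Only a top-level box may extend to end of file; the patch above skips
        // such a box at deeper levels, this sketch simply flags it.
        return (depth == 0) ? (fileSize - boxOffset) : kMalformed;
    }
    if (size32 < 8 || (int64_t)size32 > fileSize - boxOffset) {
        return kMalformed;   // smallest valid box is 8 bytes
    }
    return size32;
}
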
@@ -913,6 +946,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('e', 'l', 's', 't'):
{
+ *offset += chunk_size;
+
// See 14496-12 8.6.6
uint8_t version;
if (mDataSource->readAt(data_offset, &version, 1) < 1) {
@@ -975,12 +1010,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyEncoderPadding, paddingsamples);
}
}
- *offset += chunk_size;
break;
}
case FOURCC('f', 'r', 'm', 'a'):
{
+ *offset += chunk_size;
+
uint32_t original_fourcc;
if (mDataSource->readAt(data_offset, &original_fourcc, 4) < 4) {
return ERROR_IO;
@@ -994,12 +1030,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyChannelCount, num_channels);
mLastTrack->meta->setInt32(kKeySampleRate, sample_rate);
}
- *offset += chunk_size;
break;
}
case FOURCC('t', 'e', 'n', 'c'):
{
+ *offset += chunk_size;
+
if (chunk_size < 32) {
return ERROR_MALFORMED;
}
@@ -1044,23 +1081,25 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setInt32(kKeyCryptoMode, defaultAlgorithmId);
mLastTrack->meta->setInt32(kKeyCryptoDefaultIVSize, defaultIVSize);
mLastTrack->meta->setData(kKeyCryptoKey, 'tenc', defaultKeyId, 16);
- *offset += chunk_size;
break;
}
case FOURCC('t', 'k', 'h', 'd'):
{
+ *offset += chunk_size;
+
status_t err;
if ((err = parseTrackHeader(data_offset, chunk_data_size)) != OK) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('p', 's', 's', 'h'):
{
+ *offset += chunk_size;
+
PsshInfo pssh;
if (mDataSource->readAt(data_offset + 4, &pssh.uuid, 16) < 16) {
@@ -1078,7 +1117,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return ERROR_MALFORMED;
}
- pssh.data = new uint8_t[pssh.datalen];
+ pssh.data = new (std::nothrow) uint8_t[pssh.datalen];
+ if (pssh.data == NULL) {
+ return ERROR_MALFORMED;
+ }
ALOGV("allocated pssh @ %p", pssh.data);
ssize_t requested = (ssize_t) pssh.datalen;
if (mDataSource->readAt(data_offset + 24, pssh.data, requested) < requested) {
@@ -1086,12 +1128,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
mPssh.push_back(pssh);
- *offset += chunk_size;
break;
}
case FOURCC('m', 'd', 'h', 'd'):
{
+ *offset += chunk_size;
+
if (chunk_data_size < 4) {
return ERROR_MALFORMED;
}
@@ -1122,6 +1165,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->timescale = ntohl(timescale);
+ // 14496-12 says all ones means indeterminate, but some files seem to use
+ // 0 instead. We treat both the same.
int64_t duration = 0;
if (version == 1) {
if (mDataSource->readAt(
@@ -1129,7 +1174,9 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
< (ssize_t)sizeof(duration)) {
return ERROR_IO;
}
- duration = ntoh64(duration);
+ if (duration != -1) {
+ duration = ntoh64(duration);
+ }
} else {
uint32_t duration32;
if (mDataSource->readAt(
@@ -1137,13 +1184,14 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
< (ssize_t)sizeof(duration32)) {
return ERROR_IO;
}
- // ffmpeg sets duration to -1, which is incorrect.
if (duration32 != 0xffffffff) {
duration = ntohl(duration32);
}
}
- mLastTrack->meta->setInt64(
- kKeyDuration, (duration * 1000000) / mLastTrack->timescale);
+ if (duration != 0) {
+ mLastTrack->meta->setInt64(
+ kKeyDuration, (duration * 1000000) / mLastTrack->timescale);
+ }
uint8_t lang[2];
off64_t lang_offset;
@@ -1172,7 +1220,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setCString(
kKeyMediaLanguage, lang_code);
- *offset += chunk_size;
break;
}
@@ -1282,6 +1329,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('H', '2', '6', '3'):
case FOURCC('h', '2', '6', '3'):
case FOURCC('a', 'v', 'c', '1'):
+ case FOURCC('h', 'v', 'c', '1'):
+ case FOURCC('h', 'e', 'v', '1'):
{
mHasVideo = true;
@@ -1339,11 +1388,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->sampleTable->setChunkOffsetParams(
chunk_type, data_offset, chunk_data_size);
+ *offset += chunk_size;
+
if (err != OK) {
return err;
}
- *offset += chunk_size;
break;
}
@@ -1353,11 +1403,12 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->sampleTable->setSampleToChunkParams(
data_offset, chunk_data_size);
+ *offset += chunk_size;
+
if (err != OK) {
return err;
}
- *offset += chunk_size;
break;
}
@@ -1368,6 +1419,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->sampleTable->setSampleSizeParams(
chunk_type, data_offset, chunk_data_size);
+ *offset += chunk_size;
+
if (err != OK) {
return err;
}
@@ -1408,7 +1461,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
mLastTrack->meta->setInt32(kKeyMaxInputSize, max_size);
}
- *offset += chunk_size;
// NOTE: setting another piece of metadata invalidates any pointers (such as the
// mimetype) previously obtained, so don't cache them.
@@ -1432,6 +1484,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('s', 't', 't', 's'):
{
+ *offset += chunk_size;
+
status_t err =
mLastTrack->sampleTable->setTimeToSampleParams(
data_offset, chunk_data_size);
@@ -1440,12 +1494,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('c', 't', 't', 's'):
{
+ *offset += chunk_size;
+
status_t err =
mLastTrack->sampleTable->setCompositionTimeToSampleParams(
data_offset, chunk_data_size);
@@ -1454,12 +1509,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('s', 't', 's', 's'):
{
+ *offset += chunk_size;
+
status_t err =
mLastTrack->sampleTable->setSyncSampleParams(
data_offset, chunk_data_size);
@@ -1468,13 +1524,14 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
return err;
}
- *offset += chunk_size;
break;
}
// @xyz
case FOURCC('\xA9', 'x', 'y', 'z'):
{
+ *offset += chunk_size;
+
// Best case the total data length inside "@xyz" box
// would be 8, for instance "@xyz" + "\x00\x04\x15\xc7" + "0+0/",
// where "\x00\x04" is the text string length with value = 4,
@@ -1503,12 +1560,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
buffer[location_length] = '\0';
mFileMetaData->setCString(kKeyLocation, buffer);
- *offset += chunk_size;
break;
}
case FOURCC('e', 's', 'd', 's'):
{
+ *offset += chunk_size;
+
if (chunk_data_size < 4) {
return ERROR_MALFORMED;
}
@@ -1546,12 +1604,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
}
- *offset += chunk_size;
break;
}
case FOURCC('a', 'v', 'c', 'C'):
{
+ *offset += chunk_size;
+
sp<ABuffer> buffer = new ABuffer(chunk_data_size);
if (mDataSource->readAt(
@@ -1562,12 +1621,27 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setData(
kKeyAVCC, kTypeAVCC, buffer->data(), chunk_data_size);
+ break;
+ }
+ case FOURCC('h', 'v', 'c', 'C'):
+ {
+ sp<ABuffer> buffer = new ABuffer(chunk_data_size);
+
+ if (mDataSource->readAt(
+ data_offset, buffer->data(), chunk_data_size) < chunk_data_size) {
+ return ERROR_IO;
+ }
+
+ mLastTrack->meta->setData(
+ kKeyHVCC, kTypeHVCC, buffer->data(), chunk_data_size);
+
*offset += chunk_size;
break;
}
case FOURCC('d', '2', '6', '3'):
{
+ *offset += chunk_size;
/*
* d263 contains a fixed 7 bytes part:
* vendor - 4 bytes
@@ -1593,7 +1667,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setData(kKeyD263, kTypeD263, buffer, chunk_data_size);
- *offset += chunk_size;
break;
}
@@ -1601,11 +1674,13 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
{
uint8_t buffer[4];
if (chunk_data_size < (off64_t)sizeof(buffer)) {
+ *offset += chunk_size;
return ERROR_MALFORMED;
}
if (mDataSource->readAt(
data_offset, buffer, 4) < 4) {
+ *offset += chunk_size;
return ERROR_IO;
}
@@ -1639,6 +1714,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('n', 'a', 'm', 'e'):
case FOURCC('d', 'a', 't', 'a'):
{
+ *offset += chunk_size;
+
if (mPath.size() == 6 && underMetaDataPath(mPath)) {
status_t err = parseITunesMetaData(data_offset, chunk_data_size);
@@ -1647,17 +1724,18 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
}
- *offset += chunk_size;
break;
}
case FOURCC('m', 'v', 'h', 'd'):
{
- if (chunk_data_size < 24) {
+ *offset += chunk_size;
+
+ if (chunk_data_size < 32) {
return ERROR_MALFORMED;
}
- uint8_t header[24];
+ uint8_t header[32];
if (mDataSource->readAt(
data_offset, header, sizeof(header))
< (ssize_t)sizeof(header)) {
@@ -1665,14 +1743,27 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
}
uint64_t creationTime;
+ uint64_t duration = 0;
if (header[0] == 1) {
creationTime = U64_AT(&header[4]);
mHeaderTimescale = U32_AT(&header[20]);
+ duration = U64_AT(&header[24]);
+ if (duration == 0xffffffffffffffff) {
+ duration = 0;
+ }
} else if (header[0] != 0) {
return ERROR_MALFORMED;
} else {
creationTime = U32_AT(&header[4]);
mHeaderTimescale = U32_AT(&header[12]);
+ uint32_t d32 = U32_AT(&header[16]);
+ if (d32 == 0xffffffff) {
+ d32 = 0;
+ }
+ duration = d32;
+ }
+ if (duration != 0) {
+ mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);
}
String8 s;
@@ -1680,7 +1771,50 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mFileMetaData->setCString(kKeyDate, s.string());
+ break;
+ }
+
+ case FOURCC('m', 'e', 'h', 'd'):
+ {
*offset += chunk_size;
+
+ if (chunk_data_size < 8) {
+ return ERROR_MALFORMED;
+ }
+
+ uint8_t flags[4];
+ if (mDataSource->readAt(
+ data_offset, flags, sizeof(flags))
+ < (ssize_t)sizeof(flags)) {
+ return ERROR_IO;
+ }
+
+ uint64_t duration = 0;
+ if (flags[0] == 1) {
+ // 64 bit
+ if (chunk_data_size < 12) {
+ return ERROR_MALFORMED;
+ }
+ mDataSource->getUInt64(data_offset + 4, &duration);
+ if (duration == 0xffffffffffffffff) {
+ duration = 0;
+ }
+ } else if (flags[0] == 0) {
+ // 32 bit
+ uint32_t d32;
+ mDataSource->getUInt32(data_offset + 4, &d32);
+ if (d32 == 0xffffffff) {
+ d32 = 0;
+ }
+ duration = d32;
+ } else {
+ return ERROR_MALFORMED;
+ }
+
+ if (duration != 0) {
+ mFileMetaData->setInt64(kKeyDuration, duration * 1000000 / mHeaderTimescale);
+ }
+
break;
}
@@ -1701,6 +1835,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('h', 'd', 'l', 'r'):
{
+ *offset += chunk_size;
+
uint32_t buffer;
if (mDataSource->readAt(
data_offset + 8, &buffer, 4) < 4) {
@@ -1715,7 +1851,26 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
mLastTrack->meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_TEXT_3GPP);
}
+ break;
+ }
+
+ case FOURCC('t', 'r', 'e', 'x'):
+ {
*offset += chunk_size;
+
+ if (chunk_data_size < 24) {
+ return ERROR_IO;
+ }
+ uint32_t duration;
+ Trex trex;
+ if (!mDataSource->getUInt32(data_offset + 4, &trex.track_ID) ||
+ !mDataSource->getUInt32(data_offset + 8, &trex.default_sample_description_index) ||
+ !mDataSource->getUInt32(data_offset + 12, &trex.default_sample_duration) ||
+ !mDataSource->getUInt32(data_offset + 16, &trex.default_sample_size) ||
+ !mDataSource->getUInt32(data_offset + 20, &trex.default_sample_flags)) {
+ return ERROR_IO;
+ }
+ mTrex.add(trex);
break;
}
@@ -1729,7 +1884,10 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
size = 0;
}
- uint8_t *buffer = new uint8_t[size + chunk_size];
+ uint8_t *buffer = new (std::nothrow) uint8_t[size + chunk_size];
+ if (buffer == NULL) {
+ return ERROR_MALFORMED;
+ }
if (size > 0) {
memcpy(buffer, data, size);
@@ -1740,6 +1898,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
delete[] buffer;
buffer = NULL;
+ // advance read pointer so we don't end up reading this again
+ *offset += chunk_size;
return ERROR_IO;
}
@@ -1754,6 +1914,8 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('c', 'o', 'v', 'r'):
{
+ *offset += chunk_size;
+
if (mFileMetaData != NULL) {
ALOGV("chunk_data_size = %lld and data_offset = %lld",
chunk_data_size, data_offset);
@@ -1768,7 +1930,6 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
buffer->data() + kSkipBytesOfDataBox, chunk_data_size - kSkipBytesOfDataBox);
}
- *offset += chunk_size;
break;
}
@@ -1779,25 +1940,27 @@ status_t MPEG4Extractor::parseChunk(off64_t *offset, int depth) {
case FOURCC('a', 'l', 'b', 'm'):
case FOURCC('y', 'r', 'r', 'c'):
{
+ *offset += chunk_size;
+
status_t err = parse3GPPMetaData(data_offset, chunk_data_size, depth);
if (err != OK) {
return err;
}
- *offset += chunk_size;
break;
}
case FOURCC('I', 'D', '3', '2'):
{
+ *offset += chunk_size;
+
if (chunk_data_size < 6) {
return ERROR_MALFORMED;
}
parseID3v2MetaData(data_offset + 6);
- *offset += chunk_size;
break;
}
@@ -1921,9 +2084,10 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
ALOGW("sub-sidx boxes not supported yet");
}
bool sap = d3 & 0x80000000;
- bool saptype = d3 >> 28;
- if (!sap || saptype > 2) {
- ALOGW("not a stream access point, or unsupported type");
+ uint32_t saptype = (d3 >> 28) & 7;
+ if (!sap || (saptype != 1 && saptype != 2)) {
+ // type 1 and 2 are sync samples
+ ALOGW("not a stream access point, or unsupported type: %08x", d3);
}
total_duration += d2;
offset += 12;
@@ -1934,12 +2098,11 @@ status_t MPEG4Extractor::parseSegmentIndex(off64_t offset, size_t size) {
mSidxEntries.add(se);
}
- mSidxDuration = total_duration * 1000000 / timeScale;
- ALOGV("duration: %lld", mSidxDuration);
+ uint64_t sidxDuration = total_duration * 1000000 / timeScale;
int64_t metaDuration;
if (!mLastTrack->meta->findInt64(kKeyDuration, &metaDuration) || metaDuration == 0) {
- mLastTrack->meta->setInt64(kKeyDuration, mSidxDuration);
+ mLastTrack->meta->setInt64(kKeyDuration, sidxDuration);
}
return OK;
}
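
Note (not part of the patch): the saptype fix above matters because the old code assigned the shifted value to a bool, so every non-zero SAP type collapsed to 1 and the "unsupported type" check could never work. The third 32-bit word of each sidx reference unpacks as in this standalone sketch (field layout per 14496-12, SegmentIndexBox):

// Standalone sketch of unpacking the third 32-bit word of a sidx reference.
#include <cstdint>

struct SidxSapInfo {
    bool     startsWithSap;   // bit 31
    uint32_t sapType;         // bits 28..30 (types 1 and 2 are sync samples)
    uint32_t sapDeltaTime;    // bits 0..27
};

static SidxSapInfo unpackSidxSap(uint32_t d3) {
    SidxSapInfo info;
    info.startsWithSap = (d3 & 0x80000000u) != 0;
    info.sapType       = (d3 >> 28) & 0x7;
    info.sapDeltaTime  = d3 & 0x0FFFFFFFu;
    return info;
}
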
@@ -2040,7 +2203,10 @@ status_t MPEG4Extractor::parseITunesMetaData(off64_t offset, size_t size) {
return ERROR_MALFORMED;
}
- uint8_t *buffer = new uint8_t[size + 1];
+ uint8_t *buffer = new (std::nothrow) uint8_t[size + 1];
+ if (buffer == NULL) {
+ return ERROR_MALFORMED;
+ }
if (mDataSource->readAt(
offset, buffer, size) != (ssize_t)size) {
delete[] buffer;
@@ -2227,7 +2393,10 @@ status_t MPEG4Extractor::parse3GPPMetaData(off64_t offset, size_t size, int dept
return ERROR_MALFORMED;
}
- uint8_t *buffer = new uint8_t[size];
+ uint8_t *buffer = new (std::nothrow) uint8_t[size];
+ if (buffer == NULL) {
+ return ERROR_MALFORMED;
+ }
if (mDataSource->readAt(
offset, buffer, size) != (ssize_t)size) {
delete[] buffer;
@@ -2406,11 +2575,24 @@ sp<MediaSource> MPEG4Extractor::getTrack(size_t index) {
return NULL;
}
+
+ Trex *trex = NULL;
+ int32_t trackId;
+ if (track->meta->findInt32(kKeyTrackID, &trackId)) {
+ for (size_t i = 0; i < mTrex.size(); i++) {
+            Trex *t = &mTrex.editItemAt(i);
+ if (t->track_ID == (uint32_t) trackId) {
+ trex = t;
+ break;
+ }
+ }
+ }
+
ALOGV("getTrack called, pssh: %d", mPssh.size());
return new MPEG4Source(
track->meta, mDataSource, track->timescale, track->sampleTable,
- mSidxEntries, mMoofOffset);
+ mSidxEntries, trex, mMoofOffset);
}
// static
@@ -2426,6 +2608,11 @@ status_t MPEG4Extractor::verifyTrack(Track *track) {
|| type != kTypeAVCC) {
return ERROR_MALFORMED;
}
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC)) {
+ if (!track->meta->findData(kKeyHVCC, &type, &data, &size)
+ || type != kTypeHVCC) {
+ return ERROR_MALFORMED;
+ }
} else if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_MPEG4)
|| !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
if (!track->meta->findData(kKeyESDS, &type, &data, &size)
@@ -2434,14 +2621,67 @@ status_t MPEG4Extractor::verifyTrack(Track *track) {
}
}
- if (!track->sampleTable->isValid()) {
+ if (track->sampleTable == NULL || !track->sampleTable->isValid()) {
// Make sure we have all the metadata we need.
+ ALOGE("stbl atom missing/invalid.");
return ERROR_MALFORMED;
}
return OK;
}
+typedef enum {
+ //AOT_NONE = -1,
+ //AOT_NULL_OBJECT = 0,
+ //AOT_AAC_MAIN = 1, /**< Main profile */
+ AOT_AAC_LC = 2, /**< Low Complexity object */
+ //AOT_AAC_SSR = 3,
+ //AOT_AAC_LTP = 4,
+ AOT_SBR = 5,
+ //AOT_AAC_SCAL = 6,
+ //AOT_TWIN_VQ = 7,
+ //AOT_CELP = 8,
+ //AOT_HVXC = 9,
+ //AOT_RSVD_10 = 10, /**< (reserved) */
+ //AOT_RSVD_11 = 11, /**< (reserved) */
+ //AOT_TTSI = 12, /**< TTSI Object */
+ //AOT_MAIN_SYNTH = 13, /**< Main Synthetic object */
+ //AOT_WAV_TAB_SYNTH = 14, /**< Wavetable Synthesis object */
+ //AOT_GEN_MIDI = 15, /**< General MIDI object */
+ //AOT_ALG_SYNTH_AUD_FX = 16, /**< Algorithmic Synthesis and Audio FX object */
+ AOT_ER_AAC_LC = 17, /**< Error Resilient(ER) AAC Low Complexity */
+ //AOT_RSVD_18 = 18, /**< (reserved) */
+ //AOT_ER_AAC_LTP = 19, /**< Error Resilient(ER) AAC LTP object */
+ AOT_ER_AAC_SCAL = 20, /**< Error Resilient(ER) AAC Scalable object */
+ //AOT_ER_TWIN_VQ = 21, /**< Error Resilient(ER) TwinVQ object */
+ AOT_ER_BSAC = 22, /**< Error Resilient(ER) BSAC object */
+ AOT_ER_AAC_LD = 23, /**< Error Resilient(ER) AAC LowDelay object */
+ //AOT_ER_CELP = 24, /**< Error Resilient(ER) CELP object */
+ //AOT_ER_HVXC = 25, /**< Error Resilient(ER) HVXC object */
+ //AOT_ER_HILN = 26, /**< Error Resilient(ER) HILN object */
+ //AOT_ER_PARA = 27, /**< Error Resilient(ER) Parametric object */
+ //AOT_RSVD_28 = 28, /**< might become SSC */
+ AOT_PS = 29, /**< PS, Parametric Stereo (includes SBR) */
+ //AOT_MPEGS = 30, /**< MPEG Surround */
+
+ AOT_ESCAPE = 31, /**< Signal AOT uses more than 5 bits */
+
+ //AOT_MP3ONMP4_L1 = 32, /**< MPEG-Layer1 in mp4 */
+ //AOT_MP3ONMP4_L2 = 33, /**< MPEG-Layer2 in mp4 */
+ //AOT_MP3ONMP4_L3 = 34, /**< MPEG-Layer3 in mp4 */
+ //AOT_RSVD_35 = 35, /**< might become DST */
+ //AOT_RSVD_36 = 36, /**< might become ALS */
+ //AOT_AAC_SLS = 37, /**< AAC + SLS */
+ //AOT_SLS = 38, /**< SLS */
+ //AOT_ER_AAC_ELD = 39, /**< AAC Enhanced Low Delay */
+
+ //AOT_USAC = 42, /**< USAC */
+ //AOT_SAOC = 43, /**< SAOC */
+ //AOT_LD_MPEGS = 44, /**< Low Delay MPEG Surround */
+
+ //AOT_RSVD50 = 50, /**< Interim AOT for Rsvd50 */
+} AUDIO_OBJECT_TYPE;
+
status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
const void *esds_data, size_t esds_size) {
ESDS esds(esds_data, esds_size);
@@ -2524,7 +2764,7 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
sampleRate = kSamplingRate[freqIndex];
}
- if (objectType == 5 || objectType == 29) { // SBR specific config per 14496-3 table 1.13
+    if (objectType == AOT_SBR || objectType == AOT_PS) { // SBR specific config per 14496-3 table 1.13
uint32_t extFreqIndex = br.getBits(4);
int32_t extSampleRate;
if (extFreqIndex == 15) {
@@ -2542,6 +2782,111 @@ status_t MPEG4Extractor::updateAudioTrackInfoFromESDS_MPEG4Audio(
// mLastTrack->meta->setInt32(kKeyExtSampleRate, extSampleRate);
}
+ switch (numChannels) {
+ // values defined in 14496-3_2009 amendment-4 Table 1.19 - Channel Configuration
+ case 0:
+ case 1:// FC
+ case 2:// FL FR
+ case 3:// FC, FL FR
+ case 4:// FC, FL FR, RC
+ case 5:// FC, FL FR, SL SR
+ case 6:// FC, FL FR, SL SR, LFE
+ //numChannels already contains the right value
+ break;
+ case 11:// FC, FL FR, SL SR, RC, LFE
+ numChannels = 7;
+ break;
+ case 7: // FC, FCL FCR, FL FR, SL SR, LFE
+ case 12:// FC, FL FR, SL SR, RL RR, LFE
+ case 14:// FC, FL FR, SL SR, LFE, FHL FHR
+ numChannels = 8;
+ break;
+ default:
+ return ERROR_UNSUPPORTED;
+ }
+
+ {
+ if (objectType == AOT_SBR || objectType == AOT_PS) {
+ const int32_t extensionSamplingFrequency = br.getBits(4);
+ objectType = br.getBits(5);
+
+ if (objectType == AOT_ESCAPE) {
+ objectType = 32 + br.getBits(6);
+ }
+ }
+ if (objectType == AOT_AAC_LC || objectType == AOT_ER_AAC_LC ||
+ objectType == AOT_ER_AAC_LD || objectType == AOT_ER_AAC_SCAL ||
+ objectType == AOT_ER_BSAC) {
+ const int32_t frameLengthFlag = br.getBits(1);
+
+ const int32_t dependsOnCoreCoder = br.getBits(1);
+
+ if (dependsOnCoreCoder ) {
+ const int32_t coreCoderDelay = br.getBits(14);
+ }
+
+ const int32_t extensionFlag = br.getBits(1);
+
+ if (numChannels == 0 ) {
+ int32_t channelsEffectiveNum = 0;
+ int32_t channelsNum = 0;
+ const int32_t ElementInstanceTag = br.getBits(4);
+ const int32_t Profile = br.getBits(2);
+ const int32_t SamplingFrequencyIndex = br.getBits(4);
+ const int32_t NumFrontChannelElements = br.getBits(4);
+ const int32_t NumSideChannelElements = br.getBits(4);
+ const int32_t NumBackChannelElements = br.getBits(4);
+ const int32_t NumLfeChannelElements = br.getBits(2);
+ const int32_t NumAssocDataElements = br.getBits(3);
+ const int32_t NumValidCcElements = br.getBits(4);
+
+ const int32_t MonoMixdownPresent = br.getBits(1);
+ if (MonoMixdownPresent != 0) {
+ const int32_t MonoMixdownElementNumber = br.getBits(4);
+ }
+
+ const int32_t StereoMixdownPresent = br.getBits(1);
+ if (StereoMixdownPresent != 0) {
+ const int32_t StereoMixdownElementNumber = br.getBits(4);
+ }
+
+ const int32_t MatrixMixdownIndexPresent = br.getBits(1);
+ if (MatrixMixdownIndexPresent != 0) {
+ const int32_t MatrixMixdownIndex = br.getBits(2);
+ const int32_t PseudoSurroundEnable = br.getBits(1);
+ }
+
+ int i;
+ for (i=0; i < NumFrontChannelElements; i++) {
+ const int32_t FrontElementIsCpe = br.getBits(1);
+ const int32_t FrontElementTagSelect = br.getBits(4);
+ channelsNum += FrontElementIsCpe ? 2 : 1;
+ }
+
+ for (i=0; i < NumSideChannelElements; i++) {
+ const int32_t SideElementIsCpe = br.getBits(1);
+ const int32_t SideElementTagSelect = br.getBits(4);
+ channelsNum += SideElementIsCpe ? 2 : 1;
+ }
+
+ for (i=0; i < NumBackChannelElements; i++) {
+ const int32_t BackElementIsCpe = br.getBits(1);
+ const int32_t BackElementTagSelect = br.getBits(4);
+ channelsNum += BackElementIsCpe ? 2 : 1;
+ }
+ channelsEffectiveNum = channelsNum;
+
+ for (i=0; i < NumLfeChannelElements; i++) {
+ const int32_t LfeElementTagSelect = br.getBits(4);
+ channelsNum += 1;
+ }
+ ALOGV("mpeg4 audio channelsNum = %d", channelsNum);
+ ALOGV("mpeg4 audio channelsEffectiveNum = %d", channelsEffectiveNum);
+ numChannels = channelsNum;
+ }
+ }
+ }
+
if (numChannels == 0) {
return ERROR_UNSUPPORTED;
}
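
Note (not part of the patch): the added switch maps the AudioSpecificConfig channelConfiguration field to an actual channel count and falls back to parsing the PCE when the field is 0. The table lookup alone, as a standalone sketch (values from ISO/IEC 14496-3 Table 1.19, as listed in the comments above):

// Standalone sketch of channelConfiguration -> channel count (14496-3 Table 1.19).
#include <cstdint>

// Returns the channel count, 0 if a PCE must be parsed, or -1 if unsupported.
static int aacChannelCount(uint32_t channelConfiguration) {
    switch (channelConfiguration) {
        case 0:  return 0;                      // channel layout carried in a PCE
        case 1: case 2: case 3:
        case 4: case 5: case 6:
            return (int)channelConfiguration;   // config value equals channel count
        case 11: return 7;                      // FC, FL FR, SL SR, RC, LFE
        case 7: case 12: case 14:
            return 8;                           // various 7.1 layouts
        default: return -1;                     // reserved / unsupported
    }
}
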
@@ -2577,6 +2922,7 @@ MPEG4Source::MPEG4Source(
int32_t timeScale,
const sp<SampleTable> &sampleTable,
Vector<SidxEntry> &sidx,
+ const Trex *trex,
off64_t firstMoofOffset)
: mFormat(format),
mDataSource(dataSource),
@@ -2585,6 +2931,7 @@ MPEG4Source::MPEG4Source(
mCurrentSampleIndex(0),
mCurrentFragmentIndex(0),
mSegments(sidx),
+ mTrex(trex),
mFirstMoofOffset(firstMoofOffset),
mCurrentMoofOffset(firstMoofOffset),
mCurrentTime(0),
@@ -2593,6 +2940,7 @@ MPEG4Source::MPEG4Source(
mCurrentSampleInfoOffsetsAllocSize(0),
mCurrentSampleInfoOffsets(NULL),
mIsAVC(false),
+ mIsHEVC(false),
mNALLengthSize(0),
mStarted(false),
mGroup(NULL),
@@ -2600,6 +2948,8 @@ MPEG4Source::MPEG4Source(
mWantsNALFragments(false),
mSrcBuffer(NULL) {
+ memset(&mTrackFragmentHeaderInfo, 0, sizeof(mTrackFragmentHeaderInfo));
+
mFormat->findInt32(kKeyCryptoMode, &mCryptoMode);
mDefaultIVSize = 0;
mFormat->findInt32(kKeyCryptoDefaultIVSize, &mDefaultIVSize);
@@ -2617,6 +2967,7 @@ MPEG4Source::MPEG4Source(
CHECK(success);
mIsAVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC);
+ mIsHEVC = !strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_HEVC);
if (mIsAVC) {
uint32_t type;
@@ -2631,6 +2982,18 @@ MPEG4Source::MPEG4Source(
// The number of bytes used to encode the length of a NAL unit.
mNALLengthSize = 1 + (ptr[4] & 3);
+ } else if (mIsHEVC) {
+ uint32_t type;
+ const void *data;
+ size_t size;
+ CHECK(format->findData(kKeyHVCC, &type, &data, &size));
+
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ CHECK(size >= 7);
+ CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+
+ mNALLengthSize = 1 + (ptr[14 + 7] & 3);
}
CHECK(format->findInt32(kKeyTrackID, &mTrackId));
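
Note (not part of the patch): for both AVC and HEVC the constructor only needs lengthSizeMinusOne out of the codec-config box; avcC keeps it in byte 4, while hvcC keeps it in byte 21 (written as 14 + 7 above). A standalone sketch of that extraction, with slightly stricter size checks than the CHECKs above:

// Standalone sketch: NAL length-field size from avcC / hvcC config records.
#include <cstddef>
#include <cstdint>

// Returns 1..4 (bytes used to encode each NAL unit length), or 0 on bad input.
static size_t nalLengthSize(const uint8_t *config, size_t size, bool isHevc) {
    if (config == NULL || config[0] != 1) return 0;   // configurationVersion must be 1
    if (isHevc) {
        // HEVCDecoderConfigurationRecord: lengthSizeMinusOne lives in byte 21;
        // a valid record is at least 23 bytes (through numOfArrays).
        if (size < 23) return 0;
        return 1 + (config[21] & 3);
    }
    // AVCDecoderConfigurationRecord: lengthSizeMinusOne lives in byte 4.
    if (size < 7) return 0;
    return 1 + (config[4] & 3);
}
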
@@ -2669,7 +3032,11 @@ status_t MPEG4Source::start(MetaData *params) {
mGroup->add_buffer(new MediaBuffer(max_size));
- mSrcBuffer = new uint8_t[max_size];
+ mSrcBuffer = new (std::nothrow) uint8_t[max_size];
+ if (mSrcBuffer == NULL) {
+ // file probably specified a bad max size
+ return ERROR_MALFORMED;
+ }
mStarted = true;
@@ -2742,9 +3109,20 @@ status_t MPEG4Source::parseChunk(off64_t *offset) {
}
}
if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
- // *offset points to the mdat box following this moof
- parseChunk(offset); // doesn't actually parse it, just updates offset
- mNextMoofOffset = *offset;
+ // *offset points to the box following this moof. Find the next moof from there.
+
+ while (true) {
+ if (mDataSource->readAt(*offset, hdr, 8) < 8) {
+ return ERROR_END_OF_STREAM;
+ }
+ chunk_size = ntohl(hdr[0]);
+ chunk_type = ntohl(hdr[1]);
+ if (chunk_type == FOURCC('m', 'o', 'o', 'f')) {
+ mNextMoofOffset = *offset;
+ break;
+ }
+ *offset += chunk_size;
+ }
}
break;
}
@@ -3143,8 +3521,8 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
} else if (mTrackFragmentHeaderInfo.mFlags
& TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
- } else {
- sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
+ } else if (mTrex) {
+ sampleDuration = mTrex->default_sample_duration;
}
if (flags & kSampleSizePresent) {
@@ -3213,6 +3591,7 @@ status_t MPEG4Source::parseTrackFragmentRun(off64_t offset, off64_t size) {
tmp.offset = dataOffset;
tmp.size = sampleSize;
tmp.duration = sampleDuration;
+ tmp.compositionOffset = sampleCtsOffset;
mCurrentSamples.add(tmp);
dataOffset += sampleSize;
@@ -3346,7 +3725,7 @@ status_t MPEG4Source::read(
off64_t offset;
size_t size;
- uint32_t cts;
+ uint32_t cts, stts;
bool isSyncSample;
bool newBuffer = false;
if (mBuffer == NULL) {
@@ -3354,7 +3733,7 @@ status_t MPEG4Source::read(
status_t err =
mSampleTable->getMetaDataForSample(
- mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample);
+ mCurrentSampleIndex, &offset, &size, &cts, &isSyncSample, &stts);
if (err != OK) {
return err;
@@ -3368,7 +3747,7 @@ status_t MPEG4Source::read(
}
}
- if (!mIsAVC || mWantsNALFragments) {
+ if ((!mIsAVC && !mIsHEVC) || mWantsNALFragments) {
if (newBuffer) {
ssize_t num_bytes_read =
mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size);
@@ -3385,6 +3764,8 @@ status_t MPEG4Source::read(
mBuffer->meta_data()->clear();
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)stts * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3398,7 +3779,7 @@ status_t MPEG4Source::read(
++mCurrentSampleIndex;
}
- if (!mIsAVC) {
+ if (!mIsAVC && !mIsHEVC) {
*out = mBuffer;
mBuffer = NULL;
@@ -3507,6 +3888,8 @@ status_t MPEG4Source::read(
mBuffer->meta_data()->clear();
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)stts * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3549,7 +3932,7 @@ status_t MPEG4Source::fragmentedRead(
const SidxEntry *se = &mSegments[i];
if (totalTime + se->mDurationUs > seekTimeUs) {
// The requested time is somewhere in this segment
- if ((mode == ReadOptions::SEEK_NEXT_SYNC) ||
+ if ((mode == ReadOptions::SEEK_NEXT_SYNC && seekTimeUs > totalTime) ||
(mode == ReadOptions::SEEK_CLOSEST_SYNC &&
(seekTimeUs - totalTime) > (totalTime + se->mDurationUs - seekTimeUs))) {
// requested next sync, or closest sync and it was closer to the end of
@@ -3562,11 +3945,19 @@ status_t MPEG4Source::fragmentedRead(
totalTime += se->mDurationUs;
totalOffset += se->mSize;
}
- mCurrentMoofOffset = totalOffset;
- mCurrentSamples.clear();
- mCurrentSampleIndex = 0;
- parseChunk(&totalOffset);
- mCurrentTime = totalTime * mTimescale / 1000000ll;
+ mCurrentMoofOffset = totalOffset;
+ mCurrentSamples.clear();
+ mCurrentSampleIndex = 0;
+ parseChunk(&totalOffset);
+ mCurrentTime = totalTime * mTimescale / 1000000ll;
+ } else {
+ // without sidx boxes, we can only seek to 0
+ mCurrentMoofOffset = mFirstMoofOffset;
+ mCurrentSamples.clear();
+ mCurrentSampleIndex = 0;
+ off64_t tmp = mCurrentMoofOffset;
+ parseChunk(&tmp);
+ mCurrentTime = 0;
}
if (mBuffer != NULL) {
@@ -3578,7 +3969,7 @@ status_t MPEG4Source::fragmentedRead(
}
off64_t offset = 0;
- size_t size;
+ size_t size = 0;
uint32_t cts = 0;
bool isSyncSample = false;
bool newBuffer = false;
@@ -3586,22 +3977,24 @@ status_t MPEG4Source::fragmentedRead(
newBuffer = true;
if (mCurrentSampleIndex >= mCurrentSamples.size()) {
- // move to next fragment
- Sample lastSample = mCurrentSamples[mCurrentSamples.size() - 1];
- off64_t nextMoof = mNextMoofOffset; // lastSample.offset + lastSample.size;
+ // move to next fragment if there is one
+ if (mNextMoofOffset <= mCurrentMoofOffset) {
+ return ERROR_END_OF_STREAM;
+ }
+ off64_t nextMoof = mNextMoofOffset;
mCurrentMoofOffset = nextMoof;
mCurrentSamples.clear();
mCurrentSampleIndex = 0;
parseChunk(&nextMoof);
- if (mCurrentSampleIndex >= mCurrentSamples.size()) {
- return ERROR_END_OF_STREAM;
- }
+ if (mCurrentSampleIndex >= mCurrentSamples.size()) {
+ return ERROR_END_OF_STREAM;
+ }
}
const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex];
offset = smpl->offset;
size = smpl->size;
- cts = mCurrentTime;
+ cts = mCurrentTime + smpl->compositionOffset;
mCurrentTime += smpl->duration;
isSyncSample = (mCurrentSampleIndex == 0); // XXX
@@ -3629,7 +4022,7 @@ status_t MPEG4Source::fragmentedRead(
bufmeta->setData(kKeyCryptoKey, 0, mCryptoKey, 16);
}
- if (!mIsAVC || mWantsNALFragments) {
+    if ((!mIsAVC && !mIsHEVC) || mWantsNALFragments) {
if (newBuffer) {
ssize_t num_bytes_read =
mDataSource->readAt(offset, (uint8_t *)mBuffer->data(), size);
@@ -3646,6 +4039,8 @@ status_t MPEG4Source::fragmentedRead(
mBuffer->set_range(0, size);
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)smpl->duration * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3659,7 +4054,7 @@ status_t MPEG4Source::fragmentedRead(
++mCurrentSampleIndex;
}
- if (!mIsAVC) {
+ if (!mIsAVC && !mIsHEVC) {
*out = mBuffer;
mBuffer = NULL;
@@ -3769,6 +4164,8 @@ status_t MPEG4Source::fragmentedRead(
mBuffer->meta_data()->setInt64(
kKeyTime, ((int64_t)cts * 1000000) / mTimescale);
+ mBuffer->meta_data()->setInt64(
+ kKeyDuration, ((int64_t)smpl->duration * 1000000) / mTimescale);
if (targetSampleTimeUs >= 0) {
mBuffer->meta_data()->setInt64(
@@ -3831,6 +4228,8 @@ static bool isCompatibleBrand(uint32_t fourcc) {
FOURCC('i', 's', 'o', 'm'),
FOURCC('i', 's', 'o', '2'),
FOURCC('a', 'v', 'c', '1'),
+ FOURCC('h', 'v', 'c', '1'),
+ FOURCC('h', 'e', 'v', '1'),
FOURCC('3', 'g', 'p', '4'),
FOURCC('m', 'p', '4', '1'),
FOURCC('m', 'p', '4', '2'),
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index 58a4487..24e53b3 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -41,6 +41,12 @@
#include "include/ESDS.h"
+#define WARN_UNLESS(condition, message, ...) \
+( (CONDITION(condition)) ? false : ({ \
+ ALOGW("Condition %s failed " message, #condition, ##__VA_ARGS__); \
+ true; \
+}))
+
namespace android {
static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
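
Note (not part of the patch): WARN_UNLESS is what this patch uses to turn the writer's hard CHECK_GE aborts into recoverable ERROR_MALFORMED returns; the macro evaluates to true exactly when the condition fails, after logging it. A simplified, standalone rendering of the same pattern (fprintf stands in for ALOGW so the sketch builds outside the tree):

// Simplified, standalone rendering of the WARN_UNLESS early-return pattern.
#include <cstdio>

#define SKETCH_WARN_UNLESS(condition, message)                              \
    ((condition) ? false                                                    \
                 : (fprintf(stderr, "Condition %s failed %s\n",             \
                            #condition, message), true))

static int writeSample(long long timestampUs, long long lastTimestampUs) {
    // Instead of CHECK_GE(timestampUs, 0ll), log and bail out:
    if (SKETCH_WARN_UNLESS(timestampUs >= 0ll, "for this track")) {
        return -1;   // caller maps this to ERROR_MALFORMED
    }
    long long durationUs = timestampUs - lastTimestampUs;
    return (int)(durationUs > 0);
}
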
@@ -975,13 +981,16 @@ void MPEG4Writer::writeFtypBox(MetaData *param) {
if (param && param->findInt32(kKeyFileType, &fileType) &&
fileType != OUTPUT_FORMAT_MPEG_4) {
writeFourcc("3gp4");
+ writeInt32(0);
+ writeFourcc("isom");
+ writeFourcc("3gp4");
} else {
+ writeFourcc("mp42");
+ writeInt32(0);
writeFourcc("isom");
+ writeFourcc("mp42");
}
- writeInt32(0);
- writeFourcc("isom");
- writeFourcc("3gp4");
endBox();
}
@@ -1763,7 +1772,7 @@ status_t MPEG4Writer::Track::pause() {
}
status_t MPEG4Writer::Track::stop() {
- ALOGD("Stopping %s track", mIsAudio? "Audio": "Video");
+ ALOGD("%s track stopping", mIsAudio? "Audio": "Video");
if (!mStarted) {
ALOGE("Stop() called but track is not started");
return ERROR_END_OF_STREAM;
@@ -1774,19 +1783,14 @@ status_t MPEG4Writer::Track::stop() {
}
mDone = true;
+ ALOGD("%s track source stopping", mIsAudio? "Audio": "Video");
+ mSource->stop();
+ ALOGD("%s track source stopped", mIsAudio? "Audio": "Video");
+
void *dummy;
pthread_join(mThread, &dummy);
-
status_t err = static_cast<status_t>(reinterpret_cast<uintptr_t>(dummy));
- ALOGD("Stopping %s track source", mIsAudio? "Audio": "Video");
- {
- status_t status = mSource->stop();
- if (err == OK && status != OK && status != ERROR_END_OF_STREAM) {
- err = status;
- }
- }
-
ALOGD("%s track stopped", mIsAudio? "Audio": "Video");
return err;
}
@@ -2100,6 +2104,7 @@ status_t MPEG4Writer::Track::threadEntry() {
status_t err = OK;
MediaBuffer *buffer;
+ const char *trackName = mIsAudio ? "Audio" : "Video";
while (!mDone && (err = mSource->read(&buffer)) == OK) {
if (buffer->range_length() == 0) {
buffer->release();
@@ -2195,15 +2200,27 @@ status_t MPEG4Writer::Track::threadEntry() {
if (mResumed) {
int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
- CHECK_GE(durExcludingEarlierPausesUs, 0ll);
+ if (WARN_UNLESS(durExcludingEarlierPausesUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
- CHECK_GE(pausedDurationUs, lastDurationUs);
+ if (WARN_UNLESS(pausedDurationUs >= lastDurationUs, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
previousPausedDurationUs += pausedDurationUs - lastDurationUs;
mResumed = false;
}
timestampUs -= previousPausedDurationUs;
- CHECK_GE(timestampUs, 0ll);
+ if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
if (!mIsAudio) {
/*
* Composition time: timestampUs
@@ -2215,7 +2232,11 @@ status_t MPEG4Writer::Track::threadEntry() {
decodingTimeUs -= previousPausedDurationUs;
cttsOffsetTimeUs =
timestampUs + kMaxCttsOffsetTimeUs - decodingTimeUs;
- CHECK_GE(cttsOffsetTimeUs, 0ll);
+ if (WARN_UNLESS(cttsOffsetTimeUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
timestampUs = decodingTimeUs;
ALOGV("decoding time: %lld and ctts offset time: %lld",
timestampUs, cttsOffsetTimeUs);
@@ -2223,7 +2244,11 @@ status_t MPEG4Writer::Track::threadEntry() {
// Update ctts box table if necessary
currCttsOffsetTimeTicks =
(cttsOffsetTimeUs * mTimeScale + 500000LL) / 1000000LL;
- CHECK_LE(currCttsOffsetTimeTicks, 0x0FFFFFFFFLL);
+ if (WARN_UNLESS(currCttsOffsetTimeTicks <= 0x0FFFFFFFFLL, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
if (mStszTableEntries->count() == 0) {
// Force the first ctts table entry to have one single entry
// so that we can do adjustment for the initial track start
@@ -2261,9 +2286,13 @@ status_t MPEG4Writer::Track::threadEntry() {
}
}
- CHECK_GE(timestampUs, 0ll);
+ if (WARN_UNLESS(timestampUs >= 0ll, "for %s track", trackName)) {
+ copy->release();
+ return ERROR_MALFORMED;
+ }
+
ALOGV("%s media time stamp: %lld and previous paused duration %lld",
- mIsAudio? "Audio": "Video", timestampUs, previousPausedDurationUs);
+ trackName, timestampUs, previousPausedDurationUs);
if (timestampUs > mTrackDurationUs) {
mTrackDurationUs = timestampUs;
}
@@ -2278,10 +2307,27 @@ status_t MPEG4Writer::Track::threadEntry() {
(lastTimestampUs * mTimeScale + 500000LL) / 1000000LL);
if (currDurationTicks < 0ll) {
ALOGE("timestampUs %lld < lastTimestampUs %lld for %s track",
- timestampUs, lastTimestampUs, mIsAudio? "Audio": "Video");
+ timestampUs, lastTimestampUs, trackName);
+ copy->release();
return UNKNOWN_ERROR;
}
+ // if the duration is different for this sample, see if it is close enough to the previous
+ // duration that we can fudge it and use the same value, to avoid filling the stts table
+ // with lots of near-identical entries.
+ // "close enough" here means that the current duration needs to be adjusted by less
+ // than 0.1 milliseconds
+ if (lastDurationTicks && (currDurationTicks != lastDurationTicks)) {
+ int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL
+ + (mTimeScale / 2)) / mTimeScale;
+ if (deltaUs > -100 && deltaUs < 100) {
+ // use previous ticks, and adjust timestamp as if it was actually that number
+ // of ticks
+ currDurationTicks = lastDurationTicks;
+ timestampUs += deltaUs;
+ }
+ }
+
mStszTableEntries->add(htonl(sampleSize));
if (mStszTableEntries->count() > 2) {
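
Note (not part of the patch): the block above keeps the stts table compact by snapping a sample duration onto the previous one whenever the difference is under 0.1 ms, and nudging the timestamp by the same amount so no drift accumulates. The rounding arithmetic, pulled out into a standalone sketch:

// Standalone sketch of the "snap near-identical durations" rounding used above.
#include <cstdint>

struct SnapResult {
    int64_t durationTicks;
    int64_t timestampUs;     // possibly nudged by the snapped delta
};

static SnapResult snapDuration(int64_t currDurationTicks, int64_t lastDurationTicks,
                               int64_t timestampUs, int64_t timeScale) {
    SnapResult r = { currDurationTicks, timestampUs };
    if (lastDurationTicks != 0 && currDurationTicks != lastDurationTicks) {
        // Rounded difference between the two durations, in microseconds.
        int64_t deltaUs = ((lastDurationTicks - currDurationTicks) * 1000000LL
                + (timeScale / 2)) / timeScale;
        if (deltaUs > -100 && deltaUs < 100) {      // within 0.1 ms either way
            r.durationTicks = lastDurationTicks;    // reuse the previous stts entry
            r.timestampUs  += deltaUs;              // keep the timeline consistent
        }
    }
    return r;
}
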
@@ -2302,7 +2348,7 @@ status_t MPEG4Writer::Track::threadEntry() {
previousSampleSize = sampleSize;
}
ALOGV("%s timestampUs/lastTimestampUs: %lld/%lld",
- mIsAudio? "Audio": "Video", timestampUs, lastTimestampUs);
+ trackName, timestampUs, lastTimestampUs);
lastDurationUs = timestampUs - lastTimestampUs;
lastDurationTicks = currDurationTicks;
lastTimestampUs = timestampUs;
@@ -2407,7 +2453,7 @@ status_t MPEG4Writer::Track::threadEntry() {
sendTrackSummary(hasMultipleTracks);
ALOGI("Received total/0-length (%d/%d) buffers and encoded %d frames. - %s",
- count, nZeroLengthFrames, mStszTableEntries->count(), mIsAudio? "audio": "video");
+ count, nZeroLengthFrames, mStszTableEntries->count(), trackName);
if (mIsAudio) {
ALOGI("Audio track drift time: %lld us", mOwner->getDriftTimeUs());
}
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 3cb4217..b9c5904 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaCodec"
#include <utils/Log.h>
+#include <inttypes.h>
#include <media/stagefright/MediaCodec.h>
@@ -323,6 +324,16 @@ status_t MediaCodec::renderOutputBufferAndRelease(size_t index) {
return PostAndAwaitResponse(msg, &response);
}
+status_t MediaCodec::renderOutputBufferAndRelease(size_t index, int64_t timestampNs) {
+ sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
+ msg->setSize("index", index);
+ msg->setInt32("render", true);
+ msg->setInt64("timestampNs", timestampNs);
+
+ sp<AMessage> response;
+ return PostAndAwaitResponse(msg, &response);
+}
+
status_t MediaCodec::releaseOutputBuffer(size_t index) {
sp<AMessage> msg = new AMessage(kWhatReleaseOutputBuffer, id());
msg->setSize("index", index);
@@ -352,6 +363,20 @@ status_t MediaCodec::getOutputFormat(sp<AMessage> *format) const {
return OK;
}
+status_t MediaCodec::getInputFormat(sp<AMessage> *format) const {
+ sp<AMessage> msg = new AMessage(kWhatGetInputFormat, id());
+
+ sp<AMessage> response;
+ status_t err;
+ if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+ return err;
+ }
+
+ CHECK(response->findMessage("format", format));
+
+ return OK;
+}
+
status_t MediaCodec::getName(AString *name) const {
sp<AMessage> msg = new AMessage(kWhatGetName, id());
@@ -589,6 +614,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
postActivityNotificationIfPossible();
cancelPendingDequeueOperations();
+ setState(UNINITIALIZED);
break;
}
@@ -598,6 +624,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
mFlags |= kFlagStickyError;
postActivityNotificationIfPossible();
+ setState(UNINITIALIZED);
break;
}
}
@@ -642,6 +669,9 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
// reset input surface flag
mHaveInputSurface = false;
+ CHECK(msg->findMessage("input-format", &mInputFormat));
+ CHECK(msg->findMessage("output-format", &mOutputFormat));
+
(new AMessage)->postReply(mReplyID);
break;
}
@@ -1330,14 +1360,19 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
break;
}
+ case kWhatGetInputFormat:
case kWhatGetOutputFormat:
{
+ sp<AMessage> format =
+ (msg->what() == kWhatGetOutputFormat ? mOutputFormat : mInputFormat);
+
uint32_t replyID;
CHECK(msg->senderAwaitsResponse(&replyID));
- if ((mState != STARTED && mState != FLUSHING)
+ if ((mState != CONFIGURED && mState != STARTING &&
+ mState != STARTED && mState != FLUSHING)
|| (mFlags & kFlagStickyError)
- || mOutputFormat == NULL) {
+ || format == NULL) {
sp<AMessage> response = new AMessage;
response->setInt32("err", INVALID_OPERATION);
@@ -1346,7 +1381,7 @@ void MediaCodec::onMessageReceived(const sp<AMessage> &msg) {
}
sp<AMessage> response = new AMessage;
- response->setMessage("format", mOutputFormat);
+ response->setMessage("format", format);
response->postReply(replyID);
break;
}
@@ -1683,9 +1718,25 @@ status_t MediaCodec::onReleaseOutputBuffer(const sp<AMessage> &msg) {
if (render && info->mData != NULL && info->mData->size() != 0) {
info->mNotify->setInt32("render", true);
+ int64_t timestampNs = 0;
+ if (msg->findInt64("timestampNs", &timestampNs)) {
+ info->mNotify->setInt64("timestampNs", timestampNs);
+ } else {
+ // TODO: it seems like we should use the timestamp
+ // in the (media)buffer as it potentially came from
+ // an input surface, but we did not propagate it prior to
+ // API 20. Perhaps check for target SDK version.
+#if 0
+ if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
+ ALOGV("using buffer PTS of %" PRId64, timestampNs);
+ timestampNs *= 1000;
+ }
+#endif
+ }
+
if (mSoftRenderer != NULL) {
mSoftRenderer->render(
- info->mData->data(), info->mData->size(), NULL);
+ info->mData->data(), info->mData->size(), timestampNs, NULL);
}
}
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index b74b2e2..cd51582 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -48,14 +48,39 @@ const MediaCodecList *MediaCodecList::getInstance() {
MediaCodecList::MediaCodecList()
: mInitCheck(NO_INIT) {
- FILE *file = fopen("/etc/media_codecs.xml", "r");
+ parseTopLevelXMLFile("/etc/media_codecs.xml");
+}
- if (file == NULL) {
- ALOGW("unable to open media codecs configuration xml file.");
+void MediaCodecList::parseTopLevelXMLFile(const char *codecs_xml) {
+ // get href_base
+ char *href_base_end = strrchr(codecs_xml, '/');
+ if (href_base_end != NULL) {
+ mHrefBase = AString(codecs_xml, href_base_end - codecs_xml + 1);
+ }
+
+ mInitCheck = OK;
+ mCurrentSection = SECTION_TOPLEVEL;
+ mDepth = 0;
+
+ parseXMLFile(codecs_xml);
+
+ if (mInitCheck != OK) {
+ mCodecInfos.clear();
+ mCodecQuirks.clear();
return;
}
- parseXMLFile(file);
+ for (size_t i = mCodecInfos.size(); i-- > 0;) {
+ CodecInfo *info = &mCodecInfos.editItemAt(i);
+
+ if (info->mTypes == 0) {
+ // No types supported by this component???
+ ALOGW("Component %s does not support any type of media?",
+ info->mName.c_str());
+
+ mCodecInfos.removeAt(i);
+ }
+ }
#if 0
for (size_t i = 0; i < mCodecInfos.size(); ++i) {
@@ -75,9 +100,6 @@ MediaCodecList::MediaCodecList()
ALOGI("%s", line.c_str());
}
#endif
-
- fclose(file);
- file = NULL;
}
MediaCodecList::~MediaCodecList() {
@@ -87,10 +109,14 @@ status_t MediaCodecList::initCheck() const {
return mInitCheck;
}
-void MediaCodecList::parseXMLFile(FILE *file) {
- mInitCheck = OK;
- mCurrentSection = SECTION_TOPLEVEL;
- mDepth = 0;
+void MediaCodecList::parseXMLFile(const char *path) {
+ FILE *file = fopen(path, "r");
+
+ if (file == NULL) {
+ ALOGW("unable to open media codecs configuration xml file: %s", path);
+ mInitCheck = NAME_NOT_FOUND;
+ return;
+ }
XML_Parser parser = ::XML_ParserCreate(NULL);
CHECK(parser != NULL);
@@ -103,7 +129,7 @@ void MediaCodecList::parseXMLFile(FILE *file) {
while (mInitCheck == OK) {
void *buff = ::XML_GetBuffer(parser, BUFF_SIZE);
if (buff == NULL) {
- ALOGE("failed to in call to XML_GetBuffer()");
+ ALOGE("failed in call to XML_GetBuffer()");
mInitCheck = UNKNOWN_ERROR;
break;
}
@@ -115,8 +141,9 @@ void MediaCodecList::parseXMLFile(FILE *file) {
break;
}
- if (::XML_ParseBuffer(parser, bytes_read, bytes_read == 0)
- != XML_STATUS_OK) {
+ XML_Status status = ::XML_ParseBuffer(parser, bytes_read, bytes_read == 0);
+ if (status != XML_STATUS_OK) {
+ ALOGE("malformed (%s)", ::XML_ErrorString(::XML_GetErrorCode(parser)));
mInitCheck = ERROR_MALFORMED;
break;
}
@@ -128,25 +155,8 @@ void MediaCodecList::parseXMLFile(FILE *file) {
::XML_ParserFree(parser);
- if (mInitCheck == OK) {
- for (size_t i = mCodecInfos.size(); i-- > 0;) {
- CodecInfo *info = &mCodecInfos.editItemAt(i);
-
- if (info->mTypes == 0) {
- // No types supported by this component???
-
- ALOGW("Component %s does not support any type of media?",
- info->mName.c_str());
-
- mCodecInfos.removeAt(i);
- }
- }
- }
-
- if (mInitCheck != OK) {
- mCodecInfos.clear();
- mCodecQuirks.clear();
- }
+ fclose(file);
+ file = NULL;
}
// static
@@ -160,12 +170,63 @@ void MediaCodecList::EndElementHandlerWrapper(void *me, const char *name) {
static_cast<MediaCodecList *>(me)->endElementHandler(name);
}
+status_t MediaCodecList::includeXMLFile(const char **attrs) {
+ const char *href = NULL;
+ size_t i = 0;
+ while (attrs[i] != NULL) {
+ if (!strcmp(attrs[i], "href")) {
+ if (attrs[i + 1] == NULL) {
+ return -EINVAL;
+ }
+ href = attrs[i + 1];
+ ++i;
+ } else {
+ return -EINVAL;
+ }
+ ++i;
+ }
+
+ // For security reasons and for simplicity, file names can only contain
+ // [a-zA-Z0-9_.] and must start with media_codecs_ and end with .xml
+ for (i = 0; href[i] != '\0'; i++) {
+ if (href[i] == '.' || href[i] == '_' ||
+ (href[i] >= '0' && href[i] <= '9') ||
+ (href[i] >= 'A' && href[i] <= 'Z') ||
+ (href[i] >= 'a' && href[i] <= 'z')) {
+ continue;
+ }
+ ALOGE("invalid include file name: %s", href);
+ return -EINVAL;
+ }
+
+ AString filename = href;
+ if (!filename.startsWith("media_codecs_") ||
+ !filename.endsWith(".xml")) {
+ ALOGE("invalid include file name: %s", href);
+ return -EINVAL;
+ }
+ filename.insert(mHrefBase, 0);
+
+ parseXMLFile(filename.c_str());
+ return mInitCheck;
+}
+
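
Note (not part of the patch): includeXMLFile() whitelists the allowed characters and the media_codecs_*.xml naming convention before following an <Include href=.../>, so a crafted config cannot pull in arbitrary files. The filename check alone, as a standalone sketch:

// Standalone sketch of the include-filename whitelist applied above.
#include <cstring>
#include <string>

static bool isSafeIncludeName(const std::string &href) {
    for (size_t i = 0; i < href.size(); i++) {
        char c = href[i];
        bool ok = (c == '.' || c == '_' ||
                   (c >= '0' && c <= '9') ||
                   (c >= 'A' && c <= 'Z') ||
                   (c >= 'a' && c <= 'z'));
        if (!ok) return false;                     // no '/', no "..", no spaces
    }
    static const char kPrefix[] = "media_codecs_";
    static const char kSuffix[] = ".xml";
    return href.size() >= strlen(kPrefix) + strlen(kSuffix) &&
           href.compare(0, strlen(kPrefix), kPrefix) == 0 &&
           href.compare(href.size() - strlen(kSuffix), strlen(kSuffix), kSuffix) == 0;
}
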
void MediaCodecList::startElementHandler(
const char *name, const char **attrs) {
if (mInitCheck != OK) {
return;
}
+ if (!strcmp(name, "Include")) {
+ mInitCheck = includeXMLFile(attrs);
+ if (mInitCheck == OK) {
+ mPastSections.push(mCurrentSection);
+ mCurrentSection = SECTION_INCLUDE;
+ }
+ ++mDepth;
+ return;
+ }
+
switch (mCurrentSection) {
case SECTION_TOPLEVEL:
{
@@ -255,6 +316,15 @@ void MediaCodecList::endElementHandler(const char *name) {
break;
}
+ case SECTION_INCLUDE:
+ {
+ if (!strcmp(name, "Include") && mPastSections.size() > 0) {
+ mCurrentSection = mPastSections.top();
+ mPastSections.pop();
+ }
+ break;
+ }
+
default:
break;
}
diff --git a/media/libstagefright/MediaCodecSource.cpp b/media/libstagefright/MediaCodecSource.cpp
new file mode 100644
index 0000000..924173c
--- /dev/null
+++ b/media/libstagefright/MediaCodecSource.cpp
@@ -0,0 +1,881 @@
+/*
+ * Copyright 2014, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaCodecSource"
+#define DEBUG_DRIFT_TIME 0
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <media/ICrypto.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaCodecSource.h>
+#include <media/stagefright/Utils.h>
+
+namespace android {
+
+static void ReleaseMediaBufferReference(const sp<ABuffer> &accessUnit) {
+ void *mbuf;
+ if (accessUnit->meta()->findPointer("mediaBuffer", &mbuf)
+ && mbuf != NULL) {
+ ALOGV("releasing mbuf %p", mbuf);
+
+ accessUnit->meta()->setPointer("mediaBuffer", NULL);
+
+ static_cast<MediaBuffer *>(mbuf)->release();
+ mbuf = NULL;
+ }
+}
+
+struct MediaCodecSource::Puller : public AHandler {
+ Puller(const sp<MediaSource> &source);
+
+ status_t start(const sp<MetaData> &meta, const sp<AMessage> &notify);
+ void stopAsync();
+
+ void pause();
+ void resume();
+
+protected:
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+ virtual ~Puller();
+
+private:
+ enum {
+ kWhatStart = 'msta',
+ kWhatStop,
+ kWhatPull,
+ kWhatPause,
+ kWhatResume,
+ };
+
+ sp<MediaSource> mSource;
+ sp<AMessage> mNotify;
+ sp<ALooper> mLooper;
+ int32_t mPullGeneration;
+ bool mIsAudio;
+ bool mPaused;
+ bool mReachedEOS;
+
+ status_t postSynchronouslyAndReturnError(const sp<AMessage> &msg);
+ void schedulePull();
+ void handleEOS();
+
+ DISALLOW_EVIL_CONSTRUCTORS(Puller);
+};
+
+MediaCodecSource::Puller::Puller(const sp<MediaSource> &source)
+ : mSource(source),
+ mLooper(new ALooper()),
+ mPullGeneration(0),
+ mIsAudio(false),
+ mPaused(false),
+ mReachedEOS(false) {
+ sp<MetaData> meta = source->getFormat();
+ const char *mime;
+ CHECK(meta->findCString(kKeyMIMEType, &mime));
+
+ mIsAudio = !strncasecmp(mime, "audio/", 6);
+
+ mLooper->setName("pull_looper");
+}
+
+MediaCodecSource::Puller::~Puller() {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+}
+
+status_t MediaCodecSource::Puller::postSynchronouslyAndReturnError(
+ const sp<AMessage> &msg) {
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+status_t MediaCodecSource::Puller::start(const sp<MetaData> &meta,
+ const sp<AMessage> &notify) {
+ ALOGV("puller (%s) start", mIsAudio ? "audio" : "video");
+ mLooper->start(
+ false /* runOnCallingThread */,
+ false /* canCallJava */,
+ PRIORITY_AUDIO);
+ mLooper->registerHandler(this);
+ mNotify = notify;
+
+ sp<AMessage> msg = new AMessage(kWhatStart, id());
+ msg->setObject("meta", meta);
+ return postSynchronouslyAndReturnError(msg);
+}
+
+void MediaCodecSource::Puller::stopAsync() {
+ ALOGV("puller (%s) stopAsync", mIsAudio ? "audio" : "video");
+ (new AMessage(kWhatStop, id()))->post();
+}
+
+void MediaCodecSource::Puller::pause() {
+ (new AMessage(kWhatPause, id()))->post();
+}
+
+void MediaCodecSource::Puller::resume() {
+ (new AMessage(kWhatResume, id()))->post();
+}
+
+void MediaCodecSource::Puller::schedulePull() {
+ sp<AMessage> msg = new AMessage(kWhatPull, id());
+ msg->setInt32("generation", mPullGeneration);
+ msg->post();
+}
+
+void MediaCodecSource::Puller::handleEOS() {
+ if (!mReachedEOS) {
+ ALOGV("puller (%s) posting EOS", mIsAudio ? "audio" : "video");
+ mReachedEOS = true;
+ sp<AMessage> notify = mNotify->dup();
+ notify->setPointer("accessUnit", NULL);
+ notify->post();
+ }
+}
+
+void MediaCodecSource::Puller::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatStart:
+ {
+ sp<RefBase> obj;
+ CHECK(msg->findObject("meta", &obj));
+
+ mReachedEOS = false;
+
+ status_t err = mSource->start(static_cast<MetaData *>(obj.get()));
+
+ if (err == OK) {
+ schedulePull();
+ }
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", err);
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+ response->postReply(replyID);
+ break;
+ }
+
+ case kWhatStop:
+ {
+ ALOGV("source (%s) stopping", mIsAudio ? "audio" : "video");
+ mSource->stop();
+ ALOGV("source (%s) stopped", mIsAudio ? "audio" : "video");
+ ++mPullGeneration;
+
+ handleEOS();
+ break;
+ }
+
+ case kWhatPull:
+ {
+ int32_t generation;
+ CHECK(msg->findInt32("generation", &generation));
+
+ if (generation != mPullGeneration) {
+ break;
+ }
+
+ MediaBuffer *mbuf;
+ status_t err = mSource->read(&mbuf);
+
+ if (mPaused) {
+ if (err == OK) {
+ mbuf->release();
+ mbuf = NULL;
+ }
+
+ msg->post();
+ break;
+ }
+
+ if (err != OK) {
+ if (err == ERROR_END_OF_STREAM) {
+ ALOGV("stream ended, mbuf %p", mbuf);
+ } else {
+ ALOGE("error %d reading stream.", err);
+ }
+ handleEOS();
+ } else {
+ sp<AMessage> notify = mNotify->dup();
+
+ notify->setPointer("accessUnit", mbuf);
+ notify->post();
+
+ msg->post();
+ }
+ break;
+ }
+
+ case kWhatPause:
+ {
+ mPaused = true;
+ break;
+ }
+
+ case kWhatResume:
+ {
+ mPaused = false;
+ break;
+ }
+
+ default:
+ TRESPASS();
+ }
+}
+
+// static
+sp<MediaCodecSource> MediaCodecSource::Create(
+ const sp<ALooper> &looper,
+ const sp<AMessage> &format,
+ const sp<MediaSource> &source,
+ uint32_t flags) {
+ sp<MediaCodecSource> mediaSource =
+ new MediaCodecSource(looper, format, source, flags);
+
+ if (mediaSource->init() == OK) {
+ return mediaSource;
+ }
+ return NULL;
+}
+
+status_t MediaCodecSource::start(MetaData* params) {
+ sp<AMessage> msg = new AMessage(kWhatStart, mReflector->id());
+ msg->setObject("meta", params);
+ return postSynchronouslyAndReturnError(msg);
+}
+
+status_t MediaCodecSource::stop() {
+ sp<AMessage> msg = new AMessage(kWhatStop, mReflector->id());
+ return postSynchronouslyAndReturnError(msg);
+}
+
+status_t MediaCodecSource::pause() {
+ (new AMessage(kWhatPause, mReflector->id()))->post();
+ return OK;
+}
+
+sp<IGraphicBufferProducer> MediaCodecSource::getGraphicBufferProducer() {
+ CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+ return mGraphicBufferProducer;
+}
+
+status_t MediaCodecSource::read(
+ MediaBuffer** buffer, const ReadOptions* /* options */) {
+ Mutex::Autolock autolock(mOutputBufferLock);
+
+ *buffer = NULL;
+ while (mOutputBufferQueue.size() == 0 && !mEncodedReachedEOS) {
+ mOutputBufferCond.wait(mOutputBufferLock);
+ }
+ if (!mEncodedReachedEOS) {
+ *buffer = *mOutputBufferQueue.begin();
+ mOutputBufferQueue.erase(mOutputBufferQueue.begin());
+ return OK;
+ }
+ return mErrorCode;
+}
+
+void MediaCodecSource::signalBufferReturned(MediaBuffer *buffer) {
+ buffer->setObserver(0);
+ buffer->release();
+}
+
+MediaCodecSource::MediaCodecSource(
+ const sp<ALooper> &looper,
+ const sp<AMessage> &outputFormat,
+ const sp<MediaSource> &source,
+ uint32_t flags)
+ : mLooper(looper),
+ mOutputFormat(outputFormat),
+ mMeta(new MetaData),
+ mFlags(flags),
+ mIsVideo(false),
+ mStarted(false),
+ mStopping(false),
+ mDoMoreWorkPending(false),
+ mPullerReachedEOS(false),
+ mFirstSampleTimeUs(-1ll),
+ mEncodedReachedEOS(false),
+ mErrorCode(OK) {
+ CHECK(mLooper != NULL);
+
+ AString mime;
+ CHECK(mOutputFormat->findString("mime", &mime));
+
+ if (!strncasecmp("video/", mime.c_str(), 6)) {
+ mIsVideo = true;
+ }
+
+ if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
+ mPuller = new Puller(source);
+ }
+}
+
+MediaCodecSource::~MediaCodecSource() {
+ releaseEncoder();
+
+ mCodecLooper->stop();
+ mLooper->unregisterHandler(mReflector->id());
+}
+
+status_t MediaCodecSource::init() {
+ status_t err = initEncoder();
+
+ if (err != OK) {
+ releaseEncoder();
+ }
+
+ return err;
+}
+
+status_t MediaCodecSource::initEncoder() {
+ mReflector = new AHandlerReflector<MediaCodecSource>(this);
+ mLooper->registerHandler(mReflector);
+
+ mCodecLooper = new ALooper;
+ mCodecLooper->setName("codec_looper");
+ mCodecLooper->start();
+
+ if (mFlags & FLAG_USE_METADATA_INPUT) {
+ mOutputFormat->setInt32("store-metadata-in-buffers", 1);
+ }
+
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ mOutputFormat->setInt32("create-input-buffers-suspended", 1);
+ }
+
+ AString outputMIME;
+ CHECK(mOutputFormat->findString("mime", &outputMIME));
+
+ mEncoder = MediaCodec::CreateByType(
+ mCodecLooper, outputMIME.c_str(), true /* encoder */);
+
+ if (mEncoder == NULL) {
+ return NO_INIT;
+ }
+
+ ALOGV("output format is '%s'", mOutputFormat->debugString(0).c_str());
+
+ status_t err = mEncoder->configure(
+ mOutputFormat,
+ NULL /* nativeWindow */,
+ NULL /* crypto */,
+ MediaCodec::CONFIGURE_FLAG_ENCODE);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mEncoder->getOutputFormat(&mOutputFormat);
+ convertMessageToMetaData(mOutputFormat, mMeta);
+
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ CHECK(mIsVideo);
+
+ err = mEncoder->createInputSurface(&mGraphicBufferProducer);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ err = mEncoder->start();
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = mEncoder->getInputBuffers(&mEncoderInputBuffers);
+
+ if (err != OK) {
+ return err;
+ }
+
+ err = mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+
+ if (err != OK) {
+ return err;
+ }
+
+ mEncodedReachedEOS = false;
+ mErrorCode = OK;
+
+ return OK;
+}
+
+void MediaCodecSource::releaseEncoder() {
+ if (mEncoder == NULL) {
+ return;
+ }
+
+ mEncoder->release();
+ mEncoder.clear();
+
+ while (!mInputBufferQueue.empty()) {
+ MediaBuffer *mbuf = *mInputBufferQueue.begin();
+ mInputBufferQueue.erase(mInputBufferQueue.begin());
+ if (mbuf != NULL) {
+ mbuf->release();
+ }
+ }
+
+ for (size_t i = 0; i < mEncoderInputBuffers.size(); ++i) {
+ sp<ABuffer> accessUnit = mEncoderInputBuffers.itemAt(i);
+ ReleaseMediaBufferReference(accessUnit);
+ }
+
+ mEncoderInputBuffers.clear();
+ mEncoderOutputBuffers.clear();
+}
+
+bool MediaCodecSource::reachedEOS() {
+ return mEncodedReachedEOS && ((mPuller == NULL) || mPullerReachedEOS);
+}
+
+status_t MediaCodecSource::postSynchronouslyAndReturnError(
+ const sp<AMessage> &msg) {
+ sp<AMessage> response;
+ status_t err = msg->postAndAwaitResponse(&response);
+
+ if (err != OK) {
+ return err;
+ }
+
+ if (!response->findInt32("err", &err)) {
+ err = OK;
+ }
+
+ return err;
+}
+
+void MediaCodecSource::signalEOS(status_t err) {
+ if (!mEncodedReachedEOS) {
+ ALOGI("encoder (%s) reached EOS", mIsVideo ? "video" : "audio");
+ {
+ Mutex::Autolock autoLock(mOutputBufferLock);
+ // release all unread media buffers
+ for (List<MediaBuffer*>::iterator it = mOutputBufferQueue.begin();
+ it != mOutputBufferQueue.end(); it++) {
+ (*it)->release();
+ }
+ mOutputBufferQueue.clear();
+ mEncodedReachedEOS = true;
+ mErrorCode = err;
+ mOutputBufferCond.signal();
+ }
+
+ releaseEncoder();
+ }
+ if (mStopping && reachedEOS()) {
+ ALOGI("MediaCodecSource (%s) fully stopped",
+ mIsVideo ? "video" : "audio");
+ // posting reply to everyone that's waiting
+ List<uint32_t>::iterator it;
+ for (it = mStopReplyIDQueue.begin();
+ it != mStopReplyIDQueue.end(); it++) {
+ (new AMessage)->postReply(*it);
+ }
+ mStopReplyIDQueue.clear();
+ mStopping = false;
+ }
+}
+
+void MediaCodecSource::suspend() {
+ CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+ if (mEncoder != NULL) {
+ sp<AMessage> params = new AMessage;
+ params->setInt32("drop-input-frames", true);
+ mEncoder->setParameters(params);
+ }
+}
+
+void MediaCodecSource::resume(int64_t skipFramesBeforeUs) {
+ CHECK(mFlags & FLAG_USE_SURFACE_INPUT);
+ if (mEncoder != NULL) {
+ sp<AMessage> params = new AMessage;
+ params->setInt32("drop-input-frames", false);
+ if (skipFramesBeforeUs > 0) {
+ params->setInt64("skip-frames-before", skipFramesBeforeUs);
+ }
+ mEncoder->setParameters(params);
+ }
+}
+
+void MediaCodecSource::scheduleDoMoreWork() {
+ if (mDoMoreWorkPending) {
+ return;
+ }
+
+ mDoMoreWorkPending = true;
+
+ if (mEncoderActivityNotify == NULL) {
+ mEncoderActivityNotify = new AMessage(
+ kWhatEncoderActivity, mReflector->id());
+ }
+ mEncoder->requestActivityNotification(mEncoderActivityNotify);
+}
+
+status_t MediaCodecSource::feedEncoderInputBuffers() {
+ while (!mInputBufferQueue.empty()
+ && !mAvailEncoderInputIndices.empty()) {
+ MediaBuffer* mbuf = *mInputBufferQueue.begin();
+ mInputBufferQueue.erase(mInputBufferQueue.begin());
+
+ size_t bufferIndex = *mAvailEncoderInputIndices.begin();
+ mAvailEncoderInputIndices.erase(mAvailEncoderInputIndices.begin());
+
+ int64_t timeUs = 0ll;
+ uint32_t flags = 0;
+ size_t size = 0;
+
+ if (mbuf != NULL) {
+ CHECK(mbuf->meta_data()->findInt64(kKeyTime, &timeUs));
+
+ // push decoding time for video, or drift time for audio
+ if (mIsVideo) {
+ mDecodingTimeQueue.push_back(timeUs);
+ } else {
+#if DEBUG_DRIFT_TIME
+ if (mFirstSampleTimeUs < 0ll) {
+ mFirstSampleTimeUs = timeUs;
+ }
+
+ int64_t driftTimeUs = 0;
+ if (mbuf->meta_data()->findInt64(kKeyDriftTime, &driftTimeUs)
+ && driftTimeUs) {
+ driftTimeUs = timeUs - mFirstSampleTimeUs - driftTimeUs;
+ }
+ mDriftTimeQueue.push_back(driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+ }
+
+ size = mbuf->size();
+
+ memcpy(mEncoderInputBuffers.itemAt(bufferIndex)->data(),
+ mbuf->data(), size);
+
+ if (mIsVideo) {
+ // video encoder will release MediaBuffer when done
+ // with underlying data.
+ mEncoderInputBuffers.itemAt(bufferIndex)->meta()
+ ->setPointer("mediaBuffer", mbuf);
+ } else {
+ mbuf->release();
+ }
+ } else {
+ flags = MediaCodec::BUFFER_FLAG_EOS;
+ }
+
+ status_t err = mEncoder->queueInputBuffer(
+ bufferIndex, 0, size, timeUs, flags);
+
+ if (err != OK) {
+ return err;
+ }
+ }
+
+ return OK;
+}
+
+status_t MediaCodecSource::doMoreWork() {
+ status_t err;
+
+ if (!(mFlags & FLAG_USE_SURFACE_INPUT)) {
+ for (;;) {
+ size_t bufferIndex;
+ err = mEncoder->dequeueInputBuffer(&bufferIndex);
+
+ if (err != OK) {
+ break;
+ }
+
+ mAvailEncoderInputIndices.push_back(bufferIndex);
+ }
+
+ feedEncoderInputBuffers();
+ }
+
+ for (;;) {
+ size_t bufferIndex;
+ size_t offset;
+ size_t size;
+ int64_t timeUs;
+ uint32_t flags;
+ native_handle_t* handle = NULL;
+ err = mEncoder->dequeueOutputBuffer(
+ &bufferIndex, &offset, &size, &timeUs, &flags);
+
+ if (err != OK) {
+ if (err == INFO_FORMAT_CHANGED) {
+ continue;
+ } else if (err == INFO_OUTPUT_BUFFERS_CHANGED) {
+ mEncoder->getOutputBuffers(&mEncoderOutputBuffers);
+ continue;
+ }
+
+ if (err == -EAGAIN) {
+ err = OK;
+ }
+ break;
+ }
+ if (!(flags & MediaCodec::BUFFER_FLAG_EOS)) {
+ sp<ABuffer> outbuf = mEncoderOutputBuffers.itemAt(bufferIndex);
+
+ MediaBuffer *mbuf = new MediaBuffer(outbuf->size());
+ memcpy(mbuf->data(), outbuf->data(), outbuf->size());
+
+ if (!(flags & MediaCodec::BUFFER_FLAG_CODECCONFIG)) {
+ if (mIsVideo) {
+ int64_t decodingTimeUs;
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ // GraphicBufferSource is supposed to discard samples
+ // queued before start, and offset timeUs by start time
+ CHECK_GE(timeUs, 0ll);
+ // TODO:
+ // Decoding time for surface source is unavailable,
+ // use presentation time for now. May need to move
+ // this logic into MediaCodec.
+ decodingTimeUs = timeUs;
+ } else {
+ CHECK(!mDecodingTimeQueue.empty());
+ decodingTimeUs = *(mDecodingTimeQueue.begin());
+ mDecodingTimeQueue.erase(mDecodingTimeQueue.begin());
+ }
+ mbuf->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
+
+ ALOGV("[video] time %lld us (%.2f secs), dts/pts diff %lld",
+ timeUs, timeUs / 1E6, decodingTimeUs - timeUs);
+ } else {
+ int64_t driftTimeUs = 0;
+#if DEBUG_DRIFT_TIME
+ CHECK(!mDriftTimeQueue.empty());
+ driftTimeUs = *(mDriftTimeQueue.begin());
+ mDriftTimeQueue.erase(mDriftTimeQueue.begin());
+ mbuf->meta_data()->setInt64(kKeyDriftTime, driftTimeUs);
+#endif // DEBUG_DRIFT_TIME
+ ALOGV("[audio] time %lld us (%.2f secs), drift %lld",
+ timeUs, timeUs / 1E6, driftTimeUs);
+ }
+ mbuf->meta_data()->setInt64(kKeyTime, timeUs);
+ } else {
+ mbuf->meta_data()->setInt32(kKeyIsCodecConfig, true);
+ }
+ if (flags & MediaCodec::BUFFER_FLAG_SYNCFRAME) {
+ mbuf->meta_data()->setInt32(kKeyIsSyncFrame, true);
+ }
+ mbuf->setObserver(this);
+ mbuf->add_ref();
+
+ {
+ Mutex::Autolock autoLock(mOutputBufferLock);
+ mOutputBufferQueue.push_back(mbuf);
+ mOutputBufferCond.signal();
+ }
+ }
+
+ mEncoder->releaseOutputBuffer(bufferIndex);
+
+ if (flags & MediaCodec::BUFFER_FLAG_EOS) {
+ err = ERROR_END_OF_STREAM;
+ break;
+ }
+ }
+
+ return err;
+}
+
+status_t MediaCodecSource::onStart(MetaData *params) {
+ if (mStopping) {
+ ALOGE("Failed to start while we're stopping");
+ return INVALID_OPERATION;
+ }
+
+ if (mStarted) {
+ ALOGI("MediaCodecSource (%s) resuming", mIsVideo ? "video" : "audio");
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ resume();
+ } else {
+ CHECK(mPuller != NULL);
+ mPuller->resume();
+ }
+ return OK;
+ }
+
+ ALOGI("MediaCodecSource (%s) starting", mIsVideo ? "video" : "audio");
+
+ status_t err = OK;
+
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ int64_t startTimeUs;
+ if (!params || !params->findInt64(kKeyTime, &startTimeUs)) {
+ startTimeUs = -1ll;
+ }
+ resume(startTimeUs);
+ scheduleDoMoreWork();
+ } else {
+ CHECK(mPuller != NULL);
+ sp<AMessage> notify = new AMessage(
+ kWhatPullerNotify, mReflector->id());
+ err = mPuller->start(params, notify);
+ if (err != OK) {
+ mPullerReachedEOS = true;
+ return err;
+ }
+ }
+
+ ALOGI("MediaCodecSource (%s) started", mIsVideo ? "video" : "audio");
+
+ mStarted = true;
+ return OK;
+}
+
+void MediaCodecSource::onMessageReceived(const sp<AMessage> &msg) {
+ switch (msg->what()) {
+ case kWhatPullerNotify:
+ {
+ MediaBuffer *mbuf;
+ CHECK(msg->findPointer("accessUnit", (void**)&mbuf));
+
+ if (mbuf == NULL) {
+ ALOGI("puller (%s) reached EOS",
+ mIsVideo ? "video" : "audio");
+ mPullerReachedEOS = true;
+ }
+
+ if (mEncoder == NULL) {
+ ALOGV("got msg '%s' after encoder shutdown.",
+ msg->debugString().c_str());
+
+ if (mbuf != NULL) {
+ mbuf->release();
+ } else {
+ signalEOS();
+ }
+ break;
+ }
+
+ mInputBufferQueue.push_back(mbuf);
+
+ feedEncoderInputBuffers();
+ scheduleDoMoreWork();
+
+ break;
+ }
+ case kWhatEncoderActivity:
+ {
+ mDoMoreWorkPending = false;
+
+ if (mEncoder == NULL) {
+ break;
+ }
+
+ status_t err = doMoreWork();
+
+ if (err == OK) {
+ scheduleDoMoreWork();
+ } else {
+ // reached EOS, or error
+ signalEOS(err);
+ }
+
+ break;
+ }
+ case kWhatStart:
+ {
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ sp<RefBase> obj;
+ CHECK(msg->findObject("meta", &obj));
+ MetaData *params = static_cast<MetaData *>(obj.get());
+
+ sp<AMessage> response = new AMessage;
+ response->setInt32("err", onStart(params));
+ response->postReply(replyID);
+ break;
+ }
+ case kWhatStop:
+ {
+ ALOGI("MediaCodecSource (%s) stopping", mIsVideo ? "video" : "audio");
+
+ uint32_t replyID;
+ CHECK(msg->senderAwaitsResponse(&replyID));
+
+ if (reachedEOS()) {
+ // if we already reached EOS, reply and return now
+ ALOGI("MediaCodecSource (%s) already stopped",
+ mIsVideo ? "video" : "audio");
+ (new AMessage)->postReply(replyID);
+ break;
+ }
+
+ mStopReplyIDQueue.push_back(replyID);
+ if (mStopping) {
+ // nothing to do if we're already stopping, reply will be posted
+ // to all when we're stopped.
+ break;
+ }
+
+ mStopping = true;
+
+ // if using surface, signal source EOS and wait for EOS to come back.
+ // otherwise, release encoder and post EOS if haven't done already
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ mEncoder->signalEndOfInputStream();
+ } else {
+ CHECK(mPuller != NULL);
+ mPuller->stopAsync();
+ signalEOS();
+ }
+ break;
+ }
+ case kWhatPause:
+ {
+ if (mFlags & FLAG_USE_SURFACE_INPUT) {
+ suspend();
+ } else {
+ CHECK(mPuller != NULL);
+ mPuller->pause();
+ }
+ break;
+ }
+ default:
+ TRESPASS();
+ }
+}
+
+} // namespace android
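MediaCodecSource wraps a pulled MediaSource (or a Surface input) around a MediaCodec encoder and re-exposes the encoded stream as a MediaSource. A minimal usage sketch, assuming an already-prepared video source and an output-format AMessage; the variable names below are illustrative and not part of this change:

    sp<ALooper> looper = new ALooper;
    looper->setName("codec_source_looper");
    looper->start();

    // "format" carries at least the "mime" entry (e.g. "video/avc");
    // "videoSource" is any MediaSource such as a camera source.
    sp<MediaCodecSource> encoderSource =
            MediaCodecSource::Create(looper, format, videoSource, 0 /* flags */);
    CHECK(encoderSource != NULL);
    CHECK_EQ(encoderSource->start(NULL /* params */), (status_t)OK);

    MediaBuffer *accessUnit;
    while (encoderSource->read(&accessUnit) == OK) {
        // hand the encoded access unit to a muxer/writer, then release it
        accessUnit->release();
    }
    encoderSource->stop();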
diff --git a/media/libstagefright/MediaDefs.cpp b/media/libstagefright/MediaDefs.cpp
index 340cba7..d48dd84 100644
--- a/media/libstagefright/MediaDefs.cpp
+++ b/media/libstagefright/MediaDefs.cpp
@@ -23,6 +23,7 @@ const char *MEDIA_MIMETYPE_IMAGE_JPEG = "image/jpeg";
const char *MEDIA_MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
const char *MEDIA_MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
const char *MEDIA_MIMETYPE_VIDEO_AVC = "video/avc";
+const char *MEDIA_MIMETYPE_VIDEO_HEVC = "video/hevc";
const char *MEDIA_MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
const char *MEDIA_MIMETYPE_VIDEO_H263 = "video/3gpp";
const char *MEDIA_MIMETYPE_VIDEO_MPEG2 = "video/mpeg2";
@@ -36,6 +37,7 @@ const char *MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II = "audio/mpeg-L2";
const char *MEDIA_MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";
const char *MEDIA_MIMETYPE_AUDIO_QCELP = "audio/qcelp";
const char *MEDIA_MIMETYPE_AUDIO_VORBIS = "audio/vorbis";
+const char *MEDIA_MIMETYPE_AUDIO_OPUS = "audio/opus";
const char *MEDIA_MIMETYPE_AUDIO_G711_ALAW = "audio/g711-alaw";
const char *MEDIA_MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw";
const char *MEDIA_MIMETYPE_AUDIO_RAW = "audio/raw";
@@ -56,5 +58,7 @@ const char *MEDIA_MIMETYPE_CONTAINER_WVM = "video/wvm";
const char *MEDIA_MIMETYPE_TEXT_3GPP = "text/3gpp-tt";
const char *MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";
+const char *MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";
+const char *MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608";
} // namespace android
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index d87e910..90335ee 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -16,6 +16,9 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "MediaMuxer"
+
+#include "webm/WebmWriter.h"
+
#include <utils/Log.h>
#include <media/stagefright/MediaMuxer.h>
@@ -36,19 +39,30 @@
namespace android {
MediaMuxer::MediaMuxer(const char *path, OutputFormat format)
- : mState(UNINITIALIZED) {
+ : mFormat(format),
+ mState(UNINITIALIZED) {
if (format == OUTPUT_FORMAT_MPEG_4) {
mWriter = new MPEG4Writer(path);
+ } else if (format == OUTPUT_FORMAT_WEBM) {
+ mWriter = new WebmWriter(path);
+ }
+
+ if (mWriter != NULL) {
mFileMeta = new MetaData;
mState = INITIALIZED;
}
-
}
MediaMuxer::MediaMuxer(int fd, OutputFormat format)
- : mState(UNINITIALIZED) {
+ : mFormat(format),
+ mState(UNINITIALIZED) {
if (format == OUTPUT_FORMAT_MPEG_4) {
mWriter = new MPEG4Writer(fd);
+ } else if (format == OUTPUT_FORMAT_WEBM) {
+ mWriter = new WebmWriter(fd);
+ }
+
+ if (mWriter != NULL) {
mFileMeta = new MetaData;
mState = INITIALIZED;
}
@@ -109,8 +123,13 @@ status_t MediaMuxer::setLocation(int latitude, int longitude) {
ALOGE("setLocation() must be called before start().");
return INVALID_OPERATION;
}
+ if (mFormat != OUTPUT_FORMAT_MPEG_4) {
+ ALOGE("setLocation() is only supported for .mp4 output.");
+ return INVALID_OPERATION;
+ }
+
ALOGV("Setting location: latitude = %d, longitude = %d", latitude, longitude);
- return mWriter->setGeoData(latitude, longitude);
+ return static_cast<MPEG4Writer*>(mWriter.get())->setGeoData(latitude, longitude);
}
status_t MediaMuxer::start() {
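With mFormat recorded, the muxer can target WebM in addition to MP4, and MP4-only calls such as setLocation() are rejected for other formats. A hedged caller-side sketch (the fd and track format are assumed to exist elsewhere):

    sp<MediaMuxer> muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_WEBM);
    ssize_t videoTrackIndex = muxer->addTrack(videoFormat);  // e.g. video/x-vnd.on2.vp8
    // setLocation() is MP4-only after this change and should fail here:
    CHECK_EQ(muxer->setLocation(37 /* lat */, -122 /* lon */),
             (status_t)INVALID_OPERATION);
    muxer->start();
    // muxer->writeSampleData(encodedBuffer, videoTrackIndex, timeUs, flags) per sample
    muxer->stop();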
diff --git a/media/libstagefright/NuCachedSource2.cpp b/media/libstagefright/NuCachedSource2.cpp
index 06e2d43..61cf0ad 100644
--- a/media/libstagefright/NuCachedSource2.cpp
+++ b/media/libstagefright/NuCachedSource2.cpp
@@ -213,7 +213,14 @@ NuCachedSource2::NuCachedSource2(
mLooper->setName("NuCachedSource2");
mLooper->registerHandler(mReflector);
- mLooper->start();
+
+ // It may not be obvious why our looper thread needs to be able to
+ // call into Java, since it doesn't appear to do so at all...
+ // IMediaHTTPConnection may be (and most likely is) implemented in Java,
+ // and a local Java IBinder will call directly into JNI methods.
+ // So whenever we call DataSource::readAt it may end up in a call to
+ // IMediaHTTPConnection::readAt and therefore call back into Java.
+ mLooper->start(false /* runOnCallingThread */, true /* canCallJava */);
Mutex::Autolock autoLock(mLock);
(new AMessage(kWhatFetchMore, mReflector->id()))->post();
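For reference, the relevant ALooper::start() parameters (a sketch of the foundation API as commonly declared; the default values are an assumption, not shown in this hunk):

    // status_t ALooper::start(
    //         bool runOnCallingThread = false,
    //         bool canCallJava = false,
    //         int32_t priority = PRIORITY_DEFAULT);
    //
    // Passing canCallJava = true lets the looper thread attach to the Java VM,
    // so a DataSource::readAt() that ends up in a Java-backed
    // IMediaHTTPConnection::readAt() can safely call back into Java.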
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 7bc7da2..64f56e9 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -58,7 +58,9 @@ NuMediaExtractor::~NuMediaExtractor() {
}
status_t NuMediaExtractor::setDataSource(
- const char *path, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *path,
+ const KeyedVector<String8, String8> *headers) {
Mutex::Autolock autoLock(mLock);
if (mImpl != NULL) {
@@ -66,7 +68,7 @@ status_t NuMediaExtractor::setDataSource(
}
sp<DataSource> dataSource =
- DataSource::CreateFromURI(path, headers);
+ DataSource::CreateFromURI(httpService, path, headers);
if (dataSource == NULL) {
return -ENOENT;
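Callers that only handle local files can pass NULL for the new httpService argument; http(s) sources are expected to supply a real IMediaHTTPService. A minimal sketch (the path is illustrative):

    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    status_t err = extractor->setDataSource(
            NULL /* httpService */, "/sdcard/clip.mp4", NULL /* headers */);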
diff --git a/media/libstagefright/OMXClient.cpp b/media/libstagefright/OMXClient.cpp
index 72f2306..aca21cf 100644
--- a/media/libstagefright/OMXClient.cpp
+++ b/media/libstagefright/OMXClient.cpp
@@ -16,6 +16,11 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "OMXClient"
+
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
#include <utils/Log.h>
#include <binder/IServiceManager.h>
@@ -165,7 +170,14 @@ bool MuxOMX::isLocalNode_l(node_id node) const {
// static
bool MuxOMX::CanLiveLocally(const char *name) {
+#ifdef __LP64__
+ (void)name; // disable unused parameter warning
+ // 64 bit processes always run OMX remote on MediaServer
+ return false;
+#else
+ // 32 bit processes run only OMX.google.* components locally
return !strncasecmp(name, "OMX.google.", 11);
+#endif
}
const sp<IOMX> &MuxOMX::getOMX(node_id node) const {
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 97bf514..354712c 100644
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -18,6 +18,11 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "OMXCodec"
+
+#ifdef __LP64__
+#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
+#endif
+
#include <utils/Log.h>
#include "include/AACEncoder.h"
@@ -96,6 +101,7 @@ static sp<MediaSource> InstantiateSoftwareEncoder(
#define CODEC_LOGI(x, ...) ALOGI("[%s] "x, mComponentName, ##__VA_ARGS__)
#define CODEC_LOGV(x, ...) ALOGV("[%s] "x, mComponentName, ##__VA_ARGS__)
+#define CODEC_LOGW(x, ...) ALOGW("[%s] "x, mComponentName, ##__VA_ARGS__)
#define CODEC_LOGE(x, ...) ALOGE("[%s] "x, mComponentName, ##__VA_ARGS__)
struct OMXCodecObserver : public BnOMXObserver {
@@ -129,6 +135,7 @@ private:
template<class T>
static void InitOMXParams(T *params) {
+ COMPILE_TIME_ASSERT_FUNCTION_SCOPE(sizeof(OMX_PTR) == 4); // check OMX_PTR is 4 bytes.
params->nSize = sizeof(T);
params->nVersion.s.nVersionMajor = 1;
params->nVersion.s.nVersionMinor = 0;
@@ -374,6 +381,57 @@ sp<MediaSource> OMXCodec::Create(
return NULL;
}
+status_t OMXCodec::parseHEVCCodecSpecificData(
+ const void *data, size_t size,
+ unsigned *profile, unsigned *level) {
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ // verify minimum size and configurationVersion == 1.
+ if (size < 7 || ptr[0] != 1) {
+ return ERROR_MALFORMED;
+ }
+
+ *profile = (ptr[1] & 31);
+ *level = ptr[12];
+
+ ptr += 22;
+ size -= 22;
+
+ size_t numofArrays = (char)ptr[0];
+ ptr += 1;
+ size -= 1;
+ size_t j = 0, i = 0;
+ for (i = 0; i < numofArrays; i++) {
+ ptr += 1;
+ size -= 1;
+
+ // Num of nals
+ size_t numofNals = U16_AT(ptr);
+ ptr += 2;
+ size -= 2;
+
+ for (j = 0;j < numofNals;j++) {
+ if (size < 2) {
+ return ERROR_MALFORMED;
+ }
+
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ if (size < length) {
+ return ERROR_MALFORMED;
+ }
+ addCodecSpecificData(ptr, length);
+
+ ptr += length;
+ size -= length;
+ }
+ }
+ return OK;
+}
+
status_t OMXCodec::parseAVCCodecSpecificData(
const void *data, size_t size,
unsigned *profile, unsigned *level) {
@@ -486,11 +544,32 @@ status_t OMXCodec::configureCodec(const sp<MetaData> &meta) {
CODEC_LOGI(
"AVC profile = %u (%s), level = %u",
profile, AVCProfileToString(profile), level);
+ } else if (meta->findData(kKeyHVCC, &type, &data, &size)) {
+ // Parse the HEVCDecoderConfigurationRecord
+
+ unsigned profile, level;
+ status_t err;
+ if ((err = parseHEVCCodecSpecificData(
+ data, size, &profile, &level)) != OK) {
+ ALOGE("Malformed HEVC codec specific data.");
+ return err;
+ }
+
+ CODEC_LOGI(
+ "HEVC profile = %u , level = %u",
+ profile, level);
} else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
addCodecSpecificData(data, size);
CHECK(meta->findData(kKeyVorbisBooks, &type, &data, &size));
addCodecSpecificData(data, size);
+ } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) {
+ addCodecSpecificData(data, size);
+
+ CHECK(meta->findData(kKeyOpusCodecDelay, &type, &data, &size));
+ addCodecSpecificData(data, size);
+ CHECK(meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size));
+ addCodecSpecificData(data, size);
}
}
@@ -808,6 +887,8 @@ void OMXCodec::setVideoInputFormat(
OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
compressionFormat = OMX_VIDEO_CodingAVC;
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+ compressionFormat = OMX_VIDEO_CodingHEVC;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
compressionFormat = OMX_VIDEO_CodingMPEG4;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
@@ -1203,6 +1284,8 @@ status_t OMXCodec::setVideoOutputFormat(
compressionFormat = OMX_VIDEO_CodingAVC;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime)) {
compressionFormat = OMX_VIDEO_CodingMPEG4;
+ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
+ compressionFormat = OMX_VIDEO_CodingHEVC;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
compressionFormat = OMX_VIDEO_CodingH263;
} else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP8, mime)) {
@@ -1389,12 +1472,16 @@ void OMXCodec::setComponentRole(
"audio_decoder.aac", "audio_encoder.aac" },
{ MEDIA_MIMETYPE_AUDIO_VORBIS,
"audio_decoder.vorbis", "audio_encoder.vorbis" },
+ { MEDIA_MIMETYPE_AUDIO_OPUS,
+ "audio_decoder.opus", "audio_encoder.opus" },
{ MEDIA_MIMETYPE_AUDIO_G711_MLAW,
"audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW,
"audio_decoder.g711alaw", "audio_encoder.g711alaw" },
{ MEDIA_MIMETYPE_VIDEO_AVC,
"video_decoder.avc", "video_encoder.avc" },
+ { MEDIA_MIMETYPE_VIDEO_HEVC,
+ "video_decoder.hevc", "video_encoder.hevc" },
{ MEDIA_MIMETYPE_VIDEO_MPEG4,
"video_decoder.mpeg4", "video_encoder.mpeg4" },
{ MEDIA_MIMETYPE_VIDEO_H263,
@@ -1617,15 +1704,15 @@ status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
info.mMediaBuffer = NULL;
if (portIndex == kPortIndexOutput) {
- if (!(mOMXLivesLocally
- && (mQuirks & kRequiresAllocateBufferOnOutputPorts)
- && (mQuirks & kDefersOutputBufferAllocation))) {
- // If the node does not fill in the buffer ptr at this time,
- // we will defer creating the MediaBuffer until receiving
- // the first FILL_BUFFER_DONE notification instead.
- info.mMediaBuffer = new MediaBuffer(info.mData, info.mSize);
- info.mMediaBuffer->setObserver(this);
- }
+ // Deferring MediaBuffer creation until the first FILL_BUFFER_DONE
+ // notification (the legacy mode) is no longer supported; fail fatally
+ // if a codec still requires it.
+ LOG_ALWAYS_FATAL_IF((mOMXLivesLocally
+ && (mQuirks & kRequiresAllocateBufferOnOutputPorts)
+ && (mQuirks & kDefersOutputBufferAllocation)),
+ "allocateBuffersOnPort cannot defer buffer allocation");
+
+ info.mMediaBuffer = new MediaBuffer(info.mData, info.mSize);
+ info.mMediaBuffer->setObserver(this);
}
mPortBuffers[portIndex].push(info);
@@ -1796,12 +1883,24 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
strerror(-err), -err);
return err;
}
-
- // XXX: Is this the right logic to use? It's not clear to me what the OMX
- // buffer counts refer to - how do they account for the renderer holding on
- // to buffers?
- if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) {
- OMX_U32 newBufferCount = def.nBufferCountMin + minUndequeuedBufs;
+ // FIXME: assume that surface is controlled by app (native window
+ // returns the number for the case when surface is not controlled by app)
+ // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
+ // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
+
+ // Use conservative allocation while also trying to reduce starvation
+ //
+ // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
+ // minimum needed for the consumer to be able to work
+ // 2. try to allocate two (2) additional buffers to reduce starvation from
+ // the consumer
+ // plus an extra buffer to account for incorrect minUndequeuedBufs
+ CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
+ def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
+
+ for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
+ OMX_U32 newBufferCount =
+ def.nBufferCountMin + minUndequeuedBufs + extraBuffers;
def.nBufferCountActual = newBufferCount;
err = mOMX->setParameter(
mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
@@ -1818,6 +1917,8 @@ status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
return err;
}
}
+ CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
+ def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
err = native_window_set_buffer_count(
mNativeWindow.get(), def.nBufferCountActual);
@@ -2210,22 +2311,6 @@ void OMXCodec::on_message(const omx_message &msg) {
} else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
- if (info->mMediaBuffer == NULL) {
- CHECK(mOMXLivesLocally);
- CHECK(mQuirks & kRequiresAllocateBufferOnOutputPorts);
- CHECK(mQuirks & kDefersOutputBufferAllocation);
-
- // The qcom video decoders on Nexus don't actually allocate
- // output buffer memory on a call to OMX_AllocateBuffer
- // the "pBuffer" member of the OMX_BUFFERHEADERTYPE
- // structure is only filled in later.
-
- info->mMediaBuffer = new MediaBuffer(
- msg.u.extended_buffer_data.data_ptr,
- info->mSize);
- info->mMediaBuffer->setObserver(this);
- }
-
MediaBuffer *buffer = info->mMediaBuffer;
bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
@@ -2260,10 +2345,6 @@ void OMXCodec::on_message(const omx_message &msg) {
buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
}
- buffer->meta_data()->setPointer(
- kKeyPlatformPrivate,
- msg.u.extended_buffer_data.platform_private);
-
buffer->meta_data()->setInt32(
kKeyBufferID,
msg.u.extended_buffer_data.buffer);
@@ -2999,7 +3080,8 @@ bool OMXCodec::drainInputBuffer(BufferInfo *info) {
size_t size = specific->mSize;
- if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mMIME)
+ if ((!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mMIME) ||
+ !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mMIME))
&& !(mQuirks & kWantsNALFragments)) {
static const uint8_t kNALStartCode[4] =
{ 0x00, 0x00, 0x00, 0x01 };
@@ -4134,6 +4216,7 @@ static const char *audioCodingTypeString(OMX_AUDIO_CODINGTYPE type) {
"OMX_AUDIO_CodingMP3",
"OMX_AUDIO_CodingSBC",
"OMX_AUDIO_CodingVORBIS",
+ "OMX_AUDIO_CodingOPUS",
"OMX_AUDIO_CodingWMA",
"OMX_AUDIO_CodingRA",
"OMX_AUDIO_CodingMIDI",
diff --git a/media/libstagefright/SampleIterator.cpp b/media/libstagefright/SampleIterator.cpp
index eae721b..2748349 100644
--- a/media/libstagefright/SampleIterator.cpp
+++ b/media/libstagefright/SampleIterator.cpp
@@ -133,7 +133,8 @@ status_t SampleIterator::seekTo(uint32_t sampleIndex) {
}
status_t err;
- if ((err = findSampleTime(sampleIndex, &mCurrentSampleTime)) != OK) {
+ if ((err = findSampleTimeAndDuration(
+ sampleIndex, &mCurrentSampleTime, &mCurrentSampleDuration)) != OK) {
ALOGE("findSampleTime return error");
return err;
}
@@ -285,8 +286,8 @@ status_t SampleIterator::getSampleSizeDirect(
return OK;
}
-status_t SampleIterator::findSampleTime(
- uint32_t sampleIndex, uint32_t *time) {
+status_t SampleIterator::findSampleTimeAndDuration(
+ uint32_t sampleIndex, uint32_t *time, uint32_t *duration) {
if (sampleIndex >= mTable->mNumSampleSizes) {
return ERROR_OUT_OF_RANGE;
}
@@ -309,6 +310,8 @@ status_t SampleIterator::findSampleTime(
*time += mTable->getCompositionTimeOffset(sampleIndex);
+ *duration = mTTSDuration;
+
return OK;
}
diff --git a/media/libstagefright/SampleTable.cpp b/media/libstagefright/SampleTable.cpp
index d9858d7..9a92805 100644
--- a/media/libstagefright/SampleTable.cpp
+++ b/media/libstagefright/SampleTable.cpp
@@ -778,7 +778,8 @@ status_t SampleTable::getMetaDataForSample(
off64_t *offset,
size_t *size,
uint32_t *compositionTime,
- bool *isSyncSample) {
+ bool *isSyncSample,
+ uint32_t *sampleDuration) {
Mutex::Autolock autoLock(mLock);
status_t err;
@@ -820,6 +821,10 @@ status_t SampleTable::getMetaDataForSample(
}
}
+ if (sampleDuration) {
+ *sampleDuration = mSampleIterator->getSampleDuration();
+ }
+
return OK;
}
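The new sampleDuration out-parameter rides along with the existing metadata; a hedged calling sketch (variable names illustrative, values in media-timescale units):

    off64_t offset;
    size_t size;
    uint32_t compositionTime, duration;
    bool isSyncSample;
    status_t err = mSampleTable->getMetaDataForSample(
            sampleIndex, &offset, &size, &compositionTime,
            &isSyncSample, &duration);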
diff --git a/media/libstagefright/SkipCutBuffer.cpp b/media/libstagefright/SkipCutBuffer.cpp
index 773854f..e2e6d79 100644
--- a/media/libstagefright/SkipCutBuffer.cpp
+++ b/media/libstagefright/SkipCutBuffer.cpp
@@ -25,7 +25,7 @@
namespace android {
SkipCutBuffer::SkipCutBuffer(int32_t skip, int32_t cut) {
- mFrontPadding = skip;
+ mFrontPadding = mSkip = skip;
mBackPadding = cut;
mWriteHead = 0;
mReadHead = 0;
@@ -94,6 +94,7 @@ void SkipCutBuffer::submit(const sp<ABuffer>& buffer) {
void SkipCutBuffer::clear() {
mWriteHead = mReadHead = 0;
+ mFrontPadding = mSkip;
}
void SkipCutBuffer::write(const char *src, size_t num) {
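The saved mSkip lets clear() re-arm the front padding, so the codec delay is skipped again after a flush/seek rather than only once at start. A hedged sketch of the intended behavior (buffer variables illustrative):

    SkipCutBuffer skipCut(delayFrames * frameSize /* skip */,
                          paddingFrames * frameSize /* cut */);
    skipCut.submit(firstDecodedBuffer);      // front padding trimmed
    skipCut.clear();                         // flush on seek: mFrontPadding = mSkip
    skipCut.submit(firstBufferAfterSeek);    // padding trimmed again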
diff --git a/media/libstagefright/StagefrightMediaScanner.cpp b/media/libstagefright/StagefrightMediaScanner.cpp
index f3fc3f6..4449d57 100644
--- a/media/libstagefright/StagefrightMediaScanner.cpp
+++ b/media/libstagefright/StagefrightMediaScanner.cpp
@@ -24,6 +24,7 @@
#include <media/stagefright/StagefrightMediaScanner.h>
+#include <media/IMediaHTTPService.h>
#include <media/mediametadataretriever.h>
#include <private/media/VideoFrame.h>
@@ -147,7 +148,7 @@ MediaScanResult StagefrightMediaScanner::processFileInternal(
status_t status;
if (fd < 0) {
// couldn't open it locally, maybe the media server can?
- status = mRetriever->setDataSource(path);
+ status = mRetriever->setDataSource(NULL /* httpService */, path);
} else {
status = mRetriever->setDataSource(fd, 0, 0x7ffffffffffffffL);
close(fd);
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index e4d3c79..af96225 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -21,6 +21,7 @@
#include "include/StagefrightMetadataRetriever.h"
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/ColorConverter.h>
#include <media/stagefright/DataSource.h>
@@ -51,7 +52,9 @@ StagefrightMetadataRetriever::~StagefrightMetadataRetriever() {
}
status_t StagefrightMetadataRetriever::setDataSource(
- const char *uri, const KeyedVector<String8, String8> *headers) {
+ const sp<IMediaHTTPService> &httpService,
+ const char *uri,
+ const KeyedVector<String8, String8> *headers) {
ALOGV("setDataSource(%s)", uri);
mParsedMetaData = false;
@@ -59,7 +62,7 @@ status_t StagefrightMetadataRetriever::setDataSource(
delete mAlbumArt;
mAlbumArt = NULL;
- mSource = DataSource::CreateFromURI(uri, headers);
+ mSource = DataSource::CreateFromURI(httpService, uri, headers);
if (mSource == NULL) {
ALOGE("Unable to create data source for '%s'.", uri);
diff --git a/media/libstagefright/SurfaceMediaSource.cpp b/media/libstagefright/SurfaceMediaSource.cpp
index 686d03a..62aea36 100644
--- a/media/libstagefright/SurfaceMediaSource.cpp
+++ b/media/libstagefright/SurfaceMediaSource.cpp
@@ -54,9 +54,9 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig
ALOGE("Invalid dimensions %dx%d", bufferWidth, bufferHeight);
}
- mBufferQueue = new BufferQueue();
- mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
- mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
GRALLOC_USAGE_HW_TEXTURE);
sp<ISurfaceComposer> composer(ComposerService::getComposerService());
@@ -68,7 +68,7 @@ SurfaceMediaSource::SurfaceMediaSource(uint32_t bufferWidth, uint32_t bufferHeig
wp<ConsumerListener> listener = static_cast<ConsumerListener*>(this);
sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
- status_t err = mBufferQueue->consumerConnect(proxy, false);
+ status_t err = mConsumer->consumerConnect(proxy, false);
if (err != NO_ERROR) {
ALOGE("SurfaceMediaSource: error connecting to BufferQueue: %s (%d)",
strerror(-err), err);
@@ -108,7 +108,7 @@ void SurfaceMediaSource::dump(
Mutex::Autolock lock(mMutex);
result.append(buffer);
- mBufferQueue->dump(result, "");
+ mConsumer->dump(result, "");
}
status_t SurfaceMediaSource::setFrameRate(int32_t fps)
@@ -166,7 +166,7 @@ status_t SurfaceMediaSource::start(MetaData *params)
CHECK_GT(mMaxAcquiredBufferCount, 1);
status_t err =
- mBufferQueue->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
+ mConsumer->setMaxAcquiredBufferCount(mMaxAcquiredBufferCount);
if (err != OK) {
return err;
@@ -205,6 +205,9 @@ status_t SurfaceMediaSource::stop()
return OK;
}
+ mStarted = false;
+ mFrameAvailableCondition.signal();
+
while (mNumPendingBuffers > 0) {
ALOGI("Still waiting for %d buffers to be returned.",
mNumPendingBuffers);
@@ -218,11 +221,9 @@ status_t SurfaceMediaSource::stop()
mMediaBuffersAvailableCondition.wait(mMutex);
}
- mStarted = false;
- mFrameAvailableCondition.signal();
mMediaBuffersAvailableCondition.signal();
- return mBufferQueue->consumerDisconnect();
+ return mConsumer->consumerDisconnect();
}
sp<MetaData> SurfaceMediaSource::getFormat()
@@ -292,7 +293,7 @@ status_t SurfaceMediaSource::read(
// wait here till the frames come in from the client side
while (mStarted) {
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// wait for a buffer to be queued
mFrameAvailableCondition.wait(mMutex);
@@ -315,7 +316,7 @@ status_t SurfaceMediaSource::read(
if (mStartTimeNs > 0) {
if (item.mTimestamp < mStartTimeNs) {
// This frame predates start of record, discard
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
item.mBuf, item.mFrameNumber, EGL_NO_DISPLAY,
EGL_NO_SYNC_KHR, Fence::NO_FENCE);
continue;
@@ -415,7 +416,7 @@ void SurfaceMediaSource::signalBufferReturned(MediaBuffer *buffer) {
ALOGV("Slot %d returned, matches handle = %p", id,
mSlots[id].mGraphicBuffer->handle);
- mBufferQueue->releaseBuffer(id, mSlots[id].mFrameNumber,
+ mConsumer->releaseBuffer(id, mSlots[id].mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR,
Fence::NO_FENCE);
@@ -476,4 +477,8 @@ void SurfaceMediaSource::onBuffersReleased() {
}
}
+void SurfaceMediaSource::onSidebandStreamChanged() {
+ ALOG_ASSERT(false, "SurfaceMediaSource can't consume sideband streams");
+}
+
} // end of namespace android
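This migrates SurfaceMediaSource from a single BufferQueue object to the split producer/consumer interfaces. The pattern, roughly (the accessor for handing out the producer is not shown in this hunk):

    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);
    // consumer-side calls (setDefaultBufferSize, acquireBuffer, releaseBuffer,
    // consumerConnect/consumerDisconnect) stay inside SurfaceMediaSource;
    // the producer is what client code queues frames into.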
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 216a329..7ff31a1 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -17,6 +17,7 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "Utils"
#include <utils/Log.h>
+#include <ctype.h>
#include "include/ESDS.h"
@@ -216,6 +217,56 @@ status_t convertMetaDataToMessage(
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-1", buffer);
+ } else if (meta->findData(kKeyHVCC, &type, &data, &size)) {
+ const uint8_t *ptr = (const uint8_t *)data;
+
+ CHECK(size >= 7);
+ CHECK_EQ((unsigned)ptr[0], 1u); // configurationVersion == 1
+ uint8_t profile = ptr[1] & 31;
+ uint8_t level = ptr[12];
+ ptr += 22;
+ size -= 22;
+
+ size_t numofArrays = (char)ptr[0];
+ ptr += 1;
+ size -= 1;
+ size_t j = 0, i = 0;
+
+ sp<ABuffer> buffer = new ABuffer(1024);
+ buffer->setRange(0, 0);
+
+ for (i = 0; i < numofArrays; i++) {
+ ptr += 1;
+ size -= 1;
+
+ // Num of nals
+ size_t numofNals = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ for (j = 0; j < numofNals; j++) {
+ CHECK(size >= 2);
+ size_t length = U16_AT(ptr);
+
+ ptr += 2;
+ size -= 2;
+
+ CHECK(size >= length);
+
+ memcpy(buffer->data() + buffer->size(), "\x00\x00\x00\x01", 4);
+ memcpy(buffer->data() + buffer->size() + 4, ptr, length);
+ buffer->setRange(0, buffer->size() + 4 + length);
+
+ ptr += length;
+ size -= length;
+ }
+ }
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
+
} else if (meta->findData(kKeyESDS, &type, &data, &size)) {
ESDS esds((const char *)data, size);
CHECK_EQ(esds.InitCheck(), (status_t)OK);
@@ -251,6 +302,13 @@ status_t convertMetaDataToMessage(
buffer->meta()->setInt32("csd", true);
buffer->meta()->setInt64("timeUs", 0);
msg->setBuffer("csd-1", buffer);
+ } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) {
+ sp<ABuffer> buffer = new ABuffer(size);
+ memcpy(buffer->data(), data, size);
+
+ buffer->meta()->setInt32("csd", true);
+ buffer->meta()->setInt64("timeUs", 0);
+ msg->setBuffer("csd-0", buffer);
}
*format = msg;
@@ -452,6 +510,11 @@ void convertMessageToMetaData(const sp<AMessage> &msg, sp<MetaData> &meta) {
}
}
+ int32_t timeScale;
+ if (msg->findInt32("time-scale", &timeScale)) {
+ meta->setInt32(kKeyTimeScale, timeScale);
+ }
+
// XXX TODO add whatever other keys there are
#if 0
@@ -523,6 +586,7 @@ static const struct mime_conv_t mimeLookup[] = {
{ MEDIA_MIMETYPE_AUDIO_AMR_WB, AUDIO_FORMAT_AMR_WB },
{ MEDIA_MIMETYPE_AUDIO_AAC, AUDIO_FORMAT_AAC },
{ MEDIA_MIMETYPE_AUDIO_VORBIS, AUDIO_FORMAT_VORBIS },
+ { MEDIA_MIMETYPE_AUDIO_OPUS, AUDIO_FORMAT_OPUS},
{ 0, AUDIO_FORMAT_INVALID }
};
@@ -615,5 +679,40 @@ bool canOffloadStream(const sp<MetaData>& meta, bool hasVideo,
return AudioSystem::isOffloadSupported(info);
}
+AString uriDebugString(const AString &uri, bool incognito) {
+ if (incognito) {
+ return AString("<URI suppressed>");
+ }
+
+ char prop[PROPERTY_VALUE_MAX];
+ if (property_get("media.stagefright.log-uri", prop, "false") &&
+ (!strcmp(prop, "1") || !strcmp(prop, "true"))) {
+ return uri;
+ }
+
+ // find scheme
+ AString scheme;
+ const char *chars = uri.c_str();
+ for (size_t i = 0; i < uri.size(); i++) {
+ const char c = chars[i];
+ if (!isascii(c)) {
+ break;
+ } else if (isalpha(c)) {
+ continue;
+ } else if (i == 0) {
+ // first character must be a letter
+ break;
+ } else if (isdigit(c) || c == '+' || c == '.' || c =='-') {
+ continue;
+ } else if (c != ':') {
+ break;
+ }
+ scheme = AString(uri, 0, i);
+ scheme.append("://<suppressed>");
+ return scheme;
+ }
+ return AString("<no-scheme URI suppressed>");
+}
+
} // namespace android
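Expected behavior of uriDebugString(), summarized from the logic above (assuming the media.stagefright.log-uri property is unset):

    // uriDebugString("http://example.com/a.mp4", false) -> "http://<suppressed>"
    // uriDebugString("http://example.com/a.mp4", true)  -> "<URI suppressed>"
    // uriDebugString("no scheme here", false)           -> "<no-scheme URI suppressed>"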
diff --git a/media/libstagefright/avc_utils.cpp b/media/libstagefright/avc_utils.cpp
index c6ac0da..38a1f6b 100644
--- a/media/libstagefright/avc_utils.cpp
+++ b/media/libstagefright/avc_utils.cpp
@@ -40,6 +40,25 @@ unsigned parseUE(ABitReader *br) {
return x + (1u << numZeroes) - 1;
}
+signed parseSE(ABitReader *br) {
+ unsigned codeNum = parseUE(br);
+
+ return (codeNum & 1) ? (codeNum + 1) / 2 : -(codeNum / 2);
+}
+
+static void skipScalingList(ABitReader *br, size_t sizeOfScalingList) {
+ size_t lastScale = 8;
+ size_t nextScale = 8;
+ for (size_t j = 0; j < sizeOfScalingList; ++j) {
+ if (nextScale != 0) {
+ signed delta_scale = parseSE(br);
+ nextScale = (lastScale + delta_scale + 256) % 256;
+ }
+
+ lastScale = (nextScale == 0) ? lastScale : nextScale;
+ }
+}
+
// Determine video dimensions from the sequence parameterset.
void FindAVCDimensions(
const sp<ABuffer> &seqParamSet,
@@ -63,7 +82,24 @@ void FindAVCDimensions(
parseUE(&br); // bit_depth_luma_minus8
parseUE(&br); // bit_depth_chroma_minus8
br.skipBits(1); // qpprime_y_zero_transform_bypass_flag
- CHECK_EQ(br.getBits(1), 0u); // seq_scaling_matrix_present_flag
+
+ if (br.getBits(1)) { // seq_scaling_matrix_present_flag
+ for (size_t i = 0; i < 8; ++i) {
+ if (br.getBits(1)) { // seq_scaling_list_present_flag[i]
+
+ // WARNING: the code below has never been exercised...
+ // we need a real-world example.
+
+ if (i < 6) {
+ // ScalingList4x4[i],16,...
+ skipScalingList(&br, 16);
+ } else {
+ // ScalingList8x8[i-6],64,...
+ skipScalingList(&br, 64);
+ }
+ }
+ }
+ }
}
parseUE(&br); // log2_max_frame_num_minus4
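parseSE() implements the standard signed Exp-Golomb (se(v)) mapping used by the scaling-list parsing above; the first few values, for reference:

    // codeNum : 0   1   2   3   4   5 ...
    // se(v)   : 0  +1  -1  +2  -2  +3 ...
    // odd codeNum  -> +(codeNum + 1) / 2
    // even codeNum -> -(codeNum / 2)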
diff --git a/media/libstagefright/chromium_http/Android.mk b/media/libstagefright/chromium_http/Android.mk
deleted file mode 100644
index 109e3fe..0000000
--- a/media/libstagefright/chromium_http/Android.mk
+++ /dev/null
@@ -1,39 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-ifneq ($(TARGET_BUILD_PDK), true)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES:= \
- DataUriSource.cpp \
- ChromiumHTTPDataSource.cpp \
- support.cpp \
- chromium_http_stub.cpp
-
-LOCAL_C_INCLUDES:= \
- $(TOP)/frameworks/av/media/libstagefright \
- $(TOP)/frameworks/native/include/media/openmax \
- external/chromium \
- external/chromium/android
-
-LOCAL_CFLAGS += -Wno-multichar
-
-LOCAL_SHARED_LIBRARIES += \
- libbinder \
- libstlport \
- libchromium_net \
- libutils \
- libbinder \
- libcutils \
- liblog \
- libstagefright_foundation \
- libstagefright \
- libdrmframework
-
-include external/stlport/libstlport.mk
-
-LOCAL_MODULE:= libstagefright_chromium_http
-
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_SHARED_LIBRARY)
-endif
diff --git a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp b/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
deleted file mode 100644
index 7e5c280..0000000
--- a/media/libstagefright/chromium_http/ChromiumHTTPDataSource.cpp
+++ /dev/null
@@ -1,355 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ChromiumHTTPDataSource"
-#include <media/stagefright/foundation/ADebug.h>
-
-#include "include/ChromiumHTTPDataSource.h"
-
-#include <media/stagefright/foundation/ALooper.h>
-#include <media/stagefright/MediaErrors.h>
-
-#include "support.h"
-
-#include <cutils/properties.h> // for property_get
-
-namespace android {
-
-ChromiumHTTPDataSource::ChromiumHTTPDataSource(uint32_t flags)
- : mFlags(flags),
- mState(DISCONNECTED),
- mDelegate(new SfDelegate),
- mCurrentOffset(0),
- mIOResult(OK),
- mContentSize(-1),
- mDecryptHandle(NULL),
- mDrmManagerClient(NULL) {
- mDelegate->setOwner(this);
-}
-
-ChromiumHTTPDataSource::~ChromiumHTTPDataSource() {
- disconnect();
-
- delete mDelegate;
- mDelegate = NULL;
-
- clearDRMState_l();
-
- if (mDrmManagerClient != NULL) {
- delete mDrmManagerClient;
- mDrmManagerClient = NULL;
- }
-}
-
-status_t ChromiumHTTPDataSource::connect(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- Mutex::Autolock autoLock(mLock);
-
- uid_t uid;
- if (getUID(&uid)) {
- mDelegate->setUID(uid);
- }
-
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "connect on behalf of uid %d", uid);
-#endif
-
- return connect_l(uri, headers, offset);
-}
-
-status_t ChromiumHTTPDataSource::connect_l(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- if (mState != DISCONNECTED) {
- disconnect_l();
- }
-
-#if defined(LOG_NDEBUG) && !LOG_NDEBUG
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG,
- "connect to <URL suppressed> @%lld", offset);
-#endif
-
- mURI = uri;
- mContentType = String8("application/octet-stream");
-
- if (headers != NULL) {
- mHeaders = *headers;
- } else {
- mHeaders.clear();
- }
-
- mState = CONNECTING;
- mContentSize = -1;
- mCurrentOffset = offset;
-
- mDelegate->initiateConnection(mURI.c_str(), &mHeaders, offset);
-
- while (mState == CONNECTING || mState == DISCONNECTING) {
- mCondition.wait(mLock);
- }
-
- return mState == CONNECTED ? OK : mIOResult;
-}
-
-void ChromiumHTTPDataSource::onRedirect(const char *url) {
- Mutex::Autolock autoLock(mLock);
- mURI = url;
-}
-
-void ChromiumHTTPDataSource::onConnectionEstablished(
- int64_t contentSize, const char *contentType) {
- Mutex::Autolock autoLock(mLock);
-
- if (mState != CONNECTING) {
- // We may have initiated disconnection.
- CHECK_EQ(mState, DISCONNECTING);
- return;
- }
-
- mState = CONNECTED;
- mContentSize = (contentSize < 0) ? -1 : contentSize + mCurrentOffset;
- mContentType = String8(contentType);
- mCondition.broadcast();
-}
-
-void ChromiumHTTPDataSource::onConnectionFailed(status_t err) {
- Mutex::Autolock autoLock(mLock);
- mState = DISCONNECTED;
- mCondition.broadcast();
-
- // mURI.clear();
-
- mIOResult = err;
-}
-
-void ChromiumHTTPDataSource::disconnect() {
- Mutex::Autolock autoLock(mLock);
- disconnect_l();
-}
-
-void ChromiumHTTPDataSource::disconnect_l() {
- if (mState == DISCONNECTED) {
- return;
- }
-
- mState = DISCONNECTING;
- mIOResult = -EINTR;
-
- mDelegate->initiateDisconnect();
-
- while (mState == DISCONNECTING) {
- mCondition.wait(mLock);
- }
-
- CHECK_EQ((int)mState, (int)DISCONNECTED);
-}
-
-status_t ChromiumHTTPDataSource::initCheck() const {
- Mutex::Autolock autoLock(mLock);
-
- return mState == CONNECTED ? OK : NO_INIT;
-}
-
-ssize_t ChromiumHTTPDataSource::readAt(off64_t offset, void *data, size_t size) {
- Mutex::Autolock autoLock(mLock);
-
- if (mState != CONNECTED) {
- return INVALID_OPERATION;
- }
-
-#if 0
- char value[PROPERTY_VALUE_MAX];
- if (property_get("media.stagefright.disable-net", value, 0)
- && (!strcasecmp(value, "true") || !strcmp(value, "1"))) {
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Simulating that the network is down.");
- disconnect_l();
- return ERROR_IO;
- }
-#endif
-
- if (offset != mCurrentOffset) {
- AString tmp = mURI;
- KeyedVector<String8, String8> tmpHeaders = mHeaders;
-
- disconnect_l();
-
- status_t err = connect_l(tmp.c_str(), &tmpHeaders, offset);
-
- if (err != OK) {
- return err;
- }
- }
-
- mState = READING;
-
- int64_t startTimeUs = ALooper::GetNowUs();
-
- mDelegate->initiateRead(data, size);
-
- while (mState == READING) {
- mCondition.wait(mLock);
- }
-
- if (mIOResult < OK) {
- return mIOResult;
- }
-
- if (mState == CONNECTED) {
- int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
-
- // The read operation was successful, mIOResult contains
- // the number of bytes read.
- addBandwidthMeasurement(mIOResult, delayUs);
-
- mCurrentOffset += mIOResult;
- return mIOResult;
- }
-
- return ERROR_IO;
-}
-
-void ChromiumHTTPDataSource::onReadCompleted(ssize_t size) {
- Mutex::Autolock autoLock(mLock);
-
- mIOResult = size;
-
- if (mState == READING) {
- mState = CONNECTED;
- mCondition.broadcast();
- }
-}
-
-status_t ChromiumHTTPDataSource::getSize(off64_t *size) {
- Mutex::Autolock autoLock(mLock);
-
- if (mContentSize < 0) {
- return ERROR_UNSUPPORTED;
- }
-
- *size = mContentSize;
-
- return OK;
-}
-
-uint32_t ChromiumHTTPDataSource::flags() {
- return kWantsPrefetching | kIsHTTPBasedSource;
-}
-
-// static
-void ChromiumHTTPDataSource::InitiateRead(
- ChromiumHTTPDataSource *me, void *data, size_t size) {
- me->initiateRead(data, size);
-}
-
-void ChromiumHTTPDataSource::initiateRead(void *data, size_t size) {
- mDelegate->initiateRead(data, size);
-}
-
-void ChromiumHTTPDataSource::onDisconnectComplete() {
- Mutex::Autolock autoLock(mLock);
- CHECK_EQ((int)mState, (int)DISCONNECTING);
-
- mState = DISCONNECTED;
- // mURI.clear();
- mIOResult = -ENOTCONN;
-
- mCondition.broadcast();
-}
-
-sp<DecryptHandle> ChromiumHTTPDataSource::DrmInitialization(const char* mime) {
- Mutex::Autolock autoLock(mLock);
-
- if (mDrmManagerClient == NULL) {
- mDrmManagerClient = new DrmManagerClient();
- }
-
- if (mDrmManagerClient == NULL) {
- return NULL;
- }
-
- if (mDecryptHandle == NULL) {
- /* Note if redirect occurs, mUri is the redirect uri instead of the
- * original one
- */
- mDecryptHandle = mDrmManagerClient->openDecryptSession(
- String8(mURI.c_str()), mime);
- }
-
- if (mDecryptHandle == NULL) {
- delete mDrmManagerClient;
- mDrmManagerClient = NULL;
- }
-
- return mDecryptHandle;
-}
-
-void ChromiumHTTPDataSource::getDrmInfo(
- sp<DecryptHandle> &handle, DrmManagerClient **client) {
- Mutex::Autolock autoLock(mLock);
-
- handle = mDecryptHandle;
- *client = mDrmManagerClient;
-}
-
-String8 ChromiumHTTPDataSource::getUri() {
- Mutex::Autolock autoLock(mLock);
-
- return String8(mURI.c_str());
-}
-
-String8 ChromiumHTTPDataSource::getMIMEType() const {
- Mutex::Autolock autoLock(mLock);
-
- return mContentType;
-}
-
-void ChromiumHTTPDataSource::clearDRMState_l() {
- if (mDecryptHandle != NULL) {
- // To release mDecryptHandle
- CHECK(mDrmManagerClient);
- mDrmManagerClient->closeDecryptSession(mDecryptHandle);
- mDecryptHandle = NULL;
- }
-}
-
-status_t ChromiumHTTPDataSource::reconnectAtOffset(off64_t offset) {
- Mutex::Autolock autoLock(mLock);
-
- if (mURI.empty()) {
- return INVALID_OPERATION;
- }
-
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnecting...");
- status_t err = connect_l(mURI.c_str(), &mHeaders, offset);
- if (err != OK) {
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "Reconnect failed w/ err 0x%08x", err);
- }
-
- return err;
-}
-
-// static
-status_t ChromiumHTTPDataSource::UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- return SfDelegate::UpdateProxyConfig(host, port, exclusionList);
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/chromium_http/DataUriSource.cpp b/media/libstagefright/chromium_http/DataUriSource.cpp
deleted file mode 100644
index ecf3fa1..0000000
--- a/media/libstagefright/chromium_http/DataUriSource.cpp
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <include/DataUriSource.h>
-
-#include <net/base/data_url.h>
-#include <googleurl/src/gurl.h>
-
-
-namespace android {
-
-DataUriSource::DataUriSource(const char *uri) :
- mDataUri(uri),
- mInited(NO_INIT) {
-
- // Copy1: const char *uri -> String8 mDataUri.
- std::string mimeTypeStr, unusedCharsetStr, dataStr;
- // Copy2: String8 mDataUri -> std::string
- const bool ret = net::DataURL::Parse(
- GURL(std::string(mDataUri.string())),
- &mimeTypeStr, &unusedCharsetStr, &dataStr);
- // Copy3: std::string dataStr -> AString mData
- mData.setTo(dataStr.data(), dataStr.length());
- mInited = ret ? OK : UNKNOWN_ERROR;
-
- // The chromium data url implementation defaults to using "text/plain"
- // if no mime type is specified. We prefer to leave this unspecified
- // instead, since the mime type is sniffed in most cases.
- if (mimeTypeStr != "text/plain") {
- mMimeType = mimeTypeStr.c_str();
- }
-}
-
-ssize_t DataUriSource::readAt(off64_t offset, void *out, size_t size) {
- if (mInited != OK) {
- return mInited;
- }
-
- const off64_t length = mData.size();
- if (offset >= length) {
- return UNKNOWN_ERROR;
- }
-
- const char *dataBuf = mData.c_str();
- const size_t bytesToCopy =
- offset + size >= length ? (length - offset) : size;
-
- if (bytesToCopy > 0) {
- memcpy(out, dataBuf + offset, bytesToCopy);
- }
-
- return bytesToCopy;
-}
-
-} // namespace android
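DataUriSource::readAt() above clamps every read to the bytes remaining after the requested offset. The same bounds logic as a small standalone function, using a plain std::string in place of the AString payload:

    #include <sys/types.h>   // ssize_t, off_t
    #include <algorithm>
    #include <cstring>
    #include <string>

    // Copy at most `size` bytes starting at `offset`, never reading past the
    // end of the in-memory payload -- the same clamping as readAt() above.
    ssize_t readAt(const std::string &data, off_t offset, void *out, size_t size) {
        if (offset < 0 || (size_t)offset >= data.size()) {
            return -1;  // nothing left to read
        }
        size_t toCopy = std::min(size, data.size() - (size_t)offset);
        if (toCopy > 0) {
            memcpy(out, data.data() + offset, toCopy);
        }
        return (ssize_t)toCopy;
    }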
diff --git a/media/libstagefright/chromium_http/support.cpp b/media/libstagefright/chromium_http/support.cpp
deleted file mode 100644
index 3de4877..0000000
--- a/media/libstagefright/chromium_http/support.cpp
+++ /dev/null
@@ -1,659 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "ChromiumHTTPDataSourceSupport"
-#include <utils/Log.h>
-
-#include <media/stagefright/foundation/AString.h>
-
-#include "support.h"
-
-#include "android/net/android_network_library_impl.h"
-#include "base/logging.h"
-#include "base/threading/thread.h"
-#include "net/base/cert_verifier.h"
-#include "net/base/cookie_monster.h"
-#include "net/base/host_resolver.h"
-#include "net/base/ssl_config_service.h"
-#include "net/http/http_auth_handler_factory.h"
-#include "net/http/http_cache.h"
-#include "net/proxy/proxy_config_service_android.h"
-
-#include "include/ChromiumHTTPDataSource.h"
-#include <arpa/inet.h>
-#include <binder/Parcel.h>
-#include <cutils/log.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-#include <string>
-
-#include <utils/Errors.h>
-#include <binder/IInterface.h>
-#include <binder/IServiceManager.h>
-
-namespace android {
-
-// must be kept in sync with interface defined in IAudioService.aidl
-class IAudioService : public IInterface
-{
-public:
- DECLARE_META_INTERFACE(AudioService);
-
- virtual int verifyX509CertChain(
- const std::vector<std::string>& cert_chain,
- const std::string& hostname,
- const std::string& auth_type) = 0;
-};
-
-class BpAudioService : public BpInterface<IAudioService>
-{
-public:
- BpAudioService(const sp<IBinder>& impl)
- : BpInterface<IAudioService>(impl)
- {
- }
-
- virtual int verifyX509CertChain(
- const std::vector<std::string>& cert_chain,
- const std::string& hostname,
- const std::string& auth_type)
- {
- Parcel data, reply;
- data.writeInterfaceToken(IAudioService::getInterfaceDescriptor());
-
- // The vector of std::string we get isn't really a vector of strings,
- // but rather a vector of binary certificate data. If we try to pass
- // it to Java language code as a string, it ends up mangled on the other
- // side, so send them as bytes instead.
- // Since we can't send an array of byte arrays, send a single array,
- // which will be split out by the recipient.
-
- int numcerts = cert_chain.size();
- data.writeInt32(numcerts);
- size_t total = 0;
- for (int i = 0; i < numcerts; i++) {
- total += cert_chain[i].size();
- }
- size_t bytesize = total + numcerts * 4;
- uint8_t *bytes = (uint8_t*) malloc(bytesize);
- if (!bytes) {
- return 5; // SSL_INVALID
- }
- ALOGV("%d certs: %d -> %d", numcerts, total, bytesize);
-
- int offset = 0;
- for (int i = 0; i < numcerts; i++) {
- int32_t certsize = cert_chain[i].size();
- // store this in a known order, which just happens to match the default
- // byte order of a java ByteBuffer
- int32_t bigsize = htonl(certsize);
- ALOGV("cert %d, size %d", i, certsize);
- memcpy(bytes + offset, &bigsize, sizeof(bigsize));
- offset += sizeof(bigsize);
- memcpy(bytes + offset, cert_chain[i].data(), certsize);
- offset += certsize;
- }
- data.writeByteArray(bytesize, bytes);
- free(bytes);
- data.writeString16(String16(hostname.c_str()));
- data.writeString16(String16(auth_type.c_str()));
-
- int32_t result;
- if (remote()->transact(IBinder::FIRST_CALL_TRANSACTION, data, &reply) != NO_ERROR
- || reply.readExceptionCode() < 0 || reply.readInt32(&result) != NO_ERROR) {
- return 5; // SSL_INVALID;
- }
- return result;
- }
-
-};
-
-IMPLEMENT_META_INTERFACE(AudioService, "android.media.IAudioService");
-
-
-static Mutex gNetworkThreadLock;
-static base::Thread *gNetworkThread = NULL;
-static scoped_refptr<SfRequestContext> gReqContext;
-static scoped_ptr<net::NetworkChangeNotifier> gNetworkChangeNotifier;
-
-bool logMessageHandler(
- int severity,
- const char* file,
- int line,
- size_t message_start,
- const std::string& str) {
- int androidSeverity = ANDROID_LOG_VERBOSE;
- switch(severity) {
- case logging::LOG_FATAL:
- androidSeverity = ANDROID_LOG_FATAL;
- break;
- case logging::LOG_ERROR_REPORT:
- case logging::LOG_ERROR:
- androidSeverity = ANDROID_LOG_ERROR;
- break;
- case logging::LOG_WARNING:
- androidSeverity = ANDROID_LOG_WARN;
- break;
- default:
- androidSeverity = ANDROID_LOG_VERBOSE;
- break;
- }
- android_printLog(androidSeverity, "chromium-libstagefright",
- "%s:%d: %s", file, line, str.c_str());
- return false;
-}
-
-struct AutoPrioritySaver {
- AutoPrioritySaver()
- : mTID(androidGetTid()),
- mPrevPriority(androidGetThreadPriority(mTID)) {
- androidSetThreadPriority(mTID, ANDROID_PRIORITY_NORMAL);
- }
-
- ~AutoPrioritySaver() {
- androidSetThreadPriority(mTID, mPrevPriority);
- }
-
-private:
- pid_t mTID;
- int mPrevPriority;
-
- DISALLOW_EVIL_CONSTRUCTORS(AutoPrioritySaver);
-};
-
-static void InitializeNetworkThreadIfNecessary() {
- Mutex::Autolock autoLock(gNetworkThreadLock);
-
- if (gNetworkThread == NULL) {
- // Make sure any threads spawned by the chromium framework are
- // running at normal priority instead of inheriting this thread's.
- AutoPrioritySaver saver;
-
- gNetworkThread = new base::Thread("network");
- base::Thread::Options options;
- options.message_loop_type = MessageLoop::TYPE_IO;
- CHECK(gNetworkThread->StartWithOptions(options));
-
- gReqContext = new SfRequestContext;
-
- gNetworkChangeNotifier.reset(net::NetworkChangeNotifier::Create());
-
- net::AndroidNetworkLibrary::RegisterSharedInstance(
- new SfNetworkLibrary);
- logging::SetLogMessageHandler(logMessageHandler);
- }
-}
-
-static void MY_LOGI(const char *s) {
- LOG_PRI(ANDROID_LOG_INFO, LOG_TAG, "%s", s);
-}
-
-static void MY_LOGV(const char *s) {
-#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG, "%s", s);
-#endif
-}
-
-SfNetLog::SfNetLog()
- : mNextID(1) {
-}
-
-void SfNetLog::AddEntry(
- EventType type,
- const base::TimeTicks &time,
- const Source &source,
- EventPhase phase,
- EventParameters *params) {
-#if 0
- MY_LOGI(StringPrintf(
- "AddEntry time=%s type=%s source=%s phase=%s\n",
- TickCountToString(time).c_str(),
- EventTypeToString(type),
- SourceTypeToString(source.type),
- EventPhaseToString(phase)).c_str());
-#endif
-}
-
-uint32 SfNetLog::NextID() {
- return mNextID++;
-}
-
-net::NetLog::LogLevel SfNetLog::GetLogLevel() const {
- return LOG_BASIC;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfRequestContext::SfRequestContext() {
- mUserAgent = MakeUserAgent().c_str();
-
- set_net_log(new SfNetLog());
-
- set_host_resolver(
- net::CreateSystemHostResolver(
- net::HostResolver::kDefaultParallelism,
- NULL /* resolver_proc */,
- net_log()));
-
- set_ssl_config_service(
- net::SSLConfigService::CreateSystemSSLConfigService());
-
- mProxyConfigService = new net::ProxyConfigServiceAndroid;
-
- set_proxy_service(net::ProxyService::CreateWithoutProxyResolver(
- mProxyConfigService, net_log()));
-
- set_http_transaction_factory(new net::HttpCache(
- host_resolver(),
- new net::CertVerifier(),
- dnsrr_resolver(),
- dns_cert_checker(),
- proxy_service(),
- ssl_config_service(),
- net::HttpAuthHandlerFactory::CreateDefault(host_resolver()),
- network_delegate(),
- net_log(),
- NULL)); // backend_factory
-
- set_cookie_store(new net::CookieMonster(NULL, NULL));
-}
-
-const std::string &SfRequestContext::GetUserAgent(const GURL &url) const {
- return mUserAgent;
-}
-
-status_t SfRequestContext::updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- Mutex::Autolock autoLock(mProxyConfigLock);
-
- if (host == NULL || *host == '\0') {
- MY_LOGV("updateProxyConfig NULL");
-
- std::string proxy;
- std::string exList;
- mProxyConfigService->UpdateProxySettings(proxy, exList);
- } else {
-#if !defined(LOG_NDEBUG) || LOG_NDEBUG == 0
- LOG_PRI(ANDROID_LOG_VERBOSE, LOG_TAG,
- "updateProxyConfig %s:%d, exclude '%s'",
- host, port, exclusionList);
-#endif
-
- std::string proxy = StringPrintf("%s:%d", host, port).c_str();
- std::string exList = exclusionList;
- mProxyConfigService->UpdateProxySettings(proxy, exList);
- }
-
- return OK;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfNetworkLibrary::SfNetworkLibrary() {}
-
-SfNetworkLibrary::VerifyResult SfNetworkLibrary::VerifyX509CertChain(
- const std::vector<std::string>& cert_chain,
- const std::string& hostname,
- const std::string& auth_type) {
-
- sp<IBinder> binder =
- defaultServiceManager()->checkService(String16("audio"));
- if (binder == 0) {
- ALOGW("Thread cannot connect to the audio service");
- } else {
- sp<IAudioService> service = interface_cast<IAudioService>(binder);
- int code = service->verifyX509CertChain(cert_chain, hostname, auth_type);
- ALOGV("verified: %d", code);
- if (code == -1) {
- return VERIFY_OK;
- } else if (code == 2) { // SSL_IDMISMATCH
- return VERIFY_BAD_HOSTNAME;
- } else if (code == 3) { // SSL_UNTRUSTED
- return VERIFY_NO_TRUSTED_ROOT;
- }
- }
- return VERIFY_INVOCATION_ERROR;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-SfDelegate::SfDelegate()
- : mOwner(NULL),
- mURLRequest(NULL),
- mReadBuffer(new net::IOBufferWithSize(8192)),
- mNumBytesRead(0),
- mNumBytesTotal(0),
- mDataDestination(NULL),
- mAtEOS(false) {
- InitializeNetworkThreadIfNecessary();
-}
-
-SfDelegate::~SfDelegate() {
- CHECK(mURLRequest == NULL);
-}
-
-// static
-status_t SfDelegate::UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- InitializeNetworkThreadIfNecessary();
-
- return gReqContext->updateProxyConfig(host, port, exclusionList);
-}
-
-void SfDelegate::setOwner(ChromiumHTTPDataSource *owner) {
- mOwner = owner;
-}
-
-void SfDelegate::setUID(uid_t uid) {
- gReqContext->setUID(uid);
-}
-
-bool SfDelegate::getUID(uid_t *uid) const {
- return gReqContext->getUID(uid);
-}
-
-void SfDelegate::OnReceivedRedirect(
- net::URLRequest *request, const GURL &new_url, bool *defer_redirect) {
- MY_LOGV("OnReceivedRedirect");
- mOwner->onRedirect(new_url.spec().c_str());
-}
-
-void SfDelegate::OnAuthRequired(
- net::URLRequest *request, net::AuthChallengeInfo *auth_info) {
- MY_LOGV("OnAuthRequired");
-
- inherited::OnAuthRequired(request, auth_info);
-}
-
-void SfDelegate::OnCertificateRequested(
- net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info) {
- MY_LOGV("OnCertificateRequested");
-
- inherited::OnCertificateRequested(request, cert_request_info);
-}
-
-void SfDelegate::OnSSLCertificateError(
- net::URLRequest *request, int cert_error, net::X509Certificate *cert) {
- fprintf(stderr, "OnSSLCertificateError cert_error=%d\n", cert_error);
-
- inherited::OnSSLCertificateError(request, cert_error, cert);
-}
-
-void SfDelegate::OnGetCookies(net::URLRequest *request, bool blocked_by_policy) {
- MY_LOGV("OnGetCookies");
-}
-
-void SfDelegate::OnSetCookie(
- net::URLRequest *request,
- const std::string &cookie_line,
- const net::CookieOptions &options,
- bool blocked_by_policy) {
- MY_LOGV("OnSetCookie");
-}
-
-void SfDelegate::OnResponseStarted(net::URLRequest *request) {
- if (request->status().status() != net::URLRequestStatus::SUCCESS) {
- MY_LOGI(StringPrintf(
- "Request failed with status %d and os_error %d",
- request->status().status(),
- request->status().os_error()).c_str());
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onConnectionFailed(ERROR_IO);
- return;
- } else if (mRangeRequested && request->GetResponseCode() != 206) {
- MY_LOGI(StringPrintf(
- "We requested a content range, but server didn't "
- "support that. (responded with %d)",
- request->GetResponseCode()).c_str());
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onConnectionFailed(-EPIPE);
- return;
- } else if ((request->GetResponseCode() / 100) != 2) {
- MY_LOGI(StringPrintf(
- "Server responded with http status %d",
- request->GetResponseCode()).c_str());
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onConnectionFailed(ERROR_IO);
- return;
- }
-
- MY_LOGV("OnResponseStarted");
-
- std::string headers;
- request->GetAllResponseHeaders(&headers);
-
- MY_LOGV(StringPrintf("response headers: %s", headers.c_str()).c_str());
-
- std::string contentType;
- request->GetResponseHeaderByName("Content-Type", &contentType);
-
- mOwner->onConnectionEstablished(
- request->GetExpectedContentSize(), contentType.c_str());
-}
-
-void SfDelegate::OnReadCompleted(net::URLRequest *request, int bytes_read) {
- if (bytes_read == -1) {
- MY_LOGI(StringPrintf(
- "OnReadCompleted, read failed, status %d",
- request->status().status()).c_str());
-
- mOwner->onReadCompleted(ERROR_IO);
- return;
- }
-
- MY_LOGV(StringPrintf("OnReadCompleted, read %d bytes", bytes_read).c_str());
-
- if (bytes_read < 0) {
- MY_LOGI(StringPrintf(
- "Read failed w/ status %d\n",
- request->status().status()).c_str());
-
- mOwner->onReadCompleted(ERROR_IO);
- return;
- } else if (bytes_read == 0) {
- mAtEOS = true;
- mOwner->onReadCompleted(mNumBytesRead);
- return;
- }
-
- CHECK_GT(bytes_read, 0);
- CHECK_LE(mNumBytesRead + bytes_read, mNumBytesTotal);
-
- memcpy((uint8_t *)mDataDestination + mNumBytesRead,
- mReadBuffer->data(),
- bytes_read);
-
- mNumBytesRead += bytes_read;
-
- readMore(request);
-}
-
-void SfDelegate::readMore(net::URLRequest *request) {
- while (mNumBytesRead < mNumBytesTotal) {
- size_t copy = mNumBytesTotal - mNumBytesRead;
- if (copy > mReadBuffer->size()) {
- copy = mReadBuffer->size();
- }
-
- int n;
- if (request->Read(mReadBuffer, copy, &n)) {
- MY_LOGV(StringPrintf("Read %d bytes directly.", n).c_str());
-
- CHECK_LE((size_t)n, copy);
-
- memcpy((uint8_t *)mDataDestination + mNumBytesRead,
- mReadBuffer->data(),
- n);
-
- mNumBytesRead += n;
-
- if (n == 0) {
- mAtEOS = true;
- break;
- }
- } else {
- MY_LOGV("readMore pending read");
-
- if (request->status().status() != net::URLRequestStatus::IO_PENDING) {
- MY_LOGI(StringPrintf(
- "Direct read failed w/ status %d\n",
- request->status().status()).c_str());
-
- mOwner->onReadCompleted(ERROR_IO);
- return;
- }
-
- return;
- }
- }
-
- mOwner->onReadCompleted(mNumBytesRead);
-}
-
-void SfDelegate::initiateConnection(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- GURL url(uri);
-
- MessageLoop *loop = gNetworkThread->message_loop();
- loop->PostTask(
- FROM_HERE,
- NewRunnableFunction(
- &SfDelegate::OnInitiateConnectionWrapper,
- this,
- url,
- headers,
- offset));
-
-}
-
-// static
-void SfDelegate::OnInitiateConnectionWrapper(
- SfDelegate *me, GURL url,
- const KeyedVector<String8, String8> *headers,
- off64_t offset) {
- me->onInitiateConnection(url, headers, offset);
-}
-
-void SfDelegate::onInitiateConnection(
- const GURL &url,
- const KeyedVector<String8, String8> *extra,
- off64_t offset) {
- CHECK(mURLRequest == NULL);
-
- mURLRequest = new net::URLRequest(url, this);
- mAtEOS = false;
-
- mRangeRequested = false;
-
- if (offset != 0 || extra != NULL) {
- net::HttpRequestHeaders headers =
- mURLRequest->extra_request_headers();
-
- if (offset != 0) {
- headers.AddHeaderFromString(
- StringPrintf("Range: bytes=%lld-", offset).c_str());
-
- mRangeRequested = true;
- }
-
- if (extra != NULL) {
- for (size_t i = 0; i < extra->size(); ++i) {
- AString s;
- s.append(extra->keyAt(i).string());
- s.append(": ");
- s.append(extra->valueAt(i).string());
-
- headers.AddHeaderFromString(s.c_str());
- }
- }
-
- mURLRequest->SetExtraRequestHeaders(headers);
- }
-
- mURLRequest->set_context(gReqContext);
-
- mURLRequest->Start();
-}
-
-void SfDelegate::initiateDisconnect() {
- MessageLoop *loop = gNetworkThread->message_loop();
- loop->PostTask(
- FROM_HERE,
- NewRunnableFunction(
- &SfDelegate::OnInitiateDisconnectWrapper, this));
-}
-
-// static
-void SfDelegate::OnInitiateDisconnectWrapper(SfDelegate *me) {
- me->onInitiateDisconnect();
-}
-
-void SfDelegate::onInitiateDisconnect() {
- if (mURLRequest == NULL) {
- return;
- }
-
- mURLRequest->Cancel();
-
- delete mURLRequest;
- mURLRequest = NULL;
-
- mOwner->onDisconnectComplete();
-}
-
-void SfDelegate::initiateRead(void *data, size_t size) {
- MessageLoop *loop = gNetworkThread->message_loop();
- loop->PostTask(
- FROM_HERE,
- NewRunnableFunction(
- &SfDelegate::OnInitiateReadWrapper, this, data, size));
-}
-
-// static
-void SfDelegate::OnInitiateReadWrapper(
- SfDelegate *me, void *data, size_t size) {
- me->onInitiateRead(data, size);
-}
-
-void SfDelegate::onInitiateRead(void *data, size_t size) {
- CHECK(mURLRequest != NULL);
-
- mNumBytesRead = 0;
- mNumBytesTotal = size;
- mDataDestination = data;
-
- if (mAtEOS) {
- mOwner->onReadCompleted(0);
- return;
- }
-
- readMore(mURLRequest);
-}
-
-} // namespace android
-
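The comment in BpAudioService::verifyX509CertChain() above explains why the certificate chain is flattened into one byte array: each certificate is preceded by its size as a 32-bit big-endian integer, which matches the default byte order of a Java ByteBuffer on the receiving side. A standalone sketch of just that packing step, without the Binder plumbing:

    #include <arpa/inet.h>   // htonl
    #include <cstdint>
    #include <string>
    #include <vector>

    // Flatten a certificate chain into one buffer: 4-byte big-endian length
    // prefix followed by the raw certificate bytes, for each entry in order.
    std::vector<uint8_t> packCertChain(const std::vector<std::string> &chain) {
        std::vector<uint8_t> out;
        for (const std::string &cert : chain) {
            uint32_t beSize = htonl((uint32_t)cert.size());
            const uint8_t *p = (const uint8_t *)&beSize;
            out.insert(out.end(), p, p + sizeof(beSize));     // length prefix
            out.insert(out.end(), cert.begin(), cert.end());  // certificate bytes
        }
        return out;
    }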
diff --git a/media/libstagefright/chromium_http/support.h b/media/libstagefright/chromium_http/support.h
deleted file mode 100644
index 975a1d3..0000000
--- a/media/libstagefright/chromium_http/support.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SUPPORT_H_
-
-#define SUPPORT_H_
-
-#include <assert.h>
-
-#include "net/base/net_log.h"
-#include "net/url_request/url_request.h"
-#include "net/url_request/url_request_context.h"
-#include "net/base/android_network_library.h"
-#include "net/base/io_buffer.h"
-
-#include <utils/KeyedVector.h>
-#include <utils/Mutex.h>
-#include <utils/String8.h>
-
-namespace net {
- struct ProxyConfigServiceAndroid;
-};
-
-namespace android {
-
-struct SfNetLog : public net::NetLog {
- SfNetLog();
-
- virtual void AddEntry(
- EventType type,
- const base::TimeTicks &time,
- const Source &source,
- EventPhase phase,
- EventParameters *params);
-
- virtual uint32 NextID();
- virtual LogLevel GetLogLevel() const;
-
-private:
- uint32 mNextID;
-
- DISALLOW_EVIL_CONSTRUCTORS(SfNetLog);
-};
-
-struct SfRequestContext : public net::URLRequestContext {
- SfRequestContext();
-
- virtual const std::string &GetUserAgent(const GURL &url) const;
-
- status_t updateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
-private:
- Mutex mProxyConfigLock;
-
- std::string mUserAgent;
- net::ProxyConfigServiceAndroid *mProxyConfigService;
-
- DISALLOW_EVIL_CONSTRUCTORS(SfRequestContext);
-};
-
-// This is required for https support, we don't really verify certificates,
-// we accept anything...
-struct SfNetworkLibrary : public net::AndroidNetworkLibrary {
- SfNetworkLibrary();
-
- virtual VerifyResult VerifyX509CertChain(
- const std::vector<std::string>& cert_chain,
- const std::string& hostname,
- const std::string& auth_type);
-
-private:
- DISALLOW_EVIL_CONSTRUCTORS(SfNetworkLibrary);
-};
-
-struct ChromiumHTTPDataSource;
-
-struct SfDelegate : public net::URLRequest::Delegate {
- SfDelegate();
- virtual ~SfDelegate();
-
- void initiateConnection(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- void initiateDisconnect();
- void initiateRead(void *data, size_t size);
-
- void setOwner(ChromiumHTTPDataSource *mOwner);
-
- // Gets the UID of the calling process
- bool getUID(uid_t *uid) const;
-
- void setUID(uid_t uid);
-
- virtual void OnReceivedRedirect(
- net::URLRequest *request, const GURL &new_url, bool *defer_redirect);
-
- virtual void OnAuthRequired(
- net::URLRequest *request, net::AuthChallengeInfo *auth_info);
-
- virtual void OnCertificateRequested(
- net::URLRequest *request, net::SSLCertRequestInfo *cert_request_info);
-
- virtual void OnSSLCertificateError(
- net::URLRequest *request, int cert_error, net::X509Certificate *cert);
-
- virtual void OnGetCookies(net::URLRequest *request, bool blocked_by_policy);
-
- virtual void OnSetCookie(
- net::URLRequest *request,
- const std::string &cookie_line,
- const net::CookieOptions &options,
- bool blocked_by_policy);
-
- virtual void OnResponseStarted(net::URLRequest *request);
-
- virtual void OnReadCompleted(net::URLRequest *request, int bytes_read);
-
- static status_t UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
-private:
- typedef Delegate inherited;
-
- ChromiumHTTPDataSource *mOwner;
-
- net::URLRequest *mURLRequest;
- scoped_refptr<net::IOBufferWithSize> mReadBuffer;
-
- size_t mNumBytesRead;
- size_t mNumBytesTotal;
- void *mDataDestination;
-
- bool mRangeRequested;
- bool mAtEOS;
-
- void readMore(net::URLRequest *request);
-
- static void OnInitiateConnectionWrapper(
- SfDelegate *me,
- GURL url,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- static void OnInitiateDisconnectWrapper(SfDelegate *me);
-
- static void OnInitiateReadWrapper(
- SfDelegate *me, void *data, size_t size);
-
- void onInitiateConnection(
- const GURL &url,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- void onInitiateDisconnect();
- void onInitiateRead(void *data, size_t size);
-
- DISALLOW_EVIL_CONSTRUCTORS(SfDelegate);
-};
-
-} // namespace android
-
-#endif // SUPPORT_H_
diff --git a/media/libstagefright/chromium_http_stub.cpp b/media/libstagefright/chromium_http_stub.cpp
deleted file mode 100644
index ed8a878..0000000
--- a/media/libstagefright/chromium_http_stub.cpp
+++ /dev/null
@@ -1,102 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <dlfcn.h>
-
-#include <media/stagefright/DataSource.h>
-
-#include "include/chromium_http_stub.h"
-#include "include/HTTPBase.h"
-
-namespace android {
-
-static bool gFirst = true;
-static void *gHandle;
-static Mutex gLibMutex;
-
-HTTPBase *(*gLib_createChromiumHTTPDataSource)(uint32_t flags);
-DataSource *(*gLib_createDataUriSource)(const char *uri);
-
-status_t (*gLib_UpdateChromiumHTTPDataSourceProxyConfig)(
- const char *host, int32_t port, const char *exclusionList);
-
-static bool load_libstagefright_chromium_http() {
- Mutex::Autolock autoLock(gLibMutex);
- void *sym;
-
- if (!gFirst) {
- return (gHandle != NULL);
- }
-
- gFirst = false;
-
- gHandle = dlopen("libstagefright_chromium_http.so", RTLD_NOW);
- if (gHandle == NULL) {
- return false;
- }
-
- sym = dlsym(gHandle, "createChromiumHTTPDataSource");
- if (sym == NULL) {
- gHandle = NULL;
- return false;
- }
- gLib_createChromiumHTTPDataSource = (HTTPBase *(*)(uint32_t))sym;
-
- sym = dlsym(gHandle, "createDataUriSource");
- if (sym == NULL) {
- gHandle = NULL;
- return false;
- }
- gLib_createDataUriSource = (DataSource *(*)(const char *))sym;
-
- sym = dlsym(gHandle, "UpdateChromiumHTTPDataSourceProxyConfig");
- if (sym == NULL) {
- gHandle = NULL;
- return false;
- }
- gLib_UpdateChromiumHTTPDataSourceProxyConfig =
- (status_t (*)(const char *, int32_t, const char *))sym;
-
- return true;
-}
-
-HTTPBase *createChromiumHTTPDataSource(uint32_t flags) {
- if (!load_libstagefright_chromium_http()) {
- return NULL;
- }
-
- return gLib_createChromiumHTTPDataSource(flags);
-}
-
-status_t UpdateChromiumHTTPDataSourceProxyConfig(
- const char *host, int32_t port, const char *exclusionList) {
- if (!load_libstagefright_chromium_http()) {
- return INVALID_OPERATION;
- }
-
- return gLib_UpdateChromiumHTTPDataSourceProxyConfig(
- host, port, exclusionList);
-}
-
-DataSource *createDataUriSource(const char *uri) {
- if (!load_libstagefright_chromium_http()) {
- return NULL;
- }
-
- return gLib_createDataUriSource(uri);
-}
-
-}
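chromium_http_stub.cpp above resolves the optional HTTP implementation exactly once at runtime and fails soft when either the library or a symbol is missing. A self-contained sketch of the same one-shot dlopen/dlsym pattern; "libexample.so" and "create_example" are hypothetical names, not real Android symbols:

    #include <dlfcn.h>
    #include <mutex>

    typedef void *(*CreateFn)();

    // Resolve the factory symbol at most once; callers treat a null return
    // as "feature unavailable", just like the stub loader above.
    static CreateFn loadFactory() {
        static std::once_flag once;
        static CreateFn fn = nullptr;
        std::call_once(once, [] {
            void *handle = dlopen("libexample.so", RTLD_NOW);
            if (handle != nullptr) {
                fn = (CreateFn)dlsym(handle, "create_example");
            }
            // On any failure fn stays nullptr and callers fall back gracefully.
        });
        return fn;
    }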
diff --git a/media/libstagefright/codecs/aacdec/Android.mk b/media/libstagefright/codecs/aacdec/Android.mk
index ffa64f9..afb00aa 100644
--- a/media/libstagefright/codecs/aacdec/Android.mk
+++ b/media/libstagefright/codecs/aacdec/Android.mk
@@ -3,7 +3,8 @@ LOCAL_PATH:= $(call my-dir)
include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
- SoftAAC2.cpp
+ SoftAAC2.cpp \
+ DrcPresModeWrap.cpp
LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/include \
@@ -17,6 +18,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS :=
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
new file mode 100644
index 0000000..129ad65
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.cpp
@@ -0,0 +1,372 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "DrcPresModeWrap.h"
+
+#include <assert.h>
+
+#define LOG_TAG "SoftAAC2_DrcWrapper"
+//#define LOG_NDEBUG 0
+#include <utils/Log.h>
+
+//#define DRC_PRES_MODE_WRAP_DEBUG
+
+#define GPM_ENCODER_TARGET_LEVEL 64
+#define MAX_TARGET_LEVEL 64
+
+CDrcPresModeWrapper::CDrcPresModeWrapper()
+{
+ mDataUpdate = true;
+
+ /* Data from streamInfo. */
+ /* Initialized to the same values as in the aac decoder */
+ mStreamPRL = -1;
+ mStreamDRCPresMode = -1;
+ mStreamNrAACChan = 0;
+ mStreamNrOutChan = 0;
+
+ /* Desired values (set by user). */
+ /* Initialized to the same values as in the aac decoder */
+ mDesTarget = -1;
+ mDesAttFactor = 0;
+ mDesBoostFactor = 0;
+ mDesHeavy = 0;
+
+ mEncoderTarget = -1;
+
+ /* Values from last time. */
+ /* Initialized to the same values as the desired values */
+ mLastTarget = -1;
+ mLastAttFactor = 0;
+ mLastBoostFactor = 0;
+ mLastHeavy = 0;
+}
+
+CDrcPresModeWrapper::~CDrcPresModeWrapper()
+{
+}
+
+void
+CDrcPresModeWrapper::setDecoderHandle(const HANDLE_AACDECODER handle)
+{
+ mHandleDecoder = handle;
+}
+
+void
+CDrcPresModeWrapper::submitStreamData(CStreamInfo* pStreamInfo)
+{
+ assert(pStreamInfo);
+
+ if (mStreamPRL != pStreamInfo->drcProgRefLev) {
+ mStreamPRL = pStreamInfo->drcProgRefLev;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcProgRefLev is %d\n", mStreamPRL);
+#endif
+ }
+
+ if (mStreamDRCPresMode != pStreamInfo->drcPresMode) {
+ mStreamDRCPresMode = pStreamInfo->drcPresMode;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: drcPresMode is %d\n", mStreamDRCPresMode);
+#endif
+ }
+
+ if (mStreamNrAACChan != pStreamInfo->aacNumChannels) {
+ mStreamNrAACChan = pStreamInfo->aacNumChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: aacNumChannels is %d\n", mStreamNrAACChan);
+#endif
+ }
+
+ if (mStreamNrOutChan != pStreamInfo->numChannels) {
+ mStreamNrOutChan = pStreamInfo->numChannels;
+ mDataUpdate = true;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC presentation mode wrapper: numChannels is %d\n", mStreamNrOutChan);
+#endif
+ }
+
+
+
+ if (mStreamNrOutChan<mStreamNrAACChan) {
+ mIsDownmix = true;
+ } else {
+ mIsDownmix = false;
+ }
+
+ if (mIsDownmix && (mStreamNrOutChan == 1)) {
+ mIsMonoDownmix = true;
+ } else {
+ mIsMonoDownmix = false;
+ }
+
+ if (mIsDownmix && mStreamNrOutChan == 2){
+ mIsStereoDownmix = true;
+ } else {
+ mIsStereoDownmix = false;
+ }
+
+}
+
+void
+CDrcPresModeWrapper::setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value)
+{
+ switch (param) {
+ case DRC_PRES_MODE_WRAP_DESIRED_TARGET:
+ mDesTarget = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR:
+ mDesAttFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR:
+ mDesBoostFactor = value;
+ break;
+ case DRC_PRES_MODE_WRAP_DESIRED_HEAVY:
+ mDesHeavy = value;
+ break;
+ case DRC_PRES_MODE_WRAP_ENCODER_TARGET:
+ mEncoderTarget = value;
+ break;
+ default:
+ break;
+ }
+ mDataUpdate = true;
+}
+
+void
+CDrcPresModeWrapper::update()
+{
+ // Get Data from Decoder
+ int progRefLevel = mStreamPRL;
+ int drcPresMode = mStreamDRCPresMode;
+
+ // by default, do as desired
+ int newTarget = mDesTarget;
+ int newAttFactor = mDesAttFactor;
+ int newBoostFactor = mDesBoostFactor;
+ int newHeavy = mDesHeavy;
+
+ if (mDataUpdate) {
+ // sanity check
+ if (mDesTarget < MAX_TARGET_LEVEL){
+ mDesTarget = MAX_TARGET_LEVEL; // limit target level to -16 dB or below
+ newTarget = MAX_TARGET_LEVEL;
+ }
+
+ if (mEncoderTarget != -1) {
+ if (mDesTarget<124) { // if target level > -31 dB
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+                    // no stereo or mono downmixing: calculate scaling of light DRC
+ /* use as little compression as possible */
+ newAttFactor = 0;
+ newBoostFactor = 0;
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ if (mEncoderTarget < mDesTarget) { // if mEncoderTarget > target level
+ // mEncoderTarget > target level > PRL
+ int calcFactor;
+ float calcFactor_norm;
+ // 0.0f < calcFactor_norm < 1.0f
+ calcFactor_norm = (float)(mDesTarget - progRefLevel) /
+ (float)(mEncoderTarget - progRefLevel);
+ calcFactor = (int)(calcFactor_norm*127.0f); // 0 <= calcFactor < 127
+ // calcFactor is the lower limit
+ newAttFactor = (calcFactor>newAttFactor) ? calcFactor : newAttFactor;
+                            // newAttFactor will always equal calcFactor, as it was set to 0 above.
+ newBoostFactor = newAttFactor;
+ } else {
+ /* target level > mEncoderTarget > PRL */
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ newBoostFactor = 127;
+ }
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget<92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else {
+ // we perform a downmix, so, we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB: light DRC only needed if we perform downmixing
+ if (mIsDownmix) { // we do downmixing
+ newAttFactor = 127;
+ }
+ }
+ }
+ else { // handle other used encoder target levels
+
+ // Sanity check: DRC presentation mode is only specified for max. 5.1 channels
+ if (mStreamNrAACChan > 6) {
+ drcPresMode = 0;
+ }
+
+ switch (drcPresMode) {
+ case 0:
+ default: // presentation mode not indicated
+ {
+
+ if (mDesTarget<124) { // if target level > -31 dB
+ // no stereo or mono downmixing
+ if ((mIsStereoDownmix == false) && (mIsMonoDownmix == false)) {
+ if (mDesTarget<progRefLevel) { // if target level > PRL
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127; // at least, use light compression
+ } else { // target level <= PRL
+ // no restrictions required
+ // newAttFactor = newAttFactor;
+ }
+ } else { // downmixing
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+
+ // if target level > -23 dB or mono downmix
+ if ( (mDesTarget < 92) || mIsMonoDownmix ) {
+ newHeavy = 1;
+ } else{
+ // we perform a downmix, so, we need at least full light DRC
+ newAttFactor = 127;
+ }
+ }
+ } else { // target level <= -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ // newTDLimiterEnable = 1;
+ // the time domain limiter must always be active in this case.
+ // It is assumed that the framework activates it by default
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ // Presentation mode 1 and 2 according to ETSI TS 101 154:
+ // Digital Video Broadcasting (DVB); Specification for the use of Video and Audio Coding
+ // in Broadcasting Applications based on the MPEG-2 Transport Stream,
+ // section C.5.4., "Decoding", and Table C.33
+ // ISO DRC -> newHeavy = 0 (Use light compression, MPEG-style)
+ // Compression_value -> newHeavy = 1 (Use heavy compression, DVB-style)
+ // scaling restricted -> newAttFactor = 127
+
+ case 1: // presentation mode 1, Light:-31/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ newHeavy = 1;
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ case 2: // presentation mode 2, Light:-23/Heavy:-23
+ {
+ if (mDesTarget < 124) { // if target level > -31 dB
+ // playback up to -23 dB
+ if (mIsMonoDownmix) { // if mono downmix
+ newHeavy = 1;
+ } else {
+ newHeavy = 0;
+ newAttFactor = 127;
+ }
+ } else { // target level <= -31 dB
+ // playback -31 dB
+ newHeavy = 0;
+ if (mIsDownmix) { // we do downmixing.
+ newAttFactor = 127;
+ }
+ }
+ }
+ break;
+
+ } // switch()
+ } // if (mEncoderTarget == GPM_ENCODER_TARGET_LEVEL)
+
+ // sanity again
+ if (newHeavy == 1) {
+ newBoostFactor=127; // not really needed as the same would be done by the decoder anyway
+ newAttFactor = 127;
+ }
+
+ // update the decoder
+ if (newTarget != mLastTarget) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_REFERENCE_LEVEL, newTarget);
+ mLastTarget = newTarget;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newTarget != mDesTarget)
+ ALOGV("DRC presentation mode wrapper: forced target level to %d (from %d)\n", newTarget, mDesTarget);
+ else
+ ALOGV("DRC presentation mode wrapper: set target level to %d\n", newTarget);
+#endif
+ }
+
+ if (newAttFactor != mLastAttFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_ATTENUATION_FACTOR, newAttFactor);
+ mLastAttFactor = newAttFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newAttFactor != mDesAttFactor)
+ ALOGV("DRC presentation mode wrapper: forced attenuation factor to %d (from %d)\n", newAttFactor, mDesAttFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set attenuation factor to %d\n", newAttFactor);
+#endif
+ }
+
+ if (newBoostFactor != mLastBoostFactor) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_BOOST_FACTOR, newBoostFactor);
+ mLastBoostFactor = newBoostFactor;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newBoostFactor != mDesBoostFactor)
+ ALOGV("DRC presentation mode wrapper: forced boost factor to %d (from %d)\n",
+ newBoostFactor, mDesBoostFactor);
+ else
+ ALOGV("DRC presentation mode wrapper: set boost factor to %d\n", newBoostFactor);
+#endif
+ }
+
+ if (newHeavy != mLastHeavy) {
+ aacDecoder_SetParam(mHandleDecoder, AAC_DRC_HEAVY_COMPRESSION, newHeavy);
+ mLastHeavy = newHeavy;
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ if (newHeavy != mDesHeavy)
+ ALOGV("DRC presentation mode wrapper: forced heavy compression to %d (from %d)\n",
+ newHeavy, mDesHeavy);
+ else
+ ALOGV("DRC presentation mode wrapper: set heavy compression to %d\n", newHeavy);
+#endif
+ }
+
+#ifdef DRC_PRES_MODE_WRAP_DEBUG
+ ALOGV("DRC config: tgt_lev: %3d, cut: %3d, boost: %3d, heavy: %d\n", newTarget,
+ newAttFactor, newBoostFactor, newHeavy);
+#endif
+ mDataUpdate = false;
+
+ } // if (mDataUpdate)
+}
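For the attenuation-factor scaling in update() above it helps to plug in numbers. Levels are expressed in -0.25 dB steps, so a larger value means a quieter (more negative) level: 64 = -16 dB, 92 = -23 dB, 124 = -31 dB. A standalone example for the branch where, in dB, encoder target > desired target > program reference level:

    #include <cstdio>

    int main() {
        int progRefLevel  = 124;   // stream PRL:             -31 dB
        int desTarget     =  92;   // desired playback level: -23 dB
        int encoderTarget =  64;   // encoder target level:   -16 dB

        // 0.0f < norm < 1.0f in this configuration
        float norm = (float)(desTarget - progRefLevel)
                   / (float)(encoderTarget - progRefLevel);   // 32/60 ~= 0.53
        int calcFactor = (int)(norm * 127.0f);                // ~= 67

        printf("lower limit for the DRC attenuation factor: %d\n", calcFactor);
        return 0;
    }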
diff --git a/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
new file mode 100644
index 0000000..f0b6cf2
--- /dev/null
+++ b/media/libstagefright/codecs/aacdec/DrcPresModeWrap.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+#include "aacdecoder_lib.h"
+
+typedef enum
+{
+ DRC_PRES_MODE_WRAP_DESIRED_TARGET = 0x0000,
+ DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR = 0x0001,
+ DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR = 0x0002,
+ DRC_PRES_MODE_WRAP_DESIRED_HEAVY = 0x0003,
+ DRC_PRES_MODE_WRAP_ENCODER_TARGET = 0x0004
+} DRC_PRES_MODE_WRAP_PARAM;
+
+
+class CDrcPresModeWrapper {
+public:
+ CDrcPresModeWrapper();
+ ~CDrcPresModeWrapper();
+ void setDecoderHandle(const HANDLE_AACDECODER handle);
+ void setParam(const DRC_PRES_MODE_WRAP_PARAM param, const int value);
+ void submitStreamData(CStreamInfo*);
+ void update();
+
+protected:
+ HANDLE_AACDECODER mHandleDecoder;
+ int mDesTarget;
+ int mDesAttFactor;
+ int mDesBoostFactor;
+ int mDesHeavy;
+
+ int mEncoderTarget;
+
+ int mLastTarget;
+ int mLastAttFactor;
+ int mLastBoostFactor;
+ int mLastHeavy;
+
+ SCHAR mStreamPRL;
+ SCHAR mStreamDRCPresMode;
+ INT mStreamNrAACChan;
+ INT mStreamNrOutChan;
+
+ bool mIsDownmix;
+ bool mIsMonoDownmix;
+ bool mIsStereoDownmix;
+
+ bool mDataUpdate;
+};
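A minimal usage sketch for the wrapper declared above, following the call order SoftAAC2.cpp uses later in this change: bind the decoder handle, push the desired settings once, then resubmit stream data and call update() before each decoded frame. The decoder handle and CStreamInfo pointer are assumed to come from aacDecoder_Open() / aacDecoder_GetStreamInfo(); this is a fragment, not a complete decoder setup.

    #include "DrcPresModeWrap.h"

    void configureAndUpdateDrc(CDrcPresModeWrapper &drcWrap,
                               HANDLE_AACDECODER decoder, CStreamInfo *streamInfo) {
        drcWrap.setDecoderHandle(decoder);

        drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, 64);        // -16 dB playback target
        drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, 127);   // full "cut"
        drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, 127); // full "boost"
        drcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, 1);          // allow heavy compression
        drcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, -1);        // encoder target unknown

        // Per decoded frame: refresh the stream data, then let the wrapper
        // reprogram the decoder only when something actually changed.
        drcWrap.submitStreamData(streamInfo);
        drcWrap.update();
    }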
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index d4b0de7..64bf2b6 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -25,16 +25,22 @@
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/MediaErrors.h>
+#include <math.h>
+
#define FILEREAD_MAX_LAYERS 2
#define DRC_DEFAULT_MOBILE_REF_LEVEL 64 /* 64*-0.25dB = -16 dB below full scale for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_CUT 127 /* maximum compression of dynamic range for mobile conf */
#define DRC_DEFAULT_MOBILE_DRC_BOOST 127 /* maximum compression of dynamic range for mobile conf */
-#define MAX_CHANNEL_COUNT 6 /* maximum number of audio channels that can be decoded */
+#define DRC_DEFAULT_MOBILE_DRC_HEAVY 1 /* switch for heavy compression for mobile conf */
+#define DRC_DEFAULT_MOBILE_ENC_LEVEL -1 /* encoder target level; -1 => the value is unknown, otherwise dB step value (e.g. 64 for -16 dB) */
+#define MAX_CHANNEL_COUNT 8 /* maximum number of audio channels that can be decoded */
// names of properties that can be used to override the default DRC settings
#define PROP_DRC_OVERRIDE_REF_LEVEL "aac_drc_reference_level"
#define PROP_DRC_OVERRIDE_CUT "aac_drc_cut"
#define PROP_DRC_OVERRIDE_BOOST "aac_drc_boost"
+#define PROP_DRC_OVERRIDE_HEAVY "aac_drc_heavy"
+#define PROP_DRC_OVERRIDE_ENC_LEVEL "aac_drc_enc_target_level"
namespace android {
@@ -57,18 +63,19 @@ SoftAAC2::SoftAAC2(
mStreamInfo(NULL),
mIsADTS(false),
mInputBufferCount(0),
+ mOutputBufferCount(0),
mSignalledError(false),
- mSawInputEos(false),
- mSignalledOutputEos(false),
- mAnchorTimeUs(0),
- mNumSamplesOutput(0),
mOutputPortSettingsChange(NONE) {
+ for (unsigned int i = 0; i < kNumDelayBlocksMax; i++) {
+ mAnchorTimeUs[i] = 0;
+ }
initPorts();
CHECK_EQ(initDecoder(), (status_t)OK);
}
SoftAAC2::~SoftAAC2() {
aacDecoder_Close(mAACDecoder);
+ delete mOutputDelayRingBuffer;
}
void SoftAAC2::initPorts() {
@@ -121,36 +128,72 @@ status_t SoftAAC2::initDecoder() {
status = OK;
}
}
- mDecoderHasData = false;
- // for streams that contain metadata, use the mobile profile DRC settings unless overridden
- // by platform properties:
+ mEndOfInput = false;
+ mEndOfOutput = false;
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
+ mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize];
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+
+ if (mAACDecoder == NULL) {
+        ALOGE("AAC decoder is null. TODO: cannot call aacDecoder_SetParam in the following code");
+ }
+
+ //aacDecoder_SetParam(mAACDecoder, AAC_PCM_LIMITER_ENABLE, 0);
+
+ //init DRC wrapper
+ mDrcWrap.setDecoderHandle(mAACDecoder);
+ mDrcWrap.submitStreamData(mStreamInfo);
+
+ // for streams that contain metadata, use the mobile profile DRC settings unless overridden by platform properties
+    // TODO: change the DRC settings depending on the audio output device type (HDMI, loudspeaker, headphone)
char value[PROPERTY_VALUE_MAX];
- // * AAC_DRC_REFERENCE_LEVEL
+ // DRC_PRES_MODE_WRAP_DESIRED_TARGET
if (property_get(PROP_DRC_OVERRIDE_REF_LEVEL, value, NULL)) {
unsigned refLevel = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_REFERENCE_LEVEL of %d instead of %d",
- refLevel, DRC_DEFAULT_MOBILE_REF_LEVEL);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_REFERENCE_LEVEL, refLevel);
+ ALOGV("AAC decoder using desired DRC target reference level of %d instead of %d", refLevel,
+ DRC_DEFAULT_MOBILE_REF_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, refLevel);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_REFERENCE_LEVEL, DRC_DEFAULT_MOBILE_REF_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_TARGET, DRC_DEFAULT_MOBILE_REF_LEVEL);
}
- // * AAC_DRC_ATTENUATION_FACTOR
+ // DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR
if (property_get(PROP_DRC_OVERRIDE_CUT, value, NULL)) {
unsigned cut = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_ATTENUATION_FACTOR of %d instead of %d",
- cut, DRC_DEFAULT_MOBILE_DRC_CUT);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_ATTENUATION_FACTOR, cut);
+ ALOGV("AAC decoder using desired DRC attenuation factor of %d instead of %d", cut,
+ DRC_DEFAULT_MOBILE_DRC_CUT);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, cut);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_ATTENUATION_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_ATT_FACTOR, DRC_DEFAULT_MOBILE_DRC_CUT);
}
- // * AAC_DRC_BOOST_FACTOR (note: no default, using cut)
+ // DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR
if (property_get(PROP_DRC_OVERRIDE_BOOST, value, NULL)) {
unsigned boost = atoi(value);
- ALOGV("AAC decoder using AAC_DRC_BOOST_FACTOR of %d", boost);
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, boost);
+ ALOGV("AAC decoder using desired DRC boost factor of %d instead of %d", boost,
+ DRC_DEFAULT_MOBILE_DRC_BOOST);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, boost);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+ }
+ // DRC_PRES_MODE_WRAP_DESIRED_HEAVY
+ if (property_get(PROP_DRC_OVERRIDE_HEAVY, value, NULL)) {
+ unsigned heavy = atoi(value);
+        ALOGV("AAC decoder using desired DRC heavy compression switch of %d instead of %d", heavy,
+ DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, heavy);
} else {
- aacDecoder_SetParam(mAACDecoder, AAC_DRC_BOOST_FACTOR, DRC_DEFAULT_MOBILE_DRC_BOOST);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_DESIRED_HEAVY, DRC_DEFAULT_MOBILE_DRC_HEAVY);
+ }
+ // DRC_PRES_MODE_WRAP_ENCODER_TARGET
+ if (property_get(PROP_DRC_OVERRIDE_ENC_LEVEL, value, NULL)) {
+ unsigned encoderRefLevel = atoi(value);
+ ALOGV("AAC decoder using encoder-side DRC reference level of %d instead of %d",
+ encoderRefLevel, DRC_DEFAULT_MOBILE_ENC_LEVEL);
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, encoderRefLevel);
+ } else {
+ mDrcWrap.setParam(DRC_PRES_MODE_WRAP_ENCODER_TARGET, DRC_DEFAULT_MOBILE_ENC_LEVEL);
}
return status;
@@ -290,16 +333,101 @@ bool SoftAAC2::isConfigured() const {
return mInputBufferCount > 0;
}
-void SoftAAC2::maybeConfigureDownmix() const {
- if (mStreamInfo->numChannels > 2) {
- char value[PROPERTY_VALUE_MAX];
- if (!(property_get("media.aac_51_output_enabled", value, NULL) &&
- (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
- ALOGI("Downmixing multichannel AAC to stereo");
- aacDecoder_SetParam(mAACDecoder, AAC_PCM_OUTPUT_CHANNELS, 2);
- mStreamInfo->numChannels = 2;
+void SoftAAC2::configureDownmix() const {
+ char value[PROPERTY_VALUE_MAX];
+ if (!(property_get("media.aac_51_output_enabled", value, NULL)
+ && (!strcmp(value, "1") || !strcasecmp(value, "true")))) {
+ ALOGI("limiting to stereo output");
+ aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, 2);
+ // By default, the decoder creates a 5.1 channel downmix signal
+ // for seven and eight channel input streams. To enable 6.1 and 7.1 channel output
+ // use aacDecoder_SetParam(mAACDecoder, AAC_PCM_MAX_OUTPUT_CHANNELS, -1)
+ }
+}
+
+bool SoftAAC2::outputDelayRingBufferPutSamples(INT_PCM *samples, int32_t numSamples) {
+ if (mOutputDelayRingBufferWritePos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferReadPos <= mOutputDelayRingBufferWritePos
+ || mOutputDelayRingBufferReadPos > mOutputDelayRingBufferWritePos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos++] = samples[i];
+ }
+
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ if (mOutputDelayRingBufferWritePos == mOutputDelayRingBufferReadPos) {
+ ALOGE("RING BUFFER OVERFLOW");
+ return false;
+ }
+ } else {
+ ALOGV("slow SoftAAC2::outputDelayRingBufferPutSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ mOutputDelayRingBuffer[mOutputDelayRingBufferWritePos] = samples[i];
+ mOutputDelayRingBufferWritePos++;
+ if (mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferWritePos -= mOutputDelayRingBufferSize;
+ }
+ if (mOutputDelayRingBufferWritePos == mOutputDelayRingBufferReadPos) {
+ ALOGE("RING BUFFER OVERFLOW");
+ return false;
+ }
}
}
+ return true;
+}
+
+int32_t SoftAAC2::outputDelayRingBufferGetSamples(INT_PCM *samples, int32_t numSamples) {
+ if (mOutputDelayRingBufferReadPos + numSamples <= mOutputDelayRingBufferSize
+ && (mOutputDelayRingBufferWritePos < mOutputDelayRingBufferReadPos
+ || mOutputDelayRingBufferWritePos >= mOutputDelayRingBufferReadPos + numSamples)) {
+ // faster memcopy loop without checks, if the preconditions allow this
+ if (samples != 0) {
+ for (int32_t i = 0; i < numSamples; i++) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos++];
+ }
+ } else {
+ mOutputDelayRingBufferReadPos += numSamples;
+ }
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ } else {
+ ALOGV("slow SoftAAC2::outputDelayRingBufferGetSamples()");
+
+ for (int32_t i = 0; i < numSamples; i++) {
+ if (mOutputDelayRingBufferWritePos == mOutputDelayRingBufferReadPos) {
+ ALOGE("RING BUFFER UNDERRUN");
+ return -1;
+ }
+ if (samples != 0) {
+ samples[i] = mOutputDelayRingBuffer[mOutputDelayRingBufferReadPos];
+ }
+ mOutputDelayRingBufferReadPos++;
+ if (mOutputDelayRingBufferReadPos >= mOutputDelayRingBufferSize) {
+ mOutputDelayRingBufferReadPos -= mOutputDelayRingBufferSize;
+ }
+ }
+ }
+ return numSamples;
+}
+
+int32_t SoftAAC2::outputDelayRingBufferSamplesAvailable() {
+ int32_t available = mOutputDelayRingBufferWritePos - mOutputDelayRingBufferReadPos;
+ if (available < 0) {
+ available += mOutputDelayRingBufferSize;
+ }
+ if (available < 0) {
+ ALOGE("FATAL RING BUFFER ERROR");
+ return 0;
+ }
+ return available;
+}
+
+int32_t SoftAAC2::outputDelayRingBufferSamplesLeft() {
+ return mOutputDelayRingBufferSize - outputDelayRingBufferSamplesAvailable();
}
void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
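The ring-buffer helpers added above keep independent read and write positions inside a buffer of mOutputDelayRingBufferSize samples and recover the occupancy with a single wrap-around correction. A standalone sketch of that arithmetic with illustrative numbers:

    #include <cstdio>

    int main() {
        const int size = 16;        // ring capacity in samples (illustrative)
        int writePos = 3;           // writer has already wrapped past the end
        int readPos  = 12;          // reader is still behind the wrap point

        int available = writePos - readPos;   // -9 before correction
        if (available < 0) {
            available += size;                // 7 samples currently buffered
        }
        int left = size - available;          // 9 samples of free space

        printf("available=%d left=%d\n", available, left);
        return 0;
    }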
@@ -315,12 +443,11 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
List<BufferInfo *> &outQueue = getPortQueue(1);
if (portIndex == 0 && mInputBufferCount == 0) {
- ++mInputBufferCount;
- BufferInfo *info = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *header = info->mHeader;
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
- inBuffer[0] = header->pBuffer + header->nOffset;
- inBufferLength[0] = header->nFilledLen;
+ inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
+ inBufferLength[0] = inHeader->nFilledLen;
AAC_DECODER_ERROR decoderErr =
aacDecoder_ConfigRaw(mAACDecoder,
@@ -328,19 +455,25 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
inBufferLength);
if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_ConfigRaw decoderErr = 0x%4.4x", decoderErr);
mSignalledError = true;
notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
return;
}
+ mInputBufferCount++;
+ mOutputBufferCount++; // fake increase of outputBufferCount to keep the counters aligned
+
+ inInfo->mOwnedByUs = false;
inQueue.erase(inQueue.begin());
- info->mOwnedByUs = false;
- notifyEmptyBufferDone(header);
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ configureDownmix();
// Only send out port settings changed event if both sample rate
// and numChannels are valid.
if (mStreamInfo->sampleRate && mStreamInfo->numChannels) {
- maybeConfigureDownmix();
ALOGI("Initially configuring decoder: %d Hz, %d channels",
mStreamInfo->sampleRate,
mStreamInfo->numChannels);
@@ -352,202 +485,304 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
return;
}
- while ((!inQueue.empty() || (mSawInputEos && !mSignalledOutputEos)) && !outQueue.empty()) {
- BufferInfo *inInfo = NULL;
- OMX_BUFFERHEADERTYPE *inHeader = NULL;
+ while ((!inQueue.empty() || mEndOfInput) && !outQueue.empty()) {
if (!inQueue.empty()) {
- inInfo = *inQueue.begin();
- inHeader = inInfo->mHeader;
- }
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
- BufferInfo *outInfo = *outQueue.begin();
- OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- outHeader->nFlags = 0;
-
- if (inHeader) {
if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- mSawInputEos = true;
+ mEndOfInput = true;
+ } else {
+ mEndOfInput = false;
}
-
- if (inHeader->nOffset == 0 && inHeader->nFilledLen) {
- mAnchorTimeUs = inHeader->nTimeStamp;
- mNumSamplesOutput = 0;
+ if (inHeader->nOffset == 0) { // TODO: does nOffset != 0 happen?
+ mAnchorTimeUs[mInputBufferCount % kNumDelayBlocksMax] =
+ inHeader->nTimeStamp;
}
- if (mIsADTS) {
- size_t adtsHeaderSize = 0;
- // skip 30 bits, aac_frame_length follows.
- // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ } else {
+ if (mIsADTS) {
+ size_t adtsHeaderSize = 0;
+ // skip 30 bits, aac_frame_length follows.
+ // ssssssss ssssiiip ppffffPc ccohCCll llllllll lll?????
- const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
+ const uint8_t *adtsHeader = inHeader->pBuffer + inHeader->nOffset;
- bool signalError = false;
- if (inHeader->nFilledLen < 7) {
- ALOGE("Audio data too short to contain even the ADTS header. "
- "Got %d bytes.", inHeader->nFilledLen);
- hexdump(adtsHeader, inHeader->nFilledLen);
- signalError = true;
- } else {
- bool protectionAbsent = (adtsHeader[1] & 1);
-
- unsigned aac_frame_length =
- ((adtsHeader[3] & 3) << 11)
- | (adtsHeader[4] << 3)
- | (adtsHeader[5] >> 5);
-
- if (inHeader->nFilledLen < aac_frame_length) {
- ALOGE("Not enough audio data for the complete frame. "
- "Got %d bytes, frame size according to the ADTS "
- "header is %u bytes.",
- inHeader->nFilledLen, aac_frame_length);
+ bool signalError = false;
+ if (inHeader->nFilledLen < 7) {
+ ALOGE("Audio data too short to contain even the ADTS header. "
+ "Got %d bytes.", inHeader->nFilledLen);
hexdump(adtsHeader, inHeader->nFilledLen);
signalError = true;
} else {
- adtsHeaderSize = (protectionAbsent ? 7 : 9);
+ bool protectionAbsent = (adtsHeader[1] & 1);
+
+ unsigned aac_frame_length =
+ ((adtsHeader[3] & 3) << 11)
+ | (adtsHeader[4] << 3)
+ | (adtsHeader[5] >> 5);
+
+ if (inHeader->nFilledLen < aac_frame_length) {
+ ALOGE("Not enough audio data for the complete frame. "
+ "Got %d bytes, frame size according to the ADTS "
+ "header is %u bytes.",
+ inHeader->nFilledLen, aac_frame_length);
+ hexdump(adtsHeader, inHeader->nFilledLen);
+ signalError = true;
+ } else {
+ adtsHeaderSize = (protectionAbsent ? 7 : 9);
+
+ inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
+ inBufferLength[0] = aac_frame_length - adtsHeaderSize;
+
+ inHeader->nOffset += adtsHeaderSize;
+ inHeader->nFilledLen -= adtsHeaderSize;
+ }
+ }
+
+ if (signalError) {
+ mSignalledError = true;
- inBuffer[0] = (UCHAR *)adtsHeader + adtsHeaderSize;
- inBufferLength[0] = aac_frame_length - adtsHeaderSize;
+ notify(OMX_EventError,
+ OMX_ErrorStreamCorrupt,
+ ERROR_MALFORMED,
+ NULL);
- inHeader->nOffset += adtsHeaderSize;
- inHeader->nFilledLen -= adtsHeaderSize;
+ return;
}
+ } else {
+ inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
+ inBufferLength[0] = inHeader->nFilledLen;
}
- if (signalError) {
- mSignalledError = true;
+ // Fill and decode
+ bytesValid[0] = inBufferLength[0];
+
+ INT prevSampleRate = mStreamInfo->sampleRate;
+ INT prevNumChannels = mStreamInfo->numChannels;
+
+ aacDecoder_Fill(mAACDecoder,
+ inBuffer,
+ inBufferLength,
+ bytesValid);
- notify(OMX_EventError,
- OMX_ErrorStreamCorrupt,
- ERROR_MALFORMED,
- NULL);
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ 0 /* flags */);
+
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
+ ALOGE("AAC_DEC_NOT_ENOUGH_BITS should never happen");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
return;
}
- } else {
- inBuffer[0] = inHeader->pBuffer + inHeader->nOffset;
- inBufferLength[0] = inHeader->nFilledLen;
- }
- } else {
- inBufferLength[0] = 0;
- }
- // Fill and decode
- INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(
- outHeader->pBuffer + outHeader->nOffset);
-
- bytesValid[0] = inBufferLength[0];
-
- int prevSampleRate = mStreamInfo->sampleRate;
- int prevNumChannels = mStreamInfo->numChannels;
-
- AAC_DECODER_ERROR decoderErr = AAC_DEC_NOT_ENOUGH_BITS;
- while ((bytesValid[0] > 0 || mSawInputEos) && decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
- mDecoderHasData |= (bytesValid[0] > 0);
- aacDecoder_Fill(mAACDecoder,
- inBuffer,
- inBufferLength,
- bytesValid);
-
- decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
- outBuffer,
- outHeader->nAllocLen,
- 0 /* flags */);
- if (decoderErr == AAC_DEC_NOT_ENOUGH_BITS) {
- if (mSawInputEos && bytesValid[0] <= 0) {
- if (mDecoderHasData) {
- // flush out the decoder's delayed data by calling DecodeFrame
- // one more time, with the AACDEC_FLUSH flag set
- decoderErr = aacDecoder_DecodeFrame(mAACDecoder,
- outBuffer,
- outHeader->nAllocLen,
- AACDEC_FLUSH);
- mDecoderHasData = false;
+ if (bytesValid[0] != 0) {
+ ALOGE("bytesValid[0] != 0 should never happen");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ size_t numOutBytes =
+ mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+
+ if (decoderErr == AAC_DEC_OK) {
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
}
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
- mSignalledOutputEos = true;
- break;
+ UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
+ inHeader->nFilledLen -= inBufferUsedLength;
+ inHeader->nOffset += inBufferUsedLength;
} else {
- ALOGW("Not enough bits, bytesValid %d", bytesValid[0]);
- }
- }
- }
+ ALOGW("AAC decoder returned error 0x%4.4x, substituting silence", decoderErr);
- size_t numOutBytes =
- mStreamInfo->frameSize * sizeof(int16_t) * mStreamInfo->numChannels;
+ memset(tmpOutBuffer, 0, numOutBytes); // TODO: check for overflow
- if (inHeader) {
- if (decoderErr == AAC_DEC_OK) {
- UINT inBufferUsedLength = inBufferLength[0] - bytesValid[0];
- inHeader->nFilledLen -= inBufferUsedLength;
- inHeader->nOffset += inBufferUsedLength;
- } else {
- ALOGW("AAC decoder returned error %d, substituting silence",
- decoderErr);
+ if (!outputDelayRingBufferPutSamples(tmpOutBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels)) {
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
- memset(outHeader->pBuffer + outHeader->nOffset, 0, numOutBytes);
+ // Discard input buffer.
+ inHeader->nFilledLen = 0;
- // Discard input buffer.
- inHeader->nFilledLen = 0;
+ aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
+ // fall through
+ }
- // fall through
+ /*
+ * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
+ * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
+ * rate system and the sampling rate in the final output is actually
+ * doubled compared with the core AAC decoder sampling rate.
+ *
+ * Explicit signalling is done by explicitly defining SBR audio object
+ * type in the bitstream. Implicit signalling is done by embedding
+ * SBR content in AAC extension payload specific to SBR, and hence
+ * requires an AAC decoder to perform pre-checks on actual audio frames.
+ *
+ * Thus, we could not say for sure whether a stream is
+ * AAC+/eAAC+ until the first data frame is decoded.
+ */
+ if (mOutputBufferCount > 1) {
+ if (mStreamInfo->sampleRate != prevSampleRate ||
+ mStreamInfo->numChannels != prevNumChannels) {
+ ALOGE("can not reconfigure AAC output");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
+ }
+ if (mInputBufferCount <= 2) { // TODO: <= 1
+ if (mStreamInfo->sampleRate != prevSampleRate ||
+ mStreamInfo->numChannels != prevNumChannels) {
+ ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
+ prevSampleRate, mStreamInfo->sampleRate,
+ prevNumChannels, mStreamInfo->numChannels);
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ mInputBufferCount++;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ return;
+ }
+ } else if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
+ ALOGW("Invalid AAC stream");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
+ return;
+ }
+ if (inHeader->nFilledLen == 0) {
+ inInfo->mOwnedByUs = false;
+ mInputBufferCount++;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ } else {
+ ALOGW("inHeader->nFilledLen = %d", inHeader->nFilledLen);
+ }
}
+ }
- if (inHeader->nFilledLen == 0) {
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
+ int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+ if (!mEndOfInput && mOutputDelayCompensated < outputDelay) {
+ // discard outputDelay at the beginning
+ int32_t toCompensate = outputDelay - mOutputDelayCompensated;
+ int32_t discard = outputDelayRingBufferSamplesAvailable();
+ if (discard > toCompensate) {
+ discard = toCompensate;
}
+ int32_t discarded = outputDelayRingBufferGetSamples(0, discard);
+ mOutputDelayCompensated += discarded;
+ continue;
}
- /*
- * AAC+/eAAC+ streams can be signalled in two ways: either explicitly
- * or implicitly, according to MPEG4 spec. AAC+/eAAC+ is a dual
- * rate system and the sampling rate in the final output is actually
- * doubled compared with the core AAC decoder sampling rate.
- *
- * Explicit signalling is done by explicitly defining SBR audio object
- * type in the bitstream. Implicit signalling is done by embedding
- * SBR content in AAC extension payload specific to SBR, and hence
- * requires an AAC decoder to perform pre-checks on actual audio frames.
- *
- * Thus, we could not say for sure whether a stream is
- * AAC+/eAAC+ until the first data frame is decoded.
- */
- if (mInputBufferCount <= 2) {
- if (mStreamInfo->sampleRate != prevSampleRate ||
- mStreamInfo->numChannels != prevNumChannels) {
- maybeConfigureDownmix();
- ALOGI("Reconfiguring decoder: %d->%d Hz, %d->%d channels",
- prevSampleRate, mStreamInfo->sampleRate,
- prevNumChannels, mStreamInfo->numChannels);
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
- return;
+ if (mEndOfInput) {
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+ mOutputDelayCompensated -= tmpOutBufferSamples;
}
- } else if (!mStreamInfo->sampleRate || !mStreamInfo->numChannels) {
- ALOGW("Invalid AAC stream");
- mSignalledError = true;
- notify(OMX_EventError, OMX_ErrorUndefined, decoderErr, NULL);
- return;
}
- if (decoderErr == AAC_DEC_OK || mNumSamplesOutput > 0) {
- // We'll only output data if we successfully decoded it or
- // we've previously decoded valid data, in the latter case
- // (decode failed) we'll output a silent frame.
- outHeader->nFilledLen = numOutBytes;
+ while (!outQueue.empty()
+ && outputDelayRingBufferSamplesAvailable()
+ >= mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
- outHeader->nTimeStamp =
- mAnchorTimeUs
- + (mNumSamplesOutput * 1000000ll) / mStreamInfo->sampleRate;
+ if (outHeader->nOffset != 0) {
+ ALOGE("outHeader->nOffset != 0 is not handled");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
- mNumSamplesOutput += mStreamInfo->frameSize;
+ INT_PCM *outBuffer =
+ reinterpret_cast<INT_PCM *>(outHeader->pBuffer + outHeader->nOffset);
+ if (outHeader->nOffset
+ + mStreamInfo->frameSize * mStreamInfo->numChannels * sizeof(int16_t)
+ > outHeader->nAllocLen) {
+ ALOGE("buffer overflow");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ int32_t ns = outputDelayRingBufferGetSamples(outBuffer,
+ mStreamInfo->frameSize * mStreamInfo->numChannels); // TODO: check for overflow
+ if (ns != mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ ALOGE("not a complete frame of samples available");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ outHeader->nFilledLen = mStreamInfo->frameSize * mStreamInfo->numChannels
+ * sizeof(int16_t);
+ if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+ mEndOfOutput = true;
+ } else {
+ outHeader->nFlags = 0;
+ }
+
+ outHeader->nTimeStamp = mAnchorTimeUs[mOutputBufferCount
+ % kNumDelayBlocksMax];
+
+ mOutputBufferCount++;
outInfo->mOwnedByUs = false;
outQueue.erase(outQueue.begin());
outInfo = NULL;
@@ -555,8 +790,48 @@ void SoftAAC2::onQueueFilled(OMX_U32 portIndex) {
outHeader = NULL;
}
- if (decoderErr == AAC_DEC_OK) {
- ++mInputBufferCount;
+ if (mEndOfInput) {
+ if (outputDelayRingBufferSamplesAvailable() > 0
+ && outputDelayRingBufferSamplesAvailable()
+ < mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ ALOGE("not a complete frame of samples available");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ if (mEndOfInput && !outQueue.empty() && outputDelayRingBufferSamplesAvailable() == 0) {
+ if (!mEndOfOutput) {
+ // send empty block signaling EOS
+ mEndOfOutput = true;
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (outHeader->nOffset != 0) {
+ ALOGE("outHeader->nOffset != 0 is not handled");
+ mSignalledError = true;
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ INT_PCM *outBuffer = reinterpret_cast<INT_PCM *>(outHeader->pBuffer
+ + outHeader->nOffset);
+ int32_t ns = 0;
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outHeader->nTimeStamp = mAnchorTimeUs[mOutputBufferCount
+ % kNumDelayBlocksMax];
+
+ mOutputBufferCount++;
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ }
+ break; // if outQueue not empty but no more output
+ }
}
}
}
@@ -567,38 +842,66 @@ void SoftAAC2::onPortFlushCompleted(OMX_U32 portIndex) {
// depend on fragments from the last one decoded.
// drain all existing data
drainDecoder();
- // force decoder loop to drop the first decoded buffer by resetting these state variables,
- // but only if initialization has already happened.
- if (mInputBufferCount != 0) {
- mInputBufferCount = 1;
- mStreamInfo->sampleRate = 0;
+ } else {
+ while (outputDelayRingBufferSamplesAvailable() > 0) {
+ int32_t ns = outputDelayRingBufferGetSamples(0,
+ mStreamInfo->frameSize * mStreamInfo->numChannels);
+ if (ns != mStreamInfo->frameSize * mStreamInfo->numChannels) {
+ ALOGE("not a complete frame of samples available");
+ }
+ mOutputBufferCount++;
}
+ mOutputDelayRingBufferReadPos = mOutputDelayRingBufferWritePos;
}
}
void SoftAAC2::drainDecoder() {
- // a buffer big enough for 6 channels of decoded HE-AAC
- short buf [2048*6];
- aacDecoder_DecodeFrame(mAACDecoder,
- buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
- aacDecoder_DecodeFrame(mAACDecoder,
- buf, sizeof(buf), AACDEC_FLUSH | AACDEC_CLRHIST | AACDEC_INTR);
- aacDecoder_SetParam(mAACDecoder, AAC_TPDEC_CLEAR_BUFFER, 1);
- mDecoderHasData = false;
+ int32_t outputDelay = mStreamInfo->outputDelay * mStreamInfo->numChannels;
+
+ // flush decoder until outputDelay is compensated
+ while (mOutputDelayCompensated > 0) {
+ // a buffer big enough for MAX_CHANNEL_COUNT channels of decoded HE-AAC
+ INT_PCM tmpOutBuffer[2048 * MAX_CHANNEL_COUNT];
+
+ // run DRC check
+ mDrcWrap.submitStreamData(mStreamInfo);
+ mDrcWrap.update();
+
+ AAC_DECODER_ERROR decoderErr =
+ aacDecoder_DecodeFrame(mAACDecoder,
+ tmpOutBuffer,
+ 2048 * MAX_CHANNEL_COUNT,
+ AACDEC_FLUSH);
+ if (decoderErr != AAC_DEC_OK) {
+ ALOGW("aacDecoder_DecodeFrame decoderErr = 0x%4.4x", decoderErr);
+ }
+
+ int32_t tmpOutBufferSamples = mStreamInfo->frameSize * mStreamInfo->numChannels;
+ if (tmpOutBufferSamples > mOutputDelayCompensated) {
+ tmpOutBufferSamples = mOutputDelayCompensated;
+ }
+ outputDelayRingBufferPutSamples(tmpOutBuffer, tmpOutBufferSamples);
+
+ mOutputDelayCompensated -= tmpOutBufferSamples;
+ }
}
void SoftAAC2::onReset() {
drainDecoder();
// reset the "configured" state
mInputBufferCount = 0;
- mNumSamplesOutput = 0;
+ mOutputBufferCount = 0;
+ mOutputDelayCompensated = 0;
+ mOutputDelayRingBufferWritePos = 0;
+ mOutputDelayRingBufferReadPos = 0;
+ mEndOfInput = false;
+ mEndOfOutput = false;
+
// To make the codec behave the same before and after a reset, we need to invalidate the
// streaminfo struct. This does that:
- mStreamInfo->sampleRate = 0;
+ mStreamInfo->sampleRate = 0; // TODO: mStreamInfo is read only
mSignalledError = false;
- mSawInputEos = false;
- mSignalledOutputEos = false;
mOutputPortSettingsChange = NONE;
}
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index a7ea1e2..5cde03a 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -20,6 +20,7 @@
#include "SimpleSoftOMXComponent.h"
#include "aacdecoder_lib.h"
+#include "DrcPresModeWrap.h"
namespace android {
@@ -47,18 +48,19 @@ private:
enum {
kNumInputBuffers = 4,
kNumOutputBuffers = 4,
+ kNumDelayBlocksMax = 8,
};
HANDLE_AACDECODER mAACDecoder;
CStreamInfo *mStreamInfo;
bool mIsADTS;
- bool mDecoderHasData;
+ bool mIsFirst;
size_t mInputBufferCount;
+ size_t mOutputBufferCount;
bool mSignalledError;
- bool mSawInputEos;
- bool mSignalledOutputEos;
- int64_t mAnchorTimeUs;
- int64_t mNumSamplesOutput;
+ int64_t mAnchorTimeUs[kNumDelayBlocksMax];
+
+ CDrcPresModeWrapper mDrcWrap;
enum {
NONE,
@@ -69,9 +71,22 @@ private:
void initPorts();
status_t initDecoder();
bool isConfigured() const;
- void maybeConfigureDownmix() const;
+ void configureDownmix() const;
void drainDecoder();
+// delay compensation
+ bool mEndOfInput;
+ bool mEndOfOutput;
+ int32_t mOutputDelayCompensated;
+ int32_t mOutputDelayRingBufferSize;
+ short *mOutputDelayRingBuffer;
+ int32_t mOutputDelayRingBufferWritePos;
+ int32_t mOutputDelayRingBufferReadPos;
+ bool outputDelayRingBufferPutSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferGetSamples(INT_PCM *samples, int numSamples);
+ int32_t outputDelayRingBufferSamplesAvailable();
+ int32_t outputDelayRingBufferSamplesLeft();
+
DISALLOW_EVIL_CONSTRUCTORS(SoftAAC2);
};
diff --git a/media/libstagefright/codecs/aacenc/Android.mk b/media/libstagefright/codecs/aacenc/Android.mk
index 057c69b..58ec3ba 100644
--- a/media/libstagefright/codecs/aacenc/Android.mk
+++ b/media/libstagefright/codecs/aacenc/Android.mk
@@ -82,6 +82,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7
endif
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -106,6 +108,8 @@ ifeq ($(AAC_LIBRARY), fraunhofer)
LOCAL_CFLAGS :=
+ LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := libFraunhoferAAC
LOCAL_SHARED_LIBRARIES := \
@@ -128,6 +132,8 @@ else # visualon
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+ LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_aacenc
diff --git a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
index cc01927..1d029fc 100644
--- a/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
+++ b/media/libstagefright/codecs/aacenc/basic_op/oper_32b.c
@@ -24,6 +24,8 @@
#include "basic_op.h"
#include "oper_32b.h"
+#define UNUSED(x) (void)(x)
+
/*****************************************************************************
* *
* Function L_Extract() *
@@ -243,6 +245,8 @@ Word16 iLog4(Word32 value)
Word32 rsqrt(Word32 value, /*!< Operand to square root (0.0 ... 1) */
Word32 accuracy) /*!< Number of valid bits that will be calculated */
{
+ UNUSED(accuracy);
+
Word32 root = 0;
Word32 scale;
diff --git a/media/libstagefright/codecs/aacenc/src/aacenc.c b/media/libstagefright/codecs/aacenc/src/aacenc.c
index d1c8621..40db92c 100644
--- a/media/libstagefright/codecs/aacenc/src/aacenc.c
+++ b/media/libstagefright/codecs/aacenc/src/aacenc.c
@@ -27,6 +27,8 @@
#include "cmnMemory.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
+
/**
* Init the audio codec module and return codec handle
* \param phCodec [OUT] Return the video codec handle
@@ -46,6 +48,8 @@ VO_U32 VO_API voAACEncInit(VO_HANDLE * phCodec,VO_AUDIO_CODINGTYPE vType, VO_COD
VO_MEM_OPERATOR *pMemOP;
int interMem;
+ UNUSED(vType);
+
interMem = 0;
error = 0;
@@ -471,6 +475,10 @@ VO_U32 VO_API voAACEncSetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData)
*/
VO_U32 VO_API voAACEncGetParam(VO_HANDLE hCodec, VO_S32 uParamID, VO_PTR pData)
{
+ UNUSED(hCodec);
+ UNUSED(uParamID);
+ UNUSED(pData);
+
return VO_ERR_NONE;
}
diff --git a/media/libstagefright/codecs/aacenc/src/bitenc.c b/media/libstagefright/codecs/aacenc/src/bitenc.c
index fcc12dd..d1fd647 100644
--- a/media/libstagefright/codecs/aacenc/src/bitenc.c
+++ b/media/libstagefright/codecs/aacenc/src/bitenc.c
@@ -26,6 +26,7 @@
#include "qc_data.h"
#include "interface.h"
+#define UNUSED(x) (void)(x)
static const Word16 globalGainOffset = 100;
static const Word16 icsReservedBit = 0;
@@ -585,6 +586,8 @@ Word16 WriteBitstream (HANDLE_BIT_BUF hBitStream,
Word16 elementUsedBits;
Word16 frameBits=0;
+ UNUSED(ancBytes);
+
/* struct bitbuffer bsWriteCopy; */
bitMarkUp = GetBitsAvail(hBitStream);
if(qcOut->qcElement.adtsUsed) /* write adts header*/
diff --git a/media/libstagefright/codecs/aacenc/src/psy_main.c b/media/libstagefright/codecs/aacenc/src/psy_main.c
index 4e9218c..6f0679c 100644
--- a/media/libstagefright/codecs/aacenc/src/psy_main.c
+++ b/media/libstagefright/codecs/aacenc/src/psy_main.c
@@ -38,6 +38,8 @@
#include "tns_func.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
+
/* long start short stop */
static Word16 blockType2windowShape[] = {KBD_WINDOW,SINE_WINDOW,SINE_WINDOW,KBD_WINDOW};
@@ -170,7 +172,9 @@ Word16 PsyOutNew(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP)
*****************************************************************************/
Word16 PsyOutDelete(PSY_OUT *hPsyOut, VO_MEM_OPERATOR *pMemOP)
{
- hPsyOut=NULL;
+ UNUSED(hPsyOut);
+ UNUSED(pMemOP);
+
return 0;
}
diff --git a/media/libstagefright/codecs/aacenc/src/qc_main.c b/media/libstagefright/codecs/aacenc/src/qc_main.c
index 48ff300..e5d78aa 100644
--- a/media/libstagefright/codecs/aacenc/src/qc_main.c
+++ b/media/libstagefright/codecs/aacenc/src/qc_main.c
@@ -33,6 +33,7 @@
#include "channel_map.h"
#include "memalign.h"
+#define UNUSED(x) (void)(x)
typedef enum{
FRAME_LEN_BYTES_MODULO = 1,
@@ -204,11 +205,8 @@ Word16 QCNew(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP)
**********************************************************************************/
void QCDelete(QC_STATE *hQC, VO_MEM_OPERATOR *pMemOP)
{
-
- /*
- nothing to do
- */
- hQC=NULL;
+ UNUSED(hQC);
+ UNUSED(pMemOP);
}
/*********************************************************************************
diff --git a/media/libstagefright/codecs/aacenc/src/tns.c b/media/libstagefright/codecs/aacenc/src/tns.c
index 455a864..5172612 100644
--- a/media/libstagefright/codecs/aacenc/src/tns.c
+++ b/media/libstagefright/codecs/aacenc/src/tns.c
@@ -30,6 +30,8 @@
#include "psy_configuration.h"
#include "tns_func.h"
+#define UNUSED(x) (void)(x)
+
#define TNS_MODIFY_BEGIN 2600 /* Hz */
#define RATIO_PATCH_LOWER_BORDER 380 /* Hz */
#define TNS_GAIN_THRESH 141 /* 1.41*100 */
@@ -643,6 +645,8 @@ static Word16 CalcTnsFilter(const Word16 *signal,
Word32 i;
Word32 tnsOrderPlus1 = tnsOrder + 1;
+ UNUSED(window);
+
assert(tnsOrder <= TNS_MAX_ORDER); /* remove asserts later? (btg) */
for(i=0;i<tnsOrder;i++) {
diff --git a/media/libstagefright/codecs/amrnb/common/Android.mk b/media/libstagefright/codecs/amrnb/common/Android.mk
index 30ce29c..a2b3c8f 100644
--- a/media/libstagefright/codecs/amrnb/common/Android.mk
+++ b/media/libstagefright/codecs/amrnb/common/Android.mk
@@ -69,6 +69,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF= -DOSCL_EXPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrnb_common
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/amrnb/dec/Android.mk b/media/libstagefright/codecs/amrnb/dec/Android.mk
index 8d6c6f8..b067456 100644
--- a/media/libstagefright/codecs/amrnb/dec/Android.mk
+++ b/media/libstagefright/codecs/amrnb/dec/Android.mk
@@ -47,6 +47,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrnbdec
include $(BUILD_STATIC_LIBRARY)
@@ -68,6 +70,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbdec libstagefright_amrwbdec
diff --git a/media/libstagefright/codecs/amrnb/enc/Android.mk b/media/libstagefright/codecs/amrnb/enc/Android.mk
index f4e467a..afc0b89 100644
--- a/media/libstagefright/codecs/amrnb/enc/Android.mk
+++ b/media/libstagefright/codecs/amrnb/enc/Android.mk
@@ -69,6 +69,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrnbenc
include $(BUILD_STATIC_LIBRARY)
@@ -88,6 +90,8 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/../common/include \
$(LOCAL_PATH)/../common
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrnbenc
diff --git a/media/libstagefright/codecs/amrwb/Android.mk b/media/libstagefright/codecs/amrwb/Android.mk
index 677107f..efdf988 100644
--- a/media/libstagefright/codecs/amrwb/Android.mk
+++ b/media/libstagefright/codecs/amrwb/Android.mk
@@ -50,6 +50,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG= -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_amrwbdec
include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/codecs/amrwbenc/Android.mk b/media/libstagefright/codecs/amrwbenc/Android.mk
index c5b8e0c..64fe8d1 100644
--- a/media/libstagefright/codecs/amrwbenc/Android.mk
+++ b/media/libstagefright/codecs/amrwbenc/Android.mk
@@ -112,6 +112,8 @@ LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV5E
LOCAL_C_INCLUDES += $(LOCAL_PATH)/src/asm/ARMV7
endif
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -126,6 +128,8 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/codecs/common/include \
frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_STATIC_LIBRARIES := \
libstagefright_amrwbenc
diff --git a/media/libstagefright/codecs/amrwbenc/src/autocorr.c b/media/libstagefright/codecs/amrwbenc/src/autocorr.c
index 8c477ca..0b2ea89 100644
--- a/media/libstagefright/codecs/amrwbenc/src/autocorr.c
+++ b/media/libstagefright/codecs/amrwbenc/src/autocorr.c
@@ -28,6 +28,8 @@
#include "acelp.h"
#include "ham_wind.tab"
+#define UNUSED(x) (void)(x)
+
void Autocorr(
Word16 x[], /* (i) : Input signal */
Word16 m, /* (i) : LPC order */
@@ -40,6 +42,8 @@ void Autocorr(
Word32 L_sum, L_sum1, L_tmp, F_LEN;
Word16 *p1,*p2,*p3;
const Word16 *p4;
+ UNUSED(m);
+
/* Windowing of signal */
p1 = x;
p4 = vo_window;
diff --git a/media/libstagefright/codecs/amrwbenc/src/convolve.c b/media/libstagefright/codecs/amrwbenc/src/convolve.c
index acba532..4c1f7d4 100644
--- a/media/libstagefright/codecs/amrwbenc/src/convolve.c
+++ b/media/libstagefright/codecs/amrwbenc/src/convolve.c
@@ -25,6 +25,8 @@
#include "typedef.h"
#include "basic_op.h"
+#define UNUSED(x) (void)(x)
+
void Convolve (
Word16 x[], /* (i) : input vector */
Word16 h[], /* (i) : impulse response */
@@ -35,6 +37,8 @@ void Convolve (
Word32 i, n;
Word16 *tmpH,*tmpX;
Word32 s;
+ UNUSED(L);
+
for (n = 0; n < 64;)
{
tmpH = h+n;
diff --git a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
index 0d66c31..b66b55e 100644
--- a/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
+++ b/media/libstagefright/codecs/amrwbenc/src/pitch_f4.c
@@ -31,6 +31,8 @@
#define UP_SAMP 4
#define L_INTERPOL1 4
+#define UNUSED(x) (void)(x)
+
/* Local functions */
#ifdef ASM_OPT
@@ -171,6 +173,7 @@ static void Norm_Corr(
Word32 corr, exp_corr, norm, exp, scale;
Word16 exp_norm, excf[L_SUBFR], tmp;
Word32 L_tmp, L_tmp1, L_tmp2;
+ UNUSED(L_subfr);
/* compute the filtered excitation for the first delay t_min */
k = -t_min;
diff --git a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
index 1bda05a..961aadc 100644
--- a/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
+++ b/media/libstagefright/codecs/amrwbenc/src/syn_filt.c
@@ -26,6 +26,8 @@
#include "math_op.h"
#include "cnst.h"
+#define UNUSED(x) (void)(x)
+
void Syn_filt(
Word16 a[], /* (i) Q12 : a[m+1] prediction coefficients */
Word16 x[], /* (i) : input signal */
@@ -95,6 +97,8 @@ void Syn_filt_32(
Word32 i,a0;
Word32 L_tmp, L_tmp1;
Word16 *p1, *p2, *p3;
+ UNUSED(m);
+
a0 = a[0] >> (4 + Qnew); /* input / 16 and >>Qnew */
/* Do the filtering. */
for (i = 0; i < lg; i++)
diff --git a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
index ea9da52..df7b9b3 100644
--- a/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
+++ b/media/libstagefright/codecs/amrwbenc/src/voAMRWBEnc.c
@@ -39,6 +39,8 @@
#include "mem_align.h"
#include "cmnMemory.h"
+#define UNUSED(x) (void)(x)
+
#ifdef __cplusplus
extern "C" {
#endif
@@ -1602,6 +1604,8 @@ VO_U32 VO_API voAMRWB_Init(VO_HANDLE * phCodec, /* o: the audi
VO_MEM_OPERATOR voMemoprator;
#endif
VO_MEM_OPERATOR *pMemOP;
+ UNUSED(vType);
+
int interMem = 0;
if(pUserData == NULL || pUserData->memflag != VO_IMF_USERMEMOPERATOR || pUserData->memData == NULL )
diff --git a/media/libstagefright/codecs/avc/common/Android.mk b/media/libstagefright/codecs/avc/common/Android.mk
index 22dee15..844ef0a 100644
--- a/media/libstagefright/codecs/avc/common/Android.mk
+++ b/media/libstagefright/codecs/avc/common/Android.mk
@@ -16,4 +16,6 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/enc/Android.mk b/media/libstagefright/codecs/avc/enc/Android.mk
index 7d17c2a..537ba42 100644
--- a/media/libstagefright/codecs/avc/enc/Android.mk
+++ b/media/libstagefright/codecs/avc/enc/Android.mk
@@ -30,6 +30,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -69,4 +71,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_h264enc
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
index 89f0fed..0f4a00d 100644
--- a/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
+++ b/media/libstagefright/codecs/avc/enc/SoftAVCEncoder.cpp
@@ -34,6 +34,12 @@
#include "SoftAVCEncoder.h"
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
namespace android {
template<class T>
@@ -136,14 +142,14 @@ inline static void ConvertYUV420SemiPlanarToYUV420Planar(
}
static void* MallocWrapper(
- void *userData, int32_t size, int32_t attrs) {
+ void * /* userData */, int32_t size, int32_t /* attrs */) {
void *ptr = malloc(size);
if (ptr)
memset(ptr, 0, size);
return ptr;
}
-static void FreeWrapper(void *userData, void* ptr) {
+static void FreeWrapper(void * /* userData */, void* ptr) {
free(ptr);
}
@@ -722,7 +728,7 @@ OMX_ERRORTYPE SoftAVCEncoder::internalSetParameter(
}
}
-void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
+void SoftAVCEncoder::onQueueFilled(OMX_U32 /* portIndex */) {
if (mSignalledError || mSawInputEOS) {
return;
}
@@ -795,7 +801,7 @@ void SoftAVCEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
- buffer_handle_t srcBuffer; // for MetaDataMode only
+ buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
// Get next input video frame
if (mReadyForNextFrame) {
@@ -964,6 +970,7 @@ int32_t SoftAVCEncoder::bindOutputBuffer(int32_t index, uint8_t **yuv) {
}
void SoftAVCEncoder::signalBufferReturned(MediaBuffer *buffer) {
+ UNUSED_UNLESS_VERBOSE(buffer);
ALOGV("signalBufferReturned: %p", buffer);
}
diff --git a/media/libstagefright/codecs/common/Android.mk b/media/libstagefright/codecs/common/Android.mk
index a33cb92..b0010ff 100644
--- a/media/libstagefright/codecs/common/Android.mk
+++ b/media/libstagefright/codecs/common/Android.mk
@@ -14,6 +14,8 @@ LOCAL_STATIC_LIBRARIES :=
LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/flac/enc/Android.mk b/media/libstagefright/codecs/flac/enc/Android.mk
index f01d605..59a11de 100644
--- a/media/libstagefright/codecs/flac/enc/Android.mk
+++ b/media/libstagefright/codecs/flac/enc/Android.mk
@@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax \
external/flac/include
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index d797197..1301060 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -27,6 +27,12 @@
#define FLAC_COMPRESSION_LEVEL_DEFAULT 5
#define FLAC_COMPRESSION_LEVEL_MAX 8
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
namespace android {
template<class T>
@@ -257,7 +263,7 @@ OMX_ERRORTYPE SoftFlacEncoder::internalSetParameter(
}
void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
- //UNUSED_UNLESS_VERBOSE(portIndex);
+ UNUSED_UNLESS_VERBOSE(portIndex);
ALOGV("SoftFlacEncoder::onQueueFilled(portIndex=%d)", portIndex);
if (mSignalledError) {
@@ -343,16 +349,17 @@ void SoftFlacEncoder::onQueueFilled(OMX_U32 portIndex) {
}
}
-
FLAC__StreamEncoderWriteStatus SoftFlacEncoder::onEncodedFlacAvailable(
const FLAC__byte buffer[],
- size_t bytes, unsigned samples, unsigned current_frame) {
- ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%d, samples=%d, curr_frame=%d)",
+ size_t bytes, unsigned samples,
+ unsigned current_frame) {
+ UNUSED_UNLESS_VERBOSE(current_frame);
+ ALOGV("SoftFlacEncoder::onEncodedFlacAvailable(bytes=%zu, samples=%u, curr_frame=%u)",
bytes, samples, current_frame);
#ifdef WRITE_FLAC_HEADER_IN_FIRST_BUFFER
if (samples == 0) {
- ALOGI(" saving %d bytes of header", bytes);
+ ALOGI(" saving %zu bytes of header", bytes);
memcpy(mHeader + mHeaderOffset, buffer, bytes);
mHeaderOffset += bytes;// will contain header size when finished receiving header
return FLAC__STREAM_ENCODER_WRITE_STATUS_OK;
@@ -444,8 +451,12 @@ return_result:
// static
FLAC__StreamEncoderWriteStatus SoftFlacEncoder::flacEncoderWriteCallback(
- const FLAC__StreamEncoder *encoder, const FLAC__byte buffer[],
- size_t bytes, unsigned samples, unsigned current_frame, void *client_data) {
+ const FLAC__StreamEncoder * /* encoder */,
+ const FLAC__byte buffer[],
+ size_t bytes,
+ unsigned samples,
+ unsigned current_frame,
+ void *client_data) {
return ((SoftFlacEncoder*) client_data)->onEncodedFlacAvailable(
buffer, bytes, samples, current_frame);
}
diff --git a/media/libstagefright/codecs/g711/dec/Android.mk b/media/libstagefright/codecs/g711/dec/Android.mk
index 4c80da6..a0112e1 100644
--- a/media/libstagefright/codecs/g711/dec/Android.mk
+++ b/media/libstagefright/codecs/g711/dec/Android.mk
@@ -14,4 +14,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_g711dec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/gsm/dec/Android.mk b/media/libstagefright/codecs/gsm/dec/Android.mk
index 71613d2..30868d5 100644
--- a/media/libstagefright/codecs/gsm/dec/Android.mk
+++ b/media/libstagefright/codecs/gsm/dec/Android.mk
@@ -9,6 +9,8 @@ LOCAL_C_INCLUDES := \
frameworks/native/include/media/openmax \
external/libgsm/inc
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/hevcdec/Android.mk b/media/libstagefright/codecs/hevcdec/Android.mk
new file mode 100644
index 0000000..960602f
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/Android.mk
@@ -0,0 +1,26 @@
+ifeq ($(if $(wildcard external/libhevc),1,0),1)
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := libstagefright_soft_hevcdec
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_STATIC_LIBRARIES := libhevcdec
+LOCAL_SRC_FILES := SoftHEVC.cpp
+
+LOCAL_C_INCLUDES := $(TOP)/external/libhevc/decoder
+LOCAL_C_INCLUDES += $(TOP)/external/libhevc/common
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/media/libstagefright/include
+LOCAL_C_INCLUDES += $(TOP)/frameworks/native/include/media/openmax
+
+LOCAL_SHARED_LIBRARIES := libstagefright
+LOCAL_SHARED_LIBRARIES += libstagefright_omx
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation
+LOCAL_SHARED_LIBRARIES += libutils
+LOCAL_SHARED_LIBRARIES += liblog
+
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
new file mode 100644
index 0000000..0aae5ed
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.cpp
@@ -0,0 +1,710 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftHEVC"
+#include <utils/Log.h>
+
+#include "ihevc_typedefs.h"
+#include "iv.h"
+#include "ivd.h"
+#include "ithread.h"
+#include "ihevcd_cxa.h"
+#include "SoftHEVC.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+#include <OMX_VideoExt.h>
+
+namespace android {
+
+#define componentName "video_decoder.hevc"
+#define codingType OMX_VIDEO_CodingHEVC
+#define CODEC_MIME_TYPE MEDIA_MIMETYPE_VIDEO_HEVC
+
+/** Function and structure definitions to keep code similar for each codec */
+#define ivdec_api_function ihevcd_cxa_api_function
+#define ivdext_init_ip_t ihevcd_cxa_init_ip_t
+#define ivdext_init_op_t ihevcd_cxa_init_op_t
+#define ivdext_fill_mem_rec_ip_t ihevcd_cxa_fill_mem_rec_ip_t
+#define ivdext_fill_mem_rec_op_t ihevcd_cxa_fill_mem_rec_op_t
+#define ivdext_ctl_set_num_cores_ip_t ihevcd_cxa_ctl_set_num_cores_ip_t
+#define ivdext_ctl_set_num_cores_op_t ihevcd_cxa_ctl_set_num_cores_op_t
+
+#define IVDEXT_CMD_CTL_SET_NUM_CORES \
+ (IVD_CONTROL_API_COMMAND_TYPE_T)IHEVCD_CXA_CMD_CTL_SET_NUM_CORES
+
+static const CodecProfileLevel kProfileLevels[] = {
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel1 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel2 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel21 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel3 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel31 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel4 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel41 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel5 },
+ { OMX_VIDEO_HEVCProfileMain, OMX_VIDEO_HEVCMainTierLevel51 },
+};
+
+SoftHEVC::SoftHEVC(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SoftVideoDecoderOMXComponent(name, componentName, codingType,
+ kProfileLevels, ARRAY_SIZE(kProfileLevels),
+ CODEC_MAX_WIDTH /* width */, CODEC_MAX_HEIGHT /* height */, callbacks,
+ appData, component) {
+ initPorts(kNumBuffers, INPUT_BUF_SIZE, kNumBuffers,
+ CODEC_MIME_TYPE);
+
+ mOmxColorFormat = OMX_COLOR_FormatYUV420Planar;
+ mStride = mWidth;
+
+ if (OMX_COLOR_FormatYUV420Planar == mOmxColorFormat) {
+ mIvColorFormat = IV_YUV_420P;
+ } else if (OMX_COLOR_FormatYUV420SemiPlanar == mOmxColorFormat) {
+ mIvColorFormat = IV_YUV_420SP_UV;
+ }
+
+ mInitWidth = mWidth;
+ mInitHeight = mHeight;
+
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftHEVC::~SoftHEVC() {
+ ALOGD("In SoftHEVC::~SoftHEVC");
+ CHECK_EQ(deInitDecoder(), (status_t)OK);
+}
+
+static size_t GetCPUCoreCount() {
+ long cpuCoreCount = 1;
+#if defined(_SC_NPROCESSORS_ONLN)
+ cpuCoreCount = sysconf(_SC_NPROCESSORS_ONLN);
+#else
+ // _SC_NPROC_ONLN must be defined...
+ cpuCoreCount = sysconf(_SC_NPROC_ONLN);
+#endif
+ CHECK(cpuCoreCount >= 1);
+ ALOGD("Number of CPU cores: %ld", cpuCoreCount);
+ return (size_t)cpuCoreCount;
+}
+
+status_t SoftHEVC::getVersion() {
+ ivd_ctl_getversioninfo_ip_t s_ctl_ip;
+ ivd_ctl_getversioninfo_op_t s_ctl_op;
+ UWORD8 au1_buf[512];
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_GETVERSION;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_getversioninfo_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_getversioninfo_op_t);
+ s_ctl_ip.pv_version_buffer = au1_buf;
+ s_ctl_ip.u4_version_buffer_size = sizeof(au1_buf);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in getting version number: 0x%x",
+ s_ctl_op.u4_error_code);
+ } else {
+ ALOGD("Ittiam decoder version number: %s",
+ (char *)s_ctl_ip.pv_version_buffer);
+ }
+ return OK;
+}
+
+status_t SoftHEVC::setParams(WORD32 stride, IVD_VIDEO_DECODE_MODE_T decMode) {
+ ivd_ctl_set_config_ip_t s_ctl_ip;
+ ivd_ctl_set_config_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+ s_ctl_ip.u4_disp_wd = stride;
+ s_ctl_ip.e_frm_skip_mode = IVD_SKIP_NONE;
+
+ s_ctl_ip.e_frm_out_mode = IVD_DISPLAY_FRAME_OUT;
+ s_ctl_ip.e_vid_dec_mode = decMode;
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_SETPARAMS;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_set_config_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_set_config_op_t);
+
+ ALOGD("Set the run-time (dynamic) parameters");
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the run-time parameters: 0x%x",
+ s_ctl_op.u4_error_code);
+
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftHEVC::resetPlugin() {
+ mIsInFlush = false;
+ mReceivedEOS = false;
+ memset(mTimeStamps, 0, sizeof(mTimeStamps));
+ memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
+
+ /* Initialize both start and end times */
+ gettimeofday(&mTimeStart, NULL);
+ gettimeofday(&mTimeEnd, NULL);
+
+ return OK;
+}
+
+status_t SoftHEVC::resetDecoder() {
+ ivd_ctl_reset_ip_t s_ctl_ip;
+ ivd_ctl_reset_op_t s_ctl_op;
+ IV_API_CALL_STATUS_T status;
+
+ s_ctl_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_ctl_ip.e_sub_cmd = IVD_CMD_CTL_RESET;
+ s_ctl_ip.u4_size = sizeof(ivd_ctl_reset_ip_t);
+ s_ctl_op.u4_size = sizeof(ivd_ctl_reset_op_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_ctl_ip,
+ (void *)&s_ctl_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in reset: 0x%x", s_ctl_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ /* Set the run-time (dynamic) parameters */
+ setParams(0, IVD_DECODE_FRAME);
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ return OK;
+}
+
+status_t SoftHEVC::setNumCores() {
+ ivdext_ctl_set_num_cores_ip_t s_set_cores_ip;
+ ivdext_ctl_set_num_cores_op_t s_set_cores_op;
+ IV_API_CALL_STATUS_T status;
+ s_set_cores_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_set_cores_ip.e_sub_cmd = IVDEXT_CMD_CTL_SET_NUM_CORES;
+ s_set_cores_ip.u4_num_cores = MIN(mNumCores, CODEC_MAX_NUM_CORES);
+ s_set_cores_ip.u4_size = sizeof(ivdext_ctl_set_num_cores_ip_t);
+ s_set_cores_op.u4_size = sizeof(ivdext_ctl_set_num_cores_op_t);
+ ALOGD("Set number of cores to %u", s_set_cores_ip.u4_num_cores);
+ status = ivdec_api_function(mCodecCtx, (void *)&s_set_cores_ip,
+ (void *)&s_set_cores_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in setting number of cores: 0x%x",
+ s_set_cores_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ return OK;
+}
+
+status_t SoftHEVC::setFlushMode() {
+ IV_API_CALL_STATUS_T status;
+ ivd_ctl_flush_ip_t s_video_flush_ip;
+ ivd_ctl_flush_op_t s_video_flush_op;
+
+ s_video_flush_ip.e_cmd = IVD_CMD_VIDEO_CTL;
+ s_video_flush_ip.e_sub_cmd = IVD_CMD_CTL_FLUSH;
+ s_video_flush_ip.u4_size = sizeof(ivd_ctl_flush_ip_t);
+ s_video_flush_op.u4_size = sizeof(ivd_ctl_flush_op_t);
+ ALOGD("Set the decoder in flush mode ");
+
+ /* Set the decoder in Flush mode, subsequent decode() calls will flush */
+ status = ivdec_api_function(mCodecCtx, (void *)&s_video_flush_ip,
+ (void *)&s_video_flush_op);
+
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in setting the decoder in flush mode: (%d) 0x%x", status,
+ s_video_flush_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mIsInFlush = true;
+ return OK;
+}
+
+status_t SoftHEVC::initDecoder() {
+ IV_API_CALL_STATUS_T status;
+
+ UWORD32 u4_num_reorder_frames;
+ UWORD32 u4_num_ref_frames;
+ UWORD32 u4_share_disp_buf;
+ WORD32 i4_level;
+
+ mNumCores = GetCPUCoreCount();
+
+ /* Initialize number of ref and reorder modes (for HEVC) */
+ u4_num_reorder_frames = 16;
+ u4_num_ref_frames = 16;
+ u4_share_disp_buf = 0;
+
+ if ((mWidth * mHeight) > (1920 * 1088)) {
+ i4_level = 50;
+ } else if ((mWidth * mHeight) > (1280 * 720)) {
+ i4_level = 41;
+ } else {
+ i4_level = 31;
+ }
+
+ {
+ iv_num_mem_rec_ip_t s_num_mem_rec_ip;
+ iv_num_mem_rec_op_t s_num_mem_rec_op;
+
+ s_num_mem_rec_ip.u4_size = sizeof(s_num_mem_rec_ip);
+ s_num_mem_rec_op.u4_size = sizeof(s_num_mem_rec_op);
+ s_num_mem_rec_ip.e_cmd = IV_CMD_GET_NUM_MEM_REC;
+
+ ALOGV("Get number of mem records");
+ status = ivdec_api_function(mCodecCtx, (void*)&s_num_mem_rec_ip,
+ (void*)&s_num_mem_rec_op);
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in getting mem records: 0x%x",
+ s_num_mem_rec_op.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+
+ mNumMemRecords = s_num_mem_rec_op.u4_num_mem_rec;
+ }
+
+ mMemRecords = (iv_mem_rec_t*)ivd_aligned_malloc(
+ 128, mNumMemRecords * sizeof(iv_mem_rec_t));
+ if (mMemRecords == NULL) {
+ ALOGE("Allocation failure");
+ return NO_MEMORY;
+ }
+
+ {
+ size_t i;
+ ivdext_fill_mem_rec_ip_t s_fill_mem_ip;
+ ivdext_fill_mem_rec_op_t s_fill_mem_op;
+ iv_mem_rec_t *ps_mem_rec;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_ip_t);
+ s_fill_mem_ip.i4_level = i4_level;
+ s_fill_mem_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_fill_mem_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_fill_mem_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_fill_mem_ip.u4_num_extra_disp_buf = 0;
+ s_fill_mem_ip.e_output_format = mIvColorFormat;
+
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.pv_mem_rec_location = mMemRecords;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_wd = mWidth;
+ s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_max_frm_ht = mHeight;
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_size =
+ sizeof(ivdext_fill_mem_rec_op_t);
+
+ ps_mem_rec = mMemRecords;
+ for (i = 0; i < mNumMemRecords; i++)
+ ps_mem_rec[i].u4_size = sizeof(iv_mem_rec_t);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_fill_mem_ip,
+ (void *)&s_fill_mem_op);
+
+ if (IV_SUCCESS != status) {
+ ALOGE("Error in filling mem records: 0x%x",
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ mNumMemRecords =
+ s_fill_mem_op.s_ivd_fill_mem_rec_op_t.u4_num_mem_rec_filled;
+
+ ps_mem_rec = mMemRecords;
+
+ for (i = 0; i < mNumMemRecords; i++) {
+ ps_mem_rec->pv_base = ivd_aligned_malloc(
+ ps_mem_rec->u4_mem_alignment, ps_mem_rec->u4_mem_size);
+ if (ps_mem_rec->pv_base == NULL) {
+ ALOGE("Allocation failure for memory record #%zu of size %u",
+ i, ps_mem_rec->u4_mem_size);
+ status = IV_FAIL;
+ return NO_MEMORY;
+ }
+
+ ps_mem_rec++;
+ }
+ }
+
+ /* Initialize the decoder */
+ {
+ ivdext_init_ip_t s_init_ip;
+ ivdext_init_op_t s_init_op;
+
+ void *dec_fxns = (void *)ivdec_api_function;
+
+ s_init_ip.s_ivd_init_ip_t.u4_size = sizeof(ivdext_init_ip_t);
+ s_init_ip.s_ivd_init_ip_t.e_cmd = (IVD_API_COMMAND_TYPE_T)IV_CMD_INIT;
+ s_init_ip.s_ivd_init_ip_t.pv_mem_rec_location = mMemRecords;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_wd = mWidth;
+ s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
+
+ s_init_ip.i4_level = i4_level;
+ s_init_ip.u4_num_reorder_frames = u4_num_reorder_frames;
+ s_init_ip.u4_num_ref_frames = u4_num_ref_frames;
+ s_init_ip.u4_share_disp_buf = u4_share_disp_buf;
+ s_init_ip.u4_num_extra_disp_buf = 0;
+
+ s_init_op.s_ivd_init_op_t.u4_size = sizeof(s_init_op);
+
+ s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
+ s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorFormat;
+
+ mCodecCtx = (iv_obj_t*)mMemRecords[0].pv_base;
+ mCodecCtx->pv_fxns = dec_fxns;
+ mCodecCtx->u4_size = sizeof(iv_obj_t);
+
+ ALOGD("Initializing decoder");
+ status = ivdec_api_function(mCodecCtx, (void *)&s_init_ip,
+ (void *)&s_init_op);
+ if (status != IV_SUCCESS) {
+ ALOGE("Error in init: 0x%x",
+ s_init_op.s_ivd_init_op_t.u4_error_code);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* Reset the plugin state */
+ resetPlugin();
+
+ /* Set the run-time (dynamic) parameters */
+ setParams(0, IVD_DECODE_FRAME);
+
+ /* Set number of cores/threads to be used by the codec */
+ setNumCores();
+
+ /* Get codec version */
+ getVersion();
+
+ /* Allocate internal picture buffer */
+ mFlushOutBuffer = (uint8_t *)ivd_aligned_malloc(128, mStride * mHeight * 3 / 2);
+ if (NULL == mFlushOutBuffer) {
+ ALOGE("Could not allocate flushOutputBuffer of size %zu", mStride * mHeight * 3 / 2);
+ return NO_MEMORY;
+ }
+
+ return OK;
+}
+
+status_t SoftHEVC::deInitDecoder() {
+ size_t i;
+ iv_mem_rec_t *ps_mem_rec;
+ ps_mem_rec = mMemRecords;
+ ALOGD("Freeing codec memory");
+ for (i = 0; i < mNumMemRecords; i++) {
+ ivd_aligned_free(ps_mem_rec->pv_base);
+ ps_mem_rec++;
+ }
+
+ ivd_aligned_free(mMemRecords);
+ ivd_aligned_free(mFlushOutBuffer);
+ return OK;
+}
+
+void SoftHEVC::onReset() {
+ ALOGD("onReset called");
+ SoftVideoDecoderOMXComponent::onReset();
+
+ resetDecoder();
+ resetPlugin();
+}
+
+void SoftHEVC::onPortFlushCompleted(OMX_U32 portIndex) {
+ ALOGD("onPortFlushCompleted on port %d", portIndex);
+
+ /* Once the output buffers are flushed, ignore any buffers that are held in decoder */
+ if (kOutputPortIndex == portIndex) {
+ setFlushMode();
+
+ /* Reset the time stamp arrays */
+ memset(mTimeStamps, 0, sizeof(mTimeStamps));
+ memset(mTimeStampsValid, 0, sizeof(mTimeStampsValid));
+
+ while (true) {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ IV_API_CALL_STATUS_T status;
+ size_t sizeY, sizeUV;
+
+ s_dec_ip.e_cmd = IVD_CMD_VIDEO_DECODE;
+
+ s_dec_ip.u4_ts = 0;
+ s_dec_ip.pv_stream_buffer = NULL;
+ s_dec_ip.u4_num_Bytes = 0;
+
+ s_dec_ip.u4_size = sizeof(ivd_video_decode_ip_t);
+ s_dec_op.u4_size = sizeof(ivd_video_decode_op_t);
+
+ sizeY = mStride * mHeight;
+ sizeUV = sizeY / 4;
+ s_dec_ip.s_out_buffer.u4_min_out_buf_size[0] = sizeY;
+ s_dec_ip.s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
+ s_dec_ip.s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
+
+ s_dec_ip.s_out_buffer.pu1_bufs[0] = mFlushOutBuffer;
+ s_dec_ip.s_out_buffer.pu1_bufs[1] =
+ s_dec_ip.s_out_buffer.pu1_bufs[0] + sizeY;
+ s_dec_ip.s_out_buffer.pu1_bufs[2] =
+ s_dec_ip.s_out_buffer.pu1_bufs[1] + sizeUV;
+ s_dec_ip.s_out_buffer.u4_num_bufs = 3;
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
+ (void *)&s_dec_op);
+ if (0 == s_dec_op.u4_output_present) {
+ resetPlugin();
+ break;
+ }
+ }
+ }
+}
+
+void SoftHEVC::onQueueFilled(OMX_U32 portIndex) {
+ IV_API_CALL_STATUS_T status;
+
+ UNUSED(portIndex);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
+ List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
+
+ /* If input EOS is seen and the decoder is not in flush mode,
+ * set the decoder in flush mode.
+ * There can be a case where EOS is sent along with the last picture data.
+ * In that case, the decoder has to be put in flush mode only after that
+ * input data has been decoded. This case is handled here */
+
+ if (mReceivedEOS && !mIsInFlush) {
+ setFlushMode();
+ }
+
+ while (outQueue.size() == kNumBuffers) {
+ BufferInfo *inInfo;
+ OMX_BUFFERHEADERTYPE *inHeader;
+
+ BufferInfo *outInfo;
+ OMX_BUFFERHEADERTYPE *outHeader;
+ size_t timeStampIx;
+
+ inInfo = NULL;
+ inHeader = NULL;
+
+ if (!mIsInFlush) {
+ if (!inQueue.empty()) {
+ inInfo = *inQueue.begin();
+ inHeader = inInfo->mHeader;
+ } else {
+ break;
+ }
+ }
+
+ outInfo = *outQueue.begin();
+ outHeader = outInfo->mHeader;
+ outHeader->nFlags = 0;
+ outHeader->nTimeStamp = 0;
+ outHeader->nOffset = 0;
+
+ if (inHeader != NULL && (inHeader->nFlags & OMX_BUFFERFLAG_EOS)) {
+ ALOGD("EOS seen on input");
+ mReceivedEOS = true;
+ if (inHeader->nFilledLen == 0) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ setFlushMode();
+ }
+ }
+
+ /* Get a free slot in timestamp array to hold input timestamp */
+ {
+ size_t i;
+ timeStampIx = 0;
+ for (i = 0; i < MAX_TIME_STAMPS; i++) {
+ if (!mTimeStampsValid[i]) {
+ timeStampIx = i;
+ break;
+ }
+ }
+ if (inHeader != NULL) {
+ mTimeStampsValid[timeStampIx] = true;
+ mTimeStamps[timeStampIx] = inHeader->nTimeStamp;
+ }
+ }
+
+ {
+ ivd_video_decode_ip_t s_dec_ip;
+ ivd_video_decode_op_t s_dec_op;
+ WORD32 timeDelay, timeTaken;
+ size_t sizeY, sizeUV;
+
+ s_dec_ip.e_cmd = IVD_CMD_VIDEO_DECODE;
+
+ /* When in flush mode and after EOS with zero-byte input,
+ * inHeader is set to NULL. Hence check for non-null */
+ if (inHeader != NULL) {
+ s_dec_ip.u4_ts = timeStampIx;
+ s_dec_ip.pv_stream_buffer = inHeader->pBuffer
+ + inHeader->nOffset;
+ s_dec_ip.u4_num_Bytes = inHeader->nFilledLen;
+ } else {
+ s_dec_ip.u4_ts = 0;
+ s_dec_ip.pv_stream_buffer = NULL;
+ s_dec_ip.u4_num_Bytes = 0;
+ }
+
+ s_dec_ip.u4_size = sizeof(ivd_video_decode_ip_t);
+ s_dec_op.u4_size = sizeof(ivd_video_decode_op_t);
+
+ sizeY = mStride * mHeight;
+ sizeUV = sizeY / 4;
+ s_dec_ip.s_out_buffer.u4_min_out_buf_size[0] = sizeY;
+ s_dec_ip.s_out_buffer.u4_min_out_buf_size[1] = sizeUV;
+ s_dec_ip.s_out_buffer.u4_min_out_buf_size[2] = sizeUV;
+
+ s_dec_ip.s_out_buffer.pu1_bufs[0] = outHeader->pBuffer;
+ s_dec_ip.s_out_buffer.pu1_bufs[1] =
+ s_dec_ip.s_out_buffer.pu1_bufs[0] + sizeY;
+ s_dec_ip.s_out_buffer.pu1_bufs[2] =
+ s_dec_ip.s_out_buffer.pu1_bufs[1] + sizeUV;
+ s_dec_ip.s_out_buffer.u4_num_bufs = 3;
+
+ GETTIME(&mTimeStart, NULL);
+ /* Compute time elapsed between end of previous decode()
+ * to start of current decode() */
+ TIME_DIFF(mTimeEnd, mTimeStart, timeDelay);
+
+ status = ivdec_api_function(mCodecCtx, (void *)&s_dec_ip,
+ (void *)&s_dec_op);
+
+ GETTIME(&mTimeEnd, NULL);
+ /* Compute time taken for decode() */
+ TIME_DIFF(mTimeStart, mTimeEnd, timeTaken);
+
+ ALOGD("timeTaken=%6d delay=%6d numBytes=%6d", timeTaken, timeDelay,
+ s_dec_op.u4_num_bytes_consumed);
+
+ if ((inHeader != NULL) && (1 != s_dec_op.u4_frame_decoded_flag)) {
+ /* If the input did not contain picture data, then ignore
+ * the associated timestamp */
+ mTimeStampsValid[timeStampIx] = false;
+ }
+
+ /* If a valid height and width are decoded,
+ * then check for a change in resolution */
+ if ((0 < s_dec_op.u4_pic_wd) && (0 < s_dec_op.u4_pic_ht)) {
+ uint32_t width = s_dec_op.u4_pic_wd;
+ uint32_t height = s_dec_op.u4_pic_ht;
+
+ if ((width != mWidth || height != mHeight)) {
+ mWidth = width;
+ mHeight = height;
+ mStride = mWidth;
+
+ /* If the width and height are greater than
+ * the dimensions used during codec creation, then
+ * delete the current instance and recreate an instance with
+ * new dimensions */
+ /* TODO: The following does not work currently, since the decoder
+ * returns 0 x 0 as the width and height when they are not supported.
+ * Once the decoder is updated to return the actual width and height,
+ * this can be validated. */
+
+ if ((mWidth * mHeight) > (mInitWidth * mInitHeight)) {
+ status_t ret;
+ ALOGD("Trying reInit");
+ ret = deInitDecoder();
+ if (OK != ret) {
+ // TODO: Handle graceful exit
+ ALOGE("Create failure");
+ return;
+ }
+
+ mInitWidth = mWidth;
+ mInitHeight = mHeight;
+
+ ret = initDecoder();
+ if (OK != ret) {
+ // TODO: Handle graceful exit
+ ALOGE("Create failure");
+ return;
+ }
+ }
+ updatePortDefinitions();
+
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ return;
+ }
+ }
+
+ if (s_dec_op.u4_output_present) {
+ outHeader->nFilledLen = (mStride * mHeight * 3) / 2;
+
+ outHeader->nTimeStamp = mTimeStamps[s_dec_op.u4_ts];
+ mTimeStampsValid[s_dec_op.u4_ts] = false;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ } else {
+ /* If in flush mode and no output is returned by the codec,
+ * then come out of flush mode */
+ mIsInFlush = false;
+
+ /* If EOS was received on input port and there is no output
+ * from the codec, then signal EOS on output port */
+ if (mReceivedEOS) {
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags |= OMX_BUFFERFLAG_EOS;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+ resetPlugin();
+ }
+ }
+ }
+
+ // TODO: Handle more than one picture data
+ if (inHeader != NULL) {
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(const char *name,
+ const OMX_CALLBACKTYPE *callbacks, OMX_PTR appData,
+ OMX_COMPONENTTYPE **component) {
+ return new android::SoftHEVC(name, callbacks, appData, component);
+}
diff --git a/media/libstagefright/codecs/hevcdec/SoftHEVC.h b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
new file mode 100644
index 0000000..20db0e1
--- /dev/null
+++ b/media/libstagefright/codecs/hevcdec/SoftHEVC.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SOFT_HEVC_H_
+
+#define SOFT_HEVC_H_
+
+#include "SoftVideoDecoderOMXComponent.h"
+#include <sys/time.h>
+
+namespace android {
+
+#define ivd_aligned_malloc(alignment, size) memalign(alignment, size)
+#define ivd_aligned_free(buf) free(buf)
+
+/** Number of entries in the time-stamp array */
+#define MAX_TIME_STAMPS 64
+
+/** Maximum number of cores supported by the codec */
+#define CODEC_MAX_NUM_CORES 4
+
+#define CODEC_MAX_WIDTH 1920
+
+#define CODEC_MAX_HEIGHT 1088
+
+/** Input buffer size */
+#define INPUT_BUF_SIZE (1024 * 1024)
+
+#define MIN(a, b) ((a) < (b)) ? (a) : (b)
+
+/** Used to remove warnings about unused parameters */
+#define UNUSED(x) ((void)(x))
+
+/** Get time */
+#define GETTIME(a, b) gettimeofday(a, b);
+
+/** Compute difference between start and end */
+#define TIME_DIFF(start, end, diff) \
+ diff = ((end.tv_sec - start.tv_sec) * 1000000) + \
+ (end.tv_usec - start.tv_usec);
+
+struct SoftHEVC: public SoftVideoDecoderOMXComponent {
+ SoftHEVC(const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftHEVC();
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onReset();
+private:
+ // Number of input and output buffers
+ enum {
+ kNumBuffers = 8
+ };
+
+ iv_obj_t *mCodecCtx; // Codec context
+ iv_mem_rec_t *mMemRecords; // Memory records requested by the codec
+ size_t mNumMemRecords; // Number of memory records requested by the codec
+
+ uint32_t mNewWidth; // New width after change in resolution
+ uint32_t mNewHeight; // New height after change in resolution
+ uint32_t mInitWidth; // Width used during codec creation
+ uint32_t mInitHeight; // Height used during codec creation
+ size_t mStride; // Stride to be used for display buffers
+
+    size_t mNumCores;            // Number of cores to be used by the codec
+
+ struct timeval mTimeStart; // Time at the start of decode()
+ struct timeval mTimeEnd; // Time at the end of decode()
+
+    // Internal buffer used to flush out the buffers from the decoder
+ uint8_t *mFlushOutBuffer;
+
+ // Status of entries in the timestamp array
+ bool mTimeStampsValid[MAX_TIME_STAMPS];
+
+    // Timestamp array - since the codec does not take 64-bit timestamps,
+    // they are maintained in the plugin
+ OMX_S64 mTimeStamps[MAX_TIME_STAMPS];
+
+ OMX_COLOR_FORMATTYPE mOmxColorFormat; // OMX Color format
+ IV_COLOR_FORMAT_T mIvColorFormat; // Ittiam Color format
+
+    bool mIsInFlush;        // codec is in flush mode
+    bool mReceivedEOS;      // EOS was received on input port
+    bool mIsAdapting;       // plugin is in the middle of a resolution change
+
+ status_t initDecoder();
+ status_t deInitDecoder();
+ status_t setFlushMode();
+ status_t setParams(WORD32 stride, IVD_VIDEO_DECODE_MODE_T decMode);
+ status_t getVersion();
+ status_t setNumCores();
+ status_t resetDecoder();
+ status_t resetPlugin();
+    DISALLOW_EVIL_CONSTRUCTORS(SoftHEVC);
+ DISALLOW_EVIL_CONSTRUCTORS (SoftHEVC);
+};
+
+} // namespace android
+
+#endif // SOFT_HEVC_H_
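The GETTIME/TIME_DIFF macros above wrap gettimeofday() so the decoder can measure how long each decode call takes, in microseconds, via the mTimeStart/mTimeEnd members. A minimal standalone sketch of how they are meant to be used (the decode step is elided and the variable names are illustrative):

    #include <sys/time.h>
    #include <cstdio>

    #define GETTIME(a, b) gettimeofday(a, b);
    #define TIME_DIFF(start, end, diff) \
        diff = ((end.tv_sec - start.tv_sec) * 1000000) + \
                (end.tv_usec - start.tv_usec);

    int main() {
        struct timeval start, end;
        long long elapsedUs = 0;
        GETTIME(&start, NULL);
        // ... decode one access unit here ...
        GETTIME(&end, NULL);
        TIME_DIFF(start, end, elapsedUs);  // wall-clock time in microseconds
        printf("decode took %lld us\n", elapsedUs);
        return 0;
    }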
diff --git a/media/libstagefright/codecs/m4v_h263/dec/Android.mk b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
index a3d5779..1d232c6 100644
--- a/media/libstagefright/codecs/m4v_h263/dec/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/dec/Android.mk
@@ -46,6 +46,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := -DOSCL_EXPORT_REF= -DOSCL_IMPORT_REF=
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -72,4 +74,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_mpeg4dec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/m4v_h263/enc/Android.mk b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
index 83a2dd2..c9006d9 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/Android.mk
+++ b/media/libstagefright/codecs/m4v_h263/enc/Android.mk
@@ -33,6 +33,8 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright/include \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_STATIC_LIBRARY)
################################################################################
@@ -72,4 +74,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_mpeg4enc
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
index da5b785..e25709d 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
+++ b/media/libstagefright/codecs/m4v_h263/enc/SoftMPEG4Encoder.cpp
@@ -679,7 +679,7 @@ void SoftMPEG4Encoder::onQueueFilled(OMX_U32 /* portIndex */) {
mSawInputEOS = true;
}
- buffer_handle_t srcBuffer; // for MetaDataMode only
+ buffer_handle_t srcBuffer = NULL; // for MetaDataMode only
if (inHeader->nFilledLen > 0) {
uint8_t *inputData = NULL;
if (mStoreMetaDataInBuffers) {
diff --git a/media/libstagefright/codecs/mp3dec/Android.mk b/media/libstagefright/codecs/mp3dec/Android.mk
index 135c715..8284490 100644
--- a/media/libstagefright/codecs/mp3dec/Android.mk
+++ b/media/libstagefright/codecs/mp3dec/Android.mk
@@ -50,6 +50,8 @@ LOCAL_C_INCLUDES := \
LOCAL_CFLAGS := \
-DOSCL_UNUSED_ARG=
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_mp3dec
LOCAL_ARM_MODE := arm
@@ -69,6 +71,8 @@ LOCAL_C_INCLUDES := \
$(LOCAL_PATH)/src \
$(LOCAL_PATH)/include
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
index 4d864df..5396022 100644
--- a/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
+++ b/media/libstagefright/codecs/mp3dec/SoftMP3.cpp
@@ -146,6 +146,23 @@ OMX_ERRORTYPE SoftMP3::internalGetParameter(
return OMX_ErrorNone;
}
+ case OMX_IndexParamAudioMp3:
+ {
+ OMX_AUDIO_PARAM_MP3TYPE *mp3Params =
+ (OMX_AUDIO_PARAM_MP3TYPE *)params;
+
+ if (mp3Params->nPortIndex > 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ mp3Params->nChannels = mNumChannels;
+ mp3Params->nBitRate = 0 /* unknown */;
+ mp3Params->nSampleRate = mSamplingRate;
+ // other fields are encoder-only
+
+ return OMX_ErrorNone;
+ }
+
default:
return SimpleSoftOMXComponent::internalGetParameter(index, params);
}
@@ -335,6 +352,9 @@ void SoftMP3::onPortFlushCompleted(OMX_U32 portIndex) {
// depend on fragments from the last one decoded.
pvmp3_InitDecoder(mConfig, mDecoderBuf);
mIsFirst = true;
+ mSignalledError = false;
+ mSawInputEos = false;
+ mSignalledOutputEos = false;
}
}
diff --git a/media/libstagefright/codecs/on2/dec/Android.mk b/media/libstagefright/codecs/on2/dec/Android.mk
index 7f2c46d..93ff64c 100644
--- a/media/libstagefright/codecs/on2/dec/Android.mk
+++ b/media/libstagefright/codecs/on2/dec/Android.mk
@@ -20,4 +20,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_vpxdec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index 5efe022..dc38ea8 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -27,7 +27,6 @@
namespace android {
-
template<class T>
static void InitOMXParams(T *params) {
params->nSize = sizeof(T);
@@ -141,17 +140,27 @@ SoftVPXEncoder::SoftVPXEncoder(const char *name,
mWidth(176),
mHeight(144),
mBitrate(192000), // in bps
+ mFramerate(30 << 16), // in Q16 format
mBitrateUpdated(false),
mBitrateControlMode(VPX_VBR), // variable bitrate
- mFrameDurationUs(33333), // Defaults to 30 fps
mDCTPartitions(0),
mErrorResilience(OMX_FALSE),
mColorFormat(OMX_COLOR_FormatYUV420Planar),
mLevel(OMX_VIDEO_VP8Level_Version0),
+ mKeyFrameInterval(0),
+ mMinQuantizer(0),
+ mMaxQuantizer(0),
+ mTemporalLayers(0),
+ mTemporalPatternType(OMX_VIDEO_VPXTemporalLayerPatternNone),
+ mTemporalPatternLength(0),
+ mTemporalPatternIdx(0),
+ mLastTimestamp(0x7FFFFFFFFFFFFFFFLL),
mConversionBuffer(NULL),
mInputDataIsMeta(false),
mGrallocModule(NULL),
mKeyFrameRequested(false) {
+ memset(mTemporalLayerBitrateRatio, 0, sizeof(mTemporalLayerBitrateRatio));
+ mTemporalLayerBitrateRatio[0] = 100;
initPorts();
}
@@ -180,9 +189,8 @@ void SoftVPXEncoder::initPorts() {
inputPort.format.video.nStride = inputPort.format.video.nFrameWidth;
inputPort.format.video.nSliceHeight = inputPort.format.video.nFrameHeight;
inputPort.format.video.nBitrate = 0;
- // frameRate is reciprocal of frameDuration, which is
- // in microseconds. It is also in Q16 format.
- inputPort.format.video.xFramerate = (1000000/mFrameDurationUs) << 16;
+ // frameRate is in Q16 format.
+ inputPort.format.video.xFramerate = mFramerate;
inputPort.format.video.bFlagErrorConcealment = OMX_FALSE;
inputPort.nPortIndex = kInputPortIndex;
inputPort.eDir = OMX_DirInput;
@@ -220,7 +228,7 @@ void SoftVPXEncoder::initPorts() {
outputPort.format.video.eCompressionFormat = OMX_VIDEO_CodingVP8;
outputPort.format.video.eColorFormat = OMX_COLOR_FormatUnused;
outputPort.format.video.pNativeWindow = NULL;
- outputPort.nBufferSize = 256 * 1024; // arbitrary
+ outputPort.nBufferSize = 1024 * 1024; // arbitrary
addPort(outputPort);
}
@@ -236,7 +244,9 @@ status_t SoftVPXEncoder::initEncoder() {
if (mCodecInterface == NULL) {
return UNKNOWN_ERROR;
}
-
+ ALOGD("VP8: initEncoder. BRMode: %u. TSLayers: %zu. KF: %u. QP: %u - %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
+ mMinQuantizer, mMaxQuantizer);
codec_return = vpx_codec_enc_config_default(mCodecInterface,
mCodecConfiguration,
0); // Codec specific flags
@@ -277,8 +287,120 @@ status_t SoftVPXEncoder::initEncoder() {
mCodecConfiguration->g_timebase.num = 1;
mCodecConfiguration->g_timebase.den = 1000000;
// rc_target_bitrate is in kbps, mBitrate in bps
- mCodecConfiguration->rc_target_bitrate = mBitrate/1000;
+ mCodecConfiguration->rc_target_bitrate = (mBitrate + 500) / 1000;
mCodecConfiguration->rc_end_usage = mBitrateControlMode;
+ // Disable frame drop - not allowed in MediaCodec now.
+ mCodecConfiguration->rc_dropframe_thresh = 0;
+ if (mBitrateControlMode == VPX_CBR) {
+ // Disable spatial resizing.
+ mCodecConfiguration->rc_resize_allowed = 0;
+ // Single-pass mode.
+ mCodecConfiguration->g_pass = VPX_RC_ONE_PASS;
+ // Maximum amount of bits that can be subtracted from the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_undershoot_pct = 100;
+ // Maximum amount of bits that can be added to the target
+ // bitrate - expressed as percentage of the target bitrate.
+ mCodecConfiguration->rc_overshoot_pct = 15;
+ // Initial value of the buffer level in ms.
+ mCodecConfiguration->rc_buf_initial_sz = 500;
+ // Amount of data that the encoder should try to maintain in ms.
+ mCodecConfiguration->rc_buf_optimal_sz = 600;
+ // The amount of data that may be buffered by the decoding
+ // application in ms.
+ mCodecConfiguration->rc_buf_sz = 1000;
+ // Enable error resilience - needed for packet loss.
+ mCodecConfiguration->g_error_resilient = 1;
+ // Disable lagged encoding.
+ mCodecConfiguration->g_lag_in_frames = 0;
+            // Maximum key frame interval - boosted to 3000 for CBR
+ mCodecConfiguration->kf_max_dist = 3000;
+ // Encoder determines optimal key frame placement automatically.
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+
+    // Temporal pattern for frames - for now only a WebRTC-like pattern is supported.
+ switch (mTemporalLayers) {
+ case 0:
+ {
+ mTemporalPatternLength = 0;
+ break;
+ }
+ case 1:
+ {
+ mCodecConfiguration->ts_number_layers = 1;
+ mCodecConfiguration->ts_rate_decimator[0] = 1;
+ mCodecConfiguration->ts_periodicity = 1;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mTemporalPattern[0] = kTemporalUpdateLastRefAll;
+ mTemporalPatternLength = 1;
+ break;
+ }
+ case 2:
+ {
+ mCodecConfiguration->ts_number_layers = 2;
+ mCodecConfiguration->ts_rate_decimator[0] = 2;
+ mCodecConfiguration->ts_rate_decimator[1] = 1;
+ mCodecConfiguration->ts_periodicity = 2;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 1;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[6] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ }
+ case 3:
+ {
+ mCodecConfiguration->ts_number_layers = 3;
+ mCodecConfiguration->ts_rate_decimator[0] = 4;
+ mCodecConfiguration->ts_rate_decimator[1] = 2;
+ mCodecConfiguration->ts_rate_decimator[2] = 1;
+ mCodecConfiguration->ts_periodicity = 4;
+ mCodecConfiguration->ts_layer_id[0] = 0;
+ mCodecConfiguration->ts_layer_id[1] = 2;
+ mCodecConfiguration->ts_layer_id[2] = 1;
+ mCodecConfiguration->ts_layer_id[3] = 2;
+ mTemporalPattern[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+ mTemporalPattern[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
+ mTemporalPattern[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+ mTemporalPattern[3] = kTemporalUpdateNone;
+ mTemporalPattern[4] = kTemporalUpdateLastRefAltRef;
+ mTemporalPattern[5] = kTemporalUpdateNone;
+ mTemporalPattern[6] = kTemporalUpdateGoldenRefAltRef;
+ mTemporalPattern[7] = kTemporalUpdateNone;
+ mTemporalPatternLength = 8;
+ break;
+ }
+ default:
+ {
+            ALOGE("Wrong number of temporal layers %zu", mTemporalLayers);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Set bitrate values for each layer
+ for (size_t i = 0; i < mCodecConfiguration->ts_number_layers; i++) {
+ mCodecConfiguration->ts_target_bitrate[i] =
+ mCodecConfiguration->rc_target_bitrate *
+ mTemporalLayerBitrateRatio[i] / 100;
+ }
+ if (mKeyFrameInterval > 0) {
+ mCodecConfiguration->kf_max_dist = mKeyFrameInterval;
+ mCodecConfiguration->kf_min_dist = mKeyFrameInterval;
+ mCodecConfiguration->kf_mode = VPX_KF_AUTO;
+ }
+ if (mMinQuantizer > 0) {
+ mCodecConfiguration->rc_min_quantizer = mMinQuantizer;
+ }
+ if (mMaxQuantizer > 0) {
+ mCodecConfiguration->rc_max_quantizer = mMaxQuantizer;
+ }
codec_return = vpx_codec_enc_init(mCodecContext,
mCodecInterface,
@@ -298,6 +420,33 @@ status_t SoftVPXEncoder::initEncoder() {
return UNKNOWN_ERROR;
}
+ // Extra CBR settings
+ if (mBitrateControlMode == VPX_CBR) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_STATIC_THRESHOLD,
+ 1);
+ if (codec_return == VPX_CODEC_OK) {
+ uint32_t rc_max_intra_target =
+ mCodecConfiguration->rc_buf_optimal_sz * (mFramerate >> 17) / 10;
+ // Don't go below 3 times per frame bandwidth.
+            // Don't go below 3 times the per-frame bandwidth.
+ rc_max_intra_target = 300;
+ }
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_MAX_INTRA_BITRATE_PCT,
+ rc_max_intra_target);
+ }
+ if (codec_return == VPX_CODEC_OK) {
+ codec_return = vpx_codec_control(mCodecContext,
+ VP8E_SET_CPUUSED,
+ -8);
+ }
+ if (codec_return != VPX_CODEC_OK) {
+ ALOGE("Error setting cbr parameters for vpx encoder.");
+ return UNKNOWN_ERROR;
+ }
+ }
+
if (mColorFormat == OMX_COLOR_FormatYUV420SemiPlanar || mInputDataIsMeta) {
if (mConversionBuffer == NULL) {
mConversionBuffer = (uint8_t *)malloc(mWidth * mHeight * 3 / 2);
@@ -361,9 +510,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
}
formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- // Converting from microseconds
- // Also converting to Q16 format
- formatParams->xFramerate = (1000000/mFrameDurationUs) << 16;
+ formatParams->xFramerate = mFramerate;
return OMX_ErrorNone;
} else if (formatParams->nPortIndex == kOutputPortIndex) {
formatParams->eCompressionFormat = OMX_VIDEO_CodingVP8;
@@ -411,6 +558,24 @@ OMX_ERRORTYPE SoftVPXEncoder::internalGetParameter(OMX_INDEXTYPE index,
return OMX_ErrorNone;
}
+ case OMX_IndexParamVideoAndroidVp8Encoder: {
+ OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *vp8AndroidParams =
+ (OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param;
+
+ if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+
+ vp8AndroidParams->nKeyFrameInterval = mKeyFrameInterval;
+ vp8AndroidParams->eTemporalPattern = mTemporalPatternType;
+ vp8AndroidParams->nTemporalLayerCount = mTemporalLayers;
+ vp8AndroidParams->nMinQuantizer = mMinQuantizer;
+ vp8AndroidParams->nMaxQuantizer = mMaxQuantizer;
+ memcpy(vp8AndroidParams->nTemporalLayerBitrateRatio,
+ mTemporalLayerBitrateRatio, sizeof(mTemporalLayerBitrateRatio));
+ return OMX_ErrorNone;
+ }
+
case OMX_IndexParamVideoProfileLevelQuerySupported: {
OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileAndLevel =
(OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param;
@@ -497,11 +662,15 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetParameter(OMX_INDEXTYPE index,
return internalSetVp8Params(
(const OMX_VIDEO_PARAM_VP8TYPE *)param);
+ case OMX_IndexParamVideoAndroidVp8Encoder:
+ return internalSetAndroidVp8Params(
+ (const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE *)param);
+
case OMX_IndexParamVideoProfileLevelCurrent:
return internalSetProfileLevel(
(const OMX_VIDEO_PARAM_PROFILELEVELTYPE *)param);
- case OMX_IndexVendorStartUnused:
+ case kStoreMetaDataExtensionIndex:
{
// storeMetaDataInBuffers
const StoreMetaDataInBuffersParams *storeParam =
@@ -610,6 +779,50 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetVp8Params(
return OMX_ErrorNone;
}
+OMX_ERRORTYPE SoftVPXEncoder::internalSetAndroidVp8Params(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams) {
+ if (vp8AndroidParams->nPortIndex != kOutputPortIndex) {
+ return OMX_ErrorUnsupportedIndex;
+ }
+ if (vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternNone &&
+ vp8AndroidParams->eTemporalPattern != OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vp8AndroidParams->nTemporalLayerCount > OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
+ return OMX_ErrorBadParameter;
+ }
+ if (vp8AndroidParams->nMinQuantizer > vp8AndroidParams->nMaxQuantizer) {
+ return OMX_ErrorBadParameter;
+ }
+
+ mTemporalPatternType = vp8AndroidParams->eTemporalPattern;
+ if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
+ mTemporalLayers = vp8AndroidParams->nTemporalLayerCount;
+ } else if (vp8AndroidParams->eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternNone) {
+ mTemporalLayers = 0;
+ }
+    // Check that the bitrate distribution across layers is in increasing order
+ if (mTemporalLayers > 1) {
+ for (size_t i = 0; i < mTemporalLayers - 1; i++) {
+ if (vp8AndroidParams->nTemporalLayerBitrateRatio[i + 1] <=
+ vp8AndroidParams->nTemporalLayerBitrateRatio[i]) {
+ ALOGE("Wrong bitrate ratio - should be in increasing order.");
+ return OMX_ErrorBadParameter;
+ }
+ }
+ }
+ mKeyFrameInterval = vp8AndroidParams->nKeyFrameInterval;
+ mMinQuantizer = vp8AndroidParams->nMinQuantizer;
+ mMaxQuantizer = vp8AndroidParams->nMaxQuantizer;
+ memcpy(mTemporalLayerBitrateRatio, vp8AndroidParams->nTemporalLayerBitrateRatio,
+ sizeof(mTemporalLayerBitrateRatio));
+ ALOGD("VP8: internalSetAndroidVp8Params. BRMode: %u. TS: %zu. KF: %u."
+ " QP: %u - %u BR0: %u. BR1: %u. BR2: %u",
+ (uint32_t)mBitrateControlMode, mTemporalLayers, mKeyFrameInterval,
+ mMinQuantizer, mMaxQuantizer, mTemporalLayerBitrateRatio[0],
+ mTemporalLayerBitrateRatio[1], mTemporalLayerBitrateRatio[2]);
+ return OMX_ErrorNone;
+}
OMX_ERRORTYPE SoftVPXEncoder::internalSetFormatParams(
const OMX_VIDEO_PARAM_PORTFORMATTYPE* format) {
@@ -660,9 +873,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
mHeight = port->format.video.nFrameHeight;
// xFramerate comes in Q16 format, in frames per second unit
- const uint32_t framerate = port->format.video.xFramerate >> 16;
- // frame duration is in microseconds
- mFrameDurationUs = (1000000/framerate);
+ mFramerate = port->format.video.xFramerate;
if (port->format.video.eColorFormat == OMX_COLOR_FormatYUV420Planar ||
port->format.video.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar ||
@@ -675,7 +886,7 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kInputPortIndex)->mDef;
def->format.video.nFrameWidth = mWidth;
def->format.video.nFrameHeight = mHeight;
- def->format.video.xFramerate = port->format.video.xFramerate;
+ def->format.video.xFramerate = mFramerate;
def->format.video.eColorFormat = mColorFormat;
def = &editPortInfo(kOutputPortIndex)->mDef;
def->format.video.nFrameWidth = mWidth;
@@ -684,6 +895,13 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetPortParams(
return OMX_ErrorNone;
} else if (port->nPortIndex == kOutputPortIndex) {
mBitrate = port->format.video.nBitrate;
+ mWidth = port->format.video.nFrameWidth;
+ mHeight = port->format.video.nFrameHeight;
+
+ OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(kOutputPortIndex)->mDef;
+ def->format.video.nFrameWidth = mWidth;
+ def->format.video.nFrameHeight = mHeight;
+ def->format.video.nBitrate = mBitrate;
return OMX_ErrorNone;
} else {
return OMX_ErrorBadPortIndex;
@@ -710,6 +928,74 @@ OMX_ERRORTYPE SoftVPXEncoder::internalSetBitrateParams(
return OMX_ErrorNone;
}
+vpx_enc_frame_flags_t SoftVPXEncoder::getEncodeFlags() {
+ vpx_enc_frame_flags_t flags = 0;
+ int patternIdx = mTemporalPatternIdx % mTemporalPatternLength;
+ mTemporalPatternIdx++;
+ switch (mTemporalPattern[patternIdx]) {
+ case kTemporalUpdateLast:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ break;
+ case kTemporalUpdateGoldenWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateGolden:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateAltrefWithoutDependency:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ // Deliberately no break here.
+ case kTemporalUpdateAltref:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateNoneNoRefAltref:
+ flags |= VP8_EFLAG_NO_REF_ARF;
+ // Deliberately no break here.
+ case kTemporalUpdateNone:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateNoneNoRefGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+ break;
+ case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
+ flags |= VP8_EFLAG_NO_REF_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_LAST;
+ break;
+ case kTemporalUpdateLastAndGoldenRefAltRef:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_REF_GF;
+ break;
+ case kTemporalUpdateLastRefAll:
+ flags |= VP8_EFLAG_NO_UPD_ARF;
+ flags |= VP8_EFLAG_NO_UPD_GF;
+ break;
+ }
+ return flags;
+}
void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
// Initialize encoder if not already
@@ -794,6 +1080,9 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
kInputBufferAlignment, source);
vpx_enc_frame_flags_t flags = 0;
+ if (mTemporalPatternLength > 0) {
+ flags = getEncodeFlags();
+ }
if (mKeyFrameRequested) {
flags |= VPX_EFLAG_FORCE_KF;
mKeyFrameRequested = false;
@@ -814,11 +1103,18 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
mBitrateUpdated = false;
}
+ uint32_t frameDuration;
+ if (inputBufferHeader->nTimeStamp > mLastTimestamp) {
+ frameDuration = (uint32_t)(inputBufferHeader->nTimeStamp - mLastTimestamp);
+ } else {
+ frameDuration = (uint32_t)(((uint64_t)1000000 << 16) / mFramerate);
+ }
+ mLastTimestamp = inputBufferHeader->nTimeStamp;
codec_return = vpx_codec_encode(
mCodecContext,
&raw_frame,
inputBufferHeader->nTimeStamp, // in timebase units
- mFrameDurationUs, // frame duration in timebase units
+ frameDuration, // frame duration in timebase units
flags, // frame flags
VPX_DL_REALTIME); // encoding deadline
if (codec_return != VPX_CODEC_OK) {
@@ -860,10 +1156,9 @@ void SoftVPXEncoder::onQueueFilled(OMX_U32 portIndex) {
OMX_ERRORTYPE SoftVPXEncoder::getExtensionIndex(
const char *name, OMX_INDEXTYPE *index) {
if (!strcmp(name, "OMX.google.android.index.storeMetaDataInBuffers")) {
- *index = OMX_IndexVendorStartUnused;
+ *(int32_t*)index = kStoreMetaDataExtensionIndex;
return OMX_ErrorNone;
}
-
return SimpleSoftOMXComponent::getExtensionIndex(name, index);
}
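The WebRTC-style temporal layering set up in initEncoder() assigns each input frame to a layer through ts_periodicity and ts_layer_id, while ts_rate_decimator gives the frame rate of the stream that remains when higher layers are dropped. A small standalone sketch of how the two-layer configuration partitions frames (the values are copied from the mTemporalLayers == 2 branch; the 30 fps input rate is illustrative):

    #include <cstdio>

    int main() {
        // Values from the 2-layer branch of initEncoder().
        const int ts_periodicity = 2;
        const int ts_layer_id[2] = {0, 1};        // frame index % periodicity -> layer
        const int ts_rate_decimator[2] = {2, 1};  // layer 0 alone runs at half the input rate
        const int input_fps = 30;                 // illustrative input frame rate

        for (int frame = 0; frame < 6; ++frame) {
            int layer = ts_layer_id[frame % ts_periodicity];
            printf("frame %d -> temporal layer %d (stream up to this layer plays at %d fps)\n",
                   frame, layer, input_fps / ts_rate_decimator[layer]);
        }
        return 0;
    }

Dropping layer 1 therefore leaves a self-contained 15 fps base stream; the per-frame reference restrictions returned by getEncodeFlags() are what keep that base stream decodable on its own.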
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
index 076830f..c5a83d1 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.h
@@ -91,6 +91,47 @@ protected:
const char *name, OMX_INDEXTYPE *index);
private:
+ enum {
+ kStoreMetaDataExtensionIndex = OMX_IndexVendorStartUnused + 1,
+ };
+
+ enum TemporalReferences {
+ // For 1 layer case: reference all (last, golden, and alt ref), but only
+ // update last.
+ kTemporalUpdateLastRefAll = 12,
+ // First base layer frame for 3 temporal layers, which updates last and
+ // golden with alt ref dependency.
+ kTemporalUpdateLastAndGoldenRefAltRef = 11,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenRefAltRef = 10,
+ // First enhancement layer with alt ref dependency.
+ kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9,
+ // Base layer with alt ref dependency.
+ kTemporalUpdateLastRefAltRef = 8,
+        // Highest enhancement layer without dependency on golden with alt ref
+ // dependency.
+ kTemporalUpdateNoneNoRefGoldenRefAltRef = 7,
+ // Second layer and last frame in cycle, for 2 layers.
+ kTemporalUpdateNoneNoRefAltref = 6,
+ // Highest enhancement layer.
+ kTemporalUpdateNone = 5,
+ // Second enhancement layer.
+ kTemporalUpdateAltref = 4,
+ // Second enhancement layer without dependency on previous frames in
+ // the second enhancement layer.
+ kTemporalUpdateAltrefWithoutDependency = 3,
+ // First enhancement layer.
+ kTemporalUpdateGolden = 2,
+ // First enhancement layer without dependency on previous frames in
+ // the first enhancement layer.
+ kTemporalUpdateGoldenWithoutDependency = 1,
+ // Base layer.
+ kTemporalUpdateLast = 0,
+ };
+ enum {
+ kMaxTemporalPattern = 8
+ };
+
// number of buffers allocated per port
static const uint32_t kNumBuffers = 4;
@@ -130,16 +171,15 @@ private:
// Target bitrate set for the encoder, in bits per second.
uint32_t mBitrate;
+ // Target framerate set for the encoder.
+ uint32_t mFramerate;
+
     // If a request for a change in bitrate has been received.
bool mBitrateUpdated;
// Bitrate control mode, either constant or variable
vpx_rc_mode mBitrateControlMode;
- // Frame duration is the reciprocal of framerate, denoted
- // in microseconds
- uint64_t mFrameDurationUs;
-
// vp8 specific configuration parameter
// that enables token partitioning of
// the stream into substreams
@@ -160,6 +200,36 @@ private:
// something else.
OMX_VIDEO_VP8LEVELTYPE mLevel;
+ // Key frame interval in frames
+ uint32_t mKeyFrameInterval;
+
+ // Minimum (best quality) quantizer
+ uint32_t mMinQuantizer;
+
+ // Maximum (worst quality) quantizer
+ uint32_t mMaxQuantizer;
+
+ // Number of coding temporal layers to be used.
+ size_t mTemporalLayers;
+
+    // Temporal layer bitrate ratios in percent
+ uint32_t mTemporalLayerBitrateRatio[OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS];
+
+ // Temporal pattern type
+ OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE mTemporalPatternType;
+
+ // Temporal pattern length
+ size_t mTemporalPatternLength;
+
+ // Temporal pattern current index
+ size_t mTemporalPatternIdx;
+
+ // Frame type temporal pattern
+ TemporalReferences mTemporalPattern[kMaxTemporalPattern];
+
+ // Last input buffer timestamp
+ OMX_TICKS mLastTimestamp;
+
// Conversion buffer is needed to convert semi
// planar yuv420 to planar format
// It is only allocated if input format is
@@ -185,6 +255,9 @@ private:
// dtor.
status_t releaseEncoder();
+ // Get current encode flags
+ vpx_enc_frame_flags_t getEncodeFlags();
+
// Handles port changes with respect to color formats
OMX_ERRORTYPE internalSetFormatParams(
const OMX_VIDEO_PARAM_PORTFORMATTYPE* format);
@@ -206,6 +279,10 @@ private:
OMX_ERRORTYPE internalSetVp8Params(
const OMX_VIDEO_PARAM_VP8TYPE* vp8Params);
+ // Handles Android vp8 specific parameters.
+ OMX_ERRORTYPE internalSetAndroidVp8Params(
+ const OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE* vp8AndroidParams);
+
// Updates encoder profile
OMX_ERRORTYPE internalSetProfileLevel(
const OMX_VIDEO_PARAM_PROFILELEVELTYPE* profileAndLevel);
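mFramerate replaces the old mFrameDurationUs field and is stored in OMX's Q16 fixed-point format (frames per second times 65536). When input timestamps do not advance, onQueueFilled() falls back to deriving the frame duration from it. A quick standalone check of that arithmetic, assuming the 30 fps constructor default:

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t framerateQ16 = 30 << 16;   // 30 fps in Q16, the constructor default
        uint32_t fallbackDurationUs =
                (uint32_t)(((uint64_t)1000000 << 16) / framerateQ16);
        printf("fallback frame duration: %u us\n", fallbackDurationUs);  // prints 33333
        return 0;
    }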
diff --git a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
index 2bb4c4d..524a3f0 100644
--- a/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/H264SwDecApi.c
@@ -42,6 +42,8 @@
#include "h264bsd_decoder.h"
#include "h264bsd_util.h"
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
Version Information
------------------------------------------------------------------------------*/
@@ -73,6 +75,7 @@ H264DEC_EVALUATION Compile evaluation version, restricts number of frames
#endif
void H264SwDecTrace(char *string) {
+ UNUSED(string);
}
void* H264SwDecMalloc(u32 size) {
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
index c948776..b409a06 100755
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_reconstruct.c
@@ -42,6 +42,8 @@
#include "armVC.h"
#endif /* H264DEC_OMXDL */
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
2. External compiler flags
--------------------------------------------------------------------------------
@@ -2136,7 +2138,8 @@ static void FillRow1(
i32 center,
i32 right)
{
-
+ UNUSED(left);
+ UNUSED(right);
ASSERT(ref);
ASSERT(fill);
diff --git a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
index a7c6f64..23401c6 100755
--- a/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
+++ b/media/libstagefright/codecs/on2/h264dec/source/h264bsd_slice_header.c
@@ -47,6 +47,8 @@
#include "h264bsd_nal_unit.h"
#include "h264bsd_dpb.h"
+#define UNUSED(x) (void)(x)
+
/*------------------------------------------------------------------------------
2. External compiler flags
--------------------------------------------------------------------------------
@@ -1407,6 +1409,7 @@ u32 h264bsdCheckPriorPicsFlag(u32 * noOutputOfPriorPicsFlag,
u32 tmp, value, i;
i32 ivalue;
strmData_t tmpStrmData[1];
+ UNUSED(nalUnitType);
/* Code */
diff --git a/media/libstagefright/codecs/opus/Android.mk b/media/libstagefright/codecs/opus/Android.mk
new file mode 100644
index 0000000..365b179
--- /dev/null
+++ b/media/libstagefright/codecs/opus/Android.mk
@@ -0,0 +1,4 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+include $(call all-makefiles-under,$(LOCAL_PATH))
\ No newline at end of file
diff --git a/media/libstagefright/codecs/opus/dec/Android.mk b/media/libstagefright/codecs/opus/dec/Android.mk
new file mode 100644
index 0000000..2379c5f
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/Android.mk
@@ -0,0 +1,19 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+ SoftOpus.cpp
+
+LOCAL_C_INCLUDES := \
+ external/libopus/include \
+ frameworks/av/media/libstagefright/include \
+ frameworks/native/include/media/openmax \
+
+LOCAL_SHARED_LIBRARIES := \
+ libopus libstagefright libstagefright_omx \
+ libstagefright_foundation libutils liblog
+
+LOCAL_MODULE := libstagefright_soft_opusdec
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.cpp b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
new file mode 100644
index 0000000..b8084ae
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.cpp
@@ -0,0 +1,540 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoftOpus"
+#include <utils/Log.h>
+
+#include "SoftOpus.h"
+#include <OMX_AudioExt.h>
+#include <OMX_IndexExt.h>
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/MediaDefs.h>
+
+extern "C" {
+ #include <opus.h>
+ #include <opus_multistream.h>
+}
+
+namespace android {
+
+static const int kRate = 48000;
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+SoftOpus::SoftOpus(
+ const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component)
+ : SimpleSoftOMXComponent(name, callbacks, appData, component),
+ mInputBufferCount(0),
+ mDecoder(NULL),
+ mHeader(NULL),
+ mCodecDelay(0),
+ mSeekPreRoll(0),
+ mAnchorTimeUs(0),
+ mNumFramesOutput(0),
+ mOutputPortSettingsChange(NONE) {
+ initPorts();
+ CHECK_EQ(initDecoder(), (status_t)OK);
+}
+
+SoftOpus::~SoftOpus() {
+ if (mDecoder != NULL) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = NULL;
+ }
+ if (mHeader != NULL) {
+ delete mHeader;
+ mHeader = NULL;
+ }
+}
+
+void SoftOpus::initPorts() {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+
+ def.nPortIndex = 0;
+ def.eDir = OMX_DirInput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = 960 * 6;
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 1;
+
+ def.format.audio.cMIMEType =
+ const_cast<char *>(MEDIA_MIMETYPE_AUDIO_OPUS);
+
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding =
+ (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidOPUS;
+
+ addPort(def);
+
+ def.nPortIndex = 1;
+ def.eDir = OMX_DirOutput;
+ def.nBufferCountMin = kNumBuffers;
+ def.nBufferCountActual = def.nBufferCountMin;
+ def.nBufferSize = kMaxNumSamplesPerBuffer * sizeof(int16_t);
+ def.bEnabled = OMX_TRUE;
+ def.bPopulated = OMX_FALSE;
+ def.eDomain = OMX_PortDomainAudio;
+ def.bBuffersContiguous = OMX_FALSE;
+ def.nBufferAlignment = 2;
+
+ def.format.audio.cMIMEType = const_cast<char *>("audio/raw");
+ def.format.audio.pNativeRender = NULL;
+ def.format.audio.bFlagErrorConcealment = OMX_FALSE;
+ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
+
+ addPort(def);
+}
+
+status_t SoftOpus::initDecoder() {
+ return OK;
+}
+
+OMX_ERRORTYPE SoftOpus::internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params) {
+ switch ((int)index) {
+ case OMX_IndexParamAudioAndroidOpus:
+ {
+ OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
+ (OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
+
+ if (opusParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ opusParams->nAudioBandWidth = 0;
+ opusParams->nSampleRate = kRate;
+ opusParams->nBitRate = 0;
+
+ if (!isConfigured()) {
+ opusParams->nChannels = 1;
+ } else {
+ opusParams->nChannels = mHeader->channels;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioPcm:
+ {
+ OMX_AUDIO_PARAM_PCMMODETYPE *pcmParams =
+ (OMX_AUDIO_PARAM_PCMMODETYPE *)params;
+
+ if (pcmParams->nPortIndex != 1) {
+ return OMX_ErrorUndefined;
+ }
+
+ pcmParams->eNumData = OMX_NumericalDataSigned;
+ pcmParams->eEndian = OMX_EndianBig;
+ pcmParams->bInterleaved = OMX_TRUE;
+ pcmParams->nBitPerSample = 16;
+ pcmParams->ePCMMode = OMX_AUDIO_PCMModeLinear;
+ pcmParams->eChannelMapping[0] = OMX_AUDIO_ChannelLF;
+ pcmParams->eChannelMapping[1] = OMX_AUDIO_ChannelRF;
+ pcmParams->nSamplingRate = kRate;
+
+ if (!isConfigured()) {
+ pcmParams->nChannels = 1;
+ } else {
+ pcmParams->nChannels = mHeader->channels;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalGetParameter(index, params);
+ }
+}
+
+OMX_ERRORTYPE SoftOpus::internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params) {
+ switch ((int)index) {
+ case OMX_IndexParamStandardComponentRole:
+ {
+ const OMX_PARAM_COMPONENTROLETYPE *roleParams =
+ (const OMX_PARAM_COMPONENTROLETYPE *)params;
+
+ if (strncmp((const char *)roleParams->cRole,
+ "audio_decoder.opus",
+ OMX_MAX_STRINGNAME_SIZE - 1)) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ case OMX_IndexParamAudioAndroidOpus:
+ {
+ const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *opusParams =
+ (const OMX_AUDIO_PARAM_ANDROID_OPUSTYPE *)params;
+
+ if (opusParams->nPortIndex != 0) {
+ return OMX_ErrorUndefined;
+ }
+
+ return OMX_ErrorNone;
+ }
+
+ default:
+ return SimpleSoftOMXComponent::internalSetParameter(index, params);
+ }
+}
+
+bool SoftOpus::isConfigured() const {
+ return mInputBufferCount >= 1;
+}
+
+static uint16_t ReadLE16(const uint8_t *data, size_t data_size,
+ uint32_t read_offset) {
+    if (read_offset + 2 > data_size)  // both bytes must lie inside the buffer
+ return 0;
+ uint16_t val;
+ val = data[read_offset];
+ val |= data[read_offset + 1] << 8;
+ return val;
+}
+
+// Opus uses Vorbis channel mapping, and Vorbis channel mapping specifies
+// mappings for up to 8 channels. This information is part of the Vorbis I
+// Specification:
+// http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html
+static const int kMaxChannels = 8;
+
+// Maximum packet size used in Xiph's opusdec.
+static const int kMaxOpusOutputPacketSizeSamples = 960 * 6;
+
+// Default audio output channel layout. Used to initialize |stream_map| in
+// OpusHeader, and passed to opus_multistream_decoder_create() when the header
+// does not contain mapping information. The values are valid only for mono and
+// stereo output: Opus streams with more than 2 channels require a stream map.
+static const int kMaxChannelsWithDefaultLayout = 2;
+static const uint8_t kDefaultOpusChannelLayout[kMaxChannelsWithDefaultLayout] = { 0, 1 };
+
+// Parses Opus Header. Header spec: http://wiki.xiph.org/OggOpus#ID_Header
+static bool ParseOpusHeader(const uint8_t *data, size_t data_size,
+ OpusHeader* header) {
+ // Size of the Opus header excluding optional mapping information.
+ const size_t kOpusHeaderSize = 19;
+
+ // Offset to the channel count byte in the Opus header.
+ const size_t kOpusHeaderChannelsOffset = 9;
+
+ // Offset to the pre-skip value in the Opus header.
+ const size_t kOpusHeaderSkipSamplesOffset = 10;
+
+ // Offset to the gain value in the Opus header.
+ const size_t kOpusHeaderGainOffset = 16;
+
+ // Offset to the channel mapping byte in the Opus header.
+ const size_t kOpusHeaderChannelMappingOffset = 18;
+
+ // Opus Header contains a stream map. The mapping values are in the header
+ // beyond the always present |kOpusHeaderSize| bytes of data. The mapping
+ // data contains stream count, coupling information, and per channel mapping
+ // values:
+ // - Byte 0: Number of streams.
+    //   - Byte 1: Number of coupled streams.
+ // - Byte 2: Starting at byte 2 are |header->channels| uint8 mapping
+ // values.
+ const size_t kOpusHeaderNumStreamsOffset = kOpusHeaderSize;
+ const size_t kOpusHeaderNumCoupledOffset = kOpusHeaderNumStreamsOffset + 1;
+ const size_t kOpusHeaderStreamMapOffset = kOpusHeaderNumStreamsOffset + 2;
+
+ if (data_size < kOpusHeaderSize) {
+ ALOGV("Header size is too small.");
+ return false;
+ }
+ header->channels = *(data + kOpusHeaderChannelsOffset);
+
+ if (header->channels <= 0 || header->channels > kMaxChannels) {
+ ALOGV("Invalid Header, wrong channel count: %d", header->channels);
+ return false;
+ }
+ header->skip_samples = ReadLE16(data, data_size,
+ kOpusHeaderSkipSamplesOffset);
+ header->gain_db = static_cast<int16_t>(
+ ReadLE16(data, data_size,
+ kOpusHeaderGainOffset));
+ header->channel_mapping = *(data + kOpusHeaderChannelMappingOffset);
+ if (!header->channel_mapping) {
+ if (header->channels > kMaxChannelsWithDefaultLayout) {
+ ALOGV("Invalid Header, missing stream map.");
+ return false;
+ }
+ header->num_streams = 1;
+ header->num_coupled = header->channels > 1;
+ header->stream_map[0] = 0;
+ header->stream_map[1] = 1;
+ return true;
+ }
+ if (data_size < kOpusHeaderStreamMapOffset + header->channels) {
+ ALOGV("Invalid stream map; insufficient data for current channel "
+ "count: %d", header->channels);
+ return false;
+ }
+ header->num_streams = *(data + kOpusHeaderNumStreamsOffset);
+ header->num_coupled = *(data + kOpusHeaderNumCoupledOffset);
+ if (header->num_streams + header->num_coupled != header->channels) {
+ ALOGV("Inconsistent channel mapping.");
+ return false;
+ }
+ for (int i = 0; i < header->channels; ++i)
+ header->stream_map[i] = *(data + kOpusHeaderStreamMapOffset + i);
+ return true;
+}
+
+// Convert nanoseconds to number of samples.
+static uint64_t ns_to_samples(uint64_t ns, int rate) {
+    return static_cast<double>(ns) * rate / 1000000000;
+}
+
+void SoftOpus::onQueueFilled(OMX_U32 portIndex) {
+ List<BufferInfo *> &inQueue = getPortQueue(0);
+ List<BufferInfo *> &outQueue = getPortQueue(1);
+
+ if (mOutputPortSettingsChange != NONE) {
+ return;
+ }
+
+ if (portIndex == 0 && mInputBufferCount < 3) {
+ BufferInfo *info = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *header = info->mHeader;
+
+ const uint8_t *data = header->pBuffer + header->nOffset;
+ size_t size = header->nFilledLen;
+
+ if (mInputBufferCount == 0) {
+ CHECK(mHeader == NULL);
+ mHeader = new OpusHeader();
+ memset(mHeader, 0, sizeof(*mHeader));
+ if (!ParseOpusHeader(data, size, mHeader)) {
+ ALOGV("Parsing Opus Header failed.");
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ uint8_t channel_mapping[kMaxChannels] = {0};
+ memcpy(&channel_mapping,
+ kDefaultOpusChannelLayout,
+ kMaxChannelsWithDefaultLayout);
+
+ int status = OPUS_INVALID_STATE;
+ mDecoder = opus_multistream_decoder_create(kRate,
+ mHeader->channels,
+ mHeader->num_streams,
+ mHeader->num_coupled,
+ channel_mapping,
+ &status);
+ if (!mDecoder || status != OPUS_OK) {
+ ALOGV("opus_multistream_decoder_create failed status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ status =
+ opus_multistream_decoder_ctl(mDecoder,
+ OPUS_SET_GAIN(mHeader->gain_db));
+ if (status != OPUS_OK) {
+ ALOGV("Failed to set OPUS header gain; status=%s",
+ opus_strerror(status));
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+ } else if (mInputBufferCount == 1) {
+ mCodecDelay = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(header->pBuffer +
+ header->nOffset)),
+ kRate);
+ mSamplesToDiscard = mCodecDelay;
+ } else {
+ mSeekPreRoll = ns_to_samples(
+ *(reinterpret_cast<int64_t*>(header->pBuffer +
+ header->nOffset)),
+ kRate);
+ notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
+ mOutputPortSettingsChange = AWAITING_DISABLED;
+ }
+
+ inQueue.erase(inQueue.begin());
+ info->mOwnedByUs = false;
+ notifyEmptyBufferDone(header);
+ ++mInputBufferCount;
+ return;
+ }
+
+ while (!inQueue.empty() && !outQueue.empty()) {
+ BufferInfo *inInfo = *inQueue.begin();
+ OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
+
+ BufferInfo *outInfo = *outQueue.begin();
+ OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
+
+ if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
+ inQueue.erase(inQueue.begin());
+ inInfo->mOwnedByUs = false;
+ notifyEmptyBufferDone(inHeader);
+
+ outHeader->nFilledLen = 0;
+ outHeader->nFlags = OMX_BUFFERFLAG_EOS;
+
+ outQueue.erase(outQueue.begin());
+ outInfo->mOwnedByUs = false;
+ notifyFillBufferDone(outHeader);
+ return;
+ }
+
+ if (inHeader->nOffset == 0) {
+ mAnchorTimeUs = inHeader->nTimeStamp;
+ mNumFramesOutput = 0;
+ }
+
+        // When seeking to zero, |mCodecDelay| samples have to be discarded
+ // instead of |mSeekPreRoll| samples (as we would when seeking to any
+ // other timestamp).
+ if (inHeader->nTimeStamp == 0) {
+ mSamplesToDiscard = mCodecDelay;
+ }
+
+ const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
+ const uint32_t size = inHeader->nFilledLen;
+
+ int numFrames = opus_multistream_decode(mDecoder,
+ data,
+ size,
+ (int16_t *)outHeader->pBuffer,
+ kMaxOpusOutputPacketSizeSamples,
+ 0);
+ if (numFrames < 0) {
+ ALOGE("opus_multistream_decode returned %d", numFrames);
+ notify(OMX_EventError, OMX_ErrorUndefined, 0, NULL);
+ return;
+ }
+
+ outHeader->nOffset = 0;
+ if (mSamplesToDiscard > 0) {
+ if (mSamplesToDiscard > numFrames) {
+ mSamplesToDiscard -= numFrames;
+ numFrames = 0;
+ } else {
+ numFrames -= mSamplesToDiscard;
+ outHeader->nOffset = mSamplesToDiscard * sizeof(int16_t) *
+ mHeader->channels;
+ mSamplesToDiscard = 0;
+ }
+ }
+
+ outHeader->nFilledLen = numFrames * sizeof(int16_t) * mHeader->channels;
+ outHeader->nFlags = 0;
+
+ outHeader->nTimeStamp = mAnchorTimeUs +
+ (mNumFramesOutput * 1000000ll) /
+ kRate;
+
+ mNumFramesOutput += numFrames;
+
+ inInfo->mOwnedByUs = false;
+ inQueue.erase(inQueue.begin());
+ inInfo = NULL;
+ notifyEmptyBufferDone(inHeader);
+ inHeader = NULL;
+
+ outInfo->mOwnedByUs = false;
+ outQueue.erase(outQueue.begin());
+ outInfo = NULL;
+ notifyFillBufferDone(outHeader);
+ outHeader = NULL;
+
+ ++mInputBufferCount;
+ }
+}
+
+void SoftOpus::onPortFlushCompleted(OMX_U32 portIndex) {
+ if (portIndex == 0 && mDecoder != NULL) {
+ // Make sure that the next buffer output does not still
+ // depend on fragments from the last one decoded.
+ mNumFramesOutput = 0;
+ opus_multistream_decoder_ctl(mDecoder, OPUS_RESET_STATE);
+ mAnchorTimeUs = 0;
+ mSamplesToDiscard = mSeekPreRoll;
+ }
+}
+
+void SoftOpus::onReset() {
+ mInputBufferCount = 0;
+ mNumFramesOutput = 0;
+ if (mDecoder != NULL) {
+ opus_multistream_decoder_destroy(mDecoder);
+ mDecoder = NULL;
+ }
+ if (mHeader != NULL) {
+ delete mHeader;
+ mHeader = NULL;
+ }
+
+ mOutputPortSettingsChange = NONE;
+}
+
+void SoftOpus::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
+ if (portIndex != 1) {
+ return;
+ }
+
+ switch (mOutputPortSettingsChange) {
+ case NONE:
+ break;
+
+ case AWAITING_DISABLED:
+ {
+ CHECK(!enabled);
+ mOutputPortSettingsChange = AWAITING_ENABLED;
+ break;
+ }
+
+ default:
+ {
+ CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
+ CHECK(enabled);
+ mOutputPortSettingsChange = NONE;
+ break;
+ }
+ }
+}
+
+} // namespace android
+
+android::SoftOMXComponent *createSoftOMXComponent(
+ const char *name, const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData, OMX_COMPONENTTYPE **component) {
+ return new android::SoftOpus(name, callbacks, appData, component);
+}
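ParseOpusHeader() reads the fixed 19-byte OpusHead ID header at the byte offsets named by its constants (channel count at 9, pre-skip at 10, output gain at 16, mapping family at 18), with ReadLE16() assembling the little-endian 16-bit fields. A hand-built mono header, shown only to illustrate those offsets (the pre-skip value is an example, not a requirement, and the magic bytes are not checked by the parser):

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main() {
        uint8_t head[19] = {0};
        memcpy(head, "OpusHead", 8);        // magic, offsets 0-7
        head[8]  = 1;                       // version
        head[9]  = 1;                       // channel count (kOpusHeaderChannelsOffset)
        head[10] = 0x38; head[11] = 0x01;   // pre-skip = 312 samples, little-endian
        // offsets 12-15: informational input sample rate (left at 0 here)
        head[16] = 0; head[17] = 0;         // output gain, Q7.8 dB
        head[18] = 0;                       // mapping family 0 -> default mono/stereo map

        uint16_t preSkip = head[10] | (head[11] << 8);  // what ReadLE16() computes
        printf("channels=%d pre-skip=%u samples\n", head[9], preSkip);
        return 0;
    }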
diff --git a/media/libstagefright/codecs/opus/dec/SoftOpus.h b/media/libstagefright/codecs/opus/dec/SoftOpus.h
new file mode 100644
index 0000000..97f6561
--- /dev/null
+++ b/media/libstagefright/codecs/opus/dec/SoftOpus.h
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * The Opus specification is part of IETF RFC 6716:
+ * http://tools.ietf.org/html/rfc6716
+ */
+
+#ifndef SOFT_OPUS_H_
+
+#define SOFT_OPUS_H_
+
+#include "SimpleSoftOMXComponent.h"
+
+struct OpusMSDecoder;
+
+namespace android {
+
+struct OpusHeader {
+ int channels;
+ int skip_samples;
+ int channel_mapping;
+ int num_streams;
+ int num_coupled;
+ int16_t gain_db;
+ uint8_t stream_map[8];
+};
+
+struct SoftOpus : public SimpleSoftOMXComponent {
+ SoftOpus(const char *name,
+ const OMX_CALLBACKTYPE *callbacks,
+ OMX_PTR appData,
+ OMX_COMPONENTTYPE **component);
+
+protected:
+ virtual ~SoftOpus();
+
+ virtual OMX_ERRORTYPE internalGetParameter(
+ OMX_INDEXTYPE index, OMX_PTR params);
+
+ virtual OMX_ERRORTYPE internalSetParameter(
+ OMX_INDEXTYPE index, const OMX_PTR params);
+
+ virtual void onQueueFilled(OMX_U32 portIndex);
+ virtual void onPortFlushCompleted(OMX_U32 portIndex);
+ virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
+ virtual void onReset();
+
+private:
+ enum {
+ kNumBuffers = 4,
+ kMaxNumSamplesPerBuffer = 960 * 6
+ };
+
+ size_t mInputBufferCount;
+
+ OpusMSDecoder *mDecoder;
+ OpusHeader *mHeader;
+
+ int64_t mCodecDelay;
+ int64_t mSeekPreRoll;
+ int64_t mSamplesToDiscard;
+ int64_t mAnchorTimeUs;
+ int64_t mNumFramesOutput;
+
+ enum {
+ NONE,
+ AWAITING_DISABLED,
+ AWAITING_ENABLED
+ } mOutputPortSettingsChange;
+
+ void initPorts();
+ status_t initDecoder();
+ bool isConfigured() const;
+
+ DISALLOW_EVIL_CONSTRUCTORS(SoftOpus);
+};
+
+} // namespace android
+
+#endif // SOFT_OPUS_H_
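mCodecDelay and mSeekPreRoll arrive from the extractor as nanosecond values in the second and third input buffers and are converted to sample counts at the fixed 48 kHz Opus rate by ns_to_samples(). A standalone sketch with illustrative values (312 samples is a typical Opus pre-skip; 80 ms is the seek pre-roll the Opus-in-Ogg mapping recommends):

    #include <cstdint>
    #include <cstdio>

    static uint64_t ns_to_samples(uint64_t ns, int rate) {
        return static_cast<double>(ns) * rate / 1000000000;
    }

    int main() {
        uint64_t codecDelayNs = 6500000;    // 6.5 ms, illustrative
        uint64_t seekPreRollNs = 80000000;  // 80 ms, illustrative
        printf("codec delay:   %llu samples\n",
               (unsigned long long)ns_to_samples(codecDelayNs, 48000));   // 312
        printf("seek pre-roll: %llu samples\n",
               (unsigned long long)ns_to_samples(seekPreRollNs, 48000));  // 3840
        return 0;
    }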
diff --git a/media/libstagefright/codecs/raw/Android.mk b/media/libstagefright/codecs/raw/Android.mk
index fe90a03..87080e7 100644
--- a/media/libstagefright/codecs/raw/Android.mk
+++ b/media/libstagefright/codecs/raw/Android.mk
@@ -8,6 +8,8 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libstagefright/include \
frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright_omx libstagefright_foundation libutils liblog
diff --git a/media/libstagefright/codecs/vorbis/dec/Android.mk b/media/libstagefright/codecs/vorbis/dec/Android.mk
index 2232353..217a6d2 100644
--- a/media/libstagefright/codecs/vorbis/dec/Android.mk
+++ b/media/libstagefright/codecs/vorbis/dec/Android.mk
@@ -16,4 +16,6 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE := libstagefright_soft_vorbisdec
LOCAL_MODULE_TAGS := optional
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index 77f21b7..67dfcd2 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -138,7 +138,7 @@ static int ALIGN(int x, int y) {
}
void SoftwareRenderer::render(
- const void *data, size_t size, void *platformPrivate) {
+ const void *data, size_t size, int64_t timestampNs, void *platformPrivate) {
ANativeWindowBuffer *buf;
int err;
if ((err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(),
@@ -230,6 +230,11 @@ void SoftwareRenderer::render(
CHECK_EQ(0, mapper.unlock(buf->handle));
+ if ((err = native_window_set_buffers_timestamp(mNativeWindow.get(),
+ timestampNs)) != 0) {
+ ALOGW("Surface::set_buffers_timestamp returned error %d", err);
+ }
+
if ((err = mNativeWindow->queueBuffer(mNativeWindow.get(), buf,
-1)) != 0) {
ALOGW("Surface::queueBuffer returned error %d", err);
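render() now takes the frame's presentation time in nanoseconds and forwards it to native_window_set_buffers_timestamp() before queueing the buffer, so the consumer can pace display. A caller holding the usual microsecond media timestamp would convert it like this (names and values are illustrative):

    #include <cstdint>
    #include <cstdio>

    int main() {
        int64_t mediaTimeUs = 33366;                  // decoded buffer timestamp, in us
        int64_t timestampNs = mediaTimeUs * 1000ll;   // value passed as timestampNs
        printf("render with timestamp %lld ns\n", (long long)timestampNs);
        return 0;
    }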
diff --git a/media/libstagefright/data/media_codecs_google_audio.xml b/media/libstagefright/data/media_codecs_google_audio.xml
new file mode 100644
index 0000000..f6db0cc
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_audio.xml
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.mp3.decoder" type="audio/mpeg" />
+ <MediaCodec name="OMX.google.amrnb.decoder" type="audio/3gpp" />
+ <MediaCodec name="OMX.google.amrwb.decoder" type="audio/amr-wb" />
+ <MediaCodec name="OMX.google.aac.decoder" type="audio/mp4a-latm" />
+ <MediaCodec name="OMX.google.g711.alaw.decoder" type="audio/g711-alaw" />
+ <MediaCodec name="OMX.google.g711.mlaw.decoder" type="audio/g711-mlaw" />
+ <MediaCodec name="OMX.google.vorbis.decoder" type="audio/vorbis" />
+ <MediaCodec name="OMX.google.opus.decoder" type="audio/opus" />
+ <MediaCodec name="OMX.google.raw.decoder" type="audio/raw" />
+ </Decoders>
+
+ <Encoders>
+ <MediaCodec name="OMX.google.aac.encoder" type="audio/mp4a-latm" />
+ <MediaCodec name="OMX.google.amrnb.encoder" type="audio/3gpp" />
+ <MediaCodec name="OMX.google.amrwb.encoder" type="audio/amr-wb" />
+ <MediaCodec name="OMX.google.flac.encoder" type="audio/flac" />
+ </Encoders>
+</Included>
diff --git a/media/libstagefright/data/media_codecs_google_telephony.xml b/media/libstagefright/data/media_codecs_google_telephony.xml
new file mode 100644
index 0000000..28f5ffc
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_telephony.xml
@@ -0,0 +1,21 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.gsm.decoder" type="audio/gsm" />
+ </Decoders>
+</Included>
diff --git a/media/libstagefright/data/media_codecs_google_video.xml b/media/libstagefright/data/media_codecs_google_video.xml
new file mode 100644
index 0000000..9b930bc
--- /dev/null
+++ b/media/libstagefright/data/media_codecs_google_video.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<!-- Copyright (C) 2014 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Included>
+ <Decoders>
+ <MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es" />
+ <MediaCodec name="OMX.google.h263.decoder" type="video/3gpp" />
+ <MediaCodec name="OMX.google.h264.decoder" type="video/avc" />
+ <MediaCodec name="OMX.google.hevc.decoder" type="video/hevc" />
+ <MediaCodec name="OMX.google.vp8.decoder" type="video/x-vnd.on2.vp8" />
+ <MediaCodec name="OMX.google.vp9.decoder" type="video/x-vnd.on2.vp9" />
+ </Decoders>
+
+ <Encoders>
+ <MediaCodec name="OMX.google.h263.encoder" type="video/3gpp" />
+ <MediaCodec name="OMX.google.h264.encoder" type="video/avc" />
+ <MediaCodec name="OMX.google.mpeg4.encoder" type="video/mp4v-es" />
+ <MediaCodec name="OMX.google.vp8.encoder" type="video/x-vnd.on2.vp8" />
+ </Encoders>
+</Included>
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index b6b21f1..f2d501e 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -20,6 +20,7 @@
#include <stdlib.h>
#include <string.h>
+#include <utils/String8.h>
#include "ADebug.h"
#include "AString.h"
@@ -48,6 +49,13 @@ AString::AString(const char *s, size_t size)
setTo(s, size);
}
+AString::AString(const String8 &from)
+ : mData(NULL),
+ mSize(0),
+ mAllocSize(1) {
+ setTo(from.string(), from.length());
+}
+
AString::AString(const AString &from)
: mData(NULL),
mSize(0),
diff --git a/media/libstagefright/foundation/base64.cpp b/media/libstagefright/foundation/base64.cpp
index d5fb4e0..dcf5bef 100644
--- a/media/libstagefright/foundation/base64.cpp
+++ b/media/libstagefright/foundation/base64.cpp
@@ -33,6 +33,10 @@ sp<ABuffer> decodeBase64(const AString &s) {
if (n >= 2 && s.c_str()[n - 2] == '=') {
padding = 2;
+
+ if (n >= 3 && s.c_str()[n - 3] == '=') {
+ padding = 3;
+ }
}
}
@@ -71,7 +75,7 @@ sp<ABuffer> decodeBase64(const AString &s) {
if (((i + 1) % 4) == 0) {
out[j++] = (accum >> 16);
- if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
+ if (j < outLen) { out[j++] = (accum >> 8) & 0xff; }
if (j < outLen) { out[j++] = accum & 0xff; }
accum = 0;
diff --git a/media/libstagefright/http/Android.mk b/media/libstagefright/http/Android.mk
new file mode 100644
index 0000000..7f3307d
--- /dev/null
+++ b/media/libstagefright/http/Android.mk
@@ -0,0 +1,28 @@
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_BUILD_PDK), true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ HTTPHelper.cpp \
+
+LOCAL_C_INCLUDES:= \
+ $(TOP)/frameworks/av/media/libstagefright \
+ $(TOP)/frameworks/native/include/media/openmax \
+ $(TOP)/frameworks/base/core/jni \
+
+LOCAL_SHARED_LIBRARIES := \
+ libstagefright liblog libutils libbinder libstagefright_foundation \
+ libandroid_runtime \
+ libmedia
+
+LOCAL_MODULE:= libstagefright_http_support
+
+LOCAL_CFLAGS += -Wno-multichar
+
+LOCAL_CFLAGS += -Werror
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/libstagefright/http/HTTPHelper.cpp b/media/libstagefright/http/HTTPHelper.cpp
new file mode 100644
index 0000000..77845e2
--- /dev/null
+++ b/media/libstagefright/http/HTTPHelper.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "HTTPHelper"
+#include <utils/Log.h>
+
+#include "HTTPHelper.h"
+
+#include "android_runtime/AndroidRuntime.h"
+#include "android_util_Binder.h"
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <nativehelper/ScopedLocalRef.h>
+#include "jni.h"
+
+namespace android {
+
+sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext() {
+ if (AndroidRuntime::getJavaVM() == NULL) {
+ ALOGE("CreateHTTPServiceInCurrentJavaContext called outside "
+ "JAVA environment.");
+ return NULL;
+ }
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+ ScopedLocalRef<jclass> clazz(
+ env, env->FindClass("android/media/MediaHTTPService"));
+ CHECK(clazz.get() != NULL);
+
+ jmethodID constructID = env->GetMethodID(clazz.get(), "<init>", "()V");
+ CHECK(constructID != NULL);
+
+ ScopedLocalRef<jobject> httpServiceObj(
+ env, env->NewObject(clazz.get(), constructID));
+
+ sp<IMediaHTTPService> httpService;
+ if (httpServiceObj.get() != NULL) {
+ jmethodID asBinderID =
+ env->GetMethodID(clazz.get(), "asBinder", "()Landroid/os/IBinder;");
+ CHECK(asBinderID != NULL);
+
+ ScopedLocalRef<jobject> httpServiceBinderObj(
+ env, env->CallObjectMethod(httpServiceObj.get(), asBinderID));
+ CHECK(httpServiceBinderObj.get() != NULL);
+
+ sp<IBinder> binder =
+ ibinderForJavaObject(env, httpServiceBinderObj.get());
+
+ httpService = interface_cast<IMediaHTTPService>(binder);
+ }
+
+ return httpService;
+}
+
+} // namespace android
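
HTTPHelper above builds an android.media.MediaHTTPService instance through JNI and then turns its binder into an IMediaHTTPService. A minimal sketch of just the reflective-construction part using plain JNI calls (no nativehelper, no binder conversion; the class-name argument is a placeholder supplied by the caller):

#include <jni.h>

// Reflectively construct a Java object from native code, mirroring how
// HTTPHelper builds MediaHTTPService. 'className' uses JNI slash notation
// (e.g. "android/media/MediaHTTPService"); the caller owns the returned
// local reference and must handle a pending exception on nullptr.
static jobject newJavaObject(JNIEnv *env, const char *className) {
    jclass clazz = env->FindClass(className);
    if (clazz == nullptr) {
        return nullptr;                       // class not found, exception pending
    }

    jmethodID ctor = env->GetMethodID(clazz, "<init>", "()V");
    if (ctor == nullptr) {
        env->DeleteLocalRef(clazz);
        return nullptr;                       // no default constructor
    }

    jobject obj = env->NewObject(clazz, ctor);
    env->DeleteLocalRef(clazz);               // drop the class local reference
    return obj;
}
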
diff --git a/media/libstagefright/http/HTTPHelper.h b/media/libstagefright/http/HTTPHelper.h
new file mode 100644
index 0000000..8aef115
--- /dev/null
+++ b/media/libstagefright/http/HTTPHelper.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef HTTP_HELPER_H_
+
+#define HTTP_HELPER_H_
+
+#include <utils/RefBase.h>
+
+namespace android {
+
+struct IMediaHTTPService;
+
+sp<IMediaHTTPService> CreateHTTPServiceInCurrentJavaContext();
+
+} // namespace android
+
+#endif // HTTP_HELPER_H_
diff --git a/media/libstagefright/http/MediaHTTP.cpp b/media/libstagefright/http/MediaHTTP.cpp
new file mode 100644
index 0000000..2d29913
--- /dev/null
+++ b/media/libstagefright/http/MediaHTTP.cpp
@@ -0,0 +1,205 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaHTTP"
+#include <utils/Log.h>
+
+#include <media/stagefright/MediaHTTP.h>
+
+#include <binder/IServiceManager.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/Utils.h>
+
+#include <media/IMediaHTTPConnection.h>
+
+namespace android {
+
+MediaHTTP::MediaHTTP(const sp<IMediaHTTPConnection> &conn)
+ : mInitCheck(NO_INIT),
+ mHTTPConnection(conn),
+ mCachedSizeValid(false),
+ mCachedSize(0ll),
+ mDrmManagerClient(NULL) {
+ mInitCheck = OK;
+}
+
+MediaHTTP::~MediaHTTP() {
+ clearDRMState_l();
+}
+
+status_t MediaHTTP::connect(
+ const char *uri,
+ const KeyedVector<String8, String8> *headers,
+ off64_t /* offset */) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ KeyedVector<String8, String8> extHeaders;
+ if (headers != NULL) {
+ extHeaders = *headers;
+ }
+ extHeaders.add(String8("User-Agent"), String8(MakeUserAgent().c_str()));
+
+ bool success = mHTTPConnection->connect(uri, &extHeaders);
+
+ mLastHeaders = extHeaders;
+ mLastURI = uri;
+
+ mCachedSizeValid = false;
+
+ return success ? OK : UNKNOWN_ERROR;
+}
+
+void MediaHTTP::disconnect() {
+ if (mInitCheck != OK) {
+ return;
+ }
+
+ mHTTPConnection->disconnect();
+}
+
+status_t MediaHTTP::initCheck() const {
+ return mInitCheck;
+}
+
+ssize_t MediaHTTP::readAt(off64_t offset, void *data, size_t size) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ int64_t startTimeUs = ALooper::GetNowUs();
+
+ size_t numBytesRead = 0;
+ while (numBytesRead < size) {
+ size_t copy = size - numBytesRead;
+
+ if (copy > 64 * 1024) {
+ // limit the buffer sizes transferred across binder boundaries
+ // to avoid spurious transaction failures.
+ copy = 64 * 1024;
+ }
+
+ ssize_t n = mHTTPConnection->readAt(
+ offset + numBytesRead, (uint8_t *)data + numBytesRead, copy);
+
+ if (n < 0) {
+ return n;
+ } else if (n == 0) {
+ break;
+ }
+
+ numBytesRead += n;
+ }
+
+ int64_t delayUs = ALooper::GetNowUs() - startTimeUs;
+
+ addBandwidthMeasurement(numBytesRead, delayUs);
+
+ return numBytesRead;
+}
+
+status_t MediaHTTP::getSize(off64_t *size) {
+ if (mInitCheck != OK) {
+ return mInitCheck;
+ }
+
+ // Caching the returned size so that it stays valid even after a
+ // disconnect. NuCachedSource2 relies on this.
+
+ if (!mCachedSizeValid) {
+ mCachedSize = mHTTPConnection->getSize();
+ mCachedSizeValid = true;
+ }
+
+ *size = mCachedSize;
+
+ return *size < 0 ? *size : OK;
+}
+
+uint32_t MediaHTTP::flags() {
+ return kWantsPrefetching | kIsHTTPBasedSource;
+}
+
+status_t MediaHTTP::reconnectAtOffset(off64_t offset) {
+ return connect(mLastURI.c_str(), &mLastHeaders, offset);
+}
+
+// DRM...
+
+sp<DecryptHandle> MediaHTTP::DrmInitialization(const char* mime) {
+ if (mDrmManagerClient == NULL) {
+ mDrmManagerClient = new DrmManagerClient();
+ }
+
+ if (mDrmManagerClient == NULL) {
+ return NULL;
+ }
+
+ if (mDecryptHandle == NULL) {
+ mDecryptHandle = mDrmManagerClient->openDecryptSession(
+ String8(mLastURI.c_str()), mime);
+ }
+
+ if (mDecryptHandle == NULL) {
+ delete mDrmManagerClient;
+ mDrmManagerClient = NULL;
+ }
+
+ return mDecryptHandle;
+}
+
+void MediaHTTP::getDrmInfo(
+ sp<DecryptHandle> &handle, DrmManagerClient **client) {
+ handle = mDecryptHandle;
+ *client = mDrmManagerClient;
+}
+
+String8 MediaHTTP::getUri() {
+ String8 uri;
+ if (OK == mHTTPConnection->getUri(&uri)) {
+ return uri;
+ }
+ return String8(mLastURI.c_str());
+}
+
+String8 MediaHTTP::getMIMEType() const {
+ if (mInitCheck != OK) {
+ return String8("application/octet-stream");
+ }
+
+ String8 mimeType;
+ status_t err = mHTTPConnection->getMIMEType(&mimeType);
+
+ if (err != OK) {
+ return String8("application/octet-stream");
+ }
+
+ return mimeType;
+}
+
+void MediaHTTP::clearDRMState_l() {
+ if (mDecryptHandle != NULL) {
+ // To release mDecryptHandle
+ CHECK(mDrmManagerClient);
+ mDrmManagerClient->closeDecryptSession(mDecryptHandle);
+ mDecryptHandle = NULL;
+ }
+}
+
+} // namespace android
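
MediaHTTP::readAt() above splits large reads into 64 KiB pieces so each binder transaction to the IMediaHTTPConnection stays small. A standalone sketch of that chunking pattern against a generic read callback (the callback type and names are hypothetical):

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <functional>

// Stand-in for IMediaHTTPConnection::readAt(): reads up to 'size' bytes at
// 'offset', returns bytes read, 0 on EOF, negative on error.
using ReadAtFn = std::function<int64_t(int64_t offset, uint8_t *data, size_t size)>;

// Chunked read mirroring MediaHTTP::readAt(): never request more than 64 KiB
// per transport call, stop on EOF or error, return the total bytes read.
static int64_t chunkedReadAt(const ReadAtFn &readAt,
                             int64_t offset, uint8_t *data, size_t size) {
    constexpr size_t kMaxChunk = 64 * 1024;

    size_t numBytesRead = 0;
    while (numBytesRead < size) {
        size_t copy = std::min(size - numBytesRead, kMaxChunk);

        int64_t n = readAt(offset + numBytesRead, data + numBytesRead, copy);
        if (n < 0) {
            return n;                         // propagate the transport error
        }
        if (n == 0) {
            break;                            // EOF: return what we have so far
        }
        numBytesRead += static_cast<size_t>(n);
    }
    return static_cast<int64_t>(numBytesRead);
}
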
diff --git a/media/libstagefright/httplive/Android.mk b/media/libstagefright/httplive/Android.mk
index f3529f9..e8d558c 100644
--- a/media/libstagefright/httplive/Android.mk
+++ b/media/libstagefright/httplive/Android.mk
@@ -13,6 +13,8 @@ LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/native/include/media/openmax \
$(TOP)/external/openssl/include
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libbinder \
libcrypto \
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 6d48ab7..10cdde2 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -27,6 +27,8 @@
#include "mpeg2ts/AnotherPacketSource.h"
#include <cutils/properties.h>
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -34,6 +36,7 @@
#include <media/stagefright/DataSource.h>
#include <media/stagefright/FileSource.h>
#include <media/stagefright/MediaErrors.h>
+#include <media/stagefright/MediaHTTP.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/Utils.h>
@@ -47,17 +50,13 @@
namespace android {
LiveSession::LiveSession(
- const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+ const sp<AMessage> &notify, uint32_t flags,
+ const sp<IMediaHTTPService> &httpService)
: mNotify(notify),
mFlags(flags),
- mUIDValid(uidValid),
- mUID(uid),
+ mHTTPService(httpService),
mInPreparationPhase(true),
- mHTTPDataSource(
- HTTPBase::Create(
- (mFlags & kFlagIncognito)
- ? HTTPBase::kFlagIncognito
- : 0)),
+ mHTTPDataSource(new MediaHTTP(mHTTPService->makeHTTPConnection())),
mPrevBandwidthIndex(-1),
mStreamMask(0),
mNewStreamMask(0),
@@ -70,9 +69,6 @@ LiveSession::LiveSession(
mSwitchInProgress(false),
mDisconnectReplyID(0),
mSeekReplyID(0) {
- if (mUIDValid) {
- mHTTPDataSource->setUID(mUID);
- }
mStreams[kAudioIndex] = StreamItem("audio");
mStreams[kVideoIndex] = StreamItem("video");
@@ -481,11 +477,8 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
headers = NULL;
}
-#if 1
- ALOGI("onConnect <URL suppressed>");
-#else
- ALOGI("onConnect %s", url.c_str());
-#endif
+ // TODO currently we don't know if we are coming here from incognito mode
+ ALOGI("onConnect %s", uriDebugString(url).c_str());
mMasterURL = url;
@@ -493,7 +486,7 @@ void LiveSession::onConnect(const sp<AMessage> &msg) {
mPlaylist = fetchPlaylist(url.c_str(), NULL /* curPlaylistHash */, &dummy);
if (mPlaylist == NULL) {
- ALOGE("unable to fetch master playlist <URL suppressed>.");
+ ALOGE("unable to fetch master playlist %s.", uriDebugString(url).c_str());
postPrepared(ERROR_IO);
return;
@@ -680,7 +673,7 @@ ssize_t LiveSession::fetchFile(
ssize_t bytesRead = 0;
// adjust range_length if only reading partial block
- if (block_size > 0 && (range_length == -1 || buffer->size() + block_size < range_length)) {
+ if (block_size > 0 && (range_length == -1 || (int64_t)(buffer->size() + block_size) < range_length)) {
range_length = buffer->size() + block_size;
}
for (;;) {
@@ -933,8 +926,12 @@ bool LiveSession::hasDynamicDuration() const {
return false;
}
-status_t LiveSession::getTrackInfo(Parcel *reply) const {
- return mPlaylist->getTrackInfo(reply);
+size_t LiveSession::getTrackCount() const {
+ return mPlaylist->getTrackCount();
+}
+
+sp<AMessage> LiveSession::getTrackInfo(size_t trackIndex) const {
+ return mPlaylist->getTrackInfo(trackIndex);
}
status_t LiveSession::selectTrack(size_t index, bool select) {
diff --git a/media/libstagefright/httplive/LiveSession.h b/media/libstagefright/httplive/LiveSession.h
index 3f8fee5..ed3818f 100644
--- a/media/libstagefright/httplive/LiveSession.h
+++ b/media/libstagefright/httplive/LiveSession.h
@@ -28,6 +28,7 @@ struct ABuffer;
struct AnotherPacketSource;
struct DataSource;
struct HTTPBase;
+struct IMediaHTTPService;
struct LiveDataSource;
struct M3UParser;
struct PlaylistFetcher;
@@ -40,7 +41,8 @@ struct LiveSession : public AHandler {
};
LiveSession(
const sp<AMessage> &notify,
- uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+ uint32_t flags,
+ const sp<IMediaHTTPService> &httpService);
enum StreamIndex {
kAudioIndex = 0,
@@ -68,7 +70,8 @@ struct LiveSession : public AHandler {
status_t seekTo(int64_t timeUs);
status_t getDuration(int64_t *durationUs) const;
- status_t getTrackInfo(Parcel *reply) const;
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t trackIndex) const;
status_t selectTrack(size_t index, bool select);
bool isSeekable() const;
@@ -134,8 +137,7 @@ private:
sp<AMessage> mNotify;
uint32_t mFlags;
- bool mUIDValid;
- uid_t mUID;
+ sp<IMediaHTTPService> mHTTPService;
bool mInPreparationPhase;
diff --git a/media/libstagefright/httplive/M3UParser.cpp b/media/libstagefright/httplive/M3UParser.cpp
index 20c3a76..281e0da 100644
--- a/media/libstagefright/httplive/M3UParser.cpp
+++ b/media/libstagefright/httplive/M3UParser.cpp
@@ -23,6 +23,7 @@
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/Utils.h>
#include <media/mediaplayer.h>
@@ -58,8 +59,8 @@ struct M3UParser::MediaGroup : public RefBase {
void pickRandomMediaItems();
status_t selectTrack(size_t index, bool select);
- void getTrackInfo(Parcel* reply) const;
size_t countTracks() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
protected:
virtual ~MediaGroup();
@@ -170,49 +171,56 @@ status_t M3UParser::MediaGroup::selectTrack(size_t index, bool select) {
ALOGE("track %zu already selected", index);
return BAD_VALUE;
}
- ALOGV("selected track %d", index);
+ ALOGV("selected track %zu", index);
mSelectedIndex = index;
} else {
if (mSelectedIndex != (ssize_t)index) {
ALOGE("track %zu is not selected", index);
return BAD_VALUE;
}
- ALOGV("unselected track %d", index);
+ ALOGV("unselected track %zu", index);
mSelectedIndex = -1;
}
return OK;
}
-void M3UParser::MediaGroup::getTrackInfo(Parcel* reply) const {
- for (size_t i = 0; i < mMediaItems.size(); ++i) {
- reply->writeInt32(2); // 2 fields
-
- if (mType == TYPE_AUDIO) {
- reply->writeInt32(MEDIA_TRACK_TYPE_AUDIO);
- } else if (mType == TYPE_VIDEO) {
- reply->writeInt32(MEDIA_TRACK_TYPE_VIDEO);
- } else if (mType == TYPE_SUBS) {
- reply->writeInt32(MEDIA_TRACK_TYPE_SUBTITLE);
- } else {
- reply->writeInt32(MEDIA_TRACK_TYPE_UNKNOWN);
- }
+size_t M3UParser::MediaGroup::countTracks() const {
+ return mMediaItems.size();
+}
- const Media &item = mMediaItems.itemAt(i);
- const char *lang = item.mLanguage.empty() ? "und" : item.mLanguage.c_str();
- reply->writeString16(String16(lang));
+sp<AMessage> M3UParser::MediaGroup::getTrackInfo(size_t index) const {
+ if (index >= mMediaItems.size()) {
+ return NULL;
+ }
- if (mType == TYPE_SUBS) {
- // TO-DO: pass in a MediaFormat instead
- reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_AUTOSELECT));
- reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_DEFAULT));
- reply->writeInt32(!!(item.mFlags & MediaGroup::FLAG_FORCED));
- }
+ sp<AMessage> format = new AMessage();
+
+ int32_t trackType;
+ if (mType == TYPE_AUDIO) {
+ trackType = MEDIA_TRACK_TYPE_AUDIO;
+ } else if (mType == TYPE_VIDEO) {
+ trackType = MEDIA_TRACK_TYPE_VIDEO;
+ } else if (mType == TYPE_SUBS) {
+ trackType = MEDIA_TRACK_TYPE_SUBTITLE;
+ } else {
+ trackType = MEDIA_TRACK_TYPE_UNKNOWN;
+ }
+ format->setInt32("type", trackType);
+
+ const Media &item = mMediaItems.itemAt(index);
+ const char *lang = item.mLanguage.empty() ? "und" : item.mLanguage.c_str();
+ format->setString("language", lang);
+
+ if (mType == TYPE_SUBS) {
+ // TO-DO: pass in a MediaFormat instead
+ format->setString("mime", MEDIA_MIMETYPE_TEXT_VTT);
+ format->setInt32("auto", !!(item.mFlags & MediaGroup::FLAG_AUTOSELECT));
+ format->setInt32("default", !!(item.mFlags & MediaGroup::FLAG_DEFAULT));
+ format->setInt32("forced", !!(item.mFlags & MediaGroup::FLAG_FORCED));
}
-}
-size_t M3UParser::MediaGroup::countTracks() const {
- return mMediaItems.size();
+ return format;
}
bool M3UParser::MediaGroup::getActiveURI(AString *uri) const {
@@ -319,17 +327,24 @@ status_t M3UParser::selectTrack(size_t index, bool select) {
return INVALID_OPERATION;
}
-status_t M3UParser::getTrackInfo(Parcel* reply) const {
+size_t M3UParser::getTrackCount() const {
size_t trackCount = 0;
for (size_t i = 0; i < mMediaGroups.size(); ++i) {
trackCount += mMediaGroups.valueAt(i)->countTracks();
}
- reply->writeInt32(trackCount);
+ return trackCount;
+}
- for (size_t i = 0; i < mMediaGroups.size(); ++i) {
- mMediaGroups.valueAt(i)->getTrackInfo(reply);
+sp<AMessage> M3UParser::getTrackInfo(size_t index) const {
+ for (size_t i = 0, ii = index; i < mMediaGroups.size(); ++i) {
+ sp<MediaGroup> group = mMediaGroups.valueAt(i);
+ size_t tracks = group->countTracks();
+ if (ii < tracks) {
+ return group->getTrackInfo(ii);
+ }
+ ii -= tracks;
}
- return OK;
+ return NULL;
}
ssize_t M3UParser::getSelectedIndex() const {
@@ -798,7 +813,8 @@ status_t M3UParser::parseCipherInfo(
if (MakeURL(baseURI.c_str(), val.c_str(), &absURI)) {
val = absURI;
} else {
- ALOGE("failed to make absolute url for <URL suppressed>.");
+ ALOGE("failed to make absolute url for %s.",
+ uriDebugString(baseURI).c_str());
}
}
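
The new getTrackCount()/getTrackInfo() pair above flattens the per-group track lists into one index space: the lookup walks the groups, subtracting each group's track count until the remaining index falls inside a group. A standalone sketch of that index arithmetic (hypothetical Group type, std::string standing in for the AMessage track format):

#include <cstddef>
#include <string>
#include <vector>

// Hypothetical flattened view over media groups, each owning some tracks,
// mirroring how M3UParser::getTrackInfo() resolves a flat index.
struct Group {
    std::vector<std::string> tracks;
};

static size_t totalTrackCount(const std::vector<Group> &groups) {
    size_t count = 0;
    for (const Group &g : groups) {
        count += g.tracks.size();
    }
    return count;
}

// Returns nullptr when the flat index is out of range, like the NULL return
// in the patched getTrackInfo().
static const std::string *trackAt(const std::vector<Group> &groups, size_t index) {
    for (const Group &g : groups) {
        if (index < g.tracks.size()) {
            return &g.tracks[index];
        }
        index -= g.tracks.size();             // skip past this group's tracks
    }
    return nullptr;
}
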
diff --git a/media/libstagefright/httplive/M3UParser.h b/media/libstagefright/httplive/M3UParser.h
index ccd6556..fe9fb9d 100644
--- a/media/libstagefright/httplive/M3UParser.h
+++ b/media/libstagefright/httplive/M3UParser.h
@@ -42,7 +42,8 @@ struct M3UParser : public RefBase {
void pickRandomMediaItems();
status_t selectTrack(size_t index, bool select);
- status_t getTrackInfo(Parcel* reply) const;
+ size_t getTrackCount() const;
+ sp<AMessage> getTrackInfo(size_t index) const;
ssize_t getSelectedIndex() const;
bool getTypeURI(size_t index, const char *key, AString *uri) const;
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 513f114..326d85b 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -69,6 +69,7 @@ PlaylistFetcher::PlaylistFetcher(
mNumRetries(0),
mStartup(true),
mPrepared(false),
+ mSkipToFirstIDRAfterConnect(false),
mNextPTSTimeUs(-1ll),
mMonitorQueueGeneration(0),
mRefreshState(INITIAL_MINIMUM_RELOAD_DELAY),
@@ -317,7 +318,7 @@ void PlaylistFetcher::postMonitorQueue(int64_t delayUs, int64_t minDelayUs) {
maxDelayUs = minDelayUs;
}
if (delayUs > maxDelayUs) {
- ALOGV("Need to refresh playlist in %lld", maxDelayUs);
+ ALOGV("Need to refresh playlist in %" PRId64 , maxDelayUs);
delayUs = maxDelayUs;
}
sp<AMessage> msg = new AMessage(kWhatMonitorQueue, id());
@@ -628,7 +629,7 @@ void PlaylistFetcher::onMonitorQueue() {
int64_t bufferedStreamDurationUs =
mPacketSources.valueAt(i)->getBufferedDurationUs(&finalResult);
- ALOGV("buffered %lld for stream %d",
+ ALOGV("buffered %" PRId64 " for stream %d",
bufferedStreamDurationUs, mPacketSources.keyAt(i));
if (bufferedStreamDurationUs > bufferedDurationUs) {
bufferedDurationUs = bufferedStreamDurationUs;
@@ -641,7 +642,7 @@ void PlaylistFetcher::onMonitorQueue() {
if (!mPrepared && bufferedDurationUs > targetDurationUs && downloadMore) {
mPrepared = true;
- ALOGV("prepared, buffered=%lld > %lld",
+ ALOGV("prepared, buffered=%" PRId64 " > %" PRId64 "",
bufferedDurationUs, targetDurationUs);
sp<AMessage> msg = mNotify->dup();
msg->setInt32("what", kWhatTemporarilyDoneFetching);
@@ -649,7 +650,7 @@ void PlaylistFetcher::onMonitorQueue() {
}
if (finalResult == OK && downloadMore) {
- ALOGV("monitoring, buffered=%lld < %lld",
+ ALOGV("monitoring, buffered=%" PRId64 " < %" PRId64 "",
bufferedDurationUs, durationToBufferUs);
// delay the next download slightly; hopefully this gives other concurrent fetchers
// a better chance to run.
@@ -665,7 +666,7 @@ void PlaylistFetcher::onMonitorQueue() {
msg->post();
int64_t delayUs = mPrepared ? kMaxMonitorDelayUs : targetDurationUs / 2;
- ALOGV("pausing for %lld, buffered=%lld > %lld",
+ ALOGV("pausing for %" PRId64 ", buffered=%" PRId64 " > %" PRId64 "",
delayUs, bufferedDurationUs, durationToBufferUs);
// :TRICKY: need to enforce minimum delay because the delay to
// refresh the playlist will become 0
@@ -739,7 +740,7 @@ void PlaylistFetcher::onDownloadNext() {
if (mPlaylist->isComplete() || mPlaylist->isEvent()) {
mSeqNumber = getSeqNumberForTime(mStartTimeUs);
- ALOGV("Initial sequence number for time %lld is %ld from (%ld .. %ld)",
+ ALOGV("Initial sequence number for time %" PRId64 " is %d from (%d .. %d)",
mStartTimeUs, mSeqNumber, firstSeqNumberInPlaylist,
lastSeqNumberInPlaylist);
} else {
@@ -748,7 +749,7 @@ void PlaylistFetcher::onDownloadNext() {
if (mSeqNumber < firstSeqNumberInPlaylist) {
mSeqNumber = firstSeqNumberInPlaylist;
}
- ALOGV("Initial sequence number for live event %ld from (%ld .. %ld)",
+ ALOGV("Initial sequence number for live event %d from (%d .. %d)",
mSeqNumber, firstSeqNumberInPlaylist,
lastSeqNumberInPlaylist);
}
@@ -772,7 +773,8 @@ void PlaylistFetcher::onDownloadNext() {
if (delayUs > kMaxMonitorDelayUs) {
delayUs = kMaxMonitorDelayUs;
}
- ALOGV("sequence number high: %ld from (%ld .. %ld), monitor in %lld (retry=%d)",
+ ALOGV("sequence number high: %d from (%d .. %d), "
+ "monitor in %" PRId64 " (retry=%d)",
mSeqNumber, firstSeqNumberInPlaylist,
lastSeqNumberInPlaylist, delayUs, mNumRetries);
postMonitorQueue(delayUs);
@@ -915,6 +917,7 @@ void PlaylistFetcher::onDownloadNext() {
if (err == -EAGAIN) {
// bad starting sequence number hint
+ mTSParser.clear();
postMonitorQueue();
return;
}
@@ -1095,12 +1098,30 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
continue;
}
+ if (stream == LiveSession::STREAMTYPE_VIDEO && mVideoMime.empty()) {
+ const char *mime;
+ if (source->getFormat()->findCString(kKeyMIMEType, &mime)) {
+ mVideoMime.setTo(mime);
+ if (!strcasecmp(mime, MEDIA_MIMETYPE_VIDEO_AVC)) {
+ mSkipToFirstIDRAfterConnect = true;
+ }
+ }
+ }
+
int64_t timeUs;
sp<ABuffer> accessUnit;
status_t finalResult;
while (source->hasBufferAvailable(&finalResult)
&& source->dequeueAccessUnit(&accessUnit) == OK) {
+ if (stream == LiveSession::STREAMTYPE_VIDEO && mSkipToFirstIDRAfterConnect) {
+ if (!IsIDR(accessUnit)) {
+ continue;
+ } else {
+ mSkipToFirstIDRAfterConnect = false;
+ }
+ }
+
CHECK(accessUnit->meta()->findInt64("timeUs", &timeUs));
if (mMinStartTimeUs > 0) {
if (timeUs < mMinStartTimeUs) {
@@ -1181,9 +1202,35 @@ status_t PlaylistFetcher::extractAndQueueAccessUnitsFromTs(const sp<ABuffer> &bu
return OK;
}
+/* static */
+bool PlaylistFetcher::bufferStartsWithWebVTTMagicSequence(
+ const sp<ABuffer> &buffer) {
+ size_t pos = 0;
+
+ // skip possible BOM
+ if (buffer->size() >= pos + 3 &&
+ !memcmp("\xef\xbb\xbf", buffer->data() + pos, 3)) {
+ pos += 3;
+ }
+
+ // accept WEBVTT followed by SPACE, TAB or (CR) LF
+ if (buffer->size() < pos + 6 ||
+ memcmp("WEBVTT", buffer->data() + pos, 6)) {
+ return false;
+ }
+ pos += 6;
+
+ if (buffer->size() == pos) {
+ return true;
+ }
+
+ uint8_t sep = buffer->data()[pos];
+ return sep == ' ' || sep == '\t' || sep == '\n' || sep == '\r';
+}
+
status_t PlaylistFetcher::extractAndQueueAccessUnits(
const sp<ABuffer> &buffer, const sp<AMessage> &itemMeta) {
- if (buffer->size() >= 7 && !memcmp("WEBVTT\n", buffer->data(), 7)) {
+ if (bufferStartsWithWebVTTMagicSequence(buffer)) {
if (mStreamTypeMask != LiveSession::STREAMTYPE_SUBTITLES) {
ALOGE("This stream only contains subtitles.");
return ERROR_MALFORMED;
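
bufferStartsWithWebVTTMagicSequence() above replaces the old exact "WEBVTT\n" comparison: it skips an optional UTF-8 BOM, requires the literal "WEBVTT", and then accepts either end-of-buffer or a space, tab, CR or LF separator. The same check over a plain byte vector (hypothetical function name):

#include <cstdint>
#include <cstring>
#include <vector>

// WebVTT signature test: optional UTF-8 byte order mark, the literal
// "WEBVTT", then end of data or a SPACE/TAB/CR/LF separator.
static bool startsWithWebVTT(const std::vector<uint8_t> &buf) {
    size_t pos = 0;

    // skip a possible UTF-8 BOM
    if (buf.size() >= pos + 3 && !memcmp("\xef\xbb\xbf", buf.data() + pos, 3)) {
        pos += 3;
    }

    if (buf.size() < pos + 6 || memcmp("WEBVTT", buf.data() + pos, 6)) {
        return false;
    }
    pos += 6;

    if (buf.size() == pos) {
        return true;                          // bare "WEBVTT" is acceptable
    }

    uint8_t sep = buf[pos];
    return sep == ' ' || sep == '\t' || sep == '\n' || sep == '\r';
}
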
diff --git a/media/libstagefright/httplive/PlaylistFetcher.h b/media/libstagefright/httplive/PlaylistFetcher.h
index 7e21523..e4fdbff 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.h
+++ b/media/libstagefright/httplive/PlaylistFetcher.h
@@ -91,6 +91,7 @@ private:
static const int32_t kNumSkipFrames;
static bool bufferStartsWithTsSyncByte(const sp<ABuffer>& buffer);
+ static bool bufferStartsWithWebVTTMagicSequence(const sp<ABuffer>& buffer);
// notifications to mSession
sp<AMessage> mNotify;
@@ -98,6 +99,7 @@ private:
sp<LiveSession> mSession;
AString mURI;
+ AString mVideoMime;
uint32_t mStreamTypeMask;
int64_t mStartTimeUs;
@@ -115,6 +117,7 @@ private:
int32_t mNumRetries;
bool mStartup;
bool mPrepared;
+ bool mSkipToFirstIDRAfterConnect;
int64_t mNextPTSTimeUs;
int32_t mMonitorQueueGeneration;
diff --git a/media/libstagefright/id3/Android.mk b/media/libstagefright/id3/Android.mk
index bf6f7bb..2194c38 100644
--- a/media/libstagefright/id3/Android.mk
+++ b/media/libstagefright/id3/Android.mk
@@ -4,6 +4,8 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
ID3.cpp
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := libstagefright_id3
include $(BUILD_STATIC_LIBRARY)
@@ -15,6 +17,8 @@ include $(CLEAR_VARS)
LOCAL_SRC_FILES := \
testid3.cpp
+LOCAL_CFLAGS += -Werror
+
LOCAL_SHARED_LIBRARIES := \
libstagefright libutils liblog libbinder libstagefright_foundation
diff --git a/media/libstagefright/id3/ID3.cpp b/media/libstagefright/id3/ID3.cpp
index 1199c22..7f221a0 100644
--- a/media/libstagefright/id3/ID3.cpp
+++ b/media/libstagefright/id3/ID3.cpp
@@ -468,49 +468,6 @@ void ID3::Iterator::getID(String8 *id) const {
}
}
-static void convertISO8859ToString8(
- const uint8_t *data, size_t size,
- String8 *s) {
- size_t utf8len = 0;
- for (size_t i = 0; i < size; ++i) {
- if (data[i] == '\0') {
- size = i;
- break;
- } else if (data[i] < 0x80) {
- ++utf8len;
- } else {
- utf8len += 2;
- }
- }
-
- if (utf8len == size) {
- // Only ASCII characters present.
-
- s->setTo((const char *)data, size);
- return;
- }
-
- char *tmp = new char[utf8len];
- char *ptr = tmp;
- for (size_t i = 0; i < size; ++i) {
- if (data[i] == '\0') {
- break;
- } else if (data[i] < 0x80) {
- *ptr++ = data[i];
- } else if (data[i] < 0xc0) {
- *ptr++ = 0xc2;
- *ptr++ = data[i];
- } else {
- *ptr++ = 0xc3;
- *ptr++ = data[i] - 64;
- }
- }
-
- s->setTo(tmp, utf8len);
-
- delete[] tmp;
- tmp = NULL;
-}
// the 2nd argument is used to get the data following the \0 in a comment field
void ID3::Iterator::getString(String8 *id, String8 *comment) const {
@@ -543,7 +500,9 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
return;
}
- convertISO8859ToString8(frameData, mFrameSize, id);
+ // this is supposed to be ISO-8859-1, but pass it up as-is to the caller, who will figure
+ // out the real encoding
+ id->setTo((const char*)frameData, mFrameSize);
return;
}
@@ -561,13 +520,13 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
}
if (encoding == 0x00) {
- // ISO 8859-1
- convertISO8859ToString8(frameData + 1, n, id);
+ // supposedly ISO 8859-1
+ id->setTo((const char*)frameData + 1, n);
} else if (encoding == 0x03) {
- // UTF-8
+ // supposedly UTF-8
id->setTo((const char *)(frameData + 1), n);
} else if (encoding == 0x02) {
- // UTF-16 BE, no byte order mark.
+ // supposedly UTF-16 BE, no byte order mark.
// API wants number of characters, not number of bytes...
int len = n / 2;
const char16_t *framedata = (const char16_t *) (frameData + 1);
@@ -583,7 +542,7 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
if (framedatacopy != NULL) {
delete[] framedatacopy;
}
- } else {
+ } else if (encoding == 0x01) {
// UCS-2
// API wants number of characters, not number of bytes...
int len = n / 2;
@@ -602,7 +561,27 @@ void ID3::Iterator::getstring(String8 *id, bool otherdata) const {
framedata++;
len--;
}
- id->setTo(framedata, len);
+
+ // check if the resulting data consists entirely of 8-bit values
+ bool eightBit = true;
+ for (int i = 0; i < len; i++) {
+ if (framedata[i] > 0xff) {
+ eightBit = false;
+ break;
+ }
+ }
+ if (eightBit) {
+ // collapse to 8 bit, then let the media scanner client figure out the real encoding
+ char *frame8 = new char[len];
+ for (int i = 0; i < len; i++) {
+ frame8[i] = framedata[i];
+ }
+ id->setTo(frame8, len);
+ delete [] frame8;
+ } else {
+ id->setTo(framedata, len);
+ }
+
if (framedatacopy != NULL) {
delete[] framedatacopy;
}
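
The UCS-2 branch above now collapses the decoded text to 8-bit only when every code unit fits in one byte, leaving a later pass (the media scanner client) to detect the real encoding; otherwise the 16-bit data is kept as-is. A simplified standalone sketch of that decision (std::u16string/std::string stand-ins; the byte-swapped BOM case handled earlier in the frame parser is omitted):

#include <string>

// Drop a byte-order mark if present, then collapse to 8-bit only when every
// code unit is <= 0xff; return false (and leave *out untouched) for genuinely
// 16-bit content so the caller can keep the UTF-16 data.
static bool collapseUcs2IfPossible(std::u16string text, std::string *out) {
    if (!text.empty() && text[0] == 0xfeff) {
        text.erase(text.begin());             // strip the BOM
    }

    for (char16_t c : text) {
        if (c > 0xff) {
            return false;                     // not representable in 8 bits
        }
    }

    out->clear();
    out->reserve(text.size());
    for (char16_t c : text) {
        out->push_back(static_cast<char>(c)); // one byte per code unit
    }
    return true;
}
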
diff --git a/media/libstagefright/include/AwesomePlayer.h b/media/libstagefright/include/AwesomePlayer.h
index 6ee95a9..77d65e0 100644
--- a/media/libstagefright/include/AwesomePlayer.h
+++ b/media/libstagefright/include/AwesomePlayer.h
@@ -64,6 +64,7 @@ struct AwesomePlayer {
void setUID(uid_t uid);
status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *uri,
const KeyedVector<String8, String8> *headers = NULL);
@@ -160,6 +161,7 @@ private:
SystemTimeSource mSystemTimeSource;
TimeSource *mTimeSource;
+ sp<IMediaHTTPService> mHTTPService;
String8 mUri;
KeyedVector<String8, String8> mUriHeaders;
@@ -249,6 +251,7 @@ private:
sp<MediaExtractor> mExtractor;
status_t setDataSource_l(
+ const sp<IMediaHTTPService> &httpService,
const char *uri,
const KeyedVector<String8, String8> *headers = NULL);
diff --git a/media/libstagefright/include/ChromiumHTTPDataSource.h b/media/libstagefright/include/ChromiumHTTPDataSource.h
deleted file mode 100644
index da188dd..0000000
--- a/media/libstagefright/include/ChromiumHTTPDataSource.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef CHROME_HTTP_DATA_SOURCE_H_
-
-#define CHROME_HTTP_DATA_SOURCE_H_
-
-#include <media/stagefright/foundation/AString.h>
-#include <utils/threads.h>
-
-#include "HTTPBase.h"
-
-namespace android {
-
-struct SfDelegate;
-
-struct ChromiumHTTPDataSource : public HTTPBase {
- ChromiumHTTPDataSource(uint32_t flags = 0);
-
- virtual status_t connect(
- const char *uri,
- const KeyedVector<String8, String8> *headers = NULL,
- off64_t offset = 0);
-
- virtual void disconnect();
-
- virtual status_t initCheck() const;
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size);
- virtual status_t getSize(off64_t *size);
- virtual uint32_t flags();
-
- virtual sp<DecryptHandle> DrmInitialization(const char *mime);
-
- virtual void getDrmInfo(sp<DecryptHandle> &handle, DrmManagerClient **client);
-
- virtual String8 getUri();
-
- virtual String8 getMIMEType() const;
-
- virtual status_t reconnectAtOffset(off64_t offset);
-
- static status_t UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
-protected:
- virtual ~ChromiumHTTPDataSource();
-
-private:
- friend struct SfDelegate;
-
- enum State {
- DISCONNECTED,
- CONNECTING,
- CONNECTED,
- READING,
- DISCONNECTING
- };
-
- const uint32_t mFlags;
-
- mutable Mutex mLock;
- Condition mCondition;
-
- State mState;
-
- SfDelegate *mDelegate;
-
- AString mURI;
- KeyedVector<String8, String8> mHeaders;
-
- off64_t mCurrentOffset;
-
- // Any connection error or the result of a read operation
- // (for the lattter this is the number of bytes read, if successful).
- ssize_t mIOResult;
-
- int64_t mContentSize;
-
- String8 mContentType;
-
- sp<DecryptHandle> mDecryptHandle;
- DrmManagerClient *mDrmManagerClient;
-
- void disconnect_l();
-
- status_t connect_l(
- const char *uri,
- const KeyedVector<String8, String8> *headers,
- off64_t offset);
-
- static void InitiateRead(
- ChromiumHTTPDataSource *me, void *data, size_t size);
-
- void initiateRead(void *data, size_t size);
-
- void onConnectionEstablished(
- int64_t contentSize, const char *contentType);
-
- void onConnectionFailed(status_t err);
- void onReadCompleted(ssize_t size);
- void onDisconnectComplete();
- void onRedirect(const char *url);
-
- void clearDRMState_l();
-
- DISALLOW_EVIL_CONSTRUCTORS(ChromiumHTTPDataSource);
-};
-
-} // namespace android
-
-#endif // CHROME_HTTP_DATA_SOURCE_H_
diff --git a/media/libstagefright/include/FragmentedMP4Parser.h b/media/libstagefright/include/FragmentedMP4Parser.h
deleted file mode 100644
index dbe02b8..0000000
--- a/media/libstagefright/include/FragmentedMP4Parser.h
+++ /dev/null
@@ -1,274 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef PARSER_H_
-
-#define PARSER_H_
-
-#include <media/stagefright/foundation/AHandler.h>
-#include <media/stagefright/DataSource.h>
-#include <utils/Vector.h>
-
-namespace android {
-
-struct ABuffer;
-
-struct FragmentedMP4Parser : public AHandler {
- struct Source : public RefBase {
- Source() {}
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) = 0;
- virtual bool isSeekable() = 0;
-
- protected:
- virtual ~Source() {}
-
- private:
- DISALLOW_EVIL_CONSTRUCTORS(Source);
- };
-
- FragmentedMP4Parser();
-
- void start(const char *filename);
- void start(const sp<Source> &source);
- void start(sp<DataSource> &source);
-
- sp<AMessage> getFormat(bool audio, bool synchronous = false);
- status_t dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit, bool synchronous = false);
- status_t seekTo(bool audio, int64_t timeUs);
- bool isSeekable() const;
-
- virtual void onMessageReceived(const sp<AMessage> &msg);
-
-protected:
- virtual ~FragmentedMP4Parser();
-
-private:
- enum {
- kWhatStart,
- kWhatProceed,
- kWhatReadMore,
- kWhatGetFormat,
- kWhatDequeueAccessUnit,
- kWhatSeekTo,
- };
-
- struct TrackFragment;
- struct DynamicTrackFragment;
- struct StaticTrackFragment;
-
- struct DispatchEntry {
- uint32_t mType;
- uint32_t mParentType;
- status_t (FragmentedMP4Parser::*mHandler)(uint32_t, size_t, uint64_t);
- };
-
- struct Container {
- uint64_t mOffset;
- uint64_t mBytesRemaining;
- uint32_t mType;
- bool mExtendsToEOF;
- };
-
- struct SampleDescription {
- uint32_t mType;
- uint16_t mDataRefIndex;
-
- sp<AMessage> mFormat;
- };
-
- struct SampleInfo {
- off64_t mOffset;
- size_t mSize;
- uint32_t mPresentationTime;
- size_t mSampleDescIndex;
- uint32_t mFlags;
- };
-
- struct MediaDataInfo {
- sp<ABuffer> mBuffer;
- off64_t mOffset;
- };
-
- struct SidxEntry {
- size_t mSize;
- uint32_t mDurationUs;
- };
-
- struct TrackInfo {
- enum Flags {
- kTrackEnabled = 0x01,
- kTrackInMovie = 0x02,
- kTrackInPreview = 0x04,
- };
-
- uint32_t mTrackID;
- uint32_t mFlags;
- uint32_t mDuration; // This is the duration in terms of movie timescale!
- uint64_t mSidxDuration; // usec, from sidx box, which can use a different timescale
-
- uint32_t mMediaTimeScale;
-
- uint32_t mMediaHandlerType;
- Vector<SampleDescription> mSampleDescs;
-
- // from track extends:
- uint32_t mDefaultSampleDescriptionIndex;
- uint32_t mDefaultSampleDuration;
- uint32_t mDefaultSampleSize;
- uint32_t mDefaultSampleFlags;
-
- uint32_t mDecodingTime;
-
- Vector<SidxEntry> mSidx;
- sp<StaticTrackFragment> mStaticFragment;
- List<sp<TrackFragment> > mFragments;
- };
-
- struct TrackFragmentHeaderInfo {
- enum Flags {
- kBaseDataOffsetPresent = 0x01,
- kSampleDescriptionIndexPresent = 0x02,
- kDefaultSampleDurationPresent = 0x08,
- kDefaultSampleSizePresent = 0x10,
- kDefaultSampleFlagsPresent = 0x20,
- kDurationIsEmpty = 0x10000,
- };
-
- uint32_t mTrackID;
- uint32_t mFlags;
- uint64_t mBaseDataOffset;
- uint32_t mSampleDescriptionIndex;
- uint32_t mDefaultSampleDuration;
- uint32_t mDefaultSampleSize;
- uint32_t mDefaultSampleFlags;
-
- uint64_t mDataOffset;
- };
-
- static const DispatchEntry kDispatchTable[];
-
- sp<Source> mSource;
- off_t mBufferPos;
- bool mSuspended;
- bool mDoneWithMoov;
- off_t mFirstMoofOffset; // used as the starting point for offsets calculated from the sidx box
- sp<ABuffer> mBuffer;
- Vector<Container> mStack;
- KeyedVector<uint32_t, TrackInfo> mTracks; // TrackInfo by trackID
- Vector<MediaDataInfo> mMediaData;
-
- uint32_t mCurrentTrackID;
-
- status_t mFinalResult;
-
- TrackFragmentHeaderInfo mTrackFragmentHeaderInfo;
-
- status_t onProceed();
- status_t onDequeueAccessUnit(size_t trackIndex, sp<ABuffer> *accessUnit);
- status_t onSeekTo(bool wantAudio, int64_t position);
-
- void enter(off64_t offset, uint32_t type, uint64_t size);
-
- uint16_t readU16(size_t offset);
- uint32_t readU32(size_t offset);
- uint64_t readU64(size_t offset);
- void skip(off_t distance);
- status_t need(size_t size);
- bool fitsContainer(uint64_t size) const;
-
- status_t parseTrackHeader(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseMediaHeader(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseMediaHandler(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseTrackExtends(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseTrackFragmentHeader(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseTrackFragmentRun(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseVisualSampleEntry(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseAudioSampleEntry(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSampleSizes(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseCompactSampleSizes(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSampleToChunk(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets64(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseAVCCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseESDSCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseMediaData(
- uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSegmentIndex(
- uint32_t type, size_t offset, uint64_t size);
-
- TrackInfo *editTrack(uint32_t trackID, bool createIfNecessary = false);
-
- ssize_t findTrack(bool wantAudio) const;
-
- status_t makeAccessUnit(
- TrackInfo *info,
- const SampleInfo &sample,
- const MediaDataInfo &mdatInfo,
- sp<ABuffer> *accessUnit);
-
- status_t getSample(
- TrackInfo *info,
- sp<TrackFragment> *fragment,
- SampleInfo *sampleInfo);
-
- static int CompareSampleLocation(
- const SampleInfo &sample, const MediaDataInfo &mdatInfo);
-
- void resumeIfNecessary();
-
- void copyBuffer(
- sp<ABuffer> *dst,
- size_t offset, uint64_t size) const;
-
- DISALLOW_EVIL_CONSTRUCTORS(FragmentedMP4Parser);
-};
-
-} // namespace android
-
-#endif // PARSER_H_
-
diff --git a/media/libstagefright/include/HTTPBase.h b/media/libstagefright/include/HTTPBase.h
index d4b7f9f..1c3cd5e 100644
--- a/media/libstagefright/include/HTTPBase.h
+++ b/media/libstagefright/include/HTTPBase.h
@@ -48,14 +48,6 @@ struct HTTPBase : public DataSource {
virtual status_t setBandwidthStatCollectFreq(int32_t freqMs);
- static status_t UpdateProxyConfig(
- const char *host, int32_t port, const char *exclusionList);
-
- void setUID(uid_t uid);
- bool getUID(uid_t *uid) const;
-
- static sp<HTTPBase> Create(uint32_t flags = 0);
-
static void RegisterSocketUserTag(int sockfd, uid_t uid, uint32_t kTag);
static void UnRegisterSocketUserTag(int sockfd);
@@ -87,9 +79,6 @@ private:
int32_t mPrevEstimatedBandWidthKbps;
int32_t mBandWidthCollectFreqMs;
- bool mUIDValid;
- uid_t mUID;
-
DISALLOW_EVIL_CONSTRUCTORS(HTTPBase);
};
diff --git a/media/libstagefright/include/MPEG4Extractor.h b/media/libstagefright/include/MPEG4Extractor.h
index 7b4bc6d..1fe6fcf 100644
--- a/media/libstagefright/include/MPEG4Extractor.h
+++ b/media/libstagefright/include/MPEG4Extractor.h
@@ -39,6 +39,14 @@ struct SidxEntry {
uint32_t mDurationUs;
};
+struct Trex {
+ uint32_t track_ID;
+ uint32_t default_sample_description_index;
+ uint32_t default_sample_duration;
+ uint32_t default_sample_size;
+ uint32_t default_sample_flags;
+};
+
class MPEG4Extractor : public MediaExtractor {
public:
// Extractor assumes ownership of "source".
@@ -74,11 +82,12 @@ private:
};
Vector<SidxEntry> mSidxEntries;
- uint64_t mSidxDuration;
off64_t mMoofOffset;
Vector<PsshInfo> mPssh;
+ Vector<Trex> mTrex;
+
sp<DataSource> mDataSource;
status_t mInitCheck;
bool mHasVideo;
diff --git a/media/libstagefright/include/SDPLoader.h b/media/libstagefright/include/SDPLoader.h
index ca59dc0..2c4f543 100644
--- a/media/libstagefright/include/SDPLoader.h
+++ b/media/libstagefright/include/SDPLoader.h
@@ -25,6 +25,7 @@
namespace android {
struct HTTPBase;
+struct IMediaHTTPService;
struct SDPLoader : public AHandler {
enum Flags {
@@ -34,7 +35,10 @@ struct SDPLoader : public AHandler {
enum {
kWhatSDPLoaded = 'sdpl'
};
- SDPLoader(const sp<AMessage> &notify, uint32_t flags = 0, bool uidValid = false, uid_t uid = 0);
+ SDPLoader(
+ const sp<AMessage> &notify,
+ uint32_t flags,
+ const sp<IMediaHTTPService> &httpService);
void load(const char* url, const KeyedVector<String8, String8> *headers);
@@ -55,8 +59,6 @@ private:
sp<AMessage> mNotify;
const char* mUrl;
uint32_t mFlags;
- bool mUIDValid;
- uid_t mUID;
sp<ALooper> mNetLooper;
bool mCancelled;
diff --git a/media/libstagefright/include/SampleIterator.h b/media/libstagefright/include/SampleIterator.h
index b5a043c..60c9e7e 100644
--- a/media/libstagefright/include/SampleIterator.h
+++ b/media/libstagefright/include/SampleIterator.h
@@ -30,6 +30,7 @@ struct SampleIterator {
off64_t getSampleOffset() const { return mCurrentSampleOffset; }
size_t getSampleSize() const { return mCurrentSampleSize; }
uint32_t getSampleTime() const { return mCurrentSampleTime; }
+ uint32_t getSampleDuration() const { return mCurrentSampleDuration; }
status_t getSampleSizeDirect(
uint32_t sampleIndex, size_t *size);
@@ -61,11 +62,12 @@ private:
off64_t mCurrentSampleOffset;
size_t mCurrentSampleSize;
uint32_t mCurrentSampleTime;
+ uint32_t mCurrentSampleDuration;
void reset();
status_t findChunkRange(uint32_t sampleIndex);
status_t getChunkOffset(uint32_t chunk, off64_t *offset);
- status_t findSampleTime(uint32_t sampleIndex, uint32_t *time);
+ status_t findSampleTimeAndDuration(uint32_t sampleIndex, uint32_t *time, uint32_t *duration);
SampleIterator(const SampleIterator &);
SampleIterator &operator=(const SampleIterator &);
diff --git a/media/libstagefright/include/SampleTable.h b/media/libstagefright/include/SampleTable.h
index 847dff7..fe146f2 100644
--- a/media/libstagefright/include/SampleTable.h
+++ b/media/libstagefright/include/SampleTable.h
@@ -66,7 +66,8 @@ public:
off64_t *offset,
size_t *size,
uint32_t *compositionTime,
- bool *isSyncSample = NULL);
+ bool *isSyncSample = NULL,
+ uint32_t *sampleDuration = NULL);
enum {
kFlagBefore,
diff --git a/media/libstagefright/include/SoftwareRenderer.h b/media/libstagefright/include/SoftwareRenderer.h
index 7ab0042..0ba670c 100644
--- a/media/libstagefright/include/SoftwareRenderer.h
+++ b/media/libstagefright/include/SoftwareRenderer.h
@@ -34,7 +34,7 @@ public:
~SoftwareRenderer();
void render(
- const void *data, size_t size, void *platformPrivate);
+ const void *data, size_t size, int64_t timestampNs, void *platformPrivate);
private:
enum YUVMode {
diff --git a/media/libstagefright/include/StagefrightMetadataRetriever.h b/media/libstagefright/include/StagefrightMetadataRetriever.h
index b02ed0e..6632c27 100644
--- a/media/libstagefright/include/StagefrightMetadataRetriever.h
+++ b/media/libstagefright/include/StagefrightMetadataRetriever.h
@@ -33,6 +33,7 @@ struct StagefrightMetadataRetriever : public MediaMetadataRetrieverInterface {
virtual ~StagefrightMetadataRetriever();
virtual status_t setDataSource(
+ const sp<IMediaHTTPService> &httpService,
const char *url,
const KeyedVector<String8, String8> *headers);
diff --git a/media/libstagefright/matroska/MatroskaExtractor.cpp b/media/libstagefright/matroska/MatroskaExtractor.cpp
index 0e4dd2b..d7bec59 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.cpp
+++ b/media/libstagefright/matroska/MatroskaExtractor.cpp
@@ -315,7 +315,7 @@ void BlockIterator::seek(
*actualFrameTimeUs = -1ll;
- const int64_t seekTimeNs = seekTimeUs * 1000ll;
+ const int64_t seekTimeNs = seekTimeUs * 1000ll - mExtractor->mSeekPreRollNs;
mkvparser::Segment* const pSegment = mExtractor->mSegment;
@@ -630,7 +630,8 @@ MatroskaExtractor::MatroskaExtractor(const sp<DataSource> &source)
mReader(new DataSourceReader(mDataSource)),
mSegment(NULL),
mExtractedThumbnails(false),
- mIsWebm(false) {
+ mIsWebm(false),
+ mSeekPreRollNs(0) {
off64_t size;
mIsLiveStreaming =
(mDataSource->flags()
@@ -656,14 +657,22 @@ MatroskaExtractor::MatroskaExtractor(const sp<DataSource> &source)
return;
}
+ // from mkvparser::Segment::Load(), but stop at first cluster
ret = mSegment->ParseHeaders();
- CHECK_EQ(ret, 0);
-
- long len;
- ret = mSegment->LoadCluster(pos, len);
- CHECK_EQ(ret, 0);
+ if (ret == 0) {
+ long len;
+ ret = mSegment->LoadCluster(pos, len);
+ if (ret >= 1) {
+ // no more clusters
+ ret = 0;
+ }
+ } else if (ret > 0) {
+ ret = mkvparser::E_BUFFER_NOT_FULL;
+ }
if (ret < 0) {
+ ALOGW("Corrupt %s source: %s", mIsWebm ? "webm" : "matroska",
+ uriDebugString(mDataSource->getUri()).c_str());
delete mSegment;
mSegment = NULL;
return;
@@ -921,6 +930,12 @@ void MatroskaExtractor::addTracks() {
err = addVorbisCodecInfo(
meta, codecPrivate, codecPrivateSize);
+ } else if (!strcmp("A_OPUS", codecID)) {
+ meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+ meta->setData(kKeyOpusHeader, 0, codecPrivate, codecPrivateSize);
+ meta->setInt64(kKeyOpusCodecDelay, track->GetCodecDelay());
+ meta->setInt64(kKeyOpusSeekPreRoll, track->GetSeekPreRoll());
+ mSeekPreRollNs = track->GetSeekPreRoll();
} else if (!strcmp("A_MPEG/L3", codecID)) {
meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_MPEG);
} else {
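
Together with the new A_OPUS track metadata, the seek change above backs the target time up by the track's seek pre-roll so the Opus decoder has enough lead-in audio to converge before the requested position. A tiny sketch of that adjustment; clamping at zero is an assumption of this sketch, while the hunk above simply subtracts the pre-roll:

#include <algorithm>
#include <cstdint>

// Convert a microsecond seek target to nanoseconds and back it up by the
// codec's seek pre-roll (Opus commonly signals 80 ms), clamped to the start
// of the stream.
static int64_t preRollAdjustedSeekNs(int64_t seekTimeUs, int64_t seekPreRollNs) {
    int64_t seekTimeNs = seekTimeUs * 1000ll - seekPreRollNs;
    return std::max<int64_t>(seekTimeNs, 0);
}
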
diff --git a/media/libstagefright/matroska/MatroskaExtractor.h b/media/libstagefright/matroska/MatroskaExtractor.h
index 1294b4f..cf200f3 100644
--- a/media/libstagefright/matroska/MatroskaExtractor.h
+++ b/media/libstagefright/matroska/MatroskaExtractor.h
@@ -69,6 +69,7 @@ private:
bool mExtractedThumbnails;
bool mIsLiveStreaming;
bool mIsWebm;
+ int64_t mSeekPreRollNs;
void addTracks();
void findThumbnails();
diff --git a/media/libstagefright/mp4/FragmentedMP4Parser.cpp b/media/libstagefright/mp4/FragmentedMP4Parser.cpp
deleted file mode 100644
index 0102656..0000000
--- a/media/libstagefright/mp4/FragmentedMP4Parser.cpp
+++ /dev/null
@@ -1,1993 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "FragmentedMP4Parser"
-#include <utils/Log.h>
-
-#include "include/avc_utils.h"
-#include "include/ESDS.h"
-#include "include/FragmentedMP4Parser.h"
-#include "TrackFragment.h"
-
-
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/hexdump.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-
-
-namespace android {
-
-static const char *Fourcc2String(uint32_t fourcc) {
- static char buffer[5];
- buffer[4] = '\0';
- buffer[0] = fourcc >> 24;
- buffer[1] = (fourcc >> 16) & 0xff;
- buffer[2] = (fourcc >> 8) & 0xff;
- buffer[3] = fourcc & 0xff;
-
- return buffer;
-}
-
-static const char *IndentString(size_t n) {
- static const char kSpace[] = " ";
- return kSpace + sizeof(kSpace) - 2 * n - 1;
-}
-
-// static
-const FragmentedMP4Parser::DispatchEntry FragmentedMP4Parser::kDispatchTable[] = {
- { FOURCC('m', 'o', 'o', 'v'), 0, NULL },
- { FOURCC('t', 'r', 'a', 'k'), FOURCC('m', 'o', 'o', 'v'), NULL },
- { FOURCC('u', 'd', 't', 'a'), FOURCC('t', 'r', 'a', 'k'), NULL },
- { FOURCC('u', 'd', 't', 'a'), FOURCC('m', 'o', 'o', 'v'), NULL },
- { FOURCC('m', 'e', 't', 'a'), FOURCC('u', 'd', 't', 'a'), NULL },
- { FOURCC('i', 'l', 's', 't'), FOURCC('m', 'e', 't', 'a'), NULL },
-
- { FOURCC('t', 'k', 'h', 'd'), FOURCC('t', 'r', 'a', 'k'),
- &FragmentedMP4Parser::parseTrackHeader
- },
-
- { FOURCC('m', 'v', 'e', 'x'), FOURCC('m', 'o', 'o', 'v'), NULL },
-
- { FOURCC('t', 'r', 'e', 'x'), FOURCC('m', 'v', 'e', 'x'),
- &FragmentedMP4Parser::parseTrackExtends
- },
-
- { FOURCC('e', 'd', 't', 's'), FOURCC('t', 'r', 'a', 'k'), NULL },
- { FOURCC('m', 'd', 'i', 'a'), FOURCC('t', 'r', 'a', 'k'), NULL },
-
- { FOURCC('m', 'd', 'h', 'd'), FOURCC('m', 'd', 'i', 'a'),
- &FragmentedMP4Parser::parseMediaHeader
- },
-
- { FOURCC('h', 'd', 'l', 'r'), FOURCC('m', 'd', 'i', 'a'),
- &FragmentedMP4Parser::parseMediaHandler
- },
-
- { FOURCC('m', 'i', 'n', 'f'), FOURCC('m', 'd', 'i', 'a'), NULL },
- { FOURCC('d', 'i', 'n', 'f'), FOURCC('m', 'i', 'n', 'f'), NULL },
- { FOURCC('s', 't', 'b', 'l'), FOURCC('m', 'i', 'n', 'f'), NULL },
- { FOURCC('s', 't', 's', 'd'), FOURCC('s', 't', 'b', 'l'), NULL },
-
- { FOURCC('s', 't', 's', 'z'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseSampleSizes },
-
- { FOURCC('s', 't', 'z', '2'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseCompactSampleSizes },
-
- { FOURCC('s', 't', 's', 'c'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseSampleToChunk },
-
- { FOURCC('s', 't', 'c', 'o'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseChunkOffsets },
-
- { FOURCC('c', 'o', '6', '4'), FOURCC('s', 't', 'b', 'l'),
- &FragmentedMP4Parser::parseChunkOffsets64 },
-
- { FOURCC('a', 'v', 'c', 'C'), FOURCC('a', 'v', 'c', '1'),
- &FragmentedMP4Parser::parseAVCCodecSpecificData },
-
- { FOURCC('e', 's', 'd', 's'), FOURCC('m', 'p', '4', 'a'),
- &FragmentedMP4Parser::parseESDSCodecSpecificData },
-
- { FOURCC('e', 's', 'd', 's'), FOURCC('m', 'p', '4', 'v'),
- &FragmentedMP4Parser::parseESDSCodecSpecificData },
-
- { FOURCC('m', 'd', 'a', 't'), 0, &FragmentedMP4Parser::parseMediaData },
-
- { FOURCC('m', 'o', 'o', 'f'), 0, NULL },
- { FOURCC('t', 'r', 'a', 'f'), FOURCC('m', 'o', 'o', 'f'), NULL },
-
- { FOURCC('t', 'f', 'h', 'd'), FOURCC('t', 'r', 'a', 'f'),
- &FragmentedMP4Parser::parseTrackFragmentHeader
- },
- { FOURCC('t', 'r', 'u', 'n'), FOURCC('t', 'r', 'a', 'f'),
- &FragmentedMP4Parser::parseTrackFragmentRun
- },
-
- { FOURCC('m', 'f', 'r', 'a'), 0, NULL },
-
- { FOURCC('s', 'i', 'd', 'x'), 0, &FragmentedMP4Parser::parseSegmentIndex },
-};
-
-struct FileSource : public FragmentedMP4Parser::Source {
- FileSource(const char *filename)
- : mFile(fopen(filename, "rb")) {
- CHECK(mFile != NULL);
- }
-
- virtual ~FileSource() {
- fclose(mFile);
- }
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- fseek(mFile, offset, SEEK_SET);
- return fread(data, 1, size, mFile);
- }
-
- virtual bool isSeekable() {
- return true;
- }
-
- private:
- FILE *mFile;
-
- DISALLOW_EVIL_CONSTRUCTORS(FileSource);
-};
-
-struct ReadTracker : public RefBase {
- ReadTracker(off64_t size) {
- allocSize = 1 + size / 8192; // 1 bit per kilobyte
- bitmap = (char*) calloc(1, allocSize);
- }
- virtual ~ReadTracker() {
- dumpToLog();
- free(bitmap);
- }
- void mark(off64_t offset, size_t size) {
- int firstbit = offset / 1024;
- int lastbit = (offset + size - 1) / 1024;
- for (int i = firstbit; i <= lastbit; i++) {
- bitmap[i/8] |= (0x80 >> (i & 7));
- }
- }
-
- private:
- void dumpToLog() {
- // 96 chars per line, each char represents one kilobyte, 1 kb per bit
- int numlines = allocSize / 12;
- char buf[97];
- char *cur = bitmap;
- for (int i = 0; i < numlines; i++ && cur) {
- for (int j = 0; j < 12; j++) {
- for (int k = 0; k < 8; k++) {
- buf[(j * 8) + k] = (*cur & (0x80 >> k)) ? 'X' : '.';
- }
- cur++;
- }
- buf[96] = '\0';
- ALOGI("%5dk: %s", i * 96, buf);
- }
- }
-
- size_t allocSize;
- char *bitmap;
-};
-
-struct DataSourceSource : public FragmentedMP4Parser::Source {
- DataSourceSource(sp<DataSource> &source)
- : mDataSource(source) {
- CHECK(mDataSource != NULL);
-#if 0
- off64_t size;
- if (source->getSize(&size) == OK) {
- mReadTracker = new ReadTracker(size);
- } else {
- ALOGE("couldn't get data source size");
- }
-#endif
- }
-
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) {
- if (mReadTracker != NULL) {
- mReadTracker->mark(offset, size);
- }
- return mDataSource->readAt(offset, data, size);
- }
-
- virtual bool isSeekable() {
- return true;
- }
-
- private:
- sp<DataSource> mDataSource;
- sp<ReadTracker> mReadTracker;
-
- DISALLOW_EVIL_CONSTRUCTORS(DataSourceSource);
-};
-
-FragmentedMP4Parser::FragmentedMP4Parser()
- : mBufferPos(0),
- mSuspended(false),
- mDoneWithMoov(false),
- mFirstMoofOffset(0),
- mFinalResult(OK) {
-}
-
-FragmentedMP4Parser::~FragmentedMP4Parser() {
-}
-
-void FragmentedMP4Parser::start(const char *filename) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setObject("source", new FileSource(filename));
- msg->post();
- ALOGV("Parser::start(%s)", filename);
-}
-
-void FragmentedMP4Parser::start(const sp<Source> &source) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setObject("source", source);
- msg->post();
- ALOGV("Parser::start(Source)");
-}
-
-void FragmentedMP4Parser::start(sp<DataSource> &source) {
- sp<AMessage> msg = new AMessage(kWhatStart, id());
- msg->setObject("source", new DataSourceSource(source));
- msg->post();
- ALOGV("Parser::start(DataSource)");
-}
-
-sp<AMessage> FragmentedMP4Parser::getFormat(bool audio, bool synchronous) {
-
- while (true) {
- bool moovDone = mDoneWithMoov;
- sp<AMessage> msg = new AMessage(kWhatGetFormat, id());
- msg->setInt32("audio", audio);
-
- sp<AMessage> response;
- status_t err = msg->postAndAwaitResponse(&response);
-
- if (err != OK) {
- ALOGV("getFormat post failed: %d", err);
- return NULL;
- }
-
- if (response->findInt32("err", &err) && err != OK) {
- if (synchronous && err == -EWOULDBLOCK && !moovDone) {
- resumeIfNecessary();
- ALOGV("@getFormat parser not ready yet, retrying");
- usleep(10000);
- continue;
- }
- ALOGV("getFormat failed: %d", err);
- return NULL;
- }
-
- sp<AMessage> format;
- CHECK(response->findMessage("format", &format));
-
- ALOGV("returning format %s", format->debugString().c_str());
- return format;
- }
-}
-
-status_t FragmentedMP4Parser::seekTo(bool wantAudio, int64_t timeUs) {
- sp<AMessage> msg = new AMessage(kWhatSeekTo, id());
- msg->setInt32("audio", wantAudio);
- msg->setInt64("position", timeUs);
-
- sp<AMessage> response;
- status_t err = msg->postAndAwaitResponse(&response);
- return err;
-}
-
-bool FragmentedMP4Parser::isSeekable() const {
- while (mFirstMoofOffset == 0 && mFinalResult == OK) {
- usleep(10000);
- }
- bool seekable = mSource->isSeekable();
- for (size_t i = 0; seekable && i < mTracks.size(); i++) {
- const TrackInfo *info = &mTracks.valueAt(i);
- seekable &= !info->mSidx.empty();
- }
- return seekable;
-}
-
-status_t FragmentedMP4Parser::onSeekTo(bool wantAudio, int64_t position) {
- status_t err = -EINVAL;
- ssize_t trackIndex = findTrack(wantAudio);
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- TrackInfo *info = &mTracks.editValueAt(trackIndex);
-
- int numSidxEntries = info->mSidx.size();
- int64_t totalTime = 0;
- off_t totalOffset = mFirstMoofOffset;
- for (int i = 0; i < numSidxEntries; i++) {
- const SidxEntry *se = &info->mSidx[i];
- if (totalTime + se->mDurationUs > position) {
- mBuffer->setRange(0,0);
- mBufferPos = totalOffset;
- if (mFinalResult == ERROR_END_OF_STREAM) {
- mFinalResult = OK;
- mSuspended = true; // force resume
- resumeIfNecessary();
- }
- info->mFragments.clear();
- info->mDecodingTime = totalTime * info->mMediaTimeScale / 1000000ll;
- return OK;
- }
- totalTime += se->mDurationUs;
- totalOffset += se->mSize;
- }
- }
- ALOGV("seekTo out of range");
- return err;
-}
-
-status_t FragmentedMP4Parser::dequeueAccessUnit(bool audio, sp<ABuffer> *accessUnit,
- bool synchronous) {
-
- while (true) {
- sp<AMessage> msg = new AMessage(kWhatDequeueAccessUnit, id());
- msg->setInt32("audio", audio);
-
- sp<AMessage> response;
- status_t err = msg->postAndAwaitResponse(&response);
-
- if (err != OK) {
- ALOGV("dequeue fail 1: %d", err);
- return err;
- }
-
- if (response->findInt32("err", &err) && err != OK) {
- if (synchronous && err == -EWOULDBLOCK) {
- resumeIfNecessary();
- ALOGV("Parser not ready yet, retrying");
- usleep(10000);
- continue;
- }
- ALOGV("dequeue fail 2: %d, %d", err, synchronous);
- return err;
- }
-
- CHECK(response->findBuffer("accessUnit", accessUnit));
-
- return OK;
- }
-}
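
Both getFormat() and dequeueAccessUnit() above fall back to the same synchronous pattern: post a message to the looper, and if the reply carries -EWOULDBLOCK, nudge the parser with resumeIfNecessary() and poll again after a short sleep. A minimal generic sketch of that retry loop, assuming a callable operation; the helper and its names are illustrative and not part of the parser:

    #include <cerrno>
    #include <unistd.h>

    // Sketch: retry a non-blocking operation while it reports "would block".
    // 'op' is any callable returning a status code; the bounds are arbitrary.
    template <typename Op>
    static int pollUntilReady(Op op, int sleepUs = 10000, int maxTries = 500) {
        for (int i = 0; i < maxTries; ++i) {
            int err = op();
            if (err != -EWOULDBLOCK) {
                return err;
            }
            usleep(sleepUs);
        }
        return -EWOULDBLOCK;
    }
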
-
-ssize_t FragmentedMP4Parser::findTrack(bool wantAudio) const {
- for (size_t i = 0; i < mTracks.size(); ++i) {
- const TrackInfo *info = &mTracks.valueAt(i);
-
- bool isAudio =
- info->mMediaHandlerType == FOURCC('s', 'o', 'u', 'n');
-
- bool isVideo =
- info->mMediaHandlerType == FOURCC('v', 'i', 'd', 'e');
-
- if ((wantAudio && isAudio) || (!wantAudio && !isAudio)) {
- if (info->mSampleDescs.empty()) {
- break;
- }
-
- return i;
- }
- }
-
- return -EWOULDBLOCK;
-}
-
-void FragmentedMP4Parser::onMessageReceived(const sp<AMessage> &msg) {
- switch (msg->what()) {
- case kWhatStart:
- {
- sp<RefBase> obj;
- CHECK(msg->findObject("source", &obj));
-
- mSource = static_cast<Source *>(obj.get());
-
- mBuffer = new ABuffer(512 * 1024);
- mBuffer->setRange(0, 0);
-
- enter(0ll, 0, 0);
-
- (new AMessage(kWhatProceed, id()))->post();
- break;
- }
-
- case kWhatProceed:
- {
- CHECK(!mSuspended);
-
- status_t err = onProceed();
-
- if (err == OK) {
- if (!mSuspended) {
- msg->post();
- }
- } else if (err != -EAGAIN) {
- ALOGE("onProceed returned error %d", err);
- }
-
- break;
- }
-
- case kWhatReadMore:
- {
- size_t needed;
- CHECK(msg->findSize("needed", &needed));
-
- memmove(mBuffer->base(), mBuffer->data(), mBuffer->size());
- mBufferPos += mBuffer->offset();
- mBuffer->setRange(0, mBuffer->size());
-
- size_t maxBytesToRead = mBuffer->capacity() - mBuffer->size();
-
- if (maxBytesToRead < needed) {
- ALOGV("resizing buffer.");
-
- sp<ABuffer> newBuffer =
- new ABuffer((mBuffer->size() + needed + 1023) & ~1023);
- memcpy(newBuffer->data(), mBuffer->data(), mBuffer->size());
- newBuffer->setRange(0, mBuffer->size());
-
- mBuffer = newBuffer;
- maxBytesToRead = mBuffer->capacity() - mBuffer->size();
- }
-
- CHECK_GE(maxBytesToRead, needed);
-
- ssize_t n = mSource->readAt(
- mBufferPos + mBuffer->size(),
- mBuffer->data() + mBuffer->size(), needed);
-
- if (n < (ssize_t)needed) {
- ALOGV("Reached EOF when reading %d @ %d + %d", needed, mBufferPos, mBuffer->size());
- if (n < 0) {
- mFinalResult = n;
- } else if (n == 0) {
- mFinalResult = ERROR_END_OF_STREAM;
- } else {
- mFinalResult = ERROR_IO;
- }
- } else {
- mBuffer->setRange(0, mBuffer->size() + n);
- (new AMessage(kWhatProceed, id()))->post();
- }
-
- break;
- }
-
- case kWhatGetFormat:
- {
- int32_t wantAudio;
- CHECK(msg->findInt32("audio", &wantAudio));
-
- status_t err = -EWOULDBLOCK;
- sp<AMessage> response = new AMessage;
-
- ssize_t trackIndex = findTrack(wantAudio);
-
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- TrackInfo *info = &mTracks.editValueAt(trackIndex);
-
- sp<AMessage> format = info->mSampleDescs.itemAt(0).mFormat;
- if (info->mSidxDuration) {
- format->setInt64("durationUs", info->mSidxDuration);
- } else {
- // this is probably going to be zero. Oh well...
- format->setInt64("durationUs",
- 1000000ll * info->mDuration / info->mMediaTimeScale);
- }
- response->setMessage(
- "format", format);
-
- err = OK;
- }
-
- response->setInt32("err", err);
-
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
- response->postReply(replyID);
- break;
- }
-
- case kWhatDequeueAccessUnit:
- {
- int32_t wantAudio;
- CHECK(msg->findInt32("audio", &wantAudio));
-
- status_t err = -EWOULDBLOCK;
- sp<AMessage> response = new AMessage;
-
- ssize_t trackIndex = findTrack(wantAudio);
-
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- sp<ABuffer> accessUnit;
- err = onDequeueAccessUnit(trackIndex, &accessUnit);
-
- if (err == OK) {
- response->setBuffer("accessUnit", accessUnit);
- }
- }
-
- response->setInt32("err", err);
-
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
-
- response->postReply(replyID);
- break;
- }
-
- case kWhatSeekTo:
- {
- ALOGV("kWhatSeekTo");
- int32_t wantAudio;
- CHECK(msg->findInt32("audio", &wantAudio));
- int64_t position;
- CHECK(msg->findInt64("position", &position));
-
- status_t err = -EWOULDBLOCK;
- sp<AMessage> response = new AMessage;
-
- ssize_t trackIndex = findTrack(wantAudio);
-
- if (trackIndex < 0) {
- err = trackIndex;
- } else {
- err = onSeekTo(wantAudio, position);
- }
- response->setInt32("err", err);
- uint32_t replyID;
- CHECK(msg->senderAwaitsResponse(&replyID));
- response->postReply(replyID);
- break;
- }
- default:
- TRESPASS();
- }
-}
-
-status_t FragmentedMP4Parser::onProceed() {
- status_t err;
-
- if ((err = need(8)) != OK) {
- return err;
- }
-
- uint64_t size = readU32(0);
- uint32_t type = readU32(4);
-
- size_t offset = 8;
-
- if (size == 1) {
- if ((err = need(16)) != OK) {
- return err;
- }
-
- size = readU64(offset);
- offset += 8;
- }
-
- uint8_t userType[16];
-
- if (type == FOURCC('u', 'u', 'i', 'd')) {
- if ((err = need(offset + 16)) != OK) {
- return err;
- }
-
- memcpy(userType, mBuffer->data() + offset, 16);
- offset += 16;
- }
-
- CHECK(!mStack.isEmpty());
- uint32_t ptype = mStack.itemAt(mStack.size() - 1).mType;
-
- static const size_t kNumDispatchers =
- sizeof(kDispatchTable) / sizeof(kDispatchTable[0]);
-
- size_t i;
- for (i = 0; i < kNumDispatchers; ++i) {
- if (kDispatchTable[i].mType == type
- && kDispatchTable[i].mParentType == ptype) {
- break;
- }
- }
-
- // SampleEntry boxes are container boxes that start with a variable
- // amount of data depending on the media handler type.
- // We don't look inside 'hint' type SampleEntry boxes.
-
- bool isSampleEntryBox =
- (ptype == FOURCC('s', 't', 's', 'd'))
- && editTrack(mCurrentTrackID)->mMediaHandlerType
- != FOURCC('h', 'i', 'n', 't');
-
- if ((i < kNumDispatchers && kDispatchTable[i].mHandler == 0)
- || isSampleEntryBox || ptype == FOURCC('i', 'l', 's', 't')) {
- // This is a container box.
- if (type == FOURCC('m', 'o', 'o', 'f')) {
- if (mFirstMoofOffset == 0) {
- ALOGV("first moof @ %08x", mBufferPos + offset);
- mFirstMoofOffset = mBufferPos + offset - 8; // point at the size
- }
- }
- if (type == FOURCC('m', 'e', 't', 'a')) {
- if ((err = need(offset + 4)) < OK) {
- return err;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- offset += 4;
- } else if (type == FOURCC('s', 't', 's', 'd')) {
- if ((err = need(offset + 8)) < OK) {
- return err;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- if (readU32(offset + 4) == 0) {
- // We need at least some entries.
- return -EINVAL;
- }
-
- offset += 8;
- } else if (isSampleEntryBox) {
- size_t headerSize;
-
- switch (editTrack(mCurrentTrackID)->mMediaHandlerType) {
- case FOURCC('v', 'i', 'd', 'e'):
- {
- // 8 bytes SampleEntry + 70 bytes VisualSampleEntry
- headerSize = 78;
- break;
- }
-
- case FOURCC('s', 'o', 'u', 'n'):
- {
- // 8 bytes SampleEntry + 20 bytes AudioSampleEntry
- headerSize = 28;
- break;
- }
-
- case FOURCC('m', 'e', 't', 'a'):
- {
- headerSize = 8; // 8 bytes SampleEntry
- break;
- }
-
- default:
- TRESPASS();
- }
-
- if (offset + headerSize > size) {
- return -EINVAL;
- }
-
- if ((err = need(offset + headerSize)) != OK) {
- return err;
- }
-
- switch (editTrack(mCurrentTrackID)->mMediaHandlerType) {
- case FOURCC('v', 'i', 'd', 'e'):
- {
- err = parseVisualSampleEntry(
- type, offset, offset + headerSize);
- break;
- }
-
- case FOURCC('s', 'o', 'u', 'n'):
- {
- err = parseAudioSampleEntry(
- type, offset, offset + headerSize);
- break;
- }
-
- case FOURCC('m', 'e', 't', 'a'):
- {
- err = OK;
- break;
- }
-
- default:
- TRESPASS();
- }
-
- if (err != OK) {
- return err;
- }
-
- offset += headerSize;
- }
-
- skip(offset);
-
- ALOGV("%sentering box of type '%s'",
- IndentString(mStack.size()), Fourcc2String(type));
-
- enter(mBufferPos - offset, type, size - offset);
- } else {
- if (!fitsContainer(size)) {
- return -EINVAL;
- }
-
- if (i < kNumDispatchers && kDispatchTable[i].mHandler != 0) {
- // We have a handler for this box type.
-
- if ((err = need(size)) != OK) {
- return err;
- }
-
- ALOGV("%sparsing box of type '%s'",
- IndentString(mStack.size()), Fourcc2String(type));
-
- if ((err = (this->*kDispatchTable[i].mHandler)(
- type, offset, size)) != OK) {
- return err;
- }
- } else {
- // Unknown box type
-
- ALOGV("%sskipping box of type '%s', size %llu",
- IndentString(mStack.size()),
- Fourcc2String(type), size);
-
- }
-
- skip(size);
- }
-
- return OK;
-}
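
The first half of onProceed() peels one ISO-BMFF box header off the buffer: a 32-bit size and fourcc type, an optional 64-bit largesize when the 32-bit size equals 1, and, for 'uuid' boxes, a 16-byte user type (not covered in the sketch). A self-contained sketch of that header decode, assuming the bytes are already in memory; the helper names are illustrative:

    #include <cstdint>
    #include <cstddef>

    // Sketch: read a big-endian 32-bit word.
    static uint32_t beU32(const uint8_t *p) {
        return (uint32_t(p[0]) << 24) | (uint32_t(p[1]) << 16)
             | (uint32_t(p[2]) << 8)  |  uint32_t(p[3]);
    }

    // Sketch: decode one box header from 'data' (with 'avail' valid bytes).
    // Returns the header length, or 0 if more bytes are needed.
    static size_t parseBoxHeader(const uint8_t *data, size_t avail,
                                 uint64_t *boxSize, uint32_t *boxType) {
        if (avail < 8) return 0;
        uint64_t size = beU32(data);      // 0 means "box extends to end of file"
        *boxType = beU32(data + 4);
        size_t header = 8;
        if (size == 1) {                  // 64-bit "largesize" follows the fourcc
            if (avail < 16) return 0;
            size = (uint64_t(beU32(data + 8)) << 32) | beU32(data + 12);
            header = 16;
        }
        *boxSize = size;
        return header;
    }
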
-
-// static
-int FragmentedMP4Parser::CompareSampleLocation(
- const SampleInfo &sample, const MediaDataInfo &mdatInfo) {
- if (sample.mOffset + sample.mSize < mdatInfo.mOffset) {
- return -1;
- }
-
- if (sample.mOffset >= mdatInfo.mOffset + mdatInfo.mBuffer->size()) {
- return 1;
- }
-
- // Otherwise make sure the sample is completely contained within this
- // media data block.
-
- CHECK_GE(sample.mOffset, mdatInfo.mOffset);
-
- CHECK_LE(sample.mOffset + sample.mSize,
- mdatInfo.mOffset + mdatInfo.mBuffer->size());
-
- return 0;
-}
-
-void FragmentedMP4Parser::resumeIfNecessary() {
- if (!mSuspended) {
- return;
- }
-
- ALOGV("resuming.");
-
- mSuspended = false;
- (new AMessage(kWhatProceed, id()))->post();
-}
-
-status_t FragmentedMP4Parser::getSample(
- TrackInfo *info, sp<TrackFragment> *fragment, SampleInfo *sampleInfo) {
- for (;;) {
- if (info->mFragments.empty()) {
- if (mFinalResult != OK) {
- return mFinalResult;
- }
-
- resumeIfNecessary();
- return -EWOULDBLOCK;
- }
-
- *fragment = *info->mFragments.begin();
-
- status_t err = (*fragment)->getSample(sampleInfo);
-
- if (err == OK) {
- return OK;
- } else if (err != ERROR_END_OF_STREAM) {
- return err;
- }
-
- // Really, end of this fragment...
-
- info->mFragments.erase(info->mFragments.begin());
- }
-}
-
-status_t FragmentedMP4Parser::onDequeueAccessUnit(
- size_t trackIndex, sp<ABuffer> *accessUnit) {
- TrackInfo *info = &mTracks.editValueAt(trackIndex);
-
- sp<TrackFragment> fragment;
- SampleInfo sampleInfo;
- status_t err = getSample(info, &fragment, &sampleInfo);
-
- if (err == -EWOULDBLOCK) {
- resumeIfNecessary();
- return err;
- } else if (err != OK) {
- return err;
- }
-
- err = -EWOULDBLOCK;
-
- bool checkDroppable = false;
-
- for (size_t i = 0; i < mMediaData.size(); ++i) {
- const MediaDataInfo &mdatInfo = mMediaData.itemAt(i);
-
- int cmp = CompareSampleLocation(sampleInfo, mdatInfo);
-
- if (cmp < 0 && !mSource->isSeekable()) {
- return -EPIPE;
- } else if (cmp == 0) {
- if (i > 0) {
- checkDroppable = true;
- }
-
- err = makeAccessUnit(info, sampleInfo, mdatInfo, accessUnit);
- break;
- }
- }
-
- if (err != OK) {
- return err;
- }
-
- fragment->advance();
-
- if (!mMediaData.empty() && checkDroppable) {
- size_t numDroppable = 0;
- bool done = false;
-
- // XXX FIXME: if one of the tracks is not advanced (e.g. if you play an audio+video
- // file with sf2), then mMediaData will not be pruned and will keep growing
- for (size_t i = 0; !done && i < mMediaData.size(); ++i) {
- const MediaDataInfo &mdatInfo = mMediaData.itemAt(i);
-
- for (size_t j = 0; j < mTracks.size(); ++j) {
- TrackInfo *info = &mTracks.editValueAt(j);
-
- sp<TrackFragment> fragment;
- SampleInfo sampleInfo;
- err = getSample(info, &fragment, &sampleInfo);
-
- if (err != OK) {
- done = true;
- break;
- }
-
- int cmp = CompareSampleLocation(sampleInfo, mdatInfo);
-
- if (cmp <= 0) {
- done = true;
- break;
- }
- }
-
- if (!done) {
- ++numDroppable;
- }
- }
-
- if (numDroppable > 0) {
- mMediaData.removeItemsAt(0, numDroppable);
-
- if (mMediaData.size() < 5) {
- resumeIfNecessary();
- }
- }
- }
-
- return err;
-}
-
-static size_t parseNALSize(size_t nalLengthSize, const uint8_t *data) {
- switch (nalLengthSize) {
- case 1:
- return *data;
- case 2:
- return U16_AT(data);
- case 3:
- return ((size_t)data[0] << 16) | U16_AT(&data[1]);
- case 4:
- return U32_AT(data);
- }
-
- // This cannot happen, mNALLengthSize springs to life by adding 1 to
- // a 2-bit integer.
- TRESPASS();
-
- return 0;
-}
-
-status_t FragmentedMP4Parser::makeAccessUnit(
- TrackInfo *info,
- const SampleInfo &sample,
- const MediaDataInfo &mdatInfo,
- sp<ABuffer> *accessUnit) {
- if (sample.mSampleDescIndex < 1
- || sample.mSampleDescIndex > info->mSampleDescs.size()) {
- return ERROR_MALFORMED;
- }
-
- int64_t presentationTimeUs =
- 1000000ll * sample.mPresentationTime / info->mMediaTimeScale;
-
- const SampleDescription &sampleDesc =
- info->mSampleDescs.itemAt(sample.mSampleDescIndex - 1);
-
- size_t nalLengthSize;
- if (!sampleDesc.mFormat->findSize("nal-length-size", &nalLengthSize)) {
- *accessUnit = new ABuffer(sample.mSize);
-
- memcpy((*accessUnit)->data(),
- mdatInfo.mBuffer->data() + (sample.mOffset - mdatInfo.mOffset),
- sample.mSize);
-
- (*accessUnit)->meta()->setInt64("timeUs", presentationTimeUs);
- if (IsIDR(*accessUnit)) {
- (*accessUnit)->meta()->setInt32("is-sync-frame", 1);
- }
-
- return OK;
- }
-
- const uint8_t *srcPtr =
- mdatInfo.mBuffer->data() + (sample.mOffset - mdatInfo.mOffset);
-
- for (int i = 0; i < 2 ; ++i) {
- size_t srcOffset = 0;
- size_t dstOffset = 0;
-
- while (srcOffset < sample.mSize) {
- if (srcOffset + nalLengthSize > sample.mSize) {
- return ERROR_MALFORMED;
- }
-
- size_t nalSize = parseNALSize(nalLengthSize, &srcPtr[srcOffset]);
- srcOffset += nalLengthSize;
-
- if (srcOffset + nalSize > sample.mSize) {
- return ERROR_MALFORMED;
- }
-
- if (i == 1) {
- memcpy((*accessUnit)->data() + dstOffset,
- "\x00\x00\x00\x01",
- 4);
-
- memcpy((*accessUnit)->data() + dstOffset + 4,
- srcPtr + srcOffset,
- nalSize);
- }
-
- srcOffset += nalSize;
- dstOffset += nalSize + 4;
- }
-
- if (i == 0) {
- (*accessUnit) = new ABuffer(dstOffset);
- (*accessUnit)->meta()->setInt64(
- "timeUs", presentationTimeUs);
- }
- }
- if (IsIDR(*accessUnit)) {
- (*accessUnit)->meta()->setInt32("is-sync-frame", 1);
- }
-
- return OK;
-}
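
makeAccessUnit() above converts MP4-style length-prefixed NAL units into Annex-B byte-stream form in two passes: the first pass sizes the output buffer, the second copies the data with 0x00000001 start codes. A standalone sketch of the same conversion using a growable vector instead of the two-pass sizing; names are illustrative and the parser's buffer bookkeeping is omitted:

    #include <cstdint>
    #include <cstddef>
    #include <vector>

    // Sketch: rewrite a length-prefixed sample (big-endian prefixes of
    // 'nalLengthSize' bytes) as Annex-B with 4-byte start codes.
    static bool toAnnexB(const uint8_t *src, size_t srcSize,
                         size_t nalLengthSize, std::vector<uint8_t> *out) {
        static const uint8_t kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
        out->clear();
        size_t offset = 0;
        while (offset < srcSize) {
            if (offset + nalLengthSize > srcSize) return false;
            size_t nalSize = 0;
            for (size_t i = 0; i < nalLengthSize; ++i) {
                nalSize = (nalSize << 8) | src[offset + i];
            }
            offset += nalLengthSize;
            if (nalSize > srcSize - offset) return false;
            out->insert(out->end(), kStartCode, kStartCode + 4);
            out->insert(out->end(), src + offset, src + offset + nalSize);
            offset += nalSize;
        }
        return true;
    }
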
-
-status_t FragmentedMP4Parser::need(size_t size) {
- if (!fitsContainer(size)) {
- return -EINVAL;
- }
-
- if (size <= mBuffer->size()) {
- return OK;
- }
-
- sp<AMessage> msg = new AMessage(kWhatReadMore, id());
- msg->setSize("needed", size - mBuffer->size());
- msg->post();
-
- // ALOGV("need(%d) returning -EAGAIN, only have %d", size, mBuffer->size());
-
- return -EAGAIN;
-}
-
-void FragmentedMP4Parser::enter(off64_t offset, uint32_t type, uint64_t size) {
- Container container;
- container.mOffset = offset;
- container.mType = type;
- container.mExtendsToEOF = (size == 0);
- container.mBytesRemaining = size;
-
- mStack.push(container);
-}
-
-bool FragmentedMP4Parser::fitsContainer(uint64_t size) const {
- CHECK(!mStack.isEmpty());
- const Container &container = mStack.itemAt(mStack.size() - 1);
-
- return container.mExtendsToEOF || size <= container.mBytesRemaining;
-}
-
-uint16_t FragmentedMP4Parser::readU16(size_t offset) {
- CHECK_LE(offset + 2, mBuffer->size());
-
- const uint8_t *ptr = mBuffer->data() + offset;
- return (ptr[0] << 8) | ptr[1];
-}
-
-uint32_t FragmentedMP4Parser::readU32(size_t offset) {
- CHECK_LE(offset + 4, mBuffer->size());
-
- const uint8_t *ptr = mBuffer->data() + offset;
- return (ptr[0] << 24) | (ptr[1] << 16) | (ptr[2] << 8) | ptr[3];
-}
-
-uint64_t FragmentedMP4Parser::readU64(size_t offset) {
- return (((uint64_t)readU32(offset)) << 32) | readU32(offset + 4);
-}
-
-void FragmentedMP4Parser::skip(off_t distance) {
- CHECK(!mStack.isEmpty());
- for (size_t i = mStack.size(); i-- > 0;) {
- Container *container = &mStack.editItemAt(i);
- if (!container->mExtendsToEOF) {
- CHECK_LE(distance, (off_t)container->mBytesRemaining);
-
- container->mBytesRemaining -= distance;
-
- if (container->mBytesRemaining == 0) {
- ALOGV("%sleaving box of type '%s'",
- IndentString(mStack.size() - 1),
- Fourcc2String(container->mType));
-
-#if 0
- if (container->mType == FOURCC('s', 't', 's', 'd')) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
- for (size_t i = 0;
- i < trackInfo->mSampleDescs.size(); ++i) {
- ALOGI("format #%d: %s",
- i,
- trackInfo->mSampleDescs.itemAt(i)
- .mFormat->debugString().c_str());
- }
- }
-#endif
-
- if (container->mType == FOURCC('s', 't', 'b', 'l')) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- trackInfo->mStaticFragment->signalCompletion();
-
- CHECK(trackInfo->mFragments.empty());
- trackInfo->mFragments.push_back(trackInfo->mStaticFragment);
- trackInfo->mStaticFragment.clear();
- } else if (container->mType == FOURCC('t', 'r', 'a', 'f')) {
- TrackInfo *trackInfo =
- editTrack(mTrackFragmentHeaderInfo.mTrackID);
-
- const sp<TrackFragment> &fragment =
- *--trackInfo->mFragments.end();
-
- static_cast<DynamicTrackFragment *>(
- fragment.get())->signalCompletion();
- } else if (container->mType == FOURCC('m', 'o', 'o', 'v')) {
- mDoneWithMoov = true;
- }
-
- container = NULL;
- mStack.removeItemsAt(i);
- }
- }
- }
-
- if (distance < (off_t)mBuffer->size()) {
- mBuffer->setRange(mBuffer->offset() + distance, mBuffer->size() - distance);
- mBufferPos += distance;
- return;
- }
-
- mBuffer->setRange(0, 0);
- mBufferPos += distance;
-}
-
-status_t FragmentedMP4Parser::parseTrackHeader(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- uint32_t flags = readU32(offset);
-
- uint32_t version = flags >> 24;
- flags &= 0xffffff;
-
- uint32_t trackID;
- uint64_t duration;
-
- if (version == 1) {
- if (offset + 36 > size) {
- return -EINVAL;
- }
-
- trackID = readU32(offset + 20);
- duration = readU64(offset + 28);
-
- offset += 36;
- } else if (version == 0) {
- if (offset + 24 > size) {
- return -EINVAL;
- }
-
- trackID = readU32(offset + 12);
- duration = readU32(offset + 20);
-
- offset += 24;
- } else {
- return -EINVAL;
- }
-
- TrackInfo *info = editTrack(trackID, true /* createIfNecessary */);
- info->mFlags = flags;
- info->mDuration = duration;
- if (info->mDuration == 0xffffffff) {
- // ffmpeg sets this to -1, which is incorrect.
- info->mDuration = 0;
- }
-
- info->mStaticFragment = new StaticTrackFragment;
-
- mCurrentTrackID = trackID;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseMediaHeader(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- uint32_t versionAndFlags = readU32(offset);
-
- if (versionAndFlags & 0xffffff) {
- return ERROR_MALFORMED;
- }
-
- uint32_t version = versionAndFlags >> 24;
-
- TrackInfo *info = editTrack(mCurrentTrackID);
-
- if (version == 1) {
- if (offset + 4 + 32 > size) {
- return -EINVAL;
- }
- info->mMediaTimeScale = U32_AT(mBuffer->data() + offset + 20);
- } else if (version == 0) {
- if (offset + 4 + 20 > size) {
- return -EINVAL;
- }
- info->mMediaTimeScale = U32_AT(mBuffer->data() + offset + 12);
- } else {
- return ERROR_MALFORMED;
- }
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseMediaHandler(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 12 > size) {
- return -EINVAL;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- uint32_t handlerType = readU32(offset + 8);
-
- switch (handlerType) {
- case FOURCC('v', 'i', 'd', 'e'):
- case FOURCC('s', 'o', 'u', 'n'):
- case FOURCC('h', 'i', 'n', 't'):
- case FOURCC('m', 'e', 't', 'a'):
- break;
-
- default:
- return -EINVAL;
- }
-
- editTrack(mCurrentTrackID)->mMediaHandlerType = handlerType;
-
- return OK;
-}
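
FOURCC packs four characters into one big-endian 32-bit code, which is why handler and box types read with readU32() can be compared directly against constants like FOURCC('v','i','d','e'). The macro itself lives elsewhere in stagefright and is not shown in this diff; a minimal sketch of an equivalent helper, for reference:

    #include <cstdint>

    // Sketch: pack four characters into a 32-bit type code, matching the
    // big-endian byte order used by readU32() above.
    static constexpr uint32_t fourcc(char a, char b, char c, char d) {
        return (uint32_t(uint8_t(a)) << 24) | (uint32_t(uint8_t(b)) << 16)
             | (uint32_t(uint8_t(c)) << 8)  |  uint32_t(uint8_t(d));
    }
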
-
-status_t FragmentedMP4Parser::parseVisualSampleEntry(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 78 > size) {
- return -EINVAL;
- }
-
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- trackInfo->mSampleDescs.push();
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- sampleDesc->mType = type;
- sampleDesc->mDataRefIndex = readU16(offset + 6);
-
- sp<AMessage> format = new AMessage;
-
- switch (type) {
- case FOURCC('a', 'v', 'c', '1'):
- format->setString("mime", MEDIA_MIMETYPE_VIDEO_AVC);
- break;
- case FOURCC('m', 'p', '4', 'v'):
- format->setString("mime", MEDIA_MIMETYPE_VIDEO_MPEG4);
- break;
- case FOURCC('s', '2', '6', '3'):
- case FOURCC('h', '2', '6', '3'):
- case FOURCC('H', '2', '6', '3'):
- format->setString("mime", MEDIA_MIMETYPE_VIDEO_H263);
- break;
- default:
- format->setString("mime", "application/octet-stream");
- break;
- }
-
- format->setInt32("width", readU16(offset + 8 + 16));
- format->setInt32("height", readU16(offset + 8 + 18));
-
- sampleDesc->mFormat = format;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseAudioSampleEntry(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 28 > size) {
- return -EINVAL;
- }
-
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- trackInfo->mSampleDescs.push();
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- sampleDesc->mType = type;
- sampleDesc->mDataRefIndex = readU16(offset + 6);
-
- sp<AMessage> format = new AMessage;
-
- format->setInt32("channel-count", readU16(offset + 8 + 8));
- format->setInt32("sample-size", readU16(offset + 8 + 10));
- format->setInt32("sample-rate", readU32(offset + 8 + 16) / 65536.0f);
-
- switch (type) {
- case FOURCC('m', 'p', '4', 'a'):
- format->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
- break;
-
- case FOURCC('s', 'a', 'm', 'r'):
- format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
- format->setInt32("channel-count", 1);
- format->setInt32("sample-rate", 8000);
- break;
-
- case FOURCC('s', 'a', 'w', 'b'):
- format->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
- format->setInt32("channel-count", 1);
- format->setInt32("sample-rate", 16000);
- break;
- default:
- format->setString("mime", "application/octet-stream");
- break;
- }
-
- sampleDesc->mFormat = format;
-
- return OK;
-}
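
The sample-rate field of an AudioSampleEntry is a 16.16 fixed-point value, which is why the raw 32-bit word above is divided by 65536 before being stored (the float quotient is then truncated by setInt32). The equivalent integer form, as a small sketch:

    #include <cstdint>

    // Sketch: the integer sample rate lives in the top 16 bits of the
    // 16.16 fixed-point field of an AudioSampleEntry.
    static uint32_t sampleRateFromFixed1616(uint32_t fixed) {
        return fixed >> 16;   // e.g. 0xAC440000 -> 44100
    }
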
-
-static void addCodecSpecificData(
- const sp<AMessage> &format, int32_t index,
- const void *data, size_t size,
- bool insertStartCode = false) {
- sp<ABuffer> csd = new ABuffer(insertStartCode ? size + 4 : size);
-
- memcpy(csd->data() + (insertStartCode ? 4 : 0), data, size);
-
- if (insertStartCode) {
- memcpy(csd->data(), "\x00\x00\x00\x01", 4);
- }
-
- csd->meta()->setInt32("csd", true);
- csd->meta()->setInt64("timeUs", 0ll);
-
- format->setBuffer(StringPrintf("csd-%d", index).c_str(), csd);
-}
-
-status_t FragmentedMP4Parser::parseSampleSizes(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseSampleSizes(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseCompactSampleSizes(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseCompactSampleSizes(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseSampleToChunk(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseSampleToChunk(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseChunkOffsets(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseChunkOffsets(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseChunkOffsets64(
- uint32_t type, size_t offset, uint64_t size) {
- return editTrack(mCurrentTrackID)->mStaticFragment->parseChunkOffsets64(
- this, type, offset, size);
-}
-
-status_t FragmentedMP4Parser::parseAVCCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- if (sampleDesc->mType != FOURCC('a', 'v', 'c', '1')) {
- return -EINVAL;
- }
-
- const uint8_t *ptr = mBuffer->data() + offset;
-
- size -= offset;
- offset = 0;
-
- if (size < 7 || ptr[0] != 0x01) {
- return ERROR_MALFORMED;
- }
-
- sampleDesc->mFormat->setSize("nal-length-size", 1 + (ptr[4] & 3));
-
- size_t numSPS = ptr[5] & 31;
-
- ptr += 6;
- size -= 6;
-
- for (size_t i = 0; i < numSPS; ++i) {
- if (size < 2) {
- return ERROR_MALFORMED;
- }
-
- size_t length = U16_AT(ptr);
-
- ptr += 2;
- size -= 2;
-
- if (size < length) {
- return ERROR_MALFORMED;
- }
-
- addCodecSpecificData(
- sampleDesc->mFormat, i, ptr, length,
- true /* insertStartCode */);
-
- ptr += length;
- size -= length;
- }
-
- if (size < 1) {
- return ERROR_MALFORMED;
- }
-
- size_t numPPS = *ptr;
- ++ptr;
- --size;
-
- for (size_t i = 0; i < numPPS; ++i) {
- if (size < 2) {
- return ERROR_MALFORMED;
- }
-
- size_t length = U16_AT(ptr);
-
- ptr += 2;
- size -= 2;
-
- if (size < length) {
- return ERROR_MALFORMED;
- }
-
- addCodecSpecificData(
- sampleDesc->mFormat, numSPS + i, ptr, length,
- true /* insertStartCode */);
-
- ptr += length;
- size -= length;
- }
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseESDSCodecSpecificData(
- uint32_t type, size_t offset, uint64_t size) {
- TrackInfo *trackInfo = editTrack(mCurrentTrackID);
-
- SampleDescription *sampleDesc =
- &trackInfo->mSampleDescs.editItemAt(
- trackInfo->mSampleDescs.size() - 1);
-
- if (sampleDesc->mType != FOURCC('m', 'p', '4', 'a')
- && sampleDesc->mType != FOURCC('m', 'p', '4', 'v')) {
- return -EINVAL;
- }
-
- const uint8_t *ptr = mBuffer->data() + offset;
-
- size -= offset;
- offset = 0;
-
- if (size < 4) {
- return -EINVAL;
- }
-
- if (U32_AT(ptr) != 0) {
- return -EINVAL;
- }
-
- ptr += 4;
- size -= 4;
-
- ESDS esds(ptr, size);
-
- uint8_t objectTypeIndication;
- if (esds.getObjectTypeIndication(&objectTypeIndication) != OK) {
- return ERROR_MALFORMED;
- }
-
- const uint8_t *csd;
- size_t csd_size;
- if (esds.getCodecSpecificInfo(
- (const void **)&csd, &csd_size) != OK) {
- return ERROR_MALFORMED;
- }
-
- addCodecSpecificData(sampleDesc->mFormat, 0, csd, csd_size);
-
- if (sampleDesc->mType != FOURCC('m', 'p', '4', 'a')) {
- return OK;
- }
-
- if (csd_size == 0) {
- // There's no further information, i.e. no codec specific data.
- // Let's assume that the information provided in the mpeg4 headers
- // is accurate and hope for the best.
-
- return OK;
- }
-
- if (csd_size < 2) {
- return ERROR_MALFORMED;
- }
-
- uint32_t objectType = csd[0] >> 3;
-
- if (objectType == 31) {
- return ERROR_UNSUPPORTED;
- }
-
- uint32_t freqIndex = (csd[0] & 7) << 1 | (csd[1] >> 7);
- int32_t sampleRate = 0;
- int32_t numChannels = 0;
- if (freqIndex == 15) {
- if (csd_size < 5) {
- return ERROR_MALFORMED;
- }
-
- sampleRate = (csd[1] & 0x7f) << 17
- | csd[2] << 9
- | csd[3] << 1
- | (csd[4] >> 7);
-
- numChannels = (csd[4] >> 3) & 15;
- } else {
- static uint32_t kSamplingRate[] = {
- 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
- 16000, 12000, 11025, 8000, 7350
- };
-
- if (freqIndex == 13 || freqIndex == 14) {
- return ERROR_MALFORMED;
- }
-
- sampleRate = kSamplingRate[freqIndex];
- numChannels = (csd[1] >> 3) & 15;
- }
-
- if (numChannels == 0) {
- return ERROR_UNSUPPORTED;
- }
-
- sampleDesc->mFormat->setInt32("sample-rate", sampleRate);
- sampleDesc->mFormat->setInt32("channel-count", numChannels);
-
- return OK;
-}
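
The AAC branch above decodes the start of an AudioSpecificConfig: a 5-bit audio object type, a 4-bit sampling-frequency index, and a 4-bit channel configuration. A compact sketch of that bit layout for the common two-byte case; it deliberately skips the explicit-sample-rate path (frequency index 15) that the code above handles, and the names are illustrative:

    #include <cstdint>
    #include <cstddef>

    // Sketch: decode the first two bytes of an AAC AudioSpecificConfig.
    // Returns false for undersized input, escape object type 31, and
    // frequency indices 13..15 (reserved or explicit-rate cases).
    static bool decodeAscPrefix(const uint8_t *csd, size_t size,
                                uint32_t *objectType, uint32_t *sampleRate,
                                uint32_t *channels) {
        static const uint32_t kRates[] = {
            96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050,
            16000, 12000, 11025, 8000, 7350
        };
        if (size < 2) return false;
        *objectType = csd[0] >> 3;                                // 5 bits
        uint32_t freqIndex = ((csd[0] & 7) << 1) | (csd[1] >> 7); // 4 bits
        if (*objectType == 31 || freqIndex >= 13) return false;
        *sampleRate = kRates[freqIndex];
        *channels = (csd[1] >> 3) & 15;                           // 4 bits
        return true;
    }
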
-
-status_t FragmentedMP4Parser::parseMediaData(
- uint32_t type, size_t offset, uint64_t size) {
- ALOGV("skipping 'mdat' chunk at offsets 0x%08lx-0x%08llx.",
- mBufferPos + offset, mBufferPos + size);
-
- sp<ABuffer> buffer = new ABuffer(size - offset);
- memcpy(buffer->data(), mBuffer->data() + offset, size - offset);
-
- mMediaData.push();
- MediaDataInfo *info = &mMediaData.editItemAt(mMediaData.size() - 1);
- info->mBuffer = buffer;
- info->mOffset = mBufferPos + offset;
-
- if (mMediaData.size() > 10) {
- ALOGV("suspending for now.");
- mSuspended = true;
- }
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::parseSegmentIndex(
- uint32_t type, size_t offset, uint64_t size) {
- ALOGV("sidx box type %d, offset %d, size %d", type, int(offset), int(size));
-// AString sidxstr;
-// hexdump(mBuffer->data() + offset, size, 0 /* indent */, &sidxstr);
-// ALOGV("raw sidx:");
-// ALOGV("%s", sidxstr.c_str());
- if (offset + 12 > size) {
- return -EINVAL;
- }
-
- uint32_t flags = readU32(offset);
-
- uint32_t version = flags >> 24;
- flags &= 0xffffff;
-
- ALOGV("sidx version %d", version);
-
- uint32_t referenceId = readU32(offset + 4);
- uint32_t timeScale = readU32(offset + 8);
- ALOGV("sidx refid/timescale: %d/%d", referenceId, timeScale);
-
- uint64_t earliestPresentationTime;
- uint64_t firstOffset;
-
- offset += 12;
-
- if (version == 0) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
- earliestPresentationTime = readU32(offset);
- firstOffset = readU32(offset + 4);
- offset += 8;
- } else {
- if (offset + 16 > size) {
- return -EINVAL;
- }
- earliestPresentationTime = readU64(offset);
- firstOffset = readU64(offset + 8);
- offset += 16;
- }
- ALOGV("sidx pres/off: %Ld/%Ld", earliestPresentationTime, firstOffset);
-
- if (offset + 4 > size) {
- return -EINVAL;
- }
- if (readU16(offset) != 0) { // reserved
- return -EINVAL;
- }
- int32_t referenceCount = readU16(offset + 2);
- offset += 4;
- ALOGV("refcount: %d", referenceCount);
-
- if (offset + referenceCount * 12 > size) {
- return -EINVAL;
- }
-
- TrackInfo *info = editTrack(mCurrentTrackID);
- uint64_t total_duration = 0;
- for (int i = 0; i < referenceCount; i++) {
- uint32_t d1 = readU32(offset);
- uint32_t d2 = readU32(offset + 4);
- uint32_t d3 = readU32(offset + 8);
-
- if (d1 & 0x80000000) {
- ALOGW("sub-sidx boxes not supported yet");
- }
- bool sap = d3 & 0x80000000;
- uint32_t saptype = (d3 >> 28) & 7;
- if (!sap || saptype > 2) {
- ALOGW("not a stream access point, or unsupported type");
- }
- total_duration += d2;
- offset += 12;
- ALOGV(" item %d, %08x %08x %08x", i, d1, d2, d3);
- SidxEntry se;
- se.mSize = d1 & 0x7fffffff;
- se.mDurationUs = 1000000LL * d2 / timeScale;
- info->mSidx.add(se);
- }
-
- info->mSidxDuration = total_duration * 1000000 / timeScale;
- ALOGV("duration: %lld", info->mSidxDuration);
- return OK;
-}
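
Each 12-byte sidx reference entry packs several bitfields: the top bit of the first word flags a reference to a nested sidx, the low 31 bits give the referenced size in bytes, the second word is the subsegment duration in timescale units, and the third word carries starts_with_SAP in its top bit and the SAP type in the next three. A sketch of that unpacking, mirroring the d1/d2/d3 reads in the loop above:

    #include <cstdint>

    // Sketch: unpack one sidx reference entry from its three big-endian words.
    struct SidxReference {
        bool     referencesSidx;   // d1 bit 31: points at a nested sidx box
        uint32_t referencedSize;   // d1 bits 0..30, in bytes
        uint32_t subsegDuration;   // d2, in media timescale units
        bool     startsWithSap;    // d3 bit 31
        uint32_t sapType;          // d3 bits 28..30
    };

    static SidxReference unpackSidxReference(uint32_t d1, uint32_t d2, uint32_t d3) {
        SidxReference ref;
        ref.referencesSidx = (d1 & 0x80000000u) != 0;
        ref.referencedSize = d1 & 0x7fffffffu;
        ref.subsegDuration = d2;
        ref.startsWithSap  = (d3 & 0x80000000u) != 0;
        ref.sapType        = (d3 >> 28) & 7;
        return ref;
    }
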
-
-status_t FragmentedMP4Parser::parseTrackExtends(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 24 > size) {
- return -EINVAL;
- }
-
- if (readU32(offset) != 0) {
- return -EINVAL;
- }
-
- uint32_t trackID = readU32(offset + 4);
-
- TrackInfo *info = editTrack(trackID, true /* createIfNecessary */);
- info->mDefaultSampleDescriptionIndex = readU32(offset + 8);
- info->mDefaultSampleDuration = readU32(offset + 12);
- info->mDefaultSampleSize = readU32(offset + 16);
- info->mDefaultSampleFlags = readU32(offset + 20);
-
- return OK;
-}
-
-FragmentedMP4Parser::TrackInfo *FragmentedMP4Parser::editTrack(
- uint32_t trackID, bool createIfNecessary) {
- ssize_t i = mTracks.indexOfKey(trackID);
-
- if (i >= 0) {
- return &mTracks.editValueAt(i);
- }
-
- if (!createIfNecessary) {
- return NULL;
- }
-
- TrackInfo info;
- info.mTrackID = trackID;
- info.mFlags = 0;
- info.mDuration = 0xffffffff;
- info.mSidxDuration = 0;
- info.mMediaTimeScale = 0;
- info.mMediaHandlerType = 0;
- info.mDefaultSampleDescriptionIndex = 0;
- info.mDefaultSampleDuration = 0;
- info.mDefaultSampleSize = 0;
- info.mDefaultSampleFlags = 0;
-
- info.mDecodingTime = 0;
-
- mTracks.add(trackID, info);
- return &mTracks.editValueAt(mTracks.indexOfKey(trackID));
-}
-
-status_t FragmentedMP4Parser::parseTrackFragmentHeader(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
-
- uint32_t flags = readU32(offset);
-
- if (flags & 0xff000000) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mFlags = flags;
-
- mTrackFragmentHeaderInfo.mTrackID = readU32(offset + 4);
- offset += 8;
-
- if (flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mBaseDataOffset = readU64(offset);
- offset += 8;
- }
-
- if (flags & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mSampleDescriptionIndex = readU32(offset);
- offset += 4;
- }
-
- if (flags & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mDefaultSampleDuration = readU32(offset);
- offset += 4;
- }
-
- if (flags & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mDefaultSampleSize = readU32(offset);
- offset += 4;
- }
-
- if (flags & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- mTrackFragmentHeaderInfo.mDefaultSampleFlags = readU32(offset);
- offset += 4;
- }
-
- if (!(flags & TrackFragmentHeaderInfo::kBaseDataOffsetPresent)) {
- // This should point to the position of the first byte of the
- // enclosing 'moof' container for the first track and
- // the end of the data of the preceding fragment for subsequent
- // tracks.
-
- CHECK_GE(mStack.size(), 2u);
-
- mTrackFragmentHeaderInfo.mBaseDataOffset =
- mStack.itemAt(mStack.size() - 2).mOffset;
-
- // XXX TODO: This does not do the right thing for the 2nd and
- // subsequent tracks yet.
- }
-
- mTrackFragmentHeaderInfo.mDataOffset =
- mTrackFragmentHeaderInfo.mBaseDataOffset;
-
- TrackInfo *trackInfo = editTrack(mTrackFragmentHeaderInfo.mTrackID);
-
- if (trackInfo->mFragments.empty()
- || (*trackInfo->mFragments.begin())->complete()) {
- trackInfo->mFragments.push_back(new DynamicTrackFragment);
- }
-
- return OK;
-}
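
parseTrackFragmentHeader() keys off the standard tfhd flag bits; the constants themselves are declared elsewhere in the parser's header and are not visible in this diff. For reference, a sketch of the values as defined by ISO/IEC 14496-12 (the names mirror the code, but the definitions here are reconstructed, not quoted):

    #include <cstdint>

    // Sketch: tfhd tf_flags bits (per ISO/IEC 14496-12).
    enum TfhdFlags : uint32_t {
        kBaseDataOffsetPresent         = 0x000001,
        kSampleDescriptionIndexPresent = 0x000002,
        kDefaultSampleDurationPresent  = 0x000008,
        kDefaultSampleSizePresent      = 0x000010,
        kDefaultSampleFlagsPresent     = 0x000020,
        kDurationIsEmpty               = 0x010000,  // defined by the spec, not used above
        kDefaultBaseIsMoof             = 0x020000,  // defined by the spec, not used above
    };
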
-
-status_t FragmentedMP4Parser::parseTrackFragmentRun(
- uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return -EINVAL;
- }
-
- enum {
- kDataOffsetPresent = 0x01,
- kFirstSampleFlagsPresent = 0x04,
- kSampleDurationPresent = 0x100,
- kSampleSizePresent = 0x200,
- kSampleFlagsPresent = 0x400,
- kSampleCompositionTimeOffsetPresent = 0x800,
- };
-
- uint32_t flags = readU32(offset);
-
- if (flags & 0xff000000) {
- return -EINVAL;
- }
-
- if ((flags & kFirstSampleFlagsPresent) && (flags & kSampleFlagsPresent)) {
- // These two shall not be used together.
- return -EINVAL;
- }
-
- uint32_t sampleCount = readU32(offset + 4);
- offset += 8;
-
- uint64_t dataOffset = mTrackFragmentHeaderInfo.mDataOffset;
-
- uint32_t firstSampleFlags = 0;
-
- if (flags & kDataOffsetPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- int32_t dataOffsetDelta = (int32_t)readU32(offset);
-
- dataOffset = mTrackFragmentHeaderInfo.mBaseDataOffset + dataOffsetDelta;
-
- offset += 4;
- }
-
- if (flags & kFirstSampleFlagsPresent) {
- if (offset + 4 > size) {
- return -EINVAL;
- }
-
- firstSampleFlags = readU32(offset);
- offset += 4;
- }
-
- TrackInfo *info = editTrack(mTrackFragmentHeaderInfo.mTrackID);
-
- if (info == NULL) {
- return -EINVAL;
- }
-
- uint32_t sampleDuration = 0, sampleSize = 0, sampleFlags = 0,
- sampleCtsOffset = 0;
-
- size_t bytesPerSample = 0;
- if (flags & kSampleDurationPresent) {
- bytesPerSample += 4;
- } else if (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kDefaultSampleDurationPresent) {
- sampleDuration = mTrackFragmentHeaderInfo.mDefaultSampleDuration;
- } else {
- sampleDuration = info->mDefaultSampleDuration;
- }
-
- if (flags & kSampleSizePresent) {
- bytesPerSample += 4;
- } else if (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kDefaultSampleSizePresent) {
- sampleSize = mTrackFragmentHeaderInfo.mDefaultSampleSize;
- } else {
- sampleSize = info->mDefaultSampleSize;
- }
-
- if (flags & kSampleFlagsPresent) {
- bytesPerSample += 4;
- } else if (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kDefaultSampleFlagsPresent) {
- sampleFlags = mTrackFragmentHeaderInfo.mDefaultSampleFlags;
- } else {
- sampleFlags = info->mDefaultSampleFlags;
- }
-
- if (flags & kSampleCompositionTimeOffsetPresent) {
- bytesPerSample += 4;
- } else {
- sampleCtsOffset = 0;
- }
-
- if (offset + sampleCount * bytesPerSample > size) {
- return -EINVAL;
- }
-
- uint32_t sampleDescIndex =
- (mTrackFragmentHeaderInfo.mFlags
- & TrackFragmentHeaderInfo::kSampleDescriptionIndexPresent)
- ? mTrackFragmentHeaderInfo.mSampleDescriptionIndex
- : info->mDefaultSampleDescriptionIndex;
-
- for (uint32_t i = 0; i < sampleCount; ++i) {
- if (flags & kSampleDurationPresent) {
- sampleDuration = readU32(offset);
- offset += 4;
- }
-
- if (flags & kSampleSizePresent) {
- sampleSize = readU32(offset);
- offset += 4;
- }
-
- if (flags & kSampleFlagsPresent) {
- sampleFlags = readU32(offset);
- offset += 4;
- }
-
- if (flags & kSampleCompositionTimeOffsetPresent) {
- sampleCtsOffset = readU32(offset);
- offset += 4;
- }
-
- ALOGV("adding sample at offset 0x%08llx, size %u, duration %u, "
- "sampleDescIndex=%u, flags 0x%08x",
- dataOffset, sampleSize, sampleDuration,
- sampleDescIndex,
- (flags & kFirstSampleFlagsPresent) && i == 0
- ? firstSampleFlags : sampleFlags);
-
- const sp<TrackFragment> &fragment = *--info->mFragments.end();
-
- uint32_t decodingTime = info->mDecodingTime;
- info->mDecodingTime += sampleDuration;
- uint32_t presentationTime = decodingTime + sampleCtsOffset;
-
- static_cast<DynamicTrackFragment *>(
- fragment.get())->addSample(
- dataOffset,
- sampleSize,
- presentationTime,
- sampleDescIndex,
- ((flags & kFirstSampleFlagsPresent) && i == 0)
- ? firstSampleFlags : sampleFlags);
-
- dataOffset += sampleSize;
- }
-
- mTrackFragmentHeaderInfo.mDataOffset = dataOffset;
-
- return OK;
-}
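
The per-sample payload of a trun box is variable: duration, size, flags, and composition-time offset each contribute four bytes only when the corresponding flag bit is set, which is exactly how bytesPerSample bounds the read above. A one-function sketch of that computation, reusing the flag values from the enum above:

    #include <cstdint>
    #include <cstddef>

    // Sketch: size in bytes of one trun sample record for a given tr_flags.
    static size_t trunBytesPerSample(uint32_t flags) {
        size_t n = 0;
        if (flags & 0x100) n += 4;   // sample_duration present
        if (flags & 0x200) n += 4;   // sample_size present
        if (flags & 0x400) n += 4;   // sample_flags present
        if (flags & 0x800) n += 4;   // sample_composition_time_offset present
        return n;
    }
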
-
-void FragmentedMP4Parser::copyBuffer(
- sp<ABuffer> *dst, size_t offset, uint64_t size) const {
- sp<ABuffer> buf = new ABuffer(size);
- memcpy(buf->data(), mBuffer->data() + offset, size);
-
- *dst = buf;
-}
-
-} // namespace android
diff --git a/media/libstagefright/mp4/TrackFragment.cpp b/media/libstagefright/mp4/TrackFragment.cpp
deleted file mode 100644
index 3699038..0000000
--- a/media/libstagefright/mp4/TrackFragment.cpp
+++ /dev/null
@@ -1,364 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "TrackFragment"
-#include <utils/Log.h>
-
-#include "TrackFragment.h"
-
-#include <media/stagefright/MediaErrors.h>
-#include <media/stagefright/Utils.h>
-#include <media/stagefright/foundation/ABuffer.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/hexdump.h>
-
-namespace android {
-
-FragmentedMP4Parser::DynamicTrackFragment::DynamicTrackFragment()
- : mComplete(false),
- mSampleIndex(0) {
-}
-
-FragmentedMP4Parser::DynamicTrackFragment::~DynamicTrackFragment() {
-}
-
-status_t FragmentedMP4Parser::DynamicTrackFragment::getSample(SampleInfo *info) {
- if (mSampleIndex >= mSamples.size()) {
- return mComplete ? ERROR_END_OF_STREAM : -EWOULDBLOCK;
- }
-
- *info = mSamples.itemAt(mSampleIndex);
-
- return OK;
-}
-
-void FragmentedMP4Parser::DynamicTrackFragment::advance() {
- ++mSampleIndex;
-}
-
-void FragmentedMP4Parser::DynamicTrackFragment::addSample(
- off64_t dataOffset, size_t sampleSize,
- uint32_t presentationTime,
- size_t sampleDescIndex,
- uint32_t flags) {
- mSamples.push();
- SampleInfo *sampleInfo = &mSamples.editItemAt(mSamples.size() - 1);
-
- sampleInfo->mOffset = dataOffset;
- sampleInfo->mSize = sampleSize;
- sampleInfo->mPresentationTime = presentationTime;
- sampleInfo->mSampleDescIndex = sampleDescIndex;
- sampleInfo->mFlags = flags;
-}
-
-status_t FragmentedMP4Parser::DynamicTrackFragment::signalCompletion() {
- mComplete = true;
-
- return OK;
-}
-
-bool FragmentedMP4Parser::DynamicTrackFragment::complete() const {
- return mComplete;
-}
-
-////////////////////////////////////////////////////////////////////////////////
-
-FragmentedMP4Parser::StaticTrackFragment::StaticTrackFragment()
- : mSampleIndex(0),
- mSampleCount(0),
- mChunkIndex(0),
- mSampleToChunkIndex(-1),
- mSampleToChunkRemaining(0),
- mPrevChunkIndex(0xffffffff),
- mNextSampleOffset(0) {
-}
-
-FragmentedMP4Parser::StaticTrackFragment::~StaticTrackFragment() {
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::getSample(SampleInfo *info) {
- if (mSampleIndex >= mSampleCount) {
- return ERROR_END_OF_STREAM;
- }
-
- *info = mSampleInfo;
-
- ALOGV("returning sample %d at [0x%08llx, 0x%08llx)",
- mSampleIndex,
- info->mOffset, info->mOffset + info->mSize);
-
- return OK;
-}
-
-void FragmentedMP4Parser::StaticTrackFragment::updateSampleInfo() {
- if (mSampleIndex >= mSampleCount) {
- return;
- }
-
- if (mSampleSizes != NULL) {
- uint32_t defaultSampleSize = U32_AT(mSampleSizes->data() + 4);
- if (defaultSampleSize > 0) {
- mSampleInfo.mSize = defaultSampleSize;
- } else {
- mSampleInfo.mSize= U32_AT(mSampleSizes->data() + 12 + 4 * mSampleIndex);
- }
- } else {
- CHECK(mCompactSampleSizes != NULL);
-
- uint32_t fieldSize = U32_AT(mCompactSampleSizes->data() + 4);
-
- switch (fieldSize) {
- case 4:
- {
- unsigned byte = mCompactSampleSizes->data()[12 + mSampleIndex / 2];
- mSampleInfo.mSize = (mSampleIndex & 1) ? byte & 0x0f : byte >> 4;
- break;
- }
-
- case 8:
- {
- mSampleInfo.mSize = mCompactSampleSizes->data()[12 + mSampleIndex];
- break;
- }
-
- default:
- {
- CHECK_EQ(fieldSize, 16);
- mSampleInfo.mSize =
- U16_AT(mCompactSampleSizes->data() + 12 + mSampleIndex * 2);
- break;
- }
- }
- }
-
- CHECK_GT(mSampleToChunkRemaining, 0);
-
- // The sample desc index is 1-based... XXX
- mSampleInfo.mSampleDescIndex =
- U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 8);
-
- if (mChunkIndex != mPrevChunkIndex) {
- mPrevChunkIndex = mChunkIndex;
-
- if (mChunkOffsets != NULL) {
- uint32_t entryCount = U32_AT(mChunkOffsets->data() + 4);
-
- if (mChunkIndex >= entryCount) {
- mSampleIndex = mSampleCount;
- return;
- }
-
- mNextSampleOffset =
- U32_AT(mChunkOffsets->data() + 8 + 4 * mChunkIndex);
- } else {
- CHECK(mChunkOffsets64 != NULL);
-
- uint32_t entryCount = U32_AT(mChunkOffsets64->data() + 4);
-
- if (mChunkIndex >= entryCount) {
- mSampleIndex = mSampleCount;
- return;
- }
-
- mNextSampleOffset =
- U64_AT(mChunkOffsets64->data() + 8 + 8 * mChunkIndex);
- }
- }
-
- mSampleInfo.mOffset = mNextSampleOffset;
-
- mSampleInfo.mPresentationTime = 0;
- mSampleInfo.mFlags = 0;
-}
-
-void FragmentedMP4Parser::StaticTrackFragment::advance() {
- mNextSampleOffset += mSampleInfo.mSize;
-
- ++mSampleIndex;
- if (--mSampleToChunkRemaining == 0) {
- ++mChunkIndex;
-
- uint32_t entryCount = U32_AT(mSampleToChunk->data() + 4);
-
- // If this is the last entry in the sample to chunk table, we will
- // stay on this entry.
- if ((uint32_t)(mSampleToChunkIndex + 1) < entryCount) {
- uint32_t nextChunkIndex =
- U32_AT(mSampleToChunk->data() + 8 + 12 * (mSampleToChunkIndex + 1));
-
- CHECK_GE(nextChunkIndex, 1u);
- --nextChunkIndex;
-
- if (mChunkIndex >= nextChunkIndex) {
- CHECK_EQ(mChunkIndex, nextChunkIndex);
- ++mSampleToChunkIndex;
- }
- }
-
- mSampleToChunkRemaining =
- U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 4);
- }
-
- updateSampleInfo();
-}
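
advance() walks three classic MP4 tables in lockstep: stsz/stz2 gives each sample's size, stsc maps runs of chunks to a samples-per-chunk count, and stco/co64 gives each chunk's file offset; within a chunk the next sample starts where the previous one ended. A standalone sketch of the same mapping computed directly from already-decoded tables, rather than incrementally as above; the types and names are illustrative:

    #include <cstdint>
    #include <cstddef>
    #include <vector>

    struct StscEntry { uint32_t firstChunk; uint32_t samplesPerChunk; };  // firstChunk is 1-based

    // Sketch: file offset of sample 'target' (0-based), given decoded
    // stsc entries, chunk offsets, and per-sample sizes.
    static uint64_t sampleOffset(const std::vector<StscEntry> &stsc,
                                 const std::vector<uint64_t> &chunkOffsets,
                                 const std::vector<uint32_t> &sampleSizes,
                                 size_t target) {
        size_t sample = 0;
        for (size_t e = 0; e < stsc.size(); ++e) {
            uint32_t firstChunk = stsc[e].firstChunk - 1;
            uint32_t lastChunk = (e + 1 < stsc.size())
                    ? stsc[e + 1].firstChunk - 1
                    : (uint32_t)chunkOffsets.size();
            for (uint32_t chunk = firstChunk; chunk < lastChunk; ++chunk) {
                uint64_t offset = chunkOffsets[chunk];
                for (uint32_t i = 0; i < stsc[e].samplesPerChunk; ++i, ++sample) {
                    if (sample >= sampleSizes.size()) return 0;  // inconsistent tables
                    if (sample == target) return offset;
                    offset += sampleSizes[sample];
                }
            }
        }
        return 0;  // target out of range
    }
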
-
-static void setU32At(uint8_t *ptr, uint32_t x) {
- ptr[0] = x >> 24;
- ptr[1] = (x >> 16) & 0xff;
- ptr[2] = (x >> 8) & 0xff;
- ptr[3] = x & 0xff;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::signalCompletion() {
- mSampleToChunkIndex = 0;
-
- mSampleToChunkRemaining =
- (mSampleToChunk == NULL)
- ? 0
- : U32_AT(mSampleToChunk->data() + 8 + 12 * mSampleToChunkIndex + 4);
-
- updateSampleInfo();
-
- return OK;
-}
-
-bool FragmentedMP4Parser::StaticTrackFragment::complete() const {
- return true;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 12 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t sampleSize = parser->readU32(offset + 4);
- uint32_t sampleCount = parser->readU32(offset + 8);
-
- if (sampleSize == 0 && offset + 12 + sampleCount * 4 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mSampleSizes, offset, size);
-
- mSampleCount = sampleCount;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseCompactSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 12 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t fieldSize = parser->readU32(offset + 4);
-
- if (fieldSize != 4 && fieldSize != 8 && fieldSize != 16) {
- return ERROR_MALFORMED;
- }
-
- uint32_t sampleCount = parser->readU32(offset + 8);
-
- if (offset + 12 + (sampleCount * fieldSize + 4) / 8 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mCompactSampleSizes, offset, size);
-
- mSampleCount = sampleCount;
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseSampleToChunk(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t entryCount = parser->readU32(offset + 4);
-
- if (entryCount == 0) {
- return OK;
- }
-
- if (offset + 8 + entryCount * 12 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mSampleToChunk, offset, size);
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseChunkOffsets(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t entryCount = parser->readU32(offset + 4);
-
- if (offset + 8 + entryCount * 4 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mChunkOffsets, offset, size);
-
- return OK;
-}
-
-status_t FragmentedMP4Parser::StaticTrackFragment::parseChunkOffsets64(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size) {
- if (offset + 8 > size) {
- return ERROR_MALFORMED;
- }
-
- if (parser->readU32(offset) != 0) {
- return ERROR_MALFORMED;
- }
-
- uint32_t entryCount = parser->readU32(offset + 4);
-
- if (offset + 8 + entryCount * 8 != size) {
- return ERROR_MALFORMED;
- }
-
- parser->copyBuffer(&mChunkOffsets64, offset, size);
-
- return OK;
-}
-
-} // namespace android
-
diff --git a/media/libstagefright/mp4/TrackFragment.h b/media/libstagefright/mp4/TrackFragment.h
deleted file mode 100644
index e1ad46e..0000000
--- a/media/libstagefright/mp4/TrackFragment.h
+++ /dev/null
@@ -1,122 +0,0 @@
-/*
- * Copyright (C) 2012 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef TRACK_FRAGMENT_H_
-
-#define TRACK_FRAGMENT_H_
-
-#include "include/FragmentedMP4Parser.h"
-
-namespace android {
-
-struct FragmentedMP4Parser::TrackFragment : public RefBase {
- TrackFragment() {}
-
- virtual status_t getSample(SampleInfo *info) = 0;
- virtual void advance() = 0;
-
- virtual status_t signalCompletion() = 0;
- virtual bool complete() const = 0;
-
-protected:
- virtual ~TrackFragment() {}
-
-private:
- DISALLOW_EVIL_CONSTRUCTORS(TrackFragment);
-};
-
-struct FragmentedMP4Parser::DynamicTrackFragment : public FragmentedMP4Parser::TrackFragment {
- DynamicTrackFragment();
-
- virtual status_t getSample(SampleInfo *info);
- virtual void advance();
-
- void addSample(
- off64_t dataOffset, size_t sampleSize,
- uint32_t presentationTime,
- size_t sampleDescIndex,
- uint32_t flags);
-
- // No more samples will be added to this fragment.
- virtual status_t signalCompletion();
-
- virtual bool complete() const;
-
-protected:
- virtual ~DynamicTrackFragment();
-
-private:
- bool mComplete;
- size_t mSampleIndex;
- Vector<SampleInfo> mSamples;
-
- DISALLOW_EVIL_CONSTRUCTORS(DynamicTrackFragment);
-};
-
-struct FragmentedMP4Parser::StaticTrackFragment : public FragmentedMP4Parser::TrackFragment {
- StaticTrackFragment();
-
- virtual status_t getSample(SampleInfo *info);
- virtual void advance();
-
- virtual status_t signalCompletion();
- virtual bool complete() const;
-
- status_t parseSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseCompactSampleSizes(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseSampleToChunk(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
- status_t parseChunkOffsets64(
- FragmentedMP4Parser *parser, uint32_t type, size_t offset, uint64_t size);
-
-protected:
- virtual ~StaticTrackFragment();
-
-private:
- size_t mSampleIndex;
- size_t mSampleCount;
- uint32_t mChunkIndex;
-
- SampleInfo mSampleInfo;
-
- sp<ABuffer> mSampleSizes;
- sp<ABuffer> mCompactSampleSizes;
-
- sp<ABuffer> mSampleToChunk;
- ssize_t mSampleToChunkIndex;
- size_t mSampleToChunkRemaining;
-
- sp<ABuffer> mChunkOffsets;
- sp<ABuffer> mChunkOffsets64;
- uint32_t mPrevChunkIndex;
- uint64_t mNextSampleOffset;
-
- void updateSampleInfo();
-
- DISALLOW_EVIL_CONSTRUCTORS(StaticTrackFragment);
-};
-
-} // namespace android
-
-#endif // TRACK_FRAGMENT_H_
diff --git a/media/libstagefright/mpeg2ts/ATSParser.cpp b/media/libstagefright/mpeg2ts/ATSParser.cpp
index d1afd8b..338e899 100644
--- a/media/libstagefright/mpeg2ts/ATSParser.cpp
+++ b/media/libstagefright/mpeg2ts/ATSParser.cpp
@@ -555,7 +555,9 @@ status_t ATSParser::Stream::parse(
}
#endif
- return OK;
+ if (!payload_unit_start_indicator) {
+ return OK;
+ }
}
mExpectedContinuityCounter = (continuity_counter + 1) & 0x0f;
diff --git a/media/libstagefright/mpeg2ts/Android.mk b/media/libstagefright/mpeg2ts/Android.mk
index c1a7a9d..c17a0b7 100644
--- a/media/libstagefright/mpeg2ts/Android.mk
+++ b/media/libstagefright/mpeg2ts/Android.mk
@@ -13,6 +13,8 @@ LOCAL_C_INCLUDES:= \
$(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE:= libstagefright_mpeg2ts
ifeq ($(TARGET_ARCH),arm)
diff --git a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
index 6dfaa94..021b640 100644
--- a/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
+++ b/media/libstagefright/mpeg2ts/AnotherPacketSource.cpp
@@ -26,6 +26,8 @@
#include <media/stagefright/MetaData.h>
#include <utils/Vector.h>
+#include <inttypes.h>
+
namespace android {
const int64_t kNearEOSMarkUs = 2000000ll; // 2 secs
@@ -186,7 +188,7 @@ void AnotherPacketSource::queueAccessUnit(const sp<ABuffer> &buffer) {
int64_t lastQueuedTimeUs;
CHECK(buffer->meta()->findInt64("timeUs", &lastQueuedTimeUs));
mLastQueuedTimeUs = lastQueuedTimeUs;
- ALOGV("queueAccessUnit timeUs=%lld us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
+ ALOGV("queueAccessUnit timeUs=%" PRIi64 " us (%.2f secs)", mLastQueuedTimeUs, mLastQueuedTimeUs / 1E6);
Mutex::Autolock autoLock(mLock);
mBuffers.push_back(buffer);
diff --git a/media/libstagefright/mpeg2ts/ESQueue.cpp b/media/libstagefright/mpeg2ts/ESQueue.cpp
index e9252cc..3c8f03e 100644
--- a/media/libstagefright/mpeg2ts/ESQueue.cpp
+++ b/media/libstagefright/mpeg2ts/ESQueue.cpp
@@ -265,7 +265,7 @@ status_t ElementaryStreamQueue::appendData(
if (startOffset > 0) {
ALOGI("found something resembling an H.264/MPEG syncword "
- "at offset %d",
+ "at offset %zd",
startOffset);
}
@@ -359,7 +359,7 @@ status_t ElementaryStreamQueue::appendData(
if (startOffset > 0) {
ALOGI("found something resembling an AC3 syncword at "
- "offset %d",
+ "offset %zd",
startOffset);
}
@@ -777,6 +777,12 @@ sp<ABuffer> ElementaryStreamQueue::dequeueAccessUnitH264() {
unsigned nalType = mBuffer->data()[pos.nalOffset] & 0x1f;
+ if (nalType == 6) {
+ sp<ABuffer> sei = new ABuffer(pos.nalSize);
+ memcpy(sei->data(), mBuffer->data() + pos.nalOffset, pos.nalSize);
+ accessUnit->meta()->setBuffer("sei", sei);
+ }
+
#if !LOG_NDEBUG
char tmp[128];
sprintf(tmp, "0x%02x", nalType);
diff --git a/media/libstagefright/omx/GraphicBufferSource.cpp b/media/libstagefright/omx/GraphicBufferSource.cpp
index 3fe9c23..16f6c58 100644
--- a/media/libstagefright/omx/GraphicBufferSource.cpp
+++ b/media/libstagefright/omx/GraphicBufferSource.cpp
@@ -29,6 +29,8 @@
#include <media/hardware/MetadataBufferType.h>
#include <ui/GraphicBuffer.h>
+#include <inttypes.h>
+
namespace android {
static const bool EXTRA_CHECK = true;
@@ -43,16 +45,21 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
mNumFramesAvailable(0),
mEndOfStream(false),
mEndOfStreamSent(false),
- mRepeatAfterUs(-1ll),
mMaxTimestampGapUs(-1ll),
mPrevOriginalTimeUs(-1ll),
mPrevModifiedTimeUs(-1ll),
+ mSkipFramesBeforeNs(-1ll),
+ mRepeatAfterUs(-1ll),
mRepeatLastFrameGeneration(0),
mRepeatLastFrameTimestamp(-1ll),
mLatestSubmittedBufferId(-1),
mLatestSubmittedBufferFrameNum(0),
mLatestSubmittedBufferUseCount(0),
- mRepeatBufferDeferred(false) {
+ mRepeatBufferDeferred(false),
+ mTimePerCaptureUs(-1ll),
+ mTimePerFrameUs(-1ll),
+ mPrevCaptureUs(-1ll),
+ mPrevFrameUs(-1ll) {
ALOGV("GraphicBufferSource w=%u h=%u c=%u",
bufferWidth, bufferHeight, bufferCount);
@@ -65,13 +72,13 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
String8 name("GraphicBufferSource");
- mBufferQueue = new BufferQueue();
- mBufferQueue->setConsumerName(name);
- mBufferQueue->setDefaultBufferSize(bufferWidth, bufferHeight);
- mBufferQueue->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
+ BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+ mConsumer->setConsumerName(name);
+ mConsumer->setDefaultBufferSize(bufferWidth, bufferHeight);
+ mConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_VIDEO_ENCODER |
GRALLOC_USAGE_HW_TEXTURE);
- mInitCheck = mBufferQueue->setMaxAcquiredBufferCount(bufferCount);
+ mInitCheck = mConsumer->setMaxAcquiredBufferCount(bufferCount);
if (mInitCheck != NO_ERROR) {
ALOGE("Unable to set BQ max acquired buffer count to %u: %d",
bufferCount, mInitCheck);
@@ -85,7 +92,7 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
wp<BufferQueue::ConsumerListener> listener = static_cast<BufferQueue::ConsumerListener*>(this);
sp<BufferQueue::ProxyConsumerListener> proxy = new BufferQueue::ProxyConsumerListener(listener);
- mInitCheck = mBufferQueue->consumerConnect(proxy, false);
+ mInitCheck = mConsumer->consumerConnect(proxy, false);
if (mInitCheck != NO_ERROR) {
ALOGE("Error connecting to BufferQueue: %s (%d)",
strerror(-mInitCheck), mInitCheck);
@@ -97,8 +104,8 @@ GraphicBufferSource::GraphicBufferSource(OMXNodeInstance* nodeInstance,
GraphicBufferSource::~GraphicBufferSource() {
ALOGV("~GraphicBufferSource");
- if (mBufferQueue != NULL) {
- status_t err = mBufferQueue->consumerDisconnect();
+ if (mConsumer != NULL) {
+ status_t err = mConsumer->consumerDisconnect();
if (err != NO_ERROR) {
ALOGW("consumerDisconnect failed: %d", err);
}
@@ -270,7 +277,7 @@ void GraphicBufferSource::codecBufferEmptied(OMX_BUFFERHEADERTYPE* header) {
if (id == mLatestSubmittedBufferId) {
CHECK_GT(mLatestSubmittedBufferUseCount--, 0);
} else {
- mBufferQueue->releaseBuffer(id, codecBuffer.mFrameNumber,
+ mConsumer->releaseBuffer(id, codecBuffer.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
}
} else {
@@ -339,7 +346,7 @@ void GraphicBufferSource::suspend(bool suspend) {
while (mNumFramesAvailable > 0) {
BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// shouldn't happen.
@@ -352,7 +359,7 @@ void GraphicBufferSource::suspend(bool suspend) {
--mNumFramesAvailable;
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
}
return;
@@ -389,7 +396,7 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
ALOGV("fillCodecBuffer_l: acquiring buffer, avail=%d",
mNumFramesAvailable);
BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == BufferQueue::NO_BUFFER_AVAILABLE) {
// shouldn't happen
ALOGW("fillCodecBuffer_l: frame was not available");
@@ -416,10 +423,21 @@ bool GraphicBufferSource::fillCodecBuffer_l() {
mBufferSlot[item.mBuf] = item.mGraphicBuffer;
}
- err = submitBuffer_l(item, cbi);
+ err = UNKNOWN_ERROR;
+
+ // only submit sample if start time is unspecified, or sample
+ // is queued after the specified start time
+ if (mSkipFramesBeforeNs < 0ll || item.mTimestamp >= mSkipFramesBeforeNs) {
+ // if start time is set, offset time stamp by start time
+ if (mSkipFramesBeforeNs > 0) {
+ item.mTimestamp -= mSkipFramesBeforeNs;
+ }
+ err = submitBuffer_l(item, cbi);
+ }
+
if (err != OK) {
ALOGV("submitBuffer_l failed, releasing bq buf %d", item.mBuf);
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, Fence::NO_FENCE);
} else {
ALOGV("buffer submitted (bq %d, cbi %d)", item.mBuf, cbi);
@@ -442,7 +460,7 @@ bool GraphicBufferSource::repeatLatestSubmittedBuffer_l() {
//
// To be on the safe side we try to release the buffer.
ALOGD("repeatLatestSubmittedBuffer_l: slot was NULL");
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
mLatestSubmittedBufferId,
mLatestSubmittedBufferFrameNum,
EGL_NO_DISPLAY,
@@ -496,7 +514,7 @@ void GraphicBufferSource::setLatestSubmittedBuffer_l(
if (mLatestSubmittedBufferId >= 0) {
if (mLatestSubmittedBufferUseCount == 0) {
- mBufferQueue->releaseBuffer(
+ mConsumer->releaseBuffer(
mLatestSubmittedBufferId,
mLatestSubmittedBufferFrameNum,
EGL_NO_DISPLAY,
@@ -550,7 +568,30 @@ status_t GraphicBufferSource::signalEndOfInputStream() {
int64_t GraphicBufferSource::getTimestamp(const BufferQueue::BufferItem &item) {
int64_t timeUs = item.mTimestamp / 1000;
- if (mMaxTimestampGapUs > 0ll) {
+ if (mTimePerCaptureUs > 0ll) {
+ // Time lapse or slow motion mode
+ if (mPrevCaptureUs < 0ll) {
+ // first capture
+ mPrevCaptureUs = timeUs;
+ mPrevFrameUs = timeUs;
+ } else {
+ // snap to nearest capture point
+ int64_t nFrames = (timeUs + mTimePerCaptureUs / 2 - mPrevCaptureUs)
+ / mTimePerCaptureUs;
+ if (nFrames <= 0) {
+ // skip this frame as it's too close to previous capture
+ ALOGV("skipping frame, timeUs %lld", timeUs);
+ return -1;
+ }
+ mPrevCaptureUs = mPrevCaptureUs + nFrames * mTimePerCaptureUs;
+ mPrevFrameUs += mTimePerFrameUs * nFrames;
+ }
+
+ ALOGV("timeUs %lld, captureUs %lld, frameUs %lld",
+ timeUs, mPrevCaptureUs, mPrevFrameUs);
+
+ return mPrevFrameUs;
+ } else if (mMaxTimestampGapUs > 0ll) {
/* Cap timestamp gap between adjacent frames to specified max
*
* In the scenario of cast mirroring, encoding could be suspended for
@@ -696,15 +737,15 @@ void GraphicBufferSource::onFrameAvailable() {
}
BufferQueue::BufferItem item;
- status_t err = mBufferQueue->acquireBuffer(&item, 0);
+ status_t err = mConsumer->acquireBuffer(&item, 0);
if (err == OK) {
// If this is the first time we're seeing this buffer, add it to our
// slot table.
if (item.mGraphicBuffer != NULL) {
- ALOGV("fillCodecBuffer_l: setting mBufferSlot %d", item.mBuf);
+ ALOGV("onFrameAvailable: setting mBufferSlot %d", item.mBuf);
mBufferSlot[item.mBuf] = item.mGraphicBuffer;
}
- mBufferQueue->releaseBuffer(item.mBuf, item.mFrameNumber,
+ mConsumer->releaseBuffer(item.mBuf, item.mFrameNumber,
EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence);
}
return;
@@ -724,13 +765,13 @@ void GraphicBufferSource::onFrameAvailable() {
void GraphicBufferSource::onBuffersReleased() {
Mutex::Autolock lock(mMutex);
- uint32_t slotMask;
- if (mBufferQueue->getReleasedBuffers(&slotMask) != NO_ERROR) {
+ uint64_t slotMask;
+ if (mConsumer->getReleasedBuffers(&slotMask) != NO_ERROR) {
ALOGW("onBuffersReleased: unable to get released buffer set");
- slotMask = 0xffffffff;
+ slotMask = 0xffffffffffffffffULL;
}
- ALOGV("onBuffersReleased: 0x%08x", slotMask);
+ ALOGV("onBuffersReleased: 0x%016" PRIx64, slotMask);
for (int i = 0; i < BufferQueue::NUM_BUFFER_SLOTS; i++) {
if ((slotMask & 0x01) != 0) {
@@ -740,6 +781,11 @@ void GraphicBufferSource::onBuffersReleased() {
}
}
+// BufferQueue::ConsumerListener callback
+void GraphicBufferSource::onSidebandStreamChanged() {
+ ALOG_ASSERT(false, "GraphicBufferSource can't consume sideband streams");
+}
+
status_t GraphicBufferSource::setRepeatPreviousFrameDelayUs(
int64_t repeatAfterUs) {
Mutex::Autolock autoLock(mMutex);
@@ -764,6 +810,27 @@ status_t GraphicBufferSource::setMaxTimestampGapUs(int64_t maxGapUs) {
return OK;
}
+
+void GraphicBufferSource::setSkipFramesBeforeUs(int64_t skipFramesBeforeUs) {
+ Mutex::Autolock autoLock(mMutex);
+
+ mSkipFramesBeforeNs =
+ (skipFramesBeforeUs > 0) ? (skipFramesBeforeUs * 1000) : -1ll;
+}
+
+status_t GraphicBufferSource::setTimeLapseUs(int64_t* data) {
+ Mutex::Autolock autoLock(mMutex);
+
+ if (mExecuting || data[0] <= 0ll || data[1] <= 0ll) {
+ return INVALID_OPERATION;
+ }
+
+ mTimePerFrameUs = data[0];
+ mTimePerCaptureUs = data[1];
+
+ return OK;
+}
+
void GraphicBufferSource::onMessageReceived(const sp<AMessage> &msg) {
switch (msg->what()) {
case kWhatRepeatLastFrame:
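The time-lapse branch added to getTimestamp() above snaps each capture timestamp to the nearest multiple of mTimePerCaptureUs past the previous capture and advances the emitted timestamp by mTimePerFrameUs per captured frame. A standalone restatement of that arithmetic, with hypothetical names, for illustration only:

#include <stdint.h>

// With timePerCaptureUs = 100000 (10 fps capture) and timePerFrameUs = 33333
// (~30 fps playback), captures arriving at 0, 103000 and 195000 us map to
// playback timestamps 0, 33333 and 66666 us.
struct TimeLapseState {
    int64_t prevCaptureUs = -1;
    int64_t prevFrameUs = -1;
};

static int64_t mapTimestamp(TimeLapseState &s, int64_t timeUs,
                            int64_t timePerCaptureUs, int64_t timePerFrameUs) {
    if (s.prevCaptureUs < 0) {
        s.prevCaptureUs = s.prevFrameUs = timeUs;      // first capture
        return s.prevFrameUs;
    }
    // snap to the nearest capture point
    int64_t nFrames =
            (timeUs + timePerCaptureUs / 2 - s.prevCaptureUs) / timePerCaptureUs;
    if (nFrames <= 0) {
        return -1;  // too close to the previous capture; caller drops the frame
    }
    s.prevCaptureUs += nFrames * timePerCaptureUs;
    s.prevFrameUs += nFrames * timePerFrameUs;
    return s.prevFrameUs;
}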
diff --git a/media/libstagefright/omx/GraphicBufferSource.h b/media/libstagefright/omx/GraphicBufferSource.h
index 3b0e454..a70cc1a 100644
--- a/media/libstagefright/omx/GraphicBufferSource.h
+++ b/media/libstagefright/omx/GraphicBufferSource.h
@@ -61,7 +61,7 @@ public:
// Returns the handle to the producer side of the BufferQueue. Buffers
// queued on this will be received by GraphicBufferSource.
sp<IGraphicBufferProducer> getIGraphicBufferProducer() const {
- return mBufferQueue;
+ return mProducer;
}
// This is called when OMX transitions to OMX_StateExecuting, which means
@@ -118,6 +118,17 @@ public:
// of suspension on input.
status_t setMaxTimestampGapUs(int64_t maxGapUs);
+ // Sets the time-lapse (or slow-motion) parameters.
+ // data[0] is the time (us) between two frames for playback
+ // data[1] is the time (us) between two frames for capture
+ // When set, the sample's timestamp is adjusted to advance at the playback
+ // frame rate, while the capture timestamp is snapped to the capture rate.
+ status_t setTimeLapseUs(int64_t* data);
+
+ // Sets the start time (us, in system time); samples queued before this
+ // time are dropped and not submitted to the encoder.
+ void setSkipFramesBeforeUs(int64_t startTimeUs);
+
protected:
// BufferQueue::ConsumerListener interface, called when a new frame of
// data is available. If we're executing and a codec buffer is
@@ -132,6 +143,11 @@ protected:
// set of mBufferSlot entries.
virtual void onBuffersReleased();
+ // BufferQueue::ConsumerListener interface, called when the client has
+ // changed the sideband stream. GraphicBufferSource doesn't handle sideband
+ // streams so this is a no-op (and should never be called).
+ virtual void onSidebandStreamChanged();
+
private:
// Keep track of codec input buffers. They may either be available
// (mGraphicBuffer == NULL) or in use by the codec.
@@ -194,8 +210,11 @@ private:
bool mSuspended;
- // We consume graphic buffers from this.
- sp<BufferQueue> mBufferQueue;
+ // Our BufferQueue interfaces. mProducer is passed to the producer through
+ // getIGraphicBufferProducer, and mConsumer is used internally to retrieve
+ // the buffers queued by the producer.
+ sp<IGraphicBufferProducer> mProducer;
+ sp<IGraphicBufferConsumer> mConsumer;
// Number of frames pending in BufferQueue that haven't yet been
// forwarded to the codec.
@@ -223,16 +242,17 @@ private:
enum {
kRepeatLastFrameCount = 10,
};
- int64_t mRepeatAfterUs;
- int64_t mMaxTimestampGapUs;
KeyedVector<int64_t, int64_t> mOriginalTimeUs;
+ int64_t mMaxTimestampGapUs;
int64_t mPrevOriginalTimeUs;
int64_t mPrevModifiedTimeUs;
+ int64_t mSkipFramesBeforeNs;
sp<ALooper> mLooper;
sp<AHandlerReflector<GraphicBufferSource> > mReflector;
+ int64_t mRepeatAfterUs;
int32_t mRepeatLastFrameGeneration;
int64_t mRepeatLastFrameTimestamp;
int32_t mRepeatLastFrameCount;
@@ -245,6 +265,12 @@ private:
// no codec buffer was available at the time.
bool mRepeatBufferDeferred;
+ // Time lapse / slow motion configuration
+ int64_t mTimePerCaptureUs;
+ int64_t mTimePerFrameUs;
+ int64_t mPrevCaptureUs;
+ int64_t mPrevFrameUs;
+
void onMessageReceived(const sp<AMessage> &msg);
DISALLOW_EVIL_CONSTRUCTORS(GraphicBufferSource);
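With the BufferQueue split above, the producer end returned by getIGraphicBufferProducer() is what a client wraps in a Surface to queue frames into, while mConsumer stays internal for acquiring them. A small hypothetical helper illustrating the client side; the Surface(IGraphicBufferProducer) constructor is the same one the tests later in this change use:

#include <gui/Surface.h>
#include "GraphicBufferSource.h"

// Wrap the producer half of GraphicBufferSource's BufferQueue in a Surface.
static android::sp<android::Surface> makeInputSurface(
        const android::sp<android::GraphicBufferSource> &source) {
    return new android::Surface(source->getIGraphicBufferProducer());
}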
diff --git a/media/libstagefright/omx/OMX.cpp b/media/libstagefright/omx/OMX.cpp
index 7819fc3..22b12d9 100644
--- a/media/libstagefright/omx/OMX.cpp
+++ b/media/libstagefright/omx/OMX.cpp
@@ -472,8 +472,6 @@ OMX_ERRORTYPE OMX::OnFillBufferDone(
msg.u.extended_buffer_data.range_length = pBuffer->nFilledLen;
msg.u.extended_buffer_data.flags = pBuffer->nFlags;
msg.u.extended_buffer_data.timestamp = pBuffer->nTimeStamp;
- msg.u.extended_buffer_data.platform_private = pBuffer->pPlatformPrivate;
- msg.u.extended_buffer_data.data_ptr = pBuffer->pBuffer;
findDispatcher(node)->post(msg);
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index c64dcf0..d6ab109 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -855,6 +855,8 @@ status_t OMXNodeInstance::setInternalOption(
case IOMX::INTERNAL_OPTION_SUSPEND:
case IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY:
case IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP:
+ case IOMX::INTERNAL_OPTION_START_TIME:
+ case IOMX::INTERNAL_OPTION_TIME_LAPSE:
{
const sp<GraphicBufferSource> &bufferSource =
getGraphicBufferSource();
@@ -879,7 +881,8 @@ status_t OMXNodeInstance::setInternalOption(
int64_t delayUs = *(int64_t *)data;
return bufferSource->setRepeatPreviousFrameDelayUs(delayUs);
- } else {
+ } else if (type ==
+ IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP) {
if (size != sizeof(int64_t)) {
return INVALID_OPERATION;
}
@@ -887,6 +890,20 @@ status_t OMXNodeInstance::setInternalOption(
int64_t maxGapUs = *(int64_t *)data;
return bufferSource->setMaxTimestampGapUs(maxGapUs);
+ } else if (type == IOMX::INTERNAL_OPTION_START_TIME) {
+ if (size != sizeof(int64_t)) {
+ return INVALID_OPERATION;
+ }
+
+ int64_t skipFramesBeforeUs = *(int64_t *)data;
+
+ bufferSource->setSkipFramesBeforeUs(skipFramesBeforeUs);
+ } else { // IOMX::INTERNAL_OPTION_TIME_LAPSE
+ if (size != sizeof(int64_t) * 2) {
+ return INVALID_OPERATION;
+ }
+
+ bufferSource->setTimeLapseUs((int64_t *)data);
}
return OK;
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index d49e50b..9b6958a 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -42,6 +42,7 @@ static const struct {
{ "OMX.google.amrwb.encoder", "amrwbenc", "audio_encoder.amrwb" },
{ "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" },
{ "OMX.google.h264.encoder", "h264enc", "video_encoder.avc" },
+ { "OMX.google.hevc.decoder", "hevcdec", "video_decoder.hevc" },
{ "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
{ "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
{ "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },
@@ -50,6 +51,7 @@ static const struct {
{ "OMX.google.mpeg4.encoder", "mpeg4enc", "video_encoder.mpeg4" },
{ "OMX.google.mp3.decoder", "mp3dec", "audio_decoder.mp3" },
{ "OMX.google.vorbis.decoder", "vorbisdec", "audio_decoder.vorbis" },
+ { "OMX.google.opus.decoder", "opusdec", "audio_decoder.opus" },
{ "OMX.google.vp8.decoder", "vpxdec", "video_decoder.vp8" },
{ "OMX.google.vp9.decoder", "vpxdec", "video_decoder.vp9" },
{ "OMX.google.vp8.encoder", "vpxenc", "video_encoder.vp8" },
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index eb9fcf7..1c383f7 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -183,12 +183,12 @@ OMX_ERRORTYPE SoftVideoDecoderOMXComponent::internalGetParameter(
return OMX_ErrorUnsupportedIndex;
}
- if (index >= mNumProfileLevels) {
+ if (profileLevel->nProfileIndex >= mNumProfileLevels) {
return OMX_ErrorNoMore;
}
- profileLevel->eProfile = mProfileLevels[index].mProfile;
- profileLevel->eLevel = mProfileLevels[index].mLevel;
+ profileLevel->eProfile = mProfileLevels[profileLevel->nProfileIndex].mProfile;
+ profileLevel->eLevel = mProfileLevels[profileLevel->nProfileIndex].mLevel;
return OMX_ErrorNone;
}
diff --git a/media/libstagefright/omx/tests/Android.mk b/media/libstagefright/omx/tests/Android.mk
index e368134..447b29e 100644
--- a/media/libstagefright/omx/tests/Android.mk
+++ b/media/libstagefright/omx/tests/Android.mk
@@ -11,6 +11,8 @@ LOCAL_C_INCLUDES := \
$(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/native/include/media/openmax
+LOCAL_CFLAGS += -Werror
+
LOCAL_MODULE := omx_tests
LOCAL_MODULE_TAGS := tests
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 44e4f9d..f4dfd6b 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -26,6 +26,7 @@
#include <binder/ProcessState.h>
#include <binder/IServiceManager.h>
#include <binder/MemoryDealer.h>
+#include <media/IMediaHTTPService.h>
#include <media/IMediaPlayerService.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/ALooper.h>
@@ -242,7 +243,8 @@ private:
};
static sp<MediaExtractor> CreateExtractorFromURI(const char *uri) {
- sp<DataSource> source = DataSource::CreateFromURI(uri);
+ sp<DataSource> source =
+ DataSource::CreateFromURI(NULL /* httpService */, uri);
if (source == NULL) {
return NULL;
@@ -461,6 +463,7 @@ static const char *GetMimeFromComponentRole(const char *componentRole) {
{ "audio_decoder.aac", "audio/mp4a-latm" },
{ "audio_decoder.mp3", "audio/mpeg" },
{ "audio_decoder.vorbis", "audio/vorbis" },
+ { "audio_decoder.opus", "audio/opus" },
{ "audio_decoder.g711alaw", MEDIA_MIMETYPE_AUDIO_G711_ALAW },
{ "audio_decoder.g711mlaw", MEDIA_MIMETYPE_AUDIO_G711_MLAW },
};
@@ -493,6 +496,7 @@ static const char *GetURLForMime(const char *mime) {
{ "audio/mpeg",
"file:///sdcard/media_api/music/MP3_48KHz_128kbps_s_1_17_CBR.mp3" },
{ "audio/vorbis", NULL },
+ { "audio/opus", NULL },
{ "video/x-vnd.on2.vp8",
"file:///sdcard/media_api/video/big-buck-bunny_trailer.webm" },
{ MEDIA_MIMETYPE_AUDIO_G711_ALAW, "file:///sdcard/M1F1-Alaw-AFsp.wav" },
diff --git a/media/libstagefright/rtsp/APacketSource.cpp b/media/libstagefright/rtsp/APacketSource.cpp
index 462c384..09f52bc 100644
--- a/media/libstagefright/rtsp/APacketSource.cpp
+++ b/media/libstagefright/rtsp/APacketSource.cpp
@@ -23,7 +23,7 @@
#include "ARawAudioAssembler.h"
#include "ASessionDescription.h"
-#include "avc_utils.h"
+#include "include/avc_utils.h"
#include <ctype.h>
diff --git a/media/libstagefright/rtsp/ARTSPConnection.cpp b/media/libstagefright/rtsp/ARTSPConnection.cpp
index 492bd4a..f25539c 100644
--- a/media/libstagefright/rtsp/ARTSPConnection.cpp
+++ b/media/libstagefright/rtsp/ARTSPConnection.cpp
@@ -33,7 +33,7 @@
#include <openssl/md5.h>
#include <sys/socket.h>
-#include "HTTPBase.h"
+#include "include/HTTPBase.h"
namespace android {
@@ -239,7 +239,7 @@ void ARTSPConnection::onConnect(const sp<AMessage> &msg) {
// right here, since we currently have no way of asking the user
// for this information.
- ALOGE("Malformed rtsp url <URL suppressed>");
+ ALOGE("Malformed rtsp url %s", uriDebugString(url).c_str());
reply->setInt32("result", ERROR_MALFORMED);
reply->post();
diff --git a/media/libstagefright/rtsp/Android.mk b/media/libstagefright/rtsp/Android.mk
index 36e8d82..d60dc2f 100644
--- a/media/libstagefright/rtsp/Android.mk
+++ b/media/libstagefright/rtsp/Android.mk
@@ -20,7 +20,7 @@ LOCAL_SRC_FILES:= \
SDPLoader.cpp \
LOCAL_C_INCLUDES:= \
- $(TOP)/frameworks/av/media/libstagefright/include \
+ $(TOP)/frameworks/av/media/libstagefright \
$(TOP)/frameworks/native/include/media/openmax \
$(TOP)/external/openssl/include
@@ -30,6 +30,8 @@ ifeq ($(TARGET_ARCH),arm)
LOCAL_CFLAGS += -Wno-psabi
endif
+LOCAL_CFLAGS += -Werror
+
LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/rtsp/MyHandler.h b/media/libstagefright/rtsp/MyHandler.h
index e7580c2..f3dfc59 100644
--- a/media/libstagefright/rtsp/MyHandler.h
+++ b/media/libstagefright/rtsp/MyHandler.h
@@ -19,7 +19,11 @@
#define MY_HANDLER_H_
//#define LOG_NDEBUG 0
+
+#ifndef LOG_TAG
#define LOG_TAG "MyHandler"
+#endif
+
#include <utils/Log.h>
#include "APacketSource.h"
@@ -42,6 +46,12 @@
#include "HTTPBase.h"
+#if LOG_NDEBUG
+#define UNUSED_UNLESS_VERBOSE(x) (void)(x)
+#else
+#define UNUSED_UNLESS_VERBOSE(x)
+#endif
+
// If no access units are received within 10 secs, assume that the rtp
// stream has ended and signal end of stream.
static int64_t kAccessUnitTimeoutUs = 10000000ll;
@@ -178,7 +188,7 @@ struct MyHandler : public AHandler {
mConn->connect(mOriginalSessionURL.c_str(), reply);
}
- AString getControlURL(sp<ASessionDescription> desc) {
+ AString getControlURL() {
AString sessionLevelControlURL;
if (mSessionDesc->findAttribute(
0,
@@ -556,7 +566,7 @@ struct MyHandler : public AHandler {
mBaseURL = tmp;
}
- mControlURL = getControlURL(mSessionDesc);
+ mControlURL = getControlURL();
if (mSessionDesc->countTracks() < 2) {
// There's no actual tracks in this session.
@@ -602,7 +612,7 @@ struct MyHandler : public AHandler {
mSeekable = !isLiveStream(mSessionDesc);
- mControlURL = getControlURL(mSessionDesc);
+ mControlURL = getControlURL();
if (mSessionDesc->countTracks() < 2) {
// There's no actual tracks in this session.
@@ -1816,6 +1826,8 @@ private:
bool addMediaTimestamp(
int32_t trackIndex, const TrackInfo *track,
const sp<ABuffer> &accessUnit) {
+ UNUSED_UNLESS_VERBOSE(trackIndex);
+
uint32_t rtpTime;
CHECK(accessUnit->meta()->findInt32(
"rtp-time", (int32_t *)&rtpTime));
diff --git a/media/libstagefright/rtsp/SDPLoader.cpp b/media/libstagefright/rtsp/SDPLoader.cpp
index 89ff17d..424badf 100644
--- a/media/libstagefright/rtsp/SDPLoader.cpp
+++ b/media/libstagefright/rtsp/SDPLoader.cpp
@@ -18,34 +18,30 @@
#define LOG_TAG "SDPLoader"
#include <utils/Log.h>
-#include "SDPLoader.h"
+#include "include/SDPLoader.h"
#include "ASessionDescription.h"
-#include "HTTPBase.h"
+#include <media/IMediaHTTPConnection.h>
+#include <media/IMediaHTTPService.h>
+#include <media/stagefright/MediaHTTP.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/Utils.h>
#define DEFAULT_SDP_SIZE 100000
namespace android {
-SDPLoader::SDPLoader(const sp<AMessage> &notify, uint32_t flags, bool uidValid, uid_t uid)
+SDPLoader::SDPLoader(
+ const sp<AMessage> &notify,
+ uint32_t flags,
+ const sp<IMediaHTTPService> &httpService)
: mNotify(notify),
mFlags(flags),
- mUIDValid(uidValid),
- mUID(uid),
mNetLooper(new ALooper),
mCancelled(false),
- mHTTPDataSource(
- HTTPBase::Create(
- (mFlags & kFlagIncognito)
- ? HTTPBase::kFlagIncognito
- : 0)) {
- if (mUIDValid) {
- mHTTPDataSource->setUID(mUID);
- }
-
+ mHTTPDataSource(new MediaHTTP(httpService->makeHTTPConnection())) {
mNetLooper->setName("sdp net");
mNetLooper->start(false /* runOnCallingThread */,
false /* canCallJava */,
@@ -94,11 +90,7 @@ void SDPLoader::onLoad(const sp<AMessage> &msg) {
KeyedVector<String8, String8> *headers = NULL;
msg->findPointer("headers", (void **)&headers);
- if (!(mFlags & kFlagIncognito)) {
- ALOGV("onLoad '%s'", url.c_str());
- } else {
- ALOGI("onLoad <URL suppressed>");
- }
+ ALOGV("onLoad %s", uriDebugString(url, mFlags & kFlagIncognito).c_str());
if (!mCancelled) {
err = mHTTPDataSource->connect(url.c_str(), headers);
diff --git a/media/libstagefright/tests/SurfaceMediaSource_test.cpp b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
index 49ffcd6..fd889f9 100644
--- a/media/libstagefright/tests/SurfaceMediaSource_test.cpp
+++ b/media/libstagefright/tests/SurfaceMediaSource_test.cpp
@@ -35,7 +35,6 @@
#include <gui/SurfaceComposerClient.h>
#include <binder/ProcessState.h>
-#include <ui/FramebufferNativeWindow.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaBufferGroup.h>
@@ -110,7 +109,7 @@ protected:
} else {
ALOGV("No actual display. Choosing EGLSurface based on SurfaceMediaSource");
sp<IGraphicBufferProducer> sms = (new SurfaceMediaSource(
- getSurfaceWidth(), getSurfaceHeight()))->getBufferQueue();
+ getSurfaceWidth(), getSurfaceHeight()))->getProducer();
sp<Surface> stc = new Surface(sms);
sp<ANativeWindow> window = stc;
@@ -361,9 +360,7 @@ protected:
virtual void SetUp() {
android::ProcessState::self()->startThreadPool();
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
-
- // Manual cast is required to avoid constructor ambiguity
- mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(mSMS->getProducer());
mANW = mSTC;
}
@@ -398,7 +395,7 @@ protected:
ALOGV("SMS-GLTest::SetUp()");
android::ProcessState::self()->startThreadPool();
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
- mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(mSMS->getProducer());
mANW = mSTC;
// Doing the setup related to the GL Side
@@ -527,7 +524,8 @@ void SurfaceMediaSourceTest::oneBufferPass(int width, int height ) {
}
// Dequeuing and queuing the buffer without really filling it in.
-void SurfaceMediaSourceTest::oneBufferPassNoFill(int width, int height ) {
+void SurfaceMediaSourceTest::oneBufferPassNoFill(
+ int /* width */, int /* height */) {
ANativeWindowBuffer* anb;
ASSERT_EQ(NO_ERROR, native_window_dequeue_buffer_and_wait(mANW.get(), &anb));
ASSERT_TRUE(anb != NULL);
@@ -746,9 +744,8 @@ TEST_F(SurfaceMediaSourceTest, DISABLED_EncodingFromCpuYV12BufferNpotWriteMediaS
CHECK(fd >= 0);
sp<MediaRecorder> mr = SurfaceMediaSourceGLTest::setUpMediaRecorder(fd,
- VIDEO_SOURCE_GRALLOC_BUFFER,
- OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth,
- mYuvTexHeight, 30);
+ VIDEO_SOURCE_SURFACE, OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264,
+ mYuvTexWidth, mYuvTexHeight, 30);
// get the reference to the surfacemediasource living in
// mediaserver that is created by stagefrightrecorder
sp<IGraphicBufferProducer> iST = mr->querySurfaceMediaSourceFromMediaServer();
@@ -783,7 +780,7 @@ TEST_F(SurfaceMediaSourceGLTest, ChooseAndroidRecordableEGLConfigDummyWriter) {
ALOGV("Verify creating a surface w/ right config + dummy writer*********");
mSMS = new SurfaceMediaSource(mYuvTexWidth, mYuvTexHeight);
- mSTC = new Surface(static_cast<sp<IGraphicBufferProducer> >( mSMS->getBufferQueue()));
+ mSTC = new Surface(mSMS->getProducer());
mANW = mSTC;
DummyRecorder writer(mSMS);
@@ -880,7 +877,7 @@ TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaSameImageEachBufNpotWrite) {
}
CHECK(fd >= 0);
- sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER,
+ sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
// get the reference to the surfacemediasource living in
@@ -923,7 +920,7 @@ TEST_F(SurfaceMediaSourceGLTest, EncodingFromGLRgbaDiffImageEachBufNpotWrite) {
}
CHECK(fd >= 0);
- sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_GRALLOC_BUFFER,
+ sp<MediaRecorder> mr = setUpMediaRecorder(fd, VIDEO_SOURCE_SURFACE,
OUTPUT_FORMAT_MPEG_4, VIDEO_ENCODER_H264, mYuvTexWidth, mYuvTexHeight, 30);
// get the reference to the surfacemediasource living in
diff --git a/media/libstagefright/timedtext/TimedTextDriver.cpp b/media/libstagefright/timedtext/TimedTextDriver.cpp
index 12fd7f4..71aa21e 100644
--- a/media/libstagefright/timedtext/TimedTextDriver.cpp
+++ b/media/libstagefright/timedtext/TimedTextDriver.cpp
@@ -20,6 +20,7 @@
#include <binder/IPCThreadState.h>
+#include <media/IMediaHTTPService.h>
#include <media/mediaplayer.h>
#include <media/MediaPlayerInterface.h>
#include <media/stagefright/DataSource.h>
@@ -40,9 +41,11 @@
namespace android {
TimedTextDriver::TimedTextDriver(
- const wp<MediaPlayerBase> &listener)
+ const wp<MediaPlayerBase> &listener,
+ const sp<IMediaHTTPService> &httpService)
: mLooper(new ALooper),
mListener(listener),
+ mHTTPService(httpService),
mState(UNINITIALIZED),
mCurrentTrackIndex(UINT_MAX) {
mLooper->setName("TimedTextDriver");
@@ -207,7 +210,7 @@ status_t TimedTextDriver::addOutOfBandTextSource(
}
sp<DataSource> dataSource =
- DataSource::CreateFromURI(uri);
+ DataSource::CreateFromURI(mHTTPService, uri);
return createOutOfBandTextSource(trackIndex, mimeType, dataSource);
}
diff --git a/media/libstagefright/timedtext/test/Android.mk b/media/libstagefright/timedtext/test/Android.mk
index a5e7ba2..9a9fde2 100644
--- a/media/libstagefright/timedtext/test/Android.mk
+++ b/media/libstagefright/timedtext/test/Android.mk
@@ -2,7 +2,6 @@ LOCAL_PATH:= $(call my-dir)
# ================================================================
# Unit tests for libstagefright_timedtext
-# See also /development/testrunner/test_defs.xml
# ================================================================
# ================================================================
@@ -18,10 +17,13 @@ LOCAL_SRC_FILES := TimedTextSRTSource_test.cpp
LOCAL_C_INCLUDES := \
$(TOP)/external/expat/lib \
- $(TOP)/frameworks/base/media/libstagefright/timedtext
+ $(TOP)/frameworks/av/media/libstagefright/timedtext
LOCAL_SHARED_LIBRARIES := \
+ libbinder \
libexpat \
- libstagefright
+ libstagefright \
+ libstagefright_foundation \
+ libutils
include $(BUILD_NATIVE_TEST)
diff --git a/media/libstagefright/webm/Android.mk b/media/libstagefright/webm/Android.mk
new file mode 100644
index 0000000..7081463
--- /dev/null
+++ b/media/libstagefright/webm/Android.mk
@@ -0,0 +1,23 @@
+LOCAL_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_CPPFLAGS += -D__STDINT_LIMITS \
+ -Werror
+
+LOCAL_SRC_FILES:= EbmlUtil.cpp \
+ WebmElement.cpp \
+ WebmFrame.cpp \
+ WebmFrameThread.cpp \
+ WebmWriter.cpp
+
+
+LOCAL_C_INCLUDES += $(TOP)/frameworks/av/include
+
+LOCAL_SHARED_LIBRARIES += libstagefright_foundation \
+ libstagefright \
+ libutils \
+ liblog
+
+LOCAL_MODULE:= libstagefright_webm
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/media/libstagefright/webm/EbmlUtil.cpp b/media/libstagefright/webm/EbmlUtil.cpp
new file mode 100644
index 0000000..449fec6
--- /dev/null
+++ b/media/libstagefright/webm/EbmlUtil.cpp
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <stdint.h>
+
+namespace {
+
+// Table for Seal's algorithm for Number of Trailing Zeros. Hacker's Delight
+// online, Figure 5-18 (http://www.hackersdelight.org/revisions.pdf)
+// The entries whose value is -1 are never referenced.
+int NTZ_TABLE[] = {
+ 32, 0, 1, 12, 2, 6, -1, 13, 3, -1, 7, -1, -1, -1, -1, 14,
+ 10, 4, -1, -1, 8, -1, -1, 25, -1, -1, -1, -1, -1, 21, 27, 15,
+ 31, 11, 5, -1, -1, -1, -1, -1, 9, -1, -1, 24, -1, -1, 20, 26,
+ 30, -1, -1, -1, -1, 23, -1, 19, 29, -1, 22, 18, 28, 17, 16, -1
+};
+
+int numberOfTrailingZeros32(int32_t i) {
+ uint32_t u = (i & -i) * 0x0450FBAF;
+ return NTZ_TABLE[(u) >> 26];
+}
+
+uint64_t highestOneBit(uint64_t n) {
+ n |= (n >> 1);
+ n |= (n >> 2);
+ n |= (n >> 4);
+ n |= (n >> 8);
+ n |= (n >> 16);
+ n |= (n >> 32);
+ return n - (n >> 1);
+}
+
+uint64_t _powerOf2(uint64_t u) {
+ uint64_t powerOf2 = highestOneBit(u);
+ return powerOf2 ? powerOf2 : 1;
+}
+
+// Based on Long.numberOfTrailingZeros in Long.java
+int numberOfTrailingZeros(uint64_t u) {
+ int32_t low = u;
+ return low != 0 ? numberOfTrailingZeros32(low)
+ : 32 + numberOfTrailingZeros32((int32_t) (u >> 32));
+}
+}
+
+namespace webm {
+
+// Encode the id and/or size of an EBML element into bytes by setting a leading length descriptor bit:
+//
+// 1xxxxxxx - 1-byte values
+// 01xxxxxx xxxxxxxx -
+// 001xxxxx xxxxxxxx xxxxxxxx -
+// 0001xxxx xxxxxxxx xxxxxxxx xxxxxxxx - ...
+// 00001xxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 000001xx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 0000001x xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 00000001 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - 8-byte values
+//
+// This function uses the fewest bytes possible.
+uint64_t encodeUnsigned(uint64_t u) {
+ uint64_t powerOf2 = _powerOf2(u);
+ if (u + 1 == powerOf2 << 1)
+ powerOf2 <<= 1;
+ int shiftWidth = (7 + numberOfTrailingZeros(powerOf2)) / 7 * 7;
+ uint64_t lengthDescriptor = 1ULL << shiftWidth;  // 64-bit shift; sizes may need up to 8 bytes
+ return lengthDescriptor | u;
+}
+
+// Like above but pads the input value with leading zeros up to the specified width. The length
+// descriptor is calculated based on width.
+uint64_t encodeUnsigned(uint64_t u, int width) {
+ int shiftWidth = 7 * width;
+ uint64_t lengthDescriptor = 1;
+ lengthDescriptor <<= shiftWidth;
+ return lengthDescriptor | u;
+}
+
+// Calculate the length of an EBML coded id or size from its length descriptor.
+int sizeOf(uint64_t u) {
+ uint64_t powerOf2 = _powerOf2(u);
+ int unsignedLength = numberOfTrailingZeros(powerOf2) / 8 + 1;
+ return unsignedLength;
+}
+
+// Serialize an EBML coded id or size in big-endian order.
+int serializeCodedUnsigned(uint64_t u, uint8_t* bary) {
+ int unsignedLength = sizeOf(u);
+ for (int i = unsignedLength - 1; i >= 0; i--) {
+ bary[i] = u & 0xff;
+ u >>= 8;
+ }
+ return unsignedLength;
+}
+
+}
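A worked example of the helpers above, assuming EbmlUtil.h is on the include path and EbmlUtil.cpp is linked in. The value 0x3FFE needs 14 data bits, so encodeUnsigned() picks the two-byte form with the 01 length descriptor, giving 0x7FFE:

#include <stdio.h>
#include <stdint.h>
#include "EbmlUtil.h"

int main() {
    uint64_t coded = webm::encodeUnsigned(0x3FFE);     // 0x7FFE
    uint8_t buf[8];
    int n = webm::serializeCodedUnsigned(coded, buf);  // big-endian, n == 2
    printf("%d bytes:", n);
    for (int i = 0; i < n; i++) {
        printf(" %02x", buf[i]);
    }
    printf("\n");                                      // prints "2 bytes: 7f fe"
    return 0;
}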
diff --git a/media/libstagefright/webm/EbmlUtil.h b/media/libstagefright/webm/EbmlUtil.h
new file mode 100644
index 0000000..eb9c37c
--- /dev/null
+++ b/media/libstagefright/webm/EbmlUtil.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef EBMLUTIL_H_
+#define EBMLUTIL_H_
+
+#include <stdint.h>
+
+namespace webm {
+
+// Encode the id and/or size of an EBML element into bytes by setting a leading length descriptor bit:
+//
+// 1xxxxxxx - 1-byte values
+// 01xxxxxx xxxxxxxx -
+// 001xxxxx xxxxxxxx xxxxxxxx -
+// 0001xxxx xxxxxxxx xxxxxxxx xxxxxxxx - ...
+// 00001xxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 000001xx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 0000001x xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx -
+// 00000001 xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx xxxxxxxx - 8-byte values
+//
+// This function uses the fewest bytes possible.
+uint64_t encodeUnsigned(uint64_t u);
+
+// Like above but pads the input value with leading zeros up to the specified width. The length
+// descriptor is calculated based on width.
+uint64_t encodeUnsigned(uint64_t u, int width);
+
+// Serialize an EBML coded id or size in big-endian order.
+int serializeCodedUnsigned(uint64_t u, uint8_t* bary);
+
+// Calculate the length of an EBML coded id or size from its length descriptor.
+int sizeOf(uint64_t u);
+
+}
+
+#endif /* EBMLUTIL_H_ */
diff --git a/media/libstagefright/webm/LinkedBlockingQueue.h b/media/libstagefright/webm/LinkedBlockingQueue.h
new file mode 100644
index 0000000..0b6a9a1
--- /dev/null
+++ b/media/libstagefright/webm/LinkedBlockingQueue.h
@@ -0,0 +1,79 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef LINKEDBLOCKINGQUEUE_H_
+#define LINKEDBLOCKINGQUEUE_H_
+
+#include <utils/List.h>
+#include <utils/Mutex.h>
+#include <utils/Condition.h>
+#include <media/stagefright/foundation/ABase.h>  // for DISALLOW_EVIL_CONSTRUCTORS
+
+namespace android {
+
+template<typename T>
+class LinkedBlockingQueue {
+ List<T> mList;
+ Mutex mLock;
+ Condition mContentAvailableCondition;
+
+ T front(bool remove) {
+ Mutex::Autolock autolock(mLock);
+ while (mList.empty()) {
+ mContentAvailableCondition.wait(mLock);
+ }
+ T e = *(mList.begin());
+ if (remove) {
+ mList.erase(mList.begin());
+ }
+ return e;
+ }
+
+ DISALLOW_EVIL_CONSTRUCTORS(LinkedBlockingQueue);
+
+public:
+ LinkedBlockingQueue() {
+ }
+
+ ~LinkedBlockingQueue() {
+ }
+
+ bool empty() {
+ Mutex::Autolock autolock(mLock);
+ return mList.empty();
+ }
+
+ void clear() {
+ Mutex::Autolock autolock(mLock);
+ mList.clear();
+ }
+
+ T peek() {
+ return front(false);
+ }
+
+ T take() {
+ return front(true);
+ }
+
+ void push(T e) {
+ Mutex::Autolock autolock(mLock);
+ mList.push_back(e);
+ mContentAvailableCondition.signal();
+ }
+};
+
+} /* namespace android */
+#endif /* LINKEDBLOCKINGQUEUE_H_ */
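A usage sketch of the queue above (names hypothetical): a producer thread push()es items while a consumer blocks in take(); pushing a NULL sentinel is one way to stop the consumer, analogous to the EOS frame used by the WebM threads later in this change.

#include <media/stagefright/foundation/ABuffer.h>
#include "LinkedBlockingQueue.h"

using namespace android;

static void consume(LinkedBlockingQueue<sp<ABuffer> > &queue) {
    for (;;) {
        sp<ABuffer> buffer = queue.take();  // blocks while the queue is empty
        if (buffer == NULL) {
            break;                          // NULL pushed by the producer as EOS
        }
        // ... process buffer->data(), buffer->size()
    }
}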
diff --git a/media/libstagefright/webm/WebmConstants.h b/media/libstagefright/webm/WebmConstants.h
new file mode 100644
index 0000000..c53f458
--- /dev/null
+++ b/media/libstagefright/webm/WebmConstants.h
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMCONSTANTS_H_
+#define WEBMCONSTANTS_H_
+
+#include <stdint.h>
+
+namespace webm {
+
+const int kMinEbmlVoidSize = 2;
+const int64_t kMaxMetaSeekSize = 64;
+const int64_t kMkvUnknownLength = 0x01ffffffffffffffl;
+
+// EBML element id's from http://matroska.org/technical/specs/index.html
+enum Mkv {
+ kMkvEbml = 0x1A45DFA3,
+ kMkvEbmlVersion = 0x4286,
+ kMkvEbmlReadVersion = 0x42F7,
+ kMkvEbmlMaxIdlength = 0x42F2,
+ kMkvEbmlMaxSizeLength = 0x42F3,
+ kMkvDocType = 0x4282,
+ kMkvDocTypeVersion = 0x4287,
+ kMkvDocTypeReadVersion = 0x4285,
+ kMkvVoid = 0xEC,
+ kMkvSignatureSlot = 0x1B538667,
+ kMkvSignatureAlgo = 0x7E8A,
+ kMkvSignatureHash = 0x7E9A,
+ kMkvSignaturePublicKey = 0x7EA5,
+ kMkvSignature = 0x7EB5,
+ kMkvSignatureElements = 0x7E5B,
+ kMkvSignatureElementList = 0x7E7B,
+ kMkvSignedElement = 0x6532,
+ kMkvSegment = 0x18538067,
+ kMkvSeekHead = 0x114D9B74,
+ kMkvSeek = 0x4DBB,
+ kMkvSeekId = 0x53AB,
+ kMkvSeekPosition = 0x53AC,
+ kMkvInfo = 0x1549A966,
+ kMkvTimecodeScale = 0x2AD7B1,
+ kMkvSegmentDuration = 0x4489,
+ kMkvDateUtc = 0x4461,
+ kMkvMuxingApp = 0x4D80,
+ kMkvWritingApp = 0x5741,
+ kMkvCluster = 0x1F43B675,
+ kMkvTimecode = 0xE7,
+ kMkvPrevSize = 0xAB,
+ kMkvBlockGroup = 0xA0,
+ kMkvBlock = 0xA1,
+ kMkvBlockAdditions = 0x75A1,
+ kMkvBlockMore = 0xA6,
+ kMkvBlockAddId = 0xEE,
+ kMkvBlockAdditional = 0xA5,
+ kMkvBlockDuration = 0x9B,
+ kMkvReferenceBlock = 0xFB,
+ kMkvLaceNumber = 0xCC,
+ kMkvSimpleBlock = 0xA3,
+ kMkvTracks = 0x1654AE6B,
+ kMkvTrackEntry = 0xAE,
+ kMkvTrackNumber = 0xD7,
+ kMkvTrackUid = 0x73C5,
+ kMkvTrackType = 0x83,
+ kMkvFlagEnabled = 0xB9,
+ kMkvFlagDefault = 0x88,
+ kMkvFlagForced = 0x55AA,
+ kMkvFlagLacing = 0x9C,
+ kMkvDefaultDuration = 0x23E383,
+ kMkvMaxBlockAdditionId = 0x55EE,
+ kMkvName = 0x536E,
+ kMkvLanguage = 0x22B59C,
+ kMkvCodecId = 0x86,
+ kMkvCodecPrivate = 0x63A2,
+ kMkvCodecName = 0x258688,
+ kMkvVideo = 0xE0,
+ kMkvFlagInterlaced = 0x9A,
+ kMkvStereoMode = 0x53B8,
+ kMkvAlphaMode = 0x53C0,
+ kMkvPixelWidth = 0xB0,
+ kMkvPixelHeight = 0xBA,
+ kMkvPixelCropBottom = 0x54AA,
+ kMkvPixelCropTop = 0x54BB,
+ kMkvPixelCropLeft = 0x54CC,
+ kMkvPixelCropRight = 0x54DD,
+ kMkvDisplayWidth = 0x54B0,
+ kMkvDisplayHeight = 0x54BA,
+ kMkvDisplayUnit = 0x54B2,
+ kMkvAspectRatioType = 0x54B3,
+ kMkvFrameRate = 0x2383E3,
+ kMkvAudio = 0xE1,
+ kMkvSamplingFrequency = 0xB5,
+ kMkvOutputSamplingFrequency = 0x78B5,
+ kMkvChannels = 0x9F,
+ kMkvBitDepth = 0x6264,
+ kMkvCues = 0x1C53BB6B,
+ kMkvCuePoint = 0xBB,
+ kMkvCueTime = 0xB3,
+ kMkvCueTrackPositions = 0xB7,
+ kMkvCueTrack = 0xF7,
+ kMkvCueClusterPosition = 0xF1,
+ kMkvCueBlockNumber = 0x5378
+};
+
+enum TrackTypes {
+ kInvalidType = -1,
+ kVideoType = 0x1,
+ kAudioType = 0x2,
+ kComplexType = 0x3,
+ kLogoType = 0x10,
+ kSubtitleType = 0x11,
+ kButtonsType = 0x12,
+ kControlType = 0x20
+};
+
+enum TrackNum {
+ kVideoTrackNum = 0x1,
+ kAudioTrackNum = 0x2
+};
+}
+
+#endif /* WEBMCONSTANTS_H_ */
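These ids already embed their EBML length descriptor in their leading bits, so the EbmlUtil helpers can recover their on-disk width directly. A small sketch, assuming both headers are on the include path:

#include <assert.h>
#include "EbmlUtil.h"
#include "WebmConstants.h"

int main() {
    assert(webm::sizeOf(webm::kMkvEbml) == 4);  // 0x1A45DFA3 starts with 0001...
    assert(webm::sizeOf(webm::kMkvVoid) == 1);  // 0xEC starts with 1...
    return 0;
}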
diff --git a/media/libstagefright/webm/WebmElement.cpp b/media/libstagefright/webm/WebmElement.cpp
new file mode 100644
index 0000000..a008cab
--- /dev/null
+++ b/media/libstagefright/webm/WebmElement.cpp
@@ -0,0 +1,367 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "WebmElement"
+
+#include "EbmlUtil.h"
+#include "WebmElement.h"
+#include "WebmConstants.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <utils/Log.h>
+
+#include <string.h>
+#include <unistd.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+using namespace android;
+using namespace webm;
+
+namespace {
+
+int64_t voidSize(int64_t totalSize) {
+ if (totalSize < 2) {
+ return -1;
+ }
+ if (totalSize < 9) {
+ return totalSize - 2;
+ }
+ return totalSize - 9;
+}
+
+uint64_t childrenSum(const List<sp<WebmElement> >& children) {
+ uint64_t total = 0;
+ for (List<sp<WebmElement> >::const_iterator it = children.begin();
+ it != children.end(); ++it) {
+ total += (*it)->totalSize();
+ }
+ return total;
+}
+
+void populateCommonTrackEntries(
+ int num,
+ uint64_t uid,
+ bool lacing,
+ const char *lang,
+ const char *codec,
+ TrackTypes type,
+ List<sp<WebmElement> > &ls) {
+ ls.push_back(new WebmUnsigned(kMkvTrackNumber, num));
+ ls.push_back(new WebmUnsigned(kMkvTrackUid, uid));
+ ls.push_back(new WebmUnsigned(kMkvFlagLacing, lacing));
+ ls.push_back(new WebmString(kMkvLanguage, lang));
+ ls.push_back(new WebmString(kMkvCodecId, codec));
+ ls.push_back(new WebmUnsigned(kMkvTrackType, type));
+}
+}
+
+namespace android {
+
+WebmElement::WebmElement(uint64_t id, uint64_t size)
+ : mId(id), mSize(size) {
+}
+
+WebmElement::~WebmElement() {
+}
+
+int WebmElement::serializePayloadSize(uint8_t *buf) {
+ return serializeCodedUnsigned(encodeUnsigned(mSize), buf);
+}
+
+uint64_t WebmElement::serializeInto(uint8_t *buf) {
+ uint8_t *cur = buf;
+ int head = serializeCodedUnsigned(mId, cur);
+ cur += head;
+ int neck = serializePayloadSize(cur);
+ cur += neck;
+ serializePayload(cur);
+ cur += mSize;
+ return cur - buf;
+}
+
+uint64_t WebmElement::totalSize() {
+ uint8_t buf[8];
+ // totalSize = sizeOf(id) + sizeOf(encodeUnsigned(size)) + size;
+ // serializePayloadSize(buf) computes the middle term.
+ return sizeOf(mId) + serializePayloadSize(buf) + mSize;
+}
+
+uint8_t *WebmElement::serialize(uint64_t& size) {
+ size = totalSize();
+ uint8_t *buf = new uint8_t[size];
+ serializeInto(buf);
+ return buf;
+}
+
+int WebmElement::write(int fd, uint64_t& size) {
+ uint8_t buf[8];
+ size = totalSize();
+ off64_t off = ::lseek64(fd, (size - 1), SEEK_CUR) - (size - 1);
+ ::write(fd, buf, 1); // extend file
+
+ off64_t curOff = off + size;
+ off64_t alignedOff = off & ~(::sysconf(_SC_PAGE_SIZE) - 1);
+ off64_t mapSize = curOff - alignedOff;
+ off64_t pageOff = off - alignedOff;
+ void *dst = ::mmap64(NULL, mapSize, PROT_WRITE, MAP_SHARED, fd, alignedOff);
+ if (dst == MAP_FAILED) {
+ ALOGE("mmap64 failed; errno = %d", errno);
+ ALOGE("fd %d; flags: %o", fd, ::fcntl(fd, F_GETFL, 0));
+ return errno;
+ } else {
+ serializeInto((uint8_t*) dst + pageOff);
+ ::msync(dst, mapSize, MS_SYNC);
+ return ::munmap(dst, mapSize);
+ }
+}
+
+//=================================================================================================
+
+WebmUnsigned::WebmUnsigned(uint64_t id, uint64_t value)
+ : WebmElement(id, sizeOf(value)), mValue(value) {
+}
+
+void WebmUnsigned::serializePayload(uint8_t *buf) {
+ serializeCodedUnsigned(mValue, buf);
+}
+
+//=================================================================================================
+
+WebmFloat::WebmFloat(uint64_t id, double value)
+ : WebmElement(id, sizeof(double)), mValue(value) {
+}
+
+WebmFloat::WebmFloat(uint64_t id, float value)
+ : WebmElement(id, sizeof(float)), mValue(value) {
+}
+
+void WebmFloat::serializePayload(uint8_t *buf) {
+ uint64_t data;
+ if (mSize == sizeof(float)) {
+ float f = mValue;
+ data = *reinterpret_cast<const uint32_t*>(&f);
+ } else {
+ data = *reinterpret_cast<const uint64_t*>(&mValue);
+ }
+ for (int i = mSize - 1; i >= 0; --i) {
+ buf[i] = data & 0xff;
+ data >>= 8;
+ }
+}
+
+//=================================================================================================
+
+WebmBinary::WebmBinary(uint64_t id, const sp<ABuffer> &ref)
+ : WebmElement(id, ref->size()), mRef(ref) {
+}
+
+void WebmBinary::serializePayload(uint8_t *buf) {
+ memcpy(buf, mRef->data(), mRef->size());
+}
+
+//=================================================================================================
+
+WebmString::WebmString(uint64_t id, const char *str)
+ : WebmElement(id, strlen(str)), mStr(str) {
+}
+
+void WebmString::serializePayload(uint8_t *buf) {
+ memcpy(buf, mStr, strlen(mStr));
+}
+
+//=================================================================================================
+
+WebmSimpleBlock::WebmSimpleBlock(
+ int trackNum,
+ int16_t relTimecode,
+ bool key,
+ const sp<ABuffer>& orig)
+ // Payload = trackNum (1 byte) + relative timecode (2 bytes) + flags (1 byte)
+ // + frame data; only the least significant byte of trackNum is encoded.
+ : WebmElement(kMkvSimpleBlock, orig->size() + 4),
+ mTrackNum(trackNum),
+ mRelTimecode(relTimecode),
+ mKey(key),
+ mRef(orig) {
+}
+
+void WebmSimpleBlock::serializePayload(uint8_t *buf) {
+ serializeCodedUnsigned(encodeUnsigned(mTrackNum), buf);
+ buf[1] = (mRelTimecode & 0xff00) >> 8;
+ buf[2] = mRelTimecode & 0xff;
+ buf[3] = mKey ? 0x80 : 0;
+ memcpy(buf + 4, mRef->data(), mSize - 4);
+}
+
+//=================================================================================================
+
+EbmlVoid::EbmlVoid(uint64_t totalSize)
+ : WebmElement(kMkvVoid, voidSize(totalSize)),
+ mSizeWidth(totalSize - sizeOf(kMkvVoid) - voidSize(totalSize)) {
+ CHECK_GE(voidSize(totalSize), 0);
+}
+
+int EbmlVoid::serializePayloadSize(uint8_t *buf) {
+ return serializeCodedUnsigned(encodeUnsigned(mSize, mSizeWidth), buf);
+}
+
+void EbmlVoid::serializePayload(uint8_t *buf) {
+ ::memset(buf, 0, mSize);
+ return;
+}
+
+//=================================================================================================
+
+WebmMaster::WebmMaster(uint64_t id, const List<sp<WebmElement> >& children)
+ : WebmElement(id, childrenSum(children)), mChildren(children) {
+}
+
+WebmMaster::WebmMaster(uint64_t id)
+ : WebmElement(id, 0) {
+}
+
+int WebmMaster::serializePayloadSize(uint8_t *buf) {
+ if (mSize == 0) {
+ return serializeCodedUnsigned(kMkvUnknownLength, buf);
+ }
+ return WebmElement::serializePayloadSize(buf);
+}
+
+void WebmMaster::serializePayload(uint8_t *buf) {
+ uint64_t off = 0;
+ for (List<sp<WebmElement> >::const_iterator it = mChildren.begin(); it != mChildren.end();
+ ++it) {
+ sp<WebmElement> child = (*it);
+ child->serializeInto(buf + off);
+ off += child->totalSize();
+ }
+}
+
+//=================================================================================================
+
+sp<WebmElement> WebmElement::CuePointEntry(uint64_t time, int track, uint64_t off) {
+ List<sp<WebmElement> > cuePointEntryFields;
+ cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueTrack, track));
+ cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueClusterPosition, off));
+ WebmElement *cueTrackPositions = new WebmMaster(kMkvCueTrackPositions, cuePointEntryFields);
+
+ cuePointEntryFields.clear();
+ cuePointEntryFields.push_back(new WebmUnsigned(kMkvCueTime, time));
+ cuePointEntryFields.push_back(cueTrackPositions);
+ return new WebmMaster(kMkvCuePoint, cuePointEntryFields);
+}
+
+sp<WebmElement> WebmElement::SeekEntry(uint64_t id, uint64_t off) {
+ List<sp<WebmElement> > seekEntryFields;
+ seekEntryFields.push_back(new WebmUnsigned(kMkvSeekId, id));
+ seekEntryFields.push_back(new WebmUnsigned(kMkvSeekPosition, off));
+ return new WebmMaster(kMkvSeek, seekEntryFields);
+}
+
+sp<WebmElement> WebmElement::EbmlHeader(
+ int ver,
+ int readVer,
+ int maxIdLen,
+ int maxSizeLen,
+ int docVer,
+ int docReadVer) {
+ List<sp<WebmElement> > headerFields;
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlVersion, ver));
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlReadVersion, readVer));
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlMaxIdlength, maxIdLen));
+ headerFields.push_back(new WebmUnsigned(kMkvEbmlMaxSizeLength, maxSizeLen));
+ headerFields.push_back(new WebmString(kMkvDocType, "webm"));
+ headerFields.push_back(new WebmUnsigned(kMkvDocTypeVersion, docVer));
+ headerFields.push_back(new WebmUnsigned(kMkvDocTypeReadVersion, docReadVer));
+ return new WebmMaster(kMkvEbml, headerFields);
+}
+
+sp<WebmElement> WebmElement::SegmentInfo(uint64_t scale, double dur) {
+ List<sp<WebmElement> > segmentInfo;
+ // place duration first; easier to patch
+ segmentInfo.push_back(new WebmFloat(kMkvSegmentDuration, dur));
+ segmentInfo.push_back(new WebmUnsigned(kMkvTimecodeScale, scale));
+ segmentInfo.push_back(new WebmString(kMkvMuxingApp, "android"));
+ segmentInfo.push_back(new WebmString(kMkvWritingApp, "android"));
+ return new WebmMaster(kMkvInfo, segmentInfo);
+}
+
+sp<WebmElement> WebmElement::AudioTrackEntry(
+ int chans,
+ double rate,
+ const sp<ABuffer> &buf,
+ int bps,
+ uint64_t uid,
+ bool lacing,
+ const char *lang) {
+ if (uid == 0) {
+ uid = kAudioTrackNum;
+ }
+
+ List<sp<WebmElement> > trackEntryFields;
+ populateCommonTrackEntries(
+ kAudioTrackNum,
+ uid,
+ lacing,
+ lang,
+ "A_VORBIS",
+ kAudioType,
+ trackEntryFields);
+
+ List<sp<WebmElement> > audioInfo;
+ audioInfo.push_back(new WebmUnsigned(kMkvChannels, chans));
+ audioInfo.push_back(new WebmFloat(kMkvSamplingFrequency, rate));
+ if (bps) {
+ WebmElement *bitDepth = new WebmUnsigned(kMkvBitDepth, bps);
+ audioInfo.push_back(bitDepth);
+ }
+
+ trackEntryFields.push_back(new WebmMaster(kMkvAudio, audioInfo));
+ trackEntryFields.push_back(new WebmBinary(kMkvCodecPrivate, buf));
+ return new WebmMaster(kMkvTrackEntry, trackEntryFields);
+}
+
+sp<WebmElement> WebmElement::VideoTrackEntry(
+ uint64_t width,
+ uint64_t height,
+ uint64_t uid,
+ bool lacing,
+ const char *lang) {
+ if (uid == 0) {
+ uid = kVideoTrackNum;
+ }
+
+ List<sp<WebmElement> > trackEntryFields;
+ populateCommonTrackEntries(
+ kVideoTrackNum,
+ uid,
+ lacing,
+ lang,
+ "V_VP8",
+ kVideoType,
+ trackEntryFields);
+
+ List<sp<WebmElement> > videoInfo;
+ videoInfo.push_back(new WebmUnsigned(kMkvPixelWidth, width));
+ videoInfo.push_back(new WebmUnsigned(kMkvPixelHeight, height));
+
+ trackEntryFields.push_back(new WebmMaster(kMkvVideo, videoInfo));
+ return new WebmMaster(kMkvTrackEntry, trackEntryFields);
+}
+} /* namespace android */
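A minimal sketch of using the element builders above: assemble the fixed EBML header and serialize it to memory. Layout of the full file (seek head, cues, size patching) is handled by WebmWriter, added later in this change.

#include "WebmElement.h"

using namespace android;

int main() {
    // doctype "webm", EBML versions 1/1, max id/size lengths 4/8, doc versions 2/2
    sp<WebmElement> ebml = WebmElement::EbmlHeader();

    uint64_t size;
    uint8_t *bytes = ebml->serialize(size);  // caller owns the returned buffer
    // bytes[0..3] are 1A 45 DF A3 (kMkvEbml), followed by the coded size and children
    delete[] bytes;
    return 0;
}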
diff --git a/media/libstagefright/webm/WebmElement.h b/media/libstagefright/webm/WebmElement.h
new file mode 100644
index 0000000..f19933e
--- /dev/null
+++ b/media/libstagefright/webm/WebmElement.h
@@ -0,0 +1,127 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMELEMENT_H_
+#define WEBMELEMENT_H_
+
+#include <media/stagefright/MediaBuffer.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <utils/List.h>
+
+namespace android {
+
+struct WebmElement : public LightRefBase<WebmElement> {
+ const uint64_t mId, mSize;
+
+ WebmElement(uint64_t id, uint64_t size);
+ virtual ~WebmElement();
+
+ virtual int serializePayloadSize(uint8_t *buf);
+ virtual void serializePayload(uint8_t *buf)=0;
+ uint64_t totalSize();
+ uint64_t serializeInto(uint8_t *buf);
+ uint8_t *serialize(uint64_t& size);
+ int write(int fd, uint64_t& size);
+
+ static sp<WebmElement> EbmlHeader(
+ int ver = 1,
+ int readVer = 1,
+ int maxIdLen = 4,
+ int maxSizeLen = 8,
+ int docVer = 2,
+ int docReadVer = 2);
+
+ static sp<WebmElement> SegmentInfo(uint64_t scale = 1000000, double dur = 0);
+
+ static sp<WebmElement> AudioTrackEntry(
+ int chans,
+ double rate,
+ const sp<ABuffer> &buf,
+ int bps = 0,
+ uint64_t uid = 0,
+ bool lacing = false,
+ const char *lang = "und");
+
+ static sp<WebmElement> VideoTrackEntry(
+ uint64_t width,
+ uint64_t height,
+ uint64_t uid = 0,
+ bool lacing = false,
+ const char *lang = "und");
+
+ static sp<WebmElement> SeekEntry(uint64_t id, uint64_t off);
+ static sp<WebmElement> CuePointEntry(uint64_t time, int track, uint64_t off);
+ static sp<WebmElement> SimpleBlock(
+ int trackNum,
+ int16_t timecode,
+ bool key,
+ const uint8_t *data,
+ uint64_t dataSize);
+};
+
+struct WebmUnsigned : public WebmElement {
+ WebmUnsigned(uint64_t id, uint64_t value);
+ const uint64_t mValue;
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmFloat : public WebmElement {
+ const double mValue;
+ WebmFloat(uint64_t id, float value);
+ WebmFloat(uint64_t id, double value);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmBinary : public WebmElement {
+ const sp<ABuffer> mRef;
+ WebmBinary(uint64_t id, const sp<ABuffer> &ref);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmString : public WebmElement {
+ const char *const mStr;
+ WebmString(uint64_t id, const char *str);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmSimpleBlock : public WebmElement {
+ const int mTrackNum;
+ const int16_t mRelTimecode;
+ const bool mKey;
+ const sp<ABuffer> mRef;
+
+ WebmSimpleBlock(int trackNum, int16_t timecode, bool key, const sp<ABuffer>& orig);
+ void serializePayload(uint8_t *buf);
+};
+
+struct EbmlVoid : public WebmElement {
+ const uint64_t mSizeWidth;
+ EbmlVoid(uint64_t totalSize);
+ int serializePayloadSize(uint8_t *buf);
+ void serializePayload(uint8_t *buf);
+};
+
+struct WebmMaster : public WebmElement {
+ const List<sp<WebmElement> > mChildren;
+ WebmMaster(uint64_t id);
+ WebmMaster(uint64_t id, const List<sp<WebmElement> > &children);
+ int serializePayloadSize(uint8_t *buf);
+ void serializePayload(uint8_t *buf);
+};
+
+} /* namespace android */
+#endif /* WEBMELEMENT_H_ */
diff --git a/media/libstagefright/webm/WebmFrame.cpp b/media/libstagefright/webm/WebmFrame.cpp
new file mode 100644
index 0000000..e5134ed
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrame.cpp
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WebmFrame"
+
+#include "WebmFrame.h"
+#include "WebmConstants.h"
+
+#include <media/stagefright/foundation/ADebug.h>
+#include <unistd.h>
+
+using namespace android;
+using namespace webm;
+
+namespace {
+sp<ABuffer> toABuffer(MediaBuffer *mbuf) {
+ sp<ABuffer> abuf = new ABuffer(mbuf->range_length());
+ memcpy(abuf->data(), (uint8_t*) mbuf->data() + mbuf->range_offset(), mbuf->range_length());
+ return abuf;
+}
+}
+
+namespace android {
+
+const sp<WebmFrame> WebmFrame::EOS = new WebmFrame();
+
+WebmFrame::WebmFrame()
+ : mType(kInvalidType),
+ mKey(false),
+ mAbsTimecode(UINT64_MAX),
+ mData(new ABuffer(0)),
+ mEos(true) {
+}
+
+WebmFrame::WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *mbuf)
+ : mType(type),
+ mKey(key),
+ mAbsTimecode(absTimecode),
+ mData(toABuffer(mbuf)),
+ mEos(false) {
+}
+
+sp<WebmElement> WebmFrame::SimpleBlock(uint64_t baseTimecode) const {
+ return new WebmSimpleBlock(
+ mType == kVideoType ? kVideoTrackNum : kAudioTrackNum,
+ mAbsTimecode - baseTimecode,
+ mKey,
+ mData);
+}
+
+bool WebmFrame::operator<(const WebmFrame &other) const {
+ if (this->mEos) {
+ return false;
+ }
+ if (other.mEos) {
+ return true;
+ }
+ if (this->mAbsTimecode == other.mAbsTimecode) {
+ if (this->mType == kAudioType && other.mType == kVideoType) {
+ return true;
+ }
+ if (this->mType == kVideoType && other.mType == kAudioType) {
+ return false;
+ }
+ return false;
+ }
+ return this->mAbsTimecode < other.mAbsTimecode;
+}
+} /* namespace android */
diff --git a/media/libstagefright/webm/WebmFrame.h b/media/libstagefright/webm/WebmFrame.h
new file mode 100644
index 0000000..4f0b055
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrame.h
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMFRAME_H_
+#define WEBMFRAME_H_
+
+#include "WebmElement.h"
+
+namespace android {
+
+struct WebmFrame : LightRefBase<WebmFrame> {
+public:
+ const int mType;
+ const bool mKey;
+ const uint64_t mAbsTimecode;
+ const sp<ABuffer> mData;
+ const bool mEos;
+
+ WebmFrame();
+ WebmFrame(int type, bool key, uint64_t absTimecode, MediaBuffer *buf);
+ ~WebmFrame() {}
+
+ sp<WebmElement> SimpleBlock(uint64_t baseTimecode) const;
+
+ bool operator<(const WebmFrame &other) const;
+
+ static const sp<WebmFrame> EOS;
+private:
+ DISALLOW_EVIL_CONSTRUCTORS(WebmFrame);
+};
+
+} /* namespace android */
+#endif /* WEBMFRAME_H_ */
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
new file mode 100644
index 0000000..a4b8a42
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -0,0 +1,399 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "WebmFrameThread"
+
+#include "WebmConstants.h"
+#include "WebmFrameThread.h"
+
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <utils/Log.h>
+#include <inttypes.h>
+
+using namespace webm;
+
+namespace android {
+
+void *WebmFrameThread::wrap(void *arg) {
+ WebmFrameThread *worker = reinterpret_cast<WebmFrameThread*>(arg);
+ worker->run();
+ return NULL;
+}
+
+status_t WebmFrameThread::start() {
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
+ pthread_create(&mThread, &attr, WebmFrameThread::wrap, this);
+ pthread_attr_destroy(&attr);
+ return OK;
+}
+
+status_t WebmFrameThread::stop() {
+ void *status;
+ pthread_join(mThread, &status);
+ return (status_t)(intptr_t)status;
+}
+
+//=================================================================================================
+
+WebmFrameSourceThread::WebmFrameSourceThread(
+ int type,
+ LinkedBlockingQueue<const sp<WebmFrame> >& sink)
+ : mType(type), mSink(sink) {
+}
+
+//=================================================================================================
+
+WebmFrameSinkThread::WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ sp<WebmFrameSourceThread> videoThread,
+ sp<WebmFrameSourceThread> audioThread,
+ List<sp<WebmElement> >& cues)
+ : mFd(fd),
+ mSegmentDataStart(off),
+ mVideoFrames(videoThread->mSink),
+ mAudioFrames(audioThread->mSink),
+ mCues(cues),
+ mDone(true) {
+}
+
+WebmFrameSinkThread::WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
+ LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
+ List<sp<WebmElement> >& cues)
+ : mFd(fd),
+ mSegmentDataStart(off),
+ mVideoFrames(videoSource),
+ mAudioFrames(audioSource),
+ mCues(cues),
+ mDone(true) {
+}
+
+// Initializes a webm cluster with its starting timecode.
+//
+// frames:
+// sequence of input audio/video frames received from the source.
+//
+// clusterTimecodeL:
+// the starting timecode of the cluster; this is the timecode of the first
+// frame since frames are ordered by timestamp.
+//
+// children:
+// list to hold child elements in a webm cluster (start timecode and
+// simple blocks).
+//
+// static
+void WebmFrameSinkThread::initCluster(
+ List<const sp<WebmFrame> >& frames,
+ uint64_t& clusterTimecodeL,
+ List<sp<WebmElement> >& children) {
+ CHECK(!frames.empty() && children.empty());
+
+ const sp<WebmFrame> f = *(frames.begin());
+ clusterTimecodeL = f->mAbsTimecode;
+ WebmUnsigned *clusterTimecode = new WebmUnsigned(kMkvTimecode, clusterTimecodeL);
+ children.clear();
+ children.push_back(clusterTimecode);
+}
+
+void WebmFrameSinkThread::writeCluster(List<sp<WebmElement> >& children) {
+ // children must contain at least one simpleblock and its timecode
+ CHECK_GE(children.size(), 2);
+
+ uint64_t size;
+ sp<WebmElement> cluster = new WebmMaster(kMkvCluster, children);
+ cluster->write(mFd, size);
+ children.clear();
+}
+
+// Write out (possibly multiple) webm cluster(s) from frames split on video key frames.
+//
+// last:
+// current flush is triggered by EOS instead of a second outstanding video key frame.
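+// Example (following the logic below): with outstanding frames
+// [Vkey, A, V, A, Vkey] and last == false, n is reduced from 5 to 3, so the
+// first Vkey, A and V go into the current cluster; the trailing audio frame
+// is held back because it precedes the next cluster's key frame, and
+// [A, Vkey] stay queued for the next flush. Had the second-to-last frame
+// been video rather than audio, it would have been written out as well.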
+void WebmFrameSinkThread::flushFrames(List<const sp<WebmFrame> >& frames, bool last) {
+ if (frames.empty()) {
+ return;
+ }
+
+ uint64_t clusterTimecodeL;
+ List<sp<WebmElement> > children;
+ initCluster(frames, clusterTimecodeL, children);
+
+ uint64_t cueTime = clusterTimecodeL;
+ off_t fpos = ::lseek(mFd, 0, SEEK_CUR);
+ size_t n = frames.size();
+ if (!last) {
+ // If we are not flushing the last sequence of outstanding frames, flushFrames
+ // must have been called right after a second outstanding video key frame was
+ // pushed (the last frame in the list), which belongs to the next cluster; also
+ // hold back the second-to-last frame until we have checked its type. An audio
+ // frame should precede that video key frame in the next sequence, while a video
+ // frame should be the last frame of the current (to-be-flushed) sequence.
+ CHECK_GE(n, 2);
+ n -= 2;
+ }
+
+ for (size_t i = 0; i < n; i++) {
+ const sp<WebmFrame> f = *(frames.begin());
+ if (f->mType == kVideoType && f->mKey) {
+ cueTime = f->mAbsTimecode;
+ }
+
+ if (f->mAbsTimecode - clusterTimecodeL > INT16_MAX) {
+ writeCluster(children);
+ initCluster(frames, clusterTimecodeL, children);
+ }
+
+ frames.erase(frames.begin());
+ children.push_back(f->SimpleBlock(clusterTimecodeL));
+ }
+
+ // equivalent to last==false
+ if (!frames.empty()) {
+ // decide whether to write out the second to last frame.
+ const sp<WebmFrame> secondLastFrame = *(frames.begin());
+ if (secondLastFrame->mType == kVideoType) {
+ frames.erase(frames.begin());
+ children.push_back(secondLastFrame->SimpleBlock(clusterTimecodeL));
+ }
+ }
+
+ writeCluster(children);
+ sp<WebmElement> cuePoint = WebmElement::CuePointEntry(cueTime, 1, fpos - mSegmentDataStart);
+ mCues.push_back(cuePoint);
+}
+
+status_t WebmFrameSinkThread::start() {
+ mDone = false;
+ return WebmFrameThread::start();
+}
+
+status_t WebmFrameSinkThread::stop() {
+ mDone = true;
+ mVideoFrames.push(WebmFrame::EOS);
+ mAudioFrames.push(WebmFrame::EOS);
+ return WebmFrameThread::stop();
+}
+
+void WebmFrameSinkThread::run() {
+ int numVideoKeyFrames = 0;
+ List<const sp<WebmFrame> > outstandingFrames;
+ while (!mDone) {
+ ALOGV("wait v frame");
+ const sp<WebmFrame> videoFrame = mVideoFrames.peek();
+ ALOGV("v frame: %p", videoFrame.get());
+
+ ALOGV("wait a frame");
+ const sp<WebmFrame> audioFrame = mAudioFrames.peek();
+ ALOGV("a frame: %p", audioFrame.get());
+
+ if (videoFrame->mEos && audioFrame->mEos) {
+ break;
+ }
+
+ if (*audioFrame < *videoFrame) {
+ ALOGV("take a frame");
+ mAudioFrames.take();
+ outstandingFrames.push_back(audioFrame);
+ } else {
+ ALOGV("take v frame");
+ mVideoFrames.take();
+ outstandingFrames.push_back(videoFrame);
+ if (videoFrame->mKey)
+ numVideoKeyFrames++;
+ }
+
+ if (numVideoKeyFrames == 2) {
+ flushFrames(outstandingFrames, /* last = */ false);
+ numVideoKeyFrames--;
+ }
+ }
+ ALOGV("flushing last cluster (size %zu)", outstandingFrames.size());
+ flushFrames(outstandingFrames, /* last = */ true);
+ mDone = true;
+}
+
+//=================================================================================================
+
+static const int64_t kInitialDelayTimeUs = 700000LL;
+
+void WebmFrameMediaSourceThread::clearFlags() {
+ mDone = false;
+ mPaused = false;
+ mResumed = false;
+ mStarted = false;
+ mReachedEOS = false;
+}
+
+WebmFrameMediaSourceThread::WebmFrameMediaSourceThread(
+ const sp<MediaSource>& source,
+ int type,
+ LinkedBlockingQueue<const sp<WebmFrame> >& sink,
+ uint64_t timeCodeScale,
+ int64_t startTimeRealUs,
+ int32_t startTimeOffsetMs,
+ int numTracks,
+ bool realTimeRecording)
+ : WebmFrameSourceThread(type, sink),
+ mSource(source),
+ mTimeCodeScale(timeCodeScale),
+ mTrackDurationUs(0) {
+ clearFlags();
+ mStartTimeUs = startTimeRealUs;
+ if (realTimeRecording && numTracks > 1) {
+ /*
+ * Copied from MPEG4Writer
+ *
+ * This extra delay of accepting incoming audio/video signals
+ * helps to align a/v start time at the beginning of a recording
+ * session, and it also helps eliminate the "recording" sound for
+ * camcorder applications.
+ *
+ * If client does not set the start time offset, we fall back to
+ * use the default initial delay value.
+ */
+ int64_t startTimeOffsetUs = startTimeOffsetMs * 1000LL;
+ if (startTimeOffsetUs < 0) { // Start time offset was not set
+ startTimeOffsetUs = kInitialDelayTimeUs;
+ }
+ mStartTimeUs += startTimeOffsetUs;
+ ALOGI("Start time offset: %" PRId64 " us", startTimeOffsetUs);
+ }
+}
+
+status_t WebmFrameMediaSourceThread::start() {
+ sp<MetaData> meta = new MetaData;
+ meta->setInt64(kKeyTime, mStartTimeUs);
+ status_t err = mSource->start(meta.get());
+ if (err != OK) {
+ mDone = true;
+ mReachedEOS = true;
+ return err;
+ } else {
+ mStarted = true;
+ return WebmFrameThread::start();
+ }
+}
+
+status_t WebmFrameMediaSourceThread::resume() {
+ if (!mDone && mPaused) {
+ mPaused = false;
+ mResumed = true;
+ }
+ return OK;
+}
+
+status_t WebmFrameMediaSourceThread::pause() {
+ if (mStarted) {
+ mPaused = true;
+ }
+ return OK;
+}
+
+status_t WebmFrameMediaSourceThread::stop() {
+ if (mStarted) {
+ mStarted = false;
+ mDone = true;
+ mSource->stop();
+ return WebmFrameThread::stop();
+ }
+ return OK;
+}
+
+void WebmFrameMediaSourceThread::run() {
+ int32_t count = 0;
+ int64_t timestampUs = 0xdeadbeef;
+ int64_t lastTimestampUs = 0; // Previous sample time stamp
+ int64_t lastDurationUs = 0; // Previous sample duration
+ int64_t previousPausedDurationUs = 0;
+
+ const uint64_t kUninitialized = 0xffffffffffffffffL;
+ mStartTimeUs = kUninitialized;
+
+ status_t err = OK;
+ MediaBuffer *buffer;
+ while (!mDone && (err = mSource->read(&buffer, NULL)) == OK) {
+ if (buffer->range_length() == 0) {
+ buffer->release();
+ buffer = NULL;
+ continue;
+ }
+
+ sp<MetaData> md = buffer->meta_data();
+ CHECK(md->findInt64(kKeyTime, &timestampUs));
+ if (mStartTimeUs == kUninitialized) {
+ mStartTimeUs = timestampUs;
+ }
+ timestampUs -= mStartTimeUs;
+
+ if (mPaused && !mResumed) {
+ lastDurationUs = timestampUs - lastTimestampUs;
+ lastTimestampUs = timestampUs;
+ buffer->release();
+ buffer = NULL;
+ continue;
+ }
+ ++count;
+
+ // Adjust timestamps after a pause/resume: subtract the accumulated paused
+ // duration so that the recorded timeline contains no gap.
+ if (mResumed) {
+ int64_t durExcludingEarlierPausesUs = timestampUs - previousPausedDurationUs;
+ CHECK_GE(durExcludingEarlierPausesUs, 0ll);
+ int64_t pausedDurationUs = durExcludingEarlierPausesUs - mTrackDurationUs;
+ CHECK_GE(pausedDurationUs, lastDurationUs);
+ previousPausedDurationUs += pausedDurationUs - lastDurationUs;
+ mResumed = false;
+ }
+ timestampUs -= previousPausedDurationUs;
+ CHECK_GE(timestampUs, 0ll);
+
+ int32_t isSync = false;
+ md->findInt32(kKeyIsSyncFrame, &isSync);
+ const sp<WebmFrame> f = new WebmFrame(
+ mType,
+ isSync,
+ timestampUs * 1000 / mTimeCodeScale,
+ buffer);
+ mSink.push(f);
+
+ ALOGV(
+ "%s %s frame at %" PRId64 " size %zu\n",
+ mType == kVideoType ? "video" : "audio",
+ isSync ? "I" : "P",
+ timestampUs * 1000 / mTimeCodeScale,
+ buffer->range_length());
+
+ buffer->release();
+ buffer = NULL;
+
+ if (timestampUs > mTrackDurationUs) {
+ mTrackDurationUs = timestampUs;
+ }
+ lastDurationUs = timestampUs - lastTimestampUs;
+ lastTimestampUs = timestampUs;
+ }
+
+ mTrackDurationUs += lastDurationUs;
+ mSink.push(WebmFrame::EOS);
+}
+}
diff --git a/media/libstagefright/webm/WebmFrameThread.h b/media/libstagefright/webm/WebmFrameThread.h
new file mode 100644
index 0000000..d65d9b7
--- /dev/null
+++ b/media/libstagefright/webm/WebmFrameThread.h
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMFRAMETHREAD_H_
+#define WEBMFRAMETHREAD_H_
+
+#include "WebmFrame.h"
+#include "LinkedBlockingQueue.h"
+
+#include <media/stagefright/FileSource.h>
+#include <media/stagefright/MediaSource.h>
+
+#include <utils/List.h>
+#include <utils/Errors.h>
+
+#include <pthread.h>
+
+namespace android {
+
+class WebmFrameThread : public LightRefBase<WebmFrameThread> {
+public:
+ virtual void run() = 0;
+ virtual bool running() { return false; }
+ virtual status_t start();
+ virtual status_t pause() { return OK; }
+ virtual status_t resume() { return OK; }
+ virtual status_t stop();
+ virtual ~WebmFrameThread() { stop(); }
+ static void *wrap(void *arg);
+
+protected:
+ WebmFrameThread()
+ : mThread(0) {
+ }
+
+private:
+ pthread_t mThread;
+ DISALLOW_EVIL_CONSTRUCTORS(WebmFrameThread);
+};
+
+//=================================================================================================
+
+class WebmFrameSourceThread;
+class WebmFrameSinkThread : public WebmFrameThread {
+public:
+ WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ sp<WebmFrameSourceThread> videoThread,
+ sp<WebmFrameSourceThread> audioThread,
+ List<sp<WebmElement> >& cues);
+
+ WebmFrameSinkThread(
+ const int& fd,
+ const uint64_t& off,
+ LinkedBlockingQueue<const sp<WebmFrame> >& videoSource,
+ LinkedBlockingQueue<const sp<WebmFrame> >& audioSource,
+ List<sp<WebmElement> >& cues);
+
+ void run();
+ bool running() {
+ return !mDone;
+ }
+ status_t start();
+ status_t stop();
+
+private:
+ const int& mFd;
+ const uint64_t& mSegmentDataStart;
+ LinkedBlockingQueue<const sp<WebmFrame> >& mVideoFrames;
+ LinkedBlockingQueue<const sp<WebmFrame> >& mAudioFrames;
+ List<sp<WebmElement> >& mCues;
+
+ volatile bool mDone;
+
+ static void initCluster(
+ List<const sp<WebmFrame> >& frames,
+ uint64_t& clusterTimecodeL,
+ List<sp<WebmElement> >& children);
+ void writeCluster(List<sp<WebmElement> >& children);
+ void flushFrames(List<const sp<WebmFrame> >& frames, bool last);
+};
+
+//=================================================================================================
+
+class WebmFrameSourceThread : public WebmFrameThread {
+public:
+ WebmFrameSourceThread(int type, LinkedBlockingQueue<const sp<WebmFrame> >& sink);
+ virtual int64_t getDurationUs() = 0;
+protected:
+ const int mType;
+ LinkedBlockingQueue<const sp<WebmFrame> >& mSink;
+
+ friend class WebmFrameSinkThread;
+};
+
+//=================================================================================================
+
+class WebmFrameEmptySourceThread : public WebmFrameSourceThread {
+public:
+ WebmFrameEmptySourceThread(int type, LinkedBlockingQueue<const sp<WebmFrame> >& sink)
+ : WebmFrameSourceThread(type, sink) {
+ }
+ void run() { mSink.push(WebmFrame::EOS); }
+ int64_t getDurationUs() { return 0; }
+};
+
+//=================================================================================================
+
+class WebmFrameMediaSourceThread: public WebmFrameSourceThread {
+public:
+ WebmFrameMediaSourceThread(
+ const sp<MediaSource>& source,
+ int type,
+ LinkedBlockingQueue<const sp<WebmFrame> >& sink,
+ uint64_t timeCodeScale,
+ int64_t startTimeRealUs,
+ int32_t startTimeOffsetMs,
+ int numPeers,
+ bool realTimeRecording);
+
+ void run();
+ status_t start();
+ status_t resume();
+ status_t pause();
+ status_t stop();
+ int64_t getDurationUs() {
+ return mTrackDurationUs;
+ }
+
+private:
+ const sp<MediaSource> mSource;
+ const uint64_t mTimeCodeScale;
+ uint64_t mStartTimeUs;
+
+ volatile bool mDone;
+ volatile bool mPaused;
+ volatile bool mResumed;
+ volatile bool mStarted;
+ volatile bool mReachedEOS;
+ int64_t mTrackDurationUs;
+
+ void clearFlags();
+};
+} /* namespace android */
+
+#endif /* WEBMFRAMETHREAD_H_ */
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
new file mode 100644
index 0000000..03cf92a
--- /dev/null
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -0,0 +1,551 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "WebmWriter"
+
+#include "EbmlUtil.h"
+#include "WebmWriter.h"
+
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaDefs.h>
+#include <media/stagefright/foundation/ADebug.h>
+
+#include <utils/Errors.h>
+
+#include <unistd.h>
+#include <fcntl.h>
+#include <sys/stat.h>
+#include <inttypes.h>
+
+using namespace webm;
+
+namespace {
+// Returns the number of bytes needed to Xiph-lace a payload of 'size' bytes
+// (a run of 0xff bytes followed by the final remainder byte).
+size_t XiphLaceCodeLen(size_t size) {
+ return size / 0xff + 1;
+}
+
+// Writes the Xiph lace encoding of 'size' into 'buf' and returns the number
+// of bytes written.
+size_t XiphLaceEnc(uint8_t *buf, size_t size) {
+ size_t i;
+ for (i = 0; size >= 0xff; ++i, size -= 0xff) {
+ buf[i] = 0xff;
+ }
+ buf[i++] = size;
+ return i;
+}
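+
+// For example, a 300-byte header laces as the two bytes { 0xff, 0x2d }
+// (255 + 45 = 300), and XiphLaceCodeLen(300) == 300 / 255 + 1 == 2 matches
+// the number of bytes XiphLaceEnc() writes. WebmWriter::audioTrack() below
+// relies on this to pack the three Vorbis headers into a single CodecPrivate
+// blob with two explicit lace sizes (the last header's size is implicit).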
+}
+
+namespace android {
+
+static const int64_t kMinStreamableFileSizeInBytes = 5 * 1024 * 1024;
+
+WebmWriter::WebmWriter(int fd)
+ : mFd(dup(fd)),
+ mInitCheck(mFd < 0 ? NO_INIT : OK),
+ mTimeCodeScale(1000000),
+ mStartTimestampUs(0),
+ mStartTimeOffsetMs(0),
+ mSegmentOffset(0),
+ mSegmentDataStart(0),
+ mInfoOffset(0),
+ mInfoSize(0),
+ mTracksOffset(0),
+ mCuesOffset(0),
+ mPaused(false),
+ mStarted(false),
+ mIsFileSizeLimitExplicitlyRequested(false),
+ mIsRealTimeRecording(false),
+ mStreamableFile(true),
+ mEstimatedCuesSize(0) {
+ mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
+ mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
+ mSinkThread = new WebmFrameSinkThread(
+ mFd,
+ mSegmentDataStart,
+ mStreams[kVideoIndex].mSink,
+ mStreams[kAudioIndex].mSink,
+ mCuePoints);
+}
+
+WebmWriter::WebmWriter(const char *filename)
+ : mInitCheck(NO_INIT),
+ mTimeCodeScale(1000000),
+ mStartTimestampUs(0),
+ mStartTimeOffsetMs(0),
+ mSegmentOffset(0),
+ mSegmentDataStart(0),
+ mInfoOffset(0),
+ mInfoSize(0),
+ mTracksOffset(0),
+ mCuesOffset(0),
+ mPaused(false),
+ mStarted(false),
+ mIsFileSizeLimitExplicitlyRequested(false),
+ mIsRealTimeRecording(false),
+ mStreamableFile(true),
+ mEstimatedCuesSize(0) {
+ mFd = open(filename, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
+ if (mFd >= 0) {
+ ALOGV("fd %d; flags: %o", mFd, fcntl(mFd, F_GETFL, 0));
+ mInitCheck = OK;
+ }
+ mStreams[kAudioIndex] = WebmStream(kAudioType, "Audio", &WebmWriter::audioTrack);
+ mStreams[kVideoIndex] = WebmStream(kVideoType, "Video", &WebmWriter::videoTrack);
+ mSinkThread = new WebmFrameSinkThread(
+ mFd,
+ mSegmentDataStart,
+ mStreams[kVideoIndex].mSink,
+ mStreams[kAudioIndex].mSink,
+ mCuePoints);
+}
+
+// static
+sp<WebmElement> WebmWriter::videoTrack(const sp<MetaData>& md) {
+ int32_t width, height;
+ CHECK(md->findInt32(kKeyWidth, &width));
+ CHECK(md->findInt32(kKeyHeight, &height));
+ return WebmElement::VideoTrackEntry(width, height);
+}
+
+// static
+sp<WebmElement> WebmWriter::audioTrack(const sp<MetaData>& md) {
+ int32_t nChannels, samplerate;
+ uint32_t type;
+ const void *headerData1;
+ const char headerData2[] = { 3, 'v', 'o', 'r', 'b', 'i', 's', 7, 0, 0, 0,
+ 'a', 'n', 'd', 'r', 'o', 'i', 'd', 0, 0, 0, 0, 1 };
+ const void *headerData3;
+ size_t headerSize1, headerSize2 = sizeof(headerData2), headerSize3;
+
+ CHECK(md->findInt32(kKeyChannelCount, &nChannels));
+ CHECK(md->findInt32(kKeySampleRate, &samplerate));
+ CHECK(md->findData(kKeyVorbisInfo, &type, &headerData1, &headerSize1));
+ CHECK(md->findData(kKeyVorbisBooks, &type, &headerData3, &headerSize3));
+
+ size_t codecPrivateSize = 1;
+ codecPrivateSize += XiphLaceCodeLen(headerSize1);
+ codecPrivateSize += XiphLaceCodeLen(headerSize2);
+ codecPrivateSize += headerSize1 + headerSize2 + headerSize3;
+
+ off_t off = 0;
+ sp<ABuffer> codecPrivateBuf = new ABuffer(codecPrivateSize);
+ uint8_t *codecPrivateData = codecPrivateBuf->data();
+ codecPrivateData[off++] = 2;
+
+ off += XiphLaceEnc(codecPrivateData + off, headerSize1);
+ off += XiphLaceEnc(codecPrivateData + off, headerSize2);
+
+ memcpy(codecPrivateData + off, headerData1, headerSize1);
+ off += headerSize1;
+ memcpy(codecPrivateData + off, headerData2, headerSize2);
+ off += headerSize2;
+ memcpy(codecPrivateData + off, headerData3, headerSize3);
+
+ sp<WebmElement> entry = WebmElement::AudioTrackEntry(
+ nChannels,
+ samplerate,
+ codecPrivateBuf);
+ return entry;
+}
+
+size_t WebmWriter::numTracks() {
+ Mutex::Autolock autolock(mLock);
+
+ size_t numTracks = 0;
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mTrackEntry != NULL) {
+ numTracks++;
+ }
+ }
+
+ return numTracks;
+}
+
+uint64_t WebmWriter::estimateCuesSize(int32_t bitRate) {
+ // This implementation is based on estimateMoovBoxSize in MPEG4Writer.
+ //
+ // Statistical analysis shows that metadata usually accounts
+ // for a small portion of the total file size, usually < 0.6%.
+
+ // The default MIN_CUES_SIZE (MIN_MOOV_BOX_SIZE in MPEG4Writer) is set to
+ // 0.6% x 1MB / 2, where 1MB is the common file size limit for MMS
+ // applications. The default MAX_CUES_SIZE value is based on a video
+ // recording of about 3 minutes at a bit rate of about 3 Mbps, because
+ // statistics also show that most captured videos are shorter than
+ // 3 minutes.
+
+ // If the estimation is wrong, we will pay the price of wasting
+ // some reserved space. This should not happen so often statistically.
+ static const int32_t factor = 2;
+ static const int64_t MIN_CUES_SIZE = 3 * 1024; // 3 KB
+ static const int64_t MAX_CUES_SIZE = (180 * 3000000 * 6LL / 8000);
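+
+ // Worked example of the bound above: a 3-minute recording at 3 Mbps carries
+ // roughly 180 * 3,000,000 / 8 = 67.5 MB of data, and 0.6% of that is
+ // 180 * 3,000,000 * 6 / 8000 = 405,000 bytes, i.e. MAX_CUES_SIZE. The value
+ // returned below is additionally doubled by 'factor' as headroom.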
+ int64_t size = MIN_CUES_SIZE;
+
+ // Max file size limit is set
+ if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
+ size = mMaxFileSizeLimitBytes * 6 / 1000;
+ }
+
+ // Max file duration limit is set
+ if (mMaxFileDurationLimitUs != 0) {
+ if (bitRate > 0) {
+ int64_t size2 = ((mMaxFileDurationLimitUs * bitRate * 6) / 1000 / 8000000);
+ if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
+ // When both file size and duration limits are set,
+ // we use the smaller limit of the two.
+ if (size > size2) {
+ size = size2;
+ }
+ } else {
+ // Only max file duration limit is set
+ size = size2;
+ }
+ }
+ }
+
+ if (size < MIN_CUES_SIZE) {
+ size = MIN_CUES_SIZE;
+ }
+
+ // Any long-duration recording will probably end up as a
+ // non-streamable WebM file.
+ if (size > MAX_CUES_SIZE) {
+ size = MAX_CUES_SIZE;
+ }
+
+ ALOGV("limits: %" PRId64 "/%" PRId64 " bytes/us,"
+ " bit rate: %d bps and the estimated cues size %" PRId64 " bytes",
+ mMaxFileSizeLimitBytes, mMaxFileDurationLimitUs, bitRate, size);
+ return factor * size;
+}
+
+void WebmWriter::initStream(size_t idx) {
+ if (mStreams[idx].mThread != NULL) {
+ return;
+ }
+ if (mStreams[idx].mSource == NULL) {
+ ALOGV("adding dummy source ... ");
+ mStreams[idx].mThread = new WebmFrameEmptySourceThread(
+ mStreams[idx].mType, mStreams[idx].mSink);
+ } else {
+ ALOGV("adding source %p", mStreams[idx].mSource.get());
+ mStreams[idx].mThread = new WebmFrameMediaSourceThread(
+ mStreams[idx].mSource,
+ mStreams[idx].mType,
+ mStreams[idx].mSink,
+ mTimeCodeScale,
+ mStartTimestampUs,
+ mStartTimeOffsetMs,
+ numTracks(),
+ mIsRealTimeRecording);
+ }
+}
+
+void WebmWriter::release() {
+ close(mFd);
+ mFd = -1;
+ mInitCheck = NO_INIT;
+ mStarted = false;
+}
+
+status_t WebmWriter::reset() {
+ if (mInitCheck != OK) {
+ return OK;
+ } else {
+ if (!mStarted) {
+ release();
+ return OK;
+ }
+ }
+
+ status_t err = OK;
+ int64_t maxDurationUs = 0;
+ int64_t minDurationUs = 0x7fffffffffffffffLL;
+ for (int i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mThread == NULL) {
+ continue;
+ }
+
+ status_t status = mStreams[i].mThread->stop();
+ if (err == OK && status != OK) {
+ err = status;
+ }
+
+ int64_t durationUs = mStreams[i].mThread->getDurationUs();
+ if (durationUs > maxDurationUs) {
+ maxDurationUs = durationUs;
+ }
+ if (durationUs < minDurationUs) {
+ minDurationUs = durationUs;
+ }
+ }
+
+ if (numTracks() > 1) {
+ ALOGD("Duration from tracks range is [%" PRId64 ", %" PRId64 "] us", minDurationUs, maxDurationUs);
+ }
+
+ mSinkThread->stop();
+
+ // Do not write out movie header on error.
+ if (err != OK) {
+ release();
+ return err;
+ }
+
+ sp<WebmElement> cues = new WebmMaster(kMkvCues, mCuePoints);
+ uint64_t cuesSize = cues->totalSize();
+ // TRICKY Even when the cues do fit in the space we reserved, if they do not fit
+ // perfectly, we still need to check if there is enough "extra space" to write an
+ // EBML void element.
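+ // For instance, with mEstimatedCuesSize == 6000 and finished cues of 5999
+ // bytes, the single leftover byte cannot hold a Void element (at least
+ // kMinEbmlVoidSize bytes are required), so the cues are appended at the
+ // current end of the file and the reserved slot keeps the Void element
+ // written during start().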
+ if (cuesSize != mEstimatedCuesSize && cuesSize > mEstimatedCuesSize - kMinEbmlVoidSize) {
+ mCuesOffset = ::lseek(mFd, 0, SEEK_CUR);
+ cues->write(mFd, cuesSize);
+ } else {
+ uint64_t spaceSize;
+ ::lseek(mFd, mCuesOffset, SEEK_SET);
+ cues->write(mFd, cuesSize);
+ sp<WebmElement> space = new EbmlVoid(mEstimatedCuesSize - cuesSize);
+ space->write(mFd, spaceSize);
+ }
+
+ mCuePoints.clear();
+ mStreams[kVideoIndex].mSink.clear();
+ mStreams[kAudioIndex].mSink.clear();
+
+ uint8_t bary[sizeof(uint64_t)];
+ uint64_t totalSize = ::lseek(mFd, 0, SEEK_END);
+ uint64_t segmentSize = totalSize - mSegmentDataStart;
+ ::lseek(mFd, mSegmentOffset + sizeOf(kMkvSegment), SEEK_SET);
+ uint64_t segmentSizeCoded = encodeUnsigned(segmentSize, sizeOf(kMkvUnknownLength));
+ serializeCodedUnsigned(segmentSizeCoded, bary);
+ ::write(mFd, bary, sizeOf(kMkvUnknownLength));
+
+ uint64_t size;
+ uint64_t durationOffset = mInfoOffset + sizeOf(kMkvInfo) + sizeOf(mInfoSize)
+ + sizeOf(kMkvSegmentDuration) + sizeOf(sizeof(double));
+ sp<WebmElement> duration = new WebmFloat(
+ kMkvSegmentDuration,
+ (double) (maxDurationUs * 1000 / mTimeCodeScale));
+ duration->serializePayload(bary);
+ ::lseek(mFd, durationOffset, SEEK_SET);
+ ::write(mFd, bary, sizeof(double));
+
+ List<sp<WebmElement> > seekEntries;
+ seekEntries.push_back(WebmElement::SeekEntry(kMkvInfo, mInfoOffset - mSegmentDataStart));
+ seekEntries.push_back(WebmElement::SeekEntry(kMkvTracks, mTracksOffset - mSegmentDataStart));
+ seekEntries.push_back(WebmElement::SeekEntry(kMkvCues, mCuesOffset - mSegmentDataStart));
+ sp<WebmElement> seekHead = new WebmMaster(kMkvSeekHead, seekEntries);
+
+ uint64_t metaSeekSize;
+ ::lseek(mFd, mSegmentDataStart, SEEK_SET);
+ seekHead->write(mFd, metaSeekSize);
+
+ uint64_t spaceSize;
+ sp<WebmElement> space = new EbmlVoid(kMaxMetaSeekSize - metaSeekSize);
+ space->write(mFd, spaceSize);
+
+ release();
+ return err;
+}
+
+status_t WebmWriter::addSource(const sp<MediaSource> &source) {
+ Mutex::Autolock l(mLock);
+ if (mStarted) {
+ ALOGE("Attempt to add source AFTER recording is started");
+ return UNKNOWN_ERROR;
+ }
+
+ // At most 2 tracks can be supported.
+ if (mStreams[kVideoIndex].mTrackEntry != NULL
+ && mStreams[kAudioIndex].mTrackEntry != NULL) {
+ ALOGE("Too many tracks (2) to add");
+ return ERROR_UNSUPPORTED;
+ }
+
+ CHECK(source != NULL);
+
+ // A track of type other than video or audio is not supported.
+ const char *mime;
+ source->getFormat()->findCString(kKeyMIMEType, &mime);
+ const char *vp8 = MEDIA_MIMETYPE_VIDEO_VP8;
+ const char *vorbis = MEDIA_MIMETYPE_AUDIO_VORBIS;
+
+ size_t streamIndex;
+ if (!strncasecmp(mime, vp8, strlen(vp8))) {
+ streamIndex = kVideoIndex;
+ } else if (!strncasecmp(mime, vorbis, strlen(vorbis))) {
+ streamIndex = kAudioIndex;
+ } else {
+ ALOGE("Track (%s) other than %s or %s is not supported", mime, vp8, vorbis);
+ return ERROR_UNSUPPORTED;
+ }
+
+ // No more than one video or one audio track is supported.
+ if (mStreams[streamIndex].mTrackEntry != NULL) {
+ ALOGE("%s track already exists", mStreams[streamIndex].mName);
+ return ERROR_UNSUPPORTED;
+ }
+
+ // This is the first track of either audio or video.
+ // Go ahead and add the track.
+ mStreams[streamIndex].mSource = source;
+ mStreams[streamIndex].mTrackEntry = mStreams[streamIndex].mMakeTrack(source->getFormat());
+
+ return OK;
+}
+
+status_t WebmWriter::start(MetaData *params) {
+ if (mInitCheck != OK) {
+ return UNKNOWN_ERROR;
+ }
+
+ if (mStreams[kVideoIndex].mTrackEntry == NULL
+ && mStreams[kAudioIndex].mTrackEntry == NULL) {
+ ALOGE("No source added");
+ return INVALID_OPERATION;
+ }
+
+ if (mMaxFileSizeLimitBytes != 0) {
+ mIsFileSizeLimitExplicitlyRequested = true;
+ }
+
+ if (params) {
+ int32_t isRealTimeRecording = 0; // avoid reading an uninitialized value if the key is absent
+ params->findInt32(kKeyRealTimeRecording, &isRealTimeRecording);
+ mIsRealTimeRecording = isRealTimeRecording;
+ }
+
+ if (mStarted) {
+ if (mPaused) {
+ mPaused = false;
+ mStreams[kAudioIndex].mThread->resume();
+ mStreams[kVideoIndex].mThread->resume();
+ }
+ return OK;
+ }
+
+ if (params) {
+ int32_t tcsl;
+ if (params->findInt32(kKeyTimeScale, &tcsl)) {
+ mTimeCodeScale = tcsl;
+ }
+ }
+ CHECK_GT(mTimeCodeScale, 0);
+ ALOGV("movie time scale: %" PRIu64, mTimeCodeScale);
+
+ /*
+ * When the requested file size limit is small, the priority
+ * is to meet the file size limit requirement, rather than
+ * to make the file streamable. mStreamableFile does not tell
+ * whether the actual recorded file is streamable or not.
+ */
+ mStreamableFile = (!mMaxFileSizeLimitBytes)
+ || (mMaxFileSizeLimitBytes >= kMinStreamableFileSizeInBytes);
+
+ /*
+ * Write various metadata.
+ */
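+ // The elements below land in the file in this order:
+ // [EBML header][Segment][Void reserved for the SeekHead][Info][Tracks]
+ // [Void reserved for the Cues, when the file is streamable], followed later
+ // by the clusters; reset() seeks back to patch the Segment size, the
+ // duration in Info, and the two reserved Void slots.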
+ sp<WebmElement> ebml, segment, info, seekHead, tracks, cues;
+ ebml = WebmElement::EbmlHeader();
+ segment = new WebmMaster(kMkvSegment);
+ seekHead = new EbmlVoid(kMaxMetaSeekSize);
+ info = WebmElement::SegmentInfo(mTimeCodeScale, 0);
+
+ List<sp<WebmElement> > children;
+ for (size_t i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mTrackEntry != NULL) {
+ children.push_back(mStreams[i].mTrackEntry);
+ }
+ }
+ tracks = new WebmMaster(kMkvTracks, children);
+
+ if (!mStreamableFile) {
+ cues = NULL;
+ } else {
+ int32_t bitRate = -1;
+ if (params) {
+ params->findInt32(kKeyBitRate, &bitRate);
+ }
+ mEstimatedCuesSize = estimateCuesSize(bitRate);
+ CHECK_GE(mEstimatedCuesSize, 8);
+ cues = new EbmlVoid(mEstimatedCuesSize);
+ }
+
+ sp<WebmElement> elems[] = { ebml, segment, seekHead, info, tracks, cues };
+ size_t nElems = sizeof(elems) / sizeof(elems[0]);
+ uint64_t offsets[nElems];
+ uint64_t sizes[nElems];
+ for (uint32_t i = 0; i < nElems; i++) {
+ WebmElement *e = elems[i].get();
+ if (!e) {
+ continue;
+ }
+
+ uint64_t size;
+ offsets[i] = ::lseek(mFd, 0, SEEK_CUR);
+ sizes[i] = e->mSize;
+ e->write(mFd, size);
+ }
+
+ mSegmentOffset = offsets[1];
+ mSegmentDataStart = offsets[2];
+ mInfoOffset = offsets[3];
+ mInfoSize = sizes[3];
+ mTracksOffset = offsets[4];
+ mCuesOffset = offsets[5];
+
+ // start threads
+ if (params) {
+ params->findInt64(kKeyTime, &mStartTimestampUs);
+ }
+
+ initStream(kAudioIndex);
+ initStream(kVideoIndex);
+
+ mStreams[kAudioIndex].mThread->start();
+ mStreams[kVideoIndex].mThread->start();
+ mSinkThread->start();
+
+ mStarted = true;
+ return OK;
+}
+
+status_t WebmWriter::pause() {
+ if (mInitCheck != OK) {
+ return OK;
+ }
+ mPaused = true;
+ status_t err = OK;
+ for (int i = 0; i < kMaxStreams; ++i) {
+ if (mStreams[i].mThread == NULL) {
+ continue;
+ }
+ status_t status = mStreams[i].mThread->pause();
+ if (status != OK) {
+ err = status;
+ }
+ }
+ return err;
+}
+
+status_t WebmWriter::stop() {
+ return reset();
+}
+
+bool WebmWriter::reachedEOS() {
+ return !mSinkThread->running();
+}
+} /* namespace android */
diff --git a/media/libstagefright/webm/WebmWriter.h b/media/libstagefright/webm/WebmWriter.h
new file mode 100644
index 0000000..529dec8
--- /dev/null
+++ b/media/libstagefright/webm/WebmWriter.h
@@ -0,0 +1,130 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBMWRITER_H_
+#define WEBMWRITER_H_
+
+#include "WebmConstants.h"
+#include "WebmFrameThread.h"
+#include "LinkedBlockingQueue.h"
+
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/MediaWriter.h>
+
+#include <utils/Errors.h>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
+
+#include <stdint.h>
+
+using namespace webm;
+
+namespace android {
+
+class WebmWriter : public MediaWriter {
+public:
+ WebmWriter(int fd);
+ WebmWriter(const char *filename);
+ ~WebmWriter() { reset(); }
+
+
+ status_t addSource(const sp<MediaSource> &source);
+ status_t start(MetaData *param = NULL);
+ status_t stop();
+ status_t pause();
+ bool reachedEOS();
+
+ void setStartTimeOffsetMs(int ms) { mStartTimeOffsetMs = ms; }
+ int32_t getStartTimeOffsetMs() const { return mStartTimeOffsetMs; }
+
+private:
+ int mFd;
+ status_t mInitCheck;
+
+ uint64_t mTimeCodeScale;
+ int64_t mStartTimestampUs;
+ int32_t mStartTimeOffsetMs;
+
+ uint64_t mSegmentOffset;
+ uint64_t mSegmentDataStart;
+ uint64_t mInfoOffset;
+ uint64_t mInfoSize;
+ uint64_t mTracksOffset;
+ uint64_t mCuesOffset;
+
+ bool mPaused;
+ bool mStarted;
+ bool mIsFileSizeLimitExplicitlyRequested;
+ bool mIsRealTimeRecording;
+ bool mStreamableFile;
+ uint64_t mEstimatedCuesSize;
+
+ Mutex mLock;
+ List<sp<WebmElement> > mCuePoints;
+
+ enum {
+ kAudioIndex = 0,
+ kVideoIndex = 1,
+ kMaxStreams = 2,
+ };
+
+ struct WebmStream {
+ int mType;
+ const char *mName;
+ sp<WebmElement> (*mMakeTrack)(const sp<MetaData>&);
+
+ sp<MediaSource> mSource;
+ sp<WebmElement> mTrackEntry;
+ sp<WebmFrameSourceThread> mThread;
+ LinkedBlockingQueue<const sp<WebmFrame> > mSink;
+
+ WebmStream()
+ : mType(kInvalidType),
+ mName("Invalid"),
+ mMakeTrack(NULL) {
+ }
+
+ WebmStream(int type, const char *name, sp<WebmElement> (*makeTrack)(const sp<MetaData>&))
+ : mType(type),
+ mName(name),
+ mMakeTrack(makeTrack) {
+ }
+
+ WebmStream &operator=(const WebmStream &other) {
+ mType = other.mType;
+ mName = other.mName;
+ mMakeTrack = other.mMakeTrack;
+ return *this;
+ }
+ };
+ WebmStream mStreams[kMaxStreams];
+
+ sp<WebmFrameSinkThread> mSinkThread;
+
+ size_t numTracks();
+ uint64_t estimateCuesSize(int32_t bitRate);
+ void initStream(size_t idx);
+ void release();
+ status_t reset();
+
+ static sp<WebmElement> videoTrack(const sp<MetaData>& md);
+ static sp<WebmElement> audioTrack(const sp<MetaData>& md);
+
+ DISALLOW_EVIL_CONSTRUCTORS(WebmWriter);
+};
+
+} /* namespace android */
+#endif /* WEBMWRITER_H_ */
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.cpp b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
index 286ea13..2cb4786 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.cpp
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.cpp
@@ -29,6 +29,7 @@
#include <binder/IServiceManager.h>
#include <cutils/properties.h>
#include <media/IHDCP.h>
+#include <media/IMediaHTTPService.h>
#include <media/stagefright/foundation/ABitReader.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -749,7 +750,8 @@ status_t WifiDisplaySource::PlaybackSession::setupMediaPacketizer(
mExtractor = new NuMediaExtractor;
- status_t err = mExtractor->setDataSource(mMediaPath.c_str());
+ status_t err = mExtractor->setDataSource(
+ NULL /* httpService */, mMediaPath.c_str());
if (err != OK) {
return err;
@@ -1053,7 +1055,7 @@ status_t WifiDisplaySource::PlaybackSession::addVideoSource(
err = source->setMaxAcquiredBufferCount(numInputBuffers);
CHECK_EQ(err, (status_t)OK);
- mBufferQueue = source->getBufferQueue();
+ mProducer = source->getProducer();
return OK;
}
@@ -1077,7 +1079,7 @@ status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) {
}
sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() {
- return mBufferQueue;
+ return mProducer;
}
void WifiDisplaySource::PlaybackSession::requestIDRFrame() {
diff --git a/media/libstagefright/wifi-display/source/PlaybackSession.h b/media/libstagefright/wifi-display/source/PlaybackSession.h
index 5c8ee94..2824143 100644
--- a/media/libstagefright/wifi-display/source/PlaybackSession.h
+++ b/media/libstagefright/wifi-display/source/PlaybackSession.h
@@ -25,7 +25,6 @@
namespace android {
struct ABuffer;
-struct BufferQueue;
struct IHDCP;
struct IGraphicBufferProducer;
struct MediaPuller;
@@ -111,7 +110,7 @@ private:
int64_t mLastLifesignUs;
- sp<BufferQueue> mBufferQueue;
+ sp<IGraphicBufferProducer> mProducer;
KeyedVector<size_t, sp<Track> > mTracks;
ssize_t mVideoTrackIndex;
diff --git a/media/libstagefright/wifi-display/source/RepeaterSource.cpp b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
index cc8dee3..59d7e6e 100644
--- a/media/libstagefright/wifi-display/source/RepeaterSource.cpp
+++ b/media/libstagefright/wifi-display/source/RepeaterSource.cpp
@@ -79,6 +79,8 @@ status_t RepeaterSource::stop() {
ALOGV("stopping");
+ status_t err = mSource->stop();
+
if (mLooper != NULL) {
mLooper->stop();
mLooper.clear();
@@ -92,7 +94,6 @@ status_t RepeaterSource::stop() {
mBuffer = NULL;
}
- status_t err = mSource->stop();
ALOGV("stopped");
diff --git a/media/libstagefright/yuv/Android.mk b/media/libstagefright/yuv/Android.mk
index b3f7b1b..bb86dfc 100644
--- a/media/libstagefright/yuv/Android.mk
+++ b/media/libstagefright/yuv/Android.mk
@@ -12,5 +12,7 @@ LOCAL_SHARED_LIBRARIES := \
LOCAL_MODULE:= libstagefright_yuv
+LOCAL_CFLAGS += -Werror
+
include $(BUILD_SHARED_LIBRARY)
diff --git a/media/libstagefright/yuv/YUVImage.cpp b/media/libstagefright/yuv/YUVImage.cpp
index 7b9000b..bb3e2fd 100644
--- a/media/libstagefright/yuv/YUVImage.cpp
+++ b/media/libstagefright/yuv/YUVImage.cpp
@@ -226,8 +226,8 @@ void YUVImage::fastCopyRectangle420Planar(
&ySrcOffsetIncrement, &uSrcOffsetIncrement, &vSrcOffsetIncrement);
int32_t yDestOffsetIncrement;
- int32_t uDestOffsetIncrement;
- int32_t vDestOffsetIncrement;
+ int32_t uDestOffsetIncrement = 0;
+ int32_t vDestOffsetIncrement = 0;
destImage.getOffsetIncrementsPerDataRow(
&yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
@@ -309,7 +309,7 @@ void YUVImage::fastCopyRectangle420SemiPlanar(
int32_t yDestOffsetIncrement;
int32_t uDestOffsetIncrement;
- int32_t vDestOffsetIncrement;
+ int32_t vDestOffsetIncrement = 0;
destImage.getOffsetIncrementsPerDataRow(
&yDestOffsetIncrement, &uDestOffsetIncrement, &vDestOffsetIncrement);
@@ -393,9 +393,9 @@ bool YUVImage::writeToPPM(const char *filename) const {
fprintf(fp, "255\n");
for (int32_t y = 0; y < mHeight; ++y) {
for (int32_t x = 0; x < mWidth; ++x) {
- uint8_t yValue;
- uint8_t uValue;
- uint8_t vValue;
+ uint8_t yValue = 0u;
+ uint8_t uValue = 0u;
+ uint8_t vValue = 0u;
getPixelValue(x, y, &yValue, &uValue, & vValue);
uint8_t rValue;
diff --git a/media/mediaserver/Android.mk b/media/mediaserver/Android.mk
index d07bc99..3a280f0 100644
--- a/media/mediaserver/Android.mk
+++ b/media/mediaserver/Android.mk
@@ -15,6 +15,8 @@ LOCAL_SRC_FILES:= \
LOCAL_SHARED_LIBRARIES := \
libaudioflinger \
+ libaudiopolicyservice \
+ libcamera_metadata\
libcameraservice \
libmedialogservice \
libcutils \
@@ -23,7 +25,8 @@ LOCAL_SHARED_LIBRARIES := \
libmediaplayerservice \
libutils \
liblog \
- libbinder
+ libbinder \
+ libsoundtriggerservice
LOCAL_STATIC_LIBRARIES := \
libregistermsext
@@ -32,7 +35,10 @@ LOCAL_C_INCLUDES := \
frameworks/av/media/libmediaplayerservice \
frameworks/av/services/medialog \
frameworks/av/services/audioflinger \
- frameworks/av/services/camera/libcameraservice
+ frameworks/av/services/audiopolicy \
+ frameworks/av/services/camera/libcameraservice \
+ $(call include-path-for, audio-utils) \
+ frameworks/av/services/soundtrigger
LOCAL_MODULE:= mediaserver
LOCAL_32_BIT_ONLY := true
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index d5207d5..af1c9e6 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -34,10 +34,11 @@
#include "MediaLogService.h"
#include "MediaPlayerService.h"
#include "AudioPolicyService.h"
+#include "SoundTriggerHwService.h"
using namespace android;
-int main(int argc, char** argv)
+int main(int argc __unused, char** argv)
{
signal(SIGPIPE, SIG_IGN);
char value[PROPERTY_VALUE_MAX];
@@ -128,6 +129,7 @@ int main(int argc, char** argv)
MediaPlayerService::instantiate();
CameraService::instantiate();
AudioPolicyService::instantiate();
+ SoundTriggerHwService::instantiate();
registerExtensions();
ProcessState::self()->startThreadPool();
IPCThreadState::self()->joinThreadPool();
diff --git a/media/mtp/MtpProperty.cpp b/media/mtp/MtpProperty.cpp
index 375ed9a..c500901 100644
--- a/media/mtp/MtpProperty.cpp
+++ b/media/mtp/MtpProperty.cpp
@@ -17,6 +17,7 @@
#define LOG_TAG "MtpProperty"
#include <inttypes.h>
+#include <cutils/compiler.h>
#include "MtpDataPacket.h"
#include "MtpDebug.h"
#include "MtpProperty.h"
@@ -190,9 +191,9 @@ void MtpProperty::write(MtpDataPacket& packet) {
if (deviceProp)
writeValue(packet, mCurrentValue);
}
- packet.putUInt32(mGroupCode);
if (!deviceProp)
- packet.putUInt8(mFormFlag);
+ packet.putUInt32(mGroupCode);
+ packet.putUInt8(mFormFlag);
if (mFormFlag == kFormRange) {
writeValue(packet, mMinimumValue);
writeValue(packet, mMaximumValue);
@@ -518,8 +519,14 @@ void MtpProperty::writeValue(MtpDataPacket& packet, MtpPropertyValue& value) {
MtpPropertyValue* MtpProperty::readArrayValues(MtpDataPacket& packet, int& length) {
length = packet.getUInt32();
- if (length == 0)
+ // Fail if resulting array is over 2GB. This is because the maximum array
+ // size may be less than SIZE_MAX on some platforms.
+ if ( CC_UNLIKELY(
+ length == 0 ||
+ length >= INT32_MAX / sizeof(MtpPropertyValue)) ) {
+ length = 0;
return NULL;
+ }
MtpPropertyValue* result = new MtpPropertyValue[length];
for (int i = 0; i < length; i++)
readValue(packet, result[i]);
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index 155f645..157f2ce 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -94,6 +94,7 @@ static const MtpEventCode kSupportedEventCodes[] = {
MTP_EVENT_OBJECT_REMOVED,
MTP_EVENT_STORE_ADDED,
MTP_EVENT_STORE_REMOVED,
+ MTP_EVENT_DEVICE_PROP_CHANGED,
};
MtpServer::MtpServer(int fd, MtpDatabase* database, bool ptp,
@@ -262,6 +263,11 @@ void MtpServer::sendStoreRemoved(MtpStorageID id) {
sendEvent(MTP_EVENT_STORE_REMOVED, id);
}
+void MtpServer::sendDevicePropertyChanged(MtpDeviceProperty property) {
+ ALOGV("sendDevicePropertyChanged %d\n", property);
+ sendEvent(MTP_EVENT_DEVICE_PROP_CHANGED, property);
+}
+
void MtpServer::sendEvent(MtpEventCode code, uint32_t param1) {
if (mSessionOpen) {
mEvent.setEventCode(code);
diff --git a/media/mtp/MtpServer.h b/media/mtp/MtpServer.h
index dfa8258..b3a11e0 100644
--- a/media/mtp/MtpServer.h
+++ b/media/mtp/MtpServer.h
@@ -104,6 +104,7 @@ public:
void sendObjectAdded(MtpObjectHandle handle);
void sendObjectRemoved(MtpObjectHandle handle);
+ void sendDevicePropertyChanged(MtpDeviceProperty property);
private:
void sendStoreAdded(MtpStorageID id);
diff --git a/media/ndk/Android.mk b/media/ndk/Android.mk
new file mode 100644
index 0000000..8f795cd
--- /dev/null
+++ b/media/ndk/Android.mk
@@ -0,0 +1,52 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+ifneq ($(TARGET_BUILD_PDK), true)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+ NdkMediaCodec.cpp \
+ NdkMediaCrypto.cpp \
+ NdkMediaExtractor.cpp \
+ NdkMediaFormat.cpp \
+ NdkMediaMuxer.cpp \
+ NdkMediaDrm.cpp \
+
+LOCAL_MODULE:= libmediandk
+
+LOCAL_C_INCLUDES := \
+ bionic/libc/private \
+ frameworks/base/core/jni \
+ frameworks/av/include/ndk
+
+LOCAL_CFLAGS += -fvisibility=hidden -D EXPORT='__attribute__ ((visibility ("default")))'
+
+LOCAL_SHARED_LIBRARIES := \
+ libbinder \
+ libmedia \
+ libstagefright \
+ libstagefright_foundation \
+ liblog \
+ libutils \
+ libandroid_runtime \
+ libbinder \
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/media/ndk/NdkMediaCodec.cpp b/media/ndk/NdkMediaCodec.cpp
new file mode 100644
index 0000000..c5d8858
--- /dev/null
+++ b/media/ndk/NdkMediaCodec.cpp
@@ -0,0 +1,503 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCodec"
+
+#include "NdkMediaCodec.h"
+#include "NdkMediaError.h"
+#include "NdkMediaCryptoPriv.h"
+#include "NdkMediaFormatPriv.h"
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <gui/Surface.h>
+
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ABuffer.h>
+
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaErrors.h>
+
+using namespace android;
+
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ } else if (err == -EAGAIN) {
+ return (media_status_t) AMEDIACODEC_INFO_TRY_AGAIN_LATER;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+enum {
+ kWhatActivityNotify,
+ kWhatRequestActivityNotifications,
+ kWhatStopActivityNotifications,
+};
+
+
+class CodecHandler: public AHandler {
+private:
+ AMediaCodec* mCodec;
+public:
+ CodecHandler(AMediaCodec *codec);
+ virtual void onMessageReceived(const sp<AMessage> &msg);
+};
+
+typedef void (*OnCodecEvent)(AMediaCodec *codec, void *userdata);
+
+struct AMediaCodec {
+ sp<android::MediaCodec> mCodec;
+ sp<ALooper> mLooper;
+ sp<CodecHandler> mHandler;
+ sp<AMessage> mActivityNotification;
+ int32_t mGeneration;
+ bool mRequestedActivityNotification;
+ OnCodecEvent mCallback;
+ void *mCallbackUserData;
+};
+
+CodecHandler::CodecHandler(AMediaCodec *codec) {
+ mCodec = codec;
+}
+
+void CodecHandler::onMessageReceived(const sp<AMessage> &msg) {
+
+ switch (msg->what()) {
+ case kWhatRequestActivityNotifications:
+ {
+ if (mCodec->mRequestedActivityNotification) {
+ break;
+ }
+
+ mCodec->mCodec->requestActivityNotification(mCodec->mActivityNotification);
+ mCodec->mRequestedActivityNotification = true;
+ break;
+ }
+
+ case kWhatActivityNotify:
+ {
+ {
+ int32_t generation;
+ msg->findInt32("generation", &generation);
+
+ if (generation != mCodec->mGeneration) {
+ // stale
+ break;
+ }
+
+ mCodec->mRequestedActivityNotification = false;
+ }
+
+ if (mCodec->mCallback) {
+ mCodec->mCallback(mCodec, mCodec->mCallbackUserData);
+ }
+ break;
+ }
+
+ case kWhatStopActivityNotifications:
+ {
+ uint32_t replyID;
+ msg->senderAwaitsResponse(&replyID);
+
+ mCodec->mGeneration++;
+ mCodec->mRequestedActivityNotification = false;
+
+ sp<AMessage> response = new AMessage;
+ response->postReply(replyID);
+ break;
+ }
+
+ default:
+ ALOGE("shouldn't be here");
+ break;
+ }
+
+}
+
+
+static void requestActivityNotification(AMediaCodec *codec) {
+ (new AMessage(kWhatRequestActivityNotifications, codec->mHandler->id()))->post();
+}
+
+extern "C" {
+
+static AMediaCodec * createAMediaCodec(const char *name, bool name_is_type, bool encoder) {
+ AMediaCodec *mData = new AMediaCodec();
+ mData->mLooper = new ALooper;
+ mData->mLooper->setName("NDK MediaCodec_looper");
+ status_t ret = mData->mLooper->start(
+ false, // runOnCallingThread
+ true, // canCallJava XXX
+ PRIORITY_FOREGROUND);
+ if (name_is_type) {
+ mData->mCodec = android::MediaCodec::CreateByType(mData->mLooper, name, encoder);
+ } else {
+ mData->mCodec = android::MediaCodec::CreateByComponentName(mData->mLooper, name);
+ }
+ mData->mHandler = new CodecHandler(mData);
+ mData->mLooper->registerHandler(mData->mHandler);
+ mData->mGeneration = 1;
+ mData->mRequestedActivityNotification = false;
+ mData->mCallback = NULL;
+
+ return mData;
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createCodecByName(const char *name) {
+ return createAMediaCodec(name, false, false);
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type) {
+ return createAMediaCodec(mime_type, true, false);
+}
+
+EXPORT
+AMediaCodec* AMediaCodec_createEncoderByType(const char *name) {
+ return createAMediaCodec(name, true, true);
+}
+
+EXPORT
+media_status_t AMediaCodec_delete(AMediaCodec *mData) {
+ if (mData->mCodec != NULL) {
+ mData->mCodec->release();
+ mData->mCodec.clear();
+ }
+
+ if (mData->mLooper != NULL) {
+ mData->mLooper->unregisterHandler(mData->mHandler->id());
+ mData->mLooper->stop();
+ mData->mLooper.clear();
+ }
+ delete mData;
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodec_configure(
+ AMediaCodec *mData,
+ const AMediaFormat* format,
+ ANativeWindow* window,
+ AMediaCrypto *crypto,
+ uint32_t flags) {
+ sp<AMessage> nativeFormat;
+ AMediaFormat_getFormat(format, &nativeFormat);
+ ALOGV("configure with format: %s", nativeFormat->debugString(0).c_str());
+ sp<Surface> surface = NULL;
+ if (window != NULL) {
+ surface = (Surface*) window;
+ }
+
+ return translate_error(mData->mCodec->configure(nativeFormat, surface,
+ crypto ? crypto->mCrypto : NULL, flags));
+}
+
+EXPORT
+media_status_t AMediaCodec_start(AMediaCodec *mData) {
+ status_t ret = mData->mCodec->start();
+ if (ret != OK) {
+ return translate_error(ret);
+ }
+ mData->mActivityNotification = new AMessage(kWhatActivityNotify, mData->mHandler->id());
+ mData->mActivityNotification->setInt32("generation", mData->mGeneration);
+ requestActivityNotification(mData);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodec_stop(AMediaCodec *mData) {
+ media_status_t ret = translate_error(mData->mCodec->stop());
+
+ sp<AMessage> msg = new AMessage(kWhatStopActivityNotifications, mData->mHandler->id());
+ sp<AMessage> response;
+ msg->postAndAwaitResponse(&response);
+ mData->mActivityNotification.clear();
+
+ return ret;
+}
+
+EXPORT
+media_status_t AMediaCodec_flush(AMediaCodec *mData) {
+ return translate_error(mData->mCodec->flush());
+}
+
+EXPORT
+ssize_t AMediaCodec_dequeueInputBuffer(AMediaCodec *mData, int64_t timeoutUs) {
+ size_t idx;
+ status_t ret = mData->mCodec->dequeueInputBuffer(&idx, timeoutUs);
+ requestActivityNotification(mData);
+ if (ret == OK) {
+ return idx;
+ }
+ return translate_error(ret);
+}
+
+EXPORT
+uint8_t* AMediaCodec_getInputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
+ android::Vector<android::sp<android::ABuffer> > abufs;
+ if (mData->mCodec->getInputBuffers(&abufs) == 0) {
+ size_t n = abufs.size();
+ if (idx >= n) {
+ ALOGE("buffer index %d out of range", idx);
+ return NULL;
+ }
+ if (out_size != NULL) {
+ *out_size = abufs[idx]->capacity();
+ }
+ return abufs[idx]->data();
+ }
+ ALOGE("couldn't get input buffers");
+ return NULL;
+}
+
+EXPORT
+uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec *mData, size_t idx, size_t *out_size) {
+ android::Vector<android::sp<android::ABuffer> > abufs;
+ if (mData->mCodec->getOutputBuffers(&abufs) == 0) {
+ size_t n = abufs.size();
+ if (idx >= n) {
+ ALOGE("buffer index %d out of range", idx);
+ return NULL;
+ }
+ if (out_size != NULL) {
+ *out_size = abufs[idx]->capacity();
+ }
+ return abufs[idx]->data();
+ }
+ ALOGE("couldn't get output buffers");
+ return NULL;
+}
+
+EXPORT
+media_status_t AMediaCodec_queueInputBuffer(AMediaCodec *mData,
+ size_t idx, off_t offset, size_t size, uint64_t time, uint32_t flags) {
+
+ AString errorMsg;
+ status_t ret = mData->mCodec->queueInputBuffer(idx, offset, size, time, flags, &errorMsg);
+ return translate_error(ret);
+}
+
+EXPORT
+ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec *mData,
+ AMediaCodecBufferInfo *info, int64_t timeoutUs) {
+ size_t idx;
+ size_t offset;
+ size_t size;
+ uint32_t flags;
+ int64_t presentationTimeUs;
+ status_t ret = mData->mCodec->dequeueOutputBuffer(&idx, &offset, &size, &presentationTimeUs,
+ &flags, timeoutUs);
+ requestActivityNotification(mData);
+ switch (ret) {
+ case OK:
+ info->offset = offset;
+ info->size = size;
+ info->flags = flags;
+ info->presentationTimeUs = presentationTimeUs;
+ return idx;
+ case -EAGAIN:
+ return AMEDIACODEC_INFO_TRY_AGAIN_LATER;
+ case android::INFO_FORMAT_CHANGED:
+ return AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED;
+ case INFO_OUTPUT_BUFFERS_CHANGED:
+ return AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED;
+ default:
+ break;
+ }
+ return translate_error(ret);
+}
+
+EXPORT
+AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec *mData) {
+ sp<AMessage> format;
+ mData->mCodec->getOutputFormat(&format);
+ return AMediaFormat_fromMsg(&format);
+}
+
+EXPORT
+media_status_t AMediaCodec_releaseOutputBuffer(AMediaCodec *mData, size_t idx, bool render) {
+ if (render) {
+ return translate_error(mData->mCodec->renderOutputBufferAndRelease(idx));
+ } else {
+ return translate_error(mData->mCodec->releaseOutputBuffer(idx));
+ }
+}
+
+EXPORT
+media_status_t AMediaCodec_releaseOutputBufferAtTime(
+ AMediaCodec *mData, size_t idx, int64_t timestampNs) {
+ ALOGV("render @ %lld", timestampNs);
+ return translate_error(mData->mCodec->renderOutputBufferAndRelease(idx, timestampNs));
+}
+
+//EXPORT
+media_status_t AMediaCodec_setNotificationCallback(AMediaCodec *mData, OnCodecEvent callback, void *userdata) {
+ mData->mCallback = callback;
+ mData->mCallbackUserData = userdata;
+ return AMEDIA_OK;
+}
+
+typedef struct AMediaCodecCryptoInfo {
+ int numsubsamples;
+ uint8_t key[16];
+ uint8_t iv[16];
+ cryptoinfo_mode_t mode;
+ size_t *clearbytes;
+ size_t *encryptedbytes;
+} AMediaCodecCryptoInfo;
+
+EXPORT
+media_status_t AMediaCodec_queueSecureInputBuffer(
+ AMediaCodec* codec,
+ size_t idx,
+ off_t offset,
+ AMediaCodecCryptoInfo* crypto,
+ uint64_t time,
+ uint32_t flags) {
+
+ CryptoPlugin::SubSample *subSamples = new CryptoPlugin::SubSample[crypto->numsubsamples];
+ for (int i = 0; i < crypto->numsubsamples; i++) {
+ subSamples[i].mNumBytesOfClearData = crypto->clearbytes[i];
+ subSamples[i].mNumBytesOfEncryptedData = crypto->encryptedbytes[i];
+ }
+
+ AString errormsg;
+ status_t err = codec->mCodec->queueSecureInputBuffer(idx,
+ offset,
+ subSamples,
+ crypto->numsubsamples,
+ crypto->key,
+ crypto->iv,
+ (CryptoPlugin::Mode) crypto->mode,
+ time,
+ flags,
+ &errormsg);
+ if (err != 0) {
+ ALOGE("queSecureInputBuffer: %s", errormsg.c_str());
+ }
+ delete [] subSamples;
+ return translate_error(err);
+}
+
+
+
+EXPORT
+AMediaCodecCryptoInfo *AMediaCodecCryptoInfo_new(
+ int numsubsamples,
+ uint8_t key[16],
+ uint8_t iv[16],
+ cryptoinfo_mode_t mode,
+ size_t *clearbytes,
+ size_t *encryptedbytes) {
+
+ // size needed to store all the crypto data
+ size_t cryptosize = sizeof(AMediaCodecCryptoInfo) + sizeof(size_t) * numsubsamples * 2;
+ AMediaCodecCryptoInfo *ret = (AMediaCodecCryptoInfo*) malloc(cryptosize);
+ if (!ret) {
+ ALOGE("couldn't allocate %d bytes", cryptosize);
+ return NULL;
+ }
+ ret->numsubsamples = numsubsamples;
+ memcpy(ret->key, key, 16);
+ memcpy(ret->iv, iv, 16);
+ ret->mode = mode;
+
+ // clearbytes and encryptedbytes point at the actual data, which follows
+ ret->clearbytes = (size_t*) (ret + 1); // point immediately after the struct
+ ret->encryptedbytes = ret->clearbytes + numsubsamples; // point after the clear sizes
+
+ memcpy(ret->clearbytes, clearbytes, numsubsamples * sizeof(size_t));
+ memcpy(ret->encryptedbytes, encryptedbytes, numsubsamples * sizeof(size_t));
+
+ return ret;
+}
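+
+// Illustrative sketch only (not part of the change itself): constructing crypto info for
+// two subsamples, showing how the clear/encrypted arrays line up with numsubsamples and
+// end up inside the single allocation above. The key, iv and mode constant are placeholder
+// assumptions (AMEDIACODECRYPTOINFO_MODE_AES_CTR as declared in NdkMediaCodec.h).
+//
+//   size_t clear[2]     = { 23, 0 };     // bytes left in the clear, per subsample
+//   size_t encrypted[2] = { 0, 4096 };   // encrypted bytes, per subsample
+//   uint8_t key[16] = {0}, iv[16] = {0};
+//   AMediaCodecCryptoInfo *ci = AMediaCodecCryptoInfo_new(2, key, iv,
+//           AMEDIACODECRYPTOINFO_MODE_AES_CTR, clear, encrypted);
+//   // ... AMediaCodec_queueSecureInputBuffer(codec, idx, 0, ci, pts, 0); ...
+//   AMediaCodecCryptoInfo_delete(ci);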
+
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_delete(AMediaCodecCryptoInfo* info) {
+ free(info);
+ return AMEDIA_OK;
+}
+
+EXPORT
+size_t AMediaCodecCryptoInfo_getNumSubSamples(AMediaCodecCryptoInfo* ci) {
+ return ci->numsubsamples;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getKey(AMediaCodecCryptoInfo* ci, uint8_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->key, 16);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getIV(AMediaCodecCryptoInfo* ci, uint8_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->iv, 16);
+ return AMEDIA_OK;
+}
+
+EXPORT
+cryptoinfo_mode_t AMediaCodecCryptoInfo_getMode(AMediaCodecCryptoInfo* ci) {
+ if (!ci) {
+ return (cryptoinfo_mode_t) AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ return ci->mode;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getClearBytes(AMediaCodecCryptoInfo* ci, size_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->clearbytes, sizeof(size_t) * ci->numsubsamples);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaCodecCryptoInfo_getEncryptedBytes(AMediaCodecCryptoInfo* ci, size_t *dst) {
+ if (!ci) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!dst) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ memcpy(dst, ci->encryptedbytes, sizeof(size_t) * ci->numsubsamples);
+ return AMEDIA_OK;
+}
+
+} // extern "C"
+
diff --git a/media/ndk/NdkMediaCrypto.cpp b/media/ndk/NdkMediaCrypto.cpp
new file mode 100644
index 0000000..1cc2f1a
--- /dev/null
+++ b/media/ndk/NdkMediaCrypto.cpp
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaCrypto"
+
+
+#include "NdkMediaCrypto.h"
+#include "NdkMediaCodec.h"
+#include "NdkMediaFormatPriv.h"
+
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <binder/IServiceManager.h>
+#include <media/ICrypto.h>
+#include <media/IMediaPlayerService.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+
+static sp<ICrypto> makeCrypto() {
+ sp<IServiceManager> sm = defaultServiceManager();
+
+ sp<IBinder> binder =
+ sm->getService(String16("media.player"));
+
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
+ if (service == NULL) {
+ return NULL;
+ }
+
+ sp<ICrypto> crypto = service->makeCrypto();
+
+ if (crypto == NULL || (crypto->initCheck() != OK && crypto->initCheck() != NO_INIT)) {
+ return NULL;
+ }
+
+ return crypto;
+}
+
+struct AMediaCrypto {
+ sp<ICrypto> mCrypto;
+};
+
+
+extern "C" {
+
+
+EXPORT
+bool AMediaCrypto_isCryptoSchemeSupported(const AMediaUUID uuid) {
+ sp<ICrypto> crypto = makeCrypto();
+ if (crypto == NULL) {
+ return false;
+ }
+ return crypto->isCryptoSchemeSupported(uuid);
+}
+
+EXPORT
+bool AMediaCrypto_requiresSecureDecoderComponent(const char *mime) {
+ sp<ICrypto> crypto = makeCrypto();
+ if (crypto == NULL) {
+ return false;
+ }
+ return crypto->requiresSecureDecoderComponent(mime);
+}
+
+EXPORT
+AMediaCrypto* AMediaCrypto_new(const AMediaUUID uuid, const void *data, size_t datasize) {
+
+ sp<ICrypto> tmp = makeCrypto();
+ if (tmp == NULL) {
+ return NULL;
+ }
+
+ if (tmp->createPlugin(uuid, data, datasize) != 0) {
+ return NULL;
+ }
+
+ AMediaCrypto *crypto = new AMediaCrypto();
+ crypto->mCrypto = tmp;
+
+ return crypto;
+}
+
+EXPORT
+void AMediaCrypto_delete(AMediaCrypto* crypto) {
+ delete crypto;
+}
+
+
+
+} // extern "C"
+
diff --git a/media/ndk/NdkMediaCryptoPriv.h b/media/ndk/NdkMediaCryptoPriv.h
new file mode 100644
index 0000000..14ea928
--- /dev/null
+++ b/media/ndk/NdkMediaCryptoPriv.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_CRYPTO_PRIV_H
+#define _NDK_MEDIA_CRYPTO_PRIV_H
+
+#include <sys/types.h>
+#include <utils/StrongPointer.h>
+#include <media/ICrypto.h>
+
+using namespace android;
+
+struct AMediaCrypto {
+ sp<ICrypto> mCrypto;
+};
+
+#endif // _NDK_MEDIA_CRYPTO_PRIV_H
diff --git a/media/ndk/NdkMediaDrm.cpp b/media/ndk/NdkMediaDrm.cpp
new file mode 100644
index 0000000..7a1048c
--- /dev/null
+++ b/media/ndk/NdkMediaDrm.cpp
@@ -0,0 +1,728 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaDrm"
+
+#include "NdkMediaDrm.h"
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <gui/Surface.h>
+
+#include <media/IDrm.h>
+#include <media/IDrmClient.h>
+#include <media/stagefright/MediaErrors.h>
+#include <binder/IServiceManager.h>
+#include <media/IMediaPlayerService.h>
+#include <ndk/NdkMediaCrypto.h>
+
+
+using namespace android;
+
+typedef Vector<uint8_t> idvec_t;
+
+struct DrmListener: virtual public BnDrmClient
+{
+private:
+ AMediaDrm *mObj;
+ AMediaDrmEventListener mListener;
+
+public:
+ DrmListener(AMediaDrm *obj, AMediaDrmEventListener listener) : mObj(obj), mListener(listener) {}
+ void notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj);
+};
+
+struct AMediaDrm {
+ sp<IDrm> mDrm;
+ sp<IDrmClient> mDrmClient;
+ List<idvec_t> mIds;
+ KeyedVector<String8, String8> mQueryResults;
+ Vector<uint8_t> mKeyRequest;
+ Vector<uint8_t> mProvisionRequest;
+ String8 mProvisionUrl;
+ String8 mPropertyString;
+ Vector<uint8_t> mPropertyByteArray;
+ List<Vector<uint8_t> > mSecureStops;
+ sp<DrmListener> mListener;
+};
+
+void DrmListener::notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj) {
+ if (!mListener) {
+ return;
+ }
+
+ AMediaDrmSessionId sessionId = {NULL, 0};
+ int32_t sessionIdSize = obj->readInt32();
+ if (sessionIdSize) {
+ uint8_t *sessionIdData = new uint8_t[sessionIdSize];
+ sessionId.ptr = sessionIdData;
+ sessionId.length = sessionIdSize;
+ obj->read(sessionIdData, sessionId.length);
+ }
+
+ int32_t dataSize = obj->readInt32();
+ uint8_t *data = NULL;
+ if (dataSize) {
+ data = new uint8_t[dataSize];
+ obj->read(data, dataSize);
+ }
+
+ // translate DrmPlugin event types into their NDK equivalents
+ AMediaDrmEventType ndkEventType;
+ switch(eventType) {
+ case DrmPlugin::kDrmPluginEventProvisionRequired:
+ ndkEventType = EVENT_PROVISION_REQUIRED;
+ break;
+ case DrmPlugin::kDrmPluginEventKeyNeeded:
+ ndkEventType = EVENT_KEY_REQUIRED;
+ break;
+ case DrmPlugin::kDrmPluginEventKeyExpired:
+ ndkEventType = EVENT_KEY_EXPIRED;
+ break;
+ case DrmPlugin::kDrmPluginEventVendorDefined:
+ ndkEventType = EVENT_VENDOR_DEFINED;
+ break;
+ default:
+ ALOGE("Invalid event DrmPlugin::EventType %d, ignored", (int)eventType);
+ return;
+ }
+
+ (*mListener)(mObj, &sessionId, ndkEventType, extra, data, dataSize);
+
+ delete [] sessionId.ptr;
+ delete [] data;
+}
+
+
+extern "C" {
+
+static media_status_t translateStatus(status_t status) {
+ media_status_t result = AMEDIA_ERROR_UNKNOWN;
+ switch (status) {
+ case OK:
+ result = AMEDIA_OK;
+ break;
+ case android::ERROR_DRM_NOT_PROVISIONED:
+ result = AMEDIA_DRM_NOT_PROVISIONED;
+ break;
+ case android::ERROR_DRM_RESOURCE_BUSY:
+ result = AMEDIA_DRM_RESOURCE_BUSY;
+ break;
+ case android::ERROR_DRM_DEVICE_REVOKED:
+ result = AMEDIA_DRM_DEVICE_REVOKED;
+ break;
+ case android::ERROR_DRM_CANNOT_HANDLE:
+ result = AMEDIA_ERROR_INVALID_PARAMETER;
+ break;
+ case android::ERROR_DRM_TAMPER_DETECTED:
+ result = AMEDIA_DRM_TAMPER_DETECTED;
+ break;
+ case android::ERROR_DRM_SESSION_NOT_OPENED:
+ result = AMEDIA_DRM_SESSION_NOT_OPENED;
+ break;
+ case android::ERROR_DRM_NO_LICENSE:
+ result = AMEDIA_DRM_NEED_KEY;
+ break;
+ case android::ERROR_DRM_LICENSE_EXPIRED:
+ result = AMEDIA_DRM_LICENSE_EXPIRED;
+ break;
+ default:
+ break;
+ }
+ return result;
+}
+
+static sp<IDrm> CreateDrm() {
+ sp<IServiceManager> sm = defaultServiceManager();
+
+ sp<IBinder> binder =
+ sm->getService(String16("media.player"));
+
+ sp<IMediaPlayerService> service =
+ interface_cast<IMediaPlayerService>(binder);
+
+ if (service == NULL) {
+ return NULL;
+ }
+
+ sp<IDrm> drm = service->makeDrm();
+
+ if (drm == NULL || (drm->initCheck() != OK && drm->initCheck() != NO_INIT)) {
+ return NULL;
+ }
+
+ return drm;
+}
+
+
+static sp<IDrm> CreateDrmFromUUID(const AMediaUUID uuid) {
+ sp<IDrm> drm = CreateDrm();
+
+ if (drm == NULL) {
+ return NULL;
+ }
+
+ status_t err = drm->createPlugin(uuid);
+
+ if (err != OK) {
+ return NULL;
+ }
+
+ return drm;
+}
+
+EXPORT
+bool AMediaDrm_isCryptoSchemeSupported(const AMediaUUID uuid, const char *mimeType) {
+ sp<IDrm> drm = CreateDrm();
+
+ if (drm == NULL) {
+ return false;
+ }
+
+ String8 mimeStr = mimeType ? String8(mimeType) : String8("");
+ return drm->isCryptoSchemeSupported(uuid, mimeStr);
+}
+
+EXPORT
+AMediaDrm* AMediaDrm_createByUUID(const AMediaUUID uuid) {
+ AMediaDrm *mObj = new AMediaDrm();
+ mObj->mDrm = CreateDrmFromUUID(uuid);
+ return mObj;
+}
+
+EXPORT
+void AMediaDrm_release(AMediaDrm *mObj) {
+ if (mObj->mDrm != NULL) {
+ mObj->mDrm->setListener(NULL);
+ mObj->mDrm->destroyPlugin();
+ mObj->mDrm.clear();
+ }
+ delete mObj;
+}
+
+EXPORT
+media_status_t AMediaDrm_setOnEventListener(AMediaDrm *mObj, AMediaDrmEventListener listener) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ mObj->mListener = new DrmListener(mObj, listener);
+ mObj->mDrm->setListener(mObj->mListener);
+ return AMEDIA_OK;
+}
+
+
+static bool findId(AMediaDrm *mObj, const AMediaDrmByteArray &id, List<idvec_t>::iterator &iter) {
+ iter = mObj->mIds.begin();
+    while (iter != mObj->mIds.end()) {
+        if (iter->array() == id.ptr && iter->size() == id.length) {
+            return true;
+        }
+        // advance, otherwise a non-matching id would loop forever
+        ++iter;
+    }
+ return false;
+}
+
+EXPORT
+media_status_t AMediaDrm_openSession(AMediaDrm *mObj, AMediaDrmSessionId *sessionId) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ Vector<uint8_t> session;
+ status_t status = mObj->mDrm->openSession(session);
+ if (status == OK) {
+ mObj->mIds.push_front(session);
+ List<idvec_t>::iterator iter = mObj->mIds.begin();
+ sessionId->ptr = iter->array();
+ sessionId->length = iter->size();
+ }
+    return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_closeSession(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+ mObj->mDrm->closeSession(*iter);
+ mObj->mIds.erase(iter);
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_getKeyRequest(AMediaDrm *mObj, const AMediaDrmScope *scope,
+ const uint8_t *init, size_t initSize, const char *mimeType, AMediaDrmKeyType keyType,
+ const AMediaDrmKeyValue *optionalParameters, size_t numOptionalParameters,
+ const uint8_t **keyRequest, size_t *keyRequestSize) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!mimeType || !scope || !keyRequest || !keyRequestSize) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *scope, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ Vector<uint8_t> mdInit;
+ mdInit.appendArray(init, initSize);
+ DrmPlugin::KeyType mdKeyType;
+ switch (keyType) {
+ case KEY_TYPE_STREAMING:
+ mdKeyType = DrmPlugin::kKeyType_Streaming;
+ break;
+ case KEY_TYPE_OFFLINE:
+ mdKeyType = DrmPlugin::kKeyType_Offline;
+ break;
+ case KEY_TYPE_RELEASE:
+ mdKeyType = DrmPlugin::kKeyType_Release;
+ break;
+ default:
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ KeyedVector<String8, String8> mdOptionalParameters;
+ for (size_t i = 0; i < numOptionalParameters; i++) {
+ mdOptionalParameters.add(String8(optionalParameters[i].mKey),
+ String8(optionalParameters[i].mValue));
+ }
+ String8 defaultUrl;
+ status_t status = mObj->mDrm->getKeyRequest(*iter, mdInit, String8(mimeType),
+ mdKeyType, mdOptionalParameters, mObj->mKeyRequest, defaultUrl);
+ if (status != OK) {
+ return translateStatus(status);
+ } else {
+ *keyRequest = mObj->mKeyRequest.array();
+ *keyRequestSize = mObj->mKeyRequest.size();
+ }
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_provideKeyResponse(AMediaDrm *mObj, const AMediaDrmScope *scope,
+ const uint8_t *response, size_t responseSize, AMediaDrmKeySetId *keySetId) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!scope || !response || !responseSize || !keySetId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *scope, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+ Vector<uint8_t> mdResponse;
+ mdResponse.appendArray(response, responseSize);
+
+ Vector<uint8_t> mdKeySetId;
+ status_t status = mObj->mDrm->provideKeyResponse(*iter, mdResponse, mdKeySetId);
+ if (status == OK) {
+ mObj->mIds.push_front(mdKeySetId);
+ List<idvec_t>::iterator iter = mObj->mIds.begin();
+ keySetId->ptr = iter->array();
+ keySetId->length = iter->size();
+ } else {
+ keySetId->ptr = NULL;
+ keySetId->length = 0;
+ }
+    return translateStatus(status);
+}
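+
+// Illustrative sketch only (not part of the change itself): the round trip the two calls
+// above are designed for. "drm" is an AMediaDrm* from AMediaDrm_createByUUID, and
+// initData/initDataSize/mime and postToLicenseServer() are app-side assumptions.
+//
+//   AMediaDrmSessionId session;
+//   AMediaDrm_openSession(drm, &session);
+//   const uint8_t *req; size_t reqSize;
+//   AMediaDrm_getKeyRequest(drm, &session, initData, initDataSize, mime,
+//           KEY_TYPE_STREAMING, NULL, 0, &req, &reqSize);
+//   // req points at mKeyRequest above, so it stays valid only until the next
+//   // AMediaDrm_getKeyRequest call on the same object
+//   size_t respSize;
+//   const uint8_t *resp = postToLicenseServer(req, reqSize, &respSize);
+//   AMediaDrmKeySetId keySetId;
+//   AMediaDrm_provideKeyResponse(drm, &session, resp, respSize, &keySetId);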
+
+EXPORT
+media_status_t AMediaDrm_restoreKeys(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const AMediaDrmKeySetId *keySetId) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId || !keySetId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+ Vector<uint8_t> keySet;
+ keySet.appendArray(keySetId->ptr, keySetId->length);
+ return translateStatus(mObj->mDrm->restoreKeys(*iter, keySet));
+}
+
+EXPORT
+media_status_t AMediaDrm_removeKeys(AMediaDrm *mObj, const AMediaDrmSessionId *keySetId) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!keySetId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ status_t status;
+ if (!findId(mObj, *keySetId, iter)) {
+ Vector<uint8_t> keySet;
+ keySet.appendArray(keySetId->ptr, keySetId->length);
+ status = mObj->mDrm->removeKeys(keySet);
+ } else {
+ status = mObj->mDrm->removeKeys(*iter);
+ mObj->mIds.erase(iter);
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_queryKeyStatus(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ AMediaDrmKeyValue *keyValuePairs, size_t *numPairs) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId || !numPairs) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->queryKeyStatus(*iter, mObj->mQueryResults);
+ if (status != OK) {
+ *numPairs = 0;
+ return translateStatus(status);
+ }
+
+ if (mObj->mQueryResults.size() > *numPairs) {
+ *numPairs = mObj->mQueryResults.size();
+ return AMEDIA_DRM_SHORT_BUFFER;
+ }
+
+ for (size_t i = 0; i < mObj->mQueryResults.size(); i++) {
+ keyValuePairs[i].mKey = mObj->mQueryResults.keyAt(i).string();
+        keyValuePairs[i].mValue = mObj->mQueryResults.valueAt(i).string();
+ }
+ *numPairs = mObj->mQueryResults.size();
+ return AMEDIA_OK;
+}
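+
+// Illustrative sketch only (not part of the change itself): the short-buffer convention
+// used above. A call with too small an array fails with AMEDIA_DRM_SHORT_BUFFER and
+// *numPairs updated to the required count, so the caller can size a second attempt.
+//
+//   size_t numPairs = 0;
+//   if (AMediaDrm_queryKeyStatus(drm, &session, NULL, &numPairs)
+//           == AMEDIA_DRM_SHORT_BUFFER) {
+//       AMediaDrmKeyValue *pairs = new AMediaDrmKeyValue[numPairs];
+//       AMediaDrm_queryKeyStatus(drm, &session, pairs, &numPairs);
+//       // ... use pairs[i].mKey / pairs[i].mValue ...
+//       delete[] pairs;
+//   }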
+
+EXPORT
+media_status_t AMediaDrm_getProvisionRequest(AMediaDrm *mObj, const uint8_t **provisionRequest,
+ size_t *provisionRequestSize, const char **serverUrl) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+    if (!provisionRequest || !provisionRequestSize || !serverUrl) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ status_t status = mObj->mDrm->getProvisionRequest(String8(""), String8(""),
+ mObj->mProvisionRequest, mObj->mProvisionUrl);
+ if (status != OK) {
+ return translateStatus(status);
+ } else {
+ *provisionRequest = mObj->mProvisionRequest.array();
+ *provisionRequestSize = mObj->mProvisionRequest.size();
+ *serverUrl = mObj->mProvisionUrl.string();
+ }
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_provideProvisionResponse(AMediaDrm *mObj,
+ const uint8_t *response, size_t responseSize) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!response || !responseSize) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ Vector<uint8_t> mdResponse;
+ mdResponse.appendArray(response, responseSize);
+
+ Vector<uint8_t> unused;
+ return translateStatus(mObj->mDrm->provideProvisionResponse(mdResponse, unused, unused));
+}
+
+EXPORT
+media_status_t AMediaDrm_getSecureStops(AMediaDrm *mObj,
+ AMediaDrmSecureStop *secureStops, size_t *numSecureStops) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!numSecureStops) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ status_t status = mObj->mDrm->getSecureStops(mObj->mSecureStops);
+ if (status != OK) {
+ *numSecureStops = 0;
+ return translateStatus(status);
+ }
+    if (*numSecureStops < mObj->mSecureStops.size()) {
+        // report the required count so the caller can retry with a larger array
+        *numSecureStops = mObj->mSecureStops.size();
+        return AMEDIA_DRM_SHORT_BUFFER;
+    }
+ List<Vector<uint8_t> >::iterator iter = mObj->mSecureStops.begin();
+ size_t i = 0;
+ while (iter != mObj->mSecureStops.end()) {
+ secureStops[i].ptr = iter->array();
+ secureStops[i].length = iter->size();
+ ++iter;
+ ++i;
+ }
+ *numSecureStops = mObj->mSecureStops.size();
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaDrm_releaseSecureStops(AMediaDrm *mObj,
+ const AMediaDrmSecureStop *ssRelease) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!ssRelease) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ Vector<uint8_t> release;
+ release.appendArray(ssRelease->ptr, ssRelease->length);
+ return translateStatus(mObj->mDrm->releaseSecureStops(release));
+}
+
+
+EXPORT
+media_status_t AMediaDrm_getPropertyString(AMediaDrm *mObj, const char *propertyName,
+ const char **propertyValue) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!propertyName || !propertyValue) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ status_t status = mObj->mDrm->getPropertyString(String8(propertyName),
+ mObj->mPropertyString);
+
+ if (status == OK) {
+ *propertyValue = mObj->mPropertyString.string();
+ } else {
+ *propertyValue = NULL;
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_getPropertyByteArray(AMediaDrm *mObj,
+ const char *propertyName, AMediaDrmByteArray *propertyValue) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!propertyName || !propertyValue) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+
+ status_t status = mObj->mDrm->getPropertyByteArray(String8(propertyName),
+ mObj->mPropertyByteArray);
+
+ if (status == OK) {
+ propertyValue->ptr = mObj->mPropertyByteArray.array();
+ propertyValue->length = mObj->mPropertyByteArray.size();
+ } else {
+ propertyValue->ptr = NULL;
+ propertyValue->length = 0;
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_setPropertyString(AMediaDrm *mObj,
+ const char *propertyName, const char *value) {
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+
+ return translateStatus(mObj->mDrm->setPropertyString(String8(propertyName),
+ String8(value)));
+}
+
+EXPORT
+media_status_t AMediaDrm_setPropertyByteArray(AMediaDrm *mObj,
+ const char *propertyName, const uint8_t *value, size_t valueSize) {
+
+ Vector<uint8_t> byteArray;
+ byteArray.appendArray(value, valueSize);
+
+    return translateStatus(mObj->mDrm->setPropertyByteArray(String8(propertyName),
+            byteArray));
+}
+
+
+static media_status_t encrypt_decrypt_common(AMediaDrm *mObj,
+ const AMediaDrmSessionId &sessionId,
+ const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
+ const uint8_t *input, uint8_t *output, size_t dataSize, bool encrypt) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->setCipherAlgorithm(*iter, String8(cipherAlgorithm));
+ if (status != OK) {
+ return translateStatus(status);
+ }
+
+ Vector<uint8_t> keyIdVec;
+ const size_t kKeyIdSize = 16;
+ keyIdVec.appendArray(keyId, kKeyIdSize);
+
+ Vector<uint8_t> inputVec;
+ inputVec.appendArray(input, dataSize);
+
+ Vector<uint8_t> ivVec;
+ const size_t kIvSize = 16;
+ ivVec.appendArray(iv, kIvSize);
+
+ Vector<uint8_t> outputVec;
+    // assign to the outer status; declaring a new one here would shadow it and the
+    // result of the encrypt/decrypt call would never be checked below
+    if (encrypt) {
+        status = mObj->mDrm->encrypt(*iter, keyIdVec, inputVec, ivVec, outputVec);
+    } else {
+        status = mObj->mDrm->decrypt(*iter, keyIdVec, inputVec, ivVec, outputVec);
+    }
+ if (status == OK) {
+ memcpy(output, outputVec.array(), outputVec.size());
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_encrypt(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
+ const uint8_t *input, uint8_t *output, size_t dataSize) {
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ return encrypt_decrypt_common(mObj, *sessionId, cipherAlgorithm, keyId, iv,
+ input, output, dataSize, true);
+}
+
+EXPORT
+media_status_t AMediaDrm_decrypt(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
+ const uint8_t *input, uint8_t *output, size_t dataSize) {
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ return encrypt_decrypt_common(mObj, *sessionId, cipherAlgorithm, keyId, iv,
+ input, output, dataSize, false);
+}
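+
+// Illustrative sketch only (not part of the change itself): both wrappers above go
+// through encrypt_decrypt_common, which assumes 16 byte key ids and ivs. The algorithm
+// string is plugin-dependent; "AES/CBC/NoPadding" here is an assumption, as are "drm",
+// "session", "in" and the buffer size.
+//
+//   uint8_t keyId[16] = { /* ... */ };
+//   uint8_t iv[16] = { /* ... */ };
+//   uint8_t out[4096];
+//   AMediaDrm_decrypt(drm, &session, "AES/CBC/NoPadding", keyId, iv, in, out, sizeof(out));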
+
+EXPORT
+media_status_t AMediaDrm_sign(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *macAlgorithm, uint8_t *keyId, uint8_t *message, size_t messageSize,
+ uint8_t *signature, size_t *signatureSize) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->setMacAlgorithm(*iter, String8(macAlgorithm));
+ if (status != OK) {
+ return translateStatus(status);
+ }
+
+ Vector<uint8_t> keyIdVec;
+ const size_t kKeyIdSize = 16;
+ keyIdVec.appendArray(keyId, kKeyIdSize);
+
+ Vector<uint8_t> messageVec;
+ messageVec.appendArray(message, messageSize);
+
+ Vector<uint8_t> signatureVec;
+ status = mObj->mDrm->sign(*iter, keyIdVec, messageVec, signatureVec);
+ if (signatureVec.size() > *signatureSize) {
+ return AMEDIA_DRM_SHORT_BUFFER;
+ }
+ if (status == OK) {
+ memcpy(signature, signatureVec.array(), signatureVec.size());
+ }
+ return translateStatus(status);
+}
+
+EXPORT
+media_status_t AMediaDrm_verify(AMediaDrm *mObj, const AMediaDrmSessionId *sessionId,
+ const char *macAlgorithm, uint8_t *keyId, const uint8_t *message, size_t messageSize,
+ const uint8_t *signature, size_t signatureSize) {
+
+ if (!mObj || mObj->mDrm == NULL) {
+ return AMEDIA_ERROR_INVALID_OBJECT;
+ }
+ if (!sessionId) {
+ return AMEDIA_ERROR_INVALID_PARAMETER;
+ }
+ List<idvec_t>::iterator iter;
+ if (!findId(mObj, *sessionId, iter)) {
+ return AMEDIA_DRM_SESSION_NOT_OPENED;
+ }
+
+ status_t status = mObj->mDrm->setMacAlgorithm(*iter, String8(macAlgorithm));
+ if (status != OK) {
+ return translateStatus(status);
+ }
+
+ Vector<uint8_t> keyIdVec;
+ const size_t kKeyIdSize = 16;
+ keyIdVec.appendArray(keyId, kKeyIdSize);
+
+ Vector<uint8_t> messageVec;
+ messageVec.appendArray(message, messageSize);
+
+ Vector<uint8_t> signatureVec;
+ signatureVec.appendArray(signature, signatureSize);
+
+ bool match;
+ status = mObj->mDrm->verify(*iter, keyIdVec, messageVec, signatureVec, match);
+ if (status == OK) {
+ return match ? AMEDIA_OK : AMEDIA_DRM_VERIFY_FAILED;
+ }
+ return translateStatus(status);
+}
+
+} // extern "C"
+
diff --git a/media/ndk/NdkMediaExtractor.cpp b/media/ndk/NdkMediaExtractor.cpp
new file mode 100644
index 0000000..492e002
--- /dev/null
+++ b/media/ndk/NdkMediaExtractor.cpp
@@ -0,0 +1,355 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaExtractor"
+
+
+#include "NdkMediaError.h"
+#include "NdkMediaExtractor.h"
+#include "NdkMediaFormatPriv.h"
+
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <media/hardware/CryptoAPI.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/NuMediaExtractor.h>
+#include <media/IMediaHTTPService.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+struct AMediaExtractor {
+ sp<NuMediaExtractor> mImpl;
+ sp<ABuffer> mPsshBuf;
+
+};
+
+extern "C" {
+
+EXPORT
+AMediaExtractor* AMediaExtractor_new() {
+ ALOGV("ctor");
+ AMediaExtractor *mData = new AMediaExtractor();
+ mData->mImpl = new NuMediaExtractor();
+ return mData;
+}
+
+EXPORT
+media_status_t AMediaExtractor_delete(AMediaExtractor *mData) {
+ ALOGV("dtor");
+ delete mData;
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor *mData, int fd, off64_t offset, off64_t length) {
+ ALOGV("setDataSource(%d, %lld, %lld)", fd, offset, length);
+ return translate_error(mData->mImpl->setDataSource(fd, offset, length));
+}
+
+EXPORT
+media_status_t AMediaExtractor_setDataSource(AMediaExtractor *mData, const char *location) {
+ ALOGV("setDataSource(%s)", location);
+ // TODO: add header support
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jobject service = NULL;
+    if (env == NULL) {
+        // no JNIEnv means this thread is not attached to the Java VM, so there is
+        // no pending exception to clear either
+        ALOGE("setDataSource(path) must be called from a thread attached to the Java VM");
+        return AMEDIA_ERROR_UNSUPPORTED;
+    }
+
+ jclass mediahttpclass = env->FindClass("android/media/MediaHTTPService");
+ if (mediahttpclass == NULL) {
+ ALOGE("can't find MediaHttpService");
+ env->ExceptionClear();
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ jmethodID mediaHttpCreateMethod = env->GetStaticMethodID(mediahttpclass,
+ "createHttpServiceBinderIfNecessary", "(Ljava/lang/String;)Landroid/os/IBinder;");
+ if (mediaHttpCreateMethod == NULL) {
+ ALOGE("can't find method");
+ env->ExceptionClear();
+ return AMEDIA_ERROR_UNSUPPORTED;
+ }
+
+ jstring jloc = env->NewStringUTF(location);
+
+ service = env->CallStaticObjectMethod(mediahttpclass, mediaHttpCreateMethod, jloc);
+ env->DeleteLocalRef(jloc);
+
+ sp<IMediaHTTPService> httpService;
+ if (service != NULL) {
+ sp<IBinder> binder = ibinderForJavaObject(env, service);
+ httpService = interface_cast<IMediaHTTPService>(binder);
+ }
+
+ status_t err = mData->mImpl->setDataSource(httpService, location, NULL);
+ env->ExceptionClear();
+ return translate_error(err);
+}
+
+EXPORT
+size_t AMediaExtractor_getTrackCount(AMediaExtractor *mData) {
+ return mData->mImpl->countTracks();
+}
+
+EXPORT
+AMediaFormat* AMediaExtractor_getTrackFormat(AMediaExtractor *mData, size_t idx) {
+ sp<AMessage> format;
+ mData->mImpl->getTrackFormat(idx, &format);
+ return AMediaFormat_fromMsg(&format);
+}
+
+EXPORT
+media_status_t AMediaExtractor_selectTrack(AMediaExtractor *mData, size_t idx) {
+ ALOGV("selectTrack(%z)", idx);
+ return translate_error(mData->mImpl->selectTrack(idx));
+}
+
+EXPORT
+media_status_t AMediaExtractor_unselectTrack(AMediaExtractor *mData, size_t idx) {
+ ALOGV("unselectTrack(%z)", idx);
+ return translate_error(mData->mImpl->unselectTrack(idx));
+}
+
+EXPORT
+bool AMediaExtractor_advance(AMediaExtractor *mData) {
+ //ALOGV("advance");
+ return mData->mImpl->advance();
+}
+
+EXPORT
+media_status_t AMediaExtractor_seekTo(AMediaExtractor *ex, int64_t seekPosUs, SeekMode mode) {
+ android::MediaSource::ReadOptions::SeekMode sfmode;
+ if (mode == AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC) {
+ sfmode = android::MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC;
+ } else if (mode == AMEDIAEXTRACTOR_SEEK_CLOSEST_SYNC) {
+ sfmode = android::MediaSource::ReadOptions::SEEK_CLOSEST_SYNC;
+ } else {
+ sfmode = android::MediaSource::ReadOptions::SEEK_NEXT_SYNC;
+ }
+
+ return translate_error(ex->mImpl->seekTo(seekPosUs, sfmode));
+}
+
+EXPORT
+ssize_t AMediaExtractor_readSampleData(AMediaExtractor *mData, uint8_t *buffer, size_t capacity) {
+ //ALOGV("readSampleData");
+ sp<ABuffer> tmp = new ABuffer(buffer, capacity);
+ if (mData->mImpl->readSampleData(tmp) == OK) {
+ return tmp->size();
+ }
+ return -1;
+}
+
+EXPORT
+uint32_t AMediaExtractor_getSampleFlags(AMediaExtractor *mData) {
+ int sampleFlags = 0;
+ sp<MetaData> meta;
+ status_t err = mData->mImpl->getSampleMeta(&meta);
+ if (err != OK) {
+ return -1;
+ }
+ int32_t val;
+ if (meta->findInt32(kKeyIsSyncFrame, &val) && val != 0) {
+ sampleFlags |= AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC;
+ }
+
+ uint32_t type;
+ const void *data;
+ size_t size;
+ if (meta->findData(kKeyEncryptedSizes, &type, &data, &size)) {
+ sampleFlags |= AMEDIAEXTRACTOR_SAMPLE_FLAG_ENCRYPTED;
+ }
+ return sampleFlags;
+}
+
+EXPORT
+int AMediaExtractor_getSampleTrackIndex(AMediaExtractor *mData) {
+ size_t idx;
+ if (mData->mImpl->getSampleTrackIndex(&idx) != OK) {
+ return -1;
+ }
+ return idx;
+}
+
+EXPORT
+int64_t AMediaExtractor_getSampleTime(AMediaExtractor *mData) {
+ int64_t time;
+ if (mData->mImpl->getSampleTime(&time) != OK) {
+ return -1;
+ }
+ return time;
+}
+
+EXPORT
+PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor *ex) {
+
+ if (ex->mPsshBuf != NULL) {
+ return (PsshInfo*) ex->mPsshBuf->data();
+ }
+
+ sp<AMessage> format;
+ ex->mImpl->getFileFormat(&format);
+ sp<ABuffer> buffer;
+ if(!format->findBuffer("pssh", &buffer)) {
+ return NULL;
+ }
+
+ // the format of the buffer is 1 or more of:
+ // {
+ // 16 byte uuid
+ // 4 byte data length N
+ // N bytes of data
+ // }
+
+ // Determine the number of entries in the source data.
+ // Since we got the data from stagefright, we trust it is valid and properly formatted.
+ const uint8_t* data = buffer->data();
+ size_t len = buffer->size();
+ size_t numentries = 0;
+ while (len > 0) {
+ numentries++;
+
+ // skip uuid
+ data += 16;
+ len -= 16;
+
+ // get data length
+ uint32_t datalen = *((uint32_t*)data);
+ data += 4;
+ len -= 4;
+
+ // skip the data
+ data += datalen;
+ len -= datalen;
+ }
+
+    // We need room for the PsshInfo header, one PsshEntry per source entry, and the raw
+    // entry payloads themselves (the source size minus the 16 byte uuid and 4 byte
+    // length prefix of each entry). Using the struct sizes keeps this correct on both
+    // 32 and 64 bit builds.
+    size_t datasize = buffer->size() - numentries * 20;
+    size_t newsize = sizeof(PsshInfo) + numentries * sizeof(PsshEntry) + datasize;
+    ex->mPsshBuf = new ABuffer(newsize);
+    ex->mPsshBuf->setRange(0, newsize);
+
+    // copy data
+    const uint8_t* src = buffer->data();
+    PsshInfo* info = (PsshInfo*) ex->mPsshBuf->data();
+    info->numentries = numentries;
+
+    // the entry payloads are stored after the array of PsshEntry
+    uint8_t* dstdata = (uint8_t*) &info->entries[numentries];
+    for (size_t i = 0; i < numentries; i++) {
+        // copy uuid
+        memcpy(info->entries[i].uuid, src, 16);
+        src += 16;
+
+        // get/copy data length
+        uint32_t datalen = *((uint32_t*)src);
+        info->entries[i].datalen = datalen;
+        src += 4;
+
+        // the entry points at the actual data, which we store after the array of PsshEntry
+        info->entries[i].data = dstdata;
+
+        // copy the actual data
+        memcpy(dstdata, src, datalen);
+        dstdata += datalen;
+        src += datalen;
+    }
+
+    return (PsshInfo*) ex->mPsshBuf->data();
+}
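+
+// Illustrative sketch only (not part of the change itself): walking the structure built
+// above, assuming an AMediaExtractor* "ex" whose source actually carries pssh data.
+//
+//   PsshInfo *pssh = AMediaExtractor_getPsshInfo(ex);
+//   if (pssh != NULL) {
+//       for (size_t i = 0; i < pssh->numentries; i++) {
+//           PsshEntry *entry = &pssh->entries[i];
+//           // entry->uuid names the DRM scheme, entry->data/datalen is its payload
+//       }
+//   }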
+
+EXPORT
+AMediaCodecCryptoInfo *AMediaExtractor_getSampleCryptoInfo(AMediaExtractor *ex) {
+ sp<MetaData> meta;
+ if(ex->mImpl->getSampleMeta(&meta) != 0) {
+ return NULL;
+ }
+
+ uint32_t type;
+ const void *crypteddata;
+ size_t cryptedsize;
+ if (!meta->findData(kKeyEncryptedSizes, &type, &crypteddata, &cryptedsize)) {
+ return NULL;
+ }
+ size_t numSubSamples = cryptedsize / sizeof(size_t);
+
+ const void *cleardata;
+ size_t clearsize;
+ if (meta->findData(kKeyPlainSizes, &type, &cleardata, &clearsize)) {
+ if (clearsize != cryptedsize) {
+ // The two must be of the same length.
+ return NULL;
+ }
+ }
+
+ const void *key;
+ size_t keysize;
+    if (meta->findData(kKeyCryptoKey, &type, &key, &keysize)) {
+        if (keysize != 16) {
+            // Keys must be 16 bytes in length.
+ return NULL;
+ }
+ }
+
+ const void *iv;
+ size_t ivsize;
+ if (meta->findData(kKeyCryptoIV, &type, &iv, &ivsize)) {
+ if (ivsize != 16) {
+ // IVs must be 16 bytes in length.
+ return NULL;
+ }
+ }
+
+ int32_t mode;
+ if (!meta->findInt32(kKeyCryptoMode, &mode)) {
+ mode = CryptoPlugin::kMode_AES_CTR;
+ }
+
+ return AMediaCodecCryptoInfo_new(
+ numSubSamples,
+ (uint8_t*) key,
+ (uint8_t*) iv,
+ (cryptoinfo_mode_t) mode,
+ (size_t*) cleardata,
+ (size_t*) crypteddata);
+}
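+
+// Illustrative sketch only (not part of the change itself): how the crypto info above is
+// typically fed to a decoder, assuming "ex" and "codec" were set up elsewhere and using
+// the AMediaCodec input-buffer calls declared in NdkMediaCodec.h.
+//
+//   ssize_t inIdx = AMediaCodec_dequeueInputBuffer(codec, 10000 /*us*/);
+//   if (inIdx >= 0) {
+//       size_t cap;
+//       uint8_t *buf = AMediaCodec_getInputBuffer(codec, inIdx, &cap);
+//       ssize_t n = AMediaExtractor_readSampleData(ex, buf, cap);
+//       AMediaCodecCryptoInfo *ci = AMediaExtractor_getSampleCryptoInfo(ex);
+//       int64_t pts = AMediaExtractor_getSampleTime(ex);
+//       if (ci != NULL) {
+//           AMediaCodec_queueSecureInputBuffer(codec, inIdx, 0, ci, pts, 0);
+//           AMediaCodecCryptoInfo_delete(ci);
+//       } else {
+//           AMediaCodec_queueInputBuffer(codec, inIdx, 0, n, pts, 0);
+//       }
+//       AMediaExtractor_advance(ex);
+//   }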
+
+
+} // extern "C"
+
diff --git a/media/ndk/NdkMediaFormat.cpp b/media/ndk/NdkMediaFormat.cpp
new file mode 100644
index 0000000..67dc2c2
--- /dev/null
+++ b/media/ndk/NdkMediaFormat.cpp
@@ -0,0 +1,259 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaFormat"
+
+
+#include "NdkMediaFormat.h"
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
+struct AMediaFormat {
+ sp<AMessage> mFormat;
+ String8 mDebug;
+ KeyedVector<String8, String8> mStringCache;
+};
+
+extern "C" {
+
+// private functions for conversion to/from AMessage
+AMediaFormat* AMediaFormat_fromMsg(const void* data) {
+ ALOGV("private ctor");
+ AMediaFormat* mData = new AMediaFormat();
+ mData->mFormat = *((sp<AMessage>*)data);
+ return mData;
+}
+
+void AMediaFormat_getFormat(const AMediaFormat* mData, void* dest) {
+ *((sp<AMessage>*)dest) = mData->mFormat;
+}
+
+
+/*
+ * public function follow
+ */
+EXPORT
+AMediaFormat *AMediaFormat_new() {
+ ALOGV("ctor");
+ sp<AMessage> msg = new AMessage();
+ return AMediaFormat_fromMsg(&msg);
+}
+
+EXPORT
+media_status_t AMediaFormat_delete(AMediaFormat *mData) {
+ ALOGV("dtor");
+ delete mData;
+ return AMEDIA_OK;
+}
+
+
+EXPORT
+const char* AMediaFormat_toString(AMediaFormat *mData) {
+ sp<AMessage> f = mData->mFormat;
+ String8 ret;
+ int num = f->countEntries();
+ for (int i = 0; i < num; i++) {
+ if (i != 0) {
+ ret.append(", ");
+ }
+ AMessage::Type t;
+ const char *name = f->getEntryNameAt(i, &t);
+ ret.append(name);
+ ret.append(": ");
+ switch (t) {
+ case AMessage::kTypeInt32:
+ {
+ int32_t val;
+ f->findInt32(name, &val);
+ ret.appendFormat("int32(%d)", val);
+ break;
+ }
+ case AMessage::kTypeInt64:
+ {
+ int64_t val;
+ f->findInt64(name, &val);
+ ret.appendFormat("int64(%lld)", val);
+ break;
+ }
+ case AMessage::kTypeSize:
+ {
+ size_t val;
+ f->findSize(name, &val);
+ ret.appendFormat("size_t(%d)", val);
+ break;
+ }
+ case AMessage::kTypeFloat:
+ {
+ float val;
+ f->findFloat(name, &val);
+ ret.appendFormat("float(%f)", val);
+ break;
+ }
+ case AMessage::kTypeDouble:
+ {
+ double val;
+ f->findDouble(name, &val);
+ ret.appendFormat("double(%f)", val);
+ break;
+ }
+ case AMessage::kTypeString:
+ {
+ AString val;
+ f->findString(name, &val);
+ ret.appendFormat("string(%s)", val.c_str());
+ break;
+ }
+ case AMessage::kTypeBuffer:
+ {
+ ret.appendFormat("data");
+ break;
+ }
+ default:
+ {
+ ret.appendFormat("unknown(%d)", t);
+ break;
+ }
+ }
+ }
+ ret.append("}");
+ mData->mDebug = ret;
+ return mData->mDebug.string();
+}
+
+EXPORT
+bool AMediaFormat_getInt32(AMediaFormat* format, const char *name, int32_t *out) {
+ return format->mFormat->findInt32(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getInt64(AMediaFormat* format, const char *name, int64_t *out) {
+ return format->mFormat->findInt64(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getFloat(AMediaFormat* format, const char *name, float *out) {
+ return format->mFormat->findFloat(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getSize(AMediaFormat* format, const char *name, size_t *out) {
+ return format->mFormat->findSize(name, out);
+}
+
+EXPORT
+bool AMediaFormat_getBuffer(AMediaFormat* format, const char *name, void** data, size_t *outsize) {
+ sp<ABuffer> buf;
+ if (format->mFormat->findBuffer(name, &buf)) {
+ *data = buf->data() + buf->offset();
+ *outsize = buf->size();
+ return true;
+ }
+ return false;
+}
+
+EXPORT
+bool AMediaFormat_getString(AMediaFormat* mData, const char *name, const char **out) {
+
+ for (size_t i = 0; i < mData->mStringCache.size(); i++) {
+ if (strcmp(mData->mStringCache.keyAt(i).string(), name) == 0) {
+ mData->mStringCache.removeItemsAt(i, 1);
+ break;
+ }
+ }
+
+ AString tmp;
+ if (mData->mFormat->findString(name, &tmp)) {
+ String8 ret(tmp.c_str());
+ mData->mStringCache.add(String8(name), ret);
+ *out = ret.string();
+ return true;
+ }
+ return false;
+}
+
+EXPORT
+void AMediaFormat_setInt32(AMediaFormat* format, const char *name, int32_t value) {
+ format->mFormat->setInt32(name, value);
+}
+
+EXPORT
+void AMediaFormat_setInt64(AMediaFormat* format, const char *name, int64_t value) {
+ format->mFormat->setInt64(name, value);
+}
+
+EXPORT
+void AMediaFormat_setFloat(AMediaFormat* format, const char* name, float value) {
+ format->mFormat->setFloat(name, value);
+}
+
+EXPORT
+void AMediaFormat_setString(AMediaFormat* format, const char* name, const char* value) {
+ // AMessage::setString() makes a copy of the string
+ format->mFormat->setString(name, value, strlen(value));
+}
+
+EXPORT
+void AMediaFormat_setBuffer(AMediaFormat* format, const char* name, void* data, size_t size) {
+ // the ABuffer(void*, size_t) constructor doesn't take ownership of the data, so create
+ // a new buffer and copy the data into it
+ sp<ABuffer> buf = new ABuffer(size);
+ memcpy(buf->data(), data, size);
+ buf->setRange(0, size);
+ // AMessage::setBuffer() increases the refcount of the buffer
+ format->mFormat->setBuffer(name, buf);
+}
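+
+// Illustrative sketch only (not part of the change itself): building a format with the
+// setters above. The mime string and values are placeholders; the key constants are the
+// ones exported below.
+//
+//   AMediaFormat *fmt = AMediaFormat_new();
+//   AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, "audio/mp4a-latm");
+//   AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_SAMPLE_RATE, 48000);
+//   AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_CHANNEL_COUNT, 2);
+//   // ... configure a codec or add a muxer track with fmt, then ...
+//   AMediaFormat_delete(fmt);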
+
+
+EXPORT const char* AMEDIAFORMAT_KEY_AAC_PROFILE = "aac-profile";
+EXPORT const char* AMEDIAFORMAT_KEY_BIT_RATE = "bitrate";
+EXPORT const char* AMEDIAFORMAT_KEY_CHANNEL_COUNT = "channel-count";
+EXPORT const char* AMEDIAFORMAT_KEY_CHANNEL_MASK = "channel-mask";
+EXPORT const char* AMEDIAFORMAT_KEY_COLOR_FORMAT = "color-format";
+EXPORT const char* AMEDIAFORMAT_KEY_DURATION = "durationUs";
+EXPORT const char* AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL = "flac-compression-level";
+EXPORT const char* AMEDIAFORMAT_KEY_FRAME_RATE = "frame-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_HEIGHT = "height";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_ADTS = "is-adts";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT = "is-autoselect";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_DEFAULT = "is-default";
+EXPORT const char* AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE = "is-forced-subtitle";
+EXPORT const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL = "i-frame-interval";
+EXPORT const char* AMEDIAFORMAT_KEY_LANGUAGE = "language";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_HEIGHT = "max-height";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE = "max-input-size";
+EXPORT const char* AMEDIAFORMAT_KEY_MAX_WIDTH = "max-width";
+EXPORT const char* AMEDIAFORMAT_KEY_MIME = "mime";
+EXPORT const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP = "push-blank-buffers-on-shutdown";
+EXPORT const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER = "repeat-previous-frame-after";
+EXPORT const char* AMEDIAFORMAT_KEY_SAMPLE_RATE = "sample-rate";
+EXPORT const char* AMEDIAFORMAT_KEY_WIDTH = "width";
+EXPORT const char* AMEDIAFORMAT_KEY_STRIDE = "stride";
+
+
+} // extern "C"
+
+
diff --git a/media/ndk/NdkMediaFormatPriv.h b/media/ndk/NdkMediaFormatPriv.h
new file mode 100644
index 0000000..02342d9
--- /dev/null
+++ b/media/ndk/NdkMediaFormatPriv.h
@@ -0,0 +1,44 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * This file defines an NDK API.
+ * Do not remove methods.
+ * Do not change method signatures.
+ * Do not change the value of constants.
+ * Do not change the size of any of the classes defined in here.
+ * Do not reference types that are not part of the NDK.
+ * Do not #include files that aren't part of the NDK.
+ */
+
+#ifndef _NDK_MEDIA_FORMAT_PRIV_H
+#define _NDK_MEDIA_FORMAT_PRIV_H
+
+#include <NdkMediaFormat.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+AMediaFormat* AMediaFormat_fromMsg(void*);
+void AMediaFormat_getFormat(const AMediaFormat* mData, void* dest);
+
+#ifdef __cplusplus
+} // extern "C"
+#endif
+
+#endif // _NDK_MEDIA_FORMAT_PRIV_H
+
diff --git a/media/ndk/NdkMediaMuxer.cpp b/media/ndk/NdkMediaMuxer.cpp
new file mode 100644
index 0000000..b1b0362
--- /dev/null
+++ b/media/ndk/NdkMediaMuxer.cpp
@@ -0,0 +1,107 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "NdkMediaMuxer"
+
+
+#include "NdkMediaMuxer.h"
+#include "NdkMediaCodec.h"
+#include "NdkMediaFormatPriv.h"
+
+
+#include <utils/Log.h>
+#include <utils/StrongPointer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/MetaData.h>
+#include <media/stagefright/MediaMuxer.h>
+#include <media/IMediaHTTPService.h>
+#include <android_runtime/AndroidRuntime.h>
+#include <android_util_Binder.h>
+
+#include <jni.h>
+
+using namespace android;
+
+static media_status_t translate_error(status_t err) {
+ if (err == OK) {
+ return AMEDIA_OK;
+ }
+ ALOGE("sf error code: %d", err);
+ return AMEDIA_ERROR_UNKNOWN;
+}
+
+struct AMediaMuxer {
+ sp<MediaMuxer> mImpl;
+
+};
+
+extern "C" {
+
+EXPORT
+AMediaMuxer* AMediaMuxer_new(int fd, OutputFormat format) {
+ ALOGV("ctor");
+ AMediaMuxer *mData = new AMediaMuxer();
+ mData->mImpl = new MediaMuxer(fd, (android::MediaMuxer::OutputFormat)format);
+ return mData;
+}
+
+EXPORT
+media_status_t AMediaMuxer_delete(AMediaMuxer *muxer) {
+ ALOGV("dtor");
+ delete muxer;
+ return AMEDIA_OK;
+}
+
+EXPORT
+media_status_t AMediaMuxer_setLocation(AMediaMuxer *muxer, float latitude, float longitude) {
+    return translate_error(muxer->mImpl->setLocation(latitude * 10000, longitude * 10000));
+}
+
+EXPORT
+media_status_t AMediaMuxer_setOrientationHint(AMediaMuxer *muxer, int degrees) {
+ return translate_error(muxer->mImpl->setOrientationHint(degrees));
+}
+
+EXPORT
+ssize_t AMediaMuxer_addTrack(AMediaMuxer *muxer, const AMediaFormat *format) {
+    sp<AMessage> msg;
+    AMediaFormat_getFormat(format, &msg);
+    // addTrack() returns the new track index on success and a negative error code on
+    // failure, so only map failures through translate_error
+    ssize_t ret = muxer->mImpl->addTrack(msg);
+    return (ret >= 0) ? ret : translate_error(ret);
+}
+
+EXPORT
+media_status_t AMediaMuxer_start(AMediaMuxer *muxer) {
+ return translate_error(muxer->mImpl->start());
+}
+
+EXPORT
+media_status_t AMediaMuxer_stop(AMediaMuxer *muxer) {
+ return translate_error(muxer->mImpl->stop());
+}
+
+EXPORT
+media_status_t AMediaMuxer_writeSampleData(AMediaMuxer *muxer,
+ size_t trackIdx, const uint8_t *data, const AMediaCodecBufferInfo *info) {
+ sp<ABuffer> buf = new ABuffer((void*)(data + info->offset), info->size);
+ return translate_error(
+ muxer->mImpl->writeSampleData(buf, trackIdx, info->presentationTimeUs, info->flags));
+}
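+
+// Illustrative sketch only (not part of the change itself): the expected call order for
+// this API, assuming a writable "fd", a track format "fmt" and encoded samples arriving
+// as (data, info) pairs; AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4 is the constant declared in
+// NdkMediaMuxer.h.
+//
+//   AMediaMuxer *mux = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
+//   ssize_t track = AMediaMuxer_addTrack(mux, fmt);
+//   AMediaMuxer_start(mux);
+//   AMediaMuxer_writeSampleData(mux, track, data, &info);   // once per encoded sample
+//   AMediaMuxer_stop(mux);
+//   AMediaMuxer_delete(mux);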
+
+
+} // extern "C"
+